├── wheelhouse_uploader ├── __init__.py ├── __main__.py ├── cmd.py ├── fetch.py ├── commandline.py ├── upload.py └── utils.py ├── setup.cfg ├── .gitignore ├── LICENSE ├── setup.py ├── CHANGELOG.md └── README.md /wheelhouse_uploader/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /wheelhouse_uploader/__main__.py: -------------------------------------------------------------------------------- 1 | if __name__ == "__main__": 2 | from wheelhouse_uploader.commandline import main 3 | main() 4 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [wheel] 2 | universal=1 3 | 4 | [wheelhouse_uploader] 5 | artifact_indexes= 6 | http://fe9dda1b59826c724773-78698a7408acd71644d46cbd2b29d6b9.r1.cf2.rackcdn.com/ 7 | 8 | [nosetests] 9 | with-doctest=1 10 | 11 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | bin/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # Installer logs 26 | pip-log.txt 27 | pip-delete-this-directory.txt 28 | 29 | # Unit test / coverage reports 30 | htmlcov/ 31 | .tox/ 32 | .coverage 33 | .cache 34 | nosetests.xml 35 | coverage.xml 36 | 37 | # Translations 38 | *.mo 39 | 40 | # Mr Developer 41 | .mr.developer.cfg 42 | .project 43 | .pydevproject 44 | 45 | # Rope 46 | .ropeproject 47 | 48 | # Django stuff: 49 | *.log 50 | *.pot 51 | 52 | # Sphinx documentation 53 | docs/_build/ 54 | 55 | 
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 Olivier Grisel 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | # Authors: Olivier Grisel 3 | # MinRK 4 | # LICENSE: MIT 5 | from setuptools import setup 6 | 7 | try: 8 | # For dogfooding only 9 | import wheelhouse_uploader.cmd 10 | cmdclass = vars(wheelhouse_uploader.cmd) 11 | except ImportError: 12 | cmdclass = {} 13 | 14 | 15 | setup( 16 | name="wheelhouse-uploader", 17 | version="0.10.4.dev0", 18 | description="Upload wheels to any cloud storage supported by Libcloud", 19 | maintainer="Olivier Grisel", 20 | maintainer_email="olivier.grisel@ensta.org", 21 | license="MIT", 22 | url='http://github.com/ogrisel/wheelhouse-uploader', 23 | packages=[ 24 | 'wheelhouse_uploader', 25 | ], 26 | setup_requires=['setuptools-markdown'], 27 | install_requires=[ 28 | "setuptools>=0.9", # required for PEP 440 version parsing 29 | "packaging", 30 | "certifi", 31 | 'futures; python_version == "2.7"', 32 | # https://github.com/ogrisel/wheelhouse-uploader/issues/29 33 | "apache-libcloud==2.2.1", 34 | ], 35 | long_description_markdown_filename='README.md', 36 | classifiers=[ 37 | 'License :: OSI Approved', 38 | 'Programming Language :: Python', 39 | 'Topic :: Software Development', 40 | 'Operating System :: Microsoft :: Windows', 41 | 'Operating System :: POSIX', 42 | 'Operating System :: Unix', 43 | 'Operating System :: MacOS', 44 | 'Programming Language :: Python :: 2', 45 | 'Programming Language :: Python :: 2.7', 46 | 'Programming Language :: Python :: 3', 47 | 'Programming Language :: Python :: 3.5', 48 | 'Programming Language :: Python :: 3.6', 49 | 'Programming Language :: Python :: 3.7', 50 | ], 51 | cmdclass=cmdclass, 52 | ) 53 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # CHANGELOG 2 | 3 | ## 0.10.3 - 2020-08-04 4 | 5 | - Fix support for PyPy tags: 6 | https://github.com/ogrisel/wheelhouse-uploader/pull/37 7 | 8 | - Make it possible to fetch sdist: 9 | 
https://github.com/ogrisel/wheelhouse-uploader/pull/36 10 | 11 | ## 0.10.2 - 2020-02-12 12 | 13 | - Support absolute artifacts URLs. 14 | 15 | ## 0.10.1 - 2018-07-03 16 | 17 | - Pinning apache-libcloud==2.2.1 dependency to workaround 18 | 2.3.0 installation problems under Windows. 19 | 20 | ## 0.10.0 - 2018-07-03 21 | 22 | - Upgrade dependency to latest apache-libcloud 23 | - Use temporary files for upload and downloads to workaround 24 | a bug with Python 3.7. 25 | 26 | ## 0.9.7 - 2018-07-02 27 | 28 | - Add explicit dependency on certifi to resolve SSL 29 | verification issues on appveyor. 30 | https://github.com/ogrisel/wheelhouse-uploader/issues/26 31 | 32 | ## 0.9.5 - 2017-04-26 33 | 34 | - Pin dependency apache-libcloud==1.1.0 to workaround 35 | regression introduced by version 2.0.0: 36 | https://github.com/ogrisel/wheelhouse-uploader/issues/22 37 | 38 | ## 0.9.4 - 2017-02-13 39 | 40 | - Fix bad link markup in HTML index by Joe Rickerby 41 | https://github.com/ogrisel/wheelhouse-uploader/issues/19 42 | 43 | ## 0.9.3 - 2016-06-12 44 | 45 | - Fix support for universal wheel filenames. 46 | https://github.com/ogrisel/wheelhouse-uploader/issues/16 47 | 48 | ## 0.9.2 - 2015-12-29 49 | 50 | - Fix index.html update issue: it would not display recently 51 | uploaded wheels due to the eventually consistent behavior of 52 | container listing. 53 | https://github.com/ogrisel/wheelhouse-uploader/issues/15 54 | 55 | ## 0.9.1 - 2015-12-03 56 | 57 | - More informative error message in case of invalid credentials. 58 | 59 | ## 0.9.0 - 2015-12-03 60 | 61 | - Add a time stamp to the local version segment of uploaded dev 62 | wheels to make it possible to keep the 5 most recent uploads 63 | while making it possible for pip to download the most recent 64 | dev build at any time. 65 | https://github.com/ogrisel/wheelhouse-uploader/issues/14 66 | 67 | ## 0.8.0 - 2015-11-30 68 | 69 | - Delete previous versions of recently uploaded 'dev' packages. 
70 | https://github.com/ogrisel/wheelhouse-uploader/issues/13 71 | -------------------------------------------------------------------------------- /wheelhouse_uploader/cmd.py: -------------------------------------------------------------------------------- 1 | """Custom distutils to automate commands for PyPI deployments 2 | 3 | The 'fetch_artifacts' command download the artifacts from the matching project 4 | name and version from public HTML repositories to the dist folder. 5 | 6 | The 'upload_all' command scans the content of the `dist` folder for any 7 | previously generated artifacts that match the current project version number 8 | and upload them all to PyPI at once. 9 | 10 | """ 11 | import os 12 | try: 13 | from configparser import ConfigParser, NoSectionError, NoOptionError 14 | except ImportError: 15 | from ConfigParser import ConfigParser, NoSectionError, NoOptionError 16 | from distutils.cmd import Command 17 | from distutils.command.upload import upload 18 | from distutils.errors import DistutilsOptionError 19 | from pkg_resources import safe_version 20 | 21 | from wheelhouse_uploader.utils import parse_filename 22 | from wheelhouse_uploader.fetch import download_artifacts 23 | 24 | __all__ = ['fetch_artifacts', 'upload_all'] 25 | 26 | 27 | SETUP_FILE = 'setup.cfg' 28 | SECTION = 'wheelhouse_uploader' 29 | KEY = 'artifact_indexes' 30 | 31 | 32 | class fetch_artifacts(Command): 33 | 34 | user_options = [] 35 | 36 | def initialize_options(self): 37 | config = ConfigParser() 38 | try: 39 | config.read(SETUP_FILE) 40 | artifact_indexes = config.get(SECTION, KEY) 41 | lines = [l.strip() for l in artifact_indexes.strip().split('\n')] 42 | self.index_urls = [l for l in lines if l and not l.startswith('#')] 43 | except (IOError, KeyError, NoOptionError, NoSectionError): 44 | raise DistutilsOptionError( 45 | 'Missing url of artifact index configured with key "%s" of ' 46 | 'section "%s" in file "%s"' % (KEY, SECTION, SETUP_FILE)) 47 | 48 | def 
finalize_options(self): 49 | pass 50 | 51 | def run(self): 52 | metadata = self.distribution.metadata 53 | project_name = metadata.get_name() 54 | version = metadata.get_version() 55 | for index_url in self.index_urls: 56 | download_artifacts(index_url, 'dist', project_name, 57 | version=version, max_workers=4) 58 | 59 | 60 | class upload_all(upload): 61 | 62 | def run(self): 63 | metadata = self.distribution.metadata 64 | project_name = metadata.get_name() 65 | version = safe_version(metadata.get_version()) 66 | print("Collecting artifacts for %s==%s in 'dist' folder:" % 67 | (project_name, version)) 68 | dist_files = [] 69 | for filename in os.listdir('dist'): 70 | try: 71 | _, file_version, pyversion, command = parse_filename( 72 | filename, project_name=project_name) 73 | if file_version != version: 74 | continue 75 | except ValueError: 76 | continue 77 | filepath = os.path.join('dist', filename) 78 | dist_files.append((command, pyversion, filepath)) 79 | 80 | if not dist_files: 81 | raise DistutilsOptionError( 82 | "No file collected from the 'dist' folder") 83 | 84 | for command, pyversion, filepath in dist_files: 85 | self.upload_file(command, pyversion, filepath) 86 | -------------------------------------------------------------------------------- /wheelhouse_uploader/fetch.py: -------------------------------------------------------------------------------- 1 | try: 2 | from urllib.request import urlopen 3 | from urllib.parse import urlparse 4 | except ImportError: 5 | # Python 2 compat 6 | from urllib2 import urlopen 7 | from urlparse import urlparse 8 | import re 9 | import os 10 | import shutil 11 | from pkg_resources import safe_version 12 | from concurrent.futures import ThreadPoolExecutor, as_completed 13 | from wheelhouse_uploader.utils import parse_filename 14 | 15 | link_pattern = re.compile(r'\bhref="([^"]+)"') 16 | 17 | 18 | def download(url, filepath, buffer_size=int(1e6), overwrite=False): 19 | if not overwrite and os.path.exists(filepath): 20 
| print('%s already exists' % filepath) 21 | return 22 | print('downloading %s' % url) 23 | tmp_filepath = filepath + '.part' 24 | with open(tmp_filepath, 'wb') as f: 25 | remote = urlopen(url) 26 | try: 27 | data = remote.read(buffer_size) 28 | while data: 29 | f.write(data) 30 | data = remote.read(buffer_size) 31 | finally: 32 | if hasattr(remote, 'close'): 33 | remote.close() 34 | # download was successful: rename to the final name: 35 | if os.path.exists(filepath): 36 | os.unlink(filepath) 37 | shutil.move(tmp_filepath, filepath) 38 | 39 | 40 | def _parse_html(index_url, folder, project_name, version=None): 41 | # TODO: use correct encoding 42 | html_content = urlopen(index_url).read().decode('utf-8') 43 | artifacts = [] 44 | found_versions = set() 45 | for match in re.finditer(link_pattern, html_content): 46 | link = match.group(1) 47 | if link.startswith("/"): 48 | parsed_index_url = urlparse(index_url) 49 | url = "%s://%s%s" % (parsed_index_url.scheme, 50 | parsed_index_url.netloc, 51 | link) 52 | elif index_url.endswith('/'): 53 | url = index_url + link 54 | elif index_url.endswith('.html'): 55 | url = index_url.rsplit('/', 1)[0] + '/' + link 56 | else: 57 | url = index_url + '/' + link 58 | if '#' in link: 59 | # TODO: parse digest info to detect any file content corruption 60 | link, _ = link.split('#', 1) 61 | if '/' in link: 62 | _, filename = link.rsplit('/', 1) 63 | else: 64 | filename = link 65 | try: 66 | _, file_version, _, _ = parse_filename(filename, 67 | project_name=project_name) 68 | except ValueError: 69 | # not a supported artifact 70 | continue 71 | 72 | if version is not None and file_version != version: 73 | found_versions.add(file_version) 74 | continue 75 | 76 | artifacts.append((url, os.path.join(folder, filename))) 77 | return artifacts, list(sorted(found_versions)) 78 | 79 | 80 | def download_artifacts(index_url, folder, project_name, version=None, 81 | max_workers=4): 82 | if version is not None: 83 | version = safe_version(version) 
84 | artifacts, found_versions = _parse_html(index_url, folder, project_name, 85 | version=version) 86 | if not artifacts: 87 | print('Could not find any matching artifact for project "%s" on %s' 88 | % (project_name, index_url)) 89 | if version is not None: 90 | print("Requested version: %s" % version) 91 | print("Available versions: %s" % ", ".join(sorted(found_versions))) 92 | return 93 | 94 | print('Found %d artifacts to download from %s' 95 | % (len(artifacts), index_url)) 96 | if not os.path.exists(folder): 97 | os.makedirs(folder) 98 | with ThreadPoolExecutor(max_workers=max_workers) as e: 99 | # Dispatch the file download in threads 100 | futures = [e.submit(download, url_, filepath) 101 | for url_, filepath in artifacts] 102 | for future in as_completed(futures): 103 | # We don't expect any returned results be we want to raise 104 | # an exception early in case if problem 105 | future.result() 106 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | wheelhouse-uploader 2 | =================== 3 | 4 | Upload/download wheels to/from cloud storage using Apache Libcloud. Helps 5 | package maintainers build wheels for their packages and upload them to PyPI. 6 | 7 | The cloud storage containers are typically populated by Continuous Integration 8 | servers that generate and test binary packages on various platforms (Windows 9 | and OSX for several versions and architectures for Python). At release time 10 | the project maintainer can collect all the generated package for a specific 11 | version of the project and upload them all at once to PyPI. 12 | 13 | 14 | ## Installation 15 | 16 | ~~~bash 17 | pip install wheelhouse-uploader 18 | ~~~ 19 | 20 | ## Usage 21 | 22 | The canonical use case is: 23 | 24 | 1. 
Continuous Integration (CI) workers build and test the project packages for 25 | various platforms and versions of Python, for instance using the commands: 26 | 27 | ~~~bash 28 | pip install wheel 29 | python setup.py bdist_wheel 30 | ~~~ 31 | 32 | 2. CI workers use `wheelhouse-uploader` to upload the generated artifacts 33 | to one or more cloud storage containers (e.g. one container per platform, 34 | or one for the master branch and the other for release tags): 35 | 36 | ~~~bash 37 | python -m wheelhouse_uploader upload container_name 38 | ~~~ 39 | 40 | 3. The project maintainer uses the `wheelhouse-uploader` distutils extensions 41 | to fetch all the generated build artifacts for a specific version number to 42 | its local `dist` folder and upload them all at once to PyPI when 43 | making a release. 44 | 45 | ~~~bash 46 | python setup.py sdist fetch_artifacts upload_all 47 | ~~~ 48 | 49 | 50 | ### Uploading artifact to a cloud storage container 51 | 52 | Use the following command: 53 | 54 | ~~~bash 55 | python -m wheelhouse_uploader upload \ 56 | --username=mycloudaccountid --secret=xxx \ 57 | --local-folder=dist/ my_wheelhouse 58 | ~~~ 59 | 60 | or: 61 | 62 | ~~~bash 63 | export WHEELHOUSE_UPLOADER_USERNAME=mycloudaccountid 64 | export WHEELHOUSE_UPLOADER_SECRET=xxx 65 | python -m wheelhouse_uploader upload --local-folder dist/ my_wheelhouse 66 | ~~~ 67 | 68 | When used in a CI setup such as http://travis-ci.org or http://appveyor.com, 69 | the environment variables are typically configured in the CI configuration 70 | files such as `.travis.yml` or `appveyor.yml`. The secret API key is typically 71 | encrypted and exposed with a `secure:` prefix in those files. 72 | 73 | The files in the `dist/` folder will be uploaded to a container named 74 | `my_wheelhouse` on the `CLOUDFILES` (Rackspace) cloud storage provider. 
75 | 76 | You can pass a custom `--provider` param to select the cloud storage from 77 | the list of [supported providers]( 78 | https://libcloud.readthedocs.org/en/latest/storage/supported_providers.html). 79 | 80 | Assuming the container will be published as a static website using the cloud 81 | provider CDN options, the `upload` command also maintains an `index.html` file 82 | with links to all the files previously uploaded to the container. 83 | 84 | It is recommended to configure the container CDN cache TTL to a shorter than 85 | usual duration such as 15 minutes to be able to quickly perform a release once 86 | all artifacts have been uploaded by the CI servers. 87 | 88 | 89 | ### Fetching artifacts manually 90 | 91 | The following command downloads items that have been previously published to a 92 | web page with an index with HTML links to the project files: 93 | 94 | ~~~bash 95 | python -m wheelhouse_uploader fetch \ 96 | --version=X.Y.Z --local-folder=dist/ \ 97 | project-name http://wheelhouse.example.org/ 98 | ~~~ 99 | 100 | ### Uploading previously archived artifacts to PyPI (deprecated) 101 | 102 | **DEPRECATION NOTICE**: while the following still works, you are advised 103 | to use the alternative tool: [twine](https://pypi.python.org/pypi/twine) 104 | that makes it easy to script uploads of packages to PyPI without messing 105 | around with distutils and `setup.py`. 106 | 107 | Ensure that the `setup.py` file of the project registers the 108 | `wheelhouse-uploader` distutils extensions: 109 | 110 | ~~~python 111 | cmdclass = {} 112 | 113 | try: 114 | # Used by the release manager of the project to add support for: 115 | # python setup.py sdist fetch_artifacts upload_all 116 | import wheelhouse_uploader.cmd 117 | cmdclass.update(vars(wheelhouse_uploader.cmd)) 118 | except ImportError: 119 | pass 120 | ... 121 | 122 | setup( 123 | ... 
124 | cmdclass=cmdclass, 125 | ) 126 | ~~~ 127 | 128 | Put the URL of the public artifact repositories populated by the CI workers 129 | in the `setup.cfg` file of the project: 130 | 131 | ~~~ini 132 | [wheelhouse_uploader] 133 | artifact_indexes= 134 | http://wheelhouse.site1.org/ 135 | http://wheelhouse.site2.org/ 136 | ~~~ 137 | 138 | Fetch all the artifacts matching the current version of the project as 139 | configured in the local `setup.py` file and upload them all to PyPI: 140 | 141 | ~~~bash 142 | python setup.py fetch_artifacts upload_all 143 | ~~~ 144 | 145 | Note: this will reuse PyPI credentials stored in `$HOME/.pypirc` if 146 | `python setup.py register` or `upload` were called previously. 147 | 148 | 149 | ### TODO 150 | 151 | - test on as many cloud storage providers as possible (please send an email to 152 | olivier.grisel@ensta.org if you can make it work on a non-Rackspace provider), 153 | - check that CDN activation works everywhere (it's failing on Rackspace 154 | currently: need to investigate) otherwise the workaround is to enable CDN 155 | manually in the management web UI, 156 | - make it possible to fetch private artifacts using the cloud storage protocol 157 | instead of HTML index pages. 
158 | -------------------------------------------------------------------------------- /wheelhouse_uploader/commandline.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import sys 3 | import os 4 | from libcloud.common.types import InvalidCredsError 5 | import libcloud.security 6 | from wheelhouse_uploader.upload import Uploader 7 | from wheelhouse_uploader.fetch import download_artifacts 8 | 9 | 10 | def parse_args(): 11 | parser = argparse.ArgumentParser( 12 | description='Manage Python build artifacts', 13 | ) 14 | subparsers = parser.add_subparsers( 15 | title='Commands', 16 | ) 17 | 18 | # Options for the upload sub command: 19 | upload = subparsers.add_parser( 20 | 'upload', help='Attach a local folder to a Nuxeo server.', 21 | ) 22 | upload.set_defaults(command='upload') 23 | 24 | upload.add_argument('container_name', help='name of the target container') 25 | upload.add_argument('--local-folder', default='dist', 26 | help='path to the folder to upload') 27 | upload.add_argument('--username', 28 | help='account name for the cloud storage') 29 | upload.add_argument('--secret', 30 | help='secret API key for the cloud storage') 31 | upload.add_argument('--provider-name', default='CLOUDFILES', 32 | help='Apache Libcloud cloud storage provider') 33 | upload.add_argument('--region', default='ord', 34 | help='Apache Libcloud cloud storage provider region') 35 | upload.add_argument('--max-workers', type=int, default=4, 36 | help='maximum number of concurrent uploads') 37 | upload.add_argument('--no-ssl-check', default=False, 38 | action="store_true", 39 | help='disable SSL certificate validation') 40 | upload.add_argument('--no-enable-cdn', default=False, 41 | action="store_true", 42 | help='do not publish the container on CDN') 43 | upload.add_argument('--no-update-index', default=False, 44 | action="store_true", 45 | help='build an index.html file') 46 | upload.add_argument('--upload-pull-request', 
default=False, 47 | action="store_true", 48 | help='upload even if it is a pull request') 49 | 50 | # Options for the fetch sub command: 51 | fetch = subparsers.add_parser( 52 | 'fetch', help='Collect build artifacts from an HTML page.', 53 | ) 54 | fetch.set_defaults(command='fetch') 55 | fetch.add_argument('project_name', help='name of the project') 56 | fetch.add_argument('url', help='url of the HTML index page.') 57 | fetch.add_argument('--version', help='version of the artifact to collect') 58 | fetch.add_argument('--local-folder', default='dist', 59 | help='path to the folder to store fetched items') 60 | return parser.parse_args() 61 | 62 | 63 | def check_upload_credentions(options): 64 | if not options.username: 65 | options.username = os.environ.get('WHEELHOUSE_UPLOADER_USERNAME') 66 | if not options.username: 67 | print("Username required: pass the --username option or set the " 68 | "WHEELHOUSE_UPLOADER_USERNAME environment variable") 69 | sys.exit(1) 70 | 71 | if not options.secret: 72 | options.secret = os.environ.get('WHEELHOUSE_UPLOADER_SECRET') 73 | 74 | if not options.secret: 75 | # It is often useful to run travis / appveyor jobs on a specific 76 | # developer account that does not have the secret key configured. 77 | # wheelhouse-uploader should not cause such builds to fail, instead 78 | # it would just skip the upload. 
79 | print("WARNING: secret API key missing: skipping package upload") 80 | sys.exit(0) 81 | 82 | 83 | def handle_upload(options): 84 | check_upload_credentions(options) 85 | 86 | if (not options.upload_pull_request and 87 | (os.environ.get('APPVEYOR_PULL_REQUEST_NUMBER') 88 | or os.environ.get('TRAVIS_PULL_REQUEST', 'false') != 'false' 89 | )): 90 | print('Skipping upload of packages for pull request.') 91 | print('Use --upload-pull-request to force upload even on pull ' 92 | 'requests.') 93 | sys.exit(0) 94 | 95 | if options.no_ssl_check: 96 | # This is needed when the host OS such as Windows does not make 97 | # make available a CA cert bundle in a standard location. 98 | libcloud.security.VERIFY_SSL_CERT = False 99 | 100 | try: 101 | uploader = Uploader(options.username, options.secret, 102 | options.provider_name, 103 | region=options.region, 104 | update_index=not options.no_update_index, 105 | max_workers=options.max_workers) 106 | uploader.upload(options.local_folder, options.container_name) 107 | 108 | if not options.no_enable_cdn: 109 | try: 110 | url = uploader.get_container_cdn_url(options.container_name) 111 | print('Wheelhouse successfully published at:') 112 | print(url) 113 | except Exception as e: 114 | print("Failed to enable CDN: %s %s" % (type(e).__name__, e)) 115 | except InvalidCredsError: 116 | print("Invalid credentials for user '%s'" % options.username) 117 | sys.exit(1) 118 | 119 | 120 | def main(): 121 | options = parse_args() 122 | if options.command == 'upload': 123 | return handle_upload(options) 124 | elif options.command == 'fetch': 125 | download_artifacts(options.url, options.local_folder, 126 | project_name=options.project_name, 127 | version=options.version) 128 | -------------------------------------------------------------------------------- /wheelhouse_uploader/upload.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | import os 3 | import json 4 | from 
hashlib import sha256 5 | from time import sleep 6 | from io import StringIO 7 | from traceback import print_exc 8 | import tempfile 9 | import shutil 10 | from concurrent.futures import ThreadPoolExecutor, as_completed 11 | 12 | from libcloud.common.types import InvalidCredsError 13 | from libcloud.storage.providers import get_driver 14 | from libcloud.storage.types import Provider 15 | from libcloud.storage.types import ContainerDoesNotExistError 16 | from libcloud.storage.types import ObjectDoesNotExistError 17 | 18 | from wheelhouse_uploader.utils import matching_dev_filenames, stamp_dev_wheel 19 | 20 | 21 | class Uploader(object): 22 | 23 | index_filename = "index.html" 24 | 25 | metadata_filename = 'metadata.json' 26 | 27 | def __init__(self, username, secret, provider_name, region, 28 | update_index=True, max_workers=4, 29 | delete_previous_dev_packages=True): 30 | self.username = username 31 | self.secret = secret 32 | self.provider_name = provider_name 33 | self.region = region 34 | self.max_workers = max_workers 35 | self.update_index = update_index 36 | self.delete_previous_dev_packages = delete_previous_dev_packages 37 | 38 | def make_driver(self): 39 | provider = getattr(Provider, self.provider_name) 40 | return get_driver(provider)(self.username, self.secret, 41 | region=self.region) 42 | 43 | def upload(self, local_folder, container, retry_on_error=3): 44 | """Wrapper to make upload more robust to random server errors""" 45 | try: 46 | return self._try_upload_once(local_folder, container) 47 | except InvalidCredsError: 48 | raise 49 | except Exception as e: 50 | if retry_on_error <= 0: 51 | raise 52 | # can be caused by any network or server side failure 53 | print(e) 54 | print_exc() 55 | sleep(1) 56 | self.upload(local_folder, container, 57 | retry_on_error=retry_on_error - 1) 58 | 59 | def _try_upload_once(self, local_folder, container_name): 60 | # check that the container is reachable 61 | driver = self.make_driver() 62 | try: 63 | container = 
driver.get_container(container_name) 64 | except ContainerDoesNotExistError: 65 | container = driver.create_container(container_name) 66 | 67 | filepaths, local_metadata = self._scan_local_files(local_folder) 68 | 69 | self._upload_files(filepaths, container_name) 70 | recently_uploaded = [os.path.basename(path) for path in filepaths] 71 | 72 | # Refresh metadata 73 | metadata = self._update_metadata_file( 74 | driver, container, local_metadata, 75 | recently_uploaded=recently_uploaded) 76 | if self.update_index: 77 | self._update_index(driver, container, metadata, 78 | recently_uploaded=recently_uploaded) 79 | 80 | def _upload_files(self, filepaths, container_name): 81 | print("About to upload %d files" % len(filepaths)) 82 | with ThreadPoolExecutor(max_workers=self.max_workers) as e: 83 | # Dispatch the file uploads in threads 84 | futures = [e.submit(self.upload_file, filepath_, container_name) 85 | for filepath_ in filepaths] 86 | for future in as_completed(futures): 87 | # We don't expect any returned results be we want to raise 88 | # an exception early in case if problem 89 | future.result() 90 | 91 | def _upload_bytes(self, payload, container, object_name): 92 | tempdir = tempfile.mkdtemp() 93 | tempfilepath = os.path.join( 94 | tempdir, '_tmp_wheelhouse_uploader_upload_' + object_name) 95 | try: 96 | with open(tempfilepath, 'wb') as f: 97 | f.write(payload) 98 | container.upload_object(file_path=tempfilepath, 99 | object_name=object_name) 100 | finally: 101 | try: 102 | shutil.rmtree(tempdir) 103 | except OSError: 104 | # Ignore permission errors on temporary directories 105 | print("WARNING: failed to delete", tempdir) 106 | 107 | def _download_bytes(self, container, object_name, missing=None): 108 | tempdir = tempfile.mkdtemp() 109 | tempfilepath = os.path.join( 110 | tempdir, '_tmp_wheelhouse_uploader_download_' + object_name) 111 | try: 112 | container.get_object(object_name).download(tempfilepath) 113 | with open(tempfilepath, 'rb') as f: 114 | return 
f.read() 115 | except ObjectDoesNotExistError: 116 | return missing 117 | finally: 118 | try: 119 | shutil.rmtree(tempdir) 120 | except OSError: 121 | # Ignore permission errors on temporary directories 122 | print("WARNING: faile to delete", tempdir) 123 | 124 | def _update_metadata_file(self, driver, container, local_metadata, 125 | recently_uploaded=()): 126 | data = self._download_bytes(container, self.metadata_filename) 127 | if data is None: 128 | metadata = {} 129 | else: 130 | metadata = json.loads(data.decode('utf-8')) 131 | metadata.update(local_metadata) 132 | 133 | # Garbage collect metadata for deleted files 134 | filenames = set(self._get_package_filenames(driver, container)) 135 | 136 | # Make sure that the recently uploaded files are included: the 137 | # eventual consistency semantics of the container listing might hidden 138 | # them temporarily. 139 | filenames.union(recently_uploaded) 140 | 141 | keys = list(sorted(metadata.keys())) 142 | for key in keys: 143 | if key not in filenames: 144 | del metadata[key] 145 | 146 | print('Uploading %s with %d entries' 147 | % (self.metadata_filename, len(metadata))) 148 | 149 | self._upload_bytes(json.dumps(metadata).encode('utf-8'), 150 | container, self.metadata_filename) 151 | return metadata 152 | 153 | def _get_package_filenames(self, driver, container, 154 | ignore_list=('.json', '.html')): 155 | package_filenames = [] 156 | objects = driver.list_container_objects(container) 157 | for object_ in objects: 158 | if not object_.name.endswith(ignore_list): 159 | package_filenames.append(object_.name) 160 | return package_filenames 161 | 162 | def _update_index(self, driver, container, metadata, recently_uploaded=()): 163 | # TODO use a mako template instead 164 | package_filenames = self._get_package_filenames(driver, container) 165 | 166 | # Make sure that the recently uploaded files are included: the 167 | # eventual consistency semantics of the container listing might hidden 168 | # them temporarily. 
169 | package_filenames = set(package_filenames).union(recently_uploaded) 170 | package_filenames = sorted(package_filenames) 171 | 172 | print('Updating index.html with %d links' % len(package_filenames)) 173 | payload = StringIO() 174 | payload.write(u'

\n') 175 | for filename in package_filenames: 176 | object_metadata = metadata.get(filename, {}) 177 | digest = object_metadata.get('sha256') 178 | if digest is not None: 179 | payload.write( 180 | u'

  • %s
  • \n' 181 | % (filename, digest, filename)) 182 | else: 183 | payload.write(u'
  • %s
  • \n' 184 | % (filename, filename)) 185 | payload.write(u'

    \n') 186 | payload.seek(0) 187 | self._upload_bytes(payload.getvalue().encode('utf-8'), 188 | container, self.index_filename) 189 | 190 | def _scan_local_files(self, local_folder): 191 | """Collect file informations on the folder to upload. 192 | 193 | Dev wheel files will automatically get renamed to add an upload time 194 | stamp in the process. 195 | 196 | """ 197 | filepaths = [] 198 | local_metadata = {} 199 | 200 | for filename in sorted(os.listdir(local_folder)): 201 | if filename.startswith('.'): 202 | continue 203 | filepath = os.path.join(local_folder, filename) 204 | if os.path.isdir(filepath): 205 | continue 206 | 207 | try: 208 | should_rename, new_filename = stamp_dev_wheel(filename) 209 | new_filepath = os.path.join(local_folder, new_filename) 210 | if should_rename: 211 | print("Renaming dev wheel to add an upload timestamp: %s" 212 | % new_filename) 213 | os.rename(filepath, new_filepath) 214 | filepath, filename = new_filepath, new_filename 215 | except ValueError as e: 216 | print("Skipping %s: %s" % (filename, e)) 217 | continue 218 | # TODO: use a threadpool 219 | filepaths.append(filepath) 220 | with open(filepath, 'rb') as f: 221 | content = f.read() 222 | local_metadata[filename] = dict( 223 | sha256=sha256(content).hexdigest(), 224 | size=len(content), 225 | ) 226 | return filepaths, local_metadata 227 | 228 | def upload_file(self, filepath, container_name): 229 | # drivers are not thread safe, hence we create one per upload task 230 | # to make it possible to use a thread pool executor 231 | driver = self.make_driver() 232 | filename = os.path.basename(filepath) 233 | container = driver.get_container(container_name) 234 | 235 | size_mb = os.stat(filepath).st_size / 1e6 236 | print("Uploading %s [%0.3f MB]" % (filepath, size_mb)) 237 | driver.upload_object(file_path=filepath, 238 | container=container, 239 | object_name=filename) 240 | 241 | if self.delete_previous_dev_packages: 242 | existing_filenames = 
self._get_package_filenames(driver, container) 243 | if filename not in existing_filenames: 244 | # Eventual consistency listing might cause the just uploaded 245 | # file not be missing. Ensure this is never the case. 246 | existing_filenames.append(filename) 247 | previous_dev_filenames = matching_dev_filenames(filename, 248 | existing_filenames) 249 | 250 | # Only keep the last 5 dev builds 251 | for filename in previous_dev_filenames[5:]: 252 | print("Deleting old dev package %s" % filename) 253 | try: 254 | obj = container.get_object(filename) 255 | driver.delete_object(obj) 256 | except ObjectDoesNotExistError: 257 | pass 258 | 259 | def get_container_cdn_url(self, container_name): 260 | driver = self.make_driver() 261 | container = driver.get_container(container_name) 262 | if hasattr(driver, 'ex_enable_static_website'): 263 | driver.ex_enable_static_website(container, 264 | index_file=self.index_filename) 265 | driver.enable_container_cdn(container) 266 | return driver.get_container_cdn_url(container) 267 | -------------------------------------------------------------------------------- /wheelhouse_uploader/utils.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import re 3 | from datetime import datetime 4 | from pkg_resources import safe_version, parse_version 5 | from packaging.version import VERSION_PATTERN 6 | 7 | # PEP440 version spec 8 | _version_regex = re.compile('^' + VERSION_PATTERN + '$', 9 | re.VERBOSE | re.IGNORECASE) 10 | 11 | _stamp_regex = re.compile(r'(\d{14})(_\w+)?') 12 | 13 | 14 | def _wheel_escape(component): 15 | return re.sub("[^\w\d.]+", "_", component, re.UNICODE) 16 | 17 | 18 | def parse_filename(filename, project_name=None, return_tags=False): 19 | """Find artifact metadata based on filename 20 | 21 | If a an expected project name is provided, consistency is checked: a 22 | ValueError is raised in case of violation. 
def parse_filename(filename, project_name=None, return_tags=False):
    """Find artifact metadata based on filename

    If an expected project name is provided, consistency is checked: a
    ValueError is raised in case of violation.

    This metadata is necessary to be able to re-upload previously
    generated artifacts to PyPI.

    >>> parse_filename('project-1.0-py2.py3-none-any.whl')
    ... # doctest: +ELLIPSIS
    ('project', '1.0', ..., 'bdist_wheel')

    >>> parse_filename('scikit_learn-0.15.2-cp34-none-win32.whl')
    ('scikit_learn', '0.15.2', '3.4', 'bdist_wheel')

    >>> parse_filename('scikit-learn-0.15.1rc.win-amd64-py2.7.exe',
    ...                project_name='scikit-learn')
    ('scikit_learn', '0.15.1rc0', '2.7', 'bdist_wininst')

    >>> parse_filename('scikit_learn-0.15.2.dev-cp34-none-win32.whl',
    ...                project_name='scikit-learn')
    ('scikit_learn', '0.15.2.dev0', '3.4', 'bdist_wheel')

    >>> parse_filename('scikit_learn-0.15.dev0+local3-cp27-none-win32.whl')
    ('scikit_learn', '0.15.dev0+local3', '2.7', 'bdist_wheel')

    >>> tags = parse_filename('project-0.1-cp27-none-win32.whl',
    ...                       return_tags=True)[-1]
    >>> tags['python']
    'cp27'
    >>> tags['abi']
    'none'
    >>> tags['platform']
    'win32'

    >>> parse_filename('scikit-learn-0.15.2.win32-py2.7.exe')
    ('scikit_learn', '0.15.2', '2.7', 'bdist_wininst')

    >>> parse_filename('scikit-learn-0.15.1.tar.gz')
    ('scikit_learn', '0.15.1', '', 'sdist')

    >>> parse_filename('scikit-learn-0.15.1.zip')
    ('scikit_learn', '0.15.1', '', 'sdist')

    >>> parse_filename('scikit-learn-0.15.1.zip', return_tags=True)
    ('scikit_learn', '0.15.1', '', 'sdist', {})

    >>> parse_filename(
    ...     'scikit_learn-0.15.1-cp34-cp34m-macosx_10_6_intel'
    ...     '.macosx_10_9_intel.macosx_10_9_x86_64.whl')
    ('scikit_learn', '0.15.1', '3.4', 'bdist_wheel')

    >>> parse_filename('sklearn_template-0.0.3-py2-none-any.whl')
    ('sklearn_template', '0.0.3', '2', 'bdist_wheel')

    >>> parse_filename('sklearn_template-0.0.3-py3-none-any.whl')
    ('sklearn_template', '0.0.3', '3', 'bdist_wheel')

    >>> parse_filename('sklearn-template-0.0.3.win32.exe') # doctest: +ELLIPSIS
    ('sklearn_template', '0.0.3', ..., 'bdist_wininst')

    >>> parse_filename('sklearn-template-0.0.3.win-amd64.exe')
    ... # doctest: +ELLIPSIS
    ('sklearn_template', '0.0.3', ..., 'bdist_wininst')
    """
    # Dispatch on the file extension; each handler receives the basename
    # with the extension stripped.
    handlers = [
        ('.whl', _parse_wheel_filename),
        ('.exe', _parse_exe_filename),
        ('.zip', _parse_source_filename),
        ('.tar.gz', _parse_source_filename),
    ]
    for extension, parser in handlers:
        if filename.endswith(extension):
            return parser(filename[:-len(extension)],
                          project_name=project_name,
                          return_tags=return_tags)
    raise ValueError('Invalid filename "%s", unrecognized extension'
                     % filename)


def _parse_wheel_filename(basename, project_name=None, return_tags=False):
    """Parse a PEP 427 wheel basename into distribution metadata.

    :return: ``(distname, version, pyversion, 'bdist_wheel')`` plus a
        ``tags`` dict when ``return_tags`` is true
    :raises ValueError: on malformed names or project name mismatch
    """
    components = basename.split('-')
    distname = components[0]
    if (project_name is not None and
            distname != _wheel_escape(project_name)):
        raise ValueError('File %s.whl does not match project name %s'
                         % (basename, project_name))

    # BUG FIX: a wheel basename has at least 5 dash-separated components
    # (name, version, python tag, abi tag, platform tag).  The original
    # check (len < 3) let 3- and 4-component names through, and the
    # indexing below then raised IndexError instead of the ValueError
    # that callers such as matching_dev_filenames() catch.
    if len(components) < 5 or len(components[2]) < 3:
        raise ValueError('Invalid wheel filename %s.whl' % basename)
    version = components[1]
    pytag = components[2]
    abitag = components[3]
    platformtag = components[4]

    if pytag == 'py2.py3':
        # Universal wheel: report the version of the running interpreter.
        pyversion = '.'.join(str(x) for x in sys.version_info[:2])
    elif pytag[:2] == 'py' and len(pytag) == 3:
        pyversion = '%s' % pytag[2]
    elif pytag[:2] in ['pp', 'py'] and len(pytag) == 4:
        pyversion = '%s.%s' % (pytag[2], pytag[3])
    elif pytag[:2] == 'cp':
        # NOTE(review): single-character indexing means two-digit minor
        # versions (e.g. cp310) would be reported as '3.1' -- confirm
        # whether such tags need to be supported.
        pyversion = '%s.%s' % (pytag[2], pytag[3])
    else:
        raise ValueError('Invalid or unsupported Python version tag in '
                         'filename %s.whl' % basename)
    if return_tags:
        tags = {
            'python': pytag,
            'abi': abitag,
            'platform': platformtag,
        }
        return (distname, safe_version(version), pyversion, 'bdist_wheel',
                tags)
    return (distname, safe_version(version), pyversion, 'bdist_wheel')


def _parse_exe_filename(basename, project_name=None, return_tags=True):
    """Parse a ``bdist_wininst`` installer basename into metadata.

    :return: ``(distname, version, pyversion, 'bdist_wininst')`` plus a
        ``tags`` dict when ``return_tags`` is true
    :raises ValueError: on project name mismatch
    """
    # NOTE(review): the default for return_tags differs from
    # _parse_wheel_filename (False); harmless because parse_filename
    # always passes return_tags explicitly.
    remainder, pythontag = basename.rsplit('-', 1)
    if not pythontag.startswith('py'):
        # There was no python tag with this file, therefore it must be
        # python version independent
        pythontag = 'py' + '.'.join(str(x) for x in sys.version_info[:2])
        remainder = basename
    name_and_version, platform = remainder.rsplit('.', 1)
    distname, version = name_and_version.rsplit('-', 1)
    distname = _wheel_escape(distname)
    if project_name is not None and distname != _wheel_escape(project_name):
        raise ValueError('File %s.exe does not match project name %s'
                         % (basename, project_name))
    pyversion = pythontag[2:]
    if return_tags:
        tags = {
            'python': pythontag.replace('.', ''),
            'platform': _wheel_escape(platform),
        }
        return (distname, safe_version(version), pyversion, 'bdist_wininst',
                tags)
    return (distname, safe_version(version), pyversion, 'bdist_wininst')


def _parse_source_filename(basename, project_name=None, return_tags=True):
    """Parse an sdist basename (``.zip`` / ``.tar.gz``) into metadata.

    :return: ``(distname, version, '', 'sdist')`` plus an empty tags dict
        when ``return_tags`` is true
    :raises ValueError: on project name mismatch
    """
    distname, version = basename.rsplit('-', 1)
    distname = _wheel_escape(distname)
    if project_name is not None and distname != _wheel_escape(project_name):
        raise ValueError('File %s does not match expected project name %s'
                         % (basename, project_name))
    if return_tags:
        return (distname, safe_version(version), '', 'sdist', {})
    return (distname, safe_version(version), '', 'sdist')


def is_dev(version):
    """Look for dev flag in PEP440 version number

    >>> is_dev('0.15.dev0+local3')
    True
    >>> is_dev('0.15.dev+local3')
    True
    >>> is_dev('0.15+local3')
    False

    """
    # Ignore the local segment of PEP 440 version strings: the 'dev'
    # named group of the grammar only covers the dev release segment.
    m = _version_regex.match(version)
    return m is not None and m.groupdict().get('dev') is not None
def matching_dev_filenames(reference_filename, existing_filenames):
    """Filter filenames for matching dev packages.

    Return filenames for dev packages with matching package names, package
    type, python version and platform information.

    Sort them by version number (higher versions first).

    >>> matching_dev_filenames(
    ...     "package-1.0.dev0+001_local1-cp34-none-win32.whl",
    ...     [
    ...         "package-1.0.dev0+000_local1-cp34-none-win32.whl",
    ...         "package-1.1.dev+local1-cp34-none-win32.whl",
    ...         "package-1.0.dev0+001_local1-cp34-none-win32.whl",
    ...         "package-0.9-cp34-none-win32.whl",
    ...         "package-1.0.dev+local1-cp34-none-win_amd64.whl",
    ...         "other_package-1.0.dev+local0-cp34-none-win32.whl",
    ...         "package-1.0.dev+local0-cp33-none-win32.whl",
    ...         "package-1.0.dev+local1-cp34-none-win32.whl",
    ...     ])  # doctest: +NORMALIZE_WHITESPACE
    ['package-1.1.dev+local1-cp34-none-win32.whl',
     'package-1.0.dev0+001_local1-cp34-none-win32.whl',
     'package-1.0.dev0+000_local1-cp34-none-win32.whl',
     'package-1.0.dev+local1-cp34-none-win32.whl']

    If the reference filename is not a dev version, an empty list is
    returned.

    >>> matching_dev_filenames("package-1.0+local1-cp34-none-win32.whl", [
    ...     "package-1.0.dev+local1-cp34-none-win32.whl",
    ...     "package-0.9+local1-cp34-none-win32.whl",
    ... ])
    []

    >>> matching_dev_filenames("package-1.0.invalid", [
    ...     "package-1.0.dev+local1-cp34-none-win32.whl",
    ...     "package-0.9+local1-cp34-none-win32.whl",
    ... ])
    []

    """
    def dev_identity(fname):
        # ((distname, disttype, tags), version) for dev packages, None for
        # anything that is not a valid dev package filename.
        try:
            distname, version, _, disttype, tags = parse_filename(
                fname, return_tags=True)
        except ValueError:
            # Invalid filename: no dev match
            return None
        if not is_dev(version):
            return None
        return (distname, disttype, tags), version

    reference = dev_identity(reference_filename)
    if reference is None:
        return []
    reference_key = reference[0]

    candidates = []
    for fname in existing_filenames:
        candidate = dev_identity(fname)
        if candidate is not None and candidate[0] == reference_key:
            candidates.append((candidate[1], fname))
    candidates.sort(key=lambda pair: parse_version(pair[0]), reverse=True)
    return [fname for _, fname in candidates]


def has_stamp(version):
    """Check that the local segment looks like a timestamp

    >>> has_stamp('0.1.dev0+20151214030042')
    True
    >>> has_stamp('0.1.dev0+20151214030042_deadbeef')
    True
    >>> has_stamp('0.1.dev0+deadbeef')
    False
    >>> has_stamp('0.1.dev0')
    False

    """
    local = parse_version(version).local
    return local is not None and _stamp_regex.match(local) is not None


def local_stamp(version):
    """Prefix the local segment with a UTC timestamp

    The goal is to make sure that the lexical order of the dev versions
    is matching the CI build ordering.

    >>> 'deadbeef' < 'cafebabe'
    False

    >>> v1 = local_stamp('0.1.dev0+deadbeef')
    >>> v1 # doctest: +ELLIPSIS
    '0.1.dev0+..._deadbeef'

    >>> import time
    >>> time.sleep(1) # local_stamp has a second-level resolution
    >>> v2 = local_stamp('0.1.dev0+cafebabe')
    >>> v2 # doctest: +ELLIPSIS
    '0.1.dev0+..._cafebabe'
    >>> parse_version(v1) < parse_version(v2)
    True

    This also works even if the original version does not have a local
    segment:

    >>> v3 = local_stamp('0.1.dev0')
    >>> parse_version(v1) < parse_version(v3)
    True

    """
    parsed = parse_version(version)
    timestamp = datetime.utcnow().strftime("%Y%m%d%H%M%S")
    if parsed.local is None:
        return "%s+%s" % (parsed.public, timestamp)
    return "%s+%s_%s" % (parsed.public, timestamp, parsed.local)


def stamp_dev_wheel(filename):
    """Rename a filename to add a timestamp only if this is a dev package

    >>> stamp_dev_wheel('proj-0.1.dev0-py2.py3-none-any.whl')
    ... # doctest: +ELLIPSIS
    (True, 'proj-0.1.dev0+...-py2.py3-none-any.whl')

    Do not stamp release packages, only dev packages:

    >>> stamp_dev_wheel('proj-0.1-py2.py3-none-any.whl')
    (False, 'proj-0.1-py2.py3-none-any.whl')

    Do not restamp a package that has already been stamped:

    >>> stamp_dev_wheel('proj-0.1.dev0+20151214030042-py2.py3-none-any.whl')
    (False, 'proj-0.1.dev0+20151214030042-py2.py3-none-any.whl')

    Non-dev non-wheel files should be left unaffected:

    >>> stamp_dev_wheel('scikit-learn-0.15.1rc.win-amd64-py2.7.exe')
    (False, 'scikit-learn-0.15.1rc.win-amd64-py2.7.exe')

    """
    distname, version, _, disttype, tags = parse_filename(
        filename, return_tags=True)
    if not is_dev(version):
        # Do not stamp release packages, only dev packages
        return False, filename

    if disttype != 'bdist_wheel':
        raise ValueError("%s only dev wheel file can be stamped for upload"
                         % filename)

    if has_stamp(version):
        # Package has already been stamped, do nothing
        return False, filename
    stamped_version = local_stamp(version)
    return True, "%s-%s-%s-%s-%s.whl" % (distname, stamped_version,
                                         tags['python'], tags['abi'],
                                         tags['platform'])