├── .gitignore
├── .travis.yml
├── CHANGELOG.md
├── LICENSE.txt
├── Makefile
├── README.rst
├── dev-requirements.txt
├── example.pypi-private.cfg
├── pypiprivate
├── __init__.py
├── azure.py
├── cli.py
├── config.py
├── publish.py
└── storage.py
├── setup.py
├── sonar-project.properties
├── tests
├── test_publish.py
└── test_storage.py
└── tox.ini
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | __pycache__
3 | *~
4 | *.egg-info/
5 | build/
6 | dist/
7 | tmp/
8 | .env/
9 | .cache/
10 | .coverage
11 | coverage.xml
12 | htmlcov/
13 | .pytest_cache/
14 | .tox/
15 |
16 | # sonarqube
17 | .reports/
18 | .coverage
19 | .scannerwork/
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # travis-ci config for pypiprivate
3 |
4 | language: python
5 |
6 | python:
7 | - "2.7"
8 | - "3.4"
9 | - "3.6"
10 |
11 | install:
12 | - if [ "$TRAVIS_PYTHON_VERSION" == "2.7" ]; then pip install mock; fi
13 | - pip install pytest
14 | - pip install .
15 |
16 | script: py.test -v
17 |
18 | sudo: false
19 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | Changelog
2 | =========
3 |
4 | 0.5.0
5 | -----
6 |
7 | * DigitalOcean support (PR #4)
8 |
9 | * A fix for hyphenated package names (PR #5)
10 |
11 | * Support for AWS session tokens (PR #6)
12 |
13 | * Support for default boto3 configuration methods (PR #7) in a
14 | backward compatible manner.
15 |
16 | * Azure BlobStorage support (PR #9)
17 |
18 |
19 | 0.4.0
20 | -----
21 |
22 | * Fixed importing of configparser with Python 3 (PR #1)
23 |
24 | * Fixed a bug related to uploading tar.gz files to s3 (PR #2)
25 |
26 | * Added a CLI option to display the installed version
27 |
28 | * Fixed project name in upload path as per PEP-503 (Issue #3)
29 |
30 | * The tool is now fully compatible with PEP-440 which ensures that the
31 | artifacts for the given version are correctly identified and
32 | uploaded. This changed requires `setuptools` to be added as a
33 | project dependency.
34 |
35 |
36 | 0.3.2
37 | -----
38 |
39 | * Fixed local file system backend
40 |
41 | * Added travis config file
42 |
43 |
44 | 0.3.1
45 | -----
46 |
47 | * Fixed package URL in setup.py to point to the github repo. This
48 | ensures the Homepage link on PyPI points to the github repo.
49 |
50 | * Minor fixes in the README
51 |
52 |
53 | 0.3.0
54 | -----
55 |
56 | First public version released
57 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Helpshift
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# Makefile for pypiprivate

# All targets below are command targets, not files they produce.
.PHONY: bandit check clean coverage deps pylint safety sonar test

ROOT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))

# If the env. var VIRTUAL_ENV is not set, a new virtualenv will be
# created in the ./.env directory and will be setup with all deps for
# running the tests.
ifndef VIRTUAL_ENV
VIRTUAL_ENV ?= .env
PRE_TEST := deps
PRE_DEPS := .env
else
PRE_TEST :=
PRE_DEPS :=
endif


.env:
	virtualenv -p python3 .env
	$(VIRTUAL_ENV)/bin/pip install -U pip setuptools

# Security lint. NOTE: the scanned directory was previously "pythia",
# a copy-paste leftover from another project; this repo's package is
# pypiprivate.
bandit:
	$(VIRTUAL_ENV)/bin/pip install bandit
	$(VIRTUAL_ENV)/bin/bandit --verbose --ignore-nosec --recursive -r pypiprivate --exclude venv -o bandit_report.json -f json || true

check:
	$(VIRTUAL_ENV)/bin/pip install dependency-check
	$(VIRTUAL_ENV)/bin/dependency-check --disableAssembly -s . --project "$(shell $(VIRTUAL_ENV)/bin/python setup.py --name)" --exclude ".git/**" --exclude ".venv/**" --exclude "**/__pycache__/**" --exclude ".tox/**" --format "ALL"

clean:
	find . -name '*.pyc' -delete
	$(VIRTUAL_ENV)/bin/coverage erase
	rm -rf .reports
	rm -rf .coverage coverage.xml pylint.txt dependency-check-report.*

coverage: clean deps
	$(VIRTUAL_ENV)/bin/pip install coverage
	$(VIRTUAL_ENV)/bin/coverage run -m pytest
	$(VIRTUAL_ENV)/bin/coverage xml

deps: $(PRE_DEPS)
	$(VIRTUAL_ENV)/bin/pip install -e .
	$(VIRTUAL_ENV)/bin/pip install -r dev-requirements.txt

# Lint this project's sources. NOTE: "cockpit/" was another copy-paste
# leftover; pypiprivate/ is the package actually in this repo.
pylint:
	$(VIRTUAL_ENV)/bin/pip install pylint
	$(VIRTUAL_ENV)/bin/pylint --exit-zero pypiprivate/ tests/ -r n --msg-template="{path}:{line}:[{msg_id}({symbol}), {obj}] {msg}" | tee pylint.txt

safety: clean deps
	$(VIRTUAL_ENV)/bin/pip install safety
	$(VIRTUAL_ENV)/bin/safety check
	$(VIRTUAL_ENV)/bin/safety scan

sonar: coverage check bandit pylint

test: $(PRE_TEST)
	$(VIRTUAL_ENV)/bin/pytest
60 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | pypiprivate
2 | ===========
3 |
4 | ``pypiprivate`` is a command line tool for hosting a private
5 | PyPI_-like package index or in other words, a `manual python
6 | repository
7 | <https://packaging.python.org/guides/hosting-your-own-index/>`_ backed
8 | by a file based storage.
9 |
10 | It's implemented in a way that the storage backends are pluggable. At
11 | present, only `AWS S3`_ and local file system are supported but more
12 | implementations can be added in future.
13 |
14 | The backend can be protected behind a HTTP reverse proxy (eg. Nginx_)
15 | to allow secure private access to the packages.
16 |
17 |
18 | How it works?
19 | -------------
20 |
21 | Update: We have published a blog post that explains the usage,
22 | approach and rationale in detail - `Private Python Package Index with
23 | Zero Hassle`_.
24 |
25 | At present ``pypiprivate`` comes with only one command to publish a
26 | package (more utilities for package search and discoverability are
27 | coming soon).
28 |
29 | A publish operation involves,
30 |
31 | 1. Copying all the available package artifacts for a specific version
32 | under the ``./dist`` directory to the storage backend
33 |
34 | 2. Creating the index on the same storage backend
35 |
36 | The file structure created on the backend conforms to the "Simple
37 | Repository API" specification defined in `PEP 503`_.
38 |
39 | The files can now be served securely by a webserver eg. by setting up
40 | a Nginx reverse proxy.
41 |
42 | It's important to note that although the name of the project is
43 | ``pypiprivate``, **it's up to you to ensure that the access to both
44 | the storage and the index is really private**. If you are using S3 and
45 | Nginx, for example, then
46 |
47 | * package authors/owners will need read-write S3 creds to publish
48 | packages
49 | * nginx will authenticate with S3 using read-only S3 creds and protect
50 | the files via HTTP Basic authentication
51 | * package users will need HTTP Auth creds to install the packages
52 | using pip
53 |
54 |
55 | Installation
56 | ------------
57 |
58 | ``pypi-private`` can be installed using pip_ as follows,
59 |
60 | .. code-block:: bash
61 |
62 | $ pip install pypiprivate
63 |
64 | This will install pypiprivate with the additional dependency of
65 | ``boto3`` for AWS S3 (compatible) backend.
66 |
67 | In the latest master (to be released), the Azure backend is also supported. If
68 | you wish to use that then for now you'll need to additionally install the
69 | ``azure-storage-blob`` package
70 |
71 | .. code-block:: bash
72 |
73 | $ pip install azure-storage-blob==12.2.0
74 |
75 | After installation, a script ``pypi-private`` will be available
76 | on ``PATH``.
77 |
78 | You may choose to install it in a virtualenv_, but it's recommended to
79 | install it globally for all users (using ``sudo``) so that it's less
80 | confusing to build and publish projects that need to use their own
81 | virtualenvs.
82 |
83 |
84 | Configuration
85 | -------------
86 |
87 | ``pypiprivate`` requires its own config file, the default location
88 | for which is ``~/.pypi-private.cfg``. This repo contains the example
89 | config file ``example.pypi-private.cfg``, which can be simply copied
90 | to the home directory and renamed to ``.pypi-private.cfg``.
91 |
92 | The config file is **NOT** meant for specifying the auth
93 | credentials. Instead, they should be set as environment
94 | variables. This to ensure that creds are not stored in plain text.
95 |
96 | Which env vars are to be set depends on the backend. More
97 | documentation about it can be found in the example config file.
98 |
99 | AWS S3
100 | ~~~~~~
101 |
102 | For S3 there are 2 ways to specify the credentials
103 |
104 | 1. Setting ``PP_S3_*`` env vars explicitly
105 |
106 | - ``PP_S3_ACCESS_KEY``: required
107 | - ``PP_S3_SECRET_KEY``: required
108 | - ``PP_S3_SESSION_TOKEN``: optional
109 |
110 | 2. `Configuration methods supported by Boto3`_
111 |
112 | *Since version: to be released*
113 |
114 | This method is implicit but more convenient if you already use
115 | tools such as AWS-CLI_. It'd also allow you to use
116 | profiles. However, note that only credentials will be picked up for
117 | the configured profile. The ``region`` and ``endpoint`` (if
118 | required) need to be explicitly configured in the
119 | ``~/.pypi-private.cfg`` file.
120 |
121 |
122 | AZURE
123 | ~~~~~
124 |
125 | *Since version: to be released*
126 |
127 | - ``PP_AZURE_CONN_STR``: (required) Connection string of the storage
128 | account
129 |
130 |
131 | Usage
132 | -----
133 |
134 | First create the builds,
135 |
136 | .. code-block:: bash
137 |
138 | $ python setup.py sdist bdist_wheel
139 |
140 | Then to publish the built artifacts run,
141 |
142 | .. code-block:: bash
143 |
144 | $ pypi-private -v publish
145 |
146 |
147 | For other options, run
148 |
149 | .. code-block:: bash
150 |
151 | $ pypi-private -h
152 |
153 |
154 | Fetching packages published using pypiprivate
155 | ---------------------------------------------
156 |
157 | Run pip with the ``--extra-index-url`` option,
158 |
159 | .. code-block:: bash
160 |
161 | $ pip install mypackage --extra-index-url=https://:@my.private.pypi.com/simple
162 |
163 | Or, add the ``extra-index-url`` to pip config file at
164 | ``~/.pip/pip.conf`` as follows ::
165 |
166 | [install]
167 | extra-index-url = https://:@my.private.pypi.com/simple
168 |
169 | And then simply run,
170 |
171 | .. code-block:: bash
172 |
173 | $ pip install mypackage
174 |
175 |
176 | License
177 | -------
178 |
179 | MIT (See `LICENSE <./LICENSE.txt>`_)
180 |
181 |
182 | .. _PyPI: https://pypi.org/
183 | .. _AWS S3: https://aws.amazon.com/s3/
184 | .. _Nginx: http://nginx.org/
185 | .. _pip: https://pypi.org/project/pip/
186 | .. _virtualenv: https://virtualenv.pypa.io/
187 | .. _PEP 503: https://www.python.org/dev/peps/pep-0503/
188 | .. _Private Python Package Index with Zero Hassle: https://medium.com/helpshift-engineering/private-python-package-index-with-zero-hassle-6164e3831208
189 | .. _AWS-CLI: https://docs.aws.amazon.com/cli/index.html
190 | .. _Configuration methods supported by Boto3: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html
191 |
--------------------------------------------------------------------------------
/dev-requirements.txt:
--------------------------------------------------------------------------------
1 | pytest
2 | pytest-cov
3 |
--------------------------------------------------------------------------------
/example.pypi-private.cfg:
--------------------------------------------------------------------------------
1 | [storage]
2 | type = local-filesystem
3 | # Choices: azure, aws-s3, local-filesystem
4 | #
5 | # Based on the type, the following sections (of the same name as type)
6 | # will be applicable. You may exclude the other ones
7 |
8 | [local-filesystem]
9 | base_path = /path/to/privatepypi/simple
10 |
11 | [aws-s3]
12 | bucket = mybucket
13 | prefix = simple
14 | acl = private
15 | # Use DigitalOcean Spaces or another AWS S3 compatible storage layer by
16 | # specifying the other provider's endpoint URL and service region. For
17 | # DigitalOcean Spaces, specify the region (nyc3 in this example) and it will be
18 | interpolated into the endpoint.
19 | #
20 | #region = nyc3
21 | #endpoint = https://%(region)s.digitaloceanspaces.com
22 | #
23 | # Creds for authentication:
24 | #
25 | # For s3 auth, following creds may be explicitly set
26 | # - PP_S3_ACCESS_KEY (required)
27 | # - PP_S3_SECRET_KEY (required)
28 | # - PP_S3_SESSION_TOKEN (optional)
29 | #
30 | # In case any of the above required env vars are not set, pypiprivate
31 | # will try to authenticate using boto3's default methods. More info:
32 | # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html
33 |
34 | [azure]
35 | container = mycontainer
36 | prefix = simple
37 | # Creds for authentication
38 | #
39 | # Set the connection string for the storage account as environment var
40 | # PP_AZURE_CONN_STR
41 |
--------------------------------------------------------------------------------
/pypiprivate/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = '0.5.0'
2 |
--------------------------------------------------------------------------------
/pypiprivate/azure.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 |
4 | from azure.storage.blob import BlobServiceClient, ContentSettings
5 |
6 | from pypiprivate.storage import Storage, guess_content_type
7 |
8 | logger = logging.getLogger(__name__)
9 |
10 |
11 | class AzureBlobClientMixin(object):
12 |
13 | def __init__(self, connection_string, container):
14 | super().__init__()
15 | self._connection_string = connection_string
16 | self._container = container
17 | self._blob_service_client = None
18 | self._container_client = None
19 |
20 | @property
21 | def container(self):
22 | return self._container
23 |
24 | @property
25 | def blob_service_client(self):
26 | if self._blob_service_client:
27 | return self._blob_service_client
28 | self._blob_service_client = BlobServiceClient.from_connection_string(self._connection_string)
29 | return self._blob_service_client
30 |
31 | @property
32 | def container_client(self):
33 | if self._container_client:
34 | return self._container_client
35 | self._container_client = self.get_container_client(self._container)
36 | return self._container_client
37 |
38 | def get_container_client(self, container_name):
39 | return self.blob_service_client.get_container_client(container_name)
40 |
41 |
42 | class AzureBlobStorage(Storage, AzureBlobClientMixin):
43 |
44 | def __init__(self, connection_string, container, prefix=None):
45 | super().__init__(connection_string, container)
46 | self.prefix = prefix
47 |
48 | @classmethod
49 | def from_config(cls, config):
50 | storage_config = config.storage_config
51 | container = storage_config['container']
52 | conn_str = config.env['PP_AZURE_CONN_STR']
53 | prefix = storage_config.get('prefix')
54 | return cls(conn_str, container, prefix=prefix)
55 |
56 | def join_path(self, *args):
57 | return '/'.join(args)
58 |
59 | def prefixed_path(self, path):
60 | parts = []
61 | if self.prefix:
62 | parts.append(self.prefix)
63 | if path != '.':
64 | parts.append(path)
65 | return self.join_path(*parts)
66 |
67 | def listdir(self, path):
68 | path = self.prefixed_path(path)
69 | if path != '' and not path.endswith('/'):
70 | prefix = '{0}/'.format(path)
71 | else:
72 | prefix = path
73 | logger.debug('Listing objects prefixed with: {0}'.format(prefix))
74 | blobs = self.container_client.list_blobs(name_starts_with=prefix)
75 | files = [b.name[len(prefix):] for b in blobs]
76 | dirs = list({os.path.dirname(f) for f in files})
77 | return files + dirs
78 |
79 | def path_exists(self, path):
80 | path = self.prefixed_path(path)
81 | logger.debug('Checking if key exists: {0}'.format(path))
82 | return bool(list(self.container_client.list_blobs(name_starts_with=path)))
83 |
84 | def put_contents(self, contents, dest, sync=False):
85 | dest_path = self.prefixed_path(dest)
86 | logger.debug('Writing content to azure: {0}'.format(dest_path))
87 | content_settings = ContentSettings(content_type=guess_content_type(dest))
88 | self.container_client.upload_blob(name=dest_path, data=contents.encode('utf-8'),
89 | overwrite=True, content_settings=content_settings)
90 |
91 | def put_file(self, src, dest, sync=False):
92 | dest_path = self.prefixed_path(dest)
93 | logger.debug('Writing content to azure: {0}'.format(dest_path))
94 | content_settings = ContentSettings(content_type=guess_content_type(dest))
95 | with open(src, "rb") as data:
96 | self.container_client.upload_blob(name=dest_path, data=data,
97 | overwrite=True, content_settings=content_settings)
98 |
99 |
--------------------------------------------------------------------------------
/pypiprivate/cli.py:
--------------------------------------------------------------------------------
1 | import os
2 | import argparse
3 | import logging
4 |
5 | from . import __version__
6 | from .config import Config
7 | from .storage import load_storage
8 | from .publish import publish_package
9 |
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 |
14 | LOGGING_FORMAT = '[%(asctime)s] %(levelname)s %(name)s: %(message)s'
15 |
16 |
def log_level(verbosity):
    """Map a ``-v`` count to a logging level.

    0 (or less) -> WARN, exactly 1 -> INFO, 2 or more -> DEBUG.
    """
    if verbosity >= 2:
        return logging.DEBUG
    elif verbosity == 1:
        return logging.INFO
    return logging.WARN
23 |
24 |
25 | def cmd_publish(args):
26 | config = Config(args.conf_path, os.environ, args.env_interpolation)
27 | storage = load_storage(config)
28 | return publish_package(args.pkg_name,
29 | args.pkg_ver,
30 | storage,
31 | args.project_path,
32 | args.dist_dir)
33 |
34 |
35 | def main():
36 | parser = argparse.ArgumentParser(description=(
37 | 'Script for publishing python package on private pypi'
38 | ))
39 | parser.add_argument('--version', action='version',
40 | version=__version__)
41 | parser.add_argument('-p', '--project-path', default='.',
42 | help='Path to project [Default: current dir]')
43 | parser.add_argument('-c', '--conf-path', default='~/.pypi-private.cfg',
44 | help='Path to config [Default: ~/.pypi-private.cfg]')
45 | parser.add_argument('-i', '--env-interpolation', action='store_true',
46 | help='Make env variables accessible in config file.')
47 | parser.add_argument('-v', '--verbose', default=1, action='count')
48 |
49 | subparsers = parser.add_subparsers(help='subcommand help')
50 |
51 | publish = subparsers.add_parser('publish', help='Publish package')
52 | publish.add_argument('-d', '--dist-dir', default='dist',
53 | help='Directory to look for built distributions')
54 | publish.add_argument('pkg_name')
55 | publish.add_argument('pkg_ver')
56 | publish.set_defaults(func=cmd_publish)
57 |
58 | args = parser.parse_args()
59 |
60 | logging.basicConfig(format=LOGGING_FORMAT)
61 |
62 | logging.getLogger('pypiprivate').setLevel(log_level(args.verbose))
63 |
64 | try:
65 | args.func(args)
66 | except AttributeError:
67 | parser.print_help()
68 | return 1
69 |
70 | return 0
71 |
--------------------------------------------------------------------------------
/pypiprivate/config.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | try:
4 | from ConfigParser import SafeConfigParser
5 | except ImportError:
6 | from configparser import SafeConfigParser
7 |
8 |
9 | class Config(object):
10 |
11 | def __init__(self, path, env, env_interpolation=False):
12 | self.path = os.path.expanduser(path)
13 | self.env = env
14 | if env_interpolation:
15 | self.c = SafeConfigParser(env)
16 | else:
17 | self.c = SafeConfigParser()
18 | with open(self.path) as f:
19 | self.c.readfp(f)
20 |
21 | @property
22 | def storage(self):
23 | return self.c.get('storage', 'type')
24 |
25 | @property
26 | def storage_config(self):
27 | return dict(self.c.items(self.storage))
28 |
--------------------------------------------------------------------------------
/pypiprivate/publish.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import logging
4 |
5 | from pkg_resources import packaging
6 | from jinja2 import Environment
7 |
8 |
9 | logger = logging.getLogger(__name__)
10 |
11 | INDEX_HTML = 'index.html'
12 |
13 | class DistNotFound(Exception):
14 | pass
15 |
16 |
def normalized_name(name):
    """Normalize a project name as per PEP 503.

    Runs of ``-``, ``_`` and ``.`` collapse to a single ``-`` and the
    result is lower-cased.

    Refer: https://www.python.org/dev/peps/pep-0503/#id4
    """
    collapsed = re.sub(r"[-_.]+", "-", name)
    return collapsed.lower()
23 |
24 |
25 | def _filter_pkg_dists(dists, pkg_name, pkg_ver):
26 | # Wheels have different naming conventions: https://www.python.org/dev/peps/pep-0491/#escaping-and-unicode
27 | # We want to account for both sdist and wheel naming.
28 | wheel_name = re.sub(r"[^\w\d.]+", "_", pkg_name, re.UNICODE)
29 | pkg_name_candidates = (pkg_name, wheel_name)
30 | pkg_ver = re.escape(str(pkg_ver))
31 | name_re_alternation = '|'.join((re.escape(candidate) for candidate in pkg_name_candidates))
32 | regexp = re.compile(r'({0})-{1}[.-]'.format(name_re_alternation, pkg_ver))
33 | return filter(regexp.match, dists)
34 |
35 |
36 | def find_pkg_dists(project_path, dist_dir, pkg_name, pkg_ver):
37 | dist_dir = os.path.join(project_path, dist_dir)
38 | logger.info('Looking for package dists in {0}'.format(dist_dir))
39 | dists = _filter_pkg_dists(os.listdir(dist_dir), pkg_name, pkg_ver)
40 | dists = [{'pkg': pkg_name,
41 | 'normalized_name': normalized_name(pkg_name),
42 | 'artifact': f,
43 | 'path': os.path.join(dist_dir, f)}
44 | for f in dists]
45 | return dists
46 |
47 |
48 | def build_index(title, items, index_type='root'):
49 | tmpl = """
50 |
51 |
52 |
53 | {{title}}
54 |
55 |
56 | {%- if index_type != 'root' %}
57 | {{title}}
58 | {% endif -%}
59 | {% for item in items %}
60 | {{item}}
61 | {% endfor %}
62 |
63 |
64 | """
65 | env = Environment(autoescape=True)
66 | template = env.from_string(tmpl)
67 | return template.render(title=title, items=items,
68 | index_type=index_type)
69 |
70 |
71 | def is_dist_published(storage, dist):
72 | path = storage.join_path(dist['normalized_name'], dist['artifact'])
73 | logger.info('Ensuring dist is not already published: {0}'.format(path))
74 | return storage.path_exists(path)
75 |
76 |
77 | def upload_dist(storage, dist):
78 | logger.info('Uploading dist: {0}'.format(dist['artifact']))
79 | dest = storage.join_path(dist['normalized_name'], dist['artifact'])
80 | storage.put_file(dist['path'], dest, sync=True)
81 |
82 |
83 | def update_pkg_index(storage, pkg_name):
84 | logger.info('Updating index for package: {0}'.format(pkg_name))
85 | dists = [d for d in storage.listdir(pkg_name) if d != INDEX_HTML]
86 | title = 'Links for {0}'.format(pkg_name)
87 | index = build_index(title, dists, 'pkg')
88 | index_path = storage.join_path(pkg_name, INDEX_HTML)
89 | storage.put_contents(index, index_path)
90 |
91 |
92 | def update_root_index(storage):
93 | logger.info('Updating repository index')
94 | pkgs = sorted([p for p in storage.listdir('.') if p != INDEX_HTML])
95 | title = 'Private Index'
96 | index = build_index(title, pkgs, 'root')
97 | index_path = storage.join_path(INDEX_HTML)
98 | storage.put_contents(index, index_path)
99 |
100 |
101 | def publish_package(name, version, storage, project_path, dist_dir):
102 | version = packaging.version.Version(version)
103 | dists = find_pkg_dists(project_path, dist_dir, name, version)
104 | if not dists:
105 | raise DistNotFound((
106 | 'No package distribution found in path {0}'
107 | ).format(dist_dir))
108 | rebuild_index = False
109 | for dist in dists:
110 | if not is_dist_published(storage, dist):
111 | logger.info('Trying to publish dist: {0}'.format(dist['artifact']))
112 | upload_dist(storage, dist)
113 | rebuild_index = True
114 | else:
115 | logger.debug((
116 | 'Dist already published: {0} [skipping]'
117 | ).format(dist['artifact']))
118 | if rebuild_index:
119 | logger.info('Updating index')
120 | update_pkg_index(storage, dist['normalized_name'])
121 | update_root_index(storage)
122 | else:
123 | logger.debug('No index update required as no new dists uploaded')
124 |
--------------------------------------------------------------------------------
/pypiprivate/storage.py:
--------------------------------------------------------------------------------
1 | import os
2 | import errno
3 | import shutil
4 | import mimetypes
5 | import logging
6 |
7 | import boto3
8 | from botocore.exceptions import ClientError
9 |
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 |
14 | def guess_content_type(path, default='application/octet-stream'):
15 | ctype = mimetypes.guess_type(path)[0] or default
16 | logger.debug('Guessed ctype of "{0}": "{1}"'.format(path, ctype))
17 | return ctype
18 |
19 |
20 | class StorageException(Exception):
21 | pass
22 |
23 |
24 | class PathNotFound(StorageException):
25 | pass
26 |
27 |
28 | class Storage(object):
29 |
30 | def join_path(self, *args):
31 | raise NotImplementedError
32 |
33 | def listdir(self, path):
34 | raise NotImplementedError
35 |
36 | def path_exists(self, path):
37 | raise NotImplementedError
38 |
39 | def put_contents(self, contents, dest, sync=False):
40 | raise NotImplementedError
41 |
42 | def put_file(self, src, dest, sync=False):
43 | raise NotImplementedError
44 |
45 |
46 | class LocalFileSystemStorage(Storage):
47 |
48 | def __init__(self, base_path):
49 | self.base_path = base_path
50 |
51 | @classmethod
52 | def from_config(cls, config):
53 | storage_config = config.storage_config
54 | return cls(storage_config['base_path'])
55 |
56 | def join_path(self, *args):
57 | return os.path.join(*args)
58 |
59 | def listdir(self, path):
60 | path = self.join_path(self.base_path, path)
61 | try:
62 | return os.listdir(path)
63 | except OSError as e:
64 | if e.errno == errno.ENOENT:
65 | raise PathNotFound('Path {0} not found'.format(path))
66 | raise e
67 |
68 | def path_exists(self, path):
69 | path = self.join_path(self.base_path, path)
70 | return os.path.exists(path)
71 |
72 | def ensure_dir(self, path):
73 | if not os.path.exists(path):
74 | os.makedirs(path)
75 |
76 | def put_contents(self, contents, dest, sync=False):
77 | dest_path = self.join_path(self.base_path, dest)
78 | self.ensure_dir(os.path.dirname(dest_path))
79 | with open(dest_path, 'w') as f:
80 | f.write(contents)
81 | # In LocalFileSystemStorage sync makes no sense
82 | return dest_path
83 |
84 | def put_file(self, src, dest, sync=False):
85 | dest_path = self.join_path(self.base_path, dest)
86 | self.ensure_dir(os.path.dirname(dest_path))
87 | shutil.copy(src, dest_path)
88 | return dest_path
89 |
90 | def __repr__(self):
91 | return (
92 | ''
93 | ).format(self.base_path)
94 |
95 |
96 | class AWSS3Storage(Storage):
97 |
98 | def __init__(self, bucket, acl, creds=None, prefix=None,
99 | endpoint=None, region=None):
100 | if creds:
101 | logger.info('S3 Auth: using explicitly passed credentials')
102 | access_key, secret_key, session_token = creds
103 | session = boto3.Session(aws_access_key_id=access_key,
104 | aws_secret_access_key=secret_key,
105 | aws_session_token=session_token)
106 | else:
107 | logger.info('S3 Auth: using default boto3 methods')
108 | session = boto3.Session()
109 | self.endpoint = endpoint
110 | self.region = region
111 | kwargs = dict()
112 | if endpoint is not None:
113 | kwargs['endpoint_url'] = endpoint
114 | if region is not None:
115 | kwargs['region_name'] = region
116 | self.s3 = s3 = session.resource('s3', **kwargs)
117 | self.bucket = s3.Bucket(bucket)
118 | self.prefix = prefix
119 | self.acl = acl
120 |
121 | @classmethod
122 | def from_config(cls, config):
123 | storage_config = config.storage_config
124 | env = config.env
125 | bucket = storage_config['bucket']
126 | prefix = storage_config.get('prefix')
127 | acl = storage_config.get('acl', 'private')
128 | endpoint = storage_config.get('endpoint', None)
129 | region = storage_config.get('region', None)
130 | # Following 2 are the required env vars for s3 auth. If any of
131 | # these are not set, we try using the default boto3 methods
132 | # (same as the ones that AWS CLI and other tools support)
133 | pp_cred_keys = ['PP_S3_ACCESS_KEY', 'PP_S3_SECRET_KEY']
134 | if all([(k in env) for k in pp_cred_keys]):
135 | logger.debug('PP_S3_* env vars found: using them for auth')
136 | creds = (env['PP_S3_ACCESS_KEY'],
137 | env['PP_S3_SECRET_KEY'],
138 | env.get('PP_S3_SESSION_TOKEN', None))
139 | else:
140 | logger.debug((
141 | 'PP_S3_* env vars not found: '
142 | 'Falling back to default methods supported by boto3'
143 | ))
144 | creds = None
145 | return cls(bucket, acl, creds=creds, prefix=prefix,
146 | endpoint=endpoint, region=region)
147 |
148 | def join_path(self, *args):
149 | return '/'.join(args)
150 |
151 | def prefixed_path(self, path):
152 | parts = []
153 | if self.prefix:
154 | parts.append(self.prefix)
155 | if path != '.':
156 | parts.append(path)
157 | return self.join_path(*parts)
158 |
159 | def listdir(self, path):
160 | path = self.prefixed_path(path)
161 | if path != '' and not path.endswith('/'):
162 | s3_prefix = '{0}/'.format(path)
163 | else:
164 | s3_prefix = path
165 | logger.debug('Listing objects prefixed with: {0}'.format(s3_prefix))
166 | client = self.s3.meta.client
167 | paginator = client.get_paginator('list_objects')
168 | response = paginator.paginate(Bucket=self.bucket.name,
169 | Prefix=s3_prefix,
170 | Delimiter='/')
171 | file_objs = [c for c in response.search('Contents') if c]
172 | dir_objs = [cp for cp in response.search('CommonPrefixes') if cp]
173 | # If no objs found, it means the path doesn't exist
174 | if len(file_objs) == len(dir_objs) == 0:
175 | raise PathNotFound('Path {0} not found'.format(s3_prefix))
176 | files = (c['Key'][len(s3_prefix):] for c in file_objs)
177 | files = [f for f in files if f != '']
178 | dirs = [cp['Prefix'][len(s3_prefix):].rstrip('/') for cp in dir_objs]
179 | return files + dirs
180 |
181 | def path_exists(self, path):
182 | path = self.prefixed_path(path)
183 | logger.debug('Checking if key exists: {0}'.format(path))
184 | client = self.s3.meta.client
185 | try:
186 | client.head_object(Bucket=self.bucket.name, Key=path)
187 | except ClientError as e:
188 | logger.debug('Handled ClientError: {0}'.format(e))
189 | return False
190 | else:
191 | return True
192 |
193 | def put_contents(self, contents, dest, sync=False):
194 | dest_path = self.prefixed_path(dest)
195 | client = self.s3.meta.client
196 | logger.debug('Writing content to s3: {0}'.format(dest_path))
197 | client.put_object(Bucket=self.bucket.name,
198 | Key=dest_path,
199 | Body=contents.encode('utf-8'),
200 | ContentType=guess_content_type(dest),
201 | ACL=self.acl)
202 | if sync:
203 | waiter = client.get_waiter('object_exists')
204 | waiter.wait(Bucket=self.bucket.name, Key=dest_path)
205 |
206 | def put_file(self, src, dest, sync=False):
207 | dest_path = self.prefixed_path(dest)
208 | client = self.s3.meta.client
209 | logger.debug('Uploading file to s3: {0} -> {1}'.format(src, dest_path))
210 | with open(src, 'rb') as f:
211 | client.put_object(Bucket=self.bucket.name,
212 | Key=dest_path,
213 | Body=f,
214 | ContentType=guess_content_type(dest),
215 | ACL=self.acl)
216 | if sync:
217 | waiter = client.get_waiter('object_exists')
218 | waiter.wait(Bucket=self.bucket.name, Key=dest_path)
219 |
220 | def __repr__(self):
221 | return (
222 | ''
223 | ).format(self.bucket.name, self.prefix)
224 |
225 |
def load_storage(config):
    """Instantiate the storage backend named by ``config.storage``.

    Supported backends: 'local-filesystem', 'aws-s3' and 'azure'.

    Raises ValueError for any other storage name.
    """
    backend = config.storage
    if backend == 'local-filesystem':
        return LocalFileSystemStorage.from_config(config)
    if backend == 'aws-s3':
        return AWSS3Storage.from_config(config)
    if backend == 'azure':
        # Imported lazily so the azure extra is only required when used
        from pypiprivate.azure import AzureBlobStorage
        return AzureBlobStorage.from_config(config)
    raise ValueError('Unsupported storage "{0}"'.format(backend))
236 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
import re
import ast

from setuptools import setup


# Extract __version__ from the package source without importing it
# (importing could fail before dependencies are installed).
_version_re = re.compile(r'__version__\s+=\s+(.*)')

with open('pypiprivate/__init__.py', 'rb') as f:
    _pkg_source = f.read().decode('utf-8')

version = str(ast.literal_eval(_version_re.search(_pkg_source).group(1)))


# The README doubles as the long description shown on the package index.
with open('./README.rst') as f:
    long_desc = f.read()


# Optional dependency groups: install with `pip install pypiprivate[azure]`
extras_require = {
    'azure': [
        'azure-storage-blob==12.2.0'
    ]
}


setup(
    name='pypiprivate',
    version=version,
    author='Vineet Naik',
    author_email='vineet@helpshift.com',
    url='https://github.com/helpshift/pypiprivate',
    license='MIT License',
    description='Private package management tool for Python projects',
    long_description=long_desc,
    install_requires=['setuptools>=36.0.0',
                      'Jinja2==2.10.0',
                      'boto3==1.5.27'],
    extras_require=extras_require,
    packages=['pypiprivate'],
    entry_points={
        'console_scripts': [
            'pypi-private = pypiprivate.cli:main'
        ]
    },
    classifiers=[
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.6',
    ]
)
52 |
--------------------------------------------------------------------------------
/sonar-project.properties:
--------------------------------------------------------------------------------
1 | sonar.projectKey=pypiprivate
2 | sonar.projectName=pypiprivate
sonar.projectVersion=0.5.0
4 | sonar.sources=pypiprivate
5 | sonar.python.coverage.reportPaths=coverage.xml
6 | sonar.dependencyCheck.htmlReportPath=dependency-check-report.html
7 | sonar.dependencyCheck.jsonReportPath=dependency-check-report.json
8 | sonar.python.bandit.reportPaths=bandit_report.json
9 | sonar.python.pylint.reportPaths=pylint.txt
--------------------------------------------------------------------------------
/tests/test_publish.py:
--------------------------------------------------------------------------------
1 | import pypiprivate.publish as pp
2 |
3 | try:
4 | import mock
5 | except ImportError:
6 | from unittest import mock
7 |
8 | from pkg_resources import packaging
9 | import pytest
10 |
11 |
12 | Version = packaging.version.Version
13 |
14 |
def test__filter_pkg_dists():
    """Only dists matching both the package name and version survive."""
    candidates = ['abc-0.1.0-py2-none-any.whl',
                  'abc-0.1.0.tar.gz',
                  'abc-0.0.1.tar.gz',
                  'abc-0.100.tar.gz',
                  'abc-0.1.0b1.tar.gz']

    result = list(pp._filter_pkg_dists(candidates, 'abc', Version('0.1.0')))
    assert result == ['abc-0.1.0-py2-none-any.whl', 'abc-0.1.0.tar.gz']

    # Pre-release versions match only their own artifacts
    result = list(pp._filter_pkg_dists(candidates, 'abc', Version('0.1.0-beta1')))
    assert result == ['abc-0.1.0b1.tar.gz']
26 |
27 |
def test__filter_hyphenated_pkg_dists():
    # Wheels and sdists have different naming conventions for hyphenated package names
    # this checks that filtering works for both
    candidates = ['a_bc-0.1.0-py2-none-any.whl',
                  'a-bc-0.1.0.tar.gz',
                  'a_b_c-0.1.0-cp37-cp37m-linux_x86_64.whl',
                  'a-b-c-0.1.0.tar.gz']

    result = list(pp._filter_pkg_dists(candidates, 'a-bc', '0.1.0'))
    assert result == ['a_bc-0.1.0-py2-none-any.whl', 'a-bc-0.1.0.tar.gz']

    result = list(pp._filter_pkg_dists(candidates, 'a-b-c', '0.1.0'))
    assert result == ['a_b_c-0.1.0-cp37-cp37m-linux_x86_64.whl', 'a-b-c-0.1.0.tar.gz']
40 |
41 |
def test_find_pkg_dists():
    """find_pkg_dists locates matching dist files under <project>/<dist-dir>
    and reports the pkg name, its normalized form, artifact and full path."""
    project_path = '/tmp/abc'
    with mock.patch('os.listdir') as mock_fn:
        mock_fn.return_value = ['abc-0.1.0-py2-none-any.whl',
                                'abc-0.1.0.tar.gz',
                                'FooBar-3.2.0.tar.gz',
                                'Bat_baz-1.8.4-py2-none-any.whl']
        result = list(pp.find_pkg_dists(project_path, 'dist', 'abc', '0.1.0'))
        expected = [{'pkg': 'abc',
                     'normalized_name': 'abc',
                     'artifact': 'abc-0.1.0-py2-none-any.whl',
                     'path': '/tmp/abc/dist/abc-0.1.0-py2-none-any.whl'},
                    {'pkg': 'abc',
                     'normalized_name': 'abc',
                     'artifact': 'abc-0.1.0.tar.gz',
                     'path': '/tmp/abc/dist/abc-0.1.0.tar.gz'}]
        assert expected == result

        result = list(pp.find_pkg_dists(project_path, 'dist', 'FooBar', '3.2.0'))
        expected = [{'pkg': 'FooBar',
                     'normalized_name': 'foobar',
                     'artifact': 'FooBar-3.2.0.tar.gz',
                     'path': '/tmp/abc/dist/FooBar-3.2.0.tar.gz'}]
        assert expected == result

        # Bug fix: the original built this `expected` but never asserted it,
        # and its artifact ('Bat_baz-1.8.4.tar.gz') was not even present in
        # the mocked listing -- only the wheel is. Assert against the wheel.
        # 'bat-baz' assumes PEP-503 normalization ([-_.]+ -> '-', lowered);
        # TODO confirm against pypiprivate.publish's normalizer.
        result = list(pp.find_pkg_dists(project_path, 'dist', 'Bat_baz', '1.8.4'))
        expected = [{'pkg': 'Bat_baz',
                     'normalized_name': 'bat-baz',
                     'artifact': 'Bat_baz-1.8.4-py2-none-any.whl',
                     'path': '/tmp/abc/dist/Bat_baz-1.8.4-py2-none-any.whl'}]
        assert expected == result

        mock_fn.assert_called_with('/tmp/abc/dist')
74 |
75 |
def test_upload_dist():
    """upload_dist delegates to storage.put_file with sync=True."""
    dist = {'pkg': 'abc',
            'normalized_name': 'abc',
            'artifact': 'abc-0.1.0.tar.gz',
            'path': '/tmp/abc/dist/abc-0.1.0.tar.gz'}
    fake_storage = mock.MagicMock()
    fake_storage.join_path.return_value = 'abc/abc-0.1.0.tar.gz'

    pp.upload_dist(fake_storage, dist)

    fake_storage.put_file.assert_called_once_with('/tmp/abc/dist/abc-0.1.0.tar.gz',
                                                  'abc/abc-0.1.0.tar.gz',
                                                  sync=True)
87 |
88 |
def test_publish_package():
    """Exercise publish_package's orchestration across four scenarios:
    nothing published yet, partially published, fully published, and
    no dists found at all (expects DistNotFound).

    NOTE(review): this test rebinds attributes on the ``pp`` module
    directly (e.g. ``pp.find_pkg_dists = mock.Mock()``) and never
    restores them, so the real functions remain replaced for any test
    that runs later in the same process. Consider using ``mock.patch``
    context managers instead -- TODO confirm ordering impact.
    """
    storage = 'dummy-storage'

    # Two dists for the same package/version: a wheel and an sdist
    d1 = {'pkg': 'abc',
          'normalized_name': 'abc',
          'artifact': 'abc-0.1.0-py2-none-any.whl',
          'path': '/tmp/abc/dist/abc-0.1.0-py2-none-any.whl'}
    d2 = {'pkg': 'abc',
          'normalized_name': 'abc',
          'artifact': 'abc-0.1.0.tar.gz',
          'path': '/tmp/abc/dist/abc-0.1.0.tar.gz'}
    pkg_dists = [d1, d2]

    pp.find_pkg_dists = mock.Mock()
    pp.find_pkg_dists.return_value = pkg_dists

    # When no dists are already published
    pp.is_dist_published = mock.Mock()
    pp.is_dist_published.return_value = False
    pp.upload_dist = mock.Mock()
    pp.update_pkg_index = mock.Mock()
    pp.update_root_index = mock.Mock()

    pp.publish_package('abc', '0.1.0', storage, '.', 'dist')

    # Version string arguments get parsed into Version objects internally
    pp.find_pkg_dists.assert_called_once_with('.', 'dist', 'abc', Version('0.1.0'))
    assert pp.upload_dist.call_count == 2
    assert pp.upload_dist.call_args_list[0][0] == (storage, d1)
    assert pp.upload_dist.call_args_list[1][0] == (storage, d2)
    pp.update_pkg_index.assert_called_once_with(storage, 'abc')
    pp.update_root_index.assert_called_once_with(storage)

    # When some dists are already published
    pp.find_pkg_dists = mock.Mock()
    pp.find_pkg_dists.return_value = pkg_dists

    def mock_is_dist_published(storage, dist):
        # d1 counts as published, d2 does not
        if dist == d1:
            return True
        elif dist == d2:
            return False

    pp.is_dist_published = mock.Mock()
    pp.is_dist_published.side_effect = mock_is_dist_published
    pp.upload_dist = mock.Mock()
    pp.update_pkg_index = mock.Mock()
    pp.update_root_index = mock.Mock()

    pp.publish_package('abc', '0.1.0', storage, '.', 'dist')

    # Only the unpublished dist (d2) gets uploaded; indexes still refresh
    pp.find_pkg_dists.assert_called_once_with('.', 'dist', 'abc', Version('0.1.0'))
    assert pp.upload_dist.call_count == 1
    assert pp.upload_dist.call_args_list[0][0] == (storage, d2)
    pp.update_pkg_index.assert_called_once_with(storage, 'abc')
    pp.update_root_index.assert_called_once_with(storage)

    # When all dists are already published
    pp.find_pkg_dists = mock.Mock()
    pp.find_pkg_dists.return_value = pkg_dists
    pp.is_dist_published = mock.Mock()
    pp.is_dist_published.return_value = True
    pp.upload_dist = mock.Mock()
    pp.update_pkg_index = mock.Mock()
    pp.update_root_index = mock.Mock()

    pp.publish_package('abc', '0.1.0', storage, '.', 'dist')

    # Nothing to do: no uploads and no index updates
    pp.find_pkg_dists.assert_called_once_with('.', 'dist', 'abc', Version('0.1.0'))
    assert pp.upload_dist.call_count == 0
    assert pp.update_pkg_index.call_count == 0
    assert pp.update_root_index.call_count == 0

    # When no dists are found
    pp.find_pkg_dists = mock.Mock()
    pp.find_pkg_dists.return_value = []
    with pytest.raises(pp.DistNotFound):
        pp.publish_package('abc', '0.1.0', storage, '.', 'dist')
166 |
--------------------------------------------------------------------------------
/tests/test_storage.py:
--------------------------------------------------------------------------------
1 | import pypiprivate.storage as ps
2 |
3 |
4 | try:
5 | import mock
6 | except ImportError:
7 | from unittest import mock
8 |
9 |
def test_AWSS3Storage__from_config_1():
    """Credentials from env; no endpoint/region/acl configured -> defaults."""
    sc = {'bucket': 'mybucket',
          'prefix': 'simple'}
    env = {'PP_S3_ACCESS_KEY': 'access',
           'PP_S3_SECRET_KEY': 'secret'}
    config = mock.Mock(storage_config=sc, env=env)
    with mock.patch('pypiprivate.storage.boto3.Session') as m:
        storage = ps.AWSS3Storage.from_config(config)
        assert storage.endpoint is None
        assert storage.region is None
        assert storage.acl == 'private'

        # The session must be created with the env credentials, then used
        # to build the s3 resource and the bucket handle, in that order
        expected_calls = [
            mock.call(aws_access_key_id='access',
                      aws_secret_access_key='secret',
                      aws_session_token=None),
            mock.call().resource('s3'),
            mock.call().resource().Bucket('mybucket'),
        ]
        assert m.mock_calls == expected_calls
33 |
34 |
def test_AWSS3Storage__from_config_2():
    """A configured acl and a session token in the env are both honoured."""
    sc = {'bucket': 'mybucket',
          'prefix': 'simple',
          'acl': 'public'}
    env = {'PP_S3_ACCESS_KEY': 'access',
           'PP_S3_SECRET_KEY': 'secret',
           'PP_S3_SESSION_TOKEN': 'session'}
    config = mock.Mock(storage_config=sc, env=env)
    with mock.patch('pypiprivate.storage.boto3.Session') as m:
        s = ps.AWSS3Storage.from_config(config)
        assert s.endpoint is None
        assert s.region is None
        assert s.acl == 'public'

        # Assertions on calls made to Session object
        assert len(m.mock_calls) == 3
        c1, c2, c3 = m.mock_calls
        exp_c1 = mock.call(aws_access_key_id='access',
                           aws_secret_access_key='secret',
                           aws_session_token='session')
        exp_c2 = mock.call().resource('s3')
        exp_c3 = mock.call().resource().Bucket('mybucket')
        assert c1 == exp_c1
        assert c2 == exp_c2
        assert c3 == exp_c3
60 |
61 |
def test_AWSS3Storage__from_config_3():
    """Configured endpoint and region are passed through to the s3 resource."""
    sc = {'bucket': 'mybucket',
          'prefix': 'simple',
          'endpoint': 'https://s3.us-west-2.amazonaws.com',
          'region': 'us-west-2'}
    env = {'PP_S3_ACCESS_KEY': 'access',
           'PP_S3_SECRET_KEY': 'secret',
           'PP_S3_SESSION_TOKEN': 'session'}
    config = mock.Mock(storage_config=sc, env=env)
    with mock.patch('pypiprivate.storage.boto3.Session') as m:
        s = ps.AWSS3Storage.from_config(config)
        assert s.endpoint == 'https://s3.us-west-2.amazonaws.com'
        assert s.region == 'us-west-2'
        assert s.acl == 'private'

        # Assertions on calls made to Session object
        assert len(m.mock_calls) == 3
        c1, c2, c3 = m.mock_calls
        exp_c1 = mock.call(aws_access_key_id='access',
                           aws_secret_access_key='secret',
                           aws_session_token='session')
        # endpoint/region go to the resource call, not the Session itself
        exp_c2 = mock.call().resource('s3',
                                      endpoint_url='https://s3.us-west-2.amazonaws.com',
                                      region_name='us-west-2')
        exp_c3 = mock.call().resource().Bucket('mybucket')
        assert c1 == exp_c1
        assert c2 == exp_c2
        assert c3 == exp_c3
90 |
91 |
def test_AWSS3Storage__from_config_4():
    """With no credentials in the env the Session is created with no
    arguments, deferring to boto3's default credential resolution."""
    sc = {'bucket': 'mybucket',
          'prefix': 'simple',
          'endpoint': 'https://s3.us-west-2.amazonaws.com',
          'region': 'us-west-2'}
    env = {}
    config = mock.Mock(storage_config=sc, env=env)
    with mock.patch('pypiprivate.storage.boto3.Session') as m:
        storage = ps.AWSS3Storage.from_config(config)
        assert storage.endpoint == 'https://s3.us-west-2.amazonaws.com'
        assert storage.region == 'us-west-2'
        assert storage.acl == 'private'

        # Empty Session() call = fall back to boto3's own config methods
        expected_calls = [
            mock.call(),
            mock.call().resource('s3',
                                 endpoint_url='https://s3.us-west-2.amazonaws.com',
                                 region_name='us-west-2'),
            mock.call().resource().Bucket('mybucket'),
        ]
        assert m.mock_calls == expected_calls
116 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py27, py34, py36
3 |
4 | [testenv:py36]
5 | commands = pytest -v tests/
6 | deps = pytest
7 |
8 | [testenv:py34]
9 | commands = pytest -v tests/
10 | deps = pytest
11 |
12 | [testenv:py27]
13 | commands = pytest -v tests/
14 | deps = mock
15 | pytest
--------------------------------------------------------------------------------