├── .git-blame-ignore-revs ├── .gitattributes ├── .github ├── FUNDING.yml └── workflows │ └── ci.yml ├── .gitignore ├── .readthedocs.yaml ├── CHANGELOG.rst ├── LICENSE ├── MANIFEST.in ├── README.rst ├── docs ├── Makefile ├── backends │ ├── amazon-S3.rst │ ├── apache_libcloud.rst │ ├── azure.rst │ ├── dropbox.rst │ ├── ftp.rst │ ├── gcloud.rst │ ├── s3_compatible │ │ ├── backblaze-B2.rst │ │ ├── cloudflare-r2.rst │ │ ├── digital-ocean-spaces.rst │ │ ├── index.rst │ │ ├── oracle-cloud.rst │ │ └── scaleway.rst │ └── sftp.rst ├── conf.py ├── index.rst ├── logos │ ├── horizontal.png │ ├── horizontal2.png │ ├── logo.png │ ├── logo.svg │ └── vertical.png ├── make.bat └── requirements.txt ├── pyproject.toml ├── setup.py ├── storages ├── __init__.py ├── backends │ ├── __init__.py │ ├── apache_libcloud.py │ ├── azure_storage.py │ ├── dropbox.py │ ├── ftp.py │ ├── gcloud.py │ ├── s3.py │ ├── s3boto3.py │ └── sftpstorage.py ├── base.py ├── compress.py └── utils.py ├── tests ├── __init__.py ├── settings.py ├── test_azure.py ├── test_dropbox.py ├── test_files │ └── windows-1252-encoded.txt ├── test_ftp.py ├── test_gcloud.py ├── test_s3.py ├── test_sftp.py ├── test_utils.py └── utils.py └── tox.ini /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # .git-blame-ignore-revs 2 | # Format code with Black 3 | 44f832202f4b434ba9d15e8eb72f859d208dd26d 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto 2 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | tidelift: pypi/django-storages 2 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: 8 | - '**' 9 | 10 | 11 | jobs: 12 | run_unittest_tests: 13 | name: Unittest 14 | runs-on: ${{ matrix.os }} 15 | strategy: 16 | matrix: 17 | os: [ubuntu-22.04] 18 | python-version: 19 | - "3.8" 20 | - "3.9" 21 | - "3.10" 22 | - "3.11" 23 | - "3.12" 24 | django-version: 25 | - "4.2" 26 | - "5.0" 27 | - "5.1" 28 | - "main" 29 | exclude: 30 | - python-version: "3.8" 31 | django-version: "5.0" 32 | - python-version: "3.9" 33 | django-version: "5.0" 34 | - python-version: "3.8" 35 | django-version: "5.1" 36 | - python-version: "3.9" 37 | django-version: "5.1" 38 | - python-version: "3.8" 39 | django-version: "main" 40 | - python-version: "3.9" 41 | django-version: "main" 42 | - python-version: "3.10" 43 | django-version: "main" 44 | - python-version: "3.11" 45 | django-version: "main" 46 | 47 | steps: 48 | - uses: actions/checkout@v4 49 | - name: setup python 50 | uses: actions/setup-python@v4 51 | with: 52 | python-version: ${{ matrix.python-version }} 53 | 54 | - name: Install Dependencies 55 | run: | 56 | pip install --upgrade setuptools 57 | pip install tox 58 | 59 | - name: Run unittest Python ${{ matrix.python-version }} -- Django ${{ matrix.django-version }} 60 | env: 61 | TOXENV: py${{ matrix.python-version }}-django${{ matrix.django-version }} 62 | run: tox 63 | 64 | run_quality_tests: 65 | name: Quality tests 66 | runs-on: ${{ matrix.os }} 67 | strategy: 68 | matrix: 69 | os: [ubuntu-22.04] 70 | 
python-version: ["3.8"] 71 | 72 | steps: 73 | - uses: actions/checkout@v4 74 | - name: setup python 75 | uses: actions/setup-python@v4 76 | with: 77 | python-version: ${{ matrix.python-version }} 78 | 79 | - name: Install Dependencies 80 | run: | 81 | pip install tox 82 | 83 | - name: Quality tests 84 | env: 85 | TOXENV: ruff 86 | run: | 87 | tox 88 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.egg 2 | *.egg-info 3 | *.orig 4 | *.pyc 5 | *.swp 6 | 7 | .tox/ 8 | build/ 9 | __pycache__ 10 | .coverage 11 | .cache 12 | 13 | .idea/ 14 | .vscode/ 15 | .pytest_cache/ 16 | venv/ 17 | .venv/ 18 | 19 | dist/ 20 | docs/_build 21 | 22 | .DS_Store 23 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file for Sphinx projects 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | version: 2 5 | 6 | build: 7 | os: ubuntu-22.04 8 | tools: 9 | python: "3.11" 10 | 11 | sphinx: 12 | configuration: docs/conf.py 13 | 14 | python: 15 | install: 16 | - requirements: docs/requirements.txt 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2008 - 2023, Josh Schneier, David Larlet, et al. 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include CHANGELOG.rst LICENSE README.rst 2 | recursive-include tests *.py 3 | recursive-include docs Makefile conf.py make.bat *.rst 4 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://raw.githubusercontent.com/jschneier/django-storages/master/docs/logos/horizontal.png 2 | :alt: Django-Storages 3 | :width: 100% 4 | 5 | .. image:: https://img.shields.io/pypi/v/django-storages.svg 6 | :target: https://pypi.org/project/django-storages/ 7 | :alt: PyPI Version 8 | 9 | .. image:: https://github.com/jschneier/django-storages/actions/workflows/ci.yml/badge.svg 10 | :target: https://github.com/jschneier/django-storages/actions/workflows/ci.yml 11 | :alt: Build Status 12 | 13 | Installation 14 | ============ 15 | Installing from PyPI is as easy as doing: 16 | 17 | .. code-block:: bash 18 | 19 | pip install django-storages 20 | 21 | If you'd prefer to install from source (maybe there is a bugfix in master that 22 | hasn't been released yet) then the magic incantation you are looking for is: 23 | 24 | .. code-block:: bash 25 | 26 | pip install -e 'git+https://github.com/jschneier/django-storages.git#egg=django-storages' 27 | 28 | For detailed instructions on how to configure the backend of your choice please consult the documentation. 29 | 30 | About 31 | ===== 32 | django-storages is a project to provide a variety of storage backends in a single library. 33 | 34 | This library is usually compatible with the currently supported versions of 35 | Django. Check the Trove classifiers in setup.py to be sure. 36 | 37 | django-storages is backed in part by `Tidelift`_. Check them out for all of your enterprise open source 38 | software commercial support needs. 39 | 40 | .. _Tidelift: https://tidelift.com/subscription/pkg/pypi-django-storages?utm_source=pypi-django-storages&utm_medium=referral&utm_campaign=enterprise&utm_term=repo 41 | 42 | Security 43 | ======== 44 | 45 | To report a security vulnerability, please use the `Tidelift security contact`_. Tidelift will coordinate the 46 | fix and disclosure. Please **do not** post a public issue on the tracker. 47 | 48 | .. _Tidelift security contact: https://tidelift.com/security 49 | 50 | 51 | Found a Bug? 52 | ============ 53 | 54 | Issues are tracked via GitHub issues at the `project issue page 55 | `_. 56 | 57 | Documentation 58 | ============= 59 | Documentation for django-storages is located at https://django-storages.readthedocs.io/. 60 | 61 | Contributing 62 | ============ 63 | 64 | #. `Check for open issues 65 | `_ at the project 66 | issue page or open a new issue to start a discussion about a feature or bug. 67 | #. Fork the `django-storages repository on GitHub 68 | `_ to start making changes. 69 | #. Add a test case to show that the bug is fixed or the feature is implemented 70 | correctly. 71 | #. Bug me until I can merge your pull request. 72 | 73 | Please don't update the library version in CHANGELOG.rst or ``storages/__init__.py``, the maintainer will do that on release. 74 | 75 | History 76 | ======= 77 | This repo began as a fork of the original library under the package name of django-storages-redux and 78 | became the official successor (releasing under django-storages on PyPI) in February of 2016. 
79 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | 15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest 16 | 17 | help: 18 | @echo "Please use \`make ' where is one of" 19 | @echo " html to make standalone HTML files" 20 | @echo " dirhtml to make HTML files named index.html in directories" 21 | @echo " singlehtml to make a single large HTML file" 22 | @echo " pickle to make pickle files" 23 | @echo " json to make JSON files" 24 | @echo " htmlhelp to make HTML files and a HTML help project" 25 | @echo " qthelp to make HTML files and a qthelp project" 26 | @echo " devhelp to make HTML files and a Devhelp project" 27 | @echo " epub to make an epub" 28 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 29 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 30 | @echo " text to make text files" 31 | @echo " man to make manual pages" 32 | @echo " changes to make an overview of all changed/added/deprecated items" 33 | @echo " linkcheck to check all external links for integrity" 34 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 35 | 36 | clean: 37 | -rm -rf $(BUILDDIR)/* 38 | 39 | html: 40 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 41 | @echo 42 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 43 | 44 | dirhtml: 45 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 48 | 49 | singlehtml: 50 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 51 | @echo 52 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 53 | 54 | pickle: 55 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 56 | @echo 57 | @echo "Build finished; now you can process the pickle files." 58 | 59 | json: 60 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 61 | @echo 62 | @echo "Build finished; now you can process the JSON files." 63 | 64 | htmlhelp: 65 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 66 | @echo 67 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 68 | ".hhp project file in $(BUILDDIR)/htmlhelp." 69 | 70 | qthelp: 71 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 72 | @echo 73 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 74 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 75 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-storages.qhcp" 76 | @echo "To view the help file:" 77 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-storages.qhc" 78 | 79 | devhelp: 80 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 81 | @echo 82 | @echo "Build finished." 
83 | @echo "To view the help file:" 84 | @echo "# mkdir -p $$HOME/.local/share/devhelp/django-storages" 85 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-storages" 86 | @echo "# devhelp" 87 | 88 | epub: 89 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 90 | @echo 91 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 92 | 93 | latex: 94 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 95 | @echo 96 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 97 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 98 | "(use \`make latexpdf' here to do that automatically)." 99 | 100 | latexpdf: 101 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 102 | @echo "Running LaTeX files through pdflatex..." 103 | make -C $(BUILDDIR)/latex all-pdf 104 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 105 | 106 | text: 107 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 108 | @echo 109 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 110 | 111 | man: 112 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 113 | @echo 114 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 115 | 116 | changes: 117 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 118 | @echo 119 | @echo "The overview file is in $(BUILDDIR)/changes." 120 | 121 | linkcheck: 122 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 123 | @echo 124 | @echo "Link check complete; look for any errors in the above output " \ 125 | "or in $(BUILDDIR)/linkcheck/output.txt." 126 | 127 | doctest: 128 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 129 | @echo "Testing of doctests in the sources finished, look at the " \ 130 | "results in $(BUILDDIR)/doctest/output.txt." 131 | -------------------------------------------------------------------------------- /docs/backends/amazon-S3.rst: -------------------------------------------------------------------------------- 1 | Amazon S3 2 | ========= 3 | 4 | This backend implements the Django File Storage API for Amazon Web Services's (AWS) Simple Storage Service (S3). 5 | 6 | Installation 7 | ------------ 8 | 9 | The backend is based on the boto3 library which must be installed; the minimum required version is 1.4.4 although 10 | we always recommend the most recent. Either add it to your requirements or use the optional ``s3`` extra e.g:: 11 | 12 | pip install django-storages[s3] 13 | 14 | Configuration & Settings 15 | ------------------------ 16 | 17 | Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure 18 | storage backends and add additional ones. To configure multiple storage objects pre Django 4.2 required subclassing the backend 19 | because the settings were global, now you pass them under the key ``OPTIONS``. 
For example, to save media files to S3 on Django 20 | >= 4.2 you'd define:: 21 | 22 | 23 | STORAGES = { 24 | "default": { 25 | "BACKEND": "storages.backends.s3.S3Storage", 26 | "OPTIONS": { 27 | ...your_options_here 28 | }, 29 | }, 30 | } 31 | 32 | On Django < 4.2 you'd instead define:: 33 | 34 | DEFAULT_FILE_STORAGE = "storages.backends.s3.S3Storage" 35 | 36 | To put static files on S3 via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as 37 | ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: 38 | 39 | STATICFILES_STORAGE = "storages.backends.s3.S3Storage" 40 | 41 | The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as 42 | well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. 43 | 44 | Authentication Settings 45 | ~~~~~~~~~~~~~~~~~~~~~~~ 46 | 47 | There are several different methods for specifying the AWS credentials used to create the S3 client. In the order that ``S3Storage`` 48 | searches for them: 49 | 50 | #. ``session_profile`` or ``AWS_S3_SESSION_PROFILE`` 51 | #. ``access_key`` or ``AWS_S3_ACCESS_KEY_ID`` or ``AWS_ACCESS_KEY_ID`` 52 | #. ``secret_key`` or ``AWS_S3_SECRET_ACCESS_KEY`` or ``AWS_SECRET_ACCESS_KEY`` 53 | #. ``security_token`` or ``AWS_SESSION_TOKEN`` or ``AWS_SECURITY_TOKEN`` 54 | #. The environment variables AWS_S3_ACCESS_KEY_ID and AWS_S3_SECRET_ACCESS_KEY 55 | #. The environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY 56 | #. The environment variables AWS_SESSION_TOKEN and AWS_SECURITY_TOKEN 57 | #. Use Boto3's default session 58 | 59 | Settings 60 | ~~~~~~~~ 61 | 62 | ``bucket_name`` or ``AWS_STORAGE_BUCKET_NAME`` 63 | 64 | **Required** 65 | 66 | The name of the S3 bucket that will host the files. 67 | 68 | ``object_parameters`` or ``AWS_S3_OBJECT_PARAMETERS`` 69 | 70 | Default: ``{}`` 71 | 72 | Use this to set parameters on all objects. To set these on a per-object 73 | basis, subclass the backend and override ``S3Storage.get_object_parameters``. 74 | 75 | To view a full list of possible parameters (there are many) see the `Boto3 docs for uploading files`_; an incomplete list includes: ``CacheControl``, ``SSEKMSKeyId``, ``StorageClass``, ``Tagging`` and ``Metadata``. 76 | 77 | ``default_acl`` or ``AWS_DEFAULT_ACL`` 78 | 79 | Default: ``None`` - the file will be ``private`` per Amazon's default 80 | 81 | Use this to set an ACL on your file such as ``public-read``. If not set, the file will be ``private`` per Amazon's default. 82 | If the ``ACL`` parameter is set in ``object_parameters``, then this setting is ignored. 83 | 84 | Options such as ``public-read`` and ``private`` come from the `list of canned ACLs`_. 85 | 86 | ``querystring_auth`` or ``AWS_QUERYSTRING_AUTH`` 87 | 88 | Default: ``True`` 89 | 90 | Set ``AWS_QUERYSTRING_AUTH`` to ``False`` to remove query parameter 91 | authentication from generated URLs. This can be useful if your S3 buckets 92 | are public. 93 | 94 | ``max_memory_size`` or ``AWS_S3_MAX_MEMORY_SIZE`` 95 | 96 | Default: ``0`` i.e. do not roll over 97 | 98 | The maximum amount of memory (in bytes) a file can take up before being rolled over 99 | into a temporary file on disk. 100 | 101 | ``querystring_expire`` or ``AWS_QUERYSTRING_EXPIRE`` 102 | 103 | Default: ``3600`` 104 | 105 | The number of seconds that a generated URL is valid for.
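For illustration only, a minimal media configuration combining a few of the settings above might look like the following sketch (the bucket name and values here are placeholders, not recommendations)::

    STORAGES = {
        "default": {
            "BACKEND": "storages.backends.s3.S3Storage",
            "OPTIONS": {
                "bucket_name": "example-bucket-name",  # required
                "location": "media",  # optional path prefix for all uploads
                "default_acl": None,  # keep objects private, per Amazon's default
                "querystring_auth": True,  # generate signed URLs
                "querystring_expire": 3600,  # signed URLs valid for one hour
            },
        },
    }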
106 | 107 | ``url_protocol`` or ``AWS_S3_URL_PROTOCOL`` 108 | 109 | Default: ``https:`` 110 | 111 | The protocol to use when constructing a custom domain; ``custom_domain`` must be set for this to have any effect. 112 | 113 | .. note:: 114 | Must end in a ``:`` 115 | 116 | ``file_overwrite`` or ``AWS_S3_FILE_OVERWRITE`` 117 | 118 | Default: ``True`` 119 | 120 | By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended. 121 | 122 | ``location`` or ``AWS_LOCATION`` 123 | 124 | Default: ``''`` 125 | 126 | A path prefix that will be prepended to all uploads. 127 | 128 | ``gzip`` or ``AWS_IS_GZIPPED`` 129 | 130 | Default: ``False`` 131 | 132 | Whether or not to enable gzipping of content types specified by ``gzip_content_types``. 133 | 134 | ``gzip_content_types`` or ``GZIP_CONTENT_TYPES`` 135 | 136 | Default: ``(text/css,text/javascript,application/javascript,application/x-javascript,image/svg+xml)`` 137 | 138 | The list of content types to be gzipped when ``gzip`` is ``True``. 139 | 140 | ``region_name`` or ``AWS_S3_REGION_NAME`` 141 | 142 | Default: ``None`` 143 | 144 | Name of the AWS S3 region to use (e.g. eu-west-1) 145 | 146 | ``use_ssl`` or ``AWS_S3_USE_SSL`` 147 | 148 | Default: ``True`` 149 | 150 | Whether or not to use SSL when connecting to S3; this is passed to the boto3 session resource constructor. 151 | 152 | ``verify`` or ``AWS_S3_VERIFY`` 153 | 154 | Default: ``None`` 155 | 156 | Whether or not to verify the connection to S3. Can be set to ``False`` to not verify certificates, or to a path to a CA cert bundle. 157 | 158 | ``endpoint_url`` or ``AWS_S3_ENDPOINT_URL`` 159 | 160 | Default: ``None`` 161 | 162 | Custom S3 URL to use when connecting to S3, including scheme. Overrides ``region_name`` and ``use_ssl``. 163 | To avoid ``AuthorizationQueryParametersError`` errors, ``region_name`` should also be set. 164 | 165 | ``addressing_style`` or ``AWS_S3_ADDRESSING_STYLE`` 166 | 167 | Default: ``None`` 168 | 169 | Possible values are ``virtual`` and ``path``. 170 | 171 | ``proxies`` or ``AWS_S3_PROXIES`` 172 | 173 | Default: ``None`` 174 | 175 | Dictionary of proxy servers to use by protocol or endpoint, e.g.:: 176 | 177 | {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. 178 | 179 | ``transfer_config`` or ``AWS_S3_TRANSFER_CONFIG`` 180 | 181 | Default: ``None`` 182 | 183 | Set this to customize the transfer config options such as disabling threads for ``gevent`` compatibility; 184 | See the `Boto3 docs for TransferConfig`_ for more info. 185 | 186 | 187 | ``custom_domain`` or ``AWS_S3_CUSTOM_DOMAIN`` 188 | 189 | Default: ``None`` 190 | 191 | Set this to specify a custom domain for constructed URLs. 192 | 193 | .. note:: 194 | You'll have to configure CloudFront to use the bucket as an origin for this to 195 | work. 196 | 197 | If your CloudFront config restricts viewer access you will also need to provide 198 | ``cloudfront_key`` / ``AWS_CLOUDFRONT_KEY`` and ``cloudfront_key_id`` / 199 | ``AWS_CLOUDFRONT_KEY_ID``; See those settings and 200 | :ref:`cloudfront-signed-url-header` for more info. 201 | 202 | If you have more than one storage with different viewer access permissions, you 203 | can provide ``cloudfront_signer=None`` to disable signing on one or more 204 | storages. 205 | 206 | .. warning:: 207 | 208 | Django’s STATIC_URL must end in a slash and this must not. It is best to set this variable independently of STATIC_URL.
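To make the slash rule in the warning above concrete, a hypothetical static files configuration using a custom domain might look like this sketch (the domain and bucket name are placeholders)::

    STORAGES = {
        "staticfiles": {
            "BACKEND": "storages.backends.s3.S3Storage",
            "OPTIONS": {
                "bucket_name": "example-bucket-name",
                "custom_domain": "cdn.example.com",  # no trailing slash
            },
        },
    }

    STATIC_URL = "https://cdn.example.com/"  # set independently; must end in a slash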
209 | 210 | ``cloudfront_key`` or ``AWS_CLOUDFRONT_KEY`` 211 | 212 | Default: ``None`` 213 | 214 | A private PEM encoded key to use in a ``boto3`` ``CloudFrontSigner``; See 215 | :ref:`cloudfront-signed-url-header` for more info. 216 | 217 | ``cloudfront_key_id`` or ``AWS_CLOUDFRONT_KEY_ID`` 218 | 219 | Default: ``None`` 220 | 221 | The AWS key ID for the private key provided with ``cloudfront_key`` / 222 | ``AWS_CLOUDFRONT_KEY``; See :ref:`cloudfront-signed-url-header` for more info. 223 | 224 | ``cloudfront_signer`` 225 | 226 | Default: omitted 227 | 228 | By default the ``cloudfront_signer`` is generated based on the CloudFront key and ID 229 | provided. If both are provided URLs will be signed and will work for distributions 230 | with restricted viewer access, but if neither are provided then URLs will not be 231 | signed and will work for distributions with unrestricted viewer access. 232 | 233 | If you require a custom CloudFront signer you may pass a ``boto3`` 234 | ``CloudFrontSigner`` instance that can sign URLs, and to disable signing you may pass 235 | ``None``. 236 | 237 | ``signature_version`` or ``AWS_S3_SIGNATURE_VERSION`` 238 | 239 | Default: ``None`` 240 | 241 | The default signature version is ``s3v4``. Set this to ``s3`` to use the legacy 242 | signing scheme (aka ``v2``). Note that only certain regions support that version. 243 | You can check to see if your region is one of them in the `S3 region list`_. 244 | 245 | .. warning:: 246 | 247 | The signature versions are not backwards compatible so be careful about url endpoints if making this change 248 | for legacy projects. 249 | 250 | ``client_config`` or ``AWS_S3_CLIENT_CONFIG`` 251 | 252 | Default: ``None`` 253 | 254 | An instance of ``botocore.config.Config`` to do advanced configuration of the client such as 255 | ``max_pool_connections``. See all options in the `Botocore docs`_. 256 | 257 | .. note:: 258 | 259 | Setting this overrides the settings for ``addressing_style``, ``signature_version`` and 260 | ``proxies``. Include them as arguments to your ``botocore.config.Config`` class if you need them. 261 | 262 | .. _AWS Signature Version 4: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html 263 | .. _S3 region list: https://docs.aws.amazon.com/general/latest/gr/s3.html#s3_region 264 | .. _list of canned ACLs: https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl 265 | .. _Boto3 docs for uploading files: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.put_object 266 | .. _Boto3 docs for TransferConfig: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/customizations/s3.html#boto3.s3.transfer.TransferConfig 267 | .. _ManifestStaticFilesStorage: https://docs.djangoproject.com/en/3.1/ref/contrib/staticfiles/#manifeststaticfilesstorage 268 | .. _Botocore docs: https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html#botocore.config.Config 269 | 270 | .. _cloudfront-signed-url-header: 271 | 272 | CloudFront Signed URLs 273 | ---------------------- 274 | 275 | If you want to generate signed Cloudfront URLs, you can do so by following these steps: 276 | 277 | #. Generate a CloudFront Key Pair as specified in the `AWS docs`_. 278 | #. Add ``cloudfront_key`` and ``cloudfront_key_id`` as above with the generated settings 279 | #. Install one of `cryptography`_ or `rsa`_ 280 | #. 
Set both ``cloudfront_key_id/AWS_CLOUDFRONT_KEY_ID`` and ``cloudfront_key/AWS_CLOUDFRONT_KEY`` 281 | 282 | django-storages will now generate `signed cloudfront urls`_. 283 | 284 | .. _AWS docs: https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-trusted-signers.html#private-content-creating-cloudfront-key-pairs-procedure 285 | .. _signed cloudfront urls: https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-signed-urls.html 286 | 287 | .. _cryptography: https://pypi.org/project/cryptography/ 288 | .. _rsa: https://pypi.org/project/rsa/ 289 | 290 | IAM Policy 291 | ---------- 292 | 293 | The IAM policy definition needed for the most common use case is: 294 | 295 | .. code-block:: json 296 | 297 | { 298 | "Version": "2012-10-17", 299 | "Statement": [ 300 | { 301 | "Sid": "VisualEditor0", 302 | "Effect": "Allow", 303 | "Action": [ 304 | "s3:PutObject", 305 | "s3:GetObjectAcl", 306 | "s3:GetObject", 307 | "s3:ListBucket", 308 | "s3:DeleteObject", 309 | "s3:PutObjectAcl" 310 | ], 311 | "Principal": { 312 | "AWS": "arn:aws:iam::example-AWS-account-ID:user/example-user-name" 313 | }, 314 | "Resource": [ 315 | "arn:aws:s3:::example-bucket-name/*", 316 | "arn:aws:s3:::example-bucket-name" 317 | ] 318 | } 319 | ] 320 | } 321 | 322 | 323 | For more information about Principal, please refer to `AWS JSON Policy Elements`_ 324 | 325 | .. _AWS JSON Policy Elements: https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html 326 | -------------------------------------------------------------------------------- /docs/backends/apache_libcloud.rst: -------------------------------------------------------------------------------- 1 | Apache Libcloud 2 | =============== 3 | 4 | `Apache Libcloud`_ is an API wrapper around a range of cloud storage providers. 5 | It aims to provide a consistent API for dealing with cloud storage (and, more 6 | broadly, the many other services provided by cloud providers, such as device 7 | provisioning, load balancer configuration, and DNS configuration). 8 | 9 | Use pip to install apache-libcloud from PyPI:: 10 | 11 | pip install apache-libcloud 12 | 13 | As of v0.10.1, Libcloud supports the following cloud storage providers: 14 | * `Amazon S3`_ 15 | * `Google Cloud Storage`_ 16 | * `Nimbus.io`_ 17 | * `Ninefold Cloud Storage`_ 18 | * `Rackspace CloudFiles`_ 19 | 20 | Libcloud can also be configured with relatively little effort to support any provider 21 | using EMC Atmos storage, or the OpenStack API. 22 | 23 | .. _Apache Libcloud: http://libcloud.apache.org/ 24 | .. _Amazon S3: http://aws.amazon.com/s3/ 25 | .. _Google Cloud Storage: http://cloud.google.com/products/cloud-storage.html 26 | .. _Rackspace CloudFiles: http://www.rackspace.com/cloud/cloud_hosting_products/files/ 27 | .. _Ninefold Cloud Storage: http://ninefold.com/cloud-storage/ 28 | .. _Nimbus.io: http://nimbus.io 29 | 30 | Settings 31 | -------- 32 | 33 | ``LIBCLOUD_PROVIDERS`` 34 | ~~~~~~~~~~~~~~~~~~~~~~ 35 | 36 | This setting is required to configure connections to cloud storage providers. 37 | Each entry corresponds to a single 'bucket' of storage. You can have multiple 38 | buckets for a single service provider (e.g., multiple S3 buckets), and you can 39 | define buckets at multiple providers. 
For example, the following configuration 40 | defines 3 providers: two buckets (``bucket-1`` and ``bucket-2``) on a US-based 41 | Amazon S3 store, and a third bucket (``bucket-3``) on Google:: 42 | 43 | 44 | LIBCLOUD_PROVIDERS = { 45 | 'amazon_1': { 46 | 'type': 'libcloud.storage.types.Provider.S3_US_STANDARD_HOST', 47 | 'user': '', 48 | 'key': '', 49 | 'bucket': 'bucket-1', 50 | }, 51 | 'amazon_2': { 52 | 'type': 'libcloud.storage.types.Provider.S3_US_STANDARD_HOST', 53 | 'user': '', 54 | 'key': '', 55 | 'bucket': 'bucket-2', 56 | }, 57 | 'google': { 58 | 'type': 'libcloud.storage.types.Provider.GOOGLE_STORAGE', 59 | 'user': '', 60 | 'key': '', 61 | 'bucket': 'bucket-3', 62 | }, 63 | } 64 | 65 | The values for the ``type``, ``user`` and ``key`` arguments will vary depending on 66 | your storage provider: 67 | 68 | **Amazon S3**: 69 | 70 | **type**: ``libcloud.storage.types.Provider.S3_US_STANDARD_HOST``, 71 | 72 | **user**: Your AWS access key ID 73 | 74 | **key**: Your AWS secret access key 75 | 76 | If you want to use an availability zone other than the US default, you 77 | can use one of ``S3_US_WEST_HOST``, ``S3_US_WEST_OREGON_HOST``, 78 | ``S3_EU_WEST_HOST``, ``S3_AP_SOUTHEAST_HOST``, or 79 | ``S3_AP_NORTHEAST_HOST`` instead of ``S3_US_STANDARD_HOST``. 80 | 81 | **Google Cloud Storage**: 82 | 83 | **type**: ``libcloud.storage.types.Provider.GOOGLE_STORAGE``, 84 | 85 | **user**: Your Google APIv1 username (20 characters) 86 | 87 | **key**: Your Google APIv1 key 88 | 89 | **Nimbus.io**: 90 | 91 | **type**: ``libcloud.storage.types.Provider.NIMBUS``, 92 | 93 | **user**: Your Nimbus.io user ID 94 | 95 | **key**: Your Nimbus.io access key 96 | 97 | **Ninefold Cloud Storage**: 98 | 99 | **type**: ``libcloud.storage.types.Provider.NINEFOLD``, 100 | 101 | **user**: Your Atmos Access Token 102 | 103 | **key**: Your Atmos Shared Secret 104 | 105 | **Rackspace Cloudfiles**: 106 | 107 | **type**: ``libcloud.storage.types.Provider.CLOUDFILES_US`` or ``libcloud.storage.types.Provider.CLOUDFILES_UK``, 108 | 109 | **user**: Your Rackspace user ID 110 | 111 | **key**: Your Rackspace access key 112 | 113 | You can specify any bucket name you want; however, the bucket must exist before you 114 | can start using it. If you need to create the bucket, you can use the storage API. 115 | For example, to create ``bucket-1`` from our previous example:: 116 | 117 | >>> from storages.backends.apache_libcloud import LibCloudStorage 118 | >>> store = LibCloudStorage('amazon_1') 119 | >>> store.driver.create_container('bucket-1') 120 | 121 | 122 | ``DEFAULT_LIBCLOUD_PROVIDER`` 123 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 124 | 125 | Once you have defined your Libcloud providers, you have the option of 126 | setting one provider as the default provider of Libcloud storage. This 127 | is done by setting ``DEFAULT_LIBCLOUD_PROVIDER`` to the key in 128 | ``LIBCLOUD_PROVIDERS`` that you want to use as the default provider. 129 | For example, if you want the ``amazon_1`` provider to be the default 130 | provider, use:: 131 | 132 | DEFAULT_LIBCLOUD_PROVIDER = 'amazon_1' 133 | 134 | If ``DEFAULT_LIBCLOUD_PROVIDER`` isn't set, the Libcloud backend will assume 135 | that the default storage backend is named ``default``. Therefore, you can 136 | avoid setting DEFAULT_LIBCLOUD_PROVIDER by simply naming one of your 137 | Libcloud providers ``default``:: 138 | 139 | LIBCLOUD_PROVIDERS = { 140 | 'default': { 141 | 'type': ...
142 | }, 143 | } 144 | 145 | 146 | ``DEFAULT_FILE_STORAGE``, ``STORAGES`` 147 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 148 | 149 | If you want your Libcloud storage to be the default Django file store, you can 150 | set:: 151 | 152 | # django < 4.2 153 | DEFAULT_FILE_STORAGE = 'storages.backends.apache_libcloud.LibCloudStorage' 154 | 155 | # django >= 4.2 156 | STORAGES = {"default": {"BACKEND": "storages.backends.apache_libcloud.LibCloudStorage"}} 157 | 158 | Your default Libcloud provider will be used as the file store. 159 | 160 | Certificate authorities 161 | ----------------------- 162 | 163 | Libcloud uses HTTPS connections, and in order to validate that these HTTPS connections are 164 | correctly signed, root CA certificates must be present. On some platforms 165 | (most notably, OS X and Windows), the required certificates may not be available 166 | by default. To test 167 | 168 | >>> from storages.backends.apache_libcloud import LibCloudStorage 169 | >>> store = LibCloudStorage('amazon_1') 170 | Traceback (most recent call last): 171 | ... 172 | ImproperlyConfigured: Unable to create libcloud driver type libcloud.storage.types.Provider.S3_US_STANDARD_HOST: No CA Certificates were found in CA_CERTS_PATH. 173 | 174 | If you get this error, you need to install a certificate authority. 175 | `Download a certificate authority file`_, and then put the following two lines 176 | into your settings.py:: 177 | 178 | import libcloud.security 179 | libcloud.security.CA_CERTS_PATH.append("/path/to/your/cacerts.pem") 180 | 181 | .. _Download a certificate authority file: http://curl.haxx.se/ca/cacert.pem 182 | -------------------------------------------------------------------------------- /docs/backends/azure.rst: -------------------------------------------------------------------------------- 1 | Azure Storage 2 | ============= 3 | 4 | A custom storage system for Django using Microsoft Azure Storage backend. 5 | 6 | 7 | Installation 8 | ------------ 9 | 10 | Install Azure SDK:: 11 | 12 | pip install django-storages[azure] 13 | 14 | Configuration & Settings 15 | ------------------------ 16 | 17 | Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure 18 | storage backends and add additional ones. To configure multiple storage objects pre Django 4.2 required subclassing the backend 19 | because the settings were global, now you pass them under the key ``OPTIONS``. For example, to save media files to Azure on Django 20 | >= 4.2 you'd define:: 21 | 22 | 23 | STORAGES = { 24 | "default": { 25 | "BACKEND": "storages.backends.azure_storage.AzureStorage", 26 | "OPTIONS": { 27 | ...your_options_here 28 | }, 29 | }, 30 | } 31 | 32 | On Django < 4.2 you'd instead define:: 33 | 34 | DEFAULT_FILE_STORAGE = "storages.backends.azure_storage.AzureStorage" 35 | 36 | To put static files on Azure via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as 37 | ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: 38 | 39 | STATICFILES_STORAGE = "storages.backends.azure_storage.AzureStorage" 40 | 41 | The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as 42 | well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. 43 | 44 | Authentication Settings 45 | ~~~~~~~~~~~~~~~~~~~~~~~ 46 | 47 | Several different methods of authentication are provided. 
In order of precedence they are: 48 | 49 | #. ``connection_string`` or ``AZURE_CONNECTION_STRING`` (see `Connection string docs `_) 50 | #. (``account_key`` or ``AZURE_ACCOUNT_KEY``) and (``account_name`` or ``AZURE_ACCOUNT_NAME``) 51 | #. ``token_credential`` or ``AZURE_TOKEN_CREDENTIAL`` with ``account_name`` or ``AZURE_ACCOUNT_NAME`` 52 | #. ``sas_token`` or ``AZURE_SAS_TOKEN`` 53 | 54 | Using Managed Identity 55 | ++++++++++++++++++++++ 56 | 57 | `Azure Managed Identity `_ is an authentication method that allows you to authenticate to Azure services without storing credentials in your code. 58 | Managed Identity is the recommended mechanism for password-less authentication to Azure Storage Accounts from other Azure services like App Services, Functions, Container Apps, and VMs. 59 | 60 | To use Managed Identity you will need to configure a System Assigned Managed Identity or a User Assigned Managed Identity for your app service. Then you can use the `DefaultAzureCredential `_ class from the Azure SDK to authenticate. 61 | This class will automatically try all the available authentication methods in the order of precedence. ``DefaultAzureCredential`` will also use environment variables for local development, or VS Code Azure Login if available. 62 | 63 | This `guide `_ contains more information on assigning roles to Storage Accounts. 64 | 65 | Before using Managed Identity, you will need to install the Azure Identity package:: 66 | 67 | pip install azure-identity 68 | 69 | After creating the containers in the Azure Storage Account, you can configure Managed Identity in Django settings. 70 | Import ``DefaultAzureCredential`` from ``azure.identity`` to use it for the ``token_credential`` property:: 71 | 72 | 73 | from azure.identity import DefaultAzureCredential 74 | 75 | ... 76 | 77 | STORAGES = { 78 | "default": { 79 | "BACKEND": "storages.backends.azure_storage.AzureStorage", 80 | "OPTIONS": { 81 | "token_credential": DefaultAzureCredential(), 82 | "account_name": "mystorageaccountname", 83 | "azure_container": "media", 84 | }, 85 | }, 86 | "staticfiles": { 87 | "BACKEND": "storages.backends.azure_storage.AzureStorage", 88 | "OPTIONS": { 89 | "token_credential": DefaultAzureCredential(), 90 | "account_name": "mystorageaccountname", 91 | "azure_container": "static", 92 | }, 93 | }, 94 | } 95 | 96 | For `User assigned Managed Identity `_, pass the client ID parameter to the DefaultAzureCredential call. 97 | 98 | Settings 99 | ~~~~~~~~ 100 | 101 | ``azure_container`` or ``AZURE_CONTAINER`` 102 | 103 | **Required** 104 | 105 | This is where the files uploaded through Django will be uploaded. 106 | The container must be already created, since the storage system will not attempt to create it. 107 | 108 | ``azure_ssl`` or ``AZURE_SSL`` 109 | 110 | Default: ``True`` 111 | 112 | Set a secure connection (HTTPS), otherwise it makes an insecure connection (HTTP). 113 | 114 | ``upload_max_conn`` or ``AZURE_UPLOAD_MAX_CONN`` 115 | 116 | Default: ``2`` 117 | 118 | Number of connections to make when uploading a single file. 119 | 120 | ``timeout`` or ``AZURE_CONNECTION_TIMEOUT_SECS`` 121 | 122 | Default: ``20`` 123 | 124 | Global connection timeout in seconds. 125 | 126 | ``max_memory_size`` or ``AZURE_BLOB_MAX_MEMORY_SIZE`` 127 | 128 | Default: ``2*1024*1024`` i.e ``2MB`` 129 | 130 | Maximum memory used by a downloaded file before dumping it to disk in bytes. 
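As an illustrative sketch only (the account name, key, and container names are placeholders), an account-key based configuration combining the authentication and container settings above could look like::

    STORAGES = {
        "default": {
            "BACKEND": "storages.backends.azure_storage.AzureStorage",
            "OPTIONS": {
                "account_name": "mystorageaccountname",
                "account_key": "<your-account-key>",
                "azure_container": "media",  # must already exist
                "timeout": 20,  # global connection timeout in seconds
            },
        },
    }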
131 | 132 | ``expiration_secs`` or ``AZURE_URL_EXPIRATION_SECS`` 133 | 134 | Default: ``None`` 135 | 136 | Seconds before a URL expires; set to ``None`` to never expire it. 137 | Be aware the container must have public read permissions in order 138 | to access a URL without an expiration date. 139 | 140 | ``overwrite_files`` or ``AZURE_OVERWRITE_FILES`` 141 | 142 | Default: ``False`` 143 | 144 | Whether or not to overwrite a file previously uploaded with the same name. If not, random characters are appended. 145 | 146 | ``location`` or ``AZURE_LOCATION`` 147 | 148 | Default: ``''`` 149 | 150 | Default location for the uploaded files. This is a path that gets prepended to every file name. 151 | 152 | ``endpoint_suffix`` or ``AZURE_ENDPOINT_SUFFIX`` 153 | 154 | Default: ``core.windows.net`` 155 | 156 | Use ``core.chinacloudapi.cn`` for azure.cn accounts. 157 | 158 | ``custom_domain`` or ``AZURE_CUSTOM_DOMAIN`` 159 | 160 | Default: ``None`` 161 | 162 | The custom domain to use for generating URLs for files. For 163 | example, ``www.mydomain.com`` or ``mycdn.azureedge.net``. 164 | 165 | ``AZURE_TOKEN_CREDENTIAL`` 166 | 167 | A token credential used to authenticate HTTPS requests. The token value 168 | should be updated before its expiration. 169 | 170 | 171 | ``cache_control`` or ``AZURE_CACHE_CONTROL`` 172 | 173 | Default: ``None`` 174 | 175 | A variable to set the Cache-Control HTTP response header. E.g.:: 176 | 177 | cache_control: "public,max-age=31536000,immutable" 178 | 179 | ``object_parameters`` or ``AZURE_OBJECT_PARAMETERS`` 180 | 181 | Default: ``{}`` 182 | 183 | Use this to set content settings on all objects. To set these on a per-object 184 | basis, subclass the backend and override ``AzureStorage.get_object_parameters``. 185 | 186 | This is a Python ``dict`` and the possible parameters are: ``content_type``, ``content_encoding``, ``content_language``, ``content_disposition``, ``cache_control``, and ``content_md5``. 187 | 188 | ``client_options`` or ``AZURE_CLIENT_OPTIONS`` 189 | 190 | Default: ``{}`` 191 | 192 | A dict of kwarg options to send to the ``BlobServiceClient``. A partial list of options can be found 193 | `in the client docs `__. 194 | 195 | Additionally, this setting can be used to configure the client retry settings. To see how, follow the 196 | `Python retry docs `__. 197 | 198 | ``api_version`` or ``AZURE_API_VERSION`` 199 | 200 | Default: ``None`` 201 | 202 | **Note: This option is deprecated. Use client_options/AZURE_CLIENT_OPTIONS instead.** 203 | 204 | The Azure Storage API version to use. Default value is the most recent service version that is compatible with the current SDK. 205 | Setting to an older version may result in reduced feature compatibility. 206 | 207 | Using with Azurite (previously Azure Storage Emulator) 208 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 209 | 210 | Azurite is a local emulator for Azure Storage accounts that emulates the API for Azure Blob storage and enables local testing and development without an Azure account, free of charge. 211 | 212 | To use the Azure Storage Emulator, you download and install it from the `Azurite page `_.
213 | 214 | Copy the default `connection string `_ and set it in your settings:: 215 | 216 | STORAGES = { 217 | "default": { 218 | "BACKEND": "storages.backends.azure_storage.AzureStorage", 219 | "OPTIONS": { 220 | "connection_string": "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", 221 | "azure_container": "media", 222 | }, 223 | }, 224 | "staticfiles": { 225 | "BACKEND": "storages.backends.azure_storage.AzureStorage", 226 | "OPTIONS": { 227 | "connection_string": "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", 228 | "azure_container": "static", 229 | }, 230 | }, 231 | } 232 | 233 | Django Storages will not create containers if they don't exist, so you will need to create any storage containers using the Azurite CLI or the Azure Storage Explorer. 234 | 235 | Additional Notes 236 | ---------------- 237 | 238 | Filename Restrictions 239 | ~~~~~~~~~~~~~~~~~~~~~ 240 | 241 | Azure file names have some extra restrictions. They can't: 242 | 243 | - end with a dot (``.``) or slash (``/``) 244 | - contain more than 256 slashes (``/``) 245 | - be longer than 1024 characters 246 | 247 | Private vs Public URLs 248 | ~~~~~~~~~~~~~~~~~~~~~~ 249 | 250 | The difference between public and private URLs is that private includes the SAS token. 251 | With private URLs you can override certain properties stored for the blob by specifying 252 | query parameters as part of the shared access signature. These properties include the 253 | cache-control, content-type, content-encoding, content-language, and content-disposition. 254 | See https://docs.microsoft.com/rest/api/storageservices/set-blob-properties#remarks 255 | 256 | You can specify these parameters by:: 257 | 258 | az_storage = AzureStorage() 259 | az_url = az_storage.url(blob_name, parameters={'content_type': 'text/html;'}) 260 | -------------------------------------------------------------------------------- /docs/backends/dropbox.rst: -------------------------------------------------------------------------------- 1 | Dropbox 2 | ======= 3 | 4 | A Django files storage using Dropbox as a backend via the official 5 | `Dropbox SDK for Python`_. Currently only v2 of the API is supported. 6 | 7 | Installation 8 | ------------ 9 | 10 | Before you start configuration, you will need to install the SDK 11 | which can be done for you automatically by doing:: 12 | 13 | pip install django-storages[dropbox] 14 | 15 | Configuration & Settings 16 | ------------------------ 17 | 18 | Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure 19 | storage backends and add additional ones. To configure multiple storage objects pre Django 4.2 required subclassing the backend 20 | because the settings were global, now you pass them under the key ``OPTIONS``. 
For example, to save media files to Dropbox on Django 21 | >= 4.2 you'd define:: 22 | 23 | 24 | STORAGES = { 25 | "default": { 26 | "BACKEND": "storages.backends.dropbox.DropboxStorage", 27 | "OPTIONS": { 28 | ...your_options_here 29 | }, 30 | }, 31 | } 32 | 33 | On Django < 4.2 you'd instead define:: 34 | 35 | DEFAULT_FILE_STORAGE = "storages.backends.dropbox.DropboxStorage" 36 | 37 | To put static files on Dropbox via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as 38 | ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: 39 | 40 | STATICFILES_STORAGE = "storages.backends.dropbox.DropboxStorage" 41 | 42 | The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as 43 | well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. 44 | 45 | Authentication 46 | -------------- 47 | 48 | Two methods of authentication are supported: 49 | 50 | #. Using an access token 51 | #. Using a refresh token with an app key and secret 52 | 53 | Dropbox has recently introduced short-lived access tokens only, and does not seem to allow new apps to generate access tokens that do not expire. Short-lived access tokens can be identified by their prefix (short-lived access tokens start with ``'sl.'``). 54 | 55 | You can manually obtain the refresh token by following the instructions below using ``APP_KEY`` and ``APP_SECRET``. 56 | 57 | The relevant settings, all of which can be obtained by following the instructions in the `tutorial`_, are: 58 | 59 | #. ``oauth2_access_token`` or ``DROPBOX_OAUTH2_TOKEN`` 60 | #. ``oauth2_refresh_token`` or ``DROPBOX_OAUTH2_REFRESH_TOKEN`` 61 | #. ``app_secret`` or ``DROPBOX_APP_SECRET`` 62 | #. ``app_key`` or ``DROPBOX_APP_KEY`` 63 | 64 | The refresh token can be obtained using the `commandline-oauth.py`_ example from the `Dropbox SDK for Python`_. 65 | 66 | Get AUTHORIZATION_CODE 67 | ~~~~~~~~~~~~~~~~~~~~~~ 68 | 69 | Using your ``APP_KEY``, follow the link: 70 | 71 | https://www.dropbox.com/oauth2/authorize?client_id=APP_KEY&token_access_type=offline&response_type=code 72 | 73 | It will give you ``AUTHORIZATION_CODE``. 74 | 75 | Obtain the refresh token 76 | ~~~~~~~~~~~~~~~~~~~~~~~~ 77 | 78 | Using your ``APP_KEY``, ``APP_SECRET`` and ``AUTHORIZATION_CODE``, obtain the refresh token. 79 | 80 | .. code-block:: shell 81 | 82 | curl -u APP_KEY:APP_SECRET \ 83 | -d "code=AUTHORIZATION_CODE&grant_type=authorization_code" \ 84 | -H "Content-Type: application/x-www-form-urlencoded" \ 85 | -X POST "https://api.dropboxapi.com/oauth2/token" 86 | 87 | The response would be: 88 | 89 | .. code-block:: json 90 | 91 | { 92 | "access_token": "sl.************************", 93 | "token_type": "bearer", 94 | "expires_in": 14400, 95 | "refresh_token": "************************", <-- your REFRESH_TOKEN 96 | "scope": , 97 | "uid": "************************", 98 | "account_id": "dbid:************************" 99 | } 100 | 101 | Settings 102 | -------- 103 | 104 | ``root_path`` or ``DROPBOX_ROOT_PATH`` 105 | 106 | Default: ``'/'`` 107 | 108 | Path which will prefix all uploaded files. Must begin with a ``/``. 109 | 110 | ``timeout`` or ``DROPBOX_TIMEOUT`` 111 | 112 | Default: ``100`` 113 | 114 | Timeout in seconds for requests to the API. If ``None``, the client will wait forever. 115 | The default value matches the SDK at the time of this writing.
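As a sketch of the refresh-token flow described above (every value is a placeholder to be replaced with your own), the resulting configuration might be::

    STORAGES = {
        "default": {
            "BACKEND": "storages.backends.dropbox.DropboxStorage",
            "OPTIONS": {
                "app_key": "<APP_KEY>",
                "app_secret": "<APP_SECRET>",
                "oauth2_refresh_token": "<REFRESH_TOKEN>",
                "root_path": "/django-media",  # must begin with a /
            },
        },
    }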
116 | 117 | ``write_mode`` or ``DROPBOX_WRITE_MODE`` 118 | 119 | Default: ``'add'`` 120 | 121 | Sets the Dropbox WriteMode strategy. Read more in the `official docs`_. 122 | 123 | 124 | .. _`tutorial`: https://www.dropbox.com/developers/documentation/python#tutorial 125 | .. _`Dropbox SDK for Python`: https://www.dropbox.com/developers/documentation/python#tutorial 126 | .. _`official docs`: https://dropbox-sdk-python.readthedocs.io/en/latest/api/files.html#dropbox.files.WriteMode 127 | .. _`commandline-oauth.py`: https://github.com/dropbox/dropbox-sdk-python/blob/master/example/oauth/commandline-oauth.py 128 | -------------------------------------------------------------------------------- /docs/backends/ftp.rst: -------------------------------------------------------------------------------- 1 | FTP 2 | === 3 | 4 | .. warning:: This FTP storage is not prepared to work with large files, because it uses memory for temporary data storage. It also does not close the FTP connection automatically (it opens the connection lazily and tries to reestablish it when disconnected). 5 | 6 | This implementation was written primarily for uploading files via the admin to a remote FTP location and reading them back on the site over HTTP. It was tested mostly in this configuration, so reading/writing through the FTPStorageFile class may break. 7 | 8 | Configuration & Settings 9 | ------------------------ 10 | 11 | Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure 12 | storage backends and add additional ones. To configure multiple storage objects pre Django 4.2 required subclassing the backend 13 | because the settings were global, now you pass them under the key ``OPTIONS``. For example, to use FTP to save media files on 14 | Django >= 4.2 you'd define:: 15 | 16 | 17 | STORAGES = { 18 | "default": { 19 | "BACKEND": "storages.backends.ftp.FTPStorage", 20 | "OPTIONS": { 21 | ...your_options_here 22 | }, 23 | }, 24 | } 25 | 26 | On Django < 4.2 you'd instead define:: 27 | 28 | DEFAULT_FILE_STORAGE = "storages.backends.ftp.FTPStorage" 29 | 30 | To use FTP to store static files via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as 31 | ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: 32 | 33 | STATICFILES_STORAGE = "storages.backends.ftp.FTPStorage" 34 | 35 | The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as 36 | well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. 37 | 38 | Settings 39 | ~~~~~~~~ 40 | 41 | ``location`` or ``FTP_STORAGE_LOCATION`` 42 | 43 | **Required** 44 | 45 | Format as a url like ``"{scheme}://{user}:{passwd}@{host}:{port}/"``. Supports both FTP and FTPS connections via scheme. 46 | 47 | ``allow_overwrite`` or ``FTP_ALLOW_OVERWRITE`` 48 | 49 | default: ``False`` 50 | 51 | Set to ``True`` to overwrite files instead of appending additional characters. 52 | 53 | ``encoding`` or ``FTP_STORAGE_ENCODING`` 54 | 55 | default: ``latin-1`` 56 | 57 | File encoding. 58 | 59 | ``base_url`` or ``BASE_URL`` 60 | 61 | default: ``settings.MEDIA_URL`` 62 | 63 | Serving base of files.
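As an illustrative sketch (the hostname and credentials are placeholders), the settings above could be combined as::

    STORAGES = {
        "default": {
            "BACKEND": "storages.backends.ftp.FTPStorage",
            "OPTIONS": {
                "location": "ftp://user:password@ftp.example.com:21/",
                "allow_overwrite": False,  # append extra characters rather than overwrite
            },
        },
    }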
64 | -------------------------------------------------------------------------------- /docs/backends/gcloud.rst: -------------------------------------------------------------------------------- 1 | Google Cloud Storage 2 | ==================== 3 | 4 | This backend implements the Django File API for `Google Cloud Storage `_ 5 | using the Python library provided by Google. 6 | 7 | 8 | Installation 9 | ------------ 10 | 11 | Use pip to install from PyPI:: 12 | 13 | pip install django-storages[google] 14 | 15 | Configuration & Settings 16 | ------------------------ 17 | 18 | Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure 19 | storage backends and add additional ones. To configure multiple storage objects pre Django 4.2 required subclassing the backend 20 | because the settings were global, now you pass them under the key ``OPTIONS``. For example, to save media files to GCS on Django 21 | >= 4.2 you'd define:: 22 | 23 | 24 | STORAGES = { 25 | "default": { 26 | "BACKEND": "storages.backends.gcloud.GoogleCloudStorage", 27 | "OPTIONS": { 28 | ...your_options_here 29 | }, 30 | }, 31 | } 32 | 33 | On Django < 4.2 you'd instead define:: 34 | 35 | DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage" 36 | 37 | To put static files on GCS via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as 38 | ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: 39 | 40 | STATICFILES_STORAGE = "storages.backends.gcloud.GoogleCloudStorage" 41 | 42 | The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as 43 | well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. 44 | 45 | .. _auth-settings: 46 | 47 | Authentication Settings 48 | ~~~~~~~~~~~~~~~~~~~~~~~ 49 | By default, this library will try to use the credentials associated with the current Google Cloud infrastructure/environment for authentication. 50 | 51 | In most cases, the default service accounts are not sufficient to read/write and sign files in GCS, so you will need to create a dedicated service account: 52 | 53 | #. Create a service account. (`Google Getting Started Guide `__) 54 | #. Make sure your service account has access to the bucket and appropriate permissions. (`Using IAM Permissions `__) 55 | #. Ensure this service account is associated to the type of compute being used (Google Compute Engine (GCE), Google Kubernetes Engine (GKE), Google Cloud Run (GCR), etc) 56 | #. If your app only handles ``publicRead`` storage objects then the above steps are all that is required 57 | #. If your app handles signed (expiring) urls, then read through the options in the ``Settings for Signed Urls`` in the following section 58 | 59 | Settings for Signed Urls 60 | ~~~~~~~~~~~~~~~~~~~~~~~~ 61 | 62 | .. _iam-sign-blob-api: 63 | 64 | IAM Sign Blob API 65 | ***************** 66 | 67 | .. note:: 68 | There is currently a limitation in the GCS client for Python which by default requires a 69 | service account private key file to be present when generating signed urls. The service 70 | account private key file is unavailable when running on compute services. 
Compute Services 71 | (App Engine, Cloud Run, Cloud Functions, Compute Engine, etc) fetch `access tokens from the metadata server 72 | `__ 73 | 74 | Due to the above limitation, currently the only way to generate a signed url without having the private key file mounted 75 | in the env is through the IAM Sign Blob API. 76 | 77 | .. note:: 78 | The IAM Sign Blob API has `quota limits `__ which could be a deal-breaker. 79 | 80 | To use the IAM Sign Blob API set ``iam_sign_blob`` or ``GS_IAM_SIGN_BLOB`` to ``True``. When this setting is enabled, 81 | signed urls are generated through the IAM SignBlob API using the attached service account email and access_token 82 | instead of the credentials in the key file. 83 | 84 | An additional optional setting ``sa_email`` or ``GS_SA_EMAIL`` is also available. It allows you to override the service account 85 | used to generate the signed url if it is different from the one attached to your env. It's also useful for local/development 86 | use cases where the metadata server isn't available and storing private key files is dangerous. 87 | 88 | Mounted Private Key 89 | ******************** 90 | 91 | If the above method is not sufficient for your needs you can still use the service account key file for authentication (not recommended by Google): 92 | 93 | #. Create the key and download ``your-project-XXXXX.json`` file. 94 | #. Ensure the key is mounted/available to your running app. 95 | #. Set an environment variable of ``GOOGLE_APPLICATION_CREDENTIALS`` to the path of the JSON file. 96 | 97 | Alternatively, you can set ``credentials`` or ``GS_CREDENTIALS`` to the path of the JSON file. 98 | 99 | Settings 100 | ~~~~~~~~ 101 | 102 | ``bucket_name`` or ``GS_BUCKET_NAME`` 103 | 104 | **Required** 105 | 106 | The name of the GCS bucket that will host the files. 107 | 108 | ``project_id`` or ``GS_PROJECT_ID`` 109 | 110 | default: ``None`` 111 | 112 | Your Google Cloud project ID. If unset, falls back to the default inferred from the environment. 113 | 114 | ``gzip`` or ``GS_IS_GZIPPED`` 115 | 116 | default: ``False`` 117 | 118 | Whether or not to enable gzipping of content types specified by ``gzip_content_types``. 119 | 120 | ``gzip_content_types`` or ``GZIP_CONTENT_TYPES`` 121 | 122 | default: ``(text/css,text/javascript,application/javascript,application/x-javascript,image/svg+xml)`` 123 | 124 | The list of content types to be gzipped when ``gzip`` is ``True``. 125 | 126 | ``credentials`` or ``GS_CREDENTIALS`` 127 | 128 | default: ``None`` 129 | 130 | The OAuth2 credentials to use for the connection. Be sure to read through all of :ref:`auth-settings` first. 131 | (i.e. ``GOOGLE_APPLICATION_CREDENTIALS``):: 132 | 133 | from google.oauth2 import service_account 134 | 135 | GS_CREDENTIALS = service_account.Credentials.from_service_account_file( 136 | "path/to/credentials.json" 137 | ) 138 | 139 | .. _gs-default-acl: 140 | 141 | ``default_acl`` or ``GS_DEFAULT_ACL`` 142 | 143 | default: ``None`` 144 | 145 | ACL used when creating a new blob, from the 146 | `list of predefined ACLs `_. 147 | (A "JSON API" ACL is preferred but an "XML API/gsutil" ACL will be 148 | translated.) 149 | 150 | For most cases, the blob will need to be set to the ``publicRead`` ACL in order for the file to be viewed. 151 | If ``default_acl`` is not set, the blob will have the default permissions set by the bucket. 152 | 153 | ``publicRead`` files will return a public, non-expiring url. All other files return 154 | a signed (expiring) url. 155 | 156 | .. 
note:: 157 | GS_DEFAULT_ACL must be set to 'publicRead' to return a public url. Even if you set 158 | the bucket to public or set the file permissions directly in GCS to public. 159 | 160 | .. note:: 161 | When using this setting, make sure you have ``fine-grained`` access control enabled on your bucket, 162 | as opposed to ``Uniform`` access control, or else, file uploads will return with HTTP 400. If you 163 | already have a bucket with ``Uniform`` access control set to public read, please keep 164 | ``GS_DEFAULT_ACL`` to ``None`` and set ``GS_QUERYSTRING_AUTH`` to ``False``. 165 | 166 | ``querystring_auth`` or ``GS_QUERYSTRING_AUTH`` 167 | 168 | default: ``True`` 169 | 170 | Whether or not to force URL signing. Set this to ``False`` for buckets where all objects are public. 171 | 172 | ``file_overwrite`` or ``GS_FILE_OVERWRITE`` 173 | 174 | default: ``True`` 175 | 176 | By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended. 177 | 178 | ``max_memory_size`` or ``GS_MAX_MEMORY_SIZE`` 179 | 180 | default: ``0`` i.e do not rollover 181 | 182 | The maximum amount of memory a returned file can take up (in bytes) before being 183 | rolled over into a temporary file on disk. Default is 0: Do not roll over. 184 | 185 | ``blob_chunk_size`` or ``GS_BLOB_CHUNK_SIZE`` 186 | 187 | default: ``None`` 188 | 189 | The size of blob chunks that are sent via resumable upload. If this is not set then the generated request 190 | must fit in memory. Recommended if you are going to be uploading large files. 191 | 192 | .. note:: 193 | 194 | This must be a multiple of 256K (1024 * 256) 195 | 196 | ``object_parameters`` or ``GS_OBJECT_PARAMETERS`` 197 | 198 | default: `{}` 199 | 200 | Dictionary of key-value pairs mapping from blob property name to value. 201 | 202 | Use this to set parameters on all objects. To set these on a per-object 203 | basis, subclass the backend and override ``GoogleCloudStorage.get_object_parameters``. 204 | 205 | The valid property names are :: 206 | 207 | acl 208 | cache_control 209 | content_disposition 210 | content_encoding 211 | content_language 212 | content_type 213 | metadata 214 | storage_class 215 | 216 | If not set, the ``content_type`` property will be guessed. 217 | 218 | If set, ``acl`` overrides :ref:`GS_DEFAULT_ACL `. 219 | 220 | .. warning:: 221 | 222 | Do not set ``name``. This is set automatically based on the filename. 223 | 224 | ``custom_endpoint`` or ``GS_CUSTOM_ENDPOINT`` 225 | 226 | default: ``None`` 227 | 228 | Sets a `custom endpoint `_, 229 | that will be used instead of ``https://storage.googleapis.com`` when generating URLs for files. 230 | 231 | ``location`` or ``GS_LOCATION`` 232 | 233 | default: ``''`` 234 | 235 | Subdirectory in which files will be stored. 236 | 237 | ``expiration`` or ``GS_EXPIRATION`` 238 | 239 | default: ``timedelta(seconds=86400)``) 240 | 241 | The time that a generated URL is valid before expiration. The default is 1 day. 242 | Public files will return a url that does not expire. 243 | 244 | Note: Default Google Compute Engine (GCE) Service accounts are 245 | `unable to sign urls `_. 246 | 247 | The ``expiration`` value is handled by the underlying `Google library `_. 248 | It supports `timedelta`, `datetime`, or `integer` seconds since epoch time. 
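For instance, a minimal sketch that limits signed URLs to one hour, using the global-style setting (the equivalent ``OPTIONS`` key is ``expiration``)::

    from datetime import timedelta

    GS_EXPIRATION = timedelta(hours=1)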
249 | 250 | Note: The maximum value for this option is 7 days (604800 seconds) in version `v4` (See this `Github issue `_) 251 | 252 | ``iam_sign_blob`` or ``GS_IAM_SIGN_BLOB`` 253 | 254 | default: ``False`` 255 | 256 | Generate signed urls using the IAM Sign Blob API. See :ref:`iam-sign-blob-api` for more info. 257 | 258 | ``sa_email`` or ``GS_SA_EMAIL`` 259 | 260 | default: ``None`` 261 | 262 | Override the service account used for generating signed urls using the IAM Sign Blob API. See :ref:`iam-sign-blob-api` for more info. 263 | -------------------------------------------------------------------------------- /docs/backends/s3_compatible/backblaze-B2.rst: -------------------------------------------------------------------------------- 1 | Backblaze B2 2 | ============ 3 | 4 | Backblaze B2 implements an `S3 Compatible API `_. To use it as a django-storages backend: 5 | 6 | #. Sign up for a `Backblaze B2 account `_, if you have not already done so. 7 | #. Create a public or private bucket. Note that object-level ACLs are not supported by B2 - all objects inherit their bucket's ACLs. 8 | #. Create an `application key `_. Best practice is to limit access to the bucket you just created. 9 | #. Follow the instructions in the :doc:`Amazon S3 docs <../amazon-S3>` with the following exceptions: 10 | 11 | * Set ``region_name`` to your Backblaze B2 region, for example, ``us-west-004`` 12 | * Set ``endpoint_url`` to ``https://s3.${AWS_S3_REGION_NAME}.backblazeb2.com`` 13 | * Set the values of ``access_key`` and ``secret_key`` to the application key id and application key you created in step 2. 14 | -------------------------------------------------------------------------------- /docs/backends/s3_compatible/cloudflare-r2.rst: -------------------------------------------------------------------------------- 1 | Cloudflare R2 2 | ============= 3 | 4 | Cloudflare R2 implements an `S3 Compatible API `_. To use it as a django-storages backend: 5 | 6 | #. Create an R2 bucket using Cloudflare's web panel or API 7 | #. Follow `Cloudflare's docs`_ to create authentication tokens, locking down permissions as required 8 | #. Follow the instructions in the :doc:`Amazon S3 docs <../amazon-S3>` with the following exceptions: 9 | 10 | * Set ``bucket_name`` to your previously created bucket 11 | * Set ``endpoint_url`` to ``https://.r2.cloudflarestorage.com`` 12 | * Set the values of ``access_key`` and ``secret_key`` to their respective Cloudflare keys 13 | 14 | .. note:: 15 | If you need a jurisdiction-specific endpoint or other advanced features, consult the Cloudflare docs. 16 | 17 | .. _Cloudflare's docs: https://developers.cloudflare.com/r2/api/s3/tokens/ 18 | -------------------------------------------------------------------------------- /docs/backends/s3_compatible/digital-ocean-spaces.rst: -------------------------------------------------------------------------------- 1 | Digital Ocean 2 | ============= 3 | 4 | Digital Ocean Spaces implements the S3 protocol. 
To use it follow the instructions in the :doc:`Amazon S3 docs <../amazon-S3>` with the important caveats that you must: 5 | 6 | - Set ``region_name`` to your Digital Ocean region (such as ``nyc3`` or ``sfo2``) 7 | - Set ``endpoint_url`` to the value of ``https://${region_name}.digitaloceanspaces.com`` 8 | - Set the values of ``access_key`` and ``secret_key`` to the corresponding values from Digital Ocean 9 | -------------------------------------------------------------------------------- /docs/backends/s3_compatible/index.rst: -------------------------------------------------------------------------------- 1 | S3 Compatible 2 | ============= 3 | 4 | Many service providers choose to implement the S3 protocol for their storage API. 5 | Below is a collection of documentation for how to configure the :doc:`Amazon S3 <../amazon-S3>` 6 | backend for some of the most popular. 7 | 8 | .. toctree:: 9 | :maxdepth: 1 10 | :glob: 11 | 12 | backblaze-B2 13 | cloudflare-r2 14 | digital-ocean-spaces 15 | oracle-cloud 16 | scaleway 17 | -------------------------------------------------------------------------------- /docs/backends/s3_compatible/oracle-cloud.rst: -------------------------------------------------------------------------------- 1 | Oracle Cloud 2 | ============= 3 | 4 | Oracle Cloud provides an S3 compatible object storage. To use it: 5 | 6 | #. Create a `Customer Secret Key`_ 7 | #. Create a bucket 8 | 9 | Then follow the instructions in the :doc:`Amazon S3 docs <../amazon-S3>` replacing: 10 | 11 | - ``secret_key`` with the value previously generated 12 | - ``access_key`` with the value in the **Access Key** column 13 | - ``bucket_name`` with the bucket name 14 | - ``region_name`` with the current region 15 | - ``endpoint_url`` with ``https://{ORACLE_NAMESPACE}.compat.objectstorage.{ORACLE_REGION}.oraclecloud.com`` 16 | 17 | .. note:: 18 | The ``ORACLE_NAMESPACE`` value can be found on the bucket details page. 19 | 20 | 21 | References 22 | ---------- 23 | 24 | - `Customer Secret Key`_ 25 | - `Amazon S3 Compatibility API docs`_ 26 | - `Amazon S3 Compatibility API endpoints`_ 27 | - `Oracle object storage namespaces docs`_ 28 | 29 | 30 | .. _Oracle object storage namespaces docs: https://docs.oracle.com/en-us/iaas/Content/Object/Tasks/understandingnamespaces.htm#Understanding_Object_Storage_Namespaces 31 | .. _Amazon S3 Compatibility API docs: https://docs.oracle.com/en-us/iaas/Content/Object/Tasks/s3compatibleapi.htm# 32 | .. _Amazon S3 Compatibility API endpoints: https://docs.oracle.com/en-us/iaas/api/#/en/s3objectstorage/20160918/ 33 | .. _Customer Secret Key: https://docs.oracle.com/en-us/iaas/Content/Identity/Tasks/managingcredentials.htm#To4 34 | -------------------------------------------------------------------------------- /docs/backends/s3_compatible/scaleway.rst: -------------------------------------------------------------------------------- 1 | Scaleway 2 | ======== 3 | 4 | `Scaleway Object Storage `_ implements the S3 protocol.
To use it follow the instructions in the :doc:`Amazon S3 docs <../amazon-S3>` with the important caveats that you must: 5 | 6 | - Set ``AWS_BUCKET_NAME`` to the Bucket you want write to (such as ``my-chosen-bucket``) 7 | - Set ``AWS_S3_REGION_NAME`` to your Scaleway region (such as ``nl-ams`` or ``fr-par``) 8 | - Set ``AWS_S3_ENDPOINT_URL`` to the value of ``https://s3.${AWS_S3_REGION_NAME}.scw.cloud`` 9 | - Set ``AWS_ACCESS_KEY_ID`` to the value of your Access Key ID (i.e. ``SCW3XXXXXXXXXXXXXXXX``) 10 | - Set ``AWS_SECRET_ACCESS_KEY`` to the value of your Secret Key (i.e. ``abcdef10-ab12-cd34-ef56-acbdef123456``) 11 | 12 | With the settings above in place, saving a file with a name such as "my_chosen_file.txt" would be written to the following addresses: 13 | 14 | ``https://s3.nl-ams.scw.cloud/my-chosen-bucket/my_chosen_file.txt`` 15 | ``https://my-chosen-bucket.s3.nl-ams.scw.cloud/my_chosen_file.txt`` 16 | -------------------------------------------------------------------------------- /docs/backends/sftp.rst: -------------------------------------------------------------------------------- 1 | SFTP 2 | ==== 3 | 4 | Installation 5 | ------------ 6 | 7 | Install via:: 8 | 9 | pip install django-storages[sftp] 10 | 11 | Configuration & Settings 12 | ------------------------ 13 | 14 | Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure 15 | storage backends and add additional ones. To configure multiple storage objects pre Django 4.2 required subclassing the backend 16 | because the settings were global, now you pass them under the key ``OPTIONS``. For example, to save media files to SFTP on Django 17 | >= 4.2 you'd define:: 18 | 19 | 20 | STORAGES = { 21 | "default": { 22 | "BACKEND": "storages.backends.sftpstorage.SFTPStorage", 23 | "OPTIONS": { 24 | ...your_options_here 25 | }, 26 | }, 27 | } 28 | 29 | On Django < 4.2 you'd instead define:: 30 | 31 | DEFAULT_FILE_STORAGE = "storages.backends.sftpstorage.SFTPStorage" 32 | 33 | To put static files on SFTP via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as 34 | ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: 35 | 36 | STATICFILES_STORAGE = "storages.backends.sftpstorage.SFTPStorage" 37 | 38 | The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as 39 | well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. 40 | 41 | Settings 42 | ~~~~~~~~ 43 | 44 | ``host`` or ``SFTP_STORAGE_HOST`` 45 | 46 | **Required** 47 | 48 | The hostname where you want the files to be saved. 49 | 50 | ``root_path`` or ``SFTP_STORAGE_ROOT`` 51 | 52 | Default: ``''`` 53 | 54 | The root directory on the remote host into which files should be placed. 55 | Should work the same way that ``STATIC_ROOT`` works for local files. Must 56 | include a trailing slash. 57 | 58 | ``params`` or ``SFTP_STORAGE_PARAMS`` 59 | 60 | Default: ``{}`` 61 | 62 | A dictionary containing connection parameters to be passed as keyword 63 | arguments to ``paramiko.SSHClient().connect()`` (do not include hostname here). 
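For example, a minimal sketch using key-based authentication (the username, key path, and port below are illustrative, not required values)::

    SFTP_STORAGE_PARAMS = {
        "username": "deploy",
        "key_filename": "/home/deploy/.ssh/id_ed25519",
        "port": 22,
    }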
64 | See the `paramiko SSHClient.connect() documentation`_ for details. 65 | 66 | ``interactive`` or ``SFTP_STORAGE_INTERACTIVE`` 67 | 68 | Default: ``False`` 69 | 70 | A boolean indicating whether to prompt for a password if the connection cannot 71 | be made using keys, and there is not already a password in 72 | ``params``. You can set this to ``True`` to enable interactive 73 | login when running ``manage.py collectstatic``, for example. 74 | 75 | .. warning:: 76 | 77 | DO NOT set ``interactive`` to ``True`` if you are using this storage 78 | for files being uploaded to your site by users, because you'll have no way 79 | to enter the password when they submit the form. 80 | 81 | ``file_mode`` or ``SFTP_STORAGE_FILE_MODE`` 82 | 83 | Default: ``None`` 84 | 85 | A bitmask for setting permissions on newly-created files. See 86 | `Python os.chmod documentation`_ for acceptable values. 87 | 88 | ``dir_mode`` or ``SFTP_STORAGE_DIR_MODE`` 89 | 90 | Default: ``None`` 91 | 92 | A bitmask for setting permissions on newly-created directories. See 93 | `Python os.chmod documentation`_ for acceptable values. 94 | 95 | .. note:: 96 | 97 | Hint: if you start the mode number with a 0 you can express it in octal 98 | just like you would when doing "chmod 775 myfile" from bash. 99 | 100 | ``uid`` or ``SFTP_STORAGE_UID`` 101 | 102 | Default: ``None`` 103 | 104 | UID of the account that should be set as the owner of the files on the remote 105 | host. You may have to be root to set this. 106 | 107 | ``gid`` or ``SFTP_STORAGE_GID`` 108 | 109 | Default: ``None`` 110 | 111 | GID of the group that should be set on the files on the remote host. You have 112 | to be a member of the group to set this. 113 | 114 | ``known_host_file`` or ``SFTP_KNOWN_HOST_FILE`` 115 | 116 | Default: ``None`` 117 | 118 | Absolute path of the known hosts file. If it isn't set, ``"~/.ssh/known_hosts"`` will be used. 119 | 120 | ``base_url`` or ``SFTP_BASE_URL`` 121 | 122 | Default: Django ``MEDIA_URL`` setting 123 | 124 | The URL to serve files from. 125 | 126 | .. _`paramiko SSHClient.connect() documentation`: http://docs.paramiko.org/en/latest/api/client.html#paramiko.client.SSHClient.connect 127 | 128 | .. _`Python os.chmod documentation`: http://docs.python.org/library/os.html#os.chmod 129 | 130 | 131 | Standalone Use 132 | -------------- 133 | 134 | If you intend to construct a storage instance not through Django but directly, 135 | use the storage instance as a context manager to make sure the underlying SSH 136 | connection is closed after use and no longer consumes resources. 137 | 138 | .. code-block:: python 139 | 140 | from storages.backends.sftpstorage import SFTPStorage 141 | 142 | with SFTPStorage(...) as sftp: 143 | sftp.listdir("") 144 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # django-storages documentation build configuration file, created by 2 | # sphinx-quickstart on Sun Aug 28 13:44:45 2011. 3 | # 4 | # This file is execfile()d with the current directory set to its containing dir. 5 | # 6 | # Note that not all possible configuration values are present in this 7 | # autogenerated file. 8 | # 9 | # All configuration values have a default; values that are commented out 10 | # serve to show the default. 11 | 12 | import os 13 | import sys 14 | 15 | # If extensions (or modules to document with autodoc) are in another directory, 16 | # add these directories to sys.path here.
If the directory is relative to the 17 | # documentation root, use os.path.abspath to make it absolute, like shown here. 18 | sys.path.insert(0, os.path.abspath("..")) 19 | import storages 20 | 21 | # -- General configuration ----------------------------------------------------- 22 | 23 | # If your documentation needs a minimal Sphinx version, state it here. 24 | # needs_sphinx = '1.0' 25 | 26 | # Add any Sphinx extension module names here, as strings. They can be extensions 27 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 28 | extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"] 29 | 30 | # Add any paths that contain templates here, relative to this directory. 31 | templates_path = ["_templates"] 32 | 33 | # The suffix of source filenames. 34 | source_suffix = ".rst" 35 | 36 | # The encoding of source files. 37 | # source_encoding = 'utf-8-sig' 38 | 39 | # The master toctree document. 40 | master_doc = "index" 41 | 42 | # General information about the project. 43 | project = "django-storages" 44 | copyright = "2011-2023, Josh Schneier, David Larlet, et. al." 45 | 46 | # The version info for the project you're documenting, acts as replacement for 47 | # |version| and |release|, also used in various other places throughout the 48 | # built documents. 49 | # 50 | # The short X.Y version. 51 | version = storages.__version__ 52 | # The full version, including alpha/beta/rc tags. 53 | release = storages.__version__ 54 | 55 | # The language for content autogenerated by Sphinx. Refer to documentation 56 | # for a list of supported languages. 57 | # language = None 58 | 59 | # There are two options for replacing |today|: either, you set today to some 60 | # non-false value, then it is used: 61 | # today = '' 62 | # Else, today_fmt is used as the format for a strftime call. 63 | # today_fmt = '%B %d, %Y' 64 | 65 | # List of patterns, relative to source directory, that match files and 66 | # directories to ignore when looking for source files. 67 | exclude_patterns = ["_build"] 68 | 69 | # The reST default role (used for this markup: `text`) to use for all documents. 70 | # default_role = None 71 | 72 | # If true, '()' will be appended to :func: etc. cross-reference text. 73 | # add_function_parentheses = True 74 | 75 | # If true, the current module name will be prepended to all description 76 | # unit titles (such as .. function::). 77 | # add_module_names = True 78 | 79 | # If true, sectionauthor and moduleauthor directives will be shown in the 80 | # output. They are ignored by default. 81 | # show_authors = False 82 | 83 | # The name of the Pygments (syntax highlighting) style to use. 84 | pygments_style = "sphinx" 85 | 86 | # A list of ignored prefixes for module index sorting. 87 | # modindex_common_prefix = [] 88 | 89 | 90 | # -- Options for HTML output --------------------------------------------------- 91 | 92 | # The theme to use for HTML and HTML Help pages. See the documentation for 93 | # a list of builtin themes. 94 | html_theme = "furo" 95 | 96 | # Theme options are theme-specific and customize the look and feel of a theme 97 | # further. For a list of options available for each theme, see the 98 | # documentation. 99 | # html_theme_options = {} 100 | 101 | # Add any paths that contain custom themes here, relative to this directory. 102 | # html_theme_path = [] 103 | 104 | # The name for this set of Sphinx documents. If None, it defaults to 105 | # " v documentation". 106 | # html_title = None 107 | 108 | # A shorter title for the navigation bar. 
Default is the same as html_title. 109 | # html_short_title = None 110 | 111 | # The name of an image file (relative to this directory) to place at the top 112 | # of the sidebar. 113 | # html_logo = None 114 | 115 | # The name of an image file (within the static path) to use as favicon of the 116 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 117 | # pixels large. 118 | # html_favicon = None 119 | 120 | # Add any paths that contain custom static files (such as style sheets) here, 121 | # relative to this directory. They are copied after the builtin static files, 122 | # so a file named "default.css" will overwrite the builtin "default.css". 123 | # html_static_path = ['_static'] 124 | 125 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 126 | # using the given strftime format. 127 | # html_last_updated_fmt = '%b %d, %Y' 128 | 129 | # If true, SmartyPants will be used to convert quotes and dashes to 130 | # typographically correct entities. 131 | # html_use_smartypants = True 132 | 133 | # Custom sidebar templates, maps document names to template names. 134 | # html_sidebars = {} 135 | 136 | # Additional templates that should be rendered to pages, maps page names to 137 | # template names. 138 | # html_additional_pages = {} 139 | 140 | # If false, no module index is generated. 141 | # html_domain_indices = True 142 | 143 | # If false, no index is generated. 144 | # html_use_index = True 145 | 146 | # If true, the index is split into individual pages for each letter. 147 | # html_split_index = False 148 | 149 | # If true, links to the reST sources are added to the pages. 150 | # html_show_sourcelink = True 151 | 152 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 153 | # html_show_sphinx = True 154 | 155 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 156 | # html_show_copyright = True 157 | 158 | # If true, an OpenSearch description file will be output, and all pages will 159 | # contain a tag referring to it. The value of this option must be the 160 | # base URL from which the finished HTML is served. 161 | # html_use_opensearch = '' 162 | 163 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 164 | # html_file_suffix = None 165 | 166 | # Output file base name for HTML help builder. 167 | htmlhelp_basename = "django-storagesdoc" 168 | 169 | 170 | # -- Options for LaTeX output -------------------------------------------------- 171 | 172 | # The paper size ('letter' or 'a4'). 173 | # latex_paper_size = 'letter' 174 | 175 | # The font size ('10pt', '11pt' or '12pt'). 176 | # latex_font_size = '10pt' 177 | 178 | # Grouping the document tree into LaTeX files. List of tuples 179 | # (source start file, target name, title, author, documentclass [howto/manual]). 180 | latex_documents = [ 181 | ( 182 | "index", 183 | "django-storages.tex", 184 | "django-storages Documentation", 185 | "Josh Schneier, David Larlet, et. al.", 186 | "manual", 187 | ), 188 | ] 189 | 190 | # The name of an image file (relative to this directory) to place at the top of 191 | # the title page. 192 | # latex_logo = None 193 | 194 | # For "manual" documents, if this is true, then toplevel headings are parts, 195 | # not chapters. 196 | # latex_use_parts = False 197 | 198 | # If true, show page references after internal links. 199 | # latex_show_pagerefs = False 200 | 201 | # If true, show URL addresses after external links. 
202 | # latex_show_urls = False 203 | 204 | # Additional stuff for the LaTeX preamble. 205 | # latex_preamble = '' 206 | 207 | # Documents to append as an appendix to all manuals. 208 | # latex_appendices = [] 209 | 210 | # If false, no module index is generated. 211 | # latex_domain_indices = True 212 | 213 | 214 | # -- Options for manual page output -------------------------------------------- 215 | 216 | # One entry per manual page. List of tuples 217 | # (source start file, name, description, authors, manual section). 218 | man_pages = [ 219 | ( 220 | "index", 221 | "django-storages", 222 | "django-storages Documentation", 223 | ["Josh Schneier, David Larlet, et. al."], 224 | 1, 225 | ) 226 | ] 227 | 228 | 229 | # -- Options for Epub output --------------------------------------------------- 230 | 231 | # Bibliographic Dublin Core info. 232 | epub_title = "django-storages" 233 | epub_author = "Josh Schneier, David Larlet, et. al." 234 | epub_publisher = "Josh Schneier, David Larlet, et. al." 235 | epub_copyright = "2011-2023, Josh Schneier, David Larlet, et. al." 236 | 237 | # The language of the text. It defaults to the language option 238 | # or en if the language is not set. 239 | # epub_language = '' 240 | 241 | # The scheme of the identifier. Typical schemes are ISBN or URL. 242 | # epub_scheme = '' 243 | 244 | # The unique identifier of the text. This can be a ISBN number 245 | # or the project homepage. 246 | # epub_identifier = '' 247 | 248 | # A unique identification for the text. 249 | # epub_uid = '' 250 | 251 | # HTML files that should be inserted before the pages created by sphinx. 252 | # The format is a list of tuples containing the path and title. 253 | # epub_pre_files = [] 254 | 255 | # HTML files that should be inserted after the pages created by sphinx. 256 | # The format is a list of tuples containing the path and title. 257 | # epub_post_files = [] 258 | 259 | # A list of files that should not be packed into the epub file. 260 | # epub_exclude_files = [] 261 | 262 | # The depth of the table of contents in toc.ncx. 263 | # epub_tocdepth = 3 264 | 265 | # Allow duplicate toc entries. 266 | # epub_tocdup = True 267 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | django-storages 2 | =============== 3 | 4 | django-storages is a collection of custom storage backends for Django. 5 | 6 | .. toctree:: 7 | :maxdepth: 2 8 | :titlesonly: 9 | 10 | backends/amazon-S3 11 | backends/apache_libcloud 12 | backends/azure 13 | backends/dropbox 14 | backends/ftp 15 | backends/gcloud 16 | backends/sftp 17 | backends/s3_compatible/index 18 | 19 | Installation 20 | ************ 21 | 22 | Use pip to install from PyPI:: 23 | 24 | pip install django-storages 25 | 26 | Each storage backend has its own unique settings you will need to add to your settings.py file. Read the documentation for your storage engine(s) of choice to determine what you need to add. 27 | 28 | Contributing 29 | ************ 30 | 31 | To contribute to django-storages `create a fork`_ on GitHub. Clone your fork, make some changes, and submit a pull request. 32 | 33 | .. _create a fork: https://github.com/jschneier/django-storages 34 | 35 | Issues 36 | ****** 37 | 38 | Use the GitHub `issue tracker`_ for django-storages to submit bugs, issues, and feature requests. 39 | 40 | .. 
_issue tracker: https://github.com/jschneier/django-storages/issues 41 | 42 | Indices and tables 43 | ================== 44 | 45 | * :ref:`genindex` 46 | * :ref:`modindex` 47 | * :ref:`search` 48 | -------------------------------------------------------------------------------- /docs/logos/horizontal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jschneier/django-storages/9de3cc9da4dbd67fcc56ecafc7cbf738af90136c/docs/logos/horizontal.png -------------------------------------------------------------------------------- /docs/logos/horizontal2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jschneier/django-storages/9de3cc9da4dbd67fcc56ecafc7cbf738af90136c/docs/logos/horizontal2.png -------------------------------------------------------------------------------- /docs/logos/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jschneier/django-storages/9de3cc9da4dbd67fcc56ecafc7cbf738af90136c/docs/logos/logo.png -------------------------------------------------------------------------------- /docs/logos/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | 9 | 12 | 14 | 17 | 18 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /docs/logos/vertical.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jschneier/django-storages/9de3cc9da4dbd67fcc56ecafc7cbf738af90136c/docs/logos/vertical.png -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | if NOT "%PAPER%" == "" ( 11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 12 | ) 13 | 14 | if "%1" == "" goto help 15 | 16 | if "%1" == "help" ( 17 | :help 18 | echo.Please use `make ^` where ^ is one of 19 | echo. html to make standalone HTML files 20 | echo. dirhtml to make HTML files named index.html in directories 21 | echo. singlehtml to make a single large HTML file 22 | echo. pickle to make pickle files 23 | echo. json to make JSON files 24 | echo. htmlhelp to make HTML files and a HTML help project 25 | echo. qthelp to make HTML files and a qthelp project 26 | echo. devhelp to make HTML files and a Devhelp project 27 | echo. epub to make an epub 28 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 29 | echo. text to make text files 30 | echo. man to make manual pages 31 | echo. changes to make an overview over all changed/added/deprecated items 32 | echo. linkcheck to check all external links for integrity 33 | echo. doctest to run all doctests embedded in the documentation if enabled 34 | goto end 35 | ) 36 | 37 | if "%1" == "clean" ( 38 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 39 | del /q /s %BUILDDIR%\* 40 | goto end 41 | ) 42 | 43 | if "%1" == "html" ( 44 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 45 | if errorlevel 1 exit /b 1 46 | echo. 47 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 
48 | goto end 49 | ) 50 | 51 | if "%1" == "dirhtml" ( 52 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 53 | if errorlevel 1 exit /b 1 54 | echo. 55 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 56 | goto end 57 | ) 58 | 59 | if "%1" == "singlehtml" ( 60 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 61 | if errorlevel 1 exit /b 1 62 | echo. 63 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 64 | goto end 65 | ) 66 | 67 | if "%1" == "pickle" ( 68 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 69 | if errorlevel 1 exit /b 1 70 | echo. 71 | echo.Build finished; now you can process the pickle files. 72 | goto end 73 | ) 74 | 75 | if "%1" == "json" ( 76 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 77 | if errorlevel 1 exit /b 1 78 | echo. 79 | echo.Build finished; now you can process the JSON files. 80 | goto end 81 | ) 82 | 83 | if "%1" == "htmlhelp" ( 84 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 85 | if errorlevel 1 exit /b 1 86 | echo. 87 | echo.Build finished; now you can run HTML Help Workshop with the ^ 88 | .hhp project file in %BUILDDIR%/htmlhelp. 89 | goto end 90 | ) 91 | 92 | if "%1" == "qthelp" ( 93 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 94 | if errorlevel 1 exit /b 1 95 | echo. 96 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 97 | .qhcp project file in %BUILDDIR%/qthelp, like this: 98 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\django-storages.qhcp 99 | echo.To view the help file: 100 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\django-storages.ghc 101 | goto end 102 | ) 103 | 104 | if "%1" == "devhelp" ( 105 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 106 | if errorlevel 1 exit /b 1 107 | echo. 108 | echo.Build finished. 109 | goto end 110 | ) 111 | 112 | if "%1" == "epub" ( 113 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 117 | goto end 118 | ) 119 | 120 | if "%1" == "latex" ( 121 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 122 | if errorlevel 1 exit /b 1 123 | echo. 124 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 125 | goto end 126 | ) 127 | 128 | if "%1" == "text" ( 129 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 130 | if errorlevel 1 exit /b 1 131 | echo. 132 | echo.Build finished. The text files are in %BUILDDIR%/text. 133 | goto end 134 | ) 135 | 136 | if "%1" == "man" ( 137 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 138 | if errorlevel 1 exit /b 1 139 | echo. 140 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 141 | goto end 142 | ) 143 | 144 | if "%1" == "changes" ( 145 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 146 | if errorlevel 1 exit /b 1 147 | echo. 148 | echo.The overview file is in %BUILDDIR%/changes. 149 | goto end 150 | ) 151 | 152 | if "%1" == "linkcheck" ( 153 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 154 | if errorlevel 1 exit /b 1 155 | echo. 156 | echo.Link check complete; look for any errors in the above output ^ 157 | or in %BUILDDIR%/linkcheck/output.txt. 158 | goto end 159 | ) 160 | 161 | if "%1" == "doctest" ( 162 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 163 | if errorlevel 1 exit /b 1 164 | echo. 
165 | echo.Testing of doctests in the sources finished, look at the ^ 166 | results in %BUILDDIR%/doctest/output.txt. 167 | goto end 168 | ) 169 | 170 | :end 171 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | furo==2024.8.6 2 | Sphinx==7.2.6 -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # NOTE: You have to use single-quoted strings in TOML for regular expressions. 2 | # It's the equivalent of r-strings in Python. Multiline strings are treated as 3 | # verbose regular expressions by Black. Use [ ] to denote a significant space 4 | # character. 5 | 6 | [build-system] 7 | build-backend = "setuptools.build_meta" 8 | requires = [ 9 | "setuptools>=61.2", 10 | ] 11 | 12 | [project] 13 | name = "django-storages" 14 | description = "Support for many storage backends in Django" 15 | license = {text = "BSD-3-Clause"} 16 | authors = [{name = "Josh Schneier", email = "josh.schneier@gmail.com"}] 17 | requires-python = ">=3.7" 18 | classifiers = [ 19 | "Development Status :: 5 - Production/Stable", 20 | "Environment :: Web Environment", 21 | "Framework :: Django", 22 | "Framework :: Django :: 4.2", 23 | "Framework :: Django :: 5.0", 24 | "Framework :: Django :: 5.1", 25 | "Intended Audience :: Developers", 26 | "License :: OSI Approved :: BSD License", 27 | "Operating System :: OS Independent", 28 | "Programming Language :: Python", 29 | "Programming Language :: Python :: 3 :: Only", 30 | "Programming Language :: Python :: 3.8", 31 | "Programming Language :: Python :: 3.9", 32 | "Programming Language :: Python :: 3.10", 33 | "Programming Language :: Python :: 3.11", 34 | "Programming Language :: Python :: 3.12", 35 | ] 36 | dynamic = [ 37 | "readme", 38 | "version", 39 | ] 40 | dependencies = [ 41 | "Django>=3.2", 42 | ] 43 | [project.optional-dependencies] 44 | azure = [ 45 | "azure-core>=1.13", 46 | "azure-storage-blob>=12", 47 | ] 48 | boto3 = [ 49 | "boto3>=1.4.4", 50 | ] 51 | dropbox = [ 52 | "dropbox>=7.2.1", 53 | ] 54 | google = [ 55 | "google-cloud-storage>=1.36.1", 56 | ] 57 | libcloud = [ 58 | "apache-libcloud", 59 | ] 60 | s3 = [ 61 | "boto3>=1.4.4", 62 | ] 63 | sftp = [ 64 | "paramiko>=1.15", 65 | ] 66 | [project.urls] 67 | Homepage = "https://github.com/jschneier/django-storages" 68 | 69 | [tool.setuptools] 70 | zip-safe = false 71 | packages = [ 72 | "storages", 73 | "storages.backends", 74 | ] 75 | include-package-data = false 76 | 77 | [tool.setuptools.dynamic] 78 | readme = {file = ["README.rst"]} 79 | version = {attr = "storages.__version__"} 80 | 81 | [tool.ruff] 82 | lint.select = [ 83 | "AIR", # Airflow 84 | "ASYNC", # flake8-async 85 | "B", # flake8-bugbear 86 | "C4", # flake8-comprehensions 87 | "C90", # McCabe cyclomatic complexity 88 | "DJ", # flake8-django 89 | "E", # pycodestyle 90 | "EXE", # flake8-executable 91 | "F", # Pyflakes 92 | "FLY", # flynt 93 | "G", # flake8-logging-format 94 | "I", # isort 95 | "ICN", # flake8-import-conventions 96 | "INP", # flake8-no-pep420 97 | "INT", # flake8-gettext 98 | "ISC", # flake8-implicit-str-concat 99 | "NPY", # NumPy-specific rules 100 | "PD", # pandas-vet 101 | "PERF", # Perflint 102 | "PGH", # pygrep-hooks 103 | "PIE", # flake8-pie 104 | "PL", # Pylint 105 | "PYI", # flake8-pyi 106 | "RUF", # Ruff-specific rules 107 | "SLOT", # flake8-slots 108 | "T10", # 
flake8-debugger 109 | "T20", # flake8-print 110 | "TCH", # flake8-type-checking 111 | "TID", # flake8-tidy-imports 112 | "W", # pycodestyle 113 | "YTT", # flake8-2020 114 | # "A", # flake8-builtins 115 | # "ANN", # flake8-annotations 116 | # "ARG", # flake8-unused-arguments 117 | # "BLE", # flake8-blind-except 118 | # "COM", # flake8-commas 119 | # "D", # pydocstyle 120 | # "DTZ", # flake8-datetimez 121 | # "EM", # flake8-errmsg 122 | # "ERA", # eradicate 123 | # "FA", # flake8-future-annotations 124 | # "FBT", # flake8-boolean-trap 125 | # "FIX", # flake8-fixme 126 | # "N", # pep8-naming 127 | # "PT", # flake8-pytest-style 128 | # "PTH", # flake8-use-pathlib 129 | # "Q", # flake8-quotes 130 | # "RET", # flake8-return 131 | # "RSE", # flake8-raise 132 | # "S", # flake8-bandit 133 | # "SIM", # flake8-simplify 134 | # "SLF", # flake8-self 135 | # "TD", # flake8-todos 136 | # "TRY", # tryceratops 137 | # "UP", # pyupgrade 138 | ] 139 | lint.ignore = [ 140 | "B028", 141 | "B904", 142 | "PGH004", 143 | ] 144 | target-version = "py37" 145 | 146 | [tool.ruff.lint.isort] 147 | force-single-line = true 148 | known-first-party = ["storages"] 149 | 150 | [tool.ruff.lint.per-file-ignores] 151 | "docs/conf.py" = ["E402", "INP001"] 152 | "storages/backends/ftp.py" = ["PERF203"] 153 | "tests/test_s3.py" = ["B018"] 154 | 155 | [tool.ruff.lint.pylint] 156 | allow-magic-value-types = ["int", "str"] 157 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup() 4 | -------------------------------------------------------------------------------- /storages/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.14.6" 2 | -------------------------------------------------------------------------------- /storages/backends/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jschneier/django-storages/9de3cc9da4dbd67fcc56ecafc7cbf738af90136c/storages/backends/__init__.py -------------------------------------------------------------------------------- /storages/backends/apache_libcloud.py: -------------------------------------------------------------------------------- 1 | # Django storage using libcloud providers 2 | # Aymeric Barantal (mric at chamal.fr) 2011 3 | # 4 | import io 5 | from urllib.parse import urljoin 6 | 7 | from django.conf import settings 8 | from django.core.exceptions import ImproperlyConfigured 9 | from django.core.files.base import File 10 | from django.core.files.storage import Storage 11 | from django.utils.deconstruct import deconstructible 12 | 13 | from storages.utils import clean_name 14 | 15 | try: 16 | from libcloud.storage.providers import get_driver 17 | from libcloud.storage.types import ObjectDoesNotExistError 18 | from libcloud.storage.types import Provider 19 | except ImportError: 20 | raise ImproperlyConfigured("Could not load libcloud") 21 | 22 | 23 | @deconstructible 24 | class LibCloudStorage(Storage): 25 | """Django storage derived class using apache libcloud to operate 26 | on supported providers""" 27 | 28 | def __init__(self, provider_name=None, option=None): 29 | if provider_name is None: 30 | provider_name = getattr(settings, "DEFAULT_LIBCLOUD_PROVIDER", "default") 31 | 32 | self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name) 33 | if not self.provider: 34 | raise 
ImproperlyConfigured( 35 | "LIBCLOUD_PROVIDERS %s not defined or invalid" % provider_name 36 | ) 37 | extra_kwargs = {} 38 | if "region" in self.provider: 39 | extra_kwargs["region"] = self.provider["region"] 40 | # Used by the GoogleStorageDriver 41 | if "project" in self.provider: 42 | extra_kwargs["project"] = self.provider["project"] 43 | try: 44 | provider_type = self.provider["type"] 45 | if isinstance(provider_type, str): 46 | module_path, tag = provider_type.rsplit(".", 1) 47 | if module_path != "libcloud.storage.types.Provider": 48 | raise ValueError("Invalid module path") 49 | provider_type = getattr(Provider, tag) 50 | 51 | Driver = get_driver(provider_type) 52 | self.driver = Driver( 53 | self.provider["user"], self.provider["key"], **extra_kwargs 54 | ) 55 | except Exception as e: 56 | raise ImproperlyConfigured( 57 | "Unable to create libcloud driver type %s: %s" 58 | % (self.provider.get("type"), e) 59 | ) 60 | self.bucket = self.provider["bucket"] # Limit to one container 61 | 62 | def _get_bucket(self): 63 | """Helper to get bucket object (libcloud container)""" 64 | return self.driver.get_container(self.bucket) 65 | 66 | def _get_object(self, name): 67 | """Get object by its name. ObjectDoesNotExistError will be raised if object not 68 | found""" 69 | return self.driver.get_object(self.bucket, clean_name(name)) 70 | 71 | def delete(self, name): 72 | """Delete object on remote""" 73 | try: 74 | obj = self._get_object(name) 75 | return self.driver.delete_object(obj) 76 | except ObjectDoesNotExistError: 77 | pass 78 | 79 | def exists(self, name): 80 | try: 81 | _ = self._get_object(name) 82 | except ObjectDoesNotExistError: 83 | return False 84 | return True 85 | 86 | def listdir(self, path="/"): 87 | """Lists the contents of the specified path, 88 | returning a 2-tuple of lists; the first item being 89 | directories, the second item being files. 
90 | """ 91 | container = self._get_bucket() 92 | objects = self.driver.list_container_objects(container) 93 | path = clean_name(path) 94 | if not path.endswith("/"): 95 | path = "%s/" % path 96 | files = [] 97 | dirs = [] 98 | # TOFIX: better algorithm to filter correctly 99 | # (and not depend on google-storage empty folder naming) 100 | for o in objects: 101 | if path == "/": 102 | if o.name.count("/") == 0: 103 | files.append(o.name) 104 | elif o.name.count("/") == 1: 105 | dir_name = o.name[: o.name.index("/")] 106 | if dir_name not in dirs: 107 | dirs.append(dir_name) 108 | elif o.name.startswith(path): 109 | if o.name.count("/") <= path.count("/"): 110 | # TOFIX : special case for google storage with empty dir 111 | if o.name.endswith("_$folder$"): 112 | name = o.name[:-9] 113 | name = name[len(path) :] 114 | dirs.append(name) 115 | else: 116 | name = o.name[len(path) :] 117 | files.append(name) 118 | return (dirs, files) 119 | 120 | def size(self, name): 121 | obj = self._get_object(name) 122 | return obj.size if obj else -1 123 | 124 | def url(self, name): 125 | provider_type = self.provider["type"].lower() 126 | obj = self._get_object(name) 127 | if not obj: 128 | return None 129 | try: 130 | url = self.driver.get_object_cdn_url(obj) 131 | except NotImplementedError as e: 132 | object_path = "{}/{}".format(self.bucket, obj.name) 133 | if "s3" in provider_type: 134 | base_url = "https://%s" % self.driver.connection.host 135 | url = urljoin(base_url, object_path) 136 | elif "google" in provider_type: 137 | url = urljoin("https://storage.googleapis.com", object_path) 138 | elif "azure" in provider_type: 139 | base_url = "https://%s.blob.core.windows.net" % self.provider["user"] 140 | url = urljoin(base_url, object_path) 141 | elif "backblaze" in provider_type: 142 | url = urljoin("api.backblaze.com/b2api/v1/", object_path) 143 | else: 144 | raise e 145 | return url 146 | 147 | def _open(self, name, mode="rb"): 148 | remote_file = LibCloudFile(name, self, mode=mode) 149 | return remote_file 150 | 151 | def _read(self, name): 152 | try: 153 | obj = self._get_object(name) 154 | except ObjectDoesNotExistError as e: 155 | raise FileNotFoundError(str(e)) 156 | # TOFIX : we should be able to read chunk by chunk 157 | return next(self.driver.download_object_as_stream(obj, obj.size)) 158 | 159 | def _save(self, name, file): 160 | self.driver.upload_object_via_stream(iter(file), self._get_bucket(), name) 161 | return name 162 | 163 | 164 | class LibCloudFile(File): 165 | """File inherited class for libcloud storage objects read and write""" 166 | 167 | def __init__(self, name, storage, mode): 168 | self.name = name 169 | self._storage = storage 170 | self._mode = mode 171 | self._is_dirty = False 172 | self._file = None 173 | 174 | def _get_file(self): 175 | if self._file is None: 176 | data = self._storage._read(self.name) 177 | self._file = io.BytesIO(data) 178 | return self._file 179 | 180 | def _set_file(self, value): 181 | self._file = value 182 | 183 | file = property(_get_file, _set_file) 184 | 185 | @property 186 | def size(self): 187 | if not hasattr(self, "_size"): 188 | self._size = self._storage.size(self.name) 189 | return self._size 190 | 191 | def read(self, num_bytes=None): 192 | return self.file.read(num_bytes) 193 | 194 | def write(self, content): 195 | if "w" not in self._mode: 196 | raise AttributeError("File was opened for read-only access.") 197 | self.file = io.BytesIO(content) 198 | self._is_dirty = True 199 | 200 | def close(self): 201 | if self._is_dirty: 202 | 
self._storage._save(self.name, self.file) 203 | self.file.close() 204 | -------------------------------------------------------------------------------- /storages/backends/azure_storage.py: -------------------------------------------------------------------------------- 1 | import mimetypes 2 | import warnings 3 | from datetime import datetime 4 | from datetime import timedelta 5 | from tempfile import SpooledTemporaryFile 6 | from urllib.parse import urlparse 7 | from urllib.parse import urlunparse 8 | 9 | from azure.core.exceptions import ResourceNotFoundError 10 | from azure.core.utils import parse_connection_string 11 | from azure.storage.blob import BlobClient 12 | from azure.storage.blob import BlobSasPermissions 13 | from azure.storage.blob import BlobServiceClient 14 | from azure.storage.blob import ContentSettings 15 | from azure.storage.blob import generate_blob_sas 16 | from django.core.exceptions import SuspiciousOperation 17 | from django.core.files.base import File 18 | from django.utils import timezone 19 | from django.utils.deconstruct import deconstructible 20 | 21 | from storages.base import BaseStorage 22 | from storages.utils import clean_name 23 | from storages.utils import get_available_overwrite_name 24 | from storages.utils import safe_join 25 | from storages.utils import setting 26 | from storages.utils import to_bytes 27 | 28 | 29 | @deconstructible 30 | class AzureStorageFile(File): 31 | def __init__(self, name, mode, storage): 32 | self.name = name 33 | self._mode = mode 34 | self._storage = storage 35 | self._is_dirty = False 36 | self._file = None 37 | self._path = storage._get_valid_path(name) 38 | 39 | def _get_file(self): 40 | if self._file is not None: 41 | return self._file 42 | 43 | file = SpooledTemporaryFile( 44 | max_size=self._storage.max_memory_size, 45 | suffix=".AzureStorageFile", 46 | dir=setting("FILE_UPLOAD_TEMP_DIR", None), 47 | ) 48 | 49 | if "r" in self._mode or "a" in self._mode: 50 | download_stream = self._storage.client.download_blob( 51 | self._path, timeout=self._storage.timeout 52 | ) 53 | download_stream.readinto(file) 54 | if "r" in self._mode: 55 | file.seek(0) 56 | 57 | self._file = file 58 | return self._file 59 | 60 | def _set_file(self, value): 61 | self._file = value 62 | 63 | file = property(_get_file, _set_file) 64 | 65 | def read(self, *args, **kwargs): 66 | if "r" not in self._mode and "a" not in self._mode: 67 | raise AttributeError("File was not opened in read mode.") 68 | return super().read(*args, **kwargs) 69 | 70 | def write(self, content): 71 | if "w" not in self._mode and "+" not in self._mode and "a" not in self._mode: 72 | raise AttributeError("File was not opened in write mode.") 73 | self._is_dirty = True 74 | return super().write(to_bytes(content)) 75 | 76 | def close(self): 77 | if self._file is None: 78 | return 79 | if self._is_dirty: 80 | self._file.seek(0) 81 | self._storage._save(self.name, self._file) 82 | self._is_dirty = False 83 | self._file.close() 84 | self._file = None 85 | 86 | 87 | def _content_type(content): 88 | try: 89 | return content.file.content_type 90 | except AttributeError: 91 | pass 92 | try: 93 | return content.content_type 94 | except AttributeError: 95 | pass 96 | return None 97 | 98 | 99 | def _get_valid_path(s): 100 | # A blob name: 101 | # * must not end with dot or slash 102 | # * can contain any character 103 | # * must escape URL reserved characters 104 | # (not needed here since the azure client will do that) 105 | s = s.strip("./") 106 | if len(s) > _AZURE_NAME_MAX_LEN: 107 
| raise ValueError("File name max len is %d" % _AZURE_NAME_MAX_LEN) 108 | if not len(s): 109 | raise ValueError("File name must contain one or more printable characters") 110 | if s.count("/") > 256: 111 | raise ValueError("File name must not contain more than 256 slashes") 112 | return s 113 | 114 | 115 | # Max len according to azure's docs 116 | _AZURE_NAME_MAX_LEN = 1024 117 | 118 | 119 | @deconstructible 120 | class AzureStorage(BaseStorage): 121 | def __init__(self, **settings): 122 | super().__init__(**settings) 123 | self._service_client = None 124 | self._client = None 125 | self._user_delegation_key = None 126 | self._user_delegation_key_expiry = datetime.utcnow() 127 | if self.connection_string and (not self.account_name or not self.account_key): 128 | parsed = parse_connection_string( 129 | self.connection_string, case_sensitive_keys=True 130 | ) 131 | if not self.account_name and "AccountName" in parsed: 132 | self.account_name = parsed["AccountName"] 133 | if not self.account_key and "AccountKey" in parsed: 134 | self.account_key = parsed["AccountKey"] 135 | 136 | def get_default_settings(self): 137 | return { 138 | "account_name": setting("AZURE_ACCOUNT_NAME"), 139 | "account_key": setting("AZURE_ACCOUNT_KEY"), 140 | "object_parameters": setting("AZURE_OBJECT_PARAMETERS", {}), 141 | "azure_container": setting("AZURE_CONTAINER"), 142 | "azure_ssl": setting("AZURE_SSL", True), 143 | "upload_max_conn": setting("AZURE_UPLOAD_MAX_CONN", 2), 144 | "timeout": setting("AZURE_CONNECTION_TIMEOUT_SECS", 20), 145 | "max_memory_size": setting("AZURE_BLOB_MAX_MEMORY_SIZE", 2 * 1024 * 1024), 146 | "expiration_secs": setting("AZURE_URL_EXPIRATION_SECS"), 147 | "overwrite_files": setting("AZURE_OVERWRITE_FILES", False), 148 | "location": setting("AZURE_LOCATION", ""), 149 | "default_content_type": "application/octet-stream", 150 | "cache_control": setting("AZURE_CACHE_CONTROL"), 151 | "sas_token": setting("AZURE_SAS_TOKEN"), 152 | "endpoint_suffix": setting("AZURE_ENDPOINT_SUFFIX", "core.windows.net"), 153 | "custom_domain": setting("AZURE_CUSTOM_DOMAIN"), 154 | "connection_string": setting("AZURE_CONNECTION_STRING"), 155 | "token_credential": setting("AZURE_TOKEN_CREDENTIAL"), 156 | "api_version": setting("AZURE_API_VERSION", None), 157 | "client_options": setting("AZURE_CLIENT_OPTIONS", {}), 158 | } 159 | 160 | def _get_service_client(self): 161 | if self.connection_string is not None: 162 | return BlobServiceClient.from_connection_string(self.connection_string) 163 | 164 | account_domain = "{}.blob.{}".format(self.account_name, self.endpoint_suffix) 165 | account_url = "{}://{}".format(self.azure_protocol, account_domain) 166 | 167 | credential = None 168 | if self.account_key: 169 | credential = { 170 | "account_name": self.account_name, 171 | "account_key": self.account_key, 172 | } 173 | elif self.sas_token: 174 | credential = self.sas_token 175 | elif self.token_credential: 176 | credential = self.token_credential 177 | 178 | options = self.client_options 179 | if self.api_version: 180 | warnings.warn( 181 | "The AZURE_API_VERSION/api_version setting is deprecated " 182 | "and will be removed in a future version. 
Use AZURE_CLIENT_OPTIONS " 183 | "to customize any of the BlobServiceClient kwargs.", 184 | DeprecationWarning, 185 | ) 186 | options["api_version"] = self.api_version 187 | return BlobServiceClient(account_url, credential=credential, **options) 188 | 189 | @property 190 | def service_client(self): 191 | if self._service_client is None: 192 | self._service_client = self._get_service_client() 193 | return self._service_client 194 | 195 | @property 196 | def client(self): 197 | if self._client is None: 198 | self._client = self.service_client.get_container_client( 199 | self.azure_container 200 | ) 201 | return self._client 202 | 203 | def get_user_delegation_key(self, expiry): 204 | # We'll only be able to get a user delegation key if we've authenticated with a 205 | # token credential. 206 | if self.token_credential is None: 207 | return None 208 | 209 | # Get a new key if we don't already have one, or if the one we have expires too 210 | # soon. 211 | if ( 212 | self._user_delegation_key is None 213 | or expiry > self._user_delegation_key_expiry 214 | ): 215 | now = datetime.utcnow() 216 | key_expiry_time = now + timedelta(days=7) 217 | self._user_delegation_key = self.service_client.get_user_delegation_key( 218 | key_start_time=now, key_expiry_time=key_expiry_time 219 | ) 220 | self._user_delegation_key_expiry = key_expiry_time 221 | 222 | return self._user_delegation_key 223 | 224 | @property 225 | def azure_protocol(self): 226 | if self.azure_ssl: 227 | return "https" 228 | else: 229 | return "http" 230 | 231 | def _normalize_name(self, name): 232 | try: 233 | return safe_join(self.location, name) 234 | except ValueError: 235 | raise SuspiciousOperation("Attempted access to '%s' denied." % name) 236 | 237 | def _get_valid_path(self, name): 238 | # Must be idempotent 239 | return _get_valid_path(self._normalize_name(clean_name(name))) 240 | 241 | def _open(self, name, mode="rb"): 242 | return AzureStorageFile(name, mode, self) 243 | 244 | def get_available_name(self, name, max_length=_AZURE_NAME_MAX_LEN): 245 | """ 246 | Returns a filename that's free on the target storage system, and 247 | available for new content to be written to. 
248 | """ 249 | name = clean_name(name) 250 | if self.overwrite_files: 251 | return get_available_overwrite_name(name, max_length) 252 | return super().get_available_name(name, max_length) 253 | 254 | def exists(self, name): 255 | if not name: 256 | return True 257 | 258 | blob_client = self.client.get_blob_client(self._get_valid_path(name)) 259 | return blob_client.exists() 260 | 261 | def delete(self, name): 262 | try: 263 | self.client.delete_blob(self._get_valid_path(name), timeout=self.timeout) 264 | except ResourceNotFoundError: 265 | pass 266 | 267 | def size(self, name): 268 | blob_client = self.client.get_blob_client(self._get_valid_path(name)) 269 | properties = blob_client.get_blob_properties(timeout=self.timeout) 270 | return properties.size 271 | 272 | def _save(self, name, content): 273 | cleaned_name = clean_name(name) 274 | name = self._get_valid_path(name) 275 | params = self._get_content_settings_parameters(name, content) 276 | 277 | # Unwrap django file (wrapped by parent's save call) 278 | if isinstance(content, File): 279 | content = content.file 280 | 281 | content.seek(0) 282 | self.client.upload_blob( 283 | name, 284 | content, 285 | content_settings=ContentSettings(**params), 286 | max_concurrency=self.upload_max_conn, 287 | timeout=self.timeout, 288 | overwrite=self.overwrite_files, 289 | ) 290 | return cleaned_name 291 | 292 | def _expire_at(self, expire): 293 | # azure expects time in UTC 294 | return datetime.utcnow() + timedelta(seconds=expire) 295 | 296 | def url(self, name, expire=None, parameters=None, mode="r"): 297 | name = self._get_valid_path(name) 298 | params = parameters or {} 299 | permission = BlobSasPermissions.from_string(mode) 300 | 301 | if expire is None: 302 | expire = self.expiration_secs 303 | 304 | credential = None 305 | if expire: 306 | expiry = self._expire_at(expire) 307 | user_delegation_key = self.get_user_delegation_key(expiry) 308 | sas_token = generate_blob_sas( 309 | self.account_name, 310 | self.azure_container, 311 | name, 312 | account_key=self.account_key, 313 | user_delegation_key=user_delegation_key, 314 | permission=permission, 315 | expiry=expiry, 316 | **params, 317 | ) 318 | credential = sas_token 319 | 320 | container_blob_url = self.client.get_blob_client(name).url 321 | 322 | if self.custom_domain: 323 | # Replace the account name with the custom domain 324 | parsed_url = urlparse(container_blob_url) 325 | container_blob_url = urlunparse( 326 | parsed_url._replace(netloc=self.custom_domain) 327 | ) 328 | 329 | return BlobClient.from_blob_url(container_blob_url, credential=credential).url 330 | 331 | def _get_content_settings_parameters(self, name, content=None): 332 | params = {} 333 | 334 | guessed_type, content_encoding = mimetypes.guess_type(name) 335 | content_type = ( 336 | _content_type(content) or guessed_type or self.default_content_type 337 | ) 338 | 339 | params["cache_control"] = self.cache_control 340 | params["content_type"] = content_type 341 | params["content_encoding"] = content_encoding 342 | 343 | params.update(self.get_object_parameters(name)) 344 | return params 345 | 346 | def get_object_parameters(self, name): 347 | """ 348 | Returns a dictionary that is passed to content settings. Override this 349 | method to adjust this on a per-object basis to set e.g ContentDisposition. 350 | 351 | By default, returns the value of AZURE_OBJECT_PARAMETERS. 
352 | """ 353 | return self.object_parameters.copy() 354 | 355 | def get_modified_time(self, name): 356 | """ 357 | Returns an (aware) datetime object containing the last modified time if 358 | USE_TZ is True, otherwise returns a naive datetime in the local timezone. 359 | """ 360 | blob_client = self.client.get_blob_client(self._get_valid_path(name)) 361 | properties = blob_client.get_blob_properties(timeout=self.timeout) 362 | if not setting("USE_TZ", False): 363 | return timezone.make_naive(properties.last_modified) 364 | 365 | tz = timezone.get_current_timezone() 366 | if timezone.is_naive(properties.last_modified): 367 | return timezone.make_aware(properties.last_modified, tz) 368 | 369 | # `last_modified` is in UTC time_zone, we 370 | # must convert it to settings time_zone 371 | return properties.last_modified.astimezone(tz) 372 | 373 | def list_all(self, path=""): 374 | """Return all files for a given path""" 375 | if path: 376 | path = self._get_valid_path(path) 377 | if path and not path.endswith("/"): 378 | path += "/" 379 | # XXX make generator, add start, end 380 | return [ 381 | blob.name 382 | for blob in self.client.list_blobs( 383 | name_starts_with=path, timeout=self.timeout 384 | ) 385 | ] 386 | 387 | def listdir(self, path=""): 388 | """ 389 | Return all files for a given path. 390 | Given that Azure can't return paths it only returns files. 391 | Works great for our little adventure. 392 | """ 393 | 394 | return [], self.list_all(path) 395 | -------------------------------------------------------------------------------- /storages/backends/dropbox.py: -------------------------------------------------------------------------------- 1 | # Dropbox storage class for Django pluggable storage system. 2 | # Author: Anthony Monthe 3 | # License: BSD 4 | 5 | import warnings 6 | from io import BytesIO 7 | from shutil import copyfileobj 8 | from tempfile import SpooledTemporaryFile 9 | 10 | from django.core.exceptions import ImproperlyConfigured 11 | from django.core.files.base import File 12 | from django.utils._os import safe_join 13 | from django.utils.deconstruct import deconstructible 14 | from dropbox import Dropbox 15 | from dropbox.exceptions import ApiError 16 | from dropbox.files import CommitInfo 17 | from dropbox.files import FolderMetadata 18 | from dropbox.files import UploadSessionCursor 19 | from dropbox.files import WriteMode 20 | 21 | from storages.base import BaseStorage 22 | from storages.utils import get_available_overwrite_name 23 | from storages.utils import setting 24 | 25 | _DEFAULT_TIMEOUT = 100 26 | _DEFAULT_MODE = "add" 27 | 28 | 29 | class DropboxStorageException(Exception): 30 | pass 31 | 32 | 33 | DropBoxStorageException = DropboxStorageException 34 | 35 | 36 | class DropboxFile(File): 37 | def __init__(self, name, storage): 38 | self.name = name 39 | self._storage = storage 40 | self._file = None 41 | 42 | def _get_file(self): 43 | if self._file is None: 44 | self._file = SpooledTemporaryFile() 45 | # As dropbox==9.3.0, the client returns a tuple 46 | # (dropbox.files.FileMetadata, requests.models.Response) 47 | file_metadata, response = self._storage.client.files_download(self.name) 48 | if response.status_code == 200: 49 | with BytesIO(response.content) as file_content: 50 | copyfileobj(file_content, self._file) 51 | else: 52 | # JIC the exception isn't caught by the dropbox client 53 | raise DropboxStorageException( 54 | "Dropbox server returned a {} response when accessing {}".format( 55 | response.status_code, self.name 56 | ) 57 | ) 58 | 
self._file.seek(0) 59 | return self._file 60 | 61 | def _set_file(self, value): 62 | self._file = value 63 | 64 | file = property(_get_file, _set_file) 65 | 66 | 67 | DropBoxFile = DropboxFile 68 | 69 | 70 | @deconstructible 71 | class DropboxStorage(BaseStorage): 72 | """Dropbox Storage class for Django pluggable storage system.""" 73 | 74 | CHUNK_SIZE = 4 * 1024 * 1024 75 | 76 | def __init__(self, oauth2_access_token=None, **settings): 77 | if oauth2_access_token is not None: 78 | settings["oauth2_access_token"] = oauth2_access_token 79 | super().__init__(**settings) 80 | 81 | if self.oauth2_access_token is None and not all( 82 | [self.app_key, self.app_secret, self.oauth2_refresh_token] 83 | ): 84 | raise ImproperlyConfigured( 85 | "You must configure an auth token at" 86 | "'settings.DROPBOX_OAUTH2_TOKEN' or " 87 | "'setting.DROPBOX_APP_KEY', " 88 | "'setting.DROPBOX_APP_SECRET' " 89 | "and 'setting.DROPBOX_OAUTH2_REFRESH_TOKEN'." 90 | ) 91 | self.client = Dropbox( 92 | self.oauth2_access_token, 93 | app_key=self.app_key, 94 | app_secret=self.app_secret, 95 | oauth2_refresh_token=self.oauth2_refresh_token, 96 | timeout=self.timeout, 97 | ) 98 | 99 | # Backwards compat 100 | if hasattr(self, "location"): 101 | warnings.warn( 102 | "Setting `root_path` with name `location` is deprecated and will be " 103 | "removed in a future version of django-storages. Please update the " 104 | "name from `location` to `root_path`", 105 | DeprecationWarning, 106 | ) 107 | self.root_path = self.location 108 | 109 | def get_default_settings(self): 110 | return { 111 | "root_path": setting("DROPBOX_ROOT_PATH", "/"), 112 | "oauth2_access_token": setting("DROPBOX_OAUTH2_TOKEN"), 113 | "app_key": setting("DROPBOX_APP_KEY"), 114 | "app_secret": setting("DROPBOX_APP_SECRET"), 115 | "oauth2_refresh_token": setting("DROPBOX_OAUTH2_REFRESH_TOKEN"), 116 | "timeout": setting("DROPBOX_TIMEOUT", _DEFAULT_TIMEOUT), 117 | "write_mode": setting("DROPBOX_WRITE_MODE", _DEFAULT_MODE), 118 | } 119 | 120 | def _full_path(self, name): 121 | if name == "/": 122 | name = "" 123 | return safe_join(self.root_path, name).replace("\\", "/") 124 | 125 | def delete(self, name): 126 | self.client.files_delete(self._full_path(name)) 127 | 128 | def exists(self, name): 129 | try: 130 | return bool(self.client.files_get_metadata(self._full_path(name))) 131 | except ApiError: 132 | return False 133 | 134 | def listdir(self, path): 135 | directories, files = [], [] 136 | full_path = self._full_path(path) 137 | 138 | if full_path == "/": 139 | full_path = "" 140 | 141 | metadata = self.client.files_list_folder(full_path) 142 | for entry in metadata.entries: 143 | if isinstance(entry, FolderMetadata): 144 | directories.append(entry.name) 145 | else: 146 | files.append(entry.name) 147 | return directories, files 148 | 149 | def size(self, name): 150 | metadata = self.client.files_get_metadata(self._full_path(name)) 151 | return metadata.size 152 | 153 | def url(self, name): 154 | try: 155 | media = self.client.files_get_temporary_link(self._full_path(name)) 156 | return media.link 157 | except ApiError: 158 | return None 159 | 160 | def _open(self, name, mode="rb"): 161 | remote_file = DropboxFile(self._full_path(name), self) 162 | return remote_file 163 | 164 | def _save(self, name, content): 165 | content.open() 166 | if content.size <= self.CHUNK_SIZE: 167 | self.client.files_upload( 168 | content.read(), self._full_path(name), mode=WriteMode(self.write_mode) 169 | ) 170 | else: 171 | self._chunked_upload(content, self._full_path(name)) 
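        # Whichever branch ran, close the handle opened at the top of
        # _save(); payloads above CHUNK_SIZE (4 MB) were streamed through an
        # upload session, smaller ones via a single files_upload call.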
172 | content.close() 173 | return name 174 | 175 | def _chunked_upload(self, content, dest_path): 176 | upload_session = self.client.files_upload_session_start( 177 | content.read(self.CHUNK_SIZE) 178 | ) 179 | cursor = UploadSessionCursor( 180 | session_id=upload_session.session_id, offset=content.tell() 181 | ) 182 | commit = CommitInfo(path=dest_path, mode=WriteMode(self.write_mode)) 183 | 184 | while content.tell() < content.size: 185 | if (content.size - content.tell()) <= self.CHUNK_SIZE: 186 | self.client.files_upload_session_finish( 187 | content.read(self.CHUNK_SIZE), cursor, commit 188 | ) 189 | else: 190 | self.client.files_upload_session_append_v2( 191 | content.read(self.CHUNK_SIZE), cursor 192 | ) 193 | cursor.offset = content.tell() 194 | 195 | def get_available_name(self, name, max_length=None): 196 | """Overwrite existing file with the same name.""" 197 | if self.write_mode == "overwrite": 198 | return get_available_overwrite_name(name, max_length) 199 | return super().get_available_name(name, max_length) 200 | 201 | 202 | DropBoxStorage = DropboxStorage 203 | -------------------------------------------------------------------------------- /storages/backends/ftp.py: -------------------------------------------------------------------------------- 1 | # FTP storage class for Django pluggable storage system. 2 | # Author: Rafal Jonca 3 | # License: MIT 4 | # Comes from http://www.djangosnippets.org/snippets/1269/ 5 | # 6 | # Usage: 7 | # 8 | # Add below to settings.py: 9 | # FTP_STORAGE_LOCATION = '[a]ftp[s]://:@:/[path]' 10 | # 11 | # In models.py you can write: 12 | # from FTPStorage import FTPStorage 13 | # fs = FTPStorage() 14 | # For a TLS configuration, you must use 'ftps' protocol 15 | # class FTPTest(models.Model): 16 | # file = models.FileField(upload_to='a/b/c/', storage=fs) 17 | 18 | import ftplib 19 | import io 20 | import os 21 | import re 22 | import urllib.parse 23 | 24 | from django.conf import settings 25 | from django.core.exceptions import ImproperlyConfigured 26 | from django.core.files.base import File 27 | from django.utils.deconstruct import deconstructible 28 | 29 | from storages.base import BaseStorage 30 | from storages.utils import setting 31 | 32 | 33 | class FTPStorageException(Exception): 34 | pass 35 | 36 | 37 | @deconstructible 38 | class FTPStorage(BaseStorage): 39 | """FTP Storage class for Django pluggable storage system.""" 40 | 41 | def __init__(self, **settings): 42 | super().__init__(**settings) 43 | if self.location is None: 44 | raise ImproperlyConfigured( 45 | "You must set a location at instantiation " 46 | "or at settings.FTP_STORAGE_LOCATION." 
47 | ) 48 | self._config = self._decode_location(self.location) 49 | self._connection = None 50 | 51 | def get_default_settings(self): 52 | return { 53 | "location": setting("FTP_STORAGE_LOCATION"), 54 | "encoding": setting("FTP_STORAGE_ENCODING", "latin-1"), 55 | "base_url": setting("BASE_URL", settings.MEDIA_URL), 56 | "allow_overwrite": setting("FTP_ALLOW_OVERWRITE", False), 57 | } 58 | 59 | def _decode_location(self, location): 60 | """Return splitted configuration data from location.""" 61 | splitted_url = re.search( 62 | r"^(?P.+)://(?P.+):(?P.+)@" 63 | r"(?P.+):(?P\d+)/(?P.*)$", 64 | location, 65 | ) 66 | 67 | if splitted_url is None: 68 | raise ImproperlyConfigured("Improperly formatted location URL") 69 | if splitted_url["scheme"] not in ("ftp", "aftp", "ftps"): 70 | raise ImproperlyConfigured("Only ftp, aftp, ftps schemes supported") 71 | if splitted_url["host"] == "": 72 | raise ImproperlyConfigured("You must at least provide host!") 73 | 74 | config = {} 75 | config["active"] = splitted_url["scheme"] == "aftp" 76 | config["secure"] = splitted_url["scheme"] == "ftps" 77 | 78 | config["path"] = splitted_url["path"] or "/" 79 | config["host"] = splitted_url["host"] 80 | config["user"] = splitted_url["user"] 81 | config["passwd"] = splitted_url["passwd"] 82 | config["port"] = int(splitted_url["port"]) 83 | 84 | return config 85 | 86 | def _start_connection(self): 87 | # Check if connection is still alive and if not, drop it. 88 | if self._connection is not None: 89 | try: 90 | self._connection.pwd() 91 | except ftplib.all_errors: 92 | self._connection = None 93 | 94 | # Real reconnect 95 | if self._connection is None: 96 | ftp = ftplib.FTP_TLS() if self._config["secure"] else ftplib.FTP() 97 | ftp.encoding = self.encoding 98 | try: 99 | ftp.connect(self._config["host"], self._config["port"]) 100 | ftp.login(self._config["user"], self._config["passwd"]) 101 | if self._config["secure"]: 102 | ftp.prot_p() 103 | if self._config["active"]: 104 | ftp.set_pasv(False) 105 | if self._config["path"] != "": 106 | ftp.cwd(self._config["path"]) 107 | self._connection = ftp 108 | return 109 | except ftplib.all_errors: 110 | raise FTPStorageException( 111 | "Connection or login error using data %s" % repr(self._config) 112 | ) 113 | 114 | def disconnect(self): 115 | self._connection.quit() 116 | self._connection = None 117 | 118 | def _mkremdirs(self, path): 119 | pwd = self._connection.pwd() 120 | path_splitted = path.split(os.path.sep) 121 | for path_part in path_splitted: 122 | try: 123 | self._connection.cwd(path_part) 124 | except ftplib.all_errors: 125 | try: 126 | self._connection.mkd(path_part) 127 | self._connection.cwd(path_part) 128 | except ftplib.all_errors: 129 | raise FTPStorageException("Cannot create directory chain %s" % path) 130 | self._connection.cwd(pwd) 131 | 132 | def _put_file(self, name, content): 133 | # Connection must be open! 
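        # The target directory is created on demand (_mkremdirs), the file is
        # streamed with storbinary in DEFAULT_CHUNK_SIZE blocks, and the
        # previous working directory is restored afterwards.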
134 | try: 135 | self._mkremdirs(os.path.dirname(name)) 136 | pwd = self._connection.pwd() 137 | self._connection.cwd(os.path.dirname(name)) 138 | self._connection.storbinary( 139 | "STOR " + os.path.basename(name), 140 | content.file, 141 | content.DEFAULT_CHUNK_SIZE, 142 | ) 143 | self._connection.cwd(pwd) 144 | except ftplib.all_errors: 145 | raise FTPStorageException("Error writing file %s" % name) 146 | 147 | def _open(self, name, mode="rb"): 148 | remote_file = FTPStorageFile(name, self, mode=mode) 149 | return remote_file 150 | 151 | def _read(self, name): 152 | memory_file = io.BytesIO() 153 | try: 154 | pwd = self._connection.pwd() 155 | self._connection.cwd(os.path.dirname(name)) 156 | self._connection.retrbinary( 157 | "RETR " + os.path.basename(name), memory_file.write 158 | ) 159 | self._connection.cwd(pwd) 160 | memory_file.seek(0) 161 | return memory_file 162 | except ftplib.all_errors: 163 | raise FTPStorageException("Error reading file %s" % name) 164 | 165 | def _save(self, name, content): 166 | content.open() 167 | self._start_connection() 168 | self._put_file(name, content) 169 | content.close() 170 | return name 171 | 172 | def _get_dir_details(self, path): 173 | # Connection must be open! 174 | try: 175 | lines = [] 176 | self._connection.retrlines("LIST " + path, lines.append) 177 | dirs = {} 178 | files = {} 179 | for line in lines: 180 | words = line.split() 181 | if len(words) < 6: 182 | continue 183 | if words[-2] == "->": 184 | continue 185 | if words[0][0] == "d": 186 | dirs[words[-1]] = 0 187 | elif words[0][0] == "-": 188 | files[words[-1]] = int(words[-5]) 189 | return dirs, files 190 | except ftplib.all_errors: 191 | raise FTPStorageException("Error getting listing for %s" % path) 192 | 193 | def listdir(self, path): 194 | self._start_connection() 195 | try: 196 | dirs, files = self._get_dir_details(path) 197 | return list(dirs.keys()), list(files.keys()) 198 | except FTPStorageException: 199 | raise 200 | 201 | def delete(self, name): 202 | if not self.exists(name): 203 | return 204 | self._start_connection() 205 | try: 206 | self._connection.delete(name) 207 | except ftplib.all_errors: 208 | raise FTPStorageException("Error when removing %s" % name) 209 | 210 | def exists(self, name): 211 | if self.allow_overwrite: 212 | return False 213 | 214 | self._start_connection() 215 | try: 216 | nlst = self._connection.nlst(os.path.dirname(name)) 217 | if name in nlst or os.path.basename(name) in nlst: 218 | return True 219 | else: 220 | return False 221 | except ftplib.error_temp: 222 | return False 223 | except ftplib.error_perm: 224 | # error_perm: 550 Can't find file 225 | return False 226 | except ftplib.all_errors: 227 | raise FTPStorageException("Error when testing existence of %s" % name) 228 | 229 | def size(self, name): 230 | self._start_connection() 231 | try: 232 | dirs, files = self._get_dir_details(os.path.dirname(name)) 233 | if os.path.basename(name) in files: 234 | return files[os.path.basename(name)] 235 | else: 236 | return 0 237 | except FTPStorageException: 238 | return 0 239 | 240 | def url(self, name): 241 | if self.base_url is None: 242 | raise ValueError("This file is not accessible via a URL.") 243 | return urllib.parse.urljoin(self.base_url, name).replace("\\", "/") 244 | 245 | 246 | class FTPStorageFile(File): 247 | def __init__(self, name, storage, mode): 248 | self.name = name 249 | self._storage = storage 250 | self._mode = mode 251 | self._is_dirty = False 252 | self.file = io.BytesIO() 253 | self._is_read = False 254 | 255 | 
@property 256 | def size(self): 257 | if not hasattr(self, "_size"): 258 | self._size = self._storage.size(self.name) 259 | return self._size 260 | 261 | def readlines(self): 262 | if not self._is_read: 263 | self._storage._start_connection() 264 | self.file = self._storage._read(self.name) 265 | self._is_read = True 266 | return self.file.readlines() 267 | 268 | def read(self, num_bytes=None): 269 | if not self._is_read: 270 | self._storage._start_connection() 271 | self.file = self._storage._read(self.name) 272 | self._is_read = True 273 | return self.file.read(num_bytes) 274 | 275 | def write(self, content): 276 | if "w" not in self._mode: 277 | raise AttributeError("File was opened for read-only access.") 278 | self.file = io.BytesIO(content) 279 | self._is_dirty = True 280 | self._is_read = True 281 | 282 | def close(self): 283 | if self._is_dirty: 284 | self._storage._start_connection() 285 | self._storage._put_file(self.name, self) 286 | self._storage.disconnect() 287 | self.file.close() 288 | -------------------------------------------------------------------------------- /storages/backends/gcloud.py: -------------------------------------------------------------------------------- 1 | import gzip 2 | import io 3 | import mimetypes 4 | from datetime import timedelta 5 | from tempfile import SpooledTemporaryFile 6 | 7 | from django.core.exceptions import ImproperlyConfigured 8 | from django.core.exceptions import SuspiciousOperation 9 | from django.core.files.base import File 10 | from django.utils import timezone 11 | from django.utils.deconstruct import deconstructible 12 | 13 | from storages.base import BaseStorage 14 | from storages.compress import CompressedFileMixin 15 | from storages.utils import check_location 16 | from storages.utils import clean_name 17 | from storages.utils import get_available_overwrite_name 18 | from storages.utils import safe_join 19 | from storages.utils import setting 20 | from storages.utils import to_bytes 21 | 22 | try: 23 | from google import auth 24 | from google.auth.credentials import TokenState 25 | from google.auth.transport import requests 26 | from google.cloud.exceptions import NotFound 27 | from google.cloud.storage import Blob 28 | from google.cloud.storage import Client 29 | from google.cloud.storage.blob import _quote 30 | from google.cloud.storage.retry import DEFAULT_RETRY 31 | except ImportError: 32 | raise ImproperlyConfigured( 33 | "Could not load Google Cloud Storage bindings.\n" 34 | "See https://github.com/GoogleCloudPlatform/gcloud-python" 35 | ) 36 | 37 | 38 | CONTENT_ENCODING = "content_encoding" 39 | CONTENT_TYPE = "content_type" 40 | 41 | 42 | class GoogleCloudFile(CompressedFileMixin, File): 43 | def __init__(self, name, mode, storage): 44 | self.name = name 45 | self.mime_type, self.mime_encoding = mimetypes.guess_type(name) 46 | self._mode = mode 47 | self._storage = storage 48 | self.blob = storage.bucket.get_blob(name, chunk_size=storage.blob_chunk_size) 49 | if not self.blob and "w" in mode: 50 | self.blob = Blob( 51 | self.name, storage.bucket, chunk_size=storage.blob_chunk_size 52 | ) 53 | self._file = None 54 | self._is_dirty = False 55 | 56 | @property 57 | def size(self): 58 | return self.blob.size 59 | 60 | def _get_file(self): 61 | if self._file is None: 62 | self._file = SpooledTemporaryFile( 63 | max_size=self._storage.max_memory_size, 64 | suffix=".GSStorageFile", 65 | dir=setting("FILE_UPLOAD_TEMP_DIR"), 66 | ) 67 | if "r" in self._mode: 68 | self._is_dirty = False 69 | # This automatically decompresses 
the file 70 | self.blob.download_to_file(self._file, checksum="crc32c") 71 | self._file.seek(0) 72 | return self._file 73 | 74 | def _set_file(self, value): 75 | self._file = value 76 | 77 | file = property(_get_file, _set_file) 78 | 79 | def read(self, num_bytes=None): 80 | if "r" not in self._mode: 81 | raise AttributeError("File was not opened in read mode.") 82 | 83 | if num_bytes is None: 84 | num_bytes = -1 85 | 86 | return super().read(num_bytes) 87 | 88 | def write(self, content): 89 | if "w" not in self._mode: 90 | raise AttributeError("File was not opened in write mode.") 91 | self._is_dirty = True 92 | return super().write(to_bytes(content)) 93 | 94 | def close(self): 95 | if self._file is not None: 96 | if self._is_dirty: 97 | blob_params = self._storage.get_object_parameters(self.name) 98 | self.blob.upload_from_file( 99 | self.file, 100 | rewind=True, 101 | content_type=self.mime_type, 102 | retry=DEFAULT_RETRY, 103 | predefined_acl=blob_params.get("acl", self._storage.default_acl), 104 | ) 105 | self._file.close() 106 | self._file = None 107 | 108 | 109 | @deconstructible 110 | class GoogleCloudStorage(BaseStorage): 111 | def __init__(self, **settings): 112 | super().__init__(**settings) 113 | 114 | check_location(self) 115 | 116 | self._bucket = None 117 | self._client = None 118 | 119 | def get_default_settings(self): 120 | return { 121 | "project_id": setting("GS_PROJECT_ID"), 122 | "credentials": setting("GS_CREDENTIALS"), 123 | "bucket_name": setting("GS_BUCKET_NAME"), 124 | "custom_endpoint": setting("GS_CUSTOM_ENDPOINT", None), 125 | "location": setting("GS_LOCATION", ""), 126 | "default_acl": setting("GS_DEFAULT_ACL"), 127 | "querystring_auth": setting("GS_QUERYSTRING_AUTH", True), 128 | "expiration": setting("GS_EXPIRATION", timedelta(seconds=86400)), 129 | "gzip": setting("GS_IS_GZIPPED", False), 130 | "gzip_content_types": setting( 131 | "GZIP_CONTENT_TYPES", 132 | ( 133 | "text/css", 134 | "text/javascript", 135 | "application/javascript", 136 | "application/x-javascript", 137 | "image/svg+xml", 138 | ), 139 | ), 140 | "file_overwrite": setting("GS_FILE_OVERWRITE", True), 141 | "object_parameters": setting("GS_OBJECT_PARAMETERS", {}), 142 | # The max amount of memory a returned file can take up before being 143 | # rolled over into a temporary file on disk. Default is 0: Do not 144 | # roll over. 145 | "max_memory_size": setting("GS_MAX_MEMORY_SIZE", 0), 146 | "blob_chunk_size": setting("GS_BLOB_CHUNK_SIZE"), 147 | # use in cases where service account key isn't available in env 148 | # in such cases, sign blob api is REQUIRED for signing data 149 | "iam_sign_blob": setting("GS_IAM_SIGN_BLOB", False), 150 | "sa_email": setting("GS_SA_EMAIL"), 151 | } 152 | 153 | @property 154 | def client(self): 155 | if self._client is None: 156 | if self.iam_sign_blob and not self.credentials: 157 | self.credentials, self.project_id = auth.default( 158 | scopes=["https://www.googleapis.com/auth/cloud-platform"] 159 | ) 160 | self._client = Client(project=self.project_id, credentials=self.credentials) 161 | return self._client 162 | 163 | @property 164 | def bucket(self): 165 | if self._bucket is None: 166 | self._bucket = self.client.bucket(self.bucket_name) 167 | return self._bucket 168 | 169 | def _normalize_name(self, name): 170 | """ 171 | Normalizes the name so that paths like /path/to/ignored/../something.txt 172 | and ./file.txt work. Note that clean_name adds ./ to some paths so 173 | they need to be fixed here. 
We check to make sure that the path pointed 174 | to is not outside the directory specified by the LOCATION setting. 175 | """ 176 | try: 177 | return safe_join(self.location, name) 178 | except ValueError: 179 | raise SuspiciousOperation("Attempted access to '%s' denied." % name) 180 | 181 | def _open(self, name, mode="rb"): 182 | name = self._normalize_name(clean_name(name)) 183 | file_object = GoogleCloudFile(name, mode, self) 184 | if not file_object.blob: 185 | raise FileNotFoundError("File does not exist: %s" % name) 186 | return file_object 187 | 188 | def _compress_content(self, content): 189 | content.seek(0) 190 | zbuf = io.BytesIO() 191 | with gzip.GzipFile(mode="wb", fileobj=zbuf, mtime=0.0) as zfile: 192 | zfile.write(to_bytes(content.read())) 193 | zbuf.seek(0) 194 | return zbuf 195 | 196 | def _save(self, name, content): 197 | cleaned_name = clean_name(name) 198 | name = self._normalize_name(cleaned_name) 199 | 200 | content.name = cleaned_name 201 | file_object = GoogleCloudFile(name, "rw", self) 202 | 203 | blob_params = self.get_object_parameters(name) 204 | if file_object.mime_encoding and CONTENT_ENCODING not in blob_params: 205 | blob_params[CONTENT_ENCODING] = file_object.mime_encoding 206 | 207 | upload_params = {} 208 | upload_params["predefined_acl"] = blob_params.pop("acl", self.default_acl) 209 | upload_params[CONTENT_TYPE] = blob_params.pop( 210 | CONTENT_TYPE, file_object.mime_type 211 | ) 212 | 213 | if ( 214 | self.gzip 215 | and upload_params[CONTENT_TYPE] in self.gzip_content_types 216 | and CONTENT_ENCODING not in blob_params 217 | ): 218 | content = self._compress_content(content) 219 | blob_params[CONTENT_ENCODING] = "gzip" 220 | 221 | for prop, val in blob_params.items(): 222 | setattr(file_object.blob, prop, val) 223 | 224 | file_object.blob.upload_from_file( 225 | content, 226 | rewind=True, 227 | retry=DEFAULT_RETRY, 228 | size=getattr(content, "size", None), 229 | **upload_params, 230 | ) 231 | return cleaned_name 232 | 233 | def get_object_parameters(self, name): 234 | """Override this to return a dictionary of overwritable blob-property to value. 235 | 236 | Returns GS_OBJECT_PARAMETERS by default. See the docs for all possible options. 
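
        A hypothetical override (sketch only) could pin a long-lived
        Cache-Control header onto anything under a "static/" prefix:

            def get_object_parameters(self, name):
                params = super().get_object_parameters(name)
                if name.startswith("static/"):
                    params["cache_control"] = "public, max-age=604800"
                return params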
237 | """ 238 | object_parameters = self.object_parameters.copy() 239 | return object_parameters 240 | 241 | def delete(self, name): 242 | name = self._normalize_name(clean_name(name)) 243 | try: 244 | self.bucket.delete_blob(name, retry=DEFAULT_RETRY) 245 | except NotFound: 246 | pass 247 | 248 | def exists(self, name): 249 | if not name: # root element aka the bucket 250 | try: 251 | self.client.get_bucket(self.bucket) 252 | return True 253 | except NotFound: 254 | return False 255 | 256 | name = self._normalize_name(clean_name(name)) 257 | return bool(self.bucket.get_blob(name)) 258 | 259 | def listdir(self, name): 260 | name = self._normalize_name(clean_name(name)) 261 | # For bucket.list_blobs and logic below name needs to end in / 262 | # but for the root path "" we leave it as an empty string 263 | if name and not name.endswith("/"): 264 | name += "/" 265 | 266 | iterator = self.bucket.list_blobs(prefix=name, delimiter="/") 267 | blobs = list(iterator) 268 | prefixes = iterator.prefixes 269 | 270 | files = [] 271 | dirs = [] 272 | 273 | for blob in blobs: 274 | parts = blob.name.split("/") 275 | files.append(parts[-1]) 276 | for folder_path in prefixes: 277 | parts = folder_path.split("/") 278 | dirs.append(parts[-2]) 279 | 280 | return list(dirs), files 281 | 282 | def _get_blob(self, name): 283 | # Wrap google.cloud.storage's blob to raise if the file doesn't exist 284 | blob = self.bucket.get_blob(name) 285 | 286 | if blob is None: 287 | raise NotFound("File does not exist: {}".format(name)) 288 | 289 | return blob 290 | 291 | def size(self, name): 292 | name = self._normalize_name(clean_name(name)) 293 | blob = self._get_blob(name) 294 | return blob.size 295 | 296 | def get_modified_time(self, name): 297 | name = self._normalize_name(clean_name(name)) 298 | blob = self._get_blob(name) 299 | updated = blob.updated 300 | return updated if setting("USE_TZ") else timezone.make_naive(updated) 301 | 302 | def get_created_time(self, name): 303 | """ 304 | Return the creation time (as a datetime) of the file specified by name. 305 | The datetime will be timezone-aware if USE_TZ=True. 306 | """ 307 | name = self._normalize_name(clean_name(name)) 308 | blob = self._get_blob(name) 309 | created = blob.time_created 310 | return created if setting("USE_TZ") else timezone.make_naive(created) 311 | 312 | def url(self, name, parameters=None): 313 | """ 314 | Return public URL or a signed URL for the Blob. 315 | 316 | To keep things snappy, the existence of blobs for public URLs is not checked. 
317 | """ 318 | name = self._normalize_name(clean_name(name)) 319 | blob = self.bucket.blob(name) 320 | blob_params = self.get_object_parameters(name) 321 | no_signed_url = ( 322 | blob_params.get("acl", self.default_acl) == "publicRead" 323 | or not self.querystring_auth 324 | ) 325 | 326 | if not self.custom_endpoint and no_signed_url: 327 | return blob.public_url 328 | elif no_signed_url: 329 | return "{storage_base_url}/{quoted_name}".format( 330 | storage_base_url=self.custom_endpoint, 331 | quoted_name=_quote(name, safe=b"/~"), 332 | ) 333 | else: 334 | default_params = { 335 | "bucket_bound_hostname": self.custom_endpoint, 336 | "expiration": self.expiration, 337 | "version": "v4", 338 | } 339 | params = parameters or {} 340 | 341 | if self.iam_sign_blob: 342 | service_account_email, access_token = self._get_iam_sign_blob_params() 343 | default_params["service_account_email"] = service_account_email 344 | default_params["access_token"] = access_token 345 | 346 | for key, value in default_params.items(): 347 | if value and key not in params: 348 | params[key] = value 349 | 350 | return blob.generate_signed_url(**params) 351 | 352 | def get_available_name(self, name, max_length=None): 353 | name = clean_name(name) 354 | if self.file_overwrite: 355 | return get_available_overwrite_name(name, max_length) 356 | return super().get_available_name(name, max_length) 357 | 358 | def _get_iam_sign_blob_params(self): 359 | if self.credentials.token_state != TokenState.FRESH: 360 | self.credentials.refresh(requests.Request()) 361 | 362 | try: 363 | service_account_email = self.credentials.service_account_email 364 | except AttributeError: 365 | service_account_email = None 366 | 367 | # sa_email has final say of service_account used to sign url if provided 368 | if self.sa_email: 369 | service_account_email = self.sa_email 370 | 371 | if not service_account_email: 372 | raise AttributeError( 373 | "Sign Blob API requires service_account_email to be available " 374 | "through ADC or setting `sa_email`" 375 | ) 376 | 377 | return service_account_email, self.credentials.token 378 | -------------------------------------------------------------------------------- /storages/backends/s3boto3.py: -------------------------------------------------------------------------------- 1 | """Backwards compat shim.""" 2 | 3 | from storages.backends.s3 import S3File as S3Boto3StorageFile # noqa 4 | from storages.backends.s3 import S3ManifestStaticStorage # noqa 5 | from storages.backends.s3 import S3StaticStorage # noqa 6 | from storages.backends.s3 import S3Storage as S3Boto3Storage # noqa 7 | -------------------------------------------------------------------------------- /storages/backends/sftpstorage.py: -------------------------------------------------------------------------------- 1 | # SFTP storage backend for Django. 
2 | # Author: Brent Tubbs 3 | # License: MIT 4 | # 5 | # Modeled on the FTP storage by Rafal Jonca 6 | 7 | import datetime 8 | import getpass 9 | import io 10 | import os 11 | import posixpath 12 | import stat 13 | from urllib.parse import urljoin 14 | 15 | import paramiko 16 | from django.core.files.base import File 17 | from django.utils.deconstruct import deconstructible 18 | from paramiko.util import ClosingContextManager 19 | 20 | from storages.base import BaseStorage 21 | from storages.utils import is_seekable 22 | from storages.utils import setting 23 | 24 | 25 | @deconstructible 26 | class SFTPStorage(ClosingContextManager, BaseStorage): 27 | def __init__(self, **settings): 28 | super().__init__(**settings) 29 | self._ssh = None 30 | self._sftp = None 31 | 32 | def get_default_settings(self): 33 | return { 34 | "host": setting("SFTP_STORAGE_HOST"), 35 | "params": setting("SFTP_STORAGE_PARAMS", {}), 36 | "interactive": setting("SFTP_STORAGE_INTERACTIVE", False), 37 | "file_mode": setting("SFTP_STORAGE_FILE_MODE"), 38 | "dir_mode": setting("SFTP_STORAGE_DIR_MODE"), 39 | "uid": setting("SFTP_STORAGE_UID"), 40 | "gid": setting("SFTP_STORAGE_GID"), 41 | "known_host_file": setting("SFTP_KNOWN_HOST_FILE"), 42 | "root_path": setting("SFTP_STORAGE_ROOT", ""), 43 | "base_url": setting("SFTP_BASE_URL") or setting("MEDIA_URL"), 44 | } 45 | 46 | def _connect(self): 47 | self._ssh = paramiko.SSHClient() 48 | 49 | known_host_file = self.known_host_file or os.path.expanduser( 50 | os.path.join("~", ".ssh", "known_hosts") 51 | ) 52 | 53 | if os.path.exists(known_host_file): 54 | self._ssh.load_host_keys(known_host_file) 55 | 56 | # and automatically add new host keys for hosts we haven't seen before. 57 | self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 58 | 59 | try: 60 | self._ssh.connect(self.host, **self.params) 61 | except paramiko.AuthenticationException as e: 62 | if self.interactive and "password" not in self.params: 63 | # If authentication has failed, and we haven't already tried 64 | # username/password, and configuration allows it, then try 65 | # again with username/password. 66 | if "username" not in self.params: 67 | self.params["username"] = getpass.getuser() 68 | self.params["password"] = getpass.getpass() 69 | self._connect() 70 | else: 71 | raise paramiko.AuthenticationException(e) 72 | 73 | if self._ssh.get_transport(): 74 | self._sftp = self._ssh.open_sftp() 75 | 76 | def close(self): 77 | if self._ssh is None: 78 | return 79 | self._ssh.close() 80 | 81 | @property 82 | def sftp(self): 83 | """Lazy SFTP connection""" 84 | if not self._sftp or not self._ssh.get_transport().is_active(): 85 | self._connect() 86 | return self._sftp 87 | 88 | def _remote_path(self, name): 89 | return posixpath.join(self.root_path, name) 90 | 91 | def _open(self, name, mode="rb"): 92 | return SFTPStorageFile(name, self, mode) 93 | 94 | def _read(self, name): 95 | remote_path = self._remote_path(name) 96 | return self.sftp.open(remote_path, "rb") 97 | 98 | def _chown(self, path, uid=None, gid=None): 99 | """Set uid and/or gid for file at path.""" 100 | # Paramiko's chown requires both uid and gid, so look them up first if 101 | # we're only supposed to set one. 
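        # Note: the "or" fallbacks below also treat an explicit uid/gid of 0
        # as "not provided" and keep the file's current owner/group instead.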
102 | if uid is None or gid is None: 103 | attr = self.sftp.stat(path) 104 | uid = uid or attr.st_uid 105 | gid = gid or attr.st_gid 106 | self.sftp.chown(path, uid, gid) 107 | 108 | def _mkdir(self, path): 109 | """Create directory, recursing up to create parent dirs if 110 | necessary.""" 111 | parent = posixpath.dirname(path) 112 | if not self._path_exists(parent): 113 | self._mkdir(parent) 114 | self.sftp.mkdir(path) 115 | 116 | if self.dir_mode is not None: 117 | self.sftp.chmod(path, self.dir_mode) 118 | 119 | if self.uid or self.gid: 120 | self._chown(path, uid=self.uid, gid=self.gid) 121 | 122 | def _save(self, name, content): 123 | """Save file via SFTP.""" 124 | if is_seekable(content): 125 | content.seek(0, os.SEEK_SET) 126 | path = self._remote_path(name) 127 | dirname = posixpath.dirname(path) 128 | if not self._path_exists(dirname): 129 | self._mkdir(dirname) 130 | 131 | self.sftp.putfo(content, path) 132 | 133 | # set file permissions if configured 134 | if self.file_mode is not None: 135 | self.sftp.chmod(path, self.file_mode) 136 | if self.uid or self.gid: 137 | self._chown(path, uid=self.uid, gid=self.gid) 138 | return name 139 | 140 | def delete(self, name): 141 | try: 142 | self.sftp.remove(self._remote_path(name)) 143 | except OSError: 144 | pass 145 | 146 | def _path_exists(self, path): 147 | """Determines whether a file existis in the sftp storage given its 148 | absolute path.""" 149 | try: 150 | self.sftp.stat(path) 151 | return True 152 | except FileNotFoundError: 153 | return False 154 | 155 | def exists(self, name): 156 | """Determines whether a file exists within the root folder of the SFTP storage 157 | (as set by `SFTP_STORAGE_ROOT`). This method differs from `._path_exists()` 158 | in that the provided `name` is assumed to be the relative path of the file 159 | within the root folder. 
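
        For example (values are illustrative), with SFTP_STORAGE_ROOT set to
        "/srv/media", exists("avatars/a.png") checks "/srv/media/avatars/a.png",
        whereas _path_exists() expects that full remote path to be passed in.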
160 | """ 161 | return self._path_exists(self._remote_path(name)) 162 | 163 | def _isdir_attr(self, item): 164 | # Return whether an item in sftp.listdir_attr results is a directory 165 | if item.st_mode is not None: 166 | return stat.S_IFMT(item.st_mode) == stat.S_IFDIR 167 | else: 168 | return False 169 | 170 | def listdir(self, path): 171 | remote_path = self._remote_path(path) 172 | dirs, files = [], [] 173 | for item in self.sftp.listdir_attr(remote_path): 174 | if self._isdir_attr(item): 175 | dirs.append(item.filename) 176 | else: 177 | files.append(item.filename) 178 | return dirs, files 179 | 180 | def size(self, name): 181 | remote_path = self._remote_path(name) 182 | return self.sftp.stat(remote_path).st_size 183 | 184 | # From Django 185 | def _datetime_from_timestamp(self, ts): 186 | tz = datetime.timezone.utc if setting("USE_TZ") else None 187 | return datetime.datetime.fromtimestamp(ts, tz=tz) 188 | 189 | def get_accessed_time(self, name): 190 | remote_path = self._remote_path(name) 191 | utime = self.sftp.stat(remote_path).st_atime 192 | return self._datetime_from_timestamp(utime) 193 | 194 | def get_modified_time(self, name): 195 | remote_path = self._remote_path(name) 196 | utime = self.sftp.stat(remote_path).st_mtime 197 | return self._datetime_from_timestamp(utime) 198 | 199 | def url(self, name): 200 | if self.base_url is None: 201 | raise ValueError("This file is not accessible via a URL.") 202 | return urljoin(self.base_url, name).replace("\\", "/") 203 | 204 | 205 | class SFTPStorageFile(File): 206 | def __init__(self, name, storage, mode): 207 | self.name = name 208 | self.mode = mode 209 | self.file = io.BytesIO() 210 | self._storage = storage 211 | self._is_read = False 212 | self._is_dirty = False 213 | 214 | @property 215 | def size(self): 216 | if not hasattr(self, "_size"): 217 | self._size = self._storage.size(self.name) 218 | return self._size 219 | 220 | def read(self, num_bytes=None): 221 | if not self._is_read: 222 | self.file = self._storage._read(self.name) 223 | self._is_read = True 224 | 225 | return self.file.read(num_bytes) 226 | 227 | def write(self, content): 228 | if "w" not in self.mode: 229 | raise AttributeError("File was opened for read-only access.") 230 | self.file = io.BytesIO(content) 231 | self._is_dirty = True 232 | self._is_read = True 233 | 234 | def open(self, mode=None): 235 | if not self.closed: 236 | self.seek(0) 237 | elif self.name and self._storage.exists(self.name): 238 | self.file = self._storage._open(self.name, mode or self.mode) 239 | else: 240 | raise ValueError("The file cannot be reopened.") 241 | 242 | def close(self): 243 | if self._is_dirty: 244 | self._storage._save(self.name, self) 245 | self.file.close() 246 | -------------------------------------------------------------------------------- /storages/base.py: -------------------------------------------------------------------------------- 1 | from django.core.exceptions import ImproperlyConfigured 2 | from django.core.files.storage import Storage 3 | 4 | 5 | class BaseStorage(Storage): 6 | def __init__(self, **settings): 7 | default_settings = self.get_default_settings() 8 | 9 | for name, value in default_settings.items(): 10 | if not hasattr(self, name): 11 | setattr(self, name, value) 12 | 13 | for name, value in settings.items(): 14 | if name not in default_settings: 15 | raise ImproperlyConfigured( 16 | "Invalid setting '{}' for {}".format( 17 | name, 18 | self.__class__.__name__, 19 | ) 20 | ) 21 | setattr(self, name, value) 22 | 23 | def 
get_default_settings(self): 24 | return {} 25 | -------------------------------------------------------------------------------- /storages/compress.py: -------------------------------------------------------------------------------- 1 | import io 2 | import zlib 3 | from gzip import GzipFile 4 | from typing import Optional 5 | 6 | from storages.utils import to_bytes 7 | 8 | 9 | class GzipCompressionWrapper(io.RawIOBase): 10 | """Wrapper for compressing file contents on the fly.""" 11 | 12 | def __init__(self, raw, level=zlib.Z_BEST_COMPRESSION): 13 | super().__init__() 14 | self.raw = raw 15 | self.compress = zlib.compressobj(level=level, wbits=31) 16 | self.leftover = bytearray() 17 | 18 | @staticmethod 19 | def readable(): 20 | return True 21 | 22 | def readinto(self, buf: bytearray) -> Optional[int]: 23 | size = len(buf) 24 | while len(self.leftover) < size: 25 | chunk = to_bytes(self.raw.read(size)) 26 | if not chunk: 27 | if self.compress: 28 | self.leftover += self.compress.flush(zlib.Z_FINISH) 29 | self.compress = None 30 | break 31 | self.leftover += self.compress.compress(chunk) 32 | if len(self.leftover) == 0: 33 | return 0 34 | output = self.leftover[:size] 35 | size = len(output) 36 | buf[:size] = output 37 | self.leftover = self.leftover[size:] 38 | return size 39 | 40 | 41 | class CompressStorageMixin: 42 | def _compress_content(self, content): 43 | """Gzip a given string content.""" 44 | return GzipCompressionWrapper(content) 45 | 46 | 47 | class CompressedFileMixin: 48 | def _decompress_file(self, mode, file, mtime=0.0): 49 | return GzipFile(mode=mode, fileobj=file, mtime=mtime) 50 | -------------------------------------------------------------------------------- /storages/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pathlib 3 | import posixpath 4 | 5 | from django.conf import settings 6 | from django.core.exceptions import ImproperlyConfigured 7 | from django.core.exceptions import SuspiciousFileOperation 8 | from django.core.files.utils import FileProxyMixin 9 | from django.core.files.utils import validate_file_name 10 | from django.utils.encoding import force_bytes 11 | 12 | 13 | def to_bytes(content): 14 | """Wrap Django's force_bytes to pass through bytearrays.""" 15 | if isinstance(content, bytearray): 16 | return content 17 | 18 | return force_bytes(content) 19 | 20 | 21 | def setting(name, default=None): 22 | """ 23 | Helper function to get a Django setting by name. If setting doesn't exists 24 | it will return a default. 25 | 26 | :param name: Name of setting 27 | :type name: str 28 | :param default: Value if setting is unfound 29 | :returns: Setting's value 30 | """ 31 | return getattr(settings, name, default) 32 | 33 | 34 | def clean_name(name): 35 | """ 36 | Normalize the name. 37 | 38 | Includes cleaning up Windows style paths, ensuring an ending trailing slash, 39 | and coercing from pathlib.PurePath. 40 | """ 41 | if isinstance(name, pathlib.PurePath): 42 | name = str(name) 43 | 44 | # Normalize Windows style paths 45 | clean_name = posixpath.normpath(name).replace("\\", "/") 46 | 47 | # os.path.normpath() can strip trailing slashes so we implement 48 | # a workaround here. 49 | if name.endswith("/") and not clean_name.endswith("/"): 50 | # Add a trailing slash as it was stripped. 
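        # e.g. "uploads/" should stay "uploads/" even though
        # posixpath.normpath("uploads/") returns "uploads".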
51 | clean_name += "/" 52 | 53 | # Given an empty string, os.path.normpath() will return ., which we don't want 54 | if clean_name == ".": 55 | clean_name = "" 56 | 57 | return clean_name 58 | 59 | 60 | def safe_join(base, *paths): 61 | """ 62 | A version of django.utils._os.safe_join for S3 paths. 63 | 64 | Joins one or more path components to the base path component 65 | intelligently. Returns a normalized version of the final path. 66 | 67 | The final path must be located inside of the base path component 68 | (otherwise a ValueError is raised). 69 | 70 | Paths outside the base path indicate a possible security 71 | sensitive operation. 72 | """ 73 | base_path = base 74 | base_path = base_path.rstrip("/") 75 | paths = list(paths) 76 | 77 | final_path = base_path + "/" 78 | for path in paths: 79 | _final_path = posixpath.normpath(posixpath.join(final_path, path)) 80 | # posixpath.normpath() strips the trailing /. Add it back. 81 | if path.endswith("/") or _final_path + "/" == final_path: 82 | _final_path += "/" 83 | final_path = _final_path 84 | if final_path == base_path: 85 | final_path += "/" 86 | 87 | # Ensure final_path starts with base_path and that the next character after 88 | # the base path is /. 89 | base_path_len = len(base_path) 90 | if not final_path.startswith(base_path) or final_path[base_path_len] != "/": 91 | raise ValueError( 92 | "the joined path is located outside of the base path component" 93 | ) 94 | 95 | return final_path.lstrip("/") 96 | 97 | 98 | def check_location(storage): 99 | if storage.location.startswith("/"): 100 | correct = storage.location.lstrip("/") 101 | raise ImproperlyConfigured( 102 | ( 103 | "{}.location cannot begin with a leading slash. Found '{}'. Use '{}' " 104 | "instead." 105 | ).format( 106 | storage.__class__.__name__, 107 | storage.location, 108 | correct, 109 | ) 110 | ) 111 | 112 | 113 | def lookup_env(names): 114 | """ 115 | Look up for names in environment. Returns the first element 116 | found. 117 | """ 118 | for name in names: 119 | value = os.environ.get(name) 120 | if value: 121 | return value 122 | 123 | 124 | def get_available_overwrite_name(name, max_length): 125 | # This is adapted from Django, and will be removed once 126 | # Django 5.1 is the lowest supported version 127 | dir_name, file_name = os.path.split(name) 128 | if ".." in pathlib.PurePath(dir_name).parts: 129 | raise SuspiciousFileOperation( 130 | "Detected path traversal attempt in '%s'" % dir_name 131 | ) 132 | validate_file_name(file_name, allow_relative_path=True) 133 | 134 | if max_length is None or len(name) <= max_length: 135 | return name 136 | 137 | file_root, file_ext = os.path.splitext(file_name) 138 | truncation = len(name) - max_length 139 | 140 | file_root = file_root[:-truncation] 141 | if not file_root: 142 | raise SuspiciousFileOperation( 143 | 'Storage tried to truncate away entire filename "%s". ' 144 | "Please make sure that the corresponding file field " 145 | 'allows sufficient "max_length".' % name 146 | ) 147 | name = os.path.join(dir_name, "{}{}".format(file_root, file_ext)) 148 | validate_file_name(name, allow_relative_path=True) 149 | return name 150 | 151 | 152 | def is_seekable(file_object): 153 | return not hasattr(file_object, "seekable") or file_object.seekable() 154 | 155 | 156 | class ReadBytesWrapper(FileProxyMixin): 157 | """ 158 | A wrapper for a file-like object, that makes read() always returns bytes. 159 | """ 160 | 161 | def __init__(self, file, encoding=None): 162 | """ 163 | :param file: The file-like object to wrap. 
164 | :param encoding: Specify the encoding to use when file.read() returns strings. 165 | If not provided will default to file.encoding, of if that's not available, 166 | to utf-8. 167 | """ 168 | self.file = file 169 | self._encoding = encoding or getattr(file, "encoding", None) or "utf-8" 170 | 171 | def read(self, *args, **kwargs): 172 | content = self.file.read(*args, **kwargs) 173 | 174 | if not isinstance(content, bytes): 175 | content = content.encode(self._encoding) 176 | return content 177 | 178 | def close(self): 179 | self.file.close() 180 | 181 | def readable(self): 182 | return True 183 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jschneier/django-storages/9de3cc9da4dbd67fcc56ecafc7cbf738af90136c/tests/__init__.py -------------------------------------------------------------------------------- /tests/settings.py: -------------------------------------------------------------------------------- 1 | MEDIA_URL = "/media/" 2 | 3 | DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} 4 | 5 | SECRET_KEY = "hailthesunshine" 6 | 7 | USE_TZ = True 8 | 9 | # the following test settings are required for moto to work. 10 | AWS_STORAGE_BUCKET_NAME = "test-bucket" 11 | -------------------------------------------------------------------------------- /tests/test_dropbox.py: -------------------------------------------------------------------------------- 1 | import io 2 | from datetime import datetime 3 | from unittest import mock 4 | 5 | from django.core.exceptions import ImproperlyConfigured 6 | from django.core.exceptions import SuspiciousFileOperation 7 | from django.core.files.base import File 8 | from django.test import TestCase 9 | from django.test import override_settings 10 | from dropbox.files import FileMetadata 11 | from dropbox.files import FolderMetadata 12 | from dropbox.files import GetTemporaryLinkResult 13 | from requests.models import Response 14 | 15 | from storages.backends import dropbox 16 | 17 | FILE_DATE = datetime(2015, 8, 24, 15, 6, 41) 18 | FILE_METADATA_MOCK = mock.MagicMock(spec=FileMetadata) 19 | FILE_METADATA_MOCK.size = 4 20 | FILE_METADATA_MOCK.client_modified = FILE_DATE 21 | FILE_METADATA_MOCK.server_modified = FILE_DATE 22 | FILE_METADATA_MOCK.path_lower = "/foo.txt" 23 | FILE_METADATA_MOCK.path_display = "/foo.txt" 24 | FILE_METADATA_MOCK.name = "foo.txt" 25 | FILE_METADATA_MOCK.rev = "012c0000000150c838f0" 26 | FILE_METADATA_MOCK.content_hash = ( 27 | "3865695d47c02576e8578df30d56bb3faf737c11044d804f09ffb6484453020f" 28 | ) 29 | 30 | FOLDER_METADATA_MOCK = mock.MagicMock(spec=FolderMetadata) 31 | FOLDER_METADATA_MOCK.name = "bar" 32 | 33 | FILES_MOCK = mock.MagicMock(spec=FolderMetadata) 34 | FILES_MOCK.entries = [FILE_METADATA_MOCK, FOLDER_METADATA_MOCK] 35 | 36 | FILE_MEDIA_MOCK = mock.MagicMock(spec=GetTemporaryLinkResult) 37 | FILE_MEDIA_MOCK.link = "https://dl.dropboxusercontent.com/1/view/foo" 38 | 39 | FILES_EMPTY_MOCK = mock.MagicMock(spec=FolderMetadata) 40 | FILES_EMPTY_MOCK.entries = [] 41 | 42 | RESPONSE_200_MOCK = mock.MagicMock(spec=Response) 43 | RESPONSE_200_MOCK.status_code = 200 44 | RESPONSE_200_MOCK.content = b"bar" 45 | 46 | RESPONSE_500_MOCK = mock.MagicMock(spec=Response) 47 | RESPONSE_500_MOCK.status_code = 500 48 | 49 | 50 | class DropboxTest(TestCase): 51 | def setUp(self, *args): 52 | self.storage = 
dropbox.DropboxStorage("foo") 53 | 54 | def test_no_access_token(self, *args): 55 | with self.assertRaises(ImproperlyConfigured): 56 | dropbox.DropboxStorage(None) 57 | 58 | def test_setting_access_token(self): 59 | with override_settings(DROPBOX_OAUTH2_TOKEN="abc"): 60 | storage = dropbox.DropboxStorage() 61 | self.assertEqual(storage.oauth2_access_token, "abc") 62 | 63 | def test_refresh_token_app_key_no_app_secret(self, *args): 64 | inputs = { 65 | "oauth2_refresh_token": "foo", 66 | "app_key": "bar", 67 | } 68 | with self.assertRaises(ImproperlyConfigured): 69 | dropbox.DropboxStorage(**inputs) 70 | 71 | def test_refresh_token_app_secret_no_app_key(self, *args): 72 | inputs = { 73 | "oauth2_refresh_token": "foo", 74 | "app_secret": "bar", 75 | } 76 | with self.assertRaises(ImproperlyConfigured): 77 | dropbox.DropboxStorage(**inputs) 78 | 79 | def test_app_key_app_secret_no_refresh_token(self, *args): 80 | inputs = { 81 | "app_key": "foo", 82 | "app_secret": "bar", 83 | } 84 | with self.assertRaises(ImproperlyConfigured): 85 | dropbox.DropboxStorage(**inputs) 86 | 87 | @mock.patch("dropbox.Dropbox.files_delete", return_value=FILE_METADATA_MOCK) 88 | def test_delete(self, *args): 89 | self.storage.delete("foo") 90 | 91 | @mock.patch("dropbox.Dropbox.files_get_metadata", return_value=[FILE_METADATA_MOCK]) 92 | def test_exists(self, *args): 93 | exists = self.storage.exists("foo") 94 | self.assertTrue(exists) 95 | 96 | @mock.patch("dropbox.Dropbox.files_get_metadata", return_value=[]) 97 | def test_not_exists(self, *args): 98 | exists = self.storage.exists("bar") 99 | self.assertFalse(exists) 100 | 101 | @mock.patch("dropbox.Dropbox.files_list_folder", return_value=FILES_MOCK) 102 | def test_listdir(self, *args): 103 | dirs, files = self.storage.listdir("/") 104 | dirs2, files2 = self.storage.listdir("") 105 | self.assertEqual(dirs, dirs2) 106 | self.assertEqual(files2, files2) 107 | 108 | self.assertGreater(len(dirs), 0) 109 | self.assertGreater(len(files), 0) 110 | self.assertEqual(dirs[0], "bar") 111 | self.assertEqual(files[0], "foo.txt") 112 | 113 | @mock.patch("dropbox.Dropbox.files_get_metadata", return_value=FILE_METADATA_MOCK) 114 | def test_size(self, *args): 115 | size = self.storage.size("foo") 116 | self.assertEqual(size, FILE_METADATA_MOCK.size) 117 | 118 | def test_open(self, *args): 119 | obj = self.storage._open("foo") 120 | self.assertIsInstance(obj, File) 121 | 122 | @mock.patch("dropbox.Dropbox.files_upload", return_value="foo") 123 | @mock.patch("dropbox.Dropbox.files_get_metadata", return_value=None) 124 | def test_save(self, files_upload, *args): 125 | name = self.storage.save("foo", File(io.BytesIO(b"bar"), "foo")) 126 | self.assertTrue(files_upload.called) 127 | self.assertEqual(name, "foo") 128 | 129 | @mock.patch("dropbox.Dropbox.files_upload") 130 | @mock.patch("dropbox.Dropbox.files_upload_session_finish") 131 | @mock.patch("dropbox.Dropbox.files_upload_session_append_v2") 132 | @mock.patch( 133 | "dropbox.Dropbox.files_upload_session_start", 134 | return_value=mock.MagicMock(session_id="foo"), 135 | ) 136 | def test_chunked_upload(self, start, append, finish, upload): 137 | large_file = File(io.BytesIO(b"bar" * self.storage.CHUNK_SIZE), "foo") 138 | self.storage._save("foo", large_file) 139 | self.assertTrue(start.called) 140 | self.assertTrue(append.called) 141 | self.assertTrue(finish.called) 142 | self.assertFalse(upload.called) 143 | 144 | @mock.patch( 145 | "dropbox.Dropbox.files_get_temporary_link", return_value=FILE_MEDIA_MOCK 146 | ) 147 | def 
test_url(self, *args): 148 | url = self.storage.url("foo") 149 | self.assertEqual(url, FILE_MEDIA_MOCK.link) 150 | 151 | def test_formats(self, *args): 152 | self.storage = dropbox.DropboxStorage("foo") 153 | files = self.storage._full_path("") 154 | self.assertEqual(files, self.storage._full_path("/")) 155 | self.assertEqual(files, self.storage._full_path(".")) 156 | self.assertEqual(files, self.storage._full_path("..")) 157 | self.assertEqual(files, self.storage._full_path("../..")) 158 | 159 | 160 | class DropboxFileTest(TestCase): 161 | def setUp(self, *args): 162 | self.storage = dropbox.DropboxStorage("foo") 163 | self.file = dropbox.DropboxFile("/foo.txt", self.storage) 164 | 165 | @mock.patch( 166 | "dropbox.Dropbox.files_download", 167 | return_value=(FILE_METADATA_MOCK, RESPONSE_200_MOCK), 168 | ) 169 | def test_read(self, *args): 170 | with self.storage.open("foo.txt") as file: 171 | self.assertEqual(file.read(), b"bar") 172 | 173 | @mock.patch( 174 | "dropbox.Dropbox.files_download", 175 | return_value=(FILE_METADATA_MOCK, RESPONSE_500_MOCK), 176 | ) 177 | def test_server_bad_response(self, *args): 178 | with self.assertRaises(dropbox.DropboxStorageException): 179 | with self.storage.open("foo.txt") as file: 180 | file.read() 181 | 182 | 183 | @mock.patch("dropbox.Dropbox.files_list_folder", return_value=FILES_EMPTY_MOCK) 184 | class DropboxRootPathTest(TestCase): 185 | def test_jailed(self, *args): 186 | self.storage = dropbox.DropboxStorage("foo", root_path="/bar") 187 | dirs, files = self.storage.listdir("/") 188 | self.assertFalse(dirs) 189 | self.assertFalse(files) 190 | 191 | @mock.patch("dropbox.Dropbox.files_upload", return_value="foo") 192 | @mock.patch("dropbox.Dropbox.files_get_metadata", return_value=None) 193 | def test_saves(self, *args): 194 | self.storage = dropbox.DropboxStorage("foo", root_path="/app.qoo.foo/") 195 | for filename in ["xyz", "quark"]: 196 | with self.subTest(filename=filename): 197 | name = self.storage.save(filename, File(io.BytesIO(b"abc"), "def")) 198 | self.assertEqual(name, filename) 199 | 200 | def test_suspicious(self, *args): 201 | self.storage = dropbox.DropboxStorage("foo", root_path="/bar") 202 | with self.assertRaises((SuspiciousFileOperation, ValueError)): 203 | self.storage._full_path("..") 204 | 205 | def test_formats(self, *args): 206 | self.storage = dropbox.DropboxStorage("foo", root_path="/bar") 207 | files = self.storage._full_path("") 208 | self.assertEqual(files, self.storage._full_path("/")) 209 | self.assertEqual(files, self.storage._full_path(".")) 210 | -------------------------------------------------------------------------------- /tests/test_files/windows-1252-encoded.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jschneier/django-storages/9de3cc9da4dbd67fcc56ecafc7cbf738af90136c/tests/test_files/windows-1252-encoded.txt -------------------------------------------------------------------------------- /tests/test_ftp.py: -------------------------------------------------------------------------------- 1 | import io 2 | from unittest.mock import patch 3 | 4 | from django.core.exceptions import ImproperlyConfigured 5 | from django.core.files.base import File 6 | from django.test import TestCase 7 | from django.test import override_settings 8 | 9 | from storages.backends import ftp 10 | 11 | USER = "foo" 12 | PASSWORD = "b@r" 13 | HOST = "localhost" 14 | PORT = 2121 15 | 16 | LIST_FIXTURE = """drwxr-xr-x 2 ftp nogroup 4096 Jul 27 09:46 dir 17 | 
-rw-r--r-- 1 ftp nogroup 1024 Jul 27 09:45 fi 18 | -rw-r--r-- 1 ftp nogroup 2048 Jul 27 09:50 fi2""" 19 | 20 | 21 | def geturl(scheme="ftp", pwd=PASSWORD): 22 | return URL_TEMPLATE.format( 23 | scheme=scheme, user=USER, passwd=pwd, host=HOST, port=PORT 24 | ) 25 | 26 | 27 | URL_TEMPLATE = "{scheme}://{user}:{passwd}@{host}:{port}/" 28 | URL = geturl() 29 | 30 | 31 | def list_retrlines(cmd, func): 32 | for line in LIST_FIXTURE.splitlines(): 33 | func(line) 34 | 35 | 36 | class FTPTest(TestCase): 37 | def setUp(self): 38 | self.storage = ftp.FTPStorage(location=URL) 39 | 40 | def test_init_no_location(self): 41 | with self.assertRaises(ImproperlyConfigured): 42 | ftp.FTPStorage() 43 | 44 | @patch("storages.backends.ftp.setting", return_value=URL) 45 | def test_init_location_from_setting(self, mock_setting): 46 | storage = ftp.FTPStorage() 47 | self.assertTrue(mock_setting.called) 48 | self.assertEqual(storage.location, URL) 49 | 50 | def test_decode_location(self): 51 | config = self.storage._decode_location(URL) 52 | wanted_config = { 53 | "passwd": "b@r", 54 | "host": "localhost", 55 | "user": "foo", 56 | "active": False, 57 | "path": "/", 58 | "port": 2121, 59 | "secure": False, 60 | } 61 | self.assertEqual(config, wanted_config) 62 | # Test active FTP 63 | config = self.storage._decode_location("a" + URL) 64 | wanted_config = { 65 | "passwd": "b@r", 66 | "host": "localhost", 67 | "user": "foo", 68 | "active": True, 69 | "path": "/", 70 | "port": 2121, 71 | "secure": False, 72 | } 73 | self.assertEqual(config, wanted_config) 74 | 75 | def test_decode_location_error(self): 76 | with self.assertRaises(ImproperlyConfigured): 77 | self.storage._decode_location("foo") 78 | with self.assertRaises(ImproperlyConfigured): 79 | self.storage._decode_location("http://foo.pt") 80 | 81 | def test_decode_location_urlchars_password(self): 82 | self.storage._decode_location(geturl(pwd="b#r")) 83 | 84 | @override_settings(FTP_STORAGE_LOCATION=URL) 85 | def test_override_settings(self): 86 | storage = ftp.FTPStorage() 87 | self.assertEqual(storage.encoding, "latin-1") 88 | with override_settings(FTP_STORAGE_ENCODING="utf-8"): 89 | storage = ftp.FTPStorage() 90 | self.assertEqual(storage.encoding, "utf-8") 91 | storage = ftp.FTPStorage(encoding="utf-8") 92 | self.assertEqual(storage.encoding, "utf-8") 93 | 94 | @patch("ftplib.FTP") 95 | def test_start_connection(self, mock_ftp): 96 | self.storage._start_connection() 97 | self.assertIsNotNone(self.storage._connection) 98 | # Start active 99 | storage = ftp.FTPStorage(location="a" + URL) 100 | storage._start_connection() 101 | 102 | @patch("ftplib.FTP", **{"return_value.pwd.side_effect": IOError()}) 103 | def test_start_connection_timeout(self, mock_ftp): 104 | self.storage._start_connection() 105 | self.assertIsNotNone(self.storage._connection) 106 | 107 | @patch("ftplib.FTP", **{"return_value.connect.side_effect": IOError()}) 108 | def test_start_connection_error(self, mock_ftp): 109 | with self.assertRaises(ftp.FTPStorageException): 110 | self.storage._start_connection() 111 | 112 | @patch("ftplib.FTP", **{"return_value.quit.return_value": None}) 113 | def test_disconnect(self, mock_ftp_quit): 114 | self.storage._start_connection() 115 | self.storage.disconnect() 116 | self.assertIsNone(self.storage._connection) 117 | 118 | @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) 119 | def test_mkremdirs(self, mock_ftp): 120 | self.storage._start_connection() 121 | self.storage._mkremdirs("foo/bar") 122 | 123 | @patch("ftplib.FTP", 
**{"return_value.pwd.return_value": "foo"}) 124 | def test_mkremdirs_n_subdirectories(self, mock_ftp): 125 | self.storage._start_connection() 126 | self.storage._mkremdirs("foo/bar/null") 127 | 128 | @patch( 129 | "ftplib.FTP", 130 | **{ 131 | "return_value.pwd.return_value": "foo", 132 | "return_value.storbinary.return_value": None, 133 | }, 134 | ) 135 | def test_put_file(self, mock_ftp): 136 | self.storage._start_connection() 137 | self.storage._put_file("foo", File(io.BytesIO(b"foo"), "foo")) 138 | 139 | @patch( 140 | "ftplib.FTP", 141 | **{ 142 | "return_value.pwd.return_value": "foo", 143 | "return_value.storbinary.side_effect": IOError(), 144 | }, 145 | ) 146 | def test_put_file_error(self, mock_ftp): 147 | self.storage._start_connection() 148 | with self.assertRaises(ftp.FTPStorageException): 149 | self.storage._put_file("foo", File(io.BytesIO(b"foo"), "foo")) 150 | 151 | def test_open(self): 152 | remote_file = self.storage._open("foo") 153 | self.assertIsInstance(remote_file, ftp.FTPStorageFile) 154 | 155 | @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) 156 | def test_read(self, mock_ftp): 157 | self.storage._start_connection() 158 | self.storage._read("foo") 159 | 160 | @patch("ftplib.FTP", **{"return_value.pwd.side_effect": IOError()}) 161 | def test_read2(self, mock_ftp): 162 | self.storage._start_connection() 163 | with self.assertRaises(ftp.FTPStorageException): 164 | self.storage._read("foo") 165 | 166 | @patch( 167 | "ftplib.FTP", 168 | **{ 169 | "return_value.pwd.return_value": "foo", 170 | "return_value.storbinary.return_value": None, 171 | }, 172 | ) 173 | def test_save(self, mock_ftp): 174 | self.storage._save("foo", File(io.BytesIO(b"foo"), "foo")) 175 | 176 | @patch("ftplib.FTP", **{"return_value.retrlines": list_retrlines}) 177 | def test_listdir(self, mock_retrlines): 178 | dirs, files = self.storage.listdir("/") 179 | self.assertEqual(len(dirs), 1) 180 | self.assertEqual(dirs, ["dir"]) 181 | self.assertEqual(len(files), 2) 182 | self.assertEqual(sorted(files), sorted(["fi", "fi2"])) 183 | 184 | @patch("ftplib.FTP", **{"return_value.retrlines.side_effect": IOError()}) 185 | def test_listdir_error(self, mock_ftp): 186 | with self.assertRaises(ftp.FTPStorageException): 187 | self.storage.listdir("/") 188 | 189 | @patch("ftplib.FTP", **{"return_value.nlst.return_value": ["foo", "foo2"]}) 190 | def test_exists(self, mock_ftp): 191 | self.assertTrue(self.storage.exists("foo")) 192 | self.assertFalse(self.storage.exists("bar")) 193 | 194 | @patch("ftplib.FTP", **{"return_value.nlst.side_effect": IOError()}) 195 | def test_exists_error(self, mock_ftp): 196 | with self.assertRaises(ftp.FTPStorageException): 197 | self.storage.exists("foo") 198 | 199 | @patch("ftplib.FTP", **{"return_value.nlst.return_value": ["foo", "foo2"]}) 200 | def test_exists_overwrite(self, mock_ftp): 201 | with override_settings(FTP_ALLOW_OVERWRITE=True): 202 | storage = ftp.FTPStorage(location=URL) 203 | self.assertFalse(storage.exists("foo")) 204 | 205 | @patch( 206 | "ftplib.FTP", 207 | **{ 208 | "return_value.delete.return_value": None, 209 | "return_value.nlst.return_value": ["foo", "foo2"], 210 | }, 211 | ) 212 | def test_delete(self, mock_ftp): 213 | self.storage.delete("foo") 214 | self.assertTrue(mock_ftp.return_value.delete.called) 215 | 216 | @patch("ftplib.FTP", **{"return_value.retrlines": list_retrlines}) 217 | def test_size(self, mock_ftp): 218 | self.assertEqual(1024, self.storage.size("fi")) 219 | self.assertEqual(2048, self.storage.size("fi2")) 220 | 
self.assertEqual(0, self.storage.size("bar")) 221 | 222 | @patch("ftplib.FTP", **{"return_value.retrlines.side_effect": IOError()}) 223 | def test_size_error(self, mock_ftp): 224 | self.assertEqual(0, self.storage.size("foo")) 225 | 226 | def test_url(self): 227 | with self.assertRaises(ValueError): 228 | self.storage.base_url = None 229 | self.storage.url("foo") 230 | self.storage = ftp.FTPStorage(location=URL, base_url="http://foo.bar/") 231 | self.assertEqual("http://foo.bar/foo", self.storage.url("foo")) 232 | 233 | 234 | class FTPStorageFileTest(TestCase): 235 | def setUp(self): 236 | self.storage = ftp.FTPStorage(location=URL) 237 | 238 | @patch("ftplib.FTP", **{"return_value.retrlines": list_retrlines}) 239 | def test_size(self, mock_ftp): 240 | file_ = ftp.FTPStorageFile("fi", self.storage, "wb") 241 | self.assertEqual(file_.size, 1024) 242 | 243 | @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) 244 | @patch("storages.backends.ftp.FTPStorage._read", return_value=io.BytesIO(b"foo")) 245 | def test_readlines(self, mock_ftp, mock_storage): 246 | file_ = ftp.FTPStorageFile("fi", self.storage, "wb") 247 | self.assertEqual([b"foo"], file_.readlines()) 248 | 249 | @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) 250 | @patch("storages.backends.ftp.FTPStorage._read", return_value=io.BytesIO(b"foo")) 251 | def test_read(self, mock_ftp, mock_storage): 252 | file_ = ftp.FTPStorageFile("fi", self.storage, "wb") 253 | self.assertEqual(b"foo", file_.read()) 254 | 255 | def test_write(self): 256 | file_ = ftp.FTPStorageFile("fi", self.storage, "wb") 257 | file_.write(b"foo") 258 | file_.seek(0) 259 | self.assertEqual(file_.file.read(), b"foo") 260 | 261 | @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) 262 | @patch("storages.backends.ftp.FTPStorage._read", return_value=io.BytesIO(b"foo")) 263 | def test_close(self, mock_ftp, mock_storage): 264 | file_ = ftp.FTPStorageFile("fi", self.storage, "wb") 265 | file_.is_dirty = True 266 | file_.read() 267 | file_.close() 268 | 269 | 270 | class FTPTLSTest(TestCase): 271 | def setUp(self): 272 | self.storage = ftp.FTPStorage(location=geturl(scheme="ftps")) 273 | 274 | def test_decode_location(self): 275 | wanted_config = { 276 | "passwd": "b@r", 277 | "host": "localhost", 278 | "user": "foo", 279 | "active": False, 280 | "path": "/", 281 | "port": 2121, 282 | "secure": True, 283 | } 284 | self.assertEqual(self.storage._config, wanted_config) 285 | 286 | @patch("ftplib.FTP_TLS") 287 | def test_start_connection_calls_prot_p(self, mock_ftp): 288 | self.storage._start_connection() 289 | self.storage._connection.prot_p.assert_called_once() 290 | -------------------------------------------------------------------------------- /tests/test_sftp.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | import socket 4 | import stat 5 | from unittest.mock import MagicMock 6 | from unittest.mock import patch 7 | 8 | import paramiko 9 | from django.core.files.base import File 10 | from django.test import TestCase 11 | from django.test import override_settings 12 | 13 | from storages.backends import sftpstorage 14 | from tests.utils import NonSeekableContentFile 15 | 16 | 17 | class SFTPStorageTest(TestCase): 18 | def setUp(self): 19 | self.storage = sftpstorage.SFTPStorage(host="foo", root_path="root") 20 | 21 | def test_init(self): 22 | pass 23 | 24 | @patch("paramiko.SSHClient") 25 | def test_no_known_hosts_file(self, mock_ssh): 26 | 
self.storage.known_host_file = "not_existed_file" 27 | self.storage._connect() 28 | self.assertEqual("foo", mock_ssh.return_value.connect.call_args[0][0]) 29 | 30 | @patch.object(os.path, "expanduser", return_value="/path/to/known_hosts") 31 | @patch.object(os.path, "exists", return_value=True) 32 | @patch("paramiko.SSHClient") 33 | def test_error_when_known_hosts_file_not_defined(self, mock_ssh, *a): 34 | self.storage._connect() 35 | self.storage._ssh.load_host_keys.assert_called_once_with("/path/to/known_hosts") 36 | 37 | @patch("paramiko.SSHClient") 38 | def test_connect(self, mock_ssh): 39 | self.storage._connect() 40 | self.assertEqual("foo", mock_ssh.return_value.connect.call_args[0][0]) 41 | 42 | @patch("paramiko.SSHClient") 43 | def test_close_unopened(self, mock_ssh): 44 | with self.storage: 45 | pass 46 | mock_ssh.return_value.close.assert_not_called() 47 | 48 | @patch("paramiko.SSHClient") 49 | def test_close_opened(self, mock_ssh): 50 | with self.storage as storage: 51 | storage._connect() 52 | mock_ssh.return_value.close.assert_called_once_with() 53 | 54 | def test_open(self): 55 | file_ = self.storage._open("foo") 56 | self.assertIsInstance(file_, sftpstorage.SFTPStorageFile) 57 | 58 | @patch("storages.backends.sftpstorage.SFTPStorage.sftp") 59 | def test_read(self, mock_sftp): 60 | self.storage._read("foo") 61 | self.assertTrue(mock_sftp.open.called) 62 | 63 | @patch("storages.backends.sftpstorage.SFTPStorage.sftp") 64 | def test_chown(self, mock_sftp): 65 | self.storage._chown("foo", 1, 1) 66 | self.assertEqual(mock_sftp.chown.call_args[0], ("foo", 1, 1)) 67 | 68 | @patch("storages.backends.sftpstorage.SFTPStorage.sftp") 69 | def test_mkdir(self, mock_sftp): 70 | self.storage._mkdir("foo") 71 | self.assertEqual(mock_sftp.mkdir.call_args[0], ("foo",)) 72 | 73 | @patch( 74 | "storages.backends.sftpstorage.SFTPStorage.sftp", 75 | **{"stat.side_effect": (FileNotFoundError(), True)}, 76 | ) 77 | def test_mkdir_parent(self, mock_sftp): 78 | self.storage._mkdir("bar/foo") 79 | self.assertEqual(mock_sftp.mkdir.call_args_list[0][0], ("bar",)) 80 | self.assertEqual(mock_sftp.mkdir.call_args_list[1][0], ("bar/foo",)) 81 | 82 | @patch("storages.backends.sftpstorage.SFTPStorage.sftp") 83 | def test_save(self, mock_sftp): 84 | self.storage._save("foo", File(io.BytesIO(b"foo"), "foo")) 85 | self.assertTrue(mock_sftp.putfo.called) 86 | 87 | @patch("storages.backends.sftpstorage.SFTPStorage.sftp") 88 | def test_save_non_seekable(self, mock_sftp): 89 | self.storage._save("foo", NonSeekableContentFile("foo")) 90 | self.assertTrue(mock_sftp.putfo.called) 91 | 92 | @patch( 93 | "storages.backends.sftpstorage.SFTPStorage.sftp", 94 | **{"stat.side_effect": (FileNotFoundError(), True)}, 95 | ) 96 | def test_save_in_subdir(self, mock_sftp): 97 | self.storage._save("bar/foo", File(io.BytesIO(b"foo"), "foo")) 98 | self.assertEqual(mock_sftp.stat.call_args_list[0][0], ("root/bar",)) 99 | self.assertEqual(mock_sftp.mkdir.call_args_list[0][0], ("root/bar",)) 100 | self.assertTrue(mock_sftp.putfo.called) 101 | 102 | @patch("storages.backends.sftpstorage.SFTPStorage.sftp") 103 | def test_delete(self, mock_sftp): 104 | self.storage.delete("foo") 105 | self.assertEqual(mock_sftp.remove.call_args_list[0][0], ("root/foo",)) 106 | 107 | @patch("storages.backends.sftpstorage.SFTPStorage.sftp") 108 | def test_path_exists(self, mock_sftp): 109 | self.assertTrue(self.storage._path_exists("root/foo")) 110 | 111 | @patch("storages.backends.sftpstorage.SFTPStorage.sftp") 112 | def test_exists(self, mock_sftp): 113 | 
self.assertTrue(self.storage.exists("foo")) 114 | 115 | @patch( 116 | "storages.backends.sftpstorage.SFTPStorage.sftp", 117 | **{"stat.side_effect": FileNotFoundError()}, 118 | ) 119 | def test_not_exists(self, mock_sftp): 120 | self.assertFalse(self.storage.exists("foo")) 121 | 122 | @patch( 123 | "storages.backends.sftpstorage.SFTPStorage.sftp", 124 | **{"stat.side_effect": FileNotFoundError()}, 125 | ) 126 | def test_not_path_exists(self, mock_sftp): 127 | self.assertFalse(self.storage._path_exists("root/foo")) 128 | 129 | @patch( 130 | "storages.backends.sftpstorage.SFTPStorage.sftp", 131 | **{"stat.side_effect": socket.timeout()}, 132 | ) 133 | def test_not_exists_timeout(self, mock_sftp): 134 | with self.assertRaises(socket.timeout): 135 | self.storage.exists("foo") 136 | 137 | @patch( 138 | "storages.backends.sftpstorage.SFTPStorage.sftp", 139 | **{ 140 | "listdir_attr.return_value": [ 141 | MagicMock(filename="foo", st_mode=stat.S_IFDIR), 142 | MagicMock(filename="bar", st_mode=None), 143 | ] 144 | }, 145 | ) 146 | def test_listdir(self, mock_sftp): 147 | dirs, files = self.storage.listdir("/") 148 | self.assertTrue(dirs) 149 | self.assertTrue(files) 150 | 151 | @patch( 152 | "storages.backends.sftpstorage.SFTPStorage.sftp", 153 | **{ 154 | "stat.return_value.st_size": 42, 155 | }, 156 | ) 157 | def test_size(self, mock_sftp): 158 | self.assertEqual(self.storage.size("foo"), 42) 159 | 160 | def test_url(self): 161 | self.assertEqual(self.storage.url("foo"), "/media/foo") 162 | # Test custom 163 | self.storage.base_url = "http://bar.pt/" 164 | self.assertEqual(self.storage.url("foo"), "http://bar.pt/foo") 165 | # Test error 166 | with self.assertRaises(ValueError): 167 | self.storage.base_url = None 168 | self.storage.url("foo") 169 | 170 | @patch( 171 | "storages.backends.sftpstorage.SFTPStorage.sftp", 172 | **{ 173 | "stat.return_value.st_mtime": 1720287559, 174 | "stat.return_value.st_atime": 1720287559, 175 | }, 176 | ) 177 | def test_times(self, mock_sftp): 178 | self.storage.get_modified_time("foo") 179 | self.storage.get_accessed_time("foo") 180 | 181 | @patch("paramiko.transport.Transport", **{"is_active.side_effect": (True, False)}) 182 | @patch("storages.backends.sftpstorage.SFTPStorage._connect") 183 | def test_sftp(self, connect, transport): 184 | self.assertIsNone(self.storage.sftp) 185 | self.assertTrue(connect.called) 186 | connect.reset_mock() 187 | self.storage._ssh = paramiko.SSHClient() 188 | self.storage._ssh._transport = transport 189 | 190 | self.storage._sftp = True 191 | self.assertTrue(self.storage.sftp) 192 | self.assertFalse(connect.called) 193 | 194 | self.assertTrue(self.storage.sftp) 195 | self.assertTrue(connect.called) 196 | 197 | def test_override_settings(self): 198 | with override_settings(SFTP_STORAGE_ROOT="foo1"): 199 | storage = sftpstorage.SFTPStorage() 200 | self.assertEqual(storage.root_path, "foo1") 201 | with override_settings(SFTP_STORAGE_ROOT="foo2"): 202 | storage = sftpstorage.SFTPStorage() 203 | self.assertEqual(storage.root_path, "foo2") 204 | 205 | def test_override_class_variable(self): 206 | class MyStorage1(sftpstorage.SFTPStorage): 207 | root_path = "foo1" 208 | 209 | storage = MyStorage1() 210 | self.assertEqual(storage.root_path, "foo1") 211 | 212 | class MyStorage2(sftpstorage.SFTPStorage): 213 | root_path = "foo2" 214 | 215 | storage = MyStorage2() 216 | self.assertEqual(storage.root_path, "foo2") 217 | 218 | def test_override_init_argument(self): 219 | storage = sftpstorage.SFTPStorage(root_path="foo1") 220 | 
self.assertEqual(storage.root_path, "foo1") 221 | storage = sftpstorage.SFTPStorage(root_path="foo2") 222 | self.assertEqual(storage.root_path, "foo2") 223 | 224 | 225 | class SFTPStorageFileTest(TestCase): 226 | def setUp(self): 227 | self.storage = sftpstorage.SFTPStorage(host="foo") 228 | self.file = sftpstorage.SFTPStorageFile("bar", self.storage, "wb") 229 | 230 | @patch( 231 | "storages.backends.sftpstorage.SFTPStorage.sftp", 232 | **{ 233 | "stat.return_value.st_size": 42, 234 | }, 235 | ) 236 | def test_size(self, mock_sftp): 237 | self.assertEqual(self.file.size, 42) 238 | 239 | @patch( 240 | "storages.backends.sftpstorage.SFTPStorage.sftp", 241 | **{ 242 | "open.return_value.read.return_value": b"foo", 243 | }, 244 | ) 245 | def test_read(self, mock_sftp): 246 | self.assertEqual(self.file.read(), b"foo") 247 | self.assertTrue(mock_sftp.open.called) 248 | 249 | def test_write(self): 250 | self.file.write(b"foo") 251 | self.assertEqual(self.file.file.read(), b"foo") 252 | 253 | @patch("storages.backends.sftpstorage.SFTPStorage.sftp") 254 | def test_close(self, mock_sftp): 255 | self.file.write(b"foo") 256 | self.file.close() 257 | self.assertTrue(mock_sftp.putfo.called) 258 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import io 3 | import os.path 4 | import pathlib 5 | 6 | from django.conf import settings 7 | from django.core.exceptions import SuspiciousFileOperation 8 | from django.test import TestCase 9 | 10 | from storages import utils 11 | from storages.utils import get_available_overwrite_name as gaon 12 | 13 | 14 | class SettingTest(TestCase): 15 | def test_get_setting(self): 16 | value = utils.setting("SECRET_KEY") 17 | self.assertEqual(settings.SECRET_KEY, value) 18 | 19 | 20 | class CleanNameTests(TestCase): 21 | def test_clean_name(self): 22 | """Test the base case of clean_name.""" 23 | path = utils.clean_name("path/to/somewhere") 24 | self.assertEqual(path, "path/to/somewhere") 25 | 26 | def test_clean_name_pathlib(self): 27 | """Test for pathlib.Path handling.""" 28 | path = pathlib.Path("path/to/anywhere") 29 | self.assertEqual(utils.clean_name(path), "path/to/anywhere") 30 | 31 | path = pathlib.PurePath("path/to/anywhere") 32 | self.assertEqual(utils.clean_name(path), "path/to/anywhere") 33 | 34 | def test_clean_name_normalize(self): 35 | """ 36 | Test the normalization of clean_name 37 | """ 38 | path = utils.clean_name("path/to/../somewhere") 39 | self.assertEqual(path, "path/somewhere") 40 | 41 | def test_clean_name_trailing_slash(self): 42 | """Test the clean_name when the path has a trailing slash.""" 43 | path = utils.clean_name("path/to/somewhere/") 44 | self.assertEqual(path, "path/to/somewhere/") 45 | 46 | def test_clean_name_windows(self): 47 | """Test the clean_name when the path has a trailing slash.""" 48 | path = utils.clean_name("path\\to\\somewhere") 49 | self.assertEqual(path, "path/to/somewhere") 50 | 51 | 52 | class SafeJoinTest(TestCase): 53 | def test_normal(self): 54 | path = utils.safe_join("", "path/to/somewhere", "other", "path/to/somewhere") 55 | self.assertEqual(path, "path/to/somewhere/other/path/to/somewhere") 56 | 57 | def test_with_dot(self): 58 | path = utils.safe_join( 59 | "", "path/./somewhere/../other", "..", ".", "to/./somewhere" 60 | ) 61 | self.assertEqual(path, "path/to/somewhere") 62 | 63 | def test_with_only_dot(self): 64 | path = utils.safe_join("", ".") 65 | 
self.assertEqual(path, "") 66 | 67 | def test_base_url(self): 68 | path = utils.safe_join("base_url", "path/to/somewhere") 69 | self.assertEqual(path, "base_url/path/to/somewhere") 70 | 71 | def test_base_url_with_slash(self): 72 | path = utils.safe_join("base_url/", "path/to/somewhere") 73 | self.assertEqual(path, "base_url/path/to/somewhere") 74 | 75 | def test_suspicious_operation(self): 76 | with self.assertRaises(ValueError): 77 | utils.safe_join("base", "../../../../../../../etc/passwd") 78 | with self.assertRaises(ValueError): 79 | utils.safe_join("base", "/etc/passwd") 80 | 81 | def test_trailing_slash(self): 82 | """ 83 | Test safe_join with paths that end with a trailing slash. 84 | """ 85 | path = utils.safe_join("base_url/", "path/to/somewhere/") 86 | self.assertEqual(path, "base_url/path/to/somewhere/") 87 | 88 | def test_trailing_slash_multi(self): 89 | """ 90 | Test safe_join with multiple paths that end with a trailing slash. 91 | """ 92 | path = utils.safe_join("base_url/", "path/to/", "somewhere/") 93 | self.assertEqual(path, "base_url/path/to/somewhere/") 94 | 95 | def test_datetime_isoformat(self): 96 | dt = datetime.datetime(2017, 5, 19, 14, 45, 37, 123456) 97 | path = utils.safe_join("base_url", dt.isoformat()) 98 | self.assertEqual(path, "base_url/2017-05-19T14:45:37.123456") 99 | 100 | def test_join_empty_string(self): 101 | path = utils.safe_join("base_url", "") 102 | self.assertEqual(path, "base_url/") 103 | 104 | def test_with_base_url_and_dot(self): 105 | path = utils.safe_join("base_url", ".") 106 | self.assertEqual(path, "base_url/") 107 | 108 | def test_with_base_url_and_dot_and_path_and_slash(self): 109 | path = utils.safe_join("base_url", ".", "path/to/", ".") 110 | self.assertEqual(path, "base_url/path/to/") 111 | 112 | def test_join_nothing(self): 113 | path = utils.safe_join("") 114 | self.assertEqual(path, "") 115 | 116 | def test_with_base_url_join_nothing(self): 117 | path = utils.safe_join("base_url") 118 | self.assertEqual(path, "base_url/") 119 | 120 | 121 | class TestGetAvailableOverwriteName(TestCase): 122 | def test_maxlength_is_none(self): 123 | name = "superlong/file/with/path.txt" 124 | self.assertEqual(gaon(name, None), name) 125 | 126 | def test_maxlength_equals_name(self): 127 | name = "parent/child.txt" 128 | self.assertEqual(gaon(name, len(name)), name) 129 | 130 | def test_maxlength_is_greater_than_name(self): 131 | name = "parent/child.txt" 132 | self.assertEqual(gaon(name, len(name) + 1), name) 133 | 134 | def test_maxlength_less_than_name(self): 135 | name = "parent/child.txt" 136 | self.assertEqual(gaon(name, len(name) - 1), "parent/chil.txt") 137 | 138 | def test_truncates_away_filename_raises(self): 139 | name = "parent/child.txt" 140 | with self.assertRaises(SuspiciousFileOperation): 141 | gaon(name, len(name) - 5) 142 | 143 | def test_suspicious_file(self): 144 | name = "superlong/file/with/../path.txt" 145 | with self.assertRaises(SuspiciousFileOperation): 146 | gaon(name, 50) 147 | 148 | 149 | class TestReadBytesWrapper(TestCase): 150 | def test_with_bytes_file(self): 151 | file = io.BytesIO(b"abcd") 152 | file_wrapped = utils.ReadBytesWrapper(file) 153 | 154 | # test read() with default args 155 | self.assertEqual(b"abcd", file_wrapped.read()) 156 | 157 | # test seek() with default args 158 | self.assertEqual(0, file_wrapped.seek(0)) 159 | self.assertEqual(b"abcd", file_wrapped.read()) 160 | 161 | # test read() with custom args 162 | file_wrapped.seek(0) 163 | self.assertEqual(b"ab", file_wrapped.read(2)) 164 | 165 | # test 
seek() with custom args 166 | self.assertEqual(1, file_wrapped.seek(-1, io.SEEK_CUR)) 167 | self.assertEqual(b"bcd", file_wrapped.read()) 168 | 169 | def test_with_string_file(self): 170 | file = io.StringIO("wxyz") 171 | file_wrapped = utils.ReadBytesWrapper(file) 172 | 173 | # test read() with default args 174 | self.assertEqual(b"wxyz", file_wrapped.read()) 175 | 176 | # test seek() with default args 177 | self.assertEqual(0, file_wrapped.seek(0)) 178 | self.assertEqual(b"wxyz", file_wrapped.read()) 179 | 180 | # test read() with custom args 181 | file_wrapped.seek(0) 182 | self.assertEqual(b"wx", file_wrapped.read(2)) 183 | 184 | # test seek() with custom args 185 | self.assertEqual(2, file_wrapped.seek(0, io.SEEK_CUR)) 186 | self.assertEqual(b"yz", file_wrapped.read()) 187 | 188 | # I chose the characters ™€‰ for the following tests because they produce different 189 | # bytes when encoding with utf-8 vs windows-1252 vs utf-16 190 | 191 | def test_with_string_file_specified_encoding(self): 192 | content = "\u2122\u20AC\u2030" 193 | file = io.StringIO(content) 194 | file_wrapped = utils.ReadBytesWrapper(file, encoding="utf-16") 195 | 196 | # test read() returns specified encoding 197 | self.assertEqual(file_wrapped.read(), content.encode("utf-16")) 198 | 199 | def test_with_string_file_detect_encoding(self): 200 | content = "\u2122\u20AC\u2030" 201 | with open( 202 | file=os.path.join( 203 | os.path.dirname(__file__), "test_files", "windows-1252-encoded.txt" 204 | ), 205 | mode="r", 206 | encoding="windows-1252", 207 | ) as file: 208 | self.assertEqual(file.read(), content) 209 | file.seek(0) 210 | 211 | file_wrapped = utils.ReadBytesWrapper(file) 212 | 213 | # test read() returns encoding detected from file object. 214 | self.assertEqual(file_wrapped.read(), content.encode("windows-1252")) 215 | 216 | def test_with_string_file_fallback_encoding(self): 217 | content = "\u2122\u20AC\u2030" 218 | file = io.StringIO(content) 219 | file_wrapped = utils.ReadBytesWrapper(file) 220 | 221 | # test read() returns fallback utf-8 encoding 222 | self.assertEqual(file_wrapped.read(), content.encode("utf-8")) 223 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | from django.core.files.base import ContentFile 2 | 3 | 4 | class NonSeekableContentFile(ContentFile): 5 | def open(self, mode=None): 6 | return self 7 | 8 | def seekable(self): 9 | return False 10 | 11 | def seek(self, pos, whence=0): 12 | raise AttributeError() 13 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | minversion = 1.9 3 | envlist = 4 | py{3.8,3.9,3.10,3.11,3.12}-django4.2 5 | py{3.10,3.11,3.12}-django5.0 6 | py{3.10,3.11,3.12}-django5.1 7 | py{3.12,3.13}-djangomain 8 | ruff 9 | 10 | [testenv] 11 | setenv = 12 | DJANGO_SETTINGS_MODULE = tests.settings 13 | PYTHONWARNINGS = always 14 | PYTHONDONTWRITEBYTECODE = 1 15 | # Use a non-existent file to prevent boto3 from loading 16 | # any configuration from the user's environment 17 | AWS_CONFIG_FILE = {toxinidir}/tests/no_such_file.conf 18 | commands = pytest --cov=storages {posargs} 19 | deps = 20 | cryptography 21 | django4.2: django~=4.2.8 22 | django5.0: django~=5.0.0 23 | django5.1: django~=5.1.0 24 | djangomain: https://github.com/django/django/archive/main.tar.gz 25 | moto 26 | pytest 27 | pytest-cov 28 | rsa 
29 | extras = 30 | azure 31 | boto3 32 | dropbox 33 | google 34 | libcloud 35 | sftp 36 | 37 | [testenv:ruff] 38 | deps = 39 | black 40 | ruff 41 | commands = 42 | ruff check . 43 | black --check . 44 | skip_install = true 45 | 46 | [pytest] 47 | # Default test paths to run, if no other paths are specified on the CLI 48 | # (specify paths after a -- e.g. `tox -- tests/test_s3.py`) 49 | testpaths = tests/ 50 | --------------------------------------------------------------------------------