├── .gitignore
├── AUTHORS
├── LICENSE
├── MANIFEST.in
├── README.rst
├── bootstrap.py
├── buildout.cfg
├── setup.py
└── src
    └── cuddlybuddly
        ├── __init__.py
        └── storage
            ├── __init__.py
            └── s3
                ├── __init__.py
                ├── cache.py
                ├── context_processors.py
                ├── exceptions.py
                ├── lib.py
                ├── management
                │   ├── __init__.py
                │   └── commands
                │       ├── __init__.py
                │       ├── cb_s3_sync_media.py
                │       └── cb_s3_sync_static.py
                ├── middleware.py
                ├── models.py
                ├── storage.py
                ├── templatetags
                │   ├── __init__.py
                │   └── s3_tags.py
                ├── tests
                │   ├── __init__.py
                │   ├── test_s3.py
                │   └── test_s3test.py
                ├── testsettings.py
                └── utils.py
/.gitignore:
--------------------------------------------------------------------------------
1 | tests3credentials.py
2 |
--------------------------------------------------------------------------------
/AUTHORS:
--------------------------------------------------------------------------------
1 | Kyle MacFarlane
2 |
3 | From the Amazon S3 lib: (c) 2006-2007 Amazon Digital Services, Inc. or its affiliates.
4 |
5 | Below is the original AUTHORS file from django-storages:
6 |
7 | By order of apparition, thanks:
8 | * Marty Alchin (S3)
9 | * David Larlet (S3)
10 | * Arne Brodowski (S3)
11 | * Sebastian Serrano (S3)
12 | * Andrew McClain (MogileFS)
13 | * Rafal Jonca (FTP)
14 | * Chris McCormick (S3 with Boto)
15 | * Ivanov E. (Database)
16 | * Ariel Núñez (packaging)
17 | * Wim Leers (SymlinkOrCopy + patches)
18 | * Michael Elsdörfer (Overwrite + PEP8 compatibility)
19 | * Christian Klein (CouchDB)
20 | * Rich Leland (Mosso Cloud Files)
21 |
22 | Extra thanks to Marty for adding this in Django,
23 | you can buy his very interesting book (Pro Django).
24 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2008-2009, see AUTHORS file.
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
5 |
6 | * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
7 | * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
8 | * Neither the name of Cuddly Buddly, this project, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
9 |
10 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
11 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include AUTHORS
2 | include bootstrap.py
3 | include buildout.cfg
4 | include LICENSE
5 | include README.rst
6 | exclude src/cuddlybuddly/storage/s3/tests3credentials.py
7 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | ===============================
2 | django-cuddlybuddly-storage-s3
3 | ===============================
4 |
5 | Updated Amazon S3 storage from django-storages. Adds more fixes than I can remember, a metadata cache system, and some extra utilities for dealing with ``MEDIA_URL`` and ``HTTPS``, for CloudFront, and for creating signed URLs.
6 |
7 |
8 | Installation
9 | ============
10 |
11 | 1. Add ``cuddlybuddly.storage.s3`` to your ``INSTALLED_APPS``.
12 | 2. Set ``DEFAULT_FILE_STORAGE`` to ``cuddlybuddly.storage.s3.S3Storage`` (as a string, don't import it).
13 | 3. Set ``MEDIA_URL`` to your bucket URL, e.g. ``http://yourbucket.s3.amazonaws.com/``.
14 | 4. Enter your AWS credentials in the settings below (see the sketch after this list).
15 |
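A minimal settings sketch (the bucket name, URL and keys are placeholders)::

    INSTALLED_APPS = (
        # ...
        'cuddlybuddly.storage.s3',
    )
    DEFAULT_FILE_STORAGE = 'cuddlybuddly.storage.s3.S3Storage'
    MEDIA_URL = 'http://yourbucket.s3.amazonaws.com/'
    AWS_ACCESS_KEY_ID = 'your-access-key-id'
    AWS_SECRET_ACCESS_KEY = 'your-secret-access-key'
    AWS_STORAGE_BUCKET_NAME = 'yourbucket'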
16 |
17 | Settings
18 | ========
19 |
20 | ``AWS_ACCESS_KEY_ID``
21 | ---------------------
22 |
23 | Your Amazon Web Services access key, as a string.
24 |
25 | ``AWS_SECRET_ACCESS_KEY``
26 | -------------------------
27 |
28 | Your Amazon Web Services secret access key, as a string.
29 |
30 | ``AWS_STORAGE_BUCKET_NAME``
31 | ---------------------------
32 |
33 | Your Amazon Web Services storage bucket name, as a string.
34 |
35 | ``AWS_HEADERS``
36 | ---------------
37 |
38 | A list of ``(regular expression, headers)`` pairs; when a file's path matches a pattern, the corresponding headers are added to the file as it is uploaded to S3. The patterns are matched from first to last::
39 |
40 | # see http://developer.yahoo.com/performance/rules.html#expires
41 | AWS_HEADERS = [
42 | ('^private/', {
43 | 'x-amz-acl': 'private',
44 | 'Expires': 'Thu, 15 Apr 2000 20:00:00 GMT',
45 | 'Cache-Control': 'private, max-age=0'
46 | }),
47 | ('.*', {
48 | 'x-amz-acl': 'public-read',
49 | 'Expires': 'Sat, 30 Oct 2010 20:00:00 GMT',
50 | 'Cache-Control': 'public, max-age=31556926'
51 | })
52 | ]
53 |
54 | * ``x-amz-acl`` sets the ACL of the file on S3 and defaults to ``private``.
55 | * ``Expires`` is for old HTTP/1.0 caches and must be a perfectly formatted RFC 1123 date to work properly. ``django.utils.http.http_date`` can help you here (see the example after this list).
56 | * ``Cache-Control`` is HTTP/1.1 and takes precedence if supported. ``max-age`` is the number of seconds into the future the response should be cached for.
57 |
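For example, a sketch that builds an ``Expires`` value one year ahead using
``django.utils.http.http_date``::

    import time
    from django.utils.http import http_date

    # Produces an RFC 1123 date such as 'Sat, 30 Oct 2010 20:00:00 GMT'
    expires = http_date(time.time() + 365 * 24 * 60 * 60)
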
58 | ``AWS_CALLING_FORMAT``
59 | ----------------------
60 |
61 | Optional and defaults to ``SUBDOMAIN``. The way you'd like to call the Amazon Web Services API, for instance if you need to use the old path method::
62 |
63 | from cuddlybuddly.storage.s3 import CallingFormat
64 | AWS_CALLING_FORMAT = CallingFormat.PATH
65 |
66 |
67 | ``CUDDLYBUDDLY_STORAGE_S3_GZIP_CONTENT_TYPES``
68 | ----------------------------------------------
69 |
70 | A list of content types that will be gzipped. Defaults to ``('text/css', 'application/javascript', 'application/x-javascript')``.
71 |
72 |
73 | ``CUDDLYBUDDLY_STORAGE_S3_SYNC_EXCLUDE``
74 | ----------------------------------------
75 |
76 | A list of regular expressions of files and folders to ignore when using the synchronize commands. Defaults to ``['\.svn$', '\.git$', '\.hg$', 'Thumbs\.db$', '\.DS_Store$']``.
77 |
78 | ``CUDDLYBUDDLY_STORAGE_S3_KEY_PAIR``
79 | ------------------------------------
80 |
81 | A tuple of a key pair ID and the contents of the private key from the security credentials page of your AWS account. This is used for signing private CloudFront URLs. For example::
82 |
83 | settings.CUDDLYBUDDLY_STORAGE_S3_KEY_PAIR = ('PK12345EXAMPLE',
84 | """-----BEGIN RSA PRIVATE KEY-----
85 | ...key contents...
86 | -----END RSA PRIVATE KEY-----""")
87 |
88 |
89 | HTTPS
90 | =====
91 |
92 | Because your ``MEDIA_URL`` must be absolute when you use S3 (i.e. it starts with ``http``), it's more difficult to produce URLs whose protocol matches how the page was requested. The following things should help with that.
93 |
94 | ``cuddlybuddly.storage.s3.middleware.ThreadLocals``
95 | ----------------------------------------------------
96 |
97 | This middleware ensures that the URLs of files retrieved from the database use the same protocol as the page request.
98 |
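To enable it, add it to your middleware settings (a sketch using the
``MIDDLEWARE_CLASSES`` setting from the Django versions this package targets)::

    MIDDLEWARE_CLASSES += (
        'cuddlybuddly.storage.s3.middleware.ThreadLocals',
    )
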
99 | ``cuddlybuddly.storage.s3.context_processors.media``
100 | ----------------------------------------------------
101 |
102 | This context processor returns ``MEDIA_URL`` with the protocol matching how the page was requested.
103 |
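To enable it, add it to ``TEMPLATE_CONTEXT_PROCESSORS``::

    TEMPLATE_CONTEXT_PROCESSORS += (
        'cuddlybuddly.storage.s3.context_processors.media',
    )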
104 |
105 | Cache
106 | =====
107 |
108 | Included is a cache system that stores file metadata to speed up access to details such as size and last modified time. It is disabled by default.
109 |
110 | ``FileSystemCache``
111 | -------------------
112 |
113 | The only included cache system is ``FileSystemCache`` that stores the cache on the local disk. To use it, add the following to your settings file::
114 |
115 | CUDDLYBUDDLY_STORAGE_S3_CACHE = 'cuddlybuddly.storage.s3.cache.FileSystemCache'
116 | CUDDLYBUDDLY_STORAGE_S3_FILE_CACHE_DIR = '/location/to/store/cache'
117 |
118 | Custom Cache
119 | ------------
120 |
121 | To create your own cache system, inherit from ``cuddlybuddly.storage.s3.cache.Cache`` and implement the following methods (a minimal sketch follows the list):
122 |
123 | * exists
124 | * modified_time
125 | * save
126 | * size
127 | * remove
128 |
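A minimal sketch of a custom cache that keeps everything in memory (purely
illustrative; it is not shared between processes)::

    from cuddlybuddly.storage.s3.cache import Cache

    class DictCache(Cache):
        _store = {}  # name -> (size, mtime)

        def exists(self, name):
            # None means 'not cached', per the base class contract
            return True if name in self._store else None

        def size(self, name):
            return self._store.get(name, (None, None))[0]

        def modified_time(self, name):
            return self._store.get(name, (None, None))[1]

        def save(self, name, size, mtime):
            self._store[name] = (size, mtime)

        def remove(self, name):
            self._store.pop(name, None)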
129 |
130 | Utilities
131 | =========
132 |
133 | ``create_signed_url(file, expires=60, secure=False, private_cloudfront=False, expires_at=None)``
134 | ------------------------------------------------------------------------------------------------
135 |
136 | Creates a signed URL to ``file`` that will expire in ``expires`` seconds. If ``secure`` is set to ``True`` an ``https`` link will be returned.
137 |
138 | The ``private_cloudfront`` argument will use the key pair set up with ``CUDDLYBUDDLY_STORAGE_S3_KEY_PAIR`` to create signed URLs for a private CloudFront distribution.
139 |
140 | The ``expires_at`` argument will override ``expires`` and expire the URL at a specified UNIX timestamp. It was mostly just added for generating consistent URLs for testing.
141 |
142 | To import it::
143 |
144 | from cuddlybuddly.storage.s3.utils import create_signed_url
145 |
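A hypothetical call that returns an ``https`` URL to ``private/report.pdf``
valid for five minutes::

    signed_url = create_signed_url('private/report.pdf', expires=300, secure=True)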
146 |
147 | ``CloudFrontURLs(default, patterns={}, https=None)``
148 | ----------------------------------------------------
149 |
150 | Use this with the context processor or storage backends to return varying ``MEDIA_URL`` or ``STATIC_URL`` depending on the path to improve page loading times.
151 |
152 | To use it add something like the following to your settings file::
153 |
154 | from cuddlybuddly.storage.s3.utils import CloudFrontURLs
155 | MEDIA_URL = CloudFrontURLs('http://cdn1.example.com/', patterns={
156 | '^images/': 'http://cdn2.example.com/',
157 | '^banners/': 'http://cdn3.example.com/',
158 | '^css/': 'http://cdn4.example.com/'
159 | }, https='https://example.cloudfront.net/')
160 |
161 | The ``https`` argument is a URL to bypass CloudFront's lack of HTTPS CNAME support.
162 |
163 | ``s3_media_url`` Template Tag
164 | -----------------------------
165 |
166 | This is for use with ``CloudFrontURLs`` and will return the appropriate URL if a match is found.
167 |
168 | Usage::
169 |
170 | {% load s3_tags %}
171 | {% s3_media_url 'css/common.css' %}
172 |
173 | For ``HTTPS``, the ``cuddlybuddly.storage.s3.middleware.ThreadLocals`` middleware must also be used.
174 |
175 |
176 | ``s3_static_url`` Template Tag
177 | ------------------------------
178 |
179 | The same as ``s3_media_url`` but uses ``STATIC_URL`` instead.
180 |
181 |
182 | ``cuddlybuddly.storage.s3.S3StorageStatic`` Storage Backend
183 | -----------------------------------------------------------
184 |
185 | A version of the storage backend that uses ``STATIC_URL`` instead. For use with ``STATICFILES_STORAGE`` and the ``static`` template tag from ``contrib.staticfiles``.
186 |
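A settings sketch (the URL is a placeholder)::

    STATICFILES_STORAGE = 'cuddlybuddly.storage.s3.S3StorageStatic'
    STATIC_URL = 'http://yourbucket.s3.amazonaws.com/static/'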
187 |
188 | Commands
189 | ========
190 |
191 | ``cb_s3_sync_media``
192 | --------------------
193 |
194 | Synchronizes a directory with your S3 bucket. It will skip files that are already up to date or newer in the bucket but will not remove old files as that has the potential to go very wrong. The headers specified in ``AWS_HEADERS`` will be applied.
195 |
196 | It has the following options (an example invocation follows the list):
197 |
198 | * ``--cache``, ``-c`` - Get the modified times of files from the cache (if available) instead of checking S3. This is faster but could be inaccurate.
199 | * ``--dir``, ``-d`` - The directory to synchronize with your bucket, defaults to ``MEDIA_ROOT``.
200 | * ``--exclude``, ``-e`` - A comma-separated list of regular expressions matching files or folders to ignore. Defaults to ``CUDDLYBUDDLY_STORAGE_S3_SYNC_EXCLUDE``.
201 | * ``--force``, ``-f`` - Uploads all files even if the version in the bucket is up to date.
202 | * ``--prefix``, ``-p`` - A prefix to prepend to every file uploaded, i.e. a subfolder to place the files in.
203 |
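For example, a hypothetical invocation that uploads everything under
``/srv/site/media`` into an ``uploads/`` prefix::

    ./manage.py cb_s3_sync_media --dir=/srv/site/media --prefix=uploads/
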
204 | ``cb_s3_sync_static``
205 | ---------------------
206 |
207 | Exactly the same as ``cb_s3_sync_media`` except that ``dir`` defaults to ``STATIC_ROOT``.
208 |
209 |
210 | A note on the tests
211 | ===================
212 |
213 | The tests in ``tests/test_s3test.py`` are pretty much straight from Amazon but have a tendency to fail if you run them too often / too quickly. When they do this they sometimes leave behind files or buckets in your account that you will need to go and delete to make the tests pass again.
214 |
215 | The signed URL tests will also fail if your computer's clock is too far off from Amazon's servers.
216 |
--------------------------------------------------------------------------------
/bootstrap.py:
--------------------------------------------------------------------------------
1 | ##############################################################################
2 | #
3 | # Copyright (c) 2006 Zope Foundation and Contributors.
4 | # All Rights Reserved.
5 | #
6 | # This software is subject to the provisions of the Zope Public License,
7 | # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
8 | # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
9 | # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
10 | # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
11 | # FOR A PARTICULAR PURPOSE.
12 | #
13 | ##############################################################################
14 | """Bootstrap a buildout-based project
15 |
16 | Simply run this script in a directory containing a buildout.cfg.
17 | The script accepts buildout command-line options, so you can
18 | use the -c option to specify an alternate configuration file.
19 | """
20 |
21 | import os
22 | import shutil
23 | import sys
24 | import tempfile
25 |
26 | from optparse import OptionParser
27 |
28 | tmpeggs = tempfile.mkdtemp()
29 |
30 | usage = '''\
31 | [DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
32 |
33 | Bootstraps a buildout-based project.
34 |
35 | Simply run this script in a directory containing a buildout.cfg, using the
36 | Python that you want bin/buildout to use.
37 |
38 | Note that by using --find-links to point to local resources, you can keep
39 | this script from going over the network.
40 | '''
41 |
42 | parser = OptionParser(usage=usage)
43 | parser.add_option("-v", "--version", help="use a specific zc.buildout version")
44 |
45 | parser.add_option("-t", "--accept-buildout-test-releases",
46 | dest='accept_buildout_test_releases',
47 | action="store_true", default=False,
48 | help=("Normally, if you do not specify a --version, the "
49 | "bootstrap script and buildout gets the newest "
50 | "*final* versions of zc.buildout and its recipes and "
51 | "extensions for you. If you use this flag, "
52 | "bootstrap and buildout will get the newest releases "
53 | "even if they are alphas or betas."))
54 | parser.add_option("-c", "--config-file",
55 | help=("Specify the path to the buildout configuration "
56 | "file to be used."))
57 | parser.add_option("-f", "--find-links",
58 | help=("Specify a URL to search for buildout releases"))
59 |
60 |
61 | options, args = parser.parse_args()
62 |
63 | ######################################################################
64 | # load/install setuptools
65 |
66 | to_reload = False
67 | try:
68 | import pkg_resources
69 | import setuptools
70 | except ImportError:
71 | ez = {}
72 |
73 | try:
74 | from urllib.request import urlopen
75 | except ImportError:
76 | from urllib2 import urlopen
77 |
78 | exec(urlopen('https://bootstrap.pypa.io/ez_setup.py').read(), ez)
79 | setup_args = dict(to_dir=tmpeggs, download_delay=0)
80 | ez['use_setuptools'](**setup_args)
81 |
82 | if to_reload:
83 | reload(pkg_resources)
84 | import pkg_resources
85 | # This does not (always?) update the default working set. We will
86 | # do it.
87 | for path in sys.path:
88 | if path not in pkg_resources.working_set.entries:
89 | pkg_resources.working_set.add_entry(path)
90 |
91 | ######################################################################
92 | # Install buildout
93 |
94 | ws = pkg_resources.working_set
95 |
96 | cmd = [sys.executable, '-c',
97 | 'from setuptools.command.easy_install import main; main()',
98 | '-mZqNxd', tmpeggs]
99 |
100 | find_links = os.environ.get(
101 | 'bootstrap-testing-find-links',
102 | options.find_links or
103 | ('http://downloads.buildout.org/'
104 | if options.accept_buildout_test_releases else None)
105 | )
106 | if find_links:
107 | cmd.extend(['-f', find_links])
108 |
109 | setuptools_path = ws.find(
110 | pkg_resources.Requirement.parse('setuptools')).location
111 |
112 | requirement = 'zc.buildout'
113 | version = options.version
114 | if version is None and not options.accept_buildout_test_releases:
115 | # Figure out the most recent final version of zc.buildout.
116 | import setuptools.package_index
117 | _final_parts = '*final-', '*final'
118 |
119 | def _final_version(parsed_version):
120 | for part in parsed_version:
121 | if (part[:1] == '*') and (part not in _final_parts):
122 | return False
123 | return True
124 | index = setuptools.package_index.PackageIndex(
125 | search_path=[setuptools_path])
126 | if find_links:
127 | index.add_find_links((find_links,))
128 | req = pkg_resources.Requirement.parse(requirement)
129 | if index.obtain(req) is not None:
130 | best = []
131 | bestv = None
132 | for dist in index[req.project_name]:
133 | distv = dist.parsed_version
134 | if _final_version(distv):
135 | if bestv is None or distv > bestv:
136 | best = [dist]
137 | bestv = distv
138 | elif distv == bestv:
139 | best.append(dist)
140 | if best:
141 | best.sort()
142 | version = best[-1].version
143 | if version:
144 | requirement = '=='.join((requirement, version))
145 | cmd.append(requirement)
146 |
147 | import subprocess
148 | if subprocess.call(cmd, env=dict(os.environ, PYTHONPATH=setuptools_path)) != 0:
149 |     raise Exception(
150 |         "Failed to execute command:\n%s" %
151 |         repr(cmd)[1:-1])
152 |
153 | ######################################################################
154 | # Import and run buildout
155 |
156 | ws.add_entry(tmpeggs)
157 | ws.require(requirement)
158 | import zc.buildout.buildout
159 |
160 | if not [a for a in args if '=' not in a]:
161 | args.append('bootstrap')
162 |
163 | # if -c was provided, we push it back into args for buildout's main function
164 | if options.config_file is not None:
165 | args[0:0] = ['-c', options.config_file]
166 |
167 | zc.buildout.buildout.main(args)
168 | shutil.rmtree(tmpeggs)
169 |
--------------------------------------------------------------------------------
/buildout.cfg:
--------------------------------------------------------------------------------
1 | [buildout]
2 | parts = python django
3 | develop = .
4 | eggs = django-cuddlybuddly-storage-s3
5 | versions = versions
6 |
7 | [versions]
8 | django = 1.6.5
9 |
10 | [python]
11 | recipe = zc.recipe.egg
12 | interpreter = python
13 | eggs = ${buildout:eggs}
14 |
15 | [django]
16 | recipe = djangorecipe
17 | project = cuddlybuddly.storage.s3
18 | projectegg = cuddlybuddly.storage.s3
19 | settings = testsettings
20 | test = cuddlybuddly.storage.s3
21 | eggs = ${buildout:eggs}
22 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 | from setuptools import setup, find_packages
3 |
4 | def read(fname):
5 | return open(os.path.join(os.path.dirname(__file__), fname)).read()
6 |
7 | install_requires = [
8 | 'setuptools',
9 | 'pycryptodome'
10 | ]
11 |
12 | try:
13 | from collections import OrderedDict
14 | except ImportError:
15 | install_requires.append('ordereddict')
16 |
17 | setup(
18 | name = 'django-cuddlybuddly-storage-s3',
19 | version = '3.3',
20 | license = 'BSD',
21 | description = 'Updated Amazon S3 storage from django-storages. Adds more ' \
22 | 'fixes than I can remember, a metadata cache system and ' \
23 | 'some extra utilities for dealing with MEDIA_URL and HTTPS, ' \
24 | 'CloudFront and for creating signed URLs.',
25 | long_description = read('README.rst'),
26 | author = 'Kyle MacFarlane',
27 | author_email = 'kyle@deletethetrees.com',
28 |
29 | package_dir = {'': 'src'},
30 | packages = find_packages('src'),
31 | namespace_packages = ['cuddlybuddly'],
32 | include_package_data = True,
33 | zip_safe = False,
34 |
35 | install_requires = install_requires,
36 |
37 | classifiers = [
38 | 'Development Status :: 5 - Production/Stable',
39 | 'Environment :: Web Environment',
40 | 'Framework :: Django',
41 | 'Intended Audience :: Developers',
42 | 'License :: OSI Approved :: BSD License',
43 | 'Operating System :: OS Independent',
44 | 'Programming Language :: Python',
45 | 'Topic :: Internet :: WWW/HTTP'
46 | ],
47 | )
48 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/__init__.py:
--------------------------------------------------------------------------------
1 | __import__('pkg_resources').declare_namespace(__name__)
2 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/__init__.py:
--------------------------------------------------------------------------------
1 | __import__('pkg_resources').declare_namespace(__name__)
2 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/__init__.py:
--------------------------------------------------------------------------------
1 | from cuddlybuddly.storage.s3.lib import CallingFormat
2 | from cuddlybuddly.storage.s3.storage import S3Storage, S3StorageStatic
3 |
4 |
5 | __all__ = ['CallingFormat', 'S3Storage', 'S3StorageStatic']
6 |
7 |
8 | # Monkey patch Django's form Media class as I don't see a better way to do this,
9 | # especially with custom admin javascript on CloudFront that needs to be HTTPS to run.
10 | from django.conf import settings
11 | from django.forms.widgets import Media
12 | from cuddlybuddly.storage.s3.utils import CloudFrontURLs
13 | old_absolute_path = Media.absolute_path
14 | def absolute_path(self, path, prefix=None):
15 | if not isinstance(settings.STATIC_URL, CloudFrontURLs) or \
16 | path.startswith(('http://', 'https://', '/')) or \
17 | prefix is not None:
18 | return old_absolute_path(self, path, prefix)
19 | return settings.STATIC_URL.get_url(path)
20 | Media.absolute_path = absolute_path
21 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/cache.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | import os
3 | from django.conf import settings
4 | from django.core.exceptions import ImproperlyConfigured
5 | from django.utils.encoding import force_text
6 |
7 |
8 | class Cache(object):
9 | """
10 | A base cache class, providing some default behaviors that all other
11 | cache systems can inherit or override, as necessary.
12 | """
13 |
14 | def exists(self, name):
15 | """
16 | Returns True if a file referenced by the given name already exists in the
17 | storage system, or False if the name is available for a new file.
18 |
19 | If the cache doesn't exist then return None.
20 | """
21 | raise NotImplementedError()
22 |
23 | def size(self, name):
24 | """
25 | Returns the total size, in bytes, of the file specified by name.
26 |
27 | If the cache doesn't exist then return None.
28 | """
29 | raise NotImplementedError()
30 |
31 | def modified_time(self, name):
32 | """
33 | Return the time of last modification of name. The return value is a
34 | number giving the number of seconds since the epoch.
35 |
36 | If the cache doesn't exist then return None.
37 | """
38 | raise NotImplementedError()
39 |
40 | def save(self, name, size, mtime):
41 | """
42 | Save the values to the cache.
43 | """
44 | raise NotImplementedError()
45 |
46 | def remove(self, name):
47 | """
48 | Remove the values from the cache.
49 | """
50 | raise NotImplementedError()
51 |
52 |
53 | class FileSystemCache(Cache):
54 | def __init__(self, cache_dir=None):
55 | if cache_dir is None:
56 | cache_dir = getattr(settings, 'CUDDLYBUDDLY_STORAGE_S3_FILE_CACHE_DIR', None)
57 | if cache_dir is None:
58 | raise ImproperlyConfigured(
59 | '%s requires CUDDLYBUDDLY_STORAGE_S3_FILE_CACHE_DIR to be set to a directory.' % type(self)
60 | )
61 | self.cache_dir = cache_dir
62 |
63 | def _path(self, name):
64 | name = force_text(name).encode('utf-8')
65 | return os.path.join(self.cache_dir, hashlib.md5(name).hexdigest())
66 |
67 | def exists(self, name):
68 | return None
69 |
70 | def size(self, name):
71 | try:
72 | file = open(self._path(name))
73 | size = int(file.readlines()[1])
74 | file.close()
75 |         except (IOError, OSError, IndexError, ValueError):
76 | size = None
77 | return size
78 |
79 | def modified_time(self, name):
80 | try:
81 | file = open(self._path(name))
82 | mtime = float(file.readlines()[2])
83 | file.close()
84 |         except (IOError, OSError, IndexError, ValueError):
85 | mtime = None
86 | return mtime
87 |
88 | def save(self, name, size, mtime):
89 | if not os.path.exists(self.cache_dir):
90 | os.makedirs(self.cache_dir)
91 | file = open(self._path(name), 'wb')
92 | file.write(('%s\n%s\n%s' % (name, size, mtime)).encode('utf-8'))
93 | file.close()
94 |
95 | def remove(self, name):
96 | name = self._path(name)
97 | if os.path.exists(name):
98 | os.remove(name)
99 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/context_processors.py:
--------------------------------------------------------------------------------
1 | from django.conf import settings
2 |
3 |
4 | def media(request):
5 | if request.is_secure() or \
6 |             request.META.get('HTTP_X_FORWARDED_SSL', 'off') == 'on':
7 | if hasattr(settings.MEDIA_URL, 'https'):
8 | url = settings.MEDIA_URL.https()
9 | else:
10 | url = settings.MEDIA_URL.replace('http://', 'https://')
11 | else:
12 | url = settings.MEDIA_URL.replace('https://', 'http://')
13 | return {'MEDIA_URL': url}
14 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/exceptions.py:
--------------------------------------------------------------------------------
1 | class S3Error(IOError):
2 | pass
3 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/lib.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # This software code is made available "AS IS" without warranties of any
4 | # kind. You may copy, display, modify and redistribute the software
5 | # code either by itself or as incorporated into your code; provided that
6 | # you do not remove any proprietary notices. Your use of this software
7 | # code is at your own risk and you waive any claim against Amazon
8 | # Digital Services, Inc. or its affiliates with respect to your use of
9 | # this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
10 | # affiliates.
11 |
12 | # Now uses Django's urlquote_plus to circumvent lack of unicode support in
13 | # urllib.
14 | #
15 | # Fixed urlquote_plus escaping slashes in urls.
16 | #
17 | # Stopped build_url_base from adding unnecessary ports (80 and 443).
18 | #
19 | # Replaced sha with hashlib.
20 | #
21 | # Date header locale fix.
22 | #
23 | # Added S3Exception.
24 | #
25 | # 2011/03/07 - Changed all uses of urlquote_plus to urlquote.
26 | #
27 | # 2014/07/26 - Python 3 support
28 | #
29 | # (c) 2009-2011 Kyle MacFarlane
30 |
31 | import base64
32 | import hmac
33 | try:
34 | import http.client as httplib # Python 3
35 | except ImportError:
36 | import httplib # Python 2
37 | import hashlib
38 | import time
39 | try:
40 | from urllib import parse as urlparse # Python 3
41 | except ImportError:
42 | import urlparse # Python 2
43 | import xml.sax
44 | from django.utils.http import urlquote
45 |
46 | DEFAULT_HOST = 's3.amazonaws.com'
47 | PORTS_BY_SECURITY = { True: 443, False: 80 }
48 | METADATA_PREFIX = 'x-amz-meta-'
49 | AMAZON_HEADER_PREFIX = 'x-amz-'
50 |
51 | class S3Exception(Exception):
52 | pass
53 |
54 | # generates the aws canonical string for the given parameters
55 | def canonical_string(method, bucket="", key="", query_args={}, headers={}, expires=None):
56 | interesting_headers = {}
57 | for header_key in headers:
58 | lk = header_key.lower()
59 | if lk in ['content-md5', 'content-type', 'date'] or lk.startswith(AMAZON_HEADER_PREFIX):
60 | interesting_headers[lk] = headers[header_key].strip()
61 |
62 | # these keys get empty strings if they don't exist
63 | if not 'content-type' in interesting_headers:
64 | interesting_headers['content-type'] = ''
65 | if not 'content-md5' in interesting_headers:
66 | interesting_headers['content-md5'] = ''
67 |
68 | # just in case someone used this. it's not necessary in this lib.
69 | if 'x-amz-date' in interesting_headers:
70 | interesting_headers['date'] = ''
71 |
72 | # if you're using expires for query string auth, then it trumps date
73 | # (and x-amz-date)
74 | if expires:
75 | interesting_headers['date'] = str(expires)
76 |
77 | sorted_header_keys = list(interesting_headers.keys())
78 | sorted_header_keys.sort()
79 |
80 | buf = "%s\n" % method
81 | for header_key in sorted_header_keys:
82 | if header_key.startswith(AMAZON_HEADER_PREFIX):
83 | buf += "%s:%s\n" % (header_key, interesting_headers[header_key])
84 | else:
85 | buf += "%s\n" % interesting_headers[header_key]
86 |
87 | # append the bucket if it exists
88 | if bucket != "":
89 | buf += "/%s" % bucket
90 |
91 | # add the key. even if it doesn't exist, add the slash
92 | buf += "/%s" % urlquote(key, '/')
93 |
94 | # handle special query string arguments
95 |
96 | if "acl" in query_args:
97 | buf += "?acl"
98 | elif "torrent" in query_args:
99 | buf += "?torrent"
100 | elif "logging" in query_args:
101 | buf += "?logging"
102 | elif "location" in query_args:
103 | buf += "?location"
104 |
105 | return buf
106 |
107 | # computes the base64'ed hmac-sha hash of the canonical string and the secret
108 | # access key, optionally urlencoding the result
109 | def encode(aws_secret_access_key, str, urlencode=False):
110 | if hasattr(base64, 'encodebytes'):
111 | encoder = base64.encodebytes # Python 3
112 | else:
113 | encoder = base64.encodestring # Python 2
114 | b64_hmac = encoder(hmac.new(
115 | aws_secret_access_key.encode('utf-8'),
116 | str.encode('utf-8'),
117 | hashlib.sha1
118 | ).digest()).strip().decode('utf-8')
119 | if urlencode:
120 | return urlquote(b64_hmac)
121 | else:
122 | return b64_hmac
123 |
124 | def merge_meta(headers, metadata):
125 | final_headers = headers.copy()
126 | for k in metadata.keys():
127 | final_headers[METADATA_PREFIX + k] = metadata[k]
128 |
129 | return final_headers
130 |
131 | # builds the query arg string
132 | def query_args_hash_to_string(query_args):
133 | query_string = ""
134 | pairs = []
135 | for k, v in query_args.items():
136 | piece = k
137 | if v != None:
138 | piece += "=%s" % urlquote(str(v))
139 | pairs.append(piece)
140 |
141 | return '&'.join(pairs)
142 |
143 |
144 | class CallingFormat:
145 | PATH = 1
146 | SUBDOMAIN = 2
147 | VANITY = 3
148 |
149 | def build_url_base(protocol, server, port, bucket, calling_format):
150 | url_base = '%s://' % protocol
151 |
152 | if bucket == '':
153 | url_base += server
154 | elif calling_format == CallingFormat.SUBDOMAIN:
155 | url_base += "%s.%s" % (bucket, server)
156 | elif calling_format == CallingFormat.VANITY:
157 | url_base += bucket
158 | else:
159 | url_base += server
160 |
161 | if port not in (80, 443):
162 | url_base += ":%s" % port
163 |
164 | if (bucket != '') and (calling_format == CallingFormat.PATH):
165 | url_base += "/%s" % bucket
166 |
167 | return url_base
168 |
169 | build_url_base = staticmethod(build_url_base)
170 |
171 |
172 |
173 | class Location:
174 | DEFAULT = None
175 | EU = 'EU'
176 |
177 |
178 |
179 | class AWSAuthConnection:
180 | def __init__(self, aws_access_key_id, aws_secret_access_key, is_secure=True,
181 | server=DEFAULT_HOST, port=None, calling_format=CallingFormat.SUBDOMAIN):
182 |
183 | if not port:
184 | port = PORTS_BY_SECURITY[is_secure]
185 |
186 | self.aws_access_key_id = aws_access_key_id
187 | self.aws_secret_access_key = aws_secret_access_key
188 | self.is_secure = is_secure
189 | self.server = server
190 | self.port = port
191 | self.calling_format = calling_format
192 |
193 | def create_bucket(self, bucket, headers={}):
194 | return Response(self._make_request('PUT', bucket, '', {}, headers))
195 |
196 | def create_located_bucket(self, bucket, location=Location.DEFAULT, headers={}):
197 | if location == Location.DEFAULT:
198 | body = ""
199 | else:
200 |             body = "<CreateBucketConfiguration><LocationConstraint>" + \
201 |                    location + \
202 |                    "</LocationConstraint></CreateBucketConfiguration>"
203 | return Response(self._make_request('PUT', bucket, '', {}, headers, body))
204 |
205 | def check_bucket_exists(self, bucket):
206 | return self._make_request('HEAD', bucket, '', {}, {})
207 |
208 | def list_bucket(self, bucket, options={}, headers={}):
209 | return ListBucketResponse(self._make_request('GET', bucket, '', options, headers))
210 |
211 | def delete_bucket(self, bucket, headers={}):
212 | return Response(self._make_request('DELETE', bucket, '', {}, headers))
213 |
214 | def put(self, bucket, key, object, headers={}):
215 | if not isinstance(object, S3Object):
216 | object = S3Object(object)
217 |
218 | return Response(
219 | self._make_request(
220 | 'PUT',
221 | bucket,
222 | key,
223 | {},
224 | headers,
225 | object.data,
226 | object.metadata))
227 |
228 | def get(self, bucket, key, headers={}):
229 | return GetResponse(
230 | self._make_request('GET', bucket, key, {}, headers))
231 |
232 | def delete(self, bucket, key, headers={}):
233 | return Response(
234 | self._make_request('DELETE', bucket, key, {}, headers))
235 |
236 | def get_bucket_logging(self, bucket, headers={}):
237 | return GetResponse(self._make_request('GET', bucket, '', { 'logging': None }, headers))
238 |
239 | def put_bucket_logging(self, bucket, logging_xml_doc, headers={}):
240 | return Response(self._make_request('PUT', bucket, '', { 'logging': None }, headers, logging_xml_doc))
241 |
242 | def get_bucket_acl(self, bucket, headers={}):
243 | return self.get_acl(bucket, '', headers)
244 |
245 | def get_acl(self, bucket, key, headers={}):
246 | return GetResponse(
247 | self._make_request('GET', bucket, key, { 'acl': None }, headers))
248 |
249 | def put_bucket_acl(self, bucket, acl_xml_document, headers={}):
250 | return self.put_acl(bucket, '', acl_xml_document, headers)
251 |
252 | def put_acl(self, bucket, key, acl_xml_document, headers={}):
253 | return Response(
254 | self._make_request(
255 | 'PUT',
256 | bucket,
257 | key,
258 | { 'acl': None },
259 | headers,
260 | acl_xml_document))
261 |
262 | def list_all_my_buckets(self, headers={}):
263 | return ListAllMyBucketsResponse(self._make_request('GET', '', '', {}, headers))
264 |
265 | def get_bucket_location(self, bucket):
266 | return LocationResponse(self._make_request('GET', bucket, '', {'location' : None}))
267 |
268 | # end public methods
269 |
270 | def _make_request(self, method, bucket='', key='', query_args={}, headers={}, data='', metadata={}):
271 | server = ''
272 | if bucket == '':
273 | server = self.server
274 | elif self.calling_format == CallingFormat.SUBDOMAIN:
275 | server = "%s.%s" % (bucket, self.server)
276 | elif self.calling_format == CallingFormat.VANITY:
277 | server = bucket
278 | else:
279 | server = self.server
280 |
281 | path = ''
282 |
283 | if (bucket != '') and (self.calling_format == CallingFormat.PATH):
284 | path += "/%s" % bucket
285 |
286 | # add the slash after the bucket regardless
287 | # the key will be appended if it is non-empty
288 | path += "/%s" % urlquote(key, '/')
289 |
290 |
291 | # build the path_argument string
292 | # add the ? in all cases since
293 | # signature and credentials follow path args
294 | if len(query_args):
295 | path += "?" + query_args_hash_to_string(query_args)
296 |
297 | is_secure = self.is_secure
298 | host = "%s:%d" % (server, self.port)
299 | while True:
300 | if (is_secure):
301 | connection = httplib.HTTPSConnection(host)
302 | else:
303 | connection = httplib.HTTPConnection(host)
304 |
305 |             final_headers = merge_meta(headers, metadata)
306 | # add auth header
307 | self._add_aws_auth_header(final_headers, method, bucket, key, query_args)
308 |
309 | connection.request(method, path, data, final_headers)
310 | resp = connection.getresponse()
311 | if resp.status < 300 or resp.status >= 400:
312 | return resp
313 | # handle redirect
314 | location = resp.getheader('location')
315 | if not location:
316 | return resp
317 | # (close connection)
318 | resp.read()
319 | scheme, host, path, params, query, fragment \
320 | = urlparse.urlparse(location)
321 |             if scheme == "http": is_secure = False
322 |             elif scheme == "https": is_secure = True
323 | else: raise S3Exception("Not http/https: " + location)
324 | if query: path += "?" + query
325 | # retry with redirect
326 |
327 | def _add_aws_auth_header(self, headers, method, bucket, key, query_args):
328 | if not 'Date' in headers:
329 | headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
330 |
331 | c_string = canonical_string(method, bucket, key, query_args, headers)
332 | headers['Authorization'] = \
333 | "AWS %s:%s" % (self.aws_access_key_id, encode(self.aws_secret_access_key, c_string))
334 |
335 |
336 | class QueryStringAuthGenerator:
337 | # by default, expire in 1 minute
338 | DEFAULT_EXPIRES_IN = 60
339 |
340 | def __init__(self, aws_access_key_id, aws_secret_access_key, is_secure=True,
341 | server=DEFAULT_HOST, port=None, calling_format=CallingFormat.SUBDOMAIN):
342 |
343 | if not port:
344 | port = PORTS_BY_SECURITY[is_secure]
345 |
346 | self.aws_access_key_id = aws_access_key_id
347 | self.aws_secret_access_key = aws_secret_access_key
348 | if (is_secure):
349 | self.protocol = 'https'
350 | else:
351 | self.protocol = 'http'
352 |
353 | self.is_secure = is_secure
354 | self.server = server
355 | self.port = port
356 | self.calling_format = calling_format
357 | self.__expires_in = QueryStringAuthGenerator.DEFAULT_EXPIRES_IN
358 | self.__expires = None
359 |
360 | # for backwards compatibility with older versions
361 | self.server_name = "%s:%s" % (self.server, self.port)
362 |
363 | def set_expires_in(self, expires_in):
364 | self.__expires_in = expires_in
365 | self.__expires = None
366 |
367 | def set_expires(self, expires):
368 | self.__expires = expires
369 | self.__expires_in = None
370 |
371 | def create_bucket(self, bucket, headers={}):
372 | return self.generate_url('PUT', bucket, '', {}, headers)
373 |
374 | def list_bucket(self, bucket, options={}, headers={}):
375 | return self.generate_url('GET', bucket, '', options, headers)
376 |
377 | def delete_bucket(self, bucket, headers={}):
378 | return self.generate_url('DELETE', bucket, '', {}, headers)
379 |
380 | def put(self, bucket, key, object, headers={}):
381 | if not isinstance(object, S3Object):
382 | object = S3Object(object)
383 |
384 | return self.generate_url(
385 | 'PUT',
386 | bucket,
387 | key,
388 | {},
389 | merge_meta(headers, object.metadata))
390 |
391 | def get(self, bucket, key, headers={}):
392 | return self.generate_url('GET', bucket, key, {}, headers)
393 |
394 | def delete(self, bucket, key, headers={}):
395 | return self.generate_url('DELETE', bucket, key, {}, headers)
396 |
397 | def get_bucket_logging(self, bucket, headers={}):
398 | return self.generate_url('GET', bucket, '', { 'logging': None }, headers)
399 |
400 | def put_bucket_logging(self, bucket, logging_xml_doc, headers={}):
401 | return self.generate_url('PUT', bucket, '', { 'logging': None }, headers)
402 |
403 | def get_bucket_acl(self, bucket, headers={}):
404 | return self.get_acl(bucket, '', headers)
405 |
406 | def get_acl(self, bucket, key='', headers={}):
407 | return self.generate_url('GET', bucket, key, { 'acl': None }, headers)
408 |
409 | def put_bucket_acl(self, bucket, acl_xml_document, headers={}):
410 | return self.put_acl(bucket, '', acl_xml_document, headers)
411 |
412 | # don't really care what the doc is here.
413 | def put_acl(self, bucket, key, acl_xml_document, headers={}):
414 | return self.generate_url('PUT', bucket, key, { 'acl': None }, headers)
415 |
416 | def list_all_my_buckets(self, headers={}):
417 | return self.generate_url('GET', '', '', {}, headers)
418 |
419 | def make_bare_url(self, bucket, key=''):
420 |         full_url = self.generate_url('GET', bucket, key)
421 | return full_url[:full_url.index('?')]
422 |
423 | def generate_url(self, method, bucket='', key='', query_args={}, headers={}):
424 | expires = 0
425 | if self.__expires_in != None:
426 | expires = int(time.time() + self.__expires_in)
427 | elif self.__expires != None:
428 | expires = int(self.__expires)
429 | else:
430 | raise S3Exception("Invalid expires state")
431 |
432 | canonical_str = canonical_string(method, bucket, key, query_args, headers, expires)
433 | encoded_canonical = encode(self.aws_secret_access_key, canonical_str)
434 |
435 | url = CallingFormat.build_url_base(self.protocol, self.server, self.port, bucket, self.calling_format)
436 |
437 | url += "/%s" % urlquote(key, '/')
438 |
439 | query_args['Signature'] = encoded_canonical
440 | query_args['Expires'] = expires
441 | query_args['AWSAccessKeyId'] = self.aws_access_key_id
442 |
443 | url += "?%s" % query_args_hash_to_string(query_args)
444 |
445 | return url
446 |
447 |
448 | class S3Object:
449 | def __init__(self, data, metadata={}):
450 | self.data = data
451 | self.metadata = metadata
452 |
453 | class Owner:
454 | def __init__(self, id='', display_name=''):
455 | self.id = id
456 | self.display_name = display_name
457 |
458 | class ListEntry:
459 | def __init__(self, key='', last_modified=None, etag='', size=0, storage_class='', owner=None):
460 | self.key = key
461 | self.last_modified = last_modified
462 | self.etag = etag
463 | self.size = size
464 | self.storage_class = storage_class
465 | self.owner = owner
466 |
467 | class CommonPrefixEntry:
468 |     def __init__(self, prefix=''):
469 | self.prefix = prefix
470 |
471 | class Bucket:
472 | def __init__(self, name='', creation_date=''):
473 | self.name = name
474 | self.creation_date = creation_date
475 |
476 | class Response:
477 | def __init__(self, http_response):
478 | self.http_response = http_response
479 | # you have to do this read, even if you don't expect a body.
480 | # otherwise, the next request fails.
481 | self.body = http_response.read()
482 | if http_response.status >= 300 and self.body:
483 | self.message = self.body
484 | else:
485 | self.message = "%03d %s" % (http_response.status, http_response.reason)
486 |
487 |
488 |
489 | class ListBucketResponse(Response):
490 | def __init__(self, http_response):
491 | Response.__init__(self, http_response)
492 | if http_response.status < 300:
493 | handler = ListBucketHandler()
494 | xml.sax.parseString(self.body, handler)
495 | self.entries = handler.entries
496 | self.common_prefixes = handler.common_prefixes
497 | self.name = handler.name
498 | self.marker = handler.marker
499 | self.prefix = handler.prefix
500 | self.is_truncated = handler.is_truncated
501 | self.delimiter = handler.delimiter
502 | self.max_keys = handler.max_keys
503 | self.next_marker = handler.next_marker
504 | else:
505 | self.entries = []
506 |
507 | class ListAllMyBucketsResponse(Response):
508 | def __init__(self, http_response):
509 | Response.__init__(self, http_response)
510 | if http_response.status < 300:
511 | handler = ListAllMyBucketsHandler()
512 | xml.sax.parseString(self.body, handler)
513 | self.entries = handler.entries
514 | else:
515 | self.entries = []
516 |
517 | class GetResponse(Response):
518 | def __init__(self, http_response):
519 | Response.__init__(self, http_response)
520 | response_headers = http_response.msg # older pythons don't have getheaders
521 | metadata = self.get_aws_metadata(response_headers)
522 | self.object = S3Object(self.body, metadata)
523 |
524 | def get_aws_metadata(self, headers):
525 | metadata = {}
526 | for hkey in headers.keys():
527 | if hkey.lower().startswith(METADATA_PREFIX):
528 | metadata[hkey[len(METADATA_PREFIX):]] = headers[hkey]
529 | del headers[hkey]
530 |
531 | return metadata
532 |
533 | class LocationResponse(Response):
534 | def __init__(self, http_response):
535 | Response.__init__(self, http_response)
536 | if http_response.status < 300:
537 | handler = LocationHandler()
538 | xml.sax.parseString(self.body, handler)
539 | self.location = handler.location
540 |
541 | class ListBucketHandler(xml.sax.ContentHandler):
542 | def __init__(self):
543 | self.entries = []
544 | self.curr_entry = None
545 | self.curr_text = ''
546 | self.common_prefixes = []
547 | self.curr_common_prefix = None
548 | self.name = ''
549 | self.marker = ''
550 | self.prefix = ''
551 | self.is_truncated = False
552 | self.delimiter = ''
553 | self.max_keys = 0
554 | self.next_marker = ''
555 | self.is_echoed_prefix_set = False
556 |
557 | def startElement(self, name, attrs):
558 | if name == 'Contents':
559 | self.curr_entry = ListEntry()
560 | elif name == 'Owner':
561 | self.curr_entry.owner = Owner()
562 | elif name == 'CommonPrefixes':
563 | self.curr_common_prefix = CommonPrefixEntry()
564 |
565 |
566 | def endElement(self, name):
567 | if name == 'Contents':
568 | self.entries.append(self.curr_entry)
569 | elif name == 'CommonPrefixes':
570 | self.common_prefixes.append(self.curr_common_prefix)
571 | elif name == 'Key':
572 | self.curr_entry.key = self.curr_text
573 | elif name == 'LastModified':
574 | self.curr_entry.last_modified = self.curr_text
575 | elif name == 'ETag':
576 | self.curr_entry.etag = self.curr_text
577 | elif name == 'Size':
578 | self.curr_entry.size = int(self.curr_text)
579 | elif name == 'ID':
580 | self.curr_entry.owner.id = self.curr_text
581 | elif name == 'DisplayName':
582 | self.curr_entry.owner.display_name = self.curr_text
583 | elif name == 'StorageClass':
584 | self.curr_entry.storage_class = self.curr_text
585 | elif name == 'Name':
586 | self.name = self.curr_text
587 | elif name == 'Prefix' and self.is_echoed_prefix_set:
588 | self.curr_common_prefix.prefix = self.curr_text
589 | elif name == 'Prefix':
590 | self.prefix = self.curr_text
591 | self.is_echoed_prefix_set = True
592 | elif name == 'Marker':
593 | self.marker = self.curr_text
594 | elif name == 'IsTruncated':
595 | self.is_truncated = self.curr_text == 'true'
596 | elif name == 'Delimiter':
597 | self.delimiter = self.curr_text
598 | elif name == 'MaxKeys':
599 | self.max_keys = int(self.curr_text)
600 | elif name == 'NextMarker':
601 | self.next_marker = self.curr_text
602 |
603 | self.curr_text = ''
604 |
605 | def characters(self, content):
606 | self.curr_text += content
607 |
608 |
609 | class ListAllMyBucketsHandler(xml.sax.ContentHandler):
610 | def __init__(self):
611 | self.entries = []
612 | self.curr_entry = None
613 | self.curr_text = ''
614 |
615 | def startElement(self, name, attrs):
616 | if name == 'Bucket':
617 | self.curr_entry = Bucket()
618 |
619 | def endElement(self, name):
620 | if name == 'Name':
621 | self.curr_entry.name = self.curr_text
622 | elif name == 'CreationDate':
623 | self.curr_entry.creation_date = self.curr_text
624 | elif name == 'Bucket':
625 | self.entries.append(self.curr_entry)
626 |
627 | def characters(self, content):
628 | self.curr_text = content
629 |
630 |
631 | class LocationHandler(xml.sax.ContentHandler):
632 | def __init__(self):
633 | self.location = None
634 | self.state = 'init'
635 |
636 | def startElement(self, name, attrs):
637 | if self.state == 'init':
638 | if name == 'LocationConstraint':
639 | self.state = 'tag_location'
640 | self.location = ''
641 | else: self.state = 'bad'
642 | else: self.state = 'bad'
643 |
644 | def endElement(self, name):
645 | if self.state == 'tag_location' and name == 'LocationConstraint':
646 | self.state = 'done'
647 | else: self.state = 'bad'
648 |
649 | def characters(self, content):
650 | if self.state == 'tag_location':
651 | self.location += content
652 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/management/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kylemacfarlane/django-cb-storage-s3/99f82543bd97c54ee0850a154e1cb97957c33ea9/src/cuddlybuddly/storage/s3/management/__init__.py
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/management/commands/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kylemacfarlane/django-cb-storage-s3/99f82543bd97c54ee0850a154e1cb97957c33ea9/src/cuddlybuddly/storage/s3/management/commands/__init__.py
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/management/commands/cb_s3_sync_media.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from optparse import make_option
3 | import os
4 | import re
5 | import sys
6 | from django.conf import settings
7 | from django.core.management.base import BaseCommand
8 | from cuddlybuddly.storage.s3.exceptions import S3Error
9 | from cuddlybuddly.storage.s3.storage import S3Storage
10 |
11 |
12 | output_length = 0
13 | def output(text, options, min_verbosity=1, rtrn=False, nl=False):
14 | if int(options['verbosity']) >= min_verbosity:
15 | global output_length
16 | if rtrn:
17 | if len(text) < output_length:
18 | text = text + ' ' * (output_length - len(text) - 1)
19 | text = '\r' + text
20 | output_length = 0
21 | output_length += len(text)
22 | if nl:
23 | output_length = 0
24 | text = text + '\n'
25 | sys.stdout.write(text)
26 | sys.stdout.flush()
27 |
28 |
29 | def walk(dir, options):
30 | to_sync = []
31 | for root, dirs, files in os.walk(dir):
32 |         for dir in dirs[:]:  # iterate over a copy as we remove from dirs
33 | for pattern in options['exclude']:
34 | if pattern.search(os.path.join(root, dir)):
35 | dirs.remove(dir)
36 | for file in files:
37 | file = os.path.join(root, file)
38 | exclude = False
39 | for pattern in options['exclude']:
40 | if pattern.search(file):
41 | exclude = True
42 | break
43 | if exclude:
44 | continue
45 | # Because the followlinks parameter is only in >= 2.6 we have to
46 | # follow symlinks ourselves.
47 | if os.path.isdir(file) and os.path.islink(file):
48 |                 to_sync = to_sync + walk(file, options)
49 | else:
50 | to_sync.append(file)
51 | return to_sync
52 |
53 |
54 | class Command(BaseCommand):
55 | help = 'Sync folder with your S3 bucket'
56 | option_list = BaseCommand.option_list + (
57 | make_option('-c', '--cache',
58 | action='store_true',
59 | dest='cache',
60 | default=False,
61 | help='Whether or not to check the cache for the modified times'),
62 | make_option('-d', '--dir',
63 | action='store',
64 | dest='dir',
65 | type='string',
66 | default=None,
67 | help='Directory to sync to S3'),
68 | make_option('-e', '--exclude',
69 | action='store',
70 | dest='exclude',
71 | type='string',
72 | default=None,
73 | help='A comma separated list of regular expressions of files and folders to skip'),
74 | make_option('-f', '--force',
75 | action='store_true',
76 | dest='force',
77 | default=False,
78 | help='Upload all files even if the version on S3 is up to date'),
79 | make_option('-p', '--prefix',
80 | action='store',
81 | dest='prefix',
82 | type='string',
83 | default='',
84 | help='Prefix to prepend to uploaded files'),
85 | )
86 |
87 | def handle(self, *args, **options):
88 | if options['dir'] is None:
89 | options['dir'] = settings.MEDIA_ROOT
90 | if options['exclude'] is None:
91 | options['exclude'] = getattr(
92 | settings,
93 | 'CUDDLYBUDDLY_STORAGE_S3_SYNC_EXCLUDE',
94 | ['\.svn$', '\.git$', '\.hg$', 'Thumbs\.db$', '\.DS_Store$']
95 | )
96 | else:
97 | options['exclude'] = options['exclude'].split(',')
98 | exclude = []
99 | for pattern in options['exclude']:
100 | exclude.append(re.compile(pattern))
101 | options['exclude'] = exclude
102 |
103 | files = walk(options['dir'], options)
104 | skipped = uploaded = 0
105 | output(
106 | 'Uploaded: %s, Skipped: %s, Total: %s/%s' % (0, 0, 0, len(files)),
107 | options,
108 | rtrn=True # Needed to correctly calculate padding
109 | )
110 | storage = S3Storage()
111 | for file in files:
112 | s3name = os.path.join(
113 | options['prefix'],
114 | os.path.relpath(file, options['dir'])
115 | )
116 | try:
117 | mtime = storage.modified_time(s3name, force_check=not options['cache'])
118 | except S3Error:
119 | mtime = None
120 | if options['force'] or mtime is None or \
121 | mtime < datetime.fromtimestamp(os.path.getmtime(file)):
122 | if mtime:
123 | storage.delete(s3name)
124 | fh = open(file, 'rb')
125 | output(' Uploading %s...' % s3name, options)
126 | storage.save(s3name, fh)
127 | output('Uploaded %s' % s3name, options, rtrn=True, nl=True)
128 | fh.close()
129 | uploaded += 1
130 | else:
131 | output(
132 | 'Skipped %s because it hasn\'t been modified' % s3name,
133 | options,
134 | min_verbosity=2,
135 | rtrn=True,
136 | nl=True
137 | )
138 | skipped += 1
139 | output(
140 | 'Uploaded: %s, Skipped: %s, Total: %s/%s'
141 | % (uploaded, skipped, uploaded + skipped, len(files)),
142 | options,
143 | rtrn=True
144 | )
145 | output('', options, nl=True)
146 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/management/commands/cb_s3_sync_static.py:
--------------------------------------------------------------------------------
1 | from django.conf import settings
2 | from cuddlybuddly.storage.s3.management.commands.cb_s3_sync_media import \
3 | Command as BaseCommand
4 |
5 |
6 | class Command(BaseCommand):
7 | def handle(self, *args, **options):
8 | if options['dir'] is None:
9 | options['dir'] = getattr(settings, 'STATIC_ROOT', None)
10 | return super(Command, self).handle(*args, **options)
11 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/middleware.py:
--------------------------------------------------------------------------------
1 | try:
2 | from threading import local
3 | except ImportError:
4 | from django.utils._threading_local import local
5 |
6 |
7 | _thread_locals = local()
8 |
9 |
10 | def request_is_secure():
11 | return getattr(_thread_locals, 'cb_request_is_secure', None)
12 |
13 |
14 | class ThreadLocals(object):
15 | def process_request(self, request):
16 | _thread_locals.cb_request_is_secure = request.is_secure()
17 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/models.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kylemacfarlane/django-cb-storage-s3/99f82543bd97c54ee0850a154e1cb97957c33ea9/src/cuddlybuddly/storage/s3/models.py
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/storage.py:
--------------------------------------------------------------------------------
1 | from calendar import timegm
2 | from datetime import datetime
3 | from email.utils import parsedate
4 | from gzip import GzipFile
5 | from importlib import import_module
6 | import mimetypes
7 | import os
8 | import re
9 | try:
10 | from io import BytesIO as StringIO # Python 3
11 | except ImportError:
12 | # Don't use cStringIO as it's not unicode safe
13 | from StringIO import StringIO # Python 2
14 | import sys
15 | try:
16 | from urllib.parse import urljoin # Python 3
17 | except ImportError:
18 | from urlparse import urljoin # Python 2
19 | from django.conf import settings
20 | from django.core.exceptions import ImproperlyConfigured
21 | from django.core.files.base import File
22 | from django.core.files.storage import Storage
23 | from django.utils.encoding import iri_to_uri
24 | from cuddlybuddly.storage.s3 import CallingFormat
25 | from cuddlybuddly.storage.s3.exceptions import S3Error
26 | from cuddlybuddly.storage.s3.lib import AWSAuthConnection
27 | from cuddlybuddly.storage.s3.middleware import request_is_secure
28 |
29 |
30 | ACCESS_KEY_NAME = 'AWS_ACCESS_KEY_ID'
31 | SECRET_KEY_NAME = 'AWS_SECRET_ACCESS_KEY'
32 | HEADERS = 'AWS_HEADERS'
33 |
34 |
35 | class S3Storage(Storage):
36 | """Amazon Simple Storage Service"""
37 |
38 | static = False
39 |
40 | def __init__(self, bucket=None, access_key=None, secret_key=None,
41 | headers=None, calling_format=None, cache=None, base_url=None):
42 | if bucket is None:
43 | bucket = settings.AWS_STORAGE_BUCKET_NAME
44 | if calling_format is None:
45 | calling_format = getattr(settings, 'AWS_CALLING_FORMAT',
46 | CallingFormat.SUBDOMAIN)
47 | self.bucket = bucket
48 |
49 | if not access_key and not secret_key:
50 | access_key, secret_key = self._get_access_keys()
51 |
52 | self.connection = AWSAuthConnection(access_key, secret_key,
53 | calling_format=calling_format)
54 |
55 | default_headers = getattr(settings, HEADERS, [])
56 | # Backwards compatibility for original format from django-storages
57 | if isinstance(default_headers, dict):
58 | default_headers = [('.*', default_headers)]
59 | if headers:
60 | # Headers passed to __init__ take precedence over headers from
61 | # settings file.
62 | default_headers = list(headers) + list(default_headers)
63 | self.headers = []
64 | for value in default_headers:
65 | self.headers.append((re.compile(value[0]), value[1]))
66 |
67 | if cache is not None:
68 | self.cache = cache
69 | else:
70 | cache = getattr(settings, 'CUDDLYBUDDLY_STORAGE_S3_CACHE', None)
71 | if cache is not None:
72 | self.cache = self._get_cache_class(cache)()
73 | else:
74 | self.cache = None
75 |
76 | if base_url is None:
77 | if not self.static:
78 | base_url = settings.MEDIA_URL
79 | else:
80 | base_url = settings.STATIC_URL
81 | self.base_url = base_url
82 |
83 | def _get_cache_class(self, import_path=None):
84 | try:
85 | dot = import_path.rindex('.')
86 | except ValueError:
87 | raise ImproperlyConfigured("%s isn't a cache module." % import_path)
88 | module, classname = import_path[:dot], import_path[dot+1:]
89 | try:
90 | mod = import_module(module)
91 | except ImportError as e:
92 | raise ImproperlyConfigured('Error importing cache module %s: "%s"' % (module, e))
93 | try:
94 | return getattr(mod, classname)
95 | except AttributeError:
96 | raise ImproperlyConfigured('Cache module "%s" does not define a "%s" class.' % (module, classname))
97 |
98 | def _store_in_cache(self, name, response):
99 | size = int(response.getheader('Content-Length'))
100 | date = response.getheader('Last-Modified')
101 | date = timegm(parsedate(date))
102 | self.cache.save(name, size=size, mtime=date)
103 |
104 | def _get_access_keys(self):
105 | access_key = getattr(settings, ACCESS_KEY_NAME, None)
106 | secret_key = getattr(settings, SECRET_KEY_NAME, None)
107 | if (access_key or secret_key) and (not access_key or not secret_key):
108 | access_key = os.environ.get(ACCESS_KEY_NAME)
109 | secret_key = os.environ.get(SECRET_KEY_NAME)
110 |
111 | if access_key and secret_key:
112 | # Both were provided, so use them
113 | return access_key, secret_key
114 |
115 | return None, None
116 |
117 | def _get_connection(self):
118 | return AWSAuthConnection(*self._get_access_keys())
119 |
120 | def _put_file(self, name, content):
121 | name = self._path(name)
122 | placeholder = False
123 | if self.cache:
124 | if not self.cache.exists(name):
125 | self.cache.save(name, 0, 0)
126 | placeholder = True
127 | content_type = mimetypes.guess_type(name)[0] or "application/octet-stream"
128 | headers = {}
129 | for pattern in self.headers:
130 | if pattern[0].match(name):
131 | headers = pattern[1].copy()
132 | break
133 | file_pos = content.tell()
134 | content.seek(0, 2)
135 | content_length = content.tell()
136 | content.seek(0)
137 | gz_cts = getattr(
138 | settings,
139 | 'CUDDLYBUDDLY_STORAGE_S3_GZIP_CONTENT_TYPES',
140 | (
141 | 'text/css',
142 | 'application/javascript',
143 | 'application/x-javascript'
144 | )
145 | )
146 | gz_content = None
147 | if content_length > 1024 and content_type in gz_cts:
148 | gz_content = StringIO()
149 | gzf = GzipFile(mode='wb', fileobj=gz_content)
150 | gzf.write(content.read())
151 | content.seek(0)
152 | gzf.close()
153 | gz_content.seek(0, 2)
154 | gz_content_length = gz_content.tell()
155 | gz_content.seek(0)
156 | if gz_content_length < content_length:
157 | content_length = gz_content_length
158 | headers.update({
159 | 'Content-Encoding': 'gzip'
160 | })
161 | else:
162 | gz_content = None
163 | headers.update({
164 | 'Content-Type': content_type,
165 | 'Content-Length': str(content_length)
166 | })
167 | # httplib in Python < 2.6 doesn't accept file-like objects, while in
168 | # Python >= 2.7 it will try to join a str content object with the
169 | # headers, which results in encoding problems.
170 | if sys.version_info[0] == 2 and sys.version_info[1] < 6:
171 | content_to_send = gz_content.read() if gz_content is not None else content.read()
172 | else:
173 | content_to_send = gz_content if gz_content is not None else content
174 | response = self.connection.put(self.bucket, name, content_to_send, headers)
175 | content.seek(file_pos)
176 | if response.http_response.status != 200:
177 | if placeholder:
178 | self.cache.remove(name)
179 | raise S3Error(response.message)
180 | if self.cache:
181 | date = response.http_response.getheader('Date')
182 | date = timegm(parsedate(date))
183 | self.cache.save(name, size=content_length, mtime=date)
184 |
185 | def _open(self, name, mode='rb'):
186 | remote_file = S3StorageFile(name, self, mode=mode)
187 | return remote_file
188 |
189 | def _read(self, name, start_range=None, end_range=None):
190 | name = self._path(name)
191 | headers, range_ = {}, None
192 | if start_range is not None and end_range is not None:
193 | range_ = '%s-%s' % (start_range, end_range)
194 | elif start_range is not None:
195 | range_ = '%s' % start_range
196 | if range_ is not None:
197 | headers = {'Range': 'bytes=%s' % range_}
198 | response = self.connection.get(self.bucket, name, headers)
199 | valid_responses = [200]
200 | if start_range is not None or end_range is not None:
201 | valid_responses.append(206)
202 | if response.http_response.status not in valid_responses:
203 | raise S3Error(response.message)
204 | headers = response.http_response.msg
205 | data = response.object.data
206 |
207 | if headers.get('Content-Encoding') == 'gzip':
208 | gzf = GzipFile(mode='rb', fileobj=StringIO(data))
209 | data = gzf.read()
210 | gzf.close()
211 |
212 | return data, headers.get('etag', None), headers.get('content-range', None)
213 |
214 | def _save(self, name, content):
215 | self._put_file(name, content)
216 | return name
217 |
218 | def delete(self, name):
219 | name = self._path(name)
220 | response = self.connection.delete(self.bucket, name)
221 | if response.http_response.status != 204:
222 | raise S3Error(response.message)
223 | if self.cache:
224 | self.cache.remove(name)
225 |
226 | def exists(self, name, force_check=False):
227 | if not name:
228 | return False
229 | name = self._path(name)
230 | if self.cache and not force_check:
231 | exists = self.cache.exists(name)
232 | if exists is not None:
233 | return exists
234 | response = self.connection._make_request('HEAD', self.bucket, name)
235 | exists = response.status == 200
236 | if self.cache and exists:
237 | self._store_in_cache(name, response)
238 | return exists
239 |
240 | def size(self, name, force_check=False):
241 | name = self._path(name)
242 | if self.cache and not force_check:
243 | size = self.cache.size(name)
244 | if size is not None:
245 | return size
246 | response = self.connection._make_request('HEAD', self.bucket, name)
247 | content_length = response.getheader('Content-Length')
248 | if self.cache:
249 | self._store_in_cache(name, response)
250 | return content_length and int(content_length) or 0
251 |
252 | def modified_time(self, name, force_check=False):
253 | name = self._path(name)
254 | if self.cache and not force_check:
255 | last_modified = self.cache.modified_time(name)
256 | if last_modified:
257 | return datetime.fromtimestamp(last_modified)
258 | response = self.connection._make_request('HEAD', self.bucket, name)
259 | if response.status == 404:
260 | raise S3Error("Cannot find the file specified: '%s'" % name)
261 | last_modified = timegm(parsedate(response.getheader('Last-Modified')))
262 | if self.cache:
263 | self._store_in_cache(name, response)
264 | return datetime.fromtimestamp(last_modified)
265 |
266 | def url(self, name):
267 | if self.base_url is None:
268 | raise ValueError("This file is not accessible via a URL.")
269 | name = self._path(name)
270 | if request_is_secure():
271 | if hasattr(self.base_url, 'https'):
272 | url = self.base_url.https()
273 | else:
274 | if hasattr(self.base_url, 'match'):
275 | url = self.base_url.match(name)
276 | else:
277 | url = self.base_url
278 | url = url.replace('http://', 'https://')
279 | else:
280 | if hasattr(self.base_url, 'match'):
281 | url = self.base_url.match(name)
282 | else:
283 | url = self.base_url
284 | url = url.replace('https://', 'http://')
285 | return urljoin(url, iri_to_uri(name))
286 |
287 | def listdir(self, path):
288 | path = self._path(path)
289 | if not path.endswith('/'):
290 | path = path+'/'
291 | directories, files = [], []
292 | options = {'prefix': path, 'delimiter': '/'}
293 | response = self.connection.list_bucket(self.bucket, options=options)
294 | for prefix in response.common_prefixes:
295 | directories.append(prefix.prefix.replace(path, '').strip('/'))
296 | for entry in response.entries:
297 | files.append(entry.key.replace(path, ''))
298 | return directories, files
299 |
300 | def _path(self, name):
301 | name = name.replace('\\', '/')
302 | # Because the S3 lib just loves to add slashes
303 | if name.startswith('/'):
304 | name = name[1:]
305 | return name
306 |
307 |
308 | class S3StorageFile(File):
309 | def __init__(self, name, storage, mode):
310 | self.name = name
311 | self._storage = storage
312 | self.mode = mode
313 | self._is_dirty = False
314 | self.file = StringIO()
315 | self.start_range = 0
316 |
317 | @property
318 | def size(self):
319 | if not hasattr(self, '_size'):
320 | self._size = self._storage.size(self.name)
321 | return self._size
322 |
323 | def _empty_read(self):
324 | self.file = StringIO(b'')
325 | return self.file.getvalue()
326 |
327 | def read(self, num_bytes=None):
328 | # Reading past the file size results in a 416 (InvalidRange) error from
329 | # S3, but accessing the size when not using chunked reading causes an
330 | # unnecessary HEAD call.
331 | if self.start_range and self.start_range >= self.size:
332 | return self._empty_read()
333 |
334 | args = []
335 |
336 | if num_bytes:
337 | if self.start_range < 0:
338 | offset = self.size
339 | else:
340 | offset = 0
341 | args = [self.start_range + offset, self.start_range + num_bytes - 1 + offset]
342 | elif self.start_range:
343 | args = [self.start_range, '']
344 |
345 | try:
346 | data, etags, content_range = self._storage._read(self.name, *args)
347 | except S3Error as e:
348 | # Catch InvalidRange for 0 length reads. Perhaps we should be
349 | # catching all kinds of exceptions...
350 | if 'InvalidRange' in '%s' % e:
351 | return self._empty_read()
352 | raise
353 | if content_range is not None:
354 | current_range, size = content_range.split(' ', 1)[1].split('/', 1)
355 | start_range, end_range = current_range.split('-', 1)
356 | self._size, self.start_range = int(size), int(end_range) + 1
357 |
358 | self.file = StringIO(data)
359 | return self.file.getvalue()
360 |
361 | def write(self, content):
362 | if 'w' not in self.mode:
363 | raise AttributeError("File was opened for read-only access.")
364 | self.file = StringIO(content)
365 | self._is_dirty = True
366 |
367 | def close(self):
368 | if self._is_dirty:
369 | self._storage._put_file(self.name, self.file)
370 | self._size = len(self.file.getvalue())
371 | self.file.close()
372 |
373 | def seek(self, pos, mode=0):
374 | self.file.seek(pos, mode)
375 | if mode == 0:
376 | self.start_range = pos
377 | elif mode == 1:
378 | self.start_range += pos
379 | elif mode == 2:
380 | # While S3 does support negative positions, using them makes tell()
381 | # unreliable. Getting size is a pretty fast HEAD anyway.
382 | self.start_range = self.size + pos
383 |
384 | def tell(self):
385 | return self.start_range
386 |
387 |
388 | class S3StorageStatic(S3Storage):
389 | """
390 | For use with ``STATICFILES_STORAGE`` and ``STATIC_URL``.
391 | """
392 | static = True
393 |
--------------------------------------------------------------------------------
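
How ``__init__`` and ``_put_file`` above interpret the settings: ``AWS_HEADERS``
is a list of ``(regex, headers)`` pairs matched against the key, first match
wins, and a plain dict (the original django-storages format) is treated as a
single catch-all pattern. A sketch with illustrative header values::

    AWS_HEADERS = [
        (r'\.(css|js)$', {'Cache-Control': 'max-age=86400'}),
        (r'.*', {'x-amz-acl': 'public-read'}),
    ]

    # Content types gzipped on upload, provided the file is over 1024 bytes
    # and the compressed copy is actually smaller; these are the defaults.
    CUDDLYBUDDLY_STORAGE_S3_GZIP_CONTENT_TYPES = (
        'text/css',
        'application/javascript',
        'application/x-javascript',
    )
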
/src/cuddlybuddly/storage/s3/templatetags/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kylemacfarlane/django-cb-storage-s3/99f82543bd97c54ee0850a154e1cb97957c33ea9/src/cuddlybuddly/storage/s3/templatetags/__init__.py
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/templatetags/s3_tags.py:
--------------------------------------------------------------------------------
1 | from django import template
2 | from django.conf import settings
3 | from cuddlybuddly.storage.s3.utils import CloudFrontURLs
4 |
5 |
6 | register = template.Library()
7 |
8 |
9 | class S3MediaURLNode(template.Node):
10 | def __init__(self, static, path, as_var=None):
11 | self.static = static
12 | self.path = template.Variable(path)
13 | self.as_var = as_var
14 |
15 | def render(self, context):
16 | path = self.path.resolve(context)
17 | if self.static:
18 | base_url = settings.STATIC_URL
19 | else:
20 | base_url = settings.MEDIA_URL
21 | if not isinstance(base_url, CloudFrontURLs):
22 | base_url = CloudFrontURLs(base_url)
23 | url = base_url.get_url(path)
24 |
25 | if self.as_var:
26 | context[self.as_var] = url
27 | return ''
28 | else:
29 | return url
30 |
31 |
32 | def do_s3_media_url(parser, token, static=False):
33 | """
34 | This is for use with ``CloudFrontURLs`` and will return the appropriate url
35 | if a match is found.
36 |
37 | Usage::
38 |
39 | {% s3_media_url path %}
40 |
41 |
42 | For ``HTTPS``, the ``cuddlybuddly.storage.s3.middleware.ThreadLocals``
43 | middleware must also be used.
44 | """
45 |
46 | split_token = token.split_contents()
47 | vars = []
48 | as_var = False
49 | for k, v in enumerate(split_token[1:]):
50 | if v == 'as':
51 | try:
52 | while len(vars) < 1:
53 | vars.append(None)
54 | vars.append(split_token[k+2])
55 | as_var = True
56 | except IndexError:
57 | raise template.TemplateSyntaxError(
58 | "%r tag requires a variable name to attach to" \
59 | % split_token[0]
60 | )
61 | break
62 | else:
63 | vars.append(v)
64 |
65 | if (not as_var and len(vars) not in (1,)) \
66 | or (as_var and len(vars) not in (2,)):
67 | raise template.TemplateSyntaxError(
68 | "%r tag requires a path or url" \
69 | % token.contents.split()[0]
70 | )
71 |
72 | return S3MediaURLNode(static, *vars)
73 |
74 |
75 | do_s3_media_url = register.tag('s3_media_url', do_s3_media_url)
76 |
77 |
78 | def do_s3_static_url(parser, token):
79 | """
80 | This is the same as ``s3_media_url`` but defaults to ``STATIC_URL`` instead.
81 | """
82 | return do_s3_media_url(parser, token, static=True)
83 |
84 |
85 | do_s3_static_url = register.tag('s3_static_url', do_s3_static_url)
86 |
--------------------------------------------------------------------------------
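
A minimal rendering sketch, mirroring the test suite's ``render_template``
helper; the path is hypothetical::

    from django.template import Context, Template

    # The tag resolves the path against MEDIA_URL (STATIC_URL for
    # s3_static_url), returning a CloudFront pattern match when one applies.
    template = Template(
        '{% load s3_tags %}'
        '{% s3_media_url "css/style.css" as url %}{{ url }}'
    )
    print(template.render(Context()))
    # e.g. http://yourbucket.s3.amazonaws.com/css/style.css
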
/src/cuddlybuddly/storage/s3/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kylemacfarlane/django-cb-storage-s3/99f82543bd97c54ee0850a154e1cb97957c33ea9/src/cuddlybuddly/storage/s3/tests/__init__.py
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/tests/test_s3.py:
--------------------------------------------------------------------------------
1 | import base64
2 | from datetime import datetime, timedelta
3 | try:
4 | import http.client as httplib # Python 3
5 | except ImportError:
6 | import httplib # Python 2
7 | import os
8 | try:
9 | from io import BytesIO as StringIO # Python 3
10 | except ImportError:
11 | from StringIO import StringIO # Python 2
12 | from time import sleep
13 | try:
14 | from urllib import parse as urlparse # Python 3
15 | except ImportError:
16 | import urlparse # Python 2
17 | from zipfile import ZipFile
18 | from django.conf import settings
19 | from django.core.files.base import ContentFile
20 | from django.core.management import call_command
21 | from django.forms.widgets import Media
22 | from django.template import Context, Template, TemplateSyntaxError
23 | from django.test import TestCase
24 | from django.test.utils import override_settings
25 | from django.utils.encoding import force_text
26 | from django.utils.http import urlquote
27 | from cuddlybuddly.storage.s3 import lib
28 | from cuddlybuddly.storage.s3.exceptions import S3Error
29 | from cuddlybuddly.storage.s3.storage import S3Storage
30 | from cuddlybuddly.storage.s3.utils import CloudFrontURLs, create_signed_url
31 |
32 |
33 | default_storage = S3Storage()
34 |
35 |
36 | MEDIA_URL = settings.MEDIA_URL
37 | if not MEDIA_URL.endswith('/'):
38 | MEDIA_URL = MEDIA_URL+'/'
39 |
40 | DUMMY_IMAGE = base64.b64decode(b'/9j/4AAQSkZJRgABAQEAYABgAAD/2wBDAAIBAQEBAQIBAQECAgICAgQDAgICAgUEBAMEBgUGBgYF\nBgYGBwkIBgcJBwYGCAsICQoKCgoKBggLDAsKDAkKCgr/2wBDAQICAgICAgUDAwUKBwYHCgoKCgoK\nCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgr/wAARCAAKAA8DASIA\nAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQA\nAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3\nODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWm\np6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEA\nAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSEx\nBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElK\nU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3\nuLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD3PxT8\nZP23oP8AggV47sNP/Z28PS+EF8K+J0HxAf4typqaW/8Aa97mcWH9mn515UR/aeQo+YZwPWP2tPCv\nxF/afv8A4g+AvAfhqWwl8I/tpxwXdz4XiuBNeWw+GVvL9ouzvcbvNvY4sqETbHANu/LN02saXph/\n4N6/GulnToPszeC/Ewa38lfLIOrXhI24xya5n9p+8vNA1v4jXGhXctlJd/tsJ9qe0kMZm/4tfa/f\nK43fcTr/AHF9BQB//9k=\n')
41 |
42 |
43 | class UnicodeContentFile(ContentFile):
44 | """
45 | A version of ContentFile that never uses cStringIO so that it is always
46 | unicode compatible.
47 | """
48 | def __init__(self, content):
49 | super(ContentFile, self).__init__(StringIO(content))
50 | self.size = len(content)
51 |
52 |
53 | class S3StorageTests(TestCase):
54 | def run_test(self, filename, content=b'Lorem ipsum dolor sit amet'):
55 | content = UnicodeContentFile(content)
56 | filename = default_storage.save(filename, content)
57 | self.assertTrue(default_storage.exists(filename))
58 |
59 | self.assertEqual(default_storage.size(filename), content.size)
60 | now = datetime.now()
61 | delta = timedelta(minutes=5)
62 | mtime = default_storage.modified_time(filename)
63 | self.assertTrue(mtime > (now - delta))
64 | self.assertTrue(mtime < (now + delta))
65 | file = default_storage.open(filename)
66 | self.assertEqual(file.size, content.size)
67 | fileurl = force_text(file).replace('\\', '/')
68 | fileurl = urlquote(fileurl, '/')
69 | if fileurl.startswith('/'):
70 | fileurl = fileurl[1:]
71 |
72 | self.assertEqual(
73 | MEDIA_URL+fileurl,
74 | default_storage.url(filename)
75 | )
76 | file.close()
77 |
78 | default_storage.delete(filename)
79 | self.assertTrue(not default_storage.exists(filename))
80 |
81 | def test_absolute_path(self):
82 | self.run_test('/testsdir/file1.txt')
83 |
84 | def test_relative_path(self):
85 | self.run_test('testsdir/file2.txt')
86 |
87 | def test_unicode(self):
88 | self.run_test(u'testsdir/\u00E1\u00E9\u00ED\u00F3\u00FA.txt')
89 |
90 | def test_byte_contents(self):
91 | self.run_test('testsdir/filebytes.jpg', DUMMY_IMAGE)
92 |
93 | def test_filename_with_spaces(self):
94 | self.run_test('testsdir/filename with spaces.txt')
95 |
96 | def test_byte_contents_when_closing_file(self):
97 | filename = u'filebytes\u00A3.jpg'
98 | file = default_storage.open(filename, 'wb')
99 | file.write(DUMMY_IMAGE)
100 | file.close()
101 | self.assertEqual(default_storage.size(filename), file.size)
102 | default_storage.delete(filename)
103 | self.assertTrue(not default_storage.exists(filename))
104 |
105 | def test_ranged_read(self):
106 | filename = u'fileranged.jpg'
107 | file = default_storage.open(filename, 'wb')
108 | file.write(DUMMY_IMAGE)
109 | file.close()
110 | self.assertEqual(default_storage.size(filename), file.size)
111 | self.assertEqual(len(default_storage.open(filename).read(128)), 128)
112 | default_storage.delete(filename)
113 | self.assertTrue(not default_storage.exists(filename))
114 |
115 | def test_seek(self):
116 | filename = u'fileseek.jpg'
117 | file = default_storage.open(filename, 'wb')
118 | file.write(DUMMY_IMAGE)
119 | file.close()
120 | self.assertEqual(default_storage.size(filename), file.size)
121 |
122 | # Recreation of how PIL detects JPEGs.
123 | file = default_storage.open(filename)
124 | prefix = file.read(16)
125 | file.seek(0)
126 | to_compare = file.read(1)[0]
127 | if not isinstance(to_compare, int):
128 | to_compare = ord(to_compare) # Python 2
129 | self.assertEqual(to_compare, 255)
130 | file.close()
131 |
132 | default_storage.delete(filename)
133 | self.assertTrue(not default_storage.exists(filename))
134 |
135 | def test_write_to_file(self):
136 | filename = 'file6.txt'
137 | default_storage.save(filename, UnicodeContentFile(b'Lorem ipsum dolor sit amet'))
138 | self.assertTrue(default_storage.exists(filename))
139 |
140 | file = default_storage.open(filename, 'w')
141 | self.assertEqual(file.size, 26)
142 |
143 | file.write(b'Lorem ipsum')
144 | file.close()
145 | self.assertEqual(file.size, 11)
146 |
147 | default_storage.delete(filename)
148 | self.assertTrue(not default_storage.exists(filename))
149 |
150 | def run_listdir_test(self, folder):
151 | content = ('testsdir/file3.txt', 'testsdir/file4.txt',
152 | 'testsdir/sub/file5.txt')
153 | for file in content:
154 | default_storage.save(file, UnicodeContentFile(b'Lorem ipsum dolor sit amet'))
155 | self.assertTrue(default_storage.exists(file))
156 |
157 | dirs, files = default_storage.listdir(folder)
158 | self.assertEqual(dirs, ['sub'])
159 | self.assertEqual(files, ['file3.txt', 'file4.txt'])
160 | if not folder.endswith('/'):
161 | folder = folder+'/'
162 | dirs, files = default_storage.listdir(folder+dirs[0])
163 | self.assertEqual(dirs, [])
164 | self.assertEqual(files, ['file5.txt'])
165 |
166 | for file in content:
167 | default_storage.delete(file)
168 | self.assertTrue(not default_storage.exists(file))
169 |
170 | def test_listdir_absolute_path(self):
171 | self.run_listdir_test('/testsdir')
172 |
173 | def test_listdir_relative_path(self):
174 | self.run_listdir_test('testsdir')
175 |
176 | def test_listdir_ending_slash(self):
177 | self.run_listdir_test('testsdir/')
178 |
179 | def test_gzip(self):
180 | ct_backup = getattr(settings, 'CUDDLYBUDDLY_STORAGE_S3_GZIP_CONTENT_TYPES', None)
181 | settings.CUDDLYBUDDLY_STORAGE_S3_GZIP_CONTENT_TYPES = (
182 | 'text/css',
183 | 'application/javascript',
184 | 'application/x-javascript'
185 | )
186 |
187 | filename = 'testsdir/filegzip.css'
188 | file = UnicodeContentFile(b'Lorem ipsum ' * 512)
189 | self.assertEqual(file.size, 6144)
190 | default_storage.save(filename, file)
191 | self.assertEqual(default_storage.size(filename), 62)
192 |
193 | file2 = default_storage.open(filename)
194 | self.assertEqual(file2.read(), b'Lorem ipsum ' * 512, 'Failed to read Gzipped content')
195 | file2.close()
196 |
197 | default_storage.delete(filename)
198 |
199 | if ct_backup is not None:
200 | settings.CUDDLYBUDDLY_STORAGE_S3_GZIP_CONTENT_TYPES = ct_backup
201 |
202 | def test_exists_on_empty_path(self):
203 | self.assertTrue(not default_storage.exists(''))
204 | self.assertTrue(not default_storage.exists(None))
205 |
206 | def test_modified_time_on_non_existent_file(self):
207 | self.assertRaises(
208 | S3Error,
209 | default_storage.modified_time,
210 | 'this/file/better/not/exist'
211 | )
212 |
213 | # Turn off gzip to make file sizes more predictable
214 | @override_settings(CUDDLYBUDDLY_STORAGE_S3_GZIP_CONTENT_TYPES=())
215 | def test_chunked_read(self):
216 | filename = 'testsdir/filechunked.txt'
217 | file_ = UnicodeContentFile(b'Lorem ipsum ' * 200)
218 | self.assertEqual(file_.size, 2400)
219 | filename = default_storage.save(filename, file_)
220 |
221 | file_ = default_storage.open(filename)
222 | for i, data in enumerate(file_.chunks(1024)):
223 | if i == 3:
224 | length = 0
225 | elif i == 2:
226 | length = 352
227 | else:
228 | length = 1024
229 | self.assertEqual(len(data), length)
230 |
231 | default_storage.delete(filename)
232 |
233 | # Now for a 0 length read
234 | filename = 'testsdir/filechunkedzerolength.txt'
235 | file_ = UnicodeContentFile(b'')
236 | self.assertEqual(file_.size, 0)
237 | filename = default_storage.save(filename, file_)
238 |
239 | file_ = default_storage.open(filename)
240 | for c in file_.chunks(1024):
241 | self.assertEqual(len(c), 0)
242 |
243 | default_storage.delete(filename)
244 |
245 | def test_chunked_zipfile_read(self):
246 | """
247 | A zip file's central directory is located at the end of the file and
248 | ZipFile.infolist will try to read chunks from the end before falling
249 | back to reading the whole file.
250 | """
251 | filename = 'testsdir/filechunked.zip'
252 | file_ = StringIO()
253 | zip_ = ZipFile(file_, 'a')
254 | zip_.writestr('test.txt', 'Lorem ipsum ' * 512)
255 | zip_.close()
256 | default_storage.save(filename, file_)
257 |
258 | file2 = default_storage.open(filename)
259 | zip_ = ZipFile(file2)
260 | self.assertEqual(
261 | [i.filename for i in zip_.infolist()],
262 | ['test.txt']
263 | )
264 | file2.close()
265 |
266 | default_storage.delete(filename)
267 |
268 |
269 | class SignedURLTests(TestCase):
270 | def setUp(self):
271 | self.conn = lib.AWSAuthConnection(
272 | settings.AWS_ACCESS_KEY_ID,
273 | settings.AWS_SECRET_ACCESS_KEY
274 | )
275 | self.key = getattr(settings, 'CUDDLYBUDDLY_STORAGE_S3_KEY_PAIR', None)
276 | settings.CUDDLYBUDDLY_STORAGE_S3_KEY_PAIR = ('PK12345EXAMPLE',
277 | """-----BEGIN RSA PRIVATE KEY-----
278 | MIICXQIBAAKBgQDA7ki9gI/lRygIoOjV1yymgx6FYFlzJ+z1ATMaLo57nL57AavW
279 | hb68HYY8EA0GJU9xQdMVaHBogF3eiCWYXSUZCWM/+M5+ZcdQraRRScucmn6g4EvY
280 | 2K4W2pxbqH8vmUikPxir41EeBPLjMOzKvbzzQy9e/zzIQVREKSp/7y1mywIDAQAB
281 | AoGABc7mp7XYHynuPZxChjWNJZIq+A73gm0ASDv6At7F8Vi9r0xUlQe/v0AQS3yc
282 | N8QlyR4XMbzMLYk3yjxFDXo4ZKQtOGzLGteCU2srANiLv26/imXA8FVidZftTAtL
283 | viWQZBVPTeYIA69ATUYPEq0a5u5wjGyUOij9OWyuy01mbPkCQQDluYoNpPOekQ0Z
284 | WrPgJ5rxc8f6zG37ZVoDBiexqtVShIF5W3xYuWhW5kYb0hliYfkq15cS7t9m95h3
285 | 1QJf/xI/AkEA1v9l/WN1a1N3rOK4VGoCokx7kR2SyTMSbZgF9IWJNOugR/WZw7HT
286 | njipO3c9dy1Ms9pUKwUF46d7049ck8HwdQJARgrSKuLWXMyBH+/l1Dx/I4tXuAJI
287 | rlPyo+VmiOc7b5NzHptkSHEPfR9s1OK0VqjknclqCJ3Ig86OMEtEFBzjZQJBAKYz
288 | 470hcPkaGk7tKYAgP48FvxRsnzeooptURW5E+M+PQ2W9iDPPOX9739+Xi02hGEWF
289 | B0IGbQoTRFdE4VVcPK0CQQCeS84lODlC0Y2BZv2JxW3Osv/WkUQ4dslfAQl1T303
290 | 7uwwr7XTroMv8dIFQIPreoPhRKmd/SbJzbiKfS/4QDhU
291 | -----END RSA PRIVATE KEY-----""")
292 | self.media_url = settings.MEDIA_URL
293 | default_storage.base_url = settings.MEDIA_URL = CloudFrontURLs(
294 | 'http://%s.s3.amazonaws.com/' % settings.AWS_STORAGE_BUCKET_NAME,
295 | patterns={'^horizon.jpg': 'http://d604721fxaaqy9.cloudfront.net'}
296 | )
297 |
298 | def tearDown(self):
299 | if self.key is not None:
300 | settings.CUDDLYBUDDLY_STORAGE_S3_KEY_PAIR = self.key
301 | default_storage.base_url = settings.MEDIA_URL = self.media_url
302 |
303 | def get_url(self, url):
304 | url = urlparse.urlparse(url)
305 | if url.scheme == 'https':
306 | conn = httplib.HTTPSConnection(url.netloc)
307 | else:
308 | conn = httplib.HTTPConnection(url.netloc)
309 | path = url.path
310 | if url.query:
311 | path = path+'?'+url.query
312 | conn.request('GET', path)
313 | return conn.getresponse()
314 |
315 | def run_test_signed_url(self, filename):
316 | response = self.conn.put(
317 | settings.AWS_STORAGE_BUCKET_NAME,
318 | filename,
319 | 'Lorem ipsum dolor sit amet.',
320 | {'x-amz-acl': 'private'}
321 | )
322 | self.assertEqual(response.http_response.status, 200, 'put with a string argument')
323 | response = self.get_url(default_storage.url(filename))
324 | self.assertEqual(response.status, 403)
325 |
326 | signed_url = create_signed_url(filename, expires=5, secure=True)
327 | response = self.get_url(signed_url)
328 | self.assertEqual(
329 | response.status,
330 | 200,
331 | 'If this is failing, try resyncing your computer\'s clock.'
332 | )
333 | sleep(6)
334 | response = self.get_url(signed_url)
335 | self.assertEqual(
336 | response.status,
337 | 403,
338 | 'If this is failing, try resyncing your computer\'s clock.'
339 | )
340 |
341 | default_storage.delete(filename)
342 | return signed_url
343 |
344 | def test_signed_url(self):
345 | self.run_test_signed_url('testprivatefile.txt')
346 |
347 | def test_signed_url_with_spaces(self):
348 | filename = 'test private file with spaces.txt'
349 | signed_url = self.run_test_signed_url('test private file with spaces.txt')
350 | self.assertTrue(filename.replace(' ', '+') not in signed_url)
351 | self.assertTrue(filename.replace(' ', '%20') in signed_url)
352 |
353 | def test_signed_url_with_unicode(self):
354 | self.run_test_signed_url(u'testprivatefile\u00E1\u00E9\u00ED\u00F3\u00FA.txt')
355 |
356 | def test_signed_url_in_subdir(self):
357 | self.run_test_signed_url('testdirs/testprivatefile.txt')
358 |
359 | def test_signed_url_in_subdir_with_unicode(self):
360 | self.run_test_signed_url(u'testdirs/testprivatefile\u00E1\u00E9\u00ED\u00F3\u00FA.txt')
361 |
362 | def test_signed_url_missing_file(self):
363 | signed_url = create_signed_url('testprivatemissing.txt', expires=5, secure=True)
364 | response = self.get_url(signed_url)
365 | self.assertEqual(response.status, 404)
366 |
367 | def test_private_cloudfront(self):
368 | signed_url = create_signed_url('horizon.jpg?large=yes&license=yes', secure=False, private_cloudfront=True, expires_at=1258237200)
369 | self.assertEqual(
370 | signed_url,
371 | 'http://d604721fxaaqy9.cloudfront.net/horizon.jpg?large=yes&license=yes&Expires=1258237200&Signature=Nql641NHEUkUaXQHZINK1FZ~SYeUSoBJMxjdgqrzIdzV2gyEXPDNv0pYdWJkflDKJ3xIu7lbwRpSkG98NBlgPi4ZJpRRnVX4kXAJK6tdNx6FucDB7OVqzcxkxHsGFd8VCG1BkC-Afh9~lOCMIYHIaiOB6~5jt9w2EOwi6sIIqrg_&Key-Pair-Id=PK12345EXAMPLE'
372 | )
373 |
374 | def test_encoding(self):
375 | signed_url = create_signed_url('it\'s/a/test.jpg', secure=False, private_cloudfront=True, expires_at=1258237200)
376 | self.assertTrue('/it\'s/a/test.jpg?' not in signed_url)
377 | self.assertTrue('/it%27s/a/test.jpg?' in signed_url)
378 |
379 |
380 | class TemplateTagsTests(TestCase):
381 | def render_template(self, source, context=None):
382 | if not context:
383 | context = {}
384 | context = Context(context)
385 | source = '{% load s3_tags %}' + source
386 | return Template(source).render(context)
387 |
388 | def test_bad_values(self):
389 | tests = (
390 | '{% s3_media_url %}',
391 | '{% s3_media_url "a" as %}',
392 | )
393 | for test in tests:
394 | self.assertRaises(TemplateSyntaxError, self.render_template, test)
395 |
396 | def test_good_values(self):
397 | tests = {
398 | '{% s3_media_url "test/file.txt" %}':
399 | 'test/file.txt',
400 | '{% s3_media_url "test/file2.txt" as var %}':
401 | '',
402 | '{% s3_media_url "test/file2.txt" as var %}{{ var }}':
403 | 'test/file2.txt',
404 | '{% s3_media_url file %}':
405 | ('test/file3.txt', {'file': 'test/file3.txt'}),
406 | '{% s3_media_url file as var %}{{ var }}':
407 | ('test/file4.txt', {'file': 'test/file4.txt'}),
408 | '{% s3_media_url "test/file%20quote.txt" %}':
409 | 'test/file%20quote.txt',
410 | '{% s3_media_url "test/file quote.txt" %}':
411 | 'test/file%20quote.txt',
412 | u'{% s3_media_url "test/fil\u00E9.txt" %}':
413 | 'test/fil%C3%A9.txt',
414 | '{% s3_media_url "test/fil%C3%A9.txt" %}':
415 | 'test/fil%C3%A9.txt',
416 | }
417 | for name, val in tests.items():
418 | if type(val).__name__ in ('str', 'unicode'):
419 | val = (val, None)
420 | self.assertEqual(self.render_template(name, val[1]),
421 | urlparse.urljoin(settings.MEDIA_URL, val[0]) if val[0] else '')
422 |
423 |
424 | class CommandTests(TestCase):
425 | def setUp(self):
426 | self.backup_exclude = getattr(
427 | settings,
428 | 'CUDDLYBUDDLY_STORAGE_S3_SYNC_EXCLUDE',
429 | None
430 | )
431 | settings.CUDDLYBUDDLY_STORAGE_S3_SYNC_EXCLUDE = [r'\.svn$', r'Thumbs\.db$']
432 | self.folder = 'cbs3testsync'
433 | self.basepath = os.path.join(settings.MEDIA_ROOT, self.folder)
434 | if not os.path.exists(self.basepath):
435 | os.makedirs(self.basepath)
436 | self.files = {
437 | 'test1.txt': b'Lorem',
438 | 'test2.txt': b'Ipsum',
439 | 'test3.txt': b'Dolor'
440 | }
441 | self.exclude_files = {
442 | '.svn/test4.txt': b'Lorem',
443 | 'Thumbs.db': b'Ipsum'
444 | }
445 | self.created_paths = []
446 | for files in (self.files, self.exclude_files):
447 | for filename, contents in files.items():
448 | path = os.path.join(self.basepath, os.path.split(filename)[0])
449 | if not os.path.exists(path):
450 | self.created_paths.append(path)
451 | os.makedirs(path)
452 | fh = open(os.path.join(self.basepath, filename), 'wb')
453 | fh.write(contents)
454 | fh.close()
455 | self.created_paths.append(self.basepath)
456 |
457 | def tearDown(self):
458 | for files in (self.files, self.exclude_files):
459 | for file in files.keys():
460 | try:
461 | os.remove(os.path.join(self.basepath, file))
462 | except OSError:
463 | pass
464 | for dir in self.created_paths:
465 | try:
466 | os.rmdir(dir)
467 | except OSError:
468 | pass
469 | if self.backup_exclude is not None:
470 | settings.CUDDLYBUDDLY_STORAGE_S3_SYNC_EXCLUDE = self.backup_exclude
471 |
472 | def test_sync(self):
473 | for file in self.files.keys():
474 | self.assertTrue(not default_storage.exists(
475 | os.path.join(self.folder, file))
476 | )
477 | call_command(
478 | 'cb_s3_sync_media',
479 | verbosity=0,
480 | dir=self.basepath,
481 | prefix=self.folder
482 | )
483 | for file in self.files.keys():
484 | self.assertTrue(default_storage.exists(
485 | os.path.join(self.folder, file))
486 | )
487 | for file in self.exclude_files.keys():
488 | self.assertTrue(not default_storage.exists(
489 | os.path.join(self.folder, file))
490 | )
491 |
492 | modified_times = {}
493 | for file in self.files.keys():
494 | modified_times[file] = default_storage.modified_time(
495 | os.path.join(self.folder, file)
496 | )
497 |
498 | call_command(
499 | 'cb_s3_sync_media',
500 | verbosity=0,
501 | dir=self.basepath,
502 | prefix=self.folder
503 | )
504 | for file in self.files.keys():
505 | self.assertEqual(
506 | modified_times[file],
507 | default_storage.modified_time(os.path.join(self.folder, file)),
508 | 'If this is failing, try resyncing your computer\'s clock.'
509 | )
510 |
511 | call_command(
512 | 'cb_s3_sync_media',
513 | verbosity=0,
514 | dir=self.basepath,
515 | prefix=self.folder,
516 | force=True
517 | )
518 | for file in self.files.keys():
519 | self.assertTrue(
520 | modified_times[file] < \
521 | default_storage.modified_time(os.path.join(self.folder, file))
522 | )
523 |
524 | for file in self.files.keys():
525 | default_storage.delete(os.path.join(self.folder, file))
526 |
527 |
528 | class MediaMonkeyPatchTest(TestCase):
529 | def test_media_monkey_patch(self):
530 | media = Media()
531 | media.add_js((
532 | '/admin/test1.js',
533 | 'admin/test2.js',
534 | 'http://example.com/admin/test3.js',
535 | '//example.com/admin/test3.js'
536 | ))
537 | media.add_css({
538 | 'all': (
539 | '/admin/test1.css',
540 | 'admin/test2.css',
541 | 'http://example.com/admin/test2.css',
542 | '//example.com/admin/test2.css'
543 | )
544 | })
545 |
546 | no_monkey = """
547 |
548 |
549 |
550 |
551 |
552 |
553 |
554 |
555 | """.strip()
556 | monkey = """
557 |
558 |
559 |
560 |
561 |
562 |
563 |
564 |
565 | """.strip()
566 |
567 | with self.settings(STATIC_URL='/static/'):
568 | self.assertEqual(media.render(), no_monkey)
569 |
570 | with self.settings(
571 | STATIC_URL=CloudFrontURLs('http://notthis.com/', patterns={
572 | '^admin': 'http://this.com/static/'
573 | })
574 | ):
575 | self.assertEqual(media.render(), monkey)
576 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/tests/test_s3test.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # This software code is made available "AS IS" without warranties of any
4 | # kind. You may copy, display, modify and redistribute the software
5 | # code either by itself or as incorporated into your code; provided that
6 | # you do not remove any proprietary notices. Your use of this software
7 | # code is at your own risk and you waive any claim against Amazon
8 | # Digital Services, Inc. or its affiliates with respect to your use of
9 | # this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
10 | # affiliates.
11 |
12 | # Incorporated Django settings.
13 | #
14 | # 409 error fix - you can't create and delete the same bucket on US and EU
15 | # servers within a short time. It now appends the location to the bucket name.
16 | #
17 | # (c) 2009 Kyle MacFarlane
18 |
19 | import unittest
20 | try:
21 | import http.client as httplib # Python 3
22 | except ImportError:
23 | import httplib # Python 2
24 | from django.conf import settings
25 | from cuddlybuddly.storage.s3 import lib as S3
26 |
27 | AWS_ACCESS_KEY_ID = settings.AWS_ACCESS_KEY_ID
28 | AWS_SECRET_ACCESS_KEY = settings.AWS_SECRET_ACCESS_KEY
29 |
30 | # for subdomains (bucket.s3.amazonaws.com),
31 | # the bucket name must be lowercase since DNS is case-insensitive
32 | BUCKET_NAME = "%s-test-bucket" % AWS_ACCESS_KEY_ID.lower()
33 |
34 |
35 | class TestAWSAuthConnection(unittest.TestCase):
36 | def setUp(self):
37 | self.conn = S3.AWSAuthConnection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
38 |
39 | # test all operations for both regular and vanity domains
40 | # regular: http://s3.amazonaws.com/bucket/key
41 | # subdomain: http://bucket.s3.amazonaws.com/key
42 | # testing pure vanity domains (http://<vanity domain>/key) is not covered here
43 | # but is possible with some additional setup (set the server in @conn to your vanity domain)
44 |
45 | def test_subdomain_default(self):
46 | self.run_tests(S3.CallingFormat.SUBDOMAIN, S3.Location.DEFAULT)
47 |
48 | def test_subdomain_eu(self):
49 | self.run_tests(S3.CallingFormat.SUBDOMAIN, S3.Location.EU)
50 |
51 | def test_path_default(self):
52 | self.run_tests(S3.CallingFormat.PATH, S3.Location.DEFAULT)
53 |
54 |
55 | def run_tests(self, calling_format, location):
56 | bucket_name = BUCKET_NAME+str(location).lower()
57 | self.conn.calling_format = calling_format
58 |
59 | response = self.conn.create_located_bucket(bucket_name, location)
60 | self.assertEqual(response.http_response.status, 200, 'create bucket')
61 |
62 | response = self.conn.list_bucket(bucket_name)
63 | self.assertEqual(response.http_response.status, 200, 'list bucket')
64 | self.assertEqual(len(response.entries), 0, 'bucket is empty')
65 |
66 | text = b'this is a test'
67 | key = 'example.txt'
68 |
69 | response = self.conn.put(bucket_name, key, text)
70 | self.assertEqual(response.http_response.status, 200, 'put with a string argument')
71 |
72 | response = \
73 | self.conn.put(
74 | bucket_name,
75 | key,
76 | S3.S3Object(text, {'title': 'title'}),
77 | {'Content-Type': 'text/plain'})
78 |
79 | self.assertEqual(response.http_response.status, 200, 'put with complex argument and headers')
80 |
81 | response = self.conn.get(bucket_name, key)
82 | self.assertEqual(response.http_response.status, 200, 'get object')
83 | self.assertEqual(response.object.data, text, 'got right data')
84 | self.assertEqual(response.object.metadata, { 'title': 'title' }, 'metadata is correct')
85 | self.assertEqual(int(response.http_response.getheader('Content-Length')), len(text), 'got content-length header')
86 |
87 | title_with_spaces = " \t title with leading and trailing spaces "
88 | response = \
89 | self.conn.put(
90 | bucket_name,
91 | key,
92 | S3.S3Object(text, {'title': title_with_spaces}),
93 | {'Content-Type': 'text/plain'})
94 |
95 | self.assertEqual(response.http_response.status, 200, 'put with headers with spaces')
96 |
97 | response = self.conn.get(bucket_name, key)
98 | self.assertEqual(response.http_response.status, 200, 'get object')
99 | self.assertEqual(
100 | response.object.metadata,
101 | { 'title': title_with_spaces.strip() },
102 | 'metadata with spaces is correct')
103 |
104 | # delimited list tests
105 | inner_key = 'test/inner.txt'
106 | last_key = 'z-last-key.txt'
107 | response = self.conn.put(bucket_name, inner_key, text)
108 | self.assertEqual(response.http_response.status, 200, 'put inner key')
109 |
110 | response = self.conn.put(bucket_name, last_key, text)
111 | self.assertEqual(response.http_response.status, 200, 'put last key')
112 |
113 | response = self.do_delimited_list(bucket_name, False, {'delimiter': '/'}, 2, 1, 'root list')
114 |
115 | response = self.do_delimited_list(bucket_name, True, {'max-keys': 1, 'delimiter': '/'}, 1, 0, 'root list with max keys of 1', 'example.txt')
116 |
117 | response = self.do_delimited_list(bucket_name, True, {'max-keys': 2, 'delimiter': '/'}, 1, 1, 'root list with max keys of 2, page 1', 'test/')
118 |
119 | marker = response.next_marker
120 |
121 | response = self.do_delimited_list(bucket_name, False, {'marker': marker, 'max-keys': 2, 'delimiter': '/'}, 1, 0, 'root list with max keys of 2, page 2')
122 |
123 | response = self.do_delimited_list(bucket_name, False, {'prefix': 'test/', 'delimiter': '/'}, 1, 0, 'test/ list')
124 |
125 | response = self.conn.delete(bucket_name, inner_key)
126 | self.assertEqual(response.http_response.status, 204, 'delete %s' % inner_key)
127 |
128 | response = self.conn.delete(bucket_name, last_key)
129 | self.assertEqual(response.http_response.status, 204, 'delete %s' % last_key)
130 |
131 |
132 | weird_key = '&=//%# ++++'
133 |
134 | response = self.conn.put(bucket_name, weird_key, text)
135 | self.assertEqual(response.http_response.status, 200, 'put weird key')
136 |
137 | response = self.conn.get(bucket_name, weird_key)
138 | self.assertEqual(response.http_response.status, 200, 'get weird key')
139 |
140 | response = self.conn.get_acl(bucket_name, key)
141 | self.assertEqual(response.http_response.status, 200, 'get acl')
142 |
143 | acl = response.object.data
144 |
145 | response = self.conn.put_acl(bucket_name, key, acl)
146 | self.assertEqual(response.http_response.status, 200, 'put acl')
147 |
148 | response = self.conn.get_bucket_acl(bucket_name)
149 | self.assertEqual(response.http_response.status, 200, 'get bucket acl')
150 |
151 | bucket_acl = response.object.data
152 |
153 | response = self.conn.put_bucket_acl(bucket_name, bucket_acl)
154 | self.assertEqual(response.http_response.status, 200, 'put bucket acl')
155 |
156 | response = self.conn.get_bucket_logging(bucket_name)
157 | self.assertEqual(response.http_response.status, 200, 'get bucket logging')
158 |
159 | bucket_logging = response.object.data
160 |
161 | response = self.conn.put_bucket_logging(bucket_name, bucket_logging)
162 | self.assertEqual(response.http_response.status, 200, 'put bucket logging')
163 |
164 | response = self.conn.list_bucket(bucket_name)
165 | self.assertEqual(response.http_response.status, 200, 'list bucket')
166 | entries = response.entries
167 | self.assertEqual(len(entries), 2, 'got back right number of keys')
168 | # depends on weird_key < key
169 | self.assertEqual(entries[0].key, weird_key, 'first key is right')
170 | self.assertEqual(entries[1].key, key, 'second key is right')
171 |
172 | response = self.conn.list_bucket(bucket_name, {'max-keys': 1})
173 | self.assertEqual(response.http_response.status, 200, 'list bucket with args')
174 | self.assertEqual(len(response.entries), 1, 'got back right number of keys')
175 |
176 | for entry in entries:
177 | response = self.conn.delete(bucket_name, entry.key)
178 | self.assertEqual(response.http_response.status, 204, 'delete %s' % entry.key)
179 |
180 | response = self.conn.list_all_my_buckets()
181 | self.assertEqual(response.http_response.status, 200, 'list all my buckets')
182 | buckets = response.entries
183 |
184 | response = self.conn.delete_bucket(bucket_name)
185 | self.assertEqual(response.http_response.status, 204, 'delete bucket')
186 |
187 | response = self.conn.list_all_my_buckets()
188 | self.assertEqual(response.http_response.status, 200, 'list all my buckets again')
189 |
190 | self.assertEqual(len(response.entries), len(buckets) - 1, 'bucket count is correct')
191 |
192 | def verify_list_bucket_response(self, response, bucket, is_truncated, parameters, next_marker=''):
193 | prefix = ''
194 | marker = ''
195 |
196 | if 'prefix' in parameters:
197 | prefix = parameters['prefix']
198 | if 'marker' in parameters:
199 | marker = parameters['marker']
200 |
201 | self.assertEqual(bucket, response.name, 'bucket name should match')
202 | self.assertEqual(prefix, response.prefix, 'prefix should match')
203 | self.assertEqual(marker, response.marker, 'marker should match')
204 | if 'max-keys' in parameters:
205 | self.assertEqual(parameters['max-keys'], response.max_keys, 'max-keys should match')
206 | self.assertEqual(parameters['delimiter'], response.delimiter, 'delimiter should match')
207 | self.assertEqual(is_truncated, response.is_truncated, 'is_truncated should match')
208 | self.assertEqual(next_marker, response.next_marker, 'next_marker should match')
209 |
210 | def do_delimited_list(self, bucket_name, is_truncated, parameters, regular_expected, common_expected, test_name, next_marker=''):
211 | response = self.conn.list_bucket(bucket_name, parameters)
212 | self.assertEqual(response.http_response.status, 200, test_name)
213 | self.assertEqual(regular_expected, len(response.entries), 'right number of regular entries')
214 | self.assertEqual(common_expected, len(response.common_prefixes), 'right number of common prefixes')
215 |
216 | self.verify_list_bucket_response(response, bucket_name, is_truncated, parameters, next_marker)
217 |
218 | return response
219 |
220 | class TestQueryStringAuthGenerator(unittest.TestCase):
221 | def setUp(self):
222 | self.generator = S3.QueryStringAuthGenerator(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
223 | if self.generator.is_secure:
224 | self.connection = httplib.HTTPSConnection(self.generator.server_name)
225 | else:
226 | self.connection = httplib.HTTPConnection(self.generator.server_name)
227 |
228 | def check_url(self, url, method, status, message, data=''):
229 | if (method == 'PUT'):
230 | headers = { 'Content-Length': str(len(data)) }
231 | self.connection.request(method, url, data, headers)
232 | else:
233 | self.connection.request(method, url)
234 |
235 | response = self.connection.getresponse()
236 | self.assertEqual(response.status, status, message)
237 |
238 | return response.read()
239 |
240 | # test all operations for both regular and vanity domains
241 | # regular: http://s3.amazonaws.com/bucket/key
242 | # subdomain: http://bucket.s3.amazonaws.com/key
243 | # testing pure vanity domains (http://<vanity domain>/key) is not covered here
244 | # but is possible with some additional setup (set the server in @conn to your vanity domain)
245 |
246 | def test_subdomain(self):
247 | self.run_tests(S3.CallingFormat.SUBDOMAIN)
248 |
249 | def test_path(self):
250 | self.run_tests(S3.CallingFormat.PATH)
251 |
252 | def run_tests(self, calling_format):
253 | self.generator.calling_format = calling_format
254 |
255 | key = 'test'
256 |
257 | self.check_url(self.generator.create_bucket(BUCKET_NAME), 'PUT', 200, 'create_bucket')
258 | self.check_url(self.generator.put(BUCKET_NAME, key, ''), 'PUT', 200, 'put object', 'test data')
259 | self.check_url(self.generator.get(BUCKET_NAME, key), 'GET', 200, 'get object')
260 | self.check_url(self.generator.list_bucket(BUCKET_NAME), 'GET', 200, 'list bucket')
261 | self.check_url(self.generator.list_all_my_buckets(), 'GET', 200, 'list all my buckets')
262 | acl = self.check_url(self.generator.get_acl(BUCKET_NAME, key), 'GET', 200, 'get acl')
263 | self.check_url(self.generator.put_acl(BUCKET_NAME, key, acl), 'PUT', 200, 'put acl', acl)
264 | bucket_acl = self.check_url(self.generator.get_bucket_acl(BUCKET_NAME), 'GET', 200, 'get bucket acl')
265 | self.check_url(self.generator.put_bucket_acl(BUCKET_NAME, bucket_acl), 'PUT', 200, 'put bucket acl', bucket_acl)
266 | bucket_logging = self.check_url(self.generator.get_bucket_logging(BUCKET_NAME), 'GET', 200, 'get bucket logging')
267 | self.check_url(self.generator.put_bucket_logging(BUCKET_NAME, bucket_logging), 'PUT', 200, 'put bucket logging', bucket_logging)
268 | self.check_url(self.generator.delete(BUCKET_NAME, key), 'DELETE', 204, 'delete object')
269 | self.check_url(self.generator.delete_bucket(BUCKET_NAME), 'DELETE', 204, 'delete bucket')
270 |
271 |
272 | if __name__ == '__main__':
273 | unittest.main()
274 |
275 |
276 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/testsettings.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 |
5 | DEBUG = True
6 | if sys.platform[0:3] == 'win':
7 | TEMP = os.environ.get('TEMP', '')
8 | else:
9 | TEMP = '/tmp'
10 | DATABASES = {
11 | 'default': {
12 | 'ENGINE': 'django.db.backends.sqlite3',
13 | 'NAME': ':memory:'
14 | }
15 | }
16 | INSTALLED_APPS = [
17 | 'cuddlybuddly.storage.s3'
18 | ]
19 | STATIC_ROOT = MEDIA_ROOT = os.path.join(TEMP, 'cbs3test')
20 | SECRET_KEY = 'placeholder'
21 |
22 | DEFAULT_FILE_STORAGE = 'cuddlybuddly.storage.s3.S3Storage'
23 | from cuddlybuddly.storage.s3 import CallingFormat
24 | AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
25 |
26 | # Below should contain:
27 | #
28 | # MEDIA_URL = 'http://yourbucket.s3.amazonaws.com/'
29 | # AWS_ACCESS_KEY_ID = ''
30 | # AWS_SECRET_ACCESS_KEY = ''
31 | # AWS_STORAGE_BUCKET_NAME = ''
32 | from cuddlybuddly.storage.s3.tests3credentials import *
33 |
34 | CUDDLYBUDDLY_STORAGE_S3_CACHE = 'cuddlybuddly.storage.s3.cache.FileSystemCache'
35 | CUDDLYBUDDLY_STORAGE_S3_FILE_CACHE_DIR = os.path.join(TEMP, 'cbs3testcache')
36 |
--------------------------------------------------------------------------------
/src/cuddlybuddly/storage/s3/utils.py:
--------------------------------------------------------------------------------
1 | import base64
2 | try:
3 | from collections import OrderedDict
4 | except ImportError:
5 | from ordereddict import OrderedDict
6 | import json
7 | import re
8 | import sys
9 | import time
10 | try:
11 | from urllib.parse import unquote # Python 3
12 | except ImportError:
13 | from urllib2 import unquote # Python 2
14 | try:
15 | from urllib.parse import urljoin, urlparse, urlunparse # Python 3
16 | except ImportError:
17 | from urlparse import urljoin, urlparse, urlunparse # Python 2
18 | from Crypto.PublicKey import RSA
19 | from Crypto.Signature import PKCS1_v1_5
20 | from Crypto.Hash import SHA
21 | from django.conf import settings
22 | from django.utils.http import urlquote
23 | from cuddlybuddly.storage.s3 import CallingFormat
24 | from cuddlybuddly.storage.s3.lib import QueryStringAuthGenerator
25 | from cuddlybuddly.storage.s3.middleware import request_is_secure
26 |
27 |
28 | def create_signed_url(file, expires=60, secure=False, private_cloudfront=False, expires_at=None):
29 | if not private_cloudfront:
30 | generator = QueryStringAuthGenerator(
31 | settings.AWS_ACCESS_KEY_ID,
32 | settings.AWS_SECRET_ACCESS_KEY,
33 | calling_format=getattr(settings, 'AWS_CALLING_FORMAT',
34 | CallingFormat.SUBDOMAIN),
35 | is_secure=secure)
36 | generator.set_expires_in(expires)
37 | return generator.generate_url(
38 | 'GET',
39 | settings.AWS_STORAGE_BUCKET_NAME,
40 | file
41 | )
42 |
43 | url = settings.MEDIA_URL
44 | if not isinstance(settings.MEDIA_URL, CloudFrontURLs):
45 | url = CloudFrontURLs(settings.MEDIA_URL)
46 | url = url.get_url(file, force_https=True if secure else False)
47 |
48 | if url.startswith('//'):
49 | # A protocol is needed for correct signing
50 | if secure:
51 | url = 'https:' + url
52 | else:
53 | url = 'http:' + url
54 |
55 | if expires_at is None:
56 | expires = int(time.time() + expires)
57 | else:
58 | expires = expires_at
59 |
60 | # Use OrderedDict to keep things predictable and testable
61 | policy = OrderedDict()
62 | policy['Resource'] = url
63 | policy['Condition'] = {
64 | 'DateLessThan': {
65 | 'AWS:EpochTime': expires
66 | }
67 | }
68 | policy = {
69 | 'Statement': [
70 | policy
71 | ]
72 | }
73 | policy = json.dumps(policy, separators=(',',':'))
74 |
75 | key = settings.CUDDLYBUDDLY_STORAGE_S3_KEY_PAIR
76 | dig = SHA.new()
77 | dig.update(policy.encode('utf-8'))
78 | sig = PKCS1_v1_5.new(RSA.importKey(key[1]))
79 | sig = sig.sign(dig)
80 | sig = base64.b64encode(sig).decode('utf-8')
81 | sig = sig.replace('+', '-').replace('=', '_').replace('/', '~')
82 |
83 | return '%s%sExpires=%s&Signature=%s&Key-Pair-Id=%s' % (
84 | url,
85 | '&' if '?' in url else '?',
86 | expires,
87 | sig,
88 | key[0]
89 | )
90 |
91 |
92 | try:
93 | extend = unicode # Python 2
94 | except NameError:
95 | extend = str # Python 3
96 |
97 |
98 | class CloudFrontURLs(extend):
99 | def __new__(cls, default, patterns={}, https=None):
100 | obj = super(CloudFrontURLs, cls).__new__(cls, default)
101 | obj._patterns = []
102 | for key, value in patterns.items():
103 | obj._patterns.append((re.compile(key), '%s' % value))
104 | obj._https = https
105 | return obj
106 |
107 | def match(self, name):
108 | for pattern in self._patterns:
109 | if pattern[0].match(name):
110 | return pattern[1]
111 | return self
112 |
113 | def https(self):
114 | if self._https is not None:
115 | return '%s' % self._https
116 | return self.replace('http://', 'https://')
117 |
118 | def get_url(self, path, force_https=False):
119 | if force_https or request_is_secure():
120 | url = self.https()
121 | else:
122 | url = self.match(path).replace('https://', 'http://')
123 | url = list(urlparse(urljoin(url, path)))
124 | if sys.version_info[0] == 2:
125 | url[2] = url[2].encode('utf-8')
126 | url[2] = urlquote(unquote(url[2]))
127 | return urlunparse(url)
128 |
--------------------------------------------------------------------------------
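
A usage sketch for ``CloudFrontURLs`` and ``create_signed_url``, with values
modelled on the test suite; every URL, key pair and path here is hypothetical::

    from cuddlybuddly.storage.s3.utils import CloudFrontURLs, create_signed_url

    # In settings: paths matching a pattern are served from the mapped
    # distribution, everything else from the default URL.
    MEDIA_URL = CloudFrontURLs(
        'http://yourbucket.s3.amazonaws.com/',
        patterns={'^media/': 'http://d604721fxaaqy9.cloudfront.net/'},
        https='https://d604721fxaaqy9.cloudfront.net/'
    )

    # Required for private_cloudfront=True: a CloudFront key pair ID plus the
    # matching RSA private key in PEM form.
    CUDDLYBUDDLY_STORAGE_S3_KEY_PAIR = ('PK12345EXAMPLE', '-----BEGIN RSA ...')

    # Plain S3 query string authentication, expiring 60 seconds from now:
    url = create_signed_url('private/report.pdf', expires=60, secure=True)

    # CloudFront canned policy signing with an absolute expiry timestamp:
    url = create_signed_url(
        'private/report.pdf',
        private_cloudfront=True,
        expires_at=1258237200
    )
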