├── .coveragerc ├── .editorconfig ├── .gitignore ├── .pre-commit-config.yaml ├── .travis.yml ├── CHANGELOG.md ├── MANIFEST.in ├── Makefile ├── README.md ├── development.txt ├── requirements.txt ├── s3browser ├── __init__.py ├── client.py ├── helpers.py ├── main.py └── util │ ├── __init__.py │ ├── decorators.py │ ├── list.py │ ├── parsers.py │ ├── path.py │ ├── s3.py │ └── tree.py ├── setup.cfg ├── setup.py ├── shippable.yml └── tests ├── __init__.py ├── acceptance └── __init__.py ├── functional ├── __init__.py └── test_client.py ├── unit ├── __init__.py ├── test_list_utilities.py ├── test_parsers.py ├── test_path_utilities.py ├── test_s3.py └── test_tree.py └── util.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | show_missing = True 3 | exclude_lines = 4 | if __name__ == .__main__.: 5 | raise NotImplemented. 6 | def __repr__ 7 | omit = 8 | */decorators.py 9 | 10 | [run] 11 | include = s3browser/* -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig is awesome: http://EditorConfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | end_of_line = lf 7 | insert_final_newline = false 8 | indent_style = space 9 | indent_size = 4 10 | 11 | [*.py] 12 | indent_style = space 13 | indent_size = 4 14 | 15 | [*.yml] 16 | indent_style = space 17 | indent_size = 2 18 | 19 | [Makefile] 20 | indent_style = tab 21 | indent_size = 4 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | *.pyc 3 | .coverage 4 | *.egg-info/ 5 | dist/ 6 | .tox/ 7 | .DS_Store 8 | .build.log 9 | build/ 10 | cover/ 11 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: 
-------------------------------------------------------------------------------- 1 | - repo: git@github.com:pre-commit/pre-commit-hooks 2 | sha: 616c1ebd1898c91de9a0548866a59cbd9f4547f6 3 | hooks: 4 | - id: trailing-whitespace 5 | exclude: \.html$ 6 | - id: end-of-file-fixer 7 | exclude: \.html$ 8 | - id: check-json 9 | - id: check-yaml 10 | - id: debug-statements 11 | - id: requirements-txt-fixer 12 | - id: flake8 13 | exclude: \/migrations\/ 14 | - repo: git@github.com:pre-commit/pre-commit 15 | sha: a8e1eaa51249148a40521ec7e816d45f7f5bdee1 16 | hooks: 17 | - id: validate_config 18 | - id: validate_manifest 19 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | python: 4 | - "2.7" 5 | 6 | env: 7 | - TEST_TYPE=unit 8 | - TEST_TYPE=functional 9 | 10 | sudo: false 11 | 12 | cache: 13 | directories: 14 | - $HOME/.cache/pip 15 | 16 | install: 17 | - pip install -r development.txt 18 | - pip install coveralls 19 | 20 | script: 21 | - make $TEST_TYPE 22 | 23 | branches: 24 | only: 25 | - master 26 | 27 | after_success: 28 | - coveralls 29 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## Changelog 2 | 3 | #### 0.0.1 4 | 5 | * First Release! 
6 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include requirements.txt 2 | include README.md -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | PACKAGE=s3browser 3 | CUSTOM_PIP_INDEX=pypi 4 | TESTS_VERBOSITY=2 5 | # 6 | 7 | EXTRA_TEST_TASKS= 8 | 9 | extra_args="$(filter-out $@,$(MAKECMDGOALS))" 10 | 11 | %: 12 | @: 13 | 14 | all: test 15 | 16 | test: unit functional $(EXTRA_TEST_TASKS) 17 | 18 | unit: setup 19 | @make run_test suite=unit pattern=$(extra_args) 20 | 21 | functional: setup 22 | @make run_test suite=functional pattern=$(extra_args) 23 | 24 | setup: clean 25 | @if [ -z $$VIRTUAL_ENV ]; then \ 26 | echo "===================================================="; \ 27 | echo "You're not running this from a virtualenv, wtf?"; \ 28 | echo "ಠ_ಠ"; \ 29 | echo "===================================================="; \ 30 | exit 1; \ 31 | fi 32 | @if [ -z $$SKIP_DEPS ]; then \ 33 | echo "Installing dependencies..."; \ 34 | pip install --quiet -r development.txt; \ 35 | fi 36 | @pre-commit install 37 | 38 | run_test: 39 | @echo "=======================================" 40 | @echo "* Running \033[0;32m$(suite)\033[0m test suite *" 41 | @echo "=======================================" 42 | @if [ $(pattern) ]; then \ 43 | tests=`grep "def test_.*$(pattern).*(" tests/$(suite)/*.py | sed 's/tests\/$(suite)\/\(.*\).py:def test_\(.*\)(.*/tests.$(suite).\1\:test_\2/' | tr '\n' ' '`; \ 44 | nosetests --stop --rednose --with-coverage --cover-html --cover-erase --cover-package=$(PACKAGE) \ 45 | --cover-branches --verbosity=$(TESTS_VERBOSITY) -s -x $$tests; \ 46 | else \ 47 | nosetests --stop --rednose --with-coverage --cover-html --cover-erase --cover-package=$(PACKAGE) \ 48 | 
--cover-branches --verbosity=$(TESTS_VERBOSITY) -s tests/$(suite) ; \ 49 | fi 50 | 51 | clean: 52 | @echo "Removing garbage..." 53 | @find . -name '*.pyc' -delete 54 | @rm -rf .coverage *.egg-info *.log build dist MANIFEST cover 55 | 56 | tag: 57 | @if [ $$(git rev-list $$(git describe --abbrev=0 --tags)..HEAD --count) -gt 0 ]; then \ 58 | if [ $$(git log -n 1 --oneline $$(git describe --abbrev=0 --tags)..HEAD CHANGELOG.md | wc -l) -gt 0 ]; then \ 59 | git tag $$(python setup.py --version) && git push --tags || (echo 'Version already released, update your version!'; exit 1); \ 60 | else \ 61 | echo "CHANGELOG not updated since last release!"; \ 62 | exit 1; \ 63 | fi; \ 64 | fi 65 | 66 | publish: tag 67 | @if [ -e "$$HOME/.pypirc" ]; then \ 68 | echo "Uploading to '$(CUSTOM_PIP_INDEX)'"; \ 69 | python setup.py register -r "$(CUSTOM_PIP_INDEX)"; \ 70 | python setup.py sdist upload -r "$(CUSTOM_PIP_INDEX)"; \ 71 | else \ 72 | echo "You should create a file called '.pypirc' under your home dir."; \ 73 | echo "That's the right place to configure 'pypi' repos."; \ 74 | exit 1; \ 75 | fi 76 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## S3 Browser [![Build Status](https://travis-ci.org/andrewgross/s3browser.svg?branch=master)](https://travis-ci.org/andrewgross/s3browser) [![Coverage Status](https://coveralls.io/repos/github/andrewgross/s3browser/badge.svg?branch=master)](https://coveralls.io/github/andrewgross/s3browser?branch=master) 2 | 3 | 4 | S3Browser is a tool to help you browse your S3 Buckets like a local filesystem. It features `cd`, `ls`, and `pwd` for now, with some added bonuses around showing rollups for directory sizes and the most recently modified file. For now it is read only, though if you want any features feel free to suggest them. 
5 | 6 | ![](https://s3.amazonaws.com/captured-pics/tty.gif) 7 | 8 | ### Installation 9 | 10 | ``` 11 | pip install s3browser 12 | ``` 13 | 14 | ### Usage 15 | 16 | You can pass access keys to `s3browser` directly, or just let it pick them up from your environment. It uses `boto` under the hood so you can use an existing configurations for that. 17 | 18 | ``` 19 | usage: s3browser [-h] [--access-key-id ACCESS_KEY_ID] 20 | [--secret-access-key SECRET_ACCESS_KEY] 21 | 22 | Run S3Browser 23 | 24 | optional arguments: 25 | -h, --help show this help message and exit 26 | --access-key-id ACCESS_KEY_ID 27 | AWS_ACCESS_KEY_ID used by Boto 28 | --secret-access-key SECRET_ACCESS_KEY 29 | AWS_SECRET_ACCESS_KEY used by Boto 30 | ``` 31 | 32 | Once you are in the CLI, it will automatically load a list of all of your available S3 buckets. You can use the `help` command to get detailed information for each of the commands. 33 | 34 | 35 | ### Gotchas 36 | 37 | `s3browser` is written in Python, so it is not the most efficient. For really large buckets be prepared to wait a while for it to complete. S3 requires us to page through all the files to retrieve them, and currently that is done serially in chunks of 1000. Additionally, the internal representation of each S3 Key is ~800 Bytes, once you use `refresh` on a bucket with millions of keys, expect some memory pressure. 38 | 39 | I have successfully browsed ~15mm keys on my dev machine with 16GB of RAM, of which python used ~12GB. If key retrieval speed or memory usage are big issues for you, feel free to open a ticket and we can spend the time to find better ways to implement the internal structures so they are more compact! 
40 | -------------------------------------------------------------------------------- /development.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | coverage==4.1 3 | freezegun==0.3.7 4 | mock==1.0.1 5 | moto==0.4.25 6 | nose==1.3.7 7 | pep8==1.5.7 8 | pre-commit==0.7.6 9 | pyflakes==0.9.2 10 | rednose==0.4.1 11 | pylint==1.1.0 12 | sure==1.2.2 13 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | boto>=2.36.0 2 | -------------------------------------------------------------------------------- /s3browser/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | __title__ = 's3browser' 5 | __version__ = '0.0.1' 6 | -------------------------------------------------------------------------------- /s3browser/client.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | import cmd 5 | 6 | from .util.list import parse_ls, print_files, complete_dir 7 | from .util.path import change_directory, get_pwd 8 | from .util.parsers import ls_parser 9 | from .util.s3 import get_keys, get_buckets, get_bucket 10 | from .util.tree import build_tree, S3Bucket, S3 11 | from .helpers import print_help, print_result, color_green 12 | 13 | 14 | # This makes mocking easier 15 | get_input = raw_input 16 | 17 | 18 | class S3Browser(cmd.Cmd, object): 19 | 20 | def __init__(self, connection): 21 | super(S3Browser, self).__init__() 22 | self.connection = connection 23 | self._top = S3("") 24 | self.current_directory = self._top 25 | self._get_all_buckets() 26 | self._update_prompt() 27 | 28 | def do_cd(self, line): 29 | node = change_directory(line, self.current_directory) 30 | 
if node: 31 | self.current_directory = node 32 | else: 33 | print_result("No such directory") 34 | 35 | def complete_cd(self, text, line, begidx, endidx): 36 | return complete_dir(self.current_directory, text) 37 | 38 | def help_cd(self): 39 | print_help("""usage: cd [dir] 40 | 41 | Changes the current directory. 42 | """) 43 | 44 | def do_buckets(self, line): 45 | if line == "": 46 | for bucket in sorted(self._top.buckets): 47 | print bucket.name 48 | else: 49 | self.current_directory = self._top.get_child(line) 50 | 51 | def help_buckets(self): 52 | print_help("""usage: buckets [] 53 | 54 | Lists all known buckets or switches the current bucket to 55 | """) 56 | 57 | def complete_buckets(self, text, line, begidx, endidx): 58 | buckets = sorted(self._top.buckets) 59 | return [b.name for b in buckets if b.name.startswith(text)] 60 | 61 | def do_refresh(self, line): 62 | if line == "": 63 | print "Refreshing all buckets! Get a snickers." 64 | for bucket in self._top.dirs: 65 | print "Refreshing {}".format(bucket.name) 66 | self._refresh_bucket(bucket) 67 | else: 68 | bucket = self._top.get_child(line) 69 | if bucket is None: 70 | print "{} is not a valid bucket name!".format(line) 71 | self._refresh_bucket(bucket) 72 | self.current_directory = bucket 73 | 74 | def _refresh_bucket(self, bucket): 75 | build_tree(bucket, get_keys(get_bucket(bucket.name, self.connection), interactive=True)) 76 | bucket.refreshed = True 77 | return bucket 78 | 79 | def complete_refresh(self, text, line, begidx, endidx): 80 | buckets = sorted(self._top.dirs) 81 | return [b.name for b in buckets if b.name.startswith(text)] 82 | 83 | def help_refresh(self): 84 | print_help("""usage: refresh 85 | 86 | Refreshes list of keys in an S3 Bucket and builds a directory tree. This can take a while. 
87 | """) 88 | 89 | def do_ls(self, line): 90 | args = parse_ls(line) 91 | if args is None: 92 | return 93 | print_files(self.current_directory, args) 94 | 95 | def do_ll(self, line): 96 | self.do_ls("-l -h {}".format(line)) 97 | 98 | def help_ls(self): 99 | parser = ls_parser() 100 | parser.print_help() 101 | 102 | def do_pwd(self, line): 103 | print_result(get_pwd(self.current_directory)) 104 | 105 | def help_pwd(self): 106 | print_help("""usage: pwd 107 | 108 | Print the current directory 109 | """) 110 | 111 | def do_exit(self, line): 112 | return True 113 | 114 | def help_exit(self): 115 | print_help("""usage: exit 116 | 117 | Exit S3Browser 118 | """) 119 | 120 | def _update_prompt(self): 121 | if self.current_directory.name: 122 | self.prompt = '{} $ '.format(color_green(self.current_directory.name)) 123 | else: 124 | self.prompt = '$ ' 125 | 126 | def postcmd(self, stop, line): 127 | self._update_prompt() 128 | return stop 129 | 130 | do_EOF = do_exit 131 | 132 | def _get_all_buckets(self): 133 | print "Getting all buckets!" 
134 | buckets = get_buckets(self.connection) 135 | for bucket in buckets: 136 | self._top.add_child(S3Bucket(bucket.name)) 137 | -------------------------------------------------------------------------------- /s3browser/helpers.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals, print_function 3 | 4 | import sys 5 | import datetime 6 | 7 | # This makes mocking easier 8 | output = sys.stdout 9 | error = sys.stderr 10 | 11 | # Pretty Colors 12 | WHITE = '\033[37m' 13 | YELLOW = '\033[33m' 14 | BLUE = '\033[34m' 15 | GREEN = '\033[32m' 16 | END = '\033[0m' 17 | 18 | 19 | def print_result(*args): 20 | print(*args, file=output) # noqa 21 | 22 | 23 | def print_help(*args): 24 | print('{color}{}{end} '.format(*args, color=WHITE, end=END), file=error) # noqa 25 | 26 | 27 | def color_yellow(text): 28 | return '{color}{text}{end}'.format(color=YELLOW, end=END, text=text) 29 | 30 | 31 | def color_blue(text): 32 | return '{color}{text}{end}'.format(color=BLUE, end=END, text=text) 33 | 34 | 35 | def color_green(text): 36 | return '{color}{text}{end}'.format(color=GREEN, end=END, text=text) 37 | 38 | 39 | def convert_date(date): 40 | return datetime.datetime.strptime(date, "%Y-%m-%dT%H:%M:%S.%fZ") 41 | -------------------------------------------------------------------------------- /s3browser/main.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from client import S3Browser 5 | from .util.s3 import get_connection 6 | from .util.parsers import main_parser 7 | from . 
import __version__ 8 | 9 | 10 | def main(): 11 | parser = main_parser() 12 | args = parser.parse_args() 13 | 14 | _main(access_key_id=args.access_key_id, secret_access_key=args.secret_access_key) 15 | 16 | 17 | def _main(access_key_id=None, secret_access_key=None): 18 | browser = None 19 | print("Starting s3browser version {}".format(__version__)) 20 | connection = get_connection(access_key_id=access_key_id, secret_access_key=secret_access_key) 21 | 22 | while True: 23 | try: 24 | if not browser: 25 | browser = S3Browser(connection) 26 | browser.cmdloop() 27 | except KeyboardInterrupt: 28 | print("^C") # noqa 29 | continue 30 | break 31 | 32 | 33 | if __name__ == '__main__': 34 | main() 35 | -------------------------------------------------------------------------------- /s3browser/util/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | -------------------------------------------------------------------------------- /s3browser/util/decorators.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | import os 5 | import sys 6 | from contextlib import contextmanager 7 | 8 | 9 | def fileno(file_or_fd): 10 | fd = getattr(file_or_fd, 'fileno', lambda: file_or_fd)() 11 | if not isinstance(fd, int): 12 | raise ValueError("Expected a file (`.fileno()`) or a file descriptor") 13 | return fd 14 | 15 | 16 | @contextmanager 17 | def silence_stderr(to=os.devnull): 18 | """ 19 | http://stackoverflow.com/questions/4675728/redirect-stdout-to-a-file-in-python/22434262#22434262 20 | """ 21 | stderr = sys.stderr 22 | 23 | stderr_fd = fileno(stderr) 24 | # copy stderr_fd before it is overwritten 25 | # NOTE: `copied` is inheritable on Windows when duplicating a standard stream 26 | with os.fdopen(os.dup(stderr_fd), 'wb') as copied: 27 | stderr.flush() # flush 
library buffers that dup2 knows nothing about 28 | try: 29 | os.dup2(fileno(to), stderr_fd) # $ exec >&to 30 | except ValueError: # filename 31 | with open(to, 'wb') as to_file: 32 | os.dup2(to_file.fileno(), stderr_fd) # $ exec > to 33 | try: 34 | yield stderr # allow code to be run with the redirected stderr 35 | finally: 36 | # restore stderr to its previous value 37 | #NOTE: dup2 makes stderr_fd inheritable unconditionally 38 | stderr.flush() 39 | os.dup2(copied.fileno(), stderr_fd) # $ exec >&copied 40 | 41 | 42 | @contextmanager 43 | def silence_stdout(to=os.devnull): 44 | """ 45 | http://stackoverflow.com/questions/4675728/redirect-stdout-to-a-file-in-python/22434262#22434262 46 | """ 47 | stdout = sys.stdout 48 | 49 | stdout_fd = fileno(stdout) 50 | # copy stdout_fd before it is overwritten 51 | # NOTE: `copied` is inheritable on Windows when duplicating a standard stream 52 | with os.fdopen(os.dup(stdout_fd), 'wb') as copied: 53 | stdout.flush() # flush library buffers that dup2 knows nothing about 54 | try: 55 | os.dup2(fileno(to), stdout_fd) # $ exec >&to 56 | except ValueError: # filename 57 | with open(to, 'wb') as to_file: 58 | os.dup2(to_file.fileno(), stdout_fd) # $ exec > to 59 | try: 60 | yield stdout # allow code to be run with the redirected stdout 61 | finally: 62 | # restore stdout to its previous value 63 | #NOTE: dup2 makes stdout_fd inheritable unconditionally 64 | stdout.flush() 65 | os.dup2(copied.fileno(), stdout_fd) # $ exec >&copied 66 | -------------------------------------------------------------------------------- /s3browser/util/list.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from s3browser.util.parsers import ls_parser 5 | from s3browser.util.tree import S3Dir, S3Bucket 6 | from s3browser.helpers import color_blue, color_yellow, print_result 7 | 8 | 9 | def sort_files(files, key="name", reverse=False): 10 | if 
key == "last_modified": 11 | # The default time behavior in bash is most recent on top, so we must 12 | # use not(reverse) by default 13 | return sorted(files, key=lambda x: x.get_last_modified(), reverse=not(reverse)) 14 | elif key == "size": 15 | return sorted(files, key=lambda x: x.get_size(), reverse=reverse) 16 | else: 17 | return sorted(files, key=lambda x: getattr(x, key), reverse=reverse) 18 | 19 | 20 | def get_names(files): 21 | return map(lambda x: x.name, files) 22 | 23 | 24 | def parse_ls(line): 25 | parser = ls_parser() 26 | try: 27 | args = parser.parse_args(line.split(" ")) 28 | except SystemExit: 29 | args = None 30 | return args 31 | 32 | 33 | def complete_dir(current_directory, prefix): 34 | dirs = sort_files(current_directory.dirs) 35 | return [d.name for d in dirs if d.name.startswith(prefix)] 36 | 37 | 38 | def print_files(current_directory, ls_args): 39 | files = current_directory.dirs + current_directory.files 40 | sorted_files = _sorted_files(files, ls_args) 41 | for f in sorted_files: 42 | last_modified = _format_date(f.get_last_modified()) 43 | size = _format_size(f.get_size(), human=ls_args.human) 44 | name = _format_name(f) 45 | if ls_args.long: 46 | print_result(size, last_modified, name) 47 | else: 48 | print_result(name) 49 | 50 | 51 | def _sorted_files(files, ls_args): 52 | if ls_args.time: 53 | sorted_files = sort_files(files, key="last_modified", reverse=ls_args.reverse) 54 | elif ls_args.size: 55 | sorted_files = sort_files(files, key="size", reverse=ls_args.reverse) 56 | else: 57 | sorted_files = sort_files(files, reverse=ls_args.reverse) 58 | return sorted_files 59 | 60 | 61 | def _format_date(date): 62 | """ 63 | Converts a python datetime to a string 64 | """ 65 | try: 66 | return date.strftime("%Y-%m-%d %H:%M") 67 | except ValueError: 68 | return "????-??-?? ??:??" 
69 | 70 | 71 | def _format_size(size, human=False): 72 | if not human: 73 | return "{:>15}B".format(size) 74 | billion = 1024 * 1024 * 1024 75 | million = 1024 * 1024 76 | thousand = 1024 77 | if size >= billion: 78 | return "{:>4}G".format(size / billion) 79 | elif size >= million: 80 | return "{:>4}M".format(size / million) 81 | elif size >= thousand: 82 | return "{:>4}K".format(size / thousand) 83 | else: 84 | return "{:>4}B".format(size) 85 | 86 | 87 | def _format_name(file): 88 | if isinstance(file, S3Bucket): 89 | if file.refreshed: 90 | return color_blue(file.name) 91 | else: 92 | return color_yellow(file.name) 93 | elif isinstance(file, S3Dir): 94 | return color_blue(file.name) 95 | return file.name 96 | -------------------------------------------------------------------------------- /s3browser/util/parsers.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from argparse import ArgumentParser 5 | 6 | 7 | def main_parser(): 8 | parser = ArgumentParser(prog="s3browser", description="Run S3Browser") 9 | 10 | parser.add_argument( 11 | "--access-key-id", 12 | dest="access_key_id", 13 | type=str, 14 | default=None, 15 | help='AWS_ACCESS_KEY_ID used by Boto', 16 | action="store", 17 | required=False 18 | ) 19 | 20 | parser.add_argument( 21 | "--secret-access-key", 22 | dest="secret_access_key", 23 | type=str, 24 | default=None, 25 | help='AWS_SECRET_ACCESS_KEY used by Boto', 26 | action="store", 27 | required=False 28 | ) 29 | return parser 30 | 31 | 32 | def ls_parser(): 33 | parser = ArgumentParser(prog="ls", description="List Files", add_help=False) 34 | 35 | parser.add_argument( 36 | "expression", 37 | metavar="", 38 | nargs='?', 39 | type=str, 40 | help='Regex filtering expression', 41 | action="store", 42 | ) 43 | 44 | parser.add_argument( 45 | "-l", 46 | dest="long", 47 | default=False, 48 | help='List in Long Format', 49 | action="store_true", 
def get_pwd(node):
    """Return the absolute path of ``node`` by walking up to the root."""
    if not node.parent:
        return node.name
    return "{}/{}".format(get_pwd(node.parent), node.name)


def get_root_node(node):
    """Return the top-most ancestor of ``node``."""
    if node.parent:
        return get_root_node(node.parent)
    return node


def change_directory(path, current_node):
    """Resolve ``path`` relative to ``current_node``.

    Supports absolute paths (leading "/"), "~", ".", "..", and empty
    input (all of which mean "go to root" where appropriate).

    :returns: the target node, or None if any component does not exist.
    """
    if path in ["", "~", "/"]:
        return get_root_node(current_node)
    if path.startswith("/"):
        path = path[1:]
        current_node = get_root_node(current_node)
    for p in path.split("/"):
        # BUG FIX: skip empty components so "foo/" and "a//b" resolve
        # instead of failing a lookup for a directory named "".
        if p in ("", "."):
            continue
        elif p == "..":
            if current_node.parent:
                current_node = current_node.parent
        else:
            child = _get_matching_dir(p, current_node)
            if child is None:
                return None
            current_node = child
    return current_node


def _get_matching_dir(name, node):
    """Return the child dir of ``node`` named ``name``, or None."""
    for child in node.dirs:
        if child.name == name:
            return child
-------------------------------------------------------------------------------- /s3browser/util/s3.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | import sys 5 | import datetime 6 | 7 | from boto.s3.connection import S3Connection 8 | 9 | 10 | def get_connection(access_key_id=None, secret_access_key=None): 11 | return S3Connection(aws_access_key_id=access_key_id, aws_secret_access_key=secret_access_key) 12 | 13 | 14 | def get_bucket(bucket, connection): 15 | return connection.get_bucket(bucket) 16 | 17 | 18 | def get_buckets(connection): 19 | buckets = connection.get_all_buckets() 20 | all_buckets = [b for b in buckets] 21 | return all_buckets 22 | 23 | 24 | def get_keys(bucket, interactive=False): 25 | """ 26 | Get all keys, interactive adds some fancy graphics 27 | """ 28 | key_count = 0 29 | counter, timer = _interactive(interactive=interactive, timer=datetime.datetime.min, key_count=key_count) 30 | for key in bucket: 31 | yield key 32 | counter, timer = _interactive(counter=counter, timer=timer, interactive=interactive, key_count=key_count) 33 | key_count += 1 34 | if interactive: 35 | print "\nDone!" 36 | 37 | 38 | def _interactive(counter=0, timer=None, interactive=False, key_count=0): 39 | """ 40 | Print a status banner, adding a . every second, resetting at 10 41 | """ 42 | now = datetime.datetime.now() 43 | if timer is None: 44 | timer = now 45 | after_one_second = _check_time(now, timer) 46 | if after_one_second and interactive: 47 | counter += 1 48 | counter = counter % 10 49 | _print_progress_bar(counter, key_count) 50 | timer = now 51 | return counter, timer 52 | 53 | 54 | def _get_ticker_string(counter, key_count): 55 | """ 56 | Print out our message while keeping a constant width string 57 | """ 58 | anti_counter = 10 - counter 59 | return "This can take a while.{}{} Keys Found: {}".format("." 
* counter, " " * anti_counter, key_count) 60 | 61 | 62 | def _print_progress_bar(counter, key_count): 63 | """ 64 | Print out a message overtop of the existing line 65 | """ 66 | ticker = _get_ticker_string(counter, key_count) 67 | sys.stdout.write(ticker) 68 | sys.stdout.flush() 69 | sys.stdout.write("\b" * (len(ticker) + 1)) # Move back to the beginning of the line 70 | 71 | 72 | def _check_time(now, timer): 73 | return (now - timer) > datetime.timedelta(seconds=1) 74 | -------------------------------------------------------------------------------- /s3browser/util/tree.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | import datetime 5 | 6 | from s3browser.helpers import convert_date 7 | 8 | 9 | class S3File(object): 10 | 11 | def __init__(self, name, size, last_modified, parent=None): 12 | self.name = name 13 | self._size = size 14 | self.parent = parent 15 | if isinstance(last_modified, datetime.datetime): 16 | self._last_modified = last_modified 17 | else: 18 | self._last_modified = convert_date(last_modified) 19 | 20 | def get_size(self): 21 | return self._size 22 | 23 | def get_last_modified(self): 24 | return self._last_modified 25 | 26 | def __repr__(self): 27 | return "{} - Size: {} Last Modified: {}".format(self.name, self._size, self._last_modified) 28 | 29 | 30 | class S3Dir(object): 31 | 32 | def __init__(self, name, parent=None): 33 | self.name = name 34 | self.parent = parent 35 | self.files = [] 36 | self.dirs = [] 37 | self._size = 0 38 | self._last_modified = None 39 | 40 | def add_child(self, child): 41 | if isinstance(child, S3File): 42 | self.files.append(child) 43 | elif isinstance(child, S3Dir): 44 | self.dirs.append(child) 45 | else: 46 | raise "Attempted to add a bad child" 47 | child.parent = self 48 | 49 | def get_size(self): 50 | if not self._size: 51 | for f in self.files + self.dirs: 52 | self._size = self._size + 
class S3Bucket(object):
    """A top-level S3 bucket node in the directory tree.

    Size and last-modified rollups are computed lazily and cached; both
    report empty values until the bucket has been ``refreshed`` (i.e.
    its keys fetched and the tree built).
    """

    def __init__(self, name, parent=None):
        self.name = name
        self.parent = parent
        self.files = []
        self.dirs = []
        self.refreshed = False  # True once keys have been fetched
        self._size = 0  # cached size rollup, computed lazily
        self._last_modified = None  # cached mtime rollup

    def add_child(self, child):
        """Attach a file or directory node and take ownership of it."""
        if isinstance(child, S3File):
            self.files.append(child)
        elif isinstance(child, S3Dir):
            self.dirs.append(child)
        else:
            # BUG FIX: was ``raise "Attempted..."`` -- a string raise,
            # which is itself a TypeError at runtime with a confusing
            # message.  Raise a proper exception instead.
            raise TypeError("Attempted to add a bad child")
        child.parent = self

    def get_size(self):
        """Total size of all children; 0 until the bucket is refreshed."""
        if not self.refreshed:
            return 0
        if not self._size:
            for f in self.files + self.dirs:
                self._size = self._size + f.get_size()
        return self._size

    def get_last_modified(self):
        """Most recent child mtime; ``datetime.min`` until refreshed."""
        if not self.refreshed:
            # BUG FIX: was ``datetime.date.min`` -- a date is not
            # comparable with the datetime objects every other node
            # returns, which broke time-sorted listings containing an
            # unrefreshed bucket.
            return datetime.datetime.min
        if not self._last_modified:
            self._last_modified = datetime.datetime.min
            for f in self.files + self.dirs:
                if f.get_last_modified() > self._last_modified:
                    self._last_modified = f.get_last_modified()
        return self._last_modified

    def __repr__(self):
        return "{} - Refreshed: {} - Files: {} Dirs: {}".format(self.name, self.refreshed, len(self.files), len(self.dirs))
def add_key(node, key, partial_name):
    """Insert ``key`` into the tree rooted at ``node``.

    ``partial_name`` is the key's remaining path relative to ``node``;
    intermediate ``S3Dir`` nodes are created on demand as the path is
    consumed one component per recursive call.
    """
    if partial_name == "":
        return
    split_name = partial_name.split("/")
    if len(split_name) == 1:
        # BUG FIX: the old guard ``and split_name != ""`` compared a
        # list to a string and was therefore always true; the real
        # terminating case is the empty-name check above.
        f = S3File(partial_name, key.size, key.last_modified)
        node.add_child(f)
    else:
        dir_name = split_name[0]
        new_name = partial_name[len(dir_name) + 1:]
        # Reuse an existing directory node before allocating a new one
        # (previously an S3Dir was constructed even when discarded).
        for d in node.dirs:
            if d.name == dir_name:
                return add_key(d, key, new_name)
        new_dir = S3Dir(dir_name)
        node.add_child(new_dir)
        add_key(new_dir, key, new_name)


def build_tree(base_node, keys):
    """Add every key in ``keys`` under ``base_node`` and return it."""
    for key in keys:
        add_key(base_node, key, key.name)
    return base_node
def parse_requirements():
    """Rudimentary parser for the `requirements.txt` file

    We just want to separate regular packages from links to pass them to the
    `install_requires` and `dependency_links` params of the `setup()`
    function properly.

    Returns a ``(pkgs, links)`` tuple.
    Raises RuntimeError when `requirements.txt` cannot be read.
    """
    try:
        requirements = \
            map(str.strip, local_file('requirements.txt').splitlines())
    except IOError:
        raise RuntimeError("Couldn't find the `requirements.txt' file :(")

    links = []
    pkgs = []
    for req in requirements:
        if not req:
            continue
        if 'http:' in req or 'https:' in req:
            links.append(req)
            # Raw string: without the r-prefix, "\#" and "\-" are invalid
            # escape sequences (a DeprecationWarning on modern Pythons).
            name, version = re.findall(r"\#egg=([^\-]+)-(.+$)", req)[0]
            pkgs.append('{0}=={1}'.format(name, version))
        else:
            pkgs.append(req)

    return pkgs, links


def local_file(f):
    """Return the text of file ``f`` located next to this setup.py."""
    # Context manager closes the handle (the original leaked it).
    with open(os.path.join(os.path.dirname(__file__), f)) as fp:
        return fp.read()
-------------------------------------------------------------------------------- /shippable.yml: -------------------------------------------------------------------------------- 1 | #set your language below 2 | language: python 3 | 4 | #set language version. This is only required if you use the default Shippable image for your build 5 | python: 6 | - 2.7 7 | 8 | #specify which services you need. This is only valid if you use the default Shippable image for your build 9 | services: 10 | 11 | env: 12 | - TEST_TYPE=unit 13 | - TEST_TYPE=functional 14 | 15 | matrix: 16 | 17 | build: 18 | #commands in this section run on your build machine and not inside your CI container. 19 | pre_ci: 20 | 21 | pre_ci_boot: 22 | image_name: 23 | image_tag: 24 | pull: 25 | options: 26 | #commands in this section run inside your CI container. In addition, any commands such as ls, in this section can be used to skip default processing for CI. 27 | ci: 28 | - pip install -r development.txt 29 | - pip install coveralls 30 | - make $TEST_TYPE 31 | post_ci: 32 | on_success: 33 | on_failure: 34 | cache: true 35 | #commands in this section run on your build machine and not inside your CI container. 
@mock_s3
def test_refresh():
    """
    Refresh should get all keys in a bucket
    """
    # Given a bucket populated with two keys
    key_names = ['foo', 'bar']
    _, connection = populate_bucket('mybucket', key_names)

    # And a browser client on that connection
    browser = S3Browser(connection)

    # When the bucket is refreshed
    with silence_stdout():
        browser.do_refresh("mybucket")

    # Then every key shows up as a file at the top level
    len(browser.current_directory.files).should.equal(2)
@mock_s3
@patch('s3browser.util.list.print_result')
def test_ls(output):
    """
    ls should show the current files
    """
    # Given a bucket holding two keys and a browser pointed at it
    key_names = ["foo", "bar"]
    _, connection = populate_bucket('mybucket', key_names)
    browser = S3Browser(connection)

    # And the browser starts at the top level (no current directory)
    browser.current_directory = ""
    with silence_stdout():
        browser.do_refresh("mybucket")

    # When ls is executed with no arguments
    browser.do_ls("")

    # Then the files are printed in sorted order
    assert output.call_args_list == [call("bar"), call("foo")]
@mock_s3
@freeze_time("2016-07-11 03:39:34")
@patch('s3browser.util.list.print_result')
def test_ls_lh(output):
    """
    ls -lh should show human readable size
    """
    # Given a bucket with two keys and a browser on top of it
    key_names = ["foo", "bar"]
    _, connection = populate_bucket('mybucket', key_names)
    browser = S3Browser(connection)

    # And the browser sits at the top level
    browser.current_directory = ""
    with silence_stdout():
        browser.do_refresh("mybucket")

    # When ls runs with the long + human-readable flags
    browser.do_ls("-lh")

    # Then each entry is printed with its size and frozen timestamp
    expected = [call(" 3B", "2016-07-11 03:39", "bar"), call(" 3B", "2016-07-11 03:39", "foo")]
    assert output.call_args_list == expected
@mock_s3
@patch('s3browser.util.list.print_result')
def test_ls_lht(output):
    """
    ls -lht should sort by last modified, reversed
    """
    # Given two keys created a day apart (clock frozen per upload)
    with freeze_time("2016-07-11 03:39:34"):
        _, connection = populate_bucket('mybucket', ["new"])
    with freeze_time("2016-07-10 03:39:34"):
        _, connection = populate_bucket('mybucket', ["old"])
    browser = S3Browser(connection)

    # And the browser sits at the top level
    browser.current_directory = ""
    with silence_stdout():
        browser.do_refresh("mybucket")

    # When ls runs sorted by modification time
    browser.do_ls("-lht")

    # Then the newest entry is printed first
    expected = [
        call(" 3B", "2016-07-11 03:39", "new"),
        call(" 3B", "2016-07-10 03:39", "old"),
    ]
    assert output.call_args_list == expected
def test_sort_files():
    """
    Sort files based on key name
    """
    # When I have an unsorted list of files
    files = get_unsorted_list_of_files()

    # And I sort them with the defaults
    _sorted = sort_files(files)

    # Then I have sorted files.
    # A list comprehension replaces map(lambda, ...): identical on Python 2
    # (where map returned a list) and also correct on Python 3, where
    # map() returns a lazy iterator that sure cannot compare to a list.
    [f.name for f in _sorted].should.equal(["a", "b", "c"])
def test_parse_ls_s():
    """
    Test parsing ls command with size
    """
    # When I have an ls command with the size flag
    ls_args = "-S"

    # And I parse it
    result = parse_ls(ls_args)

    # Then only the size flag is set
    result.reverse.should.be.false
    result.size.should.be.true
    result.human.should.be.false
    result.time.should.be.false
    result.long.should.be.false
73 | # Then I get a parsed command with time set 74 | parsed.reverse.should.be.false 75 | parsed.size.should.be.false 76 | parsed.human.should.be.false 77 | parsed.time.should.be.true 78 | parsed.long.should.be.false 79 | 80 | 81 | def test_parse_ls_r(): 82 | """ 83 | Test parsing ls command with reverse 84 | """ 85 | # When I have an ls command 86 | command = "-r" 87 | 88 | # And I parse it 89 | parsed = parse_ls(command) 90 | 91 | # Then I get a parsed command with reverse set 92 | parsed.reverse.should.be.true 93 | parsed.size.should.be.false 94 | parsed.human.should.be.false 95 | parsed.time.should.be.false 96 | parsed.long.should.be.false 97 | 98 | 99 | def test_parse_ls_h(): 100 | """ 101 | Test parsing ls command with human 102 | """ 103 | # When I have an ls command 104 | command = "-h" 105 | 106 | # And I parse it 107 | parsed = parse_ls(command) 108 | 109 | # Then I get a parsed command with human set 110 | parsed.reverse.should.be.false 111 | parsed.size.should.be.false 112 | parsed.human.should.be.true 113 | parsed.time.should.be.false 114 | parsed.long.should.be.false 115 | 116 | 117 | def test_parse_ls_multiple(): 118 | """ 119 | Test parsing ls command with multiple flags 120 | """ 121 | # When I have an ls command 122 | command = "-lhtr" 123 | 124 | # And I parse it 125 | parsed = parse_ls(command) 126 | 127 | # Then I get a parsed command with multiple flags set 128 | parsed.reverse.should.be.true 129 | parsed.size.should.be.false 130 | parsed.human.should.be.true 131 | parsed.time.should.be.true 132 | parsed.long.should.be.true 133 | 134 | 135 | def test_parse_ls_exclusive(): 136 | """ 137 | Test parsing ls command with mutually exclusive flags 138 | """ 139 | # When I have an ls command with exclusive flags 140 | command = "-tS" 141 | 142 | # And I parse it 143 | with silence_stderr(): 144 | parsed = parse_ls(command) 145 | 146 | # Then I get back nothing 147 | parsed.should.be.none 148 | 149 | 150 | def test_parse_ls_expression(): 151 | """ 152 
def test_main_parser():
    """
    Test main parser
    """
    # When I have a parser
    parser = main_parser()

    # And I have a command
    command = "--access-key-id foo --secret-access-key bar"

    # And I parse it
    parsed = parser.parse_args(command.split(" "))

    # Then I get my arguments.
    # BUG FIX: the original used plain assignments
    # (``parsed.access_key_id = "foo"``), which set the attributes instead
    # of checking them — the test could never fail. These are assertions now.
    parsed.access_key_id.should.equal("foo")
    parsed.secret_access_key.should.equal("bar")
current_directory).name.should.equal("") 24 | 25 | 26 | def test_change_directory_with_base_no_path(): 27 | """ 28 | Change Directory with a current directory and no path 29 | """ 30 | # When I have a directory 31 | current_directory = S3Dir("foo") 32 | top_level = S3Dir("") 33 | top_level.add_child(current_directory) 34 | 35 | # And I have no path 36 | path = "" 37 | 38 | # Then I go back to the top level 39 | change_directory(path, current_directory).name.should.equal("") 40 | 41 | 42 | def test_change_directory_with_tilde(): 43 | """ 44 | Change Directory with a current directory and tilde 45 | """ 46 | # When I have a directory 47 | current_directory = S3Dir("foo") 48 | top_level = S3Dir("") 49 | top_level.add_child(current_directory) 50 | 51 | # And I have a tilde for a path 52 | path = "~" 53 | 54 | # Then I go back to the top level 55 | change_directory(path, current_directory).name.should.equal("") 56 | 57 | 58 | def test_change_directory_with_leading_slash_in_path(): 59 | """ 60 | Change Directory with a current directory and a leading slash in the path 61 | """ 62 | # When I have a directory 63 | current_directory = S3Dir("foo") 64 | top = S3Dir("") 65 | middle = S3Dir("bar") 66 | top.add_child(current_directory) 67 | top.add_child(middle) 68 | 69 | # And I have a leading / path 70 | path = "/bar" 71 | 72 | # Then I go to a top level directory 73 | change_directory(path, current_directory).name.should.equal("bar") 74 | 75 | 76 | def test_change_directory_with_base_and_path(): 77 | """ 78 | Change Directory with a directory and a path 79 | """ 80 | # When I have a directory 81 | current_directory = S3Dir("foo") 82 | top = S3Dir("") 83 | bottom = S3Dir("bar") 84 | top.add_child(current_directory) 85 | current_directory.add_child(bottom) 86 | 87 | # And I have a path 88 | path = "bar" 89 | 90 | # Then I stay build a new path 91 | _current = change_directory(path, current_directory) 92 | _current.name.should.equal("bar") 93 | 
def test_change_directory_with_compound_path():
    """
    Change Directory with a directory and a compound path
    """
    # When I have a directory
    # (the original first assigned the string "foo" to current_directory,
    # a dead statement immediately overwritten below — removed)
    current_directory = S3Dir("foo")
    top = S3Dir("")
    middle = S3Dir("baz")
    bottom = S3Dir("bar")
    top.add_child(current_directory)
    current_directory.add_child(middle)
    middle.add_child(bottom)

    # And I have a nested path
    path = "baz/bar"

    # Then I build a new path
    _current = change_directory(path, current_directory)
    _current.name.should.equal("bar")
    get_pwd(_current).should.equal("/foo/baz/bar")
def test_change_directory_with_double_dot_path():
    """
    Change Directory with a directory and a path with a double dot
    """
    # Given a three-level directory tree
    root = S3Dir("")
    parent = S3Dir("foo")
    leaf = S3Dir("bar")
    root.add_child(parent)
    parent.add_child(leaf)

    # And the deepest directory is current
    current_directory = leaf

    # When I change directory with ".."
    path = ".."

    # Then I land in the parent directory
    change_directory(path, current_directory).name.should.equal("foo")
bar 224 | 225 | # And I have a path 226 | path = "baz/../bat" 227 | 228 | # Then I navigate to the correct directory 229 | _current = change_directory(path, current_directory) 230 | _current.name.should.equal("bat") 231 | get_pwd(_current).should.equal("/foo/bar/bat") 232 | 233 | 234 | def test_change_directory_go_up_from_top(): 235 | """ 236 | Change Directory with double dot from top level 237 | """ 238 | # When I have a directory structure 239 | top = S3Dir("") 240 | 241 | # And I have a current directory 242 | current_directory = top 243 | 244 | # And I have a path 245 | path = ".." 246 | 247 | # Then I navigate to the correct directory 248 | _current = change_directory(path, current_directory) 249 | _current.name.should.equal("") 250 | get_pwd(_current).should.equal("") 251 | 252 | 253 | def test_change_directory_to_nonexistent_dir(): 254 | """ 255 | Change Directory to a non existent directory 256 | """ 257 | # When I have a directory structure 258 | top = S3Dir("top") 259 | middle = S3Dir("middle") 260 | bottom = S3Dir("bottom") 261 | 262 | top.add_child(middle) 263 | middle.add_child(bottom) 264 | 265 | # And I have a current directory 266 | current_directory = middle 267 | 268 | # And I have an invalid path 269 | path = "foo" 270 | 271 | # Then I do not go into that directory 272 | change_directory(path, current_directory).should.be.none 273 | -------------------------------------------------------------------------------- /tests/unit/test_s3.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | import datetime 5 | from freezegun import freeze_time 6 | 7 | from s3browser.util.tree import S3File, S3Dir, add_key 8 | from tests.util import S3File as BotoKey 9 | 10 | 11 | def test_s3_file_get_size(): 12 | """ 13 | An S3File should return its size with get_size 14 | """ 15 | # When I have an S3File 16 | f = S3File("foo", 42, datetime.datetime.now()) 17 
def test_s3_file_get_last_modified():
    """
    An S3File should return its last modified time with get_last_modified
    """
    # When I have an S3File
    now = datetime.datetime.now()
    f = S3File("foo", 42, now)

    # And I get the last modified time
    lm = f.get_last_modified()

    # Then it returns the last modified time
    lm.should.equal(now)
def test_s3_dir_last_modified():
    """
    An S3Dir should show the latest last modified time of its children
    """
    # When I have an S3 Dir (inner nested inside outer)
    d1 = S3Dir("outer")
    d2 = S3Dir("inner")
    d1.add_child(d2)

    # And I have files: one fresh, one an hour older
    new = datetime.datetime.now()
    old = datetime.datetime.now() - datetime.timedelta(hours=1)
    f1 = S3File("foo", 1, new)
    f2 = S3File("bar", 4, old)

    # And they are nested in directories (new file in outer, old in inner)
    d1.add_child(f1)
    d2.add_child(f2)

    # When I get the last modified time
    last_modified_1 = d1.get_last_modified()
    last_modified_2 = d2.get_last_modified()

    # Then it shows the latest time of its children
    last_modified_1.should.equal(new)
    last_modified_2.should.equal(old)
def test_add_multiple_files_to_directory():
    """
    add_key should add directories with multiple sub files
    """
    # When I have a node
    node = S3Dir("")

    # And I have two boto keys under the same directory
    key = BotoKey("middle/aa")
    key2 = BotoKey("middle/zz")

    # When I add the keys to the node
    add_key(node, key, key.name)
    add_key(node, key2, key2.name)

    # Then It adds the appropriate nodes
    node.dirs[0].name.should.equal("middle")
    node.dirs[0].files[0].name.should.equal("aa")
    node.dirs[0].files[1].name.should.equal("zz")
node.dirs[0].dirs[1].name.should.equal("bottom") 240 | node.dirs[0].dirs[0].files[0].name.should.equal("aa") 241 | node.dirs[0].dirs[1].files[0].name.should.equal("zz") 242 | -------------------------------------------------------------------------------- /tests/unit/test_tree.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | import datetime 5 | from freezegun import freeze_time 6 | 7 | from s3browser.util.tree import S3File, S3Dir, add_key 8 | from tests.util import S3File as BotoKey 9 | 10 | 11 | def test_s3_file_get_size(): 12 | """ 13 | An S3File should return its size with get_size 14 | """ 15 | # When I have an S3File 16 | f = S3File("foo", 42, datetime.datetime.now()) 17 | 18 | # And I get the size 19 | size = f.get_size() 20 | 21 | # Then it returns the size 22 | size.should.equal(42) 23 | 24 | 25 | def test_s3_file_get_last_modified(): 26 | """ 27 | An S3File should return its last modified time with get_last_modified 28 | """ 29 | # When I have an S3File 30 | now = datetime.datetime.now() 31 | f = S3File("foo", 42, now) 32 | 33 | # And I get the last modified time 34 | lm = f.get_last_modified() 35 | 36 | # Then it returns the size 37 | lm.should.equal(now) 38 | 39 | 40 | @freeze_time("2016-07-11 03:39:34") 41 | def test_s3_file_get_last_modified_string(): 42 | """ 43 | An S3File populated with a string time should return its last modified time with get_last_modified 44 | """ 45 | # When I have an S3File 46 | now_string = "2016-07-11T03:39:34.000Z" 47 | now = datetime.datetime.now() 48 | f = S3File("foo", 42, now_string) 49 | 50 | # And I get the last modified time 51 | lm = f.get_last_modified() 52 | 53 | # Then it returns the size 54 | lm.should.equal(now) 55 | 56 | 57 | def test_s3_dir_size(): 58 | """ 59 | An S3Dir should accumulate the sizes of its children 60 | """ 61 | # When I have S3 Files 62 | now = datetime.datetime.now() 63 | f1 = 
S3File("foo", 1, now) 64 | f2 = S3File("bar", 2, now) 65 | 66 | # And they are inside of a directory 67 | d = S3Dir("dir") 68 | d.add_child(f1) 69 | d.add_child(f2) 70 | 71 | # When I get the size 72 | size = d.get_size() 73 | 74 | # Then I get the accumulated size 75 | size.should.equal(3) 76 | 77 | 78 | def test_s3_dir_size_nested(): 79 | """ 80 | An S3Dir should accumulate the sizes of its children including nested dirs 81 | """ 82 | # When I have S3 Files 83 | now = datetime.datetime.now() 84 | f1 = S3File("foo", 1, now) 85 | f2 = S3File("bar", 4, now) 86 | 87 | # And they are inside of nested directories 88 | d2 = S3Dir("inner") 89 | d2.add_child(f2) 90 | 91 | d1 = S3Dir("outer") 92 | d1.add_child(d2) 93 | d1.add_child(f1) 94 | 95 | # When I get the size 96 | size_1 = d1.get_size() 97 | size_2 = d2.get_size() 98 | 99 | # Then I get the accumulated size 100 | size_1.should.equal(5) 101 | size_2.should.equal(4) 102 | 103 | 104 | def test_s3_dir_bad_child(): 105 | """ 106 | An S3Dir should not let me add a bad child 107 | """ 108 | # When I have an S3Dir 109 | d = S3Dir("foo") 110 | 111 | # If I try to add a non S3 object it should fail 112 | d.add_child.when.called_with("banana").should.throw(Exception) 113 | 114 | 115 | def test_s3_dir_last_modified(): 116 | """ 117 | An S3Dir should show the latest last modified time of its children 118 | """ 119 | # When I have an S3 Dir 120 | d1 = S3Dir("outer") 121 | d2 = S3Dir("inner") 122 | d1.add_child(d2) 123 | 124 | # And I have files 125 | new = datetime.datetime.now() 126 | old = datetime.datetime.now() - datetime.timedelta(hours=1) 127 | f1 = S3File("foo", 1, new) 128 | f2 = S3File("bar", 4, old) 129 | 130 | # And they are nested in directories 131 | d1.add_child(f1) 132 | d2.add_child(f2) 133 | 134 | # When I get the last modified time 135 | last_modified_1 = d1.get_last_modified() 136 | last_modified_2 = d2.get_last_modified() 137 | 138 | # Then it shows the lastest time of its children 139 | 
last_modified_1.should.equal(new) 140 | last_modified_2.should.equal(old) 141 | 142 | 143 | def test_add_node(): 144 | """ 145 | add_node should add an S3File to a node if it is a file 146 | """ 147 | # When I have a node 148 | node = S3Dir("") 149 | 150 | # And I have a boto key 151 | key = BotoKey("foo") 152 | 153 | # When I add the key to the node 154 | add_key(node, key, key.name) 155 | 156 | # Then It adds an S3File 157 | node.files[0].name.should.equal("foo") 158 | 159 | 160 | def test_add_node_nested(): 161 | """ 162 | add_node should add nested nodes if given nested keys 163 | """ 164 | # When I have a node 165 | node = S3Dir("") 166 | 167 | # And I have a boto key 168 | key = BotoKey("top/middle/foo") 169 | 170 | # When I add the key to the node 171 | add_key(node, key, key.name) 172 | 173 | # Then It adds the appropriate nodes 174 | node.dirs[0].name.should.equal("top") 175 | node.dirs[0].dirs[0].name.should.equal("middle") 176 | node.dirs[0].dirs[0].files[0].name.should.equal("foo") 177 | 178 | 179 | def test_add_directory(): 180 | """ 181 | add_node should add directories without sub files 182 | """ 183 | # When I have a node 184 | node = S3Dir("") 185 | 186 | # And I have a boto key 187 | key = BotoKey("top/aa/") 188 | key2 = BotoKey("top/zz") 189 | 190 | # When I add the key to the node 191 | add_key(node, key, key.name) 192 | add_key(node, key2, key2.name) 193 | 194 | # Then It adds the appropriate nodes 195 | node.dirs[0].name.should.equal("top") 196 | node.dirs[0].dirs[0].name.should.equal("aa") 197 | node.dirs[0].files[0].name.should.equal("zz") 198 | 199 | 200 | def test_add_multiple_files_to_directory(): 201 | """ 202 | add_node should add directories with multiple sub files 203 | """ 204 | # When I have a node 205 | node = S3Dir("") 206 | 207 | # And I have a boto key 208 | key = BotoKey("middle/aa") 209 | key2 = BotoKey("middle/zz") 210 | 211 | # When I add the key to the node 212 | add_key(node, key, key.name) 213 | add_key(node, key2, 
key2.name) 214 | 215 | # Then It adds the appropriate nodes 216 | node.dirs[0].name.should.equal("middle") 217 | node.dirs[0].files[0].name.should.equal("aa") 218 | node.dirs[0].files[1].name.should.equal("zz") 219 | 220 | 221 | def test_add_directory_with_multiple_sub_directories(): 222 | """ 223 | add_node should add directories with multiple sub directories 224 | """ 225 | # When I have a node 226 | node = S3Dir("") 227 | 228 | # And I have a boto key 229 | key = BotoKey("top/top/aa") 230 | key2 = BotoKey("top/bottom/zz") 231 | 232 | # When I add the key to the node 233 | add_key(node, key, key.name) 234 | add_key(node, key2, key2.name) 235 | 236 | # Then It adds the appropriate nodes 237 | node.dirs[0].name.should.equal("top") 238 | node.dirs[0].dirs[0].name.should.equal("top") 239 | node.dirs[0].dirs[1].name.should.equal("bottom") 240 | node.dirs[0].dirs[0].files[0].name.should.equal("aa") 241 | node.dirs[0].dirs[1].files[0].name.should.equal("zz") 242 | -------------------------------------------------------------------------------- /tests/util.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | import boto 5 | import datetime 6 | 7 | from boto.s3.key import Key 8 | 9 | now = datetime.datetime.now() 10 | 11 | 12 | class S3File(object): 13 | 14 | def __init__(self, name, last_modified=now, size=1): 15 | self.name = name 16 | self.last_modified = last_modified 17 | self.size = size 18 | 19 | 20 | def get_unsorted_list_of_files(prefix=None): 21 | if prefix: 22 | _prefix = "{}/".format(prefix) 23 | else: 24 | _prefix = "" 25 | a = S3File(_prefix + "a", datetime.datetime.now()) 26 | b = S3File(_prefix + "b", datetime.datetime.now() - datetime.timedelta(hours=1)) 27 | c = S3File(_prefix + "c", datetime.datetime.now() - datetime.timedelta(hours=2)) 28 | 29 | return [b, a, c] 30 | 31 | 32 | def populate_bucket(bucket_name, keys): 33 | conn = 
boto.connect_s3() 34 | conn.create_bucket(bucket_name) 35 | bucket = conn.get_bucket(bucket_name) 36 | for key in keys: 37 | k = Key(bucket) 38 | k.key = key 39 | k.set_contents_from_string(key) 40 | return bucket, conn 41 | --------------------------------------------------------------------------------