├── .gitignore ├── CHANGELOG.md ├── humans.txt ├── createsitedump.sh ├── backup.py ├── backupsite.sh ├── cleandb.py ├── LICENSE.md ├── _config.py ├── imageresize.sh ├── Dropbox-Uploader ├── testUnit.sh ├── CHANGELOG.md ├── dropShell.sh └── dropbox_uploader.sh ├── README.md └── _lib.py /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | .DS_Store 3 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## Version 0.1 (October 12, 2016) 4 | 5 | Initial release. 6 | -------------------------------------------------------------------------------- /humans.txt: -------------------------------------------------------------------------------- 1 | Developer: Anton Fedianin 2 | Nickname: Tony Air 3 | Site: https://tony.twma.pro/ 4 | Contact: tony [at] twma.pro 5 | Skype: a2nt.fd -------------------------------------------------------------------------------- /createsitedump.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Creates MySQL dump 4 | # args: 5 | # $1 sites path 6 | # $2 site name 7 | # $3 mysql username 8 | # $4 mysql password 9 | 10 | cd $1 11 | mysqldump -u$3 -p$4 $2 > $2.sql -------------------------------------------------------------------------------- /backup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Main Digital Ocean Backing up script 5 | # apt-get install python-requests 6 | 7 | import os 8 | import sys 9 | import subprocess 10 | import _config 11 | 12 | # Uncomment to sync to use Digital Ocean block storage 13 | #_config._lib.create_blockstorage() 14 | #_config._lib.create_backups() 15 | #_config._lib.delete_blockstorage() 16 | 17 | # Uncomment to sync to use Digital Ocean temporary backup server 
18 | droplet = _config._lib.create_backup_server() 19 | _config._lib.sync_backup_server(droplet) 20 | _config._lib.delete_backup_server(str(droplet['id'])) 21 | -------------------------------------------------------------------------------- /backupsite.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # args: 3 | # $1 backup path 4 | # $2 sites path 5 | # $3 site name 6 | # $4 config path 7 | # $5 rm 8 | # $6 scripts path 9 | 10 | mkdir -p $1 11 | cd $2 12 | if [ -n "$5" ]; then 13 | zip -qruo $1/$3.zip $3/mysite $3/site $3/assets $3.sql $3/composer.json $3/humans.txt $3/robots.txt $3/favicon.ico 14 | else 15 | zip -qrmuo $1/$3.zip $3/mysite $3/site $3/assets $3.sql $3/composer.json $3/humans.txt $3/robots.txt $3/favicon.ico 16 | fi 17 | 18 | cd $4 19 | if [ -n "$5" ]; then 20 | zip -qruo $1/$3.zip ./example_config 21 | else 22 | zip -qrmuo $1/$3.zip ./example_config 23 | fi 24 | 25 | cd $2 26 | rm $3.sql 27 | # Upload to dropbox 28 | $6/Dropbox-Uploader/dropbox_uploader.sh upload $1/$3.zip 29 | 30 | rm $1/$3.zip -------------------------------------------------------------------------------- /cleandb.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # clean ups database using directory names 5 | # Example: website root has directory name my.domain.com 6 | # than mysql database shall have the same name my.domain.com 7 | 8 | # apt-get install python-mysqldb 9 | # pip install pymysql 10 | 11 | import os 12 | import MySQLdb 13 | import _config 14 | 15 | print 'Cleaning up website databases' 16 | connection = MySQLdb.connect(_config.host,_config.user,_config.password) 17 | cursor = connection.cursor() 18 | cursor.execute('SHOW DATABASES') 19 | 20 | for (db_name,) in cursor: 21 | if db_name not in _config.sites and db_name not in _config.excludedb: 22 | print 'Removing: ' + db_name 23 | cursor.execute('DROP DATABASE 
`'+db_name+'`') 24 | 25 | 26 | print 'Cleaning up nginx configuration files' 27 | nginxconfs = _config._lib.list_files(_config.nginxpath) 28 | 29 | for site_conf in nginxconfs: 30 | site_name = site_conf.replace('.conf','') 31 | if site_name not in _config.sites and site_conf not in _config.excludeconf: 32 | print 'Removing: ' + _config.nginxpath + '/' + site_conf 33 | os.remove(_config.nginxpath+'/'+site_conf) -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | BSD2 License 2 | 3 | Copyright (c) 2016, Anton Fedianin (https://twma.pro) 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 7 | 8 | Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 9 | Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
12 | -------------------------------------------------------------------------------- /_config.py: -------------------------------------------------------------------------------- 1 | import _lib 2 | 3 | # Digital Ocean API token (to create temporary block storage / server) 4 | token = '' 5 | # Digital Ocean SSH Key ID (will be used to connect to temporary backing up server) 6 | sshkeyid = '' 7 | 8 | # your web sites root folder path (with the folders named: my.domain.com) 9 | sitepath = '/srv' 10 | sites = _lib.list_dirs(sitepath) 11 | # exclude following folder names 12 | _lib.exclude('tmp') 13 | _lib.exclude('01virus.twma.pro') 14 | _lib.exclude('02empty') 15 | _lib.exclude('backups') 16 | 17 | # your block storage mount point / local backing up folder path 18 | backuppath = '/mnt/backup' 19 | # framework configuration path (to store example_config folder which will be added to backup file) 20 | configpath = '/srv-service/conf.d/php' 21 | # server scripts path 22 | scriptspath = '/srv-service/scripts' 23 | 24 | # your web sites configuraion nginx path (will be used to remove excessive my.domain.com.conf files) 25 | nginxpath = '/srv-service/conf.d/nginx/sites' 26 | # Exclude following nginx conf files 27 | excludeconf = [ 28 | '01fastcgi_cache_zone.conf', 29 | '02twma.pro.conf', 30 | '03ban_ip.conf', 31 | '04gzip.conf', 32 | ] 33 | 34 | # MySQL host (will be used to backup database and to remove excessive my.domain.com databases) 35 | host = 'localhost' 36 | # MySQL user 37 | user = 'root' 38 | # MySQL password 39 | password = '' 40 | # Exclude following MySQL DB's 41 | excludedb = [ 42 | 'performance_schema', 43 | 'information_schema', 44 | 'mysql', 45 | 'user', 46 | 'sys', 47 | ] 48 | 49 | servername = _lib.current_server_name() 50 | server = _lib.get_region_and_id(servername) 51 | region = server[0] 52 | serverid = server[1] 53 | 54 | volumeid = 0 -------------------------------------------------------------------------------- /imageresize.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Resizes all images in current directory to max size 1900x1200 5 | # 6 | 7 | #echo "Resizing images to 1900x1200" 8 | 9 | 10 | #for line in $(find . -iname '*.png'); do 11 | # echo "Resizing: $line" 12 | # mogrify -resize 1900x1200 -depth 8 -filter Triangle -define filter:support=2 -unsharp 0.25x0.08+8.3+0.045 -dither None -posterize 136 -quality 95 -define jpeg:fancy-upsampling=off -define png:compression-filter=5 -define png:compression-level=9 -define png:compression-strategy=1 -define png:exclude-chunk=all -interlace none -strip "$line" 13 | #done 14 | 15 | #for line in $(find . -iname '*.jpg'); do 16 | # echo "Resizing: $line" 17 | # mogrify -resize 1900x1200 -depth 8 -filter Triangle -define filter:support=2 -unsharp 0.25x0.08+8.3+0.045 -dither None -posterize 136 -quality 95 -define jpeg:fancy-upsampling=off -define png:compression-filter=5 -define png:compression-level=9 -define png:compression-strategy=1 -define png:exclude-chunk=all -interlace none -strip "$line" 18 | #done 19 | 20 | #for line in $(find . -iname '*.jpeg'); do 21 | # echo "Resizing: $line" 22 | # mogrify -resize 1900x1200 -depth 8 -filter Triangle -define filter:support=2 -unsharp 0.25x0.08+8.3+0.045 -dither None -posterize 136 -quality 95 -define jpeg:fancy-upsampling=off -define png:compression-filter=5 -define png:compression-level=9 -define png:compression-strategy=1 -define png:exclude-chunk=all -interlace none -strip "$line" 23 | #done 24 | 25 | #find . -name "*.png~" -delete 26 | #find . -name "*.jpg~" -delete 27 | #find . -name "*.jpeg~" -delete 28 | 29 | 30 | # 31 | # Optimize Images 32 | # 33 | 34 | find . -name "*.jpg" | xargs jpegoptim -vf 35 | find . -name "*.JPG" | xargs jpegoptim -vf 36 | find . -name "*.jpeg" | xargs jpegoptim -vf 37 | find . -name "*.JPEG" | xargs jpegoptim -vf 38 | 39 | find . -name "*.png" | xargs pngquant -v -f --ext .png 40 | find . 
-name "*.PNG" | xargs pngquant -v -f --ext .PNG -------------------------------------------------------------------------------- /Dropbox-Uploader/testUnit.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DU=./dropbox_uploader.sh 4 | 5 | function check_exit 6 | { 7 | if [ $? -ne 0 ]; then 8 | echo " Error!!!" 9 | exit 1 10 | else 11 | echo " Passed" 12 | fi 13 | } 14 | 15 | #Creating garbage data 16 | echo -ne " - Creating garbage data...\n" 17 | rm -fr "testData" 18 | mkdir -p "testData" 19 | dd if=/dev/urandom of="testData/file 1.txt" bs=1M count=3 20 | dd if=/dev/urandom of="testData/file 2 ù.txt" bs=1M count=5 21 | mkdir -p "testData/recurse" 22 | dd if=/dev/urandom of="testData/recurse/file 3.txt" bs=1M count=1 23 | dd if=/dev/urandom of="testData/recurse/test_Ü.txt" bs=1M count=1 24 | mkdir -p "testData/recurse/dir 1/" 25 | dd if=/dev/urandom of="testData/recurse/dir 1/file 4.txt" bs=1M count=1 26 | mkdir -p "testData/recurse/dir 1/dir 3/" 27 | dd if=/dev/urandom of="testData/recurse/dir 1/dir 3/file 5.txt" bs=1M count=1 28 | mkdir -p "testData/recurse/dir 2/" 29 | 30 | rm -fr recurse 31 | 32 | #Rmdir 33 | echo -ne " - Remove remote directory..." 34 | $DU -q remove du_tests 35 | echo "" 36 | 37 | #Mkdir 38 | echo -ne " - Make remote directory..." 39 | $DU -q mkdir du_tests 40 | check_exit 41 | 42 | #Simple upload 43 | echo -ne " - Simple file upload..." 44 | $DU -q upload "testData/file 1.txt" du_tests 45 | check_exit 46 | 47 | #Checking with list 48 | echo -ne " - Checking file..." 49 | $DU -q list du_tests | grep "file 1.txt" > /dev/null 50 | check_exit 51 | 52 | #Simple upload 2 53 | echo -ne " - Simple file upload with special chars..." 54 | $DU -q upload testData/file\ 2* du_tests 55 | check_exit 56 | 57 | #Checking with list 58 | echo -ne " - Checking file..." 
59 | $DU -q list du_tests | grep "file 2 ù.txt" > /dev/null 60 | check_exit 61 | 62 | #Recursive directory upload 63 | echo -ne " - Recursive directory upload..." 64 | $DU -q upload testData/recurse du_tests 65 | check_exit 66 | 67 | #Recursive directory download 68 | echo -ne " - Recursive directory download..." 69 | $DU -q download du_tests/recurse 70 | check_exit 71 | 72 | #Checking the downloaded dir 73 | echo -ne " - Checking the downloaded dir..." 74 | diff -r recurse testData/recurse/ 75 | check_exit 76 | 77 | #Again, recursive directory download 78 | echo -ne " - Again recursive directory download..." 79 | $DU -q download du_tests/recurse 80 | check_exit 81 | 82 | #Again, checking the downloaded dir 83 | echo -ne " - Checking the downloaded dir..." 84 | diff -r recurse testData/recurse/ 85 | check_exit 86 | 87 | rm -fr "recurse" 88 | rm -fr "testData" 89 | 90 | #Rmdir 91 | echo -ne " - Remove remote directory..." 92 | $DU -q remove du_tests 93 | check_exit 94 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SilverStripe Digital Ocean + Dropbox + NGINX service scripts 2 | 3 | ## Dropbox SilverStripe environment backing up scripts using Digital Ocean temporary block storage or server 4 | 5 | Allows you to save time and money by creating a temporary Digital Ocean block storage / server and syncing your website dumps to Dropbox 6 | 7 | The script doesn't require additional space at your server to create backups, so you don't need to buy extra disc space 8 | 9 | Extra disc space will be used just for some time to create and upload website dumps, 10 | so you won't need to waste your time doing it manually, and you will save money because you don't need to buy extra disc space for a month. 
11 | 12 | 1) Creates a new Digital Ocean server 13 | 14 | 2) Creates a MySQL database dump based on the website root folder name (my.site.domain.com) 15 | 16 | 3) Sets up the Debian backup server environment: installs zip, uploads the backup scripts, and sets up the running server's Dropbox-Uploader config 17 | 18 | 4) Uploads the following folders to the backup server: mysite, site, assets, example_config 19 | and files: composer.json, humans.txt, robots.txt, favicon.ico 20 | and mysql dump: site_name.sql 21 | 22 | 5) Archives the following folders: mysite, site, assets, example_config 23 | and files: composer.json, humans.txt, robots.txt, favicon.ico 24 | and mysql dump: my.site.domain.com.sql 25 | to zip file my.site.domain.com 26 | 27 | 6) Syncs website dumps to Dropbox 28 | 29 | 7) Deletes the temporary server / storage 30 | 31 | ### Requirements 32 | 33 | * SilverStripe websites with an NGINX environment at Digital Ocean 34 | * Place these scripts into the /srv-service/scripts folder 35 | * Place your SilverStripe blank config in the /srv-service/conf.d/php folder 36 | * Your websites shall be at /srv in folders named my.site.domain.com 37 | * Your MySQL databases shall be named my.site.domain.com 38 | * Your NGINX config files shall be named my.site.domain.com 39 | * Folder paths may be changed (see _config.py) 40 | * Take a look at backup.py if you have the block storage option available 41 | 42 | ### Setup 43 | 44 | * Install Python 45 | * Install the following Python module: python-requests (the socket, subprocess and json modules ship with Python's standard library) 46 | * Install zip 47 | * Get an API key at the DigitalOcean control panel (it will be used to create the temporary storage / server) 48 | * Set up an SSH key at your server and add it to the Digital Ocean control panel (it will be used to access the temporary server) 49 | * Set up the variables in _config.py 50 | * Run Dropbox-Uploader/dropbox_uploader.sh to set up the Dropbox API variables 51 | * Run backup.py to start the backup process 52 | * You can add backup.py to crontab to execute it 
automatically 53 | 54 | ### Options 55 | * Install python-mysqldb to clean up MySQL database and NGINX configuration files based on site folder names and run cleandb.py 56 | * You can put cleandb.py to crontab to execute it automatically 57 | * Install imagemagic and run imageresize.sh script at a specific folder to resample all images bigger than 1900x1200 58 | 59 | [My personal website](https://tony.twma.pro) 60 | 61 | [Buy me a Beer](https://www.paypal.me/tonytwma) 62 | -------------------------------------------------------------------------------- /Dropbox-Uploader/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # CHANGELOG 2 | 3 | ## Version 0.2 - 10 Aug 2016 4 | * Add search option 5 | * Increase chunk size to 50MB for better performance 6 | * Add testUnit script 7 | * Upgrade to API v2 8 | * Fix #278: Documentation issue for command line 9 | * Add -h option to show human readable file sizes 10 | * Fix issue #240: the connection is retried in case of error for chunked uploads 11 | * The file name encoding problems including the issue #139 should be fixed now 12 | * Add system info to debug output 13 | * Fix issue #230: Version String isn't updated in dropbox_uploader 14 | * Fixed issue #227 15 | 16 | ## Version 0.16 - 31 Jul 2015 17 | * Add saveurl command to download remote files directly into a dropbox folder 18 | * Fixed issue #174: $DU needs quoting in dropShell 19 | * The share command will now return by default the short url 20 | * Improved portability of the file_size function 21 | * Fixed minor bash compatibility issue in the urlencode function 22 | * Improved normalize_path function and minor fix 23 | * Removed deprecated db_free_quota function 24 | 25 | ## Version 0.15 - 08 Jun 2015 26 | * Fix issue #163, List only show folders but not files 27 | * Fix issue #168, Share call not returning a share link 28 | * Fix issue #172, list command always return an unnamed dir on an empty directory 29 | * Fix 
issue #165, Issue with uploading a file to a folder 30 | * Fix issue #162, Uploading a folder to a folder that already exists on dropbox has unexpected behaviour 31 | * Updated setup procedure 32 | 33 | ## Version 0.14 - 15 Jul 2014 34 | * Fix issue #112, replaced bash condition -f with -e 35 | * Fix issue #117, sed: 1: invalid command code z 36 | * Fix issue #122, Directory upload doesn't work 37 | * Fix issue #132, enhanced cross compatibility for bash conditions 38 | 39 | ## Version 0.13 - 17 Jan 2014 40 | * Minor languages fixes, based on issue #88 41 | * Fix issue #91, sed take long time to process regexp on big data 42 | * Fix issue #87, second chunked upload fails after successful first 43 | * Fix issue #97, greadlink not selected on OSX Mavericks 44 | * Fix issues #99 and #100: The stat command in OSX 10.9 has no parameter named --format 45 | * Fix issue #99, clarified how to install greadlink (via homebrew) for Mac OS X. Thanks to paultreny 46 | * Improved list command and minor changes 47 | * Fix issue #103, transform Unicode code-points to characters in list command 48 | * Add VERSION output in DEBUG mode 49 | * DropShell: Fixed issue with quoted command parameters 50 | * Fix issue with absolute paths in CD command 51 | * Enhancement request #108: add initial wildcard support in upload command 52 | * Now the destination file/dir for the upload command is mandatory 53 | * Fixed issue listing files/folders names containing quotes 54 | 55 | ## Version 0.12.1 - 24 Oct 2013 56 | * Fixed unicode problem with special chars in file names 57 | * Fixed problem removing temp files 58 | * Improved detection of configuration error and better handling of http errors 59 | * Updated setup procedure 60 | * Fixed incompatibility with old bash version 61 | 62 | ## Version 0.12 - 06 Sep 2013 63 | * Fixed problem uploading twice the same directory 64 | * Add checking for not allowed file names 65 | * Handling connection/resolving host errors 66 | * Better error handling 67 | 
* Converted if condition to BASH native syntax 68 | * Add normalize_path function, to canonicalize paths 69 | * Add -s option to skip existing files when upload/download 70 | * Removed check for free quota on every file upload to improve performance 71 | * Add checks for file permissions before writing/reading files on the local fs 72 | * Add copy function 73 | * Fixed unicode problem in file listing 74 | * A lot of minor changes and improvements 75 | 76 | ## Version 0.11.9 - 27 Jun 2013 77 | * Add missing -f option in usage 78 | * Add some /dev/null redirects 79 | * Improved error messages 80 | * Improved grep syntax 81 | * Add function to check the cURL exit code 82 | * Improved messages output 83 | * Add new command line options: -q -d -k 84 | 85 | ## Version 0.11.8 - 05 Jun 2013 86 | * Add move/rename function 87 | * Improved the configuration file management (thanks to Robert G.) 88 | * Updated strings to reflect the new Dropbox "Create App" page 89 | * Add support for download directories 90 | * Add support for upload directories 91 | 92 | ## Version 0.11.7 - 23 Apr 2013 93 | * Fixed issue with special chars 94 | * Fix for iOS 95 | 96 | ## Version 0.11.6 - 15 Mar 2013 97 | * Add optional command-line parameter ('-f') to read dropbox configuration from a specific file (thanks to pjv) 98 | 99 | ## Version 0.11.5 - 22 Gen 2013 100 | * Added the ability to get a share link for a specified file (thanks to camspiers) 101 | 102 | ## Version 0.11.4 - 17 Gen 2013 103 | * Fix for QNAP compatibility (thanks to Fritz Ferstl) 104 | * Implemented mkdir command (thanks to Joel Maslak) 105 | * Fix for Solaris compatibility 106 | 107 | ## Version 0.11.3 - 22 Dec 2012: 108 | * Improved list command (thanks to Robert González) 109 | * Fixed problem with unicode characters 110 | 111 | ## Version 0.11.2 - 14 Nov 2012: 112 | * Added a check for the free quota before uploading a file 113 | * Now the quota informations are displayed in Mb 114 | * Removed urlencode function for 
incompatibility with older curl versions 115 | * Fixed problem uploading files that contains @ character 116 | * Minor changes 117 | 118 | ## Version 0.11.1 - 12 Nov 2012: 119 | * As suggested by the DropBox API documentation, the default chunk for chunked uploads is now 4Mb 120 | * Minor changes 121 | 122 | ## Version 0.11 - 11 Nov 2012: 123 | * Parameterized the curl binary location 124 | * Fix for MacOSX 10.8 (thanks to Ben - www.aquiltforever.com) 125 | 126 | ## Version 0.10 - 03 Nov 2012: 127 | * Code clean 128 | * Improved urlencode function (thanks to Stefan Trauth * www.stefantrauth.de) 129 | * Added command remove as alias of delete 130 | * Fix for Raspberry PI 131 | * Now if an error occurs during a chunk uploading, the upload is retried for a maximum of three times 132 | * Minor changes 133 | * Tested on Cygwin and MacOSX 134 | 135 | ## Version 0.9.9 - 24 Oct 2012: 136 | * Added the possibility to choose the access level (App folder o Full Dropbox) during the setup procedure 137 | * Added a check for the BASH shell version 138 | * Fixed problems in listing files/directories with special characters 139 | * Added the option CURL_ACCEPT_CERTIFICATES (see the script source) 140 | * Added back the standard upload function. Now only if the file is greater than 150Mb, the chunked_upload API will be used. 141 | * Fixed compatibility with bsd sed. Tested on FreeBSD, but probably it works on others bsd versions and osx. Let me know! 142 | * Minor changes 143 | 144 | ## Version 0.9.8 - 03 Oct 2012: 145 | * Implemented chunked upload. Now there is no limit to file size! 
146 | 147 | ## Version 0.9.7 - 14 Sep 2012: 148 | * Fixed bug in listing empty directories 149 | 150 | ## Version 0.9.6 - 12 Sep 2012: 151 | * Implemented list command 152 | * Minor changes 153 | 154 | ## Version 0.9.5 - 18 Jul 2012: 155 | * Added a check for the maximum file size allowed by the DropBox API 156 | * Minor changes 157 | 158 | ## Version 0.9.4 - 19 Mar 2012: 159 | * Implemented delete command 160 | * Minor changes 161 | 162 | ## Version 0.9.3 - 01 Mar 2012: 163 | * Implemented download command 164 | * Improved info output 165 | * Fixed utime function 166 | * Added dependency check for basename 167 | * The script always returns 1 when errors occurs 168 | * Improved error handling 169 | * Fixed problem with spaces in config file name 170 | * Minor bug fixes 171 | 172 | ## Version 0.9.2 - 28 Feb 2012: 173 | * Increased security, now any user can create his own Dropbox App 174 | 175 | ## Version 0.9.1 - 27 Feb 2012: 176 | * Fixed problem with spaces in dst file name 177 | 178 | ## Version 0.9 - 27 Feb 2012: 179 | * Code rewritten from scratch (CLI changed) 180 | * Improved security and stability using official dropbox API, no more username/password needed! 181 | 182 | ## Version 0.8.2 - 07 Sep 2011: 183 | * Removed INTERACTIVE_MODE variable (now the progress bar is shown in VERBOSE mode) 184 | * Improved command line interface and error messages 185 | * Minor bug fixes 186 | 187 | ## Version 0.8.1 - 31 Aug 2011 (by Dawid Ferenczy - www.ferenczy.cz) 188 | * added prompt for the Dropbox password from keyboard, if there is no password 189 | hardcoded or given as script command line parameter (interactive mode) 190 | * added INTERACTIVE_MODE variable - when set to 1 show CURL progress bar. 191 | Set to 1 automatically when there is no password hardcoded or given as 192 | parameter. Controls verbosity of CURL. 
193 | 194 | ## Version 0.7.1 - 10 Mar 2011: 195 | * Minor bug fixes 196 | 197 | ## Version 0.7 - 10 Mar 2011: 198 | * New command line interface 199 | * Code clean 200 | 201 | ## Version 0.6 - 11 Gen 2011: 202 | * Fixed issue with spaces in file/forder name 203 | 204 | ## Version 0.5 - 04 Gen 2011: 205 | * Recursive directory upload 206 | 207 | ## Version 0.4 - 29 Dec 2010: 208 | * Now works on BSD and MAC 209 | * Interactive prompt for username and password 210 | * Speeded up the uploading process 211 | * Debug mode 212 | 213 | ## Version 0.3 - 18 Nov 2010: 214 | * Regex updated 215 | 216 | ## Version 0.2 - 04 Sep 2010: 217 | * Removed dependencies from tempfile 218 | * Code clean 219 | 220 | ## Version 0.1 - 23 Aug 2010: 221 | * Initial release 222 | -------------------------------------------------------------------------------- /Dropbox-Uploader/dropShell.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # DropShell 4 | # 5 | # Copyright (C) 2013-2014 Andrea Fabrizi 6 | # 7 | # This program is free software; you can redistribute it and/or modify 8 | # it under the terms of the GNU General Public License as published by 9 | # the Free Software Foundation; either version 2 of the License, or 10 | # (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU General Public License for more details. 16 | # 17 | # You should have received a copy of the GNU General Public License 18 | # along with this program; if not, write to the Free Software 19 | # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
20 | # 21 | 22 | #Looking for dropbox uploader 23 | if [ -f "./dropbox_uploader.sh" ]; then 24 | DU="./dropbox_uploader.sh" 25 | else 26 | DU=$(which dropbox_uploader.sh) 27 | if [ $? -ne 0 ]; then 28 | echo "Dropbox Uploader not found!" 29 | exit 1 30 | fi 31 | fi 32 | 33 | #For MacOSX, install coreutils (which includes greadlink) 34 | # $brew install coreutils 35 | if [ "${OSTYPE:0:6}" == "darwin" ]; then 36 | READLINK="greadlink" 37 | else 38 | READLINK="readlink" 39 | fi 40 | 41 | SHELL_HISTORY=~/.dropshell_history 42 | DU_OPT="-q" 43 | BIN_DEPS="id $READLINK ls basename ls pwd cut" 44 | VERSION="0.2" 45 | 46 | umask 077 47 | 48 | #Dependencies check 49 | for i in $BIN_DEPS; do 50 | which $i > /dev/null 51 | if [ $? -ne 0 ]; then 52 | echo -e "Error: Required program could not be found: $i" 53 | exit 1 54 | fi 55 | done 56 | 57 | #Check DropBox Uploader 58 | if [ ! -f "$DU" ]; then 59 | echo "Dropbox Uploader not found: $DU" 60 | echo "Please change the 'DU' variable according to the Dropbox Uploader location." 61 | exit 1 62 | else 63 | DU=$($READLINK -m "$DU") 64 | fi 65 | 66 | #Returns the current user 67 | function get_current_user 68 | { 69 | id -nu 70 | } 71 | 72 | function normalize_path 73 | { 74 | $READLINK -m "$1" 75 | } 76 | 77 | ################ 78 | #### START #### 79 | ################ 80 | 81 | echo -e "DropShell v$VERSION" 82 | echo -e "The Interactive Dropbox SHELL" 83 | echo -e "Andrea Fabrizi - andrea.fabrizi@gmail.com\n" 84 | echo -e "Type help for the list of the available commands.\n" 85 | 86 | history -r "$SHELL_HISTORY" 87 | username=$(get_current_user) 88 | 89 | #Initial Working Directory 90 | CWD="/" 91 | 92 | function sh_ls 93 | { 94 | local arg1=$1 95 | 96 | #Listing current dir 97 | if [ -z "$arg1" ]; then 98 | "$DU" $DU_OPT list "$CWD" 99 | 100 | #Listing $arg1 101 | else 102 | 103 | #Relative or absolute path? 
104 | if [ ${arg1:0:1} == "/" ]; then 105 | "$DU" $DU_OPT list "$(normalize_path "$arg1")" 106 | else 107 | "$DU" $DU_OPT list "$(normalize_path "$CWD/$arg1")" 108 | fi 109 | 110 | #Checking for errors 111 | if [ $? -ne 0 ]; then 112 | echo -e "ls: cannot access '$arg1': No such file or directory" 113 | fi 114 | fi 115 | } 116 | 117 | function sh_cd 118 | { 119 | local arg1=$1 120 | 121 | OLD_CWD=$CWD 122 | 123 | if [ -z "$arg1" ]; then 124 | CWD="/" 125 | elif [ ${arg1:0:1} == "/" ]; then 126 | CWD=$arg1 127 | else 128 | CWD=$(normalize_path "$OLD_CWD/$arg1/") 129 | fi 130 | 131 | "$DU" $DU_OPT list "$CWD" > /dev/null 132 | 133 | #Checking for errors 134 | if [ $? -ne 0 ]; then 135 | echo -e "cd: $arg1: No such file or directory" 136 | CWD=$OLD_CWD 137 | fi 138 | } 139 | 140 | function sh_get 141 | { 142 | local arg1=$1 143 | local arg2=$2 144 | 145 | if [ ! -z "$arg1" ]; then 146 | 147 | #Relative or absolute path? 148 | if [ ${arg1:0:1} == "/" ]; then 149 | "$DU" $DU_OPT download "$(normalize_path "$arg1")" "$arg2" 150 | else 151 | "$DU" $DU_OPT download "$(normalize_path "$CWD/$arg1")" "$arg2" 152 | fi 153 | 154 | #Checking for errors 155 | if [ $? -ne 0 ]; then 156 | echo -e "get: Download error" 157 | fi 158 | 159 | #args error 160 | else 161 | echo -e "get: missing operand" 162 | echo -e "syntax: get [LOCAL_FILE/DIR]" 163 | fi 164 | } 165 | 166 | function sh_put 167 | { 168 | local arg1=$1 169 | local arg2=$2 170 | 171 | if [ ! -z "$arg1" ]; then 172 | 173 | #Relative or absolute path? 174 | if [ "${arg2:0:1}" == "/" ]; then 175 | "$DU" $DU_OPT upload "$arg1" "$(normalize_path "$arg2")" 176 | else 177 | "$DU" $DU_OPT upload "$arg1" "$(normalize_path "$CWD/$arg2")" 178 | fi 179 | 180 | #Checking for errors 181 | if [ $? 
-ne 0 ]; then 182 | echo -e "put: Upload error" 183 | fi 184 | 185 | #args error 186 | else 187 | echo -e "put: missing operand" 188 | echo -e "syntax: put " 189 | fi 190 | } 191 | 192 | function sh_rm 193 | { 194 | local arg1=$1 195 | 196 | if [ ! -z "$arg1" ]; then 197 | 198 | #Relative or absolute path? 199 | if [ ${arg1:0:1} == "/" ]; then 200 | "$DU" $DU_OPT remove "$(normalize_path "$arg1")" 201 | else 202 | "$DU" $DU_OPT remove "$(normalize_path "$CWD/$arg1")" 203 | fi 204 | 205 | #Checking for errors 206 | if [ $? -ne 0 ]; then 207 | echo -e "rm: cannot remove '$arg1'" 208 | fi 209 | 210 | #args error 211 | else 212 | echo -e "rm: missing operand" 213 | echo -e "syntax: rm " 214 | fi 215 | } 216 | 217 | function sh_mkdir 218 | { 219 | local arg1=$1 220 | 221 | if [ ! -z "$arg1" ]; then 222 | 223 | #Relative or absolute path? 224 | if [ ${arg1:0:1} == "/" ]; then 225 | "$DU" $DU_OPT mkdir "$(normalize_path "$arg1")" 226 | else 227 | "$DU" $DU_OPT mkdir "$(normalize_path "$CWD/$arg1")" 228 | fi 229 | 230 | #Checking for errors 231 | if [ $? -ne 0 ]; then 232 | echo -e "mkdir: cannot create directory '$arg1'" 233 | fi 234 | 235 | #args error 236 | else 237 | echo -e "mkdir: missing operand" 238 | echo -e "syntax: mkdir " 239 | fi 240 | } 241 | 242 | function sh_mv 243 | { 244 | local arg1=$1 245 | local arg2=$2 246 | 247 | if [ ! -z "$arg1" -a ! -z "$arg2" ]; then 248 | 249 | #SRC relative or absolute path? 250 | if [ ${arg1:0:1} == "/" ]; then 251 | SRC="$arg1" 252 | else 253 | SRC="$CWD/$arg1" 254 | fi 255 | 256 | #DST relative or absolute path? 257 | if [ ${arg2:0:1} == "/" ]; then 258 | DST="$arg2" 259 | else 260 | DST="$CWD/$arg2" 261 | fi 262 | 263 | "$DU" $DU_OPT move "$(normalize_path "$SRC")" "$(normalize_path "$DST")" 264 | 265 | #Checking for errors 266 | if [ $? 
-ne 0 ]; then 267 | echo -e "mv: cannot move '$arg1' to '$arg2'" 268 | fi 269 | 270 | #args error 271 | else 272 | echo -e "mv: missing operand" 273 | echo -e "syntax: mv " 274 | fi 275 | } 276 | 277 | function sh_cp 278 | { 279 | local arg1=$1 280 | local arg2=$2 281 | 282 | if [ ! -z "$arg1" -a ! -z "$arg2" ]; then 283 | 284 | #SRC relative or absolute path? 285 | if [ ${arg1:0:1} == "/" ]; then 286 | SRC="$arg1" 287 | else 288 | SRC="$CWD/$arg1" 289 | fi 290 | 291 | #DST relative or absolute path? 292 | if [ ${arg2:0:1} == "/" ]; then 293 | DST="$arg2" 294 | else 295 | DST="$CWD/$arg2" 296 | fi 297 | 298 | "$DU" $DU_OPT copy "$(normalize_path "$SRC")" "$(normalize_path "$DST")" 299 | 300 | #Checking for errors 301 | if [ $? -ne 0 ]; then 302 | echo -e "cp: cannot copy '$arg1' to '$arg2'" 303 | fi 304 | 305 | #args error 306 | else 307 | echo -e "cp: missing operand" 308 | echo -e "syntax: cp " 309 | fi 310 | } 311 | 312 | function sh_free 313 | { 314 | "$DU" $DU_OPT info | grep "Free:" | cut -f 2 315 | } 316 | 317 | function sh_cat 318 | { 319 | local arg1=$1 320 | 321 | if [ ! 
-z "$arg1" ]; then 322 | 323 | tmp_cat="/tmp/sh_cat_$RANDOM" 324 | sh_get "$arg1" "$tmp_cat" 325 | cat "$tmp_cat" 326 | rm -fr "$tmp_cat" 327 | 328 | #args error 329 | else 330 | echo -e "cat: missing operand" 331 | echo -e "syntax: cat " 332 | fi 333 | } 334 | 335 | while (true); do 336 | 337 | #Reading command from shell 338 | read -e -p "$username@Dropbox:$CWD$ " input 339 | 340 | #Tokenizing command 341 | eval tokens=($input) 342 | cmd=${tokens[0]} 343 | arg1=${tokens[1]} 344 | arg2=${tokens[2]} 345 | 346 | #Saving command in the history file 347 | history -s "$input" 348 | history -w "$SHELL_HISTORY" 349 | 350 | case $cmd in 351 | 352 | ls) 353 | sh_ls "$arg1" 354 | ;; 355 | 356 | cd) 357 | sh_cd "$arg1" 358 | ;; 359 | 360 | pwd) 361 | echo $CWD 362 | ;; 363 | 364 | get) 365 | sh_get "$arg1" "$arg2" 366 | ;; 367 | 368 | put) 369 | sh_put "$arg1" "$arg2" 370 | ;; 371 | 372 | rm) 373 | sh_rm "$arg1" 374 | ;; 375 | 376 | mkdir) 377 | sh_mkdir "$arg1" 378 | ;; 379 | 380 | mv) 381 | sh_mv "$arg1" "$arg2" 382 | ;; 383 | 384 | cp) 385 | sh_cp "$arg1" "$arg2" 386 | ;; 387 | 388 | cat) 389 | sh_cat "$arg1" 390 | ;; 391 | 392 | free) 393 | sh_free 394 | ;; 395 | 396 | lls) 397 | ls -l 398 | ;; 399 | 400 | lpwd) 401 | pwd 402 | ;; 403 | 404 | lcd) 405 | cd "$arg1" 406 | ;; 407 | 408 | help) 409 | echo -e "Supported commands: ls, cd, pwd, get, put, cat, rm, mkdir, mv, cp, free, lls, lpwd, lcd, help, exit\n" 410 | ;; 411 | 412 | quit|exit) 413 | exit 0 414 | ;; 415 | 416 | *) 417 | if [ ! 
-z "$cmd" ]; then 418 | echo -ne "Unknown command: $cmd\n" 419 | fi 420 | ;; 421 | esac 422 | done 423 | 424 | -------------------------------------------------------------------------------- /_lib.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import subprocess 4 | import socket 5 | import json 6 | import requests 7 | import time 8 | import _config 9 | 10 | def list_files(folder): 11 | return [d for d in os.listdir(folder) if os.path.isfile(os.path.join(folder, d))] 12 | def list_dirs(folder): 13 | return [d for d in os.listdir(folder) if os.path.isdir(os.path.join(folder, d))] 14 | def exclude(value): 15 | if value in _config.sites: 16 | _config.sites.remove(value) 17 | 18 | # get current server name 19 | def current_server_name(): 20 | return socket.getfqdn() 21 | 22 | # list servers 23 | def get_server_info(servername): 24 | r = requests.get( 25 | 'https://api.digitalocean.com/v2/droplets', 26 | **{'headers': {'Authorization': 'Bearer ' + _config.token}} 27 | ) 28 | data = json.loads(r.text) 29 | 30 | if r.status_code != requests.codes.ok: 31 | print 'Unable to get list of servers:' 32 | print data 33 | sys.exit(0) 34 | 35 | for droplet in data['droplets']: 36 | if servername == droplet['name']: 37 | return droplet 38 | 39 | # get current region 40 | def get_region_and_id(servername): 41 | droplet = get_server_info(servername) 42 | return [droplet['region']['slug'],str(droplet['id'])] 43 | 44 | def create_site_backup(site,archive = False): 45 | if ( 46 | os.path.exists(_config.sitepath + '/' + site + '/site') 47 | and os.path.exists(_config.sitepath + '/' + site + '/assets') 48 | ): 49 | print 'Backing up: ' + _config.sitepath + '/' + site 50 | subprocess.call([ 51 | _config.scriptspath + '/createsitedump.sh', 52 | _config.sitepath, 53 | site, 54 | _config.user, 55 | _config.password 56 | ]) 57 | if archive: 58 | subprocess.call([ 59 | _config.scriptspath + '/backupsite.sh', 60 | 
_config.backuppath, 61 | _config.sitepath, 62 | site, 63 | _config.configpath, 64 | _config.scriptspath 65 | ]) 66 | else: 67 | print 'Unknown framework: ' + _config.sitepath + '/' + site 68 | 69 | # create website backups 70 | def create_backups(): 71 | print 'Creating website backups ...' 72 | if not os.path.exists(_config.backuppath): 73 | os.makedirs(_config.backuppath) 74 | 75 | for site in _config.sites: 76 | create_site_backup(site,True) 77 | 78 | # create and attach block storage 79 | def create_blockstorage(): 80 | print 'Creating block storage ...' 81 | r = requests.post( 82 | 'https://api.digitalocean.com/v2/volumes', 83 | **{ 84 | 'headers': { 85 | 'Content-type': 'application/json', 86 | 'Authorization': 'Bearer '+_config.token 87 | }, 88 | 'data': '{"size_gigabytes": "20","name": "backup-'+_config.servername.replace('.','')+'","region": "'+_config.region+'"}' 89 | } 90 | ) 91 | 92 | data = json.loads(r.text) 93 | if r.status_code != requests.codes.ok: 94 | print 'Unable to create block storage:' 95 | print data 96 | sys.exit(0) 97 | 98 | _config.volumeid = str(data['volume']['id']) 99 | 100 | print 'Attaching block storage ...' 
101 | r = requests.post( 102 | 'https://api.digitalocean.com/v2/volumes/'+_config.volumeid+'/actions', 103 | **{ 104 | 'headers': { 105 | 'Content-type': 'application/json', 106 | 'Authorization': 'Bearer '+_config.token 107 | }, 108 | 'data': '{"type": "attach","droplet_id": "'+_config.serverid+'","region": "'+_config.region+'"}' 109 | } 110 | ) 111 | if r.status_code != requests.codes.ok: 112 | print 'Unable to attach block storage:' 113 | print data 114 | sys.exit(0) 115 | 116 | subprocess.call(['parted','/dev/disk/by-id/scsi-0DO_Volume_backup-'+_config.servername.replace('.','')+' mklabel gpt']) 117 | subprocess.call(['parted','-a opt /dev/disk/by-id/scsi-0DO_Volume_backup-'+_config.servername.replace('.','')+' mkpart primary ext4 0% 100%']) 118 | 119 | subprocess.call(['mkfs.ext4','/dev/disk/by-id/scsi-0DO_Volume_backup-'+_config.servername.replace('.','')]) 120 | subprocess.call(['mkdir','-p /mnt/backup']) 121 | subprocess.call(['mount','-t ext4 /dev/disk/by-id/scsi-0DO_Volume_backup-'+_config.servername.replace('.','')+' -o rw /mnt/backup']) 122 | 123 | # Deattach and delete block storage 124 | def delete_blockstorage(): 125 | print 'Deattaching block storage ...' 126 | r = requests.post( 127 | 'https://api.digitalocean.com/v2/volumes/'+_config.volumeid+'/actions', 128 | **{ 129 | 'headers': { 130 | 'Content-type': 'application/json', 131 | 'Authorization': 'Bearer '+_config.token 132 | }, 133 | 'data': '{"type": "detach","droplet_id": "'+_config.serverid+'","region": "'+_config.region+'"}' 134 | } 135 | ) 136 | 137 | print 'Destroying block storage ...' 138 | r = requests.delete( 139 | 'https://api.digitalocean.com/v2/volumes?name=backup-'+_config.servername.replace('.','')+'®ion='+_config.region, 140 | **{'headers': {'Authorization': 'Bearer '+_config.token}} 141 | ) 142 | subprocess.call(['rm','-rf','/mnt/backup']) 143 | 144 | def create_backup_server(): 145 | print 'Creating backup server ...' 
146 | r = requests.post( 147 | 'https://api.digitalocean.com/v2/droplets', 148 | **{ 149 | 'headers': { 150 | 'Content-type': 'application/json', 151 | 'Authorization': 'Bearer '+_config.token 152 | }, 153 | 'data': '{"name": "backup-'+_config.servername+'","region": "'+_config.region+'","ssh_keys": ["'+_config.sshkeyid+'"],"size": "512mb","image": "ubuntu-14-04-x64","private_networking": true,"backups": false}' 154 | } 155 | ) 156 | 157 | data = json.loads(r.text) 158 | if r.status_code != 202: 159 | print 'Unable to create backup server:' 160 | print data 161 | sys.exit(0) 162 | 163 | # Wait 2 minutes before server will be created and activated 164 | time.sleep(120) 165 | droplet = get_server_info('backup-'+_config.servername) 166 | 167 | try: 168 | droplet['status'] 169 | except NameError: 170 | print 'Unable to find created droplet' 171 | sys.exit(0) 172 | 173 | if droplet['status'] != 'active': 174 | print 'Unable to get droplet activated' 175 | delete_backup_server(str(droplet['id'])) 176 | sys.exit(0) 177 | 178 | return droplet 179 | 180 | def sync_backup_server(droplet): 181 | 182 | for network in droplet['networks']['v4']: 183 | if network['type'] == 'public': #'private': 184 | dropletip = network['ip_address'] 185 | 186 | print 'Syncing to backup server: ' + dropletip 187 | 188 | # Install zip 189 | subprocess.call([ 190 | 'ssh', 191 | '-oStrictHostKeyChecking=no', 192 | 'root@'+dropletip, 193 | 'apt-get -yqq install zip' 194 | ]) 195 | 196 | # make php configs dir 197 | subprocess.call(['ssh','-q','-oStrictHostKeyChecking=no','root@'+dropletip,'mkdir -p '+_config.configpath]) 198 | # sync php configs 199 | subprocess.call(['scp','-q','-oStrictHostKeyChecking=no','-r',_config.configpath,'root@'+dropletip+':'+_config.configpath+'/..']) 200 | 201 | # make scripts dir 202 | subprocess.call(['ssh','-q','-oStrictHostKeyChecking=no','root@'+dropletip,'mkdir -p '+_config.scriptspath]) 203 | # sync scripts 204 | 
subprocess.call(['scp','-q','-oStrictHostKeyChecking=no','-r',_config.scriptspath,'root@'+dropletip+':'+_config.scriptspath+'/..']) 205 | 206 | # sync dropbox config 207 | subprocess.call(['scp','-q','-oStrictHostKeyChecking=no','-r','/root/.dropbox_uploader','root@'+dropletip+':/root']) 208 | 209 | # Wait 10 sec to complete 210 | time.sleep(10) 211 | 212 | for site in _config.sites: 213 | if ( 214 | os.path.exists(_config.sitepath + '/' + site + '/site') 215 | and os.path.exists(_config.sitepath + '/' + site + '/assets') 216 | ): 217 | create_site_backup(site,False) 218 | # sync site backup to backing up server 219 | subprocess.call(['ssh','-oStrictHostKeyChecking=no','root@'+dropletip,'mkdir -p '+_config.sitepath+'/'+site]) 220 | subprocess.call([ 221 | 'scp', 222 | '-q', 223 | '-oStrictHostKeyChecking=no', 224 | '-r', 225 | _config.sitepath+'/'+site+'/site', 226 | 'root@'+dropletip+':'+_config.sitepath+'/'+site 227 | ]) 228 | subprocess.call([ 229 | 'scp', 230 | '-q', 231 | '-oStrictHostKeyChecking=no', 232 | '-r', 233 | _config.sitepath+'/'+site+'/mysite', 234 | 'root@'+dropletip+':'+_config.sitepath+'/'+site 235 | ]) 236 | subprocess.call([ 237 | 'scp', 238 | '-q', 239 | '-oStrictHostKeyChecking=no', 240 | '-r', 241 | _config.sitepath+'/'+site+'/composer.json', 242 | 'root@'+dropletip+':'+_config.sitepath+'/'+site 243 | ]) 244 | subprocess.call([ 245 | 'scp', 246 | '-q', 247 | '-oStrictHostKeyChecking=no', 248 | '-r', 249 | _config.sitepath+'/'+site+'/humans.txt', 250 | 'root@'+dropletip+':'+_config.sitepath+'/'+site 251 | ]) 252 | subprocess.call([ 253 | 'scp', 254 | '-q', 255 | '-oStrictHostKeyChecking=no', 256 | '-r', 257 | _config.sitepath+'/'+site+'/robots.txt', 258 | 'root@'+dropletip+':'+_config.sitepath+'/'+site 259 | ]) 260 | subprocess.call([ 261 | 'scp', 262 | '-q', 263 | '-oStrictHostKeyChecking=no', 264 | '-r', 265 | _config.sitepath+'/'+site+'/favicon.ico', 266 | 'root@'+dropletip+':'+_config.sitepath+'/'+site 267 | ]) 268 | subprocess.call([ 269 
| 'scp', 270 | '-q', 271 | '-oStrictHostKeyChecking=no', 272 | '-r', 273 | _config.configpath+'/example_config', 274 | 'root@'+dropletip+':'+_config.sitepath+'/'+site 275 | ]) 276 | subprocess.call([ 277 | 'scp', 278 | '-q', 279 | '-oStrictHostKeyChecking=no', 280 | '-r', 281 | _config.sitepath+'/'+site+'/assets', 282 | 'root@'+dropletip+':'+_config.sitepath+'/'+site 283 | ]) 284 | subprocess.call([ 285 | 'scp', 286 | '-q', 287 | '-oStrictHostKeyChecking=no', 288 | '-r', 289 | _config.sitepath+'/'+site+'.sql', 290 | 'root@'+dropletip+':'+_config.sitepath 291 | ]) 292 | # call dropbox sync 293 | subprocess.call([ 294 | 'ssh', 295 | '-oStrictHostKeyChecking=no', 296 | 'root@'+dropletip, 297 | _config.scriptspath+'/backupsite.sh '+_config.backuppath+' '+_config.sitepath+' '+site+' '+' '+_config.configpath+' clean' 298 | ]) 299 | 300 | # remove site 301 | subprocess.call([ 302 | 'ssh', 303 | '-oStrictHostKeyChecking=no', 304 | 'root@'+dropletip, 305 | 'rm -rf '+_config.sitepath+'/'+site 306 | ]) 307 | 308 | subprocess.call([ 309 | 'rm', 310 | _config.sitepath+'/'+site+'.sql' 311 | ]) 312 | 313 | # Wait 10 sec before uploading next site 314 | time.sleep(10) 315 | 316 | 317 | 318 | def delete_backup_server(serverid): 319 | print 'Destroying backup server ...' 
320 | r = requests.delete( 321 | 'https://api.digitalocean.com/v2/droplets/' + serverid, 322 | **{'headers': {'Authorization': 'Bearer '+_config.token}} 323 | ) 324 | if r.status_code != 204: 325 | print 'Unable to delete server:' 326 | print r.text 327 | sys.exit(0) -------------------------------------------------------------------------------- /Dropbox-Uploader/dropbox_uploader.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Dropbox Uploader 4 | # 5 | # Copyright (C) 2010-2016 Andrea Fabrizi 6 | # 7 | # This program is free software; you can redistribute it and/or modify 8 | # it under the terms of the GNU General Public License as published by 9 | # the Free Software Foundation; either version 2 of the License, or 10 | # (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU General Public License for more details. 16 | # 17 | # You should have received a copy of the GNU General Public License 18 | # along with this program; if not, write to the Free Software 19 | # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 20 | # 21 | 22 | #Default configuration file 23 | CONFIG_FILE=~/.dropbox_uploader 24 | 25 | #Default chunk size in Mb for the upload process 26 | #It is recommended to increase this value only if you have enough free space on your /tmp partition 27 | #Lower values may increase the number of http requests 28 | CHUNK_SIZE=50 29 | 30 | #Curl location 31 | #If not set, curl will be searched into the $PATH 32 | #CURL_BIN="/usr/bin/curl" 33 | 34 | #Default values 35 | TMP_DIR="/tmp" 36 | DEBUG=0 37 | QUIET=0 38 | SHOW_PROGRESSBAR=0 39 | SKIP_EXISTING_FILES=0 40 | ERROR_STATUS=0 41 | 42 | #Don't edit these... 
43 | API_MIGRATE_V2="https://api.dropboxapi.com/1/oauth2/token_from_oauth1" 44 | API_LIST_FOLDER="https://api.dropboxapi.com/2/files/list_folder" 45 | API_CHUNKED_UPLOAD_START_URL="https://content.dropboxapi.com/2/files/upload_session/start" 46 | API_CHUNKED_UPLOAD_FINISH_URL="https://content.dropboxapi.com/2/files/upload_session/finish" 47 | API_CHUNKED_UPLOAD_APPEND_URL="https://content.dropboxapi.com/2/files/upload_session/append_v2" 48 | API_UPLOAD_URL="https://content.dropboxapi.com/2/files/upload" 49 | API_DOWNLOAD_URL="https://content.dropboxapi.com/2/files/download" 50 | API_DELETE_URL="https://api.dropboxapi.com/2/files/delete" 51 | API_MOVE_URL="https://api.dropboxapi.com/2/files/move" 52 | API_COPY_URL="https://api.dropboxapi.com/2/files/copy" 53 | API_METADATA_URL="https://api.dropboxapi.com/2/files/get_metadata" 54 | API_FOLDER_LIST_URL="https://api.dropboxapi.com/2/files/list_folder" 55 | API_ACCOUNT_INFO_URL="https://api.dropboxapi.com/2/users/get_current_account" 56 | API_ACCOUNT_SPACE_URL="https://api.dropboxapi.com/2/users/get_space_usage" 57 | API_MKDIR_URL="https://api.dropboxapi.com/2/files/create_folder" 58 | API_SHARE_URL="https://api.dropboxapi.com/2/sharing/create_shared_link_with_settings" 59 | API_SAVEURL_URL="https://api.dropboxapi.com/2/files/save_url" 60 | API_SAVEURL_JOBSTATUS_URL="https://api.dropboxapi.com/2/files/save_url/check_job_status" 61 | API_SEARCH_URL="https://api.dropboxapi.com/2/files/search" 62 | APP_CREATE_URL="https://www.dropbox.com/developers/apps" 63 | RESPONSE_FILE="$TMP_DIR/du_resp_$RANDOM" 64 | CHUNK_FILE="$TMP_DIR/du_chunk_$RANDOM" 65 | TEMP_FILE="$TMP_DIR/du_tmp_$RANDOM" 66 | BIN_DEPS="sed basename date grep stat dd mkdir" 67 | VERSION="0.2" 68 | 69 | umask 077 70 | 71 | #Check the shell 72 | if [ -z "$BASH_VERSION" ]; then 73 | echo -e "Error: this script requires the BASH shell!" 
74 | exit 1 75 | fi 76 | 77 | shopt -s nullglob #Bash allows filename patterns which match no files to expand to a null string, rather than themselves 78 | shopt -s dotglob #Bash includes filenames beginning with a "." in the results of filename expansion 79 | 80 | #Check temp folder 81 | if [[ ! -d "$TMP_DIR" ]]; then 82 | echo -e "Error: the temporary folder $TMP_DIR doesn't exists!" 83 | echo -e "Please edit this script and set the TMP_DIR variable to a valid temporary folder to use." 84 | exit 1 85 | fi 86 | 87 | #Look for optional config file parameter 88 | while getopts ":qpskdhf:" opt; do 89 | case $opt in 90 | 91 | f) 92 | CONFIG_FILE=$OPTARG 93 | ;; 94 | 95 | d) 96 | DEBUG=1 97 | ;; 98 | 99 | q) 100 | QUIET=1 101 | ;; 102 | 103 | p) 104 | SHOW_PROGRESSBAR=1 105 | ;; 106 | 107 | k) 108 | CURL_ACCEPT_CERTIFICATES="-k" 109 | ;; 110 | 111 | s) 112 | SKIP_EXISTING_FILES=1 113 | ;; 114 | 115 | h) 116 | HUMAN_READABLE_SIZE=1 117 | ;; 118 | 119 | \?) 120 | echo "Invalid option: -$OPTARG" >&2 121 | exit 1 122 | ;; 123 | 124 | :) 125 | echo "Option -$OPTARG requires an argument." >&2 126 | exit 1 127 | ;; 128 | 129 | esac 130 | done 131 | 132 | if [[ $DEBUG != 0 ]]; then 133 | echo $VERSION 134 | uname -a 2> /dev/null 135 | cat /etc/issue 2> /dev/null 136 | set -x 137 | RESPONSE_FILE="$TMP_DIR/du_resp_debug" 138 | fi 139 | 140 | if [[ $CURL_BIN == "" ]]; then 141 | BIN_DEPS="$BIN_DEPS curl" 142 | CURL_BIN="curl" 143 | fi 144 | 145 | #Dependencies check 146 | which $BIN_DEPS > /dev/null 147 | if [[ $? != 0 ]]; then 148 | for i in $BIN_DEPS; do 149 | which $i > /dev/null || 150 | NOT_FOUND="$i $NOT_FOUND" 151 | done 152 | echo -e "Error: Required program could not be found: $NOT_FOUND" 153 | exit 1 154 | fi 155 | 156 | #Check if readlink is installed and supports the -m option 157 | #It's not necessary, so no problem if it's not installed 158 | which readlink > /dev/null 159 | if [[ $? 
== 0 && $(readlink -m "//test" 2> /dev/null) == "/test" ]]; then 160 | HAVE_READLINK=1 161 | else 162 | HAVE_READLINK=0 163 | fi 164 | 165 | #Forcing to use the builtin printf, if it's present, because it's better 166 | #otherwise the external printf program will be used 167 | #Note that the external printf command can cause character encoding issues! 168 | builtin printf "" 2> /dev/null 169 | if [[ $? == 0 ]]; then 170 | PRINTF="builtin printf" 171 | PRINTF_OPT="-v o" 172 | else 173 | PRINTF=$(which printf) 174 | if [[ $? != 0 ]]; then 175 | echo -e "Error: Required program could not be found: printf" 176 | fi 177 | PRINTF_OPT="" 178 | fi 179 | 180 | #Print the message based on $QUIET variable 181 | function print 182 | { 183 | if [[ $QUIET == 0 ]]; then 184 | echo -ne "$1"; 185 | fi 186 | } 187 | 188 | #Returns unix timestamp 189 | function utime 190 | { 191 | echo $(date +%s) 192 | } 193 | 194 | #Remove temporary files 195 | function remove_temp_files 196 | { 197 | if [[ $DEBUG == 0 ]]; then 198 | rm -fr "$RESPONSE_FILE" 199 | rm -fr "$CHUNK_FILE" 200 | rm -fr "$TEMP_FILE" 201 | fi 202 | } 203 | 204 | #Converts bytes to human readable format 205 | function convert_bytes 206 | { 207 | if [[ $HUMAN_READABLE_SIZE == 1 && "$1" != "" ]]; then 208 | if (($1 > 1073741824));then 209 | echo $(($1/1073741824)).$(($1%1073741824/100000000))"G"; 210 | elif (($1 > 1048576));then 211 | echo $(($1/1048576)).$(($1%1048576/100000))"M"; 212 | elif (($1 > 1024));then 213 | echo $(($1/1024)).$(($1%1024/100))"K"; 214 | else 215 | echo $1; 216 | fi 217 | else 218 | echo $1; 219 | fi 220 | } 221 | 222 | #Returns the file size in bytes 223 | function file_size 224 | { 225 | #Generic GNU 226 | SIZE=$(stat --format="%s" "$1" 2> /dev/null) 227 | if [ $? -eq 0 ]; then 228 | echo $SIZE 229 | return 230 | fi 231 | 232 | #Some embedded linux devices 233 | SIZE=$(stat -c "%s" "$1" 2> /dev/null) 234 | if [ $? 
-eq 0 ]; then 235 | echo $SIZE 236 | return 237 | fi 238 | 239 | #BSD, OSX and other OSs 240 | SIZE=$(stat -f "%z" "$1" 2> /dev/null) 241 | if [ $? -eq 0 ]; then 242 | echo $SIZE 243 | return 244 | fi 245 | 246 | echo "0" 247 | } 248 | 249 | 250 | #Usage 251 | function usage 252 | { 253 | echo -e "Dropbox Uploader v$VERSION" 254 | echo -e "Andrea Fabrizi - andrea.fabrizi@gmail.com\n" 255 | echo -e "Usage: $0 [PARAMETERS] COMMAND..." 256 | echo -e "\nCommands:" 257 | 258 | echo -e "\t upload " 259 | echo -e "\t download [LOCAL_FILE/DIR]" 260 | echo -e "\t delete " 261 | echo -e "\t move " 262 | echo -e "\t copy " 263 | echo -e "\t mkdir " 264 | echo -e "\t list [REMOTE_DIR]" 265 | echo -e "\t share " 266 | echo -e "\t saveurl " 267 | echo -e "\t search " 268 | echo -e "\t info" 269 | echo -e "\t space" 270 | echo -e "\t unlink" 271 | 272 | echo -e "\nOptional parameters:" 273 | echo -e "\t-f Load the configuration file from a specific file" 274 | echo -e "\t-s Skip already existing files when download/upload. Default: Overwrite" 275 | echo -e "\t-d Enable DEBUG mode" 276 | echo -e "\t-q Quiet mode. Don't show messages" 277 | echo -e "\t-h Show file sizes in human readable format" 278 | echo -e "\t-p Show cURL progress meter" 279 | echo -e "\t-k Doesn't check for SSL certificates (insecure)" 280 | 281 | echo -en "\nFor more info and examples, please see the README file.\n\n" 282 | remove_temp_files 283 | exit 1 284 | } 285 | 286 | #Check the curl exit code 287 | function check_http_response 288 | { 289 | CODE=$? 290 | 291 | #Checking curl exit code 292 | case $CODE in 293 | 294 | #OK 295 | 0) 296 | 297 | ;; 298 | 299 | #Proxy error 300 | 5) 301 | print "\nError: Couldn't resolve proxy. 
The given proxy host could not be resolved.\n" 302 | 303 | remove_temp_files 304 | exit 1 305 | ;; 306 | 307 | #Missing CA certificates 308 | 60|58) 309 | print "\nError: cURL is not able to performs peer SSL certificate verification.\n" 310 | print "Please, install the default ca-certificates bundle.\n" 311 | print "To do this in a Debian/Ubuntu based system, try:\n" 312 | print " sudo apt-get install ca-certificates\n\n" 313 | print "If the problem persists, try to use the -k option (insecure).\n" 314 | 315 | remove_temp_files 316 | exit 1 317 | ;; 318 | 319 | 6) 320 | print "\nError: Couldn't resolve host.\n" 321 | 322 | remove_temp_files 323 | exit 1 324 | ;; 325 | 326 | 7) 327 | print "\nError: Couldn't connect to host.\n" 328 | 329 | remove_temp_files 330 | exit 1 331 | ;; 332 | 333 | esac 334 | 335 | #Checking response file for generic errors 336 | if grep -q "HTTP/1.1 400" "$RESPONSE_FILE"; then 337 | ERROR_MSG=$(sed -n -e 's/{"error": "\([^"]*\)"}/\1/p' "$RESPONSE_FILE") 338 | 339 | case $ERROR_MSG in 340 | *access?attempt?failed?because?this?app?is?not?configured?to?have*) 341 | echo -e "\nError: The Permission type/Access level configured doesn't match the DropBox App settings!\nPlease run \"$0 unlink\" and try again." 
342 | exit 1 343 | ;; 344 | esac 345 | 346 | fi 347 | 348 | } 349 | 350 | #Urlencode 351 | function urlencode 352 | { 353 | #The printf is necessary to correctly decode unicode sequences 354 | local string=$($PRINTF "${1}") 355 | local strlen=${#string} 356 | local encoded="" 357 | 358 | for (( pos=0 ; pos 1 ]]; then 379 | new_path="$new_path/" 380 | fi 381 | 382 | echo "$new_path" 383 | else 384 | echo "$path" 385 | fi 386 | } 387 | 388 | #Check if it's a file or directory 389 | #Returns FILE/DIR/ERR 390 | function db_stat 391 | { 392 | local FILE=$(normalize_path "$1") 393 | 394 | #Checking if it's a file or a directory 395 | $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE\"}" "$API_METADATA_URL" 2> /dev/null 396 | check_http_response 397 | 398 | local TYPE=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") 399 | 400 | case $TYPE in 401 | 402 | file) 403 | echo "FILE" 404 | ;; 405 | 406 | folder) 407 | echo "DIR" 408 | ;; 409 | 410 | deleted) 411 | echo "ERR" 412 | ;; 413 | 414 | *) 415 | echo "ERR" 416 | ;; 417 | 418 | esac 419 | } 420 | 421 | #Generic upload wrapper around db_upload_file and db_upload_dir functions 422 | #$1 = Local source file/dir 423 | #$2 = Remote destination file/dir 424 | function db_upload 425 | { 426 | local SRC=$(normalize_path "$1") 427 | local DST=$(normalize_path "$2") 428 | 429 | #Checking if the file/dir exists 430 | if [[ ! -e $SRC && ! -d $SRC ]]; then 431 | print " > No such file or directory: $SRC\n" 432 | ERROR_STATUS=1 433 | return 434 | fi 435 | 436 | #Checking if the file/dir has read permissions 437 | if [[ ! 
-r $SRC ]]; then 438 | print " > Error reading file $SRC: permission denied\n" 439 | ERROR_STATUS=1 440 | return 441 | fi 442 | 443 | TYPE=$(db_stat "$DST") 444 | 445 | #If DST it's a file, do nothing, it's the default behaviour 446 | if [[ $TYPE == "FILE" ]]; then 447 | DST="$DST" 448 | 449 | #if DST doesn't exists and doesn't ends with a /, it will be the destination file name 450 | elif [[ $TYPE == "ERR" && "${DST: -1}" != "/" ]]; then 451 | DST="$DST" 452 | 453 | #if DST doesn't exists and ends with a /, it will be the destination folder 454 | elif [[ $TYPE == "ERR" && "${DST: -1}" == "/" ]]; then 455 | local filename=$(basename "$SRC") 456 | DST="$DST/$filename" 457 | 458 | #If DST it's a directory, it will be the destination folder 459 | elif [[ $TYPE == "DIR" ]]; then 460 | local filename=$(basename "$SRC") 461 | DST="$DST/$filename" 462 | fi 463 | 464 | #It's a directory 465 | if [[ -d $SRC ]]; then 466 | db_upload_dir "$SRC" "$DST" 467 | 468 | #It's a file 469 | elif [[ -e $SRC ]]; then 470 | db_upload_file "$SRC" "$DST" 471 | 472 | #Unsupported object... 
473 | else 474 | print " > Skipping not regular file \"$SRC\"\n" 475 | fi 476 | } 477 | 478 | #Generic upload wrapper around db_chunked_upload_file and db_simple_upload_file 479 | #The final upload function will be choosen based on the file size 480 | #$1 = Local source file 481 | #$2 = Remote destination file 482 | function db_upload_file 483 | { 484 | local FILE_SRC=$(normalize_path "$1") 485 | local FILE_DST=$(normalize_path "$2") 486 | 487 | shopt -s nocasematch 488 | 489 | #Checking not allowed file names 490 | basefile_dst=$(basename "$FILE_DST") 491 | if [[ $basefile_dst == "thumbs.db" || \ 492 | $basefile_dst == "desktop.ini" || \ 493 | $basefile_dst == ".ds_store" || \ 494 | $basefile_dst == "icon\r" || \ 495 | $basefile_dst == ".dropbox" || \ 496 | $basefile_dst == ".dropbox.attr" \ 497 | ]]; then 498 | print " > Skipping not allowed file name \"$FILE_DST\"\n" 499 | return 500 | fi 501 | 502 | shopt -u nocasematch 503 | 504 | #Checking file size 505 | FILE_SIZE=$(file_size "$FILE_SRC") 506 | 507 | #Checking if the file already exists 508 | TYPE=$(db_stat "$FILE_DST") 509 | if [[ $TYPE != "ERR" && $SKIP_EXISTING_FILES == 1 ]]; then 510 | print " > Skipping already existing file \"$FILE_DST\"\n" 511 | return 512 | fi 513 | 514 | if [[ $FILE_SIZE -gt 157286000 ]]; then 515 | #If the file is greater than 150Mb, the chunked_upload API will be used 516 | db_chunked_upload_file "$FILE_SRC" "$FILE_DST" 517 | else 518 | db_simple_upload_file "$FILE_SRC" "$FILE_DST" 519 | fi 520 | 521 | } 522 | 523 | #Simple file upload 524 | #$1 = Local source file 525 | #$2 = Remote destination file 526 | function db_simple_upload_file 527 | { 528 | local FILE_SRC=$(normalize_path "$1") 529 | local FILE_DST=$(normalize_path "$2") 530 | 531 | if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then 532 | CURL_PARAMETERS="--progress-bar" 533 | LINE_CR="\n" 534 | else 535 | CURL_PARAMETERS="-L -s" 536 | LINE_CR="" 537 | fi 538 | 539 | print " > Uploading \"$FILE_SRC\" to 
\"$FILE_DST\"... $LINE_CR"
    $CURL_BIN $CURL_ACCEPT_CERTIFICATES $CURL_PARAMETERS -X POST -i --globoff -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"path\": \"$FILE_DST\",\"mode\": \"overwrite\",\"autorename\": true,\"mute\": false}" --header "Content-Type: application/octet-stream" --data-binary @"$FILE_SRC" "$API_UPLOAD_URL"
    check_http_response

    #Check
    #NOTE(fix): match both "HTTP/1.1 200 OK" and HTTP/2's "HTTP/2 200" status
    #lines; curl may negotiate HTTP/2, in which case the old exact match
    #"^HTTP/1.1 200 OK" reported FAILED on successful requests. Applied to
    #every status check below for the same reason.
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then
        print "DONE\n"
    else
        print "FAILED\n"
        print "An error occurred requesting /upload\n"
        ERROR_STATUS=1
    fi
}

#Chunked file upload (for files larger than one chunk): starts an upload
#session, appends the file in $CHUNK_SIZE MB pieces, then commits.
#$1 = Local source file
#$2 = Remote destination file
function db_chunked_upload_file
{
    local FILE_SRC=$(normalize_path "$1")
    local FILE_DST=$(normalize_path "$2")

    print " > Uploading \"$FILE_SRC\" to \"$FILE_DST\""

    local FILE_SIZE=$(file_size "$FILE_SRC")
    local OFFSET=0
    local UPLOAD_ID=""
    local UPLOAD_ERROR=0
    local CHUNK_PARAMS=""

    #Starting a new upload session
    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"close\": false}" --header "Content-Type: application/octet-stream" --data-binary @/dev/null "$API_CHUNKED_UPLOAD_START_URL" 2> /dev/null
    check_http_response

    SESSION_ID=$(sed -n 's/{"session_id": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")

    #Uploading chunks...
    while ([[ $OFFSET != $FILE_SIZE ]]); do

        let OFFSET_MB=$OFFSET/1024/1024

        #Create the chunk (re-read from the source at the current offset, so
        #a retry re-sends exactly the same data)
        dd if="$FILE_SRC" of="$CHUNK_FILE" bs=1048576 skip=$OFFSET_MB count=$CHUNK_SIZE 2> /dev/null
        local CHUNK_REAL_SIZE=$(file_size "$CHUNK_FILE")

        #Uploading the chunk...
        echo > "$RESPONSE_FILE"
        $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"cursor\": {\"session_id\": \"$SESSION_ID\",\"offset\": $OFFSET},\"close\": false}" --header "Content-Type: application/octet-stream" --data-binary @"$CHUNK_FILE" "$API_CHUNKED_UPLOAD_APPEND_URL" 2> /dev/null
        #check_http_response not needed, because we have to retry the request in case of error

        #Check
        if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then
            #NOTE(fix): advance the offset only on success. Previously the
            #offset was advanced unconditionally before this check, so a
            #failed chunk was never actually retried — the "retry" silently
            #skipped it and uploaded the next chunk instead, corrupting the
            #resulting file.
            let OFFSET=$OFFSET+$CHUNK_REAL_SIZE
            print "."
            UPLOAD_ERROR=0
        else
            print "*"
            let UPLOAD_ERROR=$UPLOAD_ERROR+1

            #On error, the upload is retried for max 3 times
            if [[ $UPLOAD_ERROR -gt 2 ]]; then
                print " FAILED\n"
                print "An error occurred requesting /chunked_upload\n"
                ERROR_STATUS=1
                return
            fi
        fi

    done

    UPLOAD_ERROR=0

    #Commit the upload (retried for max 3 times as well)
    while (true); do

        echo > "$RESPONSE_FILE"
        $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"cursor\": {\"session_id\": \"$SESSION_ID\",\"offset\": $OFFSET},\"commit\": {\"path\": \"$FILE_DST\",\"mode\": \"overwrite\",\"autorename\": true,\"mute\": false}}" --header "Content-Type: application/octet-stream" --data-binary @/dev/null "$API_CHUNKED_UPLOAD_FINISH_URL" 2> /dev/null
        #check_http_response not needed, because we have to retry the request in case of error

        #Check
        if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then
            print "."
            UPLOAD_ERROR=0
            break
        else
            print "*"
            let UPLOAD_ERROR=$UPLOAD_ERROR+1

            #On error, the commit is retried for max 3 times
            if [[ $UPLOAD_ERROR -gt 2 ]]; then
                print " FAILED\n"
                print "An error occurred requesting /commit_chunked_upload\n"
                ERROR_STATUS=1
                return
            fi
        fi

    done

    print " DONE\n"
}

#Directory upload: creates the remote directory, then uploads every entry
#of the local directory through the generic db_upload wrapper.
#$1 = Local source dir
#$2 = Remote destination dir
function db_upload_dir
{
    local DIR_SRC=$(normalize_path "$1")
    local DIR_DST=$(normalize_path "$2")

    #Creating remote directory
    db_mkdir "$DIR_DST"

    for file in "$DIR_SRC/"*; do
        db_upload "$file" "$DIR_DST"
    done
}

#Generic download wrapper: dispatches on the remote entry type reported by
#db_stat (DIR -> recursive folder download, FILE -> single file download).
#$1 = Remote source file/dir
#$2 = Local destination file/dir
function db_download
{
    local SRC=$(normalize_path "$1")
    local DST=$(normalize_path "$2")

    TYPE=$(db_stat "$SRC")

    #It's a directory
    if [[ $TYPE == "DIR" ]]; then

        #If the DST folder is not specified, I assume that is the current directory
        if [[ $DST == "" ]]; then
            DST="."
        fi

        #Checking if the destination directory exists
        if [[ ! -d $DST ]]; then
            local basedir=""
        else
            local basedir=$(basename "$SRC")
        fi

        local DEST_DIR=$(normalize_path "$DST/$basedir")
        print " > Downloading folder \"$SRC\" to \"$DEST_DIR\"... \n"

        if [[ ! -d "$DEST_DIR" ]]; then
            print " > Creating local directory \"$DEST_DIR\"... "
            mkdir -p "$DEST_DIR"

            #Check
            if [[ $? == 0 ]]; then
                print "DONE\n"
            else
                print "FAILED\n"
                ERROR_STATUS=1
                return
            fi
        fi

        #Getting folder content
        $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$SRC\", \"recursive\": false, \"include_deleted\": false}" "$API_FOLDER_LIST_URL" 2> /dev/null
        check_http_response

        #Extracting directory content [...]
        #and replacing "}, {" with "}\n{"
        #I don't like this piece of code... but seems to be the only way to do this with SED, writing a portable code...
        local DIR_CONTENT=$(sed -n 's/.*: \[{\(.*\)/\1/p' "$RESPONSE_FILE" | sed 's/}, *{/}\
{/g')

        #Extracting files and subfolders (one "path:type" pair per line)
        TMP_DIR_CONTENT_FILE="${RESPONSE_FILE}_$RANDOM"
        echo "$DIR_CONTENT" | sed -n 's/".tag": *"\([^"]*\).*"path_display": *"\([^"]*\).*/\2:\1/p' > $TMP_DIR_CONTENT_FILE

        #For each entry...
        while read -r line; do

            local FILE=${line%:*}
            local TYPE=${line##*:}

            FILE=${FILE##*/}

            if [[ $TYPE == "file" ]]; then
                db_download_file "$SRC/$FILE" "$DEST_DIR/$FILE"
            elif [[ $TYPE == "folder" ]]; then
                db_download "$SRC/$FILE" "$DEST_DIR"
            fi

        done < $TMP_DIR_CONTENT_FILE

        rm -fr $TMP_DIR_CONTENT_FILE

    #It's a file
    elif [[ $TYPE == "FILE" ]]; then

        #Checking DST
        if [[ $DST == "" ]]; then
            DST=$(basename "$SRC")
        fi

        #If the destination is a directory, the file will be downloaded into it.
        #NOTE(fix): join only the remote file's basename. The previous code
        #appended the full remote path ($SRC), producing a destination like
        #"localdir//remote/path/file" inside local subdirectories that do not
        #exist, which made the subsequent write fail.
        if [[ -d $DST ]]; then
            DST="$DST/$(basename "$SRC")"
        fi

        db_download_file "$SRC" "$DST"

    #Doesn't exist
    else
        print " > No such file or directory: $SRC\n"
        ERROR_STATUS=1
        return
    fi
}

#Simple file download
#$1 = Remote source file
#$2 = Local destination file
function db_download_file
{
    local FILE_SRC=$(normalize_path "$1")
    local FILE_DST=$(normalize_path "$2")

    if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then
        CURL_PARAMETERS="-L --progress-bar"
        LINE_CR="\n"
    else
        CURL_PARAMETERS="-L -s"
        LINE_CR=""
    fi

    #Checking if the file already exists
    if [[ -e $FILE_DST && $SKIP_EXISTING_FILES == 1 ]]; then
        print " > Skipping already existing file \"$FILE_DST\"\n"
        return
    fi

    #Creating the empty file, that for two reasons:
    #1) In this way I can check if the destination file is writable or not
    #2) Curl doesn't automatically creates files with 0 bytes size
    dd if=/dev/zero of="$FILE_DST" count=0 2> /dev/null
    if [[ $? != 0 ]]; then
        print " > Error writing file $FILE_DST: permission denied\n"
        ERROR_STATUS=1
        return
    fi

    print " > Downloading \"$FILE_SRC\" to \"$FILE_DST\"... $LINE_CR"
    $CURL_BIN $CURL_ACCEPT_CERTIFICATES $CURL_PARAMETERS -X POST --globoff -D "$RESPONSE_FILE" -o "$FILE_DST" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"path\": \"$FILE_SRC\"}" "$API_DOWNLOAD_URL"
    check_http_response

    #Check
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then
        print "DONE\n"
    else
        print "FAILED\n"
        #Remove the partial/empty local file created above
        rm -fr "$FILE_DST"
        ERROR_STATUS=1
        return
    fi
}

#Saveurl: asks Dropbox to fetch a URL server-side, then polls the async
#job status every 2 seconds until it completes or fails.
#$1 = URL
#$2 = Remote file destination
function db_saveurl
{
    local URL="$1"
    local FILE_DST=$(normalize_path "$2")
    local FILE_NAME=$(basename "$URL")

    print " > Downloading \"$URL\" to \"$FILE_DST\"..."
    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST/$FILE_NAME\", \"url\": \"$URL\"}" "$API_SAVEURL_URL" 2> /dev/null
    check_http_response

    JOB_ID=$(sed -n 's/.*"async_job_id": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
    if [[ $JOB_ID == "" ]]; then
        print " > Error getting the job id\n"
        return
    fi

    #Checking the status
    while (true); do

        $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"async_job_id\": \"$JOB_ID\"}" "$API_SAVEURL_JOBSTATUS_URL" 2> /dev/null
        check_http_response

        STATUS=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
        case $STATUS in

            in_progress)
                print "+"
            ;;

            complete)
                print " DONE\n"
                break
            ;;

            failed)
                print " ERROR\n"
                MESSAGE=$(sed -n 's/.*"error_summary": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
                print " > Error: $MESSAGE\n"
                break
            ;;

        esac

        sleep 2

    done
}

#Prints account info (name, account id, email, country)
function db_account_info
{
    print "Dropbox Uploader v$VERSION\n\n"
    print " > Getting info... "
    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" "$API_ACCOUNT_INFO_URL" 2> /dev/null
    check_http_response

    #Check
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then

        name=$(sed -n 's/.*"display_name": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
        echo -e "\n\nName:\t\t$name"

        uid=$(sed -n 's/.*"account_id": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
        echo -e "UID:\t\t$uid"

        email=$(sed -n 's/.*"email": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
        echo -e "Email:\t\t$email"

        country=$(sed -n 's/.*"country": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
        echo -e "Country:\t$country"

        echo ""

    else
        print "FAILED\n"
        ERROR_STATUS=1
    fi
}

#Prints account space usage info (quota / used / free, in MB)
function db_account_space
{
    print "Dropbox Uploader v$VERSION\n\n"
    print " > Getting space usage info... "
    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" "$API_ACCOUNT_SPACE_URL" 2> /dev/null
    check_http_response

    #Check
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then

        quota=$(sed -n 's/.*"allocated": \([0-9]*\).*/\1/p' "$RESPONSE_FILE")
        let quota_mb=$quota/1024/1024
        echo -e "\n\nQuota:\t$quota_mb Mb"

        used=$(sed -n 's/.*"used": \([0-9]*\).*/\1/p' "$RESPONSE_FILE")
        let used_mb=$used/1024/1024
        echo -e "Used:\t$used_mb Mb"

        let free_mb=($quota-$used)/1024/1024
        echo -e "Free:\t$free_mb Mb"

        echo ""

    else
        print "FAILED\n"
        ERROR_STATUS=1
    fi
}

#Account unlink: removes the local config file (the stored OAuth token)
#after interactive confirmation. Does not revoke the token server-side.
function db_unlink
{
    echo -ne "Are you sure you want to unlink this script from your Dropbox account? [y/n]"
    read answer
    if [[ $answer == "y" ]]; then
        rm -fr "$CONFIG_FILE"
        echo -ne "DONE\n"
    fi
}

#Delete a remote file
#$1 = Remote file to delete
function db_delete
{
    local FILE_DST=$(normalize_path "$1")

    print " > Deleting \"$FILE_DST\"... "
    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST\"}" "$API_DELETE_URL" 2> /dev/null
    check_http_response

    #Check
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then
        print "DONE\n"
    else
        print "FAILED\n"
        ERROR_STATUS=1
    fi
}

#Move/Rename a remote file
#$1 = Remote file to rename or move
#$2 = New file name or location
function db_move
{
    local FILE_SRC=$(normalize_path "$1")
    local FILE_DST=$(normalize_path "$2")

    TYPE=$(db_stat "$FILE_DST")

    #If the destination it's a directory, the source will be moved into it
    if [[ $TYPE == "DIR" ]]; then
        local filename=$(basename "$FILE_SRC")
        FILE_DST=$(normalize_path "$FILE_DST/$filename")
    fi

    print " > Moving \"$FILE_SRC\" to \"$FILE_DST\" ... "
    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"from_path\": \"$FILE_SRC\", \"to_path\": \"$FILE_DST\"}" "$API_MOVE_URL" 2> /dev/null
    check_http_response

    #Check
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then
        print "DONE\n"
    else
        print "FAILED\n"
        ERROR_STATUS=1
    fi
}

#Copy a remote file to a remote location
#$1 = Remote file to rename or move
#$2 = New file name or location
function db_copy
{
    local FILE_SRC=$(normalize_path "$1")
    local FILE_DST=$(normalize_path "$2")

    TYPE=$(db_stat "$FILE_DST")

    #If the destination it's a directory, the source will be copied into it
    if [[ $TYPE == "DIR" ]]; then
        local filename=$(basename "$FILE_SRC")
        FILE_DST=$(normalize_path "$FILE_DST/$filename")
    fi

    print " > Copying \"$FILE_SRC\" to \"$FILE_DST\" ... "
    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"from_path\": \"$FILE_SRC\", \"to_path\": \"$FILE_DST\"}" "$API_COPY_URL" 2> /dev/null
    check_http_response

    #Check
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then
        print "DONE\n"
    else
        print "FAILED\n"
        ERROR_STATUS=1
    fi
}

#Create a new directory
#$1 = Remote directory to create
function db_mkdir
{
    local DIR_DST=$(normalize_path "$1")

    print " > Creating Directory \"$DIR_DST\"... "
    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$DIR_DST\"}" "$API_MKDIR_URL" 2> /dev/null
    check_http_response

    #Check (403 = directory already exists, treated as non-fatal)
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then
        print "DONE\n"
    elif grep -q "^HTTP/[12].* 403" "$RESPONSE_FILE"; then
        print "ALREADY EXISTS\n"
    else
        print "FAILED\n"
        ERROR_STATUS=1
    fi
}

#List remote directory: prints folders first, then files, with sizes
#right-padded to the widest size string.
#$1 = Remote directory
function db_list
{
    local DIR_DST=$(normalize_path "$1")

    print " > Listing \"$DIR_DST\"... "

    #The API expects the root folder as an empty string, not "/"
    if [[ "$DIR_DST" == "/" ]]; then
        DIR_DST=""
    fi

    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$DIR_DST\",\"include_media_info\": false,\"include_deleted\": false,\"include_has_explicit_shared_members\": false}" "$API_LIST_FOLDER" 2> /dev/null
    check_http_response

    #Check
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then

        print "DONE\n"

        #Extracting directory content [...]
        #and replacing "}, {" with "}\n{"
        #I don't like this piece of code... but seems to be the only way to do this with SED, writing a portable code...
        local DIR_CONTENT=$(sed -n 's/.*: \[{\(.*\)/\1/p' "$RESPONSE_FILE" | sed 's/}, *{/}\
{/g')

        #Converting escaped quotes to unicode format
        echo "$DIR_CONTENT" | sed 's/\\"/\\u0022/' > "$TEMP_FILE"

        #Extracting files and subfolders ("path:type;size" per line)
        rm -fr "$RESPONSE_FILE"
        while read -r line; do

            local FILE=$(echo "$line" | sed -n 's/.*"path_display": *"\([^"]*\)".*/\1/p')
            local TYPE=$(echo "$line" | sed -n 's/.*".tag": *"\([^"]*\).*/\1/p')
            local SIZE=$(convert_bytes $(echo "$line" | sed -n 's/.*"size": *\([0-9]*\).*/\1/p'))

            echo -e "$FILE:$TYPE;$SIZE" >> "$RESPONSE_FILE"

        done < "$TEMP_FILE"

        #Looking for the biggest file size
        #to calculate the padding to use
        local padding=0
        while read -r line; do
            local FILE=${line%:*}
            local META=${line##*:}
            local SIZE=${META#*;}

            if [[ ${#SIZE} -gt $padding ]]; then
                padding=${#SIZE}
            fi
        done < "$RESPONSE_FILE"

        #For each entry, printing directories...
        while read -r line; do

            local FILE=${line%:*}
            local META=${line##*:}
            local TYPE=${META%;*}
            local SIZE=${META#*;}

            #Removing unneeded /
            FILE=${FILE##*/}

            if [[ $TYPE == "folder" ]]; then
                FILE=$(echo -e "$FILE")
                $PRINTF " [D] %-${padding}s %s\n" "$SIZE" "$FILE"
            fi

        done < "$RESPONSE_FILE"

        #For each entry, printing files...
        while read -r line; do

            local FILE=${line%:*}
            local META=${line##*:}
            local TYPE=${META%;*}
            local SIZE=${META#*;}

            #Removing unneeded /
            FILE=${FILE##*/}

            if [[ $TYPE == "file" ]]; then
                FILE=$(echo -e "$FILE")
                $PRINTF " [F] %-${padding}s %s\n" "$SIZE" "$FILE"
            fi

        done < "$RESPONSE_FILE"


    else
        print "FAILED\n"
        ERROR_STATUS=1
    fi
}

#Share remote file: creates a public shared link and prints it.
#$1 = Remote file
function db_share
{
    local FILE_DST=$(normalize_path "$1")

    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST\",\"settings\": {\"requested_visibility\": \"public\"}}" "$API_SHARE_URL" 2> /dev/null
    check_http_response

    #Check
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then
        print " > Share link: "
        SHARE_LINK=$(sed -n 's/.*"url": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
        echo "$SHARE_LINK"
    else
        print "FAILED\n"
        MESSAGE=$(sed -n 's/.*"error_summary": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
        print " > Error: $MESSAGE\n"
        ERROR_STATUS=1
    fi
}

#Search on Dropbox: filename search from the root, prints folders then files.
#$1 = query
function db_search
{
    local QUERY="$1"

    print " > Searching for \"$QUERY\"... "

    $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"\",\"query\": \"$QUERY\",\"start\": 0,\"max_results\": 1000,\"mode\": \"filename\"}" "$API_SEARCH_URL" 2> /dev/null
    check_http_response

    #Check
    #NOTE(review): on failure this function still falls through and parses
    #the (error) response below; kept as-is to preserve behavior.
    if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then
        print "DONE\n"
    else
        print "FAILED\n"
        ERROR_STATUS=1
    fi

    #Extracting directory content [...]
    #and replacing "}, {" with "}\n{"
    #I don't like this piece of code... but seems to be the only way to do this with SED, writing a portable code...
    local DIR_CONTENT=$(sed 's/}, *{/}\
{/g' "$RESPONSE_FILE")

    #Converting escaped quotes to unicode format
    echo "$DIR_CONTENT" | sed 's/\\"/\\u0022/' > "$TEMP_FILE"

    #Extracting files and subfolders ("path:type;size" per line)
    rm -fr "$RESPONSE_FILE"
    while read -r line; do

        local FILE=$(echo "$line" | sed -n 's/.*"path_display": *"\([^"]*\)".*/\1/p')
        local TYPE=$(echo "$line" | sed -n 's/.*".tag": *"\([^"]*\).*/\1/p')
        local SIZE=$(convert_bytes $(echo "$line" | sed -n 's/.*"size": *\([0-9]*\).*/\1/p'))

        echo -e "$FILE:$TYPE;$SIZE" >> "$RESPONSE_FILE"

    done < "$TEMP_FILE"

    #Looking for the biggest file size
    #to calculate the padding to use
    local padding=0
    while read -r line; do
        local FILE=${line%:*}
        local META=${line##*:}
        local SIZE=${META#*;}

        if [[ ${#SIZE} -gt $padding ]]; then
            padding=${#SIZE}
        fi
    done < "$RESPONSE_FILE"

    #For each entry, printing directories (full paths, unlike db_list)...
    while read -r line; do

        local FILE=${line%:*}
        local META=${line##*:}
        local TYPE=${META%;*}
        local SIZE=${META#*;}

        if [[ $TYPE == "folder" ]]; then
            FILE=$(echo -e "$FILE")
            $PRINTF " [D] %-${padding}s %s\n" "$SIZE" "$FILE"
        fi

    done < "$RESPONSE_FILE"

    #For each entry, printing files...
    while read -r line; do

        local FILE=${line%:*}
        local META=${line##*:}
        local TYPE=${META%;*}
        local SIZE=${META#*;}

        if [[ $TYPE == "file" ]]; then
            FILE=$(echo -e "$FILE")
            $PRINTF " [F] %-${padding}s %s\n" "$SIZE" "$FILE"
        fi

    done < "$RESPONSE_FILE"

}

################
#### SETUP ####
################

#CHECKING FOR AUTH FILE
if [[ -e $CONFIG_FILE ]]; then

    #Loading data... and change old format config if necessary.
    source "$CONFIG_FILE" 2>/dev/null || {
        sed -i'' 's/:/=/' "$CONFIG_FILE" && source "$CONFIG_FILE" 2>/dev/null
    }

    #Checking if it's still a v1 API configuration file
    if [[ $APPKEY != "" || $APPSECRET != "" ]]; then
        echo -ne "The config file contains the old v1 oauth tokens. A new oauth v2 token will be requested.\n"
        echo -ne "Requesting new oauth2 token... "
        $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" "$API_MIGRATE_V2/?oauth_consumer_key=$APPKEY&oauth_token=$OAUTH_ACCESS_TOKEN&oauth_signature_method=PLAINTEXT&oauth_signature=$APPSECRET%26$OAUTH_ACCESS_TOKEN_SECRET&oauth_timestamp=$(utime)&oauth_nonce=$RANDOM" 2> /dev/null
        OAUTH_ACCESS_TOKEN=$(sed -n 's/.*access_token": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")

        if [[ $OAUTH_ACCESS_TOKEN == "" ]]; then
            #NOTE(fix): corrected user-facing typo "tocken" -> "token"
            echo "Error getting access token, please try again!"
            remove_temp_files
            exit 1
        fi

        echo "DONE"
        echo "OAUTH_ACCESS_TOKEN=$OAUTH_ACCESS_TOKEN" > "$CONFIG_FILE"
    fi

    #Checking loaded data
    if [[ $OAUTH_ACCESS_TOKEN = "" ]]; then
        echo -ne "Error loading data from $CONFIG_FILE...\n"
        echo -ne "It is recommended to run $0 unlink\n"
        remove_temp_files
        exit 1
    fi

#NEW SETUP...
else

    echo -ne "\n This is the first time you run this script, please follow the instructions:\n\n"
    echo -ne " 1) Open the following URL in your Browser, and log in using your account: $APP_CREATE_URL\n"
    echo -ne " 2) Click on \"Create App\", then select \"Dropbox API app\"\n"
    echo -ne " 3) Now go on with the configuration, choosing the app permissions and access restrictions to your DropBox folder\n"
    echo -ne " 4) Enter the \"App Name\" that you prefer (e.g. MyUploader$RANDOM$RANDOM$RANDOM)\n\n"

    echo -ne " Now, click on the \"Create App\" button.\n\n"

    echo -ne " When your new App is successfully created, please click on the Generate button\n"
    echo -ne " under the 'Generated access token' section, then copy and paste the new access token here:\n\n"

    echo -ne " # Access token: "
    read OAUTH_ACCESS_TOKEN

    echo -ne "\n > The access token is $OAUTH_ACCESS_TOKEN. Looks ok? [y/N]: "
    read answer
    if [[ $answer != "y" ]]; then
        remove_temp_files
        exit 1
    fi

    echo "OAUTH_ACCESS_TOKEN=$OAUTH_ACCESS_TOKEN" > "$CONFIG_FILE"
    echo " The configuration has been saved."

    remove_temp_files
    exit 0
fi

################
#### START ####
################

#Positional arguments after the getopts-parsed options
COMMAND=${@:$OPTIND:1}
ARG1=${@:$OPTIND+1:1}
ARG2=${@:$OPTIND+2:1}

let argnum=$#-$OPTIND

#CHECKING PARAMS VALUES
case $COMMAND in

    upload)

        if [[ $argnum -lt 2 ]]; then
            usage
        fi

        #Last argument is the destination; every one before it is a source
        FILE_DST=${@:$#:1}

        for (( i=$OPTIND+1; i<$#; i++ )); do
            FILE_SRC=${@:$i:1}
            db_upload "$FILE_SRC" "/$FILE_DST"
        done

    ;;

    download)

        if [[ $argnum -lt 1 ]]; then
            usage
        fi

        FILE_SRC=$ARG1
        FILE_DST=$ARG2

        db_download "/$FILE_SRC" "$FILE_DST"

    ;;

    saveurl)

        if [[ $argnum -lt 1 ]]; then
            usage
        fi

        URL=$ARG1
        FILE_DST=$ARG2

        db_saveurl "$URL" "/$FILE_DST"

    ;;

    share)

        if [[ $argnum -lt 1 ]]; then
            usage
        fi

        FILE_DST=$ARG1

        db_share "/$FILE_DST"

    ;;

    info)

        db_account_info

    ;;

    space)

        db_account_space

    ;;

    delete|remove)

        if [[ $argnum -lt 1 ]]; then
            usage
        fi

        FILE_DST=$ARG1

        db_delete "/$FILE_DST"

    ;;

    move|rename)

        if [[ $argnum -lt 2 ]]; then
            usage
        fi

        FILE_SRC=$ARG1
        FILE_DST=$ARG2

        db_move "/$FILE_SRC" "/$FILE_DST"

    ;;

    copy)

        if [[ $argnum -lt 2 ]]; then
            usage
        fi

        FILE_SRC=$ARG1
        FILE_DST=$ARG2

        db_copy "/$FILE_SRC" "/$FILE_DST"

    ;;

    mkdir)

        if [[ $argnum -lt 1 ]]; then
            usage
        fi

        DIR_DST=$ARG1

        db_mkdir "/$DIR_DST"

    ;;

    search)

        if [[ $argnum -lt 1 ]]; then
            usage
        fi

        QUERY=$ARG1

        db_search "$QUERY"

    ;;

    list)

        DIR_DST=$ARG1

        #Checking DIR_DST
        if [[ $DIR_DST == "" ]]; then
            DIR_DST="/"
        fi

        db_list "/$DIR_DST"

    ;;

    unlink)

        db_unlink

    ;;

    *)

        if [[ $COMMAND != "" ]]; then
            print "Error: Unknown command: $COMMAND\n\n"
            ERROR_STATUS=1
        fi
        usage

    ;;

esac

remove_temp_files
exit $ERROR_STATUS