├── .gitignore ├── Context.sublime-menu ├── Default (Linux).sublime-keymap ├── Default (OSX).sublime-keymap ├── Default (Windows).sublime-keymap ├── Default.sublime-commands ├── FTPSync.py ├── FTPSync.sublime-settings ├── LICENCE ├── Main.sublime-menu ├── README.md ├── Settings.tmLanguage ├── Side Bar.sublime-menu ├── __init__.py ├── ftpsync.default-settings ├── ftpsynccommon.py ├── ftpsyncexceptions.py ├── ftpsyncfiles.py ├── ftpsyncfilewatcher.py ├── ftpsyncprogress.py ├── ftpsyncpubsub.py ├── ftpsyncworker.py ├── ftpsyncwrapper.py ├── lib2 ├── __init__.py ├── ftplib.py ├── idna.py ├── simplejson │ ├── LICENSE.txt │ ├── __init__.py │ ├── compat.py │ ├── decoder.py │ ├── encoder.py │ ├── ordered_dict.py │ ├── scanner.py │ └── tool.py └── ssl.py └── lib3 ├── __init__.py ├── ftplib.py ├── idna.py ├── simplejson ├── LICENSE.txt ├── __init__.py ├── compat.py ├── decoder.py ├── encoder.py ├── ordered_dict.py ├── scanner.py └── tool.py └── ssl.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | package-metadata.json 3 | *.sublime-project 4 | *.sublime-workspace 5 | *.cache 6 | *.backup 7 | -------------------------------------------------------------------------------- /Context.sublime-menu: -------------------------------------------------------------------------------- 1 | [ 2 | { "caption": "-" }, 3 | { 4 | "caption": "FTPSync", 5 | "children": [ 6 | { 7 | "caption": "Check this file", 8 | "command": "ftp_sync_check_current" 9 | }, 10 | { 11 | "caption": "Upload this file", 12 | "command": "ftp_sync_current" 13 | }, 14 | { 15 | "caption": "Download this file", 16 | "command": "ftp_sync_down_current" 17 | }, 18 | { 19 | "caption": "Rename this file", 20 | "command": "ftp_sync_rename_current" 21 | }, 22 | { 23 | "caption": "-" 24 | }, 25 | { 26 | "caption": "Browse", 27 | "command": "ftp_sync_browse", 28 | "args": { "edit": null } 29 | }, 30 | { 31 | "caption": "Browse from last path", 32 | "command": 
"ftp_sync_browse_last", 33 | "args": { "edit": null } 34 | }, 35 | { 36 | "caption": "-" 37 | }, 38 | { 39 | "caption": "Enable upload on save", 40 | "command": "ftp_sync_enable_uos" 41 | }, 42 | { 43 | "caption": "Disable upload on save", 44 | "command": "ftp_sync_disable_uos" 45 | }, 46 | { 47 | "caption": "-" 48 | }, 49 | { 50 | "caption": "Open README (Github)", 51 | "command": "ftp_sync_url_readme" 52 | }, 53 | { 54 | "caption": "Report issue / suggestion (Github)", 55 | "command": "ftp_sync_url_report" 56 | }, 57 | { 58 | "caption": "Support FTPSync development", 59 | "command": "ftp_sync_url_donate" 60 | } 61 | 62 | ] 63 | }, 64 | { "caption": "-" } 65 | ] 66 | -------------------------------------------------------------------------------- /Default (Linux).sublime-keymap: -------------------------------------------------------------------------------- 1 | [ 2 | { "keys": [""], "command": "ftp_sync_current" }, 3 | { "keys": [""], "command": "ftp_sync_down_current" }, 4 | { "keys": [""], "command": "ftp_sync_check_current" }, 5 | { "keys": [""], "command": "ftp_sync_browse" }, 6 | { "keys": [""], "command": "ftp_sync_browse_last" }, 7 | { "keys": [""], "command": "ftp_sync_browse_current" } 8 | ] -------------------------------------------------------------------------------- /Default (OSX).sublime-keymap: -------------------------------------------------------------------------------- 1 | [ 2 | { "keys": [""], "command": "ftp_sync_current" }, 3 | { "keys": [""], "command": "ftp_sync_down_current" }, 4 | { "keys": [""], "command": "ftp_sync_check_current" }, 5 | { "keys": [""], "command": "ftp_sync_browse" }, 6 | { "keys": [""], "command": "ftp_sync_browse_last" }, 7 | { "keys": [""], "command": "ftp_sync_browse_current" } 8 | ] -------------------------------------------------------------------------------- /Default (Windows).sublime-keymap: -------------------------------------------------------------------------------- 1 | [ 2 | { "keys": [""], "command": 
"ftp_sync_current" }, 3 | { "keys": [""], "command": "ftp_sync_down_current" }, 4 | { "keys": [""], "command": "ftp_sync_check_current" }, 5 | { "keys": [""], "command": "ftp_sync_browse" }, 6 | { "keys": [""], "command": "ftp_sync_browse_last" }, 7 | { "keys": [""], "command": "ftp_sync_browse_current" } 8 | ] -------------------------------------------------------------------------------- /Default.sublime-commands: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "caption": "FTPSync: Setup FTPSync in this folder", 4 | "command": "ftp_sync_new_settings", 5 | "args": {"edit": null, "dirs": []} 6 | }, 7 | { 8 | "caption": "FTPSync: Upload current file", 9 | "command": "ftp_sync_current" 10 | }, 11 | { 12 | "caption": "FTPSync: Download current file", 13 | "command": "ftp_sync_down_current" 14 | }, 15 | { 16 | "caption": "FTPSync: Check current file", 17 | "command": "ftp_sync_check_current" 18 | }, 19 | { 20 | "caption": "FTPSync: Rename current file", 21 | "command": "ftp_sync_rename_current" 22 | }, 23 | { 24 | "caption": "FTPSync: Browse", 25 | "command": "ftp_sync_browse", 26 | "args": { "edit": null } 27 | }, 28 | { 29 | "caption": "FTPSync: Browse from last path", 30 | "command": "ftp_sync_browse_last" 31 | }, 32 | { 33 | "caption": "FTPSync: Browse from current folder", 34 | "command": "ftp_sync_browse_current" 35 | }, 36 | { 37 | "caption": "FTPSync: Open README (Github)", 38 | "command": "ftp_sync_url_readme" 39 | }, 40 | { 41 | "caption": "FTPSync: Report issue / suggestion (Github)", 42 | "command": "ftp_sync_url_report" 43 | } 44 | ] 45 | -------------------------------------------------------------------------------- /FTPSync.sublime-settings: -------------------------------------------------------------------------------- 1 | { 2 | "debug": true, 3 | "debug_verbose": true, 4 | "connection_timeout": 100, 5 | "time_format": "%Y-%m-%d %H:%M", 6 | "download_on_open_delay": 5000, 7 | "keep_alive_interval": 5, 
8 | "max_threads": 4, 9 | "debug_threads": false, 10 | "debug_json": false, 11 | "ftp_retry_limit": 4, 12 | "ftp_retry_delay": 2.0, 13 | 14 | "system_notifications": true, 15 | 16 | "browse_display_details": false, 17 | "browse_open_on_download": true, 18 | "browse_display_permission": false, 19 | "browse_timestamp_format": "%Y-%m-%d %H:%M", 20 | "browse_folder_prefix": "▸ ", 21 | "browse_folder_suffix": "", 22 | "browse_file_prefix": "\t", 23 | "browse_file_suffix": "", 24 | "browse_up": "\t..", 25 | "browse_action_prefix": "\t• ", 26 | 27 | "project_defaults_name": "default", 28 | "project_defaults": { 29 | "username": null, 30 | "password": "", 31 | "private_key": null, 32 | "private_key_pass": null, 33 | "path": "/", 34 | 35 | "encoding": "auto", 36 | "tls": false, 37 | "passive": true, 38 | "use_tempfile": true, 39 | "upload_on_save": true, 40 | "port": 21, 41 | "timeout": 30, 42 | "ignore": null, 43 | "overwrite_newer_prevention": true, 44 | "download_on_open": false, 45 | "upload_delay": 0, 46 | "default_folder_permissions": "755", 47 | "default_local_permissions": "auto", 48 | "default_upload_permissions": null, 49 | "always_sync_local_permissions": true, 50 | "time_offset": 0, 51 | "set_remote_lastmodified": true, 52 | 53 | "after_save_watch": null, 54 | 55 | "debug_extras": { 56 | "print_list_result": false, 57 | "dump_config_load": false, 58 | "print_ensure_folders": false, 59 | "debug_remote_paths": false, 60 | "after_save_watch": false, 61 | "debug_get_local_path": false, 62 | "debug_mfmt": false, 63 | "debug_put_error": false 64 | } 65 | }, 66 | 67 | "ignore": "ftpsync\\.settings|\\.gitignore|\\.git|Sublime Text 2/Packages", 68 | "ascii_extensions": [ 69 | "txt","ini","xml","nfo","cgi","cfg","conf","bat","sh","inc","sfv","srt", 70 | "htaccess", 71 | "md5","sha","gitignore","pub","ppk", 72 | "html","htm","xhtml","dhtml","phtml","mht", 73 | "tpl","twig","latte", 74 | "css","scss","sass","less","styl", 75 | "js","json","coffee","dart","ts", 76 | 
"php","php3","php4","php5", 77 | "asp","aspx", 78 | "py","py3", 79 | "c","h","cc","ccp","hpp", 80 | "rb","java","lua","cs","scala","hx","sql", 81 | "settings","sublime-menu","sublime-commands","sublime-project","sublime-settings","sublime-workspace","sublime-keymap" 82 | ], 83 | "binary_extensions": [ 84 | "raw","exe","mpi", 85 | "pdf","psd","ai","doc","xls","docx","xlsx", 86 | "7z","zip","rar","tar","gz","gzip","ace", 87 | "bmp","jpg","jpeg","ico","png","gif","tiff","pic","tga", 88 | "wmv","avi","mkv","mp4","mpg","mpeg", 89 | "wav","mp3","ac3","flac","3gp" 90 | ] 91 | } -------------------------------------------------------------------------------- /LICENCE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012 Jiří "NoxArt" Petruželka 2 | 3 | Permission is hereby granted, free of charge, to any person 4 | obtaining a copy of this software and associated documentation 5 | files (the "Software"), to deal in the Software without 6 | restriction, including without limitation the rights to use, 7 | copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | copies of the Software, and to permit persons to whom the 9 | Software is furnished to do so, subject to the following 10 | conditions: 11 | 12 | The above copyright notice and this permission notice shall be 13 | included in all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 16 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 17 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 18 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 19 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 20 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 21 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 
-------------------------------------------------------------------------------- /Main.sublime-menu: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "caption": "Preferences", 4 | "mnemonic": "n", 5 | "id": "preferences", 6 | "children": 7 | [ 8 | { 9 | "caption": "Package Settings", 10 | "mnemonic": "P", 11 | "id": "package-settings", 12 | "children": 13 | [ 14 | { 15 | "caption": "FTPSync", 16 | "children": 17 | [ 18 | { 19 | "command": "open_file", 20 | "args": {"file": "${packages}/FTPSync/FTPSync.sublime-settings"}, 21 | "caption": "Settings – Default" 22 | }, 23 | { 24 | "command": "open_file", 25 | "args": {"file": "${packages}/User/FTPSync.sublime-settings"}, 26 | "caption": "Settings – User" 27 | }, 28 | { 29 | "command": "ftp_sync_new_settings", 30 | "caption": "Setup FTPSync in this folder", 31 | "args": {"dirs": []} 32 | }, 33 | { "caption": "-" }, 34 | { 35 | "command": "open_file", 36 | "args": { 37 | "file": "${packages}/FTPSync/Default (Windows).sublime-keymap", 38 | "platform": "Windows" 39 | }, 40 | "caption": "Key Bindings – Default" 41 | }, 42 | { 43 | "command": "open_file", 44 | "args": { 45 | "file": "${packages}/FTPSync/Default (OSX).sublime-keymap", 46 | "platform": "OSX" 47 | }, 48 | "caption": "Key Bindings – Default" 49 | }, 50 | { 51 | "command": "open_file", 52 | "args": { 53 | "file": "${packages}/FTPSync/Default (Linux).sublime-keymap", 54 | "platform": "Linux" 55 | }, 56 | "caption": "Key Bindings – Default" 57 | }, 58 | { 59 | "command": "open_file", 60 | "args": { 61 | "file": "${packages}/User/Default (Windows).sublime-keymap", 62 | "platform": "Windows" 63 | }, 64 | "caption": "Key Bindings – User" 65 | }, 66 | { 67 | "command": "open_file", 68 | "args": { 69 | "file": "${packages}/User/Default (OSX).sublime-keymap", 70 | "platform": "OSX" 71 | }, 72 | "caption": "Key Bindings – User" 73 | }, 74 | { 75 | "command": "open_file", 76 | "args": { 77 | "file": "${packages}/User/Default 
(Linux).sublime-keymap", 78 | "platform": "Linux" 79 | }, 80 | "caption": "Key Bindings – User" 81 | }, 82 | { 83 | "caption": "-" 84 | }, 85 | { 86 | "caption": "Open README (Github)", 87 | "command": "ftp_sync_url_readme" 88 | }, 89 | { 90 | "caption": "Report issue / suggestion (Github)", 91 | "command": "ftp_sync_url_report" 92 | } 93 | ] 94 | } 95 | ] 96 | } 97 | ] 98 | } 99 | ] -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | FTPSync 2 | ==================== 3 | *Addon for Sublime Text 2 and Sublime Text 3* 4 | 5 | 6 | # No longer maintained 7 | *Unofortunately no longer have time to continue with fixes and improvements :( I hope it will still be a bit useful. The plugin will continue to be free to use. If someone wishes to continue with development feel free to remove this message* 8 | 9 | 10 | Simple and free plugin for FTP synchronization. Just hit the _save_ as usual and it's upped. 11 | 12 | What's there for you? 13 | * Multiple named upload targets 14 | * Ignored file regex patterns 15 | * Secure transfer using TLS 16 | * Downloading via temporary file (better stability) 17 | * Determining newer remote files, overwrite protection 18 | * Manual multiple file & folder up/downloading (sidebar context menu) 19 | * Multithreaded uploading and downloading 20 | * Local&remote renaming and deleting 21 | * Progress bar for multiple up/download 22 | * Remote browsing and manipulating via file list 23 | 24 | For more info look into [Wiki](https://github.com/NoxArt/SublimeText2-FTPSync/wiki/_pages) 25 | 26 | 27 | How to use 28 | ---------- 29 | 30 | To mark a folder and descendants for upload insert **ftpsync.settings** file in following format. Don't worry - the skeleton can be simply inserted using *Preferences > Package Settings > FTPSync > Setup FTPSync in this folder* or using context menu in Side bar or using Control/CMD+Shift+P. 
31 | 32 | Sample settings file with minimum of options: 33 | ( *does not contain all options* ) 34 | 35 | { 36 | 'primary': { 37 | host: 'ftp.mywebsite.com', 38 | username: 'johnsmith', 39 | password: 'secretpassword', 40 | path: '/www/', 41 | 42 | upload_on_save: true, 43 | tls: true 44 | } 45 | } 46 | 47 | Set password to `null` (don't use quotes) if you do not want to store password in a file and set in manually (FTPSync will request the password in such case). 48 | 49 | [All connection settings »](https://github.com/NoxArt/SublimeText2-FTPSync/wiki/All-settings) 50 | 51 | Files are automatically uploaded **on save** (unless disabled by _upload\_on\_save_=false setting). In your newly created settings file some options are preceded with `//`, this means they are commented out (and default value from global settings file is used) - remove the `//` to enable the entry. 52 | 53 | 54 | Drawbacks and notes 55 | --------------------- 56 | 57 | * SFTP is not supported at the moment and is not planned in near future (you can use [SFTP](http://wbond.net/sublime_packages/sftp) or [Mote](https://github.com/SublimeText/Mote) plugins) 58 | * SSL/TLS is not supported for servers that enforce SSL_REUSE (encryption support will hopefully increase in future) 59 | * Does not support continuous watching and syncing, only (after) manual action 60 | * Does not support proxy connections 61 | * Does not support remote diff at the moment 62 | * Does not support special characters in remote path at the moment 63 | 64 | 65 | About 66 | ----- 67 | 68 | Done by **Jiří @NoxArt Petruželka** ~ [Twitter](https://twitter.com/NoxArt) 69 | 70 | Released under **MIT licence**. 71 | 72 | Thank you for the financial support! 73 | 74 | Feel free to add [issues, ideas](https://github.com/NoxArt/SublimeText2-FTPSync/issues), pull requests... 
75 | 76 | Thanks to [thecotne](https://github.com/thecotne), [castus](https://github.com/castus), [tommymarshall](https://github.com/tommymarshall), [TotallyInformation](https://github.com/TotallyInformation), [saiori](https://github.com/saiori), [vnabet](https://github.com/vnabet), [Jcrs](https://github.com/Jcrs), [ItayXD](https://github.com/ItayXD), [bibimij](https://github.com/bibimij), [digitalmaster](https://github.com/digitalmaster), [alfaex](https://github.com/alfaex), [seyDoggy](https://github.com/seyDoggy), Nuno, [mikedoug](https://github.com/mikedoug), [stevether](https://github.com/stevether), [zaus](https://github.com/zaus), [noAlvaro](https://github.com/noAlvaro), [zofie86](https://github.com/zofie86), [fma965](https://github.com/fma965), [PixelVibe](https://github.com/PixelVibe), [Kaisercraft](https://github.com/Kaisercraft), [benkaiser](https://github.com/benkaiser), [anupdebnath](https://github.com/anupdebnath), [sy4mil](https://github.com/sy4mil), [leek](https://github.com/leek), [surfac](https://github.com/surfac), [mitsurugi](https://github.com/mitsurugi), [MonoSnippets](https://github.com/MonoSnippets), [Zegnat](https://github.com/Zegnat), [cwhittl](https://github.com/cwhittl), [shadowsdweller](https://github.com/shadowsdweller), [adiulici01](https://github.com/adiulici01), [tablatronix](https://github.com/tablatronix), [bllim](https://github.com/bllim), [Imaulle](https://github.com/Imaulle), [friskfly](https://github.com/friskfly), [lysenkobv](https://github.com/lysenkobv), [nosfan1019](https://github.com/nosfan1019), [smoochieboochies](https://github.com/smoochieboochies), [Dmitry Loktev](https://github.com/unknownexception), [fedesilvaponte](https://github.com/fedesilvaponte), [fedegonzaleznavarro](https://github.com/fedegonzaleznavarro), [camilstaps](https://github.com/camilstaps), [maknapp](https://github.com/maknapp), [certainlyakey](https://github.com/certainlyakey), [victorhqc](https://github.com/victorhqc), 
[eniocarv](https://github.com/eniocarv), [molokoloco](https://github.com/molokoloco), [tq0fqeu](https://github.com/tq0fqeu), [Arachnoid](https://github.com/Arachnoid), [ahgood](https://github.com/ahgood), [SourceR85](https://github.com/SourceR85), [nirajaryal](https://github.com/nirajaryal), [Stock-webdesign](https://github.com/Stock-Webdesign), [chachan](https://github.com/chachan), [webattitude](https://github.com/webattitude), [VarinderS](https://github.com/VarinderS), [Gadoma](https://github.com/Gadoma), [pnukeid](https://github.com/pnukeid), [patatjenl](https://github.com/patatjenl), [ridethepinguin](https://github.com/ridethepenguin), [Kovas](https://github.com/Kovas), [giolvani](https://github.com/giolvani), [superDuperCyberTechno](https://github.com/superDuperCyberTechno), [druellan](https://github.com/druellan), [StuartMorris0](https://github.com/StuartMorris0), [adisos](https://github.com/adisos), [marcoflorian](https://github.com/marcoflorian), [MartinBucko](https://github.com/MartinBucko) 77 | for reporting issues, ideas and fixing! 78 | 79 | 80 | Tips 81 | ---- 82 | 83 | * **Set key bindings (hotkeys) for frequent actions you use** 84 | 85 | Please edit only `Key Bindings - User`, open using: 86 | `Preferences > Package Control > FTPSync > Key Bindings - User` 87 | You can use the contents of `Key Bindings - Default` as a template and copy it there. If you edit `Key Bindings - Default` (either Sublime's or FTPSync's), your changes will be lost on update. 88 | [More info](https://github.com/NoxArt/SublimeText2-FTPSync/wiki/Key-bindings) 89 | 90 | * **Renaming and deleting** 91 | 92 | Please keep in mind that for deleting and renaming on server you need to use `FTPSync > Rename` respectively `FTPSync > Delete` features, not those in Sublime Text 2 or SideBarEnhancements. 93 | 94 | * **Working from more places? 
Or in team?** 95 | 96 | You can either use *download_on_open=true* to check files upon openning or *FTPSync: Check current file* command to see whether you have the same version as is on all servers. Using *overwrite_newer_prevention* is also recommended (it's actually enabled by default). 97 | 98 | * **Upload different language versions to different servers of paths** e.g. 99 | 100 | { 101 | "": { 102 | "host": "ftp.host.en.com", 103 | "ignore": "/locale/(?!fr)\\w+/.*" 104 | }, 105 | "": { 106 | "host": "ftp.host.cz.com", 107 | "ignore": "/locale/(?!cz)\\w+/.*" 108 | } 109 | } 110 | 111 | * **Using file compilation? Want to upload as well?** 112 | 113 | You can use *after_save_watch* option to setup files to be watched for change after uploading on save. [Learn how to use in Wiki](https://github.com/NoxArt/SublimeText2-FTPSync/wiki/Why-and-how-to-use-afterwatch). 114 | -------------------------------------------------------------------------------- /Settings.tmLanguage: -------------------------------------------------------------------------------- 1 | 10 | 11 | 12 | 13 | fileTypes 14 | 15 | sublime-settings 16 | 17 | name 18 | Sublime Settings 19 | patterns 20 | 21 | 22 | match 23 | //.* 24 | name 25 | comment.single.line.sublime-settings 26 | 27 | 28 | begin 29 | /\* 30 | end 31 | \*/ 32 | name 33 | comment.block.sublime-settings 34 | 35 | 36 | captures 37 | 38 | 1 39 | 40 | name 41 | keyword.other.name.sublime-settings 42 | 43 | 44 | match 45 | "([a-z0-9_.*-]+)"\s*?: 46 | 47 | 48 | include 49 | source.jsongenericarrayelements 50 | 51 | 52 | scopeName 53 | source.sublime-settings 54 | uuid 55 | dd6dce14-1f27-4128-9c85-7e30c137ae30 56 | 57 | 58 | -------------------------------------------------------------------------------- /Side Bar.sublime-menu: -------------------------------------------------------------------------------- 1 | [ 2 | { "caption": "-" }, 3 | { 4 | "caption": "FTPSync", 5 | "children": 6 | [ 7 | { "caption": "Upload", "command": 
"ftp_sync_target", "args": { "edit": null, "paths": []} }, 8 | { "caption": "Download", "command": "ftp_sync_down_target", "args": { "edit": null, "paths": [], "forced": true} }, 9 | { "caption": "Rename", "command": "ftp_sync_rename", "args": { "edit": null, "paths": []} }, 10 | { "caption": "-" }, 11 | { "caption": "Delete", "command": "ftp_sync_delete", "args": { "edit": null, "paths": []} }, 12 | { "caption": "-" }, 13 | { "caption": "Browse here", "command": "ftp_sync_browse_place", "args": { "edit": null, "paths": []} }, 14 | { "caption": "Show info", "command": "ftp_sync_show_info", "args": { "edit": null, "paths": []} }, 15 | { "caption": "-" }, 16 | { "caption": "Setup FTPSync in this folder", "command": "ftp_sync_new_settings", "args": { "edit": null, "dirs": []} }, 17 | { "caption": "Open README (Github)", "command": "ftp_sync_url_readme" }, 18 | { "caption": "Report issue / suggestion (Github)", "command": "ftp_sync_url_report" }, 19 | { "caption": "-" }, 20 | { "caption": "Cleanup temporary files", "command": "ftp_sync_cleanup", "args": { "edit": null, "paths": []} } 21 | ] 22 | } 23 | ] -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NoxArt/SublimeText2-FTPSync/5893073bf081a0c7d51dff26fac77f2163d8d71f/__init__.py -------------------------------------------------------------------------------- /ftpsync.default-settings: -------------------------------------------------------------------------------- 1 | { 2 | // HELP / INFO moved to bottom 3 | // Remove "//" to uncomment settings directive 4 | 5 | "default": { 6 | 7 | "host": "ftp.example.com", 8 | "username": "your_login", // or null for anonymous login 9 | "password": "your_password", 10 | "path": "/", 11 | 12 | "upload_on_save": true, // set *false* if you do not want to upload on save! 
13 | 14 | // "port": 21, 15 | // "tls": false, 16 | // "timeout": 30, // [seconds] 17 | 18 | // "passive": true, 19 | // "download_on_open": false, 20 | // "overwrite_newer_prevention": true, 21 | // "default_folder_permissions": "755", 22 | // "default_upload_permissions": null, // null = no action taken 23 | // "time_offset": 0, // [seconds] 24 | // "always_sync_local_permissions": true, 25 | 26 | // Value "auto" = use UTF-8 if availible (FEAT: UTF8), otherwise use local 27 | // "encoding": "auto", 28 | 29 | // Trade small performance impact for more stable and secure transfer (old file is intact until the download is finished) 30 | // "use_tempfile": true, 31 | 32 | // Regular expression, recommending using \b in general and /.../ for folders to avoid matching substrings 33 | // "ignore": "", 34 | 35 | // Can be used for increase of performance or to allow build scripts to finish 36 | // "upload_delay": 0, // [seconds] 37 | 38 | // Only if the server has MFMT extension installed 39 | // "set_remote_lastmodified": true, 40 | 41 | // Chmod value for files newly downloaded by FTPSync 42 | // "auto" = same as on server 43 | // null = no action taken 44 | // "0644" = example for direct value 45 | // "default_local_permissions": "auto", 46 | 47 | // List of lists with pathnames and filenames to folders to be watched for change in between delay (upload_delay) 48 | // example: after_save_watch: [ [ "code/assets/css", "*.css" ], [ "code/assets/", "*.jpg, *.png, *.gif" ] ] 49 | // used only in conjunction with upload_on_save and upload_delay ** 50 | // For more info see https://github.com/NoxArt/SublimeText2-FTPSync/wiki/Why-and-how-to-use-afterwatch 51 | // "after_save_watch": [], 52 | 53 | } 54 | 55 | // ------ INFO ---------------------------------------------------------------------- 56 | 57 | // Index page 58 | // --- https://github.com/NoxArt/SublimeText2-FTPSync/ 59 | 60 | // For settings description see: 61 | // --- 
https://github.com/NoxArt/SublimeText2-FTPSync/wiki/All-settings 62 | 63 | // For more info see: 64 | // --- https://github.com/NoxArt/SublimeText2-FTPSync/wiki/_pages 65 | 66 | // Want to ask? Report a bug? 67 | // --- Hit: https://github.com/NoxArt/SublimeText2-FTPSync/issues/new 68 | 69 | 70 | // ** Commas ** 71 | 72 | // from now on extra commas are allowed and recommended 73 | 74 | // ** Comments ** 75 | 76 | // The "//" are so called "comments", all text after it is ignored, 77 | // they are used for notes or deactivating an entry 78 | 79 | // Non-basic settings are deactivated and default options specified in the global settings file, 80 | // that is $packages$/FTPSync/ftpsync.sublime-settings (where $packages$ is a path where Sublime 81 | // keeps packages in your Operating System) accessible via Preferences > Package Settings > FTPSync 82 | // Use Settings - User to override the global defaults 83 | 84 | // More info about Sublime Text 2 settings on http://www.sublimetext.com/docs/2/settings.html 85 | 86 | } 87 | -------------------------------------------------------------------------------- /ftpsynccommon.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) 2012 Jiri "NoxArt" Petruzelka 4 | # 5 | # Permission is hereby granted, free of charge, to any person 6 | # obtaining a copy of this software and associated documentation 7 | # files (the "Software"), to deal in the Software without 8 | # restriction, including without limitation the rights to use, 9 | # copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | # copies of the Software, and to permit persons to whom the 11 | # Software is furnished to do so, subject to the following 12 | # conditions: 13 | # 14 | # The above copyright notice and this permission notice shall be 15 | # included in all copies or substantial portions of the Software. 
class Runtime(object):
    """Runtime introspection helpers."""

    @staticmethod
    def getCaller(up = 0):
        """Return the name of the function that called the inquiring function.

        up -- how many extra frames to climb above the immediate caller
              (0 = the caller of the function that invoked getCaller).
        """
        # stack()[0] is getCaller itself, stack()[1] is the function asking
        # about its caller, so stack()[2 + up] is that function's caller,
        # `up` frames higher; [3] is the frame's function name.
        return inspect.stack()[2 + up][3]


class Types(object):
    """Aliases for the text/binary types that differ between Python 2 and 3."""

    # Use the numeric version_info instead of lexicographically comparing the
    # human-readable sys.version string (the recommended, robust idiom).
    if sys.version_info[0] < 3:
        text = unicode
        binary = str
    else:
        text = str
        binary = bytes

    @staticmethod
    def u(string):
        """Coerce *string* to the interpreter's unicode text type.

        On Python 2 the escape sequences are decoded; on Python 3 all str
        objects are already unicode, so the input is returned unchanged.
        """
        if sys.version_info[0] < 3:
            return codecs.unicode_escape_decode(string)[0]
        else:
            return string
class FileNotFoundException(Exception):
    """Raised when a file FTPSync expects to work with cannot be located."""
# ==== Initialization and optimization =====================================

# limit for breaking down a filepath structure when looking for config files
nestingLimit = 30

# permission triples: symbolic rwx triple -> octal digit; the setuid/setgid
# ('s') and sticky ('t') variants map to the same digit as plain execute
triples = {
    '---': 0,
    '--x': 1,
    '--s': 1,
    '--t': 1,
    '-w-': 2,
    '-wx': 3,
    '-ws': 3,
    '-wt': 3,
    'r--': 4,
    'r-x': 5,
    'r-s': 5,
    'r-t': 5,
    'rw-': 6,
    'rwx': 7,
    'rws': 7,
    'rwt': 7,
}



# ==== Content =============================================================

# Returns whether the variable is some form of string
def isString(var):
    """Return True when *var* is a native string type of this interpreter."""
    var_type = type(var)

    # version_info gives the numeric major version; comparing characters of
    # the human-readable sys.version string is fragile
    if sys.version_info[0] >= 3:
        return var_type is str or var_type is bytes
    else:
        return var_type is str or var_type is unicode


# A file representation with helper methods
class Metafile:
    """Metadata about a (local or remote) file or directory entry."""

    def __init__(self, name, isDir, lastModified, filesize, path=None, permissions=None):
        # name: basename of the entry; path: containing folder (or full path,
        # depending on the producer -- see fileToMetafile)
        self.name = name
        self.isDir = bool(isDir)

        # timestamp and size are normalized to float, or kept as None when
        # the information is unavailable (e.g. sparse remote listings)
        self.lastModified = lastModified
        if self.lastModified is not None:
            self.lastModified = float(self.lastModified)

        self.filesize = filesize
        if self.filesize is not None:
            self.filesize = float(self.filesize)

        self.path = path
        # permissions: 9-character symbolic string such as "rwxr-xr--", if known
        self.permissions = permissions

    def getName(self):
        return self.name

    def getPath(self):
        return self.path

    def getFilepath(self):
        # remote-style join; self.path is assumed to be the parent folder here
        return self.path + "/" + self.name

    def getPermissions(self):
        return self.permissions

    def getPermissionsNumeric(self):
        """Convert the symbolic permission string to an octal-like string.

        Expects self.permissions to hold at least 9 symbolic characters
        ("rwxr-xr--" -> "0754").
        """
        symbolic = self.permissions

        numeric = "0"
        numeric += str(triples[symbolic[0:3]])
        numeric += str(triples[symbolic[3:6]])
        numeric += str(triples[symbolic[6:9]])

        return numeric

    def isDirectory(self):
        return self.isDir

    def getLastModified(self):
        return self.lastModified

    def getLastModifiedFormatted(self, format='%Y-%m-%d %H:%M'):
        # formatTimestamp is defined elsewhere in this module
        return formatTimestamp(self.lastModified, format)

    def getFilesize(self):
        return self.filesize

    def getHumanFilesize(self):
        """Return the filesize formatted with a human-readable unit suffix."""
        if self.filesize < 1024:
            return str(self.filesize) + " B"

        if self.filesize < 1024 * 1024:
            return str(round(self.filesize / 1024, 2)) + " kB"

        if self.filesize < 1024 * 1024 * 1024:
            return str(round(self.filesize / 1024 / 1024, 2)) + " MB"

        return str(round(self.filesize / 1024 / 1024 / 1024, 2)) + " GB"

    def isSameFilepath(self, filepath):
        return os.path.realpath(self.getPath()) == os.path.realpath(filepath)

    def isNewerThan(self, compared_file):
        """Whether this entry's mtime is strictly newer than *compared_file*'s.

        compared_file -- a filesystem path (string) or another Metafile.
        Returns False when either modification time is unknown or the
        compared path does not exist.
        Raises TypeError for any other argument type.
        """
        if self.lastModified is None:
            return False

        if isString(compared_file):
            if os.path.exists(compared_file) is False:
                return False

            lastModified = os.path.getmtime(compared_file)
        elif isinstance(compared_file, Metafile):
            lastModified = compared_file.getLastModified()
            # a remote listing may lack a timestamp; comparing against None
            # would raise TypeError on Python 3
            if lastModified is None:
                return False
        else:
            raise TypeError("Compared_file must be either string (file_path) or Metafile instance")

        return self.lastModified > lastModified

    def isDifferentSizeThan(self, compared_file):
        """Whether this entry's size differs from *compared_file*'s.

        compared_file -- a filesystem path (string) or another Metafile.
        Returns False when either size is unknown or the compared path does
        not exist. Raises TypeError for any other argument type.
        """
        if self.filesize is None:
            return False

        if isString(compared_file):
            if os.path.exists(compared_file) is False:
                return False

            comparedSize = os.path.getsize(compared_file)
        elif isinstance(compared_file, Metafile):
            # bugfix: compare against the Metafile's recorded size; the
            # original called os.path.getsize() on the Metafile object
            comparedSize = compared_file.getFilesize()
            if comparedSize is None:
                return False
        else:
            raise TypeError("Compared_file must be either string (file_path) or Metafile instance")

        return self.filesize != comparedSize


# Detects if object is a string and if so converts to unicode, if not already
#
# @source http://farmdev.com/talks/unicode/
# @author Ivan Krstić
def to_unicode_or_bust(obj, encoding='utf-8'):
    """Return *obj* decoded to the text type; non-strings pass through unchanged."""
    if sys.version_info[0] < 3:
        if isinstance(obj, basestring):
            if not isinstance(obj, unicode):
                obj = unicode(obj, encoding)
    elif isinstance(obj, bytes):
        # Python 3: basestring/unicode do not exist; decode bytes explicitly
        obj = obj.decode(encoding)
    return obj



# Converts file_path to Metafile
#
# @type file_path: string
#
# @return Metafile
def fileToMetafile(file_path):
    """Build a Metafile from a local path using filesystem metadata."""
    # normalize the path to the interpreter's text type
    if sys.version_info[0] < 3 and type(file_path) is str:
        file_path = file_path.decode('utf-8')
    elif type(file_path) is bytes:
        file_path = file_path.decode('utf-8')

    name = os.path.basename(file_path)
    path = file_path
    isDir = os.path.isdir(file_path)
    lastModified = os.path.getmtime(file_path)
    filesize = os.path.getsize(file_path)

    return Metafile(name, isDir, lastModified, filesize, path)
# Returns a timestamp formatted for humans
#
# @type timestamp: int|float
# @type format: string
# @param format: see http://docs.python.org/library/time.html#time.strftime
#
# @return string; "-" when the timestamp is unknown
def formatTimestamp(timestamp, format='%Y-%m-%d %H:%M'):
    if timestamp is None:
        return "-"

    moment = datetime.datetime.fromtimestamp(int(timestamp))
    return moment.strftime(format)


# Get all folders paths from given path upwards
#
# @type file_path: string
# @param file_path: absolute file path to return the paths from
#
# @return list of file paths, starting with file_path itself
#
# @global nestingLimit
def getFolders(file_path):
    if file_path is None:
        return []

    folders = [file_path]
    remaining = nestingLimit

    while True:
        parent, leaf = os.path.split(file_path)

        file_path = parent
        remaining -= 1

        # stop at the filesystem root or when the nesting limit is exhausted
        if len(leaf) == 0 or remaining < 0:
            break

        folders.append(parent)

    return folders


# Finds a real file path among given folder paths
# and returns the path or None
#
# @type folders: list
# @param folders: list of paths to folders to look into
# @type file_name: string
# @param file_name: file name to search
#
# @return string file path or None
def findFile(folders, file_name):
    if folders is None:
        return None

    for folder in folders:
        # bytes paths are decoded before joining
        if not isString(folder):
            folder = folder.decode('utf-8')

        if os.path.exists(os.path.join(folder, file_name)):
            return folder

    return None


# Returns unique list of file paths with corresponding config
#
# @type folders: list
# @param folders: list of paths to folders to filter
# @type getConfigFile: callback
#
# @return list of [encoded_path, config] pairs, one per unique path
def getFiles(paths, getConfigFile):
    if paths is None:
        return []

    files = []
    fileNames = []

    for target in paths:
        if target not in fileNames:
            fileNames.append(target)
            # encode once and reuse for both the entry and the config lookup
            encoded = target.encode('utf-8')
            files.append([encoded, getConfigFile(encoded)])

    return files


# Goes through paths using glob and returns list of Metafiles
#
# @type pattern: string
# @param pattern: glob-like filename pattern
# @type root: string
# @param root: top searched directory
#
# @return dict of Metafile objects keyed by utf-8 encoded file path
def gatherMetafiles(pattern, root):
    if pattern is None:
        # fixed: used to return [] here, which crashed callers that
        # immediately call .items() on the result — keep the return type
        # consistent and return an empty dict
        return {}

    result = {}

    # os.walk already descends into every subdirectory, so one traversal
    # is enough; the previous explicit recursion into `dirnames` re-scanned
    # each subtree once per nesting level
    for subroot, dirnames, filenames in os.walk(root):
        for filename in fnmatch.filter(filenames, pattern):
            target = os.path.join(subroot, filename).encode('utf-8')

            if target not in result:
                result[target] = fileToMetafile(target)

    return result



# Returns difference using lastModified between file dicts
#
# @type metafilesBefore: dict
# @type metafilesAfter: dict
#
# @return list of Metafiles present in both dicts and newer in the second
def getChangedFiles(metafilesBefore, metafilesAfter):
    changed = []

    for file_path in metafilesAfter:
        if file_path in metafilesBefore and metafilesAfter[file_path].isNewerThan(metafilesBefore[file_path]):
            changed.append(metafilesAfter[file_path])

    return changed



# Abstraction of os.rename for replacing cases
#
# @type source: string
# @param source: source file path
# @type destination: string
# @param destination: destination file path
# @param destination: destination file path
def replace(source, destination):
    backup = destination + '.ftpsync.bak'

    try:
        os.rename(source, destination)
    except OSError:
        # destination likely exists (e.g. on Windows): move it aside,
        # retry, and restore the backup when the retry fails as well
        os.rename(destination, backup)

        try:
            os.rename(source, destination)
            os.unlink(backup)
        except OSError:
            os.rename(backup, destination)
            raise



# Performing operation on temporary file and replacing it back
#
# @type operation: callback(file)
# @param operation: operation performed on temporary file
# @type permissions: int (octal)
# @type mode: string
# @param mode: file opening mode
def viaTempfile(file_path, operation, permissions, mode):
    if permissions is None:
        permissions = '0755'
    caught = None

    directory = os.path.dirname(file_path)
    if not os.path.exists(directory):
        os.makedirs(directory, int(permissions, 8))

    # delete=False: the temp file survives close() so replace() can move it
    temp = tempfile.NamedTemporaryFile(mode, suffix='.ftpsync.temp', dir=directory, delete=False)

    try:
        operation(temp)
    except Exception as exp:
        caught = exp
    finally:
        temp.flush()
        temp.close()

    if caught is None:
        # make sure the destination exists before swapping it in
        if not os.path.exists(file_path):
            created = open(file_path, 'w+')
            created.close()

        replace(temp.name, file_path)

    # clean up the temp file when replace() did not consume it
    if os.path.exists(temp.name):
        os.unlink(temp.name)

    if caught is not None:
        raise caught



# Guesses whether given file is textual or not
#
# @type file_path: string
# @type asciiWhitelist: list
#
# @return boolean whether it's likely textual or binary
def isTextFile(file_path, asciiWhitelist):
    extension = os.path.splitext(file_path)[1]

    if extension and extension[1:] in asciiWhitelist:
        return True

    return False



# Adds . and .. entries if missing in the collection
#
# @type contents: list
# @type parentPath: string
#
# @return list
def addLinks(contents, parentPath):
    hasSelf = False
    hasUp = False
    single = None

    for entry in contents:
        name = entry.getName()
        if name == '.':
            hasSelf = True
        elif name == '..':
            hasUp = True

        if hasSelf and hasUp:
            return contents

        single = entry

    # borrow the path from any listed entry, falling back to the parent
    path = single.getPath() if single is not None else parentPath

    if not hasSelf:
        contents.append(Metafile('.', True, None, None, path, None))

    if not hasUp:
        contents.append(Metafile('..', True, None, None, path, None))

    return contents


# Return a relative filepath to path either from the current directory or from an optional start directory
#
# Contains a fix for a bug #5117 not fixed in a version used by ST2
#
# @type path: string
# @param path: destination path
# @type start: string
# @param start: starting (root) path
#
# @return string relative path
def relpath(path, start):
    rel = os.path.relpath(path, start)

    # strip the bogus leading '..' segment produced by the ST2-era bug
    # when relativizing against the root
    if start == '/' and rel[0:2] == '..':
        rel = rel[3:]

    return rel


# -*- coding: utf-8 -*-

# Copyright (c) 2012 Jiri "NoxArt" Petruzelka
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
| # restriction, including without limitation the rights to use, 9 | # copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | # copies of the Software, and to permit persons to whom the 11 | # Software is furnished to do so, subject to the following 12 | # conditions: 13 | # 14 | # The above copyright notice and this permission notice shall be 15 | # included in all copies or substantial portions of the Software. 16 | # 17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 18 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 19 | # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 20 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 21 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 22 | # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 23 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 24 | # OTHER DEALINGS IN THE SOFTWARE. 25 | 26 | # @author Jiri "NoxArt" Petruzelka | petruzelka@noxart.cz | @NoxArt 27 | # @copyright (c) 2012 Jiri "NoxArt" Petruzelka 28 | # @link https://github.com/NoxArt/SublimeText2-FTPSync 29 | 30 | # Doc comment syntax inspired by http://stackoverflow.com/a/487203/387503 31 | 32 | 33 | # ==== Libraries =========================================================================== 34 | 35 | # Python's built-in libraries 36 | import os 37 | import sys 38 | 39 | # FTPSync libraries 40 | if sys.version < '3': 41 | from ftpsyncfiles import gatherMetafiles, getChangedFiles 42 | else: 43 | from FTPSync.ftpsyncfiles import gatherMetafiles, getChangedFiles 44 | 45 | 46 | # ==== Exceptions ========================================================================== 47 | 48 | class WatcherClosedException(RuntimeError): 49 | pass 50 | 51 | class NotPreparedException(Exception): 52 | pass 53 | 54 | 55 | # ==== Content ============================================================================= 56 | 57 | class FileWatcher(object): 58 | 59 | 
def __init__(self, config_file_path, config): 60 | self.config_file_path = config_file_path 61 | self.config = config 62 | self.prepared = False 63 | self.afterwatch = { 64 | 'before': {}, 65 | 'after': {} 66 | } 67 | 68 | 69 | # Scans watched paths for watched files, creates metafiles 70 | # 71 | # @type event: string 72 | # @param event: 'before', 'after' 73 | # @type name: string 74 | # @param name: connection name 75 | def scanWatched(self, event, name): 76 | if event is 'before' and name in self.afterwatch['before'] and len(self.afterwatch['before'][name]) > 0: 77 | return 78 | 79 | root = os.path.dirname(self.config_file_path) 80 | properties = self.config[name] 81 | watch = properties['after_save_watch'] 82 | self.afterwatch[event][name] = {} 83 | 84 | if type(watch) is list and len(watch) > 0 and properties['upload_delay'] > 0: 85 | for folder, filepattern in watch: 86 | # adds contents to dict 87 | self.afterwatch[event][name].update(gatherMetafiles(filepattern, os.path.join(root, folder)).items()) 88 | 89 | 90 | # ??? 91 | # 92 | # @type event: string 93 | # @param event: 'before', 'after' 94 | # @type name: string 95 | # @param name: connection name 96 | # @type data: ??? 97 | # @param data: ??? 
98 | def setScanned(self, event, name, data): 99 | if type(self.afterwatch) is not dict: 100 | self.afterwatch = {} 101 | 102 | if event not in self.afterwatch or type(self.afterwatch[event]) is not dict: 103 | self.afterwatch[event] = {} 104 | 105 | self.afterwatch[event][name] = data 106 | 107 | 108 | # Goes through all connection configs and scans all the requested paths 109 | def prepare(self): 110 | if self.prepared: 111 | raise WatcherClosedException 112 | 113 | for name in self.config: 114 | if self.config[name]['after_save_watch']: 115 | self.scanWatched('before', name) 116 | 117 | if self.config[name]['debug_extras']['after_save_watch']: 118 | print ("FTPSync dumping pre-scan") 119 | print (self.afterwatch['before']) 120 | 121 | self.prepared = True 122 | 123 | 124 | # Returns files that got changed 125 | # 126 | # @type connectionName: string 127 | # 128 | # @return Metafile[] 129 | def getChangedFiles(self, connectionName): 130 | if self.prepared is False: 131 | raise NotPreparedException 132 | 133 | self.afterwatch['after'][connectionName] = {} 134 | self.scanWatched('after', connectionName) 135 | if self.config[connectionName]['debug_extras']['after_save_watch']: 136 | print ("FTPSync dumping post-scan") 137 | print (self.afterwatch['before']) 138 | changed = getChangedFiles(self.afterwatch['before'][connectionName], self.afterwatch['after'][connectionName]) 139 | if self.config[connectionName]['debug_extras']['after_save_watch']: 140 | print ("FTPSync dumping changed files") 141 | print ("COUNT: " + str(len(changed))) 142 | for change in changed: 143 | print ("Path: " + change.getPath() + " | Name: " + change.getName()) 144 | 145 | return changed 146 | 147 | 148 | 149 | -------------------------------------------------------------------------------- /ftpsyncprogress.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) 2012 Jiri "NoxArt" Petruzelka 4 | # 5 | # 
Permission is hereby granted, free of charge, to any person 6 | # obtaining a copy of this software and associated documentation 7 | # files (the "Software"), to deal in the Software without 8 | # restriction, including without limitation the rights to use, 9 | # copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | # copies of the Software, and to permit persons to whom the 11 | # Software is furnished to do so, subject to the following 12 | # conditions: 13 | # 14 | # The above copyright notice and this permission notice shall be 15 | # included in all copies or substantial portions of the Software. 16 | # 17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 18 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 19 | # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 20 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 21 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 22 | # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 23 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 24 | # OTHER DEALINGS IN THE SOFTWARE. 
# @author Jiri "NoxArt" Petruzelka | petruzelka@noxart.cz | @NoxArt
# @copyright (c) 2012 Jiri "NoxArt" Petruzelka
# @link https://github.com/NoxArt/SublimeText2-FTPSync

# ==== Libraries ===========================================================================

# Python's built-in libraries
import math


# ==== Content =============================================================================

# Class implementing logic for progress bar
class Progress:

    # @type current: integer
    # @param current: initial number of finished entries
    def __init__(self, current=0):
        # fixed: the original assigned the literal 0 and silently ignored
        # the `current` argument
        self.current = current
        self.entries = []

    # Add unfinished entries to progress bar
    #
    # @type self: Progress
    # @type entries: list
    # @param entries: list of unfinished entries, usually strings
    def add(self, entries):
        for entry in entries:
            # duplicates are counted once
            if entry not in self.entries:
                self.entries.append(entry)


    # Return number of items in the progress
    #
    # @type self: Progress
    #
    # @return int
    def getTotal(self):
        return len(self.entries)


    # Marks a certain number of entries as finished
    #
    # @type self: Progress
    # @type by: integer
    # @param by: number of finished items
    def progress(self, by=1):
        self.current += int(by)

        # clamp: cannot be more finished than the total
        if self.current > self.getTotal():
            self.current = self.getTotal()


    # Returns whether the process has been finished
    #
    # @type self: Progress
    #
    # @return bool
    def isFinished(self):
        return self.current >= self.getTotal()


    # Get percentage of the progress bar, maybe rounded, see @return
    #
    # @type self: Progress
    # @type division: integer
    # @param division: rounding amount
    #
    # @return integer between 0 and 100 / division
    def getPercent(self, division=5):
        # == instead of `is`: identity comparison with int literals only
        # works via CPython's small-int cache and is not guaranteed
        if division == 0:
            division = 1

        total = self.getTotal()
        if total == 0:
            total = self.current
        if total == 0:
            total = 1

        percent = int(math.ceil(float(self.current) / float(total) * 100))
        percent = math.ceil(percent / division)

        return percent


# -*- coding: utf-8 -*-

# Copyright (c) 2012 Jiri "NoxArt" Petruzelka
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# @author Jiri "NoxArt" Petruzelka | petruzelka@noxart.cz | @NoxArt
# @copyright (c) 2012 Jiri "NoxArt" Petruzelka
# @link https://github.com/NoxArt/SublimeText2-FTPSync

# Simple synchronous publish/subscribe event bus
class Pubsub:

    # process-wide shared instance, created lazily by instance()
    _instance = None

    # Returns the shared Pubsub singleton, creating it on first use
    @staticmethod
    def instance():
        if Pubsub._instance is None:
            Pubsub._instance = Pubsub()

        return Pubsub._instance

    def __init__(self):
        # event name => list of handler callables
        self.handlers = {}

    # Registers a handler for the given event
    #
    # @type event: string
    # @type handler: callable
    def subscribe(self, event, handler):
        if not self.has(event):
            self.handlers[event] = []

        self.handlers[event].append(handler)

    # Calls all handlers subscribed to the event, in subscription order
    #
    # @type event: string
    # @type args: list
    # @param args: positional arguments passed to each handler
    def publish(self, event, args=None):
        # None instead of the original mutable default `[]` — a shared
        # default list is the classic Python mutable-default pitfall
        if args is None:
            args = []

        if not self.has(event):
            return

        for handler in self.handlers[event]:
            handler(*args)

    # Returns whether any handler is subscribed to the event
    def has(self, event):
        return event in self.handlers


if __name__ == '__main__':
    import unittest

    class PubsubTest(unittest.TestCase):
        def test_hasEvent(self):
            p = Pubsub()
            self.assertFalse(p.has('test_hasEvent'))
            p.subscribe('test_hasEvent', None)
            self.assertTrue(p.has('test_hasEvent'))

        def test_basic(self):
            p = Pubsub()

            result = {
                'success': False,
                'failure': False
            }

            def setSuccess():
                result['success'] = True
            def setFailure():
                result['failure'] = True

            p.subscribe('success', setSuccess)
            p.publish('success')

            self.assertTrue(result['success'])
            self.assertFalse(result['failure'])

        def test_args(self):
            p = Pubsub()

            result = {
                'result': None
            }
            def multiply(a, b):
                result['result'] = a * b

            p.subscribe('test', multiply)
            p.publish('test', [2, 7])

            # assertEqual: assertEquals is a deprecated alias, removed in
            # Python 3.12
            self.assertEqual(14, result['result'])

        def test_instance(self):
            p = Pubsub.instance()
            self.assertTrue( isinstance(p, Pubsub) )


    unittest.main()
-------------------------------------------------------------------------------- /ftpsyncworker.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) 2012 Jiri "NoxArt" Petruzelka 4 | # 5 | # Permission is hereby granted, free of charge, to any person 6 | # obtaining a copy of this software and associated documentation 7 | # files (the "Software"), to deal in the Software without 8 | # restriction, including without limitation the rights to use, 9 | # copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | # copies of the Software, and to permit persons to whom the 11 | # Software is furnished to do so, subject to the following 12 | # conditions: 13 | # 14 | # The above copyright notice and this permission notice shall be 15 | # included in all copies or substantial portions of the Software. 16 | # 17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 18 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 19 | # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 20 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 21 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 22 | # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 23 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 24 | # OTHER DEALINGS IN THE SOFTWARE. 
# @author Jiri "NoxArt" Petruzelka | petruzelka@noxart.cz | @NoxArt
# @copyright (c) 2012 Jiri "NoxArt" Petruzelka
# @link https://github.com/NoxArt/SublimeText2-FTPSync

# ==== Libraries ===========================================================================

# Python's built-in libraries
import threading
import sys
from time import sleep

# FTPSync libraries
if sys.version < '3':
    from ftpsynccommon import Types
else:
    from FTPSync.ftpsynccommon import Types

# ==== Content =============================================================================

# Command thread: executes one command object on its own thread and reports
# back to the worker when done
class RunningCommand(threading.Thread):
    def __init__(self, command, onFinish, debug, tid):
        self.command = command
        # callback invoked with the command once execution has settled
        self.onFinish = onFinish
        self.debug = bool(debug)
        self.id = int(tid)
        threading.Thread.__init__(self)

    # Prints debug message if enabled
    #
    # @param message: string or exception instance
    def _debugPrint(self, message):
        if self.debug:
            # str(message): run() passes exception objects here; the
            # original concatenated str + Exception, raising TypeError
            print( "[command {0}]".format(self.id) + str(message) )

    # Runs command; a failed execution is retried once, and the retry's
    # exception (if any) propagates after onFinish has been notified
    def run(self):
        try:
            self._debugPrint("Executing")
            self.command.execute()
        except Exception as e:
            self._debugPrint(e)
            self._debugPrint("Retrying")

            self.command.execute()
        finally:
            self._debugPrint("Ending")
            # wait for the command to settle before releasing it back
            while self.command.isRunning():
                self._debugPrint("Is running...")
                sleep(0.5)

            self.onFinish(self.command)


# Class handling concurrent commands
class Worker(object):

    def __init__(self, limit, factory, loader):
        # maximum number of simultaneously running commands
        self.limit = int(limit)

        self.connections = []
        self.commands = []
        self.waitingCommands = []
        self.threads = []
        self.index = 0
        self.threadId = 0
        self.semaphore = threading.BoundedSemaphore(self.limit)

        # factory creates a connection, loader prepares its config
        self.makeConnection = factory
        self.makeConfig = loader
        self.freeConnections = []

        self.debug = False
False 97 | 98 | # Prints debug message if enabled 99 | def _debugPrint(self, message): 100 | if self.debug: 101 | print(message) 102 | 103 | # Enables console dumping 104 | def enableDebug(self): 105 | self.debug = True 106 | 107 | # Enables console dumping 108 | def disableDebug(self): 109 | self.debug = False 110 | 111 | # Sets a callback used for making a connection 112 | def setConnectionFactory(self, factory): 113 | self.makeConnection = factory 114 | 115 | # Adds a new connection to pool 116 | def addConnection(self, connections): 117 | self.connections.append(connections) 118 | 119 | # Creates and adds a connection if limit allows 120 | def fillConnection(self, config): 121 | if len(self.connections) <= self.limit: 122 | connection = None 123 | 124 | try: 125 | connection = self.makeConnection(self.makeConfig(config), None, False) 126 | except Exception as e: 127 | if str(e).lower().find('too many connections') != -1: 128 | self._debugPrint("FTPSync > Too many connections...") 129 | sleep(1.5) 130 | else: 131 | self._debugPrint(e) 132 | raise 133 | 134 | if connection is not None and len(connection) > 0: 135 | self.addConnection(connection) 136 | self.freeConnections.append(len(self.connections)) 137 | 138 | self._debugPrint("FTPSync > Creating new connection #{0}".format(len(self.connections))) 139 | 140 | # Adds a new command to worker 141 | def addCommand(self, command, config): 142 | self._debugPrint("FTPSync > Adding command " + self.__commandName(command)) 143 | if len(self.commands) >= self.limit: 144 | self._debugPrint("FTPSync > Queuing command " + self.__commandName(command) + " (total: {0})".format(len(self.waitingCommands) + 1)) 145 | self.__waitCommand(command) 146 | else: 147 | self._debugPrint("FTPSync > Running command " + self.__commandName(command) + " (total: {0})".format(len(self.commands) + 1)) 148 | self.__run(command, config) 149 | 150 | # Return whether has any scheduled commands 151 | def isEmpty(self): 152 | return 
len(self.commands) == 0 and len(self.waitingCommands) == 0 153 | 154 | # Put the command to sleep 155 | def __waitCommand(self, command): 156 | self.waitingCommands.append(command) 157 | 158 | # Run the command 159 | def __run(self, command, config): 160 | try: 161 | self.semaphore.acquire() 162 | self.threadId += 1 163 | 164 | self.fillConnection(config) 165 | while len(self.freeConnections) == 0: 166 | sleep(0.1) 167 | self.fillConnection(config) 168 | 169 | index = self.freeConnections.pop() 170 | thread = RunningCommand(command, self.__onFinish, self.debug, self.threadId) 171 | 172 | self._debugPrint("FTPSync > Scheduling thread #{0}".format(self.threadId) + " " + self.__commandName(command) + " run, using connection {0}".format(index)) 173 | 174 | command.setConnection(self.connections[index - 1]) 175 | self.commands.append({ 176 | 'command': command, 177 | 'config': config, 178 | 'thread': thread, 179 | 'index': index, 180 | 'threadId': self.threadId 181 | }) 182 | 183 | thread.start() 184 | except Exception as e: 185 | self.__onFinish(command) 186 | raise 187 | finally: 188 | self.semaphore.release() 189 | 190 | # Finish callback 191 | def __onFinish(self, command): 192 | config = None 193 | 194 | # Kick from running commands and free connection 195 | for cmd in self.commands: 196 | if cmd['command'] is command: 197 | self.freeConnections.append(cmd['index']) 198 | config = cmd['config'] 199 | self.commands.remove(cmd) 200 | 201 | self._debugPrint("FTPSync > Removing thread #{0}".format(cmd['threadId'])) 202 | 203 | self._debugPrint("FTPSync > Sleeping commands: {0}".format(len(self.waitingCommands))) 204 | 205 | # Woke up one sleeping command 206 | if len(self.waitingCommands) > 0: 207 | awakenCommand = self.waitingCommands.pop() 208 | self.__run(awakenCommand, config) 209 | 210 | # Returns classname of given command 211 | def __commandName(self, command): 212 | return Types.u(command.__class__.__name__) 213 | 214 | # Closes all connections 215 | def 
__del__(self): 216 | for connections in self.connections: 217 | for connection in connections: 218 | connection.close() 219 | 220 | self._debugPrint("FTPSync > Closing connection") 221 | -------------------------------------------------------------------------------- /lib2/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NoxArt/SublimeText2-FTPSync/5893073bf081a0c7d51dff26fac77f2163d8d71f/lib2/__init__.py -------------------------------------------------------------------------------- /lib2/idna.py: -------------------------------------------------------------------------------- 1 | # This module implements the RFCs 3490 (IDNA) and 3491 (Nameprep) 2 | 3 | import stringprep, re, codecs 4 | from unicodedata import ucd_3_2_0 as unicodedata 5 | 6 | # IDNA section 3.1 7 | dots = re.compile("[\u002E\u3002\uFF0E\uFF61]") 8 | 9 | # IDNA section 5 10 | ace_prefix = b"xn--" 11 | sace_prefix = "xn--" 12 | 13 | # This assumes query strings, so AllowUnassigned is true 14 | def nameprep(label): 15 | # Map 16 | newlabel = [] 17 | for c in label: 18 | if stringprep.in_table_b1(c): 19 | # Map to nothing 20 | continue 21 | newlabel.append(stringprep.map_table_b2(c)) 22 | label = "".join(newlabel) 23 | 24 | # Normalize 25 | label = unicodedata.normalize("NFKC", label) 26 | 27 | # Prohibit 28 | for c in label: 29 | if stringprep.in_table_c12(c) or \ 30 | stringprep.in_table_c22(c) or \ 31 | stringprep.in_table_c3(c) or \ 32 | stringprep.in_table_c4(c) or \ 33 | stringprep.in_table_c5(c) or \ 34 | stringprep.in_table_c6(c) or \ 35 | stringprep.in_table_c7(c) or \ 36 | stringprep.in_table_c8(c) or \ 37 | stringprep.in_table_c9(c): 38 | raise UnicodeError("Invalid character %r" % c) 39 | 40 | # Check bidi 41 | RandAL = [stringprep.in_table_d1(x) for x in label] 42 | for c in RandAL: 43 | if c: 44 | # There is a RandAL char in the string. 
Must perform further 45 | # tests: 46 | # 1) The characters in section 5.8 MUST be prohibited. 47 | # This is table C.8, which was already checked 48 | # 2) If a string contains any RandALCat character, the string 49 | # MUST NOT contain any LCat character. 50 | if any(stringprep.in_table_d2(x) for x in label): 51 | raise UnicodeError("Violation of BIDI requirement 2") 52 | 53 | # 3) If a string contains any RandALCat character, a 54 | # RandALCat character MUST be the first character of the 55 | # string, and a RandALCat character MUST be the last 56 | # character of the string. 57 | if not RandAL[0] or not RandAL[-1]: 58 | raise UnicodeError("Violation of BIDI requirement 3") 59 | 60 | return label 61 | 62 | def ToASCII(label): 63 | try: 64 | # Step 1: try ASCII 65 | label = label.encode("ascii") 66 | except UnicodeError: 67 | pass 68 | else: 69 | # Skip to step 3: UseSTD3ASCIIRules is false, so 70 | # Skip to step 8. 71 | if 0 < len(label) < 64: 72 | return label 73 | raise UnicodeError("label empty or too long") 74 | 75 | # Step 2: nameprep 76 | label = nameprep(label) 77 | 78 | # Step 3: UseSTD3ASCIIRules is false 79 | # Step 4: try ASCII 80 | try: 81 | label = label.encode("ascii") 82 | except UnicodeError: 83 | pass 84 | else: 85 | # Skip to step 8. 
86 | if 0 < len(label) < 64: 87 | return label 88 | raise UnicodeError("label empty or too long") 89 | 90 | # Step 5: Check ACE prefix 91 | if label.startswith(sace_prefix): 92 | raise UnicodeError("Label starts with ACE prefix") 93 | 94 | # Step 6: Encode with PUNYCODE 95 | label = label.encode("punycode") 96 | 97 | # Step 7: Prepend ACE prefix 98 | label = ace_prefix + label 99 | 100 | # Step 8: Check size 101 | if 0 < len(label) < 64: 102 | return label 103 | raise UnicodeError("label empty or too long") 104 | 105 | def ToUnicode(label): 106 | # Step 1: Check for ASCII 107 | if isinstance(label, bytes): 108 | pure_ascii = True 109 | else: 110 | try: 111 | label = label.encode("ascii") 112 | pure_ascii = True 113 | except UnicodeError: 114 | pure_ascii = False 115 | if not pure_ascii: 116 | # Step 2: Perform nameprep 117 | label = nameprep(label) 118 | # It doesn't say this, but apparently, it should be ASCII now 119 | try: 120 | label = label.encode("ascii") 121 | except UnicodeError: 122 | raise UnicodeError("Invalid character in IDN label") 123 | # Step 3: Check for ACE prefix 124 | if not label.startswith(ace_prefix): 125 | return str(label, "ascii") 126 | 127 | # Step 4: Remove ACE prefix 128 | label1 = label[len(ace_prefix):] 129 | 130 | # Step 5: Decode using PUNYCODE 131 | result = label1.decode("punycode") 132 | 133 | # Step 6: Apply ToASCII 134 | label2 = ToASCII(result) 135 | 136 | # Step 7: Compare the result of step 6 with the one of step 3 137 | # label2 will already be in lower case. 
138 | if str(label, "ascii").lower() != str(label2, "ascii"): 139 | raise UnicodeError("IDNA does not round-trip", label, label2) 140 | 141 | # Step 8: return the result of step 5 142 | return result 143 | 144 | ### Codec APIs 145 | 146 | class Codec(codecs.Codec): 147 | def encode(self, input, errors='strict'): 148 | 149 | if errors != 'strict': 150 | # IDNA is quite clear that implementations must be strict 151 | raise UnicodeError("unsupported error handling "+errors) 152 | 153 | if not input: 154 | return b'', 0 155 | 156 | try: 157 | result = input.encode('ascii') 158 | except UnicodeEncodeError: 159 | pass 160 | else: 161 | # ASCII name: fast path 162 | labels = result.split(b'.') 163 | for label in labels[:-1]: 164 | if not (0 < len(label) < 64): 165 | raise UnicodeError("label empty or too long") 166 | if len(labels[-1]) >= 64: 167 | raise UnicodeError("label too long") 168 | return result, len(input) 169 | 170 | result = bytearray() 171 | labels = dots.split(input) 172 | if labels and not labels[-1]: 173 | trailing_dot = b'.' 174 | del labels[-1] 175 | else: 176 | trailing_dot = b'' 177 | for label in labels: 178 | if result: 179 | # Join with U+002E 180 | result.extend(b'.') 181 | result.extend(ToASCII(label)) 182 | return bytes(result+trailing_dot), len(input) 183 | 184 | def decode(self, input, errors='strict'): 185 | 186 | if errors != 'strict': 187 | raise UnicodeError("Unsupported error handling "+errors) 188 | 189 | if not input: 190 | return "", 0 191 | 192 | # IDNA allows decoding to operate on Unicode strings, too. 193 | if not isinstance(input, bytes): 194 | # XXX obviously wrong, see #3232 195 | input = bytes(input) 196 | 197 | if ace_prefix not in input: 198 | # Fast path 199 | try: 200 | return input.decode('ascii'), len(input) 201 | except UnicodeDecodeError: 202 | pass 203 | 204 | labels = input.split(b".") 205 | 206 | if labels and len(labels[-1]) == 0: 207 | trailing_dot = '.' 
208 | del labels[-1] 209 | else: 210 | trailing_dot = '' 211 | 212 | result = [] 213 | for label in labels: 214 | result.append(ToUnicode(label)) 215 | 216 | return ".".join(result)+trailing_dot, len(input) 217 | 218 | class IncrementalEncoder(codecs.BufferedIncrementalEncoder): 219 | def _buffer_encode(self, input, errors, final): 220 | if errors != 'strict': 221 | # IDNA is quite clear that implementations must be strict 222 | raise UnicodeError("unsupported error handling "+errors) 223 | 224 | if not input: 225 | return (b'', 0) 226 | 227 | labels = dots.split(input) 228 | trailing_dot = b'' 229 | if labels: 230 | if not labels[-1]: 231 | trailing_dot = b'.' 232 | del labels[-1] 233 | elif not final: 234 | # Keep potentially unfinished label until the next call 235 | del labels[-1] 236 | if labels: 237 | trailing_dot = b'.' 238 | 239 | result = bytearray() 240 | size = 0 241 | for label in labels: 242 | if size: 243 | # Join with U+002E 244 | result.extend(b'.') 245 | size += 1 246 | result.extend(ToASCII(label)) 247 | size += len(label) 248 | 249 | result += trailing_dot 250 | size += len(trailing_dot) 251 | return (bytes(result), size) 252 | 253 | class IncrementalDecoder(codecs.BufferedIncrementalDecoder): 254 | def _buffer_decode(self, input, errors, final): 255 | if errors != 'strict': 256 | raise UnicodeError("Unsupported error handling "+errors) 257 | 258 | if not input: 259 | return ("", 0) 260 | 261 | # IDNA allows decoding to operate on Unicode strings, too. 262 | if isinstance(input, str): 263 | labels = dots.split(input) 264 | else: 265 | # Must be ASCII string 266 | input = str(input, "ascii") 267 | labels = input.split(".") 268 | 269 | trailing_dot = '' 270 | if labels: 271 | if not labels[-1]: 272 | trailing_dot = '.' 273 | del labels[-1] 274 | elif not final: 275 | # Keep potentially unfinished label until the next call 276 | del labels[-1] 277 | if labels: 278 | trailing_dot = '.' 
279 | 280 | result = [] 281 | size = 0 282 | for label in labels: 283 | result.append(ToUnicode(label)) 284 | if size: 285 | size += 1 286 | size += len(label) 287 | 288 | result = ".".join(result) + trailing_dot 289 | size += len(trailing_dot) 290 | return (result, size) 291 | 292 | class StreamWriter(Codec,codecs.StreamWriter): 293 | pass 294 | 295 | class StreamReader(Codec,codecs.StreamReader): 296 | pass 297 | 298 | ### encodings module API 299 | 300 | def getregentry(): 301 | return codecs.CodecInfo( 302 | name='idna', 303 | encode=Codec().encode, 304 | decode=Codec().decode, 305 | incrementalencoder=IncrementalEncoder, 306 | incrementaldecoder=IncrementalDecoder, 307 | streamwriter=StreamWriter, 308 | streamreader=StreamReader, 309 | ) -------------------------------------------------------------------------------- /lib2/simplejson/LICENSE.txt: -------------------------------------------------------------------------------- 1 | simplejson is dual-licensed software. It is available under the terms 2 | of the MIT license, or the Academic Free License version 2.1. The full 3 | text of each license agreement is included below. This code is also 4 | licensed to the Python Software Foundation (PSF) under a Contributor 5 | Agreement. 6 | 7 | MIT License 8 | =========== 9 | 10 | Copyright (c) 2006 Bob Ippolito 11 | 12 | Permission is hereby granted, free of charge, to any person obtaining a copy of 13 | this software and associated documentation files (the "Software"), to deal in 14 | the Software without restriction, including without limitation the rights to 15 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies 16 | of the Software, and to permit persons to whom the Software is furnished to do 17 | so, subject to the following conditions: 18 | 19 | The above copyright notice and this permission notice shall be included in all 20 | copies or substantial portions of the Software. 
21 | 22 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 23 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 24 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 25 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 26 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 27 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 28 | SOFTWARE. 29 | 30 | Academic Free License v. 2.1 31 | ============================ 32 | 33 | Copyright (c) 2006 Bob Ippolito. All rights reserved. 34 | 35 | This Academic Free License (the "License") applies to any original work of authorship (the "Original Work") whose owner (the "Licensor") has placed the following notice immediately following the copyright notice for the Original Work: 36 | 37 | Licensed under the Academic Free License version 2.1 38 | 39 | 1) Grant of Copyright License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license to do the following: 40 | 41 | a) to reproduce the Original Work in copies; 42 | 43 | b) to prepare derivative works ("Derivative Works") based upon the Original Work; 44 | 45 | c) to distribute copies of the Original Work and Derivative Works to the public; 46 | 47 | d) to perform the Original Work publicly; and 48 | 49 | e) to display the Original Work publicly. 50 | 51 | 2) Grant of Patent License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license, under patent claims owned or controlled by the Licensor that are embodied in the Original Work as furnished by the Licensor, to make, use, sell and offer for sale the Original Work and Derivative Works. 52 | 53 | 3) Grant of Source Code License. 
The term "Source Code" means the preferred form of the Original Work for making modifications to it and all available documentation describing how to modify the Original Work. Licensor hereby agrees to provide a machine-readable copy of the Source Code of the Original Work along with each copy of the Original Work that Licensor distributes. Licensor reserves the right to satisfy this obligation by placing a machine-readable copy of the Source Code in an information repository reasonably calculated to permit inexpensive and convenient access by You for as long as Licensor continues to distribute the Original Work, and by publishing the address of that information repository in a notice immediately following the copyright notice that applies to the Original Work. 54 | 55 | 4) Exclusions From License Grant. Neither the names of Licensor, nor the names of any contributors to the Original Work, nor any of their trademarks or service marks, may be used to endorse or promote products derived from this Original Work without express prior written permission of the Licensor. Nothing in this License shall be deemed to grant any rights to trademarks, copyrights, patents, trade secrets or any other intellectual property of Licensor except as expressly stated herein. No patent license is granted to make, use, sell or offer to sell embodiments of any patent claims other than the licensed claims defined in Section 2. No right is granted to the trademarks of Licensor even if such marks are included in the Original Work. Nothing in this License shall be interpreted to prohibit Licensor from licensing under different terms from this License any Original Work that Licensor otherwise would have a right to license. 56 | 57 | 5) This section intentionally omitted. 58 | 59 | 6) Attribution Rights. 
You must retain, in the Source Code of any Derivative Works that You create, all copyright, patent or trademark notices from the Source Code of the Original Work, as well as any notices of licensing and any descriptive text identified therein as an "Attribution Notice." You must cause the Source Code for any Derivative Works that You create to carry a prominent Attribution Notice reasonably calculated to inform recipients that You have modified the Original Work. 60 | 61 | 7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that the copyright in and to the Original Work and the patent rights granted herein by Licensor are owned by the Licensor or are sublicensed to You under the terms of this License with the permission of the contributor(s) of those copyrights and patent rights. Except as expressly stated in the immediately proceeding sentence, the Original Work is provided under this License on an "AS IS" BASIS and WITHOUT WARRANTY, either express or implied, including, without limitation, the warranties of NON-INFRINGEMENT, MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this License. No license to Original Work is granted hereunder except under this disclaimer. 62 | 63 | 8) Limitation of Liability. Under no circumstances and under no legal theory, whether in tort (including negligence), contract, or otherwise, shall the Licensor be liable to any person for any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or the use of the Original Work including, without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses. 
This limitation of liability shall not apply to liability for death or personal injury resulting from Licensor's negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You. 64 | 65 | 9) Acceptance and Termination. If You distribute copies of the Original Work or a Derivative Work, You must make a reasonable effort under the circumstances to obtain the express assent of recipients to the terms of this License. Nothing else but this License (or another written agreement between Licensor and You) grants You permission to create Derivative Works based upon the Original Work or to exercise any of the rights granted in Section 1 herein, and any attempt to do so except under the terms of this License (or another written agreement between Licensor and You) is expressly prohibited by U.S. copyright law, the equivalent laws of other countries, and by international treaty. Therefore, by exercising any of the rights granted to You in Section 1 herein, You indicate Your acceptance of this License and all of its terms and conditions. 66 | 67 | 10) Termination for Patent Action. This License shall terminate automatically and You may no longer exercise any of the rights granted to You by this License as of the date You commence an action, including a cross-claim or counterclaim, against Licensor or any licensee alleging that the Original Work infringes a patent. This termination provision shall not apply for an action alleging patent infringement by combinations of the Original Work with other software or hardware. 68 | 69 | 11) Jurisdiction, Venue and Governing Law. Any action or suit relating to this License may be brought only in the courts of a jurisdiction wherein the Licensor resides or in which Licensor conducts its primary business, and under the laws of that jurisdiction excluding its conflict-of-law provisions. 
The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any use of the Original Work outside the scope of this License or after its termination shall be subject to the requirements and penalties of the U.S. Copyright Act, 17 U.S.C. § 101 et seq., the equivalent laws of other countries, and international treaty. This section shall survive the termination of this License. 70 | 71 | 12) Attorneys Fees. In any action to enforce the terms of this License or seeking damages relating thereto, the prevailing party shall be entitled to recover its costs and expenses, including, without limitation, reasonable attorneys' fees and costs incurred in connection with such action, including any appeal of such action. This section shall survive the termination of this License. 72 | 73 | 13) Miscellaneous. This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. 74 | 75 | 14) Definition of "You" in This License. "You" throughout this License, whether in upper or lower case, means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with you. For purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 76 | 77 | 15) Right to Use. You may use the Original Work in all ways not otherwise restricted or conditioned by this License or by law, and Licensor promises not to interfere with or be responsible for such uses by You. 
78 | 79 | This license is Copyright (C) 2003-2004 Lawrence E. Rosen. All rights reserved. Permission is hereby granted to copy and distribute this license without modification. This license may not be modified without the express written permission of its copyright owner. 80 | -------------------------------------------------------------------------------- /lib2/simplejson/__init__.py: -------------------------------------------------------------------------------- 1 | r"""JSON (JavaScript Object Notation) is a subset of 2 | JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data 3 | interchange format. 4 | 5 | :mod:`simplejson` exposes an API familiar to users of the standard library 6 | :mod:`marshal` and :mod:`pickle` modules. It is the externally maintained 7 | version of the :mod:`json` library contained in Python 2.6, but maintains 8 | compatibility with Python 2.4 and Python 2.5 and (currently) has 9 | significant performance advantages, even without using the optional C 10 | extension for speedups. 
11 | 12 | Encoding basic Python object hierarchies:: 13 | 14 | >>> import simplejson as json 15 | >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}]) 16 | '["foo", {"bar": ["baz", null, 1.0, 2]}]' 17 | >>> print(json.dumps("\"foo\bar")) 18 | "\"foo\bar" 19 | >>> print(json.dumps(u'\u1234')) 20 | "\u1234" 21 | >>> print(json.dumps('\\')) 22 | "\\" 23 | >>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)) 24 | {"a": 0, "b": 0, "c": 0} 25 | >>> from simplejson.compat import StringIO 26 | >>> io = StringIO() 27 | >>> json.dump(['streaming API'], io) 28 | >>> io.getvalue() 29 | '["streaming API"]' 30 | 31 | Compact encoding:: 32 | 33 | >>> import simplejson as json 34 | >>> obj = [1,2,3,{'4': 5, '6': 7}] 35 | >>> json.dumps(obj, separators=(',',':'), sort_keys=True) 36 | '[1,2,3,{"4":5,"6":7}]' 37 | 38 | Pretty printing:: 39 | 40 | >>> import simplejson as json 41 | >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ')) 42 | { 43 | "4": 5, 44 | "6": 7 45 | } 46 | 47 | Decoding JSON:: 48 | 49 | >>> import simplejson as json 50 | >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}] 51 | >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj 52 | True 53 | >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar' 54 | True 55 | >>> from simplejson.compat import StringIO 56 | >>> io = StringIO('["streaming API"]') 57 | >>> json.load(io)[0] == 'streaming API' 58 | True 59 | 60 | Specializing JSON object decoding:: 61 | 62 | >>> import simplejson as json 63 | >>> def as_complex(dct): 64 | ... if '__complex__' in dct: 65 | ... return complex(dct['real'], dct['imag']) 66 | ... return dct 67 | ... 68 | >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}', 69 | ... object_hook=as_complex) 70 | (1+2j) 71 | >>> from decimal import Decimal 72 | >>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1') 73 | True 74 | 75 | Specializing JSON object encoding:: 76 | 77 | >>> import simplejson as json 78 | >>> def encode_complex(obj): 79 | ... 
if isinstance(obj, complex): 80 | ... return [obj.real, obj.imag] 81 | ... raise TypeError(repr(o) + " is not JSON serializable") 82 | ... 83 | >>> json.dumps(2 + 1j, default=encode_complex) 84 | '[2.0, 1.0]' 85 | >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j) 86 | '[2.0, 1.0]' 87 | >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j)) 88 | '[2.0, 1.0]' 89 | 90 | 91 | Using simplejson.tool from the shell to validate and pretty-print:: 92 | 93 | $ echo '{"json":"obj"}' | python -m simplejson.tool 94 | { 95 | "json": "obj" 96 | } 97 | $ echo '{ 1.2:3.4}' | python -m simplejson.tool 98 | Expecting property name: line 1 column 3 (char 2) 99 | """ 100 | from __future__ import absolute_import 101 | __version__ = '3.6.3' 102 | __all__ = [ 103 | 'dump', 'dumps', 'load', 'loads', 104 | 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder', 105 | 'OrderedDict', 'simple_first', 106 | ] 107 | 108 | __author__ = 'Bob Ippolito ' 109 | 110 | from decimal import Decimal 111 | 112 | from .scanner import JSONDecodeError 113 | from .decoder import JSONDecoder 114 | from .encoder import JSONEncoder, JSONEncoderForHTML 115 | def _import_OrderedDict(): 116 | import collections 117 | try: 118 | return collections.OrderedDict 119 | except AttributeError: 120 | from . 
import ordered_dict 121 | return ordered_dict.OrderedDict 122 | OrderedDict = _import_OrderedDict() 123 | 124 | def _import_c_make_encoder(): 125 | try: 126 | from ._speedups import make_encoder 127 | return make_encoder 128 | except ImportError: 129 | return None 130 | 131 | _default_encoder = JSONEncoder( 132 | skipkeys=False, 133 | ensure_ascii=True, 134 | check_circular=True, 135 | allow_nan=True, 136 | indent=None, 137 | separators=None, 138 | encoding='utf-8', 139 | default=None, 140 | use_decimal=True, 141 | namedtuple_as_object=True, 142 | tuple_as_array=True, 143 | bigint_as_string=False, 144 | item_sort_key=None, 145 | for_json=False, 146 | ignore_nan=False, 147 | int_as_string_bitcount=None, 148 | ) 149 | 150 | def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, 151 | allow_nan=True, cls=None, indent=None, separators=None, 152 | encoding='utf-8', default=None, use_decimal=True, 153 | namedtuple_as_object=True, tuple_as_array=True, 154 | bigint_as_string=False, sort_keys=False, item_sort_key=None, 155 | for_json=False, ignore_nan=False, int_as_string_bitcount=None, **kw): 156 | """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a 157 | ``.write()``-supporting file-like object). 158 | 159 | If *skipkeys* is true then ``dict`` keys that are not basic types 160 | (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) 161 | will be skipped instead of raising a ``TypeError``. 162 | 163 | If *ensure_ascii* is false, then the some chunks written to ``fp`` 164 | may be ``unicode`` instances, subject to normal Python ``str`` to 165 | ``unicode`` coercion rules. Unless ``fp.write()`` explicitly 166 | understands ``unicode`` (as in ``codecs.getwriter()``) this is likely 167 | to cause an error. 168 | 169 | If *check_circular* is false, then the circular reference check 170 | for container types will be skipped and a circular reference will 171 | result in an ``OverflowError`` (or worse). 
172 | 173 | If *allow_nan* is false, then it will be a ``ValueError`` to 174 | serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) 175 | in strict compliance of the original JSON specification, instead of using 176 | the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). See 177 | *ignore_nan* for ECMA-262 compliant behavior. 178 | 179 | If *indent* is a string, then JSON array elements and object members 180 | will be pretty-printed with a newline followed by that string repeated 181 | for each level of nesting. ``None`` (the default) selects the most compact 182 | representation without any newlines. For backwards compatibility with 183 | versions of simplejson earlier than 2.1.0, an integer is also accepted 184 | and is converted to a string with that many spaces. 185 | 186 | If specified, *separators* should be an 187 | ``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')`` 188 | if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most 189 | compact JSON representation, you should specify ``(',', ':')`` to eliminate 190 | whitespace. 191 | 192 | *encoding* is the character encoding for str instances, default is UTF-8. 193 | 194 | *default(obj)* is a function that should return a serializable version 195 | of obj or raise ``TypeError``. The default simply raises ``TypeError``. 196 | 197 | If *use_decimal* is true (default: ``True``) then decimal.Decimal 198 | will be natively serialized to JSON with full precision. 199 | 200 | If *namedtuple_as_object* is true (default: ``True``), 201 | :class:`tuple` subclasses with ``_asdict()`` methods will be encoded 202 | as JSON objects. 203 | 204 | If *tuple_as_array* is true (default: ``True``), 205 | :class:`tuple` (and subclasses) will be encoded as JSON arrays. 206 | 207 | If *bigint_as_string* is true (default: ``False``), ints 2**53 and higher 208 | or lower than -2**53 will be encoded as strings. 
This is to avoid the 209 | rounding that happens in Javascript otherwise. Note that this is still a 210 | lossy operation that will not round-trip correctly and should be used 211 | sparingly. 212 | 213 | If *int_as_string_bitcount* is a positive number (n), then int of size 214 | greater than or equal to 2**n or lower than or equal to -2**n will be 215 | encoded as strings. 216 | 217 | If specified, *item_sort_key* is a callable used to sort the items in 218 | each dictionary. This is useful if you want to sort items other than 219 | in alphabetical order by key. This option takes precedence over 220 | *sort_keys*. 221 | 222 | If *sort_keys* is true (default: ``False``), the output of dictionaries 223 | will be sorted by item. 224 | 225 | If *for_json* is true (default: ``False``), objects with a ``for_json()`` 226 | method will use the return value of that method for encoding as JSON 227 | instead of the object. 228 | 229 | If *ignore_nan* is true (default: ``False``), then out of range 230 | :class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as 231 | ``null`` in compliance with the ECMA-262 specification. If true, this will 232 | override *allow_nan*. 233 | 234 | To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the 235 | ``.default()`` method to serialize additional types), specify it with 236 | the ``cls`` kwarg. NOTE: You should use *default* or *for_json* instead 237 | of subclassing whenever possible. 
238 | 239 | """ 240 | # cached encoder 241 | if (not skipkeys and ensure_ascii and 242 | check_circular and allow_nan and 243 | cls is None and indent is None and separators is None and 244 | encoding == 'utf-8' and default is None and use_decimal 245 | and namedtuple_as_object and tuple_as_array 246 | and not bigint_as_string and int_as_string_bitcount is None 247 | and not item_sort_key and not for_json and not ignore_nan and not kw): 248 | iterable = _default_encoder.iterencode(obj) 249 | else: 250 | if cls is None: 251 | cls = JSONEncoder 252 | iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii, 253 | check_circular=check_circular, allow_nan=allow_nan, indent=indent, 254 | separators=separators, encoding=encoding, 255 | default=default, use_decimal=use_decimal, 256 | namedtuple_as_object=namedtuple_as_object, 257 | tuple_as_array=tuple_as_array, 258 | bigint_as_string=bigint_as_string, 259 | sort_keys=sort_keys, 260 | item_sort_key=item_sort_key, 261 | for_json=for_json, 262 | ignore_nan=ignore_nan, 263 | int_as_string_bitcount=int_as_string_bitcount, 264 | **kw).iterencode(obj) 265 | # could accelerate with writelines in some versions of Python, at 266 | # a debuggability cost 267 | for chunk in iterable: 268 | fp.write(chunk) 269 | 270 | 271 | def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, 272 | allow_nan=True, cls=None, indent=None, separators=None, 273 | encoding='utf-8', default=None, use_decimal=True, 274 | namedtuple_as_object=True, tuple_as_array=True, 275 | bigint_as_string=False, sort_keys=False, item_sort_key=None, 276 | for_json=False, ignore_nan=False, int_as_string_bitcount=None, **kw): 277 | """Serialize ``obj`` to a JSON formatted ``str``. 278 | 279 | If ``skipkeys`` is false then ``dict`` keys that are not basic types 280 | (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) 281 | will be skipped instead of raising a ``TypeError``. 
282 | 283 | If ``ensure_ascii`` is false, then the return value will be a 284 | ``unicode`` instance subject to normal Python ``str`` to ``unicode`` 285 | coercion rules instead of being escaped to an ASCII ``str``. 286 | 287 | If ``check_circular`` is false, then the circular reference check 288 | for container types will be skipped and a circular reference will 289 | result in an ``OverflowError`` (or worse). 290 | 291 | If ``allow_nan`` is false, then it will be a ``ValueError`` to 292 | serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in 293 | strict compliance of the JSON specification, instead of using the 294 | JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). 295 | 296 | If ``indent`` is a string, then JSON array elements and object members 297 | will be pretty-printed with a newline followed by that string repeated 298 | for each level of nesting. ``None`` (the default) selects the most compact 299 | representation without any newlines. For backwards compatibility with 300 | versions of simplejson earlier than 2.1.0, an integer is also accepted 301 | and is converted to a string with that many spaces. 302 | 303 | If specified, ``separators`` should be an 304 | ``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')`` 305 | if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most 306 | compact JSON representation, you should specify ``(',', ':')`` to eliminate 307 | whitespace. 308 | 309 | ``encoding`` is the character encoding for str instances, default is UTF-8. 310 | 311 | ``default(obj)`` is a function that should return a serializable version 312 | of obj or raise TypeError. The default simply raises TypeError. 313 | 314 | If *use_decimal* is true (default: ``True``) then decimal.Decimal 315 | will be natively serialized to JSON with full precision. 
316 | 317 | If *namedtuple_as_object* is true (default: ``True``), 318 | :class:`tuple` subclasses with ``_asdict()`` methods will be encoded 319 | as JSON objects. 320 | 321 | If *tuple_as_array* is true (default: ``True``), 322 | :class:`tuple` (and subclasses) will be encoded as JSON arrays. 323 | 324 | If *bigint_as_string* is true (not the default), ints 2**53 and higher 325 | or lower than -2**53 will be encoded as strings. This is to avoid the 326 | rounding that happens in Javascript otherwise. 327 | 328 | If *int_as_string_bitcount* is a positive number (n), then int of size 329 | greater than or equal to 2**n or lower than or equal to -2**n will be 330 | encoded as strings. 331 | 332 | If specified, *item_sort_key* is a callable used to sort the items in 333 | each dictionary. This is useful if you want to sort items other than 334 | in alphabetical order by key. This option takes precendence over 335 | *sort_keys*. 336 | 337 | If *sort_keys* is true (default: ``False``), the output of dictionaries 338 | will be sorted by item. 339 | 340 | If *for_json* is true (default: ``False``), objects with a ``for_json()`` 341 | method will use the return value of that method for encoding as JSON 342 | instead of the object. 343 | 344 | If *ignore_nan* is true (default: ``False``), then out of range 345 | :class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as 346 | ``null`` in compliance with the ECMA-262 specification. If true, this will 347 | override *allow_nan*. 348 | 349 | To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the 350 | ``.default()`` method to serialize additional types), specify it with 351 | the ``cls`` kwarg. NOTE: You should use *default* instead of subclassing 352 | whenever possible. 
353 | 354 | """ 355 | # cached encoder 356 | if ( 357 | not skipkeys and ensure_ascii and 358 | check_circular and allow_nan and 359 | cls is None and indent is None and separators is None and 360 | encoding == 'utf-8' and default is None and use_decimal 361 | and namedtuple_as_object and tuple_as_array 362 | and not bigint_as_string and int_as_string_bitcount is None 363 | and not sort_keys and not item_sort_key and not for_json 364 | and not ignore_nan and not kw 365 | ): 366 | return _default_encoder.encode(obj) 367 | if cls is None: 368 | cls = JSONEncoder 369 | return cls( 370 | skipkeys=skipkeys, ensure_ascii=ensure_ascii, 371 | check_circular=check_circular, allow_nan=allow_nan, indent=indent, 372 | separators=separators, encoding=encoding, default=default, 373 | use_decimal=use_decimal, 374 | namedtuple_as_object=namedtuple_as_object, 375 | tuple_as_array=tuple_as_array, 376 | bigint_as_string=bigint_as_string, 377 | sort_keys=sort_keys, 378 | item_sort_key=item_sort_key, 379 | for_json=for_json, 380 | ignore_nan=ignore_nan, 381 | int_as_string_bitcount=int_as_string_bitcount, 382 | **kw).encode(obj) 383 | 384 | 385 | _default_decoder = JSONDecoder(encoding=None, object_hook=None, 386 | object_pairs_hook=None) 387 | 388 | 389 | def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, 390 | parse_int=None, parse_constant=None, object_pairs_hook=None, 391 | use_decimal=False, namedtuple_as_object=True, tuple_as_array=True, 392 | **kw): 393 | """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing 394 | a JSON document) to a Python object. 395 | 396 | *encoding* determines the encoding used to interpret any 397 | :class:`str` objects decoded by this instance (``'utf-8'`` by 398 | default). It has no effect when decoding :class:`unicode` objects. 399 | 400 | Note that currently only encodings that are a superset of ASCII work, 401 | strings of other encodings should be passed in as :class:`unicode`. 
402 | 403 | *object_hook*, if specified, will be called with the result of every 404 | JSON object decoded and its return value will be used in place of the 405 | given :class:`dict`. This can be used to provide custom 406 | deserializations (e.g. to support JSON-RPC class hinting). 407 | 408 | *object_pairs_hook* is an optional function that will be called with 409 | the result of any object literal decode with an ordered list of pairs. 410 | The return value of *object_pairs_hook* will be used instead of the 411 | :class:`dict`. This feature can be used to implement custom decoders 412 | that rely on the order that the key and value pairs are decoded (for 413 | example, :func:`collections.OrderedDict` will remember the order of 414 | insertion). If *object_hook* is also defined, the *object_pairs_hook* 415 | takes priority. 416 | 417 | *parse_float*, if specified, will be called with the string of every 418 | JSON float to be decoded. By default, this is equivalent to 419 | ``float(num_str)``. This can be used to use another datatype or parser 420 | for JSON floats (e.g. :class:`decimal.Decimal`). 421 | 422 | *parse_int*, if specified, will be called with the string of every 423 | JSON int to be decoded. By default, this is equivalent to 424 | ``int(num_str)``. This can be used to use another datatype or parser 425 | for JSON integers (e.g. :class:`float`). 426 | 427 | *parse_constant*, if specified, will be called with one of the 428 | following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This 429 | can be used to raise an exception if invalid JSON numbers are 430 | encountered. 431 | 432 | If *use_decimal* is true (default: ``False``) then it implies 433 | parse_float=decimal.Decimal for parity with ``dump``. 434 | 435 | To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` 436 | kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead 437 | of subclassing whenever possible. 
438 | 439 | """ 440 | return loads(fp.read(), 441 | encoding=encoding, cls=cls, object_hook=object_hook, 442 | parse_float=parse_float, parse_int=parse_int, 443 | parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, 444 | use_decimal=use_decimal, **kw) 445 | 446 | 447 | def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, 448 | parse_int=None, parse_constant=None, object_pairs_hook=None, 449 | use_decimal=False, **kw): 450 | """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON 451 | document) to a Python object. 452 | 453 | *encoding* determines the encoding used to interpret any 454 | :class:`str` objects decoded by this instance (``'utf-8'`` by 455 | default). It has no effect when decoding :class:`unicode` objects. 456 | 457 | Note that currently only encodings that are a superset of ASCII work, 458 | strings of other encodings should be passed in as :class:`unicode`. 459 | 460 | *object_hook*, if specified, will be called with the result of every 461 | JSON object decoded and its return value will be used in place of the 462 | given :class:`dict`. This can be used to provide custom 463 | deserializations (e.g. to support JSON-RPC class hinting). 464 | 465 | *object_pairs_hook* is an optional function that will be called with 466 | the result of any object literal decode with an ordered list of pairs. 467 | The return value of *object_pairs_hook* will be used instead of the 468 | :class:`dict`. This feature can be used to implement custom decoders 469 | that rely on the order that the key and value pairs are decoded (for 470 | example, :func:`collections.OrderedDict` will remember the order of 471 | insertion). If *object_hook* is also defined, the *object_pairs_hook* 472 | takes priority. 473 | 474 | *parse_float*, if specified, will be called with the string of every 475 | JSON float to be decoded. By default, this is equivalent to 476 | ``float(num_str)``. 
This can be used to use another datatype or parser 477 | for JSON floats (e.g. :class:`decimal.Decimal`). 478 | 479 | *parse_int*, if specified, will be called with the string of every 480 | JSON int to be decoded. By default, this is equivalent to 481 | ``int(num_str)``. This can be used to use another datatype or parser 482 | for JSON integers (e.g. :class:`float`). 483 | 484 | *parse_constant*, if specified, will be called with one of the 485 | following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This 486 | can be used to raise an exception if invalid JSON numbers are 487 | encountered. 488 | 489 | If *use_decimal* is true (default: ``False``) then it implies 490 | parse_float=decimal.Decimal for parity with ``dump``. 491 | 492 | To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` 493 | kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead 494 | of subclassing whenever possible. 495 | 496 | """ 497 | if (cls is None and encoding is None and object_hook is None and 498 | parse_int is None and parse_float is None and 499 | parse_constant is None and object_pairs_hook is None 500 | and not use_decimal and not kw): 501 | return _default_decoder.decode(s) 502 | if cls is None: 503 | cls = JSONDecoder 504 | if object_hook is not None: 505 | kw['object_hook'] = object_hook 506 | if object_pairs_hook is not None: 507 | kw['object_pairs_hook'] = object_pairs_hook 508 | if parse_float is not None: 509 | kw['parse_float'] = parse_float 510 | if parse_int is not None: 511 | kw['parse_int'] = parse_int 512 | if parse_constant is not None: 513 | kw['parse_constant'] = parse_constant 514 | if use_decimal: 515 | if parse_float is not None: 516 | raise TypeError("use_decimal=True implies parse_float=Decimal") 517 | kw['parse_float'] = Decimal 518 | return cls(encoding=encoding, **kw).decode(s) 519 | 520 | 521 | def _toggle_speedups(enabled): 522 | from . import decoder as dec 523 | from . import encoder as enc 524 | from . 
import scanner as scan 525 | c_make_encoder = _import_c_make_encoder() 526 | if enabled: 527 | dec.scanstring = dec.c_scanstring or dec.py_scanstring 528 | enc.c_make_encoder = c_make_encoder 529 | enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or 530 | enc.py_encode_basestring_ascii) 531 | scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner 532 | else: 533 | dec.scanstring = dec.py_scanstring 534 | enc.c_make_encoder = None 535 | enc.encode_basestring_ascii = enc.py_encode_basestring_ascii 536 | scan.make_scanner = scan.py_make_scanner 537 | dec.make_scanner = scan.make_scanner 538 | global _default_decoder 539 | _default_decoder = JSONDecoder( 540 | encoding=None, 541 | object_hook=None, 542 | object_pairs_hook=None, 543 | ) 544 | global _default_encoder 545 | _default_encoder = JSONEncoder( 546 | skipkeys=False, 547 | ensure_ascii=True, 548 | check_circular=True, 549 | allow_nan=True, 550 | indent=None, 551 | separators=None, 552 | encoding='utf-8', 553 | default=None, 554 | ) 555 | 556 | def simple_first(kv): 557 | """Helper function to pass to item_sort_key to sort simple 558 | elements to the top, then container elements. 
559 | """ 560 | return (isinstance(kv[1], (list, dict, tuple)), kv[0]) 561 | -------------------------------------------------------------------------------- /lib2/simplejson/compat.py: -------------------------------------------------------------------------------- 1 | """Python 3 compatibility shims 2 | """ 3 | import sys 4 | if sys.version_info[0] < 3: 5 | PY3 = False 6 | def b(s): 7 | return s 8 | def u(s): 9 | return unicode(s, 'unicode_escape') 10 | import cStringIO as StringIO 11 | StringIO = BytesIO = StringIO.StringIO 12 | text_type = unicode 13 | binary_type = str 14 | string_types = (basestring,) 15 | integer_types = (int, long) 16 | unichr = unichr 17 | reload_module = reload 18 | def fromhex(s): 19 | return s.decode('hex') 20 | 21 | else: 22 | PY3 = True 23 | if sys.version_info[:2] >= (3, 4): 24 | from importlib import reload as reload_module 25 | else: 26 | from imp import reload as reload_module 27 | import codecs 28 | def b(s): 29 | return codecs.latin_1_encode(s)[0] 30 | def u(s): 31 | return s 32 | import io 33 | StringIO = io.StringIO 34 | BytesIO = io.BytesIO 35 | text_type = str 36 | binary_type = bytes 37 | string_types = (str,) 38 | integer_types = (int,) 39 | 40 | def unichr(s): 41 | return u(chr(s)) 42 | 43 | def fromhex(s): 44 | return bytes.fromhex(s) 45 | 46 | long_type = integer_types[-1] 47 | -------------------------------------------------------------------------------- /lib2/simplejson/decoder.py: -------------------------------------------------------------------------------- 1 | """Implementation of JSONDecoder 2 | """ 3 | from __future__ import absolute_import 4 | import re 5 | import sys 6 | import struct 7 | from .compat import fromhex, b, u, text_type, binary_type, PY3, unichr 8 | from .scanner import make_scanner, JSONDecodeError 9 | 10 | def _import_c_scanstring(): 11 | try: 12 | from ._speedups import scanstring 13 | return scanstring 14 | except ImportError: 15 | return None 16 | c_scanstring = _import_c_scanstring() 17 | 
18 | # NOTE (3.1.0): JSONDecodeError may still be imported from this module for 19 | # compatibility, but it was never in the __all__ 20 | __all__ = ['JSONDecoder'] 21 | 22 | FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL 23 | 24 | def _floatconstants(): 25 | _BYTES = fromhex('7FF80000000000007FF0000000000000') 26 | # The struct module in Python 2.4 would get frexp() out of range here 27 | # when an endian is specified in the format string. Fixed in Python 2.5+ 28 | if sys.byteorder != 'big': 29 | _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1] 30 | nan, inf = struct.unpack('dd', _BYTES) 31 | return nan, inf, -inf 32 | 33 | NaN, PosInf, NegInf = _floatconstants() 34 | 35 | _CONSTANTS = { 36 | '-Infinity': NegInf, 37 | 'Infinity': PosInf, 38 | 'NaN': NaN, 39 | } 40 | 41 | STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS) 42 | BACKSLASH = { 43 | '"': u('"'), '\\': u('\u005c'), '/': u('/'), 44 | 'b': u('\b'), 'f': u('\f'), 'n': u('\n'), 'r': u('\r'), 't': u('\t'), 45 | } 46 | 47 | DEFAULT_ENCODING = "utf-8" 48 | 49 | def py_scanstring(s, end, encoding=None, strict=True, 50 | _b=BACKSLASH, _m=STRINGCHUNK.match, _join=u('').join, 51 | _PY3=PY3, _maxunicode=sys.maxunicode): 52 | """Scan the string s for a JSON string. End is the index of the 53 | character in s after the quote that started the JSON string. 54 | Unescapes all valid JSON string escape sequences and raises ValueError 55 | on attempt to decode an invalid string. If strict is False then literal 56 | control characters are allowed in the string. 
57 | 58 | Returns a tuple of the decoded string and the index of the character in s 59 | after the end quote.""" 60 | if encoding is None: 61 | encoding = DEFAULT_ENCODING 62 | chunks = [] 63 | _append = chunks.append 64 | begin = end - 1 65 | while 1: 66 | chunk = _m(s, end) 67 | if chunk is None: 68 | raise JSONDecodeError( 69 | "Unterminated string starting at", s, begin) 70 | end = chunk.end() 71 | content, terminator = chunk.groups() 72 | # Content is contains zero or more unescaped string characters 73 | if content: 74 | if not _PY3 and not isinstance(content, text_type): 75 | content = text_type(content, encoding) 76 | _append(content) 77 | # Terminator is the end of string, a literal control character, 78 | # or a backslash denoting that an escape sequence follows 79 | if terminator == '"': 80 | break 81 | elif terminator != '\\': 82 | if strict: 83 | msg = "Invalid control character %r at" 84 | raise JSONDecodeError(msg, s, end) 85 | else: 86 | _append(terminator) 87 | continue 88 | try: 89 | esc = s[end] 90 | except IndexError: 91 | raise JSONDecodeError( 92 | "Unterminated string starting at", s, begin) 93 | # If not a unicode escape sequence, must be in the lookup table 94 | if esc != 'u': 95 | try: 96 | char = _b[esc] 97 | except KeyError: 98 | msg = "Invalid \\X escape sequence %r" 99 | raise JSONDecodeError(msg, s, end) 100 | end += 1 101 | else: 102 | # Unicode escape sequence 103 | msg = "Invalid \\uXXXX escape sequence" 104 | esc = s[end + 1:end + 5] 105 | escX = esc[1:2] 106 | if len(esc) != 4 or escX == 'x' or escX == 'X': 107 | raise JSONDecodeError(msg, s, end - 1) 108 | try: 109 | uni = int(esc, 16) 110 | except ValueError: 111 | raise JSONDecodeError(msg, s, end - 1) 112 | end += 5 113 | # Check for surrogate pair on UCS-4 systems 114 | # Note that this will join high/low surrogate pairs 115 | # but will also pass unpaired surrogates through 116 | if (_maxunicode > 65535 and 117 | uni & 0xfc00 == 0xd800 and 118 | s[end:end + 2] == '\\u'): 
119 | esc2 = s[end + 2:end + 6] 120 | escX = esc2[1:2] 121 | if len(esc2) == 4 and not (escX == 'x' or escX == 'X'): 122 | try: 123 | uni2 = int(esc2, 16) 124 | except ValueError: 125 | raise JSONDecodeError(msg, s, end) 126 | if uni2 & 0xfc00 == 0xdc00: 127 | uni = 0x10000 + (((uni - 0xd800) << 10) | 128 | (uni2 - 0xdc00)) 129 | end += 6 130 | char = unichr(uni) 131 | # Append the unescaped character 132 | _append(char) 133 | return _join(chunks), end 134 | 135 | 136 | # Use speedup if available 137 | scanstring = c_scanstring or py_scanstring 138 | 139 | WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS) 140 | WHITESPACE_STR = ' \t\n\r' 141 | 142 | def JSONObject(state, encoding, strict, scan_once, object_hook, 143 | object_pairs_hook, memo=None, 144 | _w=WHITESPACE.match, _ws=WHITESPACE_STR): 145 | (s, end) = state 146 | # Backwards compatibility 147 | if memo is None: 148 | memo = {} 149 | memo_get = memo.setdefault 150 | pairs = [] 151 | # Use a slice to prevent IndexError from being raised, the following 152 | # check will raise a more specific ValueError if the string is empty 153 | nextchar = s[end:end + 1] 154 | # Normally we expect nextchar == '"' 155 | if nextchar != '"': 156 | if nextchar in _ws: 157 | end = _w(s, end).end() 158 | nextchar = s[end:end + 1] 159 | # Trivial empty object 160 | if nextchar == '}': 161 | if object_pairs_hook is not None: 162 | result = object_pairs_hook(pairs) 163 | return result, end + 1 164 | pairs = {} 165 | if object_hook is not None: 166 | pairs = object_hook(pairs) 167 | return pairs, end + 1 168 | elif nextchar != '"': 169 | raise JSONDecodeError( 170 | "Expecting property name enclosed in double quotes", 171 | s, end) 172 | end += 1 173 | while True: 174 | key, end = scanstring(s, end, encoding, strict) 175 | key = memo_get(key, key) 176 | 177 | # To skip some function call overhead we optimize the fast paths where 178 | # the JSON key separator is ": " or just ":". 
179 | if s[end:end + 1] != ':': 180 | end = _w(s, end).end() 181 | if s[end:end + 1] != ':': 182 | raise JSONDecodeError("Expecting ':' delimiter", s, end) 183 | 184 | end += 1 185 | 186 | try: 187 | if s[end] in _ws: 188 | end += 1 189 | if s[end] in _ws: 190 | end = _w(s, end + 1).end() 191 | except IndexError: 192 | pass 193 | 194 | value, end = scan_once(s, end) 195 | pairs.append((key, value)) 196 | 197 | try: 198 | nextchar = s[end] 199 | if nextchar in _ws: 200 | end = _w(s, end + 1).end() 201 | nextchar = s[end] 202 | except IndexError: 203 | nextchar = '' 204 | end += 1 205 | 206 | if nextchar == '}': 207 | break 208 | elif nextchar != ',': 209 | raise JSONDecodeError("Expecting ',' delimiter or '}'", s, end - 1) 210 | 211 | try: 212 | nextchar = s[end] 213 | if nextchar in _ws: 214 | end += 1 215 | nextchar = s[end] 216 | if nextchar in _ws: 217 | end = _w(s, end + 1).end() 218 | nextchar = s[end] 219 | except IndexError: 220 | nextchar = '' 221 | 222 | end += 1 223 | if nextchar == '}': 224 | break 225 | 226 | if nextchar != '"': 227 | raise JSONDecodeError( 228 | "Expecting property name enclosed in double quotes", 229 | s, end - 1) 230 | 231 | if object_pairs_hook is not None: 232 | result = object_pairs_hook(pairs) 233 | return result, end 234 | pairs = dict(pairs) 235 | if object_hook is not None: 236 | pairs = object_hook(pairs) 237 | return pairs, end 238 | 239 | def JSONArray(state, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): 240 | (s, end) = state 241 | values = [] 242 | nextchar = s[end:end + 1] 243 | if nextchar in _ws: 244 | end = _w(s, end + 1).end() 245 | nextchar = s[end:end + 1] 246 | # Look-ahead for trivial empty array 247 | if nextchar == ']': 248 | return values, end + 1 249 | elif nextchar == '': 250 | raise JSONDecodeError("Expecting value or ']'", s, end) 251 | _append = values.append 252 | while True: 253 | value, end = scan_once(s, end) 254 | _append(value) 255 | nextchar = s[end:end + 1] 256 | if nextchar in _ws: 257 | 
end = _w(s, end + 1).end() 258 | nextchar = s[end:end + 1] 259 | end += 1 260 | if nextchar == ']': 261 | break 262 | elif nextchar != ',': 263 | raise JSONDecodeError("Expecting ',' delimiter or ']'", s, end - 1) 264 | 265 | try: 266 | if s[end] in _ws: 267 | end += 1 268 | if s[end] in _ws: 269 | end = _w(s, end + 1).end() 270 | except IndexError: 271 | pass 272 | 273 | return values, end 274 | 275 | class JSONDecoder(object): 276 | """Simple JSON decoder 277 | 278 | Performs the following translations in decoding by default: 279 | 280 | +---------------+-------------------+ 281 | | JSON | Python | 282 | +===============+===================+ 283 | | object | dict | 284 | +---------------+-------------------+ 285 | | array | list | 286 | +---------------+-------------------+ 287 | | string | str, unicode | 288 | +---------------+-------------------+ 289 | | number (int) | int, long | 290 | +---------------+-------------------+ 291 | | number (real) | float | 292 | +---------------+-------------------+ 293 | | true | True | 294 | +---------------+-------------------+ 295 | | false | False | 296 | +---------------+-------------------+ 297 | | null | None | 298 | +---------------+-------------------+ 299 | 300 | It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as 301 | their corresponding ``float`` values, which is outside the JSON spec. 302 | 303 | """ 304 | 305 | def __init__(self, encoding=None, object_hook=None, parse_float=None, 306 | parse_int=None, parse_constant=None, strict=True, 307 | object_pairs_hook=None): 308 | """ 309 | *encoding* determines the encoding used to interpret any 310 | :class:`str` objects decoded by this instance (``'utf-8'`` by 311 | default). It has no effect when decoding :class:`unicode` objects. 312 | 313 | Note that currently only encodings that are a superset of ASCII work, 314 | strings of other encodings should be passed in as :class:`unicode`. 
315 | 316 | *object_hook*, if specified, will be called with the result of every 317 | JSON object decoded and its return value will be used in place of the 318 | given :class:`dict`. This can be used to provide custom 319 | deserializations (e.g. to support JSON-RPC class hinting). 320 | 321 | *object_pairs_hook* is an optional function that will be called with 322 | the result of any object literal decode with an ordered list of pairs. 323 | The return value of *object_pairs_hook* will be used instead of the 324 | :class:`dict`. This feature can be used to implement custom decoders 325 | that rely on the order that the key and value pairs are decoded (for 326 | example, :func:`collections.OrderedDict` will remember the order of 327 | insertion). If *object_hook* is also defined, the *object_pairs_hook* 328 | takes priority. 329 | 330 | *parse_float*, if specified, will be called with the string of every 331 | JSON float to be decoded. By default, this is equivalent to 332 | ``float(num_str)``. This can be used to use another datatype or parser 333 | for JSON floats (e.g. :class:`decimal.Decimal`). 334 | 335 | *parse_int*, if specified, will be called with the string of every 336 | JSON int to be decoded. By default, this is equivalent to 337 | ``int(num_str)``. This can be used to use another datatype or parser 338 | for JSON integers (e.g. :class:`float`). 339 | 340 | *parse_constant*, if specified, will be called with one of the 341 | following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This 342 | can be used to raise an exception if invalid JSON numbers are 343 | encountered. 344 | 345 | *strict* controls the parser's behavior when it encounters an 346 | invalid control character in a string. The default setting of 347 | ``True`` means that unescaped control characters are parse errors, if 348 | ``False`` then control characters will be allowed in strings. 
349 | 350 | """ 351 | if encoding is None: 352 | encoding = DEFAULT_ENCODING 353 | self.encoding = encoding 354 | self.object_hook = object_hook 355 | self.object_pairs_hook = object_pairs_hook 356 | self.parse_float = parse_float or float 357 | self.parse_int = parse_int or int 358 | self.parse_constant = parse_constant or _CONSTANTS.__getitem__ 359 | self.strict = strict 360 | self.parse_object = JSONObject 361 | self.parse_array = JSONArray 362 | self.parse_string = scanstring 363 | self.memo = {} 364 | self.scan_once = make_scanner(self) 365 | 366 | def decode(self, s, _w=WHITESPACE.match, _PY3=PY3): 367 | """Return the Python representation of ``s`` (a ``str`` or ``unicode`` 368 | instance containing a JSON document) 369 | 370 | """ 371 | if _PY3 and isinstance(s, binary_type): 372 | s = s.decode(self.encoding) 373 | obj, end = self.raw_decode(s) 374 | end = _w(s, end).end() 375 | if end != len(s): 376 | raise JSONDecodeError("Extra data", s, end, len(s)) 377 | return obj 378 | 379 | def raw_decode(self, s, idx=0, _w=WHITESPACE.match, _PY3=PY3): 380 | """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` 381 | beginning with a JSON document) and return a 2-tuple of the Python 382 | representation and the index in ``s`` where the document ended. 383 | Optionally, ``idx`` can be used to specify an offset in ``s`` where 384 | the JSON document begins. 385 | 386 | This can be used to decode a JSON document from a string that may 387 | have extraneous data at the end. 388 | 389 | """ 390 | if idx < 0: 391 | # Ensure that raw_decode bails on negative indexes, the regex 392 | # would otherwise mask this behavior. 
#98 393 | raise JSONDecodeError('Expecting value', s, idx) 394 | if _PY3 and not isinstance(s, text_type): 395 | raise TypeError("Input string must be text, not bytes") 396 | # strip UTF-8 bom 397 | if len(s) > idx: 398 | ord0 = ord(s[idx]) 399 | if ord0 == 0xfeff: 400 | idx += 1 401 | elif ord0 == 0xef and s[idx:idx + 3] == '\xef\xbb\xbf': 402 | idx += 3 403 | return self.scan_once(s, idx=_w(s, idx).end()) 404 | -------------------------------------------------------------------------------- /lib2/simplejson/ordered_dict.py: -------------------------------------------------------------------------------- 1 | """Drop-in replacement for collections.OrderedDict by Raymond Hettinger 2 | 3 | http://code.activestate.com/recipes/576693/ 4 | 5 | """ 6 | from UserDict import DictMixin 7 | 8 | # Modified from original to support Python 2.4, see 9 | # http://code.google.com/p/simplejson/issues/detail?id=53 10 | try: 11 | all 12 | except NameError: 13 | def all(seq): 14 | for elem in seq: 15 | if not elem: 16 | return False 17 | return True 18 | 19 | class OrderedDict(dict, DictMixin): 20 | 21 | def __init__(self, *args, **kwds): 22 | if len(args) > 1: 23 | raise TypeError('expected at most 1 arguments, got %d' % len(args)) 24 | try: 25 | self.__end 26 | except AttributeError: 27 | self.clear() 28 | self.update(*args, **kwds) 29 | 30 | def clear(self): 31 | self.__end = end = [] 32 | end += [None, end, end] # sentinel node for doubly linked list 33 | self.__map = {} # key --> [key, prev, next] 34 | dict.clear(self) 35 | 36 | def __setitem__(self, key, value): 37 | if key not in self: 38 | end = self.__end 39 | curr = end[1] 40 | curr[2] = end[1] = self.__map[key] = [key, curr, end] 41 | dict.__setitem__(self, key, value) 42 | 43 | def __delitem__(self, key): 44 | dict.__delitem__(self, key) 45 | key, prev, next = self.__map.pop(key) 46 | prev[2] = next 47 | next[1] = prev 48 | 49 | def __iter__(self): 50 | end = self.__end 51 | curr = end[2] 52 | while curr is not end: 53 | 
yield curr[0] 54 | curr = curr[2] 55 | 56 | def __reversed__(self): 57 | end = self.__end 58 | curr = end[1] 59 | while curr is not end: 60 | yield curr[0] 61 | curr = curr[1] 62 | 63 | def popitem(self, last=True): 64 | if not self: 65 | raise KeyError('dictionary is empty') 66 | # Modified from original to support Python 2.4, see 67 | # http://code.google.com/p/simplejson/issues/detail?id=53 68 | if last: 69 | key = reversed(self).next() 70 | else: 71 | key = iter(self).next() 72 | value = self.pop(key) 73 | return key, value 74 | 75 | def __reduce__(self): 76 | items = [[k, self[k]] for k in self] 77 | tmp = self.__map, self.__end 78 | del self.__map, self.__end 79 | inst_dict = vars(self).copy() 80 | self.__map, self.__end = tmp 81 | if inst_dict: 82 | return (self.__class__, (items,), inst_dict) 83 | return self.__class__, (items,) 84 | 85 | def keys(self): 86 | return list(self) 87 | 88 | setdefault = DictMixin.setdefault 89 | update = DictMixin.update 90 | pop = DictMixin.pop 91 | values = DictMixin.values 92 | items = DictMixin.items 93 | iterkeys = DictMixin.iterkeys 94 | itervalues = DictMixin.itervalues 95 | iteritems = DictMixin.iteritems 96 | 97 | def __repr__(self): 98 | if not self: 99 | return '%s()' % (self.__class__.__name__,) 100 | return '%s(%r)' % (self.__class__.__name__, self.items()) 101 | 102 | def copy(self): 103 | return self.__class__(self) 104 | 105 | @classmethod 106 | def fromkeys(cls, iterable, value=None): 107 | d = cls() 108 | for key in iterable: 109 | d[key] = value 110 | return d 111 | 112 | def __eq__(self, other): 113 | if isinstance(other, OrderedDict): 114 | return len(self)==len(other) and \ 115 | all(p==q for p, q in zip(self.items(), other.items())) 116 | return dict.__eq__(self, other) 117 | 118 | def __ne__(self, other): 119 | return not self == other 120 | -------------------------------------------------------------------------------- /lib2/simplejson/scanner.py: 
-------------------------------------------------------------------------------- 1 | """JSON token scanner 2 | """ 3 | import re 4 | def _import_c_make_scanner(): 5 | try: 6 | from simplejson._speedups import make_scanner 7 | return make_scanner 8 | except ImportError: 9 | return None 10 | c_make_scanner = _import_c_make_scanner() 11 | 12 | __all__ = ['make_scanner', 'JSONDecodeError'] 13 | 14 | NUMBER_RE = re.compile( 15 | r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?', 16 | (re.VERBOSE | re.MULTILINE | re.DOTALL)) 17 | 18 | class JSONDecodeError(ValueError): 19 | """Subclass of ValueError with the following additional properties: 20 | 21 | msg: The unformatted error message 22 | doc: The JSON document being parsed 23 | pos: The start index of doc where parsing failed 24 | end: The end index of doc where parsing failed (may be None) 25 | lineno: The line corresponding to pos 26 | colno: The column corresponding to pos 27 | endlineno: The line corresponding to end (may be None) 28 | endcolno: The column corresponding to end (may be None) 29 | 30 | """ 31 | # Note that this exception is used from _speedups 32 | def __init__(self, msg, doc, pos, end=None): 33 | ValueError.__init__(self, errmsg(msg, doc, pos, end=end)) 34 | self.msg = msg 35 | self.doc = doc 36 | self.pos = pos 37 | self.end = end 38 | self.lineno, self.colno = linecol(doc, pos) 39 | if end is not None: 40 | self.endlineno, self.endcolno = linecol(doc, end) 41 | else: 42 | self.endlineno, self.endcolno = None, None 43 | 44 | def __reduce__(self): 45 | return self.__class__, (self.msg, self.doc, self.pos, self.end) 46 | 47 | 48 | def linecol(doc, pos): 49 | lineno = doc.count('\n', 0, pos) + 1 50 | if lineno == 1: 51 | colno = pos + 1 52 | else: 53 | colno = pos - doc.rindex('\n', 0, pos) 54 | return lineno, colno 55 | 56 | 57 | def errmsg(msg, doc, pos, end=None): 58 | lineno, colno = linecol(doc, pos) 59 | msg = msg.replace('%r', repr(doc[pos:pos + 1])) 60 | if end is None: 61 | fmt = '%s: line %d 
column %d (char %d)' 62 | return fmt % (msg, lineno, colno, pos) 63 | endlineno, endcolno = linecol(doc, end) 64 | fmt = '%s: line %d column %d - line %d column %d (char %d - %d)' 65 | return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end) 66 | 67 | 68 | def py_make_scanner(context): 69 | parse_object = context.parse_object 70 | parse_array = context.parse_array 71 | parse_string = context.parse_string 72 | match_number = NUMBER_RE.match 73 | encoding = context.encoding 74 | strict = context.strict 75 | parse_float = context.parse_float 76 | parse_int = context.parse_int 77 | parse_constant = context.parse_constant 78 | object_hook = context.object_hook 79 | object_pairs_hook = context.object_pairs_hook 80 | memo = context.memo 81 | 82 | def _scan_once(string, idx): 83 | errmsg = 'Expecting value' 84 | try: 85 | nextchar = string[idx] 86 | except IndexError: 87 | raise JSONDecodeError(errmsg, string, idx) 88 | 89 | if nextchar == '"': 90 | return parse_string(string, idx + 1, encoding, strict) 91 | elif nextchar == '{': 92 | return parse_object((string, idx + 1), encoding, strict, 93 | _scan_once, object_hook, object_pairs_hook, memo) 94 | elif nextchar == '[': 95 | return parse_array((string, idx + 1), _scan_once) 96 | elif nextchar == 'n' and string[idx:idx + 4] == 'null': 97 | return None, idx + 4 98 | elif nextchar == 't' and string[idx:idx + 4] == 'true': 99 | return True, idx + 4 100 | elif nextchar == 'f' and string[idx:idx + 5] == 'false': 101 | return False, idx + 5 102 | 103 | m = match_number(string, idx) 104 | if m is not None: 105 | integer, frac, exp = m.groups() 106 | if frac or exp: 107 | res = parse_float(integer + (frac or '') + (exp or '')) 108 | else: 109 | res = parse_int(integer) 110 | return res, m.end() 111 | elif nextchar == 'N' and string[idx:idx + 3] == 'NaN': 112 | return parse_constant('NaN'), idx + 3 113 | elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity': 114 | return parse_constant('Infinity'), idx + 8 115 | elif 
nextchar == '-' and string[idx:idx + 9] == '-Infinity': 116 | return parse_constant('-Infinity'), idx + 9 117 | else: 118 | raise JSONDecodeError(errmsg, string, idx) 119 | 120 | def scan_once(string, idx): 121 | if idx < 0: 122 | # Ensure the same behavior as the C speedup, otherwise 123 | # this would work for *some* negative string indices due 124 | # to the behavior of __getitem__ for strings. #98 125 | raise JSONDecodeError('Expecting value', string, idx) 126 | try: 127 | return _scan_once(string, idx) 128 | finally: 129 | memo.clear() 130 | 131 | return scan_once 132 | 133 | make_scanner = c_make_scanner or py_make_scanner 134 | -------------------------------------------------------------------------------- /lib2/simplejson/tool.py: -------------------------------------------------------------------------------- 1 | r"""Command-line tool to validate and pretty-print JSON 2 | 3 | Usage:: 4 | 5 | $ echo '{"json":"obj"}' | python -m simplejson.tool 6 | { 7 | "json": "obj" 8 | } 9 | $ echo '{ 1.2:3.4}' | python -m simplejson.tool 10 | Expecting property name: line 1 column 2 (char 2) 11 | 12 | """ 13 | from __future__ import with_statement 14 | import sys 15 | import simplejson as json 16 | 17 | def main(): 18 | if len(sys.argv) == 1: 19 | infile = sys.stdin 20 | outfile = sys.stdout 21 | elif len(sys.argv) == 2: 22 | infile = open(sys.argv[1], 'r') 23 | outfile = sys.stdout 24 | elif len(sys.argv) == 3: 25 | infile = open(sys.argv[1], 'r') 26 | outfile = open(sys.argv[2], 'w') 27 | else: 28 | raise SystemExit(sys.argv[0] + " [infile [outfile]]") 29 | with infile: 30 | try: 31 | obj = json.load(infile, 32 | object_pairs_hook=json.OrderedDict, 33 | use_decimal=True) 34 | except ValueError: 35 | raise SystemExit(sys.exc_info()[1]) 36 | with outfile: 37 | json.dump(obj, outfile, sort_keys=True, indent=' ', use_decimal=True) 38 | outfile.write('\n') 39 | 40 | 41 | if __name__ == '__main__': 42 | main() 43 | 
-------------------------------------------------------------------------------- /lib2/ssl.py: -------------------------------------------------------------------------------- 1 | # Wrapper module for _ssl, providing some additional facilities 2 | # implemented in Python. Written by Bill Janssen. 3 | 4 | """\ 5 | This module provides some more Pythonic support for SSL. 6 | 7 | Object types: 8 | 9 | SSLSocket -- subtype of socket.socket which does SSL over the socket 10 | 11 | Exceptions: 12 | 13 | SSLError -- exception raised for I/O errors 14 | 15 | Functions: 16 | 17 | cert_time_to_seconds -- convert time string used for certificate 18 | notBefore and notAfter functions to integer 19 | seconds past the Epoch (the time values 20 | returned from time.time()) 21 | 22 | fetch_server_certificate (HOST, PORT) -- fetch the certificate provided 23 | by the server running on HOST at port PORT. No 24 | validation of the certificate is performed. 25 | 26 | Integer constants: 27 | 28 | SSL_ERROR_ZERO_RETURN 29 | SSL_ERROR_WANT_READ 30 | SSL_ERROR_WANT_WRITE 31 | SSL_ERROR_WANT_X509_LOOKUP 32 | SSL_ERROR_SYSCALL 33 | SSL_ERROR_SSL 34 | SSL_ERROR_WANT_CONNECT 35 | 36 | SSL_ERROR_EOF 37 | SSL_ERROR_INVALID_ERROR_CODE 38 | 39 | The following group define certificate requirements that one side is 40 | allowing/requiring from the other side: 41 | 42 | CERT_NONE - no certificates from the other side are required (or will 43 | be looked at if provided) 44 | CERT_OPTIONAL - certificates are not required, but if provided will be 45 | validated, and if validation fails, the connection will 46 | also fail 47 | CERT_REQUIRED - certificates are required, and will be validated, and 48 | if validation fails, the connection will also fail 49 | 50 | The following constants identify various SSL protocol variants: 51 | 52 | PROTOCOL_SSLv2 53 | PROTOCOL_SSLv3 54 | PROTOCOL_SSLv23 55 | PROTOCOL_TLSv1 56 | """ 57 | 58 | import textwrap 59 | 60 | import _ssl # if we can't import it, let the error 
propagate 61 | 62 | from _ssl import SSLError 63 | from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED 64 | from _ssl import PROTOCOL_SSLv2, PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1 65 | from _ssl import RAND_status, RAND_egd, RAND_add 66 | from _ssl import \
 67 | SSL_ERROR_ZERO_RETURN, \
 68 | SSL_ERROR_WANT_READ, \
 69 | SSL_ERROR_WANT_WRITE, \
 70 | SSL_ERROR_WANT_X509_LOOKUP, \
 71 | SSL_ERROR_SYSCALL, \
 72 | SSL_ERROR_SSL, \
 73 | SSL_ERROR_WANT_CONNECT, \
 74 | SSL_ERROR_EOF, \
 75 | SSL_ERROR_INVALID_ERROR_CODE 76 | 77 | from socket import socket, _fileobject, _delegate_methods 78 | from socket import error as socket_error 79 | from socket import getnameinfo as _getnameinfo 80 | import base64 # for DER-to-PEM translation 81 | import errno 82 | 83 | class SSLSocket(socket): 84 | 85 | """This class implements a subtype of socket.socket that wraps 86 | the underlying OS socket in an SSL context when necessary, and 87 | provides read and write methods over that channel.""" 88 | 89 | def __init__(self, sock, keyfile=None, certfile=None, 90 | server_side=False, cert_reqs=CERT_NONE, 91 | ssl_version=PROTOCOL_SSLv23, ca_certs=None, 92 | do_handshake_on_connect=True, 93 | suppress_ragged_eofs=True): 94 | socket.__init__(self, _sock=sock._sock) 95 | # The initializer for socket overrides the methods send(), recv(), etc. 96 | # in the instance, which we don't need -- but we want to provide the 97 | # methods defined in SSLSocket.
98 | for attr in _delegate_methods: 99 | try: 100 | delattr(self, attr) 101 | except AttributeError: 102 | pass 103 | 104 | if certfile and not keyfile: 105 | keyfile = certfile 106 | # see if it's connected 107 | try: 108 | socket.getpeername(self) 109 | except socket_error, e: 110 | if e.errno != errno.ENOTCONN: 111 | raise 112 | # no, no connection yet 113 | self._sslobj = None 114 | else: 115 | # yes, create the SSL object 116 | self._sslobj = _ssl.sslwrap(self._sock, server_side, 117 | keyfile, certfile, 118 | cert_reqs, ssl_version, ca_certs) 119 | if do_handshake_on_connect: 120 | self.do_handshake() 121 | self.keyfile = keyfile 122 | self.certfile = certfile 123 | self.cert_reqs = cert_reqs 124 | self.ssl_version = ssl_version 125 | self.ca_certs = ca_certs 126 | self.do_handshake_on_connect = do_handshake_on_connect 127 | self.suppress_ragged_eofs = suppress_ragged_eofs 128 | self._makefile_refs = 0 129 | 130 | def read(self, len=1024): 131 | 132 | """Read up to LEN bytes and return them. 133 | Return zero-length string on EOF.""" 134 | 135 | try: 136 | return self._sslobj.read(len) 137 | except SSLError, x: 138 | if x.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs: 139 | return '' 140 | else: 141 | raise 142 | 143 | def write(self, data): 144 | 145 | """Write DATA to the underlying SSL channel. Returns 146 | number of bytes of DATA actually transmitted.""" 147 | 148 | return self._sslobj.write(data) 149 | 150 | def getpeercert(self, binary_form=False): 151 | 152 | """Returns a formatted version of the data in the 153 | certificate provided by the other end of the SSL channel. 
154 | Return None if no certificate was provided, {} if a 155 | certificate was provided, but not validated.""" 156 | 157 | return self._sslobj.peer_certificate(binary_form) 158 | 159 | def cipher(self): 160 | 161 | if not self._sslobj: 162 | return None 163 | else: 164 | return self._sslobj.cipher() 165 | 166 | def send(self, data, flags=0): 167 | if self._sslobj: 168 | if flags != 0: 169 | raise ValueError( 170 | "non-zero flags not allowed in calls to send() on %s" % 171 | self.__class__) 172 | while True: 173 | try: 174 | v = self._sslobj.write(data) 175 | except SSLError, x: 176 | if x.args[0] == SSL_ERROR_WANT_READ: 177 | return 0 178 | elif x.args[0] == SSL_ERROR_WANT_WRITE: 179 | return 0 180 | else: 181 | raise 182 | else: 183 | return v 184 | else: 185 | return socket.send(self, data, flags) 186 | 187 | def sendto(self, data, addr, flags=0): 188 | if self._sslobj: 189 | raise ValueError("sendto not allowed on instances of %s" % 190 | self.__class__) 191 | else: 192 | return socket.sendto(self, data, addr, flags) 193 | 194 | def sendall(self, data, flags=0): 195 | if self._sslobj: 196 | if flags != 0: 197 | raise ValueError( 198 | "non-zero flags not allowed in calls to sendall() on %s" % 199 | self.__class__) 200 | amount = len(data) 201 | count = 0 202 | while (count < amount): 203 | v = self.send(data[count:]) 204 | count += v 205 | return amount 206 | else: 207 | return socket.sendall(self, data, flags) 208 | 209 | def recv(self, buflen=1024, flags=0): 210 | if self._sslobj: 211 | if flags != 0: 212 | raise ValueError( 213 | "non-zero flags not allowed in calls to recv() on %s" % 214 | self.__class__) 215 | return self.read(buflen) 216 | else: 217 | return socket.recv(self, buflen, flags) 218 | 219 | def recv_into(self, buffer, nbytes=None, flags=0): 220 | if buffer and (nbytes is None): 221 | nbytes = len(buffer) 222 | elif nbytes is None: 223 | nbytes = 1024 224 | if self._sslobj: 225 | if flags != 0: 226 | raise ValueError( 227 | "non-zero flags not 
allowed in calls to recv_into() on %s" % 228 | self.__class__) 229 | tmp_buffer = self.read(nbytes) 230 | v = len(tmp_buffer) 231 | buffer[:v] = tmp_buffer 232 | return v 233 | else: 234 | return socket.recv_into(self, buffer, nbytes, flags) 235 | 236 | def recvfrom(self, addr, buflen=1024, flags=0): 237 | if self._sslobj: 238 | raise ValueError("recvfrom not allowed on instances of %s" % 239 | self.__class__) 240 | else: 241 | return socket.recvfrom(self, addr, buflen, flags) 242 | 243 | def recvfrom_into(self, buffer, nbytes=None, flags=0): 244 | if self._sslobj: 245 | raise ValueError("recvfrom_into not allowed on instances of %s" % 246 | self.__class__) 247 | else: 248 | return socket.recvfrom_into(self, buffer, nbytes, flags) 249 | 250 | def pending(self): 251 | if self._sslobj: 252 | return self._sslobj.pending() 253 | else: 254 | return 0 255 | 256 | def unwrap(self): 257 | if self._sslobj: 258 | s = self._sslobj.shutdown() 259 | self._sslobj = None 260 | return s 261 | else: 262 | raise ValueError("No SSL wrapper around " + str(self)) 263 | 264 | def shutdown(self, how): 265 | self._sslobj = None 266 | socket.shutdown(self, how) 267 | 268 | def close(self): 269 | if self._makefile_refs < 1: 270 | self._sslobj = None 271 | socket.close(self) 272 | else: 273 | self._makefile_refs -= 1 274 | 275 | def do_handshake(self): 276 | 277 | """Perform a TLS/SSL handshake.""" 278 | 279 | self._sslobj.do_handshake() 280 | 281 | def connect(self, addr): 282 | 283 | """Connects to remote ADDR, and then wraps the connection in 284 | an SSL channel.""" 285 | 286 | # Here we assume that the socket is client-side, and not 287 | # connected at the time of the call. We connect it, then wrap it. 
288 | if self._sslobj: 289 | raise ValueError("attempt to connect already-connected SSLSocket!") 290 | socket.connect(self, addr) 291 | self._sslobj = _ssl.sslwrap(self._sock, False, self.keyfile, self.certfile, 292 | self.cert_reqs, self.ssl_version, 293 | self.ca_certs) 294 | if self.do_handshake_on_connect: 295 | self.do_handshake() 296 | 297 | def accept(self): 298 | 299 | """Accepts a new connection from a remote client, and returns 300 | a tuple containing that new connection wrapped with a server-side 301 | SSL channel, and the address of the remote client.""" 302 | 303 | newsock, addr = socket.accept(self) 304 | return (SSLSocket(newsock, 305 | keyfile=self.keyfile, 306 | certfile=self.certfile, 307 | server_side=True, 308 | cert_reqs=self.cert_reqs, 309 | ssl_version=self.ssl_version, 310 | ca_certs=self.ca_certs, 311 | do_handshake_on_connect=self.do_handshake_on_connect, 312 | suppress_ragged_eofs=self.suppress_ragged_eofs), 313 | addr) 314 | 315 | def makefile(self, mode='r', bufsize=-1): 316 | 317 | """Make and return a file-like object that 318 | works with the SSL connection. Just use the code 319 | from the socket module.""" 320 | 321 | self._makefile_refs += 1 322 | # close=True so as to decrement the reference count when done with 323 | # the file-like object. 
324 | return _fileobject(self, mode, bufsize, close=True) 325 | 326 | 327 | 328 | def wrap_socket(sock, keyfile=None, certfile=None, 329 | server_side=False, cert_reqs=CERT_NONE, 330 | ssl_version=PROTOCOL_SSLv23, ca_certs=None, 331 | do_handshake_on_connect=True, 332 | suppress_ragged_eofs=True): 333 | 334 | return SSLSocket(sock, keyfile=keyfile, certfile=certfile, 335 | server_side=server_side, cert_reqs=cert_reqs, 336 | ssl_version=ssl_version, ca_certs=ca_certs, 337 | do_handshake_on_connect=do_handshake_on_connect, 338 | suppress_ragged_eofs=suppress_ragged_eofs) 339 | 340 | 341 | # some utility functions 342 | 343 | def cert_time_to_seconds(cert_time): 344 | 345 | """Takes a date-time string in standard ASN1_print form 346 | ("MON DAY 24HOUR:MINUTE:SEC YEAR TIMEZONE") and return 347 | a Python time value in seconds past the epoch.""" 348 | 349 | import time 350 | return time.mktime(time.strptime(cert_time, "%b %d %H:%M:%S %Y GMT")) 351 | 352 | PEM_HEADER = "-----BEGIN CERTIFICATE-----" 353 | PEM_FOOTER = "-----END CERTIFICATE-----" 354 | 355 | def DER_cert_to_PEM_cert(der_cert_bytes): 356 | 357 | """Takes a certificate in binary DER format and returns the 358 | PEM version of it as a string.""" 359 | 360 | if hasattr(base64, 'standard_b64encode'): 361 | # preferred because older API gets line-length wrong 362 | f = base64.standard_b64encode(der_cert_bytes) 363 | return (PEM_HEADER + '\n' + 364 | textwrap.fill(f, 64) + '\n' + 365 | PEM_FOOTER + '\n') 366 | else: 367 | return (PEM_HEADER + '\n' + 368 | base64.encodestring(der_cert_bytes) + 369 | PEM_FOOTER + '\n') 370 | 371 | def PEM_cert_to_DER_cert(pem_cert_string): 372 | 373 | """Takes a certificate in ASCII PEM format and returns the 374 | DER-encoded version of it as a byte sequence""" 375 | 376 | if not pem_cert_string.startswith(PEM_HEADER): 377 | raise ValueError("Invalid PEM encoding; must start with %s" 378 | % PEM_HEADER) 379 | if not pem_cert_string.strip().endswith(PEM_FOOTER): 380 | raise 
ValueError("Invalid PEM encoding; must end with %s" 381 | % PEM_FOOTER) 382 | d = pem_cert_string.strip()[len(PEM_HEADER):-len(PEM_FOOTER)] 383 | return base64.decodestring(d) 384 | 385 | def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv3, ca_certs=None): 386 | 387 | """Retrieve the certificate from the server at the specified address, 388 | and return it as a PEM-encoded string. 389 | If 'ca_certs' is specified, validate the server cert against it. 390 | If 'ssl_version' is specified, use it in the connection attempt.""" 391 | 392 | host, port = addr 393 | if (ca_certs is not None): 394 | cert_reqs = CERT_REQUIRED 395 | else: 396 | cert_reqs = CERT_NONE 397 | s = wrap_socket(socket(), ssl_version=ssl_version, 398 | cert_reqs=cert_reqs, ca_certs=ca_certs) 399 | s.connect(addr) 400 | dercert = s.getpeercert(True) 401 | s.close() 402 | return DER_cert_to_PEM_cert(dercert) 403 | 404 | def get_protocol_name(protocol_code): 405 | if protocol_code == PROTOCOL_TLSv1: 406 | return "TLSv1" 407 | elif protocol_code == PROTOCOL_SSLv23: 408 | return "SSLv23" 409 | elif protocol_code == PROTOCOL_SSLv2: 410 | return "SSLv2" 411 | elif protocol_code == PROTOCOL_SSLv3: 412 | return "SSLv3" 413 | else: 414 | return "" 415 | 416 | 417 | # a replacement for the old socket.ssl function 418 | 419 | def sslwrap_simple(sock, keyfile=None, certfile=None): 420 | 421 | """A replacement for the old socket.ssl function. Designed 422 | for compatibility with Python 2.5 and earlier.
Will disappear in 423 | Python 3.0.""" 424 | 425 | if hasattr(sock, "_sock"): 426 | sock = sock._sock 427 | 428 | ssl_sock = _ssl.sslwrap(sock, 0, keyfile, certfile, CERT_NONE, 429 | PROTOCOL_SSLv23, None) 430 | try: 431 | sock.getpeername() 432 | except: 433 | # no, no connection yet 434 | pass 435 | else: 436 | # yes, do the handshake 437 | ssl_sock.do_handshake() 438 | 439 | return ssl_sock 440 | -------------------------------------------------------------------------------- /lib3/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NoxArt/SublimeText2-FTPSync/5893073bf081a0c7d51dff26fac77f2163d8d71f/lib3/__init__.py -------------------------------------------------------------------------------- /lib3/idna.py: -------------------------------------------------------------------------------- 1 | # This module implements the RFCs 3490 (IDNA) and 3491 (Nameprep) 2 | 3 | import stringprep, re, codecs 4 | from unicodedata import ucd_3_2_0 as unicodedata 5 | 6 | # IDNA section 3.1 7 | dots = re.compile("[\u002E\u3002\uFF0E\uFF61]") 8 | 9 | # IDNA section 5 10 | ace_prefix = b"xn--" 11 | sace_prefix = "xn--" 12 | 13 | # This assumes query strings, so AllowUnassigned is true 14 | def nameprep(label): 15 | # Map 16 | newlabel = [] 17 | for c in label: 18 | if stringprep.in_table_b1(c): 19 | # Map to nothing 20 | continue 21 | newlabel.append(stringprep.map_table_b2(c)) 22 | label = "".join(newlabel) 23 | 24 | # Normalize 25 | label = unicodedata.normalize("NFKC", label) 26 | 27 | # Prohibit 28 | for c in label: 29 | if stringprep.in_table_c12(c) or \ 30 | stringprep.in_table_c22(c) or \ 31 | stringprep.in_table_c3(c) or \ 32 | stringprep.in_table_c4(c) or \ 33 | stringprep.in_table_c5(c) or \ 34 | stringprep.in_table_c6(c) or \ 35 | stringprep.in_table_c7(c) or \ 36 | stringprep.in_table_c8(c) or \ 37 | stringprep.in_table_c9(c): 38 | raise UnicodeError("Invalid character %r" % c) 39 | 40 | # 
Check bidi 41 | RandAL = [stringprep.in_table_d1(x) for x in label] 42 | for c in RandAL: 43 | if c: 44 | # There is a RandAL char in the string. Must perform further 45 | # tests: 46 | # 1) The characters in section 5.8 MUST be prohibited. 47 | # This is table C.8, which was already checked 48 | # 2) If a string contains any RandALCat character, the string 49 | # MUST NOT contain any LCat character. 50 | if any(stringprep.in_table_d2(x) for x in label): 51 | raise UnicodeError("Violation of BIDI requirement 2") 52 | 53 | # 3) If a string contains any RandALCat character, a 54 | # RandALCat character MUST be the first character of the 55 | # string, and a RandALCat character MUST be the last 56 | # character of the string. 57 | if not RandAL[0] or not RandAL[-1]: 58 | raise UnicodeError("Violation of BIDI requirement 3") 59 | 60 | return label 61 | 62 | def ToASCII(label): 63 | try: 64 | # Step 1: try ASCII 65 | label = label.encode("ascii") 66 | except UnicodeError: 67 | pass 68 | else: 69 | # Skip to step 3: UseSTD3ASCIIRules is false, so 70 | # Skip to step 8. 71 | if 0 < len(label) < 64: 72 | return label 73 | raise UnicodeError("label empty or too long") 74 | 75 | # Step 2: nameprep 76 | label = nameprep(label) 77 | 78 | # Step 3: UseSTD3ASCIIRules is false 79 | # Step 4: try ASCII 80 | try: 81 | label = label.encode("ascii") 82 | except UnicodeError: 83 | pass 84 | else: 85 | # Skip to step 8. 
86 | if 0 < len(label) < 64: 87 | return label 88 | raise UnicodeError("label empty or too long") 89 | 90 | # Step 5: Check ACE prefix 91 | if label.startswith(sace_prefix): 92 | raise UnicodeError("Label starts with ACE prefix") 93 | 94 | # Step 6: Encode with PUNYCODE 95 | label = label.encode("punycode") 96 | 97 | # Step 7: Prepend ACE prefix 98 | label = ace_prefix + label 99 | 100 | # Step 8: Check size 101 | if 0 < len(label) < 64: 102 | return label 103 | raise UnicodeError("label empty or too long") 104 | 105 | def ToUnicode(label): 106 | # Step 1: Check for ASCII 107 | if isinstance(label, bytes): 108 | pure_ascii = True 109 | else: 110 | try: 111 | label = label.encode("ascii") 112 | pure_ascii = True 113 | except UnicodeError: 114 | pure_ascii = False 115 | if not pure_ascii: 116 | # Step 2: Perform nameprep 117 | label = nameprep(label) 118 | # It doesn't say this, but apparently, it should be ASCII now 119 | try: 120 | label = label.encode("ascii") 121 | except UnicodeError: 122 | raise UnicodeError("Invalid character in IDN label") 123 | # Step 3: Check for ACE prefix 124 | if not label.startswith(ace_prefix): 125 | return str(label, "ascii") 126 | 127 | # Step 4: Remove ACE prefix 128 | label1 = label[len(ace_prefix):] 129 | 130 | # Step 5: Decode using PUNYCODE 131 | result = label1.decode("punycode") 132 | 133 | # Step 6: Apply ToASCII 134 | label2 = ToASCII(result) 135 | 136 | # Step 7: Compare the result of step 6 with the one of step 3 137 | # label2 will already be in lower case. 
138 | if str(label, "ascii").lower() != str(label2, "ascii"): 139 | raise UnicodeError("IDNA does not round-trip", label, label2) 140 | 141 | # Step 8: return the result of step 5 142 | return result 143 | 144 | ### Codec APIs 145 | 146 | class Codec(codecs.Codec): 147 | def encode(self, input, errors='strict'): 148 | 149 | if errors != 'strict': 150 | # IDNA is quite clear that implementations must be strict 151 | raise UnicodeError("unsupported error handling "+errors) 152 | 153 | if not input: 154 | return b'', 0 155 | 156 | try: 157 | result = input.encode('ascii') 158 | except UnicodeEncodeError: 159 | pass 160 | else: 161 | # ASCII name: fast path 162 | labels = result.split(b'.') 163 | for label in labels[:-1]: 164 | if not (0 < len(label) < 64): 165 | raise UnicodeError("label empty or too long") 166 | if len(labels[-1]) >= 64: 167 | raise UnicodeError("label too long") 168 | return result, len(input) 169 | 170 | result = bytearray() 171 | labels = dots.split(input) 172 | if labels and not labels[-1]: 173 | trailing_dot = b'.' 174 | del labels[-1] 175 | else: 176 | trailing_dot = b'' 177 | for label in labels: 178 | if result: 179 | # Join with U+002E 180 | result.extend(b'.') 181 | result.extend(ToASCII(label)) 182 | return bytes(result+trailing_dot), len(input) 183 | 184 | def decode(self, input, errors='strict'): 185 | 186 | if errors != 'strict': 187 | raise UnicodeError("Unsupported error handling "+errors) 188 | 189 | if not input: 190 | return "", 0 191 | 192 | # IDNA allows decoding to operate on Unicode strings, too. 193 | if not isinstance(input, bytes): 194 | # XXX obviously wrong, see #3232 195 | input = bytes(input) 196 | 197 | if ace_prefix not in input: 198 | # Fast path 199 | try: 200 | return input.decode('ascii'), len(input) 201 | except UnicodeDecodeError: 202 | pass 203 | 204 | labels = input.split(b".") 205 | 206 | if labels and len(labels[-1]) == 0: 207 | trailing_dot = '.' 
208 | del labels[-1] 209 | else: 210 | trailing_dot = '' 211 | 212 | result = [] 213 | for label in labels: 214 | result.append(ToUnicode(label)) 215 | 216 | return ".".join(result)+trailing_dot, len(input) 217 | 218 | class IncrementalEncoder(codecs.BufferedIncrementalEncoder): 219 | def _buffer_encode(self, input, errors, final): 220 | if errors != 'strict': 221 | # IDNA is quite clear that implementations must be strict 222 | raise UnicodeError("unsupported error handling "+errors) 223 | 224 | if not input: 225 | return (b'', 0) 226 | 227 | labels = dots.split(input) 228 | trailing_dot = b'' 229 | if labels: 230 | if not labels[-1]: 231 | trailing_dot = b'.' 232 | del labels[-1] 233 | elif not final: 234 | # Keep potentially unfinished label until the next call 235 | del labels[-1] 236 | if labels: 237 | trailing_dot = b'.' 238 | 239 | result = bytearray() 240 | size = 0 241 | for label in labels: 242 | if size: 243 | # Join with U+002E 244 | result.extend(b'.') 245 | size += 1 246 | result.extend(ToASCII(label)) 247 | size += len(label) 248 | 249 | result += trailing_dot 250 | size += len(trailing_dot) 251 | return (bytes(result), size) 252 | 253 | class IncrementalDecoder(codecs.BufferedIncrementalDecoder): 254 | def _buffer_decode(self, input, errors, final): 255 | if errors != 'strict': 256 | raise UnicodeError("Unsupported error handling "+errors) 257 | 258 | if not input: 259 | return ("", 0) 260 | 261 | # IDNA allows decoding to operate on Unicode strings, too. 262 | if isinstance(input, str): 263 | labels = dots.split(input) 264 | else: 265 | # Must be ASCII string 266 | input = str(input, "ascii") 267 | labels = input.split(".") 268 | 269 | trailing_dot = '' 270 | if labels: 271 | if not labels[-1]: 272 | trailing_dot = '.' 273 | del labels[-1] 274 | elif not final: 275 | # Keep potentially unfinished label until the next call 276 | del labels[-1] 277 | if labels: 278 | trailing_dot = '.' 
279 | 280 | result = [] 281 | size = 0 282 | for label in labels: 283 | result.append(ToUnicode(label)) 284 | if size: 285 | size += 1 286 | size += len(label) 287 | 288 | result = ".".join(result) + trailing_dot 289 | size += len(trailing_dot) 290 | return (result, size) 291 | 292 | class StreamWriter(Codec,codecs.StreamWriter): 293 | pass 294 | 295 | class StreamReader(Codec,codecs.StreamReader): 296 | pass 297 | 298 | ### encodings module API 299 | 300 | def getregentry(): 301 | return codecs.CodecInfo( 302 | name='idna', 303 | encode=Codec().encode, 304 | decode=Codec().decode, 305 | incrementalencoder=IncrementalEncoder, 306 | incrementaldecoder=IncrementalDecoder, 307 | streamwriter=StreamWriter, 308 | streamreader=StreamReader, 309 | ) -------------------------------------------------------------------------------- /lib3/simplejson/LICENSE.txt: -------------------------------------------------------------------------------- 1 | simplejson is dual-licensed software. It is available under the terms 2 | of the MIT license, or the Academic Free License version 2.1. The full 3 | text of each license agreement is included below. This code is also 4 | licensed to the Python Software Foundation (PSF) under a Contributor 5 | Agreement. 6 | 7 | MIT License 8 | =========== 9 | 10 | Copyright (c) 2006 Bob Ippolito 11 | 12 | Permission is hereby granted, free of charge, to any person obtaining a copy of 13 | this software and associated documentation files (the "Software"), to deal in 14 | the Software without restriction, including without limitation the rights to 15 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies 16 | of the Software, and to permit persons to whom the Software is furnished to do 17 | so, subject to the following conditions: 18 | 19 | The above copyright notice and this permission notice shall be included in all 20 | copies or substantial portions of the Software. 
21 | 22 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 23 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 24 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 25 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 26 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 27 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 28 | SOFTWARE. 29 | 30 | Academic Free License v. 2.1 31 | ============================ 32 | 33 | Copyright (c) 2006 Bob Ippolito. All rights reserved. 34 | 35 | This Academic Free License (the "License") applies to any original work of authorship (the "Original Work") whose owner (the "Licensor") has placed the following notice immediately following the copyright notice for the Original Work: 36 | 37 | Licensed under the Academic Free License version 2.1 38 | 39 | 1) Grant of Copyright License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license to do the following: 40 | 41 | a) to reproduce the Original Work in copies; 42 | 43 | b) to prepare derivative works ("Derivative Works") based upon the Original Work; 44 | 45 | c) to distribute copies of the Original Work and Derivative Works to the public; 46 | 47 | d) to perform the Original Work publicly; and 48 | 49 | e) to display the Original Work publicly. 50 | 51 | 2) Grant of Patent License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license, under patent claims owned or controlled by the Licensor that are embodied in the Original Work as furnished by the Licensor, to make, use, sell and offer for sale the Original Work and Derivative Works. 52 | 53 | 3) Grant of Source Code License. 
The term "Source Code" means the preferred form of the Original Work for making modifications to it and all available documentation describing how to modify the Original Work. Licensor hereby agrees to provide a machine-readable copy of the Source Code of the Original Work along with each copy of the Original Work that Licensor distributes. Licensor reserves the right to satisfy this obligation by placing a machine-readable copy of the Source Code in an information repository reasonably calculated to permit inexpensive and convenient access by You for as long as Licensor continues to distribute the Original Work, and by publishing the address of that information repository in a notice immediately following the copyright notice that applies to the Original Work. 54 | 55 | 4) Exclusions From License Grant. Neither the names of Licensor, nor the names of any contributors to the Original Work, nor any of their trademarks or service marks, may be used to endorse or promote products derived from this Original Work without express prior written permission of the Licensor. Nothing in this License shall be deemed to grant any rights to trademarks, copyrights, patents, trade secrets or any other intellectual property of Licensor except as expressly stated herein. No patent license is granted to make, use, sell or offer to sell embodiments of any patent claims other than the licensed claims defined in Section 2. No right is granted to the trademarks of Licensor even if such marks are included in the Original Work. Nothing in this License shall be interpreted to prohibit Licensor from licensing under different terms from this License any Original Work that Licensor otherwise would have a right to license. 56 | 57 | 5) This section intentionally omitted. 58 | 59 | 6) Attribution Rights. 
You must retain, in the Source Code of any Derivative Works that You create, all copyright, patent or trademark notices from the Source Code of the Original Work, as well as any notices of licensing and any descriptive text identified therein as an "Attribution Notice." You must cause the Source Code for any Derivative Works that You create to carry a prominent Attribution Notice reasonably calculated to inform recipients that You have modified the Original Work. 60 | 61 | 7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that the copyright in and to the Original Work and the patent rights granted herein by Licensor are owned by the Licensor or are sublicensed to You under the terms of this License with the permission of the contributor(s) of those copyrights and patent rights. Except as expressly stated in the immediately proceeding sentence, the Original Work is provided under this License on an "AS IS" BASIS and WITHOUT WARRANTY, either express or implied, including, without limitation, the warranties of NON-INFRINGEMENT, MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this License. No license to Original Work is granted hereunder except under this disclaimer. 62 | 63 | 8) Limitation of Liability. Under no circumstances and under no legal theory, whether in tort (including negligence), contract, or otherwise, shall the Licensor be liable to any person for any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or the use of the Original Work including, without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses. 
This limitation of liability shall not apply to liability for death or personal injury resulting from Licensor's negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You. 64 | 65 | 9) Acceptance and Termination. If You distribute copies of the Original Work or a Derivative Work, You must make a reasonable effort under the circumstances to obtain the express assent of recipients to the terms of this License. Nothing else but this License (or another written agreement between Licensor and You) grants You permission to create Derivative Works based upon the Original Work or to exercise any of the rights granted in Section 1 herein, and any attempt to do so except under the terms of this License (or another written agreement between Licensor and You) is expressly prohibited by U.S. copyright law, the equivalent laws of other countries, and by international treaty. Therefore, by exercising any of the rights granted to You in Section 1 herein, You indicate Your acceptance of this License and all of its terms and conditions. 66 | 67 | 10) Termination for Patent Action. This License shall terminate automatically and You may no longer exercise any of the rights granted to You by this License as of the date You commence an action, including a cross-claim or counterclaim, against Licensor or any licensee alleging that the Original Work infringes a patent. This termination provision shall not apply for an action alleging patent infringement by combinations of the Original Work with other software or hardware. 68 | 69 | 11) Jurisdiction, Venue and Governing Law. Any action or suit relating to this License may be brought only in the courts of a jurisdiction wherein the Licensor resides or in which Licensor conducts its primary business, and under the laws of that jurisdiction excluding its conflict-of-law provisions. 
The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any use of the Original Work outside the scope of this License or after its termination shall be subject to the requirements and penalties of the U.S. Copyright Act, 17 U.S.C. § 101 et seq., the equivalent laws of other countries, and international treaty. This section shall survive the termination of this License. 70 | 71 | 12) Attorneys Fees. In any action to enforce the terms of this License or seeking damages relating thereto, the prevailing party shall be entitled to recover its costs and expenses, including, without limitation, reasonable attorneys' fees and costs incurred in connection with such action, including any appeal of such action. This section shall survive the termination of this License. 72 | 73 | 13) Miscellaneous. This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. 74 | 75 | 14) Definition of "You" in This License. "You" throughout this License, whether in upper or lower case, means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with you. For purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 76 | 77 | 15) Right to Use. You may use the Original Work in all ways not otherwise restricted or conditioned by this License or by law, and Licensor promises not to interfere with or be responsible for such uses by You. 
78 | 79 | This license is Copyright (C) 2003-2004 Lawrence E. Rosen. All rights reserved. Permission is hereby granted to copy and distribute this license without modification. This license may not be modified without the express written permission of its copyright owner. 80 | -------------------------------------------------------------------------------- /lib3/simplejson/compat.py: -------------------------------------------------------------------------------- 1 | """Python 3 compatibility shims 2 | """ 3 | import sys 4 | if sys.version_info[0] < 3: 5 | PY3 = False 6 | def b(s): 7 | return s 8 | def u(s): 9 | return unicode(s, 'unicode_escape') 10 | import cStringIO as StringIO 11 | StringIO = BytesIO = StringIO.StringIO 12 | text_type = unicode 13 | binary_type = str 14 | string_types = (basestring,) 15 | integer_types = (int, long) 16 | unichr = unichr 17 | reload_module = reload 18 | def fromhex(s): 19 | return s.decode('hex') 20 | 21 | else: 22 | PY3 = True 23 | if sys.version_info[:2] >= (3, 4): 24 | from importlib import reload as reload_module 25 | else: 26 | from imp import reload as reload_module 27 | import codecs 28 | def b(s): 29 | return codecs.latin_1_encode(s)[0] 30 | def u(s): 31 | return s 32 | import io 33 | StringIO = io.StringIO 34 | BytesIO = io.BytesIO 35 | text_type = str 36 | binary_type = bytes 37 | string_types = (str,) 38 | integer_types = (int,) 39 | 40 | def unichr(s): 41 | return u(chr(s)) 42 | 43 | def fromhex(s): 44 | return bytes.fromhex(s) 45 | 46 | long_type = integer_types[-1] 47 | -------------------------------------------------------------------------------- /lib3/simplejson/decoder.py: -------------------------------------------------------------------------------- 1 | """Implementation of JSONDecoder 2 | """ 3 | from __future__ import absolute_import 4 | import re 5 | import sys 6 | import struct 7 | from .compat import fromhex, b, u, text_type, binary_type, PY3, unichr 8 | from .scanner import make_scanner, 
JSONDecodeError 9 | 10 | def _import_c_scanstring(): 11 | try: 12 | from ._speedups import scanstring 13 | return scanstring 14 | except ImportError: 15 | return None 16 | c_scanstring = _import_c_scanstring() 17 | 18 | # NOTE (3.1.0): JSONDecodeError may still be imported from this module for 19 | # compatibility, but it was never in the __all__ 20 | __all__ = ['JSONDecoder'] 21 | 22 | FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL 23 | 24 | def _floatconstants(): 25 | _BYTES = fromhex('7FF80000000000007FF0000000000000') 26 | # The struct module in Python 2.4 would get frexp() out of range here 27 | # when an endian is specified in the format string. Fixed in Python 2.5+ 28 | if sys.byteorder != 'big': 29 | _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1] 30 | nan, inf = struct.unpack('dd', _BYTES) 31 | return nan, inf, -inf 32 | 33 | NaN, PosInf, NegInf = _floatconstants() 34 | 35 | _CONSTANTS = { 36 | '-Infinity': NegInf, 37 | 'Infinity': PosInf, 38 | 'NaN': NaN, 39 | } 40 | 41 | STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS) 42 | BACKSLASH = { 43 | '"': u('"'), '\\': u('\u005c'), '/': u('/'), 44 | 'b': u('\b'), 'f': u('\f'), 'n': u('\n'), 'r': u('\r'), 't': u('\t'), 45 | } 46 | 47 | DEFAULT_ENCODING = "utf-8" 48 | 49 | def py_scanstring(s, end, encoding=None, strict=True, 50 | _b=BACKSLASH, _m=STRINGCHUNK.match, _join=u('').join, 51 | _PY3=PY3, _maxunicode=sys.maxunicode): 52 | """Scan the string s for a JSON string. End is the index of the 53 | character in s after the quote that started the JSON string. 54 | Unescapes all valid JSON string escape sequences and raises ValueError 55 | on attempt to decode an invalid string. If strict is False then literal 56 | control characters are allowed in the string. 
57 | 58 | Returns a tuple of the decoded string and the index of the character in s 59 | after the end quote.""" 60 | if encoding is None: 61 | encoding = DEFAULT_ENCODING 62 | chunks = [] 63 | _append = chunks.append 64 | begin = end - 1 65 | while 1: 66 | chunk = _m(s, end) 67 | if chunk is None: 68 | raise JSONDecodeError( 69 | "Unterminated string starting at", s, begin) 70 | end = chunk.end() 71 | content, terminator = chunk.groups() 72 | # Content is contains zero or more unescaped string characters 73 | if content: 74 | if not _PY3 and not isinstance(content, text_type): 75 | content = text_type(content, encoding) 76 | _append(content) 77 | # Terminator is the end of string, a literal control character, 78 | # or a backslash denoting that an escape sequence follows 79 | if terminator == '"': 80 | break 81 | elif terminator != '\\': 82 | if strict: 83 | msg = "Invalid control character %r at" 84 | raise JSONDecodeError(msg, s, end) 85 | else: 86 | _append(terminator) 87 | continue 88 | try: 89 | esc = s[end] 90 | except IndexError: 91 | raise JSONDecodeError( 92 | "Unterminated string starting at", s, begin) 93 | # If not a unicode escape sequence, must be in the lookup table 94 | if esc != 'u': 95 | try: 96 | char = _b[esc] 97 | except KeyError: 98 | msg = "Invalid \\X escape sequence %r" 99 | raise JSONDecodeError(msg, s, end) 100 | end += 1 101 | else: 102 | # Unicode escape sequence 103 | msg = "Invalid \\uXXXX escape sequence" 104 | esc = s[end + 1:end + 5] 105 | escX = esc[1:2] 106 | if len(esc) != 4 or escX == 'x' or escX == 'X': 107 | raise JSONDecodeError(msg, s, end - 1) 108 | try: 109 | uni = int(esc, 16) 110 | except ValueError: 111 | raise JSONDecodeError(msg, s, end - 1) 112 | end += 5 113 | # Check for surrogate pair on UCS-4 systems 114 | # Note that this will join high/low surrogate pairs 115 | # but will also pass unpaired surrogates through 116 | if (_maxunicode > 65535 and 117 | uni & 0xfc00 == 0xd800 and 118 | s[end:end + 2] == '\\u'): 
119 | esc2 = s[end + 2:end + 6] 120 | escX = esc2[1:2] 121 | if len(esc2) == 4 and not (escX == 'x' or escX == 'X'): 122 | try: 123 | uni2 = int(esc2, 16) 124 | except ValueError: 125 | raise JSONDecodeError(msg, s, end) 126 | if uni2 & 0xfc00 == 0xdc00: 127 | uni = 0x10000 + (((uni - 0xd800) << 10) | 128 | (uni2 - 0xdc00)) 129 | end += 6 130 | char = unichr(uni) 131 | # Append the unescaped character 132 | _append(char) 133 | return _join(chunks), end 134 | 135 | 136 | # Use speedup if available 137 | scanstring = c_scanstring or py_scanstring 138 | 139 | WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS) 140 | WHITESPACE_STR = ' \t\n\r' 141 | 142 | def JSONObject(state, encoding, strict, scan_once, object_hook, 143 | object_pairs_hook, memo=None, 144 | _w=WHITESPACE.match, _ws=WHITESPACE_STR): 145 | (s, end) = state 146 | # Backwards compatibility 147 | if memo is None: 148 | memo = {} 149 | memo_get = memo.setdefault 150 | pairs = [] 151 | # Use a slice to prevent IndexError from being raised, the following 152 | # check will raise a more specific ValueError if the string is empty 153 | nextchar = s[end:end + 1] 154 | # Normally we expect nextchar == '"' 155 | if nextchar != '"': 156 | if nextchar in _ws: 157 | end = _w(s, end).end() 158 | nextchar = s[end:end + 1] 159 | # Trivial empty object 160 | if nextchar == '}': 161 | if object_pairs_hook is not None: 162 | result = object_pairs_hook(pairs) 163 | return result, end + 1 164 | pairs = {} 165 | if object_hook is not None: 166 | pairs = object_hook(pairs) 167 | return pairs, end + 1 168 | elif nextchar != '"': 169 | raise JSONDecodeError( 170 | "Expecting property name enclosed in double quotes", 171 | s, end) 172 | end += 1 173 | while True: 174 | key, end = scanstring(s, end, encoding, strict) 175 | key = memo_get(key, key) 176 | 177 | # To skip some function call overhead we optimize the fast paths where 178 | # the JSON key separator is ": " or just ":". 
179 | if s[end:end + 1] != ':': 180 | end = _w(s, end).end() 181 | if s[end:end + 1] != ':': 182 | raise JSONDecodeError("Expecting ':' delimiter", s, end) 183 | 184 | end += 1 185 | 186 | try: 187 | if s[end] in _ws: 188 | end += 1 189 | if s[end] in _ws: 190 | end = _w(s, end + 1).end() 191 | except IndexError: 192 | pass 193 | 194 | value, end = scan_once(s, end) 195 | pairs.append((key, value)) 196 | 197 | try: 198 | nextchar = s[end] 199 | if nextchar in _ws: 200 | end = _w(s, end + 1).end() 201 | nextchar = s[end] 202 | except IndexError: 203 | nextchar = '' 204 | end += 1 205 | 206 | if nextchar == '}': 207 | break 208 | elif nextchar != ',': 209 | raise JSONDecodeError("Expecting ',' delimiter or '}'", s, end - 1) 210 | 211 | try: 212 | nextchar = s[end] 213 | if nextchar in _ws: 214 | end += 1 215 | nextchar = s[end] 216 | if nextchar in _ws: 217 | end = _w(s, end + 1).end() 218 | nextchar = s[end] 219 | except IndexError: 220 | nextchar = '' 221 | 222 | end += 1 223 | if nextchar == '}': 224 | break 225 | 226 | if nextchar != '"': 227 | raise JSONDecodeError( 228 | "Expecting property name enclosed in double quotes", 229 | s, end - 1) 230 | 231 | if object_pairs_hook is not None: 232 | result = object_pairs_hook(pairs) 233 | return result, end 234 | pairs = dict(pairs) 235 | if object_hook is not None: 236 | pairs = object_hook(pairs) 237 | return pairs, end 238 | 239 | def JSONArray(state, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): 240 | (s, end) = state 241 | values = [] 242 | nextchar = s[end:end + 1] 243 | if nextchar in _ws: 244 | end = _w(s, end + 1).end() 245 | nextchar = s[end:end + 1] 246 | # Look-ahead for trivial empty array 247 | if nextchar == ']': 248 | return values, end + 1 249 | elif nextchar == '': 250 | raise JSONDecodeError("Expecting value or ']'", s, end) 251 | _append = values.append 252 | while True: 253 | value, end = scan_once(s, end) 254 | _append(value) 255 | nextchar = s[end:end + 1] 256 | if nextchar in _ws: 257 | 
end = _w(s, end + 1).end() 258 | nextchar = s[end:end + 1] 259 | end += 1 260 | if nextchar == ']': 261 | break 262 | elif nextchar != ',': 263 | raise JSONDecodeError("Expecting ',' delimiter or ']'", s, end - 1) 264 | 265 | try: 266 | if s[end] in _ws: 267 | end += 1 268 | if s[end] in _ws: 269 | end = _w(s, end + 1).end() 270 | except IndexError: 271 | pass 272 | 273 | return values, end 274 | 275 | class JSONDecoder(object): 276 | """Simple JSON decoder 277 | 278 | Performs the following translations in decoding by default: 279 | 280 | +---------------+-------------------+ 281 | | JSON | Python | 282 | +===============+===================+ 283 | | object | dict | 284 | +---------------+-------------------+ 285 | | array | list | 286 | +---------------+-------------------+ 287 | | string | str, unicode | 288 | +---------------+-------------------+ 289 | | number (int) | int, long | 290 | +---------------+-------------------+ 291 | | number (real) | float | 292 | +---------------+-------------------+ 293 | | true | True | 294 | +---------------+-------------------+ 295 | | false | False | 296 | +---------------+-------------------+ 297 | | null | None | 298 | +---------------+-------------------+ 299 | 300 | It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as 301 | their corresponding ``float`` values, which is outside the JSON spec. 302 | 303 | """ 304 | 305 | def __init__(self, encoding=None, object_hook=None, parse_float=None, 306 | parse_int=None, parse_constant=None, strict=True, 307 | object_pairs_hook=None): 308 | """ 309 | *encoding* determines the encoding used to interpret any 310 | :class:`str` objects decoded by this instance (``'utf-8'`` by 311 | default). It has no effect when decoding :class:`unicode` objects. 312 | 313 | Note that currently only encodings that are a superset of ASCII work, 314 | strings of other encodings should be passed in as :class:`unicode`. 
315 | 316 | *object_hook*, if specified, will be called with the result of every 317 | JSON object decoded and its return value will be used in place of the 318 | given :class:`dict`. This can be used to provide custom 319 | deserializations (e.g. to support JSON-RPC class hinting). 320 | 321 | *object_pairs_hook* is an optional function that will be called with 322 | the result of any object literal decode with an ordered list of pairs. 323 | The return value of *object_pairs_hook* will be used instead of the 324 | :class:`dict`. This feature can be used to implement custom decoders 325 | that rely on the order that the key and value pairs are decoded (for 326 | example, :func:`collections.OrderedDict` will remember the order of 327 | insertion). If *object_hook* is also defined, the *object_pairs_hook* 328 | takes priority. 329 | 330 | *parse_float*, if specified, will be called with the string of every 331 | JSON float to be decoded. By default, this is equivalent to 332 | ``float(num_str)``. This can be used to use another datatype or parser 333 | for JSON floats (e.g. :class:`decimal.Decimal`). 334 | 335 | *parse_int*, if specified, will be called with the string of every 336 | JSON int to be decoded. By default, this is equivalent to 337 | ``int(num_str)``. This can be used to use another datatype or parser 338 | for JSON integers (e.g. :class:`float`). 339 | 340 | *parse_constant*, if specified, will be called with one of the 341 | following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This 342 | can be used to raise an exception if invalid JSON numbers are 343 | encountered. 344 | 345 | *strict* controls the parser's behavior when it encounters an 346 | invalid control character in a string. The default setting of 347 | ``True`` means that unescaped control characters are parse errors, if 348 | ``False`` then control characters will be allowed in strings. 
349 | 350 | """ 351 | if encoding is None: 352 | encoding = DEFAULT_ENCODING 353 | self.encoding = encoding 354 | self.object_hook = object_hook 355 | self.object_pairs_hook = object_pairs_hook 356 | self.parse_float = parse_float or float 357 | self.parse_int = parse_int or int 358 | self.parse_constant = parse_constant or _CONSTANTS.__getitem__ 359 | self.strict = strict 360 | self.parse_object = JSONObject 361 | self.parse_array = JSONArray 362 | self.parse_string = scanstring 363 | self.memo = {} 364 | self.scan_once = make_scanner(self) 365 | 366 | def decode(self, s, _w=WHITESPACE.match, _PY3=PY3): 367 | """Return the Python representation of ``s`` (a ``str`` or ``unicode`` 368 | instance containing a JSON document) 369 | 370 | """ 371 | if _PY3 and isinstance(s, binary_type): 372 | s = s.decode(self.encoding) 373 | obj, end = self.raw_decode(s) 374 | end = _w(s, end).end() 375 | if end != len(s): 376 | raise JSONDecodeError("Extra data", s, end, len(s)) 377 | return obj 378 | 379 | def raw_decode(self, s, idx=0, _w=WHITESPACE.match, _PY3=PY3): 380 | """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` 381 | beginning with a JSON document) and return a 2-tuple of the Python 382 | representation and the index in ``s`` where the document ended. 383 | Optionally, ``idx`` can be used to specify an offset in ``s`` where 384 | the JSON document begins. 385 | 386 | This can be used to decode a JSON document from a string that may 387 | have extraneous data at the end. 388 | 389 | """ 390 | if idx < 0: 391 | # Ensure that raw_decode bails on negative indexes, the regex 392 | # would otherwise mask this behavior. 
#98 393 | raise JSONDecodeError('Expecting value', s, idx) 394 | if _PY3 and not isinstance(s, text_type): 395 | raise TypeError("Input string must be text, not bytes") 396 | # strip UTF-8 bom 397 | if len(s) > idx: 398 | ord0 = ord(s[idx]) 399 | if ord0 == 0xfeff: 400 | idx += 1 401 | elif ord0 == 0xef and s[idx:idx + 3] == '\xef\xbb\xbf': 402 | idx += 3 403 | return self.scan_once(s, idx=_w(s, idx).end()) 404 | -------------------------------------------------------------------------------- /lib3/simplejson/ordered_dict.py: -------------------------------------------------------------------------------- 1 | """Drop-in replacement for collections.OrderedDict by Raymond Hettinger 2 | 3 | http://code.activestate.com/recipes/576693/ 4 | 5 | """ 6 | from UserDict import DictMixin 7 | 8 | # Modified from original to support Python 2.4, see 9 | # http://code.google.com/p/simplejson/issues/detail?id=53 10 | try: 11 | all 12 | except NameError: 13 | def all(seq): 14 | for elem in seq: 15 | if not elem: 16 | return False 17 | return True 18 | 19 | class OrderedDict(dict, DictMixin): 20 | 21 | def __init__(self, *args, **kwds): 22 | if len(args) > 1: 23 | raise TypeError('expected at most 1 arguments, got %d' % len(args)) 24 | try: 25 | self.__end 26 | except AttributeError: 27 | self.clear() 28 | self.update(*args, **kwds) 29 | 30 | def clear(self): 31 | self.__end = end = [] 32 | end += [None, end, end] # sentinel node for doubly linked list 33 | self.__map = {} # key --> [key, prev, next] 34 | dict.clear(self) 35 | 36 | def __setitem__(self, key, value): 37 | if key not in self: 38 | end = self.__end 39 | curr = end[1] 40 | curr[2] = end[1] = self.__map[key] = [key, curr, end] 41 | dict.__setitem__(self, key, value) 42 | 43 | def __delitem__(self, key): 44 | dict.__delitem__(self, key) 45 | key, prev, next = self.__map.pop(key) 46 | prev[2] = next 47 | next[1] = prev 48 | 49 | def __iter__(self): 50 | end = self.__end 51 | curr = end[2] 52 | while curr is not end: 53 | 
yield curr[0] 54 | curr = curr[2] 55 | 56 | def __reversed__(self): 57 | end = self.__end 58 | curr = end[1] 59 | while curr is not end: 60 | yield curr[0] 61 | curr = curr[1] 62 | 63 | def popitem(self, last=True): 64 | if not self: 65 | raise KeyError('dictionary is empty') 66 | # Modified from original to support Python 2.4, see 67 | # http://code.google.com/p/simplejson/issues/detail?id=53 68 | if last: 69 | key = reversed(self).next() 70 | else: 71 | key = iter(self).next() 72 | value = self.pop(key) 73 | return key, value 74 | 75 | def __reduce__(self): 76 | items = [[k, self[k]] for k in self] 77 | tmp = self.__map, self.__end 78 | del self.__map, self.__end 79 | inst_dict = vars(self).copy() 80 | self.__map, self.__end = tmp 81 | if inst_dict: 82 | return (self.__class__, (items,), inst_dict) 83 | return self.__class__, (items,) 84 | 85 | def keys(self): 86 | return list(self) 87 | 88 | setdefault = DictMixin.setdefault 89 | update = DictMixin.update 90 | pop = DictMixin.pop 91 | values = DictMixin.values 92 | items = DictMixin.items 93 | iterkeys = DictMixin.iterkeys 94 | itervalues = DictMixin.itervalues 95 | iteritems = DictMixin.iteritems 96 | 97 | def __repr__(self): 98 | if not self: 99 | return '%s()' % (self.__class__.__name__,) 100 | return '%s(%r)' % (self.__class__.__name__, self.items()) 101 | 102 | def copy(self): 103 | return self.__class__(self) 104 | 105 | @classmethod 106 | def fromkeys(cls, iterable, value=None): 107 | d = cls() 108 | for key in iterable: 109 | d[key] = value 110 | return d 111 | 112 | def __eq__(self, other): 113 | if isinstance(other, OrderedDict): 114 | return len(self)==len(other) and \ 115 | all(p==q for p, q in zip(self.items(), other.items())) 116 | return dict.__eq__(self, other) 117 | 118 | def __ne__(self, other): 119 | return not self == other 120 | -------------------------------------------------------------------------------- /lib3/simplejson/scanner.py: 
# --------------------------------------------------------------------------
# /lib3/simplejson/scanner.py
# --------------------------------------------------------------------------
"""JSON token scanner
"""
import re

def _import_c_make_scanner():
    """Return the C-accelerated scanner, or None if it is not available."""
    # FIX: import _speedups relative to this vendored package, matching
    # decoder.py's ``from ._speedups import scanstring``.  The original
    # absolute ``from simplejson._speedups import make_scanner`` could bind
    # the C scanner of a globally installed simplejson, whose
    # JSONDecodeError is a *different* class than the one defined below,
    # breaking ``except JSONDecodeError`` in callers of this vendored copy.
    try:
        from ._speedups import make_scanner
        return make_scanner
    except ImportError:
        return None
c_make_scanner = _import_c_make_scanner()

__all__ = ['make_scanner', 'JSONDecodeError']

NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))

class JSONDecodeError(ValueError):
    """Subclass of ValueError with the following additional properties:

    msg: The unformatted error message
    doc: The JSON document being parsed
    pos: The start index of doc where parsing failed
    end: The end index of doc where parsing failed (may be None)
    lineno: The line corresponding to pos
    colno: The column corresponding to pos
    endlineno: The line corresponding to end (may be None)
    endcolno: The column corresponding to end (may be None)

    """
    # Note that this exception is used from _speedups
    def __init__(self, msg, doc, pos, end=None):
        ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.end = end
        self.lineno, self.colno = linecol(doc, pos)
        if end is not None:
            self.endlineno, self.endcolno = linecol(doc, end)
        else:
            self.endlineno, self.endcolno = None, None

    def __reduce__(self):
        # Support pickling; ValueError's default reduce would lose the
        # extra attributes.
        return self.__class__, (self.msg, self.doc, self.pos, self.end)


def linecol(doc, pos):
    """Return the 1-based (line, column) of character index *pos* in *doc*."""
    lineno = doc.count('\n', 0, pos) + 1
    if lineno == 1:
        colno = pos + 1
    else:
        colno = pos - doc.rindex('\n', 0, pos)
    return lineno, colno


def errmsg(msg, doc, pos, end=None):
    """Format *msg* with line/column context for an error at doc[pos:end]."""
    lineno, colno = linecol(doc, pos)
    # '%r' in msg is a placeholder for the offending character, if any.
    msg = msg.replace('%r', repr(doc[pos:pos + 1]))
    if end is None:
        fmt = '%s: line %d column %d (char %d)'
        return fmt % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
    return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)


def py_make_scanner(context):
    """Build a pure-Python ``scan_once(string, idx)`` closure for *context*
    (a JSONDecoder instance supplying the parse_* callbacks)."""
    # Bind everything to locals once; _scan_once is the hot path.
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    object_pairs_hook = context.object_pairs_hook
    memo = context.memo

    def _scan_once(string, idx):
        errmsg = 'Expecting value'
        try:
            nextchar = string[idx]
        except IndexError:
            raise JSONDecodeError(errmsg, string, idx)

        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            return parse_object((string, idx + 1), encoding, strict,
                _scan_once, object_hook, object_pairs_hook, memo)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5

        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise JSONDecodeError(errmsg, string, idx)

    def scan_once(string, idx):
        if idx < 0:
            # Ensure the same behavior as the C speedup, otherwise
            # this would work for *some* negative string indices due
            # to the behavior of __getitem__ for strings. #98
            raise JSONDecodeError('Expecting value', string, idx)
        try:
            return _scan_once(string, idx)
        finally:
            # The key memo only deduplicates within a single document.
            memo.clear()

    return scan_once

# Use the C speedup if available.
make_scanner = c_make_scanner or py_make_scanner
# --------------------------------------------------------------------------
# /lib3/simplejson/tool.py
# --------------------------------------------------------------------------
r"""Command-line tool to validate and pretty-print JSON

Usage::

    $ echo '{"json":"obj"}' | python -m simplejson.tool
    {
        "json": "obj"
    }
    $ echo '{ 1.2:3.4}' | python -m simplejson.tool
    Expecting property name: line 1 column 2 (char 2)

"""
# NOTE: the original ``from __future__ import with_statement`` is dropped:
# a __future__ import is only legal at the very top of a module, and the
# with statement is standard since Python 2.6 / all Python 3 (lib3 target).
import sys

def main():
    """Read JSON from stdin or argv[1] and pretty-print to stdout/argv[2].

    Exits via SystemExit with a usage string on bad arguments, or with the
    decode error message on invalid JSON.
    """
    # Imported lazily so merely importing this module does not require the
    # (vendored) simplejson package to be resolvable.
    import simplejson as json
    if len(sys.argv) == 1:
        infile = sys.stdin
        outfile = sys.stdout
    elif len(sys.argv) == 2:
        infile = open(sys.argv[1], 'r')
        outfile = sys.stdout
    elif len(sys.argv) == 3:
        infile = open(sys.argv[1], 'r')
        outfile = open(sys.argv[2], 'w')
    else:
        raise SystemExit(sys.argv[0] + " [infile [outfile]]")
    with infile:
        try:
            obj = json.load(infile,
                            object_pairs_hook=json.OrderedDict,
                            use_decimal=True)
        except ValueError:
            raise SystemExit(sys.exc_info()[1])
    with outfile:
        json.dump(obj, outfile, sort_keys=True, indent='    ', use_decimal=True)
        outfile.write('\n')


if __name__ == '__main__':
    main()
-------------------------------------------------------------------------------- /lib3/ssl.py: -------------------------------------------------------------------------------- 1 | # Wrapper module for _ssl, providing some additional facilities 2 | # implemented in Python. Written by Bill Janssen. 3 | 4 | """This module provides some more Pythonic support for SSL. 5 | 6 | Object types: 7 | 8 | SSLSocket -- subtype of socket.socket which does SSL over the socket 9 | 10 | Exceptions: 11 | 12 | SSLError -- exception raised for I/O errors 13 | 14 | Functions: 15 | 16 | cert_time_to_seconds -- convert time string used for certificate 17 | notBefore and notAfter functions to integer 18 | seconds past the Epoch (the time values 19 | returned from time.time()) 20 | 21 | fetch_server_certificate (HOST, PORT) -- fetch the certificate provided 22 | by the server running on HOST at port PORT. No 23 | validation of the certificate is performed. 24 | 25 | Integer constants: 26 | 27 | SSL_ERROR_ZERO_RETURN 28 | SSL_ERROR_WANT_READ 29 | SSL_ERROR_WANT_WRITE 30 | SSL_ERROR_WANT_X509_LOOKUP 31 | SSL_ERROR_SYSCALL 32 | SSL_ERROR_SSL 33 | SSL_ERROR_WANT_CONNECT 34 | 35 | SSL_ERROR_EOF 36 | SSL_ERROR_INVALID_ERROR_CODE 37 | 38 | The following group define certificate requirements that one side is 39 | allowing/requiring from the other side: 40 | 41 | CERT_NONE - no certificates from the other side are required (or will 42 | be looked at if provided) 43 | CERT_OPTIONAL - certificates are not required, but if provided will be 44 | validated, and if validation fails, the connection will 45 | also fail 46 | CERT_REQUIRED - certificates are required, and will be validated, and 47 | if validation fails, the connection will also fail 48 | 49 | The following constants identify various SSL protocol variants: 50 | 51 | PROTOCOL_SSLv2 52 | PROTOCOL_SSLv3 53 | PROTOCOL_SSLv23 54 | PROTOCOL_TLSv1 55 | """ 56 | 57 | import textwrap 58 | import re 59 | 60 | import _ssl # if we can't import it, let the 
error propagate 61 | 62 | from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION 63 | from _ssl import _SSLContext, SSLError 64 | from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED 65 | from _ssl import (PROTOCOL_SSLv2, PROTOCOL_SSLv3, PROTOCOL_SSLv23, 66 | PROTOCOL_TLSv1) 67 | from _ssl import OP_ALL, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1 68 | from _ssl import RAND_status, RAND_egd, RAND_add 69 | from _ssl import ( 70 | SSL_ERROR_ZERO_RETURN, 71 | SSL_ERROR_WANT_READ, 72 | SSL_ERROR_WANT_WRITE, 73 | SSL_ERROR_WANT_X509_LOOKUP, 74 | SSL_ERROR_SYSCALL, 75 | SSL_ERROR_SSL, 76 | SSL_ERROR_WANT_CONNECT, 77 | SSL_ERROR_EOF, 78 | SSL_ERROR_INVALID_ERROR_CODE, 79 | ) 80 | from _ssl import HAS_SNI 81 | 82 | from socket import getnameinfo as _getnameinfo 83 | from socket import error as socket_error 84 | from socket import socket, AF_INET, SOCK_STREAM 85 | import base64 # for DER-to-PEM translation 86 | import traceback 87 | import errno 88 | 89 | 90 | class CertificateError(ValueError): 91 | pass 92 | 93 | 94 | def _dnsname_to_pat(dn): 95 | pats = [] 96 | for frag in dn.split(r'.'): 97 | if frag == '*': 98 | # When '*' is a fragment by itself, it matches a non-empty dotless 99 | # fragment. 100 | pats.append('[^.]+') 101 | else: 102 | # Otherwise, '*' matches any dotless fragment. 103 | frag = re.escape(frag) 104 | pats.append(frag.replace(r'\*', '[^.]*')) 105 | return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) 106 | 107 | 108 | def match_hostname(cert, hostname): 109 | """Verify that *cert* (in decoded format as returned by 110 | SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules 111 | are mostly followed, but IP addresses are not accepted for *hostname*. 112 | 113 | CertificateError is raised on failure. On success, the function 114 | returns nothing. 
115 | """ 116 | if not cert: 117 | raise ValueError("empty or no certificate") 118 | dnsnames = [] 119 | san = cert.get('subjectAltName', ()) 120 | for key, value in san: 121 | if key == 'DNS': 122 | if _dnsname_to_pat(value).match(hostname): 123 | return 124 | dnsnames.append(value) 125 | if not san: 126 | # The subject is only checked when subjectAltName is empty 127 | for sub in cert.get('subject', ()): 128 | for key, value in sub: 129 | # XXX according to RFC 2818, the most specific Common Name 130 | # must be used. 131 | if key == 'commonName': 132 | if _dnsname_to_pat(value).match(hostname): 133 | return 134 | dnsnames.append(value) 135 | if len(dnsnames) > 1: 136 | raise CertificateError("hostname %r " 137 | "doesn't match either of %s" 138 | % (hostname, ', '.join(map(repr, dnsnames)))) 139 | elif len(dnsnames) == 1: 140 | raise CertificateError("hostname %r " 141 | "doesn't match %r" 142 | % (hostname, dnsnames[0])) 143 | else: 144 | raise CertificateError("no appropriate commonName or " 145 | "subjectAltName fields were found") 146 | 147 | 148 | class SSLContext(_SSLContext): 149 | """An SSLContext holds various SSL-related configuration options and 150 | data, such as certificates and possibly a private key.""" 151 | 152 | __slots__ = ('protocol',) 153 | 154 | def __new__(cls, protocol, *args, **kwargs): 155 | return _SSLContext.__new__(cls, protocol) 156 | 157 | def __init__(self, protocol): 158 | self.protocol = protocol 159 | 160 | def wrap_socket(self, sock, server_side=False, 161 | do_handshake_on_connect=True, 162 | suppress_ragged_eofs=True, 163 | server_hostname=None): 164 | return SSLSocket(sock=sock, server_side=server_side, 165 | do_handshake_on_connect=do_handshake_on_connect, 166 | suppress_ragged_eofs=suppress_ragged_eofs, 167 | server_hostname=server_hostname, 168 | _context=self) 169 | 170 | 171 | class SSLSocket(socket): 172 | """This class implements a subtype of socket.socket that wraps 173 | the underlying OS socket in an SSL context 
when necessary, and 174 | provides read and write methods over that channel.""" 175 | 176 | def __init__(self, sock=None, keyfile=None, certfile=None, 177 | server_side=False, cert_reqs=CERT_NONE, 178 | ssl_version=PROTOCOL_SSLv23, ca_certs=None, 179 | do_handshake_on_connect=True, 180 | family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None, 181 | suppress_ragged_eofs=True, ciphers=None, 182 | server_hostname=None, 183 | _context=None): 184 | 185 | if _context: 186 | self.context = _context 187 | else: 188 | if server_side and not certfile: 189 | raise ValueError("certfile must be specified for server-side " 190 | "operations") 191 | if keyfile and not certfile: 192 | raise ValueError("certfile must be specified") 193 | if certfile and not keyfile: 194 | keyfile = certfile 195 | self.context = SSLContext(ssl_version) 196 | self.context.verify_mode = cert_reqs 197 | if ca_certs: 198 | self.context.load_verify_locations(ca_certs) 199 | if certfile: 200 | self.context.load_cert_chain(certfile, keyfile) 201 | if ciphers: 202 | self.context.set_ciphers(ciphers) 203 | self.keyfile = keyfile 204 | self.certfile = certfile 205 | self.cert_reqs = cert_reqs 206 | self.ssl_version = ssl_version 207 | self.ca_certs = ca_certs 208 | self.ciphers = ciphers 209 | if server_side and server_hostname: 210 | raise ValueError("server_hostname can only be specified " 211 | "in client mode") 212 | self.server_side = server_side 213 | self.server_hostname = server_hostname 214 | self.do_handshake_on_connect = do_handshake_on_connect 215 | self.suppress_ragged_eofs = suppress_ragged_eofs 216 | connected = False 217 | if sock is not None: 218 | socket.__init__(self, 219 | family=sock.family, 220 | type=sock.type, 221 | proto=sock.proto, 222 | fileno=sock.fileno()) 223 | self.settimeout(sock.gettimeout()) 224 | # see if it's connected 225 | try: 226 | sock.getpeername() 227 | except socket_error as e: 228 | if e.errno != errno.ENOTCONN: 229 | raise 230 | else: 231 | connected = True 232 | 
sock.detach() 233 | elif fileno is not None: 234 | socket.__init__(self, fileno=fileno) 235 | else: 236 | socket.__init__(self, family=family, type=type, proto=proto) 237 | 238 | self._closed = False 239 | self._sslobj = None 240 | self._connected = connected 241 | if connected: 242 | # create the SSL object 243 | try: 244 | self._sslobj = self.context._wrap_socket(self, server_side, 245 | server_hostname) 246 | if do_handshake_on_connect: 247 | timeout = self.gettimeout() 248 | if timeout == 0.0: 249 | # non-blocking 250 | raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets") 251 | self.do_handshake() 252 | 253 | except socket_error as x: 254 | self.close() 255 | raise x 256 | 257 | def dup(self): 258 | raise NotImplemented("Can't dup() %s instances" % 259 | self.__class__.__name__) 260 | 261 | def _checkClosed(self, msg=None): 262 | # raise an exception here if you wish to check for spurious closes 263 | pass 264 | 265 | def read(self, len=0, buffer=None): 266 | """Read up to LEN bytes and return them. 267 | Return zero-length string on EOF.""" 268 | 269 | self._checkClosed() 270 | try: 271 | if buffer is not None: 272 | v = self._sslobj.read(len, buffer) 273 | else: 274 | v = self._sslobj.read(len or 1024) 275 | return v 276 | except SSLError as x: 277 | if x.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs: 278 | if buffer is not None: 279 | return 0 280 | else: 281 | return b'' 282 | else: 283 | raise 284 | 285 | def write(self, data): 286 | """Write DATA to the underlying SSL channel. Returns 287 | number of bytes of DATA actually transmitted.""" 288 | 289 | self._checkClosed() 290 | return self._sslobj.write(data) 291 | 292 | def getpeercert(self, binary_form=False): 293 | """Returns a formatted version of the data in the 294 | certificate provided by the other end of the SSL channel. 
295 | Return None if no certificate was provided, {} if a 296 | certificate was provided, but not validated.""" 297 | 298 | self._checkClosed() 299 | return self._sslobj.peer_certificate(binary_form) 300 | 301 | def cipher(self): 302 | self._checkClosed() 303 | if not self._sslobj: 304 | return None 305 | else: 306 | return self._sslobj.cipher() 307 | 308 | def send(self, data, flags=0): 309 | self._checkClosed() 310 | if self._sslobj: 311 | if flags != 0: 312 | raise ValueError( 313 | "non-zero flags not allowed in calls to send() on %s" % 314 | self.__class__) 315 | while True: 316 | try: 317 | v = self._sslobj.write(data) 318 | except SSLError as x: 319 | if x.args[0] == SSL_ERROR_WANT_READ: 320 | return 0 321 | elif x.args[0] == SSL_ERROR_WANT_WRITE: 322 | return 0 323 | else: 324 | raise 325 | else: 326 | return v 327 | else: 328 | return socket.send(self, data, flags) 329 | 330 | def sendto(self, data, flags_or_addr, addr=None): 331 | self._checkClosed() 332 | if self._sslobj: 333 | raise ValueError("sendto not allowed on instances of %s" % 334 | self.__class__) 335 | elif addr is None: 336 | return socket.sendto(self, data, flags_or_addr) 337 | else: 338 | return socket.sendto(self, data, flags_or_addr, addr) 339 | 340 | def sendall(self, data, flags=0): 341 | self._checkClosed() 342 | if self._sslobj: 343 | if flags != 0: 344 | raise ValueError( 345 | "non-zero flags not allowed in calls to sendall() on %s" % 346 | self.__class__) 347 | amount = len(data) 348 | count = 0 349 | while (count < amount): 350 | v = self.send(data[count:]) 351 | count += v 352 | return amount 353 | else: 354 | return socket.sendall(self, data, flags) 355 | 356 | def recv(self, buflen=1024, flags=0): 357 | self._checkClosed() 358 | if self._sslobj: 359 | if flags != 0: 360 | raise ValueError( 361 | "non-zero flags not allowed in calls to recv() on %s" % 362 | self.__class__) 363 | return self.read(buflen) 364 | else: 365 | return socket.recv(self, buflen, flags) 366 | 367 | def 
recv_into(self, buffer, nbytes=None, flags=0): 368 | self._checkClosed() 369 | if buffer and (nbytes is None): 370 | nbytes = len(buffer) 371 | elif nbytes is None: 372 | nbytes = 1024 373 | if self._sslobj: 374 | if flags != 0: 375 | raise ValueError( 376 | "non-zero flags not allowed in calls to recv_into() on %s" % 377 | self.__class__) 378 | return self.read(nbytes, buffer) 379 | else: 380 | return socket.recv_into(self, buffer, nbytes, flags) 381 | 382 | def recvfrom(self, buflen=1024, flags=0): 383 | self._checkClosed() 384 | if self._sslobj: 385 | raise ValueError("recvfrom not allowed on instances of %s" % 386 | self.__class__) 387 | else: 388 | return socket.recvfrom(self, buflen, flags) 389 | 390 | def recvfrom_into(self, buffer, nbytes=None, flags=0): 391 | self._checkClosed() 392 | if self._sslobj: 393 | raise ValueError("recvfrom_into not allowed on instances of %s" % 394 | self.__class__) 395 | else: 396 | return socket.recvfrom_into(self, buffer, nbytes, flags) 397 | 398 | def pending(self): 399 | self._checkClosed() 400 | if self._sslobj: 401 | return self._sslobj.pending() 402 | else: 403 | return 0 404 | 405 | def shutdown(self, how): 406 | self._checkClosed() 407 | self._sslobj = None 408 | socket.shutdown(self, how) 409 | 410 | def unwrap(self): 411 | if self._sslobj: 412 | s = self._sslobj.shutdown() 413 | self._sslobj = None 414 | return s 415 | else: 416 | raise ValueError("No SSL wrapper around " + str(self)) 417 | 418 | def _real_close(self): 419 | self._sslobj = None 420 | # self._closed = True 421 | socket._real_close(self) 422 | 423 | def do_handshake(self, block=False): 424 | """Perform a TLS/SSL handshake.""" 425 | 426 | timeout = self.gettimeout() 427 | try: 428 | if timeout == 0.0 and block: 429 | self.settimeout(None) 430 | self._sslobj.do_handshake() 431 | finally: 432 | self.settimeout(timeout) 433 | 434 | def _real_connect(self, addr, return_errno): 435 | if self.server_side: 436 | raise ValueError("can't connect in server-side 
mode") 437 | # Here we assume that the socket is client-side, and not 438 | # connected at the time of the call. We connect it, then wrap it. 439 | if self._connected: 440 | raise ValueError("attempt to connect already-connected SSLSocket!") 441 | self._sslobj = self.context._wrap_socket(self, False, self.server_hostname) 442 | try: 443 | socket.connect(self, addr) 444 | if self.do_handshake_on_connect: 445 | self.do_handshake() 446 | except socket_error as e: 447 | if return_errno: 448 | return e.errno 449 | else: 450 | self._sslobj = None 451 | raise e 452 | self._connected = True 453 | return 0 454 | 455 | def connect(self, addr): 456 | """Connects to remote ADDR, and then wraps the connection in 457 | an SSL channel.""" 458 | self._real_connect(addr, False) 459 | 460 | def connect_ex(self, addr): 461 | """Connects to remote ADDR, and then wraps the connection in 462 | an SSL channel.""" 463 | return self._real_connect(addr, True) 464 | 465 | def accept(self): 466 | """Accepts a new connection from a remote client, and returns 467 | a tuple containing that new connection wrapped with a server-side 468 | SSL channel, and the address of the remote client.""" 469 | 470 | newsock, addr = socket.accept(self) 471 | return (SSLSocket(sock=newsock, 472 | keyfile=self.keyfile, certfile=self.certfile, 473 | server_side=True, 474 | cert_reqs=self.cert_reqs, 475 | ssl_version=self.ssl_version, 476 | ca_certs=self.ca_certs, 477 | ciphers=self.ciphers, 478 | do_handshake_on_connect= 479 | self.do_handshake_on_connect), 480 | addr) 481 | 482 | def __del__(self): 483 | # sys.stderr.write("__del__ on %s\n" % repr(self)) 484 | self._real_close() 485 | 486 | 487 | def wrap_socket(sock, keyfile=None, certfile=None, 488 | server_side=False, cert_reqs=CERT_NONE, 489 | ssl_version=PROTOCOL_SSLv23, ca_certs=None, 490 | do_handshake_on_connect=True, 491 | suppress_ragged_eofs=True, ciphers=None): 492 | 493 | return SSLSocket(sock=sock, keyfile=keyfile, certfile=certfile, 494 | 
server_side=server_side, cert_reqs=cert_reqs, 495 | ssl_version=ssl_version, ca_certs=ca_certs, 496 | do_handshake_on_connect=do_handshake_on_connect, 497 | suppress_ragged_eofs=suppress_ragged_eofs, 498 | ciphers=ciphers) 499 | 500 | # some utility functions 501 | 502 | def cert_time_to_seconds(cert_time): 503 | """Takes a date-time string in standard ASN1_print form 504 | ("MON DAY 24HOUR:MINUTE:SEC YEAR TIMEZONE") and return 505 | a Python time value in seconds past the epoch.""" 506 | 507 | import time 508 | return time.mktime(time.strptime(cert_time, "%b %d %H:%M:%S %Y GMT")) 509 | 510 | PEM_HEADER = "-----BEGIN CERTIFICATE-----" 511 | PEM_FOOTER = "-----END CERTIFICATE-----" 512 | 513 | def DER_cert_to_PEM_cert(der_cert_bytes): 514 | """Takes a certificate in binary DER format and returns the 515 | PEM version of it as a string.""" 516 | 517 | f = str(base64.standard_b64encode(der_cert_bytes), 'ASCII', 'strict') 518 | return (PEM_HEADER + '\n' + 519 | textwrap.fill(f, 64) + '\n' + 520 | PEM_FOOTER + '\n') 521 | 522 | def PEM_cert_to_DER_cert(pem_cert_string): 523 | """Takes a certificate in ASCII PEM format and returns the 524 | DER-encoded version of it as a byte sequence""" 525 | 526 | if not pem_cert_string.startswith(PEM_HEADER): 527 | raise ValueError("Invalid PEM encoding; must start with %s" 528 | % PEM_HEADER) 529 | if not pem_cert_string.strip().endswith(PEM_FOOTER): 530 | raise ValueError("Invalid PEM encoding; must end with %s" 531 | % PEM_FOOTER) 532 | d = pem_cert_string.strip()[len(PEM_HEADER):-len(PEM_FOOTER)] 533 | return base64.decodebytes(d.encode('ASCII', 'strict')) 534 | 535 | def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv3, ca_certs=None): 536 | """Retrieve the certificate from the server at the specified address, 537 | and return it as a PEM-encoded string. 538 | If 'ca_certs' is specified, validate the server cert against it. 
539 | If 'ssl_version' is specified, use it in the connection attempt.""" 540 | 541 | host, port = addr 542 | if (ca_certs is not None): 543 | cert_reqs = CERT_REQUIRED 544 | else: 545 | cert_reqs = CERT_NONE 546 | s = wrap_socket(socket(), ssl_version=ssl_version, 547 | cert_reqs=cert_reqs, ca_certs=ca_certs) 548 | s.connect(addr) 549 | dercert = s.getpeercert(True) 550 | s.close() 551 | return DER_cert_to_PEM_cert(dercert) 552 | 553 | def get_protocol_name(protocol_code): 554 | if protocol_code == PROTOCOL_TLSv1: 555 | return "TLSv1" 556 | elif protocol_code == PROTOCOL_SSLv23: 557 | return "SSLv23" 558 | elif protocol_code == PROTOCOL_SSLv2: 559 | return "SSLv2" 560 | elif protocol_code == PROTOCOL_SSLv3: 561 | return "SSLv3" 562 | else: 563 | return "" 564 | --------------------------------------------------------------------------------