├── .circleci └── config.yml ├── .editorconfig ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── deepcode ├── __init__.py ├── analysis.py ├── auth.py ├── bundle.py ├── cli.py ├── connection.py ├── constants.py ├── files.py ├── formatter.py ├── git_utils.py └── utils.py ├── docs └── Development.md ├── poetry.lock ├── pyproject.toml └── tests ├── .dcignore ├── __init__.py ├── sample-repo ├── .dcignore ├── .dockerignore ├── .eslintrc.json ├── AnnotatorTest.cpp ├── Dockerfile ├── GitHubAccessTokenScrambler12.java ├── README.md ├── app.js ├── controllers │ └── sharks.js ├── db.js ├── main.js ├── models │ └── sharks.js └── routes │ ├── index.js │ └── sharks.js └── test_deepcode.py /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | jobs: 4 | build: 5 | working_directory: ~/cli-project 6 | docker: 7 | - image: circleci/python:3.8 8 | steps: 9 | - checkout 10 | - run: curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python - 11 | - run: poetry install && poetry build 12 | - run: poetry run pytest --maxfail=1 tests 13 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig is awesome: https://EditorConfig.org 2 | 3 | # top-most EditorConfig file 4 | root = true 5 | 6 | # Unix-style newlines with a newline ending every file 7 | [*] 8 | end_of_line = lf 9 | insert_final_newline = true 10 | charset = utf-8 11 | trim_trailing_whitespace = true 12 | max_line_length = 120 13 | 14 | # 4 space indentation 15 | [*.py] 16 | indent_style = space 17 | indent_size = 4 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | venv 3 | *.pyc 4 | /*.egg-info 5 | dist 6 | .vscode 7 | .pytest_cache 8 | .idea 9 | .dccache 10 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [1.0.18] - 2020-09-07 2 | - Improved tests 3 | 4 | ## [1.0.17] - 2020-06-17 5 | - Analyzing remote git repository with . (dot) in repository name 6 | 7 | ## [1.0.16] - 2020-06-16 8 | - Analyzing remote git repository with specific commit hash 9 | 10 | ## [1.0.15] - 2020-06-10 11 | - Tiny fix of calling webbrowser for log-in page 12 | 13 | ## [1.0.14] - 2020-06-08 14 | - Removed explicit dependency from asyncio, as it's included to python itself and conflicts with some virtual environments 15 | 16 | ## [1.0.13] - 2020-05-04 17 | - Added a way to specify custom 'source' argument 18 | 19 | ## [1.0.12] - 2020-04-16 20 | - Added symlink support 21 | - Added direct file analysis 22 | 23 | ## [1.0.11] - 2020-03-27 24 | - Fixed bug with analysis of huge repostories 25 | - Fixed relative path, when analysis only one path 26 | - Improved file/folder ignoring patterns 27 | 28 | ## [1.0.10] - 2020-03-23 29 | - Added new argument "severity" to CLI 30 | 31 | ## [1.0.9] - 2020-03-23 32 | - Changed status from Beta to Stable 33 | 34 | ## [1.0.8] - 2020-03-23 35 | - Added C/C++ into supported languages 36 | 37 | ## [1.0.7] - 2020-03-10 38 | - Improved processing of very large folders 39 | 40 | ## [1.0.6] - 2020-02-24 41 | - Proper exit codes to use the CLI in pipelines 42 | 43 | ## [1.0.5] - 2020-02-24 44 | - Optimized file paths to send only relative paths 45 | 46 | ## [1.0.4] - 2020-02-18 47 | - Fixed file processing in Windows 48 | 49 | ## [1.0.0] - 2020-02-14 50 | - Rewritten from scratch 51 | - Much faster and reliable execution 52 | - Backwards incompatible changes 53 | - Stringent python requirements: 3.6+ 54 | 55 | ## [0.0.8] - 2020-02-02 56 | - Removed confirmation step for uploading folders 57 | - Simplified logging 58 | 59 | ## [0.0.7] - 2020-01-16 60 | - Added support of very large folders 61 | - Bugfixes 62 | 63 | ## [0.0.5] - 2020-01-15 64 | - Hotfixes 65 | - Added "--version" command argument 66 | 67 | ## [0.0.4] - 2019-11-20 68 | - Hotfixes 69 | - Improved multi threading 70 | 71 | ## [0.0.2 - 0.0.3] - 2019-11-18 72 | - Hotfixes 73 | - README and classifier improvements 74 | 75 | ## [0.0.1] - 2019-11-14 76 | ### Added 77 | - Initial release 78 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) [2019] [DeepCode AG] 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DeepCode's command line interface. 2 | 3 | [![deepcode](https://www.deepcode.ai/api/gh/badge?key=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJwbGF0Zm9ybTEiOiJnaCIsIm93bmVyMSI6IkRlZXBDb2RlQUkiLCJyZXBvMSI6ImNsaSIsImluY2x1ZGVMaW50IjpmYWxzZSwiYXV0aG9ySWQiOjEyNDY5LCJpYXQiOjE1OTYwOTY5Njd9.NAHffzuy6KvoVYXF6VyTIVHccXqr2ZGW5ATcv7PlsqI)](https://www.deepcode.ai/app/gh/DeepCodeAI/cli/_/dashboard?utm_content=gh%2FDeepCodeAI%2Fcli) 4 | 5 | ## Installation 6 | 7 | Minimal supported python version is: 3.6.5 8 | 9 | ### Installation of published package from PyPI 10 | 11 | Pip: 12 | ```bash 13 | pip install deepcode 14 | ``` 15 | 16 | Poetry: 17 | ```bash 18 | poetry add deepcode 19 | ``` 20 | 21 | For detailed information see [deepcode project on PYPI](https://pypi.org/project/deepcode/) 22 | 23 | ### Installation of package locally and development mode 24 | 25 | For detailed information see [development docs](https://github.com/DeepCodeAI/cli/blob/master/docs/Development.md) 26 | 27 | 28 | ## CLI 29 | 30 | ```bash 31 | deepcode --help 32 | 33 | Usage: deepcode [OPTIONS] COMMAND [ARGS]... 34 | 35 | A tool, that detects bugs and quality issues in JavaScript, TypeScript, 36 | Python, Java and C/C++. It uses a mighty engine based on AI from Deepcode. 37 | 38 | Options: 39 | -s, --service-url TEXT Custom DeepCode service URL (default: 40 | https://www.deepcode.ai) 41 | -a, --api-key TEXT Deepcode API key 42 | -c, --config-file FILE Config file (default: ~/.deepcode.json) 43 | --help Show this message and exit. 44 | 45 | Commands: 46 | analyze Analyzes your code using Deepcode AI engine. 47 | config Store configuration values in a file. 48 | login Initiate a new login protocol. 49 | ``` 50 | 51 | 52 | - login: this will open a browser window to log into DeepCode. 53 | ```bash 54 | deepcode login 55 | ``` 56 | - create/update configuration: this will walk you through some configuration steps in case you do not want to connect to 57 | https://www.deepcode.ai but to some other host running a DeepCode instance. 58 | ```bash 59 | deepcode config 60 | ``` 61 | - analyze: do analysis for a specific version of code. This will show all suggestions that are present in the code. 62 | 63 | ``` 64 | Usage: deepcode analyze [OPTIONS] 65 | 66 | Analyzes your code using Deepcode AI engine. 67 | 68 | Exit codes: 69 | 0 - not issues found 70 | 1 - some issues found 71 | 2 - Execution was interrupted by the user 72 | 3 - Some error happened while executing 73 | 74 | Options: 75 | Source location: [mutually_exclusive, required] 76 | The configuration of repository location 77 | -p, --path DIRECTORY Path to folder to be processed. Multiple 78 | paths are allowed 79 | -r, --git-uri REMOTE Git URI (e.g. 80 | git@:/.git@ or 81 | https:////.git@) 82 | -l, --with-linters Enable linters 83 | -log, --log-file FILE Forward all debugging messages to a file 84 | -txt, --result-text Present results in txt format 85 | -sev, --severity [info|warning|critical] 86 | Minimum severity level (default: info) 87 | --help Show this message and exit. 88 | ``` 89 | 90 | Examples: 91 | 92 | ```bash 93 | deepcode analyze --path () --path () --with-linters --log-file=~/.deepcode.log -txt --severity warning 94 | 95 | deepcode analyze --path () 96 | deepcode analyze --git-uri git@github.com:DefinitelyTyped/DefinitelyTyped.git --with-linters 97 | ``` 98 | 99 | AnalysisResults in json format as described [here](https://www.deepcode.ai/docs/REST%20APIs%2FBundles) 100 | 101 | ## Python native usage 102 | 103 | DeepCode CLI can be also used as a module and can be imported into another python code with python import system. 104 | 105 | To use both functions, make sure, you set an environment variable 'DEEPCODE_API_KEY' with your API KEY, that [can be obtained here](https://www.deepcode.ai/app/~platform/account) 106 | 107 | To use self-managed instance of Deepcode, set an environment variable 'DEEPCODE_SERVICE_URL' with its user (e.g. https://example.org) 108 | 109 | Available methods: 110 | 111 | - analyze_folders(paths, linters_enabled=False): 112 | 113 | ```` 114 | :param [paths] - Paths should be a list of absolute paths to bundle dir 115 | :param [linters_enabled] - optional. requests also linter analysis 116 | :return - dictionary with results e.g. as described [here](https://www.deepcode.ai/docs/REST%20APIs%2FBundles) plus: 'id' for bundle id and 'url' for online view of analysis results 117 | 118 | example: 119 | import deepcode 120 | deepcode.analyze_folders('') 121 | ```` 122 | 123 | - analyze_git(platform, owner, repo, oid=None, linters_enabled=False): 124 | 125 | ```` 126 | :param [platform] - github.com, bitbucket.org, gitlab.com. 127 | :param [owner] - repository account. (e.g. facebook) 128 | :param [repo] - repository. (e.g. react) 129 | :param [linters_enabled] - optional. requests also linter analysis 130 | :return - dictionary with results e.g. as described [here](https://www.deepcode.ai/docs/REST%20APIs%2FBundles) plus: 'id' for bundle id and 'url' for online view of analysis results 131 | 132 | example: 133 | import deepcode 134 | deepcode.analyze_git('github.com', 'facebook', 'react') 135 | ```` 136 | -------------------------------------------------------------------------------- /deepcode/__init__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | from tqdm import tqdm 4 | 5 | from .files import collect_bundle_files, prepare_bundle_hashes 6 | from .bundle import get_filters, generate_bundle, create_git_bundle 7 | from .analysis import get_analysis 8 | from .utils import logger, profile_speed 9 | 10 | 11 | @profile_speed 12 | async def analyze_folders(paths, linters_enabled=False, symlinks_enabled=False, severity=1): 13 | """ Entire flow of analyzing local folders. """ 14 | 15 | with tqdm(total=5, desc='Analizing folders', unit='step', leave=False) as pbar: 16 | 17 | pbar.set_description('Fetching supported extensions') 18 | file_filter = await get_filters() 19 | pbar.update(1) 20 | 21 | pbar.set_description('Scanning for files') 22 | bundle_files = collect_bundle_files(paths, file_filter, symlinks_enabled=symlinks_enabled) 23 | bundle_files = tuple( 24 | tqdm(bundle_files, desc='Found files', unit='f', leave=False) # progress bar 25 | ) 26 | pbar.update(1) 27 | 28 | # change dir to destination folder, if paths list contains only one item 29 | # We do it to exclude repetitive root path in the results 30 | if len(paths) == 1: 31 | dirname = paths[0] if os.path.isdir(paths[0]) else os.path.dirname(paths[0]) 32 | os.chdir(dirname) 33 | 34 | pbar.set_description('Computing file hashes') 35 | file_hashes = prepare_bundle_hashes( 36 | tqdm(bundle_files, desc='Calculated hashes', unit='files', leave=False) # progress bar 37 | ) 38 | pbar.update(1) 39 | 40 | pbar.set_description('Sending data') 41 | 42 | bundle_id = await generate_bundle(file_hashes) 43 | pbar.update(1) 44 | 45 | pbar.set_description('Requesting audit results') 46 | res = await get_analysis(bundle_id, linters_enabled=linters_enabled, severity=severity) 47 | pbar.update(1) 48 | pbar.set_description('Finished analysis') 49 | 50 | return res 51 | 52 | 53 | @profile_speed 54 | async def analyze_git(platform, owner, repo, oid=None, linters_enabled=False, severity=1): 55 | """ Entire flow of analyzing remote git repositories. """ 56 | bundle_id = await create_git_bundle(platform, owner, repo, oid) 57 | 58 | return await get_analysis(bundle_id, linters_enabled=linters_enabled, severity=severity) 59 | -------------------------------------------------------------------------------- /deepcode/analysis.py: -------------------------------------------------------------------------------- 1 | """ 2 | A module dedicated to implementing an analysis protocol. 3 | """ 4 | 5 | import asyncio 6 | import aiohttp 7 | from tqdm import tqdm 8 | 9 | from .connection import api_call 10 | from .utils import logger 11 | from .constants import (ANALYSIS_PROGRESS_INTERVAL, ANALYSIS_RETRY_DELAY, ANALYSIS_RETRIES) 12 | 13 | STATUS_MAPPING = { 14 | 'DC_DONE': 'Linters running', 15 | 'DONE': 'Completed analysis' 16 | } 17 | 18 | def _status_decription(status): 19 | return STATUS_MAPPING.get(status, status).lower().capitalize() 20 | 21 | 22 | async def get_analysis(bundle_id, linters_enabled=False, severity=1): 23 | """ Initiate analysis via API and wait for results. """ 24 | 25 | success_statuses = ['DONE'] if linters_enabled else ['DONE', 'DC_DONE'] 26 | attempt = 0 27 | 28 | path = { 29 | True: 'analysis/{}?severity={}&linters', 30 | False: 'analysis/{}?severity={}' 31 | }[linters_enabled].format(bundle_id, severity) 32 | 33 | with tqdm(total=100, unit='%', leave=False) as pbar: 34 | 35 | current_percentage = 0 36 | 37 | while(True): 38 | 39 | data = await api_call(path) 40 | 41 | pbar.set_description( 42 | _status_decription(data.get('status', '')) 43 | ) 44 | 45 | if data.get('status') in success_statuses and data.get('analysisResults'): 46 | 47 | return { 48 | 'id': bundle_id, 49 | 'url': data['analysisURL'], 50 | 'results': data['analysisResults'] 51 | } 52 | 53 | elif data['status'] == 'FAILED': 54 | if attempt >= ANALYSIS_RETRIES: 55 | raise RuntimeError("Analysis failed for {} times. It seems, Deepcode has some issues. Please contact Deepcode. Response --> {}".format(ANALYSIS_RETRIES, data)) 56 | 57 | logger.warning('Analysis failed. Retrying in {} sec'.format(ANALYSIS_RETRY_DELAY)) 58 | attempt += 1 59 | await asyncio.sleep(ANALYSIS_RETRY_DELAY) 60 | 61 | elif data.get('progress'): 62 | 63 | progress = int(data['progress'] * 100) 64 | pbar.update(progress - current_percentage) 65 | current_percentage = progress 66 | 67 | await asyncio.sleep(ANALYSIS_PROGRESS_INTERVAL) 68 | 69 | else: 70 | await asyncio.sleep(ANALYSIS_PROGRESS_INTERVAL) 71 | -------------------------------------------------------------------------------- /deepcode/auth.py: -------------------------------------------------------------------------------- 1 | """ 2 | A module dedicated to authenticating users via Deepcode website 3 | """ 4 | 5 | import asyncio 6 | import webbrowser 7 | import aiohttp 8 | 9 | from .utils import logger 10 | from .connection import api_call 11 | from .constants import AUTH_POLLING_INTERVAL, DEFAULT_SOURCE 12 | 13 | 14 | async def login(service_url, source=DEFAULT_SOURCE): 15 | """ 16 | Initiate a new login protocol. 17 | User will be forwarded to Deepcode website to complete the process. 18 | """ 19 | res = await api_call('login', method="POST", data={'source': source}) 20 | api_key, login_url = res['sessionToken'], res['loginURL'] 21 | 22 | # Open browser to complete authentication and assign api_key and user 23 | webbrowser.open_new_tab(login_url) 24 | 25 | while(True): 26 | try: 27 | await asyncio.sleep(AUTH_POLLING_INTERVAL) 28 | status = await api_call('session', method="GET", 29 | callback=asyncio.coroutine(lambda r: r.status), 30 | api_key=api_key) 31 | if status == 304: 32 | print('Please, complete login process in opened browser. Re-checking session in {} sec'.format(AUTH_POLLING_INTERVAL)) 33 | continue 34 | else: 35 | # it means success, when we got 200 status code 36 | return api_key 37 | except aiohttp.client_exceptions.ClientResponseError: 38 | logger.error('Missing or invalid sessionToken') 39 | raise 40 | -------------------------------------------------------------------------------- /deepcode/bundle.py: -------------------------------------------------------------------------------- 1 | """ 2 | A module dedicated to implementing an bundle protocol. 3 | """ 4 | 5 | import os 6 | import json 7 | import asyncio 8 | import time 9 | from funcy import chunks 10 | from tqdm import tqdm 11 | from functools import partial 12 | 13 | from .connection import api_call 14 | from .files import get_file_content, compose_file_buckets, prepare_file_path 15 | from .utils import logger 16 | from .constants import MAX_BUCKET_SIZE 17 | 18 | 19 | async def get_filters(api_key=''): 20 | """ Fetch supported file extensions """ 21 | filters = await api_call('filters', api_key=api_key) 22 | logger.debug('allowed files: {}'.format(filters)) 23 | supported_extensions, expected_config_files = set(filters['extensions']), set(filters['configFiles']) 24 | return lambda n: os.path.splitext(n)[-1] in supported_extensions or n in expected_config_files 25 | 26 | 27 | async def _request_file_bundle(path, method, file_hashes, api_key): 28 | 29 | files = {prepare_file_path(p): h for p, h in file_hashes} 30 | 31 | res = await api_call( 32 | path=path, method=method, 33 | data={'files': files, 'removedFiles': []}, 34 | compression_level=9, 35 | api_key=api_key) 36 | 37 | bundle_id, missing_files = res['bundleId'], res['missingFiles'] 38 | logger.debug('bundle id: {} | missing_files: {}'.format(bundle_id, len(missing_files))) 39 | return bundle_id, missing_files 40 | 41 | 42 | async def generate_bundle(file_hashes, api_key=''): 43 | """ Generate bundles via API. Incapsulates all logic of our bundle protocol. """ 44 | 45 | async def _complete_bundle(bundle_func, api_key): 46 | bundle_id, missing_files = await bundle_func(api_key=api_key) 47 | while(missing_files): 48 | await fulfill_bundle(bundle_id, missing_files, api_key) # Send all missing files 49 | missing_files = await check_bundle(bundle_id, api_key) # Check that all files are uploaded 50 | 51 | return bundle_id 52 | 53 | bundle_id = None 54 | 55 | with tqdm(total=len(file_hashes), desc='Generated bundles', unit='bundle', leave=False) as pbar: 56 | 57 | for chunked_files in chunks(int(MAX_BUCKET_SIZE // 300), file_hashes): 58 | if not bundle_id: 59 | bundle_func = partial(_request_file_bundle, path='bundle', method='POST', file_hashes=chunked_files) 60 | else: 61 | bundle_func = partial(_request_file_bundle, path='bundle/{}'.format(bundle_id), method='PUT', file_hashes=chunked_files) 62 | 63 | bundle_id = await _complete_bundle( bundle_func, api_key) 64 | pbar.update(len(chunked_files)) 65 | 66 | return bundle_id 67 | 68 | 69 | async def create_git_bundle(platform, owner, repo, oid): 70 | """ Create a git bundle via API """ 71 | data = { 72 | 'platform': platform, 73 | 'owner': owner, 74 | 'repo': repo 75 | } 76 | 77 | if oid: 78 | data['oid'] = oid 79 | 80 | res = await api_call('bundle', method='POST', data=data, compression_level=9) 81 | return res['bundleId'] 82 | 83 | 84 | async def check_bundle(bundle_id, api_key=''): 85 | """ Check missing files in bundle via API """ 86 | data = await api_call('bundle/{}'.format(bundle_id), method='GET', api_key=api_key) 87 | return data['missingFiles'] 88 | 89 | 90 | def upload_bundle_files(bundle_id, entries, api_key): 91 | """ 92 | Each entry should contain of: (path, hash) 93 | """ 94 | 95 | data = [] 96 | for file_path, file_hash in entries: 97 | file_content = get_file_content(file_path) 98 | data.append({ 99 | 'fileHash': file_hash, 100 | 'fileContent': file_content 101 | }) 102 | 103 | return api_call( 104 | 'file/{}'.format(bundle_id), 105 | method='POST', 106 | data=data, 107 | callback=lambda resp: resp.text(), 108 | api_key=api_key 109 | ) 110 | 111 | 112 | async def fulfill_bundle(bundle_id, missing_files, api_key=''): 113 | """ Upload missing files to bundle via API """ 114 | if not missing_files: 115 | return 116 | logger.debug('Uploading {} missing files'.format(len(missing_files))) 117 | with tqdm(total=len(missing_files), desc='Uploading missing files', unit='files', leave=False) as pbar: 118 | 119 | async def _wrap(chunk): 120 | await upload_bundle_files(bundle_id, chunk, api_key) 121 | pbar.update(len(chunk)) 122 | 123 | tasks = [ 124 | _wrap(chunk) 125 | for chunk in compose_file_buckets(missing_files) 126 | ] 127 | if tasks: 128 | await asyncio.wait(tasks) 129 | else: 130 | logger.debug('No new files sent, as all files have been uploaded earlier') 131 | -------------------------------------------------------------------------------- /deepcode/cli.py: -------------------------------------------------------------------------------- 1 | """ 2 | Command line interface for Deepcode 3 | """ 4 | 5 | import sys 6 | import click 7 | import json 8 | import asyncio 9 | import aiohttp 10 | import os.path 11 | import logging 12 | from click_option_group import optgroup, RequiredMutuallyExclusiveOptionGroup 13 | 14 | from . import analyze_folders, analyze_git 15 | from .utils import logger, coro 16 | from .auth import login as login_task 17 | from .git_utils import parse_git_uri 18 | from .constants import (DEFAULT_SERVICE_URL, CONFIG_FILE_PATH, SERVICE_URL_ENV, SOURCE_ENV, API_KEY_ENV, DEFAULT_SOURCE) 19 | from .formatter import format_txt, text_decorations, text_with_colors 20 | 21 | def _save_config(service_url, api_key, config_file): 22 | data = { 23 | 'service_url': service_url, 24 | 'api_key': api_key, 25 | } 26 | data = {k:v for k,v in data.items() if v} 27 | 28 | with open(config_file, 'w') as cfg: 29 | cfg.write(json.dumps(data)) 30 | 31 | def _config_logging(log_file): 32 | if log_file: 33 | logging.basicConfig( 34 | level=logging.DEBUG, 35 | format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s', 36 | datefmt='%m-%d %H:%M', 37 | filename=os.path.expanduser(log_file), 38 | filemode='w') 39 | 40 | console = logging.StreamHandler() 41 | console.setLevel(logging.WARNING) 42 | formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s') 43 | console.setFormatter(formatter) 44 | logging.getLogger('').addHandler(console) 45 | 46 | 47 | @click.group() 48 | @click.option('--service-url', '-s', 'service_url', 49 | default=lambda: os.environ.get(SERVICE_URL_ENV, ''), 50 | help="Custom DeepCode service URL (default: {})".format(DEFAULT_SERVICE_URL)) 51 | @click.option('--api-key', '-a', 'api_key', 52 | default=lambda: os.environ.get(API_KEY_ENV, ''), 53 | help="Deepcode API key") 54 | @click.option( 55 | '--config-file', '-c', 56 | type=click.Path(dir_okay=False), 57 | default=CONFIG_FILE_PATH, 58 | help="Config file (default: {})".format(CONFIG_FILE_PATH) 59 | ) 60 | @click.option('--source', '-S', 'source', 61 | default=lambda: os.environ.get(SOURCE_ENV, ''), 62 | help="DeepCode client authentication source(default: {})".format(DEFAULT_SOURCE)) 63 | @click.pass_context 64 | def main(ctx, service_url, api_key, config_file, source): 65 | """ 66 | A tool, that detects bugs and quality issues in JavaScript, TypeScript, Python, Java and C/C++. 67 | It uses a mighty engine based on AI from Deepcode. 68 | """ 69 | 70 | filename = os.path.expanduser(config_file) 71 | 72 | config_data = {} 73 | if (not service_url or not api_key) and os.path.exists(filename): 74 | with open(filename) as cfg: 75 | try: 76 | # deepcode ignore replace~read~decode~json.loads: 'str' object has no attribute 'decode' 77 | config_data = json.loads(cfg.read()) 78 | except json.JSONDecodeError: 79 | logger.error('config file seems to be broken. Please run \"deepcode config\"') 80 | 81 | ctx.obj = { 82 | 'service_url': service_url or config_data.get('service_url', ''), 83 | 'source': source, 84 | 'api_key': api_key or config_data.get('api_key', ''), 85 | 'config_file': filename 86 | } 87 | 88 | service_url = ctx.obj.get('service_url', '') 89 | if service_url: 90 | os.environ[SERVICE_URL_ENV] = service_url 91 | 92 | api_key = ctx.obj.get('api_key', '') 93 | if api_key: 94 | os.environ[API_KEY_ENV] = api_key 95 | 96 | @main.command() 97 | @click.pass_context 98 | def config(ctx): 99 | """ 100 | Store configuration values in a file. 101 | """ 102 | 103 | service_url = click.prompt( 104 | "Please enter Deepcode Service URL (or leave it blank to use {})".format(DEFAULT_SERVICE_URL), 105 | default=ctx.obj.get('service_url', '') 106 | ) 107 | 108 | api_key = click.prompt( 109 | "Please enter your API key", 110 | default=ctx.obj.get('api_key', '') 111 | ) 112 | 113 | _save_config(service_url, api_key, ctx.obj['config_file']) 114 | 115 | 116 | @main.command() 117 | @click.pass_context 118 | @coro 119 | async def login(ctx): 120 | """ 121 | Initiate a new login protocol. 122 | User will be forwarded to Deepcode website to complete the process. 123 | """ 124 | 125 | service_url = ctx.obj.get('service_url', '') 126 | source = ctx.obj.get('source', '') 127 | 128 | api_key = await login_task(service_url, source or DEFAULT_SOURCE) 129 | 130 | _save_config(service_url, api_key, ctx.obj['config_file']) 131 | 132 | print(text_with_colors['green']('Login Successful!')) 133 | print('You API key {} has been saved.'.format(api_key)) 134 | print('You can manage your keys online: {}'.format( 135 | text_decorations['underlined']('{}/app/~platform/account'.format(service_url or DEFAULT_SERVICE_URL)) 136 | )) 137 | 138 | 139 | class GitURI(click.ParamType): 140 | name = 'Remote' 141 | 142 | def convert(self, value, param, ctx): 143 | data = parse_git_uri(value) 144 | if not data: 145 | self.fail( 146 | f'{value} is not a valid Git URI. (e.g. git@:/.git@ or https:////.git@)', 147 | param, 148 | ctx, 149 | ) 150 | 151 | return data 152 | 153 | @main.command() 154 | @optgroup.group('Source location', 155 | cls=RequiredMutuallyExclusiveOptionGroup, 156 | help='The configuration of repository location') 157 | @optgroup.option("--path", "-p", "paths", 158 | multiple=True, 159 | type=click.Path(exists=True, file_okay=True, dir_okay=True, readable=True, resolve_path=True), 160 | help="Path to folder to be processed. Multiple paths are allowed") 161 | @optgroup.option("--git-uri", "-r", "remote_params", 162 | type=GitURI(), 163 | help="Git URI (e.g. git@:/.git@ or https:////.git@)", 164 | ) 165 | @click.option('--with-linters', '-l', 'linters_enabled', is_flag=True, help="Enable linters") 166 | @click.option('--follow-symlinks', '-s', 'symlinks_enabled', is_flag=True, help="Follow symbolic links") 167 | @click.option('--log-file', '-log', 'log_file', 168 | type=click.Path(file_okay=True, dir_okay=False), 169 | help="Forward all debugging messages to a file") 170 | @click.option('--result-text', '-txt', 'result_txt', is_flag=True, help="Present results in txt format") 171 | @click.option('--severity', '-sev', 'severity', 172 | type=click.Choice(['info', 'warning', 'critical'], case_sensitive=False), 173 | default='info', 174 | help="Minimum severity level (default: info)") 175 | @click.pass_context 176 | @coro 177 | async def analyze(ctx, linters_enabled, symlinks_enabled, paths, remote_params, log_file, result_txt, severity): 178 | """ 179 | Analyzes your code using Deepcode AI engine. 180 | 181 | Exit codes: 182 | 0 - not issues found; 183 | 1 - some issues found; 184 | 2 - Execution was interrupted by the user; 185 | 3 - Some error happened while executing 186 | """ 187 | _config_logging(log_file) 188 | 189 | exit_code = 0 190 | 191 | severity = { 192 | 'info': 1, 193 | 'warning': 2, 194 | 'critical': 3 195 | }.get(severity) or 1 196 | 197 | try: 198 | if paths: # Local folders are going to be analysed 199 | paths = [os.path.abspath(p) for p in paths] 200 | results = await analyze_folders(paths=paths, linters_enabled=linters_enabled, symlinks_enabled=symlinks_enabled, severity=severity) 201 | else: 202 | # Deepcode server will fetch git repository and analyze it 203 | results = await analyze_git(linters_enabled=linters_enabled, severity=severity, **remote_params) 204 | 205 | # Present results in json or textual way 206 | print( format_txt(results) if result_txt else json.dumps(results, sort_keys=True, indent=2) ) 207 | 208 | if results['results']['suggestions']: 209 | exit_code = 1 210 | 211 | except aiohttp.client_exceptions.ClientResponseError as exc: 212 | if exc.status == 401: 213 | logger.error('Auth token seems to be missing or incorrect. Run \"deepcode login\"') 214 | else: 215 | logger.error(exc) 216 | exit_code = 3 217 | finally: 218 | sys.exit(exit_code) 219 | -------------------------------------------------------------------------------- /deepcode/connection.py: -------------------------------------------------------------------------------- 1 | """ 2 | A module dedicated to communication with Deepcode API. 3 | """ 4 | from urllib.parse import urljoin 5 | import aiohttp 6 | import asyncio 7 | import zlib 8 | import os 9 | from json import dumps 10 | from functools import wraps 11 | 12 | from .utils import logger 13 | from .constants import (DEFAULT_SERVICE_URL, NETWORK_RETRY_DELAY, SERVICE_URL_ENV, API_KEY_ENV) 14 | 15 | def reconnect(func): 16 | 17 | @wraps(func) 18 | async def wrapper(*args, **kwargs): 19 | while(True): 20 | try: 21 | return await func(*args, **kwargs) 22 | except aiohttp.client_exceptions.ClientConnectionError: 23 | logger.warning("Server is not available. Retrying in {} seconds".format(NETWORK_RETRY_DELAY)) 24 | # In case of network disruptions, we just retry without affecting any logic 25 | await asyncio.sleep(NETWORK_RETRY_DELAY) 26 | except aiohttp.client_exceptions.ClientResponseError as exc: 27 | if exc.status == 500: 28 | logger.warning("Server gives 500. Retrying in {} seconds".format(NETWORK_RETRY_DELAY)) 29 | # In case of temporary server failures, we just retry without affecting any logic 30 | await asyncio.sleep(NETWORK_RETRY_DELAY) 31 | else: 32 | raise 33 | 34 | return wrapper 35 | 36 | 37 | @reconnect 38 | async def api_call(path, method='GET', data=None, extra_headers={}, callback=lambda resp: resp.json(), compression_level=6, api_key=''): 39 | SERVICE_URL = os.environ.get(SERVICE_URL_ENV, '') or DEFAULT_SERVICE_URL 40 | API_KEY = api_key or os.environ.get(API_KEY_ENV, '') 41 | 42 | url = urljoin(urljoin(SERVICE_URL, '/publicapi/'), path) 43 | 44 | default_headers = { 45 | 'Session-Token': API_KEY, 46 | } 47 | 48 | if data: 49 | # Expect json string here 50 | data = dumps(data).encode('utf-8') 51 | data = zlib.compress(data, level=compression_level) 52 | 53 | default_headers.update({ 54 | 'Content-Type': 'application/json', 55 | 'Content-Encoding': 'deflate' 56 | }) 57 | 58 | # async def on_request_start(session, trace_config_ctx, params): 59 | # logger.debug("Starting request") 60 | 61 | # async def on_request_end(session, trace_config_ctx, params): 62 | # logger.debug("Ending request") 63 | 64 | async with aiohttp.request( 65 | url=url, method=method, 66 | data=data, 67 | raise_for_status=True, 68 | headers=dict(default_headers, **extra_headers), 69 | compress=None 70 | ) as resp: 71 | 72 | # logger.debug('status --> {}'.format(resp.status)) 73 | # content = await resp.text() 74 | 75 | return await callback(resp) 76 | -------------------------------------------------------------------------------- /deepcode/constants.py: -------------------------------------------------------------------------------- 1 | # ================ Connection ================= 2 | 3 | DEFAULT_SERVICE_URL = 'https://www.deepcode.ai' 4 | NETWORK_RETRY_DELAY = 5 5 | DEFAULT_SOURCE = 'cli' 6 | 7 | # BACKEND_STATUS_CODES = { 8 | # 'success': 200, 9 | # 'login_in_progress': 304, 10 | # 'token': 401, 11 | # 'invalid_content': 400, 12 | # 'invalid_bundle_access': 403, 13 | # 'expired_bundle': 404, 14 | # 'large_payload': 413 15 | # } 16 | 17 | # ================ Analysis ================= 18 | 19 | ANALYSIS_PROGRESS_INTERVAL = 2 20 | ANALYSIS_RETRY_DELAY = 5 21 | ANALYSIS_RETRIES = 3 22 | 23 | # ================ Authentication ================= 24 | 25 | AUTH_POLLING_INTERVAL = 2 26 | 27 | # ================ Files ================= 28 | 29 | IGNORES_DEFAULT = { 30 | '**/.git', 31 | } 32 | 33 | IGNORE_FILES_NAMES = { 34 | '.gitignore', 35 | '.dcignore' 36 | } 37 | 38 | MAX_BUCKET_SIZE = 1024 * 1024 * 4 39 | 40 | # ================ CLI ================= 41 | 42 | CONFIG_FILE_PATH = '~/.deepcode.json' 43 | 44 | SERVICE_URL_ENV = 'DEEPCODE_SERVICE_URL' 45 | API_KEY_ENV = 'DEEPCODE_API_KEY' 46 | SOURCE_ENV = 'DEEPCODE_SOURCE' 47 | -------------------------------------------------------------------------------- /deepcode/files.py: -------------------------------------------------------------------------------- 1 | """ 2 | A module dedicated to working with local folders/files. 3 | """ 4 | import os 5 | from copy import copy 6 | import fnmatch 7 | from itertools import chain 8 | import hashlib 9 | 10 | from .utils import logger 11 | 12 | from .constants import (IGNORES_DEFAULT, IGNORE_FILES_NAMES, MAX_BUCKET_SIZE) 13 | 14 | def prepare_file_path(filepath): 15 | """ 16 | 1. Get relative path 17 | 2. Modify Windows path 18 | 3. Prefix with / 19 | """ 20 | relpath = os.path.relpath(filepath).replace('\\', '/') 21 | return '/{}'.format(relpath) 22 | 23 | def resolve_file_path(bundle_filepath): 24 | """ Reversive function to prepare_file_path """ 25 | path = bundle_filepath[1:] 26 | 27 | if os.name != 'posix': 28 | path = path.replace('/', '\\') 29 | 30 | return os.path.abspath(path) 31 | 32 | def get_file_content(file_path): 33 | with open(file_path, encoding='utf-8', mode='r') as f: 34 | return f.read() 35 | 36 | def parse_file_ignores(file_path): 37 | dirname = os.path.dirname(file_path) 38 | with open(file_path, encoding='utf-8', mode='r') as f: 39 | for l in f.readlines(): 40 | rule = l.strip().rstrip('/') # Trim whitespaces and ending slash 41 | if rule and not rule.startswith('#'): 42 | yield os.path.join(dirname, rule) 43 | if not rule.startswith('/'): 44 | yield os.path.join(dirname, '**', rule) 45 | 46 | 47 | def is_ignored(path, file_ignores): 48 | for i in file_ignores: 49 | if fnmatch.fnmatch(path, i): 50 | logger.debug('pattern: {} | ignored: {}'.format(i, path)) 51 | return True 52 | 53 | return False 54 | 55 | 56 | def collect_bundle_files(paths, file_filter, symlinks_enabled=False, file_ignores=IGNORES_DEFAULT): 57 | local_file_ignores = copy(file_ignores) 58 | for path in paths: 59 | # Check if symlink and exclude if requested 60 | if os.path.islink(path) and not symlinks_enabled: 61 | continue 62 | 63 | if os.path.isfile(path): 64 | if file_filter(path) and not is_ignored(path, file_ignores): 65 | yield path 66 | elif os.path.isdir(path): 67 | with os.scandir(path) as it: 68 | local_files = [] 69 | sub_dirs = [] 70 | local_ignore_file = False 71 | for entry in it: 72 | 73 | if entry.is_symlink() and not symlinks_enabled: 74 | continue 75 | 76 | if entry.is_dir(follow_symlinks=symlinks_enabled): 77 | sub_dirs.append(entry.path) 78 | continue 79 | 80 | if entry.name in IGNORE_FILES_NAMES: 81 | for ignore_rule in parse_file_ignores(entry.path): 82 | local_file_ignores.add(ignore_rule) 83 | local_ignore_file = True 84 | logger.debug('recognized ignore rules in file --> {}'.format(entry.path)) 85 | continue 86 | 87 | if entry.is_file(follow_symlinks=symlinks_enabled) \ 88 | and file_filter(entry.name) \ 89 | and not is_ignored(entry.path, local_file_ignores): 90 | local_files.append(entry.path) 91 | 92 | if local_ignore_file: 93 | local_files = [p for p in local_files if not is_ignored(p, local_file_ignores)] 94 | 95 | yield from local_files 96 | 97 | sub_dirs = [ 98 | subdir for subdir in sub_dirs 99 | if not is_ignored(subdir, local_file_ignores) 100 | ] 101 | yield from collect_bundle_files(sub_dirs, file_filter, symlinks_enabled, local_file_ignores) 102 | 103 | 104 | def get_file_meta(file_path): 105 | content = get_file_content(file_path) 106 | hasher = hashlib.sha256() 107 | hasher.update(content.encode('utf-8')) 108 | 109 | return (len(content), hasher.hexdigest()) 110 | 111 | 112 | def prepare_bundle_hashes(bundle_files, bucket_size=MAX_BUCKET_SIZE): 113 | items = [] 114 | for file_path in bundle_files: 115 | try: 116 | file_size, file_hash = get_file_meta(file_path) 117 | except UnicodeDecodeError: 118 | logger.debug('ecxluded a file --> {} (Unicode Decode Error)'.format(file_path)) 119 | else: 120 | if file_size < bucket_size: 121 | items.append((file_path, file_hash)) 122 | 123 | return items 124 | 125 | 126 | def compose_file_buckets(missing_files, bucket_size=MAX_BUCKET_SIZE): 127 | """ 128 | Splits files into buckets with limiting max size 129 | Returns list of items: (path, hash) 130 | """ 131 | buckets = [{ 132 | 'size': bucket_size, 133 | 'files': [] 134 | }] 135 | 136 | def route_file_to_bucket(raw_file_path): 137 | 138 | file_path = resolve_file_path(raw_file_path) 139 | 140 | # Get file details 141 | file_size, file_hash = get_file_meta(file_path) 142 | 143 | # Check that file does not exceed max bucket size 144 | if file_size > bucket_size: 145 | logger.debug('excluded big file --> {} ({} bytes)'.format(file_path, file_size)) 146 | return 147 | 148 | # Try to find existing bucket 149 | for bucket in buckets: 150 | if bucket['size'] >= file_size: 151 | bucket['files'].append( (file_path, file_hash) ) 152 | bucket['size'] -= file_size 153 | return bucket 154 | 155 | bucket = { 156 | 'size': bucket_size - file_size, 157 | 'files': [ (file_path, file_hash) ] 158 | } 159 | buckets.append(bucket) 160 | return bucket 161 | 162 | for raw_file_path in missing_files: 163 | bucket = route_file_to_bucket(raw_file_path) 164 | if not bucket: 165 | continue 166 | 167 | if bucket['size'] < bucket_size * 0.01: 168 | yield bucket['files'] # Give bucket to requester 169 | buckets.remove(bucket) # Remove it as fullfilled 170 | 171 | # Send all left-over buckets 172 | for bucket in buckets: 173 | if bucket['files']: 174 | yield bucket['files'] 175 | -------------------------------------------------------------------------------- /deepcode/formatter.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module dedicated to formatting suggestion data to nice text view. 3 | """ 4 | 5 | import json 6 | from operator import itemgetter 7 | 8 | 9 | SEVERITIES = { 10 | 1: { 11 | 'title': 'Info', 12 | 'color': 'blue', 13 | }, 14 | 2: { 15 | 'title': 'Warning', 16 | 'color': 'yellow', 17 | }, 18 | 3: { 19 | 'title': 'Critical', 20 | 'color': 'red', 21 | }, 22 | } 23 | 24 | # colors text background like marker 25 | text_with_color_marker = { 26 | 'blue': lambda sev: "\x1b[5;30;44m{}\x1b[0m".format(sev), 27 | 'yellow': lambda sev: "\x1b[5;30;43m{}\x1b[0m".format(sev), 28 | 'red': lambda sev: "\x1b[5;30;41m{}\x1b[0m".format(sev), 29 | } 30 | 31 | # colors text font 32 | text_with_colors = { 33 | # blue text for info 34 | 'blue': lambda text: "\33[94m{}\33[0m".format(text), 35 | # yellow text for warnings 36 | 'yellow': lambda text: "\33[93m{}\33[0m".format(text), 37 | # red text for critical or errors 38 | 'red': lambda text: "\33[91m{}\33[0m".format(text), 39 | # green color for success 40 | 'green': lambda text: "\33[92m{}\33[0m".format(text), 41 | } 42 | 43 | text_decorations = { 44 | 'bold': lambda t: "\33[1m{}\33[0m".format(t), 45 | 'underlined': lambda t: '\033[4m{}\033[0m'.format(t) 46 | } 47 | 48 | SINGLELINE_POSITIONS_TEMPLATE = '{}line {}, symbols from {} to {}' 49 | MULTILINE_POSITIONS_TEMPLATE = '{}lines from {} to {}, symbols from {} to {}' 50 | 51 | def construct_severity_sub_header(severity_idx): 52 | color = SEVERITIES[severity_idx]['color'] 53 | return text_with_color_marker[color]( 54 | '{} issues'.format( SEVERITIES[severity_idx]['title'] ) 55 | ) 56 | 57 | 58 | def construct_issue_txt_view(file_path, positions_list, severity, message): 59 | color = SEVERITIES[severity]['color'] 60 | 61 | return '{filepath} {issue_msg}\n Issue positions:\n{positions}'.format( 62 | filepath=text_decorations['bold'](file_path), 63 | issue_msg=text_with_colors[color](message), 64 | positions=construct_issue_positions_txt_view(positions_list) 65 | ) 66 | 67 | 68 | def construct_issue_positions_txt_view(issues_positions_list): 69 | positions = [] 70 | EXTRA_SPACES_FOR_POSITION = ' '*5 71 | for position in issues_positions_list: 72 | rows = position['rows'] 73 | cols = position['cols'] 74 | start_row, end_row = rows 75 | 76 | if start_row == end_row: 77 | item = SINGLELINE_POSITIONS_TEMPLATE.format( 78 | EXTRA_SPACES_FOR_POSITION, start_row, *cols) 79 | else: 80 | item = MULTILINE_POSITIONS_TEMPLATE.format( 81 | EXTRA_SPACES_FOR_POSITION, start_row, end_row, *cols) 82 | 83 | positions.append(item.rstrip()) 84 | 85 | if 'markers' in position: 86 | positions.append( 87 | create_issue_markers_positions(position['markers']) 88 | ) 89 | 90 | return '\n'.join(positions) 91 | 92 | 93 | def create_issue_markers_positions(markers): 94 | EXTRA_SPACES_FOR_MARKERS_POSITION = ' '*10 95 | 96 | markers_positions = [] 97 | for marker in markers: 98 | for pos in marker['pos']: 99 | cols = pos['cols'] 100 | start_row, end_row = pos['rows'] 101 | if start_row == end_row: 102 | marker_position = SINGLELINE_POSITIONS_TEMPLATE.format( 103 | EXTRA_SPACES_FOR_MARKERS_POSITION, 104 | start_row, *cols) 105 | 106 | else: 107 | marker_position = MULTILINE_POSITIONS_TEMPLATE.format( 108 | EXTRA_SPACES_FOR_MARKERS_POSITION, 109 | start_row, end_row, *cols) 110 | 111 | markers_positions.append(marker_position) 112 | 113 | if not markers_positions: 114 | return '' 115 | 116 | return '\n{}{}:\n{}'.format( 117 | EXTRA_SPACES_FOR_MARKERS_POSITION, 118 | text_decorations['underlined']('issue helpers'), 119 | '\n'.join(markers_positions)) 120 | 121 | 122 | def format_txt(data): 123 | """ 124 | Presentation level. Parse json results into textual form. 125 | """ 126 | 127 | url, results = itemgetter('url', 'results')(data) 128 | files, suggestions = itemgetter('files', 'suggestions')(results) 129 | if not len(files) and not len(suggestions): 130 | return text_with_colors['green']('Everything is fine. No issues found.') 131 | 132 | paragraphs = [] 133 | 134 | grouped_issues = dict(zip(SEVERITIES.keys(), ([], [], []))) 135 | 136 | for file_path in files: 137 | for suggestion in files[file_path]: 138 | severity = suggestions[suggestion]['severity'] 139 | issue_txt_view = construct_issue_txt_view( 140 | file_path, 141 | files[file_path][suggestion], 142 | severity, 143 | suggestions[suggestion]['message'] 144 | ) 145 | grouped_issues[severity].append(issue_txt_view) 146 | 147 | for severity, issues in sorted(grouped_issues.items(), key=lambda i: -i[0]): 148 | if issues: 149 | paragraphs.append( 150 | '\n'.join([ 151 | construct_severity_sub_header(severity), 152 | '\n'.join(issues) 153 | ]) 154 | ) 155 | 156 | paragraphs.extend([ 157 | text_with_colors['green']('\n[Online analysis results]: '), 158 | text_decorations['underlined'](url), 159 | ]) 160 | 161 | return '\n'.join(paragraphs) 162 | -------------------------------------------------------------------------------- /deepcode/git_utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | GIT_URI_OBJ = re.compile('((git@|https://)(?P[\w\.@]+)(/|:))(?P[\w,\-,\_/]+)/(?P[\w\.,\-,\_]+)\.git((/){0,1})((@(?P[0-9a-z]+)){0,1})', re.IGNORECASE) 4 | 5 | def parse_git_uri(uri): 6 | """ 7 | Parses git uri into dictionary. Both SSH and HTTPS versions are supported 8 | SSH version git@github.com:DeepCodeAI/cli.git@1234 9 | HTTPS version https://github.com/DeepCodeAI/cli.git@1234 10 | 11 | In both cases the result should be: \{ platform: gh, owner: DeepCodeAI, repo: cli, oid: 1234 \} 12 | """ 13 | match = GIT_URI_OBJ.match(uri) 14 | if match: 15 | return match.groupdict() 16 | -------------------------------------------------------------------------------- /deepcode/utils.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | import time 3 | import logging 4 | import inspect 5 | import asyncio 6 | 7 | logger = logging.getLogger('deepcode') 8 | 9 | 10 | def coro(f): 11 | @wraps(f) 12 | def wrapper(*args, **kwargs): 13 | loop = asyncio.get_event_loop() 14 | loop.run_until_complete(f(*args, **kwargs)) 15 | #return asyncio.run() # supported only from 3.7+ 16 | 17 | return wrapper 18 | 19 | def profile_speed(func): 20 | 21 | log_timing = lambda d: logger.debug("- {:6.2f} sec: Done - \"{}\"".format( 22 | d, func.__doc__ or func.__name__)) 23 | 24 | if inspect.iscoroutinefunction(func): 25 | @wraps(func) 26 | async def wrapper(*args, **kwargs): 27 | start_time = time.time() 28 | try: 29 | return await func(*args, **kwargs) 30 | finally: 31 | log_timing(time.time() - start_time) 32 | 33 | return wrapper 34 | else: 35 | @wraps(func) 36 | def wrapper(*args, **kwargs): 37 | start_time = time.time() 38 | try: 39 | return func(*args, **kwargs) 40 | finally: 41 | log_timing(time.time() - start_time) 42 | 43 | return wrapper 44 | -------------------------------------------------------------------------------- /docs/Development.md: -------------------------------------------------------------------------------- 1 | ### DEEPCODE CLI development mode description for developers 2 | 3 | Python >= 3.6 is required for this package 4 | If you have an older version, please consider using [pyenv](https://realpython.com/intro-to-pyenv/) to manage different python versions. 5 | 6 | ### Environment setup 7 | 8 | Package can be developed/built with Poetry: 9 | 10 | Install poetry, dependencies and activate virtual environment: 11 | ```bash 12 | curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python 13 | ``` 14 | 15 | Add a following line to your shell specific config file (~/.bashrc, ~/.zshrc): 16 | ```bash 17 | export PATH="$HOME/.poetry/bin:$PATH" 18 | ``` 19 | 20 | For more details refer to [Poetry documentation](https://python-poetry.org/docs/) 21 | 22 | ### Create virtual environment 23 | ```bash 24 | poetry shell 25 | ``` 26 | 27 | (OPTIONAL) If you prefer to have all dependencies in the same place together with your code, create a virtualenv manually: 28 | ```bash 29 | virtualenv ./venv --python python3 30 | source ./venv/bin/activate 31 | ``` 32 | 33 | Troubleshooting: Make sure that your user has full rights to user folder with all subfolders 34 | 35 | ### Install dependencies 36 | ```bash 37 | poetry install 38 | ``` 39 | 40 | ## Description of cli options 41 | 42 | ```bash 43 | poetry run deepcode --help 44 | ``` 45 | 46 | ## Module mode 47 | 48 | CLI can work as command line interface and as imported module. 49 | To read more about module mode, see [readme docs](README.md) 50 | 51 | ## Package build 52 | 53 | - Increment version in pyproject.toml. 54 | - Update changelog 55 | - Build 56 | 57 | ```bash 58 | poetry build 59 | ``` 60 | 61 | ### Publishing 62 | 63 | ```bash 64 | poetry publish 65 | ``` 66 | 67 | ### Tests 68 | 69 | Make sure you have an API KEY in your [account](https://www.deepcode.ai/app/gh/account) 70 | Copy your key. 71 | 72 | Run tests: 73 | 74 | ```bash 75 | DEEPCODE_API_KEY= poetry run pytest tests 76 | ``` 77 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | category = "main" 3 | description = "Simple DNS resolver for asyncio" 4 | name = "aiodns" 5 | optional = false 6 | python-versions = "*" 7 | version = "2.0.0" 8 | 9 | [package.dependencies] 10 | pycares = ">=3.0.0" 11 | 12 | [package.dependencies.typing] 13 | python = "<3.7" 14 | version = "*" 15 | 16 | [[package]] 17 | category = "main" 18 | description = "Async http client/server framework (asyncio)" 19 | name = "aiohttp" 20 | optional = false 21 | python-versions = ">=3.5.3" 22 | version = "3.6.2" 23 | 24 | [package.dependencies] 25 | async-timeout = ">=3.0,<4.0" 26 | attrs = ">=17.3.0" 27 | chardet = ">=2.0,<4.0" 28 | multidict = ">=4.5,<5.0" 29 | yarl = ">=1.0,<2.0" 30 | 31 | [package.dependencies.idna-ssl] 32 | python = "<3.7" 33 | version = ">=1.0" 34 | 35 | [package.dependencies.typing-extensions] 36 | python = "<3.7" 37 | version = ">=3.6.5" 38 | 39 | [package.extras] 40 | speedups = ["aiodns", "brotlipy", "cchardet"] 41 | 42 | [[package]] 43 | category = "dev" 44 | description = "An abstract syntax tree for Python with inference support." 45 | name = "astroid" 46 | optional = false 47 | python-versions = ">=3.5" 48 | version = "2.4.2" 49 | 50 | [package.dependencies] 51 | lazy-object-proxy = ">=1.4.0,<1.5.0" 52 | six = ">=1.12,<2.0" 53 | wrapt = ">=1.11,<2.0" 54 | 55 | [package.dependencies.typed-ast] 56 | python = "<3.8" 57 | version = ">=1.4.0,<1.5" 58 | 59 | [[package]] 60 | category = "main" 61 | description = "Timeout context manager for asyncio programs" 62 | name = "async-timeout" 63 | optional = false 64 | python-versions = ">=3.5.3" 65 | version = "3.0.1" 66 | 67 | [[package]] 68 | category = "dev" 69 | description = "Atomic file writes." 70 | marker = "sys_platform == \"win32\"" 71 | name = "atomicwrites" 72 | optional = false 73 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 74 | version = "1.4.0" 75 | 76 | [[package]] 77 | category = "main" 78 | description = "Classes Without Boilerplate" 79 | name = "attrs" 80 | optional = false 81 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 82 | version = "19.3.0" 83 | 84 | [package.extras] 85 | azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"] 86 | dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"] 87 | docs = ["sphinx", "zope.interface"] 88 | tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] 89 | 90 | [[package]] 91 | category = "main" 92 | description = "cChardet is high speed universal character encoding detector." 93 | name = "cchardet" 94 | optional = false 95 | python-versions = "*" 96 | version = "2.1.6" 97 | 98 | [[package]] 99 | category = "main" 100 | description = "Foreign Function Interface for Python calling C code." 101 | name = "cffi" 102 | optional = false 103 | python-versions = "*" 104 | version = "1.14.0" 105 | 106 | [package.dependencies] 107 | pycparser = "*" 108 | 109 | [[package]] 110 | category = "main" 111 | description = "Universal encoding detector for Python 2 and 3" 112 | name = "chardet" 113 | optional = false 114 | python-versions = "*" 115 | version = "3.0.4" 116 | 117 | [[package]] 118 | category = "main" 119 | description = "Composable command line interface toolkit" 120 | name = "click" 121 | optional = false 122 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 123 | version = "7.1.2" 124 | 125 | [[package]] 126 | category = "main" 127 | description = "Option groups missing in Click" 128 | name = "click-option-group" 129 | optional = false 130 | python-versions = ">=3.6,<4" 131 | version = "0.2.3" 132 | 133 | [package.dependencies] 134 | Click = ">=7.0,<8" 135 | 136 | [[package]] 137 | category = "dev" 138 | description = "Cross-platform colored terminal text." 139 | marker = "sys_platform == \"win32\"" 140 | name = "colorama" 141 | optional = false 142 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 143 | version = "0.4.3" 144 | 145 | [[package]] 146 | category = "main" 147 | description = "A fancy and practical functional tools" 148 | name = "funcy" 149 | optional = false 150 | python-versions = "*" 151 | version = "1.14" 152 | 153 | [[package]] 154 | category = "main" 155 | description = "Internationalized Domain Names in Applications (IDNA)" 156 | name = "idna" 157 | optional = false 158 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 159 | version = "2.9" 160 | 161 | [[package]] 162 | category = "main" 163 | description = "Patch ssl.match_hostname for Unicode(idna) domains support" 164 | marker = "python_version < \"3.7\"" 165 | name = "idna-ssl" 166 | optional = false 167 | python-versions = "*" 168 | version = "1.1.0" 169 | 170 | [package.dependencies] 171 | idna = ">=2.0" 172 | 173 | [[package]] 174 | category = "dev" 175 | description = "Read metadata from Python packages" 176 | marker = "python_version < \"3.8\"" 177 | name = "importlib-metadata" 178 | optional = false 179 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 180 | version = "1.6.1" 181 | 182 | [package.dependencies] 183 | zipp = ">=0.5" 184 | 185 | [package.extras] 186 | docs = ["sphinx", "rst.linker"] 187 | testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] 188 | 189 | [[package]] 190 | category = "dev" 191 | description = "A Python utility / library to sort Python imports." 192 | name = "isort" 193 | optional = false 194 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 195 | version = "4.3.21" 196 | 197 | [package.extras] 198 | pipfile = ["pipreqs", "requirementslib"] 199 | pyproject = ["toml"] 200 | requirements = ["pipreqs", "pip-api"] 201 | xdg_home = ["appdirs (>=1.4.0)"] 202 | 203 | [[package]] 204 | category = "dev" 205 | description = "A fast and thorough lazy object proxy." 206 | name = "lazy-object-proxy" 207 | optional = false 208 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 209 | version = "1.4.3" 210 | 211 | [[package]] 212 | category = "dev" 213 | description = "McCabe checker, plugin for flake8" 214 | name = "mccabe" 215 | optional = false 216 | python-versions = "*" 217 | version = "0.6.1" 218 | 219 | [[package]] 220 | category = "dev" 221 | description = "More routines for operating on iterables, beyond itertools" 222 | name = "more-itertools" 223 | optional = false 224 | python-versions = ">=3.5" 225 | version = "8.3.0" 226 | 227 | [[package]] 228 | category = "main" 229 | description = "multidict implementation" 230 | name = "multidict" 231 | optional = false 232 | python-versions = ">=3.5" 233 | version = "4.7.6" 234 | 235 | [[package]] 236 | category = "dev" 237 | description = "Core utilities for Python packages" 238 | name = "packaging" 239 | optional = false 240 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 241 | version = "20.4" 242 | 243 | [package.dependencies] 244 | pyparsing = ">=2.0.2" 245 | six = "*" 246 | 247 | [[package]] 248 | category = "dev" 249 | description = "plugin and hook calling mechanisms for python" 250 | name = "pluggy" 251 | optional = false 252 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 253 | version = "0.13.1" 254 | 255 | [package.dependencies] 256 | [package.dependencies.importlib-metadata] 257 | python = "<3.8" 258 | version = ">=0.12" 259 | 260 | [package.extras] 261 | dev = ["pre-commit", "tox"] 262 | 263 | [[package]] 264 | category = "dev" 265 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 266 | name = "py" 267 | optional = false 268 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 269 | version = "1.8.1" 270 | 271 | [[package]] 272 | category = "main" 273 | description = "Python interface for c-ares" 274 | name = "pycares" 275 | optional = false 276 | python-versions = "*" 277 | version = "3.1.1" 278 | 279 | [package.dependencies] 280 | cffi = ">=1.5.0" 281 | 282 | [package.extras] 283 | idna = ["idna (>=2.1)"] 284 | 285 | [[package]] 286 | category = "main" 287 | description = "C parser in Python" 288 | name = "pycparser" 289 | optional = false 290 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 291 | version = "2.20" 292 | 293 | [[package]] 294 | category = "dev" 295 | description = "python code static checker" 296 | name = "pylint" 297 | optional = false 298 | python-versions = ">=3.5.*" 299 | version = "2.5.3" 300 | 301 | [package.dependencies] 302 | astroid = ">=2.4.0,<=2.5" 303 | colorama = "*" 304 | isort = ">=4.2.5,<5" 305 | mccabe = ">=0.6,<0.7" 306 | toml = ">=0.7.1" 307 | 308 | [[package]] 309 | category = "dev" 310 | description = "Python parsing module" 311 | name = "pyparsing" 312 | optional = false 313 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 314 | version = "2.4.7" 315 | 316 | [[package]] 317 | category = "dev" 318 | description = "pytest: simple powerful testing with Python" 319 | name = "pytest" 320 | optional = false 321 | python-versions = ">=3.5" 322 | version = "5.4.3" 323 | 324 | [package.dependencies] 325 | atomicwrites = ">=1.0" 326 | attrs = ">=17.4.0" 327 | colorama = "*" 328 | more-itertools = ">=4.0.0" 329 | packaging = "*" 330 | pluggy = ">=0.12,<1.0" 331 | py = ">=1.5.0" 332 | wcwidth = "*" 333 | 334 | [package.dependencies.importlib-metadata] 335 | python = "<3.8" 336 | version = ">=0.12" 337 | 338 | [package.extras] 339 | checkqa-mypy = ["mypy (v0.761)"] 340 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 341 | 342 | [[package]] 343 | category = "dev" 344 | description = "Pytest support for asyncio." 345 | name = "pytest-asyncio" 346 | optional = false 347 | python-versions = ">= 3.5" 348 | version = "0.10.0" 349 | 350 | [package.dependencies] 351 | pytest = ">=3.0.6" 352 | 353 | [package.extras] 354 | testing = ["async-generator (>=1.3)", "coverage", "hypothesis (>=3.64)"] 355 | 356 | [[package]] 357 | category = "dev" 358 | description = "Python 2 and 3 compatibility utilities" 359 | name = "six" 360 | optional = false 361 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 362 | version = "1.15.0" 363 | 364 | [[package]] 365 | category = "dev" 366 | description = "Python Library for Tom's Obvious, Minimal Language" 367 | name = "toml" 368 | optional = false 369 | python-versions = "*" 370 | version = "0.10.1" 371 | 372 | [[package]] 373 | category = "main" 374 | description = "Fast, Extensible Progress Meter" 375 | name = "tqdm" 376 | optional = false 377 | python-versions = ">=2.6, !=3.0.*, !=3.1.*" 378 | version = "4.46.1" 379 | 380 | [package.extras] 381 | dev = ["py-make (>=0.1.0)", "twine", "argopt", "pydoc-markdown"] 382 | 383 | [[package]] 384 | category = "dev" 385 | description = "a fork of Python 2 and 3 ast modules with type comment support" 386 | marker = "implementation_name == \"cpython\" and python_version < \"3.8\"" 387 | name = "typed-ast" 388 | optional = false 389 | python-versions = "*" 390 | version = "1.4.1" 391 | 392 | [[package]] 393 | category = "main" 394 | description = "Type Hints for Python" 395 | marker = "python_version < \"3.7\"" 396 | name = "typing" 397 | optional = false 398 | python-versions = "*" 399 | version = "3.7.4.1" 400 | 401 | [[package]] 402 | category = "main" 403 | description = "Backported and Experimental Type Hints for Python 3.5+" 404 | marker = "python_version < \"3.7\"" 405 | name = "typing-extensions" 406 | optional = false 407 | python-versions = "*" 408 | version = "3.7.4.2" 409 | 410 | [[package]] 411 | category = "dev" 412 | description = "Measures the displayed width of unicode strings in a terminal" 413 | name = "wcwidth" 414 | optional = false 415 | python-versions = "*" 416 | version = "0.2.4" 417 | 418 | [[package]] 419 | category = "dev" 420 | description = "Module for decorators, wrappers and monkey patching." 421 | name = "wrapt" 422 | optional = false 423 | python-versions = "*" 424 | version = "1.12.1" 425 | 426 | [[package]] 427 | category = "main" 428 | description = "Yet another URL library" 429 | name = "yarl" 430 | optional = false 431 | python-versions = ">=3.5" 432 | version = "1.4.2" 433 | 434 | [package.dependencies] 435 | idna = ">=2.0" 436 | multidict = ">=4.0" 437 | 438 | [[package]] 439 | category = "dev" 440 | description = "Backport of pathlib-compatible object wrapper for zip files" 441 | marker = "python_version < \"3.8\"" 442 | name = "zipp" 443 | optional = false 444 | python-versions = ">=3.6" 445 | version = "3.1.0" 446 | 447 | [package.extras] 448 | docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 449 | testing = ["jaraco.itertools", "func-timeout"] 450 | 451 | [metadata] 452 | content-hash = "c387aaccf0058bc83bb320df3d0370dd94b5c37cdc5a085d47a79056e384966c" 453 | python-versions = "^3.6 | ^3.7 | ^3.8" 454 | 455 | [metadata.files] 456 | aiodns = [ 457 | {file = "aiodns-2.0.0-py2.py3-none-any.whl", hash = "sha256:aaa5ac584f40fe778013df0aa6544bf157799bd3f608364b451840ed2c8688de"}, 458 | {file = "aiodns-2.0.0.tar.gz", hash = "sha256:815fdef4607474295d68da46978a54481dd1e7be153c7d60f9e72773cd38d77d"}, 459 | ] 460 | aiohttp = [ 461 | {file = "aiohttp-3.6.2-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e"}, 462 | {file = "aiohttp-3.6.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec"}, 463 | {file = "aiohttp-3.6.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48"}, 464 | {file = "aiohttp-3.6.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59"}, 465 | {file = "aiohttp-3.6.2-cp36-cp36m-win32.whl", hash = "sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a"}, 466 | {file = "aiohttp-3.6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17"}, 467 | {file = "aiohttp-3.6.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a"}, 468 | {file = "aiohttp-3.6.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd"}, 469 | {file = "aiohttp-3.6.2-cp37-cp37m-win32.whl", hash = "sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965"}, 470 | {file = "aiohttp-3.6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654"}, 471 | {file = "aiohttp-3.6.2-py3-none-any.whl", hash = "sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4"}, 472 | {file = "aiohttp-3.6.2.tar.gz", hash = "sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326"}, 473 | ] 474 | astroid = [ 475 | {file = "astroid-2.4.2-py3-none-any.whl", hash = "sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386"}, 476 | {file = "astroid-2.4.2.tar.gz", hash = "sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703"}, 477 | ] 478 | async-timeout = [ 479 | {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, 480 | {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"}, 481 | ] 482 | atomicwrites = [ 483 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 484 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 485 | ] 486 | attrs = [ 487 | {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, 488 | {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, 489 | ] 490 | cchardet = [ 491 | {file = "cchardet-2.1.6-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:2aa1b008965c703ad6597361b0f6d427c8971fe94a2c99ec3724c228ae50d6a6"}, 492 | {file = "cchardet-2.1.6-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:fd16f57ce42a72397cd9fe38977fc809eb02172731cb354572f28a6d8e4cf322"}, 493 | {file = "cchardet-2.1.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:953fe382304b19f5aa8fc2da4b092a3bb58a477d33af4def4b81abdce4c9288c"}, 494 | {file = "cchardet-2.1.6-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:ccb9f6f06265382028468b47e726f2d42539256fb498d1b0e473c39037b42b8a"}, 495 | {file = "cchardet-2.1.6-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:2c05b66b12f9ab0493c5ffb666036fd8c9004a9cc9d5a9264dc24738b50ab8c3"}, 496 | {file = "cchardet-2.1.6-cp35-cp35m-win32.whl", hash = "sha256:dff9480d9b6260f59ad10e1cec5be13905be5da88a4a2bd5a5bd4d49c49c4a05"}, 497 | {file = "cchardet-2.1.6-cp35-cp35m-win_amd64.whl", hash = "sha256:84d2ce838cf3c2fe7f0517941702d42f7e598e5173632ec47a113cd521669b98"}, 498 | {file = "cchardet-2.1.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4486f6e5bdf06f0081d13832f2a061d9e90597eb02093fda9d37e3985e3b2ef2"}, 499 | {file = "cchardet-2.1.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:7a2d98df461d3f36b403fdd8d7890c823ed05bd98eb074412ed56fbfedb94751"}, 500 | {file = "cchardet-2.1.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:27b0f23088873d1dd36d2c8a2e45c9167e312e1aac7e4baeb47f7428a2669638"}, 501 | {file = "cchardet-2.1.6-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:cf134e1cfb0c53f08abb1ab9158a7e7f859c3ddb451d5fe535a2cc5f2958a688"}, 502 | {file = "cchardet-2.1.6-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f5c94994d876d8709847c3a92643309d716f43716580a2e5831262366a9ee8b6"}, 503 | {file = "cchardet-2.1.6-cp36-cp36m-win32.whl", hash = "sha256:217a7008bd399bdb61f6a0a2570acc5c3a9f96140e0a0d089b9e748c4d4e4c4e"}, 504 | {file = "cchardet-2.1.6-cp36-cp36m-win_amd64.whl", hash = "sha256:2a958fb093f69ee5f16be7a1aee5122e07aff4350fa4dc9b953b87c34468e605"}, 505 | {file = "cchardet-2.1.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4096759825a130cb27a58ddf6d58e10abdd0127d29fbf53fde26df7ad879737b"}, 506 | {file = "cchardet-2.1.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:7bba1cbb4358dc9a2d2da00f4b38b159a5483d2f3b1d698a7c2cae518f955170"}, 507 | {file = "cchardet-2.1.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0f6e4e464e332da776b9c1a34e4e83b6301d38c2724efc93848c46ade66d02bb"}, 508 | {file = "cchardet-2.1.6-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:acc96b4a8f756af289fa90ffa67ddef57401d99131e51e71872e3609483941ce"}, 509 | {file = "cchardet-2.1.6-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:79b0e113144c2ef0050bc9fe647c7657c5298f3012ecd8937d930b24ddd61404"}, 510 | {file = "cchardet-2.1.6-cp37-cp37m-win32.whl", hash = "sha256:8b1d02c99f6444c63336a76638741eaf4ac4005b454e3b8252a40074bf0d84a1"}, 511 | {file = "cchardet-2.1.6-cp37-cp37m-win_amd64.whl", hash = "sha256:e27771798c8ad50df1375e762d59369354af94eb8ac21eca5bfd1eeef589f545"}, 512 | {file = "cchardet-2.1.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:68409e00d75ff13dd7a192ec49559f5527ee8959a51a9f4dd7b168df972b4d44"}, 513 | {file = "cchardet-2.1.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:40c199f9c0569ac479fae7c4e12d2e16fc1e8237836b928474fdd228b8d11477"}, 514 | {file = "cchardet-2.1.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8f7ade2578b2326a0a554c03f60c8d079331220179a592e83e143c9556b7f5b2"}, 515 | {file = "cchardet-2.1.6-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:5e38cfad9d3ca0f571c4352e9ca0f5ab718508f492a37d3236ae70810140e250"}, 516 | {file = "cchardet-2.1.6-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:af284494ea6c40f9613b4d939abe585eb9290cb92037eab66122c93190fcb338"}, 517 | {file = "cchardet-2.1.6-cp38-cp38-win32.whl", hash = "sha256:54d2653520237ebbd2928f2c0f2eb7c616ee2b5194d73d945060cd54a7846b64"}, 518 | {file = "cchardet-2.1.6-cp38-cp38-win_amd64.whl", hash = "sha256:f245f045054e8d6dab2a0e366d3c74f3a47fb7dec2595ae2035b234b1a829c7a"}, 519 | {file = "cchardet-2.1.6.tar.gz", hash = "sha256:b76afb2059ad69eab576949980a17413c1e9e5a5624abf9e43542d8853f146b3"}, 520 | ] 521 | cffi = [ 522 | {file = "cffi-1.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384"}, 523 | {file = "cffi-1.14.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30"}, 524 | {file = "cffi-1.14.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c"}, 525 | {file = "cffi-1.14.0-cp27-cp27m-win32.whl", hash = "sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78"}, 526 | {file = "cffi-1.14.0-cp27-cp27m-win_amd64.whl", hash = "sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793"}, 527 | {file = "cffi-1.14.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e"}, 528 | {file = "cffi-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a"}, 529 | {file = "cffi-1.14.0-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff"}, 530 | {file = "cffi-1.14.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f"}, 531 | {file = "cffi-1.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa"}, 532 | {file = "cffi-1.14.0-cp35-cp35m-win32.whl", hash = "sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5"}, 533 | {file = "cffi-1.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4"}, 534 | {file = "cffi-1.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d"}, 535 | {file = "cffi-1.14.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc"}, 536 | {file = "cffi-1.14.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac"}, 537 | {file = "cffi-1.14.0-cp36-cp36m-win32.whl", hash = "sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f"}, 538 | {file = "cffi-1.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b"}, 539 | {file = "cffi-1.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3"}, 540 | {file = "cffi-1.14.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66"}, 541 | {file = "cffi-1.14.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0"}, 542 | {file = "cffi-1.14.0-cp37-cp37m-win32.whl", hash = "sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f"}, 543 | {file = "cffi-1.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26"}, 544 | {file = "cffi-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd"}, 545 | {file = "cffi-1.14.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55"}, 546 | {file = "cffi-1.14.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2"}, 547 | {file = "cffi-1.14.0-cp38-cp38-win32.whl", hash = "sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8"}, 548 | {file = "cffi-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b"}, 549 | {file = "cffi-1.14.0.tar.gz", hash = "sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6"}, 550 | ] 551 | chardet = [ 552 | {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, 553 | {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, 554 | ] 555 | click = [ 556 | {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, 557 | {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, 558 | ] 559 | click-option-group = [ 560 | {file = "click-option-group-0.2.3.tar.gz", hash = "sha256:41170a969a18d679347b77fc4b994fbf0d378924c86e93350dd71ade470642bd"}, 561 | {file = "click_option_group-0.2.3-py3-none-any.whl", hash = "sha256:984bce9c06fecff751178e6b2216b993c06874544f8cb1708e922b8591498e75"}, 562 | ] 563 | colorama = [ 564 | {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, 565 | {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, 566 | ] 567 | funcy = [ 568 | {file = "funcy-1.14.tar.gz", hash = "sha256:75ee84c3b446f92e68a857c2267b15a1b49c631c9d5a87a5f063cd2d6761a5c4"}, 569 | ] 570 | idna = [ 571 | {file = "idna-2.9-py2.py3-none-any.whl", hash = "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"}, 572 | {file = "idna-2.9.tar.gz", hash = "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"}, 573 | ] 574 | idna-ssl = [ 575 | {file = "idna-ssl-1.1.0.tar.gz", hash = "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c"}, 576 | ] 577 | importlib-metadata = [ 578 | {file = "importlib_metadata-1.6.1-py2.py3-none-any.whl", hash = "sha256:15ec6c0fd909e893e3a08b3a7c76ecb149122fb14b7efe1199ddd4c7c57ea958"}, 579 | {file = "importlib_metadata-1.6.1.tar.gz", hash = "sha256:0505dd08068cfec00f53a74a0ad927676d7757da81b7436a6eefe4c7cf75c545"}, 580 | ] 581 | isort = [ 582 | {file = "isort-4.3.21-py2.py3-none-any.whl", hash = "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"}, 583 | {file = "isort-4.3.21.tar.gz", hash = "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1"}, 584 | ] 585 | lazy-object-proxy = [ 586 | {file = "lazy-object-proxy-1.4.3.tar.gz", hash = "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0"}, 587 | {file = "lazy_object_proxy-1.4.3-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442"}, 588 | {file = "lazy_object_proxy-1.4.3-cp27-cp27m-win32.whl", hash = "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4"}, 589 | {file = "lazy_object_proxy-1.4.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a"}, 590 | {file = "lazy_object_proxy-1.4.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d"}, 591 | {file = "lazy_object_proxy-1.4.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a"}, 592 | {file = "lazy_object_proxy-1.4.3-cp34-cp34m-win32.whl", hash = "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e"}, 593 | {file = "lazy_object_proxy-1.4.3-cp34-cp34m-win_amd64.whl", hash = "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357"}, 594 | {file = "lazy_object_proxy-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50"}, 595 | {file = "lazy_object_proxy-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db"}, 596 | {file = "lazy_object_proxy-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449"}, 597 | {file = "lazy_object_proxy-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156"}, 598 | {file = "lazy_object_proxy-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531"}, 599 | {file = "lazy_object_proxy-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb"}, 600 | {file = "lazy_object_proxy-1.4.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08"}, 601 | {file = "lazy_object_proxy-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383"}, 602 | {file = "lazy_object_proxy-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142"}, 603 | {file = "lazy_object_proxy-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea"}, 604 | {file = "lazy_object_proxy-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62"}, 605 | {file = "lazy_object_proxy-1.4.3-cp38-cp38-win32.whl", hash = "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd"}, 606 | {file = "lazy_object_proxy-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239"}, 607 | ] 608 | mccabe = [ 609 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 610 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 611 | ] 612 | more-itertools = [ 613 | {file = "more-itertools-8.3.0.tar.gz", hash = "sha256:558bb897a2232f5e4f8e2399089e35aecb746e1f9191b6584a151647e89267be"}, 614 | {file = "more_itertools-8.3.0-py3-none-any.whl", hash = "sha256:7818f596b1e87be009031c7653d01acc46ed422e6656b394b0f765ce66ed4982"}, 615 | ] 616 | multidict = [ 617 | {file = "multidict-4.7.6-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000"}, 618 | {file = "multidict-4.7.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a"}, 619 | {file = "multidict-4.7.6-cp35-cp35m-win32.whl", hash = "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5"}, 620 | {file = "multidict-4.7.6-cp35-cp35m-win_amd64.whl", hash = "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3"}, 621 | {file = "multidict-4.7.6-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87"}, 622 | {file = "multidict-4.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2"}, 623 | {file = "multidict-4.7.6-cp36-cp36m-win32.whl", hash = "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7"}, 624 | {file = "multidict-4.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463"}, 625 | {file = "multidict-4.7.6-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d"}, 626 | {file = "multidict-4.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255"}, 627 | {file = "multidict-4.7.6-cp37-cp37m-win32.whl", hash = "sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507"}, 628 | {file = "multidict-4.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c"}, 629 | {file = "multidict-4.7.6-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b"}, 630 | {file = "multidict-4.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7"}, 631 | {file = "multidict-4.7.6-cp38-cp38-win32.whl", hash = "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d"}, 632 | {file = "multidict-4.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19"}, 633 | {file = "multidict-4.7.6.tar.gz", hash = "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430"}, 634 | ] 635 | packaging = [ 636 | {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, 637 | {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, 638 | ] 639 | pluggy = [ 640 | {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, 641 | {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, 642 | ] 643 | py = [ 644 | {file = "py-1.8.1-py2.py3-none-any.whl", hash = "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"}, 645 | {file = "py-1.8.1.tar.gz", hash = "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa"}, 646 | ] 647 | pycares = [ 648 | {file = "pycares-3.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:81edb016d9e43dde7473bc3999c29cdfee3a6b67308fed1ea21049f458e83ae0"}, 649 | {file = "pycares-3.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1917b82494907a4a342db420bc4dd5bac355a5fa3984c35ba9bf51422b020b48"}, 650 | {file = "pycares-3.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:a5089fd660f0b0d228b14cdaa110d0d311edfa5a63f800618dbf1321dcaef66b"}, 651 | {file = "pycares-3.1.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:48a7750f04e69e1f304f4332b755728067e7c4b1abe2760bba1cacd9ff7a847a"}, 652 | {file = "pycares-3.1.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d88a279cbc5af613f73e86e19b3f63850f7a2e2736e249c51995dedcc830b1bb"}, 653 | {file = "pycares-3.1.1-cp35-cp35m-win32.whl", hash = "sha256:96c90e11b4a4c7c0b8ff5aaaae969c5035493136586043ff301979aae0623941"}, 654 | {file = "pycares-3.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:eee7b6a5f5b5af050cb7d66ab28179287b416f06d15a8974ac831437fec51336"}, 655 | {file = "pycares-3.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:050f00b39ed77ea8a4e555f09417d4b1a6b5baa24bb9531a3e15d003d2319b3f"}, 656 | {file = "pycares-3.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:2e4f74677542737fb5af4ea9a2e415ec5ab31aa67e7b8c3c969fdb15c069f679"}, 657 | {file = "pycares-3.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:f8deaefefc3a589058df1b177275f79233e8b0eeee6734cf4336d80164ecd022"}, 658 | {file = "pycares-3.1.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:c5cb72644b04e5e5abfb1e10a0e7eb75da6684ea0e60871652f348e412cf3b11"}, 659 | {file = "pycares-3.1.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:c457a709e6f2befea7e2996c991eda6d79705dd075f6521593ba6ebc1485b811"}, 660 | {file = "pycares-3.1.1-cp36-cp36m-win32.whl", hash = "sha256:1d8d177c40567de78108a7835170f570ab04f09084bfd32df9919c0eaec47aa1"}, 661 | {file = "pycares-3.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f41ac1c858687e53242828c9f59c2e7b0b95dbcd5bdd09c7e5d3c48b0f89a25a"}, 662 | {file = "pycares-3.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:9a0a1845f8cb2e62332bca0aaa9ad5494603ac43fb60d510a61d5b5b170d7216"}, 663 | {file = "pycares-3.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:eba9a9227438da5e78fc8eee32f32eb35d9a50cf0a0bd937eb6275c7cc3015fe"}, 664 | {file = "pycares-3.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0c5bd1f6f885a219d5e972788d6eef7b8043b55c3375a845e5399638436e0bba"}, 665 | {file = "pycares-3.1.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:a05bbfdfd41f8410a905a818f329afe7510cbd9ee65c60f8860a72b6c64ce5dc"}, 666 | {file = "pycares-3.1.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:11c628402cc8fc8ef461076d4e47f88afc1f8609989ebbff0dbffcd54c97239f"}, 667 | {file = "pycares-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:fadb97d2e02dabdc15a0091591a972a938850d79ddde23d385d813c1731983f0"}, 668 | {file = "pycares-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cce46dd4717debfd2aab79d6d7f0cbdf6b1e982dc4d9bebad81658d59ede07c2"}, 669 | {file = "pycares-3.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0a24d2e580a8eb567140d7b69f12cb7de90c836bd7b6488ec69394d308605ac3"}, 670 | {file = "pycares-3.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:fa78e919f3bd7d6d075db262aa41079b4c02da315c6043c6f43881e2ebcdd623"}, 671 | {file = "pycares-3.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:236286f81664658b32c141c8e79d20afc3d54f6e2e49dfc8b702026be7265855"}, 672 | {file = "pycares-3.1.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:7d86e62b700b21401ffe7fd1bbfe91e08489416fecae99c6570ab023c6896022"}, 673 | {file = "pycares-3.1.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1b90fa00a89564df059fb18e796458864cc4e00cb55e364dbf921997266b7c55"}, 674 | {file = "pycares-3.1.1-cp38-cp38-win32.whl", hash = "sha256:cfdd1f90bcf373b00f4b2c55ea47868616fe2f779f792fc913fa82a3d64ffe43"}, 675 | {file = "pycares-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7e2d7effd08d2e5a3cb95d98a7286ebab71ab2fbce84fa93cc2dd56caf7240dd"}, 676 | {file = "pycares-3.1.1.tar.gz", hash = "sha256:18dfd4fd300f570d6c4536c1d987b7b7673b2a9d14346592c5d6ed716df0d104"}, 677 | ] 678 | pycparser = [ 679 | {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, 680 | {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, 681 | ] 682 | pylint = [ 683 | {file = "pylint-2.5.3-py3-none-any.whl", hash = "sha256:d0ece7d223fe422088b0e8f13fa0a1e8eb745ebffcb8ed53d3e95394b6101a1c"}, 684 | {file = "pylint-2.5.3.tar.gz", hash = "sha256:7dd78437f2d8d019717dbf287772d0b2dbdfd13fc016aa7faa08d67bccc46adc"}, 685 | ] 686 | pyparsing = [ 687 | {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, 688 | {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, 689 | ] 690 | pytest = [ 691 | {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, 692 | {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, 693 | ] 694 | pytest-asyncio = [ 695 | {file = "pytest-asyncio-0.10.0.tar.gz", hash = "sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf"}, 696 | {file = "pytest_asyncio-0.10.0-py3-none-any.whl", hash = "sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b"}, 697 | ] 698 | six = [ 699 | {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, 700 | {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, 701 | ] 702 | toml = [ 703 | {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, 704 | {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, 705 | ] 706 | tqdm = [ 707 | {file = "tqdm-4.46.1-py2.py3-none-any.whl", hash = "sha256:07c06493f1403c1380b630ae3dcbe5ae62abcf369a93bbc052502279f189ab8c"}, 708 | {file = "tqdm-4.46.1.tar.gz", hash = "sha256:cd140979c2bebd2311dfb14781d8f19bd5a9debb92dcab9f6ef899c987fcf71f"}, 709 | ] 710 | typed-ast = [ 711 | {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, 712 | {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, 713 | {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, 714 | {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, 715 | {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, 716 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, 717 | {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, 718 | {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, 719 | {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, 720 | {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, 721 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, 722 | {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, 723 | {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, 724 | {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, 725 | {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, 726 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, 727 | {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, 728 | {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, 729 | {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, 730 | {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, 731 | {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, 732 | ] 733 | typing = [ 734 | {file = "typing-3.7.4.1-py2-none-any.whl", hash = "sha256:c8cabb5ab8945cd2f54917be357d134db9cc1eb039e59d1606dc1e60cb1d9d36"}, 735 | {file = "typing-3.7.4.1-py3-none-any.whl", hash = "sha256:f38d83c5a7a7086543a0f649564d661859c5146a85775ab90c0d2f93ffaa9714"}, 736 | {file = "typing-3.7.4.1.tar.gz", hash = "sha256:91dfe6f3f706ee8cc32d38edbbf304e9b7583fb37108fef38229617f8b3eba23"}, 737 | ] 738 | typing-extensions = [ 739 | {file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"}, 740 | {file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"}, 741 | {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"}, 742 | ] 743 | wcwidth = [ 744 | {file = "wcwidth-0.2.4-py2.py3-none-any.whl", hash = "sha256:79375666b9954d4a1a10739315816324c3e73110af9d0e102d906fdb0aec009f"}, 745 | {file = "wcwidth-0.2.4.tar.gz", hash = "sha256:8c6b5b6ee1360b842645f336d9e5d68c55817c26d3050f46b235ef2bc650e48f"}, 746 | ] 747 | wrapt = [ 748 | {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, 749 | ] 750 | yarl = [ 751 | {file = "yarl-1.4.2-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:3ce3d4f7c6b69c4e4f0704b32eca8123b9c58ae91af740481aa57d7857b5e41b"}, 752 | {file = "yarl-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:a4844ebb2be14768f7994f2017f70aca39d658a96c786211be5ddbe1c68794c1"}, 753 | {file = "yarl-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080"}, 754 | {file = "yarl-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:c2b509ac3d4b988ae8769901c66345425e361d518aecbe4acbfc2567e416626a"}, 755 | {file = "yarl-1.4.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:308b98b0c8cd1dfef1a0311dc5e38ae8f9b58349226aa0533f15a16717ad702f"}, 756 | {file = "yarl-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:944494be42fa630134bf907714d40207e646fd5a94423c90d5b514f7b0713fea"}, 757 | {file = "yarl-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:5b10eb0e7f044cf0b035112446b26a3a2946bca9d7d7edb5e54a2ad2f6652abb"}, 758 | {file = "yarl-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a161de7e50224e8e3de6e184707476b5a989037dcb24292b391a3d66ff158e70"}, 759 | {file = "yarl-1.4.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:26d7c90cb04dee1665282a5d1a998defc1a9e012fdca0f33396f81508f49696d"}, 760 | {file = "yarl-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0c2ab325d33f1b824734b3ef51d4d54a54e0e7a23d13b86974507602334c2cce"}, 761 | {file = "yarl-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2"}, 762 | {file = "yarl-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:2098a4b4b9d75ee352807a95cdf5f10180db903bc5b7270715c6bbe2551f64ce"}, 763 | {file = "yarl-1.4.2-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c9959d49a77b0e07559e579f38b2f3711c2b8716b8410b320bf9713013215a1b"}, 764 | {file = "yarl-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:25e66e5e2007c7a39541ca13b559cd8ebc2ad8fe00ea94a2aad28a9b1e44e5ae"}, 765 | {file = "yarl-1.4.2-cp38-cp38-win32.whl", hash = "sha256:6faa19d3824c21bcbfdfce5171e193c8b4ddafdf0ac3f129ccf0cdfcb083e462"}, 766 | {file = "yarl-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:0ca2f395591bbd85ddd50a82eb1fde9c1066fafe888c5c7cc1d810cf03fd3cc6"}, 767 | {file = "yarl-1.4.2.tar.gz", hash = "sha256:58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b"}, 768 | ] 769 | zipp = [ 770 | {file = "zipp-3.1.0-py3-none-any.whl", hash = "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b"}, 771 | {file = "zipp-3.1.0.tar.gz", hash = "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"}, 772 | ] 773 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "deepcode" 3 | version = "1.0.18" 4 | description = "A client for DeepCode API (code analysis made by AI)" 5 | authors = ["Deepcode ", "Arvid Paeglit "] 6 | license = "MIT" 7 | readme = "README.md" 8 | homepage = "https://www.deepcode.ai" 9 | repository = "https://github.com/DeepCodeAI/cli" 10 | classifiers = [ 11 | "Development Status :: 5 - Production/Stable", 12 | "Intended Audience :: Developers", 13 | "Topic :: Software Development :: Quality Assurance", 14 | "Programming Language :: Python :: 3", 15 | "License :: OSI Approved :: MIT License", 16 | "Operating System :: OS Independent", 17 | ] 18 | 19 | [tool.poetry.urls] 20 | "Bug Tracker" = "https://github.com/DeepCodeAI/cli/issues" 21 | 22 | [tool.poetry.dependencies] 23 | python = "^3.6 | ^3.7 | ^3.8" 24 | funcy = "^1.14" 25 | aiohttp = "^3.6.2" 26 | cchardet = "^2.1.5" 27 | click = "^7.0" 28 | click-option-group = "^0.2.3" 29 | tqdm = "^4.42.1" 30 | aiodns = "^2.0.0" 31 | 32 | [tool.poetry.dev-dependencies] 33 | pytest = "^5.2" 34 | pylint = "^2.4.4" 35 | pytest-asyncio = "^0.10.0" 36 | 37 | [tool.poetry.scripts] 38 | deepcode = 'deepcode.cli:main' 39 | 40 | [build-system] 41 | requires = ["poetry>=0.12"] 42 | build-backend = "poetry.masonry.api" 43 | -------------------------------------------------------------------------------- /tests/.dcignore: -------------------------------------------------------------------------------- 1 | sample-repo 2 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DeepCodeAI/cli/60f6f603e73f79c3fd38ffc11c8e67143627a677/tests/__init__.py -------------------------------------------------------------------------------- /tests/sample-repo/.dcignore: -------------------------------------------------------------------------------- 1 | models 2 | **/controllers 3 | -------------------------------------------------------------------------------- /tests/sample-repo/.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | npm-debug.log 3 | Dockerfile 4 | .dockerignore 5 | .git 6 | README.md 7 | .gitignore 8 | .env 9 | 10 | -------------------------------------------------------------------------------- /tests/sample-repo/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["eslint:recommended"], 3 | "env": { 4 | "es6": true, 5 | "node": true 6 | }, 7 | "rules": { 8 | "arrow-parens": ["error", "required"], 9 | "object-curly-spacing": "warn" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /tests/sample-repo/AnnotatorTest.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | int main() { 4 | ReffedClientGraph* to_unref = nullptr; 5 | if (to_unref != nullptr) {} 6 | 7 | string fname = "fff"; 8 | std::fstream f; 9 | f.open(fname.c_str(), std::fstream::out); 10 | f << "WhiteList:\n"; 11 | } 12 | -------------------------------------------------------------------------------- /tests/sample-repo/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:10-alpine 2 | 3 | # Packaged dependencies 4 | RUN apk add --no-cache \ 5 | --repository http://dl-cdn.alpinelinux.org/alpine/edge/testing \ 6 | --repository http://dl-cdn.alpinelinux.org/alpine/edge/main \ 7 | bash 8 | 9 | # The /wait command is to wait for an image to load before running the next command 10 | ADD https://github.com/ufoscout/docker-compose-wait/releases/download/2.7.2/wait /wait 11 | RUN chmod +x /wait 12 | 13 | RUN mkdir -p /home/node/node_modules && mkdir -p /home/node/app && chown -R node:node /home/node 14 | ENV NPM_CONFIG_PREFIX=/home/node/node_modules 15 | ENV PATH="/home/node/node_modules/.bin:${PATH}" 16 | 17 | WORKDIR /home/node/app 18 | 19 | COPY package*.json ./ 20 | 21 | USER node 22 | 23 | RUN npm install 24 | 25 | COPY --chown=node:node . . 26 | 27 | EXPOSE 8080 28 | 29 | # CMD [ "node", "app.js" ] 30 | CMD /wait && node ./app.js 31 | -------------------------------------------------------------------------------- /tests/sample-repo/GitHubAccessTokenScrambler12.java: -------------------------------------------------------------------------------- 1 | 2 | 3 | import org.apache.commons.codec.binary.Base64; 4 | 5 | import javax.crypto.Cipher; 6 | import javax.crypto.spec.IvParameterSpec; 7 | import javax.crypto.spec.SecretKeySpec; 8 | 9 | public class GitHubAccessTokenScrambler12 { 10 | static final String myInitVector = "RandomInitVector"; 11 | static final String myKey = "GitHubErrorToken"; 12 | 13 | static String encrypt(String value) { 14 | try { 15 | IvParameterSpec iv = new IvParameterSpec(myInitVector.getBytes("UTF-8")); 16 | SecretKeySpec keySpec = new SecretKeySpec(myKey.getBytes("UTF-8"), "AES"); 17 | 18 | Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5PADDING"); 19 | cipher.init(Cipher.ENCRYPT_MODE, keySpec, iv); 20 | 21 | byte[] encrypted = cipher.doFinal(value.getBytes()); 22 | return Base64.encodeBase64String(encrypted); 23 | } catch (Exception ex) { 24 | ex.printStackTrace(); 25 | } 26 | return null; 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /tests/sample-repo/README.md: -------------------------------------------------------------------------------- 1 | Project code for tutorial on how to develop a Node.js/MongoDB application using Docker Compose: https://www.digitalocean.com/community/tutorials/containerizing-a-node-js-application-for-development-with-docker-compose 2 | -------------------------------------------------------------------------------- /tests/sample-repo/app.js: -------------------------------------------------------------------------------- 1 | const express = require('express') 2 | 3 | const app = express(); 4 | const router = express.Router() 5 | let db = require('./db'); 6 | const sharks = require('./routes/sharks'); 7 | 8 | const path = __dirname + '/views/'; 9 | const port = process.env.PORT || 8080; 10 | 11 | app.engine('html', require('ejs').renderFile); 12 | app.set('view engine', 'html'); 13 | app.use(express.urlencoded({ extended: true })); 14 | app.use(express.static(path)); 15 | app.use('/sharks', sharks); 16 | 17 | app.listen(port, function () { 18 | console.log(`Example app listening on ${port}!`) 19 | }) 20 | -------------------------------------------------------------------------------- /tests/sample-repo/controllers/sharks.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const Shark = require('../models/sharks'); 3 | 4 | exports.index = function (req, res) { 5 | res.sendFile(path.resolve('views/sharks.html')); 6 | }; 7 | 8 | exports.create = function (req, res) { 9 | var newShark = new Shark(req.body); 10 | console.log(req.body); 11 | newShark.save(function (err) { 12 | if(err) { 13 | res.status(400).send('Unable to save shark to database'); 14 | } else { 15 | res.redirect('/sharks/getshark'); 16 | } 17 | }); 18 | }; 19 | 20 | exports.list = function (req, res) { 21 | Shark.find({}).exec(function (err, sharks) { 22 | if (err) { 23 | return res.send(500, err); 24 | } 25 | res.render('getshark', { 26 | sharks: sharks 27 | }); 28 | }); 29 | }; 30 | -------------------------------------------------------------------------------- /tests/sample-repo/db.js: -------------------------------------------------------------------------------- 1 | const mongoose = require('mongoose'); 2 | 3 | const { 4 | MONGO_USERNAME, 5 | MONGO_PASSWORD, 6 | MONGO_HOSTNAME, 7 | MONGO_PORT, 8 | MONGO_DB 9 | } = process.env; 10 | 11 | const options = { 12 | useNewUrlParser: true, 13 | reconnectTries: Number.MAX_VALUE, 14 | reconnectInterval: 500, 15 | connectTimeoutMS: 10000, 16 | } 17 | 18 | const url = `mongodb://${MONGO_USERNAME}:${MONGO_PASSWORD}@${MONGO_HOSTNAME}:${MONGO_PORT}/${MONGO_DB}?authSource=admin` 19 | 20 | mongoose.connect(url, options).then( function() { 21 | console.log('MongoDB is connected'); 22 | }).catch( err => { 23 | console.log(err); 24 | }); 25 | -------------------------------------------------------------------------------- /tests/sample-repo/main.js: -------------------------------------------------------------------------------- 1 | // This file is part of Natural Docs, which is Copyright � 2003-2010 Greg Valure 2 | // Natural Docs is licensed under version 3 of the GNU Affero General Public License (AGPL) 3 | // Refer to License.txt for the complete details 4 | 5 | // This file may be distributed with documentation files generated by Natural Docs. 6 | // Such documentation is not covered by Natural Docs' copyright and licensing, 7 | // and may have its own copyright and distribution terms as decided by its author. 8 | 9 | // 10 | // Browser Styles 11 | // ____________________________________________________________________________ 12 | 13 | var agt = navigator.userAgent.toLowerCase(); 14 | var browserType; 15 | var browserVer; 16 | 17 | if (agt.indexOf("opera") != -1) { 18 | browserType = "Opera"; 19 | 20 | if (agt.indexOf("opera 7") != -1 || agt.indexOf("opera/7") != -1) { 21 | browserVer = "Opera7"; 22 | } else if (agt.indexOf("opera 8") != -1 || agt.indexOf("opera/8") != -1) { 23 | browserVer = "Opera8"; 24 | } else if (agt.indexOf("opera 9") != -1 || agt.indexOf("opera/9") != -1) { 25 | browserVer = "Opera9"; 26 | } 27 | } else if (agt.indexOf("applewebkit") != -1) { 28 | browserType = "Safari"; 29 | 30 | if (agt.indexOf("version/3") != -1) { 31 | browserVer = "Safari3"; 32 | } else if (agt.indexOf("safari/4") != -1) { 33 | browserVer = "Safari2"; 34 | } 35 | } else if (agt.indexOf("khtml") != -1) { 36 | browserType = "Konqueror"; 37 | } else if (agt.indexOf("msie") != -1) { 38 | browserType = "IE"; 39 | 40 | if (agt.indexOf("msie 6") != -1) { 41 | browserVer = "IE6"; 42 | } else if (agt.indexOf("msie 7") != -1) { 43 | browserVer = "IE7"; 44 | } 45 | } else if (agt.indexOf("gecko") != -1) { 46 | browserType = "Firefox"; 47 | 48 | if (agt.indexOf("rv:1.7") != -1) { 49 | browserVer = "Firefox1"; 50 | } else if (agt.indexOf("rv:1.8)") != -1 || agt.indexOf("rv:1.8.0") != -1) { 51 | browserVer = "Firefox15"; 52 | } else if (agt.indexOf("rv:1.8.1") != -1) { 53 | browserVer = "Firefox2"; 54 | } 55 | } 56 | 57 | // 58 | // Support Functions 59 | // ____________________________________________________________________________ 60 | 61 | function GetXPosition(item) { 62 | var position = 0; 63 | 64 | if (item.offsetWidth != null) { 65 | while (item != document.body && item != null) { 66 | position += item.offsetLeft; 67 | item = item.offsetParent; 68 | } 69 | } 70 | 71 | return position; 72 | } 73 | 74 | function GetYPosition(item) { 75 | var position = 0; 76 | 77 | if (item.offsetWidth != null) { 78 | while (item != document.body && item != null) { 79 | position += item.offsetTop; 80 | item = item.offsetParent; 81 | } 82 | } 83 | 84 | return position; 85 | } 86 | 87 | function MoveToPosition(item, x, y) { 88 | // Opera 5 chokes on the px extension, so it can use the Microsoft one instead. 89 | 90 | if (item.style.left != null) { 91 | item.style.left = x + "px"; 92 | item.style.top = y + "px"; 93 | } else if (item.style.pixelLeft != null) { 94 | item.style.pixelLeft = x; 95 | item.style.pixelTop = y; 96 | } 97 | } 98 | 99 | // 100 | // Menu 101 | // ____________________________________________________________________________ 102 | 103 | function ToggleMenu(id) { 104 | if (!window.document.getElementById) { 105 | return; 106 | } 107 | 108 | var display = window.document.getElementById(id).style.display; 109 | 110 | if (display == "none") { 111 | display = "block"; 112 | } else { 113 | display = "none"; 114 | } 115 | 116 | window.document.getElementById(id).style.display = display; 117 | } 118 | 119 | function HideAllBut(ids, max) { 120 | if (document.getElementById) { 121 | ids.sort(function(a, b) { 122 | return a - b; 123 | }); 124 | var number = 1; 125 | 126 | while (number < max) { 127 | if (ids.length > 0 && number == ids[0]) { 128 | ids.shift(); 129 | } else { 130 | document.getElementById("MGroupContent" + number).style.display = 131 | "none"; 132 | } 133 | 134 | number++; 135 | } 136 | } 137 | } 138 | 139 | // 140 | // Tooltips 141 | // ____________________________________________________________________________ 142 | 143 | var tooltipTimer = 0; 144 | 145 | function ShowTip(event, tooltipID, linkID) { 146 | if (tooltipTimer) { 147 | clearTimeout(tooltipTimer); 148 | } 149 | 150 | var docX = event.clientX + window.pageXOffset; 151 | var docY = event.clientY + window.pageYOffset; 152 | 153 | var showCommand = 154 | "ReallyShowTip('" + 155 | tooltipID + 156 | "', '" + 157 | linkID + 158 | "', " + 159 | docX + 160 | ", " + 161 | docY + 162 | ")"; 163 | 164 | tooltipTimer = setTimeout(showCommand, 1000); 165 | } 166 | 167 | function ReallyShowTip(tooltipID, linkID, docX, docY) { 168 | tooltipTimer = 0; 169 | 170 | var tooltip; 171 | var link; 172 | 173 | if (document.getElementById) { 174 | tooltip = document.getElementById(tooltipID); 175 | link = document.getElementById(linkID); 176 | } 177 | /* else if (document.all) 178 | { 179 | tooltip = eval("document.all['" + tooltipID + "']"); 180 | link = eval("document.all['" + linkID + "']"); 181 | } 182 | */ 183 | if (tooltip) { 184 | var left = GetXPosition(link); 185 | var top = GetYPosition(link); 186 | top += link.offsetHeight; 187 | 188 | // The fallback method is to use the mouse X and Y relative to the document. We use a separate if and test if its a number 189 | // in case some browser snuck through the above if statement but didn't support everything. 190 | 191 | if (!isFinite(top) || top == 0) { 192 | left = docX; 193 | top = docY; 194 | } 195 | 196 | // Some spacing to get it out from under the cursor. 197 | 198 | top += 10; 199 | 200 | // Make sure the tooltip doesnt get smushed by being too close to the edge, or in some browsers, go off the edge of the 201 | // page. We do it here because Konqueror does get offsetWidth right even if it doesnt get the positioning right. 202 | 203 | if (tooltip.offsetWidth != null) { 204 | var width = tooltip.offsetWidth; 205 | var docWidth = document.body.clientWidth; 206 | 207 | if (left + width > docWidth) { 208 | left = docWidth - width - 1; 209 | } 210 | 211 | // If there's a horizontal scroll bar we could go past zero because it's using the page width, not the window width. 212 | if (left < 0) { 213 | left = 0; 214 | } 215 | } 216 | 217 | MoveToPosition(tooltip, left, top); 218 | tooltip.style.visibility = "visible"; 219 | } 220 | } 221 | 222 | function HideTip(tooltipID) { 223 | if (tooltipTimer) { 224 | clearTimeout(tooltipTimer); 225 | tooltipTimer = 0; 226 | } 227 | 228 | var tooltip; 229 | 230 | if (document.getElementById) { 231 | tooltip = document.getElementById(tooltipID); 232 | } else if (document.all) { 233 | tooltip = eval("document.all['" + tooltipID + "']"); 234 | } 235 | 236 | if (tooltip) { 237 | tooltip.style.visibility = "hidden"; 238 | } 239 | } 240 | 241 | // 242 | // Blockquote fix for IE 243 | // ____________________________________________________________________________ 244 | 245 | function NDOnLoad() { 246 | if (browserVer == "IE6") { 247 | var scrollboxes = document.getElementsByTagName("blockquote"); 248 | 249 | if (scrollboxes.item(0)) { 250 | NDDoResize(); 251 | window.onresize = NDOnResize; 252 | } 253 | } 254 | } 255 | 256 | var resizeTimer = 0; 257 | 258 | function NDOnResize() { 259 | if (resizeTimer != 0) { 260 | clearTimeout(resizeTimer); 261 | } 262 | 263 | resizeTimer = setTimeout(NDDoResize, 250); 264 | } 265 | 266 | function NDDoResize() { 267 | var scrollboxes = document.getElementsByTagName("blockquote"); 268 | 269 | var i; 270 | var item; 271 | 272 | i = 0; 273 | while ((item = scrollboxes.item(i))) { 274 | item.style.width = 100; 275 | i++; 276 | } 277 | 278 | i = 0; 279 | while ((item = scrollboxes.item(i))) { 280 | item.style.width = item.parentNode.offsetWidth; 281 | i++; 282 | } 283 | 284 | clearTimeout(resizeTimer); 285 | resizeTimer = 0; 286 | } 287 | 288 | /* ________________________________________________________________________________________________________ 289 | 290 | Class: SearchPanel 291 | ________________________________________________________________________________________________________ 292 | 293 | A class handling everything associated with the search panel. 294 | 295 | Parameters: 296 | 297 | name - The name of the global variable that will be storing this instance. Is needed to be able to set timeouts. 298 | mode - The mode the search is going to work in. Pass CommandLineOption()>, so the 299 | value will be something like "HTML" or "FramedHTML". 300 | 301 | ________________________________________________________________________________________________________ 302 | */ 303 | 304 | function SearchPanel(name, mode, resultsPath) { 305 | if (!name || !mode || !resultsPath) { 306 | alert("Incorrect parameters to SearchPanel."); 307 | } 308 | 309 | // Group: Variables 310 | // ________________________________________________________________________ 311 | 312 | /* 313 | var: name 314 | The name of the global variable that will be storing this instance of the class. 315 | */ 316 | this.name = name; 317 | 318 | /* 319 | var: mode 320 | The mode the search is going to work in, such as "HTML" or "FramedHTML". 321 | */ 322 | this.mode = mode; 323 | 324 | /* 325 | var: resultsPath 326 | The relative path from the current HTML page to the results page directory. 327 | */ 328 | this.resultsPath = resultsPath; 329 | 330 | /* 331 | var: keyTimeout 332 | The timeout used between a keystroke and when a search is performed. 333 | */ 334 | this.keyTimeout = 0; 335 | 336 | /* 337 | var: keyTimeoutLength 338 | The length of in thousandths of a second. 339 | */ 340 | this.keyTimeoutLength = 500; 341 | 342 | /* 343 | var: lastSearchValue 344 | The last search string executed, or an empty string if none. 345 | */ 346 | this.lastSearchValue = ""; 347 | 348 | /* 349 | var: lastResultsPage 350 | The last results page. The value is only relevant if is set. 351 | */ 352 | this.lastResultsPage = ""; 353 | 354 | /* 355 | var: deactivateTimeout 356 | 357 | The timeout used between when a control is deactivated and when the entire panel is deactivated. Is necessary 358 | because a control may be deactivated in favor of another control in the same panel, in which case it should stay 359 | active. 360 | */ 361 | this.deactivateTimout = 0; 362 | 363 | /* 364 | var: deactivateTimeoutLength 365 | The length of in thousandths of a second. 366 | */ 367 | this.deactivateTimeoutLength = 200; 368 | 369 | // Group: DOM Elements 370 | // ________________________________________________________________________ 371 | 372 | // Function: DOMSearchField 373 | this.DOMSearchField = function() { 374 | return document.getElementById("MSearchField"); 375 | }; 376 | 377 | // Function: DOMSearchType 378 | this.DOMSearchType = function() { 379 | return document.getElementById("MSearchType"); 380 | }; 381 | 382 | // Function: DOMPopupSearchResults 383 | this.DOMPopupSearchResults = function() { 384 | return document.getElementById("MSearchResults"); 385 | }; 386 | 387 | // Function: DOMPopupSearchResultsWindow 388 | this.DOMPopupSearchResultsWindow = function() { 389 | return document.getElementById("MSearchResultsWindow"); 390 | }; 391 | 392 | // Function: DOMSearchPanel 393 | this.DOMSearchPanel = function() { 394 | return document.getElementById("MSearchPanel"); 395 | }; 396 | 397 | // Group: Event Handlers 398 | // ________________________________________________________________________ 399 | 400 | /* 401 | Function: OnSearchFieldFocus 402 | Called when focus is added or removed from the search field. 403 | */ 404 | this.OnSearchFieldFocus = function(isActive) { 405 | this.Activate(isActive); 406 | }; 407 | 408 | /* 409 | Function: OnSearchFieldChange 410 | Called when the content of the search field is changed. 411 | */ 412 | this.OnSearchFieldChange = function() { 413 | if (this.keyTimeout) { 414 | clearTimeout(this.keyTimeout); 415 | this.keyTimeout = 0; 416 | } 417 | 418 | var searchValue = this.DOMSearchField().value.replace(/ +/g, ""); 419 | 420 | if (searchValue != this.lastSearchValue) { 421 | if (searchValue != "") { 422 | this.keyTimeout = setTimeout( 423 | this.name + ".Search()", 424 | this.keyTimeoutLength 425 | ); 426 | } else { 427 | if (this.mode == "HTML") { 428 | this.DOMPopupSearchResultsWindow().style.display = "none"; 429 | } 430 | this.lastSearchValue = ""; 431 | } 432 | } 433 | }; 434 | 435 | /* 436 | Function: OnSearchTypeFocus 437 | Called when focus is added or removed from the search type. 438 | */ 439 | this.OnSearchTypeFocus = function(isActive) { 440 | this.Activate(isActive); 441 | }; 442 | 443 | /* 444 | Function: OnSearchTypeChange 445 | Called when the search type is changed. 446 | */ 447 | this.OnSearchTypeChange = function() { 448 | var searchValue = this.DOMSearchField().value.replace(/ +/g, ""); 449 | 450 | if (searchValue != "") { 451 | this.Search(); 452 | } 453 | }; 454 | 455 | // Group: Action Functions 456 | // ________________________________________________________________________ 457 | 458 | /* 459 | Function: CloseResultsWindow 460 | Closes the results window. 461 | */ 462 | this.CloseResultsWindow = function() { 463 | this.DOMPopupSearchResultsWindow().style.display = "none"; 464 | this.Activate(false, true); 465 | }; 466 | 467 | /* 468 | Function: Search 469 | Performs a search. 470 | */ 471 | this.Search = function() { 472 | this.keyTimeout = 0; 473 | 474 | var searchValue = this.DOMSearchField().value.replace(/^ +/, ""); 475 | var searchTopic = this.DOMSearchType().value; 476 | 477 | var pageExtension = searchValue.substr(0, 1); 478 | 479 | if (pageExtension.match(/^[a-z]/i)) { 480 | pageExtension = pageExtension.toUpperCase(); 481 | } else if (pageExtension.match(/^[0-9]/)) { 482 | pageExtension = "Numbers"; 483 | } else { 484 | pageExtension = "Symbols"; 485 | } 486 | 487 | var resultsPage; 488 | var resultsPageWithSearch; 489 | var hasResultsPage; 490 | 491 | // indexSectionsWithContent is defined in searchdata.js 492 | if (indexSectionsWithContent[searchTopic][pageExtension] == true) { 493 | resultsPage = 494 | this.resultsPath + "/" + searchTopic + pageExtension + ".html"; 495 | resultsPageWithSearch = resultsPage + "?" + escape(searchValue); 496 | hasResultsPage = true; 497 | } else { 498 | resultsPage = this.resultsPath + "/NoResults.html"; 499 | resultsPageWithSearch = resultsPage; 500 | hasResultsPage = false; 501 | } 502 | 503 | var resultsFrame; 504 | if (this.mode == "HTML") { 505 | resultsFrame = window.frames.MSearchResults; 506 | } else if (this.mode == "FramedHTML") { 507 | resultsFrame = window.top.frames["Content"]; 508 | } 509 | 510 | if ( 511 | resultsPage != this.lastResultsPage || 512 | // Bug in IE. If everything becomes hidden in a run, none of them will be able to be reshown in the next for some 513 | // reason. It counts the right number of results, and you can even read the display as "block" after setting it, but it 514 | // just doesn't work in IE 6 or IE 7. So if we're on the right page but the previous search had no results, reload the 515 | // page anyway to get around the bug. 516 | (browserType == "IE" && 517 | hasResultsPage && 518 | (!resultsFrame.searchResults || 519 | resultsFrame.searchResults.lastMatchCount == 0)) 520 | ) { 521 | resultsFrame.location.href = resultsPageWithSearch; 522 | } 523 | 524 | // So if the results page is right and there's no IE bug, reperform the search on the existing page. We have to check if there 525 | // are results because NoResults.html doesn't have any JavaScript, and it would be useless to do anything on that page even 526 | // if it did. 527 | else if (hasResultsPage) { 528 | // We need to check if this exists in case the frame is present but didn't finish loading. 529 | if (resultsFrame.searchResults) { 530 | resultsFrame.searchResults.Search(searchValue); 531 | } 532 | 533 | // Otherwise just reload instead of waiting. 534 | else { 535 | resultsFrame.location.href = resultsPageWithSearch; 536 | } 537 | } 538 | 539 | var domPopupSearchResultsWindow = this.DOMPopupSearchResultsWindow(); 540 | 541 | if ( 542 | this.mode == "HTML" && 543 | domPopupSearchResultsWindow.style.display != "block" 544 | ) { 545 | var domSearchType = this.DOMSearchType(); 546 | 547 | var left = GetXPosition(domSearchType); 548 | var top = GetYPosition(domSearchType) + domSearchType.offsetHeight; 549 | 550 | MoveToPosition(domPopupSearchResultsWindow, left, top); 551 | domPopupSearchResultsWindow.style.display = "block"; 552 | } 553 | 554 | this.lastSearchValue = searchValue; 555 | this.lastResultsPage = resultsPage; 556 | }; 557 | 558 | // Group: Activation Functions 559 | // Functions that handle whether the entire panel is active or not. 560 | // ________________________________________________________________________ 561 | 562 | /* 563 | Function: Activate 564 | 565 | Activates or deactivates the search panel, resetting things to their default values if necessary. You can call this on every 566 | control's OnBlur() and it will handle not deactivating the entire panel when focus is just switching between them transparently. 567 | 568 | Parameters: 569 | 570 | isActive - Whether you're activating or deactivating the panel. 571 | ignoreDeactivateDelay - Set if you're positive the action will deactivate the panel and thus want to skip the delay. 572 | */ 573 | this.Activate = function(isActive, ignoreDeactivateDelay) { 574 | // We want to ignore isActive being false while the results window is open. 575 | if ( 576 | isActive || 577 | (this.mode == "HTML" && 578 | this.DOMPopupSearchResultsWindow().style.display == "block") 579 | ) { 580 | if (this.inactivateTimeout) { 581 | clearTimeout(this.inactivateTimeout); 582 | this.inactivateTimeout = 0; 583 | } 584 | 585 | this.DOMSearchPanel().className = "MSearchPanelActive"; 586 | 587 | var searchField = this.DOMSearchField(); 588 | 589 | if (searchField.value == "Search") { 590 | searchField.value = ""; 591 | } 592 | } else if (!ignoreDeactivateDelay) { 593 | this.inactivateTimeout = setTimeout( 594 | this.name + ".InactivateAfterTimeout()", 595 | this.inactivateTimeoutLength 596 | ); 597 | } else { 598 | this.InactivateAfterTimeout(); 599 | } 600 | }; 601 | 602 | /* 603 | Function: InactivateAfterTimeout 604 | 605 | Called by , which is set by . Inactivation occurs on a timeout because a control may 606 | receive OnBlur() when focus is really transferring to another control in the search panel. In this case we don't want to 607 | actually deactivate the panel because not only would that cause a visible flicker but it could also reset the search value. 608 | So by doing it on a timeout instead, there's a short period where the second control's OnFocus() can cancel the deactivation. 609 | */ 610 | this.InactivateAfterTimeout = function() { 611 | this.inactivateTimeout = 0; 612 | 613 | this.DOMSearchPanel().className = "MSearchPanelInactive"; 614 | this.DOMSearchField().value = "Search"; 615 | 616 | this.lastSearchValue = ""; 617 | this.lastResultsPage = ""; 618 | }; 619 | } 620 | 621 | /* ________________________________________________________________________________________________________ 622 | 623 | Class: SearchResults 624 | _________________________________________________________________________________________________________ 625 | 626 | The class that handles everything on the search results page. 627 | _________________________________________________________________________________________________________ 628 | */ 629 | 630 | function SearchResults(name, mode) { 631 | /* 632 | var: mode 633 | The mode the search is going to work in, such as "HTML" or "FramedHTML". 634 | */ 635 | this.mode = mode; 636 | 637 | /* 638 | var: lastMatchCount 639 | The number of matches from the last run of . 640 | */ 641 | this.lastMatchCount = 0; 642 | 643 | /* 644 | Function: Toggle 645 | Toggles the visibility of the passed element ID. 646 | */ 647 | this.Toggle = function(id) { 648 | if (this.mode == "FramedHTML") { 649 | return; 650 | } 651 | 652 | var parentElement = document.getElementById(id); 653 | 654 | var element = parentElement.firstChild; 655 | 656 | while (element && element != parentElement) { 657 | if (element.nodeName == "DIV" && element.className == "ISubIndex") { 658 | if (element.style.display == "block") { 659 | element.style.display = "none"; 660 | } else { 661 | element.style.display = "block"; 662 | } 663 | } 664 | 665 | if (element.nodeName == "DIV" && element.hasChildNodes()) { 666 | element = element.firstChild; 667 | } else if (element.nextSibling) { 668 | element = element.nextSibling; 669 | } else { 670 | do { 671 | element = element.parentNode; 672 | } while (element && element != parentElement && !element.nextSibling); 673 | 674 | if (element && element != parentElement) { 675 | element = element.nextSibling; 676 | } 677 | } 678 | } 679 | }; 680 | 681 | /* 682 | Function: Search 683 | 684 | Searches for the passed string. If there is no parameter, it takes it from the URL query. 685 | 686 | Always returns true, since other documents may try to call it and that may or may not be possible. 687 | */ 688 | this.Search = function(search) { 689 | if (!search) { 690 | search = window.location.search; 691 | search = search.substring(1); // Remove the leading ? 692 | search = unescape(search); 693 | } 694 | 695 | search = search.replace(/^ +/, ""); 696 | search = search.replace(/ +$/, ""); 697 | search = search.toLowerCase(); 698 | 699 | if (search.match(/[^a-z0-9]/)) { 700 | // Just a little speedup so it doesn't have to go through the below unnecessarily. 701 | search = search.replace(/\_/g, "_und"); 702 | search = search.replace(/\ +/gi, "_spc"); 703 | search = search.replace(/\~/g, "_til"); 704 | search = search.replace(/\!/g, "_exc"); 705 | search = search.replace(/\@/g, "_att"); 706 | search = search.replace(/\#/g, "_num"); 707 | search = search.replace(/\$/g, "_dol"); 708 | search = search.replace(/\%/g, "_pct"); 709 | search = search.replace(/\^/g, "_car"); 710 | search = search.replace(/\&/g, "_amp"); 711 | search = search.replace(/\*/g, "_ast"); 712 | search = search.replace(/\(/g, "_lpa"); 713 | search = search.replace(/\)/g, "_rpa"); 714 | search = search.replace(/\-/g, "_min"); 715 | search = search.replace(/\+/g, "_plu"); 716 | search = search.replace(/\=/g, "_equ"); 717 | search = search.replace(/\{/g, "_lbc"); 718 | search = search.replace(/\}/g, "_rbc"); 719 | search = search.replace(/\[/g, "_lbk"); 720 | search = search.replace(/\]/g, "_rbk"); 721 | search = search.replace(/\:/g, "_col"); 722 | search = search.replace(/\;/g, "_sco"); 723 | search = search.replace(/\"/g, "_quo"); 724 | search = search.replace(/\'/g, "_apo"); 725 | search = search.replace(/\/g, "_ran"); 727 | search = search.replace(/\,/g, "_com"); 728 | search = search.replace(/\./g, "_per"); 729 | search = search.replace(/\?/g, "_que"); 730 | search = search.replace(/\//g, "_sla"); 731 | search = search.replace(/[^a-z0-9\_]i/gi, "_zzz"); 732 | } 733 | 734 | var resultRows = document.getElementsByTagName("div"); 735 | var matches = 0; 736 | 737 | var i = 0; 738 | while (i < resultRows.length) { 739 | var row = resultRows.item(i); 740 | 741 | if (row.className == "SRResult") { 742 | var rowMatchName = row.id.toLowerCase(); 743 | rowMatchName = rowMatchName.replace(/^sr\d*_/, ""); 744 | 745 | if ( 746 | search.length <= rowMatchName.length && 747 | rowMatchName.substr(0, search.length) == search 748 | ) { 749 | row.style.display = "block"; 750 | matches++; 751 | } else { 752 | row.style.display = "none"; 753 | } 754 | } 755 | 756 | i++; 757 | } 758 | 759 | document.getElementById("Searching").style.display = "none"; 760 | 761 | if (matches == 0) { 762 | document.getElementById("NoMatches").style.display = "block"; 763 | } else { 764 | document.getElementById("NoMatches").style.display = "none"; 765 | } 766 | 767 | this.lastMatchCount = matches; 768 | 769 | return true; 770 | }; 771 | } 772 | -------------------------------------------------------------------------------- /tests/sample-repo/models/sharks.js: -------------------------------------------------------------------------------- 1 | const mongoose = require('mongoose'); 2 | const Schema = mongoose.Schema; 3 | 4 | const Shark = new Schema ({ 5 | name: { type: String, required: true }, 6 | character: { type: String, required: true }, 7 | }); 8 | 9 | module.exports = mongoose.model('Shark', Shark) 10 | -------------------------------------------------------------------------------- /tests/sample-repo/routes/index.js: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | const router = express.Router(); 3 | const path = require('path'); 4 | 5 | router.use (function (req,res,next) { 6 | console.log('/' + req.method); 7 | next(); 8 | }); 9 | 10 | router.get('/',function(req,res){ 11 | res.sendFile(path.resolve('views/index.html')); 12 | }); 13 | 14 | module.exports = router; 15 | -------------------------------------------------------------------------------- /tests/sample-repo/routes/sharks.js: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | const router = express.Router(); 3 | const shark = require('../controllers/sharks'); 4 | 5 | router.get('/', function(req, res){ 6 | shark.index(req,res); 7 | }); 8 | 9 | router.post('/addshark', function(req, res) { 10 | shark.create(req,res); 11 | }); 12 | 13 | router.get('/getshark', function(req, res) { 14 | shark.list(req,res); 15 | }); 16 | 17 | module.exports = router; 18 | -------------------------------------------------------------------------------- /tests/test_deepcode.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | import pytest 3 | import aiohttp 4 | 5 | from deepcode.files import get_file_meta, collect_bundle_files, prepare_bundle_hashes 6 | from deepcode.bundle import get_filters, generate_bundle, create_git_bundle 7 | from deepcode.analysis import get_analysis 8 | from deepcode.constants import DEFAULT_SERVICE_URL, API_KEY_ENV, SERVICE_URL_ENV 9 | from deepcode import analyze_folders, analyze_git 10 | 11 | MOCKED_FILTERS = { 12 | 'extensions': [ 13 | '.py', 14 | '.c', 15 | '.cc', 16 | '.cpp', 17 | '.cxx', 18 | '.h', 19 | '.hpp', 20 | '.hxx', 21 | '.es', 22 | '.es6', 23 | '.htm', 24 | '.html', 25 | '.js', 26 | '.jsx', 27 | '.ts', 28 | '.tsx', 29 | '.vue', 30 | '.java', 31 | ], 32 | 'configFiles': [ 33 | '.dcignore', 34 | '.gitignore', 35 | '.pylintrc', 36 | 'pylintrc', 37 | '.pmdrc.xml', 38 | '.ruleset.xml', 39 | 'ruleset.xml', 40 | 'tslint.json', 41 | '.eslintrc.js', 42 | '.eslintrc.json', 43 | '.eslintrc.yml', 44 | ] 45 | } 46 | 47 | mock_file_filter = lambda n: os.path.splitext(n)[-1] in MOCKED_FILTERS['extensions'] or n in MOCKED_FILTERS['configFiles'] 48 | 49 | 50 | API_KEY = os.environ.get(API_KEY_ENV) or '' 51 | SERVICE_URL = os.environ.get(SERVICE_URL_ENV) or DEFAULT_SERVICE_URL 52 | 53 | # Clean environment variable 54 | os.environ[API_KEY_ENV] = '' 55 | 56 | def test_api_key_provided(): 57 | assert bool(API_KEY) 58 | 59 | @pytest.mark.asyncio 60 | async def test_filters(): 61 | 62 | # Call without api key and it should work 63 | filter_func = await get_filters(api_key=API_KEY) 64 | 65 | assert filter_func('sample-repo/app.js') == True 66 | 67 | 68 | def test_meta_utf8_file(): 69 | path = os.path.join(os.path.dirname(__file__), 'sample-repo', 'app.js') 70 | 71 | assert get_file_meta(path) == (510, '40f937553fda7b9986c3a87d39802b96e77fb2ba306dd602f9b2d28949316c98') 72 | 73 | 74 | def test_meta_iso8859_file(): 75 | path = os.path.join(os.path.dirname(__file__), 'sample-repo', 'main.js') 76 | 77 | assert get_file_meta(path) == (22325, 'a7f2b4086183e471a0024b96a2de53b4a46eef78f4cf33b8dab61eae5e27eb83') 78 | 79 | 80 | 81 | def test_bundle_hashes(): 82 | path = os.path.join(os.path.dirname(__file__), 'sample-repo') 83 | bundle_files = list(collect_bundle_files([path], file_filter=mock_file_filter)) 84 | 85 | assert len(bundle_files) == 9 86 | 87 | file_hashes = prepare_bundle_hashes(bundle_files) 88 | 89 | assert len(file_hashes) == 9 90 | 91 | annotator_app_file = next((f for f in file_hashes if 'AnnotatorTest.cpp' in f[0]), None) 92 | assert 'AnnotatorTest.cpp' in annotator_app_file[0] 93 | assert annotator_app_file[1] == '9bf5582f88c6f5a93207efc66b3df6dd36b16de3807f93894b58baa90735b91d' 94 | 95 | db_file = next((f for f in file_hashes if 'db.js' in f[0]), None) 96 | assert 'db.js' in db_file[0] 97 | assert db_file[1] == '6f8d7925b5c86bd6d31b0b23bdce1dcfc94e28a1d5ebdc0ba91fac7dc7e95657' 98 | 99 | return file_hashes 100 | 101 | 102 | @pytest.mark.asyncio 103 | async def test_generate_bundle(): 104 | """ Test generating bundles """ 105 | file_hashes = test_bundle_hashes() 106 | 107 | # Try to call without api key 108 | with pytest.raises(aiohttp.client_exceptions.ClientResponseError): 109 | await generate_bundle(file_hashes) 110 | 111 | bundle_id = await generate_bundle(file_hashes, API_KEY) 112 | assert bool(bundle_id) 113 | 114 | return bundle_id 115 | 116 | @pytest.mark.asyncio 117 | async def test_analysis(): 118 | 119 | # Try to call with wrong bundle id and without api key 120 | with pytest.raises(aiohttp.client_exceptions.ClientResponseError): 121 | await get_analysis('sdfs', linters_enabled=True) 122 | 123 | bundle_id = await test_generate_bundle() 124 | 125 | # Set API KEY env variable 126 | os.environ[API_KEY_ENV] = API_KEY 127 | 128 | results = await get_analysis(bundle_id, linters_enabled=True) 129 | assert list(results.keys()) == ['id', 'url', 'results'] 130 | assert results['id'] == bundle_id 131 | assert results['url'] == '{}/app/{}/_/%2F/code/?'.format(SERVICE_URL, bundle_id) 132 | assert list(results['results'].keys()) == ['files', 'suggestions', 'timing'] 133 | assert len(results['results']['files'].keys()) == 5 134 | assert '/sample-repo/AnnotatorTest.cpp' in list(results['results']['files'].keys())[0] 135 | assert len(results['results']['suggestions'].keys()) == 8 136 | assert list(results['results']['timing'].keys()) == ['fetchingCode', 'analysis', 'queue'] 137 | assert results['results']['timing']['fetchingCode'] <= results['results']['timing']['analysis'] 138 | assert results['results']['timing']['queue'] >= 0 139 | 140 | @pytest.mark.asyncio 141 | async def test_analyze_folders(): 142 | path = os.path.join(os.path.dirname(__file__), 'sample-repo') 143 | results = await analyze_folders([path], linters_enabled=True) 144 | assert list(results.keys()) == ['id', 'url', 'results'] 145 | assert len(results['results']['files'].keys()) == 5 146 | assert len(results['results']['suggestions'].keys()) == 8 147 | 148 | 149 | @pytest.mark.asyncio 150 | async def test_analyze_file(): 151 | path = os.path.join(os.path.dirname(__file__), 'sample-repo', 'app.js') 152 | results = await analyze_folders([path], linters_enabled=True) 153 | assert list(results.keys()) == ['id', 'url', 'results'] 154 | assert len(results['results']['files'].keys()) == 1 155 | assert len(results['results']['suggestions'].keys()) == 1 156 | 157 | 158 | @pytest.mark.asyncio 159 | async def test_analyze_folders_severity(): 160 | path = os.path.join(os.path.dirname(__file__), 'sample-repo') 161 | results = await analyze_folders([path], linters_enabled=True, severity=2) 162 | assert list(results.keys()) == ['id', 'url', 'results'] 163 | assert len(results['results']['files'].keys()) == 5 164 | assert len(results['results']['suggestions'].keys()) == 6 165 | 166 | 167 | @pytest.mark.asyncio 168 | async def test_remote_analysis(): 169 | results = await analyze_git('github.com', 'DeepcodeAI', 'TinyTests', '84b024559a6440e70faadf4d2b30609a7944f237') 170 | assert list(results.keys()) == ['id', 'url', 'results'] 171 | assert set(results['results']['files'].keys()) == set(['/New.js', '/Test.java', '/Test1.java', '/Test2.java', '/Test3.java', '/Test4.java', '/Test7.java']) 172 | assert len(results['results']['suggestions'].keys()) == 6 173 | 174 | 175 | @pytest.mark.asyncio 176 | async def test_remote_analysis_severity(): 177 | results = await analyze_git('github.com', 'DeepcodeAI', 'cli', '320d98a6896f5376efe6cefefb6e70b46b97d566', severity=2) 178 | assert list(results.keys()) == ['id', 'url', 'results'] 179 | assert len(results['results']['files'].keys()) == 0 180 | assert len(results['results']['suggestions'].keys()) == 0 181 | --------------------------------------------------------------------------------