├── .coveragerc ├── .github └── workflows │ └── main.yml ├── .gitignore ├── .travis.yml ├── LICENSE ├── README.md ├── avmp ├── __init__.py ├── core │ ├── VERSION │ ├── __init__.py │ ├── cli.py │ ├── exceptions.py │ ├── models.py │ ├── updater.py │ └── wrapper.py ├── tools │ ├── __init__.py │ ├── jira_tools.py │ ├── python_tools.py │ ├── slack_tools.py │ └── tenable_tools.py └── utils │ ├── __init__.py │ ├── ip_tools.py │ ├── ip_tools_test.py │ ├── logging_utils.py │ └── vuln_db.py ├── examples ├── dynamic_process_configs │ └── readme.md └── static_process_configs │ ├── readme.md │ ├── tenable_asv_process.json │ └── ticket_buckets_for_engineer_time_tracking.json ├── logo.png ├── poetry.lock ├── pyproject.toml ├── requirements.txt └── setup.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch=True 3 | source = 4 | avmp 5 | omit = 6 | .venv/* 7 | .vscode/* 8 | 9 | [report] 10 | show_missing=True -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | # .github/workflows/main.yml 2 | name: Python Workflow 3 | 4 | on: 5 | push: 6 | branches: 7 | - main 8 | pull_request: 9 | 10 | jobs: 11 | linting: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v2 15 | - uses: actions/setup-python@v2 16 | #---------------------------------------------- 17 | # load pip cache if cache exists 18 | #---------------------------------------------- 19 | - uses: actions/cache@v2 20 | with: 21 | path: ~/.cache/pip 22 | key: ${{ runner.os }}-pip 23 | restore-keys: ${{ runner.os }}-pip 24 | #---------------------------------------------- 25 | # install and run linters 26 | #---------------------------------------------- 27 | - run: python -m pip install black isort 28 | - run: | 29 | black . --check 30 | isort . 
31 | test: 32 | needs: linting 33 | strategy: 34 | fail-fast: false 35 | matrix: 36 | os: [ubuntu-latest, macos-latest] 37 | python-version: ["3.9", "3.10"] 38 | runs-on: ${{ matrix.os }} 39 | steps: 40 | #---------------------------------------------- 41 | # check-out repo and set-up python 42 | #---------------------------------------------- 43 | - name: Check out repository 44 | uses: actions/checkout@v1 45 | - name: Set up python ${{ matrix.python-version }} 46 | uses: actions/setup-python@v1 47 | with: 48 | python-version: ${{ matrix.python-version }} 49 | architecture: x64 50 | #---------------------------------------------- 51 | # ----- install & configure poetry ----- 52 | #---------------------------------------------- 53 | - name: Install Poetry 54 | uses: snok/install-poetry@v1 55 | with: 56 | virtualenvs-create: true 57 | virtualenvs-in-project: true 58 | #---------------------------------------------- 59 | # load cached venv if cache exists 60 | #---------------------------------------------- 61 | - name: Load cached venv 62 | id: cache-python-packages 63 | uses: actions/cache@v2 64 | with: 65 | path: .venv 66 | key: venv-${{ runner.os }}-${{ hashFiles('poetry.lock') }}-${{ hashFiles('pyproject.toml') }} 67 | #---------------------------------------------- 68 | # install dependencies if cache does not exist 69 | #---------------------------------------------- 70 | - name: Install dependencies 71 | if: steps.cache-python-packages.outputs.cache-hit != 'true' 72 | run: poetry install --no-interaction --no-root 73 | #---------------------------------------------- 74 | # install your root project, if required 75 | #---------------------------------------------- 76 | - name: Install library 77 | run: poetry install --no-interaction 78 | #---------------------------------------------- 79 | # run test suite 80 | #---------------------------------------------- 81 | - run: | 82 | source .venv/bin/activate 83 | poetry run pytest -v 84 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Code editor settings 2 | .vscode 3 | 4 | # Python temp files 5 | *.egg-info 6 | __pycache__/ 7 | .coverage -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | dist: xenial 3 | sudo: true 4 | python: 5 | - "3.7" 6 | # Install dependencies 7 | install: 8 | - pip install -r requirements.txt 9 | # Run tests 10 | script: 11 | - pytest -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2020 - Present @RackReaver 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Alt text](https://raw.githubusercontent.com/RackReaver/AVMP/main/logo.png?raw=true "logo") 2 | 3 | # Automated Vulnerability Management Program (AVMP) 4 | 5 | A collection of tools for managing and automating vulnerability management. 6 | 7 | Streamline the way vulnerability management programs are created and run. This project is made to be modular so automation can be put into place at any program level. 
8 | 9 | --- 10 | 11 | ## Table of Contents 12 | 13 | - [Things to Note](#things-to-note) 14 | - [Installation](#installation) 15 | - [Running the tests](#running-the-tests) 16 | - [Deployment](#deployment) 17 | - [How to use](#how-to-use) 18 | - [Folder Structure](#folder-structure-vulnmanager) 19 | - [Main configuration file "config.json"](#main-configuration-file-configjson) 20 | - [Dynamic process config "dynamic_process_config.json](#dynamic-process-config-dynamicprocessconfigjson) 21 | - [Static process config "static_process_config.json"](#static-process-config-staticprocessconfigjson) 22 | - [TO-DO](#to-do) 23 | - [Authors](#authors) 24 | - [License](#license) 25 | 26 | --- 27 | 28 | ## Things to Note 29 | 30 | 1. **API keys for both Tenable IO and Jira are required.** 31 | 32 | 2. I developed this tool using Jira on-prem and assume it would work for the cloud version as well however am unable to verify that. 33 | 34 | 3. There is a bit of setup to use the tool as it is in development, I am planning on creating a quick start script but until then please see [How to use](#how-to-use). 35 | 36 | ## Installation 37 | 38 | ``` 39 | pip install git+https://github.com/RackReaver/AVMP 40 | ``` 41 | 42 | ## Running the tests 43 | 44 | Check code coverage 45 | 46 | ``` 47 | >>> coverage run -m pytest 48 | >>> 49 | >>> coverage report 50 | ``` 51 | 52 | ## Deployment 53 | 54 | At this time the tool can only be deployed locally. 55 | 56 | ## How to use 57 | 58 | ``` 59 | $ avmp --help 60 | __ ____ __ _____ 61 | /\ \ / / \/ | __ \ 62 | / \ \ / /| \ / | |__) | 63 | / /\ \ \/ / | |\/| | ___/ 64 | / ____ \ / | | | | | 65 | /_/ \_\/ |_| |_|_| 66 | 67 | A collection of tools for managing and automating vulnerability management. 68 | 69 | Usage: 70 | avmp run [--config filepath] 71 | avmp update [--config filepath] 72 | avmp -h | --help 73 | avmp --version 74 | 75 | Options: 76 | -h --help Show this screen. 77 | --version Show version. 
78 | --config=filepath AVMP configuration file [default: config.json] 79 | ``` 80 | 81 | #### Folder Structure `vuln_manager`: 82 | 83 | ``` 84 | vuln_manager 85 | | 86 | +-- process_configs 87 | | | 88 | | +-- dynamic/ # Configurations for generating vulnerability tickets 89 | | +-- static/ # Configurations for generating repetitive project/task tickets 90 | | 91 | +-- config.json 92 | +-- tickets.db # This is generated automatically and is mapped to in the process_configs 93 | ``` 94 | 95 | #### Main configuration file `config.json`: 96 | 97 | ```json 98 | { 99 | "creds": { 100 | "tenable": { 101 | "access_key": "", 102 | "secret_key": "" 103 | }, 104 | "jira": { 105 | "server": "", 106 | "username": "", 107 | "password": "" 108 | } 109 | }, 110 | "types": { 111 | "JIRA_PROJECT_ID": ["JIRA_FIELD_1", "JIRA_FIELD_2", "JIRA_FIELD_3"] 112 | }, 113 | "due_dates": { 114 | "Critical": "DAYS_TO_PATCH", 115 | "High": "DAYS_TO_PATCH", 116 | "Medium": "DAYS_TO_PATCH", 117 | "Low": "DAYS_TO_PATCH" 118 | }, 119 | "priorities": { 120 | "Critical": "JIRA_ID", 121 | "High": "JIRA_ID", 122 | "Medium": "JIRA_ID", 123 | "Low": "JIRA_ID" 124 | } } 125 | ``` 126 | 127 | | Field | Required | Description | 128 | | ---------- | -------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 129 | | creds | yes | Data required from APIs to run package | 130 | | types | no | List of required fields for a given Jira project (not required, but a good idea to ensure process_configs contain all required fields before making an API request). | 131 | | due_dates | yes | Used to set Jira due date based on Tenable's severity rating. | 132 | | priorities | yes | Mapping Tenable severity rating to Jira priorities (defaults to `Low` if others are unavailable). 
| 133 | 134 | #### Dynamic process config `dynamic_process_config.json`: 135 | 136 | See [examples](examples/dynamic_process_configs) for context on use cases. 137 | 138 | ```json 139 | { 140 | "process_type": "dynamic", 141 | "allow_ticket_duplication": "False", 142 | "scan_name": "TENABLE_SCAN_NAME", 143 | "max_tickets": 10, 144 | "assignee": "", 145 | "min_cvss_score": 6.0, 146 | "ticket_db_filepath": "tickets.db", 147 | "default_ticket_status": "Open", 148 | "time_saved_per_ticket": "10m", 149 | "root_ticket": "", 150 | "comments": [], 151 | "data": { 152 | "project": { "key": "JIRA_PROJECT_KEY" }, 153 | "summary": "Vuln: ", 154 | "description": "", 155 | "issuetype": { "id": "JIRA_ISSUE_TYPE_ID" }, 156 | "priority": { "id": "" }, 157 | "duedate": "" 158 | } 159 | } 160 | ``` 161 | 162 | | Field | Required | Description | 163 | | ------------------------ | -------- | ---------------------------------------------------------------------------------------------------------------------------------- | 164 | | process_type | yes | Must be `dynamic` | 165 | | allow_ticket_duplication | no | [default: false] Prevent multiple tickets for same plugin_id to be generated (This is based on the `ticket_db_filepath` provided). | 166 | | scan_name | yes | Name of scan inside of Tenable IO | 167 | | max_tickets | no | Number of tickets to be created each time this configuration is used (optional - will create all if value is blank). | 168 | | assignee | no | Username to assign all created tickets to (optional). | 169 | | min_cvss_score | yes | This is based on the CVSS Base Score provided by Tenable IO, allows for configurations based on severity. | 170 | | ticket_db_filepath | yes | Location of SQLite database file for tracking tickets (DB will be created if ones doesn't already exist on the path provided). | 171 | | default_ticket_status | yes | First status for database entry, this will change when the auto updater is run. 
| 172 | | time_saved_per_ticket | yes | Jira time value to log work for calculating time saved. | 173 | | root_ticket | no | If unable to log work against newly created ticket this value will provide a ticket that allows work to be logged against it. | 174 | | comments | no | A list of strings that will generate comments. | 175 | | data | yes | API values required to generate a Jira ticket (issue). | 176 | 177 | #### Static process config `static_process_config.json`: 178 | 179 | See [examples](examples/static_process_configs) for context on use cases. 180 | 181 | ```json 182 | { 183 | "process_type": "static", 184 | "time_saved_per_ticket": "5m", 185 | "time_saved_comment": "Time saved through automation", 186 | "parent_ticket": { 187 | "project": { "key": "JIRA_PROJECT_KEY" }, 188 | "summary": "SUMMARY", 189 | "description": "DESCRIPTION", 190 | "issuetype": { "name": "ISSUE_TYPE_NAME" }, 191 | "assignee": { "name": "" }, 192 | "priority": { "id": "PRIORITY_ID" } 193 | }, 194 | "sub_tasks": { 195 | "sub_task_1": { 196 | "project": { "key": "JIRA_PROJECT_KEY" }, 197 | "summary": "SUMMARY", 198 | "description": "DESCRIPTION", 199 | "issuetype": { "name": "Sub-task" }, 200 | "assignee": { "name": "" } 201 | }, 202 | "sub_task_2": { 203 | "project": { "key": "JIRA_PROJECT_KEY" }, 204 | "summary": "SUMMARY", 205 | "description": "DESCRIPTION", 206 | "issuetype": { "name": "Sub-task" }, 207 | "assignee": { "name": "" } 208 | } 209 | } 210 | } 211 | ``` 212 | 213 | | Field Name | Required | Description | 214 | | --------------------- | -------- | ---------------------------------------------------------------------------- | 215 | | process_type | yes | Must be `static` | 216 | | time_saved_per_ticket | no | Jira time value to log work for calculating time saved. | 217 | | time_saved_comment | no | Comment for Jira work log for time saved. | 218 | | parent_ticket | yes | API values required to generate a Jira ticket (issue). 
| 219 | | sub_tasks | no | JSON container for any sub tasks that should be created under parent ticket. | 220 | 221 | ## TO-DO 222 | 223 | - Add persistent logging to wrapper.main() as a return value 224 | - Add ticket reference table to database 225 | - Track project and process ticket numbers for automated linking 226 | - Add support for [SecurityScorecard](https://securityscorecard.com/) 227 | - Build tests for code base 228 | - [x]Add example dynamic and static process configs 229 | - Add database tracking of static_process tickets 230 | 231 | ## Authors 232 | 233 | - **Matt Ferreira** - _Developer_ - [RackReaver](https://github.com/RackReaver) 234 | 235 | ## License 236 | 237 | This project is licensed under the Apache License - see the [LICENSE](LICENSE) file for details 238 | -------------------------------------------------------------------------------- /avmp/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RackReaver/AVMP/7b28116f5623212243b59b1b7d6685854316e527/avmp/__init__.py -------------------------------------------------------------------------------- /avmp/core/VERSION: -------------------------------------------------------------------------------- 1 | 0.0.2 -------------------------------------------------------------------------------- /avmp/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RackReaver/AVMP/7b28116f5623212243b59b1b7d6685854316e527/avmp/core/__init__.py -------------------------------------------------------------------------------- /avmp/core/cli.py: -------------------------------------------------------------------------------- 1 | """ 2 | __ ____ __ _____ 3 | /\ \ / / \/ | __ \ 4 | / \ \ / /| \ / | |__) | 5 | / /\ \ \/ / | |\/| | ___/ 6 | / ____ \ / | | | | | 7 | /_/ \_\/ |_| |_|_| 8 | 9 | A collection of tools for managing and automating vulnerability management. 
10 | 11 | Usage: 12 | avmp run [--config filepath] 13 | avmp update [--config filepath] 14 | avmp -h | --help 15 | avmp --version 16 | 17 | Options: 18 | -h --help Show this screen. 19 | --version Show version. 20 | --config=filepath AVMP configuration file [default: config.json] 21 | """ 22 | __copyright__ = "Copyright (C) 2020-2021 Matt Ferreira" 23 | __license__ = "Apache License" 24 | 25 | import json 26 | 27 | from docopt import docopt 28 | 29 | from avmp.core import updater, wrapper 30 | 31 | 32 | def main(): 33 | args = docopt(__doc__, version="0.0.2") 34 | 35 | if args["run"] == True: 36 | 37 | with open(args["--config"], "r") as openFile: 38 | config = json.load(openFile) 39 | with open(args[""], "r") as openFile: 40 | process_config = json.load(openFile) 41 | 42 | wrapper.main(config, process_config) 43 | 44 | elif args["update"] == True: 45 | 46 | with open(args["--config"], "r") as openFile: 47 | config = json.load(openFile) 48 | 49 | updater.main(args[""], config) 50 | -------------------------------------------------------------------------------- /avmp/core/exceptions.py: -------------------------------------------------------------------------------- 1 | """User-defined exceptions. 2 | """ 3 | __copyright__ = "Copyright (C) 2021 Matt Ferreira" 4 | __license__ = "Apache License" 5 | 6 | 7 | class Error(Exception): 8 | """Base class for other exceptions""" 9 | 10 | pass 11 | 12 | 13 | class APIConnectionError(Error): 14 | """Raised when connection to API breaks""" 15 | 16 | pass 17 | 18 | 19 | class MissingConfiguration(Error): 20 | """Missing variables in configuration file""" 21 | 22 | pass 23 | 24 | 25 | class InputError(Error): 26 | """Incorrect input provided to function or class""" 27 | 28 | pass 29 | -------------------------------------------------------------------------------- /avmp/core/models.py: -------------------------------------------------------------------------------- 1 | """Class models for AVMP. 
2 | """ 3 | __copyright__ = "Copyright (C) 2021 Matt Ferreira" 4 | __license__ = "Apache License" 5 | 6 | import logging 7 | import os 8 | 9 | from avmp.core.exceptions import MissingConfiguration 10 | from avmp.tools.jira_tools import JiraToolsAPI 11 | from avmp.tools.tenable_tools import TenableToolsAPI 12 | 13 | 14 | class App: 15 | """Data associated with running AVMP.""" 16 | 17 | def __init__(self, config, process_config): 18 | """Build AVMP app given the config file. 19 | 20 | args: 21 | config (dict): Imported json data from config 22 | process_config (dict): Imported json data from process_config. 23 | None is also accepted. 24 | 25 | return: None 26 | """ 27 | assert isinstance(config, dict) 28 | assert isinstance(process_config, dict) 29 | 30 | self.config = self.config_setup(config) 31 | self.process_config = self.process_config_setup(process_config) 32 | 33 | def config_setup(self, config): 34 | """Setup function for config file. Used to process/add missing information. 35 | 36 | args: 37 | config (dict): AVMP configuration file 38 | 39 | return (dict): Modified python config dictionary 40 | """ 41 | 42 | return config 43 | 44 | def process_config_setup(self, process_config): 45 | """Setup function for process config file. Used to process/add missing information. 
46 | 47 | args: 48 | config (dict): Process configuration file 49 | 50 | return (dict): Modified python process config dictionary 51 | """ 52 | 53 | if "allow_ticket_duplication" not in process_config: 54 | process_config["allow_ticket_duplication"] = False 55 | 56 | return process_config 57 | 58 | def tenAPIcon(self): 59 | """Check for credentials in config and connect to Tenable IO.""" 60 | if ( 61 | "access_key" in self.config["creds"]["tenable"] 62 | and "secret_key" in self.config["creds"]["tenable"] 63 | ): 64 | 65 | self.tenAPI = TenableToolsAPI( 66 | self.config["creds"]["tenable"]["access_key"], 67 | self.config["creds"]["tenable"]["secret_key"], 68 | ) 69 | else: 70 | message = "Tenable access_key and secret_key are required and must be provided in the config file." 71 | logging.critical(message) 72 | raise MissingConfiguration(message) 73 | 74 | def jiraAPIcon(self): 75 | """Check for credentials in config and connect to Jira.""" 76 | 77 | if ( 78 | "server" in self.config["creds"]["jira"] 79 | and "username" in self.config["creds"]["jira"] 80 | and "password" in self.config["creds"]["jira"] 81 | ): 82 | 83 | self.jiraAPI = JiraToolsAPI( 84 | self.config["creds"]["jira"]["server"], 85 | username=self.config["creds"]["jira"]["username"], 86 | password=self.config["creds"]["jira"]["password"], 87 | ) 88 | else: 89 | message = "Jira server, username and password are required and must be provided in the config file." 
def main(vuln_db_filepath, config):
    """Sync Jira ticket statuses back into the local vulnerability database.

    For every ticket recorded in the SQLite vulnerability database, log a
    5 minute work entry in Jira (falling back to the optional root ticket
    when the ticket itself rejects worklogs) and refresh the stored status.

    args:
        vuln_db_filepath (str): Path to the SQLite vulnerability database.
        config (dict): Loaded config with creds['jira'] server/username/password.

    return: None
    """
    root_ticket = input("Root ticket for logging time: ")
    db = TenableSqliteVulnDB(vuln_db_filepath)
    tickets = db.get_all_tickets()

    try:
        jiraAPI = JiraToolsAPI(
            config["creds"]["jira"]["server"],
            username=config["creds"]["jira"]["username"],
            password=config["creds"]["jira"]["password"],
        )
    except Exception as e:
        # BUG FIX: previously logged the non-existent 'api_username' key
        # (raising KeyError inside the handler) and then fell through to a
        # NameError on jiraAPI below; log the real key and re-raise.
        logging.debug(
            f"{config['creds']['jira']['username']} failed to authenticate with Jira.\n\n{e}\n\n"
        )
        raise

    COMMENT = "Updating vulnerability database."

    for num, ticket in enumerate(tickets):
        print(f"Updating {ticket[0]} ({num+1} of {len(tickets)})")
        data = jiraAPI._JIRA.issue(ticket[0])
        logged_work = jiraAPI.log_work(ticket[0], "5m", comment=COMMENT)

        # Attempt logging time to root ticket if original ticket does not allow
        if root_ticket != "" and logged_work == False:
            root_comment = "{} - {}".format(ticket[0], COMMENT)
            jiraAPI.log_work(root_ticket, "5m", comment=root_comment)

        db.update_status_by_ticket_number(ticket[0], str(data.fields.status))
def main(config, process_config):
    """Entry point: route a process config to the static or dynamic runner.

    args:
        config (dict): Global AVMP configuration.
        process_config (dict): Per-process configuration (must carry
            'process_type' containing 'static' or 'dynamic').

    Raises:
        MissingConfiguration: When 'process_type' is missing or unrecognized.
    """
    os.system("cls" if os.name == "nt" else "clear")
    logging_setup(os.path.basename(__file__), stdout=True)

    app = App(config, process_config)
    app.jiraAPIcon()

    # Guard clause: a process_type is mandatory.
    if "process_type" not in app.process_config:
        message = 'No "process_type" variable provided in scan config. See documentation for examples.'
        logging.error(message)
        raise MissingConfiguration(message)

    process_type = app.process_config["process_type"]
    if "static" in process_type:
        static(app)
    elif "dynamic" in process_type:
        dynamic(app)
    else:
        message = '"process_type" (static or dynamic) is required. See documentation for examples.'
        logging.error(message)
        raise MissingConfiguration(message)


def static(app):
    """Generate static tickets found in config file."""
    logging.info("Creating tickets...")
    host = socket.gethostname()
    user = app.config["creds"]["jira"]["username"]

    # Parent ticket first; children are attached beneath it.
    parent_ticket = app.jiraAPI.create(app.process_config["parent_ticket"])
    logging.info(
        f'{host} used "{user}" to create parent ticket "{parent_ticket}" successfully in Jira'
    )

    # Log time saved
    jira_log_time(app, parent_ticket)
    logging.info(f"Successfully created parent ticket ({parent_ticket})")

    # Create sub-tasks
    for sub_task in app.process_config["sub_tasks"].values():
        # Default the parent link when the config didn't set one.
        sub_task.setdefault("parent", {"key": parent_ticket})

        child = app.jiraAPI.create(sub_task)
        logging.info(
            f'{host} used "{user}" to create child ticket "{child}" under "{parent_ticket}" successfully in Jira'
        )

        # Log time saved
        jira_log_time(app, child)
        logging.info(f"Successfully created sub_task ticket ({child})")

    logging.info("Tickets created successfully")
def dynamic(app):
    """Generate tickets for Tenable vulnerabilities.

    Downloads the latest completed scan named in the process config, groups
    findings by Plugin ID, and opens one Jira ticket per vulnerability that
    is not already tracked in the local SQLite database.

    args:
        app (App): Current runtime holding config, process_config and jiraAPI.

    return (bool): False when the scan is still running or no findings meet
        min_cvss_score; otherwise None once ticket creation completes.
    """
    # Setup connection to Tenable IO
    app.tenAPIcon()

    # Get raw scan data
    logging.info("Checking Tenable for new scan.")

    if app.tenAPI.check_scan_in_progress(app.process_config["scan_name"]):
        logging.info("Latest scan is still running, try again later.")
        return False

    filepath = app.tenAPI.export_latest_scan(
        app.process_config["scan_name"],
        os.path.join(os.getcwd(), "data", "scans"),
        overwrite=False,
    )

    # Build vulnerability database
    db = TenableSqliteVulnDB(app.process_config["ticket_db_filepath"])

    logging.info("Starting scan data import")
    items = TenableToolsCSV(
        filepath, min_cvss_score=app.process_config["min_cvss_score"]
    ).group_by("Plugin ID")

    if items is None:
        logging.debug("No data was found given the min_cvss_score")
        return False

    tickets = TenableToolsCSV.organize(items)
    logging.info("Completed scan data import")
    logging.info('Creating tickets for "{}"'.format(app.process_config["scan_name"]))

    ticket_counter = 0
    for ticket in tickets.values():
        # Honor max_tickets; 0 means unlimited.
        if not (
            ticket_counter < app.process_config["max_tickets"]
            or app.process_config["max_tickets"] == 0
        ):
            continue

        data = {**app.process_config["data"]}

        # Check to ensure all required fields are included
        if (
            len(app.config["types"]) > 0
            and data["project"]["key"] in app.config["types"]
        ):
            missing_fields = [
                field
                for field in app.config["types"][data["project"]["key"]]
                if field not in app.process_config["data"]
            ]
            if missing_fields:
                raise NameError(f"The following fields were missing {missing_fields}")

        # Append variable data to data fields
        data["summary"] += ticket["Vuln Data"]["Synopsis"]
        data["summary"] = data["summary"].replace("\n", " ")
        data["description"] += build_jira_description(ticket)

        if data["priority"]["id"] == "":
            # Map the Tenable risk rating onto the configured Jira priority id.
            risk = ticket["Vuln Data"]["Risk"]
            if risk == "Critical":
                data["priority"]["id"] = app.config["priorities"]["Critical"]
            elif risk == "High":
                data["priority"]["id"] = app.config["priorities"]["High"]
            elif risk == "Medium":
                data["priority"]["id"] = app.config["priorities"]["Medium"]
            else:
                data["priority"]["id"] = app.config["priorities"]["Low"]

        if data["duedate"] == "":
            # Build due date from the configured SLA days for this risk level.
            today = datetime.now()
            if ticket["Vuln Data"]["Risk"] in app.config["due_dates"]:
                plus_days = app.config["due_dates"][ticket["Vuln Data"]["Risk"]]
            else:
                plus_days = app.config["due_dates"]["Low"]

            final_date = today + timedelta(days=int(plus_days))
            data["duedate"] = final_date.strftime("%Y-%m-%d")

        ip_list = set([x["IP Address"] for x in ticket["Hosts"].values()])

        # Check for already open tickets
        dups = db.get_all_tickets_by_plugin_id(ticket["Vuln Data"]["Plugin ID"])

        # TODO: Add abilty to open ticket for new IP's and link to existing vuln ticket.

        if len(dups) != 0 and app.process_config["allow_ticket_duplication"] != True:
            logging.info(
                "Plugin ID ({}) has an open ticket. Skipping...".format(
                    ticket["Vuln Data"]["Plugin ID"]
                )
            )
            continue

        if len(dups) != 0:
            logging.info(
                "Plugin ID ({}) has an open ticket. Creating duplicate...".format(
                    ticket["Vuln Data"]["Plugin ID"]
                )
            )

        current = app.jiraAPI.create(data)
        ticket_counter += 1
        db.add_ticket(
            current,
            ticket["Vuln Data"]["Plugin ID"],
            app.process_config["default_ticket_status"],
            list(ip_list),
        )

        # Attempt adding comment(s) to ticket
        if app.process_config["comments"] != "":
            for comment in app.process_config["comments"]:
                try:
                    app.jiraAPI.comment(current, comment)
                    logging.info("Successfully applied comment on {}".format(current))
                except Exception:
                    # BUG FIX: was a bare 'except:' that also swallowed
                    # KeyboardInterrupt/SystemExit.
                    logging.error("Failed to apply comment on {}".format(current))

        # Log time saved
        jira_log_time(app, current)

        # Link ticket back to root ticket
        if app.process_config["root_ticket"] != "":
            try:
                app.jiraAPI.link(
                    app.process_config["root_ticket"],
                    current,
                    issue_link_name="depends on",
                )
                logging.info(
                    "Linked {} to {}".format(app.process_config["root_ticket"], current)
                )
            except Exception:
                # BUG FIX: was a bare 'except:' that also swallowed
                # KeyboardInterrupt/SystemExit.
                logging.error(
                    "Failed to link {} to root ticket {}".format(
                        current, app.process_config["root_ticket"]
                    )
                )
246 | """ 247 | logged_work = app.jiraAPI.log_work( 248 | ticket, 249 | app.process_config["time_saved_per_ticket"], 250 | comment=app.process_config["time_saved_comment"], 251 | ) 252 | 253 | if ( 254 | "root_ticket" in app.process_config 255 | and app.process_config["root_ticket"] != "" 256 | and logged_work == False 257 | ): 258 | root_comment = "{} - {}".format( 259 | ticket, app.process_config["time_saved_comment"] 260 | ) 261 | root_ticket_logged_work = app.jiraAPI.log_work( 262 | app.process_config["root_ticket"], 263 | app.process_config["time_saved_per_ticket"], 264 | comment=root_comment, 265 | ) 266 | 267 | if logged_work == True or root_ticket_logged_work == True: 268 | return True 269 | else: 270 | return False 271 | 272 | 273 | if __name__ == "__main__": 274 | import json 275 | 276 | config_location = "avmp/test_data/config.json" 277 | config = json.load(open(config_location, "r")) 278 | 279 | config_location = "avmp/test_data/process_configs/asv_process_config.json" 280 | process_config = json.load(open(config_location, "r")) 281 | 282 | main(config, process_config) 283 | -------------------------------------------------------------------------------- /avmp/tools/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RackReaver/AVMP/7b28116f5623212243b59b1b7d6685854316e527/avmp/tools/__init__.py -------------------------------------------------------------------------------- /avmp/tools/jira_tools.py: -------------------------------------------------------------------------------- 1 | """Classes and functions for communicating with Jira. 
2 | """ 3 | __copyright__ = "Copyright (C) 2020-2021 Matt Ferreira" 4 | __license__ = "Apache License" 5 | 6 | import logging 7 | from getpass import getpass 8 | 9 | from jira import JIRA 10 | 11 | from avmp.core.exceptions import InputError 12 | 13 | 14 | class JiraToolsAPI: 15 | def __init__(self, jira_server_link, username=None, password=None): 16 | """Initalizes the Jira API connector. If a username or password is not provided you will be prompted for it. 17 | 18 | args: 19 | jira_server_link (str): Link to the Jira server to touch API 20 | 21 | kwargs: 22 | username (str): Overwrites jira username prompt 23 | password (str): Overwrites jira password prompt 24 | 25 | return: None 26 | """ 27 | self.jira_server_link = jira_server_link 28 | self.jira_options = {"server": self.jira_server_link} 29 | 30 | if username == None: 31 | username = input("Username: ") 32 | if password == None: 33 | password = getpass() 34 | 35 | self.username = username 36 | self.password = password 37 | 38 | self._JIRA = JIRA(self.jira_options, basic_auth=(self.username, self.password)) 39 | logging.info(f"Authenticated successfully with Jira with {self.username}") 40 | 41 | def create(self, data): 42 | """Create a single Jira ticket. 43 | 44 | args: 45 | data (dict): Fields required or needed to create the ticket. 46 | 47 | return (str): Ticket number / 'False' if fails 48 | """ 49 | try: 50 | jira_ticket = self._JIRA.create_issue(fields=data) 51 | logging.info(f"Successfully created Jira issue '{jira_ticket.key}'") 52 | return jira_ticket.key 53 | 54 | except Exception as error: 55 | logging.debug( 56 | f"Failed to create Jira issue '{jira_ticket.key}'\n\n{error}\n\n" 57 | ) 58 | return False 59 | 60 | def link(self, issue_from, issue_to, issue_link_name=None): 61 | """Link two issues together. Defaults to 'Relates' unless issue_link_name is specified. 62 | 63 | args: 64 | issue_from (str): Issue that will be linked from. 65 | issue_to (str): Issue that will be linked to. 
66 | 67 | 68 | kwargs: 69 | issue_link_name (str): issue link name that should be applied. 70 | 71 | return (bool): Will return 'True' if it completed successfully. 72 | """ 73 | try: 74 | self._JIRA.create_issue_link(issue_link_name, issue_from, issue_to) 75 | logging.info( 76 | f"Successfully created a '{issue_link_name}' link between '{issue_from}' and '{issue_to}'." 77 | ) 78 | return True 79 | 80 | except Exception as error: 81 | logging.debug( 82 | f"Failed to create a link between '{issue_from}' and '{issue_to}'\n\n{error}\n\n" 83 | ) 84 | return False 85 | 86 | def label(self, issue, labels): 87 | """Apply labels to a given issue. 88 | 89 | args: 90 | issue (str): Issue that labels will be applied to. 91 | labels (list): list of labels that should be applied to the issue. 92 | 93 | Return (bool): Will return 'True' if it completed successfully. 94 | """ 95 | if type(labels) == list: 96 | try: 97 | issue_instance = self._JIRA.issue(issue) 98 | issue_instance.update( 99 | fields={"labels": issue_instance.fields.labels + labels} 100 | ) 101 | logging.info(f"Successfully added labels '{labels}' to '{issue}'") 102 | return True 103 | 104 | except Exception as error: 105 | logging.debug( 106 | f"Failed to add labels '{labels}' to '{issue}'\n\n{error}\n\n" 107 | ) 108 | return False 109 | 110 | else: 111 | raise InputError("A list must be passed to the labels argument") 112 | 113 | def comment(self, issue, comment): 114 | """Apply a comment to a given issue. 115 | 116 | args: 117 | issue (str): Issue that comment will be applied to. 118 | comment (str): comment that should be applied to the issue. 119 | 120 | return (bool): Will return 'True' if it completed successfully. 
121 | """ 122 | try: 123 | self._JIRA.add_comment(issue, comment) 124 | logging.info(f"Successfully added comment '{comment}' to '{issue}'") 125 | return True 126 | except Exception as error: 127 | logging.debug( 128 | f"Failed to add comment '{comment}' to '{issue}'\n\n{error}\n\n" 129 | ) 130 | return False 131 | 132 | def log_work(self, issue, time_spent, comment=None): 133 | """Log work to a given issue. 134 | 135 | args: 136 | issue (str): Issue to log work. 137 | time_spent (str): Time that should be logged to the issue. 138 | 139 | kwargs: 140 | comment (str): Description of what this time represents. 141 | 142 | return (bool): Will return 'True' if it completed successfully. 143 | """ 144 | try: 145 | if comment != None and type(comment) == str: 146 | self._JIRA.add_worklog(issue, time_spent, comment=comment) 147 | else: 148 | self._JIRA.add_worklog(issue, time_spent) 149 | logging.info(f"Successfully logged time to '{issue}'") 150 | return True 151 | 152 | except Exception as error: 153 | logging.info(f"Failed to log work to '{issue}' See debug logs for more.") 154 | logging.debug(f"\n{error}\n") 155 | return False 156 | 157 | def add_attachment(self, issue, attachment): 158 | """Attach file to Jira issue. 159 | 160 | args: 161 | issue (str): Issue name 162 | attachment (str): Location of file that should be attached. 163 | 164 | Return (bool): Will return 'True' if completed successfully 165 | """ 166 | assert isinstance(issue, str) 167 | assert isinstance(attachment, str) 168 | 169 | try: 170 | self._JIRA.add_attachment(issue=issue, attachment=attachment) 171 | logging.info(f'Successfully attached document to "{issue}"') 172 | return True 173 | 174 | except Exception as error: 175 | logging.debug(f"Failed to attach document to '{issue}'\n\n{error}\n\n") 176 | return False 177 | 178 | def update_status(self, id, end_status, transfer_statuses=[], timeout_attempts=10): 179 | """Change issue to desired status. 
180 | 181 | Due to the workflow features of Jira it might not be possible to transition 182 | directly to the wanted status, intermediary statuses might be required and 183 | this funcation allows for that using 'transfer_statuses'. 184 | 185 | args: 186 | id (str): Issue id for status update 187 | end_status (str): Name of status to update ticket to. 188 | 189 | kwargs: 190 | transfer_statuses (list): Ordered list of intermediary statuses 191 | timeout_attempts (num): Number of times before while loop times out. 192 | 193 | return (bool): Will return 'True' if completed successfully 194 | """ 195 | while timeout_attempts != 0: 196 | transitions = self._JIRA.transitions(id) 197 | for transition in transitions: 198 | if transition["name"] == end_status: 199 | jira_ticket = self._JIRA.transition_issue(id, transition["id"]) 200 | logging.info(f"Updated status of '{id}' to '{end_status}'") 201 | return True 202 | elif transition["name"] in transfer_statuses: 203 | jira_ticket = self._JIRA.transition_issue(id, transition["id"]) 204 | timeout_attempts -= 1 205 | logging.debug(f"Failed to update status of '{id}' to end_status ({end_status})") 206 | return False 207 | -------------------------------------------------------------------------------- /avmp/tools/python_tools.py: -------------------------------------------------------------------------------- 1 | """Python tools for package. 2 | """ 3 | __copyright__ = "Copyright (C) 2020-2021 Matt Ferreira" 4 | __license__ = "Apache License" 5 | 6 | import json 7 | import logging 8 | import os 9 | 10 | 11 | class DataSetup: 12 | def __init__(self, filename): 13 | """Standard data file setup. 14 | 15 | args: 16 | filename (str): Script filename. 
class DataSetup:
    """Standard JSON data-file setup keyed off a script's filename."""

    def __init__(self, filename):
        """Derive the data filepath from the script name and ensure it exists.

        args:
            filename (str): Script filename (expects a 3-char extension, e.g. '.py').

        Return: None
        """
        assert isinstance(filename, str)
        self.filename = filename[:-3] + ".json"
        self.filepath = "data/" + self.filename

        # Check/Create data folder
        if not os.path.isdir("data"):
            os.mkdir("data")
            logging.info("Creating data folder.")
        if not os.path.isfile(self.filepath):
            # Seed the file with an empty JSON object.
            with open(self.filepath, "w") as handle:
                handle.write("{}")

    def get_data(self):
        """Pull data from file

        Return (dict): Data found in data file.
        """
        with open(self.filepath, "r") as handle:
            contents = json.load(handle)
            logging.info("Loaded JSON data file.")
            return contents

    def put_data(self, data):
        """Re-write data to file

        args:
            data (dict): Data to be converted to json and written to file

        Return (bool): If successful True, else False
        """
        assert isinstance(data, dict)
        try:
            with open(self.filepath, "w") as handle:
                handle.write(json.dumps(data))
            logging.info("JSON exported to data file.")
            return True
        except Exception as e:
            logging.warning(e)
            return False
class SlackTools:
    """Helpers for pushing alerts to Slack incoming webhooks."""

    def __init__(self):
        pass

    @staticmethod
    def push_alert(webhook_url, message, channel=None, custom_payload=None):
        """Push an alert to slack channel given a webhooks url and message.

        args:
            webhook_url (str): Incoming Webhook provided by slack.
            message (str): Message to push via webhook.
        kwargs:
            channel (str): Allow messages to be sent to any public channel.
            custom_payload (dict): Overwrites message and channel inputs.
                NOTE(review): assumed to be a dict ready for json.dumps —
                passing a pre-serialized JSON string would double-encode.

        return (requests.Response): Response from the webhook POST.
        """
        section = {"type": "section", "text": {"type": "mrkdwn", "text": message}}

        if custom_payload is not None:
            # BUG FIX: custom_payload was accepted and documented but never
            # used, leaving 'payload' unbound and raising NameError on the
            # POST below whenever it was supplied.
            payload = custom_payload
        elif channel is not None:
            payload = {"channel": channel, "blocks": [section]}
        else:
            payload = {"blocks": [section]}

        r = requests.post(
            webhook_url,
            data=json.dumps(payload),
            headers={"Content-Type": "application/json"},
        )

        return r
class TenableToolsAPI:
    """Wrapper around Tenable.IO for listing, inspecting and exporting scans."""

    def __init__(self, access_key, secret_key):
        """Capture access and secret key for Tenable.IO API

        args:
            access_key (str): Access key for Tenable.IO.
            secret_key (str): Secret key for Tenable.IO.

        Return: None
        """
        self.access_key = access_key
        self.secret_key = secret_key

        self.tio = TenableIO(self.access_key, self.secret_key)
        self.scans = self.get_all_scan_names()
        logging.info("Authenticated successfully with Tenable")

    def get_all_scan_names(self):
        """Return the names of every scan visible to this account (list of str)."""
        return [scan["name"] for scan in self.tio.scans.list()]

    def get_scan_meta_data(self, scan_name):
        """Gets scan meta data for a provided scan_name.

        args:
            scan_name (str): Name of scan found in Tenable.

        Return (dict): Scan meta data; None when no scan matches.
        """
        for scan in self.tio.scans.list():
            if scan["name"] == scan_name:
                return scan

    def get_scan_info(self, scan_name):
        """Gets scan info for a provided scan_name.

        args:
            scan_name (str): Name of scan found in Tenable.

        Return (dict): Info for the scan's latest run; None when no scan matches.
        """
        for scan in self.tio.scans.list():
            if scan["name"] == scan_name:
                latest_scan_uuid = next(self.tio.scans.history(scan["id"]))["scan_uuid"]
                return self.tio.scans.info(scan["id"], latest_scan_uuid)

    def export_latest_scan(self, scan_name, folder, overwrite=None, export_format="csv"):
        """Export the latest scan given a scan name and folder filepath.

        args:
            scan_name (str): Name of scan found in Tenable.
            folder (str): Root folder where scan data should be saved.

        kwargs:
            overwrite (bool): [default: None] Programatic way to determine if existing files should be overwriten.
            export_format (str): [default: csv] Format data should be saved as (this aligns with the TenableIO documentation).

        Return (str): Filepath of scan
        """
        assert isinstance(scan_name, str)
        assert isinstance(folder, str)
        assert isinstance(export_format, str)

        SCAN_NAME = scan_name
        SAVE_FOLDER = os.path.join(folder, SCAN_NAME)

        # Creates sub-folder if one does not exist
        if not os.path.isdir(SAVE_FOLDER):
            logging.info("subFolder not found... Creating now.")
            os.makedirs(SAVE_FOLDER)

        scan_meta_data = self.get_scan_meta_data(SCAN_NAME)

        # Get latest scan history id
        history_id, scan_date = self._get_latest_history_id(str(scan_meta_data["id"]))
        scan_date = datetime.fromtimestamp(scan_date).strftime("%Y-%m-%d")

        filename = f"{scan_date} - {SCAN_NAME}"
        # BUG FIX: the computed filename was previously dropped from both the
        # target path and the overwrite prompt, so every export collided on
        # the same placeholder name.
        filepath = f"{SAVE_FOLDER}/{filename}.{export_format}"

        # Check if file exists; only prompt when the caller gave no decision.
        if os.path.exists(filepath):
            if overwrite is None:
                user_input = input(
                    f'"{filename}" exists, would you like to overwrite it? (n): '
                )
                overwrite = user_input.lower() in ("y", "yes")
        else:
            overwrite = True

        # Checking if overwriting is permitted
        if overwrite:
            # Save to supplied folder
            logging.info("Downloading scan to supplied folder")
            with open(filepath, "wb") as openFile:
                self.tio.scans.export(
                    scan_meta_data["id"],
                    history_id=history_id,
                    fobj=openFile,
                    format=export_format,
                )
        else:
            logging.info("Existing file found, overwriting not permitted.")

        return filepath

    def _get_latest_history_id(self, scan_id):
        """Class function to identify latest history id.

        args:
            scan_id (str): Scan id to capture latest history id.

        Return: history_id (str), scan_date (epoch timestamp).
        """
        assert isinstance(scan_id, str)

        # Pick the run with the newest start time; max keeps the first seen on
        # ties, matching the original strictly-greater comparison.
        latest = max(self.tio.scans.history(scan_id), key=lambda h: h["time_start"])
        return latest["id"], latest["time_start"]

    def check_scan_in_progress(self, scan_name):
        """Return True when the named scan's latest run status is 'running'."""
        return self.get_scan_info(scan_name)["status"] == "running"
220 | """ 221 | 222 | # Check if data was imported 223 | if len(self.rows) != 0: 224 | data = self.rows 225 | 226 | # Check if column name exists 227 | column_exists = False 228 | for row in data: 229 | for key in row.keys(): 230 | if key == column_name: 231 | column_exists = True 232 | # Error out if column does not exist 233 | if column_exists == False: 234 | raise DataError("group_by column not found.") 235 | 236 | # Group by column name 237 | grouped_dict = {} 238 | for row in data: 239 | if row[column_name] not in grouped_dict: 240 | grouped_dict[row[column_name]] = {} 241 | 242 | next_num = len(grouped_dict[row[column_name]]) + 1 243 | 244 | grouped_dict[row[column_name]][next_num] = row 245 | 246 | return grouped_dict 247 | else: 248 | return None 249 | 250 | @staticmethod 251 | def organize(grouped_dict): 252 | """Given data from grouped_dict function seperate the 253 | vulnerability and host information. 254 | 255 | args: 256 | grouped_dict (dict): Data from grouped_dict function. 257 | 258 | return (dict): Organized dictionary [i.e. 
{'Vuln Data': {...}, 'Hosts': {...}] 259 | 260 | """ 261 | static_data_keys = [ 262 | "CVE", 263 | "CVSS", 264 | "CVSS Base Score", 265 | "CVSS Temporal Score", 266 | "CVSS Temporal Vector", 267 | "CVSS Vector", 268 | "CVSS3 Base Score", 269 | "CVSS3 Temporal Score", 270 | "CVSS3 Temporal Vector", 271 | "CVSS3 Vector", 272 | "Description", 273 | "Name", 274 | "Plugin Family", 275 | "Plugin ID", 276 | "Risk", 277 | "Solution", 278 | "See Also", 279 | "Synopsis", 280 | ] 281 | 282 | organized_data = {} 283 | 284 | # Loop through grouping 285 | for group_name, group_value in grouped_dict.items(): 286 | new_grouped_dict = {} 287 | static_data = {} 288 | host_data = {} 289 | 290 | first = True 291 | # Loop through items 292 | for key, item in group_value.items(): 293 | temp_host_items = {} 294 | 295 | # Loop through value pairs 296 | for title, value in item.items(): 297 | 298 | # Extract vulnerability information 299 | if first and title in static_data_keys: 300 | static_data[title] = value 301 | 302 | # Extract host information 303 | elif title not in static_data_keys: 304 | temp_host_items[title] = value 305 | 306 | if first: 307 | new_grouped_dict["Vuln Data"] = static_data 308 | first = False 309 | host_data[key] = temp_host_items 310 | 311 | new_grouped_dict["Hosts"] = host_data 312 | organized_data[group_name] = new_grouped_dict 313 | 314 | return organized_data 315 | 316 | 317 | def build_jira_description(ticket): 318 | """Given a ticket after running TenableToolsCSV.organize this will build a formatted jira description. 319 | 320 | args: 321 | ticket (dict): This must br from TenableToolsCSV.organize 322 | 323 | return (str): Description string. 324 | """ 325 | jira_description = "h3. (!) *PLEASE BE SURE TO PROVIDE EVIDENCE!* *Screenshots, a config output, etc.* (!)\n" 326 | jira_description += "----\n" 327 | jira_description += "h2. 
*{}* ".format(ticket["Vuln Data"]["Name"].strip()) 328 | jira_description += "*({})*\n".format(ticket["Vuln Data"]["Plugin ID"].strip()) 329 | jira_description += "{panel:title=Overview}\n" 330 | jira_description += "h3. +Synopsis+\n" 331 | jira_description += ticket["Vuln Data"]["Synopsis"].strip() + "\n" 332 | jira_description += "h3. +Description+\n" 333 | jira_description += ticket["Vuln Data"]["Description"].strip() + "\n" 334 | jira_description += "h3. +Solution+\n" 335 | jira_description += ticket["Vuln Data"]["Solution"].strip() + "\n" 336 | jira_description += "h3. +See Also+\n" 337 | jira_description += ticket["Vuln Data"]["See Also"].strip() + "\n" 338 | jira_description += "{panel}\n" 339 | jira_description += "{panel:title=Supplement Information}\n" 340 | 341 | # Loop through static data 342 | for key, value in ticket["Vuln Data"].items(): 343 | if key not in ["Name", "Synopsis", "Description", "Solution", "See Also"]: 344 | jira_description += "* *{}:* {}\n".format(key, value) 345 | 346 | # Build host section and table header 347 | jira_description += "{panel}\n" 348 | jira_description += "{panel:title=Hosts}\n" 349 | jira_description += "||IP Address||Port||Host||OS||MAC Address||Scan Start||Scan End||Plugin Output||\n" 350 | 351 | # Loop through hosts 352 | for host in ticket["Hosts"].values(): 353 | jira_description += "|" 354 | jira_description += "{} |".format(host["IP Address"]) 355 | jira_description += "{} |".format(host["Port"]) 356 | jira_description += "{} |".format(host["Host"]) 357 | jira_description += "{} |".format(host["OS"]) 358 | jira_description += "{} |".format(host["MAC Address"]) 359 | jira_description += "{} |".format(zulu_to_mdy(host["Host Start"])) 360 | jira_description += "{} |".format(zulu_to_mdy(host["Host End"])) 361 | code_str = "{code}" 362 | jira_description += "{}{}{} |".format(code_str, host["Plugin Output"], code_str) 363 | jira_description += "\n" 364 | 365 | jira_description += "{panel}\n" 366 | jira_description 
+= "----\n" 367 | jira_description += "h3. (!) *PLEASE BE SURE TO PROVIDE EVIDENCE!* *Screenshots, a config output, etc.* (!)\n" 368 | 369 | return jira_description 370 | 371 | 372 | def zulu_to_mdy(zulu_date): 373 | """Converts Tenables time format (Zulu) to MDY (Month, Day, Year) 374 | 375 | args: 376 | zulu_date (str): Tenable given time format 377 | 378 | return (str): MDY (Month, Day, Year) 379 | """ 380 | return datetime.strptime(zulu_date[:19], "%Y-%m-%dT%H:%M:%S").strftime( 381 | "%m-%d-%Y %I:%M %p" 382 | ) 383 | -------------------------------------------------------------------------------- /avmp/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RackReaver/AVMP/7b28116f5623212243b59b1b7d6685854316e527/avmp/utils/__init__.py -------------------------------------------------------------------------------- /avmp/utils/ip_tools.py: -------------------------------------------------------------------------------- 1 | """Functions to manipulate network IPs. 2 | """ 3 | __copyright__ = "Copyright (C) 2020-2021 Matt Ferreira" 4 | __license__ = "Apache License" 5 | 6 | from typing import List 7 | from netaddr import IPNetwork 8 | 9 | 10 | def get_all_network_ips(network: str) -> List[str]: 11 | """Given a network (0.0.0.0/24) return all possible ip addresses 12 | 13 | arg: 14 | network (str): IP network to extract IP's from 15 | 16 | return (list): IP's in given network 17 | """ 18 | return [str(ip) for ip in IPNetwork(network)] 19 | 20 | 21 | def get_all_networks_ips(networks: List[str]) -> List[List[str]]: 22 | """Given a list of networks return all possible ip addresses. 
from typing import List

from netaddr import IPNetwork


def get_all_network_ips(network: str) -> List[str]:
    """Given a network (0.0.0.0/24) return all possible ip addresses

    arg:
        network (str): IP network to extract IP's from

    return (list): IP's in given network
    """
    return [str(ip) for ip in IPNetwork(network)]


def get_all_networks_ips(networks: List[str]) -> List[List[str]]:
    """Given a list of networks return all possible ip addresses.

    arg:
        networks (list): List of IP networks to extract IP's from

    return (list): [ ['IP', 'Network'], ['IP2', 'Network'] ]
    """
    # Flat comprehension replaces the original accumulate-and-concatenate
    # loop, whose repeated `final_list = final_list + final` was quadratic.
    return [
        [ip, network] for network in networks for ip in get_all_network_ips(network)
    ]


def test_get_all_network_ips():
    # isinstance replaces the `type(x) == list` anti-pattern; expected lists
    # are generated rather than spelled out host-by-host.
    assert isinstance(get_all_network_ips("192.168.1.1/24"), list)
    assert get_all_network_ips("192.168.1.1/28") == [
        "192.168.1.{}".format(i) for i in range(16)
    ]
    assert get_all_network_ips("10.0.0.1/29") == [
        "10.0.0.{}".format(i) for i in range(8)
    ]


def test_get_all_networks_ips():
    assert isinstance(get_all_networks_ips(["10.0.0.1/30"]), list)
    expected = [["10.0.0.{}".format(i), "10.0.0.1/30"] for i in range(4)]
    expected += [["172.16.0.{}".format(i), "172.16.0.1/30"] for i in range(4)]
    assert get_all_networks_ips(["10.0.0.1/30", "172.16.0.1/30"]) == expected
"""Functions for setting up logging accross this project. 2 | """ 3 | __copyright__ = "Copyright (C) 2020-2021 Matt Ferreira" 4 | __license__ = "Apache License" 5 | 6 | 7 | import json 8 | import logging 9 | import os 10 | import sys 11 | 12 | 13 | def logging_setup(filename: str, stdout: bool = False) -> None: 14 | """Standard logging setup. 15 | 16 | args: 17 | filename (str): Script filename. 18 | 19 | kwargs: 20 | stdout (bool): Switch logging to stdout for testing 21 | 22 | return: None 23 | """ 24 | isinstance(filename, str) 25 | 26 | if os.path.isdir("logs") == False: 27 | os.mkdir("logs") 28 | logging.info("Creating logs folder.") 29 | 30 | # Logging configuration 31 | if stdout == False: 32 | fmtstr = "%(asctime)s:%(levelname)s:%(module)s:%(message)s" 33 | logging.basicConfig( 34 | filename="logs/{}.log".format(filename[:-3]), 35 | level=logging.DEBUG, 36 | filemode="a", 37 | format=fmtstr, 38 | ) 39 | else: 40 | fmtstr = "[%(levelname)s]\t%(message)s" 41 | logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=fmtstr) 42 | -------------------------------------------------------------------------------- /avmp/utils/vuln_db.py: -------------------------------------------------------------------------------- 1 | """Class functions for managing a sqlite database for Tenable vulnerabilities. 
2 | """ 3 | __copyright__ = "Copyright (C) 2020-2021 Matt Ferreira" 4 | __license__ = "Apache License" 5 | 6 | import datetime 7 | import logging 8 | import os 9 | import sqlite3 10 | 11 | 12 | class TenableSqliteVulnDB: 13 | def __init__( 14 | self, filepath, ignore_statuses=["Done", "Closed", "Platform Complete"] 15 | ): 16 | isinstance(filepath, str) 17 | isinstance(ignore_statuses, list) 18 | 19 | self.filepath = filepath 20 | self.db_name = os.path.basename(filepath) 21 | 22 | if not os.path.exists(filepath): 23 | logging.info("DB Not Found...") 24 | self._build_db() 25 | 26 | self.con = sqlite3.connect(filepath) 27 | self.ignore_statuses = ignore_statuses 28 | 29 | def _build_db(self): 30 | logging.info("Building DB now...") 31 | 32 | con = sqlite3.connect(self.db_name) 33 | 34 | con.execute( 35 | """ 36 | CREATE TABLE "tickets" ( 37 | "ticket_id" TEXT NOT NULL, 38 | "plugin_id" TEXT NOT NULL, 39 | "status" INTEGER, 40 | "created_date" TEXT NOT NULL, 41 | "modified_date" TEXT NOT NULL, 42 | PRIMARY KEY("ticket_id")) 43 | """ 44 | ) 45 | con.execute( 46 | """ 47 | CREATE TABLE "hosts" ( 48 | "host" TEXT NOT NULL, 49 | PRIMARY KEY("host")) 50 | """ 51 | ) 52 | con.execute( 53 | """ 54 | CREATE TABLE "hosts_tickets" ( 55 | "id" INTEGER NOT NULL UNIQUE, 56 | "host" INTEGER NOT NULL, 57 | "ticket_id" INTEGER NOT NULL, 58 | FOREIGN KEY("host") REFERENCES "hosts"("host"), 59 | FOREIGN KEY("ticket_id") REFERENCES "hosts"("ticket_id"), 60 | PRIMARY KEY("id" AUTOINCREMENT)) 61 | """ 62 | ) 63 | con.close() 64 | 65 | def add_ticket( 66 | self, 67 | ticket_number, 68 | plugin_id, 69 | status, 70 | hosts, 71 | date=datetime.datetime.now().strftime("%Y-%m-%d"), 72 | ): 73 | """Add a created ticket to database. 74 | 75 | args: 76 | ticket_number (str): Identification value for ticketing system. 77 | plugin_id (str): Plugin ID provided by Tenable. 78 | status (str): Status of new ticket in ticketing system. 79 | hosts (list): List of ip's associated with ticket. 
80 | kwargs: 81 | date (str): [default: current date YYYY-MM-DD] Date ticket was created. 82 | 83 | Return (bool): Confirmation of completion. 84 | """ 85 | isinstance(ticket_number, str) 86 | isinstance(plugin_id, str) 87 | isinstance(status, str) 88 | isinstance(hosts, list) 89 | isinstance(date, str) 90 | 91 | try: 92 | assert ( 93 | self.check_by_ticket_number(ticket_number) == False 94 | ), "Ticket already exists in DB" 95 | except AssertionError: 96 | logging.exception( 97 | '"{}" is already in {}'.format(ticket_number, self.db_name) 98 | ) 99 | return False 100 | 101 | sql = "INSERT INTO tickets (ticket_id, plugin_id, status, created_date, modified_date) " 102 | sql += 'values ("{}","{}","{}","{}","{}")'.format( 103 | ticket_number, plugin_id, status, date, date 104 | ) 105 | with self.con: 106 | self.con.execute(sql) 107 | 108 | for ip in hosts: 109 | if not self.check_by_host(ip): 110 | logging.info(f"Adding {ip} to db") 111 | sql = "INSERT INTO hosts (host)" 112 | sql += 'values ("{}")'.format(ip) 113 | 114 | with self.con: 115 | self.con.execute(sql) 116 | 117 | sql = "INSERT INTO hosts_tickets (host, ticket_id)" 118 | sql += 'values ("{}","{}")'.format(ip, ticket_number) 119 | 120 | with self.con: 121 | self.con.execute(sql) 122 | 123 | return True 124 | 125 | def check_by_host(self, host): 126 | """Given a host check if it exists in the database. 127 | 128 | args: 129 | host (str): IP/Hostname to check database for 130 | 131 | return (bool): Confirmation of existance. 132 | """ 133 | assert isinstance(host, str) 134 | sql = f'SELECT * FROM hosts WHERE host="{host}"' 135 | with self.con: 136 | data = self.con.execute(sql).fetchone() 137 | if data == None: 138 | logging.info(f'"{host}" does not exist.') 139 | return False 140 | else: 141 | logging.info(f'"{host}" exists.') 142 | return True 143 | 144 | def check_by_ticket_number(self, ticket_number): 145 | """Given a ticket number check if it exists in the database. 
146 | 147 | args: 148 | ticket_number (str): Identification value for ticketing system to check. 149 | 150 | return (bool): Confirmation of existance. 151 | """ 152 | assert isinstance(ticket_number, str) 153 | sql = f'SELECT * FROM tickets WHERE ticket_id="{ticket_number}"' 154 | with self.con: 155 | data = self.con.execute(sql).fetchone() 156 | if data == None: 157 | logging.info(f'"{ticket_number}" does not exist.') 158 | return False 159 | else: 160 | logging.info(f'"{ticket_number}" exists.') 161 | return True 162 | 163 | def update_status_by_ticket_number( 164 | self, ticket_number, status, date=datetime.datetime.now().strftime("%Y-%m-%d") 165 | ): 166 | """Update status on database row of given ticket_number. 167 | 168 | args: 169 | ticket_number (str): Identification value for ticketing system to update. 170 | status (str): New status to be added to database row. 171 | kwargs: 172 | date (str): [default: current date YYYY-MM-DD] Date ticket was created. 173 | 174 | Return (bool): Confirmation of update. 175 | """ 176 | isinstance(ticket_number, str) 177 | isinstance(status, str) 178 | isinstance(date, str) 179 | 180 | sql = f'UPDATE tickets SET status="{status}", modified_date="{date}" WHERE ticket_id="{ticket_number}"' 181 | 182 | try: 183 | with self.con: 184 | data = self.con.execute(sql) 185 | logging.info(f'Successfully updated status for "{ticket_number}"') 186 | return True 187 | except: 188 | logging.info( 189 | f'"{ticket_number}" is either already in a completed state or does not exist in "{self.db_name}"' 190 | ) 191 | return False 192 | 193 | def check_by_plugin_id(self, plugin_id): 194 | """Given a plugin id check if it exists in the database 195 | 196 | args: 197 | plugin_id (str): Plugin id for a given vulnerability. 198 | 199 | return (bool): Confirmation of exisitance. 
200 | """ 201 | assert isinstance(plugin_id, str) 202 | sql = f'SELECT * FROM tickets WHERE plugin_id="{plugin_id}"' 203 | with self.con: 204 | data = self.con.execute(sql).fetchall() 205 | if data == None: 206 | logging.info(f'"{plugin_id}" does not exist.') 207 | return False 208 | else: 209 | logging.info(f'"{plugin_id}" exists.') 210 | return True 211 | 212 | def get_by_plugin_id(self, plugin_id): 213 | """Given a plugin id return ticket_id and status 214 | 215 | args: 216 | plugin_id (str): Plugin id for a given vulnerability. 217 | 218 | return (list): List of each ticket_id and status 219 | """ 220 | assert isinstance(plugin_id, str) 221 | sql = f'SELECT * FROM tickets WHERE plugin_id="{plugin_id}"' 222 | with self.con: 223 | data = self.con.execute(sql).fetchall() 224 | if data == None: 225 | logging.info((f'No data found for "{plugin_id}"')) 226 | return None 227 | else: 228 | logging.info(f'""') 229 | 230 | # TODO: This is still incomplete and is a work in progress 231 | 232 | def get_all_tickets_by_plugin_id(self, plugin_id): 233 | """Given a plugin id get all tickets with working statuses. 234 | i.e. Ticket status can not be in self.ignore_statuses 235 | 236 | args: 237 | plugin_id (str): Plugin if for a given vulnerability 238 | 239 | return (list): List of all tickets. 
240 | """ 241 | assert isinstance(plugin_id, str) 242 | ignore_statuses = ['status != "{}"'.format(x) for x in self.ignore_statuses] 243 | where = " AND ".join(ignore_statuses) 244 | sql = f'SELECT * FROM tickets WHERE plugin_id="{plugin_id}" AND {where}' 245 | 246 | with self.con: 247 | data = self.con.execute(sql).fetchall() 248 | 249 | return data 250 | 251 | def get_all_tickets(self): 252 | if len(self.ignore_statuses) != 0: 253 | ignore_statuses = ['status != "{}"'.format(x) for x in self.ignore_statuses] 254 | where = " AND ".join(ignore_statuses) 255 | sql = f"SELECT ticket_id FROM tickets WHERE {where}" 256 | else: 257 | sql = "SELECT ticket_id FROM tickets" 258 | 259 | with self.con: 260 | data = self.con.execute(sql).fetchall() 261 | 262 | return data 263 | 264 | 265 | if __name__ == "__main__": 266 | db = TenableSqliteVulnDB("tickets.db") 267 | print(db.get_all_tickets_by_plugin_id("5192")) 268 | -------------------------------------------------------------------------------- /examples/dynamic_process_configs/readme.md: -------------------------------------------------------------------------------- 1 | # Work In Progress 2 | -------------------------------------------------------------------------------- /examples/static_process_configs/readme.md: -------------------------------------------------------------------------------- 1 | # Static Process Configurations 2 | 3 | | Name | Description | 4 | | ------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------ | 5 | | [Tenable ASV Process](tenable_asv_process.json) | Ticket process to ensure gaps are avoided for future PCI audits | 6 | | [Ticket Buckets for Engineer Time Tracking](ticket_buckets_for_engineer_time_tracking.json) | Ticket buckets for tracking time associated with specific standard InfoSec functions | 7 | 
-------------------------------------------------------------------------------- /examples/static_process_configs/tenable_asv_process.json: -------------------------------------------------------------------------------- 1 | { 2 | "process_type": "static", 3 | "time_saved_per_ticket": "5m", 4 | "time_saved_comment": "Time saved through automation", 5 | "parent_ticket": { 6 | "project": {"key": ""}, 7 | "summary": "QX YYYY - Quarterly ASV External Scan", 8 | "description": "Root ticket for tracking the remediation efforts for the quarterly external asv scan.\n\n* Please see linked tickets for individual vulnerability information.\n* Subtasks are for ensuring the proper steps are taken each quarter.", 9 | "issuetype": {"name": "Story"}, 10 | "assignee": {"name": ""}, 11 | "labels": ["pci"] 12 | }, 13 | "sub_tasks": { 14 | "ip_verification": { 15 | "project": {"key": ""}, 16 | "summary": "Request Verification of External IP Address List from Network Manager", 17 | "description": "Open a ticket with network team to review the current external ip address list.\n\nCurrent External IP Scan List:\n{code}IP'S HERE{code}", 18 | "issuetype": {"name": "Sub-task"}, 19 | "assignee": {"name": ""}, 20 | "labels": ["pci"] 21 | }, 22 | "vulnerability_remediation": { 23 | "project": {"key": ""}, 24 | "summary": "QX YYYY - Remediation Process", 25 | "description": "See linked tickets for vulnerability information.", 26 | "issuetype": {"name": "Sub-task"}, 27 | "assignee": {"name": ""}, 28 | "labels": ["pci"] 29 | }, 30 | "false_positive_review_and_approval": { 31 | "project": {"key": ""}, 32 | "summary": "False Positive Review and Approval", 33 | "description": "Describe who is required for review and approval and how to request it.", 34 | "issuetype": {"name": "Sub-task"}, 35 | "assignee": {"name": ""}, 36 | "labels": ["pci"] 37 | }, 38 | "submit_to_asv_workbench": { 39 | "project": {"key": ""}, 40 | "summary": "Submit Scan to ASV Workbench in Tenable", 41 | "description": "", 42 | 
"issuetype": {"name": "Sub-task"}, 43 | "assignee": {"name": ""}, 44 | "labels": ["pci"] 45 | }, 46 | "upload_evidence": { 47 | "project": {"key": ""}, 48 | "summary": "Upload Evidence for False Positives", 49 | "description": "", 50 | "issuetype": {"name": "Sub-task"}, 51 | "assignee": {"name": ""}, 52 | "labels": ["pci"] 53 | }, 54 | "submit_for_attestation": { 55 | "project": {"key": ""}, 56 | "summary": "Submit Scan for Attestation", 57 | "description": "", 58 | "issuetype": {"name": "Sub-task"}, 59 | "assignee": {"name": ""}, 60 | "labels": ["pci"] 61 | }, 62 | "dispute_issues": { 63 | "project": {"key": ""}, 64 | "summary": "Dispute Issues with Tenable ASV Scanning Team (if needed)", 65 | "description": "", 66 | "issuetype": {"name": "Sub-task"}, 67 | "assignee": {"name": ""}, 68 | "labels": ["pci"] 69 | }, 70 | "upload_to_asv_drive_folder": { 71 | "project": {"key": ""}, 72 | "summary": "Upload Results to ASV Scan Drive Folder", 73 | "description": "Link to ASV Scan Drive Folder (special permissions required to view):\n<>", 74 | "issuetype": {"name": "Sub-task"}, 75 | "assignee": {"name": ""}, 76 | "labels": ["pci"] 77 | }, 78 | "add_scan_data_to_root_ticket": { 79 | "project": {"key": ""}, 80 | "summary": "Upload Scan Data to Root Ticket", 81 | "description": "The following should be uploaded to the root ticket:\n* Initial ASV Scan Results\n* Completed ASV Attestation", 82 | "issuetype": {"name": "Sub-task"}, 83 | "assignee": {"name": ""}, 84 | "labels": ["pci"] 85 | } 86 | } 87 | } -------------------------------------------------------------------------------- /examples/static_process_configs/ticket_buckets_for_engineer_time_tracking.json: -------------------------------------------------------------------------------- 1 | { 2 | "process_type": "static", 3 | "time_saved_per_ticket": "5m", 4 | "time_saved_comment": "Time saved through automation", 5 | "parent_ticket": { 6 | "project": {"key": ""}, 7 | "summary": "Weekly Information Security Engineer 
To-Do's", 8 | "description": "See sub-tasks for details.", 9 | "issuetype": {"name": "Story"}, 10 | "assignee": {"name": ""}, 11 | "priority": {"id": ""} 12 | }, 13 | "sub_tasks": { 14 | "meetings": { 15 | "project": {"key": ""}, 16 | "summary": "Business Meetings", 17 | "description": "", 18 | "issuetype": {"name": "Sub-task"}, 19 | "assignee": {"name": ""}, 20 | "labels": ["meeting"] 21 | }, 22 | "admin_tasks": { 23 | "project": {"key": ""}, 24 | "summary": "Admin Tasks", 25 | "description": "Emails, slack messages, etc.", 26 | "issuetype": {"name": "Sub-task"}, 27 | "assignee": {"name": ""} 28 | }, 29 | "threat_vulnerability_management": { 30 | "project": {"key": ""}, 31 | "summary": "Threat/Vulnerability Management", 32 | "description": "Tasks done inside of the endpoint protection platform and vulnerability management platform.", 33 | "issuetype": {"name": "Sub-task"}, 34 | "assignee": {"name": ""} 35 | }, 36 | "logs_records_management": { 37 | "project": {"key": ""}, 38 | "summary": "Logs/Records Management", 39 | "description": "Tasks done inside of SIEM.", 40 | "issuetype": {"name": "Sub-task"}, 41 | "assignee": {"name": ""} 42 | }, 43 | "incidents_events": { 44 | "project": {"key": ""}, 45 | "summary": "Incidents/Events", 46 | "description": "", 47 | "issuetype": {"name": "Sub-task"}, 48 | "assignee": {"name": ""} 49 | }, 50 | "automation_scripts": { 51 | "project": {"key": ""}, 52 | "summary": "Automation/Scripting", 53 | "description": "", 54 | "issuetype": {"name": "Sub-task"}, 55 | "assignee": {"name": ""} 56 | } 57 | } 58 | } -------------------------------------------------------------------------------- /logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RackReaver/AVMP/7b28116f5623212243b59b1b7d6685854316e527/logo.png -------------------------------------------------------------------------------- /poetry.lock: 
-------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "arrow" 3 | version = "1.2.1" 4 | description = "Better dates & times for Python" 5 | category = "main" 6 | optional = false 7 | python-versions = ">=3.6" 8 | 9 | [package.dependencies] 10 | python-dateutil = ">=2.7.0" 11 | 12 | [[package]] 13 | name = "atomicwrites" 14 | version = "1.4.0" 15 | description = "Atomic file writes." 16 | category = "dev" 17 | optional = false 18 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 19 | 20 | [[package]] 21 | name = "attrs" 22 | version = "21.2.0" 23 | description = "Classes Without Boilerplate" 24 | category = "dev" 25 | optional = false 26 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 27 | 28 | [package.extras] 29 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] 30 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] 31 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] 32 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] 33 | 34 | [[package]] 35 | name = "autopep8" 36 | version = "1.5.4" 37 | description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" 38 | category = "dev" 39 | optional = false 40 | python-versions = "*" 41 | 42 | [package.dependencies] 43 | pycodestyle = ">=2.6.0" 44 | toml = "*" 45 | 46 | [[package]] 47 | name = "black" 48 | version = "22.1.0" 49 | description = "The uncompromising code formatter." 
50 | category = "dev" 51 | optional = false 52 | python-versions = ">=3.6.2" 53 | 54 | [package.dependencies] 55 | click = ">=8.0.0" 56 | mypy-extensions = ">=0.4.3" 57 | pathspec = ">=0.9.0" 58 | platformdirs = ">=2" 59 | tomli = ">=1.1.0" 60 | typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} 61 | 62 | [package.extras] 63 | colorama = ["colorama (>=0.4.3)"] 64 | d = ["aiohttp (>=3.7.4)"] 65 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 66 | uvloop = ["uvloop (>=0.15.2)"] 67 | 68 | [[package]] 69 | name = "certifi" 70 | version = "2021.10.8" 71 | description = "Python package for providing Mozilla's CA Bundle." 72 | category = "main" 73 | optional = false 74 | python-versions = "*" 75 | 76 | [[package]] 77 | name = "cffi" 78 | version = "1.15.0" 79 | description = "Foreign Function Interface for Python calling C code." 80 | category = "main" 81 | optional = false 82 | python-versions = "*" 83 | 84 | [package.dependencies] 85 | pycparser = "*" 86 | 87 | [[package]] 88 | name = "chardet" 89 | version = "4.0.0" 90 | description = "Universal encoding detector for Python 2 and 3" 91 | category = "main" 92 | optional = false 93 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 94 | 95 | [[package]] 96 | name = "click" 97 | version = "8.0.4" 98 | description = "Composable command line interface toolkit" 99 | category = "dev" 100 | optional = false 101 | python-versions = ">=3.6" 102 | 103 | [package.dependencies] 104 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 105 | 106 | [[package]] 107 | name = "colorama" 108 | version = "0.4.4" 109 | description = "Cross-platform colored terminal text." 
110 | category = "dev" 111 | optional = false 112 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 113 | 114 | [[package]] 115 | name = "coverage" 116 | version = "5.5" 117 | description = "Code coverage measurement for Python" 118 | category = "dev" 119 | optional = false 120 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 121 | 122 | [package.extras] 123 | toml = ["toml"] 124 | 125 | [[package]] 126 | name = "cryptography" 127 | version = "3.4.8" 128 | description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 129 | category = "main" 130 | optional = false 131 | python-versions = ">=3.6" 132 | 133 | [package.dependencies] 134 | cffi = ">=1.12" 135 | 136 | [package.extras] 137 | docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] 138 | docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] 139 | pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] 140 | sdist = ["setuptools-rust (>=0.11.4)"] 141 | ssh = ["bcrypt (>=3.1.5)"] 142 | test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] 143 | 144 | [[package]] 145 | name = "defusedxml" 146 | version = "0.7.1" 147 | description = "XML bomb protection for Python stdlib modules" 148 | category = "main" 149 | optional = false 150 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 151 | 152 | [[package]] 153 | name = "docopt" 154 | version = "0.6.2" 155 | description = "Pythonic argument parser, that will make you smile" 156 | category = "main" 157 | optional = false 158 | python-versions = "*" 159 | 160 | [[package]] 161 | name = "idna" 162 | version = "2.10" 163 | description = "Internationalized Domain Names in Applications (IDNA)" 164 | category = "main" 165 | optional = false 166 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, 
!=3.2.*, !=3.3.*" 167 | 168 | [[package]] 169 | name = "iniconfig" 170 | version = "1.1.1" 171 | description = "iniconfig: brain-dead simple config-ini parsing" 172 | category = "dev" 173 | optional = false 174 | python-versions = "*" 175 | 176 | [[package]] 177 | name = "isort" 178 | version = "5.10.1" 179 | description = "A Python utility / library to sort Python imports." 180 | category = "dev" 181 | optional = false 182 | python-versions = ">=3.6.1,<4.0" 183 | 184 | [package.extras] 185 | pipfile_deprecated_finder = ["pipreqs", "requirementslib"] 186 | requirements_deprecated_finder = ["pipreqs", "pip-api"] 187 | colors = ["colorama (>=0.4.3,<0.5.0)"] 188 | plugins = ["setuptools"] 189 | 190 | [[package]] 191 | name = "jira" 192 | version = "2.0.0" 193 | description = "Python library for interacting with JIRA via REST APIs." 194 | category = "main" 195 | optional = false 196 | python-versions = "*" 197 | 198 | [package.dependencies] 199 | defusedxml = "*" 200 | oauthlib = {version = ">=1.0.0", extras = ["signedtoken"]} 201 | pbr = ">=3.0.0" 202 | requests = ">=2.10.0" 203 | requests-oauthlib = ">=0.6.1" 204 | requests-toolbelt = "*" 205 | six = ">=1.10.0" 206 | 207 | [package.extras] 208 | async = ["requests-futures (>=0.9.7)"] 209 | cli = ["ipython (>=4.0.0,<6.0.0)", "ipython (>=4.0.0)"] 210 | opt = ["filemagic (>=1.6)", "pyjwt", "requests-jwt", "requests-kerberos"] 211 | 212 | [[package]] 213 | name = "marshmallow" 214 | version = "3.14.1" 215 | description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
216 | category = "main" 217 | optional = false 218 | python-versions = ">=3.6" 219 | 220 | [package.extras] 221 | dev = ["pytest", "pytz", "simplejson", "mypy (==0.910)", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "pre-commit (>=2.4,<3.0)", "tox"] 222 | docs = ["sphinx (==4.3.0)", "sphinx-issues (==1.2.0)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.7)"] 223 | lint = ["mypy (==0.910)", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "pre-commit (>=2.4,<3.0)"] 224 | tests = ["pytest", "pytz", "simplejson"] 225 | 226 | [[package]] 227 | name = "mypy-extensions" 228 | version = "0.4.3" 229 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 230 | category = "dev" 231 | optional = false 232 | python-versions = "*" 233 | 234 | [[package]] 235 | name = "netaddr" 236 | version = "0.8.0" 237 | description = "A network address manipulation library for Python" 238 | category = "main" 239 | optional = false 240 | python-versions = "*" 241 | 242 | [[package]] 243 | name = "oauthlib" 244 | version = "3.1.1" 245 | description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" 246 | category = "main" 247 | optional = false 248 | python-versions = ">=3.6" 249 | 250 | [package.dependencies] 251 | cryptography = {version = ">=3.0.0,<4", optional = true, markers = "extra == \"signedtoken\""} 252 | pyjwt = {version = ">=2.0.0,<3", optional = true, markers = "extra == \"signedtoken\""} 253 | 254 | [package.extras] 255 | rsa = ["cryptography (>=3.0.0,<4)"] 256 | signals = ["blinker (>=1.4.0)"] 257 | signedtoken = ["cryptography (>=3.0.0,<4)", "pyjwt (>=2.0.0,<3)"] 258 | 259 | [[package]] 260 | name = "packaging" 261 | version = "21.3" 262 | description = "Core utilities for Python packages" 263 | category = "dev" 264 | optional = false 265 | python-versions = ">=3.6" 266 | 267 | [package.dependencies] 268 | pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" 269 | 270 
| [[package]] 271 | name = "pathspec" 272 | version = "0.9.0" 273 | description = "Utility library for gitignore style pattern matching of file paths." 274 | category = "dev" 275 | optional = false 276 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 277 | 278 | [[package]] 279 | name = "pbr" 280 | version = "5.8.0" 281 | description = "Python Build Reasonableness" 282 | category = "main" 283 | optional = false 284 | python-versions = ">=2.6" 285 | 286 | [[package]] 287 | name = "platformdirs" 288 | version = "2.5.1" 289 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 290 | category = "dev" 291 | optional = false 292 | python-versions = ">=3.7" 293 | 294 | [package.extras] 295 | docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] 296 | test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] 297 | 298 | [[package]] 299 | name = "pluggy" 300 | version = "0.13.1" 301 | description = "plugin and hook calling mechanisms for python" 302 | category = "dev" 303 | optional = false 304 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 305 | 306 | [package.extras] 307 | dev = ["pre-commit", "tox"] 308 | 309 | [[package]] 310 | name = "py" 311 | version = "1.11.0" 312 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 313 | category = "dev" 314 | optional = false 315 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 316 | 317 | [[package]] 318 | name = "pycodestyle" 319 | version = "2.8.0" 320 | description = "Python style guide checker" 321 | category = "dev" 322 | optional = false 323 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 324 | 325 | [[package]] 326 | name = "pycparser" 327 | version = "2.21" 328 | description = "C parser in Python" 329 | category = "main" 330 | optional = false 331 | 
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 332 | 333 | [[package]] 334 | name = "pyjwt" 335 | version = "2.3.0" 336 | description = "JSON Web Token implementation in Python" 337 | category = "main" 338 | optional = false 339 | python-versions = ">=3.6" 340 | 341 | [package.extras] 342 | crypto = ["cryptography (>=3.3.1)"] 343 | dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] 344 | docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] 345 | tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] 346 | 347 | [[package]] 348 | name = "pyparsing" 349 | version = "3.0.6" 350 | description = "Python parsing module" 351 | category = "dev" 352 | optional = false 353 | python-versions = ">=3.6" 354 | 355 | [package.extras] 356 | diagrams = ["jinja2", "railroad-diagrams"] 357 | 358 | [[package]] 359 | name = "pytenable" 360 | version = "1.2.6" 361 | description = "Python library to interface into Tenable's products and applications" 362 | category = "main" 363 | optional = false 364 | python-versions = "*" 365 | 366 | [package.dependencies] 367 | marshmallow = ">=3.6" 368 | python-box = ">=4.0" 369 | python-dateutil = ">=2.6" 370 | requests = ">=2.19" 371 | restfly = ">=1.3.5" 372 | semver = ">=2.8.1" 373 | 374 | [package.extras] 375 | NessusReportv2 = ["defusedxml (>=0.5.0)"] 376 | PWCertAuth = ["requests-pkcs12 (>=1.3)"] 377 | complete = ["defusedxml (>=0.5.0)", "requests-pkcs12 (>=1.3)", "docker (>=3.7.2)"] 378 | docker = ["docker (>=3.7.2)"] 379 | 380 | [[package]] 381 | name = "pytest" 382 | version = "6.2.4" 383 | description = "pytest: simple powerful testing with Python" 384 | category = "dev" 385 | optional = false 386 | python-versions = ">=3.6" 387 | 388 | [package.dependencies] 389 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 390 | attrs = ">=19.2.0" 391 | colorama = {version = "*", markers = 
"sys_platform == \"win32\""} 392 | iniconfig = "*" 393 | packaging = "*" 394 | pluggy = ">=0.12,<1.0.0a1" 395 | py = ">=1.8.2" 396 | toml = "*" 397 | 398 | [package.extras] 399 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 400 | 401 | [[package]] 402 | name = "python-box" 403 | version = "5.4.1" 404 | description = "Advanced Python dictionaries with dot notation access" 405 | category = "main" 406 | optional = false 407 | python-versions = ">=3.6" 408 | 409 | [package.extras] 410 | pyyaml = ["pyyaml"] 411 | all = ["ruamel.yaml", "toml", "msgpack"] 412 | msgpack = ["msgpack"] 413 | "ruamel.yaml" = ["ruamel.yaml"] 414 | toml = ["toml"] 415 | yaml = ["ruamel.yaml"] 416 | 417 | [[package]] 418 | name = "python-dateutil" 419 | version = "2.8.2" 420 | description = "Extensions to the standard Python datetime module" 421 | category = "main" 422 | optional = false 423 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 424 | 425 | [package.dependencies] 426 | six = ">=1.5" 427 | 428 | [[package]] 429 | name = "requests" 430 | version = "2.25.1" 431 | description = "Python HTTP for Humans." 432 | category = "main" 433 | optional = false 434 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 435 | 436 | [package.dependencies] 437 | certifi = ">=2017.4.17" 438 | chardet = ">=3.0.2,<5" 439 | idna = ">=2.5,<3" 440 | urllib3 = ">=1.21.1,<1.27" 441 | 442 | [package.extras] 443 | security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] 444 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 445 | 446 | [[package]] 447 | name = "requests-oauthlib" 448 | version = "1.3.0" 449 | description = "OAuthlib authentication support for Requests." 
450 | category = "main" 451 | optional = false 452 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 453 | 454 | [package.dependencies] 455 | oauthlib = ">=3.0.0" 456 | requests = ">=2.0.0" 457 | 458 | [package.extras] 459 | rsa = ["oauthlib[signedtoken] (>=3.0.0)"] 460 | 461 | [[package]] 462 | name = "requests-toolbelt" 463 | version = "0.9.1" 464 | description = "A utility belt for advanced users of python-requests" 465 | category = "main" 466 | optional = false 467 | python-versions = "*" 468 | 469 | [package.dependencies] 470 | requests = ">=2.0.1,<3.0.0" 471 | 472 | [[package]] 473 | name = "restfly" 474 | version = "1.4.1" 475 | description = "A library to make API wrappers creation easier" 476 | category = "main" 477 | optional = false 478 | python-versions = "*" 479 | 480 | [package.dependencies] 481 | arrow = ">=1.0.2" 482 | python-box = ">=5.3.0" 483 | requests = ">=2.25.1" 484 | 485 | [[package]] 486 | name = "semver" 487 | version = "2.13.0" 488 | description = "Python helper for Semantic Versioning (http://semver.org/)" 489 | category = "main" 490 | optional = false 491 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 492 | 493 | [[package]] 494 | name = "six" 495 | version = "1.16.0" 496 | description = "Python 2 and 3 compatibility utilities" 497 | category = "main" 498 | optional = false 499 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 500 | 501 | [[package]] 502 | name = "toml" 503 | version = "0.10.2" 504 | description = "Python Library for Tom's Obvious, Minimal Language" 505 | category = "dev" 506 | optional = false 507 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 508 | 509 | [[package]] 510 | name = "tomli" 511 | version = "2.0.1" 512 | description = "A lil' TOML parser" 513 | category = "dev" 514 | optional = false 515 | python-versions = ">=3.7" 516 | 517 | [[package]] 518 | name = "typing-extensions" 519 | version = "4.1.1" 520 | description = "Backported and Experimental Type Hints for 
Python 3.6+" 521 | category = "dev" 522 | optional = false 523 | python-versions = ">=3.6" 524 | 525 | [[package]] 526 | name = "urllib3" 527 | version = "1.26.7" 528 | description = "HTTP library with thread-safe connection pooling, file post, and more." 529 | category = "main" 530 | optional = false 531 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 532 | 533 | [package.extras] 534 | brotli = ["brotlipy (>=0.6.0)"] 535 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 536 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 537 | 538 | [metadata] 539 | lock-version = "1.1" 540 | python-versions = "^3.9" 541 | content-hash = "a1fe4c2f6799452b582ed9f28b40ce02eac7d72ff537d1ec1d71e7f32b5871df" 542 | 543 | [metadata.files] 544 | arrow = [ 545 | {file = "arrow-1.2.1-py3-none-any.whl", hash = "sha256:6b2914ef3997d1fd7b37a71ce9dd61a6e329d09e1c7b44f4d3099ca4a5c0933e"}, 546 | {file = "arrow-1.2.1.tar.gz", hash = "sha256:c2dde3c382d9f7e6922ce636bf0b318a7a853df40ecb383b29192e6c5cc82840"}, 547 | ] 548 | atomicwrites = [ 549 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 550 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 551 | ] 552 | attrs = [ 553 | {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, 554 | {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, 555 | ] 556 | autopep8 = [ 557 | {file = "autopep8-1.5.4.tar.gz", hash = "sha256:d21d3901cb0da6ebd1e83fc9b0dfbde8b46afc2ede4fe32fbda0c7c6118ca094"}, 558 | ] 559 | black = [ 560 | {file = "black-22.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1297c63b9e1b96a3d0da2d85d11cd9bf8664251fd69ddac068b98dc4f34f73b6"}, 561 | {file = 
"black-22.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2ff96450d3ad9ea499fc4c60e425a1439c2120cbbc1ab959ff20f7c76ec7e866"}, 562 | {file = "black-22.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e21e1f1efa65a50e3960edd068b6ae6d64ad6235bd8bfea116a03b21836af71"}, 563 | {file = "black-22.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f69158a7d120fd641d1fa9a921d898e20d52e44a74a6fbbcc570a62a6bc8ab"}, 564 | {file = "black-22.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:228b5ae2c8e3d6227e4bde5920d2fc66cc3400fde7bcc74f480cb07ef0b570d5"}, 565 | {file = "black-22.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b1a5ed73ab4c482208d20434f700d514f66ffe2840f63a6252ecc43a9bc77e8a"}, 566 | {file = "black-22.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35944b7100af4a985abfcaa860b06af15590deb1f392f06c8683b4381e8eeaf0"}, 567 | {file = "black-22.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7835fee5238fc0a0baf6c9268fb816b5f5cd9b8793423a75e8cd663c48d073ba"}, 568 | {file = "black-22.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dae63f2dbf82882fa3b2a3c49c32bffe144970a573cd68d247af6560fc493ae1"}, 569 | {file = "black-22.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fa1db02410b1924b6749c245ab38d30621564e658297484952f3d8a39fce7e8"}, 570 | {file = "black-22.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c8226f50b8c34a14608b848dc23a46e5d08397d009446353dad45e04af0c8e28"}, 571 | {file = "black-22.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2d6f331c02f0f40aa51a22e479c8209d37fcd520c77721c034517d44eecf5912"}, 572 | {file = "black-22.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:742ce9af3086e5bd07e58c8feb09dbb2b047b7f566eb5f5bc63fd455814979f3"}, 573 | {file = "black-22.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fdb8754b453fb15fad3f72cd9cad3e16776f0964d67cf30ebcbf10327a3777a3"}, 574 | {file = 
"black-22.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5660feab44c2e3cb24b2419b998846cbb01c23c7fe645fee45087efa3da2d61"}, 575 | {file = "black-22.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:6f2f01381f91c1efb1451998bd65a129b3ed6f64f79663a55fe0e9b74a5f81fd"}, 576 | {file = "black-22.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:efbadd9b52c060a8fc3b9658744091cb33c31f830b3f074422ed27bad2b18e8f"}, 577 | {file = "black-22.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8871fcb4b447206904932b54b567923e5be802b9b19b744fdff092bd2f3118d0"}, 578 | {file = "black-22.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccad888050f5393f0d6029deea2a33e5ae371fd182a697313bdbd835d3edaf9c"}, 579 | {file = "black-22.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07e5c049442d7ca1a2fc273c79d1aecbbf1bc858f62e8184abe1ad175c4f7cc2"}, 580 | {file = "black-22.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:373922fc66676133ddc3e754e4509196a8c392fec3f5ca4486673e685a421321"}, 581 | {file = "black-22.1.0-py3-none-any.whl", hash = "sha256:3524739d76b6b3ed1132422bf9d82123cd1705086723bc3e235ca39fd21c667d"}, 582 | {file = "black-22.1.0.tar.gz", hash = "sha256:a7c0192d35635f6fc1174be575cb7915e92e5dd629ee79fdaf0dcfa41a80afb5"}, 583 | ] 584 | certifi = [ 585 | {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, 586 | {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, 587 | ] 588 | cffi = [ 589 | {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, 590 | {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, 591 | {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = 
"sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, 592 | {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, 593 | {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, 594 | {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, 595 | {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, 596 | {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, 597 | {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, 598 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, 599 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, 600 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, 601 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, 602 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, 603 | {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, 604 | {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, 605 | {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, 606 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, 607 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, 608 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, 609 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, 610 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, 611 | {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, 612 | {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, 613 | {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, 614 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, 615 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, 616 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, 617 | {file = 
"cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, 618 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, 619 | {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, 620 | {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, 621 | {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, 622 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, 623 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, 624 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, 625 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, 626 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, 627 | {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, 628 | {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, 629 | {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, 
630 | {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, 631 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, 632 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, 633 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, 634 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, 635 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, 636 | {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, 637 | {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, 638 | {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, 639 | ] 640 | chardet = [ 641 | {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, 642 | {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, 643 | ] 644 | click = [ 645 | {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, 646 | {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, 647 | ] 648 | colorama = [ 649 | {file = 
"colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 650 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 651 | ] 652 | coverage = [ 653 | {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, 654 | {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, 655 | {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, 656 | {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, 657 | {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, 658 | {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, 659 | {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, 660 | {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, 661 | {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, 662 | {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, 663 | {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, 664 | {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = 
"sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, 665 | {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, 666 | {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, 667 | {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, 668 | {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, 669 | {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, 670 | {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, 671 | {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, 672 | {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, 673 | {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, 674 | {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, 675 | {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, 676 | {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, 677 | {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, 678 | {file = 
"coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, 679 | {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, 680 | {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, 681 | {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, 682 | {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, 683 | {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, 684 | {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, 685 | {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, 686 | {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, 687 | {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, 688 | {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, 689 | {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, 690 | {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, 691 | {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, 692 | 
{file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, 693 | {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, 694 | {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, 695 | {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, 696 | {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, 697 | {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, 698 | {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, 699 | {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, 700 | {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, 701 | {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, 702 | {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, 703 | {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, 704 | {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, 705 | ] 706 | cryptography = [ 707 | {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, 708 | {file = 
"cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, 709 | {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, 710 | {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, 711 | {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, 712 | {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, 713 | {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, 714 | {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, 715 | {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, 716 | {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, 717 | {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, 718 | {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, 719 | {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, 720 | {file = 
"cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, 721 | {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, 722 | {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, 723 | {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, 724 | {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, 725 | {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, 726 | ] 727 | defusedxml = [ 728 | {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, 729 | {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, 730 | ] 731 | docopt = [ 732 | {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, 733 | ] 734 | idna = [ 735 | {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, 736 | {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, 737 | ] 738 | iniconfig = [ 739 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, 740 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, 741 | ] 742 | isort = [ 743 | {file = 
"isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, 744 | {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, 745 | ] 746 | jira = [ 747 | {file = "jira-2.0.0-py2.py3-none-any.whl", hash = "sha256:9adeead4d5f5a6aff74c630787f8bd2d4b0e154f3a3036641298064e91b2d25d"}, 748 | {file = "jira-2.0.0.tar.gz", hash = "sha256:e2a94adff98e45b29ded030adc76103eab34fa7d4d57303f211f572bedba0e93"}, 749 | ] 750 | marshmallow = [ 751 | {file = "marshmallow-3.14.1-py3-none-any.whl", hash = "sha256:04438610bc6dadbdddb22a4a55bcc7f6f8099e69580b2e67f5a681933a1f4400"}, 752 | {file = "marshmallow-3.14.1.tar.gz", hash = "sha256:4c05c1684e0e97fe779c62b91878f173b937fe097b356cd82f793464f5bc6138"}, 753 | ] 754 | mypy-extensions = [ 755 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 756 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 757 | ] 758 | netaddr = [ 759 | {file = "netaddr-0.8.0-py2.py3-none-any.whl", hash = "sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac"}, 760 | {file = "netaddr-0.8.0.tar.gz", hash = "sha256:d6cc57c7a07b1d9d2e917aa8b36ae8ce61c35ba3fcd1b83ca31c5a0ee2b5a243"}, 761 | ] 762 | oauthlib = [ 763 | {file = "oauthlib-3.1.1-py2.py3-none-any.whl", hash = "sha256:42bf6354c2ed8c6acb54d971fce6f88193d97297e18602a3a886603f9d7730cc"}, 764 | {file = "oauthlib-3.1.1.tar.gz", hash = "sha256:8f0215fcc533dd8dd1bee6f4c412d4f0cd7297307d43ac61666389e3bc3198a3"}, 765 | ] 766 | packaging = [ 767 | {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, 768 | {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, 769 | ] 770 | pathspec = [ 771 | {file = 
"pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, 772 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, 773 | ] 774 | pbr = [ 775 | {file = "pbr-5.8.0-py2.py3-none-any.whl", hash = "sha256:176e8560eaf61e127817ef93d8a844803abb27a4d4637f0ff3bb783129be2e0a"}, 776 | {file = "pbr-5.8.0.tar.gz", hash = "sha256:672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"}, 777 | ] 778 | platformdirs = [ 779 | {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"}, 780 | {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"}, 781 | ] 782 | pluggy = [ 783 | {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, 784 | {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, 785 | ] 786 | py = [ 787 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 788 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 789 | ] 790 | pycodestyle = [ 791 | {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, 792 | {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, 793 | ] 794 | pycparser = [ 795 | {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, 796 | {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, 797 | ] 798 | pyjwt = [ 799 | {file = 
"PyJWT-2.3.0-py3-none-any.whl", hash = "sha256:e0c4bb8d9f0af0c7f5b1ec4c5036309617d03d56932877f2f7a0beeb5318322f"}, 800 | {file = "PyJWT-2.3.0.tar.gz", hash = "sha256:b888b4d56f06f6dcd777210c334e69c737be74755d3e5e9ee3fe67dc18a0ee41"}, 801 | ] 802 | pyparsing = [ 803 | {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, 804 | {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, 805 | ] 806 | pytenable = [ 807 | {file = "pyTenable-1.2.6.tar.gz", hash = "sha256:fe08504f9ab368c2619c8caef86fdd339bf08fd9e30f5dae6ecd2bccb90f1ebc"}, 808 | ] 809 | pytest = [ 810 | {file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"}, 811 | {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, 812 | ] 813 | python-box = [ 814 | {file = "python-box-5.4.1.tar.gz", hash = "sha256:b68e0f8abc86f3deda751b3390f64df64a0989459de51ba4db949662a7b4d8ac"}, 815 | {file = "python_box-5.4.1-py3-none-any.whl", hash = "sha256:60ae9156de34cf92b899bd099580950df70a5b0813e67a3310a1cdd1976457fa"}, 816 | ] 817 | python-dateutil = [ 818 | {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, 819 | {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, 820 | ] 821 | requests = [ 822 | {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, 823 | {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, 824 | ] 825 | requests-oauthlib = [ 826 | {file = "requests-oauthlib-1.3.0.tar.gz", hash = "sha256:b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a"}, 827 | 
{file = "requests_oauthlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:7f71572defaecd16372f9006f33c2ec8c077c3cfa6f5911a9a90202beb513f3d"}, 828 | {file = "requests_oauthlib-1.3.0-py3.7.egg", hash = "sha256:fa6c47b933f01060936d87ae9327fead68768b69c6c9ea2109c48be30f2d4dbc"}, 829 | ] 830 | requests-toolbelt = [ 831 | {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, 832 | {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, 833 | ] 834 | restfly = [ 835 | {file = "restfly-1.4.1.tar.gz", hash = "sha256:036fa18929dce4b104ab67d35fa86629460caf10f45bc35a661adfebf5463f29"}, 836 | ] 837 | semver = [ 838 | {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, 839 | {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, 840 | ] 841 | six = [ 842 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 843 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 844 | ] 845 | toml = [ 846 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 847 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 848 | ] 849 | tomli = [ 850 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 851 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 852 | ] 853 | typing-extensions = [ 854 | {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = 
"sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, 855 | {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, 856 | ] 857 | urllib3 = [ 858 | {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, 859 | {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, 860 | ] 861 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "avmp" 3 | version = "0.1.1" 4 | description = "Command line vulnerability program manager." 5 | authors = ["RackReaver "] 6 | license = "Apache-2.0" 7 | 8 | readme = "README.md" # Markdown files are supported 9 | 10 | repository = "https://github.com/RackReaver/AVMP" 11 | homepage = "https://github.com/RackReaver/AVMP" 12 | 13 | keywords = ["automation", "jira", "vulnerability-detection", "vulnerability-management", "vulnerability-scanners", "vulnerability-assessment", "tenable", "vulnerability-scanning", "tenableio"] 14 | 15 | [tool.poetry.scripts] 16 | avmp = "avmp.core.cli: main" 17 | 18 | [tool.poetry.dependencies] 19 | python = "^3.9" 20 | jira = "2.0.0" 21 | pyTenable = "1.2.6" 22 | requests = "2.25.1" 23 | docopt = "^0.6.2" 24 | netaddr = "^0.8.0" 25 | 26 | [tool.poetry.dev-dependencies] 27 | autopep8 = "1.5.4" 28 | coverage = "5.5" 29 | pytest = "6.2.4" 30 | isort = "^5.10.1" 31 | black = "^22.1.0" 32 | 33 | [build-system] 34 | requires = ["poetry-core>=1.0.0"] 35 | build-backend = "poetry.core.masonry.api" 36 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | atomicwrites==1.4.0 2 | attrs==21.2.0 3 | autopep8==1.5.4 4 | 
certifi==2020.12.5 5 | cffi==1.14.4 6 | chardet==4.0.0 7 | click==7.1.2 8 | colorama==0.4.4 9 | coverage==5.5 10 | cryptography==3.3.1 11 | defusedxml==0.6.0 12 | gitdb==4.0.7 13 | GitPython==3.1.17 14 | idna==2.10 15 | importlib-metadata==4.5.0 16 | iniconfig==1.1.1 17 | jira==2.0.0 18 | marshmallow==3.10.0 19 | netaddr==0.8.0 20 | oauthlib==3.1.0 21 | packaging==20.9 22 | pbr==5.5.1 23 | pluggy==0.13.1 24 | py==1.10.0 25 | pycodestyle==2.6.0 26 | pycparser==2.20 27 | PyJWT==2.0.1 28 | pyparsing==2.4.7 29 | pyTenable==1.2.6 30 | pytest==6.2.4 31 | python-box==5.2.0 32 | python-dateutil==2.8.1 33 | requests==2.25.1 34 | requests-oauthlib==1.3.0 35 | requests-toolbelt==0.9.1 36 | restfly==1.3.5 37 | rope==0.18.0 38 | semver==2.13.0 39 | six==1.15.0 40 | smmap==4.0.0 41 | toml==0.10.2 42 | typing-extensions==3.10.0.0 43 | urllib3==1.26.2 44 | zipp==3.4.1 45 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """Setup file. 
2 | """ 3 | __copyright__ = "Copyright (C) 2021 Matt Ferreira" 4 | __license__ = "Apache License" 5 | 6 | # Read contents of README.md file 7 | from os import path 8 | 9 | from setuptools import find_packages, setup 10 | 11 | this_directory = path.abspath(path.dirname(__file__)) 12 | with open(path.join(this_directory, "README.md"), encoding="utf-8") as f: 13 | README = f.read() 14 | 15 | with open("avmp/core/VERSION") as version_file: 16 | version = version_file.read().strip() 17 | assert isinstance(version, str) 18 | 19 | install_requirements = ["pyTenable", "requests", "jira", "docopt"] 20 | 21 | setup( 22 | name="avmp", 23 | version=version, 24 | description="Command line vulnerability program manager.", 25 | long_description=README, 26 | license="Apache License", 27 | author="Matt Ferreira", 28 | author_email="rackreaver@gmail.com", 29 | download_url="https://github.com/RackReaver/AVMP", 30 | install_requires=install_requirements, 31 | packages=find_packages(), 32 | entry_points={"console_scripts": ["avmp = avmp.core.cli: main"]}, 33 | ) 34 | --------------------------------------------------------------------------------