├── .env ├── .github └── workflows │ └── build-and-push.yml ├── ACKNOWLEDGEMENT.md ├── AUTHORS.md ├── COPYRIGHT.md ├── Dockerfile ├── LICENSE.md ├── README.md ├── docker-compose.yml ├── docker-entrypoint.sh ├── docs └── assets │ ├── api_cve.png │ ├── api_redoc.png │ └── api_status.png ├── src ├── common │ ├── __init__.py │ ├── models │ │ ├── __init__.py │ │ ├── cpe.py │ │ ├── cve.py │ │ ├── cvss-v4.0.json │ │ ├── cvss_v2.py │ │ ├── cvss_v30.py │ │ ├── cvss_v31.py │ │ ├── cvss_v40.py │ │ └── models.py │ ├── search.py │ └── util.py ├── config │ ├── requirements.txt │ ├── setenv.sh │ └── setenv │ │ ├── config.ini │ │ ├── setenv_dev.ini │ │ ├── setenv_local.ini │ │ ├── setenv_prod.ini │ │ └── setenv_test.ini ├── db │ ├── __init__.py │ ├── alembic.ini │ ├── create_schema.py │ ├── database.py │ ├── schema.sh │ ├── scripts │ │ ├── env.py │ │ ├── script.py.mako │ │ └── versions │ │ │ ├── 03f01bf9a755_add_vuln_cert_table.py │ │ │ ├── 2f14a6a5afe8_add_capec_table.py │ │ │ ├── 892034da2349_custom_method.py │ │ │ ├── 9a14a98e9e6d_add_epss_table.py │ │ │ ├── c1f79cef457f_cwe_table.py │ │ │ ├── d4df74b54307_initial_setup.py │ │ │ └── ecd29e77afe3_change_vuln_description_index_type.py │ ├── setup_db.sh │ └── tables.py ├── generic │ ├── __init__.py │ ├── config.py │ ├── context.py │ ├── exc.py │ └── logging_manager.py ├── load ├── search └── web │ ├── __init__.py │ ├── app.py │ ├── dependencies.py │ ├── models │ ├── __init__.py │ └── search.py │ └── routers │ ├── __init__.py │ └── search.py ├── start_web.sh ├── tests ├── __init__.py ├── conftest.py ├── runners │ ├── __init__.py │ └── cli_runner.py └── unit │ ├── __init__.py │ ├── test_cpe_search.py │ ├── test_cve_search.py │ └── test_cwe_search.py └── vars /.env: -------------------------------------------------------------------------------- 1 | POSTGRES_PASSWORD="postgres_password" 2 | FCDB_USER="fastcve_db_user" 3 | FCDB_PASS="fastcve_db_pass" 4 | FASTCVE_DOCKER_IMG="binare/fastcve" 5 | FASTCVE_DOCKER_TAG="latest" 6 | 
NVD_API_KEY= -------------------------------------------------------------------------------- /.github/workflows/build-and-push.yml: -------------------------------------------------------------------------------- 1 | name: Auto Push Docker Image 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | build-and-update-docker: 9 | runs-on: ubuntu-latest 10 | steps: 11 | # Step 1: Check out the repository 12 | - name: Check out repository 13 | uses: actions/checkout@v3 14 | 15 | # Step 2: Log in to Docker Hub 16 | - name: Log in to Docker Hub 17 | uses: docker/login-action@v2 18 | with: 19 | username: ${{ secrets.DOCKER_USERNAME }} 20 | password: ${{ secrets.DOCKER_TOKEN }} 21 | 22 | # Step 3: Set up Docker Buildx 23 | - name: Set up Docker Buildx 24 | uses: docker/setup-buildx-action@v2 25 | 26 | # Step 4: Build Docker Image with release and latest tags 27 | - name: Build Docker Image 28 | run: | 29 | docker build --build-arg APP_VERSION=${{ github.ref_name }} \ 30 | -t ${{ secrets.DOCKER_USERNAME }}/fastcve:${{ github.ref_name }} \ 31 | -t ${{ secrets.DOCKER_USERNAME }}/fastcve:latest . 
32 | 33 | # Step 5: Push Docker Image for both tags 34 | - name: Push Docker Image 35 | run: | 36 | docker push ${{ secrets.DOCKER_USERNAME }}/fastcve:${{ github.ref_name }} 37 | docker push ${{ secrets.DOCKER_USERNAME }}/fastcve:latest 38 | 39 | # Step 6: Check if README.md file exists 40 | - name: Check README.md file exists 41 | id: check_readme 42 | run: | 43 | if [ -f "README.md" ]; then 44 | echo "exists=true" >> $GITHUB_OUTPUT 45 | else 46 | echo "exists=false" >> $GITHUB_OUTPUT 47 | fi 48 | 49 | # Step 7: Update Docker Hub description with README.md content (if exists) 50 | - name: Update Docker Hub description 51 | if: ${{ steps.check_readme.outputs.exists == 'true' }} 52 | env: 53 | DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} 54 | DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }} 55 | REPO: fastcve 56 | run: | 57 | # Prepare the README content for JSON payload 58 | README_CONTENT=$(jq -sR '.' README.md) 59 | 60 | # Use Docker Hub API to update the README 61 | curl -s -X PATCH "https://hub.docker.com/v2/repositories/${DOCKER_USERNAME}/${REPO}/" \ 62 | -H "Content-Type: application/json" \ 63 | -H "Authorization: Bearer ${DOCKER_TOKEN}" \ 64 | -d "{\"full_description\": ${README_CONTENT}}" 65 | 66 | -------------------------------------------------------------------------------- /ACKNOWLEDGEMENT.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/binareio/FastCVE/c3cb6d40287a47e58bc9bb879b4b2e15c0e93f70/ACKNOWLEDGEMENT.md -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | Primary Authors 2 | =============== 3 | 4 | * __[Vadim Bogulean](https://github.com/vadimbo)__ 5 | 6 | @vadimbo is the current maintainer of the code and has written the current code base, for version 1.0.0. 
7 | 8 | Other Contributors 9 | ================== 10 | 11 | * __[Narges Yousefnezhad (narcissus1394)](https://github.com/narcissus1394)__ 12 | * __[LRVT (l4rm4nd)](https://github.com/l4rm4nd)__ 13 | * __[fabianjarrett-ncc](https://github.com/fabianjarrett-ncc)__ 14 | * __[Fuziih](https://github.com/Fuziih)__ -------------------------------------------------------------------------------- /COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | Binare Oy (binare.io) © IoT Firmware Cybersecurity, 2020. 2 | 3 | FastCVE software is licensed under the BSD 3-Clause [License](./LICENSE.md) 4 | 5 | What follows is a list of the Open Source software packages and their respective applicable Open 6 | Source licenses and copyright notices. 7 | 8 | Observation: Information may be subject to alteration. 9 | 10 | 1. Docker PostgreSQL 11 | ------------------------- 12 | - Docker PostgreSQL released under the MIT license. 13 | - https://github.com/docker-library/postgres 14 | 15 | 2. Alembic 16 | -------------------------- 17 | - Alembic released under the MIT license 18 | - https://github.com/sqlalchemy/alembic 19 | 20 | 3. SQLAlchemy 21 | -------------------------- 22 | - SQLAlchemy released under the MIT license 23 | - https://github.com/sqlalchemy/sqlalchemy 24 | 25 | 4. psycopg2 26 | -------------------------- 27 | - psycopg2 released under the GNU Lesser General Public License 28 | - https://github.com/psycopg/psycopg2 29 | 30 | 5. requests 31 | -------------------------- 32 | - requests released under the Apache 2.0 License 33 | - https://github.com/psf/requests 34 | 35 | 6. tqdm 36 | ------------------------ 37 | - tqdm released under the MIT License 38 | - https://github.com/tqdm/tqdm 39 | 40 | 7. pytz 41 | ------------------------ 42 | - pytz released under the MIT License 43 | - https://github.com/stub42/pytz 44 | 45 | 8. 
pydantic 46 | ------------------------ 47 | - pydantic released under the MIT License 48 | - https://github.com/pydantic/pydantic 49 | 50 | 9. FastAPI 51 | ------------------------ 52 | - FastAPI released under the MIT License 53 | - https://github.com/tiangolo/fastapi 54 | 55 | 10. uvicorn 56 | ------------------------ 57 | - Uvicorn released under the BSD 3-Clause "New" or "Revised" License 58 | - Copyright © 2017-present, Encode OSS Ltd. All rights reserved. 59 | - https://github.com/encode/uvicorn 60 | 61 | 11. xmltodict 62 | ------------------------ 63 | - xmltodict released under the MIT License 64 | - https://github.com/martinblech/xmltodict 65 | 66 | 12. pydantic-argparse 67 | ------------------------ 68 | - pydantic-argparse released under the MIT License 69 | - https://github.com/SupImDos/pydantic-argparse 70 | 71 | 13. argcomplete 72 | ------------------------ 73 | - argcomplete released under the Apache 2.0 License 74 | - https://github.com/kislyuk/argcomplete 75 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM postgres:16-alpine3.19 2 | 3 | ARG APP_VERSION=notset 4 | 5 | ENV FCDB_HOME=/fastcve FCDB_NAME=vuln_db POSTGRES_PASSWORD= FCDB_USER= FCDB_PASS= INP_ENV_NAME=dev 6 | ENV PATH=${FCDB_HOME}:$PATH 7 | 8 | RUN apk add gcc g++ build-base python3-dev py3-pip 9 | 10 | WORKDIR ${FCDB_HOME} 11 | 12 | COPY ./src/config/requirements.txt /tmp 13 | 14 | RUN pip install --break-system-packages -r /tmp/requirements.txt 15 | 16 | COPY ./docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh 17 | COPY ./start_web.sh /always-init.d/start_web.sh 18 | COPY ./src ${FCDB_HOME} 19 | 20 | RUN sed -i "1i\export APP_VERSION=${APP_VERSION}" ${FCDB_HOME}/config/setenv/setenv_* 21 | 22 | RUN mkdir -p ${FCDB_HOME}/logs && chmod +wx ${FCDB_HOME}/logs \ 23 | && chmod +x ${FCDB_HOME}/db/setup_db.sh \ 24 | && chmod +x ${FCDB_HOME}/db/schema.sh 
\ 25 | && chmod -x ${FCDB_HOME}/config/setenv.sh \ 26 | && ln -s ${FCDB_HOME}/db/setup_db.sh /docker-entrypoint-initdb.d \ 27 | && ln -s ${FCDB_HOME}/config/setenv.sh /docker-entrypoint-initdb.d \ 28 | && chown -R postgres:postgres ${FCDB_HOME} 29 | 30 | USER postgres 31 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 6 | 7 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 8 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved. 2 | 3 | FastCVE - fast, rich and API-based search for CVE and more (CPE, CWE, CAPEC) 4 | ================== 5 | 6 | `fastcve` is a command-line tool that allows you to search for vulnerabilities in the Common Vulnerabilities and Exposures (CVE) database. The tool provides an easy way to search for vulnerabilities and retrieve relevant information about them, including their descriptions, CVSS scores, and references to related security advisories. 7 | 8 | `fastcve` is designed to be fast, lightweight, and easy to use. It provides a simple interface for querying the CVE database, allowing you to search for vulnerabilities based on vendors, products, and other criteria. The tool can be used by security professionals, system administrators, and developers to stay informed about known security vulnerabilities and to assess the risk of their systems and applications. 9 | 10 | Overall, `fastcve` is a useful tool for anyone who is interested in keeping up-to-date with the latest information about security vulnerabilities and how they can be addressed. 11 | 12 | Technical details 13 | ----------------- 14 | 15 | 16 | Some key technical characteristics of `fastcve` tool: 17 | 18 | 1. 
**Docker containerization**: This tool would run as a Docker container, meaning it is packaged and runs as a standalone executable unit that includes all its dependencies. This makes it easy to deploy and run the tool on any system that supports Docker. 19 | 20 | 2. **Automatically creates the DB on first start**: The tool would automatically create the database when it is started for the first time, eliminating the need for manual setup. 21 | 22 | 3. **Automatically creates and upgrades the DB schema if needed**: The tool would automatically manage the database schema, creating it when necessary and upgrading it as needed to support new features or changes in the data structure. 23 | 24 | 4. **Capability to populate the DB using NVD APIs (2.0)**: The tool would have the capability to populate the database using APIs from the National Vulnerability Database (NVD), a comprehensive database of vulnerabilities that is maintained by the US National Institute of Standards and Technology. 25 | 26 | 5. **Automatically updates the DB schema for the latest changes from the previous update**: The tool would automatically update the database with the latest changes, new vulnerabilities, and new Common Platform Enumeration (CPE) entries, ensuring that the database remains up-to-date and relevant. 27 | 28 | The tool is a comprehensive solution for managing a database of vulnerabilities, Common Platform Enumeration (CPE) entries, designed to make it easy to keep the database up-to-date and relevant, and to simplify the process of querying the database for vulnerabilities and information about them. 29 | 30 | It was designed to be able to handle multiple query entries and maintain a high level of performance. It is optimized to be able to handle a large number of queries efficiently and quickly. The goal is to provide a fast and reliable solution for searching, maintaining and updating information about vulnerabilities and their corresponding CPE entries. 
The tool is also able to provide information about CWE (Common Weakness Enumerations) and CAPEC (Common Attack Pattern Enumerations and Classification) 31 | 32 | 33 | Build 34 | ---------- 35 | 36 | 37 | In order to build the docker image you need to trigger: 38 | ``` 39 | docker compose build 40 | ``` 41 | 42 | this would create a new image named `fastcve:latest`. In case a special tag is needed then export beforehand the ENV var `DOCKER_TAG=` to generate `fastcve:` 43 | 44 | 45 | First Run 46 | --------- 47 | 48 | Before starting the container for the first time several env variables have to be exported first: 49 | ``` 50 | export INP_ENV_NAME= 51 | export POSTGRES_PASSWORD= 52 | export FCDB_USER= 53 | export FCDB_PASS= 54 | ``` 55 | 56 | To run the container: 57 | ``` 58 | docker compose up 59 | ``` 60 | 61 | Configuration parameters 62 | ------------------------ 63 | 64 | The docker container holds the relevant configuration files under `/fastcve/config/setenv` 65 | This folder would contain the following files: 66 | - `config.ini` - contains app related config parameters 67 | - `setenv_<${INP_ENV_NAME}>.ini` - contains parameters that are env dependent as per the `INP_ENV_NAME` env variable value. 68 | 69 | For easy access and modification of the config files, a mapping between a HOST folder and the container's folder `/fastcve/config/setenv` is recommended; it can be specified in the `docker-compose.yml` file. 70 | 71 | How To 72 | ------ 73 | 74 | 75 | - **Populate the CVE, CPE, CWE, and CAPEC data for the first time**: 76 | ``` 77 | docker exec fastcve load --data cve cpe cwe capec 78 | ``` 79 | 80 | - **Populate the CVE, and CPE data for the next times**: 81 | ``` 82 | docker exec fastcve load --data cve cpe 83 | ``` 84 | 85 | this will fetch the new/modified data for the period from the last data fetch (for a max of `n` days: parameter set in the config. 
NVD is allowing max 120 days period) 86 | 87 | If there is a need to repopulate the DB for the CWE/CAPEC info, then `--full` and `--drop` options are available for the CWE/CAPEC info load command. `--full` will cause the load to ignore the fact that the CWE/CAPEC data is already present and `--drop` will cause it to drop any existing CWE/CAPEC related data before processing the new downloaded data. When using `--data epss` in combination with `--epss-now`, the load command explicitly fetches the EPSS data for the current date. If `--epss-now` is not specified, the script defaults to retrieving EPSS data from the previous day. 88 | 89 | - search for the data: **get the CVEs details (JSON) for a list of CVE-IDs** 90 | ``` 91 | docker exec fastcve search --search-info cve --cve CVE-YEAR-ID1 CVE-YEAR-ID2 92 | ``` 93 | 94 | - search for the data: **search CVEs by the summary text** 95 | ``` 96 | docker exec fastcve search --search-info cve --keyword '(buffer overflow|memory corruption).*before.*2\.1[0-9]' 97 | ``` 98 | Above will search in the CVE text and return the details of those CVEs that would match the summary/description text with the specified keyword. It is possible to specify more than one keyword and each keyword can be specified in the form of a regexp pattern. If multiple keywords are specified, they are combined with an AND condition between the keywords. 99 | 100 | - search for the data: **get the CVEs IDs for a specific CPE** 101 | ``` 102 | docker exec fastcve search --search-info cve --cpe23 cpe:2.3:*:*:linux_kernel:2.6.32: --output id 103 | ``` 104 | 105 | above will return the list of CVE-IDs that are related to `linux_kernel` product for version 2.6.32. 106 | 107 | To get the CVE details, request the output in JSON format: `--output json`. 108 | 109 | To get only those CVEs that were modified in the last `n` days, add the option `--days-back n` i.e. 
`--days-back 10` - only created/modified in the last **10** days 110 | 111 | additional filters are available for the search in CVE DB: 112 | ``` 113 | --cvss-severity-v2 {low, medium, high} # retrieve only those CVEs that has the severity as per CVSS score Version 2 114 | --cvss-severity-v3 {low, medium, high, critical} # retrieve only those CVEs that has the severity as per CVSS score Version 3.x 115 | --cvss-metrics-v2 CvssVector # CVSS V2.0 vector string to search for (default: None) 116 | --cvss-metrics-v3 CvssVector # CVSS V3.x vector string to search for (default: None) 117 | --cwe CWE [CWE ...] # retrieve only those CVEs that are related to the specified list of CWE IDs 118 | --pub-start-date # retrieve only those CVEs that are published after the start date 119 | --pub-end-date # retrieve only those CVEs that are published before the end date 120 | --last-mod-start-date # retrieve only those CVEs that are last modified after the start date 121 | --last-mod-end-date # retrieve only those CVEs that are last modified before the end date 122 | ``` 123 | - search for the data: **get the valid list of CPE names for a query on part/vendor/product/version etc**. 124 | 125 | ``` 126 | docker exec fastcve search --search-info cpe --cpe23 cpe:2.3:h:*:dl*:*: --output id 127 | ``` 128 | 129 | Above will search for all valid existing CPE 2.3 names that are of hardware type, for any vendor, product starts with `dl`*, and version is any 130 | 131 | To see for the other options available for both `load` and `search` commands run these with `-h` option 132 | 133 | ``` 134 | docker exec fastcve search -h 135 | docker exec fastcve load -h 136 | ``` 137 | 138 | The same search capabilities are exposed through the web interface as well. The web interface is exposed through port 8000 by default. Can be changed in the `docker-compose.yml` file. 
139 | 140 | The following endpoints are exposed through HTTP requests 141 | ``` 142 | /api/search/cve - search for CVE data 143 | /api/search/cpe - search for CPE data 144 | /api/search/cwe - search for CWE data 145 | /api/search/capec - search for CAPEC data 146 | ``` 147 | 148 | For the api definitions and execution instructions you can access on 149 | ``` 150 | http://localhost:8000/docs 151 | ``` 152 | 153 | For the API documentation you can access on 154 | ``` 155 | http://localhost:8000/redoc 156 | ``` 157 | 158 | Screenshots 159 | ============================ 160 | Example status API 161 | 162 | ![Screenshot example1](./docs/assets/api_status.png "Example 1") 163 | 164 | Example search CVE API 165 | 166 | ![Screenshot example2](./docs/assets/api_cve.png "Example 2") 167 | 168 | API documentation 169 | 170 | ![ScreenShot example3](./docs/assets/api_redoc.png "Example 3") 171 | 172 | License 173 | ============================ 174 | This software is released under the BSD 3-Clause License 175 | 176 | See the [LICENSE](./LICENSE.md) file 177 | 178 | Authors 179 | ============================ 180 | See the [AUTHORS](./AUTHORS.md) file 181 | 182 | Copyright 183 | ============================ 184 | Binare Oy (binare.io) © IoT Firmware Cybersecurity, 2020. 
185 | 186 | See the [COPYRIGHT](./COPYRIGHT.md) file 187 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | #-------------------------------------------------------------------------- 3 | # Application Database service 4 | #-------------------------------------------------------------------------- 5 | fastcve: 6 | image: "${FASTCVE_DOCKER_IMG}:${FASTCVE_DOCKER_TAG}" 7 | 8 | container_name: fastcve 9 | 10 | command: postgres ${ENV_POSTGRES_CONF} 11 | 12 | build: 13 | context: $PWD 14 | dockerfile: $PWD/Dockerfile 15 | 16 | args: 17 | APP_VERSION: ${FASTCVE_DOCKER_TAG:-notset} 18 | 19 | environment: 20 | - INP_ENV_NAME=${INP_ENV_NAME} 21 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} 22 | - FCDB_USER=${FCDB_USER} 23 | - FCDB_PASS=${FCDB_PASS} 24 | - FCDB_WEB_PARAMS=--host 0.0.0.0 --port 8000 --workers 4 25 | - NVD_API_KEY=${NVD_API_KEY} 26 | 27 | volumes: 28 | - vol_fastcve_db:/var/lib/postgresql/data 29 | #- ./src/config/setenv/:/fastcve/config/setenv/ 30 | - ./:/tmp 31 | 32 | ports: 33 | - "6630:5432" 34 | - "8000:8000" 35 | 36 | networks: 37 | - backend 38 | 39 | #------------------------------------------------------------------------------ 40 | # Definition of networks used 41 | #------------------------------------------------------------------------------ 42 | networks: 43 | backend: 44 | 45 | #------------------------------------------------------------------------------ 46 | # Definition of docker volumes 47 | #------------------------------------------------------------------------------ 48 | volumes: 49 | vol_fastcve_db: # DB data for the vuln DB 50 | -------------------------------------------------------------------------------- /docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -Eeo pipefail 3 | # TODO swap to -Eeuo pipefail 
above (after handling all potentially-unset variables) 4 | 5 | # usage: file_env VAR [DEFAULT] 6 | # ie: file_env 'XYZ_DB_PASSWORD' 'example' 7 | # (will allow for "$XYZ_DB_PASSWORD_FILE" to fill in the value of 8 | # "$XYZ_DB_PASSWORD" from a file, especially for Docker's secrets feature) 9 | file_env() { 10 | local var="$1" 11 | local fileVar="${var}_FILE" 12 | local def="${2:-}" 13 | if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then 14 | echo >&2 "error: both $var and $fileVar are set (but are exclusive)" 15 | exit 1 16 | fi 17 | local val="$def" 18 | if [ "${!var:-}" ]; then 19 | val="${!var}" 20 | elif [ "${!fileVar:-}" ]; then 21 | val="$(< "${!fileVar}")" 22 | fi 23 | export "$var"="$val" 24 | unset "$fileVar" 25 | } 26 | 27 | # check to see if this file is being run or sourced from another script 28 | _is_sourced() { 29 | # https://unix.stackexchange.com/a/215279 30 | [ "${#FUNCNAME[@]}" -ge 2 ] \ 31 | && [ "${FUNCNAME[0]}" = '_is_sourced' ] \ 32 | && [ "${FUNCNAME[1]}" = 'source' ] 33 | } 34 | 35 | # used to create initial postgres directories and if run as root, ensure ownership to the "postgres" user 36 | docker_create_db_directories() { 37 | local user; user="$(id -u)" 38 | 39 | mkdir -p "$PGDATA" 40 | # ignore failure since there are cases where we can't chmod (and PostgreSQL might fail later anyhow - it's picky about permissions of this directory) 41 | chmod 700 "$PGDATA" || : 42 | 43 | # ignore failure since it will be fine when using the image provided directory; see also https://github.com/docker-library/postgres/pull/289 44 | mkdir -p /var/run/postgresql || : 45 | chmod 775 /var/run/postgresql || : 46 | 47 | # Create the transaction log directory before initdb is run so the directory is owned by the correct user 48 | if [ -n "$POSTGRES_INITDB_WALDIR" ]; then 49 | mkdir -p "$POSTGRES_INITDB_WALDIR" 50 | if [ "$user" = '0' ]; then 51 | find "$POSTGRES_INITDB_WALDIR" \! 
-user postgres -exec chown postgres '{}' + 52 | fi 53 | chmod 700 "$POSTGRES_INITDB_WALDIR" 54 | fi 55 | 56 | # allow the container to be started with `--user` 57 | if [ "$user" = '0' ]; then 58 | find "$PGDATA" \! -user postgres -exec chown postgres '{}' + 59 | find /var/run/postgresql \! -user postgres -exec chown postgres '{}' + 60 | fi 61 | } 62 | 63 | # initialize empty PGDATA directory with new database via 'initdb' 64 | # arguments to `initdb` can be passed via POSTGRES_INITDB_ARGS or as arguments to this function 65 | # `initdb` automatically creates the "postgres", "template0", and "template1" dbnames 66 | # this is also where the database user is created, specified by `POSTGRES_USER` env 67 | docker_init_database_dir() { 68 | # "initdb" is particular about the current user existing in "/etc/passwd", so we use "nss_wrapper" to fake that if necessary 69 | # see https://github.com/docker-library/postgres/pull/253, https://github.com/docker-library/postgres/issues/359, https://cwrap.org/nss_wrapper.html 70 | local uid; uid="$(id -u)" 71 | if ! 
getent passwd "$uid" &> /dev/null; then 72 | # see if we can find a suitable "libnss_wrapper.so" (https://salsa.debian.org/sssd-team/nss-wrapper/-/commit/b9925a653a54e24d09d9b498a2d913729f7abb15) 73 | local wrapper 74 | for wrapper in {/usr,}/lib{/*,}/libnss_wrapper.so; do 75 | if [ -s "$wrapper" ]; then 76 | NSS_WRAPPER_PASSWD="$(mktemp)" 77 | NSS_WRAPPER_GROUP="$(mktemp)" 78 | export LD_PRELOAD="$wrapper" NSS_WRAPPER_PASSWD NSS_WRAPPER_GROUP 79 | local gid; gid="$(id -g)" 80 | echo "postgres:x:$uid:$gid:PostgreSQL:$PGDATA:/bin/false" > "$NSS_WRAPPER_PASSWD" 81 | echo "postgres:x:$gid:" > "$NSS_WRAPPER_GROUP" 82 | break 83 | fi 84 | done 85 | fi 86 | 87 | if [ -n "$POSTGRES_INITDB_WALDIR" ]; then 88 | set -- --waldir "$POSTGRES_INITDB_WALDIR" "$@" 89 | fi 90 | 91 | eval 'initdb --username="$POSTGRES_USER" --pwfile=<(echo "$POSTGRES_PASSWORD") '"$POSTGRES_INITDB_ARGS"' "$@"' 92 | 93 | # unset/cleanup "nss_wrapper" bits 94 | if [ "${LD_PRELOAD:-}" = '/usr/lib/libnss_wrapper.so' ]; then 95 | rm -f "$NSS_WRAPPER_PASSWD" "$NSS_WRAPPER_GROUP" 96 | unset LD_PRELOAD NSS_WRAPPER_PASSWD NSS_WRAPPER_GROUP 97 | fi 98 | } 99 | 100 | # print large warning if POSTGRES_PASSWORD is long 101 | # error if both POSTGRES_PASSWORD is empty and POSTGRES_HOST_AUTH_METHOD is not 'trust' 102 | # print large warning if POSTGRES_HOST_AUTH_METHOD is set to 'trust' 103 | # assumes database is not set up, ie: [ -z "$DATABASE_ALREADY_EXISTS" ] 104 | docker_verify_minimum_env() { 105 | # check password first so we can output the warning before postgres 106 | # messes it up 107 | if [ "${#POSTGRES_PASSWORD}" -ge 100 ]; then 108 | cat >&2 <<-'EOWARN' 109 | 110 | WARNING: The supplied POSTGRES_PASSWORD is 100+ characters. 111 | 112 | This will not work if used via PGPASSWORD with "psql". 
113 | 114 | https://www.postgresql.org/message-id/flat/E1Rqxp2-0004Qt-PL%40wrigleys.postgresql.org (BUG #6412) 115 | https://github.com/docker-library/postgres/issues/507 116 | 117 | EOWARN 118 | fi 119 | if [ -z "$POSTGRES_PASSWORD" ] && [ 'trust' != "$POSTGRES_HOST_AUTH_METHOD" ]; then 120 | # The - option suppresses leading tabs but *not* spaces. :) 121 | cat >&2 <<-'EOE' 122 | Error: Database is uninitialized and superuser password is not specified. 123 | You must specify POSTGRES_PASSWORD to a non-empty value for the 124 | superuser. For example, "-e POSTGRES_PASSWORD=password" on "docker run". 125 | 126 | You may also use "POSTGRES_HOST_AUTH_METHOD=trust" to allow all 127 | connections without a password. This is *not* recommended. 128 | 129 | See PostgreSQL documentation about "trust": 130 | https://www.postgresql.org/docs/current/auth-trust.html 131 | EOE 132 | exit 1 133 | fi 134 | if [ 'trust' = "$POSTGRES_HOST_AUTH_METHOD" ]; then 135 | cat >&2 <<-'EOWARN' 136 | ******************************************************************************** 137 | WARNING: POSTGRES_HOST_AUTH_METHOD has been set to "trust". This will allow 138 | anyone with access to the Postgres port to access your database without 139 | a password, even if POSTGRES_PASSWORD is set. See PostgreSQL 140 | documentation about "trust": 141 | https://www.postgresql.org/docs/current/auth-trust.html 142 | In Docker's default configuration, this is effectively any other 143 | container on the same system. 144 | 145 | It is not recommended to use POSTGRES_HOST_AUTH_METHOD=trust. Replace 146 | it with "-e POSTGRES_PASSWORD=password" instead to set a password in 147 | "docker run". 
148 | ******************************************************************************** 149 | EOWARN 150 | fi 151 | } 152 | 153 | # usage: docker_process_init_files [file [file [...]]] 154 | # ie: docker_process_init_files /always-initdb.d/* 155 | # process initializer files, based on file extensions and permissions 156 | docker_process_init_files() { 157 | # psql here for backwards compatibility "${psql[@]}" 158 | psql=( docker_process_sql ) 159 | 160 | echo 161 | local f 162 | for f; do 163 | case "$f" in 164 | *.sh) 165 | # https://github.com/docker-library/postgres/issues/450#issuecomment-393167936 166 | # https://github.com/docker-library/postgres/pull/452 167 | if [ -x "$f" ]; then 168 | echo "$0: running $f" 169 | "$f" 170 | else 171 | echo "$0: sourcing $f" 172 | . "$f" 173 | fi 174 | ;; 175 | *.sql) echo "$0: running $f"; docker_process_sql -f "$f"; echo ;; 176 | *.sql.gz) echo "$0: running $f"; gunzip -c "$f" | docker_process_sql; echo ;; 177 | *.sql.xz) echo "$0: running $f"; xzcat "$f" | docker_process_sql; echo ;; 178 | *) echo "$0: ignoring $f" ;; 179 | esac 180 | echo 181 | done 182 | } 183 | 184 | # Execute sql script, passed via stdin (or -f flag of pqsl) 185 | # usage: docker_process_sql [psql-cli-args] 186 | # ie: docker_process_sql --dbname=mydb <<<'INSERT ...' 
# start socket-only postgresql server for setting up or running scripts
# all arguments will be passed along as arguments to `postgres` (via pg_ctl)
docker_temp_server_start() {
	if [ "$1" = 'postgres' ]; then
		shift
	fi

	# internal start of server in order to allow setup using psql client
	# does not listen on external TCP/IP and waits until start finishes
	set -- "$@" -c listen_addresses='' -p "${PGPORT:-5432}"

	PGUSER="${PGUSER:-$POSTGRES_USER}" \
	pg_ctl -D "$PGDATA" \
		-o "$(printf '%q ' "$@")" \
		-w start
}

# stop postgresql server after done setting up user and running scripts
docker_temp_server_stop() {
	PGUSER="${PGUSER:-postgres}" \
	pg_ctl -D "$PGDATA" -m fast -w stop
}

# check arguments for an option that would cause postgres to stop
# return true if there is one
_pg_want_help() {
	local arg
	for arg; do
		case "$arg" in
			# postgres --help | grep 'then exit'
			# leaving out -C on purpose since it always fails and is unhelpful:
			#   postgres: could not access the server configuration file "/var/lib/postgresql/data/postgresql.conf": No such file or directory
			-'?'|--help|--describe-config|-V|--version)
				return 0
				;;
		esac
	done
	return 1
}

# entrypoint dispatcher: initializes the data directory on first run
# (when it is empty), then exec's the requested command
_main() {
	# if first arg looks like a flag, assume we want to run postgres server
	if [ "${1:0:1}" = '-' ]; then
		set -- postgres "$@"
	fi

	if [ "$1" = 'postgres' ] && ! _pg_want_help "$@"; then
		docker_setup_env
		# setup data directories and permissions (when run as root)
		docker_create_db_directories
		if [ "$(id -u)" = '0' ]; then
			# then restart script as postgres user
			exec su-exec postgres "$BASH_SOURCE" "$@"
		fi

		# only run initialization on an empty data directory
		if [ -z "$DATABASE_ALREADY_EXISTS" ]; then
			docker_verify_minimum_env

			# check dir permissions to reduce likelihood of half-initialized database
			ls /docker-entrypoint-initdb.d/ > /dev/null

			docker_init_database_dir
			pg_setup_hba_conf "$@"

			# PGPASSWORD is required for psql when authentication is required for 'local' connections via pg_hba.conf and is otherwise harmless
			# e.g. when '--auth=md5' or '--auth-local=md5' is used in POSTGRES_INITDB_ARGS
			export PGPASSWORD="${PGPASSWORD:-$POSTGRES_PASSWORD}"
			docker_temp_server_start "$@"

			docker_setup_db
			docker_process_init_files /docker-entrypoint-initdb.d/*

			docker_temp_server_stop
			unset PGPASSWORD

			echo
			echo 'PostgreSQL init process complete; ready for start up.'
			echo
		else
			echo
			echo 'PostgreSQL Database directory appears to contain a database; Skipping initialization'
			echo
		fi
	fi

	# /always-init.d scripts run on every container start, not just first init
	docker_process_init_files /always-init.d/*
	exec "$@"
}

# only run _main when executed directly, not when this file is sourced
if ! _is_sourced; then
	_main "$@"
fi
from pydantic import BaseModel, Extra, Field, constr


class DefTitle(BaseModel):
    """
    Human readable title for CPE
    """

    class Config:
        extra = Extra.forbid

    title: str
    # language tag the title text is written in
    lang: str


class Type(Enum):
    """Category of a reference link attached to a CPE (see DefReference.type)."""

    Advisory = 'Advisory'
    Change_Log = 'Change Log'
    Product = 'Product'
    Project = 'Project'
    Vendor = 'Vendor'
    Version = 'Version'


class DefReference(BaseModel):
    """
    Internet resource for CPE
    """

    class Config:
        extra = Extra.forbid

    # URI-shaped reference string; the pattern comes straight from the
    # NVD CPE API 2.0 JSON schema this module was generated from.
    ref: constr(
        regex=r'^([A-Za-z][A-Za-z0-9+.-]+):(\/\/([^@]+@)?([A-Za-z0-9.\-_~]+)(:\d+)?)?((?:[A-Za-z0-9-._~]|%[A-Fa-f0-9]|[!$&\'\[\]()*+,;=:@])+(?:\/(?:[A-Za-z0-9-._~]|%[A-Fa-f0-9]|[!$&\'\[\]()*+,;=:@])*)*|(?:\/(?:[A-Za-z0-9-._~]|%[A-Fa-f0-9]|[!$&\'()*+,;=:@])+)*)?(\?(?:[A-Za-z0-9-._~]|%[A-Fa-f0-9]|[!$&\'\[\]()*+,;=:@]|[/?])*)?(\#(?:[A-Za-z0-9-._~]|%[A-Fa-f0-9]|[!$&\'\[\]()*+,;=:@]|[/?])*)?$'
    )
    type: Optional[Type] = None


class DeprecatedByItem(BaseModel):
    """CPE entry that superseded a deprecated CPE (appears in CpeItem.deprecatedBy)."""

    cpeName: Optional[str] = None
    cpeNameId: Optional[UUID] = None


class Deprecate(BaseModel):
    """CPE entry that this CPE deprecates (appears in CpeItem.deprecates)."""

    cpeName: Optional[str] = None
    cpeNameId: Optional[UUID] = None


class CpeItem(BaseModel):
    """A single CPE dictionary entry; extra keys are rejected (Extra.forbid)."""

    class Config:
        extra = Extra.forbid

    deprecated: bool
    cpeName: str
    cpeNameId: UUID
    created: datetime
    lastModified: datetime
    titles: Optional[List[DefTitle]] = None
    refs: Optional[List[DefReference]] = None
    deprecatedBy: Optional[List[DeprecatedByItem]] = None
    deprecates: Optional[List[Deprecate]] = None


class DefCpe(BaseModel):
    """Wrapper object: one element of the feed's `products` array holding a `cpe`."""

    class Config:
        extra = Extra.forbid

    cpe: CpeItem
# generated by datamodel-codegen:
#   filename:  cve.json
#   timestamp: 2023-03-02T09:59:46+00:00

from __future__ import annotations

from datetime import date, datetime
from enum import Enum
from typing import List, Optional
from uuid import UUID

from pydantic import BaseModel, Extra, Field, confloat, constr
from .cvss_v2 import CveCvssDataModel as CveCvssDataV2
from .cvss_v30 import CveCvssDataModel as CveCvssDataV30
from .cvss_v31 import CveCvssDataModel as CveCvssDataV31
from .cvss_v40 import CveCvssDataModel as CveCvssDataV40


class Type(Enum):
    """Origin of a metric entry: NVD's own score (Primary) or an external one (Secondary)."""

    Primary = 'Primary'
    Secondary = 'Secondary'


class LangString(BaseModel):
    """Language-tagged text value (used for CVE/weakness descriptions)."""

    class Config:
        extra = Extra.forbid

    lang: str
    # schema caps description text at 4096 characters
    value: constr(max_length=4096)


class Reference(BaseModel):
    """External reference URL attached to a CVE."""

    class Config:
        extra = Extra.forbid

    url: str
    source: Optional[str] = None
    tags: Optional[List[str]] = None


class VendorComment(BaseModel):
    """Vendor-supplied statement about a CVE."""

    class Config:
        extra = Extra.forbid

    organization: str
    comment: str
    lastModified: datetime


class Weakness(BaseModel):
    """Weakness (CWE) classification entry for a CVE."""

    class Config:
        extra = Extra.forbid

    source: str
    type: str
    description: List[LangString] = Field(..., min_items=0)


class Operator(Enum):
    """Boolean operator combining CPE match entries in a configuration node."""

    AND = 'AND'
    OR = 'OR'
class CvssV2(BaseModel):
    """CVSS v2.0 metric entry, including the v2-only boolean qualifier flags."""

    class Config:
        extra = Extra.forbid

    source: str
    type: Type
    cvssData: CveCvssDataV2
    baseSeverity: Optional[str] = None
    exploitabilityScore: Optional[confloat(ge=0.0, le=10.0)] = Field(
        None, description='CVSS subscore.'
    )
    impactScore: Optional[confloat(ge=0.0, le=10.0)] = Field(
        None, description='CVSS subscore.'
    )
    acInsufInfo: Optional[bool] = None
    obtainAllPrivilege: Optional[bool] = None
    obtainUserPrivilege: Optional[bool] = None
    obtainOtherPrivilege: Optional[bool] = None
    userInteractionRequired: Optional[bool] = None


class CvssV30(BaseModel):
    """CVSS v3.0 metric entry."""

    class Config:
        extra = Extra.forbid

    source: str
    type: Type
    cvssData: CveCvssDataV30
    exploitabilityScore: Optional[confloat(ge=0.0, le=10.0)] = Field(
        None, description='CVSS subscore.'
    )
    impactScore: Optional[confloat(ge=0.0, le=10.0)] = Field(
        None, description='CVSS subscore.'
    )


class CvssV31(BaseModel):
    """CVSS v3.1 metric entry."""

    class Config:
        extra = Extra.forbid

    source: str
    type: Type
    cvssData: CveCvssDataV31
    exploitabilityScore: Optional[confloat(ge=0.0, le=10.0)] = Field(
        None, description='CVSS subscore.'
    )
    impactScore: Optional[confloat(ge=0.0, le=10.0)] = Field(
        None, description='CVSS subscore.'
    )


class CvssV40(BaseModel):
    """CVSS v4.0 metric entry (carries no sub-score fields, unlike v2/v3)."""

    class Config:
        extra = Extra.forbid

    source: str
    type: Type
    cvssData: CveCvssDataV40
class Node(BaseModel):
    """
    Defines a configuration node in an NVD applicability statement.
    """

    class Config:
        extra = Extra.forbid

    operator: Operator
    negate: Optional[bool] = None
    cpeMatch: List[CpeMatch]


class Metrics(BaseModel):
    """
    Metric scores for a vulnerability as found on NVD.
    """

    class Config:
        extra = Extra.forbid

    cvssMetricV40: Optional[List[CvssV40]] = Field(None, description='CVSS V4.0 score.')
    cvssMetricV31: Optional[List[CvssV31]] = Field(None, description='CVSS V3.1 score.')
    cvssMetricV30: Optional[List[CvssV30]] = Field(None, description='CVSS V3.0 score.')
    cvssMetricV2: Optional[List[CvssV2]] = Field(None, description='CVSS V2.0 score.')
    epss: Optional[Epss] = Field(None, description='EPSS details.')


class Config(BaseModel):
    """One `configurations` entry: a tree of Nodes combined by an Operator."""

    # NOTE(review): this model's name collides with pydantic's inner-`Config`
    # convention — the nested `class Config` below is pydantic's settings
    # class, while the outer class models the NVD "configurations" object.
    class Config:
        extra = Extra.forbid

    operator: Optional[Operator] = None
    negate: Optional[bool] = None
    nodes: List[Node]
class DefCveItem(BaseModel):
    """Wrapper object: one element of `vulnerabilities` holding a single `cve`."""

    class Config:
        extra = Extra.forbid

    cve: CveItem


class CveModel(BaseModel):
    """Top-level CVE feed page: pagination metadata plus the vulnerability list."""

    class Config:
        extra = Extra.forbid

    resultsPerPage: int
    startIndex: int
    totalResults: int
    format: str
    version: str
    timestamp: datetime
    vulnerabilities: List[DefCveItem] = Field(..., description='NVD feed array of CVE')
Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote ", 13 | " products derived from this software without specific prior written permission.", 14 | "", 15 | "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES, ", 16 | "INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ", 17 | "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, ", 18 | "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ", 19 | "SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, ", 20 | "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE ", 21 | "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
22 | ], 23 | 24 | "$schema": "http://json-schema.org/draft-07/schema#", 25 | "title": "JSON Schema for Common Vulnerability Scoring System version 4.0", 26 | "$id": "https://www.first.org/cvss/cvss-v4.0.json?20240216", 27 | "type": "object", 28 | "definitions": { 29 | "attackVectorType": { 30 | "type": "string", 31 | "enum": [ "NETWORK", "ADJACENT", "LOCAL", "PHYSICAL" ] 32 | }, 33 | "modifiedAttackVectorType": { 34 | "type": "string", 35 | "enum": [ "NETWORK", "ADJACENT", "LOCAL", "PHYSICAL", "NOT_DEFINED" ], 36 | "default": "NOT_DEFINED" 37 | }, 38 | "attackComplexityType": { 39 | "type": "string", 40 | "enum": [ "HIGH", "LOW" ] 41 | }, 42 | "modifiedAttackComplexityType": { 43 | "type": "string", 44 | "enum": [ "HIGH", "LOW", "NOT_DEFINED" ], 45 | "default": "NOT_DEFINED" 46 | }, 47 | "attackRequirementsType": { 48 | "type": "string", 49 | "enum": [ "NONE", "PRESENT" ] 50 | }, 51 | "modifiedAttackRequirementsType": { 52 | "type": "string", 53 | "enum": [ "NONE", "PRESENT", "NOT_DEFINED" ], 54 | "default": "NOT_DEFINED" 55 | }, 56 | "privilegesRequiredType": { 57 | "type": "string", 58 | "enum": [ "HIGH", "LOW", "NONE" ] 59 | }, 60 | "modifiedPrivilegesRequiredType": { 61 | "type": "string", 62 | "enum": [ "HIGH", "LOW", "NONE", "NOT_DEFINED" ], 63 | "default": "NOT_DEFINED" 64 | }, 65 | "userInteractionType": { 66 | "type": "string", 67 | "enum": [ "NONE", "PASSIVE", "ACTIVE" ] 68 | }, 69 | "modifiedUserInteractionType": { 70 | "type": "string", 71 | "enum": [ "NONE", "PASSIVE", "ACTIVE", "NOT_DEFINED" ], 72 | "default": "NOT_DEFINED" 73 | }, 74 | "vulnCiaType": { 75 | "type": "string", 76 | "enum": [ "NONE", "LOW", "HIGH" ] 77 | }, 78 | "modifiedVulnCiaType": { 79 | "type": "string", 80 | "enum": [ "NONE", "LOW", "HIGH", "NOT_DEFINED" ], 81 | "default": "NOT_DEFINED" 82 | }, 83 | "subCiaType": { 84 | "type": "string", 85 | "enum": [ "NONE", "LOW", "HIGH" ] 86 | }, 87 | "modifiedSubCType": { 88 | "type": "string", 89 | "enum": [ "NEGLIGIBLE", "LOW", "HIGH", 
"NOT_DEFINED" ], 90 | "default": "NOT_DEFINED" 91 | }, 92 | "modifiedSubIaType": { 93 | "type": "string", 94 | "enum": [ "NEGLIGIBLE", "LOW", "HIGH", "SAFETY", "NOT_DEFINED" ], 95 | "default": "NOT_DEFINED" 96 | }, 97 | "exploitMaturityType": { 98 | "type": "string", 99 | "enum": [ "UNREPORTED", "PROOF_OF_CONCEPT", "ATTACKED", "NOT_DEFINED" ], 100 | "default": "NOT_DEFINED" 101 | }, 102 | "ciaRequirementType": { 103 | "type": "string", 104 | "enum": [ "LOW", "MEDIUM", "HIGH", "NOT_DEFINED" ], 105 | "default": "NOT_DEFINED" 106 | }, 107 | "safetyType": { 108 | "type": "string", 109 | "enum": [ "NEGLIGIBLE", "PRESENT", "NOT_DEFINED" ], 110 | "default": "NOT_DEFINED" 111 | }, 112 | "AutomatableType": { 113 | "type": "string", 114 | "enum": [ "NO", "YES", "NOT_DEFINED" ], 115 | "default": "NOT_DEFINED" 116 | }, 117 | "recoveryType": { 118 | "type": "string", 119 | "enum": [ "AUTOMATIC", "USER", "IRRECOVERABLE", "NOT_DEFINED" ], 120 | "default": "NOT_DEFINED" 121 | }, 122 | "valueDensityType": { 123 | "type": "string", 124 | "enum": [ "DIFFUSE", "CONCENTRATED", "NOT_DEFINED" ], 125 | "default": "NOT_DEFINED" 126 | }, 127 | "vulnerabilityResponseEffortType": { 128 | "type": "string", 129 | "enum": [ "LOW", "MODERATE", "HIGH", "NOT_DEFINED" ], 130 | "default": "NOT_DEFINED" 131 | }, 132 | "providerUrgencyType": { 133 | "type": "string", 134 | "enum": [ "CLEAR", "GREEN", "AMBER", "RED", "NOT_DEFINED" ], 135 | "default": "NOT_DEFINED" 136 | }, 137 | "noneScoreType": { 138 | "type": "number", 139 | "minimum": 0.0, 140 | "maximum": 0.0 141 | }, 142 | "lowScoreType": { 143 | "type": "number", 144 | "minimum": 0.1, 145 | "maximum": 3.9, 146 | "multipleOf": 0.1 147 | }, 148 | "mediumScoreType": { 149 | "type": "number", 150 | "minimum": 4.0, 151 | "maximum": 6.9, 152 | "multipleOf": 0.1 153 | }, 154 | "highScoreType": { 155 | "type": "number", 156 | "minimum": 7.0, 157 | "maximum": 8.9, 158 | "multipleOf": 0.1 159 | }, 160 | "criticalScoreType": { 161 | "type": "number", 162 | 
"minimum": 9.0, 163 | "maximum": 10, 164 | "multipleOf": 0.1 165 | }, 166 | "noneSeverityType": { 167 | "const": "NONE" 168 | }, 169 | "lowSeverityType": { 170 | "const": "LOW" 171 | }, 172 | "mediumSeverityType": { 173 | "const": "MEDIUM" 174 | }, 175 | "highSeverityType": { 176 | "const": "HIGH" 177 | }, 178 | "criticalSeverityType": { 179 | "const": "CRITICAL" 180 | } 181 | }, 182 | "properties": { 183 | "version": { 184 | "description": "CVSS Version", 185 | "type": "string", 186 | "enum": [ "4.0" ] 187 | }, 188 | "vectorString": { 189 | "type": "string", 190 | "pattern": "^CVSS:4[.]0\/AV:[NALP]\/AC:[LH]\/AT:[NP]\/PR:[NLH]\/UI:[NPA]\/VC:[HLN]\/VI:[HLN]\/VA:[HLN]\/SC:[HLN]\/SI:[HLN]\/SA:[HLN](\/E:[XAPU])?(\/CR:[XHML])?(\/IR:[XHML])?(\/AR:[XHML])?(\/MAV:[XNALP])?(\/MAC:[XLH])?(\/MAT:[XNP])?(\/MPR:[XNLH])?(\/MUI:[XNPA])?(\/MVC:[XNLH])?(\/MVI:[XNLH])?(\/MVA:[XNLH])?(\/MSC:[XNLH])?(\/MSI:[XNLHS])?(\/MSA:[XNLHS])?(\/S:[XNP])?(\/AU:[XNY])?(\/R:[XAUI])?(\/V:[XDC])?(\/RE:[XLMH])?(\/U:(X|Clear|Green|Amber|Red))?$" 191 | }, 192 | "attackVector": { "$ref": "#/definitions/attackVectorType" }, 193 | "attackComplexity": { "$ref": "#/definitions/attackComplexityType" }, 194 | "attackRequirements": { "$ref": "#/definitions/attackRequirementsType" }, 195 | "privilegesRequired": { "$ref": "#/definitions/privilegesRequiredType" }, 196 | "userInteraction": { "$ref": "#/definitions/userInteractionType" }, 197 | "vulnerableSystemConfidentiality": { "$ref": "#/definitions/vulnCiaType" }, 198 | "vulnerableSystemIntegrity": { "$ref": "#/definitions/vulnCiaType" }, 199 | "vulnerableSystemAvailability": { "$ref": "#/definitions/vulnCiaType" }, 200 | "subsequentSystemConfidentiality": { "$ref": "#/definitions/subCiaType" }, 201 | "subsequentSystemIntegrity": { "$ref": "#/definitions/subCiaType" }, 202 | "subsequentSystemAvailability": { "$ref": "#/definitions/subCiaType" }, 203 | "exploitMaturity": { "$ref": "#/definitions/exploitMaturityType" }, 204 | "confidentialityRequirements": { 
"$ref": "#/definitions/ciaRequirementType" }, 205 | "integrityRequirements": { "$ref": "#/definitions/ciaRequirementType" }, 206 | "availabilityRequirements": { "$ref": "#/definitions/ciaRequirementType" }, 207 | "modifiedAttackVector": { "$ref": "#/definitions/modifiedAttackVectorType" }, 208 | "modifiedAttackComplexity": { "$ref": "#/definitions/modifiedAttackComplexityType" }, 209 | "modifiedAttackRequirements": { "$ref": "#/definitions/modifiedAttackRequirementsType" }, 210 | "modifiedPrivilegesRequired": { "$ref": "#/definitions/modifiedPrivilegesRequiredType" }, 211 | "modifiedUserInteraction": { "$ref": "#/definitions/modifiedUserInteractionType" }, 212 | "modifiedvulnerableSystemConfidentiality": { "$ref": "#/definitions/modifiedVulnCiaType" }, 213 | "modifiedvulnerableSystemIntegrity": { "$ref": "#/definitions/modifiedVulnCiaType" }, 214 | "modifiedvulnerableSystemAvailability": { "$ref": "#/definitions/modifiedVulnCiaType" }, 215 | "modifiedsubsequentSystemConfidentiality": { "$ref": "#/definitions/modifiedSubCType" }, 216 | "modifiedsubsequentSystemIntegrity": { "$ref": "#/definitions/modifiedSubIaType" }, 217 | "modifiedSubsequentSystemAvailability": { "$ref": "#/definitions/modifiedSubIaType" }, 218 | "safety": { "$ref": "#/definitions/safetyType" }, 219 | "automatable": { "$ref": "#/definitions/AutomatableType" }, 220 | "recovery": { "$ref": "#/definitions/recoveryType" }, 221 | "valueDensity": { "$ref": "#/definitions/valueDensityType" }, 222 | "vulnerabilityResponseEffort": { "$ref": "#/definitions/vulnerabilityResponseEffortType" }, 223 | "providerUrgency": { "$ref": "#/definitions/providerUrgencyType" } 224 | }, 225 | "allOf": [ 226 | { 227 | "properties": { 228 | "baseScore" : { 229 | "anyOf": [ 230 | {"$ref": "#/definitions/noneScoreType"}, 231 | {"$ref": "#/definitions/lowScoreType"}, 232 | {"$ref": "#/definitions/mediumScoreType"}, 233 | {"$ref": "#/definitions/highScoreType"}, 234 | {"$ref": "#/definitions/criticalScoreType"} 235 | ] 236 | }, 
237 | "baseSeverity" : { 238 | "anyOf": [ 239 | {"$ref": "#/definitions/noneSeverityType"}, 240 | {"$ref": "#/definitions/lowSeverityType"}, 241 | {"$ref": "#/definitions/mediumSeverityType"}, 242 | {"$ref": "#/definitions/highSeverityType"}, 243 | {"$ref": "#/definitions/criticalSeverityType"} 244 | ] 245 | } 246 | } 247 | }, 248 | { 249 | "properties": { 250 | "threatScore" : { 251 | "anyOf": [ 252 | {"$ref": "#/definitions/noneScoreType"}, 253 | {"$ref": "#/definitions/lowScoreType"}, 254 | {"$ref": "#/definitions/mediumScoreType"}, 255 | {"$ref": "#/definitions/highScoreType"}, 256 | {"$ref": "#/definitions/criticalScoreType"} 257 | ] 258 | }, 259 | "threatSeverity" : { 260 | "anyOf": [ 261 | {"$ref": "#/definitions/noneSeverityType"}, 262 | {"$ref": "#/definitions/lowSeverityType"}, 263 | {"$ref": "#/definitions/mediumSeverityType"}, 264 | {"$ref": "#/definitions/highSeverityType"}, 265 | {"$ref": "#/definitions/criticalSeverityType"} 266 | 267 | ] 268 | } 269 | } 270 | } 271 | ], 272 | "required": [ "version", "vectorString", "baseScore", "baseSeverity" ] 273 | } -------------------------------------------------------------------------------- /src/common/models/cvss_v2.py: -------------------------------------------------------------------------------- 1 | # generated by datamodel-codegen: 2 | # filename: https://csrc.nist.gov/schema/nvd/api/2.0/external/cvss-v2.0.json 3 | # timestamp: 2023-03-02T09:56:35+00:00 4 | 5 | from __future__ import annotations 6 | 7 | from enum import Enum 8 | from typing import Optional 9 | 10 | from pydantic import BaseModel, Field, confloat, constr 11 | 12 | 13 | class Version(Enum): 14 | """ 15 | CVSS Version 16 | """ 17 | 18 | field_2_0 = '2.0' 19 | 20 | 21 | class AccessVectorType(Enum): 22 | NETWORK = 'NETWORK' 23 | ADJACENT_NETWORK = 'ADJACENT_NETWORK' 24 | LOCAL = 'LOCAL' 25 | 26 | 27 | class AccessComplexityType(Enum): 28 | HIGH = 'HIGH' 29 | MEDIUM = 'MEDIUM' 30 | LOW = 'LOW' 31 | 32 | 33 | class AuthenticationType(Enum): 
class CiaType(Enum):
    """CVSS v2 impact level for confidentiality/integrity/availability (C/I/A)."""

    NONE = 'NONE'
    PARTIAL = 'PARTIAL'
    COMPLETE = 'COMPLETE'


class ExploitabilityType(Enum):
    """CVSS v2 temporal metric: Exploitability (E in the vector string)."""

    UNPROVEN = 'UNPROVEN'
    PROOF_OF_CONCEPT = 'PROOF_OF_CONCEPT'
    FUNCTIONAL = 'FUNCTIONAL'
    HIGH = 'HIGH'
    NOT_DEFINED = 'NOT_DEFINED'


class RemediationLevelType(Enum):
    """CVSS v2 temporal metric: Remediation Level (RL)."""

    OFFICIAL_FIX = 'OFFICIAL_FIX'
    TEMPORARY_FIX = 'TEMPORARY_FIX'
    WORKAROUND = 'WORKAROUND'
    UNAVAILABLE = 'UNAVAILABLE'
    NOT_DEFINED = 'NOT_DEFINED'


class ReportConfidenceType(Enum):
    """CVSS v2 temporal metric: Report Confidence (RC)."""

    UNCONFIRMED = 'UNCONFIRMED'
    UNCORROBORATED = 'UNCORROBORATED'
    CONFIRMED = 'CONFIRMED'
    NOT_DEFINED = 'NOT_DEFINED'


class CollateralDamagePotentialType(Enum):
    """CVSS v2 environmental metric: Collateral Damage Potential (CDP)."""

    NONE = 'NONE'
    LOW = 'LOW'
    LOW_MEDIUM = 'LOW_MEDIUM'
    MEDIUM_HIGH = 'MEDIUM_HIGH'
    HIGH = 'HIGH'
    NOT_DEFINED = 'NOT_DEFINED'


class TargetDistributionType(Enum):
    """CVSS v2 environmental metric: Target Distribution (TD)."""

    NONE = 'NONE'
    LOW = 'LOW'
    MEDIUM = 'MEDIUM'
    HIGH = 'HIGH'
    NOT_DEFINED = 'NOT_DEFINED'


class CiaRequirementType(Enum):
    """CVSS v2 environmental metric: C/I/A security requirement (CR/IR/AR)."""

    LOW = 'LOW'
    MEDIUM = 'MEDIUM'
    HIGH = 'HIGH'
    NOT_DEFINED = 'NOT_DEFINED'
# generated by datamodel-codegen:
#   filename:  https://csrc.nist.gov/schema/nvd/api/2.0/external/cvss-v3.0.json
#   timestamp: 2023-03-02T09:53:38+00:00

from __future__ import annotations

from enum import Enum
from typing import Optional

from pydantic import BaseModel, Field, confloat, constr


class Version(Enum):
    """
    CVSS Version
    """

    field_3_0 = '3.0'


class AttackVectorType(Enum):
    """CVSS v3.0 base metric: Attack Vector (AV)."""

    NETWORK = 'NETWORK'
    ADJACENT_NETWORK = 'ADJACENT_NETWORK'
    LOCAL = 'LOCAL'
    PHYSICAL = 'PHYSICAL'


class ModifiedAttackVectorType(Enum):
    """Environmental counterpart of Attack Vector (MAV); adds NOT_DEFINED."""

    NETWORK = 'NETWORK'
    ADJACENT_NETWORK = 'ADJACENT_NETWORK'
    LOCAL = 'LOCAL'
    PHYSICAL = 'PHYSICAL'
    NOT_DEFINED = 'NOT_DEFINED'


class AttackComplexityType(Enum):
    """CVSS v3.0 base metric: Attack Complexity (AC)."""

    HIGH = 'HIGH'
    LOW = 'LOW'


class ModifiedAttackComplexityType(Enum):
    """Environmental counterpart of Attack Complexity (MAC); adds NOT_DEFINED."""

    HIGH = 'HIGH'
    LOW = 'LOW'
    NOT_DEFINED = 'NOT_DEFINED'
class PrivilegesRequiredType(Enum):
    """CVSS v3.0 base metric: Privileges Required (PR)."""

    HIGH = 'HIGH'
    LOW = 'LOW'
    NONE = 'NONE'


class ModifiedPrivilegesRequiredType(Enum):
    """Environmental counterpart of Privileges Required (MPR); adds NOT_DEFINED."""

    HIGH = 'HIGH'
    LOW = 'LOW'
    NONE = 'NONE'
    NOT_DEFINED = 'NOT_DEFINED'


class UserInteractionType(Enum):
    """CVSS v3.0 base metric: User Interaction (UI)."""

    NONE = 'NONE'
    REQUIRED = 'REQUIRED'


class ModifiedUserInteractionType(Enum):
    """Environmental counterpart of User Interaction (MUI); adds NOT_DEFINED."""

    NONE = 'NONE'
    REQUIRED = 'REQUIRED'
    NOT_DEFINED = 'NOT_DEFINED'


class ScopeType(Enum):
    """CVSS v3.0 base metric: Scope (S)."""

    UNCHANGED = 'UNCHANGED'
    CHANGED = 'CHANGED'


class ModifiedScopeType(Enum):
    """Environmental counterpart of Scope (MS); adds NOT_DEFINED."""

    UNCHANGED = 'UNCHANGED'
    CHANGED = 'CHANGED'
    NOT_DEFINED = 'NOT_DEFINED'


class CiaType(Enum):
    """CVSS v3.0 impact level for confidentiality/integrity/availability (C/I/A)."""

    NONE = 'NONE'
    LOW = 'LOW'
    HIGH = 'HIGH'


class ModifiedCiaType(Enum):
    """Environmental counterpart of the C/I/A impact (MC/MI/MA); adds NOT_DEFINED."""

    NONE = 'NONE'
    LOW = 'LOW'
    HIGH = 'HIGH'
    NOT_DEFINED = 'NOT_DEFINED'


class ExploitCodeMaturityType(Enum):
    """CVSS v3.0 temporal metric: Exploit Code Maturity (E)."""

    UNPROVEN = 'UNPROVEN'
    PROOF_OF_CONCEPT = 'PROOF_OF_CONCEPT'
    FUNCTIONAL = 'FUNCTIONAL'
    HIGH = 'HIGH'
    NOT_DEFINED = 'NOT_DEFINED'


class RemediationLevelType(Enum):
    """CVSS v3.0 temporal metric: Remediation Level (RL)."""

    OFFICIAL_FIX = 'OFFICIAL_FIX'
    TEMPORARY_FIX = 'TEMPORARY_FIX'
    WORKAROUND = 'WORKAROUND'
    UNAVAILABLE = 'UNAVAILABLE'
    NOT_DEFINED = 'NOT_DEFINED'


class ConfidenceType(Enum):
    """CVSS v3.0 temporal metric: Report Confidence (RC)."""

    UNKNOWN = 'UNKNOWN'
    REASONABLE = 'REASONABLE'
    CONFIRMED = 'CONFIRMED'
    NOT_DEFINED = 'NOT_DEFINED'


class CiaRequirementType(Enum):
    """CVSS v3.0 environmental metric: C/I/A security requirement (CR/IR/AR)."""

    LOW = 'LOW'
    MEDIUM = 'MEDIUM'
    HIGH = 'HIGH'
    NOT_DEFINED = 'NOT_DEFINED'


class SeverityType(Enum):
    """Qualitative severity rating derived from a CVSS v3.0 score."""

    NONE = 'NONE'
    LOW = 'LOW'
    MEDIUM = 'MEDIUM'
    HIGH = 'HIGH'
    CRITICAL = 'CRITICAL'
regex=r'^CVSS:3[.]0/((AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XUNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[UNLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XUNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$' 137 | ) 138 | attackVector: Optional[AttackVectorType] = None 139 | attackComplexity: Optional[AttackComplexityType] = None 140 | privilegesRequired: Optional[PrivilegesRequiredType] = None 141 | userInteraction: Optional[UserInteractionType] = None 142 | scope: Optional[ScopeType] = None 143 | confidentialityImpact: Optional[CiaType] = None 144 | integrityImpact: Optional[CiaType] = None 145 | availabilityImpact: Optional[CiaType] = None 146 | baseScore: confloat(ge=0.0, le=10.0) 147 | baseSeverity: SeverityType 148 | exploitCodeMaturity: Optional[ExploitCodeMaturityType] = None 149 | remediationLevel: Optional[RemediationLevelType] = None 150 | reportConfidence: Optional[ConfidenceType] = None 151 | temporalScore: Optional[confloat(ge=0.0, le=10.0)] = None 152 | temporalSeverity: Optional[SeverityType] = None 153 | confidentialityRequirements: Optional[CiaRequirementType] = None 154 | integrityRequirements: Optional[CiaRequirementType] = None 155 | availabilityRequirements: Optional[CiaRequirementType] = None 156 | modifiedAttackVector: Optional[ModifiedAttackVectorType] = None 157 | modifiedAttackComplexity: Optional[ModifiedAttackComplexityType] = None 158 | modifiedPrivilegesRequired: Optional[ModifiedPrivilegesRequiredType] = None 159 | modifiedUserInteraction: Optional[ModifiedUserInteractionType] = None 160 | modifiedScope: Optional[ModifiedScopeType] = None 161 | modifiedConfidentialityImpact: Optional[ModifiedCiaType] = None 162 | modifiedIntegrityImpact: Optional[ModifiedCiaType] = None 163 | modifiedAvailabilityImpact: Optional[ModifiedCiaType] = None 164 | environmentalScore: 
# generated by datamodel-codegen:
#   filename:  https://csrc.nist.gov/schema/nvd/api/2.0/external/cvss-v3.1.json
#   timestamp: 2023-03-02T09:55:24+00:00

from __future__ import annotations

from enum import Enum
from typing import Optional

from pydantic import BaseModel, Field, confloat, constr


class Version(Enum):
    """
    CVSS Version
    """

    field_3_1 = '3.1'


class AttackVectorType(Enum):
    """CVSS v3.1 base metric: Attack Vector (AV)."""

    NETWORK = 'NETWORK'
    ADJACENT_NETWORK = 'ADJACENT_NETWORK'
    LOCAL = 'LOCAL'
    PHYSICAL = 'PHYSICAL'


class ModifiedAttackVectorType(Enum):
    """Environmental counterpart of Attack Vector (MAV); adds NOT_DEFINED."""

    NETWORK = 'NETWORK'
    ADJACENT_NETWORK = 'ADJACENT_NETWORK'
    LOCAL = 'LOCAL'
    PHYSICAL = 'PHYSICAL'
    NOT_DEFINED = 'NOT_DEFINED'


class AttackComplexityType(Enum):
    """CVSS v3.1 base metric: Attack Complexity (AC)."""

    HIGH = 'HIGH'
    LOW = 'LOW'


class ModifiedAttackComplexityType(Enum):
    """Environmental counterpart of Attack Complexity (MAC); adds NOT_DEFINED."""

    HIGH = 'HIGH'
    LOW = 'LOW'
    NOT_DEFINED = 'NOT_DEFINED'


class PrivilegesRequiredType(Enum):
    """CVSS v3.1 base metric: Privileges Required (PR)."""

    HIGH = 'HIGH'
    LOW = 'LOW'
    NONE = 'NONE'


class ModifiedPrivilegesRequiredType(Enum):
    """Environmental counterpart of Privileges Required (MPR); adds NOT_DEFINED."""

    HIGH = 'HIGH'
    LOW = 'LOW'
    NONE = 'NONE'
    NOT_DEFINED = 'NOT_DEFINED'


class UserInteractionType(Enum):
    """CVSS v3.1 base metric: User Interaction (UI)."""

    NONE = 'NONE'
    REQUIRED = 'REQUIRED'


class ModifiedUserInteractionType(Enum):
    """Environmental counterpart of User Interaction (MUI); adds NOT_DEFINED."""

    NONE = 'NONE'
    REQUIRED = 'REQUIRED'
    NOT_DEFINED = 'NOT_DEFINED'


class ScopeType(Enum):
    """CVSS v3.1 base metric: Scope (S)."""

    UNCHANGED = 'UNCHANGED'
    CHANGED = 'CHANGED'
= 'NOT_DEFINED' 80 | 81 | 82 | class CiaType(Enum): 83 | NONE = 'NONE' 84 | LOW = 'LOW' 85 | HIGH = 'HIGH' 86 | 87 | 88 | class ModifiedCiaType(Enum): 89 | NONE = 'NONE' 90 | LOW = 'LOW' 91 | HIGH = 'HIGH' 92 | NOT_DEFINED = 'NOT_DEFINED' 93 | 94 | 95 | class ExploitCodeMaturityType(Enum): 96 | UNPROVEN = 'UNPROVEN' 97 | PROOF_OF_CONCEPT = 'PROOF_OF_CONCEPT' 98 | FUNCTIONAL = 'FUNCTIONAL' 99 | HIGH = 'HIGH' 100 | NOT_DEFINED = 'NOT_DEFINED' 101 | 102 | 103 | class RemediationLevelType(Enum): 104 | OFFICIAL_FIX = 'OFFICIAL_FIX' 105 | TEMPORARY_FIX = 'TEMPORARY_FIX' 106 | WORKAROUND = 'WORKAROUND' 107 | UNAVAILABLE = 'UNAVAILABLE' 108 | NOT_DEFINED = 'NOT_DEFINED' 109 | 110 | 111 | class ConfidenceType(Enum): 112 | UNKNOWN = 'UNKNOWN' 113 | REASONABLE = 'REASONABLE' 114 | CONFIRMED = 'CONFIRMED' 115 | NOT_DEFINED = 'NOT_DEFINED' 116 | 117 | 118 | class CiaRequirementType(Enum): 119 | LOW = 'LOW' 120 | MEDIUM = 'MEDIUM' 121 | HIGH = 'HIGH' 122 | NOT_DEFINED = 'NOT_DEFINED' 123 | 124 | 125 | class SeverityType(Enum): 126 | NONE = 'NONE' 127 | LOW = 'LOW' 128 | MEDIUM = 'MEDIUM' 129 | HIGH = 'HIGH' 130 | CRITICAL = 'CRITICAL' 131 | 132 | 133 | class CveCvssDataModel(BaseModel): 134 | version: Version = Field(..., description='CVSS Version') 135 | vectorString: constr( 136 | regex=r'^CVSS:3[.]1/((AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])/)*(AV:[NALP]|AC:[LH]|PR:[NLH]|UI:[NR]|S:[UC]|[CIA]:[NLH]|E:[XUPFH]|RL:[XOTWU]|RC:[XURC]|[CIA]R:[XLMH]|MAV:[XNALP]|MAC:[XLH]|MPR:[XNLH]|MUI:[XNR]|MS:[XUC]|M[CIA]:[XNLH])$' 137 | ) 138 | attackVector: Optional[AttackVectorType] = None 139 | attackComplexity: Optional[AttackComplexityType] = None 140 | privilegesRequired: Optional[PrivilegesRequiredType] = None 141 | userInteraction: Optional[UserInteractionType] = None 142 | scope: Optional[ScopeType] = None 143 | confidentialityImpact: Optional[CiaType] = None 144 | 
integrityImpact: Optional[CiaType] = None 145 | availabilityImpact: Optional[CiaType] = None 146 | baseScore: confloat(ge=0.0, le=10.0) 147 | baseSeverity: SeverityType 148 | exploitCodeMaturity: Optional[ExploitCodeMaturityType] = None 149 | remediationLevel: Optional[RemediationLevelType] = None 150 | reportConfidence: Optional[ConfidenceType] = None 151 | temporalScore: Optional[confloat(ge=0.0, le=10.0)] = None 152 | temporalSeverity: Optional[SeverityType] = None 153 | confidentialityRequirements: Optional[CiaRequirementType] = None 154 | integrityRequirements: Optional[CiaRequirementType] = None 155 | availabilityRequirements: Optional[CiaRequirementType] = None 156 | modifiedAttackVector: Optional[ModifiedAttackVectorType] = None 157 | modifiedAttackComplexity: Optional[ModifiedAttackComplexityType] = None 158 | modifiedPrivilegesRequired: Optional[ModifiedPrivilegesRequiredType] = None 159 | modifiedUserInteraction: Optional[ModifiedUserInteractionType] = None 160 | modifiedScope: Optional[ModifiedScopeType] = None 161 | modifiedConfidentialityImpact: Optional[ModifiedCiaType] = None 162 | modifiedIntegrityImpact: Optional[ModifiedCiaType] = None 163 | modifiedAvailabilityImpact: Optional[ModifiedCiaType] = None 164 | environmentalScore: Optional[confloat(ge=0.0, le=10.0)] = None 165 | environmentalSeverity: Optional[SeverityType] = None 166 | -------------------------------------------------------------------------------- /src/common/models/cvss_v40.py: -------------------------------------------------------------------------------- 1 | # generated by datamodel-codegen: 2 | # filename: cvss-v4.0.json (file downloaded from https://nvd.nist.gov/vuln-metrics/cvss/v4.0 and adjusted/fixed to match the actual DB attributes) 3 | # timestamp: 2025-01-01T09:20:50+00:00 4 | 5 | from __future__ import annotations 6 | 7 | from enum import Enum 8 | from typing import Any, Optional, Union 9 | 10 | from pydantic import BaseModel, Field, confloat, constr 11 | 12 | 13 | 
class Version(Enum): 14 | field_4_0 = '4.0' 15 | 16 | 17 | class AttackVectorType(Enum): 18 | NETWORK = 'NETWORK' 19 | ADJACENT = 'ADJACENT' 20 | LOCAL = 'LOCAL' 21 | PHYSICAL = 'PHYSICAL' 22 | 23 | 24 | class ModifiedAttackVectorType(Enum): 25 | NETWORK = 'NETWORK' 26 | ADJACENT = 'ADJACENT' 27 | LOCAL = 'LOCAL' 28 | PHYSICAL = 'PHYSICAL' 29 | NOT_DEFINED = 'NOT_DEFINED' 30 | 31 | 32 | class AttackComplexityType(Enum): 33 | HIGH = 'HIGH' 34 | LOW = 'LOW' 35 | 36 | 37 | class ModifiedAttackComplexityType(Enum): 38 | HIGH = 'HIGH' 39 | LOW = 'LOW' 40 | NOT_DEFINED = 'NOT_DEFINED' 41 | 42 | 43 | class AttackRequirementsType(Enum): 44 | NONE = 'NONE' 45 | PRESENT = 'PRESENT' 46 | 47 | 48 | class ModifiedAttackRequirementsType(Enum): 49 | NONE = 'NONE' 50 | PRESENT = 'PRESENT' 51 | NOT_DEFINED = 'NOT_DEFINED' 52 | 53 | 54 | class PrivilegesRequiredType(Enum): 55 | HIGH = 'HIGH' 56 | LOW = 'LOW' 57 | NONE = 'NONE' 58 | 59 | 60 | class ModifiedPrivilegesRequiredType(Enum): 61 | HIGH = 'HIGH' 62 | LOW = 'LOW' 63 | NONE = 'NONE' 64 | NOT_DEFINED = 'NOT_DEFINED' 65 | 66 | 67 | class UserInteractionType(Enum): 68 | NONE = 'NONE' 69 | PASSIVE = 'PASSIVE' 70 | ACTIVE = 'ACTIVE' 71 | 72 | 73 | class ModifiedUserInteractionType(Enum): 74 | NONE = 'NONE' 75 | PASSIVE = 'PASSIVE' 76 | ACTIVE = 'ACTIVE' 77 | NOT_DEFINED = 'NOT_DEFINED' 78 | 79 | 80 | class VulnCiaType(Enum): 81 | NONE = 'NONE' 82 | LOW = 'LOW' 83 | HIGH = 'HIGH' 84 | 85 | 86 | class ModifiedVulnCiaType(Enum): 87 | NONE = 'NONE' 88 | LOW = 'LOW' 89 | HIGH = 'HIGH' 90 | NOT_DEFINED = 'NOT_DEFINED' 91 | 92 | 93 | class ModifiedSubCType(Enum): 94 | NEGLIGIBLE = 'NEGLIGIBLE' 95 | LOW = 'LOW' 96 | HIGH = 'HIGH' 97 | NOT_DEFINED = 'NOT_DEFINED' 98 | 99 | 100 | class ModifiedSubIaType(Enum): 101 | NEGLIGIBLE = 'NEGLIGIBLE' 102 | LOW = 'LOW' 103 | HIGH = 'HIGH' 104 | SAFETY = 'SAFETY' 105 | NOT_DEFINED = 'NOT_DEFINED' 106 | 107 | 108 | class ExploitMaturityType(Enum): 109 | UNREPORTED = 'UNREPORTED' 110 | PROOF_OF_CONCEPT = 
'PROOF_OF_CONCEPT' 111 | ATTACKED = 'ATTACKED' 112 | NOT_DEFINED = 'NOT_DEFINED' 113 | 114 | 115 | class CiaRequirementType(Enum): 116 | LOW = 'LOW' 117 | MEDIUM = 'MEDIUM' 118 | HIGH = 'HIGH' 119 | NOT_DEFINED = 'NOT_DEFINED' 120 | 121 | 122 | class SafetyType(Enum): 123 | NEGLIGIBLE = 'NEGLIGIBLE' 124 | PRESENT = 'PRESENT' 125 | NOT_DEFINED = 'NOT_DEFINED' 126 | 127 | 128 | class AutomatableType(Enum): 129 | NO = 'NO' 130 | YES = 'YES' 131 | NOT_DEFINED = 'NOT_DEFINED' 132 | 133 | 134 | class RecoveryType(Enum): 135 | AUTOMATIC = 'AUTOMATIC' 136 | USER = 'USER' 137 | IRRECOVERABLE = 'IRRECOVERABLE' 138 | NOT_DEFINED = 'NOT_DEFINED' 139 | 140 | 141 | class ValueDensityType(Enum): 142 | DIFFUSE = 'DIFFUSE' 143 | CONCENTRATED = 'CONCENTRATED' 144 | NOT_DEFINED = 'NOT_DEFINED' 145 | 146 | 147 | class VulnerabilityResponseEffortType(Enum): 148 | LOW = 'LOW' 149 | MODERATE = 'MODERATE' 150 | HIGH = 'HIGH' 151 | NOT_DEFINED = 'NOT_DEFINED' 152 | 153 | 154 | class ProviderUrgencyType(Enum): 155 | CLEAR = 'CLEAR' 156 | GREEN = 'GREEN' 157 | AMBER = 'AMBER' 158 | RED = 'RED' 159 | NOT_DEFINED = 'NOT_DEFINED' 160 | 161 | 162 | class NoneScoreType(BaseModel): 163 | __root__: confloat(ge=0.0, le=0.0) 164 | 165 | 166 | class LowScoreType(BaseModel): 167 | __root__: confloat(ge=0.1, le=3.9, multiple_of=0.1) 168 | 169 | 170 | class MediumScoreType(BaseModel): 171 | __root__: confloat(ge=4.0, le=6.9, multiple_of=0.1) 172 | 173 | 174 | class HighScoreType(BaseModel): 175 | __root__: confloat(ge=7.0, le=8.9, multiple_of=0.1) 176 | 177 | 178 | class CriticalScoreType(BaseModel): 179 | __root__: confloat(ge=9.0, le=10.0, multiple_of=0.1) 180 | 181 | 182 | class NoneSeverityType(BaseModel): 183 | __root__: Any = Field('NONE', const=True) 184 | 185 | 186 | class LowSeverityType(BaseModel): 187 | __root__: Any = Field('LOW', const=True) 188 | 189 | 190 | class MediumSeverityType(BaseModel): 191 | __root__: Any = Field('MEDIUM', const=True) 192 | 193 | 194 | class 
HighSeverityType(BaseModel): 195 | __root__: Any = Field('HIGH', const=True) 196 | 197 | 198 | class CriticalSeverityType(BaseModel): 199 | __root__: Any = Field('CRITICAL', const=True) 200 | 201 | 202 | class CveCvssDataModel(BaseModel): 203 | baseScore: Union[ 204 | NoneScoreType, LowScoreType, MediumScoreType, HighScoreType, CriticalScoreType 205 | ] 206 | baseSeverity: Union[ 207 | NoneSeverityType, 208 | LowSeverityType, 209 | MediumSeverityType, 210 | HighSeverityType, 211 | CriticalSeverityType, 212 | ] 213 | threatScore: Optional[ 214 | Union[ 215 | NoneScoreType, 216 | LowScoreType, 217 | MediumScoreType, 218 | HighScoreType, 219 | CriticalScoreType, 220 | ] 221 | ] = None 222 | threatSeverity: Optional[ 223 | Union[ 224 | NoneSeverityType, 225 | LowSeverityType, 226 | MediumSeverityType, 227 | HighSeverityType, 228 | CriticalSeverityType, 229 | ] 230 | ] = None 231 | version: Version = Field(..., description='CVSS Version') 232 | vectorString: constr( 233 | regex=r'^CVSS:4[.]0(/AV:[NALP]|/AC:[LH]|/AT:[NP]|/PR:[NLH]|/UI:[NPA]|/VC:[HLN]|/VI:[HLN]|/VA:[HLN]|/SC:[HLN]|/SI:[HLN]|/SA:[HLN]|/E:[XAPU]|/CR:[XHML]|/IR:[XHML]|/AR:[XHML]|/MAV:[XNALP]|/MAC:[XLH]|/MAT:[XNP]|/MPR:[XNLH]|/MUI:[XNPA]|/MVC:[XNLH]|/MVI:[XNLH]|/MVA:[XNLH]|/MSC:[XNLH]|/MSI:[XNLHS]|/MSA:[XNLHS]|/S:[XNP]|/AU:[XNY]|/R:[XAUI]|/V:[XDC]|/RE:[XLMH]|/U:(X|Clear|Green|Amber|Red))*$' 234 | ) 235 | attackVector: Optional[AttackVectorType] = None 236 | attackComplexity: Optional[AttackComplexityType] = None 237 | attackRequirements: Optional[AttackRequirementsType] = None 238 | privilegesRequired: Optional[PrivilegesRequiredType] = None 239 | userInteraction: Optional[UserInteractionType] = None 240 | vulnerableSystemConfidentiality: Optional[VulnCiaType] = None 241 | vulnerableSystemIntegrity: Optional[VulnCiaType] = None 242 | vulnerableSystemAvailability: Optional[VulnCiaType] = None 243 | subsequentSystemConfidentiality: Optional[VulnCiaType] = None 244 | subsequentSystemIntegrity: 
Optional[VulnCiaType] = None 245 | subsequentSystemAvailability: Optional[VulnCiaType] = None 246 | exploitMaturity: Optional[ExploitMaturityType] = 'NOT_DEFINED' 247 | confidentialityRequirements: Optional[CiaRequirementType] = 'NOT_DEFINED' 248 | integrityRequirements: Optional[CiaRequirementType] = 'NOT_DEFINED' 249 | availabilityRequirements: Optional[CiaRequirementType] = 'NOT_DEFINED' 250 | modifiedAttackVector: Optional[ModifiedAttackVectorType] = 'NOT_DEFINED' 251 | modifiedAttackComplexity: Optional[ModifiedAttackComplexityType] = 'NOT_DEFINED' 252 | modifiedAttackRequirements: Optional[ModifiedAttackRequirementsType] = 'NOT_DEFINED' 253 | modifiedPrivilegesRequired: Optional[ModifiedPrivilegesRequiredType] = 'NOT_DEFINED' 254 | modifiedUserInteraction: Optional[ModifiedUserInteractionType] = 'NOT_DEFINED' 255 | modifiedVulnerableSystemConfidentiality: Optional[ModifiedVulnCiaType] = 'NOT_DEFINED' 256 | modifiedVulnerableSystemIntegrity: Optional[ModifiedVulnCiaType] = 'NOT_DEFINED' 257 | modifiedVulnerableSystemAvailability: Optional[ModifiedVulnCiaType] = 'NOT_DEFINED' 258 | modifiedSubsequentSystemConfidentiality: Optional[ModifiedSubCType] = 'NOT_DEFINED' 259 | modifiedSubsequentSystemIntegrity: Optional[ModifiedSubIaType] = 'NOT_DEFINED' 260 | modifiedSubsequentSystemAvailability: Optional[ModifiedSubIaType] = 'NOT_DEFINED' 261 | safety: Optional[SafetyType] = 'NOT_DEFINED' 262 | automatable: Optional[AutomatableType] = 'NOT_DEFINED' 263 | recovery: Optional[RecoveryType] = 'NOT_DEFINED' 264 | valueDensity: Optional[ValueDensityType] = 'NOT_DEFINED' 265 | vulnerabilityResponseEffort: Optional[ 266 | VulnerabilityResponseEffortType 267 | ] = 'NOT_DEFINED' 268 | providerUrgency: Optional[ProviderUrgencyType] = 'NOT_DEFINED' 269 | -------------------------------------------------------------------------------- /src/common/util.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generic utility functions for 
 the fastcve. 3 | 4 | Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved. 5 | """ 6 | 7 | import os 8 | from alembic.config import Config 9 | from alembic import command 10 | 11 | 12 | class ValidationError(Exception): ... 13 | 14 | 15 | # ------------------------------------------------------------------------------ 16 | # create/update the db schema using alembic 17 | def init_db_schema(): 18 | 19 | # ------------------------------------------------------------------------------ 20 | home = os.environ.get("FCDB_HOME") 21 | if not home: 22 | raise ValidationError(f'Project home environment vars not properly set: {home}') 23 | 24 | # this is a time consuming activity thus we make sure we do it only once 25 | control_file_name = os.path.join(home, 'alembic_init_done') 26 | if not os.path.isfile(control_file_name): 27 | 28 | working_dir = os.path.join(home, 'db') 29 | cwd = os.getcwd() 30 | 31 | os.chdir(working_dir) # NOTE(review): if command.upgrade() below raises, cwd is never restored - consider try/finally 32 | 33 | alembic_cfg = Config("alembic.ini") # relative path: requires cwd == <FCDB_HOME>/db (see chdir above; alembic.ini uses script_location ./scripts) 34 | 35 | # Run the alembic upgrade head command 36 | command.upgrade(alembic_cfg, "head") 37 | 38 | os.chdir(cwd) 39 | 40 | # create the file to indicate there is no need to run alembic update scripts 41 | # until next restart of the docker image 42 | # NOTE: Do not map the ${FCDB_HOME} directory to the HOST 43 | with open(control_file_name, "w"): 44 | pass 45 | 46 | 47 | # ------------------------------------------------------------------------------ # setup_env: runs config/setenv.sh and mirrors its exported variables into os.environ 48 | def setup_env(): 49 | 50 | import subprocess 51 | 52 | home = os.environ.get("FCDB_HOME") 53 | if not home: 54 | raise ValidationError(f'Project home environment vars not properly set: {home}') 55 | 56 | # Run the bash script that exports needed environment variables 57 | script = os.path.join(home, 'config', 'setenv.sh') 58 | result = subprocess.run(["bash", script], stdout=subprocess.PIPE, universal_newlines=True) # NOTE(review): no check=True - a failing script yields empty output and is silently ignored; confirm intended 59 | 60 | # Parse the output to extract the environment variables 61 | for line in result.stdout.split("\n"): # NOTE(review): assumes single-line values; a multi-line env value would be mangled - TODO confirm 62 | 
if "=" in line: 63 | key, value = line.split("=", 1) 64 | os.environ[key] = value 65 | 66 | -------------------------------------------------------------------------------- /src/config/requirements.txt: -------------------------------------------------------------------------------- 1 | alembic==1.11.1 2 | SQLAlchemy==2.0.20 3 | psycopg2-binary==2.9.9 4 | requests==2.31.0 5 | tqdm==4.66.2 6 | pytz==2024.1 7 | pydantic==1.10.14 8 | fastapi==0.95.2 9 | uvicorn==0.22.0 10 | xmltodict==0.13.0 11 | pydantic-argparse==0.10.0 12 | argcomplete==3.1.1 -------------------------------------------------------------------------------- /src/config/setenv.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | #------------------------------------------------------------------------------ 3 | # Script to export all needed env variables 4 | #------------------------------------------------------------------------------ 5 | # Here we should add all definitions of parameters for application which are env 6 | # dependent. 7 | # This script has to be sourced once env is started (i.e. from ${HOME}/.profile) 8 | # Input parameters: 9 | # 1) --env INP_ENV_NAME [i.e. values:dev/test/prod] 10 | # 2) --config INP_CFG_LIST [additional list of includes/application/module] 11 | # 12 | # Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved. 
 13 | #------------------------------------------------------------------------------ 14 | getOptions() 15 | #------------------------------------------------------------------------------ 16 | { 17 | while [ -n "$1" ]; 18 | do 19 | OPT="$1" 20 | case ${OPT} in 21 | --config) 22 | shift 23 | [ -n "$1" ] && INP_CFG_LIST=$1 && shift 24 | ;; 25 | --env) 26 | shift 27 | [ -n "$1" ] && export INP_ENV_NAME=$1 && shift 28 | ;; 29 | --home) 30 | shift 31 | [ -n "$1" ] && FCDB_HOME=$1 && shift 32 | ;; 33 | *) 34 | shift 35 | ;; 36 | esac 37 | done 38 | return 0 39 | } 40 | 41 | #-------------------------------------------------------------------------------------------------- 42 | # Parse input arguments if any 43 | #-------------------------------------------------------------------------------------------------- 44 | getOptions "$@" 45 | 46 | #-------------------------------------------------------------------------------------------------- 47 | # Main Section 48 | #-------------------------------------------------------------------------------------------------- 49 | 50 | export FCDB_PRJ_NAME=fastcve 51 | 52 | # root path for the fastcve project (FCDB_HOME may be pre-set or passed via --home) 53 | export FCDB_HOME=${FCDB_HOME:-${HOME}/projects/${FCDB_PRJ_NAME}} 54 | 55 | # path where configuration files are placed 56 | export FCDB_CFG_PATH=${FCDB_HOME}/config 57 | 58 | # main configuration file 59 | export FCDB_CFG_FILE=${FCDB_CFG_PATH}/setenv/config.ini 60 | 61 | # path where configuration files for additional loggers are placed 62 | export FCDB_CFG_LOG_PATH=${FCDB_CFG_PATH}/log 63 | 64 | # path where application log files are created/logged 65 | export FCDB_LOG_PATH=${FCDB_HOME}/logs 66 | 67 | # define python path 68 | export PYTHONPATH=${FCDB_HOME} 69 | 70 | if [ ! -d "${FCDB_LOG_PATH}" -a ! 
-r "${FCDB_LOG_PATH}" ]; then 71 | mkdir ${FCDB_LOG_PATH} 72 | fi 73 | 74 | #-------------------------------------------------------------------------------------------------- 75 | # env type [dev/test/prod] dependent parameters 76 | #-------------------------------------------------------------------------------------------------- 77 | # set default to dev is not specified explicitly 78 | if [ -z "$INP_ENV_NAME" ]; then 79 | INP_ENV_NAME=dev 80 | fi 81 | 82 | export INP_ENV_NAME 83 | 84 | if [ -n "${INP_ENV_NAME}" ]; then 85 | 86 | # any available ini file per env should be named as setenv_{INP_ENV_NAME}.ini and 87 | # available under ${FCDB_CFG_PATH} 88 | if [ -r "${FCDB_CFG_PATH}/setenv/setenv_${INP_ENV_NAME}.ini" ]; then 89 | 90 | # any variable defined in these files should set variables names and values in following form: 91 | # ENV_{NAME}={VALUE} 92 | . ${FCDB_CFG_PATH}/setenv/setenv_${INP_ENV_NAME}.ini 93 | fi 94 | fi 95 | 96 | #-------------------------------------------------------------------------------------------------- 97 | export FCDB_STORAGE_INPUT="${ENV_FCDB_STORAGE_INPUT:-${FCDB_HOME}/input}" 98 | 99 | #-------------------------------------------------------------------------------------------------- 100 | # Log Config section 101 | #-------------------------------------------------------------------------------------------------- 102 | export FCDB_LOG_CONSOLE_DEBUG_LVL=${FCDB_LOG_CONSOLE_DEBUG_LVL:-WARNING} 103 | export FCDB_LOG_FILE_DEBUG_LVL=${FCDB_LOG_FILE_DEBUG_LVL:-WARNING} 104 | 105 | env -------------------------------------------------------------------------------- /src/config/setenv/config.ini: -------------------------------------------------------------------------------- 1 | ;-------------------------------------------------------------------------------------------------- 2 | ; 3 | ; Configuration File with all the configuration parameters needed for the application 4 | ; 5 | ; Copyright (c) 2020 to date, Binare Oy 
(license@binare.io) All rights reserved. 6 | ;-------------------------------------------------------------------------------------------------- 7 | [db] 8 | 9 | ; the DB connection string 10 | dsn = "postgresql://${FCDB_USER}:${FCDB_PASS}@${FCDB_HOST}:${FCDB_PORT}/vuln_db" 11 | ; sets the pool of DB connections used by each worker, FCDB_DB_POOL_SIZE connection with extension to max FCDB_DB_POOL_OVERFLOW 12 | ; i.e. in the case of 4 workers it would be able to simultaneously handle 4*(FCDB_DB_POOL_SIZE + FCDB_DB_POOL_OVERFLOW) requests at the same time 13 | ; pool_pre_ping: https://docs.sqlalchemy.org/en/13/core/pooling.html#disconnect-handling-pessimistic 14 | params = {"pool_size": ${FCDB_DB_POOL_SIZE}, "max_overflow": ${FCDB_DB_POOL_OVERFLOW}, "pool_pre_ping": "True"} 15 | 16 | ;-------------------------------------------------------------------------------------------------- 17 | [log] 18 | 19 | ; namespace (or logger name) 20 | namespace = root 21 | 22 | ; debug level for above namespace 23 | level = ${FCDB_LOG_FILE_DEBUG_LVL} 24 | 25 | ; format the message : check https://docs.python.org/3/library/logging.html for possible values 26 | format.msg = %(levelname)-5.5s | %(asctime)s.%(msecs).3d | %(processName)s | %(threadName)-10.10s | %(name)s.%(funcName)s(%(lineno)d) : %(message)s 27 | 28 | ; format asctime parameter 29 | format.datefmt = %Y-%m-%d %H:%M:%S 30 | 31 | ; log file name 32 | file.name = fastcve_console_%(process_name).log 33 | 34 | ; maximum log file size before rotating to the next one 35 | file.max.size = 10 * 1024 * 1024 36 | 37 | ; max number of rotated log files, 11th will be deleted 38 | file.max.count = 10 39 | 40 | ;-------------------------------------------------------------------------------------------------- 41 | [fetch] 42 | 43 | ; NIST CVE API 44 | url.cve = https://services.nvd.nist.gov/rest/json/cves/2.0 45 | 46 | ; NIST CPE API 47 | url.cpe = https://services.nvd.nist.gov/rest/json/cpes/2.0 48 | 49 | ; NIST Source API 50 | 
url.source = https://services.nvd.nist.gov/rest/json/source/2.0 51 | 52 | ; MITRE CWE source file 53 | url.cwe = https://cwe.mitre.org/data/xml/views/2000.xml.zip 54 | 55 | ; MITRE CAPEC source file 56 | url.capec = https://capec.mitre.org/data/xml/views/3000.xml.zip 57 | 58 | ; Cyentia EPSS source file 59 | url.epss = https://epss.cyentia.com 60 | 61 | ; CISA KEV source file 62 | url.kev = https://www.cisa.gov/sites/default/files/feeds/known_exploited_vulnerabilities.json 63 | 64 | ; api_key: set to the value of the API key obtained from NVD 65 | api_key = ${NVD_API_KEY} 66 | 67 | ; pause between requests 68 | request.pause.with_key = 1 #seconds to pause between requests 69 | request.pause.without_key = 6 #seconds to pause between requests 70 | 71 | ; min time between syncs (sec) 72 | min.sync.time = 2 * 60 * 60 # sec 73 | 74 | ; max days for period fetch (days) 75 | max.days.period = 120 76 | 77 | ; specify the timezone where your instance is running, this helps in accurate data updates 78 | ; possible values UTC, Etc/GMT+/-[0-12] (i.e. 
Etc/GMT-5) 79 | timezone = Etc/GMT+2 80 | -------------------------------------------------------------------------------- /src/config/setenv/setenv_dev.ini: -------------------------------------------------------------------------------- 1 | #-------------------------------------------------------------------------------------------------- 2 | # Configuration values definition for DEV like env 3 | #-------------------------------------------------------------------------------------------------- 4 | # Log related settings 5 | export FCDB_LOG_CONSOLE_DEBUG_LVL=DEBUG 6 | export FCDB_LOG_FILE_DEBUG_LVL=DEBUG 7 | 8 | #-------------------------------------------------------------------------------------------------- 9 | # DB related settings 10 | export FCDB_DB_POOL_SIZE=1 11 | export FCDB_DB_POOL_OVERFLOW=5 12 | 13 | export FCDB_HOST=localhost 14 | export FCDB_PORT=5432 15 | 16 | -------------------------------------------------------------------------------- /src/config/setenv/setenv_local.ini: -------------------------------------------------------------------------------- 1 | #-------------------------------------------------------------------------------------------------- 2 | # Configuration values definition for LOCAL like env 3 | #-------------------------------------------------------------------------------------------------- 4 | # Log related settings (values must be exported so they reach the `env` dump parsed by setup_env, consistent with setenv_dev/prod/test) 5 | export FCDB_LOG_CONSOLE_DEBUG_LVL=DEBUG 6 | export FCDB_LOG_FILE_DEBUG_LVL=DEBUG 7 | 8 | #-------------------------------------------------------------------------------------------------- 9 | # DB related settings 10 | export FCDB_DB_POOL_SIZE=5 11 | export FCDB_DB_POOL_OVERFLOW=5 12 | 13 | export FCDB_HOST=localhost 14 | export FCDB_PORT=6630 15 | -------------------------------------------------------------------------------- /src/config/setenv/setenv_prod.ini: -------------------------------------------------------------------------------- 1 | 
#-------------------------------------------------------------------------------------------------- 2 | # Configuration values definition for PROD like env 3 | #-------------------------------------------------------------------------------------------------- 4 | # Log related settings 5 | export FCDB_LOG_CONSOLE_DEBUG_LVL=WARNING 6 | export FCDB_LOG_FILE_DEBUG_LVL=INFO 7 | 8 | #-------------------------------------------------------------------------------------------------- 9 | # DB related settings 10 | export FCDB_DB_POOL_SIZE=5 11 | export FCDB_DB_POOL_OVERFLOW=15 12 | 13 | export FCDB_HOST=localhost 14 | export FCDB_PORT=5432 15 | -------------------------------------------------------------------------------- /src/config/setenv/setenv_test.ini: -------------------------------------------------------------------------------- 1 | #-------------------------------------------------------------------------------------------------- 2 | # Configuration values definition for TEST like env 3 | #-------------------------------------------------------------------------------------------------- 4 | # Log related settings 5 | export FCDB_LOG_CONSOLE_DEBUG_LVL=DEBUG 6 | export FCDB_LOG_FILE_DEBUG_LVL=DEBUG 7 | 8 | #-------------------------------------------------------------------------------------------------- 9 | # DB related settings 10 | export FCDB_DB_POOL_SIZE=5 11 | export FCDB_DB_POOL_OVERFLOW=5 12 | 13 | export FCDB_HOST=localhost 14 | export FCDB_PORT=6630 15 | -------------------------------------------------------------------------------- /src/db/__init__.py: -------------------------------------------------------------------------------- 1 | from .database import DataBase -------------------------------------------------------------------------------- /src/db/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 
2 | [alembic] 3 | # path to migration scripts 4 | script_location = ./scripts 5 | 6 | # template used to generate migration files 7 | file_template = %%(rev)s_%%(slug)s 8 | 9 | -------------------------------------------------------------------------------- /src/db/create_schema.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Facilitates the run of alembic scripts. 4 | 5 | Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved. 6 | """ 7 | 8 | import logging 9 | import importlib 10 | import importlib.util 11 | import argparse 12 | import generic 13 | 14 | # ------------------------------------------------------------------------------ 15 | logger = logging.getLogger(__name__) 16 | appctx = None 17 | 18 | 19 | # ------------------------------------------------------------------------------ 20 | def import_meta(): 21 | # -------------------------------------------------------------------------- 22 | # Import all table definitons as per {schema}.py file 23 | module = None 24 | schema = 'tables' 25 | 26 | if importlib.util.find_spec(schema, None) is not None: 27 | module = importlib.import_module(schema) 28 | 29 | if module is None: 30 | raise RuntimeError('Cannot continue wihtout module {}.'.format(schema)) 31 | 32 | # -------------------------------------------------------------------------- 33 | # move imported table definition to global scope 34 | module_dict = module.__dict__ 35 | try: 36 | to_import = module.__all__ 37 | except AttributeError: 38 | to_import = [name for name in module_dict if not name.startswith('_')] 39 | 40 | globals().update({name: module_dict[name] for name in to_import}) 41 | 42 | 43 | # ------------------------------------------------------------------------------ 44 | def create_schema_objs(): 45 | 46 | """Tool to be used on order to create all objects defined in a particular 47 | python file that has all objects definition for a particulat 
schema which 48 | should be provided in input. 49 | 50 | Note: ``schema`` will expect to have corresponfing python file {schema}.py 51 | in the same directory from where create_schema is run and should 52 | contain all objects definitions as per SqlAlchemy syntax. 53 | 54 | """ 55 | 56 | global appctx 57 | 58 | import_meta() 59 | 60 | # -------------------------------------------------------------------------- 61 | # Create all schema objects Tables/Indeces/Sequences etc. defined 62 | # into Base as per imported objects from imported {schema}.py 63 | # -------------------------------------------------------------------------- 64 | metadata.create_all(appctx.db.engine) 65 | 66 | print("Finnished Creating objects for schema: tables") 67 | 68 | 69 | # -------------------------------------------------------------------------- 70 | def create_schema_diffs(): 71 | 72 | """Generates (and applies the schema differences between metadata and DB) 73 | """ 74 | from alembic.migration import MigrationContext 75 | from alembic.autogenerate import compare_metadata 76 | import pprint 77 | 78 | global appctx 79 | 80 | import_meta() 81 | conn = appctx.db.engine.connect() 82 | 83 | mc = MigrationContext.configure(connection=conn, opts={'include_schemas': True, 84 | 'target_metadata': metadata}) 85 | 86 | diff = compare_metadata(mc, metadata) 87 | if diff: 88 | pprint.pprint(diff, indent=4) 89 | else: 90 | print('No differences found\n') 91 | 92 | 93 | if __name__ == "__main__": 94 | 95 | # -------------------------------------------------------------------------- 96 | # create application context 97 | appctx = generic.ApplicationContext.instance() 98 | 99 | # -------------------------------------------------------------------------- 100 | # Parse the arguments and Validate 101 | parser = argparse.ArgumentParser(description="Schema objects creation") 102 | parser.add_argument('-d', '--diff', dest='diff_ind', action='store_true', 103 | help="Indicates to generate a difference between 
metadata and DB") 104 | 105 | args = parser.parse_args() 106 | 107 | if args.diff_ind: 108 | create_schema_diffs() 109 | else: 110 | create_schema_objs() 111 | 112 | -------------------------------------------------------------------------------- /src/db/database.py: -------------------------------------------------------------------------------- 1 | """ 2 | Database handlign module, Initialization and session management 3 | 4 | Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved. 5 | """ 6 | 7 | import os 8 | 9 | import threading 10 | import logging 11 | import sqlalchemy 12 | import sqlalchemy.orm 13 | 14 | from sqlalchemy import event, exc 15 | 16 | # ------------------------------------------------------------------------------ 17 | LOGGER = logging.getLogger(__name__) 18 | 19 | 20 | # ------------------------------------------------------------------------------ 21 | class DataBase(): 22 | """This class is repsonsible to be the interface with DB. Activitities that 23 | can be done with: 24 | 25 | *) retirieve a dedicated session for current Thread 26 | *) retrieve current sqlalchemy Engine. 27 | """ 28 | __db_session_cls = None 29 | __db_engine = None 30 | 31 | def __init__(self, dsn, **kwargs): 32 | 33 | """Constructor method for DataBase class. 
34 | 35 | Args: 36 | dsn: Database connection string 37 | **kwargs: additional parameters that are passed to engine instatiation for sqlAlchemy 38 | """ 39 | 40 | self.__lock = threading.Lock() 41 | self.__locks = {} 42 | self.__thread_sessions = {} 43 | self.__db_engine = sqlalchemy.create_engine(dsn, **kwargs) 44 | # https://docs.sqlalchemy.org/en/13/faq/connections.html 45 | # how-do-i-use-engines-connections-sessions-with-python-multiprocessing-or-os-fork 46 | self.__db_engine.dispose() 47 | # https://docs.sqlalchemy.org/en/13/core/pooling.html 48 | self.__add_engine_pidguard() 49 | 50 | self.__db_session_cls = sqlalchemy.orm.sessionmaker(bind=self.__db_engine, future=True) 51 | 52 | self.__thread_sessions = {} 53 | 54 | # ------------------------------------------------------------------------------ 55 | def __add_engine_pidguard(self): 56 | """Add multiprocessing guards. 57 | 58 | Forces a connection to be reconnected if it is detected 59 | as having been shared to a sub-process. 60 | """ 61 | 62 | # -------------------------------------------------------------------------- 63 | @event.listens_for(self.__db_engine, "connect") 64 | def connect(dbapi_connection, connection_record): 65 | connection_record.info['pid'] = os.getpid() 66 | 67 | # -------------------------------------------------------------------------- 68 | @event.listens_for(self.__db_engine, "checkout") 69 | def checkout(dbapi_connection, connection_record, connection_proxy): 70 | pid = os.getpid() 71 | if connection_record.info['pid'] != pid: 72 | connection_record.connection = connection_proxy.connection = None 73 | raise exc.DisconnectionError(f"Connection record belongs to pid " 74 | f"{connection_record.info['pid']}, " 75 | f"attempting to check out in pid {pid}") 76 | 77 | # ------------------------------------------------------------------------------ 78 | @property 79 | def engine(self): 80 | """Will return current sqlalchemy engine. 
81 | 82 | Returns 83 | engine: class:``sqlalchemy.engine``: - current session's DB engine 84 | """ 85 | return self.__db_engine 86 | 87 | def get_session(self, separate=False, **kwargs): 88 | """Will return a separate session for current sqlalchemy engine if separate 89 | parameter is set or the current session for current thread 90 | 91 | Returns 92 | session:class:``sqlalchemy.orm.session``: - instance corresponding to current thread 93 | """ 94 | if separate: 95 | return self.__db_session_cls(**kwargs) 96 | else: 97 | return self.__enter__() 98 | 99 | def return_session(self, session=None, exception=None): 100 | """To be called after retrieving a session using `get_session` 101 | In case session was retrieved as as a separate session then it has to be 102 | provided here at the time of return 103 | 104 | Param: 105 | session - previous retrieved session in case it was retrieved separately 106 | exception - to send the exception in case any occured - will determine 107 | the commit/rollback result for the session. 
108 | """ 109 | 110 | if session is not None: 111 | try: 112 | if exception is None: 113 | session.commit() 114 | else: 115 | session.rollback() 116 | 117 | except Exception as exc: 118 | LOGGER.exception(exc) 119 | raise exc 120 | 121 | finally: 122 | session.close() 123 | 124 | else: 125 | self.__exit__(exception, None, None) 126 | 127 | def __enter__(self): 128 | 129 | """Yelds a new or current thread's DB session.""" 130 | thread_id = threading.current_thread().name 131 | 132 | thread_lock = self.__locks.get(thread_id, None) 133 | if not thread_lock: 134 | with self.__lock: 135 | thread_lock = self.__locks.get(thread_id, None) 136 | if not thread_lock: 137 | thread_lock = threading.Lock() 138 | self.__locks[thread_id] = thread_lock 139 | 140 | thread_lock.acquire() 141 | try: 142 | 143 | if not thread_id in self.__thread_sessions: 144 | self.__thread_sessions[thread_id] = {} 145 | self.__thread_sessions[thread_id]['flow_level'] = 0 146 | 147 | if self.__thread_sessions[thread_id]['flow_level'] <= 0: 148 | self.__thread_sessions[thread_id]['flow_level'] = 0 149 | self.__thread_sessions[thread_id]['session'] = self.__db_session_cls() 150 | 151 | self.__thread_sessions[thread_id]['flow_level'] += 1 152 | 153 | return self.__thread_sessions[thread_id]['session'] 154 | finally: 155 | thread_lock.release() 156 | 157 | def __exit__(self, exc_type, exc_value, traceback): 158 | 159 | """Takes care for previously yelded db session for transaction 160 | commit/roollback and closure.""" 161 | 162 | thread_id = threading.current_thread().name 163 | 164 | thread_lock = self.__locks.get(thread_id, None) 165 | if not thread_lock: 166 | with self.__lock: 167 | thread_lock = self.__locks.get(thread_id, None) 168 | if not thread_lock: 169 | thread_lock = threading.Lock() 170 | self.__locks[thread_id] = thread_lock 171 | 172 | exc, session = None, None 173 | thread_lock.acquire() 174 | try: 175 | if self.__thread_sessions[thread_id]['flow_level'] > 0: 176 | 
self.__thread_sessions[thread_id]['flow_level'] -= 1 177 | 178 | if self.__thread_sessions[thread_id]['flow_level'] <= 0 or exc_type: 179 | self.__thread_sessions[thread_id]['flow_level'] = 0 180 | try: 181 | session = self.__thread_sessions[thread_id]['session'] 182 | if exc_type is None: 183 | session.commit() 184 | else: 185 | session.rollback() 186 | 187 | except Exception as err: 188 | exc = err 189 | 190 | finally: 191 | if session: session.close() 192 | finally: 193 | thread_lock.release() 194 | if exc: 195 | LOGGER.exception(exc) 196 | raise exc 197 | 198 | -------------------------------------------------------------------------------- /src/db/schema.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | #------------------------------------------------------------------------------ 3 | # Shell script to facilitate the alembic scripts creation/execution/updates etc. 4 | # Run this script with --help or -h to get description and how to use. 5 | # 6 | # Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved. 7 | #------------------------------------------------------------------------------ 8 | usage() 9 | { 10 | SYNOPSYS="Usage: $(basename $0) [-h|--help] [-s|--sql] [-m|--message MESSAGE] [-d|--delete] [-p|--pattern PATTERN] [-r|--revision REV] action" 11 | FULL_TEXT=" 12 | 13 | Where: 14 | 15 | action - specify what kind of action is expected to be done 16 | cr[eate] - create all db related objects as per schema definition from DB. 17 | d[iff] - show if there is any difference between current repository schema definition and current DB. 18 | l[ist] - list all DB schema changes registered so far. 19 | h[ead] - apply all needed schema changes to DB to bring it in sync with repository. 20 | u[grade] - apply all needed schema changes to DB to bring it in sync with repository up to specified revision. 21 | --revision is mandatory in this case. 
22 | [down]g[rade] - remove all schema changes from DB to bring it in sync with repository down to specified revision. 23 | --revision option is mandatory in this case. 24 | rev[ision] - create a new revision for current existing differences between repository and DB. 25 | --message option is mandatory if action specified as revision. 26 | ref[erence] - load reference data from json (default) or csv files into its corresponding tables. 27 | s[ync] - validates list of Action and Resources defined in the code with DB and synchronizes 28 | Actions/Resources to Role mappings with DB. (action valid for auth schema only) 29 | 30 | Options: 31 | -h | --help - will display this help 32 | -s | --sql - cause to printout SQL statements instead of actual action 33 | this option is relevant only for actions: list, head, upgrade, downgrade 34 | -m | --message - specifies the revision message for a new revision which is being created 35 | this option is mandatory and relevant only for action: revision 36 | -d | --delete - causes to delete first record from reference tables 37 | this option is relevant only for action: reference 38 | -p | --pattern - specifies pattern to seach for csv files to load into reference tables 39 | this option is relevant only for action: reference 40 | -r | --revision - specifies DB revision change number that should be applied to DB 41 | this option is relevant only for actions: upgrade and downgrade 42 | " 43 | echo "" 44 | echo "$SYNOPSYS" 45 | [ -n "$1" ] && echo "$FULL_TEXT" 46 | 47 | } 48 | #-------------------------------------------------------------- 49 | function getOptions 50 | #-------------------------------------------------------------- 51 | { 52 | CNT=0 53 | while [ -n "$1" ]; 54 | do 55 | OPT="$1" 56 | case ${OPT} in 57 | -s|--sql) 58 | let CNT+=1 59 | PRINT_SQL="--sql" 60 | ;; 61 | -m|--message) 62 | shift && let CNT+=1 63 | [ -n "$1" ] && REV_MSG="$1" && let CNT+=1 64 | ;; 65 | -d|--delete) 66 | let CNT+=1 67 | CSV_DEL_OPT="-d" 68 | 
;; 
69 |             -p|--pattern) 
70 |                 shift && let CNT+=1
71 |                 [ -n "$1" ] && CSV_PATT="$1" && let CNT+=1
72 |                 ;; 
73 |             -r|--revision) 
74 |                 shift && let CNT+=1
75 |                 [ -n "$1" ] && REV="$1" && let CNT+=1
76 |                 ;; 
77 |             -h|--help) 
78 |                 usage long && exit 0
79 |                 ;; 
80 |             -*) 
81 |                 echo "Invalid option: $OPT" >&2; usage >&2; exit 1
82 |                 ;; 
83 |         esac
84 |         shift
85 |     done
86 |     return $CNT
87 | }
88 | 
89 | 
90 | #--------------------------------------------------------------------------------------------------
91 | # Parse input arguments if any
92 | #--------------------------------------------------------------------------------------------------
93 | getOptions "$@"
94 | shift $?
95 | 
96 | #------------------------------------------------------------------------------
97 | actions="$@"
98 | if [ -z "$actions" ]; then
99 |     actions=head
100 | fi
101 | 
102 | #------------------------------------------------------------------------------
103 | # change current path from where this script is run
104 | cd $(dirname ${0})
105 | 
106 | #--------------------------------------------------------------------------
107 | .
${FCDB_HOME}/config/setenv.sh > /dev/null 108 | for action in $(echo $actions | sed 's/[,|:;.]/ /g') 109 | do 110 | # strip any extra characters from the actual db name 111 | case $action in 112 | cr|create) python3 create_schema.py* || exit 10 ;; 113 | d|diff) python3 create_schema.py* -d || exit 11 ;; 114 | h|head) alembic -c alembic.ini upgrade head $PRINT_SQL || exit 3 ;; 115 | u|upgrade) alembic -c alembic.ini upgrade "${REV}" $PRINT_SQL || exit 4 ;; 116 | g|downgrade) alembic -c alembic.ini downgrade "${REV}" $PRINT_SQL || exit 5 ;; 117 | l|list) alembic -c alembic.ini history $PRINT_SQL || exit 6 ;; 118 | rev|revision) alembic -c alembic.ini revision --autogenerate -m "${REV_MSG}" || exit 7 ;; 119 | *) echo "Unknow action:$action" && usage ;; 120 | esac 121 | done -------------------------------------------------------------------------------- /src/db/scripts/env.py: -------------------------------------------------------------------------------- 1 | """ 2 | Alembic related code 3 | 4 | Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved. 5 | """ 6 | 7 | from __future__ import with_statement 8 | from alembic import context 9 | import logging 10 | import db.tables as tables 11 | import generic 12 | 13 | logger = logging.getLogger('alembic') 14 | appctx = generic.ApplicationContext.instance() 15 | 16 | # this is the Alembic Config object, which provides 17 | # access to the values within the .ini file in use. 18 | config = context.config 19 | 20 | def run_migrations_offline(): 21 | """Run migrations in 'offline' mode. 22 | 23 | This configures the context with just a URL 24 | and not an Engine, though an Engine is acceptable 25 | here as well. By skipping the Engine creation 26 | we don't even need a DBAPI to be available. 27 | 28 | Calls to context.execute() here emit the given string to the 29 | script output. 
30 | 31 | """ 32 | url = appctx.config.get_param('db.dsn','driver://user:pass@host/db') 33 | context.configure(url=url, target_metadata=tables.metadata, literal_binds=True, compare_type=True) 34 | 35 | with context.begin_transaction(): 36 | context.run_migrations() 37 | 38 | 39 | def run_migrations_online(): 40 | """Run migrations in 'online' mode. 41 | 42 | In this scenario we need to create an Engine 43 | and associate a connection with the context. 44 | 45 | """ 46 | connectable = appctx.db.engine 47 | 48 | with connectable.connect() as connection: 49 | context.configure( 50 | connection=connection, 51 | target_metadata=tables.metadata, 52 | include_schemas=True, 53 | compare_type=True 54 | ) 55 | 56 | with context.begin_transaction(): 57 | context.run_migrations() 58 | 59 | if context.is_offline_mode(): 60 | run_migrations_offline() 61 | else: 62 | run_migrations_online() 63 | -------------------------------------------------------------------------------- /src/db/scripts/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /src/db/scripts/versions/03f01bf9a755_add_vuln_cert_table.py: -------------------------------------------------------------------------------- 1 | """add vuln_cert table 2 | 3 | Revision ID: 03f01bf9a755 4 | Revises: d4df74b54307 5 | Create Date: 2023-01-26 15:43:27.914229 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '03f01bf9a755' 14 | down_revision = 'd4df74b54307' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.create_table('vuln_cpes', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('vuln_id', sa.String(length=20), nullable=True), 24 | sa.Column('cpe', sa.String(length=256), nullable=True), 25 | sa.Column('sys_creation_date', sa.DateTime(), server_default=sa.text('current_timestamp'), nullable=False), 26 | sa.Column('cond', sa.String(length=10), nullable=False, comment='The condition between the records of the same group AND/OR'), 27 | sa.Column('negate', sa.Boolean(), nullable=True, comment='Indicate if the condition result is to be negated'), 28 | sa.Column('vulnerable', sa.Boolean(), nullable=True, comment='Indicate if the specified CPE is vulnerable'), 29 | sa.Column('group_id', sa.Integer(), nullable=True), 30 | sa.Column('parent_group_id', sa.Integer(), nullable=False), 31 | sa.Column('part', sa.String(length=1), nullable=True, comment='the part of CPE'), 32 | sa.Column('vendor', sa.String(length=128), nullable=True, comment='the vendor of CPE'), 33 | sa.Column('product', sa.String(length=128), nullable=True, comment='the product of CPE'), 34 | sa.Column('version', sa.String(length=128), nullable=True, comment='the version of CPE'), 35 | sa.Column('version_lt', sa.String(length=128), nullable=True, comment='the little version of CPE'), 36 | sa.Column('version_le', sa.String(length=128), nullable=True, comment='the little or equal version of CPE'), 37 | sa.Column('version_gt', sa.String(length=128), nullable=True, comment='the greater version of CPE'), 38 | sa.Column('version_ge', sa.String(length=128), nullable=True, comment='the greater or equal version of CPE'), 39 | sa.Column('update', sa.String(length=128), nullable=True, comment='the update of CPE'), 40 | sa.Column('edition', sa.String(length=128), nullable=True, comment='the edition of CPE'), 41 | sa.Column('language', sa.String(length=128), nullable=True, comment='the language of CPE'), 42 | sa.Column('sw_edition', sa.String(length=128), nullable=True, comment='the sw_edition 
of CPE'), 43 | sa.Column('target_sw', sa.String(length=128), nullable=True, comment='the target_sw of CPE'), 44 | sa.Column('target_hw', sa.String(length=128), nullable=True, comment='the target_hw of CPE'), 45 | sa.PrimaryKeyConstraint('id'), 46 | comment='Table that contains the list of Vulnerabilities' 47 | ) 48 | op.create_index(op.f('ix_vuln_cpes_vuln_id'), 'vuln_cpes', ['vuln_id'], unique=False) 49 | op.create_index('vuln_cpe_idx1', 'vuln_cpes', ['product', 'version', 'vendor'], unique=False) 50 | op.create_unique_constraint('vuln_uix_1', 'vuln', ['vuln_id']) 51 | op.create_table_comment( 52 | 'vuln', 53 | 'Table that contains the list of Vulnerabilities', 54 | existing_comment=None, 55 | schema=None 56 | ) 57 | # ### end Alembic commands ### 58 | 59 | 60 | def downgrade(): 61 | # ### commands auto generated by Alembic - please adjust! ### 62 | op.drop_table_comment( 63 | 'vuln', 64 | existing_comment='Table that contains the list of Vulnerabilities', 65 | schema=None 66 | ) 67 | op.drop_constraint('vuln_uix_1', 'vuln', type_='unique') 68 | op.drop_index('vuln_cpe_idx1', table_name='vuln_cpes') 69 | op.drop_index(op.f('ix_vuln_cpes_vuln_id'), table_name='vuln_cpes') 70 | op.drop_table('vuln_cpes') 71 | # ### end Alembic commands ### 72 | -------------------------------------------------------------------------------- /src/db/scripts/versions/2f14a6a5afe8_add_capec_table.py: -------------------------------------------------------------------------------- 1 | """Add CAPEC table 2 | 3 | Revision ID: 2f14a6a5afe8 4 | Revises: c1f79cef457f 5 | Create Date: 2023-03-01 17:35:13.831936 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '2f14a6a5afe8' 14 | down_revision = 'c1f79cef457f' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.create_table('capec', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('capec_id', sa.Integer(), nullable=False, comment='The ID of the CAPEC'), 24 | sa.Column('name', sa.String(length=256), nullable=False, comment='the name of the CAPEC'), 25 | sa.Column('status', sa.String(length=128), nullable=True, comment='the status of the CAPEC'), 26 | sa.Column('description', sa.Text(), nullable=True, comment='the description of the CAPEC'), 27 | sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True, comment='CAPEC JSON representation'), 28 | sa.PrimaryKeyConstraint('id'), 29 | comment='Table that contains the list of CWEs' 30 | ) 31 | op.create_index('capec_idx1', 'capec', ['name'], unique=False) 32 | op.create_index('capec_idx2', 'capec', ['description'], unique=False) 33 | op.create_index(op.f('ix_capec_capec_id'), 'capec', ['capec_id'], unique=False) 34 | op.alter_column('cwe', 'data', 35 | existing_type=postgresql.JSONB(astext_type=sa.Text()), 36 | comment='CWE JSON representation', 37 | existing_comment='CPE JSON representation', 38 | existing_nullable=True) 39 | # ### end Alembic commands ### 40 | 41 | 42 | def downgrade(): 43 | # ### commands auto generated by Alembic - please adjust! 
### 44 | op.alter_column('cwe', 'data', 45 | existing_type=postgresql.JSONB(astext_type=sa.Text()), 46 | comment='CPE JSON representation', 47 | existing_comment='CWE JSON representation', 48 | existing_nullable=True) 49 | op.drop_index(op.f('ix_capec_capec_id'), table_name='capec') 50 | op.drop_index('capec_idx2', table_name='capec') 51 | op.drop_index('capec_idx1', table_name='capec') 52 | op.drop_table('capec') 53 | # ### end Alembic commands ### 54 | -------------------------------------------------------------------------------- /src/db/scripts/versions/892034da2349_custom_method.py: -------------------------------------------------------------------------------- 1 | """add custom function 2 | 3 | Revision ID: 892034da2349 4 | Revises: 03f01bf9a755 5 | Create Date: 2023-01-29 15:43:27.914229 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '892034da2349' 14 | down_revision = '03f01bf9a755' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.execute(""" 22 | CREATE OR REPLACE FUNCTION ver_pad(text, integer) 23 | RETURNS text 24 | AS $$ 25 | DECLARE 26 | input_string text := $1; 27 | max_length integer := $2; 28 | output_string text; 29 | parts text[]; 30 | i integer; 31 | BEGIN 32 | parts := string_to_array(input_string, '.'); 33 | output_string := ''; 34 | FOR i in 1..array_length(parts, 1) 35 | LOOP 36 | output_string := output_string || lpad(parts[i], max_length, '0') || '.'; 37 | END LOOP; 38 | output_string := substring(output_string, 1, length(output_string) - 1); 39 | RETURN output_string; 40 | END; 41 | $$ LANGUAGE plpgsql; 42 | """) 43 | # ### end Alembic commands ### 44 | 45 | 46 | def downgrade(): 47 | # ### commands auto generated by Alembic - please adjust! 
### 48 | op.execute("DROP FUNCTION ver_ad(text, integer);") 49 | # ### end Alembic commands ### 50 | -------------------------------------------------------------------------------- /src/db/scripts/versions/9a14a98e9e6d_add_epss_table.py: -------------------------------------------------------------------------------- 1 | """Add EPSS table 2 | 3 | Revision ID: 9a14a98e9e6d 4 | Revises: 2f14a6a5afe8 5 | Create Date: 2024-03-12 13:30:54.706771 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '9a14a98e9e6d' 14 | down_revision = '2f14a6a5afe8' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('epss', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('cve_id', sa.String(length=20), nullable=True, comment='The ID of the CVE'), 24 | sa.Column('epss_score', sa.Float(), nullable=False, comment='the score of the epss'), 25 | sa.Column('percentile', sa.Float(), nullable=False, comment='the percentile of the epss'), 26 | sa.Column('date', sa.DateTime(), nullable=False, comment='Date when the EPSS record has been downloaed'), 27 | sa.Column('changed', sa.Boolean(), nullable=True, comment='indicate if epss_score has been changed'), 28 | sa.PrimaryKeyConstraint('id'), 29 | comment='Table that contains the list of EPSS' 30 | ) 31 | op.create_index('epss_idx1', 'epss', ['cve_id'], unique=False) 32 | op.create_index(op.f('ix_epss_cve_id'), 'epss', ['cve_id'], unique=False) 33 | # ### end Alembic commands ### 34 | 35 | 36 | def downgrade(): 37 | # ### commands auto generated by Alembic - please adjust! 
### 38 | op.drop_index(op.f('ix_epss_cve_id'), table_name='epss') 39 | op.drop_index('epss_idx1', table_name='epss') 40 | op.drop_table('epss') 41 | # ### end Alembic commands ### 42 | 43 | -------------------------------------------------------------------------------- /src/db/scripts/versions/c1f79cef457f_cwe_table.py: -------------------------------------------------------------------------------- 1 | """CWE table 2 | 3 | Revision ID: c1f79cef457f 4 | Revises: 892034da2349 5 | Create Date: 2023-02-03 14:29:29.393104 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'c1f79cef457f' 14 | down_revision = '892034da2349' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('cwe', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('cwe_id', sa.Integer(), nullable=False, comment='The ID of the CWE'), 24 | sa.Column('name', sa.String(length=256), nullable=False, comment='the name of the CWE'), 25 | sa.Column('status', sa.String(length=128), nullable=True, comment='the status of the CWE'), 26 | sa.Column('description', sa.Text(), nullable=True, comment='the description of the CWE'), 27 | sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True, comment='CPE JSON representation'), 28 | sa.PrimaryKeyConstraint('id'), 29 | comment='Table that contains the list of CWEs' 30 | ) 31 | #op.create_index('cwe_idx1', 'cwe', ['name'], unique=False) 32 | op.execute('CREATE INDEX cwe_idx1 on cwe (name varchar_pattern_ops)') 33 | op.execute('CREATE INDEX cwe_idx2 ON cwe (description text_pattern_ops);') 34 | op.create_index(op.f('ix_cwe_cwe_id'), 'cwe', ['cwe_id'], unique=False) 35 | op.execute('CREATE INDEX cpe_idx2 ON cpe (title_en text_pattern_ops);') 36 | op.add_column('vuln', sa.Column('description', 
sa.Text(), nullable=True, comment='the description of the CVE')) 37 | op.execute('CREATE INDEX vuln_idx1 ON vuln (description text_pattern_ops);') 38 | # ### end Alembic commands ### 39 | 40 | 41 | def downgrade(): 42 | # ### commands auto generated by Alembic - please adjust! ### 43 | op.drop_index('vuln_idx1', table_name='vuln') 44 | op.drop_column('vuln', 'description') 45 | op.drop_index('cpe_idx2', table_name='cpe') 46 | op.drop_index(op.f('ix_cwe_cwe_id'), table_name='cwe') 47 | op.drop_index('cwe_idx2', table_name='cwe') 48 | op.drop_index('cwe_idx1', table_name='cwe') 49 | op.drop_table('cwe') 50 | # ### end Alembic commands ### 51 | -------------------------------------------------------------------------------- /src/db/scripts/versions/d4df74b54307_initial_setup.py: -------------------------------------------------------------------------------- 1 | """Initial setup 2 | 3 | Revision ID: d4df74b54307 4 | Revises: 5 | Create Date: 2023-01-25 12:21:49.285707 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import postgresql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'd4df74b54307' 14 | down_revision = None 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.create_table('cpe', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('name', sa.String(length=256), nullable=False, comment='the name of the CPE'), 24 | sa.Column('name_id', sa.String(length=256), nullable=False, comment='the name ID of the CPE'), 25 | sa.Column('title_en', sa.String(length=512), nullable=True, comment='the English title of the CPE'), 26 | sa.Column('sys_creation_date', sa.DateTime(), server_default=sa.text('current_timestamp'), nullable=False), 27 | sa.Column('created', sa.DateTime(), nullable=False, comment='Date when the CPE record was created.'), 28 | sa.Column('last_modified_date', sa.DateTime(), nullable=False, comment='Date when the CPE record was modified.'), 29 | sa.Column('part', sa.String(length=1), nullable=False, comment='the part of CPE'), 30 | sa.Column('vendor', sa.String(length=128), nullable=False, comment='the vendor of CPE'), 31 | sa.Column('product', sa.String(length=128), nullable=False, comment='the product of CPE'), 32 | sa.Column('version', sa.String(length=128), nullable=False, comment='the version of CPE'), 33 | sa.Column('update', sa.String(length=128), nullable=True, comment='the update of CPE'), 34 | sa.Column('edition', sa.String(length=128), nullable=True, comment='the edition of CPE'), 35 | sa.Column('language', sa.String(length=128), nullable=True, comment='the language of CPE'), 36 | sa.Column('sw_edition', sa.String(length=128), nullable=True, comment='the sw_edition of CPE'), 37 | sa.Column('target_sw', sa.String(length=128), nullable=True, comment='the target_sw of CPE'), 38 | sa.Column('target_hw', sa.String(length=128), nullable=True, comment='the target_hw of CPE'), 39 | sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True, comment='CPE JSON representation'), 40 | sa.PrimaryKeyConstraint('id'), 41 | sa.UniqueConstraint('name', name='cpe_uix_1'), 42 | comment='Table that contains the list of CPEs' 43 | ) 44 | op.create_index('cpe_idx1', 'cpe', ['product', 
'version', 'vendor'], unique=False) 45 | op.create_index(op.f('ix_cpe_created'), 'cpe', ['created'], unique=False) 46 | op.create_index(op.f('ix_cpe_last_modified_date'), 'cpe', ['last_modified_date'], unique=False) 47 | op.create_index(op.f('ix_cpe_title_en'), 'cpe', ['title_en'], unique=False) 48 | op.create_table('fetch_status', 49 | sa.Column('id', sa.Integer(), nullable=False), 50 | sa.Column('name', sa.String(length=40), nullable=False, comment='the name of the status'), 51 | sa.Column('sys_creation_date', sa.DateTime(), server_default=sa.text('current_timestamp'), nullable=False), 52 | sa.Column('last_modified_date', sa.DateTime(), nullable=False, comment='Date when record was modified.'), 53 | sa.Column('stats', postgresql.JSONB(astext_type=sa.Text()), nullable=True, comment='JSON column for additional details'), 54 | sa.PrimaryKeyConstraint('id'), 55 | sa.UniqueConstraint('name', name='fetch_status_uix_1'), 56 | comment='Table that contains the fetch status for different keys' 57 | ) 58 | op.create_table('vuln', 59 | sa.Column('id', sa.Integer(), nullable=False), 60 | sa.Column('vuln_id', sa.String(length=20), nullable=True), 61 | sa.Column('sys_creation_date', sa.DateTime(), server_default=sa.text('current_timestamp'), nullable=False), 62 | sa.Column('published_date', sa.DateTime(), nullable=False, comment='Date when record published.'), 63 | sa.Column('last_modified_date', sa.DateTime(), nullable=False, comment='Date when record was modified.'), 64 | sa.Column('source', sa.String(length=100), nullable=True, comment='the source of the vulnerability'), 65 | sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True, comment='Vuln JSON representation'), 66 | sa.PrimaryKeyConstraint('id') 67 | ) 68 | op.create_index(op.f('ix_vuln_last_modified_date'), 'vuln', ['last_modified_date'], unique=False) 69 | op.create_index(op.f('ix_vuln_published_date'), 'vuln', ['published_date'], unique=False) 70 | op.create_index(op.f('ix_vuln_vuln_id'), 'vuln', 
['vuln_id'], unique=False) 71 | # ### end Alembic commands ### 72 | 73 | 74 | def downgrade(): 75 | # ### commands auto generated by Alembic - please adjust! ### 76 | op.drop_index(op.f('ix_vuln_vuln_id'), table_name='vuln') 77 | op.drop_index(op.f('ix_vuln_published_date'), table_name='vuln') 78 | op.drop_index(op.f('ix_vuln_last_modified_date'), table_name='vuln') 79 | op.drop_table('vuln') 80 | op.drop_table('fetch_status') 81 | op.drop_index(op.f('ix_cpe_title_en'), table_name='cpe') 82 | op.drop_index(op.f('ix_cpe_last_modified_date'), table_name='cpe') 83 | op.drop_index(op.f('ix_cpe_created'), table_name='cpe') 84 | op.drop_index('cpe_idx1', table_name='cpe') 85 | op.drop_table('cpe') 86 | # ### end Alembic commands ### 87 | -------------------------------------------------------------------------------- /src/db/scripts/versions/ecd29e77afe3_change_vuln_description_index_type.py: -------------------------------------------------------------------------------- 1 | """Change vuln description index type 2 | 3 | Revision ID: ecd29e77afe3 4 | Revises: 9a14a98e9e6d 5 | Create Date: 2024-04-29 10:10:39.660073 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'ecd29e77afe3' 14 | down_revision = '9a14a98e9e6d' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.drop_index('vuln_idx1', table_name='vuln') 22 | op.execute('CREATE INDEX vuln_idx1 ON vuln USING GIST (to_tsvector(\'english\', description));') 23 | # ### end Alembic commands ### 24 | 25 | 26 | def downgrade(): 27 | # ### commands auto generated by Alembic - please adjust! 
### 28 | op.drop_index('vuln_idx1', table_name='vuln') 29 | op.execute('CREATE INDEX vuln_idx1 ON vuln (description text_pattern_ops);') 30 | # ### end Alembic commands ### 31 | -------------------------------------------------------------------------------- /src/db/setup_db.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Description: Should be used in order to setup postgres DB, Users, Schemas and access rights. 3 | # Assumptions: 1) ${FCDB_HOME}/config/setenv.sh is executed first before executing this script 4 | # 5 | # Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved. 6 | #-------------------------------------------------------------------------------------------------- 7 | create_db() 8 | #-------------------------------------------------------------------------------------------------- 9 | { 10 | DB=$1 11 | DB_OWNER=$2 12 | DB_CNT=$(echo "select count(1) from pg_database where datname = '$DB';" | psql -U postgres -t) 13 | [[ $? -ne 0 ]] && echo "Error executing postgres while creating DB $DB" && exit 1 14 | if [[ $DB_CNT -eq 0 ]]; then 15 | echo "Creating database $DB" 16 | echo -e "\\set ON_ERROR_STOP on\ncreate database $DB;" | psql -U postgres -t 17 | [[ $? -ne 0 ]] && echo "Error creating db $DB" && exit 1 18 | echo -e "\\set ON_ERROR_STOP on\nalter database $DB owner to $DB_OWNER;" | psql -U postgres -t 19 | [[ $? 
-ne 0 ]] && echo "Error alter db $DB owner to $DB_OWNER" && exit 1 20 | 21 | else 22 | echo "DB $DB already exists" 23 | fi 24 | } 25 | 26 | #-------------------------------------------------------------------------------------------------- 27 | create_user() 28 | #-------------------------------------------------------------------------------------------------- 29 | { 30 | 31 | DB_USER=$1 32 | DB_USER_ROLES=$2 33 | DB_USER_PASS=$3 34 | 35 | DB_USER_CNT=$(echo -e "\\set ON_ERROR_STOP on\nselect count(1) from pg_user where usename = '$DB_USER';" | psql -U postgres -t) 36 | [[ $? -ne 0 ]] && echo "Error executing postgres" && exit 1 37 | 38 | if [[ $DB_USER_CNT -eq 0 ]]; then 39 | echo "Creating user $DB_USER" 40 | if [[ -z $DB_USER_PASS ]]; then 41 | echo -e "\\set ON_ERROR_STOP on\ncreate user $DB_USER $DB_USER_ROLES;" | psql -U postgres -t 42 | else 43 | echo -e "\\set ON_ERROR_STOP on\ncreate user $DB_USER $DB_USER_ROLES password '$DB_USER_PASS';" | psql -U postgres -t 44 | fi 45 | [[ $? -ne 0 ]] && echo "Error creating user $DB_USER" && exit 1 46 | else 47 | echo "User $DB_USER already exists" 48 | fi 49 | } 50 | 51 | #-------------------------------------------------------------------------------------------------- 52 | # user creation 53 | create_user ${FCDB_USER} "superuser" ${FCDB_PASS:-default$RANDOM} 54 | 55 | #-------------------------------------------------------------------------------------------------- 56 | # DB creation 57 | create_db ${FCDB_NAME} ${FCDB_USER} 58 | 59 | exit 0 -------------------------------------------------------------------------------- /src/db/tables.py: -------------------------------------------------------------------------------- 1 | """ 2 | DB Schema (Tables and Indexes) definitions 3 | 4 | Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved. 
# coding: utf-8
from sqlalchemy import (Column, DateTime, Integer, String, Boolean, Float, text,
                        UniqueConstraint, Index, Text)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.dialects.postgresql.json import JSONB

Base = declarative_base()
metadata = Base.metadata


# ------------------------------------------------------------------------------
# Definition of application tables
# ------------------------------------------------------------------------------
class FetchStatus(Base):
    """Per-source fetch bookkeeping: one row per named data feed/key."""

    __tablename__ = 'fetch_status'
    __table_args__ = (
        UniqueConstraint('name', name='fetch_status_uix_1'),
        {u'comment': u'Table that contains the fetch status for different keys'}
    )
    id = Column(Integer, primary_key=True)
    name = Column(String(40), nullable=False, comment=u"the name of the status")
    sys_creation_date = Column(DateTime, nullable=False, server_default=text('current_timestamp'))
    last_modified_date = Column(DateTime, nullable=False, comment=u'Date when record was modified.')
    stats = Column(JSONB, comment=u'JSON column for additional details')


# ------------------------------------------------------------------------------
class Vuln(Base):
    """One row per vulnerability (CVE); the raw JSON payload is kept in ``data``."""

    __tablename__ = 'vuln'
    __table_args__ = (
        UniqueConstraint('vuln_id', name='vuln_uix_1'),
        # GiST index on the free-text description for text search.
        # NOTE(review): a later migration appears to rebuild vuln_idx1 with
        # text_pattern_ops — confirm against the alembic versions.
        Index('vuln_idx1', 'description', postgresql_using='gist'),
        {u'comment': u'Table that contains the list of Vulnerabilities'}
    )

    id = Column(Integer, primary_key=True)
    vuln_id = Column(String(20), index=True)
    sys_creation_date = Column(DateTime, nullable=False, server_default=text('current_timestamp'))
    published_date = Column(DateTime, index=True, nullable=False, comment=u'Date when record published.')
    last_modified_date = Column(DateTime, index=True, nullable=False, comment=u'Date when record was modified.')
    source = Column(String(100), comment=u"the source of the vulnerability")
    description = Column(Text, comment=u"the description of the CVE")
    data = Column(JSONB, comment=u'Vuln JSON representation')


# ------------------------------------------------------------------------------
class VulnCpes(Base):
    """CPE match conditions per vulnerability, organized as an AND/OR group tree."""

    __tablename__ = 'vuln_cpes'
    __table_args__ = (
        Index('vuln_cpe_idx1', 'product', 'version', 'vendor', ),
        # FIX: the table comment was copy-pasted from the vuln table.
        {u'comment': u'Table that contains the CPE match conditions of the Vulnerabilities'}
    )

    id = Column(Integer, primary_key=True)
    vuln_id = Column(String(20), index=True)
    cpe = Column(String(256), nullable=True)
    sys_creation_date = Column(DateTime, nullable=False, server_default=text('current_timestamp'))
    cond = Column(String(10), nullable=False, comment=u"The condition between the records of the same group AND/OR")
    negate = Column(Boolean, default=False, comment=u'Indicate if the condition result is to be negated')
    vulnerable = Column(Boolean, comment=u'Indicate if the specified CPE is vulnerable')
    group_id = Column(Integer)
    parent_group_id = Column(Integer, nullable=False)
    part = Column(String(1), comment=u"the part of CPE")
    vendor = Column(String(128), comment=u"the vendor of CPE")
    product = Column(String(128), comment=u"the product of CPE")
    version = Column(String(128), comment=u"the version of CPE")
    # FIX: the four range-bound comments said "little/greater version" — they are
    # the exclusive/inclusive bounds of the vulnerable version range.
    version_lt = Column(String(128), comment=u"the 'version less than' bound of the CPE range")
    version_le = Column(String(128), comment=u"the 'version less than or equal' bound of the CPE range")
    version_gt = Column(String(128), comment=u"the 'version greater than' bound of the CPE range")
    version_ge = Column(String(128), comment=u"the 'version greater than or equal' bound of the CPE range")
    update = Column(String(128), comment=u"the update of CPE")
    edition = Column(String(128), comment=u"the edition of CPE")
    language = Column(String(128), comment=u"the language of CPE")
    sw_edition = Column(String(128), comment=u"the sw_edition of CPE")
    target_sw = Column(String(128), comment=u"the target_sw of CPE")
    target_hw = Column(String(128), comment=u"the target_hw of CPE")


# ------------------------------------------------------------------------------
class Cpe(Base):
    """CPE dictionary entry (one row per known CPE name)."""

    __tablename__ = 'cpe'
    __table_args__ = (
        UniqueConstraint('name', name='cpe_uix_1'),
        Index('cpe_idx1', 'product', 'version', 'vendor'),
        Index('cpe_idx2', 'title_en'),
        {u'comment': u'Table that contains the list of CPEs'}
    )

    id = Column(Integer, primary_key=True)
    name = Column(String(256), nullable=False, comment=u"the name of the CPE")
    name_id = Column(String(256), nullable=False, comment=u"the name ID of the CPE")
    title_en = Column(String(512), index=True, comment=u"the English title of the CPE")
    sys_creation_date = Column(DateTime, nullable=False, server_default=text('current_timestamp'))
    created = Column(DateTime, index=True, nullable=False, comment=u'Date when the CPE record was created.')
    last_modified_date = Column(DateTime, index=True, nullable=False, comment=u'Date when the CPE record was modified.')
    part = Column(String(1), nullable=False, comment=u"the part of CPE")
    vendor = Column(String(128), nullable=False, comment=u"the vendor of CPE")
    product = Column(String(128), nullable=False, comment=u"the product of CPE")
    version = Column(String(128), nullable=False, comment=u"the version of CPE")
    update = Column(String(128), comment=u"the update of CPE")
    edition = Column(String(128), comment=u"the edition of CPE")
    language = Column(String(128), comment=u"the language of CPE")
    sw_edition = Column(String(128), comment=u"the sw_edition of CPE")
    target_sw = Column(String(128), comment=u"the target_sw of CPE")
    target_hw = Column(String(128), comment=u"the target_hw of CPE")
    data = Column(JSONB, comment=u'CPE JSON representation')
# ------------------------------------------------------------------------------
class Cwe(Base):
    """One row per CWE (Common Weakness Enumeration) entry."""

    __tablename__ = 'cwe'
    __table_args__ = (
        Index('cwe_idx1', 'name'),
        Index('cwe_idx2', 'description'),
        {u'comment': u'Table that contains the list of CWEs'}
    )

    id = Column(Integer, primary_key=True)
    cwe_id = Column(Integer, nullable=False, index=True, comment=u'The ID of the CWE')
    name = Column(String(256), nullable=False, comment=u"the name of the CWE")
    status = Column(String(128), comment=u"the status of the CWE")
    description = Column(Text, comment=u"the description of the CWE")
    data = Column(JSONB, comment=u'CWE JSON representation')


# ------------------------------------------------------------------------------
class Capec(Base):
    """One row per CAPEC (Common Attack Pattern Enumeration) entry."""

    __tablename__ = 'capec'
    __table_args__ = (
        Index('capec_idx1', 'name'),
        Index('capec_idx2', 'description'),
        # FIX: the table comment said "list of CWEs" (copy-paste from the cwe table).
        {u'comment': u'Table that contains the list of CAPECs'}
    )

    id = Column(Integer, primary_key=True)
    capec_id = Column(Integer, nullable=False, index=True, comment=u'The ID of the CAPEC')
    name = Column(String(256), nullable=False, comment=u"the name of the CAPEC")
    status = Column(String(128), comment=u"the status of the CAPEC")
    description = Column(Text, comment=u"the description of the CAPEC")
    data = Column(JSONB, comment=u'CAPEC JSON representation')


# ------------------------------------------------------------------------------
class Epss(Base):
    """EPSS score snapshot per CVE (one row per CVE per download)."""

    __tablename__ = 'epss'
    __table_args__ = (
        Index('epss_idx1', 'cve_id'),
        {u'comment': u'Table that contains the list of EPSS'}
    )

    id = Column(Integer, primary_key=True)
    cve_id = Column(String(20), index=True, comment=u'The ID of the CVE')
    epss_score = Column(Float, nullable=False, comment=u"the score of the epss")
    percentile = Column(Float, nullable=False, comment=u"the percentile of the epss")
    # FIX: typo "downloaed" -> "downloaded" in the column comment.
    date = Column(DateTime, nullable=False, comment=u"Date when the EPSS record has been downloaded")
    changed = Column(Boolean, comment=u'indicate if epss_score has been changed')
23 | 24 | Args: 25 | config_file (str): Configuration file that should be read for config parameters 26 | 27 | Raises: 28 | RuntimeError: when the following will occur: 29 | - when `config_file` cannot be read or accessed 30 | - when `config_file` is not specified 31 | """ 32 | 33 | 34 | self.__prms = {} 35 | self.__cfg = None 36 | self.__filename = None 37 | self.__parse_all_ind = False 38 | # ------------------------------------------------------------------------------------------ 39 | if config_file is None: 40 | raise RuntimeError('Input Configuration file(s) was not specified') 41 | else: 42 | if not os.access(config_file, os.R_OK): 43 | raise RuntimeError('Specified configuration file ' 44 | '<{}> cannot be read.'.format(config_file)) 45 | 46 | # ------------------------------------------------------------------------------------------ 47 | if self.__cfg is None: 48 | self.__cfg = configparser.RawConfigParser() 49 | 50 | try: 51 | self.__cfg.read(config_file) 52 | self.__filename = config_file 53 | except Exception: # pylint: disable=broad-except 54 | raise RuntimeError('Error occured while trying to ' 55 | 'read config file: <{}>'.format(config_file)) 56 | 57 | # ------------------------------------------------------------------------------------------ 58 | includes = self.get_param('include.config_files') 59 | if includes is not None: 60 | for include in includes.split(':'): 61 | if os.access(include, os.R_OK): 62 | try: 63 | self.__cfg.read(include) 64 | except Exception: # pylint: disable=broad-except 65 | LOGGER.error(f'Error while reading include config file <{include}>') 66 | 67 | # ---------------------------------------------------------------------------------------------- 68 | def __parse_all(self): 69 | """ 70 | Parses and evaluate all parameters from input config file. 
71 | """ 72 | # ------------------------------------------------------------------------------------------ 73 | # Parse all config params if it was not yet done 74 | if not self.__parse_all_ind: 75 | 76 | for section in self.__cfg: 77 | for param in self.__cfg[section]: 78 | self.get_param(section + '.' + param) 79 | 80 | self.__parse_all_ind = True 81 | 82 | # ---------------------------------------------------------------------------------------------- 83 | def check_params(self, search_name): 84 | """Returns a list of parameter names which contains ``search_name`` in their name. 85 | 86 | Args: 87 | search_name (str): search all parameters that contain ``search_name`` 88 | """ 89 | self.__parse_all() 90 | return list(filter(lambda x: search_name in x, self.__prms.keys())) 91 | 92 | # ---------------------------------------------------------------------------------------------- 93 | def get_param(self, param_name, default_value=None, iteration=0): 94 | 95 | """Retrieves value of a given parameter name ``param_name`` as input. 96 | 97 | Args: 98 | param_name (str): Name of the parameter i.e. ``section.name.subname``. 99 | default_value (Any): default value to be returned in case ``param_name`` cannot 100 | be found. Optional parameter. 101 | iteration (int): used internally to avoid recurssion. 102 | 103 | Returns: 104 | value of parameter ``param_name`` if found, ``default_value`` otherwise. 105 | 106 | Raises: 107 | RecursionError: When infinite recurssion is identified. 108 | 109 | """ 110 | # ------------------------------------------------------------------------------------------ 111 | # in case such parameter was retrieved previously return it from cache 112 | if param_name in self.__prms: 113 | return self.__prms[param_name] 114 | 115 | # ------------------------------------------------------------------------------------------ 116 | # split name of input parameter by . 
and check if there is such section in config file 117 | param_splt = param_name.split('.', 1) 118 | if param_splt[0] not in self.__cfg: 119 | return default_value 120 | 121 | # ------------------------------------------------------------------------------------------ 122 | else: 123 | 124 | # if Section exists but parameter name doesn't 125 | if param_splt[1] not in self.__cfg[param_splt[0]]: 126 | return default_value 127 | else: 128 | prm_val = self.__cfg[param_splt[0]][param_splt[1]] 129 | 130 | # ---------------------------------------------------------------------------------- 131 | # if we end up with a parameter that in order to resolve its value we need to 132 | # recursively call same method more than 10 times then we throw exception. 133 | iteration += 1 134 | if iteration > 10: 135 | raise RecursionError('Recursion was identified for param: ' 136 | '<{}>'.format(param_name)) 137 | 138 | # ---------------------------------------------------------------------------------- 139 | # lets resolve first the references (if any) to any other configuration parameters 140 | regex = re.compile(r'\${([a-z0-9_\.]+)}') 141 | inner_prms = regex.findall(prm_val) 142 | 143 | for inner_prm in inner_prms: 144 | inner_prm_val = self.get_param(inner_prm, None, iteration) 145 | if inner_prm_val is None: 146 | LOGGER.warning(f'Inner config parameter: <{inner_prm}> is not defined.') 147 | else: 148 | prm_val = prm_val.replace('${' + inner_prm + '}', str(inner_prm_val)) 149 | 150 | # ---------------------------------------------------------------------------------- 151 | # resolve env variables (if any ) 152 | regex = re.compile(r'\${([^}]+)}') 153 | inner_prms = regex.findall(prm_val) 154 | 155 | for inner_prm in inner_prms: 156 | inner_prm_val = os.environ.get(inner_prm, '') 157 | prm_val = prm_val.replace('${' + inner_prm + '}', str(inner_prm_val)) 158 | 159 | # ---------------------------------------------------------------------------------- 160 | try: 161 | eval_prm_val = 
eval(prm_val) 162 | except Exception: # pylint: disable=broad-except 163 | pass 164 | else: 165 | prm_val = eval_prm_val 166 | 167 | # ---------------------------------------------------------------------------------- 168 | self.__prms[param_name] = prm_val 169 | self.__cfg[param_splt[0]][param_splt[1]] = str(prm_val) 170 | 171 | return prm_val 172 | 173 | # ---------------------------------------------------------------------------------------------- 174 | def save_config(self, filename): 175 | """Saves current configuration settings from memory to a ini file. 176 | 177 | Note: 178 | This method is usefull for reference and debug. 179 | 180 | Args: 181 | filename(str): full file name where configuration file is expected to be saved. 182 | 183 | Raises: 184 | NotADirectoryError. 185 | 186 | """ 187 | 188 | # ------------------------------------------------------------------------------------------ 189 | if self.__filename is None: 190 | return False 191 | 192 | # ------------------------------------------------------------------------------------------ 193 | # first make sure to parse all configuration settigs in order to resolve their values 194 | # before saving results to file 195 | self.__parse_all() 196 | 197 | # ------------------------------------------------------------------------------------------ 198 | # Check first if provided path exists and its writable 199 | try: 200 | with open(filename, 'w') as configfile: 201 | self.__cfg.write(configfile) 202 | return True 203 | except Exception: # pylint: disable=broad-except 204 | raise NotADirectoryError('Provided directory <{}> cannot be accessed ' 205 | 'or is not a directory'.format(os.path.dirname(filename))) 206 | 207 | -------------------------------------------------------------------------------- /src/generic/context.py: -------------------------------------------------------------------------------- 1 | """ 2 | Application Context class (Singleton). 
class SingletonMeta(type):
    """Metaclass that guarantees one shared instance per class, thread-safely."""

    _instances = {}
    _lock = threading.Lock()  # serializes first-time construction across threads

    def __call__(cls, *args, **kwargs):
        # Always take the lock so two threads cannot race on first construction.
        with cls._lock:
            try:
                return cls._instances[cls]
            except KeyError:
                instance = super().__call__(*args, **kwargs)
                cls._instances[cls] = instance
                return instance

#------------------------------------------------------------------------------
APPCTX = None
#------------------------------------------------------------------------------
def appctx():
    """Return the process-wide ApplicationContext, creating it on first use.

    Hack for recursive imports of application context in the Flask App.
    """
    global APPCTX
    if not APPCTX:
        APPCTX = ApplicationContext()
    return APPCTX
58 | """ 59 | return ApplicationContext(*args, **kwargs) 60 | 61 | #-------------------------------------------------------------------------- 62 | def __init__(self, **kwargs): 63 | """Initializes all classes that will be part of an ApplicationContext object: 64 | 65 | * Initialize an instance for :class:`generic.Configuration` 66 | * Initialize an instance for :class:`db.DataBase` 67 | * Initialize an instance for :class:`generic.LoggingManager` 68 | 69 | """ 70 | 71 | #---------------------------------------------------------------------- 72 | # If this class instance was already initialized then we skip the init 73 | if getattr(self, '__init_done', None): 74 | return 75 | 76 | setup_env() 77 | 78 | #------------------------------------------------------------------ 79 | # prepare configuration parameters. 80 | ApplicationContext.__cfg = generic.Configuration(os.environ.get('FCDB_CFG_FILE', '')) 81 | 82 | #------------------------------------------------------------------ 83 | # prepare Logger 84 | self._log_manager = generic.LoggingManager(appctx=self, **kwargs) 85 | ApplicationContext.__logger = logging.getLogger(__name__) 86 | 87 | # in case effective level is set as DEBUG we save resolved config parameters 88 | # to a file for debug purpose. 
89 | log_level = self.config.get_param('log.level', 'NONE') 90 | if log_level == 'DEBUG': 91 | try: 92 | cfg_file_name = os.environ.get('FCDB_LOG_PATH', '') + \ 93 | '/config_' + current_process().name + \ 94 | '_' + str(os.getpid()) + '.ini' 95 | ApplicationContext.__cfg.save_config(cfg_file_name) 96 | except IOError: 97 | ApplicationContext.__logger.warning(f'Could not save config file {cfg_file_name} ' 98 | 'for debug purposes') 99 | 100 | #------------------------------------------------------------------ 101 | # prepare db 102 | try: 103 | db_params = self.config.get_param('db.params', {}) 104 | self.__db = db.DataBase(self.config.get_param('db.dsn'), **db_params) 105 | 106 | except Exception: 107 | ApplicationContext.__logger.exception("Exception occured while trying to initialize DB") 108 | raise 109 | 110 | #------------------------------------------------------------------ 111 | ApplicationContext.__logger.debug('Finnished app context init') 112 | 113 | self.__init_done = "True" 114 | 115 | 116 | @property 117 | def config(self): 118 | """ 119 | Returns configuration object :class:`~generic.Configuration`. 120 | """ 121 | return ApplicationContext.__cfg 122 | 123 | #---------------------------------------------------------------------------------------------- 124 | @property 125 | def db(self): # pylint: disable=C0103 126 | """ 127 | Returns: 128 | :class:`~db.DataBase` - helper for DB sqlalchemy engine and sessions. 
class ApplicationException(Exception):
    """Generic Exception raised for all application related exceptions
    """


class ParameterException(ApplicationException):
    """Raised when a required application parameter is missing or not provided.
    """
    def __init__(self, parameter_name):
        message = f"Parameter is missing or not provided:{parameter_name}"
        super().__init__(message)


# ------------------------------------------------------------------------------
class ValidationException(Exception):
    """Generic Exception raised for all validation activities
    """


# ------------------------------------------------------------------------------
class EntityNotFoundException(ValidationException):
    """Raised when a required entity cannot be located.
    """
    def __init__(self, entity_type: str, entity_id: str, txt=''):
        message = f"{entity_type} ({entity_id}) not found {txt}"
        super().__init__(message)
import os
import sys
import glob
import re
import logging
import logging.config
# NOTE(review): logging.handlers is used below (QueueHandler, RotatingFileHandler) but is
# only reachable because "import logging.config" imports logging.handlers internally —
# confirm and consider an explicit "import logging.handlers".
from datetime import datetime
from multiprocessing import Process, current_process, Event, Queue
from generic import exc as exceptions  # NOTE(review): appears unused here — confirm before removing

import generic

# --------------------------------------------------------------------------------------------------
logger = logging.getLogger(__name__)


# --------------------------------------------------------------------------------------------------
class LoggingManager(Process):
    """Wrapper class to manage logging configuration load/reload based on project
    configuration setup.

    In the MainProcess it installs file handlers and (when a log queue is supplied)
    runs itself as a child process that drains log records sent by worker processes.
    In non-main processes it either forwards all records to the queue or writes
    directly to a file handler.
    """
    # :int: Default Log File Size in case it was not specified in config.
    DEFAULT_LOG_FILE_SIZE = 10 * 1024 * 1024  # 10 Mb

    # :int: Default Max number of log files in case it was not specified in config.
    DEFAULT_LOG_FILE_COUNT = 10

    # :str: Default (current path) when expected variables (path related) are not set.
    DEFAULT_CURR_PATH = '.'

    # :str: Default search pattern for additional log configuration files.
    DEFAULT_LOG_CFG_FILE_PATT = 'log_config*.ini'

    # :str: Default log level.
    DEFAULT_LOG_LEVEL = 'DEBUG'

    # :bool: Default msg level propagation to upper handlers.
    DEFAULT_LOG_PROPAGATE = False

    def __init__(self, **kwargs):
        """Constructor method for LoggingManager.

        Keyword Args:
            appctx: the owning ApplicationContext (provides .config).
            log_queue: multiprocessing queue used to ship log records to the main process.
            event_finnish: multiprocessing event used to stop the receiver loop.
            Any remaining kwargs are passed through for %(name) substitution in
            configured log file names.
        """
        self._appctx = kwargs.pop('appctx', None)
        self._log_queue: Queue = kwargs.pop('log_queue', None)
        self._event_finnish: Event = kwargs.pop('event_finnish', None)
        super().__init__()

        # -----------------------------------------------------------------------------------------
        # map of accepted config level names to logging module constants
        self.__valid_dbg_values = {'DEBUG': logging.DEBUG, 'INFO': logging.INFO,
                                   'WARNING': logging.WARNING, 'ERROR': logging.ERROR,
                                   'CRITICAL': logging.CRITICAL, 'NOTSET': logging.NOTSET}
        # -----------------------------------------------------------------------------------------
        # get env variables values
        log_level = self._appctx.config.get_param('log.level', self.DEFAULT_LOG_LEVEL)

        if log_level not in self.__valid_dbg_values:
            raise ValueError('Invalid value for parameter log.level ')

        # -----------------------------------------------------------------------------------------
        # used in log file name resolutions (matches %(name) placeholders)
        self.__regex = re.compile(r'\%\(([a-z0-9_\.]+)\)')

        # -----------------------------------------------------------------------------------------
        # set level to 'root' logger as NOTSET - this will leave for the handlers' log level
        # to decide
        root_logger = logging.getLogger()

        # Disable the default handler
        logging.lastResort.propagate = False

        proc_name = current_process().name
        if proc_name == 'MainProcess':

            root_logger.setLevel(logging.NOTSET)

            # -----------------------------------------------------------------------------------------
            # define log Formatters
            # log_formatter_str = self._appctx.config.get_param('log.format.msg', None)
            # log_formatter_datefmt = self._appctx.config.get_param('log.format.datefmt', None)

            # if log_formatter_str is None:
            #     raise ValueError('Log Message format is missing ')

            # -----------------------------------------------------------------------------------------
            # if log_formatter_datefmt is None:
            #     log_fmt = logging.Formatter(log_formatter_str)
            # else:
            #     log_fmt = logging.Formatter(log_formatter_str, datefmt=log_formatter_datefmt)

            # -----------------------------------------------------------------------------------------
            # define log Handler if current output is displayed on some terminal
            # if sys.stdout.isatty():
            #     log_handler = logging.StreamHandler()
            #     log_handler.setLevel(self.__valid_dbg_values[log_level])
            #     log_handler.setFormatter(log_fmt)
            #     logging.getLogger().addHandler(log_handler)

            self.__create_file_handler(self._appctx.config, process_name=proc_name, **kwargs)

            # -----------------------------------------------------------------------------------------
            # set custom log level and handlers for additional Loggers (if any)
            log_cfg_file_srch_patt = self._appctx.config.get_param('log.config.files.pattern',
                                                                   self.DEFAULT_LOG_CFG_FILE_PATT)

            for log_cfg_file in glob.glob(os.environ.get('FCDB_CFG_LOG_PATH', self.DEFAULT_CURR_PATH)
                                          + '/' + log_cfg_file_srch_patt):
                try:
                    logger.debug('Processing log config file:<{}>'.format(log_cfg_file))
                    log_cfg = generic.Configuration(log_cfg_file)
                    self.__create_file_handler(log_cfg, **kwargs)

                except Exception:  # pylint: disable=broad-except
                    logger.exception('Error occured while processing log '
                                     'file:{}'.format(log_cfg_file))

            # in case a log queue was specified in the input
            # start the process that would retieve any log messages from the children
            if self._log_queue is not None:
                import atexit
                atexit.register(self.finish)

                # start this object as a child process running run() below
                self.start()

        # else if this is not the main process
        else:

            # then we just create a handler that writes all the logs to the log queue
            if self._log_queue is not None:

                # we create a handler that is a Queue to the main Process
                handler = logging.handlers.QueueHandler(self._log_queue)
                root_logger.addHandler(handler)
                # send only those messages as per the config
                root_logger.setLevel(self.__valid_dbg_values[log_level])

            # if no queue was specified
            else:
                # then we create a handler that writes all the log messages to File
                self.__create_file_handler(self._appctx.config, process_name=proc_name, **kwargs)

    # ----------------------------------------------------------------------------------------------
    def run(self):
        """Receiver loop (runs in the child process): drain log records from the
        queue and dispatch them to the matching logger until a None sentinel or
        the finish event arrives.
        """
        while True:
            try:
                if self._event_finnish and self._event_finnish.is_set():
                    break

                record = self._log_queue.get()
                # None is the shutdown sentinel pushed by finish()
                if record is None:
                    break

                _logger = logging.getLogger(record.name)
                # handle the log message that we got from the queue
                _logger.handle(record)

            except EOFError:
                break

            except Exception:
                # never let a malformed record kill the receiver; report and continue
                import traceback
                print('Error durring log handling:', file=sys.stderr)
                traceback.print_exc(file=sys.stderr)

        sys.exit(0)

    @property
    def log_queue(self):
        """The multiprocessing queue used to transport log records (or None)."""
        return self._log_queue

    # ----------------------------------------------------------------------------------------------
    def finish(self):
        """Stop the receiver child process (idempotent; MainProcess only):
        push the None sentinel, join the child and release its resources.
        """
        if current_process().name == 'MainProcess':
            logging.getLogger(__name__).info(f'Finnishing the Log Receiver process')
            if getattr(self, '_finnish_done', None) is None and self._log_queue:
                self._finnish_done = True
                self._log_queue.put(None)
                self.join()
                self.close()

    # ----------------------------------------------------------------------------------------------
    def __create_file_handler(self, config, **kwargs):
        """Adds additional Handlers for specific logger as per input configuration file.

        Args:
            config (str): config file where additional Logger config parameters are specified.

        Keyword Args:
            any %(name) placeholders found in log.file.name are replaced with the
            matching kwargs value; the file name is then passed through strftime.
        """
        log_namespace = config.get_param('log.namespace')
        if log_namespace is not None:

            # --------------------------------------------------------------------------------------
            if log_namespace == 'root':
                cfg_logger = logging.getLogger()
            else:
                cfg_logger = logging.getLogger(log_namespace)

            log_level = config.get_param('log.level', self.DEFAULT_LOG_LEVEL)
            if log_level not in self.__valid_dbg_values:
                raise ValueError('Invalid value for parameter log.level')

            file_name = config.get_param('log.file.name', None)

            if file_name:
                # ----------------------------------------------------------------------------------
                # lets resolve first the references (if any) to any other configuration parameters
                inner_prms = self.__regex.findall(file_name)

                for inner_prm in inner_prms:
                    inner_prm_val = kwargs.get(inner_prm, None)
                    if inner_prm_val is None:
                        logger.warning(f'Could not find value for parameter <{inner_prm}> specified in the log file name: <{file_name}>')
                        # fall back to the placeholder name itself
                        file_name = file_name.replace('%(' + inner_prm + ')', inner_prm)
                    else:
                        file_name = file_name.replace('%(' + inner_prm + ')', str(inner_prm_val))

                # allow date/time patterns (e.g. %Y%m%d) in the configured name
                file_name = datetime.now().strftime(file_name)

            # --------------------------------------------------------------------------------------
            file_max_size = config.get_param('log.file.max.size')
            if file_max_size is None:
                logger.warning('Parameter log.file.max.size was set to default value:{}'
                               .format(self.DEFAULT_LOG_FILE_SIZE))
                file_max_size = self.DEFAULT_LOG_FILE_SIZE

            # --------------------------------------------------------------------------------------
            file_max_count = config.get_param('log.file.max.count')
            if file_max_count is None:
                logger.warning('Parameter log.file.max.count was set to default value:{}'
                               .format(self.DEFAULT_LOG_FILE_COUNT))
                file_max_count = self.DEFAULT_LOG_FILE_COUNT

            # --------------------------------------------------------------------------------------
            file_format_msg = config.get_param('log.format.msg', None)
            file_format_datefmt = config.get_param('log.format.datefmt', None)

            # --------------------------------------------------------------------------------------
            # validate input values
            # NOTE(review): this only logs — handler creation still proceeds with
            # file_name/file_format_msg as None below; confirm intended behavior.
            if file_name is None or file_format_msg is None:
                logger.error('Mandatory value for is not set '
                             'in log config file')

            # --------------------------------------------------------------------------------------
            # create new Formatter
            if file_format_datefmt is None:
                file_fmt = logging.Formatter(file_format_msg)
            else:
                file_fmt = logging.Formatter(file_format_msg, datefmt=file_format_datefmt)

            # --------------------------------------------------------------------------------------
            # create new Rotating File Handler (delay=True: file opened on first emit)
            log_file_handler = logging.handlers.RotatingFileHandler(
                os.environ.get('FCDB_LOG_PATH', self.DEFAULT_CURR_PATH) + '/'
                + file_name.strip(),
                # + file_name.replace('.log', '_' + str(os.getpid()) + '.log'),
                'a', file_max_size, file_max_count, delay=True)

            cfg_logger.setLevel(self.__valid_dbg_values[log_level])
            log_file_handler.setLevel(self.__valid_dbg_values[log_level])
            log_file_handler.setFormatter(file_fmt)

            cfg_logger.propagate = config.get_param('log.propagate', self.DEFAULT_LOG_PROPAGATE)
            cfg_logger.addHandler(log_file_handler)
"""
Main module for the search command.

This program provides all the search functionality in all data sources.
It is used as command line interface as well from the web interface/APIs.

Execute it as follows to get the possible search options/filter capabilities:

    $ search --help

Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved.
"""

import os
import logging
import time
import pydantic_argparse
import argcomplete
from datetime import datetime

from generic import ApplicationContext
from common.models import SearchOptions
from common.util import init_db_schema
from common.search import search_data, results_output

__version__ = os.getenv("APP_VERSION", "modified")
__description__ = "FastCVE - fast, rich and API-based search for CVE and more (CPE, CWE, CAPEC)"

# ------------------------------------------------------------------------------
def main():
    """Entry point: parse the CLI options, run the search and print the results."""

    # ------------------------------------------------------------------------
    # build the typed parser from the SearchOptions pydantic model and validate
    parser = pydantic_argparse.ArgumentParser(
        model=SearchOptions,
        prog="search",
        version=__version__,
        description=__description__,
    )

    argcomplete.autocomplete(parser)

    opts = parser.parse_typed_args()

    # optional self-profiling of the whole run (--profile)
    profiler = None
    if opts.profile:
        import cProfile
        profiler = cProfile.Profile()
        profiler.enable()

    t_start = time.time()

    appctx = ApplicationContext.instance()

    log = logging.getLogger(__name__)
    log.info(f"search options: {opts}")

    # create/update the DB schema if necessary
    init_db_schema()

    # run the query against the selected data source
    found = search_data(appctx, opts)

    # emit the results in the requested output format
    results_output(opts, found)

    # report how long the whole search took
    log.info(f"search time: {time.time() - t_start:.3f} seconds")

    if opts.profile and profiler:
        stamp = datetime.utcnow().strftime("%Y%m%d_%H%M")
        profiler.disable()
        profiler.create_stats()
        profiler.dump_stats(f'./search_profile_{stamp}.prof')


if __name__ == "__main__":
    main()

# --------------------------------------------------------------------------------
# /src/web/__init__.py: (empty module)
# --------------------------------------------------------------------------------
# /src/web/app.py:
# --------------------------------------------------------------------------------
from fastapi import FastAPI, HTTPException, Depends, status
from generic import ApplicationContext
from common.models import SearchOptions, SearchInfoType
from common.search import search_data
from dependencies import get_app_cntxt
from web.routers.search import router as router_search
from web.models.search import StatusOutput
import os

version = os.getenv("APP_VERSION", "modified")
app = FastAPI(title="FastCVE", description="Fast, Rich and API-based search for CVE and more (CPE, CWE, CAPEC)", version=version)


@app.get("/status", tags=['status'], name="DB status", response_model=StatusOutput)
async def get_status(appctx: ApplicationContext = Depends(get_app_cntxt)) -> StatusOutput:
    """Get the current DB status update"""

    try:
        result = search_data(appctx, SearchOptions(searchInfo=SearchInfoType.status))
    except Exception as exc:
        # NOTE(review): every failure surfaces as 422 — presumably intentional; confirm
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(exc))

    return result

app.include_router(router_search, prefix="/api")
# --------------------------------------------------------------------------------
# /src/web/dependencies.py:
# --------------------------------------------------------------------------------
from typing import Generator
from generic import ApplicationContext

appcntx = ApplicationContext.instance()


def get_app_cntxt() -> ApplicationContext:
    """Return the (singleton) application context.

    Fix: the annotation previously promised ``Generator[ApplicationContext, None, None]``
    while the function plainly returns the context object itself.
    """
    return appcntx

# --------------------------------------------------------------------------------
# /src/web/models/__init__.py: (empty module)
# --------------------------------------------------------------------------------
# /src/web/models/search.py:
# --------------------------------------------------------------------------------
"""
Pydantic data models used in the search endpoints outputs

Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved.
"""
from pydantic import BaseModel
from fastapi import Query
from typing import List, Optional
from datetime import datetime, date
from common.models.cve import CveItem
from common.models.cpe import CpeItem
from common.models import CveSeverityV2, CveSeverityV3, CveSeverityV4


class CveOutput(BaseModel):
    """Response body of /search/cve: echo of the criteria + matched CVE items."""
    search: dict
    result: List[CveItem]


class CpeOutput(BaseModel):
    """Response body of /search/cpe: echo of the criteria + matched CPE items."""
    search: dict
    result: List[CpeItem]


class StatusItemOutput(BaseModel):
    """Load status of a single data feed: last update timestamp + record count."""
    update_date: datetime
    count: int


class StatusOutput(BaseModel):
    """Per-feed DB load status; a feed that was never loaded stays None."""

    capec: Optional[StatusItemOutput]
    cve: Optional[StatusItemOutput]
    cpe: Optional[StatusItemOutput]
    cwe: Optional[StatusItemOutput]
    epss: Optional[StatusItemOutput]

class SearchInputCommon:
    """Query parameters shared by every /search/* endpoint (paging + keyword filter)."""

    def __init__(self, *,
                 page_idx: Optional[int] = Query(default=0, description="Results page index", alias="page-idx", ge=0),
                 page_size: Optional[int] = Query(description="Results page size", default=10, alias="page-size", ge=10, le=3000),
                 # NOTE(review): description mentions CPE only, but this filter is also
                 # used by the CVE/CWE/CAPEC endpoints — consider generalizing the text
                 keyword_search: Optional[List[str]] = Query(default=None, description="regexp to search for CPE in the description", alias="keyword")
                 ) -> None:

        self.page_idx = page_idx
        self.page_size = page_size
        self.keyword_search = keyword_search

class SearchInputCve:
    """Query parameters specific to the /search/cve endpoint."""

    def __init__(self, *,
                 cve_id: Optional[List[str]] = Query(default=None, description="Related CVE IDs to search for", alias="cve"),
                 cpe_name: Optional[str] = Query(default=None, description="CPE2.3 filter specification to search for", alias="cpe23"),
                 version_start: Optional[str] = Query(default=None, description="CPE version start to search for", alias="version-start"),
                 version_start_include: Optional[bool] = Query(default=False, description="If set to true, the version start is included in the search", alias="version-start-include"),
                 version_end: Optional[str] = Query(default=None, description="CPE version end to search for", alias="version-end"),
                 version_end_include: Optional[bool] = Query(default=False, description="If set to true, the version end is included in the search", alias="version-end-include"),
                 cwe_id: Optional[List[str]] = Query(default=None, description="Related CWE IDs to search for", alias="cwe"),
                 last_mod_start_date: Optional[date] = Query(default=None, description="Last modified start date", alias="last-mod-start-date"),
                 last_mod_end_date: Optional[date] = Query(default=None, description="Last modified end date", alias="last-mod-end-date"),
                 pub_start_date: Optional[date] = Query(default=None, description="CVE Published start date", alias="pub-start-date"),
                 # fix: description was a copy-paste of the start-date text ("start date")
                 pub_end_date: Optional[date] = Query(default=None, description="CVE Published end date", alias="pub-end-date"),
                 cvss_v2_severity: Optional[CveSeverityV2] = Query(default=None, description="CVSS V2.0 Severity to search for", alias="cvss-severity-v2"),
                 cvss_v3_severity: Optional[CveSeverityV3] = Query(default=None, description="CVSS V3.x Severity to search", alias="cvss-severity-v3"),
                 cvss_v4_severity: Optional[CveSeverityV4] = Query(default=None, description="CVSS V4.0 Severity to search", alias="cvss-severity-v4"),
                 cvss_v2_metrics: Optional[str] = Query(default=None, description="CVSS V2.0 vector string to search for", alias="cvss-metrics-v2"),
                 cvss_v3_metrics: Optional[str] = Query(default=None, description="CVSS V3.x vector string to search for", alias="cvss-metrics-v3"),
                 cvss_v4_metrics: Optional[str] = Query(default=None, description="CVSS V4.0 vector string to search for", alias="cvss-metrics-v4"),
                 epss_score_gt: Optional[float] = Query(default=None, description="Greater EPSS score float to search for", alias="epss-score-gt", ge=0, le=1),
                 epss_score_lt: Optional[float] = Query(default=None, description="Less EPSS score float to search for", alias="epss-score-lt", ge=0, le=1),
                 epss_perc_gt: Optional[float] = Query(default=None, description="Greater EPSS percentile float to search for", alias="epss-perc-gt", ge=0, le=1),
                 epss_perc_lt: Optional[float] = Query(default=None, description="Less EPSS percentile float to search for", alias="epss-perc-lt", ge=0, le=1),
                 exploitable: Optional[bool] = Query(default=False, description="Known Exploited Vulnerabilities to search for", alias="exploitable"),
                 vulnerable: Optional[bool] = Query(default=True, description="CVEs found by the CPEs that are marked as vulnerable", alias="vulnerable"),
                 days: Optional[int] = Query(default=None, description="Number of days back when the CVEs were last modified", alias="days-back", ge=0),
                 ) -> None:

        self.cve_id = cve_id
        self.cpe_name = cpe_name
        self.version_start = version_start
        self.version_start_include = version_start_include
        self.version_end = version_end
        self.version_end_include = version_end_include
        self.cwe_id = cwe_id
        self.last_mod_start_date = last_mod_start_date
        self.last_mod_end_date = last_mod_end_date
        self.pub_start_date = pub_start_date
        self.pub_end_date = pub_end_date
        self.cvss_v2_severity = cvss_v2_severity
        self.cvss_v3_severity = cvss_v3_severity
        self.cvss_v4_severity = cvss_v4_severity
        self.cvss_v2_metrics = cvss_v2_metrics
        self.cvss_v3_metrics = cvss_v3_metrics
        self.cvss_v4_metrics = cvss_v4_metrics
        # NOTE(review): mixedCase attribute names kept — routers/search.py reads
        # cve_opts.epss_Score_Gt etc.; renaming would break that caller
        self.epss_Score_Gt = epss_score_gt
        self.epss_Score_Lt = epss_score_lt
        self.epss_Perc_Gt = epss_perc_gt
        self.epss_Perc_Lt = epss_perc_lt
        self.exploitable = exploitable
        self.vulnerable = vulnerable
        self.days = days


class SearchInputCpe:
    """Query parameters specific to the /search/cpe endpoint."""

    def __init__(self, *,
                 cpe_name: Optional[str] = Query(default=None, description="CPE2.3 filter specification to search for", alias="cpe23"),
                 version_start: Optional[str] = Query(default=None, description="CPE version start to search for", alias="version-start"),
                 version_start_include: Optional[bool] = Query(default=False, description="If set to true, the version start is included in the search", alias="version-start-include"),
                 version_end: Optional[str] = Query(default=None, description="CPE version end to search for", alias="version-end"),
                 version_end_include: Optional[bool] = Query(default=False, description="If set to true, the version end is included in the search", alias="version-end-include"),
                 last_mod_start_date: Optional[date] = Query(default=None, description="Last modified start date", alias="last-mod-start-date"),
                 last_mod_end_date: Optional[date] = Query(default=None, description="Last modified end date", alias="last-mod-end-date"),
                 days: Optional[int] = Query(default=None, description="Number of days back when the CPEs were last modified", alias="days-back", ge=0),
                 deprecated: Optional[bool] = Query(default=False, description="If set to true, will fetch only the deprecated CPE names", alias="deprecated"),
                 ) -> None:

        self.cpe_name = cpe_name
        self.version_start = version_start
        self.version_start_include = version_start_include
        self.version_end = version_end
        self.version_end_include = version_end_include
        self.last_mod_start_date = last_mod_start_date
        self.last_mod_end_date = last_mod_end_date
        self.days = days
        self.deprecated = deprecated

class SearchInputCwe:
    """Query parameters specific to the /search/cwe endpoint."""

    def __init__(self, *,
                 cwe_id: Optional[List[str]] = Query(default=None, description="Related CWE IDs to search for", alias="cwe"),
                 ) -> None:

        self.cwe_id = cwe_id


class SearchInputCapec:
    """Query parameters specific to the /search/capec endpoint."""

    def __init__(self, *,
                 capec_id: Optional[List[str]] = Query(default=None, description="Related CAPEC IDs to search for", alias="capec"),
                 ) -> None:

        self.capec_id = capec_id
# --------------------------------------------------------------------------------
# /src/web/routers/__init__.py: (empty module)
# --------------------------------------------------------------------------------
# /src/web/routers/search.py:
# --------------------------------------------------------------------------------
"""
Search endpoints implementation for the Binare Vulndb API.

Copyright (c) 2020 to date, Binare Oy (license@binare.io) All rights reserved.
"""
import logging
from pydantic.error_wrappers import ValidationError as PydanticValidationError
from fastapi import APIRouter, Depends, Query, status, HTTPException
from generic.context import ApplicationContext
from dependencies import get_app_cntxt
from typing import Any
from common.models import SearchInfoType, SearchOptions
from common.search import search_data, ValidationError
from web.models.search import (
    CveOutput,
    CpeOutput,
    SearchInputCommon,
    SearchInputCve,
    SearchInputCpe,
    SearchInputCwe,
    SearchInputCapec,
)

router = APIRouter(prefix="/search", tags=["search"])


# ------------------------------------------------------------------------------
def search(appctx: ApplicationContext, opts: SearchOptions) -> Any:
    """Common execution path for every endpoint: run the search, map
    domain-level ValidationError to an HTTP 422.

    Fix: the return annotation used the *builtin* ``any`` instead of
    ``typing.Any`` (which is already imported above).
    """
    logger = logging.getLogger('web')
    logger.info(f'search: {opts}')

    try:
        result = search_data(appctx, opts)
    except ValidationError as exc:
        logger.exception(exc)
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(exc))

    return result


# ------------------------------------------------------------------------------
@router.get("/cve", name="Search CVE", response_model=CveOutput, response_model_exclude_unset=True)
async def search_cve(cmn_opts: SearchInputCommon = Depends(SearchInputCommon),
                     cve_opts: SearchInputCve = Depends(SearchInputCve),
                     appctx: ApplicationContext = Depends(get_app_cntxt),
                     ) -> CveOutput:

    """API to search for CVEs"""

    # map the flat query parameters onto the SearchOptions pydantic model;
    # model validation errors become HTTP 422
    try:
        opts = SearchOptions(
            searchInfo=SearchInfoType.cve,
            pageIdx=cmn_opts.page_idx,
            pageSize=cmn_opts.page_size,
            keywordSearch=cmn_opts.keyword_search,
            cveId=cve_opts.cve_id,
            cpeName=cve_opts.cpe_name,
            versionStart=cve_opts.version_start,
            versionStartInclude=cve_opts.version_start_include,
            versionEnd=cve_opts.version_end,
            versionEndInclude=cve_opts.version_end_include,
            cweId=cve_opts.cwe_id,
            lastModStartDate=cve_opts.last_mod_start_date,
            lastModEndDate=cve_opts.last_mod_end_date,
            pubStartDate=cve_opts.pub_start_date,
            pubEndDate=cve_opts.pub_end_date,
            cvssV2Severity=cve_opts.cvss_v2_severity,
            cvssV3Severity=cve_opts.cvss_v3_severity,
            cvssV4Severity=cve_opts.cvss_v4_severity,
            cvssV2Metrics=cve_opts.cvss_v2_metrics,
            cvssV3Metrics=cve_opts.cvss_v3_metrics,
            cvssV4Metrics=cve_opts.cvss_v4_metrics,
            epssScoreGt=cve_opts.epss_Score_Gt,
            epssScoreLt=cve_opts.epss_Score_Lt,
            epssPercGt=cve_opts.epss_Perc_Gt,
            epssPercLt=cve_opts.epss_Perc_Lt,
            exploitable=cve_opts.exploitable,
            vulnerable=cve_opts.vulnerable,
            days=cve_opts.days
        )
    except PydanticValidationError as exc:
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(exc))

    return search(appctx, opts)


# ------------------------------------------------------------------------------
@router.get("/cpe", name="Search CPE", response_model=CpeOutput, response_model_exclude_unset=True)
async def search_cpe(cmn_opts: SearchInputCommon = Depends(SearchInputCommon),
                     cpe_opts: SearchInputCpe = Depends(SearchInputCpe),
                     appctx: ApplicationContext = Depends(get_app_cntxt),
                     ) -> CpeOutput:

    """API to search for CPEs"""

    try:
        opts = SearchOptions(
            searchInfo=SearchInfoType.cpe,
            pageIdx=cmn_opts.page_idx,
            pageSize=cmn_opts.page_size,
            keywordSearch=cmn_opts.keyword_search,
            cpeName=cpe_opts.cpe_name,
            versionStart=cpe_opts.version_start,
            versionStartInclude=cpe_opts.version_start_include,
            versionEnd=cpe_opts.version_end,
            versionEndInclude=cpe_opts.version_end_include,
            lastModStartDate=cpe_opts.last_mod_start_date,
            lastModEndDate=cpe_opts.last_mod_end_date,
            days=cpe_opts.days,
            deprecated=cpe_opts.deprecated
        )
    except PydanticValidationError as exc:
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(exc))

    return search(appctx, opts)


# ------------------------------------------------------------------------------
@router.get("/cwe", name="Search CWE")
async def search_cwe(cmn_opts: SearchInputCommon = Depends(SearchInputCommon),
                     cwe_opts: SearchInputCwe = Depends(SearchInputCwe),
                     appctx: ApplicationContext = Depends(get_app_cntxt),
                     ) -> Any:

    """API to search for CWEs"""

    try:
        opts = SearchOptions(
            searchInfo=SearchInfoType.cwe,
            pageIdx=cmn_opts.page_idx,
            pageSize=cmn_opts.page_size,
            keywordSearch=cmn_opts.keyword_search,
            cweId=cwe_opts.cwe_id,
        )
    except PydanticValidationError as exc:
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(exc))

    return search(appctx, opts)


# ------------------------------------------------------------------------------
@router.get("/capec", name="Search CAPEC", )
async def search_capec(cmn_opts: SearchInputCommon = Depends(SearchInputCommon),
                       capec_opts: SearchInputCapec = Depends(SearchInputCapec),
                       appctx: ApplicationContext = Depends(get_app_cntxt)
                       ) -> Any:

    """API to search for CAPECs"""

    try:
        opts = SearchOptions(
            searchInfo=SearchInfoType.capec,
            pageIdx=cmn_opts.page_idx,
            pageSize=cmn_opts.page_size,
            keywordSearch=cmn_opts.keyword_search,
            capecId=capec_opts.capec_id,
        )
    except PydanticValidationError as exc:
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(exc))

    return search(appctx, opts)

# --------------------------------------------------------------------------------
# /start_web.sh: (shell script — reproduced as a comment block inside this chunk)
# --------------------------------------------------------------------------------
# #!/usr/bin/env bash
#
# if [[ -n $FCDB_WEB_PARAMS ]]; then
#     ( sleep 1
#       . ./config/setenv.sh > /dev/null
#       cd ${FCDB_HOME}/web
#       nohup uvicorn app:app $FCDB_WEB_PARAMS > ${FCDB_HOME}/web_access.log 2>&1 & ) &
# fi
# --------------------------------------------------------------------------------
# /tests/__init__.py: (empty module)
# --------------------------------------------------------------------------------
# /tests/conftest.py:
# --------------------------------------------------------------------------------
import pytest
import os

from tests.runners.cli_runner import CLIRunner


def pytest_configure(config):
    """Register the custom pytest markers used by the unit tests."""
    config.addinivalue_line("markers", "smoketest: marker for smoke tests")
    config.addinivalue_line("markers", "cwesearch: marker for testing the CWE search only")
    config.addinivalue_line("markers", "cvesearch: marker for testing the CVE search only")
    config.addinivalue_line("markers", "cpesearch: marker for testing the CPE search only")


# @pytest.fixture(scope="session")
# def cli_runner():  (definition continues on the next dump line)
# --- continuation of /tests/conftest.py (fixture split across dump lines; restated in full) ---
import os
import pytest


@pytest.fixture(scope="session")
def cli_runner():
    """Set up the CLI runner that would be used to trigger the tests.

    Brings the docker-compose stack up for the whole session and tears it
    down afterwards.

    Yields:
        CLIRunner: The CLI runner.
    """

    os.environ["INP_ENV_NAME"] = "dev"
    os.environ["FCDB_HOME"] = os.getcwd()

    # make sure the docker with DB image is up and running
    runner = CLIRunner()
    print("triggering: docker compose up -d")
    result = runner.runcommand("docker compose up -d")
    assert result.returncode == 0
    yield runner

    print("triggering: docker compose down")
    runner.runcommand("docker compose down")

# --------------------------------------------------------------------------------
# /tests/runners/__init__.py: (empty module)
# --------------------------------------------------------------------------------
# /tests/runners/cli_runner.py:
# --------------------------------------------------------------------------------
from subprocess import run, PIPE, STDOUT
import re

class CLIRunner(object):
    """Thin wrapper around subprocess.run for driving shell-like command strings."""

    def __init__(self):
        self.runner = None   # last CompletedProcess
        self.cmd = None      # last command string
        # regexp to split a string by the spaces but avoid splitting strings enclosed in a double or single quotes
        # fix: was `from re import compile` + bare `compile(...)`, which shadows the builtin compile()
        self.split_regex = re.compile(r'(?:[\"\'].*?[\"\']|\S)+')

    def runcommand(self, cmd):
        """Run *cmd* without a shell (tokens split, surrounding quotes stripped).

        Returns:
            subprocess.CompletedProcess: stdout+stderr merged as text.
        """
        self.cmd = cmd
        cmd_items = list(map(lambda x: x.strip('["\']'), self.split_regex.findall(cmd)))
        self.runner = run(cmd_items, stdout=PIPE, stderr=STDOUT, universal_newlines=True, shell=False)

        return self.runner


# limit star-imports of this chunk to the runner class, so test functions and
# the fixture are not accidentally re-exported into other test modules
__all__ = ["CLIRunner"]

# --------------------------------------------------------------------------------
# /tests/unit/__init__.py: (empty module)
# --------------------------------------------------------------------------------
# /tests/unit/test_cpe_search.py:
# --------------------------------------------------------------------------------
import json
import pytest


# ------------------------------------------------------------------------------
@pytest.mark.smoketest
@pytest.mark.cpesearch
def test_cpe_search_by_id(cli_runner):
    """Search CPEs names by CPE 2.3 specification and requests output as ID"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cpe --cpe23 cpe:2.3:h:dlink:dir-412:* --output id")
    assert result.returncode == 0
    assert result.stdout[:39] == 'cpe:2.3:h:dlink:dir-412:-:*:*:*:*:*:*:*'


# ------------------------------------------------------------------------------
@pytest.mark.smoketest
@pytest.mark.cpesearch
def test_cpe_search_by_id_json(cli_runner):
    """Search CPE by ID and requests output as JSON"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cpe --cpe23 cpe:2.3:h:dlink:dir-412:* --output json")
    assert result.returncode == 0
    try:
        data = json.loads(result.stdout)
    except json.JSONDecodeError:
        assert False, "JSON output could not be decoded"
    else:
        assert len(data['result']) > 0, "No results returned"
        assert data['result'][0]['cpeName'] == 'cpe:2.3:h:dlink:dir-412:-:*:*:*:*:*:*:*'


# ------------------------------------------------------------------------------
@pytest.mark.cpesearch
def test_cpe_search_by_keyword(cli_runner):
    """Search CPE by regex in the CPE title and requests output as JSON"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cpe --keyword 'D-Link.*Dir-412' --output json")
    assert result.returncode == 0
    try:
        data = json.loads(result.stdout)
    except json.JSONDecodeError:
        assert False, "JSON output could not be decoded"
    else:
        assert len(data['result']) > 0, "No results returned"
        assert 'cpe:2.3:h:dlink:dir-412:-:*:*:*:*:*:*:*' in [cpe['cpeName'] for cpe in data['result']], "Expected CPE name not found"


# ------------------------------------------------------------------------------
@pytest.mark.cpesearch
def test_cpe_search_by_id_check_keys(cli_runner):
    """Search CPE by ID and requests output as JSON. Validate the presence of a list of keys"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cpe --cpe23 cpe:2.3:h:dlink:dir-412:* --output json")
    assert result.returncode == 0
    try:
        data = json.loads(result.stdout)
    except json.JSONDecodeError:
        assert False, "JSON output could not be decoded"
    else:
        assert len(data['result']) > 0, "No results returned"
        assert "cpeName" in data['result'][0], "cpeName Key not found in JSON data"
        assert "refs" in data['result'][0], "refs Key not found in JSON data"
        assert "titles" in data['result'][0], "titles Key not found in JSON data"
        assert "created" in data['result'][0], "created Key not found in JSON data"
        assert "cpeNameId" in data['result'][0], "cpeNameId Key not found in JSON data"
        assert "deprecated" in data['result'][0], "deprecated Key not found in JSON data"
        assert "lastModified" in data['result'][0], "lastModified Key not found in JSON data"

# --------------------------------------------------------------------------------
# /tests/unit/test_cve_search.py:
# --------------------------------------------------------------------------------
import json
import pytest


# ------------------------------------------------------------------------------
@pytest.mark.smoketest
@pytest.mark.cvesearch
def test_cve_search_by_id(cli_runner):
    """Search CVEs by ID and requests output as ID"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cve --cve CVE-1999-0001 --output id")
    assert result.returncode == 0
    assert result.stdout[:13] == 'CVE-1999-0001'


# ------------------------------------------------------------------------------
@pytest.mark.smoketest
@pytest.mark.cvesearch
def test_cve_search_by_id_json(cli_runner):
    """Search CVE by ID and requests output as JSON"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cve --cve CVE-1999-0001 --output json")
    assert result.returncode == 0
    try:
        data = json.loads(result.stdout)
    except json.JSONDecodeError:
        assert False, "JSON output could not be decoded"
    else:
        assert len(data['result']) > 0, "No results returned"
        assert data['result'][0]['id'] == 'CVE-1999-0001'


# ------------------------------------------------------------------------------
@pytest.mark.cvesearch
def test_cve_search_by_keyword(cli_runner):
    """Search CVE by regex in the summary and requests output as JSON"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cve --keyword 'ip_input.c.*BSD-derived TCP/IP.*crash or hang' --output json")
    assert result.returncode == 0
    try:
        data = json.loads(result.stdout)
    except json.JSONDecodeError:
        assert False, "JSON output could not be decoded"
    else:
        assert len(data['result']) > 0, "No results returned"
        assert data['result'][0]['id'] == 'CVE-1999-0001'


# ------------------------------------------------------------------------------
@pytest.mark.cvesearch
def test_cve_search_by_id_check_keys(cli_runner):
    """Search CVE by ID and requests output as JSON. Validate the presence of a list of keys"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cve --cve CVE-1999-0001 --output json")
    assert result.returncode == 0
    try:
        data = json.loads(result.stdout)
    except json.JSONDecodeError:
        assert False, "JSON output could not be decoded"
    else:
        assert len(data['result']) > 0, "No results returned"
        assert "id" in data['result'][0], "id Key not found in JSON data"
        assert "descriptions" in data['result'][0], "descriptions Key not found in JSON data"
        assert "references" in data['result'][0], "references Key not found in JSON data"
        assert "metrics" in data['result'][0], "metrics Key not found in JSON data"
        assert "published" in data['result'][0], "published Key not found in JSON data"
        assert "lastModified" in data['result'][0], "lastModified Key not found in JSON data"
        assert "weaknesses" in data['result'][0], "weaknesses Key not found in JSON data"
        assert "vulnStatus" in data['result'][0], "vulnStatus Key not found in JSON data"


@pytest.mark.cvesearch
def test_cve_search_by_several_ids(cli_runner):
    """Search CVE by IDs and requests output as JSON. Validate the presence of the search criteria"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cve --cve CVE-1999-0001 CVE-1999-0002 --output json")

    assert result.returncode == 0

    try:
        data = json.loads(result.stdout)
    except json.JSONDecodeError:
        assert False, "JSON output could not be decoded"
    else:
        assert 'search' in data, "search Key not found in JSON data"
        assert 'result' in data, "result Key not found in JSON data"
        assert len(data['result']) == 2, "Wrong number of results returned, expected 2"

@pytest.mark.cvesearch
def test_cve_search_by_epss_score(cli_runner):
    """Search CVE by epss-score and requests output as JSON. Validate the presence of the search criteria"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cve --epss-score-gt 0.00100 --output json")

    assert result.returncode == 0

    try:
        data = json.loads(result.stdout)
    except json.JSONDecodeError:
        assert False, "JSON output could not be decoded"
    else:
        assert 'search' in data, "search Key not found in JSON data"
        assert 'result' in data, "result Key not found in JSON data"

# --------------------------------------------------------------------------------
# /tests/unit/test_cwe_search.py:
# --------------------------------------------------------------------------------
import json
import pytest


# ------------------------------------------------------------------------------
@pytest.mark.smoketest
@pytest.mark.cwesearch
def test_cwe_search_by_id(cli_runner):
    """Search CWE by ID and requests output as ID"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cwe --cwe CWE-79 --output id")
    assert result.returncode == 0
    assert result.stdout[:2] == '79'


# ------------------------------------------------------------------------------
@pytest.mark.smoketest
@pytest.mark.cwesearch
def test_cwe_search_by_id_json(cli_runner):
    """Search CWE by ID and requests output as JSON"""

    result = cli_runner.runcommand("docker exec fastcve search --search-info cwe --cwe 79 --output json")
    assert result.returncode == 0
    try:
        data = json.loads(result.stdout)
    except json.JSONDecodeError:
        assert False, "JSON output could not be decoded"
    else:
        assert data['result'][0]['ID'] == '79'


# ------------------------------------------------------------------------------
# (truncated at the chunk boundary — the next test continues past this view)
# @pytest.mark.cwesearch
# def test_cwe_search_by_keyword(cli_runner):
#     """Search CWE by regex and requests output as JSON"""
37 | result = cli_runner.runcommand("docker exec fastcve search --search-info cwe --keyword 'does not neutralize.*page.*served.*users' --output json") 38 | assert result.returncode == 0 39 | try: 40 | data = json.loads(result.stdout) 41 | except json.JSONDecodeError: 42 | assert False, "JSON output could not be decoded" 43 | else: 44 | assert data['result'][0]['ID'] == '79' 45 | 46 | 47 | # ------------------------------------------------------------------------------ 48 | @pytest.mark.cwesearch 49 | def test_cwe_search_by_id_check_keys(cli_runner): 50 | """Search CWE by ID and requests output as JSON. Validate the presense of a list of keys""" 51 | 52 | result = cli_runner.runcommand("docker exec fastcve search --search-info cwe --cwe 79 --output json") 53 | assert result.returncode == 0 54 | try: 55 | data = json.loads(result.stdout) 56 | except json.JSONDecodeError: 57 | assert False, "JSON output could not be decoded" 58 | else: 59 | assert "ID" in data['result'][0], "ID Key not found in JSON data" 60 | assert "Name" in data['result'][0], "Name Key not found in JSON data" 61 | assert "Status" in data['result'][0], "Status Key not found in JSON data" 62 | assert "Description" in data['result'][0], "Description Key not found in JSON data" 63 | assert "Related_Weaknesses" in data['result'][0], "Related_Weaknesses Key not found in JSON data" 64 | assert "Related_Attack_Patterns" in data['result'][0], "Related_Attack_Patterns Key not found in JSON data" 65 | assert "Taxonomy_Mappings" in data['result'][0], "Taxonomy_Mappings Key not found in JSON data" 66 | assert "Potential_Mitigations" in data['result'][0], "Potential_Mitigations Key not found in JSON data" 67 | assert "Applicable_Platforms" in data['result'][0], "Applicable_Platforms Key not found in JSON data" 68 | 69 | 70 | @pytest.mark.cwesearch 71 | def test_cwe_search_by_several_ids_and_split_result(cli_runner): 72 | """Search CWE by IDs and requests output as JSON. 
Validate the presense of the search criteria""" 73 | 74 | result = cli_runner.runcommand("docker exec fastcve search --search-info cwe --cwe 77 78 79 --output json") 75 | 76 | assert result.returncode == 0 77 | 78 | try: 79 | data = json.loads(result.stdout) 80 | except json.JSONDecodeError: 81 | assert False, "JSON output could not be decoded" 82 | else: 83 | assert 'search' in data, "search Key not found in JSON data" 84 | assert 'result' in data, "result Key not found in JSON data" 85 | assert len(data['result']) == 3, "Wrong number of results returned, expected 3" 86 | -------------------------------------------------------------------------------- /vars: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | branch=$(git branch --show-current) 4 | export INP_ENV_NAME=${1:-test} 5 | export FCDB_HOME=$(pwd)/src 6 | export DOCKER_TAG=$branch 7 | export CI_BUILD_REF_NAME=$branch 8 | 9 | . ./src/config/setenv.sh 10 | 11 | export PYTHONPATH=${FCDB_HOME}/src 12 | --------------------------------------------------------------------------------