├── .github └── dependabot.yml ├── requirements.txt ├── README.md ├── .gitignore └── script └── fetch-alerts.py /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" 12 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aiohttp==3.12.13 2 | aiosignal==1.4.0 3 | async-timeout==4.0.3 4 | attrs==25.3.0 5 | backoff==2.2.1 6 | botocore==1.38.13 7 | certifi==2025.4.26 8 | cffi==1.17.1 9 | charset-normalizer==3.4.2 10 | Deprecated==1.2.18 11 | frozenlist==1.7.0 12 | gql==3.5.3 13 | graphql-core==3.2.6 14 | idna==3.10 15 | jmespath==1.0.1 16 | multidict==6.4.3 17 | psycopg2-binary==2.9.10 18 | pycparser==2.22 19 | PyGithub==2.6.1 20 | PyJWT==2.10.1 21 | PyNaCl==1.5.0 22 | python-dateutil==2.9.0.post0 23 | requests==2.32.4 24 | requests-toolbelt==1.0.0 25 | six==1.17.0 26 | urllib3==2.5.0 27 | websockets==15.0.1 28 | wrapt==1.17.2 29 | yarl==1.13.0 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Dependabot Dashboard 2 | 3 | A simple Python script to dump Dependabot alerts from all GitHub orgs and repos to PostgresDB. 
4 | 5 | Requires the following env variables to execute: 6 | - `DB_USER` 7 | - `DB_PASSWORD` 8 | - `DB_HOST` 9 | - `GH_HOST` 10 | - `GH_TOKEN` (needs full repo access to query internal repos) 11 | - `GH_ORG` 12 | 13 | The script needs to be run every day to visualize metrics over time. 14 | 15 | **Note:** This fork has been adapted to work on GitHub.com and GitHub Enterprise Cloud. 16 | 17 | ### Fork changes 18 | - Support GitHub Cloud instead of GitHub Enterprise Server 19 | - Added CVE details including CVSS score 20 | - Added `GH_ORG` env variable to query a specific organization -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /script/fetch-alerts.py: -------------------------------------------------------------------------------- 1 | from gql import gql, Client 2 | from gql.transport.aiohttp import AIOHTTPTransport 3 | import json 4 | from github import Github, Auth 5 | import psycopg2 6 | from datetime import date, datetime 7 | import os 8 | 9 | db_user = os.environ["DB_USER"] 10 | db_password = os.environ["DB_PASSWORD"] 11 | db_host = os.environ["DB_HOST"] 12 | gh_token = os.environ["GH_TOKEN"] 13 | gh_org_env = os.environ["GH_ORG"] 14 | 15 | connection = psycopg2.connect(user=db_user, 16 | password=db_password, 17 | host=db_host, 18 | port="5432", 19 | database="dependabot") 20 | 21 | def initialize_db(): 22 | try: 23 | cursor = connection.cursor() 24 | 25 | postgres_create_table = """ 26 | create table if not exists dependabot_alerts( 27 | snapshot timestamp, 28 | gh_repo varchar(100), 29 | gh_org varchar(30), 30 | created_at timestamp, 31 | fixed_at timestamp, 32 | alert_number int, 33 | state varchar(30), 34 | dismissed_at timestamp, 35 | dismiss_reason varchar(300), 36 | dismisser varchar(30), 37 | vuln_ghsa_id varchar(30), 38 | vuln_cvss real, 39 | vuln_identifier_type varchar(15), 40 | vuln_identifier_value varchar(30), 41 | vuln_severity varchar(15), 42 | vuln_summary text, 43 | vuln_package varchar(100), 44 | 
fix_pr_number int, 45 | fix_pr_title text, 46 | fix_merged_at timestamp 47 | ); 48 | """ 49 | cursor.execute(postgres_create_table) 50 | 51 | connection.commit() 52 | count = cursor.rowcount 53 | except (Exception, psycopg2.Error) as error: 54 | print("Failed to initialize dependabot_alerts table: ", error) 55 | 56 | def insert_into_db(alert, gh_org, gh_repo): 57 | try: 58 | snapshot_date = date.today() 59 | snapshot_timestamp = datetime.strptime( 60 | str(snapshot_date), "%Y-%m-%d").strftime("%Y-%m-%dT00:00:00Z") 61 | repo = gh_org + "/" + gh_repo 62 | org = gh_org 63 | created_at = alert.get("createdAt") 64 | fixed_at = alert.get("fixedAt") 65 | 66 | alert_number = alert.get("number") 67 | state = alert.get("state") 68 | dismissed_at = alert.get("dismissedAt") 69 | dismiss_reason = alert.get("dismissReason") 70 | dismisser = None 71 | 72 | if alert.get("dismisser") is not None: 73 | dismisser = alert.get("dismisser").get("login") 74 | 75 | vuln_ghsa_id = alert.get("securityVulnerability").get("advisory").get("ghsaId") 76 | 77 | 78 | vuln_cvss = None 79 | if alert.get("securityVulnerability").get("advisory").get("cvss") is not None: 80 | vuln_cvss = alert.get("securityVulnerability").get("advisory").get("cvss").get("score") 81 | 82 | vuln_identifier_type = None 83 | vuln_identifier_value = None 84 | if alert.get("securityVulnerability").get("advisory").get("identifiers"): 85 | for identifier in alert.get("securityVulnerability").get("advisory").get("identifiers"): 86 | if identifier.get("type") == "CVE": 87 | vuln_identifier_type = identifier.get("type") 88 | vuln_identifier_value = identifier.get("value") 89 | 90 | vuln_severity = alert.get("securityVulnerability").get("severity") 91 | vuln_summary = alert.get("securityVulnerability").get("advisory").get("summary") 92 | vuln_package = alert.get("securityVulnerability").get("package").get("name") 93 | 94 | fix_pr_number = None 95 | fix_pr_title = None 96 | fix_merged_at = None 97 | 98 | if 
alert.get("dependabotUpdate") is not None: 99 | if alert.get("dependabotUpdate").get("pullRequest") is not None: 100 | fix_pr_number = alert.get("dependabotUpdate").get("pullRequest").get("number") 101 | fix_pr_title = alert.get("dependabotUpdate").get("pullRequest").get("title") 102 | fix_merged_at = alert.get("dependabotUpdate").get("pullRequest").get("mergedAt") 103 | 104 | if state == "FIXED" or state == "DISMISSED": 105 | tmp_now = datetime.strptime( 106 | str(snapshot_date), "%Y-%m-%d") 107 | if fixed_at is not None: 108 | tmp_fixed_at = datetime.strptime(fixed_at, "%Y-%m-%dT%H:%M:%SZ") 109 | if tmp_fixed_at >= tmp_now: 110 | state = "OPEN" 111 | if dismissed_at is not None: 112 | tmp_dismissed_at = datetime.strptime(dismissed_at, "%Y-%m-%dT%H:%M:%SZ") 113 | if tmp_dismissed_at >= tmp_now: 114 | state = "OPEN" 115 | 116 | insert_query = """ 117 | insert into dependabot_alerts ( 118 | snapshot, 119 | gh_repo, 120 | gh_org, 121 | created_at, 122 | fixed_at, 123 | alert_number, 124 | state, 125 | dismissed_at, 126 | dismiss_reason, 127 | dismisser, 128 | vuln_ghsa_id, 129 | 130 | vuln_cvss, 131 | vuln_identifier_type, 132 | vuln_identifier_value, 133 | 134 | vuln_severity, 135 | vuln_summary, 136 | vuln_package, 137 | fix_pr_number, 138 | fix_pr_title, 139 | fix_merged_at 140 | ) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s); 141 | """ 142 | 143 | db_values = (snapshot_timestamp, repo, org, created_at, fixed_at, alert_number, state, dismissed_at, dismiss_reason, 144 | dismisser, vuln_ghsa_id, vuln_cvss, vuln_identifier_type, vuln_identifier_value, vuln_severity, vuln_summary, vuln_package, fix_pr_number, fix_pr_title, fix_merged_at) 145 | cursor = connection.cursor() 146 | cursor.execute(insert_query, db_values) 147 | connection.commit() 148 | except (Exception, psycopg2.Error) as error: 149 | print("Error when inserting into table: ", error) 150 | print("DEBUG: ", str(alert)) 151 | quit() 152 | 153 | def 
get_repos(gh_token): 154 | auth = Auth.Token(gh_token) 155 | g = Github(auth=auth) 156 | repos = [] 157 | 158 | org = g.get_organization(gh_org_env) 159 | for repo in org.get_repos(): 160 | if not repo.archived: 161 | repos.append(repo.full_name) 162 | print("Repo: " + str(repo.full_name)) 163 | 164 | return repos 165 | 166 | 167 | def get_alerts(gh_token, gh_org, gh_repo): 168 | # Handle pagination 169 | headers = { 170 | "Authorization": "Bearer " + gh_token, 171 | "Accept": "application/vnd.github.v4.idl" 172 | } 173 | 174 | # Select your transport with a defined url endpoint 175 | transport = AIOHTTPTransport("https://api.github.com/graphql", headers=headers) 176 | # Create a GraphQL client using the defined transport 177 | client = Client(transport=transport, fetch_schema_from_transport=False) 178 | 179 | # Provide a GraphQL query - first 100 throws erros from time to time* 180 | query = gql( 181 | """ 182 | { 183 | repository(name: "%s", owner: "%s") { 184 | vulnerabilityAlerts(first:100) { 185 | pageInfo { 186 | startCursor 187 | hasNextPage 188 | endCursor 189 | } 190 | nodes { 191 | createdAt 192 | fixedAt 193 | number 194 | dependabotUpdate { 195 | pullRequest { 196 | number 197 | title 198 | mergedAt 199 | } 200 | } 201 | state 202 | dismissedAt 203 | dismisser { 204 | login 205 | } 206 | dismissReason 207 | securityVulnerability { 208 | severity 209 | advisory { 210 | cvss{ 211 | score 212 | } 213 | ghsaId 214 | identifiers{ 215 | type 216 | value 217 | } 218 | summary 219 | } 220 | package { 221 | name 222 | } 223 | } 224 | } 225 | } 226 | } 227 | rateLimit { 228 | limit 229 | cost 230 | remaining 231 | resetAt 232 | } 233 | } 234 | 235 | """ % (gh_repo, gh_org) 236 | ) 237 | 238 | while True: 239 | # Execute the query on the transport 240 | result = client.execute(query) 241 | 242 | print(json.dumps(result.get("rateLimit"))) 243 | 244 | for alert in result.get("repository").get("vulnerabilityAlerts").get("nodes"): 245 | insert_into_db(alert, gh_org, 
gh_repo) 246 | 247 | nextpage = result.get("repository").get( 248 | "vulnerabilityAlerts").get("pageInfo").get("hasNextPage") 249 | if not nextpage: 250 | break 251 | 252 | endcursor = result.get("repository").get( 253 | "vulnerabilityAlerts").get("pageInfo").get("endCursor") 254 | query = gql( 255 | """ 256 | { 257 | repository(name: "%s", owner: "%s") { 258 | vulnerabilityAlerts(first:100, after:"%s") { 259 | pageInfo { 260 | startCursor 261 | hasNextPage 262 | endCursor 263 | } 264 | nodes { 265 | createdAt 266 | fixedAt 267 | number 268 | dependabotUpdate { 269 | pullRequest { 270 | number 271 | title 272 | mergedAt 273 | } 274 | } 275 | state 276 | dismissedAt 277 | dismisser { 278 | login 279 | } 280 | dismissReason 281 | securityVulnerability { 282 | severity 283 | advisory { 284 | cvss{ 285 | score 286 | } 287 | ghsaId 288 | identifiers{ 289 | type 290 | value 291 | } 292 | summary 293 | } 294 | package { 295 | name 296 | } 297 | } 298 | } 299 | } 300 | } 301 | rateLimit { 302 | limit 303 | cost 304 | remaining 305 | resetAt 306 | } 307 | } 308 | """ % (gh_repo, gh_org, endcursor) 309 | ) 310 | 311 | return [] 312 | 313 | 314 | initialize_db() 315 | repos = get_repos(gh_token=gh_token) 316 | 317 | for repo in repos: 318 | print("[+] Analyzing " + repo) 319 | org = repo.split("/")[0] 320 | repo_name = repo.split("/")[1] 321 | 322 | get_alerts(gh_token, org, repo_name) 323 | --------------------------------------------------------------------------------