├── app
├── tests
│ ├── __init__.py
│ └── test_snyk_scm_refresh.py
├── utils
│ ├── __init__.py
│ ├── github_utils.py
│ └── snyk_helper.py
├── __init__.py
├── models.py
├── snyk_repo.py
├── gh_repo.py
└── app.py
├── _version.py
├── .gitignore
├── requirements-dev.txt
├── requirements.txt
├── .github
├── CODEOWNERS
├── workflows
│ ├── jira.yml
│ ├── ci.yml
│ └── snyk_security.yml
└── CONTRIBUTING.md
├── .snyk
├── snyk_scm_refresh.py
├── Migration_Guide.md
├── README.md
├── common.py
├── LICENSE
└── .pylintrc
/app/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/_version.py:
--------------------------------------------------------------------------------
1 | __version__ = "1.4.3"
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.csv
2 | *.log
3 | __pycache__*
4 | .venv
5 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | pytest
2 | pytest-mock
3 | pylint==2.4.4
4 |
--------------------------------------------------------------------------------
/app/__init__.py:
--------------------------------------------------------------------------------
1 | """initialize app modules"""
2 | import common
3 | from .app import run
4 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | pysnyk==0.9.9
2 | PyGithub==1.47
3 | requests==2.28.0
4 | urllib3==1.26.5
5 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # CS Engineering will be required for a review on every PR
2 | @snyk-labs/cs-engineers
3 |
--------------------------------------------------------------------------------
/.github/workflows/jira.yml:
--------------------------------------------------------------------------------
1 | name: Github Issues to Jira Issues
2 | on:
3 | issues:
4 | types: [opened, closed, labeled, unlabeled]
5 | issue_comment:
6 | types: [created]
7 | jobs:
8 | sync:
9 | name: Sync Items
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Sync Github Issues to Jira
13 | uses: jordansilva/github-action-issue-to-jira@v1.1.2
14 | env:
15 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
16 | with:
17 | jiraHost: ${{ secrets.CSENG_JIRA_HOST }}
18 | jiraUsername: ${{ secrets.CSENG_JIRA_USERNAME }}
19 | jiraPassword: ${{ secrets.CSENG_JIRA_PASSWORD }}
20 | project: ${{ secrets.CSENG_JIRA_PROJECTKEY }}
21 |
--------------------------------------------------------------------------------
/.snyk:
--------------------------------------------------------------------------------
1 | # Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.
2 | version: v1.14.1
3 | # ignores vulnerabilities until expiry date; change duration by modifying expiry date
4 | ignore:
5 | 'snyk:lic:pip:chardet:LGPL-2.1':
6 | - '*':
7 | reason: None Given
8 | expires: 2100-08-26T19:21:20.172Z
9 | 'snyk:lic:pip:certifi:MPL-2.0':
10 | - '*':
11 | reason: None Given
12 | expires: 2100-06-12T19:00:42.674Z
13 | 'snyk:lic:pip:pygithub:LGPL-3.0':
14 | - '*':
15 | reason: None Given
16 | expires: 2100-01-31T19:21:10.343Z
17 | SNYK-PYTHON-PYYAML-590151:
18 | - '*':
19 | reason: None Given
20 | expires: 2021-01-31T19:20:40.359Z
21 | patch: {}
22 |
--------------------------------------------------------------------------------
/app/models.py:
--------------------------------------------------------------------------------
1 | """custom data objects"""
2 | from dataclasses import dataclass
3 | from typing import List
4 |
5 |
@dataclass
class ImportFile:
    """File being imported"""
    # repo-relative path of the manifest file submitted to the import API
    path: str
10 |
11 |
@dataclass
class PendingDelete:
    """Projects needing deletion"""
    project_id: str    # Snyk project id to delete
    project_name: str  # human-readable project name
    org_id: str        # Snyk org that owns the project
    org_name: str      # Snyk org display name
    pending_repo: str  # repo associated with the pending delete — presumed from name
20 |
21 |
@dataclass
class ImportStatus:
    """Import job response"""
    # pylint: disable=too-many-instance-attributes
    import_job_id: str      # id of the Snyk import job
    import_status_url: str  # URL reporting the import job's status
    org_id: str             # Snyk org the import runs in
    org_name: str           # Snyk org display name
    repo_owner: str         # SCM owner of the repo being imported
    repo_name: str          # repo name (without owner)
    files: List[ImportFile]                       # manifests submitted with this import
    pending_project_deletes: List[PendingDelete]  # projects queued for deletion (see PendingDelete)
34 |
# pylint: disable=too-many-instance-attributes
@dataclass
class GithubRepoStatus:
    """Status of a Github repository"""
    response_code: str     # status code from the Github lookup (stored as str — TODO confirm)
    response_message: str  # accompanying status/error message
    repo_name: str         # repo name (without owner)
    org_id: str            # Snyk org id the repo's projects belong to
    repo_owner: str        # SCM owner of the repo
    repo_full_name: str    # "owner/name" form
    repo_default_branch: str  # default branch reported by Github
    archived: bool            # True if the repo is archived on Github
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | # This is a basic workflow to help you get started with Actions
2 |
3 | name: ci
4 |
5 | # Controls when the workflow will run
6 | on:
7 | # Triggers the workflow on push or pull request events but only for the "main" branch
8 | push:
9 | branches: [ "master" ]
10 | pull_request:
11 | branches: [ "master" ]
12 |
13 | # Allows you to run this workflow manually from the Actions tab
14 | workflow_dispatch:
15 |
16 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel
17 | jobs:
18 | build_test:
19 | # The type of runner that the job will run on
20 | runs-on: ubuntu-latest
21 | # Steps represent a sequence of tasks that will be executed as part of the job
22 | steps:
23 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
24 | - uses: actions/checkout@v3
25 | - uses: actions/setup-python@v4
26 | with:
27 | python-version: '3.9'
28 | cache: 'pip'
29 | - run: |
30 | pip install -r requirements.txt --user
31 | pip install -r requirements-dev.txt --user
32 | - name: Test
33 | id: test
34 | run: pytest
35 | - name: Pylint
36 | id: pylint
37 | run: pylint app --ignore-patterns=test_
38 |
--------------------------------------------------------------------------------
/app/utils/github_utils.py:
--------------------------------------------------------------------------------
1 | """
2 | methods for creating github or
3 | github enterprise clients
4 | """
5 | from github import Github
6 | import common
7 |
# pylint: disable=invalid-name
def create_github_client(GITHUB_TOKEN, VERIFY_TLS):
    """Build a PyGithub client authenticated with GITHUB_TOKEN.

    VERIFY_TLS is passed straight through to PyGithub's TLS verification.
    """
    try:
        client = Github(login_or_token=GITHUB_TOKEN, verify=VERIFY_TLS)
    except KeyError as err:
        # NOTE(review): Github() is not expected to raise KeyError itself;
        # this guard looks unreachable — confirm against PyGithub behavior
        raise RuntimeError(
            "Failed to initialize GitHub client because GITHUB_TOKEN is not set!"
        ) from err
    return client
17 |
def create_github_enterprise_client(GITHUB_ENTERPRISE_TOKEN, GITHUB_ENTERPRISE_HOST, VERIFY_TLS):
    """Build a PyGithub client against a Github Enterprise host's v3 API.

    VERIFY_TLS is passed straight through to PyGithub's TLS verification.
    """
    base_url = f"https://{GITHUB_ENTERPRISE_HOST}/api/v3"
    try:
        client = Github(base_url=base_url,
                        login_or_token=GITHUB_ENTERPRISE_TOKEN, verify=VERIFY_TLS)
    except KeyError as err:
        # NOTE(review): Github() is not expected to raise KeyError itself;
        # this guard looks unreachable — confirm against PyGithub behavior
        raise RuntimeError(
            "Failed to initialize GitHub client because GITHUB_ENTERPRISE_TOKEN is not set!"
        ) from err
    return client
27 |
def get_github_client(origin):
    """Return the shared github client matching an integration type.

    Args:
        origin: integration type, 'github' or 'github-enterprise'.

    Returns:
        The module-level client held on ``common`` for that origin.

    Raises:
        ValueError: if origin is not a supported github integration type.
            (ValueError is a subclass of the generic Exception raised
            previously, so existing broad handlers keep working.)
    """
    if origin == 'github':
        return common.gh_client
    if origin == 'github-enterprise':
        return common.gh_enterprise_client
    raise ValueError(f"could not get github client for type: {origin}")
37 |
def get_github_repo(gh_client, repo_name):
    """Fetch a github repo by name, falling back to the user's own repos.

    The direct lookup handles "owner/repo" full names; if it fails for
    any reason the name is retried against the authenticated user's
    repositories. The fallback is deliberately broad, but no longer a
    bare except, so SystemExit/KeyboardInterrupt propagate.
    """
    try:
        return gh_client.get_repo(repo_name)
    # pylint: disable=broad-except
    except Exception:
        return gh_client.get_user().get_repo(repo_name)
45 |
--------------------------------------------------------------------------------
/.github/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | ## Contributor Agreement
4 | A pull-request will only be considered for merging into the upstream codebase after you have signed our [contributor agreement](../Contributor-Agreement.md), assigning us the rights to the contributed code and granting you a license to use it in return. If you submit a pull request, you will be prompted to review and sign the agreement with one click (we use [CLA assistant](https://cla-assistant.io/)).
5 |
6 | ## Commit messages
7 |
8 | Commit messages must follow the [Angular-style](https://github.com/angular/angular.js/blob/master/CONTRIBUTING.md#commit-message-format) commit format (but excluding the scope).
9 |
10 | i.e:
11 |
12 | ```text
13 | fix: minified scripts being removed
14 |
15 | Also includes tests
16 | ```
17 |
18 | This will allow for the automatic changelog to generate correctly.
19 |
20 | ### Commit types
21 |
22 | Must be one of the following:
23 |
24 | * **feat**: A new feature
25 | * **fix**: A bug fix
26 | * **docs**: Documentation only changes
27 | * **test**: Adding missing tests
28 | * **chore**: Changes to the build process or auxiliary tools and libraries such as documentation generation
29 | * **refactor**: A code change that neither fixes a bug nor adds a feature
30 | * **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc)
31 | * **perf**: A code change that improves performance
32 |
33 | To release a major you need to add `BREAKING CHANGE: ` to the start of the body and the detail of the breaking change.
34 |
35 | ## Code standards
36 |
37 | Ensure that your code adheres to the included `.pylintrc` config by running `pylint app`.
38 |
39 | ## Sending pull requests
40 |
41 | - new command line options are generally discouraged unless there's a *really* good reason
42 | - add tests for newly added code (and try to mirror directory and file structure if possible)
43 | - spell check
44 | - PRs will not be code reviewed unless all tests are passing
45 |
46 | *Important:* when fixing a bug, please commit a **failing test** first so that Travis CI (or I can) can show the code failing. Once that commit is in place, then commit the bug fix, so that we can test *before* and *after*.
47 |
48 | Remember that you're developing for multiple platforms and versions of node, so if the tests pass on your Mac or Linux or Windows machine, it *may* not pass elsewhere.
49 |
--------------------------------------------------------------------------------
/snyk_scm_refresh.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Keep Snyk projects in sync with their corresponding SCM repositories
4 | """
5 | import logging
6 | import sys
7 | from os import getenv
8 | import common
9 | from app import run
10 |
11 | if __name__ == "__main__":
12 |
13 | if common.ARGS.audit_large_repos:
14 | print("\n****** AUDIT LARGE REPOS MODE ******\n")
15 | print(f"check {common.LARGE_REPOS_AUDIT_RESULTS_FILE.name} after script completes\n")
16 | elif common.ARGS.dry_run:
17 | print("\n****** DRY-RUN MODE ******\n")
18 | for arg in vars(common.ARGS):
19 | if any(arg in x for x in ['sca', 'container', 'iac', 'code']):
20 | print(f"{arg}={common.toggle_to_bool(getattr(common.ARGS, arg))}")
21 | else:
22 | print(f"{arg}={getattr(common.ARGS, arg)}")
23 | print("---")
24 |
25 | if getenv("SNYK_TOKEN") is None:
26 | print("token not set at $SNYK_TOKEN")
27 | sys.exit(1)
28 |
29 | if common.GITHUB_TOKEN is None and common.GITHUB_ENTERPRISE_TOKEN is None:
30 | print("1 of $GITHUB_TOKEN (GitHub.com) or $GITHUB_ENTERPRISE_TOKEN (GitHub Enteprise) "
31 | "must be set")
32 | print("If using $GITHUB_ENTERPRISE_TOKEN, you must also set $GITHUB_ENTERPRISE_HOST")
33 | sys.exit(1)
34 |
35 | if common.GITHUB_ENTERPRISE_HOST is not None and common.GITHUB_ENTERPRISE_TOKEN is None:
36 | print("$GITHUB_ENTERPRISE_TOKEN must be set when using $GITHUB_ENTERPRISE_HOST")
37 | sys.exit(1)
38 |
39 | if common.GITHUB_ENTERPRISE_TOKEN is not None and common.GITHUB_ENTERPRISE_HOST is None:
40 | print("$GITHUB_ENTERPRISE_HOST must be set when using $GITHUB_ENTERPRISE_TOKEN")
41 | sys.exit(1)
42 |
43 | if common.GITHUB_ENTERPRISE_TOKEN is not None and common.GITHUB_ENTERPRISE_HOST is not None:
44 | GITHUB_ENTERPRISE_TOKEN_HIDDEN = \
45 | f"****{common.GITHUB_ENTERPRISE_TOKEN[len(common.GITHUB_ENTERPRISE_TOKEN)-4:]}"
46 | sys.stdout.write("Using GHE: ")
47 | print(f"{GITHUB_ENTERPRISE_TOKEN_HIDDEN}@{common.GITHUB_ENTERPRISE_HOST}")
48 |
49 | if common.GITHUB_TOKEN is not None:
50 | GITHUB_TOKEN_HIDDEN = f"****{common.GITHUB_TOKEN[len(common.GITHUB_TOKEN)-4:]}"
51 | sys.stdout.write("Using GH: ")
52 | print(f"{GITHUB_TOKEN_HIDDEN}")
53 |
54 | print("---")
55 | if common.ARGS.debug:
56 | logging.basicConfig(filename=common.LOG_FILENAME, level=logging.DEBUG, filemode="w")
57 | else:
58 | logging.basicConfig(filename=common.LOG_FILENAME, level=logging.INFO, filemode="w")
59 |
60 | run()
61 |
--------------------------------------------------------------------------------
/.github/workflows/snyk_security.yml:
--------------------------------------------------------------------------------
1 | # This is a basic workflow to help you get started with Actions
2 |
3 | name: snyk_security
4 |
5 | # Controls when the workflow will run
6 | on:
7 | # Triggers the workflow on push at "main" branch for security scans using secret snyk token
8 | push:
9 | branches: [ "master" ]
10 | pull_request:
11 | branches: [ "master" ]
12 | # Allows you to run this workflow manually from the Actions tab
13 | workflow_dispatch:
14 |
15 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel
16 | jobs:
17 | snyk_test:
18 | if: |
19 | github.event_name == 'push' || github.event_name == 'workflow_dispatch' ||
20 | (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository)
21 | permissions:
22 | contents: read # for actions/checkout to fetch code
23 | security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
24 | runs-on: ubuntu-latest
25 | # Steps represent a sequence of tasks that will be executed as part of the job
26 | steps:
27 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
28 | - uses: actions/checkout@v3
29 | # setup python for downloading package dependencies
30 | - uses: actions/setup-python@v4
31 | with:
32 | python-version: '3.9'
33 | cache: 'pip'
34 | - run: |
35 | pip install -r requirements.txt --user
36 | pip install -r requirements-dev.txt --user
37 | - name: Run Snyk to check for vulnerabilities
38 | uses: snyk/actions/python@master
39 | continue-on-error: true # To make sure that SARIF upload gets called
40 | env:
41 | SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
42 | with:
43 | args: --all-projects --sarif-file-output=snyk.sarif
44 | - name: Upload result to GitHub Code Scanning
45 | uses: github/codeql-action/upload-sarif@v2
46 | with:
47 | sarif_file: snyk.sarif
48 | snyk_monitor:
49 | if: |
50 | github.event_name == 'push' || github.event_name == 'workflow_dispatch' ||
51 | (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository)
52 | permissions:
53 | contents: read # for actions/checkout to fetch code
54 | security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
55 | needs: snyk_test
56 | runs-on: ubuntu-latest
57 | # Steps represent a sequence of tasks that will be executed as part of the job
58 | steps:
59 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
60 | - uses: actions/checkout@v3
61 | - name: Run Snyk to check for vulnerabilities
62 | uses: snyk/actions/python@master
63 | continue-on-error: true # To make sure that SARIF upload gets called
64 | env:
65 | SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
66 | with:
67 | command: monitor
68 | args: --all-projects
69 |
--------------------------------------------------------------------------------
/Migration_Guide.md:
--------------------------------------------------------------------------------
1 | This guide aims to ease up the transition between working with the deprecated snyk-scm-refresh tool and the snyk-api-import tool.
2 |
3 | # Some Differences
4 | ## Language
5 | snyk-scm-refresh was written in `Python` whereas snyk-api-import is written in `Typescript`
6 |
7 | snyk-scm-refresh was run using either `Python` or a standalone executable whereas snyk-api-import can be run using `npm or yarn` or a standalone executable. Please refer to [Snyk's Public Documentation](https://docs.snyk.io/snyk-api-info/other-tools/tool-snyk-api-import#installation) for more information.
8 |
9 | # Migration
10 |
11 | ## Detecting and Importing New Manifests from a monitored repository
12 | Use Snyk-API-Import tool's `import` command following the [Kicking off an Import Section](https://github.com/snyk-tech-services/snyk-api-import/blob/master/docs/import.md). The recommendation is to run this tool on a cronjob or on an event trigger to kick off the re-importing of repos into Snyk, which will detect and import the new manifests.
13 |
14 | Alternatively, use the Snyk-API-Import tool's `sync` command following the [Sync: detecting changes in monitored repos and updating Snyk projects Section](https://github.com/snyk-tech-services/snyk-api-import/blob/master/docs/sync.md). Any manifests that do not already exist in Snyk will be imported into Snyk using this `sync` command. (Note: by default, the sync command will only detect and sync changes in manifest files supported by Snyk Opensource. To sync files for other Snyk products, specify the appropriate Snyk product using the `--snykProduct` flag).
15 |
16 | ### Expected Result
17 | The result will be the addition of the new manifest files within Snyk.
18 |
19 | ## Removing Projects for manifests that no longer exist within a monitored repository
20 | Use the Snyk-API-Import tool's `sync` command following the [Sync: detecting changes in monitored repos and updating Snyk projects Section](https://github.com/snyk-tech-services/snyk-api-import/blob/master/docs/sync.md). Any manifests that no longer exist will be **deactivated** in Snyk and not **deleted**. Unlike Deletion, Deactivation will ensure that the historical data for that manifest file will remain in Snyk, whereas a Deletion will lead to permanent data loss for the deleted manifest file(s). (Note: by default, the sync command will only detect and sync changes in manifest files supported by Snyk Opensource. To sync files for other Snyk products, specify the appropriate Snyk product using the `--snykProduct` flag).
21 |
22 | ### Expected Result
23 | The result will be the deactivation of the removed manifest file(s) within Snyk. Note: You will have to delete the deactivated projects if you wish to completely remove them from Snyk, which will result in the permanent loss of data for these projects.
24 |
25 | ## Detect and update manifest file name changes and/or movement within a monitored repository (Rename or moving a manifest file within a monitored repository)
26 | Use the Snyk-API-Import tool's `sync` command following the [Sync: detecting changes in monitored repos and updating Snyk projects Section](https://github.com/snyk-tech-services/snyk-api-import/blob/master/docs/sync.md). If an imported repo's manifest file is re-named or moved, any manifest files previously imported will become broken projects in Snyk and therefore deactivated by sync command. However, the sync command will also properly re-import the repo with the appropriate repo name change along with a reimport of the files to properly follow the new repo name.
27 |
28 | ### Expected Result
29 | The result will be the deactivation of the projects created during initial import, but a re-import job will trigger, resulting in displaying the projects with the correct name/path. Note: You will have to delete any deactivated projects if you wish to completely remove them from Snyk, which will result in the permanent loss of data for these projects.
30 |
31 | ## Detect and update default branch for a monitored repository (Rename or Switching to Another Branch)
32 | Use the Snyk-API-Import tool's `sync` command following the [Sync: detecting changes in monitored repos and updating Snyk projects Section](https://github.com/snyk-tech-services/snyk-api-import/blob/master/docs/sync.md). If an imported repo's default branch is re-named, any manifest files previously imported will become broken projects in Snyk and therefore deactivated by sync command. However, the sync command will also properly re-import the repo with the appropriate repo name change along with a reimport of the files to properly follow the new repo name.
33 |
34 | ### Expected Result
35 | The result will be the deactivation of the projects created during initial import, but a re-import job will trigger, resulting in displaying the projects with the renamed default branch.
36 |
37 | ## Detect whether a monitored repo has been archived
38 | Use the Snyk-API-Import tool's `sync` command following the [Sync: detecting changes in monitored repos and updating Snyk projects Section](https://github.com/snyk-tech-services/snyk-api-import/blob/master/docs/sync.md). If a monitored repo is archived, the `sync` command will deactivate the projects within Snyk.
39 |
40 | ### Expected Result
41 | The result will be the deactivation of the projects within Snyk.
42 |
43 | ## Handling of Large Repositories
44 | For sufficiently large repositories, though, Github truncates the API response. When a truncated Github response is detected, this tool will perform a shallow clone of the repository's default branch.
45 |
46 | ### Expected Result
47 | The result will be the successful import of large repositories
48 |
49 | ## Detect deleted repos
50 | Today this is not supported by the snyk-api-import tool. Please refer to this [section](https://github.com/snyk-tech-services/snyk-api-import/blob/master/docs/sync.md#known-limitations) for understanding known limitations.
51 |
52 |
53 |
--------------------------------------------------------------------------------
/app/snyk_repo.py:
--------------------------------------------------------------------------------
1 | """
2 | data and methods for managing
3 | snyk projects from the same repository
4 | """
5 | import sys
6 | import re
7 | import snyk
8 | import common
9 | from app.gh_repo import (
10 | get_repo_manifests,
11 | passes_manifest_filter
12 | )
13 | import app
14 |
class SnykRepo():
    """One SCM repository and the Snyk projects imported from it.

    Bundles the identifying metadata of a repo (Snyk org, integration,
    origin, branch) with the list of Snyk project dicts for that repo,
    and exposes the sync operations used by a refresh run: importing new
    manifests, deleting stale ones, activating/deactivating projects,
    and updating the monitored branch.
    """
    # pylint: disable=too-many-arguments
    def __init__(
            self,
            full_name: str,
            org_id: str,
            org_name: str,
            integration_id: str,
            origin: str,
            branch: str,
            snyk_projects: list
    ):
        # full_name: repo full name, e.g. "owner/repo"
        # org_id / org_name: Snyk organization the projects live in
        # integration_id: Snyk SCM integration id used for imports
        # origin: integration type, 'github' or 'github-enterprise'
        # branch: default branch currently recorded in Snyk
        # snyk_projects: project dicts belonging to this repo
        self.full_name = full_name
        self.org_id = org_id
        self.org_name = org_name
        self.integration_id = integration_id
        self.origin = origin
        self.branch = branch
        self.snyk_projects = snyk_projects

    def __repr__(self):
        # one attribute per line; used for debug/log output
        return (
            f"{self.full_name}" + "\n"
            f"{self.org_id}" + "\n"
            f"{self.org_name}" + "\n"
            f"{self.integration_id}" + "\n"
            f"{self.origin}" + "\n"
            f"{self.branch}" + "\n"
            f"{self.snyk_projects}")

    def __getitem__(self, item):
        # allow dict-style access to attributes, e.g. repo["org_id"]
        return self.__dict__[item]

    def get_projects(self):
        """ return list of projects for this repo """
        return self.snyk_projects

    def has_snyk_code(self):
        """ returns true if snyk already has a
        snyk code project for this repo """
        has_snyk_code = False
        for snyk_project in self.snyk_projects:
            # "sast" is the project type Snyk uses for Snyk Code analysis
            if snyk_project["type"] == "sast":
                has_snyk_code = True
                break
        return has_snyk_code

    def add_new_manifests(self, dry_run):
        """Find manifests present in the SCM repo but not yet in Snyk and
        import them; in dry-run mode, only print what would be imported.

        Returns the import API responses (empty when dry_run or when
        there is nothing new to import).
        """
        import_response = []
        files = []

        gh_repo_manifests = get_repo_manifests(self.full_name, self.origin, self.has_snyk_code())

        for gh_repo_manifest in gh_repo_manifests:
            # queue only manifests no existing project already covers
            if gh_repo_manifest not in {sp['manifest'] for sp in self.snyk_projects}:
                files.append(dict({"path": gh_repo_manifest}))

        # if there are files to add, then import them
        if not dry_run:
            if len(files) > 0:
                import_response = app.utils.snyk_helper.import_manifests(
                    self.org_id,
                    self.full_name,
                    self.integration_id,
                    files)
        else:
            for file in files:
                import_message = ""
                # code-analysis targets get a different dry-run message
                if re.match(common.MANIFEST_PATTERN_CODE, file["path"]):
                    import_message = "would trigger code analysis via"
                else:
                    import_message = "would import"

                app.utils.snyk_helper.app_print(self.org_name,
                                                self.full_name,
                                                f"{import_message}: {file['path']}")
        return import_response

    def delete_stale_manifests(self, dry_run):
        """ delete snyk projects for which the corresponding SCM file no longer exists """
        result = []
        gh_repo_manifests = get_repo_manifests(self.full_name, self.origin, True)
        for snyk_project in self.snyk_projects:
            # sast projects are not file-backed, and manifests excluded by
            # the filter are ignored; anything else missing from the repo
            # is considered stale
            if (snyk_project["type"] != "sast" and
                    passes_manifest_filter(snyk_project["manifest"]) and
                    snyk_project["manifest"] not in gh_repo_manifests):
                # delete project, append on success
                if not dry_run:
                    try:
                        app.utils.snyk_helper.delete_snyk_project(snyk_project["id"],
                                                                  snyk_project["org_id"])
                        result.append(snyk_project)
                    except snyk.errors.SnykNotFoundError:
                        print(f" - Project {snyk_project['id']} not found" \
                              f" in org {snyk_project['org_id']}")
                # NOTE(review): unlike delete_manifests, the dry-run path does
                # not append, so a dry run reports no stale deletions —
                # confirm this asymmetry is intended
        return result

    def delete_manifests(self, dry_run):
        """ delete all snyk projects corresponding to a repo """
        result = []
        for snyk_project in self.snyk_projects:
            # delete project, append on success; in dry-run mode every
            # project is reported as if deleted
            if not dry_run:
                try:
                    app.utils.snyk_helper.delete_snyk_project(snyk_project["id"],
                                                              snyk_project["org_id"])
                    result.append(snyk_project)
                except snyk.errors.SnykNotFoundError:
                    print(f" - Project {snyk_project['id']} not found" \
                          f" in org {snyk_project['org_id']}")
            else:
                result.append(snyk_project)
        return result


    def deactivate_manifests(self, dry_run):
        """ deactivate all snyk projects corresponding to a repo """
        result = []
        # only currently-monitored projects can be deactivated
        for snyk_project in [x for x in self.snyk_projects if x["is_monitored"]]:
            # deactivate project, append on success; dry-run reports all
            if not dry_run:
                try:
                    app.utils.snyk_helper.deactivate_snyk_project(snyk_project["id"],
                                                                  snyk_project["org_id"])
                    result.append(snyk_project)
                except snyk.errors.SnykNotFoundError:
                    print(f" - Project {snyk_project['id']} not found" \
                          f" in org {snyk_project['org_id']}")
            else:
                result.append(snyk_project)
        return result

    def activate_manifests(self, dry_run):
        """ activate all snyk projects corresponding to a repo """
        result = []
        # only currently-unmonitored projects can be reactivated
        for snyk_project in [x for x in self.snyk_projects if not x["is_monitored"]]:
            # activate project, append on success; dry-run reports all
            if not dry_run:
                try:
                    app.utils.snyk_helper.activate_snyk_project(snyk_project["id"],
                                                                snyk_project["org_id"])
                    result.append(snyk_project)
                except snyk.errors.SnykNotFoundError:
                    print(f" - Project {snyk_project['id']} not found" \
                          f" in org {snyk_project['org_id']}")
            else:
                result.append(snyk_project)
        return result

    def update_branch(self, new_branch_name, dry_run):
        """ update the branch for all snyk projects for this repo """
        result = []
        for (i, snyk_project) in enumerate(self.snyk_projects):
            if snyk_project["branch"] != new_branch_name:
                if not dry_run:
                    # single-line console progress indicator
                    sys.stdout.write(f"\r - {i+1}/{len(self.snyk_projects)}")
                    sys.stdout.flush()
                    try:
                        app.utils.snyk_helper.update_project_branch(snyk_project["id"],
                                                                    snyk_project["name"],
                                                                    snyk_project["org_id"],
                                                                    new_branch_name)
                    except snyk.errors.SnykNotFoundError:
                        print(f" - Project {snyk_project['id']} not found" \
                              f" in org {snyk_project['org_id']}")

                result.append(snyk_project)

        sys.stdout.write("\r")
        self.branch = new_branch_name
        return result
190 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 |
3 | # snyk-scm-refresh
4 |
5 | # ⛔️ THIS REPOSITORY IS ARCHIVED.
6 |
7 | **This repository is archived and will not receive any updates or accept issues and pull requests. Please make use of the snyk-api-import tool instead of snyk-scm-refresh. The snyk-api-import tool benefits from longer-term support and covers the majority of use cases that scm-refresh does. You can follow the migration guide to help you make the transition. This repo will be archived as of October 1st 2023.**
8 |
9 |
15 |
16 | ### Description
17 |
18 | Keeps Snyk projects in sync with their associated Github repos
19 |
20 | For repos with at least 1 project already in Snyk:
21 | - Detect and import new manifests
22 | - Remove projects for manifests that no longer exist
23 | - Update projects when a repo has been renamed
24 | - Detect and update default branch change (not renaming)
25 | - Enable Snyk Code analysis for repos
26 | - Detect deleted repos and log for review
27 |
28 |
29 |
30 | **STOP NOW IF ANY OF THE FOLLOWING ARE TRUE**
31 | - Monitoring non-default branches
32 | - Using an SCM other than Github.com or Github Enterprise Server
33 |
34 | ### Usage
35 | ```
36 | usage: snyk_scm_refresh.py [-h] [--org-id ORG_ID] [--repo-name REPO_NAME] [--sca {on,off}]
37 | [--container {on,off}] [--iac {on,off}] [--code {on,off}] [--dry-run]
38 | [--skip-scm-validation] [--debug]
39 |
40 | optional arguments:
41 | -h, --help show this help message and exit
42 | --org-id ORG_ID The Snyk Organisation Id found in Organization > Settings. If omitted,
43 | process all orgs the Snyk user has access to.
44 | --repo-name REPO_NAME
45 | The full name of the repo to process (e.g. githubuser/githubrepo). If
46 | omitted, process all repos in the Snyk org.
47 | --sca {on,off} scan for SCA manifests (on by default)
48 | --container {on,off} scan for container projects, e.g. Dockerfile (on by default)
49 | --iac {on,off} scan for IAC manifests (experimental, off by default)
50 | --code {off} code analysis is deprecated with off only option
51 | --on-archived {ignore,deactivate,delete}
52 | Deletes or deactivates projects associated with archived repos (ignore by default)
53 | --on-unarchived {ignore,reactivate}
54 | If there is a deactivated project in Snyk, should the tool reactivate it if the repo is not
55 | archived? (Warning: Use with caution, this will reactivate ALL projects associated with a repo)
56 | --dry-run Simulate processing of the script without making changes to Snyk
57 | --skip-scm-validation
58 | Skip validation of the TLS certificate used by the SCM
59 | --audit-large-repos only query github tree api to see if the response is truncated and
60 | log the result. These are the repos that would have been cloned via this tool
61 | --debug Write detailed debug data to snyk_scm_refresh.log for troubleshooting
62 | ```
63 |
64 | #### Sync with defaults
65 | `./snyk_scm_refresh.py --org-id=12345`
66 |
67 | #### Sync SCA projects only
68 | `./snyk_scm_refresh.py --org-id=12345 --container=off`
69 |
70 | #### Sync Container projects only
71 | `./snyk_scm_refresh.py --org-id=12345 --sca=off --container=on`
72 |
73 | ### Deprecated
74 | #### Snyk Code analysis for repos (Deprecated)
75 | ~~only: `./snyk_scm_refresh.py --org-id=12345 --sca=off --container=off --code=on`~~
76 | ~~defaults + snyk code enable: `./snyk_scm_refresh.py --org-id=12345 --code=on`~~
77 |
78 |
79 | ### Dependencies
80 | ```
81 | pip install -r requirements.txt
82 | ```
83 | or
84 | ```
85 | python3 -m pip install -r requirements.txt
86 | ```
87 | ### Environment
88 | ```
89 | export SNYK_TOKEN=
90 | export GITHUB_TOKEN=
91 | export GITHUB_ENTERPRISE_TOKEN=
92 | export GITHUB_ENTERPRISE_HOST=
93 | ```
94 | If GITHUB_TOKEN is set, your Github.com repos will be processed
95 |
96 | If GITHUB_ENTERPRISE_TOKEN and GITHUB_ENTERPRISE_HOST are BOTH set, your Github Enterprise Server repos will be processed
97 |
98 |
99 | :information_source:
100 | If Snyk Github Enterprise Integration type is used for your Github.com repositories, then set GITHUB_ENTERPRISE_HOST=api.github.com
101 |
102 |
103 |
104 | ### Getting a GitHub token
105 |
106 | 1. In GitHub.com browse: https://github.com/settings/tokens/new. Or in GitHub Enterprise select your user icon (top-right), then 'Settings', then 'Developer settings', then 'Personal access tokens'.
107 | 2. Scopes - Public repos do not need a scope. If you want to scan private repos, then you'll need to enable this scope: `repo` (Full control of private repositories)
108 |
109 | ### Handling self-signed certificates
110 | This tool uses the python requests library, therefore you can point [REQUESTS_CA_BUNDLE](https://docs.python-requests.org/en/master/user/advanced/#ssl-cert-verification) environment variable to the location of your cert bundle
111 |
112 | `export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt`
113 |
114 | If you are not able to validate the self-signed certificate, you may skip validation by providing the `--skip-scm-validation` option.
115 |
116 | ### Instructions
117 | Make sure to use a user *API Token* that has access to the Snyk Orgs you need to process with the script. A service account will *not* work for GitHub, which is currently the only supported SCM.
118 |
119 | Ensure that your GITHUB_TOKEN or GITHUB_ENTERPRISE_TOKEN has access to the repos contained in the Snyk Orgs in scope
120 | If unsure, try one org at a time with `--org-id`
121 |
122 | **Recommended:**
123 | This tool will delete projects from Snyk that are detected as stale or have since been renamed
124 |
125 | Use the `--dry-run` option to verify the execution plan for the first run
126 |
127 | Each run generates a set of output files:
128 | | File Name | Description |
129 | | ------------------- | ----------- |
130 | | snyk-scm-refresh.log | debug log output good for troubleshooting |
131 | | _potential-repo-deletes.csv | repo no longer exists |
132 | | _stale-manifests-deleted.csv | monitored manifest files that no longer exist |
133 | | _renamed-manifests-deleted.csv | manifests of renamed repos that were removed |
134 | | _renamed-manifests-pending.csv | manifests of renamed repos that were not removed. Only when the import of the repo under the new name is completed are the old ones removed. |
135 | | _completed-project-imports.csv | manifests that were imported during this job run |
136 | | _updated-project-branches.csv | projects with updated default branch |
137 | | _update-project-branches-errors.csv | projects that had an error attempting to update default branch |
138 | | _repos-skipped-on-error.csv | repos skipped due to import error |
139 | | _manifests-skipped-on-limit.csv | manifest projects skipped due to import limit |
140 |
141 | ### Handling of large repositories
142 | The primary method used by this tool to retrieve the GIT tree from each repository for the basis of comparison is via the Github API.
143 | For sufficiently large repositories, though, Github truncates the API response. When a truncated Github response is detected when retrieving the GIT tree,
144 | this tool will fall back on using the local `git` if available and configured to perform a shallow clone of the repository's default branch in order to build the tree.
145 |
146 | It will use /tmp to perform the `git clone` and then capture the output of `git ls-tree -r`
147 |
148 | When this situation occurs, you will see the following in the console:
149 | ```
150 | Large repo detected, falling back to cloning. This may take a few minutes ...
151 | ```
152 |
153 | 
154 |
155 | The truncated GIT tree response is described [here](https://docs.github.com/en/rest/reference/git#get-a-tree). The last [known limits](https://github.community/t/github-get-tree-api-limits-and-recursivity/1300/2) are: 100,000 files or 7 MB of response data, whichever is first.
156 |
157 | ### Auditing which repos are considered large
158 | In order to detect which repositories in Snyk are subject to the tree truncation issue mentioned above, there is another available option `--audit-large-repos`.
159 | This will only query the git tree via API and look for a truncated response, and then log the results to a file `snyk-scm-refresh_large-repos-audit-results.csv`
160 |
161 | To find all the repos based on a Snyk org, use the `--org-id` parameter in conjunction with `--audit-large-repos`
162 | Optionally you can also supply a repo name to check a single repo by also supplying the `--repo-name` filter.
163 |
164 | ### Importing manifest limit
165 | There is a set manifest project import limit per execution. Skipped manifest projects above the limit will be logged to a CSV file.
166 | Relaunch `snyk_scm_refresh` at the next execution schedule to import any skipped projects.
167 |
--------------------------------------------------------------------------------
/app/gh_repo.py:
--------------------------------------------------------------------------------
1 | """utilities for github"""
2 | import logging
3 | import re
4 | import sys
5 | import subprocess
6 | import requests
7 | from app.models import GithubRepoStatus
8 | from app.utils.github_utils import (
9 | get_github_client,
10 | get_github_repo
11 | )
12 | import common
13 |
14 |
# suppress InsecureRequestWarning when using the --skip-scm-validation option
# (the disable has to go through requests.packages due to a pylint bug:
# https://github.com/PyCQA/pylint/issues/4584)
# pylint: disable=no-member
requests.packages.urllib3.disable_warnings()

# pylint: disable=invalid-name
# Module-level cache shared by get_repo_manifests():
#   tree_already_retrieved -- True when 'manifests' holds the tree fetched by
#                             the previous call (served once, then reset)
#   manifests              -- most recently computed list of manifest paths
state = {
    "tree_already_retrieved": False,
    "manifests": []
}
26 |
def get_git_tree_from_clone(repo_name, origin):
    """
    get git tree for large repos by performing
    a shallow clone 'git clone --depth 1'

    Args:
        repo_name: full repo name ("owner/name") as known to Snyk
        origin: Snyk integration origin ("github" or "github-enterprise"),
                used to select the matching GitHub client

    Returns:
        list of {"sha": ..., "path": ...} dicts, one per git tree entry
    """
    import shutil  # local import: only needed on this large-repo fallback path

    tree_full_paths = []

    gh_client = get_github_client(origin)
    gh_repo = get_github_repo(gh_client, repo_name)

    # check if git exists on the system
    # (the previous check ran ["command", "-v", "git"], but 'command' is a
    # shell builtin, not an executable, so it failed without shell=True)
    if shutil.which("git") is None:
        sys.exit("git executable not found on PATH, exiting...")

    name = gh_repo.name
    clone_url = gh_repo.clone_url
    default_branch = gh_repo.default_branch

    GIT_CLONE_PATH = f"{common.GIT_CLONE_TEMP_DIR}/{name}"

    # check that GIT_CLONE_PATH is set safely for deletion
    # (must live under the temp dir and be at least two levels deep)
    if not (re.match(f'{common.GIT_CLONE_TEMP_DIR}/.+', GIT_CLONE_PATH) and
            re.match(r'/.+/.+', GIT_CLONE_PATH)):
        sys.exit(f"could not determine that the temp cloning directory"
                 f"{GIT_CLONE_PATH} was set properly, exiting...")

    print(f" - shallow cloning {name} from {clone_url} to {GIT_CLONE_PATH}")

    # remove any stale clone, then clone the repo locally
    shutil.rmtree(GIT_CLONE_PATH, ignore_errors=True)
    subprocess.run(
        ["git", "clone", "--depth", "1", clone_url],
        check=True,
        cwd=common.GIT_CLONE_TEMP_DIR
    )

    print(" - Loading tree from local git structure")

    git_tree = subprocess.run(
        [
            "git",
            "ls-tree",
            "-r",
            default_branch
        ],
        capture_output=True,
        check=True,
        text=True,
        cwd=GIT_CLONE_PATH
    )

    print(" - removing cloned files in /tmp...")
    shutil.rmtree(GIT_CLONE_PATH, ignore_errors=True)

    git_tree_lines = git_tree.stdout.splitlines()
    print(f" - found {len(git_tree_lines)} tree items ...")

    # git ls-tree line format: "<mode> <type> <sha>\t<path>"
    # split on the tab so paths containing spaces stay intact
    # (the previous whitespace split truncated such paths at the first space)
    for line in git_tree_lines:
        meta, _, path = line.partition("\t")
        sha = meta.split()[2]
        tree_full_paths.append({
            "sha": sha,
            "path": path
        })

    return tree_full_paths
94 |
def is_gh_repo_truncated(gh_tree_response) -> bool:
    """Return the 'truncated' flag from a GitHub tree API response."""
    # the flag only lives in the raw payload, hence the protected access
    # pylint: disable=protected-access
    raw_payload = gh_tree_response._rawData
    return raw_payload['truncated']
99 |
def get_git_tree_from_api(repo_name, origin):
    """Fetch the recursive git tree of a repo's default branch via the API."""
    client = get_github_client(origin)
    repo = get_github_repo(client, repo_name)
    # True -> recursive: return the whole tree in one response
    return repo.get_git_tree(repo.default_branch, True)
106 |
def get_repo_manifests(snyk_repo_name, origin, skip_snyk_code):
    """retrieve list of all supported manifests in a given github repo"""

    # serve the cached tree exactly once, then force a fresh fetch next call
    if state['tree_already_retrieved']:
        state['tree_already_retrieved'] = False
        return state['manifests']

    state['manifests'] = []

    tree_response = get_git_tree_from_api(snyk_repo_name, origin)
    truncated = is_gh_repo_truncated(tree_response)
    contents = tree_response.tree

    if truncated:
        # repo too large to get try via API, just clone it
        print(f" - Large repo detected, falling back to cloning. "
              f"This may take a few minutes ...")
        contents = get_git_tree_from_clone(snyk_repo_name, origin)

    for tree_element in contents:
        # clone fallback yields dicts; the API yields objects with attributes
        if truncated:
            element_path = tree_element['path']
        else:
            element_path = tree_element.path
        if passes_manifest_filter(element_path, skip_snyk_code):
            state['manifests'].append(element_path)
            # one code-analysis manifest is enough; skip the rest
            if re.match(common.MANIFEST_PATTERN_CODE, element_path):
                skip_snyk_code = True

    state['tree_already_retrieved'] = True
    return state['manifests']
150 |
def passes_manifest_filter(path, skip_snyk_code=False):
    """ check if given path should be imported based
        on configured search and exclusion filters """

    # exclusions always win, regardless of which project types are enabled
    if re.match(common.MANIFEST_PATTERN_EXCLUSIONS, path):
        return False

    # (enabled?, pattern) pairs, one per supported project type
    filters = (
        (common.PROJECT_TYPE_ENABLED_SCA, common.MANIFEST_PATTERN_SCA),
        (common.PROJECT_TYPE_ENABLED_CONTAINER, common.MANIFEST_PATTERN_CONTAINER),
        (common.PROJECT_TYPE_ENABLED_IAC, common.MANIFEST_PATTERN_IAC),
        # code analysis additionally requires that it wasn't already matched
        (common.PROJECT_TYPE_ENABLED_CODE and not skip_snyk_code,
         common.MANIFEST_PATTERN_CODE),
    )

    return any(enabled and re.match(pattern, path)
               for (enabled, pattern) in filters)
177 |
def get_gh_repo_status(snyk_gh_repo):
    # pylint: disable=too-many-branches
    """detect if repo still exists, has been removed, or renamed

    Queries the GitHub (or GitHub Enterprise) repos API without following
    redirects, so a rename/move surfaces as a 301.

    Returns:
        GithubRepoStatus describing the repo's current state.

    Raises:
        RuntimeError: when GitHub answers 401 (bad/expired token).

    Fix note: the previous implementation returned from inside a ``finally``
    block, which silently swallowed the 401 RuntimeError (and any other
    in-flight exception) — the caller in app.py expects it to propagate.
    """
    repo_owner = snyk_gh_repo.full_name.split("/")[0]
    repo_name = snyk_gh_repo.full_name.split("/")[1]
    response_message = ""
    response_status_code = ""
    repo_default_branch = ""
    archived = False

    # pick the token matching the repo's Snyk integration origin
    # (empty default avoids an unbound name for an unexpected origin)
    github_token = ""
    if snyk_gh_repo.origin == "github":
        github_token = common.GITHUB_TOKEN
    elif snyk_gh_repo.origin == "github-enterprise":
        github_token = common.GITHUB_ENTERPRISE_TOKEN

    headers = {"Authorization": f"Bearer {github_token}"}

    if snyk_gh_repo.origin == "github" or \
            (snyk_gh_repo.origin == "github-enterprise" and
             common.USE_GHE_INTEGRATION_FOR_GH_CLOUD):
        request_url = f"https://api.github.com/repos/{snyk_gh_repo['full_name']}"
    elif snyk_gh_repo.origin == "github-enterprise":
        request_url = f"https://{common.GITHUB_ENTERPRISE_HOST}" \
                      f"/api/v3/repos/{snyk_gh_repo['full_name']}"

    try:
        # allow_redirects=False so a renamed repo is visible as a 301
        response = requests.get(url=request_url,
                                allow_redirects=False,
                                headers=headers,
                                verify=common.VERIFY_TLS)

        response_status_code = response.status_code

        if response.status_code == 200:
            response_message = "Match"
            repo_default_branch = response.json()['default_branch']
            archived = response.json()['archived']

        elif response.status_code == 404:
            response_message = "Not Found"

        elif response.status_code == 401:
            raise RuntimeError("GitHub request is unauthorized!")

        elif response.status_code == 301:
            # follow the redirect once to learn the repo's new full name
            follow_response = requests.get(
                url=response.headers["Location"],
                headers=headers,
                verify=common.VERIFY_TLS
            )
            if follow_response.status_code == 200:
                repo_new_full_name = follow_response.json()["full_name"]
                repo_owner = repo_new_full_name.split("/")[0]
                repo_name = repo_new_full_name.split("/")[1]
                archived = follow_response.json()['archived']
            else:
                repo_owner = ""
                repo_name = ""

            response_message = f"Moved to {repo_name}"

    except requests.exceptions.RequestException as err:
        # make sure it gets logged in log file when in debug mode
        logging.debug(f"{err}")

        response_status_code = "ERROR"
        response_message = f"{err}"

    return GithubRepoStatus(
        response_status_code,
        response_message,
        repo_name,
        snyk_gh_repo["org_id"],
        repo_owner,
        f"{repo_owner}/{repo_name}",
        repo_default_branch,
        archived
    )
262 |
--------------------------------------------------------------------------------
/common.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from os import (
3 | getenv,
4 | path
5 | )
6 | from snyk import SnykClient
7 | from app.utils.github_utils import (
8 | create_github_client,
9 | create_github_enterprise_client
10 | )
11 | import argparse
12 | import configparser
13 | from _version import __version__
14 |
# user agent sent with every Snyk API call, stamped with this tool's version
USER_AGENT = f"pysnyk/snyk_services/snyk_scm_refresh/{__version__}"

# regex filters deciding which repo paths count as importable manifests,
# one pattern per supported Snyk project type
MANIFEST_PATTERN_SCA = '^(?![.]).*(package[.]json|Gemfile[.]lock|pom[.]xml|build[.]gradle|.*[.]lockfile|build[.]sbt|.*req.*[.]txt|Gopkg[.]lock|go[.]mod|vendor[.]json|packages[.]config|.*[.]csproj|.*[.]fsproj|.*[.]vbproj|project[.]json|project[.]assets[.]json|composer[.]lock|Podfile|Podfile[.]lock)$'
MANIFEST_PATTERN_CONTAINER = '^.*(Dockerfile)$'
MANIFEST_PATTERN_IAC = '.*[.](yaml|yml|tf)$'
MANIFEST_PATTERN_CODE = '.*[.](js|cs|php|java|py)$'
# paths matching this pattern are never imported, regardless of type toggles
MANIFEST_PATTERN_EXCLUSIONS = '^.*(fixtures|tests\/|__tests__|test\/|__test__|[.].*ci\/|.*ci[.].yml|node_modules\/|bower_components\/|variables[.]tf|outputs[.]tf).*$'
GITHUB_CLOUD_API_HOST = "api.github.com"

# which SCM clients get created; resolved below from the env vars
GITHUB_ENABLED = False
GITHUB_ENTERPRISE_ENABLED = False
USE_GHE_INTEGRATION_FOR_GH_CLOUD = False

# credentials and hosts are taken exclusively from the environment
SNYK_TOKEN = getenv("SNYK_TOKEN")
GITHUB_TOKEN = getenv("GITHUB_TOKEN")
GITHUB_ENTERPRISE_TOKEN = getenv("GITHUB_ENTERPRISE_TOKEN")
GITHUB_ENTERPRISE_HOST = getenv("GITHUB_ENTERPRISE_HOST")

# working directory for the large-repo shallow-clone fallback
GIT_CLONE_TEMP_DIR = "/tmp"

# per-run CSV report files, opened (and truncated) at import time
# with their header rows written immediately
LOG_PREFIX = "snyk-scm-refresh"
LOG_FILENAME = LOG_PREFIX + ".log"
POTENTIAL_DELETES_FILE = open("%s_potential-repo-deletes.csv" % LOG_PREFIX, "w")
POTENTIAL_DELETES_FILE.write("org,repo\n")
STALE_MANIFESTS_DELETED_FILE = open(
    "%s_stale-manifests-deleted.csv" % LOG_PREFIX, "w"
)
STALE_MANIFESTS_DELETED_FILE.write("org,project\n")
RENAMED_MANIFESTS_DELETED_FILE = open(
    "%s_renamed-manifests-deleted.csv" % LOG_PREFIX, "w"
)
RENAMED_MANIFESTS_DELETED_FILE.write("org,project\n")
RENAMED_MANIFESTS_PENDING_FILE = open(
    "%s_renamed-manifests-pending.csv" % LOG_PREFIX, "w"
)
RENAMED_MANIFESTS_PENDING_FILE.write("org,project\n")
COMPLETED_PROJECT_IMPORTS_FILE = open(
    "%s_completed-project-imports.csv" % LOG_PREFIX, "w"
)
COMPLETED_PROJECT_IMPORTS_FILE.write("org,project,success\n")
REPOS_SKIPPED_ON_ERROR_FILE = open(
    "%s_repos-skipped-on-error.csv" % LOG_PREFIX, "w"
)
REPOS_SKIPPED_ON_ERROR_FILE.write("org,repo,status\n")
MANIFESTS_SKIPPED_ON_LIMIT_FILE = open(
    "%s_manifests-skipped-on-limit.csv" % LOG_PREFIX, "w"
)
MANIFESTS_SKIPPED_ON_LIMIT_FILE.write("skipped_manifest_file_path\n")
UPDATED_PROJECT_BRANCHES_FILE = open(
    "%s_updated-project-branches.csv" % LOG_PREFIX, "w"
)
UPDATED_PROJECT_BRANCHES_FILE.write("org,project_name,project_id,new_branch\n")
UPDATE_PROJECT_BRANCHES_ERRORS_FILE = open(
    "%s_update-project-branches-errors.csv" % LOG_PREFIX, "w"
)
UPDATE_PROJECT_BRANCHES_ERRORS_FILE.write("org,project_name,project_id,new_branch\n")
LARGE_REPOS_AUDIT_RESULTS_FILE = open(
    "%s_large-repos-audit-results.csv" % LOG_PREFIX, "w"
)
LARGE_REPOS_AUDIT_RESULTS_FILE.write("org,repo,is_large\n")

# how often/long to poll pending project removals after a renamed-repo import
PENDING_REMOVAL_MAX_CHECKS = 45
PENDING_REMOVAL_CHECK_INTERVAL = 20
def parse_command_line_args():
    """Parse command-line arguments

    Returns:
        argparse.Namespace with the options documented in the README.

    NOTE(review): --sca/--container/--iac default to the booleans
    True/False rather than the 'on'/'off' choice strings; this works
    because toggle_to_bool() passes non-string values through unchanged —
    confirm before changing the defaults.
    """

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--org-id",
        type=str,
        help="The Snyk Organisation Id found in Organization > Settings. \
            If omitted, process all orgs the Snyk user has access to.",
        required=False,
    )
    parser.add_argument(
        "--repo-name",
        type=str,
        help="The full name of the repo to process (e.g. githubuser/githubrepo). \
            If omitted, process all repos in the Snyk org.",
        required=False,
    )
    parser.add_argument(
        "--sca",
        help="scan for SCA manifests (on by default)",
        required=False,
        default=True,
        choices=['on', 'off']
    )
    parser.add_argument(
        "--container",
        help="scan for container projects, e.g. Dockerfile (on by default)",
        required=False,
        default=True,
        choices=['on', 'off']
    )
    parser.add_argument(
        "--iac",
        help="scan for IAC manifests (experimental, off by default)",
        required=False,
        default=False,
        choices=['on', 'off']
    )
    parser.add_argument(
        "--on-archived",
        help="Tells the tool what to do when a GitHub project is archived (Snyk projects ignored by default)",
        required=False,
        default="ignore",
        choices=['ignore', 'deactivate', 'delete']
    )
    parser.add_argument(
        "--on-unarchived",
        help="If the tool detects a Snyk project deactivated whilst the GitHub repo is not archived, what should it do?"
             " (By default the tool will ignore)",
        required=False,
        default="ignore",
        choices=['ignore', 'reactivate']
    )
    # show disabled argument help message and prevent invalidation of any existent "--code=off" verbose argument mode
    parser.add_argument(
        "--code",
        help="code analysis is deprecated with off only option",
        required=False,
        default=False,
        choices=['off']
    )
    parser.add_argument(
        "--dry-run",
        help="Simulate processing of the script without making changes to Snyk",
        required=False,
        action="store_true",
    )
    parser.add_argument(
        "--skip-scm-validation",
        help="Skip validation of the TLS certificate used by the SCM",
        required=False,
        action="store_true",
    )
    parser.add_argument(
        "--audit-large-repos",
        help="only query github tree api to see if the response is truncated and \
            log the result. These are the repos that would have be cloned via this tool",
        required=False,
        action="store_true",
    )
    parser.add_argument(
        "--debug",
        help="Write detailed debug data to snyk_scm_refresh.log for troubleshooting",
        required=False,
        action="store_true",
    )

    return parser.parse_args()
168 |
# parsed CLI options, resolved once at import time and shared module-wide
ARGS = parse_command_line_args()
170 |
def toggle_to_bool(toggle_value) -> bool:
    """Translate an 'on'/'off' CLI toggle into a boolean.

    Any other value (e.g. an argparse boolean default) is returned unchanged.
    """
    return {"on": True, "off": False}.get(toggle_value, toggle_value)
177 |
# single shared Snyk API client for the whole run
snyk_client = SnykClient(SNYK_TOKEN, user_agent=USER_AGENT)

# --skip-scm-validation disables TLS certificate verification for SCM calls
VERIFY_TLS = not ARGS.skip_scm_validation

# pointing the "enterprise" host at api.github.com signals that the Snyk org
# uses the GHE integration type for Github.com repos (see README)
if (GITHUB_ENTERPRISE_HOST == GITHUB_CLOUD_API_HOST):
    USE_GHE_INTEGRATION_FOR_GH_CLOUD = True

if (GITHUB_TOKEN):
    GITHUB_ENABLED = True
    gh_client = create_github_client(GITHUB_TOKEN, VERIFY_TLS)
    print("created github.com client")

if (GITHUB_ENTERPRISE_HOST):
    GITHUB_ENTERPRISE_ENABLED = True
    if USE_GHE_INTEGRATION_FOR_GH_CLOUD:
        gh_enterprise_client = create_github_client(GITHUB_ENTERPRISE_TOKEN, VERIFY_TLS)
        print(f"created github client for enterprise host: {GITHUB_ENTERPRISE_HOST}")
    else:
        print(f"created GH enterprise client for host: {GITHUB_ENTERPRISE_HOST}")
        gh_enterprise_client = create_github_enterprise_client(GITHUB_ENTERPRISE_TOKEN, \
            GITHUB_ENTERPRISE_HOST, VERIFY_TLS)

# resolve the CLI toggles into the booleans used by the manifest filters
PROJECT_TYPE_ENABLED_SCA = toggle_to_bool(ARGS.sca)
PROJECT_TYPE_ENABLED_CONTAINER = toggle_to_bool(ARGS.container)
PROJECT_TYPE_ENABLED_IAC = toggle_to_bool(ARGS.iac)
# disabled snyk code due to unsupported underlying api changes
PROJECT_TYPE_ENABLED_CODE = False
MAX_IMPORT_MANIFEST_PROJECTS = 1000
206 |
--------------------------------------------------------------------------------
/app/app.py:
--------------------------------------------------------------------------------
1 | """
2 | Keep Snyk projects in sync with their corresponding SCM repositories
3 | """
4 | import sys
5 | import time
6 | import re
7 | import snyk.errors
8 | import common
9 | from app.models import ImportStatus
10 | from app.gh_repo import (
11 | get_gh_repo_status,
12 | is_gh_repo_truncated,
13 | get_git_tree_from_api
14 | )
15 | from app.utils.snyk_helper import (
16 | get_snyk_repos_from_snyk_orgs,
17 | app_print,
18 | process_import_status_checks,
19 | import_manifests,
20 | log_potential_delete,
21 | log_audit_large_repo_result
22 | )
23 |
24 |
def run():
    """Begin application logic

    Walks every Snyk repo in the selected org(s), compares it against its
    GitHub counterpart, and reconciles: logs potential deletes (404),
    re-imports renames (301), syncs the default branch, removes stale
    manifests and imports new ones (200).
    """
    # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    # pylint: disable=too-many-nested-blocks
    sys.stdout.write("Retrieving Snyk Repos")
    sys.stdout.flush()

    snyk_orgs = []

    # if --orgId exists, use it
    # otherwise get all orgs the api user is part of
    try:
        if common.ARGS.org_id:
            snyk_orgs.append(common.snyk_client.organizations.get(common.ARGS.org_id))
        else:
            snyk_orgs = common.snyk_client.organizations.all()
    except snyk.errors.SnykHTTPError as err:
        print(f"\n\n{err.message}, exiting...\n")
        sys.exit(1)

    print(f" for {len(snyk_orgs)} org(s)")

    # build snyk repo objects
    snyk_repos = get_snyk_repos_from_snyk_orgs(snyk_orgs, common.ARGS)
    len_snyk_repos = len(snyk_repos)
    sys.stdout.write(f" - {len_snyk_repos} found\n")
    if len_snyk_repos == 0:
        print("\nIf using repo-name filter, ensure it is correct\n")
        sys.exit(1)

    # ImportStatus objects collected here are polled at the end of the run
    import_status_checks = []

    for (i, snyk_repo) in enumerate(snyk_repos):
        # snyk_repo.get_projects()
        deleted_projects = []
        # NOTE(review): is_default_renamed is never read — candidate for removal
        is_default_renamed = False
        app_print(snyk_repo.org_name,
                  snyk_repo.full_name,
                  f"Processing {str(i + 1)}/{str(len(snyk_repos))}")

        try:
            gh_repo_status = get_gh_repo_status(snyk_repo)

        except RuntimeError as err:
            raise RuntimeError("Failed to query GitHub repository!") from err

        app_print(snyk_repo.org_name,
                  snyk_repo.full_name,
                  f"Github Status {gh_repo_status.response_code}" \
                  f"({gh_repo_status.response_message}) [{snyk_repo.origin}]")

        # if snyk_repo does not still exist (removed/404), then log and skip to next repo
        if gh_repo_status.response_code == 404: # project no longer exists
            log_potential_delete(snyk_repo.org_name, snyk_repo.full_name)

        elif gh_repo_status.response_code == 200: # project exists and has not been renamed
            # if --audit-large-repos is on
            if common.ARGS.audit_large_repos:
                is_truncated_str = \
                    is_gh_repo_truncated(
                        get_git_tree_from_api(snyk_repo.full_name, snyk_repo.origin)
                    )
                log_audit_large_repo_result(
                    snyk_repo.org_name,
                    snyk_repo.full_name,
                    str(bool(is_truncated_str))
                )
                # move to next repo without processing the rest of the code
                continue

            # If we've previously deactivated projects, we should activate them again
            # if the repo becomes "unarchived"
            if not gh_repo_status.archived and common.ARGS.on_unarchived == "reactivate":
                for project in snyk_repo.snyk_projects:
                    if not project["is_monitored"]:
                        activated_projects = snyk_repo.activate_manifests(common.ARGS.dry_run)
                        for activated_project in activated_projects:
                            if not common.ARGS.dry_run:
                                app_print(snyk_repo.org_name,
                                          snyk_repo.full_name,
                                          f"Activated manifest: {activated_project['manifest']}")
                            else:
                                app_print(snyk_repo.org_name,
                                          snyk_repo.full_name,
                                          f"Would activate manifest: "
                                          f"{activated_project['manifest']}")
                    # NOTE(review): this break fires after the first project
                    # regardless of its monitored state — confirm it should not
                    # sit inside the `if not project["is_monitored"]` body
                    break # We just needed to check if any one of the projects wasn't active

            if gh_repo_status.archived and common.ARGS.on_archived != "ignore":
                app_print(snyk_repo.org_name,
                          snyk_repo.full_name,
                          f"Repo is archived")

                # Check what archival mode we're running in
                on_archival_action = common.ARGS.on_archived
                if on_archival_action == "deactivate":
                    deleted_projects = snyk_repo.deactivate_manifests(common.ARGS.dry_run)
                elif on_archival_action == "delete":
                    deleted_projects = snyk_repo.delete_manifests(common.ARGS.dry_run)

                # And tell the user what has or would have happened
                for project in deleted_projects:
                    if not common.ARGS.dry_run:
                        app_print(snyk_repo.org_name,
                                  snyk_repo.full_name,
                                  f"{on_archival_action.capitalize()}d manifest: "
                                  f"{project['manifest']}")
                    else:
                        app_print(snyk_repo.org_name,
                                  snyk_repo.full_name,
                                  f"Would {on_archival_action} manifest: {project['manifest']}")
            # snyk has the wrong branch, re-import
            elif gh_repo_status.repo_default_branch != snyk_repo.branch:
                app_print(snyk_repo.org_name,
                          snyk_repo.full_name,
                          f"Default branch name changed from {snyk_repo.branch}" f" -> "
                          f"{gh_repo_status.repo_default_branch}")
                updated_projects = snyk_repo.update_branch(
                    gh_repo_status.repo_default_branch,
                    common.ARGS.dry_run)
                for project in updated_projects:
                    if not common.ARGS.dry_run:
                        app_print(snyk_repo.org_name,
                                  snyk_repo.full_name,
                                  f"Monitored branch set to " \
                                  f"{gh_repo_status.repo_default_branch} " \
                                  f"for: {project['manifest']}")
            else: # find deltas
                app_print(snyk_repo.org_name,
                          snyk_repo.full_name,
                          f"Checking {str(len(snyk_repo.snyk_projects))} " \
                          f"projects for any stale manifests")
                # print(f"snyk repo projects: {snyk_repo.snyk_projects}")
                deleted_projects = snyk_repo.delete_stale_manifests(common.ARGS.dry_run)
                for project in deleted_projects:
                    if not common.ARGS.dry_run:
                        app_print(snyk_repo.org_name,
                                  snyk_repo.full_name,
                                  f"Deleted stale manifest: {project['manifest']}")
                    else:
                        app_print(snyk_repo.org_name,
                                  snyk_repo.full_name,
                                  f"Would delete stale manifest: {project['manifest']}")

                app_print(snyk_repo.org_name,
                          snyk_repo.full_name,
                          "Checking for new manifests in source tree")

                # if not common.ARGS.dry_run:
                projects_import = snyk_repo.add_new_manifests(common.ARGS.dry_run)

                if isinstance(projects_import, ImportStatus):
                    import_status_checks.append(projects_import)
                    app_print(snyk_repo.org_name,
                              snyk_repo.full_name,
                              f"Found {len(projects_import.files)} to import")
                    for file in projects_import.files:
                        import_message = ""
                        if re.match(common.MANIFEST_PATTERN_CODE, file["path"]):
                            import_message = "Triggering code analysis via"
                        else:
                            import_message = "Importing new manifest"

                        app_print(snyk_repo.org_name,
                                  snyk_repo.full_name,
                                  f"{import_message}: {file['path']}")

        # if snyk_repo has been moved/renamed (301), then re-import the entire repo
        # with the new name and remove the old one (make optional)
        elif gh_repo_status.response_code == 301:
            app_print(snyk_repo.org_name,
                      snyk_repo.full_name,
                      f"Repo has moved to {gh_repo_status.repo_full_name}, submitting import...")
            if not common.ARGS.dry_run:
                repo_import_status = import_manifests(snyk_repo.org_id,
                                                      gh_repo_status.repo_full_name,
                                                      snyk_repo.integration_id)
                # build list of projects to delete with old name
                # only when the repo with new name has been imported
                repo_projects = snyk_repo.get_projects()
                # pylint: disable=unused-variable
                for (j, repo_project) in enumerate(repo_projects):
                    repo_projects[j]["pending_repo"] = gh_repo_status.repo_full_name

                repo_import_status.pending_project_deletes = repo_projects
                import_status_checks.append(repo_import_status)
            else:
                app_print(snyk_repo.org_name,
                          snyk_repo.full_name,
                          "Would import repo (all targets) under new name")

        else:
            app_print(snyk_repo.org_name,
                      snyk_repo.full_name,
                      f"Skipping due to invalid response")

        # gentle rate limiting between repos
        time.sleep(1)

    # poll pending imports until they finish, then process deferred deletes
    process_import_status_checks(import_status_checks)
224 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | https://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 |
180 | Copyright © Snyk Ltd 2020
181 |
182 | Licensed under the Apache License, Version 2.0 (the "License");
183 | you may not use this file except in compliance with the License.
184 | You may obtain a copy of the License at
185 |
186 | http://www.apache.org/licenses/LICENSE-2.0
187 |
188 | Unless required by applicable law or agreed to in writing, software
189 | distributed under the License is distributed on an "AS IS" BASIS,
190 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
191 | See the License for the specific language governing permissions and
192 | limitations under the License.
--------------------------------------------------------------------------------
/app/tests/test_snyk_scm_refresh.py:
--------------------------------------------------------------------------------
1 | """test suite for snyk_scm_refresh.py"""
2 | import os
3 | import pytest
4 | import random
5 | import string
6 | import snyk
7 | from snyk.models import Organization
8 | from snyk.models import Project
9 | import common
10 | from app.snyk_repo import SnykRepo
11 | from app.models import GithubRepoStatus
12 | from _version import __version__
13 |
14 | from app.gh_repo import (
15 | get_gh_repo_status,
16 | passes_manifest_filter,
17 | )
18 | from app.utils.snyk_helper import (
19 | get_snyk_projects_for_repo,
20 | get_snyk_repos_from_snyk_projects,
21 | import_manifests
22 | )
23 |
# User-Agent string attached to every pysnyk client created in these tests
USER_AGENT = f"pysnyk/snyk_services/snyk_scm_refresh/{__version__}"
25 |
class MockResponse:
    """Minimal stand-in for requests.Response used by the GitHub status tests."""

    def __init__(self, status_code):
        """Record the HTTP status and a fixed redirect Location header."""
        self.status_code = status_code
        self.headers = {"Location": "test_location"}

    def json(self):
        """Return a canned GitHub repo payload."""
        return {
            "full_name": "new_owner/new_repo",
            "default_branch": "master",
            "archived": False,
        }
35 |
@pytest.mark.parametrize(
    "status_code, response_message, repo, name, owner, default_branch, archived",
    [
        (200, "Match", "test_org/test_repo", "test_repo", "test_owner", "master", False),
        (301, "Moved to new_repo", "new_owner/new_repo", "new_repo", "new_owner", "", False),
        (404, "Not Found", "test_org/test_repo", None, None, "", False)
    ],
)
def test_get_gh_repo_status_github(mocker, status_code, response_message, repo, name, owner, default_branch, archived):
    """get_gh_repo_status maps GitHub response codes (200/301/404) to the
    expected GithubRepoStatus for a repo with 'github' origin."""

    # TODO: assumes a successful redirect for the 301 case
    # first MockResponse is the initial probe, second is the follow-up after redirect
    mocker.patch(
        "requests.get", side_effect=[MockResponse(status_code), MockResponse(200)]
    )
    mocker.patch.dict(os.environ, {'GITHUB_ENTERPRISE_TOKEN': '1234', 'GITHUB_ENTERPRISE_HOST':common.GITHUB_CLOUD_API_HOST})

    snyk_repo_github = SnykRepo(
        'new_owner/new_repo',
        "1234-5678",
        "new_owner",
        "12345",
        "github",
        "master",
        []
    )

    # expected status is derived from the same repo fields the function reads
    repo_status = GithubRepoStatus(
        status_code,
        response_message,
        snyk_repo_github["full_name"].split("/")[1],
        snyk_repo_github["org_id"],
        snyk_repo_github["full_name"].split("/")[0],
        snyk_repo_github["full_name"],
        default_branch,
        archived
    )

    assert get_gh_repo_status(snyk_repo_github) == repo_status
74 |
@pytest.mark.parametrize(
    "status_code, response_message, repo, name, owner, default_branch, archived",
    [
        (200, "Match", "test_org/test_repo", "test_repo", "test_owner", "master", False),
        (301, "Moved to new_repo", "new_owner/new_repo", "new_repo", "new_owner", "", False),
        (404, "Not Found", "test_org/test_repo", None, None, "", False)
    ],
)
def test_get_gh_repo_status_github_enterprise_cloud(mocker, status_code, response_message, repo, name, owner, default_branch, archived):
    """Same mapping as the 'github' origin test, but for a repo with
    'github-enterprise' origin pointed at the GitHub cloud API host."""

    # TODO: assumes a successful redirect for the 301 case
    mocker.patch(
        "requests.get", side_effect=[MockResponse(status_code), MockResponse(200)]
    )
    mocker.patch.dict(os.environ, {'GITHUB_ENTERPRISE_TOKEN': '1234', 'GITHUB_ENTERPRISE_HOST':common.GITHUB_CLOUD_API_HOST})

    snyk_repo_github_enterprise = SnykRepo(
        'new_owner/new_repo',
        "1234-5678",
        "new_owner",
        "12345",
        "github-enterprise",
        "master",
        []
    )

    # expected status built from the same repo fields the function derives
    repo_status = GithubRepoStatus(
        status_code,
        response_message,
        snyk_repo_github_enterprise["full_name"].split("/")[1],
        snyk_repo_github_enterprise["org_id"],
        snyk_repo_github_enterprise["full_name"].split("/")[0],
        snyk_repo_github_enterprise["full_name"],
        default_branch,
        archived
    )

    assert get_gh_repo_status(snyk_repo_github_enterprise) == repo_status
113 |
def test_get_gh_repo_status_unauthorized(mocker):
    """A 401 from GitHub should surface as a RuntimeError."""
    mocker.patch("requests.get", side_effect=[MockResponse(401)])
    mocker.patch.dict(os.environ, {'GITHUB_TOKEN': 'test_token'})

    unauthorized_repo = SnykRepo(
        'test_org/test_repo',
        "1234-5678",
        "new_owner",
        "12345",
        "github",
        "master",
        []
    )

    with pytest.raises(RuntimeError):
        get_gh_repo_status(unauthorized_repo)
134 |
def test_get_snyk_repos_from_snyk_projects():
    """ test generating unique repos from project list """

    # two projects in two distinct repos, sorted by repo_full_name
    # (the function under test relies on that ordering)
    snyk_gh_projects = [
        {
            "id": "12345",
            "name": "scotte-snyk/test-project-1:package.json",
            "repo_full_name": "scotte-snyk/test-project-1",
            "repo_owner": "scotte-snyk",
            "repo_name": "test-project-1",
            "manifest": "package.json",
            "org_id": "12345",
            "org_name": "scotte-snyk",
            "origin": "github",
            "type": "npm",
            "integration_id": "66d7ebef-9b36-464f-889c-b92c9ef5ce12",
            "branch_from_name": "",
            "branch": "master",
            "is_monitored": True
        },
        {
            "id": "12345",
            "name": "scotte-snyk/test-project-2:package.json",
            "repo_full_name": "scotte-snyk/test-project-2",
            "repo_owner": "scotte-snyk",
            "repo_name": "test-project-2",
            "manifest": "package.json",
            "org_id": "12345",
            "org_name": "scotte-snyk",
            "origin": "github",
            "type": "npm",
            "integration_id": "66d7ebef-9b36-464f-889c-b92c9ef5ce12",
            "branch_from_name": "",
            "branch": "master",
            "is_monitored": True
        },
    ]

    # expected result: one SnykRepo per unique repo_full_name, each carrying
    # its own projects
    snyk_repos_from_snyk_projects = [
        SnykRepo(
            'scotte-snyk/test-project-1',
            "12345",
            "scotte-snyk",
            "66d7ebef-9b36-464f-889c-b92c9ef5ce12",
            "github",
            "master",
            [snyk_gh_projects[0]]
        ),
        SnykRepo(
            'scotte-snyk/test-project-2',
            "12345",
            "scotte-snyk",
            "66d7ebef-9b36-464f-889c-b92c9ef5ce12",
            "github",
            "master",
            [snyk_gh_projects[1]]
        )
    ]

    # compared via str() because SnykRepo does not appear to define __eq__
    assert str(get_snyk_repos_from_snyk_projects(snyk_gh_projects)) == str(snyk_repos_from_snyk_projects)
195 |
def test_get_snyk_project_for_repo():
    """ test collecting projects for a repo """

    # NOTE(review): TestModels.organization below is passed to Project as the
    # *function object* (it is never called). This works only because
    # get_snyk_projects_for_repo reads project.name alone — confirm intent.
    class TestModels(object):
        #@pytest.fixture
        def organization(self):
            org = Organization(
                name="My Other Org", id="a04d9cbd-ae6e-44af-b573-0556b0ad4bd2"
            )
            org.client = snyk.SnykClient("token", user_agent=USER_AGENT)
            return org

        def base_url(self):
            return "https://snyk.io/api/v1"

        def organization_url(self, base_url, organization):
            return "%s/org/%s" % (base_url, organization.id)

    # two projects belong to test-project-1 (one manifest-based, one sast
    # with no ':manifest' suffix); the third belongs to another repo
    snyk_projects = [
        Project(name='scotte-snyk/test-project-1:package.json',
            organization=TestModels.organization,
            id='66d7ebef-9b36-464f-889c-b92c9ef5ce12',
            created='2020-07-27T20:09:02.150Z',
            origin='github',
            type='pip',
            readOnly=False,
            testFrequency='daily',
            issueCountsBySeverity={"low": 8, "high": 13, "medium": 15},
            isMonitored=True,
            branch='main',
            remoteRepoUrl='scotte-snyk/test-project-1'
        ),
        Project(name='scotte-snyk/test-project-1',
            organization=TestModels.organization,
            id='66d7ebef-9b36-464f-889c-b92c9ef5ce12',
            created='2020-07-27T20:09:02.150Z',
            origin='github',
            type='sast',
            readOnly=False,
            testFrequency='daily',
            issueCountsBySeverity={"low": 8, "high": 13, "medium": 15},
            isMonitored=True,
            branch='main',
            remoteRepoUrl='scotte-snyk/test-project-1'
        ),
        Project(name='scotte-snyk/test-project-2:requirements.txt',
            organization=TestModels.organization,
            id='93b82d1f-1544-45c9-b3bc-86e799c7225b',
            created='2020-07-27T20:08:44.903Z',
            origin='github',
            type='npm',
            readOnly=False,
            testFrequency='daily',
            issueCountsBySeverity={"low": 8, "high": 13, "medium": 15},
            isMonitored=True,
            branch='main',
            remoteRepoUrl='scotte-snyk/test-project-2'
        )
    ]

    # only the two test-project-1 entries should be returned
    snyk_projects_filtered = [snyk_projects[0],snyk_projects[1]]

    assert get_snyk_projects_for_repo(snyk_projects, \
        "scotte-snyk/test-project-1") == snyk_projects_filtered
260 |
def test_passes_manifest_filter():
    """Manifests under test/vendor-style directories are rejected;
    plain manifest paths are accepted.

    Fixed the non-idiomatic `== True` / `== False` comparisons (PEP 8 E712)
    by asserting truthiness directly; the checked behavior is unchanged.
    """
    # paths the filter should reject
    assert not passes_manifest_filter("/__test__/path/project.csproj")
    assert not passes_manifest_filter("/node_modules/some/package.json")
    assert not passes_manifest_filter("tests/vuln-in-git/Gemfile.lock")
    # paths the filter should accept
    assert passes_manifest_filter("package.json")
    assert passes_manifest_filter("requirements-test.txt")
272 |
@pytest.fixture
def snyk_projects_fixture():
    """Build a github-enterprise SnykRepo carrying one monitored and one
    unmonitored project, for the archive/deactivate/reactivate tests.

    A dead inner ``TestModels`` class was removed: it was never referenced
    by this fixture and contained a bare ``SnykClient`` name that is not
    imported anywhere in this module (only ``snyk.SnykClient`` is) — a
    latent NameError had it ever been called.
    """
    snyk_gh_projects = [
        {
            "id": "12345",
            "name": "scotte-snyk/test-project-1:package.json",
            "repo_full_name": "scotte-snyk/test-project-1",
            "repo_owner": "scotte-snyk",
            "repo_name": "test-project-1",
            "manifest": "package.json",
            "org_id": "12345",
            "org_name": "scotte-snyk",
            "origin": "github",
            "type": "npm",
            "integration_id": "66d7ebef-9b36-464f-889c-b92c9ef5ce12",
            "branch_from_name": "",
            "branch": "master",
            "is_monitored": True
        },
        {
            "id": "12345",
            "name": "scotte-snyk/test-project-2:package.json",
            "repo_full_name": "scotte-snyk/test-project-2",
            "repo_owner": "scotte-snyk",
            "repo_name": "test-project-2",
            "manifest": "package.json",
            "org_id": "12345",
            "org_name": "scotte-snyk",
            "origin": "github",
            "type": "npm",
            "integration_id": "66d7ebef-9b36-464f-889c-b92c9ef5ce12",
            "branch_from_name": "",
            "branch": "master",
            "is_monitored": False
        },
    ]

    return SnykRepo(
        'new_owner/new_repo',
        "1234-5678",
        "new_owner",
        "12345",
        "github-enterprise",
        "master",
        snyk_gh_projects
    )
335 |
336 |
def test_archived_repo_delete(snyk_projects_fixture, mocker):
    """delete_manifests should delegate to snyk_helper.delete_snyk_project."""
    mock = mocker.patch(
        "app.utils.snyk_helper.delete_snyk_project"
    )
    snyk_projects_fixture.delete_manifests(dry_run=False)
    # BUG FIX: `assert mock.called_once` always passed — `called_once` is an
    # auto-created (truthy) Mock attribute, not a real check. Assert the
    # mock was actually invoked via the real `called` attribute.
    assert mock.called
343 |
344 |
def test_archived_repo_deactivate(snyk_projects_fixture, mocker):
    """deactivate_manifests should delegate to snyk_helper.deactivate_snyk_project."""
    mock = mocker.patch(
        "app.utils.snyk_helper.deactivate_snyk_project"
    )
    snyk_projects_fixture.deactivate_manifests(dry_run=False)
    # BUG FIX: `assert mock.called_once` always passed — `called_once` is an
    # auto-created (truthy) Mock attribute, not a real check. Assert the
    # mock was actually invoked via the real `called` attribute.
    assert mock.called
351 |
352 |
def test_unarchived_repo_reactivate(snyk_projects_fixture, mocker):
    """Reactivating manifests should invoke snyk_helper.activate_snyk_project."""
    activate_mock = mocker.patch("app.utils.snyk_helper.activate_snyk_project")
    snyk_projects_fixture.activate_manifests(dry_run=False)
    assert activate_mock.called
359 |
def test_import_manifest_exceeds_limit(mocker):
    """
    Pytest snyk_helper.import_manifest exceeding limit of manifest projects
    """
    # refer to ie-playground org
    org_id = "39ddc762-b1b9-41ce-ab42-defbe4575bd6"
    repo_full_name = "snyk-playground/java-goof"
    integration_id = "5881e5b0-308f-4a1b-9bcb-38e3491872e0"
    files = []

    # follow snyk_repo.add_new_manifests appending manifest path
    # build one more manifest than the limit so exactly one gets skipped
    for x in range(common.MAX_IMPORT_MANIFEST_PROJECTS + 1):
        files.append(dict({"path": ''.join(random.choices(string.ascii_lowercase, k=5)) + ".tf"}))

    mocker.patch.dict(os.environ, {'GITHUB_TOKEN': '1234'})
    org = Organization(
        name="My Other Org", id=org_id, slug="myotherorg", url=f"https://snyk.io/api/v1/org/{org_id}"
    )
    org.client = snyk.SnykClient("token", user_agent=USER_AGENT)
    mocker.patch("snyk.managers.OrganizationManager.get", return_value=org)
    mocker.patch("snyk.models.Organization.client", return_value=org.client)

    # run assertion mock client will post request and hit SnykHTTPError
    # NOTE(review): the bogus "token" makes the POST fail, which is relied on
    # here — the skipped-manifest CSV is written before the request is sent
    with pytest.raises(snyk.errors.SnykHTTPError):
        import_manifests(org_id, repo_full_name, integration_id, files)

    # assert csv contains header and a skipped manifest file path
    common.MANIFESTS_SKIPPED_ON_LIMIT_FILE.close()
    with open("snyk-scm-refresh_manifests-skipped-on-limit.csv", 'r') as fp:
        num_lines = len(fp.readlines())
    assert num_lines == 2
391 |
--------------------------------------------------------------------------------
/app/utils/snyk_helper.py:
--------------------------------------------------------------------------------
1 | """ helper functions to interact with snyk """
2 | # pylint: disable=invalid-name, cyclic-import
3 | import sys
4 | import re
5 | import time
6 | import snyk.errors
7 | import common
8 | from app.models import ImportStatus
9 | from ..snyk_repo import SnykRepo
10 |
def app_print(org, repo, text):
    """Emit a log line tagged with the Snyk org and repo it concerns."""
    prefix = f"[org:{org}][{repo}]"
    print(f"{prefix} {text}")
14 |
def log_potential_delete(org_name, repo_name):
    """ Log potential repo deletion """
    # Announce on stdout and append an org,repo CSV row to the
    # potential-deletes file so a human can review before removal
    app_print(org_name, repo_name, "Logging potential delete")
    common.POTENTIAL_DELETES_FILE.write(f"{org_name},{repo_name}\n")
19 |
def log_updated_project_branch(org_name, project_id, project_name, new_branch):
    """ Log project branch update """
    # CSV row: org, project name, project id, new branch
    common.UPDATED_PROJECT_BRANCHES_FILE.write(f"{org_name},"
                                               f"{project_name},"
                                               f"{project_id},"
                                               f"{new_branch}\n")
26 |
def log_update_project_branch_error(org_name, project_id, project_name, new_branch):
    """ Log project branch update error """
    # CSV row: org, project name, project id, branch that failed to apply
    common.UPDATE_PROJECT_BRANCHES_ERRORS_FILE.write(
        f"{org_name},"
        f"{project_name},"
        f"{project_id},"
        f"{new_branch}\n")
34 |
def log_audit_large_repo_result(org_name: str, repo_name: str, is_large: str):
    """ Log audit large repo result """
    # CSV row: org, repo, whether the audit judged the repo 'large'
    common.LARGE_REPOS_AUDIT_RESULTS_FILE.write(
        f"{org_name},"
        f"{repo_name},"
        f"{is_large}\n")
41 |
def get_snyk_repos_from_snyk_orgs(snyk_orgs, ARGS):
    """Build the list of SnykRepo objects for every org in scope.

    Returns an empty list when the orgs contain no matching projects.
    """
    snyk_projects = build_snyk_project_list(snyk_orgs, ARGS)
    if not snyk_projects:
        return []
    return get_snyk_repos_from_snyk_projects(snyk_projects)
54 |
def get_snyk_repos_from_snyk_projects(snyk_projects):
    """ Get list of unique repos built from an input of snyk projects

    Assumes `snyk_projects` is sorted by repo_full_name (as produced by
    build_snyk_project_list) so each repo forms one contiguous run.

    Cleanup: removed the dead `repo_projects` accumulator (it was built but
    never read — the inline list comprehension already gathers each repo's
    projects), the redundant enumerate index, and commented-out code.
    """
    snyk_repos = []
    curr_repo_name = ""

    for project in snyk_projects:
        # first project of a new repo: create the repo entry, attaching
        # every project that shares its repo_full_name
        if project["repo_full_name"] != curr_repo_name:
            snyk_repos.append(
                SnykRepo(project["repo_full_name"],
                         project["org_id"],
                         project["org_name"],
                         project["integration_id"],
                         project["origin"],
                         project["branch"],
                         [x for x in snyk_projects if x["repo_full_name"] ==
                          project["repo_full_name"]])
            )
        curr_repo_name = project["repo_full_name"]

    return snyk_repos
85 |
def build_snyk_project_list(snyk_orgs, ARGS):
    # pylint: disable=too-many-branches
    # pylint: disable=too-many-locals
    """Build list of Snyk projects across all Snyk orgs in scope

    Returns a list of per-project dicts for GitHub/GitHub-Enterprise
    projects, sorted by repo_full_name (downstream grouping relies on
    that ordering).
    """
    snyk_gh_projects = []
    snyk_projects = []
    project_origins = []

    # only projects from enabled SCM origins are considered
    if common.GITHUB_ENABLED:
        project_origins.append("github")
    if common.GITHUB_ENTERPRISE_ENABLED:
        project_origins.append("github-enterprise")

    for (i, snyk_org) in enumerate(snyk_orgs):
        print(f"({i+1}) org: {snyk_org.name}")
        try:
            # NOTE(review): if the filter returns an empty list this raises
            # IndexError, which is not caught below — confirm intended
            if common.GITHUB_ENABLED:
                gh_integration_id = snyk_org.integrations.filter(name="github")[
                    0].id
            if common.GITHUB_ENTERPRISE_ENABLED:
                gh_enterprise_integration_id = \
                    snyk_org.integrations.filter(name="github-enterprise")[0].id
        except snyk.errors.SnykHTTPError:
            print(f"\n\nUnable to retrieve GitHub integration id for org: {snyk_org.name}, " \
                "check permissions and integration status\n\n")
            sys.exit(1)

        snyk_projects = snyk_org.projects.all()

        # optionally restrict to a single repo given on the command line
        if ARGS.repo_name:
            snyk_projects = get_snyk_projects_for_repo(
                snyk_projects, ARGS.repo_name)

        for project in snyk_projects:
            integration_id = ''
            if project.origin in project_origins:
                # the matching *_ENABLED flag was set for this origin, so the
                # corresponding integration id variable is bound here
                if project.origin == 'github':
                    integration_id = gh_integration_id
                elif project.origin == 'github-enterprise':
                    integration_id = gh_enterprise_integration_id
                # snyk/goof(master):pom.xml or just snyk/goof:pom.xml
                split_project_name = project.name.split(
                    ":"
                )
                if len(split_project_name) == 2:
                    manifest = split_project_name[1]
                else:
                    # no ':' in the project name — the whole name stands in
                    # as the manifest value
                    manifest = split_project_name[0]
                # snyk/goof(master) or #snyk/goof
                tmp_branch_split = split_project_name[0].split("(")
                if len(tmp_branch_split) == 2:
                    branch_from_name = tmp_branch_split[1].split(")")[0]
                else:
                    branch_from_name = ""
                split_repo_name = tmp_branch_split[0].split("/")
                # print(f"project name/branch -> {project.name}/{project.branch}")
                snyk_gh_projects.append(
                    {
                        "id": project.id,
                        "name": project.name,
                        "repo_full_name": split_project_name[0].split("(")[0],
                        "repo_owner": split_repo_name[0],
                        "repo_name": split_repo_name[1].split("(")[0],
                        "manifest": manifest,
                        "org_id": snyk_org.id,
                        "org_name": snyk_org.name,
                        "origin": project.origin,
                        "type": project.type,
                        "integration_id": integration_id,
                        "branch_from_name": branch_from_name,
                        "branch": project.branch,
                        "is_monitored": project.isMonitored
                    }
                )

    # sort so projects of the same repo are contiguous (grouping depends on it)
    snyk_gh_projects = sorted(
        snyk_gh_projects, key=lambda x: x['repo_full_name'])
    return snyk_gh_projects
164 |
def get_snyk_projects_for_repo(snyk_projects, repo_full_name):
    """Return snyk projects that belong to the specified repo only

    A project name looks like '<owner>/<repo>:<manifest>'; everything
    before the first ':' identifies the repo.
    """
    return [
        project for project in snyk_projects
        if project.name.split(":")[0] == repo_full_name
    ]
177 |
def import_manifests(org_id, repo_full_name, integration_id, files=None) -> ImportStatus:
    """Import a Github Repo into Snyk

    Kicks off an import job for the repo — optionally restricted to the
    given manifest `files` — and returns an ImportStatus handle built from
    the job's Location header.

    :param org_id: Snyk organization id owning the integration
    :param repo_full_name: '<owner>/<name>' of the GitHub repo
    :param integration_id: Snyk SCM integration to import through
    :param files: optional list of {'path': ...} manifest dicts; entries
        beyond common.MAX_IMPORT_MANIFEST_PROJECTS are skipped and logged
        to the skipped-manifests CSV
    :raises snyk.errors.SnykHTTPError: when the import POST fails
        (one retry is attempted for 502/504 responses)

    Fixes: replaced the mutable default argument ``files=[]`` with None,
    and stopped rebinding ``repo_full_name`` to its split parts.
    """
    if files is None:
        files = []

    repo_parts = repo_full_name.split("/")
    org = common.snyk_client.organizations.get(org_id)
    path = f"org/{org.id}/integrations/{integration_id}/import"

    if len(files) > 0:
        # verify against set limit per repo
        if len(files) > common.MAX_IMPORT_MANIFEST_PROJECTS:
            # log skipped manifests exceeding limit to csv file
            # (files[MAX:] is equivalent to the old negative-index slice)
            skipped_files = files[common.MAX_IMPORT_MANIFEST_PROJECTS:]
            print(f"Importing up to limit of {common.MAX_IMPORT_MANIFEST_PROJECTS}/{len(files)}")
            print(f"See skipped manifests in {common.MANIFESTS_SKIPPED_ON_LIMIT_FILE.name}")
            for mf in skipped_files:
                common.MANIFESTS_SKIPPED_ON_LIMIT_FILE.write(f"{mf['path']}\n")
            # import manifests within limit
            files = files[:common.MAX_IMPORT_MANIFEST_PROJECTS]

        payload = {
            "target": {"owner": repo_parts[0], "name": repo_parts[1], "branch": ""},
            "files": files
        }
    else:
        payload = {
            "target": {"owner": repo_parts[0], "name": repo_parts[1], "branch": ""}
        }

    try:
        response = org.client.post(path, payload)
    except snyk.errors.SnykHTTPError as err:
        if err.code in [502, 504]:
            # transient gateway error: wait and retry the import once
            print("Server error, lets try again in a minute...")
            time.sleep(60)
            try:
                response = org.client.post(path, payload)
            except snyk.errors.SnykHTTPError as err_retry:
                print(f"Still failed after retry with {str(err_retry.code)}!")
                raise
        else:
            raise
    # the job id is the trailing segment of the Location header
    return ImportStatus(re.search('org/.+/integrations/.+/import/(.+)',
                                  response.headers['Location']).group(1),
                        response.headers['Location'],
                        org.id,
                        org.name,
                        repo_parts[0],
                        repo_parts[1],
                        files,
                        [])
229 |
def delete_snyk_project(project_id, org_id):
    """Delete a single Snyk project, returning False when it no longer exists."""
    org = common.snyk_client.organizations.get(org_id)
    try:
        return org.projects.get(project_id).delete()
    except snyk.errors.SnykNotFoundError:
        print(f" - Project {project_id} not found in org {org_id} ...")
        return False
241 |
def deactivate_snyk_project(project_id, org_id):
    """Deactivate a single Snyk project

    Returns the result of project.deactivate(), or False when the
    project no longer exists in the org.
    """
    org = common.snyk_client.organizations.get(org_id)

    try:
        project = org.projects.get(project_id)
        return project.deactivate()
    except snyk.errors.SnykNotFoundError:
        print(f" - Project {project_id} not found in org {org_id} ...")
        return False
252 |
def activate_snyk_project(project_id, org_id):
    """Activate a single Snyk project.

    Fixes docstring typos ("Acitvate", "Syyk"). Returns the result of
    the activate call on success, or False when the project no longer
    exists in the given org (reported to stdout, not raised), matching
    the behavior of delete_snyk_project/deactivate_snyk_project.
    """
    org = common.snyk_client.organizations.get(org_id)

    try:
        project = org.projects.get(project_id)
        return project.activate()
    except snyk.errors.SnykNotFoundError:
        print(f" - Project {project_id} not found in org {org_id} ...")
        return False
263 |
def process_import_status_checks(import_status_checks):
    # pylint: disable=too-many-nested-blocks, too-many-branches
    # pylint: disable=too-many-locals
    """
    Check status of pending import jobs
    up to PENDING_REMOVAL_MAX_CHECKS times,
    waiting PENDING_REMOVAL_CHECK_INTERVAL seconds between checks

    Side effects: completed target imports are appended to
    COMPLETED_PROJECT_IMPORTS_FILE; when a job finishes, its queued
    stale-project deletes run and are logged to
    RENAMED_MANIFESTS_DELETED_FILE; jobs still pending at timeout are
    logged to RENAMED_MANIFESTS_PENDING_FILE.
    """

    check_count = 0
    unique_import_status_checks = []
    import_jobs_completed = []    # import_job_ids that reached a non-pending state
    import_logs_completed = []    # per-repo log keys already reported (name + created)

    # total worst-case polling window, for the operator message below
    polling_minutes = (common.PENDING_REMOVAL_MAX_CHECKS * common.PENDING_REMOVAL_CHECK_INTERVAL)/60

    print(f"Checking import statuses, polling for up to "
          f"{str(polling_minutes)} minutes...")

    # get unique import status checks with combined pending deletes (if present)
    # (several checks may share one import_job_id; merge their delete lists
    # so each job is polled exactly once)
    seen_check_ids = []
    for import_status_check in import_status_checks:
        if import_status_check.import_job_id not in seen_check_ids:
            unique_import_status_checks.append(import_status_check)
            seen_check_ids.append(import_status_check.import_job_id)
        else:
            for (i, usc) in enumerate(unique_import_status_checks):
                if usc.import_job_id == import_status_check.import_job_id:
                    unique_import_status_checks[i].pending_project_deletes.extend(
                        import_status_check.pending_project_deletes)

    while check_count < common.PENDING_REMOVAL_MAX_CHECKS:
        if len(unique_import_status_checks) > len(import_jobs_completed):
            sys.stdout.write(f"{len(unique_import_status_checks) - len(import_jobs_completed)} "
                             f"batch pending\n")
            sys.stdout.flush()
            # check each import job statuses
            for import_job in unique_import_status_checks:
                if import_job.import_job_id not in import_jobs_completed:
                    import_status = get_import_status(
                        import_job.import_status_url, import_job.org_id
                    )
                    print(f"checking import job: {import_job.import_job_id}" \
                          f" [{import_status['status']}]")

                    # process each individual repo import
                    for import_status_log in import_status["logs"]:
                        # name alone is not unique across re-imports; pair it
                        # with the creation timestamp
                        uniq_import_log = import_status_log["name"] + \
                            '-' + import_status_log["created"]
                        if uniq_import_log not in import_logs_completed:
                            print(f" - [{import_status_log['name']}] "
                                  f"Import Target status: {import_status_log['status']} "
                                  f"({len(import_status_log['projects'])} projects)")
                            # if repo import status is complete, log
                            # and delete any pending waiting on this repo import
                            if import_status_log["status"] == "complete":
                                # print(import_status_log)
                                import_logs_completed.append(uniq_import_log)
                                for project in import_status_log["projects"]:
                                    # only entries with a targetFile represent
                                    # manifest-level projects worth recording
                                    if 'targetFile' in project:
                                        imported_project = project['targetFile']
                                        app_print(import_job.org_name,
                                                  import_status_log["name"],
                                                  f"Imported {imported_project}")
                                        # pylint: disable=line-too-long
                                        common.COMPLETED_PROJECT_IMPORTS_FILE.write(
                                            f"{import_job.org_name},"
                                            f"{import_status_log['name']}:{imported_project},"
                                            f"{project['success']}\n")

                    # any non-pending status (complete/failed) ends this job
                    if import_status["status"] != "pending":
                        import_jobs_completed.append(import_job.import_job_id)
                        # print(f'import job completed with id: {import_job.import_job_id}')
                        #job completed, do the pending deletes here
                        for pending_delete in import_job.pending_project_deletes:
                            app_print(pending_delete['org_name'],
                                      pending_delete['repo_full_name'],
                                      f"delete stale project [{pending_delete['id']}]")
                            delete_snyk_project(
                                pending_delete['id'],
                                pending_delete['org_id']
                            )
                            common.RENAMED_MANIFESTS_DELETED_FILE.write(
                                f"{pending_delete['org_name']},"
                                f"{pending_delete['repo_full_name']}:"
                                f"{pending_delete['manifest']}\n")

            print(f"Checking back in {common.PENDING_REMOVAL_CHECK_INTERVAL} seconds...")
            time.sleep(common.PENDING_REMOVAL_CHECK_INTERVAL)

        else:
            # every job reached a terminal state before the polling budget ran out
            print("None Pending, Done.\n")
            return

        check_count += 1
        # polling budget exhausted: record whatever is still pending
        if check_count == common.PENDING_REMOVAL_MAX_CHECKS:
            print(f"\nExiting with {len(unique_import_status_checks) - len(import_jobs_completed)} "
                  f"pending removals, logging...\n")

            for import_status_check in unique_import_status_checks:
                if import_status_check.import_job_id \
                        not in import_jobs_completed:
                    common.RENAMED_MANIFESTS_PENDING_FILE.write(
                        f"{import_status_check.org_name},"
                        f"{import_status_check.repo_owner}/{import_status_check.repo_name}\n")
            return
370 |
def update_project_branch(project_id, project_name, org_id, new_branch_name):
    """Update the branch monitored by a Snyk project.

    Args:
        project_id: id of the Snyk project to update
        project_name: project name (used only for log output)
        org_id: id of the Snyk org containing the project
        new_branch_name: branch the project should monitor from now on

    Returns:
        The updated project's id on success, or None when the update
        failed and the failure was logged.
    """
    org = common.snyk_client.organizations.get(org_id)
    path = f"org/{org.id}/project/{project_id}"

    payload = {
        "branch": new_branch_name
    }
    try:
        response = org.client.put(path, payload)
        log_updated_project_branch(org.name, project_id, project_name, new_branch_name)
        return response.json()['id']
    except snyk.errors.SnykHTTPError as err:
        if err.code in [502, 504]:
            print("Server error, lets try again in a minute...")
            time.sleep(60)
            try:
                # bug fix: the retry previously used POST; this endpoint
                # takes PUT, the same verb as the initial attempt above
                response = org.client.put(path, payload)
                log_updated_project_branch(org.name, project_id, project_name, new_branch_name)
                return response.json()['id']
            except snyk.errors.SnykHTTPError as err_retry:
                print(f"Still failed after retry with {str(err_retry.code)}! Logging...")
                log_update_project_branch_error(org.name, project_id, project_name, new_branch_name)
        else:
            # previously non-retryable HTTP errors were swallowed silently;
            # log them so the failure is visible in the error report
            print(f"Still failed after retry with {str(err.code)}! Logging...")
            log_update_project_branch_error(org.name, project_id, project_name, new_branch_name)
    return None
395 |
def get_import_status(import_status_url, org_id):
    """Retrieve status data for a Snyk import job"""

    # keep only the relative 'org/...' portion of the absolute status URL,
    # since the org-scoped client expects a path, not a full URL
    api_path = re.search('.+(org/.+)', import_status_url).group(1)

    requesting_org = common.snyk_client.organizations.get(org_id)
    return requesting_org.client.get(api_path).json()
405 |
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | [MASTER]
2 |
3 | # A comma-separated list of package or module names from where C extensions may
4 | # be loaded. Extensions are loading into the active Python interpreter and may
5 | # run arbitrary code.
6 | extension-pkg-whitelist=
7 |
8 | # Add files or directories to the blacklist. They should be base names, not
9 | # paths.
10 | ignore=CVS
11 |
12 | # Add files or directories matching the regex patterns to the blacklist. The
13 | # regex matches against base names, not paths.
14 | ignore-patterns=test_
15 |
16 | # Python code to execute, usually for sys.path manipulation such as
17 | # pygtk.require().
18 | #init-hook=
19 |
20 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
21 | # number of processors available to use.
22 | jobs=1
23 |
24 | # Control the amount of potential inferred values when inferring a single
25 | # object. This can help the performance when dealing with large functions or
26 | # complex, nested conditions.
27 | limit-inference-results=100
28 |
29 | # List of plugins (as comma separated values of python module names) to load,
30 | # usually to register additional checkers.
31 | load-plugins=
32 |
33 | # Pickle collected data for later comparisons.
34 | persistent=yes
35 |
36 | # Specify a configuration file.
37 | #rcfile=
38 |
39 | # When enabled, pylint would attempt to guess common misconfiguration and emit
40 | # user-friendly hints instead of false-positive error messages.
41 | suggestion-mode=yes
42 |
43 | # Allow loading of arbitrary C extensions. Extensions are imported into the
44 | # active Python interpreter and may run arbitrary code.
45 | unsafe-load-any-extension=no
46 |
47 |
48 | [MESSAGES CONTROL]
49 |
50 | # Only show warnings with the listed confidence levels. Leave empty to show
51 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
52 | confidence=
53 |
54 | # Disable the message, report, category or checker with the given id(s). You
55 | # can either give multiple identifiers separated by comma (,) or put this
56 | # option multiple times (only on the command line, not in the configuration
57 | # file where it should appear only once). You can also use "--disable=all" to
58 | # disable everything first and then reenable specific checks. For example, if
59 | # you want to run only the similarities checker, you can use "--disable=all
60 | # --enable=similarities". If you want to run only the classes checker, but have
61 | # no Warning level messages displayed, use "--disable=all --enable=classes
62 | # --disable=W".
63 | disable=print-statement,
64 | parameter-unpacking,
65 | unpacking-in-except,
66 | old-raise-syntax,
67 | backtick,
68 | long-suffix,
69 | old-ne-operator,
70 | old-octal-literal,
71 | import-star-module-level,
72 | non-ascii-bytes-literal,
73 | raw-checker-failed,
74 | bad-inline-option,
75 | locally-disabled,
76 | file-ignored,
77 | suppressed-message,
78 | useless-suppression,
79 | deprecated-pragma,
80 | use-symbolic-message-instead,
81 | apply-builtin,
82 | basestring-builtin,
83 | buffer-builtin,
84 | cmp-builtin,
85 | coerce-builtin,
86 | execfile-builtin,
87 | file-builtin,
88 | long-builtin,
89 | raw_input-builtin,
90 | reduce-builtin,
91 | standarderror-builtin,
92 | unicode-builtin,
93 | xrange-builtin,
94 | coerce-method,
95 | delslice-method,
96 | getslice-method,
97 | setslice-method,
98 | no-absolute-import,
99 | old-division,
100 | dict-iter-method,
101 | dict-view-method,
102 | next-method-called,
103 | metaclass-assignment,
104 | indexing-exception,
105 | raising-string,
106 | reload-builtin,
107 | oct-method,
108 | hex-method,
109 | nonzero-method,
110 | cmp-method,
111 | input-builtin,
112 | round-builtin,
113 | intern-builtin,
114 | unichr-builtin,
115 | map-builtin-not-iterating,
116 | zip-builtin-not-iterating,
117 | range-builtin-not-iterating,
118 | filter-builtin-not-iterating,
119 | using-cmp-argument,
120 | eq-without-hash,
121 | div-method,
122 | idiv-method,
123 | rdiv-method,
124 | exception-message-attribute,
125 | invalid-str-codec,
126 | sys-max-int,
127 | bad-python3-import,
128 | deprecated-string-function,
129 | deprecated-str-translate-call,
130 | deprecated-itertools-function,
131 | deprecated-types-field,
132 | next-method-defined,
133 | dict-items-not-iterating,
134 | dict-keys-not-iterating,
135 | dict-values-not-iterating,
136 | deprecated-operator-function,
137 | deprecated-urllib-function,
138 | xreadlines-attribute,
139 | deprecated-sys-function,
140 | exception-escape,
141 | comprehension-escape
142 |
143 | # Enable the message, report, category or checker with the given id(s). You can
144 | # either give multiple identifier separated by comma (,) or put this option
145 | # multiple time (only on the command line, not in the configuration file where
146 | # it should appear only once). See also the "--disable" option for examples.
147 | enable=c-extension-no-member
148 |
149 |
150 | [REPORTS]
151 |
152 | # Python expression which should return a score less than or equal to 10. You
153 | # have access to the variables 'error', 'warning', 'refactor', and 'convention'
154 | # which contain the number of messages in each category, as well as 'statement'
155 | # which is the total number of statements analyzed. This score is used by the
156 | # global evaluation report (RP0004).
157 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
158 |
159 | # Template used to display messages. This is a python new-style format string
160 | # used to format the message information. See doc for all details.
161 | #msg-template=
162 |
163 | # Set the output format. Available formats are text, parseable, colorized, json
164 | # and msvs (visual studio). You can also give a reporter class, e.g.
165 | # mypackage.mymodule.MyReporterClass.
166 | output-format=text
167 |
168 | # Tells whether to display a full report or only the messages.
169 | reports=no
170 |
171 | # Activate the evaluation score.
172 | score=yes
173 |
174 |
175 | [REFACTORING]
176 |
177 | # Maximum number of nested blocks for function / method body
178 | max-nested-blocks=5
179 |
180 | # Complete name of functions that never returns. When checking for
181 | # inconsistent-return-statements if a never returning function is called then
182 | # it will be considered as an explicit return statement and no message will be
183 | # printed.
184 | never-returning-functions=sys.exit
185 |
186 |
187 | [LOGGING]
188 |
189 | # Format style used to check logging format string. `old` means using %
190 | # formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
191 | logging-format-style=fstr
192 |
193 | # Logging modules to check that the string format arguments are in logging
194 | # function parameter format.
195 | logging-modules=logging
196 |
197 |
198 | [SPELLING]
199 |
200 | # Limits count of emitted suggestions for spelling mistakes.
201 | max-spelling-suggestions=4
202 |
203 | # Spelling dictionary name. Available dictionaries: none. To make it work,
204 | # install the python-enchant package.
205 | spelling-dict=
206 |
207 | # List of comma separated words that should not be checked.
208 | spelling-ignore-words=
209 |
210 | # A path to a file that contains the private dictionary; one word per line.
211 | spelling-private-dict-file=
212 |
213 | # Tells whether to store unknown words to the private dictionary (see the
214 | # --spelling-private-dict-file option) instead of raising a message.
215 | spelling-store-unknown-words=no
216 |
217 |
218 | [MISCELLANEOUS]
219 |
220 | # List of note tags to take in consideration, separated by a comma.
221 | notes=FIXME,
222 | XXX,
223 | TODO
224 |
225 |
226 | [TYPECHECK]
227 |
228 | # List of decorators that produce context managers, such as
229 | # contextlib.contextmanager. Add to this list to register other decorators that
230 | # produce valid context managers.
231 | contextmanager-decorators=contextlib.contextmanager
232 |
233 | # List of members which are set dynamically and missed by pylint inference
234 | # system, and so shouldn't trigger E1101 when accessed. Python regular
235 | # expressions are accepted.
236 | generated-members=
237 |
238 | # Tells whether missing members accessed in mixin class should be ignored. A
239 | # mixin class is detected if its name ends with "mixin" (case insensitive).
240 | ignore-mixin-members=yes
241 |
242 | # Tells whether to warn about missing members when the owner of the attribute
243 | # is inferred to be None.
244 | ignore-none=yes
245 |
246 | # This flag controls whether pylint should warn about no-member and similar
247 | # checks whenever an opaque object is returned when inferring. The inference
248 | # can return multiple potential results while evaluating a Python object, but
249 | # some branches might not be evaluated, which results in partial inference. In
250 | # that case, it might be useful to still emit no-member and other checks for
251 | # the rest of the inferred objects.
252 | ignore-on-opaque-inference=yes
253 |
254 | # List of class names for which member attributes should not be checked (useful
255 | # for classes with dynamically set attributes). This supports the use of
256 | # qualified names.
257 | ignored-classes=optparse.Values,thread._local,_thread._local
258 |
259 | # List of module names for which member attributes should not be checked
260 | # (useful for modules/projects where namespaces are manipulated during runtime
261 | # and thus existing member attributes cannot be deduced by static analysis). It
262 | # supports qualified module names, as well as Unix pattern matching.
263 | ignored-modules=
264 |
265 | # Show a hint with possible names when a member name was not found. The aspect
266 | # of finding the hint is based on edit distance.
267 | missing-member-hint=yes
268 |
269 | # The minimum edit distance a name should have in order to be considered a
270 | # similar match for a missing member name.
271 | missing-member-hint-distance=1
272 |
273 | # The total number of similar names that should be taken in consideration when
274 | # showing a hint for a missing member.
275 | missing-member-max-choices=1
276 |
277 | # List of decorators that change the signature of a decorated function.
278 | signature-mutators=
279 |
280 |
281 | [VARIABLES]
282 |
283 | # List of additional names supposed to be defined in builtins. Remember that
284 | # you should avoid defining new builtins when possible.
285 | additional-builtins=
286 |
287 | # Tells whether unused global variables should be treated as a violation.
288 | allow-global-unused-variables=yes
289 |
290 | # List of strings which can identify a callback function by name. A callback
291 | # name must start or end with one of those strings.
292 | callbacks=cb_,
293 | _cb
294 |
295 | # A regular expression matching the name of dummy variables (i.e. expected to
296 | # not be used).
297 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
298 |
299 | # Argument names that match this expression will be ignored. Default to name
300 | # with leading underscore.
301 | ignored-argument-names=_.*|^ignored_|^unused_
302 |
303 | # Tells whether we should check for unused import in __init__ files.
304 | init-import=no
305 |
306 | # List of qualified module names which can have objects that can redefine
307 | # builtins.
308 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
309 |
310 |
311 | [FORMAT]
312 |
313 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
314 | expected-line-ending-format=
315 |
316 | # Regexp for a line that is allowed to be longer than the limit.
317 | ignore-long-lines=^\s*(# )?<?https?://\S+>?$
318 |
319 | # Number of spaces of indent required inside a hanging or continued line.
320 | indent-after-paren=4
321 |
322 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
323 | # tab).
324 | indent-string=' '
325 |
326 | # Maximum number of characters on a single line.
327 | max-line-length=100
328 |
329 | # Maximum number of lines in a module.
330 | max-module-lines=1000
331 |
332 | # List of optional constructs for which whitespace checking is disabled. `dict-
333 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
334 | # `trailing-comma` allows a space between comma and closing bracket: (a, ).
335 | # `empty-line` allows space-only lines.
336 | no-space-check=trailing-comma,
337 | dict-separator
338 |
339 | # Allow the body of a class to be on the same line as the declaration if body
340 | # contains single statement.
341 | single-line-class-stmt=no
342 |
343 | # Allow the body of an if to be on the same line as the test if there is no
344 | # else.
345 | single-line-if-stmt=no
346 |
347 |
348 | [SIMILARITIES]
349 |
350 | # Ignore comments when computing similarities.
351 | ignore-comments=yes
352 |
353 | # Ignore docstrings when computing similarities.
354 | ignore-docstrings=yes
355 |
356 | # Ignore imports when computing similarities.
357 | ignore-imports=no
358 |
359 | # Minimum lines number of a similarity.
360 | min-similarity-lines=4
361 |
362 |
363 | [BASIC]
364 |
365 | # Naming style matching correct argument names.
366 | argument-naming-style=snake_case
367 |
368 | # Regular expression matching correct argument names. Overrides argument-
369 | # naming-style.
370 | #argument-rgx=
371 |
372 | # Naming style matching correct attribute names.
373 | attr-naming-style=snake_case
374 |
375 | # Regular expression matching correct attribute names. Overrides attr-naming-
376 | # style.
377 | #attr-rgx=
378 |
379 | # Bad variable names which should always be refused, separated by a comma.
380 | bad-names=foo,
381 | bar,
382 | baz,
383 | toto,
384 | tutu,
385 | tata
386 |
387 | # Naming style matching correct class attribute names.
388 | class-attribute-naming-style=any
389 |
390 | # Regular expression matching correct class attribute names. Overrides class-
391 | # attribute-naming-style.
392 | #class-attribute-rgx=
393 |
394 | # Naming style matching correct class names.
395 | class-naming-style=PascalCase
396 |
397 | # Regular expression matching correct class names. Overrides class-naming-
398 | # style.
399 | #class-rgx=
400 |
401 | # Naming style matching correct constant names.
402 | const-naming-style=UPPER_CASE
403 |
404 | # Regular expression matching correct constant names. Overrides const-naming-
405 | # style.
406 | #const-rgx=
407 |
408 | # Minimum line length for functions/classes that require docstrings, shorter
409 | # ones are exempt.
410 | docstring-min-length=-1
411 |
412 | # Naming style matching correct function names.
413 | function-naming-style=snake_case
414 |
415 | # Regular expression matching correct function names. Overrides function-
416 | # naming-style.
417 | #function-rgx=
418 |
419 | # Good variable names which should always be accepted, separated by a comma.
420 | good-names=i,
421 | j,
422 | k,
423 | ex,
424 | Run,
425 | _
426 |
427 | # Include a hint for the correct naming format with invalid-name.
428 | include-naming-hint=no
429 |
430 | # Naming style matching correct inline iteration names.
431 | inlinevar-naming-style=any
432 |
433 | # Regular expression matching correct inline iteration names. Overrides
434 | # inlinevar-naming-style.
435 | #inlinevar-rgx=
436 |
437 | # Naming style matching correct method names.
438 | method-naming-style=snake_case
439 |
440 | # Regular expression matching correct method names. Overrides method-naming-
441 | # style.
442 | #method-rgx=
443 |
444 | # Naming style matching correct module names.
445 | module-naming-style=snake_case
446 |
447 | # Regular expression matching correct module names. Overrides module-naming-
448 | # style.
449 | #module-rgx=
450 |
451 | # Colon-delimited sets of names that determine each other's naming style when
452 | # the name regexes allow several styles.
453 | name-group=
454 |
455 | # Regular expression which should only match function or class names that do
456 | # not require a docstring.
457 | no-docstring-rgx=^_
458 |
459 | # List of decorators that produce properties, such as abc.abstractproperty. Add
460 | # to this list to register other decorators that produce valid properties.
461 | # These decorators are taken in consideration only for invalid-name.
462 | property-classes=abc.abstractproperty
463 |
464 | # Naming style matching correct variable names.
465 | variable-naming-style=snake_case
466 |
467 | # Regular expression matching correct variable names. Overrides variable-
468 | # naming-style.
469 | #variable-rgx=
470 |
471 |
472 | [STRING]
473 |
474 | # This flag controls whether the implicit-str-concat-in-sequence should
475 | # generate a warning on implicit string concatenation in sequences defined over
476 | # several lines.
477 | check-str-concat-over-line-jumps=no
478 |
479 |
480 | [IMPORTS]
481 |
482 | # List of modules that can be imported at any level, not just the top level
483 | # one.
484 | allow-any-import-level=
485 |
486 | # Allow wildcard imports from modules that define __all__.
487 | allow-wildcard-with-all=no
488 |
489 | # Analyse import fallback blocks. This can be used to support both Python 2 and
490 | # 3 compatible code, which means that the block might have code that exists
491 | # only in one or another interpreter, leading to false positives when analysed.
492 | analyse-fallback-blocks=no
493 |
494 | # Deprecated modules which should not be used, separated by a comma.
495 | deprecated-modules=optparse,tkinter.tix
496 |
497 | # Create a graph of external dependencies in the given file (report RP0402 must
498 | # not be disabled).
499 | ext-import-graph=
500 |
501 | # Create a graph of every (i.e. internal and external) dependencies in the
502 | # given file (report RP0402 must not be disabled).
503 | import-graph=
504 |
505 | # Create a graph of internal dependencies in the given file (report RP0402 must
506 | # not be disabled).
507 | int-import-graph=
508 |
509 | # Force import order to recognize a module as part of the standard
510 | # compatibility libraries.
511 | known-standard-library=
512 |
513 | # Force import order to recognize a module as part of a third party library.
514 | known-third-party=enchant
515 |
516 | # Couples of modules and preferred modules, separated by a comma.
517 | preferred-modules=
518 |
519 |
520 | [CLASSES]
521 |
522 | # List of method names used to declare (i.e. assign) instance attributes.
523 | defining-attr-methods=__init__,
524 | __new__,
525 | setUp,
526 | __post_init__
527 |
528 | # List of member names, which should be excluded from the protected access
529 | # warning.
530 | exclude-protected=_asdict,
531 | _fields,
532 | _replace,
533 | _source,
534 | _make
535 |
536 | # List of valid names for the first argument in a class method.
537 | valid-classmethod-first-arg=cls
538 |
539 | # List of valid names for the first argument in a metaclass class method.
540 | valid-metaclass-classmethod-first-arg=cls
541 |
542 |
543 | [DESIGN]
544 |
545 | # Maximum number of arguments for function / method.
546 | max-args=5
547 |
548 | # Maximum number of attributes for a class (see R0902).
549 | max-attributes=7
550 |
551 | # Maximum number of boolean expressions in an if statement (see R0916).
552 | max-bool-expr=5
553 |
554 | # Maximum number of branch for function / method body.
555 | max-branches=12
556 |
557 | # Maximum number of locals for function / method body.
558 | max-locals=15
559 |
560 | # Maximum number of parents for a class (see R0901).
561 | max-parents=7
562 |
563 | # Maximum number of public methods for a class (see R0904).
564 | max-public-methods=20
565 |
566 | # Maximum number of return / yield for function / method body.
567 | max-returns=6
568 |
569 | # Maximum number of statements in function / method body.
570 | max-statements=50
571 |
572 | # Minimum number of public methods for a class (see R0903).
573 | min-public-methods=2
574 |
575 |
576 | [EXCEPTIONS]
577 |
578 | # Exceptions that will emit a warning when being caught. Defaults to
579 | # "BaseException, Exception".
580 | overgeneral-exceptions=BaseException,
581 | Exception
582 |
--------------------------------------------------------------------------------