├── .editorconfig ├── .github ├── dependabot.yml └── workflows │ ├── add-depr-ticket-to-depr-board.yml │ ├── add-remove-label-on-comment.yml │ ├── ci.yml │ ├── commitlint.yml │ ├── pypi-publish.yml │ ├── self-assign-issue.yml │ ├── support-window-issue-creator.yml │ └── upgrade-python-requirements.yml ├── .gitignore ├── LICENSE.txt ├── MANIFEST.in ├── Makefile ├── NOTICE.txt ├── README.rst ├── barcalendar.py ├── catalog-info.yaml ├── count-lines.sh ├── dev-requirements.txt ├── edx_repo_tools ├── __init__.py ├── add_common_constraint.py ├── audit_gh_users │ ├── README.rst │ ├── __init__.py │ ├── audit_users.py │ ├── extra.in │ └── extra.txt ├── auth.py ├── codemods │ ├── __init__.py │ ├── django2 │ │ ├── README │ │ ├── __init__.py │ │ ├── auth_anonymous_update.py │ │ ├── foreignkey_on_delete_mod.py │ │ └── widget_add_renderer_mod.py │ ├── django3 │ │ ├── __init__.py │ │ ├── add_new_django32_settings.py │ │ ├── github_actions_modernizer.py │ │ ├── github_actions_modernizer_django.py │ │ ├── remove_python2_unicode_compatible.py │ │ ├── replace_render_to_response.py │ │ ├── replace_static.py │ │ ├── replace_unicode_with_str.py │ │ ├── script_to_replace_static.sh │ │ ├── setup_file_modernizer.py │ │ ├── tox_modernizer.py │ │ └── travis_modernizer.py │ ├── django42 │ │ ├── github_actions_modernizer_django42.py │ │ ├── remove_providing_args_arg.py │ │ └── tox_moderniser_django42.py │ ├── node16 │ │ ├── __init__.py │ │ ├── gha_ci_modernizer.py │ │ └── gha_release_workflow_modernizer.py │ └── python312 │ │ ├── __init__.py │ │ ├── gh_actions_modernizer.py │ │ └── tox_modernizer.py ├── conventional_commits │ ├── README.rst │ ├── __init__.py │ ├── commitstats.py │ ├── extra.in │ └── extra.txt ├── data.py ├── dependabot_yml.py ├── dev │ ├── __init__.py │ ├── clone_org.py │ ├── get_org_repo_urls.py │ └── show_hooks.py ├── find_dependencies │ ├── README.rst │ ├── __init__.py │ ├── extra.in │ ├── extra.txt │ ├── find_dependencies.py │ └── find_python_dependencies.py ├── gitgraft │ 
├── README.md │ ├── __init__.py │ ├── gitgraft.py │ ├── gitgraft_modulestore │ └── gitgraft_platform_core ├── helpers.py ├── modernize_openedx_yaml.py ├── pull_request_creator │ ├── __init__.py │ ├── extra.in │ └── extra.txt ├── release │ ├── __init__.py │ ├── tag_release.py │ └── tag_release_v2.py ├── repo_access_scraper │ ├── README.rst │ ├── __init__.py │ ├── extra.in │ ├── extra.txt │ └── repo_access_scraper.py ├── repo_checks │ ├── README.rst │ ├── __init__.py │ ├── extra.in │ ├── extra.txt │ ├── labels.yaml │ └── repo_checks.py └── utils.py ├── gittools.sh ├── maintainer_reports ├── .gitignore ├── README.md ├── gcp_wrapper_open_pulls.py ├── graphql_queries.py ├── graphql_requests.py ├── graphql_util.py ├── main.py ├── requirements.txt └── sql_queries.py ├── pylintrc ├── pylintrc_tweaks ├── requirements.txt ├── requirements ├── base.in ├── base.txt ├── common_constraints.txt ├── constraints.txt ├── development.in ├── development.txt ├── pip-tools.in ├── pip-tools.txt ├── pip.in └── pip.txt ├── setup.py └── tests ├── __init__.py ├── fake_repos ├── repo_with_nvmrc │ ├── .github │ │ └── workflows │ │ │ └── release.yml │ └── .nvmrc └── repo_without_nvmrc │ └── .github │ └── workflows │ └── release.yml ├── pull_request_creator_test_data ├── diff.txt └── minor_diff.txt ├── sample_files ├── .nvmrc ├── sample_ci_file.yml ├── sample_ci_file_2.yml ├── sample_ci_file_3.yml ├── sample_ci_file_4.yml ├── sample_ci_file_5.yml ├── sample_ci_file_multiple_jobs.yml ├── sample_django_settings.py ├── sample_django_settings_2.py ├── sample_node_ci.yml ├── sample_node_ci2.yml ├── sample_python2_unicode_removal.py ├── sample_render_to_response.py ├── sample_render_to_response_2.py └── sample_setup_file.py ├── sample_openedx.yaml ├── sample_tox_config.ini ├── sample_tox_config_2.ini ├── test_actions_modernizer.py ├── test_actions_modernizer_django.py ├── test_add_new_django32_settings.py ├── test_gha_release_workflow_modernizer.py ├── test_modernize_openedx_yaml.py ├── 
test_node_ci_modernizer.py ├── test_pull_request_creator.py ├── test_remove_python2_unicode_compatible.py ├── test_replace_render_to_response.py ├── test_repo_checks.py ├── test_setup_file_modernizer.py ├── test_tag_release.py ├── test_tox_modernizer.py ├── test_travis.yml ├── test_travis_2.yml └── test_travis_modernizer.py /.editorconfig: -------------------------------------------------------------------------------- 1 | # *************************** 2 | # ** DO NOT EDIT THIS FILE ** 3 | # *************************** 4 | # 5 | # This file was generated by edx-lint: https://github.com/openedx/edx-lint 6 | # 7 | # If you want to change this file, you have two choices, depending on whether 8 | # you want to make a local change that applies only to this repo, or whether 9 | # you want to make a central change that applies to all repos using edx-lint. 10 | # 11 | # Note: If your .editorconfig file is simply out-of-date relative to the latest 12 | # .editorconfig in edx-lint, ensure you have the latest edx-lint installed 13 | # and then follow the steps for a "LOCAL CHANGE". 14 | # 15 | # LOCAL CHANGE: 16 | # 17 | # 1. Edit the local .editorconfig_tweaks file to add changes just to this 18 | # repo's file. 19 | # 20 | # 2. Run: 21 | # 22 | # $ edx_lint write .editorconfig 23 | # 24 | # 3. This will modify the local file. Submit a pull request to get it 25 | # checked in so that others will benefit. 26 | # 27 | # 28 | # CENTRAL CHANGE: 29 | # 30 | # 1. Edit the .editorconfig file in the edx-lint repo at 31 | # https://github.com/openedx/edx-lint/blob/master/edx_lint/files/.editorconfig 32 | # 33 | # 2. install the updated version of edx-lint (in edx-lint): 34 | # 35 | # $ pip install . 36 | # 37 | # 3. Run (in edx-lint): 38 | # 39 | # $ edx_lint write .editorconfig 40 | # 41 | # 4. Make a new version of edx_lint, submit and review a pull request with the 42 | # .editorconfig update, and after merging, update the edx-lint version and 43 | # publish the new version. 
44 | # 45 | # 5. In your local repo, install the newer version of edx-lint. 46 | # 47 | # 6. Run: 48 | # 49 | # $ edx_lint write .editorconfig 50 | # 51 | # 7. This will modify the local file. Submit a pull request to get it 52 | # checked in so that others will benefit. 53 | # 54 | # 55 | # 56 | # 57 | # 58 | # STAY AWAY FROM THIS FILE! 59 | # 60 | # 61 | # 62 | # 63 | # 64 | # SERIOUSLY. 65 | # 66 | # ------------------------------ 67 | # Generated by edx-lint version: 5.3.4 68 | # ------------------------------ 69 | [*] 70 | end_of_line = lf 71 | insert_final_newline = true 72 | charset = utf-8 73 | indent_style = space 74 | indent_size = 4 75 | max_line_length = 120 76 | trim_trailing_whitespace = true 77 | 78 | [{Makefile, *.mk}] 79 | indent_style = tab 80 | indent_size = 8 81 | 82 | [*.{yml,yaml,json}] 83 | indent_size = 2 84 | 85 | [*.js] 86 | indent_size = 2 87 | 88 | [*.diff] 89 | trim_trailing_whitespace = false 90 | 91 | [.git/*] 92 | trim_trailing_whitespace = false 93 | 94 | [COMMIT_EDITMSG] 95 | max_line_length = 72 96 | 97 | [*.rst] 98 | max_line_length = 79 99 | 100 | # bbcbced841ed335dd8abb7456a6b13485d701b40 101 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Adding new check for github-actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "weekly" 8 | reviewers: 9 | - "openedx/arbi-bom" 10 | -------------------------------------------------------------------------------- /.github/workflows/add-depr-ticket-to-depr-board.yml: -------------------------------------------------------------------------------- 1 | # Run the workflow that adds new tickets that are either: 2 | # - labelled "DEPR" 3 | # - title starts with "[DEPR]" 4 | # - body starts with "Proposal Date" (this is the first template field) 5 | # to the org-wide DEPR project 
board 6 | 7 | name: Add newly created DEPR issues to the DEPR project board 8 | 9 | on: 10 | issues: 11 | types: [opened] 12 | 13 | jobs: 14 | routeissue: 15 | uses: openedx/.github/.github/workflows/add-depr-ticket-to-depr-board.yml@master 16 | secrets: 17 | GITHUB_APP_ID: ${{ secrets.GRAPHQL_AUTH_APP_ID }} 18 | GITHUB_APP_PRIVATE_KEY: ${{ secrets.GRAPHQL_AUTH_APP_PEM }} 19 | SLACK_BOT_TOKEN: ${{ secrets.SLACK_ISSUE_BOT_TOKEN }} 20 | -------------------------------------------------------------------------------- /.github/workflows/add-remove-label-on-comment.yml: -------------------------------------------------------------------------------- 1 | # This workflow runs when a comment is made on the ticket 2 | # If the comment starts with "label: " it tries to apply 3 | # the label indicated in rest of comment. 4 | # If the comment starts with "remove label: ", it tries 5 | # to remove the indicated label. 6 | # Note: Labels are allowed to have spaces and this script does 7 | # not parse spaces (as often a space is legitimate), so the command 8 | # "label: really long lots of words label" will apply the 9 | # label "really long lots of words label" 10 | 11 | name: Allows for the adding and removing of labels via comment 12 | 13 | on: 14 | issue_comment: 15 | types: [created] 16 | 17 | jobs: 18 | add_remove_labels: 19 | uses: openedx/.github/.github/workflows/add-remove-label-on-comment.yml@master 20 | 21 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Python CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | 8 | jobs: 9 | build: 10 | 11 | runs-on: ${{ matrix.os }} 12 | strategy: 13 | matrix: 14 | os: [ubuntu-latest] 15 | python-version: 16 | - '3.12' 17 | 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: setup python 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: ${{ 
matrix.python-version }} 24 | 25 | - name: Install pip 26 | run: pip install -r requirements/pip.txt 27 | 28 | - name: Install Dependencies 29 | run: make dev-install 30 | 31 | - name: Run Tests 32 | run: make test 33 | -------------------------------------------------------------------------------- /.github/workflows/commitlint.yml: -------------------------------------------------------------------------------- 1 | # Run commitlint on the commit messages in a pull request. 2 | 3 | name: Lint Commit Messages 4 | 5 | on: 6 | - pull_request 7 | 8 | jobs: 9 | commitlint: 10 | uses: openedx/.github/.github/workflows/commitlint.yml@master 11 | -------------------------------------------------------------------------------- /.github/workflows/pypi-publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish package to PyPi 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | 8 | jobs: 9 | 10 | push: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v4 16 | - name: setup python 17 | uses: actions/setup-python@v5 18 | with: 19 | python-version: 3.8 20 | 21 | - name: Install pip 22 | run: pip install -r requirements/pip.txt 23 | 24 | - name: Install Dependencies 25 | run: pip install setuptools wheel 26 | 27 | - name: Build package 28 | run: python setup.py sdist bdist_wheel 29 | 30 | - name: Publish to PyPi 31 | uses: pypa/gh-action-pypi-publish@release/v1 32 | with: 33 | user: __token__ 34 | password: ${{ secrets.PYPI_UPLOAD_TOKEN }} 35 | -------------------------------------------------------------------------------- /.github/workflows/self-assign-issue.yml: -------------------------------------------------------------------------------- 1 | # This workflow runs when a comment is made on the ticket 2 | # If the comment starts with "assign me" it assigns the author to the 3 | # ticket (case insensitive) 4 | 5 | name: Assign comment author to ticket if they say "assign me" 6 | on: 7 | 
issue_comment: 8 | types: [created] 9 | 10 | jobs: 11 | self_assign_by_comment: 12 | uses: openedx/.github/.github/workflows/self-assign-issue.yml@master 13 | -------------------------------------------------------------------------------- /.github/workflows/support-window-issue-creator.yml: -------------------------------------------------------------------------------- 1 | name: Support Window Issue Creator 2 | on: 3 | schedule: 4 | - cron: 0 0 1 */3 * 5 | 6 | jobs: 7 | create_issue: 8 | name: Create Support Window Issue 9 | runs-on: ubuntu-latest 10 | permissions: 11 | issues: write 12 | steps: 13 | - name: Create issue 14 | uses: imjohnbo/issue-bot@v3 15 | with: 16 | assignees: "usamasadiq" 17 | labels: "arbi-bom" 18 | close-previous: true 19 | title: "Support Window Update" 20 | body: | 21 | ### Description 22 | Copy the link of the current issue and add it to the [Arbi-bom GitHub Project](https://github.com/orgs/edx/projects/12). 23 | 24 | Follow the steps mentioned in the `repo_tools/barcalendar.py` and check the following points for the [Support Window Sheet](https://docs.google.com/spreadsheets/u/2/d/11DheEtMDGrbA9hsUvZ2SEd4Cc8CaC4mAfoV8SVaLBGI/edit#gid=195838733) update. 25 | For reference, use the respective end of life dates from [endoflife.date](https://endoflife.date/). 
26 | 27 | - [ ] Update the version of each dependency currently in use, if necessary 28 | - [ ] Delete any versions older than one prior to the one currently in use 29 | - [ ] Add end dates which were not previously known 30 | - [ ] Add new releases that we're likely to consider upgrading to 31 | - [ ] Do a quick review of our dependencies to see if any categories should be added or removed 32 | 33 | env: 34 | GITHUB_TOKEN: ${{ secrets.requirements_bot_github_token }} 35 | -------------------------------------------------------------------------------- /.github/workflows/upgrade-python-requirements.yml: -------------------------------------------------------------------------------- 1 | name: Upgrade Requirements 2 | 3 | on: 4 | schedule: 5 | - cron: "0 4 * * 4" 6 | workflow_dispatch: 7 | inputs: 8 | branch: 9 | description: 'Target branch to create requirements PR against' 10 | required: true 11 | default: 'master' 12 | jobs: 13 | call-upgrade-python-requirements-workflow: 14 | with: 15 | branch: ${{ github.event.inputs.branch }} 16 | send_success_notification: false 17 | python_version: "3.12" 18 | secrets: 19 | requirements_bot_github_token: ${{ secrets.REQUIREMENTS_BOT_GITHUB_TOKEN }} 20 | requirements_bot_github_email: ${{ secrets.REQUIREMENTS_BOT_GITHUB_EMAIL }} 21 | edx_smtp_username: ${{ secrets.EDX_SMTP_USERNAME }} 22 | edx_smtp_password: ${{ secrets.EDX_SMTP_PASSWORD }} 23 | uses: openedx/.github/.github/workflows/upgrade-python-requirements.yml@master 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | auth.yaml 2 | venv 3 | *.pyc 4 | *~ 5 | 6 | .sass-cache 7 | .webcache 8 | .cache 9 | 10 | .env 11 | 12 | # Coverage data 13 | .coverage 14 | htmlcov 15 | 16 | # The locally checked out repos 17 | .oep2-workspace 18 | 19 | # Ourselves, installed 20 | edx_repo_tools.egg-info 21 | 22 | # PyCharm files 23 | .idea/ 24 | 25 | build/ 
26 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE.txt 2 | include README.rst 3 | include requirements/base.in 4 | recursive-include edx_repo_tools *.html *.png *.gif *js *.css *jpg *jpeg *svg *py 5 | include requirements/constraints.txt 6 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: help clean test dev-install install upgrade lint 2 | 3 | help: ## display this help message 4 | @echo "Please use \`make ' where is one of" 5 | @awk -F ':.*?## ' '/^[a-zA-Z]/ && NF==2 {printf "\033[36m %-25s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) | sort 6 | 7 | clean: ## remove transient artifacts 8 | rm -rf .*cache *.egg-info .coverage build/ htmlcov/ 9 | find . -name '__pycache__' -exec rm -rf {} + 10 | find . -name '*.pyc' -exec rm -f {} + 11 | find . -name '*.pyo' -exec rm -f {} + 12 | find . -name '*~' -exec rm -f {} + 13 | 14 | test: ## run the tests 15 | pytest 16 | 17 | dev-install: ## install everything to develop here 18 | pip install -e .[dev] 19 | 20 | install: ## install everything to run the tools 21 | pip install -r requirements/base.txt 22 | pip install -e . 
23 | 24 | COMMON_CONSTRAINTS_TXT=requirements/common_constraints.txt 25 | .PHONY: $(COMMON_CONSTRAINTS_TXT) 26 | $(COMMON_CONSTRAINTS_TXT): 27 | wget -O "$(@)" https://raw.githubusercontent.com/edx/edx-lint/master/edx_lint/files/common_constraints.txt || touch "$(@)" 28 | 29 | upgrade: export CUSTOM_COMPILE_COMMAND=make upgrade 30 | upgrade: $(COMMON_CONSTRAINTS_TXT) ## update the requirements/*.txt files with the latest packages satisfying requirements/*.in 31 | pip install -qr requirements/pip-tools.txt 32 | pip-compile --upgrade --allow-unsafe --rebuild -o requirements/pip.txt requirements/pip.in 33 | pip-compile --upgrade -o requirements/pip-tools.txt requirements/pip-tools.in 34 | pip install -qr requirements/pip.txt 35 | pip install -qr requirements/pip-tools.txt 36 | pip-compile --upgrade -o requirements/base.txt requirements/base.in 37 | pip-compile --upgrade -o requirements/development.txt requirements/development.in 38 | for fextra in edx_repo_tools/*/extra.in; do pip-compile --upgrade -o $${fextra%.in}.txt $$fextra; done 39 | 40 | lint: ## run pylint 41 | pylint *.py edx_repo_tools tests 42 | -------------------------------------------------------------------------------- /NOTICE.txt: -------------------------------------------------------------------------------- 1 | Copyright 2014-2016, edX 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this work except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ################### 2 | Open edX Repo Tools 3 | ################### 4 | 5 | This repo contains a number of tools Open edX engineers use for working with 6 | GitHub repositories. 7 | 8 | The set of tools has grown over the years. Some are old and in current use, 9 | some have fallen out of use, some are quite new. 10 | 11 | Setting up GitHub authentication 12 | ================================ 13 | 14 | Most of these make GitHub API calls, and so will need GitHub credentials in 15 | order to not be severely rate-limited. Edit (or create) `~/.netrc` so that it 16 | has an entry like this:: 17 | 18 | machine api.github.com 19 | login your_user_name 20 | password ghp_XyzzyfGXFooBar8nBqQuuxY9brgXYz4Xyzzy 21 | 22 | Change the login to your GitHub user name. The password is a Personal Access 23 | Token you get from https://github.com/settings/tokens. Visit that page, click 24 | "Generate new token." It will prompt you for your password, then you'll see a 25 | scary list of scopes. Check the "repo" option and click "Generate token." Copy 26 | the token that appears. Paste it into your ~/.netrc in the "password" entry. 27 | 28 | 29 | Working in the repo 30 | =================== 31 | 32 | To work on these tools: 33 | 34 | 1. Use a virtualenv. 35 | 36 | 2. Install dependencies:: 37 | 38 | make dev-install 39 | 40 | 3. Run tests:: 41 | 42 | make test 43 | 44 | 4. Older tools were Python files run from the root of the repo. Now we are 45 | being more disciplined and putting code into importable modules with entry 46 | points in setup.py. 47 | 48 | 5. Simple tools can go into an existing subdirectory of edx_repo_tools. Follow 49 | the structure of existing tools you find here. 
More complex tools, or ones 50 | that need unusual third-party requirements, should go into a new 51 | subdirectory of edx_repo_tools. 52 | 53 | 6. Add a new `entry_point` in setup.py for your command: 54 | 55 | .. code:: 56 | 57 | entry_points={ 58 | 'console_scripts': [ 59 | ... 60 | 'new_tool = edx_repo_tools.new_tool_dir.new_tool:main', 61 | ... 62 | 63 | 7. If your tool is in its own directory, you can create an `extra.in` file 64 | there with third-party requirements intended just for your tool. This will 65 | automatically create an installable "extra" for your requirements. 66 | 67 | Active Tools 68 | ============ 69 | 70 | repo_checks 71 | ----------- 72 | 73 | See the `repo_checks README `_ in its subfolder. 74 | 75 | Older Tools 76 | =========== 77 | 78 | There are many programs in this repo in various stages of disrepair. A few 79 | of them are described in this repo's `older README.md`_ file. Others are not 80 | described at all, but may be useful, or have useful tidbits in the code. 81 | 82 | .. _older README.md: https://github.com/openedx/repo-tools/blob/7aa8bda466d1925c56d4ad6e3b2bdd87b1f83148/README.md 83 | 84 | 85 | Feedback 86 | ======== 87 | 88 | Please send any feedback to oscm@edx.org. 89 | -------------------------------------------------------------------------------- /catalog-info.yaml: -------------------------------------------------------------------------------- 1 | # This file records information about this repo. Its use is described in OEP-55: 2 | # https://open-edx-proposals.readthedocs.io/en/latest/processes/oep-0055-proc-project-maintainers.html 3 | 4 | apiVersion: backstage.io/v1alpha1 5 | # (Required) Acceptable Values: Component, Resource, System 6 | # Use `Component` unless you know how backstage works and what the other kinds mean. 7 | kind: Component 8 | metadata: 9 | # (Required) Must be the name of the repo, without the owning organization. 10 | name: 'repo-tools' 11 | description: "Tools for repo maintenance, etc." 
12 | links: 13 | 14 | annotations: 15 | # (Optional) We use the below annotation to indicate whether or not this 16 | # repository should be tagged for openedx releases and which branch is tagged. 17 | openedx.org/release: null 18 | spec: 19 | 20 | # (Required) This can be a group (`group:`) or a user (`user:`). 21 | # Don't forget the "user:" or "group:" prefix. Groups must be GitHub team 22 | # names in the openedx GitHub organization: https://github.com/orgs/openedx/teams 23 | # 24 | # If you need a new team created, create an issue with Axim engineering: 25 | # https://github.com/openedx/axim-engineering/issues/new/choose 26 | owner: group:axim-engineering 27 | 28 | # (Required) Acceptable Type Values: service, website, library 29 | type: 'library' 30 | 31 | # (Required) Acceptable Lifecycle Values: experimental, production, deprecated 32 | lifecycle: 'production' 33 | -------------------------------------------------------------------------------- /count-lines.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Count lines of code in Open edX. 4 | # This certainly over-counts JavaScript code, since we have lots of non-authored 5 | # JavaScript in our repos. 6 | # 7 | # Needs cloc (https://github.com/AlDanial/cloc) 8 | 9 | REPORTDIR=/tmp/cloc-reports 10 | mkdir -p $REPORTDIR 11 | rm -rf $REPORTDIR/* 12 | 13 | cat < $REPORTDIR/exclude-files.txt 14 | package-lock.json 15 | EOF 16 | 17 | cat < $REPORTDIR/more-langs.txt 18 | reStructured Text 19 | filter remove_matches xyzzy 20 | extension rst 21 | 3rd_gen_scale 1.0 22 | SVG Graphics 23 | filter remove_html_comments 24 | extension svg 25 | 3rd_gen_scale 1.0 26 | EOF 27 | 28 | find . -name .git -type d -prune | while read d; do 29 | dd=$(dirname "$d") 30 | if [[ $dd == ./src/third-party/* ]]; then 31 | # Ignore repos in the "third-party" tree. 
32 | continue; 33 | fi 34 | echo "==== $dd ========================================================================================================" 35 | cd $dd 36 | git remote -v 37 | 38 | REPORTHEAD=$REPORTDIR/${dd##*/} 39 | cloc \ 40 | --report-file=$REPORTHEAD.txt \ 41 | --read-lang-def=$REPORTDIR/more-langs.txt \ 42 | --ignored=$REPORTHEAD.ignored \ 43 | --vcs=git \ 44 | --not-match-d='.*\.egg-info' \ 45 | --exclude-dir=node_modules,vendor,locale \ 46 | --exclude-ext=png,jpg,gif,ttf,eot,woff,mo,xcf \ 47 | --exclude-list-file=$REPORTDIR/exclude-files.txt \ 48 | . 49 | cd - 50 | done 51 | 52 | cloc \ 53 | --sum-reports \ 54 | --read-lang-def=$REPORTDIR/more-langs.txt \ 55 | $REPORTDIR/*.txt 56 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | # This is a stub file to prevent any external calls to this file from breaking. 2 | # This should be removed when we are sure that nothing references this 3 | # original requirements location. 
"""
Add the Open edX common-constraints pin to a repository's constraints file.

Inserts a pointer to the centrally-maintained common_constraints.txt and
removes any local pins that the central file already covers.
"""
import re
from os import path
import urllib.request

import click

# Candidate constraint-file locations, checked in order; the first that
# exists is used.
FILES = [
    'requirements/constraint.txt',
    'requirements/constraints.txt',
    'requirements/pins.txt',
]

# Single source of truth for the central constraints file URL (previously
# duplicated in two places in this module).
COMMON_CONSTRAINTS_URL = (
    "https://raw.githubusercontent.com/edx/edx-lint/master/edx_lint/files/common_constraints.txt"
)

# Matches the package name at the start of a requirement line, e.g. "django<4".
_PACKAGE_RE = re.compile(r'^[A-Za-z0-9-_]+(<|==|>)')
# Strips the version specifier (and trailing newline) from a requirement line,
# leaving just the package name.
_SPECIFIER_RE = re.compile(r"(<=|==|>=|>|<)([0-9]*.*)\n")


class CommonConstraint:
    """
    CommonConstraint is responsible for adding the common constraint pin to
    the constraints file of the repository, and for dropping local pins that
    duplicate the central common-constraints file.
    """

    def __init__(self):
        self.comment = "# Common constraints for edx repos\n"
        # BUGFIX: the previous version appended a stray " " after the
        # newline, which wrote a junk single-space line into the output file.
        self.constraint = f"-c {COMMON_CONSTRAINTS_URL}\n"
        self.file = self._get_file_name()
        self.lines = []

    def _get_file_name(self):
        """Return the first existing candidate constraints file, or None."""
        for file_name in FILES:
            if path.exists(file_name):
                return file_name
        return None

    def _read_lines(self):
        """Load the constraints file contents into self.lines."""
        with open(self.file, 'r') as file:
            self.lines = file.readlines()

    def _get_constraint_index(self):
        """
        Return the index at which to insert the common-constraint pin: just
        after a leading comment header (the first blank line following only
        comments), or 0 otherwise.
        """
        for index, line in enumerate(self.lines):
            if not line.lstrip().startswith('#'):
                if line == '\n':
                    return index + 1
                return 0
        # BUGFIX: a file consisting entirely of comments previously fell off
        # the end and returned None, crashing list.insert(); append instead.
        return 0

    def _get_constraints(self):
        """
        Fetch the central common-constraints file and return the
        (lower-cased) names of the packages pinned there.
        """
        packages = []
        for raw_line in urllib.request.urlopen(COMMON_CONSTRAINTS_URL):
            line = raw_line.decode('utf-8')
            if _PACKAGE_RE.search(line):
                packages.append(_SPECIFIER_RE.sub("", line.lower()))
        return packages

    def _remove_common_constraints(self):
        """
        Drop local pins that the central common-constraints file already
        covers, along with a comment line directly above each dropped pin,
        and collapse any doubled blank line left behind.
        """
        constraints = self._get_constraints()
        # BUGFIX: the previous implementation deleted from self.lines while
        # iterating it with enumerate(), which skipped lines and corrupted
        # the index-relative comment/blank-line cleanup. Build a new list.
        cleaned = []
        for line in self.lines:
            match = _PACKAGE_RE.search(line)
            if match and _SPECIFIER_RE.sub("", line.lower()) in constraints:
                # Remove the comment that documented this redundant pin.
                if cleaned and cleaned[-1].lstrip().startswith('#'):
                    cleaned.pop()
                    # Collapse the blank line that separated that comment.
                    if len(cleaned) >= 2 and cleaned[-1] == '\n' and cleaned[-2] == '\n':
                        cleaned.pop()
                continue
            cleaned.append(line)
        self.lines = cleaned

    def _insert_constraint(self):
        """Insert the comment, the pin, and a trailing blank line."""
        index = self._get_constraint_index()
        self.lines[index:index] = [self.comment, self.constraint, "\n"]
        return self.lines

    def _write_file(self):
        """Write self.lines back to the constraints file."""
        with open(self.file, 'w') as file:
            file.writelines(self.lines)

    def update_file(self):
        """
        Rewrite the constraints file with the common pin added and any
        duplicated local pins removed.

        Raises:
            click.ClickException: if no constraints file exists.
        """
        if self.file is None:
            raise click.ClickException('No constraint file exists!')

        self._read_lines()
        self._insert_constraint()
        self._remove_common_constraints()
        self._write_file()

        click.echo('Added common constraint successfully!')


@click.command()
def main():
    """Command-line entry point: update the repo's constraints file."""
    constraint = CommonConstraint()
    constraint.update_file()


if __name__ == "__main__":
    main()
12 | 13 | Usage 14 | ***** 15 | 16 | You will need a GH pesonal access token with the following scopes: 17 | 18 | * read:org 19 | * repo 20 | 21 | First, set up repo-tools as described in `the root README <../../README.rst>`_. 22 | There are a few ways to do this; one way is:: 23 | 24 | export GITHUB_TOKEN="$(pass github-token)" # assumes you have passwordstore.org 25 | 26 | python3 -m venv venv 27 | . venv/bin/activate 28 | pip install -e .[audit_gh_users] 29 | 30 | Then, run the script:: 31 | 32 | audit_users 33 | 34 | Contributing 35 | ************ 36 | 37 | * Make changes on your branch. 38 | 39 | * CI will run tests for you, but not linting, so ensure your changes don't break pylint: ``pylint edx_repo_tools/audit_users``. 40 | 41 | * Ping `#ask-axim`__ on Slack for review. 42 | 43 | __ https://openedx.slack.com/archives/C0497NQCLBT 44 | 45 | * Once approved, apply and merge (non-Axim engineers: ask your Axim reviewer to do this part for you). 46 | -------------------------------------------------------------------------------- /edx_repo_tools/audit_gh_users/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openedx/repo-tools/95ce6eeee34adfa1f06b3cb32262983513c2a541/edx_repo_tools/audit_gh_users/__init__.py -------------------------------------------------------------------------------- /edx_repo_tools/audit_gh_users/audit_users.py: -------------------------------------------------------------------------------- 1 | """ 2 | Audit github users in an org. Comparing the list of users to those in a CSV. 3 | 4 | See the README for more info. 
@click.command()
@click.option(
    "--github-token",
    "_github_token",
    envvar="GITHUB_TOKEN",
    required=True,
    help="A github personal access token.",
)
@click.option(
    "--org",
    "org",
    default="openedx",
    help="The github org that you wish check.",
)
@click.option(
    "--csv-repo",
    "csv_repo",
    default="openedx-webhooks-data",
    help="The github repo that contains the CSV we should compare against.",
)
@click.option(
    "--csv-path",
    "csv_path",
    default="salesforce-export.csv",
    help="The path in the repo to the csv file. The file should contain a 'GitHub Username' column.",
)
def main(org, _github_token, csv_repo, csv_path):
    """
    Entry point for command-line invocation.

    Prints, one login per line, the members of ``org`` that do not appear in
    the 'GitHub Username' column of ``csv_path`` in ``csv_repo``.
    """
    # BUG FIX: the token gathered by the (required) --github-token option was
    # never passed to the client; GhApi() fell back to its own environment
    # lookup, so the CLI flag was silently ignored.
    api = GhApi(token=_github_token)

    # Get all github users in the org, following pagination (100 per page).
    current_org_users = [
        member.login
        for member in chain.from_iterable(
            paged(api.orgs.list_members, org, per_page=100)
        )
    ]

    # Get all github usernames from the tracking CSV. The GitHub contents API
    # returns the file body base64-encoded, so decode before parsing.
    csv_file = io.StringIO(
        base64.decodebytes(
            api.repos.get_content(org, csv_repo, csv_path).content.encode()
        ).decode("utf-8")
    )
    reader = csv.DictReader(csv_file)
    csv_github_users = [row["GitHub Username"] for row in reader]

    # Find all the people that are in the org but not in sales force.
    extra_org_users = set(current_org_users) - set(csv_github_users)

    # List the users we need to investigate; sorted for stable output.
    print("\n".join(sorted(extra_org_users)))


if __name__ == "__main__":
    main()  # pylint: disable=no-value-for-parameter
13 | ``` 14 | # bowler run 15 | # For example 16 | bowler run foreignkey_on_delete_mod.py ~/src/edx-platform 17 | 18 | # to update the User.is_authenticated and User.is_anonymous as properties 19 | bowler run auth_anonymous_update.py ~/src/edx-platform 20 | ``` 21 | 22 | ``` 23 | # PROMPT HELP 24 | "y": "apply this hunk", 25 | "n": "skip this hunk", 26 | "a": "apply this hunk and all remaining hunks for this file", 27 | "d": "skip this hunk and all remaining hunks for this file", 28 | "q": "quit; do not apply this hunk or any remaining hunks", 29 | "?": "show help", 30 | ``` 31 | 32 | 2. Correct any formatting to match your formatting rules. 33 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django2/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openedx/repo-tools/95ce6eeee34adfa1f06b3cb32262983513c2a541/edx_repo_tools/codemods/django2/__init__.py -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django2/auth_anonymous_update.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | from typing import Optional 4 | from bowler import Query, LN, Capture, Filename, TOKEN, SYMBOL 5 | from fissix.pytree import Node, Leaf 6 | 7 | 8 | # usage of the User.is_authenticated() and User.is_anonymous() methods is deprecated. Remove the last parenthesis. 
def parentheses(node: LN, capture: Capture, filename: Filename) -> Optional[LN]:
    """
    Bowler modifier: strip the trailing call parentheses from a matched
    method call, e.g. ``user.is_authenticated()`` -> ``user.is_authenticated``.

    Returns the (mutated) ``function_call`` capture so Bowler writes it back.
    """

    # The last child is the call trailer: Node(trailer, [Leaf(7, '('), Leaf(8, ')')])
    nodes = capture.get("function_call")
    last_node = nodes.children[-1]  # pick the last node

    # Only delete when the trailer is literally an empty "()" pair: a call
    # with arguments has a non-Leaf argument node between the parentheses,
    # so the isinstance checks fail and the call is left untouched.
    if isinstance(last_node.children[0], Leaf) and isinstance(last_node.children[1], Leaf):
        if last_node.children[0].value == "(" and last_node.children[1].value == ")":
            del nodes.children[-1]

    return nodes
26 | return True 27 | 28 | 29 | def add_on_delete_cascade( 30 | node: LN, capture: Capture, filename: Filename 31 | ) -> Optional[LN]: 32 | arguments = capture.get("function_arguments")[0] 33 | new_on_delete_node = KeywordArg(Name(" on_delete"), Name("models.CASCADE")) 34 | 35 | if isinstance(arguments, Leaf): # Node is a leaf and so we need to replace it with a list of things we want instead. 36 | arguments.replace([arguments.clone(),Comma(),new_on_delete_node]) 37 | else: 38 | arguments.append_child(Comma()) 39 | arguments.append_child(new_on_delete_node) 40 | 41 | return node 42 | 43 | 44 | ( 45 | Query(sys.argv[1]) 46 | .select_method("ForeignKey") 47 | .is_call() 48 | .filter(filter_has_no_on_delete) 49 | .modify(add_on_delete_cascade) 50 | .idiff() 51 | ), 52 | ( 53 | Query(sys.argv[1]) 54 | .select_method("OneToOneField") 55 | .is_call() 56 | .filter(filter_has_no_on_delete) 57 | .modify(add_on_delete_cascade) 58 | .idiff() 59 | ) 60 | 61 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django2/widget_add_renderer_mod.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from lib2to3.fixer_util import Name, KeywordArg, Comma 3 | from typing import Optional 4 | 5 | from bowler import Query, LN, Capture, Filename, TOKEN 6 | from fissix.pytree import Leaf 7 | 8 | 9 | def render_has_no_renderer(node: LN, capture: Capture, filename: Filename) -> bool: 10 | 11 | if 'function_def' not in capture: 12 | return False # This is not a function definition, no need to add argument 13 | 14 | arguments = capture.get("function_arguments")[0].children 15 | 16 | known_arguments = [arg.value for arg in arguments if arg.value in ['name', 'value', 'attrs']] 17 | if len(known_arguments) != 3: 18 | return False # This render doesn't belong to widget 19 | 20 | for arg in arguments: 21 | if arg.type == TOKEN.NAME and arg.value == "renderer": 22 | return False # This definition 
already has a renderer argument. 23 | 24 | return True 25 | 26 | 27 | def add_renderer(node: LN, capture: Capture, filename: Filename) -> Optional[LN]: 28 | arguments = capture.get("function_arguments")[0] 29 | new_renderer_node = KeywordArg(Name(" renderer"), Name("None")) 30 | 31 | if isinstance(arguments, Leaf): # Node is a leaf and so we need to replace it with a list of things we want instead 32 | arguments.replace([arguments.clone(), Comma(), new_renderer_node]) 33 | else: 34 | arguments.append_child(Comma()) 35 | arguments.append_child(new_renderer_node) 36 | 37 | return node 38 | 39 | 40 | ( 41 | Query(sys.argv[1]) 42 | .select_method("render") 43 | .filter(render_has_no_renderer) 44 | .modify(add_renderer) 45 | .idiff() 46 | ) 47 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/__init__.py: -------------------------------------------------------------------------------- 1 | from .github_actions_modernizer import GithubCIModernizer 2 | from .github_actions_modernizer_django import GithubCIDjangoModernizer 3 | from .setup_file_modernizer import SetupFileModernizer 4 | from .tox_modernizer import ConfigReader, ToxModernizer 5 | from .travis_modernizer import DJANGO_PATTERN, TravisModernizer 6 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/add_new_django32_settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Codemod to add new settings in the repo settings file. 3 | """ 4 | import json 5 | import re 6 | import os 7 | import click 8 | from copy import deepcopy 9 | 10 | 11 | class SettingsModernizer: 12 | """ 13 | Django32 modernizer for updating settings files. 
class SettingsModernizer:
    """
    Django32 modernizer for updating settings files.

    Rewrites a settings module in place:

    * ``DEFAULT_HASHING_ALGORITHM`` and ``DEFAULT_AUTO_FIELD`` are reset to
      their Django 3.2 transition values (services only).
    * The template ``context_processors`` sequence gains the ``request``
      processor when it is missing.
    """
    DEFAULT_ALGORITHM_KEY = "DEFAULT_HASHING_ALGORITHM"
    NEW_HASHING_ALGORITHM = "sha1"
    DEFAULT_FIELD_KEY = "DEFAULT_AUTO_FIELD"
    NEW_AUTO_FIELD = "django.db.models.AutoField"
    NEW_PROCESSOR = "django.template.context_processors.request"

    def __init__(self, setting_path, is_service):
        # Path of the settings file to rewrite in place.
        self.settings_path = setting_path
        # Services get the hashing-algorithm/auto-field rewrites; libraries don't.
        self.is_service = is_service

    def _apply_regex_operations(self, matching_pattern, new_pattern, context_processors=False):
        """
        Return the settings file contents with ``matching_pattern`` rewritten.

        With ``context_processors=True`` the substitution happens in place,
        and only when the request processor is absent but a
        ``context_processors`` block exists.  Otherwise any existing match is
        removed and ``new_pattern`` is appended at the end of the file.
        """
        # FIX: read via a context manager — the previous bare open() leaked
        # the file handle.
        with open(self.settings_path) as settings_file:
            file_data = settings_file.read()
        if context_processors:
            if self.NEW_PROCESSOR not in file_data and re.search("context_processors", file_data):
                file_data = re.sub(pattern=matching_pattern, repl=new_pattern, string=file_data)
        else:
            file_data = re.sub(pattern=matching_pattern, repl="", string=file_data)
            file_data = file_data + new_pattern
        # (The old ``deepcopy`` of the result was a no-op on an immutable str.)
        return file_data

    def _update_settings_file(self, matching_pattern, new_pattern, context_processors=False):
        """Apply the rewrite and write the result back to the settings file."""
        file_data = self._apply_regex_operations(matching_pattern, new_pattern, context_processors)
        with open(self.settings_path, 'w') as setting_file:
            setting_file.write(file_data)

    def update_settings(self):
        """Run every rewrite that applies to this repo type."""
        if self.is_service:
            self.update_hash_algorithm()
            self.update_auto_field()
        # NOTE(review): run for services and libraries alike — confirm this
        # matches the intended control flow in the original file, whose
        # indentation here is ambiguous in the dump.
        self.update_context_processors()

    def update_hash_algorithm(self):
        """
        Update the HASHING_ALGORITHM in the settings file.
        """
        # FIX: raw f-strings — the previous non-raw literals relied on Python
        # passing invalid escapes like "\s" through verbatim, which raises
        # SyntaxWarning (and eventually SyntaxError) on modern interpreters.
        matching_algorithm = rf"{self.DEFAULT_ALGORITHM_KEY}\s=\s'[a-zA-Z0-9]*'\n"
        new_algorithm = f"{self.DEFAULT_ALGORITHM_KEY} = '{self.NEW_HASHING_ALGORITHM}'\n"
        self._update_settings_file(matching_algorithm, new_algorithm)

    def update_auto_field(self):
        """
        Update the AUTO_FIELD in the settings file.
        """
        matching_field = rf"{self.DEFAULT_FIELD_KEY}\s=\s'([a-zA-Z](.[a-zA-Z])?)*'\n"
        new_field = f"{self.DEFAULT_FIELD_KEY} = '{self.NEW_AUTO_FIELD}'\n"
        self._update_settings_file(matching_field, new_field)

    def update_context_processors(self):
        """
        Update the CONTEXT_PROCESSORS in the settings file.
        """
        # \1 re-inserts the existing processor list; the new processor is
        # appended just before the closing parenthesis.
        matching_pattern = r"'context_processors': \(([^)]*)\)"
        new_pattern = r"'context_processors': (\1" + f"\t'{self.NEW_PROCESSOR}',\n\t\t\t\t)"
        self._update_settings_file(matching_pattern, new_pattern, context_processors=True)
deepcopy(self.elements['jobs'][section_key]['strategy']['matrix']) 28 | 29 | for key, value in matrix_elements.items(): 30 | if key == 'python-version': 31 | python_versions.extend(filter( 32 | lambda version: version in ALLOWED_PYTHON_VERSIONS, value)) 33 | elif key in ['include', 'exclude']: 34 | without_python35 = list() 35 | for item in value: 36 | if item['python-version'] not in TO_BE_REMOVED_PYTHON: 37 | without_python35.append(item) 38 | 39 | if len(without_python35): 40 | self.elements['jobs'][section_key]['strategy']['matrix'][key] = without_python35 41 | else: 42 | del self.elements['jobs'][section_key]['strategy']['matrix'][key] 43 | if not section_key: 44 | return 45 | self.elements['jobs'][section_key]['strategy']['matrix']['python-version'] = python_versions 46 | 47 | def _update_python_versions(self): 48 | self._update_matrix() 49 | 50 | def modernize(self): 51 | self._update_python_versions() 52 | self.update_yml_file() 53 | 54 | 55 | @click.command() 56 | @click.option( 57 | '--path', default='.github/workflows/ci.yml', 58 | help="Path to default CI workflow file") 59 | def main(path): 60 | modernizer = GithubCIModernizer(path) 61 | modernizer.modernize() 62 | 63 | 64 | if __name__ == '__main__': 65 | main() 66 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/github_actions_modernizer_django.py: -------------------------------------------------------------------------------- 1 | """ 2 | Django Matrix Modernizer for Github Actions CI 3 | """ 4 | import re 5 | from copy import deepcopy 6 | 7 | import click 8 | 9 | from edx_repo_tools.utils import YamlLoader 10 | 11 | DJANGO_ENV_PATTERN = r"django[0-3][0-2]?" 
12 | ALLOWED_DJANGO_ENVS = ['django32', 'django40'] 13 | ALLOWED_DJANGO_VERSIONS = ['3.2', '4.0'] 14 | 15 | 16 | class GithubCIDjangoModernizer(YamlLoader): 17 | def __init__(self, file_path): 18 | super().__init__(file_path) 19 | 20 | def _update_matrix_items(self, job_name, matrix_item_name, matrix_item): 21 | MATRIX_INCLUDE_EXCLUDE_SECTION = ['include', 'exclude'] 22 | if not isinstance(matrix_item, list): 23 | return 24 | if not matrix_item_name in MATRIX_INCLUDE_EXCLUDE_SECTION: 25 | has_django_env = any( 26 | re.match(DJANGO_ENV_PATTERN, item) for item in matrix_item 27 | ) 28 | if not has_django_env: 29 | return 30 | non_django_matrix_items = [ 31 | item for item in matrix_item if not re.match(DJANGO_ENV_PATTERN, item)] 32 | updated_matrix_items = non_django_matrix_items + ALLOWED_DJANGO_ENVS 33 | self.elements['jobs'][job_name]['strategy']['matrix'][matrix_item_name] = updated_matrix_items 34 | else: 35 | self._update_matrix_include_exclude_sections( 36 | job_name, matrix_item_name, matrix_item) 37 | 38 | def _update_matrix_include_exclude_sections(self, job_name, matrix_item_name, matrix_item): 39 | if not matrix_item_name in ['include', 'exclude']: 40 | return 41 | section_items = deepcopy(matrix_item) 42 | for item in section_items: 43 | item_index = self.elements['jobs'][job_name]['strategy']['matrix'][matrix_item_name].index(item) 44 | if ('django-version' in item) and (not item['django-version'] in ALLOWED_DJANGO_VERSIONS): 45 | del self.elements['jobs'][job_name]['strategy']['matrix'][matrix_item_name][item_index] 46 | elif (('toxenv' in item) and (not item['toxenv'] in ALLOWED_DJANGO_ENVS) and 47 | (item['toxenv'].find('django') != -1)): 48 | del self.elements['jobs'][job_name]['strategy']['matrix'][matrix_item_name][item_index] 49 | 50 | def _update_django_matrix_items(self, job_name, job): 51 | matrices = job.get('strategy').get('matrix').items() 52 | for matrix_item_key, matrix_item in matrices: 53 | self._update_matrix_items(job_name, 
matrix_item_key, matrix_item) 54 | 55 | def _update_codecov_check(self, job_name, step): 56 | step_elements = deepcopy(step) 57 | if not 'uses' in step_elements: 58 | return 59 | if not (step_elements['uses']) in ['codecov/codecov-action@v1', 'codecov/codecov-action@v2']: 60 | return 61 | if not 'if' in step_elements: 62 | return 63 | step_index = self.elements['jobs'][job_name]['steps'].index(step) 64 | django_32_string = step_elements['if'].replace('django22', 'django32') 65 | self.elements['jobs'][job_name]['steps'][step_index]['if'] = django_32_string 66 | 67 | def _update_job_steps(self, job_name, job): 68 | steps = job.get('steps') 69 | if not steps: 70 | return 71 | for step in steps: 72 | self._update_codecov_check(job_name, step) 73 | 74 | def _update_job(self): 75 | for job_name, job in self.elements.get('jobs').items(): 76 | self._update_job_steps(job_name, job) 77 | 78 | def _update_job_matrices(self): 79 | for job_name, job in self.elements.get('jobs').items(): 80 | self._update_django_matrix_items(job_name, job) 81 | 82 | def modernize(self): 83 | self._update_job_matrices() 84 | self._update_job() 85 | self.update_yml_file() 86 | 87 | 88 | @click.command() 89 | @click.option( 90 | '--path', default='.github/workflows/ci.yml', 91 | help="Path to default CI workflow file") 92 | def main(path): 93 | modernizer = GithubCIDjangoModernizer(path) 94 | modernizer.modernize() 95 | 96 | 97 | if __name__ == '__main__': 98 | main() 99 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/remove_python2_unicode_compatible.py: -------------------------------------------------------------------------------- 1 | """ 2 | A script to remove the python2_unicode_compatible imports and headers 3 | """ 4 | import sys 5 | import click 6 | from bowler import Query 7 | 8 | 9 | def remove_node(node, _, __): 10 | """ 11 | Remove the node containing the expression python_2_unicode_compatible 12 | """ 13 | 
node.remove() 14 | 15 | 16 | def run_removal_query(path): 17 | """ 18 | Run the bowler query on the input files for refactoring. 19 | """ 20 | ( 21 | Query(path) 22 | .select("decorator<'@' name='python_2_unicode_compatible' any>") 23 | .modify(remove_node) 24 | .select("import_from<'from' module_name=any 'import' 'python_2_unicode_compatible'>") 25 | .modify(remove_node) 26 | .write() 27 | ) 28 | 29 | 30 | @click.command() 31 | @click.option('--path', help='use syntax: --path {path_to_input_file/directory}') 32 | def main(path): 33 | """ 34 | Function to handle input path for refactoring. 35 | HOW_TO_USE: when running as a repo tool, use following syntax to run the command: 36 | remove_python2_unicode_compatible --path {path_to_input_file/directory} 37 | """ 38 | run_removal_query(path) 39 | 40 | 41 | if __name__ == '__main__': 42 | main() 43 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/replace_render_to_response.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from lib2to3.fixer_util import Name, Comma 3 | from typing import Optional 4 | 5 | from bowler import Query, LN, Capture, Filename 6 | 7 | DJANGO_SHORTCUT_FILES = [] 8 | 9 | 10 | def filter_render_function(node: LN, capture: Capture, filename: Filename) -> bool: 11 | import_statement = capture.get("function_import") 12 | function_call = capture.get("function_call") 13 | if import_statement: 14 | if "django.shortcuts" in str(import_statement.children[1]): 15 | DJANGO_SHORTCUT_FILES.append(filename) 16 | return True 17 | elif function_call: 18 | # Only rename and modify those render_to_response which are imported from django.shortcuts 19 | return filename in DJANGO_SHORTCUT_FILES 20 | 21 | return False 22 | 23 | 24 | def add_request_param(node: LN, capture: Capture, filename: Filename) -> Optional[LN]: 25 | arguments = capture.get("function_arguments") 26 | if arguments: 27 | arguments 
= arguments[0] 28 | # Insert request parameter at start 29 | arguments.insert_child(0, Name("request")) 30 | arguments.insert_child(1, Comma()) 31 | arguments.children[2].prefix = " " 32 | 33 | return node 34 | 35 | 36 | def main(): 37 | ( 38 | Query(sys.argv[1]) 39 | .select_function("render_to_response") 40 | .filter(filter_render_function) 41 | .rename('render') 42 | .modify(add_request_param) 43 | .write() 44 | ) 45 | 46 | 47 | if __name__ == '__main__': 48 | main() 49 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/replace_static.py: -------------------------------------------------------------------------------- 1 | import click 2 | import subprocess 3 | 4 | 5 | @click.command() 6 | @click.option('--path', help='use syntax: --path {path_to_input_file/directory}') 7 | def main(path): 8 | """ 9 | Function to handle input path for refactoring. 10 | HOW_TO_USE: when running as a repo tool, use following syntax to run the command: 11 | replace_staticfiles --path {path_to_input_file/directory} 12 | """ 13 | subprocess.run(['./edx_repo_tools/codemods/django3/script_to_replace_static.sh', path]) 14 | 15 | 16 | if __name__ == '__main__': 17 | main() 18 | 19 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/replace_unicode_with_str.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | import click 4 | from bowler import Query 5 | 6 | 7 | def replace_unicode(path): 8 | """ 9 | Run the bowler query on the input files for refactoring. 
10 | """ 11 | ( 12 | Query(path) 13 | .select_function("__unicode__") 14 | .rename('__str__') 15 | .idiff() 16 | ), 17 | ( 18 | Query(path) 19 | .select_method("__unicode__") 20 | .is_call() 21 | .rename('__str__') 22 | .idiff() 23 | ) 24 | 25 | 26 | @click.command() 27 | @click.option('--path', help='use syntax: --path {path_to_input_file/directory}') 28 | def main(path): 29 | """ 30 | Function to handle input path for refactoring. 31 | HOW_TO_USE: when running as a repo tool, use following syntax to run the command: 32 | replace_unicode_with_str --path {path_to_input_file/directory} 33 | """ 34 | replace_unicode(path) 35 | 36 | 37 | if __name__ == '__main__': 38 | main() 39 | 40 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/script_to_replace_static.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Pass the Directory you want to replace the occurences in, as first argument. 4 | # e.g bash script_to_replace_static.sh Workspace/credentials 5 | DIR="$1" 6 | # Type of files to repalce in 7 | SEARCH="*.html" 8 | 9 | for f in $(find $DIR -name "$SEARCH" -type f); do 10 | echo "Replacing in $f" 11 | # Creating backup file because sed creates empty file when output is in same file as input. 12 | cp $f $f.bak 13 | sed 's/{% load staticfiles %}/{% load static %}/g' $f.bak > $f 14 | cp $f $f.bak 15 | sed 's/{% load admin_static %}/{% load static %}/g' $f.bak > $f 16 | # Deleting bakup file 17 | rm -f $f.bak 18 | done 19 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/setup_file_modernizer.py: -------------------------------------------------------------------------------- 1 | """ 2 | Codemod to modernize setup file. 
3 | """ 4 | import json 5 | import os 6 | import re 7 | from copy import deepcopy 8 | 9 | import click 10 | 11 | TROVE_CLASSIFIERS_INDENT_COUNT = 8 12 | 13 | class SetupFileModernizer: 14 | """ 15 | Django32 modernizer for updating setup files. 16 | """ 17 | old_classifiers_regex = r"(?!\s\s+'Framework :: Django :: 3.2')(\s\s+'Framework\s+::\s+Django\s+::\s+[0-3]+\.[0-2]+',)" 18 | most_recent_classifier_regex = r"\s\s'Framework :: Django :: 3.2',\n" 19 | # Keep the new classifiers in descending order i.e Framework :: Django :: 4.1 then Framework :: Django :: 4.0 so they are sorted in the file 20 | new_trove_classifiers = ["'Framework :: Django :: 4.0',\n"] 21 | 22 | def __init__(self, path=None) -> None: 23 | self.setup_file_path = path 24 | 25 | def _update_classifiers(self) -> None: 26 | file_data = open(self.setup_file_path).read() 27 | file_data = self._remove_outdated_classifiers(file_data) 28 | file_data = self._add_new_classifiers(file_data) 29 | self._write_data_to_file(file_data) 30 | 31 | def _remove_outdated_classifiers(self, file_data) -> str: 32 | modified_file_data = re.sub(self.old_classifiers_regex, '', file_data) 33 | return modified_file_data 34 | 35 | def _add_new_classifiers(self, file_data) -> str: 36 | res = re.search(self.most_recent_classifier_regex, file_data) 37 | end_index_of_most_recent_classifier = res.end() 38 | modified_file_data = file_data 39 | for classifier in self.new_trove_classifiers: 40 | modified_file_data = (modified_file_data[:end_index_of_most_recent_classifier] + 41 | classifier.rjust(len(classifier)+TROVE_CLASSIFIERS_INDENT_COUNT) + 42 | modified_file_data[end_index_of_most_recent_classifier:]) 43 | return modified_file_data 44 | 45 | def _write_data_to_file(self, file_data) -> None: 46 | with open(self.setup_file_path, 'w') as setup_file: 47 | setup_file.write(file_data) 48 | 49 | def update_setup_file(self) -> None: 50 | self._update_classifiers() 51 | 52 | 53 | @click.command() 54 | @click.option( 55 | '--path', 
default='setup.py', 56 | help="Path to setup.py File") 57 | def main(path): 58 | setup_file_modernizer = SetupFileModernizer(path) 59 | setup_file_modernizer.update_setup_file() 60 | 61 | 62 | if __name__ == '__main__': 63 | main() 64 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/tox_modernizer.py: -------------------------------------------------------------------------------- 1 | import io 2 | import re 3 | from configparser import ConfigParser, NoSectionError 4 | 5 | import click 6 | 7 | TOX_SECTION = "tox" 8 | ENVLIST = "envlist" 9 | TEST_ENV_SECTION = "testenv" 10 | TEST_ENV_DEPS = "deps" 11 | PYTHON_SUBSTITUTE = "py38" 12 | DJANGO_SUBSTITUTE = "django{32,40}" 13 | 14 | DJANGO_32_DEPENDENCY = "django32: Django>=3.2,<4.0\n" 15 | DJANGO_40_DEPENDENCY = "django40: Django>=4.0,<4.1\n" 16 | NEW_DJANGO_DEPENDENCIES = DJANGO_32_DEPENDENCY + DJANGO_40_DEPENDENCY 17 | 18 | SECTIONS = [TOX_SECTION, TEST_ENV_SECTION] 19 | 20 | PYTHON_PATTERN = "(py{.*?}-?|py[0-9]+,|py[0-9]+-)" 21 | 22 | DJANGO_PATTERN = "(django[0-9]+,|django[0-9]+\n|django{.*}\n|django{.*?}|django[0-9]+-|django{.*}-)" 23 | 24 | DJANGO_DEPENDENCY_PATTERN = "([^\n]*django[0-9]+:.*\n?)" 25 | 26 | 27 | class ConfigReader: 28 | def __init__(self, file_path=None, config_dict=None): 29 | self.config_dict = config_dict 30 | self.file_path = file_path 31 | 32 | def get_modernizer(self): 33 | config_parser = ConfigParser() 34 | if self.config_dict is not None: 35 | config_parser.read_dict(self.config_dict) 36 | else: 37 | config_parser.read(self.file_path) 38 | return ToxModernizer(config_parser, self.file_path) 39 | 40 | 41 | class ToxModernizer: 42 | def __init__(self, config_parser, file_path): 43 | self.file_path = file_path 44 | self.config_parser = config_parser 45 | self._validate_tox_config_sections() 46 | 47 | def _validate_tox_config_sections(self): 48 | if not self.config_parser.sections(): 49 | raise NoSectionError("Bad Config. 
No sections found.") 50 | 51 | if all(section not in SECTIONS for section in self.config_parser.sections()): 52 | raise NoSectionError("File doesn't contain required sections") 53 | 54 | def _update_env_list(self): 55 | tox_section = self.config_parser[TOX_SECTION] 56 | env_list = tox_section[ENVLIST] 57 | 58 | env_list = ToxModernizer._replace_runners(PYTHON_PATTERN, PYTHON_SUBSTITUTE, env_list) 59 | env_list = ToxModernizer._replace_runners(DJANGO_PATTERN, DJANGO_SUBSTITUTE, env_list) 60 | self.config_parser[TOX_SECTION][ENVLIST] = env_list 61 | 62 | @staticmethod 63 | def _replace_runners(pattern, substitute, env_list): 64 | matches = re.findall(pattern, env_list) 65 | if not matches: 66 | return env_list 67 | substitute = ToxModernizer._get_runner_substitute(matches, substitute) 68 | return ToxModernizer._replace_matches(pattern, substitute, env_list, matches) 69 | 70 | @staticmethod 71 | def _replace_matches(pattern, substitute, target, matches): 72 | if not matches: 73 | return target 74 | occurrences_to_replace = len(matches) - 1 75 | if occurrences_to_replace > 0: 76 | target = re.sub(pattern, '', target, occurrences_to_replace) 77 | target = re.sub(pattern, substitute, target) 78 | return target 79 | 80 | @staticmethod 81 | def _get_runner_substitute(matches, substitute): 82 | last_match = matches[-1] 83 | has_other_runners = last_match.endswith('-') 84 | return substitute + "-" if has_other_runners else substitute 85 | 86 | def _replace_django_versions(self): 87 | test_environment = self.config_parser[TEST_ENV_SECTION] 88 | dependencies = test_environment[TEST_ENV_DEPS] 89 | matches = re.findall(DJANGO_DEPENDENCY_PATTERN, dependencies) 90 | dependencies = self._replace_matches(DJANGO_DEPENDENCY_PATTERN, NEW_DJANGO_DEPENDENCIES, dependencies, matches) 91 | 92 | self.config_parser[TEST_ENV_SECTION][TEST_ENV_DEPS] = dependencies 93 | 94 | def _update_config_file(self): 95 | # ConfigParser insists on using tabs for output. We want spaces. 
96 | with io.StringIO() as configw: 97 | self.config_parser.write(configw) 98 | new_ini = configw.getvalue() 99 | new_ini = new_ini.replace("\t", " ") 100 | with open(self.file_path, 'w') as configfile: 101 | configfile.write(new_ini) 102 | 103 | def modernize(self): 104 | self._update_env_list() 105 | self._replace_django_versions() 106 | self._update_config_file() 107 | 108 | 109 | @click.command() 110 | @click.option( 111 | '--path', default='tox.ini', 112 | help="Path to target tox config file") 113 | def main(path): 114 | modernizer = ConfigReader(path).get_modernizer() 115 | modernizer.modernize() 116 | 117 | 118 | if __name__ == '__main__': 119 | main() 120 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/django3/travis_modernizer.py: -------------------------------------------------------------------------------- 1 | import re 2 | from copy import deepcopy 3 | 4 | import click 5 | 6 | from edx_repo_tools.utils import YamlLoader 7 | 8 | ALLOWED_PYTHON_VERSIONS = '3.8' 9 | 10 | DEPRECATED_DJANGO_VERSIONS_PATTERN = r"django111|django20|django21" 11 | ALLOWED_DJANGO_VERSIONS_PATTERN = r"django22|django30|django31|django32" 12 | 13 | DJANGO_PATTERN = r"django[0-3][0-2][0-2]?" 
ALLOWED_DJANGO_VERSIONS = ['django22', 'django30', 'django31', 'django32']


class TravisModernizer(YamlLoader):
    """
    Rewrite a .travis.yml: pin Python to ALLOWED_PYTHON_VERSIONS and fan the
    Django env entries out to every version in ALLOWED_DJANGO_VERSIONS.
    """

    def __init__(self, file_path):
        super().__init__(file_path)

    def _update_python_dict(self):
        # Pin the top-level ``python:`` list, if present, to the one allowed version.
        python_versions = self.elements.get('python', None)
        if python_versions is None:
            return
        self.elements['python'] = [ALLOWED_PYTHON_VERSIONS]

    def _update_matrix_python_versions(self):
        # Collapse the python-bearing items of matrix.include into a single item
        # pinned to ALLOWED_PYTHON_VERSIONS, keeping every other item.
        matrix_elements = self.elements.get("matrix", {}).get("include")
        if matrix_elements is None:
            return
        has_python_matrix = any(matrix_item.get("python") is not None for matrix_item in matrix_elements)
        if not has_python_matrix:
            return
        non_python_matrix_elements = []
        python_matrix_items = []
        for matrix_element in matrix_elements:
            if 'python' not in matrix_element.keys():
                non_python_matrix_elements.append(matrix_element)
                continue
            # BUG FIX: the original ``break``-ed on the first python-bearing item,
            # which silently dropped every later matrix entry (python or not).
            # Keep scanning, but still emit only one pinned python item.
            if not python_matrix_items:
                python_matrix_item = deepcopy(matrix_element)
                python_matrix_item['python'] = ALLOWED_PYTHON_VERSIONS
                python_matrix_items.append(python_matrix_item)
        self.elements["matrix"]["include"] = non_python_matrix_elements + python_matrix_items

    @staticmethod
    def _get_updated_django_matrix_items(django_matrix_item):
        # Clone the template matrix item once per allowed Django version,
        # rewriting the django tag inside its ``env`` string.
        updated_django_matrix_items = []
        for django_version in ALLOWED_DJANGO_VERSIONS:
            django_matrix_item_clone = deepcopy(django_matrix_item)
            django_matrix_item_clone["env"] = re.sub(DJANGO_PATTERN, django_version, django_matrix_item_clone["env"])
            updated_django_matrix_items.append(django_matrix_item_clone)
        return updated_django_matrix_items

    @staticmethod
    def _get_updated_django_envs(django_env_item):
        # Each iteration rewrites the django tag left by the previous iteration,
        # so the single template env string fans out to one entry per version.
        updated_django_env_items = []
        for django_version in ALLOWED_DJANGO_VERSIONS:
            django_env_item = re.sub(DJANGO_PATTERN, django_version, django_env_item)
            updated_django_env_items.append(django_env_item)
        return updated_django_env_items

    def _update_django_envs(self):
        # Replace the first django-bearing ``env`` entry with one entry per
        # allowed Django version; non-django entries are preserved.
        env_elements = self.elements.get("env")
        if env_elements is None:
            return
        has_django_env = any(re.search(DJANGO_PATTERN, env_item) for env_item in env_elements)
        if not has_django_env:
            return
        django_env_item = [django_env_item for django_env_item in env_elements
                           if re.search(DJANGO_PATTERN, django_env_item)][0]
        non_django_env_items = [env_item for env_item in env_elements
                                if not re.search(DJANGO_PATTERN, env_item)]
        self.elements["env"] = non_django_env_items + TravisModernizer._get_updated_django_envs(django_env_item)

    def _update_django_matrix_envs(self):
        # Same fan-out as _update_django_envs, but for matrix.include items.
        matrix_items = self.elements.get("matrix", {}).get("include", [])
        if not matrix_items:
            return
        has_django_env = any(re.search(DJANGO_PATTERN, matrix_item.get('env', '')) for matrix_item in matrix_items)
        if not has_django_env:
            return
        django_matrix_element = [matrix_item for matrix_item in matrix_items
                                 if re.search(DJANGO_PATTERN, matrix_item.get("env", ""))][0]
        non_django_matrix_items = [matrix_item for matrix_item in matrix_items
                                   if not re.search(DJANGO_PATTERN, matrix_item.get("env", ""))]
        self.elements["matrix"]["include"] = (non_django_matrix_items +
                                              TravisModernizer._get_updated_django_matrix_items(django_matrix_element))

    def _update_python_versions(self):
        self._update_python_dict()
        self._update_matrix_python_versions()

    def _update_django_versions(self):
        self._update_django_envs()
        self._update_django_matrix_envs()

    def modernize(self):
        """Apply all rewrites and save the YAML file back to disk."""
        self._update_python_versions()
        self._update_django_versions()
        self.update_yml_file()


@click.command()
@click.option(
    '--path', default='.travis.yml',
    help="Path to target travis config file")
def main(path):
    modernizer = TravisModernizer(path)
    modernizer.modernize()


if __name__ == '__main__':
    main()
"""
Modernizer for Github Actions CI Django 4.2 support
"""
from copy import deepcopy
import click
from edx_repo_tools.utils import YamlLoader

ALLOWED_DJANGO_VERSIONS = ['django32', 'django42']


class GithubCIModernizer(YamlLoader):
    """Add the allowed Django versions to the main test job's CI matrix."""

    def __init__(self, file_path):
        super().__init__(file_path)

    def _update_django_in_matrix(self):
        """Extend the test job's ``django-version`` matrix with ALLOWED_DJANGO_VERSIONS."""
        django_versions = list()
        matrix_elements = dict()
        section_key = None

        # The last matching job name wins, mirroring the original lookup order.
        for key in ['build', 'tests', 'run_tests', 'run_quality', 'pytest']:
            if key in self.elements['jobs']:
                section_key = key
                # Robustness fix: a job without a strategy/matrix used to raise KeyError.
                matrix_elements = deepcopy(
                    self.elements['jobs'][section_key].get('strategy', {}).get('matrix', {}))

        for key, value in matrix_elements.items():
            if key == 'django-version':
                django_versions = value
                # Append only the allowed versions that are not already present.
                django_versions.extend(filter(
                    lambda version: version not in value, ALLOWED_DJANGO_VERSIONS))
        if not section_key:
            return
        if django_versions:
            self.elements['jobs'][section_key]['strategy']['matrix']['django-version'] = django_versions

    def _update_github_actions(self):
        self._update_django_in_matrix()

    def modernize(self):
        """Apply the matrix update and write the YAML file back to disk."""
        self._update_github_actions()
        self.update_yml_file()


@click.command()
@click.option(
    '--path', default='.github/workflows/ci.yml',
    help="Path to default CI workflow file")
def main(path):
    modernizer = GithubCIModernizer(path)
    modernizer.modernize()


if __name__ == '__main__':
    main()
def remove_providing_args(root_dir):
    """Strip the deprecated ``providing_args`` argument from signal definitions.

    Walks every ``.py`` file under *root_dir* and removes any
    ``providing_args=[...]`` keyword argument in place, preserving the other
    arguments and their separating commas.
    """
    # group(1): greedy prefix; then the providing_args=[...] argument itself;
    # group(2): the rest of the line (excluding the trailing newline).
    pattern = r"(.*)[,\s]*providing_args\s*=\s*\[.*?\](.*)"

    # Traverse all Python files in the root directory.
    for root, _, files in os.walk(root_dir):
        for file in files:
            if not file.endswith(".py"):
                continue
            file_path = os.path.join(root, file)
            updated_lines = []

            with open(file_path, "r") as f:
                lines = f.readlines()

            for line in lines:
                match = re.match(pattern, line)
                if match:
                    # Drop any comma/whitespace left dangling before the argument.
                    left = match.group(1).rstrip(", \t")
                    right = match.group(2)
                    # BUG FIX: when providing_args was the *first* argument, the
                    # comma separating it from the next argument survived, e.g.
                    # Signal(providing_args=[...], use_caching=True) became
                    # Signal(, use_caching=True).  Strip that leading comma.
                    if left.endswith("(") and right.lstrip().startswith(","):
                        right = right.lstrip()[1:].lstrip(" \t")
                    updated_lines.append(left + right + "\n")
                else:
                    updated_lines.append(line)

            # Write the updated content back to the file.
            with open(file_path, "w") as f:
                f.writelines(updated_lines)
class ConfigReader:
    """Build a ToxModernizer from either a tox.ini path or an in-memory dict."""

    def __init__(self, file_path=None, config_dict=None):
        self.config_dict = config_dict
        self.file_path = file_path

    def get_modernizer(self):
        """Parse the configuration source and wrap it in a ToxModernizer."""
        parser = ConfigParser()
        if self.config_dict is None:
            parser.read(self.file_path)
        else:
            parser.read_dict(self.config_dict)
        return ToxModernizer(parser, self.file_path)
    @staticmethod
    def _replace_matches(pattern, substitute, target, matches):
        """Collapse all occurrences of *pattern* in *target* into one *substitute*.

        All but the last occurrence are deleted, then the survivor is rewritten.
        When the first match is an existing ``django32:`` dependency line it is
        preserved as-is (so a pinned Django 3.2 dep is not clobbered) and the
        Django 4.2 dependency is appended after it if not already present.

        ``matches`` must be the result of ``re.findall(pattern, target)``.
        """
        if not matches:
            return target
        # Delete every occurrence except the last one...
        occurrences_to_replace = len(matches) - 1
        if occurrences_to_replace > 0:
            target = re.sub(pattern, '', target, occurrences_to_replace)

        # If there is an existing dependency pin for django32, don't override it:
        # keep it, and add the django42 dependency alongside when it is missing.
        if matches[0].startswith('django32:'):
            substitute = matches[0]
            if 'django42:' not in target:
                substitute += DJANGO_42_DEPENDENCY
        # ...then rewrite the single remaining occurrence with the substitute.
        target = re.sub(pattern, substitute, target)
        return target
    def _update_config_file(self):
        """Serialize the parsed config back to ``self.file_path``.

        The file is rendered to an in-memory buffer first so the tab
        characters can be rewritten before anything touches disk.
        """
        # ConfigParser insists on using tabs for output. We want spaces.
        with io.StringIO() as configw:
            self.config_parser.write(configw)
            new_ini = configw.getvalue()
        new_ini = new_ini.replace("\t", " ")
        with open(self.file_path, 'w') as configfile:
            configfile.write(new_ini)
self.elements['jobs'][job_name].move_to_end('strategy', last=False) 20 | self.elements['jobs'][job_name].move_to_end('runs-on', last=False) 21 | if 'name' in self.elements['jobs'][job_name]: 22 | self.elements['jobs'][job_name].move_to_end('name', last=False) 23 | 24 | def _update_existing_matrix(self, job_name): 25 | self.elements['jobs'][job_name]['strategy']['matrix']['node'] = ALLOWED_NODE_VERSIONS 26 | 27 | def _update_strategy_matrix(self, job_name): 28 | if 'strategy' in self.elements['jobs'][job_name] and 'matrix' in self.elements['jobs'][job_name]['strategy']: 29 | self._update_existing_matrix(job_name) 30 | else: 31 | self._add_new_matrix(job_name) 32 | 33 | def _update_node_version(self, job_name, step): 34 | step_index = self.elements['jobs'][job_name]['steps'].index(step) 35 | self.elements['jobs'][job_name]['steps'][step_index]['with']['node-version'] = '${{ matrix.node }}' 36 | 37 | def _update_npm_version(self, job_name, step): 38 | step_index = self.elements['jobs'][job_name]['steps'].index(step) 39 | self.elements['jobs'][job_name]['steps'][step_index]['run'] = 'npm i -g npm@'+ALLOWED_NPM_VERSION 40 | 41 | def _update_job_steps(self, job_name, job): 42 | steps = job.get('steps') 43 | if not steps: 44 | return 45 | for step in steps: 46 | if 'name' in step and step['name'] == 'Setup Nodejs': 47 | self._update_node_version(job_name, step) 48 | elif 'name' in step and step['name'] == 'Setup npm': 49 | self._update_npm_version(job_name, step) 50 | 51 | def _update_job_name(self, job_name, job): 52 | self.elements['jobs']['tests'] = deepcopy(job) 53 | self.elements['jobs'].move_to_end('tests', last=False) 54 | self.elements['jobs'].pop(job_name) 55 | 56 | def _update_job(self): 57 | jobs = self.elements.get('jobs') 58 | if 'tests' not in jobs and 'build' in jobs: 59 | self._update_job_name('build', self.elements['jobs']['build']) 60 | if 'tests' in jobs: 61 | self._update_strategy_matrix('tests') 62 | self._update_job_steps('tests', 
self.elements['jobs']['tests']) 63 | 64 | def modernize(self): 65 | self._update_job() 66 | self.update_yml_file() 67 | 68 | 69 | @click.command() 70 | @click.option( 71 | '--path', default='./.github/workflows/ci.yml', 72 | help="Path to default CI workflow file") 73 | def main(path): 74 | modernizer = GithubCiNodeModernizer(path) 75 | modernizer.modernize() 76 | 77 | 78 | if __name__ == '__main__': 79 | main() 80 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/node16/gha_release_workflow_modernizer.py: -------------------------------------------------------------------------------- 1 | """ 2 | Node modernizer for Github CI release workflow 3 | """ 4 | 5 | from copy import deepcopy 6 | from os.path import exists 7 | from pathlib import Path 8 | 9 | import click 10 | from ruamel.yaml import YAML 11 | 12 | from edx_repo_tools.utils import YamlLoader 13 | 14 | NODE_RELEASE_VERSION = 16 15 | NODE_JS_SETUP_ACTION_LIST = ['actions/setup-node@v2', 'actions/setup-node@v1'] 16 | FETCH_NODE_VERSION_STEP = """name: 'Setup Nodejs Env'\nrun: 'echo "NODE_VER=`cat .nvmrc`" >> $GITHUB_ENV'\n""" 17 | 18 | class GithubNodeReleaseWorkflowModernizer(YamlLoader): 19 | def __init__(self, release_workflow_file_path): 20 | super().__init__(release_workflow_file_path) 21 | 22 | def _does_nvmrc_exists(self): 23 | target_file = Path(self.file_path).resolve().parents[2] 24 | target_file = str(target_file) + '/.nvmrc' 25 | return exists(target_file) 26 | 27 | def _add_setup_nodejs_env_step(self, step_elements, step_index): 28 | if self._does_nvmrc_exists(): 29 | yaml = YAML() 30 | fetch_node_version_step = yaml.load(FETCH_NODE_VERSION_STEP) 31 | step_elements.insert( 32 | step_index, fetch_node_version_step) 33 | return step_elements 34 | 35 | def _update_node_version(self, step_elements, step_index): 36 | if self._does_nvmrc_exists(): 37 | step_elements[step_index]['with']['node-version'] = "${{ env.NODE_VER }}" 38 | else: 39 | 
step_elements[step_index]['with']['node-version'] = 16 40 | return step_elements 41 | 42 | def _update_job_steps(self, job_name, job): 43 | steps = job.get('steps') 44 | updated_job_steps = None 45 | if not steps: 46 | return 47 | for step in steps: 48 | if 'uses' in step and step['uses'] in NODE_JS_SETUP_ACTION_LIST: 49 | step_index = self.elements['jobs'][job_name]['steps'].index(step) 50 | job_steps = deepcopy(self.elements['jobs'][job_name]['steps']) 51 | job_steps = self._update_node_version(job_steps, step_index) 52 | job_steps = self._add_setup_nodejs_env_step(job_steps, step_index) 53 | 54 | updated_job_steps = job_steps 55 | return updated_job_steps 56 | 57 | 58 | def _update_job(self): 59 | jobs = self.elements.get('jobs') 60 | if 'release' in jobs: 61 | updated_job = self._update_job_steps( 62 | 'release', self.elements['jobs']['release']) 63 | self.elements['jobs']['release']['steps'] = updated_job 64 | 65 | def modernize(self): 66 | self._update_job() 67 | self.update_yml_file() 68 | 69 | 70 | @click.command() 71 | @click.option( 72 | '--workflow_path', default='./.github/workflows/release.yml', 73 | help="Path to release workflow file") 74 | def main(workflow_path): 75 | modernizer = GithubNodeReleaseWorkflowModernizer(workflow_path) 76 | modernizer.modernize() 77 | 78 | 79 | if __name__ == '__main__': 80 | main() 81 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/python312/__init__.py: -------------------------------------------------------------------------------- 1 | from .tox_modernizer import ConfigReader, ToxModernizer 2 | from .gh_actions_modernizer import GithubCIModernizer 3 | -------------------------------------------------------------------------------- /edx_repo_tools/codemods/python312/gh_actions_modernizer.py: -------------------------------------------------------------------------------- 1 | """ 2 | Github Actions CI Modernizer to add Python 3.12 and drop Django 3.2 testing 3 
"""
Github Actions CI Modernizer to add Python 3.12 and drop Django 3.2 testing
"""
import os
from copy import deepcopy
import click
from edx_repo_tools.utils import YamlLoader

TO_BE_REMOVED_PYTHON = ['3.5', '3.6', '3.7']
ALLOWED_PYTHON_VERSIONS = ['3.8', '3.12']

ALLOWED_DJANGO_VERSIONS = ['4.2', 'django42']
DJANGO_ENV_TO_ADD = ['django42']
DJANGO_ENV_TO_REMOVE = ['django32', 'django40', 'django41']


class GithubCIModernizer(YamlLoader):
    """Rewrite every job's strategy matrix for Python 3.12 / Django 4.2 support."""

    def __init__(self, file_path):
        super().__init__(file_path)

    def _update_python_and_django_in_matrix(self):
        """Update each job's matrix in place.

        * ``django-version``: add DJANGO_ENV_TO_ADD, keep only ALLOWED_DJANGO_VERSIONS.
        * ``tox``/``toxenv``/``tox-env``: add django42, drop retired django envs.
        * ``python-version``: add ALLOWED_PYTHON_VERSIONS, drop TO_BE_REMOVED_PYTHON.
        * ``include``/``exclude``: drop entries pinned to removed Pythons.
        """
        for section_key in self.elements['jobs']:
            # Robustness fix: a job without a strategy/matrix used to raise KeyError.
            matrix_elements = deepcopy(
                self.elements['jobs'][section_key].get('strategy', {}).get('matrix'))
            if not matrix_elements:
                continue

            for key, value in matrix_elements.items():
                if key == 'django-version':
                    for dj_version in DJANGO_ENV_TO_ADD:
                        if dj_version not in value:
                            value.append(dj_version)
                    django_versions = [v for v in value if v in ALLOWED_DJANGO_VERSIONS]
                    if django_versions:
                        self.elements['jobs'][section_key]['strategy']['matrix'][key] = django_versions

                if key in ['tox', 'toxenv', 'tox-env']:
                    for dj_env in DJANGO_ENV_TO_ADD:
                        if dj_env not in value:
                            value.append(dj_env)
                    tox_envs = [v for v in value if v not in DJANGO_ENV_TO_REMOVE]
                    if tox_envs:
                        self.elements['jobs'][section_key]['strategy']['matrix'][key] = tox_envs

                if key == 'python-version':
                    for version in ALLOWED_PYTHON_VERSIONS:
                        if version not in value:
                            value.append(version)
                    python_versions = [v for v in value if v not in TO_BE_REMOVED_PYTHON]
                    if python_versions:
                        self.elements['jobs'][section_key]['strategy']['matrix'][key] = python_versions
                    else:
                        # Nothing left to test: drop the key entirely.
                        del self.elements['jobs'][section_key]['strategy']['matrix'][key]

                elif key in ['include', 'exclude']:
                    # Robustness fix: entries without a 'python-version' key are
                    # kept instead of raising KeyError.
                    allowed_python_vers = [
                        item for item in value
                        if item.get('python-version') not in TO_BE_REMOVED_PYTHON
                    ]
                    if allowed_python_vers:
                        self.elements['jobs'][section_key]['strategy']['matrix'][key] = allowed_python_vers
                    else:
                        del self.elements['jobs'][section_key]['strategy']['matrix'][key]

    def _update_github_actions(self):
        self._update_python_and_django_in_matrix()

    def modernize(self):
        """Apply the matrix updates and write the YAML file back to disk."""
        self._update_github_actions()
        self.update_yml_file()


@click.command()
@click.option(
    '--path', default='.github/workflows/ci.yml',
    help="Path to default CI workflow file")
def main(path):
    if os.path.exists(path):
        modernizer = GithubCIModernizer(path)
        modernizer.modernize()
    else:
        print("ci.yml not found on specified path")


if __name__ == '__main__':
    main()
config_dict 30 | self.file_path = file_path 31 | 32 | def get_modernizer(self): 33 | config_parser = ConfigParser() 34 | if self.config_dict is not None: 35 | config_parser.read_dict(self.config_dict) 36 | else: 37 | config_parser.read(self.file_path) 38 | return ToxModernizer(config_parser, self.file_path) 39 | 40 | 41 | class ToxModernizer: 42 | def __init__(self, config_parser, file_path): 43 | self.file_path = file_path 44 | self.config_parser = config_parser 45 | self._validate_tox_config_sections() 46 | 47 | def _validate_tox_config_sections(self): 48 | if not self.config_parser.sections(): 49 | raise NoSectionError("Bad Config. No sections found.") 50 | 51 | if all(section not in SECTIONS for section in self.config_parser.sections()): 52 | raise NoSectionError("File doesn't contain required sections") 53 | 54 | def _update_env_list(self): 55 | tox_section = self.config_parser[TOX_SECTION] 56 | env_list = tox_section[ENVLIST] 57 | 58 | env_list = ToxModernizer._replace_runners(PYTHON_PATTERN, PYTHON_SUBSTITUTE, env_list) 59 | env_list = ToxModernizer._replace_runners(DJANGO_PATTERN, DJANGO_SUBSTITUTE, env_list) 60 | self.config_parser[TOX_SECTION][ENVLIST] = env_list 61 | 62 | @staticmethod 63 | def _replace_runners(pattern, substitute, env_list): 64 | matches = re.findall(pattern, env_list) 65 | if not matches: 66 | return env_list 67 | substitute = ToxModernizer._get_runner_substitute(matches, substitute) 68 | return ToxModernizer._replace_matches(pattern, substitute, env_list, matches) 69 | 70 | @staticmethod 71 | def _replace_matches(pattern, substitute, target, matches): 72 | if not matches: 73 | return target 74 | occurrences_to_replace = len(matches) - 1 75 | if occurrences_to_replace > 0: 76 | target = re.sub(pattern, '', target, occurrences_to_replace) 77 | target = re.sub(pattern, substitute, target) 78 | return target 79 | 80 | @staticmethod 81 | def _get_runner_substitute(matches, substitute): 82 | last_match = matches[-1] 83 | has_other_runners = 
    def _replace_django_versions(self):
        """Collapse the ``djangoNN:`` dependency lines in [testenv] deps.

        Every existing ``djangoNN: ...`` pin is replaced by the single
        Django 4.2 dependency line (NEW_DJANGO_DEPENDENCIES); all other
        deps entries are left untouched.
        """
        test_environment = self.config_parser[TEST_ENV_SECTION]
        dependencies = test_environment[TEST_ENV_DEPS]
        # Find every version-specific Django dependency line...
        matches = re.findall(DJANGO_DEPENDENCY_PATTERN, dependencies)
        # ...and collapse them into the single supported dependency.
        dependencies = self._replace_matches(DJANGO_DEPENDENCY_PATTERN, NEW_DJANGO_DEPENDENCIES, dependencies, matches)

        self.config_parser[TEST_ENV_SECTION][TEST_ENV_DEPS] = dependencies
Install repo-tools (https://github.com/openedx/repo-tools) into your virtualenv, including the "conventional_commits" extra requirements:: 9 | 10 | $ python -m pip install '/path/to/repo-tools[conventional_commits]' 11 | 12 | #. Change to your work directory. 13 | 14 | #. Create a sub-directory called "edx", and cd into it. 15 | 16 | #. Clone the edx org. This will need more than 8Gb of disk space:: 17 | 18 | $ clone_org --prune --forks edx 19 | 20 | #. Update all the repos. The "gittree" shell alias is in the gittools.sh file in repo-tools:: 21 | 22 | $ gittree "git fetch --all; git checkout \$(git remote show origin | awk '/HEAD branch/ {print \$NF}'); git pull" 23 | 24 | #. cd .. 25 | 26 | #. Delete the existing commits.db file, if any. 27 | 28 | #. Collect commit stats. Consider every edx repo, ignore the ones ending in 29 | "-private", and include all the ones that have an openedx.yaml file:: 30 | 31 | $ conventional_commits collect --ignore='*-private' --require=openedx.yaml edx/* 32 | 33 | #. Now commits.db is a SQLite database with a table called "commits". You can query this directly with SQLite-compatible tools if you like. 34 | 35 | #. Draw a chart:: 36 | 37 | $ conventional_commits plot 38 | -------------------------------------------------------------------------------- /edx_repo_tools/conventional_commits/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openedx/repo-tools/95ce6eeee34adfa1f06b3cb32262983513c2a541/edx_repo_tools/conventional_commits/__init__.py -------------------------------------------------------------------------------- /edx_repo_tools/conventional_commits/extra.in: -------------------------------------------------------------------------------- 1 | # The conventional_commits code needs some extra packages installed that are 2 | # large and unusual, so we keep them separate here. 
3 | 4 | -c ../../requirements/constraints.txt 5 | 6 | dataset 7 | pandas 8 | matplotlib 9 | -------------------------------------------------------------------------------- /edx_repo_tools/conventional_commits/extra.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # make upgrade 6 | # 7 | alembic==1.13.2 8 | # via dataset 9 | banal==1.0.6 10 | # via dataset 11 | contourpy==1.2.1 12 | # via matplotlib 13 | cycler==0.12.1 14 | # via matplotlib 15 | dataset==1.6.2 16 | # via -r edx_repo_tools/conventional_commits/extra.in 17 | fonttools==4.53.1 18 | # via matplotlib 19 | greenlet==3.0.3 20 | # via 21 | # -c edx_repo_tools/conventional_commits/../../requirements/constraints.txt 22 | # sqlalchemy 23 | kiwisolver==1.4.5 24 | # via matplotlib 25 | mako==1.3.5 26 | # via alembic 27 | markupsafe==2.1.5 28 | # via mako 29 | matplotlib==3.9.1 30 | # via -r edx_repo_tools/conventional_commits/extra.in 31 | numpy==2.0.1 32 | # via 33 | # contourpy 34 | # matplotlib 35 | # pandas 36 | packaging==24.1 37 | # via matplotlib 38 | pandas==2.2.2 39 | # via -r edx_repo_tools/conventional_commits/extra.in 40 | pillow==10.4.0 41 | # via matplotlib 42 | pyparsing==3.1.2 43 | # via matplotlib 44 | python-dateutil==2.9.0.post0 45 | # via 46 | # matplotlib 47 | # pandas 48 | pytz==2024.1 49 | # via pandas 50 | six==1.16.0 51 | # via python-dateutil 52 | sqlalchemy==1.4.52 53 | # via 54 | # alembic 55 | # dataset 56 | typing-extensions==4.12.2 57 | # via alembic 58 | tzdata==2024.1 59 | # via pandas 60 | -------------------------------------------------------------------------------- /edx_repo_tools/data.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from github3.exceptions import NotFoundError 4 | import yaml 5 | 6 | 7 | logging.basicConfig() 8 | LOGGER = 
def iter_nonforks(hub, orgs):
    """Yield all the non-fork repos in the given GitHub organizations.

    Arguments:
        hub (:class:`~github3.GitHub`): A connection to GitHub.
        orgs (list of str): the GitHub organizations to search.

    Yields:
        Repositories (:class:`~github3.Repository`)

    """
    for org in orgs:
        for repository in hub.organization(org).repositories():
            if not repository.fork:
                yield repository
            else:
                LOGGER.debug("Skipping %s because it is a fork", repository.full_name)
42 | 43 | Yields: 44 | Repositories (:class:`~github3.Repository) 45 | 46 | """ 47 | 48 | for repo in iter_nonforks(hub, orgs): 49 | for branch in (branches or [repo.default_branch]): 50 | try: 51 | contents = repo.file_contents(file_name, ref=branch) 52 | except NotFoundError: 53 | contents = None 54 | 55 | if contents is not None: 56 | LOGGER.debug("Found %s at %s:%s", file_name, repo.full_name, branch) 57 | try: 58 | data = yaml.safe_load(contents.decoded) 59 | except Exception as exc: 60 | LOGGER.error("Couldn't parse %s from %s:%s, skipping repo", file_name, repo.full_name, branch, exc_info=True) 61 | else: 62 | if data is not None: 63 | yield repo, data 64 | 65 | break 66 | -------------------------------------------------------------------------------- /edx_repo_tools/dependabot_yml.py: -------------------------------------------------------------------------------- 1 | import os 2 | import click 3 | 4 | from edx_repo_tools.utils import YamlLoader 5 | 6 | 7 | github_actions = """\ 8 | # Adding new check for github-actions 9 | package-ecosystem: "github-actions" 10 | directory: "/" 11 | schedule: 12 | interval: "weekly" 13 | """ 14 | ecosystem_reviewers = """\ 15 | reviewers: 16 | - "{reviewer}" 17 | """ 18 | 19 | # Adding new packages for update. Add tuple with key and related data. 20 | ADD_NEW_FIELDS = [("github-actions", github_actions,)] 21 | 22 | 23 | class DependabotYamlModernizer(YamlLoader): 24 | """ 25 | Dependabot Yaml Modernizer class is responsible for adding new elements in dependabot.yml. 
26 | """ 27 | 28 | def __init__(self, file_path, reviewer): 29 | super().__init__(file_path) 30 | self.reviewer = reviewer 31 | 32 | def _add_elements(self): 33 | self.elements['updates'] = self.elements.get('updates') or [] 34 | found = False 35 | for key, value in ADD_NEW_FIELDS: 36 | for index in self.elements['updates']: 37 | if key == index.get('package-ecosystem'): 38 | found = True 39 | break 40 | if not found: 41 | self.elements['updates'].append(self.yml_instance.load(value)) 42 | 43 | def _add_reviewers(self): 44 | self.elements['updates'] = self.elements.get('updates') or [] 45 | for key, value in ADD_NEW_FIELDS: 46 | for index, elem in enumerate(self.elements['updates']): 47 | if key == elem.get('package-ecosystem'): 48 | self.elements["updates"][index].update(self.yml_instance.load( 49 | ecosystem_reviewers.format(**{"reviewer": self.reviewer}) 50 | )) 51 | break 52 | 53 | 54 | def modernize(self): 55 | self._add_elements() 56 | self.reviewer and self._add_reviewers() 57 | # otherwise it brings back whole update back towards left side. 
58 | 59 | self.yml_instance.indent(mapping=4, sequence=4, offset=2) 60 | self.update_yml_file() 61 | 62 | 63 | @click.command() 64 | # path should be the path of dependabot.yml inside a repo 65 | @click.option( 66 | '--path', default='.github/dependabot.yml', 67 | help="Path to target dependabot.yml file") 68 | # reviewer should be a github username or team name, 69 | # and the team name should be in the format of org-name/team-name 70 | @click.option( 71 | '--reviewer', default=None, 72 | help="Name of the reviewer") 73 | def main(path, reviewer): 74 | if not os.path.exists(path): 75 | new_file_content = """\ 76 | version: 2 77 | updates: 78 | """ 79 | with open(path, 'w') as file: 80 | file.write(new_file_content) 81 | modernizer = DependabotYamlModernizer(path, reviewer) 82 | modernizer.modernize() 83 | 84 | 85 | if __name__ == "__main__": 86 | main() 87 | -------------------------------------------------------------------------------- /edx_repo_tools/dev/__init__.py: -------------------------------------------------------------------------------- 1 | """Developer-oriented tools.""" 2 | -------------------------------------------------------------------------------- /edx_repo_tools/dev/clone_org.py: -------------------------------------------------------------------------------- 1 | """Clone an entire GitHub organization into current directory.""" 2 | 3 | import fnmatch 4 | import os.path 5 | import shutil 6 | 7 | import click 8 | from git.repo.base import Repo 9 | 10 | from edx_repo_tools.auth import pass_github 11 | 12 | 13 | @click.command() 14 | @click.option( 15 | '--archived/--no-archived', is_flag=True, default=False, 16 | help="Should archived repos be included?" 17 | ) 18 | @click.option( 19 | '--archived-only', is_flag=True, default=False, 20 | help="Should only archived repos be cloned?" 21 | ) 22 | @click.option( 23 | '--forks/--no-forks', is_flag=True, default=False, 24 | help="Should forks be included?" 
25 | ) 26 | @click.option( 27 | '--forks-only', is_flag=True, default=False, 28 | help="Should only forks be cloned?" 29 | ) 30 | @click.option( 31 | '--depth', type=int, default=0, 32 | help="Depth argument for git clone", 33 | ) 34 | @click.option( 35 | '--ignore', 36 | help="Glob pattern for repos to ignore", 37 | ) 38 | @click.option( 39 | '--prune', is_flag=True, default=False, 40 | help="Remove repos that we wouldn't have cloned", 41 | ) 42 | @click.argument( 43 | 'org' 44 | ) 45 | @pass_github 46 | def main(hub, archived, archived_only, forks, forks_only, depth, ignore, prune, org): 47 | """ 48 | Clone an entire GitHub organization into the current directory. 49 | Each repo becomes a subdirectory. 50 | """ 51 | if archived_only: 52 | archived = True 53 | if forks_only: 54 | forks = True 55 | dir_names = set() 56 | for repo in hub.organization(org).repositories(): 57 | if repo.fork and not forks: 58 | continue 59 | if not repo.fork and forks_only: 60 | continue 61 | if repo.archived and not archived: 62 | continue 63 | if not repo.archived and archived_only: 64 | continue 65 | if ignore and fnmatch.fnmatch(repo.name, ignore): 66 | print(f"Ignoring {repo.full_name}") 67 | continue 68 | dir_name = repo.name 69 | dir_name = dir_name.lstrip("-") # avoid dirname/option confusion 70 | dir_names.add(dir_name) 71 | if os.path.exists(dir_name): 72 | continue 73 | 74 | print("Cloning {}".format(repo.full_name)) 75 | clone_args = {} 76 | if depth: 77 | clone_args['depth'] = depth 78 | Repo.clone_from(repo.ssh_url, dir_name, **clone_args) 79 | 80 | if prune: 81 | for dir_name in os.listdir("."): 82 | if os.path.isdir(dir_name): 83 | if dir_name not in dir_names: 84 | print("Pruning {}".format(dir_name)) 85 | shutil.rmtree(dir_name) 86 | -------------------------------------------------------------------------------- /edx_repo_tools/dev/get_org_repo_urls.py: -------------------------------------------------------------------------------- 1 | """Get the urls to every 
repository in a GitHub organization.""" 2 | 3 | import os.path 4 | 5 | import click 6 | from git.repo.base import Repo 7 | 8 | from edx_repo_tools.auth import pass_github 9 | 10 | 11 | @click.command() 12 | @click.option( 13 | '--forks/--no-forks', is_flag=True, default=False, 14 | help="Should forks be included?" 15 | ) 16 | @click.argument( 17 | 'org' 18 | ) 19 | @click.option( 20 | '--url_type', default="ssh", 21 | help="options: ssh or https" 22 | ) 23 | @click.option( 24 | '--output_file', default="repositories.txt", 25 | help="where should script output urls" 26 | ) 27 | @click.option( 28 | '--add_archived', is_flag=True, default=False, 29 | help="Do you want urls for archived repos?") 30 | @click.option( 31 | '--ignore-repo', '-i', multiple=True, default=[], 32 | help="If you want to ignore any repo?") 33 | @pass_github 34 | def main(hub, forks, org, url_type, output_file, add_archived, ignore_repo): 35 | """ 36 | Used to get the urls for all the repositories in a github organization 37 | """ 38 | repositories = [] 39 | for repo in hub.organization(org).repositories(): 40 | if repo.fork and not forks: 41 | continue 42 | if repo.archived and not add_archived: 43 | continue 44 | if repo.name in ignore_repo: 45 | continue 46 | if url_type == "ssh": 47 | repositories.append(repo.ssh_url) 48 | else: 49 | repositories.append(repo.clone_url) 50 | with open(output_file, 'a') as filehandle: 51 | for repo_url in repositories: 52 | filehandle.write('%s\n' % repo_url) 53 | -------------------------------------------------------------------------------- /edx_repo_tools/dev/show_hooks.py: -------------------------------------------------------------------------------- 1 | """Show the hooks in an organization.""" 2 | 3 | import os.path 4 | import re 5 | 6 | import click 7 | from git.repo.base import Repo 8 | 9 | from edx_repo_tools.auth import pass_github 10 | from edx_repo_tools.helpers import paginated_get 11 | 12 | @click.command() 13 | @click.argument('org') 14 | 
@click.argument('pattern', required=False)
@pass_github
def main(hub, org, pattern=None):
    """
    Print the webhooks configured on every repo in ORG, optionally limited to
    hooks whose config values match the PATTERN regex.
    """
    for repo in hub.organization(org).repositories():
        printed_repo = False
        url = f"https://api.github.com/repos/{repo.full_name}/hooks"
        for r in paginated_get(url):
            if pattern:
                # BUG FIX: hook config values can be non-strings (ints, bools,
                # e.g. "insecure_ssl"), which made re.search raise TypeError.
                # Coerce to str, and stop at the first match.
                show_it = any(re.search(pattern, str(v)) for v in r['config'].values())
            else:
                show_it = True

            if show_it:
                if not printed_repo:
                    print(f"\n-- {repo.full_name} ---------------------")
                    print(f"   https://github.com/{repo.full_name}/settings/hooks")
                    printed_repo = True
                print("{r[name]}".format(r=r))
                for k, v in sorted(r['config'].items()):
                    print(f"  {k}: {v}")
markdown-it-py 17 | pygments==2.18.0 18 | # via rich 19 | requests==2.32.3 20 | # via -r edx_repo_tools/find_dependencies/extra.in 21 | requirements-parser==0.10.1 22 | # via -r edx_repo_tools/find_dependencies/extra.in 23 | rich==13.7.1 24 | # via -r edx_repo_tools/find_dependencies/extra.in 25 | types-setuptools==71.1.0.20240723 26 | # via requirements-parser 27 | urllib3==2.2.2 28 | # via requests 29 | -------------------------------------------------------------------------------- /edx_repo_tools/find_dependencies/find_python_dependencies.py: -------------------------------------------------------------------------------- 1 | """ 2 | Spider and catalog dependencies. 3 | $ python find_python_dependencies.py --req-file $FILE_PATH 4 | """ 5 | 6 | import click 7 | import json 8 | import os 9 | import requirements 10 | import sys 11 | from pathlib import Path 12 | import requests 13 | 14 | 15 | # The first of these we find is the requirements file we'll examine: 16 | def request_package_info_url(package): 17 | base_url = "https://pypi.org/pypi/" 18 | url = f"{base_url}{package}/json" 19 | response = requests.get(url) 20 | if response.status_code == 200: 21 | data_dict = response.json() 22 | info = data_dict["info"] 23 | return info["home_page"] 24 | else: 25 | print(f"Failed to retrieve data for package {package}. Status code:", response.status_code) 26 | 27 | FIRST_PARTY_ORGS = ["openedx"] 28 | 29 | SECOND_PARTY_ORGS = [ 30 | "edx", "edx-unsupported", "edx-solutions", 31 | "mitodl", 32 | "overhangio", 33 | "open-craft", "eduNEXT", "raccoongang", 34 | ] 35 | 36 | def urls_in_orgs(urls, orgs): 37 | """ 38 | Find urls that are in any of the `orgs`. 
39 | """ 40 | return sorted( 41 | url for url in urls 42 | if any(f"/{org}/" in url for org in orgs) 43 | ) 44 | 45 | @click.command() 46 | @click.option( 47 | '--req-file', 'directories', 48 | multiple=True, 49 | required=True, 50 | help="The absolute file paths to locate Python dependencies" 51 | "within a particular repository. You can provide this " 52 | "option multiple times to include multiple requirement files.", 53 | ) 54 | @click.option( 55 | '--ignore', 'ignore_paths', 56 | multiple=True, 57 | help="Dependency Repo URL to ignore even if it's" 58 | "outside of your organization's approved list", 59 | ) 60 | 61 | def main(directories=None, ignore_paths=None): 62 | """ 63 | Analyze the requirements in input directory mentioned on the command line. 64 | """ 65 | 66 | home_page = set() 67 | for directory in directories: 68 | with open(directory) as fbase: 69 | for req in requirements.parse(fbase): 70 | url = request_package_info_url(req.name) 71 | if url is not None: 72 | home_page.add(url) 73 | 74 | packages_urls = set(urls_in_orgs(home_page, SECOND_PARTY_ORGS)) 75 | 76 | if diff:= packages_urls.symmetric_difference(set(ignore_paths)): 77 | print("The following packages are from 2nd party orgs and should not be added as a core dependency, they can be added as an optional dependency operationally or they can be transferred to the openedx org before they are included:") 78 | print("\n".join(diff)) 79 | exit(1) 80 | 81 | if __name__ == "__main__": 82 | main() -------------------------------------------------------------------------------- /edx_repo_tools/gitgraft/README.md: -------------------------------------------------------------------------------- 1 | # gitgraft.py 2 | 3 | ## Overview 4 | Python script to assist migrating commits between otherwise disconnected github repositories. Specifically designed for repos created by breaking out subtrees via commands like ```git filter-branch```. 
The script works from a simple configuration file and should allow for repeated pulls of changes without duplicating commits. It will create a new branch on the target repo and generate a series of commits on that branch that match commits made to relevant files on the original branch.

## How does it work?

gitgraft takes a list of "tracked paths" (directories or files) that are configured to map two locations in different repos together. It then looks back a configurable number of days to find commits that are in common between them by comparing commit metadata such as committer, author, timestamps, and message. If ```git filter-branch``` was used to separate out the repo while keeping history, those things should stay the same.

It will then:
* Create a new local branch in the branched repository
* Look at any commits made to tracked paths in the original repo that do not exist in the branched repo for any tracked files changed as part of the commit
* If any files were found, a new commit is crafted for the branched repo with a commit message that looks like this:

```
Graft d1f2561

Grafting commit >>d1f256124cf4a304afed64e02be528a55126f7c2<<
Original commit by Jeremy Bowman on 2017-03-27T15:51:37 with this message:
----------------------------------------------------------------
PLAT-1198 Reduce risk of losing navigation events
```

If the new branch and commits look good and get integrated to the branched repo, you can re-run gitgraft to search for additional changes at any time. Subsequent runs will look in the commit messages for the SHA1 located between the >><< markers and use that to mark those commits as "moved", effectively ignoring them and allowing only the not-yet-grafted changes to be considered.
26 | 27 | ## Setup 28 | Make sure you've got all of the requirements: 29 | ``` 30 | pip install -r requirements.txt 31 | ``` 32 | 33 | Create a configuration file, which should look something like this: 34 | 35 | ``` 36 | # "original" means the repo with the commits you would like to take 37 | # "branched" means the repo that you will be copying those commits into 38 | 39 | [repositories] 40 | # Since we can't rely on git to reliably give us repo names, specify what we should call 41 | # the original repo here. 42 | original_repository_name = edx-platform 43 | 44 | # Local path to the top level repo directories. Must not be dirty! 45 | original_repository = /Users/brianmesick/Dev/edx-platform/edx-platform-head/edx-platform 46 | branched_repository = /Users/brianmesick/Dev/platform-core/platform-core 47 | 48 | # Branches to checkout to diff against 49 | original_branch = master 50 | branched_branch = bmedx/dogstats-and-markup 51 | 52 | [tracked_paths] 53 | # Maps paths which are relevant to this grafting, left is the original repo, right is the branched repo. 54 | tracked = 55 | openedx/core/lib/api/plugins.py > platform_core/lib/api/plugins.py 56 | common/lib/dogstats/dogstats_wrapper > platform_core/lib/dogstats_wrapper 57 | openedx/core/lib/cache_utils.py > platform_core/lib/cache_utils.py 58 | openedx/core/lib/course_tabs.py > platform_core/lib/course_tabs.py 59 | openedx/core/lib/tempdir.py > platform_core/lib/tempdir.py 60 | 61 | # Any paths under the tracked paths that you would like to *not* be considered. Useful 62 | # for large or frequently updated paths that are irrelevant and slow things down. These 63 | # are optional. 64 | original_ignored = 65 | 66 | branched_ignored = 67 | common/lib/dogstats/dogstats_wrapper/huge/directory/with/many/changes/ 68 | ``` 69 | 70 | Looking at all of the commits of large repositories can be slow, and can cause some false file matches. 
Having more, and specific, ```tracked``` and ```ignored``` options can save a lot of time and confusion. 71 | 72 | ## Usage 73 | 74 | ``` 75 | Usage: gitgraft.py [OPTIONS] CONF 76 | 77 | Creates a "best-guess" copy of commits across two unrelated (no consistent 78 | history) github repositories 79 | 80 | Options: 81 | --dry_run Do a test run without creating a branch or commits 82 | --verbose Verbose output 83 | --help Show this message and exit. 84 | ``` 85 | 86 | The output will show commits as they happen, including the impacted files. When run with ```--verbose``` extensive debugging information will be printed, which can assist with narrowing down issues with tracked or ignored paths. 87 | -------------------------------------------------------------------------------- /edx_repo_tools/gitgraft/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openedx/repo-tools/95ce6eeee34adfa1f06b3cb32262983513c2a541/edx_repo_tools/gitgraft/__init__.py -------------------------------------------------------------------------------- /edx_repo_tools/gitgraft/gitgraft_modulestore: -------------------------------------------------------------------------------- 1 | [repositories] 2 | original_repository_name = edx-platform 3 | original_repository = /Users/brianmesick/Dev/edx-platform/edx-platform-head/edx-platform 4 | branched_repository = /Users/brianmesick/Dev/modulestore/modulestore 5 | original_branch = master 6 | branched_branch = master 7 | 8 | [tracked_paths] 9 | tracked = 10 | common/lib/xmodule/xmodule > xmodule 11 | 12 | original_ignored = 13 | common/lib/xmodule/xmodule/js 14 | 15 | branched_ignored = 16 | xmodule/tests 17 | xmodule/modulestore/tests 18 | -------------------------------------------------------------------------------- /edx_repo_tools/gitgraft/gitgraft_platform_core: -------------------------------------------------------------------------------- 1 | [repositories] 2 | 
original_repository_name = platform-core 3 | original_repository = /Users/brianmesick/Dev/edx-platform/edx-platform-head/edx-platform 4 | branched_repository = /Users/brianmesick/Dev/platform-core/platform-core 5 | original_branch = master 6 | branched_branch = bmedx/dogstats-and-markup 7 | 8 | [tracked_paths] 9 | tracked = 10 | openedx/core/lib/api/plugins.py > platform_core/lib/api/plugins.py 11 | common/lib/dogstats/dogstats_wrapper > platform_core/lib/dogstats_wrapper 12 | openedx/core/lib/cache_utils.py > platform_core/lib/cache_utils.py 13 | openedx/core/lib/course_tabs.py > platform_core/lib/course_tabs.py 14 | openedx/core/lib/tempdir.py > platform_core/lib/tempdir.py 15 | 16 | original_ignored = 17 | 18 | branched_ignored = 19 | 20 | -------------------------------------------------------------------------------- /edx_repo_tools/helpers.py: -------------------------------------------------------------------------------- 1 | """Helpers for various things.""" 2 | 3 | 4 | import os 5 | import pprint 6 | import re 7 | import sys 8 | 9 | import requests as real_requests 10 | 11 | from urlobject import URLObject 12 | 13 | try: 14 | from cachecontrol import CacheControlAdapter 15 | from cachecontrol.caches import FileCache 16 | except ImportError: 17 | CacheControlAdapter = None 18 | 19 | 20 | class WrappedRequests: 21 | """A helper wrapper around requests. 22 | 23 | Provides uniform authentication and logging. 
24 | """ 25 | 26 | def __init__(self): 27 | self._session = None 28 | self.all_requests = None 29 | 30 | @property 31 | def session(self): 32 | if self._session is None: 33 | self._session = real_requests.Session() 34 | if CacheControlAdapter: 35 | adapter = CacheControlAdapter(cache=FileCache(".webcache")) 36 | self._session.mount("http://", adapter) 37 | self._session.mount("https://", adapter) 38 | print("Caching to .webcache") 39 | return self._session 40 | 41 | def record_request(self, method, url, args, kwargs): 42 | if 0: 43 | print(f"{method} {url}") 44 | if self.all_requests is None: 45 | return 46 | self.all_requests.append( 47 | "{}: {} {} {}".format( 48 | method, url, args if args else "", kwargs if kwargs else "" 49 | ).rstrip() 50 | ) 51 | 52 | def _kwargs(self, url, kwargs): 53 | """Adjust the kwargs for a request.""" 54 | if "auth" not in kwargs: 55 | # For Heroku, get github credentials from environment vars. 56 | if url.startswith("https://api.github.com"): 57 | user_name = os.environ.get("GITHUB_API_USER") 58 | token = os.environ.get("GITHUB_API_TOKEN") 59 | if user_name and token: 60 | kwargs["auth"] = (user_name, token) 61 | return kwargs 62 | 63 | def get(self, url, *args, **kwargs): 64 | self.record_request("GET", url, args, kwargs) 65 | response = self.session.get(url, *args, **self._kwargs(url, kwargs)) 66 | if 0: 67 | # Useful for diagnosing caching issues with the GitHub API. 
68 | print("request:") 69 | pprint.pprint(dict(response.request.headers)) 70 | if response.from_cache: 71 | info = "cached" 72 | else: 73 | info = "{} left".format(response.headers["X-RateLimit-Remaining"]) 74 | print("headers:") 75 | pprint.pprint(dict(response.headers)) 76 | print(f"GET {url}: {info}") 77 | return response 78 | 79 | def post(self, url, *args, **kwargs): 80 | self.record_request("POST", url, args, kwargs) 81 | return self.session.post(url, *args, **self._kwargs(url, kwargs)) 82 | 83 | 84 | # Now we can use requests as usual, or even import it from this module. 85 | requests = WrappedRequests() 86 | 87 | 88 | def paginated_get(url, limit=None, debug=False, **kwargs): 89 | """ 90 | Retrieve all objects from a paginated API. 91 | 92 | Assumes that the pagination is specified in the "link" header, like 93 | Github's v3 API. 94 | 95 | The `limit` describes how many results you'd like returned. You might get 96 | more than this, but you won't make more requests to the server once this 97 | limit has been exceeded. For example, paginating by 100, if you set a 98 | limit of 250, three requests will be made, and you'll get 300 objects. 
99 | 100 | """ 101 | url = URLObject(url).set_query_param('per_page', '100') 102 | limit = limit or 999999999 103 | returned = 0 104 | while url: 105 | resp = requests.get(url, **kwargs) 106 | result = resp.json() 107 | if not resp.ok: 108 | raise real_requests.exceptions.RequestException(result["message"]) 109 | if debug: 110 | pprint.pprint(result, stream=sys.stderr) 111 | for item in result: 112 | yield item 113 | returned += 1 114 | url = None 115 | if "link" in resp.headers and returned < limit: 116 | match = re.search(r'<(?P[^>]+)>; rel="next"', resp.headers["link"]) 117 | if match: 118 | url = match.group('url') 119 | -------------------------------------------------------------------------------- /edx_repo_tools/modernize_openedx_yaml.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from edx_repo_tools.utils import YamlLoader 4 | 5 | DEPRECATED_FIELDS = ['owner', 'supporting_teams', 'track_pulls', 'track-pulls'] 6 | 7 | 8 | class YamlModernizer(YamlLoader): 9 | """ 10 | Yaml Modernizer class is responsible for getting rid of obsolete elements from openedx.yaml files 11 | update the DEPRECATED_FIELDS list to adjust the modernizer output 12 | """ 13 | 14 | def __init__(self, file_path): 15 | super().__init__(file_path) 16 | 17 | def _remove_deprecated_elements(self): 18 | for deprecated_field in DEPRECATED_FIELDS: 19 | if deprecated_field in self.elements.keys(): 20 | del self.elements[deprecated_field] 21 | 22 | def modernize(self): 23 | self._remove_deprecated_elements() 24 | self.update_yml_file() 25 | 26 | 27 | @click.command() 28 | @click.option( 29 | '--path', default='openedx.yaml', 30 | help="Path to target openedx.yaml file") 31 | def main(path): 32 | modernizer = YamlModernizer(path) 33 | modernizer.modernize() 34 | 35 | 36 | if __name__ == "__main__": 37 | main() 38 | -------------------------------------------------------------------------------- 
/edx_repo_tools/pull_request_creator/extra.in: -------------------------------------------------------------------------------- 1 | # additional dependencies needed by pull_request_creator 2 | 3 | -c ../../requirements/constraints.txt 4 | 5 | PyGithub 6 | packaging # used in create pull request script to compare package versions 7 | -------------------------------------------------------------------------------- /edx_repo_tools/pull_request_creator/extra.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # make upgrade 6 | # 7 | certifi==2024.7.4 8 | # via requests 9 | cffi==1.16.0 10 | # via 11 | # cryptography 12 | # pynacl 13 | charset-normalizer==3.3.2 14 | # via requests 15 | cryptography==43.0.0 16 | # via pyjwt 17 | deprecated==1.2.14 18 | # via pygithub 19 | idna==3.7 20 | # via requests 21 | packaging==24.1 22 | # via -r edx_repo_tools/pull_request_creator/extra.in 23 | pycparser==2.22 24 | # via cffi 25 | pygithub==2.3.0 26 | # via -r edx_repo_tools/pull_request_creator/extra.in 27 | pyjwt[crypto]==2.8.0 28 | # via pygithub 29 | pynacl==1.5.0 30 | # via pygithub 31 | requests==2.32.3 32 | # via pygithub 33 | typing-extensions==4.12.2 34 | # via pygithub 35 | urllib3==2.2.2 36 | # via 37 | # pygithub 38 | # requests 39 | wrapt==1.16.0 40 | # via deprecated 41 | -------------------------------------------------------------------------------- /edx_repo_tools/release/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openedx/repo-tools/95ce6eeee34adfa1f06b3cb32262983513c2a541/edx_repo_tools/release/__init__.py -------------------------------------------------------------------------------- /edx_repo_tools/repo_access_scraper/README.rst: -------------------------------------------------------------------------------- 1 | Repo Access 
Scraper 2 | ################### 3 | 4 | This tool records who is granted write (or admin) access to a repo. It writes a .md file with teams and people who have write access to each repo. It also captures screenshots of the GitHub access settings pages for each involved repo and team. 5 | 6 | #. Create a Python 3.8 virtualenv. 7 | 8 | #. Install repo-tools (https://github.com/openedx/repo-tools) into your virtualenv, including the "repo_access_scraper" extra requirements:: 9 | 10 | $ python -m pip install '/path/to/repo-tools[repo_access_scraper]' 11 | 12 | #. You may need to install the playwright headless browsers:: 13 | 14 | $ playwright install 15 | 16 | #. Generate a GitHub personal access token, and define it in the environment:: 17 | 18 | $ export GITHUB_TOKEN=ghp_w3IJJ8YvqW4MJ....DGDpP8iOhko472RmIlP 19 | 20 | #. Run the tool, naming the repos you want to audit: 21 | 22 | .. code:: 23 | 24 | $ repo_access_scraper \ 25 | openedx/course-discovery \ 26 | openedx/frontend-app-publisher \ 27 | openedx/ecommerce \ 28 | openedx/ecommerce-worker \ 29 | openedx/frontend-app-payment \ 30 | openedx/frontend-app-ecommerce 31 | 32 | #. A report.md file will be created. 33 | 34 | #. An images.zip file will be created, with screenshots of the GitHub access settings and team members pages. 
35 | -------------------------------------------------------------------------------- /edx_repo_tools/repo_access_scraper/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openedx/repo-tools/95ce6eeee34adfa1f06b3cb32262983513c2a541/edx_repo_tools/repo_access_scraper/__init__.py -------------------------------------------------------------------------------- /edx_repo_tools/repo_access_scraper/extra.in: -------------------------------------------------------------------------------- 1 | # The repo_access_scraper code needs some extra packages installed that are 2 | # large and unusual, so we keep them separate here. 3 | 4 | -c ../../requirements/constraints.txt 5 | 6 | playwright 7 | -------------------------------------------------------------------------------- /edx_repo_tools/repo_access_scraper/extra.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # make upgrade 6 | # 7 | greenlet==3.0.3 8 | # via 9 | # -c edx_repo_tools/repo_access_scraper/../../requirements/constraints.txt 10 | # playwright 11 | playwright==1.45.1 12 | # via -r edx_repo_tools/repo_access_scraper/extra.in 13 | pyee==11.1.0 14 | # via playwright 15 | typing-extensions==4.12.2 16 | # via pyee 17 | -------------------------------------------------------------------------------- /edx_repo_tools/repo_checks/README.rst: -------------------------------------------------------------------------------- 1 | Repo Checks 2 | ########### 3 | 4 | This is a tool & a lightweight framework for automating administrative tasks through GitHub's API. 
These checks are generally written by Axim engineers to help us establish some consistency across the plethora of repositories under the `openedx GitHub organization <https://github.com/openedx>`_, although they theoretically could be applied to any GitHub organization.

Concepts
********

A "repo check" is something that we want to ensure about a given repository. Each repo check defines the following:

* ``is_relevant``: Does this check even make sense on the given repo?
* ``check``: Does the repo satisfy the check's conditions?
* ``dry_run``: Based on the results of ``check``, display any problems.
* ``fix``: Based on the results of ``check``, actively fix the problems.

The ``repo_checks`` command line tool lets you execute these checks, either as dry runs or as active fixes.

Usage
*****

You will need a GH personal access token (classic, not "Fine-grained tokens") with the following scopes:

* admin:org
* repo
* user
* workflow

First, set up repo-tools as described in `the root README <../../README.rst>`_.
There are a few ways to do this; one way is::

    export GITHUB_TOKEN="$(pass github-token)"  # assumes you have passwordstore.org

    python3 -m venv venv
    . venv/bin/activate
    pip install -e .[repo_checks]

Then, dry-run the script (one of these)::

    repo_checks                                       # all repos & checks
    repo_checks -r edx-platform -r frontend-platform  # limit repos
    repo_checks -c EnsureLabels -c RequiredCLACheck   # limit checks
    repo_checks -c EnsureLabels -r edx-platform       # single repo & check

Finally, when you're ready, you can actually apply the fixes to GitHub::

    repo_checks --no-dry-run <... same args you used above ...>

Note this will open pull requests in the relevant repos.
Some repos intentionally don't have certain workflows (for example, ``docs.openedx.org`` does not use ``commitlint``), so please tag maintainers on the pull requests so they can decide whether or not to use the added or changed workflows.

When running over all repos in an organization, the script runs on the newest repos first as those are the most likely
to be out of compliance.

A note about rate-limiting: if your run is halted due to rate-limiting, note the last repo that the check was running on
in the output and restart the job from there once your rate limit has been reset::

    repo_checks ...                            # original run
    ...                                        # rate limiting or other error halts the run
    repo_checks ... --start-at "<repo-name>"   # Re-run starting from where we halted.

Contributing
************

* Make changes on your branch.

* Consider adding `to the test suite <../../tests/test_repo_checks.py>`_ even though it is currently sparse.

* CI will run tests for you, but not linting, so ensure your changes don't break repo_checks' pylint: ``pylint edx_repo_tools/repo_checks``.

* Dry-run the script and save the output (non-Axim engineers: you should be able to do this with a read-only GH access token).

* Open a PR. Paste your dry-run output into the PR (https://gist.github.com is helpful for long outputs).

* Ping ``#ask-axim`` for review.

* Once approved, apply and merge (non-Axim engineers: ask your Axim reviewer to do this part for you).

* Run the script with ``--no-dry-run``, saving the output. Paste the output into the PR for future reference.

* If something went wrong, push fixes to the PR and try again. Repeat as necessary.

* Once successfully applied, merge the PR.
84 | -------------------------------------------------------------------------------- /edx_repo_tools/repo_checks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openedx/repo-tools/95ce6eeee34adfa1f06b3cb32262983513c2a541/edx_repo_tools/repo_checks/__init__.py -------------------------------------------------------------------------------- /edx_repo_tools/repo_checks/extra.in: -------------------------------------------------------------------------------- 1 | -c ../../requirements/constraints.txt 2 | 3 | cache_to_disk 4 | click 5 | ghapi 6 | pyyaml 7 | requests 8 | -------------------------------------------------------------------------------- /edx_repo_tools/repo_checks/extra.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # make upgrade 6 | # 7 | cache-to-disk==2.0.0 8 | # via -r edx_repo_tools/repo_checks/extra.in 9 | certifi==2024.7.4 10 | # via requests 11 | charset-normalizer==3.3.2 12 | # via requests 13 | click==8.1.7 14 | # via -r edx_repo_tools/repo_checks/extra.in 15 | fastcore==1.5.54 16 | # via ghapi 17 | ghapi==1.0.5 18 | # via -r edx_repo_tools/repo_checks/extra.in 19 | idna==3.7 20 | # via requests 21 | packaging==24.1 22 | # via 23 | # fastcore 24 | # ghapi 25 | pyyaml==6.0.1 26 | # via -r edx_repo_tools/repo_checks/extra.in 27 | requests==2.32.3 28 | # via -r edx_repo_tools/repo_checks/extra.in 29 | urllib3==2.2.2 30 | # via requests 31 | 32 | # The following packages are considered to be unsafe in a requirements file: 33 | # pip 34 | -------------------------------------------------------------------------------- /edx_repo_tools/utils.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import os 3 | import subprocess 4 | 5 | import click 6 | from ruamel.yaml 
def dry_echo(dry, message, *args, **kwargs):
    """
    Print a command to the console (like :func:`click.echo`), but if ``dry`` is True,
    then prefix the message with a warning message stating that the action was
    skipped. All unknown args and kwargs are passed to :func:`click.style`.

    Example usage:

        dry_echo(dry, "Firing ze missiles!", fg='red')
        if not dry:
            fire_ze_missiles()

    Arguments:
        dry (bool): Whether to prefix the dry-run notification
        message: The message to print
    """
    prefix = click.style("DRY RUN - SKIPPED: ", fg='yellow', bold=True) if dry else ""
    click.echo(prefix + click.style(message, *args, **kwargs))


def dry(f, help='Disable or enable actions taken by the script'):
    """
    A click decorator that adds a ``--dry/--doit`` flag. It is passed to the
    command as ``dry``, and defaults to True.

    Arguments:
        f: The click command function being decorated.
        help (str): Help text shown for the flag.
    """
    return click.option(
        '--dry/--doit',
        is_flag=True,
        default=True,
        help=help,
    )(f)


class YamlLoader:
    """
    Round-trip loader/dumper for a single YAML file.

    Configured to preserve the author's quoting and block style, with a
    consistent two-space indent, so that programmatic edits produce
    minimal diffs.

    The parsed document is available as ``self.elements`` after construction.
    """

    def __init__(self, file_path):
        # Path of the YAML file this instance edits in place.
        self.file_path = file_path
        self.yml_instance = YAML()
        # Round-trip settings: keep quoting/flow style as authored.
        self.yml_instance.preserve_quotes = True
        self.yml_instance.default_flow_style = None
        self.yml_instance.indent(mapping=2, sequence=2, offset=0)
        self._load_file()

    def _load_file(self):
        """Parse the YAML file into ``self.elements``."""
        # YAML streams are UTF-8 by spec; don't depend on the locale default.
        with open(self.file_path, encoding="utf-8") as file_stream:
            self.elements = self.yml_instance.load(file_stream)

    def update_yml_file(self):
        """Write ``self.elements`` back to the file in place."""
        with open(self.file_path, 'w', encoding="utf-8") as file_stream:
            self.yml_instance.dump(self.elements, file_stream)


def get_cmd_output(cmd):
    """Run a command in shell, and return the Unicode output.

    stderr is folded into stdout. If the command fails, its captured
    output is returned instead of raising.

    Arguments:
        cmd (str): The shell command line to run.

    Returns:
        str: The decoded combined output of the command.
    """
    try:
        data = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as ex:
        # Best-effort: a failing command still yields whatever it printed.
        data = ex.output
    try:
        data = data.decode("utf-8")
    except UnicodeDecodeError:
        # latin1 maps every byte to a character, so this cannot fail.
        data = data.decode("latin1")
    return data


@contextlib.contextmanager
def change_dir(new_dir):
    """Change directory, and then change back.

    Use as a context manager, it will give you the new directory, and later
    restore the old one.

    Arguments:
        new_dir (str): The directory to change into.

    Yields:
        str: The absolute path of the new working directory.
    """
    old_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield os.getcwd()
    finally:
        os.chdir(old_dir)
-name .git -type d -prune | while read d; do 40 | local d=$(dirname "$d") 41 | if [[ "$test_branch" != "" ]]; then 42 | git -C "$d" rev-parse --verify -q "$test_branch" >& /dev/null || continue 43 | fi 44 | if [[ $show_dir == true ]]; then 45 | echo "---- $d ----" 46 | fi 47 | if [[ $show_repo == true ]]; then 48 | echo "----" $(git -C "$d" config --get remote.origin.url) "----" 49 | fi 50 | if [[ $# == 1 && $1 == *' '* ]]; then 51 | (cd "$d" && eval "$1") 52 | else 53 | (cd "$d" && "$@") 54 | fi 55 | done 56 | } 57 | 58 | gittree() { 59 | # Run a command on all git repos. 60 | gittreeif "" "$@" 61 | } 62 | -------------------------------------------------------------------------------- /maintainer_reports/.gitignore: -------------------------------------------------------------------------------- 1 | google-service-credentials.json 2 | -------------------------------------------------------------------------------- /maintainer_reports/README.md: -------------------------------------------------------------------------------- 1 | A CLI for querying GraphQL data from GitHub and persisiting it to BigQuery for 2 | analysis and visualization. 3 | 4 | ### Getting Started 5 | In order to run you will need a JSON credentials file for Google Cloud. 6 | 7 | The file is expected to be named `google-service-credentials.json` and in the same directory as the script. The credentials should be associated with a GCP service account added to a GCP project. The current service account is: pull-request-reporter@pull-request-reporting.iam.gserviceaccount.com 8 | 9 | Additionall you will need to set a GitHub bearer token as an environment variable. That token must be associated with an account with the appropriate permsions to access the repositories you are querying. 
10 | 11 | `export GH_BEARER_TOKEN=ghp_V2xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx` 12 | 13 | ### Related Resources 14 | 15 | - [BigQuery Workspace](https://console.cloud.google.com/bigquery?project=pull-request-reporting&ws=!1m4!1m3!3m2!1spull-request-reporting!2sopen_edx_github) 16 | - [GCP Project](https://console.cloud.google.com/home/dashboard?project=pull-request-reporting) 17 | - [Datastudio Dashboard](https://datastudio.google.com/reporting/64382012-1c39-4af0-8fa2-84c32c5192f5/page/p_ermoo938zc) 18 | - [Authoritative Repository Sheet](https://docs.google.com/spreadsheets/d/1tI5OPMjnhL6obzynuAUURLjOSN72QWCkHSXGGoFLMG8/edit#gid=819569654) 19 | 20 | ### Get Help 21 | `python ./graphql_requests.py --help` 22 | 23 | ### Process Open Pull Requests 24 | `python ./graphql_requests.py handle-open-pulls` 25 | 26 | ### Process Closed Pull Requests. 27 | `python ./graphql_requests.py handle-closed-pulls` 28 | -------------------------------------------------------------------------------- /maintainer_reports/gcp_wrapper_open_pulls.py: -------------------------------------------------------------------------------- 1 | import click 2 | from graphql_requests import * 3 | 4 | if __name__=='__main__': 5 | # 6 | # Wrapping this in a try block to prevent Click from exiting directly. 7 | # GCP Cloud Functions doesn't want programs to do so, but return so their 8 | # wrapper can handle cleanly exiting the environment. 9 | # 10 | try: 11 | # pylint: disable=no-value-for-parameter 12 | cli(["handle-open-pulls"]) 13 | except SystemExit as e: 14 | if e.code != 0: 15 | raise 16 | -------------------------------------------------------------------------------- /maintainer_reports/graphql_util.py: -------------------------------------------------------------------------------- 1 | """ 2 | A collection of utility functions for interacting with the GitHub 3 | GraphQL API. 
import os
from time import sleep

import click
import requests
from dateutil.rrule import MONTHLY, rrule
from requests.adapters import HTTPAdapter, Retry
from dotenv import load_dotenv

DATE_MASK = '%Y-%m-%d'

# Added for GCP support, should consider implications for
# running locally.
load_dotenv()
GH_BEARER_TOKEN = os.getenv('GH_BEARER_TOKEN')

if not GH_BEARER_TOKEN:
    raise Exception("Didn't find the GH_BEARER_TOKEN set in the environment")

HEADERS = {"Authorization": f"Bearer {GH_BEARER_TOKEN}"}
END_POINT = "https://api.github.com/graphql"

# Seconds to throttle between calls
SLEEP_BETWEEN_CALLS = 8


def run_query_with_retry(query):
    """
    Runs a GitHub GraphQL query automatically retrying failed queries up to
    10 times with backoff.

    Arguments:
        query (str): The GraphQL query document to POST.

    Returns:
        dict: The decoded JSON response body.

    Raises:
        Exception: If the final response status code is not 200.
    """
    # Utilizing Requests sessions to retry with backoff for specific status codes.
    # GitHub's API is pretty flakey and even with this configuration failures
    # occasionally occur.
    session = requests.Session()
    retries = Retry(total=10, backoff_factor=5, status_forcelist=[502, 503, 504])
    session.mount('https://', HTTPAdapter(max_retries=retries))

    response = session.post(END_POINT, json={'query': query}, headers=HEADERS)

    if response.status_code == 200:
        return response.json()
    raise Exception(f"GraphQL call failed, returning status code of {response.status_code}")


def retrieve_paginated_results(query):
    """
    Github GraphQL queries are paginated and retrieving all results requires
    following the page references provided in each subsequent response. This
    function aggregates all results across the pages and returns a single array
    containing all the results.

    Arguments:
        query (str): A GraphQL search query containing the ``_END_CURSOR_``
            placeholder, which is replaced with ``null`` for the first page
            and the previous page's ``endCursor`` for later pages.

    Returns:
        list: All ``search.nodes`` entries across every page.
    """
    data = run_query_with_retry(query.replace("_END_CURSOR_", "null"))
    results = data["data"]["search"]["nodes"]
    has_next_page = data["data"]["search"]["pageInfo"]["hasNextPage"]

    while has_next_page:
        click.echo("Handling next page")
        end_cursor = data["data"]["search"]["pageInfo"]["endCursor"]
        # Throttle between calls to stay under GitHub's rate limits.
        sleep(SLEEP_BETWEEN_CALLS)

        data = run_query_with_retry(query=query.replace("_END_CURSOR_", '"' + end_cursor + '"'))
        results = results + data["data"]["search"]["nodes"]
        has_next_page = data["data"]["search"]["pageInfo"]["hasNextPage"]

    return results


def date_slice_query(query, start_date, end_date, time_bucket=MONTHLY):
    """
    Runs a query over a set of smaller timespans to provide flexibility and reduce
    errors interacting with the flakey GraphQL service offered by GitHub.

    Arguments:
        query (str): A query containing the ``_RANGE_`` placeholder, replaced
            with ``YYYY-MM-DD..YYYY-MM-DD`` for each slice.
        start_date (datetime): Beginning of the overall range.
        end_date (datetime): End of the overall range.
        time_bucket: An ``rrule`` frequency for slicing (default MONTHLY).

    Returns:
        list: The combined results of every date slice.
    """
    # BUG FIX: ``results`` was previously re-initialized inside the loop,
    # which discarded every slice except the last. It must accumulate
    # across all buckets.
    results = []
    query_start_date = None

    for bucket in rrule(time_bucket, dtstart=start_date, until=end_date):
        if query_start_date is None:
            # First bucket only establishes the start of the first slice.
            query_start_date = bucket
            continue

        sleep(SLEEP_BETWEEN_CALLS)

        click.echo(f"Handling date range {query_start_date.strftime(DATE_MASK)} to {bucket.strftime(DATE_MASK)}")

        date_range = query_start_date.strftime(DATE_MASK) + ".." + bucket.strftime(DATE_MASK)
        results = results + retrieve_paginated_results(query.replace("_RANGE_", date_range))

        query_start_date = bucket

    return results
10 | # GCP Cloud Functions doesn't want programs to do so, but return so their 11 | # wrapper can handle cleanly exiting the environment. 12 | # 13 | try: 14 | # pylint: disable=no-value-for-parameter 15 | cli(["handle-open-pulls"]) 16 | except SystemExit as sys_exit: 17 | if sys_exit.code != 0: 18 | raise 19 | 20 | 21 | if __name__=='__main__': 22 | main('data','context') 23 | -------------------------------------------------------------------------------- /maintainer_reports/requirements.txt: -------------------------------------------------------------------------------- 1 | click 2 | python-dotenv 3 | requests 4 | gspread 5 | python_graphql_client 6 | google-cloud-bigquery 7 | -------------------------------------------------------------------------------- /maintainer_reports/sql_queries.py: -------------------------------------------------------------------------------- 1 | """ 2 | Provides SQL queries used for interacting with BigQuery. 3 | """ 4 | 5 | # This query is mostly generated using the BigQuery Information Schema. 
6 | # 7 | # select concat("_target.",column_name," = ","_source.",column_name) 8 | # from `pull-request-reporting.open_edx_github.INFORMATION_SCHEMA.COLUMNS` 9 | # where table_name = 'closed_pull_requests'; 10 | # 11 | UPSERT_CLOSED_PULLS = """merge `pull-request-reporting.open_edx_github.closed_pull_requests` _target 12 | using `pull-request-reporting.open_edx_github._TEMP_TABLE_NAME_` _source 13 | on _target.permalink = _source.permalink 14 | when matched then 15 | update 16 | set 17 | _target.additions = _source.additions, 18 | _target.labels = _source.labels, 19 | _target.mergedAt = _source.mergedAt, 20 | _target.isDraft = _source.isDraft, 21 | _target.lastEditedAt = _source.lastEditedAt, 22 | _target.updatedAt = _source.updatedAt, 23 | _target.headRef = _source.headRef, 24 | _target.permalink = _source.permalink, 25 | _target.changedFiles = _source.changedFiles, 26 | _target.deletions = _source.deletions, 27 | _target.mergedBy = _source.mergedBy, 28 | _target.url = _source.url, 29 | _target.author = _source.author, 30 | _target.baseRef = _source.baseRef, 31 | _target.createdAt = _source.createdAt, 32 | _target.number = _source.number, 33 | _target.repository = _source.repository, 34 | _target.commits = _source.commits, 35 | _target.state = _source.state, 36 | _target.title = _source.title 37 | when not matched then 38 | insert (additions, labels, mergedAt, isDraft, lastEditedAt, updatedAt, headRef, permalink, changedFiles, deletions, mergedBy, url, author, baseRef, createdAt, number, repository, commits, state, title) 39 | values (additions, labels, mergedAt, isDraft, lastEditedAt, updatedAt, headRef, permalink, changedFiles, deletions, mergedBy, url, author, baseRef, createdAt, number, repository, commits, state, title); 40 | """ 41 | 42 | -------------------------------------------------------------------------------- /pylintrc_tweaks: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | load-plugins = 
edx_lint.pylint 3 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # This is a stub file to prevent any external calls to this file from breaking. 2 | # This should be removed when we are sure that nothing references this 3 | # original requirements location. 4 | -r requirements/base.txt 5 | -------------------------------------------------------------------------------- /requirements/base.in: -------------------------------------------------------------------------------- 1 | -c constraints.txt 2 | 3 | # TODO: This list could be trimmed down by moving 4 | # some requirements out to the extra.in files of the tools 5 | # that actually need them. 6 | 7 | CacheControl 8 | PyYAML 9 | appdirs 10 | backports.csv 11 | click 12 | github3.py 13 | gitpython 14 | lazy 15 | lockfile 16 | more_itertools 17 | path.py 18 | python-dateutil 19 | python-dotenv 20 | requests 21 | statistics 22 | tqdm 23 | uritemplate 24 | urlobject 25 | ruamel.yaml 26 | bowler 27 | 28 | # tox is used by the OEP10 checker 29 | tox 30 | -------------------------------------------------------------------------------- /requirements/base.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # make upgrade 6 | # 7 | appdirs==1.4.4 8 | # via 9 | # -r requirements/base.in 10 | # fissix 11 | attrs==23.2.0 12 | # via bowler 13 | backports-csv==1.0.7 14 | # via -r requirements/base.in 15 | bowler==0.9.0 16 | # via -r requirements/base.in 17 | cachecontrol==0.14.0 18 | # via -r requirements/base.in 19 | cachetools==5.4.0 20 | # via tox 21 | certifi==2024.7.4 22 | # via requests 23 | cffi==1.16.0 24 | # via cryptography 25 | chardet==5.2.0 26 | # via tox 27 | charset-normalizer==3.3.2 28 | # via requests 29 | click==8.1.7 30 | # via 31 
| # -r requirements/base.in 32 | # bowler 33 | # moreorless 34 | colorama==0.4.6 35 | # via tox 36 | cryptography==43.0.0 37 | # via pyjwt 38 | distlib==0.3.8 39 | # via virtualenv 40 | docutils==0.21.2 41 | # via statistics 42 | execnet==2.1.1 43 | # via pytest-xdist 44 | filelock==3.15.4 45 | # via 46 | # tox 47 | # virtualenv 48 | fissix==24.4.24 49 | # via bowler 50 | gitdb==4.0.11 51 | # via gitpython 52 | github3-py==4.0.1 53 | # via -r requirements/base.in 54 | gitpython==3.1.43 55 | # via -r requirements/base.in 56 | idna==3.7 57 | # via requests 58 | iniconfig==2.0.0 59 | # via pytest 60 | lazy==1.6 61 | # via -r requirements/base.in 62 | lockfile==0.12.2 63 | # via -r requirements/base.in 64 | more-itertools==10.3.0 65 | # via -r requirements/base.in 66 | moreorless==0.4.0 67 | # via bowler 68 | msgpack==1.0.8 69 | # via cachecontrol 70 | packaging==24.1 71 | # via 72 | # pyproject-api 73 | # pytest 74 | # tox 75 | path==16.14.0 76 | # via path-py 77 | path-py==12.5.0 78 | # via -r requirements/base.in 79 | platformdirs==4.2.2 80 | # via 81 | # tox 82 | # virtualenv 83 | pluggy==1.5.0 84 | # via 85 | # pytest 86 | # tox 87 | pycparser==2.22 88 | # via cffi 89 | pyjwt[crypto]==2.8.0 90 | # via github3-py 91 | pyproject-api==1.7.1 92 | # via tox 93 | pytest==8.3.1 94 | # via 95 | # -r requirements/base.in 96 | # pytest-logging 97 | # pytest-xdist 98 | pytest-logging==2015.11.4 99 | # via -r requirements/base.in 100 | pytest-xdist==3.6.1 101 | # via -r requirements/base.in 102 | python-dateutil==2.9.0.post0 103 | # via 104 | # -r requirements/base.in 105 | # github3-py 106 | python-dotenv==1.0.1 107 | # via -r requirements/base.in 108 | pyyaml==6.0.1 109 | # via -r requirements/base.in 110 | requests==2.32.3 111 | # via 112 | # -r requirements/base.in 113 | # cachecontrol 114 | # github3-py 115 | ruamel-yaml==0.18.6 116 | # via -r requirements/base.in 117 | ruamel-yaml-clib==0.2.8 118 | # via ruamel-yaml 119 | six==1.16.0 120 | # via python-dateutil 121 | 
smmap==5.0.1 122 | # via gitdb 123 | statistics==1.0.3.5 124 | # via -r requirements/base.in 125 | tox==4.16.0 126 | # via -r requirements/base.in 127 | tqdm==4.66.4 128 | # via -r requirements/base.in 129 | uritemplate==4.1.1 130 | # via 131 | # -r requirements/base.in 132 | # github3-py 133 | urllib3==2.2.2 134 | # via requests 135 | urlobject==2.4.3 136 | # via -r requirements/base.in 137 | virtualenv==20.26.3 138 | # via tox 139 | volatile==2.1.0 140 | # via bowler 141 | -------------------------------------------------------------------------------- /requirements/common_constraints.txt: -------------------------------------------------------------------------------- 1 | # A central location for most common version constraints 2 | # (across edx repos) for pip-installation. 3 | # 4 | # Similar to other constraint files this file doesn't install any packages. 5 | # It specifies version constraints that will be applied if a package is needed. 6 | # When pinning something here, please provide an explanation of why it is a good 7 | # idea to pin this package across all edx repos, Ideally, link to other information 8 | # that will help people in the future to remove the pin when possible. 9 | # Writing an issue against the offending project and linking to it here is good. 10 | # 11 | # Note: Changes to this file will automatically be used by other repos, referencing 12 | # this file from Github directly. It does not require packaging in edx-lint. 13 | 14 | 15 | # using LTS django version 16 | Django<5.0 17 | 18 | # elasticsearch>=7.14.0 includes breaking changes in it which caused issues in discovery upgrade process. 
19 | # elastic search changelog: https://www.elastic.co/guide/en/enterprise-search/master/release-notes-7.14.0.html 20 | # See https://github.com/openedx/edx-platform/issues/35126 for more info 21 | elasticsearch<7.14.0 22 | 23 | # django-simple-history>3.0.0 adds indexing and causes a lot of migrations to be affected 24 | django-simple-history==3.0.0 25 | 26 | # opentelemetry requires version 6.x at the moment: 27 | # https://github.com/open-telemetry/opentelemetry-python/issues/3570 28 | # Normally this could be added as a constraint in edx-django-utils, where we're 29 | # adding the opentelemetry dependency. However, when we compile pip-tools.txt, 30 | # that uses version 7.x, and then there's no undoing that when compiling base.txt. 31 | # So we need to pin it globally, for now. 32 | # Ticket for unpinning: https://github.com/openedx/edx-lint/issues/407 33 | importlib-metadata<7 34 | 35 | # Cause: https://github.com/openedx/event-tracking/pull/290 36 | # event-tracking 2.4.1 upgrades to pymongo 4.4.0 which is not supported on edx-platform. 
37 | # We will pin event-tracking to do not break existing installations 38 | # This can be unpinned once https://github.com/openedx/edx-platform/issues/34586 39 | # has been resolved and edx-platform is running with pymongo>=4.4.0 40 | event-tracking<2.4.1 41 | -------------------------------------------------------------------------------- /requirements/constraints.txt: -------------------------------------------------------------------------------- 1 | -c common_constraints.txt 2 | 3 | # playwright and sqlalchemy requirements conflict for greenlet<=3.0.1 4 | greenlet>3.0.1 5 | -------------------------------------------------------------------------------- /requirements/development.in: -------------------------------------------------------------------------------- 1 | -c constraints.txt 2 | -r base.txt 3 | 4 | edx-lint 5 | pip-tools 6 | pytest 7 | pytest-mock 8 | responses 9 | -------------------------------------------------------------------------------- /requirements/development.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # make upgrade 6 | # 7 | appdirs==1.4.4 8 | # via 9 | # -r requirements/base.txt 10 | # fissix 11 | astroid==3.2.4 12 | # via 13 | # pylint 14 | # pylint-celery 15 | attrs==23.2.0 16 | # via 17 | # -r requirements/base.txt 18 | # bowler 19 | backports-csv==1.0.7 20 | # via -r requirements/base.txt 21 | bowler==0.9.0 22 | # via -r requirements/base.txt 23 | build==1.2.1 24 | # via pip-tools 25 | cachecontrol==0.14.0 26 | # via -r requirements/base.txt 27 | cachetools==5.4.0 28 | # via 29 | # -r requirements/base.txt 30 | # tox 31 | certifi==2024.7.4 32 | # via 33 | # -r requirements/base.txt 34 | # requests 35 | cffi==1.16.0 36 | # via 37 | # -r requirements/base.txt 38 | # cryptography 39 | chardet==5.2.0 40 | # via 41 | # -r requirements/base.txt 42 | # tox 43 | charset-normalizer==3.3.2 
44 | # via 45 | # -r requirements/base.txt 46 | # requests 47 | click==8.1.7 48 | # via 49 | # -r requirements/base.txt 50 | # bowler 51 | # click-log 52 | # code-annotations 53 | # edx-lint 54 | # moreorless 55 | # pip-tools 56 | click-log==0.4.0 57 | # via edx-lint 58 | code-annotations==1.8.0 59 | # via edx-lint 60 | colorama==0.4.6 61 | # via 62 | # -r requirements/base.txt 63 | # tox 64 | cryptography==43.0.0 65 | # via 66 | # -r requirements/base.txt 67 | # pyjwt 68 | dill==0.3.8 69 | # via pylint 70 | distlib==0.3.8 71 | # via 72 | # -r requirements/base.txt 73 | # virtualenv 74 | docutils==0.21.2 75 | # via 76 | # -r requirements/base.txt 77 | # statistics 78 | edx-lint==5.3.7 79 | # via -r requirements/development.in 80 | execnet==2.1.1 81 | # via 82 | # -r requirements/base.txt 83 | # pytest-xdist 84 | filelock==3.15.4 85 | # via 86 | # -r requirements/base.txt 87 | # tox 88 | # virtualenv 89 | fissix==24.4.24 90 | # via 91 | # -r requirements/base.txt 92 | # bowler 93 | gitdb==4.0.11 94 | # via 95 | # -r requirements/base.txt 96 | # gitpython 97 | github3-py==4.0.1 98 | # via -r requirements/base.txt 99 | gitpython==3.1.43 100 | # via -r requirements/base.txt 101 | idna==3.7 102 | # via 103 | # -r requirements/base.txt 104 | # requests 105 | iniconfig==2.0.0 106 | # via 107 | # -r requirements/base.txt 108 | # pytest 109 | isort==5.13.2 110 | # via pylint 111 | jinja2==3.1.4 112 | # via code-annotations 113 | lazy==1.6 114 | # via -r requirements/base.txt 115 | lockfile==0.12.2 116 | # via -r requirements/base.txt 117 | markupsafe==2.1.5 118 | # via jinja2 119 | mccabe==0.7.0 120 | # via pylint 121 | more-itertools==10.3.0 122 | # via -r requirements/base.txt 123 | moreorless==0.4.0 124 | # via 125 | # -r requirements/base.txt 126 | # bowler 127 | msgpack==1.0.8 128 | # via 129 | # -r requirements/base.txt 130 | # cachecontrol 131 | packaging==24.1 132 | # via 133 | # -r requirements/base.txt 134 | # build 135 | # pyproject-api 136 | # pytest 137 | # tox 
138 | path==16.14.0 139 | # via 140 | # -r requirements/base.txt 141 | # path-py 142 | path-py==12.5.0 143 | # via -r requirements/base.txt 144 | pbr==6.0.0 145 | # via stevedore 146 | pip-tools==7.4.1 147 | # via -r requirements/development.in 148 | platformdirs==4.2.2 149 | # via 150 | # -r requirements/base.txt 151 | # pylint 152 | # tox 153 | # virtualenv 154 | pluggy==1.5.0 155 | # via 156 | # -r requirements/base.txt 157 | # pytest 158 | # tox 159 | pycparser==2.22 160 | # via 161 | # -r requirements/base.txt 162 | # cffi 163 | pyjwt[crypto]==2.8.0 164 | # via 165 | # -r requirements/base.txt 166 | # github3-py 167 | pylint==3.2.6 168 | # via 169 | # edx-lint 170 | # pylint-celery 171 | # pylint-django 172 | # pylint-plugin-utils 173 | pylint-celery==0.3 174 | # via edx-lint 175 | pylint-django==2.5.5 176 | # via edx-lint 177 | pylint-plugin-utils==0.8.2 178 | # via 179 | # pylint-celery 180 | # pylint-django 181 | pyproject-api==1.7.1 182 | # via 183 | # -r requirements/base.txt 184 | # tox 185 | pyproject-hooks==1.1.0 186 | # via 187 | # build 188 | # pip-tools 189 | pytest==8.3.1 190 | # via 191 | # -r requirements/base.txt 192 | # -r requirements/development.in 193 | # pytest-logging 194 | # pytest-mock 195 | # pytest-xdist 196 | pytest-logging==2015.11.4 197 | # via -r requirements/base.txt 198 | pytest-mock==3.14.0 199 | # via -r requirements/development.in 200 | pytest-xdist==3.6.1 201 | # via -r requirements/base.txt 202 | python-dateutil==2.9.0.post0 203 | # via 204 | # -r requirements/base.txt 205 | # github3-py 206 | python-dotenv==1.0.1 207 | # via -r requirements/base.txt 208 | python-slugify==8.0.4 209 | # via code-annotations 210 | pyyaml==6.0.1 211 | # via 212 | # -r requirements/base.txt 213 | # code-annotations 214 | # responses 215 | requests==2.32.3 216 | # via 217 | # -r requirements/base.txt 218 | # cachecontrol 219 | # github3-py 220 | # responses 221 | responses==0.25.3 222 | # via -r requirements/development.in 223 | 
ruamel-yaml==0.18.6 224 | # via -r requirements/base.txt 225 | ruamel-yaml-clib==0.2.8 226 | # via 227 | # -r requirements/base.txt 228 | # ruamel-yaml 229 | six==1.16.0 230 | # via 231 | # -r requirements/base.txt 232 | # edx-lint 233 | # python-dateutil 234 | smmap==5.0.1 235 | # via 236 | # -r requirements/base.txt 237 | # gitdb 238 | statistics==1.0.3.5 239 | # via -r requirements/base.txt 240 | stevedore==5.2.0 241 | # via code-annotations 242 | text-unidecode==1.3 243 | # via python-slugify 244 | tomlkit==0.13.0 245 | # via pylint 246 | tox==4.16.0 247 | # via -r requirements/base.txt 248 | tqdm==4.66.4 249 | # via -r requirements/base.txt 250 | uritemplate==4.1.1 251 | # via 252 | # -r requirements/base.txt 253 | # github3-py 254 | urllib3==2.2.2 255 | # via 256 | # -r requirements/base.txt 257 | # requests 258 | # responses 259 | urlobject==2.4.3 260 | # via -r requirements/base.txt 261 | virtualenv==20.26.3 262 | # via 263 | # -r requirements/base.txt 264 | # tox 265 | volatile==2.1.0 266 | # via 267 | # -r requirements/base.txt 268 | # bowler 269 | wheel==0.43.0 270 | # via pip-tools 271 | 272 | # The following packages are considered to be unsafe in a requirements file: 273 | # pip 274 | # setuptools 275 | -------------------------------------------------------------------------------- /requirements/pip-tools.in: -------------------------------------------------------------------------------- 1 | -c constraints.txt 2 | 3 | pip-tools 4 | -------------------------------------------------------------------------------- /requirements/pip-tools.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # make upgrade 6 | # 7 | build==1.2.1 8 | # via pip-tools 9 | click==8.1.7 10 | # via pip-tools 11 | packaging==24.1 12 | # via build 13 | pip-tools==7.4.1 14 | # via -r requirements/pip-tools.in 15 | pyproject-hooks==1.1.0 
16 | # via 17 | # build 18 | # pip-tools 19 | wheel==0.43.0 20 | # via pip-tools 21 | 22 | # The following packages are considered to be unsafe in a requirements file: 23 | # pip 24 | # setuptools 25 | -------------------------------------------------------------------------------- /requirements/pip.in: -------------------------------------------------------------------------------- 1 | # Core dependencies for installing other packages 2 | 3 | -c constraints.txt 4 | 5 | pip 6 | setuptools 7 | wheel 8 | -------------------------------------------------------------------------------- /requirements/pip.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # make upgrade 6 | # 7 | wheel==0.43.0 8 | # via -r requirements/pip.in 9 | 10 | # The following packages are considered to be unsafe in a requirements file: 11 | pip==24.1.2 12 | # via -r requirements/pip.in 13 | setuptools==71.1.0 14 | # via -r requirements/pip.in 15 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openedx/repo-tools/95ce6eeee34adfa1f06b3cb32262983513c2a541/tests/__init__.py -------------------------------------------------------------------------------- /tests/fake_repos/repo_with_nvmrc/.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release CI 2 | on: 3 | push: 4 | branches: 5 | - master 6 | jobs: 7 | release: 8 | name: Release 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout 12 | uses: actions/checkout@v2 13 | with: 14 | fetch-depth: 0 15 | - name: Setup Node.js 16 | uses: actions/setup-node@v2 17 | with: 18 | node-version: 12 19 | - name: Install dependencies 20 | run: npm ci 21 | - name: Validate 
package-lock.json changes 22 | run: make validate-no-uncommitted-package-lock-changes 23 | - name: Lint 24 | run: npm run lint 25 | - name: Test 26 | run: npm run test 27 | - name: i18n_extract 28 | run: npm run i18n_extract 29 | - name: Coverage 30 | uses: codecov/codecov-action@v4 31 | with: 32 | token: ${{ secrets.CODECOV_TOKEN }} 33 | fail_ci_if_error: false 34 | - name: Build 35 | run: npm run build 36 | - name: Release 37 | env: 38 | GITHUB_TOKEN: ${{ secrets.SEMANTIC_RELEASE_GITHUB_TOKEN }} 39 | NPM_TOKEN: ${{ secrets.SEMANTIC_RELEASE_NPM_TOKEN }} 40 | run: npx semantic-release 41 | -------------------------------------------------------------------------------- /tests/fake_repos/repo_with_nvmrc/.nvmrc: -------------------------------------------------------------------------------- 1 | v16 2 | -------------------------------------------------------------------------------- /tests/fake_repos/repo_without_nvmrc/.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release CI 2 | on: 3 | push: 4 | branches: 5 | - master 6 | jobs: 7 | release: 8 | name: Release 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout 12 | uses: actions/checkout@v2 13 | with: 14 | fetch-depth: 0 15 | - name: Setup Node.js 16 | uses: actions/setup-node@v2 17 | with: 18 | node-version: 12 19 | - name: Install dependencies 20 | run: npm ci 21 | - name: Validate package-lock.json changes 22 | run: make validate-no-uncommitted-package-lock-changes 23 | - name: Lint 24 | run: npm run lint 25 | - name: Test 26 | run: npm run test 27 | - name: i18n_extract 28 | run: npm run i18n_extract 29 | - name: Coverage 30 | uses: codecov/codecov-action@v4 31 | with: 32 | token: ${{ secrets.CODECOV_TOKEN }} 33 | fail_ci_if_error: false 34 | - name: Build 35 | run: npm run build 36 | - name: Release 37 | env: 38 | GITHUB_TOKEN: ${{ secrets.SEMANTIC_RELEASE_GITHUB_TOKEN }} 39 | NPM_TOKEN: ${{ secrets.SEMANTIC_RELEASE_NPM_TOKEN }} 
40 | run: npx semantic-release 41 | -------------------------------------------------------------------------------- /tests/pull_request_creator_test_data/minor_diff.txt: -------------------------------------------------------------------------------- 1 | diff --git a/requirements/base.txt b/requirements/base.txt 2 | index e94bd48..132b72b 100644 3 | --- a/requirements/base.txt 4 | +++ b/requirements/base.txt 5 | @@ -1,6 +1,6 @@ 6 | # 7 | -# This file is autogenerated by pip-compile with python 3.8 8 | -# To update, run: 9 | +# This file is autogenerated by pip-compile with Python 3.8 10 | +# by the following command: 11 | # 12 | # make upgrade 13 | # 14 | @@ -12,14 +12,12 @@ iniconfig==1.1.1 15 | -packaging==21.3 16 | +packaging==21.6 17 | 18 | -------------------------------------------------------------------------------- /tests/sample_files/.nvmrc: -------------------------------------------------------------------------------- 1 | v16 2 | -------------------------------------------------------------------------------- /tests/sample_files/sample_ci_file.yml: -------------------------------------------------------------------------------- 1 | name: Python CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | branches: [master] 8 | 9 | jobs: 10 | run_tests: 11 | name: Tests 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-20.04] 16 | python-version: ['3.5', '3.8'] 17 | django-version: [django42] 18 | toxenv: 19 | - 'quality' 20 | - 'docs' 21 | - 'pii_check' 22 | - django42 23 | steps: 24 | - uses: actions/checkout@v2 25 | - name: setup python 26 | uses: actions/setup-python@v2 27 | with: 28 | python-version: ${{ matrix.python-version }} 29 | 30 | - name: Install pip 31 | run: pip install -r requirements/pip.txt 32 | 33 | - name: Install Dependencies 34 | run: pip install -r requirements/ci.txt 35 | 36 | - name: Run Tests 37 | env: 38 | TOXENV: ${{ matrix.toxenv }} 39 | run: tox 40 | 41 | - name: Run Coverage 42 | if: 
matrix.python-version == '3.8' && matrix.toxenv=='django32' 43 | uses: codecov/codecov-action@v1 44 | with: 45 | flags: unittests 46 | fail_ci_if_error: true 47 | -------------------------------------------------------------------------------- /tests/sample_files/sample_ci_file_2.yml: -------------------------------------------------------------------------------- 1 | name: Python CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | branches: [master] 8 | 9 | jobs: 10 | run_tests: 11 | name: Tests 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-20.04] 16 | python-version: ['3.5', '3.6', '3.7', '3.8', '3.12'] 17 | django-version: ['3.2'] 18 | include: 19 | - python-version: "3.5" 20 | toxenv: 'quality' 21 | ubuntu: 20.04 22 | exclude: 23 | - python-version: "3.5" 24 | toxenv: 'quality' 25 | - python-version: "3.7" 26 | toxenv: 'docs' 27 | - python-version: "3.8" 28 | toxenv: 'pii_check' 29 | steps: 30 | - uses: actions/checkout@v2 31 | - name: setup python 32 | uses: actions/setup-python@v2 33 | with: 34 | python-version: ${{ matrix.python-version }} 35 | 36 | - name: Install pip 37 | run: pip install -r requirements/pip.txt 38 | 39 | - name: Install Dependencies 40 | run: pip install -r requirements/ci.txt 41 | 42 | - name: Run Tests 43 | env: 44 | TOXENV: ${{ matrix.toxenv }} 45 | run: tox 46 | 47 | - name: Run Coverage 48 | if: matrix.python-version == '3.8' && matrix.toxenv=='django42' 49 | uses: codecov/codecov-action@v4 50 | with: 51 | token: ${{ secrets.CODECOV_TOKEN }} 52 | flags: unittests 53 | fail_ci_if_error: true 54 | -------------------------------------------------------------------------------- /tests/sample_files/sample_ci_file_3.yml: -------------------------------------------------------------------------------- 1 | name: Python CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | branches: [master] 8 | 9 | jobs: 10 | build: 11 | name: Tests 12 | runs-on: ${{ matrix.os }} 13 | strategy: 
14 | matrix: 15 | os: [ubuntu-20.04] 16 | python-version: ['3.5', '3.8'] 17 | tox-env: ['django32', 'quality', 'docs', 'pii_check'] 18 | steps: 19 | - uses: actions/checkout@v2 20 | - name: setup python 21 | uses: actions/setup-python@v2 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | 25 | - name: Install pip 26 | run: pip install -r requirements/pip.txt 27 | 28 | - name: Install Dependencies 29 | run: pip install -r requirements/ci.txt 30 | 31 | - name: Run Tests 32 | env: 33 | TOXENV: ${{ matrix.tox-env }} 34 | run: tox 35 | 36 | - name: Run Coverage 37 | if: matrix.python-version == '3.8' && matrix.tox-env=='django32' 38 | uses: codecov/codecov-action@v1 39 | with: 40 | flags: unittests 41 | fail_ci_if_error: true 42 | -------------------------------------------------------------------------------- /tests/sample_files/sample_ci_file_4.yml: -------------------------------------------------------------------------------- 1 | name: Python CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | branches: [master] 8 | 9 | jobs: 10 | build: 11 | name: Tests 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-20.04] 16 | python-version: ['3.5', '3.8'] 17 | tox-env: [django32, quality, docs, pii_check] 18 | steps: 19 | - uses: actions/checkout@v2 20 | - name: setup python 21 | uses: actions/setup-python@v2 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | 25 | test: 26 | name: Tests 27 | runs-on: ${{ matrix.os }} 28 | strategy: 29 | matrix: 30 | os: [ubuntu-20.04] 31 | python-version: ['3.5', '3.8'] 32 | django-version: ['3.2', '4.0'] 33 | tox: [django32, quality, docs, pii_check] 34 | -------------------------------------------------------------------------------- /tests/sample_files/sample_ci_file_5.yml: -------------------------------------------------------------------------------- 1 | name: Python CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | branches: [master] 8 | 9 | 
jobs: 10 | run_tests: 11 | name: Tests 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-20.04] 16 | python-version: ['3.5', '3.6', '3.7', '3.8'] 17 | django-version: ['3.0', '3.1', '3.2', '4.0'] 18 | include: 19 | - python-version: '3.5' 20 | toxenv: 'quality' 21 | - python-version: '3.8' 22 | toxenv: 'quality' 23 | exclude: 24 | - python-version: "3.7" 25 | toxenv: 'django32' 26 | - python-version: "3.8" 27 | toxenv: 'django30' 28 | - python-version: "3.8" 29 | django-version: '3.1' 30 | steps: 31 | - uses: actions/checkout@v2 32 | - name: setup python 33 | uses: actions/setup-python@v2 34 | with: 35 | python-version: ${{ matrix.python-version }} 36 | 37 | - name: Install pip 38 | run: pip install -r requirements/pip.txt 39 | 40 | - name: Install Dependencies 41 | run: pip install -r requirements/ci.txt 42 | 43 | - name: Run Tests 44 | env: 45 | TOXENV: ${{ matrix.toxenv }} 46 | run: tox 47 | 48 | - name: Run Coverage 49 | if: matrix.python-version == '3.8' && matrix.toxenv=='django32' 50 | uses: codecov/codecov-action@v1 51 | with: 52 | flags: unittests 53 | fail_ci_if_error: true 54 | -------------------------------------------------------------------------------- /tests/sample_files/sample_ci_file_multiple_jobs.yml: -------------------------------------------------------------------------------- 1 | name: Python CI 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | branches: [master] 8 | 9 | jobs: 10 | build: 11 | name: Tests 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-20.04] 16 | python-version: ['3.5', '3.8'] 17 | tox-env: [django32, quality, docs, pii_check] 18 | steps: 19 | - uses: actions/checkout@v2 20 | - name: setup python 21 | uses: actions/setup-python@v2 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | 25 | django_test: 26 | name: Tests 27 | runs-on: ${{ matrix.os }} 28 | strategy: 29 | matrix: 30 | os: [ubuntu-20.04] 31 | python-version: ['3.8'] 32 | 
django-version: [django32, django40] 33 | 34 | test: 35 | name: Tests 36 | runs-on: ${{ matrix.os }} 37 | strategy: 38 | matrix: 39 | os: [ubuntu-20.04] 40 | python-version: ['3.5', '3.8'] 41 | tox: [quality, docs, pii_check] 42 | -------------------------------------------------------------------------------- /tests/sample_files/sample_django_settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from os.path import abspath, dirname, join 3 | 4 | PROJECT_APPS = ( 5 | 'test.apps.core', 6 | 'test.apps.api', 7 | ) 8 | 9 | MIDDLEWARE = ( 10 | 'corsheaders.middleware.CorsMiddleware', 11 | 'django.contrib.sessions.middleware.SessionMiddleware', 12 | 'django.middleware.locale.LocaleMiddleware', 13 | 'django.middleware.common.CommonMiddleware', 14 | ) 15 | 16 | # TEMPLATE CONFIGURATION 17 | # See: https://docs.djangoproject.com/en/1.11/ref/settings/#templates 18 | TEMPLATES = [ 19 | { 20 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 21 | 'APP_DIRS': True, 22 | 'DIRS': ( 23 | root('templates'), 24 | ), 25 | 'OPTIONS': { 26 | 'context_processors': ( 27 | 'django.contrib.auth.context_processors.auth', 28 | 'django.template.context_processors.debug', 29 | 'django.template.context_processors.i18n', 30 | 'django.template.context_processors.media', 31 | 'django.template.context_processors.static', 32 | 'django.template.context_processors.tz', 33 | 'django.contrib.messages.context_processors.messages', 34 | ), 35 | 'debug': True, # Django will only display debug pages if the global DEBUG setting is set to True. 
36 | } 37 | }, 38 | ] 39 | # END TEMPLATE CONFIGURATION 40 | -------------------------------------------------------------------------------- /tests/sample_files/sample_django_settings_2.py: -------------------------------------------------------------------------------- 1 | import os 2 | from os.path import abspath, dirname, join 3 | 4 | PROJECT_APPS = ( 5 | 'test.apps.core', 6 | 'test.apps.api', 7 | ) 8 | 9 | MIDDLEWARE = ( 10 | 'corsheaders.middleware.CorsMiddleware', 11 | 'django.contrib.sessions.middleware.SessionMiddleware', 12 | 'django.middleware.locale.LocaleMiddleware', 13 | 'django.middleware.common.CommonMiddleware', 14 | ) 15 | 16 | # TEMPLATE CONFIGURATION 17 | # See: https://docs.djangoproject.com/en/1.11/ref/settings/#templates 18 | TEMPLATES = [ 19 | { 20 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 21 | 'APP_DIRS': True, 22 | 'DIRS': ( 23 | root('templates'), 24 | ), 25 | 'OPTIONS': { 26 | 'context_processors': ( 27 | 'django.contrib.auth.context_processors.auth', 28 | 'django.template.context_processors.debug', 29 | 'django.template.context_processors.i18n', 30 | 'django.template.context_processors.media', 31 | 'django.template.context_processors.static', 32 | 'django.template.context_processors.tz', 33 | 'django.contrib.messages.context_processors.messages', 34 | 'django.template.context_processors.request', 35 | ), 36 | 'debug': True, # Django will only display debug pages if the global DEBUG setting is set to True. 
37 | } 38 | }, 39 | ] 40 | # END TEMPLATE CONFIGURATION 41 | 42 | # Django32 settings 43 | DEFAULT_HASHING_ALGORITHM = 'md6' 44 | DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' 45 | -------------------------------------------------------------------------------- /tests/sample_files/sample_node_ci.yml: -------------------------------------------------------------------------------- 1 | name: node_js CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - '**' 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-20.04 14 | steps: 15 | - name: Checkout 16 | uses: actions/checkout@v2 17 | 18 | - name: Setup Nodejs 19 | uses: actions/setup-node@v1 20 | with: 21 | node-version: 12 22 | 23 | - name: Setup npm 24 | run: npm i -g npm@5.6.0 25 | 26 | - name: Install dependencies 27 | run: npm ci 28 | 29 | - name: Cache node modules 30 | uses: actions/cache@v4 31 | with: 32 | path: ~/.npm 33 | key: v1-npm-deps-${{ hashFiles('**/package-lock.json') }} 34 | restore-keys: v1-npm-deps- 35 | 36 | - name: Unit Tests 37 | run: npm run test 38 | 39 | - name: Run Coverage 40 | uses: codecov/codecov-action@v4 41 | with: 42 | token: ${{ secrets.CODECOV_TOKEN }} 43 | fail_ci_if_error: true 44 | 45 | - name: Send failure notification 46 | if: ${{ failure() }} 47 | uses: dawidd6/action-send-mail@v3 48 | with: 49 | server_address: email-smtp.us-east-1.amazonaws.com 50 | server_port: 465 51 | username: ${{secrets.EDX_SMTP_USERNAME}} 52 | password: ${{secrets.EDX_SMTP_PASSWORD}} 53 | subject: CI workflow failed in ${{github.repository}} 54 | to: teaching-and-learning@2u-internal.opsgenie.net 55 | from: github-actions 56 | body: CI workflow in ${{github.repository}} failed! 
For details see "github.com/${{ 57 | github.repository }}/actions/runs/${{ github.run_id }}" 58 | 59 | job2: 60 | runs-on: ubuntu-20.04 61 | steps: 62 | - name: Checkout 63 | uses: actions/checkout@v2 64 | 65 | - name: Setup Nodejs 66 | uses: actions/setup-node@v1 67 | with: 68 | node-version: 12 69 | 70 | - name: Setup npm 71 | run: npm i -g npm@5.6.0 72 | 73 | - name: Install dependencies 74 | run: npm ci 75 | -------------------------------------------------------------------------------- /tests/sample_files/sample_node_ci2.yml: -------------------------------------------------------------------------------- 1 | name: node_js CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - "**" 10 | 11 | jobs: 12 | tests: 13 | runs-on: ubuntu-20.04 14 | strategy: 15 | matrix: 16 | python: [8, 12] 17 | steps: 18 | - name: Checkout 19 | uses: actions/checkout@v2 20 | 21 | - name: Setup Nodejs 22 | uses: actions/setup-node@v1 23 | with: 24 | node-version: ${{ matrix.node }} 25 | 26 | - name: Install dependencies 27 | run: npm ci 28 | 29 | - name: Unit Tests 30 | run: npm run test 31 | 32 | - name: Validate Package Lock 33 | run: make validate-no-uncommitted-package-lock-changes 34 | 35 | - name: Run Lint 36 | run: npm run lint 37 | 38 | - name: Run Test 39 | run: npm run test 40 | 41 | - name: Run Build 42 | run: npm run build 43 | 44 | - name: Run Coverage 45 | uses: codecov/codecov-action@v4 46 | with: 47 | token: ${{ secrets.CODECOV_TOKEN }} 48 | fail_ci_if_error: true 49 | -------------------------------------------------------------------------------- /tests/sample_files/sample_python2_unicode_removal.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test file for the script remove_python2_unicode_compatible.py 3 | """ 4 | from django.utils.encoding import python_2_unicode_compatible 5 | from django.utils.translation import ugettext_lazy as _ 6 | 7 | 8 | @python_2_unicode_compatible 9 | 
class Test: 10 | """ 11 | Random Test class 12 | """ 13 | def __init__(self): 14 | pass 15 | 16 | 17 | @python_2_unicode_compatible 18 | @login_required 19 | class Test2: 20 | """ 21 | Random Test class 22 | """ 23 | def __init__(self): 24 | pass 25 | 26 | 27 | @login_required 28 | class Test3: 29 | """ 30 | Random Test class 31 | """ 32 | def __init__(self): 33 | pass 34 | -------------------------------------------------------------------------------- /tests/sample_files/sample_render_to_response.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from django.shortcuts import render_to_response 3 | 4 | 5 | def get_submissions_for_student_item(request, course_id, student_id, item_id): 6 | student_item_dict = dict( 7 | course_id=course_id, 8 | student_id=student_id, 9 | item_id=item_id, 10 | ) 11 | context = dict(**student_item_dict) 12 | 13 | return render_to_response('submissions.html', context) 14 | -------------------------------------------------------------------------------- /tests/sample_files/sample_render_to_response_2.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from django.contrib.auth.decorators import login_required 4 | from common.djangoapps.edxmako.shortcuts import render_to_response 5 | 6 | 7 | def get_submissions_for_student_item(request, course_id, student_id, item_id): 8 | student_item_dict = dict( 9 | course_id=course_id, 10 | student_id=student_id, 11 | item_id=item_id, 12 | ) 13 | context = dict(**student_item_dict) 14 | 15 | return render_to_response('submissions.html', context) 16 | 17 | -------------------------------------------------------------------------------- /tests/sample_files/sample_setup_file.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import re 4 | 5 | from setuptools import find_packages, setup 6 | 7 | 8 | def 
is_requirement(line): 9 | """ 10 | Return True if the requirement line is a package requirement. 11 | 12 | Returns: 13 | bool: True if the line is not blank, a comment, 14 | a URL, or an included file 15 | """ 16 | # UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why 17 | 18 | return line and line.strip() and not line.startswith(('-r', '#', '-e', 'git+', '-c')) 19 | 20 | 21 | def load_requirements(*requirements_paths): 22 | """ 23 | Load all requirements from the specified requirements files. 24 | 25 | Requirements will include any constraints from files specified 26 | with -c in the requirements files. 27 | Returns a list of requirement strings. 28 | """ 29 | # UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why. 30 | # small change from original SEMGREP update to allow packages with [] in the name 31 | 32 | requirements = {} 33 | constraint_files = set() 34 | 35 | # groups "my-package-name<=x.y.z,..." into ("my-package-name", "<=x.y.z,...") 36 | requirement_line_regex = re.compile(r"([a-zA-Z0-9-_.\[\]]+)([<>=][^#\s]+)?") 37 | 38 | def add_version_constraint_or_raise(current_line, current_requirements, add_if_not_present): 39 | regex_match = requirement_line_regex.match(current_line) 40 | if regex_match: 41 | package = regex_match.group(1) 42 | version_constraints = regex_match.group(2) 43 | existing_version_constraints = current_requirements.get(package, None) 44 | # it's fine to add constraints to an unconstrained package, but raise an error if there are already 45 | # constraints in place 46 | if existing_version_constraints and existing_version_constraints != version_constraints: 47 | raise BaseException(f'Multiple constraint definitions found for {package}:' 48 | f' "{existing_version_constraints}" and "{version_constraints}".' 
49 | f'Combine constraints into one location with {package}' 50 | f'{existing_version_constraints},{version_constraints}.') 51 | if add_if_not_present or package in current_requirements: 52 | current_requirements[package] = version_constraints 53 | 54 | # process .in files and store the path to any constraint files that are pulled in 55 | for path in requirements_paths: 56 | with open(path) as reqs: 57 | for line in reqs: 58 | if is_requirement(line): 59 | add_version_constraint_or_raise(line, requirements, True) 60 | if line and line.startswith('-c') and not line.startswith('-c http'): 61 | constraint_files.add(os.path.dirname(path) + '/' + line.split('#')[0].replace('-c', '').strip()) 62 | 63 | # process constraint files and add any new constraints found to existing requirements 64 | for constraint_file in constraint_files: 65 | with open(constraint_file) as reader: 66 | for line in reader: 67 | if is_requirement(line): 68 | add_version_constraint_or_raise(line, requirements, False) 69 | 70 | # process back into list of pkg><=constraints strings 71 | constrained_requirements = [f'{pkg}{version or ""}' for (pkg, version) in sorted(requirements.items())] 72 | return constrained_requirements 73 | 74 | 75 | def get_version(*file_paths): 76 | """ 77 | Extract the version string from the file at the given relative path fragments. 
78 | """ 79 | filename = os.path.join(os.path.dirname(__file__), *file_paths) 80 | with open(filename, encoding='utf-8') as opened_file: 81 | version_file = opened_file.read() 82 | version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", 83 | version_file, re.M) 84 | if version_match: 85 | return version_match.group(1) 86 | raise RuntimeError('Unable to find version string.') 87 | 88 | 89 | VERSION = get_version("edx_rest_framework_extensions", "__init__.py") 90 | 91 | 92 | setup( 93 | name='edx-drf-extensions', 94 | version=VERSION, 95 | description='edX extensions of Django REST Framework', 96 | author='edX', 97 | author_email='oscm@edx.org', 98 | url='https://github.com/openedx/edx-drf-extensions', 99 | license='Apache 2.0', 100 | classifiers=[ 101 | 'Development Status :: 5 - Production/Stable', 102 | 'Environment :: Web Environment', 103 | 'Intended Audience :: Developers', 104 | 'License :: OSI Approved :: Apache Software License', 105 | 'Operating System :: OS Independent', 106 | 'Programming Language :: Python :: 3', 107 | 'Programming Language :: Python :: 3.8', 108 | 'Framework :: Django', 109 | 'Framework :: Django :: 2.2', 110 | 'Framework :: Django :: 3.0', 111 | 'Framework :: Django :: 3.1', 112 | 'Framework :: Django :: 3.2', 113 | ], 114 | packages=find_packages(exclude=["tests"]), 115 | install_requires=load_requirements('requirements/base.in'), 116 | tests_require=load_requirements('requirements/test.in'), 117 | ) 118 | -------------------------------------------------------------------------------- /tests/sample_openedx.yaml: -------------------------------------------------------------------------------- 1 | # This file describes this Open edX repo, as described in OEP-2: 2 | # http://open-edx-proposals.readthedocs.io/en/latest/oeps/oep-0002.html#specification 3 | 4 | owner: georgebabey 5 | supporting_teams: 6 | - edx/business-enterprise-team 7 | nick: edx-enterprise 8 | track-pulls: true 9 | oeps: 10 | oep-30: 11 | state: True 12 | 
oep-7: True # Python 3 13 | oep-18: True 14 | 15 | tags: 16 | - enterprise 17 | - ent 18 | - library 19 | -------------------------------------------------------------------------------- /tests/sample_tox_config.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | py{38}-django{32,40}-drf{39,310,latest}, 4 | docs, 5 | quality, 6 | version_check, 7 | pii_check, 8 | translations 9 | 10 | [testenv] 11 | deps = 12 | django32: Django>=3.2,<3.3 13 | django40: Django>=4.0,<4.1 14 | drf39: djangorestframework<3.10.0 15 | drf310: djangorestframework<3.11.0 16 | drflatest: djangorestframework 17 | -rrequirements/test.txt 18 | commands = 19 | python -Wd -m pytest {posargs:-n 3} 20 | -------------------------------------------------------------------------------- /tests/sample_tox_config_2.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | py37,py38-django32,django40-drf39,drf310,drflatest, 4 | docs, 5 | quality, 6 | version_check, 7 | pii_check, 8 | translations 9 | 10 | [testenv] 11 | deps = 12 | django32: Django>=3.2,<3.3 13 | django40: Django>=4.0,<4.1 14 | drf39: djangorestframework<3.10.0 15 | drf310: djangorestframework<3.11.0 16 | drflatest: djangorestframework 17 | -rrequirements/test.txt 18 | commands = 19 | python -Wd -m pytest {posargs:-n 3} 20 | -------------------------------------------------------------------------------- /tests/test_actions_modernizer.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for Github Actions Modernizer Script 3 | """ 4 | import os 5 | import shutil 6 | import uuid 7 | from unittest import TestCase 8 | 9 | from edx_repo_tools.codemods.python312 import GithubCIModernizer 10 | from edx_repo_tools.utils import YamlLoader 11 | 12 | 13 | class TestGithubActionsModernizer(TestCase): 14 | 15 | def setUp(self): 16 | self.test_file1 = 
self._setup_local_copy("sample_files/sample_ci_file.yml") 17 | self.test_file2 = self._setup_local_copy("sample_files/sample_ci_file_2.yml") 18 | self.test_file3 = self._setup_local_copy("sample_files/sample_ci_file_3.yml") 19 | 20 | @staticmethod 21 | def _setup_local_copy(file_name): 22 | current_directory = os.path.dirname(__file__) 23 | temp_file = os.path.join(current_directory, str(uuid.uuid4()) + ".yml") 24 | local_file = os.path.join(current_directory, file_name) 25 | shutil.copy2(local_file, temp_file) 26 | return temp_file 27 | 28 | @staticmethod 29 | def _get_updated_yaml_elements(file_path): 30 | modernizer = GithubCIModernizer(file_path) 31 | modernizer.modernize() 32 | yaml_loader = YamlLoader(file_path) 33 | return yaml_loader.elements 34 | 35 | def test_python_matrix_items(self): 36 | ci_elements = TestGithubActionsModernizer._get_updated_yaml_elements(self.test_file1) 37 | python_versions = ci_elements['jobs']['run_tests']['strategy']['matrix']['python-version'] 38 | 39 | self.assertIsInstance(python_versions, list) 40 | self.assertIn('3.8', python_versions) 41 | self.assertIn('3.12', python_versions) 42 | 43 | def test_python_matrix_items_build_tag(self): 44 | ci_elements = TestGithubActionsModernizer._get_updated_yaml_elements(self.test_file3) 45 | python_versions = ci_elements['jobs']['build']['strategy']['matrix']['python-version'] 46 | 47 | self.assertIsInstance(python_versions, list) 48 | self.assertIn('3.8', python_versions) 49 | self.assertIn('3.12', python_versions) 50 | 51 | def tearDown(self): 52 | os.remove(self.test_file1) 53 | os.remove(self.test_file2) 54 | os.remove(self.test_file3) 55 | -------------------------------------------------------------------------------- /tests/test_actions_modernizer_django.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for Github Actions Django Modernizer Script 3 | """ 4 | import os 5 | import shutil 6 | from os.path import basename, dirname, 
join 7 | 8 | from edx_repo_tools.codemods.python312 import GithubCIModernizer 9 | from edx_repo_tools.utils import YamlLoader 10 | 11 | 12 | def setup_local_copy(filepath, tmpdir): 13 | current_directory = dirname(__file__) 14 | local_file = join(current_directory, filepath) 15 | temp_file_path = str(join(tmpdir, basename(filepath))) 16 | shutil.copy2(local_file, temp_file_path) 17 | return temp_file_path 18 | 19 | 20 | def get_updated_yaml_elements(file_path): 21 | modernizer = GithubCIModernizer(file_path) 22 | modernizer.modernize() 23 | yaml_loader = YamlLoader(file_path) 24 | return yaml_loader.elements 25 | 26 | 27 | def test_matrix_items(tmpdir): 28 | """ 29 | Test the scenario where django env is present in the tox-envs within a single job workflow. 30 | """ 31 | test_file = setup_local_copy("sample_files/sample_ci_file.yml", tmpdir) 32 | ci_elements = get_updated_yaml_elements(test_file) 33 | tox_envs = ci_elements['jobs']['run_tests']['strategy']['matrix']['toxenv'] 34 | 35 | assert 'django32' not in tox_envs 36 | assert 'django42' in tox_envs 37 | 38 | 39 | def test_matrix_items_multiple_jobs(tmpdir): 40 | """ 41 | Test the scenarios with multiple jobs including/excluding django in test tox-envs. 42 | """ 43 | test_file = setup_local_copy("sample_files/sample_ci_file_multiple_jobs.yml", tmpdir) 44 | ci_elements = get_updated_yaml_elements(test_file) 45 | 46 | # test the case with django env present in one job 47 | job1_tox_envs = ci_elements['jobs']['build']['strategy']['matrix']['tox-env'] 48 | assert 'django32' not in job1_tox_envs 49 | assert 'django42' in job1_tox_envs 50 | 51 | # test the case with django env present in second job 52 | job2_tox_envs = ci_elements['jobs']['django_test']['strategy']['matrix']['django-version'] 53 | assert 'django32' not in job2_tox_envs 54 | assert 'django42' in job2_tox_envs 55 | 56 | # test the case with no django env present in third job. 
57 | job3_tox_envs = ci_elements['jobs']['test']['strategy']['matrix']['tox'] 58 | assert 'django42' in job3_tox_envs 59 | 60 | def test_include_exclude_list(tmpdir): 61 | """ 62 | Test the scenario with job's matrix having include, exclude sections 63 | """ 64 | test_file = setup_local_copy("sample_files/sample_ci_file_5.yml", tmpdir) 65 | ci_elements = get_updated_yaml_elements(test_file) 66 | include_list = ci_elements['jobs']['run_tests']['strategy']['matrix'].get('include', {}) 67 | exclude_list = ci_elements['jobs']['run_tests']['strategy']['matrix'].get('exclude', {}) 68 | 69 | for item in list(include_list) + list(exclude_list): 70 | if 'django-version' in item: 71 | assert item['django-version'] != '3.2' 72 | if 'toxenv' in item: 73 | assert item['toxenv'] != 'django42' 74 | -------------------------------------------------------------------------------- /tests/test_add_new_django32_settings.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for new django32 settings codemod 3 | """ 4 | import os 5 | import shutil 6 | from os.path import dirname, basename, join 7 | from edx_repo_tools.codemods.django3.add_new_django32_settings import SettingsModernizer 8 | 9 | def setup_local_copy(filepath, tmpdir): 10 | current_directory = dirname(__file__) 11 | local_file = join(current_directory, filepath) 12 | temp_file_path = str(join(tmpdir, basename(filepath))) 13 | shutil.copy2(local_file, temp_file_path) 14 | return temp_file_path 15 | 16 | def test_update_existing_hashing_algorithm(tmpdir): 17 | """ 18 | Test the case where an old hashing algorithm is already present in the settings 19 | """ 20 | test_file = setup_local_copy("sample_files/sample_django_settings_2.py", tmpdir) 21 | setting_modernizer = SettingsModernizer(setting_path=test_file, is_service=True) 22 | setting_modernizer.update_hash_algorithm() 23 | with open(test_file) as test_setting_file: 24 | target_algorithm = 
f"{setting_modernizer.DEFAULT_ALGORITHM_KEY} = '{setting_modernizer.NEW_HASHING_ALGORITHM}'" 25 | assert target_algorithm in test_setting_file.read() 26 | 27 | def test_add_new_hashing_algorithm(tmpdir): 28 | """ 29 | Test the case where no hashing algorithm is present in the settings 30 | """ 31 | test_file = setup_local_copy("sample_files/sample_django_settings.py", tmpdir) 32 | setting_modernizer = SettingsModernizer(setting_path=test_file, is_service=True) 33 | setting_modernizer.update_hash_algorithm() 34 | with open(test_file) as test_setting_file: 35 | target_algorithm = f"{setting_modernizer.DEFAULT_ALGORITHM_KEY} = '{setting_modernizer.NEW_HASHING_ALGORITHM}'" 36 | assert target_algorithm in test_setting_file.read() 37 | 38 | def test_update_existing_auto_field(tmpdir): 39 | """ 40 | Test the case where an old value of auto field is present in the settings 41 | """ 42 | test_file = setup_local_copy("sample_files/sample_django_settings_2.py", tmpdir) 43 | setting_modernizer = SettingsModernizer(setting_path=test_file, is_service=True) 44 | setting_modernizer.update_auto_field() 45 | with open(test_file) as test_setting_file: 46 | target_field = f"{setting_modernizer.DEFAULT_FIELD_KEY} = '{setting_modernizer.NEW_AUTO_FIELD}'" 47 | assert target_field in test_setting_file.read() 48 | 49 | def test_add_new_auto_field(tmpdir): 50 | """ 51 | Test the case where no auto field value is present in the settings 52 | """ 53 | test_file = setup_local_copy("sample_files/sample_django_settings.py", tmpdir) 54 | setting_modernizer = SettingsModernizer(setting_path=test_file, is_service=True) 55 | setting_modernizer.update_auto_field() 56 | with open(test_file) as test_setting_file: 57 | target_field = f"{setting_modernizer.DEFAULT_FIELD_KEY} = '{setting_modernizer.NEW_AUTO_FIELD}'" 58 | assert target_field in test_setting_file.read() 59 | 60 | def test_add_new_context_processor(tmpdir): 61 | """ 62 | Test the case where no request context_processor is present in the 
settings 63 | """ 64 | test_file = setup_local_copy("sample_files/sample_django_settings.py", tmpdir) 65 | setting_modernizer = SettingsModernizer(setting_path=test_file, is_service=True) 66 | setting_modernizer.update_context_processors() 67 | with open(test_file) as test_setting_file: 68 | assert setting_modernizer.NEW_PROCESSOR in test_setting_file.read() 69 | -------------------------------------------------------------------------------- /tests/test_gha_release_workflow_modernizer.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for GitHub Actions Modernizer Script 3 | """ 4 | import shutil 5 | import uuid 6 | from os.path import basename, dirname, join 7 | 8 | from edx_repo_tools.codemods.node16 import GithubNodeReleaseWorkflowModernizer 9 | from edx_repo_tools.utils import YamlLoader 10 | 11 | NODE_JS_SETUP_ACTION_LIST = ['actions/setup-node@v2', 'actions/setup-node@v1'] 12 | 13 | 14 | def setup_local_dir(dirpath, tmpdir): 15 | current_directory = dirname(__file__) 16 | local_dir = join(current_directory, dirpath) 17 | fake_repo_path = tmpdir.mkdir(f"fake_repo_{uuid.uuid4()}") 18 | shutil.copytree(local_dir, fake_repo_path, dirs_exist_ok=True) 19 | return fake_repo_path 20 | 21 | def get_updated_yaml_elements(file_path): 22 | modernizer = GithubNodeReleaseWorkflowModernizer(file_path) 23 | modernizer.modernize() 24 | yaml_loader = YamlLoader(file_path) 25 | return yaml_loader.elements 26 | 27 | 28 | def test_add_node_env_step(tmpdir): 29 | node_env_step = None 30 | fake_repo_path = setup_local_dir('fake_repos/repo_with_nvmrc/',tmpdir) 31 | test_file = join(fake_repo_path, ".github/workflows/release.yml") 32 | ci_elements = get_updated_yaml_elements(test_file) 33 | for step in ci_elements['jobs']['release']['steps']: 34 | if 'name' in step and step['name'] == 'Setup Nodejs Env': 35 | node_env_step = step 36 | assert node_env_step['run'] == 'echo "NODE_VER=`cat .nvmrc`" >> $GITHUB_ENV' 37 | 38 | 39 | def 
test_node_version_value(tmpdir): 40 | fake_repo_without_nvmrc_path = setup_local_dir('fake_repos/repo_without_nvmrc/',tmpdir) 41 | test_file = join(fake_repo_without_nvmrc_path, ".github/workflows/release.yml") 42 | ci_elements_without_rc_file_present = get_updated_yaml_elements(test_file) 43 | 44 | node_setup_step = None 45 | for step in ci_elements_without_rc_file_present['jobs']['release']['steps']: 46 | if 'uses' in step and step['uses'] in NODE_JS_SETUP_ACTION_LIST: 47 | node_setup_step = step 48 | 49 | assert node_setup_step['with']['node-version'] == 16 50 | 51 | fake_repo_with_nvmrc_path = setup_local_dir('fake_repos/repo_with_nvmrc/',tmpdir) 52 | test_file = join(fake_repo_with_nvmrc_path, ".github/workflows/release.yml") 53 | ci_elements_with_rc_file_present = get_updated_yaml_elements(test_file) 54 | 55 | node_setup_step = None 56 | for step in ci_elements_with_rc_file_present['jobs']['release']['steps']: 57 | if 'uses' in step and step['uses'] in NODE_JS_SETUP_ACTION_LIST: 58 | node_setup_step = step 59 | assert node_setup_step['with']['node-version'] == '${{ env.NODE_VER }}' 60 | -------------------------------------------------------------------------------- /tests/test_modernize_openedx_yaml.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | 4 | from ruamel.yaml import YAML 5 | 6 | from edx_repo_tools.modernize_openedx_yaml import YamlModernizer 7 | 8 | 9 | def setup_local_copy(tmpdir): 10 | sample_yam_file = os.path.join(os.path.dirname(__file__), "sample_openedx.yaml") 11 | temp_file_path = str(tmpdir.join('test-openedx.yaml')) 12 | shutil.copy2(sample_yam_file, temp_file_path) 13 | return temp_file_path 14 | 15 | 16 | def load_yaml(sample_yam_file): 17 | with open(sample_yam_file) as file_stream: 18 | return YAML().load(file_stream) 19 | 20 | 21 | def test_travis_modernizer(tmpdir): 22 | test_yaml_file = setup_local_copy(tmpdir) 23 | modernizer = YamlModernizer(test_yaml_file) 
24 | modernizer.modernize() 25 | updated_yaml = load_yaml(test_yaml_file) 26 | assert 'owner' not in updated_yaml.keys() 27 | assert 'supporting_teams' not in updated_yaml.keys() 28 | assert 'track_pulls' not in updated_yaml.keys() 29 | assert 'track-pulls' not in updated_yaml.keys() 30 | -------------------------------------------------------------------------------- /tests/test_node_ci_modernizer.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for Github Actions Modernizer Script 3 | """ 4 | import shutil 5 | from os.path import dirname, basename, join 6 | 7 | from edx_repo_tools.codemods.node16 import GithubCiNodeModernizer 8 | from edx_repo_tools.utils import YamlLoader 9 | 10 | 11 | def setup_local_copy(filepath, tmpdir): 12 | current_directory = dirname(__file__) 13 | local_file = join(current_directory, filepath) 14 | temp_file_path = str(join(tmpdir, basename(filepath))) 15 | shutil.copy2(local_file, temp_file_path) 16 | return temp_file_path 17 | 18 | 19 | def get_updated_yaml_elements(file_path): 20 | modernizer = GithubCiNodeModernizer(file_path) 21 | modernizer.modernize() 22 | yaml_loader = YamlLoader(file_path) 23 | return yaml_loader.elements 24 | 25 | 26 | def test_node_version_value(tmpdir): 27 | test_file = setup_local_copy("sample_files/sample_node_ci.yml", tmpdir) 28 | ci_elements = get_updated_yaml_elements(test_file) 29 | 30 | node_setup_step = None 31 | for step in ci_elements['jobs']['tests']['steps']: 32 | if step['name'] == 'Setup Nodejs': 33 | node_setup_step = step 34 | 35 | assert '${{ matrix.node }}' in node_setup_step['with']['node-version'] 36 | 37 | 38 | def test_npm_version_value(tmpdir): 39 | test_file = setup_local_copy("sample_files/sample_node_ci.yml", tmpdir) 40 | ci_elements = get_updated_yaml_elements(test_file) 41 | 42 | npm_setup_step = None 43 | for step in ci_elements['jobs']['tests']['steps']: 44 | if step['name'] == 'Setup npm': 45 | npm_setup_step = step 46 | 47 | 
assert '8.x.x' in npm_setup_step['run'] 48 | 49 | 50 | def test_job_name(tmpdir): 51 | test_file = setup_local_copy("sample_files/sample_node_ci.yml", tmpdir) 52 | ci_elements = get_updated_yaml_elements(test_file) 53 | 54 | assert 'tests' in ci_elements['jobs'] 55 | 56 | 57 | def test_add_matrix_items(tmpdir): 58 | test_file = setup_local_copy("sample_files/sample_node_ci.yml", tmpdir) 59 | ci_elements = get_updated_yaml_elements(test_file) 60 | 61 | assert 16 in ci_elements['jobs']['tests']['strategy']['matrix']['node'] 62 | 63 | 64 | def test_update_matrix_items(tmpdir): 65 | test_file = setup_local_copy("sample_files/sample_node_ci2.yml", tmpdir) 66 | ci_elements = get_updated_yaml_elements(test_file) 67 | 68 | assert 16 in ci_elements['jobs']['tests']['strategy']['matrix']['node'] 69 | -------------------------------------------------------------------------------- /tests/test_remove_python2_unicode_compatible.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for verifying the output of the script edx_repo_tools/codemods/django3/remove_python2_unicode_compatible.py 3 | """ 4 | import os 5 | from unittest import TestCase 6 | import shutil 7 | import uuid 8 | import subprocess 9 | from edx_repo_tools.codemods.django3 import remove_python2_unicode_compatible 10 | 11 | 12 | def setup_local_copy(tmpdir): 13 | """ 14 | Setup local copy of the sample file for tests 15 | """ 16 | sample_test_file = os.path.join(os.path.dirname(__file__), "sample_files/sample_python2_unicode_removal.py") 17 | temp_file_path = str(tmpdir.join('test-python2-unicode.py')) 18 | shutil.copy2(sample_test_file, temp_file_path) 19 | return temp_file_path 20 | 21 | 22 | def test_python2_unicode_compatible_removed(tmpdir): 23 | """ 24 | Test the script on a sample file to make sure it is removing python_2_unicode_compatible imports and decorators. 
25 | """ 26 | sample_file_path = setup_local_copy(tmpdir) 27 | with open(sample_file_path) as sample_file: 28 | sample_code = sample_file.read() 29 | assert "python_2_unicode_compatible" in sample_code 30 | remove_python2_unicode_compatible.run_removal_query(sample_file_path) 31 | with open(sample_file_path) as sample_file: 32 | sample_code = sample_file.read() 33 | assert "python_2_unicode_compatible" not in sample_code 34 | -------------------------------------------------------------------------------- /tests/test_replace_render_to_response.py: -------------------------------------------------------------------------------- 1 | """Tests for replace_render_to_response script""" 2 | import subprocess 3 | 4 | import os 5 | import shutil 6 | 7 | 8 | def setup_local_copy(tmpdir, path): 9 | sample_file = os.path.join(os.path.dirname(__file__), "sample_files", path) 10 | temp_file_path = str(tmpdir.join('sample_render_to_response_tmp.py')) 11 | shutil.copy2(sample_file, temp_file_path) 12 | return temp_file_path 13 | 14 | 15 | def test_replace_script(tmpdir): 16 | """ 17 | Test replace script on a file to make sure it is renaming it properly and 18 | also adding request parameter to updated function call 19 | """ 20 | sample_file_path = setup_local_copy(tmpdir, "sample_render_to_response.py") 21 | with open(sample_file_path) as sample_file: 22 | sample_code = sample_file.read() 23 | assert "render_to_response" in sample_code 24 | assert "render_to_response(request, " not in sample_code 25 | assert "render(request, " not in sample_code 26 | subprocess.call(['replace_render_to_response', sample_file_path]) 27 | with open(sample_file_path) as sample_file: 28 | sample_code = sample_file.read() 29 | assert "render_to_response" not in sample_code 30 | assert "render" in sample_code 31 | # Adding request parameter to function call 32 | assert "render(request, " in sample_code 33 | 34 | 35 | def test_replace_script_avoid_non_django_version(tmpdir): 36 | """ 37 | Test replace 
script on the file in which function name shouldn't be changed 38 | as it is being imported from some local directory instead of django.shortcuts 39 | """ 40 | sample_file_path = setup_local_copy(tmpdir, "sample_render_to_response_2.py") 41 | with open(sample_file_path) as sample_file: 42 | sample_code = sample_file.read() 43 | assert "render_to_response" in sample_code 44 | assert "render(request, " not in sample_code 45 | subprocess.call(['replace_render_to_response', sample_file_path]) 46 | with open(sample_file_path) as sample_file: 47 | sample_code = sample_file.read() 48 | # Shouldn't replace render_to_response in this file 49 | assert "render_to_response" in sample_code 50 | assert "render(request, " not in sample_code 51 | -------------------------------------------------------------------------------- /tests/test_repo_checks.py: -------------------------------------------------------------------------------- 1 | """ 2 | (Incomplete) test suite for repo_checks. 3 | """ 4 | from __future__ import annotations 5 | 6 | from unittest.mock import MagicMock, call, patch 7 | 8 | import pytest 9 | 10 | from edx_repo_tools.repo_checks import repo_checks 11 | 12 | 13 | @pytest.fixture 14 | def maintenance_label(): 15 | """ 16 | Quickly make a basic label to return via the API. 17 | """ 18 | label = MagicMock() 19 | label.name = "maintenance" 20 | label.color = "ff9125" 21 | label.description = ( 22 | "Routine upkeep necessary for the health of the platform" 23 | ) 24 | 25 | return label 26 | 27 | 28 | # Our list of expected labels, normally defined in labels.yaml. 
29 | labels_yaml = [ 30 | { 31 | "name": "maintenance", 32 | "color": "ff9125", 33 | "description": "Routine upkeep necessary for the health of the platform", 34 | } 35 | ] 36 | 37 | 38 | @patch.object(repo_checks.Labels, "labels", labels_yaml) 39 | class TestLabelsCheck: 40 | def test_check_for_no_change(self, maintenance_label): 41 | api = MagicMock() 42 | api.issues.list_labels_for_repo.side_effect = [[maintenance_label], None] 43 | check_cls = repo_checks.Labels(api, "test_org", "test_repo") 44 | 45 | # Make sure that the check returns True, indicating that no changes need to be made. 46 | assert check_cls.check()[0] 47 | 48 | def test_addition(self, maintenance_label): 49 | api = MagicMock() 50 | api.issues.list_labels_for_repo.return_value = [] 51 | check_cls = repo_checks.Labels(api, "test_org", "test_repo") 52 | 53 | # The check should be false because the maintenance label should be missing. 54 | assert check_cls.check()[0] == False 55 | 56 | check_cls.fix() 57 | assert api.issues.create_label.called 58 | 59 | call_args = api.issues.create_label.call_args 60 | expected_call = call( 61 | owner="test_org", 62 | repo="test_repo", 63 | name=maintenance_label.name, 64 | color=maintenance_label.color, 65 | description=maintenance_label.description, 66 | ) 67 | assert call_args == expected_call 68 | assert not api.issues.update_label.called 69 | 70 | def test_update_label(self, maintenance_label): 71 | maintenance_label.name = ":+1: Ma.in-t 'e'n_a\"nce!\" :-1:" 72 | api = MagicMock() 73 | api.issues.list_labels_for_repo.side_effect = [[maintenance_label], None] 74 | 75 | check_cls = repo_checks.Labels(api, "test_org", "test_repo") 76 | 77 | assert check_cls.check()[0] == False 78 | check_cls.fix() 79 | 80 | call_args = api.issues.update_label.call_args 81 | expected_call = call( 82 | owner="test_org", 83 | repo="test_repo", 84 | name=maintenance_label.name, 85 | color=maintenance_label.color, 86 | new_name="maintenance", 87 | 
description=maintenance_label.description, 88 | ) 89 | 90 | assert call_args == expected_call 91 | assert not api.issues.create_label.called 92 | assert api.issues.update_label.called 93 | -------------------------------------------------------------------------------- /tests/test_setup_file_modernizer.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for setup file modernizer 3 | """ 4 | import os 5 | import shutil 6 | from os.path import basename, dirname, join 7 | 8 | from edx_repo_tools.codemods.django3.setup_file_modernizer import SetupFileModernizer 9 | 10 | 11 | def setup_local_copy(filepath, tmpdir): 12 | current_directory = dirname(__file__) 13 | local_file = join(current_directory, filepath) 14 | temp_file_path = str(join(tmpdir, basename(filepath))) 15 | shutil.copy2(local_file, temp_file_path) 16 | return temp_file_path 17 | 18 | def test_remove_existing_classifiers(tmpdir): 19 | """ 20 | Test the case where old classifiers are removed 21 | """ 22 | test_file = setup_local_copy("sample_files/sample_setup_file.py", tmpdir) 23 | setup_file_modernizer = SetupFileModernizer() 24 | file_data = open(test_file).read() 25 | updated_file_data = setup_file_modernizer._remove_outdated_classifiers(file_data) 26 | assert "'Framework :: Django :: 3.1'" not in updated_file_data 27 | assert "'Framework :: Django :: 3.0'" not in updated_file_data 28 | assert "'Framework :: Django :: 2.2'" not in updated_file_data 29 | assert "'Framework :: Django :: 3.2'" in updated_file_data 30 | 31 | def test_add_new_classifiers(tmpdir): 32 | """ 33 | Test the case where new classifiers are added 34 | """ 35 | test_file = setup_local_copy("sample_files/sample_setup_file.py", tmpdir) 36 | setup_file_modernizer = SetupFileModernizer() 37 | file_data = open(test_file).read() 38 | updated_file_data = setup_file_modernizer._add_new_classifiers(file_data) 39 | assert "'Framework :: Django :: 4.0'" in updated_file_data 40 | 
# tests/test_tox_modernizer.py
"""Tests for the tox modernizer (python312 codemod)."""
import os
import re
import shutil
import uuid
from configparser import ConfigParser, NoSectionError
from unittest import TestCase

from edx_repo_tools.codemods.python312 import ConfigReader


class TestToxModernizer(TestCase):
    """Verify that ConfigReader's modernizer rewrites tox.ini files in place."""

    def setUp(self):
        # Work on throwaway copies so the checked-in sample configs stay pristine.
        self.config_file1 = self._setup_local_copy("sample_tox_config.ini")
        self.config_file2 = self._setup_local_copy("sample_tox_config_2.ini")

    @staticmethod
    def _setup_local_copy(file_name):
        """Copy a sample config next to the tests under a unique name; return its path."""
        current_directory = os.path.dirname(__file__)
        temp_file = os.path.join(current_directory, str(uuid.uuid4()) + ".ini")
        local_file = os.path.join(current_directory, file_name)
        shutil.copy2(local_file, temp_file)
        return temp_file

    def _get_parser(self, file_path):
        """Modernize ``file_path`` in place and return a ConfigParser over the result."""
        modernizer = ConfigReader(file_path=file_path).get_modernizer()
        modernizer.modernize()
        parser = ConfigParser()
        parser.read(file_path)
        # The modernizer must never emit tabs -- tox chokes on them.
        self._assert_no_tabs(file_path)
        return parser

    def _assert_django_dependencies_replaced(self, config_file):
        """Assert the django32 dependency pin was swapped for django42."""
        parser = self._get_parser(config_file)
        dependencies = parser['testenv']['deps']

        self.assertNotIn("django32:", dependencies)
        self.assertIn("django42:", dependencies)

    def _assert_replaces_python_interpreters(self, config_file):
        """Assert the tox envlist was rewritten to the py{38, 312} interpreters."""
        parser = self._get_parser(config_file)
        env_list = parser['tox']['envlist']

        self.assertIn("py{38, 312}", env_list)

    def _assert_replaces_django_runners(self, config_file):
        """Assert the tox envlist was rewritten to the django{42} factor."""
        parser = self._get_parser(config_file)
        env_list = parser['tox']['envlist']

        self.assertIn("django{42}", env_list)

    def _assert_replaces_django_dependencies(self, config_file):
        self._assert_django_dependencies_replaced(config_file)

    def _assert_adds_django_dependencies(self, config_file):
        """Strip any django42 pin first, so the modernizer has to *add* it back."""
        parser = ConfigParser()
        parser.read(config_file)

        dependencies = parser['testenv']['deps']
        dependencies = re.sub(r"[^\n]*django42.*\n", '', dependencies)
        parser['testenv']['deps'] = dependencies

        with open(config_file, 'w') as configfile:
            parser.write(configfile)

        self._assert_django_dependencies_replaced(config_file)

    def _assert_no_tabs(self, config_file):
        with open(config_file) as configfile:
            assert "\t" not in configfile.read()

    def test_raises_error_no_empty_config(self):
        tox_config = {}
        self.assertRaises(NoSectionError, ConfigReader(config_dict=tox_config).get_modernizer)

    def test_raises_error_bad_config(self):
        tox_config = {'section1': {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'},
                      'section2': {'keyA': 'valueA', 'keyB': 'valueB', 'keyC': 'valueC'},
                      'section3': {'foo': 'x', 'bar': 'y', 'baz': 'z'}}

        # Pass the dict by keyword, consistent with the test above; the original
        # passed it positionally, which could bind to ``file_path`` instead.
        self.assertRaises(NoSectionError, ConfigReader(config_dict=tox_config).get_modernizer)

    def test_replaces_python_interpreters(self):
        self._assert_replaces_python_interpreters(self.config_file1)
        self._assert_replaces_python_interpreters(self.config_file2)

    def test_replaces_django_runners(self):
        self._assert_replaces_django_runners(self.config_file1)
        self._assert_replaces_django_runners(self.config_file2)

    def test_django_dependency_replaced(self):
        self._assert_django_dependencies_replaced(self.config_file1)
        self._assert_django_dependencies_replaced(self.config_file2)

    def test_adds_django_dependency(self):
        self._assert_adds_django_dependencies(self.config_file1)
        self._assert_adds_django_dependencies(self.config_file2)

    def tearDown(self):
        os.remove(self.config_file1)
        os.remove(self.config_file2)
-------------------------------------------------------------------------------- /tests/test_travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | python: 4 | - "3.5" 5 | - "3.6" 6 | 7 | branches: 8 | only: 9 | - master 10 | 11 | sudo: required 12 | 13 | cache: 14 | - pip 15 | 16 | env: 17 | - TOXENV=django111 18 | - TOXENV=django20 19 | - TOXENV=django21 20 | - TOXENV=django22 21 | - TOXENV=quality-and-translations 22 | - TOXENV=acceptance-tests 23 | 24 | after_success: 25 | - pip install --upgrade codecov 26 | - make exec-coverage 27 | - codecov 28 | -------------------------------------------------------------------------------- /tests/test_travis_2.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | branches: 4 | only: 5 | - master 6 | 7 | sudo: required 8 | 9 | cache: 10 | - pip 11 | 12 | install: 13 | pip install tox 14 | 15 | matrix: 16 | include: 17 | - python: 3.6 18 | env: 19 | DJANGO_ENV=django21 20 | TESTNAME=quality-and-js 21 | TARGETS="check_translations_up_to_date clean_static quality" 22 | - python: 3.8 23 | env: 24 | DJANGO_ENV=django21 25 | TESTNAME=quality-and-js 26 | TARGETS="check_translations_up_to_date clean_static quality" 27 | - python: 3.5 28 | env: 29 | DJANGO_ENV=django21 30 | TESTNAME=quality-and-js 31 | TARGETS="check_translations_up_to_date clean_static quality" 32 | - python: 3.5 33 | env: 34 | TESTNAME=quality-and-js 35 | TARGETS="check_translations_up_to_date clean_static quality" 36 | - python: 3.5 37 | env: 38 | DJANGO_ENV=django22 39 | TESTNAME=test-python 40 | TARGETS="requirements.js clean_static static validate_python" 41 | 42 | after_success: 43 | - pip install -U codecov 44 | - docker exec ecommerce_testing /edx/app/ecommerce/ecommerce/.travis/run_coverage.sh 45 | - codecov 46 | -------------------------------------------------------------------------------- /tests/test_travis_modernizer.py: 
"""Tests for the TravisModernizer django3 codemod."""
import os
import re
import shutil
import uuid
from unittest import TestCase

from edx_repo_tools.codemods.django3 import TravisModernizer, DJANGO_PATTERN
from edx_repo_tools.utils import YamlLoader


class TestTravisModernizer(TestCase):
    """Verify TravisModernizer updates python/django entries in .travis.yml files."""

    def setUp(self):
        # Work on throwaway copies so the checked-in sample files stay pristine.
        self.test_file1 = self._setup_local_copy("test_travis.yml")
        self.test_file2 = self._setup_local_copy("test_travis_2.yml")

    @staticmethod
    def _setup_local_copy(file_name):
        """Copy a sample .travis.yml next to the tests under a unique name; return its path."""
        current_directory = os.path.dirname(__file__)
        temp_file = os.path.join(current_directory, str(uuid.uuid4()) + ".yml")
        local_file = os.path.join(current_directory, file_name)
        shutil.copy2(local_file, temp_file)
        return temp_file

    @staticmethod
    def _get_updated_yaml_elements(file_path):
        """Modernize ``file_path`` in place and return the re-parsed YAML mapping."""
        modernizer = TravisModernizer(file_path)
        modernizer.modernize()
        yaml_loader = YamlLoader(file_path)
        return yaml_loader.elements

    def test_python_env_items(self):
        travis_elements = TestTravisModernizer._get_updated_yaml_elements(self.test_file1)
        python_versions = travis_elements.get("python")

        self.assertIsInstance(python_versions, list)
        # BUG FIX: the original called assertTrue(len(...), 1); the second argument
        # of assertTrue is only the failure *message*, so it asserted nothing.
        self.assertEqual(len(python_versions), 1)
        python_version = python_versions[0]

        self.assertEqual(str(python_version), '3.8')

    def test_python_matrix_items(self):
        travis_elements = TestTravisModernizer._get_updated_yaml_elements(self.test_file2)
        python_versions = [matrix_item for matrix_item in travis_elements.get("matrix").get("include")
                           if 'python' in matrix_item.keys()]

        self.assertIsInstance(python_versions, list)
        # BUG FIX: was a no-op two-argument assertTrue (see test_python_env_items).
        self.assertEqual(len(python_versions), 1)
        python_version = python_versions[0].get('python')

        self.assertEqual(python_version, '3.8')

    def test_django_env_items(self):
        travis_elements = TestTravisModernizer._get_updated_yaml_elements(self.test_file1)
        django_envs = [django_env_item for django_env_item in travis_elements.get("env")
                       if re.search(DJANGO_PATTERN, django_env_item)]
        self.assertIsInstance(django_envs, list)
        # BUG FIX: was the no-op assertTrue(len(django_envs), 3). Assert the list is
        # non-empty here; the per-version membership checks below do the real work.
        self.assertTrue(django_envs)

        self.assertTrue(any("django22" in django_env for django_env in django_envs))
        self.assertTrue(any("django30" in django_env for django_env in django_envs))
        self.assertTrue(any("django31" in django_env for django_env in django_envs))
        self.assertTrue(any("django32" in django_env for django_env in django_envs))

    def test_django_matrix_items(self):
        travis_elements = TestTravisModernizer._get_updated_yaml_elements(self.test_file2)
        django_matrix_envs = [matrix_item for matrix_item in travis_elements.get("matrix").get("include")
                              if re.search(DJANGO_PATTERN, matrix_item.get("env"))]

        self.assertIsInstance(django_matrix_envs, list)
        # BUG FIX: was the no-op assertTrue(len(django_matrix_envs), 3).
        self.assertTrue(django_matrix_envs)

        django_envs = [matrix_item.get("env") for matrix_item in django_matrix_envs]

        self.assertTrue(any("django22" in django_env for django_env in django_envs))
        self.assertTrue(any("django30" in django_env for django_env in django_envs))
        self.assertTrue(any("django31" in django_env for django_env in django_envs))
        self.assertTrue(any("django32" in django_env for django_env in django_envs))

    def tearDown(self):
        os.remove(self.test_file1)
        os.remove(self.test_file2)