├── .github └── workflows │ ├── bundle_cron.yml │ ├── learn_cron.yml │ ├── pre-commit.yml │ ├── reports_cron.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .pylintrc ├── .reuse └── dep5 ├── CODE_OF_CONDUCT.md ├── CODE_OF_CONDUCT.md.license ├── LICENSE ├── LICENSES ├── MIT.txt └── Unlicense.txt ├── README.rst ├── README.rst.license ├── adabot-cron.sh ├── adabot.screenrc ├── adabot ├── __init__.py ├── arduino_libraries.py ├── circuitpython_bundle.py ├── circuitpython_libraries.py ├── circuitpython_library_download_stats.py ├── circuitpython_library_patches.py ├── circuitpython_library_release.py ├── github_requests.py ├── lib │ ├── assign_hacktober_label.py │ ├── blinka_funcs.py │ ├── bundle_announcer.py │ ├── circuitpython_library_validators.py │ └── common_funcs.py ├── pypi_requests.py └── update_cp_org_libraries.py ├── patches ├── 0001-Added-help-text-and-problem-matcher.patch ├── 0001-Added-pre-commit-and-SPDX-copyright.patch ├── 0001-Added-pre-commit-config-file.patch ├── 0001-Added-pull-request-template.patch ├── 0001-Added-pylint-disable-for-f-strings-in-tests-director.patch ├── 0001-Disabled-unspecified-encoding-pylint-check.patch ├── 0001-First-gitignore-patch.patch ├── 0001-First-part-of-patch.patch ├── 0001-Fixed-readthedocs-build.patch ├── 0001-Globally-disabled-consider-using-f-string-pylint-che.patch ├── 0001-Hardcoded-Black-and-REUSE-versions.patch ├── 0001-Increase-duplicate-code-check-threshold.patch ├── 0001-Moved-CI-to-Python-3.7.patch ├── 0001-Pylint,-pre-commit,-readthedocs-patch.patch ├── 0001-Re-added-pylint-install-to-build.yml.patch ├── 0001-Removed-duplicate-code-from-library-pylint-disable.patch ├── 0001-Removed-pylint-process-from-github-workflow-patch-2.patch ├── 0001-Removed-pylint-process-from-github-workflow.patch ├── 0001-Second-gitignore-patch.patch ├── 0001-Update-Black-to-latest.patch ├── 0001-Updated-readthedocs-file.patch ├── 0001-add-sphinx-configuration-to-rtd.yaml.patch ├── 0001-build.yml-add-black-formatting-check.patch ├── 0001-remove-deprecated-get_html_theme_path-call-alt-config.patch ├── 0001-remove-deprecated-get_html_theme_path-call.patch ├── 0001-update-rtd.yml-file.patch ├── 0003-actions-Remove-bad-whitespace-from-pylint-invocation.patch ├── 01192023_release_patch.patch ├── 01SEP2022_venv_gitignore.patch ├── 05302022_set_doc_language.patch ├── 09AUG2022-setuptools-scm.patch ├── 09MAY2023_precommit_update.patch ├── 11OCT2023-add-sphinx-rtd-theme-to-docs-reqs.patch ├── 11SEP2023_fix_rtd_theme.patch ├── 14MAY2023_pylintrc_jquery.patch ├── inclusive-terminology.patch └── lines-similarity.patch ├── pyproject.toml ├── pytest.ini ├── requirements.txt ├── template-env.sh ├── tests ├── conftest.py ├── integration │ ├── test_arduino_libraries.py │ ├── test_circuitpython_libraries.py │ └── test_update_cp_org_libraries.py └── unit │ ├── test_blinka_funcs.py │ ├── test_common_funcs.py │ ├── test_github_requests.py │ └── test_pypi_requests.py └── tools ├── README.md ├── README.md.license ├── ci_status.py ├── docs_status.py ├── file_compare.py ├── find_text.py ├── git_functionality.py ├── iterate_libraries.py ├── library_functions.py ├── run_black.sh └── runner.py /.github/workflows/bundle_cron.yml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2019 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | name: Update Bundles 6 | 7 | on: 8 | schedule: 9 | - cron: 0 5 * * * 10 | workflow_dispatch: 11 | 12 | jobs: 13 | check-repo-owner: 14 | # This job is so the 
entire workflow will end successfully and give some 15 | # output to explain why it hasn't run on a non-Adafruit fork. 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: repository 19 | env: 20 | OWNER_IS_ADAFRUIT: ${{ startswith(github.repository, 'adafruit/') }} 21 | run: | 22 | echo "This workflow will only run if Adafruit is the repository owner." 23 | echo "Repository owner is Adafruit: $OWNER_IS_ADAFRUIT" 24 | update-bundles: 25 | runs-on: ubuntu-latest 26 | # Only run the build on Adafruit's repository. Forks won't have the secrets. 27 | # Its necessary to do this here, since 'schedule' events cannot (currently) 28 | # be limited (they run on all forks' default branches). 29 | if: startswith(github.repository, 'adafruit/') 30 | steps: 31 | - name: Set up Python 3.12 32 | uses: actions/setup-python@v5 33 | with: 34 | python-version: 3.12 35 | - name: Load contributor cache 36 | uses: actions/cache@v4 37 | with: 38 | key: "contributor-cache" 39 | path: "contributors.json" 40 | - name: Versions 41 | run: | 42 | python3 --version 43 | - uses: actions/checkout@v4 44 | with: 45 | submodules: true 46 | - name: Install deps 47 | run: | 48 | pip install -r requirements.txt 49 | - name: Run adabot.circuitpython_bundle 50 | env: 51 | ADABOT_EMAIL: ${{ secrets.ADABOT_EMAIL }} 52 | ADABOT_GITHUB_USER: ${{ secrets.ADABOT_GITHUB_USER }} 53 | ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }} 54 | BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }} 55 | BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }} 56 | run: | 57 | python3 -u -m adabot.circuitpython_bundle 58 | -------------------------------------------------------------------------------- /.github/workflows/learn_cron.yml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 Jeff Epler for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | name: Tag Learning System Guides 6 | 7 | on: 8 | schedule: 9 | - cron: 0 5 * * * 10 | 11 | jobs: 12 | check-repo-owner: 13 | # This job is so the entire workflow will end successfully and give some 14 | # output to explain why it hasn't run on a non-Adafruit fork. 15 | runs-on: ubuntu-latest 16 | if: ${{ (github.repository_owner != 'adafruit') }} 17 | steps: 18 | - run: | 19 | echo "This workflow is only intended to run in the adafruit fork of adabot" 20 | 21 | update-learn: 22 | runs-on: ubuntu-latest 23 | # Only run the build if the access token has been configured. THs will be 24 | # the case on Adafruit's repository. Its necessary to do this here, since 25 | # 'schedule' events cannot (currently) be limited (they run on all forks' 26 | # default branches). 
27 | if: ${{ (github.repository_owner == 'adafruit') }} 28 | steps: 29 | - uses: actions/checkout@v4 30 | with: 31 | repository: ${{ github.repository_owner }}/Adafruit_Learning_System_Guides 32 | token: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }} 33 | - name: Tag a release 34 | env: 35 | ADABOT_EMAIL: ${{ secrets.ADABOT_EMAIL }} 36 | run: | 37 | git config --global user.name adabot 38 | git config --global user.email "$ADABOT_EMAIL" 39 | TAG_NAME=`date +%Y%m%d` 40 | git tag $TAG_NAME 41 | git push origin $TAG_NAME 42 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | name: Run pre-commit 6 | 7 | on: [pull_request, push] 8 | 9 | jobs: 10 | pre-commit: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Set up Python 3 14 | uses: actions/setup-python@v4 15 | with: 16 | python-version: 3 17 | - name: Versions 18 | run: | 19 | python3 --version 20 | - name: Checkout Current Repo 21 | uses: actions/checkout@v3 22 | - name: Pip install requirements 23 | run: | 24 | pip install --force-reinstall -r requirements.txt 25 | - name: Pip install pre-commit 26 | run: | 27 | pip install pre-commit 28 | - name: Run pre-commit hooks 29 | run: | 30 | pre-commit run --all-files 31 | -------------------------------------------------------------------------------- /.github/workflows/reports_cron.yml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2019 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | name: Run Daily Reports 6 | 7 | on: 8 | schedule: 9 | # The actor (github.actor) that runs the cron job may be the user who created the cron job 10 | # initially. It does not appear to be settable via a secret or environment variable. 11 | - cron: 15 5 * * * 12 | workflow_dispatch: 13 | 14 | 15 | jobs: 16 | check-repo-owner: 17 | # This job is so the entire workflow will end successfully and give some 18 | # output to explain why it hasn't run on a non-Adafruit fork. 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: repository 22 | env: 23 | OWNER_IS_ADAFRUIT: ${{ startswith(github.repository, 'adafruit/') }} 24 | run: | 25 | echo "This workflow will only run if Adafruit is the repository owner." 26 | echo "Repository owner is Adafruit: $OWNER_IS_ADAFRUIT" 27 | run-reports: 28 | runs-on: ubuntu-latest 29 | # Only run the build on Adafruit's repository. Forks won't have the secrets. 30 | # Its necessary to do this here, since 'schedule' events cannot (currently) 31 | # be limited (they run on all forks' default branches). 
32 | if: startswith(github.repository, 'adafruit/') 33 | env: 34 | ADABOT_GITHUB_USER: ${{ secrets.ADABOT_GITHUB_USER }} 35 | ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }} 36 | RTD_TOKEN: ${{ secrets.RTD_TOKEN }} 37 | BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }} 38 | BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }} 39 | steps: 40 | - name: Set up Python 3.11 41 | uses: actions/setup-python@v5 42 | with: 43 | python-version: 3.11 44 | - name: Versions 45 | run: | 46 | python3 --version 47 | - uses: actions/checkout@v4 48 | with: 49 | submodules: true 50 | - name: Install deps 51 | run: | 52 | pip install -r requirements.txt 53 | - name: Make Directory For Report Files 54 | run: mkdir -p bin/adabot 55 | - name: Set Date Variable 56 | id: today 57 | run: | 58 | echo date=$( 59 | date +%Y%m%d 60 | ) >> $GITHUB_OUTPUT 61 | - name: Run adabot.circuitpython_libraries 62 | env: 63 | # LIB_CHECK_CP_FILE is for circuitpython_libraries.py output 64 | LIB_CHECK_CP_FILE: bin/adabot/circuitpython_library_report_${{ steps.today.outputs.date }}.txt 65 | run: | 66 | python3 -u -m adabot.circuitpython_libraries -o $LIB_CHECK_CP_FILE 67 | continue-on-error: true 68 | - name: Run adabot.circuitpython_library_download_stats 69 | env: 70 | # LIB_DL_STATS_FILE is for future Bundle and PyPi download stats script 71 | LIB_DL_STATS_FILE: bin/adabot/library_download_stats_${{ steps.today.outputs.date }}.txt 72 | run: | 73 | python3 -u -m adabot.circuitpython_library_download_stats -o $LIB_DL_STATS_FILE 74 | continue-on-error: true 75 | - name: Run adabot.arduino_libraries 76 | env: 77 | # LIB_CHECK_ARD_FILE is for arduino_libraries.py output 78 | LIB_CHECK_ARD_FILE: bin/adabot/arduino_library_report_${{ steps.today.outputs.date }}.txt 79 | run: | 80 | python3 -u -m adabot.arduino_libraries -o $LIB_CHECK_ARD_FILE 81 | continue-on-error: true 82 | - name: Check For Files 83 | run: | 84 | ls bin/adabot 85 | - name: Upload Reports To AWS S3 86 | if: ${{ github.event_name != 'workflow_dispatch' }} 87 | env: 88 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 89 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 90 | run: "[ -z \"$AWS_ACCESS_KEY_ID\" ] || aws s3 cp bin/adabot/ s3://adafruit-circuit-python/adabot/bin/reports/ --recursive --no-progress --region us-east-1" 91 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | name: Test Adabot 6 | 7 | on: [push, pull_request] 8 | 9 | jobs: 10 | check-repo-owner: 11 | # This job is so the entire workflow will end successfully and give some 12 | # output to explain why it hasn't run on a non-Adafruit fork. 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: repository 16 | env: 17 | OWNER_IS_ADAFRUIT: ${{ startswith(github.repository, 'adafruit/') }} 18 | run: | 19 | echo "This workflow will only run if Adafruit is the repository owner." 20 | echo "Repository owner is Adafruit: $OWNER_IS_ADAFRUIT" 21 | run-tests: 22 | runs-on: ubuntu-latest 23 | # Only run the build on Adafruit's repository. Forks won't have the secrets. 24 | # Its necessary to do this here, since 'schedule' events cannot (currently) 25 | # be limited (they run on all forks' default branches). 
26 | if: startswith(github.repository, 'adafruit/') 27 | steps: 28 | - name: Set up Python 29 | uses: actions/setup-python@v5 30 | with: 31 | python-version: 3 32 | - name: Versions 33 | run: | 34 | python3 --version 35 | - uses: actions/checkout@v4 36 | with: 37 | submodules: true 38 | - name: Install deps 39 | run: | 40 | pip install -r requirements.txt 41 | - name: Run Tests 42 | env: 43 | ADABOT_EMAIL: ${{ secrets.ADABOT_EMAIL }} 44 | ADABOT_GITHUB_USER: ${{ secrets.ADABOT_GITHUB_USER }} 45 | ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }} 46 | RTD_TOKEN: ${{ secrets.RTD_TOKEN }} 47 | run: | 48 | python3 -u -m pytest 49 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | __pycache__ 6 | _build 7 | .bundles/* 8 | *.pyc 9 | .env 10 | .venv 11 | env.sh 12 | *.swp 13 | .libraries/* 14 | .gitlibs/* 15 | .cp_org/* 16 | .blinka/* 17 | .vscode 18 | .idea/* 19 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | exclude: patches 6 | repos: 7 | - repo: https://github.com/pre-commit/pre-commit-hooks 8 | rev: v4.4.0 9 | hooks: 10 | - id: end-of-file-fixer 11 | - id: trailing-whitespace 12 | - repo: https://github.com/fsfe/reuse-tool 13 | rev: v1.1.2 14 | hooks: 15 | - id: reuse 16 | - repo: https://github.com/psf/black 17 | rev: 24.3.0 18 | hooks: 19 | - id: black 20 | - repo: https://github.com/pycqa/pylint 21 | rev: v3.2.6 22 | hooks: 23 | - id: pylint 24 | name: pylint 25 | types: [python] 26 | -------------------------------------------------------------------------------- /.reuse/dep5: -------------------------------------------------------------------------------- 1 | Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ 2 | Upstream-Name: adabot 3 | Source: https://github.com/adafruit/adabot.git 4 | 5 | Files: patches/* 6 | Copyright: 2022 Kattni Rembor 7 | License: MIT 8 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, gender identity and expression, level of experience, 9 | nationality, personal appearance, race, religion, or sexual identity and 10 | orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at support@adafruit.com. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 
67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at [http://contributor-covenant.org/version/1/4][version] 72 | 73 | [homepage]: http://contributor-covenant.org 74 | [version]: http://contributor-covenant.org/version/1/4/ 75 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md.license: -------------------------------------------------------------------------------- 1 | SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries 2 | 3 | SPDX-License-Identifier: MIT 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2017 Scott Shawcroft for Adafruit Industries 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /LICENSES/MIT.txt: -------------------------------------------------------------------------------- 1 | MIT License Copyright (c) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is furnished 8 | to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice (including the next 11 | paragraph) shall be included in all copies or substantial portions of the 12 | Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 16 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS 17 | OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 18 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF 19 | OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
20 | -------------------------------------------------------------------------------- /LICENSES/Unlicense.txt: -------------------------------------------------------------------------------- 1 | This is free and unencumbered software released into the public domain. 2 | 3 | Anyone is free to copy, modify, publish, use, compile, sell, or distribute 4 | this software, either in source code form or as a compiled binary, for any 5 | purpose, commercial or non-commercial, and by any means. 6 | 7 | In jurisdictions that recognize copyright laws, the author or authors of this 8 | software dedicate any and all copyright interest in the software to the public 9 | domain. We make this dedication for the benefit of the public at large and 10 | to the detriment of our heirs and successors. We intend this dedication to 11 | be an overt act of relinquishment in perpetuity of all present and future 12 | rights to this software under copyright law. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 16 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS 17 | BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 18 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH 19 | THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. For more information, 20 | please refer to 21 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | 2 | Adafruit Adabot 3 | ============ 4 | 5 | .. image :: https://img.shields.io/discord/327254708534116352.svg 6 | :target: https://adafru.it/discord 7 | :alt: Discord 8 | 9 | AdaBot is a friendly helper bot that works across the web to make people's 10 | lives better. It focuses on those contributing to Adafruit's variety of 11 | projects including CircuitPython. 12 | 13 | Setup 14 | ======= 15 | 16 | Here are the instructions for one time setup. Its simpler to start once 17 | everything is installed. 18 | 19 | Debian/Ubuntu Dependencies 20 | +++++++++++++++++++++++++++ 21 | 22 | .. code-block:: shell 23 | 24 | sudo apt-get update # make sure you have the latest packages 25 | sudo apt-get upgrade # make sure already installed packages are latest 26 | sudo apt-get install git python3 python3-venv python3-pip screen 27 | 28 | Adabot 29 | ++++++++++ 30 | 31 | Once the dependencies are installed, now clone the git repo into your home directory. 32 | 33 | .. code-block:: shell 34 | 35 | git clone https://github.com/adafruit/adabot.git 36 | cd adabot 37 | 38 | First, set up a virtual environment and install the deps. 39 | 40 | .. code-block:: shell 41 | 42 | python3 -m venv .env 43 | source .env/bin/activate 44 | pip install -r requirements.txt 45 | 46 | Secrets! 47 | +++++++++ 48 | 49 | Adabot needs a few secrets to do her work. Never, ever check these into source 50 | control! 51 | 52 | They are stored as environment variables in ``env.sh``. 53 | 54 | So, copy the example ``template-env.sh``, edit it and save it as ``env.sh``. 55 | 56 | .. code-block:: shell 57 | 58 | cp template-env.sh env.sh 59 | nano env.sh 60 | 61 | Do CTRL-X to exit and press Y to save the file before exiting. 62 | 63 | Git 64 | +++++++++ 65 | 66 | Adabot can automatically commit information so git must know an email and name 67 | for the author. 68 | 69 | .. 
code-block:: shell 70 | 71 | git config --global user.email "" 72 | git config --global user.name "Adafruit Adabot" 73 | git config --global credential.helper 'store --file ~/.adabot-git-credentials' 74 | git push 75 | 76 | The git push won't actually push anything but it prompt for the bot's username 77 | and password. These will be stored in the ~/.adabot-git-credentials file which 78 | makes this not very secure. Make sure your OAUTH token has only the permissions 79 | it needs in case it ends up in someone else's hands. 80 | 81 | Usage Example 82 | ============= 83 | 84 | To run Adabot we'll use screen to manage all of the individual pieces. Luckily, 85 | we have a screenrc file that manages starting everything up. 86 | 87 | .. code-block:: shell 88 | 89 | screen -c adabot.screenrc 90 | 91 | This command will return back to your prompt with something like 92 | ``[detached from 10866.pts-0.raspberrypi]``. This means that Rosie is now 93 | running within screen session behind the scenes. You can view output of it by 94 | attaching to the screen with: 95 | 96 | .. code-block:: shell 97 | 98 | screen -r 99 | 100 | Once reattached you can stop everything by CTRL-Cing repeatedly or detach again 101 | with CTRL-A then D. If any errors occur, a sleep command will be run so you can 102 | view the output before screen shuts down. 103 | 104 | You can also run adabot without using screen. To run the library statistics and validation script you must be inside this cloned 105 | adabot directory and run the following command: 106 | 107 | .. code-block:: shell 108 | 109 | python3 -m adabot.circuitpython_libraries 110 | 111 | Ensure you have set BOTH the Github access token and Travis token environment 112 | variables beforehand--see the template-env.sh for the name and where to get tokens. 113 | 114 | Applying Patches To All CircuitPython Libraries 115 | ================================================ 116 | To apply a patch to all CircuitPython libraries (only guaranteed for files shared 117 | among all libraries, such as those included in the cookiecutter (e.g. README.rst, etc), 118 | do the following: 119 | 120 | 1. Apply your update(s) to any library as normal, using ``git commit``. It is recommended to 121 | give a short, detailed description of the patch. This description will be used by the next 122 | step for both the name of the patchfile and the subsequent patch messages. 123 | 124 | 2. Create a patch file using `git format-patch `_. 125 | There are many techniques to using `git format-patch`; choose the one that makes 126 | sense for your updates. As a general usage example, ``format-patch -n`` will create patches 127 | for ``n`` number of commits starting with the most recent: 128 | 129 | .. code-block:: shell 130 | 131 | # creates a patch file based on the last commit 132 | git format-patch -1 133 | 134 | # creates patch files based on the last 5 commits 135 | git format-patch -5 136 | 137 | # creates a patch file with zero lines of context (to eliminate any unique 138 | # text that will cause the patch to not be applicable). must use 139 | # 'git apply --unidiff-zero' flag to apply the patch. 140 | git format-patch -1 -U0 141 | 142 | 3. Place the new patch file into the ``adabot/patches`` directory on a fork of the 143 | adafruit/adabot repository, and ``git commit`` with a description of the patch(es). 144 | 145 | 4. Submit a Pull Request (PR) to the adafruit/adabot repository from the updated fork. 146 | 147 | 5. Run the patch update script after the PR has been merged. 
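
Before opening the PR, the patch can be checked without pushing anything. A minimal
sketch, assuming the patch file has already been copied into the local ``patches``
directory (``0001-Example.patch`` is a placeholder name; the script only allows
``--local`` together with ``--dry-run`` or ``--list``):

.. code-block:: shell

   # dry-run a single local patch; nothing is committed or pushed
   python3 -m adabot.circuitpython_library_patches --dry-run --local -p 0001-Example.patch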
148 | 149 | 150 | To run the patch update script you must be inside this cloned adabot directory and 151 | run the following command: 152 | 153 | .. code-block:: shell 154 | 155 | # note: ensure the local clone is current with the github repo that contains the patch(es) 156 | # by using git pull before running the script. 157 | python3 -m adabot.circuitpython_library_patches 158 | 159 | # The 'circuitpython_library_patches' script accepts command line arguments. Use 160 | # the help argument to display usage. 161 | python3 -m adabot.circuitpython_library_patches -h 162 | 163 | 164 | Making Releases For CircuitPython Libraries 165 | =========================================== 166 | Adabot includes a utility to check if a library needs a new release 167 | and to help a human create the release with a CLI instead of the 168 | web interface. 169 | 170 | To use it: 171 | 172 | 1. Clone the adabot repo locally and open a terminal inside of it 173 | 2. Run ``pip install .`` in the root of Adabot repo to install it via pip 174 | 3. Clone the library repo locally 175 | 4. ``cd`` into the library repo 176 | 5. run ``python -m adabot.circuitpython_library_release`` 177 | 6. Answer the prompts for new tag name and title. 178 | 179 | This utility can be used in conjunction with ``git submodule foreach`` inside of the 180 | CircuitPython Library Bundle. 181 | 182 | These are the steps for that process: 183 | 184 | 1. Clone the adabot repo locally and open a terminal inside of it 185 | 2. If you want to use the same title for all libraries (i.e. due to a patch rollout) 186 | then modify the ``RELEASE_TITLE`` dictionary value at the top 187 | of ``adabot/circuitpython_library_release.py`` 188 | 3. Run ``pip install .`` in the root of Adabot repo to install it via pip 189 | 4. Clone the Library Bundle repo and open a terminal inside of it 190 | 5. Run these commands to update all submodules 191 | 192 | .. code-block:: shell 193 | 194 | git submodule sync --quiet --recursive 195 | git submodule update --init 196 | 197 | 198 | 6. Run ``git submodule foreach 'python -m adabot.circuitpython_library_release'`` 199 | 200 | 201 | 202 | Contributing 203 | ============ 204 | 205 | Contributions are welcome! Please read our `Code of Conduct 206 | `_ 207 | before contributing to help this project stay welcoming. 208 | -------------------------------------------------------------------------------- /README.rst.license: -------------------------------------------------------------------------------- 1 | SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries 2 | 3 | SPDX-License-Identifier: MIT 4 | -------------------------------------------------------------------------------- /adabot-cron.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries 4 | # 5 | # SPDX-License-Identifier: MIT 6 | 7 | cd /home/tannewt/adabot 8 | 9 | source .env/bin/activate 10 | source env.sh 11 | 12 | python -m adabot.circuitpython_bundle 13 | -------------------------------------------------------------------------------- /adabot.screenrc: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | # Screen startup file to start multiple commands under multiple screens. 
6 | # Start with "screen -c thisfilename" 7 | 8 | screen -t flask 0 bash -c "source .env/bin/activate; source env.sh; export FLASK_APP=rosie-ci.py; flask run || sleep 1000" 9 | 10 | # With a free ngrok account you will get a random subdomain. 11 | # screen -t ngrok 1 ngrok http 5000 12 | 13 | # Use this command with your own subdomain. 14 | screen -t ngrok 1 bash -c "../ngrok http -subdomain=rosie-ci 5000 || sleep 1000" 15 | 16 | screen -t celery_high 2 bash -c "source .env/bin/activate; source env.sh; celery -A rosie-ci.celery worker -n high -Q high || [ $? -eq 1 ] || sleep 1000" 17 | 18 | screen -t celery_low 3 bash -c "source .env/bin/activate; source env.sh; celery -A rosie-ci.celery worker -n low -Q low || [ $? -eq 1 ] || sleep 1000" 19 | 20 | detach 21 | -------------------------------------------------------------------------------- /adabot/__init__.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2023 Tim Cocks 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """AdaBot is a friendly helper bot that works across the web to make people's 6 | lives better.""" 7 | 8 | REQUESTS_TIMEOUT = 30 9 | -------------------------------------------------------------------------------- /adabot/arduino_libraries.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2018 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Adabot utility for Arduino Libraries.""" 6 | 7 | import argparse 8 | import logging 9 | import sys 10 | import traceback 11 | 12 | import requests 13 | 14 | from adabot import github_requests as gh_reqs, REQUESTS_TIMEOUT 15 | 16 | logger = logging.getLogger(__name__) 17 | ch = logging.StreamHandler(stream=sys.stdout) 18 | logging.basicConfig(level=logging.INFO, format="%(message)s", handlers=[ch]) 19 | 20 | # Setup ArgumentParser 21 | cmd_line_parser = argparse.ArgumentParser( 22 | description="Adabot utility for Arduino Libraries.", 23 | prog="Adabot Arduino Libraries Utility", 24 | ) 25 | cmd_line_parser.add_argument( 26 | "-o", 27 | "--output_file", 28 | help="Output log to the filename provided.", 29 | metavar="", 30 | dest="output_file", 31 | ) 32 | cmd_line_parser.add_argument( 33 | "-v", 34 | "--verbose", 35 | help="Set the level of verbosity printed to the command prompt." 36 | " Zero is off; One is on (default).", 37 | type=int, 38 | default=1, 39 | dest="verbose", 40 | choices=[0, 1], 41 | ) 42 | 43 | all_libraries = [] 44 | adafruit_library_index = [] 45 | 46 | 47 | def list_repos(): 48 | """Return a list of all Adafruit repositories with 'Arduino' in either the 49 | name, description, or readme. Each list item is a dictionary of GitHub API 50 | repository state. 
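    A hypothetical usage sketch (helpers as defined in this module):

        for repo in list_repos():
            if is_arduino_library(repo):
                print(repo["full_name"])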
51 | """ 52 | repos = [] 53 | result = gh_reqs.get( 54 | "/search/repositories", 55 | params={ 56 | "q": ( 57 | "Arduino in:name in:description in:readme fork:true user:adafruit archived:false" 58 | " OR Library in:name in:description in:readme fork:true user:adafruit" 59 | " archived:false OR Adafruit_ in:name fork:true user:adafruit archived:false AND" 60 | " NOT PCB in:name AND NOT Python in:name" 61 | ), 62 | "per_page": 100, 63 | "sort": "updated", 64 | "order": "asc", 65 | }, 66 | ) 67 | while result.ok: 68 | repos.extend( 69 | result.json()["items"] 70 | ) # uncomment and comment below, to include all forks 71 | 72 | if result.links.get("next"): 73 | result = gh_reqs.get(result.links["next"]["url"]) 74 | else: 75 | break 76 | 77 | return repos 78 | 79 | 80 | def is_arduino_library(repo): 81 | """Returns if the repo is an Arduino library, as determined by the existence of 82 | the 'library.properties' file. 83 | """ 84 | lib_prop_file = requests.get( 85 | "https://raw.githubusercontent.com/adafruit/" 86 | + repo["name"] 87 | + "/" 88 | + repo["default_branch"] 89 | + "/library.properties", 90 | timeout=REQUESTS_TIMEOUT, 91 | ) 92 | return lib_prop_file.ok 93 | 94 | 95 | def print_list_output(title, coll): 96 | """Helper function to format output.""" 97 | logger.info("") 98 | logger.info(title.format(len(coll) - 2)) 99 | long_col = [ 100 | (max([len(str(row[i])) for row in coll]) + 3) for i in range(len(coll[0])) 101 | ] 102 | row_format = "".join(["{:<" + str(this_col) + "}" for this_col in long_col]) 103 | for lib in coll: 104 | logger.info("%s", row_format.format(*lib)) 105 | 106 | 107 | def validate_library_properties(repo): 108 | """Checks if the latest GitHub Release Tag and version in the library_properties 109 | file match. Will also check if the library_properties is there, but no release 110 | has been made. 111 | """ 112 | lib_prop_file = None 113 | lib_version = None 114 | release_tag = None 115 | lib_prop_file = requests.get( 116 | "https://raw.githubusercontent.com/adafruit/" 117 | + repo["name"] 118 | + "/" 119 | + repo["default_branch"] 120 | + "/library.properties", 121 | timeout=REQUESTS_TIMEOUT, 122 | ) 123 | if not lib_prop_file.ok: 124 | # print("{} skipped".format(repo["name"])) 125 | return None # no library properties file! 126 | 127 | lines = lib_prop_file.text.split("\n") 128 | for line in lines: 129 | if "version" in line: 130 | lib_version = line[len("version=") :] 131 | break 132 | 133 | get_latest_release = gh_reqs.get( 134 | "/repos/adafruit/" + repo["name"] + "/releases/latest" 135 | ) 136 | if get_latest_release.ok: 137 | response = get_latest_release.json() 138 | if "tag_name" in response: 139 | release_tag = response["tag_name"] 140 | if "message" in response: 141 | if response["message"] == "Not Found": 142 | release_tag = "None" 143 | else: 144 | release_tag = "Unknown" 145 | 146 | if lib_version and release_tag: 147 | return [release_tag, lib_version] 148 | 149 | return None 150 | 151 | 152 | def validate_release_state(repo): 153 | """Validate if a repo 1) has a release, and 2) if there have been commits 154 | since the last release. Returns a list of string error messages for the 155 | repository. 156 | """ 157 | if not is_arduino_library(repo): 158 | return None 159 | 160 | compare_tags = gh_reqs.get( 161 | "/repos/" 162 | + repo["full_name"] 163 | + "/compare/" 164 | + repo["default_branch"] 165 | + "..." 
166 | + repo["tag_name"] 167 | ) 168 | if not compare_tags.ok: 169 | logger.error( 170 | "Error: failed to compare %s '%s' to tag '%s'", 171 | repo["name"], 172 | repo["default_branch"], 173 | repo["tag_name"], 174 | ) 175 | return None 176 | compare_tags_json = compare_tags.json() 177 | if "status" in compare_tags_json: 178 | if compare_tags_json["status"] != "identical": 179 | return [repo["tag_name"], compare_tags_json["behind_by"]] 180 | elif "errors" in compare_tags_json: 181 | logger.error( 182 | "Error: comparing latest release to '%s' failed on '%s'. Error Message: %s", 183 | repo["default_branch"], 184 | repo["name"], 185 | compare_tags_json["message"], 186 | ) 187 | 188 | return None 189 | 190 | 191 | def validate_actions(repo): 192 | """Validate if a repo has workflows/githubci.yml""" 193 | repo_has_actions = requests.get( 194 | "https://raw.githubusercontent.com/adafruit/" 195 | + repo["name"] 196 | + "/" 197 | + repo["default_branch"] 198 | + "/.github/workflows/githubci.yml", 199 | timeout=REQUESTS_TIMEOUT, 200 | ) 201 | return repo_has_actions.ok 202 | 203 | 204 | def validate_example(repo): 205 | """Validate if a repo has any files in examples directory""" 206 | repo_has_ino = gh_reqs.get("/repos/adafruit/" + repo["name"] + "/contents/examples") 207 | return repo_has_ino.ok and len(repo_has_ino.json()) 208 | 209 | 210 | # pylint: disable=too-many-branches 211 | def run_arduino_lib_checks(): 212 | """Run necessary functions and outout the results.""" 213 | logger.info("Running Arduino Library Checks") 214 | logger.info("Getting list of libraries to check...") 215 | 216 | repo_list = list_repos() 217 | logger.info("Found %s Arduino libraries to check\n", len(repo_list)) 218 | failed_lib_prop = [ 219 | [" Repo", "Release Tag", "library.properties Version"], 220 | [" ----", "-----------", "--------------------------"], 221 | ] 222 | needs_release_list = [ 223 | [" Repo", "Latest Release", "Commits Behind"], 224 | [" ----", "--------------", "--------------"], 225 | ] 226 | needs_registration_list = [[" Repo"], [" ----"]] 227 | missing_actions_list = [[" Repo"], [" ----"]] 228 | missing_library_properties_list = [[" Repo"], [" ----"]] 229 | 230 | for repo in repo_list: 231 | have_examples = validate_example(repo) 232 | if not have_examples: 233 | # not a library 234 | continue 235 | 236 | entry = {"name": repo["name"]} 237 | 238 | lib_check = validate_library_properties(repo) 239 | if not lib_check: 240 | missing_library_properties_list.append([" " + str(repo["name"])]) 241 | continue 242 | 243 | # print(repo['clone_url']) 244 | needs_registration = False 245 | for lib in adafruit_library_index: 246 | if (repo["clone_url"] == lib["repository"]) or ( 247 | repo["html_url"] == lib["website"] 248 | ): 249 | entry["arduino_version"] = lib["version"] # found it! 
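                # The 'break' below also skips the for-loop's 'else' clause, so
                # needs_registration stays False for libraries already registered with Arduino.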
250 | break 251 | else: 252 | needs_registration = True 253 | if needs_registration: 254 | needs_registration_list.append([" " + str(repo["name"])]) 255 | 256 | entry["release"] = lib_check[0] 257 | entry["version"] = lib_check[1] 258 | repo["tag_name"] = lib_check[0] 259 | 260 | needs_release = validate_release_state(repo) 261 | entry["needs_release"] = needs_release 262 | if needs_release: 263 | needs_release_list.append( 264 | [" " + str(repo["name"]), needs_release[0], needs_release[1]] 265 | ) 266 | 267 | missing_actions = not validate_actions(repo) 268 | entry["needs_actions"] = missing_actions 269 | if missing_actions: 270 | missing_actions_list.append([" " + str(repo["name"])]) 271 | 272 | all_libraries.append(entry) 273 | 274 | for entry in all_libraries: 275 | logging.info(entry) 276 | 277 | if len(failed_lib_prop) > 2: 278 | print_list_output( 279 | "Libraries Have Mismatched Release Tag and library.properties Version: ({})", 280 | failed_lib_prop, 281 | ) 282 | 283 | if len(needs_registration_list) > 2: 284 | print_list_output( 285 | "Libraries that are not registered with Arduino: ({})", 286 | needs_registration_list, 287 | ) 288 | 289 | if len(needs_release_list) > 2: 290 | print_list_output( 291 | "Libraries have commits since last release: ({})", needs_release_list 292 | ) 293 | 294 | if len(missing_actions_list) > 2: 295 | print_list_output( 296 | "Libraries that is not configured with Actions: ({})", missing_actions_list 297 | ) 298 | 299 | if len(missing_library_properties_list) > 2: 300 | print_list_output( 301 | "Libraries that is missing library.properties file: ({})", 302 | missing_library_properties_list, 303 | ) 304 | 305 | 306 | def main(verbosity=1, output_file=None): # pylint: disable=missing-function-docstring 307 | if output_file: 308 | file_handler = logging.FileHandler(output_file) 309 | logger.addHandler(file_handler) 310 | 311 | if verbosity == 0: 312 | logger.setLevel("CRITICAL") 313 | 314 | try: 315 | reply = requests.get( 316 | "http://downloads.arduino.cc/libraries/library_index.json", 317 | timeout=REQUESTS_TIMEOUT, 318 | ) 319 | if not reply.ok: 320 | logging.error( 321 | "Could not fetch http://downloads.arduino.cc/libraries/library_index.json" 322 | ) 323 | sys.exit() 324 | arduino_library_index = reply.json() 325 | for lib in arduino_library_index["libraries"]: 326 | if "adafruit" in lib["url"]: 327 | adafruit_library_index.append(lib) 328 | run_arduino_lib_checks() 329 | except: 330 | _, exc_val, exc_tb = sys.exc_info() 331 | logger.error("Exception Occurred!") 332 | logger.error(("-" * 60)) 333 | logger.error("Traceback (most recent call last):") 334 | trace = traceback.format_tb(exc_tb) 335 | for line in trace: 336 | logger.error(line) 337 | logger.error(exc_val) 338 | 339 | raise 340 | 341 | 342 | if __name__ == "__main__": 343 | cmd_line_args = cmd_line_parser.parse_args() 344 | main(verbosity=cmd_line_args.verbose, output_file=cmd_line_args.output_file) 345 | -------------------------------------------------------------------------------- /adabot/circuitpython_library_download_stats.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2018 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """ Collects download stats for the Adafruit CircuitPython Library Bundles 6 | and each library. 
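The BigQuery-based retrieve_pypi_stats helper additionally requires the
BIGQUERY_PRIVATE_KEY and BIGQUERY_CLIENT_EMAIL environment variables.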
7 | """ 8 | 9 | import os 10 | import datetime 11 | import sys 12 | import argparse 13 | import traceback 14 | import operator 15 | import requests 16 | 17 | from google.cloud import bigquery 18 | import google.oauth2.service_account 19 | 20 | from adabot import github_requests as gh_reqs, REQUESTS_TIMEOUT 21 | from adabot.lib import common_funcs 22 | 23 | # Setup ArgumentParser 24 | cmd_line_parser = argparse.ArgumentParser( 25 | description="Adabot utility for CircuitPython Library download stats." 26 | " Provides stats for the Adafruit CircuitPython Bundle, and PyPi if available.", 27 | prog="Adabot CircuitPython Libraries Download Stats", 28 | ) 29 | cmd_line_parser.add_argument( 30 | "-o", 31 | "--output_file", 32 | help="Output log to the filename provided.", 33 | metavar="", 34 | dest="output_file", 35 | ) 36 | cmd_line_parser.add_argument( 37 | "-v", 38 | "--verbose", 39 | help="Set the level of verbosity printed to the command prompt." 40 | " Zero is off; One is on (default).", 41 | type=int, 42 | default=1, 43 | dest="verbose", 44 | choices=[0, 1], 45 | ) 46 | 47 | # Global variables 48 | OUTPUT_FILENAME = None 49 | VERBOSITY = 1 50 | file_data = [] 51 | 52 | # List containing libraries on PyPi that are not returned by the 'list_repos()' function, 53 | # i.e. are not named 'Adafruit_CircuitPython_'. 54 | PYPI_FORCE_NON_CIRCUITPYTHON = ["Adafruit-Blinka"] 55 | 56 | # https://www.piwheels.org/json.html 57 | PIWHEELS_PACKAGES_URL = "https://www.piwheels.org/packages.json" 58 | 59 | 60 | def retrieve_piwheels_stats(): 61 | """Get data dump of piwheels download stats""" 62 | stats = {} 63 | response = requests.get(PIWHEELS_PACKAGES_URL, timeout=REQUESTS_TIMEOUT) 64 | if response.ok: 65 | packages = response.json() 66 | stats = { 67 | pkg: {"total": dl_all, "month": dl_month} 68 | for pkg, dl_month, dl_all, *_ in packages 69 | if pkg.startswith("adafruit") 70 | } 71 | 72 | return stats 73 | 74 | 75 | def parse_piwheels_stats(): 76 | """Map piwheels download stats for each repo""" 77 | successful_stats = {} 78 | failed_stats = [] 79 | repos = common_funcs.list_repos() 80 | dl_stats = retrieve_piwheels_stats() 81 | for repo in repos: 82 | if repo["owner"]["login"] == "adafruit" and repo["name"].startswith( 83 | "Adafruit_CircuitPython" 84 | ): 85 | if common_funcs.repo_is_on_pypi(repo): 86 | pkg_name = repo["name"].replace("_", "-").lower() 87 | if pkg_name in dl_stats: 88 | successful_stats[repo["name"]] = ( 89 | dl_stats[pkg_name]["month"], 90 | dl_stats[pkg_name]["total"], 91 | ) 92 | else: 93 | failed_stats.append(repo["name"]) 94 | 95 | for lib in PYPI_FORCE_NON_CIRCUITPYTHON: 96 | pkg_name = lib.lower() 97 | if pkg_name in dl_stats: 98 | successful_stats[lib] = ( 99 | dl_stats[pkg_name]["month"], 100 | dl_stats[pkg_name]["total"], 101 | ) 102 | else: 103 | failed_stats.append(lib) 104 | 105 | return successful_stats, failed_stats 106 | 107 | 108 | def retrieve_pypi_stats(submodules, additional_packages=("adafruit-blinka",)): 109 | """Get data dump of PyPI download stats (for the last 7 days)""" 110 | # Create access info dictionary 111 | access_info = { 112 | "private_key": os.environ["BIGQUERY_PRIVATE_KEY"], 113 | "client_email": os.environ["BIGQUERY_CLIENT_EMAIL"], 114 | "token_uri": "https://oauth2.googleapis.com/token", 115 | } 116 | 117 | # Use credentials to create a BigQuery client object 118 | credentials = google.oauth2.service_account.Credentials.from_service_account_info( 119 | access_info 120 | ) 121 | client = bigquery.Client("circuitpython-stats", 
credentials=credentials) 122 | 123 | # Get the list of PyPI package names 124 | packages = [] 125 | for submod in submodules: 126 | url: str = submod[1]["url"] 127 | pre_name = url.split("/")[-1][:-4] 128 | packages.append(pre_name.replace("_", "-").lower()) 129 | for addpack in additional_packages: 130 | packages.append(addpack) 131 | 132 | # Construct the query to use 133 | query = """ 134 | SELECT 135 | file.project as name, COUNT(*) AS num_downloads, 136 | FROM 137 | `bigquery-public-data.pypi.file_downloads` 138 | WHERE DATE(timestamp) 139 | BETWEEN DATE_TRUNC(DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), DAY) 140 | AND DATE_TRUNC(DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), DAY) 141 | AND file.project in ( 142 | """ 143 | packages_query = ["?" for _ in packages] 144 | query_parameters = [ 145 | bigquery.ScalarQueryParameter(None, "STRING", package) for package in packages 146 | ] 147 | query += ",".join(packages_query) 148 | query += """ 149 | ) 150 | GROUP BY file.project 151 | ORDER BY num_downloads DESC 152 | """ 153 | 154 | # Configure and run the query 155 | job_config = bigquery.QueryJobConfig(query_parameters=query_parameters) 156 | query_job = client.query( 157 | query, 158 | job_config=job_config, 159 | ) 160 | return query_job.result() 161 | 162 | 163 | def get_bundle_stats(bundle): 164 | """Returns the download stats for 'bundle'. Uses release tag names to compile download 165 | stats for the last 7 days. This assumes an Adabot release within that time frame, and 166 | that tag name(s) will be the date (YYYYMMDD). 167 | """ 168 | stats_dict = {} 169 | bundle_stats = gh_reqs.get("/repos/adafruit/" + bundle + "/releases") 170 | if not bundle_stats.ok: 171 | return {"Failed to retrieve bundle stats": bundle_stats.text} 172 | start_date = datetime.date.today() 173 | 174 | for release in bundle_stats.json(): 175 | try: 176 | release_date = datetime.date( 177 | int(release["tag_name"][:4]), 178 | int(release["tag_name"][4:6]), 179 | int(release["tag_name"][6:]), 180 | ) 181 | except ValueError: 182 | output_handler( 183 | "Skipping release. 
Tag name invalid: {}".format(release["tag_name"]) 184 | ) 185 | continue 186 | if (start_date - release_date).days > 7: 187 | break 188 | 189 | for asset in release["assets"]: 190 | if asset["name"].startswith("adafruit"): 191 | asset_name = asset["name"][: asset["name"].rfind("-")] 192 | if asset_name in stats_dict: 193 | stats_dict[asset_name] = ( 194 | stats_dict[asset_name] + asset["download_count"] 195 | ) 196 | else: 197 | stats_dict[asset_name] = asset["download_count"] 198 | 199 | return stats_dict 200 | 201 | 202 | def output_handler(message="", quiet=False): 203 | """Handles message output to prompt/file for functions.""" 204 | if OUTPUT_FILENAME is not None: 205 | file_data.append(message) 206 | if VERBOSITY and not quiet: 207 | print(message) 208 | 209 | 210 | def run_stat_check(): 211 | """Run and report all download stats.""" 212 | output_handler("Adafruit CircuitPython Library Download Stats") 213 | output_handler( 214 | "Report Date: {}".format(datetime.datetime.now().strftime("%d %B %Y, %I:%M%p")) 215 | ) 216 | output_handler() 217 | output_handler("Adafruit_CircuitPython_Bundle downloads for the past week:") 218 | for stat in sorted( 219 | get_bundle_stats("Adafruit_CircuitPython_Bundle").items(), 220 | key=operator.itemgetter(1), 221 | reverse=True, 222 | ): 223 | output_handler(" {0}: {1}".format(stat[0], stat[1])) 224 | output_handler() 225 | 226 | pypi_downloads = {} 227 | pypi_failures = [] 228 | downloads_list = [ 229 | ["| Library", "| Last Month", "| Total |"], 230 | ["|:-------", "|:--------:", "|:-----:|"], 231 | ] 232 | output_handler("Adafruit CircuitPython Library Piwheels downloads:") 233 | output_handler() 234 | pypi_downloads, pypi_failures = parse_piwheels_stats() 235 | for stat in sorted( 236 | pypi_downloads.items(), key=operator.itemgetter(1, 1), reverse=True 237 | ): 238 | downloads_list.append( 239 | ["| " + str(stat[0]), "| " + str(stat[1][0]), "| " + str(stat[1][1]) + " |"] 240 | ) 241 | 242 | long_col = [ 243 | (max([len(str(row[i])) for row in downloads_list]) + 3) 244 | for i in range(len(downloads_list[0])) 245 | ] 246 | row_format = "".join(["{:<" + str(this_col) + "}" for this_col in long_col]) 247 | for lib in downloads_list: 248 | output_handler(row_format.format(*lib)) 249 | 250 | if len(pypi_failures) > 0: 251 | output_handler() 252 | output_handler(" * Failed to retrieve stats for the following libraries:") 253 | for fail in pypi_failures: 254 | output_handler(" * {}".format(fail)) 255 | 256 | 257 | if __name__ == "__main__": 258 | cmd_line_args = cmd_line_parser.parse_args() 259 | VERBOSITY = cmd_line_args.verbose 260 | if cmd_line_args.output_file: 261 | OUTPUT_FILENAME = cmd_line_args.output_file 262 | try: 263 | run_stat_check() 264 | except: 265 | if OUTPUT_FILENAME is not None: 266 | exc_type, exc_val, exc_tb = sys.exc_info() 267 | output_handler("Exception Occurred!", quiet=True) 268 | output_handler(("-" * 60), quiet=True) 269 | output_handler("Traceback (most recent call last):", quiet=True) 270 | tb = traceback.format_tb(exc_tb) 271 | for line in tb: 272 | output_handler(line, quiet=True) 273 | output_handler(exc_val, quiet=True) 274 | 275 | raise 276 | 277 | finally: 278 | if OUTPUT_FILENAME is not None: 279 | with open(OUTPUT_FILENAME, "w") as f: 280 | for line in file_data: 281 | f.write(str(line) + "\n") 282 | -------------------------------------------------------------------------------- /adabot/circuitpython_library_patches.py: -------------------------------------------------------------------------------- 1 | # 
SPDX-FileCopyrightText: 2019 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Adabot utility for applying patches to all CircuitPython Libraries.""" 6 | 7 | import argparse 8 | import os 9 | import shutil 10 | import sys 11 | 12 | import requests 13 | import sh 14 | from sh.contrib import git 15 | 16 | from adabot import REQUESTS_TIMEOUT 17 | from adabot.lib import common_funcs 18 | 19 | 20 | working_directory = os.path.abspath(os.getcwd()) 21 | lib_directory = working_directory + "/.libraries/" 22 | patch_directory = working_directory + "/patches/" 23 | repos = [] 24 | check_errors = [] 25 | apply_errors = [] 26 | stats = [] 27 | 28 | """ 29 | Setup the command line argument parsing object. 30 | """ 31 | cli_parser = argparse.ArgumentParser( 32 | description="Apply patches to any common file(s) in" 33 | " all Adafruit CircuitPython Libraries." 34 | ) 35 | cli_parser.add_argument( 36 | "-l", "--list", help="Lists the available patches to run.", action="store_true" 37 | ) 38 | cli_parser.add_argument( 39 | "-p", 40 | help="Runs only the single patch referenced.", 41 | metavar="", 42 | dest="patch", 43 | ) 44 | cli_parser.add_argument( 45 | "-f", 46 | help="Adds the referenced FLAGS to the git.am call." 47 | " Only available when using '-p'. Enclose flags in brackets '[]'." 48 | " Multiple flags can be passed. NOTE: '--signoff' is already used " 49 | " used by default, and will be ignored. EXAMPLE: -f [-C0] -f [-s]", 50 | metavar="FLAGS", 51 | action="append", 52 | dest="flags", 53 | type=str, 54 | ) 55 | cli_parser.add_argument( 56 | "--use-apply", 57 | help="Forces use of 'git apply' instead of 'git am'." 58 | " This is necessary when needing to use 'apply' flags not available" 59 | " to 'am' (e.g. '--unidiff-zero'). Only available when using '-p'.", 60 | action="store_true", 61 | dest="use_apply", 62 | ) 63 | cli_parser.add_argument( 64 | "--dry-run", 65 | help="Accomplishes a dry run of patches, without applying" " them.", 66 | action="store_true", 67 | dest="dry_run", 68 | ) 69 | cli_parser.add_argument( 70 | "--local", 71 | help="Force use of local patches. This skips verification" 72 | " of patch files in the adabot GitHub repository. MUST use '--dry-run'" 73 | " with this argument; this guards against applying unapproved patches.", 74 | action="store_true", 75 | dest="run_local", 76 | ) 77 | 78 | 79 | def get_repo_list(): 80 | """Uses adabot.circuitpython_libraries module to get a list of 81 | CircuitPython repositories. Filters the list down to adafruit 82 | owned/sponsored CircuitPython libraries. 83 | """ 84 | repo_list = [] 85 | get_repos = common_funcs.list_repos() 86 | for repo in get_repos: 87 | if not ( 88 | repo["owner"]["login"] == "adafruit" 89 | and repo["name"].startswith("Adafruit_CircuitPython") 90 | ): 91 | continue 92 | repo_list.append(dict(name=repo["name"], url=repo["clone_url"])) 93 | 94 | return repo_list 95 | 96 | 97 | def get_patches(run_local): 98 | """Returns the list of patch files located in the adabot/patches 99 | directory. 
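    When run_local is True, only *.patch files found in the local patches
    directory are listed; otherwise the list is fetched from the
    adafruit/adabot repository via the GitHub contents API.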
100 | """ 101 | return_list = [] 102 | if not run_local: 103 | contents = requests.get( 104 | "https://api.github.com/repos/adafruit/adabot/contents/patches", 105 | timeout=REQUESTS_TIMEOUT 106 | ) 107 | if contents.ok: 108 | for patch in contents.json(): 109 | patch_name = patch["name"] 110 | return_list.append(patch_name) 111 | else: 112 | contents = os.listdir(patch_directory) 113 | for file in contents: 114 | if file.endswith(".patch"): 115 | return_list.append(file) 116 | 117 | return return_list 118 | 119 | # pylint: disable=too-many-arguments 120 | def apply_patch(repo_directory, patch_filepath, repo, patch, flags, use_apply): 121 | """Apply the `patch` in `patch_filepath` to the `repo` in 122 | `repo_directory` using git am or git apply. The commit 123 | with the user running the script (adabot if credentials are set 124 | for that). 125 | 126 | When `use_apply` is true, the `--apply` flag is automatically added 127 | to ensure that any passed flags that turn off apply (e.g. `--check`) 128 | are overridden. 129 | """ 130 | if not os.getcwd() == repo_directory: 131 | os.chdir(repo_directory) 132 | 133 | if not use_apply: 134 | try: 135 | git.am(flags, patch_filepath) 136 | except sh.ErrorReturnCode as err: 137 | apply_errors.append( 138 | dict(repo_name=repo, patch_name=patch, error=err.stderr) 139 | ) 140 | return False 141 | else: 142 | apply_flags = ["--apply"] 143 | for flag in flags: 144 | if not flag == "--signoff": 145 | apply_flags.append(flag) 146 | try: 147 | git.apply(apply_flags, patch_filepath) 148 | except sh.ErrorReturnCode as err: 149 | apply_errors.append( 150 | dict(repo_name=repo, patch_name=patch, error=err.stderr) 151 | ) 152 | return False 153 | 154 | with open(patch_filepath) as patchfile: 155 | for line in patchfile: 156 | if "[PATCH]" in line: 157 | message = '"' + line[(line.find("]") + 2) :] + '"' 158 | break 159 | try: 160 | git.commit("-a", "-m", message) 161 | except sh.ErrorReturnCode as err: 162 | apply_errors.append( 163 | dict(repo_name=repo, patch_name=patch, error=err.stderr) 164 | ) 165 | return False 166 | 167 | try: 168 | git.push() 169 | except sh.ErrorReturnCode as err: 170 | apply_errors.append(dict(repo_name=repo, patch_name=patch, error=err.stderr)) 171 | return False 172 | return True 173 | 174 | 175 | # pylint: disable=too-many-locals,too-many-branches,too-many-statements 176 | def check_patches(repo, patches, flags, use_apply, dry_run): 177 | """Gather a list of patches from the `adabot/patches` directory 178 | on the adabot repo. Clone the `repo` and run git apply --check 179 | to test wether it requires any of the gathered patches. 180 | 181 | When `use_apply` is true, any flags except `--apply` are passed 182 | through to the check call. This ensures that the check call is 183 | representative of the actual apply call. 
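    Returns a three-element list of counts for this repo:
    [applied, skipped, failed].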
184 | """ 185 | applied = 0 186 | skipped = 0 187 | failed = 0 188 | 189 | repo_directory = lib_directory + repo["name"] 190 | 191 | for patch in patches: 192 | try: 193 | os.chdir(lib_directory) 194 | except FileNotFoundError: 195 | os.mkdir(lib_directory) 196 | os.chdir(lib_directory) 197 | 198 | try: 199 | git.clone(repo["url"]) 200 | except sh.ErrorReturnCode_128 as err: 201 | if b"already exists" in err.stderr: 202 | pass 203 | else: 204 | raise RuntimeError(err.stderr) from None 205 | os.chdir(repo_directory) 206 | 207 | patch_filepath = patch_directory + patch 208 | 209 | try: 210 | check_flags = ["--check"] 211 | if use_apply: 212 | for flag in flags: 213 | if not flag in ("--apply", "--signoff"): 214 | check_flags.append(flag) 215 | git.apply(check_flags, patch_filepath) 216 | run_apply = True 217 | except sh.ErrorReturnCode_1 as err: 218 | run_apply = False 219 | if b"error" not in err.stderr or b"patch does not apply" in err.stderr: 220 | parse_err = err.stderr.decode() 221 | parse_err = parse_err[parse_err.rfind(":") + 1 : -1] 222 | print(" . Skipping {}:{}".format(repo["name"], parse_err)) 223 | skipped += 1 224 | else: 225 | failed += 1 226 | error_str = str(err.stderr, encoding="utf-8").replace("\n", " ") 227 | error_start = error_str.rfind("error:") + 7 228 | check_errors.append( 229 | dict( 230 | repo_name=repo["name"], 231 | patch_name=patch, 232 | error=error_str[error_start:], 233 | ) 234 | ) 235 | 236 | except sh.ErrorReturnCode as err: 237 | run_apply = False 238 | failed += 1 239 | error_str = str(err.stderr, encoding="utf-8").replace("\n", " ") 240 | error_start = error_str.rfind("error:") + 7 241 | check_errors.append( 242 | dict( 243 | repo_name=repo["name"], 244 | patch_name=patch, 245 | error=error_str[error_start:], 246 | ) 247 | ) 248 | 249 | if run_apply and not dry_run: 250 | result = apply_patch( 251 | repo_directory, patch_filepath, repo["name"], patch, flags, use_apply 252 | ) 253 | if result: 254 | applied += 1 255 | else: 256 | failed += 1 257 | elif run_apply and dry_run: 258 | applied += 1 259 | 260 | return [applied, skipped, failed] 261 | 262 | 263 | if __name__ == "__main__": 264 | cli_args = cli_parser.parse_args() 265 | if cli_args.run_local: 266 | if cli_args.dry_run or cli_args.list: 267 | pass 268 | else: 269 | raise RuntimeError( 270 | "'--local' can only be used in conjunction with" 271 | " '--dry-run' or '--list'." 272 | ) 273 | 274 | run_patches = get_patches(cli_args.run_local) 275 | cmd_flags = ["--signoff"] 276 | 277 | if cli_args.list: 278 | print("Available Patches:", run_patches) 279 | sys.exit() 280 | if cli_args.patch: 281 | if not cli_args.patch in run_patches: 282 | raise ValueError( 283 | "'{}' is not an available patchfile.".format(cli_args.patch) 284 | ) 285 | run_patches = [cli_args.patch] 286 | if cli_args.flags is not None: 287 | if not cli_args.patch: 288 | raise RuntimeError( 289 | "Must be used with a single patch. See help (-h) for usage." 290 | ) 291 | if "[-i]" in cli_args.flags: 292 | raise ValueError("Interactive Mode flag not allowed.") 293 | for flag_arg in cli_args.flags: 294 | if not flag_arg == "[--signoff]": 295 | cmd_flags.append(flag_arg.strip("[]")) 296 | if cli_args.use_apply: 297 | if not cli_args.patch: 298 | raise RuntimeError( 299 | "Must be used with a single patch. See help (-h) for usage." 300 | ) 301 | 302 | print(".... Beginning Patch Updates ....") 303 | print(".... Working directory:", working_directory) 304 | print(".... Library directory:", lib_directory) 305 | print(".... 
Patches directory:", patch_directory) 306 | 307 | check_errors = [] 308 | apply_errors = [] 309 | stats = [0, 0, 0] 310 | 311 | print(".... Deleting any previously cloned libraries") 312 | try: 313 | libs = os.listdir(path=lib_directory) 314 | for lib in libs: 315 | shutil.rmtree(lib_directory + lib) 316 | except FileNotFoundError: 317 | pass 318 | 319 | repos = get_repo_list() 320 | print(".... Running Patch Checks On", len(repos), "Repos ....") 321 | 322 | for repository in repos: 323 | results = check_patches( 324 | repository, 325 | run_patches, 326 | cmd_flags, 327 | cli_args.use_apply, 328 | cli_args.dry_run 329 | ) 330 | for k in range(3): 331 | stats[k] += results[k] 332 | 333 | print(".... Patch Updates Completed ....") 334 | print(".... Patches Applied:", stats[0]) 335 | print(".... Patches Skipped:", stats[1]) 336 | print(".... Patches Failed:", stats[2], "\n") 337 | print(".... Patch Check Failure Report ....") 338 | if len(check_errors) > 0: 339 | for error in check_errors: 340 | print( 341 | ">> Repo: {0}\tPatch: {1}\n Error: {2}".format( 342 | error["repo_name"], error["patch_name"], error["error"] 343 | ) 344 | ) 345 | else: 346 | print("No Failures") 347 | print("\n") 348 | print(".... Patch Apply Failure Report ....") 349 | if len(apply_errors) > 0: 350 | for error in apply_errors: 351 | print( 352 | ">> Repo: {0}\tPatch: {1}\n Error: {2}".format( 353 | error["repo_name"], error["patch_name"], error["error"] 354 | ) 355 | ) 356 | else: 357 | print("No Failures") 358 | -------------------------------------------------------------------------------- /adabot/circuitpython_library_release.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2023 Tim Cocks for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """ 6 | Check if a new release needs to be made, and if so, make it. 7 | """ 8 | import argparse 9 | import subprocess 10 | import logging 11 | from datetime import datetime 12 | import toml 13 | 14 | # Empty RELEASE_TITLE will prompt to ask for a title for each release. 15 | # Set a value here if you want to use the same string for the title of all releases 16 | config = {"RELEASE_TITLE": ""} 17 | 18 | release_date_format = "%Y-%m-%dT%H:%M:%SZ" 19 | commit_date_format = "%a %b %d %H:%M:%S %Y" 20 | 21 | VALID_MENU_CHOICES = ("1", "2", "3", "4", "") 22 | 23 | 24 | def make_release(new_tag, logger, test_run=False): 25 | """ 26 | Make the release 27 | """ 28 | # pylint: disable=line-too-long 29 | 30 | while config["RELEASE_TITLE"] == "": 31 | config["RELEASE_TITLE"] = input("Enter a Release Title: ") 32 | 33 | if not test_run: 34 | make_release_result = subprocess.getoutput( 35 | f"gh release create {new_tag} --generate-notes -t '{new_tag} - {config['RELEASE_TITLE']}'" 36 | ) 37 | 38 | if logger is not None: 39 | logger.info(make_release_result) 40 | else: 41 | print(make_release_result) 42 | else: 43 | print("would run: ") 44 | print( 45 | "gh release create {new_tag} -F release_notes.md -t '{new_tag} - {config['RELEASE_TITLE']}'" 46 | ) 47 | 48 | 49 | def get_pypi_name(): 50 | """ 51 | return the shorthand project name used for pypi, docs, etc. 
52 | """ 53 | data = toml.load("pyproject.toml") 54 | 55 | return data["project"]["name"].replace("adafruit-circuitpython-", "") 56 | 57 | 58 | def needs_new_release(logger): 59 | """ 60 | return true if there are commits newer than the latest release 61 | """ 62 | last_commit_time = subprocess.getoutput( 63 | " TZ=UTC0 git log -1 --date=local --format='%cd'" 64 | ) 65 | logger.info(f"last commit: {last_commit_time}") 66 | 67 | last_commit_date_obj = datetime.strptime(last_commit_time, commit_date_format) 68 | 69 | release_info = get_release_info() 70 | 71 | logger.info(f"Latest release is: {release_info['current_tag']}") 72 | logger.info(f"createdAt: {release_info['created_at']}") 73 | 74 | release_date_obj = datetime.strptime( 75 | release_info["created_at"], release_date_format 76 | ) 77 | return release_date_obj < last_commit_date_obj 78 | 79 | 80 | def bump_major(tag_symver): 81 | """ 82 | Returns a string with a new tag created by incrementing 83 | the major version of the given semantic version tag. 84 | """ 85 | tag_parts = tag_symver.split(".") 86 | tag_parts[0] = str(int(tag_parts[0]) + 1) 87 | tag_parts[1] = "0" 88 | tag_parts[2] = "0" 89 | return ".".join(tag_parts) 90 | 91 | 92 | def bump_minor(tag_symver): 93 | """ 94 | Returns a string with a new tag created by incrementing 95 | the minor version of the given semantic version tag. 96 | """ 97 | tag_parts = tag_symver.split(".") 98 | tag_parts[1] = str(int(tag_parts[1]) + 1) 99 | tag_parts[2] = "0" 100 | return ".".join(tag_parts) 101 | 102 | 103 | def bump_patch(tag_symver): 104 | """ 105 | Returns a string with a new tag created by incrementing 106 | the patch version of the given semantic version tag. 107 | """ 108 | tag_parts = tag_symver.split(".") 109 | tag_parts[-1] = str(int(tag_parts[-1]) + 1) 110 | return ".".join(tag_parts) 111 | 112 | 113 | def get_release_info(): 114 | """ 115 | return a dictionary of info about the latest release 116 | """ 117 | result = subprocess.getoutput("gh release list -L 1 | awk 2") 118 | createdAt = result.split("\t")[-1] 119 | tag = result.split("\t")[-2] 120 | return { 121 | "current_tag": tag, 122 | "new_tag_patch": bump_patch(tag), 123 | "new_tag_minor": bump_minor(tag), 124 | "new_tag_major": bump_major(tag), 125 | "created_at": createdAt, 126 | } 127 | 128 | 129 | def get_compare_url(tag_name, compare_to_tag_name="main"): 130 | """ 131 | Get the URL to the GitHub compare page for the latest release compared 132 | to current main. 
133 | """ 134 | remote_url = subprocess.getoutput("git ls-remote --get-url origin") 135 | if not remote_url.startswith("https"): 136 | remote_url = subprocess.getoutput("git ls-remote --get-url adafruit") 137 | 138 | if not remote_url.startswith("https"): 139 | return "Sorry, Unknown Remotes" 140 | 141 | compare_url = remote_url.replace( 142 | ".git", f"/compare/{tag_name}...{compare_to_tag_name}" 143 | ) 144 | return compare_url 145 | 146 | 147 | def main_cli(): 148 | """ 149 | Main CLI entry point 150 | """ 151 | logging.basicConfig( 152 | level=logging.INFO, 153 | format="%(asctime)s [%(levelname)s] %(message)s", 154 | handlers=[ 155 | logging.FileHandler("../../../automated_releaser.log"), 156 | logging.StreamHandler(), 157 | ], 158 | ) 159 | 160 | parser = argparse.ArgumentParser( 161 | prog="adabot.circuitpython_library_release", 162 | description="Create GitHub releases for CircuitPython Library projects if they " 163 | "contain commits newer than the most recent release.", 164 | ) 165 | parser.add_argument("-t", "--title") 166 | args = parser.parse_args() 167 | if args.title is not None: 168 | config["RELEASE_TITLE"] = args.title 169 | 170 | def menu_prompt(release_info): 171 | """ 172 | Prompt the user to ask which part of the symantic version should be 173 | incremented, or if the library release should be skipped. 174 | Returns the choice inputted by the user. 175 | """ 176 | print("This library needs a new release. Please select a choice:") 177 | print(f"Changes: {get_compare_url(release_info['current_tag'])}") 178 | print( 179 | f"1. *default* Bump Patch, new tag would be: {release_info['new_tag_patch']}" 180 | ) 181 | print(f"2. Bump Minor, new tag would be: {release_info['new_tag_minor']}") 182 | print(f"3. Bump Major, new tag would be: {release_info['new_tag_major']}") 183 | print("4. 
Skip releasing this library and go to next in the list") 184 | return input("Choice, enter blank for default: ") 185 | 186 | result = subprocess.getoutput("git checkout main") 187 | 188 | result = subprocess.getoutput("pwd") 189 | logging.info("Checking: %s", "/".join(result.split("/")[-3:])) 190 | 191 | if needs_new_release(logging): 192 | release_info = get_release_info() 193 | choice = menu_prompt(release_info) 194 | while choice not in VALID_MENU_CHOICES: 195 | logging.info("Error: Invalid Selection '%s'", choice) 196 | choice = menu_prompt(release_info) 197 | 198 | if choice in ("1", ""): 199 | logging.info( 200 | "Making a new release with tag: %s", release_info["new_tag_patch"] 201 | ) 202 | make_release(release_info["new_tag_patch"], logging) 203 | elif choice == "2": 204 | logging.info( 205 | "Making a new release with tag: %s", release_info["new_tag_minor"] 206 | ) 207 | make_release(release_info["new_tag_minor"], logging) 208 | elif choice == "3": 209 | logging.info( 210 | "Making a new release with tag: %s", release_info["new_tag_major"] 211 | ) 212 | make_release(release_info["new_tag_major"], logging) 213 | elif choice == "4": 214 | logging.info("Skipping release.") 215 | 216 | else: 217 | logging.info("No new commits since last release, skipping") 218 | 219 | 220 | if __name__ == "__main__": 221 | main_cli() 222 | -------------------------------------------------------------------------------- /adabot/github_requests.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Wrapper for GitHub requests.""" 6 | 7 | import types 8 | from base64 import b64encode 9 | import datetime 10 | import functools 11 | import logging 12 | import os 13 | import time 14 | import traceback 15 | 16 | import requests 17 | import requests_cache 18 | 19 | TIMEOUT = 60 20 | 21 | 22 | def setup_cache(expire_after=7200): 23 | """Sets up a cache for requests.""" 24 | requests_cache.install_cache( 25 | cache_name="github_cache", 26 | backend="sqlite", 27 | expire_after=expire_after, 28 | allowable_codes=(200, 404), 29 | ) 30 | 31 | 32 | def _fix_url(url): 33 | if url.startswith("/"): 34 | url = "https://api.github.com" + url 35 | return url 36 | 37 | 38 | def _fix_kwargs(kwargs): 39 | api_version = ( 40 | "application/vnd.github.scarlet-witch-preview+json;" 41 | "application/vnd.github.hellcat-preview+json" 42 | ) 43 | if "headers" in kwargs: 44 | if "Accept" in kwargs["headers"]: 45 | kwargs["headers"]["Accept"] += ";" + api_version 46 | else: 47 | kwargs["headers"]["Accept"] = api_version 48 | else: 49 | kwargs["headers"] = {"Accept": "application/vnd.github.hellcat-preview+json"} 50 | if "ADABOT_GITHUB_ACCESS_TOKEN" in os.environ and "auth" not in kwargs: 51 | user = os.environ.get("ADABOT_GITHUB_USER", "") 52 | access_token = os.environ["ADABOT_GITHUB_ACCESS_TOKEN"] 53 | basic_encoded = b64encode(str(user + ":" + access_token).encode()).decode() 54 | auth_header = "Basic {}".format(basic_encoded) 55 | 56 | kwargs["headers"]["Authorization"] = auth_header 57 | 58 | return kwargs 59 | 60 | 61 | def _safe_response_json(self): 62 | """ 63 | overridden response.json() function that will catch JSONDecodeError 64 | log it but try to continue on afterward. 
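Any access token present in the logged traceback is replaced with `[secure]`, and an empty dict is returned so callers can continue.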
65 | """ 66 | try: 67 | return self.original_json() 68 | except requests.exceptions.JSONDecodeError: 69 | exception_text = traceback.format_exc() 70 | if "ADABOT_GITHUB_ACCESS_TOKEN" in os.environ: 71 | exception_text = exception_text.replace( 72 | os.environ["ADABOT_GITHUB_ACCESS_TOKEN"], "[secure]" 73 | ) 74 | logging.warning("%s", exception_text) 75 | return {} 76 | 77 | 78 | def request(method, url, **kwargs): 79 | """Processes request for `url`.""" 80 | try: 81 | response = getattr(requests, method)( 82 | _fix_url(url), timeout=TIMEOUT, **_fix_kwargs(kwargs) 83 | ) 84 | from_cache = getattr(response, "from_cache", False) 85 | # If rate limit remaining is missing, then assume we're fine. Use a million to signify this 86 | # case. GitHub will be in the single thousands. 87 | remaining = int(response.headers.get("X-RateLimit-Remaining", 1000000)) 88 | logging.debug( 89 | "GET %s %s status=%s", 90 | url, 91 | f"{'(cache)' if from_cache else '(%d remaining)' % remaining}", 92 | response.status_code, 93 | ) 94 | except requests.RequestException: 95 | exception_text = traceback.format_exc() 96 | if "ADABOT_GITHUB_ACCESS_TOKEN" in os.environ: 97 | exception_text = exception_text.replace( 98 | os.environ["ADABOT_GITHUB_ACCESS_TOKEN"], "[secure]" 99 | ) 100 | logging.critical("%s", exception_text) 101 | raise RuntimeError( 102 | "See log for error text that has been sanitized for secrets" 103 | ) from None 104 | 105 | if not from_cache: 106 | if remaining % 100 == 0 or remaining < 20: 107 | logging.info("%d requests remaining this hour", remaining) 108 | if not from_cache and remaining <= 1: 109 | rate_limit_reset = datetime.datetime.fromtimestamp( 110 | int(response.headers["X-RateLimit-Reset"]) 111 | ) 112 | logging.warning( 113 | "GitHub API Rate Limit reached. Pausing until Rate Limit reset." 114 | ) 115 | # This datetime.now() is correct, *because* `fromtimestamp` above 116 | # converts the timestamp into local time, same as now(). This is 117 | # different than the sites that use GH_INTERFACE.get_rate_limit, in 118 | # which the rate limit is a UTC time, so it has to be compared to 119 | # utcnow. 120 | while datetime.datetime.now() < rate_limit_reset: 121 | logging.warning("Rate Limit will reset at: %s", rate_limit_reset) 122 | reset_diff = rate_limit_reset - datetime.datetime.now() 123 | 124 | # wait a full extra 60 seconds to avoid time collision 125 | logging.info("Sleeping %s seconds", reset_diff.seconds + 60) 126 | time.sleep(reset_diff.seconds + 60) 127 | 128 | response.original_json = response.json 129 | response.json = types.MethodType(_safe_response_json, response) 130 | return response 131 | 132 | 133 | get = functools.partial(request, "get") 134 | post = functools.partial(request, "post") 135 | put = functools.partial(request, "put") 136 | delete = functools.partial(request, "delete") 137 | patch = functools.partial(request, "patch") 138 | -------------------------------------------------------------------------------- /adabot/lib/assign_hacktober_label.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2019 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """An utility to automatically apply the 'hacktoberfest' label to open issues 6 | marked as 'good first issue', during DigitalOcean's/GitHub's Hacktoberfest 7 | event. 
8 | """ 9 | 10 | import argparse 11 | import datetime 12 | import requests 13 | 14 | from adabot import github_requests as gh_reqs, REQUESTS_TIMEOUT 15 | from adabot.lib import common_funcs 16 | 17 | cli_args = argparse.ArgumentParser(description="Hacktoberfest Label Assigner") 18 | cli_args.add_argument( 19 | "-r", 20 | "--remove-label", 21 | action="store_true", 22 | help="Option to remove Hacktoberfest labels, instead of adding them.", 23 | dest="remove_labels", 24 | ) 25 | cli_args.add_argument( 26 | "--dry-run", 27 | action="store_true", 28 | help="Option to remove Hacktoberfest labels, instead of adding them.", 29 | dest="dry_run", 30 | ) 31 | 32 | 33 | # Hacktoberfest Season 34 | # - lists are in [start, stop] format. 35 | # - tuples are in (month, day) format. 36 | _ADD_SEASON = [(9, 29), (10, 30)] 37 | _REMOVE_SEASON = [(11, 1), (11, 10)] 38 | 39 | 40 | def is_hacktober_season(): 41 | """Checks if the current day falls within either the add range (_ADD_SEASON) 42 | or the remove range (_REMOVE_SEASON). Returns boolean if within 43 | Hacktoberfest season, and which action to take. 44 | """ 45 | today = datetime.date.today() 46 | add_range = [datetime.date(today.year, *month_day) for month_day in _ADD_SEASON] 47 | remove_range = [ 48 | datetime.date(today.year, *month_day) for month_day in _REMOVE_SEASON 49 | ] 50 | if add_range[0] <= today <= add_range[1]: 51 | return True, "add" 52 | if remove_range[0] <= today <= remove_range[1]: 53 | return True, "remove" 54 | 55 | return False, None 56 | 57 | 58 | def get_open_issues(repo): 59 | """Retrieve all open issues for given repo.""" 60 | 61 | params = { 62 | "state": "open", 63 | } 64 | response = gh_reqs.get("/repos/" + repo["full_name"] + "/issues", params=params) 65 | if not response.ok: 66 | print(f"Failed to retrieve issues for '{repo['name']}'") 67 | return False 68 | 69 | issues = [] 70 | while response.ok: 71 | issues.extend( 72 | [issue for issue in response.json() if "pull_request" not in issue] 73 | ) 74 | 75 | if response.links.get("next"): 76 | response = requests.get( 77 | response.links["next"]["url"], timeout=REQUESTS_TIMEOUT 78 | ) 79 | else: 80 | break 81 | 82 | return issues 83 | 84 | 85 | def ensure_hacktober_label_exists(repo, dry_run=False): 86 | """Checks if the 'Hacktoberfest' label exists on the repo. 87 | If not, creates the label. 88 | """ 89 | response = gh_reqs.get(f"/repos/{repo['full_name']}/labels") 90 | if not response.ok: 91 | print(f"Failed to retrieve labels for '{repo['name']}'") 92 | return False 93 | 94 | repo_labels = [label["name"] for label in response.json()] 95 | 96 | hacktober_exists = {"Hacktoberfest", "hacktoberfest"} & set(repo_labels) 97 | if not hacktober_exists: 98 | params = { 99 | "name": "Hacktoberfest", 100 | "color": "f2b36f", 101 | "description": "DigitalOcean's Hacktoberfest", 102 | } 103 | if not dry_run: 104 | result = gh_reqs.post(f"/repos/{repo['full_name']}/labels", json=params) 105 | if not result.status_code == 201: 106 | print(f"Failed to create new Hacktoberfest label for: {repo['name']}") 107 | return False 108 | 109 | return True 110 | 111 | 112 | def assign_hacktoberfest(repo, issues=None, remove_labels=False, dry_run=False): 113 | """Gathers open issues on a repo, and assigns the 'Hacktoberfest' label 114 | to each issue if its not already assigned. 
115 | """ 116 | labels_changed = 0 117 | 118 | if not issues: 119 | issues = get_open_issues(repo) 120 | 121 | for issue in issues: 122 | update_issue = False 123 | label_names = [label["name"] for label in issue["labels"]] 124 | has_good_first = "good first issue" in label_names 125 | has_hacktober = {"Hacktoberfest", "hacktoberfest"} & set(label_names) 126 | 127 | if remove_labels: 128 | if has_hacktober: 129 | label_names = [ 130 | label for label in label_names if label not in has_hacktober 131 | ] 132 | update_issue = True 133 | else: 134 | if has_good_first and not has_hacktober: 135 | label_exists = ensure_hacktober_label_exists(repo, dry_run) 136 | if not label_exists: 137 | continue 138 | update_issue = True 139 | 140 | if update_issue: 141 | label_names.append("Hacktoberfest") 142 | params = {"labels": label_names} 143 | if not dry_run: 144 | result = gh_reqs.patch( 145 | f"/repos/{repo['full_name']}/issues/{str(issue['number'])}", 146 | json=params, 147 | ) 148 | 149 | if result.ok: 150 | labels_changed += 1 151 | else: 152 | # sadly, GitHub will only silently ignore labels that are 153 | # not added and return a 200. so this will most likely only 154 | # trigger on endpoint/connection failures. 155 | print(f"Failed to add Hacktoberfest label to: {issue['url']}") 156 | else: 157 | labels_changed += 1 158 | 159 | return labels_changed 160 | 161 | 162 | def process_hacktoberfest(repo, issues=None, remove_labels=False, dry_run=False): 163 | """Run hacktoberfest functions and return the result.""" 164 | result = assign_hacktoberfest(repo, issues, remove_labels, dry_run) 165 | return result 166 | 167 | 168 | if __name__ == "__main__": 169 | LABELS_ASSIGNED = 0 170 | args = cli_args.parse_args() 171 | 172 | if not args.remove_labels: 173 | print("Checking for open issues to assign the Hacktoberfest label to...") 174 | else: 175 | print("Checking for open issues to remove the Hacktoberfest label from...") 176 | 177 | repos = common_funcs.list_repos() 178 | for repository in repos: 179 | LABELS_ASSIGNED += process_hacktoberfest( 180 | repository, remove_labels=args.remove_labels, dry_run=args.dry_run 181 | ) 182 | 183 | if not args.remove_labels: 184 | print(f"Added the Hacktoberfest label to {LABELS_ASSIGNED} issues.") 185 | else: 186 | print(f"Removed the Hacktoberfest label from {LABELS_ASSIGNED} issues.") 187 | -------------------------------------------------------------------------------- /adabot/lib/blinka_funcs.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2019 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Common functions used with Adabot & Blinka interactions.""" 6 | 7 | from adabot import github_requests as gh_reqs 8 | 9 | 10 | def board_count(): 11 | """Retrieve the number of boards currently supported by Adafruit_Blinka, 12 | via the count of files in circuitpython-org/_blinka. 
13 | """ 14 | count = 0 15 | cirpy_org_url = "/repos/adafruit/circuitpython-org/contents/_blinka" 16 | response = gh_reqs.get(cirpy_org_url) 17 | if response.ok: 18 | response_json = response.json() 19 | count = len(response_json) 20 | 21 | return count 22 | -------------------------------------------------------------------------------- /adabot/lib/bundle_announcer.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2022 Alec Delaney, for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """ 6 | Checks for the latest releases in the Community bundle based 7 | on the automated release. 8 | 9 | * Author(s): Alec Delaney 10 | """ 11 | 12 | import datetime 13 | import logging 14 | import os 15 | import time 16 | from typing import Tuple, Set 17 | from typing_extensions import TypeAlias 18 | 19 | import github as pygithub 20 | import parse 21 | 22 | from adabot.lib.common_funcs import MAXIMUM_RATE_LIMIT_DELAY 23 | 24 | GH_INTERFACE = pygithub.Github(os.environ.get("ADABOT_GITHUB_ACCESS_TOKEN")) 25 | 26 | RepoResult: TypeAlias = Tuple[str, str] 27 | """(Submodule Name, Full Repo Name)""" 28 | 29 | 30 | def get_community_bundle_updates() -> Tuple[Set[RepoResult], Set[RepoResult]]: 31 | """Get new and updated libraries in the Community Bundle""" 32 | return get_bundle_updates("adafruit/CircuitPython_Community_Bundle") 33 | 34 | 35 | def get_adafruit_bundle_updates() -> Tuple[Set[RepoResult], Set[RepoResult]]: 36 | """Get new and updated libraries in the Adafruit Bundle""" 37 | return get_bundle_updates("adafruit/Adafruit_CircuitPython_Bundle") 38 | 39 | 40 | # pylint: disable=too-many-locals 41 | def get_bundle_updates(full_repo_name: str) -> Tuple[Set[RepoResult], Set[RepoResult]]: 42 | """ 43 | Get the updates to the Community Bundle. 
44 | 45 | Returns new and updated libraries 46 | """ 47 | while True: 48 | try: 49 | repository = GH_INTERFACE.get_repo(full_repo_name) 50 | seven_days_ago = datetime.datetime.now() - datetime.timedelta(days=7) 51 | recent_releases = [ 52 | release 53 | for release in repository.get_releases() 54 | if release.created_at > seven_days_ago 55 | ] 56 | new_libs = set() 57 | updated_libs = set() 58 | for recent_release in recent_releases: 59 | relevant_lines = [ 60 | line 61 | for line in recent_release.body.split("\n") 62 | if line.startswith("Updated libraries") 63 | or line.startswith("New libraries:") 64 | ] 65 | for relevant_line in relevant_lines: 66 | lib_components = [ 67 | x.strip(",") for x in relevant_line.split(" ")[2:] 68 | ] 69 | for lib in lib_components: 70 | comps = parse.parse("[{name:S}]({link_comp:S})", lib.strip()) 71 | link: str = parse.search( 72 | "{link:S}/releases", comps["link_comp"] 73 | )["link"] 74 | full_name = parse.search( 75 | "https://github.com/{full_name:S}", link 76 | )["full_name"] 77 | if relevant_line.startswith("Updated libraries"): 78 | updated_libs.add((full_name, link)) 79 | else: 80 | new_libs.add((full_name, link)) 81 | return (new_libs, updated_libs) 82 | 83 | except pygithub.RateLimitExceededException: 84 | core_rate_limit_reset = GH_INTERFACE.get_rate_limit().core.reset 85 | sleep_time = core_rate_limit_reset - datetime.datetime.utcnow() 86 | logging.warning("Rate Limit will reset at: %s", core_rate_limit_reset) 87 | time.sleep(min(sleep_time.seconds, MAXIMUM_RATE_LIMIT_DELAY)) 88 | continue 89 | except pygithub.GithubException: 90 | # Secrets may not be available or error occurred - just skip 91 | return (set(), set()) 92 | 93 | 94 | if __name__ == "__main__": 95 | adafruit_results = get_adafruit_bundle_updates() 96 | community_results = get_community_bundle_updates() 97 | for new_adafruit_lib in adafruit_results[0]: 98 | print(f"New libraries: {new_adafruit_lib[0]} { {new_adafruit_lib[1]} }") 99 | for updated_adafruit_lib in adafruit_results[1]: 100 | print( 101 | f"Updated libraries: {updated_adafruit_lib[0]} { {updated_adafruit_lib[1]} }" 102 | ) 103 | print("-----") 104 | for new_community_lib in community_results[0]: 105 | print(f"New libraries: {new_community_lib[0]} { {new_community_lib[1]} }") 106 | for updated_community_lib in community_results[1]: 107 | print( 108 | f"Updated libraries: {updated_community_lib[0]} { {updated_community_lib[1]} }" 109 | ) 110 | -------------------------------------------------------------------------------- /adabot/pypi_requests.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """ Helper for requests to pypi.org 6 | 7 | * Author(s): Michael McWethy 8 | """ 9 | 10 | import requests 11 | 12 | from adabot import REQUESTS_TIMEOUT 13 | 14 | 15 | def _fix_url(url): 16 | if url.startswith("/"): 17 | url = "https://pypi.org" + url 18 | return url 19 | 20 | 21 | def get(url, **kwargs): 22 | """Process a GET request from pypi.org""" 23 | return requests.get(_fix_url(url), timeout=REQUESTS_TIMEOUT, **kwargs) 24 | -------------------------------------------------------------------------------- /adabot/update_cp_org_libraries.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2019 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Adabot utility for updating circuitpython.org 
libraries info.""" 6 | 7 | # pylint: disable=redefined-outer-name 8 | 9 | import argparse 10 | import datetime 11 | import inspect 12 | import json 13 | import logging 14 | import re 15 | import sys 16 | 17 | from adabot.lib import common_funcs 18 | from adabot.lib import circuitpython_library_validators as cpy_vals 19 | from adabot import github_requests as gh_reqs 20 | from adabot import pypi_requests as pypi 21 | 22 | logger = logging.getLogger(__name__) 23 | ch = logging.StreamHandler(stream=sys.stdout) 24 | logging.basicConfig(level=logging.DEBUG, format="%(message)s", handlers=[ch]) 25 | 26 | 27 | DO_NOT_VALIDATE = [ 28 | "CircuitPython_Community_Bundle", 29 | "cookiecutter-adafruit-circuitpython", 30 | ] 31 | 32 | # Setup ArgumentParser 33 | cmd_line_parser = argparse.ArgumentParser( 34 | description="Adabot utility for updating circuitpython.org libraries info.", 35 | prog="Adabot circuitpython.org/libraries Updater", 36 | ) 37 | cmd_line_parser.add_argument( 38 | "-o", 39 | "--output_file", 40 | help="Output JSON file to the filename provided.", 41 | metavar="", 42 | dest="output_file", 43 | ) 44 | cmd_line_parser.add_argument( 45 | "--cache-http", 46 | help="Cache HTTP requests using requests_cache", 47 | action="store_true", 48 | default=False, 49 | ) 50 | cmd_line_parser.add_argument( 51 | "--cache-ttl", help="HTTP cache TTL", type=int, default=7200 52 | ) 53 | cmd_line_parser.add_argument( 54 | "--keep-repos", help="Keep repos between runs", action="store_true", default=False 55 | ) 56 | cmd_line_parser.add_argument( 57 | "--loglevel", help="Adjust the log level (default INFO)", type=str, default="INFO" 58 | ) 59 | 60 | sort_re = re.compile(r"(?<=\(Open\s)(.+)(?=\sdays)") 61 | 62 | 63 | def get_open_issues_and_prs(repo): 64 | """Retreive all of the open issues (minus pull requests) for the repo.""" 65 | open_issues = [] 66 | open_pull_requests = [] 67 | params = {"state": "open"} 68 | result = gh_reqs.get("/repos/adafruit/" + repo["name"] + "/issues", params=params) 69 | if not result.ok: 70 | return [], [] 71 | 72 | issues = result.json() 73 | for issue in issues: 74 | created = datetime.datetime.strptime(issue["created_at"], "%Y-%m-%dT%H:%M:%SZ") 75 | days_open = datetime.datetime.today() - created 76 | if days_open.days < 0: # opened earlier today 77 | days_open += datetime.timedelta(days=(days_open.days * -1)) 78 | 79 | issue_title = "{0} (Open {1} days)".format(issue["title"], days_open.days) 80 | if "pull_request" not in issue: # ignore pull requests 81 | issue_labels = ["None"] 82 | if len(issue["labels"]) != 0: 83 | issue_labels = [label["name"] for label in issue["labels"]] 84 | 85 | issue_dict = { 86 | "title": issue_title, 87 | "url": issue["html_url"], 88 | "labels": issue_labels, 89 | } 90 | 91 | open_issues.append(issue_dict) 92 | else: 93 | open_pull_requests.append({issue["html_url"]: issue_title}) 94 | 95 | return open_issues, open_pull_requests 96 | 97 | 98 | def get_contributors(repo): 99 | """Gather contributor information.""" 100 | contributors = [] 101 | reviewers = [] 102 | merged_pr_count = 0 103 | params = {"state": "closed", "sort": "updated", "direction": "desc"} 104 | result = gh_reqs.get("/repos/adafruit/" + repo["name"] + "/pulls", params=params) 105 | if result.ok: 106 | today_minus_seven = datetime.datetime.today() - datetime.timedelta(days=7) 107 | pull_requests = result.json() 108 | for pull_request in pull_requests: 109 | merged_at = datetime.datetime.min 110 | if "merged_at" in pull_request: 111 | if pull_request["merged_at"] is None: 112 | 
continue 113 | merged_at = datetime.datetime.strptime( 114 | pull_request["merged_at"], "%Y-%m-%dT%H:%M:%SZ" 115 | ) 116 | else: 117 | continue 118 | if merged_at < today_minus_seven: 119 | continue 120 | contributors.append(pull_request["user"]["login"]) 121 | merged_pr_count += 1 122 | 123 | # get reviewers (merged_by, and any others) 124 | single_pr = gh_reqs.get(pull_request["url"]) 125 | if not single_pr.ok: 126 | continue 127 | pr_info = single_pr.json() 128 | reviewers.append(pr_info["merged_by"]["login"]) 129 | pr_reviews = gh_reqs.get(str(pr_info["url"]) + "/reviews") 130 | if not pr_reviews.ok: 131 | continue 132 | for review in pr_reviews.json(): 133 | if review["state"].lower() == "approved": 134 | reviewers.append(review["user"]["login"]) 135 | 136 | return contributors, reviewers, merged_pr_count 137 | 138 | 139 | # pylint: disable=too-many-locals,too-many-branches,too-many-statements 140 | def main( 141 | loglevel="ERROR", 142 | keep_repos=False, 143 | cache_http=False, 144 | cache_ttl=7200, 145 | output_file=None, 146 | ): 147 | """Main""" 148 | logger.setLevel(loglevel) 149 | logger.info("Running circuitpython.org/libraries updater...") 150 | 151 | run_time = datetime.datetime.now() 152 | 153 | logger.info("Run Date: %s", run_time.strftime("%d %B %Y, %I:%M%p")) 154 | 155 | if output_file: 156 | logger.info(" - Report output will be saved to: %s", output_file) 157 | file_handler = logging.FileHandler(output_file) 158 | logger.addHandler(file_handler) 159 | 160 | if cache_http: 161 | cpy_vals.gh_reqs.setup_cache(cache_ttl) 162 | 163 | repos = common_funcs.list_repos( 164 | include_repos=( 165 | "CircuitPython_Community_Bundle", 166 | "cookiecutter-adafruit-circuitpython", 167 | ) 168 | ) 169 | 170 | new_libs = {} 171 | updated_libs = {} 172 | open_issues_by_repo = {} 173 | open_prs_by_repo = {} 174 | contributors = set() 175 | reviewers = set() 176 | merged_pr_count_total = 0 177 | repos_by_error = {} 178 | 179 | default_validators = [ 180 | vals[1] 181 | for vals in inspect.getmembers(cpy_vals.LibraryValidator) 182 | if vals[0].startswith("validate") 183 | ] 184 | bundle_submodules = common_funcs.get_bundle_submodules() 185 | 186 | latest_pylint = "" 187 | pylint_info = pypi.get("/pypi/pylint/json") 188 | if pylint_info and pylint_info.ok: 189 | latest_pylint = pylint_info.json()["info"]["version"] 190 | 191 | validator = cpy_vals.LibraryValidator( 192 | default_validators, 193 | bundle_submodules, 194 | latest_pylint, 195 | keep_repos=keep_repos, 196 | ) 197 | 198 | for repo in repos: 199 | if ( 200 | repo["name"] in cpy_vals.BUNDLE_IGNORE_LIST 201 | or repo["name"] == "circuitpython" 202 | ): 203 | continue 204 | repo_name = repo["name"] 205 | 206 | # get a list of new & updated libraries for the last week 207 | check_releases = common_funcs.is_new_or_updated(repo) 208 | if check_releases == "new": 209 | new_libs[repo_name] = repo["html_url"] 210 | elif check_releases == "updated": 211 | updated_libs[repo_name] = repo["html_url"] 212 | 213 | # get a list of open issues and pull requests 214 | check_issues, check_prs = get_open_issues_and_prs(repo) 215 | if check_issues: 216 | open_issues_by_repo[repo_name] = check_issues 217 | if check_prs: 218 | open_prs_by_repo[repo_name] = check_prs 219 | 220 | # get the contributors and reviewers for the last week 221 | get_contribs, get_revs, get_merge_count = get_contributors(repo) 222 | if get_contribs: 223 | contributors.update(get_contribs) 224 | if get_revs: 225 | reviewers.update(get_revs) 226 | merged_pr_count_total += 
get_merge_count 227 | 228 | if repo_name in DO_NOT_VALIDATE: 229 | continue 230 | 231 | # run repo validators to check for infrastructure errors 232 | errors = [] 233 | try: 234 | errors = validator.run_repo_validation(repo) 235 | except Exception as err: # pylint: disable=broad-except 236 | logging.exception("Unhandled exception %s", str(err)) 237 | errors.extend([cpy_vals.ERROR_OUTPUT_HANDLER]) 238 | for error in errors: 239 | if not isinstance(error, tuple): 240 | # check for an error occurring in the validator module 241 | if error == cpy_vals.ERROR_OUTPUT_HANDLER: 242 | # print(errors, "repo output handler error:", validator.output_file_data) 243 | logging.error(", ".join(validator.output_file_data)) 244 | validator.output_file_data.clear() 245 | if error not in repos_by_error: 246 | repos_by_error[error] = [] 247 | repos_by_error[error].append(repo["html_url"]) 248 | else: 249 | if error[0] not in repos_by_error: 250 | repos_by_error[error[0]] = [] 251 | repos_by_error[error[0]].append(f"{repo['html_url']} ({error[1]} days)") 252 | 253 | # assemble the JSON data 254 | build_json = { 255 | "updated_at": run_time.strftime("%Y-%m-%dT%H:%M:%SZ"), 256 | "contributors": sorted(contributors, key=str.lower), 257 | "reviewers": sorted(reviewers, key=str.lower), 258 | "merged_pr_count": str(merged_pr_count_total), 259 | "library_updates": { 260 | "new": {key: new_libs[key] for key in sorted(new_libs, key=str.lower)}, 261 | "updated": { 262 | key: updated_libs[key] for key in sorted(updated_libs, key=str.lower) 263 | }, 264 | }, 265 | "open_issues": { 266 | key: open_issues_by_repo[key] 267 | for key in sorted(open_issues_by_repo, key=str.lower) 268 | }, 269 | "pull_requests": { 270 | key: open_prs_by_repo[key] 271 | for key in sorted(open_prs_by_repo, key=str.lower) 272 | }, 273 | "repo_infrastructure_errors": { 274 | key: repos_by_error[key] for key in sorted(repos_by_error, key=str.lower) 275 | }, 276 | } 277 | 278 | logger.info("%s", json.dumps(build_json, indent=2)) 279 | 280 | 281 | if __name__ == "__main__": 282 | cmd_line_args = cmd_line_parser.parse_args() 283 | main( 284 | loglevel=cmd_line_args.loglevel, 285 | keep_repos=cmd_line_args.keep_repos, 286 | cache_http=cmd_line_args.cache_http, 287 | cache_ttl=cmd_line_args.cache_ttl, 288 | output_file=cmd_line_args.output_file, 289 | ) 290 | -------------------------------------------------------------------------------- /patches/0001-Added-help-text-and-problem-matcher.patch: -------------------------------------------------------------------------------- 1 | From 3860976e511f1407840c914a8c550ac154716cf9 Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Wed, 19 May 2021 13:35:18 -0400 4 | Subject: [PATCH] Added help text and problem matcher 5 | 6 | --- 7 | .github/workflows/build.yml | 2 ++ 8 | .github/workflows/failure-help-text.yml | 19 +++++++++++++++++++ 9 | 2 files changed, 21 insertions(+) 10 | create mode 100644 .github/workflows/failure-help-text.yml 11 | 12 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 13 | index 3baf502..ce66461 100644 14 | --- a/.github/workflows/build.yml 15 | +++ b/.github/workflows/build.yml 16 | @@ -49,0 +50,2 @@ jobs: 17 | + - name: Setup problem matchers 18 | + uses: adafruit/circuitpython-action-library-ci-problem-matchers@v1 19 | diff --git a/.github/workflows/failure-help-text.yml b/.github/workflows/failure-help-text.yml 20 | new file mode 100644 21 | index 0000000..0b1194f 22 | --- /dev/null 23 | +++ b/.github/workflows/failure-help-text.yml 24 | @@ -0,0 +1,19 @@ 25 | 
+# SPDX-FileCopyrightText: 2021 Scott Shawcroft for Adafruit Industries 26 | +# 27 | +# SPDX-License-Identifier: MIT 28 | + 29 | +name: Failure help text 30 | + 31 | +on: 32 | + workflow_run: 33 | + workflows: ["Build CI"] 34 | + types: 35 | + - completed 36 | + 37 | +jobs: 38 | + post-help: 39 | + runs-on: ubuntu-latest 40 | + if: ${{ github.event.workflow_run.conclusion == 'failure' && github.event.workflow_run.event == 'pull_request' }} 41 | + steps: 42 | + - name: Post comment to help 43 | + uses: adafruit/circuitpython-action-library-ci-failed@v1 44 | -- 45 | 2.25.1 46 | 47 | -------------------------------------------------------------------------------- /patches/0001-Added-pre-commit-and-SPDX-copyright.patch: -------------------------------------------------------------------------------- 1 | From 13bca4df73f59b91f7d043d4a18a05718cc0e458 Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Mon, 11 Jan 2021 15:06:44 -0500 4 | Subject: [PATCH] Added pre-commit and SPDX copyright 5 | 6 | --- 7 | .github/workflows/build.yml | 28 ++++++++++++++++++++++++---- 8 | .github/workflows/release.yml | 4 ++++ 9 | 2 files changed, 28 insertions(+), 4 deletions(-) 10 | 11 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 12 | index b6977a9..59baa53 100644 13 | --- a/.github/workflows/build.yml 14 | +++ b/.github/workflows/build.yml 15 | @@ -1,3 +1,7 @@ 16 | +# SPDX-FileCopyrightText: 2017 Scott Shawcroft, written for Adafruit Industries 17 | +# 18 | +# SPDX-License-Identifier: MIT 19 | + 20 | name: Build CI 21 | 22 | on: [pull_request, push] 23 | @@ -38,20 +42,36 @@ jobs: 24 | # (e.g. - apt-get: gettext, etc; pip: circuitpython-build-tools, requirements.txt; etc.) 25 | run: | 26 | source actions-ci/install.sh 27 | - - name: Pip install pylint, black, & Sphinx 28 | + - name: Pip install pylint, Sphinx, pre-commit 29 | run: | 30 | - pip install --force-reinstall pylint black==19.10b0 Sphinx sphinx-rtd-theme 31 | + pip install --force-reinstall pylint Sphinx sphinx-rtd-theme pre-commit 32 | - name: Library version 33 | run: git describe --dirty --always --tags 34 | - - name: Check formatting 35 | + - name: Pre-commit hooks 36 | run: | 37 | - black --check --target-version=py35 . 38 | + pre-commit run --all-files 39 | - name: PyLint 40 | run: | 41 | pylint $( find . -path './adafruit*.py' ) 42 | ([[ ! -d "examples" ]] || pylint --disable=missing-docstring,invalid-name,bad-whitespace $( find . -path "./examples/*.py" )) 43 | - name: Build assets 44 | run: circuitpython-build-bundles --filename_prefix ${{ steps.repo-name.outputs.repo-name }} --library_location . 45 | + - name: Archive bundles 46 | + uses: actions/upload-artifact@v2 47 | + with: 48 | + name: bundles 49 | + path: ${{ github.workspace }}/bundles/ 50 | - name: Build docs 51 | working-directory: docs 52 | run: sphinx-build -E -W -b html . _build/html 53 | + - name: Check For setup.py 54 | + id: need-pypi 55 | + run: | 56 | + echo ::set-output name=setup-py::$( find . 
-wholename './setup.py' ) 57 | + - name: Build Python package 58 | + if: contains(steps.need-pypi.outputs.setup-py, 'setup.py') 59 | + run: | 60 | + pip install --upgrade setuptools wheel twine readme_renderer testresources 61 | + python setup.py sdist 62 | + python setup.py bdist_wheel --universal 63 | + twine check dist/* 64 | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml 65 | index 18efb9c..6d0015a 100644 66 | --- a/.github/workflows/release.yml 67 | +++ b/.github/workflows/release.yml 68 | @@ -1,3 +1,7 @@ 69 | +# SPDX-FileCopyrightText: 2017 Scott Shawcroft, written for Adafruit Industries 70 | +# 71 | +# SPDX-License-Identifier: MIT 72 | + 73 | name: Release Actions 74 | 75 | on: 76 | -- 77 | 2.25.1 78 | 79 | -------------------------------------------------------------------------------- /patches/0001-Added-pre-commit-config-file.patch: -------------------------------------------------------------------------------- 1 | From f61829b736156f06e88125a150428bd11aa146df Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Mon, 11 Jan 2021 16:06:47 -0500 4 | Subject: [PATCH] Added pre-commit-config file 5 | 6 | --- 7 | .pre-commit-config.yaml | 19 +++++++++++++++++++ 8 | 1 file changed, 19 insertions(+) 9 | create mode 100644 .pre-commit-config.yaml 10 | 11 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 12 | new file mode 100644 13 | index 0000000..aab5f1c 14 | --- /dev/null 15 | +++ b/.pre-commit-config.yaml 16 | @@ -0,0 +1,19 @@ 17 | +# SPDX-FileCopyrightText: 2020 Diego Elio Pettenò 18 | +# 19 | +# SPDX-License-Identifier: Unlicense 20 | + 21 | +repos: 22 | +- repo: https://github.com/python/black 23 | + rev: stable 24 | + hooks: 25 | + - id: black 26 | +- repo: https://github.com/fsfe/reuse-tool 27 | + rev: latest 28 | + hooks: 29 | + - id: reuse 30 | +- repo: https://github.com/pre-commit/pre-commit-hooks 31 | + rev: v2.3.0 32 | + hooks: 33 | + - id: check-yaml 34 | + - id: end-of-file-fixer 35 | + - id: trailing-whitespace 36 | -- 37 | 2.25.1 38 | 39 | -------------------------------------------------------------------------------- /patches/0001-Added-pull-request-template.patch: -------------------------------------------------------------------------------- 1 | From 763698dd1677b2ef3e2d269767368589bf416c06 Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Wed, 19 May 2021 13:32:42 -0400 4 | Subject: [PATCH] Added pull request template 5 | 6 | --- 7 | .../adafruit_circuitpython_pr.md | 13 +++++++++++++ 8 | 1 file changed, 13 insertions(+) 9 | create mode 100644 .github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md 10 | 11 | diff --git a/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md b/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md 12 | new file mode 100644 13 | index 0000000..71ef8f8 14 | --- /dev/null 15 | +++ b/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md 16 | @@ -0,0 +1,13 @@ 17 | +# SPDX-FileCopyrightText: 2021 Adafruit Industries 18 | +# 19 | +# SPDX-License-Identifier: MIT 20 | + 21 | +Thank you for contributing! Before you submit a pull request, please read the following. 
22 | + 23 | +Make sure any changes you're submitting are in line with the CircuitPython Design Guide, available here: https://circuitpython.readthedocs.io/en/latest/docs/design_guide.html 24 | + 25 | +If your changes are to documentation, please verify that the documentation builds locally by following the steps found here: https://adafru.it/build-docs 26 | + 27 | +Before submitting the pull request, make sure you've run Pylint and Black locally on your code. You can do this manually or using pre-commit. Instructions are available here: https://adafru.it/check-your-code 28 | + 29 | +Please remove all of this text before submitting. Include an explanation or list of changes included in your PR, as well as, if applicable, a link to any related issues. 30 | -- 31 | 2.25.1 32 | 33 | -------------------------------------------------------------------------------- /patches/0001-Added-pylint-disable-for-f-strings-in-tests-director.patch: -------------------------------------------------------------------------------- 1 | From e3fe3ad633702ac996e32516b9a4d099e0ca3c3b Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Tue, 28 Sep 2021 14:56:20 -0400 4 | Subject: [PATCH] Added pylint disable for f-strings in tests directory 5 | 6 | --- 7 | .pre-commit-config.yaml | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 11 | index 8690c22..43636ac 100644 12 | --- a/.pre-commit-config.yaml 13 | +++ b/.pre-commit-config.yaml 14 | @@ -38,5 +38,5 @@ repos: 15 | name: pylint (tests code) 16 | description: Run pylint rules on "tests/*.py" files 17 | entry: /usr/bin/env bash -c 18 | - args: ['([[ ! -d "tests" ]] || for test in $(find . -path "./tests/*.py"); do pylint --disable=missing-docstring $test; done)'] 19 | + args: ['([[ ! -d "tests" ]] || for test in $(find . 
-path "./tests/*.py"); do pylint --disable=missing-docstring,consider-using-f-string $test; done)'] 20 | language: system 21 | -- 22 | 2.25.1 23 | 24 | -------------------------------------------------------------------------------- /patches/0001-Disabled-unspecified-encoding-pylint-check.patch: -------------------------------------------------------------------------------- 1 | From 70552ea19bdb5a06550d429c8e28b54770ce7548 Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Fri, 5 Nov 2021 14:49:30 -0400 4 | Subject: [PATCH] Disabled unspecified-encoding pylint check 5 | 6 | --- 7 | .pylintrc | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/.pylintrc b/.pylintrc 11 | index e78bad2..cfd1c41 100644 12 | --- a/.pylintrc 13 | +++ b/.pylintrc 14 | @@ -55,7 +55,7 @@ confidence= 15 | # no Warning level messages displayed, use"--disable=all --enable=classes 16 | # --disable=W" 17 | # disable=import-error,print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call 18 | -disable=print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call,import-error,bad-continuation 19 | 
+disable=print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call,import-error,bad-continuation,unspecified-encoding 20 | 21 | # Enable the message, report, category or checker with the given id(s). You can 22 | # either give multiple identifier separated by comma (,) or put this option 23 | -- 24 | 2.25.1 25 | 26 | -------------------------------------------------------------------------------- /patches/0001-First-gitignore-patch.patch: -------------------------------------------------------------------------------- 1 | From 2a8657e9851b5916cf1733d05b9c47f818570832 Mon Sep 17 00:00:00 2001 2 | From: evaherrada 3 | Date: Thu, 21 Apr 2022 15:00:27 -0400 4 | Subject: [PATCH] Updated gitignore 5 | 6 | --- 7 | .gitignore | 48 ++++++++++++++++++++++++++++++++++++++++-------- 8 | 1 file changed, 40 insertions(+), 8 deletions(-) 9 | 10 | diff --git a/.gitignore b/.gitignore 11 | index 9647e71..544ec4a 100644 12 | --- a/.gitignore 13 | +++ b/.gitignore 14 | @@ -1,15 +1,47 @@ 15 | -# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries 16 | +# SPDX-FileCopyrightText: 2022 Kattni Rembor, written for Adafruit Industries 17 | # 18 | -# SPDX-License-Identifier: Unlicense 19 | +# SPDX-License-Identifier: MIT 20 | 21 | +# Do not include files and directories created by your personal work environment, such as the IDE 22 | +# you use, except for those already listed here. Pull requests including changes to this file will 23 | +# not be accepted. 24 | + 25 | +# This .gitignore file contains rules for files generated by working with CircuitPython libraries, 26 | +# including building Sphinx, testing with pip, and creating a virual environment, as well as the 27 | +# MacOS and IDE-specific files generated by using MacOS in general, or the PyCharm or VSCode IDEs. 28 | + 29 | +# If you find that there are files being generated on your machine that should not be included in 30 | +# your git commit, you should create a .gitignore_global file on your computer to include the 31 | +# files created by your personal setup. To do so, follow the two steps below. 32 | + 33 | +# First, create a file called .gitignore_global somewhere convenient for you, and add rules for 34 | +# the files you want to exclude from git commits. 
35 | + 36 | +# Second, configure Git to use the exclude file for all Git repositories by running the 37 | +# following via commandline, replacing "path/to/your/" with the actual path to your newly created 38 | +# .gitignore_global file: 39 | +# git config --global core.excludesfile path/to/your/.gitignore_global 40 | + 41 | +# CircuitPython-specific files 42 | *.mpy 43 | -.idea 44 | + 45 | +# Python-specific files 46 | __pycache__ 47 | -_build 48 | *.pyc 49 | + 50 | +# Sphinx build-specific files 51 | +_build 52 | + 53 | +# This file results from running `pip -e install .` in a local repository 54 | +*.egg-info 55 | + 56 | +# Virtual environment-specific files 57 | .env 58 | -bundles 59 | + 60 | +# MacOS-specific files 61 | *.DS_Store 62 | -.eggs 63 | -dist 64 | -**/*.egg-info 65 | + 66 | +# IDE-specific files 67 | +.idea 68 | +.vscode 69 | +*~ 70 | -- 71 | 2.25.1 72 | 73 | -------------------------------------------------------------------------------- /patches/0001-First-part-of-patch.patch: -------------------------------------------------------------------------------- 1 | From efe38badfd18297988c9c67be8ac1108d150a4ca Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Thu, 13 Jan 2022 16:27:30 -0500 4 | Subject: [PATCH] First part of patch 5 | 6 | --- 7 | .../PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md | 2 +- 8 | .github/workflows/build.yml | 6 +++--- 9 | .github/workflows/release.yml | 8 ++++---- 10 | .readthedocs.yaml | 2 +- 11 | 4 files changed, 9 insertions(+), 9 deletions(-) 12 | 13 | diff --git a/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md b/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md 14 | index 71ef8f8..8de294e 100644 15 | --- a/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md 16 | +++ b/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md 17 | @@ -4,7 +4,7 @@ 18 | 19 | Thank you for contributing! Before you submit a pull request, please read the following. 
20 | 21 | -Make sure any changes you're submitting are in line with the CircuitPython Design Guide, available here: https://circuitpython.readthedocs.io/en/latest/docs/design_guide.html 22 | +Make sure any changes you're submitting are in line with the CircuitPython Design Guide, available here: https://docs.circuitpython.org/en/latest/docs/design_guide.html 23 | 24 | If your changes are to documentation, please verify that the documentation builds locally by following the steps found here: https://adafru.it/build-docs 25 | 26 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 27 | index ca35544..474520d 100644 28 | --- a/.github/workflows/build.yml 29 | +++ b/.github/workflows/build.yml 30 | @@ -22,10 +22,10 @@ jobs: 31 | awk -F '\/' '{ print tolower($2) }' | 32 | tr '_' '-' 33 | ) 34 | - - name: Set up Python 3.7 35 | - uses: actions/setup-python@v1 36 | + - name: Set up Python 3.x 37 | + uses: actions/setup-python@v2 38 | with: 39 | - python-version: 3.7 40 | + python-version: "3.x" 41 | - name: Versions 42 | run: | 43 | python3 --version 44 | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml 45 | index 6d0015a..a65e5de 100644 46 | --- a/.github/workflows/release.yml 47 | +++ b/.github/workflows/release.yml 48 | @@ -24,10 +24,10 @@ jobs: 49 | awk -F '\/' '{ print tolower($2) }' | 50 | tr '_' '-' 51 | ) 52 | - - name: Set up Python 3.6 53 | - uses: actions/setup-python@v1 54 | + - name: Set up Python 3.x 55 | + uses: actions/setup-python@v2 56 | with: 57 | - python-version: 3.6 58 | + python-version: "3.x" 59 | - name: Versions 60 | run: | 61 | python3 --version 62 | @@ -67,7 +67,7 @@ jobs: 63 | echo ::set-output name=setup-py::$( find . -wholename './setup.py' ) 64 | - name: Set up Python 65 | if: contains(steps.need-pypi.outputs.setup-py, 'setup.py') 66 | - uses: actions/setup-python@v1 67 | + uses: actions/setup-python@v2 68 | with: 69 | python-version: '3.x' 70 | - name: Install dependencies 71 | diff --git a/.readthedocs.yaml b/.readthedocs.yaml 72 | index 1335112..f8b2891 100644 73 | --- a/.readthedocs.yaml 74 | +++ b/.readthedocs.yaml 75 | @@ -9,7 +9,7 @@ 76 | version: 2 77 | 78 | python: 79 | - version: "3.7" 80 | + version: "3.x" 81 | install: 82 | - requirements: docs/requirements.txt 83 | - requirements: requirements.txt 84 | -- 85 | 2.25.1 86 | 87 | -------------------------------------------------------------------------------- /patches/0001-Fixed-readthedocs-build.patch: -------------------------------------------------------------------------------- 1 | From 609b9a5243fbbc6e0f6014d4aeaa902a1d2ecfc9 Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Mon, 14 Feb 2022 15:35:02 -0500 4 | Subject: [PATCH] Fixed readthedocs build 5 | 6 | --- 7 | .readthedocs.yaml | 6 +++++- 8 | 1 file changed, 5 insertions(+), 1 deletion(-) 9 | 10 | diff --git a/.readthedocs.yaml b/.readthedocs.yaml 11 | index f8b2891..33c2a61 100644 12 | --- a/.readthedocs.yaml 13 | +++ b/.readthedocs.yaml 14 | @@ -8,8 +8,12 @@ 15 | # Required 16 | version: 2 17 | 18 | +build: 19 | + os: ubuntu-20.04 20 | + tools: 21 | + python: "3" 22 | + 23 | python: 24 | - version: "3.x" 25 | install: 26 | - requirements: docs/requirements.txt 27 | - requirements: requirements.txt 28 | -- 29 | 2.25.1 30 | 31 | -------------------------------------------------------------------------------- /patches/0001-Globally-disabled-consider-using-f-string-pylint-che.patch: -------------------------------------------------------------------------------- 1 | From 
351ab4ed57292ee4f906fa7ffc20d98037cd88f4 Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Thu, 23 Sep 2021 17:52:55 -0400 4 | Subject: [PATCH] Globally disabled consider-using-f-string pylint check 5 | 6 | --- 7 | .pre-commit-config.yaml | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 11 | index 354c761..8810708 100644 12 | --- a/.pre-commit-config.yaml 13 | +++ b/.pre-commit-config.yaml 14 | @@ -30,5 +30,5 @@ repos: 15 | name: pylint (examples code) 16 | description: Run pylint rules on "examples/*.py" files 17 | entry: /usr/bin/env bash -c 18 | - args: ['([[ ! -d "examples" ]] || for example in $(find . -path "./examples/*.py"); do pylint --disable=missing-docstring,invalid-name $example; done)'] 19 | + args: ['([[ ! -d "examples" ]] || for example in $(find . -path "./examples/*.py"); do pylint --disable=missing-docstring,invalid-name,consider-using-f-string $example; done)'] 20 | language: system 21 | -- 22 | 2.25.1 23 | 24 | -------------------------------------------------------------------------------- /patches/0001-Hardcoded-Black-and-REUSE-versions.patch: -------------------------------------------------------------------------------- 1 | From 61834a16fa143657a84b684ec6fe207b9894403a Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Wed, 3 Feb 2021 16:38:51 -0500 4 | Subject: [PATCH] Hardcoded Black and REUSE versions 5 | 6 | --- 7 | .pre-commit-config.yaml | 4 ++-- 8 | 1 file changed, 2 insertions(+), 2 deletions(-) 9 | 10 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 11 | index aab5f1c..07f886c 100644 12 | --- a/.pre-commit-config.yaml 13 | +++ b/.pre-commit-config.yaml 14 | @@ -4,11 +4,11 @@ 15 | 16 | repos: 17 | - repo: https://github.com/python/black 18 | - rev: stable 19 | + rev: 20.8b1 20 | hooks: 21 | - id: black 22 | - repo: https://github.com/fsfe/reuse-tool 23 | - rev: latest 24 | + rev: v0.12.1 25 | hooks: 26 | - id: reuse 27 | - repo: https://github.com/pre-commit/pre-commit-hooks 28 | -- 29 | 2.25.1 30 | 31 | -------------------------------------------------------------------------------- /patches/0001-Increase-duplicate-code-check-threshold.patch: -------------------------------------------------------------------------------- 1 | From 980d4f141f960bfab8a650c842eb763f429a5c73 Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Fri, 19 Mar 2021 12:55:03 -0400 4 | Subject: [PATCH] Increase duplicate code check threshold 5 | 6 | --- 7 | .pylintrc | 5 ++--- 8 | 1 file changed, 2 insertions(+), 3 deletions(-) 9 | 10 | diff --git a/.pylintrc b/.pylintrc 11 | index 3c07cc6..9e69bd0 100644 12 | --- a/.pylintrc 13 | +++ b/.pylintrc 14 | @@ -25,2 +25 @@ ignore-patterns= 15 | -# jobs=1 16 | -jobs=2 17 | +jobs=1 18 | @@ -256 +255 @@ ignore-imports=yes 19 | -min-similarity-lines=4 20 | +min-similarity-lines=12 21 | -- 22 | 2.25.1 23 | 24 | -------------------------------------------------------------------------------- /patches/0001-Moved-CI-to-Python-3.7.patch: -------------------------------------------------------------------------------- 1 | From f2d8d960f1713c8e161b78826a52ef79816c7a19 Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Mon, 24 May 2021 09:54:31 -0400 4 | Subject: [PATCH] Moved CI to Python 3.7 5 | 6 | --- 7 | .github/workflows/build.yml | 4 ++-- 8 | 1 file changed, 2 insertions(+), 2 deletions(-) 9 | 10 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 11 | index 0ab7182..c4c975d 100644 12 | --- 
a/.github/workflows/build.yml 13 | +++ b/.github/workflows/build.yml 14 | @@ -22,10 +22,10 @@ jobs: 15 | awk -F '\/' '{ print tolower($2) }' | 16 | tr '_' '-' 17 | ) 18 | - - name: Set up Python 3.6 19 | + - name: Set up Python 3.7 20 | uses: actions/setup-python@v1 21 | with: 22 | - python-version: 3.6 23 | + python-version: 3.7 24 | - name: Versions 25 | run: | 26 | python3 --version 27 | -- 28 | 2.25.1 29 | 30 | -------------------------------------------------------------------------------- /patches/0001-Pylint,-pre-commit,-readthedocs-patch.patch: -------------------------------------------------------------------------------- 1 | From 45a494ad6b93623b42ac6c96ff261be52ea37537 Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Wed, 3 Nov 2021 14:40:16 -0400 4 | Subject: PATCH Pylint and readthedocs patch test 5 | 6 | --- 7 | .github/workflows/build.yml | 4 ++-- 8 | .pre-commit-config.yaml | 26 +++++++++++++++++--------- 9 | .pylintrc | 2 +- 10 | .readthedocs.yml | 2 +- 11 | docs/requirements.txt | 5 +++++ 12 | 5 files changed, 26 insertions(+), 13 deletions(-) 13 | create mode 100644 docs/requirements.txt 14 | 15 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 16 | index c4c975d..ca35544 100644 17 | --- a/.github/workflows/build.yml 18 | +++ b/.github/workflows/build.yml 19 | @@ -42,9 +42,9 @@ jobs: 20 | # (e.g. - apt-get: gettext, etc; pip: circuitpython-build-tools, requirements.txt; etc.) 21 | run: | 22 | source actions-ci/install.sh 23 | - - name: Pip install pylint, Sphinx, pre-commit 24 | + - name: Pip install Sphinx, pre-commit 25 | run: | 26 | - pip install --force-reinstall pylint Sphinx sphinx-rtd-theme pre-commit 27 | + pip install --force-reinstall Sphinx sphinx-rtd-theme pre-commit 28 | - name: Library version 29 | run: git describe --dirty --always --tags 30 | - name: Pre-commit hooks 31 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 32 | index 8810708..1b9fadc 100644 33 | --- a/.pre-commit-config.yaml 34 | +++ b/.pre-commit-config.yaml 35 | @@ -18,17 +18,25 @@ repos: 36 | - id: end-of-file-fixer 37 | - id: trailing-whitespace 38 | - repo: https://github.com/pycqa/pylint 39 | - rev: pylint-2.7.1 40 | + rev: v2.11.1 41 | hooks: 42 | - id: pylint 43 | name: pylint (library code) 44 | types: [python] 45 | - exclude: "^(docs/|examples/|setup.py$)" 46 | -- repo: local 47 | - hooks: 48 | - - id: pylint_examples 49 | - name: pylint (examples code) 50 | + args: 51 | + - --disable=consider-using-f-string 52 | + exclude: "^(docs/|examples/|tests/|setup.py$)" 53 | + - id: pylint 54 | + name: pylint (example code) 55 | description: Run pylint rules on "examples/*.py" files 56 | - entry: /usr/bin/env bash -c 57 | - args: ['([[ ! -d "examples" ]] || for example in $(find . -path "./examples/*.py"); do pylint --disable=missing-docstring,invalid-name,consider-using-f-string $example; done)'] 58 | - language: system 59 | + types: [python] 60 | + files: "^examples/" 61 | + args: 62 | + - --disable=missing-docstring,invalid-name,consider-using-f-string,duplicate-code 63 | + - id: pylint 64 | + name: pylint (test code) 65 | + description: Run pylint rules on "tests/*.py" files 66 | + types: [python] 67 | + files: "^tests/" 68 | + args: 69 | + - --disable=missing-docstring,consider-using-f-string,duplicate-code 70 | diff --git a/.pylintrc b/.pylintrc 71 | index aed1e4c..12a028e 100644 72 | --- a/.pylintrc 73 | +++ b/.pylintrc 74 | @@ -252,7 +252,7 @@ ignore-docstrings=yes 75 | ignore-imports=yes 76 | 77 | # Minimum lines number of a similarity. 
78 | -min-similarity-lines=12 79 | +min-similarity-lines=4 80 | 81 | 82 | [BASIC] 83 | diff --git a/.readthedocs.yml b/.readthedocs.yml 84 | index ffa84c4..49dcab3 100644 85 | --- a/.readthedocs.yml 86 | +++ b/.readthedocs.yml 87 | @@ -4,4 +4,4 @@ 88 | 89 | python: 90 | version: 3 91 | -requirements_file: requirements.txt 92 | +requirements_file: docs/requirements.txt 93 | diff --git a/docs/requirements.txt b/docs/requirements.txt 94 | new file mode 100644 95 | index 0000000..88e6733 96 | --- /dev/null 97 | +++ b/docs/requirements.txt 98 | @@ -0,0 +1,5 @@ 99 | +# SPDX-FileCopyrightText: 2021 Kattni Rembor for Adafruit Industries 100 | +# 101 | +# SPDX-License-Identifier: Unlicense 102 | + 103 | +sphinx>=4.0.0 104 | -- 105 | 2.25.1 106 | -------------------------------------------------------------------------------- /patches/0001-Re-added-pylint-install-to-build.yml.patch: -------------------------------------------------------------------------------- 1 | From c5615b53b7bbd47517a1a999b3f10dd70f3c4c6c Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Tue, 2 Mar 2021 17:17:50 -0500 4 | Subject: [PATCH] Re-added pylint install to build.yml 5 | 6 | --- 7 | .github/workflows/build.yml | 4 ++-- 8 | 1 file changed, 2 insertions(+), 2 deletions(-) 9 | 10 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 11 | index 621d5ef..3baf502 100644 12 | --- a/.github/workflows/build.yml 13 | +++ b/.github/workflows/build.yml 14 | @@ -42,9 +42,9 @@ jobs: 15 | # (e.g. - apt-get: gettext, etc; pip: circuitpython-build-tools, requirements.txt; etc.) 16 | run: | 17 | source actions-ci/install.sh 18 | - - name: Pip install Sphinx, pre-commit 19 | + - name: Pip install pylint, Sphinx, pre-commit 20 | run: | 21 | - pip install --force-reinstall Sphinx sphinx-rtd-theme pre-commit 22 | + pip install --force-reinstall pylint Sphinx sphinx-rtd-theme pre-commit 23 | - name: Library version 24 | run: git describe --dirty --always --tags 25 | - name: Pre-commit hooks 26 | -- 27 | 2.24.3 (Apple Git-128) 28 | 29 | -------------------------------------------------------------------------------- /patches/0001-Removed-duplicate-code-from-library-pylint-disable.patch: -------------------------------------------------------------------------------- 1 | From 938c68803029b40ba783141373957614350bba67 Mon Sep 17 00:00:00 2001 2 | From: evaherrada 3 | Date: Tue, 21 Jun 2022 17:00:37 -0400 4 | Subject: [PATCH] Removed duplicate-code from library pylint disable 5 | 6 | --- 7 | .pre-commit-config.yaml | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 11 | index 0a91a11..3343606 100644 12 | --- a/.pre-commit-config.yaml 13 | +++ b/.pre-commit-config.yaml 14 | @@ -24,7 +24,7 @@ repos: 15 | name: pylint (library code) 16 | types: [python] 17 | args: 18 | - - --disable=consider-using-f-string,duplicate-code 19 | + - --disable=consider-using-f-string 20 | exclude: "^(docs/|examples/|tests/|setup.py$)" 21 | - id: pylint 22 | name: pylint (example code) 23 | -- 24 | 2.25.1 25 | 26 | -------------------------------------------------------------------------------- /patches/0001-Removed-pylint-process-from-github-workflow-patch-2.patch: -------------------------------------------------------------------------------- 1 | From 890916a8fa3274a194b6a45872b93e4d87779b84 Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Fri, 26 Feb 2021 16:26:15 -0500 4 | Subject: [PATCH] Removed pylint process from github workflow patch 2 5 | 6 | --- 7 | 
.github/workflows/build.yml | 4 ---- 8 | .pre-commit-config.yaml | 15 +++++++++++++++ 9 | 2 files changed, 15 insertions(+), 4 deletions(-) 10 | 11 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 12 | index 59baa53..3baf502 100644 13 | --- a/.github/workflows/build.yml 14 | +++ b/.github/workflows/build.yml 15 | @@ -53,4 +52,0 @@ jobs: 16 | - - name: PyLint 17 | - run: | 18 | - pylint $( find . -path './adafruit*.py' ) 19 | - ([[ ! -d "examples" ]] || pylint --disable=missing-docstring,invalid-name,bad-whitespace $( find . -path "./examples/*.py" )) 20 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 21 | index 07f886c..354c761 100644 22 | --- a/.pre-commit-config.yaml 23 | +++ b/.pre-commit-config.yaml 24 | @@ -19,0 +20,15 @@ repos: 25 | +- repo: https://github.com/pycqa/pylint 26 | + rev: pylint-2.7.1 27 | + hooks: 28 | + - id: pylint 29 | + name: pylint (library code) 30 | + types: [python] 31 | + exclude: "^(docs/|examples/|setup.py$)" 32 | +- repo: local 33 | + hooks: 34 | + - id: pylint_examples 35 | + name: pylint (examples code) 36 | + description: Run pylint rules on "examples/*.py" files 37 | + entry: /usr/bin/env bash -c 38 | + args: ['([[ ! -d "examples" ]] || for example in $(find . -path "./examples/*.py"); do pylint --disable=missing-docstring,invalid-name $example; done)'] 39 | + language: system 40 | -- 41 | 2.25.1 42 | 43 | -------------------------------------------------------------------------------- /patches/0001-Removed-pylint-process-from-github-workflow.patch: -------------------------------------------------------------------------------- 1 | From 223f4eea34853e043ab9e00cbcf8fb0fd6451aff Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Tue, 2 Mar 2021 16:46:17 -0500 4 | Subject: [PATCH] Removed pylint process from github workflow 5 | 6 | --- 7 | .github/workflows/build.yml | 8 ++------ 8 | .pre-commit-config.yaml | 15 +++++++++++++++ 9 | .pylintrc | 2 +- 10 | 3 files changed, 18 insertions(+), 7 deletions(-) 11 | 12 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 13 | index 59baa53..621d5ef 100644 14 | --- a/.github/workflows/build.yml 15 | +++ b/.github/workflows/build.yml 16 | @@ -42,18 +42,14 @@ jobs: 17 | # (e.g. - apt-get: gettext, etc; pip: circuitpython-build-tools, requirements.txt; etc.) 18 | run: | 19 | source actions-ci/install.sh 20 | - - name: Pip install pylint, Sphinx, pre-commit 21 | + - name: Pip install Sphinx, pre-commit 22 | run: | 23 | - pip install --force-reinstall pylint Sphinx sphinx-rtd-theme pre-commit 24 | + pip install --force-reinstall Sphinx sphinx-rtd-theme pre-commit 25 | - name: Library version 26 | run: git describe --dirty --always --tags 27 | - name: Pre-commit hooks 28 | run: | 29 | pre-commit run --all-files 30 | - - name: PyLint 31 | - run: | 32 | - pylint $( find . -path './adafruit*.py' ) 33 | - ([[ ! -d "examples" ]] || pylint --disable=missing-docstring,invalid-name,bad-whitespace $( find . -path "./examples/*.py" )) 34 | - name: Build assets 35 | run: circuitpython-build-bundles --filename_prefix ${{ steps.repo-name.outputs.repo-name }} --library_location . 
36 | - name: Archive bundles 37 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 38 | index 07f886c..354c761 100644 39 | --- a/.pre-commit-config.yaml 40 | +++ b/.pre-commit-config.yaml 41 | @@ -17,3 +17,18 @@ repos: 42 | - id: check-yaml 43 | - id: end-of-file-fixer 44 | - id: trailing-whitespace 45 | +- repo: https://github.com/pycqa/pylint 46 | + rev: pylint-2.7.1 47 | + hooks: 48 | + - id: pylint 49 | + name: pylint (library code) 50 | + types: [python] 51 | + exclude: "^(docs/|examples/|setup.py$)" 52 | +- repo: local 53 | + hooks: 54 | + - id: pylint_examples 55 | + name: pylint (examples code) 56 | + description: Run pylint rules on "examples/*.py" files 57 | + entry: /usr/bin/env bash -c 58 | + args: ['([[ ! -d "examples" ]] || for example in $(find . -path "./examples/*.py"); do pylint --disable=missing-docstring,invalid-name $example; done)'] 59 | + language: system 60 | diff --git a/.pylintrc b/.pylintrc 61 | index 5c31f66..9ed669e 100644 62 | --- a/.pylintrc 63 | +++ b/.pylintrc 64 | @@ -250,7 +250,7 @@ ignore-comments=yes 65 | ignore-docstrings=yes 66 | 67 | # Ignore imports when computing similarities. 68 | -ignore-imports=no 69 | +ignore-imports=yes 70 | 71 | # Minimum lines number of a similarity. 72 | min-similarity-lines=4 73 | -- 74 | 2.24.3 (Apple Git-128) 75 | 76 | -------------------------------------------------------------------------------- /patches/0001-Second-gitignore-patch.patch: -------------------------------------------------------------------------------- 1 | From 3e8355111337836c52f6ef121641712d4ca9f6d2 Mon Sep 17 00:00:00 2001 2 | From: evaherrada 3 | Date: Thu, 21 Apr 2022 15:45:16 -0400 4 | Subject: [PATCH] Updated gitignore 5 | 6 | --- 7 | .gitignore | 49 +++++++++++++++++++++++++++++++++++++++---------- 8 | 1 file changed, 39 insertions(+), 10 deletions(-) 9 | 10 | diff --git a/.gitignore b/.gitignore 11 | index 2c6ddfd..544ec4a 100644 12 | --- a/.gitignore 13 | +++ b/.gitignore 14 | @@ -1,18 +1,47 @@ 15 | -# SPDX-FileCopyrightText: 2017 Scott Shawcroft, written for Adafruit Industries 16 | +# SPDX-FileCopyrightText: 2022 Kattni Rembor, written for Adafruit Industries 17 | # 18 | -# SPDX-License-Identifier: Unlicense 19 | +# SPDX-License-Identifier: MIT 20 | 21 | +# Do not include files and directories created by your personal work environment, such as the IDE 22 | +# you use, except for those already listed here. Pull requests including changes to this file will 23 | +# not be accepted. 24 | + 25 | +# This .gitignore file contains rules for files generated by working with CircuitPython libraries, 26 | +# including building Sphinx, testing with pip, and creating a virual environment, as well as the 27 | +# MacOS and IDE-specific files generated by using MacOS in general, or the PyCharm or VSCode IDEs. 28 | + 29 | +# If you find that there are files being generated on your machine that should not be included in 30 | +# your git commit, you should create a .gitignore_global file on your computer to include the 31 | +# files created by your personal setup. To do so, follow the two steps below. 32 | + 33 | +# First, create a file called .gitignore_global somewhere convenient for you, and add rules for 34 | +# the files you want to exclude from git commits. 
35 | + 36 | +# Second, configure Git to use the exclude file for all Git repositories by running the 37 | +# following via commandline, replacing "path/to/your/" with the actual path to your newly created 38 | +# .gitignore_global file: 39 | +# git config --global core.excludesfile path/to/your/.gitignore_global 40 | + 41 | +# CircuitPython-specific files 42 | *.mpy 43 | -.idea 44 | + 45 | +# Python-specific files 46 | __pycache__ 47 | -_build 48 | *.pyc 49 | + 50 | +# Sphinx build-specific files 51 | +_build 52 | + 53 | +# This file results from running `pip -e install .` in a local repository 54 | +*.egg-info 55 | + 56 | +# Virtual environment-specific files 57 | .env 58 | -.python-version 59 | -build*/ 60 | -bundles 61 | + 62 | +# MacOS-specific files 63 | *.DS_Store 64 | -.eggs 65 | -dist 66 | -**/*.egg-info 67 | + 68 | +# IDE-specific files 69 | +.idea 70 | .vscode 71 | +*~ 72 | -- 73 | 2.25.1 74 | 75 | -------------------------------------------------------------------------------- /patches/0001-Update-Black-to-latest.patch: -------------------------------------------------------------------------------- 1 | From 94efa5385a95c41e2564e9f0a2760728d689eead Mon Sep 17 00:00:00 2001 2 | From: Kattni Rembor 3 | Date: Mon, 28 Mar 2022 15:52:04 -0400 4 | Subject: [PATCH] Update Black to latest. 5 | 6 | --- 7 | .pre-commit-config.yaml | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 11 | index 1b9fadc..7467c1d 100644 12 | --- a/.pre-commit-config.yaml 13 | +++ b/.pre-commit-config.yaml 14 | @@ -4,7 +4,7 @@ 15 | 16 | repos: 17 | - repo: https://github.com/python/black 18 | - rev: 20.8b1 19 | + rev: 22.3.0 20 | hooks: 21 | - id: black 22 | - repo: https://github.com/fsfe/reuse-tool 23 | -- 24 | 2.33.1 25 | 26 | -------------------------------------------------------------------------------- /patches/0001-Updated-readthedocs-file.patch: -------------------------------------------------------------------------------- 1 | From df685288b18965c4089a8895b0eb3bf80c17423e Mon Sep 17 00:00:00 2001 2 | From: dherrada 3 | Date: Tue, 9 Nov 2021 13:31:14 -0500 4 | Subject: [PATCH] Updated readthedocs file 5 | 6 | --- 7 | .readthedocs.yaml | 15 +++++++++++++++ 8 | .readthedocs.yml | 7 ------- 9 | 2 files changed, 15 insertions(+), 7 deletions(-) 10 | create mode 100644 .readthedocs.yaml 11 | delete mode 100644 .readthedocs.yml 12 | 13 | diff --git a/.readthedocs.yaml b/.readthedocs.yaml 14 | new file mode 100644 15 | index 0000000..95ec218 16 | --- /dev/null 17 | +++ b/.readthedocs.yaml 18 | @@ -0,0 +1,15 @@ 19 | +# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries 20 | +# 21 | +# SPDX-License-Identifier: Unlicense 22 | + 23 | +# Read the Docs configuration file 24 | +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 25 | + 26 | +# Required 27 | +version: 2 28 | + 29 | +python: 30 | + version: "3.6" 31 | + install: 32 | + - requirements: docs/requirements.txt 33 | + - requirements: requirements.txt 34 | diff --git a/.readthedocs.yml b/.readthedocs.yml 35 | deleted file mode 100644 36 | index 49dcab3..0000000 37 | --- a/.readthedocs.yml 38 | +++ /dev/null 39 | @@ -1,7 +0,0 @@ 40 | -# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries 41 | -# 42 | -# SPDX-License-Identifier: Unlicense 43 | - 44 | -python: 45 | - version: 3 46 | -requirements_file: docs/requirements.txt 47 | -- 48 | 2.25.1 49 | 50 | -------------------------------------------------------------------------------- 
/patches/0001-add-sphinx-configuration-to-rtd.yaml.patch: -------------------------------------------------------------------------------- 1 | From 0e4e3ab23acf83d546be3e0dd8cb47137034a32c Mon Sep 17 00:00:00 2001 2 | From: foamyguy 3 | Date: Tue, 14 Jan 2025 11:32:34 -0600 4 | Subject: [PATCH] add sphinx configuration to rtd.yaml 5 | 6 | --- 7 | .readthedocs.yaml | 3 +++ 8 | 1 file changed, 3 insertions(+) 9 | 10 | diff --git a/.readthedocs.yaml b/.readthedocs.yaml 11 | index 33c2a61..88bca9f 100644 12 | --- a/.readthedocs.yaml 13 | +++ b/.readthedocs.yaml 14 | @@ -8,6 +8,9 @@ 15 | # Required 16 | version: 2 17 | 18 | +sphinx: 19 | + configuration: docs/conf.py 20 | + 21 | build: 22 | os: ubuntu-20.04 23 | tools: 24 | -- 25 | 2.48.0 26 | 27 | -------------------------------------------------------------------------------- /patches/0001-build.yml-add-black-formatting-check.patch: -------------------------------------------------------------------------------- 1 | From e725354300a8c8b9cdfcca9e8345568acee96d85 Mon Sep 17 00:00:00 2001 2 | From: sommersoft 3 | Date: Tue, 7 Apr 2020 15:02:31 -0500 4 | Subject: [PATCH] build.yml: add black formatting check 5 | 6 | --- 7 | .github/workflows/build.yml | 3 +++ 8 | 1 file changed, 3 insertions(+) 9 | 10 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 11 | index 1dad804..b6977a9 100644 12 | --- a/.github/workflows/build.yml 13 | +++ b/.github/workflows/build.yml 14 | @@ -44,4 +44,7 @@ jobs: 15 | - name: Library version 16 | run: git describe --dirty --always --tags 17 | + - name: Check formatting 18 | + run: | 19 | + black --check --target-version=py35 . 20 | - name: PyLint 21 | run: | 22 | -- 23 | 2.17.1 24 | 25 | -------------------------------------------------------------------------------- /patches/0001-remove-deprecated-get_html_theme_path-call-alt-config.patch: -------------------------------------------------------------------------------- 1 | From 164ae12550a34b8751aeef0cb676eae4e72ab610 Mon Sep 17 00:00:00 2001 2 | From: foamyguy 3 | Date: Mon, 7 Oct 2024 14:51:48 -0500 4 | Subject: [PATCH] remove deprecated sphinx theme call 5 | 6 | --- 7 | docs/conf.py | 1 - 8 | 1 file changed, 1 deletion(-) 9 | 10 | diff --git a/docs/conf.py b/docs/conf.py 11 | index a393c5e..0d0e89d 100644 12 | --- a/docs/conf.py 13 | +++ b/docs/conf.py 14 | @@ -154,7 +154,6 @@ if not on_rtd: # only import and set the theme if we're building docs locally 15 | import sphinx_rtd_theme 16 | 17 | html_theme = "sphinx_rtd_theme" 18 | - html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), "."] 19 | except: 20 | html_theme = "default" 21 | html_theme_path = ["."] 22 | -- 23 | 2.46.2 24 | 25 | -------------------------------------------------------------------------------- /patches/0001-remove-deprecated-get_html_theme_path-call.patch: -------------------------------------------------------------------------------- 1 | From 7b7cdd3d13f4ee2cf4be44ea3966dfdec659f9a9 Mon Sep 17 00:00:00 2001 2 | From: foamyguy 3 | Date: Mon, 7 Oct 2024 09:24:05 -0500 4 | Subject: [PATCH] remove deprecated get_html_theme_path() call 5 | 6 | --- 7 | docs/conf.py | 1 - 8 | 1 file changed, 1 deletion(-) 9 | 10 | diff --git a/docs/conf.py b/docs/conf.py 11 | index de06824..cb73a8f 100644 12 | --- a/docs/conf.py 13 | +++ b/docs/conf.py 14 | @@ -109,7 +109,6 @@ napoleon_numpy_docstring = False 15 | import sphinx_rtd_theme 16 | 17 | html_theme = "sphinx_rtd_theme" 18 | -html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), "."] 19 | 20 | # Add any paths that contain 
custom static files (such as style sheets) here, 21 | # relative to this directory. They are copied after the builtin static files, 22 | -- 23 | 2.46.2 24 | 25 | -------------------------------------------------------------------------------- /patches/0001-update-rtd.yml-file.patch: -------------------------------------------------------------------------------- 1 | From 2ae4e52cee16024261282d9d4b860cd33cc5b35f Mon Sep 17 00:00:00 2001 2 | From: foamyguy 3 | Date: Wed, 4 Jun 2025 10:00:20 -0500 4 | Subject: [PATCH] update rtd.yml file 5 | 6 | --- 7 | .readthedocs.yaml | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/.readthedocs.yaml b/.readthedocs.yaml 11 | index fe4faae..ee38fa0 100644 12 | --- a/.readthedocs.yaml 13 | +++ b/.readthedocs.yaml 14 | @@ -12,7 +12,7 @@ sphinx: 15 | configuration: docs/conf.py 16 | 17 | build: 18 | - os: ubuntu-20.04 19 | + os: ubuntu-lts-latest 20 | tools: 21 | python: "3" 22 | 23 | -- 24 | 2.49.0 25 | 26 | -------------------------------------------------------------------------------- /patches/0003-actions-Remove-bad-whitespace-from-pylint-invocation.patch: -------------------------------------------------------------------------------- 1 | From 9c445e26608a5be97f78c7aff4d7af28f53a1c90 Mon Sep 17 00:00:00 2001 2 | From: Jeff Epler 3 | Date: Sun, 23 Aug 2020 12:08:25 -0500 4 | Subject: [PATCH] actions: Remove bad-whitespace from pylint invocation 5 | 6 | --- 7 | .github/workflows/build.yml | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml 11 | index b6977a9..dc092cd 100644 12 | --- a/.github/workflows/build.yml 13 | +++ b/.github/workflows/build.yml 14 | @@ -49,7 +49,7 @@ jobs: 15 | - name: PyLint 16 | run: | 17 | pylint $( find . -path './adafruit*.py' ) 18 | - ([[ ! -d "examples" ]] || pylint --disable=missing-docstring,invalid-name,bad-whitespace $( find . -path "./examples/*.py" )) 19 | + ([[ ! -d "examples" ]] || pylint --disable=missing-docstring,invalid-name $( find . -path "./examples/*.py" )) 20 | - name: Build assets 21 | run: circuitpython-build-bundles --filename_prefix ${{ steps.repo-name.outputs.repo-name }} --library_location . 
22 | - name: Build docs 23 | -- 24 | 2.20.1 25 | 26 | -------------------------------------------------------------------------------- /patches/01192023_release_patch.patch: -------------------------------------------------------------------------------- 1 | From 31490c26ab114fde7462765cbfa3f67afce33bc0 Mon Sep 17 00:00:00 2001 2 | From: Alec Delaney <89490472+tekktrik@users.noreply.github.com> 3 | Date: Thu, 19 Jan 2023 23:39:55 -0500 4 | Subject: [PATCH] Add upload url to release action 5 | 6 | --- 7 | .github/workflows/release_gh.yml | 1 + 8 | 1 file changed, 1 insertion(+) 9 | 10 | diff --git a/.github/workflows/release_gh.yml b/.github/workflows/release_gh.yml 11 | index b8aa8d6..9acec60 100644 12 | --- a/.github/workflows/release_gh.yml 13 | +++ b/.github/workflows/release_gh.yml 14 | @@ -16,3 +16,4 @@ jobs: 15 | uses: adafruit/workflows-circuitpython-libs/release-gh@main 16 | with: 17 | github-token: ${{ secrets.GITHUB_TOKEN }} 18 | + upload-url: ${{ github.event.release.upload_url }} 19 | -- 20 | 2.39.0 21 | 22 | -------------------------------------------------------------------------------- /patches/01SEP2022_venv_gitignore.patch: -------------------------------------------------------------------------------- 1 | From 72726ff54b3b5782181f6ab2057ce84258a94277 Mon Sep 17 00:00:00 2001 2 | From: Alec Delaney <89490472+tekktrik@users.noreply.github.com> 3 | Date: Thu, 1 Sep 2022 20:16:31 -0400 4 | Subject: [PATCH] Add .venv to .gitignore 5 | 6 | --- 7 | .gitignore | 1 + 8 | 1 file changed, 1 insertion(+) 9 | 10 | diff --git a/.gitignore b/.gitignore 11 | index 544ec4a..db3d538 100644 12 | --- a/.gitignore 13 | +++ b/.gitignore 14 | @@ -37,6 +37,7 @@ _build 15 | 16 | # Virtual environment-specific files 17 | .env 18 | +.venv 19 | 20 | # MacOS-specific files 21 | *.DS_Store 22 | -- 23 | 2.37.2 24 | 25 | -------------------------------------------------------------------------------- /patches/05302022_set_doc_language.patch: -------------------------------------------------------------------------------- 1 | From c87dc6f80fc8eac93b266103aef8dc9683301b01 Mon Sep 17 00:00:00 2001 2 | From: Alec Delaney 3 | Date: Mon, 30 May 2022 14:25:04 -0400 4 | Subject: [PATCH] Set language to "en" for documentation 5 | 6 | --- 7 | docs/conf.py | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/docs/conf.py b/docs/conf.py 11 | index cb5dde7..c58fe52 100644 12 | --- a/docs/conf.py 13 | +++ b/docs/conf.py 14 | @@ -57,7 +57,7 @@ release = "1.0" 15 | # 16 | # This is also used if you do content translation via gettext catalogs. 17 | # Usually you set "language" from the command line for these cases. 18 | -language = None 19 | +language = "en" 20 | 21 | # List of patterns, relative to source directory, that match files and 22 | # directories to ignore when looking for source files. 
23 | -- 24 | 2.36.1 25 | 26 | -------------------------------------------------------------------------------- /patches/09AUG2022-setuptools-scm.patch: -------------------------------------------------------------------------------- 1 | From a445ef4116a4f863fe532cdc3295f73876db400f Mon Sep 17 00:00:00 2001 2 | From: Alec Delaney 3 | Date: Tue, 9 Aug 2022 12:03:54 -0400 4 | Subject: [PATCH] Add setuptools-scm to build system requirements 5 | 6 | --- 7 | pyproject.toml | 1 + 8 | 1 file changed, 1 insertion(+) 9 | 10 | diff --git a/pyproject.toml b/pyproject.toml 11 | index 822021e..0c8d672 100644 12 | --- a/pyproject.toml 13 | +++ b/pyproject.toml 14 | @@ -6,6 +6,7 @@ 15 | requires = [ 16 | "setuptools", 17 | "wheel", 18 | + "setuptools-scm", 19 | ] 20 | 21 | [project] 22 | -- 23 | 2.35.1.windows.2 24 | 25 | -------------------------------------------------------------------------------- /patches/09MAY2023_precommit_update.patch: -------------------------------------------------------------------------------- 1 | From a70dfa8cd8a37dbd5674a4efd0ace8597dc44be5 Mon Sep 17 00:00:00 2001 2 | From: Tekktrik 3 | Date: Tue, 9 May 2023 20:26:25 -0400 4 | Subject: [PATCH] Update pre-commit hooks 5 | 6 | --- 7 | .pre-commit-config.yaml | 8 ++++---- 8 | 1 file changed, 4 insertions(+), 4 deletions(-) 9 | 10 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml 11 | index 0e5fccc..70ade69 100644 12 | --- a/.pre-commit-config.yaml 13 | +++ b/.pre-commit-config.yaml 14 | @@ -4,21 +4,21 @@ 15 | 16 | repos: 17 | - repo: https://github.com/python/black 18 | - rev: 22.3.0 19 | + rev: 23.3.0 20 | hooks: 21 | - id: black 22 | - repo: https://github.com/fsfe/reuse-tool 23 | - rev: v0.14.0 24 | + rev: v1.1.2 25 | hooks: 26 | - id: reuse 27 | - repo: https://github.com/pre-commit/pre-commit-hooks 28 | - rev: v4.2.0 29 | + rev: v4.4.0 30 | hooks: 31 | - id: check-yaml 32 | - id: end-of-file-fixer 33 | - id: trailing-whitespace 34 | - repo: https://github.com/pycqa/pylint 35 | - rev: v2.15.5 36 | + rev: v2.17.4 37 | hooks: 38 | - id: pylint 39 | name: pylint (library code) 40 | -- 41 | 2.40.0 42 | 43 | -------------------------------------------------------------------------------- /patches/11OCT2023-add-sphinx-rtd-theme-to-docs-reqs.patch: -------------------------------------------------------------------------------- 1 | From 6ac5e49df667e54aef02f5181b41b233be7a4bf3 Mon Sep 17 00:00:00 2001 2 | From: foamyguy 3 | Date: Mon, 16 Oct 2023 14:30:31 -0500 4 | Subject: [PATCH] unpin sphinx and add sphinx-rtd-theme to docs reqs 5 | 6 | --- 7 | docs/requirements.txt | 3 ++- 8 | 1 file changed, 2 insertions(+), 1 deletion(-) 9 | 10 | diff --git a/docs/requirements.txt b/docs/requirements.txt 11 | index 797aa04..979f568 100644 12 | --- a/docs/requirements.txt 13 | +++ b/docs/requirements.txt 14 | @@ -2,5 +2,6 @@ 15 | # 16 | # SPDX-License-Identifier: Unlicense 17 | 18 | -sphinx>=4.0.0 19 | +sphinx 20 | sphinxcontrib-jquery 21 | +sphinx-rtd-theme 22 | -- 23 | 2.34.1 24 | 25 | -------------------------------------------------------------------------------- /patches/11SEP2023_fix_rtd_theme.patch: -------------------------------------------------------------------------------- 1 | Subject: [PATCH] fix rtd theme 2 | --- 3 | Index: docs/conf.py 4 | IDEA additional info: 5 | Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP 6 | <+>UTF-8 7 | =================================================================== 8 | diff --git a/docs/conf.py b/docs/conf.py 9 | --- a/docs/conf.py 10 | +++ b/docs/conf.py 11 | @@ 
-101,19 +101,10 @@ 12 | # The theme to use for HTML and HTML Help pages. See the documentation for 13 | # a list of builtin themes. 14 | # 15 | -on_rtd = os.environ.get("READTHEDOCS", None) == "True" 16 | - 17 | -if not on_rtd: # only import and set the theme if we're building docs locally 18 | - try: 19 | - import sphinx_rtd_theme 20 | +import sphinx_rtd_theme 21 | 22 | - html_theme = "sphinx_rtd_theme" 23 | - html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), "."] 24 | - except: 25 | - html_theme = "default" 26 | - html_theme_path = ["."] 27 | -else: 28 | - html_theme_path = ["."] 29 | +html_theme = "sphinx_rtd_theme" 30 | +html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), "."] 31 | 32 | # Add any paths that contain custom static files (such as style sheets) here, 33 | # relative to this directory. They are copied after the builtin static files, 34 | -------------------------------------------------------------------------------- /patches/14MAY2023_pylintrc_jquery.patch: -------------------------------------------------------------------------------- 1 | From 6467782ca1523e6d77cb6b857d16d6d6df1feeb7 Mon Sep 17 00:00:00 2001 2 | From: Tekktrik 3 | Date: Sun, 14 May 2023 13:00:32 -0400 4 | Subject: [PATCH] Update .pylintrc, fix jQuery for docs 5 | 6 | --- 7 | .pylintrc | 2 +- 8 | docs/conf.py | 1 + 9 | docs/requirements.txt | 1 + 10 | 3 files changed, 3 insertions(+), 1 deletion(-) 11 | 12 | diff --git a/.pylintrc b/.pylintrc 13 | index 40208c3..f945e92 100644 14 | --- a/.pylintrc 15 | +++ b/.pylintrc 16 | @@ -396,4 +396,4 @@ min-public-methods=1 17 | 18 | # Exceptions that will emit a warning when being caught. Defaults to 19 | # "Exception" 20 | -overgeneral-exceptions=Exception 21 | +overgeneral-exceptions=builtins.Exception 22 | diff --git a/docs/conf.py b/docs/conf.py 23 | index 7c368fb..f24dd46 100644 24 | --- a/docs/conf.py 25 | +++ b/docs/conf.py 26 | @@ -17,6 +17,7 @@ sys.path.insert(0, os.path.abspath("..")) 27 | # ones. 28 | extensions = [ 29 | "sphinx.ext.autodoc", 30 | + "sphinxcontrib.jquery", 31 | "sphinx.ext.intersphinx", 32 | "sphinx.ext.napoleon", 33 | "sphinx.ext.todo", 34 | diff --git a/docs/requirements.txt b/docs/requirements.txt 35 | index 88e6733..797aa04 100644 36 | --- a/docs/requirements.txt 37 | +++ b/docs/requirements.txt 38 | @@ -3,3 +3,4 @@ 39 | # SPDX-License-Identifier: Unlicense 40 | 41 | sphinx>=4.0.0 42 | +sphinxcontrib-jquery 43 | -- 44 | 2.40.1 45 | 46 | -------------------------------------------------------------------------------- /patches/inclusive-terminology.patch: -------------------------------------------------------------------------------- 1 | From f351f391e7bb66c99b8a17b04f7f48a68f0a6a0a Mon Sep 17 00:00:00 2001 2 | From: Alec Delaney 3 | Date: Sun, 22 May 2022 00:18:23 -0400 4 | Subject: [PATCH 1/2] Switch to inclusive terminology 5 | 6 | --- 7 | .pylintrc | 4 ++-- 8 | 1 file changed, 2 insertions(+), 2 deletions(-) 9 | 10 | diff --git a/.pylintrc b/.pylintrc 11 | index 4e50ab9..e06d2f6 100644 12 | --- a/.pylintrc 13 | +++ b/.pylintrc 14 | @@ -9,11 +9,11 @@ 15 | # run arbitrary code 16 | extension-pkg-whitelist= 17 | 18 | -# Add files or directories to the blacklist. They should be base names, not 19 | +# Add files or directories to the ignore-list. They should be base names, not 20 | # paths. 21 | ignore=CVS 22 | 23 | -# Add files or directories matching the regex patterns to the blacklist. The 24 | +# Add files or directories matching the regex patterns to the ignore-list. 
The 25 | # regex matches against base names, not paths. 26 | ignore-patterns= 27 | 28 | -- 29 | 2.35.1 30 | 31 | -------------------------------------------------------------------------------- /patches/lines-similarity.patch: -------------------------------------------------------------------------------- 1 | From 1e29fcc30ba460dd12e19ec1cb9512af56f7e01d Mon Sep 17 00:00:00 2001 2 | From: Alec Delaney 3 | Date: Sun, 22 May 2022 00:18:55 -0400 4 | Subject: [PATCH 2/2] Increase min lines similarity 5 | 6 | --- 7 | .pylintrc | 2 +- 8 | 1 file changed, 1 insertion(+), 1 deletion(-) 9 | 10 | diff --git a/.pylintrc b/.pylintrc 11 | index e06d2f6..fe0cbee 100644 12 | --- a/.pylintrc 13 | +++ b/.pylintrc 14 | @@ -252,7 +252,7 @@ ignore-docstrings=yes 15 | ignore-imports=yes 16 | 17 | # Minimum lines number of a similarity. 18 | -min-similarity-lines=4 19 | +min-similarity-lines=12 20 | 21 | 22 | [BASIC] 23 | -- 24 | 2.35.1 25 | 26 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2022 Alec Delaney for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | [build-system] 6 | requires = [ 7 | "setuptools", 8 | "wheel", 9 | "setuptools-scm", 10 | ] 11 | 12 | [project] 13 | name = "adafruit-adabot" 14 | description = "Adabot is our robot friend who helps Adafruit online " 15 | version = "0.0.0+auto.0" 16 | readme = "README.rst" 17 | authors = [ 18 | {name = "Adafruit Industries", email = "circuitpython@adafruit.com"} 19 | ] 20 | urls = {Homepage = "https://github.com/adafruit/adabot"} 21 | keywords = [ 22 | "adafruit", 23 | "micropython", 24 | "circuitpython", 25 | "automation", 26 | ] 27 | license = {text = "MIT"} 28 | classifiers = [ 29 | "Intended Audience :: Developers", 30 | "Topic :: Software Development :: Libraries", 31 | "Topic :: Software Development :: Embedded Systems", 32 | "Topic :: System :: Hardware", 33 | "License :: OSI Approved :: MIT License", 34 | "Programming Language :: Python :: 3", 35 | ] 36 | dynamic = ["dependencies", "optional-dependencies"] 37 | 38 | [project.scripts] 39 | adabot-release = "adabot.circuitpython_library_release:main_cli" 40 | 41 | [tool.setuptools] 42 | packages = ["adabot"] 43 | 44 | [tool.setuptools.dynamic] 45 | dependencies = {file = ["requirements.txt"]} 46 | optional-dependencies = {optional = {file = ["optional_requirements.txt"]}} 47 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | [pytest] 6 | addopts = -v --tb=short --show-capture=no 7 | testpaths = tests/unit/ tests/integration/ 8 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | black==24.3.0 6 | circuitpython-build-tools 7 | packaging==22.0 8 | pylint==3.2.6 9 | pytest 10 | pyyaml>=5.4.1 11 | requests==2.32.0 12 | sh==1.12.14 13 | requests-cache==0.5.2 14 | parse==1.19.0 15 | GitPython==3.1.41 16 | PyGithub==1.57 17 | typing-extensions~=4.0 18 | google-auth~=2.13 19 | google-cloud-bigquery~=3.3 20 | toml 21 | 
-------------------------------------------------------------------------------- /template-env.sh: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | # Go here to generate a github access token: 6 | # https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/ 7 | # KEEP THIS TOKEN SECRET AND SAFE! Anyone with access to the token has FULL CONTROL of your GitHub account! 8 | export ADABOT_GITHUB_ACCESS_TOKEN= 9 | 10 | # This is the username associated with the access token. 11 | export ADABOT_GITHUB_USER= 12 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2022 Alec Delaney, for Adafruit Industries 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Configuration file for pytest (along with `pytest.ini`)""" 6 | 7 | 8 | def pytest_addoption(parser): 9 | """Add options to the `pytest` command""" 10 | parser.addoption( 11 | "--use-tokens", 12 | action="store_true", 13 | default=False, 14 | help="Test commands that use environment tokens", 15 | ) 16 | -------------------------------------------------------------------------------- /tests/integration/test_arduino_libraries.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Integration tests for 'adabot/arduino_libraries.py'""" 6 | 7 | import pytest # pylint: disable=unused-import 8 | 9 | from adabot import arduino_libraries 10 | from adabot import github_requests 11 | 12 | 13 | def mock_list_repos(): 14 | """Function to monkeypatch `arduino_libraries.list_repos()` for a shorter set of repos.""" 15 | 16 | return [github_requests.get("/repos/adafruit/Adafruit_NeoPixel").json()] 17 | 18 | 19 | def test_adafruit_libraries(monkeypatch): 20 | """Test main arduino_libraries function, without writing an output file.""" 21 | 22 | monkeypatch.setattr(arduino_libraries, "list_repos", mock_list_repos) 23 | 24 | arduino_libraries.main() 25 | 26 | 27 | # pylint: disable=invalid-name 28 | def test_adafruit_libraries_output_file(monkeypatch, tmp_path, capsys): 29 | """Test main arduino_libraries funciton, with writing an output file.""" 30 | 31 | monkeypatch.setattr(arduino_libraries, "list_repos", mock_list_repos) 32 | 33 | tmp_output_file = tmp_path / "output_test.txt" 34 | 35 | arduino_libraries.main(output_file=tmp_output_file) 36 | 37 | captured = capsys.readouterr() 38 | 39 | assert tmp_output_file.read_text() == captured.out 40 | -------------------------------------------------------------------------------- /tests/integration/test_circuitpython_libraries.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Integration tests for 'adabot/circuitpython_libraries.py'""" 6 | 7 | import pytest # pylint: disable=unused-import 8 | 9 | from adabot.lib import common_funcs 10 | from adabot import github_requests 11 | from adabot import circuitpython_libraries 12 | 13 | from adabot.lib import circuitpython_library_validators 14 | 15 | 16 | # pylint: disable=unused-argument 17 | def mock_list_repos(*args, **kwargs): 18 | """Function to monkeypatch 
`common_funcs.list_repos()` for a shorter set of repos.""" 19 | return [ 20 | github_requests.get("/repos/adafruit/Adafruit_CircuitPython_TestRepo").json() 21 | ] 22 | 23 | 24 | def test_circuitpython_libraries(monkeypatch, pytestconfig): 25 | """Test main function of 'circuitpyton_libraries.py', without writing an output file.""" 26 | 27 | monkeypatch.setattr(common_funcs, "list_repos", mock_list_repos) 28 | 29 | # Delete specific tests that require repository secrets 30 | # They can't be tested via, so let's remove them and test the others 31 | if not pytestconfig.getoption("--use-tokens"): 32 | vals = [ 33 | validator[0] 34 | for validator in circuitpython_libraries.default_validators 35 | if validator[0] 36 | not in circuitpython_library_validators.LibraryValidator.get_token_methods() 37 | ] 38 | vals_str = ",".join(vals) 39 | else: 40 | vals_str = "all" 41 | 42 | circuitpython_libraries.main(validator=vals_str) 43 | 44 | 45 | # pylint: disable=invalid-name 46 | def test_circuitpython_libraries_output_file( 47 | monkeypatch, pytestconfig, tmp_path, capsys 48 | ): 49 | """Test main funciton of 'circuitpython_libraries.py', with writing an output file.""" 50 | 51 | monkeypatch.setattr(common_funcs, "list_repos", mock_list_repos) 52 | 53 | # Delete specific tests that require repository secrets 54 | # They can't be tested via, so let's remove them and test the others 55 | if not pytestconfig.getoption("--use-tokens"): 56 | vals = [ 57 | validator[0] 58 | for validator in circuitpython_libraries.default_validators 59 | if validator[0] 60 | not in circuitpython_library_validators.LibraryValidator.get_token_methods() 61 | ] 62 | vals_str = ",".join(vals) 63 | else: 64 | vals_str = "all" 65 | 66 | tmp_output_file = tmp_path / "output_test.txt" 67 | 68 | circuitpython_libraries.main(validator=vals_str, output_file=tmp_output_file) 69 | 70 | captured = capsys.readouterr() 71 | 72 | assert tmp_output_file.read_text() == captured.out 73 | -------------------------------------------------------------------------------- /tests/integration/test_update_cp_org_libraries.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Integration tests for 'adabot/update_cp_org_libraries.py'""" 6 | 7 | import json 8 | 9 | import pytest # pylint: disable=unused-import 10 | 11 | from adabot.lib import common_funcs 12 | from adabot import github_requests 13 | from adabot import update_cp_org_libraries 14 | 15 | 16 | # pylint: disable=unused-argument 17 | def mock_list_repos(*args, **kwargs): 18 | """Function to monkeypatch `common_funcs.list_repos()` for a shorter set of repos.""" 19 | repos = [] 20 | result = github_requests.get( 21 | "/search/repositories", 22 | params={ 23 | "q": "Adafruit_CircuitPython user:adafruit archived:false fork:true", 24 | "per_page": 100, 25 | "sort": "updated", 26 | "order": "asc", 27 | }, 28 | ) 29 | 30 | if result.ok: 31 | repos.extend( 32 | repo 33 | for repo in result.json()["items"] 34 | if ( 35 | repo["owner"]["login"] == "adafruit" 36 | and ( 37 | repo["name"].startswith("Adafruit_CircuitPython") 38 | or repo["name"] == "circuitpython" 39 | ) 40 | ) 41 | ) 42 | 43 | repo_names = [repo["name"] for repo in repos] 44 | 45 | if kwargs.get("include_repos", False): 46 | for repo in kwargs["include_repos"]: 47 | if repo not in repo_names: 48 | add_repo = github_requests.get("/repos/adafruit/" + repo) 49 | if add_repo.ok: 50 | 
repos.append(add_repo.json()) 51 | else: 52 | print("list_repos(): Failed to retrieve '{}'".format(repo)) 53 | 54 | if len(repos) > 5: 55 | repos = repos[:5] 56 | 57 | return repos 58 | 59 | 60 | # pylint: disable=unused-argument 61 | def mock_get_contribs(*args): 62 | """Function to monkeypatch `update_cp_org_libraries.get_contributors()` to ensure 63 | proper testing of usage. Monkeypatched `list_repos` will likely not produce results. 64 | """ 65 | contribs = ["test_user1", "test_user2"] 66 | reviewers = ["test_reviewer1", "test_reviewer2"] 67 | merged_pr_count = 4 68 | 69 | return contribs, reviewers, merged_pr_count 70 | 71 | 72 | def test_update_cp_org_libraries(monkeypatch): 73 | """Test main function of 'circuitpyton_libraries.py', without writing an output file.""" 74 | 75 | monkeypatch.setattr(common_funcs, "list_repos", mock_list_repos) 76 | monkeypatch.setattr(update_cp_org_libraries, "get_contributors", mock_get_contribs) 77 | 78 | update_cp_org_libraries.main(loglevel="INFO") 79 | 80 | 81 | # pylint: disable=invalid-name 82 | def test_update_cp_org_libraries_output_file(monkeypatch, tmp_path, capsys): 83 | """Test main funciton of 'update_cp_org_libraries.py', with writing an output file.""" 84 | 85 | monkeypatch.setattr(common_funcs, "list_repos", mock_list_repos) 86 | monkeypatch.setattr(update_cp_org_libraries, "get_contributors", mock_get_contribs) 87 | 88 | tmp_output_file = tmp_path / "output_test.txt" 89 | 90 | update_cp_org_libraries.main(loglevel="INFO", output_file=tmp_output_file) 91 | 92 | output = tmp_output_file.read_text() 93 | 94 | assert json.loads(output) 95 | -------------------------------------------------------------------------------- /tests/unit/test_blinka_funcs.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Unit tests for 'adabot/lib/blinka_funcs.py'""" 6 | 7 | import pytest # pylint: disable=unused-import 8 | 9 | from adabot.lib import blinka_funcs 10 | 11 | 12 | def test_board_count(): 13 | """Test that 'board_count' returns a number.""" 14 | assert blinka_funcs.board_count() >= 0 15 | -------------------------------------------------------------------------------- /tests/unit/test_common_funcs.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2021 Michael Schroeder 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """Unit tests for 'adabot/lib/common_funcs.py'""" 6 | 7 | import datetime 8 | import re 9 | 10 | import pytest # pylint: disable=unused-import 11 | import requests 12 | 13 | from adabot.lib import common_funcs 14 | from adabot import github_requests 15 | 16 | 17 | def test_list_repos(): 18 | """Test that list_repos returns a list object.""" 19 | repos = common_funcs.list_repos() 20 | 21 | assert isinstance(repos, list) 22 | 23 | 24 | def test_repo_is_on_pypi_true(): 25 | """Test 'repo_is_on_pypi'""" 26 | assert common_funcs.repo_is_on_pypi({"name": "pytest"}) 27 | 28 | 29 | sani_urls = [ 30 | {"id": "sanitize http://", "url": "http://www.website.com/"}, 31 | {"id": "sanitize https://", "url": "https://www.website.com"}, 32 | {"id": "sanitize git://", "url": "git://www.website.com"}, 33 | {"id": "sanitize ://*.git", "url": "http://www.website.com/page.git"}, 34 | ] 35 | 36 | 37 | @pytest.mark.parametrize("urls", sani_urls, ids=[url["id"] for url in sani_urls]) 38 | def test_sanitize_url(urls): 39 | """Test 'sanitize_urls'""" 40 | 
assert re.match( 41 | r"^(?!http|https|git)(?:\:\/\/){0,1}.+(? str: 37 | """Uses ``PyGithub`` to check the CI status of a repository 38 | 39 | :param Repository lib_repo: The repo as a github.Repository.Repository object 40 | :param str|None user: The user that triggered the run; if `None` is 41 | provided, any user is acceptable 42 | :param str|None branch: The branch name to specifically check; if `None` is 43 | provided, all branches are allowed; this is the default 44 | :param str|None workflow_filename: The filename of the workflow; if `None` is 45 | provided, any workflow name is acceptable; the default is ``"build.yml"`` 46 | :return: The requested runs conclusion 47 | :rtype: str 48 | """ 49 | 50 | arg_dict = {} 51 | if user is not None: 52 | arg_dict["actor"] = user 53 | if branch is not None: 54 | arg_dict["branch"] = branch 55 | 56 | workflow: Workflow = lib_repo.get_workflow(workflow_filename) 57 | workflow_runs = workflow.get_runs(**arg_dict) 58 | return workflow_runs[0].conclusion 59 | 60 | 61 | def run_gh_rest_rerun( 62 | lib_repo: Repository, 63 | user: Optional[str] = None, 64 | branch: Optional[str] = None, 65 | workflow_filename: Optional[str] = "build.yml", 66 | rerun_level: int = 0, 67 | ) -> bool: 68 | """Uses ``PyGithub`` to rerun the CI status of a repository 69 | 70 | :param Repository lib_repo: The repo as a github.Repository.Repository object 71 | :param str|None user: The user that triggered the run; if `None` is 72 | provided, any user is acceptable 73 | :param str|None branch: The branch name to specifically check; if `None` is 74 | provided, all branches are allowed; this is the default 75 | :param str|None workflow_filename: The filename of the workflow; if `None` is 76 | provided, any workflow name is acceptable; the default is ``"build.yml"`` 77 | :param int rerun_level: The level at which rerun should occur (0 = none, 78 | 1 = failed, 2 = all) 79 | :return: The requested runs conclusion 80 | :rtype: bool 81 | """ 82 | if not rerun_level: 83 | return False 84 | result = None 85 | if rerun_level == 1: 86 | result = ( 87 | run_gh_rest_check(lib_repo, user, branch, workflow_filename) == "success" 88 | ) 89 | if rerun_level == 2 or not result: 90 | arg_dict = {} 91 | if user is not None: 92 | arg_dict["actor"] = user 93 | if branch is not None: 94 | arg_dict["branch"] = branch 95 | workflow: Workflow = lib_repo.get_workflow(workflow_filename) 96 | latest_run: WorkflowRun = workflow.get_runs(**arg_dict)[0] 97 | latest_run.rerun() 98 | return True 99 | return False 100 | 101 | 102 | def check_build_status( 103 | lib_repo: Repository, 104 | user: Optional[str] = None, 105 | branch: Optional[str] = None, 106 | workflow_filename: Optional[str] = "build.yml", 107 | debug: bool = False, 108 | ) -> Optional[str]: 109 | """Uses ``PyGithub`` to check the build statuses of the Adafruit 110 | CircuitPython Bundle 111 | 112 | :param Repository lib_repo: The repo as a github.Repository.Repository object 113 | :param str|None user: The user that triggered the run; if `None` is 114 | provided, any user is acceptable 115 | :param str|None branch: The branch name to specifically check; if `None` is 116 | provided, all branches are allowed; this is the default 117 | :param str|None workflow_filename: The filename of the workflow; if `None` 118 | is provided, any workflow name is acceptable; the defail is `"build.yml"` 119 | :param bool debug: Whether debug statements should be printed to the standard 120 | output 121 | :return: The result of the workflow run, or ``None`` if it 
could not be 122 | determined 123 | :rtype: str|None 124 | """ 125 | 126 | if debug: 127 | print("Checking", lib_repo.name) 128 | 129 | if lib_repo.archived: 130 | return True 131 | 132 | try: 133 | result = ( 134 | run_gh_rest_check(lib_repo, user, branch, workflow_filename) == "success" 135 | ) 136 | if debug and not result: 137 | print("***", "Library", lib_repo.name, "failed the patch!", "***") 138 | return result 139 | except GithubException: 140 | if debug: 141 | print( 142 | "???", 143 | "Library", 144 | lib_repo.name, 145 | "workflow could not be determined", 146 | "???", 147 | ) 148 | return None 149 | 150 | 151 | # pylint: disable=too-many-arguments 152 | def rerun_workflow( 153 | lib_repo: Repository, 154 | user: Optional[str] = None, 155 | branch: Optional[str] = None, 156 | workflow_filename: Optional[str] = "build.yml", 157 | rerun_level: int = 0, 158 | debug: bool = False, 159 | ): 160 | """Uses ``PyGithub`` to rerun the CI of the Adafruit 161 | CircuitPython Bundle repositories 162 | 163 | :param Repository lib_repo: The repo as a github.Repository.Repository object 164 | :param str|None user: The user that triggered the run; if `None` is 165 | provided, any user is acceptable 166 | :param str|None branch: The branch name to specifically check; if `None` is 167 | provided, all branches are allowed; this is the default 168 | :param str|None workflow_filename: The filename of the workflow; if `None` 169 | is provided, any workflow name is acceptable; the defail is `"build.yml"` 170 | :param int rerun_level: The level at which rerun should occur (0 = none, 171 | 1 = failed, 2 = all) 172 | :param bool debug: Whether debug statements should be printed to the standard 173 | output 174 | :return: The result of the workflow run, or ``None`` if it could not be 175 | determined 176 | :rtype: bool|None 177 | """ 178 | if lib_repo.archived: 179 | return False 180 | 181 | try: 182 | result = run_gh_rest_rerun( 183 | lib_repo, user, branch, workflow_filename, rerun_level 184 | ) 185 | if debug and result: 186 | print("***", "Library", lib_repo.name, "workflow was rerun!", "***") 187 | return result 188 | except GithubException: 189 | if debug: 190 | print( 191 | "???", 192 | "Library", 193 | lib_repo.name, 194 | "had an issue occur", 195 | "???", 196 | ) 197 | return None 198 | 199 | 200 | def check_build_statuses( 201 | gh_token: str, 202 | user: Optional[str] = None, 203 | branch: Optional[str] = "main", 204 | workflow_filename: Optional[str] = "build.yml", 205 | *, 206 | debug: bool = False, 207 | local_folder: str = "", 208 | ) -> list[RemoteLibFunc_IterResult[bool]]: 209 | """Checks all the libraries in the Adafruit CircuitPython Bundle to get the 210 | latest build status with the requested information 211 | 212 | :param str gh_token: The Github token to be used for with the Github API 213 | :param str|None user: The user that triggered the run; if `None` is 214 | provided, any user is acceptable 215 | :param str|None branch: The branch name to specifically check; if `None` is 216 | provided, all branches are allowed; this is the default 217 | :param str|None workflow_filename: The filename of the workflow; if `None` is 218 | provided, any workflow name is acceptable; the defail is `"build.yml"` 219 | :param bool debug: Whether debug statements should be printed to 220 | the standard output 221 | :param str local_folder: A path to a local folder containing extra repositories 222 | :return: A list of tuples containing paired Repoistory objects and build 223 | statuses 224 | :rtype: 
234 | def rerun_workflows(
235 |     gh_token: str,
236 |     user: Optional[str] = None,
237 |     branch: Optional[str] = "main",
238 |     workflow_filename: Optional[str] = "build.yml",
239 |     rerun_level: int = 0,
240 |     *,
241 |     debug: bool = False,
242 |     local_folder: str = "",
243 | ) -> list[RemoteLibFunc_IterResult[bool]]:
244 |     """Reruns the CI of all the libraries in the Adafruit CircuitPython Bundle.
245 | 
246 |     :param str gh_token: The Github token to be used with the Github API
247 |     :param str|None user: The user that triggered the run; if `None` is
248 |         provided, any user is acceptable
249 |     :param str|None branch: The branch name to specifically check; if `None` is
250 |         provided, all branches are allowed; the default is `"main"`
251 |     :param str|None workflow_filename: The filename of the workflow; if `None` is
252 |         provided, any workflow name is acceptable; the default is `"build.yml"`
253 |     :param int rerun_level: The level at which reruns should occur (0 = none,
254 |         1 = failed, 2 = all)
255 |     :param bool debug: Whether debug statements should be printed to
256 |         the standard output
257 |     :param str local_folder: A path to a local folder containing extra repositories
258 |     :return: A list of tuples containing paired Repository objects and rerun
259 |         results
260 |     :rtype: list
261 |     """
262 | 
263 |     return iter_remote_bundle_with_func(
264 |         gh_token,
265 |         [
266 |             (
267 |                 rerun_workflow,
268 |                 (user, branch, workflow_filename, rerun_level),
269 |                 {"debug": debug},
270 |             )
271 |         ],
272 |         local_folder=local_folder,
273 |     )
274 | 
275 | 
276 | def save_build_statuses(
277 |     build_results: list[RemoteLibFunc_IterResult[bool]],
278 |     failures_filepath: StrPath = "failures.txt",
279 | ) -> None:
280 |     """Save the list of failed and/or errored libraries to files
281 | 
282 |     :param list build_results: The list of workflow run results after
283 |         iterating through the libraries
284 |     :param StrPath failures_filepath: The filename/filepath to write the list
285 |         of failed libraries to; the default is "failures.txt"
286 |     """
287 | 
288 |     # Get failed builds
289 |     bad_builds = [result[0].name for result in build_results if not result[1][0]]
290 | 
291 |     # Save the list of bad builds, if any
292 |     if bad_builds:
293 |         with open(failures_filepath, mode="w", encoding="utf-8") as outputfile:
294 |             for build in bad_builds:
295 |                 outputfile.write(build + "\n")
296 | 
297 | 
298 | if __name__ == "__main__":
299 |     parser = argparse.ArgumentParser(
300 |         description="Check the CI status of the Bundle libraries"
301 |     )
302 |     parser.add_argument(
303 |         "gh_token", metavar="GH_TOKEN", type=str, help="GitHub token with proper scopes"
304 |     )
305 |     parser.add_argument(
306 |         "--user",
307 |         metavar="U",
308 |         type=str,
309 |         dest="user",
310 |         default=None,
311 |         help="Select a specific user that triggered the workflow",
312 |     )
313 |     parser.add_argument(
314 |         "--branch",
315 |         metavar="B",
316 |         type=str,
317 |         dest="branch",
318 |         default=None,
319 |         help="Branch name; if omitted, all branches are checked",
320 |     )
321 |     parser.add_argument(
322 |         "--workflow",
323 |         metavar="W",
324 |         type=str,
325 |         dest="workflow",
326 |         default="build.yml",
327 |         help='Workflow name; default is "build.yml"',
328 |     )
329 |     parser.add_argument(
330 |         "--debug", action="store_true", help="Print debug text during execution"
331 | 
) 332 | parser.add_argument( 333 | "--rerun-level", 334 | metavar="R", 335 | type=int, 336 | dest="rerun_level", 337 | default=0, 338 | help="Level to rerun CI workflows (0 = none, 1 = failed, 2 = all)", 339 | ) 340 | parser.add_argument( 341 | "--local-folder", 342 | metavar="L", 343 | type=str, 344 | dest="local_folder", 345 | default="", 346 | help="An additional folder to check and run", 347 | ) 348 | 349 | args = parser.parse_args() 350 | 351 | if args.rerun_level: 352 | if args.debug: 353 | print("Rerunning workflows...") 354 | rerun_workflows( 355 | args.gh_token, 356 | args.user, 357 | args.branch, 358 | args.workflow, 359 | args.rerun_level, 360 | debug=args.debug, 361 | local_folder=args.local_folder, 362 | ) 363 | if args.debug: 364 | print("Waiting 10 minutes to allow workflows to finish running...") 365 | time.sleep(600) 366 | 367 | if args.debug: 368 | print("Checking workflows statuses...") 369 | results = check_build_statuses( 370 | args.gh_token, 371 | args.user, 372 | args.branch, 373 | args.workflow, 374 | debug=args.debug, 375 | local_folder=args.local_folder, 376 | ) 377 | 378 | fail_list = [ 379 | repo_name.name for repo_name, repo_results in results if not repo_results[0] 380 | ] 381 | 382 | if fail_list: 383 | print(f'Failures for CI workflow "{args.workflow}":') 384 | for failure in fail_list: 385 | print(failure) 386 | RETURN_CODE = 1 387 | else: 388 | print(f"No failures for CI workflow: {args.workflow}!") 389 | RETURN_CODE = 0 390 | 391 | raise SystemExit(RETURN_CODE) 392 | -------------------------------------------------------------------------------- /tools/docs_status.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2022 Alec Delaney 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """ 6 | 7 | docs_status.py 8 | ============== 9 | 10 | Functionality for checking the ReadTheDocs build status for libraries 11 | in the Adafruit CircuitPython Bundle 12 | 13 | * Author(s): Alec Delaney 14 | 15 | """ 16 | 17 | from typing import Any, Optional 18 | import argparse 19 | import time 20 | import parse 21 | import requests 22 | from github.Repository import Repository 23 | from github.ContentFile import ContentFile 24 | 25 | from iterate_libraries import ( 26 | iter_remote_bundle_with_func, 27 | RemoteLibFunc_IterResult, 28 | ) 29 | from adabot import REQUESTS_TIMEOUT 30 | 31 | 32 | def check_docs_status( 33 | lib_repo: Repository, rtd_token: str, *, debug: bool = True 34 | ) -> Optional[bool]: 35 | """Checks a library for the latest documentation build status with the 36 | requested information 37 | 38 | .. note:: 39 | 40 | The ReadTheDocs token must have sufficient privileges for accessing 41 | the API; therefore, only a maintainer can use this functionality. 
42 | 
43 |     :param Repository lib_repo: The repo as a
44 |         github.Repository.Repository object
45 |     :param str rtd_token: A ReadTheDocs API token with sufficient privileges
46 |     :param bool debug: Whether to use debug print statements
47 |     :return: Whether the documentation built successfully; returns None if it
48 |         could not be determined
49 |     :rtype: bool|None
50 |     """
51 | 
52 |     if debug:
53 |         print("Checking", lib_repo.name)
54 | 
55 |     # Get the README file contents
56 |     content_file: ContentFile = lib_repo.get_contents("README.rst")
57 |     readme_text = content_file.decoded_content.decode("utf-8")
58 | 
59 |     # Parse for the ReadTheDocs slug
60 |     search_results: parse.Result = parse.search(
61 |         "https://readthedocs.org/projects/{slug:S}/badge", readme_text
62 |     )
63 |     rtd_slug: str = search_results.named["slug"]
64 |     rtd_slug = rtd_slug.replace("_", "-", -1)
65 | 
66 |     # GET the latest documentation build runs
67 |     url = f"https://readthedocs.org/api/v3/projects/{rtd_slug}/builds/"
68 |     headers = {"Authorization": f"token {rtd_token}"}
69 |     response = requests.get(url, headers=headers, timeout=REQUESTS_TIMEOUT)
70 |     json_response: dict[str, Any] = response.json()
71 | 
72 |     # Return the results of the latest run
73 |     doc_build_results: Optional[list[dict[str, Any]]] = json_response.get(
74 |         "results", None
75 |     )
76 |     if doc_build_results is None:
77 |         return None
78 |     result = doc_build_results[0].get("success")
79 |     if debug and not result:
80 |         print(f"RTD build failed or unavailable for {lib_repo.name}")
81 |     time.sleep(3)
82 |     return result
83 | 
84 | 
85 | def check_docs_statuses(
86 |     gh_token: str, rtd_token: str
87 | ) -> list[RemoteLibFunc_IterResult[Optional[bool]]]:
88 |     """Checks all the libraries in the Adafruit CircuitPython Bundle
89 |     to get the latest documentation build status with the requested
90 |     information
91 | 
92 |     .. note::
93 | 
94 |         The ReadTheDocs token must have sufficient privileges for accessing
95 |         the API; therefore, only a maintainer can use this functionality.
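
    For example (an illustrative sketch; both token values are placeholders)::

        statuses = check_docs_statuses("<github-token>", "<readthedocs-token>")
        failing = [repo.name for repo, result in statuses if not result[0]]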
96 | 97 | :param str gh_token: The Github token to be used for with the Github 98 | API 99 | :param str rtd_token: A ReadTheDocs API token with sufficient privileges 100 | :return: A list of tuples containing paired Repository objects and 101 | documentation build statuses 102 | :rtype: list 103 | """ 104 | 105 | return iter_remote_bundle_with_func( 106 | gh_token, [(check_docs_status, (rtd_token,), {"debug": True})] 107 | ) 108 | 109 | 110 | if __name__ == "__main__": 111 | parser = argparse.ArgumentParser( 112 | description="Check the RTD docs build status of the Bundle libraries" 113 | ) 114 | parser.add_argument( 115 | "gh_token", metavar="GH_TOKEN", type=str, help="GitHub token with proper scopes" 116 | ) 117 | parser.add_argument( 118 | "rtd_token", metavar="RTD_TOKEN", type=str, help="ReadTheDocs token" 119 | ) 120 | 121 | args = parser.parse_args() 122 | 123 | results = check_docs_statuses(args.gh_token, args.rtd_token) 124 | fail_list = [ 125 | repo_name.name 126 | for repo_name, repo_results in results 127 | if not repo_results[0] # pylint: disable=singleton-comparison 128 | ] 129 | 130 | if fail_list: 131 | print("Failures for RTD builds:") 132 | for failure in fail_list: 133 | print(failure) 134 | RETURN_CODE = 1 135 | else: 136 | print("No failures for RTD builds!") 137 | RETURN_CODE = 0 138 | 139 | raise SystemExit(RETURN_CODE) 140 | -------------------------------------------------------------------------------- /tools/file_compare.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2022 Eva Herrada 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """ 6 | 7 | file-compare.py 8 | =============== 9 | 10 | Functionality to compare a file across all Adafruit CircuitPython repos 11 | and output the text of the files along with which and how many repos use that 12 | exact file text. 13 | 14 | * Author(s): Eva Herrada 15 | 16 | """ 17 | import argparse 18 | from typing import Optional 19 | 20 | import requests 21 | from requests.structures import CaseInsensitiveDict 22 | 23 | from adabot import REQUESTS_TIMEOUT 24 | from adabot.lib.common_funcs import list_repos 25 | 26 | 27 | def compare(git_file: str, token: Optional[str] = None) -> list: 28 | """Uses requests to compare files across the adafruit org 29 | 30 | .. note:: 31 | 32 | The GitHub API token is not necessary as long as all repos 33 | being accessed are public. However: it does make things easier 34 | as you won't get rate-limited quite as often 35 | 36 | :param str git_file: The file to compare 37 | :param str|None token: The (optional but recommended) github API token 38 | :return: A list containing all the unique file texts, sorted from most to 39 | least common along with the repos that have that exact file text. 
40 | :rtype: list 41 | """ 42 | 43 | files = {} 44 | 45 | all_repos = list_repos() 46 | print("Got Repos List") 47 | print(f"Repos found: {len(all_repos)}") 48 | 49 | for repo in all_repos: 50 | name = repo["name"] 51 | url = f"https://raw.githubusercontent.com/adafruit/{name}/main/{git_file}" 52 | 53 | if token: 54 | # If repo is private - we need to add a token in header: 55 | headers = CaseInsensitiveDict() 56 | headers["Authorization"] = f"token {token}" 57 | 58 | resp = requests.get(url, headers=headers, timeout=REQUESTS_TIMEOUT) 59 | else: 60 | resp = requests.get(url, timeout=REQUESTS_TIMEOUT) 61 | 62 | if resp.status_code != 200: 63 | print(name) 64 | print(resp.status_code) 65 | if resp.text not in files: 66 | files[resp.text] = [1, [repo["html_url"]]] 67 | else: 68 | files[resp.text][0] = files[resp.text][0] + 1 69 | files[resp.text][1].append(repo["html_url"]) 70 | 71 | top = 0 72 | sort = [] 73 | for text, repos in files.items(): 74 | if repos[0] >= top: 75 | sort.insert(0, [repos[0], text, repos[1]]) 76 | top = repos[0] 77 | else: 78 | for i, val in enumerate(sort): 79 | if val[0] <= repos[0]: 80 | sort.insert(i, [repos[0], text, repos[1]]) 81 | break 82 | 83 | return sort 84 | 85 | 86 | if __name__ == "__main__": 87 | parser = argparse.ArgumentParser( 88 | description="Compare files across the adafruit CircuitPython repos", 89 | ) 90 | parser.add_argument( 91 | "gh_token", 92 | metavar="GH_TOKEN", 93 | type=str, 94 | help="GitHub token with proper scopes", 95 | ) 96 | 97 | parser.add_argument( 98 | "--file", 99 | metavar="", 100 | type=str, 101 | dest="file", 102 | required=True, 103 | help="File to compare", 104 | ) 105 | 106 | parser.add_argument( 107 | "-o", 108 | metavar="", 109 | type=str, 110 | dest="outfile", 111 | default=None, 112 | help="File to send output to", 113 | ) 114 | 115 | args = parser.parse_args() 116 | 117 | results = compare(args.file, args.gh_token) 118 | 119 | for index, value in enumerate(results): 120 | print(f"##### {index+1}/{len(results)} #####") 121 | print(value[0]) 122 | print("START OF FILE") 123 | print(value[1]) 124 | print("END OF FILE") 125 | print(value[2]) 126 | print() 127 | if args.outfile: 128 | with open(args.outfile, "w") as F: 129 | for index, value in enumerate(results): 130 | F.write(f"##### {index+1}/{len(results)} #####\n") 131 | F.write(f"{value[0]}\n") 132 | F.write("START OF FILE\n") 133 | F.write(f"{value[1]}\n") 134 | F.write("END OF FILE\n") 135 | for r in value[2]: 136 | F.write(r + "\n") 137 | F.write("\n") 138 | -------------------------------------------------------------------------------- /tools/find_text.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2022 Eva Herrada 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """ 6 | Tool for searching for text across all circuitpython libraries. 7 | Intended to be used to verify patches. 8 | 9 | IMPORTANT: Must be run from the top-level adabot directory (one directory up 10 | from this one) 11 | 12 | Type `python3 find_text.py -h` to figure out how to use. 
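
Example invocation (illustrative values; any text/file combination works):

    python3 find_text.py -t "pre-commit" -f .pre-commit-config.yaml -o matches.txt

This looks for the string "pre-commit" in each library's .pre-commit-config.yaml
and writes the grouped results to matches.txt.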
13 | """ 14 | 15 | import datetime 16 | import getopt 17 | import json 18 | import sys 19 | 20 | import requests 21 | 22 | from adabot import REQUESTS_TIMEOUT 23 | from adabot.lib.common_funcs import list_repos 24 | 25 | argumentList = sys.argv[1:] 26 | 27 | OPTIONS = "ht:f:o:j" 28 | 29 | long_options = ["help", "text=", "file=", "outfile=", "json"] 30 | 31 | HELPMSG = """Usage: 32 | python3 find_text.py [-h | -t text | -f file | -o outfile | -j] 33 | Arguments: 34 | -h --help - Displays this message 35 | -t --text - (required) Text to check, can be used multiple times 36 | -f --file - (required) File to check for text for on github 37 | -o --outfile - (optional) Output file, prints output to stdout if variable 38 | is not set 39 | -j --json - Outputs in json instead of plain text""" 40 | 41 | text = [] 42 | FILE = None 43 | OUTFILE = None 44 | j = False 45 | 46 | URL_TEMPLATE = "https://raw.githubusercontent.com/adafruit/{}/main/{}" 47 | RELEASE_TEMPLATE = "https://api.github.com/repos/adafruit/{}/releases/latest" 48 | 49 | try: 50 | arguments, values = getopt.getopt(argumentList, OPTIONS, long_options) 51 | 52 | for currentArgument, currentValue in arguments: 53 | if currentArgument in ("-h", "--help"): 54 | print(HELPMSG) 55 | sys.exit() 56 | 57 | if currentArgument in ("-t", "--text"): 58 | print(f"Text: {currentValue}") 59 | text.append(currentValue) 60 | print(text) 61 | 62 | if currentArgument in ("-f", "--file"): 63 | print(f"File: {currentValue}") 64 | FILE = currentValue 65 | 66 | if currentArgument in ("-o", "--outfile"): 67 | OUTFILE = currentValue 68 | 69 | if currentArgument in ("-j", "--json"): 70 | j = True 71 | 72 | except getopt.error as err: 73 | print(str(err)) 74 | 75 | 76 | if len(text) == 0 or FILE is None: 77 | if len(text) == 0: 78 | print("Please enter text to check for") 79 | if FILE is None: 80 | print("Please enter a file to search for the text in") 81 | print(HELPMSG) 82 | sys.exit() 83 | 84 | RESULTS = { 85 | "file_not_found": [], 86 | "file_has_none": [], 87 | "file_has_all": [], 88 | } 89 | for i in range(len(text)): 90 | RESULTS[f"file_has_text_{i}"] = [] 91 | 92 | 93 | def delete_multiple_lines(n=1): 94 | """Delete the last line in the STDOUT.""" 95 | for _ in range(n): 96 | sys.stdout.write("\x1b[1A") # cursor up one line 97 | sys.stdout.write("\x1b[2K") # delete the last line 98 | 99 | 100 | def prettyprint(info, results): 101 | """Prints info about current repo and result of search""" 102 | print("┌" + "─" * (len(info) + 4) + "┐") 103 | print("│ ", info, " │") 104 | for res in results: 105 | print("│ ", res, " " * (len(info) - (len(res) - 9)), "│") 106 | print("└" + "─" * (len(info) + 4) + "┘") 107 | delete_multiple_lines(3 + len(results)) 108 | 109 | 110 | try: 111 | with open("repos.json", "r") as f: 112 | LAST_RUN = f.readline().rstrip() 113 | except FileNotFoundError: 114 | LAST_RUN = "" 115 | 116 | print(f"Last run: {LAST_RUN}") 117 | if LAST_RUN != str(datetime.date.today()): 118 | with open("repos.json", "w") as f: 119 | print("Fetching Repos List") 120 | all_repos = list_repos() 121 | print("Got Repos List") 122 | f.write(str(datetime.date.today()) + "\n") 123 | f.write(json.dumps(all_repos)) 124 | 125 | with open("repos.json", "r") as f: 126 | all_repos = json.loads(f.read().split("\n")[1]) 127 | 128 | print(f"Repos found: {len(all_repos)}") 129 | 130 | 131 | for repo in all_repos: 132 | INFO = "getting {} for: {}".format(FILE, repo["name"]) 133 | response = requests.get( 134 | URL_TEMPLATE.format(repo["name"], FILE), timeout=REQUESTS_TIMEOUT 
135 | ) 136 | result = [] 137 | if response.status_code == 404: 138 | RESULTS["file_not_found"].append(repo["html_url"]) 139 | result.append("\033[91mFile not found\033[0m") 140 | else: 141 | tracker = [False for i in range(len(text))] 142 | for index, item in enumerate(text): 143 | if item in response.text: 144 | tracker[index] = True 145 | 146 | if all(tracker): 147 | result = ["\033[92mFound all text\033[0m"] 148 | RESULTS["file_has_all"].append(repo["html_url"]) 149 | elif not any(tracker): 150 | result = ["\033[91mDid not find any text\033[0m"] 151 | RESULTS["file_has_none"].append(repo["html_url"]) 152 | for index, item in enumerate(tracker): 153 | if item: 154 | result.append(f"\033[93mFound text {index}\033[0m") 155 | RESULTS[f"file_has_text_{index}"].append(repo["html_url"]) 156 | else: 157 | result.append(f"\033[93mDid not find text {index}\033[0m") 158 | 159 | prettyprint(INFO, result) 160 | 161 | if j: 162 | if OUTFILE is not None: 163 | with open(OUTFILE, "w") as F: 164 | F.write(json.dumps(RESULTS)) 165 | else: 166 | print(json.dumps(RESULTS)) 167 | else: 168 | if OUTFILE is not None: 169 | with open(OUTFILE, "w") as F: 170 | for k, v in RESULTS.items(): 171 | F.write(k + "\n") 172 | for i in v: 173 | F.write(i + "\n") 174 | F.write("\n") 175 | else: 176 | for k, v in RESULTS.items(): 177 | print(k) 178 | for i in v: 179 | print(i) 180 | 181 | 182 | print("┌" + "─" * 30 + "┐") 183 | for k, v in RESULTS.items(): 184 | print("│ ", k, len(v), " " * (24 - (len(k) + len(str(len(v))))), " │") 185 | print("└" + "─" * 30 + "┘") 186 | -------------------------------------------------------------------------------- /tools/git_functionality.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2022 Alec Delaney 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """ 6 | 7 | git_functionality.py 8 | ==================== 9 | 10 | Functionality for using GitPython to fetch, pull, commit, push, etc. 
to
11 | cloned repos and their remotes
12 | 
13 | * Author(s): Alec Delaney
14 | 
15 | """
16 | 
17 | import functools
18 | from typing import Any
19 | import git
20 | import git.repo
21 | import git.index.base
22 | from library_functions import StrPath
23 | 
24 | 
25 | def _get_repo_and_remote(
26 |     lib_path: StrPath, remote_name: str = "origin"
27 | ) -> tuple[git.repo.Repo, git.Remote]:
28 |     """
29 |     Get the repo and remote objects
30 | 
31 |     :param StrPath lib_path: The path to the repo
32 |     :param str remote_name: (optional) The name of the remote,
33 |         default is ``origin``
34 |     :return: The repo and remote objects
35 |     :rtype: tuple
36 |     """
37 | 
38 |     # Create the repo and remote objects
39 |     repo = git.repo.Repo(lib_path)
40 |     remote = repo.remote(remote_name)
41 | 
42 |     return repo, remote
43 | 
44 | 
45 | def sync_and_checkout(
46 |     lib_path: StrPath, remote_name: str = "origin", branch_name: str = "main"
47 | ) -> None:
48 |     """
49 |     Update the repo, and ensure it is on the given branch using a
50 |     forced checkout
51 | 
52 |     :param StrPath lib_path: The path to the repo
53 |     :param str remote_name: The remote name to fetch and pull,
54 |         default is ``origin``
55 |     :param str branch_name: The branch name to checkout, default
56 |         is ``main``
57 |     """
58 | 
59 |     # Create the repo and remote objects
60 |     repo, remote = _get_repo_and_remote(lib_path, remote_name)
61 | 
62 |     # Fetch from the remote
63 |     remote.fetch()
64 | 
65 |     # Checkout and pull to the given branch
66 |     # if repo.active_branch != branch_name:
67 |     branch: git.Head = getattr(repo.heads, branch_name)
68 |     branch.checkout(force=True)
69 |     remote.pull()
70 | 
71 | 
72 | def push_changes(lib_path: StrPath, remote_name: str = "origin") -> None:
73 |     """
74 |     Pushes any changes made to the repo to the given remote
75 | 
76 |     :param StrPath lib_path: The path to the repo
77 |     :param str remote_name: (optional) The name of the remote, default
78 |         is ``origin``
79 |     """
80 | 
81 |     # Create the repo and remote objects
82 |     _, remote = _get_repo_and_remote(lib_path, remote_name)
83 | 
84 |     # Push changes
85 |     remote.push()
86 | 
87 | 
88 | def commit_changes(
89 |     lib_path: StrPath,
90 |     message: str,
91 |     remote_name: str = "origin",
92 |     skip_hooks: bool = True,
93 | ) -> None:
94 |     """
95 |     Stage all files and commit them
96 | 
97 |     :param StrPath lib_path: The path to the repo
98 |     :param str message: The commit message
99 |     :param str remote_name: (optional) The name of the remote,
100 |         default is ``origin``
101 |     :param bool skip_hooks: (optional) Whether commit hooks should be
102 |         skipped; default is True
103 |     """
104 | 
105 |     # Create the repo and remote objects
106 |     repo, _ = _get_repo_and_remote(lib_path, remote_name)
107 | 
108 |     # Add all the files and commit them
109 |     index_file = git.index.base.IndexFile(repo)
110 |     index_file.add("*")
111 |     index_file.commit(message, skip_hooks=skip_hooks)
112 | 
113 | 
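# Example usage (an illustrative sketch; the library path is a placeholder):
#
#     lib = "../libraries/drivers/some_library"
#     sync_and_checkout(lib, remote_name="origin", branch_name="main")
#     # ... modify files inside the checkout here ...
#     commit_changes(lib, "Apply automated patch")
#     push_changes(lib)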
114 | def sync_commit_push(
115 |     message: str,
116 |     *,
117 |     remote_name: str = "origin",
118 |     branch_name: str = "main",
119 |     skip_hooks: bool = True
120 | ):
121 |     """
122 |     Decorator for automatically fetching, pulling, and pushing changes
123 |     for a library function
124 | 
125 |     :param str message: The commit message
126 |     :param str remote_name: (optional) The name of the remote, default
127 |         is ``origin``
128 |     :param str branch_name: (optional) The name of the branch, default
129 |         is ``main``
130 |     :param bool skip_hooks: (optional) Whether to skip the commit hooks,
131 |         default is ``True``
132 |     """
133 | 
134 |     def decorator_sync_commit_push(func):
135 | 
136 |         @functools.wraps(func)
137 |         def wrapper_sync_commit_push(lib_path: StrPath, *args, **kwargs) -> Any:
138 |             # Fetch and pull to repo
139 |             sync_and_checkout(lib_path, remote_name, branch_name)
140 | 
141 |             # Run function
142 |             result = func(lib_path, *args, **kwargs)
143 | 
144 |             # Commit and push changes
145 |             commit_changes(lib_path, message, remote_name, skip_hooks)
146 |             push_changes(lib_path, remote_name)
147 | 
148 |             # Return the function result(s)
149 |             return result
150 | 
151 |         return wrapper_sync_commit_push
152 | 
153 |     return decorator_sync_commit_push
154 | 
-------------------------------------------------------------------------------- /tools/iterate_libraries.py: --------------------------------------------------------------------------------
1 | # SPDX-FileCopyrightText: 2022 Alec Delaney
2 | #
3 | # SPDX-License-Identifier: MIT
4 | 
5 | """
6 | 
7 | iterate_libraries.py
8 | ====================
9 | 
10 | Functionality for iterating through a cloned Adafruit CircuitPython
11 | Bundle to run functions on each library
12 | 
13 | * Author(s): Alec Delaney
14 | 
15 | """
16 | 
17 | import os
18 | import glob
19 | import pathlib
20 | from collections.abc import Sequence, Iterable
21 | from typing import TypeVar, Any, Union, List
22 | from typing_extensions import TypeAlias
23 | import parse
24 | from github import Github
25 | from github.Repository import Repository
26 | from github.ContentFile import ContentFile
27 | from library_functions import StrPath, LocalLibFunc, RemoteLibFunc
28 | 
29 | # Helpful type annotation definitions
30 | 
31 | PosArg = TypeVar("PosArg")
32 | KeyArg = TypeVar("KeyArg")
33 | RetArg = TypeVar("RetArg")
34 | 
35 | LocalLibFunc_IterInstruction: TypeAlias = tuple[
36 |     LocalLibFunc, Sequence[PosArg], dict[str, KeyArg]
37 | ]
38 | """Instruction set as a tuple of a function to run on a local library,
39 | a list of the positional arguments to be provided to it, and a
40 | dictionary of keyword arguments to be provided to it. You do not need
41 | to include the library path as an argument, as it is automatically
42 | supplied."""
43 | 
44 | LocalLibFunc_IterResult: TypeAlias = tuple[StrPath, list[RetArg]]
45 | """Result of function(s) run on a library as a tuple of the path to
46 | the local library modified and a list of the result(s) of the
47 | function(s)"""
48 | 
49 | RemoteLibFunc_IterInstruction: TypeAlias = tuple[
50 |     RemoteLibFunc, Sequence[PosArg], dict[str, KeyArg]
51 | ]
52 | """Instruction set as a tuple of a function to run on a remote library,
53 | a list of the positional arguments to be provided to it, and a
54 | dictionary of keyword arguments to be provided to it.
You do not need 55 | to include the Repository object as an argument, as it is autmoatically 56 | supplied.""" 57 | 58 | RemoteLibFunc_IterResult: TypeAlias = tuple[Repository, list[RetArg]] 59 | """Result of function(s) run on a library as a tuple of the name of 60 | the remote library modified and a list of the result(s) of the 61 | function(s)""" 62 | 63 | 64 | # Global Variables 65 | 66 | _BUNDLE_BRANCHES = ("drivers", "helpers") 67 | 68 | 69 | def perform_func( 70 | item: Any, 71 | func_workflow: Union[RemoteLibFunc_IterInstruction, LocalLibFunc_IterInstruction], 72 | ) -> Union[List[RemoteLibFunc_IterResult], List[LocalLibFunc_IterResult]]: 73 | """ 74 | Perform the given function 75 | """ 76 | func_results = [] 77 | for func, args, kwargs in func_workflow: 78 | result = func(item, *args, **kwargs) 79 | func_results.append(result) 80 | return func_results 81 | 82 | 83 | def iter_local_bundle_with_func( 84 | bundle_path: StrPath, 85 | func_workflow: Iterable[LocalLibFunc_IterInstruction], 86 | *, 87 | local_folder: str = "", 88 | ) -> list[LocalLibFunc_IterResult]: 89 | """Iterate through the libraries and run a given function with the 90 | provided arguments 91 | 92 | :param StrPath bundle_path: The path to the cloned bundle 93 | :param Iterable func_workflow: An iterable of tuples containing pairs 94 | of functions and corresponding arguments; the path to each specific 95 | library is automatically provided to the functions, so the functions 96 | must account for it 97 | :return: A list containing tuples of pairs of each library path and a list 98 | with the results from each function 99 | :rtype: list 100 | """ 101 | 102 | # Initialize list of results 103 | results = [] 104 | 105 | # Keep track of all libraries iterated 106 | iterated = set() 107 | 108 | # Loop through each bundle branch 109 | for branch_name in _BUNDLE_BRANCHES: 110 | libraries_glob_path = os.path.join(bundle_path, "libraries", branch_name, "*") 111 | libraries_path_list = glob.glob(libraries_glob_path) 112 | 113 | # Enter each library in the bundle 114 | for library_path in libraries_path_list: 115 | iterated.add(os.path.split(library_path)[1].lower()) 116 | func_results = perform_func(library_path, func_workflow) 117 | 118 | results.append((library_path, func_results)) 119 | 120 | if local_folder: 121 | additional = { 122 | os.path.split(pathname)[1].lower() 123 | for pathname in glob.glob(os.path.join(local_folder, "*")) 124 | } 125 | diff = additional.difference(iterated) 126 | for unused in diff: 127 | unused_func_results = perform_func(unused, func_workflow) 128 | results.append((unused, unused_func_results)) 129 | 130 | return results 131 | 132 | 133 | # pylint: disable=too-many-locals 134 | def iter_remote_bundle_with_func( 135 | gh_token: str, 136 | func_workflow: RemoteLibFunc_IterInstruction, 137 | *, 138 | local_folder: str = "", 139 | ) -> list[RemoteLibFunc_IterResult]: 140 | """Iterate through the remote bundle, accessing each library's git repo 141 | using the GitHub RESTful API (specifically using ``PyGithub``) 142 | 143 | :param str gh_token: A GitHub token with proper scopes 144 | :param Iterable func_workflow: An iterable of tuples containing pairs 145 | of functions and corresponding arguments; the path to each specific 146 | library is automatically provided to the functions, so the functions 147 | must account for it 148 | :return: A list containing tuples of pairs of each library path and a list 149 | with the results from each function 150 | :rtype: list 151 | """ 152 | 153 | # Get the 
Github repo object 154 | github_client = Github(gh_token) 155 | bundle_repo = github_client.get_repo("adafruit/Adafruit_CircuitPython_Bundle") 156 | 157 | # Initialize list of results 158 | results = [] 159 | 160 | # Keep track of all libraries iterated 161 | iterated = set() 162 | 163 | # Loop through each bundle branch 164 | for branch_name in _BUNDLE_BRANCHES: 165 | branch_repos_path = "/".join(("libraries", branch_name)) 166 | branch_repos: list[ContentFile] = bundle_repo.get_contents(branch_repos_path) 167 | 168 | # Enter each library in the bundle 169 | for repo_file in branch_repos: 170 | repo_name_result: parse.Result = parse.search( 171 | "repos/adafruit/{repo_name:w}/", repo_file.git_url 172 | ) 173 | repo_name: str = repo_name_result.named["repo_name"] 174 | 175 | repo = github_client.get_repo(f"adafruit/{repo_name}") 176 | iterated.add(repo_name.lower()) 177 | 178 | func_results = perform_func(repo, func_workflow) 179 | results.append((repo, func_results)) 180 | 181 | if local_folder: 182 | additional = { 183 | path.name.lower() for path in pathlib.Path(local_folder).glob("*") 184 | } 185 | diff = additional.difference(iterated) 186 | for unused in diff: 187 | unused_repo = github_client.get_repo(f"adafruit/{unused}") 188 | unused_func_results = perform_func(unused_repo, func_workflow) 189 | results.append((unused_repo, unused_func_results)) 190 | 191 | return results 192 | -------------------------------------------------------------------------------- /tools/library_functions.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2022 Alec Delaney 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """ 6 | 7 | lib_funcs.py 8 | ============ 9 | 10 | Library-specific functionalities to aid in developing patches 11 | 12 | * Author(s): Alec Delaney 13 | 14 | """ 15 | 16 | import os 17 | import functools 18 | from collections.abc import Sequence 19 | from typing import Protocol, Any, Union 20 | from typing_extensions import TypeAlias 21 | from github.Repository import Repository 22 | 23 | # Helpful type annotation for path-like strings 24 | StrPath: TypeAlias = Union[str, os.PathLike[str]] 25 | """Path or path-like strings""" 26 | 27 | 28 | # pylint: disable=too-few-public-methods 29 | class LocalLibFunc(Protocol): 30 | """Typing protocol for methods (or callables) that take the following 31 | parameters: 32 | 33 | - (StrPath) The path to a specific Adafruit library 34 | - (Sequence[Any]) A list of any positional arguments 35 | - (Dict[str, Any]) A dict of any keyword arguments 36 | """ 37 | 38 | def __call__( 39 | self, lib_path: StrPath, *args: Sequence[Any], **kwargs: dict[str, Any] 40 | ) -> Any: ... 41 | 42 | 43 | # pylint: disable=too-few-public-methods 44 | class RemoteLibFunc(Protocol): 45 | """Typing protocol for methods (or callables) that take the following 46 | parameters: 47 | 48 | - (Repository) The repo as a github.Repository.Repository object 49 | - (Sequence[Any]) A list of any positional arguments 50 | - (Dict[str, Any]) A dict of any keyword arguments 51 | """ 52 | 53 | def __call__( 54 | self, lib_repo: Repository, *args: Sequence[Any], **kwargs: dict[str, Any] 55 | ) -> Any: ... 
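
# Example (illustrative sketch): a minimal function that satisfies the
# ``RemoteLibFunc`` protocol and could be passed to
# ``iterate_libraries.iter_remote_bundle_with_func``; the check it performs
# (whether the repository is archived) is just a placeholder.
#
#     def is_archived(lib_repo: Repository, *, debug: bool = False) -> bool:
#         if debug:
#             print("Checking", lib_repo.name)
#         return lib_repo.archived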
56 | 57 | 58 | def in_lib_path(func: LocalLibFunc) -> LocalLibFunc: 59 | """Decorator for automating temporarily entering a function's 60 | library directory 61 | 62 | :param LibraryFunc func: The library function to decorate 63 | """ 64 | 65 | @functools.wraps(func) 66 | def wrapper_use_lib_path(lib_path: StrPath, *args, **kwargs) -> Any: 67 | # Get the current directory 68 | current_path = os.getcwd() 69 | 70 | # Enter the library directory for the duration of executing the function 71 | os.chdir(lib_path) 72 | result = func(lib_path, *args, **kwargs) 73 | os.chdir(current_path) 74 | 75 | # Return the result of the function 76 | return result 77 | 78 | return wrapper_use_lib_path 79 | -------------------------------------------------------------------------------- /tools/run_black.sh: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2023 Alec Delaney 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | rm -rf .gitlibs 6 | mkdir .gitlibs 7 | cd .libraries 8 | for repo in *; do 9 | cd ../.gitlibs 10 | git clone https://github.com/adafruit/$repo.git 11 | cd $repo 12 | pre-commit run --all-files 13 | git add -A 14 | git commit -m "Run pre-commit" 15 | git push 16 | cd .. 17 | done 18 | -------------------------------------------------------------------------------- /tools/runner.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2022 Eva Herrada 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | """ 6 | Tool for running specific CircuitPython library validators one at a time. 7 | 8 | IMPORTANT: Must be run from the top-level adabot directory (one directory up 9 | from this one) 10 | 11 | Type `python3 runner.py` to run this file, and select the validator you want 12 | to run 13 | """ 14 | 15 | import datetime 16 | import inspect 17 | import json 18 | 19 | from adabot import pypi_requests as pypi 20 | from adabot.lib import circuitpython_library_validators as cpy_vals 21 | from adabot.lib import common_funcs 22 | from adabot.lib.common_funcs import list_repos 23 | 24 | default_validators = [ 25 | vals[1] 26 | for vals in inspect.getmembers(cpy_vals.LibraryValidator) 27 | if vals[0].startswith("validate") 28 | ] 29 | 30 | bundle_submodules = common_funcs.get_bundle_submodules() 31 | 32 | LATEST_PYLINT = "" 33 | pylint_info = pypi.get("/pypi/pylint/json") 34 | if pylint_info and pylint_info.ok: 35 | LATEST_PYLINT = pylint_info.json()["info"]["version"] 36 | 37 | validator = cpy_vals.LibraryValidator( 38 | default_validators, 39 | bundle_submodules, 40 | LATEST_PYLINT, 41 | ) 42 | 43 | valids = {} 44 | for count, val in enumerate(default_validators): 45 | t = str(val).split(" at", maxsplit=1)[0].split("Validator.", maxsplit=1)[1] 46 | valids[count] = t 47 | print(f"{count}:", t) 48 | 49 | select = valids[ 50 | int(input(f"Select a function to run [0-{len(default_validators)-1}]: ")) 51 | ] 52 | print(select) 53 | selected = getattr(validator, select) 54 | print(selected) 55 | 56 | try: 57 | with open("repos.json", "r") as f: 58 | DATE = f.readline().rstrip() 59 | except FileNotFoundError: 60 | DATE = "" 61 | 62 | print(f"Last run: {DATE}") 63 | if DATE != str(datetime.date.today()): 64 | with open("repos.json", "w") as f: 65 | print("Fetching Repos List") 66 | all_repos = list_repos() 67 | print("Got Repos List") 68 | f.write(str(datetime.date.today()) + "\n") 69 | f.write(json.dumps(all_repos)) 70 | 71 | with open("repos.json", "r") as f: 72 | all_repos = 
json.loads(f.read().split("\n")[1]) 73 | 74 | results = {} 75 | 76 | for repo in all_repos: 77 | val = selected(repo) 78 | print(repo["name"]) 79 | print(val) 80 | if len(val): 81 | if isinstance(val[0], tuple): 82 | if val[0][0] not in results: 83 | results[val[0][0]] = [] 84 | results[val[0][0]].append(repo["name"]) 85 | else: 86 | for i in val: 87 | if i not in results: 88 | results[i] = [] 89 | results[i].append(repo["name"]) 90 | 91 | 92 | print(results) 93 | with open("adabot_run.txt", "w") as f: 94 | for k, v in results.items(): 95 | f.write(k + "\n") 96 | for i in v: 97 | f.write(i + "\n") 98 | f.write("\n") 99 | --------------------------------------------------------------------------------