├── .bettercodehub.yml ├── .coveragerc ├── .dockerignore ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── feature_request.md │ └── issue-inquiry.md ├── biskotaki.yaml ├── dependabot.yml ├── labeler.yml └── workflows │ ├── auto-prod.yml │ ├── ci.yml │ ├── codecov-job.yml │ ├── dev_pr_validation.yml │ ├── docs-job.yml │ ├── generate.yaml │ ├── labeler.yaml │ ├── load-to-rt.yml │ ├── merge-rt-in-release.yml │ ├── merge-to-boarding.yml │ ├── merge-to-train.yml │ ├── open-doors.yml │ ├── policy_lint.yml │ ├── pr-to-boarding.yml │ ├── pr-to-master.yml │ ├── pydeps-job.yml │ ├── quick-docs.yaml │ ├── sca-job.yml │ ├── tag-prod.yml │ ├── test-job.yml │ ├── test-python.yml │ ├── test.yaml │ └── type-check-job.yml ├── .gitignore ├── .prospector.yml ├── .pylintrc ├── .readthedocs.yml ├── CHANGELOG.rst ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── README.md ├── docs ├── assets │ ├── CICD-Pipe.png │ ├── ci-open-v1.png │ ├── deps_all.svg │ ├── deps_inner.svg │ ├── deps_ktc-mcs_2.svg │ ├── deps_ktc.svg │ └── generator-flowchart.svg ├── guides │ └── index.md ├── includes │ ├── cicd_mermaid.md │ ├── dockerfile_mermaid.md │ ├── how_to_generate_project.md │ ├── how_to_installation.md │ └── how_to_use_gen_proj.md ├── index.md ├── tags.md ├── topics │ ├── arch.md │ ├── cli_logic.md │ ├── dependencies.md │ ├── development │ │ ├── build_process_DAG.md │ │ ├── cicd.md │ │ ├── dependabot.md │ │ ├── docs_only_release_process.md │ │ ├── gitops │ │ │ ├── gh-web-ui-allow_auto-merge.png │ │ │ ├── gitops-multi-topics-cheatsheet.md │ │ │ ├── gitops-v2-cheatsheet.md │ │ │ ├── gitops-v2.md │ │ │ ├── index.md │ │ │ └── tutorial_release_my_branch_v2.md │ │ ├── index.md │ │ ├── release_candidate.md │ │ └── topic_branch_to_dev.md │ ├── generator_process.md │ └── why_this_package.md └── tutorials │ └── index.md ├── mkdocs.yml ├── poetry.lock ├── pyproject.toml ├── scripts ├── distro-sem-ver-bump.sh ├── gen_api_refs_pages.py ├── lint-local.sh ├── load-to-rt.sh ├── mypy.sh ├── open-doors.sh ├── sem-ver-bump.sh ├── sphinx-process.sh ├── start-rt.sh ├── terminal-based-release.sh ├── update-snapshot-interactive.sh ├── update-snapshot.sh ├── visualize-dockerfile.py └── visualize-ga-workflow.py ├── src ├── cookiecutter_python │ ├── __init__.py │ ├── __main__.py │ ├── _find_lib.py │ ├── _logging.py │ ├── _logging_config.py │ ├── backend │ │ ├── __init__.py │ │ ├── check_server_result.py │ │ ├── error_handling │ │ │ ├── __init__.py │ │ │ └── handler_builder.py │ │ ├── generator │ │ │ ├── __init__.py │ │ │ └── generator.py │ │ ├── helpers.py │ │ ├── hosting_services │ │ │ ├── __init__.py │ │ │ ├── check_engine.py │ │ │ ├── check_service.py │ │ │ ├── check_web_hosting_service.py │ │ │ ├── checker.py │ │ │ ├── checkers.py │ │ │ ├── exceptions.py │ │ │ ├── extract_name.py │ │ │ ├── handle_hosting_service_check.py │ │ │ ├── handler.py │ │ │ ├── value_extractor.py │ │ │ └── web_hosting_service.py │ │ ├── load_config.py │ │ ├── main.py │ │ ├── post_main.py │ │ ├── pre_main.py │ │ ├── proxy.py │ │ ├── request.py │ │ ├── sanitization │ │ │ ├── __init__.py │ │ │ ├── input_sanitization.py │ │ │ ├── interpreters_support.py │ │ │ └── string_sanitizers │ │ │ │ ├── __init__.py │ │ │ │ ├── base_sanitizer.py │ │ │ │ ├── sanitize_reg_input.py │ │ │ │ ├── sanitize_reg_module_name.py │ │ │ │ └── sanitize_reg_version.py │ │ └── user_config_proxy.py │ ├── cli.py │ ├── cli_handlers.py │ ├── cookiecutter.json │ ├── exceptions.py │ ├── handle │ │ ├── __init__.py │ │ ├── dialogs │ │ │ ├── __init__.py │ │ │ ├── dialog.py │ │ │ └── lib │ │ │ │ ├── 
__init__.py │ │ │ │ └── project_name.py │ │ ├── interactive_cli_pipeline.py │ │ ├── node_base.py │ │ ├── node_factory.py │ │ └── node_interface.py │ ├── hooks │ │ ├── __init__.py │ │ ├── post_gen_project.py │ │ └── pre_gen_project.py │ ├── py.typed │ ├── utils.py │ └── {{ cookiecutter.project_slug }} │ │ ├── .coveragerc │ │ ├── .github │ │ ├── labeler.yml │ │ └── workflows │ │ │ ├── cicd.yml │ │ │ ├── codecov-upload.yml │ │ │ ├── labeler.yaml │ │ │ ├── policy_lint.yml │ │ │ ├── signal-deploy.yml │ │ │ └── test.yaml │ │ ├── .gitignore │ │ ├── .prospector.yml │ │ ├── .pylintrc │ │ ├── .readthedocs.yml │ │ ├── CHANGELOG.rst │ │ ├── CONTRIBUTING.md │ │ ├── Dockerfile │ │ ├── LICENSE │ │ ├── MANIFEST.in │ │ ├── README.rst │ │ ├── mkdocs.yml │ │ ├── pyproject.toml │ │ ├── scripts │ │ ├── gen_api_refs_pages.py │ │ ├── parse_version.py │ │ ├── visualize-dockerfile.py │ │ └── visualize-ga-workflow.py │ │ ├── setup.cfg │ │ ├── src │ │ └── {{ cookiecutter.pkg_name }} │ │ │ ├── __init__.py │ │ │ ├── __main__.py │ │ │ ├── _logging.py │ │ │ ├── cli.py │ │ │ ├── fixtures.py │ │ │ └── py.typed │ │ ├── tests │ │ ├── conftest.py │ │ ├── smoke_test.py │ │ ├── test_cli.py │ │ ├── test_invoking_cli.py │ │ └── test_my_fixture.py │ │ ├── tox.ini │ │ ├── {% if cookiecutter.docs_builder == "mkdocs" %}docs{% else %}PyGen_TO_DELETE{% endif %} │ │ ├── index.md │ │ ├── tags.md │ │ └── topics │ │ │ ├── arch.md │ │ │ └── development │ │ │ ├── build_process_DAG.md │ │ │ ├── cicd.md │ │ │ ├── cicd_mermaid.md │ │ │ ├── dockerfile_mermaid.md │ │ │ └── index.md │ │ └── {% if cookiecutter.docs_builder == "sphinx" %}docs{% else %}PyGen_TO_DELETE{% endif %} │ │ ├── Makefile │ │ ├── conf.py │ │ ├── contents │ │ ├── 10_introduction.rst │ │ ├── 20_why_this_package.rst │ │ ├── 30_usage.rst │ │ ├── 40_modules.rst │ │ └── {{ cookiecutter.pkg_name }}.rst │ │ ├── index.rst │ │ ├── make.bat │ │ └── spelling_wordlist.txt └── stubs │ ├── cookiecutter │ ├── __init__.pyi │ ├── config.pyi │ ├── exceptions.pyi │ ├── generate.pyi │ └── main.pyi │ ├── git │ ├── __init__.pyi │ └── exc.pyi │ └── requests_futures │ ├── __init__.pyi │ └── sessions.pyi ├── tests ├── biskotaki_ci │ ├── conftest.py │ ├── snapshot │ │ ├── biskotaki_ci_no_input │ │ │ ├── test_build_creates_artifacts.py │ │ │ └── test_lint_passes.py │ │ ├── test_matches_biskotaki_runtime_gen.py │ │ └── test_valid_ci_config.py │ ├── test_logging.py │ └── test_regression_biskotaki.py ├── conftest.py ├── data │ ├── biskotaki-with-no-docs-specs.yaml │ ├── biskotaki-without-interpreters.yaml │ ├── correct_python_package_names.txt │ ├── gold-standard.yml │ ├── pytest-fixture.yaml │ ├── rendering │ │ ├── only_list_template │ │ │ ├── cookiecutter.json │ │ │ ├── hooks │ │ │ │ └── pre_gen_project.py │ │ │ └── {{ cookiecutter.project_dir_name }} │ │ │ │ └── a.txt │ │ └── user_config.yml │ ├── snapshots │ │ ├── README.md │ │ ├── biskotaki-gold-standard │ │ │ ├── .coveragerc │ │ │ ├── .github │ │ │ │ ├── labeler.yml │ │ │ │ └── workflows │ │ │ │ │ ├── cicd.yml │ │ │ │ │ ├── codecov-upload.yml │ │ │ │ │ ├── labeler.yaml │ │ │ │ │ ├── policy_lint.yml │ │ │ │ │ └── signal-deploy.yml │ │ │ ├── .gitignore │ │ │ ├── .prospector.yml │ │ │ ├── .pylintrc │ │ │ ├── .readthedocs.yml │ │ │ ├── CHANGELOG.rst │ │ │ ├── CONTRIBUTING.md │ │ │ ├── Dockerfile │ │ │ ├── LICENSE │ │ │ ├── README.rst │ │ │ ├── docs │ │ │ │ ├── index.md │ │ │ │ ├── tags.md │ │ │ │ └── topics │ │ │ │ │ ├── arch.md │ │ │ │ │ └── development │ │ │ │ │ ├── build_process_DAG.md │ │ │ │ │ ├── cicd.md │ │ │ │ │ ├── cicd_mermaid.md │ │ │ │ │ ├── 
dockerfile_mermaid.md │ │ │ │ │ └── index.md │ │ │ ├── mkdocs.yml │ │ │ ├── pyproject.toml │ │ │ ├── scripts │ │ │ │ ├── gen_api_refs_pages.py │ │ │ │ ├── parse_version.py │ │ │ │ ├── visualize-dockerfile.py │ │ │ │ └── visualize-ga-workflow.py │ │ │ ├── src │ │ │ │ └── biskotakigold │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── __main__.py │ │ │ │ │ ├── _logging.py │ │ │ │ │ ├── cli.py │ │ │ │ │ └── py.typed │ │ │ ├── tests │ │ │ │ ├── smoke_test.py │ │ │ │ ├── test_cli.py │ │ │ │ └── test_invoking_cli.py │ │ │ └── tox.ini │ │ ├── biskotaki-interactive │ │ │ ├── .coveragerc │ │ │ ├── .github │ │ │ │ ├── labeler.yml │ │ │ │ └── workflows │ │ │ │ │ ├── cicd.yml │ │ │ │ │ ├── codecov-upload.yml │ │ │ │ │ ├── labeler.yaml │ │ │ │ │ ├── policy_lint.yml │ │ │ │ │ └── signal-deploy.yml │ │ │ ├── .gitignore │ │ │ ├── .prospector.yml │ │ │ ├── .pylintrc │ │ │ ├── .readthedocs.yml │ │ │ ├── CHANGELOG.rst │ │ │ ├── CONTRIBUTING.md │ │ │ ├── Dockerfile │ │ │ ├── LICENSE │ │ │ ├── README.rst │ │ │ ├── docs │ │ │ │ ├── Makefile │ │ │ │ ├── conf.py │ │ │ │ ├── contents │ │ │ │ │ ├── 10_introduction.rst │ │ │ │ │ ├── 20_why_this_package.rst │ │ │ │ │ ├── 30_usage.rst │ │ │ │ │ ├── 40_modules.rst │ │ │ │ │ └── biskotaki.rst │ │ │ │ ├── index.rst │ │ │ │ ├── make.bat │ │ │ │ └── spelling_wordlist.txt │ │ │ ├── pyproject.toml │ │ │ ├── scripts │ │ │ │ ├── parse_version.py │ │ │ │ ├── visualize-dockerfile.py │ │ │ │ └── visualize-ga-workflow.py │ │ │ ├── src │ │ │ │ └── biskotaki │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── _logging.py │ │ │ │ │ └── py.typed │ │ │ ├── tests │ │ │ │ └── smoke_test.py │ │ │ └── tox.ini │ │ └── biskotaki-no-input │ │ │ ├── .coveragerc │ │ │ ├── .github │ │ │ ├── labeler.yml │ │ │ └── workflows │ │ │ │ ├── cicd.yml │ │ │ │ ├── codecov-upload.yml │ │ │ │ ├── labeler.yaml │ │ │ │ ├── policy_lint.yml │ │ │ │ └── signal-deploy.yml │ │ │ ├── .gitignore │ │ │ ├── .prospector.yml │ │ │ ├── .pylintrc │ │ │ ├── .readthedocs.yml │ │ │ ├── CHANGELOG.rst │ │ │ ├── CONTRIBUTING.md │ │ │ ├── Dockerfile │ │ │ ├── LICENSE │ │ │ ├── README.rst │ │ │ ├── docs │ │ │ ├── Makefile │ │ │ ├── conf.py │ │ │ ├── contents │ │ │ │ ├── 10_introduction.rst │ │ │ │ ├── 20_why_this_package.rst │ │ │ │ ├── 30_usage.rst │ │ │ │ ├── 40_modules.rst │ │ │ │ └── biskotaki.rst │ │ │ ├── index.rst │ │ │ ├── make.bat │ │ │ └── spelling_wordlist.txt │ │ │ ├── pyproject.toml │ │ │ ├── scripts │ │ │ ├── parse_version.py │ │ │ ├── visualize-dockerfile.py │ │ │ └── visualize-ga-workflow.py │ │ │ ├── src │ │ │ └── biskotaki │ │ │ │ ├── __init__.py │ │ │ │ ├── _logging.py │ │ │ │ └── py.typed │ │ │ ├── tests │ │ │ └── smoke_test.py │ │ │ └── tox.ini │ └── test_cookiecutter.json ├── generator_defaults_shift │ └── test_docs_settings.py ├── test_build_backend_sdist.py ├── test_ci_pipeline_generation.py ├── test_cli.py ├── test_cookiecutter_choice_var.py ├── test_cookiecutter_context.py ├── test_dialog_system.py ├── test_docs_gen_feat_compatibillity.py ├── test_error_classifier.py ├── test_generate.py ├── test_git_porcelain.py ├── test_git_sdk.py ├── test_gold_standard.py ├── test_is_repo_clean_function.py ├── test_load_util.py ├── test_module.py ├── test_post_hook.py ├── test_prehook.py ├── test_running_test_suite.py ├── test_sanitization_component.py ├── test_sanity.py ├── test_snapshot_workflow_yaml.py └── test_version_string.py ├── tox.ini └── uv.lock /.bettercodehub.yml: -------------------------------------------------------------------------------- 1 | component_depth: 3 2 | languages: 3 | - python 4 | 
-------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = 3 | cookiecutter_python 4 | 5 | [report] 6 | show_missing = true 7 | precision = 2 8 | omit = 9 | *migrations* 10 | biskotaki-gold-standard 11 | biskotaki 12 | my-new-project 13 | my-fixture 14 | exclude_lines = 15 | raise NotImplementedError 16 | raise NotImplemented 17 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .tox 2 | __pycache__ 3 | *.pyc 4 | *.pyo 5 | *.pyd 6 | .Python 7 | env 8 | .coverage 9 | .coverage.* 10 | .cache 11 | coverage.xml 12 | *,cover 13 | *.log 14 | .git 15 | .mypy_cache 16 | .pytest_cache 17 | soft-rel\.log 18 | notes\.md 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: "[BUG]" 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Install the *cookiecutter-python* package with *python3 -m pip install cookiecutter-python* 16 | 2. Run the **cli** as: *generate-python ...* 17 | please provided the arguments supplied to the cli 18 | 3. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. Linux Mint, iOS, Windows] 28 | - Version [e.g. 1.2.0] 29 | run: *generate-python --version* 30 | - Python Interpreter 31 | run: *generate-python --version* 32 | 33 | 34 | **Additional context** 35 | Add any other context about the problem here. 36 | For example, what input values were supplied to the *generate-python* 37 | cli (ie paste contents of your *config file*) 38 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: "[FEATURE]" 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue-inquiry.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Issue Inquiry 3 | about: Make an inquiry about an "issue" which is not a 'bug' nor a 'feature request' 4 | title: "[OTHER ISSUE]" 5 | labels: question 6 | assignees: '' 7 | 8 | --- 9 | 10 | **TODO: Title Goes Here** 11 | 12 | **Do you have a question related to the *cookiecutter-python* package?** 13 | Please submit your question :) 14 | 15 | Examples: 16 | - Why is *A* designed the way it is designed? 17 | - How does *A* accomplish whatever it accomplishes? 18 | -------------------------------------------------------------------------------- /.github/biskotaki.yaml: -------------------------------------------------------------------------------- 1 | default_context: 2 | project_name: Biskotaki 3 | project_type: module 4 | project_slug: biskotaki 5 | pkg_name: biskotaki 6 | repo_name: biskotaki 7 | readthedocs_project_slug: biskotaki 8 | docker_image: biskotaki 9 | full_name: Konstantinos Lampridis 10 | author: Konstantinos Lampridis 11 | email: k.lampridis@hotmail.com 12 | author_email: k.lampridis@hotmail.com 13 | github_username: boromir674 14 | project_short_description: Project generated using https://github.com/boromir674/cookiecutter-python-package 15 | initialize_git_repo: 'no' 16 | interpreters: {"supported-interpreters": ["3.7", "3.8", "3.9", "3.10", "3.11"]} 17 | ## Documentation Config ## 18 | docs_builder: "sphinx" 19 | ## READ THE DOCS CI Config ## 20 | rtd_python_version: "3.10" 21 | cicd: 'experimental' 22 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | updates: 4 | 5 | # Configure Dependabot for Python projects 6 | - package-ecosystem: "pip" # For Python projects using pip 7 | directory: "/" # Root directory of the project 8 | 9 | schedule: 10 | interval: "daily" # How often to check for updates 11 | # You can adjust the interval as needed (e.g., "weekly"). 12 | 13 | open-pull-requests-limit: 10 # Limits the number of open pull requests 14 | 15 | # Prioritize security updates by setting them to open immediately 16 | security-updates: "auto" 17 | 18 | # Enable version updates for all dependencies 19 | versioning-strategy: "increase" # Can be "lockfile-only", "increase", or "widen" 20 | 21 | # ignore: 22 | # - dependency-name: "example-dependency" 23 | # versions: ["1.0.0"] 24 | # This section is customizable and allows you to ignore specific dependencies or versions. 
25 | -------------------------------------------------------------------------------- /.github/workflows/codecov-job.yml: -------------------------------------------------------------------------------- 1 | ## Codecov Upload - Reusable Workflow ## 2 | 3 | on: 4 | workflow_call: 5 | secrets: 6 | CODECOV_TOKEN: 7 | required: true 8 | jobs: 9 | upload: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | - name: Get Codecov binary 14 | run: | 15 | curl -Os https://uploader.codecov.io/latest/linux/codecov 16 | chmod +x codecov 17 | 18 | # DOWNLOAD XML FILES FROM ARTIFACTS 19 | - name: Download All Artifacts 20 | uses: actions/download-artifact@v4 21 | with: 22 | path: coverage 23 | pattern: coverage-* 24 | merge-multiple: true 25 | 26 | - run: ls -R coverage 27 | 28 | # UPLOAD XML FILES TO CODECOV 29 | - name: Upload Coverage Reports to Codecov 30 | env: 31 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} # this is "read" from inputs 32 | run: | 33 | for file in coverage/coverage*.xml; do 34 | OS_NAME=$(echo $file | sed -E "s/coverage-(\w\+)-/\1/") 35 | PY_VERSION=$(echo $file | sed -E "s/coverage-\w\+-(\d\.)\+/\1/") 36 | ./codecov -f $file -e "OS=$OS_NAME,PYTHON=$PY_VERSION" --flags unittests --verbose 37 | echo "[INFO] Sent to Codecov: $file !" 38 | done 39 | -------------------------------------------------------------------------------- /.github/workflows/labeler.yaml: -------------------------------------------------------------------------------- 1 | name: "PR Labeler" 2 | 3 | on: 4 | - pull_request_target 5 | 6 | jobs: 7 | label_PR: 8 | # permissions: 9 | # contents: read 10 | # pull-requests: write 11 | runs-on: ubuntu-latest 12 | # This Job behaves as a Listener to PR events, and each step is a Handler 13 | steps: 14 | # HANDLER 1: Label PR, given file changes and Labeling Rules '.github/labeler.yml' 15 | - uses: actions/labeler@v5 16 | with: 17 | repo-token: ${{ secrets.COOKIECUTTER_PYTHON_PACKAGE_LABELER }} # Ensure GITHUB_TOKEN is used 18 | -------------------------------------------------------------------------------- /.github/workflows/pr-to-boarding.yml: -------------------------------------------------------------------------------- 1 | ############################# 2 | ## PR to Boarding - GitOps ## 3 | ############################# 4 | 5 | # export tt='board-request'; git tag -d "$tt"; git push --delete origin "$tt"; git tag "$tt" && git push origin "$tt" 6 | 7 | on: 8 | push: 9 | tags: 10 | - board-request 11 | - board-n-release 12 | 13 | jobs: 14 | pr_to_boarding: 15 | uses: boromir674/automated-workflows/.github/workflows/go-pr-to-boarding.yml@test 16 | with: 17 | # pass tag to PR from --> - 18 | board_tag: "${{ github.ref_name }}" 19 | main_branch: ${{ vars.MAIN_BRANCH || 'main' }} 20 | secrets: 21 | github_pat: ${{ secrets.GH_TOKEN }} 22 | -------------------------------------------------------------------------------- /.github/workflows/pr-to-master.yml: -------------------------------------------------------------------------------- 1 | ############################ 2 | ## Open PR to Main/Master ## 3 | ############################ 4 | 5 | name: Open PR to Master 6 | 7 | # WHEN a PR 'release-train' --> 'release' is merged 8 | # THEN Open PR: 'release' --> 'master' 9 | 10 | on: 11 | pull_request: 12 | types: [closed] 13 | branches: # ALLOWED Base Branches 14 | - release 15 | 16 | jobs: 17 | open_pr_to_master: 18 | if: github.event.pull_request.merged == true && contains(fromJSON('["release-train"]'), github.head_ref) 19 | runs-on: ubuntu-latest 20 | 
env: 21 | RELEASE: 'release' 22 | MAIN_BR: 'master' 23 | steps: 24 | - uses: actions/checkout@v4 25 | with: 26 | fetch-depth: 0 # 0 indicates all history for all branches and tags. 27 | set-safe-directory: '' # `git config --global --add safe.directory ` 28 | token: '${{ secrets.GH_TOKEN }}' 29 | 30 | ##### Open PR: Release --> Main/Master ##### 31 | - name: "Open PR 'head': ${{ env.RELEASE }} --> 'base': ${{ env.MAIN_BR }}" 32 | env: 33 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 34 | run: | 35 | gh pr create --head "${{ env.RELEASE }}" --base "${{ env.MAIN_BR }}" \ 36 | --title "Release Version '${{ steps.sem_ver.outputs.SEMVER }}' into '${{ env.MAIN_BR }}' Branch" \ 37 | --body "## :rocket: Release '${{ steps.sem_ver.outputs.SEMVER }}' into '${{ env.MAIN_BR }}' Branch :rocket: 38 | 39 | This PR marks a pivotal moment in our deployment cycle, signaling that all changes on the **Release** branch are deemed ready for production. It represents the collective decision of our developers that the changes bundled in the Release are suitable to be released together. 40 | 41 | ### What's Happening in This PR? 42 | 43 | - We are merging the '${{ env.RELEASE }}' branch into the '${{ env.MAIN_BR }}' branch. 44 | - This action is a critical step, transitioning us from the release phase to the production phase. 45 | 46 | ### :white_check_mark: Automatic Merging Upon CI Checks :white_check_mark: 47 | 48 | This PR will be automatically merged into the '${{ env.MAIN_BR }}' branch, if the following conditions are met: 49 | 50 | - All CI Checks pass 51 | - Code Review is approved by at least one developer 52 | 53 | ### :warning: Manual Merging Upon CI Checks and Code Review :warning: 54 | 55 | If the above conditions are not met, this PR will be manually merged into the '${{ env.MAIN_BR }}' branch, by a developer. 56 | " 57 | -------------------------------------------------------------------------------- /.github/workflows/tag-prod.yml: -------------------------------------------------------------------------------- 1 | 2 | # Automatically, Publish a Production Tag 3 | 4 | # RUNS on PR 'release' --> 'main/master' MERGED 5 | # implies closed, and excludes 'close without merge' 6 | 7 | on: 8 | pull_request: 9 | types: [closed] 10 | branches: 11 | # access with ${{ github.event.pull_request.base.ref }} 12 | - main 13 | - master 14 | 15 | 16 | jobs: 17 | tag_prod: 18 | runs-on: ubuntu-latest 19 | # if merged code AND HEAD branch "was" 'release' AND PR has label 'auto-deploy' 20 | if: github.event.pull_request.merged == true && 21 | github.event.pull_request.head.ref == 'release' && 22 | contains(github.event.pull_request.labels.*.name, 'auto-deploy') 23 | steps: 24 | - name: "Checkout Code" 25 | uses: actions/checkout@v4 26 | with: 27 | fetch-depth: 0 # 0 indicates all history for all branches and tags. 28 | set-safe-directory: '' # `git config --global --add safe.directory ` 29 | token: '${{ secrets.GH_TOKEN }}' 30 | 31 | # by checkout we should probably be at master (by default) 32 | - name: Ensure we are on 'master' branch 33 | run: git checkout master 34 | 35 | # Parse SOURCE Sem Ver 36 | - run: echo SEMVER=$(grep -E -o '^version\s*=\s*\".*\"' pyproject.toml | cut -d'"' -f2) >> $GITHUB_ENV 37 | 38 | # Exit if SOURCE Sem Ver is empty 39 | - name: "Exit if SOURCE Sem Ver is empty" 40 | run: | 41 | if [ -z "${{ env.SEMVER }}" ]; then 42 | echo "SEMVER is empty. Exiting .." 
43 | echo "SEMVER is empty: '${{ env.SEMVER }}'" >> $GITHUB_STEP_SUMMARY 44 | exit 1 45 | fi 46 | 47 | # Derive Prod Git Tag 48 | - name: 'Derive Git Tag: v${{ env.SEMVER }}' 49 | run: echo "PROD_TAG=v${{ env.SEMVER }}" >> $GITHUB_ENV 50 | 51 | # Trigger CI/CD for 'Production Release' 52 | - run: git config --global user.name "Konstantinos Lampridis" 53 | - run: git config --global user.email "boromir674@hotmail.com" 54 | 55 | - name: "Push '${{ env.PROD_TAG }}' to trigger CI/CD for 'Production Release'" 56 | run: | 57 | git tag "${{ env.PROD_TAG }}" -m "Production Release ${{ env.PROD_TAG }}" 58 | git push origin "${{ env.PROD_TAG }}" 59 | 60 | echo "Tagged and Pushed '${{ env.PROD_TAG }}' to trigger CI/CD for 'Production Release'" >> $GITHUB_STEP_SUMMARY 61 | -------------------------------------------------------------------------------- /.github/workflows/type-check-job.yml: -------------------------------------------------------------------------------- 1 | # Reusable Job for Static Type Checking with mypy 2 | 3 | name: Type Checking 4 | on: 5 | workflow_call: 6 | inputs: 7 | # App Installation Settings # 8 | 9 | # Trigger Behavior Settings # 10 | default_trigger: 11 | required: false 12 | default: true 13 | description: "Default trigger for the workflow. If true the 'default behavior' is to run on call. If false, the 'default behavior' is to be skipped." 14 | type: boolean 15 | 16 | override: 17 | required: false 18 | description: "Override the default trigger, 'true' guarantees run, 'false' not run" 19 | type: string # IMPORTANT: must be string, to model 3 states {true, false, none} 20 | 21 | # Environment Settings # 22 | python_version: 23 | required: false 24 | default: '3.10' 25 | description: "Python version to use for the job. Default is 3.10" 26 | type: string 27 | 28 | jobs: 29 | test: 30 | if: inputs.override == 'true' || (inputs.override != 'false' && inputs.default_trigger == true) 31 | runs-on: 'ubuntu-latest' 32 | steps: 33 | - uses: actions/checkout@v4 34 | 35 | - name: Set up Python ${{ inputs.python_version }} 36 | uses: actions/setup-python@v5 37 | with: 38 | python-version: '${{ inputs.python_version }}' 39 | 40 | # Install uv 41 | - name: Install uv 42 | run: curl -LsSf https://astral.sh/uv/install.sh | sh 43 | 44 | - name: 'Export pinned Prod + TypeCheck dependencies' 45 | run: uv export --no-emit-project --no-dev --extra typing --frozen --format requirements-txt -o requirements.txt 46 | 47 | # Install dependencies in virtualenv 48 | - name: 'Install "Prod + TypeCheck" dependencies' 49 | run: | 50 | uv venv 51 | uv pip install --no-deps -r requirements.txt 52 | 53 | # TYPE CHECKING with MYPY 54 | - name: Do Type Checking 55 | env: 56 | PKG: src/cookiecutter_python # for DRYness 57 | MYPYPATH: src/stubs/ # REQUIRED for mypy to find our custom stubs 58 | # create __init__.py in tests/, temporarily.
to avoid mypy error due to multiple conftest.py files inside tests/ 59 | run: | 60 | touch tests/__init__.py 61 | 62 | echo "[INFO] Running mypy for type checking" 63 | 64 | uv run mypy --show-error-codes \ 65 | --exclude tests/data \ 66 | --exclude "${PKG}/{{ cookiecutter.project_slug }}" \ 67 | src 68 | 69 | # delete temporarily created empty __init__.py in tests 70 | - if: always() 71 | run: rm tests/__init__.py 72 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | .vscode/ 3 | 4 | *.egg-info/ 5 | *.pyc 6 | *\.bak 7 | \.fuse* 8 | .coverage 9 | .DS_Store 10 | *__pycache__ 11 | 12 | docs/_build/ 13 | dist/ 14 | build/ 15 | htmlcov/ 16 | 17 | .tox/ 18 | node_modules 19 | 20 | dependency-graphs/ 21 | test-results/ 22 | uml-diagrams/ 23 | pydoer-graphs/ 24 | 25 | package-lock\.json 26 | package\.json 27 | pydeps/ 28 | gen/ 29 | Dockerfile-1 30 | logging\.py\.wip 31 | notes\.org~ 32 | cookie-py\.log 33 | soft-rel\.log 34 | notes\.md 35 | 36 | # LOGS generate by our python code 37 | 38 | cookie-py.log 39 | dev-notes/ 40 | *\.txt 41 | temp 42 | *\.tar\.gz 43 | docs-dist/ 44 | docs-build/ 45 | WIP.sh 46 | \.mutmut-cache 47 | html/ 48 | dist_docs/ 49 | coverage\.xml 50 | coverage\.* 51 | check-yaml 52 | TODOs.md 53 | dist-wheels/ 54 | biskotaki\.log 55 | tests/data/snapshots/biskotaki-gold-standard/site/ 56 | site/ 57 | -------------------------------------------------------------------------------- /.prospector.yml: -------------------------------------------------------------------------------- 1 | # output-format: json 2 | 3 | strictness: high 4 | test-warnings: true 5 | doc-warnings: false 6 | member-warnings: false 7 | inherits: 8 | - default 9 | ignore-paths: 10 | - docs 11 | ignore-patterns: 12 | - (^|/)skip(this)?(/|$) 13 | - src/cookiecutter_python/{{ cookiecutter.project_slug }}/src/{{ cookiecutter.pkg_name }}/__init__.py 14 | - src/cookiecutter_python/{{ cookiecutter.project_slug }}/src/{{ cookiecutter.pkg_name }}/__main__.py 15 | - src/cookiecutter_python/{{ cookiecutter.project_slug }}/src/{{ cookiecutter.pkg_name }}/cli.py 16 | - src/cookiecutter_python/{{ cookiecutter.project_slug }}/tests/smoke_test.py 17 | - src/cookiecutter_python/{{ cookiecutter.project_slug }}/tests/test_cli.py 18 | autodetect: false 19 | max-line-length: 95 20 | 21 | 22 | # TOOLS 23 | 24 | pyflakes: 25 | run: true 26 | disable: 27 | # we disable F821 this since the {{ cookiecutter }} templated python dict is technically an undefined name, 28 | # while pyflakes conducts static code analysis 29 | - F821 30 | 31 | pyroma: 32 | run: true 33 | disable: 34 | - PYR15 35 | - PYR18 36 | 37 | dodgy: 38 | run: true 39 | 40 | mccabe: 41 | run: true 42 | options: 43 | max-complexity: 9 44 | 45 | 46 | # INACTIVE 47 | 48 | pylint: 49 | run: false 50 | 51 | bandit: 52 | run: false 53 | 54 | frosted: 55 | run: false 56 | 57 | pep8: 58 | run: false 59 | 60 | pep257: 61 | run: false 62 | 63 | mypy: 64 | run: false 65 | 66 | vulture: 67 | run: false 68 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the OS, Python version and other tools you might need 9 | build: 10 | os: 
ubuntu-22.04 11 | tools: 12 | python: "3.10" 13 | apt_packages: 14 | - graphviz 15 | 16 | # ALL JOBS implied: https://docs.readthedocs.io/en/stable/builds.html 17 | jobs: 18 | pre_install: 19 | - python -m pip install uv 'tox==3.28.0' 20 | # Reproducible Docs Building Environment - stetup/installation 21 | - uv export --extra docs --frozen --no-dev --no-emit-project -f requirements-txt -o prod+docs.txt 22 | post_install: 23 | # Reproducible Docs Building Environment - stetup/installation 24 | # - uv pip install --no-deps -r prod+docs.txt 25 | - python -m pip install --no-deps -e . 26 | # Install dependencies for the 'pre_build' step 27 | - python -m pip install pyyaml 28 | pre_build: 29 | - tox -e pydeps 30 | - cp -r pydeps/* docs/assets 31 | - ls -l docs/assets 32 | 33 | - chmod +x ./scripts/visualize-ga-workflow.py 34 | - ./scripts/visualize-ga-workflow.py .github/workflows/test.yaml > ./docs/includes/cicd_mermaid.md 35 | 36 | - chmod +x ./scripts/visualize-dockerfile.py 37 | - ./scripts/visualize-dockerfile.py > ./docs/includes/dockerfile_mermaid.md 38 | 39 | 40 | # MKDOCS BUILD documentation in the "docs/" directory 41 | mkdocs: 42 | configuration: mkdocs.yml 43 | # fail_on_warning: false 44 | 45 | 46 | # TODO: Verify if below work for MkDocs or whther there is a work-around 47 | # formats: 48 | # - pdf 49 | # - epub 50 | 51 | 52 | # Optional but recommended, declare the Python requirements required 53 | # to build your documentation 54 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 55 | python: 56 | install: 57 | - requirements: prod+docs.txt 58 | -------------------------------------------------------------------------------- /docs/assets/CICD-Pipe.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/docs/assets/CICD-Pipe.png -------------------------------------------------------------------------------- /docs/assets/ci-open-v1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/docs/assets/ci-open-v1.png -------------------------------------------------------------------------------- /docs/guides/index.md: -------------------------------------------------------------------------------- 1 | # How-to Guides 2 | 3 | Our **Python Generator** Project was designed to be installable via `pip` and then invoked through the `generate-python` entrypoint to the CLI. 
4 | 5 | ## Installation 6 | 7 | {% include 'how_to_installation.md' %} 8 | 9 | ## Generator Usage 10 | 11 | {% include 'how_to_generate_project.md' %} 12 | 13 | ## Generated Project Usage 14 | 15 | {% include 'how_to_use_gen_proj.md' %} 16 | -------------------------------------------------------------------------------- /docs/includes/cicd_mermaid.md: -------------------------------------------------------------------------------- 1 | ```mermaid 2 | graph LR; 3 | set_github_outputs 4 | set_github_outputs --> test 5 | test --> codecov_coverage_host 6 | sca 7 | docs 8 | pydeps 9 | test --> docker_build 10 | set_github_outputs --> docker_build 11 | test --> check_which_git_branch_we_are_on 12 | docs --> check_which_git_branch_we_are_on 13 | sca --> check_which_git_branch_we_are_on 14 | pydeps --> check_which_git_branch_we_are_on 15 | check_which_git_branch_we_are_on --> pypi_publish 16 | test --> pypi_publish 17 | check_which_git_branch_we_are_on --> gh_release 18 | ``` 19 | -------------------------------------------------------------------------------- /docs/includes/dockerfile_mermaid.md: -------------------------------------------------------------------------------- 1 | ## Dockerfile Flow Chart 2 | 3 | **Dockerfile: Dockerfile** 4 | 5 | ```mermaid 6 | graph TB; 7 | python_slim --> builder 8 | builder --> prod_builder 9 | builder --> test_builder 10 | builder --> docs_builder 11 | builder --> docs_live_builder 12 | scratch --> source 13 | prod_builder -. "requirements.txt" .-> source 14 | python_slim --> base_env 15 | base_env --> build_wheels 16 | source -. "/app" .-> build_wheels 17 | base_env --> install 18 | build_wheels -. "${DISTRO_WHEELS}" .-> install 19 | python_slim --> test_dev 20 | test_builder -. "requirements-test.txt" .-> test_dev 21 | base_env --> test_wheels 22 | build_wheels -. "${DISTRO_WHEELS}" .-> test_wheels 23 | test_builder -. "requirements-test.txt" .-> test_wheels 24 | python_slim --> docs_base 25 | docs_base --> docs 26 | docs_base --> docs_live 27 | install --> prod 28 | ``` 29 | -------------------------------------------------------------------------------- /docs/includes/how_to_generate_project.md: -------------------------------------------------------------------------------- 1 | 2 | > Using the cli is as simple as invoking `generate-python` from a console. 3 | 4 | You can run the following to see all the available parameters you can control: 5 | 6 | 7 | === "Pipx / Pip" 8 | 9 | ```sh 10 | generate-python --help 11 | ``` 12 | 13 | === "Docker" 14 | 15 | ```sh 16 | docker run -it --rm boromir674/generate-python:master --help 17 | ``` 18 | 19 | The most common way to generate a new Python Package Project is to run: 20 | 21 | 22 | === "Pipx / Pip" 23 | 24 | ```sh 25 | generate-python 26 | ``` 27 | 28 | === "Docker (linux shell)" 29 | 30 | ```sh 31 | docker run -it --rm boromir674/generate-python:master 32 | ``` 33 | 34 | This will prompt you to input some values and create a fresh new Project in the current directory! 35 | 36 | Now, simply `cd` into the generated Project's directory and enjoy some of the features the generator supplies new projects with! 37 | 38 | More on use cases in the next section. 
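For a fully non-interactive run, the CLI can also read its answers from a YAML config file; a minimal sketch, mirroring the invocation used by this repository's own `scripts/update-snapshot.sh`, with `.github/biskotaki.yaml` as the example config:

```sh
# Non-interactive generation: read default_context values from a YAML config file
generate-python --no-input --config-file .github/biskotaki.yaml -o /tmp/
```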
39 | -------------------------------------------------------------------------------- /docs/includes/how_to_installation.md: -------------------------------------------------------------------------------- 1 | 2 | **Cookiecutter Python Package**, available as *source code* on github, with published 3 | 4 | **Distribution** on *pypi.org* **PyPI**, and **Docker Image** on *hub.docker.com* **Registry**. 5 | 6 | 7 | === "Install with *pipx*" 8 | 9 | Install in virtual env, and make available globally in your (host) machine. 10 | 11 | ```sh 12 | pipx install cookiecutter-python 13 | ``` 14 | 15 | Now, the ``generate-python`` executable should be available. 16 | 17 | 18 | === "Via Docker" 19 | 20 | Pull the latest Stable image from Docker Hub 21 | 22 | ```sh 23 | docker pull boromir674/generate-python:master 24 | ``` 25 | 26 | Now, the CLI should be available, via 27 | `docker run -it --rm boromir674/generate-python:master` 28 | 29 | !!! Hint 30 | 31 | Tag `master` is latest tested stable. Tag `latest` is literally latest pushed (no stability guaranteed) 32 | 33 | === "Install with *pip*, only Linux / MacOS" 34 | 35 | Install in virtual env 36 | 37 | ```sh 38 | virtualenv env --python=python3 39 | source env/bin/activate 40 | 41 | pip install cookiecutter-python 42 | ``` 43 | 44 | Make available to current user 45 | 46 | ```sh 47 | ln -s env/bin/generate-python ~/.local/bin/generate-python 48 | ``` 49 | 50 | Now, the ``generate-python`` executable should be available (assuming ~/.local/bin is in your PATH). 51 | 52 | !!! Hint 53 | 54 | All methods shown above Download Latest Stable Releases, either from pypi or docker 55 | 56 | 57 | ### Verify Installation 58 | 59 | You can verify by running the following: 60 | 61 | ```sh 62 | generate-python --version 63 | ``` 64 | -------------------------------------------------------------------------------- /docs/tags.md: -------------------------------------------------------------------------------- 1 | # Tags 2 | 3 | Following is a list of relevant tags: 4 | 5 | [TAGS] -------------------------------------------------------------------------------- /docs/topics/arch.md: -------------------------------------------------------------------------------- 1 | # Software Architecture 2 | 3 | [//]: # (this is a comment) 4 | [//]: # (Description of what is this Page) 5 | 6 | Here you can find the software architecture of the project. 7 | 8 | ## Module Dependencies 9 | 10 | [//]: # (Description of what is this Section) 11 | 12 | Here you can find the dependencies between the modules of the project. 13 | 14 | The dependencies are Visualized as a Graph, where Nodes are the modules and the Edges are python ``import`` statements. 15 | 16 | The dependencies are visualized, after running the following command: 17 | 18 | ```sh 19 | tox -e pydeps 20 | ``` 21 | 22 | !!! 
Tip
23 | 
24 |     Right-click and open image in new Tab for better inspection
25 | 
26 | ### First-party Dependencies
27 | 
28 | [//]: # (Inner Python Imports SVG Graph)
29 | 
30 | ![First-party Dependencies](../assets/deps_inner.svg)
31 | 
32 | 
33 | ### First and Third party Dependencies
34 | 
35 | [//]: # (First-Party with 3rd-party having all incoming edges to our individual Modules)
36 | 
37 | ![All Dependencies - C](../assets/deps_all.svg)
38 | 
39 | 
40 | ### 1st+3rd party Deps - 1st as Cluster
41 | 
42 | [//]: # ("Boxed" First-Party with 3rd-party having all incoming edges to our Box)
43 | 
44 | ![All Dependencies - B](../assets/deps_ktc.svg)
45 | 
46 | 
47 | ### 1st+3rd party Deps - 1st+3rd as Cluster
48 | 
49 | [//]: # ("Boxed" First-Party with 3rd-party having 1 incoming edge to our Box)
50 | 
51 | ![All Dependencies - A](../assets/deps_ktc-mcs_2.svg)
52 | 
-------------------------------------------------------------------------------- /docs/topics/cli_logic.md: --------------------------------------------------------------------------------
1 | 
2 | ```mermaid
3 | graph TB
4 |   subgraph cli ["CLI Params"]
5 |     ni>"no_input: bool"]
6 |     cf>"config_file: str"]
7 |     dc>"default_config: bool"]
8 |   end
9 | 
10 |   ni .-> A
11 |   cf .-> A
12 |   dc .-> A
13 | 
14 |   A["CLI"] --> if1
15 | 
16 |   subgraph gen ["Main Generate"]
17 | 
18 | 
19 |     subgraph pre_gen_s ["Pre Gen"]
20 |       if1{"no_input == False"} -- Yes --> y1[/"Interactive Mode ON"/]
21 |       if1 -- No --> n1[/"Interactive Mode OFF"\]
22 | 
23 |       y1 --> if2{"python is 3.9 and below?"}
24 |       if2 -- Yes --> y2["Interpreters from Interactive Dialog"]
25 |       if2 -- No --> n2["return []"]
26 | 
27 |       n1 --> if3
28 | 
29 |       if3{"config_file given?"} -- Yes --> y3["Interpreters from user YAML"]
30 |       if3 -- No --> n3["return []"]
31 | 
32 |       y2 --> if4
33 |       n2 --> if4
34 | 
35 |       y3 --> if4
36 |       n3 --> if4
37 | 
38 |       if4{"interpreters found?"} -- Yes --> y4["Store in Cookie Extra Context"]
39 | 
40 |     end
41 | 
42 |     y4 --> g1
43 |     if4 -- No --> g1
44 | 
45 |     subgraph gen_s ["Gen"]
46 |       g1["pre_gen_project - Hook"] --> g
47 |       g["Cookiecutter - jinja"] --> g2
48 |       g2["post_gen_project - Hook"]
49 |     end
50 | 
51 |     g2 --> p1
52 | 
53 |     subgraph post_gen_s ["Post Gen"]
54 |       p1["Check PyPI & Read The Docs"]
55 |     end
56 | 
57 |   end
58 | 
59 |   %% this is a mermaid comment
60 | 
61 |   p1 --> E
62 |   E(("END"))
63 | ```
64 | 
-------------------------------------------------------------------------------- /docs/topics/development/build_process_DAG.md: --------------------------------------------------------------------------------
1 | ## Docker Build Process DAG
2 | 
3 | `docker build` possible execution paths.
4 | 
5 | Flow Chart of how execution navigates docker stages (see --target of docker build).
6 | 
7 | If you run `docker build .` the `target` used by default is the `default_with_demo` Stage in the Graph.
8 | 
9 | **Dockerfile: ./Dockerfile**
10 | 
11 | - `Nodes` represent docker **stages**
12 | - `Continuous arrows/edges` represent `FROM A AS B` docker statements
13 | - `Dotted arrows/edges` represent `COPY --from=A /path/to/file /local/path` statements
14 | 
15 | 
16 | {% include 'dockerfile_mermaid.md' %}
17 | 
18 | With this **multi-stage** Dockerfile design, stages can be **built in parallel** (assuming an appropriate build backend)!
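For example, a single stage from the graph can be targeted directly; a minimal sketch, using the `prod` stage name from the DAG above (the image tag is illustrative):

```sh
# Build only the 'prod' stage of the multi-stage Dockerfile; BuildKit schedules independent stages in parallel
DOCKER_BUILDKIT=1 docker build --target prod -t cookiecutter-python:prod .
```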
19 | -------------------------------------------------------------------------------- /docs/topics/development/cicd.md: -------------------------------------------------------------------------------- 1 | --- 2 | tags: 3 | - CICD 4 | --- 5 | 6 | ## CICD Pipeline, as Github Action Workflow 7 | 8 | ### Variables to provide for `var` context 9 | 10 | Flow Chart, of Jobs Dependencies in the Pipeline. 11 | 12 | **config: ./.github/workflows/test.yaml** 13 | 14 | {% include 'cicd_mermaid.md' %} 15 | -------------------------------------------------------------------------------- /docs/topics/development/docs_only_release_process.md: -------------------------------------------------------------------------------- 1 | # Streamline **Documentation** Updates 2 | 3 | 1. Branch off the `main` branch, and checkout your `topical branch` (`tb`). 4 | 5 | 2. Create Docs-only changes and commit them to your `tb`. 6 | 7 | 3. Push the git tag `quick-release` to trigger the Docs Release Workflow on the CI. 8 | 9 | A new PR is expected to **open** from `tb` to a `dedicated docs` branch, 10 | and automatically **merge** if the Docs Build passes on the `rtd` CI. 11 | 12 | Then, a new PR is expected to **open** from the `dedicated docs` branch to `main`, 13 | with extra commits for the SemVer bump and Changelog updates. 14 | 15 | 4. Wait for the second PR to open, go to the GitHub web UI to review it, and merge it. 16 | 17 | A new **tag** is expected to be created (on the new main/master commit), 18 | and a `PyPI` distribution will be uploaded, a new Docker Image on DockerHub, 19 | and a new GitHub Release will be created. 20 | 21 | ## Workflows References 22 | 23 | - **quick-docs.yaml**: Listens to the `quick-release` git tag and merges `tb` → `db` after opening a PR. 24 | [Source Code](https://github.com/boromir674/cookiecutter-python-package/blob/master/.github/workflows/quick-docs.yaml) 25 | -------------------------------------------------------------------------------- /docs/topics/development/gitops/gh-web-ui-allow_auto-merge.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/docs/topics/development/gitops/gh-web-ui-allow_auto-merge.png -------------------------------------------------------------------------------- /docs/topics/development/gitops/index.md: -------------------------------------------------------------------------------- 1 | # Semi Automated GitOps Processes 2 | 3 | ## V2 4 | 5 | V2 Git Ops processes involve multiple branches, such as `release`, `release-train`, 6 | `direct-onboarding`, `test-distro`, `test-distro-docs`, while "moving" User's changes 7 | step-by-step into main. 8 | 9 | 10 |
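These processes are typically triggered by pushing short-lived git tags that the GitHub Actions workflows listen for; a minimal sketch, reusing the tag-recycling one-liner from the comment at the top of `.github/workflows/pr-to-boarding.yml` (`board-request` is one of the tags that workflow reacts to):

```sh
# Recreate and push the 'board-request' trigger tag (same recipe as documented in pr-to-boarding.yml)
export tt='board-request'; git tag -d "$tt"; git push --delete origin "$tt"; git tag "$tt" && git push origin "$tt"
```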
11 | 
12 | - :simple-githubactions:{ .lg .middle } __`Tutorial`: V2 process to Release changes__
13 | 
14 |     ---
15 | 
16 |     **V2:** Release changes from **one branch**
17 | 
18 |     [:octicons-arrow-right-24: Tutorial ](./tutorial_release_my_branch_v2.md)
19 | 
20 | 
21 | - :simple-githubactions:{ .lg .middle } [__`How-to` Guide: V2 process to Release changes__](./gitops-v2.md)
22 | 
23 |     ---
24 | 
25 |     **V2:** Release changes from **one branch**
26 | 
27 |     [:octicons-arrow-right-24: Cheat Sheet ](./gitops-v2-cheatsheet.md)
28 | 
29 | 
30 | - :simple-githubactions:{ .lg .middle } __V2 process to Release changes from multiple branches__
31 | 
32 |     ---
33 | 
34 |     **V2:** Release changes from **multiple branches**
35 | 
36 |     [:octicons-arrow-right-24: Cheat Sheet ](./gitops-multi-topics-cheatsheet.md)
37 | 
38 | 
39 | -------------------------------------------------------------------------------- /docs/topics/development/index.md: -------------------------------------------------------------------------------- 1 | # Development 2 | Here you will find topics related to `Development`, the `build`, and the `CI/CD` Pipeline design. 3 | 4 |
5 | 6 | 7 | - :material-docker:{ .lg .middle } __Docker__ 8 | 9 | --- 10 | 11 | Dockerfile design, Build Process 12 | 13 | [:octicons-arrow-right-24: Topic ](./build_process_DAG.md) 14 | 15 | 16 | - :simple-githubactions:{ .lg .middle } __CI/CD Pipeline__ 17 | 18 | --- 19 | 20 | Github Actions Workflow of Jobs, visualized as a DAG 21 | 22 | [:octicons-arrow-right-24: Topic ](./cicd.md) 23 | 24 | 25 | - :material-book-open: __Dependabot Docs__ 26 | 27 | --- 28 | 29 | Dependabot - Documentation 30 | 31 | [:octicons-arrow-right-24: Docs ](./dependabot.md) 32 | 33 | 34 | - :material-state-machine:{ .lg .middle } __Git Ops Processes__ 35 | 36 | --- 37 | 38 | Step-by-step Processes, leveraging `git` and `CI` for **Releasing changes** 39 | 40 | [:octicons-arrow-right-24: Docs ](./gitops/) 41 | 42 |
43 | 
-------------------------------------------------------------------------------- /docs/topics/development/release_candidate.md: --------------------------------------------------------------------------------
1 | # Release Candidate / Test Deployment
2 | 
3 | From your branch, run:
4 | 
5 | [//]: # (Start of command block)
6 | 
7 | ```sh
8 | rc_tag=$(grep -E -o '^version\s*=\s*".*"' pyproject.toml | cut -d'"' -f2); rc_tag="${rc_tag}-rc"
9 | 
10 | git tag "$rc_tag" || (git tag -d "$rc_tag" && git tag "$rc_tag"); git push origin -d "$rc_tag"; git push origin "$rc_tag"
11 | 
12 | ```
13 | 
14 | [//]: # (End of command block)
15 | 
16 | This will trigger the CI/CD Pipeline and instruct it to do a **Test Deployment**.
17 | 
18 | Test Deployment is a full deployment of the package to the test environment.
19 | It is the closest thing to a real (production) deployment.
20 | 
21 | The CI/CD Pipeline will:
22 | 
23 | 1. Make wheel builds (and unit test them) for the package using a Job Matrix factoring OS x Python Versions.
24 | 2. Perform normal measurements of Code Coverage, Static Code Analysis, and Docker Build.
25 | 3. Publish the Python Wheel Distribution in the Test Environment at [test.pypi.org](https://test.pypi.org).
26 | 
-------------------------------------------------------------------------------- /docs/topics/development/topic_branch_to_dev.md: --------------------------------------------------------------------------------
1 | # Board `dev` branch via PR
2 | 
3 | > Merge `Topic Branch` via **PR** into `Integration Branch`
4 | 
5 | ## `How-to` Open PR to `dev`
6 | 
7 | Prerequisites:
8 | 
9 | - Your git HEAD is on the `Topic Branch`
10 | 
11 | !!! Tip
12 | 
13 |     Adjust the instructions, using Inputs for `Default` and `Integration` Branches
14 | 
15 | 
- **`INPUT`** Default Branch (ie main)
- **`INPUT`** Integration Branch (ie dev)

1. Define Default and Integration Branches

        export MAIN_BR=...
        export DEV_BR=...

2. Open PR to Integration Branch

    ```sh
    git checkout ${DEV_BR} && git pull && git rebase ${MAIN_BR} && git push -f
    git checkout - && git rebase ${DEV_BR}
    git push -f && gh pr create --base ${DEV_BR}
    ```

3. Enable Auto Merge

    ```sh
    gh pr merge --merge --auto
    ```

65 | 66 | **Congratulations** :smile: ! 67 | 68 | Now the **PR** shall auto-merge once all Required Checks Pass ! 69 | 70 | ## Next Steps 71 | 72 | Watch the **PR Validation** `Workflow` "live": 73 | ```sh 74 | gh run watch 75 | ``` 76 | -------------------------------------------------------------------------------- /docs/topics/generator_process.md: -------------------------------------------------------------------------------- 1 | # Parametrized Generator Process 2 | 3 | > Understand the **Generation Process**, in depth 4 | 5 | 6 | ```mermaid 7 | graph 8 | 9 | %% INPUTS to Generator 10 | subgraph inputs ["INPUT (Optional)"] 11 | input_things["Project Name 12 | Package Type 13 | CI/CD Pipeline Design 14 | Docs Builder 15 | ... 16 | ... 17 | "] 18 | 19 | end 20 | 21 | inputs ==> derive_default 22 | 23 | %% APPLICATION LAYER 24 | subgraph gen ["Generator"] 25 | 26 | %% INPUTS to Generator 27 | 28 | derive_default["Derive Default Param Values"] 29 | 30 | is_interactive{"Is interactive?"} 31 | 32 | prompt["Prompt User to override Default"] 33 | 34 | Gen["Generator"] 35 | 36 | subgraph coo ["Cookiecutter Template"] 37 | template>"Template Files"] 38 | cjson["cookiecutter.json"] 39 | 40 | end 41 | 42 | cjson --> derive_default 43 | derive_default --> is_interactive 44 | is_interactive -- "No" --> Gen 45 | is_interactive --"yes"--> prompt 46 | prompt --> Gen 47 | template ==> Gen 48 | 49 | end 50 | 51 | Gen ==> out 52 | 53 | %% OUTPUTS of Generator 54 | subgraph out ["GENERATED FILES"] 55 | py["Python Modules 56 | Test Suite 57 | CI/CD Pipeline 58 | docs 59 | Python Package Metadata 60 | lint 61 | docker 62 | "] 63 | 64 | end 65 | ``` 66 | 67 | 68 | --- 69 | 70 | !!! Tip 71 | Right-click -> open image in new tab 72 | 73 | ![Generator Flowchart](../assets/generator-flowchart.svg) 74 | -------------------------------------------------------------------------------- /docs/topics/why_this_package.md: -------------------------------------------------------------------------------- 1 | # Why this Generator? 2 | 3 | *So, why choose this Python Package Generator?* 4 | 5 | ## Robust CLI 6 | 7 | You want an `easy-to-use`, `cross-platform` CLI. 8 | 9 | - It offers a **1-click** command, or an option for an interactive `wizard`. 10 | - **Tested** on **15 different setups**, across multiple `Platforms` and `Python Interpreters`: 11 | - **OS**: {Ubuntu, MacOS, Windows} X **Python**: {3.7, 3.8, 3.9, 3.10, 3.11, 3.12} 12 | - Built on established software, such as *cookiecutter* and *jinja2*. 13 | 14 | ## "DevOps": aka Automations and CI/CD 15 | 16 | You want to focus on your *business logic* and *test cases* in new Python projects. 17 | 18 | - Scaffolded project is **one push** away from triggering its **CI/CD pipeline** on GitHub Actions. 19 | - **Continuous Deployment**, publishing at `pypi.org`, `Docker Hub`, and `Read The Docs`. 20 | - Designed for **GitOps**, supporting various `automated developer activities`. 21 | - **Automations** with the same entry point for both **CI and Local** runs, via `tox`. 22 | - Stress-Testing, with a **Job Matrix** spanning multiple `Python Interpreters` and `Operating Systems`. 23 | 24 | ## Approved Tooling 25 | 26 | You want the best tools under your belt for your development lifecycle. 27 | 28 | - `tox`, `poetry`, `ruff`, `mypy`, `pytest`, `black`, `isort`, `mkdocs`, `sphinx`. 29 | 30 | ## Template Variant 31 | 32 | You want `poetry`, but what if you want to develop a `pytest plugin`? 
33 | 34 | - Generate **Library**: a Python Distribution, offering modules: Python API/SDK. 35 | - Configured with **poetry** as the build backend and Package Manager. 36 | 37 | - Generate **CLI**: a Python Distribution, offering modules and a CLI as an entry point. 38 | - Configured with **poetry** as the build backend and Package Manager. 39 | 40 | - Generate **Pytest Plugin**: a Python Distribution, designed for a *pytest plugin*. 41 | - Configured with **setuptools** backend, as required by `pytest`! 42 | -------------------------------------------------------------------------------- /docs/tutorials/index.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/docs/tutorials/index.md -------------------------------------------------------------------------------- /scripts/distro-sem-ver-bump.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # POSIX-compliant shell script 4 | 5 | VERSION="${1}" 6 | GITHUB_ORG="${2:-boromir674}" 7 | REPO="${3:-cookiecutter-python}" 8 | 9 | # CONSTANTS 10 | VERSION_VAR='__version__' 11 | 12 | 13 | ## 1. Module Specific - Sem Ver 14 | INIT_FILE='src/cookiecutter_python/__init__.py' 15 | sed -i.bak -E "s/(${VERSION_VAR} = ['\"])[0-9]+\.[0-9]+\.[0-9]+(['\"])/\\1${VERSION}\\2/" "${INIT_FILE}" && rm "${INIT_FILE}.bak" 16 | 17 | ## 2. Python Poetry BUILD - Bound - Sem Ver 18 | 19 | # Until uv migration is verified we must update all regex matches (ie for poetry and uv config sections!) 20 | PYPROJECT='pyproject.toml' 21 | sed -i.bak -E "s/(version = ['\"])[0-9]+\.[0-9]+\.[0-9]+(['\"])/\\1${VERSION}\\2/" "${PYPROJECT}" && rm "${PYPROJECT}.bak" 22 | 23 | 24 | ## 3. Python Setuptools BUILD - Bound - Sem Ver 25 | # SETUP_PY='setup.py' 26 | # sed -i -E "s/(version\s*=\s*['\"])[0-9]+\.[0-9]+\.[0-9]+(['\"])/\1${VERSION}\2/" "${SETUP_PY}" 27 | # sed -i -E "s/(download_url\s*=\s*https:\/\/github.com\/${GITHUB_ORG}\/${REPO}\/archive\/v)[0-9]+\.[0-9]+\.[0-9]+(\.tar\.gz)/\1${VERSION}\2/" "${SETUP_PY}" 28 | 29 | ## 4. JS/TS / Node BUILD - Bound - Sem Ver 30 | # PACKAGE_JSON='package.json' 31 | # sed -i -E "s/(\"version\": \"v?)[0-9]+\.[0-9]+\.[0-9]+(\")/\1${VERSION}\2/" "${PACKAGE_JSON}" 32 | -------------------------------------------------------------------------------- /scripts/load-to-rt.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # Takes Changes in HEAD, and puts them on 'Release Train' (RT) 4 | 5 | set -e 6 | 7 | # Initialize variables 8 | CHANGES_BR="$(git rev-parse --abbrev-ref HEAD)" 9 | tag='board-rt' # Default value for tag 10 | 11 | # Process arguments 12 | for arg in "$@" 13 | do 14 | case $arg in 15 | # Put Changes in RT, and start RT --> Release 16 | --close) 17 | tag='auto-release' 18 | shift # Remove --close from processing 19 | ;; 20 | *) 21 | # If it's not '--close', treat it as CHANGES_BR 22 | CHANGES_BR="$arg" 23 | ;; 24 | esac 25 | done 26 | 27 | # branch with user's changes (ie code developed) 28 | CHANGES_BR="${1:-$(git rev-parse --abbrev-ref HEAD)}" 29 | 30 | if [ "$CHANGES_BR" = "main" ] || [ "$CHANGES_BR" = "master" ] || [ "$CHANGES_BR" = "release-train" ]; then 31 | echo " [REQ]: CHANGES_BR must not be main, master or release-train!" 32 | echo 33 | echo " [FIX]: Please checkout a different branch and re-run this script." 34 | echo "Exiting ..." 
35 | exit 1 36 | fi 37 | 38 | echo "[STEP]: Ensure Upstream is up-to-date with User's Branch" 39 | git push -u origin HEAD 40 | 41 | # GIT OPS 42 | export tag='board-rt' 43 | 44 | echo "[STEP]: Tag Commit: $tag" 45 | (git tag "$tag" || (echo "* Tag $tag already exists" && git tag -d "$tag" && echo "* Deleted tag ${tag}" && git tag "$tag") && echo " -> Created tag $tag") 46 | 47 | echo "[STEP]: Push Tag: $tag" 48 | (git push origin --delete "$tag" && echo "* Deleted Remote tag ${tag}") || echo "* Remote Tag $tag does not exist" 49 | git push origin "$tag" && echo " -> Pushed tag $tag" 50 | 51 | echo 52 | echo " DONE !!" 53 | echo 54 | echo " Triggered Boarding Worklow!" 55 | 56 | # NEXT STEPS 57 | 58 | echo 59 | echo " ---> NEXT STEPS:" 60 | echo 61 | echo "1. Check CI Workflow: https://github.com/boromir674/cookiecutter-python-package/actions/workflows/load-to-rt.yml" 62 | 63 | echo 64 | echo "Run: ./scripts/start-rt.sh" 65 | echo 66 | echo "Run: ./scripts/open-doors.sh" 67 | 68 | # END 69 | echo 70 | -------------------------------------------------------------------------------- /scripts/mypy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | ## Run Mypy against Code 4 | 5 | # Env Required: .lint-env 6 | 7 | set -e 8 | 9 | # if env notr found exit with error 10 | if [ ! -d .lint-env ]; then 11 | echo "No .lint-env found, please run 'uv venv .lint-env' first" 12 | exit 1 13 | fi 14 | 15 | 16 | # uv export --no-emit-project --no-dev --extra typing --frozen --format requirements-txt -o prod+type.txt 17 | 18 | # . .lint-env/bin/activate 19 | 20 | # uv pip install --no-deps -r prod+type.txt 21 | 22 | # set mypy environment 23 | export MYPYPATH=${MYPYPATH:-src/stubs} 24 | 25 | # DRYness 26 | PKG=${PGK:-src/cookiecutter_python} 27 | 28 | mypy --show-error-codes --check-untyped-defs \ 29 | --exclude tests/data \ 30 | --check-untyped-defs \ 31 | "${PKG}/hooks" \ 32 | "${PKG}/backend" "${PKG}/handle" \ 33 | "${PKG}/utils.py" "${PKG}/exceptions.py" \ 34 | "${PKG}/cli.py" "${PKG}/cli_handlers.py" \ 35 | "${PKG}/__main__.py" "${PKG}/__init__.py" \ 36 | tests 37 | -------------------------------------------------------------------------------- /scripts/open-doors.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # Open Doors, after Release Train reaches destination 4 | 5 | set -e 6 | 7 | export tt='open-doors' 8 | 9 | set +e 10 | git tag -d "$tt" 11 | git push --delete origin "$tt" 12 | 13 | set -e 14 | git tag "$tt" 15 | 16 | git push origin "$tt" 17 | -------------------------------------------------------------------------------- /scripts/sem-ver-bump.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # NEW VERSION for Release 4 | VERSION="${1}" 5 | # EXAMPLE: 1.4.5 6 | 7 | # Sem Ver Major Minor Patch + Pre-release metadata 8 | # regex="[0-9]+\.[0-9]+\.[0-9]+(?:\-[a-zA-Z0-9]+(?:\.[a-zA-Z0-9]+)*)?" 9 | 10 | regex="[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9]+(\.[a-zA-Z0-9]+)*)?" 11 | 12 | set -e 13 | 14 | ## 1. DISTRO Sem Ver - needed for Prod tag 15 | bash ./scripts/distro-sem-ver-bump.sh "${VERSION}" 16 | 17 | ## 2. 
README.md - Sem Ver 18 | README_MD='README.md' 19 | # sed -i -E "s/(['\"]?v?)[0-9]+\.[0-9]+\.[0-9]+(['\"]?)/\1${VERSION}\2/" "${README_MD}" 20 | 21 | # Replace occurences such as /v2.5.8/ with /v2.5.9/ 22 | sed -i -E "s/(['\"]?v?)${regex}(['\"]?)/\1${VERSION}\2/" "${README_MD}" 23 | 24 | # Replace occurences such as /v2.5.8..main with /v2.5.9..main 25 | sed -i -E "s/(['\"]?v?)${regex}\.\./\1${VERSION}../" "${README_MD}" 26 | 27 | 28 | # Sphinx Docs - Sem Ver 29 | # DOCS_CONF='docs/conf.py' 30 | # sed -i -E "s/(release\s*=\s*['\"]v?).+(['\"])/\1${VERSION}\2/" "${DOCS_CONF}" 31 | 32 | ### README.rst - Sem Ver 33 | # README_RST='README.rst' 34 | 35 | # sed -i -E "s/(['\"]?v?)[0-9]+\.[0-9]+\.[0-9]+(['\"]?)/\1${VERSION}\2/" "${README_RST}" 36 | # sed -i -E "s/(['\"]?v)${regex}(['\"]?)/\1${VERSION}\2/" "${README_RST}" 37 | # sed -i -E "s/(['\"]?v?)${regex}(\/|\.\.)/\1${VERSION}\2/" "${README_RST}" 38 | # sed -i -E "s/(['\"]?v?)${regex}((\/|\.\.))/\1${VERSION}\2/g" "${README_RST}" 39 | -------------------------------------------------------------------------------- /scripts/sphinx-process.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | sphinx-build -E -b doctest docs docs-dist 4 | sphinx-build -E -b html docs docs-dist 5 | sphinx-build -b spelling docs docs-dist 6 | sphinx-build -b linkcheck docs docs-dist 7 | 8 | echo "View documentation at docs-dist/index.html; it is ready to be hosted!" 9 | -------------------------------------------------------------------------------- /scripts/update-snapshot-interactive.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # Update the Snashot Biskotaki (ci) Test Data maintained for 4 | # Regression Tests 5 | 6 | set -e 7 | 8 | # From 'edit' mode installation: 9 | echo 10 | tox -e dev -vv --notest 11 | 12 | set +e 13 | rm -rf /tmp/biskotaki 14 | set -e 15 | 16 | ### RUN Generator in Interactive Mode, prompting/asking for user input ### 17 | echo 18 | .tox/dev/bin/generate-python --offline --config-file .github/biskotaki.yaml -o /tmp/ 19 | 20 | ### UPDATE SHAPSHOT, by Copying all Generated files and folders recursively ### 21 | INTERACTIVE_SHAPSHOT=${INTERACTIVE_SHAPSHOT:-tests/data/snapshots/biskotaki-interactive} 22 | 23 | set +e 24 | rm -rf "${INTERACTIVE_SHAPSHOT}" 25 | set -e 26 | # copy generated biskotaki to 'biskotaki-no-input' test Snapshot 27 | cp -r /tmp/biskotaki/ "${INTERACTIVE_SHAPSHOT}" 28 | 29 | 30 | # show diff of biskotaki-interactive 31 | echo 32 | git diff --stat "${INTERACTIVE_SHAPSHOT}" 33 | 34 | # get only last part of path from NO_INPUT_SHAPSHOT 35 | INTERACTIVE_SHAPSHOT_NAME=$(echo "${INTERACTIVE_SHAPSHOT}" | awk -F/ '{print $NF}') 36 | 37 | echo 38 | echo "Next steps:" 39 | echo 40 | echo "git add ${INTERACTIVE_SHAPSHOT}" 41 | echo "git commit -m \"tests(data): update ${INTERACTIVE_SHAPSHOT_NAME} Snapshot, used in Regression Testing\"" 42 | echo 43 | 44 | ## GIT ADD ## 45 | # git add "${INTERACTIVE_SHAPSHOT}" 46 | # echo 47 | 48 | ## GIT COMMIT ## 49 | # git cz 50 | # echo 51 | # echo "Snapshot updated!" 
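The two `update-snapshot*.sh` scripts (interactive above, non-interactive below) refresh the snapshot directories under `tests/data/snapshots/`, which the regression test suite compares against freshly generated projects. The sketch below is illustrative only: the helper name, the default snapshot path and the comparison strategy are assumptions for demonstration, not the project's actual regression-test code (which lives under `tests/`).

```python
# Illustrative sketch: compare a freshly generated project tree against a
# committed snapshot directory, failing on any extra, missing or differing file.
import filecmp
from pathlib import Path

SNAPSHOT = Path("tests/data/snapshots/biskotaki-no-input")  # maintained by update-snapshot.sh


def assert_trees_equal(generated: Path, snapshot: Path = SNAPSHOT) -> None:
    """Recursively assert that two directory trees have identical contents."""
    mismatches = []

    def _collect(cmp: filecmp.dircmp) -> None:
        # names present on only one side, plus files whose contents differ
        mismatches.extend(cmp.left_only + cmp.right_only + cmp.diff_files)
        for sub in cmp.subdirs.values():
            _collect(sub)

    _collect(filecmp.dircmp(str(generated), str(snapshot)))
    assert not mismatches, f"Generated project diverges from snapshot: {sorted(mismatches)}"
```

A regression test would call something like `assert_trees_equal(Path(project_dir))` right after invoking the generator with the same `--config-file .github/biskotaki.yaml` input the scripts use.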
-------------------------------------------------------------------------------- /scripts/update-snapshot.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # Update the Snashot Biskotaki (ci) Test Data maintained for Regression Tests 4 | 5 | set -e 6 | 7 | # From 'edit' mode installation: 8 | echo 9 | tox -e dev -vv --notest 10 | 11 | set +e 12 | rm -rf /tmp/biskotaki 13 | set -e 14 | 15 | ### RUN Generator in Non-Interactive Mode ### 16 | echo 17 | .tox/dev/bin/generate-python --offline --no-input --config-file .github/biskotaki.yaml -o /tmp/ 18 | 19 | 20 | ### UPDATE SHAPSHOT, by Copying all Generated files and folders recursively ### 21 | NO_INPUT_SHAPSHOT=${NO_INPUT_SHAPSHOT:-tests/data/snapshots/biskotaki-no-input} 22 | set +e 23 | rm -rf "${NO_INPUT_SHAPSHOT}" 24 | set -e 25 | # copy generated biskotaki to 'biskotaki-no-input' test Snapshot 26 | cp -r /tmp/biskotaki/ "${NO_INPUT_SHAPSHOT}" 27 | 28 | 29 | # show diff of biskotaki-no-input 30 | echo 31 | git diff --stat "${NO_INPUT_SHAPSHOT}" 32 | 33 | # get only last part of path from NO_INPUT_SHAPSHOT 34 | NO_INPUT_SHAPSHOT_NAME=$(echo "${NO_INPUT_SHAPSHOT}" | awk -F/ '{print $NF}') 35 | 36 | 37 | echo 38 | echo "Next steps:" 39 | echo 40 | echo "git add ${NO_INPUT_SHAPSHOT}" 41 | echo "git commit -m \"tests(data): update ${NO_INPUT_SHAPSHOT_NAME} Snapshot, used in Regression Testing\"" 42 | echo 43 | 44 | ## GIT ADD ## 45 | # git add "${NO_INPUT_SHAPSHOT}" 46 | # echo 47 | 48 | ## GIT COMMIT ## 49 | # git cz 50 | # echo 51 | # echo "Snapshot updated!" 52 | -------------------------------------------------------------------------------- /src/cookiecutter_python/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '2.6.2' 2 | 3 | from . import _logging # noqa 4 | -------------------------------------------------------------------------------- /src/cookiecutter_python/__main__.py: -------------------------------------------------------------------------------- 1 | """Allow to run Python Generator also through `python -m cookiecutter_python`. 2 | 3 | Allows Python Generator to be executed through `python -m cookiecutter_python`. 
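For example (illustrative), both invocations below are expected to behave the same:

    python -m cookiecutter_python --help
    generate-python --help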
4 | """ 5 | from __future__ import absolute_import 6 | 7 | from cookiecutter_python.cli import main 8 | 9 | 10 | if __name__ == "__main__": # pragma: no cover 11 | main(prog_name='generate-python') 12 | -------------------------------------------------------------------------------- /src/cookiecutter_python/_find_lib.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import typing as t 3 | from importlib import import_module 4 | from os import path 5 | from pathlib import Path 6 | 7 | 8 | SRC_DIR = Path(__file__).parent.parent.resolve() 9 | 10 | __all__ = ['find_lib'] 11 | 12 | T = t.TypeVar('T') 13 | 14 | 15 | def find_lib(module: t.Optional[str] = None) -> t.Tuple[str, str]: 16 | lib_dir: str 17 | if module is None: # set path as the dir where the invoking code is 18 | namespace = sys._getframe(2).f_globals # caller's globals 19 | # Set as Lib the directory where the invoker module is located at runtime 20 | lib_dir = path.dirname(path.realpath(namespace['__file__'])) 21 | dotted_lib_path: str = '.'.join( 22 | Path(lib_dir).relative_to(SRC_DIR).parts 23 | ) # pragma: no mutate 24 | return lib_dir, dotted_lib_path 25 | 26 | # Import input module 27 | # module_object = import_module(module.replace('/', '.')) 28 | module_object = import_module(module) # TODO: read __file__ without importing 29 | 30 | # Set as Lib the directory where the INPUT module is located at runtime 31 | # if top-level init is at '/site-packages/some_python_package/__init__.py' 32 | # then distro_path is '/site-packages/some_python_package' 33 | lib_dir = str(Path(str(module_object.__file__)).parent) 34 | return lib_dir, module 35 | -------------------------------------------------------------------------------- /src/cookiecutter_python/_logging_config.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | 'FILE_TARGET_LOGS', 3 | ] 4 | 5 | 6 | FILE_TARGET_LOGS: str = 'cookie-py.log' 7 | """File Name to use for by the File Handler of emitted Log Records.""" 8 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/__init__.py: -------------------------------------------------------------------------------- 1 | from .main import generate 2 | from .post_main import CheckWebServerError 3 | from .sanitization import sanitize 4 | 5 | 6 | __all__ = [ 7 | 'generate', 8 | 'CheckWebServerError', 9 | 'sanitize', 10 | ] 11 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/check_server_result.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class CheckWebServerResult(ABC): 5 | """Interface for checking the result of a web server request.""" 6 | 7 | @property 8 | @abstractmethod 9 | def future(self): 10 | raise NotImplementedError 11 | 12 | @property 13 | @abstractmethod 14 | def name(self) -> str: 15 | """The name of the resource requested to search on the web server. 16 | 17 | Returns: 18 | str: the name of the resource (ie python package slug, rtd project) 19 | """ 20 | raise NotImplementedError 21 | 22 | @property 23 | @abstractmethod 24 | def service_name(self) -> str: 25 | """The name of the web server. 
26 | 27 | Returns: 28 | str: the name (slug) of the web server 29 | """ 30 | raise NotImplementedError 31 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/error_handling/__init__.py: -------------------------------------------------------------------------------- 1 | from .handler_builder import HandlerBuilder 2 | 3 | 4 | __all__ = ['HandlerBuilder'] 5 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/error_handling/handler_builder.py: -------------------------------------------------------------------------------- 1 | import typing as t 2 | 3 | import click 4 | from software_patterns import SubclassRegistry 5 | 6 | 7 | class HandlerBuilderRegistry(SubclassRegistry[t.Callable]): 8 | pass 9 | 10 | 11 | class HandlerBuilder(metaclass=HandlerBuilderRegistry): 12 | pass 13 | 14 | 15 | @HandlerBuilder.register_as_subclass('non-critical') 16 | class NonCriticalHandlerBuilder: 17 | def __call__(self, error): 18 | click.echo(error) 19 | 20 | 21 | @HandlerBuilder.register_as_subclass('critical') 22 | class CriticalHandlerBuilder: 23 | def __call__(self, error): 24 | click.echo('{}'.format(str(error))) 25 | click.echo('Error message: {}'.format(str(error))) 26 | 27 | # Message that program is exiting due to error 28 | click.echo('Exiting due to error') 29 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/generator/__init__.py: -------------------------------------------------------------------------------- 1 | from .generator import generator 2 | 3 | 4 | __all__ = ['generator'] 5 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/generator/generator.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | 4 | from cookiecutter.main import cookiecutter as cookiecutter_main_handler 5 | from software_patterns import ProxySubject, Singleton 6 | 7 | from ..proxy import BaseProxy 8 | 9 | 10 | __all__ = ['cookiecutter'] 11 | 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | my_dir = os.path.dirname(os.path.realpath(__file__)) 16 | 17 | 18 | class CookiecutterSubject(ProxySubject[str]): 19 | pass 20 | 21 | 22 | class CookiecutterProxy(BaseProxy[str]): 23 | """Proxy to cookiecutter: 'from cookiecutter.main import cookiecutter'.""" 24 | 25 | def request(self, *args, **kwargs) -> str: 26 | """[summary] 27 | 28 | Returns: 29 | str: [description] 30 | """ 31 | logger.info( 32 | *BaseProxy.log_info_args('Cookiecutter Proxy Request: %s', *args, **kwargs) 33 | ) 34 | return super().request(*args, **kwargs) 35 | 36 | 37 | # Singleton and Adapter of Cookiecutter Proxy 38 | class CookiecutterProxySingleton(metaclass=Singleton): 39 | def __init__(self, proxy_factory) -> None: 40 | super().__init__() 41 | self._proxy = proxy_factory() 42 | 43 | def __call__(self, *args, **kwds) -> str: 44 | return self._proxy.request(*args, **kwds) 45 | 46 | 47 | cookiecutter = CookiecutterProxySingleton( 48 | lambda: CookiecutterProxy(CookiecutterSubject(cookiecutter_main_handler)) 49 | ) 50 | 51 | generator = cookiecutter 52 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/hosting_services/__init__.py: -------------------------------------------------------------------------------- 1 | from .check_engine import Engine 2 | 3 | 4 | __all__ = 
['Engine'] 5 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/hosting_services/check_web_hosting_service.py: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | 3 | import attr 4 | from requests_futures.sessions import FuturesSession 5 | 6 | 7 | @attr.s(auto_attribs=True, slots=True, frozen=True) 8 | class WebHostingServiceChecker: 9 | # Check if CI config is out of the box ok to integrate with hosting services 10 | # or due to namespace collisions (ie on pypi.org/project/, slight change is needed 11 | """Check if CI config is out-of-the-box ok to integrate with hosting services""" 12 | url_getter: Callable[[str], str] 13 | 14 | def __call__(self, name: str): 15 | session = FuturesSession() 16 | future = session.get(self.url_getter(name)) 17 | return type( 18 | 'RequestResult', 19 | (), 20 | {'future': future, 'name': name, 'service_name': str(self.url_getter)}, 21 | ) 22 | 23 | def __str__(self): 24 | return str(self.url_getter) 25 | 26 | @staticmethod 27 | def create(hosting_service): 28 | return WebHostingServiceChecker(hosting_service.url) 29 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/hosting_services/checker.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | import attr 4 | 5 | from .checkers import Checkers 6 | 7 | 8 | @attr.s(auto_attribs=True, slots=True, frozen=True) 9 | class Checker: 10 | config_file: Union[None, str] 11 | default_config: Union[None, bool] 12 | checkers: Checkers 13 | 14 | def __getattr__(self, service_name: str): 15 | return self.checkers[service_name] 16 | 17 | def __iter__(self): 18 | return iter(self.checkers) 19 | 20 | @staticmethod 21 | def from_hosting_info(config_file, default_config, hosting_infos): 22 | """Activate Web Host Checks if user config and NOT default config""" 23 | return Checker( 24 | config_file, 25 | default_config, 26 | Checkers.from_hosting_info( 27 | hosting_infos, 28 | config_file and not default_config, 29 | config_file, 30 | ), 31 | ) 32 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/hosting_services/checkers.py: -------------------------------------------------------------------------------- 1 | from typing import MutableMapping 2 | 3 | import attr 4 | 5 | from .check_service import ServiceChecker 6 | 7 | 8 | @attr.s(auto_attribs=True, slots=True, frozen=True) 9 | class Checkers: 10 | _checkers: MutableMapping 11 | 12 | def __getitem__(self, item): 13 | return self._checkers[item] 14 | 15 | def __iter__(self): 16 | return iter(self._checkers.values()) 17 | 18 | @staticmethod 19 | def from_hosting_info(hosting_infos, activate_flag, config_file): 20 | return Checkers( 21 | { 22 | str(x.service): ServiceChecker.create(x, activate_flag, config_file) 23 | for x in hosting_infos 24 | } 25 | ) 26 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/hosting_services/exceptions.py: -------------------------------------------------------------------------------- 1 | __all__ = ['ContextVariableDoesNotExist'] 2 | 3 | 4 | class ContextVariableDoesNotExist(Exception): 5 | """Raised when a Context Variable does not exist, in a dict-like object.""" 6 | 7 | pass 8 | -------------------------------------------------------------------------------- 
/src/cookiecutter_python/backend/hosting_services/extract_name.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import Callable, Mapping 3 | 4 | import attr 5 | 6 | from ..user_config_proxy import get_user_config 7 | from .exceptions import ContextVariableDoesNotExist 8 | from .value_extractor import BaseValueExtractor 9 | 10 | 11 | # The Dict Schema expected to be parsed 12 | # class GeneratorYamlData(TypedDict): 13 | # default_context: Dict[str, Any] 14 | 15 | 16 | @attr.s(auto_attribs=True, slots=True, frozen=True) 17 | class NameExtractor: 18 | """Extract Context Value, from a User's Config (YAML) file.""" 19 | 20 | name_extractor: Callable[[Mapping], str] 21 | 22 | def __call__(self, config_file: str) -> str: 23 | # Delegate to Cookiecutter YAML parsing of User Config 24 | # config_data: GeneratorYamlData = get_user_config( 25 | config_data = get_user_config( 26 | config_file=config_file, 27 | # MUST be False, otherwise Cookiecutter does not do YAML parsing 28 | default_config=False, 29 | ) 30 | context_data = config_data['default_context'] 31 | try: 32 | return self.name_extractor(context_data) 33 | except KeyError as error: 34 | raise ContextVariableDoesNotExist( 35 | "{msg}: {data}".format( 36 | msg="Attempted to retrieve non-existant variable", 37 | data=json.dumps( 38 | { 39 | 'variable_name': str(self.name_extractor), 40 | 'available_variables': '[{keys}]'.format( 41 | keys=', '.join( 42 | tuple(sorted([str(x) for x in context_data.keys()])) 43 | ), 44 | ), 45 | }, 46 | indent=4, 47 | sort_keys=True, 48 | ), 49 | ), 50 | ) from error 51 | 52 | @staticmethod 53 | def create(hosting_service_info): 54 | return NameExtractor(BaseValueExtractor(hosting_service_info.variable_name)) 55 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/hosting_services/handle_hosting_service_check.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import typing as t 3 | 4 | import attr 5 | from requests.exceptions import ConnectionError as RequestsConnectionError 6 | 7 | 8 | HostingServiceChecker = t.Callable[[str], bool] 9 | 10 | 11 | @attr.s(auto_attribs=True, slots=True) 12 | class CheckHostingServiceHandler: 13 | check_hosting_service: HostingServiceChecker 14 | service_name: str = attr.ib( 15 | default=attr.Factory(lambda self: str(self.check_hosting_service), takes_self=True) 16 | ) 17 | package_name: str = attr.ib(init=False, default=None) 18 | 19 | def _handle_connection_error(self, error): 20 | print(error, file=sys.stderr) 21 | print( 22 | f"Could not establish connection to {self.service_name}.\n" 23 | "Could not determine whether the name " 24 | f"'{self.package_name}' is already \"taken\" on {self.service_name}." 25 | ) 26 | 27 | def __call__(self, package_name: str): 28 | try: 29 | res: bool = self.check_hosting_service(package_name) 30 | except RequestsConnectionError as error: 31 | self.package_name = package_name # a package "slug" (name) 32 | self._handle_connection_error(error) 33 | except Exception as error: # any other error 34 | print(str(error), file=sys.stdout) 35 | else: 36 | if res: 37 | print( 38 | f"Project registered under '{package_name}' is already TAKEN on " 39 | f"{self.service_name}.\nYou shall rename your Python Package first, if you" 40 | f" choose to publish it on {self.service_name}!" 
41 | ) 42 | else: 43 | print( 44 | f"Name '{package_name}' is AVAILABLE on {self.service_name}!\n" 45 | "You will not need to rename your Python Package if you choose to publish" 46 | f" it on {self.service_name} :)" 47 | ) 48 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/hosting_services/handler.py: -------------------------------------------------------------------------------- 1 | from typing import Mapping 2 | 3 | import attr 4 | 5 | from .handle_hosting_service_check import CheckHostingServiceHandler 6 | 7 | 8 | @attr.s(auto_attribs=True, slots=True, frozen=True) 9 | class Handlers: 10 | handlers: Mapping 11 | 12 | def __getattr__(self, service_name): 13 | return lambda request_result: self.handlers[service_name](request_result) 14 | 15 | def __call__(self, request_result): 16 | return getattr(self, request_result.service_name)(request_result) 17 | 18 | @staticmethod 19 | def from_checkers(checkers): 20 | return Handlers({str(x): CheckHostingServiceResultHandler(str(x)) for x in checkers}) 21 | 22 | 23 | @attr.s(auto_attribs=True, slots=True, frozen=True) 24 | class CheckHostingServiceResultHandler: 25 | service_name: str 26 | 27 | @staticmethod 28 | def is_future_response_200(result) -> bool: 29 | return result.future.result().status_code == 200 30 | 31 | def __call__(self, request_result): 32 | if request_result: 33 | return CheckHostingServiceHandler( 34 | lambda x: self.is_future_response_200(request_result), self.service_name 35 | )(request_result.name) 36 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/hosting_services/value_extractor.py: -------------------------------------------------------------------------------- 1 | from typing import Mapping 2 | 3 | import attr 4 | 5 | 6 | class ValueExtractor: 7 | def __call__(self, data: Mapping) -> str: 8 | raise NotImplementedError 9 | 10 | def __str__(self): 11 | raise NotImplementedError 12 | 13 | 14 | @attr.s(auto_attribs=True, slots=True, frozen=True) 15 | class BaseValueExtractor(ValueExtractor): 16 | key_name: str 17 | 18 | def __call__(self, data: Mapping) -> str: 19 | return data[self.key_name] 20 | 21 | def __str__(self): 22 | return self.key_name 23 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/hosting_services/web_hosting_service.py: -------------------------------------------------------------------------------- 1 | import attr 2 | from software_patterns import SubclassRegistry 3 | 4 | 5 | @attr.s(auto_attribs=True, slots=True, frozen=True) 6 | class URLGetter: 7 | url_pattern: str 8 | service_name: str 9 | 10 | def __call__(self, name: str): 11 | return self.url_pattern.format(name=name) 12 | 13 | def __str__(self): 14 | return self.service_name 15 | 16 | 17 | @attr.s(auto_attribs=True, slots=True, frozen=True) 18 | class WebHostingService: 19 | url: URLGetter 20 | 21 | def __str__(self): 22 | return str(self.url) 23 | 24 | @staticmethod 25 | def create(url_pattern: str, service_name: str): 26 | return WebHostingService(URLGetter(url_pattern, service_name)) 27 | 28 | 29 | class HostingServiceInfo: 30 | def create(self, *args, **kwargs): 31 | """Factory method for creating WebHostingService instances. 
32 | 33 | Raises: 34 | NotImplementedError: [description] 35 | """ 36 | raise NotImplementedError 37 | 38 | @property 39 | def service(self): 40 | return self.create() 41 | 42 | @property 43 | def variable_name(self) -> str: 44 | raise NotImplementedError 45 | 46 | 47 | class HostingServicesInfo(SubclassRegistry[HostingServiceInfo]): 48 | pass 49 | 50 | 51 | class HostingServices(metaclass=HostingServicesInfo): 52 | pass 53 | 54 | 55 | @HostingServices.register_as_subclass('pypi') 56 | class PyPIServerFactory(HostingServiceInfo): 57 | def create(self, *args, **kwargs): 58 | return WebHostingService.create('https://pypi.org/project/{name}', 'pypi') 59 | 60 | @property 61 | def variable_name(self) -> str: 62 | return 'pkg_name' 63 | 64 | 65 | @HostingServices.register_as_subclass('readthedocs') 66 | class ReadTheDocsServerFactory(HostingServiceInfo): 67 | def create(self, *args, **kwargs): 68 | return WebHostingService.create('https://{name}.readthedocs.io/', 'readthedocs') 69 | 70 | @property 71 | def variable_name(self) -> str: 72 | return 'readthedocs_project_slug' 73 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/main.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | 4 | from .generator import generator 5 | from .post_main import post_main 6 | from .pre_main import pre_main 7 | from .request import Request 8 | 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | my_dir = os.path.dirname(os.path.realpath(__file__)) 13 | 14 | 15 | WEB_SERVERS = ['pypi', 'readthedocs'] 16 | 17 | 18 | def generate( 19 | # interactive=True, 20 | no_input=False, # INTERACTIVE ON by Default 21 | offline=False, 22 | # extra_context=None, 23 | replay=False, 24 | overwrite=False, 25 | output_dir='.', 26 | config_file=None, 27 | skip_if_file_exists=False, 28 | # deprecated 29 | default_config=False, 30 | password=None, 31 | directory=None, 32 | checkout=None, 33 | ### 34 | ) -> str: 35 | """Create Python Project, with CI/CD pipeline, from the project template. 36 | 37 | Generate/Scaffold a new Python Project, including configuration enabling 38 | automations such as CI and Continuous Delivery of Docker and Python 39 | 'artifacts', and Continuous Documentation of the Python Project. 40 | """ 41 | print('Start Python Generator !') 42 | # Future HTTP requests to pypi.org and readthedocs.org web servers 43 | request = pre_main( 44 | Request( 45 | config_file=config_file, 46 | default_config=default_config, 47 | web_servers=WEB_SERVERS, 48 | no_input=no_input, 49 | extra_context=None, 50 | offline=offline, 51 | ) 52 | ) 53 | ## GENERATION from Template; delegate to Cookiecutter callable ## 54 | project_dir = generator( 55 | # COOKIECUTTER TEMPLATE (cookiecutter.json) 56 | os.path.abspath(os.path.join(my_dir, '..')), # template dir path 57 | checkout=checkout, 58 | # no_input=no_input, 59 | no_input=True, 60 | extra_context=request.extra_context, 61 | replay=replay, 62 | overwrite_if_exists=overwrite, 63 | output_dir=output_dir, 64 | config_file=config_file, 65 | default_config=default_config, 66 | password=password, 67 | directory=directory, 68 | skip_if_file_exists=skip_if_file_exists, 69 | ) 70 | ## POST GENERATION ## 71 | # Check if out-of-the-box Generated Project, coincidentally, requires slight modifications 72 | # for automatic and seemless "PyPI Upload" and "ReadTheDocs Build" process to 73 | # work. 
This can happen if the project name is already taken by another project 74 | # on PyPI or ReadTheDocs. 75 | post_main(request) 76 | 77 | print('Finished :)') 78 | return project_dir 79 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/post_main.py: -------------------------------------------------------------------------------- 1 | from requests.exceptions import ConnectionError as RequestsConnectionError 2 | 3 | 4 | def post_main(request): 5 | """Check if any CI 'deployment' (ie in pypi), would require minor tweak.""" 6 | # ie if gen proj py pkg name is 'gg', and pypi.org/project/gg/ is already taken 7 | for result in request.check_results: 8 | try: 9 | request.check.handle(result) 10 | except RequestsConnectionError as error: 11 | raise CheckWebServerError( 12 | f"Connection error while checking {result.service_name} web server" 13 | ) from error 14 | 15 | 16 | class CheckWebServerError(Exception): 17 | """Raised on Connection Error, when Requesting a Web Server's Future.""" 18 | 19 | pass 20 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/pre_main.py: -------------------------------------------------------------------------------- 1 | from .helpers import parse_context 2 | from .hosting_services import Engine 3 | 4 | 5 | def pre_main(request): 6 | """Do preparatory steps before the Generation (rendering) process. 7 | 8 | Uses Futures to make async http request to pypi.org and readthedocs.org web 9 | servers to check if the "intented" project name/id is already taken. 10 | 11 | This is done to proactively notify the user after the Generation process 12 | that the project name/id is already taken, and to suggest they either change 13 | the project name/id or to re-run the generation process with a different 14 | project name/id. 15 | """ 16 | ## External Services Clients Initialization ## 17 | 18 | # Activate: if User Config is given and Default Config is False 19 | deactivate_signal = request.offline or bool(request.default_config) 20 | 21 | request.check = Engine.create(request.config_file, deactivate_signal) 22 | 23 | # Start Requesting Futures! 
- Hosting Service: PyPI, Read The Docs 24 | request.check_results = request.check.check(request.web_servers) 25 | """ 26 | If skipped due to missing info in User Config, we can expect Logs roughly as: 27 | logger.info( 28 | "Skipping check of remote server, because of missing context variable" 29 | ) 30 | logger.info(error) 31 | """ 32 | _context = request.extra_context or {} 33 | interactive_mode = not bool(request.no_input) 34 | 35 | # If INTERACTIVE, Run Interactive Dialog Pipeline, to update Context 36 | if interactive_mode: 37 | ### INTERACTIVE TERMINAL DIALOGS ### 38 | user_input = parse_context(request.config_file) 39 | 40 | ## STORE CONTEXT ## 41 | _context.update( 42 | { 43 | # Adapt from dialog to same interface as cookiecutter.json and biskotaki ci config file yaml 44 | 'interpreters': { 45 | 'supported-interpreters': user_input.pop('supported-interpreters') 46 | }, 47 | **user_input, 48 | } 49 | ) 50 | 51 | request.extra_context = dict(_context) if _context else None 52 | return request 53 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/proxy.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | from typing import Generic, Tuple, TypeVar 4 | 5 | from software_patterns import Proxy 6 | 7 | 8 | logger = logging.getLogger(__name__) 9 | 10 | T = TypeVar('T') 11 | 12 | 13 | class BaseProxy(Proxy[T], Generic[T]): 14 | @staticmethod 15 | def log_info_args(message: str, *args, **kwargs) -> Tuple[str, str]: 16 | return message, json.dumps( 17 | { 18 | 'keyword_args': {k: str(v) for k, v in kwargs.items()}, 19 | 'positional_args': [str(arg_value) for arg_value in args], 20 | }, 21 | indent=4, 22 | sort_keys=True, 23 | ) 24 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/request.py: -------------------------------------------------------------------------------- 1 | import typing as t 2 | from typing import Any, Iterable, List, Union 3 | 4 | import attr 5 | 6 | from .check_server_result import CheckWebServerResult 7 | 8 | 9 | @attr.s(kw_only=True, auto_attribs=True, slots=True) 10 | class Request: 11 | config_file: t.Union[str, None] 12 | default_config: bool 13 | web_servers: List[str] 14 | no_input: bool 15 | extra_context: t.Union[t.Dict, None] 16 | check: Any = attr.ib(default=None) 17 | check_results: Union[None, Iterable[CheckWebServerResult]] = attr.ib(default=None) 18 | offline: bool = False 19 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/sanitization/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
import interpreters_support, string_sanitizers 2 | from .input_sanitization import sanitize 3 | 4 | 5 | __all__ = ['sanitize', 'string_sanitizers', 'interpreters_support'] 6 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/sanitization/interpreters_support.py: -------------------------------------------------------------------------------- 1 | import typing as t 2 | 3 | from .input_sanitization import Sanitize 4 | 5 | 6 | InterpretersSequence = t.Sequence[str] 7 | 8 | 9 | # TODO Improvement: use an Enum 10 | 11 | # Must be maintained to match the available python interpreters on the CI Provider 12 | VALID_PYTHON_VERSIONS = { 13 | '3.6', 14 | '3.7', 15 | # TODO: verify which versions are available on the CI Provider and eliminate 16 | # the ones that are not available 17 | # TODO: from remaining available on CI versions, start throwing warning to user if theyt are about to be elminated 18 | # from CI in the near future, or if they are an old unmaintained python version 19 | '3.8', 20 | '3.9', 21 | '3.10', 22 | '3.11', 23 | '3.12', 24 | '3.13', 25 | '3.14', 26 | } 27 | 28 | 29 | @Sanitize.register_sanitizer('interpreters') 30 | def verify_input_interpreters(interpreters: InterpretersSequence) -> None: 31 | user_interpreters_set = set(interpreters) 32 | if len(user_interpreters_set) != len(interpreters): 33 | raise InvalidInterpretersError("Found duplicate interpreters!") 34 | 35 | if not user_interpreters_set.issubset(VALID_PYTHON_VERSIONS): 36 | # not all user requested interpreters are included in the supported ones 37 | raise InvalidInterpretersError( 38 | "Unsupported interpreter given Error!\n" 39 | + "Given interpreters: [{given}]\n".format(given=', '.join(interpreters)) 40 | + "Supported interpreters: [{supported}]\n".format( 41 | supported=', '.join(VALID_PYTHON_VERSIONS) 42 | ) 43 | + "Unsupported interpreters: [{unsupported}]".format( 44 | unsupported=', '.join(iter(unsupported_interpreters(interpreters))) 45 | ) 46 | ) 47 | 48 | 49 | def unsupported_interpreters(interpreters: InterpretersSequence) -> t.Iterator[str]: 50 | for interpreter in interpreters: 51 | if interpreter not in VALID_PYTHON_VERSIONS: 52 | yield interpreter 53 | 54 | 55 | @Sanitize.register_exception('interpreters') 56 | class InvalidInterpretersError(Exception): 57 | pass 58 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/sanitization/string_sanitizers/__init__.py: -------------------------------------------------------------------------------- 1 | from cookiecutter_python.utils import load 2 | 3 | 4 | load(type('Dummy', (), {})) 5 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/sanitization/string_sanitizers/base_sanitizer.py: -------------------------------------------------------------------------------- 1 | import typing as t 2 | from abc import ABC, abstractmethod 3 | 4 | import attr 5 | 6 | 7 | class SanitizerInterface(ABC): 8 | """Sanitizer for the Generator Input Parameters.""" 9 | 10 | def __call__(self, data: t.Any) -> None: 11 | """Sanitize input data. 12 | 13 | Verifies that the input data have a valid value and/or format. Raises an 14 | exception, if the data do pass the check(s). 
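        For instance (illustrative), the registered 'semantic-version' sanitizer
        rejects a value such as '1.2' (missing patch component) by raising
        InputValueError.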
15 | """ 16 | raise NotImplementedError 17 | 18 | 19 | class AbstractSanitizer(SanitizerInterface, ABC): 20 | verify: t.Callable[[t.Any], None] 21 | exception_msg: str 22 | 23 | @abstractmethod 24 | def log_message(self, error, data) -> t.Tuple: 25 | raise NotImplementedError 26 | 27 | def __call__(self, data): 28 | self.verify(data) 29 | 30 | 31 | @attr.s(auto_attribs=True, frozen=True, slots=True) 32 | class BaseSanitizer(AbstractSanitizer): 33 | verify: t.Callable[[t.Any], None] 34 | exception_msg: str 35 | _log_func: t.Callable[[str, t.Any], t.Tuple] 36 | 37 | def log_message(self, error: str, data) -> t.Tuple: 38 | return self._log_func(error, data) 39 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/sanitization/string_sanitizers/sanitize_reg_input.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import typing as t 4 | from typing import Pattern, Tuple 5 | 6 | from ..input_sanitization import Sanitize 7 | from .base_sanitizer import BaseSanitizer 8 | 9 | 10 | __all__ = ['BaseSanitizer', 'InputValueError'] 11 | 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | class RegExSanitizer: 17 | regex: t.ClassVar[Pattern] 18 | sanitizer: t.ClassVar[BaseSanitizer] 19 | exception_msg: t.ClassVar[str] 20 | 21 | def __call__(self, data): 22 | self.sanitizer(data) 23 | 24 | @classmethod 25 | def log_message(cls, error, data) -> t.Tuple: 26 | raise NotImplementedError 27 | 28 | @classmethod 29 | def _string(cls, data) -> str: 30 | if isinstance(data, str): 31 | return data 32 | return json.dumps(data, indent=4, sort_keys=True) 33 | 34 | def __init__(self): 35 | def _log_message(error, input_data): 36 | raw_log_args: Tuple = type(self).log_message(error, input_data) 37 | return tuple([raw_log_args[0]] + [self._string(x) for x in raw_log_args[1:]]) 38 | 39 | type(self).sanitizer = BaseSanitizer( 40 | self._verify, 41 | type(self).exception_msg if type(self).exception_msg else '', 42 | _log_message, 43 | ) 44 | 45 | def _verify(self, string: str): 46 | try: 47 | if not self.regex.match(string): 48 | msg = "RegEx Miss Match Error" 49 | logger.error(*self.sanitizer.log_message(msg, string)) 50 | raise RegExMissMatchError(msg) 51 | except RegExMissMatchError as not_matching_regex: 52 | raise InputValueError(self.sanitizer.exception_msg) from not_matching_regex 53 | 54 | 55 | class RegExMissMatchError(Exception): 56 | pass 57 | 58 | 59 | @Sanitize.register_exception('module-name') 60 | @Sanitize.register_exception('semantic-version') 61 | class InputValueError(Exception): 62 | pass 63 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/sanitization/string_sanitizers/sanitize_reg_module_name.py: -------------------------------------------------------------------------------- 1 | import re 2 | import typing as t 3 | 4 | from ..input_sanitization import Sanitize 5 | from .sanitize_reg_input import RegExSanitizer 6 | 7 | 8 | class ModuleNameSanitizer(RegExSanitizer): 9 | regex = re.compile(r'^[_a-zA-Z][_a-zA-Z0-9]+$') 10 | exception_msg = 'Expected a valid Python Module name value' 11 | 12 | @classmethod 13 | def log_message(cls, error, module) -> t.Tuple[t.Union[str, t.Mapping], ...]: 14 | return ( 15 | "%s: %s", 16 | str(error), 17 | { 18 | 'module_name_regex': str(cls.regex.pattern), 19 | 'module_name': str(module), 20 | }, 21 | ) 22 | 23 | 24 | module_name_sanitizer = ModuleNameSanitizer() 25 
| 26 | 27 | @Sanitize.register_sanitizer('module-name') 28 | def sanitize_module_name(module_name: str) -> None: 29 | module_name_sanitizer(module_name) 30 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/sanitization/string_sanitizers/sanitize_reg_version.py: -------------------------------------------------------------------------------- 1 | import re 2 | import typing as t 3 | 4 | from ..input_sanitization import Sanitize 5 | from .sanitize_reg_input import RegExSanitizer 6 | 7 | 8 | class VersionSanitizer(RegExSanitizer): 9 | regex = re.compile( 10 | r'^(?P0|[1-9]\d*)' 11 | r'\.' 12 | r'(?P0|[1-9]\d*)' 13 | r'\.' 14 | r'(?P0|[1-9]\d*)' 15 | r'(?:-' 16 | r'(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)' 17 | r'(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?' 18 | r'(?:\+' 19 | r'(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$' 20 | ) 21 | exception_msg = 'Expected a Semantic Version value' 22 | 23 | @classmethod 24 | def log_message(cls, error, string) -> t.Tuple[t.Union[str, t.Mapping], ...]: 25 | return ( 26 | "%s: %s", 27 | str(error), 28 | { 29 | 'semver_regex': str(cls.regex.pattern), 30 | 'version_string': str(string), 31 | }, 32 | ) 33 | 34 | 35 | version_sanitizer = VersionSanitizer() 36 | 37 | 38 | @Sanitize.register_sanitizer('semantic-version') 39 | def sanitize_version(version: str) -> None: 40 | version_sanitizer(version) 41 | -------------------------------------------------------------------------------- /src/cookiecutter_python/backend/user_config_proxy.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from typing import Any, MutableMapping 4 | 5 | from cookiecutter.config import get_user_config as cookie_get_config 6 | from software_patterns import ProxySubject, Singleton 7 | 8 | from .proxy import BaseProxy 9 | 10 | 11 | __all__ = ['get_user_config'] 12 | 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | 17 | my_dir = os.path.dirname(os.path.realpath(__file__)) 18 | 19 | 20 | ReturnValueType = MutableMapping[str, Any] 21 | 22 | 23 | class GetUserConfigSubject(ProxySubject[ReturnValueType]): 24 | pass 25 | 26 | 27 | class GetUserConfigProxy(BaseProxy[ReturnValueType]): 28 | def request(self, *args, **kwargs): 29 | logger.info( 30 | *BaseProxy.log_info_args('Get User Config Proxy Request: %s', *args, **kwargs) 31 | ) 32 | return super().request(*args, **kwargs) 33 | 34 | 35 | # Singleton and Adapter of cookiecutter.config.get_user_config 36 | class GetUserConfigProxySingleton(metaclass=Singleton): 37 | def __init__(self, proxy_factory) -> None: 38 | super().__init__() 39 | self._proxy = proxy_factory() 40 | 41 | def __call__(self, *args: Any, **kwds: Any) -> ReturnValueType: 42 | return self._proxy.request(*args, **kwds) 43 | 44 | 45 | get_user_config = GetUserConfigProxySingleton( 46 | lambda: GetUserConfigProxy(GetUserConfigSubject(cookie_get_config)) 47 | ) 48 | -------------------------------------------------------------------------------- /src/cookiecutter_python/cli_handlers.py: -------------------------------------------------------------------------------- 1 | from cookiecutter_python.backend.error_handling import HandlerBuilder 2 | from cookiecutter_python.exceptions import error_2_str 3 | 4 | 5 | def handle_error(error): 6 | return HandlerBuilder.create(error_2_str(error))(error) 7 | -------------------------------------------------------------------------------- /src/cookiecutter_python/cookiecutter.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "project_name": "My New Project", 3 | "project_type": ["module", "module+cli", "pytest-plugin"], 4 | "project_slug": "{{ cookiecutter.project_name|lower|replace(' ', '-') }}", 5 | "pkg_name": "{{ cookiecutter.project_slug|replace('-', '_') }}", 6 | "repo_name": "{{ cookiecutter.project_slug }}", 7 | "readthedocs_project_slug": "{{ cookiecutter.project_slug }}", 8 | "docker_image": "{{ cookiecutter.project_slug }}", 9 | "full_name": "John Doe", 10 | "author": "{{ cookiecutter.full_name }}", 11 | "author_email": "john_doe@protonmail.com", 12 | "github_username": "john-doe-gh-account-name", 13 | "project_short_description": "Create artificial artwork using AI.", 14 | "pypi_subtitle": "{{ cookiecutter.project_short_description }}", 15 | "release_date": "{% now 'utc', '%Y-%m-%d' %}", 16 | "year": "{% now 'utc', '%Y' %}", 17 | "version": "0.0.1", 18 | "initialize_git_repo": ["yes", "no"], 19 | "interpreters": { 20 | "supported-interpreters": [ 21 | "3.6", 22 | "3.7", 23 | "3.8", 24 | "3.9", 25 | "3.10", 26 | "3.11" 27 | ] 28 | }, 29 | "docs_builder": ["sphinx", "mkdocs"], 30 | "rtd_python_version": ["3.8", "3.9", "3.10", "3.11", "3.12"], 31 | "cicd": ["stable", "experimental"] 32 | } 33 | -------------------------------------------------------------------------------- /src/cookiecutter_python/exceptions.py: -------------------------------------------------------------------------------- 1 | import typing as t 2 | 3 | from cookiecutter.exceptions import CookiecutterException, UndefinedVariableInTemplate 4 | 5 | from cookiecutter_python.backend import CheckWebServerError 6 | from cookiecutter_python.backend.hosting_services.exceptions import ( 7 | ContextVariableDoesNotExist, 8 | ) 9 | from cookiecutter_python.utils import load 10 | 11 | 12 | cookiecutter_exceptions = load(CookiecutterException, 'cookiecutter') 13 | 14 | 15 | exceptions: t.MutableMapping[str, t.Tuple[t.Type[Exception], ...]] = { 16 | 'critical': tuple(cookiecutter_exceptions + [ContextVariableDoesNotExist]), # type: ignore 17 | 'non-critical': (CheckWebServerError,), 18 | } 19 | 20 | 21 | def error_2_str(error): 22 | recognized_non_critical = {UndefinedVariableInTemplate} 23 | recognized_critical = set(exceptions['critical']).difference(recognized_non_critical) 24 | 25 | ## Mark NON Critical for program execution ## 26 | # Program should potentially be able to handle that exception and proceed 27 | if isinstance(error, tuple(recognized_non_critical)): 28 | # We 'mark as non-critical', when we find out-of-scope variables (when a 29 | # template uses a variable which is not defined in the context), 30 | # anticipating the injection of out-of-scope variables programmatically. 
31 | return 'non-critical' 32 | 33 | ## Mark CRITICAL for program execution ## 34 | # Program's execution should potentially stop, as unable to handle exception 35 | if isinstance(error, tuple(recognized_critical)): 36 | # We Classify as Critical, all our Generator's backend engine Exceptions 37 | # which are cokiecutter exceptions, but we exclude 38 | # UndefinedVariableInTemplate and we add our Generator's exceptions: 39 | # - ContextVariableDoesNotExist 40 | return 'critical' 41 | -------------------------------------------------------------------------------- /src/cookiecutter_python/handle/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/src/cookiecutter_python/handle/__init__.py -------------------------------------------------------------------------------- /src/cookiecutter_python/handle/dialogs/__init__.py: -------------------------------------------------------------------------------- 1 | from . import lib # noqa 2 | from .dialog import InteractiveDialog 3 | 4 | 5 | __all__ = ['InteractiveDialog'] 6 | -------------------------------------------------------------------------------- /src/cookiecutter_python/handle/dialogs/dialog.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod 2 | 3 | from software_patterns import SubclassRegistry 4 | 5 | 6 | class Dialog: 7 | @abstractmethod 8 | def dialog(self, *args, **kwargs): 9 | raise NotImplementedError 10 | 11 | 12 | class DialogRegistry(SubclassRegistry[Dialog]): 13 | pass 14 | 15 | 16 | class InteractiveDialog(metaclass=DialogRegistry): 17 | pass 18 | -------------------------------------------------------------------------------- /src/cookiecutter_python/handle/dialogs/lib/__init__.py: -------------------------------------------------------------------------------- 1 | from cookiecutter_python.utils import load 2 | 3 | from ..dialog import InteractiveDialog 4 | 5 | 6 | __all__ = ['InteractiveDialog'] 7 | 8 | 9 | # Import all classes subclassing InteractiveDialog 10 | load(InteractiveDialog) 11 | -------------------------------------------------------------------------------- /src/cookiecutter_python/handle/interactive_cli_pipeline.py: -------------------------------------------------------------------------------- 1 | """Handles sequence of Interactive User Dialogs, for Context Information.""" 2 | 3 | from .node_factory import NodeFactory 4 | 5 | 6 | class InteractiveDialogsPipeline: 7 | """Handles sequence of Interactive User Dialogs, for Context Information.""" 8 | 9 | dialogs = [ 10 | 'project-name', 11 | ] 12 | 13 | def process(self, request): 14 | """Process sequence of Interactive User Dialogs, for Context Information.""" 15 | for dialog in self.dialogs: 16 | request = NodeFactory.create(dialog).process(request) 17 | return request 18 | -------------------------------------------------------------------------------- /src/cookiecutter_python/handle/node_base.py: -------------------------------------------------------------------------------- 1 | import typing as t 2 | 3 | from .node_interface import Node 4 | 5 | 6 | class DialogNode(Node[t.List, t.Mapping[str, t.Any]]): 7 | """Handles a single Interactive User Dialog, for Context Information.""" 8 | 9 | def __init__(self, dialog): 10 | self.ela = dialog 11 | 12 | def process(self, request): 13 | """Process a single Interactive User Dialog, for Context 
Information.""" 14 | return self.ela.dialog(*request) 15 | -------------------------------------------------------------------------------- /src/cookiecutter_python/handle/node_factory.py: -------------------------------------------------------------------------------- 1 | from .dialogs import InteractiveDialog 2 | from .node_base import DialogNode 3 | 4 | 5 | class NodeFactory: 6 | @staticmethod 7 | def create(dialog_name: str): 8 | return DialogNode( 9 | InteractiveDialog.create( 10 | dialog_name, 11 | ) 12 | ) 13 | -------------------------------------------------------------------------------- /src/cookiecutter_python/handle/node_interface.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Generic, Optional, TypeVar 3 | 4 | 5 | T = TypeVar('T') 6 | TT = TypeVar('TT') 7 | 8 | 9 | class Node(ABC, Generic[T, TT]): 10 | @abstractmethod 11 | def process(self, request: T) -> Optional[TT]: 12 | raise NotImplementedError 13 | -------------------------------------------------------------------------------- /src/cookiecutter_python/hooks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/src/cookiecutter_python/hooks/__init__.py -------------------------------------------------------------------------------- /src/cookiecutter_python/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/src/cookiecutter_python/py.typed -------------------------------------------------------------------------------- /src/cookiecutter_python/utils.py: -------------------------------------------------------------------------------- 1 | import typing as t 2 | from importlib import import_module 3 | from inspect import isclass 4 | from pathlib import Path 5 | from pkgutil import iter_modules 6 | 7 | from ._find_lib import find_lib 8 | 9 | 10 | T = t.TypeVar('T') 11 | 12 | 13 | def load(interface: t.Type[T], module: t.Optional[str] = None) -> t.List[t.Type[T]]: 14 | """Dynamically import all class objects that implement the given interface. 15 | 16 | The classes (class objects) are discovered and imported in the namespace, by 17 | searching within each module found inside the input 'dire' (path) directory. 18 | 19 | Each class object is an attribute found in a module's namespace. 20 | We classify an attribute as a (correct) "class to import", if the following 21 | python boolean expression evaluates to True: 22 | 23 | isclass(attribute) and issubclass(attribute, interface) 24 | 25 | If 'dire' is not given then we consider the modules that are inside the same 26 | directory as the one where the module of the invoking code resides. 27 | 28 | Args: 29 | interface (Type[T]): the type (ie class) that the imported classes 30 | should 'inherit' (subclass) from 31 | module (str): module dotted-path containing the modules to inspect. Defaults to the 32 | same module (directory) as the one where the module of the invoking 33 | code resides. 
34 | """ 35 | lib_dir: str 36 | dotted_lib_path: str # 37 | lib_dir, dotted_lib_path = find_lib(module=module) 38 | 39 | if not Path(lib_dir).exists(): 40 | raise FileNotFoundError 41 | 42 | objects = [] 43 | 44 | # iterate through the modules inside the LIB directory 45 | for _, module_name, _ in iter_modules([lib_dir]): 46 | # if module has a register_as_subclass decorator then the below import 47 | # will cause the class to be registered in the Facility/Factory Registry 48 | module_object = import_module( 49 | '{package}.{module}'.format(package=dotted_lib_path, module=module_name) 50 | ) 51 | 52 | for attribute_name in dir(module_object): 53 | attribute = getattr(module_object, attribute_name) 54 | 55 | if ( 56 | attribute != interface 57 | and isclass(attribute) 58 | and issubclass(attribute, interface) 59 | ): 60 | objects.append(attribute) 61 | return objects 62 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = 3 | {{ cookiecutter.pkg_name }} 4 | tests 5 | 6 | [report] 7 | show_missing = true 8 | precision = 2 9 | omit = *migrations* 10 | exclude_lines = 11 | raise NotImplementedError 12 | raise NotImplemented 13 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/.github/workflows/codecov-upload.yml: -------------------------------------------------------------------------------- 1 | ###################################### 2 | ## Reusable Codecov Workflow ## 3 | ###################################### 4 | 5 | on: 6 | workflow_call: 7 | inputs: 8 | coverage_artifact: 9 | required: true 10 | type: string 11 | description: "Name of the coverage artifact to upload to Codecov" 12 | verbose: 13 | required: false 14 | type: boolean 15 | description: > 16 | Whether to print more info on Workflow (web) interface; default: true 17 | If true, with "print" more information on $GITHUB_OUTPUT. 
18 | default: true 19 | 20 | jobs: 21 | upload_coverage: 22 | name: Upload Coverage to Codecov 23 | runs-on: ubuntu-latest 24 | steps: 25 | - name: Checkout Repository 26 | uses: actions/checkout@v4 27 | 28 | # Download the coverage artifact 29 | - name: Download Coverage Artifact 30 | uses: actions/download-artifact@v4 31 | with: 32 | name: ${{ "{{" }} inputs.coverage_artifact {{ "}}" }} 33 | 34 | # Get the Codecov binary 35 | - name: Get Codecov Binary 36 | run: | 37 | curl -Os https://uploader.codecov.io/latest/linux/codecov 38 | chmod +x codecov 39 | 40 | # Upload coverage reports to Codecov 41 | - name: Upload Coverage Reports to Codecov 42 | run: | 43 | for file in coverage*.xml; do 44 | OS_NAME=$(echo $file | sed -E "s/coverage-(\w+)-.*/\1/") 45 | PY_VERSION=$(echo $file | sed -E "s/coverage-\w+-(\d+\.\d+).*/\1/") 46 | ./codecov -f $file -e "OS=$OS_NAME,PYTHON=$PY_VERSION" --flags unittests --verbose 47 | echo "[INFO] Sent to Codecov: $file" 48 | done 49 | 50 | # If Verbose, Write to Workflow Output 51 | - name: Report Uploaded Coverage XML Files 52 | if: ${{ "{{" }} inputs.verbose {{ "}}" }} 53 | run: | 54 | for file in coverage*.xml; do 55 | 56 | echo " - Codecov Upload: $file" >> $GITHUB_OUTPUT 57 | 58 | done 59 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/.github/workflows/labeler.yaml: -------------------------------------------------------------------------------- 1 | name: "Pull Request Labeler" 2 | on: 3 | - pull_request_target 4 | 5 | jobs: 6 | label_PR: 7 | permissions: 8 | contents: read 9 | pull-requests: write 10 | runs-on: ubuntu-latest 11 | # This Job behaves as a Listener to PR events, and each step is a Handler 12 | steps: 13 | # HANDLER 1: Label PR, given file changes and Labeling Rules '.github/labeler.yml' 14 | - uses: actions/labeler@v5 15 | with: 16 | # if you want your labels to trigger other Workflows, pass-in a PAT 17 | # with permission for label creation events to trigger listeners 18 | repo-token: ${{ "{{" }} secrets.GITHUB_TOKEN {{ "}}" }} 19 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | .vscode/ 3 | 4 | *.egg-info/ 5 | *.pyc 6 | *\.bak 7 | \.fuse* 8 | .coverage 9 | .DS_Store 10 | *__pycache__ 11 | 12 | docs/_build/ 13 | dist/ 14 | build/ 15 | htmlcov/ 16 | 17 | .tox/ 18 | node_modules 19 | 20 | dependency-graphs/ 21 | test-results/ 22 | uml-diagrams/ 23 | pydoer-graphs/ 24 | 25 | # LOGS 26 | 27 | cookie-py.log 28 | 29 | .fuse_hidden* 30 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/.prospector.yml: -------------------------------------------------------------------------------- 1 | # output-format: json 2 | 3 | strictness: high 4 | test-warnings: true 5 | doc-warnings: false 6 | member-warnings: false 7 | inherits: 8 | - default 9 | ignore-paths: 10 | - docs 11 | ignore-patterns: 12 | - (^|/)skip(this)?(/|$) 13 | autodetect: false 14 | max-line-length: 95 15 | 16 | 17 | # TOOLS 18 | 19 | pyflakes: 20 | run: true 21 | 22 | 23 | pyroma: 24 | run: true 25 | disable: 26 | - PYR15 27 | - PYR18 28 | 29 | dodgy: 30 | run: true 31 | 32 | mccabe: 33 | run: true 34 | options: 35 | max-complexity: 12 36 | 37 | 38 | # INACTIVE 39 | 40 | pylint: 41 | run: false 42 | 43 | bandit: 44 
| run: false 45 | 46 | frosted: 47 | run: false 48 | 49 | pep8: 50 | run: false 51 | 52 | pep257: 53 | run: false 54 | 55 | mypy: 56 | run: false 57 | 58 | vulture: 59 | run: false 60 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the OS, Python version and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "{{ cookiecutter.rtd_python_version }}" 13 | 14 | # ALL JOBS implied: https://docs.readthedocs.io/en/stable/builds.html 15 | jobs: 16 | pre_install: 17 | - python --version 18 | - python -m pip install poetry 19 | - python -m poetry export -o req-docs.txt -E docs 20 | post_install: 21 | - python -m pip install -e . 22 | - python -m pip install pyyaml 23 | pre_build: 24 | - python ./scripts/visualize-ga-workflow.py > ./docs/cicd_mermaid.md 25 | - python ./scripts/visualize-dockerfile.py > ./docs/dockerfile_mermaid.md 26 | 27 | 28 | {% if cookiecutter.docs_builder == "mkdocs" %} 29 | # Build documentation in the "docs/" directory with mkdocs 30 | mkdocs: 31 | configuration: mkdocs.yml 32 | {% elif cookiecutter.docs_builder == "sphinx" %} 33 | # Build documentation in the docs/ directory with Sphinx 34 | sphinx: 35 | builder: html 36 | configuration: docs/conf.py 37 | fail_on_warning: false 38 | 39 | # Optionally build your docs in additional formats such as PDF 40 | formats: 41 | - pdf 42 | - epub 43 | {% endif %} 44 | 45 | 46 | # Optional but recommended, declare the Python requirements required 47 | # to build your documentation 48 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 49 | python: 50 | install: 51 | - requirements: req-docs.txt 52 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | 0.0.1 ({{ cookiecutter.release_date }}) 6 | ======================================= 7 | 8 | | This is the first ever release of the **{{ cookiecutter.pkg_name }}** Python Package. 9 | | The package is open source and is part of the **{{ cookiecutter.project_name }}** Project. 10 | | The project is hosted in a public repository on github at https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.repo_name }} 11 | | The project was scaffolded using the `Cookiecutter Python Package`_ (cookiecutter) Template at https://github.com/boromir674/cookiecutter-python-package/tree/master/src/cookiecutter_python 12 | 13 | | Scaffolding included: 14 | 15 | - **CI Pipeline** running on Github Actions at https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.repo_name }}/actions 16 | - `Test Workflow` running a multi-factor **Build Matrix** spanning different `platform`'s and `python version`'s 17 | 1. Platforms: `ubuntu-latest`, `macos-latest` 18 | 2. Python Interpreters: `3.6`, `3.7`, `3.8`, `3.9`, `3.10` 19 | 20 | - Automated **Test Suite** with parallel Test execution across multiple cpus. 
21 | - Code Coverage 22 | - **Automation** in a 'make' like fashion, using **tox** 23 | - Seamless `Lint`, `Type Check`, `Build` and `Deploy` *operations* 24 | 25 | 26 | .. LINKS 27 | 28 | .. _Cookiecutter Python Package: https://python-package-generator.readthedocs.io/en/master/ 29 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9.16-slim-bullseye as builder 2 | 3 | COPY poetry.lock pyproject.toml ./ 4 | 5 | # Determine where to install poetry 6 | ENV POETRY_HOME=/opt/poetry 7 | 8 | # Install Poetry & generate a requirements.txt file 9 | RUN python -c 'from urllib.request import urlopen; print(urlopen("https://install.python-poetry.org").read().decode())' | python && \ 10 | "$POETRY_HOME/bin/poetry" export -f requirements.txt > requirements.txt 11 | 12 | FROM python:3.9.16-slim-bullseye as install 13 | 14 | # Keep the requirements.txt file from the builder image 15 | COPY --from=builder requirements.txt . 16 | 17 | # Pre emptively add the user's bin folder to PATH 18 | ENV PATH="/root/.local/bin:$PATH" 19 | 20 | RUN apt-get update && \ 21 | apt-get install -y --no-install-recommends build-essential && \ 22 | pip install -U pip && \ 23 | apt-get clean && \ 24 | rm -rf /var/lib/apt/lists/* && \ 25 | pip install --no-cache-dir --user -r requirements.txt 26 | 27 | COPY . . 28 | RUN pip install --no-cache-dir --user . 29 | {% if cookiecutter.project_type == "module+cli" %} 30 | 31 | CMD [ "{{ cookiecutter.pkg_name|replace('_', '-') }}" ] 32 | {% endif %} -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include README.rst 3 | include CONTRIBUTING.md 4 | include CHANGELOG.rst 5 | 6 | recursive-include src *.py 7 | recursive-include src *.pyi 8 | 9 | recursive-include tests *.py 10 | 11 | recursive-include docs *.rst 12 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/src/{{ cookiecutter.pkg_name }}/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '{{ cookiecutter.version }}' 2 | {% if cookiecutter.project_type == "pytest-plugin" %} 3 | from .fixtures import my_fixture 4 | 5 | __all__ = ['my_fixture'] 6 | {% endif %} -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/src/{{ cookiecutter.pkg_name }}/__main__.py: -------------------------------------------------------------------------------- 1 | """Run `python -m {{ cookiecutter.pkg_name }}`. 2 | 3 | Allow running {{ cookiecutter.project_name }}, also by invoking 4 | the python module: 5 | 6 | `python -m {{ cookiecutter.pkg_name }}` 7 | 8 | This is an alternative to directly invoking the cli that uses python as the 9 | "entrypoint". 
10 | """ 11 | 12 | from __future__ import absolute_import 13 | 14 | from {{ cookiecutter.pkg_name }}.cli import main 15 | 16 | 17 | if __name__ == "__main__": # pragma: no cover 18 | main(prog_name="{{ cookiecutter.pkg_name|replace('_', '-') }}") # pylint: disable=unexpected-keyword-arg 19 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/src/{{ cookiecutter.pkg_name }}/_logging.py: -------------------------------------------------------------------------------- 1 | """Set up Application Logs 2 | 3 | This module defines how the emitted application logs are handled and where 4 | they are written/streamed. 5 | The application logs are written in full details (ie with timestamps) to a file 6 | and also streamed to the console in a more concise format. 7 | 8 | # Console/Terminal Log: 9 | - We Stream Logs of INFO (and above) Level on Console's stderr 10 | - The rendered Log format is: : 11 | 12 | # Disk File Log: 13 | - we Write Logs of ALL Levels on a Disk File 14 | - The rendered Log format is: : 15 | - The FILE_TARGET_LOGS, variable (see below), defines the path to the log file 16 | 17 | Log Levels: 18 | - CRITICAL 19 | - ERROR 20 | - WARNING 21 | - INFO 22 | - DEBUG 23 | 24 | Usage: 25 | Do a 'from . import _logging' in the root __init__.py of your package and 26 | all submodules 'inherit' the logging configuration 27 | """ 28 | 29 | import logging 30 | 31 | 32 | # for 'biskotaki' app/code 33 | FILE_TARGET_LOGS = 'biskotaki.log' 34 | 35 | #### FILE LOGGING 36 | # set up logging to file for DEBUG Level and above 37 | logging.basicConfig( 38 | level=logging.DEBUG, 39 | format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s', 40 | datefmt='%m-%d %H:%M', 41 | filename=FILE_TARGET_LOGS, 42 | filemode='w', 43 | ) 44 | 45 | #### CONSOLE LOGGING 46 | console = logging.StreamHandler() 47 | 48 | ### Handler which writes DEBUG messages or higher to the sys.stderr ### 49 | console.setLevel(logging.DEBUG) 50 | # console.setLevel(logging.INFO) 51 | 52 | # set a format which is simpler for console use 53 | formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s') 54 | # tell the handler to use this format 55 | console.setFormatter(formatter) 56 | # add the handler to the root logger 57 | logging.getLogger('').addHandler(console) 58 | 59 | 60 | # Now, we can log to the root logger, or any other logger. First the root... 61 | # logging.info('Blah blah') 62 | 63 | # Now, define a couple of other loggers which might represent areas in your 64 | # application: 65 | 66 | # logger1 = logging.getLogger('myapp.area1') 67 | # logger2 = logging.getLogger('myapp.area2') 68 | # logger3 = logging.getLogger(__name__) 69 | 70 | # logger1.debug('balh blah') 71 | # logger1.info('balh blah') 72 | # logger2.warning('balh blah') 73 | # logger3.error('balh blah') 74 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/src/{{ cookiecutter.pkg_name }}/cli.py: -------------------------------------------------------------------------------- 1 | """Main `{{ cookiecutter.pkg_name }}` CLI.""" 2 | 3 | import os 4 | import sys 5 | 6 | import click 7 | 8 | from . import __version__ 9 | 10 | 11 | this_file_location = os.path.dirname(os.path.realpath(os.path.abspath(__file__))) 12 | 13 | 14 | def version_msg(): 15 | """{{ cookiecutter.pkg_name }} version, location and Python version. 
16 | 17 | Get message about {{ cookiecutter.pkg_name }} version, location 18 | and Python version. 19 | """ 20 | python_version = sys.version[:3] 21 | message = u"{{ cookiecutter.project_name }} %(version)s from {} (Python {})" 22 | location = os.path.dirname(this_file_location) 23 | return message.format(location, python_version) 24 | 25 | 26 | @click.command(context_settings=dict(help_option_names=[u'-h', u'--help'])) 27 | @click.version_option(__version__, u'-V', u'--version', message=version_msg()) 28 | # @click.option('-v', '--verbose', is_flag=True, help='Print debug information', default=False) 29 | def main( 30 | # verbose, 31 | ): 32 | """TODO Write this content that gets rendered when invoking with the --help flag!""" 33 | try: 34 | pass 35 | except Exception as error: # pylint: disable=broad-except 36 | click.echo(error) 37 | sys.exit(1) 38 | 39 | 40 | if __name__ == "__main__": # pragma: no cover 41 | main() 42 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/src/{{ cookiecutter.pkg_name }}/fixtures.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=redefined-outer-name 2 | import pytest 3 | 4 | 5 | @pytest.fixture 6 | def my_fixture(): 7 | return 'Implement Me!' 8 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/src/{{ cookiecutter.pkg_name }}/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/src/cookiecutter_python/{{ cookiecutter.project_slug }}/src/{{ cookiecutter.pkg_name }}/py.typed -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/tests/conftest.py: -------------------------------------------------------------------------------- 1 | pytest_plugins = "pytester" 2 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/tests/smoke_test.py: -------------------------------------------------------------------------------- 1 | def test_import_module(): 2 | import {{ cookiecutter.pkg_name }} 3 | 4 | assert {{ cookiecutter.pkg_name }} is not None 5 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/tests/test_cli.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.fixture 5 | def get_main_arguments(): 6 | return type('A', (), {'command_line_script_args': None, 'main_function_kwargs': {}}) 7 | 8 | 9 | @pytest.mark.runner_setup(mix_stderr=False) 10 | def test_cli( 11 | get_main_arguments, 12 | isolated_cli_runner, 13 | ): 14 | from {{ cookiecutter.pkg_name }}.cli import main 15 | 16 | main_arguments = get_main_arguments() 17 | result = isolated_cli_runner.invoke( 18 | main, 19 | args=main_arguments.command_line_script_args, 20 | input=None, 21 | env=None, 22 | catch_exceptions=False, 23 | **main_arguments.main_function_kwargs, 24 | ) 25 | assert result.exit_code == 0 26 | assert result.stdout == '' 27 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/tests/test_invoking_cli.py: 
-------------------------------------------------------------------------------- 1 | import sys 2 | 3 | 4 | def test_invoking_cli_as_python_module(run_subprocess): 5 | result = run_subprocess( 6 | sys.executable, 7 | '-m', 8 | '{{ cookiecutter.pkg_name }}', 9 | '--help', 10 | ) 11 | assert result.exit_code == 0 12 | assert result.stderr == '' 13 | assert result.stdout.split('\n')[0] == "Usage: {{ cookiecutter.pkg_name|replace('_', '-') }} [OPTIONS]" 14 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/tests/test_my_fixture.py: -------------------------------------------------------------------------------- 1 | def test_fixture(testdir): 2 | testdir.makepyfile( 3 | """ 4 | import pytest 5 | 6 | def test_fixture(my_fixture): 7 | 8 | assert my_fixture == 'Implement Me!' 9 | """ 10 | ) 11 | result = testdir.runpytest("--verbose") 12 | result.stdout.fnmatch_lines("test_fixture.py::test_fixture PASSED*") 13 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "mkdocs" %}docs{% else %}PyGen_TO_DELETE{% endif %}/index.md: -------------------------------------------------------------------------------- 1 | # Welcome to {{ cookiecutter.project_name }} Documentation! 2 | 3 | [//]: # (Render a few important badges: CI/CD Status, RTD, Coverage, Latest Tag/Sem Ver) 4 | 5 | {{ cookiecutter.project_name }} is an open source TODO 6 | 7 | [//]: # (Say a few words about what this does) 8 | 9 | [//]: # (Maybe state Goal and/or small motivation note) 10 | 11 | [//]: # (Leverage Mermaid to show a high-level view of what happens) 12 | 13 | [//]: # (Ideally record video with demo and embed here) 14 | 15 | ## Quick-start 16 | 17 | TODO 18 | 19 | ## :material-book-open: Documentation 20 | 21 | Read about how to use the `{{ cookiecutter.pkg_name }}` package, understand its features 22 | and capabilities. 23 | 24 | Learn how to use the `{{ cookiecutter.pkg_name }}` package and achieve goals by leveraging its features 25 | and capabilities. 26 | 27 | 28 | <div class="grid cards" markdown>
29 | 30 | 31 | - :fontawesome-regular-circle-play:{ .lg .middle } __`How-to` Guides__ 32 | 33 | --- 34 | 35 | Step-by-step `Guides` that leverage **{{ cookiecutter.pkg_name }}** to achieve `Goals`, such as: 36 | 37 | - TODO 1 38 | - TODO 2 39 | 40 | [:octicons-arrow-right-24: :material-rocket-launch: `Install`, `Run`, `Use`](./guides/index.md) 41 | 42 | 43 | - :material-application-brackets-outline:{ .lg .middle } __API References__ 44 | 45 | --- 46 | [//]: # (link ./reference/CLI.md does not exist yet, it is generate at docs build-time) 47 | [:octicons-arrow-right-24: :material-console:{ .lg .middle } {{ cookiecutter.pkg_name|replace('_', '-') }} CLI](./reference/CLI.md) 48 | 49 | [//]: # (link ./reference/{{ cookiecutter.pkg_name }}.md does not exist yet, it is generate at docs build-time) 50 | [:octicons-arrow-right-24: :material-language-python: API Refs](./reference/{{ cookiecutter.pkg_name }}) 51 | 52 | 53 | - :fontawesome-solid-book-open:{ .lg .middle } __Topics__ 54 | 55 | --- 56 | 57 | **Explanations / Topics** 58 | 59 | [:octicons-arrow-right-24: :material-language-python: Architecture ](./topics/arch.md) 60 | 61 | [//]: # (Add important Topics here) 62 | 63 | 64 | - :fontawesome-solid-book-open:{ .lg .middle } __Development Topics__ 65 | 66 | --- 67 | 68 | **Topics / Explanations** on Development 69 | 70 | [:octicons-arrow-right-24: :material-hammer-screwdriver: Development Topics ](./topics/development/index.md) 71 | 72 | 73 |
74 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "mkdocs" %}docs{% else %}PyGen_TO_DELETE{% endif %}/tags.md: -------------------------------------------------------------------------------- 1 | # Tags 2 | 3 | Following is a list of relevant tags: 4 | 5 | [TAGS] -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "mkdocs" %}docs{% else %}PyGen_TO_DELETE{% endif %}/topics/arch.md: -------------------------------------------------------------------------------- 1 | # Software Architecture 2 | 3 | [//]: # (this is a comment) 4 | [//]: # (Description of what is this Page) 5 | 6 | Here you can find the software architecture of the project. 7 | 8 | ## Module Dependencies 9 | 10 | [//]: # (Description of what is this Section) 11 | 12 | Here you can find the dependencies between the modules of the project. 13 | 14 | The dependencies are Visualized as a Graph, where Nodes are the modules and the Edges are python ``import`` statements. 15 | 16 | The dependencies are visualized, after running the following command: 17 | 18 | ```sh 19 | tox -e pydeps 20 | ``` 21 | 22 | !!! Tip 23 | 24 | Right-click and open image in new Tab for better inspection 25 | 26 | ### First-party Dependencies 27 | 28 | [//]: # (Inner Python Imports SVG Graph) 29 | 30 | ![First-party Dependencies](../assets/deps_inner.svg) 31 | 32 | 33 | ### First and Third party Dependencies 34 | 35 | [//]: # (First-Party with 3rd-party having all incoming edges to our individual Modules) 36 | 37 | ![All Dependencies - C](../assets/deps_all.svg) 38 | 39 | 40 | ### 1st+3rd party Deps - 1st as Cluster 41 | 42 | [//]: # ("Boxed" First-Party with 3rd-party having all incoming edges to our Box) 43 | 44 | ![All Dependencies - B](../assets/deps_ktc.svg) 45 | 46 | 47 | ### 1st+3rd party Deps - 1st+3rd as Cluster 48 | 49 | [//]: # ("Boxed" First-Party with 3rd-party having 1 incoming edge to our Box) 50 | 51 | ![All Dependencies - A](../assets/deps_ktc-mcs_2.svg) 52 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "mkdocs" %}docs{% else %}PyGen_TO_DELETE{% endif %}/topics/development/build_process_DAG.md: -------------------------------------------------------------------------------- 1 | ## Docker Build Process DAG 2 | 3 | > Understand how we leverage `Docker` in the build process. 
4 | 5 | The project features a `Dockerfile`, designed for 6 | 7 | - multi-stage builds 8 | - parallel stage building (assuming appropriate build backend) 9 | - size minimization of the produced `Docker` image 10 | - minimization of vulnerabilities 11 | 12 | ## Dockerfile visualized as Directed Acyclic Graph (DAG) 13 | 14 | > Understand the execution path of `docker build`, via **DAG visualization** 15 | 16 | {% raw %}{% include 'topics/development/dockerfile_mermaid.md' %}{% endraw %} 17 | 18 | - `solid boxes` represent distinct docker **stages** and their *aliases* 19 | - `solid arrows` represent **stage dependencies**; `FROM a AS b` type of instructions 20 | - `dotted arrows` represent **stage COPY**: `COPY --from=a /path /path` type of instructions 21 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "mkdocs" %}docs{% else %}PyGen_TO_DELETE{% endif %}/topics/development/cicd.md: -------------------------------------------------------------------------------- 1 | --- 2 | tags: 3 | - CICD 4 | --- 5 | 6 | ## CI/CD Pipeline 7 | 8 | > Understand what Jobs are part of the CI/CD Pipeline 9 | 10 | **CI/CD Pipeline** is implemented as `Github Actions Workflow` in a YAML file format. 11 | 12 | ### Workflow of Jobs: visualized as a Directed Acyclic Graph (DAG) 13 | 14 | > Understand the Job Dependencies at "compile time" 15 | 16 | **YAML Workflow: ./.github/workflows/cicd.yml** 17 | 18 | {% raw %}{% include 'topics/development/cicd_mermaid.md' %}{% endraw %} 19 | 20 | - `solid boxes` represent **Jobs** declared in the `jobs` array of the YAML Workflow 21 | - `solid arrows` represent **Job Dependencies**; `job_A.needs: [job_b, job_c]` type of yaml objects 22 | 23 | 24 | [//]: # (TODO add section to EXPLAIN the CI/CD Pipeline at runtime) 25 | 26 | [//]: # (TODO make screenshot of CI Server run and paste here) 27 | 28 | [//]: # (TODO add link to live CI server Pipeline RUNS) 29 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "mkdocs" %}docs{% else %}PyGen_TO_DELETE{% endif %}/topics/development/cicd_mermaid.md: -------------------------------------------------------------------------------- 1 | ```mermaid 2 | graph LR; 3 | test_n_build 4 | test_n_build --> codecov_coverage_host 5 | test_n_build --> docker_build 6 | lint 7 | docs 8 | code_visualization 9 | test_n_build --> signal_deploy 10 | signal_deploy --> pypi_publish 11 | signal_deploy --> gh_release 12 | ``` 13 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "mkdocs" %}docs{% else %}PyGen_TO_DELETE{% endif %}/topics/development/dockerfile_mermaid.md: -------------------------------------------------------------------------------- 1 | ## Dockerfile Flow Chart 2 | 3 | **Dockerfile: Dockerfile** 4 | 5 | ```mermaid 6 | graph TB; 7 | python:3.9.16-slim-bullseye --> builder 8 | python:3.9.16-slim-bullseye --> install 9 | builder -.
"requirements.txt" .-> install 10 | ``` 11 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "mkdocs" %}docs{% else %}PyGen_TO_DELETE{% endif %}/topics/development/index.md: -------------------------------------------------------------------------------- 1 | # Development 2 | Here you will find topics related to `Development`, the `build`, and the 3 | `CI/CD` Pipeline design of the **Biskotaki** open-source project. 4 | 5 | 6 |
7 | 8 | 9 | - :material-docker:{ .lg .middle } __Docker__ 10 | 11 | --- 12 | 13 | Dockerfile design, Build Process 14 | 15 | [:octicons-arrow-right-24: Topic ](./build_process_DAG.md) 16 | 17 | 18 | - :simple-githubactions:{ .lg .middle } __CI/CD Pipeline__ 19 | 20 | --- 21 | 22 | Github Actions Workflow of Jobs, visualized as a DAG 23 | 24 | [:octicons-arrow-right-24: Topic ](./cicd.md) 25 | 26 | 27 | - :material-state-machine:{ .lg .middle } __Git Ops Processes__ 28 | 29 | --- 30 | 31 | Step-by-step Processes, leveraging `git` and `CI` for **Releasing changes** 32 | 33 | [:octicons-arrow-right-24: Docs ](./gitops/) 34 | 35 |
36 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "sphinx" %}docs{% else %}PyGen_TO_DELETE{% endif %}/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "sphinx" %}docs{% else %}PyGen_TO_DELETE{% endif %}/contents/10_introduction.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Introduction 3 | ============ 4 | 5 | | This is **{{ cookiecutter.project_name }}**, a *Python Package* designed to ... 6 | 7 | | The goal of this project is to TODO Document 8 | | Additionally, TODO Document 9 | 10 | | This documentation aims to help people understand what the package's features are and to demonstrate 11 | | how to leverage them for their use cases. 12 | | It also presents the overall package design. 13 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "sphinx" %}docs{% else %}PyGen_TO_DELETE{% endif %}/contents/20_why_this_package.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | Why this Package? 3 | ================= 4 | 5 | So, why would one opt for this Package? 6 | 7 | It is **easy** to *install* (using pip) and intuitive to *use*. 8 | 9 | **{{ cookiecutter.project_name }}** features TODO Document 10 | 11 | Well-tested against multiple Python Interpreter versions (3.6 - 3.10), 12 | tested on both *Linux* (Ubuntu) and *Darwin* (Macos) platforms. 13 | 14 | Tests trigger automatically on **CI**. 15 | The package's releases follow **Semantic Versioning**. 16 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "sphinx" %}docs{% else %}PyGen_TO_DELETE{% endif %}/contents/30_usage.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Usage 3 | ===== 4 | 5 | ------------ 6 | Installation 7 | ------------ 8 | 9 | | **{{ cookiecutter.pkg_name }}** is available on PyPI, hence you can use `pip` to install it. 10 | 11 | It is recommended to perform the installation in an isolated `python virtual environment` (env). 12 | You can create and activate an `env` using any tool of your preference (ie `virtualenv`, `venv`, `pyenv`). 13 | 14 | Assuming you have 'activated' a `python virtual environment`: 15 | 16 | ..
code-block:: shell 17 | 18 | python -m pip install {{ cookiecutter.project_slug|replace('_', '-') }} 19 | 20 | 21 | --------------- 22 | Simple Use Case 23 | --------------- 24 | 25 | | Common Use Case for the {{ cookiecutter.pkg_name }} is to TODO Document 26 | 27 | TODO Document 28 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "sphinx" %}docs{% else %}PyGen_TO_DELETE{% endif %}/contents/40_modules.rst: -------------------------------------------------------------------------------- 1 | {{ cookiecutter.pkg_name }} 2 | =================================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | {{ cookiecutter.pkg_name }} 8 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "sphinx" %}docs{% else %}PyGen_TO_DELETE{% endif %}/contents/{{ cookiecutter.pkg_name }}.rst: -------------------------------------------------------------------------------- 1 | {{ cookiecutter.pkg_name }} package 2 | ===================================================================== 3 | 4 | 5 | Module contents 6 | --------------- 7 | 8 | .. automodule:: {{ cookiecutter.pkg_name }} 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "sphinx" %}docs{% else %}PyGen_TO_DELETE{% endif %}/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to {{ cookiecutter.project_name }} documentation! 2 | ========================================================================================= 3 | 4 | .. include:: ../README.rst 5 | 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | :caption: Contents: 10 | 11 | contents/10_introduction 12 | contents/20_why_this_package 13 | contents/30_usage 14 | contents/40_modules 15 | 16 | 17 | Indices and tables 18 | ================== 19 | 20 | * :ref:`genindex` 21 | * :ref:`modindex` 22 | * :ref:`search` 23 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "sphinx" %}docs{% else %}PyGen_TO_DELETE{% endif %}/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /src/cookiecutter_python/{{ cookiecutter.project_slug }}/{% if cookiecutter.docs_builder == "sphinx" %}docs{% else %}PyGen_TO_DELETE{% endif %}/spelling_wordlist.txt: -------------------------------------------------------------------------------- 1 | virtualenv 2 | macOS 3 | Macos 4 | conda 5 | env 6 | 7 | Quickstart 8 | Submodules 9 | Subpackages 10 | 11 | # python modules/object names 12 | {{ cookiecutter.pkg_name }} 13 | utils 14 | metaclass 15 | args 16 | kwargs 17 | Iterable 18 | json 19 | len 20 | 21 | # Software Engineering 22 | runnable 23 | instantiation 24 | subclasses 25 | dev 26 | templating 27 | linter 28 | 29 | deserialization 30 | discretization 31 | interpretable 32 | pre 33 | accomodate 34 | eg 35 | ie 36 | iterable 37 | indexable 38 | nb 39 | quantisized 40 | runtime 41 | 42 | # 2nd, 3rd 43 | nd 44 | 45 | cookiecutter 46 | pypi 47 | sdist 48 | cpu 49 | 50 | # Github 51 | github 52 | 53 | # Open Source Software Licences 54 | affero 55 | -------------------------------------------------------------------------------- /src/stubs/cookiecutter/__init__.pyi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/src/stubs/cookiecutter/__init__.pyi -------------------------------------------------------------------------------- /src/stubs/cookiecutter/config.pyi: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Any, MutableMapping, Optional, Union 3 | 4 | def get_user_config( 5 | config_file: Optional[str], default_dict: Optional[bool] 6 | ) -> MutableMapping[str, Any]: ... 7 | def get_config(config_path: Union[str, Path]) -> MutableMapping[str, Any]: ... 8 | -------------------------------------------------------------------------------- /src/stubs/cookiecutter/exceptions.pyi: -------------------------------------------------------------------------------- 1 | class CookiecutterException(Exception): ... 2 | class InvalidConfiguration(Exception): ... 3 | class UndefinedVariableInTemplate(Exception): ... 4 | class ContextDecodingException(Exception): ... 5 | class NonTemplatedInputDirException(Exception): ... 6 | -------------------------------------------------------------------------------- /src/stubs/cookiecutter/generate.pyi: -------------------------------------------------------------------------------- 1 | # # Prod function 2 | # def generate_context( 3 | # context_file='cookiecutter.json', default_context=None, extra_context=None 4 | # ): 5 | 6 | from typing import Any, Mapping, Optional 7 | 8 | def generate_context( 9 | context_file: str = 'cookiecutter.json', 10 | default_context: Optional[Mapping[str, Any]] = None, 11 | extra_context: Optional[Mapping[str, Any]] = None, 12 | ) -> Mapping[str, Any]: ... 
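The `cookiecutter` stubs above pin down only the handful of call signatures that this package type-checks against. Below is a minimal, hypothetical sketch of code written against `config.pyi` and `generate.pyi` — the config path and the template's `cookiecutter.json` location are taken from this repository's tree, while the overall flow is an illustrative assumption rather than the generator's actual implementation:

```python
from cookiecutter.config import get_user_config
from cookiecutter.generate import generate_context

# Load a user-supplied YAML config (signature as declared in config.pyi above)
user_config = get_user_config(config_file='.github/biskotaki.yaml', default_dict=False)

# Build a rendering context from the template's cookiecutter.json,
# letting the user's 'default_context' override the template defaults
context = generate_context(
    context_file='src/cookiecutter_python/cookiecutter.json',
    default_context=user_config.get('default_context'),
    extra_context=None,
)
print(sorted(context))
```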
13 | -------------------------------------------------------------------------------- /src/stubs/cookiecutter/main.pyi: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | def cookiecutter( 4 | template: str, 5 | checkout: Optional[str], 6 | no_input: bool, 7 | extra_context: Optional[dict], 8 | replay: bool, 9 | overwrite_if_exists: bool, 10 | output_dir: Optional[str], 11 | config_file: Optional[str], 12 | default_config: bool, 13 | password: Optional[str], 14 | directory: Optional[str], 15 | skip_if_file_exists: bool, 16 | ) -> str: ... 17 | -------------------------------------------------------------------------------- /src/stubs/git/__init__.pyi: -------------------------------------------------------------------------------- 1 | import os 2 | import typing as t 3 | 4 | class Repo: 5 | def __init__(self, folder_path: str, **kwargs: t.Any) -> None: ... 6 | @classmethod 7 | def init( 8 | cls, 9 | path: t.Union[t.Union[str, "os.PathLike[str]"], None] = None, 10 | mkdir: bool = True, 11 | # odbt: t.Type[GitCmdObjectDB] = GitCmdObjectDB, 12 | expand_vars: bool = True, 13 | **kwargs: t.Any, 14 | ) -> "Repo": ... 15 | def is_dirty(self, **kwargs: t.Any) -> bool: ... 16 | @property 17 | def git(self) -> t.Any: ... 18 | 19 | class Actor: 20 | def __init__(self, name: str, email: str) -> None: ... 21 | -------------------------------------------------------------------------------- /src/stubs/git/exc.pyi: -------------------------------------------------------------------------------- 1 | class InvalidGitRepositoryError(Exception): 2 | pass 3 | -------------------------------------------------------------------------------- /src/stubs/requests_futures/__init__.pyi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/src/stubs/requests_futures/__init__.pyi -------------------------------------------------------------------------------- /src/stubs/requests_futures/sessions.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | class Response: 4 | status_code: int 5 | 6 | class Future: 7 | def result(self) -> Response: ... 8 | 9 | class FuturesSession: 10 | def get(self, url: str, **kwargs: Any) -> Future: ... 
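Likewise, the `requests_futures` stub covers just the asynchronous-HTTP surface that the generator's hosting-service checks appear to rely on. A rough sketch of code typed against it — the helper name and the "a 404 means the name is free" rule are illustrative assumptions here, not the package's actual logic:

```python
from requests_futures.sessions import FuturesSession

def name_seems_available(url: str) -> bool:
    """Guess whether a project name is still free on a hosting service."""
    session = FuturesSession()
    future = session.get(url)    # returns immediately with a Future
    response = future.result()   # block only when the answer is needed
    return response.status_code == 404

# e.g. name_seems_available('https://pypi.org/project/biskotaki/')
```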
11 | -------------------------------------------------------------------------------- /tests/biskotaki_ci/test_regression_biskotaki.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | 6 | # the files we intend to check for, in a generated biskotaki project 7 | # these files should help build regression tests, as a minimum set to verify 8 | @pytest.fixture( 9 | params=[ 10 | 'scripts/parse_version.py', 11 | 'scripts/visualize-dockerfile.py', 12 | 'scripts/visualize-ga-workflow.py', 13 | ] 14 | ) 15 | def biskotaki_file_expected(request): 16 | return request.param 17 | 18 | 19 | # we are concerned with different assertions, given the same generated project 20 | # so we use a "fixture" to receive the same generated project, to test against 21 | 22 | 23 | # tests on Project Generated from .github/biskotaki.yaml 24 | def test_gen_ci_biskotaki_has_expected_files( 25 | biskotaki_file_expected, 26 | biskotaki_ci_project, 27 | ): 28 | ## GIVEN freshly Generated Project, with User Config '.github/biskotaki.yaml' 29 | 30 | ## AND a file, that we expect to be generated 31 | # supplied by the 'biskotaki_file_expected' fixture, passed in this test 32 | expected_file: str = biskotaki_file_expected 33 | assert isinstance(expected_file, str), f"expected_file: {expected_file} is not a string" 34 | expected_file_path: Path = Path(expected_file) 35 | # sanity that path is relative, so that we can use it to check for existence 36 | assert ( 37 | not expected_file_path.is_absolute() 38 | ), f"expected_file_path: {expected_file_path} is not relative" 39 | 40 | ## WHEN we check if the generated project has the expected files 41 | ERROR_MSG = ( 42 | f"File: {expected_file_path} does not exist\n" 43 | f"Relative File path {expected_file_path}, and gen Biskotaki Project Dir: {biskotaki_ci_project}, do not make for an existing file\n" 44 | f" Possible causes:\n" 45 | " - This could be due to the Generator failing to create the File (ie bug)\n" 46 | " - Could be that test is falsely expecting File in Biskotaki.\n" 47 | " How to fix:\n" 48 | " - If this is a bug, then we caught a Regression Error -> fix the Generator\n" 49 | " - If this is a false positive, then we need to update the test to not expect this file\n" 50 | " For this, we should also Advertise that this file is no longer generated\n\n" 51 | " Make sure we communicate this in: Docs, Readme, PR, Release Note, Sem Ver Tag, etc\n\n" 52 | " It could be a 'Public API' change, hence this verbose error message\n" 53 | ) 54 | 55 | assert (biskotaki_ci_project / expected_file_path).exists(), ERROR_MSG 56 | -------------------------------------------------------------------------------- /tests/data/biskotaki-with-no-docs-specs.yaml: -------------------------------------------------------------------------------- 1 | default_context: 2 | project_name: Biskotaki 3 | project_type: module 4 | project_slug: biskotaki 5 | repo_name: biskotaki 6 | full_name: Konstantinos Lampridis 7 | author: Konstantinos Lampridis 8 | author_email: k.lampridis@hotmail.com 9 | github_username: boromir674 10 | project_short_description: Project entirely generated using https://github.com/boromir674/cookiecutter-python-package/ 11 | initialize_git_repo: 'no' 12 | -------------------------------------------------------------------------------- /tests/data/biskotaki-without-interpreters.yaml: -------------------------------------------------------------------------------- 1 | default_context: 2 | project_name: 
Biskotaki 3 | project_type: module 4 | project_slug: biskotaki 5 | docker_image: biskotaki 6 | repo_name: biskotaki 7 | pkg_name: biskotaki 8 | full_name: Konstantinos Lampridis 9 | author: Konstantinos Lampridis 10 | author_email: k.lampridis@hotmail.com 11 | github_username: boromir674 12 | project_short_description: Project entirely generated using https://github.com/boromir674/cookiecutter-python-package/ 13 | initialize_git_repo: 'no' 14 | ## READ THE DOCS CI ## 15 | docs_builder: sphinx 16 | rtd_python_version: "3.10" 17 | -------------------------------------------------------------------------------- /tests/data/correct_python_package_names.txt: -------------------------------------------------------------------------------- 1 | absl 2 | artificial_artwork 3 | coverage 4 | _distutils_hack 5 | google_auth_oauthlib 6 | h5py 7 | PIL 8 | py 9 | pyasn1 10 | pyasn1_modules 11 | __pycache__ 12 | pyparsing 13 | _pytest 14 | pytest_cov 15 | rsa 16 | six 17 | urllib3 18 | -------------------------------------------------------------------------------- /tests/data/gold-standard.yml: -------------------------------------------------------------------------------- 1 | default_context: 2 | project_name: Biskotaki Gold Standard 3 | project_type: module+cli 4 | project_slug: biskotaki-gold-standard 5 | pkg_name: biskotakigold 6 | repo_name: biskotaki-gold 7 | readthedocs_project_slug: biskotaki-gold 8 | docker_image: bgs 9 | full_name: Konstantinos Lampridis 10 | author: Konstantinos Lampridis 11 | email: k.lampridis@hotmail.com 12 | author_email: k.lampridis@hotmail.com 13 | github_username: boromir674 14 | project_short_description: Project generated from https://github.com/boromir674/cookiecutter-python-package/ 15 | initialize_git_repo: 'no' 16 | 17 | # Python 3.12 is in bugfix mode, same as 3.11 18 | interpreters: {"supported-interpreters": ["3.10", "3.11", "3.12"]} 19 | 20 | ## Documentation Config ## 21 | docs_builder: "mkdocs" 22 | ## READ THE DOCS CI Config ## 23 | rtd_python_version: "3.11" 24 | cicd: 'experimental' 25 | -------------------------------------------------------------------------------- /tests/data/pytest-fixture.yaml: -------------------------------------------------------------------------------- 1 | default_context: 2 | project_name: My Fixture 3 | project_type: pytest-plugin 4 | project_slug: my-fixture 5 | repo_name: my-fixture 6 | pkg_name: my_fixture 7 | full_name: Konstantinos Lampridis 8 | author: Konstantinos Lampridis 9 | email: k.lampridis@hotmail.com 10 | author_email: k.lampridis@hotmail.com 11 | github_username: boromir674 12 | project_short_description: Project entirely generated using https://github.com/boromir674/cookiecutter-python-package/ 13 | initialize_git_repo: 'yes' 14 | docs_builder: 'mkdocs' 15 | rtd_python_version: "3.10" 16 | -------------------------------------------------------------------------------- /tests/data/rendering/only_list_template/cookiecutter.json: -------------------------------------------------------------------------------- 1 | { 2 | "project_dir_name": "unit-test-new-project", 3 | "some_setting": ["some_option", "another_option"] 4 | } 5 | -------------------------------------------------------------------------------- /tests/data/rendering/only_list_template/hooks/pre_gen_project.py: -------------------------------------------------------------------------------- 1 | # 2 | -------------------------------------------------------------------------------- /tests/data/rendering/only_list_template/{{ 
cookiecutter.project_dir_name }}/a.txt: -------------------------------------------------------------------------------- 1 | ELA{% if cookiecutter.some_setting == "some_option" %} 2 | some_option{% else %} 3 | another_option 4 | {% endif %} -------------------------------------------------------------------------------- /tests/data/rendering/user_config.yml: -------------------------------------------------------------------------------- 1 | default_context: 2 | project_name: My Fixture 3 | project_type: pytest-plugin 4 | project_slug: my-fixture 5 | repo_name: my-fixture 6 | pkg_name: my_fixture 7 | full_name: Konstantinos Lampridis 8 | author: Konstantinos Lampridis 9 | email: k.lampridis@hotmail.com 10 | author_email: k.lampridis@hotmail.com 11 | github_username: boromir674 12 | project_short_description: Project entirely generated using https://github.com/boromir674/cookiecutter-python-package/ 13 | initialize_git_repo: 'no' 14 | docs_builder: mkdocs 15 | rtd_python_version: "3.10" 16 | temp_var: a 17 | some_setting: another_option -------------------------------------------------------------------------------- /tests/data/snapshots/README.md: -------------------------------------------------------------------------------- 1 | # Snapshots of Generator Output for Regression Testing 2 | 3 | We maintain 2 `Biskotaki` Projects, Generated with input `User Config`, 4 | the [./.github/biskotaki.yaml](../../../.github/biskotaki.yaml) file. 5 | 6 | Since `rendering` is involved in the `Generation`, we call them `Snapshots`. 7 | 8 | - [biskotaki-no-input](./biskotaki-no-input/) Generated with `Interactive mode` OFF 9 | - [biskotaki-interactive](./biskotaki-interactive/) Generated with `Interactive mode` ON 10 | 11 | They should correspond to what gets `rendered`, using the latest `Generator`. 12 | 13 | They should both correspond to output produced using the **latest** version of `Generator` (ie latest python distribution release on PyPI). 14 | 15 | 16 | ## Maintaining the Snapshots 17 | 18 | ### Snapshot `biskotaki-no-input` -> Interactive Mode OFF 19 | 20 | 1. **Automatically Update Test Snapshot:** 21 | 22 | Optionally, first make sure env is OK: `tox -e dev -vv --notest` 23 | ```shell 24 | ./scripts/update-snapshot.sh 25 | ``` 26 | 2. **Git Add:** 27 | ```shell 28 | git add tests/data/snapshots/biskotaki-no-input 29 | ``` 30 | 3. **Git Commit:** 31 | ```shell 32 | git commit -m "tests(data): update biskotaki-no-input Snapshot, used for Regression Testing" 33 | ``` 34 | 35 | ### Snapshot `biskotaki-interactive` -> Interactive Mode ON 36 | 37 | **TLDR**: copy-paste below into terminal prompt: 38 | {"supported-interpreters": ["3.6", "3.7", "3.8", "3.9", "3.10"]} 39 | 40 | when prompted with `interpreters [default]:` 41 | 42 | 43 | 1. **Interactively Generate Biskotaki and automatically Update Test Snapshot:** 44 | 45 | Optionally, first make sure env is OK: `tox -e dev -vv --notest` 46 | ```shell 47 | ./scripts/update-snapshot-interactive.sh 48 | ``` 49 | When prompted with `interpreters [default]:` 50 | Paste: 51 | ```shell 52 | {"supported-interpreters": ["3.6", "3.7", "3.8", "3.9", "3.10"]} 53 | ``` 54 | 2. **Git Add:** 55 | ```shell 56 | git add tests/data/snapshots/biskotaki-interactive 57 | ``` 58 | 3.
**Git Commit:** 59 | ```shell 60 | git commit -m "tests(data): update biskotaki-interactive Snapshot, used for Regression Testing" 61 | ``` 62 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = 3 | biskotakigold 4 | tests 5 | 6 | [report] 7 | show_missing = true 8 | precision = 2 9 | omit = *migrations* 10 | exclude_lines = 11 | raise NotImplementedError 12 | raise NotImplemented 13 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/.github/workflows/codecov-upload.yml: -------------------------------------------------------------------------------- 1 | ###################################### 2 | ## Reusable Codecov Workflow ## 3 | ###################################### 4 | 5 | on: 6 | workflow_call: 7 | inputs: 8 | coverage_artifact: 9 | required: true 10 | type: string 11 | description: "Name of the coverage artifact to upload to Codecov" 12 | verbose: 13 | required: false 14 | type: boolean 15 | description: > 16 | Whether to print more info on Workflow (web) interface; default: true 17 | If true, with "print" more information on $GITHUB_OUTPUT. 18 | default: true 19 | 20 | jobs: 21 | upload_coverage: 22 | name: Upload Coverage to Codecov 23 | runs-on: ubuntu-latest 24 | steps: 25 | - name: Checkout Repository 26 | uses: actions/checkout@v4 27 | 28 | # Download the coverage artifact 29 | - name: Download Coverage Artifact 30 | uses: actions/download-artifact@v4 31 | with: 32 | name: ${{ inputs.coverage_artifact }} 33 | 34 | # Get the Codecov binary 35 | - name: Get Codecov Binary 36 | run: | 37 | curl -Os https://uploader.codecov.io/latest/linux/codecov 38 | chmod +x codecov 39 | 40 | # Upload coverage reports to Codecov 41 | - name: Upload Coverage Reports to Codecov 42 | run: | 43 | for file in coverage*.xml; do 44 | OS_NAME=$(echo $file | sed -E "s/coverage-(\w+)-.*/\1/") 45 | PY_VERSION=$(echo $file | sed -E "s/coverage-\w+-(\d+\.\d+).*/\1/") 46 | ./codecov -f $file -e "OS=$OS_NAME,PYTHON=$PY_VERSION" --flags unittests --verbose 47 | echo "[INFO] Sent to Codecov: $file" 48 | done 49 | 50 | # If Verbose, Write to Workflow Output 51 | - name: Report Uploaded Coverage XML Files 52 | if: ${{ inputs.verbose }} 53 | run: | 54 | for file in coverage*.xml; do 55 | 56 | echo " - Codecov Upload: $file" >> $GITHUB_OUTPUT 57 | 58 | done 59 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/.github/workflows/labeler.yaml: -------------------------------------------------------------------------------- 1 | name: "Pull Request Labeler" 2 | on: 3 | - pull_request_target 4 | 5 | jobs: 6 | label_PR: 7 | permissions: 8 | contents: read 9 | pull-requests: write 10 | runs-on: ubuntu-latest 11 | # This Job behaves as a Listener to PR events, and each step is a Handler 12 | steps: 13 | # HANDLER 1: Label PR, given file changes and Labeling Rules '.github/labeler.yml' 14 | - uses: actions/labeler@v5 15 | with: 16 | # if you want your labels to trigger other Workflows, pass-in a PAT 17 | # with permission for label creation events to trigger listeners 18 | repo-token: ${{ secrets.GITHUB_TOKEN }} 19 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/.gitignore: 
-------------------------------------------------------------------------------- 1 | .idea/ 2 | .vscode/ 3 | 4 | *.egg-info/ 5 | *.pyc 6 | *\.bak 7 | \.fuse* 8 | .coverage 9 | .DS_Store 10 | *__pycache__ 11 | 12 | docs/_build/ 13 | dist/ 14 | build/ 15 | htmlcov/ 16 | 17 | .tox/ 18 | node_modules 19 | 20 | dependency-graphs/ 21 | test-results/ 22 | uml-diagrams/ 23 | pydoer-graphs/ 24 | 25 | # LOGS 26 | 27 | cookie-py.log 28 | 29 | .fuse_hidden* 30 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/.prospector.yml: -------------------------------------------------------------------------------- 1 | # output-format: json 2 | 3 | strictness: high 4 | test-warnings: true 5 | doc-warnings: false 6 | member-warnings: false 7 | inherits: 8 | - default 9 | ignore-paths: 10 | - docs 11 | ignore-patterns: 12 | - (^|/)skip(this)?(/|$) 13 | autodetect: false 14 | max-line-length: 95 15 | 16 | 17 | # TOOLS 18 | 19 | pyflakes: 20 | run: true 21 | 22 | 23 | pyroma: 24 | run: true 25 | disable: 26 | - PYR15 27 | - PYR18 28 | 29 | dodgy: 30 | run: true 31 | 32 | mccabe: 33 | run: true 34 | options: 35 | max-complexity: 12 36 | 37 | 38 | # INACTIVE 39 | 40 | pylint: 41 | run: false 42 | 43 | bandit: 44 | run: false 45 | 46 | frosted: 47 | run: false 48 | 49 | pep8: 50 | run: false 51 | 52 | pep257: 53 | run: false 54 | 55 | mypy: 56 | run: false 57 | 58 | vulture: 59 | run: false 60 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the OS, Python version and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.11" 13 | 14 | # ALL JOBS implied: https://docs.readthedocs.io/en/stable/builds.html 15 | jobs: 16 | pre_install: 17 | - python --version 18 | - python -m pip install poetry 19 | - python -m poetry export -o req-docs.txt -E docs 20 | post_install: 21 | - python -m pip install -e . 22 | - python -m pip install pyyaml 23 | pre_build: 24 | - python ./scripts/visualize-ga-workflow.py > ./docs/cicd_mermaid.md 25 | - python ./scripts/visualize-dockerfile.py > ./docs/dockerfile_mermaid.md 26 | 27 | 28 | 29 | # Build documentation in the "docs/" directory with mkdocs 30 | mkdocs: 31 | configuration: mkdocs.yml 32 | 33 | 34 | 35 | # Optional but recommended, declare the Python requirements required 36 | # to build your documentation 37 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 38 | python: 39 | install: 40 | - requirements: req-docs.txt 41 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | 0.0.1 (2024-03-13) 6 | ======================================= 7 | 8 | | This is the first ever release of the **biskotakigold** Python Package. 9 | | The package is open source and is part of the **Biskotaki Gold Standard** Project. 
10 | | The project is hosted in a public repository on github at https://github.com/boromir674/biskotaki-gold 11 | | The project was scaffolded using the `Cookiecutter Python Package`_ (cookiecutter) Template at https://github.com/boromir674/cookiecutter-python-package/tree/master/src/cookiecutter_python 12 | 13 | | Scaffolding included: 14 | 15 | - **CI Pipeline** running on Github Actions at https://github.com/boromir674/biskotaki-gold/actions 16 | - `Test Workflow` running a multi-factor **Build Matrix** spanning different `platform`'s and `python version`'s 17 | 1. Platforms: `ubuntu-latest`, `macos-latest` 18 | 2. Python Interpreters: `3.6`, `3.7`, `3.8`, `3.9`, `3.10` 19 | 20 | - Automated **Test Suite** with parallel Test execution across multiple cpus. 21 | - Code Coverage 22 | - **Automation** in a 'make' like fashion, using **tox** 23 | - Seamless `Lint`, `Type Check`, `Build` and `Deploy` *operations* 24 | 25 | 26 | .. LINKS 27 | 28 | .. _Cookiecutter Python Package: https://python-package-generator.readthedocs.io/en/master/ 29 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9.16-slim-bullseye as builder 2 | 3 | COPY poetry.lock pyproject.toml ./ 4 | 5 | # Determine where to install poetry 6 | ENV POETRY_HOME=/opt/poetry 7 | 8 | # Install Poetry & generate a requirements.txt file 9 | RUN python -c 'from urllib.request import urlopen; print(urlopen("https://install.python-poetry.org").read().decode())' | python && \ 10 | "$POETRY_HOME/bin/poetry" export -f requirements.txt > requirements.txt 11 | 12 | FROM python:3.9.16-slim-bullseye as install 13 | 14 | # Keep the requirements.txt file from the builder image 15 | COPY --from=builder requirements.txt . 16 | 17 | # Pre emptively add the user's bin folder to PATH 18 | ENV PATH="/root/.local/bin:$PATH" 19 | 20 | RUN apt-get update && \ 21 | apt-get install -y --no-install-recommends build-essential && \ 22 | pip install -U pip && \ 23 | apt-get clean && \ 24 | rm -rf /var/lib/apt/lists/* && \ 25 | pip install --no-cache-dir --user -r requirements.txt 26 | 27 | COPY . . 28 | RUN pip install --no-cache-dir --user . 29 | 30 | 31 | CMD [ "biskotakigold" ] 32 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/docs/index.md: -------------------------------------------------------------------------------- 1 | # Welcome to Biskotaki Gold Standard Documentation! 2 | 3 | [//]: # (Render a few important badges: CI/CD Status, RTD, Coverage, Latest Tag/Sem Ver) 4 | 5 | Biskotaki Gold Standard is an open source TODO 6 | 7 | [//]: # (Same a few words about what this does) 8 | 9 | [//]: # (Maybe state Goal and/or small motivation note) 10 | 11 | [//]: # (Leverage Mermaid to show high of what happens) 12 | 13 | [//]: # (Ideally record video with demo and embed here) 14 | 15 | ## Quick-start 16 | 17 | TODO 18 | 19 | ## :material-book-open: Documentation 20 | 21 | Read about how to use the `biskotakigold` package, understand its features 22 | and capabilities. 23 | 24 | Learn how to use the `biskotakigold` package, achieve goals leverating understand its features 25 | and capabilities. 26 | 27 | 28 |
29 | 30 | 31 | - :fontawesome-regular-circle-play:{ .lg .middle } __`How-to` Guides__ 32 | 33 | --- 34 | 35 | Step-by-step `Guides` that leverage **biskotakigold** to achieve `Goals`, such as: 36 | 37 | - TODO 1 38 | - TODO 2 39 | 40 | [:octicons-arrow-right-24: :material-rocket-launch: `Install`, `Run`, `Use`](./guides/index.md) 41 | 42 | 43 | - :material-application-brackets-outline:{ .lg .middle } __API References__ 44 | 45 | --- 46 | [//]: # (link ./reference/CLI.md does not exist yet, it is generate at docs build-time) 47 | [:octicons-arrow-right-24: :material-console:{ .lg .middle } biskotakigold CLI](./reference/CLI.md) 48 | 49 | [//]: # (link ./reference/biskotakigold.md does not exist yet, it is generate at docs build-time) 50 | [:octicons-arrow-right-24: :material-language-python: API Refs](./reference/biskotakigold) 51 | 52 | 53 | - :fontawesome-solid-book-open:{ .lg .middle } __Topics__ 54 | 55 | --- 56 | 57 | **Explanations / Topics** 58 | 59 | [:octicons-arrow-right-24: :material-language-python: Architecture ](./topics/arch.md) 60 | 61 | [//]: # (Add important Topics here) 62 | 63 | 64 | - :fontawesome-solid-book-open:{ .lg .middle } __Development Topics__ 65 | 66 | --- 67 | 68 | **Topics / Explanations** on Development 69 | 70 | [:octicons-arrow-right-24: :material-hammer-screwdriver: Development Topics ](./topics/development/index.md) 71 | 72 | 73 |
74 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/docs/tags.md: -------------------------------------------------------------------------------- 1 | # Tags 2 | 3 | Following is a list of relevant tags: 4 | 5 | [TAGS] -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/docs/topics/arch.md: -------------------------------------------------------------------------------- 1 | # Software Architecture 2 | 3 | [//]: # (this is a comment) 4 | [//]: # (Description of what is this Page) 5 | 6 | Here you can find the software architecture of the project. 7 | 8 | ## Module Dependencies 9 | 10 | [//]: # (Description of what is this Section) 11 | 12 | Here you can find the dependencies between the modules of the project. 13 | 14 | The dependencies are Visualized as a Graph, where Nodes are the modules and the Edges are python ``import`` statements. 15 | 16 | The dependencies are visualized, after running the following command: 17 | 18 | ```sh 19 | tox -e pydeps 20 | ``` 21 | 22 | !!! Tip 23 | 24 | Right-click and open image in new Tab for better inspection 25 | 26 | ### First-party Dependencies 27 | 28 | [//]: # (Inner Python Imports SVG Graph) 29 | 30 | ![First-party Dependencies](../assets/deps_inner.svg) 31 | 32 | 33 | ### First and Third party Dependencies 34 | 35 | [//]: # (First-Party with 3rd-party having all incoming edges to our individual Modules) 36 | 37 | ![All Dependencies - C](../assets/deps_all.svg) 38 | 39 | 40 | ### 1st+3rd party Deps - 1st as Cluster 41 | 42 | [//]: # ("Boxed" First-Party with 3rd-party having all incoming edges to our Box) 43 | 44 | ![All Dependencies - B](../assets/deps_ktc.svg) 45 | 46 | 47 | ### 1st+3rd party Deps - 1st+3rd as Cluster 48 | 49 | [//]: # ("Boxed" First-Party with 3rd-party having 1 incoming edge to our Box) 50 | 51 | ![All Dependencies - A](../assets/deps_ktc-mcs_2.svg) 52 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/docs/topics/development/build_process_DAG.md: -------------------------------------------------------------------------------- 1 | ## Docker Build Process DAG 2 | 3 | > Understand how we leverage `Docker` in the build process. 
4 | 5 | The project features a `Dockerfile`, designed for 6 | 7 | - multi-stage builds 8 | - parallel stage building (assuming appropriate build backend) 9 | - size minimization of the produced `Docker` image 10 | - minimization of vulerabilities 11 | 12 | ## Dockerfile visualized as Directed Acyclic Graph (DAG) 13 | 14 | > Understand the execution path of `docker build`, via **DAG visualization** 15 | 16 | {% include 'topics/development/dockerfile_mermaid.md' %} 17 | 18 | - `solid boxes` represent distinct docker **stages** and their *aliases* 19 | - `solid arrows` represent **stage dependencies**; `FROM a AS b` type of instructions 20 | - `dotted arrows` represent **stage COPY**: `COPY --from=a /path /path` type of instructions 21 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/docs/topics/development/cicd.md: -------------------------------------------------------------------------------- 1 | --- 2 | tags: 3 | - CICD 4 | --- 5 | 6 | ## CI/CD Pipeline 7 | 8 | > Understand what Jobs are part of the CI/CD Pipeline 9 | 10 | **CI/CD Pipeline** is implemented as `Github Actions Workflow` in a YAML file format. 11 | 12 | ### Workflow of Jobs: visualized as a Directed Acyclic Graph (DAG) 13 | 14 | > Understand the Job Dependencies at "compile time" 15 | 16 | **YAML Workflow: ./.github/workflows/cicd.yml** 17 | 18 | {% include 'topics/development/cicd_mermaid.md' %} 19 | 20 | - `solid boxes` represent **Jobs** declared in the `jobs` array of the YAML Workflow 21 | - `solid arrows` represent **Job Dependencies**; `job_A.needs: [job_b, job_c]` type of yaml objects 22 | 23 | 24 | [//]: # (TODO add section to EXPLAIN the CI/CD Pipeline at runtime) 25 | 26 | [//]: # (TODO make screenshot of CI Server run and paste here) 27 | 28 | [//]: # (TODO add link to live CI server Pipeline RUNS) 29 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/docs/topics/development/cicd_mermaid.md: -------------------------------------------------------------------------------- 1 | ```mermaid 2 | graph LR; 3 | test_n_build 4 | test_n_build --> codecov_coverage_host 5 | test_n_build --> docker_build 6 | lint 7 | docs 8 | code_visualization 9 | test_n_build --> signal_deploy 10 | signal_deploy --> pypi_publish 11 | signal_deploy --> gh_release 12 | ``` 13 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/docs/topics/development/dockerfile_mermaid.md: -------------------------------------------------------------------------------- 1 | ## Dockerfile Flow Chart 2 | 3 | **Dockerfile: Dockerfile** 4 | 5 | ```mermaid 6 | graph TB; 7 | python:3.9.16-slim-bullseye --> builder 8 | python:3.9.16-slim-bullseye --> install 9 | builder -. "requirements.txt" .-> install 10 | ``` 11 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/docs/topics/development/index.md: -------------------------------------------------------------------------------- 1 | # Development 2 | Here you will find topics related to `Development`, the `build`, and the 3 | `CI/CD` Pipeline design of the **Biskotaki** open-source project. 4 | 5 | 6 |
7 | 8 | 9 | - :material-docker:{ .lg .middle } __Docker__ 10 | 11 | --- 12 | 13 | Dockerfile design, Build Process 14 | 15 | [:octicons-arrow-right-24: Topic ](./build_process_DAG.md) 16 | 17 | 18 | - :simple-githubactions:{ .lg .middle } __CI/CD Pipeline__ 19 | 20 | --- 21 | 22 | Github Actions Workflow of Jobs, visualized as a DAG 23 | 24 | [:octicons-arrow-right-24: Topic ](./cicd.md) 25 | 26 | 27 | - :material-state-machine:{ .lg .middle } __Git Ops Processes__ 28 | 29 | --- 30 | 31 | Step-by-step Processes, leveraging `git` and `CI` for **Releasing changes** 32 | 33 | [:octicons-arrow-right-24: Docs ](./gitops/) 34 | 35 |
36 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/src/biskotakigold/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.0.1' 2 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/src/biskotakigold/__main__.py: -------------------------------------------------------------------------------- 1 | """Run `python -m biskotakigold`. 2 | 3 | Allow running Biskotaki Gold Standard, also by invoking 4 | the python module: 5 | 6 | `python -m biskotakigold` 7 | 8 | This is an alternative to directly invoking the cli that uses python as the 9 | "entrypoint". 10 | """ 11 | 12 | from __future__ import absolute_import 13 | 14 | from biskotakigold.cli import main 15 | 16 | 17 | if __name__ == "__main__": # pragma: no cover 18 | main(prog_name="biskotakigold") # pylint: disable=unexpected-keyword-arg 19 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/src/biskotakigold/_logging.py: -------------------------------------------------------------------------------- 1 | """Set up Application Logs 2 | 3 | This module defines how the emitted application logs are handled and where 4 | they are written/streamed. 5 | The application logs are written in full details (ie with timestamps) to a file 6 | and also streamed to the console in a more concise format. 7 | 8 | # Console/Terminal Log: 9 | - We Stream Logs of INFO (and above) Level on Console's stderr 10 | - The rendered Log format is: : 11 | 12 | # Disk File Log: 13 | - we Write Logs of ALL Levels on a Disk File 14 | - The rendered Log format is: : 15 | - The FILE_TARGET_LOGS, variable (see below), defines the path to the log file 16 | 17 | Log Levels: 18 | - CRITICAL 19 | - ERROR 20 | - WARNING 21 | - INFO 22 | - DEBUG 23 | 24 | Usage: 25 | Do a 'from . import _logging' in the root __init__.py of your package and 26 | all submodules 'inherit' the logging configuration 27 | """ 28 | 29 | import logging 30 | 31 | 32 | # for 'biskotaki' app/code 33 | FILE_TARGET_LOGS = 'biskotaki.log' 34 | 35 | #### FILE LOGGING 36 | # set up logging to file for DEBUG Level and above 37 | logging.basicConfig( 38 | level=logging.DEBUG, 39 | format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s', 40 | datefmt='%m-%d %H:%M', 41 | filename=FILE_TARGET_LOGS, 42 | filemode='w', 43 | ) 44 | 45 | #### CONSOLE LOGGING 46 | console = logging.StreamHandler() 47 | 48 | ### Handler which writes DEBUG messages or higher to the sys.stderr ### 49 | console.setLevel(logging.DEBUG) 50 | # console.setLevel(logging.INFO) 51 | 52 | # set a format which is simpler for console use 53 | formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s') 54 | # tell the handler to use this format 55 | console.setFormatter(formatter) 56 | # add the handler to the root logger 57 | logging.getLogger('').addHandler(console) 58 | 59 | 60 | # Now, we can log to the root logger, or any other logger. First the root... 
61 | # logging.info('Blah blah') 62 | 63 | # Now, define a couple of other loggers which might represent areas in your 64 | # application: 65 | 66 | # logger1 = logging.getLogger('myapp.area1') 67 | # logger2 = logging.getLogger('myapp.area2') 68 | # logger3 = logging.getLogger(__name__) 69 | 70 | # logger1.debug('balh blah') 71 | # logger1.info('balh blah') 72 | # logger2.warning('balh blah') 73 | # logger3.error('balh blah') 74 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/src/biskotakigold/cli.py: -------------------------------------------------------------------------------- 1 | """Main `biskotakigold` CLI.""" 2 | 3 | import os 4 | import sys 5 | 6 | import click 7 | 8 | from . import __version__ 9 | 10 | 11 | this_file_location = os.path.dirname(os.path.realpath(os.path.abspath(__file__))) 12 | 13 | 14 | def version_msg(): 15 | """biskotakigold version, location and Python version. 16 | 17 | Get message about biskotakigold version, location 18 | and Python version. 19 | """ 20 | python_version = sys.version[:3] 21 | message = u"Biskotaki Gold Standard %(version)s from {} (Python {})" 22 | location = os.path.dirname(this_file_location) 23 | return message.format(location, python_version) 24 | 25 | 26 | @click.command(context_settings=dict(help_option_names=[u'-h', u'--help'])) 27 | @click.version_option(__version__, u'-V', u'--version', message=version_msg()) 28 | # @click.option('-v', '--verbose', is_flag=True, help='Print debug information', default=False) 29 | def main( 30 | # verbose, 31 | ): 32 | """TODO Write this content that gets renders when invoking with --help flag!""" 33 | try: 34 | pass 35 | except Exception as error: # pylint: disable=broad-except 36 | click.echo(error) 37 | sys.exit(1) 38 | 39 | 40 | if __name__ == "__main__": # pragma: no cover 41 | main() 42 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/src/biskotakigold/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/tests/data/snapshots/biskotaki-gold-standard/src/biskotakigold/py.typed -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/tests/smoke_test.py: -------------------------------------------------------------------------------- 1 | def test_import_module(): 2 | import biskotakigold 3 | 4 | assert biskotakigold is not None 5 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/tests/test_cli.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.fixture 5 | def get_main_arguments(): 6 | return type('A', (), {'command_line_script_args': None, 'main_function_kwargs': {}}) 7 | 8 | 9 | @pytest.mark.runner_setup(mix_stderr=False) 10 | def test_cli( 11 | get_main_arguments, 12 | isolated_cli_runner, 13 | ): 14 | from biskotakigold.cli import main 15 | 16 | main_arguments = get_main_arguments() 17 | result = isolated_cli_runner.invoke( 18 | main, 19 | args=main_arguments.command_line_script_args, 20 | input=None, 21 | env=None, 22 | catch_exceptions=False, 23 | **main_arguments.main_function_kwargs, 24 | ) 25 | assert result.exit_code == 0 26 | assert result.stdout == '' 27 | 
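The snapshot test above drives the generated `main` command through the `isolated_cli_runner` pytest fixture. For orientation, a minimal stand-alone sketch of the same invocation, using click's own `CliRunner` directly and assuming the generated `biskotakigold` package is importable (illustrative only, not part of the snapshot):

```python
# Illustrative sketch, not part of the snapshot: invoke the generated CLI
# directly with click's test runner instead of the pytest fixture.
from click.testing import CliRunner

from biskotakigold.cli import main  # assumes the generated package is installed

runner = CliRunner()
result = runner.invoke(main, args=['--help'], catch_exceptions=False)
assert result.exit_code == 0               # --help exits successfully
assert result.output.startswith('Usage:')  # click prints the usage line first
```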
-------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-gold-standard/tests/test_invoking_cli.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | 4 | def test_invoking_cli_as_python_module(run_subprocess): 5 | result = run_subprocess( 6 | sys.executable, 7 | '-m', 8 | 'biskotakigold', 9 | '--help', 10 | ) 11 | assert result.exit_code == 0 12 | assert result.stderr == '' 13 | assert result.stdout.split('\n')[0] == "Usage: biskotakigold [OPTIONS]" 14 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = 3 | biskotaki 4 | tests 5 | 6 | [report] 7 | show_missing = true 8 | precision = 2 9 | omit = *migrations* 10 | exclude_lines = 11 | raise NotImplementedError 12 | raise NotImplemented 13 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/.github/workflows/codecov-upload.yml: -------------------------------------------------------------------------------- 1 | ###################################### 2 | ## Reusable Codecov Workflow ## 3 | ###################################### 4 | 5 | on: 6 | workflow_call: 7 | inputs: 8 | coverage_artifact: 9 | required: true 10 | type: string 11 | description: "Name of the coverage artifact to upload to Codecov" 12 | verbose: 13 | required: false 14 | type: boolean 15 | description: > 16 | Whether to print more info on Workflow (web) interface; default: true 17 | If true, with "print" more information on $GITHUB_OUTPUT. 18 | default: true 19 | 20 | jobs: 21 | upload_coverage: 22 | name: Upload Coverage to Codecov 23 | runs-on: ubuntu-latest 24 | steps: 25 | - name: Checkout Repository 26 | uses: actions/checkout@v4 27 | 28 | # Download the coverage artifact 29 | - name: Download Coverage Artifact 30 | uses: actions/download-artifact@v4 31 | with: 32 | name: ${{ inputs.coverage_artifact }} 33 | 34 | # Get the Codecov binary 35 | - name: Get Codecov Binary 36 | run: | 37 | curl -Os https://uploader.codecov.io/latest/linux/codecov 38 | chmod +x codecov 39 | 40 | # Upload coverage reports to Codecov 41 | - name: Upload Coverage Reports to Codecov 42 | run: | 43 | for file in coverage*.xml; do 44 | OS_NAME=$(echo $file | sed -E "s/coverage-(\w+)-.*/\1/") 45 | PY_VERSION=$(echo $file | sed -E "s/coverage-\w+-(\d+\.\d+).*/\1/") 46 | ./codecov -f $file -e "OS=$OS_NAME,PYTHON=$PY_VERSION" --flags unittests --verbose 47 | echo "[INFO] Sent to Codecov: $file" 48 | done 49 | 50 | # If Verbose, Write to Workflow Output 51 | - name: Report Uploaded Coverage XML Files 52 | if: ${{ inputs.verbose }} 53 | run: | 54 | for file in coverage*.xml; do 55 | 56 | echo " - Codecov Upload: $file" >> $GITHUB_OUTPUT 57 | 58 | done 59 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/.github/workflows/labeler.yaml: -------------------------------------------------------------------------------- 1 | name: "Pull Request Labeler" 2 | on: 3 | - pull_request_target 4 | 5 | jobs: 6 | label_PR: 7 | permissions: 8 | contents: read 9 | pull-requests: write 10 | runs-on: ubuntu-latest 11 | # This Job behaves as a Listener to PR events, and each step is a Handler 12 | steps: 13 | # HANDLER 1: Label PR, given file changes and Labeling Rules 
'.github/labeler.yml' 14 | - uses: actions/labeler@v5 15 | with: 16 | # if you want your labels to trigger other Workflows, pass-in a PAT 17 | # with permission for label creation events to trigger listeners 18 | repo-token: ${{ secrets.GITHUB_TOKEN }} 19 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | .vscode/ 3 | 4 | *.egg-info/ 5 | *.pyc 6 | *\.bak 7 | \.fuse* 8 | .coverage 9 | .DS_Store 10 | *__pycache__ 11 | 12 | docs/_build/ 13 | dist/ 14 | build/ 15 | htmlcov/ 16 | 17 | .tox/ 18 | node_modules 19 | 20 | dependency-graphs/ 21 | test-results/ 22 | uml-diagrams/ 23 | pydoer-graphs/ 24 | 25 | # LOGS 26 | 27 | cookie-py.log 28 | 29 | .fuse_hidden* 30 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/.prospector.yml: -------------------------------------------------------------------------------- 1 | # output-format: json 2 | 3 | strictness: high 4 | test-warnings: true 5 | doc-warnings: false 6 | member-warnings: false 7 | inherits: 8 | - default 9 | ignore-paths: 10 | - docs 11 | ignore-patterns: 12 | - (^|/)skip(this)?(/|$) 13 | autodetect: false 14 | max-line-length: 95 15 | 16 | 17 | # TOOLS 18 | 19 | pyflakes: 20 | run: true 21 | 22 | 23 | pyroma: 24 | run: true 25 | disable: 26 | - PYR15 27 | - PYR18 28 | 29 | dodgy: 30 | run: true 31 | 32 | mccabe: 33 | run: true 34 | options: 35 | max-complexity: 12 36 | 37 | 38 | # INACTIVE 39 | 40 | pylint: 41 | run: false 42 | 43 | bandit: 44 | run: false 45 | 46 | frosted: 47 | run: false 48 | 49 | pep8: 50 | run: false 51 | 52 | pep257: 53 | run: false 54 | 55 | mypy: 56 | run: false 57 | 58 | vulture: 59 | run: false 60 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the OS, Python version and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.10" 13 | 14 | # ALL JOBS implied: https://docs.readthedocs.io/en/stable/builds.html 15 | jobs: 16 | pre_install: 17 | - python --version 18 | - python -m pip install poetry 19 | - python -m poetry export -o req-docs.txt -E docs 20 | post_install: 21 | - python -m pip install -e . 
22 | - python -m pip install pyyaml 23 | pre_build: 24 | - python ./scripts/visualize-ga-workflow.py > ./docs/cicd_mermaid.md 25 | - python ./scripts/visualize-dockerfile.py > ./docs/dockerfile_mermaid.md 26 | 27 | 28 | 29 | # Build documentation in the docs/ directory with Sphinx 30 | sphinx: 31 | builder: html 32 | configuration: docs/conf.py 33 | fail_on_warning: false 34 | 35 | # Optionally build your docs in additional formats such as PDF 36 | formats: 37 | - pdf 38 | - epub 39 | 40 | 41 | 42 | # Optional but recommended, declare the Python requirements required 43 | # to build your documentation 44 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 45 | python: 46 | install: 47 | - requirements: req-docs.txt 48 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | 0.0.1 (2025-03-27) 6 | ======================================= 7 | 8 | | This is the first ever release of the **biskotaki** Python Package. 9 | | The package is open source and is part of the **Biskotaki** Project. 10 | | The project is hosted in a public repository on github at https://github.com/boromir674/biskotaki 11 | | The project was scaffolded using the `Cookiecutter Python Package`_ (cookiecutter) Template at https://github.com/boromir674/cookiecutter-python-package/tree/master/src/cookiecutter_python 12 | 13 | | Scaffolding included: 14 | 15 | - **CI Pipeline** running on Github Actions at https://github.com/boromir674/biskotaki/actions 16 | - `Test Workflow` running a multi-factor **Build Matrix** spanning different `platform`'s and `python version`'s 17 | 1. Platforms: `ubuntu-latest`, `macos-latest` 18 | 2. Python Interpreters: `3.6`, `3.7`, `3.8`, `3.9`, `3.10` 19 | 20 | - Automated **Test Suite** with parallel Test execution across multiple cpus. 21 | - Code Coverage 22 | - **Automation** in a 'make' like fashion, using **tox** 23 | - Seamless `Lint`, `Type Check`, `Build` and `Deploy` *operations* 24 | 25 | 26 | .. LINKS 27 | 28 | .. _Cookiecutter Python Package: https://python-package-generator.readthedocs.io/en/master/ 29 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9.16-slim-bullseye as builder 2 | 3 | COPY poetry.lock pyproject.toml ./ 4 | 5 | # Determine where to install poetry 6 | ENV POETRY_HOME=/opt/poetry 7 | 8 | # Install Poetry & generate a requirements.txt file 9 | RUN python -c 'from urllib.request import urlopen; print(urlopen("https://install.python-poetry.org").read().decode())' | python && \ 10 | "$POETRY_HOME/bin/poetry" export -f requirements.txt > requirements.txt 11 | 12 | FROM python:3.9.16-slim-bullseye as install 13 | 14 | # Keep the requirements.txt file from the builder image 15 | COPY --from=builder requirements.txt . 16 | 17 | # Pre emptively add the user's bin folder to PATH 18 | ENV PATH="/root/.local/bin:$PATH" 19 | 20 | RUN apt-get update && \ 21 | apt-get install -y --no-install-recommends build-essential && \ 22 | pip install -U pip && \ 23 | apt-get clean && \ 24 | rm -rf /var/lib/apt/lists/* && \ 25 | pip install --no-cache-dir --user -r requirements.txt 26 | 27 | COPY . . 28 | RUN pip install --no-cache-dir --user . 
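The Dockerfile above follows the two-stage layout visualized in the `dockerfile_mermaid.md` topic: a `builder` stage that exports `requirements.txt` via Poetry, and an `install` stage that pip-installs it. As a minimal usage sketch (example commands only, not part of the snapshot; the `biskotaki` image tag is an arbitrary name), the image could be built with BuildKit enabled so that independent stages can be resolved in parallel:

```sh
# Example only: build the final image with BuildKit enabled
DOCKER_BUILDKIT=1 docker build -t biskotaki .

# Example only: build up to the 'builder' stage, e.g. to inspect the exported requirements.txt
DOCKER_BUILDKIT=1 docker build --target builder -t biskotaki:builder .
```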
29 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/docs/contents/10_introduction.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Introduction 3 | ============ 4 | 5 | | This is **Biskotaki**, a *Python Package* desinged to ... 6 | 7 | | Goal of this project is to TODO Document 8 | | Additionally, TODO Document 9 | 10 | | This documentation aims to help people understand what are the package's features and to demonstrate 11 | | how to leverage them for their use cases. 12 | | It also presents the overall package design. 13 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/docs/contents/20_why_this_package.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | Why this Package? 3 | ================= 4 | 5 | So, why would one opt for this Package? 6 | 7 | It is **easy** to *install* (using pip) and intuitive to *use*. 8 | 9 | **Biskotaki** features TODO Document 10 | 11 | Well-tested against multiple Python Interpreter versions (3.6 - 3.10), 12 | tested on both *Linux* (Ubuntu) and *Darwin* (Macos) platforms. 13 | 14 | Tests trigger automatically on **CI**. 15 | The package's releases follow **Semantic Versioning**. 16 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/docs/contents/30_usage.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Usage 3 | ===== 4 | 5 | ------------ 6 | Installation 7 | ------------ 8 | 9 | | **biskotaki** is available on PyPI hence you can use `pip` to install it. 10 | 11 | It is recommended to perform the installation in an isolated `python virtual environment` (env). 12 | You can create and activate an `env` using any tool of your preference (ie `virtualenv`, `venv`, `pyenv`). 13 | 14 | Assuming you have 'activated' a `python virtual environment`: 15 | 16 | .. 
code-block:: shell 17 | 18 | python -m pip install biskotaki 19 | 20 | 21 | --------------- 22 | Simple Use Case 23 | --------------- 24 | 25 | | Common Use Case for the biskotaki is to TODO Document 26 | 27 | TODO Document 28 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/docs/contents/40_modules.rst: -------------------------------------------------------------------------------- 1 | biskotaki 2 | =================================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | biskotaki 8 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/docs/contents/biskotaki.rst: -------------------------------------------------------------------------------- 1 | biskotaki package 2 | ===================================================================== 3 | 4 | 5 | Module contents 6 | --------------- 7 | 8 | .. automodule:: biskotaki 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to Biskotaki documentation! 2 | ========================================================================================= 3 | 4 | .. include:: ../README.rst 5 | 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | :caption: Contents: 10 | 11 | contents/10_introduction 12 | contents/20_why_this_package 13 | contents/30_usage 14 | contents/40_modules 15 | 16 | 17 | Indices and tables 18 | ================== 19 | 20 | * :ref:`genindex` 21 | * :ref:`modindex` 22 | * :ref:`search` 23 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/docs/spelling_wordlist.txt: -------------------------------------------------------------------------------- 1 | virtualenv 2 | macOS 3 | Macos 4 | conda 5 | env 6 | 7 | Quickstart 8 | Submodules 9 | Subpackages 10 | 11 | # python modules/object names 12 | biskotaki 13 | utils 14 | metaclass 15 | args 16 | kwargs 17 | Iterable 18 | json 19 | len 20 | 21 | # Software Engineering 22 | runnable 23 | instantiation 24 | subclasses 25 | dev 26 | templating 27 | linter 28 | 29 | deserialization 30 | discretization 31 | interpretable 32 | pre 33 | accomodate 34 | eg 35 | ie 36 | iterable 37 | indexable 38 | nb 39 | quantisized 40 | runtime 41 | 42 | # 2nd, 3rd 43 | nd 44 | 45 | cookiecutter 46 | pypi 47 | sdist 48 | cpu 49 | 50 | # Github 51 | github 52 | 53 | # Open Source Software Licences 54 | affero 55 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/src/biskotaki/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.0.1' 2 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/src/biskotaki/_logging.py: -------------------------------------------------------------------------------- 1 | """Set up Application Logs 2 | 3 | This module defines how the emitted application logs are handled and where 4 | they are written/streamed. 5 | The application logs are written in full details (ie with timestamps) to a file 6 | and also streamed to the console in a more concise format. 7 | 8 | # Console/Terminal Log: 9 | - We Stream Logs of INFO (and above) Level on Console's stderr 10 | - The rendered Log format is: : 11 | 12 | # Disk File Log: 13 | - we Write Logs of ALL Levels on a Disk File 14 | - The rendered Log format is: : 15 | - The FILE_TARGET_LOGS, variable (see below), defines the path to the log file 16 | 17 | Log Levels: 18 | - CRITICAL 19 | - ERROR 20 | - WARNING 21 | - INFO 22 | - DEBUG 23 | 24 | Usage: 25 | Do a 'from . 
import _logging' in the root __init__.py of your package and 26 | all submodules 'inherit' the logging configuration 27 | """ 28 | 29 | import logging 30 | 31 | 32 | # for 'biskotaki' app/code 33 | FILE_TARGET_LOGS = 'biskotaki.log' 34 | 35 | #### FILE LOGGING 36 | # set up logging to file for DEBUG Level and above 37 | logging.basicConfig( 38 | level=logging.DEBUG, 39 | format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s', 40 | datefmt='%m-%d %H:%M', 41 | filename=FILE_TARGET_LOGS, 42 | filemode='w', 43 | ) 44 | 45 | #### CONSOLE LOGGING 46 | console = logging.StreamHandler() 47 | 48 | ### Handler which writes DEBUG messages or higher to the sys.stderr ### 49 | console.setLevel(logging.DEBUG) 50 | # console.setLevel(logging.INFO) 51 | 52 | # set a format which is simpler for console use 53 | formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s') 54 | # tell the handler to use this format 55 | console.setFormatter(formatter) 56 | # add the handler to the root logger 57 | logging.getLogger('').addHandler(console) 58 | 59 | 60 | # Now, we can log to the root logger, or any other logger. First the root... 61 | # logging.info('Blah blah') 62 | 63 | # Now, define a couple of other loggers which might represent areas in your 64 | # application: 65 | 66 | # logger1 = logging.getLogger('myapp.area1') 67 | # logger2 = logging.getLogger('myapp.area2') 68 | # logger3 = logging.getLogger(__name__) 69 | 70 | # logger1.debug('balh blah') 71 | # logger1.info('balh blah') 72 | # logger2.warning('balh blah') 73 | # logger3.error('balh blah') 74 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/src/biskotaki/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/tests/data/snapshots/biskotaki-interactive/src/biskotaki/py.typed -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-interactive/tests/smoke_test.py: -------------------------------------------------------------------------------- 1 | def test_import_module(): 2 | import biskotaki 3 | 4 | assert biskotaki is not None 5 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = 3 | biskotaki 4 | tests 5 | 6 | [report] 7 | show_missing = true 8 | precision = 2 9 | omit = *migrations* 10 | exclude_lines = 11 | raise NotImplementedError 12 | raise NotImplemented 13 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/.github/workflows/codecov-upload.yml: -------------------------------------------------------------------------------- 1 | ###################################### 2 | ## Reusable Codecov Workflow ## 3 | ###################################### 4 | 5 | on: 6 | workflow_call: 7 | inputs: 8 | coverage_artifact: 9 | required: true 10 | type: string 11 | description: "Name of the coverage artifact to upload to Codecov" 12 | verbose: 13 | required: false 14 | type: boolean 15 | description: > 16 | Whether to print more info on Workflow (web) interface; default: true 17 | If true, with "print" more information on $GITHUB_OUTPUT. 
18 | default: true 19 | 20 | jobs: 21 | upload_coverage: 22 | name: Upload Coverage to Codecov 23 | runs-on: ubuntu-latest 24 | steps: 25 | - name: Checkout Repository 26 | uses: actions/checkout@v4 27 | 28 | # Download the coverage artifact 29 | - name: Download Coverage Artifact 30 | uses: actions/download-artifact@v4 31 | with: 32 | name: ${{ inputs.coverage_artifact }} 33 | 34 | # Get the Codecov binary 35 | - name: Get Codecov Binary 36 | run: | 37 | curl -Os https://uploader.codecov.io/latest/linux/codecov 38 | chmod +x codecov 39 | 40 | # Upload coverage reports to Codecov 41 | - name: Upload Coverage Reports to Codecov 42 | run: | 43 | for file in coverage*.xml; do 44 | OS_NAME=$(echo $file | sed -E "s/coverage-(\w+)-.*/\1/") 45 | PY_VERSION=$(echo $file | sed -E "s/coverage-\w+-(\d+\.\d+).*/\1/") 46 | ./codecov -f $file -e "OS=$OS_NAME,PYTHON=$PY_VERSION" --flags unittests --verbose 47 | echo "[INFO] Sent to Codecov: $file" 48 | done 49 | 50 | # If Verbose, Write to Workflow Output 51 | - name: Report Uploaded Coverage XML Files 52 | if: ${{ inputs.verbose }} 53 | run: | 54 | for file in coverage*.xml; do 55 | 56 | echo " - Codecov Upload: $file" >> $GITHUB_OUTPUT 57 | 58 | done 59 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/.github/workflows/labeler.yaml: -------------------------------------------------------------------------------- 1 | name: "Pull Request Labeler" 2 | on: 3 | - pull_request_target 4 | 5 | jobs: 6 | label_PR: 7 | permissions: 8 | contents: read 9 | pull-requests: write 10 | runs-on: ubuntu-latest 11 | # This Job behaves as a Listener to PR events, and each step is a Handler 12 | steps: 13 | # HANDLER 1: Label PR, given file changes and Labeling Rules '.github/labeler.yml' 14 | - uses: actions/labeler@v5 15 | with: 16 | # if you want your labels to trigger other Workflows, pass-in a PAT 17 | # with permission for label creation events to trigger listeners 18 | repo-token: ${{ secrets.GITHUB_TOKEN }} 19 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | .vscode/ 3 | 4 | *.egg-info/ 5 | *.pyc 6 | *\.bak 7 | \.fuse* 8 | .coverage 9 | .DS_Store 10 | *__pycache__ 11 | 12 | docs/_build/ 13 | dist/ 14 | build/ 15 | htmlcov/ 16 | 17 | .tox/ 18 | node_modules 19 | 20 | dependency-graphs/ 21 | test-results/ 22 | uml-diagrams/ 23 | pydoer-graphs/ 24 | 25 | # LOGS 26 | 27 | cookie-py.log 28 | 29 | .fuse_hidden* 30 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/.prospector.yml: -------------------------------------------------------------------------------- 1 | # output-format: json 2 | 3 | strictness: high 4 | test-warnings: true 5 | doc-warnings: false 6 | member-warnings: false 7 | inherits: 8 | - default 9 | ignore-paths: 10 | - docs 11 | ignore-patterns: 12 | - (^|/)skip(this)?(/|$) 13 | autodetect: false 14 | max-line-length: 95 15 | 16 | 17 | # TOOLS 18 | 19 | pyflakes: 20 | run: true 21 | 22 | 23 | pyroma: 24 | run: true 25 | disable: 26 | - PYR15 27 | - PYR18 28 | 29 | dodgy: 30 | run: true 31 | 32 | mccabe: 33 | run: true 34 | options: 35 | max-complexity: 12 36 | 37 | 38 | # INACTIVE 39 | 40 | pylint: 41 | run: false 42 | 43 | bandit: 44 | run: false 45 | 46 | frosted: 47 | run: false 48 | 49 | pep8: 50 | run: false 51 | 52 | 
pep257: 53 | run: false 54 | 55 | mypy: 56 | run: false 57 | 58 | vulture: 59 | run: false 60 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the OS, Python version and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.10" 13 | 14 | # ALL JOBS implied: https://docs.readthedocs.io/en/stable/builds.html 15 | jobs: 16 | pre_install: 17 | - python --version 18 | - python -m pip install poetry 19 | - python -m poetry export -o req-docs.txt -E docs 20 | post_install: 21 | - python -m pip install -e . 22 | - python -m pip install pyyaml 23 | pre_build: 24 | - python ./scripts/visualize-ga-workflow.py > ./docs/cicd_mermaid.md 25 | - python ./scripts/visualize-dockerfile.py > ./docs/dockerfile_mermaid.md 26 | 27 | 28 | 29 | # Build documentation in the docs/ directory with Sphinx 30 | sphinx: 31 | builder: html 32 | configuration: docs/conf.py 33 | fail_on_warning: false 34 | 35 | # Optionally build your docs in additional formats such as PDF 36 | formats: 37 | - pdf 38 | - epub 39 | 40 | 41 | 42 | # Optional but recommended, declare the Python requirements required 43 | # to build your documentation 44 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 45 | python: 46 | install: 47 | - requirements: req-docs.txt 48 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | 0.0.1 (2025-03-26) 6 | ======================================= 7 | 8 | | This is the first ever release of the **biskotaki** Python Package. 9 | | The package is open source and is part of the **Biskotaki** Project. 10 | | The project is hosted in a public repository on github at https://github.com/boromir674/biskotaki 11 | | The project was scaffolded using the `Cookiecutter Python Package`_ (cookiecutter) Template at https://github.com/boromir674/cookiecutter-python-package/tree/master/src/cookiecutter_python 12 | 13 | | Scaffolding included: 14 | 15 | - **CI Pipeline** running on Github Actions at https://github.com/boromir674/biskotaki/actions 16 | - `Test Workflow` running a multi-factor **Build Matrix** spanning different `platform`'s and `python version`'s 17 | 1. Platforms: `ubuntu-latest`, `macos-latest` 18 | 2. Python Interpreters: `3.6`, `3.7`, `3.8`, `3.9`, `3.10` 19 | 20 | - Automated **Test Suite** with parallel Test execution across multiple cpus. 21 | - Code Coverage 22 | - **Automation** in a 'make' like fashion, using **tox** 23 | - Seamless `Lint`, `Type Check`, `Build` and `Deploy` *operations* 24 | 25 | 26 | .. LINKS 27 | 28 | .. 
_Cookiecutter Python Package: https://python-package-generator.readthedocs.io/en/master/ 29 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9.16-slim-bullseye as builder 2 | 3 | COPY poetry.lock pyproject.toml ./ 4 | 5 | # Determine where to install poetry 6 | ENV POETRY_HOME=/opt/poetry 7 | 8 | # Install Poetry & generate a requirements.txt file 9 | RUN python -c 'from urllib.request import urlopen; print(urlopen("https://install.python-poetry.org").read().decode())' | python && \ 10 | "$POETRY_HOME/bin/poetry" export -f requirements.txt > requirements.txt 11 | 12 | FROM python:3.9.16-slim-bullseye as install 13 | 14 | # Keep the requirements.txt file from the builder image 15 | COPY --from=builder requirements.txt . 16 | 17 | # Pre emptively add the user's bin folder to PATH 18 | ENV PATH="/root/.local/bin:$PATH" 19 | 20 | RUN apt-get update && \ 21 | apt-get install -y --no-install-recommends build-essential && \ 22 | pip install -U pip && \ 23 | apt-get clean && \ 24 | rm -rf /var/lib/apt/lists/* && \ 25 | pip install --no-cache-dir --user -r requirements.txt 26 | 27 | COPY . . 28 | RUN pip install --no-cache-dir --user . 29 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/docs/contents/10_introduction.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Introduction 3 | ============ 4 | 5 | | This is **Biskotaki**, a *Python Package* desinged to ... 6 | 7 | | Goal of this project is to TODO Document 8 | | Additionally, TODO Document 9 | 10 | | This documentation aims to help people understand what are the package's features and to demonstrate 11 | | how to leverage them for their use cases. 12 | | It also presents the overall package design. 13 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/docs/contents/20_why_this_package.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | Why this Package? 3 | ================= 4 | 5 | So, why would one opt for this Package? 6 | 7 | It is **easy** to *install* (using pip) and intuitive to *use*. 8 | 9 | **Biskotaki** features TODO Document 10 | 11 | Well-tested against multiple Python Interpreter versions (3.6 - 3.10), 12 | tested on both *Linux* (Ubuntu) and *Darwin* (Macos) platforms. 
13 | 14 | Tests trigger automatically on **CI**. 15 | The package's releases follow **Semantic Versioning**. 16 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/docs/contents/30_usage.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Usage 3 | ===== 4 | 5 | ------------ 6 | Installation 7 | ------------ 8 | 9 | | **biskotaki** is available on PyPI hence you can use `pip` to install it. 10 | 11 | It is recommended to perform the installation in an isolated `python virtual environment` (env). 12 | You can create and activate an `env` using any tool of your preference (ie `virtualenv`, `venv`, `pyenv`). 13 | 14 | Assuming you have 'activated' a `python virtual environment`: 15 | 16 | .. code-block:: shell 17 | 18 | python -m pip install biskotaki 19 | 20 | 21 | --------------- 22 | Simple Use Case 23 | --------------- 24 | 25 | | Common Use Case for the biskotaki is to TODO Document 26 | 27 | TODO Document 28 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/docs/contents/40_modules.rst: -------------------------------------------------------------------------------- 1 | biskotaki 2 | =================================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | biskotaki 8 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/docs/contents/biskotaki.rst: -------------------------------------------------------------------------------- 1 | biskotaki package 2 | ===================================================================== 3 | 4 | 5 | Module contents 6 | --------------- 7 | 8 | .. automodule:: biskotaki 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to Biskotaki documentation! 2 | ========================================================================================= 3 | 4 | .. include:: ../README.rst 5 | 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | :caption: Contents: 10 | 11 | contents/10_introduction 12 | contents/20_why_this_package 13 | contents/30_usage 14 | contents/40_modules 15 | 16 | 17 | Indices and tables 18 | ================== 19 | 20 | * :ref:`genindex` 21 | * :ref:`modindex` 22 | * :ref:`search` 23 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/docs/spelling_wordlist.txt: -------------------------------------------------------------------------------- 1 | virtualenv 2 | macOS 3 | Macos 4 | conda 5 | env 6 | 7 | Quickstart 8 | Submodules 9 | Subpackages 10 | 11 | # python modules/object names 12 | biskotaki 13 | utils 14 | metaclass 15 | args 16 | kwargs 17 | Iterable 18 | json 19 | len 20 | 21 | # Software Engineering 22 | runnable 23 | instantiation 24 | subclasses 25 | dev 26 | templating 27 | linter 28 | 29 | deserialization 30 | discretization 31 | interpretable 32 | pre 33 | accomodate 34 | eg 35 | ie 36 | iterable 37 | indexable 38 | nb 39 | quantisized 40 | runtime 41 | 42 | # 2nd, 3rd 43 | nd 44 | 45 | cookiecutter 46 | pypi 47 | sdist 48 | cpu 49 | 50 | # Github 51 | github 52 | 53 | # Open Source Software Licences 54 | affero 55 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/src/biskotaki/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.0.1' 2 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/src/biskotaki/_logging.py: -------------------------------------------------------------------------------- 1 | """Set up Application Logs 2 | 3 | This module defines how the emitted application logs are handled and where 4 | they are written/streamed. 5 | The application logs are written in full details (ie with timestamps) to a file 6 | and also streamed to the console in a more concise format. 7 | 8 | # Console/Terminal Log: 9 | - We Stream Logs of INFO (and above) Level on Console's stderr 10 | - The rendered Log format is: : 11 | 12 | # Disk File Log: 13 | - we Write Logs of ALL Levels on a Disk File 14 | - The rendered Log format is: : 15 | - The FILE_TARGET_LOGS, variable (see below), defines the path to the log file 16 | 17 | Log Levels: 18 | - CRITICAL 19 | - ERROR 20 | - WARNING 21 | - INFO 22 | - DEBUG 23 | 24 | Usage: 25 | Do a 'from . 
import _logging' in the root __init__.py of your package and 26 | all submodules 'inherit' the logging configuration 27 | """ 28 | 29 | import logging 30 | 31 | 32 | # for 'biskotaki' app/code 33 | FILE_TARGET_LOGS = 'biskotaki.log' 34 | 35 | #### FILE LOGGING 36 | # set up logging to file for DEBUG Level and above 37 | logging.basicConfig( 38 | level=logging.DEBUG, 39 | format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s', 40 | datefmt='%m-%d %H:%M', 41 | filename=FILE_TARGET_LOGS, 42 | filemode='w', 43 | ) 44 | 45 | #### CONSOLE LOGGING 46 | console = logging.StreamHandler() 47 | 48 | ### Handler which writes DEBUG messages or higher to the sys.stderr ### 49 | console.setLevel(logging.DEBUG) 50 | # console.setLevel(logging.INFO) 51 | 52 | # set a format which is simpler for console use 53 | formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s') 54 | # tell the handler to use this format 55 | console.setFormatter(formatter) 56 | # add the handler to the root logger 57 | logging.getLogger('').addHandler(console) 58 | 59 | 60 | # Now, we can log to the root logger, or any other logger. First the root... 61 | # logging.info('Blah blah') 62 | 63 | # Now, define a couple of other loggers which might represent areas in your 64 | # application: 65 | 66 | # logger1 = logging.getLogger('myapp.area1') 67 | # logger2 = logging.getLogger('myapp.area2') 68 | # logger3 = logging.getLogger(__name__) 69 | 70 | # logger1.debug('balh blah') 71 | # logger1.info('balh blah') 72 | # logger2.warning('balh blah') 73 | # logger3.error('balh blah') 74 | -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/src/biskotaki/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/boromir674/cookiecutter-python-package/e41cee0d3cbd3a14718b35317594dfec508b616c/tests/data/snapshots/biskotaki-no-input/src/biskotaki/py.typed -------------------------------------------------------------------------------- /tests/data/snapshots/biskotaki-no-input/tests/smoke_test.py: -------------------------------------------------------------------------------- 1 | def test_import_module(): 2 | import biskotaki 3 | 4 | assert biskotaki is not None 5 | -------------------------------------------------------------------------------- /tests/data/test_cookiecutter.json: -------------------------------------------------------------------------------- 1 | { 2 | "cookiecutter": { 3 | "project_name": "Pytest Object Getter", 4 | "project_slug": "pytest-object-getter", 5 | "project_type": "module", 6 | "repo_name": "pytest-object-getter", 7 | "pkg_name": "pytest_object_getter", 8 | "full_name": "Konstantinos Lampridis", 9 | "author": "Konstantinos Lampridis", 10 | "author_email": "k.lampridis@hotmail.com", 11 | "github_username": "boromir674", 12 | "project_short_description": "Import any object from a 3rd party module while mocking its namespace on demand.", 13 | "pypi_subtitle": "Import any object from a 3rd party module while mocking its namespace on demand.", 14 | "release_date": "2022-04-19", 15 | "year": "2022", 16 | "version": "0.0.1", 17 | "initialize_git_repo": "yes", 18 | "interpreters": { 19 | "supported-interpreters": [ 20 | "3.6", 21 | "3.7", 22 | "3.8", 23 | "3.9", 24 | "3.10", 25 | "3.11" 26 | ] 27 | }, 28 | "docs_builder": "sphinx", 29 | "rtd_python_version": "3.10", 30 | "cicd": "stable", 31 | "_template": "." 
32 | } 33 | } 34 | -------------------------------------------------------------------------------- /tests/test_ci_pipeline_generation.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.mark.parametrize( 5 | 'config_file', 6 | [ 7 | # Test Case 1 8 | # '.github/biskotaki.yaml', 9 | # Test Case 2 10 | None, 11 | ], 12 | ) 13 | def test_file_is_valid_yaml(config_file, user_config, mock_check, tmpdir): 14 | """Test Generator produces Valid CI config files, as expected. 15 | 16 | This Test features the following: 17 | - automatically mocks futures (web/http) 18 | """ 19 | from pathlib import Path 20 | 21 | # user_config_file: Path = Path(__file__).parent / '..' / config_file 22 | # Generate the pipeline 23 | from cookiecutter_python.backend.main import generate 24 | 25 | default_parameters = user_config[config_file] 26 | mock_check.config = default_parameters 27 | mock_check('pypi', True) 28 | mock_check('readthedocs', True) 29 | 30 | project_dir: str = generate( 31 | no_input=True, 32 | output_dir=tmpdir, 33 | config_file=config_file, 34 | default_config=False, 35 | ) 36 | 37 | generate_ci_pipeline_config = Path(project_dir) / '.github' / 'workflows' / 'test.yaml' 38 | assert generate_ci_pipeline_config.exists() 39 | assert generate_ci_pipeline_config.is_file() 40 | 41 | # Assert that the pipeline is valid yaml 42 | import re 43 | 44 | import yaml 45 | 46 | def sanitize_load(s: str): 47 | for w in "on".split(): 48 | reg = re.compile(r'^(on):', re.MULTILINE) 49 | s = reg.sub(r'\1:', s) 50 | # >> Issue: [B506:yaml_load] Use of unsafe yaml load. Allows instantiation of arbitrary objects. Consider yaml.safe_load(). 51 | # Severity: Medium Confidence: High 52 | # CWE: CWE-20 (https://cwe.mitre.org/data/definitions/20.html) 53 | # More Info: https://bandit.readthedocs.io/en/1.7.7/plugins/b506_yaml_load.html 54 | return yaml.safe_load(s) 55 | 56 | ci_config = sanitize_load(generate_ci_pipeline_config.read_text()) 57 | 58 | assert ci_config is not None 59 | assert isinstance(ci_config, dict) 60 | 61 | assert 'name' in ci_config 62 | assert 'on' + '' in ci_config, 'on is missing: \n' + '\n'.join( 63 | [str(x) for x in ci_config.keys()] 64 | ) 65 | assert 'jobs' in ci_config 66 | 67 | assert 'test_suite' in ci_config['jobs'] 68 | assert 'pypi_publish' in ci_config['jobs'] 69 | assert 'check_which_git_branch_we_are_on' in ci_config['jobs'] 70 | assert 'docker_build' in ci_config['jobs'] 71 | -------------------------------------------------------------------------------- /tests/test_git_sdk.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | 4 | def test_git_sdk_init(tmp_path: Path): 5 | from git import Repo 6 | 7 | # GIVEN a temporary empty folder 8 | project_folder = tmp_path / "unit_test_git_sdk_init" 9 | project_folder.mkdir(parents=True, exist_ok=True) 10 | # sanity that no files exist 11 | assert not any(project_folder.iterdir()) 12 | # WHEN we call the git_sdk_init function 13 | _ = Repo.init(project_folder) 14 | # THEN a .git folder should be created 15 | assert (project_folder / ".git").exists() 16 | 17 | 18 | def test_git_sdk_is_dirty(tmp_path: Path): 19 | from git import Repo 20 | 21 | # GIVEN a temporary empty folder 22 | project_folder = tmp_path / "unit_test_git_sdk_is_dirty" 23 | project_folder.mkdir(parents=True, exist_ok=True) 24 | # sanity that no files exist 25 | assert not any(project_folder.iterdir()) 26 | # WHEN we call the git_sdk_init 
function 27 | repo = Repo.init(project_folder) 28 | # THEN a .git folder should be created 29 | assert (project_folder / ".git").exists() 30 | print("\n" + str(project_folder)) 31 | # WHEN we create a new file 32 | new_file = project_folder / "test_file.txt" 33 | new_file.write_text("Hello World!") 34 | 35 | # WHEN we check if the repo is dirty 36 | assert not repo.is_dirty() 37 | 38 | # cw = repo.config_writer() 39 | 40 | # # Access the global configuration writer 41 | # WARNING THIS destroys the format of .gitconfig and remove comments !!!! 42 | # with repo.config_writer(config_level='global') as cw: 43 | # # Add the safe.directory entry 44 | # cw.add_value('safe', 'directory', str(project_folder)) 45 | 46 | # cr = repo.config_reader() # use reader to assert writer effect 47 | -------------------------------------------------------------------------------- /tests/test_is_repo_clean_function.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | 4 | def test_is_git_repo_clean_returns_true_for_new_repo(my_run_subprocess, tmp_path: Path): 5 | # GIVEN a directory with one file inside 6 | git_folder = (tmp_path / 'non_git_dir').resolve().absolute() 7 | git_folder.mkdir() 8 | (git_folder / 'a.txt').touch() 9 | 10 | # GIVEN it is initialized as a git repository 11 | from git import Repo 12 | 13 | repo = Repo.init(f"{git_folder}") 14 | assert (git_folder / '.git').exists() 15 | assert (git_folder / '.git').is_dir() 16 | 17 | result = my_run_subprocess( 18 | 'git', 19 | *['status', '--porcelain'], 20 | cwd=str(git_folder), 21 | check=False, # prevent raising exception 22 | ) 23 | 24 | assert result.exit_code == 0 25 | assert result.stdout == '?? a.txt\n' 26 | assert result.stderr == '' 27 | 28 | # WHEN calling is_git_repo_clean function 29 | repo_is_clean = not repo.is_dirty() 30 | 31 | # THEN the command should return True 32 | assert repo_is_clean is True 33 | -------------------------------------------------------------------------------- /tests/test_module.py: -------------------------------------------------------------------------------- 1 | import re 2 | import sys 3 | 4 | 5 | def test_simple_invocation(run_subprocess): 6 | result = run_subprocess( 7 | sys.executable, 8 | '-m', 9 | 'cookiecutter_python', 10 | '--help', 11 | ) 12 | assert result.exit_code == 0 13 | assert re.match(r'Usage: generate\-python \[OPTIONS\]', result.stdout.split('\n')[0]) 14 | assert result.stderr == '' 15 | 16 | 17 | def test_importing(get_object): 18 | main_in__main__namespace = get_object('main', 'cookiecutter_python.__main__') 19 | assert '__call__' in dir(main_in__main__namespace) 20 | -------------------------------------------------------------------------------- /tests/test_running_test_suite.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | import pytest 5 | 6 | 7 | @pytest.fixture 8 | def environment(): 9 | def get_environment_variables(project_dir: str): 10 | environment_variables = { 11 | 'PYTHONPATH': f'{str(os.path.join(project_dir, "src"))}', 12 | } 13 | if sys.platform in ('linux', 'cygwin'): # ie we are on a Linux-like OS 14 | return dict( 15 | environment_variables, 16 | **{'LC_ALL': 'C.UTF-8', 'LANG': 'C.UTF-8'}, 17 | ) 18 | if sys.platform == 'darwin': # ie we are on a Mac OS 19 | return dict( 20 | environment_variables, 21 | **{'LC_ALL': 'en_GB.UTF-8', 'LANG': 'en_GB.UTF-8'}, 22 | ) 23 | if sys.platform == 'win32': # ie we are on a Windows OS 24 | return 
dict( 25 | environment_variables, 26 | **{ 27 | 'LC_ALL': 'C.UTF-8', 28 | 'LANG': 'C.UTF-8', 29 | 'PYTHONHASHSEED': '2577074909', 30 | }, 31 | ) 32 | raise RuntimeError(f'Unexpected System Found: {sys.platform}') 33 | 34 | return get_environment_variables 35 | 36 | 37 | @pytest.mark.skipif(sys.platform == 'win32', reason="not working out-of-the-box for Windows") 38 | def test_running_pytest(environment, run_subprocess, project_dir): 39 | result = run_subprocess( 40 | sys.executable, 41 | '-m', 42 | 'pytest', 43 | '-s', 44 | '-ra', 45 | '-vv', 46 | os.path.join(project_dir, 'tests'), 47 | check=False, 48 | env=environment(project_dir), 49 | ) 50 | assert ' failed' not in result.stdout 51 | assert result.stderr == '' 52 | assert result.exit_code == 0 53 | -------------------------------------------------------------------------------- /tests/test_version_string.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | 6 | def test_version_msg_function_returns_expected_string(distro_loc: Path): 7 | # GIVEN the version_msg function 8 | from cookiecutter_python.cli import version_msg 9 | 10 | # WHEN the version_msg function is called 11 | result: str = version_msg() 12 | 13 | # THEN it should return the expected string 14 | import sys 15 | 16 | EXPECTED_PYTHON_VERSION: str = ".".join(map(str, sys.version_info[:3])) 17 | 18 | EXPECTED_PARENT_DIR_OF_COOKIECUTTER_PYTHON: Path = distro_loc.parent 19 | 20 | assert result == ( 21 | f'Python Generator %(version)s from {EXPECTED_PARENT_DIR_OF_COOKIECUTTER_PYTHON} (Python {EXPECTED_PYTHON_VERSION})' 22 | ) 23 | 24 | 25 | @pytest.mark.runner_setup(mix_stderr=False) 26 | def test_cli_version_flag_returns_expected_string( 27 | distro_loc: Path, 28 | isolated_cli_runner, 29 | ): 30 | from cookiecutter_python import __version__ 31 | from cookiecutter_python.cli import main 32 | 33 | result = isolated_cli_runner.invoke( 34 | main, 35 | args=['--version'], 36 | input=None, 37 | env=None, 38 | catch_exceptions=False, 39 | ) 40 | assert result.exit_code == 0 41 | 42 | import sys 43 | 44 | EXPECTED_PYTHON_VERSION: str = ".".join(map(str, sys.version_info[:3])) 45 | 46 | EXPECTED_PARENT_DIR_OF_COOKIECUTTER_PYTHON: Path = distro_loc.parent 47 | 48 | assert result.stdout == ( 49 | f'Python Generator {__version__} from {EXPECTED_PARENT_DIR_OF_COOKIECUTTER_PYTHON} (Python {EXPECTED_PYTHON_VERSION})\n' 50 | ) 51 | --------------------------------------------------------------------------------
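Taken together, `test_module.py` and `test_version_string.py` above pin down the user-facing behaviour of the generator's entry point: `python -m cookiecutter_python --help` must print a usage line for `generate-python`, and the `--version` message must follow the `Python Generator ... from ... (Python ...)` format produced by `version_msg()`. A rough usage sketch of that behaviour (placeholder values only; real output depends on the installed version and location):

```sh
# Illustrative sketch; <version> and <install-path> are placeholders, not captured output
$ python -m cookiecutter_python --help
Usage: generate-python [OPTIONS]
...

$ python -m cookiecutter_python --version
Python Generator <version> from <install-path> (Python <major.minor.patch>)
```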