├── .github ├── dependabot.yaml ├── reference-workflows │ ├── CI_1_1_1.yaml │ ├── CI_1_1_2.yaml │ ├── CI_1_2_1.yaml │ ├── CI_1_2_2.yaml │ ├── CI_1_3_1.yaml │ ├── CI_1_3_2.yaml │ ├── CI_2_1_1.yaml │ ├── CI_2_1_2.yaml │ ├── CI_2_2_1.yaml │ ├── CI_2_2_2.yaml │ ├── CI_2_3_1.yaml │ ├── CI_2_3_2.yaml │ └── README.md └── workflows │ └── verify-ghas.yaml ├── .gitignore ├── LICENSE ├── README.md ├── cookiecutter.json ├── docs ├── Makefile ├── conf.py ├── index.rst ├── make.bat └── nuances.rst ├── hooks ├── post_gen_project.py └── pre_gen_project.py ├── tests ├── regenerate_references.sh ├── run_yaml.py └── setup_cookiecutter.py └── {{cookiecutter.repo_name}} ├── .codecov.yml ├── .gitattributes ├── .github ├── CONTRIBUTING.md ├── PULL_REQUEST_TEMPLATE.md └── workflows │ ├── CI.yaml │ └── codeql.yaml ├── .gitignore ├── .readthedocs.yaml ├── CODE_OF_CONDUCT.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── devtools ├── README.md ├── conda-envs │ └── test_env.yaml └── scripts │ └── create_conda_env.py ├── docs ├── Makefile ├── README.md ├── _static │ └── README.md ├── _templates │ └── README.md ├── api.rst ├── conf.py ├── developer_guide.rst ├── getting_started.rst ├── index.rst ├── make.bat ├── requirements.yaml └── user_guide.rst ├── pyproject.toml ├── setup.cfg └── {{cookiecutter.repo_name}} ├── __init__.py ├── data ├── README.md └── look_and_say.dat ├── py.typed ├── tests ├── __init__.py └── test_{{cookiecutter.repo_name}}.py └── {{cookiecutter.first_module_name}}.py /.github/dependabot.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "daily" -------------------------------------------------------------------------------- /.github/reference-workflows/CI_1_1_1.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 41 | - uses: mamba-org/setup-micromamba@v2 42 | with: 43 | environment-file: devtools/conda-envs/test_env.yaml 44 | environment-name: test 45 | condarc: | 46 | channels: 47 | - conda-forge 48 | create-args: >- 49 | python=${{ matrix.python-version }} 50 | 51 | - name: Install package 52 | # conda setup requires this special shell 53 | shell: bash -l {0} 54 | run: | 55 | python -m pip install . 
--no-deps 56 | micromamba list 57 | 58 | - name: Run tests 59 | # conda setup requires this special shell 60 | shell: bash -l {0} 61 | run: | 62 | pytest -v --cov=prj_1_1_1 --cov-report=xml --color=yes prj_1_1_1/tests/ 63 | 64 | - name: CodeCov 65 | uses: codecov/codecov-action@v5 66 | with: 67 | files: ./coverage.xml 68 | flags: unittests 69 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 70 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_1_1_2.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 41 | - uses: mamba-org/setup-micromamba@v2 42 | with: 43 | environment-file: devtools/conda-envs/test_env.yaml 44 | environment-name: test 45 | condarc: | 46 | channels: 47 | - conda-forge 48 | create-args: >- 49 | python=${{ matrix.python-version }} 50 | 51 | - name: Install package 52 | # conda setup requires this special shell 53 | shell: bash -l {0} 54 | run: | 55 | python -m pip install . --no-deps 56 | micromamba list 57 | 58 | - name: Run tests 59 | # conda setup requires this special shell 60 | shell: bash -l {0} 61 | run: | 62 | pytest -v --cov=prj_1_1_2 --cov-report=xml --color=yes prj_1_1_2/tests/ 63 | 64 | - name: CodeCov 65 | uses: codecov/codecov-action@v5 66 | with: 67 | files: ./coverage.xml 68 | flags: unittests 69 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 70 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_1_2_1.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 
18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 41 | - uses: mamba-org/setup-micromamba@v2 42 | with: 43 | environment-file: devtools/conda-envs/test_env.yaml 44 | environment-name: test 45 | condarc: | 46 | channels: 47 | - defaults 48 | create-args: >- 49 | python=${{ matrix.python-version }} 50 | 51 | - name: Install package 52 | # conda setup requires this special shell 53 | shell: bash -l {0} 54 | run: | 55 | python -m pip install . --no-deps 56 | micromamba list 57 | 58 | - name: Run tests 59 | # conda setup requires this special shell 60 | shell: bash -l {0} 61 | run: | 62 | pytest -v --cov=prj_1_2_1 --cov-report=xml --color=yes prj_1_2_1/tests/ 63 | 64 | - name: CodeCov 65 | uses: codecov/codecov-action@v5 66 | with: 67 | files: ./coverage.xml 68 | flags: unittests 69 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 70 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_1_2_2.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 41 | - uses: mamba-org/setup-micromamba@v2 42 | with: 43 | environment-file: devtools/conda-envs/test_env.yaml 44 | environment-name: test 45 | condarc: | 46 | channels: 47 | - defaults 48 | create-args: >- 49 | python=${{ matrix.python-version }} 50 | 51 | - name: Install package 52 | # conda setup requires this special shell 53 | shell: bash -l {0} 54 | run: | 55 | python -m pip install . 
--no-deps 56 | micromamba list 57 | 58 | - name: Run tests 59 | # conda setup requires this special shell 60 | shell: bash -l {0} 61 | run: | 62 | pytest -v --cov=prj_1_2_2 --cov-report=xml --color=yes prj_1_2_2/tests/ 63 | 64 | - name: CodeCov 65 | uses: codecov/codecov-action@v5 66 | with: 67 | files: ./coverage.xml 68 | flags: unittests 69 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 70 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_1_3_1.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | - name: Set up Python ${{ matrix.python-version }} 41 | uses: actions/setup-python@v5 42 | with: 43 | python-version: ${{ matrix.python-version }} 44 | 45 | - name: Testing Dependencies 46 | shell: bash 47 | run: | 48 | python -m pip install -U pytest pytest-cov codecov 49 | 50 | - name: Install package 51 | shell: bash 52 | run: | 53 | python -m pip install . 54 | 55 | - name: Run tests 56 | shell: bash 57 | 58 | run: | 59 | pytest -v --cov=prj_1_3_1 --cov-report=xml --color=yes prj_1_3_1/tests/ 60 | 61 | - name: CodeCov 62 | uses: codecov/codecov-action@v5 63 | with: 64 | files: ./coverage.xml 65 | flags: unittests 66 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 67 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_1_3_2.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 
18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | - name: Set up Python ${{ matrix.python-version }} 41 | uses: actions/setup-python@v5 42 | with: 43 | python-version: ${{ matrix.python-version }} 44 | 45 | - name: Testing Dependencies 46 | shell: bash 47 | run: | 48 | python -m pip install -U pytest pytest-cov codecov 49 | 50 | - name: Install package 51 | shell: bash 52 | run: | 53 | python -m pip install . 54 | 55 | - name: Run tests 56 | shell: bash 57 | 58 | run: | 59 | pytest -v --cov=prj_1_3_2 --cov-report=xml --color=yes prj_1_3_2/tests/ 60 | 61 | - name: CodeCov 62 | uses: codecov/codecov-action@v5 63 | with: 64 | files: ./coverage.xml 65 | flags: unittests 66 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 67 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_2_1_1.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 41 | - uses: mamba-org/setup-micromamba@v2 42 | with: 43 | environment-file: devtools/conda-envs/test_env.yaml 44 | environment-name: test 45 | condarc: | 46 | channels: 47 | - conda-forge 48 | create-args: >- 49 | python=${{ matrix.python-version }} 50 | 51 | - name: Install package 52 | # conda setup requires this special shell 53 | shell: bash -l {0} 54 | run: | 55 | python -m pip install . 
--no-deps 56 | micromamba list 57 | 58 | - name: Run tests 59 | # conda setup requires this special shell 60 | shell: bash -l {0} 61 | run: | 62 | pytest -v --cov=prj_2_1_1 --cov-report=xml --color=yes prj_2_1_1/tests/ 63 | 64 | - name: CodeCov 65 | uses: codecov/codecov-action@v5 66 | with: 67 | files: ./coverage.xml 68 | flags: unittests 69 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 70 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_2_1_2.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 41 | - uses: mamba-org/setup-micromamba@v2 42 | with: 43 | environment-file: devtools/conda-envs/test_env.yaml 44 | environment-name: test 45 | condarc: | 46 | channels: 47 | - conda-forge 48 | create-args: >- 49 | python=${{ matrix.python-version }} 50 | 51 | - name: Install package 52 | # conda setup requires this special shell 53 | shell: bash -l {0} 54 | run: | 55 | python -m pip install . --no-deps 56 | micromamba list 57 | 58 | - name: Run tests 59 | # conda setup requires this special shell 60 | shell: bash -l {0} 61 | run: | 62 | pytest -v --cov=prj_2_1_2 --cov-report=xml --color=yes prj_2_1_2/tests/ 63 | 64 | - name: CodeCov 65 | uses: codecov/codecov-action@v5 66 | with: 67 | files: ./coverage.xml 68 | flags: unittests 69 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 70 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_2_2_1.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 
18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 41 | - uses: mamba-org/setup-micromamba@v2 42 | with: 43 | environment-file: devtools/conda-envs/test_env.yaml 44 | environment-name: test 45 | condarc: | 46 | channels: 47 | - defaults 48 | create-args: >- 49 | python=${{ matrix.python-version }} 50 | 51 | - name: Install package 52 | # conda setup requires this special shell 53 | shell: bash -l {0} 54 | run: | 55 | python -m pip install . --no-deps 56 | micromamba list 57 | 58 | - name: Run tests 59 | # conda setup requires this special shell 60 | shell: bash -l {0} 61 | run: | 62 | pytest -v --cov=prj_2_2_1 --cov-report=xml --color=yes prj_2_2_1/tests/ 63 | 64 | - name: CodeCov 65 | uses: codecov/codecov-action@v5 66 | with: 67 | files: ./coverage.xml 68 | flags: unittests 69 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 70 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_2_2_2.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 41 | - uses: mamba-org/setup-micromamba@v2 42 | with: 43 | environment-file: devtools/conda-envs/test_env.yaml 44 | environment-name: test 45 | condarc: | 46 | channels: 47 | - defaults 48 | create-args: >- 49 | python=${{ matrix.python-version }} 50 | 51 | - name: Install package 52 | # conda setup requires this special shell 53 | shell: bash -l {0} 54 | run: | 55 | python -m pip install . 
--no-deps 56 | micromamba list 57 | 58 | - name: Run tests 59 | # conda setup requires this special shell 60 | shell: bash -l {0} 61 | run: | 62 | pytest -v --cov=prj_2_2_2 --cov-report=xml --color=yes prj_2_2_2/tests/ 63 | 64 | - name: CodeCov 65 | uses: codecov/codecov-action@v5 66 | with: 67 | files: ./coverage.xml 68 | flags: unittests 69 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 70 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_2_3_1.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | - name: Set up Python ${{ matrix.python-version }} 41 | uses: actions/setup-python@v5 42 | with: 43 | python-version: ${{ matrix.python-version }} 44 | 45 | - name: Testing Dependencies 46 | shell: bash 47 | run: | 48 | python -m pip install -U pytest pytest-cov codecov 49 | 50 | - name: Install package 51 | shell: bash 52 | run: | 53 | python -m pip install . 54 | 55 | - name: Run tests 56 | shell: bash 57 | 58 | run: | 59 | pytest -v --cov=prj_2_3_1 --cov-report=xml --color=yes prj_2_3_1/tests/ 60 | 61 | - name: CodeCov 62 | uses: codecov/codecov-action@v5 63 | with: 64 | files: ./coverage.xml 65 | flags: unittests 66 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 67 | -------------------------------------------------------------------------------- /.github/reference-workflows/CI_2_3_2.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 
18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on ${{ matrix.os }}, Python ${{ matrix.python-version }} 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | 40 | - name: Set up Python ${{ matrix.python-version }} 41 | uses: actions/setup-python@v5 42 | with: 43 | python-version: ${{ matrix.python-version }} 44 | 45 | - name: Testing Dependencies 46 | shell: bash 47 | run: | 48 | python -m pip install -U pytest pytest-cov codecov 49 | 50 | - name: Install package 51 | shell: bash 52 | run: | 53 | python -m pip install . 54 | 55 | - name: Run tests 56 | shell: bash 57 | 58 | run: | 59 | pytest -v --cov=prj_2_3_2 --cov-report=xml --color=yes prj_2_3_2/tests/ 60 | 61 | - name: CodeCov 62 | uses: codecov/codecov-action@v5 63 | with: 64 | files: ./coverage.xml 65 | flags: unittests 66 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 67 | -------------------------------------------------------------------------------- /.github/reference-workflows/README.md: -------------------------------------------------------------------------------- 1 | # Reference GitHub Actions Workflows 2 | 3 | This directory contains the reference workflows for all of the GitHub Actions generated from the Cookiecutter 4 | 5 | Another GHA Workflow validates that the outputs of this folder match the outputs of the Cookiecutter 6 | 7 | That same workflow also has an approximate implementation of the GHA to at least try to emulate its steps. 8 | 9 | To regenerate the references, run the "regenerate_references.sh" script from the "tests" directory in the root of the 10 | Cookiecutter. There is a bit of manual adjustment required if the scripts change, but this is at least a simple test. 11 | -------------------------------------------------------------------------------- /.github/workflows/verify-ghas.yaml: -------------------------------------------------------------------------------- 1 | name: Pseudo Validate GHA Output 2 | 3 | # Approximation of the GHA "on" block 4 | on: 5 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 6 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 7 | push: 8 | branches: 9 | - "main" 10 | pull_request: 11 | branches: 12 | - "main" 13 | schedule: 14 | # Nightly tests run on main by default: 15 | # Scheduled workflows run on the latest commit on the default or base branch. 16 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 17 | - cron: "0 0 * * 0" 18 | 19 | # Custom global env block (Has to be recreated in each matrix, can't pass sequences to this) 20 | env: # I want to share these between jobs, but I can't access the env from inside matrix, has to be on the steps. 
21 | licenses: "1 2" 22 | depend-sources: "1 2 3" 23 | rtd: "1 2" 24 | 25 | 26 | jobs: 27 | generate-cookiecutter: 28 | name: "Cookiecutter Artifacts" 29 | runs-on: ${{ matrix.os }} 30 | strategy: 31 | matrix: 32 | os: [ubuntu-latest, macOS-latest, windows-latest] 33 | python-version: ["3.11", "3.12", "3.13"] 34 | steps: 35 | - uses: actions/checkout@v4 36 | 37 | - name: Set up Python ${{ matrix.python-version }} 38 | uses: actions/setup-python@v5 39 | with: 40 | python-version: ${{ matrix.python-version }} 41 | 42 | - name: "Install dependencies" 43 | shell: bash 44 | run: | 45 | python -m pip install -U pyyaml cookiecutter 46 | 47 | - name: "Construct Cookiecutter" 48 | shell: bash 49 | run: | 50 | mkdir artifact_upload # Uploading artifact directory uploads as "path/*" rather than "path", so stage the upload. 51 | for LIC in ${{ env.licenses }} 52 | do 53 | for DEP in ${{ env.depend-sources }} 54 | do 55 | for RTD in ${{ env.rtd }} 56 | do 57 | COMBO="$LIC"_"$DEP"_"$RTD" 58 | python tests/setup_cookiecutter.py prj_$COMBO $LIC $DEP $RTD 59 | mv prj_$COMBO artifact_upload/prj_$COMBO 60 | done 61 | done 62 | done 63 | 64 | - name: "Upload artifacts" 65 | if: ${{ matrix.os == 'ubuntu-latest' && matrix.python-version == 3.12 }} # Upload only if ubuntu and latest python (only need to run once) 66 | uses: actions/upload-artifact@v4 67 | with: 68 | name: cookiecutter_outputs 69 | path: artifact_upload 70 | include-hidden-files: true 71 | 72 | compare-action-output: 73 | name: "Compare GHA Output" 74 | runs-on: ubuntu-latest 75 | needs: "generate-cookiecutter" 76 | strategy: 77 | matrix: 78 | license: [1, 2] 79 | depend-source: [1, 2, 3] 80 | rtd: [1, 2] 81 | 82 | steps: 83 | - uses: actions/checkout@v4 84 | 85 | - name: "Fetch Artifacts" 86 | uses: actions/download-artifact@v4 87 | with: 88 | name: cookiecutter_outputs 89 | 90 | - name: "Compare Reference CI" 91 | shell: bash 92 | run: | 93 | COMBO="${{ matrix.license }}_${{ matrix.depend-source }}_${{ matrix.rtd }}" 94 | mv prj_$COMBO/.github/workflows/CI.yaml CI_$COMBO.yaml 95 | COMPARE=$(diff CI_$COMBO.yaml .github/reference-workflows/CI_$COMBO.yaml) 96 | if [[ ! -z $COMPARE ]] 97 | then 98 | echo "CI_$COMBO.yaml differs from reference!" 
99 | echo $COMPARE 100 | exit 1 101 | fi 102 | 103 | 104 | conda-forge-dep: 105 | needs: "generate-cookiecutter" 106 | name: Test CF (Approx) on ${{ matrix.os }}, Python ${{ matrix.python-version }} 107 | runs-on: ${{ matrix.os }} 108 | strategy: # Approximate strategy, uses a few other options 109 | matrix: 110 | os: [ubuntu-latest , macOS-latest, windows-latest] 111 | python-version: ["3.11", "3.12", "3.13"] 112 | license: [1] # Nonstandard 113 | rtd: [1, 2] # Nonstandard 114 | steps: 115 | # - uses: actions/checkout@v4 # This isn't necessary here 116 | 117 | - name: "Fetch Artifacts" 118 | uses: actions/download-artifact@v4 119 | with: 120 | name: cookiecutter_outputs 121 | 122 | - name: Additional info about the build 123 | shell: bash 124 | run: | 125 | uname -a 126 | df -h 127 | ulimit -a 128 | 129 | # - name: "Change directory" # Have to CD here to make sure this works 130 | # shell: bash 131 | # run: | 132 | # cd prj_${{ matrix.license }}_1_${{ matrix.rtd }} 133 | 134 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 135 | - uses: mamba-org/setup-micromamba@v2 136 | with: 137 | environment-file: prj_${{ matrix.license }}_1_${{ matrix.rtd }}/devtools/conda-envs/test_env.yaml 138 | environment-name: test 139 | create-args: >- 140 | python=${{ matrix.python-version }} 141 | condarc: | 142 | channels: 143 | - conda-forge 144 | 145 | - name: Install package 146 | 147 | # conda setup requires this special shell 148 | shell: bash -l {0} 149 | working-directory: prj_${{ matrix.license }}_1_${{ matrix.rtd }} # Nonstandard 150 | run: | 151 | python -m pip install . --no-deps 152 | micromamba list 153 | 154 | 155 | - name: Run tests 156 | 157 | # conda setup requires this special shell 158 | shell: bash -l {0} 159 | working-directory: prj_${{ matrix.license }}_1_${{ matrix.rtd }} # Nonstandard 160 | run: | 161 | pytest -v --cov=prj_${{ matrix.license }}_1_${{ matrix.rtd }} --cov-report=xml --color=yes prj_${{ matrix.license }}_1_${{ matrix.rtd }}/tests/ 162 | 163 | - name: CodeCov 164 | uses: codecov/codecov-action@v5 165 | with: 166 | files: ./prj_${{ matrix.license }}_1_${{ matrix.rtd }}/coverage.xml 167 | flags: unittests 168 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 169 | 170 | 171 | conda-defaults-dep: 172 | needs: "generate-cookiecutter" 173 | name: Test Conda Defaults (Approx) on ${{ matrix.os }}, Python ${{ matrix.python-version }} 174 | runs-on: ${{ matrix.os }} 175 | strategy: # Approximate strategy, uses a few other options 176 | matrix: 177 | os: [ubuntu-latest , macOS-latest, windows-latest] 178 | python-version: ["3.11", "3.12", "3.13"] 179 | license: [1] # Nonstandard 180 | rtd: [1, 2] # Nonstandard 181 | 182 | steps: 183 | # - uses: actions/checkout@v4 # This isn't necessary here 184 | 185 | - name: Additional info about the build 186 | shell: bash 187 | run: | 188 | uname -a 189 | df -h 190 | ulimit -a 191 | 192 | - name: "Fetch Artifacts" 193 | uses: actions/download-artifact@v4 194 | with: 195 | name: cookiecutter_outputs 196 | 197 | # - name: "Change directory" # Have to CD here to make sure this works 198 | # shell: bash 199 | # run: | 200 | # cd prj_${{ matrix.license }}_2_${{ matrix.rtd }} 201 | 202 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 203 | - uses: mamba-org/setup-micromamba@v2 204 | with: 205 | environment-file: prj_${{ matrix.license }}_2_${{ matrix.rtd }}/devtools/conda-envs/test_env.yaml 206 | environment-name: test 207 | create-args: >- 208 | python=${{ 
matrix.python-version }} 209 | condarc: | 210 | channels: 211 | - defaults 212 | 213 | - name: Install package 214 | 215 | # conda setup requires this special shell 216 | shell: bash -l {0} 217 | working-directory: prj_${{ matrix.license }}_2_${{ matrix.rtd }} # Nonstandard 218 | run: | 219 | python -m pip install . --no-deps 220 | micromamba list 221 | 222 | 223 | - name: Run tests 224 | 225 | # conda setup requires this special shell 226 | shell: bash -l {0} 227 | working-directory: prj_${{ matrix.license }}_2_${{ matrix.rtd }} # Nonstandard 228 | run: | 229 | pytest -v --cov=prj_${{ matrix.license }}_2_${{ matrix.rtd }} --cov-report=xml --color=yes prj_${{ matrix.license }}_2_${{ matrix.rtd }}/tests/ 230 | 231 | - name: CodeCov 232 | uses: codecov/codecov-action@v5 233 | with: 234 | files: ./prj_${{ matrix.license }}_2_${{ matrix.rtd }}/coverage.xml 235 | flags: unittests 236 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 237 | 238 | pip-dep: 239 | needs: "generate-cookiecutter" 240 | name: Test Pip (Approx) on ${{ matrix.os }}, Python ${{ matrix.python-version }} 241 | runs-on: ${{ matrix.os }} 242 | strategy: # Approximate strategy, uses a few other options 243 | matrix: 244 | os: [ubuntu-latest , macOS-latest, windows-latest] 245 | python-version: ["3.11", "3.12", "3.13"] 246 | license: [1] # Nonstandard 247 | rtd: [1, 2] # Nonstandard 248 | 249 | steps: 250 | # - uses: actions/checkout@v4 # This isn't necessary here 251 | 252 | - name: Additional info about the build 253 | shell: bash 254 | run: | 255 | uname -a 256 | df -h 257 | ulimit -a 258 | 259 | - name: "Fetch Artifacts" 260 | uses: actions/download-artifact@v4 261 | with: 262 | name: cookiecutter_outputs 263 | 264 | 265 | # - name: "Change directory" # Have to CD here to make sure this works 266 | # shell: bash 267 | # run: | 268 | # cd prj_${{ matrix.license }}_3_${{ matrix.rtd }} 269 | 270 | 271 | - name: Set up Python ${{ matrix.python-version }} 272 | uses: actions/setup-python@v5 273 | with: 274 | python-version: ${{ matrix.python-version }} 275 | 276 | - name: Testing Dependencies 277 | shell: bash 278 | run: | 279 | python -m pip install -U pytest pytest-cov codecov 280 | 281 | - name: Install package 282 | 283 | shell: bash 284 | working-directory: prj_${{ matrix.license }}_3_${{ matrix.rtd }} # Nonstandard 285 | run: | 286 | python -m pip install . 
287 | 288 | 289 | - name: Run tests 290 | 291 | shell: bash 292 | working-directory: prj_${{ matrix.license }}_3_${{ matrix.rtd }} # Nonstandard 293 | run: | 294 | pytest -v --cov=prj_${{ matrix.license }}_3_${{ matrix.rtd }} --cov-report=xml --color=yes prj_${{ matrix.license }}_3_${{ matrix.rtd }}/tests/ 295 | 296 | - name: CodeCov 297 | uses: codecov/codecov-action@v5 298 | with: 299 | files: ./prj_${{ matrix.license }}_3_${{ matrix.rtd }}/coverage.xml 300 | flags: unittests 301 | name: codecov-${{ matrix.os }}-py${{ matrix.python-version }} 302 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # Jupyter Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # SageMath parsed files 80 | *.sage.py 81 | 82 | # dotenv 83 | .env 84 | 85 | # virtualenv 86 | .venv 87 | venv/ 88 | ENV/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # Pycharm settings 98 | .idea 99 | *.iml 100 | *.iws 101 | *.ipr 102 | 103 | # Ignore Sublime Text settings 104 | *.sublime-workspace 105 | *.sublime-project 106 | 107 | # vim swap 108 | *.swp 109 | 110 | # mkdocs documentation 111 | /site 112 | 113 | # mypy 114 | .mypy_cache/ 115 | 116 | # profraw files from LLVM? Unclear exactly what triggers this 117 | # There are reports this comes from LLVM profiling, but also Xcode 9. 
118 | *profraw 119 | 120 | # Cookiecutter-CMS Test Artifacts 121 | /tests/CI_files 122 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Molecular Software Sciences Institute 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Cookiecutter for Computational Molecular Sciences (CMS) Python Packages 2 | [//]: # (Badges) 3 | [![GitHub Actions Build Status](https://github.com/MolSSI/cookiecutter-cms/workflows/Pseudo%20Validate%20GHA%20Output/badge.svg)](https://github.com/MolSSI/cookiecutter-cms/actions?query=workflow%3A%22Pseudo+Validate+GHA+Output%22) 4 | [![Documentation Status](https://readthedocs.org/projects/cookiecutter-cms/badge/?version=latest)](https://cookiecutter-cms.readthedocs.io/en/latest/?badge=latest) 5 | 6 | A [cookiecutter](https://github.com/cookiecutter/cookiecutter) template for those interested in developing computational 7 | molecular packages in Python. Skeletal starting repositories can be created from this template to create the file 8 | structure semi-autonomously, so you can focus on what's important: the science! 9 | 10 | The skeletal structure is designed to help you get started, but do not feel limited by the skeleton's features included 11 | here. Just to name a few things you can alter to suit your needs: change continuous integration options, remove 12 | deployment platforms, or test with a different suite. 13 | 14 | ## Features 15 | * Python-centric skeletal structure with initial module files 16 | * Pre-configured `pyproject.toml` and `setup.cfg` for installation and packaging 17 | * Pre-configured Windows, Linux, and OSX continuous integration on GitHub Actions. 
18 | * Choice of dependency locations through `conda-forge`, default `conda`, or `pip` 19 | * Basic testing structure with [PyTest](https://docs.pytest.org/en/latest/) 20 | * Automatic `git` initialization + tag 21 | * GitHub Hooks 22 | * Automatic package version control with [Versioningit](https://versioningit.readthedocs.io/en/stable/) 23 | * Sample data inclusion with packaging instructions 24 | * Basic documentation structure powered by [Sphinx](http://www.sphinx-doc.org/en/master/) 25 | * Automatic license file inclusion from several common Open Source licenses (optional) 26 | 27 | ## Requirements 28 | 29 | * Python 3.11, 3.12, or 3.13 30 | * [Cookiecutter](http://cookiecutter.readthedocs.io/en/latest/installation.html) 31 | * [Git](https://git-scm.com/) 32 | 33 | ## Usage 34 | 35 | With [`cookiecutter` installed](https://cookiecutter.readthedocs.io/en/latest/installation.html#install-cookiecutter), 36 | execute the following command inside the folder where you want to create the skeletal repository. 37 | 38 | ```bash 39 | cookiecutter gh:molssi/cookiecutter-cms 40 | ``` 41 | 42 | This command fetches the repository from GitHub automatically and prompts the user for some simple information such as 43 | package name, author(s), and license. 44 | 45 | [![The cookiecutter in action](http://img.youtube.com/vi/_E7AlaG8zbk/0.jpg)](http://www.youtube.com/watch?v=_E7AlaG8zbk "Computational Molecular Sciences Cookiecutter Example") 46 | 47 | ## Supported Python Versions 48 | The MolSSI Cookiecutter will strive to support the current version of Python plus the two minor versions before it. This 49 | philosophy is in line with [Conda-Forge's](https://conda-forge.org/docs/orga/guidelines.html#python) guidelines 50 | and gives projects ample time to implement new features. 51 | 52 | ### When to drop support for older Python versions? 53 | 54 | Project developers can freely choose when to drop support for older versions of Python, or if they don't want to support 55 | as many. The general rules we recommend are: 56 | 57 | * Support at least two Python versions: the most recent and the preceding minor version, e.g., 3.11 and 3.12. 58 | * Dropping Python versions should require a minor Project Version increment. 59 | * New Python versions should have been supported for at least one minor revision, e.g., Project X.Y supports Python 3.11 and 3.12; 60 | Project X.Y+1 supports Python 3.11, 3.12, and 3.13; Project X.Y+2 supports Python 3.12 and 3.13. 61 | * Add deprecation warnings if features will be removed. 62 | 63 | ### Where is setup.py? 64 | 65 | For a long time, many Python projects relied on one of the libraries `distutils` or `setuptools` and a corresponding 66 | meta-data defining file often called `setup.py`. These dependencies required Python to run, and by their nature limited 67 | how much configuration could be done. `setup.py` has since been superseded by a new file called `pyproject.toml`, which 68 | is a build-system agnostic file that serves much of the same purpose, but can be extended to any number of tools, many 69 | of which can be retrieved from the internet simply by declaring them in the `pyproject.toml` file. Many of the features 70 | which were in `setup.py` can be replaced by equivalent keys in the `pyproject.toml`. By default, the cookiecutter uses 71 | the `setuptools` backend anyway, just with the modernized install specification. 72 | 73 | ## Next steps and web integrations 74 | 75 | The repository contains a number of "hooks" that integrate with a variety of web services.
To fully integrate the 76 | project with these web services and to get started developing your project, please proceed through the following 77 | directions. 78 | 79 | ### Local installation 80 | 81 | For development work it is often recommended to do a "local" Python install via `pip install -e .`. This command will 82 | link your new project into your Python site-packages folder so that it can be imported from any directory on your computer. 83 | 84 | ### Setting up with GitHub 85 | 86 | Upon creation, this project will initialize the output as a `git` repository. However, this does not automatically 87 | register the repository with GitHub. To do this, follow the instructions for 88 | [Adding an existing project to GitHub using the command line](https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/) 89 | . Follow the first step to create the repository on GitHub, but ignore the warnings about the README, license, and 90 | `.gitignore` files as this repo creates them. From there, you can skip to after the "first commit" instructions and 91 | proceed from there. 92 | 93 | ### Testing 94 | The Python testing framework was chosen to be [pytest](https://pytest.org) for this project. Other testing frameworks are available; 95 | however, the authors believe the combination of easy [parametrization of tests](https://docs.pytest.org/en/latest/parametrize.html), 96 | [fixtures](https://docs.pytest.org/en/latest/fixture.html), and [test marking](https://docs.pytest.org/en/latest/example/markers.html) 97 | makes `pytest` particularly well suited for molecular software packages. 98 | 99 | To get started, additional tests can be added to the `project/tests/` folder. Any function starting with `test_*` will automatically be 100 | included in the testing framework. While these can be added anywhere in your directory structure, it is highly recommended to keep them 101 | contained within the `project/tests/` folder. 102 | 103 | Tests can be run with the `pytest -v` command. There are a number of additional command line arguments to 104 | [explore](https://docs.pytest.org/en/latest/usage.html). 105 | 106 | ### Continuous Integration (GitHub Actions) 107 | 108 | As of version 1.3, we provide preconfigured workflows for [GitHub Actions](https://github.com/features/actions), with 109 | support for Linux, macOS, and Windows. Conda support is possible thanks to the excellent 110 | [@mamba-org's `setup-micromamba` action](https://github.com/marketplace/actions/setup-micromamba). We 111 | encourage you to read its documentation for further details on GitHub Actions themselves. 112 | 113 | The Cookiecutter's own GitHub Actions workflow does a number of things differently from the output Actions. We detail those 114 | differences below, but none of this is needed to understand the output GitHub Action Workflows, which are much simpler. 115 | 116 | The Cookiecutter's ability to test the GitHub Actions it generates has some limitations, but they are still properly tested. 117 | This repository has a multi-job GitHub Action Workflow to do a few things: 118 | * Run the Cookiecutter and generate outputs. 119 | * Compare the output CI files to references. 120 | * Run an approximate implementation of the generated CI files. 121 | 122 | If the reference files need to be regenerated, there is a script to help with this. 123 | 124 | Ideally, the Cookiecutter would run the generated output files in real time. However, that is currently impossible with 125 | GitHub Actions (as of October 14, 2020).
We, the Cookiecutter-CMS maintainers, have also looked at reactive PRs that 126 | build on different branches and open new PRs, as well as setting up dummy repositories, pushing to those, and then 127 | monitoring the test results from there. This was all determined to be overly complicated, although we welcome suggestions 128 | and ideas for improvements. 129 | 130 | ### Discontinued CI Strategies: Travis & AppVeyor 131 | We **no longer recommend** that projects use [Travis-CI](https://travis-ci.com) or [AppVeyor](https://www.appveyor.com) 132 | for CI services. We found the AppVeyor service to be notoriously slow in practice, and Travis 133 | [updated their billing model](https://blog.travis-ci.com/2020-11-02-travis-ci-new-billing) to charge for OSX testing and 134 | further limit their Linux concurrency, even for fully open source software. Given the rise of 135 | [GitHub Actions](https://github.com/features/actions), we felt it was appropriate to transition off these platforms as 136 | of the CMS Cookiecutter's 1.5 release. 137 | 138 | The final version of the CMS-Cookiecutter with Travis and AppVeyor support can be found 139 | here: https://github.com/MolSSI/cookiecutter-cms/releases/tag/1.4 for legacy purposes. 140 | 141 | #### Pre-caching common build data 142 | 143 | Some continuous integration platforms allow for caching of build data, which you may, or may not, find advantageous. 144 | The general purpose of a cache is to store and fetch files and folders which may take a long time to either 145 | generate or download every time you want to run a CI build, often because build (and developer) time is limited. 146 | However, if the cached data changes any time during a build, then the whole targeted cache is updated and uploaded. 147 | So, you should only cache things you do not expect to change. 148 | 149 | You may be tempted to cache the Conda installer or Python dependencies fetched from `conda` or `pip`; however, this 150 | is an ill-advised idea for two main reasons: 151 | 152 | 1. Your package's dependencies are constantly updating, 153 | so you want to catch things which break due to dependencies before your users do. Having CI automatically trigger when 154 | you make changes and at scheduled intervals helps catch these things as soon as possible. 155 | * Because you should expect dependencies to update, you will have to upload a new cache each build anyway, somewhat 156 | invalidating one of the advantages of a cache. 157 | 2. It is a good idea to make sure your testing emulates the most basic user of your code if possible. 158 | If your target users include people who will try to download your package and have it "just work" for their project, 159 | then your CI testing should try to do this as well. This would include getting the newest installer and 160 | dependencies. One example 161 | of this may be industry, non-developer users, who do not know all the nuances and inner workings of package 162 | dependencies or versions. It is not reasonable to expect them to know these nuances either; that is why you are the 163 | developer. 164 | 165 | There may be times when the caching feature is helpful for you. One example: including test data which is too 166 | large to store on GitHub, but is hosted on a slow mirror. A cache will help speed up the test since you 167 | will not have to download from the slower mirror. If this sounds like a helpful feature, you can check out the 168 | links below.
We do not implement them for this Cookiecutter, but they can be added to your package as needed. 169 | 170 | * [GitHub Actions Caching](https://docs.github.com/en/free-pro-team@latest/actions/guides/caching-dependencies-to-speed-up-workflows) 171 | 172 | There are also caching capabilities for the `mamba-org/setup-micromamba` action, if you are using it. 173 | 174 | * [Setup Micromamba GHA Caching](https://github.com/mamba-org/provision-with-micromamba#cache-downloads) 175 | 176 | ### Documentation 177 | Make a [ReadTheDocs](https://readthedocs.org) account and turn on the git hook. Although you can manually make the 178 | documentation yourself through [Sphinx](http://www.sphinx-doc.org/en/master/usage/quickstart.html), you can also 179 | configure [ReadTheDocs](https://docs.readthedocs.io/en/latest/getting_started.html) to automatically build and 180 | publish the documentation for you. The initial skeleton of the documentation can be found in the `docs` folder 181 | of your output. 182 | 183 | ### Static Code Analysis 184 | Make an [LGTM](https://lgtm.com) account and add your project. If desired, you can add code review integration by clicking the large green button! 185 | 186 | Static code analysis dramatically enhances the quality of your code by finding a large number of common mistakes that both novice and advanced programmers make. There are many static analysis tools on the market, but we have found that LGTM strikes a delicate balance between verbosity and catching true errors. 187 | 188 | ### Additional Python Settings in `setup.cfg` 189 | 190 | This Cookiecutter generates the package, but there are several package-specific Python settings you can tune to your 191 | package's installation needs. 192 | These are settings in the `setup.cfg` file, 193 | which contains instructions for Python on how to install your package. 194 | Each of the options in the file is commented with what it does and when it should be 195 | used. 196 | 197 | ### Versioningit 198 | 199 | Versioningit automatically provides a version string based on the `git` tag and commit hash, which is then exposed 200 | through a `project.__version__` attribute in your 201 | `project/__init__.py`. For example, if you mint a tag (a release) for a project 202 | through `git tag -a 0.1 -m "Release 0.1."` (push to GitHub through `git push origin 0.1`), this tag will then be reflected in 203 | your project: `project.__version__ 204 | == 0.1`. Otherwise, a per-commit version is available which looks like 205 | `0.3.0+81.g332bfc1`. This string shows that the current git (the "g") hash `332bfc1` 206 | is 81 commits beyond the version 0.3 tag. 207 | 208 | 209 | ## Conda and PyPI (`pip`) 210 | 211 | Should you deploy and/or develop on [Conda](https://anaconda.org) (with the `conda-build` tool) or [PyPI](https://pypi.org/) (with the `pip` tool)? Good question; 212 | both have their own advantages and weaknesses, as they are designed to do very different things. Fortunately, 213 | many of the features you will need for this Cookiecutter overlap. 214 | We will not advocate here for one or the other, nor will we cover all the differences. We can, however, recommend some 215 | additional resources at the end of this section where you can read and find out more. 216 | 217 | We will cover the major differences that you, the developer, will see between the two as they relate to this Cookiecutter.
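Before digging into those trade-offs, it may help to see roughly what each choice runs in CI. The sketch below condenses the install and test steps from the reference workflows in this repository; `my_package` is a placeholder for your generated package name.

```bash
# Conda route (conda-forge or defaults channels): the pre-built conda environment
# supplies the dependencies, so the package itself is installed without resolving them.
python -m pip install . --no-deps
micromamba list
pytest -v --cov=my_package --cov-report=xml --color=yes my_package/tests/

# Pip route: pip installs the test tooling and resolves the package's dependencies itself.
python -m pip install -U pytest pytest-cov codecov
python -m pip install .
pytest -v --cov=my_package --cov-report=xml --color=yes my_package/tests/
```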
218 | 219 | For testing purposes, the PyPI tool, `pip`, is much faster at 220 | building your packages than the Conda tool, `conda-build`, will be. Depending on the number of dependencies, you may 221 | have conditions where `conda-build` takes 10-20 min to resolve, download, configure, and install all dependencies 222 | *before your tests start*, whereas `pip` would do the same in about 5 min. It is also important to note that neither 223 | `pip` nor `conda-build` is a *testing tool* in and of itself; they are deployment and dependency resolution 224 | tools. For pure testing, we include other packages like [pytest](https://pytest.org). 225 | 226 | From a deployment perspective, it is possible to deploy your package on both platforms, although doing so is beyond 227 | the scope of this Cookiecutter. 228 | 229 | Lastly, these are optional features! You could choose to not rely on either Conda or PyPI, assuming your package 230 | does not need dependencies. We do highly recommend you pick one of them for dependency resolution so you (and your 231 | potential users) do not have to manually find and install all the dependencies you may have. To put some historical 232 | perspective on this, NumPy and SciPy used to ask users to install the [BLAS](http://www.netlib.org/blas/) and 233 | [LAPACK](http://www.netlib.org/lapack/) libraries on their own, and 234 | then also make sure they were linked correctly to use in Python. These hurdles are no longer necessary thanks to the 235 | package managers. Huzzah! 236 | 237 | ### Additional reading for Conda and PyPI 238 | 239 | * [Conda Myths and Misconceptions, from the author of O'Reilly's Python Data Science Handbook](https://jakevdp.github.io/blog/2016/08/25/conda-myths-and-misconceptions/) 240 | * [Conda's Package Management docs](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-pkgs.html) 241 | * [`pip` User Guide](https://pip.pypa.io/en/stable/user_guide/) 242 | 243 | ## Conda Build vs. Conda Environments 244 | 245 | We recommend creating Conda environments rather than relying on Conda Build for *testing* purposes, assuming you have 246 | opted for Conda as a dependency manager. Earlier versions of this Cookiecutter would conduct testing by first 247 | bundling the package for distribution through 248 | [Conda Build](https://conda.io/docs/user-guide/tasks/build-packages/index.html), and then installing the package 249 | locally to run the tests against. This had the advantage of ensuring your package *could* be bundled for distribution and 250 | that all of its dependencies resolved correctly. However, it had the disadvantage of being painfully slow and rather 251 | confusing to debug should things go wrong on the build, even before the testing. 252 | 253 | The replacement option is to pre-create the conda environment and then install your package into it with 254 | no dependency resolution for testing. This helps separate the concepts of **testing** and **deployment**, which 255 | are distinct actions, even though deployment should only come after testing and you should be ready to do both. 256 | This should simplify and accelerate the testing process, but 257 | does mean maintaining two, albeit similar, files since a Conda Environment file has a different YAML syntax than 258 | a Conda Build `meta.yaml` file. We feel these benefits outweigh the costs and have adopted this model.
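As a minimal local sketch of this model, assuming the generated `devtools/conda-envs/test_env.yaml` layout, a micromamba (or conda) installation with its shell hook set up, and `my_package` standing in for your package name:

```bash
# Create and activate the test environment once; all dependencies come from this file.
micromamba create -n test -f devtools/conda-envs/test_env.yaml
micromamba activate test

# Install the package itself without dependency resolution, then run the tests.
python -m pip install -e . --no-deps
pytest -v --cov=my_package --cov-report=xml --color=yes my_package/tests/
```

Deployment-oriented checks with `conda-build` (or `pip`) can then be run separately, without slowing down the day-to-day test loop.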
259 | 
260 | ## Deploying your code
261 | 
262 | Simply testing your code is insufficient for good coding practices; you *should* be ready to deploy
263 | your code as well. Do not be afraid of deployment though; Python deployment over the last several years
264 | has been getting easier, especially when there are others to manage your deployment for you.
265 | There are several ways to handle this. We will cover a couple here, depending on the conditions
266 | which best suit your needs. The list below is neither exhaustive nor exclusive. There are times
267 | when you may want to build your packages yourself and upload them for developmental purposes,
268 | but we recommend letting others handle (and help you with) deployment.
269 | These are meant to serve as guides to help you get started.
270 | 
271 | Deployment should not get in the way of testing. You could configure the GitHub Actions scripts
272 | to handle the build stage after the test stage, but this should only be done by advanced
273 | users or those looking to deploy themselves.
274 | 
275 | 
276 | ### Deployment Method 1: Conda Forge
277 | 
278 | The [Conda Forge](https://conda-forge.org/) community is great, and it is the recommended location to deploy your
279 | packages. The community is highly active, and many scientific developers have been moving there not
280 | only to access Conda Forge's deployment tools, but also to gain easy access to all the other Python packages which have
281 | been deployed on the platform. Even though they provide the deployment architecture, you still need to
282 | test your program's ability to be packaged through `conda-build`.
283 | If you choose either Conda dependency option, additional
284 | tests will be added to GitHub Actions which *only* package through `conda-build`.
285 | 
286 | This method relies on the conda `meta.yaml` file.
287 | 
288 | ### Deployment Method 2: Conda through someone else's manager
289 | 
290 | This option is identical to the Conda Forge method, but relies on a different group's deployment platform,
291 | such as [Bioconda](https://bioconda.github.io/) or [Omnia](http://www.omnia.md/). Each platform has its
292 | own rules, which may include packaging your program yourself and uploading it. Check each platform's
293 | instructions and who else deploys to it before choosing this option to ensure it's right for you.
294 | 
295 | This method relies on the conda `meta.yaml` file.
296 | 
297 | ### Deployment Method 3: Upload package to PyPI
298 | 
299 | The [Python Package Index (PyPI)](https://pypi.org/) is another place to manage your package and have its dependencies
300 | resolved. This option typically relies on `pip` to create your packages, and dependencies must be specified in
301 | your `pyproject.toml` file to resolve correctly.
302 | 
303 | ### Deployment Method 4: Manually upload your package to some source
304 | 
305 | Sometimes, your package is niche enough, developmental enough, or proprietary enough to warrant manually
306 | packaging and uploading your program. This may also apply if you want regular developmental builds which you
307 | upload manually to test. In this case, you will want to change your CI scripts to include a build step, and
308 | optionally an upload step, on completion of tests.
309 | 
310 | ## Output Skeleton
311 | 
312 | This is the skeleton made by this `cookiecutter`; the items marked in `{{ }}` will be replaced by your choices
313 | upon setup.
314 | 
315 | ```
316 | .
317 | ├── CODE_OF_CONDUCT.md           <- Code of Conduct for developers and users
318 | ├── LICENSE                      <- License file
319 | ├── MANIFEST.in                  <- Packaging information for pip
320 | ├── README.md                    <- Description of project which GitHub will render
321 | ├── {{repo_name}}                <- Basic Python Package import directory
322 | │   ├── {{first_module_name}}.py <- Starting package module
323 | │   ├── __init__.py              <- Basic Python Package import file
324 | │   ├── _version.py              <- Generated file from Versioningit. Created on package install, not initialization.
325 | │   ├── data                     <- Sample additional data (non-code) which can be packaged. Just an example, delete in production
326 | │   │   ├── README.md
327 | │   │   └── look_and_say.dat
328 | │   ├── py.typed                 <- Marker file indicating PEP 561 type hinting.
329 | │   └── tests                    <- Unit test directory with sample tests
330 | │       ├── __init__.py
331 | │       └── test_{{repo_name}}.py
332 | ├── devtools                     <- Deployment, packaging, and CI helpers directory
333 | │   ├── README.md
334 | │   ├── conda-envs               <- Conda environments for testing
335 | │   │   └── test_env.yaml
336 | │   └── scripts
337 | │       └── create_conda_env.py  <- OS-agnostic helper script to make conda environments based on simple flags
338 | ├── docs                         <- Documentation template folder with many settings already filled in
339 | │   ├── Makefile
340 | │   ├── README.md                <- Instructions on how to build the docs
341 | │   ├── _static
342 | │   │   └── README.md
343 | │   ├── _templates
344 | │   │   └── README.md
345 | │   ├── api.rst
346 | │   ├── conf.py
347 | │   ├── getting_started.rst
348 | │   ├── index.rst
349 | │   ├── make.bat
350 | │   └── requirements.yaml        <- Documentation-building requirements. Usually a smaller set than the main program
351 | ├── pyproject.toml               <- Generic Python build system configuration (PEP-517).
352 | ├── readthedocs.yml
353 | ├── setup.cfg                    <- Near-master config file to house INI-like settings for Coverage, Flake8, YAPF, etc.
354 | ├── .codecov.yml                 <- Codecov config to help reduce its verbosity to more reasonable levels
355 | ├── .github                      <- GitHub hooks for user contribution, pull request guides and GitHub Actions CI
356 | │   ├── CONTRIBUTING.md
357 | │   ├── PULL_REQUEST_TEMPLATE.md
358 | │   └── workflows
359 | │       └── CI.yaml
360 | ├── .gitignore                   <- Stock helper file telling git what file name patterns to ignore when adding files
361 | ├── .gitattributes               <- Stock helper file telling GitHub how to bundle files in the tarball; should not need to be touched most times
362 | └── .lgtm.yml
363 | ```
364 | 
365 | ## Acknowledgments
366 | 
367 | This cookiecutter is developed by Levi N. Naden and Jessica A. Nash from
368 | the [Molecular Sciences Software Institute (MolSSI)](http://molssi.org/); and Daniel G. A. Smith
369 | of [ENTOS](https://www.entos.ai/). Additional major development has been provided by M. Eric Irrgang. Copyright (c)
370 | 2022.
371 | 
372 | Directory structure template based on recommendation from the
373 | [Chodera Lab's Software Development Guidelines](https://github.com/choderalab/software-development/blob/master/STRUCTURING_YOUR_PROJECT.md).
374 | 
375 | 376 | Original hosting of repository owned by the [Chodera Lab](https://github.com/choderalab) 377 | 378 | Elements of this repository drawn from the 379 | [cookiecutter-data-science](https://github.com/drivendata/cookiecutter-data-science) by Driven Data 380 | and the [MolSSI Python Template](https://github.com/MolSSI/python_template) 381 | -------------------------------------------------------------------------------- /cookiecutter.json: -------------------------------------------------------------------------------- 1 | { 2 | "project_name": "ProjectName", 3 | "repo_name": "{{ cookiecutter.project_name.lower().replace(' ', '_') }}", 4 | "first_module_name": "{{ cookiecutter.repo_name.lower().replace(' ', '_') }}", 5 | "author_name": "Your name (or your organization/company/team)", 6 | "author_email": "Your email (or your organization/company/team)", 7 | "description": "A short description of the project (less than one line).", 8 | "open_source_license": [ 9 | 10 | "MIT", 11 | "BSD-3-Clause", 12 | "LGPLv3", 13 | "Not Open Source" 14 | ], 15 | "dependency_source": [ 16 | "Prefer conda-forge with pip fallback", 17 | "Prefer default anaconda channel with pip fallback", 18 | "Dependencies from pip only (no conda)" 19 | ], 20 | 21 | "include_ReadTheDocs": ["y", "n"], 22 | "_cms_cc_version": "1.11" 23 | } 24 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = ComputationalMolecularSciencesCookiecutter 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/stable/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | # import os 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'Computational Molecular Sciences Cookiecutter' 23 | copyright = '2021, Levi N. Naden, Jessica A. Nash, Daniel G. A. Smith' 24 | author = 'Levi N. Naden, Jessica A. Nash Daniel G. A. 
Smith' 25 | 26 | # The short X.Y version 27 | version = '' 28 | # The full version, including alpha/beta/rc tags 29 | release = '' 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # 36 | # needs_sphinx = '1.0' 37 | 38 | # Add any Sphinx extension module names here, as strings. They can be 39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 40 | # ones. 41 | extensions = [ 42 | 'sphinx.ext.mathjax', 43 | ] 44 | 45 | # Add any paths that contain templates here, relative to this directory. 46 | templates_path = ['_templates'] 47 | 48 | # The suffix(es) of source filenames. 49 | # You can specify multiple suffix as a list of string: 50 | # 51 | # source_suffix = ['.rst', '.md'] 52 | source_suffix = '.rst' 53 | 54 | # The master toctree document. 55 | master_doc = 'index' 56 | 57 | # The language for content autogenerated by Sphinx. Refer to documentation 58 | # for a list of supported languages. 59 | # 60 | # This is also used if you do content translation via gettext catalogs. 61 | # Usually you set "language" from the command line for these cases. 62 | language = None 63 | 64 | # List of patterns, relative to source directory, that match files and 65 | # directories to ignore when looking for source files. 66 | # This pattern also affects html_static_path and html_extra_path . 67 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 68 | 69 | # The name of the Pygments (syntax highlighting) style to use. 70 | pygments_style = 'sphinx' 71 | 72 | 73 | # -- Options for HTML output ------------------------------------------------- 74 | 75 | # The theme to use for HTML and HTML Help pages. See the documentation for 76 | # a list of builtin themes. 77 | # 78 | html_theme = 'sphinx_rtd_theme' 79 | 80 | # Theme options are theme-specific and customize the look and feel of a theme 81 | # further. For a list of options available for each theme, see the 82 | # documentation. 83 | # 84 | # html_theme_options = {} 85 | 86 | # Add any paths that contain custom static files (such as style sheets) here, 87 | # relative to this directory. They are copied after the builtin static files, 88 | # so a file named "default.css" will overwrite the builtin "default.css". 89 | html_static_path = ['_static'] 90 | 91 | # Custom sidebar templates, must be a dictionary that maps document names 92 | # to template names. 93 | # 94 | # The default sidebars (for documents that don't match any pattern) are 95 | # defined by theme itself. Builtin themes are using these templates by 96 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 97 | # 'searchbox.html']``. 98 | # 99 | # html_sidebars = {'**': ['globaltoc.html', 'localtoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html']} 100 | 101 | 102 | # -- Options for HTMLHelp output --------------------------------------------- 103 | 104 | # Output file base name for HTML help builder. 105 | htmlhelp_basename = 'ComputationalMolecularSciencesCookiecutterdoc' 106 | 107 | 108 | # -- Options for LaTeX output ------------------------------------------------ 109 | 110 | latex_elements = { 111 | # The paper size ('letterpaper' or 'a4paper'). 112 | # 113 | # 'papersize': 'letterpaper', 114 | 115 | # The font size ('10pt', '11pt' or '12pt'). 116 | # 117 | # 'pointsize': '10pt', 118 | 119 | # Additional stuff for the LaTeX preamble. 
120 | # 121 | # 'preamble': '', 122 | 123 | # Latex figure (float) alignment 124 | # 125 | # 'figure_align': 'htbp', 126 | } 127 | 128 | # Grouping the document tree into LaTeX files. List of tuples 129 | # (source start file, target name, title, 130 | # author, documentclass [howto, manual, or own class]). 131 | latex_documents = [ 132 | (master_doc, 133 | 'ComputationalMolecularSciencesCookiecutter.tex', 134 | 'Computational Molecular Sciences Cookiecutter Documentation', 135 | 'Levi N. Naden, Daniel G. A. Smith', 'manual'), 136 | ] 137 | 138 | 139 | # -- Options for manual page output ------------------------------------------ 140 | 141 | # One entry per manual page. List of tuples 142 | # (source start file, name, description, authors, manual section). 143 | man_pages = [ 144 | (master_doc, 'computationalmolecularsciencescookiecutter', 145 | 'Computational Molecular Sciences Cookiecutter Documentation', 146 | [author], 1) 147 | ] 148 | 149 | 150 | # -- Options for Texinfo output ---------------------------------------------- 151 | 152 | # Grouping the document tree into Texinfo files. List of tuples 153 | # (source start file, target name, title, author, 154 | # dir menu entry, description, category) 155 | texinfo_documents = [ 156 | (master_doc, 'ComputationalMolecularSciencesCookiecutter', 157 | 'Computational Molecular Sciences Cookiecutter Documentation', 158 | author, 'ComputationalMolecularSciencesCookiecutter', 159 | 'A Cookiecutter which provides skeletal packages of Computational Molecular Sciences Software.', 160 | 'Miscellaneous'), 161 | ] 162 | 163 | 164 | # -- Extension configuration ------------------------------------------------- 165 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. Computational Molecular Sciences Cookiecutter documentation master file, created by 2 | sphinx-quickstart on Fri Apr 27 10:12:46 2018. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Cookiecutter for Computational Molecular Sciences Python Packages 7 | ================================================================= 8 | 9 | .. note:: 10 | 11 | These docs are are mainly a recreation of the package's ``README.md`` file as an example of building the docs. 12 | Feel free to borrow from this example and consider splitting the docs into multiple pages! 13 | 14 | 15 | A `cookiecutter `_ template for those interested in developing computational 16 | molecular sciences packages in Python. Skeletal starting repositories can be created from this template to create the 17 | file structure semi-autonomously so you can focus on what's important: the science! 18 | 19 | The skeletal structure is designed to help you get started, but do not feel limited by the skeleton's features 20 | included here. Just to name a few things you can alter to suite your needs: change continuous integration options, 21 | remove deployment platforms, or test with a different suite. 
22 | 
23 | Features
24 | --------
25 | * Python-centric skeletal structure with initial module files
26 | * Pre-configured ``pyproject.toml`` and ``setup.cfg`` for installation and packaging
27 | * Pre-configured Windows, Linux, and OSX continuous integration on GitHub Actions
28 | * Choice of dependency locations through ``conda-forge``, default ``conda``, or ``pip``
29 | * Basic testing structure with `PyTest `_
30 | * Automatic ``git`` initialization + tag
31 | * GitHub Hooks
32 | * Automatic package version control with `Versioningit `_
33 | * Sample data inclusion with packaging instructions
34 | * Basic documentation structure powered by `Sphinx `_
35 | * Automatic license file inclusion from several common Open Source licenses (optional)
36 | 
37 | .. versionchanged:: 1.7
38 |    Added support for :pep:`517` and :pep:`561`.
39 | 
40 | .. versionchanged:: 1.8
41 |    Switched from Versioneer to Versioningit for :pep:`517`-only builds.
42 |    The ``setup.py`` file is no longer generated.
43 | 
44 | .. versionchanged:: 1.9
45 |    Added support for :pep:`621`. Updated to Python 3.10.
46 | 
47 | .. versionchanged:: 1.10
48 |    Switched to Micromamba as the CI conda source.
49 | 
50 | Requirements
51 | ------------
52 | 
53 | * Python 3.8, 3.9, or 3.10
54 | * `Cookiecutter `_
55 | * `Git `_
56 | 
57 | 
58 | Usage
59 | -----
60 | 
61 | With `cookiecutter installed `_,
62 | execute the following command inside the folder where you want to create the skeletal repository.
63 | 
64 | .. code:: bash
65 | 
66 |    cookiecutter gh:molssi/cookiecutter-cms
67 | 
68 | 
69 | This fetches the repository from GitHub automatically and prompts the user for some simple information such as
70 | the package name, author(s), and license.
71 | 
72 | .. raw:: html
73 | 
74 | 
77 | 
78 | 
79 | Next steps and web integrations
80 | -------------------------------
81 | The repository contains a number of "hooks" that integrate with a variety of web services. To fully integrate the project
82 | with these web services and to get started developing your project, please proceed through the following directions.
83 | 
84 | Local installation
85 | ^^^^^^^^^^^^^^^^^^
86 | For development work, it is often recommended to do a "local" Python install via ``pip install -e .``.
87 | This command will insert your
88 | new project into your Python site-packages folder so that it can be found from any directory on your computer.
89 | 
90 | Setting up with GitHub
91 | ^^^^^^^^^^^^^^^^^^^^^^
92 | Upon creation, this project will initialize the output as a ``git`` repository compatible with
93 | `Versioningit `_. However, this does not automatically register the
94 | repository with GitHub. To do this, follow the instructions for
95 | `Adding an existing project to GitHub using the command line `_.
96 | Follow the first step to create the repository on GitHub, but ignore the warnings about the README, license, and
97 | ``.gitignore`` files as this repo creates them. You can then skip ahead to the instructions following the "first commit" and
98 | proceed from there.
99 | 
100 | Testing
101 | ^^^^^^^
102 | The Python testing framework chosen for this project is `pytest `_. Other testing frameworks are available;
103 | however, the authors believe the combination of easy `parametrization of tests `_,
104 | `fixtures `_, and `test marking `_
105 | makes ``pytest`` particularly well suited for molecular software packages.
106 | 
107 | To get started, additional tests can be added to the ``project/tests/`` folder.
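For example, a minimal new test file might look like the sketch below; the ``project`` import and the file name are placeholders for your own package, and nothing here is generated by the cookiecutter itself.

.. code:: python

    # project/tests/test_example.py -- hypothetical additional test file
    import pytest

    import project  # replace with your package's import name


    def test_version_is_string():
        """The installed package should expose the Versioningit-generated version string."""
        assert isinstance(project.__version__, str)


    @pytest.mark.parametrize("value, expected", [(1, 1), (2, 4), (3, 9)])
    def test_squares(value, expected):
        """A small demonstration of pytest's test parametrization."""
        assert value ** 2 == expected
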
Any function starting with ``test_*`` will automatically be
108 | included in the testing framework. While these can be added anywhere in your directory structure, it is highly recommended to keep them
109 | contained within the ``project/tests/`` folder.
110 | 
111 | Tests can be run with the ``pytest -v`` command. There are a number of additional command line arguments to
112 | `explore `_.
113 | 
114 | Continuous Integration
115 | ^^^^^^^^^^^^^^^^^^^^^^
116 | 
117 | As of version 1.3, we provide preconfigured workflows for `GitHub Actions <https://github.com/features/actions>`_, with
118 | support for Linux, macOS, and Windows. Conda support is possible thanks to the excellent
119 | `@conda-incubator setup-miniconda action <https://github.com/conda-incubator/setup-miniconda>`_. We encourage you
120 | to read its documentation for further details on GitHub Actions themselves.
121 | 
122 | Testing is accomplished with `GitHub Actions `_ for Linux, Windows, and macOS
123 | testing. This framework is chosen as it is completely free for open source projects and allows you to automatically
124 | verify that your project works under a variety of OS's and
125 | Python versions, all within the existing GitHub framework.
126 | To begin, please ensure GitHub Actions are enabled for your repository in its main settings area. This is often on by
127 | default, so no additional steps may be needed.
128 | You will also want to correct the badges which appear in the output README file to point to the correct links.
129 | 
130 | Documentation
131 | ^^^^^^^^^^^^^
132 | Make a `ReadTheDocs `_ account and turn on the git hook. Although you can manually make the
133 | documentation yourself through `Sphinx `_, you can also
134 | `configure ReadTheDocs `_ to automatically build and
135 | publish the documentation for you. The initial skeleton of the documentation can be found in the ``docs`` folder
136 | of your output.
137 | 
138 | Additional Python Settings in ``setup.cfg`` and ``pyproject.toml``
139 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
140 | 
141 | This Cookiecutter generates the package, but there are several package-specific Python settings you can tune to your
142 | package's installation needs.
143 | These are settings in :file:`setup.cfg` and :file:`pyproject.toml`,
144 | which contain instructions for Python on
145 | how to install your package. Each of the options in these files is commented with what it does and when it should be
146 | used.
147 | 
148 | 
149 | Why is Python 2.X not on the supported versions?
150 | ------------------------------------------------
151 | New projects generally should not be built with Python 2.7 support in mind; see the
152 | `Python 3 Statement `_. Although the final Python 2.7 release was
153 | `supported through 2020 `_ and is the default on many legacy systems, Python
154 | 3 has been available for over a decade, and a project's long-term usage should not be shackled by legacy methods that will
155 | have to be replaced in very short order as Python 2 support is retired.
156 | 
157 | 
158 | 
159 | Additional Pages
160 | ================
161 | 
162 | .. toctree::
163 |    :maxdepth: 2
164 | 
165 |    nuances
166 | 
167 | 
168 | Acknowledgments
169 | ===============
170 | 
171 | This cookiecutter is developed by Levi N. Naden and Jessica A. Nash
172 | from the `Molecular Sciences Software Institute (MolSSI) `_; and
173 | Daniel G. A. Smith of `ENTOS <https://www.entos.ai/>`_. Additional major development has been provided by M. Eric Irrgang.
174 | Copyright (c) 2022.
175 | 
176 | Directory structure template based on recommendation from the
177 | `Chodera Lab's Software Development Guidelines `_.
178 | 
179 | Original hosting of repository owned by the `Chodera Lab `_.
180 | 
181 | Elements of this repository drawn from the
182 | `cookiecutter-data-science `_ by Driven Data
183 | and the `MolSSI Python Template `_.
184 | 
-------------------------------------------------------------------------------- /docs/make.bat: --------------------------------------------------------------------------------
1 | @ECHO OFF
2 | 
3 | pushd %~dp0
4 | 
5 | REM Command file for Sphinx documentation
6 | 
7 | if "%SPHINXBUILD%" == "" (
8 | 	set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 | set SPHINXPROJ=ComputationalMolecularSciencesCookiecutter
13 | 
14 | if "%1" == "" goto help
15 | 
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | 	echo.
19 | 	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
20 | 	echo.installed, then set the SPHINXBUILD environment variable to point
21 | 	echo.to the full path of the 'sphinx-build' executable. Alternatively you
22 | 	echo.may add the Sphinx directory to PATH.
23 | 	echo.
24 | 	echo.If you don't have Sphinx installed, grab it from
25 | 	echo.http://sphinx-doc.org/
26 | 	exit /b 1
27 | )
28 | 
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 | 
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 | 
35 | :end
36 | popd
37 | 
-------------------------------------------------------------------------------- /docs/nuances.rst: --------------------------------------------------------------------------------
1 | Warnings and Caveats from the Cookiecutter
2 | ==========================================
3 | 
4 | We encourage users to look at the parent Computational Molecular Sciences Cookiecutter as a way to template their own output
5 | projects from the `Cookiecutter `_. However, there are a few things the
6 | parent does to make the illustration work that should probably not be followed in your projects. These are mostly
7 | because the parent has to simulate an output, then test the output of the cookiecutter, which is something you will
8 | not have to do with your project... Unless you are making a Cookiecutter which makes Cookiecutters, but that is
9 | beyond the scope of this project.
10 | 
11 | Continuous Integration (CI) Caveats
12 | -----------------------------------
13 | 
14 | The parent Cookiecutter must emulate the process of creating and running tests while inside its own tests. Since
15 | GitHub Actions are not intended to do this, we have to do some trickery to approximate the YAML output files
16 | after executing the Cookiecutter, and then compare the real outputs to stored references.
17 | This is something you, the user of this Cookiecutter, should not have to worry about
18 | and can instead just use GitHub Actions as intended.
19 | 
20 | 
21 | Writing helpful documentation
22 | -----------------------------
23 | The primary documentation for this Cookiecutter is mostly just a copy of the main README.md file. Your docs should be
24 | more detailed in ways the README.md cannot be. The README.md file is rendered by GitHub, but will (should) not contain all
25 | of the detailed instructions, settings, applications, and benchmarking which can be elaborated on in the full documentation.
26 | -------------------------------------------------------------------------------- /hooks/post_gen_project.py: -------------------------------------------------------------------------------- 1 | """ 2 | Post Cookie Generation script(s) 3 | 4 | These scripts are executed from the output folder. 5 | If any error is raised, the cookie cutter creation fails and crashes 6 | """ 7 | 8 | import os 9 | import subprocess as sp 10 | 11 | 12 | def decode_string(string): 13 | """Helper function to covert byte-string to string, but allows normal strings""" 14 | try: 15 | return string.decode() 16 | except AttributeError: 17 | return string 18 | 19 | 20 | def invoke_shell(command, error_ok=False, print_output=True): 21 | 22 | return_code = 0 # Successful return code 23 | 24 | try: 25 | output = sp.check_output(command, shell=True, stderr=sp.STDOUT) 26 | except sp.CalledProcessError as e: 27 | output = e.output 28 | return_code = e.returncode 29 | if not error_ok: 30 | raise e 31 | if print_output: 32 | print(decode_string(output)) 33 | return decode_string(output), return_code 34 | 35 | 36 | def git_init_and_tag(): 37 | """ 38 | Invoke the initial git and tag with 0.0.0 to make an initial version for 39 | Versioneer to ID if not already in a git repository. 40 | """ 41 | 42 | # Check if we are in a git repository - calling `git status` outside of a git repository will return 128 43 | _, return_code = invoke_shell("git status", error_ok=True, print_output=False) 44 | # Create a repository and commit if not in one. 45 | if return_code == 128: 46 | # Initialize git 47 | invoke_shell("git init") 48 | 49 | 50 | # Add files created by cookiecutter 51 | invoke_shell("git add .") 52 | invoke_shell( 53 | "git commit -m \"Initial commit after CMS Cookiecutter creation, version {}\"".format( 54 | '{{ cookiecutter._cms_cc_version }}')) 55 | 56 | # change default branch name to main 57 | # safer than --init-branch=main 58 | # because it works with older versions of git 59 | invoke_shell("git branch -M main") 60 | 61 | # Check for a tag 62 | version = invoke_shell("git tag", error_ok=True) 63 | # Tag if no tag exists 64 | if not version: 65 | invoke_shell("git tag 0.0.0 -m \"Initial commit from MolSSI cookie cutter\"") 66 | else: 67 | print("\ngit repository detected. 
" 68 | "CookieCutter files have been created in {{ cookiecutter.repo_name }} directory.") 69 | 70 | 71 | def remove_rtd(): 72 | include_rtd = '{{ cookiecutter.include_ReadTheDocs }}' 73 | if include_rtd == "n": 74 | rtd_env = os.path.join("docs", "requirements.yaml") 75 | os.remove('.readthedocs.yaml') 76 | os.remove(rtd_env) 77 | 78 | 79 | def random_file_cleanup_removal(): 80 | """Remove random files which can be generated under certain conditions""" 81 | random_file_list = [ 82 | "default.profraw", # Remove default.profraw files, see #105 83 | ] 84 | for random_file in random_file_list: 85 | try: 86 | os.remove(random_file) 87 | except FileNotFoundError: 88 | pass 89 | 90 | 91 | remove_rtd() 92 | random_file_cleanup_removal() 93 | git_init_and_tag() 94 | -------------------------------------------------------------------------------- /hooks/pre_gen_project.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pre Cookie Generation script(s) 3 | 4 | If any error is raised, the cookie cutter creation fails and crashes 5 | """ 6 | 7 | import re 8 | import sys 9 | 10 | MODULE_REGEX = r'^[_a-zA-Z][_a-zA-Z0-9]+$' 11 | EMAIL_REGEX = r'[^@]+@[^@]+\.[^@]+' 12 | 13 | repo_name = '{{ cookiecutter.repo_name }}' 14 | module_name = '{{ cookiecutter.first_module_name }}' 15 | 16 | author_email = '{{ cookiecutter.author_email }}' 17 | 18 | for key in [repo_name, module_name]: 19 | if not re.match(MODULE_REGEX, key): 20 | print(key, re.match(MODULE_REGEX, key)) 21 | print('ERROR: "{}" is not a valid Python module name!'.format(key)) 22 | 23 | # exits with status 1 to indicate failure 24 | sys.exit(1) 25 | 26 | if not re.match(EMAIL_REGEX, author_email): 27 | print('ERROR: "{}" is not a valid email address!'.format(author_email)) 28 | 29 | # exits with status 1 to indicate failure 30 | sys.exit(1) 31 | -------------------------------------------------------------------------------- /tests/regenerate_references.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Regenerate the reference workflow scripts, accepts a path to the 4 | 5 | for LIC in 1 2 6 | do 7 | for DEP in 1 2 3 8 | do 9 | for RTD in 1 2 10 | do 11 | SEQUENCE=_"$LIC"_"$DEP"_"$RTD" 12 | python setup_cookiecutter.py prj${SEQUENCE} ${LIC} ${DEP} ${RTD} .. 13 | mkdir -p CI_files 14 | mv prj${SEQUENCE}/.github/workflows/CI.yaml CI_files/CI${SEQUENCE}.yaml 15 | rm -rf prj${SEQUENCE} 16 | done 17 | done 18 | done 19 | -------------------------------------------------------------------------------- /tests/run_yaml.py: -------------------------------------------------------------------------------- 1 | """ 2 | Runs arbitrary YAML commands 3 | """ 4 | 5 | import os 6 | import sys 7 | import yaml 8 | 9 | 10 | def parse_yaml(filename, key): 11 | """ 12 | Opens a YAML file. 
13 | """ 14 | 15 | with open(filename, "r") as infile: 16 | ret = yaml.safe_load(infile) 17 | 18 | return ret[key] 19 | 20 | 21 | def gen_commands(name, commands): 22 | """ 23 | Runs YAML commands and prints their output 24 | """ 25 | 26 | print("Generating bash file to run the following:") 27 | ret = [] 28 | for command in commands: 29 | command = " ".join([os.path.expandvars(x) for x in command.split()]) 30 | print(" %s" % command) 31 | ret.append(command) 32 | 33 | ret = "\n".join(ret) 34 | with open(name, "w") as ofile: 35 | ofile.write(ret) 36 | 37 | 38 | if __name__ == "__main__": 39 | filename = sys.argv[1] 40 | key = sys.argv[2] 41 | out_name = sys.argv[3] 42 | 43 | commands = parse_yaml(filename, key) 44 | gen_commands(out_name, commands) 45 | -------------------------------------------------------------------------------- /tests/setup_cookiecutter.py: -------------------------------------------------------------------------------- 1 | """ 2 | Simulates a cookiecutter run 3 | """ 4 | 5 | from subprocess import Popen, PIPE, STDOUT 6 | from os.path import abspath 7 | import sys 8 | 9 | project = sys.argv[1] 10 | lic = sys.argv[2] 11 | provider = sys.argv[3] 12 | rtd = sys.argv[4] 13 | try: 14 | cookie_path = abspath(sys.argv[5]) 15 | except IndexError: 16 | cookie_path = "." 17 | 18 | print("Options: open_source_license=%s, ci_provider=%s, rtd=%s" % (lic, provider, rtd)) 19 | 20 | # Setup the options 21 | options = [project, # Repo name 22 | project, # Project name 23 | project, # First module name 24 | "cookie monster", # Author name 25 | "cookiemonster@trash.can", # Author email 26 | "", # Description 27 | lic, # License 28 | provider, # ci_provider 29 | rtd] 30 | 31 | # Open a thread 32 | p = Popen(["cookiecutter", cookie_path], stdin=PIPE, stdout=PIPE) 33 | 34 | # Communicate options 35 | opts = "\n".join(options).encode("UTF-8") 36 | output = p.communicate(opts)[0].decode() 37 | try: 38 | if p.returncode != 0: 39 | raise RuntimeError("Cookiecutter did not run successfully!") 40 | finally: 41 | # Print the output for prosperity 42 | print("\n".join(output.split(": "))) 43 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.codecov.yml: -------------------------------------------------------------------------------- 1 | # Codecov configuration to make it a bit less noisy 2 | coverage: 3 | status: 4 | patch: false 5 | project: 6 | default: 7 | threshold: 50% 8 | comment: 9 | layout: "header" 10 | require_changes: false 11 | branches: null 12 | behavior: default 13 | flags: null 14 | paths: null -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.gitattributes: -------------------------------------------------------------------------------- 1 | {{cookiecutter.repo_name}}/_version.py export-subst 2 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # How to contribute 2 | 3 | We welcome contributions from external contributors, and this document 4 | describes how to merge code changes into this {{cookiecutter.repo_name}}. 5 | 6 | ## Getting Started 7 | 8 | * Make sure you have a [GitHub account](https://github.com/signup/free). 9 | * [Fork](https://help.github.com/articles/fork-a-repo/) this repository on GitHub. 
10 | * On your local machine, 11 | [clone](https://help.github.com/articles/cloning-a-repository/) your fork of 12 | the repository. 13 | 14 | ## Making Changes 15 | 16 | * Add some really awesome code to your local fork. It's usually a [good 17 | idea](http://blog.jasonmeridth.com/posts/do-not-issue-pull-requests-from-your-master-branch/) 18 | to make changes on a 19 | [branch](https://help.github.com/articles/creating-and-deleting-branches-within-your-repository/) 20 | with the branch name relating to the feature you are going to add. 21 | * When you are ready for others to examine and comment on your new feature, 22 | navigate to your fork of {{cookiecutter.repo_name}} on GitHub and open a [pull 23 | request](https://help.github.com/articles/using-pull-requests/) (PR). Note that 24 | after you launch a PR from one of your fork's branches, all 25 | subsequent commits to that branch will be added to the open pull request 26 | automatically. Each commit added to the PR will be validated for 27 | mergability, compilation and test suite compliance; the results of these tests 28 | will be visible on the PR page. 29 | * If you're providing a new feature, you must add test cases and documentation. 30 | * When the code is ready to go, make sure you run the test suite using pytest. 31 | * When you're ready to be considered for merging, check the "Ready to go" 32 | box on the PR page to let the {{cookiecutter.repo_name}} devs know that the changes are complete. 33 | The code will not be merged until this box is checked, the continuous 34 | integration returns checkmarks, 35 | and multiple core developers give "Approved" reviews. 36 | 37 | # Additional Resources 38 | 39 | * [General GitHub documentation](https://help.github.com/) 40 | * [PR best practices](http://codeinthehole.com/writing/pull-requests-and-other-good-practices-for-teams-using-github/) 41 | * [A guide to contributing to software packages](http://www.contribution-guide.org) 42 | * [Thinkful PR example](http://www.thinkful.com/learn/github-pull-request-tutorial/#Time-to-Submit-Your-First-PR) 43 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Description 2 | Provide a brief description of the PR's purpose here. 3 | 4 | ## Todos 5 | Notable points that this PR has either accomplished or will accomplish. 6 | - [ ] TODO 1 7 | 8 | ## Questions 9 | - [ ] Question1 10 | 11 | ## Status 12 | - [ ] Ready to go -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.github/workflows/CI.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | # GitHub has started calling new repo's first branch "main" https://github.com/github/renaming 8 | # The cookiecutter uses the "--initial-branch" flag when it runs git-init 9 | push: 10 | branches: 11 | - "main" 12 | pull_request: 13 | branches: 14 | - "main" 15 | schedule: 16 | # Weekly tests run on main by default: 17 | # Scheduled workflows run on the latest commit on the default or base branch. 
18 | # (from https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) 19 | - cron: "0 0 * * 0" 20 | 21 | jobs: 22 | test: 23 | name: Test on {{ '${{ matrix.os }}' }}, Python {{ '${{ matrix.python-version }}' }} 24 | runs-on: {{ '${{ matrix.os }}' }} 25 | strategy: 26 | matrix: 27 | os: [macOS-latest, ubuntu-latest, windows-latest] 28 | python-version: ["3.11", "3.12", "3.13"] 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | 33 | - name: Additional info about the build 34 | shell: bash 35 | run: | 36 | uname -a 37 | df -h 38 | ulimit -a 39 | {% if cookiecutter.dependency_source == 'Dependencies from pip only (no conda)' %} 40 | - name: Set up Python {{ '${{ matrix.python-version }}' }} 41 | uses: actions/setup-python@v5 42 | with: 43 | python-version: {{ '${{ matrix.python-version }}' }} 44 | 45 | - name: Testing Dependencies 46 | shell: bash 47 | run: | 48 | python -m pip install -U pytest pytest-cov codecov 49 | {% else %} 50 | # More info on options: https://github.com/marketplace/actions/setup-micromamba 51 | - uses: mamba-org/setup-micromamba@v2 52 | with: 53 | environment-file: devtools/conda-envs/test_env.yaml 54 | environment-name: test 55 | {%- if cookiecutter.dependency_source == 'Prefer conda-forge with pip fallback' %} 56 | condarc: | 57 | channels: 58 | - conda-forge 59 | {%- elif cookiecutter.dependency_source == 'Prefer default anaconda channel with pip fallback' %} 60 | condarc: | 61 | channels: 62 | - defaults 63 | {%- endif %} 64 | create-args: >- 65 | python={{ '${{ matrix.python-version }}' }} 66 | {% endif %} 67 | - name: Install package 68 | {%- if cookiecutter.dependency_source == 'Dependencies from pip only (no conda)' %} 69 | shell: bash 70 | run: | 71 | python -m pip install . 72 | {% else %} 73 | # conda setup requires this special shell 74 | shell: bash -l {0} 75 | run: | 76 | python -m pip install . --no-deps 77 | micromamba list 78 | {% endif %} 79 | - name: Run tests 80 | {%- if cookiecutter.dependency_source == 'Dependencies from pip only (no conda)' %} 81 | shell: bash 82 | {% else %} 83 | # conda setup requires this special shell 84 | shell: bash -l {0} 85 | {%- endif %} 86 | run: | 87 | pytest -v --cov={{ cookiecutter.repo_name }} --cov-report=xml --color=yes {{ cookiecutter.repo_name }}/tests/ 88 | 89 | - name: CodeCov 90 | uses: codecov/codecov-action@v5 91 | with: 92 | files: ./coverage.xml 93 | flags: unittests 94 | name: codecov-{{ '${{ matrix.os }}' }}-py{{ '${{ matrix.python-version }}' }} 95 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.github/workflows/codeql.yaml: -------------------------------------------------------------------------------- 1 | name: "CodeQL Advanced" 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | schedule: 9 | - cron: '20 3 * * 1' 10 | 11 | jobs: 12 | analyze: 13 | name: Analyze ({{ '${{ matrix.language }}' }}) 14 | # Runner size impacts CodeQL analysis time. To learn more, please see: 15 | # - https://gh.io/recommended-hardware-resources-for-running-codeql 16 | # - https://gh.io/supported-runners-and-hardware-resources 17 | # - https://gh.io/using-larger-runners (GitHub.com only) 18 | # Consider using larger runners or machines with greater resources for possible analysis time improvements. 
19 | runs-on: {{ '${{ (matrix.language == \'swift\' && \'macos-latest\') || \'ubuntu-latest\' }}' }} 20 | permissions: 21 | # required for all workflows 22 | security-events: write 23 | 24 | # required to fetch internal or private CodeQL packs 25 | packages: read 26 | 27 | # only required for workflows in private repositories 28 | actions: read 29 | contents: read 30 | 31 | strategy: 32 | fail-fast: false 33 | matrix: 34 | include: 35 | - language: actions 36 | build-mode: none 37 | - language: python 38 | build-mode: none 39 | # CodeQL supports the following values keywords for 'language': 'actions', 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' 40 | # Use `c-cpp` to analyze code written in C, C++ or both 41 | # Use 'java-kotlin' to analyze code written in Java, Kotlin or both 42 | # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both 43 | # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, 44 | # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. 45 | # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how 46 | # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages 47 | steps: 48 | - name: Checkout repository 49 | uses: actions/checkout@v4 50 | 51 | # Add any setup steps before running the `github/codeql-action/init` action. 52 | # This includes steps like installing compilers or runtimes (`actions/setup-node` 53 | # or others). This is typically only required for manual builds. 54 | # - name: Setup runtime (example) 55 | # uses: actions/setup-example@v1 56 | 57 | # Initializes the CodeQL tools for scanning. 58 | - name: Initialize CodeQL 59 | uses: github/codeql-action/init@v3 60 | with: 61 | languages: {{ '${{ matrix.language }}' }} 62 | build-mode: {{ '${{ matrix.build-mode }}' }} 63 | # If you wish to specify custom queries, you can do so here or in a config file. 64 | # By default, queries listed here will override any specified in a config file. 65 | # Prefix the list here with "+" to use these queries and those in the config file. 66 | 67 | # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 68 | # queries: security-extended,security-and-quality 69 | 70 | # If the analyze step fails for one of the languages you are analyzing with 71 | # "We were unable to automatically build your code", modify the matrix above 72 | # to set the build mode to "manual" for that language. Then modify this step 73 | # to build your code. 74 | # ℹ️ Command-line programs to run using the OS shell. 
75 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 76 | - if: matrix.build-mode == 'manual' 77 | shell: bash 78 | run: | 79 | echo 'If you are using a "manual" build mode for one or more of the' \ 80 | 'languages you are analyzing, replace this with the commands to build' \ 81 | 'your code, for example:' 82 | echo ' make bootstrap' 83 | echo ' make release' 84 | exit 1 85 | 86 | - name: Perform CodeQL Analysis 87 | uses: github/codeql-action/analyze@v3 88 | with: 89 | category: "/language:{{ '${{matrix.language}}' }}" -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | .pytest_cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | .hypothesis/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # Jupyter Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # SageMath parsed files 81 | *.sage.py 82 | 83 | # dotenv 84 | .env 85 | 86 | # virtualenv 87 | .venv 88 | venv/ 89 | ENV/ 90 | 91 | # Spyder project settings 92 | .spyderproject 93 | .spyproject 94 | 95 | # Rope project settings 96 | .ropeproject 97 | 98 | # Pycharm settings 99 | .idea 100 | *.iml 101 | *.iws 102 | *.ipr 103 | 104 | # Ignore Sublime Text settings 105 | *.sublime-workspace 106 | *.sublime-project 107 | 108 | # vim swap 109 | *.swp 110 | 111 | # mkdocs documentation 112 | /site 113 | 114 | # mypy 115 | .mypy_cache/ 116 | 117 | # profraw files from LLVM? Unclear exactly what triggers this 118 | # There are reports this comes from LLVM profiling, but also Xcode 9. 
119 | *profraw 120 | 121 | # In-tree generated files 122 | */_version.py 123 | # Cookiecutter-CMS Test Artifacts 124 | /tests/CI_files 125 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: "ubuntu-22.04" 5 | tools: 6 | python: "mambaforge-22.9" 7 | 8 | conda: 9 | environment: docs/requirements.yaml -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, 8 | body size, disability, ethnicity, gender identity and expression, level of 9 | experience, nationality, personal appearance, race, religion, or sexual 10 | identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment include: 15 | 16 | * Using welcoming and inclusive language 17 | * Being respectful of differing viewpoints and experiences 18 | * Gracefully accepting constructive criticism 19 | * Focusing on what is best for the community 20 | * Showing empathy towards other community members 21 | 22 | Examples of unacceptable behavior by participants include: 23 | 24 | * The use of sexualized language or imagery and unwelcome sexual attention or advances 25 | * Trolling, insulting/derogatory comments, and personal or political attacks 26 | * Public or private harassment 27 | * Publishing others' private information, such as a physical or electronic address, without explicit permission 28 | * Other conduct which could reasonably be considered inappropriate in a professional setting 29 | 30 | ## Our Responsibilities 31 | 32 | Project maintainers are responsible for clarifying the standards of acceptable 33 | behavior and are expected to take appropriate and fair corrective action in 34 | response to any instances of unacceptable behavior. 35 | 36 | Project maintainers have the right and responsibility to remove, edit, or 37 | reject comments, commits, code, wiki edits, issues, and other contributions 38 | that are not aligned to this Code of Conduct, or to ban temporarily or 39 | permanently any contributor for other behaviors that they deem inappropriate, 40 | threatening, offensive, or harmful. 41 | 42 | Moreover, project maintainers will strive to offer feedback and advice to 43 | ensure quality and consistency of contributions to the code. Contributions 44 | from outside the group of project maintainers are strongly welcomed but the 45 | final decision as to whether commits are merged into the codebase rests with 46 | the team of project maintainers. 47 | 48 | ## Scope 49 | 50 | This Code of Conduct applies both within project spaces and in public spaces 51 | when an individual is representing the project or its community. Examples of 52 | representing a project or community include using an official project e-mail 53 | address, posting via an official social media account, or acting as an 54 | appointed representative at an online or offline event. 
Representation of a 55 | project may be further defined and clarified by project maintainers. 56 | 57 | ## Enforcement 58 | 59 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 60 | reported by contacting the project team at '{{cookiecutter.author_email}}'. The project team will 61 | review and investigate all complaints, and will respond in a way that it deems 62 | appropriate to the circumstances. The project team is obligated to maintain 63 | confidentiality with regard to the reporter of an incident. Further details of 64 | specific enforcement policies may be posted separately. 65 | 66 | Project maintainers who do not follow or enforce the Code of Conduct in good 67 | faith may face temporary or permanent repercussions as determined by other 68 | members of the project's leadership. 69 | 70 | ## Attribution 71 | 72 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 73 | version 1.4, available at 74 | [http://contributor-covenant.org/version/1/4][version] 75 | 76 | [homepage]: http://contributor-covenant.org 77 | [version]: http://contributor-covenant.org/version/1/4/ 78 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/LICENSE: -------------------------------------------------------------------------------- 1 | {% if cookiecutter.open_source_license == 'MIT' %} 2 | MIT License 3 | 4 | Copyright (c) {% now 'utc', '%Y' %} {{ cookiecutter.author_name }} 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 23 | {% elif cookiecutter.open_source_license == 'BSD-3-Clause' %} 24 | Copyright {% now 'utc', '%Y' %} {{ cookiecutter.author_name }} 25 | 26 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the 27 | following conditions are met: 28 | 29 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 30 | 31 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following 32 | disclaimer in the documentation and/or other materials provided with the distribution. 33 | 34 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote 35 | products derived from this software without specific prior written permission. 
36 | 37 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, 38 | INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 39 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 40 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 41 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 42 | WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 43 | THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 44 | {% elif cookiecutter.open_source_license == 'LGPLv3' %} 45 | GNU LESSER GENERAL PUBLIC LICENSE 46 | Version 3, 29 June 2007 47 | 48 | Copyright (C) 2007 Free Software Foundation, Inc. 49 | 50 | Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. 51 | 52 | This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below. 53 | 54 | 0. Additional Definitions. 55 | As used herein, “this License” refers to version 3 of the GNU Lesser General Public License, and the “GNU GPL” refers to version 3 of the GNU General Public License. 56 | 57 | “The Library” refers to a covered work governed by this License, other than an Application or a Combined Work as defined below. 58 | 59 | An “Application” is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library. 60 | 61 | A “Combined Work” is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the “Linked Version”. 62 | 63 | The “Minimal Corresponding Source” for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version. 64 | 65 | The “Corresponding Application Code” for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work. 66 | 67 | 1. Exception to Section 3 of the GNU GPL. 68 | You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL. 69 | 70 | 2. Conveying Modified Versions. 
71 | If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version: 72 | 73 | a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or 74 | b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy. 75 | 3. Object Code Incorporating Material from Library Header Files. 76 | The object code form of an Application may incorporate material from a header file that is part of the Library. You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following: 77 | 78 | a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License. 79 | b) Accompany the object code with a copy of the GNU GPL and this license document. 80 | 4. Combined Works. 81 | You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following: 82 | 83 | a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License. 84 | b) Accompany the Combined Work with a copy of the GNU GPL and this license document. 85 | c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document. 86 | d) Do one of the following: 87 | 0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source. 88 | 1) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version. 89 | e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. 
If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.) 90 | 5. Combined Libraries. 91 | You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following: 92 | 93 | a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License. 94 | b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 95 | 6. Revised Versions of the GNU Lesser General Public License. 96 | The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. 97 | 98 | Each version is given a distinguishing version number. If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation. 99 | 100 | If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library. 101 | {% endif %} -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/MANIFEST.in: -------------------------------------------------------------------------------- 1 | include CODE_OF_CONDUCT.md 2 | 3 | global-exclude *.py[cod] __pycache__ *.so 4 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/README.md: -------------------------------------------------------------------------------- 1 | {{cookiecutter.project_name}} 2 | ============================== 3 | [//]: # (Badges) 4 | [![GitHub Actions Build Status](https://github.com/REPLACE_WITH_OWNER_ACCOUNT/{{cookiecutter.repo_name}}/workflows/CI/badge.svg)](https://github.com/REPLACE_WITH_OWNER_ACCOUNT/{{cookiecutter.repo_name}}/actions?query=workflow%3ACI) 5 | [![codecov](https://codecov.io/gh/REPLACE_WITH_OWNER_ACCOUNT/{{cookiecutter.repo_name}}/branch/main/graph/badge.svg)](https://codecov.io/gh/REPLACE_WITH_OWNER_ACCOUNT/{{cookiecutter.repo_name}}/branch/main) 6 | 7 | 8 | {{cookiecutter.description}} 9 | 10 | ### Copyright 11 | 12 | Copyright (c) {% now 'utc', '%Y' %}, {{cookiecutter.author_name}} 13 | 14 | 15 | #### Acknowledgements 16 | 17 | Project based on the 18 | [Computational Molecular Science Python Cookiecutter](https://github.com/molssi/cookiecutter-cms) version {{cookiecutter._cms_cc_version}}.
19 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/devtools/README.md: -------------------------------------------------------------------------------- 1 | # Development, testing, and deployment tools 2 | 3 | This directory contains a collection of tools for running Continuous Integration (CI) tests, 4 | conda installation, and other development tools not directly related to the coding process. 5 | 6 | 7 | ## Manifest 8 | 9 | ### Continuous Integration 10 | 11 | You should test your code, but do not feel compelled to use these specific programs. You also may not need Unix and 12 | Windows testing if you only plan to deploy on specific platforms. These are just to help you get started. 13 | 14 | ### Conda Environment: 15 | 16 | This directory contains the files to set up the Conda environment for testing purposes. 17 | 18 | * `conda-envs`: directory containing the YAML file(s) which fully describe Conda Environments, their dependencies, and the provenance of those dependencies 19 | * `test_env.yaml`: Simple test environment file with base dependencies. Channels are not specified here and therefore respect global Conda configuration 20 | 21 | ### Additional Scripts: 22 | 23 | This directory contains OS-agnostic helper scripts which don't fall in any of the previous categories 24 | * `scripts` 25 | * `create_conda_env.py`: Helper program for spinning up new conda environments based on a starter file, with command-line options for the Python version and environment name 26 | 27 | 28 | ## How to contribute changes 29 | - Clone the repository if you have write access to the main repo; fork the repository if you do not. 30 | - Make a new branch with `git checkout -b {your branch name}` 31 | - Make changes and test your code 32 | - Ensure that the test environment dependencies (`conda-envs`) line up with the build and deploy dependencies (`conda-recipe/meta.yaml`) 33 | - Push the branch to the repo (either the main or your fork) with `git push -u origin {your branch name}` 34 | * Note that `origin` is the default name assigned to the remote, yours may be different 35 | - Make a PR on GitHub with your changes 36 | - We'll review the changes and get your code into the repo after lively discussion! 37 | 38 | 39 | ## Checklist for updates 40 | - [ ] Make sure there is an issue (or issues) opened for your specific update 41 | - [ ] Create the PR, referencing the issue 42 | - [ ] Debug the PR as needed until tests pass 43 | - [ ] Tag the final, debugged version 44 | * `git tag -a X.Y.Z [latest pushed commit] && git push --follow-tags` 45 | - [ ] Get the PR merged in 46 | 47 | ## Versioningit Auto-version 48 | [Versioningit](https://github.com/jwodder/versioningit), configured in `pyproject.toml`, will automatically infer what version 49 | is installed by looking at the `git` tags and how many commits ahead this version is. The format follows 50 | [PEP 440](https://www.python.org/dev/peps/pep-0440/) and has the regular expression of: 51 | ```regexp 52 | \d+\.\d+\.\d+(\+\d+\.g[a-z0-9]+)? 53 | ``` 54 | If the version of this commit is the same as a `git` tag, the installed version is the same as the tag, 55 | e.g. `{{cookiecutter.repo_name}}-0.1.2`, otherwise it will be appended with `+X` where `X` is the number of commits 56 | ahead from the last tag, and then `.gYYYYYY` where the `Y`'s are replaced with the `git` commit hash.
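
As a quick, hypothetical illustration (not part of the generated project), you can check the inferred version at run time once the package has been installed; the exact suffix depends on the `[tool.versioningit.format]` settings in `pyproject.toml`:

```python
# Hypothetical sketch: inspect the auto-generated version string.
# Assumes the package has been installed (e.g. `pip install -e .`).
from importlib.metadata import version

v = version("{{cookiecutter.repo_name}}")
# On the 0.1.2 tag this prints "0.1.2"; three commits later it would look
# roughly like "0.1.2+3.g1a2b3c4", per the format configured in pyproject.toml.
print(v)
```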
57 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/devtools/conda-envs/test_env.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | channels: 3 | {% if cookiecutter.dependency_source == 'Prefer conda-forge over the default anaconda channel with pip fallback' %} 4 | - conda-forge 5 | {% endif %} 6 | - defaults 7 | dependencies: 8 | # Base depends 9 | - python 10 | - pip 11 | 12 | # Testing 13 | - pytest 14 | - pytest-cov 15 | {% if cookiecutter.dependency_source == 'Prefer default anaconda channel with pip fallback' %} 16 | # Pip-only installs 17 | - pip: 18 | - codecov 19 | {% else %} - codecov 20 | 21 | # Pip-only installs 22 | #- pip: 23 | # - codecov 24 | {% endif %} 25 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/devtools/scripts/create_conda_env.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | import re 4 | import glob 5 | import shutil 6 | import subprocess as sp 7 | from tempfile import TemporaryDirectory 8 | from contextlib import contextmanager 9 | # YAML imports 10 | try: 11 | import yaml # PyYAML 12 | loader = yaml.safe_load 13 | except ImportError: 14 | try: 15 | import ruamel_yaml as yaml # Ruamel YAML 16 | except ImportError: 17 | try: 18 | # Load Ruamel YAML from the base conda environment 19 | from importlib import util as import_util 20 | CONDA_BIN = os.path.dirname(os.environ['CONDA_EXE']) 21 | ruamel_yaml_path = glob.glob(os.path.join(CONDA_BIN, '..', 22 | 'lib', 'python*.*', 'site-packages', 23 | 'ruamel_yaml', '__init__.py'))[0] 24 | # Based on importlib example, but only needs to load_module since its the whole package, not just 25 | # a module 26 | spec = import_util.spec_from_file_location('ruamel_yaml', ruamel_yaml_path) 27 | yaml = spec.loader.load_module() 28 | except (KeyError, ImportError, IndexError): 29 | raise ImportError("No YAML parser could be found in this or the conda environment. " 30 | "Could not find PyYAML or Ruamel YAML in the current environment, " 31 | "AND could not find Ruamel YAML in the base conda environment through CONDA_EXE path. 
" 32 | "Environment not created!") 33 | loader = yaml.YAML(typ="safe").load # typ="safe" avoids odd typing on output 34 | 35 | 36 | @contextmanager 37 | def temp_cd(): 38 | """Temporary CD Helper""" 39 | cwd = os.getcwd() 40 | with TemporaryDirectory() as td: 41 | try: 42 | os.chdir(td) 43 | yield 44 | finally: 45 | os.chdir(cwd) 46 | 47 | 48 | # Args 49 | parser = argparse.ArgumentParser(description='Creates a conda environment from file for a given Python version.') 50 | parser.add_argument('-n', '--name', type=str, 51 | help='The name of the created Python environment') 52 | parser.add_argument('-p', '--python', type=str, 53 | help='The version of the created Python environment') 54 | parser.add_argument('conda_file', 55 | help='The file for the created Python environment') 56 | 57 | args = parser.parse_args() 58 | 59 | # Open the base file 60 | with open(args.conda_file, "r") as handle: 61 | yaml_script = loader(handle.read()) 62 | 63 | python_replacement_string = "python {}*".format(args.python) 64 | 65 | try: 66 | for dep_index, dep_value in enumerate(yaml_script['dependencies']): 67 | if re.match('python([ ><=*]+[0-9.*]*)?$', dep_value): # Match explicitly 'python' and its formats 68 | yaml_script['dependencies'].pop(dep_index) 69 | break # Making the assumption there is only one Python entry, also avoids need to enumerate in reverse 70 | except (KeyError, TypeError): 71 | # Case of no dependencies key, or dependencies: None 72 | yaml_script['dependencies'] = [] 73 | finally: 74 | # Ensure the python version is added in. Even if the code does not need it, we assume the env does 75 | yaml_script['dependencies'].insert(0, python_replacement_string) 76 | 77 | # Figure out conda path 78 | if "CONDA_EXE" in os.environ: 79 | conda_path = os.environ["CONDA_EXE"] 80 | else: 81 | conda_path = shutil.which("conda") 82 | if conda_path is None: 83 | raise RuntimeError("Could not find a conda binary in CONDA_EXE variable or in executable search path") 84 | 85 | print("CONDA ENV NAME {}".format(args.name)) 86 | print("PYTHON VERSION {}".format(args.python)) 87 | print("CONDA FILE NAME {}".format(args.conda_file)) 88 | print("CONDA PATH {}".format(conda_path)) 89 | 90 | # Write to a temp directory which will always be cleaned up 91 | with temp_cd(): 92 | temp_file_name = "temp_script.yaml" 93 | with open(temp_file_name, 'w') as f: 94 | f.write(yaml.dump(yaml_script)) 95 | sp.call("{} env create -n {} -f {}".format(conda_path, args.name, temp_file_name), shell=True) 96 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = {{cookiecutter.repo_name}} 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/README.md: -------------------------------------------------------------------------------- 1 | # Compiling {{cookiecutter.project_name}}'s Documentation 2 | 3 | The docs for this project are built with [Sphinx](http://www.sphinx-doc.org/en/master/). 4 | To compile the docs, first ensure that the necessary dependencies are installed. 5 | 6 | {% if (cookiecutter.dependency_source == 'Prefer conda-forge over the default anaconda channel with pip fallback' or cookiecutter.dependency_source == 'Prefer default anaconda channel with pip fallback') %} 7 | 8 | You can use the provided `requirements.yaml` file to create a conda environment with the necessary dependencies by running 9 | 10 | ```bash 11 | conda env create -f requirements.yaml 12 | ``` 13 | 14 | If you wish to install the dependencies in your current environment instead, you can run 15 | 16 | ```bash 17 | conda env update --file requirements.yaml 18 | ``` 19 | 20 | {% elif cookiecutter.dependency_source == 'Dependencies from pip only (no conda)' %} 21 | ```bash 22 | pip install sphinx pydata-sphinx-theme sphinx-copybutton sphinx-design 23 | ``` 24 | {% endif %} 25 | 26 | Once installed, you can use the `Makefile` in this directory to compile static HTML pages by running 27 | ```bash 28 | make html 29 | ``` 30 | 31 | The documentation contains default pages for "Getting Started", "User Guide", "Developer Guide", and an API reference. 32 | We recommend keeping these sections to ensure comprehensive documentation for all aspects of your project. 33 | 34 | The compiled docs will be in the `_build` directory and can be viewed by opening `index.html` (which may itself 35 | be inside a directory called `html/` depending on what version of Sphinx is installed). 36 | 37 | {% if (cookiecutter.include_ReadTheDocs == 'y') %} 38 | A configuration file for [Read The Docs](https://readthedocs.org/) (`.readthedocs.yaml`) is included in the top level of the repository. To use Read the Docs to host your documentation, go to https://readthedocs.org/ and connect this repository. You may need to change your default branch to `main` under Advanced Settings for the project. 39 | 40 | If you would like to use Read The Docs with `autodoc` (included automatically) and your package has dependencies, you will need to include those dependencies in your documentation yaml file (`docs/requirements.yaml`). 41 | 42 | {% endif %} -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/_static/README.md: -------------------------------------------------------------------------------- 1 | # Static Doc Directory 2 | 3 | Add any paths that contain custom static files (such as style sheets) here, 4 | relative to the `conf.py` file's directory. 5 | They are copied after the builtin static files, 6 | so a file named "default.css" will overwrite the builtin "default.css".
7 | 8 | The path to this folder is set in the Sphinx `conf.py` file in the line: 9 | ```python 10 | html_static_path = ['_static'] 11 | ``` 12 | 13 | ## Examples of files to add to this directory 14 | * Custom Cascading Style Sheets 15 | * Custom JavaScript code 16 | * Static logo images 17 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/_templates/README.md: -------------------------------------------------------------------------------- 1 | # Templates Doc Directory 2 | 3 | Add any paths that contain templates here, relative to 4 | the `conf.py` file's directory. 5 | They are copied after the builtin template files, 6 | so a file named "page.html" will overwrite the builtin "page.html". 7 | 8 | The path to this folder is set in the Sphinx `conf.py` file in the line: 9 | ```python 10 | templates_path = ['_templates'] 11 | ``` 12 | 13 | ## Examples of files to add to this directory 14 | * HTML extensions of stock pages like `page.html` or `layout.html` 15 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/api.rst: -------------------------------------------------------------------------------- 1 | API Documentation 2 | ================= 3 | 4 | .. autosummary:: 5 | :toctree: autosummary 6 | 7 | {{cookiecutter.repo_name}}.canvas 8 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file only contains a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/stable/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | 15 | # In case the project was not installed 16 | import os 17 | import sys 18 | sys.path.insert(0, os.path.abspath('..')) 19 | 20 | import {{cookiecutter.repo_name}} 21 | 22 | 23 | # -- Project information ----------------------------------------------------- 24 | 25 | project = '{{cookiecutter.project_name}}' 26 | copyright = ("{% now 'utc', '%Y' %}, {{cookiecutter.author_name}}. Project structure based on the " 27 | "Computational Molecular Science Python Cookiecutter version {{cookiecutter._cms_cc_version}}") 28 | author = '{{cookiecutter.author_name}}' 29 | 30 | # The short X.Y version 31 | version = '' 32 | # The full version, including alpha/beta/rc tags 33 | release = '' 34 | 35 | 36 | # -- General configuration --------------------------------------------------- 37 | 38 | # If your documentation needs a minimal Sphinx version, state it here. 39 | # 40 | # needs_sphinx = '1.0' 41 | 42 | # Add any Sphinx extension module names here, as strings. They can be 43 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 44 | # ones.
45 | extensions = [ 46 | 'sphinx.ext.autosummary', 47 | 'sphinx.ext.autodoc', 48 | 'sphinx.ext.mathjax', 49 | 'sphinx.ext.viewcode', 50 | 'sphinx.ext.napoleon', 51 | 'sphinx.ext.intersphinx', 52 | 'sphinx.ext.extlinks', 53 | 'sphinx_design', 54 | 'sphinx_copybutton', 55 | ] 56 | 57 | 58 | autosummary_generate = True 59 | napoleon_google_docstring = False 60 | napoleon_use_param = False 61 | napoleon_use_ivar = True 62 | 63 | # Add any paths that contain templates here, relative to this directory. 64 | templates_path = ['_templates'] 65 | 66 | # The suffix(es) of source filenames. 67 | # You can specify multiple suffix as a list of string: 68 | # 69 | # source_suffix = ['.rst', '.md'] 70 | source_suffix = '.rst' 71 | 72 | # The master toctree document. 73 | master_doc = 'index' 74 | 75 | # The language for content autogenerated by Sphinx. Refer to documentation 76 | # for a list of supported languages. 77 | # 78 | # This is also used if you do content translation via gettext catalogs. 79 | # Usually you set "language" from the command line for these cases. 80 | language = None 81 | 82 | # List of patterns, relative to source directory, that match files and 83 | # directories to ignore when looking for source files. 84 | # This pattern also affects html_static_path and html_extra_path . 85 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 86 | 87 | # The name of the Pygments (syntax highlighting) style to use. 88 | pygments_style = 'default' 89 | 90 | 91 | # -- Options for HTML output ------------------------------------------------- 92 | 93 | # The theme to use for HTML and HTML Help pages. See the documentation for 94 | # a list of builtin themes. 95 | # 96 | html_theme = 'pydata_sphinx_theme' 97 | 98 | # Theme options are theme-specific and customize the look and feel of a theme 99 | # further. For a list of options available for each theme, see the 100 | # documentation. 101 | # 102 | # html_theme_options = {} 103 | 104 | # Add any paths that contain custom static files (such as style sheets) here, 105 | # relative to this directory. They are copied after the builtin static files, 106 | # so a file named "default.css" will overwrite the builtin "default.css". 107 | html_static_path = ['_static'] 108 | 109 | # Custom sidebar templates, must be a dictionary that maps document names 110 | # to template names. 111 | # 112 | # The default sidebars (for documents that don't match any pattern) are 113 | # defined by theme itself. Builtin themes are using these templates by 114 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 115 | # 'searchbox.html']``. 116 | # 117 | # html_sidebars = {} 118 | 119 | 120 | # -- Options for HTMLHelp output --------------------------------------------- 121 | 122 | # Output file base name for HTML help builder. 123 | htmlhelp_basename = '{{cookiecutter.repo_name}}doc' 124 | 125 | 126 | # -- Options for LaTeX output ------------------------------------------------ 127 | 128 | latex_elements = { 129 | # The paper size ('letterpaper' or 'a4paper'). 130 | # 131 | # 'papersize': 'letterpaper', 132 | 133 | # The font size ('10pt', '11pt' or '12pt'). 134 | # 135 | # 'pointsize': '10pt', 136 | 137 | # Additional stuff for the LaTeX preamble. 138 | # 139 | # 'preamble': '', 140 | 141 | # Latex figure (float) alignment 142 | # 143 | # 'figure_align': 'htbp', 144 | } 145 | 146 | # Grouping the document tree into LaTeX files. List of tuples 147 | # (source start file, target name, title, 148 | # author, documentclass [howto, manual, or own class]). 
149 | latex_documents = [ 150 | (master_doc, '{{cookiecutter.repo_name}}.tex', '{{cookiecutter.project_name}} Documentation', 151 | '{{cookiecutter.repo_name}}', 'manual'), 152 | ] 153 | 154 | 155 | # -- Options for manual page output ------------------------------------------ 156 | 157 | # One entry per manual page. List of tuples 158 | # (source start file, name, description, authors, manual section). 159 | man_pages = [ 160 | (master_doc, '{{cookiecutter.repo_name}}', '{{cookiecutter.project_name}} Documentation', 161 | [author], 1) 162 | ] 163 | 164 | 165 | # -- Options for Texinfo output ---------------------------------------------- 166 | 167 | # Grouping the document tree into Texinfo files. List of tuples 168 | # (source start file, target name, title, author, 169 | # dir menu entry, description, category) 170 | texinfo_documents = [ 171 | (master_doc, '{{cookiecutter.repo_name}}', '{{cookiecutter.project_name}} Documentation', 172 | author, '{{cookiecutter.repo_name}}', '{{cookiecutter.description}}', 173 | 'Miscellaneous'), 174 | ] 175 | 176 | 177 | # -- Extension configuration ------------------------------------------------- 178 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/developer_guide.rst: -------------------------------------------------------------------------------- 1 | Developer Guide 2 | =============== 3 | 4 | This page details how to contribute to {{cookiecutter.project_name}}. 5 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/getting_started.rst: -------------------------------------------------------------------------------- 1 | Getting Started 2 | =============== 3 | 4 | 5 | You might choose to write an overview tutorial or set of tutorials. 6 | 7 | .. code-block:: python 8 | 9 | import {{cookiecutter.repo_name}} 10 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/index.rst: -------------------------------------------------------------------------------- 1 | .. {{cookiecutter.repo_name}} documentation master file, created by 2 | sphinx-quickstart on Thu Mar 15 13:55:56 2018. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to {{cookiecutter.project_name}}'s documentation! 7 | ========================================================= 8 | 9 | .. grid:: 1 1 2 2 10 | 11 | .. grid-item-card:: Getting Started 12 | :margin: 0 3 0 0 13 | 14 | Learn the basics of using {{cookiecutter.project_name}}. 15 | 16 | .. button-link:: ./getting_started.html 17 | :color: primary 18 | :outline: 19 | :expand: 20 | 21 | To the Getting Started Guide 22 | 23 | 24 | 25 | .. grid-item-card:: User Guide 26 | :margin: 0 3 0 0 27 | 28 | An in-depth guide for users. 29 | 30 | .. button-link:: ./user_guide.html 31 | :color: primary 32 | :outline: 33 | :expand: 34 | 35 | To the User Guide 36 | 37 | 38 | 39 | .. grid-item-card:: API Reference 40 | :margin: 0 3 0 0 41 | 42 | How to use the API of {{cookiecutter.project_name}}. 43 | 44 | .. button-link:: ./api.html 45 | :color: primary 46 | :outline: 47 | :expand: 48 | 49 | To the API Reference. 50 | 51 | 52 | 53 | .. grid-item-card:: Developer Guide 54 | :margin: 0 3 0 0 55 | 56 | How to contribute to {{cookiecutter.project_name}}. 57 | 58 | .. 
button-link:: ./developer_guide.html 59 | :color: primary 60 | :outline: 61 | :expand: 62 | 63 | To the Developer Guide 64 | 65 | 66 | .. toctree:: 67 | :maxdepth: 2 68 | :hidden: 69 | :titlesonly: 70 | 71 | getting_started 72 | user_guide 73 | api 74 | developer_guide 75 | 76 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ={{cookiecutter.repo_name}} 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/requirements.yaml: -------------------------------------------------------------------------------- 1 | name: docs_{{cookiecutter.project_name}} 2 | channels: 3 | {% if cookiecutter.dependency_source == 'Prefer conda-forge over the default anaconda channel with pip fallback' %} 4 | - conda-forge 5 | {% endif %} 6 | - defaults 7 | dependencies: 8 | # Base depends 9 | - python 10 | - pip 11 | {% if cookiecutter.dependency_source == 'Prefer conda-forge over the default anaconda channel with pip fallback' %} 12 | - pydata-sphinx-theme 13 | - sphinx-design 14 | - sphinx-copybutton 15 | {% endif %} 16 | 17 | {% if cookiecutter.dependency_source == 'Prefer default anaconda channel with pip fallback' %} 18 | # Pip-only installs 19 | - pip: 20 | - -e ../ 21 | - pydata-sphinx-theme 22 | - sphinx-design 23 | - sphinx-copybutton 24 | {% else %} 25 | 26 | # Pip-only installs 27 | - pip: 28 | - -e ../ 29 | {% endif %} 30 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/docs/user_guide.rst: -------------------------------------------------------------------------------- 1 | User Guide 2 | =============== 3 | 4 | This page details how to use {{cookiecutter.project_name}}.
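
As a starting point, here is a minimal, hypothetical example built around the placeholder ``canvas`` function that ships with the template; replace it with your project's real API as you flesh out this guide.

.. code-block:: python

    import {{cookiecutter.repo_name}}

    # canvas() is the placeholder function provided by the template;
    # it returns a short quote with optional attribution.
    print({{cookiecutter.repo_name}}.canvas(with_attribution=True))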
5 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61.0", "versioningit~=2.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | # Self-descriptive entries which should always be present 6 | # https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ 7 | [project] 8 | name = "{{cookiecutter.repo_name}}" 9 | description = "{{cookiecutter.description}}" 10 | dynamic = ["version"] 11 | readme = "README.md" 12 | authors = [ 13 | { name = "{{cookiecutter.author_name}}", email = "{{cookiecutter.author_email}}" } 14 | ] 15 | license = "{{cookiecutter.open_source_license}}" 16 | license-files = ["LICENSE"] 17 | # See https://pypi.org/classifiers/ 18 | classifiers = [ 19 | "Programming Language :: Python :: 3", 20 | ] 21 | requires-python = ">=3.8" 22 | # Declare any run-time dependencies that should be installed with the package. 23 | #dependencies = [ 24 | # "importlib-resources;python_version<'3.10'", 25 | #] 26 | 27 | # Update the urls once the hosting is set up. 28 | #[project.urls] 29 | #"Source" = "https://github.com//{{cookiecutter.repo_name}}/" 30 | #"Documentation" = "https://{{cookiecutter.repo_name}}.readthedocs.io/" 31 | 32 | [project.optional-dependencies] 33 | test = [ 34 | "pytest>=6.1.2", 35 | ] 36 | 37 | [tool.setuptools] 38 | # This subkey is a beta stage development and keys may change in the future, see https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html for more details 39 | # 40 | # As of version 0.971, mypy does not support type checking of installed zipped 41 | # packages (because it does not actually import the Python packages). 42 | # We declare the package not-zip-safe so that our type hints are also available 43 | # when checking client code that uses our (installed) package. 44 | # Ref: 45 | # https://mypy.readthedocs.io/en/stable/installed_packages.html?highlight=zip#using-installed-packages-with-mypy-pep-561 46 | zip-safe = false 47 | # Let setuptools discover the package in the current directory, 48 | # but be explicit about non-Python files. 49 | # See also: 50 | # https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html#setuptools-specific-configuration 51 | # Note that behavior is currently evolving with respect to how to interpret the 52 | # "data" and "tests" subdirectories. As of setuptools 63, both are automatically 53 | # included if namespaces is true (default), even if the package is named explicitly 54 | # (instead of using 'find'). With 'find', the 'tests' subpackage is discovered 55 | # recursively because of its __init__.py file, but the data subdirectory is excluded 56 | # with include-package-data = false and namespaces = false. 
57 | include-package-data = false 58 | [tool.setuptools.packages.find] 59 | namespaces = false 60 | where = ["."] 61 | 62 | # Ref https://setuptools.pypa.io/en/latest/userguide/datafiles.html#package-data 63 | [tool.setuptools.package-data] 64 | {{cookiecutter.repo_name}} = [ 65 | "py.typed" 66 | ] 67 | 68 | [tool.versioningit] 69 | default-version = "1+unknown" 70 | 71 | [tool.versioningit.format] 72 | distance = "{base_version}+{distance}.{vcs}{rev}" 73 | dirty = "{base_version}+{distance}.{vcs}{rev}.dirty" 74 | distance-dirty = "{base_version}+{distance}.{vcs}{rev}.dirty" 75 | 76 | [tool.versioningit.vcs] 77 | # The method key: 78 | method = "git" # <- The method name 79 | # Parameters to pass to the method: 80 | match = ["*"] 81 | default-tag = "1.0.0" 82 | 83 | [tool.versioningit.write] 84 | file = "{{cookiecutter.repo_name}}/_version.py" 85 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/setup.cfg: -------------------------------------------------------------------------------- 1 | # Helper file to handle all configs 2 | 3 | [coverage:run] 4 | # .coveragerc to control coverage.py and pytest-cov 5 | omit = 6 | # Omit the tests 7 | */tests/* 8 | # Omit the auto-generated version file 9 | {{cookiecutter.repo_name}}/_version.py 10 | 11 | [yapf] 12 | # YAPF, in .style.yapf files this shows up as "[style]" header 13 | COLUMN_LIMIT = 119 14 | INDENT_WIDTH = 4 15 | USE_TABS = False 16 | 17 | [flake8] 18 | # Flake8, PyFlakes, etc 19 | max-line-length = 119 20 | 21 | [aliases] 22 | test = pytest 23 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/__init__.py: -------------------------------------------------------------------------------- 1 | """{{cookiecutter.description}}""" 2 | 3 | # Add imports here 4 | from .{{cookiecutter.first_module_name}} import * 5 | 6 | 7 | from ._version import __version__ 8 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/data/README.md: -------------------------------------------------------------------------------- 1 | # Sample Package Data 2 | 3 | This directory contains sample additional data you may want to include with your package. 4 | This is a place where non-code-related additional information (such as data files, molecular structures, etc.) can 5 | go that you want to ship alongside your code. 6 | 7 | Please note that it is not recommended to place large files in your git directory. If your project requires files larger 8 | than a few megabytes in size, it is recommended to host these files elsewhere. This is especially true for binary files, 9 | as `git` cannot store incremental changes to them and will keep a complete copy of every version 10 | in your `git` history, which can quickly add up. As a note, most `git` hosting services like GitHub have a 1 GB per repository 11 | cap. 12 | 13 | ## Including package data 14 | 15 | Modify your package's `pyproject.toml` file. 16 | Update the [tool.setuptools.package-data](https://setuptools.pypa.io/en/latest/userguide/datafiles.html#package-data) table 17 | and point it at the correct files. 18 | Paths are relative to the package directory. 19 | 20 | Package data can be accessed at run time with `importlib.resources` or the `importlib_resources` back port.
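
As a rough sketch (assuming you have added the `data` directory to `[tool.setuptools.package-data]` so it ships with the built package), run-time access could look like this:

```python
# Hypothetical example of reading the bundled sample data file at run time.
# Requires Python >= 3.9 for importlib.resources.files(); use the
# importlib_resources back port on older interpreters.
from importlib.resources import files

data_file = files("{{cookiecutter.repo_name}}") / "data" / "look_and_say.dat"
look_and_say = [int(line) for line in data_file.read_text().splitlines() if line.strip()]
print(look_and_say[:5])  # [1, 11, 21, 1211, 111221]
```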
21 | See https://setuptools.pypa.io/en/latest/userguide/datafiles.html#accessing-data-files-at-runtime 22 | for suggestions. 23 | 24 | If modules within your package will access internal data files using 25 | [the recommended approach](https://setuptools.pypa.io/en/latest/userguide/datafiles.html#accessing-data-files-at-runtime), 26 | you may need to include `importlib_resources` in your package dependencies. 27 | In `pyproject.toml`, include the following in your `[project]` table. 28 | ``` 29 | dependencies = [ 30 | "importlib-resources;python_version<'3.10'", 31 | ] 32 | ``` 33 | 34 | ## Manifest 35 | 36 | * `look_and_say.dat`: first entries of the "Look and Say" integer series, sequence [A005150](https://oeis.org/A005150) 37 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/data/look_and_say.dat: -------------------------------------------------------------------------------- 1 | 1 2 | 11 3 | 21 4 | 1211 5 | 111221 6 | 312211 7 | 13112221 8 | 1113213211 9 | 31131211131221 10 | 13211311123113112211 11 | 11131221133112132113212221 12 | 3113112221232112111312211312113211 13 | 1321132132111213122112311311222113111221131221 14 | 11131221131211131231121113112221121321132132211331222113112211 15 | 311311222113111231131112132112311321322112111312211312111322212311322113212221 -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/py.typed: -------------------------------------------------------------------------------- 1 | # PEP 561 marker file. See https://peps.python.org/pep-0561/ 2 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Empty init file in case you choose a package besides PyTest such as Nose which may look for such a file. 3 | """ 4 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/tests/test_{{cookiecutter.repo_name}}.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit and regression test for the {{cookiecutter.repo_name}} package. 3 | """ 4 | 5 | # Import package, test suite, and other packages as needed 6 | import sys 7 | 8 | import pytest 9 | 10 | import {{cookiecutter.repo_name}} 11 | 12 | 13 | def test_{{cookiecutter.repo_name}}_imported(): 14 | """Sample test, will always pass so long as import statement worked.""" 15 | assert "{{cookiecutter.repo_name}}" in sys.modules 16 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/{{cookiecutter.first_module_name}}.py: -------------------------------------------------------------------------------- 1 | """Provide the primary functions.""" 2 | 3 | 4 | def canvas(with_attribution=True): 5 | """ 6 | Placeholder function to show example docstring (NumPy format). 7 | 8 | Replace this function and doc string for your own project. 9 | 10 | Parameters 11 | ---------- 12 | with_attribution : bool, Optional, default: True 13 | Set whether or not to display who the quote is from. 14 | 15 | Returns 16 | ------- 17 | quote : str 18 | Compiled string including quote and optional attribution. 
19 | """ 20 | 21 | quote = "The code is but a canvas to our imagination." 22 | if with_attribution: 23 | quote += "\n\t- Adapted from Henry David Thoreau" 24 | return quote 25 | 26 | 27 | if __name__ == "__main__": 28 | # Do something if this file is invoked on its own 29 | print(canvas()) 30 | --------------------------------------------------------------------------------