├── docs
├── 404.md
├── howto
│ └── index.md
├── static
│ ├── images
│ │ ├── bear.png
│ │ ├── favicon.ico
│ │ ├── python-love.png
│ │ ├── logo-background.png
│ │ └── logo-no-background.png
│ └── css
│ │ └── mkdocstrings.css
└── index.md
├── .gitlab
├── .gitkeep
├── issue_template
│ ├── .gitkeep
│ ├── task.md
│ └── bug.md
└── .gitlab-webide.yml
├── app
├── __init__.py
├── core
│ ├── __init__.py
│ ├── utils
│ │ ├── __init__.py
│ │ ├── cache.py
│ │ ├── security.py
│ │ ├── logs.py
│ │ ├── faker.py
│ │ ├── utils.py
│ │ ├── db_format.py
│ │ ├── func.py
│ │ ├── files.py
│ │ ├── time.py
│ │ └── dataframe.py
│ ├── settings
│ │ ├── __init__.py
│ │ └── logger.py
│ └── config.py
├── domain
│ ├── __init__.py
│ ├── utils
│ │ ├── __init__.py
│ │ └── manager.py
│ ├── models
│ │ └── __init__.py
│ └── schemas
│ │ └── __init__.py
├── infrastructure
│ └── __init__.py
├── interfaces
│ ├── api
│ │ └── __init__.py
│ └── cli
│ │ └── __init__.py
└── main.py
├── test
├── __init__.py
├── config.py
└── test_functions.py
├── hooks
├── post_gen_project.py
└── pre_gen_project.py
├── .github
├── ISSUE_TEMPLATE
│ ├── bug.md
│ └── task.md
└── PULL_REQUEST_TEMPLATE
│ └── pr_template.md
├── {{cookiecutter.directory_name}}
├── .gitlab
│ ├── .gitkeep
│ └── issue_template
│ │ ├── .gitkeep
│ │ ├── task.md
│ │ └── bug.md
├── app
│ ├── __init__.py
│ ├── core
│ │ ├── __init__.py
│ │ ├── utils
│ │ │ ├── __init__.py
│ │ │ ├── cache.py
│ │ │ ├── security.py
│ │ │ ├── logs.py
│ │ │ ├── faker.py
│ │ │ ├── utils.py
│ │ │ ├── db_format.py
│ │ │ ├── func.py
│ │ │ ├── files.py
│ │ │ ├── time.py
│ │ │ └── dataframe.py
│ │ ├── settings
│ │ │ ├── __init__.py
│ │ │ └── logger.py
│ │ └── config.py
│ ├── domain
│ │ ├── __init__.py
│ │ ├── models
│ │ │ └── __init__.py
│ │ ├── schemas
│ │ │ └── __init__.py
│ │ └── utils
│ │ │ ├── __init__.py
│ │ │ └── manager.py
│ ├── infrastructure
│ │ └── __init__.py
│ ├── interfaces
│ │ ├── api
│ │ │ └── __init__.py
│ │ └── cli
│ │ │ └── __init__.py
│ └── main.py
├── test
│ ├── __init__.py
│ ├── config.py
│ └── test_functions.py
├── .github
│ ├── ISSUE_TEMPLATE
│ │ ├── bug.md
│ │ └── task.md
│ └── PULL_REQUEST_TEMPLATE
│ │ └── pr_template.md
├── .env.example
├── scripts
│ ├── launch.sh
│ └── generate_docs.py
├── codecov.yaml
├── mypy.ini
├── .devcontainer
│ ├── docker-compose.yml
│ └── devcontainer.json
├── Docker
│ ├── db.dockerfile
│ ├── ci.dockerfile
│ └── python.dockerfile
├── notebooks
│ └── test.ipynb
├── .vscode
│ ├── google_no_types.mustache
│ ├── settings.json
│ ├── launch.json
│ └── extensions.json
├── tox.ini
├── README.md
├── docker-compose.yml
├── .pre-commit-config.yaml
├── mkdocs.yml
├── Makefile
└── pyproject.toml
├── scripts
├── launch.sh
└── generate_docs.py
├── config.yaml
├── .devcontainer
├── docker-compose.yml
└── devcontainer.json
├── Docker
├── db.dockerfile
├── ci.dockerfile
└── python.dockerfile
├── readme
├── SECURITY.md
├── CONTRIBUTING-en.md
├── CONTRIBUTING-it.md
├── README-en.md
└── README-it.md
├── notebooks
└── test.ipynb
├── .vscode
├── google_no_types.mustache
├── settings.json
├── launch.json
└── extensions.json
├── cookiecutter.json
├── LICENSE
├── replay
└── Bear.json
├── .pre-commit-config.yaml
├── README.md
├── mkdocs.yml
├── Makefile
├── pyproject.toml
├── mani.yml
└── .gitignore
/docs/404.md:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitlab/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/config.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/core/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/domain/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/core/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/domain/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hooks/post_gen_project.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug.md:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/task.md:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitlab/issue_template/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/core/settings/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/domain/models/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/domain/schemas/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/infrastructure/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/interfaces/api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/interfaces/cli/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/howto/index.md:
--------------------------------------------------------------------------------
1 | # Bear
2 |
3 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.gitlab/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/test/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/test/config.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/domain/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.github/ISSUE_TEMPLATE/bug.md:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.github/ISSUE_TEMPLATE/task.md:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.gitlab/issue_template/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/settings/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/domain/models/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/domain/schemas/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/domain/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/infrastructure/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/interfaces/api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/interfaces/cli/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/static/images/bear.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PythonBiellaGroup/Bear/HEAD/docs/static/images/bear.png
--------------------------------------------------------------------------------
/docs/static/images/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PythonBiellaGroup/Bear/HEAD/docs/static/images/favicon.ico
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.env.example:
--------------------------------------------------------------------------------
1 | LOG_VERBOSITY=INFO
2 | APP_NAME=bakky
3 | APP_VERSION=0.0.1
4 | DEBUG=True
--------------------------------------------------------------------------------
/docs/static/images/python-love.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PythonBiellaGroup/Bear/HEAD/docs/static/images/python-love.png
--------------------------------------------------------------------------------
/docs/static/images/logo-background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PythonBiellaGroup/Bear/HEAD/docs/static/images/logo-background.png
--------------------------------------------------------------------------------
/docs/static/images/logo-no-background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PythonBiellaGroup/Bear/HEAD/docs/static/images/logo-no-background.png
--------------------------------------------------------------------------------
/scripts/launch.sh:
--------------------------------------------------------------------------------
1 | export PYTHONPATH=$(pwd)
2 | export API_ENDPOINT_PORT=8080
3 | export VERBOSITY=DEBUG
4 | export DEBUG_MODE=True
5 | python3 app/main.py
6 | #python3 app/main.py
7 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/scripts/launch.sh:
--------------------------------------------------------------------------------
1 | export PYTHONPATH=$(pwd)
2 | export API_ENDPOINT_PORT=8080
3 | export VERBOSITY=DEBUG
4 | export DEBUG_MODE=True
5 | python3 app/main.py
6 | #python3 app/main.py
7 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/codecov.yaml:
--------------------------------------------------------------------------------
1 | coverage:
2 | range: 70..100
3 | round: down
4 | precision: 1
5 | status:
6 | project:
7 | default:
8 | target: 90%
9 | threshold: 0.5%
10 |
--------------------------------------------------------------------------------
/.gitlab/.gitlab-webide.yml:
--------------------------------------------------------------------------------
1 | terminal:
2 | # This can be any image that has the necessary runtime environment for your project.
3 | image: ubuntu:20.04
4 | # before_script:
5 | # - sudo apt update && sudo apt upgrade -y
6 | # script: sleep 60
7 | variables:
8 | TEST_ENV: "test"
9 |
--------------------------------------------------------------------------------
/config.yaml:
--------------------------------------------------------------------------------
1 | # Cookiecutter config for tests
2 | default_context:
3 | project: testone
4 | project_description: "local test for tempy"
5 | author: "PBG"
6 | directory_name: testone
7 | open_source_license: "MIT"
8 | replay_dir: "./replay"
9 | abbreviations:
10 | pp: https://github.com/PythonBiellaGroup/Bear
11 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | plugins = pydantic.mypy, sqlmypy
3 | ignore_missing_imports = True
4 | disallow_untyped_defs = True
5 | exclude = ['volumes/', "alembic/", "scripts/", "docs/", "settings/", ".vscode/", ".venv/", ".pytest_cache/", ".mypy_cache/", ".gitlab/", ".github/", ".devcontainer/", "Docker/", "dashboards/"]
6 |
--------------------------------------------------------------------------------
/.devcontainer/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 | develop:
4 | platform: linux/amd64
5 | build:
6 | dockerfile: ./Docker/python.dockerfile
7 | context: ../
8 | restart: always
9 | environment:
10 | LOG_VERBOSITY: ${LOG_VERBOSITY:-DEBUG}
11 | volumes:
12 | - ..:/workspace:cached
13 | command: /bin/sh -c "while sleep 1000; do :; done"
14 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.devcontainer/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 | develop:
4 | platform: linux/amd64
5 | build:
6 | dockerfile: ./Docker/python.dockerfile
7 | context: ../
8 | restart: always
9 | environment:
10 | LOG_VERBOSITY: ${LOG_VERBOSITY:-DEBUG}
11 | volumes:
12 | - ..:/workspace:cached
13 | command: /bin/sh -c "while sleep 1000; do :; done"
14 |
--------------------------------------------------------------------------------
/app/main.py:
--------------------------------------------------------------------------------
1 | from loguru import logger
2 |
3 | from app.core.config import settings
4 | from app.domain.utils.manager import convert_numbers, logic_test
5 |
6 | if __name__ == "__main__":  # demo entry point exercising the sample domain helpers
7 |     logger.info(f"Welcome to: {settings.APP_NAME}")
8 |
9 |     message = "Ciao JeyDi!"
10 |     numbers = [1, 2, 3, 4, 5, 6]
11 |
12 |     new_message = logic_test(message)
13 |     result = convert_numbers(numbers)  # NOTE(review): `result` is never used; the log below prints `numbers` — confirm whether {result} was intended
14 |     logger.info(f"Message: {new_message}, with numbers: {numbers}")
15 |
--------------------------------------------------------------------------------
/hooks/pre_gen_project.py:
--------------------------------------------------------------------------------
1 | # def deprecation_warning():
2 | # print("""
3 |
4 | # =============================================================================
5 | # *** DEPRECATION WARNING ***
6 |
7 | # Insert here you message
8 |
9 | # Please update any scripts/automation you have to append the `-c v1` option,
10 | # which is available now.
11 |
12 | # =============================================================================
13 |
14 | # """)
15 |
16 |
17 | # deprecation_warning()
18 |
--------------------------------------------------------------------------------
/Docker/db.dockerfile:
--------------------------------------------------------------------------------
1 | FROM --platform=linux/amd64 postgres:16.8
2 |
3 | RUN DEBIAN_FRONTEND=noninteractive apt update && apt install -y libpq-dev gcc curl
4 |
5 | RUN mkdir -p /project/backup/
6 | RUN mkdir -p /project/query/
7 |
8 | # Automatically initialize and configure the database
9 | # COPY scripts/init.sql /docker-entrypoint-initdb.d/
10 |
11 | # Automatically initialize backups
12 | #COPY backup/backup.sql /project/backup/
13 |
14 | # install python
15 |
16 | # launch postgres
17 | CMD ["postgres"]
18 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/main.py:
--------------------------------------------------------------------------------
1 | from loguru import logger
2 |
3 | from app.core.config import settings
4 | from app.domain.utils.manager import convert_numbers, logic_test
5 |
6 | if __name__ == "__main__":  # demo entry point exercising the sample domain helpers
7 |     logger.info(f"Welcome to: {settings.APP_NAME}")
8 |
9 |     message = "Ciao JeyDi!"
10 |     numbers = [1, 2, 3, 4, 5, 6]
11 |
12 |     new_message = logic_test(message)
13 |     result = convert_numbers(numbers)  # NOTE(review): `result` is never used; the log below prints `numbers` — confirm whether {result} was intended
14 |     logger.info(f"Message: {new_message}, with numbers: {numbers}")
15 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/Docker/db.dockerfile:
--------------------------------------------------------------------------------
1 | FROM --platform=linux/amd64 postgres:16.8
2 |
3 | RUN DEBIAN_FRONTEND=noninteractive apt update && apt install -y libpq-dev gcc curl
4 |
5 | RUN mkdir -p /project/backup/
6 | RUN mkdir -p /project/query/
7 |
8 | # Automatically initialize and configure the database
9 | # COPY scripts/init.sql /docker-entrypoint-initdb.d/
10 |
11 | # Automatically initialize backups
12 | #COPY backup/backup.sql /project/backup/
13 |
14 | # install python
15 |
16 | # launch postgres
17 | CMD ["postgres"]
18 |
--------------------------------------------------------------------------------
/.gitlab/issue_template/task.md:
--------------------------------------------------------------------------------
1 | # Conversation
2 |
3 | Breve descrizione concisa di cosa deve fare il task (elenco di punti o descrizione, più è dettagliato, meglio è)
4 |
5 | - [ ] descrizione singola attività 1
6 | - [ ] descrizione singola attività 2
7 |
8 | # Confirmation
9 |
10 | Descrizione di cosa deve accadere affinché questo task sia completato (elenco di punti o descrizione)
11 |
12 | - [ ] descrizione conferma 1
13 | - [ ] descrizione conferma 2
14 |
15 |
16 | Ricordati inoltre di posizionare correttamente una label nel caso servisse per dare una **priorità** al task
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.gitlab/issue_template/task.md:
--------------------------------------------------------------------------------
1 | # Conversation
2 |
3 | Short description of what the task must do (list of points or description, the more detailed, the better)
4 |
5 | - [ ] description of single activity 1
6 | - [ ] description of single activity 2
7 |
8 | # Confirmation
9 |
10 | Description of what must happen for this task to be completed (list of points or description)
11 |
12 | - [ ] description of confirmation 1
13 | - [ ] description of confirmation 2
14 |
15 | Also remember to apply the appropriate label if the task needs to be given a **priority**
16 |
--------------------------------------------------------------------------------
/app/core/utils/cache.py:
--------------------------------------------------------------------------------
1 | import time
2 | from functools import lru_cache, wraps
3 |
4 |
5 | def time_cache(ttl_seconds: int, maxsize=1):
6 |     """Cache a function's result for at most ttl_seconds (time is bucketed, so an entry can expire up to ttl_seconds early)."""
7 |
8 |     def decorator(func):
9 |         @lru_cache(maxsize=maxsize)  # cache key is (time bucket, call args)
10 |         def internal(_, *args, **kwds):  # leading arg is the time bucket; ignored here, it only widens the cache key
11 |             return func(*args, **kwds)
12 |
13 |         @wraps(func)
14 |         def wrapper(*args, **kwds):
15 |             timestamp = time.time() // ttl_seconds  # integer bucket that advances every ttl_seconds, invalidating prior entries
16 |             return internal(timestamp, *args, **kwds)
17 |
18 |         return wrapper
19 |
20 |     return decorator
21 |
--------------------------------------------------------------------------------
/readme/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security
2 |
3 | Documentation regarding the security aspects of this repository
4 |
5 | ## Reporting a security issue
6 |
7 | If there is a security issue please open a new issue in the library or write to telegram group.
8 |
9 | Specify:
10 |
11 | - What's the problem
12 | - How you tested and found the problem
13 | - Possible solutions
14 |
15 | ## Preferred languages
16 |
17 | It is preferred to write the report in English; Italian is also accepted.
18 |
19 | ## Policy
20 |
21 | The PBG Admin team will take the issue into consideration and will create a pull request to fix the problem.
22 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/cache.py:
--------------------------------------------------------------------------------
1 | import time
2 | from functools import lru_cache, wraps
3 |
4 |
5 | def time_cache(ttl_seconds: int, maxsize=1):
6 |     """Cache a function's result for at most ttl_seconds (time is bucketed, so an entry can expire up to ttl_seconds early)."""
7 |
8 |     def decorator(func):
9 |         @lru_cache(maxsize=maxsize)  # cache key is (time bucket, call args)
10 |         def internal(_, *args, **kwds):  # leading arg is the time bucket; ignored here, it only widens the cache key
11 |             return func(*args, **kwds)
12 |
13 |         @wraps(func)
14 |         def wrapper(*args, **kwds):
15 |             timestamp = time.time() // ttl_seconds  # integer bucket that advances every ttl_seconds, invalidating prior entries
16 |             return internal(timestamp, *args, **kwds)
17 |
18 |         return wrapper
19 |
20 |     return decorator
21 |
--------------------------------------------------------------------------------
/notebooks/test.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "source": [
6 | "# Test Notebook\n",
7 | "\n",
8 | "This is a test notebook to show you where the notebook can stay in the folder :)\n",
9 | "\n",
10 | "Remember: you can use the notebook in VSCode also :)"
11 | ],
12 | "metadata": {}
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": null,
17 | "source": [
18 | "print(\"Hello notebook!\")"
19 | ],
20 | "outputs": [],
21 | "metadata": {}
22 | }
23 | ],
24 | "metadata": {
25 | "orig_nbformat": 4,
26 | "language_info": {
27 | "name": "python"
28 | }
29 | },
30 | "nbformat": 4,
31 | "nbformat_minor": 2
32 | }
33 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/notebooks/test.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "source": [
6 | "# Test Notebook\n",
7 | "\n",
8 | "This is a test notebook to show you where the notebook can stay in the folder :)\n",
9 | "\n",
10 | "Remember: you can use the notebook in VSCode also :)"
11 | ],
12 | "metadata": {}
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": null,
17 | "source": [
18 | "print(\"Hello notebook!\")"
19 | ],
20 | "outputs": [],
21 | "metadata": {}
22 | }
23 | ],
24 | "metadata": {
25 | "orig_nbformat": 4,
26 | "language_info": {
27 | "name": "python"
28 | }
29 | },
30 | "nbformat": 4,
31 | "nbformat_minor": 2
32 | }
33 |
--------------------------------------------------------------------------------
/.vscode/google_no_types.mustache:
--------------------------------------------------------------------------------
1 | {{! Google Without Types Docstring Template }}
2 | {{summaryPlaceholder}}
3 |
4 | {{extendedSummaryPlaceholder}}
5 |
6 | {{#parametersExist}}
7 | Args:
8 | {{#args}}
9 | {{var}}: {{descriptionPlaceholder}}
10 | {{/args}}
11 | {{#kwargs}}
12 | {{var}}: {{descriptionPlaceholder}}. Defaults to {{&default}}.
13 | {{/kwargs}}
14 | {{/parametersExist}}
15 |
16 | {{#exceptionsExist}}
17 | Raises:
18 | {{#exceptions}}
19 | {{descriptionPlaceholder}}
20 | {{/exceptions}}
21 | {{/exceptionsExist}}
22 |
23 | {{#returnsExist}}
24 | Returns:
25 | {{#returns}}
26 | {{descriptionPlaceholder}}
27 | {{/returns}}
28 | {{/returnsExist}}
29 |
30 | {{#yieldsExist}}
31 | Yields:
32 | {{#yields}}
33 | {{descriptionPlaceholder}}
34 | {{/yields}}
35 | {{/yieldsExist}}
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.vscode/google_no_types.mustache:
--------------------------------------------------------------------------------
1 | {{! Google Without Types Docstring Template }}
2 | {{summaryPlaceholder}}
3 |
4 | {{extendedSummaryPlaceholder}}
5 |
6 | {{#parametersExist}}
7 | Args:
8 | {{#args}}
9 | {{var}}: {{descriptionPlaceholder}}
10 | {{/args}}
11 | {{#kwargs}}
12 | {{var}}: {{descriptionPlaceholder}}. Defaults to {{&default}}.
13 | {{/kwargs}}
14 | {{/parametersExist}}
15 |
16 | {{#exceptionsExist}}
17 | Raises:
18 | {{#exceptions}}
19 | {{descriptionPlaceholder}}
20 | {{/exceptions}}
21 | {{/exceptionsExist}}
22 |
23 | {{#returnsExist}}
24 | Returns:
25 | {{#returns}}
26 | {{descriptionPlaceholder}}
27 | {{/returns}}
28 | {{/returnsExist}}
29 |
30 | {{#yieldsExist}}
31 | Yields:
32 | {{#yields}}
33 | {{descriptionPlaceholder}}
34 | {{/yields}}
35 | {{/yieldsExist}}
--------------------------------------------------------------------------------
/Docker/ci.dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:22.04
2 |
3 | # Metadata
4 | LABEL name="CI PBG"
5 | LABEL maintainer="PythonBiellaGroup"
6 |
7 | ENV TZ=Europe/Rome
8 | RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
9 |
10 | RUN DEBIAN_FRONTEND=noninteractive apt update && apt install -y \
11 | libpq-dev gcc wget curl gnupg2 openssh-client make build-essential git unzip\
12 | && mkdir -p ~/.ssh \
13 | && apt clean && rm -rf /var/lib/apt/lists/*
14 |
15 | # Install aws cli
16 | # RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
17 | # && unzip awscliv2.zip \
18 | # && ./aws/install \
19 | # && rm -rf awscliv2.zip ./aws
20 |
21 | # Setup a ssh config file
22 | # COPY bin/ssh-config.sh /usr/bin/ssh-config.sh
23 | # COPY ./config /root/.ssh/config
24 | # RUN chmod 400 /root/.ssh/config
25 | # RUN chmod +x /usr/bin/ssh-config.sh
--------------------------------------------------------------------------------
/cookiecutter.json:
--------------------------------------------------------------------------------
1 | {
2 | "project": "Your project name",
3 | "project_description": "Short description of the project and the package",
4 | "author": "PythonBiellaGroup",
5 | "directory_name": "{{cookiecutter.project.lower().strip().replace(' ','-')}}",
6 | "open_source_license": [
7 | "Not open source",
8 | "MIT",
9 | "LGPL3",
10 | "GPL3",
11 | "APACHE2"
12 | ],
13 | "_copy_without_render": [
14 | ".vscode/*",
15 | "__cc_*",
16 | "google_no_types.mustache",
17 | "test_create.py",
18 | "post_gen_project.py",
19 | "pre_gen_project.py",
20 | ".pre-commit-config.yaml",
21 | "codecov.yaml",
22 | ".github/*",
23 | ".gitlab/*",
24 | "docs/*",
25 | "Makefile",
26 | "mypy.ini",
27 | ".flake8",
28 | ".env",
29 | "create_examples.sh"
30 | ]
31 | }
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/Docker/ci.dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:22.04
2 |
3 | # Metadata
4 | LABEL name="CI PBG"
5 | LABEL maintainer="PythonBiellaGroup"
6 |
7 | ENV TZ=Europe/Rome
8 | RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
9 |
10 | RUN DEBIAN_FRONTEND=noninteractive apt update && apt install -y \
11 | libpq-dev gcc wget curl gnupg2 openssh-client make build-essential git unzip\
12 | && mkdir -p ~/.ssh \
13 | && apt clean && rm -rf /var/lib/apt/lists/*
14 |
15 | # Install aws cli
16 | # RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
17 | # && unzip awscliv2.zip \
18 | # && ./aws/install \
19 | # && rm -rf awscliv2.zip ./aws
20 |
21 | # Setup a ssh config file
22 | # COPY bin/ssh-config.sh /usr/bin/ssh-config.sh
23 | # COPY ./config /root/.ssh/config
24 | # RUN chmod 400 /root/.ssh/config
25 | # RUN chmod +x /usr/bin/ssh-config.sh
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/tox.ini:
--------------------------------------------------------------------------------
1 | [flake8]
2 | per-file-ignores = __init__.py:F401
3 | # PEP-8 The following are ignored:
4 | # E731 do not assign a lambda expression, use a def
5 | # E203 whitespace before ':'
6 | # E501 line too long
7 | # W503 line break before binary operator
8 | # W605 invalid escape sequence
9 | ignore = E731, E203, E501, W503, W605
10 | exclude =
11 | .git,
12 | __pycache__,
13 | docs/source/conf.py,
14 | old,
15 | build,
16 | dist,
17 | .venv,
18 | max-complexity = 10
19 | max-line-length = 120
20 |
21 | [tox]
22 | skipsdist = true
23 | envlist = py38, py39, py310, py311
24 |
25 | [gh-actions]
26 | python =
27 | 3.8: py38
28 | 3.9: py39
29 | 3.10: py310
30 | 3.11: py311
31 |
32 | [testenv]
33 | passenv = PYTHON_VERSION
34 | whitelist_externals = poetry
35 | commands =
36 | poetry install -v
37 | pytest --doctest-modules tests --cov --cov-config=pyproject.toml --cov-report=xml
38 | mypy
39 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/README.md:
--------------------------------------------------------------------------------
1 | # {{cookiecutter.project}}
2 |
3 | ## Description
4 |
5 | {{cookiecutter.project}} is a python project
6 |
7 | ## Getting Started
8 |
9 | ### Prerequisites
10 |
11 | - Python 3.10 or higher
12 | - uv
13 |
14 | ### How to use it
15 |
16 | 1. Clone the repository
17 | 2. Create a new `.env` based on `.env.example`:
18 |
19 | ```bash
20 | cp .env.example .env
21 | ```
22 |
23 | 3. Configure the `.env` variables if it's necessary:
24 |
25 | ```bash
26 | LOG_VERBOSITY=INFO
27 | APP_NAME=bakky
28 | APP_VERSION=0.0.1
29 | DEBUG=True
30 | ```
31 |
32 | 4. Install the dependencies with python uv: `uv sync`
33 | 5. Run the project: there are several methods you can use
34 | 1. Use vscode debugger configured inside the repo
35 | 2. Use the command `uv run app/main.py` to run the project manually
36 | 3. Use the bash script: `./scripts/launch.sh`
37 | 4. Use the Makefile: `make run`
38 | 6. You can launch the docker-compose by doing: `make launch`
39 |
40 | Remember that you can use `vscode` to run the project with `devcontainer`
41 |
--------------------------------------------------------------------------------
/.gitlab/issue_template/bug.md:
--------------------------------------------------------------------------------
1 | ## Summary
2 |
3 | (Summarize the bug encountered concisely)
4 |
5 | ## Steps to reproduce
6 |
7 | (How one can reproduce the issue - this is very important)
8 |
9 | ## Example Project
10 |
11 | (If possible, please create an example project here on GitLab.com that exhibits the problematic
12 | behavior, and link to it here in the bug report.
13 | If you are using an older version of GitLab, this will also determine whether the bug has been fixed
14 | in a more recent version)
15 |
16 | ## What is the current bug behavior?
17 |
18 | (What actually happens)
19 |
20 | ## What is the expected correct behavior?
21 |
22 | (What you should see instead)
23 |
24 | ## Relevant logs and/or screenshots
25 |
26 | (Paste any relevant logs - please use code blocks (```) to format console output, logs, and code, as
27 | it's very hard to read otherwise.)
28 |
29 | ## Possible fixes
30 |
31 | (If you can, link to the line of code that might be responsible for the problem)
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Python Biella Group
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.gitlab/issue_template/bug.md:
--------------------------------------------------------------------------------
1 | ## Summary
2 |
3 | (Summarize the bug encountered concisely)
4 |
5 | ## Steps to reproduce
6 |
7 | (How one can reproduce the issue - this is very important)
8 |
9 | ## Example Project
10 |
11 | (If possible, please create an example project here on GitLab.com that exhibits the problematic
12 | behavior, and link to it here in the bug report.
13 | If you are using an older version of GitLab, this will also determine whether the bug has been fixed
14 | in a more recent version)
15 |
16 | ## What is the current bug behavior?
17 |
18 | (What actually happens)
19 |
20 | ## What is the expected correct behavior?
21 |
22 | (What you should see instead)
23 |
24 | ## Relevant logs and/or screenshots
25 |
26 | (Paste any relevant logs - please use code blocks (```) to format console output, logs, and code, as
27 | it's very hard to read otherwise.)
28 |
29 | ## Possible fixes
30 |
31 | (If you can, link to the line of code that might be responsible for the problem)
32 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/replay/Bear.json:
--------------------------------------------------------------------------------
1 | {
2 | "cookiecutter": {
3 | "project": "testone",
4 | "project_description": "local test for tempy",
5 | "author": "PBG",
6 | "directory_name": "testone",
7 | "open_source_license": "MIT",
8 | "_copy_without_render": [
9 | ".vscode/*",
10 | "__cc_*",
11 | "google_no_types.mustache",
12 | "test_create.py",
13 | "post_gen_project.py",
14 | "pre_gen_project.py",
15 | ".pre-commit-config.yaml",
16 | "codecov.yaml",
17 | ".github/*",
18 | ".gitlab/*",
19 | "docs/*",
20 | "Makefile",
21 | "mypy.ini",
22 | ".flake8",
23 | ".env",
24 | "create_examples.sh"
25 | ],
26 | "_template": ".",
27 | "_output_dir": "/Users/anguzzo/Projects/pbg/Bear",
28 | "_repo_dir": ".",
29 | "_checkout": null
30 | },
31 | "_cookiecutter": {
32 | "project": "testone",
33 | "project_description": "local test for tempy",
34 | "author": "PBG",
35 | "directory_name": "testone",
36 | "open_source_license": [
37 | "MIT",
38 | "Not open source",
39 | "LGPL3",
40 | "GPL3",
41 | "APACHE2"
42 | ]
43 | }
44 | }
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.8"
2 | services:
3 | app:
4 | #image: endpoint where your image is : develop
5 | platform: linux/amd64
6 | build:
7 | dockerfile: ./Docker/python.dockerfile
8 | context: .
9 | container_name: app-core
10 | env_file:
11 | - .env
12 | restart: always
13 | environment:
14 | LOG_VERBOSITY: ${LOG_VERBOSITY}
15 | depends_on:
16 | - db
17 | volumes:
18 | - ${ECS_LOG_PATH:-./logs/project}:/project/logs
19 | ports:
20 | - "${APP_DOCKER_PORT:-8045}:${APP_ENDPOINT_PORT:-8000}"
21 |
22 | db:
23 | platform: linux/amd64
24 | build:
25 | dockerfile: ./Docker/db.dockerfile
26 | context: .
27 | env_file:
28 | - .env
29 | container_name: app-db
30 | #image: postgres:13.2
31 | environment:
32 | POSTGRES_DB: ${APP_DB_NAME}
33 | POSTGRES_USER: ${APP_DB_USER}
34 | POSTGRES_PASSWORD: ${APP_DB_PASSWORD}
35 | ports:
36 | - ${EXTERNAL_DB_PORT:-5492}:5432
37 | volumes:
38 | - ${POSTGRES_DATA_DIR:-/opt/admin-app/data/postgres}:/var/lib/postgresql/data
39 | - ${POSTGRES_BACKUP_DIR:-/opt/admin-app/backup}:/project/backup
40 |
--------------------------------------------------------------------------------
/test/test_functions.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | import pytest
4 | from loguru import logger
5 |
6 | from app.domain.utils.manager import convert_numbers, logic_test
7 |
8 |
9 | @pytest.mark.functions
10 | def test_logic_base():
11 | """Test the logic_base function.
12 |
13 | This function tests the logic_test function by checking that the output message is fully upper-cased.
14 | """
15 | logger.debug("test logic base con messaggio")
16 | message = "Ciao PythonBiellaGroup!"
17 | message = logic_test(message)
18 | assert message == "CIAO PYTHONBIELLAGROUP!"
19 |
20 |
21 | @pytest.mark.core
22 | def test_entities():
23 | """Test the entities function.
24 |
25 | This function tests the `convert_numbers` function by passing a list of numbers and
26 | asserting that the result is an integer equal to the sum of the inputs.
27 |
28 | """
29 | logger.debug("test entities base con lista di numeri")
30 | numbers = [1, 2, 3, 4, 5]
31 | result = convert_numbers(numbers)
32 | assert isinstance(result, int)
33 | assert result == 15
34 |
35 |
36 | if __name__ == "__main__":
37 | logger.info("test di esempio")
38 |
39 | test_logic_base()
40 | test_entities()
41 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/test/test_functions.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | import pytest
4 | from loguru import logger
5 |
6 | from app.src.core.manager import convert_numbers, logic_test
7 |
8 |
9 | @pytest.mark.functions
10 | def test_logic_base():
11 | """Test the logic_base function.
12 |
13 | This function tests the logic_test function by checking that the output message is fully upper-cased.
14 | """
15 | logger.debug("test logic base con messaggio")
16 | message = "Ciao PythonBiellaGroup!"
17 | message = logic_test(message)
18 | assert message == "CIAO PYTHONBIELLAGROUP!"
19 |
20 |
21 | @pytest.mark.core
22 | def test_entities():
23 | """Test the entities function.
24 |
25 | This function tests the `convert_numbers` function by passing a list of numbers and
26 | asserting that the result is an integer equal to the sum of the inputs.
27 |
28 | """
29 | logger.debug("test entities base con lista di numeri")
30 | numbers = [1, 2, 3, 4, 5]
31 | result = convert_numbers(numbers)
32 | assert isinstance(result, int)
33 | assert result == 15
34 |
35 |
36 | if __name__ == "__main__":
37 | logger.info("test di esempio")
38 |
39 | test_logic_base()
40 | test_entities()
41 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python",
3 | "python.envFile": "${workspaceFolder}/.env",
4 | "python.testing.pytestArgs": [
5 | "test"
6 | ],
7 | "python.testing.unittestEnabled": false,
8 | "python.testing.pytestEnabled": true,
9 | "files.autoSave": "afterDelay",
10 | "autoDocstring.customTemplatePath": "",
11 | "window.title": "${rootName} ${separator} ${activeEditorShort}",
12 | "[python]": {
13 | "editor.insertSpaces": true,
14 | "editor.tabSize": 4
15 | },
16 | "files.exclude": {
17 | "**/__pycache__": true
18 | },
19 | "python.languageServer": "Pylance",
20 | "editor.formatOnSave": true,
21 | "editor.formatOnPaste": true,
22 | "notebook.lineNumbers": "on",
23 | "editor.inlineSuggest.enabled": true,
24 | "editor.formatOnType": true,
25 | "git.autofetch": true,
26 | "editor.defaultFormatter": "charliermarsh.ruff",
27 | "python.terminal.activateEnvInCurrentTerminal": true,
28 | "[toml]": {
29 | "editor.defaultFormatter": "tamasfe.even-better-toml"
30 | },
31 | "[yaml]": {
32 | "editor.defaultFormatter": "redhat.vscode-yaml"
33 | },
34 | "[dockerfile]": {
35 | "editor.defaultFormatter": "ms-azuretools.vscode-docker"
36 | },
37 | "[markdown]": {
38 | "editor.defaultFormatter": "yzhang.markdown-all-in-one"
39 | }
40 | }
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python",
3 | "python.envFile": "${workspaceFolder}/.env",
4 | "python.testing.pytestArgs": [
5 | "test"
6 | ],
7 | "python.testing.unittestEnabled": false,
8 | "python.testing.pytestEnabled": true,
9 | "files.autoSave": "afterDelay",
10 | "autoDocstring.customTemplatePath": "",
11 | "window.title": "${rootName} ${separator} ${activeEditorShort}",
12 | "[python]": {
13 | "editor.insertSpaces": true,
14 | "editor.tabSize": 4
15 | },
16 | "files.exclude": {
17 | "**/__pycache__": true
18 | },
19 | "python.languageServer": "Pylance",
20 | "editor.formatOnSave": true,
21 | "editor.formatOnPaste": true,
22 | "notebook.lineNumbers": "on",
23 | "editor.inlineSuggest.enabled": true,
24 | "editor.formatOnType": true,
25 | "git.autofetch": true,
26 | "editor.defaultFormatter": "charliermarsh.ruff",
27 | "python.terminal.activateEnvInCurrentTerminal": true,
28 | "[toml]": {
29 | "editor.defaultFormatter": "tamasfe.even-better-toml"
30 | },
31 | "[yaml]": {
32 | "editor.defaultFormatter": "redhat.vscode-yaml"
33 | },
34 | "[dockerfile]": {
35 | "editor.defaultFormatter": "ms-azuretools.vscode-docker"
36 | },
37 | "[markdown]": {
38 | "editor.defaultFormatter": "yzhang.markdown-all-in-one"
39 | }
40 | }
--------------------------------------------------------------------------------
/docs/static/css/mkdocstrings.css:
--------------------------------------------------------------------------------
1 | /* Indentation. */
2 | div.doc-contents:not(.first) {
3 | padding-left: 25px;
4 | border-left: .05rem solid var(--md-typeset-table-color);
5 | }
6 |
7 | /* Mark external links as such. */
8 | a.external::after,
9 | a.autorefs-external::after {
10 | /* https://primer.style/octicons/arrow-up-right-24 */
11 | mask-image: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M18.25 15.5a.75.75 0 0 0 .75-.75v-9a.75.75 0 0 0-.75-.75h-9a.75.75 0 0 0 0 1.5h7.19L6.22 16.72a.75.75 0 1 0 1.06 1.06L17.5 7.56v7.19c0 .414.336.75.75.75Z"/></svg>');
12 | -webkit-mask-image: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M18.25 15.5a.75.75 0 0 0 .75-.75v-9a.75.75 0 0 0-.75-.75h-9a.75.75 0 0 0 0 1.5h7.19L6.22 16.72a.75.75 0 1 0 1.06 1.06L17.5 7.56v7.19c0 .414.336.75.75.75Z"/></svg>');
13 | content: ' ';
14 |
15 | display: inline-block;
16 | vertical-align: middle;
17 | position: relative;
18 |
19 | height: 1em;
20 | width: 1em;
21 | background-color: var(--md-typeset-a-color);
22 | }
23 |
24 | a.external:hover::after,
25 | a.autorefs-external:hover::after {
26 | background-color: var(--md-accent-fg-color);
27 | }
28 |
29 | :root {
30 | --md-primary-fg-color: #093051;
31 | --md-primary-fg-color--light: #9DF249;
32 | --md-primary-fg-color--dark: #152037;
33 | }
34 |
--------------------------------------------------------------------------------
/Docker/python.dockerfile:
--------------------------------------------------------------------------------
1 | # Reference:
2 | # * https://hynek.me/articles/docker-uv/
3 |
4 | ARG UV_VERSION="0.6.14"
5 | FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv
6 |
7 | FROM python:3.12-slim AS base
8 |
9 | # Metadata
10 | LABEL name="Python 3.12"
11 | LABEL maintainer="PythonBiellaGroup"
12 |
13 | ARG UV_PYTHON=python3.12
14 |
15 | # Install requirements and clean up
16 | RUN apt update && apt install -y --no-install-recommends \
17 | curl ca-certificates libpq-dev gcc git make unzip \
18 | libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev \
19 | && apt clean && rm -rf /var/lib/apt/lists/*
20 |
21 | # Install aws cli and clean up
22 | # RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
23 | # && unzip awscliv2.zip \
24 | # && ./aws/install \
25 | # && rm -rf awscliv2.zip ./aws
26 |
27 | ### Start build prep
28 |
29 | COPY --from=uv /uv /bin/uv
30 |
31 | # - Silence uv complaining about not being able to use hard links,
32 | # - tell uv to byte-compile packages for faster application startups,
33 | # - prevent uv from accidentally downloading isolated Python builds,
34 | # - pick a Python version to use for the uv command.
35 | # - add the cargo binary directory to the PATH
36 | ENV \
37 | UV_LINK_MODE=copy \
38 | UV_COMPILE_BYTECODE=1 \
39 | UV_PYTHON_DOWNLOADS=never \
40 | UV_PYTHON=${UV_PYTHON} \
41 | PATH="/root/.cargo/bin/:$PATH"
42 |
43 |
--------------------------------------------------------------------------------
/scripts/generate_docs.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from pathlib import Path
3 |
4 | import mkdocs_gen_files
5 | from mkdocs_gen_files.nav import Nav
6 |
7 | nav = Nav()
8 |
9 | root = Path(__file__).parent.parent
10 | src = root / "datalink" / "src"
11 | sys.path.insert(0, str(src)) # Add the source directory to sys.path
12 |
13 | for path in sorted(src.rglob("*.py")):
14 | module_path = path.relative_to(src).with_suffix("")
15 | doc_path = path.relative_to(src).with_suffix(".md")
16 | full_doc_path = Path("reference", doc_path)
17 |
18 | parts = tuple(module_path.parts)
19 |
20 | # Skip empty modules and "__main__" ("__init__" is handled below as the package index page)
21 | if not parts or parts[-1] == "__main__":
22 | continue
23 |
24 | # If the last part is "__init__", treat it as the module's index page
25 | if parts[-1] == "__init__":
26 | parts = parts[:-1] # Remove "__init__" from parts
27 | doc_path = doc_path.with_name("index.md")
28 | full_doc_path = full_doc_path.with_name("index.md")
29 |
30 | nav[parts] = doc_path.as_posix()
31 |
32 | with mkdocs_gen_files.open(full_doc_path, "w") as fd:
33 | ident = ".".join(parts)
34 | fd.write(f"::: {ident}")
35 |
36 | mkdocs_gen_files.set_edit_path(full_doc_path, path.relative_to(root))
37 |
38 | with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
39 | nav_file.writelines(nav.build_literate_nav())
40 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/Docker/python.dockerfile:
--------------------------------------------------------------------------------
1 | # Reference:
2 | # * https://hynek.me/articles/docker-uv/
3 |
4 | ARG UV_VERSION="0.6.14"
5 | FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv
6 |
7 | FROM python:3.12-slim AS base
8 |
9 | # Metadata
10 | LABEL name="Python 3.12"
11 | LABEL maintainer="PythonBiellaGroup"
12 |
13 | ARG UV_PYTHON=python3.12
14 |
15 | # Install requirements and clean up
16 | RUN apt update && apt install -y --no-install-recommends \
17 | curl ca-certificates libpq-dev gcc git make unzip \
18 | libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev \
19 | && apt clean && rm -rf /var/lib/apt/lists/*
20 |
21 | # Install aws cli and clean up
22 | # RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
23 | # && unzip awscliv2.zip \
24 | # && ./aws/install \
25 | # && rm -rf awscliv2.zip ./aws
26 |
27 | ### Start build prep
28 |
29 | COPY --from=uv /uv /bin/uv
30 |
31 | # - Silence uv complaining about not being able to use hard links,
32 | # - tell uv to byte-compile packages for faster application startups,
33 | # - prevent uv from accidentally downloading isolated Python builds,
34 | # - pick a Python version to use for the uv command.
35 | # - add the cargo binary directory to the PATH
36 | ENV \
37 | UV_LINK_MODE=copy \
38 | UV_COMPILE_BYTECODE=1 \
39 | UV_PYTHON_DOWNLOADS=never \
40 | UV_PYTHON=${UV_PYTHON} \
41 | PATH="/root/.cargo/bin/:$PATH"
42 |
43 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/scripts/generate_docs.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from pathlib import Path
3 |
4 | import mkdocs_gen_files
5 | from mkdocs_gen_files.nav import Nav
6 |
7 | nav = Nav()
8 |
9 | root = Path(__file__).parent.parent
10 | src = root / "datalink" / "src"
11 | sys.path.insert(0, str(src)) # Add the source directory to sys.path
12 |
13 | for path in sorted(src.rglob("*.py")):
14 | module_path = path.relative_to(src).with_suffix("")
15 | doc_path = path.relative_to(src).with_suffix(".md")
16 | full_doc_path = Path("reference", doc_path)
17 |
18 | parts = tuple(module_path.parts)
19 |
20 | # Skip empty modules and "__main__" ("__init__" is handled below as the package index page)
21 | if not parts or parts[-1] == "__main__":
22 | continue
23 |
24 | # If the last part is "__init__", treat it as the module's index page
25 | if parts[-1] == "__init__":
26 | parts = parts[:-1] # Remove "__init__" from parts
27 | doc_path = doc_path.with_name("index.md")
28 | full_doc_path = full_doc_path.with_name("index.md")
29 |
30 | nav[parts] = doc_path.as_posix()
31 |
32 | with mkdocs_gen_files.open(full_doc_path, "w") as fd:
33 | ident = ".".join(parts)
34 | fd.write(f"::: {ident}")
35 |
36 | mkdocs_gen_files.set_edit_path(full_doc_path, path.relative_to(root))
37 |
38 | with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
39 | nav_file.writelines(nav.build_literate_nav())
40 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE/pr_template.md:
--------------------------------------------------------------------------------
1 | # Description
2 |
3 | Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.
4 |
5 | Fixes # (issue)
6 |
7 | ## Type of change
8 |
9 | Please delete options that are not relevant.
10 |
11 | - [ ] Bug fix (non-breaking change which fixes an issue)
12 | - [ ] New feature (non-breaking change which adds functionality)
13 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
14 | - [ ] This change requires a documentation update
15 |
16 | # How Has This Been Tested?
17 |
18 | Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration
19 |
20 | - [ ] Test A
21 | - [ ] Test B
22 |
23 | **Test Configuration**:
24 | * Firmware version:
25 | * Hardware:
26 | * Toolchain:
27 | * SDK:
28 |
29 | # Checklist:
30 |
31 | - [ ] My code follows the style guidelines of this project
32 | - [ ] I have performed a self-review of my own code
33 | - [ ] I have commented my code, particularly in hard-to-understand areas
34 | - [ ] I have made corresponding changes to the documentation
35 | - [ ] My changes generate no new warnings
36 | - [ ] I have added tests that prove my fix is effective or that my feature works
37 | - [ ] New and existing unit tests pass locally with my changes
38 | - [ ] Any dependent changes have been merged and published in downstream modules
39 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.github/PULL_REQUEST_TEMPLATE/pr_template.md:
--------------------------------------------------------------------------------
1 | # Description
2 |
3 | Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.
4 |
5 | Fixes # (issue)
6 |
7 | ## Type of change
8 |
9 | Please delete options that are not relevant.
10 |
11 | - [ ] Bug fix (non-breaking change which fixes an issue)
12 | - [ ] New feature (non-breaking change which adds functionality)
13 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
14 | - [ ] This change requires a documentation update
15 |
16 | # How Has This Been Tested?
17 |
18 | Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration
19 |
20 | - [ ] Test A
21 | - [ ] Test B
22 |
23 | **Test Configuration**:
24 | * Firmware version:
25 | * Hardware:
26 | * Toolchain:
27 | * SDK:
28 |
29 | # Checklist:
30 |
31 | - [ ] My code follows the style guidelines of this project
32 | - [ ] I have performed a self-review of my own code
33 | - [ ] I have commented my code, particularly in hard-to-understand areas
34 | - [ ] I have made corresponding changes to the documentation
35 | - [ ] My changes generate no new warnings
36 | - [ ] I have added tests that prove my fix is effective or that my feature works
37 | - [ ] New and existing unit tests pass locally with my changes
38 | - [ ] Any dependent changes have been merged and published in downstream modules
39 |
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | // Use IntelliSense to learn about possible attributes.
3 | // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5 | "version": "0.2.0",
6 | "configurations": [
7 | {
8 | "name": "Python: Current File",
9 | "type": "debugpy",
10 | "request": "launch",
11 | "program": "${file}",
12 | "console": "integratedTerminal",
13 | "cwd": "${workspaceFolder}",
14 | "env": {
15 | "PYTHONPATH": "${cwd}",
16 | "VERBOSITY": "DEBUG"
17 | }
18 | },
19 | {
20 | "name": "Main File",
21 | "type": "debugpy",
22 | "request": "launch",
23 | "program": "app/main.py",
24 | "console": "integratedTerminal",
25 | "cwd": "${workspaceFolder}",
26 | "env": {
27 | "PYTHONPATH": "${cwd}",
28 | "VERBOSITY": "DEBUG"
29 | }
30 | },
31 | {
32 | "name": "Python: Streamlit",
33 | "type": "debugpy",
34 | "request": "launch",
35 | "module": "streamlit",
36 | "args": [
37 | "run",
38 | "${workspaceFolder}/app/main.py"
39 | ],
40 | "cwd": "${workspaceFolder}",
41 | "env": {
42 | "PYTHONPATH": "${cwd}"
43 | }
44 | },
45 | {
46 | "name": "FastAPI",
47 | "type": "debugpy",
48 | "request": "launch",
49 | "module": "uvicorn",
50 | "cwd": "${workspaceFolder}",
51 | "args": [
52 | "app.main:app",
53 | "--reload",
54 | "--port",
55 | "8044"
56 | ],
57 | "jinja": true,
58 | "env": {
59 | "PYTHONPATH": "${cwd}",
60 | "TEST": "debugger"
61 | }
62 | },
63 | ]
64 | }
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | // Use IntelliSense to learn about possible attributes.
3 | // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5 | "version": "0.2.0",
6 | "configurations": [
7 | {
8 | "name": "Python: Current File",
9 | "type": "debugpy",
10 | "request": "launch",
11 | "program": "${file}",
12 | "console": "integratedTerminal",
13 | "cwd": "${workspaceFolder}",
14 | "env": {
15 | "PYTHONPATH": "${cwd}",
16 | "VERBOSITY": "DEBUG"
17 | }
18 | },
19 | {
20 | "name": "Main File",
21 | "type": "debugpy",
22 | "request": "launch",
23 | "program": "app/main.py",
24 | "console": "integratedTerminal",
25 | "cwd": "${workspaceFolder}",
26 | "env": {
27 | "PYTHONPATH": "${cwd}",
28 | "VERBOSITY": "DEBUG"
29 | }
30 | },
31 | {
32 | "name": "Python: Streamlit",
33 | "type": "debugpy",
34 | "request": "launch",
35 | "module": "streamlit",
36 | "args": [
37 | "run",
38 | "${workspaceFolder}/app/main.py"
39 | ],
40 | "cwd": "${workspaceFolder}",
41 | "env": {
42 | "PYTHONPATH": "${cwd}"
43 | }
44 | },
45 | {
46 | "name": "FastAPI",
47 | "type": "debugpy",
48 | "request": "launch",
49 | "module": "uvicorn",
50 | "cwd": "${workspaceFolder}",
51 | "args": [
52 | "app.main:app",
53 | "--reload",
54 | "--port",
55 | "8044"
56 | ],
57 | "jinja": true,
58 | "env": {
59 | "PYTHONPATH": "${cwd}",
60 | "TEST": "debugger"
61 | }
62 | },
63 | ]
64 | }
--------------------------------------------------------------------------------
/app/domain/utils/manager.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from loguru import logger
4 |
5 | from app.core.config import settings
6 |
7 |
8 | def logic_test(message: str = None) -> str:
9 | """Logic test function.
10 |
11 | This is a simple example how to write functions in Python
12 |
13 | Args:
14 | message (str, optional): A message you want to use. Defaults to None.
15 |
16 | Raises:
17 | Exception: If it's impossible to compose the message
18 |
19 | Returns:
20 | str: The elaborated message by the function
21 | """
22 | logger.debug("Test logic function")
23 |
24 | try:
25 | message = message.upper()
26 | except Exception as e:
27 | logger.error(f"Impossible to compose the message: {message}, because: {e}")
28 | logger.exception(f"Error: {e}")
29 | raise Exception(e) # noqa: B904
30 |
31 | logger.debug(f"Message modified: {message} on the app: {settings.APP_NAME}")
32 |
33 | return message
34 |
35 |
36 | def convert_numbers(numbers: List[int]) -> int:
37 | """Convert and sum all elements in a list.
38 |
39 | Args:
40 | numbers (List[int]): List of integers to sum
41 |
42 | Returns:
43 | int: the result of the sum
44 | """
45 | return sum(numbers)
46 |
47 |
48 | def name_parsing(name: str = None) -> str:
49 | """Easter egg spaghetti.
50 |
51 | Args:
52 | name (str, optional): the name you want to pass. Defaults to None.
53 |
54 | Returns:
55 | str: the result string
56 | """
57 | if name is None:
58 | logger.error(f"Name: {name} not valid, please retry..")
59 | raise ValueError("Name not valid, please provide a non-empty string")
60 |
61 | logger.info(f"Hello: {name.strip().lower()}, welcome here!")
62 | logger.debug("So do you like spaghetti right?")
63 |
64 | return name
65 |
--------------------------------------------------------------------------------
/app/core/utils/security.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | from functools import lru_cache
3 |
4 | from passlib.context import CryptContext
5 |
6 | pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
7 |
8 |
9 | def get_password_hash(password: str) -> str:
10 | """Hash a password using SHA256 and a strong password hash e.g. bcrypt.
11 |
12 | Args:
13 | password (str): the password to hash
14 |
15 | Returns:
16 | str: the hashed password
17 |
18 | """
19 | return pwd_context.hash(password)
20 |
21 |
22 | def verify_password(incoming_password: str, existing_password: str) -> bool:
23 | """Safely compare two strings using SHA256 and a strong password hash e.g. bcrypt.
24 |
25 | Args:
26 | incoming_password (str): the password to check
27 | existing_password (str): the password to check against
28 |
29 | Returns:
30 | bool: True if the passwords match, False otherwise
31 | """
32 | return verify_hashs(
33 | hashlib.sha256(incoming_password.encode()).hexdigest(),
34 | hashlib.sha256(existing_password.encode()).hexdigest(),
35 | )
36 |
37 |
38 | @lru_cache
39 | def verify_hashs(incoming_hash: str, existing_hash: str) -> bool:
40 | """Compare two hashs using a slow hash algorithm and a cache.
41 |
42 | The cache will speed up repeated password checks (e.g. the correct credentials), but still slow
43 | down brute force attacks. To prevent timing attacks of the caching layer, an other hash
44 | function must be used before the caching layer.
45 |
46 | Args:
47 | incoming_hash (str): the hash to check
48 | existing_hash (str): the hash to check against
49 |
50 | Returns:
51 | bool: True if the hashs match, False otherwise
52 | """
53 | return pwd_context.verify(incoming_hash, get_password_hash(existing_hash))
54 |
--------------------------------------------------------------------------------
/app/core/utils/logs.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import time
3 |
4 | from loguru import logger
5 |
6 |
7 | class Profiler:
8 | """Logging proxy that injects to duration since object creation."""
9 |
10 | def __init__(self):
11 | self.start = time.time()
12 | self.last = 0
13 |
14 | def stamp(self, message: str) -> tuple[float, str]:
15 | """Stamp message with elapsed time."""
16 | seconds = time.time() - self.start
17 | return seconds, f"{message} @{seconds:.3f}s"
18 |
19 | def log(self, level: int, message: str, *args: list, **kwds: dict):
20 | """Generic log call, adds elapsed time to message and extra."""
21 | duration, message = self.stamp(message)
22 | time_delta = duration - self.last
23 | self.last = duration
24 | logger.log(level, message, *args, **kwds, duration=duration, time_delta=time_delta)
25 |
26 | def debug(self, message: str, *args: list, **kwds: dict):
27 | """Debug log with elapsed time in message and extra."""
28 | self.log(logging.DEBUG, message, *args, **kwds)
29 |
30 | def info(self, message: str, *args: list, **kwds: dict):
31 | """Info log with elapsed time in message and extra."""
32 | self.log(logging.INFO, message, *args, **kwds)
33 |
34 | def warn(self, message: str, *args: list, **kwds: dict):
35 | """Warn log with elapsed time in message and extra."""
36 | self.log(logging.WARN, message, *args, **kwds)
37 |
38 | def error(self, message: str, *args: list, **kwds: dict):
39 | """Error log with elapsed time in message and extra."""
40 | self.log(logging.ERROR, message, *args, **kwds)
41 |
42 |
def send_ping(email: str, message: str):
    """Dummy ping used for testing; returns a confirmation string."""
    recipient, body = email, message
    return f"You send to: {recipient} the message: {body}"
46 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/domain/utils/manager.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from loguru import logger
4 |
5 | from app.core.config import settings
6 |
7 |
def logic_test(message: str = None) -> str:
    """Logic test function: upper-case the given message.

    Simple example of how functions are written in this project.

    Args:
        message (str, optional): A message you want to use. Defaults to None.

    Raises:
        Exception: If it's impossible to compose the message (e.g. it is None).

    Returns:
        str: The elaborated (upper-cased) message.
    """
    logger.debug("Test logic function")

    try:
        composed = message.upper()
    except Exception as e:
        logger.error(f"Impossible to compose the message: {message}, because: {e}")
        logger.exception(f"Error: {e}")
        raise Exception(e)  # noqa: B904

    logger.debug(f"Message modified: {composed} on the app: {settings.APP_NAME}")

    return composed
34 |
35 |
def convert_numbers(numbers: List[int]) -> int:
    """Sum every element of the given list.

    Args:
        numbers (List[int]): List of integers to sum

    Returns:
        int: the result of the sum
    """
    total = 0
    for value in numbers:
        total += value
    return total
46 |
47 |
def name_parsing(name: str = None) -> str:
    """Ester egg spaghetti: greet the given name.

    Args:
        name (str, optional): the name you want to pass. Defaults to None.

    Raises:
        ValueError: If ``name`` is None.

    Returns:
        str: the result string (the name, unchanged).
    """
    if name is None:
        # Bug fix: previously execution fell through to ``name.strip()`` below
        # and crashed with AttributeError; fail explicitly instead.  The old
        # ``logger.exception`` call was also misused outside an except block.
        logger.error(f"Name: {name} not valid, please retry..")
        raise ValueError("Name not valid..")

    logger.info(f"Hello: {name.strip().lower()}, welcome here!")
    logger.debug("So do you like spaghetti right?")

    return name
65 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/security.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | from functools import lru_cache
3 |
4 | from passlib.context import CryptContext
5 |
6 | pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
7 |
8 |
def get_password_hash(password: str) -> str:
    """Derive a strong hash (e.g. bcrypt) for the given password string.

    Args:
        password (str): the password to hash

    Returns:
        str: the hash produced by the shared ``pwd_context``

    """
    hashed = pwd_context.hash(password)
    return hashed
20 |
21 |
def verify_password(incoming_password: str, existing_password: str) -> bool:
    """Safely compare two passwords via SHA256 pre-hashing plus a strong hash (bcrypt).

    Args:
        incoming_password (str): the password to check
        existing_password (str): the password to check against

    Returns:
        bool: True if the passwords match, False otherwise
    """
    incoming_digest = hashlib.sha256(incoming_password.encode()).hexdigest()
    existing_digest = hashlib.sha256(existing_password.encode()).hexdigest()
    return verify_hashs(incoming_digest, existing_digest)
36 |
37 |
@lru_cache
def verify_hashs(incoming_hash: str, existing_hash: str) -> bool:
    """Compare two hashes using a slow hash algorithm behind an LRU cache.

    The cache speeds up repeated checks of the same credentials while the slow
    verification still slows down brute-force attacks.  Callers must pre-hash
    both values with another hash function (see ``verify_password``) before
    reaching this caching layer, to prevent timing attacks on the cache.

    Args:
        incoming_hash (str): the hash to check
        existing_hash (str): the hash to check against

    Returns:
        bool: True if the hashes match, False otherwise
    """
    expected = get_password_hash(existing_hash)
    return pwd_context.verify(incoming_hash, expected)
54 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/logs.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import time
3 |
4 | from loguru import logger
5 |
6 |
class Profiler:
    """Logging helper that tags each record with the time elapsed since construction."""

    def __init__(self):
        self.start = time.time()
        self.last = 0

    def stamp(self, message: str) -> tuple[float, str]:
        """Return (elapsed_seconds, message annotated with the elapsed time)."""
        now = time.time()
        elapsed = now - self.start
        return elapsed, f"{message} @{elapsed:.3f}s"

    def log(self, level: int, message: str, *args: list, **kwds: dict):
        """Emit via loguru, tracking the delta since the previous call."""
        duration, stamped = self.stamp(message)
        delta, self.last = duration - self.last, duration
        logger.log(level, stamped, *args, **kwds, duration=duration, time_delta=delta)

    def debug(self, message: str, *args: list, **kwds: dict):
        """Debug log with elapsed time."""
        self.log(logging.DEBUG, message, *args, **kwds)

    def info(self, message: str, *args: list, **kwds: dict):
        """Info log with elapsed time."""
        self.log(logging.INFO, message, *args, **kwds)

    def warn(self, message: str, *args: list, **kwds: dict):
        """Warn log with elapsed time."""
        self.log(logging.WARN, message, *args, **kwds)

    def error(self, message: str, *args: list, **kwds: dict):
        """Error log with elapsed time."""
        self.log(logging.ERROR, message, *args, **kwds)
41 |
42 |
def send_ping(email: str, message: str):
    """Test helper that pretends to send a message and reports what it 'sent'."""
    confirmation = f"You send to: {email} the message: {message}"
    return confirmation
46 |
--------------------------------------------------------------------------------
/app/core/utils/faker.py:
--------------------------------------------------------------------------------
def fake(message: str, mooving: int = 4) -> str:
    """Encrypt a message by shifting each ASCII letter by ``mooving`` positions.

    The shift runs over the combined upper+lower alphabet (52 symbols), so a
    shift can cross the case boundary (e.g. 'x' -> 'B' with the default of 4).
    Any character outside A-Z/a-z is left untouched.

    Args:
        message (str): The message to be encrypted.
        mooving (int, optional): Positions to shift each letter. Defaults to 4.

    Returns:
        str: The encrypted message.
    """
    alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
    size = len(alphabet)
    encrypted = []
    for char in message:
        position = alphabet.find(char)
        if position == -1:
            encrypted.append(char)
        else:
            encrypted.append(alphabet[(position + mooving) % size])
    return "".join(encrypted)
25 |
26 |
def unfake(message: str, mooving: int = -4) -> str:
    """Decrypt a message by shifting each ASCII letter by ``mooving`` positions.

    Inverse of ``fake`` when called with the opposite shift.  Characters that
    are not A-Z/a-z pass through unchanged.

    Args:
        message (str): The message to be decrypted.
        mooving (int, optional): Shift applied to each letter. Defaults to -4.

    Returns:
        str: The decrypted message.
    """
    alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
    size = len(alphabet)
    decrypted = []
    for char in message:
        position = alphabet.find(char)
        if position == -1:
            decrypted.append(char)
        else:
            decrypted.append(alphabet[(position + mooving) % size])
    return "".join(decrypted)
51 |
--------------------------------------------------------------------------------
/readme/CONTRIBUTING-en.md:
--------------------------------------------------------------------------------
1 | # How to Contribute
2 |
3 | Your contribution is the most important thing for us!!
4 |
5 | We are a community, and without your help and the help of contributors, we can't do anything :)
6 |
7 | So, first of all: THANK YOU!
8 |
9 | If you want to contribute to this repository, please check these requirements first:
10 |
11 | - We suggest working on Linux systems; this template is not designed to work on Windows at the moment. Therefore, we recommend using: `wsl2`, `linux`, or `macos`.
12 | - Have `python` and `uv` installed: check the `pyproject.toml` file to find the correct versions used in this project.
13 | - Have `make` installed (the project ships a Makefile).
14 |
15 | We recommend using Visual Studio Code. You can find a list of useful extensions and a dedicated Python profile inside the `.vscode` folder.
16 |
17 | ## Development and Updates
18 |
19 | If you want to start the project and test it, you need to:
20 |
21 | 1. Fork the project into your GitHub profile. Then clone it locally.
22 | 2. Create a new branch, for example: `develop`.
23 | 3. Install the libraries: `uv sync`. This command will create a `.venv` folder inside the project directory with all the updated dependencies.
24 | 4. Then you can start editing the files inside the folder.
25 | 5. If you want to test if everything works, you need to "cookiecutterize" the project. You can do this using `just` or the Makefile with the command: `make bake-test` or `just bake-test`. This function can generate a test project called **testone** inside the project folder. If Visual Studio Code does not open automatically, you can use `code testone` to open the project in a new Visual Studio Code window and check if it works.
26 | 6. After modifying something in the testone project, unfortunately, you need to copy and paste the updates into the `{{cookiecutter.directory_name}}` folder.
27 | 7. After updating everything, remember to create a pull request from your GitHub project to the original project so we can review the changes and update the public code.
28 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/faker.py:
--------------------------------------------------------------------------------
def fake(message: str, mooving: int = 4) -> str:
    """Encrypt a message by shifting each ASCII letter by ``mooving`` positions.

    The shift runs over the combined upper+lower alphabet (52 symbols), so a
    shift can cross the case boundary (e.g. 'x' -> 'B' with the default of 4).
    Any character outside A-Z/a-z is left untouched.

    Args:
        message (str): The message to be encrypted.
        mooving (int, optional): Positions to shift each letter. Defaults to 4.

    Returns:
        str: The encrypted message.
    """
    alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
    size = len(alphabet)
    pieces = []
    for char in message:
        index = alphabet.find(char)
        pieces.append(char if index < 0 else alphabet[(index + mooving) % size])
    return "".join(pieces)
25 |
26 |
def unfake(message: str, mooving: int = -4) -> str:
    """Decrypt a message by shifting each ASCII letter by ``mooving`` positions.

    Inverse of ``fake`` when called with the opposite shift.  Characters that
    are not A-Z/a-z pass through unchanged.

    Args:
        message (str): The message to be decrypted.
        mooving (int, optional): Shift applied to each letter. Defaults to -4.

    Returns:
        str: The decrypted message.
    """
    alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
    size = len(alphabet)
    pieces = []
    for char in message:
        index = alphabet.find(char)
        pieces.append(char if index < 0 else alphabet[(index + mooving) % size])
    return "".join(pieces)
51 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | default_language_version:
3 | python: python3.12
4 | default_stages: [pre-commit, pre-push]
5 | repos:
6 | - repo: https://github.com/pre-commit/pre-commit-hooks
7 | rev: v4.4.0
8 | hooks:
9 | - id: check-added-large-files # Prevent giant files from being committed
10 | args: [--maxkb=5000]
11 | - id: check-json # Attempts to load all json files to verify syntax
12 | exclude: ^(.dev|.*\.vscode)
13 | - id: check-merge-conflict # Check for files that contain merge conflict strings
14 | - id: check-toml # Attempts to load all TOML files to verify syntax
15 | - id: detect-private-key # Checks for the existence of private keys
16 | - id: pretty-format-json # Checks that all your JSON files are pretty. "Pretty" here means that keys are sorted and indented. You can configure this with the following commandline options
17 | args: [--autofix]
18 | # exclude: ^.vscode
19 | exclude: ^(.dev|.*\.vscode)
20 | - repo: local
21 | hooks:
22 | - id: ruff
23 | name: ruff
24 | entry: uv run ruff check --no-cache --force-exclude --fix --exit-non-zero-on-fix --show-fixes .
25 | language: system
26 | types: [python]
27 | # - id: mypy
28 | # name: mypy
29 | # entry: mypy datalink
30 | # language: system
31 | # types: [python]
32 | # pass_filenames: false
33 | - id: detect-secrets
34 | name: detect-secrets
35 | # entry: git ls-files -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline
36 | entry: git diff --staged --name-only -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline
37 | language: system
38 | types: [bash]
39 | # - id: detect_secrets
40 | # name: detect_secrets
41 | # entry: uv run detect-secrets scan --baseline .secrets_baseline.json
42 | # language: system
43 | # types: [python]
44 | # exclude: package.lock.json
45 |
--------------------------------------------------------------------------------
/app/core/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | from functools import cache
3 | from typing import List
4 |
5 | from loguru import logger
6 | from pydantic import AnyHttpUrl, ValidationError
7 | from pydantic_settings import BaseSettings, SettingsConfigDict
8 |
9 | from app.core.settings.logger import LoggerSettings
10 |
11 |
class Settings(BaseSettings):
    """Settings class for application settings and secrets management.

    Values may be overridden via a ``.env`` file (see ``model_config``).

    Official documentation on pydantic settings management:
    - https://pydantic-docs.helpmanual.io/usage/settings/
    """

    # Setup the .env file system reading
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", case_sensitive=True, extra="ignore")

    # Project details
    APP_NAME: str = "bear"
    APP_VERSION: str = "0.1.0"
    API_PREFIX: str = "/api/v1"
    DEBUG: bool = False
    # NOTE(review): "dev" is a placeholder; deployments must override SECRET_KEY via .env
    SECRET_KEY: str = "dev"

    # Application Path — both resolved from the process working directory at import time
    APP_PATH: str = os.path.abspath(".")
    REPO_PATH: str = os.path.abspath(".")
    # NOTE(review): joins "app/src/configs", but this tree has app/core/... — confirm the path
    CONFIG_PATH: str = os.path.join(APP_PATH, "app", "src", "configs")

    # CORS
    BACKEND_CORS_ORIGINS: List[AnyHttpUrl] = []

    # Logger settings
    # NOTE(review): kwarg is lowercase "repo_path" while the field is REPO_PATH and
    # case_sensitive=True with extra="ignore" — the kwarg is presumably ignored; verify.
    LOGGER: LoggerSettings = LoggerSettings(repo_path=REPO_PATH)

    @classmethod
    @cache  # memoize so the .env is read and the logger configured only once per process
    def get_settings(cls) -> "Settings":
        """Generate and get the settings."""
        try:
            settings = cls()  # noqa
            settings.LOGGER.setup_logger()  # Initialize logger
            return settings
        except ValidationError as e:
            logger.error(f"Validation error in settings: {e}")
            raise  # Raise the validation error to alert the user
        except Exception as message:
            logger.error(f"Error: impossible to get the settings: {message}")
            raise


# default settings with initialization (module-level singleton, built at import time)
settings = Settings.get_settings()
58 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | default_language_version:
3 | python: python3.12
4 | default_stages: [pre-commit, pre-push]
5 | repos:
6 | - repo: https://github.com/pre-commit/pre-commit-hooks
7 | rev: v4.4.0
8 | hooks:
9 | - id: check-added-large-files # Prevent giant files from being committed
10 | args: [--maxkb=5000]
11 | - id: check-json # Attempts to load all json files to verify syntax
12 | exclude: ^(.dev|.*\.vscode)
13 | - id: check-merge-conflict # Check for files that contain merge conflict strings
14 | - id: check-toml # Attempts to load all TOML files to verify syntax
15 | - id: detect-private-key # Checks for the existence of private keys
16 | - id: pretty-format-json # Checks that all your JSON files are pretty. "Pretty" here means that keys are sorted and indented. You can configure this with the following commandline options
17 | args: [--autofix]
18 | # exclude: ^.vscode
19 | exclude: ^(.dev|.*\.vscode)
20 | - repo: local
21 | hooks:
22 | - id: ruff
23 | name: ruff
24 | entry: uv run ruff check --no-cache --force-exclude --fix --exit-non-zero-on-fix --show-fixes .
25 | language: system
26 | types: [python]
27 | # - id: mypy
28 | # name: mypy
29 | # entry: mypy datalink
30 | # language: system
31 | # types: [python]
32 | # pass_filenames: false
33 | - id: detect-secrets
34 | name: detect-secrets
35 | # entry: git ls-files -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline
36 | entry: git diff --staged --name-only -z | xargs -0 detect-secrets-hook --baseline .secrets.baseline
37 | language: system
38 | types: [bash]
39 | # - id: detect_secrets
40 | # name: detect_secrets
41 | # entry: uv run detect-secrets scan --baseline .secrets_baseline.json
42 | # language: system
43 | # types: [python]
44 | # exclude: package.lock.json
45 |
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "njpwerner.autodocstring",
4 | "ms-python.black-formatter",
5 | "streetsidesoftware.code-spell-checker",
6 | "naumovs.color-highlight",
7 | "ms-vscode-remote.remote-containers",
8 | "ms-azuretools.vscode-docker",
9 | "dbaeumer.vscode-eslint",
10 | "tamasfe.even-better-toml",
11 | "mhutchie.git-graph",
12 | "donjayamanne.githistory",
13 | "GitHub.codespaces",
14 | "GitHub.copilot-chat",
15 | "GitHub.vscode-pull-request-github",
16 | "GitHub.remotehub",
17 | "eamodio.gitlens",
18 | "oderwat.indent-rainbow",
19 | "VisualStudioExptTeam.vscodeintellicode",
20 | "VisualStudioExptTeam.intellicode-api-usage-examples",
21 | "VisualStudioExptTeam.vscodeintellicode-completions",
22 | "VisualStudioExptTeam.vscodeintellicode-insiders",
23 | "streetsidesoftware.code-spell-checker-italian",
24 | "ms-toolsai.jupyter",
25 | "ms-toolsai.vscode-jupyter-cell-tags",
26 | "ms-toolsai.jupyter-keymap",
27 | "ms-toolsai.jupyter-renderers",
28 | "ms-toolsai.vscode-jupyter-powertoys",
29 | "ms-toolsai.vscode-jupyter-slideshow",
30 | "ms-kubernetes-tools.vscode-kubernetes-tools",
31 | "ms-vsliveshare.vsliveshare",
32 | "ms-vscode.makefile-tools",
33 | "yzhang.markdown-all-in-one",
34 | "DavidAnson.vscode-markdownlint",
35 | "ms-python.mypy-type-checker",
36 | "ms-playwright.playwright",
37 | "ms-python.vscode-pylance",
38 | "ms-python.pylint",
39 | "ms-python.python",
40 | "KevinRose.vsc-python-indent",
41 | "mechatroner.rainbow-csv",
42 | "ms-vscode-remote.remote-ssh",
43 | "ms-vscode-remote.remote-ssh-edit",
44 | "ms-vscode.remote-server",
45 | "ms-vscode-remote.vscode-remote-extensionpack",
46 | "ms-vscode.remote-explorer",
47 | "ms-vscode.remote-repositories",
48 | "medo64.render-crlf",
49 | "charliermarsh.ruff",
50 | "vscode-icons-team.vscode-icons",
51 | "ms-vscode-remote.remote-wsl",
52 | "redhat.vscode-yaml"
53 | ]
54 | }
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | from functools import cache
3 | from typing import List
4 |
5 | from loguru import logger
6 | from pydantic import AnyHttpUrl, ValidationError
7 | from pydantic_settings import BaseSettings, SettingsConfigDict
8 |
9 | from app.core.settings.logger import LoggerSettings
10 |
11 |
class Settings(BaseSettings):
    """Settings class for application settings and secrets management.

    Values may be overridden via a ``.env`` file (see ``model_config``).

    Official documentation on pydantic settings management:
    - https://pydantic-docs.helpmanual.io/usage/settings/
    """

    # Setup the .env file system reading
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", case_sensitive=True, extra="ignore")

    # Project details
    APP_NAME: str = "bear"
    APP_VERSION: str = "0.1.0"
    API_PREFIX: str = "/api/v1"
    DEBUG: bool = False
    # NOTE(review): "dev" is a placeholder; deployments must override SECRET_KEY via .env
    SECRET_KEY: str = "dev"

    # Application Path — both resolved from the process working directory at import time
    APP_PATH: str = os.path.abspath(".")
    REPO_PATH: str = os.path.abspath(".")
    # NOTE(review): joins "app/src/configs", but this tree has app/core/... — confirm the path
    CONFIG_PATH: str = os.path.join(APP_PATH, "app", "src", "configs")

    # CORS
    BACKEND_CORS_ORIGINS: List[AnyHttpUrl] = []

    # Logger settings
    # NOTE(review): kwarg is lowercase "repo_path" while the field is REPO_PATH and
    # case_sensitive=True with extra="ignore" — the kwarg is presumably ignored; verify.
    LOGGER: LoggerSettings = LoggerSettings(repo_path=REPO_PATH)

    @classmethod
    @cache  # memoize so the .env is read and the logger configured only once per process
    def get_settings(cls) -> "Settings":
        """Generate and get the settings."""
        try:
            settings = cls()  # noqa
            settings.LOGGER.setup_logger()  # Initialize logger
            return settings
        except ValidationError as e:
            logger.error(f"Validation error in settings: {e}")
            raise  # Raise the validation error to alert the user
        except Exception as message:
            logger.error(f"Error: impossible to get the settings: {message}")
            raise


# default settings with initialization (module-level singleton, built at import time)
settings = Settings.get_settings()
58 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "njpwerner.autodocstring",
4 | "ms-python.black-formatter",
5 | "streetsidesoftware.code-spell-checker",
6 | "naumovs.color-highlight",
7 | "ms-vscode-remote.remote-containers",
8 | "ms-azuretools.vscode-docker",
9 | "dbaeumer.vscode-eslint",
10 | "tamasfe.even-better-toml",
11 | "mhutchie.git-graph",
12 | "donjayamanne.githistory",
13 | "GitHub.codespaces",
14 | "GitHub.copilot-chat",
15 | "GitHub.vscode-pull-request-github",
16 | "GitHub.remotehub",
17 | "eamodio.gitlens",
18 | "oderwat.indent-rainbow",
19 | "VisualStudioExptTeam.vscodeintellicode",
20 | "VisualStudioExptTeam.intellicode-api-usage-examples",
21 | "VisualStudioExptTeam.vscodeintellicode-completions",
22 | "VisualStudioExptTeam.vscodeintellicode-insiders",
23 | "streetsidesoftware.code-spell-checker-italian",
24 | "ms-toolsai.jupyter",
25 | "ms-toolsai.vscode-jupyter-cell-tags",
26 | "ms-toolsai.jupyter-keymap",
27 | "ms-toolsai.jupyter-renderers",
28 | "ms-toolsai.vscode-jupyter-powertoys",
29 | "ms-toolsai.vscode-jupyter-slideshow",
30 | "ms-kubernetes-tools.vscode-kubernetes-tools",
31 | "ms-vsliveshare.vsliveshare",
32 | "ms-vscode.makefile-tools",
33 | "yzhang.markdown-all-in-one",
34 | "DavidAnson.vscode-markdownlint",
35 | "ms-python.mypy-type-checker",
36 | "ms-playwright.playwright",
37 | "ms-python.vscode-pylance",
38 | "ms-python.pylint",
39 | "ms-python.python",
40 | "KevinRose.vsc-python-indent",
41 | "mechatroner.rainbow-csv",
42 | "ms-vscode-remote.remote-ssh",
43 | "ms-vscode-remote.remote-ssh-edit",
44 | "ms-vscode.remote-server",
45 | "ms-vscode-remote.vscode-remote-extensionpack",
46 | "ms-vscode.remote-explorer",
47 | "ms-vscode.remote-repositories",
48 | "medo64.render-crlf",
49 | "charliermarsh.ruff",
50 | "vscode-icons-team.vscode-icons",
51 | "ms-vscode-remote.remote-wsl",
52 | "redhat.vscode-yaml"
53 | ]
54 | }
--------------------------------------------------------------------------------
/readme/CONTRIBUTING-it.md:
--------------------------------------------------------------------------------
1 | # Come contribuire
2 |
3 | Il tuo contributo è la cosa più importante per noi!!
4 |
5 | Siamo una comunità e senza il tuo aiuto e l'aiuto dei contributori non possiamo fare nulla :)
6 |
7 | Quindi, prima di tutto: GRAZIE!
8 |
9 | Se desideri contribuire a questo repository, controlla questi requisiti prima:
10 |
11 | - Suggeriamo di lavorare su sistemi linux, questo template non è fatto per funzionare su windows al momento, suggeriamo quindi di utilizzare: `wsl2`, `linux`, `macos`.
12 | - Avere `python` e `uv` installati: controlla il `pyproject.toml` per trovare le versioni corrette utilizzate in questo progetto.
13 | - Avere `makefile`.
14 |
15 | Consigliamo di utilizzare Visual Studio Code, puoi trovare un elenco di estensioni utili e un profilo Python dedicato all'interno della cartella `.vscode`.
16 |
17 | ## Sviluppo e aggiornamento
18 |
19 | Se desideri avviare il progetto e testarlo, devi:
20 |
21 | 1. Forkare il progetto nel tuo profilo GitHub. Quindi clonalo in locale.
22 | 2. Crea un nuovo branch, ad esempio: `develop`
23 | 3. Installa le librerie: `uv sync`. Questo comando creerà una cartella .venv all'interno della cartella del progetto con tutte le dipendenze aggiornate.
24 | 4. Quindi puoi iniziare a modificare i file all'interno della cartella.
25 | 5. Se desideri testare se tutto funziona, devi "cookiecutterizzare" il progetto, puoi farlo utilizzando just o Makefile con il comando: `make bake-test` o `just bake-test`. Questa funzione può generare un progetto di test chiamato **testone** all'interno della cartella del progetto, se Visual Studio Code non si apre automaticamente, puoi utilizzare `code testone` per aprire il progetto in una nuova finestra di Visual Studio Code e verificare se funziona.
26 | 6. Dopo aver modificato qualcosa nel progetto di testone, purtroppo devi copiare e incollare gli aggiornamenti all'interno della `cookiecutter.directory_name`.
27 | 7. Dopo aver aggiornato tutto, ricorda di creare una pull request dal tuo progetto GitHub al progetto originale in modo che possiamo esaminare le modifiche e aggiornare il codice pubblico.
28 |
--------------------------------------------------------------------------------
/app/core/utils/utils.py:
--------------------------------------------------------------------------------
1 | import importlib
2 | import inspect
3 | from typing import Any, Callable
4 |
5 | import loguru as logger
6 |
7 |
def load_class(path_to_module: str, class_name: str) -> Any:
    """Dynamically resolve an attribute (typically a class) from a module path.

    Thin convenience wrapper around ``importlib``.

    Args:
        path_to_module (str): the dotted python path to the module
        class_name (str): the name of the class you want to load

    Returns:
        (Any): the class loaded
    """
    return getattr(importlib.import_module(path_to_module), class_name)
22 |
23 |
def get_default_args(func: Callable):
    """Collect the default values declared in a function's signature.

    Args:
        func (Callable): the function to inspect
    Returns:
        (dict): mapping of parameter name -> declared default value
    """
    defaults = {}
    for name, param in inspect.signature(func).parameters.items():
        if param.default is not inspect.Parameter.empty:
            defaults[name] = param.default
    return defaults
34 |
35 |
36 | # String to bool
def string_to_bool(text: str) -> bool:
    """Convert an input string into a boolean.

    Args:
        text: Input string

    Raises:
        ValueError: If the string is not a recognized truthy/falsy word

    Returns:
        result (boolean): True or False
    """
    # Normalize once instead of once per branch.
    normalized = text.lower().strip()
    truthy = {"true", "1", "t", "y", "yes", "yeah", "yup", "certainly", "uh-huh"}
    falsy = {"false", "wrong", "no", "0", "n", "nah", "nope", "nah-nah"}

    if normalized in truthy:
        return True
    if normalized in falsy:
        return False

    # Bug fix: the module does ``import loguru as logger`` (the module object,
    # not the logger), so ``logger.error`` raised AttributeError here.  Import
    # the real logger object locally before reporting the failure.
    from loguru import logger

    message = "Could not convert string to bool"
    logger.error(message)
    raise ValueError(message)
78 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/utils.py:
--------------------------------------------------------------------------------
1 | import importlib
2 | import inspect
3 | from typing import Any, Callable
4 |
5 | import loguru as logger
6 |
7 |
def load_class(path_to_module: str, class_name: str) -> Any:
    """Dynamically resolve an attribute (typically a class) from a module path.

    Thin convenience wrapper around ``importlib``.

    Args:
        path_to_module (str): the dotted python path to the module
        class_name (str): the name of the class you want to load

    Returns:
        (Any): the class loaded
    """
    return getattr(importlib.import_module(path_to_module), class_name)
22 |
23 |
def get_default_args(func: Callable):
    """Collect the default values declared in a function's signature.

    Args:
        func (Callable): the function to inspect
    Returns:
        (dict): mapping of parameter name -> declared default value
    """
    defaults = {}
    for name, param in inspect.signature(func).parameters.items():
        if param.default is not inspect.Parameter.empty:
            defaults[name] = param.default
    return defaults
34 |
35 |
36 | # String to bool
def string_to_bool(text: str) -> bool:
    """Convert an input string into a boolean.

    Args:
        text: Input string

    Raises:
        ValueError: If the string is not a recognized truthy/falsy word

    Returns:
        result (boolean): True or False
    """
    # Normalize once instead of once per branch.
    normalized = text.lower().strip()
    truthy = {"true", "1", "t", "y", "yes", "yeah", "yup", "certainly", "uh-huh"}
    falsy = {"false", "wrong", "no", "0", "n", "nah", "nope", "nah-nah"}

    if normalized in truthy:
        return True
    if normalized in falsy:
        return False

    # Bug fix: the module does ``import loguru as logger`` (the module object,
    # not the logger), so ``logger.error`` raised AttributeError here.  Import
    # the real logger object locally before reporting the failure.
    from loguru import logger

    message = "Could not convert string to bool"
    logger.error(message)
    raise ValueError(message)
78 |
--------------------------------------------------------------------------------
/app/core/settings/logger.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | from loguru import logger
5 | from pydantic_settings import BaseSettings, SettingsConfigDict
6 |
7 |
class LoggerSettings(BaseSettings):
    """Logger configuration settings (loguru sinks, rotation and retention)."""

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=True,
        extra="ignore",
    )

    APP_NAME: str = "bear"
    LOG_VERBOSITY: str = "DEBUG"
    LOG_ROTATION_SIZE: str = "100MB"
    LOG_RETENTION: str = "30 days"
    LOG_FILE_NAME: str = APP_NAME + "_{time:DD-MM-YYYY}.log"
    LOG_FORMAT: str = "{level}\t| {time:DD-MM-YYYY:HH:mm:ss!UTC} utc | {file}:{module}:{line} | {message}"
    REPO_PATH: str = os.path.abspath(".")  # Set REPO_PATH to the current directory
    LOG_FOLDER: str = os.path.join(REPO_PATH, "logs")  # Default log folder
    LOG_FILE_PATH: str = os.path.join(LOG_FOLDER, LOG_FILE_NAME)
    PROFILE: bool = False

    def __init__(self, **kwargs):
        """Recompute the derived log paths after env/kwargs overrides are applied.

        Bug fix: LOG_FILE_NAME must be rebuilt BEFORE LOG_FILE_PATH; previously
        the path embedded the stale class-level "{time:DD-MM-YYYY}" name while
        LOG_FILE_NAME was switched to the "{time:D-M-YY}" pattern afterwards.
        """
        super().__init__(**kwargs)
        # Rebuild the name first so the path below embeds the final pattern.
        self.LOG_FILE_NAME = self.APP_NAME + "_{time:D-M-YY}.log"
        self.LOG_FOLDER = os.path.join(self.REPO_PATH, "logs")  # Dynamically set LOG_FOLDER
        self.LOG_FILE_PATH = os.path.join(self.LOG_FOLDER, self.LOG_FILE_NAME)

    def setup_logger(self):
        """Configure loguru: a colorized stderr sink plus a rotating file sink."""
        logger.remove()  # Remove previous handlers
        logger.add(
            sink=sys.stderr,
            colorize=True,
            format=self.LOG_FORMAT,
            level=self.LOG_VERBOSITY,
            serialize=False,
            catch=True,
            backtrace=False,
            diagnose=False,
        )
        logger.add(
            sink=self.LOG_FILE_PATH,
            rotation=self.LOG_ROTATION_SIZE,
            retention=self.LOG_RETENTION,
            colorize=True,
            format=self.LOG_FORMAT,
            level=self.LOG_VERBOSITY,
            serialize=False,
            catch=True,
            backtrace=False,
            diagnose=False,
            encoding="utf8",
        )
62 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/settings/logger.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | from loguru import logger
5 | from pydantic_settings import BaseSettings, SettingsConfigDict
6 |
7 |
class LoggerSettings(BaseSettings):
    """Logger configuration settings.

    Values can be overridden via environment variables or a ``.env`` file.
    ``setup_logger`` configures loguru with a stderr sink and a rotating
    file sink.
    """

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=True,
        extra="ignore",
    )

    APP_NAME: str = "bear"
    LOG_VERBOSITY: str = "DEBUG"  # Minimum level emitted by both sinks
    LOG_ROTATION_SIZE: str = "100MB"  # Rotate the log file once it reaches this size
    LOG_RETENTION: str = "30 days"  # Keep rotated log files for this long
    LOG_FILE_NAME: str = APP_NAME + "_{time:DD-MM-YYYY}.log"
    LOG_FORMAT: str = "{level}\t| {time:DD-MM-YYYY:HH:mm:ss!UTC} utc | {file}:{module}:{line} | {message}"
    REPO_PATH: str = os.path.abspath(".")  # Set REPO_PATH to the current directory
    LOG_FOLDER: str = os.path.join(REPO_PATH, "logs")  # Default log folder
    LOG_FILE_PATH: str = os.path.join(LOG_FOLDER, LOG_FILE_NAME)
    PROFILE: bool = False

    def __init__(self, **kwargs):
        """Recompute derived paths after env/kwargs overrides are applied."""
        super().__init__(**kwargs)
        # Rebuild LOG_FILE_NAME from the (possibly overridden) APP_NAME *before*
        # deriving LOG_FILE_PATH, so the two attributes always agree. The
        # previous code overwrote LOG_FILE_NAME (with a different date format)
        # after LOG_FILE_PATH had already been computed, leaving them mismatched.
        self.LOG_FILE_NAME = self.APP_NAME + "_{time:DD-MM-YYYY}.log"
        self.LOG_FOLDER = os.path.join(self.REPO_PATH, "logs")
        self.LOG_FILE_PATH = os.path.join(self.LOG_FOLDER, self.LOG_FILE_NAME)

    def setup_logger(self):
        """Configure the loguru logger: one stderr sink, one rotating file sink."""
        logger.remove()  # Remove previous handlers
        logger.add(
            sink=sys.stderr,
            colorize=True,
            format=self.LOG_FORMAT,
            level=self.LOG_VERBOSITY,
            serialize=False,
            catch=True,
            backtrace=False,
            diagnose=False,
        )
        logger.add(
            sink=self.LOG_FILE_PATH,
            rotation=self.LOG_ROTATION_SIZE,
            retention=self.LOG_RETENTION,
            # Do not colorize the file sink: colorize=True would embed ANSI
            # escape codes in the log file.
            colorize=False,
            format=self.LOG_FORMAT,
            level=self.LOG_VERBOSITY,
            serialize=False,
            catch=True,
            backtrace=False,
            diagnose=False,
            encoding="utf8",
        )
62 |
--------------------------------------------------------------------------------
/app/core/utils/db_format.py:
--------------------------------------------------------------------------------
1 | from decimal import Decimal
2 | from typing import Optional, Union
3 |
4 | import numpy as np
5 |
6 |
def format_db_month(date: str) -> str:
    """Render a month string as a Postgres TO_DATE expression.

    Args:
        date (str): Month string (NaN-like values map to NULL)

    Returns:
        str: ``TO_DATE(...)`` SQL fragment, or ``NULL`` for NaN input
    """
    return "NULL" if str(date) == "nan" else f"TO_DATE('{date}','YYYY-MM')"
19 |
20 |
def format_db_week(date: str) -> str:
    """Render a week string as a Postgres TO_DATE expression.

    Args:
        date (str): Week string (NaN-like values map to NULL)

    Returns:
        str: ``TO_DATE(...)`` SQL fragment, or ``NULL`` for NaN input
    """
    return "NULL" if str(date) == "nan" else f"TO_DATE('{date}','YYYY-wIW')"
33 |
34 |
def format_db_date(date: str) -> str:
    """Render a date string as a Postgres TO_DATE expression.

    Args:
        date (str): Date string (NaN-like values map to NULL)

    Returns:
        str: ``TO_DATE(...)`` SQL fragment, or ``NULL`` for NaN input
    """
    return "NULL" if str(date) == "nan" else f"TO_DATE('{date}','YYYY-MM-DD')"
47 |
48 |
def format_db_string(x: Optional[str]) -> str:
    """Format a string and escape quotes for Postgres compatibility.

    Args:
        x (Optional[str]): String to format; ``None`` maps to SQL ``NULL``

    Returns:
        str: Single-quoted Postgres string literal, or ``NULL`` for None
    """
    # Emit SQL NULL for None instead of the literal string 'None',
    # consistent with the NULL handling of the date formatters above.
    if x is None:
        return "NULL"
    x = x.replace("'", "''")  # Double single quotes (SQL escaping)
    # Pad colons with a space — presumably to avoid ':'-prefixed tokens
    # being parsed as bind parameters; confirm with callers.
    x = x.replace(":", ": ")
    return f"'{x}'"
63 |
64 |
def format_db_float(x: Union[float, Decimal, str]) -> float:
    """Format float or decimal types for postgres compatibility.

    Args:
        x (Union[float, Decimal, str]): Value to format

    Returns:
        float: Postgres formatted float (0 for None, empty string, or NaN)
    """
    if x is None or x == "":
        return 0
    value = float(x)
    return 0 if np.isnan(value) else value
80 |
81 |
def format_db_int(x: Union[int, float, str]) -> int:
    """Format integer or float into an integer.

    Args:
        x (Union[int, float, str]): Value to format

    Returns:
        int: Postgres formatted integer (0 for None, empty string, or NaN)
    """
    if x is None or x == "":
        return 0
    value = float(x)
    if np.isnan(value):
        return 0
    # Truncate toward zero, matching int(float) semantics.
    return int(value)
98 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/db_format.py:
--------------------------------------------------------------------------------
1 | from decimal import Decimal
2 | from typing import Optional, Union
3 |
4 | import numpy as np
5 |
6 |
def format_db_month(date: str) -> str:
    """Render a month string as a Postgres TO_DATE expression.

    Args:
        date (str): Month string (NaN-like values map to NULL)

    Returns:
        str: ``TO_DATE(...)`` SQL fragment, or ``NULL`` for NaN input
    """
    return "NULL" if str(date) == "nan" else f"TO_DATE('{date}','YYYY-MM')"
19 |
20 |
def format_db_week(date: str) -> str:
    """Render a week string as a Postgres TO_DATE expression.

    Args:
        date (str): Week string (NaN-like values map to NULL)

    Returns:
        str: ``TO_DATE(...)`` SQL fragment, or ``NULL`` for NaN input
    """
    return "NULL" if str(date) == "nan" else f"TO_DATE('{date}','YYYY-wIW')"
33 |
34 |
def format_db_date(date: str) -> str:
    """Render a date string as a Postgres TO_DATE expression.

    Args:
        date (str): Date string (NaN-like values map to NULL)

    Returns:
        str: ``TO_DATE(...)`` SQL fragment, or ``NULL`` for NaN input
    """
    return "NULL" if str(date) == "nan" else f"TO_DATE('{date}','YYYY-MM-DD')"
47 |
48 |
def format_db_string(x: Optional[str]) -> str:
    """Format a string and escape quotes for Postgres compatibility.

    Args:
        x (Optional[str]): String to format; ``None`` maps to SQL ``NULL``

    Returns:
        str: Single-quoted Postgres string literal, or ``NULL`` for None
    """
    # Emit SQL NULL for None instead of the literal string 'None',
    # consistent with the NULL handling of the date formatters above.
    if x is None:
        return "NULL"
    x = x.replace("'", "''")  # Double single quotes (SQL escaping)
    # Pad colons with a space — presumably to avoid ':'-prefixed tokens
    # being parsed as bind parameters; confirm with callers.
    x = x.replace(":", ": ")
    return f"'{x}'"
63 |
64 |
def format_db_float(x: Union[float, Decimal, str]) -> float:
    """Format float or decimal types for postgres compatibility.

    Args:
        x (Union[float, Decimal, str]): Value to format

    Returns:
        float: Postgres formatted float (0 for None, empty string, or NaN)
    """
    if x is None or x == "":
        return 0
    value = float(x)
    return 0 if np.isnan(value) else value
80 |
81 |
def format_db_int(x: Union[int, float, str]) -> int:
    """Format integer or float into an integer.

    Args:
        x (Union[int, float, str]): Value to format

    Returns:
        int: Postgres formatted integer (0 for None, empty string, or NaN)
    """
    if x is None or x == "":
        return 0
    value = float(x)
    if np.isnan(value):
        return 0
    # Truncate toward zero, matching int(float) semantics.
    return int(value)
97 | return int(x)
98 |
--------------------------------------------------------------------------------
/app/core/utils/func.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import time
3 | from functools import lru_cache, wraps
4 |
5 |
6 | def multiplexed(func):
7 | """Call wrapped function repeatedly for each set of zipped positional args."""
8 |
9 | @wraps(func)
10 | def wrapper(*args, **kwds):
11 | results = []
12 | for arg in zip(*args, strict=False):
13 | results.append(func(*arg, **kwds))
14 |
15 | return results
16 |
17 | return wrapper
18 |
19 |
def multiplexed_method(func):
    """Call wrapped function repeatedly for each set of zipped positional args except self."""

    @wraps(func)
    def wrapper(self, *args, **kwds):
        # Same zipped fan-out as ``multiplexed``, with ``self`` re-injected
        # into every call.
        return [func(self, *group, **kwds) for group in zip(*args, strict=False)]

    return wrapper
32 |
33 |
def time_cache(ttl_seconds: int, maxsize=1):
    """Cache the result of a function for at most ttl_seconds seconds."""

    def decorator(func):
        # The cache key includes a coarse "time bucket"; once the wall clock
        # crosses a bucket boundary the key changes and the entry is recomputed.
        @lru_cache(maxsize=maxsize)
        def cached(_bucket, *args, **kwds):
            return func(*args, **kwds)

        @wraps(func)
        def wrapper(*args, **kwds):
            bucket = time.time() // ttl_seconds
            return cached(bucket, *args, **kwds)

        return wrapper

    return decorator
50 |
51 |
52 | def batched(batch_size: int = 64):
53 | """Call wrapped async function repeatedly for batch_sized chunks of ALL POSITIONAL ARGS."""
54 |
55 | def decorator(func):
56 | if not asyncio.iscoroutinefunction(func):
57 | raise TypeError(f"Decorated function must be async: {func}")
58 |
59 | @wraps(func)
60 | async def wrapper(*args, **kwds):
61 | if len(args) == 0:
62 | raise TypeError("Wrapped function must have at least one positional arg")
63 |
64 | seq = list(zip(*args, strict=False))
65 | results = []
66 | for chunk in (seq[pos : pos + batch_size] for pos in range(0, len(seq), batch_size)):
67 | # Convert each arg into a list
68 | args = [list(arg) for arg in zip(*chunk, strict=False)]
69 | batch_result = await func(*args, **kwds)
70 | try:
71 | results.extend(batch_result)
72 | except TypeError as e:
73 | raise TypeError("Wrapped function must return an iterable") from e
74 |
75 | return results
76 |
77 | return wrapper
78 |
79 | return decorator
80 |
81 |
def batched_method(batch_size=64):
    """Call wrapped function repeatedly for batch_sized chunks of ALL POSITIONAL ARGS except self."""

    def decorator(func):
        @wraps(func)
        async def wrapper(self, *args, **kwds):
            # Delegate the chunking to ``batched`` with ``self`` bound back
            # into each call.
            @batched(batch_size=batch_size)
            async def bound(*call_args, **call_kwds):
                return await func(self, *call_args, **call_kwds)

            return await bound(*args, **kwds)

        return wrapper

    return decorator
97 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/func.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import time
3 | from functools import lru_cache, wraps
4 |
5 |
6 | def multiplexed(func):
7 | """Call wrapped function repeatedly for each set of zipped positional args."""
8 |
9 | @wraps(func)
10 | def wrapper(*args, **kwds):
11 | results = []
12 | for arg in zip(*args, strict=False):
13 | results.append(func(*arg, **kwds))
14 |
15 | return results
16 |
17 | return wrapper
18 |
19 |
def multiplexed_method(func):
    """Call wrapped function repeatedly for each set of zipped positional args except self."""

    @wraps(func)
    def wrapper(self, *args, **kwds):
        # Same zipped fan-out as ``multiplexed``, with ``self`` re-injected
        # into every call.
        return [func(self, *group, **kwds) for group in zip(*args, strict=False)]

    return wrapper
32 |
33 |
def time_cache(ttl_seconds: int, maxsize=1):
    """Cache the result of a function for at most ttl_seconds seconds."""

    def decorator(func):
        # The cache key includes a coarse "time bucket"; once the wall clock
        # crosses a bucket boundary the key changes and the entry is recomputed.
        @lru_cache(maxsize=maxsize)
        def cached(_bucket, *args, **kwds):
            return func(*args, **kwds)

        @wraps(func)
        def wrapper(*args, **kwds):
            bucket = time.time() // ttl_seconds
            return cached(bucket, *args, **kwds)

        return wrapper

    return decorator
50 |
51 |
52 | def batched(batch_size: int = 64):
53 | """Call wrapped async function repeatedly for batch_sized chunks of ALL POSITIONAL ARGS."""
54 |
55 | def decorator(func):
56 | if not asyncio.iscoroutinefunction(func):
57 | raise TypeError(f"Decorated function must be async: {func}")
58 |
59 | @wraps(func)
60 | async def wrapper(*args, **kwds):
61 | if len(args) == 0:
62 | raise TypeError("Wrapped function must have at least one positional arg")
63 |
64 | seq = list(zip(*args, strict=False))
65 | results = []
66 | for chunk in (seq[pos : pos + batch_size] for pos in range(0, len(seq), batch_size)):
67 | # Convert each arg into a list
68 | args = [list(arg) for arg in zip(*chunk, strict=False)]
69 | batch_result = await func(*args, **kwds)
70 | try:
71 | results.extend(batch_result)
72 | except TypeError as e:
73 | raise TypeError("Wrapped function must return an iterable") from e
74 |
75 | return results
76 |
77 | return wrapper
78 |
79 | return decorator
80 |
81 |
def batched_method(batch_size=64):
    """Call wrapped function repeatedly for batch_sized chunks of ALL POSITIONAL ARGS except self."""

    def decorator(func):
        @wraps(func)
        async def wrapper(self, *args, **kwds):
            # Delegate the chunking to ``batched`` with ``self`` bound back
            # into each call.
            @batched(batch_size=batch_size)
            async def bound(*call_args, **call_kwds):
                return await func(self, *call_args, **call_kwds)

            return await bound(*args, **kwds)

        return wrapper

    return decorator
97 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | [![Contributors][contributors-shield]][contributors-url] [![Forks][forks-shield]][forks-url] [![Stargazers][stars-shield]][stars-url] [![Issues][issues-shield]][issues-url] [![MIT License][license-shield]][license-url]
6 |
7 |
8 |
9 |
Python Biella Group: Bear
10 |
11 |
Base Environment for Any Reasonable project
12 |
13 |
Report Bug
14 | ·
15 |
Request Feature
16 |
17 |
18 |
19 | - [ITA README DOCUMENTATION](readme/README-it.md)
20 | - [ENG README DOCUMENTATION](readme/README-en.md)
21 |
22 | ## How can you use it?
23 |
24 | There are 2 ways to use this template:
25 |
- **First mode:** Use the GitHub template system: when you create a new repository, select this template to automatically create a new repository with this structure.
27 |
28 | After creating the repository, you can clone the project and then launch this `Make` command to clean and prepare the repository:
29 |
30 | ```bash
31 | make clean
32 | ```
33 |
34 | You will have the repository ready to use.
35 |
36 | - **Second mode**: Use with cookiecutter, you can use this template with cookiecutter to create a new project. To do this, follow these steps:
37 |
38 | ```bash
39 | # if you don't have cookiecutter installed, install it with pip
40 | pip install cookiecutter
41 |
42 | # if you use https
43 | cookiecutter https://github.com/PythonBiellaGroup/Bear.git
44 |
45 | # if you use ssh
46 | cookiecutter git@github.com:PythonBiellaGroup/Bear.git
47 | ```
48 |
49 | ## Contribute
50 |
51 | If you want to contribute to this project, please follow these guidelines:
52 |
53 | - [ITA CONTRIBUTING RULES](readme/CONTRIBUTING-ita.md)
54 | - [ENG CONTRIBUTING RULES](readme/CONTRIBUTING-en.md)
55 |
56 | ## License
57 |
58 | This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details.
59 |
60 | ## Security
61 |
62 | If you discover any security vulnerabilities, please report them by following the [SECURITY.md](readme/SECURITY.md) guidelines.
63 |
64 | ## Contacts
65 |
66 | You can contact us following the links inside our website [Python Biella Group](https://www.pythonbiellagroup.it/)
67 |
68 |
69 | [contributors-shield]: https://img.shields.io/github/contributors/PythonBiellaGroup/Bear.svg?style=for-the-badge
70 | [contributors-url]: https://github.com/PythonBiellaGroup/Bear/graphs/contributors
71 | [forks-shield]: https://img.shields.io/github/forks/PythonBiellaGroup/Bear.svg?style=for-the-badge
72 | [forks-url]: https://github.com/PythonBiellaGroup/Bear/forks
73 | [stars-shield]: https://img.shields.io/github/stars/PythonBiellaGroup/Bear.svg?style=for-the-badge
74 | [stars-url]: https://github.com/PythonBiellaGroup/Bear/stargazers
75 | [issues-shield]: https://img.shields.io/github/issues/PythonBiellaGroup/Bear.svg?style=for-the-badge
76 | [issues-url]: https://github.com/PythonBiellaGroup/Bear/issues
77 | [license-shield]: https://img.shields.io/github/license/PythonBiellaGroup/Bear.svg?style=for-the-badge
78 | [license-url]: https://github.com/PythonBiellaGroup/Bear/blob/main/LICENSE
79 | [contacts-shield]: https://img.shields.io/badge/linktree-39E09B?style=for-the-badge&logo=linktree&logoColor=white
80 | [contacts-url]: https://linktr.ee/PythonBiellaGroup
81 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # To launch mkdocs use: mkdocs serve
3 | # To launch with debug mode use: mkdocs serve -v
4 | # To build mkdocs use: mkdocs build --clean
5 |
6 | # Project information
7 | site_name: PythonBiellaGroup
8 | site_url: https://github.com/PythonBiellaGroup/Bear
9 | site_description: Bear is a Python template system for quick python development.
10 | site_author: PythonBiellaGroup
11 | copyright: Made with ♥ by PBG.
12 |
13 | # Repository
14 | repo_url: https://github.com/PythonBiellaGroup/Bear
15 | repo_name: PythonBiellaGroup/Bear
16 | edit_uri: "" #disables edit button
17 |
18 | # Configuration
19 | theme:
20 | name: material
21 | # Static files
22 | static_templates:
23 | - 404.html
24 |
25 | # Don't include MkDocs' JavaScript
26 | # include_search_page: false
27 | # search_index_only: true
28 | #
29 | language: en
30 | features:
31 | - navigation.tabs
32 | - search.suggest
33 | - search.highlight
34 | # - search.share
35 | # - header.autohide
36 | # - content.code.annotate
37 | # - navigation.indexes
38 | # - navigation.sections
39 | # - navigation.tabs
40 | logo: static/images/logo.png
41 | favicon: static/images/favicon.ico
42 | font:
43 | text: Ubuntu
44 | code: Roboto mono
45 | palette:
46 | # Light mode
47 | - scheme: default
48 | primary: custom
49 | accent: custom
50 | dark: custom
51 | toggle:
52 | icon: material/toggle-switch
53 | name: Switch to dark mode
54 | # Dark mode
55 | - scheme: slate
56 | primary: green
57 | accent: green
58 | dark: custom
59 | toggle:
60 | icon: material/toggle-switch-off-outline
61 | name: Switch to light mode
62 |
63 | plugins:
64 | - search
65 | - autorefs
66 | - gen-files:
67 | scripts:
68 | - scripts/generate_docs.py
69 | - literate-nav:
70 | nav_file: SUMMARY.md
71 | - section-index
72 | - mkdocstrings:
73 | default_handler: python
74 | handlers:
75 | python:
76 | paths: [app/src]
77 | options:
78 | group_by_category: true
79 | show_category_heading: true
80 | show_signature_annotations: true
81 | line_length: 60
82 | separate_signature: true
83 | show_root_toc_entry: false
84 | merge_init_into_class: true
85 |
86 | extra_css:
87 | - static/css/mkdocstrings.css
88 |
89 | # #Extra material theme settings
90 | extra:
91 | disqus: PythonBiellaGroup
92 | generator: false
93 | social:
94 | - icon: fontawesome/solid/paper-plane
95 | link: mailto:pythonbiellagroup@gmail.com
96 | name: Write to us
97 | - icon: fontawesome/brands/twitter
98 | link:
99 | name: Twitter
100 | - icon: fontawesome/brands/gitlab
101 | link:
102 | name: Gitlab repo
103 | - icon: fontawesome/brands/linkedin
104 | link:
105 | name: Linkedin
106 |
107 | markdown_extensions:
108 | - def_list
109 | - pymdownx.highlight:
110 | anchor_linenums: true
111 | line_spans: __span
112 | pygments_lang_class: true
113 | - pymdownx.inlinehilite
114 | - pymdownx.snippets
115 | - pymdownx.superfences
116 |
117 | # Page tree
118 | nav:
119 | # rest of the navigation...
120 | # defer to gen-files + literate-nav
121 | - Home: index.md
122 | - How to use: howto/index.md
123 | - Code Reference: reference/
124 | # rest of the navigation...
125 |
126 | watch:
127 | - app
128 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/mkdocs.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # To launch mkdocs use: mkdocs serve
3 | # To launch with debug mode use: mkdocs serve -v
4 | # To build mkdocs use: mkdocs build --clean
5 |
6 | # Project information
7 | site_name: PythonBiellaGroup
8 | site_url: https://github.com/PythonBiellaGroup/Bear
9 | site_description: Bear is a Python template system for quick python development.
10 | site_author: PythonBiellaGroup
11 | copyright: Made with ♥ by PBG.
12 |
13 | # Repository
14 | repo_url: https://github.com/PythonBiellaGroup/Bear
15 | repo_name: PythonBiellaGroup/Bear
16 | edit_uri: "" #disables edit button
17 |
18 | # Configuration
19 | theme:
20 | name: material
21 | # Static files
22 | static_templates:
23 | - 404.html
24 |
25 | # Don't include MkDocs' JavaScript
26 | # include_search_page: false
27 | # search_index_only: true
28 | #
29 | language: en
30 | features:
31 | - navigation.tabs
32 | - search.suggest
33 | - search.highlight
34 | # - search.share
35 | # - header.autohide
36 | # - content.code.annotate
37 | # - navigation.indexes
38 | # - navigation.sections
39 | # - navigation.tabs
40 | logo: static/images/logo.png
41 | favicon: static/images/favicon.ico
42 | font:
43 | text: Ubuntu
44 | code: Roboto mono
45 | palette:
46 | # Light mode
47 | - scheme: default
48 | primary: custom
49 | accent: custom
50 | dark: custom
51 | toggle:
52 | icon: material/toggle-switch
53 | name: Switch to dark mode
54 | # Dark mode
55 | - scheme: slate
56 | primary: green
57 | accent: green
58 | dark: custom
59 | toggle:
60 | icon: material/toggle-switch-off-outline
61 | name: Switch to light mode
62 |
63 | plugins:
64 | - search
65 | - autorefs
66 | - gen-files:
67 | scripts:
68 | - scripts/generate_docs.py
69 | - literate-nav:
70 | nav_file: SUMMARY.md
71 | - section-index
72 | - mkdocstrings:
73 | default_handler: python
74 | handlers:
75 | python:
76 | paths: [app/src]
77 | options:
78 | group_by_category: true
79 | show_category_heading: true
80 | show_signature_annotations: true
81 | line_length: 60
82 | separate_signature: true
83 | show_root_toc_entry: false
84 | merge_init_into_class: true
85 |
86 | extra_css:
87 | - static/css/mkdocstrings.css
88 |
89 | # #Extra material theme settings
90 | extra:
91 | disqus: PythonBiellaGroup
92 | generator: false
93 | social:
94 | - icon: fontawesome/solid/paper-plane
95 | link: mailto:pythonbiellagroup@gmail.com
96 | name: Write to us
97 | - icon: fontawesome/brands/twitter
98 | link:
99 | name: Twitter
100 | - icon: fontawesome/brands/gitlab
101 | link:
102 | name: Gitlab repo
103 | - icon: fontawesome/brands/linkedin
104 | link:
105 | name: Linkedin
106 |
107 | markdown_extensions:
108 | - def_list
109 | - pymdownx.highlight:
110 | anchor_linenums: true
111 | line_spans: __span
112 | pygments_lang_class: true
113 | - pymdownx.inlinehilite
114 | - pymdownx.snippets
115 | - pymdownx.superfences
116 |
117 | # Page tree
118 | nav:
119 | # rest of the navigation...
120 | # defer to gen-files + literate-nav
121 | - Home: index.md
122 | - How to use: howto/index.md
123 | - Code Reference: reference/
124 | # rest of the navigation...
125 |
126 | watch:
127 | - app
128 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Python Biella Group: Bear
2 |
This project is the base template for a Python project that we use in PythonBiellaGroup to create our tools, libraries and projects.
4 |
We call it **Bear** because it's a **B**ase **E**nvironment for **A**ny **R**easonable project and also because the bear is the symbol of the city of Biella.
6 |
7 | It's based on **Modern Python Tools** such as:
8 | - cookiecutter: for templating
9 | - poetry: for dependency management
10 | - pdm: for dependency management
11 | - flake8: for linting
12 | - mypy: for static type checking
13 | - black: for code formatting
14 | - bandit: for security checks
15 | - pre-commit: for pre-commit hooks
16 | - pdm: for linting
17 |
We suggest using **VSCode** as the IDE for this project, since you can find a lot of prepared configurations for:
19 | - debugging
20 | - testing
21 | - settings
22 | - extensions
23 |
You can find extensive documentation created with **mkdocs** at [this GitHub Pages link](https://pythonbiellagroup.github.io/bear/)
25 |
26 | ## How to use it
27 |
28 | You can use this repository as a template to create your own project with **cookiecutter**
29 |
30 | Just remember to add **cookiecutter** as a dependency into your local version installation of python using pip (or something else)
31 | ```bash
32 | pip install cookiecutter
33 | ```
34 |
35 | You can use this following command (both on Windows and Posix systems):
36 | ```bash
37 | cookiecutter gh:PythonBiellaGroup/bear
38 | ```
39 |
40 | Or also with the full https path:
41 | ```bash
42 | cookiecutter https://github.com/PythonBiellaGroup/Bear
43 | ```
44 |
Once you launch these commands, just follow the guide and fill in the required fields.
46 |
47 | ## How to maintain it
48 |
49 | Unfortunately there is no automatic way to update the templates inside cookiecutter yet, you have to do it manually.
50 |
51 | 1. Clone the repository
52 | 2. Launch the dependency installation using: poetry or pdm
53 | 1. `poetry install`
54 | 2. or `pdm install`
55 | 3. Modify something
56 | 4. If you want to test a specific inner template (like the Base template) you can launch: `cookiecutter .` to test cookiecutter project generation
57 | 1. After that you can modify the template
58 | 2. When you finish your modification you have to copy and paste all the modifications manually inside the cookiecutter generation folder
5. Then remember to open a pull request or push to the repository (to develop first) if you have the permissions.
60 |
61 | Please remember also to follow a Gitflow workflow and to use the **develop** branch as the main branch for development.
62 |
63 | ### Documentation
64 |
To build and maintain the documentation, use the mkdocs commands provided in the Makefile (for example `make docs` to serve it locally and `make docs_build` to build it).
66 |
67 | ## How to contribute
68 |
69 | You can help us to improve this project by opening issues or doing some pull request if you want to add more functionalities or if you want to fix some bugs.
70 |
71 | Please follow the [Contributing guidelines](CONTRIBUTING.md) to contribute to this project.
72 |
73 | ## License
74 |
75 | This repository is licensed under the MIT license. See LICENSE file for details.
76 |
77 | If you use this repository in your work, please cite it as or just write to us to say thanks with your feedback and experience :)
78 |
79 | ## Documentation
80 |
81 | Useful links and other documentation website you can check
82 |
83 | - [Our website with the documentation](https://pythonbiellagroup.it)
84 | - [The repository for our documentation](https://github.com/PythonBiellaGroup/doc-website)
85 | - [Hypermodern python repository](https://github.com/cjolowicz/hypermodern-python)
86 | - [The hypermodern python official medium article](https://medium.com/@cjolowicz/hypermodern-python-d44485d9d769)
87 | - [Modern Python repository](https://github.com/rhettinger/modernpython)
88 | - [Awesome Pyproject](https://github.com/carlosperate/awesome-pyproject/blob/master/README.md)
89 | - [Python developer roadmap](https://roadmap.sh/python/)
90 | - [Creating a modern python development environment medium article](https://itnext.io/creating-a-modern-python-development-environment-3d383c944877)
91 | - [Modern python interesting practices](https://www.stuartellis.name/articles/python-modern-practices/)
92 | - [4 Keys to write modern python in 2022](https://www.infoworld.com/article/3648061/4-keys-to-writing-modern-python-in-2022.html)
93 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | ####----Cookiecutter commands----####
2 | .PHONY: cookiecutter
3 | bake: ## bake without inputs and overwrite if exists.
4 | @uv run cookiecutter . --no-input -f
5 |
6 | bake-clear: ## remove a previous cookiecutter bake
7 | @rm -rf testone || true
8 |
9 | bake-test: ## bake the python project to test
10 | @rm -rf testone || true
11 | @uv run cookiecutter . --no-input -f --config-file config.yaml
12 | @code testone
13 |
14 | ####----Basic configurations----####
15 | .PHONY: pre-commit
16 | install_pre_commit: ## configure and install pre commit tool
17 | @uv run pre-commit install
18 |
19 | uninstall_pre_commit: ## configure and install pre commit tool
20 | @uv run pre-commit uninstall
21 |
22 | .PHONY: install
23 | install: ## Install the uv and python environment
24 | @echo "🚀 Creating virtual environment using uv"
25 | @uv run --env-file .env -- uv sync --all-groups && uv pip install -e .
26 |
27 | update: ## Update the uv environment
28 | @echo "🚀 Updating virtual environment using uv"
29 | @uv run --env-file .env -- uv lock --upgrade && uv sync --all-groups && uv pip install -e .
30 |
31 | .PHONY: check_project
32 | check_project: secrets ## Run code quality tools.
33 | @echo "🚀 Checking uv lock file consistency with 'pyproject.toml': Running uv lock --locked"
34 | @uv lock --locked
35 | @echo "🚀 Linting code: Running pre-commit"
36 | @uv run pre-commit run -a
37 |
38 | # # This is different from the gitleaks pre-commit since it checks also unstaged files
39 | # @gitleaks protect --no-banner --verbose
40 |
41 | .PHONY: test
42 | test: ## Test the code with pytest.
43 | @echo "🚀 Testing code: Running pytest"
44 | @uv run pytest --cov --cov-config=pyproject.toml --cov-report=xml tests
45 |
46 | ### Project specific tasks
47 | .PHONY: project
launch_py3: ## Launch the main file with python 3
	@export PYTHONPATH=$$(pwd) && python3 app/main.py
launch_py: ## Launch the main file with python
	@export PYTHONPATH=$$(pwd) && python app/main.py
52 |
53 | ####----Documentation----####
54 | .PHONY: docs
55 | docs: ## Launch mkdocs documentation locally
56 | uv run mkdocs serve
57 |
58 | docs_build: ## Build mkdocs for local test
59 | uv run mkdocs build
60 |
61 | docs_launch_local: ## Launch mkdocs documentation locally with the local building artefacts
62 | uv run mkdocs build
63 | uv run mkdocs serve -v --dev-addr=0.0.0.0:8000
64 |
65 | docs_deploy: ## Deploy mkdocs documentation to github pages
66 | uv run mkdocs build -c -v --site-dir public
67 | uv run mkdocs gh-deploy --force
68 |
69 | docs_public: ## Build mkdocs for official online release
70 | uv run mkdocs build -c -v --site-dir public
71 |
72 | ####----Package Release----####
73 | .PHONY: package
74 | package_build: ## Build the python package
75 | poetry build
76 |
77 | pypi: ## Build and upload the python package to pypi
78 | python setup.py sdist
79 | python setup.py bdist_wheel --universal
80 | twine upload dist/*
81 |
82 | ####----Docker----####
83 | .PHONY: docker
84 |
85 | launch: ## launch the python application containers
86 | docker compose -p bear up --build -d
87 |
88 | launch_all: ## launch the backend project containers only
89 | docker compose -p bear up --build -d app
90 |
91 | launch_db: ## launch the database container only
92 | docker compose -p bear up --build -d db
93 |
94 | launch_dremio: ## launch the dremio container only
95 | docker compose -p bear up --build -d dremio
96 |
97 | check: ## check the status of the docker containers
98 | docker ps -a | grep "bear"
99 |
100 | check_logs: ## check the logs of the application container
101 | docker logs -t app
102 |
103 | check_exec: ## exec bash in the python app container
104 | docker exec -it app /bin/bash
105 |
106 | stop: ## stop all containers
107 | docker compose -p bear down
108 | # docker compose down -v
109 |
110 | stop_clear: ## stop containers and clean the volumes
111 | docker compose -p bear down -v
112 |
113 | clean_volumes: ## clean the docker volumes
114 | docker volume prune
115 |
116 | ####----Project----####
117 | .PHONY: help
118 | help: ## Ask for help in the Makefile
119 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
120 |
121 | .PHONY: project
122 | clean: ## Clean the projects of unwanted cached folders
123 | @rm -rf **/.ipynb_checkpoints **/.pytest_cache **/__pycache__ **/**/__pycache__ ./notebooks/ipynb_checkpoints .pytest_cache ./dist ./volumes
124 | @rm -rf {{cookiecutter.directory_name}} readmes cookiecutter.json hooks replay .gitlab
125 |
restore: ## Restore the projects to the start (hard clean)
	rm -rf **/.ipynb_checkpoints **/.pytest_cache **/__pycache__ **/**/__pycache__ ./notebooks/ipynb_checkpoints .pytest_cache ./dist .venv pdm.lock
128 |
129 | .DEFAULT_GOAL := help
130 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/Makefile:
--------------------------------------------------------------------------------
1 | ####----Cookiecutter commands----####
2 | .PHONY: cookiecutter
3 | bake: ## bake without inputs and overwrite if exists.
4 | @cookiecutter --no-input . --overwrite-if-exists
5 |
6 | bake-clear: ## remove a previous cookiecutter bake
7 | @rm -rf testone || true
8 |
9 | bake-test: ## bake the python project to test
10 | @rm -rf testone || true
11 | @uv run cookiecutter --no-input . --overwrite-if-exists --directory="python" --config-file config.yaml
12 | @code testone
13 |
14 | ####----Basic configurations----####
15 | .PHONY: pre-commit
16 | install_pre_commit: ## configure and install pre commit tool
17 | @uv run pre-commit install
18 |
uninstall_pre_commit: ## uninstall the pre commit tool
	@uv run pre-commit uninstall
21 |
22 | .PHONY: install
23 | install: ## Install the uv and python environment
24 | @echo "🚀 Creating virtual environment using uv"
25 | @uv run --env-file .env -- uv sync --all-groups && uv pip install -e .
26 |
27 | update: ## Update the uv environment
28 | @echo "🚀 Updating virtual environment using uv"
29 | @uv run --env-file .env -- uv lock --upgrade && uv sync --all-groups && uv pip install -e .
30 |
31 | .PHONY: check_project
32 | check_project: secrets ## Run code quality tools.
33 | @echo "🚀 Checking uv lock file consistency with 'pyproject.toml': Running uv lock --locked"
34 | @uv lock --locked
35 | @echo "🚀 Linting code: Running pre-commit"
36 | @uv run pre-commit run -a
37 |
38 | # # This is different from the gitleaks pre-commit since it checks also unstaged files
39 | # @gitleaks protect --no-banner --verbose
40 |
41 | .PHONY: test
42 | test: ## Test the code with pytest.
43 | @echo "🚀 Testing code: Running pytest"
44 | @uv run pytest --cov --cov-config=pyproject.toml --cov-report=xml tests
45 |
46 | ### Project specific tasks
47 | .PHONY: project
48 | launch_py3: # Launch the main file with python 3
49 | @export PYTHONPATH=$(pwd) && python3 app/main.py
50 | launch_py: # Launch the main file with python
51 | @export PYTHONPATH=$(pwd) && python app/main.py
52 |
53 | ####----Documentation----####
54 | .PHONY: docs
55 | docs: ## Launch mkdocs documentation locally
56 | uv run mkdocs serve
57 |
58 | docs_build: ## Build mkdocs for local test
59 | uv run mkdocs build
60 |
61 | docs_launch_local: ## Launch mkdocs documentation locally with the local building artefacts
62 | uv run mkdocs build
63 | uv run mkdocs serve -v --dev-addr=0.0.0.0:8000
64 |
65 | docs_deploy: ## Deploy mkdocs documentation to github pages
66 | uv run mkdocs build -c -v --site-dir public
67 | uv run mkdocs gh-deploy --force
68 |
69 | docs_public: ## Build mkdocs for official online release
70 | uv run mkdocs build -c -v --site-dir public
71 |
72 | ####----Package Release----####
73 | .PHONY: package
74 | package_build: ## Build the python package
75 | poetry build
76 |
77 | pypi: ## Build and upload the python package to pypi
78 | python setup.py sdist
79 | python setup.py bdist_wheel --universal
80 | twine upload dist/*
81 |
82 | ####----Docker----####
83 | .PHONY: docker
84 |
85 | launch: ## launch the python application containers
86 | docker compose -p bear up --build -d
87 |
88 | launch_all: ## launch the backend project containers only
89 | docker compose -p bear up --build -d app
90 |
91 | launch_db: ## launch the database container only
92 | docker compose -p bear up --build -d db
93 |
94 | launch_dremio: ## launch the dremio container only
95 | docker compose -p bear up --build -d dremio
96 |
97 | check: ## check the status of the docker containers
98 | docker ps -a | grep "bear"
99 |
100 | check_logs: ## check the logs of the application container
101 | docker logs -t app
102 |
103 | check_exec: ## exec bash in the python app container
104 | docker exec -it app /bin/bash
105 |
106 | stop: ## stop all containers
107 | docker compose -p bear down
108 | # docker compose down -v
109 |
110 | stop_clear: ## stop containers and clean the volumes
111 | docker compose -p bear down -v
112 |
113 | clean_volumes: ## clean the docker volumes
114 | docker volume prune
115 |
116 | ####----Project----####
117 | .PHONY: help
118 | help: ## Ask for help in the Makefile
119 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
120 |
121 | .PHONY: project
122 | clean: ## Clean the projects of unwanted cached folders
123 | @rm -rf **/.ipynb_checkpoints **/.pytest_cache **/__pycache__ **/**/__pycache__ ./notebooks/ipynb_checkpoints .pytest_cache ./dist ./volumes
124 | @rm -rf {{cookiecutter.directory_name}} readmes cookiecutter.json hooks replay
125 |
restore: ## Restore the projects to the start (hard clean)
	rm -rf **/.ipynb_checkpoints **/.pytest_cache **/__pycache__ **/**/__pycache__ ./notebooks/ipynb_checkpoints .pytest_cache ./dist .venv pdm.lock
128 |
129 | .DEFAULT_GOAL := help
130 |
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.187.0/containers/docker-existing-docker-compose
3 | // If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml.
4 | {
5 | "name": "PBG-devcontainer",
6 | // Update the 'dockerComposeFile' list if you have more compose files or use different names.
7 | // The .devcontainer/docker-compose.yml file contains any overrides you need/want to make.
8 | "dockerComposeFile": [
9 | "docker-compose.yml"
10 | ],
11 | // The 'service' property is the name of the service for the container that VS Code should
12 | // use. Update this value and .devcontainer/docker-compose.yml to the real service name.
13 | "service": "develop",
14 | // The optional 'workspaceFolder' property is the path VS Code should open by default when
15 | // connected. This is typically a file mount in .devcontainer/docker-compose.yml
16 | "workspaceFolder": "/workspace",
17 | // Set *default* container specific settings.json values on container create.
18 | "customizations": {
19 | "vscode": {
20 | "settings": {
21 | "python.pythonPath": "${workspaceFolder}/.venv/bin/python",
22 | "python.terminal.activateEnvInCurrentTerminal": true,
23 | "python.linting.flake8Enabled": true,
24 | "python.linting.mypyEnabled": false,
25 | "python.linting.enabled": true,
26 | "python.testing.pytestArgs": [
27 | "test"
28 | ],
29 | "python.testing.unittestEnabled": false,
30 | "python.testing.pytestEnabled": true,
31 | "editor.formatOnSave": true,
32 | "files.autoSave": "afterDelay",
33 | "python.formatting.provider": "black",
34 | "autoDocstring.customTemplatePath": ".vscode/google_no_types.mustache",
35 | "window.title": "${rootName} ${separator} ${activeEditorShort}",
36 | "[python]": {
37 | "editor.insertSpaces": true,
38 | "editor.tabSize": 4
39 | },
40 | "files.exclude": {
41 | "**/__pycache__": true
42 | }
43 | },
44 | "extensions": [
45 | "alefragnani.bookmarks",
46 | "streetsidesoftware.code-spell-checker",
47 | "ms-azuretools.vscode-docker",
48 | "docsmsft.docs-markdown",
49 | "docsmsft.docs-preview",
50 | "docsmsft.docs-yaml",
51 | "mikestead.dotenv",
52 | "sleistner.vscode-fileutils",
53 | "mhutchie.git-graph",
54 | "donjayamanne.githistory",
55 | "github.codespaces",
56 | "github.copilot",
57 | "github.remotehub",
58 | "eamodio.gitlens",
59 | "oderwat.indent-rainbow",
60 | "streetsidesoftware.code-spell-checker-italian",
61 | "ms-toolsai.jupyter",
62 | "ms-toolsai.jupyter-keymap",
63 | "ms-vsliveshare.vsliveshare",
64 | "yzhang.markdown-all-in-one",
65 | "pkief.material-icon-theme",
66 | "s3gf4ult.monokai-vibrant",
67 | "pnp.polacode",
68 | "ms-ossdata.vscode-postgresql",
69 | "ms-python.vscode-pylance",
70 | "ms-python.python",
71 | "njpwerner.autodocstring",
72 | "kevinrose.vsc-python-indent",
73 | "mechatroner.rainbow-csv",
74 | "ms-vscode-remote.remote-containers",
75 | "ms-vscode-remote.remote-ssh",
76 | "ms-vscode-remote.remote-ssh-edit",
77 | "ms-vscode-remote.remote-wsl",
78 | "ms-vscode-remote.vscode-remote-extensionpack",
79 | "liveecommerce.vscode-remote-workspace",
80 | "medo64.render-crlf",
81 | "stkb.rewrap",
82 | "mtxr.sqltools-driver-pg",
83 | "mtxr.sqltools-driver-sqlite",
84 | "arjun.swagger-viewer",
85 | "wayou.vscode-todo-highlight",
86 | "gruntfuggly.todo-tree",
87 | "visualstudioexptteam.vscodeintellicode",
88 | "webhint.vscode-webhint",
89 | "redhat.vscode-yaml",
90 | "charliermarsh.ruff"
91 | ]
92 | }
93 | },
94 | // Add the IDs of extensions you want installed when the container is created.
95 | // Use 'forwardPorts' to make a list of ports inside the container available locally.
96 | "forwardPorts": [
97 | 8042
98 | ]
99 | // Uncomment the next line if you want start specific services in your Docker Compose config.
100 | // "runServices": [],
101 | // Uncomment the next line if you want to keep your containers running after VS Code shuts down.
102 | // "shutdownAction": "none",
103 | // Uncomment the next line to run commands after the container is created - for example installing curl.
104 | // "postCreateCommand": "apt-get update && apt-get install -y curl",
105 | // Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
106 | // "remoteUser": "vscode"
107 | }
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.187.0/containers/docker-existing-docker-compose
3 | // If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml.
4 | {
5 | "name": "PBG-devcontainer",
6 | // Update the 'dockerComposeFile' list if you have more compose files or use different names.
7 | // The .devcontainer/docker-compose.yml file contains any overrides you need/want to make.
8 | "dockerComposeFile": [
9 | "docker-compose.yml"
10 | ],
11 | // The 'service' property is the name of the service for the container that VS Code should
12 | // use. Update this value and .devcontainer/docker-compose.yml to the real service name.
13 | "service": "develop",
14 | // The optional 'workspaceFolder' property is the path VS Code should open by default when
15 | // connected. This is typically a file mount in .devcontainer/docker-compose.yml
16 | "workspaceFolder": "/workspace",
17 | // Set *default* container specific settings.json values on container create.
18 | "customizations": {
19 | "vscode": {
20 | "settings": {
21 | "python.pythonPath": "${workspaceFolder}/.venv/bin/python",
22 | "python.terminal.activateEnvInCurrentTerminal": true,
23 | "python.linting.flake8Enabled": true,
24 | "python.linting.mypyEnabled": false,
25 | "python.linting.enabled": true,
26 | "python.testing.pytestArgs": [
27 | "test"
28 | ],
29 | "python.testing.unittestEnabled": false,
30 | "python.testing.pytestEnabled": true,
31 | "editor.formatOnSave": true,
32 | "files.autoSave": "afterDelay",
33 | "python.formatting.provider": "black",
34 | "autoDocstring.customTemplatePath": ".vscode/google_no_types.mustache",
35 | "window.title": "${rootName} ${separator} ${activeEditorShort}",
36 | "[python]": {
37 | "editor.insertSpaces": true,
38 | "editor.tabSize": 4
39 | },
40 | "files.exclude": {
41 | "**/__pycache__": true
42 | }
43 | },
44 | "extensions": [
45 | "alefragnani.bookmarks",
46 | "streetsidesoftware.code-spell-checker",
47 | "ms-azuretools.vscode-docker",
48 | "docsmsft.docs-markdown",
49 | "docsmsft.docs-preview",
50 | "docsmsft.docs-yaml",
51 | "mikestead.dotenv",
52 | "sleistner.vscode-fileutils",
53 | "mhutchie.git-graph",
54 | "donjayamanne.githistory",
55 | "github.codespaces",
56 | "github.copilot",
57 | "github.remotehub",
58 | "eamodio.gitlens",
59 | "oderwat.indent-rainbow",
60 | "streetsidesoftware.code-spell-checker-italian",
61 | "ms-toolsai.jupyter",
62 | "ms-toolsai.jupyter-keymap",
63 | "ms-vsliveshare.vsliveshare",
64 | "yzhang.markdown-all-in-one",
65 | "pkief.material-icon-theme",
66 | "s3gf4ult.monokai-vibrant",
67 | "pnp.polacode",
68 | "ms-ossdata.vscode-postgresql",
69 | "ms-python.vscode-pylance",
70 | "ms-python.python",
71 | "njpwerner.autodocstring",
72 | "kevinrose.vsc-python-indent",
73 | "mechatroner.rainbow-csv",
74 | "ms-vscode-remote.remote-containers",
75 | "ms-vscode-remote.remote-ssh",
76 | "ms-vscode-remote.remote-ssh-edit",
77 | "ms-vscode-remote.remote-wsl",
78 | "ms-vscode-remote.vscode-remote-extensionpack",
79 | "liveecommerce.vscode-remote-workspace",
80 | "medo64.render-crlf",
81 | "stkb.rewrap",
82 | "mtxr.sqltools-driver-pg",
83 | "mtxr.sqltools-driver-sqlite",
84 | "arjun.swagger-viewer",
85 | "wayou.vscode-todo-highlight",
86 | "gruntfuggly.todo-tree",
87 | "visualstudioexptteam.vscodeintellicode",
88 | "webhint.vscode-webhint",
89 | "redhat.vscode-yaml",
90 | "charliermarsh.ruff"
91 | ]
92 | }
93 | },
94 | // Add the IDs of extensions you want installed when the container is created.
95 | // Use 'forwardPorts' to make a list of ports inside the container available locally.
96 | "forwardPorts": [
97 | 8042
98 | ]
99 | // Uncomment the next line if you want start specific services in your Docker Compose config.
100 | // "runServices": [],
101 | // Uncomment the next line if you want to keep your containers running after VS Code shuts down.
102 | // "shutdownAction": "none",
103 | // Uncomment the next line to run commands after the container is created - for example installing curl.
104 | // "postCreateCommand": "apt-get update && apt-get install -y curl",
105 | // Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
106 | // "remoteUser": "vscode"
107 | }
--------------------------------------------------------------------------------
/app/core/utils/files.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import shutil
4 |
5 | import yaml
6 | from loguru import logger
7 |
8 |
def check_if_file_exists(file: str) -> bool:
    """Check whether the given path exists and refers to a regular file.

    Args:
        file (str): the file path or name

    Returns:
        (bool): True if the file exists and is a regular file, False otherwise
    """
    # os.path.isfile already returns False for missing paths, so the
    # extra os.path.exists check in the original was redundant.
    return os.path.isfile(file)
22 |
23 |
def init_folder(path: str, output=False) -> bool:  # noqa
    """Create a folder at the given path if it does not already exist.

    Args:
        path (str): path of the folder
        output (bool, optional): print a message when creation fails. Defaults to False.

    Returns:
        (bool): True if the folder was created, False otherwise
    """
    try:
        os.mkdir(path)
    except Exception as reason:
        # Creation failed — most commonly because the folder already exists.
        if output:
            print(f"{path} already exists: {reason}")
        return False
    return True
41 |
42 |
def remove_folder(folder_path: str, force: bool = False) -> bool:  # noqa
    """Remove a folder if it exists; return False when it does not.

    Non-empty folders are only removed with force=True — be careful, a
    forced removal deletes everything underneath the path.

    Args:
        folder_path (str): path of the folder
        force (bool, optional): remove the folder even when it is not empty. Defaults to False.

    Returns:
        (bool): True if the folder was removed, False otherwise
    """
    if not os.path.exists(folder_path):
        return False

    if force:
        # Recursive delete: also handles non-empty folders (dangerous).
        shutil.rmtree(folder_path, ignore_errors=True)
    else:
        # Safe delete: os.rmdir fails if the folder still has content.
        os.rmdir(folder_path)
    print(f"Removed folder at {folder_path}")
    return True
68 |
69 |
70 | # Yaml Library and functions
def get_folder_path(custom_path: str = "", force_creation: bool = False) -> str:  # noqa
    """Resolve a relative path into the absolute folder path (OS independent).

    Args:
        custom_path (str, optional): the relative path to resolve. Defaults to "".
        force_creation (bool, optional): create the folder when it does not exist. Defaults to False.

    Returns:
        str: the absolute path of the requested folder
    """
    # An empty or missing custom path resolves to the current working folder.
    target = os.path.abspath(custom_path) if custom_path else os.path.abspath("")

    # Warn when the resolved folder is missing and optionally create it.
    if not os.path.exists(target):
        logger.error("WARNING: Path doesn't exist")
        if force_creation:
            logger.debug("Force creation folder")
            try:
                os.makedirs(target)
            except Exception as reason:
                logger.error(f"Impossible to create the folder: {reason}")

    logger.debug(f"PATH: {target}, force creation: {force_creation}")
    return target
100 |
101 |
def search_path(to_path: str, filename: str) -> str:
    """Build the path of a yaml file inside a folder, defaulting missing parts.

    Args:
        to_path (str): folder where the file should live (empty → current folder)
        filename (str): name of the file; ".yml" is appended when missing,
            and an empty name defaults to "result.yml"

    Returns:
        str: the joined path, or "" when the path cannot be built
    """
    try:
        if to_path == "" or to_path is None:
            to_path = get_folder_path("./")

        if filename == "" or filename is None:
            filename = "result.yml"

        # BUG FIX: re.search returns a Match object or None, never False,
        # so the original `is False` comparison never appended the extension.
        if re.search(r"yml", filename) is None:
            filename = filename + ".yml"

        return os.path.join(to_path, filename)

    except Exception as message:
        logger.error(f"Path: {to_path}, or filename: {filename} not found: {message}")
        return ""
127 |
128 |
def read_yaml(file_path: str, filename: str = "") -> dict:
    """Read a yaml file from disk.

    Args:
        file_path (str): folder to load from
        filename (str, optional): name of the file to load. Defaults to "".

    Returns:
        dict: the dictionary read from the yaml file, or {} on failure
    """
    file_path = search_path(file_path, filename)

    try:
        # The context manager closes the file; the explicit close() in the
        # original was redundant.
        with open(file_path) as file:
            data = yaml.safe_load(file)
        logger.debug(f"Yaml file loaded from: {file_path}")
        return data

    except Exception as message:
        logger.error(f"Impossible to load the file with path: {file_path}")
        logger.error(f"Error: {message}")
        return {}
152 |
153 |
def write_yaml(to_path: str, filename: str, obj_save: object) -> bool:
    """Write a python object to a generic yaml file.

    Args:
        to_path (str): the folder to write the file into
        filename (str): the name of the file
        obj_save (obj): the python object to save (for example a dictionary)

    Returns:
        (bool): True on success, False otherwise
    """
    file_path = search_path(to_path, filename)

    try:
        # The context manager closes the file; the explicit close() in the
        # original was redundant.
        with open(file_path, "w") as file:
            yaml.dump(obj_save, file)
        logger.debug(f"File successfully written to: {file_path}")
        return True

    except Exception as message:
        logger.error(f"Impossible to write the file: {message}")
        return False
177 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/files.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import shutil
4 |
5 | import yaml
6 | from loguru import logger
7 |
8 |
def check_if_file_exists(file: str) -> bool:
    """Check whether the given path exists and refers to a regular file.

    Args:
        file (str): the file path or name

    Returns:
        (bool): True if the file exists and is a regular file, False otherwise
    """
    # os.path.isfile already returns False for missing paths, so the
    # extra os.path.exists check in the original was redundant.
    return os.path.isfile(file)
22 |
23 |
def init_folder(path: str, output=False) -> bool:  # noqa
    """Create a folder at the given path if it does not already exist.

    Args:
        path (str): path of the folder
        output (bool, optional): print a message when creation fails. Defaults to False.

    Returns:
        (bool): True if the folder was created, False otherwise
    """
    try:
        os.mkdir(path)
    except Exception as reason:
        # Creation failed — most commonly because the folder already exists.
        if output:
            print(f"{path} already exists: {reason}")
        return False
    return True
41 |
42 |
def remove_folder(folder_path: str, force: bool = False) -> bool:  # noqa
    """Remove a folder if it exists; return False when it does not.

    Non-empty folders are only removed with force=True — be careful, a
    forced removal deletes everything underneath the path.

    Args:
        folder_path (str): path of the folder
        force (bool, optional): remove the folder even when it is not empty. Defaults to False.

    Returns:
        (bool): True if the folder was removed, False otherwise
    """
    if not os.path.exists(folder_path):
        return False

    if force:
        # Recursive delete: also handles non-empty folders (dangerous).
        shutil.rmtree(folder_path, ignore_errors=True)
    else:
        # Safe delete: os.rmdir fails if the folder still has content.
        os.rmdir(folder_path)
    print(f"Removed folder at {folder_path}")
    return True
68 |
69 |
70 | # Yaml Library and functions
def get_folder_path(custom_path: str = "", force_creation: bool = False) -> str:  # noqa
    """Resolve a relative path into the absolute folder path (OS independent).

    Args:
        custom_path (str, optional): the relative path to resolve. Defaults to "".
        force_creation (bool, optional): create the folder when it does not exist. Defaults to False.

    Returns:
        str: the absolute path of the requested folder
    """
    # An empty or missing custom path resolves to the current working folder.
    target = os.path.abspath(custom_path) if custom_path else os.path.abspath("")

    # Warn when the resolved folder is missing and optionally create it.
    if not os.path.exists(target):
        logger.error("WARNING: Path doesn't exist")
        if force_creation:
            logger.debug("Force creation folder")
            try:
                os.makedirs(target)
            except Exception as reason:
                logger.error(f"Impossible to create the folder: {reason}")

    logger.debug(f"PATH: {target}, force creation: {force_creation}")
    return target
100 |
101 |
def search_path(to_path: str, filename: str) -> str:
    """Build the path of a yaml file inside a folder, defaulting missing parts.

    Args:
        to_path (str): folder where the file should live (empty → current folder)
        filename (str): name of the file; ".yml" is appended when missing,
            and an empty name defaults to "result.yml"

    Returns:
        str: the joined path, or "" when the path cannot be built
    """
    try:
        if to_path == "" or to_path is None:
            to_path = get_folder_path("./")

        if filename == "" or filename is None:
            filename = "result.yml"

        # BUG FIX: re.search returns a Match object or None, never False,
        # so the original `is False` comparison never appended the extension.
        if re.search(r"yml", filename) is None:
            filename = filename + ".yml"

        return os.path.join(to_path, filename)

    except Exception as message:
        logger.error(f"Path: {to_path}, or filename: {filename} not found: {message}")
        return ""
127 |
128 |
def read_yaml(file_path: str, filename: str = "") -> dict:
    """Read a yaml file from disk.

    Args:
        file_path (str): folder to load from
        filename (str, optional): name of the file to load. Defaults to "".

    Returns:
        dict: the dictionary read from the yaml file, or {} on failure
    """
    file_path = search_path(file_path, filename)

    try:
        # The context manager closes the file; the explicit close() in the
        # original was redundant.
        with open(file_path) as file:
            data = yaml.safe_load(file)
        logger.debug(f"Yaml file loaded from: {file_path}")
        return data

    except Exception as message:
        logger.error(f"Impossible to load the file with path: {file_path}")
        logger.error(f"Error: {message}")
        return {}
152 |
153 |
def write_yaml(to_path: str, filename: str, obj_save: object) -> bool:
    """Write a python object to a generic yaml file.

    Args:
        to_path (str): the folder to write the file into
        filename (str): the name of the file
        obj_save (obj): the python object to save (for example a dictionary)

    Returns:
        (bool): True on success, False otherwise
    """
    file_path = search_path(to_path, filename)

    try:
        # The context manager closes the file; the explicit close() in the
        # original was redundant.
        with open(file_path, "w") as file:
            yaml.dump(obj_save, file)
        logger.debug(f"File successfully written to: {file_path}")
        return True

    except Exception as message:
        logger.error(f"Impossible to write the file: {message}")
        return False
177 |
--------------------------------------------------------------------------------
/app/core/utils/time.py:
--------------------------------------------------------------------------------
1 | import datetime as dt
2 | from datetime import date, datetime, timedelta, timezone
3 | from typing import Any, Optional
4 |
5 | import numpy as np
6 | import pandas as pd
7 |
8 | from app.core.utils.conversions import date2millis, millis2date
9 |
10 | # from typing import Any, Callable, TypeVar
11 | # SF = TypeVar("SF", bound=Callable[..., Any])
12 | # VF = TypeVar("VF", bound=Callable[..., np.array])
13 | # vectorize: Callable[[SF], VF] = np.vectorize
14 |
15 | # Current date (with a format)
16 | # Current week number
17 | # Current time
18 |
19 |
def datetime_now():
    """Return the current UTC date and time (timezone-aware).

    BUG FIX: the original @np.vectorize decorator made this function
    uncallable — np.vectorize raises ValueError ("args can not be empty")
    when the wrapped function is called with zero arguments.
    """
    return datetime.now(timezone.utc)
24 |
25 |
def date_now_plus_30_days():
    """Return the current UTC datetime plus 30 days.

    Note: despite the name, this returns a datetime, not a date
    (matching the original implementation).

    BUG FIX: the original @np.vectorize decorator made this function
    uncallable — np.vectorize raises ValueError on zero-argument calls.
    """
    return datetime.now(timezone.utc) + timedelta(days=30)
30 |
31 |
def datetime_now_plus_7_days():
    """Return the current UTC datetime plus 7 days.

    BUG FIX: the original @np.vectorize decorator made this function
    uncallable — np.vectorize raises ValueError on zero-argument calls.
    """
    return datetime.now(timezone.utc) + timedelta(days=7)
36 |
37 |
def datetime_now_plus_1_year():
    """Return the current UTC datetime plus 365 days.

    NOTE(review): 365 days is not a calendar year in leap years —
    confirm whether callers expect a calendar-year offset.

    BUG FIX: the original @np.vectorize decorator made this function
    uncallable — np.vectorize raises ValueError on zero-argument calls.
    """
    return datetime.now(timezone.utc) + timedelta(days=365)
42 |
43 |
@np.vectorize
def first_day_of_month(any_day: date) -> date:
    """Return the first day of the month containing the given day.

    Args:
        any_day (date): Any day

    Returns:
        date: First day of the month (as a numpy array element,
        because of np.vectorize)
    """
    first = any_day.replace(day=1)
    return first
55 |
56 |
@np.vectorize
def last_day_of_month(any_day: date) -> date:
    """Return the last day of the month containing the given day.

    Args:
        any_day (date): Any day

    Returns:
        date: Last day of the month
    """
    # Day 28 exists in every month, so adding 4 days always lands in the
    # next month; stepping back by that day-of-month gives the last day
    # of the original month.
    overshoot = any_day.replace(day=28) + timedelta(days=4)
    return overshoot - timedelta(days=overshoot.day)
73 |
74 |
@np.vectorize
def last_day_of_week(date: date) -> date:
    """Return the last day (Sunday) of the week containing the given day.

    Args:
        date (date): Any day (note: the parameter name shadows the
            imported `date` type; kept for caller compatibility)

    Returns:
        date: Last day of the week
    """
    # weekday() is 0 for Monday, so Sunday is 6 days after the week start.
    monday = date - timedelta(days=date.weekday())
    return monday + timedelta(days=6)
88 |
89 |
90 | # milliseconds
@np.vectorize
def ms_to_date(ms: int) -> np.array:
    """Convert milliseconds to date (datetime).

    Warning: the result is a numpy array (np.vectorize wraps the scalar helper).

    Args:
        ms (int): Milliseconds to convert

    Returns:
        np.array(dt.datetime): Converted date
    """
    # Thin wrapper over the project-level conversion helper.
    # NOTE(review): presumably `ms` is a Unix epoch timestamp in
    # milliseconds — confirm against millis2date.
    return millis2date(ms)
103 |
104 |
@np.vectorize
def date_to_ms(x: str, date_format: str = "%Y-%m-%d %H:%M:%S") -> np.array:
    """Convert a date string to milliseconds using a specific format.

    Warning: the result is a numpy array (np.vectorize wraps the scalar helper).

    Args:
        x (str): Date to convert
        date_format (str, optional): Format of the date. Defaults to "%Y-%m-%d %H:%M:%S".

    Returns:
        np.array(int): Converted milliseconds
    """
    # Thin wrapper over the project-level conversion helper.
    # NOTE(review): presumably returns a Unix epoch timestamp in
    # milliseconds — confirm against date2millis.
    return date2millis(x, date_format)
119 |
120 |
121 | # Strings
@np.vectorize
def str_to_date(x: str, date_format: str = "%Y-%m-%d") -> dt.date:
    """Parse a string into a UTC-anchored date.

    Args:
        x: String to convert
        date_format (str, optional): Format of the string. Defaults to "%Y-%m-%d".

    Returns:
        dt.date: Converted date
    """
    parsed = dt.datetime.strptime(x, date_format)
    return parsed.replace(tzinfo=dt.timezone.utc).date()
134 |
135 |
@np.vectorize
def date_to_str(x: date, date_format: str = "%Y-%m-%d") -> str:
    """Format a date as a string; NaN-like input yields np.nan.

    Args:
        x (date): Date to convert
        date_format (str): Format of the string. Defaults to "%Y-%m-%d".

    Returns:
        str: Converted string (np.nan when x is null)
    """
    if pd.isnull(x):
        return np.nan
    return x.strftime(date_format)
148 |
149 |
@np.vectorize
def str_to_datetime(x: str, date_format: str = "%Y-%m-%d %H:%M:%S") -> dt.datetime:
    """Parse a string into a UTC-anchored datetime.

    Args:
        x (str): String to convert
        date_format (str): Format of the string. Defaults to "%Y-%m-%d %H:%M:%S".

    Returns:
        dt.datetime: Converted datetime (tzinfo set to UTC)
    """
    parsed = dt.datetime.strptime(x, date_format)
    return parsed.replace(tzinfo=dt.timezone.utc)
162 |
163 |
@np.vectorize
def datetime_to_str(x: date, date_format: str = "%Y-%m-%d %H:%M:%S") -> str:
    """Format a datetime as a string; NaN-like input yields np.nan.

    Args:
        x (date): Datetime to convert
        date_format (str): Format of the string. Defaults to "%Y-%m-%d %H:%M:%S".

    Returns:
        str: Converted string (np.nan when x is null)
    """
    if pd.isnull(x):
        return np.nan
    return x.strftime(date_format)
177 |
178 |
179 | # generic
180 | @np.vectorize
181 | def acquire_date(x: Optional[Any], date_format: str = "%Y-%m-%d %H:%M:%S") -> Any:
182 | """Acquire and output a date from a string.
183 |
184 | Args:
185 | x (Optional[Any]): String to convert
186 | format (str): Format of the string. Defaults to "%Y-%m-%d %H:%M:%S".
187 |
188 | Returns:
189 | Any: Extracted date
190 | """
191 | if isinstance(x, str):
192 | return (
193 | dt.datetime.strptime(x, date_format).replace(tzinfo=dt.timezone.utc).date() if not pd.isnull(x) else np.nan
194 | )
195 |
196 | if isinstance(x, dt.date):
197 | return x
198 |
199 | return np.nan
200 |
201 |
def is_timezone_aware(dt):
    """Check if a datetime object is timezone aware.

    Args:
        dt (datetime): The datetime object to check.

    Returns:
        bool: True if the datetime object is timezone aware, False otherwise.
    """
    # Aware means: tzinfo is set AND it yields a concrete UTC offset,
    # per the aware/naive definition in the datetime module docs.
    if dt.tzinfo is None:
        return False
    return dt.tzinfo.utcoffset(dt) is not None
212 |
--------------------------------------------------------------------------------
/app/core/utils/dataframe.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | import pandas as pd
4 | import yaml
5 | from loguru import logger
6 | from pandas import DataFrame
7 |
8 | from app.core.utils.files import search_path
9 |
10 |
def remove_dict_none_values(dictionary: dict) -> list:
    """Remove, in place, every key whose value is None.

    The previous implementation returned ``list(map(dictionary.pop, ...))`` —
    i.e. the popped values, always ``[None, None, ...]`` — while the docstring
    promised the keys. It now returns the removed keys as documented.

    Args:
        dictionary (dict): Dictionary to clean; mutated in place.

    Returns:
        list: The keys that were removed because their value was None.
    """
    removed = [key for key, value in dictionary.items() if value is None]
    for key in removed:
        del dictionary[key]
    return removed
21 |
22 |
def data_iterator(data: pd.DataFrame, batch_size: int):
    """Iterate over a dataframe in consecutive row batches.

    Args:
        data (pd.DataFrame): Dataframe to slice.
        batch_size (int): Rows per batch (the last batch may be smaller).

    Yields:
        pd.DataFrame: Successive row slices of ``data``.
    """
    start = 0
    total = len(data)
    while start < total:
        yield data.iloc[start : start + batch_size]
        start += batch_size
32 |
33 |
def semi_join(left: DataFrame, right: DataFrame, left_on: str, right_on: str) -> DataFrame:
    """Keep the rows of ``left`` whose key also appears in ``right``.

    Args:
        left (DataFrame): Dataframe to filter.
        right (DataFrame): Dataframe providing the allowed keys.
        left_on (str): Key column in ``left``.
        right_on (str): Key column in ``right``.

    Returns:
        DataFrame: Rows of ``left`` whose ``left_on`` value exists in ``right[right_on]``.
    """
    if right.empty:
        keys = []
    else:
        keys = right[right_on].drop_duplicates()
    mask = left[left_on].isin(keys)
    return left[mask]
49 |
50 |
def semi_join_if_any(left: DataFrame, right: DataFrame, left_on: str, right_on: str) -> DataFrame:
    """Semi join that returns ``left`` unchanged when ``right`` is empty.

    Args:
        left (DataFrame): Dataframe to filter.
        right (DataFrame): Dataframe providing the allowed keys.
        left_on (str): Key column in ``left``.
        right_on (str): Key column in ``right``.

    Returns:
        DataFrame: ``left`` as-is for an empty ``right``, otherwise the semi join.
    """
    return left if right.empty else semi_join(left, right, left_on, right_on)
67 |
68 |
def anti_join(left: DataFrame, right: DataFrame, left_on: str, right_on: str) -> DataFrame:
    """Keep the rows of ``left`` whose key does NOT appear in ``right``.

    Args:
        left (DataFrame): Dataframe to filter.
        right (DataFrame): Dataframe providing the keys to exclude.
        left_on (str): Key column in ``left``.
        right_on (str): Key column in ``right``.

    Returns:
        DataFrame: Rows of ``left`` whose ``left_on`` value is absent from ``right[right_on]``.
    """
    # Guard mirrors semi_join: an empty right has nothing to exclude, and
    # right[right_on] would raise KeyError on a column-less empty frame.
    if right.empty:
        return left
    keys = right[right_on].drop_duplicates()
    is_in = left[left_on].isin(keys)
    return left[~is_in]
84 |
85 |
def remove_columns(dataset: pd.DataFrame, cols: list) -> pd.DataFrame:
    """Drop the given columns from a dataframe, silently ignoring unknown names.

    Args:
        dataset (pd.DataFrame): Input dataframe.
        cols (list | str): Column name(s) to remove.

    Returns:
        pd.DataFrame: A dataframe without those columns.
    """
    wanted = [cols] if isinstance(cols, str) else cols
    present = [name for name in wanted if name in dataset.columns]
    return dataset.drop(present, axis=1)
100 |
101 |
def keep_columns(df: pd.DataFrame, cols: list) -> pd.DataFrame:
    """Select only the requested columns, silently ignoring unknown names.

    Args:
        df (pd.DataFrame): Input dataframe.
        cols (list | str): Column name(s) to keep.

    Returns:
        pd.DataFrame: A dataframe restricted to those columns.
    """
    requested = [cols] if isinstance(cols, str) else cols
    present = [name for name in requested if name in df.columns]
    return df.loc[:, present]
116 |
117 |
def save_dict_to_json(d: dict, file: str) -> str:
    """Serialize a dictionary to a JSON file with sorted keys and 4-space indent.

    Args:
        d (dict): Dictionary to serialize.
        file (str): Destination file path.

    Returns:
        str: The path the file was written to.
    """
    with open(file, "w") as handle:
        json.dump(d, handle, sort_keys=True, indent=4)
    return file
131 |
132 |
# Check Dataframe Utility function
def check_df(dataframe: pd.DataFrame, sample: bool = False) -> None:  # noqa
    """Log basic information about a pandas dataframe (shape and column list).

    Output goes through the loguru logger at "info" level; nothing is returned.

    Args:
        dataframe (pd.DataFrame): the pandas dataframe to inspect.
        sample (bool): also log the top 5 rows. Defaults to False.
    """
    logger.info(f"Dataframe Shape: {dataframe.shape} with rows: {dataframe.shape[0]} and columns: {dataframe.shape[1]}")
    logger.info(f"\nDF Columns: \n{list(dataframe.columns)}")
    if sample:
        logger.info(f"\nData:\n{dataframe.head(5)}")
147 |
148 |
def write_dataset_yaml(to_path: str = "", filename: str = "", dataset: pd.DataFrame = None) -> bool:
    """Write a pandas dataframe to a yaml file.

    Args:
        to_path (str, optional): Directory where the yaml file is saved. Defaults to "".
        filename (str, optional): Name of the file to write. Defaults to "".
        dataset (pd.DataFrame, optional): Dataframe to serialize. Defaults to None.

    Returns:
        bool: True if the file was written successfully, False otherwise.
    """
    file_path = search_path(to_path, filename)

    if not isinstance(dataset, pd.DataFrame):
        logger.error("Please use a Pandas dataframe with write_dataset_yaml function")
        return False

    try:
        # NOTE(review): yaml.dump on a DataFrame emits a python-object tag
        # rather than plain data — confirm this serialization is intended.
        with open(file_path, "w") as file:
            yaml.dump(dataset, file)
        # the with-block closes the file; the redundant file.close() was removed
        logger.debug(f"File successfully written to: {file_path}")
        return True

    except Exception as message:
        logger.error(f"Impossible to write the file: {message}")
        return False
176 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/time.py:
--------------------------------------------------------------------------------
1 | import datetime as dt
2 | from datetime import date, datetime, timedelta, timezone
3 | from typing import Any, Optional
4 |
5 | import numpy as np
6 | import pandas as pd
7 |
8 | from app.core.utils.conversions import date2millis, millis2date
9 |
10 | # from typing import Any, Callable, TypeVar
11 | # SF = TypeVar("SF", bound=Callable[..., Any])
12 | # VF = TypeVar("VF", bound=Callable[..., np.array])
13 | # vectorize: Callable[[SF], VF] = np.vectorize
14 |
15 | # Current date (with a format)
16 | # Current week number
17 | # Current time
18 |
19 |
def datetime_now():
    """Return the current UTC date and time as a timezone-aware datetime.

    The previous ``@np.vectorize`` decorator was removed: numpy's vectorize
    raises ValueError when called with zero arguments, which made this
    function uncallable.
    """
    return datetime.now(timezone.utc)
24 |
25 |
def date_now_plus_30_days():
    """Return the current UTC datetime plus 30 days.

    Named "date" but returns a datetime; the name is kept for API compatibility.
    The previous ``@np.vectorize`` decorator was removed: numpy's vectorize
    raises ValueError on a zero-argument call, making the function uncallable.
    """
    return datetime.now(timezone.utc) + timedelta(days=30)
30 |
31 |
def datetime_now_plus_7_days():
    """Return the current UTC datetime plus 7 days.

    The previous ``@np.vectorize`` decorator was removed: numpy's vectorize
    raises ValueError on a zero-argument call, making the function uncallable.
    """
    return datetime.now(timezone.utc) + timedelta(days=7)
36 |
37 |
def datetime_now_plus_1_year():
    """Return the current UTC datetime plus 365 days.

    Note: a fixed 365-day offset, not a calendar year (leap years shift the date).
    The previous ``@np.vectorize`` decorator was removed: numpy's vectorize
    raises ValueError on a zero-argument call, making the function uncallable.
    """
    return datetime.now(timezone.utc) + timedelta(days=365)
42 |
43 |
@np.vectorize
def first_day_of_month(any_day: date) -> date:
    """Return the first day of the month containing ``any_day``.

    Args:
        any_day (date): Any day of the month.

    Returns:
        date: Same year and month, with the day set to 1.
    """
    month_start = any_day.replace(day=1)
    return month_start
55 |
56 |
@np.vectorize
def last_day_of_month(any_day: date) -> date:
    """Return the last day of the month containing ``any_day``.

    Jumps safely into the next month (day 28 + 4 days always lands there,
    since no month is longer than 31 days) and then steps back by the
    overshoot to reach the final day of the current month.

    Args:
        any_day (date): Any day of the month.

    Returns:
        date: Last day of that month.
    """
    overshoot = any_day.replace(day=28) + timedelta(days=4)
    return overshoot - timedelta(days=overshoot.day)
73 |
74 |
@np.vectorize
def last_day_of_week(date: date) -> date:
    """Return the Sunday of the Monday-based week containing the given day.

    Args:
        date (date): Any day (the parameter name shadows ``datetime.date``;
            kept for API compatibility).

    Returns:
        date: The Sunday ending that week.
    """
    monday = date - timedelta(days=date.weekday())
    return monday + timedelta(days=6)
88 |
89 |
# milliseconds
@np.vectorize
def ms_to_date(ms: int) -> np.array:
    """Convert milliseconds to a datetime.

    Warning: because of ``@np.vectorize`` the result is a numpy array.
    Delegates to ``millis2date`` — presumably epoch milliseconds; verify in
    the conversions module.

    Args:
        ms (int): Milliseconds to convert.

    Returns:
        np.array(dt.datetime): Converted date(s).
    """
    converted = millis2date(ms)
    return converted
103 |
104 |
@np.vectorize
def date_to_ms(x: str, date_format: str = "%Y-%m-%d %H:%M:%S") -> np.array:
    """Convert a date string to milliseconds using the given format.

    Warning: because of ``@np.vectorize`` the result is a numpy array.
    Delegates to ``date2millis`` — presumably epoch milliseconds; verify in
    the conversions module.

    Args:
        x (str): Date string to convert.
        date_format (str, optional): Format of the date. Defaults to "%Y-%m-%d %H:%M:%S".

    Returns:
        np.array: Converted value(s).
    """
    millis = date2millis(x, date_format)
    return millis
119 |
120 |
# Strings
@np.vectorize
def str_to_date(x: str, date_format: str = "%Y-%m-%d") -> dt.date:
    """Parse a string into a date (parsed as a UTC datetime, then truncated).

    Args:
        x (str): String to parse.
        date_format (str, optional): Expected format. Defaults to "%Y-%m-%d".

    Returns:
        dt.date: Parsed date.
    """
    parsed = dt.datetime.strptime(x, date_format)
    return parsed.replace(tzinfo=dt.timezone.utc).date()
134 |
135 |
@np.vectorize
def date_to_str(x: date, date_format: str = "%Y-%m-%d") -> str:
    """Format a date as a string, propagating missing values as NaN.

    Args:
        x (date): Date to convert.
        date_format (str): Target format. Defaults to "%Y-%m-%d".

    Returns:
        str: Formatted string, or np.nan when ``x`` is null.
    """
    if pd.isnull(x):
        return np.nan
    return x.strftime(date_format)
148 |
149 |
@np.vectorize
def str_to_datetime(x: str, date_format: str = "%Y-%m-%d %H:%M:%S") -> dt.datetime:
    """Parse a string into a UTC-aware datetime.

    Args:
        x (str): String to parse.
        date_format (str): Format the string is expected in. Defaults to "%Y-%m-%d %H:%M:%S".

    Returns:
        dt.datetime: Parsed datetime with tzinfo set to UTC.
    """
    parsed = dt.datetime.strptime(x, date_format)
    return parsed.replace(tzinfo=dt.timezone.utc)
162 |
163 |
@np.vectorize
def datetime_to_str(x: dt.datetime, date_format: str = "%Y-%m-%d %H:%M:%S") -> str:
    """Format a datetime as a string, propagating missing values as NaN.

    Args:
        x (dt.datetime): Datetime to convert (the previous ``date`` annotation was
            misleading — the default format includes a time component).
        date_format (str): Target format. Defaults to "%Y-%m-%d %H:%M:%S".

    Returns:
        str: Formatted string, or np.nan when ``x`` is null.
    """
    return x.strftime(date_format) if not pd.isnull(x) else np.nan
177 |
178 |
# generic
@np.vectorize
def acquire_date(x: Optional[Any], date_format: str = "%Y-%m-%d %H:%M:%S") -> Any:
    """Extract a date from a string or date-like value.

    Args:
        x (Optional[Any]): Value to convert: a string in ``date_format``,
            a date/datetime (returned as-is), or anything else (yields NaN).
        date_format (str): Format used to parse strings. Defaults to "%Y-%m-%d %H:%M:%S".

    Returns:
        Any: Extracted date, the input itself for date instances, or np.nan.
    """
    if isinstance(x, str):
        # A str is never null, so the previous pd.isnull(x) guard here was dead code.
        return dt.datetime.strptime(x, date_format).replace(tzinfo=dt.timezone.utc).date()

    if isinstance(x, dt.date):
        return x

    return np.nan
200 |
201 |
def is_timezone_aware(dt):
    """Return True when a datetime carries usable timezone information.

    Args:
        dt (datetime): Datetime instance to inspect (the parameter name shadows
            the common ``datetime as dt`` module alias; kept for API compatibility).

    Returns:
        bool: True if tzinfo is set and yields a UTC offset, False otherwise.
    """
    info = dt.tzinfo
    if info is None:
        return False
    return info.utcoffset(dt) is not None
212 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/app/core/utils/dataframe.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | import pandas as pd
4 | import yaml
5 | from loguru import logger
6 | from pandas import DataFrame
7 |
8 | from app.core.utils.files import search_path
9 |
10 |
def remove_dict_none_values(dictionary: dict) -> list:
    """Remove, in place, every key whose value is None.

    The previous implementation returned ``list(map(dictionary.pop, ...))`` —
    i.e. the popped values, always ``[None, None, ...]`` — while the docstring
    promised the keys. It now returns the removed keys as documented.

    Args:
        dictionary (dict): Dictionary to clean; mutated in place.

    Returns:
        list: The keys that were removed because their value was None.
    """
    removed = [key for key, value in dictionary.items() if value is None]
    for key in removed:
        del dictionary[key]
    return removed
21 |
22 |
def data_iterator(data: pd.DataFrame, batch_size: int):
    """Iterate over a dataframe in consecutive row batches.

    Args:
        data (pd.DataFrame): Dataframe to slice.
        batch_size (int): Rows per batch (the last batch may be smaller).

    Yields:
        pd.DataFrame: Successive row slices of ``data``.
    """
    start = 0
    total = len(data)
    while start < total:
        yield data.iloc[start : start + batch_size]
        start += batch_size
32 |
33 |
def semi_join(left: DataFrame, right: DataFrame, left_on: str, right_on: str) -> DataFrame:
    """Keep the rows of ``left`` whose key also appears in ``right``.

    Args:
        left (DataFrame): Dataframe to filter.
        right (DataFrame): Dataframe providing the allowed keys.
        left_on (str): Key column in ``left``.
        right_on (str): Key column in ``right``.

    Returns:
        DataFrame: Rows of ``left`` whose ``left_on`` value exists in ``right[right_on]``.
    """
    if right.empty:
        keys = []
    else:
        keys = right[right_on].drop_duplicates()
    mask = left[left_on].isin(keys)
    return left[mask]
49 |
50 |
def semi_join_if_any(left: DataFrame, right: DataFrame, left_on: str, right_on: str) -> DataFrame:
    """Semi join that returns ``left`` unchanged when ``right`` is empty.

    Args:
        left (DataFrame): Dataframe to filter.
        right (DataFrame): Dataframe providing the allowed keys.
        left_on (str): Key column in ``left``.
        right_on (str): Key column in ``right``.

    Returns:
        DataFrame: ``left`` as-is for an empty ``right``, otherwise the semi join.
    """
    return left if right.empty else semi_join(left, right, left_on, right_on)
67 |
68 |
def anti_join(left: DataFrame, right: DataFrame, left_on: str, right_on: str) -> DataFrame:
    """Keep the rows of ``left`` whose key does NOT appear in ``right``.

    Args:
        left (DataFrame): Dataframe to filter.
        right (DataFrame): Dataframe providing the keys to exclude.
        left_on (str): Key column in ``left``.
        right_on (str): Key column in ``right``.

    Returns:
        DataFrame: Rows of ``left`` whose ``left_on`` value is absent from ``right[right_on]``.
    """
    # Guard mirrors semi_join: an empty right has nothing to exclude, and
    # right[right_on] would raise KeyError on a column-less empty frame.
    if right.empty:
        return left
    keys = right[right_on].drop_duplicates()
    is_in = left[left_on].isin(keys)
    return left[~is_in]
84 |
85 |
def remove_columns(dataset: pd.DataFrame, cols: list) -> pd.DataFrame:
    """Drop the given columns from a dataframe, silently ignoring unknown names.

    Args:
        dataset (pd.DataFrame): Input dataframe.
        cols (list | str): Column name(s) to remove.

    Returns:
        pd.DataFrame: A dataframe without those columns.
    """
    wanted = [cols] if isinstance(cols, str) else cols
    present = [name for name in wanted if name in dataset.columns]
    return dataset.drop(present, axis=1)
100 |
101 |
def keep_columns(df: pd.DataFrame, cols: list) -> pd.DataFrame:
    """Select only the requested columns, silently ignoring unknown names.

    Args:
        df (pd.DataFrame): Input dataframe.
        cols (list | str): Column name(s) to keep.

    Returns:
        pd.DataFrame: A dataframe restricted to those columns.
    """
    requested = [cols] if isinstance(cols, str) else cols
    present = [name for name in requested if name in df.columns]
    return df.loc[:, present]
116 |
117 |
def save_dict_to_json(d: dict, file: str) -> str:
    """Serialize a dictionary to a JSON file with sorted keys and 4-space indent.

    Args:
        d (dict): Dictionary to serialize.
        file (str): Destination file path.

    Returns:
        str: The path the file was written to.
    """
    with open(file, "w") as handle:
        json.dump(d, handle, sort_keys=True, indent=4)
    return file
131 |
132 |
# Check Dataframe Utility function
def check_df(dataframe: pd.DataFrame, sample: bool = False) -> None:  # noqa
    """Log basic information about a pandas dataframe (shape and column list).

    Output goes through the loguru logger at "info" level; nothing is returned.

    Args:
        dataframe (pd.DataFrame): the pandas dataframe to inspect.
        sample (bool): also log the top 5 rows. Defaults to False.
    """
    logger.info(f"Dataframe Shape: {dataframe.shape} with rows: {dataframe.shape[0]} and columns: {dataframe.shape[1]}")
    logger.info(f"\nDF Columns: \n{list(dataframe.columns)}")
    if sample:
        logger.info(f"\nData:\n{dataframe.head(5)}")
147 |
148 |
def write_dataset_yaml(to_path: str = "", filename: str = "", dataset: pd.DataFrame = None) -> bool:
    """Write a pandas dataframe to a yaml file.

    Args:
        to_path (str, optional): Directory where the yaml file is saved. Defaults to "".
        filename (str, optional): Name of the file to write. Defaults to "".
        dataset (pd.DataFrame, optional): Dataframe to serialize. Defaults to None.

    Returns:
        bool: True if the file was written successfully, False otherwise.
    """
    file_path = search_path(to_path, filename)

    if not isinstance(dataset, pd.DataFrame):
        logger.error("Please use a Pandas dataframe with write_dataset_yaml function")
        return False

    try:
        # NOTE(review): yaml.dump on a DataFrame emits a python-object tag
        # rather than plain data — confirm this serialization is intended.
        with open(file_path, "w") as file:
            yaml.dump(dataset, file)
        # the with-block closes the file; the redundant file.close() was removed
        logger.debug(f"File successfully written to: {file_path}")
        return True

    except Exception as message:
        logger.error(f"Impossible to write the file: {message}")
        return False
176 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "bear"
3 | dynamic = ["version"]
4 | description = "Python template"
5 | license = { text = "MIT" }
6 | requires-python = ">=3.10, <3.13"
7 | authors = [
8 | { name = "Andrea Guzzo", email = "andrea.guzzo92@gmail.com" },
9 | { name = "PBG", email = "pythonbiellagroup@gmail.com" },
10 | ]
11 | readme = "README.md"
12 | keywords = ["pbg", "python", "template", "bear"]
13 | dependencies = [
14 | "cookiecutter>=2.6.0",
15 | "ecs-logging>=2.2.0",
16 | "loguru>=0.7.3",
17 | "pydantic>=2.11.3",
18 | "pydantic-settings>=2.9.0",
19 | ]
20 |
21 | # Custom index with private repository
22 | # [[tool.uv.index]]
23 | # name = "pbg"
24 | # url = "your-repository-url"
25 | # explicit = true
26 |
27 | # [tool.uv.sources]
28 | # your-package = { workspace = true }
29 |
30 | [tool.setuptools.packages.find]
31 | where = ["."]
32 | include = ["app*"]
33 | exclude = ["test*"]
34 |
35 | [tool.setuptools.package-data]
36 | "*" = ["*.yaml"]
37 |
38 | [tool.setuptools.dynamic]
39 | version = { file = "VERSION" }
40 |
41 | [dependency-groups]
42 | dev = [
43 | "commitizen>=3.9.1",
44 | "deptry>=0.20.0",
45 | "detect-secrets>=1.5.0",
46 | "docker>=7.1.0",
47 | "exceptiongroup>=1.2.2",
48 | "fsspec>=2024.6.1",
49 | "ipython>=8.27.0",
50 | "jupyter>=1.1.1",
51 | "memory-profiler>=0.61.0",
52 | "moto>=5.0.13",
53 | "mypy>=1.11.2",
54 | "pre-commit>=3.8.0",
55 | "pytest>=8.3.2",
56 | "pytest-asyncio>=0.24.0",
57 | "pytest-cov>=5.0.0",
58 | "ruff>=0.6.3",
59 | "s3fs>=2024.6.1",
60 | "sqlalchemy-stubs>=0.4",
61 | "tox>=4.18.0",
62 | ]
63 |
64 | docs = [
65 | "black>=24.8.0",
66 | "mkdocs>=1.5.3",
67 | "mkdocs-autorefs>=0.5.0",
68 | "mkdocs-gen-files<1.0.0,>=0.5.0",
69 | "mkdocs-literate-nav<1.0.0,>=0.6.1",
70 | "mkdocs-macros-plugin>=1.0.5",
71 | "mkdocs-material>=9.5.5",
72 | "mkdocs-minify-plugin>=0.7.2",
73 | "mkdocs-redirects>=1.2.1",
74 | "mkdocs-section-index<1.0.0,>=0.3.9",
75 | "mkdocstrings>=0.24.0",
76 | "mkdocstrings-python>=1.8.0",
77 | ]
78 |
79 | ### PLUGINS ###
80 | [tool.pytest.ini_options]
81 | markers = [
82 | "functions: mark test for the base functionalities",
83 | "core: all about the core",
84 | "duckdb: duckdb functionalities",
85 | "files: file write and read functionalities",
86 | "postgres: postgres functionalities",
87 | "dremio: dremio functionalities",
88 | ]
89 | log_cli = true
90 | log_cli_level = "DEBUG"
91 |
92 |
93 | [tool.coverage.report]
94 | skip_empty = true
95 |
96 | [tool.coverage.run]
97 | branch = true
98 | source = ["app"]
99 |
100 | [tool.commitizen]
101 | name = "cz_conventional_commits"
102 | tag_format = "v$major.$minor.$patch$prerelease"
103 | version = "0.1.0"
104 | version_files = ["__version__.py", "pyproject.toml:version"]
105 | style = [
106 | [
107 | "qmark",
108 | "fg:#ff9d00 bold",
109 | ],
110 | [
111 | "question",
112 | "bold",
113 | ],
114 | [
115 | "answer",
116 | "fg:#ff9d00 bold",
117 | ],
118 | [
119 | "pointer",
120 | "fg:#ff9d00 bold",
121 | ],
122 | [
123 | "highlighted",
124 | "fg:#ff9d00 bold",
125 | ],
126 | [
127 | "selected",
128 | "fg:#cc5454",
129 | ],
130 | [
131 | "separator",
132 | "fg:#cc5454",
133 | ],
134 | [
135 | "instruction",
136 | "",
137 | ],
138 | [
139 | "text",
140 | "",
141 | ],
142 | [
143 | "disabled",
144 | "fg:#858585 italic",
145 | ],
146 | ]
147 |
148 | #### RUFF
149 | [tool.ruff]
150 | line-length = 120
151 | indent-width = 4
152 | output-format = "concise"
# Assume Python 3.12.
154 | target-version = "py312"
155 |
156 | # A list of file patterns to include when linting.
157 | include = ["**/pyproject.toml", "*.py", "*.pyi"]
158 | # extend-include = ["*.ipynb"]
159 |
160 | # Always autofix, but never try to fix `F401` (unused imports).
161 | fix = true
162 |
163 | # Exclude a variety of commonly ignored directories (you can have some problems)
164 | exclude = [
165 | ".direnv",
166 | ".eggs",
167 | ".git",
168 | ".hg",
169 | ".mypy_cache",
170 | ".nox",
171 | ".pants.d",
172 | ".ruff_cache",
173 | ".svn",
174 | ".tox",
175 | ".venv",
176 | "__pypackages__",
177 | "_build",
178 | "buck-out",
179 | "build",
180 | "dist",
181 | "node_modules",
182 | "venv",
183 | ".venv",
184 | "*ipynb_checkpoints",
185 | "*.ipynb",
186 | "test/*",
187 | "__init__.py",
188 | ]
189 |
190 | [tool.ruff.lint]
191 | fixable = ["ALL"]
192 | unfixable = ["F401"]
193 | # Rules: https://beta.ruff.rs/docs/rules/
194 | # Enable Pyflakes `E` and `F` codes by default.
195 | select = [
196 | #default
197 | "E", # pycodestyle error
198 | "F", #flake8 error
199 | #extra
    "A", # builtin shadowing
201 | "B", # flake8 bugbear
    "BLE", # avoid bare excepts
203 | "C4", # simplify comprehensions
204 | "D", # docstyle
205 | "DTZ", # datetime errors
206 | "FBT", # avoid boolean trap
207 | "G", # logging format
208 | "I", # flake8-isort import
209 | "N", # pep8 naming
210 | "RET", # return statements values
211 | "S", # bandit
    "YTT", # wrong usage of sys.info
214 | ]
215 | ignore = [
216 | "B008", # do not perform function calls in argument defaults
217 | "BLE001", #Do not catch blind exception: {name}
218 | "C901", # too complex
219 | "D107", # Missing docstring in __init__
220 | "D203", # 1 blank line required before class docstring
221 | "D213", # Multi-line docstring summary should start at the second line
222 | "D417", # Missing argument description in the docstring for {definition}: {name}
    "E501", # line too long, handled by the formatter
225 | "D100",
226 | "FBT001", # boolean trap
227 | "FBT002", # boolean trap
228 | "G004", # logging statement using fstring formatting
229 | ]
230 | # Allow unused variables when underscore-prefixed.
231 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
232 |
233 | [tool.ruff.format]
234 | # select = ["E4", "E7", "E9", "F"]
235 | # exclude = ["*.ipynb"]
236 | # ignore = []
237 | quote-style = "double"
238 | indent-style = "space"
239 | skip-magic-trailing-comma = false
240 | line-ending = "auto"
241 | docstring-code-format = true
242 | docstring-code-line-length = 20
243 |
244 | [tool.ruff.lint.mccabe]
245 | # Unlike Flake8, default to a complexity level of 10.
246 | max-complexity = 10
247 |
248 | [tool.ruff.lint.flake8-quotes]
249 | docstring-quotes = "double"
250 |
251 | [tool.ruff.lint.pydocstyle]
252 | convention = "google"
253 |
254 | [tool.ruff.lint.isort]
255 | known-third-party = ["fastapi", "pydantic", "starlette"]
256 |
257 | [tool.ruff.lint.per-file-ignores]
258 | "__init__.py" = ["D104", "F401", "I002"]
259 | "test*.py" = ["S101", "T201"]
260 |
--------------------------------------------------------------------------------
/{{cookiecutter.directory_name}}/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "bear"
3 | dynamic = ["version"]
4 | description = "Python template"
5 | license = { text = "MIT" }
6 | requires-python = ">=3.10, <3.13"
7 | authors = [
8 | { name = "Andrea Guzzo", email = "andrea.guzzo92@gmail.com" },
9 | { name = "PBG", email = "pythonbiellagroup@gmail.com" },
10 | ]
11 | readme = "README.md"
12 | keywords = ["pbg", "python", "template", "bear"]
13 | dependencies = [
14 | "ecs-logging>=2.2.0",
15 | "cookiecutter>=2.6.0",
16 | "loguru>=0.7.3",
17 | "pydantic>=2.11.3",
18 | "pydantic-settings>=2.9.0",
19 | ]
20 |
21 | # Custom index with private repository
22 | # [[tool.uv.index]]
23 | # name = "pbg"
24 | # url = "your-repository-url"
25 | # explicit = true
26 |
27 | # [tool.uv.sources]
28 | # your-package = { workspace = true }
29 |
30 | [tool.setuptools.packages.find]
31 | where = ["."]
32 | include = ["app*"]
33 | exclude = ["test*"]
34 |
35 | [tool.setuptools.package-data]
36 | "*" = ["*.yaml"]
37 |
38 | [tool.setuptools.dynamic]
39 | version = { file = "VERSION" }
40 |
41 | [dependency-groups]
42 | dev = [
43 | "commitizen>=3.9.1",
44 | "deptry>=0.20.0",
45 | "detect-secrets>=1.5.0",
46 | "docker>=7.1.0",
47 | "exceptiongroup>=1.2.2",
48 | "fsspec>=2024.6.1",
49 | "ipython>=8.27.0",
50 | "jupyter>=1.1.1",
51 | "memory-profiler>=0.61.0",
52 | "moto>=5.0.13",
53 | "mypy>=1.11.2",
54 | "pre-commit>=3.8.0",
55 | "pytest>=8.3.2",
56 | "pytest-asyncio>=0.24.0",
57 | "pytest-cov>=5.0.0",
58 | "ruff>=0.6.3",
59 | "s3fs>=2024.6.1",
60 | "sqlalchemy-stubs>=0.4",
61 | "tox>=4.18.0",
62 | ]
63 |
64 | docs = [
65 | "black>=24.8.0",
66 | "mkdocs>=1.5.3",
67 | "mkdocs-autorefs>=0.5.0",
68 | "mkdocs-gen-files<1.0.0,>=0.5.0",
69 | "mkdocs-literate-nav<1.0.0,>=0.6.1",
70 | "mkdocs-macros-plugin>=1.0.5",
71 | "mkdocs-material>=9.5.5",
72 | "mkdocs-minify-plugin>=0.7.2",
73 | "mkdocs-redirects>=1.2.1",
74 | "mkdocs-section-index<1.0.0,>=0.3.9",
75 | "mkdocstrings>=0.24.0",
76 | "mkdocstrings-python>=1.8.0",
77 | ]
78 |
79 | ### PLUGINS ###
80 | [tool.pytest.ini_options]
81 | markers = [
82 | "functions: mark test for the base functionalities",
83 | "core: all about the core",
84 | "duckdb: duckdb functionalities",
85 | "files: file write and read functionalities",
86 | "postgres: postgres functionalities",
87 | "dremio: dremio functionalities",
88 | ]
89 | log_cli = true
90 | log_cli_level = "DEBUG"
91 |
92 |
93 | [tool.coverage.report]
94 | skip_empty = true
95 |
96 | [tool.coverage.run]
97 | branch = true
98 | source = ["app"]
99 |
100 | [tool.commitizen]
101 | name = "cz_conventional_commits"
102 | tag_format = "v$major.$minor.$patch$prerelease"
103 | version = "0.1.0"
104 | version_files = ["__version__.py", "pyproject.toml:version"]
105 | style = [
106 | [
107 | "qmark",
108 | "fg:#ff9d00 bold",
109 | ],
110 | [
111 | "question",
112 | "bold",
113 | ],
114 | [
115 | "answer",
116 | "fg:#ff9d00 bold",
117 | ],
118 | [
119 | "pointer",
120 | "fg:#ff9d00 bold",
121 | ],
122 | [
123 | "highlighted",
124 | "fg:#ff9d00 bold",
125 | ],
126 | [
127 | "selected",
128 | "fg:#cc5454",
129 | ],
130 | [
131 | "separator",
132 | "fg:#cc5454",
133 | ],
134 | [
135 | "instruction",
136 | "",
137 | ],
138 | [
139 | "text",
140 | "",
141 | ],
142 | [
143 | "disabled",
144 | "fg:#858585 italic",
145 | ],
146 | ]
147 |
148 | #### RUFF
149 | [tool.ruff]
150 | line-length = 120
151 | indent-width = 4
152 | output-format = "concise"
# Assume Python 3.12.
154 | target-version = "py312"
155 |
156 | # A list of file patterns to include when linting.
157 | include = ["**/pyproject.toml", "*.py", "*.pyi"]
158 | # extend-include = ["*.ipynb"]
159 |
160 | # Always autofix, but never try to fix `F401` (unused imports).
161 | fix = true
162 |
163 | # Exclude a variety of commonly ignored directories (you can have some problems)
164 | exclude = [
165 | ".direnv",
166 | ".eggs",
167 | ".git",
168 | ".hg",
169 | ".mypy_cache",
170 | ".nox",
171 | ".pants.d",
172 | ".ruff_cache",
173 | ".svn",
174 | ".tox",
175 | ".venv",
176 | "__pypackages__",
177 | "_build",
178 | "buck-out",
179 | "build",
180 | "dist",
181 | "node_modules",
182 | "venv",
183 | ".venv",
184 | "*ipynb_checkpoints",
185 | "*.ipynb",
186 | "test/*",
187 | "__init__.py",
188 | ]
189 |
190 | [tool.ruff.lint]
191 | fixable = ["ALL"]
192 | unfixable = ["F401"]
193 | # Rules: https://beta.ruff.rs/docs/rules/
194 | # Enable Pyflakes `E` and `F` codes by default.
195 | select = [
196 | #default
197 | "E", # pycodestyle error
198 | "F", #flake8 error
199 | #extra
    "A", # builtin shadowing
201 | "B", # flake8 bugbear
    "BLE", # avoid bare excepts
203 | "C4", # simplify comprehensions
204 | "D", # docstyle
205 | "DTZ", # datetime errors
206 | "FBT", # avoid boolean trap
207 | "G", # logging format
208 | "I", # flake8-isort import
209 | "N", # pep8 naming
210 | "RET", # return statements values
211 | "S", # bandit
    "YTT", # wrong usage of sys.info
214 | ]
215 | ignore = [
216 | "B008", # do not perform function calls in argument defaults
217 | "BLE001", #Do not catch blind exception: {name}
218 | "C901", # too complex
219 | "D107", # Missing docstring in __init__
220 | "D203", # 1 blank line required before class docstring
221 | "D213", # Multi-line docstring summary should start at the second line
222 | "D417", # Missing argument description in the docstring for {definition}: {name}
    "E501", # line too long, handled by the formatter
225 | "D100",
226 | "FBT001", # boolean trap
227 | "FBT002", # boolean trap
228 | "G004", # logging statement using fstring formatting
229 | ]
230 | # Allow unused variables when underscore-prefixed.
231 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
232 |
233 | [tool.ruff.format]
234 | # select = ["E4", "E7", "E9", "F"]
235 | # exclude = ["*.ipynb"]
236 | # ignore = []
237 | quote-style = "double"
238 | indent-style = "space"
239 | skip-magic-trailing-comma = false
240 | line-ending = "auto"
241 | docstring-code-format = true
242 | docstring-code-line-length = 20
243 |
244 | [tool.ruff.lint.mccabe]
245 | # Unlike Flake8, default to a complexity level of 10.
246 | max-complexity = 10
247 |
248 | [tool.ruff.lint.flake8-quotes]
249 | docstring-quotes = "double"
250 |
251 | [tool.ruff.lint.pydocstyle]
252 | convention = "google"
253 |
254 | [tool.ruff.lint.isort]
255 | known-third-party = ["fastapi", "pydantic", "starlette"]
256 |
257 | [tool.ruff.lint.per-file-ignores]
258 | "__init__.py" = ["D104", "F401", "I002"]
259 | "test*.py" = ["S101", "T201"]
260 |
--------------------------------------------------------------------------------
/mani.yml:
--------------------------------------------------------------------------------
1 | # PYTHON BIELLA GROUP MANI TEMPLATE
2 |
3 | # All the tags are singular names
# Remember to keep the same folder structure and to configure GitLab in ~/.ssh/config with your ssh key
# It's not possible to use HTTPS connections, only ssh.
6 | # https://github.com/alajmo/mani
7 | # https://dev.to/alajmo/mani-a-cli-tool-to-manage-multiple-repositories-1eg
8 |
# USEFUL COMMANDS
10 | # mani list projects || see list of the projects
11 | # mani describe tasks || describe list of tasks
12 | # mani describe projects || describe list of projects
# mani sync || synchronize and download all the repositories
14 | # mani list projects --tree || see list of the projects in a folder tree
15 | # mani exec --all --output table --parallel 'find . -type f | wc -l' || count number of files in each project in parallel
16 | # mani describe task git-overview || describe a specific task
17 | # mani run git-status --tags library || launch a command for some specific tags
18 | # mani exec --all --output table --parallel 'find . -type f | wc -l' || run custom command for all projects
19 | # mani run git-overview -t bash -d frontend/ -o table || run a task that have multiple commands and display the result in a table
20 |
21 | # mani run update-all || automatically fetch, clone and checkout all the branches in parallel
22 |
23 | ###############################################
24 | ##### Collection of global specifications #####
25 | ###############################################
26 |
27 | specs:
28 | custom:
29 | output: table
30 | parallel: true
31 |
32 | targets:
33 | all:
34 | all: true
35 |
36 | themes:
37 | custom:
38 | table:
39 | options:
40 | draw_border: true
41 | separate_columns: true
42 | separate_header: true
43 | separate_rows: true
44 |
45 | ############################################
46 | ##### Collection of Tasks and commands #####
47 | ############################################
48 |
49 | ### TASKS
50 | tasks:
51 | hello:
52 | desc: Print Hello World
53 | cmd: echo "Hello World"
54 |
55 | branch-all:
56 | desc: Download git and all branches
57 | spec: custom
58 | parallel: true
59 | target: all
60 | cmd: |
61 | for branch in $(git branch -r | grep -v '\->' | sed 's/.*origin\///'); do
62 | git checkout $branch || git checkout -b $branch origin/$branch
63 | done
64 |
65 | branch-all-force:
desc: Download git and all branches, stashing local changes if the checkout fails
67 | spec: custom
68 | parallel: true
69 | target: all
70 | cmd: |
71 | for branch in $(git branch -r | grep -v '\->' | sed 's/.*origin\///'); do
72 | git checkout $branch || git checkout -b $branch origin/$branch || (git stash && git checkout $branch || git checkout -b $branch origin/$branch)
73 | done
74 |
75 | update-all:
desc: Fetch and pull remote updates, then checkout all branches
77 | spec: custom
78 | parallel: true
79 | target: all
80 | commands:
81 | - task: git-fetch-pull
82 | - task: branch-all
83 |
84 | git-overview:
85 | desc: show branch, local and remote diffs, last commit and date
86 | spec: custom
87 | target: all
88 | theme: custom
89 | commands:
90 | - task: git-branch
91 | - task: git-last-commit-msg
92 | - task: git-last-commit-date
93 |
94 | git-branch-develop:
desc: checkout the develop branch, pull it, and come back to main
96 | spec: custom
97 | target: all
98 | theme: custom
99 | commands:
100 | - task: git-fetch
101 | - task: git-develop
102 | - task: git-pull
103 | - task: git-main
104 | - task: git-pull
105 |
106 | git-branch-dev:
desc: checkout the dev branch, pull it, and come back to main
108 | spec: custom
109 | target: all
110 | theme: custom
111 | commands:
112 | - task: git-fetch
113 | - task: git-dev
114 | - task: git-pull
115 | - task: git-main
116 | - task: git-pull
117 |
118 | git-clone:
119 | desc: clone all the repositories
120 | spec: custom
121 | cmd: git clone
122 |
123 | git-status:
124 | desc: show working tree status
125 | spec: custom
126 | cmd: git status -s
127 |
128 | git-fetch:
129 | desc: fetch remote updates
130 | spec: custom
131 | cmd: git fetch --all
132 |
133 | git-fetch-pull:
134 | desc: fetch and pull remote updates
135 | spec: custom
136 | cmd: git fetch --all && git pull --all
137 |
138 | git-prune:
139 | desc: remove local branches which have been deleted on remote
140 | spec: custom
141 | env:
142 | remote: origin
143 | cmd: git remote prune $remote
144 |
145 | git-switch:
146 | desc: switch branch setting the env for this command (by default it's main)
147 | spec: custom
148 | env:
149 | branch: main
150 | cmd: git checkout $branch
151 |
152 | git-create:
153 | desc: create branch
154 | spec: custom
155 | cmd: git checkout -b $branch
156 |
157 | git-develop:
158 | desc: checkout develop branch
159 | spec: custom
160 | env:
161 | remote: origin
162 | cmd: git checkout develop
163 |
164 | git-dev:
165 | desc: checkout dev branch
166 | spec: custom
167 | cmd: git checkout dev
168 |
169 | git-main:
170 | desc: checkout main branch
171 | spec: custom
172 | cmd: git checkout main
173 |
174 | git-master:
desc: checkout master branch
176 | spec: custom
177 | cmd: git checkout master
178 |
179 | git-change-origin:
180 | desc: change the origin of the repository
181 | spec: custom
182 | commands:
183 | # - name: remove old origin
184 | # cmd: git remote remove origin
185 | - name: add new origin
186 | cmd: git remote set-url origin "${remote_url}"
187 |
188 | # git-delete:
189 | # desc: delete branch
190 | # spec: custom
191 | # cmd: git branch -D $branch
192 |
193 | git-pull:
194 | desc: pull the changes
195 | spec: custom
196 | cmd: git pull --all
197 |
198 | git-last-commit-msg:
199 | desc: show last commit
200 | spec: custom
201 | cmd: git log -1 --pretty=%B
202 |
203 | git-last-commit-date:
204 | desc: show last commit date
205 | spec: custom
206 | cmd: |
207 | git log -1 --format="%cd (%cr)" -n 1 --date=format:"%d %b %y" \
208 | | sed 's/ //'
209 |
210 | git-branch:
211 | desc: show current git branch
212 | spec: custom
213 | cmd: git rev-parse --abbrev-ref HEAD
214 |
215 | git-daily:
216 | desc: show branch, local and remote diffs, last commit and date
217 | spec: custom
218 | commands:
219 | - name: branch
220 | cmd: git rev-parse --abbrev-ref HEAD
221 |
222 | - name: status
223 | cmd: git status
224 |
225 | - name: local diff
226 | cmd: git diff --name-only | wc -l
227 |
228 | - name: remote diff
229 | cmd: |
230 | current_branch=$(git rev-parse --abbrev-ref HEAD)
231 | git diff "$current_branch" "origin/$current_branch" --name-only 2> /dev/null | wc -l
232 |
233 | - name: last commit
234 | cmd: git log -1 --pretty=%B
235 |
236 | - name: commit date
237 | cmd: git log -1 --format="%cd (%cr)" -n 1 --date=format:"%d %b %y" | sed 's/ //'
238 |
239 | ##############################
240 | ###### PROJECT MAPPINGS ######
241 | ##############################
242 |
243 | ### PROJECTS
244 | projects:
245 | PBG:
246 | path: .
247 |
248 | #### Bear
249 | bear:
250 | path: pbg/bear
251 | url: git@github.com:PythonBiellaGroup/Bear.git
252 | tags: [project, bear, pbg]
253 | env:
254 | remote_url: git@github.com:PythonBiellaGroup/Bear.git
255 |
--------------------------------------------------------------------------------
/readme/README-en.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | [![Contributors][contributors-shield]][contributors-url] [![Forks][forks-shield]][forks-url] [![Stargazers][stars-shield]][stars-url] [![Issues][issues-shield]][issues-url] [![MIT License][license-shield]][license-url]
6 |
7 |
8 |
9 |
Python Biella Group: Bear
10 |
11 |
Base Environment for Any Reasonable project
12 |
13 |
Report Bug
14 | ·
15 |
Request Feature
16 |
17 |
18 |
19 |
20 | Table of Contents
21 |
22 | About The Project
23 | Built With
24 | Roadmap
25 | Usage
26 | How to maintain it
27 | Technical Documentation
28 | Contributing
29 | License
30 | Known issues
31 | Contacts
32 | References
33 |
34 |
35 |
36 | ## About The Project
37 |
This project is the base template that we use in PythonBiellaGroup to create our tools, libraries and projects.
39 |
40 | We call it **Bear** because it's a **B**ase **E**nvironment for **A**ny **R**easonable project and also because the bear is the symbol of the city of Biella (Piedmont) Italy.
41 |
42 | ### Requirements
43 |
44 | For this project it's required to have installed:
45 |
46 | - Python: >=3.10 <=3.12
47 | - uv
48 |
49 | ### Built With
50 |
51 | It's based on **Modern Python Tools** such as:
52 |
53 | - cookiecutter: for templating
54 | - uv: for dependency management
55 | - mypy: for static type checking
56 | - ruff: for code formatting, linting, checking, security, etc..
57 | - pytest: for testing
58 | - pre-commit: for pre-commit hooks
59 | - pydantic: for type checking
60 | - pydantic-settings: for the setting management
61 |
We suggest using **VSCode** as the IDE for this project since you can find a lot of prepared configurations for:
63 |
64 | - debugging
65 | - testing
66 | - settings
67 | - extensions
68 | - testone
69 |
You can find extensive documentation created with **mkdocs** at [this GitHub page link](https://pythonbiellagroup.github.io/bear/)
71 |
72 | ## Roadmap
73 |
74 | - [x] Update the project with uv
75 | - [x] Fix cookiecutter for windows powershell usage
76 | - [x] Add mkdocs implementation on Base package with an example with python code
77 | - [x] Fix pre-commit
78 | - [x] Add better implementation of detect-secrets
79 | - [x] Better README documentation and CONTRIBUTING documentation
80 | - [x] Fix the docker with poetry
81 | - [x] Fix the devcontainer
82 | - [x] Add a docker container with PDM installation
83 | - [x] Add package build
84 |
85 | See the [open issues](https://github.com/PythonBiellaGroup/Bear/issues) for a full list of proposed features (and known issues).
86 |
87 | (back to top )
88 |
89 | ## Usage
90 |
91 | You can use this repository as a template to create your own project with **cookiecutter**
92 |
93 | Just remember to add **cookiecutter** as a dependency into your local version installation of python using pip (or something else).
94 |
95 | ```bash
96 | pip install cookiecutter
97 | ```
98 |
99 | You can use this following command (both on Windows and Posix systems):
100 |
101 | ```bash
102 | # If you are using https
103 | cookiecutter https://github.com/PythonBiellaGroup/Bear.git
104 |
105 | # If you are using ssh
106 | cookiecutter git@github.com:PythonBiellaGroup/Bear.git
107 | ```
108 |
Once you launch these commands, just follow the guide and fill in the required fields.
110 |
111 | You can also create an Alias for this command to make it easier to use it in your terminal:
112 |
113 | ```bash
114 | # If you are using https
115 | alias pbg-project="cookiecutter https://github.com/PythonBiellaGroup/Bear.git --overwrite-if-exists"
116 |
117 | # If you are using ssh
118 | alias pbg-project="cookiecutter git@github.com:PythonBiellaGroup/Bear.git --overwrite-if-exists"
119 | ```
120 |
so you can simply use the command `pbg-project` after you restart your terminal to download and create a new project.
122 |
123 | (back to top )
124 |
125 | ## How to maintain it
126 |
127 | Unfortunately there is no automatic way to update the templates inside cookiecutter yet, you have to do it manually.
128 |
129 | 1. Clone the repository
130 | 2. Launch the dependency installation using uv: `uv sync`
131 | 3. Modify something
132 | 4. If you want to test a specific inner template you can use the command: `make bake-test`, it will create a new folder with the name `testone` and it will copy the template inside it.
133 | 1. After that you can launch and modify the template
134 | 2. When you finish your modification you have to copy and paste all the modifications manually inside the cookiecutter generation folder
5. Then remember to open a pull request or push to the repository (in develop first) if you have the permissions.
136 |
137 | Please remember also to follow a Gitflow workflow and to use the **develop** branch as the main branch for development.
138 |
139 | (back to top )
140 |
141 | ## Known issues
142 |
143 | With mac if you want to use `devcontainer` with vscode probably you will experience a long building time on the first time. This is due to the `amd64` base docker image we are using as a baseline.
144 |
145 | (back to top )
146 |
147 | ## References
148 |
Useful links and other documentation websites you can check
150 |
151 | - [Our website with the documentation](https://pythonbiellagroup.it)
152 | - [The repository for our documentation](https://github.com/PythonBiellaGroup/doc-website)
153 | - [Hypermodern python repository](https://github.com/cjolowicz/hypermodern-python)
154 | - [The hypermodern python official medium article](https://medium.com/@cjolowicz/hypermodern-python-d44485d9d769)
155 | - [Modern Python repository](https://github.com/rhettinger/modernpython)
156 | - [Awesome Pyproject](https://github.com/carlosperate/awesome-pyproject/blob/master/README.md)
157 | - [Python developer roadmap](https://roadmap.sh/python/)
158 | - [Creating a modern python development environment medium article](https://itnext.io/creating-a-modern-python-development-environment-3d383c944877)
159 | - [Modern python interesting practices](https://www.stuartellis.name/articles/python-modern-practices/)
160 | - [4 Keys to write modern python in 2022](https://www.infoworld.com/article/3648061/4-keys-to-writing-modern-python-in-2022.html)
161 | - [cookiecutter-poetry good implementation](https://github.com/fpgmaas/cookiecutter-poetry)
162 | - [dev container video tutorial](https://www.youtube.com/watch?v=0H2miBK_gAk)
163 | - [Ruff official documentation](https://github.com/charliermarsh/ruff/blob/main/README.md)
164 | - [Ruff vscode extension](https://marketplace.visualstudio.com/items?itemName=charliermarsh.ruff)
165 | - [Chef repository with some more modern tooling](https://github.com/baggiponte/chef)
166 |
167 | (back to top )
168 |
169 | [contributors-shield]: https://img.shields.io/github/contributors/PythonBiellaGroup/Bear.svg?style=for-the-badge
170 | [contributors-url]: https://github.com/PythonBiellaGroup/Bear/graphs/contributors
171 | [forks-shield]: https://img.shields.io/github/forks/PythonBiellaGroup/Bear.svg?style=for-the-badge
172 | [forks-url]: https://github.com/PythonBiellaGroup/Bear/forks
173 | [stars-shield]: https://img.shields.io/github/stars/PythonBiellaGroup/Bear.svg?style=for-the-badge
174 | [stars-url]: https://github.com/PythonBiellaGroup/Bear/stargazers
175 | [issues-shield]: https://img.shields.io/github/issues/PythonBiellaGroup/Bear.svg?style=for-the-badge
176 | [issues-url]: https://github.com/PythonBiellaGroup/Bear/issues
177 | [license-shield]: https://img.shields.io/github/license/PythonBiellaGroup/Bear.svg?style=for-the-badge
178 | [license-url]: https://github.com/PythonBiellaGroup/Bear/blob/main/LICENSE
179 | [contacts-shield]: https://img.shields.io/badge/linktree-39E09B?style=for-the-badge&logo=linktree&logoColor=white
180 | [contacts-url]: https://linktr.ee/PythonBiellaGroup
181 |
--------------------------------------------------------------------------------
/readme/README-it.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | [![Contributors][contributors-shield]][contributors-url] [![Forks][forks-shield]][forks-url] [![Stargazers][stars-shield]][stars-url] [![Issues][issues-shield]][issues-url] [![MIT License][license-shield]][license-url]
5 |
6 |
17 |
18 |
19 | Indice
20 |
21 | Informazioni sul Progetto
22 | Costruito con
23 | Roadmap
24 | Utilizzo
25 | Come mantenerlo
26 | Documentazione Tecnica
27 | Contributi
28 | Licenza
29 | Problemi Conosciuti
30 | Contatti
31 | Riferimenti
32 |
33 |
34 |
35 | ## Informazioni sul Progetto
36 |
37 | Questo progetto è il modello base per un progetto Python che utilizziamo in PythonBiellaGroup per creare i nostri strumenti, librerie e progetti.
38 |
39 | Lo chiamiamo **Bear** perché è un **B**ase **E**nvironment for **A**ny **R**easonable project e anche perché l'orso è il simbolo della città di Biella (Piemonte), Italia.
40 |
41 | ### Requisiti
42 |
43 | Per questo progetto è necessario avere installato:
44 |
45 | - Python: >=3.10 <=3.12
46 | - uv
47 |
48 | ### Costruito con
49 |
50 | Si basa su **Strumenti Moderni per Python** come:
51 |
52 | - cookiecutter: per il templating
53 | - uv: per la gestione delle dipendenze
54 | - mypy: per il controllo statico dei tipi
55 | - ruff: per formattazione del codice, linting, controllo, sicurezza, ecc.
56 | - pytest: per i test
57 | - pre-commit: per i pre-commit hook
58 | - pydantic: per il controllo dei tipi
59 | - pydantic-settings: per la gestione delle impostazioni
60 |
61 | Suggeriamo di utilizzare **VSCode** come IDE per questo progetto poiché puoi trovare molte configurazioni già pronte per:
62 |
63 | - debug
64 | - test
65 | - impostazioni
66 | - estensioni
67 | - testone
68 |
69 | Puoi trovare un'ampia documentazione creata con **mkdocs** a [questo link della pagina GitHub](https://pythonbiellagroup.github.io/bear/)
70 |
71 | ## Roadmap
72 |
73 | - [x] Aggiornare il progetto con uv
74 | - [x] Correggere cookiecutter per l'uso con Windows PowerShell
75 | - [x] Aggiungere implementazione mkdocs nel pacchetto base con un esempio di codice Python
76 | - [x] Correggere pre-commit
77 | - [x] Aggiungere una migliore implementazione di detect-secrets
78 | - [x] Migliorare la documentazione README e CONTRIBUTING
79 | - [x] Correggere il docker con poetry
80 | - [x] Correggere il devcontainer
81 | - [x] Aggiungere un container Docker con installazione di PDM
82 | - [x] Aggiungere build del pacchetto
83 |
84 | Consulta le [issue aperte](https://github.com/PythonBiellaGroup/Bear/issues) per un elenco completo delle funzionalità proposte (e dei problemi noti).
85 |
86 | (torna in alto )
87 |
88 | ## Utilizzo
89 |
90 | Puoi utilizzare questo repository come modello per creare il tuo progetto con **cookiecutter**
91 |
92 | Ricorda di aggiungere **cookiecutter** come dipendenza nella tua installazione locale di Python utilizzando pip (o altro).
93 |
94 | ```bash
95 | pip install cookiecutter
96 |
97 | # Se stai usando https
98 | cookiecutter https://github.com/PythonBiellaGroup/Bear.git
99 |
100 | # Se stai usando ssh
101 | cookiecutter git@github.com:PythonBiellaGroup/Bear.git
102 | ```
103 |
104 | Una volta lanciati questi comandi, segui la guida e compila i campi richiesti.
105 |
106 | Puoi anche creare un Alias per questo comando per renderlo più facile da usare nel tuo terminale
107 |
108 | ```bash
109 | # Se stai usando https
110 | alias pbg-project="cookiecutter https://github.com/PythonBiellaGroup/Bear.git --overwrite-if-exists"
111 |
112 | # Se stai usando ssh
113 | alias pbg-project="cookiecutter git@github.com:PythonBiellaGroup/Bear.git --overwrite-if-exists"
114 | ```
115 |
Così potrai semplicemente usare il comando `pbg-project` dopo aver riavviato il terminale per scaricare e creare un nuovo progetto.

(torna in alto )

## Come mantenerlo

Purtroppo non esiste ancora un modo automatico per aggiornare i modelli all'interno di cookiecutter, devi farlo manualmente.
124 |
1. Clona il repository
2. Lancia l'installazione delle dipendenze usando uv: `uv sync`
3. Modifica qualcosa
4. Se vuoi testare un modello interno specifico puoi usare il comando: `make bake-test`, creerà una nuova cartella con il nome `testone` e copierà il modello al suo interno.
   1. Dopo di ciò puoi lanciare e modificare il modello
   2. Quando hai finito le modifiche devi copiare e incollare manualmente tutte le modifiche nella cartella di generazione di cookiecutter
5. Ricorda poi di aprire una pull request o di fare il push nel repository (prima in develop) se hai i permessi.

Ricorda anche di seguire un flusso di lavoro Gitflow e di utilizzare il branch **develop** come branch principale per lo sviluppo.
133 |
134 | (torna in alto )
135 |
136 | ## Problemi conosciuti
137 |
138 | Su Mac, se vuoi utilizzare devcontainer con vscode probabilmente sperimenterai un lungo tempo di build la prima volta. Questo è dovuto all'immagine Docker base amd64 che stiamo utilizzando come base.
139 |
140 | (torna in alto )
141 |
## Riferimenti

Link utili e altri siti di documentazione che puoi consultare
145 |
146 | - [Our website with the documentation](https://pythonbiellagroup.it)
147 | - [The repository for our documentation](https://github.com/PythonBiellaGroup/doc-website)
148 | - [Hypermodern python repository](https://github.com/cjolowicz/hypermodern-python)
149 | - [The hypermodern python official medium article](https://medium.com/@cjolowicz/hypermodern-python-d44485d9d769)
150 | - [Modern Python repository](https://github.com/rhettinger/modernpython)
151 | - [Awesome Pyproject](https://github.com/carlosperate/awesome-pyproject/blob/master/README.md)
152 | - [Python developer roadmap](https://roadmap.sh/python/)
153 | - [Creating a modern python development environment medium article](https://itnext.io/creating-a-modern-python-development-environment-3d383c944877)
154 | - [Modern python interesting practices](https://www.stuartellis.name/articles/python-modern-practices/)
155 | - [4 Keys to write modern python in 2022](https://www.infoworld.com/article/3648061/4-keys-to-writing-modern-python-in-2022.html)
156 | - [cookiecutter-poetry good implementation](https://github.com/fpgmaas/cookiecutter-poetry)
157 | - [dev container video tutorial](https://www.youtube.com/watch?v=0H2miBK_gAk)
158 | - [Ruff official documentation](https://github.com/charliermarsh/ruff/blob/main/README.md)
159 | - [Ruff vscode extension](https://marketplace.visualstudio.com/items?itemName=charliermarsh.ruff)
160 | - [Chef repository with some more modern tooling](https://github.com/baggiponte/chef)
161 |
(torna in alto )
163 |
164 | [contributors-shield]: https://img.shields.io/github/contributors/PythonBiellaGroup/Bear.svg?style=for-the-badge
165 | [contributors-url]: https://github.com/PythonBiellaGroup/Bear/graphs/contributors
166 | [forks-shield]: https://img.shields.io/github/forks/PythonBiellaGroup/Bear.svg?style=for-the-badge
167 | [forks-url]: https://github.com/PythonBiellaGroup/Bear/forks
168 | [stars-shield]: https://img.shields.io/github/stars/PythonBiellaGroup/Bear.svg?style=for-the-badge
169 | [stars-url]: https://github.com/PythonBiellaGroup/Bear/stargazers
170 | [issues-shield]: https://img.shields.io/github/issues/PythonBiellaGroup/Bear.svg?style=for-the-badge
171 | [issues-url]: https://github.com/PythonBiellaGroup/Bear/issues
172 | [license-shield]: https://img.shields.io/github/license/PythonBiellaGroup/Bear.svg?style=for-the-badge
173 | [license-url]: https://github.com/PythonBiellaGroup/Bear/blob/main/LICENSE
174 | [contacts-shield]: https://img.shields.io/badge/linktree-39E09B?style=for-the-badge&logo=linktree&logoColor=white
175 | [contacts-url]: https://linktr.ee/PythonBiellaGroup
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Personal Gitignore
# Created on: 30/11/2020
3 | # By: Andrea Guzzo
4 |
5 | # Hint: Search '>' for the macro rules
6 |
7 | ########################
8 | #### > CUSTOM RULES ####
9 | ########################
10 |
11 | .DS_Store
12 | .venv
13 | .pdm.toml
14 | .ruff_cache
15 | .pdm-python
16 | .env
17 | .python-version
18 | logs
19 | !.devcontainer
20 | !.flake8
21 | data
22 | duckdb
23 | test/db/files/*
24 | test/db
25 |
26 | #We don't want to push the data
27 | *.csv
28 | *.xlsx
29 | *.xls
30 |
31 | #We want to push and sync also the project gitlab templates
32 | !.gitlab
33 |
34 | #Remove also the saved models
35 | saved_models
36 |
37 | # Mkdocs
38 | site
39 |
40 | ##########################################
41 | #### > PyCharm e JetBrains con JIRA #####
42 | ##########################################
43 |
44 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
45 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
46 |
47 | # User-specific stuff
48 | .idea/**/workspace.xml
49 | .idea/**/tasks.xml
50 | .idea/**/usage.statistics.xml
51 | .idea/**/dictionaries
52 | .idea/**/shelf
53 |
54 | # Generated files
55 | .idea/**/contentModel.xml
56 |
57 | # Sensitive or high-churn files
58 | .idea/**/dataSources/
59 | .idea/**/dataSources.ids
60 | .idea/**/dataSources.local.xml
61 | .idea/**/sqlDataSources.xml
62 | .idea/**/dynamic.xml
63 | .idea/**/uiDesigner.xml
64 | .idea/**/dbnavigator.xml
65 |
66 | # Gradle
67 | .idea/**/gradle.xml
68 | .idea/**/libraries
69 |
70 | # Gradle and Maven with auto-import
71 | # When using Gradle or Maven with auto-import, you should exclude module files,
72 | # since they will be recreated, and may cause churn. Uncomment if using
73 | # auto-import.
74 | # .idea/artifacts
75 | # .idea/compiler.xml
76 | # .idea/jarRepositories.xml
77 | # .idea/modules.xml
78 | # .idea/*.iml
79 | # .idea/modules
80 | # *.iml
81 | # *.ipr
82 |
83 | # CMake
84 | cmake-build-*/
85 |
86 | # Mongo Explorer plugin
87 | .idea/**/mongoSettings.xml
88 |
89 | # File-based project format
90 | *.iws
91 |
92 | # IntelliJ
93 | out/
94 |
95 | # mpeltonen/sbt-idea plugin
96 | .idea_modules/
97 |
98 | # JIRA plugin
99 | atlassian-ide-plugin.xml
100 |
101 | # Cursive Clojure plugin
102 | .idea/replstate.xml
103 |
104 | # Crashlytics plugin (for Android Studio and IntelliJ)
105 | com_crashlytics_export_strings.xml
106 | crashlytics.properties
107 | crashlytics-build.properties
108 | fabric.properties
109 |
110 | # Editor-based Rest Client
111 | .idea/httpRequests
112 |
113 | # Android studio 3.1+ serialized cache file
114 | .idea/caches/build_file_checksums.ser
115 |
116 | ######################################
117 | #### > VSCode Visual Studio Code #####
118 | ######################################
119 |
120 | !.vscode/settings.json
121 | !.vscode/tasks.json
122 | !.vscode/launch.json
123 | !.vscode/extensions.json
124 | *.code-workspace
125 |
126 |
127 | !launch.json
128 |
129 | # Local History for Visual Studio Code
130 | .history/
131 |
132 | #################################
133 | #### > OS - Operating System ####
134 | #################################
135 |
136 |
137 | ######## MAC
138 |
139 | # General
140 | .DS_Store
141 | .AppleDouble
142 | .LSOverride
143 |
144 | # Icon must end with two \r
145 | Icon
146 |
147 | # Thumbnails
148 | ._*
149 |
150 | # Files that might appear in the root of a volume
151 | .DocumentRevisions-V100
152 | .fseventsd
153 | .Spotlight-V100
154 | .TemporaryItems
155 | .Trashes
156 | .VolumeIcon.icns
157 | .com.apple.timemachine.donotpresent
158 |
159 | # Directories potentially created on remote AFP share
160 | .AppleDB
161 | .AppleDesktop
162 | Network Trash Folder
163 | Temporary Items
164 | .apdisk
165 |
166 |
167 | ######### WINDOWS
168 |
169 | # Windows thumbnail cache files
170 | Thumbs.db
171 | Thumbs.db:encryptable
172 | ehthumbs.db
173 | ehthumbs_vista.db
174 |
175 | # Dump file
176 | *.stackdump
177 |
178 | # Folder config file
179 | [Dd]esktop.ini
180 |
181 | # Recycle Bin used on file shares
182 | $RECYCLE.BIN/
183 |
184 | # Windows Installer files
185 | *.cab
186 | *.msi
187 | *.msix
188 | *.msm
189 | *.msp
190 |
191 | # Windows shortcuts
192 | *.lnk
193 |
194 |
195 |
196 | ######### LINUX
197 |
198 | # SPDX-License-Identifier: GPL-2.0-only
199 | #
200 | # NOTE! Don't add files that are generated in specific
201 | # subdirectories here. Add them in the ".gitignore" file
202 | # in that subdirectory instead.
203 | #
204 | # NOTE! Please use 'git ls-files -i --exclude-standard'
205 | # command after changing this file, to see if there are
206 | # any tracked files which get ignored after the change.
207 | #
208 | # Normal rules (sorted alphabetically)
209 | #
210 | *.a
211 | *.asn1.[ch]
212 | *.bin
213 | *.bz2
214 | *.c.[012]*.*
215 | *.dt.yaml
216 | *.dtb
217 | *.dtb.S
218 | *.dwo
219 | *.elf
220 | *.gcno
221 | *.gz
222 | *.i
223 | *.ko
224 | *.lex.c
225 | *.ll
226 | *.lst
227 | *.lz4
228 | *.lzma
229 | *.lzo
230 | *.mod
231 | *.mod.c
232 | *.o
233 | *.o.*
234 | *.patch
235 | *.s
236 | *.so
237 | *.so.dbg
238 | *.su
239 | *.symtypes
240 | *.tab.[ch]
241 | *.tar
242 | *.xz
243 | *.zst
244 | Module.symvers
245 | modules.builtin
246 | modules.order
247 |
248 | #
249 | # Top-level generic files
250 | #
251 | /tags
252 | /TAGS
253 | /linux
254 | /vmlinux
255 | /vmlinux.32
256 | /vmlinux.symvers
257 | /vmlinux-gdb.py
258 | /vmlinuz
259 | /System.map
260 | /Module.markers
261 | /modules.builtin.modinfo
262 | /modules.nsdeps
263 |
264 | #
265 | # RPM spec file (make rpm-pkg)
266 | #
267 | /*.spec
268 |
269 | #
270 | # Debian directory (make deb-pkg)
271 | #
272 | /debian/
273 |
274 | #
275 | # Snap directory (make snap-pkg)
276 | #
277 | /snap/
278 |
279 | #
280 | # tar directory (make tar*-pkg)
281 | #
282 | /tar-install/
283 |
284 | #
285 | # We don't want to ignore the following even if they are dot-files
286 | #
287 | !.clang-format
288 | !.cocciconfig
289 | !.get_maintainer.ignore
290 | !.gitattributes
291 | !.gitignore
292 | !.mailmap
293 |
294 | #
295 | # Generated include files
296 | #
297 | /include/config/
298 | /include/generated/
299 | /include/ksym/
300 | /arch/*/include/generated/
301 |
302 | # stgit generated dirs
303 | patches-*
304 |
305 | # quilt's files
306 | patches
307 | series
308 |
309 | # cscope files
310 | cscope.*
311 | ncscope.*
312 |
313 | # gnu global files
314 | GPATH
315 | GRTAGS
316 | GSYMS
317 | GTAGS
318 |
319 | # id-utils files
320 | ID
321 |
322 | *.orig
323 | *~
324 | \#*#
325 |
326 | #
327 | # Leavings from module signing
328 | #
329 | extra_certificates
330 | signing_key.pem
331 | signing_key.priv
332 | signing_key.x509
333 | x509.genkey
334 |
335 | # Kconfig presets
336 | /all.config
337 | /alldef.config
338 | /allmod.config
339 | /allno.config
340 | /allrandom.config
341 | /allyes.config
342 |
343 | # Kconfig savedefconfig output
344 | /defconfig
345 |
346 | # Kdevelop4
347 | *.kdev4
348 |
349 | # Clang's compilation database file
350 | /compile_commands.json
351 |
352 | # Documentation toolchain
353 | sphinx_*/
354 |
355 | ###################
356 | #### > PYTHON #####
357 | ###################
358 |
359 | # Byte-compiled / optimized / DLL files
360 | __pycache__/
361 | *.py[cod]
362 | *$py.class
363 |
364 | # C extensions
365 | *.so
366 |
367 | # Distribution / packaging
368 | .Python
369 | build/
370 | develop-eggs/
371 | dist/
372 | downloads/
373 | eggs/
374 | .eggs/
375 | lib/
376 | lib64/
377 | parts/
378 | sdist/
379 | var/
380 | wheels/
381 | share/python-wheels/
382 | *.egg-info/
383 | .installed.cfg
384 | *.egg
385 | MANIFEST
386 |
387 | # PyInstaller
388 | # Usually these files are written by a python script from a template
389 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
390 | *.manifest
391 | *.spec
392 |
393 | # Installer logs
394 | pip-log.txt
395 | pip-delete-this-directory.txt
396 |
397 | # Unit test / coverage reports
398 | htmlcov/
399 | .tox/
400 | .nox/
401 | .coverage
402 | .coverage.*
403 | .cache
404 | nosetests.xml
405 | coverage.xml
406 | *.cover
407 | *.py,cover
408 | .hypothesis/
409 | .pytest_cache/
410 | cover/
411 |
412 | # Translations
413 | *.mo
414 | *.pot
415 |
416 | # Django stuff:
417 | *.log
418 | local_settings.py
419 | db.sqlite3
420 | db.sqlite3-journal
421 |
422 | # Flask stuff:
423 | instance/
424 | .webassets-cache
425 |
426 | # Scrapy stuff:
427 | .scrapy
428 |
429 | # Sphinx documentation
430 | docs/_build/
431 |
432 | # PyBuilder
433 | .pybuilder/
434 | target/
435 |
436 | # Poetry
437 | #poetry.lock
438 |
439 |
440 | # Jupyter Notebook
441 | .ipynb_checkpoints
442 |
443 | # IPython
444 | profile_default/
445 | ipython_config.py
446 |
447 | # pyenv
448 | # For a library or package, you might want to ignore these files since the code is
449 | # intended to run in multiple environments; otherwise, check them in:
450 | # .python-version
451 |
452 | # pipenv
453 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
454 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
455 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
456 | # install all needed dependencies.
457 | #Pipfile.lock
458 |
459 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
460 | __pypackages__/
461 |
462 | # Celery stuff
463 | celerybeat-schedule
464 | celerybeat.pid
465 |
466 | # SageMath parsed files
467 | *.sage.py
468 |
469 | # Environments
470 | .venv
471 | env/
472 | venv/
473 | ENV/
474 | env.bak/
475 | venv.bak/
476 |
477 | # Spyder project settings
478 | .spyderproject
479 | .spyproject
480 |
481 | # Rope project settings
482 | .ropeproject
483 |
484 | # mkdocs documentation
485 | /site
486 |
487 | # mypy
488 | .mypy_cache/
489 | .dmypy.json
490 | dmypy.json
491 |
492 | # Pyre type checker
493 | .pyre/
494 |
495 | # pytype static type analyzer
496 | .pytype/
497 |
498 | # Cython debug symbols
499 | cython_debug/
500 |
501 | # data and backup folders used by db
502 | backup/
503 |
--------------------------------------------------------------------------------