├── .env ├── .github └── workflows │ └── flake8.yml ├── .gitignore ├── Dockerfile ├── Jenkinsfile ├── README.md ├── alembic.ini ├── core ├── __init__.py └── config.py ├── db ├── __init__.py ├── base.py ├── db_session.py └── items.py ├── docker-compose.yml ├── endpoints ├── __init__.py ├── base.py ├── items.py ├── liveness_probe.py └── s3.py ├── main.py ├── migrations ├── README ├── __init__.py ├── env.py ├── script.py.mako └── versions │ └── __init__.py ├── models ├── __init__.py └── items.py ├── requirements.txt ├── tests ├── __init__.py ├── base_test.py ├── test_data │ ├── data1.txt │ └── data2.txt └── test_s3.py └── utils ├── __init__.py ├── s3.py └── validate.py /.env: -------------------------------------------------------------------------------- 1 | DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:5432/back_end 2 | SERVER_HOST=0.0.0.0 3 | SERVER_PORT=8080 4 | 5 | ENDPOINT_URL=http://127.0.0.1:9000 6 | BUCKET=back_end 7 | ACCESS_KEY=minioadmin 8 | SECRET_KEY=minioadmin 9 | -------------------------------------------------------------------------------- /.github/workflows/flake8.yml: -------------------------------------------------------------------------------- 1 | name: Flake8 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | python-version: ["3.8", "3.9", "3.10"] 11 | steps: 12 | - uses: actions/checkout@v3 13 | - name: Set up Python ${{ matrix.python-version }} 14 | uses: actions/setup-python@v3 15 | with: 16 | python-version: ${{ matrix.python-version }} 17 | - name: Install dependencies 18 | run: | 19 | python -m pip install --upgrade pip 20 | pip install -r requirements.txt 21 | pip install flake8 22 | - name: Analysing the code with flake8 23 | run: | 24 | flake8 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # My tests 7 | check.py 8 | 9 | # Store 10 | .DS_Store 11 | 12 | # pipenv 13 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 14 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 15 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 16 | # install all needed dependencies. 17 | #Pipfile.lock 18 | 19 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow
20 | __pypackages__/
21 | 
22 | # Environments
23 | .venv
24 | env/
25 | venv/
26 | 
27 | # Pyre type checker
28 | .pyre/
29 | .idea/*
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.8-slim
2 | WORKDIR /app
3 | COPY . /app
4 | RUN pip install --no-cache-dir -r requirements.txt
5 | EXPOSE 8080
6 | CMD ["python", "main.py"]
--------------------------------------------------------------------------------
/Jenkinsfile:
--------------------------------------------------------------------------------
1 | @Library('jenkins-library') _
2 | 
3 | node('cloud-agent') {
4 | 
5 |     stage('Clone') {
6 |         container('kaniko') {
7 |             checkout scm
8 |             stash includes: '**', name: 'repo'
9 |         }
10 |     }
11 | 
12 |     stage('Tests') {
13 |         node('centos') {
14 |             unstash 'repo'
15 |             sh 'docker system prune -f'
16 |             sh 'docker-compose up --build --exit-code-from app'
17 |         }
18 |     }
19 | 
20 |     stage('Kaniko Build & Push Image') {
21 |         container('kaniko') {
22 |             scmInfo = checkout scm
23 |             runKanikoBuildPush(GIT_COMMIT: "${scmInfo.GIT_COMMIT}")
24 |         }
25 |     }
26 | 
27 |     stage('Helm chart update and commit') {
28 |         container('jenkins-yq') {
29 |             withCredentials([gitUsernamePassword(credentialsId: 'gitlab_cred', gitToolName: 'git-tool')]) {
30 |                 runHelmUpdateCommit(GIT_COMMIT: "${scmInfo.GIT_COMMIT}", INIT_CONTAINER: "false")
31 |             }
32 |         }
33 |     }
34 | }
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # REST API with FastAPI, PostgreSQL, Alembic, SQLAlchemy and Minio (S3 cloud storage)
2 | ![flake8-linter](https://github.com/sav116/REST-API/actions/workflows/flake8.yml/badge.svg)
3 | 
4 | [FastAPI](https://fastapi.tiangolo.com/) is a modern, high-performance Python web framework for building REST APIs, with automatic request validation and interactive API documentation out of the box.
5 | 
6 | [SQLAlchemy](https://www.sqlalchemy.org/) is a package that makes it easier for Python programs to communicate with databases. Most of the time, this library is used as an Object Relational Mapper (ORM) tool, which automatically converts function calls to SQL queries and maps Python classes to tables in relational databases.
7 | 
8 | Many web, mobile, geospatial, and analytics applications use [PostgreSQL](https://www.postgresql.org/) as their primary data store or data warehouse.
9 | 
10 | [Alembic](https://alembic.sqlalchemy.org/en/latest/) is a lightweight database migration tool for use with the SQLAlchemy Database Toolkit for Python.
11 | 
12 | [Minio](https://min.io/) is a popular open-source object storage server compatible with Amazon S3 cloud storage.
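13 | 
14 | For example, once the stack described below is running, the `items` endpoints defined in `endpoints/items.py` can be exercised from Python using `requests` (already listed in `requirements.txt`). The id and field values here are illustrative only; note that the `Item` model requires an explicit `id`:
15 | ```
16 | import requests
17 | 
18 | item = {"id": 1, "name": "widget", "description": "a test item", "price": 10, "on_offer": True}
19 | print(requests.post("http://localhost:8080/items", json=item).json())
20 | print(requests.get("http://localhost:8080/items").json())
21 | print(requests.get("http://localhost:8080/item/1").json())
22 | ```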
23 | 
24 | ## How to run the REST API
25 | Clone this project from GitHub
26 | ```
27 | git clone https://github.com/sav116/REST-API
28 | ```
29 | 
30 | ### Installing PostgreSQL
31 | Setting environment variables
32 | ```
33 | SRC_DB_DATA=~/db_data
34 | POSTGRES_DB=back_end
35 | POSTGRES_USER=postgres
36 | POSTGRES_PASSWORD=postgres
37 | ```
38 | 
39 | 
40 | Creating a directory for the database volume
41 | ```
42 | mkdir $SRC_DB_DATA
43 | ```
44 | 
45 | Starting the db container
46 | ```
47 | docker run -d --name my_postgres \
48 |     -v $SRC_DB_DATA:/var/lib/postgresql/data -p 5432:5432 \
49 |     -e POSTGRES_PASSWORD=$POSTGRES_PASSWORD \
50 |     -e POSTGRES_USER=$POSTGRES_USER \
51 |     -e POSTGRES_DB=$POSTGRES_DB \
52 |     postgres:14
53 | ```
54 | 
55 | ### Installing Minio
56 | Setting environment variables (the bucket name must match `BUCKET` in `.env`)
57 | ```
58 | SRC_MINIO_DATA=~/minio_data
59 | MINIO_BUCKET=back_end
60 | ```
61 | 
62 | Starting the minio container
63 | ```
64 | docker run --name my_minio -d -v $SRC_MINIO_DATA:/data \
65 |     -p 9000:9000 \
66 |     -p 9001:9001 \
67 |     minio/minio server /data --console-address ":9001"
68 | ```
69 | 
70 | ### Installing minio-client to create the bucket
71 | OS X:
72 | ```
73 | wget https://dl.minio.io/client/mc/release/darwin-amd64/mc
74 | ```
75 | 
76 | Linux:
77 | ```
78 | wget https://dl.minio.io/client/mc/release/linux-amd64/mc
79 | ```
80 | 
81 | Making it executable and renaming it
82 | ```
83 | chmod +x mc && mv mc minio-client
84 | ```
85 | 
86 | Creating the bucket
87 | ```
88 | ./minio-client alias set minio http://localhost:9000 minioadmin minioadmin
89 | ./minio-client mb minio/$MINIO_BUCKET
90 | ```
91 | 
92 | ## Migrating the DB schema using Alembic
93 | 
94 | Installing Alembic
95 | ```
96 | cd REST-API && pip install alembic psycopg2 environs
97 | ```
98 | 
99 | Creating a new schema revision
100 | ```
101 | alembic revision --autogenerate -m "added table items"
102 | ```
103 | 
104 | Applying the migrations
105 | ```
106 | alembic upgrade head
107 | ```
108 | 
109 | ## Building and starting the application
110 | 
111 | ```
112 | docker build -t back_end . && docker run -p 8080:8080 -it back_end
113 | ```
114 | 
115 | #### Swagger UI is available at http://localhost:8080/docs
116 | 
117 | ## Author
118 | [Artem Solovev](https://github.com/sav116)
119 | 
120 | 
--------------------------------------------------------------------------------
/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 | 
3 | [alembic]
4 | # path to migration scripts
5 | script_location = migrations
6 | 
7 | # template used to generate migration files
8 | # file_template = %%(rev)s_%%(slug)s
9 | 
10 | # sys.path path, will be prepended to sys.path if present.
11 | # defaults to the current working directory.
12 | prepend_sys_path = .
13 | 
14 | # timezone to use when rendering the date within the migration file
15 | # as well as the filename.
16 | # If specified, requires the python-dateutil library that can be 17 | # installed by adding `alembic[tz]` to the pip requirements 18 | # string value is passed to dateutil.tz.gettz() 19 | # leave blank for localtime 20 | # timezone = 21 | 22 | # max length of characters to apply to the 23 | # "slug" field 24 | # truncate_slug_length = 40 25 | 26 | # set to 'true' to run the environment during 27 | # the 'revision' command, regardless of autogenerate 28 | # revision_environment = false 29 | 30 | # set to 'true' to allow .pyc and .pyo files without 31 | # a source .py file to be detected as revisions in the 32 | # versions/ directory 33 | # sourceless = false 34 | 35 | # version location specification; This defaults 36 | # to migrations/versions. When using multiple version 37 | # directories, initial revisions must be specified with --version-path. 38 | # The path separator used here should be the separator specified by "version_path_separator" below. 39 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions 40 | 41 | # version path separator; As mentioned above, this is the character used to split 42 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 43 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 44 | # Valid values for version_path_separator are: 45 | # 46 | # version_path_separator = : 47 | # version_path_separator = ; 48 | # version_path_separator = space 49 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 50 | 51 | # the output encoding used when revision files 52 | # are written from script.py.mako 53 | # output_encoding = utf-8 54 | 55 | sqlalchemy.url = %(DATABASE_CONNECTION_STRING)s 56 | 57 | 58 | [post_write_hooks] 59 | # post_write_hooks defines scripts or Python functions that are run 60 | # on newly generated revision scripts. 
See the documentation for further 61 | # detail and examples 62 | 63 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 64 | # hooks = black 65 | # black.type = console_scripts 66 | # black.entrypoint = black 67 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 68 | 69 | # Logging configuration 70 | [loggers] 71 | keys = root,sqlalchemy,alembic 72 | 73 | [handlers] 74 | keys = console 75 | 76 | [formatters] 77 | keys = generic 78 | 79 | [logger_root] 80 | level = WARN 81 | handlers = console 82 | qualname = 83 | 84 | [logger_sqlalchemy] 85 | level = WARN 86 | handlers = 87 | qualname = sqlalchemy.engine 88 | 89 | [logger_alembic] 90 | level = INFO 91 | handlers = 92 | qualname = alembic 93 | 94 | [handler_console] 95 | class = StreamHandler 96 | args = (sys.stderr,) 97 | level = NOTSET 98 | formatter = generic 99 | 100 | [formatter_generic] 101 | format = %(levelname)-5.5s [%(name)s] %(message)s 102 | datefmt = %H:%M:%S 103 | -------------------------------------------------------------------------------- /core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sav-labs/rest-api/03268766364590c7fea23c9cd469e0a3d5dc3899/core/__init__.py -------------------------------------------------------------------------------- /core/config.py: -------------------------------------------------------------------------------- 1 | from environs import Env 2 | 3 | env = Env() 4 | env.read_env() 5 | 6 | # postgres connection 7 | DATABASE_URL = env.str("DATABASE_URL") 8 | SERVER_HOST = env.str("SERVER_HOST") 9 | SERVER_PORT = env.int("SERVER_PORT") 10 | 11 | # s3 connection 12 | ENDPOINT_URL = env.str("ENDPOINT_URL") 13 | BUCKET = env.str("BUCKET") 14 | ACCESS_KEY = env.str("ACCESS_KEY") 15 | SECRET_KEY = env.str("SECRET_KEY") 16 | -------------------------------------------------------------------------------- /db/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sav-labs/rest-api/03268766364590c7fea23c9cd469e0a3d5dc3899/db/__init__.py -------------------------------------------------------------------------------- /db/base.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import create_engine 2 | from sqlalchemy.orm import sessionmaker, declarative_base 3 | 4 | from core.config import DATABASE_URL 5 | 6 | engine = create_engine(DATABASE_URL, 7 | echo=True) 8 | 9 | Base = declarative_base() 10 | 11 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) 12 | -------------------------------------------------------------------------------- /db/db_session.py: -------------------------------------------------------------------------------- 1 | from typing import Generator 2 | from db.base import SessionLocal 3 | 4 | 5 | def get_db() -> Generator: 6 | db = SessionLocal() 7 | try: 8 | yield db 9 | finally: 10 | db.close() 11 | -------------------------------------------------------------------------------- /db/items.py: -------------------------------------------------------------------------------- 1 | from db.base import Base 2 | from sqlalchemy.dialects.postgresql import TEXT, INTEGER, BOOLEAN 3 | from sqlalchemy import Column, MetaData 4 | 5 | metadata = MetaData() 6 | 7 | 8 | class Item(Base): 9 | __tablename__ = 'items' 10 | metadata = metadata 11 | id = Column(INTEGER, primary_key=True) 12 | name = Column(TEXT) 13 | description = 
Column(TEXT)
14 |     price = Column(INTEGER)
15 |     on_offer = Column(BOOLEAN)
16 | 
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.8"
2 | services:
3 |   app:
4 |     build: .
5 |     depends_on:
6 |       - db
7 |       - s3
8 |     ports:
9 |       - "8080:8080"
10 |     environment:
11 |       TEST: "True"
12 |       # Point the app at the compose services rather than the
13 |       # localhost addresses baked into .env
14 |       DATABASE_URL: "postgresql://postgres:postgres@db:5432/mlcm"
15 |       ENDPOINT_URL: "http://s3:9000"
16 |       BUCKET: "mlcm"
17 |     command: sh -c "while ! nc -z db 5432; do sleep 1; done; sleep 15 && alembic upgrade head && pytest"
18 | 
19 |   db:
20 |     image: postgres:14
21 |     environment:
22 |       POSTGRES_USER: postgres
23 |       POSTGRES_PASSWORD: postgres
24 |       POSTGRES_DB: mlcm
25 |     ports:
26 |       - "5432:5432"
27 |     healthcheck:
28 |       test: [ "CMD-SHELL", "pg_isready -U postgres" ]
29 |       interval: 5s
30 |       timeout: 5s
31 |       retries: 5
32 |   s3:
33 |     image: minio/minio
34 |     container_name: myminio
35 |     ports:
36 |       - "9000:9000"
37 |       - "9001:9001"
38 |     command: server --address ":9000" --console-address ":9001" /data
39 |   createbuckets:
40 |     image: minio/mc
41 |     depends_on:
42 |       - s3
43 |     entrypoint: >
44 |       /bin/sh -c "
45 |       sleep 10;
46 |       /usr/bin/mc config host add myminio http://myminio:9000 minioadmin minioadmin;
47 |       /usr/bin/mc mb myminio/mlcm;
48 |       sleep 999;
49 |       exit 0;
50 |       "
51 | 
--------------------------------------------------------------------------------
/endpoints/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sav-labs/rest-api/03268766364590c7fea23c9cd469e0a3d5dc3899/endpoints/__init__.py
--------------------------------------------------------------------------------
/endpoints/base.py:
--------------------------------------------------------------------------------
1 | # prefix = '/api/v1'
2 | 
3 | prefix = ''
--------------------------------------------------------------------------------
/endpoints/items.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter, status, HTTPException, Depends
2 | from sqlalchemy.orm import Session
3 | 
4 | from db.db_session import get_db
5 | from db.items import Item as dbItem
6 | from models.items import Item
7 | from endpoints.base import prefix
8 | 
9 | router = APIRouter(prefix=prefix)
10 | 
11 | 
12 | @router.get("/items", status_code=status.HTTP_200_OK)
13 | async def get_all(db: Session = Depends(get_db)):
14 |     return db.query(dbItem).all()
15 | 
16 | 
17 | @router.get('/item/{item_id}', response_model=Item,
18 |             status_code=status.HTTP_200_OK)
19 | async def get_by_id(item_id: int,
20 |                     db: Session = Depends(get_db)):
21 |     item = db.query(dbItem).filter(dbItem.id == item_id).first()
22 | 
23 |     if item is None:
24 |         raise HTTPException(
25 |             status_code=status.HTTP_404_NOT_FOUND,
26 |             detail="Resource Not Found")
27 | 
28 |     return item
29 | 
30 | 
31 | @router.post('/items', response_model=Item,
32 |              status_code=status.HTTP_201_CREATED)
33 | async def create(item: Item,
34 |                  db: Session = Depends(get_db)):
35 |     db_item = db.query(dbItem).filter(dbItem.name == item.name).first()
36 | 
37 |     if db_item is not None:
38 |         raise HTTPException(status_code=400, detail="Item already exists")
39 | 
40 |     new_item = dbItem(
41 |         name=item.name,
42 |         price=item.price,
43 |         description=item.description,
44 |         on_offer=item.on_offer
45 |     )
46 | 
47 |     db.add(new_item)
48 |     db.commit()
49 | 
50 |     return new_item
51 | 
52 | 
53 | @router.put('/item/{item_id}', response_model=Item,
54 |             status_code=status.HTTP_200_OK)
55 | async def update(item_id: int,
56 |                  item: Item,
57 |                  db: Session = Depends(get_db)):
58 |     item_to_update = db.query(dbItem).filter(dbItem.id == item_id).first()
59 | 
60 |     if item_to_update is None:
61 |         raise HTTPException(
62 |             status_code=status.HTTP_404_NOT_FOUND,
63 |             detail="Resource Not Found")
64 | 
65 |     item_to_update.name = item.name
66 |     item_to_update.price = item.price
67 |     item_to_update.description = item.description
68 |     item_to_update.on_offer = item.on_offer
69 | 
70 |     db.commit()
71 | 
72 |     return item_to_update
73 | 
74 | 
75 | @router.delete('/item/{item_id}')
76 | async def delete(item_id: int,
77 |                  db: Session = Depends(get_db)):
78 |     item_to_delete = db.query(dbItem).filter(dbItem.id == item_id).first()
79 | 
80 |     if item_to_delete is None:
81 |         raise HTTPException(
82 |             status_code=status.HTTP_404_NOT_FOUND,
83 |             detail="Resource Not Found")
84 | 
85 |     db.delete(item_to_delete)
86 |     db.commit()
87 | 
88 |     return item_to_delete
89 | 
--------------------------------------------------------------------------------
/endpoints/liveness_probe.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter, status
2 | from endpoints.base import prefix
3 | 
4 | router = APIRouter(prefix=prefix)
5 | 
6 | 
7 | @router.get("/healthz", status_code=status.HTTP_200_OK)
8 | async def get_healthz():
9 |     return {"status": "healthy"}
10 | 
11 | 
12 | @router.get("/", status_code=status.HTTP_200_OK)
13 | async def get_root():
14 |     return {"status": "healthy"}
15 | 
--------------------------------------------------------------------------------
/endpoints/s3.py:
--------------------------------------------------------------------------------
1 | from typing import List, Dict
2 | from fastapi import UploadFile
3 | from starlette.responses import StreamingResponse
4 | from fastapi import APIRouter, status, HTTPException
5 | 
6 | from utils.s3 import S3Loader
7 | from endpoints.base import prefix
8 | 
9 | router = APIRouter(prefix=prefix)
10 | 
11 | 
12 | @router.get("/s3_api/file/{file_path:path}",
13 |             status_code=status.HTTP_200_OK)
14 | async def get_file(file_path: str):
15 |     s3 = S3Loader(file_path=file_path)
16 |     if s3.object is not None:
17 |         return StreamingResponse(
18 |             s3.streamer(), media_type=s3.object['ContentType'], headers={
19 |                 "Content-Disposition": f"attachment;filename={s3.file_name}"})
20 | 
21 |     raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
22 |                         detail="Resource Not Found")
23 | 
24 | 
25 | @router.get("/s3_api/directory/{dir_path:path}",
26 |             status_code=status.HTTP_200_OK)
27 | async def get_list_files_in_directory(dir_path: str):
28 |     s3 = S3Loader(dir_path=dir_path)
29 |     if 'Contents' in s3.objects:
30 |         return s3.get_file_names()
31 |     raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
32 |                         detail="Folder doesn't exist")
33 | 
34 | 
35 | @router.post("/s3_api/{dir_path:path}",
36 |              response_model=Dict,
37 |              status_code=status.HTTP_201_CREATED)
38 | async def upload_file(files: List[UploadFile],
39 |                       dir_path: str):
40 |     s3 = S3Loader(files=files, file_path=dir_path)
41 |     if s3.upload_files():
42 |         return {
43 |             "Files uploaded": [
44 |                 dir_path +
45 |                 '/' +
46 |                 file.filename for file in files]}
47 |     raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
48 |                         detail="Couldn't upload files")
49 | 
50 | 
51 | @router.put("/s3_api/{file_path:path}",
52 |             status_code=status.HTTP_200_OK)
53 | async def update_file(files: UploadFile,
54 |                       file_path: str):
55 |     s3 = S3Loader(files=[files], file_path=file_path)
56 |     s3.update_file()
57 |     return {"Updated File": file_path}
58 | 
59 | 
60 | @router.delete("/s3_api/{file_path:path}",
61 |                status_code=status.HTTP_200_OK)
62 | async def delete_file(file_path: str):
63 |     s3 = S3Loader(file_path=file_path)
64 |     s3.delete_file()
65 |     return {"Deleted File": 
file_path} 66 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | import uvicorn 2 | from fastapi import FastAPI 3 | from fastapi.openapi.docs import get_swagger_ui_html 4 | 5 | from core.config import SERVER_HOST, SERVER_PORT 6 | from endpoints import items, s3, liveness_probe 7 | 8 | app = FastAPI(openapi_url="/api/v1/openapi.json", 9 | swagger_ui_oauth2_redirect_url="/api/v1/docs/oauth2-redirect") 10 | 11 | app.include_router(s3.router) 12 | app.include_router(liveness_probe.router) 13 | app.include_router(items.router) 14 | 15 | 16 | @app.get("/api/v1/docs", include_in_schema=False) 17 | async def get_documentation(): 18 | return get_swagger_ui_html(openapi_url="openapi.json", title="Swagger") 19 | 20 | 21 | if __name__ == "__main__": 22 | # os.system('alembic upgrade head') 23 | uvicorn.run("main:app", port=SERVER_PORT, host=SERVER_HOST, reload=True) 24 | -------------------------------------------------------------------------------- /migrations/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration 2 | -------------------------------------------------------------------------------- /migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sav-labs/rest-api/03268766364590c7fea23c9cd469e0a3d5dc3899/migrations/__init__.py -------------------------------------------------------------------------------- /migrations/env.py: -------------------------------------------------------------------------------- 1 | from logging.config import fileConfig 2 | 3 | from sqlalchemy import engine_from_config 4 | from sqlalchemy import pool 5 | 6 | from alembic import context 7 | 8 | from core.config import DATABASE_URL 9 | from db import items 10 | 11 | # this is the Alembic Config object, which provides 12 | # access to the values within the .ini file in use. 13 | config = context.config 14 | 15 | # Set connection db string 16 | section = config.config_ini_section 17 | config.set_section_option(section, "DATABASE_CONNECTION_STRING", DATABASE_URL) 18 | 19 | # Interpret the config file for Python logging. 20 | # This line sets up loggers basically. 21 | fileConfig(config.config_file_name) 22 | 23 | # add your model's MetaData object here 24 | # for 'autogenerate' support 25 | # from myapp import mymodel 26 | # target_metadata = mymodel.Base.metadata 27 | target_metadata = [items.metadata] 28 | 29 | 30 | # other values from the config, defined by the needs of env.py, 31 | # can be acquired: 32 | # my_important_option = config.get_main_option("my_important_option") 33 | # ... etc. 34 | 35 | 36 | def run_migrations_offline(): 37 | """Run migrations in 'offline' mode. 38 | 39 | This configures the context with just a URL 40 | and not an Engine, though an Engine is acceptable 41 | here as well. By skipping the Engine creation 42 | we don't even need a DBAPI to be available. 43 | 44 | Calls to context.execute() here emit the given string to the 45 | script output. 
46 | 47 | """ 48 | url = config.get_main_option("sqlalchemy.url") 49 | context.configure( 50 | url=url, 51 | target_metadata=target_metadata, 52 | literal_binds=True, 53 | dialect_opts={"paramstyle": "named"}, 54 | ) 55 | 56 | with context.begin_transaction(): 57 | context.run_migrations() 58 | 59 | 60 | def run_migrations_online(): 61 | """Run migrations in 'online' mode. 62 | 63 | In this scenario we need to create an Engine 64 | and associate a connection with the context. 65 | 66 | """ 67 | connectable = engine_from_config( 68 | config.get_section(config.config_ini_section), 69 | prefix="sqlalchemy.", 70 | poolclass=pool.NullPool, 71 | ) 72 | 73 | with connectable.connect() as connection: 74 | context.configure( 75 | connection=connection, target_metadata=target_metadata 76 | ) 77 | 78 | with context.begin_transaction(): 79 | context.run_migrations() 80 | 81 | 82 | if context.is_offline_mode(): 83 | run_migrations_offline() 84 | else: 85 | run_migrations_online() 86 | -------------------------------------------------------------------------------- /migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /migrations/versions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sav-labs/rest-api/03268766364590c7fea23c9cd469e0a3d5dc3899/migrations/versions/__init__.py -------------------------------------------------------------------------------- /models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sav-labs/rest-api/03268766364590c7fea23c9cd469e0a3d5dc3899/models/__init__.py -------------------------------------------------------------------------------- /models/items.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | 4 | class Item(BaseModel): # serializer 5 | id: int 6 | name: str 7 | description: str 8 | price: int 9 | on_offer: bool 10 | 11 | class Config: 12 | orm_mode = True 13 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | environs~=9.5.0 2 | fastapi~=0.73.0 3 | uvicorn~=0.17.4 4 | sqlalchemy~=1.4.31 5 | databases~=0.5.5 6 | psycopg2-binary~=2.9.3 7 | alembic~=1.7.6 8 | boto3~=1.21.0 9 | python-multipart~=0.0.5 10 | pydantic~=1.9.0 11 | pytest~=7.0.1 12 | requests~=2.27.1 -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sav-labs/rest-api/03268766364590c7fea23c9cd469e0a3d5dc3899/tests/__init__.py 
--------------------------------------------------------------------------------
/tests/base_test.py:
--------------------------------------------------------------------------------
1 | from fastapi.testclient import TestClient
2 | 
3 | from main import app
4 | 
5 | client = TestClient(app)
--------------------------------------------------------------------------------
/tests/test_data/data1.txt:
--------------------------------------------------------------------------------
1 | data1
--------------------------------------------------------------------------------
/tests/test_data/data2.txt:
--------------------------------------------------------------------------------
1 | data2
--------------------------------------------------------------------------------
/tests/test_s3.py:
--------------------------------------------------------------------------------
1 | from tests.base_test import client
2 | from endpoints.base import prefix
3 | 
4 | api = prefix + '/s3_api'
5 | s3_test_folder = '/test_data'
6 | 
7 | test_file_name = 'dump.sql'
8 | 
9 | test_file_1 = 'tests/test_data/data1.txt'
10 | test_file_2 = 'tests/test_data/data2.txt'
11 | 
12 | 
13 | def test_upload_file():
14 |     with open(test_file_1, 'rb') as f:
15 |         body = f.read()
16 |     response = client.post(
17 |         api + s3_test_folder,
18 |         files={
19 |             "files": (
20 |                 test_file_name,
21 |                 body,
22 |                 "multipart/form-data")})
23 |     assert response.status_code == 201
24 | 
25 | 
26 | def test_get_file():
27 |     response = client.get(
28 |         api +
29 |         '/file' +
30 |         s3_test_folder +
31 |         '%2F' +
32 |         test_file_name)
33 |     assert response.status_code == 200
34 | 
35 | 
36 | def test_get_file_names():
37 |     response = client.get(api + '/directory' + s3_test_folder)
38 |     assert response.status_code == 200
39 | 
40 | 
41 | def test_update_file():
42 |     with open(test_file_2, 'rb') as f:
43 |         body = f.read()
44 |     response = client.put(
45 |         api +
46 |         s3_test_folder +
47 |         '%2F' +
48 |         test_file_name,
49 |         files={
50 |             "files": (
51 |                 test_file_name,
52 |                 body,
53 |                 "multipart/form-data")})
54 |     assert response.status_code == 200
55 | 
56 | 
57 | def test_delete_file():
58 |     response = client.delete(api + s3_test_folder + '%2F' + test_file_name)
59 |     assert response.status_code == 200
--------------------------------------------------------------------------------
/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sav-labs/rest-api/03268766364590c7fea23c9cd469e0a3d5dc3899/utils/__init__.py
--------------------------------------------------------------------------------
/utils/s3.py:
--------------------------------------------------------------------------------
1 | from typing import List, Dict, Iterator
2 | from botocore.exceptions import ClientError
3 | import boto3
4 | 
5 | from core.config import BUCKET, ACCESS_KEY, SECRET_KEY, ENDPOINT_URL
6 | 
7 | 
8 | class S3Loader:
9 |     """ Loader for s3 """
10 | 
11 |     def __init__(
12 |             self,
13 |             dir_path: str = None,
14 |             file_path: str = None,
15 |             files: list = None,
16 |             bucket: str = BUCKET):
17 |         self.dir_path = dir_path
18 |         self.files = files
19 |         self.bucket = bucket
20 |         self.file_path = file_path
21 |         self._resource = self._get_resource()
22 |         self._client = self._get_client()
23 |         self._bucket = self._get_bucket()
24 |         if file_path:
25 |             self.object = self._get_object()
26 |             self.file_name = file_path.split('/')[-1]
27 |         if dir_path:
28 |             self.objects = self._get_list_objects()
29 | 
30 |     def _get_list_objects(self):
31 |         return self._client.list_objects(
32 |             Bucket=self.bucket, Prefix=self.dir_path)
33 | 
34 |     def _get_object(self):
35 |         try:
36 |             return self._client.get_object(
37 |                 Bucket=self.bucket, Key=self.file_path)
38 |         except ClientError:
39 |             return None
40 | 
41 |     def _get_session(self):
42 |         return boto3.session.Session(aws_access_key_id=ACCESS_KEY,
43 |                                      aws_secret_access_key=SECRET_KEY)
44 | 
45 |     def _get_client(self):
46 |         return self._get_session().client('s3', endpoint_url=ENDPOINT_URL)
47 | 
48 |     def _get_resource(self):
49 |         return self._get_session().resource('s3', endpoint_url=ENDPOINT_URL)
50 | 
51 |     def _get_bucket(self):
52 |         return self._resource.Bucket(self.bucket)
53 | 
54 |     def get_file_names(self) -> List[dict]:
55 |         files = []
56 |         for obj in self.objects['Contents']:
57 |             files.append({obj['Key']: {
58 |                 'last_modified': obj['LastModified'],
59 |                 'size': obj['Size']
60 |             }})
61 |         return files
62 | 
63 |     def streamer(self) -> Iterator:
64 |         for i in self.object['Body']:
65 |             yield i
66 | 
67 |     def upload_files(self) -> bool:
68 |         for file in self.files:
69 |             try:
70 |                 self._client.upload_fileobj(
71 |                     file.file, self.bucket,
72 |                     self.file_path + '/' + file.filename)
73 |             except ClientError:
74 |                 return False
75 |         return True
76 | 
77 |     def update_file(self) -> None:
78 |         return self._client.upload_fileobj(
79 |             self.files[0].file, self.bucket, self.file_path)
80 | 
81 |     def delete_file(self) -> Dict:
82 |         return self._client.delete_object(
83 |             Bucket=self.bucket, Key=self.file_path)
--------------------------------------------------------------------------------
/utils/validate.py:
--------------------------------------------------------------------------------
1 | import uuid
2 | 
3 | 
4 | def is_valid_uuid(*values):
5 |     for value in values:
6 |         try:
7 |             uuid.UUID(value)
8 |         except ValueError:
9 |             return False
10 |     return True
--------------------------------------------------------------------------------
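
As a closing note, here is a minimal sketch of how the `S3Loader` utility is driven by the endpoints in `endpoints/s3.py`. It assumes the environment from `.env` is loaded and the bucket already exists; the key names are illustrative only:
```
from utils.s3 import S3Loader

# List a "directory" (an S3 key prefix), as GET /s3_api/directory/{dir_path} does
listing = S3Loader(dir_path='test_data')
if 'Contents' in listing.objects:
    print(listing.get_file_names())

# Fetch a single object, as GET /s3_api/file/{file_path} does
loader = S3Loader(file_path='test_data/dump.sql')
if loader.object is not None:
    content = b''.join(loader.streamer())
```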