├── src ├── api │ ├── __init__.py │ ├── routes │ │ ├── __init__.py │ │ └── file.py │ └── responses │ │ ├── file_response.py │ │ └── response.py ├── core │ ├── __init__.py │ └── config.py ├── dto │ ├── __init__.py │ └── file_dto.py ├── tests │ ├── __init__.py │ ├── functional │ │ ├── test_upload_init.py │ │ ├── test_upload_chunk.py │ │ └── test_upload_complete.py │ ├── conftest.py │ └── test_utils.py ├── constants │ ├── __init__.py │ ├── messages.py │ ├── upload_stauts.py │ ├── errors.py │ └── file_extensions.py ├── exceptions │ ├── __init__.py │ ├── handler.py │ └── http_exception.py ├── handlers │ ├── __init__.py │ ├── base_handler.py │ └── file_handler.py ├── repositories │ ├── __init__.py │ ├── base_repository.py │ └── file_repository.py ├── services │ ├── __init__.py │ ├── base_service.py │ └── file_service.py ├── infrastructure │ ├── __init__.py │ ├── db │ │ ├── init.sql │ │ └── mysql.py │ ├── rabbitmq.py │ ├── celery.py │ └── minio.py ├── alembic │ ├── README │ ├── script.py.mako │ ├── env.py │ └── versions │ │ └── 6636fe22b643_tables.py ├── entities │ ├── __init__.py │ ├── file.py │ └── celery_task.py ├── tasks │ ├── __init__.py │ └── file_upload_task.py ├── requirements.txt ├── main.py ├── .env.example ├── utils.py └── alembic.ini ├── .gitignore ├── Dockerfile ├── .github ├── dependabot.yml └── workflows │ └── test.yml ├── supervisord.conf ├── .devcontainer └── devcontainer.json ├── docker-compose.yml ├── README.md └── filemanager.postman_collection.json /src/api/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/dto/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/api/routes/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/constants/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/exceptions/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/handlers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/repositories/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/services/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/infrastructure/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /src/alembic/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. -------------------------------------------------------------------------------- /src/constants/messages.py: -------------------------------------------------------------------------------- 1 | class Message: 2 | UPLOADED_CHUNK = 'The chunk is uploaded successfully!' -------------------------------------------------------------------------------- /src/infrastructure/db/init.sql: -------------------------------------------------------------------------------- 1 | CREATE DATABASE IF NOT EXISTS filemanager; 2 | CREATE DATABASE IF NOT EXISTS filemanager_test; -------------------------------------------------------------------------------- /src/entities/__init__.py: -------------------------------------------------------------------------------- 1 | from .celery_task import CeleryTask 2 | from .file import File 3 | 4 | __all__ = ['CeleryTask', 'File'] 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | src/.env 3 | /docker-data 4 | src/supervisord.log 5 | src/supervisord.pid 6 | src/.coverage 7 | src/coverage.xml 8 | -------------------------------------------------------------------------------- /src/constants/upload_stauts.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | class UploadStatus(str, Enum): 4 | SUCCESS = "SUCCESS" 5 | FAILURE = "FAILURE" 6 | PENDING = "PENDING" 7 | STARTED = "STARTED" -------------------------------------------------------------------------------- /src/tasks/__init__.py: -------------------------------------------------------------------------------- 1 | from infrastructure.celery import celery 2 | from infrastructure.minio import minioStorage 3 | from core.config import config 4 | import os 5 | 6 | from . 
import file_upload_task
 7 | 
 8 | 
--------------------------------------------------------------------------------
/src/services/base_service.py:
--------------------------------------------------------------------------------
 1 | from typing import TypeVar, Generic
 2 | 
 3 | T = TypeVar("T")
 4 | 
 5 | class BaseService(Generic[T]):
 6 |     def __init__(self, repo: T) -> None:
 7 |         self.repo = repo
--------------------------------------------------------------------------------
/src/requirements.txt:
--------------------------------------------------------------------------------
 1 | fastapi==0.111.1
 2 | minio==7.2.7
 3 | sqlalchemy==2.0.31
 4 | pymysql==1.1.1
 5 | alembic==1.13.2
 6 | cryptography==43.0.0
 7 | celery[mysql]==5.4.0
 8 | aiofiles==24.1.0
 9 | pytest==8.3.2
10 | pytest-cov==5.0.0
--------------------------------------------------------------------------------
/src/handlers/base_handler.py:
--------------------------------------------------------------------------------
 1 | from api.responses.response import response
 2 | from typing import TypeVar, Generic
 3 | 
 4 | T = TypeVar('T')
 5 | 
 6 | 
 7 | class BaseHandler(Generic[T]):
 8 |     def __init__(self, service: T) -> None:
 9 |         self.service = service
10 |         self.response = response
11 | 
--------------------------------------------------------------------------------
/src/main.py:
--------------------------------------------------------------------------------
 1 | from fastapi import FastAPI
 2 | from api.routes import file
 3 | from exceptions.handler import ExceptionHandler
 4 | 
 5 | def create_application() -> FastAPI:
 6 |     app = FastAPI()
 7 |     app.include_router(file.router)
 8 |     ExceptionHandler(app)
 9 |     return app
10 | 
11 | app = create_application()
12 | 
--------------------------------------------------------------------------------
/src/infrastructure/rabbitmq.py:
--------------------------------------------------------------------------------
 1 | from typing import Self
 2 | 
 3 | 
 4 | class RabbitMQ:
 5 | 
 6 |     _instance: Self = None
 7 | 
 8 |     def __new__(cls: Self) -> Self:
 9 |         if cls._instance is None:
10 |             cls._instance = super().__new__(cls)
11 |         return cls._instance
12 | 
13 | 
14 | rabbitMQ = RabbitMQ()
15 | 
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
 1 | FROM python:3.11
 2 | 
 3 | WORKDIR /var/www
 4 | 
 5 | COPY /src ./
 6 | COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
 7 | 
 8 | RUN pip install --no-cache-dir --upgrade -r /var/www/requirements.txt \
 9 |     && pip install supervisor
10 | 
11 | # Start supervisord
12 | CMD ["supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"]
13 | 
--------------------------------------------------------------------------------
/src/tests/functional/test_upload_init.py:
--------------------------------------------------------------------------------
 1 | from fastapi import status
 2 | from tests.test_utils import *
 3 | 
 4 | 
 5 | def test_upload_initialize(test_app):
 6 |     response = test_app.post(f"{FILE_ENDPOINT}/upload/init")
 7 |     response_json = response.json()
 8 | 
 9 |     assert response.status_code == status.HTTP_200_OK
10 |     validate_success_response_structure(response_json=response_json)
11 |     assert "chunk_size" in response_json["data"]
12 |     assert "upload_id" in response_json["data"]
13 | 
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
 1 | # To get started with Dependabot version updates, you'll need to specify which
 2 | # package ecosystems to update and where the package manifests are located.
 3 | # Please see the documentation for more information:
 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
 5 | # https://containers.dev/guide/dependabot
 6 | 
 7 | version: 2
 8 | updates:
 9 |   - package-ecosystem: "devcontainers"
10 |     directory: "/"
11 |     schedule:
12 |       interval: weekly
13 | 
--------------------------------------------------------------------------------
/supervisord.conf:
--------------------------------------------------------------------------------
 1 | [supervisord]
 2 | nodaemon=true
 3 | loglevel=info
 4 | 
 5 | [program:fastapi]
 6 | command=uvicorn main:app --host 0.0.0.0 --port 8000
 7 | directory=/var/www
 8 | autostart=true
 9 | autorestart=true
10 | stderr_logfile=/var/log/fastapi.err.log
11 | stdout_logfile=/var/log/fastapi.out.log
12 | 
13 | [program:celery]
14 | command=celery -A tasks worker --loglevel=info
15 | directory=/var/www
16 | autostart=true
17 | autorestart=true
18 | stderr_logfile=/var/log/celery.err.log
19 | stdout_logfile=/var/log/celery.out.log
20 | 
--------------------------------------------------------------------------------
/src/tests/conftest.py:
--------------------------------------------------------------------------------
 1 | import os
 2 | import pytest
 3 | 
 4 | # The test environment must be configured before the application (and its config) is imported.
 5 | os.environ["ENV"] = "testing"
 6 | os.environ["APP_UPLOAD_DIR"] = "/uploads"
 7 | os.environ["APP_MAX_CHUNK_SIZE"] = "1024"
 8 | 
 9 | from fastapi.testclient import TestClient
10 | from main import create_application
11 | 
12 | @pytest.fixture(scope="session", autouse=True)
13 | def setup_database():
14 |     os.system("alembic downgrade base")
15 |     os.system("alembic upgrade head")
16 |     yield
17 | 
18 | @pytest.fixture(scope="module")
19 | def test_app():
20 |     app = create_application()
21 |     with TestClient(app) as test_client:
22 |         yield test_client
--------------------------------------------------------------------------------
/src/.env.example:
--------------------------------------------------------------------------------
 1 | ENV="dev"
 2 | APP_UPLOAD_DIR="/uploads"
 3 | APP_MAX_CHUNK_SIZE="10485760"
 4 | 
 5 | MINIO_ROOT_USER=""
 6 | MINIO_ROOT_PASSWORD=""
 7 | MINIO_ACCESS_KEY=""
 8 | MINIO_SECRET_KEY=""
 9 | MINIO_PUBLIC_BUCKET="public"
10 | MINIO_PRIVATE_BUCKET="private"
11 | MINIO_ENDPOINT="minio:9000"
12 | MINIO_URL="http://localhost:9001"
13 | 
14 | MYSQL_ROOT_PASSWORD=""
15 | MYSQL_HOST="db-mysql"
16 | MYSQL_PORT="3306"
17 | MYSQL_DATABASE="filemanager"
18 | MYSQL_TEST_DATABASE="filemanager_test"
19 | MYSQL_PASSWORD=""
20 | 
21 | RABBITMQ_DEFAULT_USER=""
22 | RABBITMQ_DEFAULT_PASS=""
23 | RABBITMQ_HOST="rabbitmq"
24 | RABBITMQ_PORT="5672"
--------------------------------------------------------------------------------
/src/constants/errors.py:
--------------------------------------------------------------------------------
 1 | class Errors:
 2 |     BAD_REQUEST: str = "Something bad happened. Try again!"
 3 |     Permission_DENIED: str = "Permission denied"
 4 |     NOT_FOUND: str = "Not found!"
 5 |     FILE_NOT_FOUND: str = "File directory not found. Please initialize first!"
 6 |     FILE_UPLOADED_SUCCESSFULLY: str = "File uploaded previously!"
 7 |     FILE_PENDING_UPLOAD: str = "File is uploading!"
 8 | 
 9 | class ValidatonErrors:
10 |     INVALID_JSON_DETAIL: str = "Invalid JSON format for detail"
11 |     INVALID_JSON_CREDENTIAL: str = "Invalid JSON format for credential"
12 |     LE_CHUNCK_SIZE: str = "File size is larger than the valid chunk size"
--------------------------------------------------------------------------------
/src/infrastructure/celery.py:
--------------------------------------------------------------------------------
 1 | from celery import Celery as CeleryBase
 2 | from core.config import config
 3 | from typing import Self
 4 | 
 5 | 
 6 | class Celery:
 7 |     _instance: Self = None
 8 | 
 9 |     def __new__(cls: Self) -> Self:
10 |         if cls._instance is None:
11 |             cls._instance = CeleryBase(
12 |                 'tasks', broker=str(config.RABBITMQ_ENDPOINT), backend=str(config.CELERY_BACKEND_ENDPOINT))
13 |         return cls._instance
14 | 
15 | 
16 | celery = Celery()
17 | celery.conf.database_engine_options = {'echo': True}
18 | celery.conf.database_table_names = {'task': 'celery_tasks'}
19 | celery.conf.update(result_extended=True)
--------------------------------------------------------------------------------
/src/api/responses/file_response.py:
--------------------------------------------------------------------------------
 1 | from pydantic import BaseModel
 2 | from typing import Optional, Dict, Any
 3 | from constants.upload_stauts import UploadStatus
 4 | 
 5 | 
 6 | class UploadInitResponse(BaseModel):
 7 |     chunk_size: int
 8 |     upload_id: str
 9 | 
10 | 
11 | class UploadChunkResponse(BaseModel):
12 |     chunk_index: int
13 |     upload_id: str
14 | 
15 | 
16 | class FileResponse(BaseModel):
17 |     id: str
18 |     path: str
19 |     content_type: str
20 |     detail: Optional[Dict[str, Any]]
21 |     credential: Optional[Dict[str, Any]]
22 |     download_url: str
23 | 
24 | 
25 | class UploadStatusResponse(BaseModel):
26 |     status: UploadStatus
27 | 
--------------------------------------------------------------------------------
/src/repositories/base_repository.py:
--------------------------------------------------------------------------------
 1 | from typing import Type, Generic, TypeVar
 2 | from infrastructure.db.mysql import MySQLDB
 3 | 
 4 | T = TypeVar("T")
 5 | 
 6 | class BaseRepo(Generic[T]):
 7 |     def __init__(self, model: Type[T], db: MySQLDB) -> None:
 8 |         self.model = model
 9 |         self.db = db
10 |         self.session = self.db.session
11 | 
12 |     def create(self, entity: T) -> T:
13 |         self.session.add(entity)
14 |         self.session.commit()
15 |         self.session.refresh(entity)
16 |         return entity
17 | 
18 |     def get(self, id: str) -> T:
19 |         return self.session.query(self.model).filter(self.model.id == id).first()
--------------------------------------------------------------------------------
/src/utils.py:
--------------------------------------------------------------------------------
 1 | import json
 2 | from fastapi.exceptions import RequestValidationError
 3 | from constants.errors import ValidatonErrors
 4 | from typing import Dict
 5 | 
 6 | 
 7 | def parse_json_to_dict(json_string: str, body: str) -> Dict[str, str]:
 8 |     parsed_dict = json.loads(json_string)
 9 | 
10 |     if not isinstance(parsed_dict, dict):
11 |         raise RequestValidationError(errors=[{
12 |             'loc': ('body', body),
13 |             'msg': ValidatonErrors.INVALID_JSON_DETAIL if body == 'detail' else ValidatonErrors.INVALID_JSON_CREDENTIAL,
14 |             'type': 'value_error'
15 |         }],
16 |             body={body: "invalid_format"})
17 |     return {str(key): str(value) for key, value in parsed_dict.items()}
18 | 
--------------------------------------------------------------------------------
/src/entities/file.py:
--------------------------------------------------------------------------------
 1 | 
from infrastructure.db.mysql import mysql as db 2 | from sqlalchemy import Column, String, JSON, Integer, VARCHAR, ForeignKey 3 | from sqlalchemy.orm import Mapped, mapped_column, relationship 4 | import uuid 5 | 6 | 7 | class File(db.Base): 8 | __tablename__ = "files" 9 | id = Column(VARCHAR(36), nullable=False, primary_key=True, unique=True, 10 | index=True, default=lambda: str(uuid.uuid4())) 11 | credential = Column(JSON(none_as_null=True)) 12 | path = Column(VARCHAR(255), nullable=False) 13 | content_type = Column(String(32), nullable=False) 14 | size = Column(Integer) 15 | detail = Column(JSON(none_as_null=True)) 16 | celery_task_id = Column(String(36)) -------------------------------------------------------------------------------- /src/repositories/file_repository.py: -------------------------------------------------------------------------------- 1 | from infrastructure.db.mysql import MySQLDB 2 | from .base_repository import BaseRepo 3 | from entities.file import File 4 | from dto.file_dto import FileBaseDTO 5 | 6 | 7 | class FileRepo(BaseRepo[File]): 8 | def __init__(self, db: MySQLDB) -> None: 9 | super().__init__(File, db) 10 | 11 | def get_file(self, id: str) -> File: 12 | return self.get(id=id) 13 | 14 | def create_file(self, file: FileBaseDTO) -> File: 15 | db_file = File( 16 | path=file.path, credential=file.credential, content_type=file.content_type, 17 | size=file.size, detail=file.detail, celery_task_id=file.celery_task_id 18 | ) 19 | return self.create(db_file) 20 | -------------------------------------------------------------------------------- /src/alembic/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | ${imports if imports else ""} 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision: str = ${repr(up_revision)} 16 | down_revision: Union[str, None] = ${repr(down_revision)} 17 | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} 18 | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} 19 | 20 | 21 | def upgrade() -> None: 22 | ${upgrades if upgrades else "pass"} 23 | 24 | 25 | def downgrade() -> None: 26 | ${downgrades if downgrades else "pass"} 27 | -------------------------------------------------------------------------------- /src/entities/celery_task.py: -------------------------------------------------------------------------------- 1 | from infrastructure.db.mysql import mysql as db 2 | from sqlalchemy import Column, Text, TIMESTAMP, String, BLOB, Integer, BigInteger 3 | 4 | class CeleryTask(db.Base): 5 | __tablename__ = "celery_tasks" 6 | id = Column(BigInteger, primary_key=True, autoincrement=True) 7 | task_id = Column(String(255), nullable=False, unique=True, index=True) 8 | status = Column(String(50), nullable=False) 9 | result = Column(BLOB) 10 | date_done = Column(TIMESTAMP, default='CURRENT_TIMESTAMP') 11 | traceback = Column(Text) 12 | meta = Column(Text) 13 | name = Column(String(155)) 14 | args = Column(BLOB) 15 | kwargs = Column(BLOB) 16 | worker = Column(String(155)) 17 | retries = Column(Integer) 18 | queue = Column(String(155)) -------------------------------------------------------------------------------- /src/constants/file_extensions.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | class FileExtension(str, Enum): 4 | txt = "txt" 5 | jpg = "jpg" 6 | png = "png" 7 | pdf = "pdf" 8 | mp4 = "mp4" 9 | mp3 = "mp3" 10 | mov = "mov" 11 | avi = "avi" 12 | doc = "doc" 13 | docx = "docx" 14 | xls = "xls" 15 | xlsx = "xlsx" 16 | ppt = "ppt" 17 | pptx = "pptx" 18 | gif = "gif" 19 | bmp = "bmp" 20 | tiff = "tiff" 21 | csv = "csv" 22 | json = "json" 23 | xml = "xml" 24 | html = "html" 25 | zip = "zip" 26 | rar = "rar" 27 | tar = "tar" 28 | gz = "gz" 29 | wav = "wav" 30 | flac = "flac" 31 | ogg = "ogg" 32 | webm = "webm" 33 | mkv = "mkv" 34 | heic = "heic" 35 | svg = "svg" 36 | psd = "psd" 37 | ai = "ai" 38 | eps = "eps" 39 | tif = "tif" 40 | tga = "tga" 41 | -------------------------------------------------------------------------------- /src/infrastructure/db/mysql.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import create_engine 2 | from sqlalchemy.orm import sessionmaker 3 | from sqlalchemy.ext.declarative import declarative_base 4 | from core.config import config 5 | from typing import Self 6 | 7 | class MySQLDB: 8 | _instance: Self = None 9 | 10 | def __new__(cls: Self) -> Self: 11 | if cls._instance == None: 12 | cls._instance = super().__new__(cls) 13 | cls._instance.__initialize() 14 | return cls._instance 15 | 16 | def __initialize(self) -> None: 17 | self.engine = create_engine(str(config.MYSQL_DATABASE_URL)) 18 | self.SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=self.engine) 19 | self.Base = declarative_base() 20 | 21 | @property 22 | def session(self): 23 | return self.SessionLocal() 24 | 25 | mysql = MySQLDB() -------------------------------------------------------------------------------- /src/api/responses/response.py: -------------------------------------------------------------------------------- 1 | from fastapi.responses import JSONResponse 2 | from typing import Any, TypeVar, Generic, Type, Optional, Sequence 3 | from pydantic import BaseModel 
4 | 5 | T = TypeVar('T') 6 | 7 | 8 | class SuccessResponse(BaseModel, Generic[T]): 9 | data: T 10 | message: Optional[str] = '' 11 | success: bool = True 12 | 13 | 14 | class ErrorResponse(BaseModel): 15 | errors: Optional[Sequence[str]] = [] 16 | message: Optional[str] = '' 17 | success: bool = False 18 | 19 | 20 | class Response: 21 | def success(self, content: SuccessResponse, status: int = 200) -> JSONResponse: 22 | return JSONResponse(content=content.model_dump(), status_code=status) 23 | 24 | def error(self, content: ErrorResponse, status: int = 400) -> JSONResponse: 25 | return JSONResponse(content=content.model_dump(), status_code=status) 26 | 27 | 28 | response = Response() 29 | -------------------------------------------------------------------------------- /src/dto/file_dto.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | from typing import Any, Dict, Optional 3 | from fastapi import UploadFile 4 | from constants.file_extensions import FileExtension 5 | 6 | class UploadChunkDTO(BaseModel): 7 | chunk_size: int 8 | file: UploadFile 9 | upload_id: str 10 | chunk_index: int 11 | 12 | class UploadFileDTO(BaseModel): 13 | upload_id: str 14 | total_chunks: int 15 | file_extension: FileExtension 16 | total_size: int 17 | content_type: str 18 | detail: Optional[Dict[str, Any]] 19 | credential: Optional[Dict[str, Any]] 20 | 21 | class RetryUploadFileDTO(BaseModel): 22 | id: str 23 | credential: Optional[Dict[str, Any]] 24 | 25 | class FileBaseDTO(BaseModel): 26 | path: str 27 | credential: Optional[Dict[str, Any]] 28 | content_type: str 29 | size: int 30 | detail: Optional[Dict[str, Any]] 31 | celery_task_id: str 32 | 33 | class FileDTO(FileBaseDTO): 34 | id: str 35 | 36 | -------------------------------------------------------------------------------- /src/tests/test_utils.py: -------------------------------------------------------------------------------- 1 | from io import BytesIO 2 | 3 | FILE_ENDPOINT = "/api/v1/file" 4 | CHUNK_SIZE = 1024 5 | 6 | def validate_success_response_structure(response_json): 7 | assert "data" in response_json, "Missing 'data' in response JSON" 8 | assert "message" in response_json, "Missing 'message' in response JSON" 9 | assert "success" in response_json, "Missing 'success' in response JSON" 10 | assert response_json["success"] == True 11 | 12 | 13 | def validate_error_response_structure(response_json): 14 | assert "errors" in response_json, "Missing 'errors' in response JSON" 15 | assert "message" in response_json, "Missing 'message' in response JSON" 16 | assert "success" in response_json, "Missing 'success' in response JSON" 17 | assert response_json["success"] == False 18 | 19 | 20 | def generate_file(chunk_size): 21 | file_content = b"0" * chunk_size 22 | file = BytesIO(file_content) 23 | file.name = "chunka" 24 | return file 25 | -------------------------------------------------------------------------------- /src/exceptions/handler.py: -------------------------------------------------------------------------------- 1 | from fastapi import Request, status 2 | from fastapi.exceptions import RequestValidationError 3 | from fastapi.responses import JSONResponse 4 | from api.responses.response import ErrorResponse, response 5 | 6 | class ExceptionHandler: 7 | def __init__(self, app): 8 | self.app = app 9 | self.register_handlers() 10 | 11 | def register_handlers(self): 12 | @self.app.exception_handler(RequestValidationError) 13 | async def validation_exception_handler(request: 
Request, exc: RequestValidationError):
14 |             details = exc.errors()
15 |             modified_details = []
16 |             for error in details:
17 |                 message = error['msg'].lower()
18 |                 location = "->".join(map(str, error['loc']))
19 |                 modified_details.append(f"[{location}] {message}")
20 |             return response.error(
21 |                 content=ErrorResponse(errors=modified_details),
22 |                 status=status.HTTP_422_UNPROCESSABLE_ENTITY
23 |             )
24 | 
--------------------------------------------------------------------------------
/src/tasks/file_upload_task.py:
--------------------------------------------------------------------------------
 1 | from . import celery, minioStorage, config, os
 2 | from minio import S3Error
 3 | 
 4 | 
 5 | @celery.task()
 6 | def upload_file_task(bucket: str, upload_id: str, total_chunks: int, filename: str):
 7 |     upload_dir = os.path.join(config.APP_UPLOAD_DIR, upload_id)
 8 |     final_file_path = os.path.join(upload_dir, "final_file")
 9 |     with open(final_file_path, "wb") as final_file:
10 |         for i in range(total_chunks):
11 |             chunk_path = os.path.join(upload_dir, f"{i}.part")
12 |             with open(chunk_path, "rb") as chunk_file:
13 |                 content = chunk_file.read()
14 |                 final_file.write(content)
15 |     with open(final_file_path, 'rb') as file:
16 |         try:
17 |             minioStorage.put_object(
18 |                 bucket, filename, file, length=-1, part_size=10 * 1024 * 1024)
19 |             for i in range(total_chunks):
20 |                 chunk_path = os.path.join(upload_dir, f"{i}.part")
21 |                 os.remove(chunk_path)
22 |             os.remove(final_file_path)
23 |             os.removedirs(upload_dir)
24 |         except S3Error as exc:
25 |             # Re-raise so the task is marked as FAILURE and can be re-queued by the retry endpoint.
26 |             raise exc
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
 1 | name: Run Tests and Check Coverage
 2 | 
 3 | on: [push, pull_request]
 4 | 
 5 | jobs:
 6 |   tests:
 7 |     name: Run tests
 8 |     runs-on: ubuntu-latest
 9 |     env:
10 |       DOCKER_USER: ${{ secrets.DOCKER_USER }}
11 |       DOCKER_PASS: ${{ secrets.DOCKER_PASS }}
12 | 
13 |     steps:
14 |       - name: Checkout Code
15 |         uses: actions/checkout@v4
16 | 
17 |       - name: Decode and create .env file
18 |         run: |
19 |           echo "${{ secrets.ENV_FILE_BASE64 }}" | base64 --decode > src/.env
20 | 
21 |       - name: Login to DockerHub
22 |         run: docker login -u $DOCKER_USER -p $DOCKER_PASS
23 | 
24 |       - name: Build Docker images
25 |         run: docker compose build
26 | 
27 |       - name: Run Containers
28 |         run: docker compose up -d
29 | 
30 |       - name: Run tests
31 |         run: docker compose run filemanager bash -c "pytest --cov=. 
--cov-report=xml" 32 | 33 | - name: Upload coverage to Codecov 34 | uses: codecov/codecov-action@v4 35 | with: 36 | verbose: true 37 | files: ./coverage.xml 38 | fail_ci_if_error: true 39 | token: ${{ secrets.CODECOV_TOKEN }} 40 | 41 | - name: Tear down Docker Compose 42 | run: docker compose down -------------------------------------------------------------------------------- /src/exceptions/http_exception.py: -------------------------------------------------------------------------------- 1 | from fastapi import status as http_status 2 | from constants.errors import Errors 3 | 4 | class BaseException(Exception): 5 | def __init__(self, message: str, status: int) -> None: 6 | self.message = message 7 | self.status = status 8 | super().__init__(self.message, self.status) 9 | 10 | class PermissionException(BaseException): 11 | def __init__(self) -> None: 12 | message = Errors.Permission_DENIED 13 | status = http_status.HTTP_403_FORBIDDEN 14 | super().__init__(message, status) 15 | 16 | class FileNotFoundException(BaseException): 17 | def __init__(self) -> None: 18 | message = Errors.NOT_FOUND 19 | status = http_status.HTTP_404_NOT_FOUND 20 | super().__init__(message, status) 21 | 22 | class FileUploadedException(BaseException): 23 | def __init__(self) -> None: 24 | message = Errors.FILE_UPLOADED_SUCCESSFULLY 25 | status = http_status.HTTP_400_BAD_REQUEST 26 | super().__init__(message, status) 27 | 28 | class FilePendingUploadException(BaseException): 29 | def __init__(self) -> None: 30 | message = Errors.FILE_PENDING_UPLOAD 31 | status = http_status.HTTP_400_BAD_REQUEST 32 | super().__init__(message, status) -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the 2 | // README at: https://github.com/devcontainers/templates/tree/main/src/docker-existing-dockerfile 3 | { 4 | "name": "Existing Dockerfile", 5 | "build": { 6 | // Sets the run context to one level up instead of the .devcontainer folder. 7 | "context": "..", 8 | // Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename. 9 | "dockerfile": "../Dockerfile" 10 | }, 11 | 12 | // Features to add to the dev container. More info: https://containers.dev/features. 13 | // "features": {}, 14 | 15 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 16 | // "forwardPorts": [], 17 | 18 | // Uncomment the next line to run commands after the container is created. 19 | // "postCreateCommand": "cat /etc/os-release", 20 | 21 | // Configure tool-specific properties. 22 | "customizations": { 23 | "vscode": { 24 | "settings": { 25 | "files.exclude": { 26 | "**/__pycache__": true 27 | } 28 | }, 29 | "extensions": ["ms-python.python","ms-azuretools.vscode-docker","ms-python.autopep8","njpwerner.autodocstring"] 30 | } 31 | } 32 | 33 | // Uncomment to connect as an existing user other than the container default. More info: https://aka.ms/dev-containers-non-root. 34 | // "remoteUser": "devcontainer" 35 | } 36 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.9" 2 | 3 | services: 4 | 5 | filemanager: 6 | build: 7 | dockerfile: Dockerfile 8 | context: . 
9 | container_name: filemanager 10 | env_file: 11 | - src/.env 12 | ports: 13 | - "8000:8000" 14 | volumes: 15 | - "./src:/var/www" 16 | - "./docker-data/log/:/var/log" 17 | networks: 18 | - filemanager 19 | depends_on: 20 | - db-mysql 21 | - minio 22 | 23 | minio: 24 | image: minio/minio 25 | container_name: minio 26 | ports: 27 | - "9001:9000" # api port - should not be exposed on production 28 | - "9090:9090" # console port 29 | volumes: 30 | - ./docker-data/data/minio/:/data 31 | env_file: 32 | - src/.env 33 | command: server --console-address ":9090" /data 34 | networks: 35 | - filemanager 36 | 37 | db-mysql: 38 | image: mysql 39 | container_name: mysql 40 | env_file: 41 | - src/.env 42 | volumes: 43 | - ./docker-data/data/mysql/:/var/lib/mysql/ 44 | - ./src/infrastructure/db/init.sql:/docker-entrypoint-initdb.d/init.sql 45 | ports: 46 | - 3306:3306 # Dont expose ports on production 47 | networks: 48 | - filemanager 49 | 50 | rabbitmq: 51 | image: rabbitmq 52 | container_name: rabbitmq 53 | env_file: 54 | - src/.env 55 | ports: 56 | - "5672:5672" # RabbitMQ main port 57 | - "15672:15672" # RabbitMQ management UI 58 | volumes: 59 | - ./docker-data/data/rabbitmq/:/var/lib/rabbitmq 60 | networks: 61 | - filemanager 62 | 63 | networks: 64 | filemanager: 65 | driver: bridge 66 | -------------------------------------------------------------------------------- /src/alembic/env.py: -------------------------------------------------------------------------------- 1 | from core.config import config as setting 2 | from sqlalchemy import create_engine 3 | from infrastructure.db.mysql import mysql as db 4 | 5 | from logging.config import fileConfig 6 | from sqlalchemy import pool 7 | 8 | from alembic import context 9 | from entities import * 10 | 11 | # this is the Alembic Config object, which provides 12 | # access to the values within the .ini file in use. 13 | config = context.config 14 | 15 | # Interpret the config file for Python logging. 16 | # This line sets up loggers basically. 17 | if config.config_file_name is not None: 18 | fileConfig(config.config_file_name) 19 | 20 | # add your model's MetaData object here 21 | # for 'autogenerate' support 22 | target_metadata = db.Base.metadata 23 | 24 | # other values from the config, defined by the needs of env.py, 25 | # can be acquired: 26 | # my_important_option = config.get_main_option("my_important_option") 27 | # ... etc. 28 | 29 | 30 | def run_migrations_offline() -> None: 31 | """Run migrations in 'offline' mode. 32 | 33 | This configures the context with just a URL 34 | and not an Engine, though an Engine is acceptable 35 | here as well. By skipping the Engine creation 36 | we don't even need a DBAPI to be available. 37 | 38 | Calls to context.execute() here emit the given string to the 39 | script output. 40 | 41 | """ 42 | url = config.get_main_option(setting.MYSQL_DATABASE_URL) 43 | context.configure( 44 | url=url, 45 | target_metadata=target_metadata, 46 | literal_binds=True, 47 | dialect_opts={"paramstyle": "named"}, 48 | ) 49 | 50 | with context.begin_transaction(): 51 | context.run_migrations() 52 | 53 | 54 | def run_migrations_online() -> None: 55 | """Run migrations in 'online' mode. 56 | 57 | In this scenario we need to create an Engine 58 | and associate a connection with the context. 
59 | 60 | """ 61 | connectable = create_engine( 62 | str(setting.MYSQL_DATABASE_URL), 63 | poolclass=pool.NullPool, 64 | ) 65 | 66 | with connectable.connect() as connection: 67 | context.configure( 68 | connection=connection, target_metadata=target_metadata 69 | ) 70 | 71 | with context.begin_transaction(): 72 | context.run_migrations() 73 | 74 | 75 | if context.is_offline_mode(): 76 | run_migrations_offline() 77 | else: 78 | run_migrations_online() 79 | -------------------------------------------------------------------------------- /src/alembic/versions/6636fe22b643_tables.py: -------------------------------------------------------------------------------- 1 | """tables 2 | 3 | Revision ID: 6636fe22b643 4 | Revises: 5 | Create Date: 2024-08-10 13:26:54.846187 6 | 7 | """ 8 | from typing import Sequence, Union 9 | 10 | from alembic import op 11 | import sqlalchemy as sa 12 | 13 | 14 | # revision identifiers, used by Alembic. 15 | revision: str = '6636fe22b643' 16 | down_revision: Union[str, None] = None 17 | branch_labels: Union[str, Sequence[str], None] = None 18 | depends_on: Union[str, Sequence[str], None] = None 19 | 20 | 21 | def upgrade() -> None: 22 | # ### commands auto generated by Alembic - please adjust! ### 23 | op.create_table('celery_tasks', 24 | sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False), 25 | sa.Column('task_id', sa.String(length=255), nullable=False), 26 | sa.Column('status', sa.String(length=50), nullable=False), 27 | sa.Column('result', sa.BLOB(), nullable=True), 28 | sa.Column('date_done', sa.TIMESTAMP(), nullable=True), 29 | sa.Column('traceback', sa.Text(), nullable=True), 30 | sa.Column('meta', sa.Text(), nullable=True), 31 | sa.Column('name', sa.String(length=155), nullable=True), 32 | sa.Column('args', sa.BLOB(), nullable=True), 33 | sa.Column('kwargs', sa.BLOB(), nullable=True), 34 | sa.Column('worker', sa.String(length=155), nullable=True), 35 | sa.Column('retries', sa.Integer(), nullable=True), 36 | sa.Column('queue', sa.String(length=155), nullable=True), 37 | sa.PrimaryKeyConstraint('id') 38 | ) 39 | op.create_index(op.f('ix_celery_tasks_task_id'), 'celery_tasks', ['task_id'], unique=True) 40 | op.create_table('files', 41 | sa.Column('id', sa.VARCHAR(length=36), nullable=False), 42 | sa.Column('credential', sa.JSON(none_as_null=True), nullable=True), 43 | sa.Column('path', sa.VARCHAR(length=255), nullable=False), 44 | sa.Column('content_type', sa.String(length=32), nullable=False), 45 | sa.Column('size', sa.Integer(), nullable=True), 46 | sa.Column('detail', sa.JSON(none_as_null=True), nullable=True), 47 | sa.Column('celery_task_id', sa.String(length=36), nullable=True), 48 | sa.PrimaryKeyConstraint('id') 49 | ) 50 | op.create_index(op.f('ix_files_id'), 'files', ['id'], unique=True) 51 | # ### end Alembic commands ### 52 | 53 | 54 | def downgrade() -> None: 55 | # ### commands auto generated by Alembic - please adjust! 
### 56 | op.drop_index(op.f('ix_files_id'), table_name='files') 57 | op.drop_table('files') 58 | op.drop_index(op.f('ix_celery_tasks_task_id'), table_name='celery_tasks') 59 | op.drop_table('celery_tasks') 60 | # ### end Alembic commands ### 61 | -------------------------------------------------------------------------------- /src/core/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pydantic_core import MultiHostUrl 3 | from dotenv import load_dotenv 4 | 5 | load_dotenv() 6 | 7 | 8 | class Config: 9 | APP_UPLOAD_DIR = os.getenv("APP_UPLOAD_DIR") 10 | APP_MAX_CHUNK_SIZE = int(os.getenv("APP_MAX_CHUNK_SIZE")) 11 | ENV = os.getenv("ENV") 12 | print("ENV:", ENV) 13 | 14 | MINIO_ENDPOINT = os.getenv("MINIO_ENDPOINT") 15 | MINIO_URL = os.getenv("MINIO_URL") 16 | MINIO_ACCESS_KEY = os.getenv("MINIO_ACCESS_KEY") 17 | MINIO_SECRET_KEY = os.getenv("MINIO_SECRET_KEY") 18 | MINIO_PUBLIC_BUCKET = os.getenv('MINIO_PUBLIC_BUCKET', 'public') 19 | MINIO_PRIVATE_BUCKET = os.getenv('MINIO_PRIVATE_BUCKET', 'private') 20 | 21 | MYSQL_USER = os.getenv('MYSQL_USER', 'root') 22 | MYSQL_PASSWORD = os.getenv('MYSQL_ROOT_PASSWORD', 'password') 23 | MYSQL_HOST = os.getenv('MYSQL_HOST', 'db-mysql') 24 | MYSQL_PORT = os.getenv('MYSQL_PORT', '3306') 25 | MYSQL_DATABASE = os.getenv("MYSQL_DATABASE", "filemanager") 26 | MYSQL_TEST_DATABASE = os.getenv("MYSQL_TEST_DATABASE", "filemanager_test") 27 | 28 | @property 29 | def MYSQL_DATABASE_URL(self): 30 | if self.ENV == "testing": 31 | path = self.MYSQL_TEST_DATABASE 32 | else: 33 | path = self.MYSQL_DATABASE 34 | return MultiHostUrl.build( 35 | scheme="mysql+pymysql", 36 | username=self.MYSQL_USER, 37 | password=self.MYSQL_PASSWORD, 38 | host=self.MYSQL_HOST, 39 | port=int(self.MYSQL_PORT), 40 | path=path 41 | ) 42 | @property 43 | def CELERY_BACKEND_ENDPOINT(self): 44 | if self.ENV == "testing": 45 | path = self.MYSQL_TEST_DATABASE 46 | else: 47 | path = self.MYSQL_DATABASE 48 | return MultiHostUrl.build( 49 | scheme="db+mysql+pymysql", 50 | username=self.MYSQL_USER, 51 | password=self.MYSQL_PASSWORD, 52 | host=self.MYSQL_HOST, 53 | port=int(self.MYSQL_PORT), 54 | path=path 55 | ) 56 | 57 | RABBITMQ_DEFAULT_USER = os.getenv('RABBITMQ_DEFAULT_USER', 'root') 58 | RABBITMQ_DEFAULT_PASS = os.getenv('RABBITMQ_DEFAULT_PASS', 'password') 59 | RABBITMQ_HOST = os.getenv('RABBITMQ_HOST', 'localhost') 60 | RABBITMQ_PORT = os.getenv('RABBITMQ_PORT', '5672') 61 | 62 | @property 63 | def RABBITMQ_ENDPOINT(self): 64 | return MultiHostUrl.build( 65 | scheme="pyamqp", 66 | username=self.RABBITMQ_DEFAULT_USER, 67 | password=self.RABBITMQ_DEFAULT_PASS, 68 | host=self.RABBITMQ_HOST, 69 | port=int(self.RABBITMQ_PORT), 70 | ) 71 | 72 | config = Config() 73 | -------------------------------------------------------------------------------- /src/tests/functional/test_upload_chunk.py: -------------------------------------------------------------------------------- 1 | from fastapi import status 2 | from tests.test_utils import * 3 | import uuid 4 | 5 | 6 | def test_init_required_before_upload_chunk(test_app): 7 | file = generate_file(CHUNK_SIZE) 8 | response = test_app.post(f"{FILE_ENDPOINT}/upload/chunk/", files={ 9 | "file": (file.name, file, "application/octet-stream")}, data={ 10 | "chunk_size": CHUNK_SIZE, 11 | "upload_id": str(uuid.uuid4()), 12 | "chunk_index": 0 13 | } 14 | ) 15 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 16 | validate_error_response_structure(response_json=response.json()) 17 | 18 | 
19 | def test_chunk_file_is_required(test_app): 20 | response = test_app.post(f"{FILE_ENDPOINT}/upload/chunk/", data={ 21 | "chunk_size": CHUNK_SIZE, 22 | "upload_id": str(uuid.uuid4()), 23 | "chunk_index": 0 24 | } 25 | ) 26 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 27 | validate_error_response_structure(response_json=response.json()) 28 | 29 | 30 | def test_chunk_size_is_required(test_app): 31 | file = generate_file(CHUNK_SIZE) 32 | response = test_app.post(f"{FILE_ENDPOINT}/upload/chunk/", files={ 33 | "file": (file.name, file, "application/octet-stream")}, data={ 34 | "upload_id": str(uuid.uuid4()), 35 | "chunk_index": 0 36 | } 37 | ) 38 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 39 | validate_error_response_structure(response_json=response.json()) 40 | 41 | 42 | def test_upload_id_is_required(test_app): 43 | file = generate_file(CHUNK_SIZE) 44 | response = test_app.post(f"{FILE_ENDPOINT}/upload/chunk/", files={ 45 | "file": (file.name, file, "application/octet-stream")}, data={ 46 | "chunk_size": CHUNK_SIZE, 47 | "chunk_index": 0 48 | } 49 | ) 50 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 51 | validate_error_response_structure(response_json=response.json()) 52 | 53 | 54 | def test_chunk_index_is_required(test_app): 55 | file = generate_file(CHUNK_SIZE) 56 | response = test_app.post(f"{FILE_ENDPOINT}/upload/chunk/", files={ 57 | "file": (file.name, file, "application/octet-stream")}, data={ 58 | "upload_id": str(uuid.uuid4()), 59 | "chunk_size": CHUNK_SIZE, 60 | } 61 | ) 62 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 63 | validate_error_response_structure(response_json=response.json()) 64 | 65 | 66 | def test_chunk_upload(test_app): 67 | init_upload_response = test_app.post(f"{FILE_ENDPOINT}/upload/init") 68 | upload_id = init_upload_response.json()['data']['upload_id'] 69 | file = generate_file(CHUNK_SIZE) 70 | 71 | response = test_app.post(f"{FILE_ENDPOINT}/upload/chunk/", files={ 72 | "file": (file.name, file, "application/octet-stream")}, data={ 73 | "upload_id": upload_id, 74 | "chunk_size": CHUNK_SIZE, 75 | "chunk_index": 0 76 | } 77 | ) 78 | assert response.status_code == status.HTTP_200_OK 79 | validate_success_response_structure(response_json=response.json()) 80 | -------------------------------------------------------------------------------- /src/api/routes/file.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter, UploadFile, Form, Request 2 | from fastapi.responses import JSONResponse 3 | from infrastructure.db.mysql import mysql as db 4 | from repositories.file_repository import FileRepo 5 | from services.file_service import FileService 6 | from handlers.file_handler import FileHandler 7 | from api.responses.file_response import FileResponse, UploadInitResponse, UploadChunkResponse, UploadStatusResponse 8 | from typing import Optional 9 | from api.responses.response import SuccessResponse, ErrorResponse 10 | from core.config import config 11 | from constants.file_extensions import FileExtension 12 | 13 | router = APIRouter( 14 | prefix="/api/v1/file", 15 | tags=["file"] 16 | ) 17 | 18 | file_handler = FileHandler(service=FileService(repo=FileRepo(db=db))) 19 | 20 | 21 | @router.post("/upload/init/", response_model=SuccessResponse[UploadInitResponse]) 22 | async def endpoint(): 23 | return await file_handler.upload_initialize() 24 | 25 | 26 | @router.post("/upload/chunk/", 
response_model=SuccessResponse[UploadChunkResponse], responses={ 27 | 422: {"model": ErrorResponse}, 28 | }) 29 | async def endpoint(chunk_size: int = Form(..., le=config.APP_MAX_CHUNK_SIZE), 30 | upload_id: str = Form(...), chunk_index: int = Form(...), file: UploadFile = Form(...)): 31 | return await file_handler.upload_chunk(chunk_size=chunk_size, upload_id=upload_id, chunk_index=chunk_index, file=file) 32 | 33 | 34 | @router.post("/upload/complete/", response_model=SuccessResponse[FileResponse], responses={ 35 | 422: {"model": ErrorResponse}, 36 | }) 37 | async def endpoint(upload_id: str = Form(...), total_chunks: int = Form(...), 38 | total_size: int = Form(...), credential: Optional[str] = Form(None), 39 | file_extension: FileExtension = Form(...), content_type: str = Form(...), 40 | detail: Optional[str] = Form(None)): 41 | return await file_handler.upload_complete(upload_id=upload_id, total_chunks=total_chunks, total_size=total_size, 42 | file_extension=file_extension, content_type=content_type, 43 | credential=credential, detail=detail) 44 | 45 | 46 | @router.get('/get/{file_id}', response_model=SuccessResponse[FileResponse], responses={ 47 | 404: {"model": ErrorResponse}, 48 | 422: {"model": ErrorResponse}, 49 | 403: {"model": ErrorResponse} 50 | }) 51 | async def endpoint(file_id: str, request: Request) -> JSONResponse: 52 | credential = dict(request.query_params) 53 | return await file_handler.get_file(file_id=file_id, credential=credential) 54 | 55 | 56 | @router.get('/status/{file_id}', response_model=SuccessResponse[UploadStatusResponse], responses={ 57 | 404: {"model": ErrorResponse}, 58 | 422: {"model": ErrorResponse}, 59 | 403: {"model": ErrorResponse} 60 | }) 61 | async def endpoint(file_id: str, request: Request) -> JSONResponse: 62 | credential = dict(request.query_params) 63 | return await file_handler.get_upload_status(file_id=file_id, credential=credential) 64 | 65 | 66 | @router.post('/upload/retry', response_model=SuccessResponse[FileResponse], responses={ 67 | 404: {"model": ErrorResponse}, 68 | 422: {"model": ErrorResponse}, 69 | 403: {"model": ErrorResponse} 70 | }) 71 | async def endpoint(file_id: str = Form(...), credential: Optional[str] = Form(None)) -> JSONResponse: 72 | return await file_handler.retry_upload(file_id=file_id, credential=credential) 73 | -------------------------------------------------------------------------------- /src/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | # Use forward slashes (/) also on windows to provide an os agnostic path 6 | script_location = alembic 7 | 8 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 9 | # Uncomment the line below if you want the files to be prepended with date and time 10 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 11 | # for all available tokens 12 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 13 | 14 | # sys.path path, will be prepended to sys.path if present. 15 | # defaults to the current working directory. 16 | prepend_sys_path = . 17 | 18 | # timezone to use when rendering the date within the migration file 19 | # as well as the filename. 20 | # If specified, requires the python>=3.9 or backports.zoneinfo library. 
21 | # Any required deps can installed by adding `alembic[tz]` to the pip requirements 22 | # string value is passed to ZoneInfo() 23 | # leave blank for localtime 24 | #timezone = 25 | 26 | # max length of characters to apply to the "slug" field 27 | # truncate_slug_length = 40 28 | 29 | # set to 'true' to run the environment during 30 | # the 'revision' command, regardless of autogenerate 31 | # revision_environment = false 32 | 33 | # set to 'true' to allow .pyc and .pyo files without 34 | # a source .py file to be detected as revisions in the 35 | # versions/ directory 36 | # sourceless = false 37 | 38 | # version location specification; This defaults 39 | # to alembic/versions. When using multiple version 40 | # directories, initial revisions must be specified with --version-path. 41 | # The path separator used here should be the separator specified by "version_path_separator" below. 42 | # version_locations = %(here)s/bar:%(here)s/bat:alembic/versions 43 | 44 | # version path separator; As mentioned above, this is the character used to split 45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 47 | # Valid values for version_path_separator are: 48 | # 49 | # version_path_separator = : 50 | # version_path_separator = ; 51 | # version_path_separator = space 52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 53 | 54 | # set to 'true' to search source files recursively 55 | # in each "version_locations" directory 56 | # new in Alembic version 1.10 57 | # recursive_version_locations = false 58 | 59 | # the output encoding used when revision files 60 | # are written from script.py.mako 61 | # output_encoding = utf-8 62 | 63 | sqlalchemy.url = driver://user:pass@localhost/dbname 64 | 65 | 66 | [post_write_hooks] 67 | # post_write_hooks defines scripts or Python functions that are run 68 | # on newly generated revision scripts. 
See the documentation for further 69 | # detail and examples 70 | 71 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 72 | # hooks = black 73 | # black.type = console_scripts 74 | # black.entrypoint = black 75 | # black.options = -l 79 REVISION_SCRIPT_FILENAME 76 | 77 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary 78 | # hooks = ruff 79 | # ruff.type = exec 80 | # ruff.executable = %(here)s/.venv/bin/ruff 81 | # ruff.options = --fix REVISION_SCRIPT_FILENAME 82 | 83 | # Logging configuration 84 | [loggers] 85 | keys = root,sqlalchemy,alembic 86 | 87 | [handlers] 88 | keys = console 89 | 90 | [formatters] 91 | keys = generic 92 | 93 | [logger_root] 94 | level = WARN 95 | handlers = console 96 | qualname = 97 | 98 | [logger_sqlalchemy] 99 | level = WARN 100 | handlers = 101 | qualname = sqlalchemy.engine 102 | 103 | [logger_alembic] 104 | level = INFO 105 | handlers = 106 | qualname = alembic 107 | 108 | [handler_console] 109 | class = StreamHandler 110 | args = (sys.stderr,) 111 | level = NOTSET 112 | formatter = generic 113 | 114 | [formatter_generic] 115 | format = %(levelname)-5.5s [%(name)s] %(message)s 116 | datefmt = %H:%M:%S 117 | -------------------------------------------------------------------------------- /src/tests/functional/test_upload_complete.py: -------------------------------------------------------------------------------- 1 | from tests.test_utils import * 2 | import uuid 3 | from fastapi import status 4 | 5 | 6 | def test_init_required_before_upload_complete(test_app): 7 | response = test_app.post(f"{FILE_ENDPOINT}/upload/complete/", data={ 8 | "total_chunks": 1, 9 | "upload_id": str(uuid.uuid4()), 10 | "total_size": CHUNK_SIZE, 11 | "file_extension": "jpg", 12 | "content_type": "image/jpeg", 13 | } 14 | ) 15 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 16 | validate_error_response_structure(response_json=response.json()) 17 | 18 | 19 | def test_total_chunks_is_required(test_app): 20 | response = test_app.post(f"{FILE_ENDPOINT}/upload/complete/", data={ 21 | "upload_id": str(uuid.uuid4()), 22 | "total_size": CHUNK_SIZE, 23 | "file_extension": "jpg", 24 | "content_type": "image/jpeg", 25 | } 26 | ) 27 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 28 | validate_error_response_structure(response_json=response.json()) 29 | 30 | 31 | def test_upload_id_is_required(test_app): 32 | response = test_app.post(f"{FILE_ENDPOINT}/upload/complete/", data={ 33 | "total_chunks": 1, 34 | "total_size": CHUNK_SIZE, 35 | "file_extension": "jpg", 36 | "content_type": "image/jpeg", 37 | } 38 | ) 39 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 40 | validate_error_response_structure(response_json=response.json()) 41 | 42 | 43 | def test_total_size_is_required(test_app): 44 | response = test_app.post(f"{FILE_ENDPOINT}/upload/complete/", data={ 45 | "total_chunks": 1, 46 | "upload_id": str(uuid.uuid4()), 47 | "file_extension": "jpg", 48 | "content_type": "image/jpeg", 49 | } 50 | ) 51 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 52 | validate_error_response_structure(response_json=response.json()) 53 | 54 | 55 | def test_file_extension_is_required(test_app): 56 | response = test_app.post(f"{FILE_ENDPOINT}/upload/complete/", data={ 57 | "total_chunks": 1, 58 | "upload_id": str(uuid.uuid4()), 59 | "total_size": CHUNK_SIZE, 60 | "content_type": "image/jpeg", 61 | } 62 | ) 63 | assert response.status_code == 
status.HTTP_422_UNPROCESSABLE_ENTITY 64 | validate_error_response_structure(response_json=response.json()) 65 | 66 | 67 | def test_file_extension_should_be_valid(test_app): 68 | response = test_app.post(f"{FILE_ENDPOINT}/upload/complete/", data={ 69 | "total_chunks": 1, 70 | "upload_id": str(uuid.uuid4()), 71 | "total_size": CHUNK_SIZE, 72 | "file_extension": "jpge", 73 | "content_type": "image/jpeg", 74 | } 75 | ) 76 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 77 | validate_error_response_structure(response_json=response.json()) 78 | 79 | 80 | def test_content_type_is_required(test_app): 81 | response = test_app.post(f"{FILE_ENDPOINT}/upload/complete/", data={ 82 | "total_chunks": 1, 83 | "upload_id": str(uuid.uuid4()), 84 | "total_size": CHUNK_SIZE, 85 | "file_extension": "jpg", 86 | } 87 | ) 88 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 89 | validate_error_response_structure(response_json=response.json()) 90 | 91 | 92 | def test_credential_should_be_valid(test_app): 93 | response = test_app.post(f"{FILE_ENDPOINT}/upload/complete/", data={ 94 | "total_chunks": 1, 95 | "upload_id": str(uuid.uuid4()), 96 | "total_size": CHUNK_SIZE, 97 | "file_extension": "jpg", 98 | "content_type": "image/jpeg", 99 | "credential": '{"user": {"name": "", "id": "1"}}' 100 | } 101 | ) 102 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 103 | validate_error_response_structure(response_json=response.json()) 104 | 105 | 106 | def test_detail_should_be_valid(test_app): 107 | response = test_app.post(f"{FILE_ENDPOINT}/upload/complete/", data={ 108 | "total_chunks": 1, 109 | "upload_id": str(uuid.uuid4()), 110 | "total_size": CHUNK_SIZE, 111 | "file_extension": "jpg", 112 | "content_type": "image/jpeg", 113 | "detail": '{"chunk": {"count": 1, "size": 1024}}' 114 | } 115 | ) 116 | assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY 117 | validate_error_response_structure(response_json=response.json()) 118 | -------------------------------------------------------------------------------- /src/services/file_service.py: -------------------------------------------------------------------------------- 1 | from repositories.file_repository import FileRepo 2 | from dto.file_dto import UploadFileDTO, UploadChunkDTO, RetryUploadFileDTO 3 | from typing import Dict, Any 4 | from entities.file import File 5 | import os 6 | import aiofiles 7 | from fastapi.exceptions import RequestValidationError 8 | from constants.errors import ValidatonErrors 9 | from infrastructure.minio import minioStorage 10 | from dto.file_dto import FileBaseDTO 11 | from services.base_service import BaseService 12 | from exceptions.http_exception import PermissionException, FileNotFoundException, FileUploadedException, FilePendingUploadException 13 | from tasks.file_upload_task import upload_file_task 14 | import uuid 15 | from core.config import config 16 | from celery.result import AsyncResult 17 | from tasks import celery 18 | from constants.upload_stauts import UploadStatus 19 | 20 | 21 | class FileService(BaseService[FileRepo]): 22 | def __init__(self, repo: FileRepo) -> None: 23 | super().__init__(repo=repo) 24 | 25 | async def upload_initialize(self) -> str: 26 | upload_id = str(uuid.uuid4()) 27 | os.makedirs(os.path.join( 28 | config.APP_UPLOAD_DIR, upload_id), exist_ok=True) 29 | return upload_id 30 | 31 | async def upload_chunk(self, payload: UploadChunkDTO) -> None: 32 | upload_dir = os.path.join(config.APP_UPLOAD_DIR, payload.upload_id) 33 | chunk_path = 
os.path.join(upload_dir, f"{payload.chunk_index}.part") 34 | async with aiofiles.open(chunk_path, "wb") as chunk_file: 35 | content = await payload.file.read() 36 | if len(content) > config.APP_MAX_CHUNK_SIZE: 37 | raise RequestValidationError(errors=[{ 38 | 'loc': ('body', 'file'), 39 | 'msg': ValidatonErrors.LE_CHUNCK_SIZE, 40 | 'type': 'value_error' 41 | }], 42 | body={"file": "invalid_size"}) 43 | await chunk_file.write(content) 44 | 45 | async def upload_complete(self, payload: UploadFileDTO) -> File: 46 | if not payload.credential: 47 | bucket = minioStorage.public_bucket 48 | else: 49 | bucket = minioStorage.private_bucket 50 | filename = f"{payload.upload_id}.{payload.file_extension.value}" 51 | if not os.path.exists(os.path.join(config.APP_UPLOAD_DIR, payload.upload_id)): 52 | raise FileNotFoundError 53 | celery_task = upload_file_task.delay(bucket=bucket, upload_id=payload.upload_id, 54 | total_chunks=payload.total_chunks, filename=filename) 55 | file = self.repo.create_file(FileBaseDTO(path=bucket + "/" + filename, content_type=payload.content_type, detail=payload.detail, 56 | size=payload.total_size, credential=payload.credential, celery_task_id=celery_task.id)) 57 | return file 58 | 59 | async def get_download_link(self, file: File) -> str: 60 | bucket_name = file.path.split("/")[0] 61 | filename = "/".join(file.path.split("/")[1:]) 62 | if not file.credential: 63 | return minioStorage.get_url(bucket_name=bucket_name, object_name=filename) 64 | else: 65 | for key, value in file.credential.items(): 66 | if not isinstance(value, str): 67 | file.credential[key] = str(value) 68 | return minioStorage.get_presigned_url("GET", bucket_name=bucket_name, object_name=filename, extra_query_params=file.credential) 69 | 70 | async def get_file(self, id: str, credential: Dict[str, Any] | None = None) -> File: 71 | file = self.repo.get_file(id=id) 72 | if file is None: 73 | raise FileNotFoundException 74 | if file.credential and credential != file.credential: 75 | raise PermissionException() 76 | return file 77 | 78 | async def get_upload_status(self, file_id: str, credential: Dict[str, Any] | None = None) -> str: 79 | file = await self.get_file(id=file_id, credential=credential) 80 | result = AsyncResult(file.celery_task_id) 81 | return result.state 82 | 83 | async def retry_upload(self, payload: RetryUploadFileDTO): 84 | file = await self.get_file(id=payload.id, credential=payload.credential) 85 | result = AsyncResult(file.celery_task_id) 86 | if result.status == UploadStatus.SUCCESS.value: 87 | raise FileUploadedException() 88 | if result.status == UploadStatus.PENDING.value or result.status == UploadStatus.STARTED.value: 89 | raise FilePendingUploadException() 90 | meta = celery.backend.get_task_meta(file.celery_task_id) 91 | upload_file_task.apply_async( 92 | args=meta['args'], kwargs=meta['kwargs'], task_id=file.celery_task_id) 93 | return file 94 | -------------------------------------------------------------------------------- /src/infrastructure/minio.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from core.config import config 3 | from minio import Minio 4 | from minio.helpers import ObjectWriteResult 5 | from typing import Self 6 | 7 | 8 | class MinioStorage: 9 | 10 | _instance: Self = None 11 | 12 | def __new__(cls: Self) -> Self: 13 | """ 14 | `MinioStorage` is the main entry point for using MinIO 15 | 16 | ## Example 17 | ```python 18 | from infrastructure.minio import MinioStorage 19 | 20 | minioStorage = MinioStorage() 21 | ``` 22 | 
""" 23 | if cls._instance == None: 24 | cls._instance = super().__new__(cls) 25 | cls._instance.__initialize() 26 | return cls._instance 27 | 28 | def __initialize(self) -> None: 29 | self.client = Minio( 30 | config.MINIO_ENDPOINT, 31 | access_key=config.MINIO_ACCESS_KEY, 32 | secret_key=config.MINIO_SECRET_KEY, 33 | secure=False, 34 | ) 35 | self.public_bucket = config.MINIO_PUBLIC_BUCKET 36 | self.private_bucket = config.MINIO_PRIVATE_BUCKET 37 | 38 | def bucket_exists(self, bucket_name) -> bool: 39 | """ 40 | Check if the bucket exists 41 | 42 | :param bucket_name: Name of the bucket 43 | """ 44 | found = self.client.bucket_exists(bucket_name) 45 | if not found: 46 | return False 47 | return True 48 | 49 | def create_bucket(self, bucket_name, location: str | None = None, object_lock: bool = False,) -> None: 50 | """ 51 | Create a bucket with given name and region and object lock 52 | 53 | :param bucket_name: Name of the bucket. 54 | :param location: Region in which the bucket will be created. 55 | :param object_lock: Flag to set object-lock feature. 56 | 57 | """ 58 | self.client.make_bucket(bucket_name, location, object_lock) 59 | 60 | def put_object(self, bucket_name, object_name, data, length, content_type="application/octet-stream", metadate=None, 61 | sse=None, progress=None, part_size=0, num_parallel_uploads=3, tags=None, retention=None, legal_hold=False) -> ObjectWriteResult: 62 | """ 63 | Uploads data from a stream to an object in a bucket. 64 | 65 | :param bucket_name: Name of the bucket. 66 | :param object_name: Object name in the bucket. 67 | :param data: An object having callable read() returning bytes object. 68 | :param length: Data size; -1 for unknown size and set valid part_size. 69 | :param content_type: Content type of the object. 70 | :param metadata: Any additional metadata to be uploaded along 71 | with your PUT request. 72 | :param sse: Server-side encryption. 73 | :param progress: A progress object; 74 | :param part_size: Multipart part size. 75 | :param num_parallel_uploads: Number of parallel uploads. 76 | :param tags: :class:`Tags` for the object. 77 | :param retention: :class:`Retention` configuration object. 78 | :param legal_hold: Flag to set legal hold for the object. 79 | :return: :class:`ObjectWriteResult` object. 80 | """ 81 | if not self.bucket_exists(bucket_name=bucket_name): 82 | self.create_bucket(bucket_name=bucket_name) 83 | return self.client.put_object(bucket_name, object_name, data, length, content_type, metadate, sse, progress, part_size, 84 | num_parallel_uploads, tags, retention, legal_hold) 85 | 86 | def get_presigned_url(self, method, bucket_name, object_name, expires=timedelta(days=7), response_headers=None, request_date=None, 87 | version_id=None, extra_query_params=None) -> str: 88 | """ 89 | Get presigned URL of an object for HTTP method, expiry time and custom 90 | request parameters. 91 | 92 | :param method: HTTP method. 93 | :param bucket_name: Name of the bucket. 94 | :param object_name: Object name in the bucket. 95 | :param expires: Expiry in seconds; defaults to 7 days. 96 | :param response_headers: Optional response_headers argument to 97 | specify response fields like date, size, 98 | type of file, data about server, etc. 99 | :param request_date: Optional request_date argument to 100 | specify a different request date. Default is 101 | current date. 102 | :param version_id: Version ID of the object. 103 | :param extra_query_params: Extra query parameters for advanced usage. 104 | :return: URL string. 
105 | """ 106 | return self.client.get_presigned_url(method, bucket_name, object_name, expires, response_headers, request_date, version_id, extra_query_params) 107 | 108 | def get_url(self, bucket_name, object_name): 109 | return f"{config.MINIO_URL}/{bucket_name}/{object_name}" 110 | 111 | 112 | minioStorage = MinioStorage() 113 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # File Management Service 2 | 3 | [![codecov](https://codecov.io/github/hanieas/fastapi-file-management-service/graph/badge.svg?token=OGUBX46W31)](https://codecov.io/github/hanieas/fastapi-file-management-service) 4 | 5 | ## Table of Contents 6 | 1. [Introduction](#introduction) 7 | 2. [Technology Stack and Features](#technology-stack-and-features) 8 | 3. [Why a Separate File Management Service?](#why-a-separate-file-management-service) 9 | 4. [How to Use it?](#how-to-use-it) 10 | 5. [API Endpoints](#api-endpoints) 11 | 6. [Contributing](#Contributing) 12 | 13 | ## Introduction 14 | 15 | This microservice is designed to manage all file-related tasks. It uses **MinIO** for object storage and **MySQL** for managing file metadata. We support chunk uploads for handling large files efficiently, with **Celery** running background tasks to ensure smooth performance. 16 | 17 | ## Technology Stack and Features 18 | 19 | - ⚡ [**FastAPI**](https://fastapi.tiangolo.com) for the Python backend API. 20 | - 🧰 [SQLAlchemy](https://www.sqlalchemy.org/) for the Python SQL database interactions (ORM). 21 | - 🔍 [Pydantic](https://docs.pydantic.dev), used by FastAPI, for the data validation and settings management. 22 | - 🗄️ [MYSQL](https://www.mysql.com/) as the SQL database. 23 | - 🔄 [Alembic](https://alembic.sqlalchemy.org/en/latest) for database migrations. 24 | - 🔧 [Celery](https://docs.celeryq.dev/en/stable/) with [RabbitMQ](https://www.rabbitmq.com/) for task queue management and background processing. 25 | - 💾 [MinIO](https://min.io/) for scalable object storage with chunk upload support. 26 | - ✅ [Pytest](https://pytest.org) for testing to ensure code reliability and functionality. 27 | - 🐋 [Docker Compose](https://www.docker.com) for development and production. 28 | 29 | ## Why a Separate File Management Service? 30 | 31 | 1. Centralizes file operations, making management and maintenance easier. 32 | 2. Enables scaling file handling independently of other services. 33 | 3. Simplifies updates and changes to file handling without impacting other parts of the system. 34 | 4. Reduces code duplication by keeping file upload and retrieval logic in one place, resulting in cleaner code. 35 | 36 | ## How to Use it? 37 | 38 | 1. **Complete the `.env` File**: 39 | - Copy the contents of `.env.example` to a new file named `.env`. 40 | - Fill in the required environment variables based on your setup. 41 | 42 | 2. **Build the Docker Image**: 43 | - Run the following command to build the Docker image: 44 | ```bash 45 | docker compose build 46 | ``` 47 | 48 | 3. **Run the Containers**: 49 | - After the build is complete, start the containers in detached mode with: 50 | ```bash 51 | docker compose up -d 52 | ``` 53 | 54 | 4. **Migrate the Database**: 55 | - Access the running container to perform the database migration: 56 | ```bash 57 | docker compose exec filemanager bash 58 | ``` 59 | - Inside the container, run the migration using Alembic: 60 | ```bash 61 | alembic upgrade head 62 | ``` 63 | 64 | 5. 
**Access the Service**: 65 | - The project is now up and running, accessible on port `8000`. 66 | - You can access the project documentation by navigating to `/docs` in your browser. 67 | 68 | ## API Endpoints 69 | 70 | Here’s a quick reference guide to the available API endpoints, their methods, and what they do: 71 | 72 | | Method | URL | Description | 73 | |--------|---------------------------------------------|------------------------------------------------------------------| 74 | | POST | `/api/v1/file/upload/init/` | Initialize a new file upload session. | 75 | | POST | `/api/v1/file/upload/chunk/` | Upload a file chunk. | 76 | | POST | `/api/v1/file/upload/complete/` | Complete the file upload process. | 77 | | GET | `/api/v1/file/get/{file_id}` | Retrieve a file by its ID. | 78 | | GET | `/api/v1/file/status/{file_id}` | Check the upload status of a file. | 79 | | POST | `/api/v1/file/upload/retry` | Retry uploading a file. | 80 | 81 | A Postman collection export is also available for testing these endpoints. You can import it into Postman to quickly get started with API testing. A minimal Python client sketch of the upload flow is also included at the end of this README. 82 | 83 | ## Contributing 84 | 85 | We welcome contributions from everyone! If you have ideas for improvements, new features, or bug fixes, feel free to contribute to this project. Here's how you can get involved: 86 | 87 | 1. **Create an Issue**: 88 | - If you find a bug, have a question, or want to suggest a feature, please open an issue. This helps us track and discuss your ideas. 89 | 90 | 2. **Send a Pull Request (PR)**: 91 | - Fork the repository, make your changes in a new branch, and then create a pull request. 92 | - Please make sure your code follows the project's coding standards and passes all tests. 93 | 94 | We appreciate your contributions and will do our best to review and merge your pull requests promptly. Thank you for helping us improve this project!
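The following is a minimal, illustrative Python client for the chunked upload flow described above. It is not part of the service code: the base URL, file name, and `credential` values are placeholders, the `requests` package is assumed to be installed, and the response shapes follow the examples in the bundled Postman collection.

```python
"""Sketch of the chunked upload flow against the documented endpoints."""
import json
import os

import requests  # assumed to be installed; any HTTP client works

BASE_URL = "http://127.0.0.1:8000/api/v1/file"  # placeholder local deployment
CHUNK_SIZE = 10 * 1024 * 1024  # must not exceed the server's maximum chunk size


def upload_file(path: str, content_type: str, credential: dict | None = None) -> dict:
    # 1. Initialize an upload session to obtain an upload_id.
    init = requests.post(f"{BASE_URL}/upload/init/").json()
    upload_id = init["data"]["upload_id"]

    # 2. Upload the file in chunks of at most CHUNK_SIZE bytes.
    total_size = os.path.getsize(path)
    total_chunks = 0
    with open(path, "rb") as source:
        while chunk := source.read(CHUNK_SIZE):
            requests.post(
                f"{BASE_URL}/upload/chunk/",
                data={
                    "upload_id": upload_id,
                    "chunk_index": total_chunks,
                    "chunk_size": len(chunk),
                },
                files={"file": (f"{total_chunks}.part", chunk)},
            ).raise_for_status()
            total_chunks += 1

    # 3. Complete the upload; the service assembles the chunks and stores the
    #    file in MinIO through a background Celery task.
    form = {
        "upload_id": upload_id,
        "total_chunks": total_chunks,
        "total_size": total_size,
        "file_extension": os.path.splitext(path)[1].lstrip("."),
        "content_type": content_type,
    }
    if credential is not None:
        form["credential"] = json.dumps(credential)  # makes the file private
    response = requests.post(f"{BASE_URL}/upload/complete/", data=form)
    response.raise_for_status()
    return response.json()["data"]


if __name__ == "__main__":
    info = upload_file("example.jpg", "image/jpeg", credential={"user_id": 84793})
    # Poll the background upload status; for private files the credential is
    # passed as query parameters, mirroring the Postman examples.
    status = requests.get(
        f"{BASE_URL}/status/{info['id']}", params={"user_id": 84793}
    ).json()["data"]["status"]
    print(info["download_url"], status)
```

Note that the init response also reports the server-side maximum chunk size, so a real client should read it from there rather than hard-coding `CHUNK_SIZE`.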
95 | -------------------------------------------------------------------------------- /src/handlers/file_handler.py: -------------------------------------------------------------------------------- 1 | from services.file_service import FileService 2 | from fastapi import UploadFile, status 3 | from constants.messages import Message 4 | from dto.file_dto import UploadFileDTO, UploadChunkDTO, RetryUploadFileDTO 5 | from api.responses.file_response import FileResponse, UploadInitResponse, UploadChunkResponse, UploadStatusResponse 6 | from handlers.base_handler import BaseHandler 7 | from api.responses.response import SuccessResponse, ErrorResponse 8 | from fastapi.responses import JSONResponse 9 | from exceptions.http_exception import BaseException 10 | from constants.file_extensions import FileExtension 11 | from constants.errors import Errors 12 | from typing import Dict, Any 13 | from core.config import config 14 | from utils import parse_json_to_dict 15 | 16 | 17 | class FileHandler(BaseHandler[FileService]): 18 | def __init__(self, service: FileService) -> None: 19 | super().__init__(service=service) 20 | 21 | async def upload_initialize(self): 22 | upload_id = await self.service.upload_initialize() 23 | return self.response.success(content=SuccessResponse[UploadInitResponse](data=UploadInitResponse( 24 | chunk_size=config.APP_MAX_CHUNK_SIZE, 25 | upload_id=upload_id 26 | ))) 27 | 28 | async def upload_chunk(self, chunk_size: int, upload_id: str, chunk_index: int, file: UploadFile): 29 | payload = UploadChunkDTO( 30 | chunk_size=chunk_size, file=file, upload_id=upload_id, chunk_index=chunk_index) 31 | try: 32 | await self.service.upload_chunk(payload) 33 | return self.response.success(content=SuccessResponse[UploadChunkResponse]( 34 | data=UploadChunkResponse(chunk_index=chunk_index, upload_id=upload_id), message=Message.UPLOADED_CHUNK 35 | )) 36 | except FileNotFoundError as exc: 37 | return self.response.error(ErrorResponse(message=Errors.FILE_NOT_FOUND), status=status.HTTP_422_UNPROCESSABLE_ENTITY) 38 | 39 | async def upload_complete(self, upload_id: str, total_chunks: int, total_size: int, file_extension: FileExtension, 40 | content_type: str, credential: str, detail: str, size: int = 0) -> JSONResponse: 41 | if credential: 42 | credential_dict = parse_json_to_dict(credential, 'credential') 43 | else: 44 | credential_dict = None 45 | 46 | if detail: 47 | detail_dict = parse_json_to_dict(detail, 'detail') 48 | else: 49 | detail_dict = None 50 | payload = UploadFileDTO(upload_id=upload_id, total_chunks=total_chunks, total_size=total_size, file_extension=file_extension, 51 | content_type=content_type, detail=detail_dict, credential=credential_dict, size=size) 52 | try: 53 | file = await self.service.upload_complete(payload=payload) 54 | download_url = await self.service.get_download_link(file) 55 | data = FileResponse(id=file.id, path=file.path, credential=file.credential, 56 | content_type=file.content_type, detail=file.detail, download_url=download_url) 57 | return self.response.success(content=SuccessResponse[FileResponse](data=data)) 58 | except FileNotFoundError as exc: 59 | return self.response.error(ErrorResponse(message=Errors.FILE_NOT_FOUND), status=status.HTTP_422_UNPROCESSABLE_ENTITY) 60 | 61 | async def get_file(self, file_id: str, credential=Dict[str, Any]) -> JSONResponse: 62 | try: 63 | file = await self.service.get_file(id=file_id, credential=credential) 64 | download_url = await self.service.get_download_link(file) 65 | data = FileResponse(id=file.id, path=file.path, 
credential=file.credential, 66 | content_type=file.content_type, detail=file.detail, download_url=download_url) 67 | return self.response.success(SuccessResponse[FileResponse](data=data)) 68 | except BaseException as exception: 69 | return self.response.error(ErrorResponse(message=exception.message), status=exception.status) 70 | 71 | async def get_upload_status(self, file_id: str, credential=Dict[str, Any]) -> JSONResponse: 72 | try: 73 | result = await self.service.get_upload_status(file_id=file_id, credential=credential) 74 | return self.response.success(SuccessResponse[UploadStatusResponse](data=UploadStatusResponse(status=result))) 75 | except BaseException as exception: 76 | return self.response.error(ErrorResponse(message=exception.message), status=exception.status) 77 | 78 | async def retry_upload(self, file_id: str, credential: str): 79 | if credential: 80 | credential_dict = parse_json_to_dict(credential, 'credential') 81 | else: 82 | credential_dict = None 83 | payload = RetryUploadFileDTO(id=file_id, credential=credential_dict) 84 | try: 85 | file = await self.service.retry_upload(payload=payload) 86 | download_url = await self.service.get_download_link(file) 87 | data = FileResponse(id=file.id, path=file.path, credential=file.credential, 88 | content_type=file.content_type, detail=file.detail, download_url=download_url) 89 | return self.response.success(content=SuccessResponse[FileResponse](data=data)) 90 | except BaseException as exception: 91 | return self.response.error(ErrorResponse(message=exception.message), status=exception.status) 92 | -------------------------------------------------------------------------------- /filemanager.postman_collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "info": { 3 | "_postman_id": "61934e60-d9c4-4b89-91eb-7491641702fa", 4 | "name": "microservices", 5 | "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", 6 | "_exporter_id": "1946064", 7 | "_collection_link": "https://nevis6.postman.co/workspace/Nevis-Workspace~7226aff7-af62-473c-b7f0-fb4d508aea1a/collection/1946064-61934e60-d9c4-4b89-91eb-7491641702fa?action=share&source=collection_link&creator=1946064" 8 | }, 9 | "item": [ 10 | { 11 | "name": "filemanager", 12 | "item": [ 13 | { 14 | "name": "upload init", 15 | "request": { 16 | "method": "POST", 17 | "header": [], 18 | "body": { 19 | "mode": "formdata", 20 | "formdata": [] 21 | }, 22 | "url": { 23 | "raw": "{{filemanager}}/upload/init/", 24 | "host": [ 25 | "{{filemanager}}" 26 | ], 27 | "path": [ 28 | "upload", 29 | "init", 30 | "" 31 | ] 32 | } 33 | }, 34 | "response": [ 35 | { 36 | "name": "200", 37 | "originalRequest": { 38 | "method": "POST", 39 | "header": [], 40 | "body": { 41 | "mode": "formdata", 42 | "formdata": [] 43 | }, 44 | "url": { 45 | "raw": "{{filemanager}}/upload/init/", 46 | "host": [ 47 | "{{filemanager}}" 48 | ], 49 | "path": [ 50 | "upload", 51 | "init", 52 | "" 53 | ] 54 | } 55 | }, 56 | "status": "OK", 57 | "code": 200, 58 | "_postman_previewlanguage": "json", 59 | "header": [ 60 | { 61 | "key": "date", 62 | "value": "Mon, 12 Aug 2024 06:53:25 GMT" 63 | }, 64 | { 65 | "key": "server", 66 | "value": "uvicorn" 67 | }, 68 | { 69 | "key": "content-length", 70 | "value": "112" 71 | }, 72 | { 73 | "key": "content-type", 74 | "value": "application/json" 75 | } 76 | ], 77 | "cookie": [], 78 | "body": "{\n \"data\": {\n \"chunck_size\": 10485760,\n \"upload_id\": \"f28a2047-2409-4a88-af3b-227038f6c81c\"\n },\n \"message\": \"\",\n 
\"success\": true\n}" 79 | } 80 | ] 81 | }, 82 | { 83 | "name": "upload chunk", 84 | "protocolProfileBehavior": { 85 | "disabledSystemHeaders": {} 86 | }, 87 | "request": { 88 | "method": "POST", 89 | "header": [ 90 | { 91 | "key": "Range", 92 | "value": "", 93 | "type": "text" 94 | } 95 | ], 96 | "body": { 97 | "mode": "formdata", 98 | "formdata": [ 99 | { 100 | "key": "file", 101 | "type": "file", 102 | "src": "/Users/haniyeh/Documents/files/3/segmentaa" 103 | }, 104 | { 105 | "key": "upload_id", 106 | "value": "766bed2c-549f-4586-af84-1784e6ac3781", 107 | "type": "text" 108 | }, 109 | { 110 | "key": "chunk_index", 111 | "value": "0", 112 | "type": "text" 113 | }, 114 | { 115 | "key": "chunk_size", 116 | "value": "10485760", 117 | "type": "text" 118 | } 119 | ] 120 | }, 121 | "url": { 122 | "raw": "{{filemanager}}/upload/chunk/", 123 | "host": [ 124 | "{{filemanager}}" 125 | ], 126 | "path": [ 127 | "upload", 128 | "chunk", 129 | "" 130 | ] 131 | } 132 | }, 133 | "response": [ 134 | { 135 | "name": "200 - chunk 0", 136 | "originalRequest": { 137 | "method": "POST", 138 | "header": [ 139 | { 140 | "key": "Range", 141 | "value": "", 142 | "type": "text" 143 | } 144 | ], 145 | "body": { 146 | "mode": "formdata", 147 | "formdata": [ 148 | { 149 | "key": "file", 150 | "type": "file", 151 | "src": "/Users/haniyeh/Documents/files/3/segmentaa" 152 | }, 153 | { 154 | "key": "upload_id", 155 | "value": "f28a2047-2409-4a88-af3b-227038f6c81c", 156 | "type": "text" 157 | }, 158 | { 159 | "key": "chunk_index", 160 | "value": "0", 161 | "type": "text" 162 | }, 163 | { 164 | "key": "chunk_size", 165 | "value": "10485760", 166 | "type": "text" 167 | } 168 | ] 169 | }, 170 | "url": { 171 | "raw": "{{filemanager}}/upload/chunk/", 172 | "host": [ 173 | "{{filemanager}}" 174 | ], 175 | "path": [ 176 | "upload", 177 | "chunk", 178 | "" 179 | ] 180 | } 181 | }, 182 | "status": "OK", 183 | "code": 200, 184 | "_postman_previewlanguage": "json", 185 | "header": [ 186 | { 187 | "key": "date", 188 | "value": "Mon, 12 Aug 2024 06:55:16 GMT" 189 | }, 190 | { 191 | "key": "server", 192 | "value": "uvicorn" 193 | }, 194 | { 195 | "key": "content-length", 196 | "value": "140" 197 | }, 198 | { 199 | "key": "content-type", 200 | "value": "application/json" 201 | } 202 | ], 203 | "cookie": [], 204 | "body": "{\n \"data\": {\n \"chunk_index\": 0,\n \"upload_id\": \"f28a2047-2409-4a88-af3b-227038f6c81c\"\n },\n \"message\": \"The chunk is uploaded successfully!\",\n \"success\": true\n}" 205 | }, 206 | { 207 | "name": "200 - chunk 1", 208 | "originalRequest": { 209 | "method": "POST", 210 | "header": [ 211 | { 212 | "key": "Range", 213 | "value": "", 214 | "type": "text" 215 | } 216 | ], 217 | "body": { 218 | "mode": "formdata", 219 | "formdata": [ 220 | { 221 | "key": "file", 222 | "type": "file", 223 | "src": "/Users/haniyeh/Documents/files/3/segmentab" 224 | }, 225 | { 226 | "key": "upload_id", 227 | "value": "f28a2047-2409-4a88-af3b-227038f6c81c", 228 | "type": "text" 229 | }, 230 | { 231 | "key": "chunk_index", 232 | "value": "1", 233 | "type": "text" 234 | }, 235 | { 236 | "key": "chunk_size", 237 | "value": "10485760", 238 | "type": "text" 239 | } 240 | ] 241 | }, 242 | "url": { 243 | "raw": "{{filemanager}}/upload/chunk/", 244 | "host": [ 245 | "{{filemanager}}" 246 | ], 247 | "path": [ 248 | "upload", 249 | "chunk", 250 | "" 251 | ] 252 | } 253 | }, 254 | "status": "OK", 255 | "code": 200, 256 | "_postman_previewlanguage": "json", 257 | "header": [ 258 | { 259 | "key": "date", 260 | "value": "Mon, 12 Aug 2024 
06:55:42 GMT" 261 | }, 262 | { 263 | "key": "server", 264 | "value": "uvicorn" 265 | }, 266 | { 267 | "key": "content-length", 268 | "value": "140" 269 | }, 270 | { 271 | "key": "content-type", 272 | "value": "application/json" 273 | } 274 | ], 275 | "cookie": [], 276 | "body": "{\n \"data\": {\n \"chunk_index\": 1,\n \"upload_id\": \"f28a2047-2409-4a88-af3b-227038f6c81c\"\n },\n \"message\": \"The chunk is uploaded successfully!\",\n \"success\": true\n}" 277 | }, 278 | { 279 | "name": "200 - chunk 2", 280 | "originalRequest": { 281 | "method": "POST", 282 | "header": [ 283 | { 284 | "key": "Range", 285 | "value": "", 286 | "type": "text" 287 | } 288 | ], 289 | "body": { 290 | "mode": "formdata", 291 | "formdata": [ 292 | { 293 | "key": "file", 294 | "type": "file", 295 | "src": "/Users/haniyeh/Documents/files/3/segmentac" 296 | }, 297 | { 298 | "key": "upload_id", 299 | "value": "f28a2047-2409-4a88-af3b-227038f6c81c", 300 | "type": "text" 301 | }, 302 | { 303 | "key": "chunk_index", 304 | "value": "2", 305 | "type": "text" 306 | }, 307 | { 308 | "key": "chunk_size", 309 | "value": "847379", 310 | "type": "text" 311 | } 312 | ] 313 | }, 314 | "url": { 315 | "raw": "{{filemanager}}/upload/chunk/", 316 | "host": [ 317 | "{{filemanager}}" 318 | ], 319 | "path": [ 320 | "upload", 321 | "chunk", 322 | "" 323 | ] 324 | } 325 | }, 326 | "status": "OK", 327 | "code": 200, 328 | "_postman_previewlanguage": "json", 329 | "header": [ 330 | { 331 | "key": "date", 332 | "value": "Mon, 12 Aug 2024 06:56:26 GMT" 333 | }, 334 | { 335 | "key": "server", 336 | "value": "uvicorn" 337 | }, 338 | { 339 | "key": "content-length", 340 | "value": "140" 341 | }, 342 | { 343 | "key": "content-type", 344 | "value": "application/json" 345 | } 346 | ], 347 | "cookie": [], 348 | "body": "{\n \"data\": {\n \"chunk_index\": 2,\n \"upload_id\": \"f28a2047-2409-4a88-af3b-227038f6c81c\"\n },\n \"message\": \"The chunk is uploaded successfully!\",\n \"success\": true\n}" 349 | }, 350 | { 351 | "name": "422 - invalid upload_id", 352 | "originalRequest": { 353 | "method": "POST", 354 | "header": [ 355 | { 356 | "key": "Range", 357 | "value": "", 358 | "type": "text" 359 | } 360 | ], 361 | "body": { 362 | "mode": "formdata", 363 | "formdata": [ 364 | { 365 | "key": "file", 366 | "type": "file", 367 | "src": "/Users/haniyeh/Documents/files/3/segmentac" 368 | }, 369 | { 370 | "key": "upload_id", 371 | "value": "f28a2047-2409-4a88-af3b-227038f6c811", 372 | "type": "text" 373 | }, 374 | { 375 | "key": "chunk_index", 376 | "value": "2", 377 | "type": "text" 378 | }, 379 | { 380 | "key": "chunk_size", 381 | "value": "847379", 382 | "type": "text" 383 | } 384 | ] 385 | }, 386 | "url": { 387 | "raw": "{{filemanager}}/upload/chunk/", 388 | "host": [ 389 | "{{filemanager}}" 390 | ], 391 | "path": [ 392 | "upload", 393 | "chunk", 394 | "" 395 | ] 396 | } 397 | }, 398 | "status": "Unprocessable Entity", 399 | "code": 422, 400 | "_postman_previewlanguage": "json", 401 | "header": [ 402 | { 403 | "key": "date", 404 | "value": "Mon, 12 Aug 2024 06:56:49 GMT" 405 | }, 406 | { 407 | "key": "server", 408 | "value": "uvicorn" 409 | }, 410 | { 411 | "key": "content-length", 412 | "value": "92" 413 | }, 414 | { 415 | "key": "content-type", 416 | "value": "application/json" 417 | } 418 | ], 419 | "cookie": [], 420 | "body": "{\n \"errors\": [],\n \"message\": \"File directory not found. 
Please initialize first!\",\n \"success\": false\n}" 421 | }, 422 | { 423 | "name": "422 - invalid chunk size", 424 | "originalRequest": { 425 | "method": "POST", 426 | "header": [ 427 | { 428 | "key": "Range", 429 | "value": "", 430 | "type": "text" 431 | } 432 | ], 433 | "body": { 434 | "mode": "formdata", 435 | "formdata": [ 436 | { 437 | "key": "file", 438 | "type": "file", 439 | "src": "/Users/haniyeh/Documents/files/3/segmentac" 440 | }, 441 | { 442 | "key": "upload_id", 443 | "value": "f28a2047-2409-4a88-af3b-227038f6c811", 444 | "type": "text" 445 | }, 446 | { 447 | "key": "chunk_index", 448 | "value": "2", 449 | "type": "text" 450 | }, 451 | { 452 | "key": "chunk_size", 453 | "value": "84737900", 454 | "type": "text" 455 | } 456 | ] 457 | }, 458 | "url": { 459 | "raw": "{{filemanager}}/upload/chunk/", 460 | "host": [ 461 | "{{filemanager}}" 462 | ], 463 | "path": [ 464 | "upload", 465 | "chunk", 466 | "" 467 | ] 468 | } 469 | }, 470 | "status": "Unprocessable Entity", 471 | "code": 422, 472 | "_postman_previewlanguage": "json", 473 | "header": [ 474 | { 475 | "key": "date", 476 | "value": "Mon, 12 Aug 2024 06:57:13 GMT" 477 | }, 478 | { 479 | "key": "server", 480 | "value": "uvicorn" 481 | }, 482 | { 483 | "key": "content-length", 484 | "value": "109" 485 | }, 486 | { 487 | "key": "content-type", 488 | "value": "application/json" 489 | } 490 | ], 491 | "cookie": [], 492 | "body": "{\n \"errors\": [\n \"[body->chunk_size] input should be less than or equal to 10485760\"\n ],\n \"message\": \"\",\n \"success\": false\n}" 493 | } 494 | ] 495 | }, 496 | { 497 | "name": "upload complete", 498 | "request": { 499 | "method": "POST", 500 | "header": [], 501 | "body": { 502 | "mode": "formdata", 503 | "formdata": [ 504 | { 505 | "key": "upload_id", 506 | "value": "9ff8949a-7525-4f32-a5ef-34320cdd60d0", 507 | "type": "text" 508 | }, 509 | { 510 | "key": "total_chunks", 511 | "value": "3", 512 | "type": "text" 513 | }, 514 | { 515 | "key": "file_extension", 516 | "value": "jpg", 517 | "type": "text" 518 | }, 519 | { 520 | "key": "content_type", 521 | "value": "image/jpeg", 522 | "type": "text" 523 | }, 524 | { 525 | "key": "total_size", 526 | "value": "2944531", 527 | "type": "text" 528 | }, 529 | { 530 | "key": "credential", 531 | "value": "{\"user_id\":84793, \"page_id\":100}", 532 | "type": "text" 533 | } 534 | ] 535 | }, 536 | "url": { 537 | "raw": "{{filemanager}}/upload/complete/", 538 | "host": [ 539 | "{{filemanager}}" 540 | ], 541 | "path": [ 542 | "upload", 543 | "complete", 544 | "" 545 | ] 546 | } 547 | }, 548 | "response": [ 549 | { 550 | "name": "200 - public", 551 | "originalRequest": { 552 | "method": "POST", 553 | "header": [], 554 | "body": { 555 | "mode": "formdata", 556 | "formdata": [ 557 | { 558 | "key": "upload_id", 559 | "value": "f28a2047-2409-4a88-af3b-227038f6c81c", 560 | "type": "text" 561 | }, 562 | { 563 | "key": "total_chunks", 564 | "value": "3", 565 | "type": "text" 566 | }, 567 | { 568 | "key": "file_extension", 569 | "value": "jpg", 570 | "type": "text" 571 | }, 572 | { 573 | "key": "content_type", 574 | "value": "image/jpeg", 575 | "type": "text" 576 | }, 577 | { 578 | "key": "total_size", 579 | "value": "2944531", 580 | "type": "text" 581 | } 582 | ] 583 | }, 584 | "url": { 585 | "raw": "{{filemanager}}/upload/complete/", 586 | "host": [ 587 | "{{filemanager}}" 588 | ], 589 | "path": [ 590 | "upload", 591 | "complete", 592 | "" 593 | ] 594 | } 595 | }, 596 | "status": "OK", 597 | "code": 200, 598 | "_postman_previewlanguage": "json", 599 | "header": [ 600 
| { 601 | "key": "date", 602 | "value": "Mon, 12 Aug 2024 06:58:39 GMT" 603 | }, 604 | { 605 | "key": "server", 606 | "value": "uvicorn" 607 | }, 608 | { 609 | "key": "content-length", 610 | "value": "286" 611 | }, 612 | { 613 | "key": "content-type", 614 | "value": "application/json" 615 | } 616 | ], 617 | "cookie": [], 618 | "body": "{\n \"data\": {\n \"id\": \"ef533045-5d37-4a8e-a2c6-73070e6ce152\",\n \"path\": \"public/f28a2047-2409-4a88-af3b-227038f6c81c.jpg\",\n \"content_type\": \"image/jpeg\",\n \"detail\": null,\n \"credential\": null,\n \"download_url\": \"http://localhost:9001/public/f28a2047-2409-4a88-af3b-227038f6c81c.jpg\"\n },\n \"message\": \"\",\n \"success\": true\n}" 619 | }, 620 | { 621 | "name": "422 - invalid file extension", 622 | "originalRequest": { 623 | "method": "POST", 624 | "header": [], 625 | "body": { 626 | "mode": "formdata", 627 | "formdata": [ 628 | { 629 | "key": "upload_id", 630 | "value": "f28a2047-2409-4a88-af3b-227038f6c81c", 631 | "type": "text" 632 | }, 633 | { 634 | "key": "total_chunks", 635 | "value": "3", 636 | "type": "text" 637 | }, 638 | { 639 | "key": "file_extension", 640 | "value": "jpg2", 641 | "type": "text" 642 | }, 643 | { 644 | "key": "content_type", 645 | "value": "image/jpeg", 646 | "type": "text" 647 | }, 648 | { 649 | "key": "total_size", 650 | "value": "2944531", 651 | "type": "text" 652 | } 653 | ] 654 | }, 655 | "url": { 656 | "raw": "{{filemanager}}/upload/complete/", 657 | "host": [ 658 | "{{filemanager}}" 659 | ], 660 | "path": [ 661 | "upload", 662 | "complete", 663 | "" 664 | ] 665 | } 666 | }, 667 | "status": "Unprocessable Entity", 668 | "code": 422, 669 | "_postman_previewlanguage": "json", 670 | "header": [ 671 | { 672 | "key": "date", 673 | "value": "Mon, 12 Aug 2024 06:59:10 GMT" 674 | }, 675 | { 676 | "key": "server", 677 | "value": "uvicorn" 678 | }, 679 | { 680 | "key": "content-length", 681 | "value": "349" 682 | }, 683 | { 684 | "key": "content-type", 685 | "value": "application/json" 686 | } 687 | ], 688 | "cookie": [], 689 | "body": "{\n \"errors\": [\n \"[body->file_extension] input should be 'txt', 'jpg', 'png', 'pdf', 'mp4', 'mp3', 'mov', 'avi', 'doc', 'docx', 'xls', 'xlsx', 'ppt', 'pptx', 'gif', 'bmp', 'tiff', 'csv', 'json', 'xml', 'html', 'zip', 'rar', 'tar', 'gz', 'wav', 'flac', 'ogg', 'webm', 'mkv', 'heic', 'svg', 'psd', 'ai', 'eps', 'tif' or 'tga'\"\n ],\n \"message\": \"\",\n \"success\": false\n}" 690 | }, 691 | { 692 | "name": "200 - private", 693 | "originalRequest": { 694 | "method": "POST", 695 | "header": [], 696 | "body": { 697 | "mode": "formdata", 698 | "formdata": [ 699 | { 700 | "key": "upload_id", 701 | "value": "d141cd65-db36-4b18-a4c1-e3e30281a51a", 702 | "type": "text" 703 | }, 704 | { 705 | "key": "total_chunks", 706 | "value": "3", 707 | "type": "text" 708 | }, 709 | { 710 | "key": "file_extension", 711 | "value": "jpg", 712 | "type": "text" 713 | }, 714 | { 715 | "key": "content_type", 716 | "value": "image/jpeg", 717 | "type": "text" 718 | }, 719 | { 720 | "key": "total_size", 721 | "value": "2944531", 722 | "type": "text" 723 | }, 724 | { 725 | "key": "credential", 726 | "value": "{\"user_id\":84793, \"page_id\":100}", 727 | "type": "text" 728 | } 729 | ] 730 | }, 731 | "url": { 732 | "raw": "{{filemanager}}/upload/complete/", 733 | "host": [ 734 | "{{filemanager}}" 735 | ], 736 | "path": [ 737 | "upload", 738 | "complete", 739 | "" 740 | ] 741 | } 742 | }, 743 | "status": "OK", 744 | "code": 200, 745 | "_postman_previewlanguage": "json", 746 | "header": [ 747 | { 748 | "key": 
"date", 749 | "value": "Mon, 12 Aug 2024 07:16:11 GMT" 750 | }, 751 | { 752 | "key": "server", 753 | "value": "uvicorn" 754 | }, 755 | { 756 | "key": "content-length", 757 | "value": "610" 758 | }, 759 | { 760 | "key": "content-type", 761 | "value": "application/json" 762 | } 763 | ], 764 | "cookie": [], 765 | "body": "{\n \"data\": {\n \"id\": \"13c016c4-d049-493a-9c82-4826430f53c9\",\n \"path\": \"private/d141cd65-db36-4b18-a4c1-e3e30281a51a.jpg\",\n \"content_type\": \"image/jpeg\",\n \"detail\": null,\n \"credential\": {\n \"page_id\": \"100\",\n \"user_id\": \"84793\"\n },\n \"download_url\": \"http://minio:9000/private/d141cd65-db36-4b18-a4c1-e3e30281a51a.jpg?page_id=100&user_id=84793&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=wTl8NuReL4eeE9vSDLmw%2F20240812%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240812T071611Z&X-Amz-Expires=604800&X-Amz-SignedHeaders=host&X-Amz-Signature=1cac78cff6895fdb8c63e6d2a9c3c5a541a78e9dc289f4f07564ffb079899b77\"\n },\n \"message\": \"\",\n \"success\": true\n}" 766 | } 767 | ] 768 | }, 769 | { 770 | "name": "get upload status", 771 | "request": { 772 | "method": "GET", 773 | "header": [], 774 | "url": { 775 | "raw": "{{filemanager}}/status/0156fead-9bc3-4f0b-94de-1ab76f588dcc?user_id=84793&page_id=100", 776 | "host": [ 777 | "{{filemanager}}" 778 | ], 779 | "path": [ 780 | "status", 781 | "0156fead-9bc3-4f0b-94de-1ab76f588dcc" 782 | ], 783 | "query": [ 784 | { 785 | "key": "user_id", 786 | "value": "84793" 787 | }, 788 | { 789 | "key": "page_id", 790 | "value": "100" 791 | } 792 | ] 793 | } 794 | }, 795 | "response": [ 796 | { 797 | "name": "200 - success", 798 | "originalRequest": { 799 | "method": "GET", 800 | "header": [], 801 | "url": { 802 | "raw": "{{filemanager}}/status/ef533045-5d37-4a8e-a2c6-73070e6ce152", 803 | "host": [ 804 | "{{filemanager}}" 805 | ], 806 | "path": [ 807 | "status", 808 | "ef533045-5d37-4a8e-a2c6-73070e6ce152" 809 | ] 810 | } 811 | }, 812 | "status": "OK", 813 | "code": 200, 814 | "_postman_previewlanguage": "json", 815 | "header": [ 816 | { 817 | "key": "date", 818 | "value": "Mon, 12 Aug 2024 06:59:46 GMT" 819 | }, 820 | { 821 | "key": "server", 822 | "value": "uvicorn" 823 | }, 824 | { 825 | "key": "content-length", 826 | "value": "57" 827 | }, 828 | { 829 | "key": "content-type", 830 | "value": "application/json" 831 | } 832 | ], 833 | "cookie": [], 834 | "body": "{\n \"data\": {\n \"status\": \"SUCCESS\"\n },\n \"message\": \"\",\n \"success\": true\n}" 835 | }, 836 | { 837 | "name": "404", 838 | "originalRequest": { 839 | "method": "GET", 840 | "header": [], 841 | "url": { 842 | "raw": "{{filemanager}}/status/ef533045-5d37-4a8e-a2c6-73070e6ce151", 843 | "host": [ 844 | "{{filemanager}}" 845 | ], 846 | "path": [ 847 | "status", 848 | "ef533045-5d37-4a8e-a2c6-73070e6ce151" 849 | ] 850 | } 851 | }, 852 | "status": "Not Found", 853 | "code": 404, 854 | "_postman_previewlanguage": "json", 855 | "header": [ 856 | { 857 | "key": "date", 858 | "value": "Mon, 12 Aug 2024 07:01:24 GMT" 859 | }, 860 | { 861 | "key": "server", 862 | "value": "uvicorn" 863 | }, 864 | { 865 | "key": "content-length", 866 | "value": "52" 867 | }, 868 | { 869 | "key": "content-type", 870 | "value": "application/json" 871 | } 872 | ], 873 | "cookie": [], 874 | "body": "{\n \"errors\": [],\n \"message\": \"Not found!\",\n \"success\": false\n}" 875 | }, 876 | { 877 | "name": "200 - failure", 878 | "originalRequest": { 879 | "method": "GET", 880 | "header": [], 881 | "url": { 882 | "raw": 
"{{filemanager}}/status/7b0654ee-6ecc-44e2-9317-89c93fd10621", 883 | "host": [ 884 | "{{filemanager}}" 885 | ], 886 | "path": [ 887 | "status", 888 | "7b0654ee-6ecc-44e2-9317-89c93fd10621" 889 | ] 890 | } 891 | }, 892 | "status": "OK", 893 | "code": 200, 894 | "_postman_previewlanguage": "json", 895 | "header": [ 896 | { 897 | "key": "date", 898 | "value": "Mon, 12 Aug 2024 07:06:11 GMT" 899 | }, 900 | { 901 | "key": "server", 902 | "value": "uvicorn" 903 | }, 904 | { 905 | "key": "content-length", 906 | "value": "57" 907 | }, 908 | { 909 | "key": "content-type", 910 | "value": "application/json" 911 | } 912 | ], 913 | "cookie": [], 914 | "body": "{\n \"data\": {\n \"status\": \"FAILURE\"\n },\n \"message\": \"\",\n \"success\": true\n}" 915 | }, 916 | { 917 | "name": "200 - success with credential", 918 | "originalRequest": { 919 | "method": "GET", 920 | "header": [], 921 | "url": { 922 | "raw": "{{filemanager}}/status/13c016c4-d049-493a-9c82-4826430f53c9?user_id=84793&page_id=100", 923 | "host": [ 924 | "{{filemanager}}" 925 | ], 926 | "path": [ 927 | "status", 928 | "13c016c4-d049-493a-9c82-4826430f53c9" 929 | ], 930 | "query": [ 931 | { 932 | "key": "user_id", 933 | "value": "84793" 934 | }, 935 | { 936 | "key": "page_id", 937 | "value": "100" 938 | } 939 | ] 940 | } 941 | }, 942 | "status": "OK", 943 | "code": 200, 944 | "_postman_previewlanguage": "json", 945 | "header": [ 946 | { 947 | "key": "date", 948 | "value": "Mon, 12 Aug 2024 07:16:54 GMT" 949 | }, 950 | { 951 | "key": "server", 952 | "value": "uvicorn" 953 | }, 954 | { 955 | "key": "content-length", 956 | "value": "57" 957 | }, 958 | { 959 | "key": "content-type", 960 | "value": "application/json" 961 | } 962 | ], 963 | "cookie": [], 964 | "body": "{\n \"data\": {\n \"status\": \"SUCCESS\"\n },\n \"message\": \"\",\n \"success\": true\n}" 965 | }, 966 | { 967 | "name": "403 - invalid credential", 968 | "originalRequest": { 969 | "method": "GET", 970 | "header": [], 971 | "url": { 972 | "raw": "{{filemanager}}/status/13c016c4-d049-493a-9c82-4826430f53c9?user_id=84793&page_id=101", 973 | "host": [ 974 | "{{filemanager}}" 975 | ], 976 | "path": [ 977 | "status", 978 | "13c016c4-d049-493a-9c82-4826430f53c9" 979 | ], 980 | "query": [ 981 | { 982 | "key": "user_id", 983 | "value": "84793" 984 | }, 985 | { 986 | "key": "page_id", 987 | "value": "101" 988 | } 989 | ] 990 | } 991 | }, 992 | "status": "Forbidden", 993 | "code": 403, 994 | "_postman_previewlanguage": "json", 995 | "header": [ 996 | { 997 | "key": "date", 998 | "value": "Mon, 12 Aug 2024 07:17:27 GMT" 999 | }, 1000 | { 1001 | "key": "server", 1002 | "value": "uvicorn" 1003 | }, 1004 | { 1005 | "key": "content-length", 1006 | "value": "58" 1007 | }, 1008 | { 1009 | "key": "content-type", 1010 | "value": "application/json" 1011 | } 1012 | ], 1013 | "cookie": [], 1014 | "body": "{\n \"errors\": [],\n \"message\": \"Permision denied\",\n \"success\": false\n}" 1015 | } 1016 | ] 1017 | }, 1018 | { 1019 | "name": "get", 1020 | "request": { 1021 | "method": "GET", 1022 | "header": [], 1023 | "url": { 1024 | "raw": "{{filemanager}}/get/13c016c4-d049-493a-9c82-4826430f53c9?user_id=84793&page_id=100", 1025 | "host": [ 1026 | "{{filemanager}}" 1027 | ], 1028 | "path": [ 1029 | "get", 1030 | "13c016c4-d049-493a-9c82-4826430f53c9" 1031 | ], 1032 | "query": [ 1033 | { 1034 | "key": "user_id", 1035 | "value": "84793" 1036 | }, 1037 | { 1038 | "key": "page_id", 1039 | "value": "100" 1040 | } 1041 | ] 1042 | } 1043 | }, 1044 | "response": [ 1045 | { 1046 | "name": "404", 1047 | 
"originalRequest": { 1048 | "method": "GET", 1049 | "header": [], 1050 | "url": { 1051 | "raw": "{{filemanager}}/get/ef533045-5d37-4a8e-a2c6-73070e6ce151", 1052 | "host": [ 1053 | "{{filemanager}}" 1054 | ], 1055 | "path": [ 1056 | "get", 1057 | "ef533045-5d37-4a8e-a2c6-73070e6ce151" 1058 | ] 1059 | } 1060 | }, 1061 | "status": "Not Found", 1062 | "code": 404, 1063 | "_postman_previewlanguage": "json", 1064 | "header": [ 1065 | { 1066 | "key": "date", 1067 | "value": "Mon, 12 Aug 2024 07:01:48 GMT" 1068 | }, 1069 | { 1070 | "key": "server", 1071 | "value": "uvicorn" 1072 | }, 1073 | { 1074 | "key": "content-length", 1075 | "value": "52" 1076 | }, 1077 | { 1078 | "key": "content-type", 1079 | "value": "application/json" 1080 | } 1081 | ], 1082 | "cookie": [], 1083 | "body": "{\n \"errors\": [],\n \"message\": \"Not found!\",\n \"success\": false\n}" 1084 | }, 1085 | { 1086 | "name": "200", 1087 | "originalRequest": { 1088 | "method": "GET", 1089 | "header": [], 1090 | "url": { 1091 | "raw": "{{filemanager}}/get/ef533045-5d37-4a8e-a2c6-73070e6ce152", 1092 | "host": [ 1093 | "{{filemanager}}" 1094 | ], 1095 | "path": [ 1096 | "get", 1097 | "ef533045-5d37-4a8e-a2c6-73070e6ce152" 1098 | ] 1099 | } 1100 | }, 1101 | "status": "OK", 1102 | "code": 200, 1103 | "_postman_previewlanguage": "json", 1104 | "header": [ 1105 | { 1106 | "key": "date", 1107 | "value": "Mon, 12 Aug 2024 07:02:05 GMT" 1108 | }, 1109 | { 1110 | "key": "server", 1111 | "value": "uvicorn" 1112 | }, 1113 | { 1114 | "key": "content-length", 1115 | "value": "286" 1116 | }, 1117 | { 1118 | "key": "content-type", 1119 | "value": "application/json" 1120 | } 1121 | ], 1122 | "cookie": [], 1123 | "body": "{\n \"data\": {\n \"id\": \"ef533045-5d37-4a8e-a2c6-73070e6ce152\",\n \"path\": \"public/f28a2047-2409-4a88-af3b-227038f6c81c.jpg\",\n \"content_type\": \"image/jpeg\",\n \"detail\": null,\n \"credential\": null,\n \"download_url\": \"http://localhost:9001/public/f28a2047-2409-4a88-af3b-227038f6c81c.jpg\"\n },\n \"message\": \"\",\n \"success\": true\n}" 1124 | }, 1125 | { 1126 | "name": "403 - invalid credential", 1127 | "originalRequest": { 1128 | "method": "GET", 1129 | "header": [], 1130 | "url": { 1131 | "raw": "{{filemanager}}/get/13c016c4-d049-493a-9c82-4826430f53c9?user_id=84793&page_id=101", 1132 | "host": [ 1133 | "{{filemanager}}" 1134 | ], 1135 | "path": [ 1136 | "get", 1137 | "13c016c4-d049-493a-9c82-4826430f53c9" 1138 | ], 1139 | "query": [ 1140 | { 1141 | "key": "user_id", 1142 | "value": "84793" 1143 | }, 1144 | { 1145 | "key": "page_id", 1146 | "value": "101" 1147 | } 1148 | ] 1149 | } 1150 | }, 1151 | "status": "Forbidden", 1152 | "code": 403, 1153 | "_postman_previewlanguage": "json", 1154 | "header": [ 1155 | { 1156 | "key": "date", 1157 | "value": "Mon, 12 Aug 2024 07:17:58 GMT" 1158 | }, 1159 | { 1160 | "key": "server", 1161 | "value": "uvicorn" 1162 | }, 1163 | { 1164 | "key": "content-length", 1165 | "value": "58" 1166 | }, 1167 | { 1168 | "key": "content-type", 1169 | "value": "application/json" 1170 | } 1171 | ], 1172 | "cookie": [], 1173 | "body": "{\n \"errors\": [],\n \"message\": \"Permision denied\",\n \"success\": false\n}" 1174 | }, 1175 | { 1176 | "name": "200 - with credential", 1177 | "originalRequest": { 1178 | "method": "GET", 1179 | "header": [], 1180 | "url": { 1181 | "raw": "{{filemanager}}/get/13c016c4-d049-493a-9c82-4826430f53c9?user_id=84793&page_id=100", 1182 | "host": [ 1183 | "{{filemanager}}" 1184 | ], 1185 | "path": [ 1186 | "get", 1187 | "13c016c4-d049-493a-9c82-4826430f53c9" 1188 | ], 
1189 | "query": [ 1190 | { 1191 | "key": "user_id", 1192 | "value": "84793" 1193 | }, 1194 | { 1195 | "key": "page_id", 1196 | "value": "100" 1197 | } 1198 | ] 1199 | } 1200 | }, 1201 | "status": "OK", 1202 | "code": 200, 1203 | "_postman_previewlanguage": "json", 1204 | "header": [ 1205 | { 1206 | "key": "date", 1207 | "value": "Mon, 12 Aug 2024 07:18:11 GMT" 1208 | }, 1209 | { 1210 | "key": "server", 1211 | "value": "uvicorn" 1212 | }, 1213 | { 1214 | "key": "content-length", 1215 | "value": "610" 1216 | }, 1217 | { 1218 | "key": "content-type", 1219 | "value": "application/json" 1220 | } 1221 | ], 1222 | "cookie": [], 1223 | "body": "{\n \"data\": {\n \"id\": \"13c016c4-d049-493a-9c82-4826430f53c9\",\n \"path\": \"private/d141cd65-db36-4b18-a4c1-e3e30281a51a.jpg\",\n \"content_type\": \"image/jpeg\",\n \"detail\": null,\n \"credential\": {\n \"page_id\": \"100\",\n \"user_id\": \"84793\"\n },\n \"download_url\": \"http://minio:9000/private/d141cd65-db36-4b18-a4c1-e3e30281a51a.jpg?page_id=100&user_id=84793&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=wTl8NuReL4eeE9vSDLmw%2F20240812%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240812T071811Z&X-Amz-Expires=604800&X-Amz-SignedHeaders=host&X-Amz-Signature=281783d7686abb560366dea59ac9f951f2afd230c8dfe7007e7ba38e8b2195cb\"\n },\n \"message\": \"\",\n \"success\": true\n}" 1224 | } 1225 | ] 1226 | }, 1227 | { 1228 | "name": "retry upload", 1229 | "request": { 1230 | "method": "POST", 1231 | "header": [], 1232 | "body": { 1233 | "mode": "formdata", 1234 | "formdata": [ 1235 | { 1236 | "key": "file_id", 1237 | "value": "0156fead-9bc3-4f0b-94de-1ab76f588dcc", 1238 | "type": "text" 1239 | }, 1240 | { 1241 | "key": "credential", 1242 | "value": "{\"user_id\":84793, \"page_id\":100}", 1243 | "type": "text" 1244 | } 1245 | ] 1246 | }, 1247 | "url": { 1248 | "raw": "{{filemanager}}/upload/retry", 1249 | "host": [ 1250 | "{{filemanager}}" 1251 | ], 1252 | "path": [ 1253 | "upload", 1254 | "retry" 1255 | ] 1256 | } 1257 | }, 1258 | "response": [ 1259 | { 1260 | "name": "400 - uploaded previously", 1261 | "originalRequest": { 1262 | "method": "POST", 1263 | "header": [], 1264 | "body": { 1265 | "mode": "formdata", 1266 | "formdata": [ 1267 | { 1268 | "key": "file_id", 1269 | "value": "ef533045-5d37-4a8e-a2c6-73070e6ce152", 1270 | "type": "text" 1271 | } 1272 | ] 1273 | }, 1274 | "url": { 1275 | "raw": "{{filemanager}}/upload/retry", 1276 | "host": [ 1277 | "{{filemanager}}" 1278 | ], 1279 | "path": [ 1280 | "upload", 1281 | "retry" 1282 | ] 1283 | } 1284 | }, 1285 | "status": "Bad Request", 1286 | "code": 400, 1287 | "_postman_previewlanguage": "json", 1288 | "header": [ 1289 | { 1290 | "key": "date", 1291 | "value": "Mon, 12 Aug 2024 07:02:23 GMT" 1292 | }, 1293 | { 1294 | "key": "server", 1295 | "value": "uvicorn" 1296 | }, 1297 | { 1298 | "key": "content-length", 1299 | "value": "67" 1300 | }, 1301 | { 1302 | "key": "content-type", 1303 | "value": "application/json" 1304 | } 1305 | ], 1306 | "cookie": [], 1307 | "body": "{\n \"errors\": [],\n \"message\": \"File uploaded previously!\",\n \"success\": false\n}" 1308 | }, 1309 | { 1310 | "name": "200", 1311 | "originalRequest": { 1312 | "method": "POST", 1313 | "header": [], 1314 | "body": { 1315 | "mode": "formdata", 1316 | "formdata": [ 1317 | { 1318 | "key": "file_id", 1319 | "value": "7b0654ee-6ecc-44e2-9317-89c93fd10621", 1320 | "type": "text" 1321 | } 1322 | ] 1323 | }, 1324 | "url": { 1325 | "raw": "{{filemanager}}/upload/retry", 1326 | "host": [ 1327 | "{{filemanager}}" 1328 | ], 1329 | 
"path": [ 1330 | "upload", 1331 | "retry" 1332 | ] 1333 | } 1334 | }, 1335 | "status": "OK", 1336 | "code": 200, 1337 | "_postman_previewlanguage": "json", 1338 | "header": [ 1339 | { 1340 | "key": "date", 1341 | "value": "Mon, 12 Aug 2024 07:06:51 GMT" 1342 | }, 1343 | { 1344 | "key": "server", 1345 | "value": "uvicorn" 1346 | }, 1347 | { 1348 | "key": "content-length", 1349 | "value": "286" 1350 | }, 1351 | { 1352 | "key": "content-type", 1353 | "value": "application/json" 1354 | } 1355 | ], 1356 | "cookie": [], 1357 | "body": "{\n \"data\": {\n \"id\": \"7b0654ee-6ecc-44e2-9317-89c93fd10621\",\n \"path\": \"public/7e034a5f-3991-4b9b-a2e7-37feec9f8f42.jpg\",\n \"content_type\": \"image/jpeg\",\n \"detail\": null,\n \"credential\": null,\n \"download_url\": \"http://localhost:9001/public/7e034a5f-3991-4b9b-a2e7-37feec9f8f42.jpg\"\n },\n \"message\": \"\",\n \"success\": true\n}" 1358 | }, 1359 | { 1360 | "name": "403 - invalid credential", 1361 | "originalRequest": { 1362 | "method": "POST", 1363 | "header": [], 1364 | "body": { 1365 | "mode": "formdata", 1366 | "formdata": [ 1367 | { 1368 | "key": "file_id", 1369 | "value": "0156fead-9bc3-4f0b-94de-1ab76f588dcc", 1370 | "type": "text" 1371 | } 1372 | ] 1373 | }, 1374 | "url": { 1375 | "raw": "{{filemanager}}/upload/retry", 1376 | "host": [ 1377 | "{{filemanager}}" 1378 | ], 1379 | "path": [ 1380 | "upload", 1381 | "retry" 1382 | ] 1383 | } 1384 | }, 1385 | "status": "Forbidden", 1386 | "code": 403, 1387 | "_postman_previewlanguage": "json", 1388 | "header": [ 1389 | { 1390 | "key": "date", 1391 | "value": "Mon, 12 Aug 2024 07:22:12 GMT" 1392 | }, 1393 | { 1394 | "key": "server", 1395 | "value": "uvicorn" 1396 | }, 1397 | { 1398 | "key": "content-length", 1399 | "value": "58" 1400 | }, 1401 | { 1402 | "key": "content-type", 1403 | "value": "application/json" 1404 | } 1405 | ], 1406 | "cookie": [], 1407 | "body": "{\n \"errors\": [],\n \"message\": \"Permision denied\",\n \"success\": false\n}" 1408 | }, 1409 | { 1410 | "name": "200 - with credential", 1411 | "originalRequest": { 1412 | "method": "POST", 1413 | "header": [], 1414 | "body": { 1415 | "mode": "formdata", 1416 | "formdata": [ 1417 | { 1418 | "key": "file_id", 1419 | "value": "0156fead-9bc3-4f0b-94de-1ab76f588dcc", 1420 | "type": "text" 1421 | }, 1422 | { 1423 | "key": "credential", 1424 | "value": "{\"user_id\":84793, \"page_id\":100}", 1425 | "type": "text" 1426 | } 1427 | ] 1428 | }, 1429 | "url": { 1430 | "raw": "{{filemanager}}/upload/retry", 1431 | "host": [ 1432 | "{{filemanager}}" 1433 | ], 1434 | "path": [ 1435 | "upload", 1436 | "retry" 1437 | ] 1438 | } 1439 | }, 1440 | "status": "OK", 1441 | "code": 200, 1442 | "_postman_previewlanguage": "json", 1443 | "header": [ 1444 | { 1445 | "key": "date", 1446 | "value": "Mon, 12 Aug 2024 07:22:35 GMT" 1447 | }, 1448 | { 1449 | "key": "server", 1450 | "value": "uvicorn" 1451 | }, 1452 | { 1453 | "key": "content-length", 1454 | "value": "610" 1455 | }, 1456 | { 1457 | "key": "content-type", 1458 | "value": "application/json" 1459 | } 1460 | ], 1461 | "cookie": [], 1462 | "body": "{\n \"data\": {\n \"id\": \"0156fead-9bc3-4f0b-94de-1ab76f588dcc\",\n \"path\": \"private/9ff8949a-7525-4f32-a5ef-34320cdd60d0.jpg\",\n \"content_type\": \"image/jpeg\",\n \"detail\": null,\n \"credential\": {\n \"page_id\": \"100\",\n \"user_id\": \"84793\"\n },\n \"download_url\": 
\"http://minio:9000/private/9ff8949a-7525-4f32-a5ef-34320cdd60d0.jpg?page_id=100&user_id=84793&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=wTl8NuReL4eeE9vSDLmw%2F20240812%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240812T072236Z&X-Amz-Expires=604800&X-Amz-SignedHeaders=host&X-Amz-Signature=8ba09dd10c7c81c4d130fe63ca6cd1b458062a3850034e81e4eb65604691077c\"\n },\n \"message\": \"\",\n \"success\": true\n}" 1463 | } 1464 | ] 1465 | } 1466 | ], 1467 | "event": [ 1468 | { 1469 | "listen": "prerequest", 1470 | "script": { 1471 | "type": "text/javascript", 1472 | "packages": {}, 1473 | "exec": [ 1474 | "" 1475 | ] 1476 | } 1477 | }, 1478 | { 1479 | "listen": "test", 1480 | "script": { 1481 | "type": "text/javascript", 1482 | "packages": {}, 1483 | "exec": [ 1484 | "pm.test(\"Response time is less than 200ms\", function () {", 1485 | " pm.expect(pm.response.responseTime).to.be.below(200);", 1486 | "});" 1487 | ] 1488 | } 1489 | } 1490 | ] 1491 | } 1492 | ], 1493 | "event": [ 1494 | { 1495 | "listen": "prerequest", 1496 | "script": { 1497 | "type": "text/javascript", 1498 | "packages": {}, 1499 | "exec": [ 1500 | "" 1501 | ] 1502 | } 1503 | }, 1504 | { 1505 | "listen": "test", 1506 | "script": { 1507 | "type": "text/javascript", 1508 | "packages": {}, 1509 | "exec": [ 1510 | "" 1511 | ] 1512 | } 1513 | } 1514 | ], 1515 | "variable": [ 1516 | { 1517 | "key": "filemanager", 1518 | "value": "127.0.0.1:8000/api/v1/file", 1519 | "type": "string" 1520 | }, 1521 | { 1522 | "key": "gateway", 1523 | "value": "127.0.0.1:8080/api/v1/file", 1524 | "type": "string" 1525 | } 1526 | ] 1527 | } --------------------------------------------------------------------------------