├── conftest.py ├── static └── .gitkeep ├── tests ├── __init__.py └── datasource │ ├── __init__.py │ ├── test_ssh.py │ ├── test_postgres.py │ └── test_mysql.py ├── databack ├── schema │ ├── __init__.py │ └── request.py ├── __init__.py ├── utils.py ├── constants.py ├── locales │ ├── __init__.py │ ├── zh-CN.yml │ └── en-US.yml ├── enums.py ├── log.py ├── static.py ├── storage │ ├── __init__.py │ ├── local.py │ ├── ssh.py │ └── s3.py ├── validators.py ├── datasource │ ├── local.py │ ├── __init__.py │ ├── ssh.py │ ├── mongo.py │ ├── redis.py │ ├── clickhouse.py │ ├── mysql.py │ └── postgres.py ├── api │ ├── init.py │ ├── action_log.py │ ├── stat.py │ ├── __init__.py │ ├── restore.py │ ├── task_log.py │ ├── storage.py │ ├── datasource.py │ ├── auth.py │ ├── task.py │ └── admin.py ├── exceptions.py ├── discover.py ├── settings.py ├── scheduler.py ├── auth.py ├── app.py ├── depends.py ├── models.py └── tasks.py ├── images ├── home.png └── datasource.png ├── .env.example ├── migrations └── models │ ├── 3_20230612132224_update.py │ ├── 1_20230422144442_update.py │ ├── 2_20230524171549_update.py │ └── 0_20230419142428_init.py ├── Makefile ├── .github └── workflows │ ├── ci.yml │ └── deploy.yml ├── pyproject.toml ├── Dockerfile ├── README.md ├── .gitignore ├── .dockerignore └── LICENSE /conftest.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /static/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /databack/schema/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/datasource/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/datasource/test_ssh.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /images/home.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/long2ice/databack/HEAD/images/home.png -------------------------------------------------------------------------------- /databack/__init__.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | router = APIRouter() 4 | -------------------------------------------------------------------------------- /images/datasource.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/long2ice/databack/HEAD/images/datasource.png -------------------------------------------------------------------------------- /databack/utils.py: -------------------------------------------------------------------------------- 1 | import aiofiles.os 2 | 3 | 4 | async def get_file_size(file): 5 | stat = await aiofiles.os.stat(file) 6 | return stat.st_size 7 | -------------------------------------------------------------------------------- /.env.example: 
-------------------------------------------------------------------------------- 1 | DB_URL=mysql://root:123456@127.0.0.1:3306/databack 2 | DEBUG=True 3 | REDIS_URL=redis://127.0.0.1:6379/0 4 | SENTRY_DSN= 5 | ENV=development 6 | WORKER=True 7 | SECRET_KEY=xxx 8 | -------------------------------------------------------------------------------- /databack/constants.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 4 | SCHEDULER_SLEEP_SECONDS = 60 5 | MASK_KEYS = [ 6 | "password", 7 | "old_password", 8 | "new_password", 9 | ] 10 | -------------------------------------------------------------------------------- /migrations/models/3_20230612132224_update.py: -------------------------------------------------------------------------------- 1 | from tortoise import BaseDBAsyncClient 2 | 3 | 4 | async def upgrade(db: BaseDBAsyncClient) -> str: 5 | return """ 6 | ALTER TABLE `tasklog` MODIFY COLUMN `size` BIGINT;""" 7 | 8 | 9 | async def downgrade(db: BaseDBAsyncClient) -> str: 10 | return """ 11 | ALTER TABLE `tasklog` MODIFY COLUMN `size` INT;""" 12 | -------------------------------------------------------------------------------- /migrations/models/1_20230422144442_update.py: -------------------------------------------------------------------------------- 1 | from tortoise import BaseDBAsyncClient 2 | 3 | 4 | async def upgrade(db: BaseDBAsyncClient) -> str: 5 | return """ 6 | ALTER TABLE `storage` MODIFY COLUMN `options` JSON;""" 7 | 8 | 9 | async def downgrade(db: BaseDBAsyncClient) -> str: 10 | return """ 11 | ALTER TABLE `storage` MODIFY COLUMN `options` JSON NOT NULL;""" 12 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | checkfiles = databack/ tests/ conftest.py 2 | py_warn = PYTHONDEVMODE=1 3 | 4 | style: 5 | @isort -src $(checkfiles) 6 | @black $(checkfiles) 7 | 8 | check: 9 | @black --check $(checkfiles) 10 | @ruff $(checkfiles) --fix 11 | @mypy $(checkfiles) 12 | 13 | test: 14 | $(py_warn) pytest --suppress-no-test-exit-code 15 | 16 | ci: check test 17 | 18 | build: 19 | @poetry build 20 | -------------------------------------------------------------------------------- /databack/locales/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import i18n 4 | from i18n import load_path 5 | 6 | from databack.constants import BASE_DIR 7 | 8 | 9 | def init(): 10 | load_path.append(os.path.join(BASE_DIR, "databack", "locales")) 11 | i18n.set("enable_memoization", True) 12 | i18n.set("fallback", "en-US") 13 | i18n.set("skip_locale_root_data", True) 14 | i18n.set("filename_format", "{locale}.{format}") 15 | -------------------------------------------------------------------------------- /databack/locales/zh-CN.yml: -------------------------------------------------------------------------------- 1 | data_source_exists: "数据源 '%{name}' 已经存在!" 
2 | cron_invalid: "无效的 cron 表达式 '%{cron}'" 3 | task_disabled: "任务 '%{name}' 已经禁用" 4 | email_invalid: "无效的邮箱地址 '%{email}'" 5 | login: 6 | user_not_found: "用户不存在" 7 | password_error: "密码错误" 8 | admin_exists: "管理员用户已经存在" 9 | admin_inited: "管理员用户已经初始化" 10 | auth: 11 | not_active: "用户禁止登录" 12 | not_superuser: "只有管理员用户才能操作" 13 | not_login: "用户未登录" 14 | license_expired: "授权已经过期" 15 | -------------------------------------------------------------------------------- /databack/enums.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class StorageType(str, Enum): 5 | local = "local" 6 | ssh = "ssh" 7 | s3 = "s3" 8 | 9 | 10 | class DataSourceType(str, Enum): 11 | clickhouse = "clickhouse" 12 | mysql = "mysql" 13 | postgres = "postgres" 14 | local = "local" 15 | ssh = "ssh" 16 | mongo = "mongo" 17 | redis = "redis" 18 | 19 | 20 | class TaskStatus(str, Enum): 21 | success = "success" 22 | failed = "failed" 23 | running = "running" 24 | -------------------------------------------------------------------------------- /databack/log.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from loguru import logger 4 | 5 | 6 | class InterceptHandler(logging.Handler): 7 | def emit(self, record): 8 | logger_opt = logger.opt(depth=6, exception=record.exc_info) 9 | logger_opt.log(record.levelname, record.getMessage()) 10 | 11 | 12 | def init_logging(): 13 | uvicorn = logging.getLogger("uvicorn.access") 14 | for h in uvicorn.handlers: 15 | uvicorn.removeHandler(h) 16 | handler = InterceptHandler() 17 | uvicorn.addHandler(handler) 18 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: [push, pull_request] 3 | jobs: 4 | ci: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - uses: actions/checkout@v3 8 | - uses: actions/setup-python@v4 9 | with: 10 | python-version: "3.11" 11 | - name: Install and configure Poetry 12 | run: | 13 | pip install poetry --upgrade --pre 14 | poetry config virtualenvs.create false 15 | - name: Install dependencies 16 | run: poetry install 17 | - name: CI 18 | run: cp .env.example .env && make ci 19 | -------------------------------------------------------------------------------- /databack/locales/en-US.yml: -------------------------------------------------------------------------------- 1 | data_source_exists: "Data source '%{name}' already exists!" 
2 | cron_invalid: "Invalid cron expression '%{cron}'" 3 | task_disabled: "Task '%{name}' is disabled" 4 | email_invalid: "Invalid email address '%{email}'" 5 | login: 6 | user_not_found: "User not found" 7 | password_error: "Password error" 8 | admin_exists: "Admin user already exists" 9 | admin_inited: "Admin user already inited" 10 | auth: 11 | not_active: "User not active" 12 | not_superuser: "Only superuser can do this" 13 | not_login: "User not login" 14 | license_expired: "License expired" 15 | -------------------------------------------------------------------------------- /databack/static.py: -------------------------------------------------------------------------------- 1 | from starlette.exceptions import HTTPException 2 | from starlette.staticfiles import StaticFiles 3 | from starlette.status import HTTP_404_NOT_FOUND 4 | 5 | 6 | class SPAStaticFiles(StaticFiles): 7 | async def get_response(self, path: str, scope): 8 | try: 9 | response = await super().get_response(path, scope) 10 | except HTTPException as e: 11 | if e.status_code == HTTP_404_NOT_FOUND: 12 | response = await super().get_response("index.html", scope) 13 | else: 14 | raise 15 | return response 16 | -------------------------------------------------------------------------------- /tests/datasource/test_postgres.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from databack.datasource.postgres import Postgres 4 | 5 | 6 | @pytest.mark.skip 7 | async def test_postgres(): 8 | pg_backup = Postgres( 9 | __host="localhost", 10 | __port=5432, 11 | __user="postgres", 12 | password="123456", 13 | backup_program="pg_dump", 14 | compress=True, 15 | ) 16 | backup = await pg_backup.get_backup() 17 | pg_restore = Postgres( 18 | __host="localhost", 19 | __port=5432, 20 | __user="postgres", 21 | password="123456", 22 | compress=True, 23 | backup_program="pg_restore", 24 | ) 25 | await pg_restore.restore(backup) 26 | -------------------------------------------------------------------------------- /tests/datasource/test_mysql.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from databack.datasource.mysql import MySQL 4 | 5 | 6 | @pytest.mark.skip 7 | async def test_mysql(): 8 | kwargs = { 9 | "--host": "localhost", 10 | "--port": 3306, 11 | "--user": "root", 12 | "--password": "123456", 13 | "compress": True, 14 | } 15 | backup_kwargs = { 16 | "--include-databases": "test", 17 | "--set-gtid-purged": "OFF", 18 | "--add-drop-database": True, 19 | } 20 | mysql_backup = MySQL( 21 | **kwargs, 22 | **backup_kwargs, 23 | ) 24 | backup = await mysql_backup.get_backup() 25 | mysql_restore = MySQL(**kwargs) 26 | await mysql_restore.restore(backup) 27 | -------------------------------------------------------------------------------- /databack/storage/__init__.py: -------------------------------------------------------------------------------- 1 | import abc 2 | 3 | from pydantic import BaseModel 4 | 5 | from databack.enums import StorageType 6 | 7 | 8 | class Base: 9 | type: StorageType 10 | path: str 11 | options: BaseModel 12 | 13 | def __init__(self, options: BaseModel, path: str): 14 | self.options = options 15 | self.path = path 16 | 17 | @abc.abstractmethod 18 | async def check(self): 19 | raise NotImplementedError 20 | 21 | @abc.abstractmethod 22 | async def upload(self, file: str): 23 | raise NotImplementedError 24 | 25 | @abc.abstractmethod 26 | async def download(self, file: str): 27 | raise NotImplementedError 
28 | 29 | @abc.abstractmethod 30 | async def delete(self, file: str): 31 | raise NotImplementedError 32 | -------------------------------------------------------------------------------- /databack/schema/request.py: -------------------------------------------------------------------------------- 1 | import json 2 | from enum import Enum 3 | 4 | from pydantic import BaseModel 5 | 6 | 7 | class Order(str, Enum): 8 | asc = "asc" 9 | desc = "desc" 10 | 11 | 12 | class Sort(BaseModel): 13 | field: str 14 | order: Order 15 | 16 | 17 | class Query(BaseModel): 18 | limit: int = 10 19 | offset: int = 0 20 | sorts: str | None = None 21 | 22 | @property 23 | def orders(self): 24 | orders = [] 25 | if self.sorts: 26 | for sort in json.loads(self.sorts): 27 | order = sort.get("order") 28 | field = sort.get("field") 29 | if order == Order.asc: 30 | orders.append(field) 31 | else: 32 | orders.append(f"-{field}") 33 | return orders 34 | -------------------------------------------------------------------------------- /databack/storage/local.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import aiofiles.os 4 | import aiofiles.ospath 5 | import aioshutil 6 | 7 | from databack.enums import StorageType 8 | from databack.storage import Base 9 | 10 | 11 | class Local(Base): 12 | type = StorageType.local 13 | 14 | async def check(self): 15 | return await aiofiles.ospath.exists(self.path) 16 | 17 | async def upload(self, file: str): 18 | await aioshutil.move(file, self.path) 19 | return os.path.join(self.path, os.path.basename(file)) 20 | 21 | async def download(self, file: str): 22 | await aioshutil.copy(file, self.path) 23 | 24 | async def delete(self, file: str): 25 | try: 26 | await aiofiles.os.remove(os.path.join(self.path, file)) 27 | except FileNotFoundError: 28 | pass 29 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: deploy 2 | on: 3 | push: 4 | branches: 5 | - "main" 6 | pull_request: 7 | branches: 8 | - "main" 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Build and Publish 15 | uses: VaultVulp/gp-docker-action@1.6.0 16 | with: 17 | github-token: ${{ secrets.GITHUB_TOKEN }} 18 | image-name: databack 19 | custom-args: --build-arg=GIT_TOKEN=${{ secrets.GIT_TOKEN }} 20 | deploy: 21 | runs-on: ubuntu-latest 22 | needs: 23 | - build 24 | steps: 25 | - name: Deploy 26 | uses: steebchen/kubectl@v2.0.0 27 | with: 28 | config: ${{ secrets.KUBE_CONFIG }} 29 | command: rollout restart deployment databack databack-worker databack-demo 30 | -------------------------------------------------------------------------------- /databack/validators.py: -------------------------------------------------------------------------------- 1 | import i18n 2 | from crontab import CronTab 3 | from email_validator import EmailNotValidError 4 | from fastapi import HTTPException 5 | from pydantic import validate_email 6 | from starlette.status import HTTP_400_BAD_REQUEST 7 | from tortoise.validators import Validator 8 | 9 | 10 | class CronValidator(Validator): 11 | def __call__(self, value: str): 12 | if not value: 13 | return 14 | try: 15 | CronTab(value) 16 | except ValueError: 17 | raise HTTPException( 18 | status_code=HTTP_400_BAD_REQUEST, 19 | detail=i18n.t("cron_invalid", cron=value), 20 | ) 21 | 22 | 23 | class EmailValidator(Validator): 24 | def 
__call__(self, value: str): 25 | if not value: 26 | return 27 | try: 28 | validate_email(value) 29 | except EmailNotValidError: 30 | raise HTTPException( 31 | status_code=HTTP_400_BAD_REQUEST, 32 | detail=i18n.t("email_invalid", email=value), 33 | ) 34 | -------------------------------------------------------------------------------- /databack/datasource/local.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | 4 | import aioshutil 5 | 6 | from databack.datasource import Base 7 | from databack.enums import DataSourceType 8 | 9 | 10 | class Local(Base): 11 | type = DataSourceType.local 12 | 13 | def __init__(self, **kwargs): 14 | super().__init__(**kwargs) 15 | self.path = self.kwargs.get("path") 16 | 17 | async def check(self): 18 | return os.path.exists(self.path) 19 | 20 | async def backup(self): 21 | temp_dir = tempfile.mkdtemp() 22 | if os.path.isdir(self.path): 23 | destination = os.path.join(temp_dir, self.filename, os.path.basename(self.path)) 24 | await aioshutil.copytree(self.path, destination) 25 | else: 26 | destination = os.path.join(temp_dir, self.filename) 27 | os.makedirs(destination, exist_ok=True) 28 | await aioshutil.copy(self.path, destination) 29 | return destination 30 | 31 | async def restore(self, file: str): 32 | file = await self.get_restore(file) 33 | await aioshutil.move(file, self.path) 34 | -------------------------------------------------------------------------------- /databack/api/init.py: -------------------------------------------------------------------------------- 1 | import i18n 2 | from fastapi import APIRouter, HTTPException 3 | from pydantic import BaseModel, EmailStr 4 | from starlette.status import HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN 5 | from tortoise.exceptions import IntegrityError 6 | 7 | from databack.auth import get_password_hash 8 | from databack.models import Admin 9 | 10 | router = APIRouter() 11 | 12 | 13 | class InitBody(BaseModel): 14 | nickname: str 15 | email: EmailStr 16 | password: str 17 | 18 | 19 | @router.post("/admin") 20 | async def init_admin( 21 | body: InitBody, 22 | ): 23 | if await Admin.exists(): 24 | raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail=i18n.t("admin_inited")) 25 | try: 26 | await Admin.create( 27 | nickname=body.nickname, 28 | email=body.email, 29 | password=get_password_hash(body.password), 30 | is_superuser=True, 31 | ) 32 | except IntegrityError: 33 | raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=i18n.t("admin_exists")) 34 | 35 | 36 | @router.get("") 37 | async def get_init(): 38 | return {"inited": await Admin.exists()} 39 | -------------------------------------------------------------------------------- /databack/exceptions.py: -------------------------------------------------------------------------------- 1 | from fastapi import HTTPException 2 | from fastapi.exceptions import RequestValidationError 3 | from starlette.requests import Request 4 | from starlette.responses import JSONResponse 5 | from starlette.status import ( 6 | HTTP_404_NOT_FOUND, 7 | HTTP_422_UNPROCESSABLE_ENTITY, 8 | HTTP_500_INTERNAL_SERVER_ERROR, 9 | ) 10 | from tortoise.exceptions import DoesNotExist 11 | 12 | 13 | async def custom_http_exception_handler(request: Request, exc: HTTPException): 14 | return JSONResponse( 15 | status_code=exc.status_code, 16 | content={"error": exc.detail}, 17 | ) 18 | 19 | 20 | async def not_exists_exception_handler(request: Request, exc: DoesNotExist): 21 | return JSONResponse( 22 | 
status_code=HTTP_404_NOT_FOUND, 23 | content={"error": str(exc)}, 24 | ) 25 | 26 | 27 | async def validation_exception_handler(request: Request, exc: RequestValidationError): 28 | return JSONResponse( 29 | status_code=HTTP_422_UNPROCESSABLE_ENTITY, 30 | content={"error": str(exc)}, 31 | ) 32 | 33 | 34 | async def exception_handler(request: Request, exc: Exception): 35 | return JSONResponse( 36 | status_code=HTTP_500_INTERNAL_SERVER_ERROR, 37 | content={"error": str(exc)}, 38 | ) 39 | -------------------------------------------------------------------------------- /databack/discover.py: -------------------------------------------------------------------------------- 1 | from typing import Type 2 | 3 | from databack import datasource, storage 4 | from databack.datasource import clickhouse, local, mongo, mysql, postgres, redis, ssh 5 | from databack.enums import DataSourceType, StorageType 6 | from databack.storage import local as local_storage 7 | from databack.storage import s3 8 | from databack.storage import ssh as ssh_storage 9 | 10 | 11 | def get_data_source(type_: DataSourceType) -> Type[datasource.Base]: 12 | match type_: 13 | case DataSourceType.mysql: 14 | return mysql.MySQL 15 | case DataSourceType.postgres: 16 | return postgres.Postgres 17 | case DataSourceType.local: 18 | return local.Local 19 | case DataSourceType.ssh: 20 | return ssh.SSH 21 | case DataSourceType.mongo: 22 | return mongo.Mongo 23 | case DataSourceType.redis: 24 | return redis.Redis 25 | case DataSourceType.clickhouse: 26 | return clickhouse.ClickHouse 27 | 28 | 29 | def get_storage(type_: StorageType) -> Type[storage.Base]: 30 | match type_: 31 | case StorageType.local: 32 | return local_storage.Local 33 | case StorageType.ssh: 34 | return ssh_storage.SSH 35 | case StorageType.s3: 36 | return s3.S3 37 | -------------------------------------------------------------------------------- /databack/api/action_log.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter, Depends 2 | from pydantic import BaseModel 3 | from tortoise.contrib.pydantic import pydantic_model_creator 4 | 5 | from databack.models import ActionLog 6 | from databack.schema.request import Query 7 | 8 | router = APIRouter() 9 | 10 | 11 | class ActionLogResponse(pydantic_model_creator(ActionLog)): # type: ignore 12 | admin_id: int 13 | 14 | 15 | class ActionLogsResponse(BaseModel): 16 | total: int 17 | data: list[ActionLogResponse] 18 | 19 | 20 | @router.get("", response_model=ActionLogsResponse) 21 | async def get_actions_logs( 22 | admin_id: int | None = None, 23 | method: str | None = None, 24 | path: str | None = None, 25 | query: Query = Depends(Query), 26 | ): 27 | qs = ActionLog.all() 28 | if admin_id: 29 | qs = qs.filter(admin_id=admin_id) 30 | if method: 31 | qs = qs.filter(method=method) 32 | if path: 33 | qs = qs.filter(path__icontains=path) 34 | total = await qs.count() 35 | data = await qs.limit(query.limit).offset(query.offset).order_by(*query.orders) 36 | return {"total": total, "data": data} 37 | 38 | 39 | @router.delete("/{pks}") 40 | async def delete_action_logs(pks: str): 41 | id_list = [int(pk) for pk in pks.split(",")] 42 | await ActionLog.filter(id__in=id_list).delete() 43 | -------------------------------------------------------------------------------- /databack/api/stat.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | from tortoise.expressions import RawSQL 3 | from tortoise.functions 
import Count 4 | 5 | from databack.models import ( 6 | ActionLog, 7 | Admin, 8 | DataSource, 9 | RestoreLog, 10 | Storage, 11 | Task, 12 | TaskLog, 13 | ) 14 | 15 | router = APIRouter() 16 | 17 | 18 | @router.get("") 19 | async def get_stats(): 20 | datasource_count = await DataSource.all().count() 21 | storage_count = await Storage.all().count() 22 | task_count = await Task.all().count() 23 | task_log_count = await TaskLog.all().count() 24 | restore_log_count = await RestoreLog.all().count() 25 | admin_count = await Admin.filter(is_active=True).count() 26 | action_log_count = await ActionLog.all().count() 27 | task_logs = ( 28 | await TaskLog.annotate(count=Count("id"), date=RawSQL("date(created_at)")) 29 | .group_by("status", "date") 30 | .values("status", "count", "date") 31 | ) 32 | return { 33 | "datasource_count": datasource_count, 34 | "storage_count": storage_count, 35 | "task_count": task_count, 36 | "task_log_count": task_log_count, 37 | "restore_log_count": restore_log_count, 38 | "task_logs": task_logs, 39 | "admin_count": admin_count, 40 | "action_log_count": action_log_count, 41 | } 42 | -------------------------------------------------------------------------------- /databack/api/__init__.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter, Depends 2 | 3 | from databack.api import action_log as action_log_router 4 | from databack.api import ( 5 | admin, 6 | auth, 7 | datasource, 8 | init, 9 | restore, 10 | stat, 11 | storage, 12 | task, 13 | task_log, 14 | ) 15 | from databack.depends import action_log, auth_required, set_i18n 16 | 17 | router = APIRouter(dependencies=[Depends(set_i18n)]) 18 | auth_router = APIRouter(dependencies=[Depends(auth_required), Depends(action_log)]) 19 | 20 | auth_router.include_router(task.router, prefix="/task", tags=["Task"]) 21 | auth_router.include_router(storage.router, prefix="/storage", tags=["Storage"]) 22 | auth_router.include_router(datasource.router, prefix="/datasource", tags=["Datasource"]) 23 | auth_router.include_router(task_log.router, prefix="/task_log", tags=["TaskLog"]) 24 | auth_router.include_router(stat.router, prefix="/stat", tags=["Stat"]) 25 | auth_router.include_router(restore.router, prefix="/restore", tags=["Restore"]) 26 | auth_router.include_router( 27 | action_log_router.router, 28 | prefix="/action_log", 29 | tags=["ActionLog"], 30 | ) 31 | auth_router.include_router(admin.router, prefix="/admin", tags=["Admin"]) 32 | 33 | router.include_router(auth.router, prefix="/auth", tags=["Auth"]) 34 | router.include_router(init.router, prefix="/init", tags=["Init"]) 35 | router.include_router(auth_router) 36 | -------------------------------------------------------------------------------- /databack/settings.py: -------------------------------------------------------------------------------- 1 | import sentry_sdk 2 | from pydantic_settings import BaseSettings 3 | from sentry_sdk.integrations.redis import RedisIntegration 4 | 5 | 6 | class Settings(BaseSettings): 7 | DEBUG: bool = False 8 | SENTRY_DSN: str | None 9 | ENV: str = "production" 10 | DB_URL: str 11 | REDIS_URL: str 12 | WORKER: bool = True 13 | SECRET_KEY: str 14 | HOST: str = "0.0.0.0" 15 | PORT: int = 8000 16 | GOOGLE_CLIENT_ID: str | None = None 17 | GOOGLE_CLIENT_SECRET: str | None = None 18 | GITHUB_CLIENT_ID: str | None = None 19 | GITHUB_CLIENT_SECRET: str | None = None 20 | 21 | @property 22 | def enable_github_oauth(self): 23 | return self.GITHUB_CLIENT_ID and self.GITHUB_CLIENT_SECRET 24 
| 25 | @property 26 | def enable_google_oauth(self): 27 | return self.GOOGLE_CLIENT_ID and self.GOOGLE_CLIENT_SECRET 28 | 29 | class Config: 30 | env_file = ".env" 31 | 32 | 33 | settings = Settings() 34 | TORTOISE_ORM = { 35 | "apps": { 36 | "models": { 37 | "models": ["databack.models", "aerich.models"], 38 | "default_connection": "default", 39 | }, 40 | "rearq": { 41 | "models": ["rearq.server.models"], 42 | "default_connection": "default", 43 | }, 44 | }, 45 | "connections": {"default": settings.DB_URL}, 46 | } 47 | if settings.SENTRY_DSN: 48 | sentry_sdk.init( 49 | dsn=settings.SENTRY_DSN, 50 | environment=settings.ENV, 51 | integrations=[RedisIntegration()], 52 | traces_sample_rate=1.0, 53 | ) 54 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "databack" 3 | version = "0.1.0" 4 | description = "Back up your data from MySQL/PostgreSQL etc. to any other storage" 5 | authors = ["long2ice "] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.11" 10 | fastapi = { version = "*", extras = ["all"] } 11 | tortoise-orm = { git = "https://github.com/tortoise/tortoise-orm.git", branch = "develop" } 12 | sentry-sdk = { version = "*", extras = ["fastapi"] } 13 | loguru = "*" 14 | aerich = "*" 15 | asyncmy = "^0.2.8" 16 | asyncpg = "*" 17 | rearq = { git = "https://github.com/long2ice/rearq.git", branch = "dev" } 18 | aioshutil = "*" 19 | aioboto3 = "*" 20 | asyncssh = "*" 21 | python-i18n = "*" 22 | fastapi-jwt = "*" 23 | passlib = {version = "*", extras = ["bcrypt"]} 24 | typer = {version = "*", extras = ["all"]} 25 | authlib = "*" 26 | pydantic-settings = "*" 27 | 28 | [tool.poetry.group.dev.dependencies] 29 | black = "*" 30 | isort = "*" 31 | mypy = "*" 32 | pytest = "*" 33 | pytest-asyncio = "*" 34 | pytest-custom-exit-code = "*" 35 | ruff = "*" 36 | types-aiofiles = "*" 37 | 38 | [tool.aerich] 39 | tortoise_orm = "databack.settings.TORTOISE_ORM" 40 | location = "./migrations" 41 | src_folder = "./."
42 | [build-system] 43 | requires = ["poetry-core"] 44 | build-backend = "poetry.core.masonry.api" 45 | 46 | [tool.isort] 47 | profile = "black" 48 | 49 | [tool.black] 50 | line-length = 100 51 | target-version = ['py37', 'py38', 'py39', 'py310'] 52 | 53 | [tool.pytest.ini_options] 54 | asyncio_mode = 'auto' 55 | 56 | [tool.mypy] 57 | ignore_missing_imports = true 58 | pretty = true 59 | plugins = ["pydantic.mypy"] 60 | 61 | [tool.ruff] 62 | line-length = 100 63 | -------------------------------------------------------------------------------- /migrations/models/2_20230524171549_update.py: -------------------------------------------------------------------------------- 1 | from tortoise import BaseDBAsyncClient 2 | 3 | 4 | async def upgrade(db: BaseDBAsyncClient) -> str: 5 | return """ 6 | ALTER TABLE `datasource` MODIFY COLUMN `type` VARCHAR(10) NOT NULL COMMENT 'clickhouse: clickhouse\nmysql: mysql\npostgres: postgres\nlocal: local\nssh: ssh\nmongo: mongo\nredis: redis'; 7 | ALTER TABLE `datasource` MODIFY COLUMN `type` VARCHAR(10) NOT NULL COMMENT 'clickhouse: clickhouse\nmysql: mysql\npostgres: postgres\nlocal: local\nssh: ssh\nmongo: mongo\nredis: redis'; 8 | ALTER TABLE `restorelog` MODIFY COLUMN `restore_type` VARCHAR(10) NOT NULL COMMENT 'clickhouse: clickhouse\nmysql: mysql\npostgres: postgres\nlocal: local\nssh: ssh\nmongo: mongo\nredis: redis'; 9 | ALTER TABLE `restorelog` MODIFY COLUMN `restore_type` VARCHAR(10) NOT NULL COMMENT 'clickhouse: clickhouse\nmysql: mysql\npostgres: postgres\nlocal: local\nssh: ssh\nmongo: mongo\nredis: redis';""" 10 | 11 | 12 | async def downgrade(db: BaseDBAsyncClient) -> str: 13 | return """ 14 | ALTER TABLE `datasource` MODIFY COLUMN `type` VARCHAR(8) NOT NULL COMMENT 'mysql: mysql\npostgres: postgres\nlocal: local\nssh: ssh\nmongo: mongo\nredis: redis'; 15 | ALTER TABLE `datasource` MODIFY COLUMN `type` VARCHAR(8) NOT NULL COMMENT 'mysql: mysql\npostgres: postgres\nlocal: local\nssh: ssh\nmongo: mongo\nredis: redis'; 16 | ALTER TABLE `restorelog` MODIFY COLUMN `restore_type` VARCHAR(8) NOT NULL COMMENT 'mysql: mysql\npostgres: postgres\nlocal: local\nssh: ssh\nmongo: mongo\nredis: redis'; 17 | ALTER TABLE `restorelog` MODIFY COLUMN `restore_type` VARCHAR(8) NOT NULL COMMENT 'mysql: mysql\npostgres: postgres\nlocal: local\nssh: ssh\nmongo: mongo\nredis: redis';""" 18 | -------------------------------------------------------------------------------- /databack/datasource/__init__.py: -------------------------------------------------------------------------------- 1 | import abc 2 | import os.path 3 | import tempfile 4 | 5 | import aioshutil 6 | from tortoise import timezone 7 | 8 | from databack.enums import DataSourceType 9 | 10 | 11 | class Base: 12 | type: DataSourceType 13 | 14 | def __init__(self, **kwargs): 15 | other_options = kwargs.pop("other_options", None) 16 | self.options = other_options.split() if other_options else [] 17 | self.kwargs = kwargs 18 | self.compress = self.kwargs.pop("compress", True) 19 | 20 | @property 21 | def filename(self): 22 | return f'{timezone.now().strftime("%Y-%m-%d_%H-%M-%S")}' 23 | 24 | async def check(self): 25 | return True 26 | 27 | @abc.abstractmethod 28 | async def backup(self): 29 | raise NotImplementedError 30 | 31 | @abc.abstractmethod 32 | async def restore(self, file: str): 33 | raise NotImplementedError 34 | 35 | async def get_restore(self, file: str): 36 | if self.compress: 37 | temp_dir = tempfile.mkdtemp() 38 | await aioshutil.unpack_archive(file, temp_dir) 39 | ret = os.path.join(temp_dir, 
os.path.basename(file).replace(".tar.gz", "")) 40 | if os.path.isdir(file): 41 | await aioshutil.rmtree(file) 42 | else: 43 | os.remove(file) 44 | return ret 45 | return file 46 | 47 | async def get_backup(self): 48 | backup = await self.backup() 49 | if self.compress: 50 | ret = await aioshutil.make_archive(backup, "gztar", root_dir=os.path.dirname(backup)) 51 | if os.path.isdir(backup): 52 | await aioshutil.rmtree(backup) 53 | else: 54 | os.remove(backup) 55 | return ret 56 | return backup 57 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node as frontend-builder 2 | ARG GIT_TOKEN 3 | RUN git clone https://$GIT_TOKEN@github.com/long2ice/databack-web.git /databack-web 4 | WORKDIR /databack-web 5 | RUN npm install && npm run build 6 | 7 | FROM golang as tools-builder 8 | RUN apt update -y && apt install -y libkrb5-dev 9 | RUN git clone https://github.com/mongodb/mongo-tools /mongo-tools 10 | RUN cd /mongo-tools && ./make build -tools=mongodump,mongorestore 11 | RUN git clone https://github.com/yannh/redis-dump-go.git /redis-dump-go 12 | RUN cd /redis-dump-go && go build -o /redis-dump-go/redis-dump-go 13 | RUN go install github.com/AlexAkulov/clickhouse-backup/cmd/clickhouse-backup@latest 14 | 15 | FROM snakepacker/python:3.11 16 | RUN apt update -y && apt install -y mysql-client curl redis-tools gcc libc6-dev python3.11-dev 17 | RUN echo "deb http://apt.postgresql.org/pub/repos/apt jammy-pgdg main" > /etc/apt/sources.list.d/pgdg.list 18 | RUN curl -o /etc/apt/trusted.gpg.d/pgdg.asc https://www.postgresql.org/media/keys/ACCC4CF8.asc 19 | RUN apt update -y && apt install -y postgresql-client-16 20 | ENV CRYPTOGRAPHY_DONT_BUILD_RUST=1 21 | ENV POETRY_VIRTUALENVS_CREATE=false 22 | RUN mkdir -p /databack 23 | COPY --from=frontend-builder /databack-web/dist /databack/static 24 | COPY --from=tools-builder /mongo-tools/bin/mongodump /usr/bin/mongodump 25 | COPY --from=tools-builder /mongo-tools/bin/mongorestore /usr/bin/mongorestore 26 | COPY --from=tools-builder /redis-dump-go/redis-dump-go /usr/bin/redis-dump-go 27 | COPY --from=tools-builder /go/bin/clickhouse-backup /usr/bin/clickhouse-backup 28 | WORKDIR /databack 29 | COPY ../pyproject.toml poetry.lock /databack/ 30 | RUN curl -sSL https://bootstrap.pypa.io/get-pip.py -o get-pip.py && python3.11 get-pip.py && pip3.11 install poetry && poetry install --no-root && rm get-pip.py 31 | COPY .. 
/databack 32 | RUN poetry install 33 | CMD ["python3.11", "-m", "databack.app"] 34 | -------------------------------------------------------------------------------- /databack/datasource/ssh.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | 4 | import asyncssh 5 | 6 | from databack.datasource import Base 7 | from databack.enums import DataSourceType 8 | 9 | 10 | class SSH(Base): 11 | type = DataSourceType.ssh 12 | 13 | def __init__(self, **kwargs): 14 | super().__init__(**kwargs) 15 | self.path = self.kwargs.get("path") 16 | self.host = self.kwargs.get("host") 17 | self.port = self.kwargs.get("port") 18 | self.username = self.kwargs.get("username") 19 | self.password = self.kwargs.get("password") 20 | self.private_key = self.kwargs.get("private_key") 21 | self.private_key_pass = self.kwargs.get("private_key_pass") 22 | 23 | async def check(self): 24 | async with self._get_connection() as conn: 25 | return await conn.run(f"ls {self.path}", check=True) 26 | 27 | def _get_connection(self): 28 | private_key = asyncssh.import_private_key(self.private_key, self.private_key_pass) 29 | return asyncssh.connect( 30 | self.host, 31 | port=self.port, 32 | username=self.username, 33 | password=self.password, 34 | client_keys=private_key, 35 | known_hosts=None, 36 | ) 37 | 38 | async def backup(self): 39 | temp_dir = tempfile.mkdtemp() 40 | destination = os.path.join(temp_dir, self.filename) 41 | async with self._get_connection() as conn: 42 | async with conn.start_sftp_client() as sftp: 43 | await sftp.get(self.path, destination, recurse=True) 44 | return destination 45 | 46 | async def restore(self, file: str): 47 | file = await self.get_restore(file) 48 | async with self._get_connection() as conn: 49 | async with conn.start_sftp_client() as sftp: 50 | await sftp.put(file, self.path, recurse=True) 51 | -------------------------------------------------------------------------------- /databack/api/restore.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter, Depends 2 | from pydantic import BaseModel 3 | from starlette.status import HTTP_201_CREATED 4 | from tortoise import timezone 5 | from tortoise.contrib.pydantic import pydantic_model_creator 6 | 7 | from databack.enums import DataSourceType, TaskStatus 8 | from databack.models import RestoreLog 9 | from databack.schema.request import Query 10 | from databack.tasks import run_restore 11 | 12 | router = APIRouter() 13 | 14 | 15 | class GetRestoreResponse(BaseModel): 16 | class RestoreLogModel(pydantic_model_creator(RestoreLog)): # type: ignore 17 | task_log_id: int 18 | 19 | total: int 20 | data: list[RestoreLogModel] 21 | 22 | 23 | class RestoreRequest(BaseModel): 24 | options: dict 25 | task_log_id: int 26 | restore_type: DataSourceType 27 | 28 | 29 | @router.get("", response_model=GetRestoreResponse) 30 | async def get_restore_logs( 31 | limit: int = 10, 32 | offset: int = 0, 33 | status: TaskStatus | None = None, 34 | query: Query = Depends(Query), 35 | ): 36 | qs = RestoreLog.all() 37 | if status: 38 | qs = qs.filter(status=status) 39 | total = await qs.count() 40 | orders = query.orders if query.orders else ["-id"] 41 | data = await qs.order_by(*orders).limit(limit).offset(offset) 42 | return {"total": total, "data": data} 43 | 44 | 45 | @router.post("", status_code=HTTP_201_CREATED) 46 | async def restore_task_log(req: RestoreRequest): 47 | log = await RestoreLog.create( 48 | task_log_id=req.task_log_id, 49 
| start_at=timezone.now(), 50 | options=req.options, 51 | restore_type=req.restore_type, 52 | ) 53 | await run_restore.delay(log.pk) 54 | 55 | 56 | @router.delete("/{pks}") 57 | async def delete_restore_log(pks: str): 58 | id_list = [int(pk) for pk in pks.split(",")] 59 | await RestoreLog.filter(id__in=id_list).delete() 60 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # databack 2 | 3 | `databack` is a tool to back up your data from MySQL/PostgreSQL/SSH etc. to any other storage like S3, SCP etc. It 4 | can be run as a cron job to back up your data automatically, and you can also restore your data from backup files. 5 | 6 | ## Features 7 | 8 | - MySQL/PostgreSQL/SSH/Local data source. 9 | - S3/SCP/Local storage. 10 | - Cron job to back up your data automatically. 11 | - i18n support (English and Chinese). 12 | - Web UI to manage your backup tasks. 13 | 14 | ## Screenshots 15 | 16 | You can try it on the [Demo](https://databack-demo.long2ice.io/) site. 17 | 18 | ![databack](./images/home.png) 19 | ![databack](./images/datasource.png) 20 | 21 | ## Deployment 22 | 23 | The easiest way to deploy `databack` is to use `docker-compose`. 24 | 25 | ```yml 26 | version: "3" 27 | services: 28 | databack: 29 | restart: always 30 | env_file: .env 31 | network_mode: host 32 | image: ghcr.io/long2ice/databack/databack:full 33 | ``` 34 | 35 | ## Configuration 36 | 37 | Just set environment variables in the `.env` file. 38 | 39 | ```dotenv 40 | DB_URL=mysql://root:123456@127.0.0.1:3306/databack 41 | DEBUG=True 42 | REDIS_URL=redis://127.0.0.1:6379/0 43 | SENTRY_DSN='xxx' # remove it if you don't use sentry 44 | ENV=production 45 | WORKER=True 46 | ``` 47 | 48 | ## Worker 49 | 50 | By default, `databack` will start a built-in worker to run tasks when the environment variable `WORKER` is `True`. If you 51 | want to start multiple workers, you can run the `rearq databack.tasks:rearq worker` command. 52 | 53 | For `docker-compose` deployment: 54 | 55 | ```yml 56 | version: "3" 57 | services: 58 | worker: 59 | restart: always 60 | env_file: .env 61 | network_mode: host 62 | image: ghcr.io/long2ice/databack/databack:full 63 | entrypoint: rearq databack.tasks:rearq worker -t 64 | ``` 65 | 66 | ## Frontend 67 | 68 | The frontend project is [here](https://github.com/long2ice/databack-web). 69 | 70 | ## License 71 | 72 | This project is licensed under the [Apache-2.0](./LICENSE) License.
73 | -------------------------------------------------------------------------------- /databack/datasource/mongo.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import tempfile 4 | 5 | import aioshutil 6 | 7 | from databack.datasource import Base 8 | from databack.enums import DataSourceType 9 | 10 | 11 | class Mongo(Base): 12 | type = DataSourceType.mongo 13 | 14 | def __init__(self, **kwargs): 15 | super().__init__(**kwargs) 16 | for key, value in self.kwargs.items(): 17 | self.options.append(key) 18 | self.options.append(value) 19 | 20 | async def check(self): 21 | if not await aioshutil.which("mongodump"): 22 | raise RuntimeError("mongodump not found in PATH") 23 | if not await aioshutil.which("mongorestore"): 24 | raise RuntimeError("mongorestore not found in PATH") 25 | return True 26 | 27 | async def backup(self): 28 | temp_dir = tempfile.mkdtemp() 29 | options = self.options 30 | file = os.path.join(temp_dir, f"{self.filename}.gz") 31 | options.append(f"--archive={file}") 32 | options.append("--gzip") 33 | proc = await asyncio.create_subprocess_exec( 34 | "mongodump", 35 | *options, 36 | stdout=asyncio.subprocess.PIPE, 37 | stderr=asyncio.subprocess.PIPE, 38 | ) 39 | stdout, stderr = await proc.communicate() 40 | if proc.returncode != 0: 41 | raise RuntimeError(f"mongodump failed with {proc.returncode}: {stderr.decode()}") 42 | return file 43 | 44 | async def restore(self, file: str): 45 | file = await self.get_restore(file) 46 | options = self.options 47 | options.append(f"--archive={file}") 48 | options.append("--gzip") 49 | proc = await asyncio.create_subprocess_exec( 50 | "mongorestore", 51 | *options, 52 | stdout=asyncio.subprocess.PIPE, 53 | stderr=asyncio.subprocess.PIPE, 54 | ) 55 | stdout, stderr = await proc.communicate() 56 | if proc.returncode != 0: 57 | raise RuntimeError(f"mongorestore failed with {proc.returncode}: {stderr.decode()}") 58 | -------------------------------------------------------------------------------- /databack/storage/ssh.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import asyncssh 4 | from pydantic import BaseModel 5 | 6 | from databack.enums import StorageType 7 | from databack.storage import Base 8 | 9 | 10 | class SSHOptions(BaseModel): 11 | host: str 12 | port: int 13 | username: str 14 | password: str 15 | private_key: str 16 | private_key_pass: str 17 | 18 | 19 | class SSH(Base): 20 | type = StorageType.ssh 21 | options: SSHOptions 22 | 23 | def __init__( 24 | self, 25 | options: SSHOptions, 26 | path: str = "", 27 | ): 28 | super().__init__( 29 | options=options, 30 | path=path, 31 | ) 32 | self.host = options.host 33 | self.port = options.port 34 | self.username = options.username 35 | self.password = options.password 36 | self.private_key = options.private_key 37 | self.private_key_pass = options.private_key_pass 38 | 39 | async def check(self): 40 | async with self._get_connection() as conn: 41 | return await conn.run("ls", self.path, check=True) 42 | 43 | def _get_connection(self): 44 | private_key = asyncssh.import_private_key(self.private_key, self.private_key_pass) 45 | return asyncssh.connect( 46 | self.host, 47 | port=self.port, 48 | username=self.username, 49 | password=self.password, 50 | known_hosts=None, 51 | client_keys=private_key, 52 | ) 53 | 54 | async def upload(self, file: str): 55 | async with self._get_connection() as conn: 56 | async with conn.start_sftp_client() as sftp: 57 | await 
sftp.put(file, self.path) 58 | return os.path.join(self.path, os.path.basename(file)) 59 | 60 | async def download(self, file: str): 61 | async with self._get_connection() as conn: 62 | async with conn.start_sftp_client() as sftp: 63 | await sftp.get(file, self.path) 64 | 65 | async def delete(self, file: str): 66 | async with self._get_connection() as conn: 67 | await conn.run("rm", os.path.join(self.path, file)) 68 | -------------------------------------------------------------------------------- /databack/scheduler.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from crontab import CronTab 4 | from loguru import logger 5 | from tortoise import timezone 6 | from tortoise.expressions import Q 7 | 8 | from databack.constants import SCHEDULER_SLEEP_SECONDS 9 | from databack.models import Task 10 | from databack.tasks import run_backup 11 | 12 | 13 | class Scheduler: 14 | _wait_task = None 15 | _stop = False 16 | 17 | @classmethod 18 | async def start(cls): 19 | while not cls._stop: 20 | wait_seconds = [] 21 | try: 22 | tasks = ( 23 | await Task.filter( 24 | ~Q(cron=""), 25 | enabled=True, 26 | ) 27 | .only("id", "cron", "name", "next_run_at") 28 | .all() 29 | ) 30 | for task in tasks: 31 | if not task.next_run_at: 32 | await task.refresh_next_run_at() 33 | if task.next_run_at <= timezone.now(): 34 | logger.info(f"Run task {task.name} now!") 35 | await run_backup.delay(task.id) 36 | await task.refresh_next_run_at() 37 | cron = CronTab(task.cron) 38 | seconds = cron.next(default_utc=False) 39 | wait_seconds.append(seconds) 40 | except Exception as e: 41 | logger.error(f"Scheduler error: {e}") 42 | min_wait_seconds = min(wait_seconds) if wait_seconds else SCHEDULER_SLEEP_SECONDS 43 | logger.info(f"Scheduler will sleep {int(min_wait_seconds)} seconds for next task") 44 | cls._wait_task = asyncio.create_task(asyncio.sleep(min_wait_seconds)) 45 | try: 46 | await cls._wait_task 47 | except asyncio.CancelledError: 48 | pass 49 | 50 | @classmethod 51 | async def refresh(cls): 52 | if cls._wait_task: 53 | cls._wait_task.cancel() 54 | cls._wait_task = None 55 | 56 | @classmethod 57 | async def stop(cls): 58 | cls._stop = True 59 | await cls.refresh() 60 | -------------------------------------------------------------------------------- /databack/auth.py: -------------------------------------------------------------------------------- 1 | from authlib.integrations.starlette_client import OAuth 2 | from fastapi_jwt import JwtAccessBearer, JwtRefreshBearer 3 | from passlib.context import CryptContext 4 | from tortoise import timezone 5 | 6 | from databack.models import Admin 7 | from databack.settings import settings 8 | 9 | access_security = JwtAccessBearer(secret_key=settings.SECRET_KEY) 10 | refresh_security = JwtRefreshBearer( 11 | secret_key=settings.SECRET_KEY, 12 | ) 13 | pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") 14 | 15 | 16 | def verify_password(plain_password: str, hashed_password: str): 17 | return pwd_context.verify(plain_password, hashed_password) 18 | 19 | 20 | def get_password_hash(password: str): 21 | return pwd_context.hash(password) 22 | 23 | 24 | oauth = OAuth() 25 | 26 | if settings.enable_google_oauth: 27 | oauth.register( 28 | name="google", 29 | client_id=settings.GOOGLE_CLIENT_ID, 30 | client_secret=settings.GOOGLE_CLIENT_SECRET, 31 | server_metadata_url="https://accounts.google.com/.well-known/openid-configuration", 32 | client_kwargs={"scope": "openid email profile"}, 33 | ) 34 | if 
settings.enable_github_oauth: 35 | oauth.register( 36 | name="github", 37 | client_id=settings.GITHUB_CLIENT_ID, 38 | client_secret=settings.GITHUB_CLIENT_SECRET, 39 | access_token_url="https://github.com/login/oauth/access_token", 40 | access_token_params=None, 41 | authorize_url="https://github.com/login/oauth/authorize", 42 | authorize_params=None, 43 | api_base_url="https://api.github.com/", 44 | client_kwargs={"scope": "user:email"}, 45 | ) 46 | 47 | 48 | async def login(email: str, nickname: str): 49 | admin = await Admin.filter(email=email).first() 50 | if not admin: 51 | admin = await Admin.create(nickname=nickname, email=email, password="", is_superuser=False) 52 | 53 | admin.last_login_at = timezone.now() 54 | admin.nickname = nickname 55 | await admin.save(update_fields=["last_login_at", "nickname"]) 56 | subject = { 57 | "id": admin.pk, 58 | } 59 | return { 60 | "access_token": access_security.create_access_token(subject=subject), 61 | "refresh_token": refresh_security.create_refresh_token(subject=subject), 62 | } 63 | -------------------------------------------------------------------------------- /databack/datasource/redis.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import tempfile 3 | 4 | import aiofiles 5 | import aioshutil 6 | 7 | from databack.datasource import Base 8 | from databack.enums import DataSourceType 9 | 10 | 11 | class Redis(Base): 12 | type = DataSourceType.redis 13 | 14 | def __init__(self, **kwargs): 15 | super().__init__(**kwargs) 16 | self.password = kwargs.pop("password", None) 17 | for key, value in self.kwargs.items(): 18 | if value: 19 | self.options.append(key) 20 | self.options.append(value) 21 | 22 | async def check(self): 23 | if not await aioshutil.which("redis-cli"): 24 | raise RuntimeError("redis-cli not found in PATH") 25 | if not await aioshutil.which("redis-dump-go"): 26 | raise RuntimeError("redis-dump-go not found in PATH") 27 | return True 28 | 29 | async def backup(self): 30 | temp_dir = tempfile.mkdtemp() 31 | options = self.options 32 | file = f"{temp_dir}/{self.filename}.resp" 33 | proc = await asyncio.create_subprocess_exec( 34 | "redis-dump-go", 35 | *options, 36 | stdout=asyncio.subprocess.PIPE, 37 | stderr=asyncio.subprocess.PIPE, 38 | env={"REDISDUMPGO_AUTH": self.password}, 39 | ) 40 | stdout, stderr = await proc.communicate() 41 | if proc.returncode != 0: 42 | raise RuntimeError(f"redis-cli failed with {proc.returncode}: {stderr.decode()}") 43 | async with aiofiles.open(file, "wb") as f: 44 | await f.write(stdout) 45 | return file 46 | 47 | async def restore(self, file: str): 48 | file = await self.get_restore(file) 49 | options = self.options 50 | options.append("--pipe") 51 | proc = await asyncio.create_subprocess_exec( 52 | "redis-cli", 53 | *options, 54 | stdout=asyncio.subprocess.PIPE, 55 | stderr=asyncio.subprocess.PIPE, 56 | stdin=asyncio.subprocess.PIPE, 57 | ) 58 | async with aiofiles.open(file, "rb") as f: 59 | stdout, stderr = await proc.communicate(input=await f.read()) 60 | if proc.returncode != 0: 61 | raise RuntimeError(f"redis-cli failed with {proc.returncode}: {stderr.decode()}") 62 | -------------------------------------------------------------------------------- /databack/app.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from contextlib import asynccontextmanager 3 | 4 | from aerich import Command 5 | from fastapi import FastAPI, HTTPException 6 | from fastapi.exceptions import 
RequestValidationError 7 | from rearq.server.app import app as rearq_server 8 | from starlette.middleware.sessions import SessionMiddleware 9 | from tortoise.contrib.fastapi import register_tortoise 10 | from tortoise.exceptions import DoesNotExist 11 | 12 | from databack import locales 13 | from databack.api import router 14 | from databack.exceptions import ( 15 | custom_http_exception_handler, 16 | exception_handler, 17 | not_exists_exception_handler, 18 | validation_exception_handler, 19 | ) 20 | from databack.log import init_logging 21 | from databack.scheduler import Scheduler 22 | from databack.settings import TORTOISE_ORM, settings 23 | from databack.static import SPAStaticFiles 24 | from databack.tasks import rearq 25 | 26 | 27 | @asynccontextmanager 28 | async def lifespan(_: FastAPI): 29 | init_logging() 30 | locales.init() 31 | aerich = Command(TORTOISE_ORM) 32 | await aerich.init() 33 | await aerich.upgrade(True) 34 | asyncio.ensure_future(Scheduler.start()) 35 | if settings.WORKER: 36 | await rearq_server.start_worker() 37 | yield 38 | await Scheduler.stop() 39 | 40 | 41 | if settings.DEBUG: 42 | app = FastAPI( 43 | debug=settings.DEBUG, 44 | lifespan=lifespan, 45 | ) 46 | else: 47 | app = FastAPI( 48 | debug=settings.DEBUG, 49 | lifespan=lifespan, 50 | redoc_url=None, 51 | docs_url=None, 52 | ) 53 | app.include_router(router, prefix="/api") 54 | app.mount("/rearq", rearq_server) 55 | app.mount("/", SPAStaticFiles(directory="static", html=True), name="static") 56 | app.add_middleware(SessionMiddleware, secret_key=settings.SECRET_KEY) 57 | 58 | rearq_server.set_rearq(rearq) 59 | register_tortoise( 60 | app, 61 | config=TORTOISE_ORM, 62 | ) 63 | app.add_exception_handler(HTTPException, custom_http_exception_handler) 64 | app.add_exception_handler(DoesNotExist, not_exists_exception_handler) 65 | app.add_exception_handler(RequestValidationError, validation_exception_handler) 66 | app.add_exception_handler(Exception, exception_handler) 67 | 68 | 69 | if __name__ == "__main__": 70 | import uvicorn 71 | 72 | uvicorn.run(app, host=settings.HOST, port=settings.PORT) 73 | -------------------------------------------------------------------------------- /databack/depends.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import i18n 4 | from fastapi import Depends, HTTPException, Security 5 | from fastapi_jwt import JwtAuthorizationCredentials 6 | from starlette.requests import Request 7 | from starlette.status import HTTP_403_FORBIDDEN 8 | 9 | from databack.auth import access_security 10 | from databack.constants import MASK_KEYS 11 | from databack.models import ActionLog, Admin 12 | from databack.scheduler import Scheduler 13 | 14 | 15 | async def refresh_scheduler(): 16 | yield 17 | await Scheduler.refresh() 18 | 19 | 20 | async def set_i18n(request: Request): 21 | lang = request.headers.get("Accept-Language", "en-US") 22 | lang = lang.split(",")[0] 23 | i18n.set("locale", lang) 24 | 25 | 26 | async def auth_required(credentials: JwtAuthorizationCredentials = Security(access_security)): 27 | return credentials.subject["id"] 28 | 29 | 30 | async def get_current_admin(pk: int = Depends(auth_required)): 31 | admin = await Admin.get(pk=pk) 32 | if not admin.is_active: 33 | raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail=i18n.t("auth.not_active")) 34 | return admin 35 | 36 | 37 | async def superuser_required(admin: Admin = Depends(get_current_admin)): 38 | if not admin.is_superuser: 39 | raise 
HTTPException(status_code=HTTP_403_FORBIDDEN, detail=i18n.t("auth.not_superuser")) 40 | return admin 41 | 42 | 43 | def get_client_ip(request: Request): 44 | forwarded = request.headers.get("X-Forwarded-For") 45 | if forwarded: 46 | return forwarded.split(",")[0] 47 | return request.client.host # type: ignore 48 | 49 | 50 | async def action_log( 51 | request: Request, admin: Admin = Depends(get_current_admin), ip=Depends(get_client_ip) 52 | ): 53 | method = request.method 54 | if method in ["POST", "PUT", "PATCH", "DELETE"]: 55 | path = request.url.path 56 | content = {} 57 | if method != "DELETE": 58 | body = await request.body() 59 | if body: 60 | content = json.loads(body) 61 | else: 62 | content = dict(request.query_params) 63 | for key in content: 64 | if key in MASK_KEYS: 65 | content[key] = "******" 66 | await ActionLog.create( 67 | admin=admin, 68 | ip=ip, 69 | content=content, 70 | path=path, 71 | method=method, 72 | ) 73 | -------------------------------------------------------------------------------- /databack/datasource/clickhouse.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import tempfile 3 | 4 | import aiofiles 5 | import aioshutil 6 | 7 | from databack.datasource import Base 8 | from databack.enums import DataSourceType 9 | 10 | 11 | class ClickHouse(Base): 12 | type = DataSourceType.clickhouse 13 | 14 | def __init__(self, **kwargs): 15 | super().__init__(**kwargs) 16 | for k, v in self.kwargs.items(): 17 | if v is True: 18 | self.options.append(k) 19 | else: 20 | self.options.append(f"{k}={v}") 21 | 22 | async def check(self): 23 | if not await aioshutil.which("clickhouse-backup"): 24 | raise RuntimeError("clickhouse-backup not found in PATH") 25 | return True 26 | 27 | @classmethod 28 | def _check_error(cls, std: bytes): 29 | if std and "[ERROR]" in std.decode(): 30 | raise RuntimeError(f"clickhouse-backup failed: {std.decode()}") 31 | 32 | async def backup(self): 33 | temp_dir = tempfile.mkdtemp() 34 | options = self.options 35 | file = f"{temp_dir}/{self.filename}.sql" 36 | options.append(f"--result-file={file}") 37 | proc = await asyncio.create_subprocess_exec( 38 | "mysqlpump", 39 | *options, 40 | stdout=asyncio.subprocess.PIPE, 41 | stderr=asyncio.subprocess.PIPE, 42 | ) 43 | stdout, stderr = await proc.communicate() 44 | if proc.returncode != 0: 45 | raise RuntimeError(f"mysqlpump failed with {proc.returncode}: {stderr.decode()}") 46 | self._check_error(stdout) 47 | self._check_error(stderr) 48 | return file 49 | 50 | async def restore(self, file: str): 51 | file = await self.get_restore(file) 52 | options = self.options 53 | proc = await asyncio.create_subprocess_exec( 54 | "mysql", 55 | *options, 56 | stdout=asyncio.subprocess.PIPE, 57 | stderr=asyncio.subprocess.PIPE, 58 | stdin=asyncio.subprocess.PIPE, 59 | ) 60 | async with aiofiles.open(file, "rb") as f: 61 | content = await f.read() 62 | stdout, stderr = await proc.communicate(content) 63 | if proc.returncode != 0: 64 | raise RuntimeError(f"mysql failed with {proc.returncode}: {stderr.decode()}") 65 | self._check_error(stdout) 66 | self._check_error(stderr) 67 | -------------------------------------------------------------------------------- /databack/storage/s3.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import aioboto3 4 | import aiofiles 5 | from botocore.client import Config 6 | from pydantic import BaseModel 7 | 8 | from databack.enums import StorageType 9 | from databack.storage 
import Base 10 | 11 | 12 | class S3Options(BaseModel): 13 | access_key_id: str 14 | secret_access_key: str 15 | region_name: str | None 16 | bucket_name: str 17 | endpoint_url: str 18 | access_style: str = "auto" 19 | 20 | 21 | class S3(Base): 22 | type = StorageType.s3 23 | options: S3Options 24 | 25 | def __init__( 26 | self, 27 | options: S3Options, 28 | path: str = "", 29 | ): 30 | super().__init__( 31 | options=options, 32 | path=path, 33 | ) 34 | self.endpoint_url = options.endpoint_url 35 | self.access_key_id = options.access_key_id 36 | self.secret_access_key = options.secret_access_key 37 | self.region_name = options.region_name 38 | self.bucket_name = options.bucket_name 39 | self.access_style = options.access_style 40 | self.path = path 41 | self.session = aioboto3.Session( 42 | aws_access_key_id=self.access_key_id, 43 | aws_secret_access_key=self.secret_access_key, 44 | region_name=self.region_name, 45 | ) 46 | self.s3_config = Config(s3={"addressing_style": self.access_style}) 47 | 48 | def _get_client(self): 49 | return self.session.client("s3", endpoint_url=self.endpoint_url, config=self.s3_config) 50 | 51 | async def check(self): 52 | async with self._get_client() as s3: 53 | return await s3.head_bucket(Bucket=self.bucket_name) 54 | 55 | async def upload(self, file: str): 56 | async with self._get_client() as s3: 57 | async with aiofiles.open(file, "rb") as f: 58 | key = os.path.join(self.path, os.path.basename(file)) 59 | await s3.put_object(Key=key, Body=await f.read(), Bucket=self.bucket_name) 60 | return key 61 | 62 | async def download(self, file: str): 63 | async with self._get_client() as s3: 64 | await s3.download_file(Key=file, Filename=self.path, Bucket=self.bucket_name) 65 | 66 | async def delete(self, file: str): 67 | async with self._get_client() as s3: 68 | await s3.delete_objects(Delete={"Objects": [{"Key": file}]}, Bucket=self.bucket_name) 69 | -------------------------------------------------------------------------------- /databack/api/task_log.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | 3 | from fastapi import APIRouter, Depends 4 | 5 | from databack.discover import get_storage 6 | from databack.enums import TaskStatus 7 | from databack.models import TaskLog 8 | from databack.schema.request import Query 9 | 10 | router = APIRouter() 11 | 12 | 13 | @router.get("") 14 | async def get_task_logs( 15 | task_id: int | None = None, 16 | data_source_id: int | None = None, 17 | storage_id: int | None = None, 18 | status: str | None = None, 19 | started_at: datetime | None = None, 20 | ended_at: datetime | None = None, 21 | is_deleted: bool | None = None, 22 | query: Query = Depends(Query), 23 | ): 24 | qs = TaskLog.all() 25 | if task_id: 26 | qs = qs.filter(task_id=task_id) 27 | if status: 28 | qs = qs.filter(status=status) 29 | if started_at: 30 | qs = qs.filter(started_at__gte=started_at) 31 | if ended_at: 32 | qs = qs.filter(ended_at__lte=ended_at) 33 | if is_deleted is not None: 34 | qs = qs.filter(is_deleted=is_deleted) 35 | if data_source_id: 36 | qs = qs.filter(task__data_source_id=data_source_id) 37 | if storage_id: 38 | qs = qs.filter(task__storage_id=storage_id) 39 | total = await qs.count() 40 | data = ( 41 | await qs.order_by(*query.orders) 42 | .limit(query.limit) 43 | .offset(query.offset) 44 | .values( 45 | "id", 46 | "task_id", 47 | "status", 48 | "path", 49 | "size", 50 | "message", 51 | "is_deleted", 52 | "start_at", 53 | "end_at", 54 | 
data_source_type="task__data_source__type", 55 | data_source_name="task__data_source__name", 56 | storage_name="task__storage__name", 57 | ) 58 | ) 59 | return {"total": total, "data": data} 60 | 61 | 62 | @router.delete("/{pks}") 63 | async def delete_task_logs(pks: str): 64 | id_list = [int(pk) for pk in pks.split(",")] 65 | for pk in id_list: 66 | log = await TaskLog.get(id=pk).select_related("task__storage") 67 | storage = log.task.storage 68 | storage_cls = get_storage(storage.type) 69 | storage_obj = storage_cls(options=storage.options_parsed, path=storage.path) # type: ignore 70 | if log.status == TaskStatus.success: 71 | await storage_obj.delete(log.path) 72 | await log.delete() 73 | -------------------------------------------------------------------------------- /databack/datasource/mysql.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import tempfile 3 | 4 | import aiofiles 5 | import aioshutil 6 | 7 | from databack.datasource import Base 8 | from databack.enums import DataSourceType 9 | 10 | 11 | class MySQL(Base): 12 | type = DataSourceType.mysql 13 | 14 | def __init__(self, **kwargs): 15 | super().__init__(**kwargs) 16 | for k, v in self.kwargs.items(): 17 | if v is True: 18 | self.options.append(k) 19 | else: 20 | self.options.append(f"{k}={v}") 21 | 22 | async def check(self): 23 | if not await aioshutil.which("mysqlpump"): 24 | raise RuntimeError("mysqlpump not found in PATH") 25 | if not await aioshutil.which("mysql"): 26 | raise RuntimeError("mysql not found in PATH") 27 | return True 28 | 29 | @classmethod 30 | def _check_error(cls, std: bytes): 31 | if std and "[ERROR]" in std.decode(): 32 | raise RuntimeError(f"mysqlpump failed: {std.decode()}") 33 | 34 | async def backup(self): 35 | temp_dir = tempfile.mkdtemp() 36 | options = self.options 37 | file = f"{temp_dir}/{self.filename}.sql" 38 | options.append(f"--result-file={file}") 39 | proc = await asyncio.create_subprocess_exec( 40 | "mysqlpump", 41 | *options, 42 | stdout=asyncio.subprocess.PIPE, 43 | stderr=asyncio.subprocess.PIPE, 44 | ) 45 | stdout, stderr = await proc.communicate() 46 | if proc.returncode != 0: 47 | raise RuntimeError(f"mysqlpump failed with {proc.returncode}: {stderr.decode()}") 48 | self._check_error(stdout) 49 | self._check_error(stderr) 50 | return file 51 | 52 | async def restore(self, file: str): 53 | file = await self.get_restore(file) 54 | options = self.options 55 | proc = await asyncio.create_subprocess_exec( 56 | "mysql", 57 | *options, 58 | stdout=asyncio.subprocess.PIPE, 59 | stderr=asyncio.subprocess.PIPE, 60 | stdin=asyncio.subprocess.PIPE, 61 | ) 62 | async with aiofiles.open(file, "rb") as f: 63 | content = await f.read() 64 | stdout, stderr = await proc.communicate(content) 65 | if proc.returncode != 0: 66 | raise RuntimeError(f"mysql failed with {proc.returncode}: {stderr.decode()}") 67 | self._check_error(stdout) 68 | self._check_error(stderr) 69 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | 
.installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | .idea 131 | -------------------------------------------------------------------------------- /databack/datasource/postgres.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import tempfile 3 | 4 | import aiofiles 5 | import aioshutil 6 | 7 | from databack.datasource import Base 8 | from databack.enums import DataSourceType 9 | 10 | 11 | class Postgres(Base): 12 | type = DataSourceType.postgres 13 | 14 | def __init__(self, password: str, backup_program: str | None = None, **kwargs): 15 | super().__init__(**kwargs) 16 | for k, v in self.kwargs.items(): 17 | if v is True: 18 | self.options.append(k) 19 | else: 20 | self.options.append(f"{k}={v}") 21 | self.password = password 22 | self.backup_program = backup_program 23 | 24 | async def check(self): 25 | if not await aioshutil.which(self.backup_program): 26 | raise ValueError(f"{self.backup_program} not found in PATH") 27 | if not await aioshutil.which("psql"): 28 | raise RuntimeError("psql not found in PATH") 29 | return True 30 | 31 | @classmethod 32 | def _check_error(cls, action: str, std: bytes): 33 | if std and "error:" in std.decode(): 34 | raise RuntimeError(f"{action} failed: {std.decode()}") 35 | 36 | async def backup(self): 37 | temp_dir = tempfile.mkdtemp() 38 | file = f"{temp_dir}/{self.filename}.sql" 39 | options = self.options 40 | options.append(f"--file={file}") 41 | proc = await asyncio.create_subprocess_exec( 42 | self.backup_program, 43 | *options, 44 | 
stdout=asyncio.subprocess.PIPE, 45 | stderr=asyncio.subprocess.PIPE, 46 | env={"PGPASSWORD": self.password}, 47 | ) 48 | stdout, stderr = await proc.communicate() 49 | if proc.returncode != 0: 50 | raise RuntimeError( 51 | f"{self.backup_program} failed with {proc.returncode}: {stderr.decode()}" 52 | ) 53 | self._check_error(self.backup_program, stdout) 54 | self._check_error(self.backup_program, stderr) 55 | return file 56 | 57 | async def restore(self, file: str): 58 | file = await self.get_restore(file) 59 | options = self.options 60 | options.append(f"--file={file}") 61 | proc = await asyncio.create_subprocess_exec( 62 | "psql", 63 | *options, 64 | stdout=asyncio.subprocess.PIPE, 65 | stderr=asyncio.subprocess.PIPE, 66 | stdin=asyncio.subprocess.PIPE, 67 | env={"PGPASSWORD": self.password}, 68 | ) 69 | async with aiofiles.open(file, "rb") as f: 70 | content = await f.read() 71 | stdout, stderr = await proc.communicate(content) 72 | if proc.returncode != 0: 73 | raise RuntimeError(f"psql failed with {proc.returncode}: {stderr.decode()}") 74 | self._check_error("psql", stdout) 75 | self._check_error("psql", stderr) 76 | -------------------------------------------------------------------------------- /databack/api/storage.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter, Depends, HTTPException 2 | from pydantic import BaseModel 3 | from starlette.status import HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST 4 | from tortoise.contrib.pydantic import pydantic_queryset_creator 5 | 6 | from databack import discover 7 | from databack.enums import StorageType 8 | from databack.models import Storage 9 | from databack.schema.request import Query 10 | from databack.storage import s3, ssh 11 | 12 | router = APIRouter() 13 | 14 | 15 | class GetStorageResponse(BaseModel): 16 | total: int 17 | data: pydantic_queryset_creator(Storage, exclude=("options",)) # type: ignore 18 | 19 | 20 | @router.get("", response_model=GetStorageResponse) 21 | async def get_storages( 22 | limit: int = 10, 23 | offset: int = 0, 24 | name: str = "", 25 | type: StorageType | None = None, 26 | query: Query = Depends(Query), 27 | ): 28 | qs = Storage.all() 29 | if name: 30 | qs = qs.filter(name__icontains=name) 31 | if type: 32 | qs = qs.filter(type=type) 33 | total = await qs.count() 34 | storages = ( 35 | await qs.only("id", "name", "type", "path", "created_at", "updated_at") 36 | .order_by(*query.orders) 37 | .limit(limit) 38 | .offset(offset) 39 | ) 40 | return {"total": total, "data": storages} 41 | 42 | 43 | @router.get("/basic") 44 | async def get_storage_basic(): 45 | data = await Storage.all().values("id", "name") 46 | return data 47 | 48 | 49 | @router.get("/{pk}") # type: ignore 50 | async def get_storage(pk: int): 51 | return await Storage.get(id=pk) 52 | 53 | 54 | class CreateStorageRequest(BaseModel): 55 | type: StorageType 56 | name: str 57 | path: str 58 | options: s3.S3Options | ssh.SSHOptions | None 59 | 60 | 61 | @router.post("", status_code=HTTP_201_CREATED) 62 | async def create_storage(body: CreateStorageRequest): 63 | storage_cls = discover.get_storage(body.type) 64 | storage_obj = storage_cls(options=body.options, path=body.path) # type: ignore 65 | if not await storage_obj.check(): 66 | raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Storage check failed") 67 | await Storage.create(**body.dict()) 68 | 69 | 70 | class UpdateStorageRequest(BaseModel): 71 | type: StorageType | None 72 | name: str | None 73 | 
path: str | None 74 | options: s3.S3Options | ssh.SSHOptions | None 75 | 76 | 77 | @router.patch("/{pk}", status_code=HTTP_204_NO_CONTENT) 78 | async def update_storage(pk: int, body: UpdateStorageRequest): 79 | storage = await Storage.get(id=pk) 80 | storage_cls = discover.get_storage(storage.type) 81 | storage_obj = storage_cls( 82 | options=body.options or storage.options_parsed, path=body.path or storage.path 83 | ) 84 | if not await storage_obj.check(): 85 | raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Storage check failed") 86 | await Storage.filter(id=pk).update(**body.dict(exclude_none=True)) 87 | 88 | 89 | @router.delete("/{pks}", status_code=HTTP_204_NO_CONTENT) 90 | async def delete_storage(pks: str): 91 | id_list = [int(pk) for pk in pks.split(",")] 92 | await Storage.filter(id__in=id_list).delete() 93 | -------------------------------------------------------------------------------- /databack/api/datasource.py: -------------------------------------------------------------------------------- 1 | import i18n 2 | from fastapi import APIRouter, Depends, HTTPException 3 | from pydantic import BaseModel 4 | from starlette.status import HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST 5 | from tortoise.contrib.pydantic import pydantic_queryset_creator 6 | from tortoise.exceptions import IntegrityError 7 | 8 | from databack.discover import get_data_source 9 | from databack.enums import DataSourceType 10 | from databack.models import DataSource 11 | from databack.schema.request import Query 12 | 13 | router = APIRouter() 14 | 15 | 16 | class GetDataSourceResponse(BaseModel): 17 | total: int 18 | data: pydantic_queryset_creator(DataSource, exclude=("options",)) # type: ignore 19 | 20 | 21 | @router.get("", response_model=GetDataSourceResponse) 22 | async def get_datasource( 23 | limit: int = 10, 24 | offset: int = 0, 25 | name: str = "", 26 | type: DataSourceType | None = None, 27 | query: Query = Depends(Query), 28 | ): 29 | qs = DataSource.all() 30 | if name: 31 | qs = qs.filter(name__icontains=name) 32 | if type: 33 | qs = qs.filter(type=type) 34 | total = await qs.count() 35 | data = ( 36 | await qs.only("id", "name", "type", "created_at", "updated_at") 37 | .order_by(*query.orders) 38 | .limit(limit) 39 | .offset(offset) 40 | ) 41 | return {"total": total, "data": data} 42 | 43 | 44 | @router.get("/basic") 45 | async def get_datasource_basic(): 46 | data = await DataSource.all().values("id", "name") 47 | return data 48 | 49 | 50 | @router.get("/{pk}") 51 | async def get_datasource_(pk: int): 52 | return await DataSource.get(id=pk) 53 | 54 | 55 | class CreateDataSourceRequest(BaseModel): 56 | type: DataSourceType 57 | name: str 58 | options: dict 59 | 60 | 61 | @router.post("", status_code=HTTP_201_CREATED) 62 | async def create_datasource(body: CreateDataSourceRequest): 63 | data_source_cls = get_data_source(body.type) 64 | data_source_obj = data_source_cls(**body.options) 65 | try: 66 | await data_source_obj.check() 67 | except Exception as e: 68 | raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=str(e)) 69 | try: 70 | await DataSource.create(**body.dict()) 71 | except IntegrityError: 72 | raise HTTPException( 73 | status_code=HTTP_400_BAD_REQUEST, 74 | detail=i18n.t("data_source_exists", name=body.name), 75 | ) 76 | 77 | 78 | class UpdateDataSourceRequest(BaseModel): 79 | type: str | None 80 | name: str | None 81 | options: dict | None 82 | 83 | 84 | @router.patch("/{pk}", status_code=HTTP_204_NO_CONTENT) 85 | async def 
update_datasource(pk: int, body: UpdateDataSourceRequest): 86 | data_source = await DataSource.get(id=pk) 87 | data_source_cls = get_data_source(data_source.type) 88 | data_source_obj = data_source_cls(**data_source.options) # type: ignore 89 | try: 90 | await data_source_obj.check() 91 | except Exception as e: 92 | raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=str(e)) 93 | try: 94 | await DataSource.filter(id=pk).update(**body.dict(exclude_none=True)) 95 | except IntegrityError: 96 | raise HTTPException( 97 | status_code=HTTP_400_BAD_REQUEST, 98 | detail=i18n.t("data_source_exists", name=body.name), 99 | ) 100 | 101 | 102 | @router.delete("/{pks}", status_code=HTTP_204_NO_CONTENT) 103 | async def delete_datasource(pks: str): 104 | id_list = [int(pk) for pk in pks.split(",")] 105 | await DataSource.filter(id__in=id_list).delete() 106 | -------------------------------------------------------------------------------- /databack/api/auth.py: -------------------------------------------------------------------------------- 1 | import i18n 2 | from fastapi import APIRouter, HTTPException, Security 3 | from fastapi_jwt import JwtAuthorizationCredentials 4 | from pydantic import BaseModel, EmailStr 5 | from starlette.requests import Request 6 | from starlette.status import HTTP_401_UNAUTHORIZED 7 | from tortoise import timezone 8 | 9 | from databack.auth import access_security 10 | from databack.auth import login as login_oauth 11 | from databack.auth import oauth, refresh_security, verify_password 12 | from databack.models import Admin 13 | from databack.settings import settings 14 | 15 | router = APIRouter() 16 | 17 | 18 | class LoginBody(BaseModel): 19 | email: EmailStr 20 | password: str 21 | 22 | 23 | @router.post("/login") 24 | async def login( 25 | body: LoginBody, 26 | ): 27 | admin = await Admin.filter(email=body.email).first() 28 | if not admin: 29 | raise HTTPException( 30 | status_code=HTTP_401_UNAUTHORIZED, detail=i18n.t("login.user_not_found") 31 | ) 32 | if not verify_password(body.password, admin.password): 33 | raise HTTPException( 34 | status_code=HTTP_401_UNAUTHORIZED, detail=i18n.t("login.password_error") 35 | ) 36 | admin.last_login_at = timezone.now() 37 | await admin.save(update_fields=["last_login_at"]) 38 | subject = { 39 | "id": admin.pk, 40 | } 41 | return { 42 | "access_token": access_security.create_access_token(subject=subject), 43 | "refresh_token": refresh_security.create_refresh_token(subject=subject), 44 | } 45 | 46 | 47 | @router.post("/refresh") 48 | async def refresh(credentials: JwtAuthorizationCredentials = Security(refresh_security)): 49 | access_token = access_security.create_access_token(subject=credentials.subject) 50 | refresh_token = refresh_security.create_refresh_token(subject=credentials.subject) 51 | return {"access_token": access_token, "refresh_token": refresh_token} 52 | 53 | 54 | class OauthResponse(BaseModel): 55 | type: str 56 | url: str 57 | 58 | 59 | @router.get("/oauth", response_model=list[OauthResponse]) 60 | async def oauth_login(request: Request, redirect_uri: str): 61 | ret = [] 62 | if settings.enable_github_oauth: 63 | client = oauth.github 64 | rv = await client.create_authorization_url(redirect_uri) 65 | await client.save_authorize_data(request, redirect_uri=redirect_uri, **rv) 66 | ret.append( 67 | { 68 | "type": "github", 69 | "url": rv["url"], 70 | } 71 | ) 72 | if settings.enable_google_oauth: 73 | client = oauth.google 74 | rv = await client.create_authorization_url(redirect_uri) 75 | await 
client.save_authorize_data(request, redirect_uri=redirect_uri, **rv) 76 | ret.append( 77 | { 78 | "type": "google", 79 | "url": rv["url"], 80 | } 81 | ) 82 | return ret 83 | 84 | 85 | @router.post("/google") 86 | async def auth_via_google(request: Request): 87 | token = await oauth.google.authorize_access_token(request) 88 | user = token["userinfo"] 89 | email = user["email"] 90 | nickname = user["name"] 91 | return await login_oauth( 92 | email, 93 | nickname, 94 | ) 95 | 96 | 97 | @router.post("/github") 98 | async def auth_via_github(request: Request): 99 | token = await oauth.github.authorize_access_token(request) 100 | res = await oauth.github.get("/user", token=token) 101 | ret = res.json() 102 | email = ret["email"] 103 | nickname = ret["name"] 104 | return await login_oauth( 105 | email, 106 | nickname, 107 | ) 108 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | ### Python template 2 | # Byte-compiled / optimized / DLL files 3 | __pycache__/ 4 | *.py[cod] 5 | *$py.class 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | cover/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | .pybuilder/ 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | # For a library or package, you might want to ignore these files since the code is 88 | # intended to run in multiple environments; otherwise, check them in: 89 | # .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # poetry 99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 100 | # This is especially recommended for binary packages to ensure reproducibility, and is more 101 | # commonly ignored for libraries. 102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 103 | #poetry.lock 104 | 105 | # pdm 106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
107 | #pdm.lock 108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 109 | # in version control. 110 | # https://pdm.fming.dev/#use-with-ide 111 | .pdm.toml 112 | 113 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 114 | __pypackages__/ 115 | 116 | # Celery stuff 117 | celerybeat-schedule 118 | celerybeat.pid 119 | 120 | # SageMath parsed files 121 | *.sage.py 122 | 123 | # Environments 124 | .env 125 | .venv 126 | env/ 127 | venv/ 128 | ENV/ 129 | env.bak/ 130 | venv.bak/ 131 | 132 | # Spyder project settings 133 | .spyderproject 134 | .spyproject 135 | 136 | # Rope project settings 137 | .ropeproject 138 | 139 | # mkdocs documentation 140 | /site 141 | 142 | # mypy 143 | .mypy_cache/ 144 | .dmypy.json 145 | dmypy.json 146 | 147 | # Pyre type checker 148 | .pyre/ 149 | 150 | # pytype static type analyzer 151 | .pytype/ 152 | 153 | # Cython debug symbols 154 | cython_debug/ 155 | 156 | # PyCharm 157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 159 | # and can be added to the global gitignore or merged into this file. For a more nuclear 160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 161 | .idea/ 162 | 163 | -------------------------------------------------------------------------------- /databack/api/task.py: -------------------------------------------------------------------------------- 1 | import i18n 2 | from fastapi import APIRouter, Depends, HTTPException 3 | from pydantic import BaseModel 4 | from starlette.status import HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST 5 | from tortoise.contrib.pydantic import pydantic_model_creator 6 | 7 | from databack import tasks 8 | from databack.depends import refresh_scheduler 9 | from databack.models import Task 10 | from databack.schema.request import Query 11 | 12 | router = APIRouter() 13 | 14 | 15 | class GetTaskResponse(BaseModel): 16 | class Task(pydantic_model_creator(Task)): # type: ignore 17 | storage_name: str 18 | data_source_name: str 19 | data_source_id: int 20 | storage_id: int 21 | 22 | total: int 23 | data: list[Task] 24 | 25 | 26 | @router.get("", response_model=GetTaskResponse) 27 | async def get_tasks( 28 | limit: int = 10, 29 | offset: int = 0, 30 | name: str = "", 31 | data_source_id: int | None = None, 32 | storage_id: int | None = None, 33 | compress: bool | None = None, 34 | enabled: bool | None = None, 35 | query: Query = Depends(Query), 36 | ): 37 | qs = Task.all() 38 | if name: 39 | qs = qs.filter(name__icontains=name) 40 | if data_source_id: 41 | qs = qs.filter(data_source_id=data_source_id) 42 | if storage_id: 43 | qs = qs.filter(storage_id=storage_id) 44 | if compress is not None: 45 | qs = qs.filter(compress=compress) 46 | if enabled is not None: 47 | qs = qs.filter(enabled=enabled) 48 | total = await qs.count() 49 | data = ( 50 | await qs.order_by(*query.orders) 51 | .limit(limit) 52 | .offset(offset) 53 | .values( 54 | "id", 55 | "name", 56 | "enabled", 57 | "cron", 58 | "created_at", 59 | "updated_at", 60 | "keep_num", 61 | "keep_days", 62 | "compress", 63 | "data_source_id", 64 | "storage_id", 65 | "sub_path", 66 | "next_run_at", 67 | storage_name="storage__name", 68 | data_source_name="data_source__name", 69 | ) 70 | ) 71 | return {"total": total, "data": data} 72 | 73 | 74 | class CreateTaskRequest(BaseModel): 75 | name: 
str 76 | storage_id: int 77 | data_source_id: int 78 | compress: bool = True 79 | keep_num: int = 0 80 | keep_days: int = 0 81 | enabled: bool = True 82 | sub_path: str = "" 83 | cron: str 84 | 85 | 86 | @router.post("", status_code=HTTP_201_CREATED, dependencies=[Depends(refresh_scheduler)]) 87 | async def create_task(body: CreateTaskRequest): 88 | await Task.create(**body.dict()) 89 | 90 | 91 | @router.post("/{pk}/run", status_code=HTTP_201_CREATED) 92 | async def run_task(pk: int): 93 | task = await Task.get(id=pk) 94 | if not task.enabled: 95 | raise HTTPException( 96 | status_code=HTTP_400_BAD_REQUEST, 97 | detail=i18n.t("task_disabled", name=task.name), 98 | ) 99 | await tasks.run_backup.delay(pk) 100 | 101 | 102 | class UpdateTaskRequest(BaseModel): 103 | name: str 104 | data_source_id: int 105 | storage_id: int 106 | keep_num: int 107 | keep_days: int 108 | sub_path: str 109 | enabled: bool 110 | cron: str 111 | 112 | 113 | @router.patch("/{pk}", status_code=HTTP_204_NO_CONTENT, dependencies=[Depends(refresh_scheduler)]) 114 | async def update_task(pk: int, body: UpdateTaskRequest): 115 | task = await Task.get(id=pk) 116 | old_cron = task.cron 117 | await task.update_from_dict(body.dict()).save() 118 | if old_cron != body.cron: 119 | await task.refresh_next_run_at() 120 | 121 | 122 | @router.delete("/{pks}", status_code=HTTP_204_NO_CONTENT, dependencies=[Depends(refresh_scheduler)]) 123 | async def delete_task( 124 | pks: str, 125 | ): 126 | ids = [int(pk) for pk in pks.split(",")] 127 | await Task.filter(id__in=ids).delete() 128 | -------------------------------------------------------------------------------- /databack/models.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | 3 | from crontab import CronTab 4 | from tortoise import Model, fields, timezone 5 | 6 | from databack.enums import DataSourceType, StorageType, TaskStatus 7 | from databack.storage.s3 import S3Options 8 | from databack.storage.ssh import SSHOptions 9 | from databack.validators import CronValidator, EmailValidator 10 | 11 | 12 | class BaseModel(Model): 13 | created_at = fields.DatetimeField(auto_now_add=True) 14 | updated_at = fields.DatetimeField(auto_now=True) 15 | 16 | class Meta: 17 | abstract = True 18 | 19 | 20 | class Storage(BaseModel): 21 | type = fields.CharEnumField(StorageType) 22 | name = fields.CharField(max_length=255, unique=True) 23 | path = fields.CharField(max_length=255, default="") 24 | options = fields.JSONField(null=True) 25 | 26 | @property 27 | def options_parsed(self): 28 | match self.type: 29 | case StorageType.local: 30 | return 31 | case StorageType.s3: 32 | return S3Options(**self.options) 33 | case StorageType.ssh: 34 | return SSHOptions(**self.options) 35 | case _: 36 | raise ValueError(f"Unknown storage type: {self.type}") 37 | 38 | 39 | class DataSource(BaseModel): 40 | type = fields.CharEnumField(DataSourceType) 41 | name = fields.CharField(max_length=255, unique=True) 42 | options = fields.JSONField() 43 | 44 | 45 | class Task(BaseModel): 46 | name = fields.CharField(max_length=255, unique=True) 47 | storage: fields.ForeignKeyRelation[Storage] = fields.ForeignKeyField("models.Storage") 48 | data_source: fields.ForeignKeyRelation[DataSource] = fields.ForeignKeyField("models.DataSource") 49 | compress = fields.BooleanField(default=True) 50 | keep_num = fields.IntField(default=0) 51 | keep_days = fields.IntField(default=0) 52 | enabled = fields.BooleanField(default=True) 53 | sub_path = 
fields.CharField(max_length=255, default="") 54 | cron = fields.CharField(max_length=255, validators=[CronValidator()]) 55 | next_run_at = fields.DatetimeField(null=True) 56 | 57 | async def refresh_next_run_at(self): 58 | if not self.cron: 59 | self.next_run_at = None 60 | else: 61 | cron = CronTab(self.cron) 62 | next_time = cron.next(default_utc=False) 63 | self.next_run_at = timezone.now() + timedelta(seconds=next_time) 64 | await self.save(update_fields=["next_run_at"]) 65 | 66 | 67 | class TaskLog(BaseModel): 68 | task: fields.ForeignKeyRelation[Task] = fields.ForeignKeyField("models.Task") 69 | status = fields.CharEnumField(TaskStatus) 70 | path = fields.CharField(max_length=255, null=True) 71 | size = fields.BigIntField(null=True) 72 | message = fields.TextField(null=True) 73 | is_deleted = fields.BooleanField(default=False) 74 | start_at = fields.DatetimeField() 75 | end_at = fields.DatetimeField(null=True) 76 | 77 | 78 | class RestoreLog(BaseModel): 79 | task_log: fields.ForeignKeyRelation[TaskLog] = fields.ForeignKeyField("models.TaskLog") 80 | message = fields.TextField(null=True) 81 | restore_type = fields.CharEnumField(DataSourceType) 82 | options = fields.JSONField() 83 | status = fields.CharEnumField(TaskStatus, default=TaskStatus.running) 84 | start_at = fields.DatetimeField() 85 | end_at = fields.DatetimeField(null=True) 86 | 87 | 88 | class Admin(BaseModel): 89 | nickname = fields.CharField(max_length=255) 90 | email = fields.CharField(max_length=255, unique=True, validators=[EmailValidator()]) 91 | last_login_at = fields.DatetimeField(null=True) 92 | password = fields.CharField(max_length=255) 93 | is_superuser = fields.BooleanField(default=False) 94 | is_active = fields.BooleanField(default=True) 95 | 96 | 97 | class ActionLog(BaseModel): 98 | admin: fields.ForeignKeyRelation[Admin] = fields.ForeignKeyField("models.Admin") 99 | ip = fields.CharField(max_length=255) 100 | content = fields.JSONField() 101 | path = fields.CharField(max_length=255) 102 | method = fields.CharField(max_length=10) 103 | -------------------------------------------------------------------------------- /databack/api/admin.py: -------------------------------------------------------------------------------- 1 | import i18n 2 | from fastapi import APIRouter, Depends, HTTPException, Security 3 | from fastapi_jwt import JwtAuthorizationCredentials 4 | from pydantic import BaseModel, EmailStr 5 | from starlette.status import HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST 6 | from tortoise.contrib.pydantic import pydantic_model_creator, pydantic_queryset_creator 7 | from tortoise.exceptions import IntegrityError 8 | from tortoise.expressions import Q 9 | 10 | from databack import auth 11 | from databack.auth import access_security, get_password_hash 12 | from databack.depends import get_current_admin, superuser_required 13 | from databack.models import Admin 14 | from databack.schema.request import Query 15 | 16 | router = APIRouter() 17 | 18 | 19 | class GetAdminResponse(BaseModel): 20 | total: int 21 | data: pydantic_queryset_creator( # type: ignore 22 | Admin, 23 | ) 24 | 25 | 26 | @router.get("", response_model=GetAdminResponse) 27 | async def get_admin( 28 | search: str | None = None, 29 | is_active: bool | None = None, 30 | is_superuser: bool | None = None, 31 | query: Query = Depends(Query), 32 | ): 33 | qs = Admin.all() 34 | if search: 35 | qs = qs.filter(Q(nickname__icontains=search) | Q(email__icontains=search)) 36 | if is_active is not None: 37 | qs = qs.filter(is_active=is_active) 38 | if 
is_superuser is not None: 39 | qs = qs.filter(is_superuser=is_superuser) 40 | total = await qs.count() 41 | data = await qs.limit(query.limit).offset(query.offset).order_by(*query.orders) 42 | return {"total": total, "data": data} 43 | 44 | 45 | @router.get("/me", response_model=pydantic_model_creator(Admin, exclude=("password",))) 46 | async def get_me( 47 | credentials: JwtAuthorizationCredentials = Security(access_security), 48 | ): 49 | pk = credentials.subject["id"] 50 | admin = await Admin.get(pk=pk) 51 | return admin 52 | 53 | 54 | @router.delete("/{pks}", dependencies=[Depends(superuser_required)]) 55 | async def delete_admins(pks: str): 56 | id_list = [int(pk) for pk in pks.split(",")] 57 | await Admin.filter(id__in=id_list).delete() 58 | 59 | 60 | class CreateAdminBody(BaseModel): 61 | email: EmailStr 62 | password: str 63 | nickname: str 64 | is_superuser: bool 65 | is_active: bool 66 | 67 | 68 | @router.post("", dependencies=[Depends(superuser_required)]) 69 | async def create_admin(body: CreateAdminBody): 70 | try: 71 | await Admin.create( 72 | email=body.email, 73 | password=get_password_hash(body.password), 74 | nickname=body.nickname, 75 | is_superuser=body.is_superuser, 76 | is_active=body.is_active, 77 | ) 78 | except IntegrityError: 79 | raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=i18n.t("admin_exists")) 80 | 81 | 82 | class UpdateAdminBody(BaseModel): 83 | email: EmailStr | None 84 | password: str | None 85 | nickname: str | None 86 | is_superuser: bool | None 87 | is_active: bool | None 88 | 89 | 90 | class ChangePasswordBody(BaseModel): 91 | old_password: str 92 | new_password: str 93 | 94 | 95 | @router.patch("/password", status_code=HTTP_204_NO_CONTENT) 96 | async def change_password(body: ChangePasswordBody, admin: Admin = Depends(get_current_admin)): 97 | if not auth.verify_password(body.old_password, admin.password): 98 | raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=i18n.t("password_incorrect")) 99 | admin.password = get_password_hash(body.new_password) 100 | await admin.save(update_fields=["password"]) 101 | 102 | 103 | @router.patch("/{pk}", dependencies=[Depends(superuser_required)]) 104 | async def update_admin(pk: int, body: UpdateAdminBody): 105 | admin = await Admin.get(pk=pk) 106 | if body.email: 107 | admin.email = body.email 108 | if body.password: 109 | admin.password = get_password_hash(body.password) 110 | if body.nickname: 111 | admin.nickname = body.nickname 112 | if body.is_superuser is not None: 113 | admin.is_superuser = body.is_superuser 114 | if body.is_active is not None: 115 | admin.is_active = body.is_active 116 | await admin.save() 117 | -------------------------------------------------------------------------------- /databack/tasks.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | import tempfile 3 | from datetime import timedelta 4 | 5 | import aioshutil 6 | from loguru import logger 7 | from rearq import ReArq 8 | from rearq.constants import JOB_TIMEOUT_UNLIMITED 9 | from tortoise import Tortoise, timezone 10 | 11 | from databack.discover import get_data_source, get_storage 12 | from databack.enums import TaskStatus 13 | from databack.models import RestoreLog, Task, TaskLog 14 | from databack.settings import TORTOISE_ORM, settings 15 | from databack.utils import get_file_size 16 | 17 | rearq = ReArq( 18 | redis_url=settings.REDIS_URL, 19 | keep_job_days=7, 20 | job_retry=0, 21 | raise_job_error=True, 22 | expire=60, 23 | ) 24 | 25 | 26 | 
@rearq.on_startup 27 | async def startup(): 28 | await Tortoise.init(config=TORTOISE_ORM) 29 | await Tortoise.generate_schemas() 30 | 31 | 32 | @rearq.on_shutdown 33 | async def shutdown(): 34 | await Tortoise.close_connections() 35 | 36 | 37 | @rearq.task(job_timeout=JOB_TIMEOUT_UNLIMITED) 38 | async def run_backup(pk: int): 39 | started_at = timezone.now() 40 | task = await Task.get(pk=pk, enabled=True).select_related("data_source", "storage") 41 | task_log = await TaskLog.create( 42 | task=task, 43 | status=TaskStatus.running, 44 | start_at=started_at, 45 | ) 46 | data_source = task.data_source 47 | storage = task.storage 48 | try: 49 | data_source_cls = get_data_source(data_source.type) 50 | data_source_obj = data_source_cls( 51 | compress=task.compress, **data_source.options # type: ignore 52 | ) 53 | storage_cls = get_storage(storage.type) 54 | storage_path = storage.path 55 | sub_path = task.sub_path 56 | storage_obj = storage_cls( 57 | options=storage.options_parsed, path=os.path.join(storage_path, sub_path) 58 | ) 59 | backup = await data_source_obj.get_backup() 60 | task_log.size = await get_file_size(backup) 61 | file = await storage_obj.upload(backup) 62 | await aioshutil.rmtree(os.path.dirname(backup)) 63 | task_log.status = TaskStatus.success 64 | task_log.path = file 65 | task_log.end_at = timezone.now() 66 | except Exception as e: 67 | logger.exception(e) 68 | task_log.status = TaskStatus.failed 69 | task_log.message = str(e) 70 | await task_log.save() 71 | if task_log.status == TaskStatus.success: 72 | qs = TaskLog.filter(task=task, status=TaskStatus.success, is_deleted=False) 73 | total_success = await qs.count() 74 | if 0 < task.keep_num < total_success: 75 | if task.keep_days > 0: 76 | qs = qs.filter(end_at__lte=timezone.now() - timedelta(days=task.keep_days)) 77 | task_logs_to_be_deleted = await qs.order_by("id").limit(total_success - task.keep_num) 78 | for task_log_to_be_deleted in task_logs_to_be_deleted: 79 | await storage_obj.delete(task_log_to_be_deleted.path) 80 | task_log_to_be_deleted.is_deleted = True 81 | await task_log_to_be_deleted.save(update_fields=["is_deleted"]) 82 | return task_log.pk 83 | 84 | 85 | @rearq.task(job_timeout=JOB_TIMEOUT_UNLIMITED) 86 | async def run_restore(pk: int): 87 | restore_log = await RestoreLog.get(pk=pk).select_related("task_log__task__storage") 88 | task_log = restore_log.task_log # type: TaskLog 89 | task = task_log.task 90 | if task_log.is_deleted or task_log.status != TaskStatus.success: 91 | return "TaskLog is deleted or not success" 92 | storage = task_log.task.storage 93 | data_source_cls = get_data_source(restore_log.restore_type) 94 | data_source_obj = data_source_cls(compress=task.compress, **restore_log.options) # type: ignore 95 | storage_cls = get_storage(storage.type) 96 | local_path = tempfile.mkdtemp() 97 | try: 98 | local_file = os.path.join(local_path, os.path.basename(task_log.path)) 99 | storage_obj = storage_cls(options=storage.options_parsed, path=local_file) 100 | await storage_obj.download(task_log.path) 101 | await data_source_obj.restore(local_file) 102 | await aioshutil.rmtree(local_path) 103 | await aioshutil.rmtree(os.path.dirname(local_file)) 104 | restore_log.status = TaskStatus.success 105 | except Exception as e: 106 | logger.exception(e) 107 | restore_log.status = TaskStatus.failed 108 | restore_log.message = str(e) 109 | restore_log.end_at = timezone.now() 110 | await restore_log.save() 111 | -------------------------------------------------------------------------------- 
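The retention pass at the end of `run_backup` above only prunes old backups when `keep_num` is positive and exceeded, and additionally restricts pruning to logs older than `keep_days` when that is non-zero. A minimal pure-Python sketch of that selection rule follows; the function name and signature are illustrative only and not part of the repo (the real code works on `TaskLog` rows ordered by id):

# Illustrative sketch, not repo code: which successful backup logs run_backup would prune.
from datetime import datetime, timedelta


def logs_to_prune(
    end_times: list[datetime],  # end_at of successful, not-yet-deleted logs, oldest first
    keep_num: int,
    keep_days: int,
    now: datetime,
) -> list[datetime]:
    total_success = len(end_times)
    # Nothing is pruned unless keep_num is positive and the number of backups exceeds it.
    if not (0 < keep_num < total_success):
        return []
    candidates = end_times
    if keep_days > 0:
        # When keep_days is set, only backups older than the cutoff are eligible.
        cutoff = now - timedelta(days=keep_days)
        candidates = [t for t in end_times if t <= cutoff]
    # Drop the oldest eligible logs so that at most keep_num successful backups remain.
    return candidates[: total_success - keep_num]

For example, with keep_num=3, keep_days=0 and five successful logs, the two oldest would be selected for pruning, mirroring what run_backup does with its storage delete plus is_deleted flag.
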
/migrations/models/0_20230419142428_init.py: -------------------------------------------------------------------------------- 1 | from tortoise import BaseDBAsyncClient 2 | 3 | 4 | async def upgrade(db: BaseDBAsyncClient) -> str: 5 | return """ 6 | CREATE TABLE IF NOT EXISTS `admin` ( 7 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 8 | `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), 9 | `updated_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), 10 | `nickname` VARCHAR(255) NOT NULL, 11 | `email` VARCHAR(255) NOT NULL UNIQUE, 12 | `last_login_at` DATETIME(6), 13 | `password` VARCHAR(255) NOT NULL, 14 | `is_superuser` BOOL NOT NULL DEFAULT 0, 15 | `is_active` BOOL NOT NULL DEFAULT 1 16 | ) CHARACTER SET utf8mb4; 17 | CREATE TABLE IF NOT EXISTS `actionlog` ( 18 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 19 | `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), 20 | `updated_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), 21 | `ip` VARCHAR(255) NOT NULL, 22 | `content` JSON NOT NULL, 23 | `path` VARCHAR(255) NOT NULL, 24 | `method` VARCHAR(10) NOT NULL, 25 | `admin_id` INT NOT NULL, 26 | CONSTRAINT `fk_actionlo_admin_d6fe934d` FOREIGN KEY (`admin_id`) REFERENCES `admin` (`id`) ON DELETE CASCADE 27 | ) CHARACTER SET utf8mb4; 28 | CREATE TABLE IF NOT EXISTS `datasource` ( 29 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 30 | `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), 31 | `updated_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), 32 | `type` VARCHAR(8) NOT NULL COMMENT 'mysql: mysql\npostgres: postgres\nlocal: local\nssh: ssh\nmongo: mongo\nredis: redis', 33 | `name` VARCHAR(255) NOT NULL UNIQUE, 34 | `options` JSON NOT NULL 35 | ) CHARACTER SET utf8mb4; 36 | CREATE TABLE IF NOT EXISTS `storage` ( 37 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 38 | `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), 39 | `updated_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), 40 | `type` VARCHAR(5) NOT NULL COMMENT 'local: local\nssh: ssh\ns3: s3', 41 | `name` VARCHAR(255) NOT NULL UNIQUE, 42 | `path` VARCHAR(255) NOT NULL DEFAULT '', 43 | `options` JSON NOT NULL 44 | ) CHARACTER SET utf8mb4; 45 | CREATE TABLE IF NOT EXISTS `task` ( 46 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 47 | `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), 48 | `updated_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), 49 | `name` VARCHAR(255) NOT NULL UNIQUE, 50 | `compress` BOOL NOT NULL DEFAULT 1, 51 | `keep_num` INT NOT NULL DEFAULT 0, 52 | `keep_days` INT NOT NULL DEFAULT 0, 53 | `enabled` BOOL NOT NULL DEFAULT 1, 54 | `sub_path` VARCHAR(255) NOT NULL DEFAULT '', 55 | `cron` VARCHAR(255) NOT NULL, 56 | `next_run_at` DATETIME(6), 57 | `data_source_id` INT NOT NULL, 58 | `storage_id` INT NOT NULL, 59 | CONSTRAINT `fk_task_datasour_a6c05e58` FOREIGN KEY (`data_source_id`) REFERENCES `datasource` (`id`) ON DELETE CASCADE, 60 | CONSTRAINT `fk_task_storage_da90270e` FOREIGN KEY (`storage_id`) REFERENCES `storage` (`id`) ON DELETE CASCADE 61 | ) CHARACTER SET utf8mb4; 62 | CREATE TABLE IF NOT EXISTS `tasklog` ( 63 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 64 | `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), 65 | `updated_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), 66 | `status` VARCHAR(7) NOT NULL 
COMMENT 'success: success\nfailed: failed\nrunning: running', 67 | `path` VARCHAR(255), 68 | `size` INT, 69 | `message` LONGTEXT, 70 | `is_deleted` BOOL NOT NULL DEFAULT 0, 71 | `start_at` DATETIME(6) NOT NULL, 72 | `end_at` DATETIME(6), 73 | `task_id` INT NOT NULL, 74 | CONSTRAINT `fk_tasklog_task_ed604b07` FOREIGN KEY (`task_id`) REFERENCES `task` (`id`) ON DELETE CASCADE 75 | ) CHARACTER SET utf8mb4; 76 | CREATE TABLE IF NOT EXISTS `restorelog` ( 77 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 78 | `created_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), 79 | `updated_at` DATETIME(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), 80 | `message` LONGTEXT, 81 | `restore_type` VARCHAR(8) NOT NULL COMMENT 'mysql: mysql\npostgres: postgres\nlocal: local\nssh: ssh\nmongo: mongo\nredis: redis', 82 | `options` JSON NOT NULL, 83 | `status` VARCHAR(7) NOT NULL COMMENT 'success: success\nfailed: failed\nrunning: running' DEFAULT 'running', 84 | `start_at` DATETIME(6) NOT NULL, 85 | `end_at` DATETIME(6), 86 | `task_log_id` INT NOT NULL, 87 | CONSTRAINT `fk_restorel_tasklog_452898fe` FOREIGN KEY (`task_log_id`) REFERENCES `tasklog` (`id`) ON DELETE CASCADE 88 | ) CHARACTER SET utf8mb4; 89 | CREATE TABLE IF NOT EXISTS `aerich` ( 90 | `id` INT NOT NULL PRIMARY KEY AUTO_INCREMENT, 91 | `version` VARCHAR(255) NOT NULL, 92 | `app` VARCHAR(100) NOT NULL, 93 | `content` JSON NOT NULL 94 | ) CHARACTER SET utf8mb4;""" 95 | 96 | 97 | async def downgrade(db: BaseDBAsyncClient) -> str: 98 | return """ 99 | """ 100 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------