├── agents ├── __init__.py ├── test │ ├── __init__.py │ ├── finance_agent.py │ ├── research_agent.py │ └── basic.py ├── settings.py ├── calculator.py ├── finance.py ├── youtube.py ├── web_search.py └── research.py ├── api ├── __init__.py ├── routes │ ├── __init__.py │ ├── v1_router.py │ ├── health.py │ └── playground.py ├── main.py └── settings.py ├── db ├── __init__.py ├── migrations │ ├── versions │ │ └── .gitkeep │ ├── README │ ├── script.py.mako │ └── env.py ├── tables │ ├── __init__.py │ └── base.py ├── session.py ├── settings.py ├── README.md └── alembic.ini ├── tests ├── __init__.py └── evals │ └── test_calculator.py ├── utils ├── __init__.py ├── dttm.py └── log.py ├── workspace ├── __init__.py ├── .gitignore ├── example_secrets │ ├── prd_api_secrets.yml │ ├── prd_db_secrets.yml │ └── dev_api_secrets.yml ├── settings.py ├── dev_resources.py └── prd_resources.py ├── example.env ├── scripts ├── auth_ecr.sh ├── test.sh ├── _utils.sh ├── format.sh ├── build_prd_image.sh ├── build_dev_image.sh ├── validate.sh ├── install.sh ├── generate_requirements.sh └── entrypoint.sh ├── .editorconfig ├── .dockerignore ├── .gitignore ├── Dockerfile ├── .github └── workflows │ ├── docker-images.yml │ ├── validate.yml │ └── ecr-images.yml ├── pyproject.toml ├── README.md ├── requirements.txt └── LICENSE /agents/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /api/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /db/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /agents/test/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /api/routes/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /workspace/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /db/migrations/versions/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /db/migrations/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. 
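2 | See db/README.md for how migrations are created and applied in this workspace.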
-------------------------------------------------------------------------------- /db/tables/__init__.py: -------------------------------------------------------------------------------- 1 | from db.tables.base import Base 2 | -------------------------------------------------------------------------------- /workspace/.gitignore: -------------------------------------------------------------------------------- 1 | # ignore inputs 2 | inputs 3 | 4 | # ignore outputs 5 | output 6 | 7 | # ignore secrets 8 | secrets 9 | -------------------------------------------------------------------------------- /workspace/example_secrets/prd_api_secrets.yml: -------------------------------------------------------------------------------- 1 | SECRET_KEY: "very_secret" 2 | # PHI_API_KEY: "sk-***" 3 | # OPENAI_API_KEY: "sk-***" 4 | -------------------------------------------------------------------------------- /workspace/example_secrets/prd_db_secrets.yml: -------------------------------------------------------------------------------- 1 | # Secrets used by RDS Database 2 | MASTER_USERNAME: api 3 | MASTER_USER_PASSWORD: "api9999!!" 4 | -------------------------------------------------------------------------------- /example.env: -------------------------------------------------------------------------------- 1 | # IMAGE_REPO=repo 2 | # BUILD_IMAGES=True 3 | # PUSH_IMAGES=True 4 | # AWS_PROFILE=ai-demos 5 | # PHI_API_KEY=sk-*** 6 | # OPENAI_API_KEY=sk-*** 7 | -------------------------------------------------------------------------------- /agents/test/finance_agent.py: -------------------------------------------------------------------------------- 1 | from agents.finance import get_finance_agent 2 | 3 | finance_agent = get_finance_agent() 4 | 5 | finance_agent.print_response("Tell me NVDA's stock price.", stream=True) 6 | -------------------------------------------------------------------------------- /agents/test/research_agent.py: -------------------------------------------------------------------------------- 1 | from agents.research import get_research_agent 2 | 3 | research_agent = get_research_agent() 4 | 5 | research_agent.print_response("Tell me about simulation theory", stream=True) 6 | -------------------------------------------------------------------------------- /scripts/auth_ecr.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # Authenticate with ecr 6 | aws ecr get-login-password --region [AWS_REGION] | docker login --username AWS --password-stdin [AWS_ACCOUNT_ID].dkr.ecr.[AWS_REGION].amazonaws.com 7 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_size = 2 5 | indent_style = space 6 | end_of_line = lf 7 | charset = utf-8 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | 11 | [*.py] 12 | indent_size = 4 13 | -------------------------------------------------------------------------------- /workspace/example_secrets/dev_api_secrets.yml: -------------------------------------------------------------------------------- 1 | SECRET_KEY: "very_secret" 2 | # PHI_API_KEY: "sk-***" 3 | # OPENAI_API_KEY: "sk-***" 4 | 5 | # AWS credentials 6 | # AWS_ACCESS_KEY_ID: "AWS_ACCESS_KEY_ID" 7 | # AWS_SECRET_ACCESS_KEY: "AWS_SECRET_ACCESS_KEY" 8 | -------------------------------------------------------------------------------- /agents/test/basic.py: 
-------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.model.openai import OpenAIChat 3 | 4 | agent = Agent(model=OpenAIChat(id="gpt-4o"), markdown=True) 5 | 6 | # Print the response in the terminal 7 | agent.print_response("Share a 2 sentence horror story") 8 | -------------------------------------------------------------------------------- /utils/dttm.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | 3 | 4 | def current_utc() -> datetime: 5 | return datetime.now(timezone.utc) 6 | 7 | 8 | def current_utc_str(format: str = "%Y-%m-%dT%H:%M:%S.%fZ") -> str: 9 | return current_utc().strftime(format) 10 | -------------------------------------------------------------------------------- /api/routes/v1_router.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | from api.routes.playground import playground_router 4 | from api.routes.health import health_check_router 5 | 6 | v1_router = APIRouter(prefix="/v1") 7 | v1_router.include_router(playground_router) 8 | v1_router.include_router(health_check_router) 9 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | 3 | docs 4 | .git 5 | 6 | # Cache 7 | .mypy_cache 8 | .ruff_cache 9 | .pytest_cache 10 | *__pycache__* 11 | *.egg-info 12 | *.pyc 13 | 14 | # Machine specific 15 | .idea 16 | .vscode 17 | 18 | # Ignore .env files 19 | .env 20 | .envrc 21 | 22 | # ignore virtualenvs 23 | .venv 24 | venv* 25 | aienv* 26 | 27 | .ipynb_checkpoints 28 | -------------------------------------------------------------------------------- /db/tables/base.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import MetaData 2 | from sqlalchemy.orm import DeclarativeBase 3 | 4 | 5 | class Base(DeclarativeBase): 6 | """ 7 | Base class for SQLAlchemy model definitions. 8 | 9 | https://fastapi.tiangolo.com/tutorial/sql-databases/#create-a-base-class 10 | https://docs.sqlalchemy.org/en/20/orm/mapping_api.html#sqlalchemy.orm.DeclarativeBase 11 | """ 12 | 13 | metadata = MetaData(schema="public") 14 | -------------------------------------------------------------------------------- /scripts/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ############################################################################ 4 | # Test workspace using pytest: 5 | # Usage: ./scripts/test.sh 6 | ############################################################################ 7 | 8 | CURR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 9 | REPO_ROOT="$(dirname $CURR_DIR)" 10 | source ${CURR_DIR}/_utils.sh 11 | 12 | print_heading "Testing workspace..." 
13 | pytest ${REPO_ROOT} 14 | -------------------------------------------------------------------------------- /scripts/_utils.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ############################################################################ 4 | # Helper functions to import in other scripts 5 | ############################################################################ 6 | 7 | print_horizontal_line() { 8 | echo "------------------------------------------------------------" 9 | } 10 | 11 | print_heading() { 12 | print_horizontal_line 13 | echo "-*- $1" 14 | print_horizontal_line 15 | } 16 | 17 | print_status() { 18 | echo "-*- $1" 19 | } 20 | -------------------------------------------------------------------------------- /scripts/format.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ############################################################################ 4 | # Format workspace using ruff 5 | # Usage: ./scripts/format.sh 6 | ############################################################################ 7 | 8 | CURR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 9 | REPO_ROOT="$(dirname $CURR_DIR)" 10 | source ${CURR_DIR}/_utils.sh 11 | 12 | print_heading "Formatting workspace..." 13 | print_heading "Running: ruff format ${REPO_ROOT}" 14 | ruff format ${REPO_ROOT} 15 | -------------------------------------------------------------------------------- /agents/settings.py: -------------------------------------------------------------------------------- 1 | from pydantic_settings import BaseSettings 2 | 3 | 4 | class AgentSettings(BaseSettings): 5 | """Agent settings that can be set using environment variables. 6 | 7 | Reference: https://pydantic-docs.helpmanual.io/usage/settings/ 8 | """ 9 | 10 | gpt_4: str = "gpt-4o" 11 | embedding_model: str = "text-embedding-3-small" 12 | default_max_completion_tokens: int = 16000 13 | default_temperature: float = 0 14 | 15 | 16 | # Create an AgentSettings object 17 | agent_settings = AgentSettings() 18 | -------------------------------------------------------------------------------- /scripts/build_prd_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | CURR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 6 | WS_ROOT="$(dirname ${CURR_DIR})" 7 | DOCKERFILE="Dockerfile" 8 | REPO="repo" 9 | NAME="demo-agents" 10 | TAG="prd" 11 | 12 | # Run docker buildx create --use before running this script 13 | echo "Running: docker buildx build --platform=linux/amd64,linux/arm64 -t $REPO/$NAME:$TAG -f $DOCKERFILE $WS_ROOT --push" 14 | docker buildx build --platform=linux/amd64,linux/arm64 -t $REPO/$NAME:$TAG -f $DOCKERFILE $WS_ROOT --push 15 | -------------------------------------------------------------------------------- /scripts/build_dev_image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | CURR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 6 | WS_ROOT="$(dirname ${CURR_DIR})" 7 | DOCKERFILE="Dockerfile" 8 | REPO="phidata" 9 | NAME="demo-agents" 10 | TAG="dev" 11 | 12 | # Run docker buildx create --use before running this script 13 | echo "Running: docker buildx build --platform=linux/amd64,linux/arm64 -t $REPO/$NAME:$TAG -f $DOCKERFILE $WS_ROOT --push" 14 | docker buildx build --platform=linux/amd64,linux/arm64 -t $REPO/$NAME:$TAG -f $DOCKERFILE $WS_ROOT --push 15 | 
-------------------------------------------------------------------------------- /tests/evals/test_calculator.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from phi.eval import Eval, EvalResult 4 | 5 | from agents.calculator import get_calculator_agent 6 | 7 | 8 | def test_9_11_bigger_or_9_9(): 9 | evaluation = Eval( 10 | agent=get_calculator_agent(), 11 | question="Is 9.11 bigger or 9.9?", 12 | expected_answer="9.11 is smaller than 9.9", 13 | ) 14 | result: Optional[EvalResult] = evaluation.print_result() 15 | 16 | assert result is not None and result.accuracy_score >= 8 17 | 18 | 19 | # Guard the direct call so the eval only runs when executed as a script; pytest discovers the test function on its own 20 | if __name__ == "__main__": 21 | test_9_11_bigger_or_9_9() 22 | -------------------------------------------------------------------------------- /api/routes/health.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter 2 | 3 | from utils.dttm import current_utc_str 4 | 5 | ###################################################### 6 | ## Router for health checks 7 | ###################################################### 8 | 9 | health_check_router = APIRouter(tags=["Health"]) 10 | 11 | 12 | @health_check_router.get("/health") 13 | def get_health(): 14 | """Check the health of the Api""" 15 | 16 | return { 17 | "status": "success", 18 | "router": "health", 19 | "path": "/health", 20 | "utc": current_utc_str(), 21 | } 22 | -------------------------------------------------------------------------------- /db/migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade() -> None: 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade() -> None: 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /scripts/validate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ############################################################################ 4 | # Validate workspace using ruff and mypy: 5 | # 1. Lint using ruff 6 | # 2. Type check using mypy 7 | # Usage: ./scripts/validate.sh 8 | ############################################################################ 9 | 10 | CURR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 11 | REPO_ROOT="$(dirname $CURR_DIR)" 12 | source ${CURR_DIR}/_utils.sh 13 | 14 | print_heading "Validating workspace..." 15 | print_heading "Running: ruff check ${REPO_ROOT}" 16 | ruff check ${REPO_ROOT} 17 | print_heading "Running: mypy ${REPO_ROOT}" 18 | mypy ${REPO_ROOT} 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/ignore-files/ for more about ignoring files.
2 | 3 | .DS_Store 4 | 5 | # Python cache 6 | .mypy_cache 7 | *__pycache__* 8 | *.egg-info 9 | *.pyc 10 | *.pytest_cache 11 | *.ruff_cache 12 | *.cache* 13 | *.config* 14 | 15 | # Machine specific 16 | .idea 17 | .vscode 18 | 19 | # Ignore .env files 20 | .env 21 | .envrc 22 | 23 | # ignore storage dir 24 | storage 25 | 26 | # ignore .local dir 27 | .local 28 | 29 | # ignore dist dir 30 | dist 31 | 32 | # ignore virtualenvs 33 | .venv 34 | venv* 35 | aienv* 36 | apienv* 37 | appenv* 38 | llmenv* 39 | 40 | # ignore jupyter checkpoints 41 | .ipynb_checkpoints 42 | .Trash* 43 | 44 | # ignore uv.lock 45 | uv.lock 46 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM phidata/python:3.12 2 | 3 | ARG USER=app 4 | ARG APP_DIR=/app 5 | ENV APP_DIR=${APP_DIR} 6 | 7 | # Create user and home directory 8 | RUN groupadd -g 61000 ${USER} \ 9 | && useradd -g 61000 -u 61000 -ms /bin/bash -d ${APP_DIR} ${USER} 10 | 11 | WORKDIR ${APP_DIR} 12 | 13 | # Copy requirements.txt 14 | COPY requirements.txt ./ 15 | 16 | # Install requirements 17 | RUN --mount=type=cache,target=/root/.cache/uv \ 18 | uv pip sync requirements.txt --system 19 | 20 | # Copy project files 21 | COPY . . 22 | 23 | # Set permissions for the /app directory 24 | RUN chown -R ${USER}:${USER} ${APP_DIR} 25 | 26 | # Switch to non-root user 27 | USER ${USER} 28 | 29 | ENTRYPOINT ["/app/scripts/entrypoint.sh"] 30 | CMD ["chill"] 31 | -------------------------------------------------------------------------------- /db/session.py: -------------------------------------------------------------------------------- 1 | from typing import Generator 2 | 3 | from sqlalchemy.engine import Engine, create_engine 4 | from sqlalchemy.orm import Session, sessionmaker 5 | 6 | from db.settings import db_settings 7 | 8 | # Create SQLAlchemy Engine using a database URL 9 | db_url: str = db_settings.get_db_url() 10 | db_engine: Engine = create_engine(db_url, pool_pre_ping=True) 11 | 12 | # Create a SessionLocal class 13 | # https://fastapi.tiangolo.com/tutorial/sql-databases/#create-a-sessionlocal-class 14 | SessionLocal: sessionmaker[Session] = sessionmaker(autocommit=False, autoflush=False, bind=db_engine) 15 | 16 | 17 | def get_db() -> Generator[Session, None, None]: 18 | """ 19 | Dependency to get a database session. 20 | 21 | Yields: 22 | Session: An SQLAlchemy database session. 
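Typical FastAPI usage (illustrative; `Depends` is imported from fastapi): db: Session = Depends(get_db)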
23 | """ 24 | db: Session = SessionLocal() 25 | try: 26 | yield db 27 | finally: 28 | db.close() 29 | -------------------------------------------------------------------------------- /api/routes/playground.py: -------------------------------------------------------------------------------- 1 | from os import getenv 2 | from phi.playground import Playground 3 | 4 | from agents.finance import get_finance_agent 5 | from agents.research import get_research_agent 6 | from agents.web_search import get_web_search_agent 7 | from agents.youtube import get_youtube_agent 8 | 9 | ###################################################### 10 | ## Router for the agent playground 11 | ###################################################### 12 | 13 | finance_agent = get_finance_agent(debug_mode=True) 14 | research_agent = get_research_agent(debug_mode=True) 15 | web_search_agent = get_web_search_agent(debug_mode=True) 16 | youtube_agent = get_youtube_agent(debug_mode=True) 17 | 18 | # Create a playground instance 19 | playground = Playground(agents=[web_search_agent, research_agent, finance_agent, youtube_agent]) 20 | # Log the playground endpoint with phidata.app 21 | if getenv("RUNTIME_ENV") == "dev": 22 | playground.create_endpoint("http://localhost:8000") 23 | 24 | playground_router = playground.get_router() 25 | -------------------------------------------------------------------------------- /.github/workflows/docker-images.yml: -------------------------------------------------------------------------------- 1 | name: Build Docker Images 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | permissions: 8 | contents: read 9 | 10 | jobs: 11 | build-api-image: 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - name: Checkout 16 | uses: actions/checkout@v4 17 | 18 | - name: Set up QEMU 19 | uses: docker/setup-qemu-action@v3 20 | 21 | - name: Set up Docker Buildx 22 | uses: docker/setup-buildx-action@v3 23 | 24 | - name: Docker Login 25 | uses: docker/login-action@v3 26 | with: 27 | username: ${{ secrets.DOCKERHUB_USERNAME }} 28 | password: ${{ secrets.DOCKERHUB_TOKEN }} 29 | 30 | - name: Build and push 31 | uses: docker/build-push-action@v5 32 | with: 33 | context: . 34 | file: Dockerfile 35 | platforms: linux/amd64,linux/arm64 36 | push: true 37 | tags: ${{ vars.DOCKERHUB_NAMESPACE }}/demo-agents:dev, ${{ vars.DOCKERHUB_NAMESPACE }}/demo-agents:prd 38 | -------------------------------------------------------------------------------- /scripts/install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ############################################################################ 4 | # Create a venv and install workspace dependencies. 5 | # Usage: ./scripts/install.sh 6 | ############################################################################ 7 | 8 | CURR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 9 | REPO_ROOT="$(dirname $CURR_DIR)" 10 | VENV_DIR="${REPO_ROOT}/.venv" 11 | source ${CURR_DIR}/_utils.sh 12 | 13 | print_heading "Installing workspace..." 
14 | 15 | print_heading "Creating virtual env" 16 | print_status "VIRTUAL_ENV=${VENV_DIR} uv venv" 17 | VIRTUAL_ENV=${VENV_DIR} uv venv 18 | 19 | print_heading "Installing requirements" 20 | print_status "VIRTUAL_ENV=${VENV_DIR} uv pip sync ${REPO_ROOT}/requirements.txt" 21 | VIRTUAL_ENV=${VENV_DIR} uv pip sync ${REPO_ROOT}/requirements.txt 22 | 23 | print_heading "Installing workspace in editable mode" 24 | VIRTUAL_ENV=${VENV_DIR} uv pip install -e ${REPO_ROOT} 25 | 26 | print_heading "Workspace installed" 27 | print_heading "Activate venv using: source .venv/bin/activate" 28 | 29 | -------------------------------------------------------------------------------- /agents/calculator.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from phi.agent import Agent 4 | from phi.model.openai import OpenAIChat 5 | from phi.tools.calculator import Calculator 6 | 7 | from agents.settings import agent_settings 8 | 9 | 10 | def get_calculator_agent( 11 | user_id: Optional[str] = None, 12 | session_id: Optional[str] = None, 13 | debug_mode: bool = False, 14 | ) -> Agent: 15 | return Agent( 16 | name="Calculator Agent", 17 | agent_id="calculator-agent", 18 | session_id=session_id, 19 | user_id=user_id, 20 | model=OpenAIChat( 21 | model=agent_settings.gpt_4, 22 | max_tokens=agent_settings.default_max_completion_tokens, 23 | temperature=agent_settings.default_temperature, 24 | ), 25 | instructions=["Use the calculator tool for comparisons."], 26 | tools=[Calculator(enable_all=True)], 27 | markdown=True, 28 | show_tool_calls=True, 29 | # Enable monitoring on phidata.app 30 | monitoring=True, 31 | debug_mode=debug_mode, 32 | ) 33 | -------------------------------------------------------------------------------- /api/main.py: -------------------------------------------------------------------------------- 1 | from fastapi import FastAPI 2 | from starlette.middleware.cors import CORSMiddleware 3 | 4 | from api.settings import api_settings 5 | from api.routes.v1_router import v1_router 6 | 7 | 8 | def create_app() -> FastAPI: 9 | """Create a FastAPI App 10 | 11 | Returns: 12 | FastAPI: FastAPI App 13 | """ 14 | 15 | # Create FastAPI App 16 | app: FastAPI = FastAPI( 17 | title=api_settings.title, 18 | version=api_settings.version, 19 | docs_url="/docs" if api_settings.docs_enabled else None, 20 | redoc_url="/redoc" if api_settings.docs_enabled else None, 21 | openapi_url="/openapi.json" if api_settings.docs_enabled else None, 22 | ) 23 | 24 | # Add v1 router 25 | app.include_router(v1_router) 26 | 27 | # Add Middlewares 28 | app.add_middleware( 29 | CORSMiddleware, 30 | allow_origins=api_settings.cors_origin_list, 31 | allow_credentials=True, 32 | allow_methods=["*"], 33 | allow_headers=["*"], 34 | ) 35 | 36 | return app 37 | 38 | 39 | # Create FastAPI app 40 | app = create_app() 41 | -------------------------------------------------------------------------------- /utils/log.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Optional 3 | 4 | 5 | def build_logger( 6 | logger_name: str, 7 | log_level: int = logging.INFO, 8 | show_time: bool = False, 9 | rich_tracebacks: bool = False, 10 | tracebacks_show_locals: bool = False, 11 | ) -> logging.Logger: 12 | from rich.logging import RichHandler 13 | 14 | rich_handler = RichHandler( 15 | show_time=show_time, rich_tracebacks=rich_tracebacks, tracebacks_show_locals=tracebacks_show_locals 16 | ) 17 | 
rich_handler.setFormatter( 18 | logging.Formatter( 19 | fmt="%(message)s", 20 | datefmt="[%X]", 21 | ) 22 | ) 23 | 24 | _logger = logging.getLogger(logger_name) 25 | _logger.addHandler(rich_handler) 26 | _logger.setLevel(log_level) 27 | _logger.propagate = False 28 | return _logger 29 | 30 | 31 | # Default logger instance 32 | logger: logging.Logger = build_logger("demo-agents") 33 | 34 | 35 | # Function to get or create a logger 36 | def get_logger(name: Optional[str] = None) -> logging.Logger: 37 | return logger if name is None else build_logger(name) 38 | -------------------------------------------------------------------------------- /.github/workflows/validate.yml: -------------------------------------------------------------------------------- 1 | name: Validate 2 | 3 | on: 4 | push: 5 | pull_request: 6 | types: 7 | - opened 8 | - edited 9 | - reopened 10 | branches: 11 | - "main" 12 | 13 | env: 14 | UV_SYSTEM_PYTHON: 1 15 | 16 | jobs: 17 | validate: 18 | runs-on: ubuntu-latest 19 | strategy: 20 | matrix: 21 | python-version: ["3.11"] 22 | 23 | steps: 24 | - uses: actions/checkout@v4 25 | 26 | - name: Install uv 27 | uses: astral-sh/setup-uv@v3 28 | with: 29 | enable-cache: true 30 | cache-dependency-glob: "requirements**.txt" 31 | 32 | - name: Set up Python ${{ matrix.python-version }} 33 | uses: actions/setup-python@v5 34 | with: 35 | python-version: ${{ matrix.python-version }} 36 | 37 | - name: Install the project 38 | run: uv pip sync requirements.txt 39 | 40 | - name: Check formatting with ruff 41 | run: uv run ruff format . --check 42 | 43 | - name: Lint with ruff 44 | run: uv run ruff check . 45 | 46 | - name: Type-check with mypy 47 | run: uv run mypy . 48 | 49 | # - name: Run tests 50 | # run: uv run pytest tests 51 | -------------------------------------------------------------------------------- /scripts/generate_requirements.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ############################################################################ 4 | # Generate requirements.txt from pyproject.toml 5 | # Usage: 6 | # ./scripts/generate_requirements.sh : Generate requirements.txt 7 | # ./scripts/generate_requirements.sh upgrade : Upgrade requirements.txt 8 | ############################################################################ 9 | 10 | CURR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 11 | REPO_ROOT="$(dirname $CURR_DIR)" 12 | source ${CURR_DIR}/_utils.sh 13 | 14 | print_heading "Generating requirements.txt..."
15 | 16 | if [[ "$#" -eq 1 ]] && [[ "$1" = "upgrade" ]]; 17 | then 18 | print_heading "Generating requirements.txt with upgrade" 19 | UV_CUSTOM_COMPILE_COMMAND="./scripts/generate_requirements.sh upgrade" \ 20 | uv pip compile ${REPO_ROOT}/pyproject.toml --no-cache --upgrade -o ${REPO_ROOT}/requirements.txt 21 | else 22 | print_heading "Generating requirements.txt" 23 | uv pip compile pyproject.toml -o requirements.txt 24 | UV_CUSTOM_COMPILE_COMMAND="./scripts/generate_requirements.sh" \ 25 | uv pip compile ${REPO_ROOT}/pyproject.toml --no-cache -o ${REPO_ROOT}/requirements.txt 26 | fi 27 | -------------------------------------------------------------------------------- /workspace/settings.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from phi.workspace.settings import WorkspaceSettings 4 | 5 | # 6 | # -*- Define workspace settings using a WorkspaceSettings object 7 | # these values can also be set using environment variables or a .env file 8 | # 9 | ws_settings = WorkspaceSettings( 10 | # Workspace name: used for naming cloud resources 11 | ws_name="demo-agents", 12 | # Path to the workspace root 13 | ws_root=Path(__file__).parent.parent.resolve(), 14 | # -*- Development env settings 15 | dev_env="dev", 16 | # -*- Development Apps 17 | dev_api_enabled=True, 18 | dev_db_enabled=True, 19 | # -*- Production env settings 20 | prd_env="prd", 21 | # -*- Production Apps 22 | prd_api_enabled=True, 23 | prd_db_enabled=True, 24 | # -*- AWS settings 25 | # Region for AWS resources 26 | aws_region="us-east-1", 27 | # Availability Zones for AWS resources 28 | aws_az1="us-east-1a", 29 | aws_az2="us-east-1b", 30 | # Subnet IDs in the aws_region 31 | subnet_ids=["subnet-067b9140c018160f6", "subnet-0f3ba3094301af603"], 32 | # -*- Image Settings 33 | # Name of the image 34 | image_name="demo-agents", 35 | # Repository for the image 36 | image_repo="phidata", 37 | # Build images locally 38 | build_images=True, 39 | ) 40 | -------------------------------------------------------------------------------- /agents/finance.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from phi.agent import Agent 4 | from phi.model.openai import OpenAIChat 5 | from phi.tools.yfinance import YFinanceTools 6 | 7 | from agents.settings import agent_settings 8 | from phi.storage.agent.postgres import PgAgentStorage 9 | 10 | from db.session import db_url 11 | 12 | finance_agent_storage = PgAgentStorage(table_name="finance_agent", db_url=db_url) 13 | 14 | 15 | def get_finance_agent( 16 | user_id: Optional[str] = None, 17 | session_id: Optional[str] = None, 18 | debug_mode: bool = False, 19 | ) -> Agent: 20 | return Agent( 21 | name="Finance Agent", 22 | role="Analyze financial data", 23 | agent_id="finance-agent", 24 | session_id=session_id, 25 | user_id=user_id, 26 | model=OpenAIChat( 27 | model=agent_settings.gpt_4, 28 | max_tokens=agent_settings.default_max_completion_tokens, 29 | temperature=agent_settings.default_temperature, 30 | ), 31 | tools=[YFinanceTools(enable_all=True)], 32 | description="You are a financial agent with the special skill of analyzing complex financial information.", 33 | instructions=[ 34 | "Always use tables to display data", 35 | "Keep your answers concise and engaging.", 36 | ], 37 | storage=finance_agent_storage, 38 | add_history_to_messages=True, 39 | num_history_responses=5, 40 | add_datetime_to_instructions=True, 41 | markdown=True, 42 | 
debug_mode=debug_mode, 43 | ) 44 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "demo-agents" 3 | version = "0.1.0" 4 | requires-python = ">=3.9" 5 | readme = "README.md" 6 | authors = [{ name = "Phidata Team", email = "hello@phidata.com" }] 7 | 8 | dependencies = [ 9 | "alembic", 10 | "beautifulsoup4", 11 | "duckdb", 12 | "duckduckgo-search", 13 | "exa_py", 14 | "fastapi[standard]", 15 | "google-search-results", 16 | "mypy", 17 | "nest_asyncio", 18 | "openai", 19 | "pgvector", 20 | "phidata[aws]==2.7.7", 21 | "pillow", 22 | "psycopg[binary]", 23 | "pypdf", 24 | "pytest", 25 | "python-docx", 26 | "ruff", 27 | "sqlalchemy", 28 | "tiktoken", 29 | "typer", 30 | "types-beautifulsoup4", 31 | "types-Pillow", 32 | "yfinance", 33 | "youtube_transcript_api" 34 | ] 35 | 36 | [build-system] 37 | requires = ["setuptools"] 38 | build-backend = "setuptools.build_meta" 39 | 40 | [tool.setuptools.packages.find] 41 | 42 | # Change this value to use a different directory for the phidata workspace. 43 | # [tool.phidata] 44 | # workspace = "workspace" 45 | 46 | [tool.ruff] 47 | line-length = 110 48 | exclude = ["aienv*", ".venv*"] 49 | [tool.ruff.lint.per-file-ignores] 50 | # Ignore `F401` (import violations) in all `__init__.py` files 51 | "__init__.py" = ["F401"] 52 | 53 | [tool.mypy] 54 | check_untyped_defs = true 55 | no_implicit_optional = true 56 | warn_unused_configs = true 57 | plugins = ["pydantic.mypy"] 58 | exclude = ["aienv*", ".venv*"] 59 | 60 | [[tool.mypy.overrides]] 61 | module = ["pgvector.*", "setuptools.*"] 62 | ignore_missing_imports = true 63 | 64 | [tool.uv.pip] 65 | no-annotate = true 66 | 67 | [tool.pytest.ini_options] 68 | log_cli = true 69 | -------------------------------------------------------------------------------- /db/settings.py: -------------------------------------------------------------------------------- 1 | from os import getenv 2 | from typing import Optional 3 | 4 | from pydantic_settings import BaseSettings 5 | 6 | from utils.log import logger 7 | 8 | 9 | class DbSettings(BaseSettings): 10 | """Database settings that can be set using environment variables. 
11 | 12 | Reference: https://docs.pydantic.dev/latest/usage/pydantic_settings/ 13 | """ 14 | 15 | # Database configuration 16 | db_host: Optional[str] = None 17 | db_port: Optional[int] = None 18 | db_user: Optional[str] = None 19 | db_pass: Optional[str] = None 20 | db_database: Optional[str] = None 21 | db_driver: str = "postgresql+psycopg" 22 | # Create/Upgrade database on startup using alembic 23 | migrate_db: bool = False 24 | 25 | def get_db_url(self) -> str: 26 | db_url = "{}://{}{}@{}:{}/{}".format( 27 | self.db_driver, 28 | self.db_user, 29 | f":{self.db_pass}" if self.db_pass else "", 30 | self.db_host, 31 | self.db_port, 32 | self.db_database, 33 | ) 34 | # Use local database if RUNTIME_ENV is not set 35 | if "None" in db_url and getenv("RUNTIME_ENV") is None: 36 | from workspace.dev_resources import dev_db 37 | 38 | logger.debug("Using local connection") 39 | local_db_url = dev_db.get_db_connection_local() 40 | if local_db_url: 41 | db_url = local_db_url 42 | 43 | # Validate database connection 44 | if "None" in db_url or db_url is None: 45 | raise ValueError("Could not build database connection") 46 | return db_url 47 | 48 | 49 | # Create DbSettings object 50 | db_settings = DbSettings() 51 | -------------------------------------------------------------------------------- /.github/workflows/ecr-images.yml: -------------------------------------------------------------------------------- 1 | name: Build ECR Images 2 | 3 | on: workflow_dispatch 4 | 5 | permissions: 6 | # For AWS OIDC Token access as per https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-amazon-web-services#updating-your-github-actions-workflow 7 | id-token: write # This is required for requesting the JWT 8 | contents: read # This is required for actions/checkout 9 | 10 | env: 11 | ECR_REPO: YOUR ECR REPO 12 | # Create role using https://aws.amazon.com/blogs/security/use-iam-roles-to-connect-github-actions-to-actions-in-aws/ 13 | AWS_ROLE: YOUR_ROLE_ARN 14 | AWS_REGION: us-east-1 15 | 16 | jobs: 17 | build-api-image: 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - name: Checkout 22 | uses: actions/checkout@v3 23 | - name: Set up QEMU 24 | uses: docker/setup-qemu-action@v3 25 | - name: Set up Docker Buildx 26 | uses: docker/setup-buildx-action@v3 27 | # https://github.com/marketplace/actions/configure-aws-credentials-action-for-github-actions 28 | - name: Configure AWS credentials 29 | uses: aws-actions/configure-aws-credentials@v4 30 | with: 31 | role-to-assume: ${{ env.AWS_ROLE }} 32 | aws-region: ${{ env.AWS_REGION }} 33 | # https://github.com/marketplace/actions/amazon-ecr-login-action-for-github-actions 34 | - name: ECR Login 35 | id: login-ecr 36 | uses: aws-actions/amazon-ecr-login@v2 37 | - name: Build, tag, and push docker image to Amazon ECR 38 | uses: docker/build-push-action@v5 39 | with: 40 | context: . 
41 | file: Dockerfile 42 | platforms: linux/amd64,linux/arm64 43 | push: true 44 | tags: ${{ env.ECR_REPO }}/demo-agents:dev, ${{ env.ECR_REPO }}/demo-agents:prd 45 | -------------------------------------------------------------------------------- /agents/youtube.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from phi.agent import Agent 4 | from phi.model.openai import OpenAIChat 5 | from phi.tools.youtube_tools import YouTubeTools 6 | 7 | from agents.settings import agent_settings 8 | from phi.storage.agent.postgres import PgAgentStorage 9 | 10 | from db.session import db_url 11 | 12 | youtube_agent_storage = PgAgentStorage(table_name="youtube_agent", db_url=db_url) 13 | 14 | 15 | def get_youtube_agent( 16 | user_id: Optional[str] = None, 17 | session_id: Optional[str] = None, 18 | debug_mode: bool = False, 19 | ) -> Agent: 20 | return Agent( 21 | name="YouTube Agent", 22 | agent_id="youtube-agent", 23 | session_id=session_id, 24 | user_id=user_id, 25 | model=OpenAIChat( 26 | model=agent_settings.gpt_4, 27 | max_tokens=agent_settings.default_max_completion_tokens, 28 | temperature=agent_settings.default_temperature, 29 | ), 30 | tools=[YouTubeTools()], 31 | description="You are a YouTube agent that has the special skill of understanding YouTube videos and answering questions about them.", 32 | instructions=[ 33 | "When the user asks about a video, confirm that they have provided a valid YouTube URL. If not, ask them for it.", 34 | "Using a video URL, get the video data using the `get_youtube_video_data` tool and captions using the `get_youtube_video_captions` tool.", 35 | "Using the data and captions, answer the user's question in an engaging and thoughtful manner. Focus on the most important details.", 36 | "If you cannot find the answer in the video, say so and ask the user to provide more details.", 37 | "Keep your answers concise and engaging.", 38 | ], 39 | markdown=True, 40 | add_history_to_messages=True, 41 | num_history_responses=5, 42 | show_tool_calls=True, 43 | add_datetime_to_instructions=True, 44 | storage=youtube_agent_storage, 45 | # Enable monitoring on phidata.app 46 | monitoring=True, 47 | debug_mode=debug_mode, 48 | ) 49 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Demo Agents 2 | 3 | This repo contains the code for running phidata demo-agents in 2 environments: 4 | 5 | 1. **dev**: A development environment running locally on Docker 6 | 2. **prd**: A production environment running on AWS ECS 7 | 8 | ## Setup Workspace 9 | 10 | 1. Clone the git repo 11 | 12 | > from the `demo-agents` dir: 13 | 14 | 2. Install workspace and activate the virtual env: 15 | 16 | ```sh 17 | ./scripts/install.sh 18 | source .venv/bin/activate 19 | ``` 20 | 21 | 3. Setup workspace: 22 | 23 | ```sh 24 | phi ws setup 25 | ``` 26 | 27 | 4. Copy `workspace/example_secrets` to `workspace/secrets`: 28 | 29 | ```sh 30 | cp -r workspace/example_secrets workspace/secrets 31 | ``` 32 | 33 | 5. Optional: Create `.env` file: 34 | 35 | ```sh 36 | cp example.env .env 37 | ``` 38 | 39 | ## Run Demo Agents locally 40 | 41 | 1. Install [docker desktop](https://www.docker.com/products/docker-desktop) 42 | 43 | 2. Set OpenAI Key 44 | 45 | Set the `OPENAI_API_KEY` environment variable using: 46 | 47 | ```sh 48 | export OPENAI_API_KEY=sk-*** 49 | ``` 50 | 51 | **OR** set in the `.env` file 52 | 53 | 3.
Start the workspace using: 54 | 55 | ```sh 56 | phi ws up 57 | ``` 58 | 59 | Open [localhost:8000/docs](http://localhost:8000/docs) to view the demo agents API. 60 | 61 | 4. Stop the workspace using: 62 | 63 | ```sh 64 | phi ws down 65 | ``` 66 | 67 | ## Next Steps: 68 | 69 | - [Run the Api App on AWS](https://docs.phidata.com/templates/demo-agents/run-aws) 70 | - Read how to [manage the development application](https://docs.phidata.com/how-to/development-app) 71 | - Read how to [manage the production application](https://docs.phidata.com/how-to/production-app) 72 | - Read how to [add python libraries](https://docs.phidata.com/how-to/python-libraries) 73 | - Read how to [format & validate your code](https://docs.phidata.com/how-to/format-and-validate) 74 | - Read how to [manage secrets](https://docs.phidata.com/how-to/secrets) 75 | - Add [CI/CD](https://docs.phidata.com/how-to/ci-cd) 76 | - Add [database tables](https://docs.phidata.com/how-to/database-tables) 77 | - Read the [Api App guide](https://docs.phidata.com/templates/demo-agents) 78 | -------------------------------------------------------------------------------- /api/settings.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from pydantic import field_validator, Field 4 | from pydantic_settings import BaseSettings 5 | from pydantic_core.core_schema import FieldValidationInfo 6 | 7 | 8 | class ApiSettings(BaseSettings): 9 | """Api settings that can be set using environment variables. 10 | 11 | Reference: https://pydantic-docs.helpmanual.io/usage/settings/ 12 | """ 13 | 14 | # Api title and version 15 | title: str = "demo-agents" 16 | version: str = "1.0" 17 | 18 | # Api runtime_env derived from the `runtime_env` environment variable. 19 | # Valid values include "dev", "stg", "prd" 20 | runtime_env: str = "dev" 21 | 22 | # Set to False to disable docs at /docs and /redoc 23 | docs_enabled: bool = True 24 | 25 | # Cors origin list to allow requests from. 26 | # This list is set using the set_cors_origin_list validator 27 | # which uses the runtime_env variable to set the 28 | # default cors origin list.
29 | cors_origin_list: Optional[List[str]] = Field(None, validate_default=True) 30 | 31 | @field_validator("runtime_env") 32 | def validate_runtime_env(cls, runtime_env): 33 | """Validate runtime_env.""" 34 | 35 | valid_runtime_envs = ["dev", "stg", "prd"] 36 | if runtime_env not in valid_runtime_envs: 37 | raise ValueError(f"Invalid runtime_env: {runtime_env}") 38 | 39 | return runtime_env 40 | 41 | @field_validator("cors_origin_list", mode="before") 42 | def set_cors_origin_list(cls, cors_origin_list, info: FieldValidationInfo): 43 | valid_cors = cors_origin_list or [] 44 | 45 | # Add phidata to cors origin list 46 | valid_cors.extend( 47 | [ 48 | "https://phidata.app", 49 | "https://www.phidata.app", 50 | "http://localhost:3000", 51 | "https://www.stgphi.com", 52 | ] 53 | ) 54 | 55 | runtime_env = info.data.get("runtime_env") 56 | if runtime_env == "dev": 57 | # 3000 is the default port for create-react-app 58 | valid_cors.extend(["http://localhost", "http://localhost:3000", "https://www.stgphi.com"]) 59 | 60 | return valid_cors 61 | 62 | 63 | # Create ApiSettings object 64 | api_settings = ApiSettings() 65 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by uv via the following command: 2 | # ./scripts/generate_requirements.sh 3 | alembic==1.13.3 4 | annotated-types==0.7.0 5 | anyio==4.6.2.post1 6 | beautifulsoup4==4.12.3 7 | boto3==1.35.43 8 | botocore==1.35.43 9 | certifi==2024.8.30 10 | charset-normalizer==3.4.0 11 | click==8.1.7 12 | distro==1.9.0 13 | dnspython==2.7.0 14 | docker==7.1.0 15 | docstring-parser==0.16 16 | duckdb==1.1.2 17 | duckduckgo-search==6.3.2 18 | email-validator==2.2.0 19 | exa-py==1.4.0 20 | exceptiongroup==1.2.2 21 | fastapi==0.115.2 22 | fastapi-cli==0.0.5 23 | frozendict==2.4.6 24 | gitdb==4.0.11 25 | gitpython==3.1.43 26 | google-search-results==2.4.2 27 | h11==0.14.0 28 | html5lib==1.1 29 | httpcore==1.0.6 30 | httptools==0.6.4 31 | httpx==0.27.2 32 | idna==3.10 33 | iniconfig==2.0.0 34 | jinja2==3.1.4 35 | jiter==0.6.1 36 | jmespath==1.0.1 37 | lxml==5.3.0 38 | mako==1.3.5 39 | markdown-it-py==3.0.0 40 | markupsafe==3.0.1 41 | mdurl==0.1.2 42 | multitasking==0.0.11 43 | mypy==1.12.0 44 | mypy-extensions==1.0.0 45 | nest-asyncio==1.6.0 46 | numpy==2.0.2 47 | openai==1.52.0 48 | packaging==24.1 49 | pandas==2.2.3 50 | peewee==3.17.7 51 | pgvector==0.3.5 52 | phidata==2.7.7 53 | pillow==11.0.0 54 | platformdirs==4.3.6 55 | pluggy==1.5.0 56 | primp==0.6.4 57 | psycopg==3.1.18 58 | psycopg-binary==3.1.18 59 | pydantic==2.9.2 60 | pydantic-core==2.23.4 61 | pydantic-settings==2.6.0 62 | pygments==2.18.0 63 | pypdf==5.0.1 64 | pytest==8.3.3 65 | python-dateutil==2.9.0.post0 66 | python-docx==1.1.2 67 | python-dotenv==1.0.1 68 | python-multipart==0.0.12 69 | pytz==2024.2 70 | pyyaml==6.0.2 71 | regex==2024.9.11 72 | requests==2.32.3 73 | rich==13.9.2 74 | ruff==0.7.0 75 | s3transfer==0.10.3 76 | shellingham==1.5.4 77 | six==1.16.0 78 | smmap==5.0.1 79 | sniffio==1.3.1 80 | soupsieve==2.6 81 | sqlalchemy==2.0.36 82 | starlette==0.40.0 83 | tiktoken==0.8.0 84 | tomli==2.0.2 85 | tqdm==4.66.5 86 | typer==0.12.5 87 | types-beautifulsoup4==4.12.0.20240907 88 | types-html5lib==1.1.11.20241018 89 | types-pillow==10.2.0.20240822 90 | typing-extensions==4.12.2 91 | tzdata==2024.2 92 | urllib3==1.26.20 93 | uvicorn==0.32.0 94 | uvloop==0.21.0 95 | watchfiles==0.24.0 96 | webencodings==0.5.1 97 | 
websockets==13.1 98 | yfinance==0.2.44 99 | youtube-transcript-api==0.6.2 100 | -------------------------------------------------------------------------------- /scripts/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ############################################################################ 4 | # Container Entrypoint script 5 | ############################################################################ 6 | 7 | if [[ "$PRINT_ENV_ON_LOAD" = true || "$PRINT_ENV_ON_LOAD" = True ]]; then 8 | echo "==================================================" 9 | printenv 10 | echo "==================================================" 11 | fi 12 | 13 | ############################################################################ 14 | # Wait for Services 15 | ############################################################################ 16 | 17 | if [[ "$WAIT_FOR_DB" = true || "$WAIT_FOR_DB" = True ]]; then 18 | dockerize \ 19 | -wait tcp://$DB_HOST:$DB_PORT \ 20 | -timeout 300s 21 | fi 22 | 23 | if [[ "$WAIT_FOR_REDIS" = true || "$WAIT_FOR_REDIS" = True ]]; then 24 | dockerize \ 25 | -wait tcp://$REDIS_HOST:$REDIS_PORT \ 26 | -timeout 300s 27 | fi 28 | 29 | ############################################################################ 30 | # Install requirements 31 | ############################################################################ 32 | 33 | if [[ "$INSTALL_REQUIREMENTS" = true || "$INSTALL_REQUIREMENTS" = True ]]; then 34 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 35 | echo "Installing requirements: $REQUIREMENTS_FILE_PATH" 36 | pip3 install -r $REQUIREMENTS_FILE_PATH 37 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 38 | fi 39 | 40 | ############################################################################ 41 | # Migrate database 42 | ############################################################################ 43 | 44 | if [[ "$MIGRATE_DB" = true || "$MIGRATE_DB" = True ]]; then 45 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 46 | echo "Migrating Database" 47 | alembic -c db/alembic.ini upgrade head 48 | echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 49 | fi 50 | 51 | ############################################################################ 52 | # Start App 53 | ############################################################################ 54 | 55 | case "$1" in 56 | chill) 57 | ;; 58 | *) 59 | echo "Running: $@" 60 | exec "$@" 61 | ;; 62 | esac 63 | 64 | echo ">>> Hello World!" 
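# "chill" (the Dockerfile's default CMD) falls through to this idle loop, keeping the container alive for debugging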
65 | while true; do sleep 18000; done 66 | -------------------------------------------------------------------------------- /workspace/dev_resources.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.fastapi import FastApi 2 | from phi.docker.app.postgres import PgVectorDb 3 | from phi.docker.resource.image import DockerImage 4 | from phi.docker.resources import DockerResources 5 | 6 | from workspace.settings import ws_settings 7 | 8 | # 9 | # -*- Resources for the Development Environment 10 | # 11 | 12 | # -*- Dev image 13 | dev_image = DockerImage( 14 | name=f"{ws_settings.image_repo}/{ws_settings.image_name}", 15 | tag=ws_settings.dev_env, 16 | enabled=ws_settings.build_images, 17 | path=str(ws_settings.ws_root), 18 | push_image=False, 19 | ) 20 | 21 | # -*- Dev database running on port 5432:5432 22 | dev_db = PgVectorDb( 23 | name=f"{ws_settings.ws_name}-db", 24 | enabled=ws_settings.dev_db_enabled, 25 | pg_user="api", 26 | pg_password="api", 27 | pg_database="api", 28 | # Connect to this db on port 5432 29 | host_port=5432, 30 | ) 31 | 32 | # -*- Build container environment 33 | container_env = { 34 | "RUNTIME_ENV": "dev", 35 | # Get the OpenAI API key from the local environment 36 | # "OPENAI_API_KEY": getenv("OPENAI_API_KEY"), 37 | "PHI_MONITORING": "True", 38 | # Database configuration 39 | "DB_HOST": dev_db.get_db_host(), 40 | "DB_PORT": dev_db.get_db_port(), 41 | "DB_USER": dev_db.get_db_user(), 42 | "DB_PASS": dev_db.get_db_password(), 43 | "DB_DATABASE": dev_db.get_db_database(), 44 | # Wait for database to be available before starting the application 45 | "WAIT_FOR_DB": ws_settings.dev_db_enabled, 46 | # Migrate database on startup using alembic 47 | # "MIGRATE_DB": ws_settings.prd_db_enabled, 48 | } 49 | 50 | # -*- FastApi running on port 8000:8000 51 | dev_fastapi = FastApi( 52 | name=ws_settings.ws_name, 53 | enabled=ws_settings.dev_api_enabled, 54 | image=dev_image, 55 | command="uvicorn api.main:app --reload", 56 | port_number=8000, 57 | debug_mode=True, 58 | mount_workspace=True, 59 | env_vars=container_env, 60 | use_cache=ws_settings.use_cache, 61 | # Read secrets from secrets/dev_api_secrets.yml 62 | secrets_file=ws_settings.ws_root.joinpath("workspace/secrets/dev_api_secrets.yml"), 63 | depends_on=[dev_db], 64 | ) 65 | 66 | # -*- Dev DockerResources 67 | dev_docker_resources = DockerResources( 68 | env=ws_settings.dev_env, 69 | network=ws_settings.ws_name, 70 | apps=[dev_db, dev_fastapi], 71 | ) 72 | -------------------------------------------------------------------------------- /agents/web_search.py: -------------------------------------------------------------------------------- 1 | from textwrap import dedent 2 | from typing import Optional 3 | 4 | from phi.agent import Agent 5 | from phi.model.openai import OpenAIChat 6 | from phi.tools.serpapi_tools import SerpApiTools 7 | 8 | from agents.settings import agent_settings 9 | from phi.storage.agent.postgres import PgAgentStorage 10 | 11 | from db.session import db_url 12 | 13 | web_search_agent_storage = PgAgentStorage(table_name="web_search_agent", db_url=db_url) 14 | 15 | 16 | def get_web_search_agent( 17 | user_id: Optional[str] = None, 18 | session_id: Optional[str] = None, 19 | debug_mode: bool = False, 20 | ) -> Agent: 21 | return Agent( 22 | name="Web Search Agent", 23 | role="Search the web for information", 24 | agent_id="web-search-agent", 25 | session_id=session_id, 26 | user_id=user_id, 27 | model=OpenAIChat( 28 | model=agent_settings.gpt_4, 29 | 
max_tokens=agent_settings.default_max_completion_tokens, 30 | temperature=agent_settings.default_temperature, 31 | ), 32 | tools=[SerpApiTools()], 33 | description="You are a Web Search Agent that has the special skill of searching the web for information and presenting the results in a structured manner.", 34 | instructions=[ 35 | "To answer the user's question, first search the web for information by breaking down the user's question into smaller queries.", 36 | "Make sure you cover all the aspects of the question.", 37 | "Important: \n" 38 | " - Focus on legitimate sources\n" 39 | " - Always provide sources and the links to the information you used to answer the question\n" 40 | " - If you cannot find the answer, say so and ask the user to provide more details.", 41 | "Keep your answers concise and engaging.", 42 | ], 43 | expected_output=dedent("""\ 44 | Your answer should be in the following format: 45 | 46 | {provide a detailed answer to the user's question} 47 | 48 | ### Sources 49 | {provide the sources and links to the information you used to answer the question} 50 | """), 51 | storage=web_search_agent_storage, 52 | add_history_to_messages=True, 53 | num_history_responses=5, 54 | add_datetime_to_instructions=True, 55 | markdown=True, 56 | debug_mode=debug_mode, 57 | ) 58 | -------------------------------------------------------------------------------- /agents/research.py: -------------------------------------------------------------------------------- 1 | from textwrap import dedent 2 | from typing import Optional 3 | from datetime import datetime 4 | 5 | from phi.agent import Agent 6 | from phi.model.openai import OpenAIChat 7 | from phi.tools.exa import ExaTools 8 | 9 | from agents.settings import agent_settings 10 | from phi.storage.agent.postgres import PgAgentStorage 11 | 12 | from db.session import db_url 13 | 14 | research_agent_storage = PgAgentStorage(table_name="research_agent", db_url=db_url) 15 | 16 | 17 | def get_research_agent( 18 | user_id: Optional[str] = None, 19 | session_id: Optional[str] = None, 20 | debug_mode: bool = False, 21 | ) -> Agent: 22 | return Agent( 23 | name="Research Agent", 24 | role="Write research reports for the New York Times", 25 | agent_id="research-agent", 26 | session_id=session_id, 27 | user_id=user_id, 28 | model=OpenAIChat( 29 | model=agent_settings.gpt_4, 30 | max_tokens=agent_settings.default_max_completion_tokens, 31 | temperature=agent_settings.default_temperature, 32 | ), 33 | tools=[ExaTools(start_published_date=datetime.now().strftime("%Y-%m-%d"), type="keyword")], 34 | description="You are a Research Agent that has the special skill of writing New York Times worthy articles.", 35 | instructions=[ 36 | "If the user asks for a report or provides a topic, break down the topic into 3 different searches.", 37 | "For each search, run a search and read the results carefully.", 38 | "Prepare a NYT worthy article based on the results of the searches.", 39 | "Focus on facts and make sure to provide references.", 40 | "Keep your answers concise and engaging.", 41 | ], 42 | expected_output=dedent("""\ 43 | Your articles should be engaging, informative, well-structured and in markdown format. 
They should follow the following structure: 44 | 45 | ## Engaging Article Title 46 | 47 | ### Overview 48 | {give a brief introduction of the article and why the user should read this report} 49 | {make this section engaging and create a hook for the reader} 50 | 51 | ### Section 1 52 | {break the article into sections} 53 | {provide details/facts/processes in this section} 54 | 55 | ... more sections as necessary... 56 | 57 | ### Takeaways 58 | {provide key takeaways from the article} 59 | 60 | ### References 61 | - [Reference 1](link) 62 | - [Reference 2](link) 63 | """), 64 | markdown=True, 65 | add_history_to_messages=True, 66 | num_history_responses=5, 67 | add_datetime_to_instructions=True, 68 | storage=research_agent_storage, 69 | # Enable monitoring on phidata.app 70 | monitoring=True, 71 | debug_mode=debug_mode, 72 | ) 73 | -------------------------------------------------------------------------------- /db/migrations/env.py: -------------------------------------------------------------------------------- 1 | from logging.config import fileConfig 2 | 3 | from sqlalchemy import engine_from_config 4 | from sqlalchemy import pool 5 | 6 | from alembic import context 7 | 8 | from db.tables import Base 9 | from db.session import db_url 10 | 11 | # this is the Alembic Config object, which provides 12 | # access to the values within the .ini file in use. 13 | config = context.config 14 | 15 | # Interpret the config file for Python logging. 16 | # This line sets up loggers basically. 17 | if config.config_file_name is not None: 18 | fileConfig(config.config_file_name) 19 | 20 | config.set_main_option("sqlalchemy.url", db_url) 21 | 22 | # add your model's MetaData object here 23 | # for 'autogenerate' support 24 | # from myapp import mymodel 25 | # target_metadata = mymodel.Base.metadata 26 | target_metadata = Base.metadata 27 | 28 | 29 | # -*- Only include tables that are in the target_metadata 30 | # See: https://alembic.sqlalchemy.org/en/latest/autogenerate.html#omitting-table-names-from-the-autogenerate-process 31 | def include_name(name, type_, parent_names): 32 | if type_ == "table": 33 | return name in target_metadata.tables 34 | else: 35 | return True 36 | 37 | 38 | def run_migrations_offline() -> None: 39 | """Run migrations in 'offline' mode. 40 | 41 | This configures the context with just a URL 42 | and not an Engine, though an Engine is acceptable 43 | here as well. By skipping the Engine creation 44 | we don't even need a DBAPI to be available. 45 | 46 | Calls to context.execute() here emit the given string to the 47 | script output. 48 | 49 | """ 50 | url = config.get_main_option("sqlalchemy.url") 51 | context.configure( 52 | url=url, 53 | target_metadata=target_metadata, 54 | include_name=include_name, 55 | literal_binds=True, 56 | dialect_opts={"paramstyle": "named"}, 57 | version_table_schema=target_metadata.schema, 58 | ) 59 | 60 | with context.begin_transaction(): 61 | context.run_migrations() 62 | 63 | 64 | def run_migrations_online() -> None: 65 | """Run migrations in 'online' mode. 66 | 67 | In this scenario we need to create an Engine 68 | and associate a connection with the context. 
-------------------------------------------------------------------------------- /db/README.md: -------------------------------------------------------------------------------- 1 | ## Managing database migrations 2 | 3 | This guide outlines the steps to manage database migrations for your workspace. 4 | 5 | ## Table of Contents 6 | 7 | - [Prerequisites](#prerequisites) 8 | - [Running Migrations](#running-migrations) 9 | - [Create a Database Revision](#create-a-database-revision) 10 | - [Migrate the Database](#migrate-the-database) 11 | - [Environment-specific Instructions](#environment-specific-instructions) 12 | - [Development Environment](#development-environment) 13 | - [Production Environment](#production-environment) 14 | - [Creating the Migrations Directory](#creating-the-migrations-directory) 15 | - [Additional Resources](#additional-resources) 16 | 17 | --- 18 | 19 | ## Prerequisites 20 | 21 | 1. **Update Tables**: Add or update the SQLAlchemy tables in the `db/tables` directory. 22 | 2. **Import Classes**: Ensure that the SQLAlchemy table classes are imported in `db/tables/__init__.py`. 23 | 24 | ## Running Migrations 25 | 26 | ### Create a Database Revision 27 | 28 | After you have added or updated your table, create a new database revision using: 29 | 30 | ```bash 31 | alembic -c db/alembic.ini revision --autogenerate -m "Your Revision Message" 32 | ``` 33 | 34 | > **Note:** Replace `"Your Revision Message"` with a meaningful description of the changes. 35 | 36 | ### Migrate the Database 37 | 38 | Apply the revision to update the database schema: 39 | 40 | ```bash 41 | alembic -c db/alembic.ini upgrade head 42 | ``` 43 | 44 | ## Environment-specific Instructions 45 | 46 | Let's explore the migration process for both development and production environments. 47 | 48 | ### Development Environment 49 | 50 | **Create Revision and Migrate:** 51 | 52 | ```bash 53 | docker exec -it demo-agents-api alembic -c db/alembic.ini revision --autogenerate -m "Your Revision Message" 54 | docker exec -it demo-agents-api alembic -c db/alembic.ini upgrade head 55 | ``` 56 | 57 | ### Production Environment 58 | 59 | #### Option 1: Automatic Migration at Startup 60 | 61 | Set the environment variable `MIGRATE_DB=True` to run migrations automatically when the container starts. This executes: 62 | 63 | ```bash 64 | alembic -c db/alembic.ini upgrade head 65 | ```
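For reference, the startup migration amounts to the same upgrade invoked through Alembic's Python API. A sketch of that equivalence; `run_startup_migration` is a hypothetical helper, and in this repo the actual gate lives in `scripts/entrypoint.sh`:

```python
# Programmatic equivalent of `alembic -c db/alembic.ini upgrade head`,
# gated on MIGRATE_DB the way the container entrypoint is.
from os import getenv

from alembic import command
from alembic.config import Config


def run_startup_migration() -> None:
    # Hypothetical helper, shown for illustration only.
    if getenv("MIGRATE_DB", "False").lower() in ("true", "1"):
        command.upgrade(Config("db/alembic.ini"), "head")
```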
66 | 67 | #### Option 2: Manual Migration via SSH 68 | 69 | SSH into the production container and run the migration manually: 70 | 71 | ```bash 72 | ECS_CLUSTER=demo-agents-cluster 73 | TASK_ARN=$(aws ecs list-tasks --cluster $ECS_CLUSTER --query "taskArns[0]" --output text) 74 | CONTAINER_NAME=demo-agents-api 75 | 76 | aws ecs execute-command --cluster $ECS_CLUSTER \ 77 | --task $TASK_ARN \ 78 | --container $CONTAINER_NAME \ 79 | --interactive \ 80 | --command "alembic -c db/alembic.ini upgrade head" 81 | ``` 82 | 83 | ## Creating the Migrations Directory 84 | 85 | > **Note:** These steps have already been completed and are included here for reference. 86 | 87 | 1. **Access the Development Container:** 88 | 89 | ```bash 90 | docker exec -it demo-agents-api zsh 91 | ``` 92 | 93 | 2. **Initialize Alembic Migrations:** 94 | 95 | ```bash 96 | cd db 97 | alembic init migrations 98 | ``` 99 | 100 | 3. **Post-Initialization Steps:** 101 | 102 | - **Update `alembic.ini`:** 103 | - Set `script_location = db/migrations`. 104 | - **Update `migrations/env.py`:** 105 | - Modify according to the [Alembic Autogenerate Documentation](https://alembic.sqlalchemy.org/en/latest/autogenerate.html). 106 | 107 | ## Additional Resources 108 | 109 | - **Adding Database Tables:** Refer to the [Phidata documentation](https://docs.phidata.com/day-2/database-tables) for detailed instructions on adding database tables. 110 | - **Environment Variable Note:** Setting `MIGRATE_DB=True` ensures that the migration command runs from the entrypoint script when the container starts. 111 | 112 | --- 113 | 114 | Feel free to customize this README further to suit your project's needs. 115 | -------------------------------------------------------------------------------- /db/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = db/migrations 6 | 7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s 8 | # Uncomment the line below if you want the files to be prepended with date and time 9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file 10 | # for all available tokens 11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s 12 | 13 | # sys.path path, will be prepended to sys.path if present. 14 | # defaults to the current working directory. 15 | prepend_sys_path = . 16 | 17 | # timezone to use when rendering the date within the migration file 18 | # as well as the filename. 19 | # If specified, requires the python-dateutil library that can be 20 | # installed by adding `alembic[tz]` to the pip requirements 21 | # string value is passed to dateutil.tz.gettz() 22 | # leave blank for localtime 23 | # timezone = 24 | 25 | # max length of characters to apply to the 26 | # "slug" field 27 | # truncate_slug_length = 40 28 | 29 | # set to 'true' to run the environment during 30 | # the 'revision' command, regardless of autogenerate 31 | # revision_environment = false 32 | 33 | # set to 'true' to allow .pyc and .pyo files without 34 | # a source .py file to be detected as revisions in the 35 | # versions/ directory 36 | # sourceless = false 37 | 38 | # version location specification; This defaults 39 | # to migrations/versions.
When using multiple version 40 | # directories, initial revisions must be specified with --version-path. 41 | # The path separator used here should be the separator specified by "version_path_separator" below. 42 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions 43 | 44 | # version path separator; As mentioned above, this is the character used to split 45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 47 | # Valid values for version_path_separator are: 48 | # 49 | # version_path_separator = : 50 | # version_path_separator = ; 51 | # version_path_separator = space 52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. 53 | 54 | # set to 'true' to search source files recursively 55 | # in each "version_locations" directory 56 | # new in Alembic version 1.10 57 | # recursive_version_locations = false 58 | 59 | # the output encoding used when revision files 60 | # are written from script.py.mako 61 | # output_encoding = utf-8 62 | 63 | sqlalchemy.url = driver://user:pass@localhost/dbname 64 | 65 | 66 | [post_write_hooks] 67 | # post_write_hooks defines scripts or Python functions that are run 68 | # on newly generated revision scripts. See the documentation for further 69 | # detail and examples 70 | 71 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 72 | hooks = black 73 | black.type = console_scripts 74 | black.entrypoint = black 75 | black.options = -l 79 REVISION_SCRIPT_FILENAME 76 | 77 | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary 78 | # hooks = ruff 79 | # ruff.type = exec 80 | # ruff.executable = %(here)s/.venv/bin/ruff 81 | # ruff.options = --fix REVISION_SCRIPT_FILENAME 82 | 83 | # Logging configuration 84 | [loggers] 85 | keys = root,sqlalchemy,alembic 86 | 87 | [handlers] 88 | keys = console 89 | 90 | [formatters] 91 | keys = generic 92 | 93 | [logger_root] 94 | level = WARN 95 | handlers = console 96 | qualname = 97 | 98 | [logger_sqlalchemy] 99 | level = WARN 100 | handlers = 101 | qualname = sqlalchemy.engine 102 | 103 | [logger_alembic] 104 | level = INFO 105 | handlers = 106 | qualname = alembic 107 | 108 | [handler_console] 109 | class = StreamHandler 110 | args = (sys.stderr,) 111 | level = NOTSET 112 | formatter = generic 113 | 114 | [formatter_generic] 115 | format = %(levelname)-5.5s [%(name)s] %(message)s 116 | datefmt = %H:%M:%S 117 | -------------------------------------------------------------------------------- /workspace/prd_resources.py: -------------------------------------------------------------------------------- 1 | from phi.aws.app.fastapi import FastApi 2 | from phi.aws.resources import AwsResources 3 | from phi.aws.resource.ecs import EcsCluster 4 | from phi.aws.resource.ec2 import SecurityGroup, InboundRule 5 | from phi.aws.resource.rds import DbInstance, DbSubnetGroup 6 | from phi.aws.resource.reference import AwsReference 7 | from phi.aws.resource.s3 import S3Bucket 8 | from phi.aws.resource.secret import SecretsManager 9 | from phi.docker.resources import DockerResources 10 | from phi.docker.resource.image import DockerImage 11 | 12 | from workspace.settings import ws_settings 13 | 14 | # 15 | # -*- Resources for the Production Environment 16 | # 17 | # Skip resource deletion when running `phi ws down` (set to True after initial deployment) 18 | 
skip_delete: bool = False 19 | # Save resource outputs to workspace/outputs 20 | save_output: bool = True 21 | 22 | # -*- Production image 23 | prd_image = DockerImage( 24 | name=f"{ws_settings.image_repo}/{ws_settings.image_name}", 25 | tag=ws_settings.prd_env, 26 | enabled=ws_settings.build_images, 27 | path=str(ws_settings.ws_root), 28 | platforms=["linux/amd64", "linux/arm64"], 29 | push_image=True, 30 | ) 31 | 32 | # -*- S3 bucket for production data (set enabled=True when needed) 33 | prd_bucket = S3Bucket( 34 | name=f"{ws_settings.prd_key}-data", 35 | enabled=False, 36 | acl="private", 37 | skip_delete=skip_delete, 38 | save_output=save_output, 39 | ) 40 | 41 | # -*- Secrets for production application 42 | prd_secret = SecretsManager( 43 | name=f"{ws_settings.prd_key}-secret", 44 | group="api", 45 | # Create secret from workspace/secrets/prd_api_secrets.yml 46 | secret_files=[ws_settings.ws_root.joinpath("workspace/secrets/prd_api_secrets.yml")], 47 | skip_delete=skip_delete, 48 | save_output=save_output, 49 | ) 50 | # -*- Secrets for production database 51 | prd_db_secret = SecretsManager( 52 | name=f"{ws_settings.prd_key}-db-secret", 53 | group="db", 54 | # Create secret from workspace/secrets/prd_db_secrets.yml 55 | secret_files=[ws_settings.ws_root.joinpath("workspace/secrets/prd_db_secrets.yml")], 56 | skip_delete=skip_delete, 57 | save_output=save_output, 58 | ) 59 | 60 | # -*- Security Group for the load balancer 61 | prd_lb_sg = SecurityGroup( 62 | name=f"{ws_settings.prd_key}-lb-security-group", 63 | group="api", 64 | description="Security group for the load balancer", 65 | inbound_rules=[ 66 | InboundRule( 67 | description="Allow HTTP traffic from the internet", 68 | port=80, 69 | cidr_ip="0.0.0.0/0", 70 | ), 71 | InboundRule( 72 | description="Allow HTTPS traffic from the internet", 73 | port=443, 74 | cidr_ip="0.0.0.0/0", 75 | ), 76 | ], 77 | skip_delete=skip_delete, 78 | save_output=save_output, 79 | ) 80 | # -*- Security Group for the application 81 | prd_sg = SecurityGroup( 82 | name=f"{ws_settings.prd_key}-security-group", 83 | enabled=ws_settings.prd_api_enabled, 84 | group="api", 85 | description="Security group for the production api", 86 | inbound_rules=[ 87 | InboundRule( 88 | description="Allow traffic from LB to the FastAPI server", 89 | port=8000, 90 | security_group_id=AwsReference(prd_lb_sg.get_security_group_id), 91 | ), 92 | ], 93 | depends_on=[prd_lb_sg], 94 | skip_delete=skip_delete, 95 | save_output=save_output, 96 | ) 97 | # -*- Security Group for the database 98 | prd_db_port = 5432 99 | prd_db_sg = SecurityGroup( 100 | name=f"{ws_settings.prd_key}-db-security-group", 101 | enabled=ws_settings.prd_db_enabled, 102 | group="db", 103 | description="Security group for the production database", 104 | inbound_rules=[ 105 | InboundRule( 106 | description="Allow traffic from the FastAPI server to the database", 107 | port=prd_db_port, 108 | security_group_id=AwsReference(prd_sg.get_security_group_id), 109 | ), 110 | ], 111 | depends_on=[prd_sg], 112 | skip_delete=skip_delete, 113 | save_output=save_output, 114 | ) 115 | 116 | # -*- RDS Database Subnet Group 117 | prd_db_subnet_group = DbSubnetGroup( 118 | name=f"{ws_settings.prd_key}-db-sg", 119 | enabled=ws_settings.prd_db_enabled, 120 | group="db", 121 | subnet_ids=ws_settings.subnet_ids, 122 | skip_delete=skip_delete, 123 | save_output=save_output, 124 | ) 125 | 126 | # -*- RDS Database Instance 127 | prd_db = DbInstance( 128 | name=f"{ws_settings.prd_key}-db", 129 | enabled=ws_settings.prd_db_enabled, 
130 | group="db", 131 | db_name="api", 132 | port=prd_db_port, 133 | engine="postgres", 134 | engine_version="16.1", 135 | allocated_storage=64, 136 | # NOTE: For production, use a larger instance type. 137 | # Last checked price: ~$25 per month 138 | db_instance_class="db.t4g.small", 139 | db_security_groups=[prd_db_sg], 140 | db_subnet_group=prd_db_subnet_group, 141 | availability_zone=ws_settings.aws_az1, 142 | publicly_accessible=False, 143 | enable_performance_insights=True, 144 | aws_secret=prd_db_secret, 145 | skip_delete=skip_delete, 146 | save_output=save_output, 147 | # Do not wait for the db to be deleted 148 | wait_for_delete=False, 149 | ) 150 | 151 | # -*- ECS cluster 152 | launch_type = "FARGATE" 153 | prd_ecs_cluster = EcsCluster( 154 | name=f"{ws_settings.prd_key}-cluster", 155 | ecs_cluster_name=ws_settings.prd_key, 156 | capacity_providers=[launch_type], 157 | skip_delete=skip_delete, 158 | save_output=save_output, 159 | ) 160 | 161 | # -*- Build container environment 162 | container_env = { 163 | "RUNTIME_ENV": "prd", 164 | # Get the OpenAI API key from the local environment 165 | # "OPENAI_API_KEY": getenv("OPENAI_API_KEY"), 166 | "PHI_MONITORING": "True", 167 | # Database configuration 168 | "DB_HOST": AwsReference(prd_db.get_db_endpoint), 169 | "DB_PORT": AwsReference(prd_db.get_db_port), 170 | "DB_USER": AwsReference(prd_db.get_master_username), 171 | "DB_PASS": AwsReference(prd_db.get_master_user_password), 172 | "DB_DATABASE": AwsReference(prd_db.get_db_name), 173 | # Wait for database to be available before starting the application 174 | "WAIT_FOR_DB": ws_settings.prd_db_enabled, 175 | # Migrate database on startup using alembic 176 | # "MIGRATE_DB": ws_settings.prd_db_enabled, 177 | } 178 | 179 | # -*- FastApi running on ECS 180 | prd_fastapi = FastApi( 181 | name=ws_settings.prd_key, 182 | enabled=ws_settings.prd_api_enabled, 183 | group="api", 184 | image=prd_image, 185 | command="uvicorn api.main:app --workers 4", 186 | port_number=8000, 187 | ecs_task_cpu="2048", 188 | ecs_task_memory="4096", 189 | ecs_service_count=2, 190 | ecs_cluster=prd_ecs_cluster, 191 | aws_secrets=[prd_secret], 192 | subnets=ws_settings.subnet_ids, 193 | security_groups=[prd_sg], 194 | # To enable HTTPS, create an ACM certificate and add the ARN below: 195 | load_balancer_enable_https=True, 196 | load_balancer_certificate_arn="arn:aws:acm:us-east-1:497891874516:certificate/e822946f-02c9-4ed1-8177-97ef2f4f5b72", 197 | load_balancer_security_groups=[prd_lb_sg], 198 | create_load_balancer=True, 199 | health_check_path="/v1/health", 200 | env_vars=container_env, 201 | use_cache=ws_settings.use_cache, 202 | skip_delete=skip_delete, 203 | save_output=save_output, 204 | # Do not wait for the service to stabilize 205 | wait_for_create=False, 206 | # Do not wait for the service to be deleted 207 | wait_for_delete=False, 208 | ) 209 | 210 | # -*- Production DockerResources 211 | prd_docker_resources = DockerResources( 212 | env=ws_settings.prd_env, 213 | network=ws_settings.ws_name, 214 | resources=[prd_image], 215 | ) 216 | 217 | # -*- Production AwsResources 218 | prd_aws_config = AwsResources( 219 | env=ws_settings.prd_env, 220 | apps=[prd_fastapi], 221 | resources=( 222 | prd_lb_sg, 223 | prd_sg, 224 | prd_db_sg, 225 | prd_secret, 226 | prd_db_secret, 227 | prd_db_subnet_group, 228 | prd_db, 229 | prd_bucket, 230 | ), 231 | ) 232 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | Copyright (c) 2022 Phidata, Inc. 2 | 3 | Mozilla Public License Version 2.0 4 | ================================== 5 | 6 | 1. Definitions 7 | -------------- 8 | 9 | 1.1. "Contributor" 10 | means each individual or legal entity that creates, contributes to 11 | the creation of, or owns Covered Software. 12 | 13 | 1.2. "Contributor Version" 14 | means the combination of the Contributions of others (if any) used 15 | by a Contributor and that particular Contributor's Contribution. 16 | 17 | 1.3. "Contribution" 18 | means Covered Software of a particular Contributor. 19 | 20 | 1.4. "Covered Software" 21 | means Source Code Form to which the initial Contributor has attached 22 | the notice in Exhibit A, the Executable Form of such Source Code 23 | Form, and Modifications of such Source Code Form, in each case 24 | including portions thereof. 25 | 26 | 1.5. "Incompatible With Secondary Licenses" 27 | means 28 | 29 | (a) that the initial Contributor has attached the notice described 30 | in Exhibit B to the Covered Software; or 31 | 32 | (b) that the Covered Software was made available under the terms of 33 | version 1.1 or earlier of the License, but not also under the 34 | terms of a Secondary License. 35 | 36 | 1.6. "Executable Form" 37 | means any form of the work other than Source Code Form. 38 | 39 | 1.7. "Larger Work" 40 | means a work that combines Covered Software with other material, in 41 | a separate file or files, that is not Covered Software. 42 | 43 | 1.8. "License" 44 | means this document. 45 | 46 | 1.9. "Licensable" 47 | means having the right to grant, to the maximum extent possible, 48 | whether at the time of the initial grant or subsequently, any and 49 | all of the rights conveyed by this License. 50 | 51 | 1.10. "Modifications" 52 | means any of the following: 53 | 54 | (a) any file in Source Code Form that results from an addition to, 55 | deletion from, or modification of the contents of Covered 56 | Software; or 57 | 58 | (b) any new file in Source Code Form that contains any Covered 59 | Software. 60 | 61 | 1.11. "Patent Claims" of a Contributor 62 | means any patent claim(s), including without limitation, method, 63 | process, and apparatus claims, in any patent Licensable by such 64 | Contributor that would be infringed, but for the grant of the 65 | License, by the making, using, selling, offering for sale, having 66 | made, import, or transfer of either its Contributions or its 67 | Contributor Version. 68 | 69 | 1.12. "Secondary License" 70 | means either the GNU General Public License, Version 2.0, the GNU 71 | Lesser General Public License, Version 2.1, the GNU Affero General 72 | Public License, Version 3.0, or any later versions of those 73 | licenses. 74 | 75 | 1.13. "Source Code Form" 76 | means the form of the work preferred for making modifications. 77 | 78 | 1.14. "You" (or "Your") 79 | means an individual or a legal entity exercising rights under this 80 | License. For legal entities, "You" includes any entity that 81 | controls, is controlled by, or is under common control with You. For 82 | purposes of this definition, "control" means (a) the power, direct 83 | or indirect, to cause the direction or management of such entity, 84 | whether by contract or otherwise, or (b) ownership of more than 85 | fifty percent (50%) of the outstanding shares or beneficial 86 | ownership of such entity. 87 | 88 | 2. 
License Grants and Conditions 89 | -------------------------------- 90 | 91 | 2.1. Grants 92 | 93 | Each Contributor hereby grants You a world-wide, royalty-free, 94 | non-exclusive license: 95 | 96 | (a) under intellectual property rights (other than patent or trademark) 97 | Licensable by such Contributor to use, reproduce, make available, 98 | modify, display, perform, distribute, and otherwise exploit its 99 | Contributions, either on an unmodified basis, with Modifications, or 100 | as part of a Larger Work; and 101 | 102 | (b) under Patent Claims of such Contributor to make, use, sell, offer 103 | for sale, have made, import, and otherwise transfer either its 104 | Contributions or its Contributor Version. 105 | 106 | 2.2. Effective Date 107 | 108 | The licenses granted in Section 2.1 with respect to any Contribution 109 | become effective for each Contribution on the date the Contributor first 110 | distributes such Contribution. 111 | 112 | 2.3. Limitations on Grant Scope 113 | 114 | The licenses granted in this Section 2 are the only rights granted under 115 | this License. No additional rights or licenses will be implied from the 116 | distribution or licensing of Covered Software under this License. 117 | Notwithstanding Section 2.1(b) above, no patent license is granted by a 118 | Contributor: 119 | 120 | (a) for any code that a Contributor has removed from Covered Software; 121 | or 122 | 123 | (b) for infringements caused by: (i) Your and any other third party's 124 | modifications of Covered Software, or (ii) the combination of its 125 | Contributions with other software (except as part of its Contributor 126 | Version); or 127 | 128 | (c) under Patent Claims infringed by Covered Software in the absence of 129 | its Contributions. 130 | 131 | This License does not grant any rights in the trademarks, service marks, 132 | or logos of any Contributor (except as may be necessary to comply with 133 | the notice requirements in Section 3.4). 134 | 135 | 2.4. Subsequent Licenses 136 | 137 | No Contributor makes additional grants as a result of Your choice to 138 | distribute the Covered Software under a subsequent version of this 139 | License (see Section 10.2) or under the terms of a Secondary License (if 140 | permitted under the terms of Section 3.3). 141 | 142 | 2.5. Representation 143 | 144 | Each Contributor represents that the Contributor believes its 145 | Contributions are its original creation(s) or it has sufficient rights 146 | to grant the rights to its Contributions conveyed by this License. 147 | 148 | 2.6. Fair Use 149 | 150 | This License is not intended to limit any rights You have under 151 | applicable copyright doctrines of fair use, fair dealing, or other 152 | equivalents. 153 | 154 | 2.7. Conditions 155 | 156 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted 157 | in Section 2.1. 158 | 159 | 3. Responsibilities 160 | ------------------- 161 | 162 | 3.1. Distribution of Source Form 163 | 164 | All distribution of Covered Software in Source Code Form, including any 165 | Modifications that You create or to which You contribute, must be under 166 | the terms of this License. You must inform recipients that the Source 167 | Code Form of the Covered Software is governed by the terms of this 168 | License, and how they can obtain a copy of this License. You may not 169 | attempt to alter or restrict the recipients' rights in the Source Code 170 | Form. 171 | 172 | 3.2. 
Distribution of Executable Form 173 | 174 | If You distribute Covered Software in Executable Form then: 175 | 176 | (a) such Covered Software must also be made available in Source Code 177 | Form, as described in Section 3.1, and You must inform recipients of 178 | the Executable Form how they can obtain a copy of such Source Code 179 | Form by reasonable means in a timely manner, at a charge no more 180 | than the cost of distribution to the recipient; and 181 | 182 | (b) You may distribute such Executable Form under the terms of this 183 | License, or sublicense it under different terms, provided that the 184 | license for the Executable Form does not attempt to limit or alter 185 | the recipients' rights in the Source Code Form under this License. 186 | 187 | 3.3. Distribution of a Larger Work 188 | 189 | You may create and distribute a Larger Work under terms of Your choice, 190 | provided that You also comply with the requirements of this License for 191 | the Covered Software. If the Larger Work is a combination of Covered 192 | Software with a work governed by one or more Secondary Licenses, and the 193 | Covered Software is not Incompatible With Secondary Licenses, this 194 | License permits You to additionally distribute such Covered Software 195 | under the terms of such Secondary License(s), so that the recipient of 196 | the Larger Work may, at their option, further distribute the Covered 197 | Software under the terms of either this License or such Secondary 198 | License(s). 199 | 200 | 3.4. Notices 201 | 202 | You may not remove or alter the substance of any license notices 203 | (including copyright notices, patent notices, disclaimers of warranty, 204 | or limitations of liability) contained within the Source Code Form of 205 | the Covered Software, except that You may alter any license notices to 206 | the extent required to remedy known factual inaccuracies. 207 | 208 | 3.5. Application of Additional Terms 209 | 210 | You may choose to offer, and to charge a fee for, warranty, support, 211 | indemnity or liability obligations to one or more recipients of Covered 212 | Software. However, You may do so only on Your own behalf, and not on 213 | behalf of any Contributor. You must make it absolutely clear that any 214 | such warranty, support, indemnity, or liability obligation is offered by 215 | You alone, and You hereby agree to indemnify every Contributor for any 216 | liability incurred by such Contributor as a result of warranty, support, 217 | indemnity or liability terms You offer. You may include additional 218 | disclaimers of warranty and limitations of liability specific to any 219 | jurisdiction. 220 | 221 | 4. Inability to Comply Due to Statute or Regulation 222 | --------------------------------------------------- 223 | 224 | If it is impossible for You to comply with any of the terms of this 225 | License with respect to some or all of the Covered Software due to 226 | statute, judicial order, or regulation then You must: (a) comply with 227 | the terms of this License to the maximum extent possible; and (b) 228 | describe the limitations and the code they affect. Such description must 229 | be placed in a text file included with all distributions of the Covered 230 | Software under this License. Except to the extent prohibited by statute 231 | or regulation, such description must be sufficiently detailed for a 232 | recipient of ordinary skill to be able to understand it. 233 | 234 | 5. Termination 235 | -------------- 236 | 237 | 5.1. 
The rights granted under this License will terminate automatically 238 | if You fail to comply with any of its terms. However, if You become 239 | compliant, then the rights granted under this License from a particular 240 | Contributor are reinstated (a) provisionally, unless and until such 241 | Contributor explicitly and finally terminates Your grants, and (b) on an 242 | ongoing basis, if such Contributor fails to notify You of the 243 | non-compliance by some reasonable means prior to 60 days after You have 244 | come back into compliance. Moreover, Your grants from a particular 245 | Contributor are reinstated on an ongoing basis if such Contributor 246 | notifies You of the non-compliance by some reasonable means, this is the 247 | first time You have received notice of non-compliance with this License 248 | from such Contributor, and You become compliant prior to 30 days after 249 | Your receipt of the notice. 250 | 251 | 5.2. If You initiate litigation against any entity by asserting a patent 252 | infringement claim (excluding declaratory judgment actions, 253 | counter-claims, and cross-claims) alleging that a Contributor Version 254 | directly or indirectly infringes any patent, then the rights granted to 255 | You by any and all Contributors for the Covered Software under Section 256 | 2.1 of this License shall terminate. 257 | 258 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all 259 | end user license agreements (excluding distributors and resellers) which 260 | have been validly granted by You or Your distributors under this License 261 | prior to termination shall survive termination. 262 | 263 | ************************************************************************ 264 | * * 265 | * 6. Disclaimer of Warranty * 266 | * ------------------------- * 267 | * * 268 | * Covered Software is provided under this License on an "as is" * 269 | * basis, without warranty of any kind, either expressed, implied, or * 270 | * statutory, including, without limitation, warranties that the * 271 | * Covered Software is free of defects, merchantable, fit for a * 272 | * particular purpose or non-infringing. The entire risk as to the * 273 | * quality and performance of the Covered Software is with You. * 274 | * Should any Covered Software prove defective in any respect, You * 275 | * (not any Contributor) assume the cost of any necessary servicing, * 276 | * repair, or correction. This disclaimer of warranty constitutes an * 277 | * essential part of this License. No use of any Covered Software is * 278 | * authorized under this License except under this disclaimer. * 279 | * * 280 | ************************************************************************ 281 | 282 | ************************************************************************ 283 | * * 284 | * 7. Limitation of Liability * 285 | * -------------------------- * 286 | * * 287 | * Under no circumstances and under no legal theory, whether tort * 288 | * (including negligence), contract, or otherwise, shall any * 289 | * Contributor, or anyone who distributes Covered Software as * 290 | * permitted above, be liable to You for any direct, indirect, * 291 | * special, incidental, or consequential damages of any character * 292 | * including, without limitation, damages for lost profits, loss of * 293 | * goodwill, work stoppage, computer failure or malfunction, or any * 294 | * and all other commercial damages or losses, even if such party * 295 | * shall have been informed of the possibility of such damages. 
This * 296 | * limitation of liability shall not apply to liability for death or * 297 | * personal injury resulting from such party's negligence to the * 298 | * extent applicable law prohibits such limitation. Some * 299 | * jurisdictions do not allow the exclusion or limitation of * 300 | * incidental or consequential damages, so this exclusion and * 301 | * limitation may not apply to You. * 302 | * * 303 | ************************************************************************ 304 | 305 | 8. Litigation 306 | ------------- 307 | 308 | Any litigation relating to this License may be brought only in the 309 | courts of a jurisdiction where the defendant maintains its principal 310 | place of business and such litigation shall be governed by laws of that 311 | jurisdiction, without reference to its conflict-of-law provisions. 312 | Nothing in this Section shall prevent a party's ability to bring 313 | cross-claims or counter-claims. 314 | 315 | 9. Miscellaneous 316 | ---------------- 317 | 318 | This License represents the complete agreement concerning the subject 319 | matter hereof. If any provision of this License is held to be 320 | unenforceable, such provision shall be reformed only to the extent 321 | necessary to make it enforceable. Any law or regulation which provides 322 | that the language of a contract shall be construed against the drafter 323 | shall not be used to construe this License against a Contributor. 324 | 325 | 10. Versions of the License 326 | --------------------------- 327 | 328 | 10.1. New Versions 329 | 330 | Mozilla Foundation is the license steward. Except as provided in Section 331 | 10.3, no one other than the license steward has the right to modify or 332 | publish new versions of this License. Each version will be given a 333 | distinguishing version number. 334 | 335 | 10.2. Effect of New Versions 336 | 337 | You may distribute the Covered Software under the terms of the version 338 | of the License under which You originally received the Covered Software, 339 | or under the terms of any subsequent version published by the license 340 | steward. 341 | 342 | 10.3. Modified Versions 343 | 344 | If you create software not governed by this License, and you want to 345 | create a new license for such software, you may create and use a 346 | modified version of this License if you rename the license and remove 347 | any references to the name of the license steward (except to note that 348 | such modified license differs from this License). 349 | 350 | 10.4. Distributing Source Code Form that is Incompatible With Secondary 351 | Licenses 352 | 353 | If You choose to distribute Source Code Form that is Incompatible With 354 | Secondary Licenses under the terms of this version of the License, the 355 | notice described in Exhibit B of this License must be attached. 356 | 357 | Exhibit A - Source Code Form License Notice 358 | ------------------------------------------- 359 | 360 | This Source Code Form is subject to the terms of the Mozilla Public 361 | License, v. 2.0. If a copy of the MPL was not distributed with this 362 | file, You can obtain one at http://mozilla.org/MPL/2.0/. 363 | 364 | If it is not possible or desirable to put the notice in a particular 365 | file, then You may include the notice in a location (such as a LICENSE 366 | file in a relevant directory) where a recipient would be likely to look 367 | for such a notice. 368 | 369 | You may add additional accurate notices of copyright ownership. 
370 | 371 | Exhibit B - "Incompatible With Secondary Licenses" Notice 372 | --------------------------------------------------------- 373 | 374 | This Source Code Form is "Incompatible With Secondary Licenses", as 375 | defined by the Mozilla Public License, v. 2.0. 376 | --------------------------------------------------------------------------------