├── backend ├── __init__.py ├── api │ ├── __init__.py │ ├── __pycache__ │ │ ├── routes.cpython-310.pyc │ │ ├── __init__.cpython-310.pyc │ │ └── dependencies.cpython-310.pyc │ ├── dependencies.py │ └── routes.py ├── database │ ├── __init__.py │ ├── __pycache__ │ │ ├── crud.cpython-310.pyc │ │ ├── models.cpython-310.pyc │ │ ├── __init__.cpython-310.pyc │ │ ├── database.cpython-310.pyc │ │ └── schemas.cpython-310.pyc │ ├── database.py │ ├── models.py │ ├── schemas.py │ └── crud.py ├── tests │ ├── __init__.py │ ├── test_rag.py │ ├── test_agents.py │ └── test_api.py ├── ai_engine │ ├── __init__.py │ ├── rag │ │ ├── __init__.py │ │ ├── __pycache__ │ │ │ ├── __init__.cpython-310.pyc │ │ │ ├── retriever.cpython-310.pyc │ │ │ └── vector_store.cpython-310.pyc │ │ ├── vector_store.py │ │ └── retriever.py │ ├── agents │ │ ├── __init__.py │ │ ├── __pycache__ │ │ │ ├── __init__.cpython-310.pyc │ │ │ ├── task_agent.cpython-310.pyc │ │ │ ├── ai_assistant.cpython-310.pyc │ │ │ ├── report_agent.cpython-310.pyc │ │ │ ├── priority_agent.cpython-310.pyc │ │ │ ├── suggestion_agent.cpython-310.pyc │ │ │ └── collaboration_agent.cpython-310.pyc │ │ ├── ai_assistant.py │ │ ├── collaboration_agent.py │ │ ├── suggestion_agent.py │ │ ├── task_agent.py │ │ ├── priority_agent.py │ │ └── report_agent.py │ ├── utils │ │ ├── __init__.py │ │ ├── __pycache__ │ │ │ ├── __init__.cpython-310.pyc │ │ │ └── helpers.cpython-310.pyc │ │ └── helpers.py │ ├── workflow │ │ ├── __init__.py │ │ ├── __pycache__ │ │ │ ├── graph.cpython-310.pyc │ │ │ ├── __init__.cpython-310.pyc │ │ │ └── state_manager.cpython-310.pyc │ │ ├── state_manager.py │ │ └── graph.py │ └── __pycache__ │ │ └── __init__.cpython-310.pyc ├── __pycache__ │ ├── config.cpython-39.pyc │ ├── main.cpython-310.pyc │ ├── main.cpython-39.pyc │ ├── __init__.cpython-39.pyc │ ├── config.cpython-310.pyc │ └── __init__.cpython-310.pyc ├── main.py └── config.py ├── .gitignore ├── menu.png ├── test.db ├── graph_flow.png ├── apply_app.txt ├── frontend ├── components │ ├── aiAssistant.js │ ├── taskList.js │ ├── projectList.js │ └── projectDetails.js ├── styles.css ├── index.html └── app.js ├── docker └── Dockerfile.backend ├── requirements.txt ├── LICENSE └── README.md /backend/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /backend/api/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /backend/database/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /backend/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /backend/ai_engine/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /backend/ai_engine/rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /backend/ai_engine/agents/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /backend/ai_engine/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /backend/ai_engine/workflow/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | conad_env.txt 2 | .env 3 | __pycache__ 4 | apply_app.txt 5 | test.db -------------------------------------------------------------------------------- /menu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/menu.png -------------------------------------------------------------------------------- /test.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/test.db -------------------------------------------------------------------------------- /graph_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/graph_flow.png -------------------------------------------------------------------------------- /backend/__pycache__/config.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/__pycache__/config.cpython-39.pyc -------------------------------------------------------------------------------- /backend/__pycache__/main.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/__pycache__/main.cpython-310.pyc -------------------------------------------------------------------------------- /backend/__pycache__/main.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/__pycache__/main.cpython-39.pyc -------------------------------------------------------------------------------- /backend/__pycache__/__init__.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/__pycache__/__init__.cpython-39.pyc -------------------------------------------------------------------------------- /backend/__pycache__/config.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/__pycache__/config.cpython-310.pyc -------------------------------------------------------------------------------- /backend/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- 
/backend/api/__pycache__/routes.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/api/__pycache__/routes.cpython-310.pyc -------------------------------------------------------------------------------- /backend/api/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/api/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- /apply_app.txt: -------------------------------------------------------------------------------- 1 | backend 2 | uvicorn backend.main:app --reload 3 | 4 | frontend 5 | python -m http.server 8080 6 | 7 | 8 | # see the app in the web 9 | http://localhost:8080/ -------------------------------------------------------------------------------- /backend/database/__pycache__/crud.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/database/__pycache__/crud.cpython-310.pyc -------------------------------------------------------------------------------- /backend/database/__pycache__/models.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/database/__pycache__/models.cpython-310.pyc -------------------------------------------------------------------------------- /backend/api/__pycache__/dependencies.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/api/__pycache__/dependencies.cpython-310.pyc -------------------------------------------------------------------------------- /backend/database/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/database/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- /backend/database/__pycache__/database.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/database/__pycache__/database.cpython-310.pyc -------------------------------------------------------------------------------- /backend/database/__pycache__/schemas.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/database/__pycache__/schemas.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- 
/backend/ai_engine/rag/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/rag/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/agents/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/agents/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/rag/__pycache__/retriever.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/rag/__pycache__/retriever.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/utils/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/utils/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/utils/__pycache__/helpers.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/utils/__pycache__/helpers.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/workflow/__pycache__/graph.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/workflow/__pycache__/graph.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/agents/__pycache__/task_agent.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/agents/__pycache__/task_agent.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/rag/__pycache__/vector_store.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/rag/__pycache__/vector_store.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/workflow/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/workflow/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/agents/__pycache__/ai_assistant.cpython-310.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/agents/__pycache__/ai_assistant.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/agents/__pycache__/report_agent.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/agents/__pycache__/report_agent.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/agents/__pycache__/priority_agent.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/agents/__pycache__/priority_agent.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/workflow/__pycache__/state_manager.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/workflow/__pycache__/state_manager.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/agents/__pycache__/suggestion_agent.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/agents/__pycache__/suggestion_agent.cpython-310.pyc -------------------------------------------------------------------------------- /backend/ai_engine/agents/__pycache__/collaboration_agent.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yotambraun/Project_Management_System_with_RAG/HEAD/backend/ai_engine/agents/__pycache__/collaboration_agent.cpython-310.pyc -------------------------------------------------------------------------------- /frontend/components/aiAssistant.js: -------------------------------------------------------------------------------- 1 | const API_URL = 'http://localhost:8000/api/v1'; 2 | 3 | export function askAI(projectId, query) { 4 | return fetch(`${API_URL}/projects/${projectId}/ai_assistant`, { 5 | method: 'POST', 6 | headers: { 7 | 'Content-Type': 'application/json', 8 | }, 9 | body: JSON.stringify({ query }), 10 | }) 11 | .then(response => response.json()) 12 | .then(data => data.response) 13 | .catch(error => console.error('Error asking AI:', error)); 14 | } -------------------------------------------------------------------------------- /docker/Dockerfile.backend: -------------------------------------------------------------------------------- 1 | FROM python:3.9-slim 2 | 3 | # Set the working directory in the container 4 | WORKDIR /app 5 | 6 | # Copy the current directory contents into the container at /app 7 | COPY . 
/app 8 | 9 | # Install any needed packages specified in requirements.txt 10 | RUN pip install --no-cache-dir -r requirements.txt 11 | 12 | # Make port 8000 available to the world outside this container 13 | EXPOSE 8000 14 | 15 | # Define environment variable 16 | ENV NAME World 17 | 18 | # Run app.py when the container launches 19 | CMD ["uvicorn", "backend.main:app", "--host", "0.0.0.0", "--port", "8000"] -------------------------------------------------------------------------------- /backend/database/database.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import create_engine 2 | from sqlalchemy.ext.declarative import declarative_base 3 | from sqlalchemy.orm import sessionmaker 4 | from backend.config import settings 5 | 6 | SQLALCHEMY_DATABASE_URL = settings.DATABASE_URL 7 | engine = create_engine( 8 | SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False} 9 | ) 10 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) 11 | 12 | Base = declarative_base() 13 | 14 | def get_db(): 15 | db = SessionLocal() 16 | try: 17 | yield db 18 | finally: 19 | db.close() -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # FastAPI and related 2 | fastapi==0.68.0 3 | uvicorn==0.15.0 4 | pydantic==1.8.2 5 | pydantic_settings==2.5.2 6 | python-jose==3.3.0 7 | passlib==1.7.4 8 | python-multipart==0.0.5 9 | python_jose==3.3.0 10 | sqlalchemy==1.4.23 11 | alembic==1.7.1 12 | langchain 13 | langgraph==0.2.28 14 | langchain_community 15 | langchain_ollama 16 | langchain_groq 17 | openai==0.27.0 18 | faiss-cpu==1.7.2 19 | pytest==6.2.5 20 | requests==2.26.0 21 | httpx==0.18.2 22 | python-dotenv==0.19.0 23 | black==21.7b0 24 | isort==5.9.3 25 | flake8==3.9.2 26 | numpy 27 | pandas 28 | logging 29 | reportlab 30 | json -------------------------------------------------------------------------------- /backend/main.py: -------------------------------------------------------------------------------- 1 | 2 | from fastapi import FastAPI 3 | from fastapi.middleware.cors import CORSMiddleware 4 | from .config import settings 5 | from .api.routes import router as api_router 6 | from .database.database import engine 7 | from .database import models 8 | from .ai_engine.rag.vector_store import vector_store 9 | from .database.models import Base, User, Project, Task, TeamMember 10 | 11 | Base.metadata.create_all(bind=engine) 12 | app = FastAPI( 13 | title=settings.APP_NAME, 14 | description="AI-powered project management system with RAG", 15 | version="1.0.0", 16 | ) 17 | 18 | app.add_middleware( 19 | CORSMiddleware, 20 | allow_origins=["*"], 21 | allow_credentials=True, 22 | allow_methods=["*"], 23 | allow_headers=["*"], 24 | ) 25 | 26 | app.include_router(api_router, prefix=settings.API_V1_PREFIX) 27 | 28 | @app.get("/") 29 | async def root(): 30 | return {"message": f"Welcome to the {settings.APP_NAME} API"} 31 | 32 | if __name__ == "__main__": 33 | import uvicorn 34 | uvicorn.run(app, host="0.0.0.0", port=8000) -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Yotam Braun 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 
| in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /frontend/components/taskList.js: -------------------------------------------------------------------------------- 1 | import { app } from '../app.js'; 2 | 3 | const API_URL = 'http://localhost:8000/api/v1'; 4 | 5 | export function loadTasks(projectId) { 6 | fetch(`${API_URL}/projects/${projectId}/tasks`) 7 | .then(response => response.json()) 8 | .then(tasks => { 9 | app.innerHTML = ` 10 |

<h2>Tasks</h2>
11 | 21 | `; 22 | }) 23 | .catch(error => console.error('Error loading tasks:', error)); 24 | } 25 | 26 | export function createTask(projectId, taskData) { 27 | return fetch(`${API_URL}/projects/${projectId}/tasks`, { 28 | method: 'POST', 29 | headers: { 30 | 'Content-Type': 'application/json', 31 | }, 32 | body: JSON.stringify(taskData), 33 | }) 34 | .then(response => response.json()) 35 | .catch(error => console.error('Error creating task:', error)); 36 | } -------------------------------------------------------------------------------- /frontend/styles.css: -------------------------------------------------------------------------------- 1 | /* frontend/styles.css */ 2 | body { 3 | font-family: Arial, sans-serif; 4 | line-height: 1.6; 5 | margin: 0; 6 | padding: 0; 7 | background-color: #f4f4f4; 8 | } 9 | 10 | header { 11 | background-color: #333; 12 | color: #fff; 13 | text-align: center; 14 | padding: 1rem; 15 | } 16 | 17 | nav { 18 | background-color: #444; 19 | padding: 0.5rem; 20 | } 21 | 22 | nav button { 23 | background-color: #555; 24 | color: #fff; 25 | border: none; 26 | padding: 0.5rem 1rem; 27 | margin-right: 0.5rem; 28 | cursor: pointer; 29 | } 30 | 31 | main { 32 | padding: 2rem; 33 | } 34 | 35 | .project-list, .task-list { 36 | list-style-type: none; 37 | padding: 0; 38 | } 39 | 40 | .project-list li, .task-list li { 41 | background-color: #fff; 42 | margin-bottom: 1rem; 43 | padding: 1rem; 44 | border-radius: 5px; 45 | box-shadow: 0 2px 5px rgba(0,0,0,0.1); 46 | } 47 | 48 | .project-list li h2, .task-list li h3 { 49 | margin-top: 0; 50 | } 51 | 52 | form { 53 | background-color: #fff; 54 | padding: 1rem; 55 | border-radius: 5px; 56 | box-shadow: 0 2px 5px rgba(0,0,0,0.1); 57 | } 58 | 59 | form input, form textarea { 60 | width: 100%; 61 | padding: 0.5rem; 62 | margin-bottom: 1rem; 63 | } 64 | 65 | form button { 66 | background-color: #333; 67 | color: #fff; 68 | border: none; 69 | padding: 0.5rem 1rem; 70 | cursor: pointer; 71 | } 72 | -------------------------------------------------------------------------------- /backend/config.py: -------------------------------------------------------------------------------- 1 | from pydantic_settings import BaseSettings 2 | from typing import Optional 3 | import os 4 | from dotenv import load_dotenv 5 | 6 | current_dir = os.path.dirname(os.path.abspath(__file__)) 7 | env_path = os.path.join(current_dir, '.env') 8 | 9 | load_dotenv(env_path) 10 | 11 | class Settings(BaseSettings): 12 | APP_NAME: str = "AI-Powered Project Management System" 13 | API_V1_PREFIX: str = "/api/v1" 14 | DATABASE_URL: str = "sqlite:///./test.db" 15 | ALLOWED_ORIGINS: list = ["http://localhost:3000"] 16 | OPENAI_API_KEY: Optional[str] = None 17 | GROQ_API_KEY: Optional[str] = None 18 | 19 | class Config: 20 | env_file = env_path 21 | env_file_encoding = 'utf-8' 22 | 23 | settings = Settings() 24 | 25 | print("Debug information:") 26 | print(f"Current directory: {current_dir}") 27 | print(f".env file path: {env_path}") 28 | print(f".env file exists: {os.path.exists(env_path)}") 29 | print(f"GROQ_API_KEY from settings: {'Set' if settings.GROQ_API_KEY else 'Not set'}") 30 | print(f"GROQ_API_KEY from os.environ: {'Set' if os.environ.get('GROQ_API_KEY') else 'Not set'}") 31 | 32 | 33 | if settings.GROQ_API_KEY is None: 34 | print("Warning: GROQ_API_KEY is not set in settings. Attempting to get it from os.environ.") 35 | settings.GROQ_API_KEY = os.environ.get('GROQ_API_KEY') 36 | 37 | if settings.GROQ_API_KEY is None: 38 | print("Warning: GROQ_API_KEY is not set. 
Some features may not work properly.") 39 | else: 40 | print("GROQ_API_KEY is set successfully.") -------------------------------------------------------------------------------- /frontend/components/projectList.js: -------------------------------------------------------------------------------- 1 | import { app } from '../app.js'; 2 | import { loadProjectDetails } from './projectDetails.js'; 3 | 4 | const API_URL = 'http://localhost:8000/api/v1'; 5 | 6 | export function loadProjects() { 7 | fetch(`${API_URL}/projects`) 8 | .then(response => response.json()) 9 | .then(projects => { 10 | app.innerHTML = ` 11 |

<h1>My Projects</h1>
12 | 21 | `; 22 | }) 23 | .catch(error => console.error('Error loading projects:', error)); 24 | } 25 | 26 | export function createProject(projectData) { 27 | return fetch(`${API_URL}/projects`, { 28 | method: 'POST', 29 | headers: { 30 | 'Content-Type': 'application/json', 31 | }, 32 | body: JSON.stringify(projectData), 33 | }) 34 | .then(response => response.json()) 35 | .catch(error => console.error('Error creating project:', error)); 36 | } 37 | 38 | // Make loadProjectDetails global so it can be called from inline onclick handlers 39 | window.loadProjectDetails = loadProjectDetails; -------------------------------------------------------------------------------- /backend/ai_engine/agents/ai_assistant.py: -------------------------------------------------------------------------------- 1 | from langchain_groq import ChatGroq 2 | from langchain.prompts import ChatPromptTemplate 3 | from backend.ai_engine.rag.retriever import Retriever 4 | from dotenv import load_dotenv 5 | from langchain.schema import HumanMessage, SystemMessage 6 | import os 7 | import json 8 | 9 | load_dotenv() 10 | 11 | class AIAssistant: 12 | def __init__(self, retriever: Retriever): 13 | self.llm = ChatGroq( 14 | groq_api_key=os.getenv("GROQ_API_KEY"), 15 | model_name="mixtral-8x7b-32768", 16 | temperature=0.5, 17 | max_tokens=1024, 18 | ) 19 | self.retriever = retriever 20 | 21 | def answer_question(self, project_id: int, question: str) -> str: 22 | project_context = self.retriever.get_project_context(project_id) 23 | related_info = self.retriever.get_related_information(question) 24 | 25 | formatted_project_context = json.dumps(project_context, indent=2) 26 | formatted_related_info = json.dumps(related_info, indent=2) 27 | 28 | system_message = SystemMessage(content="You are an AI assistant for a project management system. Answer the following question based on the provided context.") 29 | human_message = HumanMessage(content=f""" 30 | Project context: {formatted_project_context} 31 | Related information: {formatted_related_info} 32 | Question: {question} 33 | Answer: 34 | """) 35 | 36 | messages = [system_message, human_message] 37 | 38 | print(f"Sending to AI:\n{human_message.content}") # Debug print 39 | 40 | try: 41 | response = self.llm.invoke(messages) 42 | print(f"AI response:\n{response.content}") # Debug print 43 | return response.content 44 | except Exception as e: 45 | print(f"Error in AI chat: {str(e)}") 46 | return f"I'm sorry, but I encountered an error while trying to answer your question. 
Error: {str(e)}" -------------------------------------------------------------------------------- /backend/api/dependencies.py: -------------------------------------------------------------------------------- 1 | from fastapi import Depends, HTTPException, status 2 | from fastapi.security import OAuth2PasswordBearer 3 | from jose import JWTError, jwt 4 | from sqlalchemy.orm import Session 5 | from datetime import datetime, timedelta 6 | from typing import Optional 7 | 8 | from backend.config import settings 9 | from backend.database.database import SessionLocal 10 | from backend.database import crud, models, schemas 11 | from backend.database import models 12 | 13 | oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") 14 | 15 | def get_db(): 16 | db = SessionLocal() 17 | try: 18 | yield db 19 | finally: 20 | db.close() 21 | 22 | def create_access_token(data: dict, expires_delta: Optional[timedelta] = None): 23 | to_encode = data.copy() 24 | if expires_delta: 25 | expire = datetime.utcnow() + expires_delta 26 | else: 27 | expire = datetime.utcnow() + timedelta(minutes=15) 28 | to_encode.update({"exp": expire}) 29 | encoded_jwt = jwt.encode(to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM) 30 | return encoded_jwt 31 | 32 | async def get_current_user(token: str = Depends(oauth2_scheme), db: Session = Depends(get_db)): 33 | credentials_exception = HTTPException( 34 | status_code=status.HTTP_401_UNAUTHORIZED, 35 | detail="Could not validate credentials", 36 | headers={"WWW-Authenticate": "Bearer"}, 37 | ) 38 | try: 39 | payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM]) 40 | username: str = payload.get("sub") 41 | if username is None: 42 | raise credentials_exception 43 | token_data = schemas.TokenData(username=username) 44 | except JWTError: 45 | raise credentials_exception 46 | user = crud.get_user_by_username(db, username=token_data.username) 47 | if user is None: 48 | raise credentials_exception 49 | return user 50 | 51 | async def get_current_active_user(current_user: models.User = Depends(get_current_user)): 52 | if not current_user.is_active: 53 | raise HTTPException(status_code=400, detail="Inactive user") 54 | return current_user 55 | 56 | 57 | def get_db_session(): 58 | return Depends(get_db) -------------------------------------------------------------------------------- /frontend/components/projectDetails.js: -------------------------------------------------------------------------------- 1 | import { app } from '../app.js'; 2 | import { loadTasks, createTask } from './taskList.js'; 3 | import { askAI } from './aiAssistant.js'; 4 | 5 | const API_URL = 'http://localhost:8000/api/v1'; 6 | 7 | export function loadProjectDetails(projectId) { 8 | fetch(`${API_URL}/projects/${projectId}`) 9 | .then(response => response.json()) 10 | .then(project => { 11 | app.innerHTML = ` 12 |

<h1>${project.name}</h1> 13 | <p>${project.description}</p>
14 | <button onclick="loadTasks(${projectId})">View Tasks</button> 15 | <button onclick="showCreateTaskForm(${projectId})">Create Task</button> 16 | <button onclick="showAIAssistant(${projectId})">AI Assistant</button> 17 | `; 18 | }) 19 | .catch(error => console.error('Error loading project details:', error)); 20 | } 21 | 22 | function showCreateTaskForm(projectId) { 23 | app.innerHTML += ` 24 |

<h3>Create New Task</h3> 25 | <form id="createTaskForm"> 26 | <input type="text" id="taskName" placeholder="Task name" required> 27 | <textarea id="taskDescription" placeholder="Task description"></textarea> 28 | <button type="submit">Create Task</button> 29 | </form>
30 | `; 31 | document.getElementById('createTaskForm').addEventListener('submit', e => handleCreateTask(e, projectId)); 32 | } 33 | 34 | function handleCreateTask(e, projectId) { 35 | e.preventDefault(); 36 | const name = document.getElementById('taskName').value; 37 | const description = document.getElementById('taskDescription').value; 38 | createTask(projectId, { name, description }) 39 | .then(() => loadTasks(projectId)) 40 | .catch(error => console.error('Error creating task:', error)); 41 | } 42 | 43 | function showAIAssistant(projectId) { 44 | app.innerHTML += ` 45 |

<h3>AI Assistant</h3> 46 | <input type="text" id="aiQuery" placeholder="Ask a question about this project"> 47 | <button onclick="handleAskAI(${projectId})">Ask AI</button> 48 | <div id="aiResponse"></div>
49 | `; 50 | } 51 | 52 | function handleAskAI(projectId) { 53 | const query = document.getElementById('aiQuery').value; 54 | askAI(projectId, query) 55 | .then(response => { 56 | document.getElementById('aiResponse').innerHTML = `

<p>${response}</p>
`; 57 | }) 58 | .catch(error => console.error('Error asking AI:', error)); 59 | } 60 | 61 | // Make functions global so they can be called from inline onclick handlers 62 | window.loadTasks = loadTasks; 63 | window.showCreateTaskForm = showCreateTaskForm; 64 | window.showAIAssistant = showAIAssistant; 65 | window.handleAskAI = handleAskAI; -------------------------------------------------------------------------------- /backend/ai_engine/workflow/state_manager.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Any, List 2 | from pydantic import BaseModel, Field 3 | 4 | 5 | class Task(BaseModel): 6 | title: str 7 | description: str 8 | status: str 9 | priority: str = None 10 | estimated_duration: float 11 | actual_duration: float = None 12 | required_skills: List[str] 13 | suggestions: Dict[str, Any] = None 14 | collaboration: Dict[str, Any] = None 15 | 16 | class WorkflowState(BaseModel): 17 | project_id: int 18 | input_description: str 19 | tasks: List[Task] = Field(default_factory=list) 20 | generate_report: bool = False 21 | report: Dict[str, Any] = None 22 | 23 | class StateManager: 24 | @staticmethod 25 | def initialize_state(project_id: int, input_description: str) -> WorkflowState: 26 | return WorkflowState( 27 | project_id=project_id, 28 | input_description=input_description 29 | ) 30 | 31 | @staticmethod 32 | def update_state(state: WorkflowState, updates: Dict[str, Any]) -> WorkflowState: 33 | for key, value in updates.items(): 34 | if hasattr(state, key): 35 | setattr(state, key, value) 36 | return state 37 | 38 | @staticmethod 39 | def add_task(state: WorkflowState, task: Dict[str, Any]) -> WorkflowState: 40 | state.tasks.append(Task(**task)) 41 | return state 42 | 43 | @staticmethod 44 | def update_task(state: WorkflowState, task_index: int, updates: Dict[str, Any]) -> WorkflowState: 45 | if 0 <= task_index < len(state.tasks): 46 | task = state.tasks[task_index] 47 | for key, value in updates.items(): 48 | if hasattr(task, key): 49 | setattr(task, key, value) 50 | return state 51 | 52 | @staticmethod 53 | def set_generate_report(state: WorkflowState, generate: bool) -> WorkflowState: 54 | state.generate_report = generate 55 | return state 56 | 57 | @staticmethod 58 | def get_state_summary(state: WorkflowState) -> Dict[str, Any]: 59 | return { 60 | "project_id": state.project_id, 61 | "total_tasks": len(state.tasks), 62 | "tasks_by_status": {status: sum(1 for task in state.tasks if task.status == status) for status in set(task.status for task in state.tasks)}, 63 | "generate_report": state.generate_report 64 | } 65 | 66 | @staticmethod 67 | def get_task_by_title(state: WorkflowState, title: str) -> Task: 68 | for task in state.tasks: 69 | if task.title == title: 70 | return task 71 | return None 72 | 73 | @staticmethod 74 | def get_all_tasks(state: WorkflowState) -> List[Task]: 75 | return state.tasks 76 | 77 | @staticmethod 78 | def clear_tasks(state: WorkflowState) -> WorkflowState: 79 | state.tasks = [] 80 | return state -------------------------------------------------------------------------------- /backend/ai_engine/agents/collaboration_agent.py: -------------------------------------------------------------------------------- 1 | # from langchain_community.chat_models import ChatOpenAI 2 | from langchain_groq import ChatGroq 3 | from langchain.prompts import ChatPromptTemplate 4 | from langchain.output_parsers import PydanticOutputParser 5 | from pydantic import BaseModel, Field 6 | from typing import List, 
Dict , Any 7 | from backend.ai_engine.rag.retriever import Retriever 8 | from dotenv import load_dotenv 9 | import os 10 | 11 | load_dotenv() 12 | 13 | class TeamFormation(BaseModel): 14 | member_name: str = Field(description="Name of the team member") 15 | role: str = Field(description="Assigned role for the task") 16 | 17 | class CollaborationOutput(BaseModel): 18 | team_formation: List[TeamFormation] = Field(description="Suggested team formation for the task") 19 | communication_plan: str = Field(description="Suggested communication plan for the team") 20 | 21 | class CollaborationAgent: 22 | """CollaborationAgent class to suggest team formation and communication plan for a task""" 23 | def __init__(self, retriever: Retriever,model_name: str = "gpt-3.5-turbo"): 24 | self.llm = ChatGroq(groq_api_key=os.getenv("GROQ_API_KEY"), 25 | model="mixtral-8x7b-32768", 26 | temperature=0, 27 | max_tokens=None, 28 | timeout=None, 29 | max_retries=2,) 30 | self.parser = PydanticOutputParser(pydantic_object=CollaborationOutput) 31 | self.retriever = retriever 32 | 33 | self.collaboration_prompt = ChatPromptTemplate.from_template( 34 | "Suggest a team formation and communication plan for the following task:\n" 35 | "Task: {task_description}\n" 36 | "Available team members: {available_team_members}\n" 37 | "Project context: {project_context}\n" 38 | "Similar past collaborations: {similar_collaborations}\n" 39 | "{format_instructions}" 40 | ) 41 | 42 | def suggest_collaboration(self, task: Dict[str, Any], project_id: int) -> Dict[str, Any]: 43 | """Suggest team formation and communication plan for a task""" 44 | available_team_members = self.retriever.get_available_team_members(project_id) 45 | project_context = self.retriever.get_project_context(project_id) 46 | similar_collaborations = self.retriever.get_similar_collaborations(task['description'], project_id) 47 | 48 | prompt = self.collaboration_prompt.format( 49 | task_description=f"{task['title']} (Duration: {task['estimated_duration']}, Skills: {', '.join(task['required_skills'])})", 50 | available_team_members=available_team_members, 51 | project_context=project_context, 52 | similar_collaborations=similar_collaborations, 53 | format_instructions=self.parser.get_format_instructions() 54 | ) 55 | response = self.llm(prompt) 56 | collaboration_info = self.parser.parse(response.content) 57 | 58 | return collaboration_info.dict() -------------------------------------------------------------------------------- /backend/database/models.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Table, Date, Boolean, Float 2 | from sqlalchemy.orm import relationship 3 | from sqlalchemy.ext.declarative import declarative_base 4 | from datetime import datetime 5 | 6 | Base = declarative_base() 7 | project_team_members = Table('project_team_members', Base.metadata, 8 | Column('project_id', Integer, ForeignKey('projects.id')), 9 | Column('team_member_id', Integer, ForeignKey('team_members.id')) 10 | ) 11 | 12 | 13 | class User(Base): 14 | __tablename__ = "users" 15 | 16 | id = Column(Integer, primary_key=True, index=True) 17 | username = Column(String, unique=True, index=True) 18 | email = Column(String, unique=True, index=True) 19 | hashed_password = Column(String) 20 | is_active = Column(Boolean, default=True) 21 | 22 | projects = relationship("Project", back_populates="owner") 23 | 24 | class Project(Base): 25 | __tablename__ = 'projects' 26 | 27 | id = 
Column(Integer, primary_key=True) 28 | name = Column(String, nullable=False) 29 | description = Column(String) 30 | start_date = Column(Date) 31 | end_date = Column(Date) 32 | status = Column(String) 33 | created_at = Column(DateTime, nullable=False, default=datetime.utcnow) 34 | updated_at = Column(DateTime) 35 | 36 | owner_id = Column(Integer, ForeignKey("users.id")) 37 | owner = relationship("User", back_populates="projects") 38 | tasks = relationship("Task", back_populates="project") 39 | team_members = relationship("TeamMember", secondary=project_team_members, back_populates="projects") 40 | 41 | class Task(Base): 42 | __tablename__ = 'tasks' 43 | 44 | id = Column(Integer, primary_key=True) 45 | title = Column(String, nullable=False) 46 | description = Column(String) 47 | status = Column(String, nullable=False) 48 | priority = Column(String) 49 | priority_reasoning = Column(String) 50 | estimated_duration = Column(Float) 51 | actual_duration = Column(Float) 52 | created_at = Column(DateTime, nullable=False, default=datetime.utcnow) 53 | updated_at = Column(DateTime) 54 | due_date = Column(Date) 55 | required_skills = Column(String) # Store as JSON string 56 | 57 | project_id = Column(Integer, ForeignKey('projects.id')) 58 | project = relationship("Project", back_populates="tasks") 59 | assigned_to_id = Column(Integer, ForeignKey('team_members.id')) 60 | assigned_to = relationship("TeamMember", back_populates="assigned_tasks") 61 | 62 | class TeamMember(Base): 63 | __tablename__ = 'team_members' 64 | 65 | id = Column(Integer, primary_key=True) 66 | name = Column(String, nullable=False) 67 | email = Column(String, unique=True) 68 | role = Column(String) 69 | skills = Column(String) 70 | 71 | assigned_tasks = relationship("Task", back_populates="assigned_to") 72 | projects = relationship("Project", secondary=project_team_members, back_populates="team_members") -------------------------------------------------------------------------------- /backend/database/schemas.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, Field, validator 2 | from typing import List, Optional 3 | from datetime import datetime, date 4 | import json 5 | 6 | 7 | class TaskBase(BaseModel): 8 | title: str 9 | description: Optional[str] = None 10 | status: Optional[str] = "New" 11 | priority: Optional[str] = None 12 | estimated_duration: Optional[float] = None 13 | due_date: Optional[date] = None 14 | required_skills: Optional[List[str]] = None 15 | 16 | class TaskCreate(TaskBase): 17 | pass 18 | 19 | class Task(TaskBase): 20 | id: int 21 | created_at: datetime 22 | updated_at: Optional[datetime] = None 23 | project_id: int 24 | assigned_to_id: Optional[int] = None 25 | priority: Optional[str] = None 26 | priority_reasoning: Optional[str] = None 27 | 28 | @validator('required_skills', pre=True) 29 | def parse_required_skills(cls, v): 30 | if isinstance(v, str): 31 | return json.loads(v) 32 | return v 33 | 34 | class Config: 35 | from_attributes = True 36 | 37 | class TeamMemberBase(BaseModel): 38 | name: str 39 | email: str 40 | role: Optional[str] = None 41 | skills: Optional[List[str]] = None 42 | 43 | class TeamMemberCreate(TeamMemberBase): 44 | pass 45 | 46 | class TeamMemberInDB(TeamMemberBase): 47 | id: int 48 | skills: Optional[str] = None # This will be the JSON string stored in the database 49 | 50 | class Config: 51 | from_attributes = True 52 | 53 | class ProjectBase(BaseModel): 54 | name: str 55 | description: Optional[str] = None 56 | 
start_date: Optional[date] = None 57 | end_date: Optional[date] = None 58 | status: Optional[str] = None 59 | 60 | class ProjectCreate(ProjectBase): 61 | pass 62 | 63 | class TeamMemberBase(BaseModel): 64 | id: int 65 | name: str 66 | email: str 67 | role: Optional[str] = None 68 | skills: Optional[List[str]] = None 69 | 70 | class TeamMember(TeamMemberBase): 71 | id: int 72 | assigned_tasks: List[Task] = [] 73 | 74 | class Config: 75 | from_attributes = True 76 | 77 | class ProjectOut(ProjectBase): 78 | id: int 79 | created_at: datetime 80 | updated_at: Optional[datetime] = None 81 | tasks: List[Task] = [] 82 | team_members: List[TeamMember] = [] 83 | 84 | class Config: 85 | from_attributes = True 86 | 87 | class Project(ProjectBase): 88 | id: int 89 | created_at: datetime 90 | updated_at: Optional[datetime] = None 91 | tasks: List[Task] = [] 92 | team_members: List[TeamMemberBase] = [] 93 | 94 | class Config: 95 | from_attributes = True 96 | 97 | class UserBase(BaseModel): 98 | username: str 99 | email: str 100 | 101 | class UserCreate(UserBase): 102 | password: str 103 | 104 | class User(UserBase): 105 | id: int 106 | is_active: bool 107 | projects: List[Project] = [] 108 | 109 | class Config: 110 | from_attributes = True -------------------------------------------------------------------------------- /backend/ai_engine/rag/vector_store.py: -------------------------------------------------------------------------------- 1 | import faiss 2 | from langchain_community.vectorstores import FAISS 3 | from langchain_ollama import OllamaEmbeddings 4 | from langchain.schema import Document 5 | from typing import List, Dict, Any 6 | import os 7 | from dotenv import load_dotenv 8 | 9 | load_dotenv() 10 | 11 | class VectorStore: 12 | def __init__(self): 13 | self.embeddings = OllamaEmbeddings(model="all-minilm") 14 | self.vector_store = None 15 | 16 | def create_vector_store(self, texts: List[str], metadatas: List[Dict[str, Any]]): 17 | documents = [Document(page_content=text, metadata=metadata) for text, metadata in zip(texts, metadatas)] 18 | try: 19 | print(f"Creating FAISS index with {len(documents)} documents") 20 | print(f"Embedding dimension: {len(self.embeddings.embed_query('test'))}") 21 | self.vector_store = FAISS.from_documents(documents, self.embeddings) 22 | print("FAISS index created successfully") 23 | except Exception as e: 24 | print(f"Error creating vector store: {e}") 25 | print("Falling back to in-memory storage without embeddings.") 26 | self.vector_store = InMemoryStore(documents) 27 | 28 | def add_texts(self, texts: List[str], metadatas: List[Dict[str, Any]]): 29 | if self.vector_store is None: 30 | self.create_vector_store(texts, metadatas) 31 | else: 32 | try: 33 | self.vector_store.add_texts(texts, metadatas=metadatas) 34 | except Exception as e: 35 | print(f"Error adding texts to vector store: {e}") 36 | 37 | def similarity_search(self, query: str, k: int = 4) -> List[Document]: 38 | if self.vector_store is None: 39 | self.initialize_with_dummy_data() 40 | try: 41 | return self.vector_store.similarity_search(query, k=k) 42 | except Exception as e: 43 | print(f"Error performing similarity search: {e}") 44 | return [] 45 | 46 | def initialize_with_dummy_data(self): 47 | dummy_texts = [ 48 | "This is a dummy task for initializing the vector store.", 49 | "Another dummy task to ensure the vector store is not empty.", 50 | "A third dummy task for good measure." 
51 | ] 52 | dummy_metadatas = [ 53 | {"title": "Dummy Task 1", "project_id": 0}, 54 | {"title": "Dummy Task 2", "project_id": 0}, 55 | {"title": "Dummy Task 3", "project_id": 0} 56 | ] 57 | self.create_vector_store(dummy_texts, dummy_metadatas) 58 | print("Vector store initialized with dummy data.") 59 | 60 | class InMemoryStore: 61 | def __init__(self, documents: List[Document]): 62 | self.documents = documents 63 | def similarity_search(self, query: str, k: int = 4) -> List[Document]: 64 | return self.documents[:k] 65 | 66 | def add_texts(self, texts: List[str], metadatas: List[Dict[str, Any]]): 67 | new_docs = [Document(page_content=text, metadata=metadata) for text, metadata in zip(texts, metadatas)] 68 | self.documents.extend(new_docs) 69 | 70 | vector_store = VectorStore() -------------------------------------------------------------------------------- /backend/ai_engine/agents/suggestion_agent.py: -------------------------------------------------------------------------------- 1 | # from langchain_community.chat_models import ChatOpenAI 2 | from langchain_groq import ChatGroq 3 | from langchain.prompts import ChatPromptTemplate 4 | from langchain.output_parsers import PydanticOutputParser 5 | from langchain.schema import HumanMessage, SystemMessage 6 | from pydantic import BaseModel, Field 7 | from typing import List, Dict, Any 8 | from backend.ai_engine.rag.retriever import Retriever 9 | from dotenv import load_dotenv 10 | import os 11 | import re 12 | 13 | 14 | load_dotenv() 15 | 16 | class SuggestionOutput(BaseModel): 17 | suggestions: List[str] = Field(description="List of suggestions for completing the task") 18 | resources: List[str] = Field(description="List of recommended resources for the task") 19 | 20 | class SuggestionAgent: 21 | def __init__(self, retriever: Retriever,model_name: str = "gpt-3.5-turbo"): 22 | self.llm = ChatGroq(groq_api_key=os.getenv("GROQ_API_KEY"), 23 | model="mixtral-8x7b-32768", 24 | temperature=0, 25 | max_tokens=None, 26 | timeout=None, 27 | max_retries=2,) 28 | self.parser = PydanticOutputParser(pydantic_object=SuggestionOutput) 29 | self.retriever = retriever 30 | 31 | self.suggestion_prompt = ChatPromptTemplate.from_template( 32 | "Provide suggestions and resources for completing the following task:\n" 33 | "Task: {task_description}\n" 34 | "Project context: {project_context}\n" 35 | "Similar completed tasks: {similar_tasks}\n" 36 | "Team member skills: {team_skills}\n" 37 | "{format_instructions}" 38 | ) 39 | 40 | def generate_suggestions(self, task: Dict[str, Any], project_id: int) -> Dict[str, Any]: 41 | project_context = self.retriever.get_project_context(project_id) 42 | similar_tasks = self.retriever.get_similar_completed_tasks(task['description'], project_id) 43 | team_skills = self.retriever.get_team_skills(project_id) 44 | 45 | task_description = f"{task['title']} (Duration: {task['estimated_duration']}, Skills: {', '.join(task['required_skills'])})" 46 | system_message = SystemMessage(content="You are a project management AI assistant. Provide suggestions and resources for completing the given task.") 47 | human_message = HumanMessage(content=f""" 48 | Task: {task_description} 49 | Project context: {project_context} 50 | Similar completed tasks: {similar_tasks} 51 | Team member skills: {team_skills} 52 | 53 | Please provide suggestions for completing this task and recommend relevant resources. 
54 | """) 55 | 56 | messages = [system_message, human_message] 57 | 58 | response = self.llm.invoke(messages) 59 | 60 | # Parse the response content manually 61 | content = response.content 62 | suggestions = re.findall(r'\d+\.\s*\*\*(.*?)\*\*:', content, re.DOTALL) 63 | resources = re.findall(r'\d+\.\s*\*\*(.*?)\*\*:', content.split("Recommended resources:")[1], re.DOTALL) if "Recommended resources:" in content else [] 64 | 65 | suggestion_info = { 66 | "suggestions": suggestions, 67 | "resources": resources 68 | } 69 | 70 | return suggestion_info -------------------------------------------------------------------------------- /backend/ai_engine/utils/helpers.py: -------------------------------------------------------------------------------- 1 | import re 2 | from typing import List, Dict, Any 3 | from datetime import datetime, timedelta 4 | from sqlalchemy.orm import class_mapper 5 | 6 | def extract_list_from_string(s: str) -> List[str]: 7 | """Extract a list of items from a comma-separated string.""" 8 | return [item.strip() for item in s.split(',') if item.strip()] 9 | 10 | def parse_duration(duration_str: str) -> timedelta: 11 | """Parse a duration string (e.g., '2 hours', '3 days') into a timedelta object.""" 12 | match = re.match(r'(\d+)\s*(hour|day|week)s?', duration_str.lower()) 13 | if not match: 14 | return timedelta() 15 | 16 | amount, unit = match.groups() 17 | amount = int(amount) 18 | 19 | if unit == 'hour': 20 | return timedelta(hours=amount) 21 | elif unit == 'day': 22 | return timedelta(days=amount) 23 | elif unit == 'week': 24 | return timedelta(weeks=amount) 25 | 26 | def calculate_project_metrics(tasks: List[Dict[str, Any]]) -> Dict[str, Any]: 27 | """Calculate various project metrics based on the tasks.""" 28 | total_tasks = len(tasks) 29 | completed_tasks = sum(1 for task in tasks if task['status'] == 'Completed') 30 | total_duration = sum((task['actual_duration'] or task['estimated_duration']) for task in tasks) 31 | 32 | return { 33 | 'total_tasks': total_tasks, 34 | 'completed_tasks': completed_tasks, 35 | 'completion_rate': completed_tasks / total_tasks if total_tasks > 0 else 0, 36 | 'total_duration': total_duration, 37 | 'average_task_duration': total_duration / total_tasks if total_tasks > 0 else 0 38 | } 39 | 40 | def format_timedelta(td: timedelta) -> str: 41 | """Format a timedelta object into a human-readable string.""" 42 | days, seconds = td.days, td.seconds 43 | hours = seconds // 3600 44 | minutes = (seconds % 3600) // 60 45 | 46 | parts = [] 47 | if days > 0: 48 | parts.append(f"{days} day{'s' if days > 1 else ''}") 49 | if hours > 0: 50 | parts.append(f"{hours} hour{'s' if hours > 1 else ''}") 51 | if minutes > 0: 52 | parts.append(f"{minutes} minute{'s' if minutes > 1 else ''}") 53 | 54 | return ', '.join(parts) if parts else "0 minutes" 55 | 56 | def sanitize_input(input_string: str) -> str: 57 | """Remove any potentially harmful characters from input strings.""" 58 | return re.sub(r'[^\w\s-]', '', input_string).strip() 59 | 60 | def generate_task_id(project_id: int, task_title: str) -> str: 61 | """Generate a unique task ID based on project ID and task title.""" 62 | sanitized_title = sanitize_input(task_title) 63 | return f"{project_id}-{sanitized_title[:20]}-{datetime.now().strftime('%Y%m%d%H%M%S')}" 64 | 65 | def calculate_task_priority_score(task: Dict[str, Any], project_context: Dict[str, Any]) -> float: 66 | """Calculate a priority score for a task based on various factors.""" 67 | score = 0 68 | if task['priority'] == 'High': 69 | score += 
3 70 | elif task['priority'] == 'Medium': 71 | score += 2 72 | elif task['priority'] == 'Low': 73 | score += 1 74 | 75 | return score 76 | 77 | def model_to_dict(obj): 78 | return {c.key: getattr(obj, c.key) 79 | for c in class_mapper(obj.__class__).columns} -------------------------------------------------------------------------------- /backend/ai_engine/agents/task_agent.py: -------------------------------------------------------------------------------- 1 | import os 2 | from langchain_groq import ChatGroq 3 | from langchain.prompts import ChatPromptTemplate 4 | from langchain.output_parsers import PydanticOutputParser 5 | from pydantic import BaseModel, Field 6 | from typing import List 7 | import datetime 8 | from dotenv import load_dotenv 9 | from backend.ai_engine.rag.retriever import Retriever 10 | from langchain.schema import HumanMessage, SystemMessage 11 | 12 | load_dotenv() 13 | class TaskOutput(BaseModel): 14 | title: str = Field(description="Title of the task") 15 | estimated_duration: float = Field(description="Estimated duration of the task in hours") 16 | required_skills: List[str] = Field(description="List of skills required for the task") 17 | priority: str = Field(description="Priority of the task (High, Medium, Low)") 18 | 19 | class TaskAgent: 20 | def __init__(self, retriever: Retriever): 21 | self.llm = ChatGroq(groq_api_key=os.getenv("GROQ_API_KEY"), 22 | model="mixtral-8x7b-32768", 23 | temperature=0, 24 | max_tokens=None, 25 | timeout=None, 26 | max_retries=2,) 27 | self.parser = PydanticOutputParser(pydantic_object=TaskOutput) 28 | self.retriever = retriever 29 | 30 | self.task_creation_prompt = ChatPromptTemplate.from_template( 31 | "Create a task based on the following description: {description}. " 32 | "Consider these similar tasks: {similar_tasks}. " 33 | "Project context: {project_context}. " 34 | "{format_instructions}" 35 | ) 36 | 37 | def create_task(self, description: str, project_id: int) -> dict: 38 | try: 39 | similar_tasks = self.retriever.get_similar_tasks(description, project_id) 40 | project_context = self.retriever.get_project_context(project_id) 41 | team_skills = self.retriever.get_team_skills(project_id) 42 | except Exception as e: 43 | print(f"Error retrieving data: {e}") 44 | similar_tasks = [] 45 | project_context = {} 46 | team_skills = {} 47 | 48 | messages = [ 49 | SystemMessage(content="You are a task creation assistant for a project management system."), 50 | HumanMessage(content=f"Create a task based on the following description: {description}. " 51 | f"Consider these similar tasks: {similar_tasks}. " 52 | f"Project context: {project_context}. " 53 | f"Available team skills: {team_skills}. 
" 54 | f"Provide a title, estimated duration, and required skills.") 55 | ] 56 | 57 | try: 58 | response = self.llm(messages) 59 | print(f"LLM Response: {response}") # Debug print 60 | task_info = self.parser.parse(response.content) 61 | return { 62 | 'title': task_info.title, 63 | 'description': description, 64 | 'estimated_duration': task_info.estimated_duration, 65 | 'required_skills': task_info.required_skills, 66 | 'created_at': datetime.datetime.now().isoformat(), 67 | 'status': 'New', 68 | 'project_id': project_id 69 | } 70 | except Exception as e: 71 | print(f"Error in create_task: {e}") 72 | return { 73 | 'title': f"Task: {description[:50]}", 74 | 'description': description, 75 | 'estimated_duration': None, 76 | 'required_skills': [], 77 | 'created_at': datetime.datetime.now().isoformat(), 78 | 'status': 'New', 79 | 'project_id': project_id 80 | } -------------------------------------------------------------------------------- /backend/tests/test_rag.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from unittest.mock import Mock, patch 3 | from backend.ai_engine.rag.vector_store import VectorStore 4 | from backend.ai_engine.rag.retriever import Retriever 5 | 6 | @pytest.fixture 7 | def mock_db(): 8 | return Mock() 9 | 10 | @pytest.fixture 11 | def mock_vector_store(): 12 | with patch('backend.ai_engine.rag.vector_store.FAISS') as mock_faiss: 13 | mock_vs = VectorStore() 14 | mock_vs.vector_store = Mock() 15 | yield mock_vs 16 | 17 | def test_vector_store_creation(mock_vector_store): 18 | texts = ["Test document 1", "Test document 2"] 19 | metadatas = [{"source": "test1"}, {"source": "test2"}] 20 | 21 | mock_vector_store.create_vector_store(texts, metadatas) 22 | 23 | mock_vector_store.vector_store.from_documents.assert_called_once() 24 | 25 | def test_vector_store_similarity_search(mock_vector_store): 26 | mock_vector_store.vector_store.similarity_search.return_value = [ 27 | Mock(page_content="Similar document 1", metadata={"source": "test1"}), 28 | Mock(page_content="Similar document 2", metadata={"source": "test2"}) 29 | ] 30 | 31 | results = mock_vector_store.similarity_search("test query", k=2) 32 | 33 | assert len(results) == 2 34 | assert results[0].page_content == "Similar document 1" 35 | assert results[1].metadata["source"] == "test2" 36 | 37 | def test_retriever_get_similar_tasks(mock_db): 38 | with patch('backend.ai_engine.rag.retriever.vector_store') as mock_vs: 39 | mock_vs.similarity_search.return_value = [ 40 | Mock(metadata={"title": "Similar Task 1"}, page_content="Description 1"), 41 | Mock(metadata={"title": "Similar Task 2"}, page_content="Description 2") 42 | ] 43 | 44 | retriever = Retriever(mock_db) 45 | similar_tasks = retriever.get_similar_tasks("New task description", project_id=1, k=2) 46 | 47 | assert len(similar_tasks) == 2 48 | assert similar_tasks[0]["title"] == "Similar Task 1" 49 | assert similar_tasks[1]["description"] == "Description 2" 50 | 51 | def test_retriever_get_project_context(mock_db): 52 | mock_db.query().filter().first.return_value = Mock( 53 | name="Test Project", 54 | description="A test project", 55 | start_date="2023-01-01", 56 | end_date="2023-12-31", 57 | status="In Progress" 58 | ) 59 | 60 | retriever = Retriever(mock_db) 61 | context = retriever.get_project_context(project_id=1) 62 | 63 | assert context["name"] == "Test Project" 64 | assert context["status"] == "In Progress" 65 | 66 | def test_retriever_get_team_skills(mock_db): 67 | 
mock_db.query().filter().all.return_value = [ 68 | Mock(name="Alice", skills=["Python", "JavaScript"]), 69 | Mock(name="Bob", skills=["Java", "C++"]) 70 | ] 71 | 72 | retriever = Retriever(mock_db) 73 | skills = retriever.get_team_skills(project_id=1) 74 | 75 | assert skills == { 76 | "Alice": ["Python", "JavaScript"], 77 | "Bob": ["Java", "C++"] 78 | } 79 | 80 | def test_retriever_get_similar_projects(mock_db): 81 | with patch('backend.ai_engine.rag.retriever.vector_store') as mock_vs: 82 | mock_vs.similarity_search.return_value = [ 83 | Mock(metadata={"name": "Similar Project 1"}, page_content="Description 1"), 84 | Mock(metadata={"name": "Similar Project 2"}, page_content="Description 2") 85 | ] 86 | 87 | retriever = Retriever(mock_db) 88 | similar_projects = retriever.get_similar_projects(project_id=1, k=2) 89 | 90 | assert len(similar_projects) == 2 91 | assert similar_projects[0]["name"] == "Similar Project 1" 92 | assert similar_projects[1]["description"] == "Description 2" 93 | -------------------------------------------------------------------------------- /backend/ai_engine/agents/priority_agent.py: -------------------------------------------------------------------------------- 1 | # from langchain_community.chat_models import ChatOpenAI 2 | from langchain_groq import ChatGroq 3 | from langchain.prompts import ChatPromptTemplate 4 | from langchain.output_parsers import PydanticOutputParser 5 | from pydantic import BaseModel, Field 6 | from backend.ai_engine.rag.retriever import Retriever 7 | from dotenv import load_dotenv 8 | from langchain.schema import HumanMessage, SystemMessage 9 | import os 10 | 11 | load_dotenv() 12 | 13 | 14 | class PriorityOutput(BaseModel): 15 | priority: str = Field(description="Priority level of the task (High, Medium, Low)") 16 | reasoning: str = Field(description="Reasoning behind the priority assignment") 17 | 18 | class PriorityAgent: 19 | """PriorityAgent class to assign a priority to a task""" 20 | def __init__(self, retriever: Retriever, model_name: str = "gpt-3.5-turbo"): 21 | self.llm = ChatGroq(groq_api_key=os.getenv("GROQ_API_KEY"), 22 | model="mixtral-8x7b-32768", 23 | temperature=0, 24 | max_tokens=None, 25 | timeout=None, 26 | max_retries=2,) 27 | self.parser = PydanticOutputParser(pydantic_object=PriorityOutput) 28 | self.retriever = retriever 29 | 30 | self.priority_prompt = ChatPromptTemplate.from_template( 31 | "Assign a priority to the following task: {task_description}\n" 32 | "Consider the task's complexity, estimated duration, and required skills.\n" 33 | "Project context: {project_context}\n" 34 | "Similar tasks priorities: {similar_tasks_priorities}\n" 35 | "{format_instructions}" 36 | ) 37 | 38 | def assign_priority(self, task: dict) -> dict: 39 | """Assign a priority to a task 40 | for example: 41 | task = { 42 | "title": "Design a new logo", 43 | "description": "Design a new logo for the company website", 44 | "estimated_duration": "2 days", 45 | "required_skills": ["Graphic Design", "Adobe Illustrator"], 46 | "project_id": 1 47 | } 48 | if the task is to design a new logo, the agent should assign a priority level (High, Medium, Low) and provide reasoning. 49 | if error occurs, return default priority and reasoning. 
50 | """ 51 | project_context = self.retriever.get_project_context(task['project_id']) 52 | similar_tasks_priorities = self.retriever.get_similar_tasks_priorities(task['description'], task['project_id']) 53 | team_skills = self.retriever.get_team_skills(task['project_id']) 54 | 55 | task_description = f"{task['title']}" 56 | if task.get('estimated_duration'): 57 | task_description += f" (Duration: {task['estimated_duration']})" 58 | if task.get('required_skills'): 59 | task_description += f" (Skills: {', '.join(task['required_skills'])})" 60 | 61 | messages = [ 62 | SystemMessage(content="You are a task prioritization assistant for a project management system."), 63 | HumanMessage(content=f"Assign a priority to the following task: {task_description}\n" 64 | f"Consider the task's complexity, estimated duration, and required skills.\n" 65 | f"Project context: {project_context}\n" 66 | f"Similar tasks priorities: {similar_tasks_priorities}\n" 67 | f"Team skills: {team_skills}\n" 68 | f"Provide the priority (High, Medium, or Low) and reasoning.") 69 | ] 70 | 71 | try: 72 | response = self.llm(messages) 73 | priority_info = self.parser.parse(response.content) 74 | return priority_info.dict() 75 | except Exception as e: 76 | print(f"Error in assign_priority: {e}") 77 | return {"priority": "Medium", "reasoning": "Default priority assigned due to error."} -------------------------------------------------------------------------------- /backend/ai_engine/agents/report_agent.py: -------------------------------------------------------------------------------- 1 | # from langchain_community.chat_models import ChatOpenAI 2 | from langchain_groq import ChatGroq 3 | from langchain.prompts import ChatPromptTemplate 4 | from langchain.output_parsers import PydanticOutputParser 5 | from langchain.schema import HumanMessage, SystemMessage 6 | from pydantic import BaseModel, Field 7 | from typing import List, Dict, Any 8 | from backend.ai_engine.rag.retriever import Retriever 9 | from dotenv import load_dotenv 10 | import os 11 | import re 12 | import json 13 | from datetime import datetime 14 | 15 | load_dotenv() 16 | 17 | class ReportOutput(BaseModel): 18 | summary: str = Field(description="Summary of the project status, including tasks and assigned team members") 19 | key_metrics: Dict[str, float] = Field(description="Key metrics of the project") 20 | risks: List[str] = Field(description="Identified risks in the project") 21 | recommendations: List[str] = Field(description="List of recommendations for project improvement") 22 | 23 | class ReportAgent: 24 | def __init__(self, retriever: Retriever, model_name: str = "gpt-3.5-turbo"): 25 | self.llm = ChatGroq( 26 | groq_api_key=os.getenv("GROQ_API_KEY"), 27 | model="mixtral-8x7b-32768", 28 | temperature=0, 29 | max_tokens=None, 30 | timeout=None, 31 | max_retries=2, 32 | ) 33 | self.parser = PydanticOutputParser(pydantic_object=ReportOutput) 34 | self.retriever = retriever 35 | self.report_prompt = ChatPromptTemplate.from_template( 36 | "Generate a project report based on the following information:\n" 37 | "Tasks: {tasks}\n" 38 | "Project context: {project_context}\n" 39 | "Similar past projects: {similar_projects}\n" 40 | "Please include in the summary section a list of tasks with their assigned team members (if available).\n" 41 | "{format_instructions}" 42 | ) 43 | 44 | def generate_report(self, tasks: List[Dict[str, Any]]) -> Dict[str, Any]: 45 | if not tasks: 46 | return { 47 | "summary": "No tasks available for report generation.", 48 | "key_metrics": 
{}, 49 | "risks": ["No tasks to analyze risks."], 50 | "recommendations": ["Start by adding tasks to the project."] 51 | } 52 | 53 | project_id = tasks[0].get('project_id') 54 | project_context = self.retriever.get_project_context(project_id) if project_id else {} 55 | similar_projects = self.retriever.get_similar_projects(project_id) if project_id else [] 56 | 57 | # Convert datetime objects to strings 58 | def json_serial(obj): 59 | if isinstance(obj, datetime): 60 | return obj.isoformat() 61 | raise TypeError(f"Type {type(obj)} not serializable") 62 | 63 | # Enhance task information with team members if available in project context 64 | team_members = project_context.get('team_members', []) 65 | for task in tasks: 66 | task['assigned_to'] = team_members[task['id'] % len(team_members)] if team_members else "Unassigned" 67 | 68 | system_message = SystemMessage(content="You are an AI assistant tasked with generating project reports.") 69 | human_message = HumanMessage(content=self.report_prompt.format( 70 | tasks=json.dumps(tasks, default=json_serial, indent=2), 71 | project_context=json.dumps(project_context, default=json_serial, indent=2), 72 | similar_projects=json.dumps(similar_projects, default=json_serial, indent=2), 73 | format_instructions=self.parser.get_format_instructions() 74 | )) 75 | 76 | messages = [system_message, human_message] 77 | 78 | try: 79 | response = self.llm.invoke(messages) 80 | report_info = self.parser.parse(response.content) 81 | return report_info.dict() 82 | except Exception as e: 83 | print(f"Error in generate_report: {e}") 84 | return { 85 | "summary": "An error occurred while generating the report.", 86 | "key_metrics": {}, 87 | "risks": ["Unable to analyze risks due to an error."], 88 | "recommendations": ["Please try again or contact support if the issue persists."] 89 | } -------------------------------------------------------------------------------- /frontend/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | AI-Powered Project Management 7 | 8 | 9 | 10 |
11 | [index.html body, lines 11–86 — the HTML markup was lost in extraction and only the visible text survived; the page defines sections titled Create Project, Projects, Team Members, Assign Team Member to Project, Create Task, Tasks, AI Assistant, Task Actions, and Project Report under the "AI-Powered Project Management" header.]
87 | 88 | 89 | 90 | -------------------------------------------------------------------------------- /backend/tests/test_agents.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from unittest.mock import Mock, patch 3 | from backend.ai_engine.agents.task_agent import TaskAgent 4 | from backend.ai_engine.agents.priority_agent import PriorityAgent 5 | from backend.ai_engine.agents.suggestion_agent import SuggestionAgent 6 | from backend.ai_engine.agents.report_agent import ReportAgent 7 | from backend.ai_engine.agents.collaboration_agent import CollaborationAgent 8 | 9 | @pytest.fixture 10 | def mock_retriever(): 11 | return Mock() 12 | 13 | def test_task_agent(mock_retriever): 14 | with patch('backend.ai_engine.agents.task_agent.Retriever', return_value=mock_retriever): 15 | task_agent = TaskAgent() 16 | mock_retriever.get_similar_tasks.return_value = [{"title": "Similar Task", "description": "A similar task"}] 17 | mock_retriever.get_project_context.return_value = {"name": "Test Project", "description": "A test project"} 18 | 19 | task = task_agent.create_task("Create a new feature", project_id=1) 20 | 21 | assert "title" in task 22 | assert "estimated_duration" in task 23 | assert "required_skills" in task 24 | assert task["status"] == "New" 25 | 26 | def test_priority_agent(mock_retriever): 27 | with patch('backend.ai_engine.agents.priority_agent.Retriever', return_value=mock_retriever): 28 | priority_agent = PriorityAgent() 29 | mock_retriever.get_project_context.return_value = {"name": "Test Project", "description": "A test project"} 30 | mock_retriever.get_similar_tasks_priorities.return_value = [{"title": "Similar Task", "priority": "High"}] 31 | 32 | task = { 33 | "title": "Implement user authentication", 34 | "estimated_duration": 16, 35 | "required_skills": ["Python", "Security"] 36 | } 37 | priority_info = priority_agent.assign_priority(task, project_id=1) 38 | 39 | assert "priority" in priority_info 40 | assert "reasoning" in priority_info 41 | 42 | def test_suggestion_agent(mock_retriever): 43 | with patch('backend.ai_engine.agents.suggestion_agent.Retriever', return_value=mock_retriever): 44 | suggestion_agent = SuggestionAgent() 45 | mock_retriever.get_project_context.return_value = {"name": "Test Project", "description": "A test project"} 46 | mock_retriever.get_similar_completed_tasks.return_value = [{"title": "Similar Task", "description": "A completed similar task"}] 47 | mock_retriever.get_team_skills.return_value = {"Alice": ["Python", "JavaScript"], "Bob": ["Java", "C++"]} 48 | 49 | task = { 50 | "title": "Implement data visualization", 51 | "estimated_duration": 24, 52 | "required_skills": ["Python", "D3.js"] 53 | } 54 | suggestions = suggestion_agent.generate_suggestions(task, project_id=1) 55 | 56 | assert "suggestions" in suggestions 57 | assert "resources" in suggestions 58 | 59 | def test_report_agent(mock_retriever): 60 | with patch('backend.ai_engine.agents.report_agent.Retriever', return_value=mock_retriever): 61 | report_agent = ReportAgent() 62 | mock_retriever.get_project_context.return_value = {"name": "Test Project", "description": "A test project"} 63 | mock_retriever.get_project_tasks.return_value = [ 64 | {"title": "Task 1", "status": "Completed"}, 65 | {"title": "Task 2", "status": "In Progress"} 66 | ] 67 | mock_retriever.get_team_performance.return_value = {"task_completion_rate": 0.5} 68 | mock_retriever.get_similar_projects.return_value = [{"name": "Similar Project", "description": "A 
similar project"}] 69 | 70 | report = report_agent.generate_report(project_id=1) 71 | 72 | assert "summary" in report 73 | assert "key_metrics" in report 74 | assert "risks" in report 75 | assert "recommendations" in report 76 | 77 | def test_collaboration_agent(mock_retriever): 78 | with patch('backend.ai_engine.agents.collaboration_agent.Retriever', return_value=mock_retriever): 79 | collaboration_agent = CollaborationAgent() 80 | mock_retriever.get_project_context.return_value = {"name": "Test Project", "description": "A test project"} 81 | mock_retriever.get_available_team_members.return_value = [ 82 | {"name": "Alice", "skills": ["Python", "JavaScript"]}, 83 | {"name": "Bob", "skills": ["Java", "C++"]} 84 | ] 85 | mock_retriever.get_similar_collaborations.return_value = [{"task": "Similar Task", "collaboration": "Previous collaboration info"}] 86 | 87 | task = { 88 | "title": "Develop API", 89 | "estimated_duration": 40, 90 | "required_skills": ["Python", "RESTful API"] 91 | } 92 | collaboration_info = collaboration_agent.suggest_collaboration(task, project_id=1) 93 | 94 | assert "team_formation" in collaboration_info 95 | assert "communication_plan" in collaboration_info -------------------------------------------------------------------------------- /backend/ai_engine/rag/retriever.py: -------------------------------------------------------------------------------- 1 | from typing import List, Dict, Any 2 | from sqlalchemy.orm import Session 3 | from backend.ai_engine.rag.vector_store import vector_store 4 | from backend.database import crud 5 | from backend.database import models 6 | import json 7 | 8 | 9 | class Retriever: 10 | def __init__(self, db: Session): 11 | self.db = db 12 | 13 | def get_similar_tasks(self, description: str, project_id: int, k: int = 3) -> List[Dict[str, Any]]: 14 | query = f"Project ID: {project_id} | Task: {description}" 15 | similar_docs = vector_store.similarity_search(query, k=k) 16 | return [{"title": doc.metadata["title"], "description": doc.page_content} for doc in similar_docs] 17 | 18 | def get_project_context(self, project_id: int) -> Dict[str, Any]: 19 | project = crud.get_project(self.db, project_id) 20 | if not project: 21 | print(f"No project found for id: {project_id}") 22 | return {} 23 | context = { 24 | "name": project.name, 25 | "description": project.description, 26 | "start_date": str(project.start_date), 27 | "end_date": str(project.end_date), 28 | "status": project.status, 29 | "team_members": [member.name for member in project.team_members] 30 | } 31 | print(f"Project context: {context}") 32 | return context 33 | 34 | 35 | def get_project_team_members(self, project_id: int): 36 | return self.db.query(models.TeamMember).join(models.Project.team_members).filter(models.Project.id == project_id).all() 37 | 38 | def get_similar_tasks_priorities(self, description: str, project_id: int, k: int = 3) -> List[Dict[str, Any]]: 39 | query = f"Project ID: {project_id} | Task: {description}" 40 | similar_docs = vector_store.similarity_search(query, k=k) 41 | return [{"title": doc.metadata["title"], "priority": doc.metadata.get("priority", "Unknown")} for doc in similar_docs] 42 | 43 | def get_available_team_members(self, project_id: int) -> List[Dict[str, Any]]: 44 | team_members = crud.get_project_team_members(self.db, project_id) 45 | return [{"name": tm.name, "skills": tm.skills, "role": tm.role} for tm in team_members] 46 | 47 | def get_similar_collaborations(self, task_description: str, project_id: int, k: int = 3) -> List[Dict[str, Any]]: 
48 | query = f"Project ID: {project_id} | Collaboration for: {task_description}" 49 | similar_docs = vector_store.similarity_search(query, k=k) 50 | return [{"task": doc.metadata["task"], "collaboration": doc.page_content} for doc in similar_docs] 51 | 52 | def get_project_tasks(self, project_id: int) -> List[Dict[str, Any]]: 53 | tasks = crud.get_project_tasks(self.db, project_id) 54 | return [{"title": task.title, "status": task.status, "priority": task.priority} for task in tasks] 55 | 56 | def get_team_performance(self, project_id: int) -> Dict[str, Any]: 57 | tasks = crud.get_project_tasks(self.db, project_id) 58 | completed_tasks = sum(1 for task in tasks if task.status == "Completed") 59 | total_tasks = len(tasks) 60 | return { 61 | "task_completion_rate": completed_tasks / total_tasks if total_tasks > 0 else 0, 62 | "total_tasks": total_tasks, 63 | "completed_tasks": completed_tasks 64 | } 65 | 66 | def get_similar_projects(self, project_id: int, k: int = 3) -> List[Dict[str, Any]]: 67 | project = crud.get_project(self.db, project_id) 68 | if not project: 69 | return [] 70 | 71 | query = f"Project: {project.name} | Description: {project.description}" 72 | similar_docs = vector_store.similarity_search(query, k=k) 73 | 74 | return [ 75 | { 76 | "name": doc.metadata.get("name", "Unnamed Project"), 77 | "description": doc.page_content 78 | } 79 | for doc in similar_docs 80 | ] 81 | 82 | def get_similar_completed_tasks(self, task_description: str, project_id: int, k: int = 3) -> List[Dict[str, Any]]: 83 | query = f"Project ID: {project_id} | Completed Task: {task_description}" 84 | similar_docs = vector_store.similarity_search(query, k=k) 85 | return [{"title": doc.metadata["title"], "description": doc.page_content} for doc in similar_docs] 86 | 87 | def get_team_skills(self, project_id: int) -> Dict[str, List[str]]: 88 | team_members = self.get_project_team_members(project_id) 89 | return {tm.name: json.loads(tm.skills) if tm.skills else [] for tm in team_members} 90 | 91 | def get_available_team_members(self, project_id: int) -> List[Dict[str, Any]]: 92 | team_members = crud.get_project_team_members(self.db, project_id) 93 | return [{"name": tm.name, "skills": tm.skills.split(',')} for tm in team_members] 94 | 95 | def get_related_information(self, question: str, k: int = 3) -> List[Dict[str, Any]]: 96 | similar_docs = vector_store.similarity_search(question, k=k) 97 | related_info = [{"title": doc.metadata.get("title", "Unknown"), "content": doc.page_content} for doc in similar_docs] 98 | print(f"Related information: {related_info}") 99 | return related_info 100 | -------------------------------------------------------------------------------- /backend/ai_engine/workflow/graph.py: -------------------------------------------------------------------------------- 1 | from langgraph.graph import StateGraph, END 2 | from typing import Dict, Any 3 | from backend.ai_engine.agents.task_agent import TaskAgent 4 | from backend.ai_engine.agents.priority_agent import PriorityAgent 5 | from backend.ai_engine.agents.suggestion_agent import SuggestionAgent 6 | from backend.ai_engine.agents.report_agent import ReportAgent 7 | from backend.ai_engine.agents.collaboration_agent import CollaborationAgent 8 | from backend.ai_engine.rag.retriever import Retriever 9 | from backend.database.database import SessionLocal 10 | 11 | def get_db(): 12 | db = SessionLocal() 13 | try: 14 | yield db 15 | finally: 16 | db.close() 17 | 18 | def create_workflow(): 19 | """this function creates a state graph workflow 
that defines the sequence of steps to be executed in the AI engine. 20 | The workflow consists of several nodes, each representing a specific task or action to be performed. 21 | The workflow starts with the "create_task" node, where a new task is created based on the input description and project ID. 22 | The workflow then moves to the "assign_priority" node, where the priority of the task is assigned using the PriorityAgent. 23 | Next, the workflow moves to the "generate_suggestions" node, where suggestions are generated for the task using the SuggestionAgent. 24 | The workflow then moves to the "suggest_collaboration" node, where collaboration suggestions are generated using the CollaborationAgent. 25 | Finally, the workflow moves to the "generate_report" node, where a report is generated based on the tasks created. 26 | The workflow is compiled and returned as a callable function that can be executed to run the AI engine. 27 | If any errors occur during the creation of the workflow, an error message is printed, and None is returned. 28 | The workflow function is then called to create the workflow instance, which is stored in the "workflow" variable. 29 | The workflow instance is used to execute the AI engine and perform the task management operations.""" 30 | 31 | try: 32 | workflow = StateGraph(Dict) 33 | 34 | db = next(get_db()) 35 | retriever = Retriever(db) 36 | task_agent = TaskAgent(retriever) 37 | priority_agent = PriorityAgent(retriever) 38 | suggestion_agent = SuggestionAgent(retriever) 39 | report_agent = ReportAgent(retriever) 40 | collaboration_agent = CollaborationAgent(retriever) 41 | 42 | workflow.add_node("create_task", lambda state: create_task_node(state, task_agent)) 43 | workflow.add_node("assign_priority", lambda state: assign_priority_node(state, priority_agent)) 44 | workflow.add_node("generate_suggestions", lambda state: generate_suggestions_node(state, suggestion_agent)) 45 | workflow.add_node("suggest_collaboration", lambda state: suggest_collaboration_node(state, collaboration_agent)) 46 | workflow.add_node("generate_report", lambda state: generate_report_node(state, report_agent)) 47 | 48 | workflow.add_edge("create_task", "assign_priority") 49 | workflow.add_edge("assign_priority", "generate_suggestions") 50 | workflow.add_edge("generate_suggestions", "suggest_collaboration") 51 | 52 | workflow.add_conditional_edges( 53 | "suggest_collaboration", 54 | lambda state: "generate_report" if state.get("generate_report", False) else END, 55 | { 56 | "generate_report": "generate_report", 57 | END: END 58 | } 59 | ) 60 | workflow.add_edge("generate_report", END) 61 | 62 | workflow.set_entry_point("create_task") 63 | 64 | return workflow.compile() 65 | except Exception as e: 66 | print(f"Error creating workflow: {e}") 67 | return None 68 | workflow = create_workflow() 69 | 70 | def create_task_node(state: Dict[str, Any], task_agent: TaskAgent) -> Dict[str, Any]: 71 | new_task = task_agent.create_task(state['input_description'], state['project_id']) 72 | new_task['project_id'] = state['project_id'] # Ensure project_id is included 73 | state['tasks'].append(new_task) 74 | return state 75 | 76 | def assign_priority_node(state: Dict[str, Any], priority_agent: PriorityAgent) -> Dict[str, Any]: 77 | for task in state['tasks']: 78 | if task['status'] == 'New': 79 | priority_info = priority_agent.assign_priority(task) 80 | task['priority'] = priority_info['priority'] 81 | task['priority_reasoning'] = priority_info['reasoning'] 82 | return state 83 | 84 | def 
generate_suggestions_node(state: Dict[str, Any], suggestion_agent: SuggestionAgent) -> Dict[str, Any]: 85 | for task in state['tasks']: 86 | if task['status'] == 'New': 87 | task['suggestions'] = suggestion_agent.generate_suggestions(task) 88 | return state 89 | 90 | def suggest_collaboration_node(state: Dict[str, Any], collaboration_agent: CollaborationAgent) -> Dict[str, Any]: 91 | for task in state['tasks']: 92 | if task['status'] == 'New': 93 | task['collaboration'] = collaboration_agent.suggest_collaboration(task) 94 | return state 95 | 96 | def generate_report_node(state: Dict[str, Any], report_agent: ReportAgent) -> Dict[str, Any]: 97 | state['report'] = report_agent.generate_report(state['tasks']) 98 | return state 99 | 100 | -------------------------------------------------------------------------------- /backend/tests/test_api.py: -------------------------------------------------------------------------------- 1 | from fastapi.testclient import TestClient 2 | from sqlalchemy import create_engine 3 | from sqlalchemy.orm import sessionmaker 4 | from backend.database.database import Base 5 | from backend.main import app 6 | from backend.api.dependencies import get_db, get_current_active_user 7 | from backend.database import crud, schemas 8 | import pytest 9 | from unittest.mock import Mock 10 | 11 | SQLALCHEMY_DATABASE_URL = "sqlite:///./test.db" 12 | 13 | engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}) 14 | TestingSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) 15 | 16 | Base.metadata.create_all(bind=engine) 17 | 18 | def override_get_db(): 19 | try: 20 | db = TestingSessionLocal() 21 | yield db 22 | finally: 23 | db.close() 24 | 25 | app.dependency_overrides[get_db] = override_get_db 26 | 27 | # Mock the current user for testing 28 | mock_user = schemas.User(id=1, username="testuser", email="test@example.com", is_active=True) 29 | app.dependency_overrides[get_current_active_user] = lambda: mock_user 30 | 31 | client = TestClient(app) 32 | 33 | @pytest.fixture(autouse=True) 34 | def setup_db(): 35 | Base.metadata.create_all(bind=engine) 36 | yield 37 | Base.metadata.drop_all(bind=engine) 38 | 39 | def test_create_project(): 40 | response = client.post( 41 | "/api/v1/projects/", 42 | json={"name": "Test Project", "description": "A test project"} 43 | ) 44 | assert response.status_code == 200 45 | data = response.json() 46 | assert data["name"] == "Test Project" 47 | assert "id" in data 48 | 49 | def test_read_projects(): 50 | # Create a few projects first 51 | client.post("/api/v1/projects/", json={"name": "Project 1", "description": "Description 1"}) 52 | client.post("/api/v1/projects/", json={"name": "Project 2", "description": "Description 2"}) 53 | 54 | response = client.get("/api/v1/projects/") 55 | assert response.status_code == 200 56 | data = response.json() 57 | assert isinstance(data, list) 58 | assert len(data) >= 2 59 | 60 | def test_read_project(): 61 | # First, create a project 62 | create_response = client.post( 63 | "/api/v1/projects/", 64 | json={"name": "Test Project", "description": "A test project"} 65 | ) 66 | project_id = create_response.json()["id"] 67 | 68 | # Then, read the project 69 | read_response = client.get(f"/api/v1/projects/{project_id}") 70 | assert read_response.status_code == 200 71 | data = read_response.json() 72 | assert data["name"] == "Test Project" 73 | assert data["id"] == project_id 74 | 75 | def test_create_task(): 76 | # First, create a project 77 | project_response = 
client.post( 78 | "/api/v1/projects/", 79 | json={"name": "Test Project", "description": "A test project"} 80 | ) 81 | project_id = project_response.json()["id"] 82 | 83 | # Then, create a task 84 | task_response = client.post( 85 | f"/api/v1/projects/{project_id}/tasks/", 86 | json={"description": "Test task description"} 87 | ) 88 | assert task_response.status_code == 200 89 | data = task_response.json() 90 | assert "title" in data 91 | assert data["project_id"] == project_id 92 | 93 | def test_read_tasks(): 94 | # First, create a project 95 | project_response = client.post( 96 | "/api/v1/projects/", 97 | json={"name": "Test Project", "description": "A test project"} 98 | ) 99 | project_id = project_response.json()["id"] 100 | 101 | # Create a few tasks 102 | client.post(f"/api/v1/projects/{project_id}/tasks/", json={"description": "Task 1"}) 103 | client.post(f"/api/v1/projects/{project_id}/tasks/", json={"description": "Task 2"}) 104 | 105 | # Read tasks 106 | response = client.get(f"/api/v1/projects/{project_id}/tasks/") 107 | assert response.status_code == 200 108 | data = response.json() 109 | assert isinstance(data, list) 110 | assert len(data) >= 2 111 | 112 | def test_generate_project_report(): 113 | # First, create a project 114 | project_response = client.post( 115 | "/api/v1/projects/", 116 | json={"name": "Test Project", "description": "A test project"} 117 | ) 118 | project_id = project_response.json()["id"] 119 | 120 | # Create some tasks 121 | client.post(f"/api/v1/projects/{project_id}/tasks/", json={"description": "Task 1"}) 122 | client.post(f"/api/v1/projects/{project_id}/tasks/", json={"description": "Task 2"}) 123 | 124 | # Generate report 125 | response = client.post(f"/api/v1/projects/{project_id}/report/") 126 | assert response.status_code == 200 127 | data = response.json() 128 | assert "summary" in data 129 | assert "key_metrics" in data 130 | assert "risks" in data 131 | assert "recommendations" in data 132 | 133 | def test_nonexistent_project(): 134 | response = client.get("/api/v1/projects/9999") 135 | assert response.status_code == 404 136 | 137 | def test_unauthorized_access(): 138 | # Create a project 139 | project_response = client.post( 140 | "/api/v1/projects/", 141 | json={"name": "Test Project", "description": "A test project"} 142 | ) 143 | project_id = project_response.json()["id"] 144 | 145 | # Mock a different user 146 | different_user = schemas.User(id=2, username="otheruser", email="other@example.com", is_active=True) 147 | app.dependency_overrides[get_current_active_user] = lambda: different_user 148 | 149 | # Try to access the project 150 | response = client.get(f"/api/v1/projects/{project_id}") 151 | assert response.status_code == 403 152 | 153 | # Reset the mock 154 | app.dependency_overrides[get_current_active_user] = lambda: mock_user -------------------------------------------------------------------------------- /backend/database/crud.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.orm import Session 2 | from . 
import models, schemas 3 | from datetime import datetime 4 | from fastapi import HTTPException 5 | import json 6 | 7 | 8 | def create_user(db: Session, user: schemas.UserCreate): 9 | fake_hashed_password = user.password + "notreallyhashed" 10 | db_user = models.User(username=user.username, email=user.email, hashed_password=fake_hashed_password) 11 | db.add(db_user) 12 | db.commit() 13 | db.refresh(db_user) 14 | return db_user 15 | 16 | def get_user(db: Session, user_id: int): 17 | return db.query(models.User).filter(models.User.id == user_id).first() 18 | 19 | def get_user_by_email(db: Session, email: str): 20 | return db.query(models.User).filter(models.User.email == email).first() 21 | 22 | def get_users(db: Session, skip: int = 0, limit: int = 100): 23 | return db.query(models.User).offset(skip).limit(limit).all() 24 | 25 | def create_project(db: Session, project: schemas.ProjectCreate, user_id: int): 26 | db_project = models.Project(**project.dict(), owner_id=user_id) 27 | db.add(db_project) 28 | db.commit() 29 | db.refresh(db_project) 30 | return db_project 31 | 32 | def get_project(db: Session, project_id: int): 33 | return db.query(models.Project).filter(models.Project.id == project_id).first() 34 | 35 | def get_projects(db: Session, skip: int = 0, limit: int = 100): 36 | projects = db.query(models.Project).offset(skip).limit(limit).all() 37 | return [schemas.ProjectOut( 38 | id=project.id, 39 | name=project.name, 40 | description=project.description, 41 | start_date=project.start_date, 42 | end_date=project.end_date, 43 | status=project.status, 44 | created_at=project.created_at, 45 | updated_at=project.updated_at, 46 | tasks=[schemas.Task.from_orm(task) for task in project.tasks], 47 | team_members=[schemas.TeamMember( 48 | id=tm.id, 49 | name=tm.name, 50 | email=tm.email, 51 | role=tm.role, 52 | skills=json.loads(tm.skills) if tm.skills else None, 53 | assigned_tasks=[schemas.Task.from_orm(task) for task in tm.assigned_tasks] 54 | ) for tm in project.team_members] 55 | ) for project in projects] 56 | 57 | def create_task(db: Session, task: schemas.TaskCreate, project_id: int): 58 | task_data = task.dict() 59 | if task_data.get('required_skills'): 60 | task_data['required_skills'] = json.dumps(task_data['required_skills']) 61 | db_task = models.Task(**task_data, project_id=project_id, created_at=datetime.utcnow()) 62 | db.add(db_task) 63 | db.commit() 64 | db.refresh(db_task) 65 | return db_task 66 | 67 | def get_tasks(db: Session, project_id: int, skip: int = 0, limit: int = 100): 68 | return db.query(models.Task).filter(models.Task.project_id == project_id).offset(skip).limit(limit).all() 69 | 70 | def create_team_member(db: Session, team_member: schemas.TeamMemberCreate): 71 | skills_json = json.dumps(team_member.skills) if team_member.skills else None 72 | db_team_member = models.TeamMember( 73 | name=team_member.name, 74 | email=team_member.email, 75 | role=team_member.role, 76 | skills=skills_json 77 | ) 78 | db.add(db_team_member) 79 | db.commit() 80 | db.refresh(db_team_member) 81 | 82 | # Convert the database model to the Pydantic schema 83 | return schemas.TeamMember( 84 | id=db_team_member.id, 85 | name=db_team_member.name, 86 | email=db_team_member.email, 87 | role=db_team_member.role, 88 | skills=json.loads(db_team_member.skills) if db_team_member.skills else None, 89 | assigned_tasks=[] # New team members won't have assigned tasks yet 90 | ) 91 | 92 | 93 | def get_team_members(db: Session, skip: int = 0, limit: int = 100): 94 | db_team_members = 
db.query(models.TeamMember).offset(skip).limit(limit).all() 95 | return [ 96 | schemas.TeamMember( 97 | id=tm.id, 98 | name=tm.name, 99 | email=tm.email, 100 | role=tm.role, 101 | skills=json.loads(tm.skills) if tm.skills else None, 102 | assigned_tasks=[schemas.Task.from_orm(task) for task in tm.assigned_tasks] 103 | ) 104 | for tm in db_team_members 105 | ] 106 | 107 | def update_task(db: Session, task_id: int, task_update: dict): 108 | db_task = db.query(models.Task).filter(models.Task.id == task_id).first() 109 | if db_task: 110 | for key, value in task_update.items(): 111 | setattr(db_task, key, value) 112 | db.commit() 113 | db.refresh(db_task) 114 | return db_task 115 | 116 | def get_task(db: Session, task_id: int): 117 | return db.query(models.Task).filter(models.Task.id == task_id).first() 118 | 119 | def assign_team_member_to_project(db: Session, project_id: int, team_member_id: int): 120 | project = db.query(models.Project).filter(models.Project.id == project_id).first() 121 | team_member = db.query(models.TeamMember).filter(models.TeamMember.id == team_member_id).first() 122 | 123 | if not project or not team_member: 124 | raise HTTPException(status_code=404, detail="Project or Team Member not found") 125 | 126 | project.team_members.append(team_member) 127 | db.commit() 128 | db.refresh(project) 129 | 130 | # Convert the project to a ProjectOut schema 131 | return schemas.ProjectOut( 132 | id=project.id, 133 | name=project.name, 134 | description=project.description, 135 | start_date=project.start_date, 136 | end_date=project.end_date, 137 | status=project.status, 138 | created_at=project.created_at, 139 | updated_at=project.updated_at, 140 | tasks=[schemas.Task.from_orm(task) for task in project.tasks], 141 | team_members=[schemas.TeamMember( 142 | id=tm.id, 143 | name=tm.name, 144 | email=tm.email, 145 | role=tm.role, 146 | skills=json.loads(tm.skills) if tm.skills else None, 147 | assigned_tasks=[schemas.Task.from_orm(task) for task in tm.assigned_tasks] 148 | ) for tm in project.team_members] 149 | ) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # **Project Management System with RAG** 2 | 3 | This project is an **Project Management System** utilizing **Retrieval-Augmented Generation (RAG)** to streamline the management of complex projects. By combining AI-driven techniques with structured workflows, the system offers enhanced efficiency in task execution, team collaboration, and decision-making processes. I have also written a [Medium article](https://medium.com/towards-artificial-intelligence/revolutionizing-project-management-with-ai-agents-and-langgraph-ff90951930c1) about this project, detailing its features, implementation, and applications. 4 | 5 | ## **Menu Screen** 6 | ![Menu Screen](menu.png) 7 | 8 | ## **Diagram Flow** 9 | ![Diagram Flow](graph_flow.png) 10 | 11 | ## **Key Features** 12 | 13 | - **Task Management with AI Assistance**: The `TaskAgent` generates tasks based on project descriptions and historical data. The `PriorityAgent` analyzes and prioritizes tasks using a data-driven approach. 14 | - **Retrieval-Augmented Generation (RAG)**: The system implements RAG, which integrates FAISS for similarity search and language models via `langchain_ollama` and `langchain_groq` to deliver context-aware task suggestions, reports, and recommendations. 
15 | - **Team Collaboration and Formation**: The `CollaborationAgent` analyzes team capabilities and project requirements to suggest optimal team configurations and communication strategies. 16 | - **Comprehensive Project Reporting**: The `ReportAgent` generates detailed reports on project progress, including key metrics, risks, and insights, helping project managers make informed decisions. 17 | - **Workflow Orchestration with LangGraph**: The system orchestrates project workflows using a **graph-based approach**, where tasks are represented as nodes in a **stateful graph**. Each node corresponds to a specific step in the project lifecycle—such as task creation, priority assignment, and report generation. This allows for highly adaptable workflows that can dynamically adjust based on project context and changes. LangGraph ensures that the transitions between these steps are logical, structured, and responsive to project updates in real time. 18 | 19 | ## **Tech Stack** 20 | 21 | - **AI & NLP Frameworks**: `LangChain` for embeddings and retrieval tasks, `Langchain_ollama` for embeddings, `Langchain_groq` for AI-based suggestions, and `LangGraph` for managing stateful workflows. 22 | - **Backend Framework**: FastAPI, integrated with SQLAlchemy for database interactions. 23 | - **Vector Store**: FAISS for storing and retrieving embeddings based on similarity search. 24 | - **Authentication**: OAuth2 with JWT for secure access. 25 | - **Database**: SQLAlchemy for persistent data storage. 26 | 27 | ## **Retrieval-Augmented Generation (RAG) Overview** 28 | 29 | RAG combines two core components: 30 | 1. **Retriever**: The system uses FAISS to perform similarity search on stored embeddings, allowing it to identify relevant documents or data points from the project history. 31 | 2. **Generator**: Once relevant documents are retrieved, the system leverages `langchain_groq` for generating contextually aware outputs based on the retrieved data. This hybrid retrieval-generation mechanism ensures that suggestions and reports are both data-driven and contextually meaningful. 32 | 33 | ## **Stateful Workflow and Task Automation** 34 | 35 | The workflow system is designed around a **graph-based structure**, where each task and action in the project lifecycle is modeled as a node within the graph. These nodes are connected by edges that define the sequence of actions—from task creation, prioritization, and suggestions, to collaboration and report generation. The system’s **state management** ensures that each task’s status is tracked, and workflows can dynamically adapt based on real-time project updates, allowing for flexibility in project execution. 36 | 37 | For example, when a task is created, the system moves it through the workflow to assign a priority, generate suggestions, recommend collaborations, and finally produce a comprehensive report. This graph-based approach, powered by **LangGraph**, ensures that workflows are both adaptable and structured, responding efficiently to changes in project context. 
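
To make that lifecycle concrete, the sketch below wires a similar pipeline with the same LangGraph primitives used in `backend/ai_engine/workflow/graph.py` (`StateGraph`, `add_node`, `add_edge`, `set_entry_point`, `END`). The node functions and the `WorkflowState` schema here are illustrative stand-ins for the real agents, not the project's implementation:

```python
from typing import List, TypedDict
from langgraph.graph import StateGraph, END

class WorkflowState(TypedDict):
    input_description: str
    tasks: List[dict]
    report: str

def create_task(state: WorkflowState) -> dict:
    # Stand-in for TaskAgent.create_task: turn the description into a task
    new_task = {"title": state["input_description"], "status": "New"}
    return {"tasks": state["tasks"] + [new_task]}

def assign_priority(state: WorkflowState) -> dict:
    # Stand-in for PriorityAgent.assign_priority: attach a priority to each task
    return {"tasks": [{**t, "priority": "Medium"} for t in state["tasks"]]}

def generate_report(state: WorkflowState) -> dict:
    # Stand-in for ReportAgent.generate_report: summarize the current state
    return {"report": f"{len(state['tasks'])} task(s) created and prioritized"}

graph = StateGraph(WorkflowState)
graph.add_node("create_task", create_task)
graph.add_node("assign_priority", assign_priority)
graph.add_node("generate_report", generate_report)
graph.add_edge("create_task", "assign_priority")
graph.add_edge("assign_priority", "generate_report")
graph.add_edge("generate_report", END)
graph.set_entry_point("create_task")

app = graph.compile()
result = app.invoke({"input_description": "Design the onboarding flow", "tasks": []})
print(result["report"])
```

The real workflow adds suggestion and collaboration nodes plus a conditional edge that decides whether to produce a report, but the shape of the graph is the same.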
38 | 39 | 40 | ## **Getting Started** 41 | 42 | ### **Prerequisites** 43 | 44 | - Python 3.10 45 | - SQLAlchemy database instance 46 | - OpenAI or Groq API Key for enhanced AI capabilities 47 | - FAISS installation 48 | 49 | 50 | ### **Installation** 51 | 52 | **Clone the Repository:** 53 | ```bash 54 | git clone https://github.com/your-username/AI_Powered_Project_Management_System_with_RAG.git 55 | cd AI_Powered_Project_Management_System_with_RAG 56 | ``` 57 | 58 | **Install the Dependencies:** 59 | ```bash 60 | pip install -r requirements.txt 61 | ``` 62 | 63 | **Launch the Application:** 64 | ```bash 65 | uvicorn main:app --reload 66 | ``` 67 | 68 | ## **Project Components** 69 | 70 | **1. Vector Store** 71 | This module handles the creation and management of **FAISS-based vector stores** for document embeddings, using **OllamaEmbeddings** (model -> all-minilm) for embedding generation. It supports both vector store creation and similarity search. 72 | 73 | Example usage: 74 | ```python 75 | from backend.ai_engine.rag.vector_store import VectorStore 76 | vector_store = VectorStore() 77 | vector_store.create_vector_store(texts, metadatas) 78 | ``` 79 | 80 | **2. Collaboration Agent** 81 | The **CollaborationAgent** generates team formation and communication plan suggestions for tasks. It uses a model from **langchain_groq** to generate relevant, context-aware recommendations based on project data and team member capabilities. 82 | 83 | Example usage: 84 | ```python 85 | from backend.ai_engine.agents.collaboration_agent import CollaborationAgent 86 | collaboration_agent = CollaborationAgent(retriever) 87 | response = collaboration_agent.suggest_collaboration(task, project_id) 88 | ``` 89 | 90 | **3. Workflow Management** 91 | This module manages the project workflow using a graph-based state manager. The workflow connects nodes like task creation, priority assignment, suggestion generation, and reporting, ensuring logical progression through each phase of the project. 92 | Example usage: 93 | ```python 94 | from backend.ai_engine.workflow.graph import create_workflow 95 | workflow = create_workflow() 96 | workflow() 97 | ``` 98 | 99 | ## **Example Output** 100 | 101 | Sample project created by the AI system: 102 | ```bash 103 | Project: NeuraFlow 104 | Team Members: Alice Johnson (Machine Learning), Bob Smith (Frontend Development), Charlie Brown (Backend Development) 105 | Tasks: 106 | 1. Implement AI-driven resource allocation algorithm 107 | 2. Design predictive bottleneck detection module 108 | ``` 109 | 110 | Generated suggestions for tasks: 111 | ```bash 112 | Task: Implement AI-driven resource allocation algorithm 113 | Suggestions: 114 | - Define the objective 115 | - Identify key factors 116 | - Develop an optimization algorithm 117 | ``` 118 | 119 | ## **License** 120 | 121 | This project is licensed under the MIT License. You are free to use, modify, and distribute this software as per the terms of the license. See the full license text in the `LICENSE` file included in this repository. 
122 | 123 | MIT License © 2024 Yotam Braun 124 | 125 | -------------------------------------------------------------------------------- /backend/api/routes.py: -------------------------------------------------------------------------------- 1 | from fastapi import APIRouter, Depends, HTTPException, Response 2 | from sqlalchemy.orm import Session 3 | from typing import List 4 | import logging 5 | from backend.database import crud, schemas 6 | from backend.api.dependencies import get_db 7 | from backend.ai_engine.workflow.graph import workflow 8 | from backend.ai_engine.agents.ai_assistant import AIAssistant 9 | from backend.ai_engine.rag.retriever import Retriever 10 | from backend.ai_engine.agents.priority_agent import PriorityAgent 11 | from backend.ai_engine.agents.suggestion_agent import SuggestionAgent 12 | from backend.ai_engine.agents.report_agent import ReportAgent 13 | from backend.ai_engine.utils.helpers import model_to_dict 14 | from pydantic import BaseModel 15 | import json 16 | from io import BytesIO 17 | from reportlab.lib.pagesizes import letter 18 | from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer 19 | from reportlab.lib.styles import getSampleStyleSheet 20 | 21 | router = APIRouter() 22 | logging.basicConfig(level=logging.INFO) 23 | 24 | 25 | @router.post("/users/", response_model=schemas.User) 26 | def create_user(user: schemas.UserCreate, db: Session = Depends(get_db)): 27 | db_user = crud.get_user_by_email(db, email=user.email) 28 | if db_user: 29 | raise HTTPException(status_code=400, detail="Email already registered") 30 | return crud.create_user(db=db, user=user) 31 | 32 | @router.get("/users/", response_model=List[schemas.User]) 33 | def read_users(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)): 34 | users = crud.get_users(db, skip=skip, limit=limit) 35 | return users 36 | 37 | @router.post("/projects/", response_model=schemas.Project) 38 | def create_project(project: schemas.ProjectCreate, db: Session = Depends(get_db)): 39 | return crud.create_project(db=db, project=project, user_id=1) 40 | 41 | @router.get("/projects/", response_model=List[schemas.ProjectOut]) 42 | def read_projects(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)): 43 | projects = crud.get_projects(db, skip=skip, limit=limit) 44 | return projects 45 | 46 | @router.get("/projects/{project_id}", response_model=schemas.Project) 47 | def read_project(project_id: int, db: Session = Depends(get_db)): 48 | db_project = crud.get_project(db, project_id=project_id) 49 | if db_project is None: 50 | raise HTTPException(status_code=404, detail="Project not found") 51 | return db_project 52 | 53 | @router.post("/projects/{project_id}/tasks/", response_model=schemas.Task) 54 | def create_task(project_id: int, task: schemas.TaskCreate, db: Session = Depends(get_db)): 55 | project = crud.get_project(db, project_id=project_id) 56 | if project is None: 57 | raise HTTPException(status_code=404, detail="Project not found") 58 | 59 | logging.info(f"Creating task for project {project_id}: {task}") 60 | 61 | try: 62 | db_task = crud.create_task(db=db, task=task, project_id=project_id) 63 | return db_task 64 | except Exception as e: 65 | logging.error(f"Error creating task: {str(e)}") 66 | raise HTTPException(status_code=500, detail=f"An error occurred while creating the task: {str(e)}") 67 | 68 | @router.get("/projects/{project_id}/tasks/", response_model=List[schemas.Task]) 69 | def read_tasks(project_id: int, skip: int = 0, limit: int = 100, db: Session = 
Depends(get_db)): 70 | tasks = crud.get_tasks(db, project_id=project_id, skip=skip, limit=limit) 71 | return tasks 72 | 73 | @router.post("/team-members/", response_model=schemas.TeamMember) 74 | def create_team_member(team_member: schemas.TeamMemberCreate, db: Session = Depends(get_db)): 75 | return crud.create_team_member(db=db, team_member=team_member) 76 | 77 | @router.get("/team-members/", response_model=List[schemas.TeamMember]) 78 | def read_team_members(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)): 79 | team_members = crud.get_team_members(db, skip=skip, limit=limit) 80 | return team_members 81 | 82 | class AIQuestion(BaseModel): 83 | question: str 84 | 85 | @router.post("/projects/{project_id}/ai-chat/") 86 | def ai_chat(project_id: int, ai_question: AIQuestion, db: Session = Depends(get_db)): 87 | print(f"Received question for project {project_id}: {ai_question.question}") 88 | project = crud.get_project(db, project_id=project_id) 89 | if project is None: 90 | raise HTTPException(status_code=404, detail="Project not found") 91 | 92 | retriever = Retriever(db) 93 | ai_assistant = AIAssistant(retriever) 94 | 95 | try: 96 | answer = ai_assistant.answer_question(project_id, ai_question.question) 97 | print(f"AI response: {answer}") 98 | return {"answer": answer} 99 | except Exception as e: 100 | print(f"Error in AI chat: {str(e)}") 101 | raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}") 102 | 103 | @router.post("/projects/{project_id}/tasks/{task_id}/prioritize/", response_model=schemas.Task) 104 | def prioritize_task(project_id: int, task_id: int, db: Session = Depends(get_db)): 105 | task = crud.get_task(db, task_id=task_id) 106 | if task is None or task.project_id != project_id: 107 | raise HTTPException(status_code=404, detail="Task not found or does not belong to the specified project") 108 | 109 | retriever = Retriever(db) 110 | priority_agent = PriorityAgent(retriever) 111 | 112 | # Convert SQLAlchemy model to dictionary 113 | task_dict = {c.name: getattr(task, c.name) for c in task.__table__.columns} 114 | 115 | # Ensure the task dictionary has all required fields 116 | task_dict['project_id'] = project_id 117 | if 'required_skills' in task_dict and task_dict['required_skills']: 118 | task_dict['required_skills'] = task_dict['required_skills'].split(',') 119 | else: 120 | task_dict['required_skills'] = [] 121 | 122 | try: 123 | priority_info = priority_agent.assign_priority(task_dict) 124 | updated_task = crud.update_task(db, task_id=task_id, task_update={ 125 | 'priority': priority_info['priority'], 126 | 'priority_reasoning': priority_info['reasoning'] 127 | }) 128 | return updated_task 129 | except Exception as e: 130 | print(f"Error in prioritize_task: {e}") 131 | raise HTTPException(status_code=500, detail="An error occurred while prioritizing the task") 132 | 133 | @router.post("/projects/{project_id}/tasks/{task_id}/suggest/") 134 | def suggest_for_task(project_id: int, task_id: int, db: Session = Depends(get_db)): 135 | task = crud.get_task(db, task_id=task_id) 136 | if task is None or task.project_id != project_id: 137 | raise HTTPException(status_code=404, detail="Task not found or does not belong to the specified project") 138 | 139 | retriever = Retriever(db) 140 | suggestion_agent = SuggestionAgent(retriever) 141 | 142 | task_dict = model_to_dict(task) 143 | suggestions = suggestion_agent.generate_suggestions(task_dict, project_id) 144 | return suggestions 145 | 146 | @router.post("/projects/{project_id}/report/") 147 
| def generate_project_report(project_id: int, db: Session = Depends(get_db), pdf: bool = False): 148 | project = crud.get_project(db, project_id=project_id) 149 | if project is None: 150 | raise HTTPException(status_code=404, detail="Project not found") 151 | 152 | tasks = crud.get_tasks(db, project_id=project_id) 153 | 154 | retriever = Retriever(db) 155 | report_agent = ReportAgent(retriever) 156 | 157 | # Convert SQLAlchemy model instances to dictionaries 158 | task_dicts = [] 159 | for task in tasks: 160 | task_dict = {c.name: getattr(task, c.name) for c in task.__table__.columns} 161 | # Handle the 'required_skills' field separately 162 | if task.required_skills: 163 | task_dict['required_skills'] = json.loads(task.required_skills) 164 | task_dicts.append(task_dict) 165 | 166 | logging.info(f"Generating report for project {project_id} with {len(task_dicts)} tasks") 167 | 168 | try: 169 | report = report_agent.generate_report(task_dicts) 170 | logging.info("Report generated successfully") 171 | 172 | if pdf: 173 | # Generate PDF 174 | buffer = BytesIO() 175 | doc = SimpleDocTemplate(buffer, pagesize=letter) 176 | styles = getSampleStyleSheet() 177 | story = [] 178 | 179 | # Add content to the PDF 180 | story.append(Paragraph("Project Report", styles['Title'])) 181 | story.append(Spacer(1, 12)) 182 | story.append(Paragraph("Summary", styles['Heading2'])) 183 | story.append(Paragraph(report['summary'], styles['Normal'])) 184 | story.append(Spacer(1, 12)) 185 | story.append(Paragraph("Key Metrics", styles['Heading2'])) 186 | for key, value in report['key_metrics'].items(): 187 | story.append(Paragraph(f"{key}: {value}", styles['Normal'])) 188 | story.append(Spacer(1, 12)) 189 | story.append(Paragraph("Risks", styles['Heading2'])) 190 | for risk in report['risks']: 191 | story.append(Paragraph(f"• {risk}", styles['Normal'])) 192 | story.append(Spacer(1, 12)) 193 | story.append(Paragraph("Recommendations", styles['Heading2'])) 194 | for recommendation in report['recommendations']: 195 | story.append(Paragraph(f"• {recommendation}", styles['Normal'])) 196 | 197 | doc.build(story) 198 | buffer.seek(0) 199 | return Response(content=buffer.getvalue(), media_type="application/pdf", headers={"Content-Disposition": "attachment; filename=project_report.pdf"}) 200 | else: 201 | return report 202 | except Exception as e: 203 | logging.error(f"Error generating report: {str(e)}") 204 | raise HTTPException(status_code=500, detail=f"An error occurred while generating the report: {str(e)}") 205 | 206 | @router.post("/projects/{project_id}/team-members/{team_member_id}", response_model=schemas.ProjectOut) 207 | def assign_team_member_to_project( 208 | project_id: int, 209 | team_member_id: int, 210 | db: Session = Depends(get_db) 211 | ): 212 | return crud.assign_team_member_to_project(db, project_id, team_member_id) -------------------------------------------------------------------------------- /frontend/app.js: -------------------------------------------------------------------------------- 1 | const API_URL = 'http://localhost:8000/api/v1'; 2 | 3 | async function fetchProjects() { 4 | try { 5 | const response = await fetch(`${API_URL}/projects/`); 6 | if (!response.ok) { 7 | throw new Error(`HTTP error! status: ${response.status}`); 8 | } 9 | const projects = await response.json(); 10 | updateProjectLists(projects); 11 | } catch (error) { 12 | console.error('Error fetching projects:', error); 13 | alert('Failed to fetch projects. 
Check the console for details.'); 14 | } 15 | } 16 | 17 | function updateProjectLists(projects) { 18 | const projectList = document.getElementById('projectList'); 19 | const projectSelects = ['projectSelect', 'aiProjectSelect', 'reportProjectSelect', 'projectSelectForTeam']; 20 | 21 | projectList.innerHTML = ''; 22 | projectSelects.forEach(selectId => { 23 | const select = document.getElementById(selectId); 24 | select.innerHTML = ''; 25 | }); 26 | 27 | projects.forEach(project => { 28 | const teamMembers = project.team_members.map(tm => tm.name).join(', '); 29 | projectList.innerHTML += ` 30 |
  • 31 | ${project.name}: ${project.description} 32 |
    Team Members: ${teamMembers || 'None'} 33 |
  • `; 34 | projectSelects.forEach(selectId => { 35 | const select = document.getElementById(selectId); 36 | select.innerHTML += ``; 37 | }); 38 | }); 39 | } 40 | 41 | async function createProject() { 42 | const name = document.getElementById('projectName').value; 43 | const description = document.getElementById('projectDescription').value; 44 | 45 | try { 46 | const response = await fetch(`${API_URL}/projects/`, { 47 | method: 'POST', 48 | headers: { 'Content-Type': 'application/json' }, 49 | body: JSON.stringify({ name, description }) 50 | }); 51 | 52 | if (!response.ok) { 53 | throw new Error(`HTTP error! status: ${response.status}`); 54 | } 55 | 56 | const result = await response.json(); 57 | console.log('Project created:', result); 58 | 59 | fetchProjects(); 60 | document.getElementById('projectName').value = ''; 61 | document.getElementById('projectDescription').value = ''; 62 | } catch (error) { 63 | console.error('Error creating project:', error); 64 | alert('Failed to create project. Check the console for details.'); 65 | } 66 | } 67 | 68 | async function fetchTasks(projectId) { 69 | try { 70 | const response = await fetch(`${API_URL}/projects/${projectId}/tasks/`); 71 | if (!response.ok) { 72 | throw new Error(`HTTP error! status: ${response.status}`); 73 | } 74 | const tasks = await response.json(); 75 | updateTaskList(tasks); 76 | } catch (error) { 77 | console.error('Error fetching tasks:', error); 78 | alert('Failed to fetch tasks. Check the console for details.'); 79 | } 80 | } 81 | 82 | function updateTaskList(tasks) { 83 | const taskList = document.getElementById('taskList'); 84 | const taskSelect = document.getElementById('taskSelect'); 85 | 86 | taskList.innerHTML = ''; 87 | taskSelect.innerHTML = ''; 88 | 89 | tasks.forEach(task => { 90 | taskList.innerHTML += `
  • ${task.title}: ${task.status} (Skills: ${task.required_skills ? task.required_skills.join(', ') : 'None'})
  • `; 91 | taskSelect.innerHTML += ``; 92 | }); 93 | } 94 | 95 | async function createTask() { 96 | const projectId = document.getElementById('projectSelect').value; 97 | const title = document.getElementById('taskTitle').value; 98 | const description = document.getElementById('taskDescription').value; 99 | const skills = document.getElementById('taskSkills').value.split(',').map(skill => skill.trim()); 100 | 101 | if (!projectId) { 102 | alert('Please select a project first.'); 103 | return; 104 | } 105 | 106 | try { 107 | const response = await fetch(`${API_URL}/projects/${projectId}/tasks/`, { 108 | method: 'POST', 109 | headers: { 'Content-Type': 'application/json' }, 110 | body: JSON.stringify({ title, description, required_skills: skills }) 111 | }); 112 | 113 | if (!response.ok) { 114 | throw new Error(`HTTP error! status: ${response.status}`); 115 | } 116 | 117 | const result = await response.json(); 118 | console.log('Task created:', result); 119 | 120 | fetchTasks(projectId); 121 | document.getElementById('taskTitle').value = ''; 122 | document.getElementById('taskDescription').value = ''; 123 | document.getElementById('taskSkills').value = ''; 124 | } catch (error) { 125 | console.error('Error creating task:', error); 126 | alert('Failed to create task. Check the console for details.'); 127 | } 128 | } 129 | 130 | async function fetchTeamMembers() { 131 | console.log('Fetching team members...'); 132 | try { 133 | const response = await fetch(`${API_URL}/team-members/`); 134 | if (!response.ok) { 135 | throw new Error(`HTTP error! status: ${response.status}`); 136 | } 137 | const teamMembers = await response.json(); 138 | console.log('Received team members:', teamMembers); 139 | updateTeamMemberList(teamMembers); 140 | } catch (error) { 141 | console.error('Error fetching team members:', error); 142 | alert('Failed to fetch team members. Check the console for details.'); 143 | } 144 | } 145 | 146 | function updateTeamMemberList(teamMembers) { 147 | console.log('Updating team member list with:', teamMembers); 148 | const teamMemberList = document.getElementById('teamMemberList'); 149 | const teamMemberSelect = document.getElementById('teamMemberSelect'); 150 | 151 | teamMemberList.innerHTML = ''; 152 | teamMemberSelect.innerHTML = ''; 153 | 154 | teamMembers.forEach(member => { 155 | teamMemberList.innerHTML += `
        teamMemberList.innerHTML += `<li>${member.name}: ${member.skills.join(', ')}</li>`;
        teamMemberSelect.innerHTML += `<option value="${member.id}">${member.name}</option>`;
    });
    console.log('Team member list updated');
}

async function createTeamMember() {
    console.log("createTeamMember function called");
    const name = document.getElementById('teamMemberName').value;
    const email = document.getElementById('teamMemberEmail').value;
    const skills = document.getElementById('teamMemberSkills').value.split(',').map(skill => skill.trim());

    console.log("Sending team member data:", { name, email, skills });

    try {
        const response = await fetch(`${API_URL}/team-members/`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ name, email, skills, role: null }) // Include role, even if it's null
        });

        if (!response.ok) {
            const errorData = await response.json();
            throw new Error(`HTTP error! status: ${response.status}, message: ${JSON.stringify(errorData)}`);
        }

        const result = await response.json();
        console.log('Team member created:', result);

        console.log('About to fetch team members');
        await fetchTeamMembers();
        console.log('Finished fetching team members');

        document.getElementById('teamMemberName').value = '';
        document.getElementById('teamMemberEmail').value = '';
        document.getElementById('teamMemberSkills').value = '';
    } catch (error) {
        console.error('Error creating team member:', error);
        alert('Failed to create team member. Check the console for details.');
    }
}

async function assignTeamMemberToProject() {
    const projectId = document.getElementById('projectSelectForTeam').value;
    const teamMemberId = document.getElementById('teamMemberSelect').value;

    if (!projectId || !teamMemberId) {
        alert('Please select both a project and a team member.');
        return;
    }

    try {
        const response = await fetch(`${API_URL}/projects/${projectId}/team-members/${teamMemberId}`, {
            method: 'POST'
        });

        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }

        const result = await response.json();
        console.log('Team member assigned to project:', result);
        alert('Team member assigned to project successfully.');

        // Refresh the project list to show the updated team members
        await fetchProjects();
    } catch (error) {
        console.error('Error assigning team member to project:', error);
        alert('Failed to assign team member to project. Check the console for details.');
    }
}

async function askAI() {
    const projectId = document.getElementById('aiProjectSelect').value;
    const question = document.getElementById('aiQuestion').value;

    if (!projectId) {
        alert('Please select a project first.');
        return;
    }

    try {
        const response = await fetch(`${API_URL}/projects/${projectId}/ai-chat/`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ question })
        });

        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }

        const result = await response.json();
        document.getElementById('aiResponse').innerText = result.answer;
    } catch (error) {
        console.error('Error asking AI:', error);
        alert('Failed to get AI response. Check console for details.');
    }
}

async function prioritizeTask() {
    const taskId = document.getElementById('taskSelect').value;
    const projectId = document.getElementById('projectSelect').value;

    if (!taskId || !projectId) {
        alert('Please select a project and a task first.');
        return;
    }

    try {
        const response = await fetch(`${API_URL}/projects/${projectId}/tasks/${taskId}/prioritize/`, {
            method: 'POST'
        });

        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }

        const result = await response.json();
        document.getElementById('taskActions').innerText = `Task prioritized: ${result.priority}`;
        fetchTasks(projectId);
    } catch (error) {
        console.error('Error prioritizing task:', error);
        alert('Failed to prioritize task. Check console for details.');
    }
}

async function getSuggestions() {
    const taskId = document.getElementById('taskSelect').value;
    const projectId = document.getElementById('projectSelect').value;

    if (!taskId || !projectId) {
        alert('Please select a project and a task first.');
        return;
    }

    try {
        const response = await fetch(`${API_URL}/projects/${projectId}/tasks/${taskId}/suggest/`, {
            method: 'POST'
        });

        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }

        const result = await response.json();

        // Render the suggestion lists returned by the suggest endpoint
        document.getElementById('taskActions').innerHTML = `
            <h3>Suggestions:</h3>
            <ul>${result.suggestions.map(s => `<li>${s}</li>`).join('')}</ul>
            <h3>Resources:</h3>
            <ul>${result.resources.map(r => `<li>${r}</li>`).join('')}</ul>
        `;
    } catch (error) {
        console.error('Error getting suggestions:', error);
        alert('Failed to get suggestions. Check console for details.');
    }
}

async function generateReport() {
    const projectId = document.getElementById('reportProjectSelect').value;

    if (!projectId) {
        alert('Please select a project first.');
        return;
    }

    try {
        const response = await fetch(`${API_URL}/projects/${projectId}/report/`, {
            method: 'POST'
        });

        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }

        const result = await response.json();

        // Render the report sections returned by the report endpoint
        document.getElementById('projectReport').innerHTML = `
            <h3>Summary:</h3>
            <p>${result.summary}</p>
            <h3>Key Metrics:</h3>
            <ul>${result.key_metrics.map(metric => `<li>${metric}</li>`).join('')}</ul>
            <h3>Recommendations:</h3>
            <ul>${result.recommendations.map(rec => `<li>${rec}</li>`).join('')}</ul>
        `;
    } catch (error) {
        console.error('Error generating report:', error);
        alert('Failed to generate report. Check console for details.');
    }
}

// Event listeners
document.getElementById('projectSelect').addEventListener('change', (e) => {
    if (e.target.value) {
        fetchTasks(e.target.value);
    }
});

// Initial load
fetchProjects();
fetchTeamMembers();
--------------------------------------------------------------------------------