├── streamlit_app.py
├── tasks.py
├── main.py
├── pyproject.toml
├── README.md
└── agents.py

/streamlit_app.py:
--------------------------------------------------------------------------------
import streamlit as st
from main import ResearchCrew  # Import the ResearchCrew class from main.py
import os

st.title('Your Research Assistant')

# Copy the keys from Streamlit's secrets store into environment variables,
# where the OpenAI, Groq, and Serper clients expect to find them.
os.environ["OPENAI_API_KEY"] = st.secrets["OPENAI_API_KEY"]
os.environ["GROQ_API_KEY"] = st.secrets["GROQ_API_KEY"]
os.environ["SERPER_API_KEY"] = st.secrets["SERPER_API_KEY"]

with st.sidebar:
    st.header('Enter Research Details')
    topic = st.text_input("Main topic of your research:")
    detailed_questions = st.text_area("Specific questions or subtopics you are interested in exploring:")

if st.button('Run Research'):
    if not topic or not detailed_questions:
        st.error("Please fill in all the fields.")
    else:
        inputs = f"Research Topic: {topic}\nDetailed Questions: {detailed_questions}"
        research_crew = ResearchCrew(inputs)
        result = research_crew.run()
        st.subheader("Results of your research project:")
        st.write(result)
--------------------------------------------------------------------------------
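The `st.secrets` lookups above assume the three keys are defined in Streamlit's secrets file. A minimal `.streamlit/secrets.toml` (not part of this repo; the values are placeholders) would look like:

```toml
# .streamlit/secrets.toml -- substitute your real keys
OPENAI_API_KEY = "sk-..."
GROQ_API_KEY = "your-groq-key"
SERPER_API_KEY = "your-serper-key"
```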
/tasks.py:
--------------------------------------------------------------------------------
from crewai import Task


class ResearchCrewTasks:

    def research_task(self, agent, inputs):
        return Task(
            agent=agent,
            description=f"Based on {inputs}, figure out what the user needs in order to solve their problem. Check https://www.thetoolbus.ai/ai-tools and https://appsumo.com/collections/new/ for relevant tools that could be useful.",
            expected_output="A clear explanation of the principles, concepts, disciplines, and skills the visionary needs in order to accomplish their goal."
        )

    def analysis_task(self, agent, context):
        return Task(
            agent=agent,
            context=context,
            description=f"Evaluate the following report: {context}. Based on the results, create a learning plan and identify the things the user needs to learn and focus on.",
            expected_output="A thorough learning plan for the next agent."
        )

    def writing_task(self, agent, context, inputs):
        return Task(
            agent=agent,
            context=context,
            description=f"Answer the user's inquiry about the requested topics: {inputs}. Given the following learning plan: {context}, use web search and web scraping to give 5 principles or concepts the user needs to learn, each with a short overview of what it is; 5 internet article titles with their URLs; and 5 books with their authors and their purpose.",
            expected_output="5 principles and concepts, each thoroughly explained; 5 internet article titles with their URLs; and 5 books with their authors and their purpose.",
        )
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
import os
from decouple import config
from crewai import Crew, Process
from textwrap import dedent
from agents import ResearchCrewAgents
from tasks import ResearchCrewTasks


class ResearchCrew:
    def __init__(self, inputs):
        self.inputs = inputs
        self.agents = ResearchCrewAgents()
        self.tasks = ResearchCrewTasks()

    def run(self):
        # Initialize agents
        researcher = self.agents.researcher()
        analyst = self.agents.analyst()
        writer = self.agents.writer()

        # Initialize tasks with their respective agents
        research_task = self.tasks.research_task(researcher, self.inputs)
        analysis_task = self.tasks.analysis_task(analyst, [research_task])
        writing_task = self.tasks.writing_task(writer, [analysis_task], self.inputs)

        # Form the crew with the defined agents and tasks
        crew = Crew(
            agents=[researcher, analyst, writer],
            tasks=[research_task, analysis_task, writing_task],
            process=Process.sequential
        )

        # Execute the crew to carry out the research project
        return crew.kickoff()


if __name__ == "__main__":
    print("Welcome to the Research Crew Setup")
    print("---------------------------------------")
    topic = input("Please enter the main topic of your research: ")
    detailed_questions = input("What specific questions or subtopics are you interested in exploring? ")

    inputs = f"Topic: {topic}\nDetailed Questions: {detailed_questions}"
    research_crew = ResearchCrew(inputs)
    result = research_crew.run()

    print("\n\n##############################")
    print("## Here are the results of your research project:")
    print("##############################\n")
    print(result)
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
[tool.poetry]
name = "crewai-28"
version = "0.28.8"
description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks."
authors = ["Joao Moura"]
readme = "README.md"
packages = [
    { include = "crewai", from = "src" },
]

[tool.poetry.urls]
Homepage = "https://crewai.com"
Documentation = "https://github.com/joaomdmoura/CrewAI/wiki/Index"
Repository = "https://github.com/joaomdmoura/crewai"

[tool.poetry.dependencies]
streamlit = "^1.13.0"
pysqlite3-binary = "^0.5.1"
python = ">=3.10,<=3.13"
pydantic = "^2.4.2"
langchain = "^0.1.10"
openai = "^1.13.3"
langchain_groq = "^0.1.3"
opentelemetry-api = "^1.22.0"
opentelemetry-sdk = "^1.22.0"
opentelemetry-exporter-otlp-proto-http = "^1.22.0"
instructor = "^0.5.2"
regex = "^2023.12.25"
crewai-tools = { version = "^0.1.7", optional = true }
click = "^8.1.7"
python-dotenv = "1.0.0"
embedchain = "^0.1.98"
appdirs = "^1.4.4"
python-decouple = "^3.6"
crewai = "^0.28.8"

[tool.poetry.extras]
tools = ["crewai-tools"]

[tool.poetry.group.dev.dependencies]
isort = "^5.13.2"
pyright = ">=1.1.350,<2.0.0"
autoflake = "^2.2.1"
pre-commit = "^3.6.0"
mkdocs = "^1.4.3"
mkdocstrings = "^0.22.0"
mkdocstrings-python = "^1.1.2"
mkdocs-material = {extras = ["imaging"], version = "^9.5.7"}
mkdocs-material-extensions = "^1.3.1"
pillow = "^10.2.0"
cairosvg = "^2.7.1"
crewai-tools = "^0.1.7"

[tool.isort]
profile = "black"
known_first_party = ["crewai"]

[tool.poetry.group.test.dependencies]
pytest = "^8.0.0"
pytest-vcr = "^1.0.2"
python-dotenv = "1.0.0"

[tool.poetry.scripts]
crewai = "crewai.cli.cli:crewai"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
## Installation and Running of the CrewAI Project with Multiple Language Models

### Objective:

To install and run the CrewAI project, allowing switching between multiple large language models, including OpenAI models (GPT-3.5, GPT-4) and Groq-hosted models (Llama 3, Mixtral, Google's Gemma), and to set up the frontend interface using Streamlit.

### Video Walkthrough:
https://youtu.be/bvKzy6CqpvM

### Key Steps:

1. **Install Required Tools:**

   - Ensure you have VS Code, Git, pipx, and Poetry installed on your computer. Follow a setup guide for these tools if needed.

2. **Clone the GitHub Repository:**

   - Copy the GitHub repository link and use `git clone <repository-url>` to clone the project.
   - Move into the project folder using `cd <project-folder>`.

3. **Install Dependencies:**

   - Run `poetry install --no-root` to install the project dependencies.

4. **Set Up API Keys:**

   - Add API keys for OpenAI, Groq, and Serper. Since `streamlit_app.py` reads them via `st.secrets`, place them in a `.streamlit/secrets.toml` file in the project directory.
   - Alternatively, hard-code the keys directly in `streamlit_app.py`, though the secrets file is the safer option.

5. **Run the Application:**

   - Execute `streamlit run streamlit_app.py` to start the application.
   - Enter research topics or queries to test the application's functionality.

6. **Customize Language Models:**

   - Modify the `agents.py` file to change the large language model being used.
   - Update the selected model in the code to switch between the available options, as sketched below.
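As a concrete example of step 6, pointing the crew at a Groq-hosted model instead of GPT-4 is a one-line change where `agents.py` says `CHANGE YOUR MODEL HERE` (the attribute names are the ones defined in `ResearchCrewAgents.__init__`):

```python
# agents.py, inside ResearchCrewAgents.__init__
# CHANGE YOUR MODEL HERE
self.selected_llm = self.llama3_70b  # e.g. Groq's llama3-70b-8192 instead of self.gpt4
```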

### Cautionary Notes:

- Ensure API keys are entered correctly to avoid errors during execution.
- Limit the number of iterations and tools used with Groq LLMs to prevent crashes caused by token limits.
- Monitor token usage and adjust settings so you do not hit token limits prematurely.

### Tips for Efficiency:

- Keep the number of tools used by the agents in check to optimize performance.
- Limit iterations to control costs when using advanced language models such as GPT-3.5 or GPT-4.
- Regularly test the application and adjust settings as needed to maintain smooth operation.

By following these steps, you can install and run the CrewAI project with multiple language models and easily switch between models for different tasks.
--------------------------------------------------------------------------------
/agents.py:
--------------------------------------------------------------------------------
from crewai import Agent
from langchain_groq import ChatGroq  # Groq client
from langchain_openai import ChatOpenAI
import os
from crewai_tools import SerperDevTool, WebsiteSearchTool, ScrapeWebsiteTool


class ResearchCrewAgents:

    def __init__(self):
        # Initialize the tools shared by the agents
        self.serper = SerperDevTool()
        self.web = WebsiteSearchTool()
        self.web_scrape = ScrapeWebsiteTool()

        # OpenAI Models
        self.gpt3 = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.7)
        self.gpt4 = ChatOpenAI(model_name="gpt-4-turbo", temperature=0.7)
        self.gpt3_5_turbo_0125 = ChatOpenAI(model_name="gpt-3.5-turbo-0125", temperature=0.7)
        self.gpt3_5_turbo_1106 = ChatOpenAI(model_name="gpt-3.5-turbo-1106", temperature=0.7)
        self.gpt3_5_turbo_instruct = ChatOpenAI(model_name="gpt-3.5-turbo-instruct", temperature=0.7)

        # Groq Models
        self.llama3_8b = ChatGroq(temperature=0.7, groq_api_key=os.environ.get("GROQ_API_KEY"), model_name="llama3-8b-8192")
        self.llama3_70b = ChatGroq(temperature=0.7, groq_api_key=os.environ.get("GROQ_API_KEY"), model_name="llama3-70b-8192")
        self.mixtral_8x7b = ChatGroq(temperature=0.7, groq_api_key=os.environ.get("GROQ_API_KEY"), model_name="mixtral-8x7b-32768")
        self.gemma_7b = ChatGroq(temperature=0.7, groq_api_key=os.environ.get("GROQ_API_KEY"), model_name="gemma-7b-it")

        # CHANGE YOUR MODEL HERE
        self.selected_llm = self.gpt4

    def researcher(self):
        # Detailed agent setup for the Research Expert
        return Agent(
            role='Expert',
            goal='To break down broad visionary ideas into specific, actionable research topics, identify key areas requiring in-depth investigation, and prepare a report that serves as a roadmap for future goals.',
            backstory="You are an expert who can easily restate ideas from others, using critical thinking and systems thinking to figure out what the visionary wants to accomplish.",
            verbose=True,
            allow_delegation=False,
            llm=self.selected_llm,
            max_iter=3,
            tools=[self.serper, self.web, self.web_scrape],
        )

    def analyst(self):
        # Detailed agent setup for the Analyst
        return Agent(
            role='Analyst',
            goal='Come up with a learning curriculum that gives the visionary the deep and broad knowledge needed to accomplish their goals',
            backstory="You are a talented, organized, and logical educator who can come up with excellent learning plans by starting from the goal and working backwards to a novice's starting point, bridging the gap between the inexperienced and the expert.",
            verbose=True,
            allow_delegation=False,
            llm=self.selected_llm,
            max_iter=3,
        )

    def writer(self):
        # Detailed agent setup for the Writer
        return Agent(
            role='Technical writer',
            goal='Use the CrewAI tools to search for and summarize the findings of the previous agent, along with internet article titles and their URLs, plus books and online resources needed to carry out the learning plan',
            backstory="You are an organized course creator and talented educator who understands what it takes for beginners to get from point A to point B when learning. You report your findings clearly and are great at scraping the web for links and resources geared toward specific learning goals.",
            verbose=True,
            allow_delegation=False,
            llm=self.selected_llm,
            tools=[self.serper, self.web, self.web_scrape],
            max_iter=3,
        )
--------------------------------------------------------------------------------
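The README's cautionary notes recommend limiting tools and iterations when running on the Groq models to stay inside their context windows. A hypothetical variant of `ResearchCrewAgents.researcher()` tuned that way (attribute names as defined above, values purely illustrative) might look like this:

```python
# Hypothetical tweak for the 8k-context Groq models: a single tool and a
# lower iteration cap keep each run well inside the context window.
def researcher(self):
    return Agent(
        role='Expert',
        goal='Break the visionary idea down into specific, researchable topics.',
        backstory='You distil broad goals into a concrete research roadmap.',
        verbose=True,
        allow_delegation=False,
        llm=self.llama3_8b,   # llama3-8b-8192: 8,192-token context window
        max_iter=2,           # fewer reasoning loops than the 3 used above
        tools=[self.serper],  # one search tool keeps the prompts short
    )
```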