├── phi ├── py.typed ├── __init__.py ├── api │ ├── __init__.py │ └── schemas │ │ ├── __init__.py │ │ ├── response.py │ │ ├── team.py │ │ ├── ai.py │ │ ├── monitor.py │ │ ├── agent.py │ │ └── assistant.py ├── app │ └── __init__.py ├── aws │ ├── __init__.py │ ├── resource │ │ ├── __init__.py │ │ ├── cloudformation │ │ │ └── __init__.py │ │ ├── emr │ │ │ └── __init__.py │ │ ├── glue │ │ │ └── __init__.py │ │ ├── acm │ │ │ └── __init__.py │ │ ├── iam │ │ │ └── __init__.py │ │ ├── s3 │ │ │ └── __init__.py │ │ ├── secret │ │ │ └── __init__.py │ │ ├── elasticache │ │ │ └── __init__.py │ │ ├── elb │ │ │ └── __init__.py │ │ ├── rds │ │ │ └── __init__.py │ │ ├── ec2 │ │ │ └── __init__.py │ │ ├── ecs │ │ │ └── __init__.py │ │ └── reference.py │ └── app │ │ ├── django │ │ └── __init__.py │ │ ├── fastapi │ │ └── __init__.py │ │ ├── jupyter │ │ └── __init__.py │ │ ├── qdrant │ │ └── __init__.py │ │ ├── streamlit │ │ └── __init__.py │ │ ├── __init__.py │ │ └── context.py ├── cli │ ├── __init__.py │ └── ws │ │ └── __init__.py ├── docker │ ├── __init__.py │ ├── resource │ │ └── __init__.py │ └── app │ │ ├── traefik │ │ └── __init__.py │ │ ├── mysql │ │ └── __init__.py │ │ ├── redis │ │ └── __init__.py │ │ ├── django │ │ └── __init__.py │ │ ├── ollama │ │ ├── __init__.py │ │ └── ollama.py │ │ ├── qdrant │ │ └── __init__.py │ │ ├── whoami │ │ ├── __init__.py │ │ └── whoami.py │ │ ├── fastapi │ │ └── __init__.py │ │ ├── jupyter │ │ └── __init__.py │ │ ├── streamlit │ │ └── __init__.py │ │ ├── __init__.py │ │ ├── context.py │ │ ├── postgres │ │ ├── __init__.py │ │ └── pgvector.py │ │ ├── superset │ │ ├── worker.py │ │ ├── __init__.py │ │ ├── worker_beat.py │ │ ├── init.py │ │ └── webserver.py │ │ └── airflow │ │ ├── scheduler.py │ │ ├── __init__.py │ │ ├── flower.py │ │ └── webserver.py ├── infra │ ├── __init__.py │ └── type.py ├── llm │ ├── aws │ │ └── __init__.py │ ├── __init__.py │ ├── groq │ │ └── __init__.py │ ├── cohere │ │ └── __init__.py │ ├── google │ │ └── __init__.py │ ├── 
anthropic │ │ └── __init__.py │ ├── vertexai │ │ └── __init__.py │ ├── azure │ │ └── __init__.py │ ├── deepseek │ │ ├── __init__.py │ │ └── deepseek.py │ ├── exceptions.py │ ├── fireworks │ │ └── __init__.py │ ├── mistral │ │ └── __init__.py │ ├── together │ │ └── __init__.py │ ├── openrouter │ │ ├── __init__.py │ │ └── openrouter.py │ ├── openai │ │ ├── __init__.py │ │ └── like.py │ ├── ollama │ │ ├── __init__.py │ │ └── openai.py │ └── references.py ├── model │ ├── __init__.py │ ├── aws │ │ └── __init__.py │ ├── InternLM │ │ └── __init__.py │ ├── xai │ │ └── __init__.py │ ├── groq │ │ └── __init__.py │ ├── cohere │ │ └── __init__.py │ ├── nvidia │ │ └── __init__.py │ ├── anthropic │ │ └── __init__.py │ ├── mistral │ │ └── __init__.py │ ├── together │ │ └── __init__.py │ ├── vertexai │ │ └── __init__.py │ ├── azure │ │ └── __init__.py │ ├── deepseek │ │ └── __init__.py │ ├── fireworks │ │ └── __init__.py │ ├── sambanova │ │ └── __init__.py │ ├── huggingface │ │ └── __init__.py │ ├── openrouter │ │ └── __init__.py │ ├── openai │ │ └── __init__.py │ ├── ollama │ │ └── __init__.py │ └── google │ │ └── __init__.py ├── run │ └── __init__.py ├── storage │ ├── __init__.py │ ├── agent │ │ └── __init__.py │ ├── workflow │ │ └── __init__.py │ └── assistant │ │ └── __init__.py ├── utils │ ├── __init__.py │ ├── dttm.py │ ├── audio.py │ ├── download_stream_file.py │ ├── format_str.py │ ├── response_iterator.py │ └── env.py ├── file │ ├── local │ │ ├── __init__.py │ │ └── txt.py │ ├── __init__.py │ └── file.py ├── knowledge │ ├── s3 │ │ └── __init__.py │ └── __init__.py ├── reasoning │ └── __init__.py ├── reranker │ ├── __init__.py │ └── base.py ├── resource │ └── __init__.py ├── workspace │ ├── __init__.py │ └── enums.py ├── document │ ├── chunking │ │ └── __init__.py │ ├── reader │ │ ├── s3 │ │ │ └── __init__.py │ │ └── __init__.py │ └── __init__.py ├── tools │ ├── streamlit │ │ └── __init__.py │ ├── tool_registry.py │ ├── __init__.py │ └── tool.py ├── vectordb │ ├── 
cassandra │ │ ├── __init__.py │ │ └── extra_param_mixin.py │ ├── __init__.py │ ├── milvus │ │ └── __init__.py │ ├── qdrant │ │ └── __init__.py │ ├── chroma │ │ └── __init__.py │ ├── lancedb │ │ └── __init__.py │ ├── pineconedb │ │ └── __init__.py │ ├── mongodb │ │ └── __init__.py │ ├── search.py │ ├── distance.py │ ├── clickhouse │ │ ├── __init__.py │ │ └── index.py │ ├── singlestore │ │ └── __init__.py │ └── pgvector │ │ └── __init__.py ├── embedder │ ├── __init__.py │ ├── fireworks.py │ ├── together.py │ └── base.py ├── eval │ └── __init__.py ├── memory │ ├── db │ │ └── __init__.py │ ├── __init__.py │ └── memory.py ├── assistant │ ├── openai │ │ ├── file │ │ │ └── __init__.py │ │ ├── __init__.py │ │ └── tool.py │ └── __init__.py ├── prompt │ ├── __init__.py │ └── exceptions.py ├── workflow │ └── __init__.py ├── playground │ └── __init__.py └── agent │ └── __init__.py ├── evals ├── __init__.py ├── .gitignore └── models │ ├── __init__.py │ └── openai │ └── __init__.py ├── tests ├── __init__.py └── unit │ ├── __init__.py │ └── utils │ └── __init__.py ├── cookbook ├── __init__.py ├── agents │ ├── __init__.py │ ├── .gitignore │ ├── multimodal-agents.jpg │ ├── 08_debugging.py │ ├── 07_monitoring.py │ ├── 20_system_prompt.py │ ├── 25_system_prompt_via_function.py │ ├── 41_image_to_text.py │ ├── 26_instructions_via_function.py │ ├── 13_image_agent.py │ ├── 33_agent_input_as_list.py │ ├── 01_web_search.py │ ├── 18_is_9_11_bigger_than_9_9.py │ └── 36_image_input_high_fidelity.py ├── async │ ├── __init__.py │ ├── web_search.py │ ├── basic.py │ └── basic_stream_off.py ├── memory │ └── __init__.py ├── rag │ └── __init__.py ├── readers │ └── __init__.py ├── storage │ ├── __init__.py │ ├── json_storage.py │ ├── yaml_storage.py │ └── sqlite_storage.py ├── teams │ ├── __init__.py │ └── .gitignore ├── tools │ ├── __init__.py │ ├── jira_tools.py │ ├── pubmed.py │ ├── zendesk_tools.py │ ├── wikipedia_tools.py │ ├── duckduckgo.py │ ├── serpapi_tools.py │ ├── tavily_tools.py │ ├── 
arxiv_tools.py │ ├── newspaper_tools.py │ ├── shell_tools.py │ ├── apify_tools.py │ ├── file_tools.py │ ├── jinareader_tools.py │ ├── website_tools.py │ ├── crawl4ai_tools.py │ ├── exa_tools.py │ ├── python_tools.py │ ├── spider_tools.py │ ├── firecrawl_tools.py │ ├── sql_tools.py │ ├── newspaper4k_tools.py │ ├── resend_tools.py │ ├── duckduckgo_mod.py │ ├── sleep_tool.py │ ├── models_lab_tool.py │ ├── composio_tools.py │ ├── hackernews.py │ ├── duckdb_tools.py │ ├── youtube_tools.py │ ├── phi_tool.py │ └── searxng_tools.py ├── agents_101 │ ├── __init__.py │ └── 01_web_search.py ├── assistants │ ├── __init__.py │ ├── async │ │ ├── __init__.py │ │ ├── basic.py │ │ └── basic_stream_off.py │ ├── llm_os │ │ ├── __init__.py │ │ ├── .gitignore │ │ └── requirements.in │ ├── llms │ │ ├── __init__.py │ │ ├── claude │ │ │ ├── __init__.py │ │ │ ├── basic.py │ │ │ ├── basic_stream_off.py │ │ │ ├── assistant.py │ │ │ └── assistant_stream_off.py │ │ ├── cohere │ │ │ ├── __init__.py │ │ │ ├── basic.py │ │ │ ├── basic_stream_off.py │ │ │ ├── assistant.py │ │ │ └── assistant_stream_off.py │ │ ├── groq │ │ │ ├── __init__.py │ │ │ ├── rag │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── ai_apps │ │ │ │ ├── __init__.py │ │ │ │ ├── pages │ │ │ │ │ └── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── auto_rag │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── research │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── finance_analyst │ │ │ │ └── __init__.py │ │ │ ├── news_articles │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── video_summary │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── investment_researcher │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── basic.py │ │ │ ├── basic_stream_off.py │ │ │ ├── web_search.py │ │ │ └── data_analyst.py │ │ ├── ollama │ │ │ ├── __init__.py │ │ │ ├── rag │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── auto_rag │ │ │ │ ├── __init__.py │ │ │ │ └── 
requirements.in │ │ │ ├── tools │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── video_summary │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── test_image.jpeg │ │ │ ├── embeddings.py │ │ │ ├── tool_call.py │ │ │ ├── image.py │ │ │ ├── hermes.py │ │ │ ├── openai_api.py │ │ │ ├── assistant.py │ │ │ ├── finance.py │ │ │ ├── assistant_stream_off.py │ │ │ └── who_are_you.py │ │ ├── bedrock │ │ │ ├── __init__.py │ │ │ ├── basic.py │ │ │ ├── basic_stream_off.py │ │ │ ├── assistant.py │ │ │ └── cli_app.py │ │ ├── fireworks │ │ │ ├── __init__.py │ │ │ ├── embeddings.py │ │ │ ├── assistant.py │ │ │ ├── basic.py │ │ │ ├── assistant_stream_off.py │ │ │ └── basic_stream_off.py │ │ ├── hermes2 │ │ │ ├── __init__.py │ │ │ ├── auto_rag │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── embeddings.py │ │ │ ├── assistant.py │ │ │ ├── basic.py │ │ │ └── finance.py │ │ ├── mistral │ │ │ ├── __init__.py │ │ │ ├── rag │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── list_models.py │ │ │ ├── assistant.py │ │ │ ├── assistant_stream_off.py │ │ │ └── tool_call.py │ │ ├── openai │ │ │ ├── __init__.py │ │ │ ├── auto_rag │ │ │ │ ├── __init__.py │ │ │ │ └── requirements.in │ │ │ ├── embeddings.py │ │ │ ├── tool_call.py │ │ │ ├── assistant.py │ │ │ └── custom_messages.py │ │ ├── openhermes │ │ │ ├── __init__.py │ │ │ ├── embeddings.py │ │ │ ├── assistant.py │ │ │ └── tool_call.py │ │ ├── together │ │ │ ├── __init__.py │ │ │ ├── embeddings.py │ │ │ ├── cli.py │ │ │ ├── web_search.py │ │ │ ├── assistant.py │ │ │ └── assistant_stream_off.py │ │ ├── vertexai │ │ │ ├── __init__.py │ │ │ ├── samples │ │ │ │ └── __init__.py │ │ │ └── assistant.py │ │ ├── azure_openai │ │ │ ├── __init__.py │ │ │ ├── embeddings.py │ │ │ ├── cli.py │ │ │ ├── assistant.py │ │ │ └── assistant_stream_off.py │ │ ├── llama_cpp │ │ │ ├── __init__.py │ │ │ ├── .gitignore │ │ │ ├── assistant.py │ │ │ ├── assistant_stream_off.py │ │ │ └── tool_call.py │ │ ├── lmstudio │ │ │ ├── 
__init__.py │ │ │ ├── cli.py │ │ │ ├── assistant.py │ │ │ ├── assistant_stream_off.py │ │ │ └── tool_call.py │ │ ├── google │ │ │ ├── embeddings.py │ │ │ ├── assistant.py │ │ │ ├── assistant_stream_off.py │ │ │ ├── basic.py │ │ │ └── basic_stream_off.py │ │ ├── huggingface │ │ │ ├── sentence_transformer_embeddings.py │ │ │ └── huggingface_custom_embeddings.py │ │ ├── openrouter │ │ │ ├── assistant.py │ │ │ ├── tool_call.py │ │ │ └── assistant_stream_off.py │ │ └── deepseek │ │ │ └── tool_call.py │ ├── teams │ │ ├── __init__.py │ │ ├── .gitignore │ │ └── journalist │ │ │ └── __init__.py │ ├── tools │ │ ├── __init__.py │ │ ├── .gitignore │ │ ├── duckduckgo.py │ │ ├── tavily_tools.py │ │ ├── wikipedia_tools.py │ │ ├── arxiv_tools.py │ │ ├── duckduckgo_3.py │ │ ├── shell_tools.py │ │ ├── apify_tools.py │ │ ├── file_tools.py │ │ ├── website_tools.py │ │ ├── pubmed.py │ │ ├── crawl4ai_tools.py │ │ ├── resend_tools.py │ │ ├── python_tools.py │ │ ├── serpapi_tools.py │ │ ├── spider_tools.py │ │ ├── newspaper4k_tools.py │ │ ├── exa_tools.py │ │ ├── duckdb_tools.py │ │ ├── firecrawl_tools.py │ │ ├── hackernews.py │ │ ├── sql_tools.py │ │ └── youtube_tools.py │ ├── .gitignore │ ├── examples │ │ ├── __init__.py │ │ ├── pdf │ │ │ └── __init__.py │ │ ├── rag │ │ │ └── __init__.py │ │ ├── sql │ │ │ ├── __init__.py │ │ │ ├── requirements.in │ │ │ ├── knowledge │ │ │ │ └── sample_queries.sql │ │ │ └── load_knowledge.py │ │ ├── auto_rag │ │ │ ├── __init__.py │ │ │ └── requirements.in │ │ ├── data_eng │ │ │ ├── __init__.py │ │ │ ├── .gitignore │ │ │ └── requirements.in │ │ ├── research │ │ │ ├── __init__.py │ │ │ └── requirements.in │ │ ├── personalization │ │ │ ├── __init__.py │ │ │ └── requirements.in │ │ ├── structured_output │ │ │ ├── __init__.py │ │ │ └── README.md │ │ └── worldbuilding │ │ │ ├── __init__.py │ │ │ └── requirements.in │ ├── knowledge │ │ └── __init__.py │ ├── advanced_rag │ │ ├── __init__.py │ │ ├── hybrid_search │ │ │ └── __init__.py │ │ ├── image_search │ │ │ 
├── __init__.py │ │ │ └── requirements.txt │ │ └── pinecone_hybrid_search │ │ │ ├── __init__.py │ │ │ └── requirements.txt │ ├── integrations │ │ ├── __init__.py │ │ ├── chromadb │ │ │ ├── __init__.py │ │ │ └── README.md │ │ ├── lancedb │ │ │ ├── __init__.py │ │ │ └── README.md │ │ ├── pgvector │ │ │ └── __init__.py │ │ ├── pinecone │ │ │ ├── __init__.py │ │ │ └── README.md │ │ ├── qdrant │ │ │ ├── __init__.py │ │ │ └── README.md │ │ └── singlestore │ │ │ ├── __init__.py │ │ │ ├── ai_apps │ │ │ ├── __init__.py │ │ │ ├── pages │ │ │ │ └── __init__.py │ │ │ └── requirements.in │ │ │ └── auto_rag │ │ │ └── README.md │ ├── mixture_of_agents │ │ ├── requirements.txt │ │ └── mixture_of_agents_diagram.png │ ├── system_prompt.py │ ├── joke.py │ ├── user_prompt.py │ ├── tothemoon.py │ ├── cli.py │ ├── instructions.py │ ├── web_search.py │ ├── additional_messages.py │ ├── basic.py │ ├── user_messages.py │ ├── storage.py │ └── is_9_11_bigger_than_9_9.py ├── chunking │ └── __init__.py ├── embedders │ └── __init__.py ├── examples │ ├── __init__.py │ ├── agents │ │ └── __init__.py │ ├── hybrid_search │ │ ├── __init__.py │ │ ├── lancedb │ │ │ ├── __init__.py │ │ │ └── README.md │ │ ├── pgvector │ │ │ ├── __init__.py │ │ │ └── README.md │ │ └── pinecone │ │ │ └── __init__.py │ ├── streamlit │ │ ├── __init__.py │ │ ├── geobuddy │ │ │ ├── __init__.py │ │ │ └── requirements.txt │ │ ├── llm_os │ │ │ ├── __init__.py │ │ │ └── requirements.txt │ │ └── paperpal │ │ │ ├── __init__.py │ │ │ └── requirements.txt │ ├── workflows │ │ ├── __init__.py │ │ ├── coding_agent │ │ │ ├── __init__.py │ │ │ └── requirements.txt │ │ └── qa_agent_workflow │ │ │ ├── __init__.py │ │ │ └── requirements.txt │ ├── dynamodb_as_storage │ │ └── __init__.py │ ├── product_manager_agent │ │ └── __init__.py │ └── rag_with_lance_and_sqlite │ │ └── __init__.py ├── integrations │ ├── __init__.py │ ├── mem0 │ │ └── __init__.py │ ├── chromadb │ │ ├── __init__.py │ │ └── README.md │ ├── lancedb │ │ ├── __init__.py │ │ └── 
README.md │ ├── pgvector │ │ └── __init__.py │ ├── pinecone │ │ ├── __init__.py │ │ └── README.md │ ├── qdrant │ │ └── __init__.py │ ├── clickhouse │ │ └── __init__.py │ └── singlestore │ │ └── __init__.py ├── knowledge │ └── __init__.py ├── playground │ └── __init__.py ├── providers │ ├── __init__.py │ ├── bedrock │ │ ├── __init__.py │ │ ├── basic.py │ │ └── web_search.py │ ├── claude │ │ ├── __init__.py │ │ ├── web_search.py │ │ ├── basic.py │ │ └── image_agent.py │ ├── cohere │ │ ├── __init__.py │ │ ├── basic.py │ │ ├── web_search.py │ │ └── basic_stream.py │ ├── google │ │ ├── __init__.py │ │ ├── .gitignore │ │ ├── audio_agent.py │ │ ├── web_search.py │ │ ├── basic.py │ │ ├── image_agent.py │ │ └── basic_stream.py │ ├── groq │ │ ├── __init__.py │ │ ├── .gitignore │ │ ├── async │ │ │ ├── __init__.py │ │ │ ├── basic.py │ │ │ └── basic_stream.py │ │ ├── image_agent.py │ │ ├── basic.py │ │ ├── web_search.py │ │ └── basic_stream.py │ ├── hermes │ │ ├── __init__.py │ │ ├── web_search.py │ │ ├── basic.py │ │ └── basic_stream.py │ ├── hermes2 │ │ └── __init__.py │ ├── mistral │ │ ├── __init__.py │ │ └── embeddings.py │ ├── nvidia │ │ ├── __init__.py │ │ ├── basic.py │ │ └── basic_stream.py │ ├── ollama │ │ ├── __init__.py │ │ ├── super-agents.png │ │ ├── web_search.py │ │ ├── basic.py │ │ ├── image_agent.py │ │ └── basic_stream.py │ ├── openai │ │ ├── __init__.py │ │ ├── o1 │ │ │ ├── __init__.py │ │ │ ├── o1_mini.py │ │ │ ├── o1_preview.py │ │ │ ├── o1_mini_stream.py │ │ │ ├── o1.py │ │ │ └── o1_preview_stream.py │ │ ├── .gitignore │ │ ├── web_search.py │ │ ├── basic.py │ │ ├── image_agent.py │ │ └── basic_stream.py │ ├── xai │ │ ├── __init__.py │ │ ├── web_search.py │ │ ├── basic.py │ │ └── basic_stream.py │ ├── azure_openai │ │ ├── __init__.py │ │ ├── basic.py │ │ └── web_search.py │ ├── deepseek │ │ ├── __init__.py │ │ ├── basic.py │ │ ├── web_search.py │ │ └── basic_stream.py │ ├── fireworks │ │ ├── __init__.py │ │ ├── basic.py │ │ └── web_search.py │ ├── 
huggingface │ │ ├── __init__.py │ │ ├── agent_stream.py │ │ └── basic_llama_inference.py │ ├── llama_cpp │ │ └── __init__.py │ ├── lmstudio │ │ └── __init__.py │ ├── ollama_tools │ │ ├── __init__.py │ │ ├── web_search.py │ │ └── basic.py │ ├── openhermes │ │ └── __init__.py │ ├── openrouter │ │ ├── __init__.py │ │ ├── web_search.py │ │ ├── basic.py │ │ └── basic_stream.py │ ├── sambanova │ │ ├── __init__.py │ │ ├── basic.py │ │ └── basic_stream.py │ ├── together │ │ ├── __init__.py │ │ ├── basic.py │ │ └── web_search.py │ ├── vertexai │ │ ├── __init__.py │ │ ├── web_search.py │ │ ├── basic.py │ │ └── basic_stream.py │ └── google_openai │ │ ├── __init__.py │ │ └── basic.py ├── reasoning │ ├── __init__.py │ ├── python_101_curriculum.py │ ├── plan_itenerary.py │ ├── life_in_500000_years.py │ ├── fibonacci.py │ └── mathematical_proof.py ├── vectordb │ └── __init__.py ├── workflows │ ├── __init__.py │ ├── .gitignore │ └── content_creator_workflow │ │ ├── __init__.py │ │ ├── requirements.txt │ │ └── config.py ├── run_pgvector.sh ├── run_mysql.sh ├── run_clickhouse.sh └── mysql-init │ └── init.sql ├── setup.py └── .editorconfig /phi/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /evals/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /phi/api/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/app/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/aws/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/cli/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/cli/ws/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/docker/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/infra/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/llm/aws/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/model/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/run/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/phi/storage/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/agents/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/async/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/memory/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/readers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/storage/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/teams/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/tools/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /evals/.gitignore: -------------------------------------------------------------------------------- 1 | results 2 | -------------------------------------------------------------------------------- /evals/models/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/api/schemas/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/aws/resource/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/file/local/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/knowledge/s3/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/model/aws/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/reasoning/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/reranker/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/resource/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/workspace/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/agents_101/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/chunking/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/embedders/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/integrations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/knowledge/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/playground/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/reasoning/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/teams/.gitignore: -------------------------------------------------------------------------------- 1 | tmp 2 | -------------------------------------------------------------------------------- /cookbook/vectordb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/workflows/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /evals/models/openai/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/docker/resource/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/document/chunking/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/model/InternLM/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/storage/agent/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/storage/workflow/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/tools/streamlit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/agents/.gitignore: -------------------------------------------------------------------------------- 1 | tmp 2 | -------------------------------------------------------------------------------- /cookbook/assistants/async/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llm_os/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/teams/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/agents/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /cookbook/integrations/mem0/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/bedrock/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/claude/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/cohere/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/google/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/groq/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/hermes/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/hermes2/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/mistral/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/nvidia/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/ollama/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/openai/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/xai/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/docker/app/traefik/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/document/reader/s3/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/vectordb/cassandra/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/.gitignore: -------------------------------------------------------------------------------- 1 | scratch 2 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/knowledge/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /cookbook/assistants/llms/claude/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/cohere/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/hybrid_search/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/streamlit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/workflows/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/integrations/chromadb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/integrations/lancedb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/cookbook/integrations/pgvector/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/integrations/pinecone/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/integrations/qdrant/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/azure_openai/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/deepseek/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/fireworks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/groq/.gitignore: -------------------------------------------------------------------------------- 1 | tmp 2 | -------------------------------------------------------------------------------- /cookbook/providers/groq/async/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/huggingface/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/llama_cpp/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/lmstudio/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/ollama_tools/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/openai/o1/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/openhermes/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/openrouter/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/sambanova/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/together/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/vertexai/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/aws/resource/cloudformation/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /cookbook/assistants/advanced_rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/pdf/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/sql/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/bedrock/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/fireworks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/hermes2/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /cookbook/assistants/llms/mistral/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/mistral/rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openai/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openhermes/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/together/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/vertexai/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/teams/.gitignore: -------------------------------------------------------------------------------- 1 | scratch 2 | -------------------------------------------------------------------------------- /cookbook/assistants/teams/journalist/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /cookbook/assistants/tools/.gitignore: -------------------------------------------------------------------------------- 1 | wip 2 | -------------------------------------------------------------------------------- /cookbook/examples/streamlit/geobuddy/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/streamlit/llm_os/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/streamlit/paperpal/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/integrations/clickhouse/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/integrations/singlestore/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/providers/google_openai/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/auto_rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/data_eng/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /cookbook/assistants/examples/research/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/chromadb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/lancedb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/pgvector/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/pinecone/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/qdrant/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llm_os/.gitignore: -------------------------------------------------------------------------------- 1 | scratch 2 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/azure_openai/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/ai_apps/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/auto_rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/research/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/hermes2/auto_rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/llama_cpp/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/lmstudio/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/auto_rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/tools/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openai/auto_rag/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/vertexai/samples/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /cookbook/examples/dynamodb_as_storage/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/hybrid_search/lancedb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/hybrid_search/pgvector/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/hybrid_search/pinecone/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/product_manager_agent/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/workflows/coding_agent/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/workflows/.gitignore: -------------------------------------------------------------------------------- 1 | reports 2 | games 3 | -------------------------------------------------------------------------------- /phi/llm/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.base import LLM 2 | -------------------------------------------------------------------------------- /cookbook/assistants/advanced_rag/hybrid_search/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /cookbook/assistants/advanced_rag/image_search/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/personalization/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/structured_output/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/worldbuilding/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/singlestore/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/ai_apps/pages/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/finance_analyst/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/news_articles/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/video_summary/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/llama_cpp/.gitignore: -------------------------------------------------------------------------------- 1 | models 2 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/video_summary/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/rag_with_lance_and_sqlite/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/examples/workflows/qa_agent_workflow/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/workflows/content_creator_workflow/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/file/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.file.file import File 2 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/data_eng/.gitignore: -------------------------------------------------------------------------------- 1 | scratch 2 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/investment_researcher/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/phi/llm/groq/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.groq.groq import Groq 2 | -------------------------------------------------------------------------------- /phi/model/xai/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.xai.xai import xAI 2 | -------------------------------------------------------------------------------- /cookbook/assistants/advanced_rag/pinecone_hybrid_search/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/singlestore/ai_apps/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/document/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.document.base import Document 2 | -------------------------------------------------------------------------------- /phi/embedder/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.base import Embedder 2 | -------------------------------------------------------------------------------- /phi/eval/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.eval.eval import Eval, EvalResult 2 | -------------------------------------------------------------------------------- /phi/model/groq/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.groq.groq import Groq 2 | -------------------------------------------------------------------------------- /phi/vectordb/__init__.py: -------------------------------------------------------------------------------- 1 | from 
phi.vectordb.base import VectorDb 2 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/singlestore/ai_apps/pages/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /phi/llm/cohere/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.cohere.chat import CohereChat 2 | -------------------------------------------------------------------------------- /phi/llm/google/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.google.gemini import Gemini 2 | -------------------------------------------------------------------------------- /phi/memory/db/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.memory.db.base import MemoryDb 2 | -------------------------------------------------------------------------------- /phi/llm/anthropic/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.anthropic.claude import Claude 2 | -------------------------------------------------------------------------------- /phi/llm/vertexai/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.vertexai.gemini import Gemini 2 | -------------------------------------------------------------------------------- /phi/model/cohere/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.cohere.chat import CohereChat 2 | -------------------------------------------------------------------------------- /phi/model/nvidia/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.nvidia.nvidia import Nvidia 2 | 
-------------------------------------------------------------------------------- /phi/aws/app/django/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.app.django.django import Django 2 | -------------------------------------------------------------------------------- /phi/aws/app/fastapi/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.app.fastapi.fastapi import FastApi 2 | -------------------------------------------------------------------------------- /phi/aws/app/jupyter/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.app.jupyter.jupyter import Jupyter 2 | -------------------------------------------------------------------------------- /phi/aws/app/qdrant/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.app.qdrant.qdrant import Qdrant 2 | -------------------------------------------------------------------------------- /phi/docker/app/mysql/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.mysql.mysql import MySQLDb 2 | -------------------------------------------------------------------------------- /phi/docker/app/redis/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.redis.redis import Redis 2 | -------------------------------------------------------------------------------- /phi/document/reader/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.document.reader.base import Reader 2 | -------------------------------------------------------------------------------- /phi/llm/azure/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.azure.openai_chat import AzureOpenAIChat 2 
| -------------------------------------------------------------------------------- /phi/llm/deepseek/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.deepseek.deepseek import DeepSeekChat 2 | -------------------------------------------------------------------------------- /phi/llm/exceptions.py: -------------------------------------------------------------------------------- 1 | class InvalidToolCallException(Exception): 2 | pass 3 | -------------------------------------------------------------------------------- /phi/llm/fireworks/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.fireworks.fireworks import Fireworks 2 | -------------------------------------------------------------------------------- /phi/llm/mistral/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.mistral.mistral import MistralChat 2 | -------------------------------------------------------------------------------- /phi/llm/together/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.together.together import Together 2 | -------------------------------------------------------------------------------- /phi/model/anthropic/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.anthropic.claude import Claude 2 | -------------------------------------------------------------------------------- /phi/model/mistral/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.mistral.mistral import MistralChat 2 | -------------------------------------------------------------------------------- /phi/model/together/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.together.together import Together 2 
| -------------------------------------------------------------------------------- /phi/model/vertexai/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.vertexai.gemini import Gemini 2 | -------------------------------------------------------------------------------- /phi/vectordb/milvus/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.vectordb.milvus.milvus import Milvus 2 | -------------------------------------------------------------------------------- /phi/vectordb/qdrant/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.vectordb.qdrant.qdrant import Qdrant 2 | -------------------------------------------------------------------------------- /phi/aws/resource/emr/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.resource.emr.cluster import EmrCluster 2 | -------------------------------------------------------------------------------- /phi/docker/app/django/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.django.django import Django 2 | -------------------------------------------------------------------------------- /phi/docker/app/ollama/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.ollama.ollama import Ollama 2 | -------------------------------------------------------------------------------- /phi/docker/app/qdrant/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.qdrant.qdrant import Qdrant 2 | -------------------------------------------------------------------------------- /phi/docker/app/whoami/__init__.py: -------------------------------------------------------------------------------- 1 | from 
phi.docker.app.whoami.whoami import Whoami 2 | -------------------------------------------------------------------------------- /phi/llm/openrouter/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.openrouter.openrouter import OpenRouter 2 | -------------------------------------------------------------------------------- /phi/model/azure/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.azure.openai_chat import AzureOpenAIChat 2 | -------------------------------------------------------------------------------- /phi/model/deepseek/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.deepseek.deepseek import DeepSeekChat 2 | -------------------------------------------------------------------------------- /phi/model/fireworks/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.fireworks.fireworks import Fireworks 2 | -------------------------------------------------------------------------------- /phi/model/sambanova/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.sambanova.sambanova import Sambanova 2 | -------------------------------------------------------------------------------- /phi/vectordb/chroma/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.vectordb.chroma.chromadb import ChromaDb 2 | -------------------------------------------------------------------------------- /phi/assistant/openai/file/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.assistant.openai.file.file import File 2 | -------------------------------------------------------------------------------- /phi/aws/app/streamlit/__init__.py: 
-------------------------------------------------------------------------------- 1 | from phi.aws.app.streamlit.streamlit import Streamlit 2 | -------------------------------------------------------------------------------- /phi/aws/resource/glue/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.resource.glue.crawler import GlueCrawler 2 | -------------------------------------------------------------------------------- /phi/docker/app/fastapi/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.fastapi.fastapi import FastApi 2 | -------------------------------------------------------------------------------- /phi/docker/app/jupyter/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.jupyter.jupyter import Jupyter 2 | -------------------------------------------------------------------------------- /phi/model/huggingface/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.huggingface.hf import HuggingFaceChat 2 | -------------------------------------------------------------------------------- /phi/model/openrouter/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.openrouter.openrouter import OpenRouter 2 | -------------------------------------------------------------------------------- /phi/storage/assistant/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.storage.assistant.base import AssistantStorage 2 | -------------------------------------------------------------------------------- /cookbook/assistants/mixture_of_agents/requirements.txt: -------------------------------------------------------------------------------- 1 | phidata 2 | groq 3 | pandas 4 | MLB-StatsAPI 
-------------------------------------------------------------------------------- /cookbook/providers/google/.gitignore: -------------------------------------------------------------------------------- 1 | *.jpg 2 | *.png 3 | *.mp3 4 | *.wav 5 | *.mp4 6 | *.mp3 7 | -------------------------------------------------------------------------------- /cookbook/providers/openai/.gitignore: -------------------------------------------------------------------------------- 1 | *.jpg 2 | *.png 3 | *.mp3 4 | *.wav 5 | *.mp4 6 | *.mp3 7 | -------------------------------------------------------------------------------- /phi/assistant/openai/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.assistant.openai.assistant import OpenAIAssistant 2 | -------------------------------------------------------------------------------- /phi/aws/resource/acm/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.resource.acm.certificate import AcmCertificate 2 | -------------------------------------------------------------------------------- /phi/docker/app/streamlit/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.streamlit.streamlit import Streamlit 2 | -------------------------------------------------------------------------------- /phi/vectordb/lancedb/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.vectordb.lancedb.lance_db import LanceDb, SearchType 2 | -------------------------------------------------------------------------------- /phi/vectordb/pineconedb/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.vectordb.pineconedb.pineconedb import PineconeDB 2 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/research/requirements.in: 
-------------------------------------------------------------------------------- 1 | groq 2 | phidata 3 | streamlit 4 | tavily-python 5 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/worldbuilding/requirements.in: -------------------------------------------------------------------------------- 1 | ollama 2 | streamlit 3 | sqlalchemy 4 | phidata 5 | -------------------------------------------------------------------------------- /cookbook/examples/streamlit/paperpal/requirements.txt: -------------------------------------------------------------------------------- 1 | phidata 2 | openai 3 | streamlit 4 | exa_py 5 | arxiv 6 | -------------------------------------------------------------------------------- /cookbook/examples/workflows/qa_agent_workflow/requirements.txt: -------------------------------------------------------------------------------- 1 | phidata 2 | lancedb 3 | openai 4 | tantivy 5 | -------------------------------------------------------------------------------- /phi/aws/app/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.app.base import AwsApp, AwsBuildContext, ContainerContext # noqa: F401 2 | -------------------------------------------------------------------------------- /phi/tools/tool_registry.py: -------------------------------------------------------------------------------- 1 | from phi.tools.toolkit import Toolkit as ToolRegistry # type: ignore # noqa: F401 2 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/video_summary/requirements.in: -------------------------------------------------------------------------------- 1 | groq 2 | phidata 3 | streamlit 4 | youtube_transcript_api 5 | -------------------------------------------------------------------------------- /phi/llm/openai/__init__.py: 
-------------------------------------------------------------------------------- 1 | from phi.llm.openai.chat import OpenAIChat 2 | from phi.llm.openai.like import OpenAILike 3 | -------------------------------------------------------------------------------- /cookbook/agents/multimodal-agents.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/agno-agi/phidata/HEAD/cookbook/agents/multimodal-agents.jpg -------------------------------------------------------------------------------- /cookbook/assistants/examples/data_eng/requirements.in: -------------------------------------------------------------------------------- 1 | streamlit 2 | sqlalchemy 3 | phidata 4 | duckdb 5 | pandas 6 | openai 7 | -------------------------------------------------------------------------------- /phi/docker/app/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.base import DockerApp, DockerBuildContext, ContainerContext # noqa: F401 2 | -------------------------------------------------------------------------------- /phi/model/openai/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.openai.chat import OpenAIChat 2 | from phi.model.openai.like import OpenAILike 3 | -------------------------------------------------------------------------------- /phi/prompt/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.prompt.template import PromptTemplate 2 | from phi.prompt.registry import PromptRegistry 3 | -------------------------------------------------------------------------------- /phi/vectordb/mongodb/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.vectordb.mongodb.mongodb import MongoDBVector 2 | 3 | __all__ = ["MongoDBVector"] 4 | 
-------------------------------------------------------------------------------- /phi/workflow/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.workflow.workflow import Workflow, RunResponse, RunEvent, WorkflowSession, WorkflowStorage 2 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # A minimal setup.py file for supporting editable installs 2 | 3 | from setuptools import setup 4 | 5 | setup() 6 | -------------------------------------------------------------------------------- /phi/docker/app/context.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | 4 | class DockerBuildContext(BaseModel): 5 | network: str 6 | -------------------------------------------------------------------------------- /phi/knowledge/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.knowledge.base import AssistantKnowledge 2 | from phi.knowledge.agent import AgentKnowledge 3 | -------------------------------------------------------------------------------- /cookbook/providers/ollama/super-agents.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/agno-agi/phidata/HEAD/cookbook/providers/ollama/super-agents.png -------------------------------------------------------------------------------- /phi/aws/resource/iam/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.resource.iam.role import IamRole 2 | from phi.aws.resource.iam.policy import IamPolicy 3 | -------------------------------------------------------------------------------- /phi/aws/resource/s3/__init__.py: -------------------------------------------------------------------------------- 1 | from 
phi.aws.resource.s3.bucket import S3Bucket 2 | from phi.aws.resource.s3.object import S3Object 3 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/research/requirements.in: -------------------------------------------------------------------------------- 1 | arxiv 2 | duckduckgo-search 3 | exa_py 4 | openai 5 | phidata 6 | pypdf 7 | streamlit 8 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/test_image.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/agno-agi/phidata/HEAD/cookbook/assistants/llms/ollama/test_image.jpeg -------------------------------------------------------------------------------- /cookbook/examples/streamlit/geobuddy/requirements.txt: -------------------------------------------------------------------------------- 1 | phidata 2 | google-generativeai 3 | openai 4 | streamlit 5 | pillow 6 | duckduckgo-search 7 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/hermes2/auto_rag/requirements.in: -------------------------------------------------------------------------------- 1 | ollama 2 | streamlit 3 | pgvector 4 | pypdf 5 | psycopg[binary] 6 | sqlalchemy 7 | phidata 8 | -------------------------------------------------------------------------------- /phi/prompt/exceptions.py: -------------------------------------------------------------------------------- 1 | class PromptUpdateException(Exception): 2 | pass 3 | 4 | 5 | class PromptNotFoundException(Exception): 6 | pass 7 | -------------------------------------------------------------------------------- /phi/aws/resource/secret/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.resource.secret.manager import SecretsManager 2 | from phi.aws.resource.secret.reader import read_secrets 3 | 
-------------------------------------------------------------------------------- /phi/docker/app/postgres/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.postgres.postgres import PostgresDb 2 | from phi.docker.app.postgres.pgvector import PgVectorDb 3 | -------------------------------------------------------------------------------- /phi/infra/type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class InfraType(str, Enum): 5 | local = "local" 6 | docker = "docker" 7 | aws = "aws" 8 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/sql/requirements.in: -------------------------------------------------------------------------------- 1 | openai 2 | pandas 3 | phidata 4 | streamlit 5 | sqlalchemy 6 | simplejson 7 | pgvector 8 | psycopg[binary] 9 | -------------------------------------------------------------------------------- /phi/llm/ollama/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.llm.ollama.chat import Ollama 2 | from phi.llm.ollama.hermes import Hermes 3 | from phi.llm.ollama.tools import OllamaTools 4 | -------------------------------------------------------------------------------- /cookbook/examples/workflows/coding_agent/requirements.txt: -------------------------------------------------------------------------------- 1 | phidata 2 | openai 3 | bs4 4 | langchain_community 5 | langchain-openai 6 | langchain 7 | langchain_core 8 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/news_articles/requirements.in: -------------------------------------------------------------------------------- 1 | groq 2 | phidata 3 | streamlit 4 | duckduckgo-search 5 | nest_asyncio 6 | newspaper4k 7 | lxml_html_clean 8 | 
-------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/video_summary/requirements.in: -------------------------------------------------------------------------------- 1 | ollama 2 | pgvector 3 | phidata 4 | psycopg[binary] 5 | sqlalchemy 6 | streamlit 7 | youtube_transcript_api 8 | -------------------------------------------------------------------------------- /phi/model/ollama/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.ollama.chat import Ollama 2 | from phi.model.ollama.hermes import Hermes 3 | from phi.model.ollama.tools import OllamaTools 4 | -------------------------------------------------------------------------------- /phi/vectordb/search.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class SearchType(str, Enum): 5 | vector = "vector" 6 | keyword = "keyword" 7 | hybrid = "hybrid" 8 | -------------------------------------------------------------------------------- /phi/workspace/enums.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class WorkspaceStarterTemplate(str, Enum): 5 | agent_app = "agent-app" 6 | agent_api = "agent-api" 7 | -------------------------------------------------------------------------------- /cookbook/agents/08_debugging.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | 3 | agent = Agent(markdown=True, debug_mode=True) 4 | agent.print_response("Share a 2 sentence horror story") 5 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/sql/knowledge/sample_queries.sql: -------------------------------------------------------------------------------- 1 | -- Here are some sample queries for reference 2 | 3 | -- query description 4 | -- query 
start 5 | -- query end 6 | -------------------------------------------------------------------------------- /cookbook/agents/07_monitoring.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | 3 | agent = Agent(markdown=True, monitoring=True) 4 | agent.print_response("Share a 2 sentence horror story") 5 | -------------------------------------------------------------------------------- /cookbook/assistants/advanced_rag/image_search/requirements.txt: -------------------------------------------------------------------------------- 1 | torch 2 | torchvision 3 | Pillow 4 | pinecone-client 5 | phidata 6 | scipy 7 | git+https://github.com/openai/CLIP.git -------------------------------------------------------------------------------- /phi/api/schemas/response.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | 4 | class ApiResponseSchema(BaseModel): 5 | status: str = "fail" 6 | message: str = "invalid request" 7 | -------------------------------------------------------------------------------- /phi/aws/resource/elasticache/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.resource.elasticache.cluster import CacheCluster 2 | from phi.aws.resource.elasticache.subnet_group import CacheSubnetGroup 3 | -------------------------------------------------------------------------------- /phi/vectordb/distance.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class Distance(str, Enum): 5 | cosine = "cosine" 6 | l2 = "l2" 7 | max_inner_product = "max_inner_product" 8 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/rag/requirements.in: -------------------------------------------------------------------------------- 1 | ollama 2 | pgvector 3 | 
phidata 4 | psycopg[binary] 5 | pypdf 6 | sqlalchemy 7 | streamlit 8 | bs4 9 | duckduckgo-search 10 | -------------------------------------------------------------------------------- /cookbook/assistants/mixture_of_agents/mixture_of_agents_diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/agno-agi/phidata/HEAD/cookbook/assistants/mixture_of_agents/mixture_of_agents_diagram.png -------------------------------------------------------------------------------- /phi/assistant/openai/tool.py: -------------------------------------------------------------------------------- 1 | from typing import Dict 2 | 3 | CodeInterpreter: Dict[str, str] = {"type": "code_interpreter"} 4 | 5 | Retrieval: Dict[str, str] = {"type": "retrieval"} 6 | -------------------------------------------------------------------------------- /cookbook/agents/20_system_prompt.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | 3 | agent = Agent(system_prompt="Share a 2 sentence story about") 4 | agent.print_response("Love in the year 12000.") 5 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/singlestore/auto_rag/README.md: -------------------------------------------------------------------------------- 1 | - This cookbook has been moved to the [SingleStore AI Apps](/cookbook/integrations/singlestore/ai_apps/README.md) folder. 
2 | -------------------------------------------------------------------------------- /cookbook/workflows/content_creator_workflow/requirements.txt: -------------------------------------------------------------------------------- 1 | phidata 2 | firecrawl-py 3 | openai 4 | packaging 5 | requests 6 | typing 7 | pydantic 8 | python-dotenv 9 | requests 10 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/mistral/rag/requirements.in: -------------------------------------------------------------------------------- 1 | mistralai 2 | pgvector 3 | phidata 4 | psycopg[binary] 5 | pypdf 6 | sqlalchemy 7 | streamlit 8 | bs4 9 | duckduckgo-search 10 | 11 | -------------------------------------------------------------------------------- /phi/memory/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.memory.agent import AgentMemory 2 | from phi.memory.assistant import AssistantMemory 3 | from phi.memory.memory import Memory 4 | from phi.memory.row import MemoryRow 5 | -------------------------------------------------------------------------------- /phi/vectordb/clickhouse/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.vectordb.clickhouse.clickhousedb import ClickhouseDb 2 | from phi.vectordb.clickhouse.index import HNSW 3 | from phi.vectordb.distance import Distance 4 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/investment_researcher/requirements.in: -------------------------------------------------------------------------------- 1 | bs4 2 | duckduckgo-search 3 | groq 4 | nest_asyncio 5 | openai 6 | pandas 7 | phidata 8 | streamlit 9 | yfinance 10 | tabulate 11 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/rag/requirements.in: 
-------------------------------------------------------------------------------- 1 | groq 2 | openai 3 | ollama 4 | pgvector 5 | phidata 6 | psycopg[binary] 7 | pypdf 8 | sqlalchemy 9 | streamlit 10 | bs4 11 | duckduckgo-search 12 | -------------------------------------------------------------------------------- /cookbook/providers/mistral/embeddings.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.mistral import MistralEmbedder 2 | 3 | embedder = MistralEmbedder() 4 | 5 | print(embedder.get_embedding("What is the capital of France?")) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/auto_rag/requirements.in: -------------------------------------------------------------------------------- 1 | ollama 2 | pgvector 3 | phidata 4 | psycopg[binary] 5 | pypdf 6 | sqlalchemy 7 | streamlit 8 | bs4 9 | duckduckgo-search 10 | nest_asyncio 11 | -------------------------------------------------------------------------------- /phi/aws/resource/elb/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.resource.elb.load_balancer import LoadBalancer 2 | from phi.aws.resource.elb.target_group import TargetGroup 3 | from phi.aws.resource.elb.listener import Listener 4 | -------------------------------------------------------------------------------- /phi/vectordb/singlestore/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.vectordb.distance import Distance 2 | from phi.vectordb.singlestore.s2vectordb import S2VectorDb 3 | from phi.vectordb.singlestore.index import Ivfflat, HNSWFlat 4 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/auto_rag/requirements.in: -------------------------------------------------------------------------------- 1 | openai 2 | ollama 3 | pgvector 4 | phidata 
5 | psycopg[binary] 6 | pypdf 7 | sqlalchemy 8 | streamlit 9 | bs4 10 | duckduckgo-search 11 | nest_asyncio 12 | -------------------------------------------------------------------------------- /phi/aws/resource/rds/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.resource.rds.db_cluster import DbCluster 2 | from phi.aws.resource.rds.db_instance import DbInstance 3 | from phi.aws.resource.rds.db_subnet_group import DbSubnetGroup 4 | -------------------------------------------------------------------------------- /phi/playground/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.playground.playground import Playground, PlaygroundSettings 2 | from phi.playground.serve import serve_playground_app 3 | from phi.playground.deploy import deploy_playground_app 4 | -------------------------------------------------------------------------------- /cookbook/tools/jira_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.jira_tools import JiraTools 3 | 4 | agent = Agent(tools=[JiraTools()]) 5 | agent.print_response("Find all issues in project PROJ", markdown=True) 6 | -------------------------------------------------------------------------------- /phi/aws/app/context.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from pydantic import BaseModel 4 | 5 | 6 | class AwsBuildContext(BaseModel): 7 | aws_region: Optional[str] = None 8 | aws_profile: Optional[str] = None 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/auto_rag/requirements.in: -------------------------------------------------------------------------------- 1 | groq 2 | openai 3 | ollama 4 | pgvector 5 | phidata 6 | psycopg[binary] 7 | pypdf 8 | sqlalchemy 9 | streamlit 10 
| bs4 11 | duckduckgo-search 12 | nest_asyncio 13 | -------------------------------------------------------------------------------- /cookbook/tools/pubmed.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.pubmed import PubmedTools 3 | 4 | agent = Agent(tools=[PubmedTools()], show_tool_calls=True) 5 | agent.print_response("Tell me about ulcerative colitis.") 6 | -------------------------------------------------------------------------------- /cookbook/tools/zendesk_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.zendesk import ZendeskTools 3 | 4 | agent = Agent(tools=[ZendeskTools()], show_tool_calls=True) 5 | agent.print_response("How do I login?", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/tools/requirements.in: -------------------------------------------------------------------------------- 1 | ollama 2 | pgvector 3 | phidata 4 | psycopg[binary] 5 | pypdf 6 | sqlalchemy 7 | streamlit 8 | bs4 9 | duckduckgo-search 10 | tavily-python 11 | yfinance 12 | nest_asyncio 13 | -------------------------------------------------------------------------------- /cookbook/tools/wikipedia_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.wikipedia import WikipediaTools 3 | 4 | agent = Agent(tools=[WikipediaTools()], show_tool_calls=True) 5 | agent.print_response("Search wikipedia for 'ai'") 6 | -------------------------------------------------------------------------------- /phi/aws/resource/ec2/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.aws.resource.ec2.security_group import SecurityGroup, InboundRule, OutboundRule, get_my_ip 2 | from 
phi.aws.resource.ec2.subnet import Subnet 3 | from phi.aws.resource.ec2.volume import EbsVolume 4 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_size = 2 5 | indent_style = space 6 | end_of_line = lf 7 | charset = utf-8 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | 11 | [*.py] 12 | indent_size = 4 13 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/lmstudio/cli.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai.like import OpenAILike 3 | 4 | assistant = Assistant(llm=OpenAILike(base_url="http://localhost:1234/v1")) 5 | assistant.cli_app(markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/tools/duckduckgo.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | 4 | agent = Agent(tools=[DuckDuckGo()], show_tool_calls=True) 5 | agent.print_response("Whats happening in France?", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/google/embeddings.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.google import GeminiEmbedder 2 | 3 | embeddings = GeminiEmbedder().get_embedding("Embed me") 4 | 5 | print(f"Embeddings: {embeddings}") 6 | print(f"Dimensions: {len(embeddings)}") 7 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openai/embeddings.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.openai import 
OpenAIEmbedder 2 | 3 | embeddings = OpenAIEmbedder().get_embedding("Embed me") 4 | 5 | print(f"Embeddings: {embeddings[:5]}") 6 | print(f"Dimensions: {len(embeddings)}") 7 | -------------------------------------------------------------------------------- /cookbook/assistants/system_prompt.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | 3 | assistant = Assistant( 4 | system_prompt="Share a 2 sentence story about", 5 | debug_mode=True, 6 | ) 7 | assistant.print_response("Love in the year 12000.") 8 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/singlestore/ai_apps/requirements.in: -------------------------------------------------------------------------------- 1 | bs4 2 | duckduckgo-search 3 | groq 4 | ollama 5 | openai 6 | phidata 7 | pymysql 8 | pypdf 9 | sqlalchemy 10 | streamlit 11 | yfinance 12 | tavily-python 13 | nest_asyncio 14 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/together/embeddings.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.together import TogetherEmbedder 2 | 3 | embeddings = TogetherEmbedder().get_embedding("Embed me") 4 | 5 | print(f"Embeddings: {embeddings}") 6 | print(f"Dimensions: {len(embeddings)}") 7 | -------------------------------------------------------------------------------- /cookbook/tools/serpapi_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.serpapi_tools import SerpApiTools 3 | 4 | agent = Agent(tools=[SerpApiTools()], show_tool_calls=True) 5 | agent.print_response("Whats happening in the USA?", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/tools/tavily_tools.py: 
-------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.tavily import TavilyTools 3 | 4 | agent = Agent(tools=[TavilyTools()], show_tool_calls=True) 5 | agent.print_response("Search tavily for 'language models'", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/llm_os/requirements.in: -------------------------------------------------------------------------------- 1 | bs4 2 | duckduckgo-search 3 | exa_py 4 | nest_asyncio 5 | openai 6 | pgvector 7 | phidata 8 | psycopg[binary] 9 | pypdf 10 | sqlalchemy 11 | streamlit 12 | yfinance 13 | duckdb 14 | pandas 15 | matplotlib 16 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/fireworks/embeddings.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.fireworks import FireworksEmbedder 2 | 3 | embeddings = FireworksEmbedder().get_embedding("Embed me") 4 | 5 | print(f"Embeddings: {embeddings}") 6 | print(f"Dimensions: {len(embeddings)}") 7 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/ai_apps/requirements.in: -------------------------------------------------------------------------------- 1 | bs4 2 | duckduckgo-search 3 | groq 4 | nest_asyncio 5 | ollama 6 | openai 7 | pgvector 8 | phidata 9 | psycopg[binary] 10 | pypdf 11 | sqlalchemy 12 | streamlit 13 | tavily-python 14 | yfinance 15 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openai/auto_rag/requirements.in: -------------------------------------------------------------------------------- 1 | openai 2 | ollama 3 | pgvector 4 | phidata 5 | psycopg[binary] 6 | pypdf 7 | sqlalchemy 8 | streamlit 9 | bs4 10 | duckduckgo-search 11 | nest_asyncio 12 | textract==1.6.3 13 | python-docx 14 | 
lxml -------------------------------------------------------------------------------- /cookbook/tools/arxiv_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.arxiv_toolkit import ArxivToolkit 3 | 4 | agent = Agent(tools=[ArxivToolkit()], show_tool_calls=True) 5 | agent.print_response("Search arxiv for 'language models'", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/tools/newspaper_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.newspaper_tools import NewspaperTools 3 | 4 | agent = Agent(tools=[NewspaperTools()]) 5 | agent.print_response("Please summarize https://en.wikipedia.org/wiki/Language_model") 6 | -------------------------------------------------------------------------------- /cookbook/tools/shell_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.shell import ShellTools 3 | 4 | agent = Agent(tools=[ShellTools()], show_tool_calls=True) 5 | agent.print_response("Show me the contents of the current directory", markdown=True) 6 | -------------------------------------------------------------------------------- /phi/llm/openai/like.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from phi.llm.openai.chat import OpenAIChat 3 | 4 | 5 | class OpenAILike(OpenAIChat): 6 | name: str = "OpenAILike" 7 | model: str = "not-provided" 8 | api_key: Optional[str] = "not-provided" 9 | -------------------------------------------------------------------------------- /cookbook/assistants/joke.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai import OpenAIChat 3 | 4 | topic = 
"ice cream" 5 | assistant = Assistant(llm=OpenAIChat(model="gpt-3.5-turbo")) 6 | assistant.print_response(f"Tell me a joke about {topic}") 7 | -------------------------------------------------------------------------------- /cookbook/tools/apify_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.apify import ApifyTools 3 | 4 | agent = Agent(tools=[ApifyTools()], show_tool_calls=True) 5 | agent.print_response("Tell me about https://docs.phidata.com/introduction", markdown=True) 6 | -------------------------------------------------------------------------------- /phi/llm/ollama/openai.py: -------------------------------------------------------------------------------- 1 | from phi.llm.openai.like import OpenAILike 2 | 3 | 4 | class OllamaOpenAI(OpenAILike): 5 | name: str = "Ollama" 6 | model: str = "openhermes" 7 | api_key: str = "ollama" 8 | base_url: str = "http://localhost:11434/v1" 9 | -------------------------------------------------------------------------------- /cookbook/assistants/user_prompt.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | 3 | assistant = Assistant( 4 | system_prompt="Share a 2 sentence story about", 5 | user_prompt="Love in the year 12000.", 6 | debug_mode=True, 7 | ) 8 | assistant.print_response() 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/azure_openai/embeddings.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.azure_openai import AzureOpenAIEmbedder 2 | 3 | embeddings = AzureOpenAIEmbedder().get_embedding("Embed me") 4 | 5 | print(f"Embeddings: {embeddings[:5]}") 6 | print(f"Dimensions: {len(embeddings)}") 7 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/duckduckgo.py: 
-------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | 4 | assistant = Assistant(tools=[DuckDuckGo()], show_tool_calls=True) 5 | assistant.print_response("Whats happening in France?", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/tools/file_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.file import FileTools 3 | 4 | agent = Agent(tools=[FileTools()], show_tool_calls=True) 5 | agent.print_response("What is the most advanced LLM currently? Save the answer to a file.", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/tools/jinareader_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.jina_tools import JinaReaderTools 3 | 4 | agent = Agent(tools=[JinaReaderTools()], debug_mode=True, show_tool_calls=True) 5 | agent.print_response("Summarize: https://github.com/phidatahq") 6 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/personalization/requirements.in: -------------------------------------------------------------------------------- 1 | bs4 2 | duckduckgo-search 3 | exa_py 4 | nest_asyncio 5 | openai 6 | pgvector 7 | phidata 8 | psycopg[binary] 9 | pypdf 10 | sqlalchemy 11 | streamlit 12 | yfinance 13 | duckdb 14 | pandas 15 | matplotlib 16 | -------------------------------------------------------------------------------- /cookbook/tools/website_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.website import WebsiteTools 3 | 4 | agent = Agent(tools=[WebsiteTools()], show_tool_calls=True) 5 | 
agent.print_response("Search web page: 'https://docs.phidata.com/introduction'", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/advanced_rag/pinecone_hybrid_search/requirements.txt: -------------------------------------------------------------------------------- 1 | pinecone-client 2 | llama-index-core 3 | llama-index-readers-file 4 | llama-index-retrievers-bm25 5 | llama-index-embeddings-openai 6 | llama-index-llms-openai 7 | llama-index-vector-stores-pinecone 8 | phidata -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/embeddings.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.ollama import OllamaEmbedder 2 | 3 | embedder = OllamaEmbedder(model="llama3") 4 | embeddings = embedder.get_embedding("Embed me") 5 | 6 | print(f"Embeddings: {embeddings}") 7 | print(f"Dimensions: {len(embeddings)}") 8 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/together/cli.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.together import Together 3 | 4 | assistant = Assistant(llm=Together(), description="You help people with their health and fitness goals.") 5 | assistant.cli_app(markdown=True, stream=False) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/tavily_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.tavily import TavilyTools 3 | 4 | assistant = Assistant(tools=[TavilyTools()], show_tool_calls=True) 5 | assistant.print_response("Search tavily for 'language models'", markdown=True) 6 | -------------------------------------------------------------------------------- 
/cookbook/assistants/tools/wikipedia_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.wikipedia import WikipediaTools 3 | 4 | assistant = Assistant(tools=[WikipediaTools()], show_tool_calls=True) 5 | assistant.print_response("Search wikipedia for 'ai'", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/tools/crawl4ai_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.crawl4ai_tools import Crawl4aiTools 3 | 4 | agent = Agent(tools=[Crawl4aiTools(max_length=None)], show_tool_calls=True) 5 | agent.print_response("Tell me about https://github.com/phidatahq/phidata.") 6 | -------------------------------------------------------------------------------- /cookbook/tools/exa_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.exa import ExaTools 3 | 4 | agent = Agent(tools=[ExaTools(include_domains=["cnbc.com", "reuters.com", "bloomberg.com"])], show_tool_calls=True) 5 | agent.print_response("Search for AAPL news", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/arxiv_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.arxiv_toolkit import ArxivToolkit 3 | 4 | assistant = Assistant(tools=[ArxivToolkit()], show_tool_calls=True) 5 | assistant.print_response("Search arxiv for 'language models'", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/duckduckgo_3.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from 
phi.tools.duckduckgo import DuckDuckGo 3 | 4 | assistant = Assistant(tools=[DuckDuckGo()], show_tool_calls=True) 5 | assistant.print_response("Give me news from 3 different countries.", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/shell_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.shell import ShellTools 3 | 4 | assistant = Assistant(tools=[ShellTools()], show_tool_calls=True) 5 | assistant.print_response("Show me the contents of the current directory", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/examples/streamlit/llm_os/requirements.txt: -------------------------------------------------------------------------------- 1 | phidata 2 | openai 3 | exa_py 4 | yfinance 5 | duckdb 6 | bs4 7 | duckduckgo-search 8 | nest_asyncio 9 | qdrant-client 10 | pgvector 11 | psycopg[binary] 12 | pypdf 13 | sqlalchemy 14 | streamlit 15 | pandas 16 | matplotlib 17 | -------------------------------------------------------------------------------- /cookbook/tools/python_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.python import PythonTools 3 | 4 | agent = Agent(tools=[PythonTools()], show_tool_calls=True) 5 | agent.print_response("Write a python script for fibonacci series and display the result till the 10th number") 6 | -------------------------------------------------------------------------------- /cookbook/tools/spider_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.spider import SpiderTools 3 | 4 | agent = Agent(tools=[SpiderTools(optional_params={"proxy_enabled": True})]) 5 | agent.print_response('Can you scrape the first search result from a 
search on "news in USA"?') 6 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/apify_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.apify import ApifyTools 3 | 4 | assistant = Assistant(tools=[ApifyTools()], show_tool_calls=True) 5 | assistant.print_response("Tell me about https://docs.phidata.com/introduction", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/tools/firecrawl_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.firecrawl import FirecrawlTools 3 | 4 | agent = Agent(tools=[FirecrawlTools(scrape=False, crawl=True)], show_tool_calls=True, markdown=True) 5 | agent.print_response("Summarize this https://finance.yahoo.com/") 6 | -------------------------------------------------------------------------------- /phi/assistant/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.assistant.assistant import ( 2 | Assistant, 3 | AssistantRun, 4 | AssistantMemory, 5 | MemoryRetrieval, 6 | AssistantStorage, 7 | AssistantKnowledge, 8 | Function, 9 | Tool, 10 | Toolkit, 11 | Message, 12 | ) 13 | -------------------------------------------------------------------------------- /phi/tools/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.tools.tool import Tool 2 | from phi.tools.decorator import tool 3 | from phi.tools.function import Function, FunctionCall, StopAgentRun, RetryAgentRun, ToolCallException 4 | from phi.tools.toolkit import Toolkit 5 | from phi.tools.tool_registry import ToolRegistry 6 | -------------------------------------------------------------------------------- /phi/vectordb/pgvector/__init__.py: 
-------------------------------------------------------------------------------- 1 | from phi.vectordb.distance import Distance 2 | from phi.vectordb.search import SearchType 3 | from phi.vectordb.pgvector.index import Ivfflat, HNSW 4 | from phi.vectordb.pgvector.pgvector import PgVector 5 | from phi.vectordb.pgvector.pgvector2 import PgVector2 6 | -------------------------------------------------------------------------------- /cookbook/providers/openai/o1/o1_mini.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.openai import OpenAIChat 3 | 4 | agent = Agent(model=OpenAIChat(id="o1-mini")) 5 | 6 | # Print the response in the terminal 7 | agent.print_response("What is the closest galaxy to milky way?") 8 | -------------------------------------------------------------------------------- /cookbook/run_pgvector.sh: -------------------------------------------------------------------------------- 1 | docker run -d \ 2 | -e POSTGRES_DB=ai \ 3 | -e POSTGRES_USER=ai \ 4 | -e POSTGRES_PASSWORD=ai \ 5 | -e PGDATA=/var/lib/postgresql/data/pgdata \ 6 | -v pgvolume:/var/lib/postgresql/data \ 7 | -p 5532:5432 \ 8 | --name pgvector \ 9 | phidata/pgvector:16 10 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/file_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.file import FileTools 3 | 4 | assistant = Assistant(tools=[FileTools()], show_tool_calls=True) 5 | assistant.print_response("What is the most advanced LLM currently? 
Save the answer to a file.", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/website_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.website import WebsiteTools 3 | 4 | assistant = Assistant(tools=[WebsiteTools()], show_tool_calls=True) 5 | assistant.print_response("Search web page: 'https://docs.phidata.com/introduction'", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/providers/openai/o1/o1_preview.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.openai import OpenAIChat 3 | 4 | agent = Agent(model=OpenAIChat(id="o1-preview")) 5 | 6 | # Print the response in the terminal 7 | agent.print_response("What is the closest galaxy to milky way?") 8 | -------------------------------------------------------------------------------- /phi/agent/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.agent.agent import ( 2 | Agent, 3 | AgentKnowledge, 4 | AgentMemory, 5 | AgentSession, 6 | AgentStorage, 7 | Function, 8 | MemoryRetrieval, 9 | Message, 10 | RunEvent, 11 | RunResponse, 12 | Tool, 13 | Toolkit, 14 | ) 15 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/lmstudio/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai.like import OpenAILike 3 | 4 | assistant = Assistant(llm=OpenAILike(base_url="http://localhost:1234/v1")) 5 | assistant.print_response("Share a 2 sentence quick healthy breakfast recipe.", markdown=True) 6 | -------------------------------------------------------------------------------- 
/cookbook/assistants/llms/openhermes/embeddings.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.ollama import OllamaEmbedder 2 | 3 | embedder = OllamaEmbedder(model="openhermes", dimensions=4096) 4 | embeddings = embedder.get_embedding("Embed me") 5 | 6 | print(f"Embeddings: {embeddings[:10]}") 7 | print(f"Dimensions: {len(embeddings)}") 8 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/pubmed.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.pubmed import PubmedTools 3 | 4 | assistant = Assistant(tools=[PubmedTools()], debug_mode=True, show_tool_calls=True) 5 | 6 | assistant.print_response( 7 | "ulcerative colitis.", 8 | markdown=True, 9 | ) 10 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/huggingface/sentence_transformer_embeddings.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.sentence_transformer import SentenceTransformerEmbedder 2 | 3 | embeddings = SentenceTransformerEmbedder().get_embedding("Embed me") 4 | 5 | print(f"Embeddings: {embeddings[:5]}") 6 | print(f"Dimensions: {len(embeddings)}") 7 | -------------------------------------------------------------------------------- /phi/docker/app/postgres/pgvector.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.postgres.postgres import PostgresDb 2 | 3 | 4 | class PgVectorDb(PostgresDb): 5 | # -*- App Name 6 | name: str = "pgvector-db" 7 | 8 | # -*- Image Configuration 9 | image_name: str = "phidata/pgvector" 10 | image_tag: str = "16" 11 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/lmstudio/assistant_stream_off.py: 
-------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai.like import OpenAILike 3 | 4 | assistant = Assistant(llm=OpenAILike(base_url="http://localhost:1234/v1")) 5 | assistant.print_response("Share a quick healthy breakfast recipe.", stream=False, markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/crawl4ai_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.crawl4ai_tools import Crawl4aiTools 3 | 4 | assistant = Assistant(tools=[Crawl4aiTools(max_length=None)], show_tool_calls=True) 5 | assistant.print_response("Tell me about https://github.com/phidatahq/phidata.", markdown=True) 6 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/resend_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.resend_tools import ResendTools 3 | 4 | assistant = Assistant(tools=[ResendTools(from_email="")], debug_mode=True) 5 | 6 | assistant.print_response("send email to greeting them with hello world") 7 | -------------------------------------------------------------------------------- /cookbook/assistants/tothemoon.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai import OpenAIChat 3 | 4 | assistant = Assistant( 5 | llm=OpenAIChat(model="gpt-4o"), 6 | description="You are a rocket scientist", 7 | ) 8 | assistant.print_response("write a plan to go to the moon stp by step", markdown=True) 9 | -------------------------------------------------------------------------------- /phi/aws/resource/ecs/__init__.py: 
-------------------------------------------------------------------------------- 1 | from phi.aws.resource.ecs.cluster import EcsCluster 2 | from phi.aws.resource.ecs.container import EcsContainer 3 | from phi.aws.resource.ecs.service import EcsService 4 | from phi.aws.resource.ecs.task_definition import EcsTaskDefinition 5 | from phi.aws.resource.ecs.volume import EcsVolume 6 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/hermes2/embeddings.py: -------------------------------------------------------------------------------- 1 | from phi.embedder.ollama import OllamaEmbedder 2 | 3 | embedder = OllamaEmbedder(model="adrienbrault/nous-hermes2pro:Q8_0", dimensions=4096) 4 | embeddings = embedder.get_embedding("Embed me") 5 | 6 | print(f"Embeddings: {embeddings[:10]}") 7 | print(f"Dimensions: {len(embeddings)}") 8 | -------------------------------------------------------------------------------- /cookbook/run_mysql.sh: -------------------------------------------------------------------------------- 1 | docker run -d \ 2 | -e MYSQL_ROOT_PASSWORD=phi \ 3 | -e MYSQL_DATABASE=phi \ 4 | -e MYSQL_USER=phi \ 5 | -e MYSQL_PASSWORD=phi \ 6 | -p 3306:3306 \ 7 | -v mysql_data:/var/lib/mysql \ 8 | -v $(pwd)/cookbook/mysql-init:/docker-entrypoint-initdb.d \ 9 | --name mysql \ 10 | mysql:8.0 11 | -------------------------------------------------------------------------------- /phi/vectordb/clickhouse/index.py: -------------------------------------------------------------------------------- 1 | from typing import Literal 2 | 3 | from pydantic import BaseModel 4 | 5 | 6 | class HNSW(BaseModel): 7 | quantization: Literal["f64", "f32", "f16", "bf16", "i8"] = "bf16" 8 | hnsw_max_connections_per_layer: int = 32 9 | hnsw_candidate_list_size_for_construction: int = 128 10 | -------------------------------------------------------------------------------- /cookbook/tools/sql_tools.py: 
-------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.sql import SQLTools 3 | 4 | db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai" 5 | 6 | agent = Agent(tools=[SQLTools(db_url=db_url)]) 7 | agent.print_response("List the tables in the database. Tell me about contents of one of the tables", markdown=True) 8 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/fireworks/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.fireworks import Fireworks 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | assistant = Assistant(llm=Fireworks(), tools=[DuckDuckGo()], show_tool_calls=True) 6 | assistant.print_response("Whats happening in France?", markdown=True) 7 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/fireworks/basic.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.fireworks import Fireworks 3 | 4 | assistant = Assistant( 5 | llm=Fireworks(), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/python_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.python import PythonTools 3 | 4 | assistant = Assistant(tools=[PythonTools()], show_tool_calls=True) 5 | assistant.print_response( 6 | "Write a python script for fibonacci series and display the result till the 10th number", markdown=True 7 | ) 8 | 
-------------------------------------------------------------------------------- /cookbook/assistants/tools/serpapi_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.serpapi_tools import SerpApiTools 3 | 4 | assistant = Assistant( 5 | tools=[SerpApiTools()], 6 | show_tool_calls=True, 7 | debug_mode=True, 8 | ) 9 | 10 | assistant.print_response("Whats happening in the USA?", markdown=True) 11 | -------------------------------------------------------------------------------- /cookbook/tools/newspaper4k_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.newspaper4k import Newspaper4k 3 | 4 | agent = Agent(tools=[Newspaper4k()], debug_mode=True, show_tool_calls=True) 5 | agent.print_response( 6 | "Please summarize https://www.rockymountaineer.com/blog/experience-icefields-parkway-scenic-drive-lifetime" 7 | ) 8 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/basic.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.groq import Groq 3 | 4 | assistant = Assistant( 5 | llm=Groq(model="llama3-70b-8192"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/azure_openai/cli.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.azure import AzureOpenAIChat 3 | 4 | assistant = Assistant( 5 | llm=AzureOpenAIChat(model="gpt-35-turbo"), # model="deployment_name" 6 | description="You help people with their health and fitness goals.", 7 
| ) 8 | assistant.cli_app(markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/huggingface/huggingface_custom_embeddings.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from phi.embedder.huggingface import HuggingfaceCustomEmbedder 4 | 5 | embeddings = HuggingfaceCustomEmbedder(api_key=os.getenv("HUGGINGFACE_API_KEY")).get_embedding("Embed me") 6 | 7 | print(f"Embeddings: {embeddings}") 8 | print(f"Dimensions: {len(embeddings)}") 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/cohere/basic.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.cohere import CohereChat 3 | 4 | assistant = Assistant( 5 | llm=CohereChat(model="command-r"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openai/tool_call.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai import OpenAIChat 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | 6 | assistant = Assistant(llm=OpenAIChat(model="gpt-4-turbo"), tools=[DuckDuckGo()], show_tool_calls=True) 7 | assistant.print_response("Whats happening in France?", markdown=True) 8 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openhermes/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.ollama import Ollama 3 | 4 | assistant = Assistant( 5 | llm=Ollama(model="openhermes"), 6 | 
description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/providers/openai/o1/o1_mini_stream.py: -------------------------------------------------------------------------------- 1 | from typing import Iterator # noqa 2 | from phi.agent import Agent, RunResponse # noqa 3 | from phi.model.openai import OpenAIChat 4 | 5 | agent = Agent(model=OpenAIChat(id="o1-mini")) 6 | 7 | # Print the response in the terminal 8 | agent.print_response("What is the closest galaxy to milky way?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/tools/resend_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.resend_tools import ResendTools 3 | 4 | from_email = "" 5 | to_email = "" 6 | 7 | agent = Agent(tools=[ResendTools(from_email=from_email)], show_tool_calls=True) 8 | agent.print_response(f"Send an email to {to_email} greeting them with hello world") 9 | -------------------------------------------------------------------------------- /phi/docker/app/superset/worker.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union, List 2 | 3 | from phi.docker.app.superset.base import SupersetBase 4 | 5 | 6 | class SupersetWorker(SupersetBase): 7 | # -*- App Name 8 | name: str = "superset-worker" 9 | 10 | # Command for the container 11 | command: Optional[Union[str, List[str]]] = "worker" 12 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/fireworks/assistant_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.fireworks import 
Fireworks 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | assistant = Assistant(llm=Fireworks(), tools=[DuckDuckGo()], show_tool_calls=True) 6 | assistant.print_response("Whats happening in France?", markdown=True, stream=False) 7 | -------------------------------------------------------------------------------- /cookbook/providers/openai/o1/o1.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.openai import OpenAIChat 3 | 4 | # This will only work if you have access to the o1 model from OpenAI 5 | agent = Agent(model=OpenAIChat(id="o1")) 6 | 7 | # Print the response in the terminal 8 | agent.print_response("What is the closest galaxy to milky way?") 9 | -------------------------------------------------------------------------------- /cookbook/providers/openai/o1/o1_preview_stream.py: -------------------------------------------------------------------------------- 1 | from typing import Iterator # noqa 2 | from phi.agent import Agent, RunResponse # noqa 3 | from phi.model.openai import OpenAIChat 4 | 5 | agent = Agent(model=OpenAIChat(id="o1-preview")) 6 | 7 | # Print the response in the terminal 8 | agent.print_response("What is the closest galaxy to milky way?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/reasoning/python_101_curriculum.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.model.openai import OpenAIChat 3 | 4 | task = "Craft a curriculum for Python 101" 5 | 6 | reasoning_agent = Agent(model=OpenAIChat(id="gpt-4o"), reasoning=True, markdown=True, structured_outputs=True) 7 | reasoning_agent.print_response(task, stream=True, show_full_reasoning=True) 8 | -------------------------------------------------------------------------------- /phi/aws/resource/reference.py: 
-------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from phi.aws.api_client import AwsApiClient 3 | 4 | 5 | class AwsReference: 6 | def __init__(self, reference): 7 | self.reference = reference 8 | 9 | def get_reference(self, aws_client: Optional[AwsApiClient] = None): 10 | return self.reference(aws_client=aws_client) 11 | -------------------------------------------------------------------------------- /phi/docker/app/airflow/scheduler.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union, List 2 | 3 | from phi.docker.app.airflow.base import AirflowBase 4 | 5 | 6 | class AirflowScheduler(AirflowBase): 7 | # -*- App Name 8 | name: str = "airflow-scheduler" 9 | 10 | # Command for the container 11 | command: Optional[Union[str, List[str]]] = "scheduler" 12 | -------------------------------------------------------------------------------- /phi/docker/app/superset/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.superset.base import SupersetBase, ContainerContext 2 | from phi.docker.app.superset.webserver import SupersetWebserver 3 | from phi.docker.app.superset.worker import SupersetWorker 4 | from phi.docker.app.superset.worker_beat import SupersetWorkerBeat 5 | from phi.docker.app.superset.init import SupersetInit 6 | -------------------------------------------------------------------------------- /phi/utils/dttm.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | 3 | 4 | def current_datetime() -> datetime: 5 | return datetime.now() 6 | 7 | 8 | def current_datetime_utc() -> datetime: 9 | return datetime.now(timezone.utc) 10 | 11 | 12 | def current_datetime_utc_str() -> str: 13 | return current_datetime_utc().strftime("%Y-%m-%dT%H:%M:%S") 14 | 
-------------------------------------------------------------------------------- /cookbook/assistants/llms/claude/basic.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.anthropic import Claude 3 | 4 | assistant = Assistant( 5 | llm=Claude(model="claude-3-haiku-20240307"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/fireworks/basic_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.fireworks import Fireworks 3 | 4 | assistant = Assistant( 5 | llm=Fireworks(), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/google/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.google import Gemini 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | assistant = Assistant(llm=Gemini(model="gemini-1.5-flash"), tools=[DuckDuckGo()], debug_mode=True, show_tool_calls=True) 6 | assistant.print_response("Whats happening in France?", markdown=True) 7 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/hermes2/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | from phi.llm.ollama import Hermes 4 | 5 | assistant = 
Assistant(llm=Hermes(model="adrienbrault/nous-hermes2pro:Q8_0"), tools=[DuckDuckGo()], show_tool_calls=True) 6 | assistant.print_response("Whats happening in France?", markdown=True) 7 | -------------------------------------------------------------------------------- /cookbook/providers/xai/web_search.py: -------------------------------------------------------------------------------- 1 | """Build a Web Search Agent using xAI.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.xai import xAI 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent(model=xAI(id="grok-beta"), tools=[DuckDuckGo()], show_tool_calls=True, markdown=True) 8 | agent.print_response("Whats happening in France?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/reasoning/plan_itenerary.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.model.openai import OpenAIChat 3 | 4 | task = "Plan an itinerary from Los Angeles to Las Vegas" 5 | 6 | reasoning_agent = Agent(model=OpenAIChat(id="gpt-4o"), reasoning=True, markdown=True, structured_outputs=True) 7 | reasoning_agent.print_response(task, stream=True, show_full_reasoning=True) 8 | -------------------------------------------------------------------------------- /cookbook/tools/duckduckgo_mod.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | 4 | # We will search DDG but limit the site to Politifact 5 | agent = Agent(tools=[DuckDuckGo(modifier="site:politifact.com")], show_tool_calls=True) 6 | agent.print_response("Is Taylor Swift promoting energy-saving devices with Elon Musk?", markdown=False) 7 | -------------------------------------------------------------------------------- /phi/docker/app/superset/worker_beat.py: 
-------------------------------------------------------------------------------- 1 | from typing import Optional, Union, List 2 | 3 | from phi.docker.app.superset.base import SupersetBase 4 | 5 | 6 | class SupersetWorkerBeat(SupersetBase): 7 | # -*- App Name 8 | name: str = "superset-worker-beat" 9 | 10 | # Command for the container 11 | command: Optional[Union[str, List[str]]] = "beat" 12 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/hermes2/basic.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.ollama import Hermes 3 | 4 | assistant = Assistant( 5 | llm=Hermes(model="adrienbrault/nous-hermes2pro:Q8_0"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/reasoning/life_in_500000_years.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.model.openai import OpenAIChat 3 | 4 | task = "Write a short story about life in 500000 years" 5 | 6 | reasoning_agent = Agent(model=OpenAIChat(id="gpt-4o"), reasoning=True, markdown=True, structured_outputs=True) 7 | reasoning_agent.print_response(task, stream=True, show_full_reasoning=True) 8 | -------------------------------------------------------------------------------- /phi/docker/app/airflow/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.airflow.base import AirflowBase, AirflowLogsVolumeType, ContainerContext 2 | from phi.docker.app.airflow.webserver import AirflowWebserver 3 | from phi.docker.app.airflow.scheduler import AirflowScheduler 4 | from phi.docker.app.airflow.worker import AirflowWorker 5 | from 
phi.docker.app.airflow.flower import AirflowFlower 6 | -------------------------------------------------------------------------------- /phi/docker/app/superset/init.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union, List 2 | 3 | from phi.docker.app.superset.base import SupersetBase 4 | 5 | 6 | class SupersetInit(SupersetBase): 7 | # -*- App Name 8 | name: str = "superset-init" 9 | 10 | # Entrypoint for the container 11 | entrypoint: Optional[Union[str, List]] = "/scripts/init-superset.sh" 12 | -------------------------------------------------------------------------------- /phi/llm/deepseek/deepseek.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from os import getenv 3 | 4 | from phi.llm.openai.like import OpenAILike 5 | 6 | 7 | class DeepSeekChat(OpenAILike): 8 | name: str = "DeepSeekChat" 9 | model: str = "deepseek-chat" 10 | api_key: Optional[str] = getenv("DEEPSEEK_API_KEY") 11 | base_url: str = "https://api.deepseek.com" 12 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/basic_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.groq import Groq 3 | 4 | assistant = Assistant( 5 | llm=Groq(model="mixtral-8x7b-32768"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/spider_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.spider import SpiderTools 3 | 4 | assistant = Assistant( 5 | tools=[SpiderTools()], 6 | 
show_tool_calls=True, 7 | debug_mode=True, 8 | ) 9 | 10 | assistant.print_response('Can you scrape the first search result from a search on "news in USA"?', markdown=True) 11 | -------------------------------------------------------------------------------- /cookbook/reasoning/fibonacci.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.model.openai import OpenAIChat 3 | 4 | task = "Give me steps to write a python script for fibonacci series" 5 | 6 | reasoning_agent = Agent(model=OpenAIChat(id="gpt-4o"), reasoning=True, markdown=True, structured_outputs=True) 7 | reasoning_agent.print_response(task, stream=True, show_full_reasoning=True) 8 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/bedrock/basic.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.aws.claude import Claude 3 | 4 | assistant = Assistant( 5 | llm=Claude(model="anthropic.claude-3-sonnet-20240229-v1:0"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/cohere/basic_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.cohere import CohereChat 3 | 4 | assistant = Assistant( 5 | llm=CohereChat(model="command-r"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/together/web_search.py: 
-------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.together import Together 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | assistant = Assistant(llm=Together(), tools=[DuckDuckGo()], show_tool_calls=True) 6 | assistant.print_response("Whats happening in France? Summarize top stories with sources.", markdown=True, stream=False) 7 | -------------------------------------------------------------------------------- /cookbook/providers/groq/image_agent.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.model.groq import Groq 3 | 4 | agent = Agent(model=Groq(id="llama-3.2-90b-vision-preview")) 5 | 6 | agent.print_response( 7 | "Tell me about this image", 8 | images=[ 9 | "https://upload.wikimedia.org/wikipedia/commons/f/f2/LPU-v1-die.jpg", 10 | ], 11 | stream=True, 12 | ) 13 | -------------------------------------------------------------------------------- /cookbook/assistants/cli.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | 4 | assistant = Assistant( 5 | tools=[DuckDuckGo()], 6 | show_tool_calls=True, 7 | read_chat_history=True, 8 | debug_mode=True, 9 | add_chat_history_to_messages=True, 10 | num_history_messages=3, 11 | ) 12 | assistant.cli_app(markdown=True) 13 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/azure_openai/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.azure import AzureOpenAIChat 3 | 4 | assistant = Assistant( 5 | llm=AzureOpenAIChat(model="gpt-4o"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a 2 sentence quick and 
healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /phi/api/schemas/team.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from pydantic import BaseModel 4 | 5 | 6 | class TeamSchema(BaseModel): 7 | """Schema for team data returned by the API.""" 8 | 9 | id_team: str 10 | name: str 11 | url: str 12 | 13 | 14 | class TeamIdentifier(BaseModel): 15 | id_team: Optional[str] = None 16 | team_url: Optional[str] = None 17 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/claude/basic_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.anthropic import Claude 3 | 4 | assistant = Assistant( 5 | llm=Claude(model="claude-3-haiku-20240307"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/google/assistant_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.google import Gemini 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | assistant = Assistant(llm=Gemini(model="gemini-1.5-flash"), tools=[DuckDuckGo()], debug_mode=True, show_tool_calls=True) 6 | assistant.print_response("Whats happening in France?", markdown=True, stream=False) 7 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/google/basic.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.google import Gemini 3 | 4 | 
assistant = Assistant( 5 | llm=Gemini(model="gemini-1.5-flash"), 6 | description="You help people with their health and fitness goals.", 7 | debug_mode=True, 8 | ) 9 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 10 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/tool_call.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | from phi.llm.ollama import Ollama 4 | 5 | 6 | assistant = Assistant( 7 | llm=Ollama(model="llama3"), 8 | tools=[DuckDuckGo()], 9 | show_tool_calls=True, 10 | ) 11 | 12 | assistant.print_response("Whats happening in the US?", markdown=True) 13 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/newspaper4k_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.newspaper4k import Newspaper4k 3 | 4 | assistant = Assistant(tools=[Newspaper4k()], debug_mode=True, show_tool_calls=True) 5 | 6 | assistant.print_response( 7 | "https://www.rockymountaineer.com/blog/experience-icefields-parkway-scenic-drive-lifetime", 8 | markdown=True, 9 | ) 10 | -------------------------------------------------------------------------------- /cookbook/tools/sleep_tool.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.sleep import Sleep 3 | 4 | # Create an Agent with the Sleep tool 5 | agent = Agent(tools=[Sleep()], name="Sleep Agent") 6 | 7 | # Example 1: Sleep for 2 seconds 8 | agent.print_response("Sleep for 2 seconds") 9 | 10 | # Example 2: Sleep for a longer duration 11 | agent.print_response("Sleep for 5 seconds") 12 | -------------------------------------------------------------------------------- 
/cookbook/assistants/llms/cohere/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | from phi.llm.cohere import CohereChat 4 | 5 | assistant = Assistant( 6 | llm=CohereChat(model="command-r-plus"), 7 | tools=[DuckDuckGo()], 8 | show_tool_calls=True, 9 | ) 10 | assistant.print_response("Whats happening in France?", markdown=True) 11 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/llama_cpp/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai.like import OpenAILike 3 | 4 | assistant = Assistant( 5 | llm=OpenAILike(base_url="http://localhost:8000/v1"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a 2 sentence quick healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/together/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.together import Together 3 | 4 | assistant = Assistant( 5 | llm=Together(model="meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /phi/llm/openrouter/openrouter.py: -------------------------------------------------------------------------------- 1 | from os import getenv 2 | from typing import Optional 3 | 4 | from phi.llm.openai.like import OpenAILike 5 | 6 | 7 | class OpenRouter(OpenAILike): 8 | name: str = "OpenRouter" 9 | model: str = 
"mistralai/mistral-7b-instruct:free" 10 | api_key: Optional[str] = getenv("OPENROUTER_API_KEY") 11 | base_url: str = "https://openrouter.ai/api/v1" 12 | -------------------------------------------------------------------------------- /cookbook/assistants/instructions.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | 3 | assistant = Assistant( 4 | description="You are a famous short story writer asked to write for a magazine", 5 | instructions=["You are a pilot on a plane flying from Hawaii to Japan."], 6 | markdown=True, 7 | debug_mode=True, 8 | ) 9 | assistant.print_response("Tell me a 2 sentence horror story.") 10 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/claude/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | from phi.llm.anthropic import Claude 4 | 5 | assistant = Assistant( 6 | llm=Claude(model="claude-3-5-sonnet-20240620"), 7 | tools=[DuckDuckGo()], 8 | show_tool_calls=True, 9 | ) 10 | assistant.print_response("Whats happening in France", markdown=True) 11 | -------------------------------------------------------------------------------- /cookbook/providers/hermes/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.ollama import Hermes 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent(model=Hermes(id="hermes3"), tools=[DuckDuckGo()], show_tool_calls=True, markdown=True) 8 | agent.print_response("Whats happening in France?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/tools/models_lab_tool.py: 
-------------------------------------------------------------------------------- 1 | """Run `pip install requests` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.tools.models_labs import ModelsLabs 5 | 6 | # Create an Agent with the ModelsLabs tool 7 | agent = Agent(tools=[ModelsLabs()], name="ModelsLabs Agent") 8 | 9 | agent.print_response("Generate a video of a beautiful sunset over the ocean", markdown=True) 10 | -------------------------------------------------------------------------------- /phi/llm/references.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from pydantic import BaseModel 3 | 4 | 5 | class References(BaseModel): 6 | """Model for LLM references""" 7 | 8 | # The question asked by the user. 9 | query: str 10 | # The references from the vector database. 11 | references: Optional[str] = None 12 | # Performance in seconds. 13 | time: Optional[float] = None 14 | -------------------------------------------------------------------------------- /phi/utils/audio.py: -------------------------------------------------------------------------------- 1 | import base64 2 | 3 | 4 | def write_audio_to_file(audio, filename: str): 5 | """ 6 | Write base64 encoded audio file to disk. 
7 | 8 | :param audio: Base64 encoded audio file 9 | :param filename: The filename to save the audio to 10 | """ 11 | wav_bytes = base64.b64decode(audio) 12 | with open(filename, "wb") as f: 13 | f.write(wav_bytes) 14 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/image.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from phi.assistant import Assistant 3 | from phi.llm.ollama import Ollama 4 | 5 | assistant = Assistant(llm=Ollama(model="llava")) 6 | 7 | image_path = Path(__file__).parent / "test_image.jpeg" 8 | assistant.print_response( 9 | "Whats in the image?", 10 | images=[image_path.read_bytes()], 11 | markdown=True, 12 | ) 13 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openrouter/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openrouter import OpenRouter 3 | 4 | assistant = Assistant( 5 | llm=OpenRouter(model="mistralai/mistral-7b-instruct:free"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a 2 sentence quick and healthy breakfast recipe.", markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/providers/google/audio_agent.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.model.google import Gemini 3 | 4 | agent = Agent( 5 | model=Gemini(id="gemini-2.0-flash-exp"), 6 | markdown=True, 7 | ) 8 | 9 | # Please download a sample audio file to test this Agent 10 | agent.print_response( 11 | "Tell me about this audio", 12 | audio="sample_audio.mp3", 13 | stream=True, 14 | ) 15 | -------------------------------------------------------------------------------- 
/cookbook/providers/ollama/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.ollama import Ollama 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent(model=Ollama(id="llama3.1:8b"), tools=[DuckDuckGo()], show_tool_calls=True, markdown=True) 8 | agent.print_response("Whats happening in France?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/tools/composio_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from composio_phidata import Action, ComposioToolSet # type: ignore 3 | 4 | toolset = ComposioToolSet() 5 | composio_tools = toolset.get_tools(actions=[Action.GITHUB_STAR_A_REPOSITORY_FOR_THE_AUTHENTICATED_USER]) 6 | 7 | agent = Agent(tools=composio_tools, show_tool_calls=True) 8 | agent.print_response("Can you star phidatahq/phidata repo?") 9 | -------------------------------------------------------------------------------- /phi/embedder/fireworks.py: -------------------------------------------------------------------------------- 1 | from os import getenv 2 | from typing import Optional 3 | 4 | from phi.embedder.openai import OpenAIEmbedder 5 | 6 | 7 | class FireworksEmbedder(OpenAIEmbedder): 8 | model: str = "nomic-ai/nomic-embed-text-v1.5" 9 | dimensions: int = 768 10 | api_key: Optional[str] = getenv("FIREWORKS_API_KEY") 11 | base_url: str = "https://api.fireworks.ai/inference/v1" 12 | -------------------------------------------------------------------------------- /phi/embedder/together.py: -------------------------------------------------------------------------------- 1 | from os import getenv 2 | from typing import Optional 3 | 4 | from phi.embedder.openai import OpenAIEmbedder 5 | 6 | 7 | class TogetherEmbedder(OpenAIEmbedder): 8 | 
model: str = "togethercomputer/m2-bert-80M-32k-retrieval" 9 | dimensions: int = 768 10 | api_key: Optional[str] = getenv("TOGETHER_API_KEY") 11 | base_url: str = "https://api.together.xyz/v1" 12 | -------------------------------------------------------------------------------- /phi/file/local/txt.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from phi.file import File 4 | 5 | 6 | class TextFile(File): 7 | path: str 8 | type: str = "TEXT" 9 | 10 | def get_metadata(self) -> dict[str, Any]: 11 | if self.name is None: 12 | from pathlib import Path 13 | 14 | self.name = Path(self.path).name 15 | return self.model_dump(exclude_none=True) 16 | -------------------------------------------------------------------------------- /phi/vectordb/cassandra/extra_param_mixin.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from cassio.table.mixins.base_table import BaseTableMixin 4 | from cassio.table.table_types import ColumnSpecType 5 | 6 | 7 | class ExtraParamMixin(BaseTableMixin): 8 | def _schema_da(self) -> List[ColumnSpecType]: 9 | return super()._schema_da() + [ 10 | ("document_name", "TEXT"), 11 | ] 12 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/azure_openai/assistant_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.azure import AzureOpenAIChat 3 | 4 | assistant = Assistant( 5 | llm=AzureOpenAIChat(model="gpt-4o"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a 2 sentence quick and healthy breakfast recipe.", markdown=True, stream=False) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/bedrock/basic_stream_off.py: 
-------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.aws.claude import Claude 3 | 4 | assistant = Assistant( 5 | llm=Claude(model="anthropic.claude-3-5-sonnet-20240620-v1:0"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/llama_cpp/assistant_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai.like import OpenAILike 3 | 4 | assistant = Assistant( 5 | llm=OpenAILike(base_url="http://localhost:8000/v1"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", stream=False, markdown=True) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/hermes.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.ollama import Ollama 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | hermes = Assistant( 6 | llm=Ollama(model="openhermes"), 7 | tools=[DuckDuckGo()], 8 | show_tool_calls=True, 9 | ) 10 | hermes.print_response("Whats happening in France? 
Summarize top stories with sources.", markdown=True) 11 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openhermes/tool_call.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | from phi.llm.ollama import Ollama 4 | 5 | assistant = Assistant( 6 | llm=Ollama(model="openhermes"), 7 | tools=[DuckDuckGo()], 8 | show_tool_calls=True, 9 | # debug_mode=True 10 | ) 11 | assistant.print_response("Tell me about OpenAI Sora", markdown=True) 12 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openrouter/tool_call.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openrouter import OpenRouter 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | assistant = Assistant( 6 | llm=OpenRouter(model="openai/gpt-3.5-turbo"), tools=[DuckDuckGo()], show_tool_calls=True, debug_mode=True 7 | ) 8 | assistant.print_response("Whats happening in France?", markdown=True, stream=False) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/together/assistant_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.together import Together 3 | 4 | assistant = Assistant( 5 | llm=Together(model="mistralai/Mixtral-8x7B-Instruct-v0.1"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False) 9 | -------------------------------------------------------------------------------- /cookbook/providers/openai/web_search.py: -------------------------------------------------------------------------------- 1 | 
"""Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.openai import OpenAIChat 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent(model=OpenAIChat(id="gpt-4o"), tools=[DuckDuckGo()], show_tool_calls=True, markdown=True) 8 | agent.print_response("Whats happening in France?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/tools/hackernews.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.hackernews import HackerNews 3 | 4 | agent = Agent( 5 | name="Hackernews Team", 6 | tools=[HackerNews()], 7 | show_tool_calls=True, 8 | markdown=True, 9 | ) 10 | agent.print_response( 11 | "Write an engaging summary of the users with the top 2 stories on hackernews. Please mention the stories as well.", 12 | ) 13 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/cohere/assistant_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | from phi.llm.cohere import CohereChat 4 | 5 | assistant = Assistant( 6 | llm=CohereChat(model="command-r"), 7 | tools=[DuckDuckGo()], 8 | show_tool_calls=True, 9 | ) 10 | assistant.print_response("Whats happening in France?", markdown=True, stream=False) 11 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/google/basic_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.google import Gemini 3 | 4 | assistant = Assistant( 5 | llm=Gemini(model="gemini-1.5-flash"), 6 | description="You help people with their health and fitness goals.", 7 | debug_mode=True, 8 | ) 9 | 
assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False) 10 | -------------------------------------------------------------------------------- /cookbook/integrations/lancedb/README.md: -------------------------------------------------------------------------------- 1 | # Lancedb Agent 2 | 3 | ### 1. Create a virtual environment 4 | ```shell 5 | python3 -m venv ~/.venvs/aienv 6 | source ~/.venvs/aienv/bin/activate 7 | ``` 8 | 9 | ### 2. Install libraries 10 | ```shell 11 | pip install -U lancedb pypdf pandas openai phidata 12 | ``` 13 | 14 | ### 3. Run Agent 15 | ```shell 16 | python cookbook/integrations/lancedb/agent.py 17 | ``` 18 | -------------------------------------------------------------------------------- /cookbook/providers/google/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.google import Gemini 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent(model=Gemini(id="gemini-2.0-flash-exp"), tools=[DuckDuckGo()], show_tool_calls=True, markdown=True) 8 | agent.print_response("Whats happening in France?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/providers/openrouter/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.openrouter import OpenRouter 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent(model=OpenRouter(id="gpt-4o"), tools=[DuckDuckGo()], show_tool_calls=True, markdown=True) 8 | agent.print_response("Whats happening in France?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/openai_api.py: 
-------------------------------------------------------------------------------- 1 | # Please install dependencies using: pip install -U ollama phidata openai 2 | from phi.assistant import Assistant 3 | from phi.llm.ollama.openai import OllamaOpenAI 4 | 5 | assistant = Assistant( 6 | llm=OllamaOpenAI(model="tinyllama"), 7 | system_prompt="Who are you and who created you? Respond in 1 sentence.", 8 | ) 9 | assistant.print_response(markdown=True) 10 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openai/assistant.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai import OpenAIChat 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | assistant = Assistant( 6 | llm=OpenAIChat(model="gpt-4o", max_tokens=500, temperature=0.3), 7 | tools=[DuckDuckGo()], 8 | show_tool_calls=True, 9 | ) 10 | assistant.print_response("Whats happening in France?", markdown=True) 11 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/exa_tools.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from phi.assistant import Assistant 4 | from phi.tools.exa import ExaTools 5 | 6 | os.environ["EXA_API_KEY"] = "your api key" 7 | 8 | assistant = Assistant( 9 | tools=[ExaTools(include_domains=["cnbc.com", "reuters.com", "bloomberg.com"])], show_tool_calls=True 10 | ) 11 | assistant.print_response("Search for AAPL news", debug_mode=True, markdown=True) 12 | -------------------------------------------------------------------------------- /cookbook/assistants/web_search.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai import OpenAIChat 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | assistant = Assistant( 6 | 
llm=OpenAIChat(model="gpt-4o"), 7 | tools=[DuckDuckGo()], 8 | show_tool_calls=True, 9 | markdown=True, 10 | ) 11 | assistant.print_response("Search for news from France and write a short poem about it.") 12 | -------------------------------------------------------------------------------- /cookbook/providers/ollama_tools/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.ollama import OllamaTools 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent(model=OllamaTools(id="llama3.1:8b"), tools=[DuckDuckGo()], show_tool_calls=True, markdown=True) 8 | agent.print_response("Whats happening in France?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/providers/vertexai/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.vertexai import Gemini 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent(model=Gemini(id="gemini-2.0-flash-exp"), tools=[DuckDuckGo()], show_tool_calls=True, markdown=True) 8 | agent.print_response("Whats happening in France?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/tools/duckdb_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.duckdb import DuckDbTools 3 | 4 | agent = Agent( 5 | tools=[DuckDbTools()], 6 | show_tool_calls=True, 7 | system_prompt="Use this file for Movies data: https://phidata-public.s3.amazonaws.com/demo_data/IMDB-Movie-Data.csv", 8 | ) 9 | agent.print_response("What is the average rating of movies?", markdown=True, stream=False) 10 | 
-------------------------------------------------------------------------------- /phi/memory/memory.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Any, Dict 2 | 3 | from pydantic import BaseModel 4 | 5 | 6 | class Memory(BaseModel): 7 | """Model for Agent Memories""" 8 | 9 | memory: str 10 | id: Optional[str] = None 11 | topic: Optional[str] = None 12 | input: Optional[str] = None 13 | 14 | def to_dict(self) -> Dict[str, Any]: 15 | return self.model_dump(exclude_none=True) 16 | -------------------------------------------------------------------------------- /phi/utils/download_stream_file.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | 4 | def download_video(url: str, output_path: str) -> str: 5 | """Download video from URL""" 6 | response = requests.get(url, stream=True) 7 | response.raise_for_status() 8 | 9 | with open(output_path, "wb") as f: 10 | for chunk in response.iter_content(chunk_size=8192): 11 | f.write(chunk) 12 | return output_path 13 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/claude/assistant_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | from phi.llm.anthropic import Claude 4 | 5 | assistant = Assistant( 6 | llm=Claude(model="claude-3-5-sonnet-20240620"), 7 | tools=[DuckDuckGo()], 8 | show_tool_calls=True, 9 | ) 10 | assistant.print_response("Whats happening in France?", markdown=True, stream=False) 11 | -------------------------------------------------------------------------------- /cookbook/providers/claude/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | 
from phi.model.anthropic import Claude 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent(model=Claude(id="claude-3-5-sonnet-20240620"), tools=[DuckDuckGo()], show_tool_calls=True, markdown=True) 8 | agent.print_response("Whats happening in France?", stream=True) 9 | -------------------------------------------------------------------------------- /cookbook/providers/xai/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.xai import xAI 3 | 4 | agent = Agent(model=xAI(id="grok-beta"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/tools/youtube_tools.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.youtube_tools import YouTubeTools 3 | 4 | agent = Agent( 5 | tools=[YouTubeTools()], 6 | show_tool_calls=True, 7 | description="You are a YouTube agent. 
Obtain the captions of a YouTube video and answer questions.", 8 | ) 9 | agent.print_response("Summarize this video https://www.youtube.com/watch?v=Iv9dewmcFbs&t", markdown=True) 10 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openrouter/assistant_stream_off.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openrouter import OpenRouter 3 | 4 | assistant = Assistant( 5 | llm=OpenRouter(model="mistralai/mistral-7b-instruct:free"), 6 | description="You help people with their health and fitness goals.", 7 | ) 8 | assistant.print_response("Share a 2 sentence quick and healthy breakfast recipe.", markdown=True, stream=False) 9 | -------------------------------------------------------------------------------- /cookbook/integrations/chromadb/README.md: -------------------------------------------------------------------------------- 1 | # Chromadb Agent 2 | 3 | ### 1. Create a virtual environment 4 | 5 | ```shell 6 | python3 -m venv ~/.venvs/aienv 7 | source ~/.venvs/aienv/bin/activate 8 | ``` 9 | 10 | ### 2. Install libraries 11 | 12 | ```shell 13 | pip install -U chromadb pypdf openai phidata 14 | ``` 15 | 16 | ### 3. 
Run Agent 17 | 18 | ```shell 19 | python cookbook/integrations/chromadb/agent.py 20 | ``` 21 | -------------------------------------------------------------------------------- /phi/file/file.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional, Any 2 | 3 | from pydantic import BaseModel 4 | 5 | 6 | class File(BaseModel): 7 | name: Optional[str] = None 8 | description: Optional[str] = None 9 | columns: Optional[List[str]] = None 10 | path: Optional[str] = None 11 | type: str = "FILE" 12 | 13 | def get_metadata(self) -> dict[str, Any]: 14 | return self.model_dump(exclude_none=True) 15 | -------------------------------------------------------------------------------- /cookbook/assistants/additional_messages.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai import OpenAIChat 3 | 4 | Assistant( 5 | llm=OpenAIChat(model="gpt-3.5-turbo", stop=""), 6 | system_prompt="What is the color of a banana? Provide your answer in the xml tag .", 7 | additional_messages=[{"role": "assistant", "content": ""}], 8 | debug_mode=True, 9 | ).print_response() 10 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/lancedb/README.md: -------------------------------------------------------------------------------- 1 | # Lancedb Assistant 2 | 3 | ### 1. Create a virtual environment 4 | ```shell 5 | python3 -m venv ~/.venvs/aienv 6 | source ~/.venvs/aienv/bin/activate 7 | ``` 8 | 9 | ### 2. Install libraries 10 | ```shell 11 | pip install -U lancedb pypdf pandas openai phidata 12 | ``` 13 | 14 | ### 3. 
Run Assistant 15 | ```shell 16 | python cookbook/integrations/lancedb/assistant.py 17 | ``` 18 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/openai/custom_messages.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai import OpenAIChat 3 | 4 | assistant = Assistant(llm=OpenAIChat(model="gpt-4-turbo"), debug_mode=True, format_messages=False) 5 | assistant.print_response( 6 | [ 7 | {"role": "system", "content": "Reply with haikus."}, 8 | {"role": "user", "content": "What is the capital of France?"}, 9 | ], 10 | ) 11 | -------------------------------------------------------------------------------- /cookbook/providers/hermes/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.ollama import Hermes 3 | 4 | agent = Agent(model=Hermes(id="hermes3"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /phi/api/schemas/ai.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import List, Dict, Any 3 | 4 | from pydantic import BaseModel 5 | 6 | 7 | class ConversationType(str, Enum): 8 | RAG = "RAG" 9 | AUTO = "AUTO" 10 | 11 | 12 | class ConversationClient(str, Enum): 13 | CLI = "CLI" 14 | WEB = "WEB" 15 | 16 | 17 | class ConversationCreateResponse(BaseModel): 18 | id: str 19 | chat_history: List[Dict[str, Any]] 20 | -------------------------------------------------------------------------------- /cookbook/assistants/basic.py: 
-------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai import OpenAIChat 3 | 4 | assistant = Assistant( 5 | llm=OpenAIChat(model="gpt-4o"), 6 | description="You help people with their health and fitness goals.", 7 | instructions=["Recipes should be under 5 ingredients"], 8 | ) 9 | # -*- Print a response to the cli 10 | assistant.print_response("Share a breakfast recipe.", markdown=True) 11 | -------------------------------------------------------------------------------- /cookbook/async/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | import asyncio 4 | from phi.agent import Agent 5 | from phi.model.openai import OpenAIChat 6 | from phi.tools.duckduckgo import DuckDuckGo 7 | 8 | agent = Agent(model=OpenAIChat(id="gpt-4o"), tools=[DuckDuckGo()], show_tool_calls=True, markdown=True) 9 | asyncio.run(agent.aprint_response("Whats happening in France?", stream=True)) 10 | -------------------------------------------------------------------------------- /cookbook/integrations/pinecone/README.md: -------------------------------------------------------------------------------- 1 | ## Pgvector Agent 2 | 3 | ### 1. Create a virtual environment 4 | 5 | ```shell 6 | python3 -m venv ~/.venvs/aienv 7 | source ~/.venvs/aienv/bin/activate 8 | ``` 9 | 10 | ### 2. Install libraries 11 | 12 | ```shell 13 | pip install -U pinecone pypdf openai phidata 14 | ``` 15 | 16 | ### 3. 
Run Pinecone Agent 17 | 18 | ```shell 19 | python cookbook/integrations/pinecone/agent.py 20 | ``` 21 | -------------------------------------------------------------------------------- /cookbook/providers/ollama/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.ollama import Ollama 3 | 4 | agent = Agent(model=Ollama(id="llama3.1:8b"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/run_clickhouse.sh: -------------------------------------------------------------------------------- 1 | docker run -d \ 2 | -e CLICKHOUSE_DB=ai \ 3 | -e CLICKHOUSE_USER=ai \ 4 | -e CLICKHOUSE_PASSWORD=ai \ 5 | -e CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT=1 \ 6 | -v clickhouse_data:/var/lib/clickhouse/ \ 7 | -v clickhouse_log:/var/log/clickhouse-server/ \ 8 | -p 8123:8123 \ 9 | -p 9000:9000 \ 10 | --ulimit nofile=262144:262144 \ 11 | --name clickhouse-server \ 12 | clickhouse/clickhouse-server 13 | -------------------------------------------------------------------------------- /phi/reranker/base.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from pydantic import BaseModel, ConfigDict 4 | from phi.document import Document 5 | 6 | 7 | class Reranker(BaseModel): 8 | """Base class for rerankers""" 9 | 10 | model_config = ConfigDict(arbitrary_types_allowed=True, populate_by_name=True) 11 | 12 | def rerank(self, query: str, documents: List[Document]) -> List[Document]: 13 | raise NotImplementedError 14 | -------------------------------------------------------------------------------- 
/cookbook/agents/25_system_prompt_via_function.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | 3 | 4 | def get_system_prompt(agent: Agent) -> str: 5 | return f"You are {agent.name}! Remember to always include your name in your responses." 6 | 7 | 8 | agent = Agent( 9 | name="AgentX", 10 | system_prompt=get_system_prompt, 11 | markdown=True, 12 | show_tool_calls=True, 13 | ) 14 | agent.print_response("Who are you?", stream=True) 15 | -------------------------------------------------------------------------------- /cookbook/agents/41_image_to_text.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from phi.agent import Agent 4 | from phi.model.openai import OpenAIChat 5 | 6 | agent = Agent( 7 | model=OpenAIChat(id="gpt-4o"), 8 | markdown=True, 9 | ) 10 | 11 | image_path = Path(__file__).parent.joinpath("multimodal-agents.jpg") 12 | agent.print_response( 13 | "Write a 3 sentence fiction story about the image", 14 | images=[str(image_path)], 15 | ) 16 | -------------------------------------------------------------------------------- /cookbook/providers/groq/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.groq import Groq 3 | 4 | agent = Agent(model=Groq(id="llama-3.3-70b-versatile"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response on the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/providers/ollama/image_agent.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from phi.agent import Agent 4 | from 
phi.model.ollama import Ollama 5 | 6 | agent = Agent( 7 | model=Ollama(id="llama3.2-vision"), 8 | markdown=True, 9 | ) 10 | 11 | image_path = Path(__file__).parent.joinpath("super-agents.png") 12 | agent.print_response( 13 | "Write a 3 sentence fiction story about the image", 14 | images=[str(image_path)], 15 | ) 16 | -------------------------------------------------------------------------------- /cookbook/providers/openai/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.openai import OpenAIChat 3 | 4 | agent = Agent(model=OpenAIChat(id="gpt-4o"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/reasoning/mathematical_proof.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.model.openai import OpenAIChat 3 | 4 | task = "Prove that for any positive integer n, the sum of the first n odd numbers is equal to n squared. Provide a detailed proof." 5 | 6 | reasoning_agent = Agent(model=OpenAIChat(id="gpt-4o"), reasoning=True, markdown=True, structured_outputs=True) 7 | reasoning_agent.print_response(task, stream=True, show_full_reasoning=True) 8 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/chromadb/README.md: -------------------------------------------------------------------------------- 1 | # Chromadb Assistant 2 | 3 | ### 1. Create a virtual environment 4 | 5 | ```shell 6 | python3 -m venv ~/.venvs/aienv 7 | source ~/.venvs/aienv/bin/activate 8 | ``` 9 | 10 | ### 2. 
Install libraries 11 | 12 | ```shell 13 | pip install -U chromadb pypdf openai phidata 14 | ``` 15 | 16 | ### 3. Run Assistant 17 | 18 | ```shell 19 | python cookbook/integrations/chromadb/assistant.py 20 | ``` 21 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/mistral/list_models.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from mistralai import Mistral 4 | 5 | 6 | def main(): 7 | api_key = os.environ["MISTRAL_API_KEY"] 8 | client = Mistral(api_key=api_key) 9 | list_models_response = client.models.list() 10 | if list_models_response is not None: 11 | for model in list_models_response: 12 | print(model) 13 | 14 | 15 | if __name__ == "__main__": 16 | main() 17 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/duckdb_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckdb import DuckDbTools 3 | 4 | assistant = Assistant( 5 | tools=[DuckDbTools()], 6 | show_tool_calls=True, 7 | system_prompt="Use this file for Movies data: https://phidata-public.s3.amazonaws.com/demo_data/IMDB-Movie-Data.csv", 8 | ) 9 | assistant.print_response("What is the average rating of movies?", markdown=True, stream=False) 10 | -------------------------------------------------------------------------------- /cookbook/async/basic.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from phi.agent import Agent 3 | from phi.model.openai import OpenAIChat 4 | 5 | agent = Agent( 6 | model=OpenAIChat(id="gpt-4o"), 7 | description="You help people with their health and fitness goals.", 8 | instructions=["Recipes should be under 5 ingredients"], 9 | ) 10 | # -*- Print a response to the cli 11 | asyncio.run(agent.aprint_response("Share a breakfast recipe.", 
markdown=True)) 12 | -------------------------------------------------------------------------------- /cookbook/providers/google/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.google import Gemini 3 | 4 | agent = Agent(model=Gemini(id="gemini-2.0-flash-exp"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/providers/openrouter/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.openrouter import OpenRouter 3 | 4 | agent = Agent(model=OpenRouter(id="gpt-4o"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /phi/model/google/__init__.py: -------------------------------------------------------------------------------- 1 | from phi.model.google.gemini import Gemini 2 | 3 | try: 4 | from phi.model.google.gemini_openai import GeminiOpenAIChat 5 | except ImportError: 6 | 7 | class GeminiOpenAIChat: # type: ignore 8 | def __init__(self, *args, **kwargs): 9 | raise ImportError( 10 | "GeminiOpenAIChat requires the 'openai' library. 
Please install it via `pip install openai`" 11 | ) 12 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/structured_output/README.md: -------------------------------------------------------------------------------- 1 | ## Structured Output 2 | 3 | 1. Install libraries 4 | 5 | ```shell 6 | pip install -U openai phidata 7 | ``` 8 | 9 | 2. Create single pydantic model 10 | 11 | ```shell 12 | python cookbook/examples/structured_output/movie_generator.py 13 | ``` 14 | 15 | 3. Create list of pydantic models 16 | 17 | ```shell 18 | python cookbook/examples/structured_output/movie_list_generator.py 19 | ``` 20 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/llama_cpp/tool_call.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai.like import OpenAILike 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | 6 | assistant = Assistant( 7 | llm=OpenAILike(base_url="http://localhost:8000/v1"), tools=[DuckDuckGo()], show_tool_calls=True, debug_mode=True 8 | ) 9 | assistant.print_response("Whats happening in France? Summarize top stories with sources.", markdown=True) 10 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/lmstudio/tool_call.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai.like import OpenAILike 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | 6 | assistant = Assistant( 7 | llm=OpenAILike(base_url="http://localhost:1234/v1"), 8 | tools=[DuckDuckGo()], 9 | show_tool_calls=True, 10 | ) 11 | assistant.print_response("Whats happening in France? 
Summarize top stories with sources.", markdown=True) 12 | -------------------------------------------------------------------------------- /cookbook/providers/cohere/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.cohere import CohereChat 3 | 4 | agent = Agent(model=CohereChat(id="command-r-08-2024"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/providers/nvidia/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.nvidia import Nvidia 3 | 4 | agent = Agent(model=Nvidia(id="meta/llama-3.3-70b-instruct"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/providers/ollama_tools/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.ollama import OllamaTools 3 | 4 | agent = Agent(model=OllamaTools(id="llama3.1:8b"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | 
-------------------------------------------------------------------------------- /cookbook/providers/vertexai/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.vertexai import Gemini 3 | 4 | agent = Agent(model=Gemini(id="gemini-2.0-flash-exp"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /phi/tools/tool.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Optional 2 | from pydantic import BaseModel 3 | 4 | 5 | class Tool(BaseModel): 6 | """Model for Tools that can be used by an agent.""" 7 | 8 | # The type of tool 9 | type: str 10 | # The function to be called if type = "function" 11 | function: Optional[Dict[str, Any]] = None 12 | 13 | def to_dict(self) -> Dict[str, Any]: 14 | return self.model_dump(exclude_none=True) 15 | -------------------------------------------------------------------------------- /cookbook/assistants/examples/sql/load_knowledge.py: -------------------------------------------------------------------------------- 1 | from phi.utils.log import logger 2 | from assistant import assistant_knowledge 3 | 4 | 5 | def load_sql_assistant_knowledge_base(recreate: bool = True): 6 | logger.info("Loading SQL Assistant knowledge.") 7 | assistant_knowledge.load(recreate=recreate) 8 | logger.info("SQL Assistant knowledge loaded.") 9 | 10 | 11 | if __name__ == "__main__": 12 | load_sql_assistant_knowledge_base() 13 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/pinecone/README.md: 
-------------------------------------------------------------------------------- 1 | ## Pgvector Assistant 2 | 3 | ### 1. Create a virtual environment 4 | 5 | ```shell 6 | python3 -m venv ~/.venvs/aienv 7 | source ~/.venvs/aienv/bin/activate 8 | ``` 9 | 10 | ### 2. Install libraries 11 | 12 | ```shell 13 | pip install -U pinecone pypdf openai phidata 14 | ``` 15 | 16 | ### 3. Run Pinecone Assistant 17 | 18 | ```shell 19 | python cookbook/integrations/pinecone/assistant.py 20 | ``` 21 | -------------------------------------------------------------------------------- /cookbook/providers/azure_openai/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.azure import AzureOpenAIChat 3 | 4 | agent = Agent(model=AzureOpenAIChat(id="gpt-4o"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response on the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/providers/claude/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.anthropic import Claude 3 | 4 | agent = Agent(model=Claude(id="claude-3-5-sonnet-20241022"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/providers/deepseek/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | 
from phi.model.deepseek import DeepSeekChat 3 | 4 | agent = Agent(model=DeepSeekChat(id="deepseek-chat"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/workflows/content_creator_workflow/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from enum import Enum 3 | from dotenv import load_dotenv 4 | 5 | load_dotenv() 6 | 7 | 8 | TYPEFULLY_API_URL = "https://api.typefully.com/v1/drafts/" 9 | TYPEFULLY_API_KEY = os.getenv("TYPEFULLY_API_KEY") 10 | HEADERS = {"X-API-KEY": f"Bearer {TYPEFULLY_API_KEY}"} 11 | 12 | 13 | # Define the enums 14 | class PostType(Enum): 15 | TWITTER = "Twitter" 16 | LINKEDIN = "LinkedIn" 17 | -------------------------------------------------------------------------------- /phi/docker/app/whoami/whoami.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.base import DockerApp, ContainerContext # noqa: F401 2 | 3 | 4 | class Whoami(DockerApp): 5 | # -*- App Name 6 | name: str = "whoami" 7 | 8 | # -*- Image Configuration 9 | image_name: str = "traefik/whoami" 10 | image_tag: str = "v1.10" 11 | 12 | # -*- App Ports 13 | # Open a container port if open_port=True 14 | open_port: bool = True 15 | port_number: int = 80 16 | -------------------------------------------------------------------------------- /cookbook/assistants/integrations/qdrant/README.md: -------------------------------------------------------------------------------- 1 | ## Pgvector Assistant 2 | 3 | ### 1. Create a virtual environment 4 | 5 | ```shell 6 | python3 -m venv ~/.venvs/aienv 7 | source ~/.venvs/aienv/bin/activate 8 | ``` 9 | 10 | ### 2. 
Install libraries 11 | 12 | ```shell 13 | pip install -U pinecone-client pypdf openai phidata 14 | ``` 15 | 16 | ### 3. Run Pinecone Assistant 17 | 18 | ```shell 19 | python cookbook/integrations/pinecone/assistant.py 20 | ``` 21 | -------------------------------------------------------------------------------- /cookbook/assistants/user_messages.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.openai import OpenAIChat 3 | 4 | Assistant( 5 | llm=OpenAIChat(model="gpt-3.5-turbo", stop=""), 6 | debug_mode=True, 7 | ).print_response( 8 | messages=[ 9 | {"role": "user", "content": "What is the color of a banana? Provide your answer in the xml tag ."}, 10 | {"role": "assistant", "content": ""}, 11 | ], 12 | ) 13 | -------------------------------------------------------------------------------- /cookbook/providers/groq/async/basic.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from phi.agent import Agent 3 | from phi.model.groq import Groq 4 | 5 | agent = Agent( 6 | model=Groq(id="llama-3.3-70b-versatile"), 7 | description="You help people with their health and fitness goals.", 8 | instructions=["Recipes should be under 5 ingredients"], 9 | ) 10 | # -*- Print a response to the cli 11 | asyncio.run(agent.aprint_response("Share a breakfast recipe.", markdown=True)) 12 | -------------------------------------------------------------------------------- /cookbook/tools/phi_tool.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.tools.phi import PhiTools 3 | 4 | # Create an Agent with the Phi tool 5 | agent = Agent(tools=[PhiTools()], name="Phi Workspace Manager") 6 | 7 | # Example 1: Create a new agent app 8 | agent.print_response("Create a new agent-app called agent-app-turing", markdown=True) 9 | 10 | # Example 3: Start a workspace 11 | 
agent.print_response("Start the workspace agent-app-turing", markdown=True) 12 | -------------------------------------------------------------------------------- /phi/docker/app/ollama/ollama.py: -------------------------------------------------------------------------------- 1 | from phi.docker.app.base import DockerApp, ContainerContext # noqa: F401 2 | 3 | 4 | class Ollama(DockerApp): 5 | # -*- App Name 6 | name: str = "ollama" 7 | 8 | # -*- Image Configuration 9 | image_name: str = "ollama/ollama" 10 | image_tag: str = "latest" 11 | 12 | # -*- App Ports 13 | # Open a container port if open_port=True 14 | open_port: bool = True 15 | port_number: int = 11434 16 | -------------------------------------------------------------------------------- /cookbook/providers/deepseek/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.deepseek import DeepSeekChat 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent( 8 | model=DeepSeekChat(id="deepseek-chat"), 9 | tools=[DuckDuckGo()], 10 | show_tool_calls=True, 11 | markdown=True, 12 | ) 13 | 14 | agent.print_response("Whats happening in France?") 15 | -------------------------------------------------------------------------------- /cookbook/providers/groq/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.groq import Groq 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent( 8 | model=Groq(id="llama-3.3-70b-versatile"), 9 | tools=[DuckDuckGo()], 10 | show_tool_calls=True, 11 | markdown=True, 12 | ) 13 | 14 | agent.print_response("Whats happening in France?", stream=True) 15 | -------------------------------------------------------------------------------- 
/cookbook/providers/sambanova/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.sambanova import Sambanova 3 | 4 | agent = Agent(model=Sambanova(id="Meta-Llama-3.1-8B-Instruct"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /phi/utils/format_str.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | 4 | def remove_indent(s: Optional[str]) -> Optional[str]: 5 | """ 6 | Remove the indent from a string. 7 | 8 | Args: 9 | s (str): String to remove indent from 10 | 11 | Returns: 12 | str: String with indent removed 13 | """ 14 | if s is not None and isinstance(s, str): 15 | return "\n".join([line.strip() for line in s.split("\n")]) 16 | return None 17 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/assistant.py: -------------------------------------------------------------------------------- 1 | from rich.pretty import pprint 2 | from phi.assistant import Assistant 3 | from phi.llm.ollama import Ollama 4 | 5 | assistant = Assistant( 6 | llm=Ollama(model="llama3"), 7 | description="You help people with their health and fitness goals.", 8 | ) 9 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 10 | print("\n-*- Metrics:") 11 | pprint(assistant.llm.metrics) # type: ignore 12 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/firecrawl_tools.py: -------------------------------------------------------------------------------- 1 | # pip install 
firecrawl-py openai 2 | 3 | import os 4 | 5 | from phi.assistant import Assistant 6 | from phi.tools.firecrawl import FirecrawlTools 7 | 8 | api_key = os.getenv("FIRECRAWL_API_KEY") 9 | 10 | assistant = Assistant( 11 | tools=[FirecrawlTools(api_key=api_key, scrape=False, crawl=True)], show_tool_calls=True, markdown=True 12 | ) 13 | assistant.print_response("summarize this https://finance.yahoo.com/") 14 | -------------------------------------------------------------------------------- /cookbook/providers/bedrock/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.aws.claude import Claude 3 | 4 | agent = Agent(model=Claude(id="anthropic.claude-3-5-sonnet-20240620-v1:0"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/providers/google_openai/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.google import GeminiOpenAIChat 3 | 4 | agent = Agent(model=GeminiOpenAIChat(id="gemini-1.5-flash"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/hackernews.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.hackernews import HackerNews 3 | 4 | 5 | 
hn_assistant = Assistant( 6 | name="Hackernews Team", 7 | tools=[HackerNews()], 8 | show_tool_calls=True, 9 | markdown=True, 10 | # debug_mode=True, 11 | ) 12 | hn_assistant.print_response( 13 | "Write an engaging summary of the users with the top 2 stories on hackernews. Please mention the stories as well.", 14 | ) 15 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/sql_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.sql import SQLTools 3 | 4 | db_url = "postgresql+psycopg://ai:ai@localhost:5532/ai" 5 | 6 | assistant = Assistant( 7 | tools=[ 8 | SQLTools( 9 | db_url=db_url, 10 | ) 11 | ], 12 | show_tool_calls=True, 13 | ) 14 | 15 | assistant.print_response("List the tables in the database. Tell me about contents of one of the tables", markdown=True) 16 | -------------------------------------------------------------------------------- /cookbook/async/basic_stream_off.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from phi.agent import Agent 3 | from phi.model.openai import OpenAIChat 4 | 5 | assistant = Agent( 6 | model=OpenAIChat(id="gpt-4o"), 7 | description="You help people with their health and fitness goals.", 8 | instructions=["Recipes should be under 5 ingredients"], 9 | ) 10 | # -*- Print a response to the cli 11 | asyncio.run(assistant.aprint_response("Share a breakfast recipe.", markdown=True, stream=False)) 12 | -------------------------------------------------------------------------------- /cookbook/providers/cohere/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.cohere import CohereChat 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent( 8 | 
model=CohereChat(id="command-r-08-2024"), 9 | tools=[DuckDuckGo()], 10 | show_tool_calls=True, 11 | markdown=True, 12 | ) 13 | 14 | agent.print_response("Whats happening in France?", stream=True) 15 | -------------------------------------------------------------------------------- /cookbook/providers/together/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.together import Together 3 | 4 | agent = Agent(model=Together(id="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /phi/api/schemas/monitor.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Optional 2 | 3 | from pydantic import BaseModel 4 | 5 | 6 | class MonitorEventSchema(BaseModel): 7 | event_type: str 8 | event_status: str 9 | object_name: str 10 | event_data: Optional[Dict[str, Any]] = None 11 | object_data: Optional[Dict[str, Any]] = None 12 | 13 | 14 | class MonitorResponseSchema(BaseModel): 15 | id_monitor: Optional[int] = None 16 | id_event: Optional[int] = None 17 | -------------------------------------------------------------------------------- /cookbook/agents/26_instructions_via_function.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from phi.agent import Agent 4 | 5 | 6 | def get_instructions(agent: Agent) -> List[str]: 7 | return [f"Your name is {agent.name}!", "Talk in haiku's!", "Use poetry to answer questions."] 8 | 9 | 10 | agent = Agent( 11 | name="AgentX", 12 | instructions=get_instructions, 13 | 
markdown=True, 14 | show_tool_calls=True, 15 | ) 16 | agent.print_response("Who are you?", stream=True) 17 | -------------------------------------------------------------------------------- /cookbook/assistants/tools/youtube_tools.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.youtube_tools import YouTubeTools 3 | 4 | assistant = Assistant( 5 | tools=[YouTubeTools()], 6 | show_tool_calls=True, 7 | description="You are a YouTube assistant. Obtain the captions of a YouTube video and answer questions.", 8 | debug_mode=True, 9 | ) 10 | assistant.print_response("Summarize this video https://www.youtube.com/watch?v=Iv9dewmcFbs&t", markdown=True) 11 | -------------------------------------------------------------------------------- /cookbook/examples/hybrid_search/lancedb/README.md: -------------------------------------------------------------------------------- 1 | ## LanceDB Hybrid Search 2 | 3 | ### 1. Create a virtual environment 4 | 5 | ```shell 6 | python3 -m venv ~/.venvs/aienv 7 | source ~/.venvs/aienv/bin/activate 8 | ``` 9 | 10 | ### 2. Install libraries 11 | 12 | ```shell 13 | pip install -U lancedb tantivy pypdf openai phidata 14 | ``` 15 | 16 | ### 3. 
Run LanceDB Hybrid Search Agent 17 | 18 | ```shell 19 | python cookbook/examples/hybrid_search/lancedb/agent.py 20 | ``` 21 | -------------------------------------------------------------------------------- /cookbook/providers/azure_openai/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.azure import AzureOpenAIChat 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent( 8 | model=AzureOpenAIChat(id="gpt-4o"), 9 | tools=[DuckDuckGo()], 10 | show_tool_calls=True, 11 | markdown=True, 12 | ) 13 | 14 | agent.print_response("Whats happening in France?", stream=True) 15 | -------------------------------------------------------------------------------- /phi/utils/response_iterator.py: -------------------------------------------------------------------------------- 1 | class ResponseIterator: 2 | def __init__(self): 3 | self.items = [] 4 | self.index = 0 5 | 6 | def add(self, item): 7 | self.items.append(item) 8 | 9 | def __iter__(self): 10 | return self 11 | 12 | def __next__(self): 13 | if self.index >= len(self.items): 14 | raise StopIteration 15 | item = self.items[self.index] 16 | self.index += 1 17 | return item 18 | -------------------------------------------------------------------------------- /cookbook/assistants/async/basic.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from phi.assistant import Assistant 3 | from phi.llm.openai import OpenAIChat 4 | 5 | assistant = Assistant( 6 | llm=OpenAIChat(model="gpt-3.5-turbo"), 7 | description="You help people with their health and fitness goals.", 8 | instructions=["Recipes should be under 5 ingredients"], 9 | ) 10 | # -*- Print a response to the cli 11 | asyncio.run(assistant.async_print_response("Share a breakfast recipe.", markdown=True)) 12 | 
-------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/finance.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.yfinance import YFinanceTools 3 | from phi.llm.ollama import OllamaTools 4 | 5 | print("============= llama3 finance assistant =============") 6 | assistant = Assistant( 7 | llm=OllamaTools(model="llama3"), 8 | tools=[YFinanceTools(stock_price=True, analyst_recommendations=True, stock_fundamentals=True)], 9 | show_tool_calls=True, 10 | ) 11 | assistant.cli_app(markdown=True) 12 | -------------------------------------------------------------------------------- /cookbook/providers/fireworks/basic.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.fireworks import Fireworks 3 | 4 | agent = Agent(model=Fireworks(id="accounts/fireworks/models/llama-v3p1-405b-instruct"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run: RunResponse = agent.run("Share a 2 sentence horror story") 8 | # print(run.content) 9 | 10 | # Print the response in the terminal 11 | agent.print_response("Share a 2 sentence horror story") 12 | -------------------------------------------------------------------------------- /cookbook/providers/bedrock/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.aws.claude import Claude 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent( 8 | model=Claude(id="anthropic.claude-3-5-sonnet-20240620-v1:0"), 9 | tools=[DuckDuckGo()], 10 | show_tool_calls=True, 11 | markdown=True, 12 | ) 13 | agent.print_response("Whats happening in France?", stream=True) 14 | 
-------------------------------------------------------------------------------- /cookbook/providers/groq/async/basic_stream.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from phi.agent import Agent 3 | from phi.model.groq import Groq 4 | 5 | assistant = Agent( 6 | model=Groq(id="llama-3.3-70b-versatile"), 7 | description="You help people with their health and fitness goals.", 8 | instructions=["Recipes should be under 5 ingredients"], 9 | ) 10 | # -*- Print a response to the cli 11 | asyncio.run(assistant.aprint_response("Share a breakfast recipe.", markdown=True, stream=True)) 12 | -------------------------------------------------------------------------------- /phi/docker/app/airflow/flower.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union, List 2 | 3 | from phi.docker.app.airflow.base import AirflowBase 4 | 5 | 6 | class AirflowFlower(AirflowBase): 7 | # -*- App Name 8 | name: str = "airflow-flower" 9 | 10 | # Command for the container 11 | command: Optional[Union[str, List[str]]] = "flower" 12 | 13 | # -*- App Ports 14 | # Open a container port if open_port=True 15 | open_port: bool = True 16 | port_number: int = 5555 17 | -------------------------------------------------------------------------------- /cookbook/agents_101/01_web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install openai duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.openai import OpenAIChat 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | web_agent = Agent( 8 | name="Web Agent", 9 | model=OpenAIChat(id="gpt-4o"), 10 | tools=[DuckDuckGo()], 11 | show_tool_calls=True, 12 | markdown=True, 13 | ) 14 | web_agent.print_response("Whats happening in France?", stream=True) 15 | 
-------------------------------------------------------------------------------- /cookbook/assistants/llms/mistral/assistant.py: -------------------------------------------------------------------------------- 1 | import os 2 | from phi.assistant import Assistant 3 | from phi.llm.mistral import MistralChat 4 | 5 | assistant = Assistant( 6 | llm=MistralChat( 7 | model="open-mixtral-8x22b", 8 | api_key=os.environ["MISTRAL_API_KEY"], 9 | ), 10 | description="You help people with their health and fitness goals.", 11 | debug_mode=True, 12 | ) 13 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True) 14 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/assistant_stream_off.py: -------------------------------------------------------------------------------- 1 | from rich.pretty import pprint 2 | from phi.assistant import Assistant 3 | from phi.llm.ollama import Ollama 4 | 5 | assistant = Assistant( 6 | llm=Ollama(model="llama3"), 7 | description="You help people with their health and fitness goals.", 8 | ) 9 | assistant.print_response("Share a quick healthy breakfast recipe.", stream=False, markdown=True) 10 | print("\n-*- Metrics:") 11 | pprint(assistant.llm.metrics) # type: ignore 12 | -------------------------------------------------------------------------------- /cookbook/providers/together/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.together import Together 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent( 8 | model=Together(id="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"), 9 | tools=[DuckDuckGo()], 10 | show_tool_calls=True, 11 | markdown=True, 12 | ) 13 | agent.print_response("Whats happening in France?", stream=True) 14 | 
-------------------------------------------------------------------------------- /phi/docker/app/airflow/webserver.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union, List 2 | 3 | from phi.docker.app.airflow.base import AirflowBase 4 | 5 | 6 | class AirflowWebserver(AirflowBase): 7 | # -*- App Name 8 | name: str = "airflow-ws" 9 | 10 | # Command for the container 11 | command: Optional[Union[str, List[str]]] = "webserver" 12 | 13 | # -*- App Ports 14 | # Open a container port if open_port=True 15 | open_port: bool = True 16 | port_number: int = 8080 17 | -------------------------------------------------------------------------------- /phi/utils/env.py: -------------------------------------------------------------------------------- 1 | from os import getenv 2 | from typing import Optional 3 | 4 | 5 | def get_from_env(key: str, default: Optional[str] = None, required: bool = False) -> Optional[str]: 6 | """Get the value for an environment variable. 
Use default if not found, or raise an error if required is True.""" 7 | 8 | value = getenv(key, default) 9 | if value is None and required: 10 | raise ValueError(f"Environment variable {key} is required but not found") 11 | return value 12 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/mistral/assistant_stream_off.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from phi.assistant import Assistant 4 | from phi.llm.mistral import MistralChat 5 | 6 | assistant = Assistant( 7 | llm=MistralChat( 8 | model="mistral-large-latest", 9 | api_key=os.environ["MISTRAL_API_KEY"], 10 | ), 11 | description="You help people with their health and fitness goals.", 12 | ) 13 | assistant.print_response("Share a quick healthy breakfast recipe.", markdown=True, stream=False) 14 | -------------------------------------------------------------------------------- /phi/docker/app/superset/webserver.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union, List 2 | 3 | from phi.docker.app.superset.base import SupersetBase 4 | 5 | 6 | class SupersetWebserver(SupersetBase): 7 | # -*- App Name 8 | name: str = "superset-ws" 9 | 10 | # Command for the container 11 | command: Optional[Union[str, List[str]]] = "webserver" 12 | 13 | # -*- App Ports 14 | # Open a container port if open_port=True 15 | open_port: bool = True 16 | port_number: int = 8088 17 | -------------------------------------------------------------------------------- /cookbook/examples/hybrid_search/pgvector/README.md: -------------------------------------------------------------------------------- 1 | ## Pgvector Hybrid Search 2 | 3 | ### 1. Create a virtual environment 4 | 5 | ```shell 6 | python3 -m venv ~/.venvs/aienv 7 | source ~/.venvs/aienv/bin/activate 8 | ``` 9 | 10 | ### 2. 
Install libraries 11 | 12 | ```shell 13 | pip install -U pgvector pypdf "psycopg[binary]" sqlalchemy openai phidata 14 | ``` 15 | 16 | ### 3. Run PgVector Hybrid Search Agent 17 | 18 | ```shell 19 | python cookbook/examples/hybrid_search/pgvector/agent.py 20 | ``` 21 | -------------------------------------------------------------------------------- /cookbook/providers/fireworks/web_search.py: -------------------------------------------------------------------------------- 1 | """Run `pip install duckduckgo-search` to install dependencies.""" 2 | 3 | from phi.agent import Agent 4 | from phi.model.fireworks import Fireworks 5 | from phi.tools.duckduckgo import DuckDuckGo 6 | 7 | agent = Agent( 8 | model=Fireworks(id="accounts/fireworks/models/llama-v3p1-405b-instruct"), 9 | tools=[DuckDuckGo()], 10 | show_tool_calls=True, 11 | markdown=True, 12 | ) 13 | agent.print_response("Whats happening in France?", stream=True) 14 | -------------------------------------------------------------------------------- /cookbook/assistants/async/basic_stream_off.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from phi.assistant import Assistant 3 | from phi.llm.openai import OpenAIChat 4 | 5 | assistant = Assistant( 6 | llm=OpenAIChat(model="gpt-3.5-turbo"), 7 | description="You help people with their health and fitness goals.", 8 | instructions=["Recipes should be under 5 ingredients"], 9 | ) 10 | # -*- Print a response to the cli 11 | asyncio.run(assistant.async_print_response("Share a breakfast recipe.", markdown=True, stream=False)) 12 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/deepseek/tool_call.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.deepseek import DeepSeekChat 3 | from phi.tools.yfinance import YFinanceTools 4 | 5 | assistant = Assistant( 6 | 
llm=DeepSeekChat(), 7 | tools=[YFinanceTools(stock_price=True, analyst_recommendations=True, company_info=True, company_news=True)], 8 | show_tool_calls=True, 9 | markdown=True, 10 | ) 11 | assistant.print_response("Write a comparison between NVDA and AMD, use all tools available.") 12 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/groq/web_search.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | from phi.llm.groq import Groq 4 | 5 | assistant = Assistant( 6 | llm=Groq(model="llama3-70b-8192"), 7 | tools=[DuckDuckGo()], 8 | instructions=["Always search the web for information"], 9 | show_tool_calls=True, 10 | ) 11 | assistant.cli_app(markdown=True, stream=False) 12 | # assistant.print_response("Whats happening in France?", markdown=True, stream=False) 13 | -------------------------------------------------------------------------------- /cookbook/agents/13_image_agent.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | from phi.model.openai import OpenAIChat 3 | from phi.tools.duckduckgo import DuckDuckGo 4 | 5 | agent = Agent( 6 | model=OpenAIChat(id="gpt-4o"), 7 | tools=[DuckDuckGo()], 8 | markdown=True, 9 | ) 10 | 11 | agent.print_response( 12 | "Tell me about this image and give me the latest news about it.", 13 | images=[ 14 | "https://upload.wikimedia.org/wikipedia/commons/b/bf/Krakow_-_Kosciol_Mariacki.jpg", 15 | ], 16 | stream=True, 17 | ) 18 | -------------------------------------------------------------------------------- /cookbook/providers/cohere/basic_stream.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.cohere import CohereChat 3 | 4 | agent = Agent(model=CohereChat(id="command-r-08-2024"), 
markdown=True) 5 | 6 | # Get the response in a variable 7 | # run_response: Iterator[RunResponse] = agent.run("Share a 2 sentence horror story", stream=True) 8 | # for chunk in run_response: 9 | # print(chunk.content) 10 | 11 | # Print the response in the terminal 12 | agent.print_response("Share a 2 sentence horror story", stream=True) 13 | -------------------------------------------------------------------------------- /cookbook/agents/33_agent_input_as_list.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | 3 | Agent().print_response( 4 | [ 5 | {"type": "text", "text": "What's in this image?"}, 6 | { 7 | "type": "image_url", 8 | "image_url": { 9 | "url": "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg", 10 | }, 11 | }, 12 | ], 13 | stream=True, 14 | markdown=True, 15 | ) 16 | -------------------------------------------------------------------------------- /cookbook/assistants/llms/ollama/who_are_you.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.llm.ollama import Ollama 3 | 4 | prompt = "Who are you and who created you? Answer in 1 short sentence." 
5 | temp = 0.3 6 | models = ["llama3", "phi3", "llava", "llama2", "mixtral", "openhermes", "tinyllama"] 7 | 8 | for model in models: 9 | print(f"================ {model} ================") 10 | Assistant(llm=Ollama(model=model, options={"temperature": temp}), system_prompt=prompt).print_response( 11 | markdown=True 12 | ) 13 | -------------------------------------------------------------------------------- /cookbook/assistants/storage.py: -------------------------------------------------------------------------------- 1 | from phi.assistant import Assistant 2 | from phi.tools.duckduckgo import DuckDuckGo 3 | from phi.storage.assistant.postgres import PgAssistantStorage 4 | 5 | assistant = Assistant( 6 | storage=PgAssistantStorage(table_name="assistant_runs", db_url="postgresql+psycopg://ai:ai@localhost:5532/ai"), 7 | tools=[DuckDuckGo()], 8 | add_chat_history_to_messages=True, 9 | ) 10 | assistant.print_response("How many people live in Canada?") 11 | assistant.print_response("What is their national anthem called?") 12 | -------------------------------------------------------------------------------- /cookbook/providers/deepseek/basic_stream.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent, RunResponse # noqa 2 | from phi.model.deepseek import DeepSeekChat 3 | 4 | agent = Agent(model=DeepSeekChat(id="deepseek-chat"), markdown=True) 5 | 6 | # Get the response in a variable 7 | # run_response: Iterator[RunResponse] = agent.run("Share a 2 sentence horror story", stream=True) 8 | # for chunk in run_response: 9 | # print(chunk.content) 10 | 11 | # Print the response in the terminal 12 | agent.print_response("Share a 2 sentence horror story", stream=True) 13 | -------------------------------------------------------------------------------- /cookbook/providers/huggingface/agent_stream.py: -------------------------------------------------------------------------------- 1 | from phi.agent import Agent 2 | 
from phi.agent import Agent
from phi.model.openai import OpenAIChat
from phi.tools.duckduckgo import DuckDuckGo

# GPT-4o agent with web search, markdown output.
model = OpenAIChat(id="gpt-4o")
agent = Agent(
    model=model,
    tools=[DuckDuckGo()],
    markdown=True,
)

# Ask about an image and let the agent search for related news.
image_urls = [
    "https://upload.wikimedia.org/wikipedia/commons/b/bf/Krakow_-_Kosciol_Mariacki.jpg",
]
agent.print_response(
    "Tell me about this image and give me the latest news about it.",
    images=image_urls,
    stream=True,
)
"""Run `pip install openai duckduckgo-search phidata` to install dependencies."""

from phi.agent import Agent
from phi.model.openai import OpenAIChat
from phi.tools.duckduckgo import DuckDuckGo

# A web-search agent: GPT-4o with DuckDuckGo, tool calls shown in the output.
search_tools = [DuckDuckGo()]
web_agent = Agent(
    name="Web Agent",
    model=OpenAIChat(id="gpt-4o"),
    tools=search_tools,
    instructions=["Always include sources"],
    show_tool_calls=True,
    markdown=True,
)

web_agent.print_response("Whats happening in France?", stream=True)
"""Run `pip install duckduckgo-search openai` to install dependencies."""

from phi.agent import Agent
from phi.tools.duckduckgo import DuckDuckGo
from phi.storage.agent.json import JsonFileAgentStorage

# Store sessions as JSON files so the second question can use the chat history.
session_storage = JsonFileAgentStorage(dir_path="tmp/agent_sessions_json")

agent = Agent(
    storage=session_storage,
    tools=[DuckDuckGo()],
    add_history_to_messages=True,
)

# The follow-up relies on history ("their" refers to Canada).
for question in (
    "How many people live in Canada?",
    "What is their national anthem called?",
):
    agent.print_response(question)
from typing import Iterator  # noqa
from phi.agent import Agent, RunResponse  # noqa
from phi.model.xai import xAI

# Minimal streaming example with xAI's grok-beta model.
model = xAI(id="grok-beta")
agent = Agent(model=model, markdown=True)

# Alternative: capture the stream as an iterator instead of printing it.
# run_response: Iterator[RunResponse] = agent.run("Share a 2 sentence horror story", stream=True)
# for chunk in run_response:
#     print(chunk.content)

# Stream the response directly to the terminal.
agent.print_response("Share a 2 sentence horror story", stream=True)
from phi.llm.groq import Groq
from phi.assistant.duckdb import DuckDbAssistant

# Data-analyst assistant: answers questions by generating and running DuckDB SQL
# against the tables declared in `semantic_model`.
data_analyst = DuckDbAssistant(
    llm=Groq(model="llama3-70b-8192"),
    # YAML description of the queryable tables; the CSV is loaded from S3 by DuckDB.
    semantic_model="""
    tables:
    - name: movies
      description: "Contains information about movies from IMDB."
      path: "https://phidata-public.s3.amazonaws.com/demo_data/IMDB-Movie-Data.csv"
    """,
    # Show the generated SQL/tool calls in the output.
    show_tool_calls=True,
)
# Interactive REPL; stream=False because tool-heavy runs print whole answers.
data_analyst.cli_app(markdown=True, stream=False, user="Groq")
from phi.agent import Agent
from phi.model.huggingface import HuggingFaceChat
import os
from getpass import getpass

# HuggingFace inference needs an access token; prompt for it instead of
# hard-coding it in the source.
os.environ["HF_TOKEN"] = getpass("Enter your HuggingFace Access token")

agent = Agent(
    model=HuggingFaceChat(
        id="meta-llama/Meta-Llama-3-8B-Instruct",
        max_tokens=4096,
    ),
    # Fixed broken prompt: was "Write 300 words essage on topic that will be provided by user".
    description="Essay Writer. Write a 300 word essay on the topic that will be provided by the user",
)
agent.print_response("topic: AI")
from typing import Optional, Dict, List, Tuple

from pydantic import BaseModel, ConfigDict


class Embedder(BaseModel):
    """Base class for managing embedders.

    Concrete providers subclass this and implement `get_embedding`
    (and, where the provider reports usage, `get_embedding_and_usage`).
    """

    # Dimensionality of the vectors this embedder produces; default 1536.
    dimensions: Optional[int] = 1536

    # Allow non-pydantic types (e.g. provider client objects) on subclass fields.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    def get_embedding(self, text: str) -> List[float]:
        """Return the embedding vector for `text`. Must be overridden."""
        raise NotImplementedError

    def get_embedding_and_usage(self, text: str) -> Tuple[List[float], Optional[Dict]]:
        """Return (embedding, usage metadata) for `text`. Must be overridden."""
        raise NotImplementedError
from typing import Optional, Dict, Any

from pydantic import BaseModel


class AgentSessionCreate(BaseModel):
    """Data sent to API to create an Agent Session"""

    # Identifier of the session being created.
    session_id: str
    # Optional serialized agent configuration/state stored with the session.
    agent_data: Optional[Dict[str, Any]] = None


class AgentRunCreate(BaseModel):
    """Data sent to API to create an Agent Run"""

    # Session this run belongs to.
    session_id: str
    # Optional identifier for the run itself.
    run_id: Optional[str] = None
    # Optional payload describing the run (presumably input/output details).
    run_data: Optional[Dict[str, Any]] = None
    # Optional serialized agent configuration/state at run time.
    agent_data: Optional[Dict[str, Any]] = None
from phi.assistant import Assistant
from phi.llm.openai import OpenAIChat
from phi.tools.calculator import Calculator

# A calculator tool covering the four basic operations.
calculator = Calculator(add=True, subtract=True, multiply=True, divide=True)

assistant = Assistant(
    llm=OpenAIChat(model="gpt-4o"),
    tools=[calculator],
    instructions=["Use the calculator tool for comparisons."],
    show_tool_calls=True,
    markdown=True,
)

# Ask the classic decimal-comparison trap two ways.
for question in (
    "Is 9.11 bigger than 9.9?",
    "9.11 and 9.9 -- which is bigger?",
):
    assistant.print_response(question)
from phi.assistant import Assistant
from phi.tools.yfinance import YFinanceTools
from phi.llm.ollama import Hermes

# Finance toolkit: prices, analyst recommendations, and fundamentals via yfinance.
finance_tools = YFinanceTools(stock_price=True, analyst_recommendations=True, stock_fundamentals=True)

assistant = Assistant(
    llm=Hermes(model="adrienbrault/nous-hermes2pro:Q8_0"),
    tools=[finance_tools],
    show_tool_calls=True,
)

assistant.print_response("Share the NVDA stock price and analyst recommendations", markdown=True)
assistant.print_response("Summarize fundamentals for TSLA", markdown=True)
from typing import Optional, Dict, Any

from pydantic import BaseModel


class AssistantRunCreate(BaseModel):
    """Data sent to API to create an assistant run"""

    # Identifier of the run being created.
    run_id: str
    # Optional serialized assistant configuration/state stored with the run.
    assistant_data: Optional[Dict[str, Any]] = None


class AssistantEventCreate(BaseModel):
    """Data sent to API to create a new assistant event"""

    # Run this event belongs to.
    run_id: str
    # Optional serialized assistant configuration/state at event time.
    assistant_data: Optional[Dict[str, Any]] = None
    # Kind of event being recorded (free-form string as far as this schema shows).
    event_type: str
    # Optional payload with event-specific details.
    event_data: Optional[Dict[str, Any]] = None
import typer

from phi.assistant import Assistant
from phi.llm.aws.claude import Claude

# Typer application; hide local variables in tracebacks to keep errors readable.
cli_app = typer.Typer(pretty_exceptions_show_locals=False)


@cli_app.command()
def aws_assistant():
    # Interactive CLI chat with an AWS Bedrock Claude 3.5 Sonnet assistant.
    # (Comment, not a docstring, so the typer --help text is unchanged.)
    assistant = Assistant(
        llm=Claude(model="anthropic.claude-3-5-sonnet-20240620-v1:0"),
        instructions=["respond in a southern drawl"],
        debug_mode=True,
    )

    assistant.cli_app(markdown=True)


if __name__ == "__main__":
    cli_app()