├── .env.example ├── .gitignore ├── README.md ├── app.py ├── chainlit.md ├── images ├── example_response.png └── example_response_functions.png ├── openai_function_schemas.py ├── openai_functions.py └── requirements.txt /.env.example: -------------------------------------------------------------------------------- 1 | OPENAI_API_KEY=your-key 2 | SERPAPI_API_KEY=your-key -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # chainlit 2 | .chainlit/ 3 | 4 | .vscode 5 | 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | share/python-wheels/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | MANIFEST 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
37 | *.manifest 38 | *.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .nox/ 48 | .coverage 49 | .coverage.* 50 | .cache 51 | nosetests.xml 52 | coverage.xml 53 | *.cover 54 | *.py,cover 55 | .hypothesis/ 56 | .pytest_cache/ 57 | cover/ 58 | 59 | # Translations 60 | *.mo 61 | *.pot 62 | 63 | # Django stuff: 64 | *.log 65 | local_settings.py 66 | db.sqlite3 67 | db.sqlite3-journal 68 | 69 | # Flask stuff: 70 | instance/ 71 | .webassets-cache 72 | 73 | # Scrapy stuff: 74 | .scrapy 75 | 76 | # Sphinx documentation 77 | docs/_build/ 78 | 79 | # PyBuilder 80 | .pybuilder/ 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | # For a library or package, you might want to ignore these files since the code is 92 | # intended to run in multiple environments; otherwise, check them in: 93 | # .python-version 94 | 95 | # pipenv 96 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 97 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 98 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 99 | # install all needed dependencies. 100 | #Pipfile.lock 101 | 102 | # poetry 103 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 104 | # This is especially recommended for binary packages to ensure reproducibility, and is more 105 | # commonly ignored for libraries. 106 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 107 | #poetry.lock 108 | 109 | # pdm 110 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
111 | #pdm.lock 112 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 113 | # in version control. 114 | # https://pdm.fming.dev/#use-with-ide 115 | .pdm.toml 116 | 117 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 118 | __pypackages__/ 119 | 120 | # Celery stuff 121 | celerybeat-schedule 122 | celerybeat.pid 123 | 124 | # SageMath parsed files 125 | *.sage.py 126 | 127 | # Environments 128 | .env 129 | .venv 130 | env/ 131 | venv/ 132 | ENV/ 133 | env.bak/ 134 | venv.bak/ 135 | 136 | # Spyder project settings 137 | .spyderproject 138 | .spyproject 139 | 140 | # Rope project settings 141 | .ropeproject 142 | 143 | # mkdocs documentation 144 | /site 145 | 146 | # mypy 147 | .mypy_cache/ 148 | .dmypy.json 149 | dmypy.json 150 | 151 | # Pyre type checker 152 | .pyre/ 153 | 154 | # pytype static type analyzer 155 | .pytype/ 156 | 157 | # Cython debug symbols 158 | cython_debug/ 159 | 160 | # PyCharm 161 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 162 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 163 | # and can be added to the global gitignore or merged into this file. For a more nuclear 164 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 165 | #.idea/ -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Streaming chatbot with OpenAI functions 2 | 3 | This chatbot utilizes OpenAI's function calling feature to invoke appropriate functions based on user input and stream the response back. 4 | 5 | On top of the standard chat interface, the UI exposes the particular function called along with its arguments, as well as the response from the function. 
6 | 7 | **The current configuration defines two OpenAI functions that can be called**: 8 | - `get_current_weather`: returns the current weather for a given location. Example input: `What's the weather like in New York?` 9 | - Note that the API returns temperature in Celsius by default. The time zone is set for Europe/Berlin, but this can be changed in `openai_functions.py` 10 | 11 | - `get_search_results`: A langchain agent that uses SERP API as a tool to search the web. Example input: `Search the web for the best restaurants in Berlin` 12 | 13 | Other than that, you can easily define your own functions and add them to the app (see [Making changes](#making-changes)). 14 | 15 | Sample conversation that makes use of two different OpenAI functions in a row, utilizing the response from the first function as an argument for the second function: 16 | 17 | ![alt text](images/example_response.png) 18 | 19 | If we expand the response, we can see the function calls and their arguments (the longitude and latitude were inferred from the location name returned by the search function): 20 | 21 | ![alt text](images/example_response_functions.png) 22 | 23 | ## Setup 24 | 25 | ### Install the dependencies 26 | ``` 27 | pip install -r requirements.txt 28 | ``` 29 | 30 | ### Set up your OpenAI API key as an environment variable 31 | 32 | Rename `.env.example` to `.env` and replace the placeholders with your API keys. 33 | ``` 34 | OPENAI_API_KEY=your-key 35 | SERPAPI_API_KEY=your-key 36 | ``` 37 | Note: While the OpenAI API key is required, the SERP API key is optional. Without it, `get_current_weather` still works as usual, but `get_search_results` will not be able to search the web and will return an error message instead. 38 | 39 | The weather function itself needs no API key of any kind. 40 | 41 | ### Run the app 42 | ``` 43 | chainlit run app.py -w 44 | ``` 45 | 46 | ### Making changes 47 | 48 | The app is configured to be easily scalable.
If you'd like to add more OpenAI functions, you can do so in the following way: 49 | 50 | - Define the function schema in `openai_function_schemas.py` 51 | - Add the function definition to `openai_functions.py` 52 | - Add the function to `FUNCTIONS_MAPPING` in `openai_functions.py` 53 | - To test that it works, your function definition can return a hard-coded JSON. -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | import openai 2 | import os 3 | import chainlit as cl 4 | from dotenv import load_dotenv 5 | import ast 6 | from openai_function_schemas import FUNCTIONS_SCHEMA 7 | from openai_functions import FUNCTIONS_MAPPING 8 | 9 | load_dotenv() 10 | 11 | openai.api_key = os.getenv("OPENAI_API_KEY") 12 | MODEL_NAME = "gpt-3.5-turbo-0613" 13 | MODEL_TEMPERATURE = 0.3 14 | FUNCTION_CALL = "auto" 15 | 16 | 17 | async def process_new_delta( 18 | new_delta, openai_message, content_ui_message, function_ui_message 19 | ): 20 | if "role" in new_delta: 21 | openai_message["role"] = new_delta["role"] 22 | if "content" in new_delta: 23 | new_content = new_delta.get("content") or "" 24 | openai_message["content"] += new_content 25 | await content_ui_message.stream_token(new_content) 26 | if "function_call" in new_delta: 27 | if "name" in new_delta["function_call"]: 28 | openai_message["function_call"] = { 29 | "name": new_delta["function_call"]["name"] 30 | } 31 | await content_ui_message.send() 32 | function_ui_message = cl.Message( 33 | author=new_delta["function_call"]["name"], 34 | content="", 35 | indent=1, 36 | language="json", 37 | ) 38 | await function_ui_message.stream_token(new_delta["function_call"]["name"]) 39 | 40 | if "arguments" in new_delta["function_call"]: 41 | if "arguments" not in openai_message["function_call"]: 42 | openai_message["function_call"]["arguments"] = "" 43 | openai_message["function_call"]["arguments"] += 
new_delta["function_call"][ 44 | "arguments" 45 | ] 46 | await function_ui_message.stream_token( 47 | new_delta["function_call"]["arguments"] 48 | ) 49 | return openai_message, content_ui_message, function_ui_message 50 | 51 | 52 | async def get_model_response(message_history): 53 | try: 54 | return await openai.ChatCompletion.acreate( 55 | model=MODEL_NAME, 56 | messages=message_history, 57 | functions=FUNCTIONS_SCHEMA, 58 | function_call=FUNCTION_CALL, 59 | temperature=MODEL_TEMPERATURE, 60 | stream=True, 61 | ) 62 | except Exception as e: 63 | print(f"Error getting model response: {e}") 64 | return None 65 | 66 | 67 | async def process_function_call(function_name, arguments, message_history): 68 | if function_name in FUNCTIONS_MAPPING: 69 | function_response = FUNCTIONS_MAPPING[function_name](**arguments) 70 | message_history.append( 71 | { 72 | "role": "function", 73 | "name": function_name, 74 | "content": function_response, 75 | } 76 | ) 77 | await send_response(function_name, function_response) 78 | else: 79 | print(f"Unknown function: {function_name}") 80 | 81 | 82 | async def send_response(function_name, function_response): 83 | await cl.Message( 84 | author=function_name, 85 | content=str(function_response), 86 | language="json", 87 | indent=1, 88 | ).send() 89 | 90 | 91 | async def send_user_message(message): 92 | await cl.Message( 93 | author=message["role"], 94 | content=message["content"], 95 | ).send() 96 | 97 | 98 | @cl.on_chat_start 99 | def start_chat(): 100 | cl.user_session.set( 101 | "message_history", 102 | [{"role": "system", "content": "You are a helpful AI assistant"}], 103 | ) 104 | 105 | 106 | @cl.on_message 107 | async def run_conversation(user_message: str): 108 | message_history = cl.user_session.get("message_history") 109 | message_history.append({"role": "user", "content": user_message}) 110 | 111 | cur_iter = 0 112 | MAX_ITER = 5 113 | 114 | while cur_iter < MAX_ITER: 115 | openai_message = {"role": "", "content": ""} 116 | 
function_ui_message = None 117 | content_ui_message = cl.Message(content="") 118 | 119 | # Stream the responses from OpenAI 120 | model_response = await get_model_response(message_history) 121 | async for stream_resp in model_response: 122 | if "choices" not in stream_resp: 123 | print(f"No choices in response: {stream_resp}") 124 | return 125 | 126 | new_delta = stream_resp.choices[0]["delta"] 127 | ( 128 | openai_message, 129 | content_ui_message, 130 | function_ui_message, 131 | ) = await process_new_delta( 132 | new_delta, openai_message, content_ui_message, function_ui_message 133 | ) 134 | 135 | # Append message to history 136 | message_history.append(openai_message) 137 | if function_ui_message is not None: 138 | await function_ui_message.send() 139 | 140 | # Function call 141 | if openai_message.get("function_call"): 142 | function_name = openai_message.get("function_call").get("name") 143 | arguments = ast.literal_eval( 144 | openai_message.get("function_call").get("arguments") 145 | ) 146 | 147 | await process_function_call(function_name, arguments, message_history) 148 | 149 | function_response = message_history[-1] 150 | # await cl.Message( 151 | # author=function_name, 152 | # content=str(function_response), 153 | # language="json", 154 | # indent=1, 155 | # ).send() 156 | 157 | cur_iter += 1 158 | else: 159 | break 160 | -------------------------------------------------------------------------------- /chainlit.md: -------------------------------------------------------------------------------- 1 | # Streaming chatbot with OpenAI functions 2 | 3 | This chatbot utilizes OpenAI's function calling feature to invoke appropriate functions based on user input and stream the response back. 4 | 5 | On top of the standard chat interface, the UI exposes the particular function called along with its arguments, as well as the response from the function. 
6 | 7 | **The current configuration defines two OpenAI functions that can be called**: 8 | - `get_current_weather`: returns the current weather for a given location. Example input: `What's the weather like in New York?` 9 | - Note that the API returns temperature in Celsius by default. The time zone is set for Europe/Berlin, but this can be changed in `openai_functions.py` 10 | 11 | - `get_search_results`: A langchain agent that uses SERP API as a tool to search the web. Example input: `Search the web for the best restaurants in Berlin` -------------------------------------------------------------------------------- /images/example_response.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dissorial/Chainlit-OpenAI-Functions/a0d6beeb961ff8cb1fc265dba553933e6a84d8a5/images/example_response.png -------------------------------------------------------------------------------- /images/example_response_functions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dissorial/Chainlit-OpenAI-Functions/a0d6beeb961ff8cb1fc265dba553933e6a84d8a5/images/example_response_functions.png -------------------------------------------------------------------------------- /openai_function_schemas.py: -------------------------------------------------------------------------------- 1 | FUNCTIONS_SCHEMA = [ 2 | { 3 | "name": "get_search_results", 4 | "description": "Used to get search results when the user asks for it", 5 | "parameters": { 6 | "type": "object", 7 | "properties": { 8 | "query": { 9 | "type": "string", 10 | "description": "The query to search for", 11 | } 12 | }, 13 | }, 14 | }, 15 | { 16 | "name": "get_current_weather", 17 | "description": "Get the current weather for a location", 18 | "parameters": { 19 | "type": "object", 20 | "properties": { 21 | "longitude": { 22 | "type": "number", 23 | "description": "The approximate longitude of the 
location", 24 | }, 25 | "latitude": { 26 | "type": "number", 27 | "description": "The approximate latitude of the location", 28 | }, 29 | }, 30 | "required": ["longitude", "latitude"], 31 | }, 32 | }, 33 | # other functions 34 | ] 35 | -------------------------------------------------------------------------------- /openai_functions.py: -------------------------------------------------------------------------------- 1 | from langchain.agents import initialize_agent, AgentType, Tool 2 | from langchain import SerpAPIWrapper 3 | from langchain.chat_models import ChatOpenAI 4 | import requests 5 | import json 6 | import os 7 | 8 | 9 | class OpenAIFunctions: 10 | @staticmethod 11 | def get_current_weather(longitude, latitude): 12 | """Get the current weather for a location""" 13 | try: 14 | url = "https://api.open-meteo.com/v1/forecast" 15 | params = { 16 | "latitude": latitude, 17 | "longitude": longitude, 18 | "current_weather": "true", 19 | "timezone": "Europe/Berlin", 20 | } 21 | response = requests.get(url, params=params) 22 | response.raise_for_status() 23 | data = response.json() 24 | return json.dumps(data["current_weather"]) 25 | except requests.exceptions.HTTPError as http_err: 26 | print(f"HTTP error occurred: {http_err}") 27 | except Exception as err: 28 | print(f"Other error occurred: {err}") 29 | return json.dumps({"error": "Failed to get weather"}) 30 | 31 | @staticmethod 32 | def get_search_results(query): 33 | """Get search results for a query""" 34 | try: 35 | llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo-0613") 36 | search = SerpAPIWrapper( 37 | serpapi_api_key=os.getenv("SERPAPI_API_KEY"), 38 | ) 39 | tools = [ 40 | Tool( 41 | name="Search", 42 | func=search.run, 43 | description="useful for when you need to answer questions about current events. 
You should ask targeted questions", 44 | ) 45 | ] 46 | agent = initialize_agent( 47 | tools, llm, agent=AgentType.OPENAI_FUNCTIONS, verbose=True 48 | ) 49 | res = agent.run(query) 50 | return json.dumps(res) 51 | except Exception as e: 52 | print(f"Error getting search results: {e}") 53 | return json.dumps({"error": "Failed to get search results"}) 54 | 55 | 56 | FUNCTIONS_MAPPING = { 57 | "get_search_results": OpenAIFunctions.get_search_results, 58 | "get_current_weather": OpenAIFunctions.get_current_weather, 59 | } 60 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | chainlit==0.5.2 2 | langchain==0.0.230 3 | openai==0.27.8 4 | python-dotenv==1.0.0 5 | Requests==2.31.0 --------------------------------------------------------------------------------