├── .github
│   └── workflows
│       └── tests.yml
├── .gitignore
├── .readthedocs.yaml
├── LICENSE
├── README.md
├── docs
│   ├── Makefile
│   ├── api.rst
│   ├── conf.py
│   ├── conversation.md
│   ├── index.rst
│   ├── introduction.md
│   ├── make.bat
│   ├── nlp_interface.md
│   ├── requirements.txt
│   └── skills.md
├── openai_functions
│   ├── __init__.py
│   ├── conversation.py
│   ├── exceptions.py
│   ├── functions
│   │   ├── __init__.py
│   │   ├── basic_set.py
│   │   ├── functions.py
│   │   ├── sets.py
│   │   ├── togglable_set.py
│   │   ├── union.py
│   │   └── wrapper.py
│   ├── json_type.py
│   ├── nlp.py
│   ├── openai_types.py
│   ├── parsers
│   │   ├── __init__.py
│   │   ├── abc.py
│   │   ├── atomic_type_parser.py
│   │   ├── bool_parser.py
│   │   ├── dataclass_parser.py
│   │   ├── default.py
│   │   ├── dict_parser.py
│   │   ├── enum_parser.py
│   │   ├── float_parser.py
│   │   ├── int_parser.py
│   │   ├── list_parser.py
│   │   ├── none_parser.py
│   │   ├── str_parser.py
│   │   └── union_parser.py
│   └── py.typed
├── poetry.lock
├── pyproject.toml
├── tests
│   ├── __init__.py
│   ├── test_basic_set.py
│   ├── test_conversation.py
│   ├── test_function_wrapper.py
│   └── test_skills.py
└── tox.ini

/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
name: Tests

on:
  - push
  - pull_request

jobs:
  test:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest]
        python-version: ['3.8', '3.9', '3.10', '3.11']

    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox tox-gh-actions coveralls
      - name: Test with tox
        run: tox
      - name: Upload coverage report
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: coveralls --service=github

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
#   For a library or package, you might want to ignore these files since the code is
#   intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
#   However, in case of collaboration, if having platform-specific dependencies or dependencies
#   having no cross-platform support, pipenv may install dependencies that don't work, or not
#   install all needed dependencies.
#Pipfile.lock

# poetry
#   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
#   This is especially recommended for binary packages to ensure reproducibility, and is more
#   commonly ignored for libraries.
#   https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
#   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
#   pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
#   in version control.
#   https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
#  JetBrains specific template is maintained in a separate JetBrains.gitignore that can
#  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
#  and can be added to the global gitignore or merged into this file.  For a more nuclear
#  option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
version: 2

build:
  os: ubuntu-22.04
  tools:
    python: "3.11"

sphinx:
  configuration: docs/conf.py

python:
  install:
    - requirements: docs/requirements.txt
    - method: pip
      path: .

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2023 rizerphe

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# OpenAI functions

The `openai-functions` Python project simplifies the usage of OpenAI's ChatGPT [function calling](https://platform.openai.com/docs/guides/gpt/function-calling) feature. It abstracts away the complexity of parsing function signatures and docstrings by providing developers with a clean and intuitive interface.

![Tests](https://github.com/rizerphe/openai-functions/actions/workflows/tests.yml/badge.svg) [![Coverage Status](https://coveralls.io/repos/github/rizerphe/openai-functions/badge.svg?branch=main)](https://coveralls.io/github/rizerphe/openai-functions?branch=main) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) [![PyPI version](https://badge.fury.io/py/openai-functions.svg)](https://badge.fury.io/py/openai-functions) [![Documentation Status](https://readthedocs.org/projects/openai-functions/badge/?version=latest)](https://openai-functions.readthedocs.io/en/latest/?badge=latest)

## Installation

You can install `openai-functions` from PyPI using pip:

```
pip install openai-functions
```

## Usage

1. Import the necessary modules and provide your API key:

```python
import enum
import openai
from openai_functions import Conversation

openai.api_key = ""
```

2. Create a `Conversation` instance:

```python
conversation = Conversation()
```

3. Define your functions using the `@conversation.add_function` decorator:

```python
class Unit(enum.Enum):
    FAHRENHEIT = "fahrenheit"
    CELSIUS = "celsius"

@conversation.add_function()
def get_current_weather(location: str, unit: Unit = Unit.FAHRENHEIT) -> dict:
    """Get the current weather in a given location.

    Args:
        location (str): The city and state, e.g., San Francisco, CA
        unit (Unit): The unit to use, e.g., fahrenheit or celsius
    """
    return {
        "location": location,
        "temperature": "72",
        "unit": unit.value,
        "forecast": ["sunny", "windy"],
    }
```

4. Ask the AI a question:

```python
response = conversation.ask("What's the weather in San Francisco?")
# Should return something like:
# The current weather in San Francisco is 72 degrees Fahrenheit and it is sunny and windy.
```

You can read more about how to use `Conversation` [here](https://openai-functions.readthedocs.io/en/latest/conversation.html).

## More barebones use - just schema generation and result parsing

```python
from openai_functions import FunctionWrapper

wrapper = FunctionWrapper(get_current_weather)
schema = wrapper.schema
result = wrapper({"location": "San Francisco, CA"})
```

Or you could use [skills](https://openai-functions.readthedocs.io/en/latest/skills.html).

## Another use case: data extraction

1. Import the necessary modules and provide your API key:

```python
from dataclasses import dataclass
import openai
from openai_functions import nlp

openai.api_key = ""
```

2. Define your data container using the `@nlp` decorator:

```python
@nlp
@dataclass
class Person:
    """Extract personal info"""

    name: str
    age: int
```

3. Ask the AI for the extracted data:

```python
person = Person.from_natural_language("I'm Jack and I'm 20 years old.")
```

You can read more about `@nlp` [here](https://openai-functions.readthedocs.io/en/latest/nlp_interface.html).

Note: mypy does not parse class decorators ([#3135](https://github.com/python/mypy/issues/3135)), so you might have trouble getting type checking when using it like this. Consider using something like `nlp(Person).from_natural_language` to get proper type support.

## How it Works

`openai-functions` takes care of the following tasks:

- Parsing the function signatures (with type annotations) and docstrings.
- Sending the conversation and function descriptions to the OpenAI model.
- Deciding whether to call a function based on the model's response.
- Calling the appropriate function with the provided arguments.
- Updating the conversation with the function response.
- Repeating the process until the model generates a user-facing message.

This abstraction allows developers to focus on defining their functions and adding user messages without worrying about the details of function calling.

## Note

Please note that `openai-functions` is an unofficial project not maintained by OpenAI. Use it at your discretion.

--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = .
BUILDDIR      = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
API Reference
=============

.. automodule:: openai_functions
   :members:

--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

import os
import sys
sys.path.insert(0, os.path.abspath('..'))

# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information

project = 'openai-functions'
copyright = '2023, rizerphe'
author = 'rizerphe'

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration

extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon', "myst_parser"]

templates_path = ['_templates']
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = 'sphinx_rtd_theme'
html_theme_path = ["_themes", ]
html_static_path = ['_static']

--------------------------------------------------------------------------------
/docs/conversation.md:
--------------------------------------------------------------------------------
# Conversations

For assistant-type applications, `Conversation` is the most intuitive tool. It allows you to store messages and generate new ones, either by querying the AI or by running a function you provide.

A conversation contains two things:

- the messages in the conversation.
- a skill (composed from a list of [skills](skills) and a list of [`OpenAIFunction`](openai_functions.OpenAIFunction)s inherent to the conversation)

When initializing the conversation, you can pass in the list of skills and the model to use (and your deployment ID if you're using Azure).

## Managing messages

The main feature of a conversation is its management of messages.
You can either use `conversation.messages` (which is a list of objects adhering to the [GenericMessage](openai_functions.GenericMessage) protocol - use [Message](openai_functions.Message) to create your own) to access them directly, or you can use these:

```python
conversation.add_message(Message("Hi there", role="user"))  # "system", "user", "assistant"
conversation.add_message({
    "role": "assistant",
    "content": "Hello! How can I assist you today?"
})  # Will be converted to a Message object automatically
conversation.add_message('Say "no"')  # Will be turned into a user message by default
conversation.add_messages([Message("No.", "assistant"), "Oh ok"])  # Adding several at once
conversation.pop_message(0)  # Delete the first one
conversation.clear_messages()
```

## Managing skills

A conversation also includes the skills - the functions the AI can call. You can either provide your skills when creating the conversation or add skills/functions like this:

```python
conversation.add_skill(skill)
conversation.add_function(openai_function)

@conversation.add_function
def my_awesome_function(...):
    ...

@conversation.add_function(
    name="my_really_amazing_function",
    description="The most amazing function of them all",
    save_return=True,
    serialize=False,
    remove_call=False,
    interpret_as_response=False
)
def my_amazing_function():
    return ""

conversation.remove_function(openai_function)
conversation.remove_function(my_awesome_function)
conversation.remove_function("my_amazing_function")
```

All of the keyword arguments passed to `add_function` are optional; most of them are the same as those an [OpenAIFunction](openai_functions.OpenAIFunction) inherently has:

- `name` - the overwritten function name, otherwise will default to the function name
- `description` - the overwritten function description sent to the AI - will use the description from the docstring by default
- `save_return` - whether to send the return value of the function back to the AI; some functions - mainly those that don't return anything - don't need to do this
- `serialize` - whether to serialize the function's return value before sending the result back to the AI; OpenAI expects the function result to be a string. If this is False, `str()` will be run on the return value; if True, JSON serialization is used, so the return value needs to be JSON-serializable
- `remove_call` - whether to remove the function call message itself from the history; be careful to avoid infinite loops when using this with `save_return=False` - the function should then, for example, disappear from the schema, and it's your responsibility to make sure this happens
- `interpret_as_response` - whether to interpret the return value of the function (the serialized one if `serialize` is set to True) as the response from the AI

You can read more about how to use skills [here](skills).

## Generating messages

The point of a conversation is to use the AI to generate responses. The easiest way to do this is through the `ask` method:

```python
response = conversation.ask("Your input data")
```

The tool will then repeatedly get responses from OpenAI and run your functions until a full response is generated.
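
To see what `ask` actually did along the way - the user message, any function call and function result messages, and the final answer - you can inspect the stored messages afterwards. A small illustrative sketch (using the weather function from the introduction; the exact messages depend on the model):

```python
response = conversation.ask("What's the weather in San Francisco?")
for message in conversation.messages:
    # Each stored message adheres to the GenericMessage protocol
    print(message.as_dict())
# Roughly:
# {"role": "user", "content": "What's the weather in San Francisco?"}
# {"role": "assistant", "content": None, "function_call": {...}}
# {"role": "function", "name": "get_current_weather", "content": "{...}"}
# {"role": "assistant", "content": "The current weather in San Francisco is..."}
```
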
Alternatively, if you don't want to add another message to the conversation, you can use the `run_until_response` method that returns a [FinalResponseMessage](openai_functions.FinalResponseMessage) object:

```python
generated_message = conversation.run_until_response()
further_comment = conversation.run_until_response(allow_function_calls=False)
```

If you want to use the conversation to run a specific function more directly and get the execution result, you can use the `run` method, optionally also providing another message:

```python
raw_weather_result = conversation.run("get_weather", "What's the weather in San Francisco?")
```

However, for most use cases [@nlp](nlp_interface) should be sufficient; consider using it.

Note: watch out for incomplete or invalid responses from OpenAI - currently they do not validate the outputs, and the generation might cut off in the middle of the JSON output. If either of these happens, the tool will raise either [BrokenSchemaError](openai_functions.BrokenSchemaError) or [InvalidJsonError](openai_functions.InvalidJsonError).

--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
Welcome to openai-functions's documentation!
============================================


The ``openai-functions`` library simplifies the usage of OpenAI's function calling feature. It abstracts away the complexity of parsing function signatures and docstrings by providing developers with a clean and intuitive interface.

Where to start
--------------

Whichever path you choose, you'll first want to install ``openai-functions``:

.. code-block:: bash

   pip install openai-functions

There are three main ways of using it after that:

* To use the AI as an assistant, start by reading the :ref:`your-first-conversation` section.
* To use the AI for direct function calls, e.g. for data extraction, read the :ref:`extracting-data` section.
* To just use the AI to generate schemas and parse OpenAI function calls, read the :ref:`just-generating-the-schemas` section.

.. toctree::
   :maxdepth: 2
   :caption: Table of Contents:

   introduction
   conversation
   nlp_interface
   skills
   api

* :ref:`genindex`
* :ref:`search`

--------------------------------------------------------------------------------
/docs/introduction.md:
--------------------------------------------------------------------------------
# Introduction

The `openai-functions` Python project simplifies the usage of OpenAI's function calling feature. It abstracts away the complexity of parsing function signatures and docstrings by providing developers with a clean and intuitive interface.

`openai-functions` takes care of the following tasks:

- Parsing the function signatures (with type annotations) and docstrings.
- Sending the conversation and function descriptions to the OpenAI model.
- Deciding whether to call a function based on the model's response.
- Calling the appropriate function with the provided arguments.
- Updating the conversation with the function response.
- Repeating the process until the model generates a user-facing message.

This abstraction allows developers to focus on defining their functions and adding user messages without worrying about the details of function calling.
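
For a sense of what is being abstracted away, here is a rough sketch of that same loop written by hand against the `openai` 0.x `ChatCompletion` API. The `FUNCTION_SCHEMAS` list is a placeholder for hand-written JSON schemas, and the `get_current_weather` call stands in for dispatching to whichever function the model named - neither is part of this library:

```python
import json

import openai

FUNCTION_SCHEMAS = [...]  # hand-written JSON schemas, one per function
messages = [{"role": "user", "content": "What's the weather in San Francisco?"}]

while True:
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-0613",
        messages=messages,
        functions=FUNCTION_SCHEMAS,
        function_call="auto",
    )
    message = response["choices"][0]["message"]
    messages.append(message)
    if not message.get("function_call"):
        break  # the model produced a user-facing message; we're done
    # Otherwise: parse the arguments, run the function, report the result back
    arguments = json.loads(message["function_call"]["arguments"])
    result = get_current_weather(**arguments)  # dispatch by name in real code
    messages.append(
        {
            "role": "function",
            "name": message["function_call"]["name"],
            "content": json.dumps(result),
        }
    )
```
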

# Quickstart

## Installation

You can install `openai-functions` from PyPI using pip:

```
pip install openai-functions
```

Now, there are **three ways you can use this** - start with just one:

- For managing conversations, use the [conversational](#your-first-conversation) interface
- For data extraction and other cases where you work with just one function, use the [data extraction](extracting-data) interface
- For just generating schemas and parsing call results, nothing more, use [raw schema generation](just-generating-the-schemas)

(your-first-conversation)=
## Your first conversation

The easiest way to use `openai-functions` is through the [conversation](conversation) interface. For that, you first import all of the necessary modules and initialize openai with your API key:

```python
import enum
import openai
from openai_functions import Conversation

openai.api_key = ""
```

Then, we can create a [conversation](openai_functions.Conversation).

```python
conversation = Conversation()
```

A conversation contains our and the AI's messages, the functions we provide, and a set of methods for calling the AI with our functions. Now, we can add our functions to the conversation using the `@conversation.add_function` decorator to make them available to the AI:

```python
class Unit(enum.Enum):
    FAHRENHEIT = "fahrenheit"
    CELSIUS = "celsius"

@conversation.add_function()
def get_current_weather(location: str, unit: Unit = Unit.FAHRENHEIT) -> dict:
    """Get the current weather in a given location.

    Args:
        location (str): The city and state, e.g., San Francisco, CA
        unit (Unit): The unit to use, e.g., fahrenheit or celsius
    """
    return {
        "location": location,
        "temperature": "72",
        "unit": unit.value,
        "forecast": ["sunny", "windy"],
    }
```

Note that the function _must_ have type annotations for all arguments, including extended type annotations for lists/dictionaries (for example, `list[int]` and not just `list`); otherwise the tool won't be able to generate a schema. Our conversation is now ready for function calling. The easiest way to do so is through the `conversation.ask` method. This method will repeatedly ask the AI for a response, running function calls, until the AI responds with text to return:

```python
response = conversation.ask("What's the weather in San Francisco?")
# Should return something like:
# The current weather in San Francisco is 72 degrees Fahrenheit and it is sunny and windy.
```

The AI will probably (nobody can say for sure) then return a function call with the arguments of `{"location": "San Francisco, CA"}`, which will get translated to `get_current_weather("San Francisco, CA")`. The function response will be serialized and sent back to the AI, and the AI will return a text description.
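
Because the `Conversation` object keeps the full message history, follow-up questions can refer back to earlier answers - a small illustration (the exact wording of the replies, as always, depends on the model):

```python
followup = conversation.ask("And what would that be in celsius?")
# Something like:
# The current temperature in San Francisco is about 22 degrees Celsius.
```
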
You can read more about how to work with conversations [here](conversation).

(extracting-data)=
## Extracting data

There are two common uses for function calls: assistant-type applications, which is what conversations are for, and data extraction, where you force the AI to call a specific function and fill in the arguments. We have the [nlp interface](nlp_interface) for data extraction. It acts as a decorator, turning a function (or a class, including a dataclass) into a [wrapper](openai_functions.Wrapper) object, exposing methods for calling a function with natural language and annotating the call result with an AI response. To use it, you first import all of the necessary modules and initialize openai with your API key:

```python
from dataclasses import dataclass
import openai
from openai_functions import nlp

openai.api_key = ""
```

Then, we define our callable (a function or a class) to call when extracting data:

```python
@nlp
@dataclass
class Person:
    """Extract personal info"""

    name: str
    age: int
```

Note that the callable _must_ have type annotations for all arguments (the arguments of the function itself or of the class constructor), and this includes extended type annotations for lists/dictionaries (for example, `list[int]` and not just `list`); otherwise the tool won't be able to generate a schema. Also, mypy does not parse class decorators ([#3135](https://github.com/python/mypy/issues/3135)), so you might have trouble getting type checking when using it like this. When working with classes, including dataclasses, consider using `nlp(Person)` directly to get proper type support.

This sets `Person` to a [wrapper](openai_functions.Wrapper), which allows us to call `Person.from_natural_language` for data extraction:

```python
person = Person.from_natural_language("I'm Jack and I'm 20 years old.")
# Person(name="Jack", age=20)
# (probably, it's not reproducible with temperature > 0)
```

The tool will call the AI, telling it to call the function `Person`. It will then generate a function call with the arguments of `{"name": "Jack", "age": 20}`, and the tool will parse it and call `Person(name="Jack", age=20)`, returning the result of this call. You can read more about how to work with `@nlp` [here](nlp_interface).

(just-generating-the-schemas)=
## Just generating the schemas

If you want to generate the schemas, you can use a [FunctionWrapper](openai_functions.FunctionWrapper):

```python
from openai_functions import FunctionWrapper

wrapper = FunctionWrapper(get_current_weather)
schema = wrapper.schema
result = wrapper({"location": "San Francisco, CA"})
```

This creates an object that can both return you a schema of a function and provide the function with properly parsed arguments. Another tool is a [FunctionSet](openai_functions.BasicFunctionSet) that allows you to aggregate multiple functions into one schema:

```python
from openai_functions import BasicFunctionSet
import enum

skill = BasicFunctionSet()

class Unit(enum.Enum):
    FAHRENHEIT = "fahrenheit"
    CELSIUS = "celsius"

@skill.add_function
def get_current_weather(location: str, unit: Unit = Unit.FAHRENHEIT) -> dict:
    """Get the current weather in a given location.

    Args:
        location (str): The city and state, e.g., San Francisco, CA
        unit (Unit): The unit to use, e.g., fahrenheit or celsius
    """
    return {
        "location": location,
        "temperature": "72",
        "unit": unit.value,
        "forecast": ["sunny", "windy"],
    }

@skill.add_function
def set_weather(location: str, weather_description: str):
    ...

schema = skill.functions_schema
```

This then generates the schema for your functions.
This is what `schema` looks like:

```json
[
  {
    "name": "get_current_weather",
    "parameters": {
      "type": "object",
      "properties": {
        "location": {
          "type": "string",
          "description": "The city and state, e.g., San Francisco, CA"
        },
        "unit": {
          "type": "string",
          "enum": ["FAHRENHEIT", "CELSIUS"],
          "description": "The unit to use, e.g., fahrenheit or celsius"
        }
      },
      "required": ["location"]
    },
    "description": "Get the current weather in a given location."
  },
  {
    "name": "set_weather",
    "parameters": {
      "type": "object",
      "properties": {
        "location": {
          "type": "string"
        },
        "weather_description": {
          "type": "string"
        }
      },
      "required": ["location", "weather_description"]
    }
  }
]
```

You can now call the functions directly using the function calls OpenAI returns:

```python
weather = skill(
    {"name": "get_current_weather", "arguments": '{"location": "San Francisco, CA"}'}
)
```

This then calls the relevant function.
`weather` is then just the raw return value of it, in this case:

```json
{
  "location": "San Francisco, CA",
  "temperature": "72",
  "unit": "fahrenheit",
  "forecast": ["sunny", "windy"]
}
```
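
Since the arguments string comes straight from the model, it can name a missing function, be cut off, or fail to match the schema. A minimal sketch of guarding a skill call against that, using the exceptions this library exports (the call payload here is just the example from above):

```python
from openai_functions import (
    BrokenSchemaError,
    FunctionNotFoundError,
    InvalidJsonError,
)

try:
    weather = skill(
        {"name": "get_current_weather", "arguments": '{"location": "San Francisco, CA"}'}
    )
except FunctionNotFoundError:
    ...  # the model asked for a function this skill does not have
except (InvalidJsonError, BrokenSchemaError):
    ...  # the arguments were cut off or did not match the schema
```
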

You can read more about how to work with skills [here](skills).

--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.https://www.sphinx-doc.org/
	exit /b 1
)

if "%1" == "" goto help

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd

--------------------------------------------------------------------------------
/docs/nlp_interface.md:
--------------------------------------------------------------------------------
# @nlp

[nlp](openai_functions.nlp) lets you wrap any callable that takes in annotated arguments (it works pretty well with dataclasses, too), returning a [Wrapper](openai_functions.Wrapper) around it. This allows you to:

- Call a function or initialize an object:

```python
return_value = nlp(callable).from_natural_language("Your natural language input")
```

- Get a natural language response for the functions that return JSON-serializable output:

```python
response = nlp(callable).natural_language_response("Your natural language input")
```

- For JSON-serializable functions, annotate responses to natural language prompts with natural language responses (returns a [NaturalLanguageAnnotated](openai_functions.NaturalLanguageAnnotated) object):

```python
annotated = nlp(callable).natural_language_annotated("Your natural language input")
natural_language_response = annotated.annotation
raw_function_result = annotated.function_result
```

`@nlp` was designed to be used as a decorator:

```python
@nlp
def get_current_weather(location: str):
    ...

@nlp(
    name="set_current_weather",
    description="Set the current weather for a given location",
    serialize=False,
    system_prompt="You're an AI capable of changing the weather.",
    model="gpt-4-0613",
    engine=None
)
def set_current_weather(location: str, description: str):
    return "Set the weather successfully"
```
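
Once decorated like this, both functions are [Wrapper](openai_functions.Wrapper) objects, so the natural-language entry points shown at the top of this page apply to them directly; a quick sketch (the model's exact behavior will vary):

```python
result = set_current_weather.from_natural_language(
    "Make it sunny in San Francisco!"
)
# "Set the weather successfully"
```
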

Note: watch out for incomplete or invalid responses from OpenAI - currently they do not validate the outputs, and the generation might cut off in the middle of the JSON output. If either of these happens, the tool will raise either [BrokenSchemaError](openai_functions.BrokenSchemaError) or [InvalidJsonError](openai_functions.InvalidJsonError).

The parameters `@nlp` takes are:

- `name` - the name of the function sent to the AI, defaulting to the function name itself
- `description` - the description of the function sent to the AI, defaults to getting the short description from the function's docstring
- `serialize` - whether to serialize the function's return value before sending the result back to the AI; OpenAI expects the function result to be a string, so if this is False, the result of the function execution should be a string. If True, JSON serialization is used, so the function return needs to be JSON-serializable
- `save_return` - whether to send the return value of the function back to the AI; some functions - mainly those that don't return anything - don't need to do this
- `system_prompt` - if provided, when asking the AI, the conversation will start with this system prompt, letting you modify the behavior of the model
- `model` - the model to use; currently (July 1st 2023) only `gpt-3.5-turbo-0613`, `gpt-3.5-turbo-16k-0613` and `gpt-4-0613` are supported
- `engine` - if, for example, you're using Azure's OpenAI service, your deployment name

Note: mypy does not parse class decorators ([#3135](https://github.com/python/mypy/issues/3135)), so you might have trouble getting type checking when using it like a decorator for a dataclass.

--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
sphinx==6.2.1
sphinx-rtd-theme==1.2.2
myst-parser==2.0.0

--------------------------------------------------------------------------------
/docs/skills.md:
--------------------------------------------------------------------------------
# Skills

A skill allows you to combine several functions into one object, generate schemas for all those functions, and then call the function the AI requests. The most basic skill is defined with a [BasicFunctionSet](openai_functions.BasicFunctionSet) - it is just a function container. Here's an example of its usage:

```python
skill = BasicFunctionSet()

@skill.add_function
def get_current_weather(location: str) -> dict:
    ...

@skill.add_function(
    name="set_weather_but_for_real",
    description="Set the weather or something",
    save_return=True,
    serialize=False,
    remove_call=False,
    interpret_as_response=True
)
def set_weather(location: str, weather_description: str):
    ...

schema = skill.functions_schema
```

The parameters here are:

- `name` - the name of the function sent to the AI, defaulting to the function name itself
- `description` - the description of the function sent to the AI, defaults to getting the short description from the function's docstring
- `save_return` - whether to send the return value of the function back to the AI; some functions - mainly those that don't return anything - don't need to do this
- `serialize` - whether to serialize the function's return value before sending the result back to the AI; OpenAI expects the function result to be a string. If this is False, `str()` will be run on the return value;
if True, JSON serialization is used, so the return value needs to be JSON-serializable
- `remove_call` - whether to remove the function call message itself from the history; be careful to avoid infinite loops when using this with `save_return=False` - the function should then, for example, disappear from the schema, and it's your responsibility to make sure this happens
- `interpret_as_response` - whether to interpret the return value of the function (the serialized one if `serialize` is set to True) as the response from the AI

`schema` will be a list of JSON objects ready to be sent to OpenAI. You can then call your functions directly with the response returned from OpenAI:

```python
weather = skill(
    {"name": "get_current_weather", "arguments": '{"location": "San Francisco, CA"}'}
)
```

When invalid JSON is passed in for the arguments - either because the output doesn't adhere to the schema or because it isn't valid JSON at all (both of which can be caused by OpenAI) - the tool will raise either [BrokenSchemaError](openai_functions.BrokenSchemaError) or [InvalidJsonError](openai_functions.InvalidJsonError).

## Union skills

A more advanced one is a [union skillset](openai_functions.UnionSkillSet) that combines others. It exposes one new method:

```python
union_skill.add_skill(skill)
```

It still supports everything a [BasicFunctionSet](openai_functions.BasicFunctionSet) does, though; it can have a few functions inherent to it while still combining the other skillsets.

## Togglable skillset

Specifically for applications where there are a lot of functions that can be separated into categories (e.g. general assistant applications), there's [TogglableSet](openai_functions.TogglableSet). It allows you to give the AI a function that _enables_ a set of other functions:

```python
skill = TogglableSet("enable_email", "Enable the email features")

@skill.add_function
def send_email(recipient_address: str, content: str):
    """Send an email

    Args:
        recipient_address (str): The email address
        content (str): The full text content of the email
    """
    return "Sent successfully"

@skill.add_function
def list_emails():
    return [{"from": "jack@example.com", "content": "Wanna come over?"}]
```

The AI is then expected to run the `enable_email` function first, just to get to the point of knowing what the other functions are.

## Developing your own

Skills are extensible; you can build your own by inheriting from the [FunctionSet](openai_functions.FunctionSet) base class. You then have to provide these methods and properties:

- `functions_schema` - the schema of the functions; a list of JSON objects
- `run_function(input_data)` - runs the function and returns the result; takes in the raw dictionary retrieved from OpenAI. Should raise [FunctionNotFoundError](openai_functions.FunctionNotFoundError) if there isn't a function with this name in the skillset

You can also inherit from [MutableFunctionSet](openai_functions.MutableFunctionSet), which greatly simplifies adding and removing functions from the skill. Then, you have to define two additional methods:

- `_add_function(function)` - adds an [OpenAIFunction](openai_functions.OpenAIFunction) to the skill
- `_remove_function(name)` - takes in a string and deletes the function with that name

Your skill will then have the `@skill.add_function` decorator available.
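
As an illustration, here is a minimal sketch of a custom skill that satisfies this interface by delegating to an internal [BasicFunctionSet](openai_functions.BasicFunctionSet) and logging every call. The delegation details (particularly the constructor behavior) are assumptions about the API surface rather than canonical usage, so treat this as a starting point:

```python
from typing import Any

from openai_functions import BasicFunctionSet, MutableFunctionSet, OpenAIFunction


class LoggingSkill(MutableFunctionSet):
    """A sketch: wraps a BasicFunctionSet and logs every function call."""

    def __init__(self) -> None:
        super().__init__()
        self._inner = BasicFunctionSet()

    @property
    def functions_schema(self) -> list:
        return self._inner.functions_schema

    def run_function(self, input_data: Any) -> Any:
        # Raises FunctionNotFoundError for unknown functions, as required
        print(f"AI requested function: {input_data['name']}")
        return self._inner.run_function(input_data)

    def _add_function(self, function: OpenAIFunction) -> None:
        self._inner.add_function(function)

    def _remove_function(self, name: str) -> None:
        self._inner.remove_function(name)
```
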

--------------------------------------------------------------------------------
/openai_functions/__init__.py:
--------------------------------------------------------------------------------
"""ChatGPT function calling based on function docstrings."""
from .conversation import Conversation
from .exceptions import (
    BrokenSchemaError,
    CannotParseTypeError,
    FunctionNotFoundError,
    InvalidJsonError,
    NonSerializableOutputError,
    OpenAIFunctionsError,
)
from .functions import (
    BasicFunctionSet,
    FunctionResult,
    FunctionSet,
    FunctionWrapper,
    MutableFunctionSet,
    OpenAIFunction,
    RawFunctionResult,
    TogglableSet,
    UnionSkillSet,
    WrapperConfig,
)
from .nlp import NaturalLanguageAnnotated, Wrapper, nlp
from .openai_types import FinalResponseMessage, FunctionCall, GenericMessage, Message
from .parsers import ArgSchemaParser, defargparsers

__all__ = [
    "Conversation",
    "BrokenSchemaError",
    "CannotParseTypeError",
    "FunctionNotFoundError",
    "InvalidJsonError",
    "NonSerializableOutputError",
    "OpenAIFunctionsError",
    "BasicFunctionSet",
    "FunctionSet",
    "defargparsers",
    "ArgSchemaParser",
    "FunctionWrapper",
    "MutableFunctionSet",
    "OpenAIFunction",
    "FunctionResult",
    "RawFunctionResult",
    "TogglableSet",
    "UnionSkillSet",
    "WrapperConfig",
    "NaturalLanguageAnnotated",
    "nlp",
    "Wrapper",
    "FinalResponseMessage",
    "FunctionCall",
    "GenericMessage",
    "Message",
]

--------------------------------------------------------------------------------
/openai_functions/conversation.py:
--------------------------------------------------------------------------------
"""A module for running OpenAI functions"""
from __future__ import annotations
import time
from typing import Any, Callable, Literal, TYPE_CHECKING, overload

import openai
import openai.error

from .functions.union import UnionSkillSet
from .openai_types import (
    FinalResponseMessage,
    FinalResponseMessageType,
    ForcedFunctionCall,
    FunctionCallMessage,
    FunctionMessageType,
    GenericMessage,
    IntermediateResponseMessageType,
    Message,
    is_final_response_message,
)


if TYPE_CHECKING:
    from .json_type import JsonType
    from .openai_types import (
        FunctionCall,
        MessageType,
        NonFunctionMessageType,
        OpenAiFunctionCallInput,
    )
    from .functions.functions import OpenAIFunction
    from .functions.sets import FunctionResult, FunctionSet


class Conversation:
    """A class representing a single conversation with the AI

    Contains the messages sent and received, and the skillset used.
    """

    def __init__(
        self,
        skills: list[FunctionSet] | None = None,
        model: str = "gpt-3.5-turbo-0613",
        engine: str | None = None,
    ) -> None:
        self.messages: list[GenericMessage] = []
        self.skills = UnionSkillSet(*(skills or []))
        self.model = model
        self.engine = engine

    @property
    def functions_schema(self) -> list[JsonType]:
        """Get the functions schema for the conversation

        Returns:
            list[JsonType]: The functions schema
        """
        return self.skills.functions_schema

    def _add_message(self, message: GenericMessage) -> None:
        """Add a message

        Args:
            message (GenericMessage): The message
        """
        self.messages.append(message)

    def add_message(self, message: GenericMessage | MessageType | str) -> None:
        """Add a message to the end of the conversation

        Args:
            message (GenericMessage | MessageType | str): The message
        """
        if isinstance(message, GenericMessage):
            self._add_message(message)
        else:
            self._add_message(Message(message))

    def add_messages(self, messages: list[GenericMessage | MessageType]) -> None:
        """Add multiple messages to the end of the conversation

        Args:
            messages (list[GenericMessage | MessageType]): The messages
        """
        for message in messages:
            self.add_message(message)

    def pop_message(self, index: int = -1) -> GenericMessage:
        """Pop a message

        Args:
            index (int): The index. Defaults to -1.

        Returns:
            GenericMessage: The message
        """
        return self.messages.pop(index)

    def clear_messages(self) -> None:
        """Fully clear the messages, but keep the skillset"""
        self.messages = []

    @overload
    def _generate_message(
        self, function_call: ForcedFunctionCall, retries: int | None = 1
    ) -> IntermediateResponseMessageType:
        ...

    @overload
    def _generate_message(
        self, function_call: Literal["none"], retries: int | None = 1
    ) -> FinalResponseMessageType:
        ...

    @overload
    def _generate_message(
        self, function_call: Literal["auto"] = "auto", retries: int | None = 1
    ) -> NonFunctionMessageType:
        ...

    def _generate_message(
        self, function_call: OpenAiFunctionCallInput = "auto", retries: int | None = 1
    ) -> NonFunctionMessageType:
        """Generate a response, retrying if necessary

        Args:
            function_call (OpenAiFunctionCallInput): The function call.
            retries (int | None): The number of retries. Defaults to 1.
                Will retry indefinitely if None.

        Raises:
            openai.error.RateLimitError: If the rate limit is exceeded

        Returns:
            NonFunctionMessageType: The response
        """
        if retries is None:
            retries = -1
        while True:
            try:
                response = self._generate_raw_message(function_call)
            except openai.error.RateLimitError as error:
                if retries == 0:
                    raise
                retries -= 1
                time.sleep(self._retry_time_from_headers(error.headers))
            else:
                return response["choices"][0]["message"]  # type: ignore

    def _parse_retry_time(self, wait_for: str) -> float:
        """Parse the time returned by an x-ratelimit-reset-requests header

        Args:
            wait_for (str): The time

        Returns:
            float: The time to the next reset
        """
        return float(wait_for[:-1]) * {"s": 1, "m": 60, "h": 3600}[wait_for[-1]]

    def _retry_time_from_headers(self, headers: dict[str, str]) -> float:
        """Get the time returned by the headers of a 429 reply

        Args:
            headers (dict[str, str]): The headers of the reply

        Returns:
            float: The time to wait for before retrying
        """
        return self._parse_retry_time(headers["x-ratelimit-reset-requests"]) / int(
            headers["x-ratelimit-limit-requests"]
        )

    def _generate_raw_message(self, function_call: OpenAiFunctionCallInput) -> Any:
        """Generate a raw OpenAI response

        Args:
            function_call (OpenAiFunctionCallInput): The function call.

        Returns:
            The raw OpenAI response
        """
        return openai.ChatCompletion.create(
            engine=self.engine,
            model=self.model,
            messages=[message.as_dict() for message in self.messages],
            functions=self.functions_schema,
            function_call=function_call,
        )

    def remove_function_call(self, function_name: str) -> None:
        """Remove a function call from the messages, if it is the last message

        Args:
            function_name (str): The function name
        """
        if (
            self.messages[-1].function_call
            and self.messages[-1].function_call["name"] == function_name
        ):
            self.pop_message()

    def _add_function_result(self, function_result: FunctionResult) -> bool:
        """Add a function execution result to the chat

        Args:
            function_result (FunctionResult): The function execution result

        Returns:
            bool: Whether the function result was added
                (whether save_return was True)
        """
        if function_result.content is None:
            return False
        if function_result.interpret_return_as_response:
            self._add_function_result_as_response(function_result.content)
        else:
            self._add_function_result_as_function_call(function_result)
        return True

    def _add_function_result_as_response(self, function_result: str) -> None:
        """Add a function execution result to the chat as an assistant response

        Args:
            function_result (str): The function execution result
        """
        response: FinalResponseMessageType = {
            "role": "assistant",
            "content": function_result,
        }
        self.add_message(response)

    def _add_function_result_as_function_call(
        self, function_result: FunctionResult
    ) -> None:
        """Add a function execution result to the chat as a function call

        Args:
            function_result (FunctionResult): The function execution result
        """
        response: FunctionMessageType = {
"role": "function", 244 | "name": function_result.name, 245 | "content": function_result.content, 246 | } 247 | self.add_message(response) 248 | 249 | def add_function_result(self, function_result: FunctionResult) -> bool: 250 | """Add a function execution result 251 | 252 | If the function has a return value (save_return is True), it will be added to 253 | the chat. The function call will be removed depending on the remove_call 254 | attribute, and the function result will be interpreted as a response or a 255 | function call depending on the interpret_return_as_response attribute. 256 | 257 | Args: 258 | function_result (FunctionResult): The function result 259 | 260 | Returns: 261 | bool: Whether the function result was added 262 | """ 263 | if function_result.remove_call: 264 | self.remove_function_call(function_result.name) 265 | return self._add_function_result(function_result) 266 | 267 | def run_function_and_substitute( 268 | self, 269 | function_call: FunctionCall, 270 | ) -> bool: 271 | """Run a function, replacing the last message with the result if needed 272 | 273 | Args: 274 | function_call (FunctionCall): The function call 275 | 276 | Raises: 277 | TypeError: If the function returns a None value 278 | 279 | Returns: 280 | bool: Whether the function result was added to the chat 281 | (whether save_return was True) 282 | """ 283 | return self.add_function_result(self.skills.run_function(function_call)) 284 | 285 | def run_function_if_needed(self) -> bool: 286 | """Run a function if the last message was a function call 287 | 288 | Might run the function over and over again if the function 289 | does not save the return. 290 | 291 | Returns: 292 | bool: Whether the function result was added 293 | """ 294 | if not self.messages: 295 | return False 296 | 297 | function_call = self.messages[-1].function_call 298 | if not function_call: 299 | return False 300 | 301 | return self.run_function_and_substitute(function_call) 302 | 303 | def generate_message( 304 | self, function_call: OpenAiFunctionCallInput = "auto", retries: int | None = 1 305 | ) -> GenericMessage: 306 | """Generate the next message. Will run a function if the last message 307 | was a function call and the function call is not being overridden; 308 | if the function does not save the return a message will still be generated. 

        Args:
            function_call (OpenAiFunctionCallInput): The function call
            retries (int | None): The number of retries; if None, will retry
                indefinitely

        Returns:
            GenericMessage: The response
        """
        if function_call in ["auto", "none"] and self.run_function_if_needed():
            return self.messages[-1]

        message: NonFunctionMessageType = self._generate_message(function_call, retries)
        self.add_message(message)
        return Message(message)

    def run_until_response(
        self, allow_function_calls: bool = True, retries: int | None = 1
    ) -> FinalResponseMessage:
        """Run functions and query the AI until a response is generated

        Args:
            allow_function_calls (bool): Whether to allow the AI to call functions
            retries (int | None): The number of retries; if None, will retry
                indefinitely

        Returns:
            FinalResponseMessage: The final response, either from the AI or a function
                that has interpret_as_response set to True
        """
        while True:
            message = self.generate_message(
                function_call="auto" if allow_function_calls else "none",
                retries=retries,
            )
            if is_final_response_message(message):
                return message

    @overload
    def add_function(self, function: OpenAIFunction) -> OpenAIFunction:
        ...

    @overload
    def add_function(
        self,
        function: Callable[..., Any],
        *,
        name: str | None = None,
        description: str | None = None,
        save_return: bool = True,
        serialize: bool = True,
        remove_call: bool = False,
        interpret_as_response: bool = False,
    ) -> Callable[..., Any]:
        ...

    @overload
    def add_function(
        self,
        *,
        name: str | None = None,
        description: str | None = None,
        save_return: bool = True,
        serialize: bool = True,
        remove_call: bool = False,
        interpret_as_response: bool = False,
    ) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
        ...

    def add_function(
        self,
        function: OpenAIFunction | Callable[..., Any] | None = None,
        *,
        name: str | None = None,
        description: str | None = None,
        save_return: bool = True,
        serialize: bool = True,
        remove_call: bool = False,
        interpret_as_response: bool = False,
    ) -> Callable[[Callable[..., Any]], Callable[..., Any]] | Callable[..., Any]:
        """Add a function to the functions available to the AI

        Args:
            function (OpenAIFunction | Callable[..., Any]): The function to add
            name (str): The name of the function. Defaults to the function's name.
            description (str): The description of the function. Defaults to getting
                the short description from the function's docstring.
            save_return (bool): Whether to send the return value of this function back
                to the AI. Defaults to True.
            serialize (bool): Whether to serialize the return value of this function.
                Otherwise, the return value must be a string.
            remove_call (bool): Whether to remove the function call itself from the
                chat history
            interpret_as_response (bool): Whether to interpret the return value of
                this function as the natural language response of the AI.
404 | 405 | Returns: 406 | Callable[[Callable[..., Any]], Callable[..., Any]]: A decorator 407 | Callable[..., Any]: The original function 408 | """ 409 | if function is None: 410 | return self.skills.add_function( 411 | name=name, 412 | description=description, 413 | save_return=save_return, 414 | serialize=serialize, 415 | remove_call=remove_call, 416 | interpret_as_response=interpret_as_response, 417 | ) 418 | return self.skills.add_function( 419 | function, 420 | name=name, 421 | description=description, 422 | save_return=save_return, 423 | serialize=serialize, 424 | remove_call=remove_call, 425 | interpret_as_response=interpret_as_response, 426 | ) 427 | 428 | def remove_function( 429 | self, function: str | OpenAIFunction | Callable[..., Any] 430 | ) -> None: 431 | """Remove a function 432 | 433 | Args: 434 | function (str | OpenAIFunction | Callable[..., Any]): The function 435 | """ 436 | self.skills.remove_function(function) 437 | 438 | def ask(self, question: str, retries: int | None = 1) -> str: 439 | """Ask the AI a question, running until a response is generated 440 | 441 | Args: 442 | question (str): The question 443 | retries (int | None): The number of retries; if None, will retry 444 | indefinitely 445 | 446 | Returns: 447 | str: The answer to the question 448 | """ 449 | self.add_message(question) 450 | return self.run_until_response(retries=retries).content 451 | 452 | def add_skill(self, skill: FunctionSet) -> None: 453 | """Add a skill to those available to the AI 454 | 455 | Args: 456 | skill (FunctionSet): The skill to add 457 | """ 458 | self.skills.add_skill(skill) 459 | 460 | def run( 461 | self, function: str, prompt: str | None = None, retries: int | None = 1 462 | ) -> Any: 463 | """Run a specified function and return the raw function result 464 | 465 | Args: 466 | function (str): The function to run 467 | prompt (str | None): The prompt to use 468 | retries (int | None): The number of retries; if None, will retry 469 | indefinitely 470 | 471 | Returns: 472 | The raw function result 473 | """ 474 | if prompt is not None: 475 | self.add_message(prompt) 476 | # We can do type: ignore as we know we're forcing a function call 477 | response: FunctionCallMessage 478 | response = self.generate_message( 479 | {"name": function}, retries=retries 480 | ) # type: ignore 481 | return self.skills(response.function_call) 482 | -------------------------------------------------------------------------------- /openai_functions/exceptions.py: -------------------------------------------------------------------------------- 1 | """The exceptions associated with function handling.""" 2 | from __future__ import annotations 3 | from typing import Any, TYPE_CHECKING 4 | 5 | if TYPE_CHECKING: 6 | from .json_type import JsonType 7 | 8 | 9 | class OpenAIFunctionsError(Exception): 10 | """The base exception for all OpenAI Functions errors.""" 11 | 12 | 13 | class FunctionNotFoundError(OpenAIFunctionsError): 14 | """The function was not found in the given skillset. 15 | 16 | Attributes: 17 | function_name (str): The name of the function that was not found 18 | """ 19 | 20 | def __init__(self, function_name: str) -> None: 21 | """Initialize the FunctionNotFoundError. 22 | 23 | Args: 24 | function_name (str): The name of the function that was not found 25 | """ 26 | super().__init__(f"Function {function_name} not found.") 27 | self.function_name = function_name 28 | 29 | 30 | class CannotParseTypeError(OpenAIFunctionsError): 31 | """This type of the argument could not be parsed. 
32 | 33 | Attributes: 34 | argtype (Any): The type that could not be parsed 35 | """ 36 | 37 | def __init__(self, argtype: Any) -> None: 38 | """Initialize the CannotParseTypeError. 39 | 40 | Args: 41 | argtype (Any): The type that could not be parsed 42 | """ 43 | super().__init__(f"Cannot parse type {argtype}") 44 | self.argtype = argtype 45 | 46 | 47 | class NonSerializableOutputError(OpenAIFunctionsError): 48 | """The function returned a non-serializable output. 49 | 50 | Attributes: 51 | result (Any): The result that was not serializable 52 | """ 53 | 54 | def __init__(self, result: Any) -> None: 55 | """Initialize the NonSerializableOutputError. 56 | 57 | Args: 58 | result (Any): The result that was not serializable 59 | """ 60 | super().__init__( 61 | f"The result {result} is not JSON-serializable. " 62 | "Set serialize=False to use str() instead." 63 | ) 64 | self.result = result 65 | 66 | 67 | class InvalidJsonError(OpenAIFunctionsError): 68 | """OpenAI returned invalid JSON for the arguments. 69 | 70 | Attributes: 71 | response (str): The response that was not valid JSON 72 | """ 73 | 74 | def __init__(self, response: str) -> None: 75 | """Initialize the InvalidJsonError. 76 | 77 | Args: 78 | response (str): The response that was not valid JSON 79 | """ 80 | super().__init__( 81 | f"OpenAI returned invalid (perhaps incomplete) JSON: {response}" 82 | ) 83 | self.response = response 84 | 85 | 86 | class BrokenSchemaError(OpenAIFunctionsError): 87 | """The OpenAI response did not match the schema. 88 | 89 | Attributes: 90 | response (JsonType): The response that did not match the schema 91 | schema (JsonType): The schema that the response did not match 92 | """ 93 | 94 | def __init__(self, response: JsonType, schema: JsonType) -> None: 95 | """Initialize the BrokenSchemaError. 
96 | 
97 | Args:
98 | response (JsonType): The response that did not match the schema
99 | schema (JsonType): The schema that the response did not match
100 | """
101 | super().__init__(
102 | "OpenAI returned a response that did not match the schema: "
103 | f"{response!r} does not match {schema}"
104 | )
105 | self.response = response
106 | self.schema = schema
107 | 
--------------------------------------------------------------------------------
/openai_functions/functions/__init__.py:
--------------------------------------------------------------------------------
1 | """A set of tools responsible for managing the functions themselves."""
2 | from .basic_set import BasicFunctionSet
3 | from .functions import FunctionResult, RawFunctionResult
4 | from .sets import FunctionSet, MutableFunctionSet, OpenAIFunction
5 | from .togglable_set import TogglableSet
6 | from .union import UnionSkillSet
7 | from .wrapper import FunctionWrapper, WrapperConfig
8 | 
9 | __all__ = [
10 | "BasicFunctionSet",
11 | "FunctionResult",
12 | "RawFunctionResult",
13 | "FunctionSet",
14 | "MutableFunctionSet",
15 | "OpenAIFunction",
16 | "TogglableSet",
17 | "UnionSkillSet",
18 | "FunctionWrapper",
19 | "WrapperConfig",
20 | ]
21 | 
--------------------------------------------------------------------------------
/openai_functions/functions/basic_set.py:
--------------------------------------------------------------------------------
1 | """A module for running OpenAI functions"""
2 | from __future__ import annotations
3 | import json
4 | from typing import TYPE_CHECKING
5 | 
6 | from ..exceptions import FunctionNotFoundError, InvalidJsonError
7 | from .functions import FunctionResult, OpenAIFunction, RawFunctionResult
8 | from .sets import MutableFunctionSet
9 | 
10 | if TYPE_CHECKING:
11 | from ..json_type import JsonType
12 | from ..openai_types import FunctionCall
13 | 
14 | 
15 | class BasicFunctionSet(MutableFunctionSet):
16 | """A skill set - a set of OpenAIFunction objects ready to be called.
17 | It inherits from `MutableFunctionSet`, so you can add and remove functions
18 | with the `add_function` and `remove_function` methods.
19 | 
20 | Args:
21 | functions (list[OpenAIFunction] | None): The functions to initialize with.
22 | """ 23 | 24 | def __init__( 25 | self, 26 | functions: list[OpenAIFunction] | None = None, 27 | ) -> None: 28 | self.functions = functions or [] 29 | 30 | @property 31 | def functions_schema(self) -> list[JsonType]: 32 | """Get the functions schema, in the format OpenAI expects 33 | 34 | Returns: 35 | JsonType: The schema of all the available functions 36 | """ 37 | return [function.schema for function in self.functions] 38 | 39 | def run_function(self, input_data: FunctionCall) -> FunctionResult: 40 | """Run the function 41 | 42 | Args: 43 | input_data (FunctionCall): The function call 44 | 45 | Returns: 46 | FunctionResult: The function output 47 | 48 | Raises: 49 | FunctionNotFoundError: If the function is not found 50 | InvalidJsonError: If the arguments are not valid JSON 51 | """ 52 | function = self.find_function(input_data["name"]) 53 | try: 54 | arguments = json.loads(input_data["arguments"]) 55 | except json.decoder.JSONDecodeError as err: 56 | raise InvalidJsonError(input_data["arguments"]) from err 57 | result = self.get_function_result(function, arguments) 58 | return FunctionResult( 59 | function.name, result, function.remove_call, function.interpret_as_response 60 | ) 61 | 62 | def find_function(self, function_name: str) -> OpenAIFunction: 63 | """Find a function in the skillset 64 | 65 | Args: 66 | function_name (str): The function name 67 | 68 | Returns: 69 | OpenAIFunction: The function of the given name 70 | 71 | Raises: 72 | FunctionNotFoundError: If the function is not found 73 | """ 74 | for function in self.functions: 75 | if function.name == function_name: 76 | return function 77 | raise FunctionNotFoundError(function_name) 78 | 79 | def get_function_result( 80 | self, function: OpenAIFunction, arguments: dict[str, JsonType] 81 | ) -> RawFunctionResult | None: 82 | """Get the result of a function's execution 83 | 84 | Args: 85 | function (OpenAIFunction): The function to run 86 | arguments (dict[str, JsonType]): The arguments to run the function with 87 | 88 | Returns: 89 | RawFunctionResult | None: The result of the function, or None if the 90 | function does not save its return value 91 | """ 92 | result = function(arguments) 93 | 94 | if function.save_return: 95 | return RawFunctionResult(result, serialize=function.serialize) 96 | return None 97 | 98 | def _add_function(self, function: OpenAIFunction) -> None: 99 | """Add a function to the skillset 100 | 101 | Args: 102 | function (OpenAIFunction): The function 103 | """ 104 | self.functions.append(function) 105 | 106 | def _remove_function(self, name: str) -> None: 107 | """Remove a function from the skillset 108 | 109 | Args: 110 | name (str): The name of the function to remove 111 | """ 112 | self.functions = [f for f in self.functions if f.name != name] 113 | -------------------------------------------------------------------------------- /openai_functions/functions/functions.py: -------------------------------------------------------------------------------- 1 | """A module for running OpenAI functions""" 2 | from __future__ import annotations 3 | from dataclasses import dataclass 4 | import json 5 | from typing import Any, Protocol, TYPE_CHECKING, runtime_checkable 6 | 7 | from ..exceptions import NonSerializableOutputError 8 | 9 | if TYPE_CHECKING: 10 | from ..json_type import JsonType 11 | 12 | 13 | @runtime_checkable 14 | class OpenAIFunction(Protocol): 15 | """A protocol for OpenAI functions. 
16 | 
17 | Requires a __call__ method, a schema property, and a name property,
18 | as well as the properties that define how the return value is treated.
19 | """
20 | 
21 | def __call__(self, arguments: dict[str, JsonType]) -> Any:
22 | ...
23 | 
24 | @property
25 | def schema(self) -> JsonType:
26 | """Get the schema for this function"""
27 | 
28 | @property
29 | def name(self) -> str:
30 | """Get the name of this function"""
31 | # This ellipsis is for Pyright #2758
32 | ... # pylint: disable=unnecessary-ellipsis
33 | 
34 | @property
35 | def save_return(self) -> bool:
36 | """Get whether to save the return value of this function"""
37 | ... # pylint: disable=unnecessary-ellipsis
38 | 
39 | @property
40 | def serialize(self) -> bool:
41 | """Get whether to serialize the return value of this function"""
42 | ... # pylint: disable=unnecessary-ellipsis
43 | 
44 | @property
45 | def remove_call(self) -> bool:
46 | """Get whether to remove the call to this function from the chat history"""
47 | ... # pylint: disable=unnecessary-ellipsis
48 | 
49 | @property
50 | def interpret_as_response(self) -> bool:
51 | """Get whether to interpret the return value of this function as a response"""
52 | ... # pylint: disable=unnecessary-ellipsis
53 | 
54 | 
55 | @dataclass
56 | class RawFunctionResult:
57 | """A raw function result"""
58 | 
59 | result: Any
60 | serialize: bool = True
61 | 
62 | @property
63 | def serialized(self) -> str:
64 | """Get the serialized result
65 | 
66 | Raises:
67 | NonSerializableOutputError: If the result cannot be serialized
68 | 
69 | Returns:
70 | str: The serialized result
71 | """
72 | if self.serialize:
73 | try:
74 | return json.dumps(self.result)
75 | except TypeError as error:
76 | raise NonSerializableOutputError(self.result) from error
77 | return str(self.result)
78 | 
79 | 
80 | @dataclass
81 | class FunctionResult:
82 | """A result of a function's execution"""
83 | 
84 | name: str
85 | raw_result: RawFunctionResult | None
86 | remove_call: bool = False
87 | interpret_return_as_response: bool = False
88 | 
89 | @property
90 | def content(self) -> str | None:
91 | """Get the content of this result
92 | 
93 | Returns:
94 | str | None: The content
95 | """
96 | return self.raw_result.serialized if self.raw_result else None
97 | 
98 | @property
99 | def result(self) -> Any | None:
100 | """Get the result of this function call
101 | 
102 | Returns:
103 | The raw result of the function call
104 | """
105 | if self.raw_result:
106 | return self.raw_result.result
107 | return None
108 | 
--------------------------------------------------------------------------------
/openai_functions/functions/sets.py:
--------------------------------------------------------------------------------
1 | """A module for running OpenAI functions"""
2 | from __future__ import annotations
3 | from abc import ABC, abstractmethod
4 | from functools import partial
5 | from typing import Any, Callable, TYPE_CHECKING, overload
6 | 
7 | from .functions import FunctionResult, OpenAIFunction
8 | from .wrapper import FunctionWrapper, WrapperConfig
9 | 
10 | if TYPE_CHECKING:
11 | from ..json_type import JsonType
12 | from ..openai_types import FunctionCall
13 | 
14 | 
15 | class FunctionSet(ABC):
16 | """A skill set - a provider for a functions schema and a function runner"""
17 | 
18 | @property
19 | @abstractmethod
20 | def functions_schema(self) -> list[JsonType]:
21 | """Get the functions schema"""
22 | 
23 | @abstractmethod
24 | def run_function(self, input_data: FunctionCall) -> FunctionResult:
25 | """Run the function
26 | 
27 | 
Args: 28 | input_data (FunctionCall): The function call 29 | 30 | Raises: 31 | FunctionNotFoundError: If the function is not found 32 | """ 33 | 34 | def __call__(self, input_data: FunctionCall) -> JsonType: 35 | """Run the function with the given input data 36 | 37 | Args: 38 | input_data (FunctionCall): The input data from OpenAI 39 | 40 | Returns: 41 | JsonType: Your function's raw result 42 | """ 43 | return self.run_function(input_data).result 44 | 45 | 46 | class MutableFunctionSet(FunctionSet): 47 | """A skill set that can be modified - functions can be added and removed""" 48 | 49 | @abstractmethod 50 | def _add_function(self, function: OpenAIFunction) -> None: 51 | ... 52 | 53 | @overload 54 | def add_function(self, function: OpenAIFunction) -> OpenAIFunction: 55 | ... 56 | 57 | @overload 58 | def add_function( 59 | self, 60 | function: Callable[..., Any], 61 | *, 62 | name: str | None = None, 63 | description: str | None = None, 64 | save_return: bool = True, 65 | serialize: bool = True, 66 | remove_call: bool = False, 67 | interpret_as_response: bool = False, 68 | ) -> Callable[..., Any]: 69 | ... 70 | 71 | @overload 72 | def add_function( 73 | self, 74 | *, 75 | name: str | None = None, 76 | description: str | None = None, 77 | save_return: bool = True, 78 | serialize: bool = True, 79 | remove_call: bool = False, 80 | interpret_as_response: bool = False, 81 | ) -> Callable[[Callable[..., Any]], Callable[..., Any]]: 82 | ... 83 | 84 | def add_function( 85 | self, 86 | function: OpenAIFunction | Callable[..., Any] | None = None, 87 | *, 88 | name: str | None = None, 89 | description: str | None = None, 90 | save_return: bool = True, 91 | serialize: bool = True, 92 | remove_call: bool = False, 93 | interpret_as_response: bool = False, 94 | ) -> Callable[[Callable[..., Any]], Callable[..., Any]] | Callable[..., Any]: 95 | """Add a function 96 | 97 | Args: 98 | function (OpenAIFunction | Callable[..., Any]): The function 99 | name (str): The name of the function. Defaults to the function's name. 100 | description (str): The description of the function. Defaults to getting 101 | the short description from the function's docstring. 102 | save_return (bool): Whether to send the return value of this 103 | function to the AI. Defaults to True. 104 | serialize (bool): Whether to serialize the return value of this 105 | function. Defaults to True. Otherwise, the return value must be a 106 | string. 107 | remove_call (bool): Whether to remove the function call from the AI's 108 | chat history. Defaults to False. 109 | interpret_as_response (bool): Whether to interpret the return 110 | value of this function as a response of the agent. Defaults to False. 
111 | 112 | Returns: 113 | Callable[[Callable[..., Any]], Callable[..., Any]]: A decorator 114 | Callable[..., Any]: The original function 115 | """ 116 | if isinstance(function, OpenAIFunction): 117 | self._add_function(function) 118 | return function 119 | if callable(function): 120 | self._add_function( 121 | FunctionWrapper( 122 | function, 123 | WrapperConfig( 124 | None, save_return, serialize, remove_call, interpret_as_response 125 | ), 126 | name=name, 127 | description=description, 128 | ) 129 | ) 130 | return function 131 | 132 | return partial( 133 | self.add_function, 134 | name=name, 135 | description=description, 136 | save_return=save_return, 137 | serialize=serialize, 138 | remove_call=remove_call, 139 | interpret_as_response=interpret_as_response, 140 | ) 141 | 142 | @abstractmethod 143 | def _remove_function(self, name: str) -> None: 144 | ... 145 | 146 | def remove_function( 147 | self, function: str | OpenAIFunction | Callable[..., Any] 148 | ) -> None: 149 | """Remove a function 150 | 151 | Args: 152 | function (str | OpenAIFunction | Callable[..., Any]): The function 153 | """ 154 | if isinstance(function, str): 155 | self._remove_function(function) 156 | return 157 | if isinstance(function, OpenAIFunction): 158 | self._remove_function(function.name) 159 | return 160 | self._remove_function(function.__name__) 161 | -------------------------------------------------------------------------------- /openai_functions/functions/togglable_set.py: -------------------------------------------------------------------------------- 1 | """A function set disabled by default that exposes a function to enable it.""" 2 | from __future__ import annotations 3 | from typing import TYPE_CHECKING 4 | 5 | from ..exceptions import FunctionNotFoundError 6 | from .basic_set import BasicFunctionSet 7 | from .functions import FunctionResult 8 | from .functions import OpenAIFunction 9 | 10 | if TYPE_CHECKING: 11 | from ..json_type import JsonType 12 | from ..openai_types import FunctionCall 13 | 14 | 15 | class TogglableSet(BasicFunctionSet): 16 | """A function set that is disabled by default and can be enabled by the AI. 17 | 18 | Args: 19 | enable_function_name (str): The name of the function to enable the set 20 | enable_function_description (str, optional): The description of the enable 21 | function. By default no description is provided. 22 | functions (list[OpenAIFunction], optional): The functions in the set. 23 | """ 24 | 25 | def __init__( 26 | self, 27 | enable_function_name: str, 28 | enable_function_description: str | None = None, 29 | functions: list[OpenAIFunction] | None = None, 30 | ) -> None: 31 | super().__init__(functions) 32 | self.enabled = False 33 | self.enable_function_name = enable_function_name 34 | self.enable_function_description = enable_function_description 35 | 36 | def enable(self) -> None: 37 | """Enable the function set.""" 38 | self.enabled = True 39 | 40 | @property 41 | def _enable_function_schema(self) -> dict[str, JsonType]: 42 | """Get the schema for the enable function. 
43 | 44 | Returns: 45 | dict[str, JsonType]: The schema for the enable function 46 | """ 47 | schema: dict[str, JsonType] = { 48 | "name": self.enable_function_name, 49 | "parameters": { 50 | "type": "object", 51 | "properties": {}, 52 | }, 53 | } 54 | if self.enable_function_description: 55 | schema["description"] = self.enable_function_description 56 | return schema 57 | 58 | @property 59 | def functions_schema(self) -> list[JsonType]: 60 | """Get the functions schema, in the format OpenAI expects 61 | 62 | Returns: 63 | JsonType: The schema of all the available functions 64 | """ 65 | if self.enabled: 66 | return super().functions_schema 67 | return [self._enable_function_schema] 68 | 69 | def run_function(self, input_data: FunctionCall) -> FunctionResult: 70 | """Run the function, enabling the set if the enable function is called. 71 | 72 | Args: 73 | input_data (FunctionCall): The function call 74 | 75 | Returns: 76 | FunctionResult: The function output 77 | 78 | Raises: 79 | FunctionNotFoundError: If the function is not found 80 | """ 81 | if not self.enabled: 82 | if input_data["name"] == self.enable_function_name: 83 | self.enable() 84 | return FunctionResult(self.enable_function_name, None, True) 85 | raise FunctionNotFoundError(input_data["name"]) 86 | return super().run_function(input_data) 87 | -------------------------------------------------------------------------------- /openai_functions/functions/union.py: -------------------------------------------------------------------------------- 1 | """A function set that's a union of other function sets.""" 2 | from __future__ import annotations 3 | import contextlib 4 | from typing import TYPE_CHECKING 5 | 6 | from ..exceptions import FunctionNotFoundError 7 | from .basic_set import BasicFunctionSet 8 | 9 | if TYPE_CHECKING: 10 | from ..json_type import JsonType 11 | from ..openai_types import FunctionCall 12 | from .functions import FunctionResult 13 | from .sets import FunctionSet 14 | 15 | 16 | class UnionSkillSet(BasicFunctionSet): 17 | """A function set that's a union of other function sets.""" 18 | 19 | def __init__(self, *sets: FunctionSet) -> None: 20 | self.sets = list(sets) 21 | super().__init__() 22 | 23 | @property 24 | def functions_schema(self) -> list[JsonType]: 25 | """Get the combined functions schema 26 | 27 | Returns: 28 | list[JsonType]: The combined functions schema 29 | """ 30 | return super().functions_schema + sum( 31 | (function_set.functions_schema for function_set in self.sets), [] 32 | ) 33 | 34 | def run_function(self, input_data: FunctionCall) -> FunctionResult: 35 | """Run the function 36 | 37 | Args: 38 | input_data (FunctionCall): The function call 39 | 40 | Returns: 41 | FunctionResult: The function output 42 | 43 | Raises: 44 | FunctionNotFoundError: If the function is not found 45 | """ 46 | for function_set in self.sets: 47 | with contextlib.suppress(FunctionNotFoundError): 48 | return function_set.run_function(input_data) 49 | return super().run_function(input_data) 50 | 51 | def add_skill(self, skill: FunctionSet) -> None: 52 | """Add a skill 53 | 54 | Args: 55 | skill (FunctionSet): The skill 56 | """ 57 | self.sets.append(skill) 58 | -------------------------------------------------------------------------------- /openai_functions/functions/wrapper.py: -------------------------------------------------------------------------------- 1 | """Wrap a function for jsonschema io.""" 2 | from __future__ import annotations 3 | from dataclasses import dataclass 4 | import inspect 5 | from typing 
import Any, Callable, OrderedDict, TYPE_CHECKING, Type 6 | 7 | from docstring_parser import Docstring, parse 8 | 9 | from ..exceptions import BrokenSchemaError, CannotParseTypeError 10 | from ..parsers import ArgSchemaParser, defargparsers 11 | 12 | if TYPE_CHECKING: 13 | from ..json_type import JsonType 14 | 15 | 16 | @dataclass 17 | class WrapperConfig: 18 | """Configuration for a FunctionWrapper, one that specifies the parsers for the 19 | arguments and the treatment of the return value. 20 | 21 | Args: 22 | parsers (list[Type[ArgSchemaParser]] | None): The parsers for the arguments. 23 | defaults to `defargparsers`, which support all JSON types, as well 24 | as enums and dataclasses 25 | save_return (bool): Whether to send the return value back to the AI 26 | serialize (bool): Whether to serialize the return value; if False, the 27 | return value must be a string 28 | remove_call (bool): Whether to remove the call to this function from the 29 | chat history 30 | interpret_as_response (bool): Whether to interpret the return value as a 31 | response from the agent directly, or to base the response on the 32 | return value 33 | """ 34 | 35 | parsers: list[Type[ArgSchemaParser]] | None = None 36 | save_return: bool = True 37 | serialize: bool = True 38 | remove_call: bool = False 39 | interpret_as_response: bool = False 40 | 41 | 42 | class FunctionWrapper: 43 | """Wraps a function for jsonschema io 44 | 45 | Provides a function schema and a function runner - the function schema is 46 | generated from the function's docstring and argument type annotations, and 47 | the function runner parses the arguments from JSON and runs the function. 48 | They are accessed via the `schema` property and a `__call__` method respectively. 49 | 50 | Args: 51 | func (Callable[..., Any]): The function to wrap 52 | config (WrapperConfig | None, optional): The configuration for the wrapper. 53 | """ 54 | 55 | def __init__( 56 | self, 57 | func: Callable[..., Any], 58 | config: WrapperConfig | None = None, 59 | name: str | None = None, 60 | description: str | None = None, 61 | ) -> None: 62 | """Initialize a FunctionWrapper 63 | 64 | Args: 65 | func (Callable[..., Any]): The function to wrap 66 | config (WrapperConfig | None, optional): The configuration for the wrapper. 67 | name (str | None): The name override for the function. 68 | description (str | None): The description override for the function. 69 | """ 70 | self.func = func 71 | self.config = config or WrapperConfig() 72 | self._name = name 73 | self._description = description 74 | 75 | @property 76 | def parsers(self) -> list[Type[ArgSchemaParser]]: 77 | """Get the parsers for this function 78 | 79 | Returns: 80 | list[Type[ArgSchemaParser]]: The parsers 81 | """ 82 | return self.config.parsers or defargparsers 83 | 84 | @property 85 | def save_return(self) -> bool: 86 | """Get whether to send the return value of this function to the AI 87 | 88 | Returns: 89 | bool: Whether to send the return value to the AI 90 | """ 91 | return self.config.save_return 92 | 93 | @property 94 | def serialize(self) -> bool: 95 | """Get whether to serialize the return value of this function 96 | 97 | The function should return strictly a string if this is false. 
98 | 99 | Returns: 100 | bool: Whether to serialize the return value 101 | """ 102 | return self.config.serialize 103 | 104 | @property 105 | def remove_call(self) -> bool: 106 | """Get whether to remove the call to this function from the chat history 107 | 108 | Returns: 109 | bool: Whether to remove the call to this function from the chat history 110 | """ 111 | return self.config.remove_call 112 | 113 | @property 114 | def interpret_as_response(self) -> bool: 115 | """Get whether to interpret the return value as an assistant response 116 | 117 | Returns: 118 | bool: Whether to interpret the return value as a response 119 | """ 120 | return self.config.interpret_as_response 121 | 122 | @property 123 | def argument_parsers(self) -> OrderedDict[str, ArgSchemaParser]: 124 | """Get the argument parsers for this function 125 | 126 | Returns: 127 | OrderedDict[str, ArgSchemaParser]: The argument parsers 128 | """ 129 | return OrderedDict( 130 | (name, self.parse_argument(argument)) 131 | for name, argument in inspect.signature(self.func).parameters.items() 132 | ) 133 | 134 | @property 135 | def required_arguments(self) -> JsonType: 136 | """Get the required arguments for this function 137 | 138 | Returns: 139 | JsonType: The required arguments 140 | """ 141 | return [ 142 | name 143 | for name, argument in inspect.signature(self.func).parameters.items() 144 | if argument.default is argument.empty 145 | ] 146 | 147 | @property 148 | def arguments_schema(self) -> JsonType: 149 | """Get the arguments schema for this function 150 | 151 | Returns: 152 | JsonType: The arguments schema 153 | """ 154 | return { 155 | name: { 156 | **parser.argument_schema, 157 | **( 158 | {"description": self.arg_docs.get(name)} 159 | if name in self.arg_docs 160 | else {} 161 | ), 162 | } 163 | for name, parser in self.argument_parsers.items() 164 | } 165 | 166 | @property 167 | def parsed_docs(self) -> Docstring: 168 | """Get the parsed docs for this function 169 | 170 | Returns: 171 | Docstring: The parsed docs 172 | """ 173 | return parse(self.func.__doc__ or "") 174 | 175 | @property 176 | def arg_docs(self) -> dict[str, str]: 177 | """Get the argument docs for this function 178 | 179 | Returns: 180 | dict[str, str]: The argument docs 181 | """ 182 | return { 183 | param.arg_name: param.description 184 | for param in self.parsed_docs.params 185 | if param.description 186 | } 187 | 188 | @property 189 | def name(self) -> str: 190 | """Get the name of this function 191 | 192 | Returns: 193 | str: The name 194 | """ 195 | return self._name or self.func.__name__ 196 | 197 | @property 198 | def schema(self) -> dict[str, JsonType]: 199 | """Get the schema for this function 200 | 201 | Returns: 202 | dict[str, JsonType]: The schema 203 | """ 204 | schema: dict[str, JsonType] = { 205 | "name": self.name, 206 | "parameters": { 207 | "type": "object", 208 | "properties": self.arguments_schema, 209 | "required": self.required_arguments, 210 | }, 211 | } 212 | if self.parsed_docs.short_description or self._description: 213 | schema["description"] = ( 214 | self.parsed_docs.short_description or self._description 215 | ) 216 | return schema 217 | 218 | def parse_argument(self, argument: inspect.Parameter) -> ArgSchemaParser: 219 | """Parse an argument 220 | 221 | Args: 222 | argument (inspect.Parameter): The argument to parse 223 | 224 | Raises: 225 | CannotParseTypeError: If the argument cannot be parsed 226 | 227 | Returns: 228 | ArgSchemaParser: The parser for the argument 229 | """ 230 | # The reasoning behind not using 
pydantic is OpenAI's apparent inability to 231 | # parse JSON Schemas with $ref's in them - or at least, that's what I've 232 | # gathered from the error messages. 233 | for parser in self.parsers: 234 | if parser.can_parse(argument.annotation): 235 | return parser(argument.annotation, self.parsers) 236 | raise CannotParseTypeError(argument.annotation) 237 | 238 | def parse_arguments(self, arguments: dict[str, JsonType]) -> OrderedDict[str, Any]: 239 | """Parse arguments 240 | 241 | Args: 242 | arguments (dict[str, JsonType]): The arguments to parse 243 | 244 | Raises: 245 | BrokenSchemaError: If the arguments do not match the schema 246 | 247 | Returns: 248 | OrderedDict[str, Any]: The parsed arguments 249 | """ 250 | argument_parsers = self.argument_parsers 251 | if not all(name in arguments for name in argument_parsers): 252 | raise BrokenSchemaError(arguments, self.arguments_schema) 253 | try: 254 | return OrderedDict( 255 | (name, argument_parsers[name].parse_value(value)) 256 | for name, value in arguments.items() 257 | ) 258 | except KeyError as err: 259 | raise BrokenSchemaError(arguments, self.arguments_schema) from err 260 | 261 | def __call__(self, arguments: dict[str, JsonType]) -> Any: 262 | """Call the wrapped function 263 | 264 | Args: 265 | arguments (dict[str, JsonType]): The arguments to call the function with 266 | 267 | Returns: 268 | The result of the function 269 | """ 270 | return self.func(**self.parse_arguments(arguments)) 271 | -------------------------------------------------------------------------------- /openai_functions/json_type.py: -------------------------------------------------------------------------------- 1 | """Type definitions for JSON data.""" 2 | from __future__ import annotations 3 | 4 | JsonType = int | float | str | bool | list["JsonType"] | dict[str, "JsonType"] | None 5 | -------------------------------------------------------------------------------- /openai_functions/nlp.py: -------------------------------------------------------------------------------- 1 | """A module for running OpenAI functions""" 2 | from __future__ import annotations 3 | from dataclasses import dataclass 4 | from functools import partial 5 | from typing import Callable, Generic, Protocol, TypeVar, overload 6 | from typing_extensions import ParamSpec 7 | 8 | from .conversation import Conversation 9 | from .functions.wrapper import FunctionWrapper, WrapperConfig 10 | 11 | 12 | Param = ParamSpec("Param") 13 | Return = TypeVar("Return") 14 | T = TypeVar("T") 15 | 16 | 17 | @dataclass 18 | class NaturalLanguageAnnotated(Generic[T]): 19 | """A natural language annotated function return value""" 20 | 21 | function_result: T 22 | annotation: str 23 | 24 | 25 | # This is a callable protocol, thus pylint can shut up 26 | class DecoratorProtocol( 27 | Protocol[Param, Return] 28 | ): # pylint: disable=too-few-public-methods 29 | """A protocol for the nlp decorator""" 30 | 31 | def __call__( 32 | self, 33 | function: Callable[Param, Return], 34 | *, 35 | system_prompt: str | None = None, 36 | model: str = "gpt-3.5-turbo-0613", 37 | engine: str | None = None, 38 | ) -> Wrapper[Param, Return]: 39 | ... 
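A quick usage sketch of the `nlp` decorator defined later in this module (illustrative only: `get_weather` is a hypothetical example function, an OpenAI API key is assumed to be configured in the environment, and `nlp` is assumed to be re-exported from the package root; otherwise import it from `openai_functions.nlp`):

    from openai_functions import nlp

    @nlp(system_prompt="You are a helpful weather assistant.")
    def get_weather(city: str) -> str:
        """Get the weather for a city.

        Args:
            city (str): The city to check
        """
        return f"Sunny in {city}"

    get_weather("Paris")                                    # direct call, unchanged
    get_weather.from_natural_language("How is Paris?")      # raw return value
    get_weather.natural_language_response("How is Paris?")  # natural-language answer

The decorator leaves the original function callable as-is; the `Wrapper` it returns merely attaches a private `Conversation` and the natural language entry points shown above.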
40 | 41 | 42 | @dataclass 43 | class NLPWrapperConfig: 44 | """A configuration for the nlp decorator""" 45 | 46 | name: str | None = None 47 | description: str | None = None 48 | serialize: bool = True 49 | 50 | model: str = "gpt-3.5-turbo-0613" 51 | engine: str | None = None 52 | system_prompt: str | None = None 53 | 54 | 55 | class Wrapper(Generic[Param, Return]): 56 | """A wrapper for a function that provides a natural language interface""" 57 | 58 | def __init__( 59 | self, 60 | origin: Callable[..., Return], 61 | config: NLPWrapperConfig, 62 | ) -> None: 63 | self.origin = origin 64 | self.config = config 65 | self.conversation = Conversation(model=config.model, engine=config.engine) 66 | self.openai_function = FunctionWrapper( 67 | self.origin, 68 | WrapperConfig(serialize=config.serialize), 69 | name=config.name, 70 | description=config.description, 71 | ) 72 | self.conversation.add_function(self.openai_function) 73 | 74 | def __call__(self, *args: Param.args, **kwds: Param.kwargs) -> Return: 75 | return self.origin(*args, **kwds) 76 | 77 | def _initialize_conversation(self) -> None: 78 | """Initialize the conversation""" 79 | self.conversation.clear_messages() 80 | if self.config.system_prompt is not None: 81 | self.conversation.add_message( 82 | { 83 | "role": "system", 84 | "content": self.config.system_prompt, 85 | } 86 | ) 87 | 88 | def from_natural_language(self, prompt: str, retries: int | None = 1) -> Return: 89 | """Run the function with the given natural language input 90 | 91 | Args: 92 | prompt (str): The prompt to use 93 | retries (int | None): The number of retries; if None, will retry 94 | indefinitely 95 | 96 | Returns: 97 | The result of the original function 98 | """ 99 | self._initialize_conversation() 100 | return self.conversation.run(self.openai_function.name, prompt, retries=retries) 101 | 102 | def natural_language_response(self, prompt: str, retries: int | None = 1) -> str: 103 | """Run the function and respond to the user with natural language 104 | 105 | Args: 106 | prompt (str): The prompt to use 107 | retries (int | None): The number of retries; if None, will retry 108 | indefinitely 109 | 110 | Returns: 111 | str: The response from the AI 112 | """ 113 | self._initialize_conversation() 114 | self.conversation.add_message(prompt) 115 | self.conversation.generate_message( 116 | function_call={"name": self.openai_function.name} 117 | ) 118 | response = self.conversation.run_until_response(False, retries=retries) 119 | return response.content 120 | 121 | def natural_language_annotated( 122 | self, prompt: str, retries: int | None = 1 123 | ) -> NaturalLanguageAnnotated[Return]: 124 | """Run the function and respond to the user with natural language as well as 125 | the raw function result 126 | 127 | Args: 128 | prompt (str): The prompt to use 129 | retries (int | None): The number of retries; if None, will retry 130 | indefinitely 131 | 132 | Returns: 133 | NaturalLanguageAnnotated: The response from the AI 134 | """ 135 | self._initialize_conversation() 136 | function_result = self.conversation.run( 137 | self.openai_function.name, prompt, retries=retries 138 | ) 139 | response = self.conversation.run_until_response(False, retries=retries) 140 | return NaturalLanguageAnnotated(function_result, response.content) 141 | 142 | 143 | def _nlp( 144 | function: Callable[Param, Return], 145 | *, 146 | name: str | None = None, 147 | description: str | None = None, 148 | system_prompt: str | None = None, 149 | model: str = "gpt-3.5-turbo-0613", 150 | engine: str | 
None = None,
151 | serialize: bool = True,
152 | ) -> Wrapper[Param, Return]:
153 | """Add natural language input to a function
154 | 
155 | Args:
156 | function (Callable): The function to add natural language input to
157 | system_prompt (str | None): The system prompt to use. Defaults to None.
158 | model (str): The model to use. Defaults to "gpt-3.5-turbo-0613".
159 | engine (str | None): The engine to use, for example, for Azure deployments.
160 | name (str | None): The name override for the function.
161 | description (str | None): The description sent to OpenAI.
162 | serialize (bool): Whether to serialize the function result.
163 | 
164 | Returns:
165 | Wrapper: The wrapped function, exposing the natural language
166 | interface methods
167 | """
168 | return Wrapper(
169 | function,
170 | NLPWrapperConfig(
171 | system_prompt=system_prompt,
172 | model=model,
173 | engine=engine,
174 | name=name,
175 | description=description,
176 | serialize=serialize,
177 | ),
178 | )
179 | 
180 | 
181 | @overload
182 | def nlp(
183 | function: Callable[Param, Return],
184 | *,
185 | name: str | None = None,
186 | description: str | None = None,
187 | serialize: bool = True,
188 | system_prompt: str | None = None,
189 | model: str = "gpt-3.5-turbo-0613",
190 | engine: str | None = None,
191 | ) -> Wrapper[Param, Return]:
192 | ...
193 | 
194 | 
195 | @overload
196 | def nlp(
197 | *,
198 | name: str | None = None,
199 | description: str | None = None,
200 | serialize: bool = True,
201 | system_prompt: str | None = None,
202 | model: str = "gpt-3.5-turbo-0613",
203 | engine: str | None = None,
204 | ) -> DecoratorProtocol:
205 | ...
206 | 
207 | 
208 | def nlp(
209 | function: Callable[Param, Return] | None = None,
210 | *,
211 | name: str | None = None,
212 | description: str | None = None,
213 | serialize: bool = True,
214 | system_prompt: str | None = None,
215 | model: str = "gpt-3.5-turbo-0613",
216 | engine: str | None = None,
217 | ) -> Wrapper[Param, Return] | DecoratorProtocol:
218 | """Add natural language input to a function
219 | 
220 | Args:
221 | function (Callable | None): The function to add natural language
222 | input to
223 | name (str | None): The name override for the function, will be inferred from
224 | the function name if not provided.
225 | description (str | None): The description sent to OpenAI, defaults to the short
226 | description from the function docstring.
227 | serialize (bool): Whether to serialize the function result.
228 | system_prompt (str | None): The system prompt to use. Defaults to None.
229 | model (str): The model to use. Defaults to "gpt-3.5-turbo-0613".
230 | engine (str | None): The engine to use, for example, for Azure deployments.
231 | 232 | Returns: 233 | Wrapper | DecoratorProtocol: The function, with natural language input, or a 234 | decorator to add natural language input to a function 235 | """ 236 | 237 | if function is None: 238 | return partial( 239 | _nlp, 240 | name=name, 241 | description=description, 242 | serialize=serialize, 243 | system_prompt=system_prompt, 244 | model=model, 245 | engine=engine, 246 | ) 247 | 248 | return _nlp( 249 | function, 250 | name=name, 251 | description=description, 252 | serialize=serialize, 253 | system_prompt=system_prompt, 254 | model=model, 255 | engine=engine, 256 | ) 257 | -------------------------------------------------------------------------------- /openai_functions/openai_types.py: -------------------------------------------------------------------------------- 1 | """A module for type definitions for OpenAI API responses""" 2 | from __future__ import annotations 3 | from typing import ( 4 | Literal, 5 | Protocol, 6 | TYPE_CHECKING, 7 | TypedDict, 8 | Union, 9 | overload, 10 | runtime_checkable, 11 | ) 12 | 13 | if TYPE_CHECKING: 14 | from typing_extensions import TypeGuard 15 | 16 | 17 | class FunctionCall(TypedDict): 18 | """A container for OpenAI function calls 19 | 20 | Attributes: 21 | name (str): The name of the function 22 | arguments (str): The arguments of the function, in JSON format 23 | """ 24 | 25 | name: str 26 | arguments: str 27 | 28 | 29 | class FinalResponseMessageType(TypedDict): 30 | """A type for OpenAI messages that are final responses""" 31 | 32 | role: Literal["assistant"] 33 | content: str 34 | 35 | 36 | class UserMessageType(TypedDict): 37 | """A type for OpenAI messages that are user messages""" 38 | 39 | role: Literal[ 40 | "system", 41 | "user", 42 | ] 43 | content: str 44 | 45 | 46 | ContentfulMessageType = Union[FinalResponseMessageType, UserMessageType] 47 | 48 | 49 | class IntermediateResponseMessageType(TypedDict): 50 | """A type for OpenAI messages that are intermediate responses""" 51 | 52 | role: Literal["assistant"] 53 | content: None 54 | function_call: FunctionCall 55 | 56 | 57 | NonFunctionMessageType = Union[ContentfulMessageType, IntermediateResponseMessageType] 58 | 59 | 60 | class FunctionMessageType(TypedDict): 61 | """A type for OpenAI messages""" 62 | 63 | role: Literal["function"] 64 | name: str 65 | content: str | None 66 | 67 | 68 | MessageType = Union[NonFunctionMessageType, FunctionMessageType] 69 | 70 | 71 | class Message: 72 | """A container for OpenAI messages""" 73 | 74 | @overload 75 | def __init__(self, message: MessageType) -> None: 76 | ... 77 | 78 | @overload 79 | def __init__(self, message: str) -> None: 80 | ... 81 | 82 | @overload 83 | def __init__( 84 | self, message: str, role: Literal["system", "user", "assistant"] 85 | ) -> None: 86 | ... 
87 | 88 | def __init__( 89 | self, 90 | message: MessageType | str, 91 | role: Literal["system", "user", "assistant"] = "user", 92 | ): 93 | if isinstance(message, str): 94 | if role == "assistant": # We have to split this up because of mypy 95 | self.message: MessageType = {"role": role, "content": message} 96 | else: 97 | self.message = { 98 | "role": role, 99 | "content": message, 100 | } 101 | else: 102 | if "content" not in message: 103 | message["content"] = None 104 | self.message = message 105 | 106 | @property 107 | def content(self) -> str | None: 108 | """Get the content of the message 109 | 110 | Returns: 111 | str | None: The content of the message 112 | """ 113 | return self.message["content"] 114 | 115 | @property 116 | def role(self) -> Literal["system", "user", "assistant", "function"]: 117 | """Get the role of the message 118 | 119 | Returns: 120 | Literal["system", "user", "assistant", "function"]: The role of the message 121 | """ 122 | return self.message["role"] 123 | 124 | @property 125 | def is_function_call(self) -> bool: 126 | """Check if the message is a function call 127 | 128 | Returns: 129 | bool: Whether the message is a function call 130 | """ 131 | return self.role == "assistant" and "function_call" in self.message 132 | 133 | @property 134 | def function_call(self) -> FunctionCall | None: 135 | """Get the function call 136 | 137 | Returns: 138 | FunctionCall | None: The function call 139 | """ 140 | if self.message["role"] == "assistant": 141 | if self.message.get("content") is not None: 142 | return None 143 | return self.message.get("function_call") # type: ignore 144 | return None 145 | 146 | @property 147 | def is_final_response(self) -> bool: 148 | """Check if the message is a final response 149 | 150 | Returns: 151 | bool: Whether the message is a final response 152 | """ 153 | return self.role == "assistant" and self.content is not None 154 | 155 | def as_dict(self) -> MessageType: 156 | """Get the message as a dictionary 157 | 158 | Returns: 159 | MessageType: The message 160 | """ 161 | return self.message 162 | 163 | def __repr__(self) -> str: 164 | if self.is_function_call: 165 | return f"FunctionCall({self.function_call!r})" 166 | return f"Message({self.message['content']!r}, {self.message['role']!r})" 167 | 168 | def __eq__(self, other: object) -> bool: 169 | return ( 170 | isinstance(other, Message) 171 | and self.content == other.content 172 | and self.role == other.role 173 | and self.function_call == other.function_call 174 | ) 175 | 176 | def __hash__(self) -> int: 177 | return hash((self.content, self.role)) 178 | 179 | 180 | @runtime_checkable 181 | class GenericMessage(Protocol): 182 | """A container protocol for OpenAI messages""" 183 | 184 | message: MessageType 185 | 186 | @overload 187 | def __init__(self, message: MessageType) -> None: 188 | ... 189 | 190 | @overload 191 | def __init__(self, message: str) -> None: 192 | ... 193 | 194 | @overload 195 | def __init__( 196 | self, message: str, role: Literal["system", "user", "assistant"] 197 | ) -> None: 198 | ... 199 | 200 | def __init__( 201 | self, 202 | message: MessageType | str, 203 | role: Literal["system", "user", "assistant"] = "user", 204 | ): 205 | ... 206 | 207 | @property 208 | def content(self) -> str | None: 209 | """Get the content of the message""" 210 | 211 | @property 212 | def role(self) -> Literal["system", "user", "assistant", "function"]: 213 | """Get the role of the message""" 214 | ... 
# pylint: disable=unnecessary-ellipsis 215 | 216 | @property 217 | def is_function_call(self) -> bool: 218 | """Check if the message is a function call""" 219 | ... # pylint: disable=unnecessary-ellipsis 220 | 221 | @property 222 | def function_call(self) -> FunctionCall | None: 223 | """Get the function call""" 224 | 225 | @property 226 | def is_final_response(self) -> bool: 227 | """Check if the message is a final response""" 228 | ... # pylint: disable=unnecessary-ellipsis 229 | 230 | def as_dict(self) -> MessageType: 231 | """Get the message as a dictionary""" 232 | ... # pylint: disable=unnecessary-ellipsis 233 | 234 | def __hash__(self) -> int: 235 | ... 236 | 237 | 238 | class FinalResponseMessage(GenericMessage, Protocol): 239 | """A container for OpenAI final response messages 240 | 241 | Inherited from GenericMessage, acts the same, just restricts 242 | the message to have content and not be a function call 243 | """ 244 | 245 | message: FinalResponseMessageType # type: ignore 246 | 247 | @property 248 | def content(self) -> str: 249 | """Get the content of the message""" 250 | # This ellipsis is for Pyright #2758 251 | ... # pylint: disable=unnecessary-ellipsis 252 | 253 | @property 254 | def function_call(self) -> None: 255 | """Get the function call""" 256 | 257 | @property 258 | def is_final_response(self) -> Literal[True]: 259 | """Check if the message is a final response""" 260 | ... # pylint: disable=unnecessary-ellipsis 261 | 262 | 263 | class FunctionCallMessage(GenericMessage, Protocol): 264 | """A container for OpenAI function call messages""" 265 | 266 | message: IntermediateResponseMessageType 267 | 268 | @property 269 | def content(self) -> None: 270 | """Get the content of the message""" 271 | 272 | @property 273 | def function_call(self) -> FunctionCall: 274 | """Get the function call""" 275 | ... # pylint: disable=unnecessary-ellipsis 276 | 277 | @property 278 | def is_final_response(self) -> Literal[False]: 279 | """Check if the message is a final response""" 280 | ... 
# pylint: disable=unnecessary-ellipsis 281 | 282 | 283 | def is_final_response_message( 284 | message: GenericMessage, 285 | ) -> TypeGuard[FinalResponseMessage]: 286 | """Check if a message is a final response message 287 | 288 | Args: 289 | message (GenericMessage): The message to check 290 | 291 | Returns: 292 | TypeGuard[FinalResponseMessage]: Whether the message is a final response message 293 | """ 294 | return message.is_final_response 295 | 296 | 297 | class ForcedFunctionCall(TypedDict): 298 | """A type for forced function calls""" 299 | 300 | name: str 301 | 302 | 303 | OpenAiFunctionCallInput = Union[ForcedFunctionCall, Literal["auto", "none"]] 304 | -------------------------------------------------------------------------------- /openai_functions/parsers/__init__.py: -------------------------------------------------------------------------------- 1 | """Parsers for arguments""" 2 | from .abc import ArgSchemaParser 3 | from .default import defargparsers 4 | 5 | 6 | __all__ = [ 7 | "ArgSchemaParser", 8 | "defargparsers", 9 | ] 10 | -------------------------------------------------------------------------------- /openai_functions/parsers/abc.py: -------------------------------------------------------------------------------- 1 | """Abstract base class for argument schema parsers""" 2 | from __future__ import annotations 3 | from abc import ABC, abstractmethod 4 | from typing import Any, Generic, TYPE_CHECKING, Type, TypeVar 5 | 6 | from ..exceptions import CannotParseTypeError 7 | 8 | if TYPE_CHECKING: 9 | from ..json_type import JsonType 10 | from typing_extensions import TypeGuard 11 | 12 | T = TypeVar("T") 13 | S = TypeVar("S") 14 | 15 | 16 | class ArgSchemaParser(ABC, Generic[T]): 17 | """An abstract parser for a specific argument type 18 | 19 | Both converts the argument definition to a JSON schema and parses the argument value 20 | from JSON. 
21 | """ 22 | 23 | def __init__( 24 | self, argtype: Type[T], rec_parsers: list[Type[ArgSchemaParser]] 25 | ) -> None: 26 | self.argtype = argtype 27 | self.rec_parsers = rec_parsers 28 | 29 | def parse_rec(self, argtype: Type[S]) -> ArgSchemaParser[S]: 30 | """Parse a type recursively 31 | 32 | Args: 33 | argtype (Type[S]): The type to parse 34 | 35 | Returns: 36 | ArgSchemaParser[S]: The parser for the type 37 | 38 | Raises: 39 | CannotParseTypeError: If the type cannot be parsed 40 | """ 41 | for parser in self.rec_parsers: 42 | if parser.can_parse(argtype): 43 | return parser(argtype, self.rec_parsers) 44 | raise CannotParseTypeError(argtype) 45 | 46 | @classmethod 47 | @abstractmethod 48 | def can_parse(cls, argtype: Any) -> TypeGuard[Type[T]]: 49 | """Whether this parser can parse a specific arg type 50 | 51 | Args: 52 | argtype (Any): The type to check 53 | """ 54 | 55 | @property 56 | @abstractmethod 57 | def argument_schema(self) -> dict[str, JsonType]: 58 | """Parse an argument of a specific type""" 59 | 60 | @abstractmethod 61 | def parse_value(self, value: JsonType) -> T: 62 | """Parse a value of a specific type 63 | 64 | Args: 65 | value (JsonType): The value to parse 66 | 67 | Raises: 68 | BrokenSchemaError: If the value does not match the schema 69 | """ 70 | -------------------------------------------------------------------------------- /openai_functions/parsers/atomic_type_parser.py: -------------------------------------------------------------------------------- 1 | """Parser for atomic json types""" 2 | from __future__ import annotations 3 | from abc import abstractmethod 4 | from typing import Any, TYPE_CHECKING, Type, TypeVar 5 | 6 | from ..exceptions import BrokenSchemaError 7 | from .abc import ArgSchemaParser 8 | 9 | if TYPE_CHECKING: 10 | from ..json_type import JsonType 11 | from typing_extensions import TypeGuard 12 | 13 | T = TypeVar("T") 14 | 15 | 16 | class AtomicParser(ArgSchemaParser[T]): 17 | """Parser for atomic json values""" 18 | 19 | _type: Type[T] 20 | 21 | @property 22 | @abstractmethod 23 | def schema_type_name(self) -> str: 24 | """Name of the type in the json schema""" 25 | 26 | @classmethod 27 | def can_parse(cls, argtype: Any) -> TypeGuard[Type[T]]: 28 | return argtype is cls._type 29 | 30 | @property 31 | def argument_schema(self) -> dict[str, JsonType]: 32 | return { 33 | "type": self.schema_type_name, 34 | } 35 | 36 | def parse_value(self, value: JsonType) -> T: 37 | if not isinstance(value, self._type): 38 | raise BrokenSchemaError(value, self.argument_schema) 39 | return value 40 | -------------------------------------------------------------------------------- /openai_functions/parsers/bool_parser.py: -------------------------------------------------------------------------------- 1 | """Parser for bool types""" 2 | from .atomic_type_parser import AtomicParser 3 | 4 | 5 | class BoolParser(AtomicParser[bool]): 6 | """Parser for bool types""" 7 | 8 | _type = bool 9 | schema_type_name: str = "boolean" 10 | -------------------------------------------------------------------------------- /openai_functions/parsers/dataclass_parser.py: -------------------------------------------------------------------------------- 1 | """Parser for dataclass types""" 2 | from __future__ import annotations 3 | import dataclasses 4 | from typing import Any, ClassVar, Protocol, TYPE_CHECKING, Type 5 | 6 | from ..exceptions import BrokenSchemaError 7 | from .abc import ArgSchemaParser 8 | 9 | if TYPE_CHECKING: 10 | from ..json_type import JsonType 11 | from 
typing_extensions import TypeGuard 12 | 13 | 14 | class IsDataclass(Protocol): # pylint: disable=too-few-public-methods 15 | """A protocol for checking if a class is a dataclass""" 16 | 17 | __dataclass_fields__: ClassVar[dict] 18 | 19 | 20 | class DataclassParser(ArgSchemaParser[IsDataclass]): 21 | """Parser for dataclass types""" 22 | 23 | @classmethod 24 | def can_parse(cls, argtype: Any) -> TypeGuard[Type[IsDataclass]]: 25 | return dataclasses.is_dataclass(argtype) 26 | 27 | @property 28 | def required_fields(self) -> list[str]: 29 | """All required fields of the dataclass 30 | 31 | Returns: 32 | list[str]: The required fields of the dataclass 33 | """ 34 | return [ 35 | field.name 36 | for field in dataclasses.fields(self.argtype) 37 | if field.default is dataclasses.MISSING 38 | ] 39 | 40 | @property 41 | def fields(self) -> dict[str, JsonType]: 42 | """All fields of the dataclass, with their schemas 43 | 44 | Returns: 45 | dict[str, JsonType]: The fields of the dataclass 46 | """ 47 | return { 48 | field.name: self.parse_rec(field.type).argument_schema 49 | for field in dataclasses.fields(self.argtype) 50 | } 51 | 52 | @property 53 | def argument_schema(self) -> dict[str, JsonType]: 54 | return { 55 | "type": "object", 56 | "description": self.argtype.__doc__, 57 | "properties": self.fields, 58 | "required": self.required_fields, # type: ignore 59 | } 60 | 61 | def parse_value(self, value: JsonType) -> IsDataclass: 62 | if not isinstance(value, dict): 63 | raise BrokenSchemaError(value, self.argument_schema) 64 | if not all(field in value for field in self.required_fields): 65 | raise BrokenSchemaError(value, self.argument_schema) 66 | if not all(field in self.fields for field in value): 67 | raise BrokenSchemaError(value, self.argument_schema) 68 | return self.argtype( 69 | **{ 70 | field.name: self.parse_rec(field.type).parse_value(value[field.name]) 71 | for field in dataclasses.fields(self.argtype) 72 | if field.name in value 73 | } 74 | ) 75 | -------------------------------------------------------------------------------- /openai_functions/parsers/default.py: -------------------------------------------------------------------------------- 1 | """Default parsers for ArgSchema.""" 2 | from __future__ import annotations 3 | from typing import TYPE_CHECKING, Type 4 | 5 | from .bool_parser import BoolParser 6 | from .dataclass_parser import DataclassParser 7 | from .dict_parser import DictParser 8 | from .enum_parser import EnumParser 9 | from .float_parser import FloatParser 10 | from .int_parser import IntParser 11 | from .list_parser import ListParser 12 | from .none_parser import NoneParser 13 | from .str_parser import StringParser 14 | from .union_parser import UnionParser 15 | 16 | if TYPE_CHECKING: 17 | from .abc import ArgSchemaParser 18 | 19 | 20 | defargparsers: list[Type[ArgSchemaParser]] = [ 21 | BoolParser, 22 | DataclassParser, 23 | DictParser, 24 | EnumParser, 25 | FloatParser, 26 | IntParser, 27 | ListParser, 28 | NoneParser, 29 | StringParser, 30 | UnionParser, 31 | ] 32 | -------------------------------------------------------------------------------- /openai_functions/parsers/dict_parser.py: -------------------------------------------------------------------------------- 1 | """Parser for dict types""" 2 | from __future__ import annotations 3 | from typing import Any, Dict, TYPE_CHECKING, Type, TypeVar, get_args, get_origin 4 | 5 | from ..exceptions import BrokenSchemaError 6 | from .abc import ArgSchemaParser 7 | 8 | if TYPE_CHECKING: 9 | from ..json_type import 
JsonType 10 | from typing_extensions import TypeGuard 11 | 12 | T = TypeVar("T") 13 | 14 | 15 | class DictParser(ArgSchemaParser[Dict[str, T]]): 16 | """Parser for dict types""" 17 | 18 | @classmethod 19 | def can_parse(cls, argtype: Any) -> TypeGuard[Type[Dict[str, T]]]: 20 | return ( 21 | get_origin(argtype) 22 | in [ 23 | dict, 24 | Dict, 25 | ] 26 | and get_args(argtype)[:1] == (str,) # bare Dict has no args; keys must be str 27 | ) 28 | 29 | @property 30 | def argument_schema(self) -> Dict[str, JsonType]: 31 | return { 32 | "type": "object", 33 | "additionalProperties": self.parse_rec( 34 | get_args(self.argtype)[1] 35 | ).argument_schema, 36 | } 37 | 38 | def parse_value(self, value: JsonType) -> Dict[str, T]: 39 | if not isinstance(value, dict): 40 | raise BrokenSchemaError(value, self.argument_schema) 41 | return { 42 | k: self.parse_rec(get_args(self.argtype)[1]).parse_value(v) 43 | for k, v in value.items() 44 | } 45 | -------------------------------------------------------------------------------- /openai_functions/parsers/enum_parser.py: -------------------------------------------------------------------------------- 1 | """Parser for enum types""" 2 | from __future__ import annotations 3 | import enum 4 | from typing import Any, TYPE_CHECKING, Type, TypeVar 5 | 6 | from ..exceptions import BrokenSchemaError 7 | from .abc import ArgSchemaParser 8 | 9 | if TYPE_CHECKING: 10 | from ..json_type import JsonType 11 | from typing_extensions import TypeGuard 12 | 13 | T = TypeVar("T", bound=enum.Enum) 14 | 15 | 16 | class EnumParser(ArgSchemaParser[T]): 17 | """Parser for enum types""" 18 | 19 | @classmethod 20 | def can_parse(cls, argtype: Any) -> TypeGuard[Type[T]]: 21 | if not isinstance(argtype, type): 22 | return False 23 | return issubclass(argtype, enum.Enum) 24 | 25 | @property 26 | def argument_schema(self) -> dict[str, JsonType]: 27 | schema: dict[str, JsonType] = { 28 | "type": "string", 29 | "enum": [e.name for e in self.argtype], 30 | } 31 | if self.argtype.__doc__ is not None: 32 | schema["description"] = self.argtype.__doc__ 33 | return schema 34 | 35 | def parse_value(self, value: JsonType) -> T: 36 | if not isinstance(value, str): 37 | raise BrokenSchemaError(value, self.argument_schema) 38 | if value not in self.argument_schema["enum"]: # type: ignore 39 | # TODO: consider using something other than JsonType for 40 | # all of these, because disabling mypy is definitely 41 | # not the right way to do this 42 | raise BrokenSchemaError(value, self.argument_schema) 43 | return self.argtype[value] 44 | -------------------------------------------------------------------------------- /openai_functions/parsers/float_parser.py: -------------------------------------------------------------------------------- 1 | """Parser for float types""" 2 | from __future__ import annotations 3 | from typing import TYPE_CHECKING 4 | 5 | from ..exceptions import BrokenSchemaError 6 | from .atomic_type_parser import AtomicParser 7 | 8 | if TYPE_CHECKING: 9 | from ..json_type import JsonType 10 | 11 | 12 | class FloatParser(AtomicParser[float]): 13 | """Parser for float types""" 14 | 15 | _type = float 16 | schema_type_name: str = "number" 17 | 18 | def parse_value(self, value: JsonType) -> float: 19 | if not isinstance(value, (float, int)) or isinstance(value, bool): # bool subclasses int but is not a number 20 | raise BrokenSchemaError(value, self.argument_schema) 21 | return float(value) 22 | -------------------------------------------------------------------------------- /openai_functions/parsers/int_parser.py: -------------------------------------------------------------------------------- 1 |
"""Parser for int types""" 2 | from __future__ import annotations 3 | from typing import TYPE_CHECKING 4 | 5 | from ..exceptions import BrokenSchemaError 6 | from .atomic_type_parser import AtomicParser 7 | 8 | if TYPE_CHECKING: 9 | from ..json_type import JsonType 10 | 11 | 12 | class IntParser(AtomicParser[int]): 13 | """Parser for int types""" 14 | 15 | _type = int 16 | schema_type_name: str = "integer" 17 | 18 | def parse_value(self, value: JsonType) -> int: 19 | if isinstance(value, bool): 20 | # This has to happen for historical reasons 21 | # bool is a subclass of int, so isinstance(value, int) is True 22 | raise BrokenSchemaError(value, self.argument_schema) 23 | return super().parse_value(value) 24 | -------------------------------------------------------------------------------- /openai_functions/parsers/list_parser.py: -------------------------------------------------------------------------------- 1 | """Parser for list types""" 2 | from __future__ import annotations 3 | from typing import Any, List, TYPE_CHECKING, Type, TypeVar, get_args, get_origin 4 | 5 | from ..exceptions import BrokenSchemaError 6 | from .abc import ArgSchemaParser 7 | 8 | if TYPE_CHECKING: 9 | from ..json_type import JsonType 10 | from typing_extensions import TypeGuard 11 | 12 | T = TypeVar("T") 13 | 14 | 15 | class ListParser(ArgSchemaParser[List[T]]): 16 | """Parser for list types""" 17 | 18 | @classmethod 19 | def can_parse(cls, argtype: Any) -> TypeGuard[Type[List[T]]]: 20 | return get_origin(argtype) in [ 21 | list, 22 | List, 23 | ] 24 | 25 | @property 26 | def argument_schema(self) -> dict[str, JsonType]: 27 | return { 28 | "type": "array", 29 | "items": self.parse_rec(get_args(self.argtype)[0]).argument_schema, 30 | } 31 | 32 | def parse_value(self, value: JsonType) -> List[T]: 33 | if not isinstance(value, list): 34 | raise BrokenSchemaError(value, self.argument_schema) 35 | return [self.parse_rec(get_args(self.argtype)[0]).parse_value(v) for v in value] 36 | -------------------------------------------------------------------------------- /openai_functions/parsers/none_parser.py: -------------------------------------------------------------------------------- 1 | """Parser for null types""" 2 | from __future__ import annotations 3 | from typing import Any, TYPE_CHECKING, Type 4 | 5 | from ..exceptions import BrokenSchemaError 6 | from .abc import ArgSchemaParser 7 | 8 | if TYPE_CHECKING: 9 | from ..json_type import JsonType 10 | from typing_extensions import TypeGuard 11 | 12 | 13 | class NoneParser(ArgSchemaParser[None]): 14 | """Parser for null types""" 15 | 16 | @classmethod 17 | def can_parse(cls, argtype: Any) -> TypeGuard[Type[None]]: 18 | return argtype in [None, type(None)] 19 | 20 | @property 21 | def argument_schema(self) -> dict[str, JsonType]: 22 | return {"type": "null"} 23 | 24 | def parse_value(self, value: JsonType) -> None: 25 | if value is not None: 26 | raise BrokenSchemaError(value, self.argument_schema) 27 | -------------------------------------------------------------------------------- /openai_functions/parsers/str_parser.py: -------------------------------------------------------------------------------- 1 | """Parser for string types""" 2 | from .atomic_type_parser import AtomicParser 3 | 4 | 5 | class StringParser(AtomicParser[str]): 6 | """Parser for string types""" 7 | 8 | _type = str 9 | schema_type_name: str = "string" 10 | -------------------------------------------------------------------------------- /openai_functions/parsers/union_parser.py: 
-------------------------------------------------------------------------------- 1 | """Parser for union types""" 2 | from __future__ import annotations 3 | import contextlib 4 | 5 | from ..exceptions import BrokenSchemaError 6 | from .abc import ArgSchemaParser 7 | 8 | try: 9 | from types import UnionType 10 | from typing import Any, TYPE_CHECKING, Type, Union, get_args, get_origin 11 | except ImportError: 12 | # Fallback for Python 3.9 and earlier: types.UnionType was only added in 3.10 13 | from typing import ( # type: ignore 14 | Any, 15 | TYPE_CHECKING, 16 | Type, 17 | Union, 18 | get_args, 19 | get_origin, 20 | _GenericAlias as UnionType, 21 | ) 22 | 23 | if TYPE_CHECKING: 24 | from ..json_type import JsonType 25 | from typing_extensions import TypeGuard 26 | 27 | 28 | class UnionParser(ArgSchemaParser[UnionType]): 29 | """Parser for union types""" 30 | 31 | @classmethod 32 | def can_parse(cls, argtype: Any) -> TypeGuard[Type[UnionType]]: 33 | return get_origin(argtype) in (Union, UnionType) # Union[X, Y] or PEP 604 X | Y 34 | 35 | @property 36 | def argument_schema(self) -> dict[str, JsonType]: 37 | return { 38 | "anyOf": [self.parse_rec(t).argument_schema for t in get_args(self.argtype)] 39 | } 40 | 41 | def parse_value(self, value: JsonType) -> UnionType: 42 | for single_type in get_args(self.argtype): 43 | with contextlib.suppress(BrokenSchemaError): 44 | return self.parse_rec(single_type).parse_value(value) 45 | raise BrokenSchemaError(value, self.argument_schema) 46 | -------------------------------------------------------------------------------- /openai_functions/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizerphe/openai-functions/d86093e07208403b47223fac34fc3f7c69519a46/openai_functions/py.typed -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "openai-functions" 3 | version = "1.0.2" 4 | description = "Simplifies the usage of OpenAI ChatGPT's function calling by generating the schemas and parsing OpenAI's responses for you."
5 | authors = ["rizerphe <44440399+rizerphe@users.noreply.github.com>"] 6 | readme = "README.md" 7 | homepage = "https://github.com/rizerphe/openai-functions" 8 | documentation = "https://openai-functions.readthedocs.io/" 9 | keywords = ["nlp", "openai", "openai-api", "chatgpt", "chatgpt-api", "wrapper", "functions", "openai-functions", "chatgpt-functions", "typing", "docstring", "docstrings", "decorators", "signatures", "parsing"] 10 | license = "MIT" 11 | packages = [{include = "openai_functions"}] 12 | 13 | [tool.poetry.dependencies] 14 | python = "^3.8" 15 | openai = "^0.27.8" 16 | docstring-parser = "^0.15" 17 | typing-extensions = "^4.6.3" 18 | 19 | 20 | [tool.poetry.group.dev.dependencies] 21 | tox-poetry-installer = {extras = ["poetry"], version = "^0.10.3"} 22 | pytest = "^7.4.0" 23 | pytest-cov = "^4.1.0" 24 | 25 | [build-system] 26 | requires = ["poetry-core"] 27 | build-backend = "poetry.core.masonry.api" 28 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizerphe/openai-functions/d86093e07208403b47223fac34fc3f7c69519a46/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_basic_set.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from dataclasses import dataclass 3 | import json 4 | from typing import Any, Callable, TYPE_CHECKING 5 | 6 | import pytest 7 | 8 | from openai_functions import ( 9 | BasicFunctionSet, 10 | FunctionCall, 11 | FunctionNotFoundError, 12 | InvalidJsonError, 13 | ) 14 | 15 | if TYPE_CHECKING: 16 | from openai_functions.json_type import JsonType 17 | 18 | 19 | @dataclass 20 | class MockOpenAIFunction: 21 | name: str 22 | schema: JsonType 23 | function: Callable[..., JsonType] 24 | save_return: bool 25 | serialize: bool 26 | remove_call: bool 27 | interpret_as_response: bool 28 | 29 | def __call__(self, *args: Any, **kwds: Any) -> Any: 30 | return self.function(*args, **kwds) 31 | 32 | 33 | def test_init_functions() -> None: 34 | functions: list[MockOpenAIFunction] = [ 35 | MockOpenAIFunction( 36 | name="test_function", 37 | schema={"type": "object", "properties": {"arg1": {"type": "string"}}}, 38 | function=lambda args: args["arg1"], 39 | save_return=True, 40 | serialize=False, 41 | remove_call=True, 42 | interpret_as_response=False, 43 | ) 44 | ] 45 | function_set = BasicFunctionSet(functions=functions) 46 | assert function_set.functions == functions 47 | 48 | 49 | def test_init_no_functions() -> None: 50 | function_set = BasicFunctionSet() 51 | assert function_set.functions == [] 52 | 53 | 54 | def test_functions_schema() -> None: 55 | functions = [ 56 | MockOpenAIFunction( 57 | name="test_function", 58 | schema={"type": "object", "properties": {"arg1": {"type": "string"}}}, 59 | function=lambda args: args["arg1"], 60 | save_return=True, 61 | serialize=False, 62 | remove_call=True, 63 | interpret_as_response=False, 64 | ) 65 | ] 66 | function_set = BasicFunctionSet(functions=functions) 67 | expected_schema = [{"type": "object", "properties": {"arg1": {"type": "string"}}}] 68 | assert function_set.functions_schema == expected_schema 69 | 70 | 71 | def test_run_function() -> None: 72 | functions = [ 73 | MockOpenAIFunction( 74 | name="test_function", 75 | schema={"type": "object", "properties": {"arg1": {"type": "string"}}}, 76 |
function=lambda args: args["arg1"], 77 | save_return=True, 78 | serialize=False, 79 | remove_call=True, 80 | interpret_as_response=False, 81 | ) 82 | ] 83 | function_set = BasicFunctionSet(functions=functions) 84 | input_data = FunctionCall( 85 | name="test_function", arguments=json.dumps({"arg1": "hello"}) 86 | ) 87 | result = function_set.run_function(input_data) 88 | assert result.name == "test_function" 89 | assert result.result == "hello" 90 | assert result.remove_call is True 91 | assert result.interpret_return_as_response is False 92 | 93 | 94 | def test_run_function_invalid_json() -> None: 95 | functions = [ 96 | MockOpenAIFunction( 97 | name="test_function", 98 | schema={"type": "object", "properties": {"arg1": {"type": "string"}}}, 99 | function=lambda args: args["arg1"], 100 | save_return=True, 101 | serialize=False, 102 | remove_call=True, 103 | interpret_as_response=False, 104 | ) 105 | ] 106 | function_set = BasicFunctionSet(functions=functions) 107 | input_data = FunctionCall(name="test_function", arguments="invalid json") 108 | with pytest.raises(InvalidJsonError): 109 | function_set.run_function(input_data) 110 | 111 | 112 | def test_run_function_function_not_found() -> None: 113 | function_set = BasicFunctionSet() 114 | input_data = FunctionCall( 115 | name="test_function", arguments=json.dumps({"arg1": "hello"}) 116 | ) 117 | with pytest.raises(FunctionNotFoundError): 118 | function_set.run_function(input_data) 119 | 120 | 121 | def test_find_function() -> None: 122 | functions = [ 123 | MockOpenAIFunction( 124 | name="test_function", 125 | schema={"type": "object", "properties": {"arg1": {"type": "string"}}}, 126 | function=lambda args: args["arg1"], 127 | save_return=True, 128 | serialize=False, 129 | remove_call=True, 130 | interpret_as_response=False, 131 | ) 132 | ] 133 | function_set = BasicFunctionSet(functions=functions) 134 | function = function_set.find_function("test_function") 135 | assert function.name == "test_function" 136 | 137 | 138 | def test_find_function_not_found() -> None: 139 | function_set = BasicFunctionSet() 140 | with pytest.raises(FunctionNotFoundError): 141 | function_set.find_function("test_function") 142 | 143 | 144 | def test_get_function_result() -> None: 145 | functions = [ 146 | MockOpenAIFunction( 147 | name="test_function", 148 | schema={"type": "object", "properties": {"arg1": {"type": "string"}}}, 149 | function=lambda args: args["arg1"], 150 | save_return=True, 151 | serialize=False, 152 | remove_call=True, 153 | interpret_as_response=False, 154 | ) 155 | ] 156 | function_set = BasicFunctionSet(functions=functions) 157 | function = function_set.find_function("test_function") 158 | arguments = {"arg1": "hello"} 159 | result = function_set.get_function_result(function, arguments) 160 | assert result is not None 161 | assert result.result == "hello" 162 | 163 | 164 | def test_get_function_result_no_save_return() -> None: 165 | functions = [ 166 | MockOpenAIFunction( 167 | name="test_function", 168 | schema={"type": "object", "properties": {"arg1": {"type": "string"}}}, 169 | function=lambda args: None, 170 | save_return=False, 171 | serialize=False, 172 | remove_call=True, 173 | interpret_as_response=False, 174 | ) 175 | ] 176 | function_set = BasicFunctionSet(functions=functions) 177 | function = function_set.find_function("test_function") 178 | arguments = {"arg1": "hello"} 179 | result = function_set.get_function_result(function, arguments) 180 | assert result is None 181 | 182 | 183 | def test_add_function() -> None: 184 | 
function_set = BasicFunctionSet() 185 | function = MockOpenAIFunction( 186 | name="test_function", 187 | schema={"type": "object", "properties": {"arg1": {"type": "string"}}}, 188 | function=lambda args: args["arg1"], 189 | save_return=True, 190 | serialize=False, 191 | remove_call=True, 192 | interpret_as_response=False, 193 | ) 194 | function_set.add_function(function) 195 | assert function_set.functions == [function] 196 | 197 | 198 | def test_remove_function() -> None: 199 | functions = [ 200 | MockOpenAIFunction( 201 | name="test_function", 202 | schema={"type": "object", "properties": {"arg1": {"type": "string"}}}, 203 | function=lambda args: args["arg1"], 204 | save_return=True, 205 | serialize=False, 206 | remove_call=True, 207 | interpret_as_response=False, 208 | ) 209 | ] 210 | function_set = BasicFunctionSet(functions=functions) 211 | function_set.remove_function("test_function") 212 | assert function_set.functions == [] 213 | -------------------------------------------------------------------------------- /tests/test_conversation.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock 2 | 3 | from openai_functions import Conversation, Message 4 | 5 | 6 | def test_add_function(): 7 | conversation = Conversation() 8 | 9 | def function(): 10 | ... 11 | 12 | conversation.add_function(function) 13 | assert conversation.skills.functions[0].name == "function" 14 | 15 | 16 | def test_remove_function(): 17 | conversation = Conversation() 18 | function = MagicMock() 19 | conversation.add_function(function) 20 | conversation.remove_function(function) 21 | assert function.name not in [ 22 | f.name for f in conversation.skills.functions 23 | ] 24 | 25 | 26 | def test_ask(): 27 | conversation = Conversation() 28 | conversation.generate_message = MagicMock( 29 | return_value=Message("Hello, World!", "assistant") 30 | ) 31 | response = conversation.ask("What's up?") 32 | assert response == "Hello, World!" 33 | 34 | 35 | def test_run(): 36 | conversation = Conversation() 37 | conversation.generate_message = MagicMock( 38 | return_value=Message( 39 | { 40 | "role": "assistant", 41 | "content": None, 42 | "function_call": {"name": "test_function", "args": {"test": "test"}}, 43 | }, 44 | ) 45 | ) 46 | conversation.skills = MagicMock(return_value="Test Result") 47 | result = conversation.run("test_function") 48 | conversation.skills.assert_called_once_with( 49 | {"name": "test_function", "args": {"test": "test"}} 50 | ) 51 | assert result == "Test Result" 52 | -------------------------------------------------------------------------------- /tests/test_function_wrapper.py: -------------------------------------------------------------------------------- 1 | """Test the function_wrapper module.""" 2 | 3 | from dataclasses import dataclass 4 | import enum 5 | from typing import Dict, List, Union 6 | 7 | import pytest 8 | 9 | from openai_functions import BrokenSchemaError, CannotParseTypeError, FunctionWrapper 10 | 11 | 12 | def test_function_schema_generation_empty(): 13 | """Test that the empty function schema is generated correctly.""" 14 | 15 | def test_function(): 16 | ...
17 | 18 | function_wrapper = FunctionWrapper(test_function) 19 | function_schema = function_wrapper.schema 20 | 21 | assert function_wrapper.name == "test_function" 22 | assert function_schema == { 23 | "name": "test_function", 24 | "parameters": { 25 | "type": "object", 26 | "properties": {}, 27 | "required": [], 28 | }, 29 | } 30 | 31 | 32 | def test_function_schema_generation_docstring(): 33 | """Test that the function schema is generated correctly with the docstring.""" 34 | 35 | def test_function(): 36 | """Test function docstring.""" 37 | 38 | function_wrapper = FunctionWrapper(test_function) 39 | function_schema = function_wrapper.schema 40 | 41 | assert function_wrapper.name == "test_function" 42 | assert function_schema == { 43 | "name": "test_function", 44 | "description": "Test function docstring.", 45 | "parameters": { 46 | "type": "object", 47 | "properties": {}, 48 | "required": [], 49 | }, 50 | } 51 | 52 | 53 | def test_function_schema_generation_parameters(): 54 | """Test that the function schema is generated correctly from the parameters.""" 55 | 56 | def test_function(param1: int, param2: str, param3: bool): 57 | """Test function docstring.""" 58 | 59 | function_wrapper = FunctionWrapper(test_function) 60 | function_schema = function_wrapper.schema 61 | 62 | assert function_wrapper.name == "test_function" 63 | assert function_schema == { 64 | "name": "test_function", 65 | "description": "Test function docstring.", 66 | "parameters": { 67 | "type": "object", 68 | "properties": { 69 | "param1": {"type": "integer"}, 70 | "param2": {"type": "string"}, 71 | "param3": {"type": "boolean"}, 72 | }, 73 | "required": ["param1", "param2", "param3"], 74 | }, 75 | } 76 | 77 | 78 | def test_function_schema_generation_parameters_with_defaults(): 79 | """Test that parameters with defaults are omitted from the required list.""" 80 | 81 | def test_function(param1: int, param2: str = "default", param3: bool = True): 82 | """Test function docstring.""" 83 | 84 | function_wrapper = FunctionWrapper(test_function) 85 | function_schema = function_wrapper.schema 86 | 87 | assert function_wrapper.name == "test_function" 88 | assert function_schema == { 89 | "name": "test_function", 90 | "description": "Test function docstring.", 91 | "parameters": { 92 | "type": "object", 93 | "properties": { 94 | "param1": {"type": "integer"}, 95 | "param2": { 96 | "type": "string", 97 | }, 98 | "param3": {"type": "boolean"}, 99 | }, 100 | "required": ["param1"], 101 | }, 102 | } 103 | 104 | 105 | def test_function_schema_generation_parameters_with_param_docs(): 106 | """Test that the function parameters are described correctly.""" 107 | 108 | def test_function(param1: int, param2: str, param3: bool): 109 | """Test function docstring. 110 | 111 | Args: 112 | param1: Parameter 1 description. 113 | param3: Parameter 3 description. 114 | param4: Parameter 4 description.
115 | """ 116 | 117 | function_wrapper = FunctionWrapper(test_function) 118 | function_schema = function_wrapper.schema 119 | 120 | assert function_wrapper.name == "test_function" 121 | assert function_schema == { 122 | "name": "test_function", 123 | "description": "Test function docstring.", 124 | "parameters": { 125 | "type": "object", 126 | "properties": { 127 | "param1": { 128 | "type": "integer", 129 | "description": "Parameter 1 description.", 130 | }, 131 | "param2": {"type": "string"}, 132 | "param3": { 133 | "type": "boolean", 134 | "description": "Parameter 3 description.", 135 | }, 136 | }, 137 | "required": ["param1", "param2", "param3"], 138 | }, 139 | } 140 | 141 | 142 | def test_function_schema_generation_parameters_with_union_types(): 143 | """Test that union-typed parameters produce an anyOf schema.""" 144 | 145 | def test_function(param1: Union[int, str]): 146 | """Test function docstring.""" 147 | 148 | function_wrapper = FunctionWrapper(test_function) 149 | function_schema = function_wrapper.schema 150 | 151 | assert function_wrapper.name == "test_function" 152 | assert "parameters" in function_schema 153 | assert isinstance(function_schema["parameters"], dict) 154 | assert "properties" in function_schema["parameters"] 155 | assert isinstance(function_schema["parameters"]["properties"], dict) 156 | assert "param1" in function_schema["parameters"]["properties"] 157 | assert isinstance(function_schema["parameters"]["properties"]["param1"], dict) 158 | assert "anyOf" in function_schema["parameters"]["properties"]["param1"] 159 | assert isinstance( 160 | function_schema["parameters"]["properties"]["param1"]["anyOf"], list 161 | ) 162 | 163 | instances = function_schema["parameters"]["properties"]["param1"]["anyOf"] 164 | expected_instances = [ 165 | {"type": "integer"}, 166 | {"type": "string"}, 167 | ] 168 | 169 | assert len(instances) == len(expected_instances) 170 | for instance in instances: 171 | assert instance in expected_instances 172 | 173 | 174 | def test_function_schema_generation_invalid_parameters(): 175 | """Test that unparsable parameter types raise CannotParseTypeError.""" 176 | 177 | def test_function(param1: object, param2: str, param3: bool): 178 | """Test function docstring.""" 179 | 180 | with pytest.raises(CannotParseTypeError): 181 | FunctionWrapper(test_function).schema 182 | 183 | 184 | def test_function_call(): 185 | """Test that the function is called correctly.""" 186 | 187 | def test_function(param1: int, param2: str, param3: bool, param4: None): 188 | """Test function docstring.""" 189 | assert param1 == 1 190 | assert param2 == "test" 191 | assert param3 is True 192 | assert param4 is None 193 | 194 | function_wrapper = FunctionWrapper(test_function) 195 | function_wrapper({"param1": 1, "param2": "test", "param3": True, "param4": None}) 196 | 197 | 198 | def test_function_call_with_union(): 199 | """Test that the function is called correctly.""" 200 | 201 | def test_function(param1: Union[int, str, None]): 202 | """Test function docstring.""" 203 | return param1 204 | 205 | function_wrapper = FunctionWrapper(test_function) 206 | 207 | assert function_wrapper({"param1": 1}) == 1 208 | assert function_wrapper({"param1": "test"}) == "test" 209 | assert function_wrapper({"param1": None}) is None 210 | with pytest.raises(BrokenSchemaError): 211 | function_wrapper({"param1": True}) 212 | 213 | 214 | def test_dataclass(): 215 | """Test that dataclass schemas are generated properly""" 216 | 217 | @dataclass 218 | class
Container: 219 | """Container dataclass""" 220 | 221 | item: int 222 | priority: int = 5 223 | 224 | def test_function(container: Container): 225 | """Test function docstring.""" 226 | assert isinstance(container, Container) 227 | assert container.item == 1 228 | assert container.priority == 2 229 | 230 | function_wrapper = FunctionWrapper(test_function) 231 | function_schema = function_wrapper.schema 232 | 233 | assert function_schema == { 234 | "name": "test_function", 235 | "description": "Test function docstring.", 236 | "parameters": { 237 | "type": "object", 238 | "properties": { 239 | "container": { 240 | "type": "object", 241 | "description": "Container dataclass", 242 | "properties": { 243 | "item": {"type": "integer"}, 244 | "priority": {"type": "integer"}, 245 | }, 246 | "required": ["item"], 247 | } 248 | }, 249 | "required": ["container"], 250 | }, 251 | } 252 | function_wrapper({"container": {"item": 1, "priority": 2}}) 253 | with pytest.raises(BrokenSchemaError): 254 | function_wrapper({"container": 1}) 255 | 256 | 257 | def test_dataclass_with_nested_dataclass(): 258 | """Test that dataclass schemas are generated properly""" 259 | 260 | @dataclass 261 | class Contained: 262 | """Contained dataclass""" 263 | 264 | item: int 265 | priority: int = 5 266 | 267 | @dataclass 268 | class Container: 269 | item: Contained 270 | priority: int = 5 271 | 272 | def test_function(container: Container): 273 | """Test function docstring. 274 | 275 | Args: 276 | container: Container dataclass 277 | """ 278 | assert isinstance(container, Container) 279 | assert isinstance(container.item, Contained) 280 | assert container.item.item == 1 281 | assert container.item.priority == 2 282 | assert container.priority == 5 283 | 284 | function_wrapper = FunctionWrapper(test_function) 285 | function_schema = function_wrapper.schema 286 | 287 | assert function_schema == { 288 | "name": "test_function", 289 | "description": "Test function docstring.", 290 | "parameters": { 291 | "type": "object", 292 | "properties": { 293 | "container": { 294 | "type": "object", 295 | "description": "Container dataclass", 296 | "properties": { 297 | "item": { 298 | "type": "object", 299 | "description": "Contained dataclass", 300 | "properties": { 301 | "item": {"type": "integer"}, 302 | "priority": {"type": "integer"}, 303 | }, 304 | "required": ["item"], 305 | }, 306 | "priority": {"type": "integer"}, 307 | }, 308 | "required": ["item"], 309 | } 310 | }, 311 | "required": ["container"], 312 | }, 313 | } 314 | function_wrapper({"container": {"item": {"item": 1, "priority": 2}}}) 315 | 316 | 317 | def test_invalid_dataclass_field(): 318 | """Test that an unparsable dataclass field raises CannotParseTypeError""" 319 | 320 | @dataclass 321 | class Container: 322 | x: object 323 | 324 | def test_function(container: Container): 325 | """Test function docstring.""" 326 | 327 | with pytest.raises(CannotParseTypeError): 328 | FunctionWrapper(test_function).schema 329 | 330 | 331 | def test_dictionary(): 332 | """Test that dictionary schemas are generated properly""" 333 | 334 | def test_function(container: Dict[str, int]): 335 | """Test function docstring.""" 336 | assert isinstance(container, dict) 337 | assert container["item"] == 1 338 | assert container["priority"] == 2 339 | 340 | function_wrapper = FunctionWrapper(test_function) 341 | function_schema = function_wrapper.schema 342 | 343 | assert function_schema == { 344 | "name": "test_function", 345 | "description": "Test function docstring.", 346 | "parameters": { 347 | "type": "object",
348 | "properties": { 349 | "container": { 350 | "type": "object", 351 | "additionalProperties": {"type": "integer"}, 352 | } 353 | }, 354 | "required": ["container"], 355 | }, 356 | } 357 | function_wrapper({"container": {"item": 1, "priority": 2}}) 358 | with pytest.raises(BrokenSchemaError): 359 | function_wrapper({"container": [(1, 2), (3, 4)]}) 360 | 361 | 362 | def test_array(): 363 | """Test that array schemas are generated properly""" 364 | 365 | def test_function(container: List[Union[int, str]]): 366 | """Test function docstring.""" 367 | assert container == [1, "test"] 368 | 369 | function_wrapper = FunctionWrapper(test_function) 370 | function_schema = function_wrapper.schema 371 | 372 | assert function_schema == { 373 | "name": "test_function", 374 | "description": "Test function docstring.", 375 | "parameters": { 376 | "type": "object", 377 | "properties": { 378 | "container": { 379 | "type": "array", 380 | "items": {"anyOf": [{"type": "integer"}, {"type": "string"}]}, 381 | } 382 | }, 383 | "required": ["container"], 384 | }, 385 | } 386 | function_wrapper({"container": [1, "test"]}) 387 | with pytest.raises(BrokenSchemaError): 388 | function_wrapper({"container": "test"}) 389 | 390 | 391 | def test_enum(): 392 | """Test that enum schemas are generated properly""" 393 | 394 | class Priority(enum.Enum): 395 | """Priority enum""" 396 | 397 | LOW = 0 398 | MEDIUM = 1 399 | HIGH = 2 400 | 401 | def test_function(priority: Priority): 402 | """Test function docstring.""" 403 | assert priority == Priority.LOW 404 | 405 | function_wrapper = FunctionWrapper(test_function) 406 | function_schema = function_wrapper.schema 407 | 408 | assert function_schema == { 409 | "name": "test_function", 410 | "description": "Test function docstring.", 411 | "parameters": { 412 | "type": "object", 413 | "properties": { 414 | "priority": { 415 | "type": "string", 416 | "enum": ["LOW", "MEDIUM", "HIGH"], 417 | "description": "Priority enum", 418 | } 419 | }, 420 | "required": ["priority"], 421 | }, 422 | } 423 | function_wrapper({"priority": "LOW"}) 424 | with pytest.raises(BrokenSchemaError): 425 | function_wrapper({"priority": 1}) 426 | 427 | 428 | def test_none(): 429 | """Test that None schemas are generated properly""" 430 | 431 | def test_function(container: None): 432 | """Test function docstring.""" 433 | assert container is None 434 | 435 | function_wrapper = FunctionWrapper(test_function) 436 | function_schema = function_wrapper.schema 437 | 438 | assert function_schema == { 439 | "name": "test_function", 440 | "description": "Test function docstring.", 441 | "parameters": { 442 | "type": "object", 443 | "properties": {"container": {"type": "null"}}, 444 | "required": ["container"], 445 | }, 446 | } 447 | function_wrapper({"container": None}) 448 | -------------------------------------------------------------------------------- /tests/test_skills.py: -------------------------------------------------------------------------------- 1 | """Test the skills.""" 2 | import pytest 3 | 4 | from openai_functions import BasicFunctionSet, FunctionNotFoundError, FunctionWrapper 5 | 6 | 7 | def test_skills_functions(): 8 | """Test the skills function management.""" 9 | skills = BasicFunctionSet() 10 | 11 | @skills.add_function 12 | def test_function(): 13 | """Test function.""" 14 | 15 | @skills.add_function 16 | def removed_function(): 17 | """Removed function.""" 18 | 19 | @skills.add_function 20 | def test_function_with_params(a: int, b: int): 21 | """Test function with params.""" 22 | assert a == 1 
23 | assert b == 2 24 | return a + b 25 | 26 | skills.add_function(FunctionWrapper(lambda: None)) 27 | skills.remove_function("removed_function") 28 | 29 | assert skills.functions_schema == [ 30 | { 31 | "name": "test_function", 32 | "description": "Test function.", 33 | "parameters": { 34 | "type": "object", 35 | "properties": {}, 36 | "required": [], 37 | }, 38 | }, 39 | { 40 | "name": "test_function_with_params", 41 | "description": "Test function with params.", 42 | "parameters": { 43 | "type": "object", 44 | "properties": { 45 | "a": {"type": "integer"}, 46 | "b": {"type": "integer"}, 47 | }, 48 | "required": ["a", "b"], 49 | }, 50 | }, 51 | { 52 | "name": "", 53 | "parameters": { 54 | "type": "object", 55 | "properties": {}, 56 | "required": [], 57 | }, 58 | }, 59 | ] 60 | assert ( 61 | skills.run_function( 62 | { 63 | "name": "test_function_with_params", 64 | "arguments": '{"a": 1, "b": 2}', 65 | } 66 | ).content 67 | == "3" 68 | ) 69 | with pytest.raises(FunctionNotFoundError): 70 | skills.run_function({"name": "invalid_function", "arguments": "{}"}) 71 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | minversion = 3.8.0 3 | envlist = py38, py39, py310, py311, mypy, pylint, flake8 4 | isolated_build = true 5 | 6 | [gh-actions] 7 | python = 8 | 3.8: py38 9 | 3.9: py39 10 | 3.10: py310 11 | 3.11: py311, mypy, pylint, flake8 12 | 13 | [testenv] 14 | description = Basic testing environment 15 | deps = 16 | pytest 17 | pytest-cov 18 | commands = 19 | pytest --cov --cov-append 20 | 21 | [testenv:mypy] 22 | description = Type checking environment 23 | basepython = python3.11 24 | deps = 25 | mypy 26 | commands = 27 | mypy openai_functions 28 | 29 | [testenv:pylint] 30 | description = Pylint environment 31 | basepython = python3.11 32 | deps = 33 | pylint 34 | commands = 35 | pylint --fail-under 9 openai_functions 36 | 37 | [testenv:flake8] 38 | description = Flake8 environment 39 | basepython = python3.11 40 | deps = 41 | flake8 42 | darglint 43 | dlint 44 | flake8-annotations-complexity 45 | flake8-annotations 46 | flake8-bugbear 47 | flake8-builtins 48 | flake8-cognitive-complexity 49 | flake8-comments 50 | flake8-comprehensions 51 | flake8-eradicate 52 | flake8-expression-complexity 53 | # flake8-new-union-types 54 | # I'd love to use this one but I'm still aiming for 3.8 compatibility 55 | flake8-pie 56 | flake8-return 57 | flake8-type-checking 58 | flake8-use-fstring 59 | flake8_simplify 60 | pep8-naming 61 | tryceratops 62 | commands = 63 | flake8 openai_functions 64 | 65 | [flake8] 66 | max-line-length = 88 67 | extend-ignore = E203,W503,W504,ANN101,ANN102,DAR003,DAR402 68 | max-annotations-complexity=4 69 | 70 | [coverage:report] 71 | exclude_lines = 72 | pragma: no cover 73 | if TYPE_CHECKING: 74 | class .*\bProtocol\): 75 | @(abc\.)?abstractmethod 76 | ^\s*\.\.\.$ 77 | --------------------------------------------------------------------------------
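
Taken together, the parser modules above turn an ordinary Python signature into the JSON schema that OpenAI's function calling expects, and validate the model's arguments on the way back in. The following is a minimal sketch of that round trip, using only the public API exercised by the test suite (FunctionWrapper and BrokenSchemaError are real; the Item dataclass, the file_item function, and the argument values are purely illustrative):

# example_schema_roundtrip.py -- a hypothetical standalone sketch, not part of the package
from dataclasses import dataclass
from typing import List

from openai_functions import FunctionWrapper


@dataclass
class Item:
    """An item to file."""

    name: str
    tags: List[str]


def file_item(item: Item, copies: int = 1) -> str:
    """File an item.

    Args:
        item: The item to file.
        copies: How many copies to file.
    """
    return f"Filed {copies}x {item.name}"


wrapper = FunctionWrapper(file_item)

# DataclassParser handles Item, ListParser handles tags, and IntParser handles
# copies; copies has a default, so it is left out of the "required" list.
print(wrapper.schema)

# Calling the wrapper parses the model's JSON arguments back into Python values
# (including constructing the Item instance) before invoking file_item; arguments
# that do not match the generated schema raise BrokenSchemaError instead.
print(wrapper({"item": {"name": "report", "tags": ["urgent"]}, "copies": 2}))

Note that the example deliberately omits "from __future__ import annotations": the dataclass parser reads field.type directly, so the annotations need to be real type objects rather than strings.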
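
At the conversation level, the same machinery is driven through Conversation, as test_conversation.py suggests. A sketch of that flow follows; it assumes a configured OpenAI API key and a cooperative model, and the weather function is a stand-in:

# example_conversation.py -- a hypothetical sketch; requires an OpenAI API key to be configured
from openai_functions import Conversation

conversation = Conversation()


def get_weather(city: str) -> str:
    """Get the current weather for a city.

    Args:
        city: The city to look up.
    """
    return f"Sunny in {city}"  # A stub standing in for a real lookup


conversation.add_function(get_weather)

# ask() sends the chat history plus the generated schemas to the model; when the
# reply is a function_call, the library runs get_weather and feeds the result
# back, so the string returned here is the model's final text answer.
reply = conversation.ask("What's the weather in Kyiv?")
print(reply)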