├── .gitignore ├── LICENSE ├── README.md ├── examples ├── motivator.json └── quickstart.py ├── pyproject.toml └── src └── formulaic_ai ├── __init__.py └── formulaic_ai.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Personal testing scripts 2 | /ignore 3 | open.py 4 | example.py 5 | .DS_store 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *$py.class 11 | 12 | # C extensions 13 | *.so 14 | 15 | # Distribution / packaging 16 | .Python 17 | build/ 18 | develop-eggs/ 19 | dist/ 20 | downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | *.py,cover 56 | .hypothesis/ 57 | .pytest_cache/ 58 | cover/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | .pybuilder/ 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # IPython 88 | profile_default/ 89 | ipython_config.py 90 | 91 | # pyenv 92 | # For a library or package, you might want to ignore these files since the code is 93 | # intended to run in multiple environments; otherwise, check them in: 94 | # .python-version 95 | 96 | # pipenv 97 | # According
to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 100 | # install all needed dependencies. 101 | #Pipfile.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | 110 | # pdm 111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 112 | #pdm.lock 113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 114 | # in version control. 115 | # https://pdm.fming.dev/#use-with-ide 116 | .pdm.toml 117 | 118 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 119 | __pypackages__/ 120 | 121 | # Celery stuff 122 | celerybeat-schedule 123 | celerybeat.pid 124 | 125 | # SageMath parsed files 126 | *.sage.py 127 | 128 | # Environments 129 | .env 130 | .venv 131 | env/ 132 | venv/ 133 | ENV/ 134 | env.bak/ 135 | venv.bak/ 136 | 137 | # Spyder project settings 138 | .spyderproject 139 | .spyproject 140 | 141 | # Rope project settings 142 | .ropeproject 143 | 144 | # mkdocs documentation 145 | /site 146 | 147 | # mypy 148 | .mypy_cache/ 149 | .dmypy.json 150 | dmypy.json 151 | 152 | # Pyre type checker 153 | .pyre/ 154 | 155 | # pytype static type analyzer 156 | .pytype/ 157 | 158 | # Cython debug symbols 159 | cython_debug/ 160 | 161 | # PyCharm 162 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 163 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 164 | # and can be added to the global gitignore or merged into this file. For a more nuclear 165 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 166 | #.idea/ 167 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Formulaic Python Library 2 | _**NOTE: This is a project in active development and changes frequently. It is not yet intended for production use.**_ 3 | 4 | The [Formulaic Python library](https://github.com/Mozilla-Ocho/formulaic-python) makes it easy to use Formulas inside your generative AI applications. Formulas are open-licensed JSON scripts that contain AI prompts, template variables, and model configuration. You can browse the library of existing Formulas for many popular language models at [Formulaic.app](https://formulaic.app). 
5 | 6 | ## Installation 7 | 8 | Install the Formulaic Python library 9 | 10 | ```python 11 | pip install formulaic-ai 12 | ``` 13 | 14 | ## Quick Start 15 | We're going to build [this script](https://github.com/Mozilla-Ocho/formulaic-python/blob/main/examples/quickstart.py) step-by-step below, using a [Formula JSON file](https://github.com/Mozilla-Ocho/formulaic-python/blob/main/examples/motivator.json) we downloaded from [Formulaic.app](http://formulaic.app). If you download both this script and the JSON file to your working directory, you can run them right away. You will need a llamafile server running at `localhost:8080`. You can also substitute in an OpenAI key and get going that way. We're going to break the entire thing down step-by-step in a moment. 16 | 17 | ```python 18 | from formulaic_ai import Formula, load_formula, OpenClient 19 | 20 | 21 | 22 | model_config = { "llamafile" : {"url" : "http://localhost:8080/v1", 23 | "key":"sk-no-key-required", 24 | "name": "LLaMA_CPP"}, 25 | "OpenAI" : {"url" : "https://api.openai.com/v1", 26 | "key": "OPENAI_KEY_GOES_HERE", 27 | "name": "gpt-3.5-turbo"}, 28 | "mistral7b" : {"url" : "https://api.endpoints.anyscale.com/v1", 29 | "key": "ANYSCALE_KEY_GOES_HERE", 30 | "name": "mistralai/Mistral-7B-Instruct-v0.1"} 31 | 32 | } 33 | 34 | # load our Formula, print the Formula name: description 35 | my_formula = Formula(load_formula("motivator.json")) 36 | print(f"{my_formula.name}: {my_formula.description}") 37 | 38 | 39 | # render prompts. 40 | my_formula.render() 41 | print(f"\nMy starting prompts: {my_formula.prompts}") 42 | 43 | 44 | #our new variables here. 
45 | data = {"occasion": "I'm scared of heights and climbing a mountain", 'language': 'German'} 46 | 47 | 48 | # render and print our prompts 49 | my_formula.render(data) 50 | print(f"\nMy new prompts: {my_formula.prompts}") 51 | 52 | 53 | # Create an OpenClient instance for llamafile 54 | with OpenClient(my_formula, model_config["llamafile"]) as client: 55 | 56 | # start our chat. True = print to terminal 57 | client.chat(True) 58 | 59 | # print our message log. 60 | print(client.messages) 61 | ``` 62 | 63 | ## Step-by-step 64 | 65 | ### Do our imports 66 | 67 | Import `Formula` which is what we'll use to work with our Formulas, `OpenClient` a wrapper on the OpenAI library to make it seamless to send Formula prompts to any OpenAI compatible API, and a helper function `load_formula` to open Formula files. 68 | 69 | ```python 70 | from formulaic_ai import Formula, OpenClient, load_formula 71 | ``` 72 | 73 | ### Load and create our Formula instance 74 | All Formulas on [Formulaic.app](https://formulaic.app) are JSON files that you can download to your local machine. We're providing one called `motivator.json` for you to use for this tutorial, which you can [download here](https://github.com/Mozilla-Ocho/formulaic-python/blob/main/examples/motivator.json). 75 | 76 | - Run `load_formula()` with a single argument of the filepath+filename to `motivator.json`. That opens the Formula's JSON file and loads it into a Python dictionary 77 | - Create an instance of the `Formula` class by passing it the dictionary we just created. I combined these two steps and saved my `Formula` instance as `my_formula` 78 | - Now let's print the Formula name and description. 79 | 80 | ```python 81 | # load our Formula 82 | my_formula = Formula(load_formula("motivator.json")) 83 | 84 | print(f"{my_formula.name}: {my_formula.description}") 85 | 86 | ``` 87 | We see: 88 | 89 | ``` 90 | Daily Motivator: Generates a motivational slogan based on your occasion.
91 | ``` 92 | 93 | ### Render prompts 94 | Our Formula is loaded correctly. Now let's call the `.render()` method. Downloaded Formula prompts often contain templating variables. When we render, we replace the template variables with values and generate prompts that are stored in the `.prompts` property. If we don't pass new values to `.render()`, it will render prompts using the Formula's default values. Render and then print again. 95 | 96 | ```python 97 | # render prompts. 98 | my_formula.render() 99 | print(f"\nMy starting prompts: {my_formula.prompts}") 100 | ``` 101 | Printed in the terminal we see: 102 | 103 | ``` 104 | My starting prompts: ['You are a personal motivator assistant who is direct and 105 | believes that everyone can be their best. Generate a motivating slogan for the 106 | occasion of first day of a new job', 'Now translate that slogan into French '] 107 | ``` 108 | Our prompts are in a Python list. The occasion is "first day of a new job" and the language is "French". 109 | 110 | Now let's pass in new data, re-render our prompts, and print again. 111 | ```python 112 | #our new variables here. 113 | data = {"occasion": "I'm scared of heights and climbing a mountain", 'language': 'German'} 114 | 115 | # render and print our prompts 116 | my_formula.render(data) 117 | print(f"\nMy new prompts: {my_formula.prompts}") 118 | ``` 119 | Now we see our prompt list, available at `.prompts` contains the new occasion and new translation language. 120 | 121 | ``` 122 | My new prompts: ["You are a personal motivator assistant who is direct and 123 | believes that everyone can be their best. Generate a motivating slogan for 124 | the occasion of I'm scared of heights and climbing a mountain", 'Now 125 | translate that slogan into German'] 126 | ``` 127 | 128 | ### Setup our model endpoint configuration 129 | We have prompts that are ready to be sent off to a language model. I'm going to use llamafile for this tutorial.
[llamafile](https://github.com/Mozilla-Ocho/llamafile) is free, runs on your local machine, and makes it easy to deploy a local API endpoint. I chose the [mistral 7B instruct llamafile](https://huggingface.co/jartine/Mistral-7B-Instruct-v0.2-llamafile). To get it running, download the file (5GB) and run it from the command line to start the local HTTP server. Please see the full [llamafile documentation](https://github.com/Mozilla-Ocho/llamafile) for instructions on how to download and get started. 130 | 131 | 132 | I went ahead and created a `model_config` dictionary to hold my model config variables to make it simpler. We can use the Formulaic Library to send our prompts to any language model API that supports the OpenAI format, so I included OpenAI and Anyscale. Anyscale provides hosting for many open source language models with an OpenAI compatible endpoint. You would have to create keys for OpenAI and Anyscale and substitute them in below. 133 | 134 | ```python 135 | 136 | model_config = { "llamafile" : {"url" : "http://localhost:8080/v1", 137 | "key":"sk-no-key-required", 138 | "name": "LLaMA_CPP"}, 139 | "OpenAI" : {"url" : "https://api.openai.com/v1", 140 | "key": "OPENAI_KEY_GOES_HERE", 141 | "name": "gpt-3.5-turbo"}, 142 | "mistral7b" : {"url" : "https://api.endpoints.anyscale.com/v1", 143 | "key": "ANYSCALE_KEY_GOES_HERE", 144 | "name": "mistralai/Mistral-7B-Instruct-v0.1"} 145 | 146 | } 147 | ``` 148 | 149 | ### Create our OpenClient and start our chat 150 | Now we're ready to create our `OpenClient` instance, which is a class that extends `OpenAI`. 151 | 152 | * We call `OpenClient` and pass two arguments: 153 | - The first is our Formula, `my_formula`. 154 | - The second is a dictionary that contains valid values for the `url`, `key`, and `name` of the model endpoint we're going to use. In this case, we pass it the `llamafile` dictionary from our `model_config`.
155 | 156 | We're going to call it using a `with` statement so that OpenClient's context manager will clean up for us: 157 | 158 | ```python 159 | with OpenClient(my_formula, model_config["llamafile"]) as client: 160 | 161 | ``` 162 | 163 | We now have two options. We can just iterate over the 2 prompts we have in our Formula and await their responses. We do that by calling `.run()`. Instead, we are going to have an ongoing chat by calling `.chat()`. Both `.run` and `.chat` have a single optional argument to print out all user prompts and assistant responses to terminal. The default is `False` but we are using the command line to interact, so we pass `True` 164 | 165 | ```python 166 | client.chat(True) 167 | ``` 168 | 169 | And we're also going to add `print(client.messages)` so that we can see the full list of all messages we sent to the model and the model sent back. Our whole block looks like this: 170 | 171 | ```python 172 | # Create an OpenClient instance for llamafile 173 | with OpenClient(my_formula, model_config["llamafile"]) as client: 174 | 175 | # start our chat. True = print to terminal 176 | client.chat(True) 177 | 178 | # print our message log. 179 | print(client.messages) 180 | 181 | ``` 182 | 183 | ## Save and run the script 184 | 185 | We save it as `quickstart.py` and run it in the terminal 186 | 187 | ``` 188 | python quickstart.py 189 | 190 | ``` 191 | 192 | 193 | 194 | It takes a moment because we're running on our local hardware using llamafile. Here's what we see: 195 | 196 | ``` 197 | User: You are a personal motivator assistant who is direct and believes that 198 | everyone can be their best. Generate a motivating slogan for the occasion of 199 | I'm scared of heights and climbing a mountain 200 | 201 | Assistant: Absolutely, I understand that fear of heights can be a significant 202 | challenge. But remember, every mountain peak is within your reach if you believe 203 | in yourself and take it one step at a time.
Here's a motivating slogan for you: 204 | 205 | "Conquer the Mountain Within: Your Fear is Just a Stepping Stone to New Heights!" 206 | 207 | User: Now translate that slogan into German 208 | 209 | Assistant: Of course! The German translation of "Conquer the Mountain Within: 210 | Your Fear is Just a Stepping Stone to New Heights!" would be: 211 | 212 | "Berge Innerhalb von Dir besiegen: Deine Angst ist nur ein Stufenstein zu 213 | neuen Gipfeln!" 214 | 215 | > 216 | ``` 217 | 218 | Notice that we have iterated over both of our two prompts and received two answers from the llamafile model. The cursor is awaiting our input. Let's tell it to translate to Latin and hit Return. 219 | 220 | ``` 221 | > Now translate to Latin 222 | Assistant: In Latin, the phrase could be: 223 | 224 | "Montes Intus Vincere: Timor Tuum Nec Nisi Gradus Ad Novos Culmines!" 225 | 226 | > 227 | ``` 228 | We see the Latin translation from the local llamafile model, and the cursor awaits our next chat input. To stop the chat, just hit Return without entering any input and the loop exits. 229 | 230 | 231 | ### See the message log printed 232 | Our Formula instance saved every message we sent to the model and every message the assistant sent back. This is what we accessed above by printing `client.messages` 233 | 234 | 235 | and now we see: 236 | ``` 237 | [{'role': 'user', 'content': "You are a personal motivator assistant who is 238 | direct and believes that everyone can be their best. Generate a motivating 239 | slogan for the occasion of I'm scared of heights and climbing a mountain"}, 240 | {'role': 'assistant', 'content': 'Absolutely, I understand that fear of heights 241 | can be a significant challenge. But remember, every mountain peak is within your 242 | reach if you believe in yourself and take it one step at a time.
Here\'s a 243 | motivating slogan for you:\n\n"Conquer the Mountain Within: Your Fear is 244 | Just a Stepping Stone to New Heights!"'}, {'role': 'user', 'content': 'Now 245 | translate that slogan into German'}, {'role': 'assistant', 'content': 'Of course! 246 | The German translation of "Conquer the Mountain Within: Your Fear is Just a Stepping 247 | Stone to New Heights!" would be:\n\n"Berge Innerhalb von Dir besiegen: Deine Angst 248 | \ist nur ein Stufenstein zu neuen Gipfeln!"'}, {'role': 'user', 'content': 'Now 249 | translate to Latin'}, {'role': 'assistant', 'content': 'In Latin, the phrase could 250 | be:\n\n"Montes Intus Vincere: Timor Tuum Nec Nisi Gradus Ad Novos Culmines!"'}] 251 | 252 | ``` 253 | 254 | That's the gist! You've parsed your first Formula and sent it off to a local language model. You can send it off to other model endpoints just as easily. 255 | 256 | You can see [the entire script](https://github.com/Mozilla-Ocho/formulaic-python/blob/main/examples/quickstart.py) we just produced here. 
257 | 258 | -------------------------------------------------------------------------------- /examples/motivator.json: -------------------------------------------------------------------------------- 1 | { 2 | "author": "javaun", 3 | "source": "https://formulaic.app/recipes/05ff3183-a43e-43fe-b77f-839025807515/edit", 4 | "created_at": "2024-02-27T16:55:40.181Z", 5 | "updated_at": "2024-02-28T20:49:59.181Z", 6 | "license": { 7 | "name": "CC BY 4.0 Deed", 8 | "canonical_link": "https://creativecommons.org/licenses/by/4.0" 9 | }, 10 | "name": "Daily Motivator", 11 | "description": "Generates a motivational slogan based on your occasion.", 12 | "script": { 13 | "model": { 14 | "id": "mistralai/Mistral-7B-Instruct-v0.1", 15 | "name": "Mistral 7B Instruct", 16 | "vendor": "Mistral", 17 | "provider": "Anyscale" 18 | }, 19 | "sequences": [ 20 | [ 21 | { 22 | "text": "You are a personal motivator assistant who is direct and believes that everyone can be their best. Generate a motivating slogan for the occasion of {{{occasion}}}" 23 | }, 24 | { 25 | "text": "Now translate that slogan into {{{language}}}" 26 | } 27 | ] 28 | ], 29 | "variables": [ 30 | { 31 | "name": "occasion", 32 | "type": "text", 33 | "value": "first day of a new job", 34 | "description": "Tell me the occasion for which you want a motivating slogan" 35 | }, 36 | { 37 | "name": "language", 38 | "type": "text", 39 | "value": "French ", 40 | "description": "The language you want to translate into" 41 | } 42 | ] 43 | } 44 | } -------------------------------------------------------------------------------- /examples/quickstart.py: -------------------------------------------------------------------------------- 1 | """ 2 | This example works with any LLM inference API that uses the OpenAI format and 3 | OpenAI Python library 4 | 5 | For this demo we've chosen llamafile, which is an LLM that runs on your local 6 | machine and includes a locally running OpenAI-compatible API endpoint. 
7 | 8 | You may substitute in another provider such as Anyscale or OpenAI by changing 9 | the values of endpoint_url and inference_api_key. 10 | 11 | """ 12 | 13 | from formulaic_ai import Formulaic 14 | import openai 15 | 16 | 17 | formulaic_api_key = "your_personal_key" 18 | endpoint_url = "http://localhost:8080/v1" # default for llamafile 19 | inference_api_key = "sk-no-key-required" # substitute if using another service 20 | 21 | 22 | formula = Formulaic(formulaic_api_key) 23 | 24 | formula.get_formula("2968bf58-a231-46ff-99de-923198c3864e") 25 | 26 | # print the entire Formula script 27 | print (formula.script) 28 | 29 | 30 | # new values for the template variables 31 | new_variables = {"occasion": "I'm scared of heights!", 'language': 'German'} 32 | 33 | # render prompts by substituting the new values 34 | formula.render(new_variables) 35 | 36 | # print the prompts that contain our new values 37 | print (formula.prompts) 38 | 39 | # change values, render, and print the prompts 40 | new_variables = {"occasion": "It's my birthday!", 'language': 'Greek'} 41 | formula.render(new_variables) 42 | print (formula.prompts) 43 | 44 | 45 | # Send our latest prompts to an OpenAI compatible endpoint 46 | 47 | # create an OpenAI client 48 | client = openai.OpenAI( 49 | base_url="http://localhost:8080/v1", # default for llamafile 50 | api_key = "sk-no-key-required" 51 | ) 52 | messages=[] 53 | 54 | # iterate over the prompts and send to the model for completions 55 | for p in formula.prompts: 56 | messages.append({"role": "user", "content": p}) 57 | completion = client.chat.completions.create( 58 | model="gpt-3.5-turbo", 59 | messages=messages 60 | ) 61 | # print the user prompt we sent 62 | print(f"\nUser: {p}") 63 | # print the Assistant's response 64 | print(f"\nAssistant: {completion.choices[0].message.content}") -------------------------------------------------------------------------------- /pyproject.toml:
-------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "formulaic-ai" 7 | version = "0.1.0.5" 8 | description = "The official Formulaic Python library" 9 | authors = [ 10 | {name = "Javaun Moradi", email = "formulaic@mozilla.com"}, 11 | {name = "Gabriel Habayeb", email = "formulaic@mozilla.com"} 12 | ] 13 | license = "Apache-2.0" 14 | readme = "README.md" 15 | keywords = ["prompt engineering", "prompt scripts" , "LLM", "language models", "llama", "mistral", "generative ai", "openai", "api", ] 16 | classifiers = [ 17 | "Development Status :: 1 - Planning", 18 | "Intended Audience :: Developers", 19 | "Programming Language :: Python :: 3", 20 | "Programming Language :: Python :: 3.8", 21 | "Programming Language :: Python :: 3.9", 22 | "Programming Language :: Python :: 3.10", 23 | "Programming Language :: Python :: 3.11", 24 | "Programming Language :: Python :: 3.12", 25 | "Programming Language :: Python :: 3.13", 26 | "License :: OSI Approved :: Apache Software License", 27 | "Operating System :: OS Independent", 28 | ] 29 | dependencies = [ 30 | "requests>=2.31.0" 31 | ] 32 | requires-python = ">= 3.8" 33 | 34 | [project.urls] 35 | homepage = "https://formulaic.app" 36 | repository = "https://github.com/Mozilla-Ocho/formulaic-python" 37 | documentation = "https://docs.formulaic.app" 38 | 39 | [tool.hatch.build.targets.wheel] 40 | packages = ["src/formulaic_ai"] 41 | 42 | 43 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /src/formulaic_ai/__init__.py: -------------------------------------------------------------------------------- 1 | from .formulaic_ai import Formulaic 2 | from .formulaic_ai import load_formula 3 | 4 | 5 | -------------------------------------------------------------------------------- /src/formulaic_ai/formulaic_ai.py:
"""Formulaic: open and parse Formulas (JSON prompt scripts) and render
them into prompts ready to send to LLM inference APIs.

Classes:
    Formulaic -- wraps a Formula's metadata and script; can fetch a
        Formula from the Formulaic API (get_formula) and render its
        prompt templates with variable values (render).
    FormulaTemplate -- string.Template subclass for triple-curly-brace
        "{{{name}}}" placeholders. Kept separate so templating can later
        be extended or swapped for mustache/Jinja2 style rendering.

Helper functions:
    load_formula(file_name) -- load a JSON Formula file from disk;
        file_name is a path+filename (or just a filename if the file is
        in the same directory).
"""

import copy
import json
import sys
from string import Template


def load_formula(file_name):
    """Load a JSON Formula from disk and return it as a dict.

    A path passed as the first CLI argument (sys.argv[1]) takes
    precedence over *file_name*, so scripts can be pointed at another
    Formula file from the command line.

    Best-effort by design: on any failure the error is printed and
    None is returned instead of raising.
    """
    try:
        # a CLI argument, if present, wins over the explicit argument
        file_name = sys.argv[1] if len(sys.argv) > 1 else file_name
        with open(file_name, "r", encoding="utf-8") as my_file:
            return json.load(my_file)  # parse the JSON Formula into a dict
    except Exception as e:
        print(f"An unexpected error occurred: {e}")


class Formulaic:
    """A Formula: metadata plus a prompt script with template variables."""

    # Blank Formula for embedded publishing in apps. 'variables' is a list
    # of {name, type, value, description} records, matching the shape the
    # Formulaic API returns (it was previously {}, inconsistent with that).
    default_formula_json = {
        'name': '',
        'description': '',
        'created_at': '',
        'updated_at': '',
        'author': '',
        'source': '',
        'license': {'name': '', 'canonical_link': ''},
        'script': {
            'model': {'id': '', 'name': '', 'vendor': '', 'provider': ''},
            'sequences': [],
            'variables': []
        }
    }

    def __init__(self, api_key=None, formula_json=None, options=None):
        """Create a Formulaic instance.

        :param api_key: Formulaic API key; only needed for get_formula().
        :param formula_json: a Formula dict; defaults to a blank Formula.
        :param options: dict of option overrides, e.g. {"base_url": ...}.
        """
        # Use a copy of the default template if none is given, so instances
        # never share (and accidentally mutate) the class-level default.
        if formula_json is None:
            formula_json = copy.deepcopy(Formulaic.default_formula_json)

        # Always bind the attribute (even as None) so get_formula() can
        # raise a clear error instead of an AttributeError.
        self.api_key = api_key

        # default options; user options may override base_url
        self.options = {"base_url": "https://formulaic.app/api/"}
        if options is not None:
            self.options.update(options)

        self.script = formula_json  # unpacks fields via the setter below
        self.prompts = []           # filled in by render()

    @property
    def script(self):
        """The full Formula dict (metadata plus script)."""
        return self._script

    @script.setter
    def script(self, formula_json):
        # Store the dict and unpack its fields into convenient attributes.
        if formula_json is not None:
            self._script = formula_json

            self.name = formula_json.get('name', '')
            self.description = formula_json.get('description', '')
            self.created = formula_json.get('created_at', '')
            self.updated = formula_json.get('updated_at', '')
            self.author = formula_json.get('author', '')
            self.source = formula_json.get('source', '')  # '' default was missing
            self.license = formula_json.get('license', {})

            script = formula_json.get('script', {})
            self.model = script.get('model', {})
            self.model_id = self.model.get('id', '')  # shortcut; used often
            self.sequences = script.get('sequences', [])

            # full variable records with all attributes...
            self.variables = script.get('variables', [])
            # ...and their default values in simple {name: value} form,
            # useful for rendering/testing locally
            self.default_values = Formulaic.simple_variables(self.variables)

    def get_formula(self, formula_id):
        """Fetch a Formula by id from the Formulaic API.

        Stores the result via self.script (unpacking its fields) and
        returns the Formula dict.

        :raises ValueError: when no API key was configured.
        :raises requests.HTTPError: for non-2xx responses.
        """
        # Imported lazily so the module stays importable in environments
        # that only work with local Formula files.
        import requests

        if not self.api_key:
            raise ValueError("An API key is required to fetch a Formula.")

        url = self.options['base_url'] + "recipes/" + formula_id + "/scripts"
        headers = {
            "Accept": "*/*",
            "Authorization": "Bearer " + self.api_key,
        }

        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()  # fail loudly on HTTP errors, not on .json()
        formula_dict = response.json()
        self.script = formula_dict
        return formula_dict

    @staticmethod
    def simple_variables(data):
        """Reduce full variable records to a {name: value} dict for templating."""
        return {variable['name']: variable['value'] for variable in data}

    def render(self, simple_data=None):
        """Render every prompt template, storing the results in self.prompts.

        :param simple_data: {name: value} substitutions; defaults to the
            Formula's own default variable values.
        """
        # if we don't get new values, use the defaults
        if simple_data is None:
            simple_data = self.default_values

        rendered = []
        for sequence in self.sequences:
            # each prompt in the sequence
            for prompt in sequence:
                prompt_template = FormulaTemplate(prompt["text"])
                try:
                    rendered.append(prompt_template.substitute(simple_data))
                # KeyError: a placeholder has no matching value;
                # ValueError: malformed placeholder. Best effort: report and
                # skip that prompt rather than abort the whole render.
                except (KeyError, ValueError) as e:
                    print(f"Templating error, the JSON you submitted has incorrect keys: {e}")

        # NOTE(review): consider returning the prompts (OpenAI message
        # format?) instead of only setting a property.
        self.prompts = rendered


class FormulaTemplate(Template):
    """string.Template subclass rendering "{{{name}}}" placeholders.

    This allows us to extend templating or replace it with Jinja2 later.
    """
    delimiter = '{{{'
    idpattern = r'\w+'

    # string.Template's substitution machinery expects all four named
    # groups (named/braced/escaped/invalid) to exist. 'braced' and
    # 'escaped' use (?!) so they can never match, but without them an
    # invalid placeholder raised IndexError instead of the intended
    # ValueError.
    pattern = r'''
    \{{3}                      # three opening braces
    (?:
      (?P<named>\w+)\}{3}      # {{{identifier}}}: a-z, A-Z, 0-9 and _
      |
      (?P<braced>(?!))         # unused; required by Template internals
      |
      (?P<escaped>(?!))        # unused; required by Template internals
      |
      (?P<invalid>.+?)\}{3}    # anything else between the braces
    )
    '''