├── .formatter.exs
├── .gitignore
├── CHANGELOG.md
├── README.md
├── lib
│   └── ai.ex
├── mix.exs
├── mix.lock
└── test
    ├── ai_test.exs
    └── test_helper.exs

/.formatter.exs:
--------------------------------------------------------------------------------
# Used by "mix format"
[
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# The directory Mix will write compiled artifacts to.
/_build/

# If you run "mix test --cover", coverage assets end up here.
/cover/

# The directory Mix downloads your dependencies sources to.
/deps/

# Where third-party dependencies like ExDoc output generated docs.
/doc/

# Ignore .fetch files in case you like to edit your project deps locally.
/.fetch

# If the VM crashes, it generates a dump, let's ignore it too.
erl_crash.dump

# Also ignore archive artifacts (built via "mix archive.build").
*.ez

# Ignore package tarball (built via "mix hex.build").
ai-*.tar

# Temporary files, for example, from tests.
/tmp/

--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
## [0.3.4] - 2022-01-01

### Changed

- Recommend using `~l` instead of `~LLM`, as it allows for interpolation.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# AI

Helpers for using AI in Elixir. Currently requires the [openai.ex](https://github.com/mgallo/openai.ex) package.

Includes the `~l` sigil and a `chat/1` function for OpenAI chat completions.

```elixir
iex> ~l"model:gpt-3.5-turbo user: how do I build an igloo in 10 words?"
[
  model: "gpt-3.5-turbo",
  messages: [%{role: "user", content: "how do I build an igloo in 10 words?"}]
]
```

```elixir
iex> ~l"model:gpt-3.5-turbo user: how do I build an igloo in 10 words?" |> chat()
{:ok, "Stack snow blocks in circle, decreasing size upward, till enclosed"}
```
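
The `~l` sigil also supports `system:` and `assistant:` messages as well as string interpolation, so a prompt can be built from variables and piped straight into `chat/1`. The snippet below is an illustrative sketch (the completion you get back will vary); it assumes you have `import AI` in scope and have configured the [openai.ex](https://github.com/mgallo/openai.ex) package with your OpenAI API key as described in its README:

```elixir
import AI

subject = "an igloo"

~l"model:gpt-3.5-turbo system: You answer in exactly ten words. user: how do I build #{subject}?"
|> chat()
#=> {:ok, "..."}
```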

## Installation

The package is [available in Hex](https://hex.pm/docs/publish) and can be installed by adding `ai` to your list of dependencies in `mix.exs`:

```elixir
def deps do
  [
    {:ai, "~> 0.3.4"}
  ]
end
```

## Why?

[Here's why!](https://www.charlieholtz.com/articles/elixir-ai)

--------------------------------------------------------------------------------
/lib/ai.ex:
--------------------------------------------------------------------------------
defmodule AI do
  @moduledoc """
  Helpers for writing OpenAI chat completion prompts with the `~l` sigil and sending them with `chat/1`.
  """

  @doc """
  Implements the `~l` sigil, which parses text into an OpenAI-friendly chat completion prompt.
  `~l` works by parsing out the `model`, `system`, `user` and `assistant` keywords.

  Supports string interpolation.

  ## Examples

      iex> import AI
      iex> ~l"model: gpt-3.5-turbo system: You are an expert at text to image prompts. Given a description, write a text-to-image prompt. user: sunset"
      [
        model: "gpt-3.5-turbo",
        messages: [
          %{
            content: "You are an expert at text to image prompts. Given a description, write a text-to-image prompt.",
            role: "system"
          },
          %{content: "sunset", role: "user"}
        ]
      ]
  """
  def sigil_l(lines, _opts) do
    lines |> text_to_prompts()
  end

  @doc """
  DEPRECATED: Use `~l` instead. `~LLM` doesn't work with string interpolation.

  Implements the `~LLM` sigil, which parses text into an OpenAI-friendly chat completion prompt.
  `~LLM` works by parsing out the `model`, `system`, `user` and `assistant` keywords.

  ## Examples

      iex> import AI
      iex> ~LLM"model: gpt-3.5-turbo system: You are an expert at text to image prompts. Given a description, write a text-to-image prompt. user: sunset"
      [
        model: "gpt-3.5-turbo",
        messages: [
          %{
            content: "You are an expert at text to image prompts. Given a description, write a text-to-image prompt.",
            role: "system"
          },
          %{content: "sunset", role: "user"}
        ]
      ]
  """
  def sigil_LLM(lines, _opts) do
    lines |> text_to_prompts()
  end

  defp text_to_prompts(text) when is_binary(text) do
    model = extract_model(text) |> String.trim()
    messages = extract_messages(text)
    [model: model, messages: messages]
  end

  defp extract_model(text) do
    extract_value_after_keyword(text, "model:")
  end

  # Builds the messages list in the fixed order system -> user -> assistant,
  # extracting at most one message per role (the first occurrence in the text).
  defp extract_messages(text) do
    keywords = ["system:", "user:", "assistant:"]

    Enum.reduce_while(keywords, [], fn keyword, acc ->
      case extract_value_after_keyword(text, keyword) do
        nil ->
          {:cont, acc}

        value ->
          role = String.trim(keyword, ":")
          acc = acc ++ [%{role: role, content: String.trim(value)}]
          {:cont, acc}
      end
    end)
  end
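
  # Example (illustrative): given the text
  #
  #     "model: gpt-4 system: Be brief. user: hi"
  #
  # extract_value_after_keyword(text, "system:") captures "Be brief. ", i.e. everything
  # after the keyword up to the next keyword (or the end of the string); the callers
  # trim the surrounding whitespace.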
  defp extract_value_after_keyword(text, keyword) do
    # Lazily capture everything after the keyword up to the next keyword or the end of the string.
    pattern = ~r/#{keyword}\s*(.*?)(?=model:|system:|user:|assistant:|$)/s

    case Regex.run(pattern, text) do
      [_, value] -> value
      _ -> nil
    end
  end

  @doc """
  Sends a prompt (as returned by `~l`) to OpenAI's chat completion endpoint and parses
  the response into a cleaner format.

  Returns `{:ok, text_content}` or `{:error, message}` instead of the full response, which looks like:

      {:ok,
       %{
         id: "chatcmpl-7zSc1rsCXpyALMjM9MkaF077xYRot",
         usage: %{
           "completion_tokens" => 10,
           "prompt_tokens" => 19,
           "total_tokens" => 29
         },
         created: 1694882349,
         choices: [
           %{
             "finish_reason" => "stop",
             "index" => 0,
             "message" => %{
               "content" => "Compact and stack snow blocks in a dome shape.",
               "role" => "assistant"
             }
           }
         ],
         model: "gpt-3.5-turbo-0613",
         object: "chat.completion"
       }}
  """
  def chat(text) do
    text
    |> OpenAI.chat_completion()
    |> parse_chat()
  end
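
  # parse_chat/1 pulls the assistant's text out of the first choice on success, e.g.
  # {:ok, %{choices: [%{"message" => %{"content" => "Hi"}}]}} becomes {:ok, "Hi"},
  # and surfaces OpenAI's error message as {:error, message} on failure.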
  defp parse_chat({:ok, %{choices: [%{"message" => %{"content" => content}} | _]}}),
    do: {:ok, content}

  defp parse_chat({:error, %{"error" => %{"message" => message}}}), do: {:error, message}
end

--------------------------------------------------------------------------------
/mix.exs:
--------------------------------------------------------------------------------
defmodule AI.MixProject do
  use Mix.Project

  def project do
    [
      app: :ai,
      version: "0.3.4",
      elixir: "~> 1.15",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      name: "AI",
      source_url: "https://github.com/cbh123/elixir_ai",
      homepage_url: "https://hexdocs.pm/ai/readme.html",
      package: package(),
      description: description(),
      docs: [
        main: "readme",
        extras: ["README.md", "CHANGELOG.md"]
      ]
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:openai, "~> 0.5.2"},
      {:ex_doc, "~> 0.14", only: :dev, runtime: false}
    ]
  end

  defp description() do
    "Helper functions for using AI in Elixir"
  end

  defp package() do
    [
      name: "ai",
      files: ~w(lib .formatter.exs mix.exs README*),
      licenses: ["Apache-2.0"],
      links: %{"GitHub" => "https://github.com/cbh123/elixir_ai"}
    ]
  end
end

--------------------------------------------------------------------------------
/mix.lock:
--------------------------------------------------------------------------------
%{
  "certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
  "earmark_parser": {:hex, :earmark_parser, "1.4.35", "437773ca9384edf69830e26e9e7b2e0d22d2596c4a6b17094a3b29f01ea65bb8", [:mix], [], "hexpm", "8652ba3cb85608d0d7aa2d21b45c6fad4ddc9a1f9a1f1b30ca3a246f0acc33f6"},
  "ex_doc": {:hex, :ex_doc, "0.30.6", "5f8b54854b240a2b55c9734c4b1d0dd7bdd41f71a095d42a70445c03cf05a281", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "bd48f2ddacf4e482c727f9293d9498e0881597eae6ddc3d9562bd7923375109f"},
  "hackney": {:hex, :hackney, "1.18.2", "d7ff544ddae5e1cb49e9cf7fa4e356d7f41b283989a1c304bfc47a8cc1cf966f", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "af94d5c9f97857db257090a4a10e5426ecb6f4918aa5cc666798566ae14b65fd"},
  "httpoison": {:hex, :httpoison, "2.1.0", "655fd9a7b0b95ee3e9a3b535cf7ac8e08ef5229bab187fa86ac4208b122d934b", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "fc455cb4306b43827def4f57299b2d5ac8ac331cb23f517e734a4b78210a160c"},
  "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
  "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},
  "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"},
  "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"},
  "makeup_erlang": {:hex, :makeup_erlang, "0.1.2", "ad87296a092a46e03b7e9b0be7631ddcf64c790fa68a9ef5323b6cbb36affc72", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f3f5a1ca93ce6e092d92b6d9c049bcda58a3b617a8d888f8e7231c85630e8108"},
  "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
  "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
  "nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"},
  "openai": {:hex, :openai, "0.5.4", "2abc7bc6a72ad1732c16d3f0914aa54f4de14b174a4c70c1b2d7934f0fe2646f", [:mix], [{:httpoison, "~> 2.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "72add1d3dcbf3ed7d24ce3acf51e8b2f374b23305b0fc1d5f6acff35c567b267"},
  "parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
  "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"},
  "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
}

--------------------------------------------------------------------------------
/test/ai_test.exs:
--------------------------------------------------------------------------------
defmodule AITest do
  use ExUnit.Case
  doctest AI
  import AI

  test "basic usage" do
    assert ~l"model:gpt-3.5-turbo user:how do I build an igloo in 10 words?" ==
             [
               model: "gpt-3.5-turbo",
               messages: [%{role: "user", content: "how do I build an igloo in 10 words?"}]
             ]
  end

  test "code interpolation works" do
    model = "gpt-3.5-turbo"
    assert ~l"model:#{model} user:how do I build an igloo in 10 words?" ==
             [
               model: "gpt-3.5-turbo",
               messages: [%{role: "user", content: "how do I build an igloo in 10 words?"}]
             ]
  end

end

--------------------------------------------------------------------------------
/test/test_helper.exs:
--------------------------------------------------------------------------------
ExUnit.start()

--------------------------------------------------------------------------------