├── .github └── workflows │ └── go.yml ├── LICENSE ├── README.md ├── audio └── audio.go ├── chat ├── chat.go └── streaming.go ├── completion ├── completion.go └── streaming.go ├── edit └── edit.go ├── embedding └── embedding.go ├── examples ├── audio │ └── main.go ├── chat │ └── main.go ├── completion │ └── main.go ├── edit │ └── main.go ├── embedding │ └── main.go ├── image │ └── main.go └── moderation │ └── main.go ├── go.mod ├── image ├── create.go └── image.go ├── moderation └── moderation.go └── openai.go /.github/workflows/go.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a golang project 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-go 3 | 4 | name: Go 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | 12 | jobs: 13 | 14 | build: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: actions/checkout@v3 18 | 19 | - name: Set up Go 20 | uses: actions/setup-go@v3 21 | with: 22 | go-version: 1.19 23 | 24 | - name: Build 25 | run: go build -v ./... 26 | 27 | - name: Test 28 | run: go test -v ./... 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. 
For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # openai-go [![Go Reference](https://pkg.go.dev/badge/github.com/rakyll/openai-go.svg)](https://pkg.go.dev/github.com/rakyll/openai-go) [![Go](https://github.com/rakyll/openai-go/actions/workflows/go.yml/badge.svg)](https://github.com/rakyll/openai-go/actions/workflows/go.yml) 2 | 3 | Go client libraries for OpenAI APIs. 
Supported APIs: 4 | ``` 5 | ✅ completions 6 | ✅ chat 7 | ✅ edits 8 | 🚧 images 9 | ✅ moderations 10 | ✅ audio 11 | ✅ embeddings 12 | ``` 13 | 14 | Set your [API key](https://platform.openai.com/account/api-keys) 15 | as the `OPENAI_API_KEY` environment variable before running the examples. 16 | -------------------------------------------------------------------------------- /audio/audio.go: -------------------------------------------------------------------------------- 1 | // Package audio implements a client for OpenAI's Whisper 2 | // audio transcriber. 3 | package audio 4 | 5 | import ( 6 | "context" 7 | "fmt" 8 | "io" 9 | "net/url" 10 | 11 | "github.com/rakyll/openai-go" 12 | ) 13 | 14 | const defaultCreateTranscriptionEndpoint = "https://api.openai.com/v1/audio/transcriptions" 15 | 16 | // Client is a client to communicate with Open AI's audio transcription APIs. 17 | type Client struct { 18 | s *openai.Session 19 | model string 20 | 21 | // CreateTranscriptionEndpoint allows overriding the default API endpoint. 22 | // Set this field before using the client. 23 | CreateTranscriptionEndpoint string 24 | } 25 | 26 | // NewClient creates a new default client that uses the given session 27 | // and defaults to the given model. 28 | func NewClient(session *openai.Session, model string) *Client { 29 | if model == "" { 30 | model = "whisper-1" 31 | } 32 | return &Client{ 33 | s: session, 34 | model: model, 35 | CreateTranscriptionEndpoint: defaultCreateTranscriptionEndpoint, 36 | } 37 | } 38 | 39 | type CreateTranscriptionParams struct { 40 | Model string 41 | Language string 42 | 43 | Audio io.Reader 44 | AudioFormat string // such as "mp3" or "wav", etc. 45 | 46 | Prompt string // optional 47 | // TODO: Add temperature. 48 | } 49 | 50 | type CreateTranscriptionResponse struct { 51 | Text string `json:"text,omitempty"` 52 | } 53 | 54 | func (c *Client) CreateTranscription(ctx context.Context, p *CreateTranscriptionParams) (*CreateTranscriptionResponse, error) { 55 | if p.AudioFormat == "" { 56 | return nil, fmt.Errorf("audio format is required") 57 | } 58 | if p.Model == "" { 59 | p.Model = c.model 60 | } 61 | params := url.Values{} 62 | params.Set("model", p.Model) 63 | if p.Language != "" { 64 | params.Set("language", p.Language) 65 | } 66 | if p.Prompt != "" { 67 | params.Set("prompt", p.Prompt) 68 | } 69 | var r CreateTranscriptionResponse 70 | return &r, c.s.Upload(ctx, c.CreateTranscriptionEndpoint, p.Audio, p.AudioFormat, params, &r) 71 | } 72 | -------------------------------------------------------------------------------- /chat/chat.go: -------------------------------------------------------------------------------- 1 | // Package chat contains a client for Open AI's ChatGPT APIs. 2 | package chat 3 | 4 | import ( 5 | "context" 6 | "errors" 7 | 8 | "github.com/rakyll/openai-go" 9 | ) 10 | 11 | const defaultModel = "gpt-3.5-turbo" 12 | 13 | const defaultCreateCompletionsEndpoint = "https://api.openai.com/v1/chat/completions" 14 | 15 | // Client is a client to communicate with Open AI's ChatGPT APIs. 16 | type Client struct { 17 | s *openai.Session 18 | model string 19 | 20 | // CreateCompletionEndpoint allows overriding the default API endpoint. 21 | // Set this field before using the client. 22 | CreateCompletionEndpoint string 23 | } 24 | 25 | // NewClient creates a new default client that uses the given session 26 | // and defaults to the given model.
27 | func NewClient(session *openai.Session, model string) *Client { 28 | if model == "" { 29 | model = defaultModel 30 | } 31 | return &Client{ 32 | s: session, 33 | model: model, 34 | CreateCompletionEndpoint: defaultCreateCompletionsEndpoint, 35 | } 36 | } 37 | 38 | type CreateCompletionParams struct { 39 | Model string `json:"model,omitempty"` 40 | 41 | Messages []*Message `json:"messages,omitempty"` 42 | Stop []string `json:"stop,omitempty"` 43 | Stream bool `json:"stream,omitempty"` 44 | 45 | N int `json:"n,omitempty"` 46 | TopP float64 `json:"top_p,omitempty"` 47 | Temperature float64 `json:"temperature,omitempty"` 48 | MaxTokens int `json:"max_tokens,omitempty"` 49 | 50 | PresencePenalty float64 `json:"presence_penalty,omitempty"` 51 | FrequencyPenalty float64 `json:"frequency_penalty,omitempty"` 52 | 53 | User string `json:"user,omitempty"` 54 | } 55 | 56 | type CreateCompletionResponse struct { 57 | ID string `json:"id,omitempty"` 58 | Object string `json:"object,omitempty"` 59 | CreatedAt int64 `json:"created_at,omitempty"` 60 | Choices []*Choice `json:"choices,omitempty"` 61 | 62 | Usage *openai.Usage `json:"usage,omitempty"` 63 | } 64 | 65 | type Choice struct { 66 | Message *Message `json:"message,omitempty"` 67 | Index int `json:"index,omitempty"` 68 | LogProbs int `json:"logprobs,omitempty"` 69 | FinishReason string `json:"finish_reason,omitempty"` 70 | } 71 | 72 | type Message struct { 73 | Role string `json:"role,omitempty"` 74 | Content string `json:"content,omitempty"` 75 | Name string `json:"name,omitempty"` 76 | } 77 | 78 | func (c *Client) CreateCompletion(ctx context.Context, p *CreateCompletionParams) (*CreateCompletionResponse, error) { 79 | if p.Model == "" { 80 | p.Model = c.model 81 | } 82 | if p.Stream { 83 | return nil, errors.New("use StreamingClient instead") 84 | } 85 | 86 | var r CreateCompletionResponse 87 | if err := c.s.MakeRequest(ctx, c.CreateCompletionEndpoint, p, &r); err != nil { 88 | return nil, err 89 | } 90 | return &r, nil 91 | } 92 | -------------------------------------------------------------------------------- /chat/streaming.go: -------------------------------------------------------------------------------- 1 | package chat 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/rakyll/openai-go" 7 | ) 8 | 9 | // StreamingClient is a client to communicate with Open AI's ChatGPT APIs. 10 | type StreamingClient struct { 11 | s *openai.Session 12 | model string 13 | 14 | // CreateCompletionsEndpoint allows overriding the default API endpoint. 15 | // Set this field before using the client. 16 | CreateCompletionEndpoint string 17 | } 18 | 19 | // NewStreamingClient creates a new default streaming client that uses the given session 20 | // and defaults to the given model. 
21 | func NewStreamingClient(session *openai.Session, model string) *StreamingClient { 22 | if model == "" { 23 | model = defaultModel 24 | } 25 | return &StreamingClient{ 26 | s: session, 27 | model: model, 28 | CreateCompletionEndpoint: defaultCreateCompletionsEndpoint, 29 | } 30 | } 31 | 32 | type CreateCompletionStreamingResponse struct { 33 | ID string `json:"id,omitempty"` 34 | Object string `json:"object,omitempty"` 35 | CreatedAt int64 `json:"created_at,omitempty"` 36 | Choices []*StreamingChoice `json:"choices,omitempty"` 37 | } 38 | 39 | type StreamingChoice struct { 40 | Delta *Message `json:"delta,omitempty"` 41 | Index int `json:"index,omitempty"` 42 | LogProbs int `json:"logprobs,omitempty"` 43 | FinishReason string `json:"finish_reason,omitempty"` 44 | } 45 | 46 | func (c *StreamingClient) CreateCompletion(ctx context.Context, p *CreateCompletionParams, fn func(r *CreateCompletionStreamingResponse)) error { 47 | if p.Model == "" { 48 | p.Model = c.model 49 | } 50 | p.Stream = true 51 | 52 | var r CreateCompletionStreamingResponse 53 | return c.s.MakeStreamingRequest(ctx, c.CreateCompletionEndpoint, p, &r, func(r any) { 54 | fn(r.(*CreateCompletionStreamingResponse)) 55 | }) 56 | } 57 | -------------------------------------------------------------------------------- /completion/completion.go: -------------------------------------------------------------------------------- 1 | // Package completion contains a client for OpenAI's completion API. 2 | package completion 3 | 4 | import ( 5 | "context" 6 | "errors" 7 | 8 | "github.com/rakyll/openai-go" 9 | ) 10 | 11 | const defaultCreateEndpoint = "https://api.openai.com/v1/completions" 12 | 13 | // Client is a client to communicate with Open AI's completions API. 14 | type Client struct { 15 | s *openai.Session 16 | model string 17 | 18 | // CreateEndpoint allows overriding the default API endpoint. 19 | // Set this field before using the client. 20 | CreateEndpoint string 21 | } 22 | 23 | // NewClient creates a new default client that uses the given session 24 | // and defaults to the given model. 25 | func NewClient(session *openai.Session, model string) *Client { 26 | return &Client{ 27 | s: session, 28 | model: model, 29 | CreateEndpoint: defaultCreateEndpoint, 30 | } 31 | } 32 | 33 | // CreateParams are completion parameters. Refer to OpenAI documentation 34 | // at https://platform.openai.com/docs/api-reference/completions/create 35 | // for reference. 36 | type CreateParams struct { 37 | Model string `json:"model,omitempty"` 38 | 39 | Prompt []string `json:"prompt,omitempty"` 40 | Stop []string `json:"stop,omitempty"` 41 | Suffix string `json:"suffix,omitempty"` 42 | Stream bool `json:"stream,omitempty"` 43 | Echo bool `json:"echo,omitempty"` 44 | 45 | MaxTokens int `json:"max_tokens,omitempty"` 46 | N int `json:"n,omitempty"` 47 | TopP float64 `json:"top_p,omitempty"` 48 | Temperature float64 `json:"temperature,omitempty"` 49 | 50 | LogProbs int `json:"logprobs,omitempty"` 51 | PresencePenalty float64 `json:"presence_penalty,omitempty"` 52 | FrequencyPenalty float64 `json:"frequency_penalty,omitempty"` 53 | BestOf int `json:"best_of,omitempty"` 54 | 55 | User string `json:"user,omitempty"` 56 | } 57 | 58 | // CreateResponse is a response to a completion. Refer to OpenAI documentation 59 | // at https://platform.openai.com/docs/api-reference/completions/create 60 | // for reference. 
61 | type CreateResponse struct { 62 | ID string `json:"id,omitempty"` 63 | Object string `json:"object,omitempty"` 64 | CreatedAt int64 `json:"created_at,omitempty"` 65 | Choices []*Choice `json:"choices,omitempty"` 66 | 67 | Usage *openai.Usage `json:"usage,omitempty"` 68 | } 69 | 70 | type Choice struct { 71 | Text string `json:"text,omitempty"` 72 | Index int `json:"index,omitempty"` 73 | LogProbs int `json:"logprobs,omitempty"` 74 | FinishReason string `json:"finish_reason,omitempty"` 75 | } 76 | 77 | // Create creates a completion for the provided parameters. 78 | func (c *Client) Create(ctx context.Context, p *CreateParams) (*CreateResponse, error) { 79 | if p.Model == "" { 80 | p.Model = c.model 81 | } 82 | if p.Stream { 83 | return nil, errors.New("use StreamingClient instead") 84 | } 85 | 86 | var r CreateResponse 87 | if err := c.s.MakeRequest(ctx, c.CreateEndpoint, p, &r); err != nil { 88 | return nil, err 89 | } 90 | return &r, nil 91 | } 92 | -------------------------------------------------------------------------------- /completion/streaming.go: -------------------------------------------------------------------------------- 1 | package completion 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/rakyll/openai-go" 7 | ) 8 | 9 | // StreamingClient is a client to communicate with Open AI's completions API. 10 | type StreamingClient struct { 11 | s *openai.Session 12 | model string 13 | 14 | // CreateEndpoint allows overriding the default API endpoint. 15 | // Set this field before using the client. 16 | CreateEndpoint string 17 | } 18 | 19 | // NewStreamingClient creates a new default streaming client that uses the given session 20 | // and defaults to the given model. 21 | func NewStreamingClient(session *openai.Session, model string) *StreamingClient { 22 | return &StreamingClient{ 23 | s: session, 24 | model: model, 25 | CreateEndpoint: defaultCreateEndpoint, 26 | } 27 | } 28 | 29 | // Create creates a completion for the provided parameters. 30 | func (c *StreamingClient) Create(ctx context.Context, p *CreateParams, fn func(r *CreateResponse)) error { 31 | if p.Model == "" { 32 | p.Model = c.model 33 | } 34 | p.Stream = true 35 | 36 | var r CreateResponse 37 | return c.s.MakeStreamingRequest(ctx, c.CreateEndpoint, p, &r, func(r any) { 38 | fn(r.(*CreateResponse)) 39 | }) 40 | } 41 | -------------------------------------------------------------------------------- /edit/edit.go: -------------------------------------------------------------------------------- 1 | // Package edit contains a client for OpenAI's edits API. 2 | package edit 3 | 4 | import ( 5 | "context" 6 | 7 | "github.com/rakyll/openai-go" 8 | ) 9 | 10 | const defaultCreateEndpoint = "https://api.openai.com/v1/edits" 11 | 12 | // Client is a client to communicate with Open AI's edits API. 13 | type Client struct { 14 | s *openai.Session 15 | model string 16 | 17 | // CreateEndpoint allows overriding the default API endpoint. 18 | // Set this field before using the client. 19 | CreateEndpoint string 20 | } 21 | 22 | func NewClient(session *openai.Session, model string) *Client { 23 | return &Client{ 24 | s: session, 25 | model: model, 26 | CreateEndpoint: defaultCreateEndpoint, 27 | } 28 | } 29 | 30 | // CreateParams are completion parameters. Refer to OpenAI documentation 31 | // at https://platform.openai.com/docs/api-reference/edits/create 32 | // for reference. 
33 | type CreateParams struct { 34 | Model string `json:"model,omitempty"` 35 | Input string `json:"input,omitempty"` 36 | Instruction string `json:"instruction,omitempty"` 37 | 38 | N int `json:"n,omitempty"` 39 | TopP float64 `json:"top_p,omitempty"` 40 | Temperature float64 `json:"temperature,omitempty"` 41 | } 42 | 43 | // CreateResponse is a response to a completion. Refer to OpenAI documentation 44 | // at https://platform.openai.com/docs/api-reference/edits/create 45 | // for reference. 46 | type CreateResponse struct { 47 | Object string `json:"object,omitempty"` 48 | CreatedAt int64 `json:"created_at,omitempty"` 49 | Choices []*Choice `json:"choices,omitempty"` 50 | 51 | Usage *openai.Usage `json:"usage,omitempty"` 52 | } 53 | 54 | type Choice struct { 55 | Text string `json:"text,omitempty"` 56 | Index int `json:"index,omitempty"` 57 | } 58 | 59 | func (c *Client) Create(ctx context.Context, p *CreateParams) (*CreateResponse, error) { 60 | if p.Model == "" { 61 | p.Model = c.model 62 | } 63 | 64 | var r CreateResponse 65 | if err := c.s.MakeRequest(ctx, c.CreateEndpoint, p, &r); err != nil { 66 | return nil, err 67 | } 68 | return &r, nil 69 | } 70 | -------------------------------------------------------------------------------- /embedding/embedding.go: -------------------------------------------------------------------------------- 1 | // Package embedding contains a client for Open AI's Embeddings APIs. 2 | package embedding 3 | 4 | import ( 5 | "context" 6 | 7 | "github.com/rakyll/openai-go" 8 | ) 9 | 10 | const ( 11 | defaultModel = "text-embedding-ada-002" 12 | defaultCreateEndpoint = "https://api.openai.com/v1/embeddings" 13 | ) 14 | 15 | // Client is a client to communicate with Open AI's Embeddings APIs. 16 | type Client struct { 17 | s *openai.Session 18 | model string 19 | 20 | // CreateEndpoint allows overriding the default API endpoint. 21 | // Set this field before using the client. 22 | CreateEndpoint string 23 | } 24 | 25 | // NewClient creates a new default client that uses the given session 26 | // and defaults to the given model. 
27 | func NewClient(session *openai.Session, model string) *Client { 28 | if model == "" { 29 | model = defaultModel 30 | } 31 | return &Client{ 32 | s: session, 33 | model: model, 34 | CreateEndpoint: defaultCreateEndpoint, 35 | } 36 | } 37 | 38 | type CreateParams struct { 39 | Model string `json:"model,omitempty"` 40 | 41 | Input []string `json:"input,omitempty"` 42 | User string `json:"user,omitempty"` 43 | } 44 | 45 | type CreateResponse struct { 46 | Object string `json:"object,omitempty"` 47 | Data []*Data `json:"data,omitempty"` 48 | Model string `json:"model,omitempty"` 49 | 50 | Usage *openai.Usage `json:"usage,omitempty"` 51 | } 52 | 53 | type Data struct { 54 | Object string `json:"object,omitempty"` 55 | Embedding []float64 `json:"embedding,omitempty"` 56 | Index int `json:"index,omitempty"` 57 | } 58 | 59 | func (c *Client) Create(ctx context.Context, p *CreateParams) (*CreateResponse, error) { 60 | if p.Model == "" { 61 | p.Model = c.model 62 | } 63 | 64 | var r CreateResponse 65 | if err := c.s.MakeRequest(ctx, c.CreateEndpoint, p, &r); err != nil { 66 | return nil, err 67 | } 68 | return &r, nil 69 | } 70 | -------------------------------------------------------------------------------- /examples/audio/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "log" 6 | "os" 7 | 8 | "github.com/rakyll/openai-go" 9 | "github.com/rakyll/openai-go/audio" 10 | ) 11 | 12 | func main() { 13 | ctx := context.Background() 14 | 15 | s := openai.NewSession(os.Getenv("OPENAI_API_KEY")) 16 | client := audio.NewClient(s, "") 17 | filePath := os.Getenv("AUDIO_FILE_PATH") 18 | if filePath == "" { 19 | log.Fatal("must provide an AUDIO_FILE_PATH env var") 20 | } 21 | f, err := os.Open(filePath) 22 | if err != nil { 23 | log.Fatalf("error opening audio file: %v", err) 24 | } 25 | defer f.Close() 26 | resp, err := client.CreateTranscription(ctx, &audio.CreateTranscriptionParams{ 27 | Language: "en", 28 | Audio: f, 29 | AudioFormat: "mp3", 30 | }) 31 | if err != nil { 32 | log.Fatalf("error transcribing file: %v", err) 33 | } 34 | log.Println(resp.Text) 35 | } 36 | -------------------------------------------------------------------------------- /examples/chat/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "log" 6 | "os" 7 | 8 | "github.com/rakyll/openai-go" 9 | "github.com/rakyll/openai-go/chat" 10 | ) 11 | 12 | func main() { 13 | ctx := context.Background() 14 | s := openai.NewSession(os.Getenv("OPENAI_API_KEY")) 15 | 16 | client := chat.NewClient(s, "gpt-3.5-turbo") 17 | resp, err := client.CreateCompletion(ctx, &chat.CreateCompletionParams{ 18 | Messages: []*chat.Message{ 19 | {Role: "user", Content: "hello"}, 20 | }, 21 | }) 22 | if err != nil { 23 | log.Fatalf("Failed to complete: %v", err) 24 | } 25 | 26 | for _, choice := range resp.Choices { 27 | msg := choice.Message 28 | log.Printf("role=%q, content=%q", msg.Role, msg.Content) 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /examples/completion/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "log" 6 | "os" 7 | 8 | "github.com/rakyll/openai-go" 9 | "github.com/rakyll/openai-go/completion" 10 | ) 11 | 12 | func main() { 13 | ctx := context.Background() 14 | s := openai.NewSession(os.Getenv("OPENAI_API_KEY")) 15 | 16 
| client := completion.NewClient(s, "text-davinci-003") 17 | resp, err := client.Create(ctx, &completion.CreateParams{ 18 | N: 1, 19 | MaxTokens: 200, 20 | Prompt: []string{"say this is a test"}, 21 | }) 22 | if err != nil { 23 | log.Fatalf("Failed to complete: %v", err) 24 | } 25 | 26 | for _, choice := range resp.Choices { 27 | log.Println(choice.Text) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /examples/edit/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "log" 6 | "os" 7 | 8 | "github.com/rakyll/openai-go" 9 | "github.com/rakyll/openai-go/edit" 10 | ) 11 | 12 | func main() { 13 | ctx := context.Background() 14 | s := openai.NewSession(os.Getenv("OPENAI_API_KEY")) 15 | 16 | client := edit.NewClient(s, "text-davinci-edit-001") 17 | resp, err := client.Create(ctx, &edit.CreateParams{ 18 | N: 1, 19 | Input: "What day of the wek is it?", 20 | Instruction: "Fix the spelling mistakes", 21 | }) 22 | if err != nil { 23 | log.Fatalf("Failed to create an edit: %v", err) 24 | } 25 | 26 | for _, choice := range resp.Choices { 27 | log.Println(choice.Text) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /examples/embedding/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "log" 6 | "os" 7 | 8 | "github.com/rakyll/openai-go" 9 | "github.com/rakyll/openai-go/embedding" 10 | ) 11 | 12 | func main() { 13 | ctx := context.Background() 14 | s := openai.NewSession(os.Getenv("OPENAI_API_KEY")) 15 | 16 | client := embedding.NewClient(s, "text-embedding-ada-002") 17 | resp, err := client.Create(ctx, &embedding.CreateParams{ 18 | Input: []string{"The food was delicious and the waiter..."}, 19 | }) 20 | if err != nil { 21 | log.Fatalf("Failed to complete: %v", err) 22 | } 23 | 24 | for _, data := range resp.Data { 25 | log.Printf("index=%d, len(embedding)=%d", data.Index, len(data.Embedding)) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /examples/image/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "io/ioutil" 6 | "log" 7 | "os" 8 | 9 | "github.com/rakyll/openai-go" 10 | "github.com/rakyll/openai-go/image" 11 | ) 12 | 13 | func main() { 14 | ctx := context.Background() 15 | s := openai.NewSession(os.Getenv("OPENAI_API_KEY")) 16 | 17 | client := image.NewClient(s) 18 | resp, err := client.Create(ctx, &image.CreateParams{ 19 | N: 3, 20 | Prompt: "a cute baby", 21 | Size: "1024x1024", 22 | Format: "b64_json", 23 | }) 24 | if err != nil { 25 | log.Fatalf("Failed to generate image: %v", err) 26 | } 27 | 28 | for _, image := range resp.Data { 29 | reader, err := image.Reader() 30 | if err != nil { 31 | log.Fatalf("Failed to read image data: %v", err) 32 | } 33 | data, err := ioutil.ReadAll(reader) 34 | if err != nil { 35 | log.Fatalf("ReadAll error: %v", err) 36 | } 37 | _ = data // use data 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /examples/moderation/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "log" 6 | "os" 7 | 8 | "github.com/rakyll/openai-go" 9 | "github.com/rakyll/openai-go/moderation" 10 | ) 11 | 12 | func main() { 13 | 
ctx := context.Background() 14 | s := openai.NewSession(os.Getenv("OPENAI_API_KEY")) 15 | 16 | client := moderation.NewClient(s, "text-moderation-latest") 17 | resp, err := client.Create(ctx, &moderation.CreateParams{ 18 | Input: []string{"I will kill you"}, 19 | }) 20 | if err != nil { 21 | log.Fatalf("Failed to complete: %v", err) 22 | } 23 | 24 | for _, result := range resp.Results { 25 | log.Println("Content moderation is flagged as", result.Flagged) 26 | if result.Flagged { 27 | for key, value := range result.Categories { 28 | if value { 29 | log.Println("Content category is", key) 30 | } 31 | } 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/rakyll/openai-go 2 | 3 | go 1.19 4 | -------------------------------------------------------------------------------- /image/create.go: -------------------------------------------------------------------------------- 1 | // Package image contains a client for OpenAI's images API. 2 | package image 3 | 4 | import "context" 5 | 6 | type CreateParams struct { 7 | Prompt string `json:"prompt,omitempty"` 8 | N int `json:"n,omitempty"` 9 | Size string `json:"size,omitempty"` 10 | Format string `json:"response_format,omitempty"` 11 | User string `json:"user,omitempty"` 12 | } 13 | 14 | type CreateResponse struct { 15 | CreatedAt int64 `json:"created_at,omitempty"` 16 | Data []*Image `json:"data,omitempty"` 17 | } 18 | 19 | func (c *Client) Create(ctx context.Context, p *CreateParams) (*CreateResponse, error) { 20 | var r CreateResponse 21 | if err := c.s.MakeRequest(ctx, c.CreateEndpoint, p, &r); err != nil { 22 | return nil, err 23 | } 24 | return &r, nil 25 | } 26 | -------------------------------------------------------------------------------- /image/image.go: -------------------------------------------------------------------------------- 1 | package image 2 | 3 | import ( 4 | "bytes" 5 | "encoding/base64" 6 | "errors" 7 | "io" 8 | "io/ioutil" 9 | "net/http" 10 | 11 | "github.com/rakyll/openai-go" 12 | ) 13 | 14 | const ( 15 | defaultCreateEndpoint = "https://api.openai.com/v1/images/generations" 16 | ) 17 | 18 | // Client is a client to communicate with Open AI's images API. 19 | type Client struct { 20 | s *openai.Session 21 | 22 | // CreateEndpoint allows overriding the default 23 | // for the image generation API endpoint. 24 | // Set this field before using the client. 
25 | CreateEndpoint string 26 | } 27 | 28 | func NewClient(session *openai.Session) *Client { 29 | return &Client{ 30 | s: session, 31 | CreateEndpoint: defaultCreateEndpoint, 32 | } 33 | } 34 | 35 | type Image struct { 36 | URL string `json:"url,omitempty"` 37 | Base64JSON string `json:"b64_json,omitempty"` 38 | } 39 | 40 | func (i *Image) Reader() (io.ReadCloser, error) { 41 | if i.URL != "" { 42 | resp, err := http.Get(i.URL) 43 | if err != nil { 44 | return nil, err 45 | } 46 | return resp.Body, nil 47 | } 48 | if i.Base64JSON != "" { 49 | decoded, err := base64.StdEncoding.DecodeString(i.Base64JSON) 50 | if err != nil { 51 | return nil, err 52 | } 53 | return ioutil.NopCloser(bytes.NewBuffer(decoded)), nil 54 | } 55 | return nil, errors.New("no image data") 56 | } 57 | -------------------------------------------------------------------------------- /moderation/moderation.go: -------------------------------------------------------------------------------- 1 | // Package moderation contains a client for OpenAI's moderations API. 2 | package moderation 3 | 4 | import ( 5 | "context" 6 | 7 | "github.com/rakyll/openai-go" 8 | ) 9 | 10 | const defaultCreateEndpoint = "https://api.openai.com/v1/moderations" 11 | 12 | // Client is a client to communicate with Open AI's moderation API. 13 | type Client struct { 14 | s *openai.Session 15 | model string 16 | 17 | // CreateEndpoint allows overriding the default API endpoint. 18 | // Set this field before using the client. 19 | CreateEndpoint string 20 | } 21 | 22 | func NewClient(session *openai.Session, model string) *Client { 23 | return &Client{ 24 | s: session, 25 | model: model, 26 | CreateEndpoint: defaultCreateEndpoint, 27 | } 28 | } 29 | 30 | type CreateParams struct { 31 | Model string `json:"model,omitempty"` 32 | Input []string `json:"input,omitempty"` 33 | } 34 | 35 | type CreateResponse struct { 36 | ID string `json:"id,omitempty"` 37 | Results []*Result `json:"results,omitempty"` 38 | } 39 | 40 | type Result struct { 41 | Categories map[string]bool `json:"categories,omitempty"` 42 | CategoryScores map[string]float64 `json:"category_scores,omitempty"` 43 | Flagged bool `json:"flagged,omitempty"` 44 | } 45 | 46 | func (c *Client) Create(ctx context.Context, p *CreateParams) (*CreateResponse, error) { 47 | if p.Model == "" { 48 | p.Model = c.model 49 | } 50 | 51 | var r CreateResponse 52 | if err := c.s.MakeRequest(ctx, c.CreateEndpoint, p, &r); err != nil { 53 | return nil, err 54 | } 55 | return &r, nil 56 | } 57 | -------------------------------------------------------------------------------- /openai.go: -------------------------------------------------------------------------------- 1 | // Package openai contains Go client libraries for OpenAI libraries. 2 | package openai 3 | 4 | import ( 5 | "bufio" 6 | "bytes" 7 | "context" 8 | "encoding/json" 9 | "fmt" 10 | "io" 11 | "mime/multipart" 12 | "net/http" 13 | "net/url" 14 | "strings" 15 | "time" 16 | ) 17 | 18 | const userAgent = "openai-go/1" 19 | 20 | // Session is a session created to communicate with OpenAI. 21 | type Session struct { 22 | // OrganizationID is the ID optionally to be included as 23 | // a header to requests made from this session. 24 | // This field must be set before session is used. 25 | OrganizationID string 26 | 27 | // HTTPClient providing a custom HTTP client. 28 | // This field must be set before session is used. 29 | HTTPClient *http.Client 30 | 31 | apiKey string 32 | } 33 | 34 | // NewSession creates a new session. 
Setting an organization ID is optional; 35 | // populate the OrganizationID field if you need to send one. 36 | func NewSession(apiKey string) *Session { 37 | return &Session{ 38 | apiKey: apiKey, 39 | HTTPClient: &http.Client{ 40 | Timeout: 30 * time.Second, 41 | }, 42 | } 43 | } 44 | 45 | // MakeRequest makes an HTTP request and authenticates it with the 46 | // session's API key. MakeRequest marshals input as the request body, 47 | // and unmarshals the response as output. 48 | func (s *Session) MakeRequest(ctx context.Context, endpoint string, input, output any) error { 49 | reqBody, err := json.Marshal(input) 50 | if err != nil { 51 | return err 52 | } 53 | 54 | req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(reqBody)) 55 | if err != nil { 56 | return err 57 | } 58 | 59 | respBody, err := s.makeRequest(req, "application/json") 60 | if err != nil { 61 | return err 62 | } 63 | defer respBody.Close() 64 | 65 | return json.NewDecoder(respBody).Decode(output) 66 | } 67 | 68 | func (s *Session) MakeStreamingRequest(ctx context.Context, endpoint string, input any, output any, fn func(any)) error { 69 | const ( 70 | streamPrefix = "data: " 71 | streamEnd = "[DONE]" 72 | ) 73 | 74 | buf, err := json.Marshal(input) 75 | if err != nil { 76 | return err 77 | } 78 | 79 | req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(buf)) 80 | if err != nil { 81 | return err 82 | } 83 | 84 | respBody, err := s.makeRequest(req, "application/json") 85 | if err != nil { 86 | return err 87 | } 88 | defer respBody.Close() 89 | 90 | scanner := bufio.NewScanner(respBody) 91 | for scanner.Scan() { 92 | line := strings.Replace(scanner.Text(), streamPrefix, "", 1) 93 | if line == "" { 94 | continue 95 | } 96 | if line == streamEnd { 97 | return nil 98 | } 99 | if err := json.Unmarshal([]byte(line), output); err != nil { 100 | return fmt.Errorf("failed to unmarshal streaming response: %w", err) 101 | } 102 | fn(output) 103 | } 104 | return scanner.Err() 105 | } 106 | 107 | // Upload makes a multipart form data upload and authenticates it with the 108 | // session's API key. Upload combines the file with the given params 109 | // and unmarshals the response as output.
110 | func (s *Session) Upload(ctx context.Context, endpoint string, file io.Reader, fileExt string, params url.Values, output any) error { 111 | pr, pw := io.Pipe() 112 | mw := multipart.NewWriter(pw) 113 | go func() { 114 | err := upload(mw, file, fileExt, params) 115 | pw.CloseWithError(err) 116 | }() 117 | req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, pr) 118 | if err != nil { 119 | return err 120 | } 121 | respBody, err := s.makeRequest(req, mw.FormDataContentType()) 122 | if err != nil { 123 | return err 124 | } 125 | defer respBody.Close() 126 | 127 | return json.NewDecoder(respBody).Decode(output) 128 | } 129 | 130 | func (s *Session) makeRequest(req *http.Request, contentType string) (io.ReadCloser, error) { 131 | if s.apiKey != "" { 132 | req.Header.Set("Authorization", "Bearer "+s.apiKey) 133 | } 134 | if s.OrganizationID != "" { 135 | req.Header.Set("OpenAI-Organization", s.OrganizationID) 136 | } 137 | req.Header.Set("Content-Type", contentType) 138 | req.Header.Add("User-Agent", userAgent) 139 | 140 | resp, err := s.HTTPClient.Do(req) 141 | if err != nil { 142 | return nil, fmt.Errorf("error making request: %w", err) 143 | } 144 | 145 | if resp.StatusCode < 200 || resp.StatusCode >= 400 { 146 | respBody, err := io.ReadAll(resp.Body) 147 | if err != nil { 148 | return nil, err 149 | } 150 | return nil, &APIError{ 151 | StatusCode: resp.StatusCode, 152 | Payload: respBody, 153 | } 154 | } 155 | return resp.Body, nil 156 | } 157 | 158 | func upload(mw *multipart.Writer, file io.Reader, fileExt string, params url.Values) error { 159 | for key := range params { 160 | w, err := mw.CreateFormField(key) 161 | if err != nil { 162 | return fmt.Errorf("error creating %q field: %w", key, err) 163 | } 164 | if _, err := fmt.Fprint(w, params.Get(key)); err != nil { 165 | return fmt.Errorf("error writing %q field: %w", key, err) 166 | } 167 | } 168 | w, err := mw.CreateFormFile("file", "audio."+fileExt) 169 | if err != nil { 170 | return fmt.Errorf("error creating file: %w", err) 171 | } 172 | if _, err := io.Copy(w, file); err != nil { 173 | return fmt.Errorf("error copying file: %w", err) 174 | } 175 | if err := mw.Close(); err != nil { 176 | return fmt.Errorf("error closing multipart writer: %w", err) 177 | } 178 | return nil 179 | } 180 | 181 | // APIError is returned from API requests if the API 182 | // responds with an error. 183 | type APIError struct { 184 | StatusCode int 185 | Payload []byte 186 | } 187 | 188 | func (e *APIError) Error() string { 189 | return fmt.Sprintf("status_code=%d, payload=%s", e.StatusCode, e.Payload) 190 | } 191 | 192 | // Usage reports the API usage. 193 | type Usage struct { 194 | PromptTokens int `json:"prompt_tokens,omitempty"` 195 | CompletionTokens int `json:"completion_tokens,omitempty"` 196 | TotalTokens int `json:"total_tokens,omitempty"` 197 | } 198 | --------------------------------------------------------------------------------
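The `examples` directory covers every package except the streaming clients. The sketch below is not part of the repository; it shows one way to consume `chat.StreamingClient`, reusing the `OPENAI_API_KEY` convention from the other examples. The prompt text and output handling are purely illustrative.

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/rakyll/openai-go"
	"github.com/rakyll/openai-go/chat"
)

func main() {
	ctx := context.Background()
	s := openai.NewSession(os.Getenv("OPENAI_API_KEY"))

	// An empty model falls back to the package default ("gpt-3.5-turbo").
	client := chat.NewStreamingClient(s, "")
	err := client.CreateCompletion(ctx, &chat.CreateCompletionParams{
		Messages: []*chat.Message{
			{Role: "user", Content: "Write a haiku about Go."},
		},
	}, func(r *chat.CreateCompletionStreamingResponse) {
		// Each server-sent event carries a partial delta; print the content
		// pieces as they arrive to reassemble the reply.
		for _, choice := range r.Choices {
			if choice.Delta != nil {
				fmt.Print(choice.Delta.Content)
			}
		}
	})
	if err != nil {
		log.Fatalf("Failed to stream completion: %v", err)
	}
	fmt.Println()
}
```

The `completion` package's `StreamingClient` follows the same callback pattern through its `Create` method, with `completion.CreateParams` and `completion.CreateResponse` in place of the chat types.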
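Each client also exposes its endpoint as a public field, and `Session` exposes `OrganizationID` and `HTTPClient`, so requests can be redirected or customized without changing the packages. The following sketch is likewise not part of the repository: the proxy URL, the `OPENAI_ORG_ID` environment variable, and the timeout are placeholders, and the error inspection relies on the `*openai.APIError` value the session returns for error status codes.

```go
package main

import (
	"context"
	"errors"
	"log"
	"net/http"
	"os"
	"time"

	"github.com/rakyll/openai-go"
	"github.com/rakyll/openai-go/chat"
)

func main() {
	ctx := context.Background()

	s := openai.NewSession(os.Getenv("OPENAI_API_KEY"))
	// Both fields are optional; set them before the session is used.
	s.OrganizationID = os.Getenv("OPENAI_ORG_ID") // hypothetical env var
	s.HTTPClient = &http.Client{Timeout: 60 * time.Second}

	client := chat.NewClient(s, "")
	// CreateCompletionEndpoint can point at a compatible proxy or mock server.
	client.CreateCompletionEndpoint = "https://example.internal/v1/chat/completions" // placeholder URL

	resp, err := client.CreateCompletion(ctx, &chat.CreateCompletionParams{
		Messages: []*chat.Message{{Role: "user", Content: "ping"}},
	})
	if err != nil {
		// Error status codes surface as *openai.APIError, carrying the HTTP
		// status code and the raw response payload.
		var apiErr *openai.APIError
		if errors.As(err, &apiErr) {
			log.Fatalf("API error: status=%d payload=%s", apiErr.StatusCode, apiErr.Payload)
		}
		log.Fatalf("Request failed: %v", err)
	}
	if len(resp.Choices) > 0 {
		log.Println(resp.Choices[0].Message.Content)
	}
}
```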