├── .github
└── workflows
│ └── ci.yml
├── .gitignore
├── .swiftpm
└── xcode
│ └── package.xcworkspace
│ └── xcshareddata
│ └── IDEWorkspaceChecks.plist
├── CONTRIBUTING.md
├── Examples
└── SwiftOpenAIExample
│ ├── SwiftOpenAIExample.xcodeproj
│ ├── project.pbxproj
│ ├── project.xcworkspace
│ │ ├── contents.xcworkspacedata
│ │ └── xcshareddata
│ │ │ └── IDEWorkspaceChecks.plist
│ └── xcshareddata
│ │ └── xcschemes
│ │ └── SwiftOpenAIExample.xcscheme
│ ├── SwiftOpenAIExample
│ ├── AIProxyIntroView.swift
│ ├── ApiKeyIntroView.swift
│ ├── Assets.xcassets
│ │ ├── AccentColor.colorset
│ │ │ └── Contents.json
│ │ ├── AppIcon.appiconset
│ │ │ └── Contents.json
│ │ └── Contents.json
│ ├── Assistants
│ │ ├── AssistantConfigurationDemoView.swift
│ │ ├── AssistantConfigurationProvider.swift
│ │ ├── AssistantStreamDemoScreen.swift
│ │ ├── AssistantThreadConfigurationProvider.swift
│ │ └── AssistantsListDemoView.swift
│ ├── AudioDemo
│ │ ├── AudioDemoView.swift
│ │ └── AudioProvider.swift
│ ├── ChatDemo
│ │ ├── ChatDemoView.swift
│ │ └── ChatProvider.swift
│ ├── ChatFunctionsCall
│ │ ├── ChatMessageDisplayModel.swift
│ │ ├── ChatMessageView.swift
│ │ ├── Completion
│ │ │ ├── ChatFunctionCallDemoView.swift
│ │ │ └── ChatFunctionCallProvider.swift
│ │ └── Stream
│ │ │ ├── ChatFunctionsCallStreamProvider.swift
│ │ │ └── ChatFunctionsCalllStreamDemoView.swift
│ ├── ChatStreamFluidConversationDemo
│ │ ├── ChatFluidConversationProvider.swift
│ │ └── ChatStreamFluidConversationDemoView.swift
│ ├── ChatStructureOutputTool
│ │ ├── ChatStructureOutputToolDemoView.swift
│ │ └── ChatStructuredOutputToolProvider.swift
│ ├── ChatStructuredOutputs
│ │ ├── ChatStructuredOutputDemoView.swift
│ │ └── ChatStructuredOutputProvider.swift
│ ├── EmbeddingsDemo
│ │ ├── EmbeddingsDemoView.swift
│ │ └── Embeddingsprovider.swift
│ ├── Files
│ │ ├── AttachmentView.swift
│ │ ├── FileAttachmentView.swift
│ │ ├── FilesPicker.swift
│ │ └── FilesPickerProvider.swift
│ ├── FilesDemo
│ │ ├── FilesDemoView.swift
│ │ └── FilesProvider.swift
│ ├── FineTuningDemo
│ │ ├── FineTuningJobDemoView.swift
│ │ └── FineTuningJobProvider.swift
│ ├── ImagesDemo
│ │ ├── ImagesDemoView.swift
│ │ └── ImagesProvider.swift
│ ├── LocalChatDemo
│ │ └── LocalChatDemoView.swift
│ ├── LocalHostEntryView.swift
│ ├── ModelsDemo
│ │ ├── ModelsDemoView.swift
│ │ └── ModelsProvider.swift
│ ├── ModerationsDemo
│ │ ├── ModerationDemoView.swift
│ │ └── ModerationProvider.swift
│ ├── OptionsListView.swift
│ ├── PredictedOutputsDemo
│ │ └── ChatPredictedOutputDemoView.swift
│ ├── Preview Content
│ │ └── Preview Assets.xcassets
│ │ │ └── Contents.json
│ ├── Resources
│ │ ├── WorldCupData.jsonl
│ │ ├── german.m4a
│ │ └── narcos.m4a
│ ├── ResponseAPIDemo
│ │ ├── ResponseStreamDemoView.swift
│ │ └── ResponseStreamProvider.swift
│ ├── ServiceSelectionView.swift
│ ├── SharedModels
│ │ └── ChatDisplayMessage.swift
│ ├── SharedUI
│ │ ├── ChatDisplayMessageView.swift
│ │ ├── ChatMessageLoadingView.swift
│ │ ├── LoadingView.swift
│ │ ├── Theme
│ │ │ ├── Sizes.swift
│ │ │ └── ThemeColor.swift
│ │ └── URLImageView.swift
│ ├── SwiftOpenAIExample.entitlements
│ ├── SwiftOpenAIExampleApp.swift
│ ├── Utilities
│ │ └── ContentLoader.swift
│ └── Vision
│ │ ├── ChatVisionDemoView.swift
│ │ └── ChatVisionProvider.swift
│ ├── SwiftOpenAIExampleTests
│ └── SwiftOpenAIExampleTests.swift
│ └── SwiftOpenAIExampleUITests
│ ├── SwiftOpenAIExampleUITests.swift
│ └── SwiftOpenAIExampleUITestsLaunchTests.swift
├── LICENSE
├── Package.swift
├── README.md
├── Sources
└── OpenAI
│ ├── AIProxy
│ ├── AIProxyCertificatePinning.swift
│ ├── AIProxyService.swift
│ └── Endpoint+AIProxy.swift
│ ├── Azure
│ ├── AzureOpenAIAPI.swift
│ ├── AzureOpenAIConfiguration.swift
│ └── DefaultOpenAIAzureService.swift
│ ├── LocalModelService
│ ├── LocalModelAPI.swift
│ └── LocalModelService.swift
│ ├── Private
│ └── Networking
│ │ ├── Endpoint.swift
│ │ ├── MultipartFormDataBuilder.swift
│ │ └── OpenAIAPI.swift
│ └── Public
│ ├── Parameters
│ ├── Assistant
│ │ └── AssistantParameters.swift
│ ├── Audio
│ │ ├── AudioSpeechParameters.swift
│ │ ├── AudioTranscriptionParameters.swift
│ │ └── AudioTranslationParameters.swift
│ ├── Batch
│ │ └── BatchParameter.swift
│ ├── Chat
│ │ └── ChatCompletionParameters.swift
│ ├── Embedding
│ │ └── EmbeddingParameter.swift
│ ├── File
│ │ └── FileParameter.swift
│ ├── FineTuning
│ │ └── FineTuningJobParameters.swift
│ ├── Image
│ │ ├── Dalle.swift
│ │ ├── ImageCreateParameters.swift
│ │ ├── ImageEditParameters.swift
│ │ └── ImageVariationParameters.swift
│ ├── ImageGen
│ │ ├── CreateImageEditParameters.swift
│ │ ├── CreateImageParameters.swift
│ │ └── CreateImageVariationParameters.swift
│ ├── Message
│ │ ├── MessageParameter.swift
│ │ └── ModifyMessageParameters.swift
│ ├── Model.swift
│ ├── Moderation
│ │ └── ModerationParameter.swift
│ ├── MultipartFormDataParameters.swift
│ ├── Response
│ │ ├── InputType.swift
│ │ └── ModelResponseParameter.swift
│ ├── Runs
│ │ ├── CreateThreadAndRunParameter.swift
│ │ ├── ModifyRunParameters.swift
│ │ ├── RunParameter.swift
│ │ └── RunToolsOutputParameter.swift
│ ├── Threads
│ │ ├── CreateThreadParameters.swift
│ │ └── ModifyThreadParameters.swift
│ ├── VectorStore
│ │ └── VectorStoreParameter.swift
│ ├── VectorStoreFileBatch
│ │ └── VectorStoreFileBatchParameter.swift
│ └── VectorStoreFiles
│ │ └── VectorStoreFileParameter.swift
│ ├── ResponseModels
│ ├── Assistants
│ │ ├── AssistantObject.swift
│ │ ├── AssistantStreamEvent.swift
│ │ └── AssistantStreamEventObject.swift
│ ├── Audio
│ │ ├── AudioObject.swift
│ │ └── AudioSpeechObject.swift
│ ├── Batch
│ │ └── BatchObject.swift
│ ├── Chat
│ │ ├── ChatCompletionChunkObject.swift
│ │ ├── ChatCompletionObject.swift
│ │ └── ChatUsage.swift
│ ├── Delta.swift
│ ├── Embedding
│ │ └── EmbeddingObject.swift
│ ├── File
│ │ └── FileObject.swift
│ ├── FineTuning
│ │ ├── FineTuningJobEventObject.swift
│ │ └── FineTuningJobObject.swift
│ ├── Image
│ │ └── ImageObject.swift
│ ├── ImageGen
│ │ └── CreateImageResponse.swift
│ ├── Messages
│ │ ├── MessageContent.swift
│ │ ├── MessageDeltaObject.swift
│ │ └── MessageObject.swift
│ ├── Model
│ │ └── ModelObject.swift
│ ├── Moderation
│ │ └── ModerationObject.swift
│ ├── OpenAIErrorResponse.swift
│ ├── OpenAIResponse.swift
│ ├── Response
│ │ ├── OutputItem.swift
│ │ ├── ResponseModel.swift
│ │ └── ResponseStreamEvent.swift
│ ├── Runs
│ │ ├── RunObject.swift
│ │ ├── RunStepDeltaObject.swift
│ │ ├── RunStepDetails.swift
│ │ └── RunStepObject.swift
│ ├── Threads
│ │ └── ThreadObject.swift
│ ├── VectorStore
│ │ └── VectorStoreObject.swift
│ ├── VectorStoreFile
│ │ └── VectorStoreFileObject.swift
│ └── VectorStoreFileBatch
│ │ └── VectorStoreFileBatchObject.swift
│ ├── Service
│ ├── DefaultOpenAIService.swift
│ ├── OpenAIService.swift
│ └── OpenAIServiceFactory.swift
│ └── Shared
│ ├── DeletionStatus.swift
│ ├── ExpirationPolicy.swift
│ ├── FileCount.swift
│ ├── IncompleteDetails.swift
│ ├── JSONSchema.swift
│ ├── LastError.swift
│ ├── MessageAttachment.swift
│ ├── Reasoning.swift
│ ├── ResponseFormat.swift
│ ├── TextConfiguration.swift
│ ├── Tool.swift
│ ├── ToolCall.swift
│ ├── ToolChoice.swift
│ ├── ToolChoiceMode.swift
│ ├── ToolResources.swift
│ ├── TruncationStrategy.swift
│ └── Usage.swift
├── Tests
└── OpenAITests
│ ├── InputTypeTests.swift
│ ├── ModelResponseParameterTests.swift
│ ├── OpenAITests.swift
│ ├── ResponseModelValidationTests.swift
│ └── ResponseStreamEventTests.swift
└── rules.swiftformat
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | # This workflow will build a Swift project
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-swift
3 |
4 | name: CI tests
5 |
6 | on:
7 | push:
8 | branches: [ "main" ]
9 | pull_request:
10 | branches: [ "main" ]
11 |
12 | jobs:
13 | build_and_test:
14 | runs-on: macos-latest
15 | steps:
16 | - uses: swift-actions/setup-swift@v2
17 | with:
18 | swift-version: "6.0.1"
19 | - name: Get swift version
20 | run: swift --version
21 | - uses: actions/checkout@v4
22 | - name: Build
23 | run: swift build -q
24 | - name: Run tests
25 | run: swift test -q
26 |
27 | lint:
28 | runs-on: macos-latest
29 | steps:
30 | - uses: actions/checkout@v4
31 | - name: Set up Homebrew
32 | id: set-up-homebrew
33 | uses: Homebrew/actions/setup-homebrew@master
34 | - name: Install swiftformat
35 | run: brew install swiftformat
36 | - name: Run linter
37 | run: swiftformat --config rules.swiftformat .
38 | - name: Verify that `swiftformat --config rules.swiftformat .` did not change outputs (if it did, please re-run it and re-commit!)
39 | run: git diff --exit-code
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | /.build
3 | /Packages
4 | xcuserdata/
5 | DerivedData/
6 | .swiftpm/configuration/registries.json
7 | .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
8 | .netrc
9 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 |
2 | ### Linting:
3 | ```bash
4 | brew install swiftformat
5 | swiftformat --config rules.swiftformat .
6 | ```
--------------------------------------------------------------------------------
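The CI lint job above runs the formatter and then fails if the working tree changed. A minimal local check before pushing (assuming swiftformat is installed via Homebrew, as described in CONTRIBUTING.md) might look like:

```bash
# Format the project with the repo's rule file, then fail if anything changed.
swiftformat --config rules.swiftformat .
git diff --exit-code   # a non-zero exit means formatting produced changes
```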
/Examples/SwiftOpenAIExample/SwiftOpenAIExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/AIProxyIntroView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyIntroView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by Lou Zell on 3/27/24.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct AIProxyIntroView: View {
12 |
13 | var body: some View {
14 | NavigationStack {
15 | VStack {
16 | Spacer()
17 | VStack(spacing: 24) {
18 | TextField("Enter partial key", text: $partialKey)
19 | TextField("Enter your service's URL", text: $serviceURL)
20 | }
21 | .padding()
22 | .textFieldStyle(.roundedBorder)
23 |
24 | Text("You receive a partial key and service URL when you configure an app in the AIProxy dashboard")
25 | .font(.caption)
26 |
27 | NavigationLink(destination: OptionsListView(
28 | openAIService: aiproxyService,
29 | options: OptionsListView.APIOption.allCases.filter { $0 != .localChat }))
30 | {
31 | Text("Continue")
32 | .padding()
33 | .padding(.horizontal, 48)
34 | .foregroundColor(.white)
35 | .background(
36 | Capsule()
37 | .foregroundColor(canProceed ? Color(red: 64 / 255, green: 195 / 255, blue: 125 / 255) : .gray.opacity(0.2)))
38 | }
39 | .disabled(!canProceed)
40 | Spacer()
41 | Group {
42 | Text(
43 | "AIProxy keeps your OpenAI API key secure. To configure AIProxy for your project, or to learn more about how it works, please see the docs at ") +
44 | Text("[this link](https://www.aiproxy.pro/docs).")
45 | }
46 | .font(.caption)
47 | }
48 | .padding()
49 | .navigationTitle("AIProxy Configuration")
50 | }
51 | }
52 |
53 | @State private var partialKey = ""
54 | @State private var serviceURL = ""
55 |
56 | private var canProceed: Bool {
57 | !(partialKey.isEmpty || serviceURL.isEmpty)
58 | }
59 |
60 | private var aiproxyService: OpenAIService {
61 | OpenAIServiceFactory.service(
62 | aiproxyPartialKey: partialKey,
63 | aiproxyServiceURL: serviceURL != "" ? serviceURL : nil)
64 | }
65 | }
66 |
67 | #Preview {
68 | AIProxyIntroView()
69 | }
70 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ApiKeyIntroView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct ApiKeyIntroView: View {
12 |
13 | var body: some View {
14 | NavigationStack {
15 | VStack {
16 | Spacer()
17 | VStack(spacing: 24) {
18 | TextField("Enter API Key", text: $apiKey)
19 | TextField("Enter Organization ID (Optional)", text: $organizationIdentifier)
20 | .onChange(of: organizationIdentifier) { _, newValue in
21 | if !newValue.isEmpty {
22 | localOrganizationID = newValue
23 | }
24 | }
25 | }
26 | .padding()
27 | .textFieldStyle(.roundedBorder)
28 | NavigationLink(destination: OptionsListView(
29 | openAIService: OpenAIServiceFactory.service(apiKey: apiKey, organizationID: localOrganizationID, debugEnabled: true),
30 | options: OptionsListView.APIOption.allCases.filter { $0 != .localChat }))
31 | {
32 | Text("Continue")
33 | .padding()
34 | .padding(.horizontal, 48)
35 | .foregroundColor(.white)
36 | .background(
37 | Capsule()
38 | .foregroundColor(apiKey.isEmpty ? .gray.opacity(0.2) : Color(red: 64 / 255, green: 195 / 255, blue: 125 / 255)))
39 | }
40 | .disabled(apiKey.isEmpty)
41 | Spacer()
42 | Group {
43 | Text("If you don't have a valid API KEY yet, you can visit ") +
44 | Text("[this link](https://platform.openai.com/account/api-keys)") + Text(" to get started.")
45 | }
46 | .font(.caption)
47 | }
48 | .padding()
49 | .navigationTitle("Enter OpenAI API KEY")
50 | }
51 | }
52 |
53 | @State private var apiKey = ""
54 | @State private var organizationIdentifier = ""
55 | @State private var localOrganizationID: String? = nil
56 |
57 | }
58 |
59 | #Preview {
60 | ApiKeyIntroView()
61 | }
62 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | },
8 | {
9 | "idiom" : "mac",
10 | "scale" : "1x",
11 | "size" : "16x16"
12 | },
13 | {
14 | "idiom" : "mac",
15 | "scale" : "2x",
16 | "size" : "16x16"
17 | },
18 | {
19 | "idiom" : "mac",
20 | "scale" : "1x",
21 | "size" : "32x32"
22 | },
23 | {
24 | "idiom" : "mac",
25 | "scale" : "2x",
26 | "size" : "32x32"
27 | },
28 | {
29 | "idiom" : "mac",
30 | "scale" : "1x",
31 | "size" : "128x128"
32 | },
33 | {
34 | "idiom" : "mac",
35 | "scale" : "2x",
36 | "size" : "128x128"
37 | },
38 | {
39 | "idiom" : "mac",
40 | "scale" : "1x",
41 | "size" : "256x256"
42 | },
43 | {
44 | "idiom" : "mac",
45 | "scale" : "2x",
46 | "size" : "256x256"
47 | },
48 | {
49 | "idiom" : "mac",
50 | "scale" : "1x",
51 | "size" : "512x512"
52 | },
53 | {
54 | "idiom" : "mac",
55 | "scale" : "2x",
56 | "size" : "512x512"
57 | }
58 | ],
59 | "info" : {
60 | "author" : "xcode",
61 | "version" : 1
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AssistantConfigurationProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/17/23.
6 | //
7 |
8 | import Foundation
9 | import SwiftOpenAI
10 |
11 | @Observable
12 | class AssistantConfigurationProvider {
13 |
14 | // MARK: - Initializer
15 |
16 | init(service: OpenAIService) {
17 | self.service = service
18 | }
19 |
20 | var assistant: AssistantObject?
21 | var assistants: [AssistantObject] = []
22 | var avatarURL: URL?
23 | var assistantDeletionStatus: DeletionStatus?
24 |
25 | func listAssistants()
26 | async throws
27 | {
28 | do {
29 | let assistants = try await service.listAssistants(limit: nil, order: nil, after: nil, before: nil)
30 | self.assistants = assistants.data
31 | } catch {
32 | debugPrint("\(error)")
33 | }
34 | }
35 |
36 | func deleteAssistant(
37 | id: String)
38 | async throws
39 | {
40 | do {
41 | assistantDeletionStatus = try await service.deleteAssistant(id: id)
42 | } catch {
43 | debugPrint("\(error)")
44 | }
45 | }
46 |
47 | func createAssistant(
48 | parameters: AssistantParameters)
49 | async throws
50 | {
51 | do {
52 | assistant = try await service.createAssistant(parameters: parameters)
53 | } catch {
54 | debugPrint("\(error)")
55 | }
56 | }
57 |
58 | func createAvatar(
59 | prompt: String)
60 | async throws
61 | {
62 | do {
63 | let avatarURLs = try await service.createImages(parameters: .init(prompt: prompt, model: .dallE3)).data?.compactMap(\.url)
64 | avatarURL = URL(string: avatarURLs?.first ?? "")
65 | } catch {
66 | debugPrint("\(error)")
67 | }
68 | }
69 |
70 | // TODO: Create demo for this.
71 | func createVStore() async throws {
72 | let _ = try await service.createVectorStore(parameters: .init(name: "Personal Data"))
73 | }
74 |
75 | // MARK: - Private Properties
76 | private let service: OpenAIService
77 |
78 | }
79 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantThreadConfigurationProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AssistantThreadConfigurationProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 3/19/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftOpenAI
10 |
11 | @Observable
12 | class AssistantThreadConfigurationProvider {
13 |
14 | // MARK: - Initializer
15 |
16 | init(service: OpenAIService) {
17 | self.service = service
18 | }
19 |
20 | var thread: ThreadObject?
21 | var message: MessageObject?
22 | var runObject: RunObject?
23 | var messageText = ""
24 | var toolOuptutMessage = ""
25 | var functionCallOutput = ""
26 |
27 | func createThread()
28 | async throws
29 | {
30 | do {
31 | thread = try await service.createThread(parameters: .init())
32 | } catch {
33 | print("THREAD ERROR: \(error)")
34 | }
35 | }
36 |
37 | func createMessage(
38 | threadID: String,
39 | parameters: MessageParameter)
40 | async throws
41 | {
42 | do {
43 | message = try await service.createMessage(threadID: threadID, parameters: parameters)
44 | } catch {
45 | print("THREAD ERROR: \(error)")
46 | }
47 | }
48 |
49 | func createRunAndStreamMessage(
50 | threadID: String,
51 | parameters: RunParameter)
52 | async throws
53 | {
54 | do {
55 | let stream = try await service.createRunStream(threadID: threadID, parameters: parameters)
56 | for try await result in stream {
57 | switch result {
58 | case .threadMessageDelta(let messageDelta):
59 | let content = messageDelta.delta.content.first
60 | switch content {
61 | case .imageFile, .imageUrl, nil:
62 | break
63 | case .text(let textContent):
64 | messageText += textContent.text.value
65 | }
66 |
67 | case .threadRunStepDelta(let runStepDelta):
68 | let toolCall = runStepDelta.delta.stepDetails.toolCalls?.first?.toolCall
69 | switch toolCall {
70 | case .codeInterpreterToolCall(let toolCall):
71 | toolOuptutMessage += toolCall.input ?? ""
72 | case .fileSearchToolCall(let toolCall):
73 | print("PROVIDER: File search tool call \(toolCall)")
74 | case .functionToolCall(let toolCall):
75 | functionCallOutput += toolCall.arguments
76 | case nil:
77 | print("PROVIDER: tool call nil")
78 | }
79 |
80 | case .threadRunCompleted(let runObject):
81 | print("PROVIDER: the run is completed - \(runObject)")
82 |
83 | default: break
84 | }
85 | }
86 | } catch {
87 | print("THREAD ERROR: \(error)")
88 | }
89 | }
90 |
91 | // MARK: - Private Properties
92 | private let service: OpenAIService
93 |
94 | }
95 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantsListDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AssistantsListDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 3/19/24.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | // MARK: - AssistantObject + Identifiable
12 |
13 | extension AssistantObject: Identifiable { }
14 |
15 | // MARK: - AssistantsListDemoView
16 |
17 | public struct AssistantsListDemoView: View {
18 |
19 | public var body: some View {
20 | NavigationView {
21 | ForEach(assistants) { assistant in
22 | NavigationLink(destination: AssistantStartThreadScreen(assistant: assistant, service: service)) {
23 | VStack(alignment: .leading) {
24 | Text(assistant.name ?? "No name")
25 | .font(.title).bold()
26 | Text(assistant.description ?? "No Description")
27 | .font(.subheadline).fontWeight(.medium)
28 | Text(assistant.id)
29 | .font(.caption).fontWeight(.bold)
30 | }
31 | .padding()
32 | .frame(maxWidth: .infinity, alignment: .leading)
33 | .background {
34 | RoundedRectangle(cornerRadius: 25.0)
35 | .fill(.mint)
36 | }
37 | .padding()
38 | }
39 | }
40 | }
41 | }
42 |
43 | let assistants: [AssistantObject]
44 | let service: OpenAIService
45 |
46 | }
47 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/AudioDemo/AudioDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioDemo.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct AudioDemoView: View {
12 |
13 | init(service: OpenAIService) {
14 | _audioProvider = State(initialValue: AudioProvider(service: service))
15 | }
16 |
17 | var textArea: some View {
18 | HStack(spacing: 4) {
19 | TextField("Enter message to convert to speech", text: $prompt, axis: .vertical)
20 | .textFieldStyle(.roundedBorder)
21 | .padding()
22 | Button {
23 | Task {
24 | isLoading = true
25 | defer { isLoading = false } // ensure isLoading is set to false when the function exits
26 | try await audioProvider.speech(parameters: .init(model: .tts1, input: prompt, voice: .shimmer))
27 | }
28 | } label: {
29 | Image(systemName: "paperplane")
30 | }
31 | .buttonStyle(.bordered)
32 | }
33 | .padding()
34 | }
35 |
36 | var transcriptionView: some View {
37 | VStack {
38 | Text("Tap this button to use the transcription API; an `m4a` file has been added to the app's bundle.")
39 | .font(.callout)
40 | .padding()
41 | Button("Transcript") {
42 | Task {
43 | isLoading = true
44 | defer { isLoading = false } // ensure isLoading is set to false when the function exits
45 | /// ['flac', 'm4a', 'mp3', 'mp4', 'mpeg', 'mpga', 'oga', 'ogg', 'wav', 'webm'] (supported formats)
46 | let data = try contentLoader.loadBundledContent(fromFileNamed: "narcos", ext: "m4a")
47 | try await audioProvider.transcript(parameters: .init(fileName: "narcos.m4a", file: data))
48 | }
49 | }
50 | .buttonStyle(.borderedProminent)
51 | Text(audioProvider.transcription)
52 | .padding()
53 | }
54 | }
55 |
56 | var translationView: some View {
57 | VStack {
58 | Text("Tap this button to use the translation API; an `m4a` file in German has been added to the app's bundle.")
59 | .font(.callout)
60 | .padding()
61 | Button("Translate") {
62 | Task {
63 | isLoading = true
64 | defer { isLoading = false } // ensure isLoading is set to false when the function exits
65 | /// ['flac', 'm4a', 'mp3', 'mp4', 'mpeg', 'mpga', 'oga', 'ogg', 'wav', 'webm'] (supported formats)
66 | let data = try contentLoader.loadBundledContent(fromFileNamed: "german", ext: "m4a")
67 | try await audioProvider.translate(parameters: .init(fileName: "german.m4a", file: data))
68 | }
69 | }
70 | .buttonStyle(.borderedProminent)
71 | Text(audioProvider.translation)
72 | .padding()
73 | }
74 | }
75 |
76 | var body: some View {
77 | ScrollView {
78 | VStack {
79 | VStack {
80 | Text("Add a text to convert to speech")
81 | textArea
82 | }
83 | transcriptionView
84 | .padding()
85 | Divider()
86 | translationView
87 | .padding()
88 | }
89 | }.overlay(
90 | Group {
91 | if isLoading {
92 | ProgressView()
93 | } else {
94 | EmptyView()
95 | }
96 | })
97 | .safeAreaPadding()
98 | }
99 |
100 | @State private var audioProvider: AudioProvider
101 | @State private var isLoading = false
102 | @State private var prompt = ""
103 |
104 | private let contentLoader = ContentLoader()
105 |
106 | }
107 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/AudioDemo/AudioProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import AVFoundation
9 | import SwiftOpenAI
10 | import SwiftUI
11 |
12 | @Observable
13 | class AudioProvider {
14 |
15 | init(service: OpenAIService) {
16 | self.service = service
17 | }
18 |
19 | var transcription = ""
20 | var translation = ""
21 | var speechErrorMessage = ""
22 | var audioPlayer: AVAudioPlayer?
23 |
24 | func transcript(
25 | parameters: AudioTranscriptionParameters)
26 | async throws
27 | {
28 | do {
29 | transcription = try await service.createTranscription(parameters: parameters).text
30 | } catch {
31 | transcription = "\(error)"
32 | }
33 | }
34 |
35 | func translate(
36 | parameters: AudioTranslationParameters)
37 | async throws
38 | {
39 | do {
40 | translation = try await service.createTranslation(parameters: parameters).text
41 | } catch {
42 | translation = "\(error)"
43 | }
44 | }
45 |
46 | func speech(
47 | parameters: AudioSpeechParameters)
48 | async throws
49 | {
50 | do {
51 | let speech = try await service.createSpeech(parameters: parameters).output
52 | playAudio(from: speech)
53 | } catch let error as APIError {
54 | speechErrorMessage = error.displayDescription
55 | } catch {
56 | speechErrorMessage = "\(error)"
57 | }
58 | }
59 |
60 | private let service: OpenAIService
61 |
62 | private func playAudio(from data: Data) {
63 | do {
64 | // Initialize the audio player with the data
65 | audioPlayer = try AVAudioPlayer(data: data)
66 | audioPlayer?.prepareToPlay()
67 | audioPlayer?.play()
68 | } catch {
69 | // Handle errors
70 | print("Error playing audio: \(error.localizedDescription)")
71 | }
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct ChatDemoView: View {
12 |
13 | init(service: OpenAIService) {
14 | _chatProvider = State(initialValue: ChatProvider(service: service))
15 | }
16 |
17 | enum ChatConfig {
18 | case chatCompletion
19 | case chatCompletionStream
20 | }
21 |
22 | var body: some View {
23 | ScrollView {
24 | VStack {
25 | picker
26 | textArea
27 | Text(chatProvider.errorMessage)
28 | .foregroundColor(.red)
29 | switch selectedSegment {
30 | case .chatCompletionStream:
31 | streamedChatResultView
32 | case .chatCompletion:
33 | chatCompletionResultView
34 | }
35 | }
36 | }
37 | .overlay(
38 | Group {
39 | if isLoading {
40 | ProgressView()
41 | } else {
42 | EmptyView()
43 | }
44 | })
45 | }
46 |
47 | var picker: some View {
48 | Picker("Options", selection: $selectedSegment) {
49 | Text("Chat Completion").tag(ChatConfig.chatCompletion)
50 | Text("Chat Completion stream").tag(ChatConfig.chatCompletionStream)
51 | }
52 | .pickerStyle(SegmentedPickerStyle())
53 | .padding()
54 | }
55 |
56 | var textArea: some View {
57 | HStack(spacing: 4) {
58 | TextField("Enter prompt", text: $prompt, axis: .vertical)
59 | .textFieldStyle(.roundedBorder)
60 | .padding()
61 | Button {
62 | Task {
63 | isLoading = true
64 | defer { isLoading = false } // ensure isLoading is set to false when the function exits
65 |
66 | let content = ChatCompletionParameters.Message.ContentType.text(prompt)
67 | prompt = ""
68 | let parameters = ChatCompletionParameters(
69 | messages: [.init(
70 | role: .user,
71 | content: content)],
72 | model: .custom("claude-3-7-sonnet-20250219"))
73 | switch selectedSegment {
74 | case .chatCompletion:
75 | try await chatProvider.startChat(parameters: parameters)
76 | case .chatCompletionStream:
77 | try await chatProvider.startStreamedChat(parameters: parameters)
78 | }
79 | }
80 | } label: {
81 | Image(systemName: "paperplane")
82 | }
83 | .buttonStyle(.bordered)
84 | }
85 | .padding()
86 | }
87 |
88 | /// stream = `false`
89 | var chatCompletionResultView: some View {
90 | ForEach(Array(chatProvider.messages.enumerated()), id: \.offset) { _, val in
91 | VStack(spacing: 0) {
92 | Text("\(val)")
93 | }
94 | }
95 | }
96 |
97 | /// stream = `true`
98 | var streamedChatResultView: some View {
99 | VStack {
100 | Button("Cancel stream") {
101 | chatProvider.cancelStream()
102 | }
103 | Text(chatProvider.message)
104 | }
105 | }
106 |
107 | @State private var chatProvider: ChatProvider
108 | @State private var isLoading = false
109 | @State private var prompt = ""
110 | @State private var selectedSegment = ChatConfig.chatCompletionStream
111 |
112 | }
113 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | @Observable
12 | class ChatProvider {
13 |
14 | init(service: OpenAIService) {
15 | self.service = service
16 | }
17 |
18 | var messages: [String] = []
19 | var errorMessage = ""
20 | var message = ""
21 | var usage: ChatUsage?
22 |
23 | func startChat(
24 | parameters: ChatCompletionParameters)
25 | async throws
26 | {
27 | do {
28 | let response = try await service.startChat(parameters: parameters)
29 | let choices = response.choices
30 | let chatUsage = response.usage
31 | let logprobs = choices?.compactMap(\.logprobs)
32 | dump(logprobs)
33 | messages = choices?.compactMap(\.message?.content) ?? []
34 | dump(chatUsage)
35 | usage = chatUsage
36 | } catch APIError.responseUnsuccessful(let description, let statusCode) {
37 | self.errorMessage = "Network error with status code: \(statusCode) and description: \(description)"
38 | } catch {
39 | errorMessage = error.localizedDescription
40 | }
41 | }
42 |
43 | func startStreamedChat(
44 | parameters: ChatCompletionParameters)
45 | async throws
46 | {
47 | streamTask = Task {
48 | do {
49 | let stream = try await service.startStreamedChat(parameters: parameters)
50 | for try await result in stream {
51 | let content = result.choices?.first?.delta?.content ?? ""
52 | self.message += content
53 | }
54 | } catch APIError.responseUnsuccessful(let description, let statusCode) {
55 | self.errorMessage = "Network error with status code: \(statusCode) and description: \(description)"
56 | } catch {
57 | self.errorMessage = error.localizedDescription
58 | }
59 | }
60 | }
61 |
62 | func cancelStream() {
63 | streamTask?.cancel()
64 | }
65 |
66 | private let service: OpenAIService
67 | private var streamTask: Task<Void, Never>? = nil
68 |
69 | }
70 |
--------------------------------------------------------------------------------
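As a rough usage sketch of the ChatProvider above (hypothetical, not part of the example app; the service construction mirrors ApiKeyIntroView, and "YOUR_API_KEY" is a placeholder):

```swift
import SwiftOpenAI

// Hypothetical wiring: create a service, hand it to ChatProvider, and stream a reply.
let service = OpenAIServiceFactory.service(
  apiKey: "YOUR_API_KEY", // placeholder, not a real key
  organizationID: nil,
  debugEnabled: true)
let provider = ChatProvider(service: service)

let parameters = ChatCompletionParameters(
  messages: [.init(role: .user, content: .text("Hello!"))],
  model: .gpt4)

Task {
  // Streamed deltas are appended to provider.message as they arrive.
  try await provider.startStreamedChat(parameters: parameters)
}
```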
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/ChatMessageDisplayModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatMessageDisplayModel.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/13/23.
6 | //
7 |
8 | import Foundation
9 | import SwiftOpenAI
10 |
11 | struct ChatMessageDisplayModel: Identifiable {
12 |
13 | init(
14 | id: UUID = UUID(),
15 | content: DisplayContent,
16 | origin: MessageOrigin)
17 | {
18 | self.id = id
19 | self.content = content
20 | self.origin = origin
21 | }
22 |
23 | enum DisplayContent: Equatable {
24 |
25 | case content(DisplayMessageType)
26 | case error(String)
27 |
28 | static func ==(lhs: DisplayContent, rhs: DisplayContent) -> Bool {
29 | switch (lhs, rhs) {
30 | case (.content(let a), .content(let b)):
31 | a == b
32 | case (.error(let a), .error(let b)):
33 | a == b
34 | default:
35 | false
36 | }
37 | }
38 |
39 | struct DisplayMessageType: Equatable {
40 | var text: String?
41 | var urls: [URL]? = nil
42 | }
43 | }
44 |
45 | enum MessageOrigin {
46 |
47 | case received(ReceivedSource)
48 | case sent
49 |
50 | enum ReceivedSource {
51 | case gpt
52 | case dalle
53 | }
54 | }
55 |
56 | let id: UUID
57 | var content: DisplayContent
58 | let origin: MessageOrigin
59 |
60 | }
61 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/ChatMessageView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatMessageView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/13/23.
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | struct ChatMessageView: View {
12 |
13 | let message: ChatMessageDisplayModel
14 |
15 | @ViewBuilder
16 | var header: some View {
17 | switch message.origin {
18 | case .received(let source):
19 | switch source {
20 | case .gpt:
21 | headerWith("wand.and.stars", title: "CHATGPT")
22 | case .dalle:
23 | EmptyView()
24 | }
25 |
26 | case .sent:
27 | headerWith("person.circle", title: "USER")
28 | }
29 | }
30 |
31 | var body: some View {
32 | VStack(alignment: .leading, spacing: 8) {
33 | header
34 | Group {
35 | switch message.content {
36 | case .content(let mediaType):
37 | VStack(alignment: .leading, spacing: Sizes.spacingMedium) {
38 | imagesFrom(urls: mediaType.urls ?? [])
39 | chatMessageViewWith(mediaType.text)
40 | }
41 | .transition(.opacity)
42 |
43 | case .error(let error):
44 | Text(error)
45 | .padding()
46 | .font(.callout)
47 | .background(
48 | RoundedRectangle(cornerRadius: 20)
49 | .foregroundColor(.red.opacity(0.7)))
50 | }
51 | }
52 | .padding(.leading, 23)
53 | }
54 | }
55 |
56 | @ViewBuilder
57 | func chatMessageViewWith(
58 | _ text: String?)
59 | -> some View
60 | {
61 | if let text {
62 | if text.isEmpty {
63 | LoadingView()
64 | } else {
65 | Text(text)
66 | .font(.body)
67 | }
68 | } else {
69 | EmptyView()
70 | }
71 | }
72 |
73 | func headerWith(
74 | _ systemImageName: String,
75 | title: String)
76 | -> some View
77 | {
78 | HStack {
79 | Image(systemName: systemImageName)
80 | .resizable()
81 | .frame(width: 16, height: 16)
82 | Text(title)
83 | .font(.caption2)
84 | }
85 | .foregroundColor(.gray.opacity(0.9))
86 | }
87 |
88 | func imagesFrom(
89 | urls: [URL])
90 | -> some View
91 | {
92 | ScrollView(.horizontal, showsIndicators: false) {
93 | HStack(spacing: 8) {
94 | ForEach(urls, id: \.self) { url in
95 | URLImageView(url: url)
96 | }
97 | }
98 | }
99 | }
100 | }
101 |
102 | #Preview {
103 | VStack {
104 | ChatMessageView(message: .init(
105 | content: .content(.init(text: "What is the capital of Peru? and what is the population")),
106 | origin: .sent))
107 | ChatMessageView(message: .init(content: .content(.init(text: "Lima, and its 28 million inhabitants.")), origin: .received(.gpt)))
108 | ChatMessageView(message: .init(
109 | content: .content(.init(
110 | text: "The image you requested is ready 🐱",
111 | urls: [
112 | URL(
113 | string: "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg")!,
114 | ])),
115 | origin: .received(.dalle)))
116 | ChatMessageView(message: .init(content: .content(.init(text: "")), origin: .received(.gpt)))
117 | }
118 | .padding()
119 | }
120 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatFunctionCallDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/14/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct ChatFunctionCallDemoView: View {
12 |
13 | init(service: OpenAIService) {
14 | _chatProvider = State(initialValue: ChatFunctionCallProvider(service: service))
15 | }
16 |
17 | var body: some View {
18 | ScrollViewReader { proxy in
19 | VStack {
20 | List(chatProvider.chatDisplayMessages) { message in
21 | ChatMessageView(message: message)
22 | .listRowSeparator(.hidden)
23 | }
24 | .listStyle(.plain)
25 | .onChange(of: chatProvider.chatDisplayMessages.last?.content) {
26 | let lastMessage = chatProvider.chatDisplayMessages.last
27 | if let id = lastMessage?.id {
28 | proxy.scrollTo(id, anchor: .bottom)
29 | }
30 | }
31 | textArea
32 | }
33 | }
34 | }
35 |
36 | var textArea: some View {
37 | HStack(spacing: 0) {
38 | VStack(alignment: .leading, spacing: 0) {
39 | textField
40 | .padding(.vertical, Sizes.spacingExtraSmall)
41 | .padding(.horizontal, Sizes.spacingSmall)
42 | }
43 | .padding(.vertical, Sizes.spacingExtraSmall)
44 | .padding(.horizontal, Sizes.spacingExtraSmall)
45 | .background(
46 | RoundedRectangle(cornerRadius: 20)
47 | .stroke(.gray, lineWidth: 1))
48 | .padding(.horizontal, Sizes.spacingMedium)
49 | textAreaSendButton
50 | }
51 | .padding(.horizontal)
52 | .disabled(isLoading)
53 | }
54 |
55 | var textField: some View {
56 | TextField(
57 | "How Can I help you today?",
58 | text: $prompt,
59 | axis: .vertical)
60 | }
61 |
62 | var textAreaSendButton: some View {
63 | Button {
64 | Task {
65 | /// Loading UI
66 | isLoading = true
67 | defer { isLoading = false }
68 | // Clears text field.
69 | let userPrompt = prompt
70 | prompt = ""
71 | try await chatProvider.startChat(prompt: userPrompt)
72 | }
73 | } label: {
74 | Image(systemName: "paperplane")
75 | }
76 | .buttonStyle(.bordered)
77 | .tint(ThemeColor.tintColor)
78 | .disabled(prompt.isEmpty)
79 | }
80 |
81 | @State private var chatProvider: ChatFunctionCallProvider
82 | @State private var isLoading = false
83 | @State private var prompt = ""
84 |
85 | }
86 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCalllStreamDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatFunctionsCalllStreamDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/6/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct ChatFunctionsCalllStreamDemoView: View {
12 |
13 | init(service: OpenAIService) {
14 | _chatProvider = State(initialValue: ChatFunctionsCallStreamProvider(service: service))
15 | }
16 |
17 | var body: some View {
18 | ScrollViewReader { proxy in
19 | VStack {
20 | List(chatProvider.chatDisplayMessages) { message in
21 | ChatMessageView(message: message)
22 | .listRowSeparator(.hidden)
23 | }
24 | .listStyle(.plain)
25 | .onChange(of: chatProvider.chatDisplayMessages.last?.content) {
26 | let lastMessage = chatProvider.chatDisplayMessages.last
27 | if let id = lastMessage?.id {
28 | proxy.scrollTo(id, anchor: .bottom)
29 | }
30 | }
31 | textArea
32 | }
33 | }
34 | }
35 |
36 | var textArea: some View {
37 | HStack(spacing: 0) {
38 | VStack(alignment: .leading, spacing: 0) {
39 | textField
40 | .padding(.vertical, Sizes.spacingExtraSmall)
41 | .padding(.horizontal, Sizes.spacingSmall)
42 | }
43 | .padding(.vertical, Sizes.spacingExtraSmall)
44 | .padding(.horizontal, Sizes.spacingExtraSmall)
45 | .background(
46 | RoundedRectangle(cornerRadius: 20)
47 | .stroke(.gray, lineWidth: 1))
48 | .padding(.horizontal, Sizes.spacingMedium)
49 | textAreaSendButton
50 | }
51 | .padding(.horizontal)
52 | .disabled(isLoading)
53 | }
54 |
55 | var textField: some View {
56 | TextField(
57 | "How Can I help you today?",
58 | text: $prompt,
59 | axis: .vertical)
60 | }
61 |
62 | var textAreaSendButton: some View {
63 | Button {
64 | Task {
65 | /// Loading UI
66 | isLoading = true
67 | defer { isLoading = false }
68 | // Clears text field.
69 | let userPrompt = prompt
70 | prompt = ""
71 | try await chatProvider.chat(prompt: userPrompt)
72 | }
73 | } label: {
74 | Image(systemName: "paperplane")
75 | }
76 | .buttonStyle(.bordered)
77 | .tint(ThemeColor.tintColor)
78 | .disabled(prompt.isEmpty)
79 | }
80 |
81 | @State private var isLoading = false
82 | @State private var prompt = ""
83 | @State private var chatProvider: ChatFunctionsCallStreamProvider
84 |
85 | }
86 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatStreamFluidConversationDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatStreamFluidConversationDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/4/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct ChatStreamFluidConversationDemoView: View {
12 |
13 | init(service: OpenAIService) {
14 | _chatProvider = State(initialValue: ChatFluidConversationProvider(service: service))
15 | }
16 |
17 | enum GPTModel: String, CaseIterable {
18 | case gpt3dot5 = "GPT-3.5"
19 | case gpt4 = "GPT-4"
20 | }
21 |
22 | var body: some View {
23 | ScrollViewReader { proxy in
24 | VStack {
25 | picker
26 | List(chatProvider.chatMessages) { message in
27 | ChatDisplayMessageView(message: message)
28 | .listRowSeparator(.hidden)
29 | }
30 | .listStyle(.plain)
31 | .onChange(of: chatProvider.chatMessages.last?.content) {
32 | let lastMessage = chatProvider.chatMessages.last
33 | if let id = lastMessage?.id {
34 | proxy.scrollTo(id, anchor: .bottom)
35 | }
36 | }
37 | textArea
38 | }
39 | }
40 | }
41 |
42 | var picker: some View {
43 | Picker("", selection: $selectedModel) {
44 | ForEach(GPTModel.allCases, id: \.self) { model in
45 | Text(model.rawValue)
46 | .font(.title)
47 | .tag(model)
48 | }
49 | }
50 | .pickerStyle(.segmented)
51 | .padding()
52 | }
53 |
54 | var textArea: some View {
55 | HStack(spacing: 0) {
56 | TextField(
57 | "How Can I help you today?",
58 | text: $prompt,
59 | axis: .vertical)
60 | .textFieldStyle(.roundedBorder)
61 | .padding()
62 | textAreaButton
63 | }
64 | .padding(.horizontal)
65 | .disabled(isLoading)
66 | }
67 |
68 | var textAreaButton: some View {
69 | Button {
70 | Task {
71 | isLoading = true
72 | defer {
73 | // ensure isLoading is set to false after the function executes.
74 | isLoading = false
75 | prompt = ""
76 | }
77 | /// Make the request
78 | try await chatProvider.startStreamedChat(parameters: .init(
79 | messages: [.init(role: .user, content: .text(prompt))],
80 | model: selectedModel == .gpt3dot5 ? .gpt35Turbo : .gpt4), prompt: prompt)
81 | }
82 | } label: {
83 | Image(systemName: "paperplane")
84 | }
85 | .buttonStyle(.bordered)
86 | }
87 |
88 | @State private var chatProvider: ChatFluidConversationProvider
89 | @State private var isLoading = false
90 | @State private var prompt = ""
91 | @State private var selectedModel = GPTModel.gpt3dot5
92 |
93 | }
94 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructureOutputToolDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatStructureOutputToolDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 8/11/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftOpenAI
10 | import SwiftUI
11 |
12 | struct ChatStructureOutputToolDemoView: View {
13 |
14 | init(service: OpenAIService) {
15 | chatProvider = .init(service: service)
16 | }
17 |
18 | var body: some View {
19 | ScrollViewReader { proxy in
20 | VStack {
21 | List(chatProvider.chatDisplayMessages) { message in
22 | ChatMessageView(message: message)
23 | .listRowSeparator(.hidden)
24 | }
25 | .listStyle(.plain)
26 | .onChange(of: chatProvider.chatDisplayMessages.last?.content) {
27 | let lastMessage = chatProvider.chatDisplayMessages.last
28 | if let id = lastMessage?.id {
29 | proxy.scrollTo(id, anchor: .bottom)
30 | }
31 | }
32 | textArea
33 | }
34 | }
35 | }
36 |
37 | var textArea: some View {
38 | HStack(spacing: 0) {
39 | VStack(alignment: .leading, spacing: 0) {
40 | textField
41 | .padding(.vertical, Sizes.spacingExtraSmall)
42 | .padding(.horizontal, Sizes.spacingSmall)
43 | }
44 | .padding(.vertical, Sizes.spacingExtraSmall)
45 | .padding(.horizontal, Sizes.spacingExtraSmall)
46 | .background(
47 | RoundedRectangle(cornerRadius: 20)
48 | .stroke(.gray, lineWidth: 1))
49 | .padding(.horizontal, Sizes.spacingMedium)
50 | textAreaSendButton
51 | }
52 | .padding(.horizontal)
53 | .disabled(isLoading)
54 | }
55 |
56 | var textField: some View {
57 | TextField(
58 | "How Can I help you today?",
59 | text: $prompt,
60 | axis: .vertical)
61 | }
62 |
63 | var textAreaSendButton: some View {
64 | Button {
65 | Task {
66 | /// Loading UI
67 | isLoading = true
68 | defer { isLoading = false }
69 | // Clears text field.
70 | let userPrompt = prompt
71 | prompt = ""
72 | try await chatProvider.startChat(prompt: userPrompt)
73 | }
74 | } label: {
75 | Image(systemName: "paperplane")
76 | }
77 | .buttonStyle(.bordered)
78 | .tint(ThemeColor.tintColor)
79 | .disabled(prompt.isEmpty)
80 | }
81 |
82 | @State private var chatProvider: ChatStructuredOutputToolProvider
83 | @State private var isLoading = false
84 | @State private var prompt = ""
85 |
86 | }
87 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatStructuredOutputProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 8/10/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftOpenAI
10 |
11 | // MARK: - ChatStructuredOutputProvider
12 |
13 | @Observable
14 | final class ChatStructuredOutputProvider {
15 |
16 | // MARK: - Initializer
17 |
18 | init(service: OpenAIService) {
19 | self.service = service
20 | }
21 |
22 | var message = ""
23 | var messages: [String] = []
24 | var errorMessage = ""
25 |
26 | // MARK: - Public Methods
27 |
28 | func startChat(
29 | parameters: ChatCompletionParameters)
30 | async throws
31 | {
32 | do {
33 | let choices = try await service.startChat(parameters: parameters).choices ?? []
34 | messages = choices.compactMap(\.message?.content).map { $0.asJsonFormatted() }
35 | assert(messages.count == 1)
36 | errorMessage = choices.first?.message?.refusal ?? ""
37 | } catch APIError.responseUnsuccessful(let description, let statusCode) {
38 | self.errorMessage = "Network error with status code: \(statusCode) and description: \(description)"
39 | } catch {
40 | errorMessage = error.localizedDescription
41 | }
42 | }
43 |
44 | func startStreamedChat(
45 | parameters: ChatCompletionParameters)
46 | async throws
47 | {
48 | streamTask = Task {
49 | do {
50 | let stream = try await service.startStreamedChat(parameters: parameters)
51 | for try await result in stream {
52 | let firstChoiceDelta = result.choices?.first?.delta
53 | let content = firstChoiceDelta?.refusal ?? firstChoiceDelta?.content ?? ""
54 | self.message += content
55 | if result.choices?.first?.finishReason != nil {
56 | self.message = self.message.asJsonFormatted()
57 | }
58 | }
59 | } catch APIError.responseUnsuccessful(let description, let statusCode) {
60 | self.errorMessage = "Network error with status code: \(statusCode) and description: \(description)"
61 | } catch {
62 | self.errorMessage = error.localizedDescription
63 | }
64 | }
65 | }
66 |
67 | func cancelStream() {
68 | streamTask?.cancel()
69 | }
70 |
71 | private let service: OpenAIService
72 | private var streamTask: Task<Void, Never>? = nil
73 |
74 | }
75 |
76 | /// Helper that pretty-prints a JSON string for display.
77 | extension String {
78 |
79 | func asJsonFormatted() -> String {
80 | guard let data = data(using: .utf8) else { return self }
81 | do {
82 | // Parse JSON string to Any object
83 | if let jsonObject = try JSONSerialization.jsonObject(with: data, options: []) as? [String: Any] {
84 | // Convert back to data with pretty-printing
85 | let prettyPrintedData = try JSONSerialization.data(withJSONObject: jsonObject, options: [.prettyPrinted, .sortedKeys])
86 |
87 | // Convert formatted data back to string
88 | return String(data: prettyPrintedData, encoding: .utf8) ?? self
89 | }
90 | } catch {
91 | print("Error formatting JSON: \(error)")
92 | }
93 | return self
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
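A quick, hypothetical illustration of the asJsonFormatted() String helper defined above (standalone, not taken from the app); the commented output is approximate JSONSerialization pretty-printing with sorted keys:

```swift
let raw = #"{"name":"Lima","country":"Peru"}"#
// Pretty-prints the compact JSON string for display.
print(raw.asJsonFormatted())
// {
//   "country" : "Peru",
//   "name" : "Lima"
// }
```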
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/EmbeddingsDemo/EmbeddingsDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // EmbeddingsDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/23/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct EmbeddingsDemoView: View {
12 |
13 | init(service: OpenAIService) {
14 | _embeddingsProvider = State(initialValue: EmbeddingsProvider(service: service))
15 | }
16 |
17 | var textArea: some View {
18 | HStack(spacing: 4) {
19 | TextField("Enter prompt", text: $prompt, axis: .vertical)
20 | .textFieldStyle(.roundedBorder)
21 | .padding()
22 | Button {
23 | Task {
24 | isLoading = true
25 | defer { isLoading = false } // ensure isLoading is set to false when the function exits
26 | do {
27 | try await embeddingsProvider.createEmbeddings(parameters: .init(
28 | input: prompt,
29 | model: .textEmbedding3Large,
30 | encodingFormat: nil,
31 | dimensions: nil))
32 | } catch {
33 | errorMessage = "\(error)"
34 | }
35 | }
36 | } label: {
37 | Image(systemName: "paperplane")
38 | }
39 | .buttonStyle(.bordered)
40 | }
41 | .padding()
42 | }
43 |
44 | var list: some View {
45 | List {
46 | ForEach(Array(embeddingsProvider.embeddings.enumerated()), id: \.offset) { _, embeddingObject in
47 | Section(header: Text("Section \(embeddingObject.index) \(embeddingObject.object)")) {
48 | ForEach(embeddingObject.embedding, id: \.self) { embedding in
49 | Text("Embedding Value \(embedding)")
50 | }
51 | }
52 | }
53 | }
54 | }
55 |
56 | var body: some View {
57 | VStack {
58 | textArea
59 | if !errorMessage.isEmpty {
60 | Text("Error \(errorMessage)")
61 | .bold()
62 | }
63 | list
64 | }
65 | .overlay(
66 | Group {
67 | if isLoading {
68 | ProgressView()
69 | } else {
70 | EmptyView()
71 | }
72 | })
73 | }
74 |
75 | @State private var embeddingsProvider: EmbeddingsProvider
76 | @State private var isLoading = false
77 | @State private var prompt = ""
78 | @State private var errorMessage = ""
79 |
80 | }
81 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/EmbeddingsDemo/Embeddingsprovider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Embeddingsprovider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/23/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | @Observable
12 | class EmbeddingsProvider {
13 |
14 | init(service: OpenAIService) {
15 | self.service = service
16 | }
17 |
18 | var embeddings: [EmbeddingObject] = []
19 |
20 | func createEmbeddings(
21 | parameters: EmbeddingParameter)
22 | async throws
23 | {
24 | embeddings = try await service.createEmbeddings(parameters: parameters).data
25 | }
26 |
27 | private let service: OpenAIService
28 |
29 | }
30 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/AttachmentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AttachmentView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 5/29/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct AttachmentView: View {
11 |
12 | let fileName: String
13 | @Binding var actionTrigger: Bool
14 | let isLoading: Bool
15 |
16 | var body: some View {
17 | HStack(spacing: Sizes.spacingExtraSmall) {
18 | HStack {
19 | if isLoading == true {
20 | ProgressView()
21 | .frame(width: 10, height: 10)
22 | .padding(.horizontal, Sizes.spacingExtraSmall)
23 | } else {
24 | Image(systemName: "doc")
25 | .resizable()
26 | .aspectRatio(contentMode: .fit)
27 | .frame(width: 10)
28 | .foregroundColor(.secondary)
29 | }
30 | Text(fileName)
31 | .font(.caption2)
32 | }
33 | Button {
34 | actionTrigger = true
35 |
36 | } label: {
37 | Image(systemName: "xmark.circle.fill")
38 | }
39 | .disabled(isLoading)
40 | }
41 | .padding(.leading, Sizes.spacingMedium)
42 | .background(
43 | RoundedRectangle(cornerRadius: 8)
44 | .stroke(.gray.opacity(0.5), lineWidth: 0.5))
45 | }
46 | }
47 |
48 | #Preview {
49 | AttachmentView(fileName: "Mydocument.pdf", actionTrigger: .constant(true), isLoading: true)
50 | }
51 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FileAttachmentView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 5/29/24.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | // MARK: - FileAttachmentView
12 |
13 | struct FileAttachmentView: View {
14 |
15 | init(
16 | service: OpenAIService,
17 | action: FilePickerAction,
18 | fileUploadedCompletion: @escaping (_ file: FileObject) -> Void,
19 | fileDeletedCompletion: @escaping (_ parameters: FilePickerAction, _ id: String) -> Void)
20 | {
21 | fileProvider = FilesPickerProvider(service: service)
22 | self.action = action
23 | self.fileUploadedCompletion = fileUploadedCompletion
24 | self.fileDeletedCompletion = fileDeletedCompletion
25 | }
26 |
27 | var body: some View {
28 | Group {
29 | switch action {
30 | case .request(let parameters):
31 | newUploadedFileView(parameters: parameters)
32 | case .retrieveAndDisplay(let id):
33 | previousUploadedFileView(id: id)
34 | }
35 | }
36 | .onChange(of: deleted) { oldValue, newValue in
37 | if oldValue != newValue, newValue {
38 | Task {
39 | if let fileObject {
40 | fileDeleteStatus = try await fileProvider.deleteFileWith(id: fileObject.id)
41 | }
42 | }
43 | }
44 | }
45 | .onChange(of: fileDeleteStatus) { oldValue, newValue in
46 | if oldValue != newValue, let newValue, newValue.deleted {
47 | fileDeletedCompletion(action, newValue.id)
48 | }
49 | }
50 | }
51 |
52 | func newUploadedFileView(
53 | parameters: FileParameters)
54 | -> some View
55 | {
56 | AttachmentView(
57 | fileName: (fileObject?.filename ?? parameters.fileName) ?? "",
58 | actionTrigger: $deleted,
59 | isLoading: fileObject == nil || deleted)
60 | .disabled(fileObject == nil)
61 | .opacity(fileObject == nil ? 0.3 : 1)
62 | .onFirstAppear {
63 | Task {
64 | fileObject = try await fileProvider.uploadFile(parameters: parameters)
65 | }
66 | }
67 | .onChange(of: fileObject) { oldValue, newValue in
68 | if oldValue != newValue, let newValue {
69 | fileUploadedCompletion(newValue)
70 | }
71 | }
72 | }
73 |
74 | func previousUploadedFileView(
75 | id: String)
76 | -> some View
77 | {
78 | AttachmentView(fileName: fileObject?.filename ?? "Document", actionTrigger: $deleted, isLoading: fileObject == nil || deleted)
79 | .onFirstAppear {
80 | Task {
81 | fileObject = try await fileProvider.retrieveFileWith(id: id)
82 | }
83 | }
84 | }
85 |
86 | @State private var fileObject: FileObject?
87 | @State private var fileDeleteStatus: DeletionStatus?
88 | @State private var deleted = false
89 |
90 | private let fileProvider: FilesPickerProvider
91 | private let fileUploadedCompletion: (_ file: FileObject) -> Void
92 | private let fileDeletedCompletion: (_ action: FilePickerAction, _ id: String) -> Void
93 | private let action: FilePickerAction
94 | }
95 |
96 | // MARK: - OnFirstAppear
97 |
98 | private struct OnFirstAppear: ViewModifier {
99 | let perform: () -> Void
100 |
101 | @State private var firstTime = true
102 |
103 | func body(content: Content) -> some View {
104 | content.onAppear {
105 | if firstTime {
106 | firstTime = false
107 | perform()
108 | }
109 | }
110 | }
111 | }
112 |
113 | extension View {
114 | func onFirstAppear(perform: @escaping () -> Void) -> some View {
115 | modifier(OnFirstAppear(perform: perform))
116 | }
117 | }
118 |
119 | extension DeletionStatus: @retroactive Equatable {
120 | public static func ==(lhs: DeletionStatus, rhs: DeletionStatus) -> Bool {
121 | lhs.id == rhs.id
122 | }
123 | }
124 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FilesPickerProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 5/29/24.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | final class FilesPickerProvider {
12 |
13 | init(service: OpenAIService) {
14 | self.service = service
15 | }
16 |
17 | var files: [FileObject] = []
18 | var uploadedFile: FileObject? = nil
19 | var deletedStatus: DeletionStatus? = nil
20 | var retrievedFile: FileObject? = nil
21 | var fileContent: [[String: Any]] = []
22 |
23 | func listFiles() async throws {
24 | files = try await service.listFiles().data
25 | }
26 |
27 | func uploadFile(
28 | parameters: FileParameters)
29 | async throws -> FileObject?
30 | {
31 | try await service.uploadFile(parameters: parameters)
32 | }
33 |
34 | func deleteFileWith(
35 | id: String)
36 | async throws -> DeletionStatus?
37 | {
38 | try await service.deleteFileWith(id: id)
39 | }
40 |
41 | func retrieveFileWith(
42 | id: String)
43 | async throws -> FileObject?
44 | {
45 | try await service.retrieveFileWith(id: id)
46 | }
47 |
48 | func retrieveContentForFileWith(
49 | id: String)
50 | async throws
51 | {
52 | fileContent = try await service.retrieveContentForFileWith(id: id)
53 | }
54 |
55 | private let service: OpenAIService
56 |
57 | }
58 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FilesDemo/FilesDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FilesDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/23/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | // MARK: - FilesDemoView
12 |
13 | struct FilesDemoView: View {
14 |
15 | init(service: OpenAIService) {
16 | _filesProvider = State(initialValue: FilesProvider(service: service))
17 | }
18 |
19 | enum Config {
20 | case list
21 | case moreOptions
22 | }
23 |
24 | var body: some View {
25 | VStack {
26 | picker
27 | if !errorMessage.isEmpty {
28 | Text("Error \(errorMessage)")
29 | .bold()
30 | }
31 | switch selectedSegment {
32 | case .list:
33 | listView
34 | case .moreOptions:
35 | moreOptionsView
36 | }
37 | }
38 | .overlay(
39 | Group {
40 | if isLoading {
41 | ProgressView()
42 | } else {
43 | EmptyView()
44 | }
45 | })
46 | }
47 |
48 | var picker: some View {
49 | Picker("Options", selection: $selectedSegment) {
50 | Text("Shows List").tag(Config.list)
51 | Text("Show More options").tag(Config.moreOptions)
52 | }
53 | .pickerStyle(SegmentedPickerStyle())
54 | .padding()
55 | }
56 |
57 | var moreOptionsView: some View {
58 | ScrollView {
59 | VStack {
60 | uploadFileButton
61 | Text("This button will load a file that has been added to this app bundle.")
62 | if let uploadedFile = filesProvider.uploadedFile {
63 | FileObjectView(file: uploadedFile)
64 | }
65 | }
66 | }
67 | }
68 |
69 | var listView: some View {
70 | VStack(spacing: 0) {
71 | listFilesButton
72 | list
73 | }
74 | }
75 |
76 | var listFilesButton: some View {
77 | Button("List Files") {
78 | Task {
79 | isLoading = true
80 | defer { isLoading = false } // ensure isLoading is set to false when the Task completes
81 | do {
82 | try await filesProvider.listFiles()
83 | } catch {
84 | errorMessage = "\(error)"
85 | }
86 | }
87 | }
88 | .buttonStyle(.borderedProminent)
89 | }
90 |
91 | var uploadFileButton: some View {
92 | Button("Upload File") {
93 | Task {
94 | isLoading = true
95 | defer { isLoading = false } // ensure isLoading is set to false when the Task completes
96 | do {
97 | let fileData = try contentLoader.loadBundledContent(fromFileNamed: "WorldCupData", ext: "jsonl")
98 | try await filesProvider.uploadFile(parameters: .init(fileName: "WorldCupData", file: fileData, purpose: "fine-tune"))
99 | } catch {
100 | errorMessage = "\(error)"
101 | }
102 | }
103 | }
104 | .buttonStyle(.borderedProminent)
105 | }
106 |
107 | var list: some View {
108 | List {
109 | ForEach(Array(filesProvider.files.enumerated()), id: \.offset) { _, file in
110 | FileObjectView(file: file)
111 | }
112 | }
113 | }
114 |
115 | @State private var filesProvider: FilesProvider
116 | @State private var isLoading = false
117 | @State private var errorMessage = ""
118 | @State private var selectedSegment = Config.list
119 |
120 | private let contentLoader = ContentLoader()
121 |
122 | }
123 |
124 | // MARK: - FileObjectView
125 |
126 | struct FileObjectView: View {
127 |
128 | init(file: FileObject) {
129 | self.file = file
130 | }
131 |
132 | var body: some View {
133 | VStack(alignment: .leading, spacing: 4) {
134 | Text("File name = \(file.filename)")
135 | .font(.title2)
136 | VStack(alignment: .leading, spacing: 2) {
137 | Text("ID = \(file.id)")
138 | Text("Created = \(file.createdAt)")
139 | Text("Object = \(file.object)")
140 | Text("Purpose = \(file.purpose)")
141 | Text("Status = \(file.status ?? "NO STATUS")")
142 | Text("Status Details = \(file.statusDetails ?? "NO DETAILS")")
143 | }
144 | .font(.callout)
145 | }
146 | .foregroundColor(.primary)
147 | .padding()
148 | .background(
149 | RoundedRectangle(cornerSize: .init(width: 20, height: 20))
150 | .foregroundColor(.mint))
151 | }
152 |
153 | private let file: FileObject
154 |
155 | }
156 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FilesDemo/FilesProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FilesProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/23/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | @Observable
12 | class FilesProvider {
13 |
14 | init(service: OpenAIService) {
15 | self.service = service
16 | }
17 |
18 | var files: [FileObject] = []
19 | var uploadedFile: FileObject? = nil
20 | var deletedStatus: DeletionStatus? = nil
21 | var retrievedFile: FileObject? = nil
22 | var fileContent: [[String: Any]] = []
23 |
24 | func listFiles() async throws {
25 | files = try await service.listFiles().data
26 | }
27 |
28 | func uploadFile(
29 | parameters: FileParameters)
30 | async throws
31 | {
32 | uploadedFile = try await service.uploadFile(parameters: parameters)
33 | }
34 |
35 | func deleteFileWith(
36 | id: String)
37 | async throws
38 | {
39 | deletedStatus = try await service.deleteFileWith(id: id)
40 | }
41 |
42 | func retrieveFileWith(
43 | id: String)
44 | async throws
45 | {
46 | retrievedFile = try await service.retrieveFileWith(id: id)
47 | }
48 |
49 | func retrieveContentForFileWith(
50 | id: String)
51 | async throws
52 | {
53 | fileContent = try await service.retrieveContentForFileWith(id: id)
54 | }
55 |
56 | private let service: OpenAIService
57 |
58 | }
59 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FineTuningDemo/FineTuningJobDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FineTuningJobDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/23/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | // MARK: - FineTuningJobDemoView
12 |
13 | struct FineTuningJobDemoView: View {
14 |
15 | init(service: OpenAIService) {
16 | _fineTuningJobProvider = State(initialValue: FineTuningJobProvider(service: service))
17 | }
18 |
19 | var body: some View {
20 | VStack {
21 | Button("List Fine tuning jobs") {
22 | Task {
23 | isLoading = true
24 | defer { isLoading = false } // ensure isLoading is set to false when the Task completes
25 | try await fineTuningJobProvider.listFineTuningJobs()
26 | }
27 | }
28 | .buttonStyle(.borderedProminent)
29 | List {
30 | ForEach(Array(fineTuningJobProvider.fineTunedJobs.enumerated()), id: \.offset) { _, job in
31 | FineTuningObjectView(job: job)
32 | }
33 | }
34 | }
35 | .overlay(
36 | Group {
37 | if isLoading {
38 | ProgressView()
39 | } else {
40 | EmptyView()
41 | }
42 | })
43 | }
44 |
45 | @State private var fineTuningJobProvider: FineTuningJobProvider
46 | @State private var isLoading = false
47 |
48 | }
49 |
50 | // MARK: - FineTuningObjectView
51 |
52 | struct FineTuningObjectView: View {
53 |
54 | init(job: FineTuningJobObject) {
55 | self.job = job
56 | }
57 |
58 | var body: some View {
59 | VStack(alignment: .leading, spacing: 4) {
60 | Text("Fine Tuned Model - \(job.fineTunedModel ?? "NO MODEL")")
61 | .font(.title2)
62 | VStack(alignment: .leading, spacing: 2) {
63 | Text("Model = \(job.model)")
64 | Text("Object = \(job.object)")
65 | Text("ID = \(job.id)")
66 | Text("Created = \(job.createdAt)")
67 | Text("Organization ID = \(job.organizationId)")
68 | Text("Training file = \(job.trainingFile)")
69 | Text("Status = \(job.status)")
70 | .bold()
71 | }
72 | .font(.callout)
73 | }
74 | .foregroundColor(.primary)
75 | .padding()
76 | .background(
77 | RoundedRectangle(cornerSize: .init(width: 20, height: 20))
78 | .foregroundColor(.mint))
79 | }
80 |
81 | private let job: FineTuningJobObject
82 |
83 | }
84 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FineTuningDemo/FineTuningJobProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FineTuningJobProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/23/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | @Observable
12 | class FineTuningJobProvider {
13 |
14 | init(service: OpenAIService) {
15 | self.service = service
16 | }
17 |
18 | var createdFineTuningJob: FineTuningJobObject? = nil
19 | var canceledFineTuningJob: FineTuningJobObject? = nil
20 | var retrievedFineTuningJob: FineTuningJobObject? = nil
21 | var fineTunedJobs: [FineTuningJobObject] = []
22 | var fineTuningEventObjects: [FineTuningJobEventObject] = []
23 |
24 | func createFineTuningJob(
25 | parameters: FineTuningJobParameters)
26 | async throws
27 | {
28 | createdFineTuningJob = try await service.createFineTuningJob(parameters: parameters)
29 | }
30 |
31 | func listFineTuningJobs()
32 | async throws
33 | {
34 | fineTunedJobs = try await service.listFineTuningJobs(after: nil, limit: nil).data
35 | }
36 |
37 | func retrieveFineTuningJob(
38 | id: String)
39 | async throws
40 | {
41 | retrievedFineTuningJob = try await service.retrieveFineTuningJob(id: id)
42 | }
43 |
44 | func cancelFineTuningJob(
45 | id: String)
46 | async throws
47 | {
48 | canceledFineTuningJob = try await service.cancelFineTuningJobWith(id: id)
49 | }
50 |
51 | func listFineTuningEventsForJobWith(
52 | id: String)
53 | async throws
54 | {
55 | fineTuningEventObjects = try await service.listFineTuningEventsForJobWith(id: id, after: nil, limit: nil).data
56 | }
57 |
58 | private let service: OpenAIService
59 |
60 | }
61 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ImagesDemo/ImagesDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImagesDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/24/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct ImagesDemoView: View {
12 |
13 | init(service: OpenAIService) {
14 | _imagesProvider = State(initialValue: ImagesProvider(service: service))
15 | }
16 |
17 | var body: some View {
18 | ScrollView {
19 | textArea
20 | if !errorMessage.isEmpty {
21 | Text("Error \(errorMessage)")
22 | .bold()
23 | }
24 | ForEach(Array(imagesProvider.images.enumerated()), id: \.offset) { _, url in
25 | AsyncImage(url: url, scale: 1) { image in
26 | image
27 | .resizable()
28 | .aspectRatio(contentMode: .fill)
29 | .clipped()
30 | } placeholder: {
31 | EmptyView()
32 | }
33 | }
34 | }
35 | .overlay(
36 | Group {
37 | if isLoading {
38 | ProgressView()
39 | } else {
40 | EmptyView()
41 | }
42 | })
43 | }
44 |
45 | var textArea: some View {
46 | HStack(spacing: 4) {
47 | TextField("Enter prompt", text: $prompt, axis: .vertical)
48 | .textFieldStyle(.roundedBorder)
49 | .padding()
50 | Button {
51 | Task {
52 | isLoading = true
53 | defer { isLoading = false } // ensure isLoading is set to false when the Task completes
54 | do {
55 | try await imagesProvider.createImages(parameters: .init(prompt: prompt, model: .dalle3(.largeSquare)))
56 | } catch {
57 | errorMessage = "\(error)"
58 | }
59 | }
60 | } label: {
61 | Image(systemName: "paperplane")
62 | }
63 | .buttonStyle(.bordered)
64 | }
65 | .padding()
66 | }
67 |
68 | @State private var imagesProvider: ImagesProvider
69 | @State private var isLoading = false
70 | @State private var prompt = ""
71 | @State private var errorMessage = ""
72 |
73 | }
74 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ImagesDemo/ImagesProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImagesProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/23/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | @Observable
12 | class ImagesProvider {
13 |
14 | init(service: OpenAIService) {
15 | self.service = service
16 | }
17 |
18 | var images: [URL] = []
19 |
20 | func createImages(
21 | parameters: ImageCreateParameters)
22 | async throws
23 | {
24 | let urls = try await service.legacyCreateImages(
25 | parameters: parameters).data.map(\.url)
26 | images = urls.compactMap(\.self)
27 | }
28 |
29 | func editImages(
30 | parameters: ImageEditParameters)
31 | async throws
32 | {
33 | let urls = try await service.legacyEditImage(
34 | parameters: parameters).data.map(\.url)
35 | images = urls.compactMap(\.self)
36 | }
37 |
38 | func createImageVariations(
39 | parameters: ImageVariationParameters)
40 | async throws
41 | {
42 | let urls = try await service.legacyCreateImageVariations(parameters: parameters).data.map(\.url)
43 | images = urls.compactMap(\.self)
44 | }
45 |
46 | private let service: OpenAIService
47 |
48 | }
49 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/LocalChatDemo/LocalChatDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LocalChatDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 6/24/24.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | /// For more visit https://github.com/ollama/ollama/blob/main/docs/openai.md
12 |
13 | /// Important:
14 | /// Before using a model, pull it locally with `ollama pull`:
15 |
16 | /// `ollama pull llama3`
17 | /// Default model names
18 | /// For tooling that relies on default OpenAI model names such as gpt-3.5-turbo, use ollama cp to copy an existing model name to a temporary name:
19 |
20 | /// `ollama cp llama3 gpt-3.5-turbo`
21 | /// Afterwards, this new model name can be specified in the `model` field:
22 |
23 | /// ```curl http://localhost:11434/v1/chat/completions \
24 | /// -H "Content-Type: application/json" \
25 | /// -d '{
26 | /// "model": "gpt-3.5-turbo",
27 | /// "messages": [
28 | /// {
29 | /// "role": "user",
30 | /// "content": "Hello!"
31 | /// }
32 | /// ]
33 | /// }'```
34 |
35 | struct LocalChatDemoView: View {
36 |
37 | init(service: OpenAIService) {
38 | _chatProvider = State(initialValue: ChatProvider(service: service))
39 | }
40 |
41 | enum ChatConfig {
42 | case chatCompletion
43 | case chatCompletionStream
44 | }
45 |
46 | var body: some View {
47 | ScrollView {
48 | VStack {
49 | picker
50 | textArea
51 | Text(chatProvider.errorMessage)
52 | .foregroundColor(.red)
53 | switch selectedSegment {
54 | case .chatCompletionStream:
55 | streamedChatResultView
56 | case .chatCompletion:
57 | chatCompletionResultView
58 | }
59 | }
60 | }
61 | .overlay(
62 | Group {
63 | if isLoading {
64 | ProgressView()
65 | } else {
66 | EmptyView()
67 | }
68 | })
69 | }
70 |
71 | var picker: some View {
72 | Picker("Options", selection: $selectedSegment) {
73 | Text("Chat Completion").tag(ChatConfig.chatCompletion)
74 | Text("Chat Completion stream").tag(ChatConfig.chatCompeltionStream)
75 | }
76 | .pickerStyle(SegmentedPickerStyle())
77 | .padding()
78 | }
79 |
80 | var textArea: some View {
81 | HStack(spacing: 4) {
82 | TextField("Enter prompt", text: $prompt, axis: .vertical)
83 | .textFieldStyle(.roundedBorder)
84 | .padding()
85 | Button {
86 | Task {
87 | isLoading = true
88 | defer { isLoading = false } // ensure isLoading is set to false when the Task completes
89 |
90 | let content = ChatCompletionParameters.Message.ContentType.text(prompt)
91 | prompt = ""
92 | let parameters = ChatCompletionParameters(
93 | messages: [.init(
94 | role: .user,
95 | content: content)],
96 | // Make sure you run `ollama pull llama3` in your terminal to download this model.
97 | model: .custom("llama3"))
98 | switch selectedSegment {
99 | case .chatCompletion:
100 | try await chatProvider.startChat(parameters: parameters)
101 | case .chatCompletionStream:
102 | try await chatProvider.startStreamedChat(parameters: parameters)
103 | }
104 | }
105 | } label: {
106 | Image(systemName: "paperplane")
107 | }
108 | .buttonStyle(.bordered)
109 | }
110 | .padding()
111 | }
112 |
113 | /// stream = `false`
114 | var chatCompletionResultView: some View {
115 | ForEach(Array(chatProvider.messages.enumerated()), id: \.offset) { _, val in
116 | VStack(spacing: 0) {
117 | Text("\(val)")
118 | }
119 | }
120 | }
121 |
122 | /// stream = `true`
123 | var streamedChatResultView: some View {
124 | VStack {
125 | Button("Cancel stream") {
126 | chatProvider.cancelStream()
127 | }
128 | Text(chatProvider.message)
129 | }
130 | }
131 |
132 | @State private var chatProvider: ChatProvider
133 | @State private var isLoading = false
134 | @State private var prompt = ""
135 | @State private var selectedSegment = ChatConfig.chatCompletionStream
136 |
137 | }
138 |
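A minimal sketch (not part of the repository) of the same Ollama flow outside of SwiftUI, built only from types that already appear in this example app: `OpenAIServiceFactory.service(baseURL:)` from `LocalHostEntryView` and `ChatProvider` from the ChatDemo folder. It assumes Ollama is listening on its default port and that `ollama pull llama3` has already been run.

```swift
import SwiftOpenAI

// Point the service at the local Ollama server (OpenAI-compatible endpoint).
let service = OpenAIServiceFactory.service(baseURL: "http://localhost:11434")
// ChatProvider is the example app's observable wrapper around OpenAIService.
let chatProvider = ChatProvider(service: service)

let parameters = ChatCompletionParameters(
  messages: [.init(role: .user, content: .text("Hello!"))],
  model: .custom("llama3")) // must match a model pulled locally with `ollama pull`

Task {
  // Non-streamed variant; LocalChatDemoView calls startStreamedChat(parameters:) when streaming.
  try await chatProvider.startChat(parameters: parameters)
  print(chatProvider.messages)
}
```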
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/LocalHostEntryView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LocalHostEntryView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 6/24/24.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct LocalHostEntryView: View {
12 |
13 | var body: some View {
14 | NavigationStack {
15 | VStack {
16 | Spacer()
17 | TextField("Enter URL", text: $url)
18 | .padding()
19 | .textFieldStyle(.roundedBorder)
20 | NavigationLink(destination: OptionsListView(
21 | openAIService: OpenAIServiceFactory.service(baseURL: url),
22 | options: [.localChat]))
23 | {
24 | Text("Continue")
25 | .padding()
26 | .padding(.horizontal, 48)
27 | .foregroundColor(.white)
28 | .background(
29 | Capsule()
30 | .foregroundColor(url.isEmpty ? .gray.opacity(0.2) : Color(red: 64 / 255, green: 195 / 255, blue: 125 / 255)))
31 | }
32 | .disabled(url.isEmpty)
33 | Spacer()
34 | }
35 | .padding()
36 | .navigationTitle("Enter URL")
37 | }
38 | }
39 |
40 | @State private var url = ""
41 |
42 | }
43 |
44 | #Preview {
45 | LocalHostEntryView()
46 | }
47 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModelsDemo/ModelsDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelsDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/24/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct ModelsDemoView: View {
12 |
13 | init(service: OpenAIService) {
14 | _modelsProvider = State(initialValue: ModelsProvider(service: service))
15 | }
16 |
17 | var body: some View {
18 | VStack {
19 | showModelsButton
20 | list
21 | }
22 | }
23 |
24 | var list: some View {
25 | List {
26 | ForEach(Array(modelsProvider.models.enumerated()), id: \.offset) { _, model in
27 | Text("\(model.id)")
28 | }
29 | }
30 | }
31 |
32 | var showModelsButton: some View {
33 | Button("List models") {
34 | Task {
35 | isLoading = true
36 | defer { isLoading = false } // ensure isLoading is set to false when the Task completes
37 | do {
38 | try await modelsProvider.listModels()
39 | } catch {
40 | errorMessage = "\(error)"
41 | }
42 | }
43 | }
44 | .buttonStyle(.bordered)
45 | }
46 |
47 | @State private var modelsProvider: ModelsProvider
48 | @State private var isLoading = false
49 | @State private var errorMessage = ""
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModelsDemo/ModelsProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelsProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/24/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | @Observable
12 | class ModelsProvider {
13 |
14 | init(service: OpenAIService) {
15 | self.service = service
16 | }
17 |
18 | var models: [ModelObject] = []
19 | var retrievedModel: ModelObject? = nil
20 | var deletionStatus: DeletionStatus? = nil
21 |
22 | func listModels() async throws {
23 | models = try await service.listModels().data
24 | }
25 |
26 | func retrieveModelWith(
27 | id: String)
28 | async throws
29 | {
30 | retrievedModel = try await service.retrieveModelWith(id: id)
31 | }
32 |
33 | func deleteFineTuneModelWith(
34 | id: String)
35 | async throws
36 | {
37 | deletionStatus = try await service.deleteFineTuneModelWith(id: id)
38 | }
39 |
40 | private let service: OpenAIService
41 |
42 | }
43 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModerationsDemo/ModerationDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModerationDemoView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/24/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct ModerationDemoView: View {
12 |
13 | init(service: OpenAIService) {
14 | _moderationProvider = State(initialValue: ModerationProvider(service: service))
15 | }
16 |
17 | var body: some View {
18 | VStack {
19 | textArea
20 | if moderationProvider.isFlagged {
21 | Text("That is not a nice thing to say.")
22 | }
23 | if !errorMessage.isEmpty {
24 | Text("Error \(errorMessage)")
25 | .bold()
26 | }
27 | }
28 | .overlay(
29 | Group {
30 | if isLoading {
31 | ProgressView()
32 | } else {
33 | EmptyView()
34 | }
35 | })
36 | }
37 |
38 | var textArea: some View {
39 | HStack(spacing: 4) {
40 | TextField("Enter prompt", text: $prompt, axis: .vertical)
41 | .textFieldStyle(.roundedBorder)
42 | .padding()
43 | Button {
44 | Task {
45 | isLoading = true
46 | defer { isLoading = false } // ensure isLoading is set to false when the Task completes
47 | do {
48 | try await moderationProvider.createModerationFromText(parameters: .init(input: prompt))
49 | } catch {
50 | errorMessage = "\(error)"
51 | }
52 | }
53 | } label: {
54 | Image(systemName: "paperplane")
55 | }
56 | .buttonStyle(.bordered)
57 | }
58 | .padding()
59 | }
60 |
61 | @State private var moderationProvider: ModerationProvider
62 | @State private var isLoading = false
63 | @State private var prompt = ""
64 | @State private var errorMessage = ""
65 |
66 | }
67 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModerationsDemo/ModerationProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModerationProvider.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/24/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | @Observable
12 | class ModerationProvider {
13 |
14 | init(service: OpenAIService) {
15 | self.service = service
16 | }
17 |
18 | var isFlagged = false
19 |
20 | func createModerationFromText(
21 | parameters: ModerationParameter)
22 | async throws
23 | {
24 | isFlagged = try await service.createModerationFromText(parameters: parameters).isFlagged
25 | }
26 |
27 | func createModerationFromTexts(
28 | parameters: ModerationParameter<[String]>)
29 | async throws
30 | {
31 | isFlagged = try await service.createModerationFromTexts(parameters: parameters).isFlagged
32 | }
33 |
34 | private let service: OpenAIService
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/OptionsListView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OptionsListView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import SwiftOpenAI
9 | import SwiftUI
10 |
11 | struct OptionsListView: View {
12 |
13 | /// https://platform.openai.com/docs/api-reference
14 | enum APIOption: String, CaseIterable, Identifiable {
15 | case audio = "Audio"
16 | case chat = "Chat"
17 | case chatPredictedOutput = "Chat Predicted Output"
18 | case localChat = "Local Chat" // Ollama
19 | case vision = "Vision"
20 | case embeddings = "Embeddings"
21 | case fineTuning = "Fine Tuning"
22 | case files = "Files"
23 | case images = "Images"
24 | case models = "Models"
25 | case moderations = "Moderations"
26 | case chatHistoryConversation = "Chat History Conversation"
27 | case chatFunctionCall = "Chat Functions call"
28 | case chatFunctionsCallStream = "Chat Functions call (Stream)"
29 | case chatStructuredOutput = "Chat Structured Output"
30 | case chatStructuredOutputTool = "Chat Structured Output Tools"
31 | case configureAssistant = "Configure Assistant"
32 | case realTimeAPI = "Real time API"
33 | case responseStream = "Response Stream Demo"
34 |
35 | var id: String { rawValue }
36 | }
37 |
38 | var openAIService: OpenAIService
39 |
40 | var options: [APIOption]
41 |
42 | var body: some View {
43 | List(options, id: \.self, selection: $selection) { option in
44 | Text(option.rawValue)
45 | }
46 | .sheet(item: $selection) { selection in
47 | VStack {
48 | Text(selection.rawValue)
49 | .font(.largeTitle)
50 | .padding()
51 | switch selection {
52 | case .audio:
53 | AudioDemoView(service: openAIService)
54 | case .chat:
55 | ChatDemoView(service: openAIService)
56 | case .chatPredictedOutput:
57 | ChatPredictedOutputDemoView(service: openAIService)
58 | case .vision:
59 | ChatVisionDemoView(service: openAIService)
60 | case .embeddings:
61 | EmbeddingsDemoView(service: openAIService)
62 | case .fineTuning:
63 | FineTuningJobDemoView(service: openAIService)
64 | case .files:
65 | FilesDemoView(service: openAIService)
66 | case .images:
67 | ImagesDemoView(service: openAIService)
68 | case .localChat:
69 | LocalChatDemoView(service: openAIService)
70 | case .models:
71 | ModelsDemoView(service: openAIService)
72 | case .moderations:
73 | ModerationDemoView(service: openAIService)
74 | case .chatHistoryConversation:
75 | ChatStreamFluidConversationDemoView(service: openAIService)
76 | case .chatFunctionCall:
77 | ChatFunctionCallDemoView(service: openAIService)
78 | case .chatFunctionsCallStream:
79 | ChatFunctionsCalllStreamDemoView(service: openAIService)
80 | case .chatStructuredOutput:
81 | ChatStructuredOutputDemoView(service: openAIService)
82 | case .chatStructuredOutputTool:
83 | ChatStructureOutputToolDemoView(service: openAIService)
84 | case .configureAssistant:
85 | AssistantConfigurationDemoView(service: openAIService)
86 | case .realTimeAPI:
87 | Text("WIP")
88 | case .responseStream:
89 | ResponseStreamDemoView(service: openAIService)
90 | }
91 | }
92 | }
93 | }
94 |
95 | @State private var selection: APIOption? = nil
96 |
97 | }
98 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Resources/WorldCupData.jsonl:
--------------------------------------------------------------------------------
1 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "Tell me about the World Cup winner in Spain 1982."}, {"role": "assistant", "content": "The winner of the 1982 FIFA World Cup held in Spain was Italy. They defeated West Germany 3-1 in the final match."}]}
2 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "What about Mexico 1986?"}, {"role": "assistant", "content": "The 1986 FIFA World Cup in Mexico was won by Argentina. They beat West Germany 3-2 in the final."}]}
3 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "Who won in Italy 1990?"}, {"role": "assistant", "content": "West Germany won the 1990 World Cup in Italy. They defeated Argentina 1-0 in the final."}]}
4 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "What about USA 1994?"}, {"role": "assistant", "content": "Brazil won the 1994 World Cup held in the USA. They defeated Italy in a penalty shootout after a 0-0 draw in the final match."}]}
5 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "Tell me about France 1998."}, {"role": "assistant", "content": "France won the 1998 World Cup held in their home country. They defeated Brazil 3-0 in the final."}]}
6 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "What happened in Korea-Japan 2002?"}, {"role": "assistant", "content": "Brazil won the 2002 World Cup held in South Korea and Japan. They defeated Germany 2-0 in the final match."}]}
7 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "Who won in Germany 2006?"}, {"role": "assistant", "content": "Italy won the 2006 World Cup held in Germany. They defeated France in a penalty shootout after a 1-1 draw in the final."}]}
8 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "Tell me about South Africa 2010."}, {"role": "assistant", "content": "Spain won the 2010 World Cup held in South Africa. They defeated the Netherlands 1-0 in the final."}]}
9 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "Who won in Brazil 2014?"}, {"role": "assistant", "content": "Germany won the 2014 World Cup held in Brazil. They defeated Argentina 1-0 in the final."}]}
10 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "What about Russia 2018?"}, {"role": "assistant", "content": "France won the 2018 World Cup held in Russia. They defeated Croatia 4-2 in the final match."}]}
11 | {"messages": [{"role": "system", "content": "You are a helpful assistant specializing in World Cup history."}, {"role": "user", "content": "Tell me about Qatar 2022."}, {"role": "assistant", "content": "I don't know"}]}
12 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Resources/german.m4a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jamesrochabrun/SwiftOpenAI/f44490fef0b505814ec669c577f7605d61da48f2/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Resources/german.m4a
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Resources/narcos.m4a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jamesrochabrun/SwiftOpenAI/f44490fef0b505814ec669c577f7605d61da48f2/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Resources/narcos.m4a
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ServiceSelectionView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ServiceSelectionView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by Lou Zell on 3/27/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ServiceSelectionView: View {
11 |
12 | var body: some View {
13 | NavigationStack {
14 | List {
15 | Section("Select Service") {
16 | NavigationLink(destination: ApiKeyIntroView()) {
17 | VStack(alignment: .leading) {
18 | Text("Default OpenAI Service")
19 | .padding(.bottom, 10)
20 | Group {
21 | Text("Use this service to test SwiftOpenAI functionality by providing your own OpenAI key.")
22 | }
23 | .font(.caption)
24 | .fontWeight(.light)
25 | }
26 | }
27 |
28 | NavigationLink(destination: AIProxyIntroView()) {
29 | VStack(alignment: .leading) {
30 | Text("AIProxy Service")
31 | .padding(.bottom, 10)
32 | Group {
33 | Text(
34 | "Use this service to test SwiftOpenAI functionality with requests proxied through AIProxy for key protection.")
35 | }
36 | .font(.caption)
37 | .fontWeight(.light)
38 | }
39 | }
40 |
41 | NavigationLink(destination: LocalHostEntryView()) {
42 | VStack(alignment: .leading) {
43 | Text("Ollama")
44 | .padding(.bottom, 10)
45 | Group {
46 | Text("Use this service to test SwiftOpenAI functionality by providing your own local host.")
47 | }
48 | .font(.caption)
49 | .fontWeight(.light)
50 | }
51 | }
52 | }
53 | }
54 | }
55 | }
56 | }
57 |
58 | #Preview {
59 | ServiceSelectionView()
60 | }
61 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedModels/ChatDisplayMessage.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatDisplayMessage.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/4/23.
6 | //
7 |
8 | import Foundation
9 | import SwiftOpenAI
10 |
11 | struct ChatDisplayMessage: Identifiable {
12 |
13 | init(
14 | id: UUID = UUID(),
15 | content: DisplayContent,
16 | type: DisplayMessageType,
17 | delta: ChatDisplayMessage.Delta?)
18 | {
19 | self.id = id
20 | self.content = content
21 | self.type = type
22 | self.delta = delta
23 | }
24 |
25 | struct Delta {
26 | var role: String
27 | var content: String
28 | var functionCallName: String?
29 | var functionCallArguments: String?
30 | }
31 |
32 | enum DisplayContent: Equatable {
33 | case text(String)
34 | case images([URL])
35 | case content([ChatCompletionParameters.Message.ContentType.MessageContent])
36 | case error(String)
37 |
38 | static func ==(lhs: DisplayContent, rhs: DisplayContent) -> Bool {
39 | switch (lhs, rhs) {
40 | case (.images(let a), .images(let b)):
41 | a == b
42 | case (.content(let a), .content(let b)):
43 | a == b
44 | case (.error(let a), .error(let b)):
45 | a == b
46 | default:
47 | false
48 | }
49 | }
50 | }
51 |
52 | enum DisplayMessageType {
53 | case received, sent
54 | }
55 |
56 | let id: UUID
57 | let content: DisplayContent
58 | let type: DisplayMessageType
59 | let delta: Delta?
60 |
61 | }
62 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatDisplayMessageView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/4/23.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ChatDisplayMessageView: View {
11 |
12 | let message: ChatDisplayMessage
13 |
14 | var body: some View {
15 | VStack(alignment: .leading, spacing: 8) {
16 | headerFor(message: message)
17 | Group {
18 | switch message.content {
19 | case .content(let content):
20 | let text = content.compactMap { contentItem -> String? in
21 | if case .text(let text) = contentItem {
22 | return text
23 | } else {
24 | return nil
25 | }
26 | }.first ?? ""
27 |
28 | let urls = content.compactMap { contentItem -> URL? in
29 | if case .imageUrl(let imageDetail) = contentItem {
30 | return imageDetail.url
31 | } else {
32 | return nil
33 | }
34 | }
35 | VStack(alignment: .leading, spacing: 8) {
36 | chatImagesViewFrom(urls: urls)
37 | chatMessageViewWith(text)
38 | }
39 |
40 | case .error(let error):
41 | Text(error)
42 | .padding()
43 | .font(.callout)
44 | .background(
45 | RoundedRectangle(cornerRadius: 20)
46 | .foregroundColor(.red.opacity(0.7)))
47 |
48 | case .text(let text):
49 | chatMessageViewWith(text)
50 |
51 | case .images(let urls):
52 | chatImagesViewFrom(urls: urls)
53 | }
54 | }
55 | .padding(.leading, 23)
56 | }
57 | }
58 |
59 | @ViewBuilder
60 | func chatMessageViewWith(
61 | _ text: String)
62 | -> some View
63 | {
64 | if text.isEmpty {
65 | ChatMessageLoadingView(animationDuration: 0.5)
66 | .frame(width: 10, height: 10)
67 | } else {
68 | Text(text)
69 | .font(.body)
70 | }
71 | }
72 |
73 | func headerFor(
74 | message: ChatDisplayMessage)
75 | -> some View
76 | {
77 | HStack {
78 | Image(systemName: message.type == .sent ? "person.circle" : "wand.and.stars")
79 | .resizable()
80 | .frame(width: 15, height: 15)
81 | Text(message.type == .sent ? "USER" : "CHATGPT")
82 | .font(.caption2)
83 | }
84 | .foregroundColor(.gray.opacity(0.9))
85 | }
86 |
87 | func chatImagesViewFrom(
88 | urls: [URL])
89 | -> some View
90 | {
91 | ScrollView(.horizontal, showsIndicators: false) {
92 | HStack(spacing: 8) {
93 | ForEach(urls, id: \.self) { url in
94 | URLImageView(url: url)
95 | }
96 | }
97 | }
98 | }
99 |
100 | @State private var urls: [URL] = []
101 |
102 | }
103 |
104 | #Preview {
105 | VStack(alignment: .leading) {
106 | ChatDisplayMessageView(message: .init(content: .text("How are you?"), type: .sent, delta: nil))
107 | ChatDisplayMessageView(message: .init(content: .text("I am ok"), type: .received, delta: nil))
108 | ChatDisplayMessageView(message: .init(content: .images([]), type: .received, delta: nil))
109 | }
110 | .padding()
111 | .frame(maxWidth: .infinity, alignment: .leading)
112 | }
113 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatMessageLoadingView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatMessageLoadingView.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 3/28/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | struct ChatMessageLoadingView: View {
12 |
13 | var animationDuration: Double
14 | @State private var isScaledUp = false
15 |
16 | var body: some View {
17 | Circle()
18 | .scaleEffect(isScaledUp ? 1.5 : 1) // 1.5 is 150% size, 1 is 100% size
19 | .onAppear {
20 | withAnimation(Animation.easeInOut(duration: animationDuration).repeatForever(autoreverses: true)) {
21 | isScaledUp.toggle()
22 | }
23 | }
24 | }
25 | }
26 |
27 | #Preview {
28 | ChatMessageLoadingView(animationDuration: 0.2)
29 | }
30 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/LoadingView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LoadingView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/4/23.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct LoadingView: View {
11 |
12 | @State private var dotsCount = 0
13 |
14 | let timer = Timer.publish(every: 0.5, on: .main, in: .common).autoconnect()
15 |
16 | var body: some View {
17 | HStack {
18 | Text("\(getDots())")
19 | .font(.title)
20 | .onReceive(timer) { _ in
21 | withAnimation {
22 | dotsCount = (dotsCount + 1) % 4
23 | }
24 | }
25 | }
26 | .frame(minHeight: 40)
27 | }
28 |
29 | func getDots() -> String {
30 | String(repeating: ".", count: dotsCount)
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/Theme/Sizes.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Sizes.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/13/23.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - Sizes
11 |
12 | enum Sizes { }
13 |
14 | extension Sizes {
15 |
16 | static let spacingExtraSmall: CGFloat = 4.0
17 | static let spacingSmall: CGFloat = 6
18 | static let spacingMedium: CGFloat = 8
19 | static let spacingLarge: CGFloat = 12
20 | static let spacingExtraLarge: CGFloat = 16
21 | }
22 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/Theme/ThemeColor.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ThemeColor.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/13/23.
6 | //
7 |
8 | import SwiftUI
9 |
10 | // MARK: - ThemeColor
11 |
12 | enum ThemeColor { }
13 |
14 | extension ThemeColor {
15 |
16 | static let tintColor = Color.purple
17 | }
18 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/URLImageView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // URLImageView.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 11/4/23.
6 | //
7 |
8 | import SwiftUI
9 |
10 | // MARK: - URLImageView
11 |
12 | struct URLImageView: View {
13 |
14 | let url: URL
15 |
16 | var body: some View {
17 | AsyncImage(
18 | url: url,
19 | transaction: Transaction(animation: .easeInOut))
20 | { phase in
21 | switch phase {
22 | case .empty:
23 | ProgressView()
24 |
25 | case .success(let image):
26 | image
27 | .resizable()
28 | .frame(width: 100, height: 100)
29 | .transition(.opacity)
30 |
31 | case .failure:
32 | Image(systemName: "wifi.slash")
33 |
34 | @unknown default:
35 | EmptyView()
36 | }
37 | }
38 | .frame(width: 100, height: 100)
39 | .background(Color.gray)
40 | .clipShape(RoundedRectangle(cornerRadius: 10))
41 | }
42 | }
43 |
44 | let urlImageViewMockURL =
45 | URL(
46 | string: "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg")!
47 |
48 | #Preview {
49 | ScrollView {
50 | VStack(spacing: 40) {
51 | URLImageView(url: urlImageViewMockURL)
52 | URLImageView(url: urlImageViewMockURL)
53 | .clipShape(Circle())
54 | .overlay(Circle().stroke(Color.white, lineWidth: 4))
55 | .shadow(radius: 10)
56 | URLImageView(url: urlImageViewMockURL)
57 | .clipShape(Circle())
58 | .overlay(Circle().stroke(Color.white, lineWidth: 4))
59 | }
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SwiftOpenAIExample.entitlements:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>com.apple.security.app-sandbox</key>
6 | <true/>
7 | <key>com.apple.security.files.user-selected.read-only</key>
8 | <true/>
9 | </dict>
10 | </plist>
11 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SwiftOpenAIExampleApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SwiftOpenAIExampleApp.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct SwiftOpenAIExampleApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ServiceSelectionView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Utilities/ContentLoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentLoader.swift
3 | // SwiftOpenAIExample
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import Foundation
9 |
10 | struct ContentLoader {
11 |
12 | enum Error: Swift.Error {
13 | case fileNotFound(name: String)
14 | case fileDecodingFailed(name: String, Swift.Error)
15 | }
16 |
17 | func urlFromAsset(fromFileNamed name: String, ext: String) -> URL? {
18 | guard
19 | let url = Bundle.main.url(
20 | forResource: name,
21 | withExtension: ext)
22 | else {
23 | return nil
24 | }
25 | return url
26 | }
27 |
28 | func loadBundledContent(fromFileNamed name: String, ext: String) throws -> Data {
29 | guard let url = urlFromAsset(fromFileNamed: name, ext: ext) else {
30 | throw Error.fileNotFound(name: name)
31 | }
32 |
33 | do {
34 | return try Data(contentsOf: url)
35 | } catch {
36 | throw Error.fileDecodingFailed(name: name, error)
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExampleTests/SwiftOpenAIExampleTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SwiftOpenAIExampleTests.swift
3 | // SwiftOpenAIExampleTests
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import XCTest
9 |
10 | final class SwiftOpenAIExampleTests: XCTestCase {
11 |
12 | override func setUpWithError() throws {
13 | // Put setup code here. This method is called before the invocation of each test method in the class.
14 | }
15 |
16 | override func tearDownWithError() throws {
17 | // Put teardown code here. This method is called after the invocation of each test method in the class.
18 | }
19 |
20 | func testExample() throws {
21 | // This is an example of a functional test case.
22 | // Use XCTAssert and related functions to verify your tests produce the correct results.
23 | // Any test you write for XCTest can be annotated as throws and async.
24 | // Mark your test throws to produce an unexpected failure when your test encounters an uncaught error.
25 | // Mark your test async to allow awaiting for asynchronous code to complete. Check the results with assertions afterwards.
26 | }
27 |
28 | func testPerformanceExample() throws {
29 | // This is an example of a performance test case.
30 | measure {
31 | // Put the code you want to measure the time of here.
32 | }
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExampleUITests/SwiftOpenAIExampleUITests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SwiftOpenAIExampleUITests.swift
3 | // SwiftOpenAIExampleUITests
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import XCTest
9 |
10 | final class SwiftOpenAIExampleUITests: XCTestCase {
11 |
12 | override func setUpWithError() throws {
13 | // Put setup code here. This method is called before the invocation of each test method in the class.
14 |
15 | // In UI tests it is usually best to stop immediately when a failure occurs.
16 | continueAfterFailure = false
17 |
18 | // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
19 | }
20 |
21 | override func tearDownWithError() throws {
22 | // Put teardown code here. This method is called after the invocation of each test method in the class.
23 | }
24 |
25 | func testExample() throws {
26 | // UI tests must launch the application that they test.
27 | let app = XCUIApplication()
28 | app.launch()
29 |
30 | // Use XCTAssert and related functions to verify your tests produce the correct results.
31 | }
32 |
33 | func testLaunchPerformance() throws {
34 | if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) {
35 | // This measures how long it takes to launch your application.
36 | measure(metrics: [XCTApplicationLaunchMetric()]) {
37 | XCUIApplication().launch()
38 | }
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/Examples/SwiftOpenAIExample/SwiftOpenAIExampleUITests/SwiftOpenAIExampleUITestsLaunchTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SwiftOpenAIExampleUITestsLaunchTests.swift
3 | // SwiftOpenAIExampleUITests
4 | //
5 | // Created by James Rochabrun on 10/19/23.
6 | //
7 |
8 | import XCTest
9 |
10 | final class SwiftOpenAIExampleUITestsLaunchTests: XCTestCase {
11 |
12 | override class var runsForEachTargetApplicationUIConfiguration: Bool {
13 | true
14 | }
15 |
16 | override func setUpWithError() throws {
17 | continueAfterFailure = false
18 | }
19 |
20 | func testLaunch() throws {
21 | let app = XCUIApplication()
22 | app.launch()
23 |
24 | // Insert steps here to perform after app launch but before taking a screenshot,
25 | // such as logging into a test account or navigating somewhere in the app
26 |
27 | let attachment = XCTAttachment(screenshot: app.screenshot())
28 | attachment.name = "Launch Screen"
29 | attachment.lifetime = .keepAlways
30 | add(attachment)
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 James Rochabrun
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version: 5.9
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
6 | let package = Package(
7 | name: "SwiftOpenAI",
8 | platforms: [
9 | .iOS(.v15),
10 | .macOS(.v13),
11 | .watchOS(.v9),
12 | ],
13 | products: [
14 | // Products define the executables and libraries a package produces, making them visible to other packages.
15 | .library(
16 | name: "SwiftOpenAI",
17 | targets: ["SwiftOpenAI"]),
18 | ],
19 | targets: [
20 | // Targets are the basic building blocks of a package, defining a module or a test suite.
21 | // Targets can depend on other targets in this package and products from dependencies.
22 | .target(
23 | name: "SwiftOpenAI"),
24 | .testTarget(
25 | name: "SwiftOpenAITests",
26 | dependencies: ["SwiftOpenAI"]),
27 | ])
28 |
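As a usage note (not part of the repository), a consuming package would declare the library roughly as below; the dependency pin (`branch: "main"`) and the target layout are assumptions, so substitute whichever release you actually target.

```swift
// swift-tools-version: 5.9
import PackageDescription

// Hypothetical consumer manifest; the branch pin is an assumption.
let package = Package(
  name: "MyApp",
  platforms: [.iOS(.v15), .macOS(.v13)],
  dependencies: [
    .package(url: "https://github.com/jamesrochabrun/SwiftOpenAI", branch: "main"),
  ],
  targets: [
    .executableTarget(
      name: "MyApp",
      dependencies: [.product(name: "SwiftOpenAI", package: "SwiftOpenAI")]),
  ])
```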
--------------------------------------------------------------------------------
/Sources/OpenAI/Azure/AzureOpenAIConfiguration.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AzureOpenAIConfiguration.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 1/23/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Reference](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference)
11 | public struct AzureOpenAIConfiguration {
12 |
13 | public init(
14 | resourceName: String,
15 | openAIAPIKey: Authorization,
16 | apiVersion: String,
17 | extraHeaders: [String: String]? = nil)
18 | {
19 | self.resourceName = resourceName
20 | self.openAIAPIKey = openAIAPIKey
21 | self.apiVersion = apiVersion
22 | self.extraHeaders = extraHeaders
23 | }
24 |
25 | /// The name of your Azure OpenAI Resource.
26 | let resourceName: String
27 |
28 | /// The OpenAI API Key
29 | let openAIAPIKey: Authorization
30 |
31 | /// The API version to use for this operation. This follows the YYYY-MM-DD format.
32 | let apiVersion: String
33 |
34 | /// Azure configuration extra headers for a request.
35 | let extraHeaders: [String: String]?
36 |
37 | }
38 |
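A hedged construction sketch (not from the source): the property documentation above implies usage along these lines, but the exact `Authorization` case name is an assumption about the library's API, so verify it against the Authorization type before relying on it.

```swift
import SwiftOpenAI

// Hypothetical values; `.apiKey(_:)` as the Authorization case is an assumption.
let azureConfiguration = AzureOpenAIConfiguration(
  resourceName: "my-azure-resource",         // the Azure OpenAI resource name
  openAIAPIKey: .apiKey("AZURE_OPENAI_KEY"),  // the key issued for that resource
  apiVersion: "2024-02-01")                   // YYYY-MM-DD, per the Azure reference
```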
--------------------------------------------------------------------------------
/Sources/OpenAI/LocalModelService/LocalModelAPI.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LocalModelAPI.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 6/30/24.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - LocalModelAPI
11 |
12 | enum LocalModelAPI {
13 |
14 | case chat
15 | }
16 |
17 | // MARK: Endpoint
18 |
19 | extension LocalModelAPI: Endpoint {
20 |
21 | /// Builds the final path that includes:
22 | /// - optional proxy path (e.g. "/my-proxy")
23 | /// - version if non-nil (e.g. "/v1")
24 | /// - then the specific endpoint path (e.g. "/assistants")
25 | func path(in openAIEnvironment: OpenAIEnvironment) -> String {
26 | // 1) Potentially prepend proxy path if `proxyPath` is non-empty
27 | let proxyPart =
28 | if let envProxyPart = openAIEnvironment.proxyPath, !envProxyPart.isEmpty {
29 | "/\(envProxyPart)"
30 | } else {
31 | ""
32 | }
33 | let mainPart = openAIPath(in: openAIEnvironment)
34 |
35 | return proxyPart + mainPart // e.g. "/my-proxy/v1/assistants"
36 | }
37 |
38 | func openAIPath(in openAIEnvironment: OpenAIEnvironment) -> String {
39 | let version =
40 | if let envOverrideVersion = openAIEnvironment.version, !envOverrideVersion.isEmpty {
41 | "/\(envOverrideVersion)"
42 | } else {
43 | ""
44 | }
45 | switch self {
46 | case .chat: return "\(version)/chat/completions"
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/Networking/Endpoint.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Endpoint.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/11/23.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - HTTPMethod
11 |
12 | enum HTTPMethod: String {
13 | case post = "POST"
14 | case get = "GET"
15 | case delete = "DELETE"
16 | }
17 |
18 | // MARK: - Endpoint
19 |
20 | protocol Endpoint {
21 | func path(
22 | in openAIEnvironment: OpenAIEnvironment)
23 | -> String
24 | }
25 |
26 | // MARK: Endpoint+Requests
27 |
28 | extension Endpoint {
29 |
30 | func request(
31 | apiKey: Authorization,
32 | openAIEnvironment: OpenAIEnvironment,
33 | organizationID: String?,
34 | method: HTTPMethod,
35 | params: Encodable? = nil,
36 | queryItems: [URLQueryItem] = [],
37 | betaHeaderField: String? = nil,
38 | extraHeaders: [String: String]? = nil)
39 | throws -> URLRequest
40 | {
41 | let finalPath = path(in: openAIEnvironment)
42 | var request = URLRequest(url: urlComponents(base: openAIEnvironment.baseURL, path: finalPath, queryItems: queryItems).url!)
43 | request.addValue("application/json", forHTTPHeaderField: "Content-Type")
44 | request.addValue(apiKey.value, forHTTPHeaderField: apiKey.headerField)
45 | if let organizationID {
46 | request.addValue(organizationID, forHTTPHeaderField: "OpenAI-Organization")
47 | }
48 | if let betaHeaderField {
49 | request.addValue(betaHeaderField, forHTTPHeaderField: "OpenAI-Beta")
50 | }
51 | if let extraHeaders {
52 | for header in extraHeaders {
53 | request.addValue(header.value, forHTTPHeaderField: header.key)
54 | }
55 | }
56 | request.httpMethod = method.rawValue
57 | if let params {
58 | request.httpBody = try JSONEncoder().encode(params)
59 | }
60 | return request
61 | }
62 |
63 | func multiPartRequest(
64 | apiKey: Authorization,
65 | openAIEnvironment: OpenAIEnvironment,
66 | organizationID: String?,
67 | method: HTTPMethod,
68 | params: MultipartFormDataParameters,
69 | queryItems: [URLQueryItem] = [])
70 | throws -> URLRequest
71 | {
72 | let finalPath = path(in: openAIEnvironment)
73 | var request = URLRequest(url: urlComponents(base: openAIEnvironment.baseURL, path: finalPath, queryItems: queryItems).url!)
74 | request.httpMethod = method.rawValue
75 | let boundary = UUID().uuidString
76 | request.addValue(apiKey.value, forHTTPHeaderField: apiKey.headerField)
77 | if let organizationID {
78 | request.addValue(organizationID, forHTTPHeaderField: "OpenAI-Organization")
79 | }
80 | request.addValue("multipart/form-data; boundary=\(boundary)", forHTTPHeaderField: "Content-Type")
81 | request.httpBody = params.encode(boundary: boundary)
82 | return request
83 | }
84 |
85 | private func urlComponents(
86 | base: String,
87 | path: String,
88 | queryItems: [URLQueryItem])
89 | -> URLComponents
90 | {
91 | var components = URLComponents(string: base)!
92 | components.path = path
93 | if !queryItems.isEmpty {
94 | components.queryItems = queryItems
95 | }
96 | return components
97 | }
98 |
99 | }
100 |
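The URL assembly performed by `urlComponents(base:path:queryItems:)` reduces to the following standalone sketch; the base URL and query item are illustrative.

import Foundation

// Standalone sketch of the URL assembly done by `urlComponents(base:path:queryItems:)` above.
var components = URLComponents(string: "https://api.openai.com")!
components.path = "/v1/files"
components.queryItems = [URLQueryItem(name: "purpose", value: "fine-tune")]

print(components.url!.absoluteString)
// https://api.openai.com/v1/files?purpose=fine-tune
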
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/Networking/MultipartFormDataBuilder.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MultipartFormDataBuilder.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/11/23.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - MultipartFormDataBuilder
11 |
12 | struct MultipartFormDataBuilder {
13 |
14 | let boundary: String
15 | let entries: [MultipartFormDataEntry]
16 |
17 | init(
18 | boundary: String,
19 | entries: [MultipartFormDataEntry])
20 | {
21 | self.boundary = boundary
22 | self.entries = entries
23 | }
24 |
25 | func build() -> Data {
26 | var httpData = entries
27 | .map { $0.makeData(boundary: boundary) }
28 | .reduce(Data(), +)
29 | httpData.append("--\(boundary)--\r\n")
30 | return httpData
31 | }
32 | }
33 |
34 | // MARK: - MultipartFormDataEntry
35 |
36 | enum MultipartFormDataEntry {
37 |
38 | case file(paramName: String, fileName: String?, fileData: Data, contentType: String)
39 | case string(paramName: String, value: Any?)
40 | }
41 |
42 | // MARK: MultipartFormDataEntry+Data
43 |
44 | extension MultipartFormDataEntry {
45 |
46 | func makeData(boundary: String) -> Data {
47 | var body = Data()
48 | switch self {
49 | case .file(let paramName, let fileName, let fileData, let contentType):
50 | body.append("--\(boundary)\r\n")
51 | if let fileName {
52 | body.append("Content-Disposition: form-data; name=\"\(paramName)\"; filename=\"\(fileName)\"\r\n")
53 | } else {
54 | body.append("Content-Disposition: form-data; name=\"\(paramName)\"\r\n")
55 | }
56 | body.append("Content-Type: \(contentType)\r\n\r\n")
57 | body.append(fileData)
58 | body.append("\r\n")
59 |
60 | case .string(let paramName, let value):
61 | if let value {
62 | body.append("--\(boundary)\r\n")
63 | body.append("Content-Disposition: form-data; name=\"\(paramName)\"\r\n\r\n")
64 | body.append("\(value)\r\n")
65 | }
66 | }
67 | return body
68 | }
69 | }
70 |
71 | extension Data {
72 |
73 | fileprivate mutating func append(_ string: String) {
74 | let data = string.data(
75 | using: String.Encoding.utf8,
76 | allowLossyConversion: true)
77 | append(data!)
78 | }
79 | }
80 |
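A usage sketch for the builder above. The types are module-internal, so this would live inside the package; the file contents are placeholders.

import Foundation

// Builds a two-part body: a JSONL file entry followed by a string entry.
let boundary = UUID().uuidString
let body = MultipartFormDataBuilder(
    boundary: boundary,
    entries: [
        .file(
            paramName: "file",
            fileName: "WorldCupData.jsonl",
            fileData: Data("{\"prompt\": \"...\"}".utf8),
            contentType: "application/x-ndjson"),
        .string(paramName: "purpose", value: "fine-tune"),
    ])
    .build()

print(String(decoding: body, as: UTF8.self))
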
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Audio/AudioSpeechParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioSpeechParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/14/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Generates audio from the input text.](https://platform.openai.com/docs/api-reference/audio/createSpeech)
11 | public struct AudioSpeechParameters: Encodable {
12 |
13 | public init(
14 | model: TTSModel,
15 | input: String,
16 | voice: Voice,
17 | responseFormat: ResponseFormat? = nil,
18 | speed: Double? = nil)
19 | {
20 | self.model = model.rawValue
21 | self.input = input
22 | self.voice = voice.rawValue
23 | self.responseFormat = responseFormat?.rawValue
24 | self.speed = speed
25 | }
26 |
27 | public enum TTSModel {
28 | case tts1
29 | case tts1HD
30 | case custom(model: String)
31 |
32 | var rawValue: String {
33 | switch self {
34 | case .tts1:
35 | "tts-1"
36 | case .tts1HD:
37 | "tts-1-hd"
38 | case .custom(let model):
39 | model
40 | }
41 | }
42 | }
43 |
44 | public enum Voice: String {
45 | case alloy
46 | case echo
47 | case fable
48 | case onyx
49 | case nova
50 | case shimmer
51 | case ash
52 | case coral
53 | case sage
54 | }
55 |
56 | public enum ResponseFormat: String {
57 | case mp3
58 | case opus
59 | case aac
60 | case flac
61 | }
62 |
63 | enum CodingKeys: String, CodingKey {
64 | case model
65 | case input
66 | case voice
67 | case responseFormat = "response_format"
68 | case speed
69 | }
70 |
71 | /// One of the available [TTS models](https://platform.openai.com/docs/models/tts): tts-1 or tts-1-hd
72 | let model: String
73 | /// The text to generate audio for. The maximum length is 4096 characters.
74 | let input: String
75 | /// The voice to use when generating the audio. Supported voices are alloy, ash, coral, echo, fable, nova, onyx, sage, and shimmer. Previews of the voices are available in the [Text to speech guide.](https://platform.openai.com/docs/guides/text-to-speech/voice-options)
76 | let voice: String
77 | /// Defaults to mp3. The format in which to return the generated audio. Supported formats are mp3, opus, aac, and flac.
78 | let responseFormat: String?
79 | /// The speed of the generated audio. Select a value from 0.25 to 4.0. Defaults to 1.0.
80 | let speed: Double?
81 |
82 | }
83 |
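A construction sketch using only the options declared above; the input text and speed are illustrative.

import SwiftOpenAI

// Text-to-speech request: tts-1, the "alloy" voice, mp3 output at normal speed.
let speechParameters = AudioSpeechParameters(
    model: .tts1,
    input: "Hello from SwiftOpenAI.",
    voice: .alloy,
    responseFormat: .mp3,
    speed: 1.0)
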
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Audio/AudioTranscriptionParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioTranscriptionParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/10/23.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - AudioTranscriptionParameters
11 |
12 | /// [Transcribes audio into the input language.](https://platform.openai.com/docs/api-reference/audio/createTranscription)
13 | public struct AudioTranscriptionParameters: Encodable {
14 |
15 | public init(
16 | fileName: String,
17 | file: Data,
18 | model: Model = .whisperOne,
19 | prompt: String? = nil,
20 | responseFormat: String? = nil,
21 | temperature: Double? = nil,
22 | language: String? = nil,
23 | timestampGranularities: [String]? = nil)
24 | {
25 | self.fileName = fileName
26 | self.file = file
27 | self.model = model.value
28 | self.prompt = prompt
29 | self.responseFormat = responseFormat
30 | self.temperature = temperature
31 | self.language = language
32 | self.timestampGranularities = timestampGranularities
33 | }
34 |
35 | public enum Model {
36 | case whisperOne
37 | case custom(model: String)
38 | var value: String {
39 | switch self {
40 | case .whisperOne:
41 | "whisper-1"
42 | case .custom(let model):
43 | model
44 | }
45 | }
46 | }
47 |
48 | enum CodingKeys: String, CodingKey {
49 | case file
50 | case model
51 | case prompt
52 | case responseFormat = "response_format"
53 | case temperature
54 | case language
55 | case timestampGranularities = "timestamp_granularities[]"
56 | }
57 |
58 | /// The name of the file asset is not documented in OpenAI's official documentation; however, it is essential for constructing the multipart request.
59 | let fileName: String
61 | /// The audio file object (not file name) to transcribe, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
61 | let file: Data
62 | /// ID of the model to use. Only whisper-1 is currently available.
63 | let model: String
64 | /// The language of the input audio. Supplying the input language in [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) format will improve accuracy and latency.
65 | let language: String?
66 | /// An optional text to guide the model's style or continue a previous audio segment. The [prompt](https://platform.openai.com/docs/guides/speech-to-text/prompting) should match the audio language.
67 | let prompt: String?
68 | /// The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt. Defaults to json
69 | let responseFormat: String?
70 | /// The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use [log probability](https://en.wikipedia.org/wiki/Log_probability) to automatically increase the temperature until certain thresholds are hit. Defaults to 0
71 | let temperature: Double?
72 | /// Defaults to segment
73 | /// The timestamp granularities to populate for this transcription. response_format must be set to verbose_json to use timestamp granularities. Either or both of these options are supported: word, or segment. Note: There is no additional latency for segment timestamps, but generating word timestamps incurs additional latency.
74 | let timestampGranularities: [String]?
75 |
76 | }
77 |
78 | // MARK: MultipartFormDataParameters
79 |
80 | extension AudioTranscriptionParameters: MultipartFormDataParameters {
81 |
82 | public func encode(boundary: String) -> Data {
83 | MultipartFormDataBuilder(boundary: boundary, entries: [
84 | .file(paramName: Self.CodingKeys.file.rawValue, fileName: fileName, fileData: file, contentType: "audio/mpeg"),
85 | .string(paramName: Self.CodingKeys.model.rawValue, value: model),
86 | .string(paramName: Self.CodingKeys.language.rawValue, value: language),
87 | .string(paramName: Self.CodingKeys.prompt.rawValue, value: prompt),
88 | .string(paramName: Self.CodingKeys.responseFormat.rawValue, value: responseFormat),
89 | .string(paramName: Self.CodingKeys.temperature.rawValue, value: temperature),
90 | .string(paramName: Self.CodingKeys.timestampGranularities.rawValue, value: timestampGranularities),
91 | ]).build()
92 | }
93 | }
94 |
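A construction sketch; the file path is a placeholder, and word-level timestamps require `responseFormat` to be `verbose_json`.

import Foundation
import SwiftOpenAI

// `audioURL` is a placeholder for a local audio file; force-try is for brevity in this sketch.
let audioURL = URL(fileURLWithPath: "/path/to/recording.m4a")
let audioData = try! Data(contentsOf: audioURL)

let transcriptionParameters = AudioTranscriptionParameters(
    fileName: "recording.m4a",
    file: audioData,
    responseFormat: "verbose_json",
    language: "en",
    timestampGranularities: ["word"])
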
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Audio/AudioTranslationParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioTranslationParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/10/23.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - AudioTranslationParameters
11 |
12 | /// Translates audio into English. [Create translation](https://platform.openai.com/docs/api-reference/audio/createTranslation).
13 | public struct AudioTranslationParameters: Encodable {
14 |
15 | public init(
16 | fileName: String,
17 | file: Data,
18 | model: Model = .whisperOne,
19 | prompt: String? = nil,
20 | responseFormat: String? = nil,
21 | temperature: Double? = nil)
22 | {
23 | self.fileName = fileName
24 | self.file = file
25 | self.model = model.value
26 | self.prompt = prompt
27 | self.responseFormat = responseFormat
28 | self.temperature = temperature
29 | }
30 |
31 | public enum Model {
32 | case whisperOne
33 | case custom(model: String)
34 |
35 | var value: String {
36 | switch self {
37 | case .whisperOne:
38 | "whisper-1"
39 | case .custom(let model):
40 | model
41 | }
42 | }
43 | }
44 |
45 | enum CodingKeys: String, CodingKey {
46 | case file
47 | case model
48 | case prompt
49 | case responseFormat = "response_format"
50 | case temperature
51 | }
52 |
53 | /// The name of the file asset is not documented in OpenAI's official documentation; however, it is essential for constructing the multipart request.
54 | let fileName: String
56 | /// The audio file object (not file name) to translate, in one of these formats: flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
56 | let file: Data
57 | /// ID of the model to use. Only whisper-1 is currently available.
58 | let model: String
59 | /// An optional text to guide the model's style or continue a previous audio segment. The [prompt](https://platform.openai.com/docs/guides/speech-to-text/prompting) should match the audio language.
60 | let prompt: String?
61 | /// The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt. Defaults to json
62 | let responseFormat: String?
63 | /// The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use [log probability](https://en.wikipedia.org/wiki/Log_probability) to automatically increase the temperature until certain thresholds are hit. Defaults to 0
64 | let temperature: Double?
65 |
66 | }
67 |
68 | // MARK: MultipartFormDataParameters
69 |
70 | extension AudioTranslationParameters: MultipartFormDataParameters {
71 |
72 | public func encode(boundary: String) -> Data {
73 | MultipartFormDataBuilder(boundary: boundary, entries: [
74 | .file(paramName: Self.CodingKeys.file.rawValue, fileName: fileName, fileData: file, contentType: "audio/mpeg"),
75 | .string(paramName: Self.CodingKeys.model.rawValue, value: model),
76 | .string(paramName: Self.CodingKeys.prompt.rawValue, value: prompt),
77 | .string(paramName: Self.CodingKeys.responseFormat.rawValue, value: responseFormat),
78 | .string(paramName: Self.CodingKeys.temperature.rawValue, value: temperature),
79 | ]).build()
80 | }
81 | }
82 |
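A construction sketch using the bundled example file name as a placeholder; the audio is translated into English.

import Foundation
import SwiftOpenAI

// `german.m4a` mirrors the example app's resource; force-try is for brevity in this sketch.
let germanAudio = try! Data(contentsOf: URL(fileURLWithPath: "/path/to/german.m4a"))
let translationParameters = AudioTranslationParameters(
    fileName: "german.m4a",
    file: germanAudio,
    responseFormat: "text")
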
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Batch/BatchParameter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // BatchParameter.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/27/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Create large batches of API requests for asynchronous processing. The Batch API returns completions within 24 hours for a 50% discount.](https://platform.openai.com/docs/api-reference/batch/create)
11 | public struct BatchParameter: Encodable {
12 |
13 | /// The ID of an uploaded file that contains requests for the new batch.
14 | /// See [upload file](https://platform.openai.com/docs/api-reference/files/create) for how to upload a file.
15 | /// Your input file must be formatted as a [JSONL file](https://platform.openai.com/docs/api-reference/batch/requestInput), and must be uploaded with the purpose batch.
16 | let inputFileID: String
17 | /// The endpoint to be used for all requests in the batch. Currently only /v1/chat/completions is supported.
18 | let endpoint: String
19 | /// The time frame within which the batch should be processed. Currently only 24h is supported.
20 | let completionWindow: String
21 | /// Optional custom metadata for the batch.
22 | let metadata: [String: String]?
23 |
24 | enum CodingKeys: String, CodingKey {
25 | case inputFileID = "input_file_id"
26 | case endpoint
27 | case completionWindow = "completion_window"
28 | case metadata
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Embedding/EmbeddingParameter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // EmbeddingParameter.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/12/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Creates](https://platform.openai.com/docs/api-reference/embeddings/create) an embedding vector representing the input text.
11 | public struct EmbeddingParameter: Encodable {
12 |
13 | public init(
14 | input: String,
15 | model: Model = .textEmbeddingAda002,
16 | encodingFormat: String?,
17 | dimensions: Int?,
18 | user: String? = nil)
19 | {
20 | self.input = input
21 | self.model = model.rawValue
22 | self.encodingFormat = encodingFormat
23 | self.dimensions = dimensions
24 | self.user = user
25 | }
26 |
27 | public enum Model: String {
28 | case textEmbeddingAda002 = "text-embedding-ada-002"
29 | case textEmbedding3Large = "text-embedding-3-large"
30 | case textEmbedding3Small = "text-embedding-3-small"
31 | }
32 |
33 | enum CodingKeys: String, CodingKey {
34 | case input
35 | case model
36 | case encodingFormat = "encoding_format"
37 | case dimensions
38 | case user
39 | }
40 |
41 | /// Input text to embed, encoded as a string or array of tokens. To embed multiple inputs in a single request, pass an array of strings or array of token arrays. Each input must not exceed the max input tokens for the model (8191 tokens for text-embedding-ada-002) and cannot be an empty string. [How to Count Tokens with `tiktoken`](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
42 | let input: String
43 | /// ID of the model to use. You can use the List models API to see all of your available models, or see our [Model overview ](https://platform.openai.com/docs/models/overview) for descriptions of them.
44 | let model: String
45 | /// The format to return the embeddings in. Can be either float or [base64](https://pypi.org/project/pybase64/).
46 | /// Defaults to "float"
47 | let encodingFormat: String?
48 | /// The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models.
49 | let dimensions: Int?
50 | /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. [Learn more.](https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids)
51 | let user: String?
52 |
53 | }
54 |
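A construction sketch; `dimensions` is only honored by text-embedding-3 and later models, and the values here are illustrative.

import SwiftOpenAI

// Requests a 256-dimension embedding from text-embedding-3-small.
let embeddingParameters = EmbeddingParameter(
    input: "The food was delicious and the waiter was friendly.",
    model: .textEmbedding3Small,
    encodingFormat: "float",
    dimensions: 256)
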
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/File/FileParameter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FileParameter.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/16/23.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - FileParameters
11 |
12 | /// [Upload a file](https://platform.openai.com/docs/api-reference/files/create) that can be used across various endpoints/features. Currently, the size of all the files uploaded by one organization can be up to 1 GB. Please contact us if you need to increase the storage limit.
13 | public struct FileParameters: Encodable {
14 |
15 | /// The name of the file asset is not documented in OpenAI's official documentation; however, it is essential for constructing the multipart request.
16 | public let fileName: String?
17 | /// The file object (not file name) to be uploaded.
18 | /// If the purpose is set to "fine-tune", the file will be used for fine-tuning.
19 | public let file: Data
20 | /// The intended purpose of the uploaded file.
21 | /// Use "fine-tune" for [fine-tuning](https://platform.openai.com/docs/api-reference/fine-tuning). This allows us to validate the format of the uploaded file is correct for fine-tuning.
22 | public let purpose: String
23 |
24 | public init(
25 | fileName: String?,
26 | file: Data,
27 | purpose: String)
28 | {
29 | self.fileName = fileName
30 | self.file = file
31 | self.purpose = purpose
32 | }
33 | }
34 |
35 | // MARK: MultipartFormDataParameters
36 |
37 | extension FileParameters: MultipartFormDataParameters {
38 |
39 | public func encode(boundary: String) -> Data {
40 | MultipartFormDataBuilder(boundary: boundary, entries: [
41 | .file(paramName: "file", fileName: fileName, fileData: file, contentType: "application/x-ndjson"),
42 | .string(paramName: "purpose", value: purpose),
43 | ]).build()
44 | }
45 | }
46 |
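A construction sketch for a fine-tuning upload; the JSONL contents are placeholders.

import Foundation
import SwiftOpenAI

// Uploads a JSONL file with purpose "fine-tune"; the data here is a stand-in for real training lines.
let jsonlData = Data("{\"prompt\": \"...\", \"completion\": \"...\"}".utf8)
let fileParameters = FileParameters(
    fileName: "WorldCupData.jsonl",
    file: jsonlData,
    purpose: "fine-tune")
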
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Image/Dalle.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Dalle.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/15/23.
6 | //
7 |
8 | import Foundation
9 | /// [DALL·E](https://platform.openai.com/docs/models/dall-e)
10 | ///
11 | /// DALL·E is an AI system that can create realistic images and art from a description in natural language. DALL·E 3 currently supports the ability, given a prompt, to create a new image with a specific size. DALL·E 2 also supports the ability to edit an existing image, or create variations of a user provided image.
12 | ///
13 | /// DALL·E 3 is available through our Images API along with DALL·E 2. You can try DALL·E 3 through ChatGPT Plus.
14 | ///
15 | ///
16 | /// | MODEL | DESCRIPTION |
17 | /// |-----------|--------------------------------------------------------------|
18 | /// | dall-e-3 | DALL·E 3 New |
19 | /// | | The latest DALL·E model released in Nov 2023. Learn more. |
20 | /// | dall-e-2 | The previous DALL·E model released in Nov 2022. |
21 | /// | | The 2nd iteration of DALL·E with more realistic, accurate, |
22 | /// | | and 4x greater resolution images than the original model. |
23 | public enum Dalle {
24 |
25 | case dalle2(Dalle2ImageSize)
26 | case dalle3(Dalle3ImageSize)
27 |
28 | public enum Dalle2ImageSize: String {
29 | case small = "256x256"
30 | case medium = "512x512"
31 | case large = "1024x1024"
32 | }
33 |
34 | public enum Dalle3ImageSize: String {
35 | case largeSquare = "1024x1024"
36 | case landscape = "1792x1024"
37 | case portrait = "1024x1792"
38 | }
39 |
40 | var model: String {
41 | switch self {
42 | case .dalle2: Model.dalle2.value
43 | case .dalle3: Model.dalle3.value
44 | }
45 | }
46 |
47 | var size: String {
48 | switch self {
49 | case .dalle2(let dalle2ImageSize):
50 | dalle2ImageSize.rawValue
51 | case .dalle3(let dalle3ImageSize):
52 | dalle3ImageSize.rawValue
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Image/ImageCreateParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageCreateParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/12/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Creates an image given a prompt.](https://platform.openai.com/docs/api-reference/images/create)
11 | public struct ImageCreateParameters: Encodable {
12 |
13 | public init(
14 | prompt: String,
15 | model: Dalle,
16 | numberOfImages: Int = 1,
17 | quality: String? = nil,
18 | responseFormat: ImageResponseFormat? = nil,
19 | style: String? = nil,
20 | user: String? = nil)
21 | {
22 | self.prompt = prompt
23 | self.model = model.model
24 | n = numberOfImages
25 | self.quality = quality
26 | self.responseFormat = responseFormat?.rawValue
27 | size = model.size
28 | self.style = style
29 | self.user = user
30 | }
31 |
32 | public enum ImageSize: String {
33 | case small = "256x256"
34 | case medium = "512x512"
35 | case large = "1024x1024"
36 | }
37 |
38 | public enum ImageResponseFormat: String {
39 | case url
40 | case b64Json = "b64_json"
41 | }
42 |
43 | enum CodingKeys: String, CodingKey {
44 | case prompt
45 | case model
46 | case n
47 | case quality
48 | case responseFormat = "response_format"
49 | case size
50 | case style
51 | case user
52 | }
53 |
54 | /// A text description of the desired image(s). The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.
55 | let prompt: String
56 | /// The model to use for image generation. Defaults to dall-e-2
57 | let model: String?
58 | /// The number of images to generate. Must be between 1 and 10. For dall-e-3, only n=1 is supported.
59 | let n: Int?
60 | /// The quality of the image that will be generated. hd creates images with finer details and greater consistency across the image. This param is only supported for dall-e-3. Defaults to standard
61 | let quality: String?
62 | /// The format in which the generated images are returned. Must be one of url or b64_json. Defaults to url
63 | let responseFormat: String?
64 | /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024 for dall-e-2. Must be one of 1024x1024, 1792x1024, or 1024x1792 for dall-e-3 models. Defaults to 1024x1024
65 | let size: String?
66 | /// The style of the generated images. Must be one of vivid or natural. Vivid causes the model to lean towards generating hyper-real and dramatic images. Natural causes the model to produce more natural, less hyper-real looking images. This param is only supported for dall-e-3. Defaults to vivid
67 | let style: String?
68 | /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. [Learn more](https://platform.openai.com/docs/guides/safety-best-practices)
69 | let user: String?
70 |
71 | }
72 |
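A construction sketch for a single dall-e-3 image; the prompt, quality, and style values are illustrative.

import SwiftOpenAI

// One dall-e-3 image at 1024x1792, returned as a URL; "hd" and "vivid" are dall-e-3-only options.
let imageParameters = ImageCreateParameters(
    prompt: "A watercolor painting of a lighthouse at dawn",
    model: .dalle3(.portrait),
    numberOfImages: 1,
    quality: "hd",
    responseFormat: .url,
    style: "vivid")
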
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Image/ImageEditParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageEditParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/12/23.
6 | //
7 |
8 | import Foundation
9 | #if canImport(UIKit)
10 | import UIKit
11 | public typealias PlatformImage = UIImage
12 | #elseif canImport(AppKit)
13 | import AppKit
14 | public typealias PlatformImage = NSImage
15 | #endif
16 |
17 | // MARK: - ImageEditParameters
18 |
19 | /// [Creates an edited or extended image given an original image and a prompt.](https://platform.openai.com/docs/api-reference/images/createEdit)
20 | public struct ImageEditParameters: Encodable {
21 |
22 | public init(
23 | image: PlatformImage,
24 | model: Dalle? = nil,
25 | mask: PlatformImage? = nil,
26 | prompt: String,
27 | numberOfImages: Int? = nil,
28 | responseFormat: ImageResponseFormat? = nil,
29 | user: String? = nil)
30 | {
31 | #if canImport(UIKit)
32 | let imageData = image.pngData()
33 | let maskData = mask?.pngData()
34 | #elseif canImport(AppKit)
35 | let imageData = image.tiffRepresentation
36 | let maskData = mask?.tiffRepresentation
37 | #endif
38 |
39 | if imageData == nil {
40 | assertionFailure("Failed to get image data")
41 | }
42 | if mask != nil, maskData == nil {
43 | assertionFailure("Failed to get mask data")
44 | }
45 |
46 | self.image = imageData!
47 | self.model = model?.model
48 | self.mask = maskData
49 | self.prompt = prompt
50 | n = numberOfImages
51 | size = model?.size
52 | self.responseFormat = responseFormat?.rawValue
53 | self.user = user
54 | }
55 |
56 | public enum ImageResponseFormat: String {
57 | case url
58 | case b64Json = "b64_json"
59 | }
60 |
61 | enum CodingKeys: String, CodingKey {
62 | case image
63 | case prompt
64 | case mask
65 | case model
66 | case n
67 | case size
68 | case responseFormat = "response_format"
69 | case user
70 | }
71 |
72 | /// The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.
73 | let image: Data
74 | /// A text description of the desired image(s). The maximum length is 1000 characters.
75 | let prompt: String
76 | /// An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.
77 | let mask: Data?
78 | /// The model to use for image generation. Only dall-e-2 is supported at this time. Defaults to dall-e-2
79 | let model: String?
80 | /// The number of images to generate. Must be between 1 and 10. Defaults to 1
81 | let n: Int?
82 | /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024. Defaults to 1024x1024
83 | let size: String?
84 | /// The format in which the generated images are returned. Must be one of url or b64_json. Defaults to url
85 | let responseFormat: String?
86 | /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. [Learn more](https://platform.openai.com/docs/guides/safety-best-practices)
87 | let user: String?
88 |
89 | }
90 |
91 | // MARK: MultipartFormDataParameters
92 |
93 | extension ImageEditParameters: MultipartFormDataParameters {
94 |
95 | public func encode(boundary: String) -> Data {
96 | MultipartFormDataBuilder(boundary: boundary, entries: [
97 | .file(paramName: Self.CodingKeys.image.rawValue, fileName: "", fileData: image, contentType: "image/png"),
98 | .string(paramName: Self.CodingKeys.prompt.rawValue, value: prompt),
99 | .string(paramName: Self.CodingKeys.mask.rawValue, value: mask),
100 | .string(paramName: Self.CodingKeys.model.rawValue, value: model),
101 | .string(paramName: Self.CodingKeys.n.rawValue, value: n),
102 | .string(paramName: Self.CodingKeys.size.rawValue, value: size),
103 | .string(paramName: Self.CodingKeys.responseFormat.rawValue, value: responseFormat),
104 | .string(paramName: Self.CodingKeys.user.rawValue, value: user),
105 | ]).build()
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Image/ImageVariationParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageVariationParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/12/23.
6 | //
7 |
8 | import Foundation
9 | #if canImport(UIKit)
10 | import UIKit
11 | #elseif canImport(AppKit)
12 | import AppKit
13 | #endif
14 |
15 | // MARK: - ImageVariationParameters
16 |
17 | /// [Creates a variation of a given image.](https://platform.openai.com/docs/api-reference/images/createVariation)
18 | public struct ImageVariationParameters: Encodable {
19 |
20 | public init(
21 | image: PlatformImage,
22 | model: Dalle? = nil,
23 | numberOfImages: Int? = nil,
24 | responseFormat: ImageResponseFormat? = nil,
25 | user: String? = nil)
26 | {
27 | if let model, model.model != Model.dalle2.value {
28 | assertionFailure(
29 | "Only dall-e-2 is supported at this time [https://platform.openai.com/docs/api-reference/images/createEdit]")
30 | }
31 |
32 | #if canImport(UIKit)
33 | let imageData = image.pngData()
34 | #elseif canImport(AppKit)
35 | let imageData = image.tiffRepresentation
36 | #endif
37 |
38 | if imageData == nil {
39 | assertionFailure("Failed ot load image data from image.")
40 | }
41 |
42 | self.image = imageData!
43 | n = numberOfImages
44 | self.model = model?.model
45 | size = model?.size
46 | self.responseFormat = responseFormat?.rawValue
47 | self.user = user
48 | }
49 |
50 | public enum ImageResponseFormat: String {
51 | case url
52 | case b64Json = "b64_json"
53 | }
54 |
55 | enum CodingKeys: String, CodingKey {
56 | case image
57 | case model
58 | case n
59 | case responseFormat = "response_format"
60 | case size
61 | case user
62 | }
63 |
64 | /// The image to use as the basis for the variation(s). Must be a valid PNG file, less than 4MB, and square.
65 | let image: Data
66 | /// The model to use for image generation. Only dall-e-2 is supported at this time. Defaults to dall-e-2
67 | let model: String?
68 | /// The number of images to generate. Must be between 1 and 10. Defaults to 1
69 | let n: Int?
70 | /// The format in which the generated images are returned. Must be one of url or b64_json. Defaults to url
71 | let responseFormat: String?
72 | /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024. Defaults to 1024x1024
73 | let size: String?
74 | /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. [Learn more](https://platform.openai.com/docs/guides/safety-best-practices)
75 | let user: String?
76 |
77 | }
78 |
79 | // MARK: MultipartFormDataParameters
80 |
81 | extension ImageVariationParameters: MultipartFormDataParameters {
82 |
83 | public func encode(boundary: String) -> Data {
84 | MultipartFormDataBuilder(boundary: boundary, entries: [
85 | .file(paramName: Self.CodingKeys.image.rawValue, fileName: "", fileData: image, contentType: "image/png"),
86 | .string(paramName: Self.CodingKeys.model.rawValue, value: model),
87 | .string(paramName: Self.CodingKeys.n.rawValue, value: n),
88 | .string(paramName: Self.CodingKeys.size.rawValue, value: size),
89 | .string(paramName: Self.CodingKeys.responseFormat.rawValue, value: responseFormat),
90 | .string(paramName: Self.CodingKeys.user.rawValue, value: user),
91 | ]).build()
92 | }
93 | }
94 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Message/ModifyMessageParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModifyMessageParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/25/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Modifies a [Message](https://platform.openai.com/docs/api-reference/messages/modifyMessage)
11 | /// Only the metadata can be modified.
12 | public struct ModifyMessageParameters: Encodable {
13 |
14 | /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
15 | public var metadata: [String: String]
16 |
17 | public init(
18 | metadata: [String: String])
19 | {
20 | self.metadata = metadata
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Moderation/ModerationParameter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModerationParameter.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/13/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Classifies if text violates OpenAI's Content Policy.](https://platform.openai.com/docs/api-reference/moderations/create)
11 | public struct ModerationParameter: Encodable {
12 |
13 | /// The input text to classify, string or array.
14 | let input: Input
15 | /// Two content moderation models are available: text-moderation-stable and text-moderation-latest.
16 | /// The default is text-moderation-latest which will be automatically upgraded over time. This ensures you are always using our most accurate model. If you use text-moderation-stable, we will provide advanced notice before updating the model. Accuracy of text-moderation-stable may be slightly lower than for text-moderation-latest.
17 | let model: String?
18 |
19 | public enum Model: String {
20 | case stable = "text-moderation-stable"
21 | case latest = "text-moderation-latest"
22 | }
23 |
24 | public init(
25 | input: Input,
26 | model: Model? = nil)
27 | {
28 | self.input = input
29 | self.model = model?.rawValue
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/MultipartFormDataParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MultipartFormDataParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/11/23.
6 | //
7 |
8 | import Foundation
9 |
10 | public protocol MultipartFormDataParameters {
11 |
12 | func encode(boundary: String) -> Data
13 | }
14 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Runs/ModifyRunParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModifyRunParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/29/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Modifies a [Run](https://platform.openai.com/docs/api-reference/runs/modifyRun)
11 | /// Only the metadata can be modified.
12 | public struct ModifyRunParameters: Encodable {
13 |
14 | /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
15 | public var metadata: [String: String]
16 |
17 | public init(
18 | metadata: [String: String])
19 | {
20 | self.metadata = metadata
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Runs/RunToolsOutputParameter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RunToolsOutputParameter.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/16/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// When a run has the status: "requires_action" and required_action.type is submit_tool_outputs, this endpoint can be used to submit the [outputs](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) from the tool calls once they're all completed. All outputs must be submitted in a single request.
11 | public struct RunToolsOutputParameter: Encodable {
12 |
13 | public init(
14 | toolOutputs: [ToolOutput])
15 | {
16 | self.toolOutputs = toolOutputs
17 | }
18 |
19 | public struct ToolOutput: Encodable {
20 |
21 | /// The ID of the tool call in the `required_action` object within the run object the output is being submitted for.
22 | public let toolCallId: String?
23 | /// The output of the tool call to be submitted to continue the run.
24 | public let output: String?
25 |
26 | enum CodingKeys: String, CodingKey {
27 | case toolCallId = "tool_call_id"
28 | case output
29 | }
30 |
31 | public init(
32 | toolCallId: String?,
33 | output: String?)
34 | {
35 | self.toolCallId = toolCallId
36 | self.output = output
37 | }
38 | }
39 |
40 | /// A list of tools for which the outputs are being submitted.
41 | public let toolOutputs: [ToolOutput]
42 | /// If true, returns a stream of events that happen during the Run as server-sent events, terminating when the Run enters a terminal state with a data: [DONE] message.
43 | public var stream = false
44 |
45 | enum CodingKeys: String, CodingKey {
46 | case toolOutputs = "tool_outputs"
47 | case stream
48 | }
49 |
50 | }
51 |
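A construction sketch; the tool call ID and output payload are placeholders for values taken from the run's `required_action`.

import SwiftOpenAI

// Submits the result of a single tool call back to a run in the `requires_action` state.
let toolOutputs = RunToolsOutputParameter(
    toolOutputs: [
        .init(
            toolCallId: "call_abc123",          // placeholder, comes from required_action.submit_tool_outputs
            output: "{\"temperature\": 57}"),   // placeholder, JSON-encoded result of the local function
    ])
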
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Threads/CreateThreadParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CreateThreadParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/16/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Create a [Thread](https://platform.openai.com/docs/api-reference/threads/createThread)
11 | public struct CreateThreadParameters: Encodable {
12 |
13 | public init(
14 | messages: [MessageObject]? = nil,
15 | toolResources: ToolResources? = nil,
16 | metadata: [String: String]? = nil)
17 | {
18 | self.messages = messages
19 | self.toolResources = toolResources
20 | self.metadata = metadata
21 | }
22 |
23 | /// A list of [messages](https://platform.openai.com/docs/api-reference/messages) to start the thread with.
24 | public var messages: [MessageObject]?
25 | /// A set of resources that are used by the assistant's tools. The resources are specific to the type of tool. For example, the code_interpreter tool requires a list of file IDs, while the file_search tool requires a list of vector store IDs.
26 | public var toolResources: ToolResources?
27 | /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
28 | public var metadata: [String: String]?
29 |
30 | enum CodingKeys: String, CodingKey {
31 | case messages
32 | case toolResources = "tool_resources"
33 | case metadata
34 | }
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/Threads/ModifyThreadParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModifyThreadParameters.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/25/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Modifies a [Thread](https://platform.openai.com/docs/api-reference/threads/modifyThread)
11 | /// Only the metadata can be modified.
12 | public struct ModifyThreadParameters: Encodable {
13 |
14 | /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
15 | public var metadata: [String: String]
16 |
17 | public init(
18 | metadata: [String: String])
19 | {
20 | self.metadata = metadata
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/VectorStore/VectorStoreParameter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VectorStoreParameter.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/27/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Vector stores are used to store files for use by the file_search tool.
11 | ///
12 | /// Related guide: [File Search](https://platform.openai.com/docs/assistants/tools/file-search)
13 | ///
14 | /// Create a [vector store](https://platform.openai.com/docs/api-reference/vector-stores).
15 | public struct VectorStoreParameter: Encodable {
16 |
17 | public init(
18 | fileIDS: [String]? = nil,
19 | name: String? = nil,
20 | expiresAfter: ExpirationPolicy? = nil,
21 | metadata: [String: String]? = nil)
22 | {
23 | self.fileIDS = fileIDS
24 | self.name = name
25 | self.expiresAfter = expiresAfter
26 | self.metadata = metadata
27 | }
28 |
29 | /// Encodes only non-nil parameters; this avoids sending nil values when this parameter is used in the "modifyVectorStore" request.
30 | public func encode(to encoder: Encoder) throws {
31 | var container = encoder.container(keyedBy: CodingKeys.self)
32 | if let fileIDS {
33 | try container.encode(fileIDS, forKey: .fileIDS)
34 | }
35 | if let name {
36 | try container.encode(name, forKey: .name)
37 | }
38 | if let expiresAfter {
39 | try container.encode(expiresAfter, forKey: .expiresAfter)
40 | }
41 | if let metadata {
42 | try container.encode(metadata, forKey: .metadata)
43 | }
44 | }
45 |
46 | enum CodingKeys: String, CodingKey {
47 | case fileIDS = "file_ids"
48 | case name
49 | case expiresAfter = "expires_after"
50 | case metadata
51 | }
52 |
53 | /// A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that the vector store should use. Useful for tools like file_search that can access files.
54 | let fileIDS: [String]?
55 | /// The name of the vector store.
56 | let name: String?
57 | /// The expiration policy for a vector store.
58 | let expiresAfter: ExpirationPolicy?
59 | /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
60 | let metadata: [String: String]?
61 |
62 | }
63 |
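A construction sketch; the file IDs, name, and metadata are placeholders, and `expiresAfter` is deliberately omitted so the custom encoder above sends no expiration policy.

import SwiftOpenAI

// A named vector store over two previously uploaded files; no expiration policy is encoded.
let vectorStoreParameters = VectorStoreParameter(
    fileIDS: ["file-abc123", "file-def456"],
    name: "Support articles",
    metadata: ["team": "support"])
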
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/VectorStoreFileBatch/VectorStoreFileBatchParameter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VectorStoreFileBatchParameter.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/29/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Create a vector store file batch (Beta)](https://platform.openai.com/docs/api-reference/vector-stores-file-batches/createBatch)
11 | public struct VectorStoreFileBatchParameter: Encodable {
12 |
13 | /// A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that the vector store should use. Useful for tools like file_search that can access files.
14 | public let fileIDS: [String]
15 |
16 | enum CodingKeys: String, CodingKey {
17 | case fileIDS = "file_ids"
18 | }
19 |
20 | public init(fileIDS: [String]) {
21 | self.fileIDS = fileIDS
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Parameters/VectorStoreFiles/VectorStoreFileParameter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VectorStoreFileParameter.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/28/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Vector Store Files](https://platform.openai.com/docs/api-reference/vector-stores-files)
11 | public struct VectorStoreFileParameter: Encodable {
12 |
13 | /// A [File](https://platform.openai.com/docs/api-reference/files) ID that the vector store should use. Useful for tools like file_search that can access files.
14 | public let fileID: String
15 |
16 | enum CodingKeys: String, CodingKey {
17 | case fileID = "file_id"
18 | }
19 |
20 | public init(fileID: String) {
21 | self.fileID = fileID
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Assistants/AssistantStreamEvent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AssistantStreamEvent.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 3/22/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// A model that helps retrieve an object from an event.
11 | public enum AssistantStreamEvent {
12 |
13 | /// Occurs when a new thread is created.
14 | /// - data is a thread
15 | case threadCreated
16 |
17 | /// Occurs when a new run is created.
18 | /// - data is a run
19 | case threadRunCreated
20 |
21 | /// Occurs when a run moves to a queued status.
22 | /// - data is a run
23 | case threadRunQueued(RunObject)
24 |
25 | /// Occurs when a run moves to an in_progress status.
26 | /// - data is a run
27 | case threadRunInProgress(RunObject)
28 |
29 | /// Occurs when a run moves to a requires_action status.
30 | /// - data is a run
31 | case threadRunRequiresAction(RunObject)
32 |
33 | /// Occurs when a run is completed.
34 | /// - data is a run
35 | case threadRunCompleted(RunObject)
36 |
37 | /// Occurs when a run fails.
38 | /// - data is a run
39 | case threadRunFailed(RunObject)
40 |
41 | /// Occurs when a run moves to a cancelling status.
42 | /// - data is a run
43 | case threadRunCancelling(RunObject)
44 |
45 | /// Occurs when a run is cancelled.
46 | /// - data is a run
47 | case threadRunCancelled(RunObject)
48 |
49 | /// Occurs when a run expires.
50 | /// - data is a run
51 | case threadRunExpired(RunObject)
52 |
53 | /// Occurs when a run step is created.
54 | /// - data is a run step
55 | case threadRunStepCreated
56 |
57 | /// Occurs when a run step moves to an in_progress state.
58 | /// - data is a run step
59 | case threadRunStepInProgress
60 |
61 | /// Occurs when parts of a run step are being streamed.
62 | /// - data is a run step delta
63 | case threadRunStepDelta(RunStepDeltaObject)
64 |
65 | /// Occurs when a run step is completed.
66 | /// - data is a run step
67 | case threadRunStepCompleted
68 |
69 | /// Occurs when a run step fails.
70 | /// - data is a run step
71 | case threadRunStepFailed
72 |
73 | /// Occurs when a run step is cancelled.
74 | /// - data is a run step
75 | case threadRunStepCancelled
76 |
77 | /// Occurs when a run step expires.
78 | /// - data is a run step
79 | case threadRunStepExpired
80 |
81 | /// Occurs when a message is created.
82 | /// - data is a message
83 | case threadMessageCreated
84 |
85 | /// Occurs when a message moves to an in_progress state.
86 | /// - data is a message
87 | case threadMessageInProgress
88 |
89 | /// Occurs when parts of a message are being streamed.
90 | /// - data is a message delta
91 | case threadMessageDelta(MessageDeltaObject)
92 |
93 | /// Occurs when a message is completed.
94 | /// - data is a message
95 | case threadMessageCompleted
96 |
97 | /// Occurs when a message ends before it is completed.
98 | /// - data is a message
99 | case threadMessageIncomplete
100 |
101 | /// Occurs when an error occurs. This can happen due to an internal server error or a timeout.
102 | /// - data is an error
103 | case error
104 |
105 | /// Occurs when a stream ends.
106 | /// - data is [DONE]
107 | case done
108 |
109 | }
110 |
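A handling sketch over the payload-carrying cases shown above; the print statements are placeholders for real handling.

import SwiftOpenAI

// Reacts to the delta and action events; every other case falls through to `default`.
func handle(_ event: AssistantStreamEvent) {
    switch event {
    case .threadMessageDelta(let messageDelta):
        print("message delta:", messageDelta)
    case .threadRunStepDelta(let runStepDelta):
        print("run step delta:", runStepDelta)
    case .threadRunRequiresAction(let run):
        print("run requires action:", run)
    case .done:
        print("stream finished")
    default:
        break
    }
}
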
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Audio/AudioObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/10/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// The [audio](https://platform.openai.com/docs/api-reference/audio) response.
11 | public struct AudioObject: Decodable {
12 |
13 | public struct Word: Decodable {
14 |
15 | /// The text content of the word.
16 | public let word: String
17 | /// Start time of the word in seconds.
18 | public let start: Double
19 | /// End time of the word in seconds.
20 | public let end: Double
21 | }
22 |
23 | public struct Segment: Decodable {
24 | /// Unique identifier of the segment.
25 | public let id: Int
26 | /// Seek offset of the segment.
27 | public let seek: Int
28 | /// Start time of the segment in seconds.
29 | public let start: Double
30 | /// End time of the segment in seconds.
31 | public let end: Double
32 | /// Text content of the segment.
33 | public let text: String
34 | /// Array of token IDs for the text content.
35 | public let tokens: [Int]
36 | /// Temperature parameter used for generating the segment.
37 | public let temperature: Double
38 | /// Average logprob of the segment. If the value is lower than -1, consider the logprobs failed.
39 | public let avgLogprob: Double
40 | /// Compression ratio of the segment. If the value is greater than 2.4, consider the compression failed.
41 | public let compressionRatio: Double
42 | /// Probability of no speech in the segment. If the value is higher than 1.0 and the avg_logprob is below -1, consider this segment silent.
43 | public let noSpeechProb: Double
44 |
45 | enum CodingKeys: String, CodingKey {
46 | case id
47 | case seek
48 | case start
49 | case end
50 | case text
51 | case tokens
52 | case temperature
53 | case avgLogprob = "avg_logprob"
54 | case compressionRatio = "compression_ratio"
55 | case noSpeechProb = "no_speech_prob"
56 | }
57 | }
58 |
59 | /// The language of the input audio.
60 | public let language: String?
61 | /// The duration of the input audio.
62 | public let duration: String?
63 | /// The transcribed text if the request uses the `transcriptions` API, or the translated text if the request uses the `translations` endpoint.
64 | public let text: String
65 | /// Extracted words and their corresponding timestamps.
66 | public let words: [Word]?
67 | /// Segments of the transcribed text and their corresponding details.
68 | public let segments: [Segment]?
69 |
70 | }
71 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Audio/AudioSpeechObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioSpeechObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/14/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// The [audio speech](https://platform.openai.com/docs/api-reference/audio/createSpeech) response.
11 | public struct AudioSpeechObject: Decodable {
12 |
13 | /// The audio file content.
14 | public let output: Data
15 | }
16 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Batch/BatchObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // BatchObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/27/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct BatchObject: Decodable {
11 |
12 | public struct Error: Decodable {
13 |
14 | let object: String
15 | let data: [Data]
16 |
17 | public struct Data: Decodable {
18 |
19 | /// An error code identifying the error type.
20 | let code: String
21 | /// A human-readable message providing more details about the error.
22 | let message: String
23 | /// The name of the parameter that caused the error, if applicable.
24 | let param: String?
25 | /// The line number of the input file where the error occurred, if applicable.
26 | let line: Int?
27 | }
28 | }
29 |
30 | public struct RequestCount: Decodable {
31 |
32 | /// Total number of requests in the batch.
33 | let total: Int
34 | /// Number of requests that have been completed successfully.
35 | let completed: Int
36 | /// Number of requests that have failed.
37 | let failed: Int
38 | }
39 |
40 | enum CodingKeys: String, CodingKey {
41 | case id
42 | case object
43 | case endpoint
44 | case errors
45 | case inputFileID = "input_file_id"
46 | case completionWindow = "completion_window"
47 | case status
48 | case outputFileID = "output_file_id"
49 | case errorFileID = "error_file_id"
50 | case createdAt = "created_at"
51 | case inProgressAt = "in_progress_at"
52 | case expiresAt = "expires_at"
53 | case finalizingAt = "finalizing_at"
54 | case completedAt = "completed_at"
55 | case failedAt = "failed_at"
56 | case expiredAt = "expired_at"
57 | case cancellingAt = "cancelling_at"
58 | case cancelledAt = "cancelled_at"
59 | case requestCounts = "request_counts"
60 | case metadata
61 | }
62 |
63 | let id: String
64 | /// The object type, which is always batch.
65 | let object: String
66 | /// The OpenAI API endpoint used by the batch.
67 | let endpoint: String
68 |
69 | let errors: Error
70 | /// The ID of the input file for the batch.
71 | let inputFileID: String
72 | /// The time frame within which the batch should be processed.
73 | let completionWindow: String
74 | /// The current status of the batch.
75 | let status: String
76 | /// The ID of the file containing the outputs of successfully executed requests.
77 | let outputFileID: String
78 | /// The ID of the file containing the outputs of requests with errors.
79 | let errorFileID: String
80 | /// The Unix timestamp (in seconds) for when the batch was created.
81 | let createdAt: Int
82 | /// The Unix timestamp (in seconds) for when the batch started processing.
83 | let inProgressAt: Int
84 | /// The Unix timestamp (in seconds) for when the batch will expire.
85 | let expiresAt: Int
86 | /// The Unix timestamp (in seconds) for when the batch started finalizing.
87 | let finalizingAt: Int
88 | /// The Unix timestamp (in seconds) for when the batch was completed.
89 | let completedAt: Int
90 | /// The Unix timestamp (in seconds) for when the batch failed.
91 | let failedAt: Int
92 | /// The Unix timestamp (in seconds) for when the batch expired.
93 | let expiredAt: Int
94 | /// The Unix timestamp (in seconds) for when the batch started cancelling.
95 | let cancellingAt: Int
96 | /// The Unix timestamp (in seconds) for when the batch was cancelled.
97 | let cancelledAt: Int
98 | /// The request counts for different statuses within the batch.
99 | let requestCounts: RequestCount
100 | /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
101 | let metadata: [String: String]
102 |
103 | }
104 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Chat/ChatUsage.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatUsage.swift
3 | // SwiftOpenAI
4 | //
5 | // Created by James Rochabrun on 1/3/25.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - ChatUsage
11 |
12 | public struct ChatUsage: Decodable {
13 | /// Number of tokens in the prompt
14 | public let promptTokens: Int?
15 | /// Number of tokens in the generated completion
16 | public let completionTokens: Int?
17 | /// Total number of tokens used in the request (prompt + completion)
18 | public let totalTokens: Int?
19 | /// Detailed breakdown of prompt tokens
20 | public let promptTokensDetails: PromptTokenDetails?
21 | /// Detailed breakdown of completion tokens
22 | public let completionTokensDetails: CompletionTokenDetails?
23 |
24 | enum CodingKeys: String, CodingKey {
25 | case promptTokens = "prompt_tokens"
26 | case completionTokens = "completion_tokens"
27 | case totalTokens = "total_tokens"
28 | case promptTokensDetails = "prompt_tokens_details"
29 | case completionTokensDetails = "completion_tokens_details"
30 | }
31 | }
32 |
33 | // MARK: - PromptTokenDetails
34 |
35 | public struct PromptTokenDetails: Decodable {
36 | /// Number of tokens retrieved from cache
37 | public let cachedTokens: Int?
38 | /// Number of tokens used for audio processing
39 | public let audioTokens: Int?
40 |
41 | enum CodingKeys: String, CodingKey {
42 | case cachedTokens = "cached_tokens"
43 | case audioTokens = "audio_tokens"
44 | }
45 | }
46 |
47 | // MARK: - CompletionTokenDetails
48 |
49 | public struct CompletionTokenDetails: Decodable {
50 | /// Number of tokens used for reasoning
51 | public let reasoningTokens: Int?
52 | /// Number of tokens used for audio processing
53 | public let audioTokens: Int?
54 | /// Number of tokens in accepted predictions
55 | public let acceptedPredictionTokens: Int?
56 | /// Number of tokens in rejected predictions
57 | public let rejectedPredictionTokens: Int?
58 |
59 | enum CodingKeys: String, CodingKey {
60 | case reasoningTokens = "reasoning_tokens"
61 | case audioTokens = "audio_tokens"
62 | case acceptedPredictionTokens = "accepted_prediction_tokens"
63 | case rejectedPredictionTokens = "rejected_prediction_tokens"
64 | }
65 | }
66 |
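A brief decoding sketch for `ChatUsage` (not part of the file above; it assumes the type is in scope, e.g. via `import SwiftOpenAI`, and the JSON payload is made up for illustration):

```swift
import Foundation

let usageJSON = Data("""
{
  "prompt_tokens": 120,
  "completion_tokens": 45,
  "total_tokens": 165,
  "prompt_tokens_details": { "cached_tokens": 64, "audio_tokens": 0 },
  "completion_tokens_details": { "reasoning_tokens": 12, "audio_tokens": 0 }
}
""".utf8)

do {
    let usage = try JSONDecoder().decode(ChatUsage.self, from: usageJSON)
    // Missing keys simply decode to nil, since every property is optional.
    let cached = usage.promptTokensDetails?.cachedTokens ?? 0
    let reasoning = usage.completionTokensDetails?.reasoningTokens ?? 0
    print("total: \(usage.totalTokens ?? 0), cached: \(cached), reasoning: \(reasoning)")
} catch {
    print("decoding failed: \(error)")
}
```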
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Delta.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Delta.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 3/22/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Protocol for Assistant Stream Delta.
11 | /// Defines a set of requirements for objects that can be included in an assistant event stream, such as `RunStepDeltaObject` or `MessageDeltaObject`.
12 | public protocol Delta: Decodable {
13 | associatedtype T
14 | var id: String { get }
15 | var object: String { get }
16 | var delta: T { get }
17 | }
18 |
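A small sketch of what the protocol buys you (not part of the file above): a generic helper can accept any conforming delta type, such as `MessageDeltaObject` or `RunStepDeltaObject`; the `logDelta` name is illustrative.

```swift
// Hypothetical helper constrained to `Delta`; it relies only on the
// requirements declared above (`id`, `object`, and the associated `delta`).
func logDelta<D: Delta>(_ event: D) {
    print("received \(event.object) for \(event.id): \(event.delta)")
}

// Both stream delta types can be passed to the same helper:
// logDelta(messageDelta)   // MessageDeltaObject
// logDelta(runStepDelta)   // RunStepDeltaObject
```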
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Embedding/EmbeddingObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // EmbeddingObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/12/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Represents an embedding vector returned by embedding endpoint.](https://platform.openai.com/docs/api-reference/embeddings/object)
11 | public struct EmbeddingObject: Decodable {
12 |
13 | /// The object type, which is always "embedding".
14 | public let object: String
15 | /// The embedding vector, which is a list of floats. The length of the vector depends on the model, as listed in the [embedding guide](https://platform.openai.com/docs/guides/embeddings).
16 | public let embedding: [Float]
17 | /// The index of the embedding in the list of embeddings.
18 | public let index: Int
19 | }
20 |
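Since `embedding` is a plain `[Float]`, a common next step is comparing two vectors. A minimal cosine-similarity sketch (not part of the file above; `cosineSimilarity` is an illustrative helper, not a library API):

```swift
/// Cosine similarity between two vectors of equal length; 1.0 means identical direction.
func cosineSimilarity(_ a: [Float], _ b: [Float]) -> Float {
    precondition(a.count == b.count, "vectors must have the same dimension")
    var dot: Float = 0, normA: Float = 0, normB: Float = 0
    for i in a.indices {
        dot += a[i] * b[i]
        normA += a[i] * a[i]
        normB += b[i] * b[i]
    }
    let denominator = normA.squareRoot() * normB.squareRoot()
    return denominator == 0 ? 0 : dot / denominator
}

// Usage with two decoded `EmbeddingObject` values:
// let score = cosineSimilarity(first.embedding, second.embedding)
```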
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/File/FileObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FileObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/16/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// The [File object](https://platform.openai.com/docs/api-reference/files/object) represents a document that has been uploaded to OpenAI.
11 | public struct FileObject: Decodable {
12 |
13 | public init(
14 | id: String,
15 | bytes: Int,
16 | createdAt: Int,
17 | filename: String,
18 | object: String,
19 | purpose: String,
20 | status: Status,
21 | statusDetails: String?)
22 | {
23 | self.id = id
24 | self.bytes = bytes
25 | self.createdAt = createdAt
26 | self.filename = filename
27 | self.object = object
28 | self.purpose = purpose
29 | self.status = status.rawValue
30 | self.statusDetails = statusDetails
31 | }
32 |
33 | public enum Status: String {
34 | case uploaded
35 | case processed
36 | case pending
37 | case error
38 | case deleting
39 | case deleted
40 | }
41 |
42 | /// The file identifier, which can be referenced in the API endpoints.
43 | public let id: String
44 | /// The size of the file in bytes.
45 | public let bytes: Int?
46 | /// The Unix timestamp (in seconds) for when the file was created.
47 | public let createdAt: Int
48 | /// The name of the file.
49 | public let filename: String
50 | /// The object type, which is always "file".
51 | public let object: String
52 | /// The intended purpose of the file. Currently, only "fine-tune" is supported.
53 | public let purpose: String
54 | /// Deprecated. The current status of the file, which can be either uploaded, processed, or error.
55 | @available(*, deprecated, message: "Deprecated")
56 | public let status: String?
57 | /// Additional details about the status of the file. If the file is in the error state, this will include a message describing the error.
58 | @available(
59 | *,
60 | deprecated,
61 | message: "Deprecated. For details on why a fine-tuning training file failed validation, see the error field on fine_tuning.job")
62 | public let statusDetails: String?
63 |
64 | enum CodingKeys: String, CodingKey {
65 | case id
66 | case bytes
67 | case createdAt = "created_at"
68 | case filename
69 | case object
70 | case purpose
71 | case status
72 | case statusDetails = "status_details"
73 | }
74 |
75 | }
76 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/FineTuning/FineTuningJobEventObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FineTuningJobEventObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/17/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Fine-tuning job event object](https://platform.openai.com/docs/api-reference/fine-tuning/event-object)
11 | public struct FineTuningJobEventObject: Decodable {
12 |
13 | public struct Data: Decodable {
14 | public let step: Int
15 | public let trainLoss: Double
16 | public let trainMeanTokenAccuracy: Double
17 |
18 | enum CodingKeys: String, CodingKey {
19 | case step
20 | case trainLoss = "train_loss"
21 | case trainMeanTokenAccuracy = "train_mean_token_accuracy"
22 | }
23 | }
24 |
25 | public let id: String
26 |
27 | public let createdAt: Int
28 |
29 | public let level: String
30 |
31 | public let message: String
32 |
33 | public let object: String
34 |
35 | public let type: String?
36 |
37 | public let data: Data?
38 |
39 | enum CodingKeys: String, CodingKey {
40 | case id
41 | case createdAt = "created_at"
42 | case level
43 | case message
44 | case object
45 | case type
46 | case data
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Image/ImageObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/12/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [Represents the url or the content of an image generated by the OpenAI API.](https://platform.openai.com/docs/api-reference/images/object)
11 | public struct ImageObject: Decodable {
12 | /// The URL of the generated image, if response_format is url (default).
13 | public let url: URL?
14 | /// The base64-encoded JSON of the generated image, if response_format is b64_json.
15 | public let b64Json: String?
16 | /// The prompt that was used to generate the image, if there was any revision to the prompt.
17 | public let revisedPrompt: String?
18 |
19 | enum CodingKeys: String, CodingKey {
20 | case url
21 | case b64Json = "b64_json"
22 | case revisedPrompt = "revised_prompt"
23 | }
24 | }
25 |
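A sketch for turning an `ImageObject` into raw image bytes (not part of the file above; `loadImageData` is an illustrative helper and assumes macOS 12 / iOS 15 or later for the async `URLSession` API):

```swift
import Foundation

// Prefer the base64 payload when response_format was b64_json; otherwise fetch the URL.
func loadImageData(from image: ImageObject) async throws -> Data? {
    if let b64 = image.b64Json, let data = Data(base64Encoded: b64) {
        return data
    }
    if let url = image.url {
        let (data, _) = try await URLSession.shared.data(from: url)
        return data
    }
    return nil
}
```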
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/ImageGen/CreateImageResponse.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CreateImageResponse.swift
3 | // SwiftOpenAI
4 | //
5 | // Created by James Rochabrun on 4/24/25.
6 | //
7 |
8 | import Foundation
9 |
10 |
11 |
12 | /// Response from the 'Create Image' endpoint:
13 | /// https://platform.openai.com/docs/api-reference/images/create
14 | public struct CreateImageResponse: Decodable {
15 | public struct ImageData: Decodable, Equatable {
16 | /// Base64-encoded image data (only present for gpt-image-1 or if `response_format = b64_json`)
17 | public let b64JSON: String?
18 |
19 | /// The URL of the generated image (default for DALL·E 2 and 3, absent for gpt-image-1)
20 | public let url: String?
21 |
22 | /// The revised prompt used (DALL·E 3 only)
23 | public let revisedPrompt: String?
24 |
25 | enum CodingKeys: String, CodingKey {
26 | case b64JSON = "b64_json"
27 | case url
28 | case revisedPrompt = "revised_prompt"
29 | }
30 | }
31 |
32 | public struct Usage: Decodable {
33 | public struct InputTokensDetails: Decodable {
34 | public let textTokens: Int
35 | public let imageTokens: Int
36 |
37 | enum CodingKeys: String, CodingKey {
38 | case textTokens = "text_tokens"
39 | case imageTokens = "image_tokens"
40 | }
41 | }
42 |
43 | /// The number of input tokens (text + image)
44 | public let inputTokens: Int
45 |
46 | /// The number of output tokens (image)
47 | public let outputTokens: Int
48 |
49 | /// Total token usage
50 | public let totalTokens: Int
51 |
52 | /// Input token details (optional)
53 | public let inputTokensDetails: InputTokensDetails?
54 |
55 | enum CodingKeys: String, CodingKey {
56 | case inputTokens = "input_tokens"
57 | case outputTokens = "output_tokens"
58 | case totalTokens = "total_tokens"
59 | case inputTokensDetails = "input_tokens_details"
60 | }
61 | }
62 |
63 | /// The Unix timestamp (in seconds) of when the image was created
64 | public let created: TimeInterval?
65 |
66 | /// The list of generated images
67 | public let data: [ImageData]?
68 |
69 | /// Token usage info (only for gpt-image-1)
70 | public let usage: Usage?
71 |
72 | enum CodingKeys: String, CodingKey {
73 | case created
74 | case data
75 | case usage
76 | }
77 | }
78 |
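A decoding sketch (not part of the file above) for a hypothetical `gpt-image-1`-style payload, showing the base64 image data and the optional `usage` block:

```swift
import Foundation

let responseJSON = Data("""
{
  "created": 1713833628,
  "data": [{ "b64_json": "aGVsbG8=" }],
  "usage": {
    "input_tokens": 50,
    "output_tokens": 1056,
    "total_tokens": 1106,
    "input_tokens_details": { "text_tokens": 50, "image_tokens": 0 }
  }
}
""".utf8)

do {
    let response = try JSONDecoder().decode(CreateImageResponse.self, from: responseJSON)
    if let b64 = response.data?.first?.b64JSON, let bytes = Data(base64Encoded: b64) {
        print("decoded \(bytes.count) image byte(s), total tokens: \(response.usage?.totalTokens ?? 0)")
    }
} catch {
    print("decoding failed: \(error)")
}
```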
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Messages/MessageDeltaObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessageDeltaObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 3/17/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [MessageDeltaObject](https://platform.openai.com/docs/api-reference/assistants-streaming/message-delta-object)
11 | ///
12 | /// Represents a message delta i.e. any changed fields on a message during streaming.
13 | public struct MessageDeltaObject: Delta {
14 |
15 | public struct Delta: Decodable {
16 |
17 | /// The entity that produced the message. One of user or assistant.
18 | public let role: String?
19 | /// The content of the message in array of text and/or images.
20 | public let content: [AssistantMessageContent]
21 |
22 | enum Role: String {
23 | case user
24 | case assistant
25 | }
26 |
27 | enum CodingKeys: String, CodingKey {
28 | case role
29 | case content
30 | }
31 | }
32 |
33 | /// The identifier of the message, which can be referenced in API endpoints.
34 | public let id: String
35 | /// The object type, which is always thread.message.delta.
36 | public let object: String
37 | /// The delta containing the fields that have changed on the Message.
38 | public let delta: Delta
39 |
40 | }
41 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Messages/MessageObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessageObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/15/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// BETA.
11 | /// Represents a [message](https://platform.openai.com/docs/api-reference/messages) within a [thread](https://platform.openai.com/docs/api-reference/threads).
12 | /// [Message Object](https://platform.openai.com/docs/api-reference/messages/object)
13 | public struct MessageObject: Codable {
14 |
15 | public init(
16 | id: String,
17 | object: String,
18 | createdAt: Int,
19 | threadID: String,
20 | status: String?,
21 | incompleteDetails: IncompleteDetails?,
22 | completedAt: Int?,
23 | role: String,
24 | content: [MessageContent],
25 | assistantID: String?,
26 | runID: String?,
27 | attachments: [MessageAttachment]?,
28 | metadata: [String: String]?)
29 | {
30 | self.id = id
31 | self.object = object
32 | self.createdAt = createdAt
33 | self.threadID = threadID
34 | self.status = status
35 | self.incompleteDetails = incompleteDetails
36 | self.completedAt = completedAt
37 | self.role = role
38 | self.content = content
39 | self.assistantID = assistantID
40 | self.runID = runID
41 | self.attachments = attachments
42 | self.metadata = metadata
43 | }
44 |
45 | /// The identifier, which can be referenced in API endpoints.
46 | public let id: String
47 | /// The object type, which is always thread.message.
48 | public let object: String
49 | /// The Unix timestamp (in seconds) for when the message was created.
50 | public let createdAt: Int
51 | /// The [thread](https://platform.openai.com/docs/api-reference/threads) ID that this message belongs to.
52 | public let threadID: String
53 | /// The status of the message, which can be either in_progress, incomplete, or completed.
54 | public let status: String?
55 | /// On an incomplete message, details about why the message is incomplete.
56 | public let incompleteDetails: IncompleteDetails?
57 | /// The Unix timestamp (in seconds) for when the message was completed.
58 | public let completedAt: Int?
59 | /// The entity that produced the message. One of user or assistant.
60 | public let role: String
61 | /// The content of the message in array of text and/or images.
62 | public let content: [MessageContent]
63 | /// If applicable, the ID of the [assistant](https://platform.openai.com/docs/api-reference/assistants) that authored this message.
64 | public let assistantID: String?
65 | /// If applicable, the ID of the [run](https://platform.openai.com/docs/api-reference/runs) associated with the authoring of this message.
66 | public let runID: String?
67 | /// A list of files attached to the message, and the tools they were added to.
68 | public let attachments: [MessageAttachment]?
69 | /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
70 | public let metadata: [String: String]?
71 |
72 | enum Role: String {
73 | case user
74 | case assistant
75 | }
76 |
77 | enum CodingKeys: String, CodingKey {
78 | case id
79 | case object
80 | case createdAt = "created_at"
81 | case threadID = "thread_id"
82 | case status
83 | case incompleteDetails = "incomplete_details"
84 | case completedAt = "completed_at"
85 | case role
86 | case content
87 | case assistantID = "assistant_id"
88 | case runID = "run_id"
89 | case attachments
90 | case metadata
91 | }
92 |
93 | }
94 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Model/ModelObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/13/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Describes an OpenAI [model](https://platform.openai.com/docs/api-reference/models/object) offering that can be used with the API.
11 | public struct ModelObject: Decodable {
12 |
13 | public struct Permission: Decodable {
14 | public let id: String?
15 | public let object: String?
16 | public let created: Int?
17 | public let allowCreateEngine: Bool?
18 | public let allowSampling: Bool?
19 | public let allowLogprobs: Bool?
20 | public let allowSearchIndices: Bool?
21 | public let allowView: Bool?
22 | public let allowFineTuning: Bool?
23 | public let organization: String?
24 | public let group: String?
25 | public let isBlocking: Bool?
26 |
27 | enum CodingKeys: String, CodingKey {
28 | case id
29 | case object
30 | case created
31 | case allowCreateEngine = "allow_create_engine"
32 | case allowSampling = "allow_sampling"
33 | case allowLogprobs = "allow_logprobs"
34 | case allowSearchIndices = "allow_search_indices"
35 | case allowView = "allow_view"
36 | case allowFineTuning = "allow_fine_tuning"
37 | case organization
38 | case group
39 | case isBlocking = "is_blocking"
40 | }
41 | }
42 |
43 | /// The model identifier, which can be referenced in the API endpoints.
44 | public let id: String
45 | /// The Unix timestamp (in seconds) when the model was created.
46 | public let created: Int?
47 | /// The object type, which is always "model".
48 | public let object: String
49 | /// The organization that owns the model.
50 | public let ownedBy: String
51 | /// An array representing the current permissions of a model. Each element in the array corresponds to a specific permission setting. If there are no permissions or if the data is unavailable, the array may be nil.
52 | public let permission: [Permission]?
53 |
54 | enum CodingKeys: String, CodingKey {
55 | case id
56 | case created
57 | case object
58 | case ownedBy = "owned_by"
59 | case permission
60 | }
61 |
62 | }
63 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Moderation/ModerationObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModerationObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/13/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// The [moderation object](https://platform.openai.com/docs/api-reference/moderations/object). Represents a policy compliance report by OpenAI's content moderation model against a given input.
11 | public struct ModerationObject: Decodable {
12 |
13 | public struct Moderation: Decodable {
14 |
15 | public struct Category<T: Decodable>: Decodable {
16 |
17 | /// Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste. Hateful content aimed at non-protected groups (e.g., chess players) is harassment.
18 | public let hate: T
19 | /// Hateful content that also includes violence or serious harm towards the targeted group based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.
20 | public let hateThreatening: T
21 | /// Content that expresses, incites, or promotes harassing language towards any target.
22 | public let harassment: T
23 | /// Harassment content that also includes violence or serious harm towards any target.
24 | public let harassmentThreatening: T
25 | /// Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.
26 | public let selfHarm: T
27 | /// Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.
28 | public let selfHarmIntent: T
29 | /// Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.
30 | public let selfHarmInstructions: T
31 | /// Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).
32 | public let sexual: T
33 | /// Sexual content that includes an individual who is under 18 years old.
34 | public let sexualMinors: T
35 | /// Content that depicts death, violence, or physical injury.
36 | public let violence: T
37 | /// Content that depicts death, violence, or physical injury in graphic detail.
38 | public let violenceGraphic: T
39 |
40 | enum CodingKeys: String, CodingKey {
41 | case hate
42 | case hateThreatening = "hate/threatening"
43 | case harassment
44 | case harassmentThreatening = "harassment/threatening"
45 | case selfHarm = "self-harm"
46 | case selfHarmIntent = "self-harm/intent"
47 | case selfHarmInstructions = "self-harm/instructions"
48 | case sexual
49 | case sexualMinors = "sexual/minors"
50 | case violence
51 | case violenceGraphic = "violence/graphic"
52 | }
53 | }
54 |
55 | /// Whether the content violates OpenAI's usage policies.
56 | public let flagged: Bool
57 | /// A list of the categories, and whether they are flagged or not.
58 | public let categories: Category<Bool>
59 | /// A list of the categories along with their scores as predicted by the model.
60 | public let categoryScores: Category<Double>
61 |
62 | enum CodingKeys: String, CodingKey {
63 | case categories
64 | case categoryScores = "category_scores"
65 | case flagged
66 | }
67 | }
68 |
69 | /// The unique identifier for the moderation request.
70 | public let id: String
71 | /// The model used to generate the moderation results.
72 | public let model: String
73 | /// A list of moderation objects.
74 | public let results: [Moderation]
75 |
76 | public var isFlagged: Bool {
77 | results.map(\.flagged).contains(true)
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/OpenAIErrorResponse.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAIErrorResponse.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/13/23.
6 | //
7 |
8 | import Foundation
9 |
10 | // {
11 | // "error": {
12 | //    "message": "Invalid parameter: messages with role 'tool' must be a response to a preceding message with 'tool_calls'.",
13 | // "type": "invalid_request_error",
14 | // "param": "messages.[2].role",
15 | // "code": null
16 | // }
17 | // }
18 |
19 | public struct OpenAIErrorResponse: Decodable {
20 |
21 | public let error: Error
22 |
23 | public struct Error: Decodable {
24 | public let message: String?
25 | public let type: String?
26 | public let param: String?
27 | public let code: String?
28 | }
29 | }
30 |
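A decoding sketch (not part of the file above) using the same error shape as the comment:

```swift
import Foundation

let errorJSON = Data("""
{
  "error": {
    "message": "Invalid parameter: messages with role 'tool' must be a response to a preceding message with 'tool_calls'.",
    "type": "invalid_request_error",
    "param": "messages.[2].role",
    "code": null
  }
}
""".utf8)

do {
    let response = try JSONDecoder().decode(OpenAIErrorResponse.self, from: errorJSON)
    // All fields are optional, so explicit nulls and missing keys both decode cleanly.
    print("\(response.error.type ?? "unknown"): \(response.error.message ?? "")")
} catch {
    print("decoding failed: \(error)")
}
```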
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/OpenAIResponse.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAIResponse.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 10/13/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// A generic structure for OpenAI API responses.
11 | /// e.g:
12 | /// ```json
13 | /// {
14 | /// "object": "list",
15 | /// "data": [
16 | /// {
17 | /// "object": "embedding",
18 | /// "embedding": [
19 | /// 0.0023064255,
20 | /// -0.009327292,
21 | /// .... (1536 floats total for ada-002)
22 | /// -0.0028842222,
23 | /// ],
24 | /// "index": 0
25 | /// }
26 | /// ],
27 | /// "model": "text-embedding-ada-002",
28 | /// "usage": {
29 | /// "prompt_tokens": 8,
30 | /// "total_tokens": 8
31 | /// }
32 | /// }
33 | public struct OpenAIResponse<T: Decodable>: Decodable {
34 |
35 | public struct Usage: Decodable {
36 |
37 | public let promptTokens: Int
38 | public let totalTokens: Int
39 |
40 | enum CodingKeys: String, CodingKey {
41 | case promptTokens = "prompt_tokens"
42 | case totalTokens = "total_tokens"
43 | }
44 | }
45 |
46 | public let object: String?
47 | public let data: [T]
48 | public let model: String?
49 | public let usage: Usage?
50 | public let hasMore: Bool?
51 | public let created: Int?
52 | public let firstID: String?
53 | public let lastID: String?
54 |
55 | enum CodingKeys: String, CodingKey {
56 | case object
57 | case data
58 | case model
59 | case usage
60 | case hasMore = "has_more"
61 | case created
62 | case firstID = "first_id"
63 | case lastID = "last_id"
64 | }
65 | }
66 |
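With the generic parameter in place, list-style endpoints decode into `OpenAIResponse<T>`. A sketch (not part of the file above) using a shortened version of the embeddings payload from the doc comment:

```swift
import Foundation

let listJSON = Data("""
{
  "object": "list",
  "data": [
    { "object": "embedding", "embedding": [0.0023064255, -0.009327292], "index": 0 }
  ],
  "model": "text-embedding-ada-002",
  "usage": { "prompt_tokens": 8, "total_tokens": 8 }
}
""".utf8)

do {
    let response = try JSONDecoder().decode(OpenAIResponse<EmbeddingObject>.self, from: listJSON)
    print("decoded \(response.data.count) embedding(s) from \(response.model ?? "unknown model")")
} catch {
    print("decoding failed: \(error)")
}
```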
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Runs/RunStepDeltaObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RunStepDeltaObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 3/17/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Represents a [run step delta](https://platform.openai.com/docs/api-reference/assistants-streaming/run-step-delta-object) i.e. any changed fields on a run step during streaming.
11 | public struct RunStepDeltaObject: Delta {
12 |
13 | /// The identifier of the run step, which can be referenced in API endpoints.
14 | public let id: String
15 | /// The object type, which is always thread.run.step.delta.
16 | public let object: String
17 | /// The delta containing the fields that have changed on the run step.
18 | public let delta: Delta
19 |
20 | public struct Delta: Decodable {
21 |
22 | /// The details of the run step.
23 | public let stepDetails: RunStepDetails
24 |
25 | private enum CodingKeys: String, CodingKey {
26 | case stepDetails = "step_details"
27 | }
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/Threads/ThreadObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ThreadObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/15/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /// BETA
11 | /// A [thread object](https://platform.openai.com/docs/api-reference/threads) represents a thread that contains [messages](https://platform.openai.com/docs/api-reference/messages).
12 | public struct ThreadObject: Decodable {
13 |
14 | public init(
15 | id: String,
16 | object: String,
17 | createdAt: Int,
18 | metadata: [String: String])
19 | {
20 | self.id = id
21 | self.object = object
22 | self.createdAt = createdAt
23 | self.metadata = metadata
24 | }
25 |
26 | /// The identifier, which can be referenced in API endpoints.
27 | public let id: String
28 | /// The object type, which is always thread.
29 | public let object: String
30 | /// The Unix timestamp (in seconds) for when the thread was created.
31 | public let createdAt: Int
32 | /// A set of resources that are used by the assistant's tools. The resources are specific to the type of tool. For example, the code_interpreter tool requires a list of file IDs, while the file_search tool requires a list of vector store IDs.
33 | public var toolResources: ToolResources?
34 | /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
35 | public let metadata: [String: String]
36 |
37 | enum CodingKeys: String, CodingKey {
38 | case id
39 | case object
40 | case createdAt = "created_at"
41 | case toolResources = "tool_resources"
42 | case metadata
43 | }
44 |
45 | }
46 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/VectorStore/VectorStoreObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VectorStoreObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/27/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct VectorStoreObject: Decodable {
11 |
12 | /// The identifier, which can be referenced in API endpoints.
13 | public let id: String
14 | /// The object type, which is always vector_store.
15 | public let object: String
16 | /// The Unix timestamp (in seconds) for when the vector store was created.
17 | public let createdAt: Int
18 | /// The name of the vector store.
19 | public let name: String
20 | /// The total number of bytes used by the files in the vector store.
21 | public let usageBytes: Int
22 |
23 | public let fileCounts: FileCount
24 | /// The status of the vector store, which can be either expired, in_progress, or completed. A status of completed indicates that the vector store is ready for use.
25 | public let status: String
26 | /// The expiration policy for a vector store.
27 | public let expiresAfter: ExpirationPolicy?
28 | /// The Unix timestamp (in seconds) for when the vector store will expire.
29 | public let expiresAt: Int?
30 | /// The Unix timestamp (in seconds) for when the vector store was last active.
31 | public let lastActiveAt: Int?
33 | /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
33 | public let metadata: [String: String]
34 |
35 | enum CodingKeys: String, CodingKey {
36 | case id
37 | case object
38 | case createdAt = "created_at"
39 | case name
40 | case usageBytes = "usage_bytes"
41 | case fileCounts = "file_counts"
42 | case status
43 | case expiresAfter = "expires_after"
44 | case expiresAt = "expires_at"
45 | case lastActiveAt = "last_active_at"
46 | case metadata
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/VectorStoreFile/VectorStoreFileObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VectorStoreFileObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/28/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [The Vector store file object](https://platform.openai.com/docs/api-reference/vector-stores-files/file-object)
11 | public struct VectorStoreFileObject: Decodable {
12 |
13 | /// The identifier, which can be referenced in API endpoints.
14 | public let id: String
15 | /// The object type, which is always vector_store.file.
16 | public let object: String
17 | /// The total vector store usage in bytes. Note that this may be different from the original file size.
18 | public let usageBytes: Int
19 | /// The Unix timestamp (in seconds) for when the vector store file was created.
20 | public let createdAt: Int
21 | /// The ID of the [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) that the [File](https://platform.openai.com/docs/api-reference/files) is attached to.
22 | public let vectorStoreID: String
23 | /// The status of the vector store file, which can be either in_progress, completed, cancelled, or failed. The status completed indicates that the vector store file is ready for use.
24 | public let status: String
25 | /// The last error associated with this vector store file. Will be null if there are no errors.
26 | public let lastError: LastError?
27 | }
28 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/ResponseModels/VectorStoreFileBatch/VectorStoreFileBatchObject.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VectorStoreFileBatchObject.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/29/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// [The vector store files batch object (Beta)](https://platform.openai.com/docs/api-reference/vector-stores-file-batches/batch-object)
11 | public struct VectorStoreFileBatchObject: Decodable {
12 |
13 | /// The identifier, which can be referenced in API endpoints.
14 | public let id: String
15 | /// The object type, which is always vector_store.file_batch.
16 | public let object: String
17 | /// The Unix timestamp (in seconds) for when the vector store files batch was created.
18 | public let createdAt: Int
19 | /// The ID of the [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) that the [File](https://platform.openai.com/docs/api-reference/files) is attached to.
20 | public let vectorStoreID: String
21 | /// The status of the vector store files batch, which can be either in_progress, completed, cancelled or failed.
22 | public let status: String
23 |
24 | public let fileCounts: FileCount
25 |
26 | enum CodingKeys: String, CodingKey {
27 | case id
28 | case object
29 | case createdAt = "created_at"
30 | case vectorStoreID = "vector_store_id"
31 | case status
32 | case fileCounts = "file_counts"
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/DeletionStatus.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DeletionStatus.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/27/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct DeletionStatus: Decodable {
11 | public let id: String
12 | public let object: String
13 | public let deleted: Bool
14 | }
15 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/ExpirationPolicy.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ExpirationPolicy.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/27/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct ExpirationPolicy: Codable {
11 |
12 | /// Anchor timestamp after which the expiration policy applies. Supported anchors: last_active_at.
13 | let anchor: String
14 | /// The number of days after the anchor time that the vector store will expire.
15 | let days: Int
16 | }
17 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/FileCount.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FileCount.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/29/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct FileCount: Decodable {
11 |
12 | /// The number of files that are currently being processed.
13 | let inProgress: Int
14 | /// The number of files that have been successfully processed.
15 | let completed: Int
16 | /// The number of files that have failed to process.
17 | let failed: Int
18 | /// The number of files that were cancelled.
19 | let cancelled: Int
20 | /// The total number of files.
21 | let total: Int
22 |
23 | enum CodingKeys: String, CodingKey {
24 | case inProgress = "in_progress"
25 | case completed
26 | case failed
27 | case cancelled
28 | case total
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/IncompleteDetails.swift:
--------------------------------------------------------------------------------
1 | //
2 | // IncompleteDetails.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/25/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Message: On an incomplete message, details about why the message is incomplete.
11 | /// Run: Details on why the run is incomplete. Will be null if the run is not incomplete.
12 | public struct IncompleteDetails: Codable {
13 |
14 | /// Message: The reason the message is incomplete.
15 | /// Run: The reason why the run is incomplete. This will point to which specific token limit was reached over the course of the run.
16 | let reason: String
17 | }
18 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/LastError.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LastError.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/28/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct LastError: Codable {
11 | /// One of server_error or rate_limit_exceeded.
12 | let code: String
13 | /// A human-readable description of the error.
14 | let message: String
15 | }
16 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/MessageAttachment.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessageAttachment.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/25/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Messages have attachments instead of file_ids. attachments are helpers that add files to the Thread’s tool_resources.
11 | /// [V2](https://platform.openai.com/docs/assistants/migration/what-has-changed)
12 | public struct MessageAttachment: Codable {
13 |
14 | let fileID: String
15 | let tools: [AssistantObject.Tool]
16 |
17 | enum CodingKeys: String, CodingKey {
18 | case fileID = "file_id"
19 | case tools
20 | }
21 |
22 | public init(fileID: String, tools: [AssistantObject.Tool]) {
23 | self.fileID = fileID
24 | self.tools = tools
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/Reasoning.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Reasoning.swift
3 | // SwiftOpenAI
4 | //
5 | // Created by James Rochabrun on 3/15/25.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Reasoning configuration for o-series models
11 | public struct Reasoning: Codable {
12 | public init(effort: String? = nil, generateSummary: String? = nil, summary: String? = nil) {
13 | self.effort = effort
14 | self.generateSummary = generateSummary
15 | self.summary = summary
16 | }
17 |
18 | /// Defaults to medium
19 | /// Constrains effort on reasoning for [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently supported values are low, medium, and high.
20 | /// Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response.
21 | public var effort: String?
22 |
23 | /// computer_use_preview only
24 | /// A summary of the reasoning performed by the model.
25 | /// This can be useful for debugging and understanding the model's reasoning process. One of concise or detailed.
26 | public var generateSummary: String?
27 |
28 | /// Summary field used in response objects (nullable)
29 | public var summary: String?
30 |
31 | enum CodingKeys: String, CodingKey {
32 | case effort
33 | case generateSummary = "generate_summary"
34 | case summary
35 | }
36 |
37 | }
38 |
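A small encoding sketch (not part of the file above). Because the synthesized `Codable` conformance skips `nil` optionals, only the fields you set end up in the request body:

```swift
import Foundation

do {
    let reasoning = Reasoning(effort: "high")
    let encoded = try JSONEncoder().encode(reasoning)
    print(String(data: encoded, encoding: .utf8) ?? "")   // {"effort":"high"}
} catch {
    print("encoding failed: \(error)")
}
```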
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/ResponseFormat.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ResponseFormat.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/13/24.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - ResponseFormat
11 |
12 | /// An object specifying the format that the model must output. Compatible with GPT-4o, GPT-4o mini, GPT-4 Turbo and all GPT-3.5 Turbo models newer than gpt-3.5-turbo-1106.
13 | ///
14 | /// Setting to { "type": "json_schema", "json_schema": {...} } enables Structured Outputs which ensures the model will match your supplied JSON schema. Learn more in the [Structured Outputs guide.](https://platform.openai.com/docs/guides/structured-outputs)
15 | ///
16 | /// Setting to { "type": "json_object" } enables JSON mode, which ensures the message the model generates is valid JSON.
17 | ///
18 | /// Important: when using JSON mode, you must also instruct the model to produce JSON yourself via a system or user message. Without this, the model may generate an unending stream of whitespace until the generation reaches the token limit, resulting in a long-running and seemingly "stuck" request. Also note that the message content may be partially cut off if finish_reason="length", which indicates the generation exceeded max_tokens or the conversation exceeded the max context length.
19 | ///
20 | /// [OpenAI announcement](https://openai.com/index/introducing-structured-outputs-in-the-api/)
21 | ///
22 | /// [Documentation](https://platform.openai.com/docs/api-reference/chat/create#chat-create-response_format)
23 | public enum ResponseFormat: Codable, Equatable {
24 |
25 | case text // The type of response format being defined: text.
26 | case jsonObject // The type of response format being defined: json_object.
27 | case jsonSchema(JSONSchemaResponseFormat) // The type of response format being defined: json_schema.
28 | case unknown
29 |
30 | public init(from decoder: Decoder) throws {
31 | // Attempt to decode the response format as a single string
32 | if
33 | let singleValueContainer = try? decoder.singleValueContainer(),
34 | let typeString = try? singleValueContainer.decode(String.self)
35 | {
36 | switch typeString {
37 | case "text":
38 | self = .text
39 | case "json_object":
40 | self = .jsonObject
41 | default:
42 | self = .unknown
43 | }
44 | return
45 | }
46 |
47 | // If it’s not a single string, decode it as a dictionary
48 | let container = try decoder.container(keyedBy: CodingKeys.self)
49 | let type = try container.decode(String.self, forKey: .type)
50 |
51 | switch type {
52 | case "text":
53 | self = .text
54 |
55 | case "json_object":
56 | self = .jsonObject
57 |
58 | case "json_schema":
59 | let jsonSchema = try container.decode(JSONSchemaResponseFormat.self, forKey: .jsonSchema)
60 | self = .jsonSchema(jsonSchema)
61 |
62 | default:
63 | self = .unknown
64 | }
65 | }
66 |
67 | public func encode(to encoder: Encoder) throws {
68 | var container = encoder.container(keyedBy: CodingKeys.self)
69 | switch self {
70 | case .text:
71 | try container.encode("text", forKey: .type)
72 |
73 | case .jsonObject:
74 | try container.encode("json_object", forKey: .type)
75 |
76 | case .jsonSchema(let jsonSchema):
77 | try container.encode("json_schema", forKey: .type)
78 | try container.encode(jsonSchema, forKey: .jsonSchema)
79 |
80 | case .unknown:
81 | try container.encode("unknown", forKey: .type)
82 | }
83 | }
84 |
85 | private enum CodingKeys: String, CodingKey {
86 | case type
87 | case jsonSchema = "json_schema"
88 | }
89 | }
90 |
91 | // MARK: - JSONSchemaResponseFormat
92 |
93 | /// [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs/structured-outputs)
94 | /// Specifically to be used for Response format with structured outputs.
95 | public struct JSONSchemaResponseFormat: Codable, Equatable {
96 | let name: String
97 | let description: String?
98 | let strict: Bool
99 | let schema: JSONSchema
100 |
101 | public init(name: String, description: String? = nil, strict: Bool, schema: JSONSchema) {
102 | self.name = name
103 | self.description = description
104 | self.strict = strict
105 | self.schema = schema
106 | }
107 | }
108 |
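A round-trip sketch (not part of the file above) showing the two shapes the custom `Codable` accepts: a bare string or a `{ "type": ... }` object. `ResponseFormatWrapper` is a hypothetical container used only to exercise the decoder:

```swift
import Foundation

// Encoding always produces the object form.
do {
    let encoded = try JSONEncoder().encode(ResponseFormat.jsonObject)
    print(String(data: encoded, encoding: .utf8) ?? "")   // {"type":"json_object"}
} catch {
    print("encoding failed: \(error)")
}

// Decoding accepts either the legacy bare string or the object form.
struct ResponseFormatWrapper: Decodable {
    let responseFormat: ResponseFormat
    enum CodingKeys: String, CodingKey { case responseFormat = "response_format" }
}

let bareString = Data(#"{ "response_format": "json_object" }"#.utf8)
let objectForm = Data(#"{ "response_format": { "type": "text" } }"#.utf8)

do {
    let a = try JSONDecoder().decode(ResponseFormatWrapper.self, from: bareString).responseFormat
    let b = try JSONDecoder().decode(ResponseFormatWrapper.self, from: objectForm).responseFormat
    print(a == .jsonObject, b == .text)   // true true
} catch {
    print("decoding failed: \(error)")
}
```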
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/TextConfiguration.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TextConfiguration.swift
3 | // SwiftOpenAI
4 | //
5 | // Created by James Rochabrun on 3/15/25.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - TextConfiguration
11 |
12 | /// Text configuration options
13 | public struct TextConfiguration: Codable {
14 | /// An object specifying the format that the model must output
15 | public var format: FormatType
16 |
17 | public init(format: FormatType) {
18 | self.format = format
19 | }
20 | }
21 |
22 | // MARK: - FormatType
23 |
24 | /// Format types for text response
25 | public enum FormatType: Codable {
26 | case text
27 | case jsonSchema(JSONSchema)
28 | case jsonObject
29 |
30 | public init(from decoder: Decoder) throws {
31 | let container = try decoder.container(keyedBy: CodingKeys.self)
32 | let type = try container.decode(String.self, forKey: .type)
33 |
34 | switch type {
35 | case "text":
36 | self = .text
37 |
38 | case "json_schema":
39 | let schema = try container.decode(JSONSchema.self, forKey: .schema)
40 | self = .jsonSchema(schema)
41 |
42 | case "json_object":
43 | self = .jsonObject
44 |
45 | default:
46 | throw DecodingError.dataCorruptedError(
47 | forKey: .type,
48 | in: container,
49 | debugDescription: "Unknown format type: \(type)")
50 | }
51 | }
52 |
53 | public func encode(to encoder: Encoder) throws {
54 | var container = encoder.container(keyedBy: CodingKeys.self)
55 |
56 | switch self {
57 | case .text:
58 | try container.encode("text", forKey: .type)
59 |
60 | case .jsonSchema(let schema):
61 | try container.encode("json_schema", forKey: .type)
62 | try container.encode(schema, forKey: .schema)
63 |
64 | case .jsonObject:
65 | try container.encode("json_object", forKey: .type)
66 | }
67 | }
68 |
69 | enum CodingKeys: String, CodingKey {
70 | case type
71 | case schema
72 | }
73 |
74 | }
75 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/ToolCall.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ToolCall.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 11/14/23.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - ToolCall
11 |
12 | public struct ToolCall: Codable {
13 |
14 | public init(
15 | index: Int? = nil,
16 | id: String?,
17 | type: String = "function",
18 | function: FunctionCall)
19 | {
20 | self.index = index
21 | self.id = id
22 | self.type = type
23 | self.function = function
24 | }
25 |
26 | public let index: Int?
27 | /// The ID of the tool call.
28 | public let id: String?
29 | /// The type of the tool. Currently, only `function` is supported.
30 | public let type: String?
31 | /// The function that the model called.
32 | public let function: FunctionCall
33 |
34 | }
35 |
36 | // MARK: - FunctionCall
37 |
38 | public struct FunctionCall: Codable {
39 |
40 | /// The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.
41 | public let arguments: String
42 | /// The name of the function to call.
43 | public let name: String?
44 |
45 | public init(
46 | arguments: String,
47 | name: String)
48 | {
49 | self.arguments = arguments
50 | self.name = name
51 | }
52 | }
53 |
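Because `FunctionCall.arguments` is a JSON-encoded string, the usual pattern is to decode it into your own argument type before invoking the function. A sketch (not part of the file above); `WeatherArgs` and `get_current_weather` are illustrative names:

```swift
import Foundation

// Illustrative argument type for a hypothetical "get_current_weather" function.
struct WeatherArgs: Decodable {
    let location: String
    let unit: String?
}

let call = FunctionCall(
    arguments: #"{"location": "Lima, Peru", "unit": "celsius"}"#,
    name: "get_current_weather")

do {
    let args = try JSONDecoder().decode(WeatherArgs.self, from: Data(call.arguments.utf8))
    print("calling \(call.name ?? "?") for \(args.location) (\(args.unit ?? "default") units)")
} catch {
    // As the doc comment warns, the model can emit invalid JSON; handle failures gracefully.
    print("could not parse arguments: \(error)")
}
```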
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/ToolChoice.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ToolChoice.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/13/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// The string `none` means the model will not call a function and instead generates a message.
11 | ///
12 | /// `auto` means the model can pick between generating a message or calling a function.
13 | ///
14 | /// `object` Specifies a tool the model should use. Use this to force the model to call a specific function. The type of the tool: currently, only `function` is supported. `{"type": "function", "function": {"name": "my_function"}}`
15 | ///
16 | /// `required` To force the model to always call one or more functions, you can set tool_choice: "required". The model will then select which function(s) to call.
17 | ///
18 | /// [Function Calling](https://platform.openai.com/docs/guides/function-calling)
19 | public enum ToolChoice: Codable, Equatable {
20 | case none
21 | case auto
22 | case required
23 | case function(type: String = "function", name: String)
24 |
25 | public init(from decoder: Decoder) throws {
26 | // Handle the 'function' case:
27 | if
28 | let container = try? decoder.container(keyedBy: CodingKeys.self),
29 | let functionContainer = try? container.nestedContainer(keyedBy: FunctionCodingKeys.self, forKey: .function)
30 | {
31 | let name = try functionContainer.decode(String.self, forKey: .name)
32 | self = .function(type: "function", name: name)
33 | return
34 | }
35 |
36 | // Handle the 'auto' and 'none' cases
37 | let container = try decoder.singleValueContainer()
38 | switch try container.decode(String.self) {
39 | case "none":
40 | self = .none
41 | case "auto":
42 | self = .auto
43 | case "required":
44 | self = .required
45 | default:
46 | throw DecodingError.dataCorruptedError(in: container, debugDescription: "Invalid tool_choice structure")
47 | }
48 | }
49 |
50 | public func encode(to encoder: Encoder) throws {
51 | switch self {
52 | case .none:
53 | var container = encoder.singleValueContainer()
54 | try container.encode(CodingKeys.none.rawValue)
55 |
56 | case .auto:
57 | var container = encoder.singleValueContainer()
58 | try container.encode(CodingKeys.auto.rawValue)
59 |
60 | case .required:
61 | var container = encoder.singleValueContainer()
62 | try container.encode(CodingKeys.required.rawValue)
63 |
64 | case .function(let type, let name):
65 | var container = encoder.container(keyedBy: CodingKeys.self)
66 | try container.encode(type, forKey: .type)
67 | var functionContainer = container.nestedContainer(keyedBy: FunctionCodingKeys.self, forKey: .function)
68 | try functionContainer.encode(name, forKey: .name)
69 | }
70 | }
71 |
72 | enum CodingKeys: String, CodingKey {
73 | case none
74 | case auto
75 | case required
76 | case type
77 | case function
78 | }
79 |
80 | enum FunctionCodingKeys: String, CodingKey {
81 | case name
82 | }
83 |
84 | }
85 |
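An encoding sketch (not part of the file above) showing the two wire shapes `encode(to:)` produces: a bare string for the simple cases and a nested object for `.function`. `ToolChoicePayload` is a hypothetical wrapper standing in for a request body:

```swift
import Foundation

struct ToolChoicePayload: Encodable {
    let toolChoice: ToolChoice
    enum CodingKeys: String, CodingKey { case toolChoice = "tool_choice" }
}

do {
    let encoder = JSONEncoder()
    let auto = try encoder.encode(ToolChoicePayload(toolChoice: .auto))
    let forced = try encoder.encode(ToolChoicePayload(toolChoice: .function(type: "function", name: "my_function")))
    print(String(data: auto, encoding: .utf8) ?? "")
    // {"tool_choice":"auto"}
    print(String(data: forced, encoding: .utf8) ?? "")
    // {"tool_choice":{"type":"function","function":{"name":"my_function"}}}  (key order may vary)
} catch {
    print("encoding failed: \(error)")
}
```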
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/ToolChoiceMode.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ToolChoiceMode.swift
3 | // SwiftOpenAI
4 | //
5 | // Created by James Rochabrun on 3/15/25.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: - ToolChoiceMode
11 |
12 | /// Controls which (if any) tool is called by the model.
13 | public enum ToolChoiceMode: Codable {
14 |
15 | /// Means the model will not call any tool and instead generates a message.
16 | case none
17 |
18 | /// Means the model can pick between generating a message or calling one or more tools.
19 | case auto
20 |
21 | /// Means the model must call one or more tools.
22 | case required
23 |
24 | /// Indicates that the model should use a built-in tool to generate a response.
25 | case hostedTool(HostedToolType)
26 |
27 | /// Use this option to force the model to call a specific function.
28 | case functionTool(FunctionTool)
29 |
30 | public init(from decoder: Decoder) throws {
31 | let container = try decoder.singleValueContainer()
32 |
33 | if let stringValue = try? container.decode(String.self) {
34 | switch stringValue {
35 | case "none":
36 | self = .none
37 | case "auto":
38 | self = .auto
39 | case "required":
40 | self = .required
41 | default:
42 | throw DecodingError.dataCorruptedError(
43 | in: container,
44 | debugDescription: "Unknown tool choice string value: \(stringValue)")
45 | }
46 | } else if let hostedTool = try? container.decode(HostedToolType.self) {
47 | self = .hostedTool(hostedTool)
48 | } else if let functionTool = try? container.decode(FunctionTool.self) {
49 | self = .functionTool(functionTool)
50 | } else {
51 | throw DecodingError.dataCorruptedError(
52 | in: container,
53 | debugDescription: "Invalid tool choice value")
54 | }
55 | }
56 |
57 | public func encode(to encoder: Encoder) throws {
58 | var container = encoder.singleValueContainer()
59 |
60 | switch self {
61 | case .none:
62 | try container.encode("none")
63 | case .auto:
64 | try container.encode("auto")
65 | case .required:
66 | try container.encode("required")
67 | case .hostedTool(let toolType):
68 | try container.encode(toolType)
69 | case .functionTool(let tool):
70 | try container.encode(tool)
71 | }
72 | }
73 |
74 | }
75 |
76 | // MARK: - HostedToolType
77 |
78 | /// Hosted tool type enum
79 | public enum HostedToolType: Codable {
80 | /// File search tool
81 | case fileSearch
82 |
83 | /// Web search tool
84 | case webSearchPreview
85 |
86 | /// Computer use tool
87 | case computerUsePreview
88 |
89 | /// Custom tool type for future compatibility
90 | case custom(String)
91 |
92 | public init(from decoder: Decoder) throws {
93 | let container = try decoder.container(keyedBy: CodingKeys.self)
94 | let type = try container.decode(String.self, forKey: .type)
95 |
96 | switch type {
97 | case "file_search":
98 | self = .fileSearch
99 | case "web_search_preview":
100 | self = .webSearchPreview
101 | case "computer_use_preview":
102 | self = .computerUsePreview
103 | default:
104 | self = .custom(type)
105 | }
106 | }
107 |
108 | public func encode(to encoder: Encoder) throws {
109 | var container = encoder.container(keyedBy: CodingKeys.self)
110 |
111 | switch self {
112 | case .fileSearch:
113 | try container.encode("file_search", forKey: .type)
114 | case .webSearchPreview:
115 | try container.encode("web_search_preview", forKey: .type)
116 | case .computerUsePreview:
117 | try container.encode("computer_use_preview", forKey: .type)
118 | case .custom(let value):
119 | try container.encode(value, forKey: .type)
120 | }
121 | }
122 |
123 | private enum CodingKeys: String, CodingKey {
124 | case type
125 | }
126 | }
127 |
128 | // MARK: - FunctionTool
129 |
130 | /// Function tool specification
131 | public struct FunctionTool: Codable {
132 | /// The name of the function to call
133 | public var name: String
134 |
135 | /// For function calling, the type is always function
136 | public var type = "function"
137 |
138 | public init(name: String) {
139 | self.name = name
140 | }
141 |
142 | enum CodingKeys: String, CodingKey {
143 | case name
144 | case type
145 | }
146 | }
147 |
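A companion sketch (not part of the file above) for the Responses-style `ToolChoiceMode`, showing how the hosted-tool and function-tool cases serialize; `ToolChoiceModePayload` and `lookup_order` are illustrative names:

```swift
import Foundation

struct ToolChoiceModePayload: Encodable {
    let toolChoice: ToolChoiceMode
    enum CodingKeys: String, CodingKey { case toolChoice = "tool_choice" }
}

do {
    let encoder = JSONEncoder()
    let hosted = try encoder.encode(ToolChoiceModePayload(toolChoice: .hostedTool(.fileSearch)))
    let function = try encoder.encode(ToolChoiceModePayload(toolChoice: .functionTool(FunctionTool(name: "lookup_order"))))
    print(String(data: hosted, encoding: .utf8) ?? "")
    // {"tool_choice":{"type":"file_search"}}
    print(String(data: function, encoding: .utf8) ?? "")
    // {"tool_choice":{"name":"lookup_order","type":"function"}}  (key order may vary)
} catch {
    print("encoding failed: \(error)")
}
```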
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/TruncationStrategy.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TruncationStrategy.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/13/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct TruncationStrategy: Codable {
11 | /// The truncation strategy to use for the thread. The default is auto. If set to last_messages, the thread will be truncated to the n most recent messages in the thread. When set to auto, messages in the middle of the thread will be dropped to fit the context length of the model, max_prompt_tokens.
12 | let type: String
13 | /// The number of most recent messages from the thread when constructing the context for the run.
14 | let lastMessage: Int?
15 |
16 | enum CodingKeys: String, CodingKey {
17 | case type
18 | case lastMessage = "last_messages"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Shared/Usage.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Usage.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/13/24.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Represents token usage details including input tokens, output tokens, a breakdown of output tokens, and the total tokens used.
11 | public struct Usage: Codable {
12 |
13 | /// Details about input tokens
14 | public struct InputTokensDetails: Codable {
15 | /// Number of cached tokens
16 | public let cachedTokens: Int?
17 |
18 | enum CodingKeys: String, CodingKey {
19 | case cachedTokens = "cached_tokens"
20 | }
21 | }
22 |
23 | /// A detailed breakdown of the output tokens.
24 | public struct OutputTokensDetails: Codable {
25 | /// The number of reasoning tokens.
26 | public let reasoningTokens: Int?
27 |
28 | enum CodingKeys: String, CodingKey {
29 | case reasoningTokens = "reasoning_tokens"
30 | }
31 | }
32 |
33 | /// Number of completion tokens used over the course of the run step.
34 | public let completionTokens: Int?
35 |
36 | /// Number of prompt tokens used over the course of the run step.
37 | public let promptTokens: Int?
38 |
39 | /// The number of input tokens.
40 | public let inputTokens: Int?
41 |
42 | /// Details about input tokens
43 | public let inputTokensDetails: InputTokensDetails?
44 |
45 | /// The number of output tokens.
46 | public let outputTokens: Int?
47 |
48 | /// A detailed breakdown of the output tokens.
49 | public let outputTokensDetails: OutputTokensDetails?
50 |
51 | /// The total number of tokens used.
52 | public let totalTokens: Int?
53 |
54 | enum CodingKeys: String, CodingKey {
55 | case completionTokens = "completion_tokens"
56 | case promptTokens = "prompt_tokens"
57 | case inputTokens = "input_tokens"
58 | case inputTokensDetails = "input_tokens_details"
59 | case outputTokens = "output_tokens"
60 | case outputTokensDetails = "output_tokens_details"
61 | case totalTokens = "total_tokens"
62 | }
63 | }
64 |
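A decoding sketch (not part of the file above). Every property is optional, so the same struct covers both the chat-style (`prompt_tokens`/`completion_tokens`) and Responses-style (`input_tokens`/`output_tokens`) payloads; the JSON below is a made-up Responses-style example:

```swift
import Foundation

let usageJSON = Data("""
{
  "input_tokens": 36,
  "input_tokens_details": { "cached_tokens": 0 },
  "output_tokens": 87,
  "output_tokens_details": { "reasoning_tokens": 64 },
  "total_tokens": 123
}
""".utf8)

do {
    let usage = try JSONDecoder().decode(Usage.self, from: usageJSON)
    print("input: \(usage.inputTokens ?? 0), output: \(usage.outputTokens ?? 0), reasoning: \(usage.outputTokensDetails?.reasoningTokens ?? 0)")
} catch {
    print("decoding failed: \(error)")
}
```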
--------------------------------------------------------------------------------