├── .github
└── workflows
│ └── swift.yml
├── .gitignore
├── .spi.yml
├── .swiftlint.yml
├── .swiftpm
└── xcode
│ ├── package.xcworkspace
│ └── contents.xcworkspacedata
│ └── xcshareddata
│ └── xcschemes
│ └── SwiftOpenAI.xcscheme
├── Demo
├── Demo.xcodeproj
│ ├── project.pbxproj
│ ├── project.xcworkspace
│ │ ├── contents.xcworkspacedata
│ │ └── xcshareddata
│ │ │ └── IDEWorkspaceChecks.plist
│ └── xcshareddata
│ │ └── xcschemes
│ │ └── Demo.xcscheme
└── Demo
│ ├── Assets.xcassets
│ ├── AccentColor.colorset
│ │ └── Contents.json
│ ├── AppIcon.appiconset
│ │ └── Contents.json
│ └── Contents.json
│ ├── Audio
│ ├── CreateAudio
│ │ ├── CreateAudioView.swift
│ │ └── CreateAudioViewModel.swift
│ ├── CreateTranscription
│ │ ├── CreateTranscriptView.swift
│ │ └── CreateTranscriptViewModel.swift
│ └── CreateTranslation
│ │ ├── CreateTranslationView.swift
│ │ └── CreateTranslationViewModel.swift
│ ├── ChatCompletions
│ ├── ChatCompletionsViewModel.swift
│ ├── ChatView.swift
│ └── Subviews
│ │ ├── ConversationView.swift
│ │ ├── TextMessageView.swift
│ │ └── TypingIndicatorView.swift
│ ├── ContentView.swift
│ ├── DemoApp.swift
│ ├── Helpers
│ └── Bundle+OpenAIAPIKey.swift
│ ├── Image
│ ├── CreateImages
│ │ ├── CreateImageViewModel.swift
│ │ ├── CreateImagesView.swift
│ │ └── Subviews
│ │ │ └── LoadingView.swift
│ ├── EditImage
│ │ ├── Dependencies
│ │ │ ├── CameraView.swift
│ │ │ ├── GalleryView.swift
│ │ │ ├── Line.swift
│ │ │ ├── Point.swift
│ │ │ ├── SwiftBetaCanvas.swift
│ │ │ └── View+ReverseMask.swift
│ │ ├── EditImageView.swift
│ │ └── EditImageViewModel.swift
│ └── VariationImage
│ │ ├── VariationImageView.swift
│ │ └── VariationImageViewModel.swift
│ ├── Preview Content
│ └── Preview Assets.xcassets
│ │ └── Contents.json
│ ├── SwiftOpenAI.plist
│ └── Vision
│ ├── VisionView.swift
│ └── VisionViewModel.swift
├── LICENSE
├── Package.resolved
├── Package.swift
├── README.md
├── Sources
└── SwiftOpenAI
│ ├── APIClient
│ ├── API.swift
│ ├── Endpoint
│ │ └── Endpoint.swift
│ ├── Parser
│ │ └── Parser.swift
│ ├── Requester
│ │ ├── APIError.swift
│ │ ├── RequestBuilder.swift
│ │ └── Requester.swift
│ └── Router
│ │ ├── BaseEnvironment.swift
│ │ └── Router.swift
│ └── OpenAI
│ ├── DataModels
│ ├── Audio
│ │ ├── CreateTranscriptionDataModel.swift
│ │ ├── CreateTranslationDataModel.swift
│ │ ├── OpenAIAudioResponseType.swift
│ │ ├── OpenAITTSModelType.swift
│ │ ├── OpenAITranscriptionModelType.swift
│ │ └── OpenAIVoiceType.swift
│ ├── Chat
│ │ ├── ChatCompletionsDataModel.swift
│ │ ├── ChatCompletionsOptionalParameters.swift
│ │ └── ChatCompletionsStreamDataModel.swift
│ ├── Completions
│ │ ├── CompletionsDataModel.swift
│ │ └── CompletionsOptionalParameters.swift
│ ├── Embedding
│ │ └── EmbeddingDataModel.swift
│ ├── Images
│ │ ├── CreateImageDataModel.swift
│ │ └── ImageSize.swift
│ ├── Message
│ │ ├── MessageChatGPT.swift
│ │ ├── MessageChatImageInput.swift
│ │ └── MessageRoleType.swift
│ ├── Models
│ │ └── ModelDataModel.swift
│ ├── Moderations
│ │ └── ModerationsDataModel.swift
│ ├── OpenAIError.swift
│ └── OpenAIModelType.swift
│ ├── Environment
│ └── OpenAIBaseEnvironment.swift
│ ├── OpenAIEndpoints
│ ├── List
│ │ ├── Audio
│ │ │ ├── CreateSpeechEndpoint.swift
│ │ │ ├── CreateTranscriptionEndpoint.swift
│ │ │ └── CreateTranslationEndpoint.swift
│ │ ├── Chat
│ │ │ ├── ChatCompletionsEndpoint.swift
│ │ │ ├── ChatCompletionsImageInputEndpoint.swift
│ │ │ └── CompletionsEndpoint.swift
│ │ ├── Embeddings
│ │ │ └── EmbeddingsEndpoint.swift
│ │ ├── Image
│ │ │ ├── CreateImageEndpoint.swift
│ │ │ ├── EditImageEndpoint.swift
│ │ │ └── VariationImageEndpoint.swift
│ │ ├── Models
│ │ │ └── ListModelsEndpoint.swift
│ │ └── Moderation
│ │ │ └── ModerationEndpoint.swift
│ └── OpenAIEndpoints.swift
│ ├── Requests
│ ├── Audio
│ │ ├── CreateSpeechRequest.swift
│ │ ├── CreateTranscriptionRequest.swift
│ │ └── CreateTranslationRequest.swift
│ ├── ChatCompletions
│ │ ├── CreateChatCompletionsImageInputRequest.swift
│ │ ├── CreateChatCompletionsRequest.swift
│ │ └── Stream
│ │ │ ├── ChatCompletionsStreamMapper.swift
│ │ │ └── CreateChatCompletionsStreamRequest.swift
│ ├── Completions
│ │ └── CompletionsRequest.swift
│ ├── Embeddings
│ │ └── EmbeddingsRequest.swift
│ ├── Images
│ │ ├── CreateImagesRequest.swift
│ │ ├── EditImageRequest.swift
│ │ └── VariationImageRequest.swift
│ ├── Models
│ │ └── ListModelsRequest.swift
│ ├── Moderations
│ │ └── ModerationsRequest.swift
│ └── MultipartFormData.swift
│ └── SwiftOpenAI.swift
└── Tests
└── SwiftOpenAITests
├── APIClientTests
├── Endpoint
│ ├── EndpointSpec.swift
│ └── Mocks
│ │ ├── EmptyEndpointMock.swift
│ │ ├── EndpointGetMock.swift
│ │ └── EndpointPostMock.swift
├── Parser
│ ├── ParserSpec.swift
│ └── SwiftBetaModel.swift
├── Requester
│ ├── Mocks
│ │ ├── RequesterMock.swift
│ │ └── URLProtocolMock.swift
│ ├── RequesterBuilderSpec.swift
│ └── RequesterSpec.swift
└── Router
│ ├── BaseEnvironmentMock.swift
│ └── RouterSpec.swift
└── OpenAITests
├── Helpers
├── URLStreamProtocolMock.swift
├── dataToJSON.swift
└── loadJSON.swift
└── Unit Tests
├── Audio
├── CreateSpeech
│ ├── CreateSpeechEndpointSpec.swift
│ └── CreateSpeechRequestSpec.swift
└── CreateTranscription
│ ├── CreateTranscriptionEndpointSpec.swift
│ └── CreateTranscriptionRequestSpec.swift
├── ChatCompletions
├── ChatCompletionsAPIClientSpec.swift
├── ChatCompletionsEndpointSpec.swift
├── ChatCompletionsParserSpec.swift
├── ChatCompletionsRequestSpec.swift
├── ChatCompletionsStreamAPIClientSpec.swift
└── ChatCompletionsStreamMapperSpec.swift
├── Completions
├── CompletionParserSpec.swift
├── CompletionRequestSpec.swift
├── CompletionsAPIClientSpec.swift
└── CompletionsEndpointSpec.swift
├── Embeddings
├── EmbeddingsAPIClientSpec.swift
├── EmbeddingsEndpointSpec.swift
├── EmbeddingsParserSpec.swift
└── EmbeddingsRequestSpec.swift
├── Images
├── CreateImageAPIClientSpec.swift
├── CreateImageEndpointSpec.swift
├── CreateImageParserSpec.swift
└── CreateImageRequestSpec.swift
├── JSON
├── chat.completions.error.invalid_api_key.json
├── chat.completions.json
├── completions.error.invalid_api_key.json
├── completions.json
├── create.image.json
├── embeddings.json
├── models.json
└── moderations.json
├── ListModels
├── ListModelsAPIClientSpec.swift
├── ListModelsEndpointSpec.swift
├── ListModelsParserSpec.swift
└── ListModelsRequestSpec.swift
└── Moderations
├── ModerationsAPIClientSpec.swift
├── ModerationsEndpointSpec.swift
├── ModerationsParserSpec.swift
└── ModerationsRequestSpec.swift
/.github/workflows/swift.yml:
--------------------------------------------------------------------------------
1 | # Github Actions Documentation
2 | # https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-swift
3 |
4 | name: Swift
5 |
6 | on:
7 | push:
8 | branches: [ "main" ]
9 | pull_request:
10 | branches: [ "main" ]
11 |
12 | jobs:
13 | build:
14 | runs-on: macos-latest
15 |
16 | steps:
17 | - uses: actions/checkout@v3
18 |
19 | - name: Build
20 | run: swift build
21 |
22 | - name: Run tests
23 | run: swift test
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by https://www.toptal.com/developers/gitignore/api/swiftpm,swiftpackagemanager,xcode
2 | # Edit at https://www.toptal.com/developers/gitignore?templates=swiftpm,swiftpackagemanager,xcode
3 |
4 | ### SwiftPackageManager ###
5 | Packages
6 | .build/
7 | xcuserdata
8 | DerivedData/
9 |
10 |
11 | ### SwiftPM ###
12 |
13 |
14 | ### Xcode ###
15 | ## User settings
16 | xcuserdata/
17 |
18 | ## Xcode 8 and earlier
19 | *.xcscmblueprint
20 | *.xccheckout
21 |
22 | ### Xcode Patch ###
23 | *.xcodeproj/*
24 | !*.xcodeproj/project.pbxproj
25 | !*.xcodeproj/xcshareddata/
26 | !*.xcodeproj/project.xcworkspace/
27 | !*.xcworkspace/contents.xcworkspacedata
28 | /*.gcno
29 | **/xcshareddata/WorkspaceSettings.xcsettings
30 |
31 | # End of https://www.toptal.com/developers/gitignore/api/swiftpm,swiftpackagemanager,xcode
32 | .DS_Store
33 |
--------------------------------------------------------------------------------
/.spi.yml:
--------------------------------------------------------------------------------
1 | version: 1
2 | builder:
3 | configs:
4 | - platform: ios
5 | scheme: SwiftOpenAI
6 | - platform: macos-xcodebuild
7 | scheme: SwiftOpenAI
8 | - documentation_targets: [SwiftOpenAI]
--------------------------------------------------------------------------------
/.swiftlint.yml:
--------------------------------------------------------------------------------
1 | disabled_rules:
2 | - identifier_name
3 | - type_name
4 |
5 | line_length:
6 | warning: 140
7 | ignores_function_declarations: true
8 | ignores_comments: true
9 | ignores_interpolated_strings: true
10 | ignores_urls: true
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/xcshareddata/xcschemes/SwiftOpenAI.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
31 |
32 |
34 |
40 |
41 |
42 |
43 |
44 |
54 |
55 |
61 |
62 |
68 |
69 |
70 |
71 |
73 |
74 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/Demo/Demo.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/Demo/Demo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/Demo/Demo.xcodeproj/xcshareddata/xcschemes/Demo.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
9 |
10 |
16 |
22 |
23 |
24 |
25 |
26 |
32 |
33 |
43 |
45 |
51 |
52 |
53 |
54 |
60 |
62 |
68 |
69 |
70 |
71 |
73 |
74 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/Demo/Demo/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demo/Demo/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/Demo/Demo/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demo/Demo/Audio/CreateAudio/CreateAudioView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import AVKit
3 |
4 | struct CreateAudioView: View {
5 | var viewModel: CreateAudioViewModel
6 | @State var prompt: String = "Hello, I'm SwiftBeta, a developer who in his free time tries to teach through his blog swiftbeta.com and his YouTube channel. Now I'm adding the OpenAI API to transform this text into audio"
7 |
8 | var body: some View {
9 | VStack {
10 | VStack {
11 | switch viewModel.isLoadingTextToSpeechAudio {
12 | case .isLoading:
13 | TypingIndicatorView()
14 | .padding(.top, 60)
15 | case .noExecuted:
16 | VStack {
17 | Image(systemName: "waveform")
18 | .resizable()
19 | .scaledToFit()
20 | .frame(width: 120, height: 120)
21 | Text("Add the prompt in the bottom text field of the audio you wish to create")
22 | .font(.system(size: 24))
23 | .multilineTextAlignment(.center)
24 | .padding(.horizontal, 32)
25 | }
26 | .font(.system(size: 24))
27 | .padding(.top, 60)
28 | case .finishedPlaying,
29 | .finishedLoading:
30 | VStack {
31 | Image(systemName: "waveform")
32 | .font(.system(size: 120))
33 | Button {
34 | viewModel.playAudioAgain()
35 | } label: {
36 | Text("Tap to play it again!")
37 | }
38 | .buttonStyle(.borderedProminent)
39 | }
40 | .padding(.top, 60)
41 | }
42 | }
43 | .padding(.horizontal, 32)
44 |
45 | Spacer()
46 |
47 | HStack {
48 | TextField("Write something to create Speech", text: $prompt, axis: .vertical)
49 | .padding(12)
50 | .background(Color(.systemGray6))
51 | .cornerRadius(25)
52 | .lineLimit(6)
53 | .onSubmit {
54 | Task {
55 | await viewModel.createSpeech(input: prompt)
56 | }
57 | }
58 | Button(action: {
59 | Task {
60 | await viewModel.createSpeech(input: prompt)
61 | }
62 | }) {
63 | Image(systemName: "paperplane.fill")
64 | .foregroundColor(Color.white)
65 | .frame(width: 44, height: 44)
66 | .background(Color.blue)
67 | .cornerRadius(22)
68 | }
69 | .padding(.leading, 8)
70 | }
71 | .padding(.horizontal)
72 | }
73 | .padding(.top)
74 | }
75 | }
76 |
77 | #Preview {
78 | CreateAudioView(viewModel: .init())
79 | }
80 |
--------------------------------------------------------------------------------
/Demo/Demo/Audio/CreateAudio/CreateAudioViewModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftOpenAI
3 | import AVFoundation
4 |
5 | @Observable
6 | final class CreateAudioViewModel: NSObject {
7 | var openAI = SwiftOpenAI(apiKey: Bundle.main.getOpenAIApiKey()!)
8 | var avAudioPlayer = AVAudioPlayer()
9 | var isLoadingTextToSpeechAudio: TextToSpeechType = .noExecuted
10 |
11 | enum TextToSpeechType {
12 | case noExecuted
13 | case isLoading
14 | case finishedLoading
15 | case finishedPlaying
16 | }
17 |
18 | func playAudioAgain() {
19 | avAudioPlayer.play()
20 | }
21 |
22 | @MainActor
23 | func createSpeech(input: String) async {
24 | isLoadingTextToSpeechAudio = .isLoading
25 | do {
26 | let data = try await openAI.createSpeech(model: .tts(.tts1),
27 | input: input,
28 | voice: .alloy,
29 | responseFormat: .mp3,
30 | speed: 1.0)
31 |
32 | if let filePath = FileManager.default.urls(for: .documentDirectory,
33 | in: .userDomainMask).first?.appendingPathComponent("speech.mp3"),
34 | let data {
35 | do {
36 | try data.write(to: filePath)
37 | print("File created: \(filePath)")
38 |
39 | avAudioPlayer = try AVAudioPlayer(contentsOf: filePath)
40 | avAudioPlayer.delegate = self
41 | avAudioPlayer.play()
42 | isLoadingTextToSpeechAudio = .finishedLoading
43 | } catch {
44 | print("Error saving file: ", error.localizedDescription)
45 | }
46 | } else {
47 | print("Error trying to save file in filePath")
48 | }
49 |
50 | } catch {
51 | print("Error creating Audios: ", error.localizedDescription)
52 | }
53 | }
54 | }
55 |
56 | extension CreateAudioViewModel: AVAudioPlayerDelegate {
57 | func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
58 | isLoadingTextToSpeechAudio = .finishedPlaying
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/Demo/Demo/Audio/CreateTranscription/CreateTranscriptView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import UniformTypeIdentifiers
3 | import PhotosUI
4 |
5 | struct CreateTranscriptView: View {
6 | @Binding var viewModel: CreateTranscriptViewModel
7 |
8 | var body: some View {
9 | Form {
10 | Section("Select Video or Video/Audio") {
11 | VStack {
12 | PhotosPicker(selection: $viewModel.photoSelection,
13 | matching: .videos,
14 | photoLibrary: .shared()) {
15 | Label("Add video or audio",
16 | systemImage: "video.fill")
17 | }
18 | .frame(height: 300)
19 | .photosPickerStyle(.inline)
20 | .onChange(of: viewModel.photoSelection!) { oldValue, newValue in
21 | newValue.loadTransferable(type: Data.self) { [self] result in
22 | switch result {
23 | case .success(let data):
24 | if let data {
25 | viewModel.currentData = data
26 | } else {
27 | print("No supported content type found.")
28 | }
29 | case .failure(let error):
30 | fatalError(error.localizedDescription)
31 | }
32 | }
33 | }
34 | Button {
35 | Task {
36 | await viewModel.createTranscription()
37 | }
38 | } label: {
39 | Text("Transcript Video/Audio")
40 | }
41 | .disabled(viewModel.currentData == nil)
42 | .buttonStyle(.borderedProminent)
43 |
44 | Spacer()
45 | }
46 | }
47 |
48 | Section("Transcription") {
49 | if viewModel.isLoading {
50 | TypingIndicatorView()
51 | } else {
52 | if !viewModel.transcription.isEmpty {
53 | Text(viewModel.transcription)
54 | .font(.system(size: 22))
55 | .italic()
56 | .padding(.horizontal)
57 | .padding(.top, 8)
58 | }
59 | }
60 | }
61 | }
62 | }
63 | }
64 |
65 | #Preview {
66 | CreateTranscriptView(viewModel: .constant(.init()))
67 | }
68 |
--------------------------------------------------------------------------------
/Demo/Demo/Audio/CreateTranscription/CreateTranscriptViewModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 | import PhotosUI
4 | import SwiftOpenAI
5 |
6 | @Observable
7 | class CreateTranscriptViewModel { // Drives CreateTranscriptView: transcribes a picked video/audio file via OpenAI
8 |     var openAI = SwiftOpenAI(apiKey: Bundle.main.getOpenAIApiKey()!) // force-unwrap: missing key is a setup error in this demo
9 |
10 |     var photoSelection: PhotosPickerItem? = .init(itemIdentifier: "") // non-nil placeholder; the view force-unwraps it in onChange — TODO confirm this can never be nil
11 |     var transcription: String = ""
12 |     var isLoading: Bool = false
13 |
14 |     var currentData: Data? // raw bytes of the selected media, set by the view
15 |
16 |     func createTranscription() async { // Streams transcription results for `currentData`; no-op when nothing is selected
17 |         guard let data = currentData else {
18 |             print("Error: Data is empty")
19 |             return
20 |         }
21 |
22 |         isLoading = true
23 |         let model: OpenAITranscriptionModelType = .whisper
24 |
25 |         do {
26 |             for try await newMessage in try await openAI.createTranscription(model: model,
27 |                                                                              file: data,
28 |                                                                              language: "en",
29 |                                                                              prompt: "",
30 |                                                                              responseFormat: .mp3, // NOTE(review): .mp3 as a transcription *response* format looks suspect — confirm against the API
31 |                                                                              temperature: 1.0) {
32 |                 print("Received Transcription \(newMessage)")
33 |                 await MainActor.run { // observed state must be mutated on the main actor
34 |                     isLoading = false
35 |                     transcription = newMessage.text
36 |                 }
37 |             }
38 |         } catch {
39 |             print("Error creating Transcription from file: ", error.localizedDescription)
40 |         }
41 |     }
42 | }
43 |
--------------------------------------------------------------------------------
/Demo/Demo/Audio/CreateTranslation/CreateTranslationView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import UniformTypeIdentifiers
3 | import PhotosUI
4 |
5 | struct CreateTranslationView: View {
6 | @Binding var viewModel: CreateTranslationViewModel
7 |
8 | var body: some View {
9 | Form {
10 | Section("Select Video or Video/Audio") {
11 | VStack {
12 | PhotosPicker(selection: $viewModel.photoSelection,
13 | matching: .videos,
14 | photoLibrary: .shared()) {
15 | Label("Add video or audio",
16 | systemImage: "video.fill")
17 | }
18 | .frame(height: 300)
19 | .photosPickerStyle(.inline)
20 | .onChange(of: viewModel.photoSelection!) { oldValue, newValue in
21 | newValue.loadTransferable(type: Data.self) { [self] result in
22 | switch result {
23 | case .success(let data):
24 | if let data {
25 | viewModel.currentData = data
26 | } else {
27 | print("No supported content type found.")
28 | }
29 | case .failure(let error):
30 | fatalError(error.localizedDescription)
31 | }
32 | }
33 | }
34 | Button {
35 | Task {
36 | await viewModel.createTranscription()
37 | }
38 | } label: {
39 | Text("Translate Video/Audio")
40 | }
41 | .disabled(viewModel.currentData == nil)
42 | .buttonStyle(.borderedProminent)
43 |
44 | Spacer()
45 | }
46 | }
47 |
48 | Section("Translation") {
49 | if viewModel.isLoading {
50 | TypingIndicatorView()
51 | } else {
52 | if !viewModel.translation.isEmpty {
53 | Text(viewModel.translation)
54 | .font(.system(size: 22))
55 | .italic()
56 | .padding(.horizontal)
57 | .padding(.top, 8)
58 | }
59 | }
60 | }
61 | }
62 | }
63 | }
64 |
65 | #Preview {
66 |     // Fixed copy-paste bug: this file's preview showed CreateTranscriptView.
67 |     CreateTranslationView(viewModel: .constant(.init()))
68 | }
68 |
--------------------------------------------------------------------------------
/Demo/Demo/Audio/CreateTranslation/CreateTranslationViewModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 | import PhotosUI
4 | import SwiftOpenAI
5 |
6 | @Observable
7 | class CreateTranslationViewModel { // Drives CreateTranslationView: translates a picked video/audio file via OpenAI
8 |     var openAI = SwiftOpenAI(apiKey: Bundle.main.getOpenAIApiKey()!) // force-unwrap: missing key is a setup error in this demo
9 |
10 |     var photoSelection: PhotosPickerItem? = .init(itemIdentifier: "") // non-nil placeholder; the view force-unwraps it in onChange
11 |     var translation: String = ""
12 |     var isLoading: Bool = false
13 |
14 |     var currentData: Data? // raw bytes of the selected media, set by the view
15 |
16 |     func createTranscription() async { // misnomer kept for source compatibility with CreateTranslationView — this performs a *translation*
17 |         guard let data = currentData else {
18 |             print("Error: Data is empty")
19 |             return
20 |         }
21 |
22 |         isLoading = true
23 |         let model: OpenAITranscriptionModelType = .whisper
24 |
25 |         do {
26 |             for try await newMessage in try await openAI.createTranslation(model: model,
27 |                                                                            file: data,
28 |                                                                            prompt: "",
29 |                                                                            responseFormat: .mp3, // NOTE(review): .mp3 as a *response* format looks suspect — confirm against the API
30 |                                                                            temperature: 1.0) {
31 |                 print("Received Translation \(newMessage)")
32 |                 await MainActor.run { // observed state must be mutated on the main actor
33 |                     isLoading = false
34 |                     translation = newMessage.text
35 |                 }
36 |             }
37 |         } catch {
38 |             print("Error creating Translation from file: ", error.localizedDescription) // fixed: message said "Transcription" in the Translation view model
39 |         }
40 |     }
41 | }
42 |
--------------------------------------------------------------------------------
/Demo/Demo/ChatCompletions/ChatCompletionsViewModel.swift:
--------------------------------------------------------------------------------
1 | import SwiftOpenAI
2 | import Foundation
3 | import Observation
4 |
5 | @Observable
6 | final class ChatCompletionsViewModel { // Chat screen model: sends user messages and streams/receives assistant replies
7 |     private let openAI = SwiftOpenAI(apiKey: Bundle.main.getOpenAIApiKey()!) // force-unwrap: missing key is a setup error in this demo
8 |     var messages: [MessageChatGPT] = [.init(text: "I am an AI and I am here to help you.", role: .system)]
9 |     var currentMessage: MessageChatGPT = .init(text: "", role: .assistant) // assistant message currently being filled in
10 |     var isStream: Bool = true // toggled from the ChatView toolbar
11 |
12 |     @MainActor
13 |     func send(message: String) async { // Appends the user message plus an empty assistant placeholder, then requests a completion
14 |         let myMessage = MessageChatGPT(text: message,
15 |                                        role: .user)
16 |         messages.append(myMessage)
17 |         currentMessage = MessageChatGPT(text: "",
18 |                                         role: .assistant)
19 |         messages.append(currentMessage)
20 |
21 |         let optionalParameters = ChatCompletionsOptionalParameters(temperature: 0.5,
22 |                                                                    stream: isStream,
23 |                                                                    maxTokens: 2000)
24 |         if isStream {
25 |             do {
26 |                 for try await newMessage in try await openAI.createChatCompletionsStream(model: .gpt4o(.base), // NOTE(review): stream path uses gpt4o, non-stream uses gpt4 — confirm intentional
27 |                                                                                          messages: messages,
28 |                                                                                          optionalParameters: optionalParameters) {
29 |                     onReceiveStream(newMessage: newMessage)
30 |                 }
31 |             } catch {
32 |                 print("Error generating Chat Completion with STREAM: ", error.localizedDescription)
33 |             }
34 |         } else {
35 |             do {
36 |                 let chatCompletions = try await openAI.createChatCompletions(
37 |                     model: .gpt4(.base),
38 |                     messages: messages,
39 |                     optionalParameters: optionalParameters
40 |                 )
41 |
42 |                 if let chatCompletions { // was Optional.map used purely for side effects — if-let is the idiomatic form
43 |                     onReceive(newMessage: chatCompletions)
44 |                 }
45 |
46 |             } catch {
47 |                 print("Error generating Chat Completion: ", error.localizedDescription)
48 |             }
49 |         }
50 |     }
51 |
52 |     @MainActor
53 |     private func onReceiveStream(newMessage: ChatCompletionsStreamDataModel) { // Appends one streamed delta to the in-progress assistant message
54 |         guard let lastMessage = newMessage.choices.first,
55 |               lastMessage.finishReason == nil, // terminal chunk carries no content
56 |               let content = lastMessage.delta?.content, !content.isEmpty else {
57 |             return
58 |         }
59 |
60 |         currentMessage.text.append(content)
61 |         if let lastIndex = messages.indices.last { // placeholder appended in send(message:) is always last
62 |             messages[lastIndex].text = currentMessage.text
63 |         }
64 |     }
65 |
66 |     @MainActor
67 |     private func onReceive(newMessage: ChatCompletionsDataModel) { // Applies a complete (non-streamed) assistant reply
68 |         guard let lastMessage = newMessage.choices.first else {
69 |             return
70 |         }
71 |
72 |         currentMessage.text.append(lastMessage.message.content)
73 |         if let lastIndex = messages.indices.last {
74 |             messages[lastIndex].text = currentMessage.text
75 |         }
76 |     }
77 | }
78 |
--------------------------------------------------------------------------------
/Demo/Demo/ChatCompletions/ChatView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | struct ChatView: View {
4 | @Binding var viewModel: ChatCompletionsViewModel
5 | @State var prompt: String = "Can you provide a detailed overview of the development process and the key features that distinguished the first iPhone from previous mobile phones? Please include information on its technological innovations and the impact it had on the smartphone market"
6 |
7 | var body: some View {
8 | VStack {
9 | ConversationView(viewModel: $viewModel)
10 | .padding(.horizontal, 12)
11 | .frame(maxWidth: .infinity, maxHeight: .infinity)
12 |
13 | Spacer()
14 |
15 | HStack {
16 | TextField("Write something for ChatGPT", text: $prompt, axis: .vertical)
17 | .padding(12)
18 | .background(Color(.systemGray6))
19 | .cornerRadius(25)
20 | .lineLimit(6)
21 | .onSubmit {
22 | Task {
23 | await viewModel.send(message: prompt)
24 | prompt = ""
25 | }
26 | }
27 | Button(action: {
28 | Task {
29 | await viewModel.send(message: prompt)
30 | prompt = ""
31 | }
32 | }) {
33 | Image(systemName: "paperplane.fill")
34 | .foregroundColor(Color.white)
35 | .frame(width: 44, height: 44)
36 | .background(Color.blue)
37 | .cornerRadius(22)
38 | }
39 | .padding(.leading, 8)
40 | }
41 | .padding(.horizontal)
42 | }
43 | .toolbar {
44 | ToolbarItem(placement: .navigationBarTrailing) {
45 | Toggle(isOn: $viewModel.isStream) {
46 | HStack {
47 | Text("Stream")
48 | .bold()
49 | }
50 | }
51 | .toggleStyle(SwitchToggleStyle(tint: .blue))
52 | }
53 | }
54 | }
55 | }
56 |
57 | #Preview {
58 | ChatView(viewModel: .constant(.init()))
59 | }
60 |
--------------------------------------------------------------------------------
/Demo/Demo/ChatCompletions/Subviews/ConversationView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | struct ConversationView: View {
4 | @State var bottomPadding: Double = 160
5 | @Binding var viewModel: ChatCompletionsViewModel
6 |
7 | var body: some View {
8 | ScrollViewReader { scrollProxy in
9 | ScrollView {
10 | ForEach(viewModel.messages) { message in
11 | TextMessageView(message: message)
12 | .padding(.bottom, viewModel.messages.last == message ? bottomPadding : 0)
13 | }
14 | }
15 | .scrollIndicators(.hidden)
16 | .onChange(of: viewModel.currentMessage, { _, newMessage in
17 | withAnimation(.linear(duration: 0.5)) {
18 | scrollProxy.scrollTo(newMessage.id, anchor: .bottom)
19 | }
20 | })
21 | .onAppear {
22 | withAnimation {
23 | scrollProxy.scrollTo(viewModel.messages.last?.id)
24 | }
25 | }
26 | }
27 | }
28 | }
29 |
30 | #Preview {
31 | ConversationView(viewModel: .constant(.init()))
32 | }
33 |
--------------------------------------------------------------------------------
/Demo/Demo/ChatCompletions/Subviews/TextMessageView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import SwiftOpenAI
3 |
4 | struct TextMessageView: View {
5 | var message: MessageChatGPT
6 |
7 | var body: some View {
8 | HStack {
9 | if message.role == .user {
10 | Spacer()
11 | Text(message.text)
12 | .multilineTextAlignment(.trailing)
13 | .foregroundColor(.white)
14 | .padding(.all, 10)
15 | .background(
16 | RoundedRectangle(cornerRadius: 16)
17 | .fill(Color.blue)
18 | )
19 | .frame(maxWidth: 240, alignment: .trailing)
20 | .id(message.id)
21 | } else if message.role == .assistant || message.role == .system {
22 | if message.text == "" {
23 | TypingIndicatorView()
24 | .padding(.all, 10)
25 | } else {
26 | Text(message.text)
27 | .multilineTextAlignment(.leading)
28 | .foregroundColor(.white)
29 | .padding(.all, 10)
30 | .background(
31 | RoundedRectangle(cornerRadius: 16)
32 | .fill(Color.gray)
33 | )
34 | .frame(maxWidth: 240, alignment: .leading)
35 | .id(message.id)
36 | }
37 | Spacer()
38 | } else {
39 | EmptyView()
40 | }
41 | }
42 | .padding(.vertical, 4)
43 | }
44 | }
45 |
46 | #Preview {
47 | TextMessageView(message: .init(text: "Hello! my name is SwiftBeta", role: .user))
48 | }
49 |
--------------------------------------------------------------------------------
/Demo/Demo/ChatCompletions/Subviews/TypingIndicatorView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | struct TypingIndicatorView: View {
4 | @State private var animationCycle = 0
5 | @State private var timer: Timer?
6 |
7 | let animationDuration: Double = 0.6
8 | let dotSize: CGFloat = 10
9 | let color: Color = .blue
10 |
11 | var body: some View {
12 | HStack(spacing: dotSize) {
13 | ForEach(0..<3) { index in
14 | Circle()
15 | .fill(color)
16 | .frame(width: dotSize, height: dotSize)
17 | .offset(y: animationCycle == index ? -dotSize : 0)
18 | }
19 | }
20 | .onAppear {
21 | animationCycle = 0
22 | timer = Timer.scheduledTimer(withTimeInterval: animationDuration, repeats: true) { _ in
23 | withAnimation(.easeInOut(duration: animationDuration)) {
24 | animationCycle = (animationCycle + 1) % 3
25 | }
26 | }
27 | }
28 | .onDisappear {
29 | timer?.invalidate()
30 | }
31 | }
32 | }
33 |
34 | #Preview {
35 | TypingIndicatorView()
36 | }
37 |
--------------------------------------------------------------------------------
/Demo/Demo/DemoApp.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | @main
4 | struct DemoApp: App {
5 | var body: some Scene {
6 | WindowGroup {
7 | ContentView()
8 | }
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/Demo/Demo/Helpers/Bundle+OpenAIAPIKey.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | extension Bundle {
4 |     func getOpenAIApiKey() -> String? { // Reads "OpenAI_API_Key" from this bundle's SwiftOpenAI.plist; nil when missing or empty
5 |         guard let path = path(forResource: "SwiftOpenAI", ofType: "plist"), // was hardcoded Bundle.main — honor the receiver so non-main bundles work; identical for existing Bundle.main callers
6 |               let plist = NSDictionary(contentsOfFile: path),
7 |               let value = plist.object(forKey: "OpenAI_API_Key") as? String,
8 |               !value.isEmpty else {
9 |             print("👇 Add your OpenAI API Key inside the project's SwiftOpenAI.plist 👇\nVisit: 🔗 https://platform.openai.com/api-keys")
10 |             return nil
11 |         }
12 |         return value
13 |     }
14 | }
15 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/CreateImages/CreateImageViewModel.swift:
--------------------------------------------------------------------------------
1 | import SwiftOpenAI
2 | import Foundation
3 | import Observation
4 |
5 | @Observable
6 | final class CreateImageViewModel {
7 | private let openAI = SwiftOpenAI(apiKey: Bundle.main.getOpenAIApiKey()!)
8 | var imageURLStrings: [String] = []
9 | var isLoading: Bool = false
10 |
11 | @MainActor
12 | func createImages(prompt: String) async {
13 | imageURLStrings = []
14 | isLoading = true
15 | do {
16 | guard let images = try await openAI.createImages(model: .dalle(.dalle3),
17 | prompt: prompt,
18 | numberOfImages: 1,
19 | size: .sw1024h1792) else {
20 | isLoading = false
21 | return
22 | }
23 | imageURLStrings = images.data.map { $0.url }
24 | isLoading = false
25 | } catch {
26 | isLoading = false
27 | print("Error creating Images with DALL·E: ", error.localizedDescription)
28 | }
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/CreateImages/CreateImagesView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
/// Prompt-driven DALL·E image generation screen: a text field at the bottom,
/// generated images (or a placeholder / loading state) above it.
struct CreateImagesView: View {
    var viewModel: CreateImageViewModel

    @State var prompt: String = "Create a highly detailed and realistic image capturing a moment of profound innovation and determination. Imagine a scene set in the early 2000s within a minimalist yet intensely focused work environment. In the center, a figure with a striking resemblance to a generic tech visionary, embodying the spirit and physicality of an era-defining entrepreneur, but not directly imitating Steve Jobs, is intensely focused on a sleek, yet-to-be-revealed device in his hands. This device, with its clean lines and revolutionary design, hints at being the first iPhone, marking the dawn of a new era in technology. The background is filled with sketches and prototypes, reflecting the culmination of years of design and innovation. The lighting is soft yet purposeful, highlighting the anticipation and the moment of breakthrough. This scene is not just about the creation of a device but the birth of a vision that would change the world"

    /// Sends the current prompt to the view model and clears the text field.
    /// Shared by the text field's submit action and the send button, which
    /// previously duplicated this Task verbatim.
    private func submitPrompt() {
        Task {
            await viewModel.createImages(prompt: prompt)
            prompt = ""
        }
    }

    var body: some View {
        VStack {
            Grid {
                // Placeholder (icon + hint text, or loading state) until the
                // first image arrives.
                if viewModel.imageURLStrings.isEmpty {
                    VStack {
                        Image(systemName: "photo.stack")
                            .resizable()
                            .scaledToFit()
                            .frame(width: 120, height: 120)
                        if viewModel.isLoading {
                            LoadingView()
                        } else {
                            Text("Add the prompt or description in the bottom text field of the image you wish to create")
                                .font(.system(size: 24))
                                .multilineTextAlignment(.center)
                                .padding(.horizontal, 32)
                        }
                    }
                    .font(.system(size: 24))
                    .padding(.top, 60)
                }

                if !viewModel.imageURLStrings.isEmpty {
                    ForEach(viewModel.imageURLStrings, id: \.self) { imageURL in
                        GridRow {
                            AsyncImage(url: URL(string: imageURL)) { image in
                                image
                                    .resizable()
                                    .scaledToFit()
                            } placeholder: {
                                ProgressView()
                            }
                        }
                    }
                }
            }
            Spacer()
            HStack {
                TextField("Write something for DALL·E", text: $prompt, axis: .vertical)
                    .padding(12)
                    .background(Color(.systemGray6))
                    .cornerRadius(25)
                    .lineLimit(6)
                    .onSubmit(submitPrompt)
                Button(action: submitPrompt) {
                    Image(systemName: "paperplane.fill")
                        .foregroundColor(Color.white)
                        .frame(width: 44, height: 44)
                        .background(Color.blue)
                        .cornerRadius(22)
                }
                .padding(.leading, 8)
            }
            .padding(.horizontal)
        }
        .padding(.top)
    }
}

#Preview {
    CreateImagesView(viewModel: .init())
}
80 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/CreateImages/Subviews/LoadingView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
/// Placeholder card shown while DALL·E is generating an image.
struct LoadingView: View {
    /// The message rendered on top of the rounded background.
    private var message: some View {
        Text("Generating image, please wait a few seconds...")
            .multilineTextAlignment(.center)
            .font(.system(size: 20, weight: .semibold))
            .foregroundColor(.primary)
            .padding(.horizontal)
    }

    var body: some View {
        VStack {
            RoundedRectangle(cornerRadius: 10)
                .fill(Color(.systemGray5))
                .frame(width: 350, height: 100)
                .overlay { message }
        }
    }
}

#Preview {
    LoadingView()
}
23 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/EditImage/Dependencies/CameraView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CameraView.swift
3 | // OpenAI
4 | //
5 | // Created by Home on 4/11/22.
6 | //
7 |
8 | import Foundation
9 | import UIKit
10 | import SwiftUI
11 |
/// Presents `UIImagePickerController` with the camera source from SwiftUI.
public struct CameraView: UIViewControllerRepresentable {
    @Binding var selectedImage: Image?
    @Environment(\.dismiss) var dismiss

    /// - Parameter selectedImage: Binding that receives the captured photo.
    /// Note: the generic argument `<Image?>` was missing, which does not compile.
    public init(selectedImage: Binding<Image?>) {
        self._selectedImage = selectedImage
    }

    public func makeUIViewController(context: Context) -> some UIViewController {
        let imagePickerController = UIImagePickerController()
        imagePickerController.delegate = context.coordinator
        imagePickerController.sourceType = .camera
        imagePickerController.allowsEditing = true
        imagePickerController.showsCameraControls = true
        return imagePickerController
    }

    public func updateUIViewController(_ uiViewController: UIViewControllerType, context: Context) {
        // Nothing to update: the picker is fully configured at creation time.
    }

    public func makeCoordinator() -> Coordinator {
        Coordinator(cameraView: self)
    }
}
37 |
/// Bridges `UIImagePickerControllerDelegate` callbacks back into `CameraView`.
final public class Coordinator: NSObject, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    var cameraView: CameraView

    init(cameraView: CameraView) {
        self.cameraView = cameraView
    }

    public func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
        // Prefer the user-edited image; if the cast fails the selection is
        // simply left unchanged, matching the original behavior.
        if let pickedImage = info[.editedImage] as? UIImage {
            cameraView.selectedImage = Image(uiImage: pickedImage)
        }
        cameraView.dismiss()
    }
}
54 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/EditImage/Dependencies/GalleryView.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import UIKit
3 | import SwiftUI
4 |
/// Presents `UIImagePickerController` with the photo-library source from SwiftUI.
public struct GalleryView: UIViewControllerRepresentable {
    @Binding var selectedImage: Image?
    @Environment(\.dismiss) var dismiss

    /// - Parameter selectedImage: Binding that receives the chosen photo.
    /// Note: the generic argument `<Image?>` was missing, which does not compile.
    public init(selectedImage: Binding<Image?>) {
        self._selectedImage = selectedImage
    }

    public func makeUIViewController(context: Context) -> some UIViewController {
        let imagePickerController = UIImagePickerController()
        imagePickerController.delegate = context.coordinator
        imagePickerController.sourceType = .photoLibrary
        imagePickerController.allowsEditing = true
        return imagePickerController
    }

    public func updateUIViewController(_ uiViewController: UIViewControllerType, context: Context) {
        // Nothing to update: the picker is fully configured at creation time.
    }

    public func makeCoordinator() -> GalleryCoordinator {
        GalleryCoordinator(galleryView: self)
    }
}
29 |
/// Bridges `UIImagePickerControllerDelegate` callbacks back into `GalleryView`.
final public class GalleryCoordinator: NSObject, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    var galleryView: GalleryView

    init(galleryView: GalleryView) {
        self.galleryView = galleryView
    }

    public func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
        // Prefer the user-edited image; if the cast fails the selection is
        // simply left unchanged, matching the original behavior.
        if let pickedImage = info[.editedImage] as? UIImage {
            galleryView.selectedImage = Image(uiImage: pickedImage)
        }
        galleryView.dismiss()
    }
}
46 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/EditImage/Dependencies/Line.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 |
/// A single stroke drawn on the canvas: an ordered list of point segments
/// plus the color and width used to stroke them.
public struct Line {
    // Segments composing the stroke, in drawing order.
    var points: [Point]
    // Stroke color.
    var color: Color
    // Stroke width in points (converted to CGFloat when rendered).
    var width: Float

    /// Creates a line from its segments, color, and width.
    public init(points: [Point], color: Color, width: Float) {
        self.points = points
        self.color = color
        self.width = width
    }
}
15 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/EditImage/Dependencies/Point.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// One segment of a stroke: the point the finger moved to and the point it
/// came from, so the renderer can draw a line between the two.
public struct Point {
    // Where the drag currently is.
    let currentPoint: CGPoint
    // Where the drag was on the previous update.
    let lastPoint: CGPoint

    public init(currentPoint: CGPoint, lastPoint: CGPoint) {
        self.currentPoint = currentPoint
        self.lastPoint = lastPoint
    }
}
12 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/EditImage/Dependencies/SwiftBetaCanvas.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
/// A minimal free-hand drawing canvas: accumulates `Line`s from drag gestures
/// and strokes them into a SwiftUI `Canvas`.
public struct SwiftBetaCanvas: View {
    // All strokes (finished and in-progress), shared with the owner.
    @Binding var lines: [Line]
    // Segments of the stroke currently being drawn; cleared on drag end.
    @State var points: [Point] = []
    // Index into `lines` of the stroke currently being drawn.
    @State var currentLine: Int = 0
    @State var currentLineColor: Color = .red
    var currentLineWidth: Float

    public init(lines: Binding<[Line]>,
                currentLineWidth: Float) {
        self._lines = lines
        self.currentLineWidth = currentLineWidth
    }

    public var body: some View {
        Canvas { context, _ in
            createNewPath(context: context, lines: lines)
        }
        .gesture(
            DragGesture()
                .onChanged({ value in
                    // Build a segment from the previous sample to this one;
                    // the first sample of a stroke is a zero-length segment.
                    let point = value.location
                    let lastPoint = points.isEmpty ? point : points.last!.currentPoint
                    let currentLinePoints = Point(currentPoint: point, lastPoint: lastPoint)
                    points.append(currentLinePoints)

                    if lines.isEmpty {
                        // Very first stroke on the canvas.
                        let line = Line(points: [currentLinePoints],
                                        color: currentLineColor,
                                        width: currentLineWidth)
                        lines.append(line)
                    } else {
                        var line: Line?

                        if currentLine >= lines.count {
                            // `currentLine` was advanced by onEnded, so this
                            // drag starts a brand-new stroke.
                            line = Line(points: [currentLinePoints],
                                        color: currentLineColor,
                                        width: currentLineWidth)
                            lines.append(line!)
                        } else {
                            // Still drawing the current stroke: replace its
                            // segments with the accumulated `points`.
                            line = lines[currentLine]
                            line?.points = points
                            line?.color = currentLineColor
                        }

                        if currentLine < lines.count {
                            lines[currentLine] = line!
                        }
                    }
                })
                .onEnded({ value in
                    // Finish the stroke: next drag writes into a new slot.
                    currentLine += 1
                    points.removeAll()
                })
        )
        .background(Color.clear)
        .frame(width: 400, height: 400)
    }

    /// Strokes every `Line` into `context` as one path per line, with round
    /// caps/joins so consecutive segments blend together.
    private func createNewPath(context: GraphicsContext,
                               lines: [Line]) {

        guard !lines.isEmpty else { return }

        for line in lines {
            var newPath = Path()
            for point in line.points {
                newPath.move(to: point.lastPoint)
                newPath.addLine(to: point.currentPoint)
            }
            context.stroke(newPath, with: .color(line.color), style: .init(lineWidth: CGFloat(line.width), lineCap: .round, lineJoin: .round))
        }
    }
}

struct SwiftBetaCanvas_Previews: PreviewProvider {
    static var previews: some View {
        SwiftBetaCanvas(lines: .constant([Line]()), currentLineWidth: 16)
    }
}
82 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/EditImage/Dependencies/View+ReverseMask.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 |
extension View {
    /// Masks this view with the *inverse* of `mask`: the area covered by
    /// `mask` is punched out of the receiver (destination-out blending),
    /// leaving everything else visible.
    /// - Parameter mask: Builder for the view whose shape is removed.
    /// Note: the generic parameter `<Mask: View>` was missing, leaving `Mask`
    /// undeclared — this did not compile.
    public func reverseMask<Mask: View>(@ViewBuilder _ mask: () -> Mask) -> some View {
        self.mask {
            Rectangle()
                .overlay(alignment: .center) {
                    mask()
                        .blendMode(.destinationOut)
                }
        }
    }
}
15 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/EditImage/EditImageViewModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftOpenAI
3 |
@Observable
final class EditImageViewModel {
    // Force-unwrap is deliberate in this demo: without an API key the app is unusable.
    private let openAI = SwiftOpenAI(apiKey: Bundle.main.getOpenAIApiKey()!)
    /// URL of the last edited image returned by the API.
    var imageURL: URL?
    /// `true` while the edit request is in flight.
    var isLoading: Bool = false

    /// Sends the image + mask to the DALL·E 2 edit endpoint and publishes the
    /// URL of the edited result.
    @MainActor
    func editImage(prompt: String, imageMask: Data, maskData: Data) async {
        isLoading = true
        // Single reset point for the spinner instead of one per exit path.
        defer { isLoading = false }

        do {
            let editedImage = try await openAI.editImage(model: .dalle(.dalle2), imageData: imageMask, maskData: maskData, prompt: prompt, numberOfImages: 1, size: .s512)
            // The wrapping `MainActor.run` was redundant: this method is
            // already @MainActor, so all state mutations run on the main actor.
            guard let editedImage, let urlString = editedImage.data.map({ $0.url }).last else {
                return
            }
            imageURL = URL(string: urlString)
        } catch {
            print("Error creating edit image", error.localizedDescription)
        }
    }
}
30 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/VariationImage/VariationImageView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
/// Screen that takes/picks a photo and asks DALL·E 2 for a variation of it.
struct VariationImageView: View {
    var viewModel = VariationImageViewModel()
    // The user's chosen photo; nil until the camera/gallery returns one.
    @State var selectedImage: Image?
    // Placeholder shown before any photo is selected.
    @State var emptyImage: Image = Image(systemName: "photo.on.rectangle.angled")
    @State var showCamera: Bool = false
    @State var showGallery: Bool = false

    // Either the selected photo (300x300) or the small placeholder icon.
    // Also rendered offline via ImageRenderer when generating (see below).
    var currentImage: some View {
        if let selectedImage {
            return selectedImage
                .resizable()
                .scaledToFill()
                .frame(width: 300, height: 300)
        } else {
            return emptyImage
                .resizable()
                .scaledToFill()
                .frame(width: 40, height: 40)
        }
    }

    var body: some View {
        Form {
            Text("Create a variation of the selected image")
                .font(.headline)
                .padding(.vertical, 12)

            // Shows the generated variation once `imageURL` is set; until
            // then the placeholder hosts the picked photo or a progress view.
            AsyncImage(url: viewModel.imageURL) { image in
                image
                    .resizable()
                    .scaledToFit()
            } placeholder: {
                VStack {
                    if !viewModel.isLoading {
                        ZStack {
                            currentImage
                        }
                    } else {
                        HStack {
                            Spacer()
                            VStack {
                                ProgressView()
                                    .padding(.bottom, 12)
                                Text("Your image is being generated, please wait 5 seconds! 🚀")
                                    .multilineTextAlignment(.center)
                            }
                            Spacer()
                        }
                    }
                }
                .frame(width: 300, height: 300)
            }

            // Photo sources: camera or photo library, both full-screen covers.
            HStack {
                Button {
                    showCamera.toggle()
                } label: {
                    Text("📷 Take a photo!")
                }
                .tint(.orange)
                .buttonStyle(.borderedProminent)
                .fullScreenCover(isPresented: $showCamera) {
                    CameraView(selectedImage: $selectedImage)
                }
                .padding(.vertical, 12)

                Spacer()

                Button {
                    showGallery.toggle()
                } label: {
                    Text("Open Gallery")
                }
                .tint(.purple)
                .buttonStyle(.borderedProminent)
                .fullScreenCover(isPresented: $showGallery) {
                    GalleryView(selectedImage: $selectedImage)
                }
                .padding(.vertical, 12)
            }

            HStack {
                Spacer()
                Button("🪄 Generate Image") {
                    // Rasterize the currently displayed image to PNG data,
                    // since the API needs bytes rather than a SwiftUI Image.
                    let selectedImageRenderer = ImageRenderer(content: currentImage)

                    Task {
                        guard let selecteduiImage = selectedImageRenderer.uiImage,
                              let selectedPNGData = selecteduiImage.pngData() else {
                            return
                        }

                        await viewModel.variationImage(imageMask: selectedPNGData)
                    }
                }
                .buttonStyle(.borderedProminent)
                .disabled(viewModel.isLoading)
            }
            .padding(.vertical, 12)
        }
    }
}

#Preview {
    VariationImageView()
}
109 |
--------------------------------------------------------------------------------
/Demo/Demo/Image/VariationImage/VariationImageViewModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftOpenAI
3 |
@Observable
final class VariationImageViewModel {
    // Force-unwrap is deliberate in this demo: without an API key the app is unusable.
    private let openAI = SwiftOpenAI(apiKey: Bundle.main.getOpenAIApiKey()!)
    /// URL of the last variation returned by the API.
    var imageURL: URL?
    /// `true` while the variation request is in flight.
    var isLoading: Bool = false

    /// Sends `imageMask` (PNG bytes) to the DALL·E 2 variations endpoint and
    /// publishes the URL of the generated variation.
    @MainActor
    func variationImage(imageMask: Data) async {
        isLoading = true
        // Single reset point for the spinner instead of one per exit path.
        defer { isLoading = false }

        do {
            let variationImage = try await openAI.variationImage(model: .dalle(.dalle2), imageData: imageMask, numberOfImages: 1, size: .s512)
            // The wrapping `MainActor.run` was redundant: this method is
            // already @MainActor, so all state mutations run on the main actor.
            guard let variationImage, let urlString = variationImage.data.map({ $0.url }).last else {
                return
            }
            imageURL = URL(string: urlString)
        } catch {
            print("Error creating variation image", error.localizedDescription)
        }
    }
}
31 |
--------------------------------------------------------------------------------
/Demo/Demo/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demo/Demo/SwiftOpenAI.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>OpenAI_API_Key</key>
	<string></string>
</dict>
</plist>
--------------------------------------------------------------------------------
/Demo/Demo/Vision/VisionView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import PhotosUI
3 |
/// Screen that sends either a remote image URL or a picked photo to the
/// GPT-4 Vision endpoint and shows the model's description.
struct VisionView: View {
    // 0 = analyze the fixed remote URL, 1 = analyze a gallery photo.
    @State private var visionStrategy = 0
    @State var viewModel: VisionViewModel

    var body: some View {
        VStack {
            Picker("What is your favorite color?", selection: $visionStrategy) {
                Text("URL").tag(0)
                Text("Gallery").tag(1)
            }
            .pickerStyle(.segmented)

            if visionStrategy == 0 {
                AsyncImage(url: URL(string: viewModel.imageVisionURL)) { image in
                    image
                        .resizable()
                        .scaledToFit()
                        .frame(width: 300, height: 300)
                } placeholder: {
                    ProgressView()
                        .padding(.bottom, 20)
                }
            } else {
                PhotosPicker(selection: $viewModel.photoSelection,
                             matching: .images,
                             photoLibrary: .shared()) {
                    Label("Add video or audio",
                          systemImage: "video.fill")
                }
                .frame(height: 300)
                .photosPickerStyle(.inline)
                // Observe the optional directly: force-unwrapping
                // `photoSelection!` crashed whenever the selection was nil.
                .onChange(of: viewModel.photoSelection) { oldValue, newValue in
                    newValue?.loadTransferable(type: Data.self) { [self] result in
                        switch result {
                        case .success(let data):
                            if let data {
                                viewModel.currentData = data
                            } else {
                                print("No supported content type found.")
                            }
                        case .failure(let error):
                            // A failed photo load is recoverable; the original
                            // fatalError crashed the whole app here.
                            print("Error loading photo: ", error.localizedDescription)
                        }
                    }
                }
            }

            if !viewModel.isLoading {
                Button(action: {
                    Task {
                        await viewModel.send(message: "Please analyze the image and describe its contents, providing any relevant details or information")
                    }
                }, label: {
                    Text("Describe Image from URL")
                })
                .buttonStyle(.borderedProminent)
            } else {
                ProgressView()
            }

            Divider()
                .padding(.top, 20)

            TextEditor(text: .constant(viewModel.message))
                .font(.body)
                .padding(.top, 12)
                .padding(.horizontal)
        }
        .padding(.horizontal, 32)
    }
}

#Preview {
    VisionView(viewModel: .init())
}
79 |
--------------------------------------------------------------------------------
/Demo/Demo/Vision/VisionViewModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftOpenAI
3 | import PhotosUI
4 | import SwiftUI
5 |
@Observable
final class VisionViewModel {
    // Force-unwrap is deliberate in this demo: without an API key the app is unusable.
    var openAI = SwiftOpenAI(apiKey: Bundle.main.getOpenAIApiKey()!)
    /// Remote image analyzed when no local photo has been picked.
    let imageVisionURL = "https://upload.wikimedia.org/wikipedia/commons/thumb/5/57/M31bobo.jpg/640px-M31bobo.jpg"
    /// The model's latest description of the image.
    var message: String = ""
    var isLoading = false

    // Local Image
    var photoSelection: PhotosPickerItem? = .init(itemIdentifier: "")
    var currentData: Data?

    /// Sends `message` plus the current image (picked photo as a base64 data
    /// URL, otherwise `imageVisionURL`) to GPT-4 Vision and stores the reply.
    @MainActor
    func send(message: String) async {
        isLoading = true
        // Bug fix: on a thrown error `isLoading` was never reset, leaving the
        // spinner stuck forever. `defer` resets it on every exit path.
        defer { isLoading = false }

        do {
            let imageValue: String
            if let data = currentData {
                // Inline the picked photo as a base64 data URL.
                let base64Image = data.base64EncodedString()
                imageValue = "data:image/jpeg;base64,\(base64Image)"
            } else {
                imageValue = imageVisionURL
            }

            let myMessage = MessageChatImageInput(text: message,
                                                  imageURL: imageValue,
                                                  role: .user)

            let optionalParameters: ChatCompletionsOptionalParameters = .init(temperature: 0.5,
                                                                              stop: ["stopstring"],
                                                                              stream: false,
                                                                              maxTokens: 1200)

            let result = try await openAI.createChatCompletionsWithImageInput(model: .gpt4(.gpt_4_vision_preview),
                                                                              messages: [myMessage],
                                                                              optionalParameters: optionalParameters)
            self.currentData = nil
            self.message = result?.choices.first?.message.content ?? "No value"

        } catch {
            // Typo fix: "understant" -> "understand".
            print("Error trying to understand the image you provided: ", error.localizedDescription)
        }
    }
}
51 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright 2023 SwiftBeta
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6 |
7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
8 |
9 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
10 |
--------------------------------------------------------------------------------
/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "pins" : [
3 | {
4 | "identity" : "swift-docc-plugin",
5 | "kind" : "remoteSourceControl",
6 | "location" : "https://github.com/apple/swift-docc-plugin",
7 | "state" : {
8 | "revision" : "9b1258905c21fc1b97bf03d1b4ca12c4ec4e5fda",
9 | "version" : "1.2.0"
10 | }
11 | },
12 | {
13 | "identity" : "swift-docc-symbolkit",
14 | "kind" : "remoteSourceControl",
15 | "location" : "https://github.com/apple/swift-docc-symbolkit",
16 | "state" : {
17 | "revision" : "b45d1f2ed151d057b54504d653e0da5552844e34",
18 | "version" : "1.0.0"
19 | }
20 | }
21 | ],
22 | "version" : 2
23 | }
24 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version: 5.7.1
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
// SwiftOpenAI package manifest: a single library target plus its test target.
let package = Package(
    name: "SwiftOpenAI",
    // Minimum deployment targets for consumers of the library.
    platforms: [.iOS(.v13), .macOS(.v12)],
    products: [
        .library(
            name: "SwiftOpenAI",
            targets: ["SwiftOpenAI"]),
    ],
    dependencies: [
        // Used only to generate DocC documentation; not a runtime dependency.
        .package(url: "https://github.com/apple/swift-docc-plugin", from: "1.2.0"),
    ],
    targets: [
        .target(
            name: "SwiftOpenAI",
            dependencies: [],
            plugins: []),
        .testTarget(
            name: "SwiftOpenAITests",
            dependencies: ["SwiftOpenAI"],
            resources: [
                // JSON fixtures bundled for the unit tests.
                .process("OpenAITests/Unit Tests/JSON")
            ])
    ]
)
30 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/APIClient/API.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Facade that composes the networking pipeline: route -> build request ->
/// add headers -> execute -> parse.
public final class API {
    private let requester: RequesterProtocol
    private let parser: ParserProtocol
    private let router: RouterProtocol
    private let requestBuilder: RequestBuilderProtocol

    public init(requester: RequesterProtocol = Requester(),
                parser: ParserProtocol = Parser(),
                router: RouterProtocol = Router(),
                requestBuilder: RequestBuilderProtocol = RequestBuilder()) {
        self.requester = requester
        self.parser = parser
        self.router = router
        self.requestBuilder = requestBuilder
    }

    /// Prefixes `endpoint.path` with the environment's base URL.
    public func routeEndpoint(_ endpoint: inout Endpoint, environment: BaseEnvironmentType) {
        router.routeEndpoint(&endpoint, environment: environment)
    }

    public func buildURLRequest(endpoint: Endpoint) -> URLRequest {
        requestBuilder.buildURLRequest(endpoint: endpoint)
    }

    public func addHeaders(urlRequest: inout URLRequest, headers: [String: String]) {
        requestBuilder.addHeaders(urlRequest: &urlRequest, headers: headers)
    }

    /// Runs the request and returns the raw payload or a typed `APIError`.
    /// Note: the generic/return types were stripped to bare `Result`; restored
    /// to `Result<Data, APIError>` so the file compiles.
    public func execute(with urlRequest: URLRequest) async -> Result<Data, APIError> {
        await requester.execute(with: urlRequest)
    }

    /// Decodes a successful payload into `T`, or decodes/throws the API's
    /// error model `E` on failure.
    public func parse<T: Decodable, E: Error & Decodable>(_ data: Result<Data, APIError>, type: T.Type, jsonDecoder: JSONDecoder, errorType: E.Type) throws -> T? {
        try parser.parse(data, type: T.self, jsonDecoder: jsonDecoder, errorType: E.self)
    }
}
39 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/APIClient/Endpoint/Endpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// HTTP verbs supported by the client; raw values are used verbatim as
/// `URLRequest.httpMethod`.
public enum HTTPMethod: String {
    case POST
    case GET
}

/// Describes one API endpoint: its path (made absolute by `Router`), verb,
/// and request parameters (JSON body for POST, query items for GET).
public protocol Endpoint {
    // `set` is required because `Router.routeEndpoint` rewrites the path.
    var path: String { get set }
    var method: HTTPMethod { get }
    var parameters: [String: Any]? { get }
}

// Defaults so conformers only declare what differs.
public extension Endpoint {
    var path: String { "" }
    var method: HTTPMethod { .GET }
    var parameters: [String: Any]? { nil }
}
19 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/APIClient/Parser/Parser.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Decodes raw API payloads into data models, mapping transport failures to
/// the API's typed error model.
/// Note: the generic clauses and `Result<Data, APIError>` were stripped to
/// bare `Result`/`T`/`E`; restored so the file compiles.
public protocol ParserProtocol {
    func parse<T: Decodable, E: Error & Decodable>(_ data: Result<Data, APIError>,
                                                   type: T.Type,
                                                   jsonDecoder: JSONDecoder, errorType: E.Type) throws -> T?
    func parse<T: Decodable>(_ data: Data, type: T.Type, jsonDecoder: JSONDecoder) throws -> T?
}
9 |
final public class Parser: ParserProtocol {
    public init() { }

    /// Unwraps a transport `Result`: decodes the payload into `T` on success;
    /// on failure, tries to decode the API's error model `E` and throws it
    /// (falling back to the original `APIError`).
    public func parse<T: Decodable, E: Error & Decodable>(_ result: Result<Data, APIError>,
                                                          type: T.Type,
                                                          jsonDecoder: JSONDecoder = .init(),
                                                          errorType: E.Type) throws -> T? {
        switch result {
        case .success(let data):
            return try parse(data, type: T.self, jsonDecoder: jsonDecoder)
        case .failure(let error):
            // Prefer the decoded API error model; otherwise surface the raw error.
            if let errorDataModel = try parseError(apiError: error, type: E.self) {
                throw errorDataModel
            }
            throw error
        }
    }

    /// Decodes `data` into `T`, logging and wrapping any `DecodingError`.
    public func parse<T: Decodable>(_ data: Data,
                                    type: T.Type,
                                    jsonDecoder: JSONDecoder = .init()) throws -> T? {
        do {
            return try jsonDecoder.decode(T.self, from: data)
        } catch let error as DecodingError {
            printDecodable(error: error)
            throw APIError.decodable(error)
        }
    }

    /// Decodes the server's JSON error payload into `E`. Throws the original
    /// `apiError` when it carries no JSON body.
    public func parseError<E: Error & Decodable>(apiError: APIError,
                                                 type: E.Type,
                                                 jsonDecoder: JSONDecoder = .init()) throws -> E? {
        guard case APIError.jsonResponseError(let jsonString) = apiError,
              let jsonData = jsonString.data(using: .utf8) else {
            throw apiError
        }
        // The original wrapped this in a do/catch that only rethrew — removed.
        return try jsonDecoder.decode(E.self, from: jsonData)
    }
}
55 |
extension Parser {
    /// Logs a human-readable description of a `DecodingError`, including the
    /// failing key/type and the decoder's context.
    func printDecodable(error: DecodingError) {
        let message: String
        switch error {
        case .keyNotFound(let key, let context):
            message = "[APIClient] Decoding Error: Key \"\(key)\" not found \nContext: \(context.debugDescription)"
        case .dataCorrupted(let context):
            message = "[APIClient] Decoding Error: Data corrupted \n(Context: \(context.debugDescription)) \nCodingKeys: \(context.codingPath)"
        case .typeMismatch(let type, let context):
            message = "[APIClient] Decoding Error: Type mismatch \"\(type)\" \nContext: \(context.debugDescription)"
        case .valueNotFound(let type, let context):
            message = "[APIClient] Decoding Error: Value not found, type \"\(type)\" \nContext: \(context.debugDescription)"
        @unknown default:
            // Typo fix in the log message: "catched" -> "caught".
            message = "[APIClient] Unknown DecodingError caught"
            assertionFailure(message)
        }
        print(message)
    }
}
75 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/APIClient/Requester/APIError.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Errors surfaced by the API client.
public enum APIError: Error {
    // URLSession transport failure.
    case urlSession(URLError)
    // Payload decoding failure (see Parser.printDecodable for details).
    case decodable(DecodingError)
    // HTTP 4xx/5xx with a JSON body; the associated value is the raw JSON.
    case jsonResponseError(String)
    case badErrorJSONFormat
    case unknown
}
10 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/APIClient/Requester/RequestBuilder.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Builds `URLRequest`s from `Endpoint` descriptions and applies headers.
public protocol RequestBuilderProtocol {
    func buildURLRequest(endpoint: Endpoint) -> URLRequest
    func addHeaders(urlRequest: inout URLRequest, headers: [String: String])
}
7 |
final public class RequestBuilder: RequestBuilderProtocol {

    public init() { }

    /// Builds a `URLRequest` for `endpoint`: parameters become a JSON body
    /// for POST and query items for GET. An empty/absent parameter set yields
    /// a bare request.
    public func buildURLRequest(endpoint: Endpoint) -> URLRequest {
        // Note: endpoint.path must already be an absolute URL (set by Router).
        var urlRequest = URLRequest(url: URL(string: endpoint.path)!)
        urlRequest.httpMethod = endpoint.method.rawValue

        switch endpoint.method {
        case .POST:
            guard let parameters = endpoint.parameters,
                  !parameters.isEmpty,
                  let postData = (try? JSONSerialization.data(withJSONObject: parameters,
                                                              options: [])) else {
                return urlRequest
            }
            urlRequest.httpBody = postData
        case .GET:
            guard let parameters = endpoint.parameters else {
                return urlRequest
            }
            var urlComponents = URLComponents(string: endpoint.path)
            urlComponents?.queryItems = parameters.map({ key, value in
                // Bug fix: `value as? String` silently dropped non-String
                // parameter values (query item got a nil value). Stringify
                // them instead; String values are passed through unchanged.
                URLQueryItem(name: key, value: value as? String ?? String(describing: value))
            })
            urlRequest.url = urlComponents?.url
        }

        return urlRequest
    }

    /// Sets each header on the request (overwriting any existing value).
    public func addHeaders(urlRequest: inout URLRequest, headers: [String: String]) {
        headers.forEach { key, value in
            urlRequest.setValue(value, forHTTPHeaderField: key)
        }
    }
}
45 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/APIClient/Requester/Requester.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Executes a `URLRequest` and returns the raw payload or a typed `APIError`.
/// Note: the return type was stripped to bare `Result`; restored to
/// `Result<Data, APIError>` so the file compiles.
public protocol RequesterProtocol {
    func execute(with urlRequest: URLRequest) async -> Result<Data, APIError>
}
6 |
final public class Requester: RequesterProtocol {
    private let urlSession: URLSession

    public init(urlSession: URLSession = URLSession.shared) {
        self.urlSession = urlSession
    }

    /// Performs the request. HTTP 400-599 responses are mapped to
    /// `.jsonResponseError` carrying the body text; transport failures to
    /// `.urlSession`; non-HTTP responses to `.unknown`.
    public func execute(with urlRequest: URLRequest) async -> Result<Data, APIError> {
        do {
            let (data, response) = try await urlSession.data(for: urlRequest)

            if let httpResponse = response as? HTTPURLResponse {
                let statusCode = httpResponse.statusCode

                if (400...599).contains(statusCode) {
                    // Keep the raw body so Parser can decode the API's error model.
                    if let jsonString = String(data: data, encoding: .utf8) {
                        throw APIError.jsonResponseError(jsonString)
                    } else {
                        throw APIError.unknown
                    }
                }
            } else {
                throw APIError.unknown
            }

            return .success(data)
        } catch let error as URLError {
            return .failure(.urlSession(error))
        } catch let error as APIError {
            return .failure(error)
        } catch {
            return .failure(.unknown)
        }
    }
}
42 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/APIClient/Router/BaseEnvironment.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// An API environment: exposes the base URL that `Router` prefixes onto
/// every endpoint path.
public protocol BaseEnvironmentType {
    var url: String { get set }
}
6 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/APIClient/Router/Router.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Resolves an endpoint's relative path against an environment's base URL.
public protocol RouterProtocol {
    /// Mutates `endpoint.path` in place, replacing the relative path with
    /// an absolute URL string built from `environment.url`.
    func routeEndpoint(_ endpoint: inout Endpoint, environment: BaseEnvironmentType)
}
6 |
/// Default `RouterProtocol` implementation: prefixes an endpoint's relative
/// path with the environment's base URL.
final public class Router: RouterProtocol {
    public init() { }

    /// Rewrites `endpoint.path` from a relative path (e.g. "chat/completions")
    /// to the absolute URL string `environment.url` + path.
    ///
    /// - Parameters:
    ///   - endpoint: Endpoint whose `path` is rewritten in place.
    ///   - environment: Supplies the base URL string.
    public func routeEndpoint(_ endpoint: inout Endpoint, environment: BaseEnvironmentType) {
        // Was `URL(string: environment.url)!` — a malformed base URL crashed
        // the process. Fail loudly in debug builds and leave the endpoint
        // untouched in release instead of force-unwrapping.
        guard var url = URL(string: environment.url) else {
            assertionFailure("Router: invalid base URL '\(environment.url)'")
            return
        }
        url.appendPathComponent(endpoint.path)
        endpoint.path = url.absoluteString
    }
}
16 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Audio/CreateTranscriptionDataModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Response body for the `audio/transcriptions` endpoint.
public struct CreateTranscriptionDataModel: Decodable {
    /// Transcription of the supplied audio file.
    public let text: String
}
6 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Audio/CreateTranslationDataModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Response body for the `audio/translations` endpoint.
public struct CreateTranslationDataModel: Decodable {
    /// English translation of the supplied audio file.
    public let text: String
}
6 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Audio/OpenAIAudioResponseType.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Audio container formats used for the speech endpoint's `response_format` field.
/// NOTE(review): this enum is also reused as `response_format` for the
/// transcription/translation endpoints (see CreateTranscriptionEndpoint) —
/// verify those endpoints accept audio formats rather than text formats
/// (json, srt, vtt, ...).
public enum OpenAIAudioResponseType: String {
    case mp3
    case opus
    case aac
    case flac
}
9 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Audio/OpenAITTSModelType.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Selector for the text-to-speech model family.
public enum OpenAITTSModelType {
    case tts(TTS)

    /// Raw model identifier sent as the request's "model" parameter.
    var name: String {
        switch self {
        case .tts(let model):
            return model.rawValue
        }
    }
}

/// Concrete TTS model identifiers.
public enum TTS: String {
    case tts1 = "tts-1"
    case tts1HD = "tts-1-hd"
}
18 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Audio/OpenAITranscriptionModelType.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Models accepted by the transcription and translation endpoints.
public enum OpenAITranscriptionModelType: String {
    case whisper = "whisper-1"
}
6 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Audio/OpenAIVoiceType.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Built-in voices for speech synthesis (the request's "voice" field).
public enum OpenAIVoiceType: String {
    case alloy
    case echo
    case fable
    case onyx
    case nova
    case shimmer
}
11 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Chat/ChatCompletionsDataModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Non-streamed response from `chat/completions`.
/// NOTE(review): snake_case wire fields (prompt_tokens, finish_reason, ...)
/// are presumably mapped by a convertFromSnakeCase decoding strategy
/// configured in the parser — confirm.
public struct ChatCompletionsDataModel: Decodable {
    public var id: String
    public var object: String
    public var created: Int
    public var choices: [ChoiceDataModel]
    public var usage: UsageDataModel
}

/// Token accounting for the request.
public struct UsageDataModel: Decodable {
    public var promptTokens: Int
    public var completionTokens: Int
    public var totalTokens: Int
}

/// One generated completion alternative.
public struct ChoiceDataModel: Decodable {
    public var index: Int
    /// Reason generation stopped (e.g. "stop"); may be absent.
    public var finishReason: String?
    public var message: MessageDataModel
}

/// The assistant message inside a choice.
public struct MessageDataModel: Decodable {
    public var role: String
    public var content: String
}
27 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Chat/ChatCompletionsOptionalParameters.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Optional tuning parameters for the `chat/completions` endpoint.
/// Stored properties are optional so "not set" can be represented.
public struct ChatCompletionsOptionalParameters {
    public let temperature: Double?
    public let topP: Double?
    public let n: Int?
    public let stop: [String]?
    public let stream: Bool
    public let maxTokens: Int?
    public let user: String?

    /// Defaults are unchanged from the previous release.
    /// Fix: the init previously took non-optional `temperature`/`topP`/`n`
    /// even though the stored properties are optional, so `nil` was
    /// unreachable; the parameters are widened to optionals (source-compatible
    /// for existing callers, which pass non-optional values).
    ///
    /// - Parameters:
    ///   - temperature: Sampling temperature; nil to leave unset.
    ///   - topP: Nucleus-sampling probability mass; nil to leave unset.
    ///   - n: Number of completions to generate; nil to leave unset.
    ///   - stop: Stop sequences, if any.
    ///   - stream: Whether to request a streamed (SSE) response.
    ///   - maxTokens: Upper bound on generated tokens; nil to leave unset.
    ///   - user: End-user identifier for abuse monitoring; nil to leave unset.
    public init(temperature: Double? = 1.0,
                topP: Double? = 1.0,
                n: Int? = 1,
                stop: [String]? = nil,
                stream: Bool = false,
                maxTokens: Int? = nil,
                user: String? = nil) {
        self.temperature = temperature
        self.topP = topP
        self.n = n
        self.stop = stop
        self.stream = stream
        self.maxTokens = maxTokens
        self.user = user
    }
}
28 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Chat/ChatCompletionsStreamDataModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// One chunk of a streamed chat completion.
public struct ChatCompletionsStreamDataModel: Decodable {
    public var id: String
    public var object: String
    public var created: Int
    public var model: String
    public var choices: [ChoicesStreamDataModel]

    /// Locally constructed sentinel marking end-of-stream
    /// (finishReason "stop"). Not decoded from the API: the id is a fresh
    /// UUID and the remaining fields are placeholders.
    static var finished: ChatCompletionsStreamDataModel = {
        .init(id: UUID().uuidString,
              object: "",
              created: 1,
              model: "",
              choices: [.init(index: 0, finishReason: "stop")])
    }()
}

/// A single streamed choice; `delta` carries the incremental content.
public struct ChoicesStreamDataModel: Decodable {
    public var delta: DeltaDataModel?
    public var index: Int
    public var finishReason: String?
}

/// Incremental message fragment inside a stream chunk.
public struct DeltaDataModel: Decodable {
    /// New text in this chunk; nil when the delta has no "content" key.
    public let content: String?

    enum CodingKeys: String, CodingKey {
        case content
    }

    /// Custom decoder so chunks whose delta lacks "content" decode to
    /// `content == nil` instead of throwing.
    public init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        content = try container.decodeIfPresent(String.self, forKey: .content)
    }
}
37 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Completions/CompletionsDataModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Response from the legacy `completions` endpoint.
/// NOTE(review): snake_case wire fields are presumably mapped by a
/// convertFromSnakeCase decoding strategy configured in the parser — confirm.
public struct CompletionsDataModel: Decodable {
    public var id: String
    public var object: String
    public var created: Int
    public var model: String
    public var choices: [CompletionsChoiceDataModel]
    public var usage: CompletionsUsageDataModel
}

/// Token accounting for the request.
public struct CompletionsUsageDataModel: Decodable {
    public var promptTokens: Int
    public var completionTokens: Int
    public var totalTokens: Int
}

/// One generated completion alternative.
public struct CompletionsChoiceDataModel: Decodable {
    public var text: String
    public var index: Int
    /// Reason generation stopped; may be absent.
    public var finishReason: String?
}
23 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Completions/CompletionsOptionalParameters.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Parameters for the legacy `completions` endpoint.
/// Stored properties are optional so "not set" can be represented.
public struct CompletionsOptionalParameters {
    public let prompt: String
    public let suffix: String?
    public let maxTokens: Int?
    public let temperature: Double?
    public let topP: Double?
    public let n: Int?
    public let logprobs: Int?
    public let echo: Bool?
    public let stop: String?
    public let presencePenalty: Double?
    public let frequencyPenalty: Double?
    public let bestOf: Int?
    public let user: String?

    /// Defaults are unchanged from the previous release.
    /// Fix: the init previously forced `suffix` and `user` to be non-optional
    /// `String` even though the stored properties are `String?`, so `nil`
    /// was unreachable; both are widened to optionals with the same ""
    /// default, which is source-compatible for existing callers.
    ///
    /// - Parameters:
    ///   - prompt: Text the model should complete (required).
    ///   - suffix: Text appended after the completion; nil to leave unset.
    ///   - maxTokens: Upper bound on generated tokens.
    ///   - temperature: Sampling temperature.
    ///   - topP: Nucleus-sampling probability mass.
    ///   - n: Number of completions to generate.
    ///   - logprobs: Number of log-probabilities to return, if any.
    ///   - echo: Whether to echo the prompt in the completion.
    ///   - stop: Stop sequence, if any.
    ///   - presencePenalty: Penalty for tokens already present.
    ///   - frequencyPenalty: Penalty proportional to token frequency.
    ///   - bestOf: Server-side candidates to sample before returning `n`.
    ///   - user: End-user identifier; nil to leave unset.
    public init(prompt: String,
                suffix: String? = "",
                maxTokens: Int? = 16,
                temperature: Double? = 1.0,
                topP: Double? = 1.0,
                n: Int? = 1,
                logprobs: Int? = nil,
                echo: Bool? = false,
                stop: String? = nil,
                presencePenalty: Double? = 0.0,
                frequencyPenalty: Double? = 0.0,
                bestOf: Int? = 1,
                user: String? = "") {
        self.prompt = prompt
        self.suffix = suffix
        self.maxTokens = maxTokens
        self.temperature = temperature
        self.topP = topP
        self.n = n
        self.logprobs = logprobs
        self.echo = echo
        self.stop = stop
        self.presencePenalty = presencePenalty
        self.frequencyPenalty = frequencyPenalty
        self.bestOf = bestOf
        self.user = user
    }
}
46 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Embedding/EmbeddingDataModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Response from the `embeddings` endpoint.
public struct EmbeddingResponseDataModel: Decodable {
    public let object: String
    /// One entry per embedded input.
    public let data: [EmbeddingDataModel]
    public let model: String
    public let usage: EmbeddingUsageDataModel
}

/// One embedding vector with its position in the input batch.
public struct EmbeddingDataModel: Decodable {
    public let object: String
    public let embedding: [Float]
    public let index: Int
}

/// Token accounting for the embeddings request.
public struct EmbeddingUsageDataModel: Decodable {
    public let promptTokens: Int
    public let totalTokens: Int
}
20 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Images/CreateImageDataModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Response from `images/generations`.
public struct CreateImageDataModel: Decodable {
    /// Creation time as reported by the API.
    public let created: Int
    /// One entry per generated image.
    public let data: [CreateImageURLDataModel]
}

/// Download location for a generated image.
public struct CreateImageURLDataModel: Decodable {
    public let url: String
}
11 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Images/ImageSize.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Image dimensions accepted by the image endpoints (the "size" field).
/// NOTE(review): the 1792-pixel sizes are presumably DALL·E 3 only — confirm
/// before combining them with `Dalle.dalle2`.
public enum ImageSize: String {
    case s256 = "256x256"
    case s512 = "512x512"
    case s1024 = "1024x1024"
    case sw1792h1024 = "1792x1024"
    case sw1024h1792 = "1024x1792"
}
10 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Message/MessageChatGPT.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// A single chat message (role + text) with a stable identity
/// (`Identifiable`/`Hashable`) for use in SwiftUI lists.
public struct MessageChatGPT: Identifiable, Hashable {
    /// Unique identifier of the message.
    public var id: UUID
    /// Message content.
    public var text: String
    /// Sender role (user / assistant / system).
    public var role: MessageRoleType

    /// Creates a message.
    /// Generalization: `id` was always a fresh `UUID()`; it is now injectable
    /// (useful for persistence and testing) with the same default, so
    /// existing call sites are unaffected.
    public init(id: UUID = UUID(), text: String, role: MessageRoleType) {
        self.id = id
        self.text = text
        self.role = role
    }
}
14 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Message/MessageChatImageInput.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// A chat message for vision input: text plus an optional image URL,
/// with a stable identity (`Identifiable`/`Hashable`) for SwiftUI lists.
public struct MessageChatImageInput: Identifiable, Hashable {
    /// Unique identifier of the message.
    public var id: UUID
    /// Message text content.
    public var text: String
    /// URL of the attached image ("" for none — see the endpoint mapper).
    public var imageURL: String
    /// Sender role (user / assistant / system).
    public var role: MessageRoleType

    /// Creates a message.
    /// Generalization: `id` was always a fresh `UUID()`; it is now injectable
    /// (useful for persistence and testing) with the same default, so
    /// existing call sites are unaffected.
    public init(id: UUID = UUID(), text: String, imageURL: String, role: MessageRoleType) {
        self.id = id
        self.text = text
        self.imageURL = imageURL
        self.role = role
    }
}
16 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Message/MessageRoleType.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Chat roles recognized by the chat completions API.
public enum MessageRoleType: String {
    case user
    case assistant
    case system
}
8 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Models/ModelDataModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Response wrapper for GET `models`.
public struct ModelListDataModel: Decodable {
    // Fix: the struct is public but these properties were implicitly
    // internal, so library consumers could not read the decoded list.
    public let data: [ModelDataModel]
    public let object: String

    public init(data: [ModelDataModel], object: String) {
        self.data = data
        self.object = object
    }
}

/// Description of a single available model.
public struct ModelDataModel: Decodable {
    // Fix: widened from implicit internal to public (see above).
    public let id: String
    public let created: Int
    public let object: String
    public let ownedBy: String

    // NOTE(review): `ownedBy` relies on a snake_case key-decoding strategy
    // ("owned_by" -> "ownedBy") being configured in the parser — confirm.
    private enum CodingKeys: String, CodingKey {
        case id
        case created
        case object
        case ownedBy
    }
}
26 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/Moderations/ModerationsDataModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Response from the `moderations` endpoint.
public struct ModerationDataModel: Decodable {
    public let id: String
    public let model: String
    /// One result per moderated input.
    public let results: [ModerationResultDataModel]
}

/// Moderation verdict for a single input.
public struct ModerationResultDataModel: Decodable {
    /// Per-category boolean flags.
    public let categories: ModerationCategoriesDataModel
    /// Per-category scores.
    public let categoryScores: ModerationCategoriesScoreDataModel
    /// Overall verdict for the input.
    public let flagged: Bool
}

/// Category flags. CodingKeys map the API's slash/hyphen key names
/// (e.g. "hate/threatening", "self-harm") onto Swift identifiers.
public struct ModerationCategoriesDataModel: Decodable {
    public let hate: Bool
    public let hateThreatening: Bool
    public let selfHarm: Bool
    public let sexual: Bool
    public let sexualMinors: Bool
    public let violence: Bool
    public let violenceGraphic: Bool

    enum CodingKeys: String, CodingKey {
        case hate
        case hateThreatening = "hate/threatening"
        case selfHarm = "self-harm"
        case sexual
        case sexualMinors = "sexual/minors"
        case violence
        case violenceGraphic = "violence/graphic"
    }
}

/// Category scores, keyed identically to `ModerationCategoriesDataModel`.
public struct ModerationCategoriesScoreDataModel: Decodable {
    public let hate: Double
    public let hateThreatening: Double
    public let selfHarm: Double
    public let sexual: Double
    public let sexualMinors: Double
    public let violence: Double
    public let violenceGraphic: Double

    enum CodingKeys: String, CodingKey {
        case hate
        case hateThreatening = "hate/threatening"
        case selfHarm = "self-harm"
        case sexual
        case sexualMinors = "sexual/minors"
        case violence
        case violenceGraphic = "violence/graphic"
    }
}
54 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/OpenAIError.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Error payload returned by the OpenAI API, shaped as {"error": {...}}.
struct OpenAIAPIError: Decodable, Error {
    let code: String?
    let message: String
    let param: String?
    let type: String

    // Top-level key: the payload nests every field under "error".
    private enum CodingKeys: String, CodingKey {
        case error
    }

    // Keys inside the nested "error" object.
    private enum ErrorKeys: String, CodingKey {
        case code, message, param, type
    }

    /// Decodes by drilling into the nested "error" container.
    /// `code` and `param` may be null in the payload; `message` and `type`
    /// are required and throw if missing.
    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        let errorContainer = try container.nestedContainer(keyedBy: ErrorKeys.self, forKey: .error)

        code = try errorContainer.decodeIfPresent(String.self, forKey: .code)
        message = try errorContainer.decode(String.self, forKey: .message)
        param = try errorContainer.decodeIfPresent(String.self, forKey: .param)
        type = try errorContainer.decode(String.self, forKey: .type)
    }
}
27 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/DataModels/OpenAIModelType.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Selector for the text/embedding model families supported by the client.
public enum OpenAIModelType {
    case gpt4o(GPT4o)
    case gpt4(GPT4)
    case gpt3_5(GPT3_5)
    case embedding(EmbeddingModel)

    /// Raw model identifier sent as the request's "model" parameter.
    var name: String {
        switch self {
        case .gpt4o(let model): return model.rawValue
        case .gpt4(let model): return model.rawValue
        case .gpt3_5(let model): return model.rawValue
        case .embedding(let model): return model.rawValue
        }
    }
}

/// Selector for the image-generation model family.
public enum OpenAIImageModelType {
    case dalle(Dalle)

    /// Raw model identifier sent as the request's "model" parameter.
    var name: String {
        switch self {
        case .dalle(let model): return model.rawValue
        }
    }
}

/// GPT-4o model identifiers.
public enum GPT4o: String {
    case base = "gpt-4o"
    case gpt_4o_2024_05_13 = "gpt-4o-2024-05-13"
}

/// GPT-4 family model identifiers.
public enum GPT4: String {
    case base = "gpt-4-turbo"
    case gpt_4_turbo_2024_04_09 = "gpt-4-turbo-2024-04-09"
    case gpt_4 = "gpt-4"
    case gpt_4_turbo_preview = "gpt-4-turbo-preview"
    case gpt_4_0125_preview = "gpt-4-0125-preview"
    case gpt_4_1106_preview = "gpt-4-1106-preview"
    case gpt_4_1106_vision_preview = "gpt-4-1106-vision-preview"
    case gpt_4_vision_preview = "gpt-4-vision-preview"
    case gpt_4_32k = "gpt-4-32k"
    case gpt_4_0613 = "gpt-4-0613"
    case gpt_4_32k_0613 = "gpt-4-32k-0613"
}

/// GPT-3.5 family model identifiers.
public enum GPT3_5: String {
    case turbo = "gpt-3.5-turbo"
    case gpt_3_5_turbo_0125 = "gpt-3.5-turbo-0125"
    case gpt_3_5_turbo_1106 = "gpt-3.5-turbo-1106"
    case gpt_3_5_turbo_instruct = "gpt-3.5-turbo-instruct"
}

/// DALL·E model identifiers.
public enum Dalle: String {
    case dalle2 = "dall-e-2"
    case dalle3 = "dall-e-3"
}

/// Embedding model identifiers.
public enum EmbeddingModel: String {
    case text_embedding_ada_002 = "text-embedding-ada-002"
    case text_embedding_3_small = "text-embedding-3-small"
    case text_embedding_3_large = "text-embedding-3-large"
}
70 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Environment/OpenAIBaseEnvironment.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Production OpenAI REST environment, API version 1.
struct OpenAIEnvironmentV1: BaseEnvironmentType {
    /// Base URL; the router appends each endpoint's relative path to this.
    var url: String = "https://api.openai.com/v1/"
}
6 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Audio/CreateSpeechEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `audio/speech` — synthesizes speech from `input` text.
struct CreateSpeechEndpoint: Endpoint {
    private let model: OpenAITTSModelType
    private let input: String
    private let voice: OpenAIVoiceType
    private let responseFormat: OpenAIAudioResponseType
    private let speed: Double

    var method: HTTPMethod { .POST }

    var path: String = "audio/speech"

    init(model: OpenAITTSModelType,
         input: String,
         voice: OpenAIVoiceType,
         responseFormat: OpenAIAudioResponseType,
         speed: Double) {
        self.model = model
        self.input = input
        self.voice = voice
        self.responseFormat = responseFormat
        self.speed = speed
    }

    /// JSON body of the request. All values are non-optional, so no
    /// `as Any` casts are required.
    var parameters: [String: Any]? {
        [
            "model": model.name,
            "input": input,
            "voice": voice.rawValue,
            "response_format": responseFormat.rawValue,
            "speed": speed
        ]
    }
}
36 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Audio/CreateTranscriptionEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `audio/transcriptions` — speech-to-text request for an audio file.
struct CreateTranscriptionEndpoint: Endpoint {
    // NOTE(review): `file` is carried on the endpoint but is not part of
    // `parameters`; it is presumably attached as multipart form data
    // elsewhere — confirm in the request builder.
    private let file: Data
    private let model: OpenAITranscriptionModelType
    private let language: String
    private let prompt: String
    private let responseFormat: OpenAIAudioResponseType
    private let temperature: Double

    var method: HTTPMethod { .POST }

    var path: String = "audio/transcriptions"

    init(file: Data,
         model: OpenAITranscriptionModelType,
         language: String = "en",
         prompt: String = "",
         responseFormat: OpenAIAudioResponseType,
         temperature: Double = 0.0) {
        self.file = file
        self.model = model
        self.language = language
        self.prompt = prompt
        self.responseFormat = responseFormat
        self.temperature = temperature
    }

    /// Form fields accompanying the uploaded file. All values are
    /// non-optional, so no `as Any` casts are required.
    var parameters: [String: Any]? {
        [
            "model": model.rawValue,
            "language": language,
            "prompt": prompt,
            "response_format": responseFormat.rawValue,
            "temperature": temperature
        ]
    }
}
39 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Audio/CreateTranslationEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `audio/translations` — translates an audio file into English text.
struct CreateTranslationEndpoint: Endpoint {
    // NOTE(review): `file` is carried on the endpoint but is not part of
    // `parameters`; it is presumably attached as multipart form data
    // elsewhere — confirm in the request builder.
    private let file: Data
    private let model: OpenAITranscriptionModelType
    private let prompt: String
    private let responseFormat: OpenAIAudioResponseType
    private let temperature: Double

    var method: HTTPMethod { .POST }

    var path: String = "audio/translations"

    init(file: Data,
         model: OpenAITranscriptionModelType,
         prompt: String = "",
         responseFormat: OpenAIAudioResponseType,
         temperature: Double = 0.0) {
        self.file = file
        self.model = model
        self.prompt = prompt
        self.responseFormat = responseFormat
        self.temperature = temperature
    }

    /// Form fields accompanying the uploaded file. All values are
    /// non-optional, so no `as Any` casts are required.
    var parameters: [String: Any]? {
        [
            "model": model.rawValue,
            "prompt": prompt,
            "response_format": responseFormat.rawValue,
            "temperature": temperature
        ]
    }
}
35 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Chat/ChatCompletionsEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `chat/completions` — chat request built from plain-text messages.
struct ChatCompletionsEndpoint: Endpoint {
    private let model: OpenAIModelType
    /// Messages pre-mapped to the wire format: [{"role": ..., "content": ...}].
    private var messages: [[String: String]] = []

    private let optionalParameters: ChatCompletionsOptionalParameters?

    var method: HTTPMethod {
        .POST
    }

    var path: String = "chat/completions"

    init(model: OpenAIModelType,
         messages: [MessageChatGPT],
         optionalParameters: ChatCompletionsOptionalParameters?) {
        self.model = model
        self.messages = Self.mapMessageModelToDictionary(messages: messages)
        self.optionalParameters = optionalParameters
    }

    /// JSON body of the request.
    /// NOTE(review): the optional values here are boxed into `Any`; when
    /// `optionalParameters` is nil (or a field is nil) the entry holds
    /// Optional.none, which presumably serializes as JSON null — confirm
    /// the API accepts null for these fields, or strip nils before encoding.
    var parameters: [String: Any]? {
        ["model": self.model.name as Any,
         "messages": self.messages as Any,
         "temperature": self.optionalParameters?.temperature as Any,
         "top_p": self.optionalParameters?.topP as Any,
         "n": self.optionalParameters?.n as Any,
         "stop": self.optionalParameters?.stop as Any,
         "stream": self.optionalParameters?.stream as Any,
         "max_tokens": self.optionalParameters?.maxTokens as Any]
    }

    /// Converts each message into the {"role", "content"} dictionary the API expects.
    private static func mapMessageModelToDictionary(messages: [MessageChatGPT]) -> [[String: String]] {
        return messages.map {
            ["role": $0.role.rawValue, "content": $0.text]
        }
    }
}
40 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Chat/ChatCompletionsImageInputEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `chat/completions` for vision input: each message may pair text
/// with an image URL.
struct ChatCompletionsImageInputEndpoint: Endpoint {
    private let model: OpenAIModelType
    /// Messages pre-mapped to the wire format; each "content" is an array of
    /// {"type": "text"} and {"type": "image_url"} parts.
    private var messages: [[String: Any]] = []

    private let optionalParameters: ChatCompletionsOptionalParameters?

    var method: HTTPMethod {
        .POST
    }

    var path: String = "chat/completions"

    init(model: OpenAIModelType,
         messages: [MessageChatImageInput],
         optionalParameters: ChatCompletionsOptionalParameters?) {
        self.model = model
        self.messages = Self.mapMessageModelToDictionary(messages: messages)
        self.optionalParameters = optionalParameters
    }

    /// JSON body of the request.
    /// NOTE(review): the optional values here are boxed into `Any`; nil
    /// entries presumably serialize as JSON null — confirm the API accepts
    /// null for these fields, or strip nils before encoding.
    var parameters: [String: Any]? {
        ["model": self.model.name as Any,
         "messages": self.messages as Any,
         "temperature": self.optionalParameters?.temperature as Any,
         "top_p": self.optionalParameters?.topP as Any,
         "n": self.optionalParameters?.n as Any,
         "stop": self.optionalParameters?.stop as Any,
         "stream": self.optionalParameters?.stream as Any,
         "max_tokens": self.optionalParameters?.maxTokens as Any]
    }

    /// Builds the multi-part "content" array for each message; the image
    /// part is only appended when `imageURL` is non-empty.
    private static func mapMessageModelToDictionary(messages: [MessageChatImageInput]) -> [[String: Any]] {
        return messages.map { message in
            var contentArray: [[String: Any]] = []
            contentArray.append(["type": "text", "text": message.text])

            if !message.imageURL.isEmpty {
                contentArray.append(["type": "image_url", "image_url": ["url": message.imageURL]])
            }

            return ["role": message.role.rawValue, "content": contentArray]
        }
    }
}
47 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Chat/CompletionsEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `completions` — legacy text-completion request.
struct CompletionsEndpoint: Endpoint {
    private let model: OpenAIModelType
    private let optionalParameters: CompletionsOptionalParameters?

    var method: HTTPMethod {
        .POST
    }

    var path: String = "completions"

    init(model: OpenAIModelType,
         optionalParameters: CompletionsOptionalParameters?) {
        self.model = model
        self.optionalParameters = optionalParameters
    }

    /// JSON body of the request.
    /// NOTE(review): the optional values here are boxed into `Any`; nil
    /// entries (including "prompt" when `optionalParameters` is nil)
    /// presumably serialize as JSON null — confirm the API accepts null
    /// for these fields, or strip nils before encoding.
    var parameters: [String: Any]? {
        ["model": self.model.name as Any,
         "prompt": self.optionalParameters?.prompt as Any,
         "suffix": self.optionalParameters?.suffix as Any,
         "max_tokens": self.optionalParameters?.maxTokens as Any,
         "temperature": self.optionalParameters?.temperature as Any,
         "top_p": self.optionalParameters?.topP as Any,
         "n": self.optionalParameters?.n as Any,
         "logprobs": self.optionalParameters?.logprobs as Any,
         "echo": self.optionalParameters?.echo as Any,
         "stop": self.optionalParameters?.stop as Any,
         "presence_penalty": self.optionalParameters?.presencePenalty as Any,
         "frequency_penalty": self.optionalParameters?.frequencyPenalty as Any,
         "best_of": self.optionalParameters?.bestOf as Any,
         "user": self.optionalParameters?.user as Any
        ]
    }
}
37 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Embeddings/EmbeddingsEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `embeddings` — requests an embedding vector for `input`.
struct CreateEmbeddingsEndpoint: Endpoint {
    private let model: OpenAIModelType
    private let input: String

    var method: HTTPMethod { .POST }

    var path: String = "embeddings"

    init(model: OpenAIModelType,
         input: String) {
        self.model = model
        self.input = input
    }

    /// JSON body of the request. Both values are non-optional, so no
    /// `as Any` casts are required.
    var parameters: [String: Any]? {
        [
            "model": model.name,
            "input": input
        ]
    }
}
25 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Image/CreateImageEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `images/generations` — requests `numberOfImages` images for `prompt`.
struct CreateImageEndpoint: Endpoint {
    private let model: OpenAIImageModelType
    private let prompt: String
    private let numberOfImages: Int
    /// Raw "WxH" size string (taken from `ImageSize.rawValue` at init).
    private let size: String

    var method: HTTPMethod { .POST }

    var path: String = "images/generations"

    init(model: OpenAIImageModelType,
         prompt: String,
         numberOfImages: Int,
         size: ImageSize) {
        self.model = model
        self.prompt = prompt
        self.numberOfImages = numberOfImages
        self.size = size.rawValue
    }

    /// JSON body of the request. All values are non-optional, so no
    /// `as Any` casts are required.
    var parameters: [String: Any]? {
        [
            "model": model.name,
            "prompt": prompt,
            "n": numberOfImages,
            "size": size
        ]
    }
}
32 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Image/EditImageEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `images/edits` — selects the model for an image-edit request.
/// NOTE(review): the image/mask payloads are not part of `parameters`;
/// they are presumably attached as multipart form data elsewhere — confirm.
struct EditImageEndpoint: Endpoint {
    private let model: OpenAIImageModelType

    var method: HTTPMethod { .POST }

    var path: String = "images/edits"

    init(model: OpenAIImageModelType) {
        self.model = model
    }

    /// Form field naming the model; non-optional, so no `as Any` cast needed.
    var parameters: [String: Any]? {
        ["model": model.name]
    }
}
20 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Image/VariationImageEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `images/variations` — selects the model for an image-variation request.
/// NOTE(review): the source image is not part of `parameters`; it is
/// presumably attached as multipart form data elsewhere — confirm.
struct VariationImageEndpoint: Endpoint {
    private let model: OpenAIImageModelType

    var method: HTTPMethod { .POST }

    var path: String = "images/variations"

    init(model: OpenAIImageModelType) {
        self.model = model
    }

    /// Form field naming the model; non-optional, so no `as Any` cast needed.
    var parameters: [String: Any]? {
        ["model": model.name]
    }
}
20 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Models/ListModelsEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// GET `models` — lists the models available to the configured API key.
/// No body parameters; relies on the protocol's default `parameters`.
struct ListModelsEndpoint: Endpoint {
    var method: HTTPMethod {
        .GET
    }

    var path: String = "models"
}
10 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/List/Moderation/ModerationEndpoint.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// POST `moderations` — classifies `input` against OpenAI's content policy.
struct ModerationEndpoint: Endpoint {
    private let input: String

    var method: HTTPMethod { .POST }

    var path: String = "moderations"

    init(input: String) {
        self.input = input
    }

    /// JSON body of the request; `input` is non-optional, so no `as Any`
    /// cast is required.
    var parameters: [String: Any]? {
        ["input": input]
    }
}
20 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/OpenAIEndpoints/OpenAIEndpoints.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Every endpoint exposed by this package, together with the parameters
/// needed to construct its concrete `Endpoint` value.
enum OpenAIEndpoints {
    case listModels
    case completions(model: OpenAIModelType, optionalParameters: CompletionsOptionalParameters?)
    case chatCompletions(model: OpenAIModelType, messages: [MessageChatGPT], optionalParameters: ChatCompletionsOptionalParameters?)
    case chatCompletionsWithImageInput(model: OpenAIModelType, messages: [MessageChatImageInput], optionalParameters: ChatCompletionsOptionalParameters?)
    case createImage(model: OpenAIImageModelType, prompt: String, numberOfImages: Int, size: ImageSize)
    case editImage(model: OpenAIImageModelType)
    case variationImage(model: OpenAIImageModelType)
    case embeddings(model: OpenAIModelType, input: String)
    case moderations(input: String)
    case createSpeech(model: OpenAITTSModelType, input: String, voice: OpenAIVoiceType, responseFormat: OpenAIAudioResponseType, speed: Double)
    case createTranscription(file: Data, model: OpenAITranscriptionModelType, language: String, prompt: String, responseFormat: OpenAIAudioResponseType, temperature: Double)
    case createTranslation(file: Data, model: OpenAITranscriptionModelType, prompt: String, responseFormat: OpenAIAudioResponseType, temperature: Double)

    /// Maps each case onto the concrete `Endpoint` implementation that knows
    /// its path, HTTP method and body parameters.
    public var endpoint: Endpoint {
        switch self {
        case .listModels:
            return ListModelsEndpoint()
        case let .completions(model, optionalParameters):
            return CompletionsEndpoint(model: model, optionalParameters: optionalParameters)
        case let .chatCompletions(model, messages, optionalParameters):
            return ChatCompletionsEndpoint(model: model, messages: messages, optionalParameters: optionalParameters)
        case let .chatCompletionsWithImageInput(model, messages, optionalParameters):
            return ChatCompletionsImageInputEndpoint(model: model, messages: messages, optionalParameters: optionalParameters)
        case let .createImage(model, prompt, numberOfImages, size):
            return CreateImageEndpoint(model: model, prompt: prompt, numberOfImages: numberOfImages, size: size)
        case let .editImage(model):
            return EditImageEndpoint(model: model)
        case let .variationImage(model):
            return VariationImageEndpoint(model: model)
        case let .embeddings(model, input):
            return CreateEmbeddingsEndpoint(model: model, input: input)
        case let .moderations(input):
            return ModerationEndpoint(input: input)
        case let .createSpeech(model, input, voice, responseFormat, speed):
            return CreateSpeechEndpoint(model: model, input: input, voice: voice, responseFormat: responseFormat, speed: speed)
        case let .createTranscription(file, model, language, prompt, responseFormat, temperature):
            return CreateTranscriptionEndpoint(file: file, model: model, language: language, prompt: prompt, responseFormat: responseFormat, temperature: temperature)
        case let .createTranslation(file, model, prompt, responseFormat, temperature):
            return CreateTranslationEndpoint(file: file, model: model, prompt: prompt, responseFormat: responseFormat, temperature: temperature)
        }
    }
}
69 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/Audio/CreateSpeechRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the text-to-speech request so callers can inject a mock.
protocol CreateSpeechRequestProtocol {
    /// Generates speech audio for `input` and returns the raw audio bytes (or `nil`).
    func execute(api: API,
                 apiKey: String,
                 model: OpenAITTSModelType,
                 input: String,
                 voice: OpenAIVoiceType,
                 responseFormat: OpenAIAudioResponseType,
                 speed: Double) async throws -> Data?
}
12 |
/// Executes the OpenAI text-to-speech endpoint and returns the generated audio.
final public class CreateSpeechRequest: CreateSpeechRequestProtocol {
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ model: OpenAITTSModelType,
                             _ input: String,
                             _ voice: OpenAIVoiceType,
                             _ responseFormat: OpenAIAudioResponseType,
                             _ speed: Double) async throws -> Data?

    public init() { }

    /// - Returns: The raw audio bytes returned by the API, as-is.
    /// - Throws: The error reported by `api.execute` on failure.
    public func execute(api: API,
                        apiKey: String,
                        model: OpenAITTSModelType,
                        input: String,
                        voice: OpenAIVoiceType,
                        responseFormat: OpenAIAudioResponseType,
                        speed: Double) async throws -> Data? {
        var endpoint = OpenAIEndpoints.createSpeech(model: model, input: input, voice: voice, responseFormat: responseFormat, speed: speed).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var urlRequest = api.buildURLRequest(endpoint: endpoint)
        api.addHeaders(urlRequest: &urlRequest,
                       headers: ["Content-Type": "application/json",
                                 "Authorization": "Bearer \(apiKey)"])

        let result = await api.execute(with: urlRequest)

        // The response is raw audio, not JSON, so it is returned unchanged.
        // (A previous revision created a JSONDecoder here that was never used.)
        switch result {
        case .success(let data):
            return data
        case .failure(let error):
            throw error
        }
    }
}
52 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/Audio/CreateTranscriptionRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the audio-transcription request so it can be mocked in tests.
protocol CreateTranscriptionRequestProtocol {
    /// Uploads `file` and returns a stream of decoded transcription payloads.
    // Generic parameters restored: `AsyncThrowingStream` requires its Element and
    // Failure types; the element type matches what the delegate decodes and yields.
    func execute(api: API,
                 apiKey: String,
                 file: Data,
                 model: OpenAITranscriptionModelType,
                 language: String,
                 prompt: String,
                 responseFormat: OpenAIAudioResponseType,
                 temperature: Double) async throws -> AsyncThrowingStream<CreateTranscriptionDataModel, Error>
}
13 |
/// Streams transcription results for an uploaded audio file.
final public class CreateTranscriptionRequest: NSObject, CreateTranscriptionRequestProtocol {
    // Generic parameters restored throughout: `AsyncThrowingStream` must name its
    // Element (`CreateTranscriptionDataModel`, the type the delegate yields) and
    // Failure (`Error`) types to compile.
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ file: Data,
                             _ model: OpenAITranscriptionModelType,
                             _ language: String,
                             _ prompt: String,
                             _ responseFormat: OpenAIAudioResponseType,
                             _ temperature: Double) async throws -> AsyncThrowingStream<CreateTranscriptionDataModel, Error>

    private var urlSession: URLSession?
    private var dataTask: URLSessionDataTask?
    // Continuation of the stream returned by `execute`; fed by the URLSession
    // delegate callbacks below.
    private var continuation: AsyncThrowingStream<CreateTranscriptionDataModel, Error>.Continuation?

    public override init() {
        super.init()
    }

    /// Uploads `file` as multipart/form-data and returns a stream of decoded payloads.
    ///
    /// NOTE(review): `model`, `language`, `prompt`, `responseFormat` and `temperature`
    /// are accepted but never added to the form body — only a hard-coded "whisper-1"
    /// model field and the file (with a hard-coded "steve.mp4" filename) are sent.
    /// TODO: forward the remaining parameters to the API.
    public func execute(api: API,
                        apiKey: String,
                        file: Data,
                        model: OpenAITranscriptionModelType,
                        language: String,
                        prompt: String,
                        responseFormat: OpenAIAudioResponseType,
                        temperature: Double) async throws -> AsyncThrowingStream<CreateTranscriptionDataModel, Error> {

        return AsyncThrowingStream { continuation in
            self.continuation = continuation

            var endpoint = OpenAIEndpoints.createTranscription(file: file, model: model, language: language, prompt: prompt, responseFormat: responseFormat, temperature: temperature).endpoint
            api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

            let boundary = "Boundary-\(UUID().uuidString)"

            var urlRequest = api.buildURLRequest(endpoint: endpoint)
            api.addHeaders(urlRequest: &urlRequest,
                           headers: ["Content-Type": "multipart/form-data; boundary=\(boundary)",
                                     "Authorization": "Bearer \(apiKey)"])

            let formData = MultipartFormData(boundary: boundary)
            formData.appendField(name: "model", value: "whisper-1")
            formData.appendImageData(fieldName: "file", data: file, filename: "steve.mp4", mimeType: "audio/mpeg")
            formData.finalizeBody()

            urlRequest.httpBody = formData.getHttpBody()

            // Delegate-based session so response chunks arrive via URLSessionDataDelegate.
            self.urlSession = URLSession(configuration: .default,
                                         delegate: self,
                                         delegateQueue: OperationQueue())

            dataTask = urlSession?.dataTask(with: urlRequest)
            dataTask?.resume()
        }
    }
}
70 |
// MARK: - URLSessionDataDelegate
extension CreateTranscriptionRequest: URLSessionDataDelegate {
    /// Decodes a received chunk as a transcription payload and yields it to the stream.
    // NOTE(review): assumes each `didReceive` callback carries one complete JSON
    // document — TODO confirm; a response split across chunks would fail to decode.
    public func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
        do {
            let createTranscriptionDataModel = try JSONDecoder().decode(CreateTranscriptionDataModel.self, from: data)
            self.continuation?.yield(createTranscriptionDataModel)
        } catch {
            // NOTE(review): decode failures are only logged (message text is Spanish:
            // "Error parsing JSON"), so the chunk is silently dropped and the stream
            // later finishes without error. Consider surfacing the error instead.
            print("Error al parsear JSON:", error.localizedDescription)
        }
    }

    /// Finishes the stream when the task completes, rethrowing any transport error.
    public func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
        guard let error = error else {
            continuation?.finish()
            return
        }
        continuation?.finish(throwing: error)
    }
}
89 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/Audio/CreateTranslationRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the audio-translation request so it can be mocked in tests.
protocol CreateTranslationRequestProtocol {
    /// Uploads `file` and returns a stream of decoded translation payloads.
    // Generic parameters restored: `AsyncThrowingStream` requires its Element and
    // Failure types; the element type matches what the delegate decodes and yields.
    func execute(api: API,
                 apiKey: String,
                 file: Data,
                 model: OpenAITranscriptionModelType,
                 prompt: String,
                 responseFormat: OpenAIAudioResponseType,
                 temperature: Double) async throws -> AsyncThrowingStream<CreateTranslationDataModel, Error>
}
12 |
/// Streams translation results for an uploaded audio file.
final public class CreateTranslationRequest: NSObject, CreateTranslationRequestProtocol {
    // Generic parameters restored throughout: `AsyncThrowingStream` must name its
    // Element (`CreateTranslationDataModel`, the type the delegate yields) and
    // Failure (`Error`) types to compile.
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ file: Data,
                             _ model: OpenAITranscriptionModelType,
                             _ prompt: String,
                             _ responseFormat: OpenAIAudioResponseType,
                             _ temperature: Double) async throws -> AsyncThrowingStream<CreateTranslationDataModel, Error>

    private var urlSession: URLSession?
    private var dataTask: URLSessionDataTask?
    // Continuation of the stream returned by `execute`; fed by the URLSession
    // delegate callbacks below.
    private var continuation: AsyncThrowingStream<CreateTranslationDataModel, Error>.Continuation?

    public override init() {
        super.init()
    }

    /// Uploads `file` as multipart/form-data and returns a stream of decoded payloads.
    ///
    /// NOTE(review): `model`, `prompt`, `responseFormat` and `temperature` are accepted
    /// but never added to the form body — only a hard-coded "whisper-1" model field and
    /// the file (hard-coded "steve.mp4" filename) are sent. TODO: forward them.
    public func execute(api: API,
                        apiKey: String,
                        file: Data,
                        model: OpenAITranscriptionModelType,
                        prompt: String,
                        responseFormat: OpenAIAudioResponseType,
                        temperature: Double) async throws -> AsyncThrowingStream<CreateTranslationDataModel, Error> {

        return AsyncThrowingStream { continuation in
            self.continuation = continuation

            var endpoint = OpenAIEndpoints.createTranslation(file: file, model: model, prompt: prompt, responseFormat: responseFormat, temperature: temperature).endpoint
            api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

            let boundary = "Boundary-\(UUID().uuidString)"

            var urlRequest = api.buildURLRequest(endpoint: endpoint)
            api.addHeaders(urlRequest: &urlRequest,
                           headers: ["Content-Type": "multipart/form-data; boundary=\(boundary)",
                                     "Authorization": "Bearer \(apiKey)"])

            // Build the body with the shared MultipartFormData helper for consistency
            // with CreateTranscriptionRequest; it emits byte-identical part framing to
            // the previous hand-rolled implementation (model field, then file part).
            let formData = MultipartFormData(boundary: boundary)
            formData.appendField(name: "model", value: "whisper-1")
            formData.appendImageData(fieldName: "file", data: file, filename: "steve.mp4", mimeType: "audio/mpeg")
            formData.finalizeBody()

            urlRequest.httpBody = formData.getHttpBody()

            // Delegate-based session so response chunks arrive via URLSessionDataDelegate.
            self.urlSession = URLSession(configuration: .default,
                                         delegate: self,
                                         delegateQueue: OperationQueue())

            dataTask = urlSession?.dataTask(with: urlRequest)
            dataTask?.resume()
        }
    }
}
76 |
// MARK: - URLSessionDataDelegate
extension CreateTranslationRequest: URLSessionDataDelegate {
    /// Decodes a received chunk as a translation payload and yields it to the stream.
    // NOTE(review): assumes each `didReceive` callback carries one complete JSON
    // document — TODO confirm; a response split across chunks would fail to decode.
    public func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
        do {
            let createTranslationDataModel = try JSONDecoder().decode(CreateTranslationDataModel.self, from: data)
            self.continuation?.yield(createTranslationDataModel)
        } catch {
            // NOTE(review): decode failures are only logged (message text is Spanish:
            // "Error parsing JSON"), so the chunk is silently dropped and the stream
            // later finishes without error. Consider surfacing the error instead.
            print("Error al parsear JSON:", error.localizedDescription)
        }
    }

    /// Finishes the stream when the task completes, rethrowing any transport error.
    public func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
        guard let error = error else {
            continuation?.finish()
            return
        }
        continuation?.finish(throwing: error)
    }
}
95 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/ChatCompletions/CreateChatCompletionsImageInputRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import Foundation
3 |
/// Abstraction over the vision (image-input) chat completions request for mocking.
protocol CreateChatCompletionsImageInputRequestProtocol {
    /// Sends `messages` (which may reference images) and returns the decoded response.
    func execute(api: API,
                 apiKey: String,
                 model: OpenAIModelType,
                 messages: [MessageChatImageInput],
                 optionalParameters: ChatCompletionsOptionalParameters?) async throws -> ChatCompletionsDataModel?
}
11 |
/// Performs the chat completions call with image-input messages and decodes the result.
final public class CreateChatCompletionsImageInputRequest: CreateChatCompletionsImageInputRequestProtocol {
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ model: OpenAIModelType,
                             _ messages: [MessageChatImageInput],
                             _ optionalParameters: ChatCompletionsOptionalParameters?) async throws -> ChatCompletionsDataModel?

    public init() { }

    public func execute(api: API,
                        apiKey: String,
                        model: OpenAIModelType,
                        messages: [MessageChatImageInput],
                        optionalParameters: ChatCompletionsOptionalParameters?) async throws -> ChatCompletionsDataModel? {
        // Resolve the endpoint against the v1 environment before building the request.
        var endpoint = OpenAIEndpoints.chatCompletionsWithImageInput(model: model, messages: messages, optionalParameters: optionalParameters).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var request = api.buildURLRequest(endpoint: endpoint)
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]
        api.addHeaders(urlRequest: &request, headers: headers)

        let result = await api.execute(with: request)

        // OpenAI responses use snake_case keys.
        let decoder = JSONDecoder()
        decoder.keyDecodingStrategy = .convertFromSnakeCase

        return try api.parse(result,
                             type: ChatCompletionsDataModel.self,
                             jsonDecoder: decoder,
                             errorType: OpenAIAPIError.self)
    }
}
45 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/ChatCompletions/CreateChatCompletionsRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the (non-streaming) chat completions request for mocking.
protocol CreateChatCompletionsRequestProtocol {
    /// Sends `messages` to the chat completions endpoint and returns the decoded response.
    func execute(api: API,
                 apiKey: String,
                 model: OpenAIModelType,
                 messages: [MessageChatGPT],
                 optionalParameters: ChatCompletionsOptionalParameters?) async throws -> ChatCompletionsDataModel?
}
10 |
/// Performs the non-streaming chat completions call and decodes the result.
final public class CreateChatCompletionsRequest: CreateChatCompletionsRequestProtocol {
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ model: OpenAIModelType,
                             _ messages: [MessageChatGPT],
                             _ optionalParameters: ChatCompletionsOptionalParameters?) async throws -> ChatCompletionsDataModel?

    public init() { }

    public func execute(api: API,
                        apiKey: String,
                        model: OpenAIModelType,
                        messages: [MessageChatGPT],
                        optionalParameters: ChatCompletionsOptionalParameters?) async throws -> ChatCompletionsDataModel? {
        // Resolve the endpoint against the v1 environment before building the request.
        var endpoint = OpenAIEndpoints.chatCompletions(model: model, messages: messages, optionalParameters: optionalParameters).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var request = api.buildURLRequest(endpoint: endpoint)
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]
        api.addHeaders(urlRequest: &request, headers: headers)

        let result = await api.execute(with: request)

        // OpenAI responses use snake_case keys.
        let decoder = JSONDecoder()
        decoder.keyDecodingStrategy = .convertFromSnakeCase

        return try api.parse(result,
                             type: ChatCompletionsDataModel.self,
                             jsonDecoder: decoder,
                             errorType: OpenAIAPIError.self)
    }
}
44 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/ChatCompletions/Stream/ChatCompletionsStreamMapper.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Parses raw bytes from the chat completions streaming endpoint into data models.
public protocol ChatCompletionsStreamMappeable {
    /// Decodes every event contained in `data`; throws when the payload is an API error.
    func parse(data: Data) throws -> [ChatCompletionsStreamDataModel]
}
6 |
/// Default `ChatCompletionsStreamMappeable` implementation: splits the raw
/// server-sent-events payload into `data: ...` lines and decodes each one.
public struct ChatCompletionsStreamMapper: ChatCompletionsStreamMappeable {
    // Markers used by the OpenAI SSE stream format.
    private enum Constant: String {
        case streamData = "data: "          // prefix carried by each event line
        case streamError = "\"error\": {\n" // fragment identifying an API error payload
        case streamFinished = "[DONE]"      // sentinel sent when the stream is complete
    }

    public init() { }

    /// Decodes every event contained in `data`.
    /// Non-UTF-8 payloads yield an empty array; the "[DONE]" sentinel maps to
    /// `.finished`; an error payload causes the decoded `OpenAIAPIError` to be thrown.
    public func parse(data: Data) throws -> [ChatCompletionsStreamDataModel] {
        guard let dataString = String(data: data, encoding: .utf8) else {
            return []
        }
        return try extractDataLine(from: dataString).map {
            guard let jsonData = $0.data(using: .utf8) else {
                return nil
            }
            if $0 == Constant.streamFinished.rawValue {
                return .finished
            } else {
                return try decodeChatCompletionsStreamDataModel(from: jsonData)
            }
        }.compactMap { $0 }
    }

    /// Splits the payload into individual event strings, stripping the "data: " prefix.
    /// An error payload is returned whole so the decoder below can surface it.
    // NOTE(review): the prefix is removed by dropping a fixed character count without
    // checking that the line actually starts with "data: " — assumes every non-error
    // line carries the prefix; TODO confirm against the wire format.
    private func extractDataLine(from dataString: String,
                                 dataPrefix: String = Constant.streamData.rawValue) throws -> [String] {
        if dataString.contains(Constant.streamError.rawValue) {
            return [dataString]
        } else {
            // Events are separated by blank lines ("\n\n") per the SSE convention.
            let lines = dataString.components(separatedBy: "\n\n")
                .filter { !$0.isEmpty }
            return lines.map {
                $0.dropFirst(dataPrefix.count).trimmingCharacters(in: .whitespaces)
            }
        }
    }

    /// Decodes one event; if it is not a stream model, decodes it as an
    /// `OpenAIAPIError` instead and throws that error.
    private func decodeChatCompletionsStreamDataModel(from data: Data) throws -> ChatCompletionsStreamDataModel? {
        do {
            return try JSONDecoder().decode(ChatCompletionsStreamDataModel.self, from: data)
        } catch {
            let error = try JSONDecoder().decode(OpenAIAPIError.self, from: data)
            throw error
        }
    }
}
54 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/ChatCompletions/Stream/CreateChatCompletionsStreamRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | // swiftlint:disable line_length
/// Abstraction over the streaming chat completions request for mocking.
protocol CreateChatCompletionsStreamRequestProtocol {
    /// Starts the streaming request and returns an async stream of incremental responses.
    // Generic parameters restored: `AsyncThrowingStream` requires its Element and
    // Failure types; the element matches what the stream mapper produces.
    func execute(api: API,
                 apiKey: String,
                 model: OpenAIModelType,
                 messages: [MessageChatGPT],
                 optionalParameters: ChatCompletionsOptionalParameters?) throws -> AsyncThrowingStream<ChatCompletionsStreamDataModel, Error>
    /// Test hook: replaces the URLSession used to run the request.
    func setURLSession(urlSession: URLSession)
}
12 |
/// Performs the chat completions request in streaming mode and exposes the
/// incremental responses as an `AsyncThrowingStream`.
final public class CreateChatCompletionsStreamRequest: NSObject, CreateChatCompletionsStreamRequestProtocol {

    // Generic parameters restored throughout: `AsyncThrowingStream` must name its
    // Element (`ChatCompletionsStreamDataModel`, produced by the mapper) and
    // Failure (`Error`) types to compile.
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ model: OpenAIModelType,
                             _ messages: [MessageChatGPT],
                             _ optionalParameters: ChatCompletionsOptionalParameters?) throws -> AsyncThrowingStream<ChatCompletionsStreamDataModel, Error>

    private var urlSession: URLSession?
    private var dataTask: URLSessionDataTask?
    // Converts raw SSE chunks into stream data models.
    private var streamMapper: ChatCompletionsStreamMappeable
    // Continuation of the stream returned by `execute`; fed by the delegate callbacks.
    private var continuation: AsyncThrowingStream<ChatCompletionsStreamDataModel, Error>.Continuation?

    public init(streamMapper: ChatCompletionsStreamMappeable = ChatCompletionsStreamMapper()) {
        self.streamMapper = streamMapper
        super.init()
        // Delegate-based session so response chunks arrive via URLSessionDataDelegate.
        self.urlSession = URLSession(configuration: .default,
                                     delegate: self,
                                     delegateQueue: OperationQueue())
    }

    /// Builds and starts the request; each decoded chunk is yielded on the returned stream.
    public func execute(api: API,
                        apiKey: String,
                        model: OpenAIModelType,
                        messages: [MessageChatGPT],
                        optionalParameters: ChatCompletionsOptionalParameters?) throws -> AsyncThrowingStream<ChatCompletionsStreamDataModel, Error> {
        return AsyncThrowingStream { continuation in
            self.continuation = continuation
            var endpoint = OpenAIEndpoints.chatCompletions(model: model, messages: messages, optionalParameters: optionalParameters).endpoint
            api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

            var urlRequest = api.buildURLRequest(endpoint: endpoint)
            api.addHeaders(urlRequest: &urlRequest,
                           headers: ["Content-Type": "application/json",
                                     "Authorization": "Bearer \(apiKey)"])

            dataTask = urlSession?.dataTask(with: urlRequest)
            dataTask?.resume()
        }
    }

    /// Test hook: replaces the URLSession used to run the request.
    func setURLSession(urlSession: URLSession) {
        self.urlSession = urlSession
    }
}
58 | // swiftlint:enable line_length
59 |
// MARK: - URLSessionDataDelegate
extension CreateChatCompletionsStreamRequest: URLSessionDataDelegate {
    /// Parses each received chunk and forwards every decoded message to the stream;
    /// a parse failure terminates the stream with that error.
    public func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
        do {
            let newMessages = try streamMapper.parse(data: data)
            for newMessage in newMessages {
                continuation?.yield(newMessage)
            }
        } catch {
            continuation?.finish(throwing: error)
        }
    }

    /// Closes the stream when the task ends, rethrowing any transport error.
    public func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
        if let error = error {
            continuation?.finish(throwing: error)
        } else {
            continuation?.finish()
        }
    }
}
79 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/Completions/CompletionsRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the completions request for mocking.
protocol CompletionsRequestProtocol {
    /// Runs the completions endpoint and returns the decoded response.
    func execute(api: API,
                 apiKey: String,
                 model: OpenAIModelType,
                 optionalParameters: CompletionsOptionalParameters?) async throws -> CompletionsDataModel?
}
9 |
/// Performs the completions call and decodes the result.
final public class CompletionsRequest: CompletionsRequestProtocol {
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ model: OpenAIModelType,
                             _ optionalParameters: CompletionsOptionalParameters?) async throws -> CompletionsDataModel?

    public init() { }

    public func execute(api: API,
                        apiKey: String,
                        model: OpenAIModelType,
                        optionalParameters: CompletionsOptionalParameters?) async throws -> CompletionsDataModel? {
        // Resolve the endpoint against the v1 environment before building the request.
        var endpoint = OpenAIEndpoints.completions(model: model, optionalParameters: optionalParameters).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var request = api.buildURLRequest(endpoint: endpoint)
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]
        api.addHeaders(urlRequest: &request, headers: headers)

        let result = await api.execute(with: request)

        // OpenAI responses use snake_case keys.
        let decoder = JSONDecoder()
        decoder.keyDecodingStrategy = .convertFromSnakeCase

        return try api.parse(result,
                             type: CompletionsDataModel.self,
                             jsonDecoder: decoder,
                             errorType: OpenAIAPIError.self)
    }
}
41 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/Embeddings/EmbeddingsRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the embeddings request for mocking.
protocol EmbeddingsRequestProtocol {
    /// Computes embeddings for `input` and returns the decoded response.
    func execute(api: API,
                 apiKey: String,
                 model: OpenAIModelType,
                 input: String) async throws -> EmbeddingResponseDataModel?
}
9 |
/// Performs the embeddings call and decodes the result.
final public class EmbeddingsRequest: EmbeddingsRequestProtocol {
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ model: OpenAIModelType,
                             _ input: String) async throws -> EmbeddingResponseDataModel?

    public init() { }

    public func execute(api: API,
                        apiKey: String,
                        model: OpenAIModelType,
                        input: String) async throws -> EmbeddingResponseDataModel? {
        // Resolve the endpoint against the v1 environment before building the request.
        var endpoint = OpenAIEndpoints.embeddings(model: model, input: input).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var request = api.buildURLRequest(endpoint: endpoint)
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]
        api.addHeaders(urlRequest: &request, headers: headers)

        let result = await api.execute(with: request)

        // OpenAI responses use snake_case keys.
        let decoder = JSONDecoder()
        decoder.keyDecodingStrategy = .convertFromSnakeCase

        return try api.parse(result,
                             type: EmbeddingResponseDataModel.self,
                             jsonDecoder: decoder,
                             errorType: OpenAIAPIError.self)
    }
}
41 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/Images/CreateImagesRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the image-generation request for mocking.
protocol CreateImagesRequestProtocol {
    /// Generates `numberOfImages` images for `prompt` and returns the decoded response.
    func execute(api: API,
                 apiKey: String,
                 model: OpenAIImageModelType,
                 prompt: String,
                 numberOfImages: Int,
                 size: ImageSize) async throws -> CreateImageDataModel?
}
11 |
/// Performs the image-generation call and decodes the result.
final public class CreateImagesRequest: CreateImagesRequestProtocol {
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ model: OpenAIImageModelType,
                             _ prompt: String,
                             _ numberOfImages: Int,
                             _ size: ImageSize) async throws -> CreateImageDataModel?

    public init() { }

    public func execute(api: API,
                        apiKey: String,
                        model: OpenAIImageModelType,
                        prompt: String,
                        numberOfImages: Int,
                        size: ImageSize) async throws -> CreateImageDataModel? {
        // Resolve the endpoint against the v1 environment before building the request.
        var endpoint = OpenAIEndpoints.createImage(model: model,
                                                   prompt: prompt,
                                                   numberOfImages: numberOfImages,
                                                   size: size).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var request = api.buildURLRequest(endpoint: endpoint)
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]
        api.addHeaders(urlRequest: &request, headers: headers)

        let result = await api.execute(with: request)

        // A default decoder is used here (no snake_case mapping), matching the
        // original behavior for this endpoint's response model.
        return try api.parse(result,
                             type: CreateImageDataModel.self,
                             jsonDecoder: JSONDecoder(),
                             errorType: OpenAIAPIError.self)
    }
}
47 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/Images/EditImageRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the image-edit request for mocking.
protocol EditImageRequestProtocol {
    /// Uploads an image plus mask and returns the decoded edited-image response.
    func execute(api: API,
                 apiKey: String,
                 model: OpenAIImageModelType,
                 imageData: Data,
                 maskData: Data,
                 prompt: String,
                 numberOfImages: Int,
                 size: ImageSize) async throws -> CreateImageDataModel?
}
13 |
/// Uploads an image and a mask and requests edited variants from the API.
final public class EditImageRequest: NSObject, EditImageRequestProtocol {
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ model: OpenAIImageModelType,
                             _ imageData: Data,
                             _ maskData: Data,
                             _ prompt: String,
                             _ numberOfImages: Int,
                             _ size: ImageSize) async throws -> CreateImageDataModel?

    public override init() {
        super.init()
    }

    public func execute(api: API,
                        apiKey: String,
                        model: OpenAIImageModelType,
                        imageData: Data,
                        maskData: Data,
                        prompt: String,
                        numberOfImages: Int,
                        size: ImageSize) async throws -> CreateImageDataModel? {

        var endpoint = OpenAIEndpoints.editImage(model: model).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        let boundary = "Boundary-\(UUID().uuidString)"

        var request = api.buildURLRequest(endpoint: endpoint)
        api.addHeaders(urlRequest: &request,
                       headers: ["Content-Type": "multipart/form-data; boundary=\(boundary)",
                                 "Authorization": "Bearer \(apiKey)"])

        // Assemble the multipart payload: text fields first, then both image parts.
        let form = MultipartFormData(boundary: boundary)
        form.appendField(name: "prompt", value: prompt)
        form.appendField(name: "n", value: String(numberOfImages))
        form.appendField(name: "size", value: size.rawValue)
        form.appendImageData(fieldName: "image", data: imageData, filename: "image.png", mimeType: "image/png")
        form.appendImageData(fieldName: "mask", data: maskData, filename: "mask.png", mimeType: "image/png")
        form.finalizeBody()
        request.httpBody = form.getHttpBody()

        // This request goes through the shared URLSession directly.
        let (data, _) = try await URLSession.shared.data(for: request)
        return try JSONDecoder().decode(CreateImageDataModel.self, from: data)
    }
}
64 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/Images/VariationImageRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the image-variation request for mocking.
protocol VariationImageRequestProtocol {
    /// Uploads an image and returns the decoded variation response.
    func execute(api: API,
                 apiKey: String,
                 model: OpenAIImageModelType,
                 imageData: Data,
                 numberOfImages: Int,
                 size: ImageSize) async throws -> CreateImageDataModel?
}
11 |
/// Uploads an image and requests variations of it from the API.
final public class VariationImageRequest: VariationImageRequestProtocol {
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ model: OpenAIImageModelType,
                             _ imageData: Data,
                             _ numberOfImages: Int,
                             _ size: ImageSize) async throws -> CreateImageDataModel?

    public init() {}

    public func execute(api: API,
                        apiKey: String,
                        model: OpenAIImageModelType,
                        imageData: Data,
                        numberOfImages: Int,
                        size: ImageSize) async throws -> CreateImageDataModel? {

        var endpoint = OpenAIEndpoints.variationImage(model: model).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        let boundary = "Boundary-\(UUID().uuidString)"

        var request = api.buildURLRequest(endpoint: endpoint)
        api.addHeaders(urlRequest: &request,
                       headers: ["Content-Type": "multipart/form-data; boundary=\(boundary)",
                                 "Authorization": "Bearer \(apiKey)"])

        // Assemble the multipart payload: text fields first, then the image part.
        let form = MultipartFormData(boundary: boundary)
        form.appendField(name: "n", value: String(numberOfImages))
        form.appendField(name: "size", value: size.rawValue)
        form.appendImageData(fieldName: "image", data: imageData, filename: "image.png", mimeType: "image/png")
        form.finalizeBody()
        request.httpBody = form.getHttpBody()

        // This request goes through the shared URLSession directly.
        let (data, _) = try await URLSession.shared.data(for: request)
        return try JSONDecoder().decode(CreateImageDataModel.self, from: data)
    }
}
54 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/Models/ListModelsRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the list-models request for mocking.
protocol ListModelsRequestProtocol {
    /// Fetches the models available to the account and returns the decoded list.
    func execute(api: API,
                 apiKey: String) async throws -> ModelListDataModel?
}
7 |
/// Performs the list-models call and decodes the result.
final public class ListModelsRequest: ListModelsRequestProtocol {
    public typealias Init = (_ api: API,
                             _ apiKey: String) async throws -> ModelListDataModel?

    public init() { }

    public func execute(api: API,
                        apiKey: String) async throws -> ModelListDataModel? {
        // Resolve the endpoint against the v1 environment before building the request.
        var endpoint = OpenAIEndpoints.listModels.endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var request = api.buildURLRequest(endpoint: endpoint)
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]
        api.addHeaders(urlRequest: &request, headers: headers)

        let result = await api.execute(with: request)

        // OpenAI responses use snake_case keys.
        let decoder = JSONDecoder()
        decoder.keyDecodingStrategy = .convertFromSnakeCase

        return try api.parse(result,
                             type: ModelListDataModel.self,
                             jsonDecoder: decoder,
                             errorType: OpenAIAPIError.self)
    }
}
35 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/Moderations/ModerationsRequest.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Abstraction over the moderations request for mocking.
protocol ModerationsRequestProtocol {
    /// Classifies `input` with the moderation endpoint and returns the decoded response.
    func execute(api: API,
                 apiKey: String,
                 input: String) async throws -> ModerationDataModel?
}
8 |
/// Performs the moderations call and decodes the result.
final public class ModerationsRequest: ModerationsRequestProtocol {
    public typealias Init = (_ api: API,
                             _ apiKey: String,
                             _ input: String) async throws -> ModerationDataModel?

    public init() { }

    public func execute(api: API,
                        apiKey: String,
                        input: String) async throws -> ModerationDataModel? {
        // Resolve the endpoint against the v1 environment before building the request.
        var endpoint = OpenAIEndpoints.moderations(input: input).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var request = api.buildURLRequest(endpoint: endpoint)
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]
        api.addHeaders(urlRequest: &request, headers: headers)

        let result = await api.execute(with: request)

        // OpenAI responses use snake_case keys.
        let decoder = JSONDecoder()
        decoder.keyDecodingStrategy = .convertFromSnakeCase

        return try api.parse(result,
                             type: ModerationDataModel.self,
                             jsonDecoder: decoder,
                             errorType: OpenAIAPIError.self)
    }
}
38 |
--------------------------------------------------------------------------------
/Sources/SwiftOpenAI/OpenAI/Requests/MultipartFormData.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Incrementally builds a `multipart/form-data` HTTP body.
///
/// Parts are appended in call order; `finalizeBody()` must be called last to
/// write the closing boundary before `getHttpBody()` is used.
final public class MultipartFormData {
    private let boundary: String
    private var body = Data()

    /// - Parameter boundary: Separator string placed between form parts.
    public init(boundary: String) {
        self.boundary = boundary
    }

    /// Appends a text field, optionally tagged with a filename and MIME type.
    public func appendField(name: String, value: String, filename: String? = nil, mimeType: String? = nil) {
        var headerLine = "Content-Disposition: form-data; name=\"\(name)\""
        if let filename = filename {
            headerLine += "; filename=\"\(filename)\""
        }

        append("--\(boundary)\r\n")
        append("\(headerLine)\r\n")

        // A blank line separates part headers from the payload; the optional
        // Content-Type header carries its own trailing blank line.
        if let mimeType = mimeType {
            append("Content-Type: \(mimeType)\r\n\r\n")
        } else {
            append("\r\n")
        }

        append(value)
        append("\r\n")
    }

    /// Appends a binary file part (e.g. an image) with its filename and MIME type.
    public func appendImageData(fieldName: String, data: Data, filename: String, mimeType: String) {
        append("--\(boundary)\r\n")
        append("Content-Disposition: form-data; name=\"\(fieldName)\"; filename=\"\(filename)\"\r\n")
        append("Content-Type: \(mimeType)\r\n\r\n")
        body.append(data)
        append("\r\n")
    }

    /// Writes the terminating boundary; call once, after all parts are appended.
    public func finalizeBody() {
        append("--\(boundary)--\r\n")
    }

    /// The accumulated body, suitable for `URLRequest.httpBody`.
    public func getHttpBody() -> Data {
        body
    }

    /// Appends `string` UTF-8 encoded; silently skips unencodable input
    /// (UTF-8 can represent any Swift string, so this never drops data in practice).
    private func append(_ string: String) {
        guard let encoded = string.data(using: .utf8) else { return }
        body.append(encoded)
    }
}
52 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Endpoint/EndpointSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 |
/// Verifies the concrete property values of the `Endpoint` mocks,
/// including the protocol's defaults (GET method, nil parameters).
class EndpointSpec: XCTestCase {

    func testGetEndpoint_WhenIsInitialized_ShouldHaveCorrectValuesInProperties() throws {
        // Arrange
        let sut = EndpointGetMock(parameterOne: "SwiftBeta", parameterTwo: "Swift")

        // Assert
        XCTAssertEqual(sut.path, "mock")
        XCTAssertEqual(sut.method, .GET)
        XCTAssertEqual(sut.parameters?.count, 2)
        // XCTUnwrap instead of `as!`: a shape change fails this test instead of crashing the run.
        let parameters = try XCTUnwrap(sut.parameters as? [String: String])
        XCTAssertEqual(parameters, ["parameter_one": "SwiftBeta", "parameter_two": "Swift"])
    }

    func testPostEndpoint_WhenIsInitialized_ShouldHaveCorrectValuesInProperties() throws {
        // Arrange
        let sut = EndpointPostMock(parameterOne: "SwiftBeta", parameterTwo: "Swift")

        // Assert
        XCTAssertEqual(sut.path, "mock")
        XCTAssertEqual(sut.method, .POST)
        XCTAssertEqual(sut.parameters?.count, 2)
        let parameters = try XCTUnwrap(sut.parameters as? [String: String])
        XCTAssertEqual(parameters, ["parameter_one": "SwiftBeta", "parameter_two": "Swift"])
    }

    func testEmptyEndpoint_WhenIsInitialized_ShouldHaveCorrectValuesInProperties() throws {
        // Arrange
        let sut = EmptyEndpointMock()

        // Assert: protocol defaults apply when the mock overrides nothing.
        XCTAssertEqual(sut.path, "mock")
        XCTAssertEqual(sut.method, .GET)
        XCTAssertNil(sut.parameters)
    }
}
40 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Endpoint/Mocks/EmptyEndpointMock.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | @testable import SwiftOpenAI
3 |
/// Minimal `Endpoint` conformance used to exercise the protocol's default
/// property values (only `path` is provided explicitly).
struct EmptyEndpointMock: Endpoint {
    var path: String = "mock"
}
7 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Endpoint/Mocks/EndpointGetMock.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | @testable import SwiftOpenAI
3 |
/// GET `Endpoint` double that exposes two values as query parameters.
struct EndpointGetMock: Endpoint {
    private let parameterOne: String
    private let parameterTwo: String

    init(parameterOne: String,
         parameterTwo: String) {
        self.parameterOne = parameterOne
        self.parameterTwo = parameterTwo
    }

    var path: String = "mock"

    /// Parameters sent with the request; keys mirror the API's snake_case style.
    var parameters: [String: Any]? {
        // Redundant `as Any` casts removed: String values are valid
        // [String: Any] dictionary values without an explicit cast.
        ["parameter_one": parameterOne,
         "parameter_two": parameterTwo]
    }
}
21 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Endpoint/Mocks/EndpointPostMock.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | @testable import SwiftOpenAI
3 |
/// POST `Endpoint` double that exposes two values as body parameters.
struct EndpointPostMock: Endpoint {
    private let parameterOne: String
    private let parameterTwo: String

    init(parameterOne: String,
         parameterTwo: String) {
        self.parameterOne = parameterOne
        self.parameterTwo = parameterTwo
    }

    /// Overrides the protocol's default (GET) method.
    var method: HTTPMethod {
        .POST
    }

    var path: String = "mock"

    /// Parameters sent with the request; keys mirror the API's snake_case style.
    var parameters: [String: Any]? {
        // Redundant `as Any` casts removed: String values are valid
        // [String: Any] dictionary values without an explicit cast.
        ["parameter_one": parameterOne,
         "parameter_two": parameterTwo]
    }
}
25 |
26 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Parser/ParserSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// Tests for `Parser.parse(_:type:jsonDecoder:)`: happy path, missing key,
/// and type mismatch. Fixture strings are kept byte-for-byte as-is.
class ParserSpec: XCTestCase {

    func testParser_WhenCorrectJSONisMappedIntoAModel_ShouldHaveCorrectValuesInProperties() throws {
        // Arrange
        // NOTE(review): the trailing comma after "topics" is not strictly valid JSON;
        // some Foundation JSON parser versions reject it — confirm against CI's runtime.
        let data = """
        {
        "user": "SwiftBeta",
        "number_of_videos": 140,
        "topics": ["SwiftUI", "Swift", "Xcode", "Testing", "Combine"],
        }
        """.data(using: .utf8)!

        // Act
        let sut = try Parser().parse(data, type: SwiftBetaModel.self, jsonDecoder: .init())

        // Assert
        XCTAssertNotNil(sut)
        XCTAssertEqual(sut?.user, "SwiftBeta")
        XCTAssertEqual(sut?.numberOfVideos, 140)
        XCTAssertEqual(sut?.topics, ["SwiftUI", "Swift", "Xcode", "Testing", "Combine"])
    }

    // NOTE(review): method name typo ("ThrowsAndException" → "ThrowsAnException");
    // left unchanged because renaming would alter the reported test identity.
    func testParser_WhenKeyNotFoundInJSON_ShouldThrowsAndException() throws {
        // "number_ofvideos" is deliberately misspelled so decoding misses a required key.
        let data = """
        {
        "user": "SwiftBeta",
        "number_ofvideos": 140
        }
        """.data(using: .utf8)!

        XCTAssertThrowsError(try Parser().parse(data, type: SwiftBetaModel.self, jsonDecoder: .init()), "JSON Key Not Found") { error in
            XCTAssertTrue(error is APIError)
        }
    }

    func testParser_WhenMismatchTypeInJSON_ShouldThrowsAndException() throws {
        // "number_of_videos" is a string here while the model expects Int.
        let data = """
        {
        "user": "SwiftBeta",
        "number_of_videos": "140",
        "topics": ["SwiftUI", "Swift", "Xcode", "Testing", "Combine"],
        }
        """.data(using: .utf8)!

        XCTAssertThrowsError(try Parser().parse(data, type: SwiftBetaModel.self, jsonDecoder: .init()), "JSON Type Mismatch") { error in
            XCTAssertTrue(error is APIError)
        }
    }
}
53 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Parser/SwiftBetaModel.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Fixture model for parser tests.
/// CodingKeys map snake_case JSON manually because the tests decode with a
/// default JSONDecoder (no `.convertFromSnakeCase` strategy).
struct SwiftBetaModel: Decodable {
    let user: String
    let numberOfVideos: Int
    let topics: [String]

    enum CodingKeys: String, CodingKey {
        case user
        case numberOfVideos = "number_of_videos"
        case topics
    }
}
14 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Requester/Mocks/RequesterMock.swift:
--------------------------------------------------------------------------------
1 | import Foundation.NSURLSession
2 | @testable import SwiftOpenAI
3 |
/// `RequesterProtocol` double whose session is intercepted by `URLProtocolMock`,
/// so no real network traffic occurs in tests.
struct RequesterMock: RequesterProtocol {
    var urlSession: URLSession = {
        let configuration = URLSessionConfiguration.ephemeral
        configuration.protocolClasses = [URLProtocolMock.self]
        return URLSession(configuration: configuration)
    }()

    /// Executes the request and maps the outcome into the library's `Result`.
    func execute(with urlRequest: URLRequest) async -> Result {
        do {
            let (data, response) = try await urlSession.data(for: urlRequest)

            // Anything other than an HTTP response is unexpected here.
            guard let httpResponse = response as? HTTPURLResponse else {
                throw APIError.unknown
            }

            // 4xx/5xx payloads are surfaced as errors carrying the raw JSON body.
            if (400...599).contains(httpResponse.statusCode) {
                guard let jsonString = String(data: data, encoding: .utf8) else {
                    throw APIError.unknown
                }
                throw APIError.jsonResponseError(jsonString)
            }

            return .success(data)
        } catch let error as URLError {
            return .failure(.urlSession(error))
        } catch let error as APIError {
            return .failure(error)
        } catch {
            return .failure(.unknown)
        }
    }
}
39 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Requester/Mocks/URLProtocolMock.swift:
--------------------------------------------------------------------------------
1 | import Foundation.NSURLSession
2 | @testable import SwiftOpenAI
3 |
/// URLProtocol stub: each test installs `completionHandler` to fabricate the
/// (response, data) pair returned for any intercepted request.
final class URLProtocolMock: URLProtocol {
    /// Set by the test BEFORE the request fires; returns the stubbed response/data
    /// or throws to simulate a transport failure.
    static var completionHandler: ((URLRequest) throws -> (HTTPURLResponse, Data))?

    // Intercept every request routed through a session configured with this class.
    override class func canInit(with request: URLRequest) -> Bool {
        return true
    }

    // No canonicalization needed for stubbed traffic.
    override class func canonicalRequest(for request: URLRequest) -> URLRequest {
        request
    }

    override func startLoading() {
        guard let handler = URLProtocolMock.completionHandler else {
            // A request without a stub is a test-setup bug, not a product bug.
            assertionFailure("Received unexpected request with no handler set")
            return
        }

        do {
            // Deliver response, then data, then completion — the order the
            // URL loading system expects from a protocol implementation.
            let (response, data) = try handler(request)
            client?.urlProtocol(self, didReceive: response, cacheStoragePolicy: .notAllowed)
            client?.urlProtocol(self, didLoad: data)
            client?.urlProtocolDidFinishLoading(self)
        } catch {
            client?.urlProtocol(self, didFailWithError: error)
        }
    }

    override func stopLoading() { }
}
33 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Requester/RequesterBuilderSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// Tests for `RequestBuilder`: query encoding for GET, body encoding for POST,
/// and header injection.
class RequesterBuilderSpec: XCTestCase {

    /// GET parameters must be encoded into the URL's query items.
    func testRequestBuilder_WhenURLRequestIsCreatedWithGETEndpoint_ShouldBeNotNil() throws {
        let endpointMock = EndpointGetMock(parameterOne: "parameter_one_value", parameterTwo: "parameter_two_value")
        let requestBuilder = RequestBuilder()

        let sut = requestBuilder.buildURLRequest(endpoint: endpointMock)
        // XCTUnwrap instead of force unwraps: failures fail this test, not the whole run.
        let url = try XCTUnwrap(sut.url)
        let components = try XCTUnwrap(URLComponents(string: url.absoluteString))
        let queryItems = try XCTUnwrap(components.queryItems)

        XCTAssertEqual(sut.httpMethod, "GET")
        XCTAssertNil(sut.httpBody)
        XCTAssertEqual(url.path, "mock")
        XCTAssertTrue(queryItems.contains(where: { $0.name == "parameter_one" }))
        XCTAssertTrue(queryItems.contains(where: { $0.value == "parameter_one_value" }))
        XCTAssertTrue(queryItems.contains(where: { $0.name == "parameter_two" }))
        XCTAssertTrue(queryItems.contains(where: { $0.value == "parameter_two_value" }))
    }

    /// POST parameters go into the body; the query string stays empty.
    func testRequestBuilder_WhenURLRequestIsCreatedWithPOSTEndpoint_ShouldBeNotNil() throws {
        let endpointMock = EndpointPostMock(parameterOne: "parameter_one_value", parameterTwo: "parameter_two_value")
        let requestBuilder = RequestBuilder()

        let sut = requestBuilder.buildURLRequest(endpoint: endpointMock)
        let url = try XCTUnwrap(sut.url)
        let components = try XCTUnwrap(URLComponents(string: url.absoluteString))

        XCTAssertEqual(sut.httpMethod, "POST")
        XCTAssertNotNil(sut.httpBody)
        XCTAssertEqual(url.path, "mock")
        XCTAssertNil(components.queryItems)
    }

    /// Added headers must all be present on the final request.
    func testRequestBuilder_WhenURLRequestIsCreatedWithGETEndpointAndHeader_ShouldBeNotNil() throws {
        let endpointMock = EndpointGetMock(parameterOne: "parameter_one_value", parameterTwo: "parameter_two_value")
        let requestBuilder = RequestBuilder()
        var request = requestBuilder.buildURLRequest(endpoint: endpointMock)
        requestBuilder.addHeaders(urlRequest: &request, headers: ["token" : "swiftbeta", "Content-Type": "application/json"])

        let sut = try XCTUnwrap(request.allHTTPHeaderFields)
        XCTAssertTrue(sut.keys.contains(where: { $0 == "token" }))
        XCTAssertTrue(sut.keys.contains(where: { $0 == "Content-Type" }))
        XCTAssertEqual(sut["token"], "swiftbeta")
        XCTAssertEqual(sut["Content-Type"], "application/json")
    }
}
50 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Requester/RequesterSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// End-to-end test of the request pipeline (route → build → execute → parse)
/// using `RequesterMock`/`URLProtocolMock` so no real network is touched.
class RequesterSpec: XCTestCase {

    // Placeholder error type for `parse`'s errorType parameter; never decoded here.
    struct MyTestError: Error, Decodable {

    }

    func testSuccessfulRequestAndParsingOfSwiftBetaModel() async throws {
        // NOTE(review): trailing comma after "topics" is not strictly valid JSON;
        // some Foundation parser versions reject it — confirm against CI's runtime.
        let data =
        """
        {
        "user": "SwiftBeta",
        "number_of_videos": 140,
        "topics": ["SwiftUI", "Swift", "Xcode", "Testing", "Combine"],
        }
        """.data(using: .utf8)!

        let router = Router()
        let requestBuilder = RequestBuilder()
        // Routing mutates the endpoint in place, prefixing the environment's base URL.
        var endpointMock: Endpoint = EndpointGetMock(parameterOne: "parameter_one_value",
                                                     parameterTwo: "parameter_two_value")
        router.routeEndpoint(&endpointMock, environment: BaseEnvironmentMock())
        var urlRequest = requestBuilder.buildURLRequest(endpoint: endpointMock)
        requestBuilder.addHeaders(urlRequest: &urlRequest, headers: ["Content-Type": "application/json",
                                                                    "Client-Id": "12345",
                                                                    "Authorization": "Bearer qwertyuiop"])

        // Stub MUST be installed before `execute` — the mock session reads this
        // static handler when the request fires.
        URLProtocolMock.completionHandler = { request in
            let response = HTTPURLResponse(url: URL(string: endpointMock.path)!, statusCode: 200, httpVersion: nil, headerFields: [:])!
            return (response, data)
        }

        let requester = RequesterMock()
        let result = await requester.execute(with: urlRequest)

        let parser = Parser()

        let model = try parser.parse(result, type: SwiftBetaModel.self, errorType: MyTestError.self)

        XCTAssertNotNil(model)
        XCTAssertEqual(model?.user, "SwiftBeta")
        XCTAssertEqual(model?.numberOfVideos, 140)
        XCTAssertEqual(model?.topics, ["SwiftUI", "Swift", "Xcode", "Testing", "Combine"])
    }
}
48 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Router/BaseEnvironmentMock.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | @testable import SwiftOpenAI
3 |
/// Test environment providing a fixed base URL for routing assertions.
struct BaseEnvironmentMock: BaseEnvironmentType {
    var url: String = "https://www.swiftbeta.com"
}
7 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/APIClientTests/Router/RouterSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// Tests for `Router`: routing prefixes an endpoint's path with the
/// environment's base URL.
class RouterSpec: XCTestCase {

    func testRequestBuilder_WhenURLRequestIsCreatedWithGETEndpointAndHeader_ShouldBeNotNil() throws {
        let sut = Router()
        // Routing mutates the endpoint in place, hence the `var` and `inout` pass.
        var endpointMock: Endpoint = EndpointGetMock(parameterOne: "parameter_one_value",
                                                     parameterTwo: "parameter_two_value")

        sut.routeEndpoint(&endpointMock, environment: BaseEnvironmentMock())

        XCTAssertEqual(endpointMock.path, "https://www.swiftbeta.com/mock")
    }

}
15 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Helpers/URLStreamProtocolMock.swift:
--------------------------------------------------------------------------------
1 | import Foundation.NSURLSession
2 | @testable import SwiftOpenAI
3 |
/// URLProtocol stub for streaming tests: each test fills `response` with the
/// data, URL response, and/or error to replay to the client.
final class URLStreamProtocolMock: URLProtocol {
    /// Stubbed outcome, set by the test before the request fires.
    static var response: (data: Data?, urlResponse: URLResponse?, error: Error?) = (nil, nil, nil)

    // Intercept every request routed through a session configured with this class.
    override class func canInit(with request: URLRequest) -> Bool {
        return true
    }

    override class func canonicalRequest(for request: URLRequest) -> URLRequest {
        request
    }

    override func startLoading() {
        if let error = URLStreamProtocolMock.response.error {
            client?.urlProtocol(self, didFailWithError: error)
        } else {
            // Fix: the URL loading system expects the response to be delivered
            // BEFORE any data; the previous order (data first) can cause the
            // client to drop or mis-handle the payload.
            if let response = URLStreamProtocolMock.response.urlResponse {
                client?.urlProtocol(self, didReceive: response, cacheStoragePolicy: .notAllowed)
            }
            if let data = URLStreamProtocolMock.response.data {
                client?.urlProtocol(self, didLoad: data)
            }
        }
        client?.urlProtocolDidFinishLoading(self)
    }

    override func stopLoading() { }
}
31 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Helpers/dataToJSON.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Convenience for rendering raw response bytes as text in tests.
extension Data {
    /// Returns the UTF-8 decoding of the bytes, or `nil` if they are not valid UTF-8.
    func toJSONString() -> String? {
        String(data: self, encoding: .utf8)
    }
}
8 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Helpers/loadJSON.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// Loads the `<name>.json` fixture from the test bundle's resources.
/// Traps when the fixture is missing or unreadable — that is a test-setup bug.
func loadJSON(name: String) -> Data {
    guard let resourceURL = Bundle.module.url(forResource: name, withExtension: "json"),
          let data = try? Data(contentsOf: resourceURL) else {
        // Fix: include the fixture name so a missing resource is identifiable
        // from the crash message alone.
        fatalError("Can't load JSON file: \(name).json")
    }
    return data
}
10 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Audio/CreateSpeech/CreateSpeechEndpointSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// Verifies the createSpeech endpoint: path, method, and parameter encoding.
final class CreateSpeechEndpointSpec: XCTestCase {
    func testEndpointCreateSpeech_WithModelTTS() throws {
        // Arrange: one value per parameter the endpoint should carry.
        let model: OpenAITTSModelType = .tts(.tts1)
        let input = "Please create an audio with this input"
        let voice: OpenAIVoiceType = .alloy
        let responseFormat: OpenAIAudioResponseType = .mp3
        let speed = 1.0

        let sut = OpenAIEndpoints.createSpeech(
            model: model,
            input: input,
            voice: voice,
            responseFormat: responseFormat,
            speed: speed
        ).endpoint

        // Force casts here will crash (not fail) on a shape change; kept as-is
        // to match the suite's existing style.
        let modelParameter = sut.parameters!["model"] as! String
        let inputParameter = sut.parameters!["input"] as! String
        let voiceParameter = sut.parameters!["voice"] as! String
        let responseFormatParameter = sut.parameters!["response_format"] as! String
        let speedFormatParameter = sut.parameters!["speed"] as! Double

        XCTAssertEqual(sut.path, "audio/speech")
        XCTAssertEqual(sut.method, .POST)
        XCTAssertEqual(sut.parameters?.count, 5)
        XCTAssertEqual(modelParameter, model.name)
        XCTAssertEqual(inputParameter, input)
        XCTAssertEqual(voiceParameter, voice.rawValue)
        XCTAssertEqual(responseFormatParameter, responseFormat.rawValue)
        XCTAssertEqual(speedFormatParameter, speed)
    }
}
36 |
37 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Audio/CreateSpeech/CreateSpeechRequestSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// Verifies header plumbing for the createSpeech request (Content-Type + Bearer auth).
final class CreateSpeechRequestSpec: XCTestCase {
    private let api = API()

    func testRequest_CreatedWithCorrectHeaders() throws {
        let apiKey = "1234567890"
        let model: OpenAITTSModelType = .tts(.tts1)
        let input = "Please create an audio with this input"
        let voice: OpenAIVoiceType = .alloy
        let responseFormat: OpenAIAudioResponseType = .mp3
        let speed = 1.0

        var endpoint = OpenAIEndpoints.createSpeech(
            model: model,
            input: input,
            voice: voice,
            responseFormat: responseFormat,
            speed: speed
        ).endpoint

        // Route first so the request is built against the v1 base URL.
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var sut = api.buildURLRequest(endpoint: endpoint)
        api.addHeaders(urlRequest: &sut,
                       headers: ["Content-Type" : "application/json",
                                 "Authorization" : "Bearer \(apiKey)"])

        XCTAssertEqual(sut.allHTTPHeaderFields?.count, 2)
        XCTAssertEqual(sut.allHTTPHeaderFields?["Content-Type"], "application/json")
        XCTAssertEqual(sut.allHTTPHeaderFields?["Authorization"], "Bearer 1234567890")
    }
}
35 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Audio/CreateTranscription/CreateTranscriptionEndpointSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// Verifies the createTranscription endpoint: path, method, and parameter encoding.
final class CreateTranscriptionEndpointSpec: XCTestCase {
    // NOTE(review): method name says "CreateSpeech"/"ModelTTS" but this tests
    // the transcription (Whisper) endpoint — looks like a copy-paste name.
    func testEndpointCreateSpeech_WithModelTTS() throws {
        let model: OpenAITranscriptionModelType = .whisper
        let language = "en"
        let responseFormat: OpenAIAudioResponseType = .mp3
        let temperature = 1.0

        let sut = OpenAIEndpoints.createTranscription(
            file: Data(),
            model: model,
            language: language,
            prompt: "",
            responseFormat: responseFormat,
            temperature: temperature
        ).endpoint

        let modelParameter = sut.parameters!["model"] as! String
        let languageParameter = sut.parameters!["language"] as! String
        let responseFormatParameter = sut.parameters!["response_format"] as! String
        let temperatureFormatParameter = sut.parameters!["temperature"] as! Double

        XCTAssertEqual(sut.path, "audio/transcriptions")
        XCTAssertEqual(sut.method, .POST)
        // NOTE(review): six inputs are passed but only five parameters are
        // expected — presumably the binary `file` travels outside `parameters`
        // (multipart body); confirm against the endpoint implementation.
        XCTAssertEqual(sut.parameters?.count, 5)
        XCTAssertEqual(modelParameter, model.rawValue)
        XCTAssertEqual(languageParameter, language)
        XCTAssertEqual(responseFormatParameter, responseFormat.rawValue)
        XCTAssertEqual(temperatureFormatParameter, temperature)
    }
}
34 |
35 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Audio/CreateTranscription/CreateTranscriptionRequestSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// Verifies header plumbing for the createTranscription request.
final class CreateTranscriptionRequestSpec: XCTestCase {
    private let api = API()

    func testRequest_CreatedWithCorrectHeaders() throws {
        let apiKey = "1234567890"
        let model: OpenAITranscriptionModelType = .whisper
        let language = "en"
        let responseFormat: OpenAIAudioResponseType = .mp3
        let temperature = 1.0

        var endpoint = OpenAIEndpoints.createTranscription(
            file: Data(),
            model: model,
            language: language,
            prompt: "",
            responseFormat: responseFormat,
            temperature: temperature
        ).endpoint

        // Route first so the request is built against the v1 base URL.
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var sut = api.buildURLRequest(endpoint: endpoint)
        // NOTE(review): transcription is a file upload; production code
        // presumably sends multipart/form-data, while this spec only checks
        // that headers set here round-trip — confirm intent.
        api.addHeaders(urlRequest: &sut,
                       headers: ["Content-Type" : "application/json",
                                 "Authorization" : "Bearer \(apiKey)"])

        XCTAssertEqual(sut.allHTTPHeaderFields?.count, 2)
        XCTAssertEqual(sut.allHTTPHeaderFields?["Content-Type"], "application/json")
        XCTAssertEqual(sut.allHTTPHeaderFields?["Authorization"], "Bearer 1234567890")
    }
}
35 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/ChatCompletions/ChatCompletionsAPIClientSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// End-to-end tests of `CreateChatCompletionsRequest` against stubbed HTTP
/// (success 200 and invalid-api-key 401), using `URLProtocolMock`.
final class ChatCompletionAPIClientSpec: XCTestCase {
    private var sut: CreateChatCompletionsRequestProtocol!
    private let model: OpenAIModelType = .gpt4(.base)
    private let apiKey = "1234567890"
    private let messages: [MessageChatGPT] = [.init(text: "Hello, who are you?",
                                                    role: .user)]

    func testAsyncAPIRequest_ParsesValidJSONToChatCompletionsDataModel() async throws {
        // Fixture mirrors OpenAI's documented chat.completion response shape.
        let json = loadJSON(name: "chat.completions")

        let api = API(requester: RequesterMock())
        let endpoint = OpenAIEndpoints.chatCompletions(model: model, messages: messages, optionalParameters: nil).endpoint

        sut = CreateChatCompletionsRequest()

        // Stub MUST be installed before `execute`: the mock session reads the
        // static handler when the request fires.
        stubHTTP(endpoint: endpoint,
                 json: json,
                 statusCode: 200)

        do {
            let dataModel = try await sut.execute(api: api, apiKey: apiKey, model: model, messages: messages, optionalParameters: nil)
            XCTAssertNotNil(dataModel)
            XCTAssertEqual(dataModel?.id, "chatcmpl-123")
            XCTAssertEqual(dataModel?.object, "chat.completion")
            XCTAssertEqual(dataModel?.created, 1677652288)
            XCTAssertEqual(dataModel?.choices.count, 1)
            XCTAssertEqual(dataModel?.choices[0].index, 0)
            XCTAssertEqual(dataModel?.choices[0].message.role, "assistant")
            XCTAssertEqual(dataModel?.choices[0].message.content, "Hello there, how may I assist you today?")
            XCTAssertEqual(dataModel?.choices[0].finishReason, "stop")
            XCTAssertEqual(dataModel?.usage.promptTokens, 9)
            XCTAssertEqual(dataModel?.usage.completionTokens, 12)
            XCTAssertEqual(dataModel?.usage.totalTokens, 21)
        } catch {
            XCTFail()
        }
    }

    func testAsyncAPIRequest_ParsesValidErrorJSONToErrorDataModel() async throws {
        let json = loadJSON(name: "chat.completions.error.invalid_api_key")

        let api = API(requester: RequesterMock())
        let endpoint = OpenAIEndpoints.chatCompletions(model: model, messages: messages, optionalParameters: nil).endpoint

        sut = CreateChatCompletionsRequest()

        // 401 makes RequesterMock surface the body as an error for parsing.
        stubHTTP(endpoint: endpoint,
                 json: json,
                 statusCode: 401)

        do {
            let _ = try await sut.execute(api: api, apiKey: apiKey, model: model, messages: messages, optionalParameters: nil)
            XCTFail()
        } catch let error as OpenAIAPIError {
            XCTAssertNotNil(error)
            XCTAssertEqual(error.code, "invalid_api_key")
            XCTAssertEqual(error.message, "Incorrect API key provided: YOUR_API_KEY. You can find your API key at https://platform.openai.com/account/api-keys.")
            XCTAssertEqual(error.type, "invalid_request_error")
            XCTAssertEqual(error.param, "")
        }
    }

    /// Installs a URLProtocolMock handler returning `json` with `statusCode`
    /// for any request to `endpoint`.
    private func stubHTTP(endpoint: Endpoint,
                          json: Data,
                          statusCode: Int) {

        URLProtocolMock.completionHandler = { request in
            let response = HTTPURLResponse(url: URL(string: endpoint.path)!,
                                           statusCode: statusCode,
                                           httpVersion: nil,
                                           headerFields: [:])!
            return (response, json)
        }
    }
}
79 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/ChatCompletions/ChatCompletionsEndpointSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// Verifies the chatCompletions endpoint: path, method, message encoding,
/// and the optional temperature/stream parameters.
final class ChatCompletionsEndpointSpec: XCTestCase {
    func testChatEndpointCreation_WithGPT4ModelAndMessages_CreatesCorrectEndpointParameters() throws {
        let model: OpenAIModelType = .gpt4(.base)
        let messages: [MessageChatGPT] = [.init(text: "Hello, who are you?", role: .user)]
        let sut = OpenAIEndpoints.chatCompletions(model: model, messages: messages, optionalParameters: nil).endpoint

        let firstParameterValue = sut.parameters!["model"] as! String
        let secondParameter = sut.parameters!["messages"] as! [[String : String]]

        XCTAssertEqual(sut.path, "chat/completions")
        XCTAssertEqual(sut.method, .POST)
        // NOTE(review): count is 8 even with nil optionalParameters —
        // presumably the endpoint fills defaults; confirm in the endpoint code.
        XCTAssertEqual(sut.parameters?.count, 8)
        XCTAssertEqual(firstParameterValue, model.name)
        XCTAssertEqual(secondParameter[0]["role"], messages[0].role.rawValue)
        XCTAssertEqual(secondParameter[0]["content"], messages[0].text)
    }

    func testChatEndpointCreation_WithGPT3_5ModelAndMessages_CreatesCorrectEndpointParameters() throws {
        let model: OpenAIModelType = .gpt3_5(.turbo)
        let messages: [MessageChatGPT] = [.init(text: "Generate 5 questions about Swift", role: .user)]
        let sut = OpenAIEndpoints.chatCompletions(model: model, messages: messages, optionalParameters: nil).endpoint

        let firstParameterValue = sut.parameters!["model"] as! String
        let secondParameter = sut.parameters!["messages"] as! [[String : String]]

        XCTAssertEqual(sut.path, "chat/completions")
        XCTAssertEqual(sut.method, .POST)
        XCTAssertEqual(sut.parameters?.count, 8)
        XCTAssertEqual(firstParameterValue, model.name)
        XCTAssertEqual(secondParameter[0]["role"], messages[0].role.rawValue)
        XCTAssertEqual(secondParameter[0]["content"], messages[0].text)
    }

    func testChatEndpointCreation_WithGPT4_ModelAndMessages_CreatesCorrectEndpointWithOptionalParameters() throws {
        let model: OpenAIModelType = .gpt3_5(.turbo)
        let messages: [MessageChatGPT] = [.init(text: "Generate 5 questions about Swift", role: .user)]
        // Explicit optional parameters must be reflected in the encoded dictionary.
        let optionalParameters: ChatCompletionsOptionalParameters = .init(temperature: 0.5, stream: true)
        let sut = OpenAIEndpoints.chatCompletions(model: model, messages: messages, optionalParameters: optionalParameters).endpoint

        let firstParameterValue = sut.parameters!["model"] as! String
        let secondParameter = sut.parameters!["messages"] as! [[String : String]]

        XCTAssertEqual(sut.path, "chat/completions")
        XCTAssertEqual(sut.method, .POST)
        XCTAssertEqual(sut.parameters?.count, 8)
        XCTAssertEqual(sut.parameters?["temperature"] as! Double, 0.5)
        XCTAssertEqual(sut.parameters?["stream"] as! Bool, true)
        XCTAssertEqual(firstParameterValue, model.name)
        XCTAssertEqual(secondParameter[0]["role"], messages[0].role.rawValue)
        XCTAssertEqual(secondParameter[0]["content"], messages[0].text)
    }
}
56 |
57 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/ChatCompletions/ChatCompletionsParserSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
/// Tests for parsing chat.completions payloads: success, wrong decoder
/// strategy, and API-error payloads.
final class ChatCompletionParserSpec: XCTestCase {
    private var api = API()

    func testAsyncAPIRequest_ParsesValidJSONToChatCompletionsDataModel() async throws {
        let jsonData = loadJSON(name: "chat.completions")

        let jsonDecoder = JSONDecoder()
        jsonDecoder.keyDecodingStrategy = .convertFromSnakeCase

        // Fix: `try` instead of `try!` — a decoding failure should fail this
        // test (the method throws), not crash the whole run.
        let dataModel = try api.parse(.success(jsonData), type: ChatCompletionsDataModel.self, jsonDecoder: jsonDecoder, errorType: OpenAIAPIError.self)

        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.id, "chatcmpl-123")
        XCTAssertEqual(dataModel?.object, "chat.completion")
        XCTAssertEqual(dataModel?.created, 1677652288)
        XCTAssertEqual(dataModel?.choices.count, 1)
        XCTAssertEqual(dataModel?.choices[0].index, 0)
        XCTAssertEqual(dataModel?.choices[0].message.role, "assistant")
        XCTAssertEqual(dataModel?.choices[0].message.content, "Hello there, how may I assist you today?")
        XCTAssertEqual(dataModel?.choices[0].finishReason, "stop")
        XCTAssertEqual(dataModel?.usage.promptTokens, 9)
        XCTAssertEqual(dataModel?.usage.completionTokens, 12)
        XCTAssertEqual(dataModel?.usage.totalTokens, 21)
    }

    func testAsyncAPIRequest_FailWithIncorrectJSONDecoderStrategy() async throws {
        let jsonData = loadJSON(name: "chat.completions")

        do {
            // Default key strategy cannot map snake_case JSON onto camelCase keys.
            let _ = try api.parse(.success(jsonData), type: ChatCompletionsDataModel.self, jsonDecoder: JSONDecoder(), errorType: OpenAIAPIError.self)
            // Fix: previously the test passed silently if nothing was thrown.
            XCTFail("Parsing should have thrown a keyNotFound decoding error")
        } catch let error as APIError {
            switch error {
            case .decodable(let decodingError):
                switch decodingError {
                case .keyNotFound(let codingKey, let context):
                    XCTAssertEqual(codingKey.stringValue, "promptTokens")
                    XCTAssertEqual(context.debugDescription, "No value associated with key CodingKeys(stringValue: \"promptTokens\", intValue: nil) (\"promptTokens\").")
                default:
                    XCTFail()
                }
            default:
                XCTFail()
            }
        }
    }

    func testAsyncAPIRequest_ParsesValidErrorJSONToErrorDataModel() async throws {
        let jsonData = loadJSON(name: "chat.completions.error.invalid_api_key")

        let jsonDecoder = JSONDecoder()
        jsonDecoder.keyDecodingStrategy = .convertFromSnakeCase

        do {
            let _ = try api.parse(.failure(.jsonResponseError(jsonData.toJSONString()!)), type: ChatCompletionsDataModel.self, jsonDecoder: jsonDecoder, errorType: OpenAIAPIError.self)
            // Fix: previously the test passed silently if nothing was thrown.
            XCTFail("Parsing a failure result should have thrown OpenAIAPIError")
        } catch let error as OpenAIAPIError {
            XCTAssertNotNil(error)
            XCTAssertEqual(error.code, "invalid_api_key")
            XCTAssertEqual(error.message, "Incorrect API key provided: YOUR_API_KEY. You can find your API key at https://platform.openai.com/account/api-keys.")
            XCTAssertEqual(error.type, "invalid_request_error")
            XCTAssertEqual(error.param, "")
        }
    }
}
67 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/ChatCompletions/ChatCompletionsRequestSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class ChatCompletionRequestSpec: XCTestCase {
    private let api = API()

    /// Routing a chat-completions endpoint and adding headers yields exactly the two expected header fields.
    func testRequest_CreatedWithCorrectHeaders() throws {
        let apiKey = "1234567890"
        let model: OpenAIModelType = .gpt4(.base)
        let messages: [MessageChatGPT] = [.init(text: "Hello, who are you?", role: .user)]
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]

        var endpoint = OpenAIEndpoints.chatCompletions(model: model, messages: messages, optionalParameters: nil).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var sut = api.buildURLRequest(endpoint: endpoint)
        api.addHeaders(urlRequest: &sut, headers: headers)

        let headerFields = sut.allHTTPHeaderFields
        XCTAssertEqual(headerFields?.count, 2)
        XCTAssertEqual(headerFields?["Content-Type"], "application/json")
        XCTAssertEqual(headerFields?["Authorization"], "Bearer 1234567890")
    }
}
25 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/ChatCompletions/ChatCompletionsStreamMapperSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class ChatCompletionsStreamMapperSpec: XCTestCase {
    /// Two SSE "data:" chunks parse into two distinct stream data models.
    /// Renamed from `dsaddsa()`: XCTest only discovers methods whose names start
    /// with `test`, so this spec never actually ran before.
    func testParse_TwoStreamChunks_ReturnsBothDataModels() {
        let stringResult = "data: {\"id\":\"chatcmpl-6y3hLScC8nkFaGwfZWXqBXPPFdlei\",\"object\":\"chat.completion.chunk\",\"created\":1679771915,\"model\":\"gpt-3.5-turbo-0301\",\"choices\":[{\"delta\":{\"content\":\" conectar\"},\"index\":0,\"finish_reason\":null}]}\n\ndata: {\"id\":\"chatcmpl-6y3hLScC8nkFaGwfZWXqBXPPFdlei\",\"object\":\"chat.completion.chunk\",\"created\":1679771915,\"model\":\"gpt-3.5-turbo-0301\",\"choices\":[{\"delta\":{\"content\":\" emoc\"},\"index\":0,\"finish_reason\":null}]}\n\n"

        let sut = ChatCompletionsStreamMapper()
        do {
            let chatCompletionsStreamDataModel = try sut.parse(data: stringResult.data(using: .utf8)!)
            let firstStreamMessage = chatCompletionsStreamDataModel[0]
            // Fixed: was `chatCompletionsStreamDataModel[0]`, which re-asserted the first
            // chunk and left the second chunk's content (" emoc") unverified.
            let secondStreamMessage = chatCompletionsStreamDataModel[1]

            XCTAssertEqual(chatCompletionsStreamDataModel.count, 2)
            XCTAssertEqual(firstStreamMessage.id, "chatcmpl-6y3hLScC8nkFaGwfZWXqBXPPFdlei")
            XCTAssertEqual(firstStreamMessage.object, "chat.completion.chunk")
            XCTAssertEqual(firstStreamMessage.created, 1679771915)
            XCTAssertEqual(firstStreamMessage.model, "gpt-3.5-turbo-0301")
            XCTAssertEqual(firstStreamMessage.choices[0].delta?.content, " conectar")
            XCTAssertEqual(firstStreamMessage.choices[0].index, 0)
            XCTAssertNil(firstStreamMessage.choices[0].finishReason)

            XCTAssertEqual(secondStreamMessage.id, "chatcmpl-6y3hLScC8nkFaGwfZWXqBXPPFdlei")
            XCTAssertEqual(secondStreamMessage.object, "chat.completion.chunk")
            XCTAssertEqual(secondStreamMessage.model, "gpt-3.5-turbo-0301")
            XCTAssertEqual(secondStreamMessage.choices[0].delta?.content, " emoc")
            XCTAssertEqual(secondStreamMessage.choices[0].index, 0)
            XCTAssertNil(secondStreamMessage.choices[0].finishReason)
        } catch {
            XCTFail("Unexpected error parsing stream data: \(error)")
        }
    }
}
34 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Completions/CompletionParserSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class CompletionParserSpec: XCTestCase {
    private var api = API()

    /// Valid completions JSON (snake_case keys) decodes into `CompletionsDataModel`.
    func testAsyncAPIRequest_ParsesValidJSONToCompletionsDataModel() async throws {
        let jsonData = loadJSON(name: "completions")

        let jsonDecoder = JSONDecoder()
        jsonDecoder.keyDecodingStrategy = .convertFromSnakeCase

        // `try` instead of `try!`: a decoding failure should fail the test, not crash the runner.
        let dataModel = try api.parse(.success(jsonData), type: CompletionsDataModel.self, jsonDecoder: jsonDecoder, errorType: OpenAIAPIError.self)

        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.id, "cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7")
        XCTAssertEqual(dataModel?.object, "text_completion")
        XCTAssertEqual(dataModel?.created, 1589478378)
        XCTAssertEqual(dataModel?.choices.count, 1)
        // Duplicate `choices[0].index` assertion removed (it appeared twice in the original).
        XCTAssertEqual(dataModel?.choices[0].index, 0)
        XCTAssertEqual(dataModel?.choices[0].text, "\n\nThis is indeed a test")
        XCTAssertEqual(dataModel?.choices[0].finishReason, "length")
        XCTAssertEqual(dataModel?.usage.promptTokens, 5)
        XCTAssertEqual(dataModel?.usage.completionTokens, 7)
        XCTAssertEqual(dataModel?.usage.totalTokens, 12)
    }
}
29 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Completions/CompletionRequestSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class CompletionRequestSpec: XCTestCase {
    private let api = API()

    /// Building a completions request and adding headers yields exactly the two expected header fields.
    func testRequest_CreatedWithCorrectHeaders() throws {
        let apiKey = "1234567890"
        let model: OpenAIModelType = .gpt3_5(.gpt_3_5_turbo_1106)
        let optionalParameters: CompletionsOptionalParameters = .init(prompt: "Say this is a test")

        var endpoint = OpenAIEndpoints.completions(model: model, optionalParameters: optionalParameters).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var sut = api.buildURLRequest(endpoint: endpoint)
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]
        api.addHeaders(urlRequest: &sut, headers: headers)

        let headerFields = sut.allHTTPHeaderFields
        XCTAssertEqual(headerFields?.count, 2)
        XCTAssertEqual(headerFields?["Content-Type"], "application/json")
        XCTAssertEqual(headerFields?["Authorization"], "Bearer 1234567890")
    }
}
25 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Completions/CompletionsAPIClientSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class CompletionAPIClientSpec: XCTestCase {
    private var sut: CompletionsRequestProtocol!
    private let model: OpenAIModelType = .gpt3_5(.gpt_3_5_turbo_1106)
    private let apiKey = "1234567890"
    private let optionalParameters: CompletionsOptionalParameters = .init(prompt: "Say this is a test")

    /// A stubbed 200 response with valid JSON decodes into `CompletionsDataModel`.
    func testAsyncAPIRequest_ParsesValidJSONToChatCompletionsDataModel() async throws {
        let json = loadJSON(name: "completions")

        let api = API(requester: RequesterMock())
        let endpoint = OpenAIEndpoints.completions(model: model, optionalParameters: optionalParameters).endpoint

        sut = CompletionsRequest()

        stubHTTP(endpoint: endpoint,
                 json: json,
                 statusCode: 200)

        // Let errors propagate through `async throws`: the original `catch { XCTFail() }`
        // discarded the underlying failure reason from the test report.
        let dataModel = try await sut.execute(api: api, apiKey: apiKey, model: model, optionalParameters: optionalParameters)
        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.id, "cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7")
        XCTAssertEqual(dataModel?.object, "text_completion")
        XCTAssertEqual(dataModel?.created, 1589478378)
        XCTAssertEqual(dataModel?.choices.count, 1)
        // Duplicate `choices[0].index` assertion removed (it appeared twice in the original).
        XCTAssertEqual(dataModel?.choices[0].index, 0)
        XCTAssertEqual(dataModel?.choices[0].text, "\n\nThis is indeed a test")
        XCTAssertEqual(dataModel?.choices[0].finishReason, "length")
        XCTAssertEqual(dataModel?.usage.promptTokens, 5)
        XCTAssertEqual(dataModel?.usage.completionTokens, 7)
        XCTAssertEqual(dataModel?.usage.totalTokens, 12)
    }

    /// A stubbed 401 response with an error payload surfaces as a typed `OpenAIAPIError`.
    func testAsyncAPIRequest_ParsesValidErrorJSONToErrorDataModel() async throws {
        let json = loadJSON(name: "completions.error.invalid_api_key")

        let api = API(requester: RequesterMock())
        let endpoint = OpenAIEndpoints.completions(model: model, optionalParameters: optionalParameters).endpoint

        sut = CompletionsRequest()

        stubHTTP(endpoint: endpoint,
                 json: json,
                 statusCode: 401)

        do {
            let _ = try await sut.execute(api: api, apiKey: apiKey, model: model, optionalParameters: optionalParameters)
            XCTFail("Expected execute to throw OpenAIAPIError")
        } catch let error as OpenAIAPIError {
            // Note: `XCTAssertNotNil(error)` removed — `error` is non-optional here, so it always passed.
            XCTAssertEqual(error.code, "invalid_api_key")
            XCTAssertEqual(error.message, "Incorrect API key provided: YOUR_API_KEY. You can find your API key at https://platform.openai.com/account/api-keys.")
            XCTAssertEqual(error.type, "invalid_request_error")
            XCTAssertEqual(error.param, "")
        }
    }

    /// Registers a canned HTTP response (status + body) on the URLProtocol mock.
    private func stubHTTP(endpoint: Endpoint,
                          json: Data,
                          statusCode: Int) {

        URLProtocolMock.completionHandler = { _ in
            let response = HTTPURLResponse(url: URL(string: endpoint.path)!,
                                           statusCode: statusCode,
                                           httpVersion: nil,
                                           headerFields: [:])!
            return (response, json)
        }
    }
}
78 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Completions/CompletionsEndpointSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class CompletionsEndpointSpec: XCTestCase {
    /// The completions endpoint carries the model name and prompt among its parameters.
    /// Renamed: the old name said "WithDavinciModel" but the model under test is
    /// `.gpt3_5(.gpt_3_5_turbo_1106)`.
    func testEndpointCompletions_WithGPT35TurboModelAndPrompt_CreatesCorrectEndpointParameters() throws {
        let model: OpenAIModelType = .gpt3_5(.gpt_3_5_turbo_1106)
        let optionalParameters: CompletionsOptionalParameters = .init(prompt: "Say this is a test")
        let sut = OpenAIEndpoints.completions(model: model, optionalParameters: optionalParameters).endpoint

        let modelParameter = sut.parameters!["model"] as! String
        let promptParameter = sut.parameters!["prompt"] as! String

        XCTAssertEqual(sut.path, "completions")
        XCTAssertEqual(sut.method, .POST)
        XCTAssertEqual(sut.parameters?.count, 14)
        XCTAssertEqual(modelParameter, model.name)
        XCTAssertEqual(promptParameter, optionalParameters.prompt)
    }

    /// Optional parameters (max_tokens, temperature) are forwarded into the endpoint parameters.
    func testEndpointCompletions_WithGPT35TurboModelAndPrompt_WithOptionalParameters_CreatesCorrectEndpointParameters() throws {
        let model: OpenAIModelType = .gpt3_5(.gpt_3_5_turbo_1106)
        let optionalParameters: CompletionsOptionalParameters = .init(prompt: "Say this is a test",
                                                                      maxTokens: 1024,
                                                                      temperature: 0.8)
        let sut = OpenAIEndpoints.completions(model: model, optionalParameters: optionalParameters).endpoint

        let modelParameter = sut.parameters!["model"] as! String
        let promptParameter = sut.parameters!["prompt"] as! String
        let maxTokensParameter = sut.parameters!["max_tokens"] as! Int
        let temperatureParameter = sut.parameters!["temperature"] as! Double

        XCTAssertEqual(sut.path, "completions")
        XCTAssertEqual(sut.method, .POST)
        XCTAssertEqual(sut.parameters?.count, 14)
        XCTAssertEqual(modelParameter, model.name)
        XCTAssertEqual(promptParameter, optionalParameters.prompt)
        XCTAssertEqual(maxTokensParameter, optionalParameters.maxTokens)
        XCTAssertEqual(temperatureParameter, optionalParameters.temperature)
    }
}
41 |
42 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Embeddings/EmbeddingsAPIClientSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class EmbeddingsAPIClientSpec: XCTestCase {
    private var sut: EmbeddingsRequestProtocol!
    private let model: OpenAIModelType = .embedding(.text_embedding_ada_002)
    private let apiKey = "1234567890"
    private let input = "What day of the wek is it?"

    /// A stubbed 200 response with valid JSON decodes into the embeddings data model.
    func testAsyncAPIRequest_ParsesValidJSONToEmbeddingsDataModel() async throws {
        let json = loadJSON(name: "embeddings")

        let api = API(requester: RequesterMock())
        let endpoint = OpenAIEndpoints.embeddings(model: model, input: input).endpoint

        sut = EmbeddingsRequest()

        stubHTTP(endpoint: endpoint,
                 json: json,
                 statusCode: 200)

        // Let errors propagate through `async throws`: the original `catch { XCTFail() }`
        // discarded the underlying failure reason from the test report.
        let dataModel = try await sut.execute(api: api, apiKey: apiKey, model: model, input: input)
        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.object, "list")
        XCTAssertEqual(dataModel?.model, "text-embedding-ada-002")
        XCTAssertEqual(dataModel?.data[0].embedding.count, 1536)
        XCTAssertEqual(dataModel?.data[0].object, "embedding")
        XCTAssertEqual(dataModel?.data[0].embedding[0], 0.0023064255)
        XCTAssertEqual(dataModel?.data[0].embedding[1], -0.009327292)
        XCTAssertEqual(dataModel?.usage.promptTokens, 8)
        XCTAssertEqual(dataModel?.usage.totalTokens, 8)
    }

    /// Registers a canned HTTP response (status + body) on the URLProtocol mock.
    private func stubHTTP(endpoint: Endpoint,
                          json: Data,
                          statusCode: Int) {

        URLProtocolMock.completionHandler = { _ in
            let response = HTTPURLResponse(url: URL(string: endpoint.path)!,
                                           statusCode: statusCode,
                                           httpVersion: nil,
                                           headerFields: [:])!
            return (response, json)
        }
    }
}
51 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Embeddings/EmbeddingsEndpointSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class EmbeddingsEndpointSpec: XCTestCase {
    /// The embeddings endpoint POSTs to "embeddings" with exactly two parameters: model and input.
    func testEndpointCompletions_WithEmbeddingADA002ModelInput_CreatesCorrectEndpointParameters() throws {
        let model: OpenAIModelType = .embedding(.text_embedding_ada_002)
        let input = "What day of the wek is it?"

        let sut = OpenAIEndpoints.embeddings(model: model, input: input).endpoint

        XCTAssertEqual(sut.path, "embeddings")
        XCTAssertEqual(sut.method, .POST)
        XCTAssertEqual(sut.parameters?.count, 2)
        XCTAssertEqual(sut.parameters!["model"] as! String, model.name)
        XCTAssertEqual(sut.parameters!["input"] as! String, input)
    }
}
21 |
22 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Embeddings/EmbeddingsParserSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class EmbeddingsParserSpec: XCTestCase {
    private var api = API()

    /// Valid embeddings JSON (snake_case keys) decodes into `EmbeddingResponseDataModel`.
    func testAsyncAPIRequest_ParsesValidJSONToEmbeddingsDataModel() async throws {
        let jsonData = loadJSON(name: "embeddings")

        let jsonDecoder = JSONDecoder()
        jsonDecoder.keyDecodingStrategy = .convertFromSnakeCase

        // `try` instead of `try!`: a decoding failure should fail the test, not crash the runner.
        let dataModel = try api.parse(.success(jsonData), type: EmbeddingResponseDataModel.self, jsonDecoder: jsonDecoder, errorType: OpenAIAPIError.self)

        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.object, "list")
        XCTAssertEqual(dataModel?.model, "text-embedding-ada-002")
        XCTAssertEqual(dataModel?.data[0].embedding.count, 1536)
        XCTAssertEqual(dataModel?.data[0].object, "embedding")
        XCTAssertEqual(dataModel?.data[0].embedding[0], 0.0023064255)
        XCTAssertEqual(dataModel?.data[0].embedding[1], -0.009327292)
        XCTAssertEqual(dataModel?.usage.promptTokens, 8)
        XCTAssertEqual(dataModel?.usage.totalTokens, 8)
    }
}
26 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Embeddings/EmbeddingsRequestSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class EmbeddingsRequestSpec: XCTestCase {
    private let api = API()

    /// Building an embeddings request and adding headers yields exactly the two expected header fields.
    func testRequest_CreatedWithCorrectHeaders() throws {
        let apiKey = "1234567890"
        let model: OpenAIModelType = .embedding(.text_embedding_ada_002)
        let input = "What day of the wek is it?"
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]

        var endpoint = OpenAIEndpoints.embeddings(model: model, input: input).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var sut = api.buildURLRequest(endpoint: endpoint)
        api.addHeaders(urlRequest: &sut, headers: headers)

        let headerFields = sut.allHTTPHeaderFields
        XCTAssertEqual(headerFields?.count, 2)
        XCTAssertEqual(headerFields?["Content-Type"], "application/json")
        XCTAssertEqual(headerFields?["Authorization"], "Bearer 1234567890")
    }
}
26 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Images/CreateImageAPIClientSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class CreateImageAPIClientSpec: XCTestCase {
    private var sut: CreateImagesRequestProtocol!
    private let prompt = "Pixar style 3D render of a baby hippo, 4k, high resolution, trending in artstation"
    private let numberOfImages = 4
    private let size: ImageSize = .s1024
    // Fixed: was `.gpt4(.base)` but never used — the endpoint and execute calls
    // hard-coded `.dalle(.dalle3)`. The property now holds the model actually tested.
    private let model: OpenAIModelType = .dalle(.dalle3)
    private let apiKey = "1234567890"

    /// A stubbed 200 response with valid JSON decodes into the create-image data model.
    func testAsyncAPIRequest_ParsesValidJSONToChatCompletionsDataModel() async throws {
        let json = loadJSON(name: "create.image")

        let api = API(requester: RequesterMock())

        let endpoint = OpenAIEndpoints.createImage(model: model, prompt: prompt, numberOfImages: numberOfImages, size: size).endpoint

        sut = CreateImagesRequest()

        stubHTTP(endpoint: endpoint,
                 json: json,
                 statusCode: 200)

        // Let errors propagate through `async throws`: the original `catch { XCTFail() }`
        // discarded the underlying failure reason from the test report.
        let dataModel = try await sut.execute(api: api, apiKey: apiKey, model: model, prompt: prompt, numberOfImages: numberOfImages, size: size)
        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.created, 1589478378)
        XCTAssertEqual(dataModel?.data.count, 4)
        XCTAssertEqual(dataModel?.data[0].url, "https://www.openai1.com")
        XCTAssertEqual(dataModel?.data[1].url, "https://www.openai2.com")
        XCTAssertEqual(dataModel?.data[2].url, "https://www.openai3.com")
        XCTAssertEqual(dataModel?.data[3].url, "https://www.openai4.com")
    }

    /// Registers a canned HTTP response (status + body) on the URLProtocol mock.
    private func stubHTTP(endpoint: Endpoint,
                          json: Data,
                          statusCode: Int) {

        URLProtocolMock.completionHandler = { _ in
            let response = HTTPURLResponse(url: URL(string: endpoint.path)!,
                                           statusCode: statusCode,
                                           httpVersion: nil,
                                           headerFields: [:])!
            return (response, json)
        }
    }
}
52 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Images/CreateImageEndpointSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class CreateImageEndpointSpec: XCTestCase {
    /// Image generation POSTs to "images/generations" with prompt, count and size parameters.
    func testChatEndpointCreateImage_WithAllParameters_CreatesCorrectEndpointParameters() throws {
        let expectedPrompt = "Pixar style 3D render of a baby hippo, 4k, high resolution, trending in artstation"
        let expectedCount = 1
        let expectedSize: ImageSize = .s1024

        let sut = OpenAIEndpoints.createImage(model: .dalle(.dalle3),
                                              prompt: expectedPrompt,
                                              numberOfImages: expectedCount,
                                              size: expectedSize).endpoint

        XCTAssertEqual(sut.path, "images/generations")
        XCTAssertEqual(sut.method, .POST)
        XCTAssertEqual(sut.parameters?.count, 4)
        XCTAssertEqual(sut.parameters?["prompt"] as! String, expectedPrompt)
        XCTAssertEqual(sut.parameters?["n"] as! Int, expectedCount)
        XCTAssertEqual(sut.parameters?["size"] as! String, expectedSize.rawValue)
    }
}
19 |
20 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Images/CreateImageParserSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class CreateImageParserSpec: XCTestCase {
    private var api = API()

    /// Valid create-image JSON decodes into `CreateImageDataModel` with all four URLs.
    func testAsyncAPIRequest_ParsesValidJSONToChatCompletionsDataModel() async throws {
        let jsonData = loadJSON(name: "create.image")

        let jsonDecoder = JSONDecoder()
        jsonDecoder.keyDecodingStrategy = .convertFromSnakeCase

        // `try` instead of `try!`: a decoding failure should fail the test, not crash the runner.
        let dataModel = try api.parse(.success(jsonData), type: CreateImageDataModel.self, jsonDecoder: jsonDecoder, errorType: OpenAIAPIError.self)

        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.created, 1589478378)
        XCTAssertEqual(dataModel?.data.count, 4)
        XCTAssertEqual(dataModel?.data[0].url, "https://www.openai1.com")
        XCTAssertEqual(dataModel?.data[1].url, "https://www.openai2.com")
        XCTAssertEqual(dataModel?.data[2].url, "https://www.openai3.com")
        XCTAssertEqual(dataModel?.data[3].url, "https://www.openai4.com")
    }
}
24 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Images/CreateImageRequestSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class CreateImageRequestSpec: XCTestCase {
    private let api = API()

    /// Building a create-image request and adding headers yields exactly the two expected header fields.
    func testRequest_CreatedWithCorrectHeaders() throws {
        let apiKey = "1234567890"
        let prompt = "Pixar style 3D render of a baby hippo, 4k, high resolution, trending in artstation"
        let imageCount = 4
        let imageSize: ImageSize = .s1024
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]

        var endpoint = OpenAIEndpoints.createImage(model: .dalle(.dalle2), prompt: prompt, numberOfImages: imageCount, size: imageSize).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var sut = api.buildURLRequest(endpoint: endpoint)
        api.addHeaders(urlRequest: &sut, headers: headers)

        let headerFields = sut.allHTTPHeaderFields
        XCTAssertEqual(headerFields?.count, 2)
        XCTAssertEqual(headerFields?["Content-Type"], "application/json")
        XCTAssertEqual(headerFields?["Authorization"], "Bearer 1234567890")
    }
}
27 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/JSON/chat.completions.error.invalid_api_key.json:
--------------------------------------------------------------------------------
1 | {
2 | "error": {
3 | "message": "Incorrect API key provided: YOUR_API_KEY. You can find your API key at https://platform.openai.com/account/api-keys.",
4 | "type": "invalid_request_error",
5 | "param": "",
6 | "code": "invalid_api_key"
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/JSON/chat.completions.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "chatcmpl-123",
3 | "object": "chat.completion",
4 | "created": 1677652288,
5 | "choices": [{
6 | "index": 0,
7 | "message": {
8 | "role": "assistant",
9 |             "content": "Hello there, how may I assist you today?"
10 | },
11 | "finish_reason": "stop"
12 | }],
13 | "usage": {
14 | "prompt_tokens": 9,
15 | "completion_tokens": 12,
16 | "total_tokens": 21
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/JSON/completions.error.invalid_api_key.json:
--------------------------------------------------------------------------------
1 | {
2 | "error": {
3 | "message": "Incorrect API key provided: YOUR_API_KEY. You can find your API key at https://platform.openai.com/account/api-keys.",
4 | "type": "invalid_request_error",
5 | "param": "",
6 | "code": "invalid_api_key"
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/JSON/completions.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7",
3 | "object": "text_completion",
4 | "created": 1589478378,
5 | "model": "text-davinci-003",
6 | "choices": [
7 | {
8 | "text": "\n\nThis is indeed a test",
9 | "index": 0,
10 | "logprobs": null,
11 | "finish_reason": "length"
12 | }
13 | ],
14 | "usage": {
15 | "prompt_tokens": 5,
16 | "completion_tokens": 7,
17 | "total_tokens": 12
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/JSON/create.image.json:
--------------------------------------------------------------------------------
1 | {
2 | "created": 1589478378,
3 | "data": [
4 | {
5 | "url": "https://www.openai1.com"
6 | },
7 | {
8 | "url": "https://www.openai2.com"
9 | },
10 | {
11 | "url": "https://www.openai3.com"
12 | },
13 | {
14 | "url": "https://www.openai4.com"
15 | }
16 | ]
17 | }
18 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/JSON/moderations.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "modr-5MWoLO",
3 | "model": "text-moderation-001",
4 | "results": [
5 | {
6 | "categories": {
7 | "hate": false,
8 | "hate/threatening": true,
9 | "self-harm": false,
10 | "sexual": false,
11 | "sexual/minors": false,
12 | "violence": true,
13 | "violence/graphic": false
14 | },
15 | "category_scores": {
16 | "hate": 0.22714105248451233,
17 | "hate/threatening": 0.4132447838783264,
18 | "self-harm": 0.005232391878962517,
19 | "sexual": 0.01407341007143259,
20 | "sexual/minors": 0.0038522258400917053,
21 | "violence": 0.9223177433013916,
22 | "violence/graphic": 0.036865197122097015
23 | },
24 | "flagged": true
25 | }
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/ListModels/ListModelsAPIClientSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class ListModelsAPIClientSpec: XCTestCase {
    private var sut: ListModelsRequestProtocol!
    private let model: OpenAIModelType = .gpt4(.base)
    private let apiKey = "1234567890"

    /// A stubbed 200 response with valid JSON decodes into the model-list data model.
    func testAsyncAPIRequest_ParsesValidJSONToChatCompletionsDataModel() async throws {
        let json = loadJSON(name: "models")

        let api = API(requester: RequesterMock())
        let endpoint = OpenAIEndpoints.listModels.endpoint

        sut = ListModelsRequest()

        stubHTTP(endpoint: endpoint,
                 json: json,
                 statusCode: 200)

        // Let errors propagate through `async throws`: the original `catch { XCTFail() }`
        // discarded the underlying failure reason from the test report.
        let dataModel = try await sut.execute(api: api, apiKey: apiKey)
        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.data.count, 66)
        XCTAssertEqual(dataModel?.data[0].object, "model")
        XCTAssertEqual(dataModel?.data[0].id, "babbage")
        XCTAssertEqual(dataModel?.data[0].created, 1649358449)
        XCTAssertEqual(dataModel?.data[0].ownedBy, "openai")
    }

    /// Registers a canned HTTP response (status + body) on the URLProtocol mock.
    private func stubHTTP(endpoint: Endpoint,
                          json: Data,
                          statusCode: Int) {

        URLProtocolMock.completionHandler = { _ in
            let response = HTTPURLResponse(url: URL(string: endpoint.path)!,
                                           statusCode: statusCode,
                                           httpVersion: nil,
                                           headerFields: [:])!
            return (response, json)
        }
    }
}
47 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/ListModels/ListModelsEndpointSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class ListModelsEndpointSpec: XCTestCase {
    /// Listing models is a plain GET against the "models" path.
    func testEndpointListModels() throws {
        let sut = OpenAIEndpoints.listModels.endpoint

        XCTAssertEqual(sut.method, .GET)
        XCTAssertEqual(sut.path, "models")
    }
}
12 |
13 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/ListModels/ListModelsParserSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class ListModelsParserSpec: XCTestCase {
    private var api = API()

    /// Valid models-list JSON decodes into `ModelListDataModel`.
    /// Renamed: the old name said "EditsDataModel" but this spec parses the model list.
    func testAsyncAPIRequest_ParsesValidJSONToModelListDataModel() async throws {
        let jsonData = loadJSON(name: "models")

        let jsonDecoder = JSONDecoder()
        jsonDecoder.keyDecodingStrategy = .convertFromSnakeCase

        // `try` instead of `try!`: a decoding failure should fail the test, not crash the runner.
        let dataModel = try api.parse(.success(jsonData), type: ModelListDataModel.self, jsonDecoder: jsonDecoder, errorType: OpenAIAPIError.self)

        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.data.count, 66)
        XCTAssertEqual(dataModel?.data[0].object, "model")
        XCTAssertEqual(dataModel?.data[0].id, "babbage")
        XCTAssertEqual(dataModel?.data[0].created, 1649358449)
        XCTAssertEqual(dataModel?.data[0].ownedBy, "openai")
    }
}
23 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/ListModels/ListModelsRequestSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class ListModelsRequestSpec: XCTestCase {
    private let api = API()

    /// Building a list-models request and adding headers yields exactly the two expected header fields.
    func testRequest_CreatedWithCorrectHeaders() throws {
        let apiKey = "1234567890"
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]

        var endpoint = OpenAIEndpoints.listModels.endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var sut = api.buildURLRequest(endpoint: endpoint)
        api.addHeaders(urlRequest: &sut, headers: headers)

        let headerFields = sut.allHTTPHeaderFields
        XCTAssertEqual(headerFields?.count, 2)
        XCTAssertEqual(headerFields?["Content-Type"], "application/json")
        XCTAssertEqual(headerFields?["Authorization"], "Bearer 1234567890")
    }
}
24 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Moderations/ModerationsAPIClientSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class ModerationsAPIClientSpec: XCTestCase {
    private var sut: ModerationsRequestProtocol!
    private let apiKey = "1234567890"
    private let input = "Some potentially harmful or explicit content."

    /// Runs the moderations request against a stubbed HTTP layer and checks
    /// that the fixture JSON is decoded into the expected data model.
    func testAsyncAPIRequest_ParsesValidJSONToDataModel() async throws {
        let json = loadJSON(name: "moderations")

        let api = API(requester: RequesterMock())
        let endpoint = OpenAIEndpoints.moderations(input: input).endpoint

        sut = ModerationsRequest()

        stubHTTP(endpoint: endpoint,
                 json: json,
                 statusCode: 200)

        // The test is `async throws`, so any thrown error fails the test with
        // its full diagnostics. The previous do/catch + bare `XCTFail()`
        // discarded the underlying error, making failures hard to debug.
        let dataModel = try await sut.execute(api: api, apiKey: apiKey, input: input)
        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.id, "modr-5MWoLO")
        XCTAssertEqual(dataModel?.model, "text-moderation-001")
        XCTAssertEqual(dataModel?.results.count, 1)
        XCTAssertEqual(dataModel?.results[0].categories.hate, false)
        XCTAssertEqual(dataModel?.results[0].categories.hateThreatening, true)
        XCTAssertEqual(dataModel?.results[0].categories.selfHarm, false)
        XCTAssertEqual(dataModel?.results[0].categories.sexual, false)
        XCTAssertEqual(dataModel?.results[0].categories.sexualMinors, false)
        XCTAssertEqual(dataModel?.results[0].categories.violence, true)
        XCTAssertEqual(dataModel?.results[0].categories.violenceGraphic, false)
        XCTAssertEqual(dataModel?.results[0].categoryScores.hate, 0.22714105248451233)
        XCTAssertEqual(dataModel?.results[0].categoryScores.hateThreatening, 0.4132447838783264)
        // NOTE(review): disabled assertion — likely needs an `accuracy:`
        // tolerance for Double round-tripping; confirm against the fixture.
        //XCTAssertEqual(dataModel?.results[0].categoryScores.selfHarm, 0.005232391878962517)
        XCTAssertEqual(dataModel?.results[0].categoryScores.sexual, 0.01407341007143259)
        XCTAssertEqual(dataModel?.results[0].categoryScores.sexualMinors, 0.0038522258400917053)
        XCTAssertEqual(dataModel?.results[0].categoryScores.violence, 0.9223177433013916)
        XCTAssertEqual(dataModel?.results[0].categoryScores.violenceGraphic, 0.036865197122097015)
        XCTAssertEqual(dataModel?.results[0].flagged, true)
    }

    /// Installs a `URLProtocolMock` handler that answers any request with the
    /// given JSON payload and status code.
    private func stubHTTP(endpoint: Endpoint,
                          json: Data,
                          statusCode: Int) {

        // The incoming request is ignored: every request gets the same stub.
        URLProtocolMock.completionHandler = { _ in
            let response = HTTPURLResponse(url: URL(string: endpoint.path)!,
                                           statusCode: statusCode,
                                           httpVersion: nil,
                                           headerFields: [:])!
            return (response, json)
        }
    }
}
60 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Moderations/ModerationsEndpointSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class ModerationsEndpointSpec: XCTestCase {
    /// The moderations endpoint must resolve to POST /moderations with the
    /// caller's input carried as the single "input" parameter.
    func testEndpointModerations_WithInput_CreatesCorrectEndpointParameters() throws {
        let input = "Some potentially harmful or explicit content."

        let sut = OpenAIEndpoints.moderations(input: input).endpoint

        // XCTUnwrap instead of `!` + `as!`: a missing or mistyped parameter
        // fails this test with a clear message instead of crashing the run.
        let inputParameter = try XCTUnwrap(sut.parameters?["input"] as? String)

        XCTAssertEqual(sut.path, "moderations")
        XCTAssertEqual(sut.method, .POST)
        XCTAssertEqual(sut.parameters?.count, 1)
        XCTAssertEqual(inputParameter, input)
    }
}
18 |
19 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Moderations/ModerationsParserSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class ModerationsParserSpec: XCTestCase {
    private var api = API()

    /// Decodes the bundled "moderations" JSON fixture into
    /// `ModerationDataModel` and checks every category and score field.
    func testAsyncAPIRequest_ParsesValidJSONToModerationsDataModel() async throws {
        let jsonData = loadJSON(name: "moderations")

        let jsonDecoder = JSONDecoder()
        // The OpenAI payload uses snake_case keys (e.g. "category_scores").
        jsonDecoder.keyDecodingStrategy = .convertFromSnakeCase

        // `try` instead of `try!`: the test is already `throws`, so a decoding
        // failure should fail this test with a diagnosable error rather than
        // crash the entire test run.
        let dataModel = try api.parse(.success(jsonData), type: ModerationDataModel.self, jsonDecoder: jsonDecoder, errorType: OpenAIAPIError.self)

        XCTAssertNotNil(dataModel)
        XCTAssertEqual(dataModel?.id, "modr-5MWoLO")
        XCTAssertEqual(dataModel?.model, "text-moderation-001")
        XCTAssertEqual(dataModel?.results.count, 1)
        XCTAssertEqual(dataModel?.results[0].categories.hate, false)
        XCTAssertEqual(dataModel?.results[0].categories.hateThreatening, true)
        XCTAssertEqual(dataModel?.results[0].categories.selfHarm, false)
        XCTAssertEqual(dataModel?.results[0].categories.sexual, false)
        XCTAssertEqual(dataModel?.results[0].categories.sexualMinors, false)
        XCTAssertEqual(dataModel?.results[0].categories.violence, true)
        XCTAssertEqual(dataModel?.results[0].categories.violenceGraphic, false)
        XCTAssertEqual(dataModel?.results[0].categoryScores.hate, 0.22714105248451233)
        XCTAssertEqual(dataModel?.results[0].categoryScores.hateThreatening, 0.4132447838783264)
        // NOTE(review): disabled assertion — likely needs an `accuracy:`
        // tolerance for Double round-tripping; confirm against the fixture.
        //XCTAssertEqual(dataModel?.results[0].categoryScores.selfHarm, 0.005232391878962517, accuracy: 0.000000000000001)
        XCTAssertEqual(dataModel?.results[0].categoryScores.sexual, 0.01407341007143259)
        XCTAssertEqual(dataModel?.results[0].categoryScores.sexualMinors, 0.0038522258400917053)
        XCTAssertEqual(dataModel?.results[0].categoryScores.violence, 0.9223177433013916)
        XCTAssertEqual(dataModel?.results[0].categoryScores.violenceGraphic, 0.036865197122097015)
        XCTAssertEqual(dataModel?.results[0].flagged, true)
    }
}
36 |
--------------------------------------------------------------------------------
/Tests/SwiftOpenAITests/OpenAITests/Unit Tests/Moderations/ModerationsRequestSpec.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import SwiftOpenAI
3 |
final class ModerationsRequestSpec: XCTestCase {
    private let api = API()

    /// The built moderations request must carry exactly two headers:
    /// the JSON content type and the bearer-token authorization.
    func testRequest_CreatedWithCorrectHeaders() throws {
        let apiKey = "1234567890"
        let input = "Some potentially harmful or explicit content."
        let headers = ["Content-Type": "application/json",
                       "Authorization": "Bearer \(apiKey)"]

        // Route the endpoint through the v1 environment, then build the request.
        var endpoint = OpenAIEndpoints.moderations(input: input).endpoint
        api.routeEndpoint(&endpoint, environment: OpenAIEnvironmentV1())

        var request = api.buildURLRequest(endpoint: endpoint)
        api.addHeaders(urlRequest: &request, headers: headers)

        XCTAssertEqual(request.allHTTPHeaderFields?.count, 2)
        XCTAssertEqual(request.allHTTPHeaderFields?["Content-Type"], "application/json")
        XCTAssertEqual(request.allHTTPHeaderFields?["Authorization"], "Bearer 1234567890")
    }
}
25 |
--------------------------------------------------------------------------------