├── Demo
├── DemoChat
│ ├── README.md
│ ├── .gitignore
│ ├── Sources
│ │ ├── Models
│ │ │ ├── Message.swift
│ │ │ └── Conversation.swift
│ │ ├── UI
│ │ │ ├── Misc
│ │ │ │ ├── ListModelsView.swift
│ │ │ │ └── MiscView.swift
│ │ │ ├── Environment
│ │ │ │ ├── DateProvider.swift
│ │ │ │ └── IDProvider.swift
│ │ │ ├── ListView.swift
│ │ │ ├── ModerationChatView.swift
│ │ │ ├── ChatView.swift
│ │ │ └── DetailView.swift
│ │ ├── MiscStore.swift
│ │ └── ChatStore.swift
│ └── Package.swift
├── App
│ ├── Assets.xcassets
│ │ ├── Contents.json
│ │ ├── AccentColor.colorset
│ │ │ └── Contents.json
│ │ └── AppIcon.appiconset
│ │ │ └── Contents.json
│ ├── Preview Content
│ │ └── Preview Assets.xcassets
│ │ │ └── Contents.json
│ ├── Demo.entitlements
│ ├── DemoApp.swift
│ ├── APIProvidedView.swift
│ ├── SwiftUIAdditions.swift
│ ├── ContentView.swift
│ └── APIKeyModalView.swift
├── Demo.xcodeproj
│ ├── project.xcworkspace
│ │ ├── contents.xcworkspacedata
│ │ └── xcshareddata
│ │ │ └── IDEWorkspaceChecks.plist
│ ├── xcshareddata
│ │ └── xcschemes
│ │ │ └── Demo.xcscheme
│ └── project.pbxproj
├── Sources
└── OpenAI
│ ├── Public
│ ├── Models
│ │ ├── AudioTranslationResult.swift
│ │ ├── AudioTranscriptionResult.swift
│ │ ├── Models
│ │ │ ├── ModelsResult.swift
│ │ │ ├── ModelQuery.swift
│ │ │ ├── ModelResult.swift
│ │ │ └── Models.swift
│ │ ├── ImagesResult.swift
│ │ ├── StreamableQuery.swift
│ │ ├── ModerationsQuery.swift
│ │ ├── EmbeddingsQuery.swift
│ │ ├── EmbeddingsResult.swift
│ │ ├── EditsResult.swift
│ │ ├── CompletionsResult.swift
│ │ ├── ImageVariationsQuery.swift
│ │ ├── EditsQuery.swift
│ │ ├── AudioTranslationQuery.swift
│ │ ├── ChatResult.swift
│ │ ├── AudioTranscriptionQuery.swift
│ │ ├── ChatStreamResult.swift
│ │ ├── ImagesQuery.swift
│ │ ├── ImageEditsQuery.swift
│ │ ├── CompletionsQuery.swift
│ │ ├── ModerationsResult.swift
│ │ └── ChatQuery.swift
│ ├── Utilities
│ │ └── Utilities.swift
│ ├── Errors
│ │ └── APIError.swift
│ └── Protocols
│ │ ├── OpenAIProtocol+Combine.swift
│ │ ├── OpenAIProtocol+Async.swift
│ │ └── OpenAIProtocol.swift
│ ├── Private
│ ├── MultipartFormDataBodyEncodable.swift
│ ├── MultipartFormDataEntry.swift
│ ├── URLSessionDataTaskProtocol.swift
│ ├── URLRequestBuildable.swift
│ ├── URLSessionProtocol.swift
│ ├── JSONRequest.swift
│ ├── MultipartFormDataRequest.swift
│ ├── MultipartFormDataBodyBuilder.swift
│ └── StreamingSession.swift
│ └── OpenAI.swift
├── .github
├── PULL_REQUEST_TEMPLATE.md
├── workflows
│ ├── swift.yml
│ └── codeql.yml
└── ISSUE_TEMPLATE
│ ├── feature_request.md
│ └── bug_report.md
├── SECURITY.md
├── Package.swift
├── Tests
└── OpenAITests
│ ├── Mocks
│ ├── URLSessionMock.swift
│ └── DataTaskMock.swift
│ ├── Extensions
│ └── XCTestCase+Extensions.swift
│ ├── OpenAITestsCombine.swift
│ └── OpenAITestsDecoder.swift
├── LICENSE
├── .gitignore
└── CODE_OF_CONDUCT.md
/Demo/DemoChat/README.md:
--------------------------------------------------------------------------------
1 | # DemoChat
2 |
3 | A SwiftUI demo chat package showcasing the OpenAI Swift client: conversations, streaming chat, moderations, and model listing.
4 |
--------------------------------------------------------------------------------
/Demo/App/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demo/App/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demo/Demo.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/Demo/App/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demo/DemoChat/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | /.build
3 | /Packages
4 | /*.xcodeproj
5 | xcuserdata/
6 | DerivedData/
7 | .swiftpm/config/registries.json
8 | .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
9 | .netrc
10 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/AudioTranslationResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioTranslationResult.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 03/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Response returned by the audio translation endpoint.
public struct AudioTranslationResult: Codable, Equatable {

    /// The translated text produced by the API.
    public let text: String
}
14 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/MultipartFormDataBodyEncodable.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MultipartFormDataBodyEncodable.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// A request payload that can serialize itself into a
/// `multipart/form-data` HTTP body using the given boundary string.
protocol MultipartFormDataBodyEncodable {

    /// Produces the complete multipart body for this payload.
    /// - Parameter boundary: The boundary separating the body's parts.
    func encode(boundary: String) -> Data
}
14 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/AudioTranscriptionResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioTranscriptionResult.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Response returned by the audio transcription endpoint.
public struct AudioTranscriptionResult: Codable, Equatable {

    /// The transcribed text produced by the API.
    public let text: String
}
14 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## What
4 |
5 |
6 |
7 | ## Why
8 |
9 |
10 |
11 | ## Affected Areas
12 |
13 |
14 |
--------------------------------------------------------------------------------
/Demo/Demo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/Models/ModelsResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelsResult.swift
3 | //
4 | //
5 | // Created by Aled Samuel on 08/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Response returned by the list-models endpoint.
public struct ModelsResult: Codable, Equatable {

    /// The models available to the caller.
    public let data: [ModelResult]
    /// The object type of this response (e.g. `"list"` — TODO confirm against API).
    public let object: String
}
15 |
--------------------------------------------------------------------------------
/Demo/App/Demo.entitlements:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | com.apple.security.app-sandbox
6 |
7 | com.apple.security.files.user-selected.read-only
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/MultipartFormDataEntry.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MultipartFormDataEntry.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// One part of a `multipart/form-data` request body.
enum MultipartFormDataEntry {

    /// A file attachment part.
    case file(paramName: String, fileName: String?, fileData: Data?, contentType: String)

    /// A plain form field part.
    case string(paramName: String, value: Any?)
}
15 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/Models/Message.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Message.swift
3 | // DemoChat
4 | //
5 | // Created by Sihao Lu on 3/25/23.
6 | //
7 |
8 | import Foundation
9 | import OpenAI
10 |
/// A single chat message displayed and persisted by the demo.
struct Message {
    /// Stable unique identifier (backs the `Identifiable` conformance).
    var id: String
    /// Who authored the message (user, assistant, system).
    var role: Chat.Role
    /// The message body text.
    var content: String
    /// Creation timestamp, injected by the caller for testability.
    var createdAt: Date
}

extension Message: Equatable, Codable, Hashable, Identifiable {}
19 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/Models/ModelQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelQuery.swift
3 | //
4 | //
5 | // Created by Aled Samuel on 08/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Request payload for retrieving a single model.
public struct ModelQuery: Codable, Equatable {
    /// The ID of the model to use for this request.
    public let model: Model

    /// Creates a query for the given model identifier.
    public init(model: Model) {
        self.model = model
    }
}
18 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/URLSessionDataTaskProtocol.swift:
--------------------------------------------------------------------------------
1 | //
2 | // URLSessionDataTaskProtocol.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 | #if canImport(FoundationNetworking)
10 | import FoundationNetworking
11 | #endif
12 |
13 | protocol URLSessionDataTaskProtocol {
14 |
15 | func resume()
16 | }
17 |
18 | extension URLSessionDataTask: URLSessionDataTaskProtocol {}
19 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/URLRequestBuildable.swift:
--------------------------------------------------------------------------------
1 | //
2 | // URLRequestBuildable.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 | #if canImport(FoundationNetworking)
10 | import FoundationNetworking
11 | #endif
12 |
13 | protocol URLRequestBuildable {
14 |
15 | associatedtype ResultType
16 |
17 | func build(token: String, organizationIdentifier: String?, timeoutInterval: TimeInterval) throws -> URLRequest
18 | }
19 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/ImagesResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImagesResult.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Response returned by the image generation endpoints.
public struct ImagesResult: Codable, Equatable {

    /// One generated image. Either `url` or `b64_json` is populated,
    /// presumably depending on the requested response format — TODO confirm.
    public struct URLResult: Codable, Equatable {
        /// Remote URL of the generated image, if provided.
        public let url: String?
        /// Base64-encoded image payload, if provided.
        /// Property name mirrors the API's JSON key directly.
        public let b64_json: String?
    }

    /// Creation timestamp (seconds since the Unix epoch — TODO confirm).
    public let created: TimeInterval
    /// The generated images.
    public let data: [URLResult]
}
20 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/StreamableQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StreamableQuery.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 15/05/2023.
6 | //
7 |
8 | import Foundation
9 |
/// A query that can be switched into streaming mode.
protocol Streamable {

    /// Whether the server should stream the response.
    var stream: Bool { get set }

    /// Returns a copy of this query with streaming enabled.
    func makeStreamable() -> Self
}

extension Streamable {

    /// Default implementation: copy the value, flip `stream` on, return the copy.
    /// The receiver itself is left untouched (value semantics).
    func makeStreamable() -> Self {
        var streamingQuery = self
        streamingQuery.stream = true
        return streamingQuery
    }
}
24 |
25 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | | Version | Supported |
6 | | ------- | ------------------ |
7 | | 0.x.x | :white_check_mark: |
8 | | 1.x.x | :white_check_mark: |
9 |
10 | ## Reporting a Bug
11 |
12 | Report security bugs by creating [issues](https://github.com/MacPaw/OpenAI/issues).
13 |
14 | ## Reporting a Vulnerability
15 |
16 | Please report (suspected) security vulnerabilities to
17 | **[support@macpaw.com](mailto:support@macpaw.com)**.
18 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/Models/Conversation.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Conversation.swift
3 | // DemoChat
4 | //
5 | // Created by Sihao Lu on 3/25/23.
6 | //
7 |
8 | import Foundation
9 |
/// A chat conversation: a stable identifier plus an ordered message history.
struct Conversation {
    /// Conversation identifiers are plain strings; this typealias also
    /// satisfies the `Identifiable` conformance below.
    typealias ID = String

    /// Unique identifier of the conversation.
    /// Declared as `ID` (previously `String`) so the typealias is actually
    /// used and stays in sync with `Identifiable`.
    let id: ID
    /// Messages in chronological order; mutable so new messages can be appended.
    var messages: [Message]

    /// Creates a conversation, empty by default.
    init(id: ID, messages: [Message] = []) {
        self.id = id
        self.messages = messages
    }
}

extension Conversation: Equatable, Identifiable {}
23 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/ModerationsQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModerationsQuery.swift
3 | //
4 | //
5 | // Created by Aled Samuel on 10/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Request payload for the moderations endpoint.
public struct ModerationsQuery: Codable {
    /// The input text to classify.
    public let input: String
    /// ID of the model to use; `nil` lets the API choose its default.
    public let model: Model?

    /// Creates a moderation query for the given input text.
    public init(input: String, model: Model? = nil) {
        self.input = input
        self.model = model
    }
}
21 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/EmbeddingsQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // EmbeddingsQuery.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Request payload for the embeddings endpoint.
public struct EmbeddingsQuery: Codable {
    /// ID of the model to use.
    public let model: Model
    /// Input text to get embeddings for.
    public let input: String

    /// Creates an embeddings query for the given model and input text.
    public init(model: Model, input: String) {
        self.model = model
        self.input = input
    }
}
21 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/Models/ModelResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelResult.swift
3 | //
4 | //
5 | // Created by Aled Samuel on 08/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// A single model description returned by the models endpoints.
public struct ModelResult: Codable, Equatable {

    /// The model identifier (also backs the `Identifiable` conformance).
    public let id: Model
    /// The object type of this entry.
    public let object: String
    /// The owner of the model.
    public let ownedBy: String

    enum CodingKeys: String, CodingKey {
        case id
        case object
        // The API serializes this key in snake_case.
        case ownedBy = "owned_by"
    }
}

extension ModelResult: Identifiable {}
24 |
--------------------------------------------------------------------------------
/.github/workflows/swift.yml:
--------------------------------------------------------------------------------
1 | # This workflow will build a Swift project
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-swift
3 |
4 | name: Swift Build
5 |
6 | on:
7 | push:
8 | branches: [ "main" ]
9 | pull_request:
10 | branches: [ "main" ]
11 |
12 | jobs:
13 | build:
14 |
15 | runs-on: ubuntu-latest
16 |
17 | steps:
18 | - uses: actions/checkout@v3
19 | - name: Build
20 | run: swift build -v
21 | - name: Run tests
22 | run: swift test
23 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version: 5.7
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
6 | let package = Package(
7 | name: "OpenAI",
8 | products: [
9 | .library(
10 | name: "OpenAI",
11 | targets: ["OpenAI"]),
12 | ],
13 | targets: [
14 | .target(
15 | name: "OpenAI",
16 | dependencies: []),
17 | .testTarget(
18 | name: "OpenAITests",
19 | dependencies: ["OpenAI"]),
20 | ]
21 | )
22 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/UI/Misc/ListModelsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ListModelsView.swift
3 | // DemoChat
4 | //
5 | // Created by Aled Samuel on 22/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | public struct ListModelsView: View {
11 | @ObservedObject var store: MiscStore
12 |
13 | public var body: some View {
14 | NavigationStack {
15 | List($store.availableModels) { row in
16 | Text(row.id)
17 | }
18 | .listStyle(.insetGrouped)
19 | .navigationTitle("Models")
20 | }
21 | .onAppear {
22 | Task {
23 | await store.getModels()
24 | }
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/UI/Environment/DateProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DateProvider.swift
3 | // DemoChat
4 | //
5 | // Created by Sihao Lu on 3/25/23.
6 | //
7 |
8 | import SwiftUI
9 |
10 | private struct DateProviderKey: EnvironmentKey {
11 | static let defaultValue: () -> Date = Date.init
12 | }
13 |
14 | extension EnvironmentValues {
15 | public var dateProviderValue: () -> Date {
16 | get { self[DateProviderKey.self] }
17 | set { self[DateProviderKey.self] = newValue }
18 | }
19 | }
20 |
21 | extension View {
22 | public func dateProviderValue(_ dateProviderValue: @escaping () -> Date) -> some View {
23 | environment(\.dateProviderValue, dateProviderValue)
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/UI/Environment/IDProvider.swift:
--------------------------------------------------------------------------------
1 | //
2 | // IDProvider.swift
3 | // DemoChat
4 | //
5 | // Created by Sihao Lu on 4/6/23.
6 | //
7 |
8 | import SwiftUI
9 |
10 | private struct IDProviderKey: EnvironmentKey {
11 | static let defaultValue: () -> String = {
12 | UUID().uuidString
13 | }
14 | }
15 |
16 | extension EnvironmentValues {
17 | public var idProviderValue: () -> String {
18 | get { self[IDProviderKey.self] }
19 | set { self[IDProviderKey.self] = newValue }
20 | }
21 | }
22 |
23 | extension View {
24 | public func idProviderValue(_ idProviderValue: @escaping () -> String) -> some View {
25 | environment(\.idProviderValue, idProviderValue)
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version: 5.8
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
6 | let package = Package(
7 | name: "DemoChat",
8 | platforms: [.macOS(.v13), .iOS(.v16)],
9 | products: [
10 | .library(
11 | name: "DemoChat",
12 | targets: ["DemoChat"]
13 | ),
14 | ],
15 | dependencies: [
16 | .package(name: "OpenAI", path: "../.."),
17 | ],
18 | targets: [
19 | .target(
20 | name: "DemoChat",
21 | dependencies: [
22 | "OpenAI",
23 | ],
24 | path: "Sources"
25 | ),
26 | ]
27 | )
28 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/UI/ListView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ListView.swift
3 | // DemoChat
4 | //
5 | // Created by Sihao Lu on 3/25/23.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ListView: View {
11 | @Binding var conversations: [Conversation]
12 | @Binding var selectedConversationId: Conversation.ID?
13 |
14 | var body: some View {
15 | List(
16 | $conversations,
17 | editActions: [.delete],
18 | selection: $selectedConversationId
19 | ) { $conversation in
20 | Text(
21 | conversation.messages.last?.content ?? "New Conversation"
22 | )
23 | .lineLimit(2)
24 | }
25 | .navigationTitle("Conversations")
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/Tests/OpenAITests/Mocks/URLSessionMock.swift:
--------------------------------------------------------------------------------
1 | //
2 | // URLSessionMock.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 | #if canImport(FoundationNetworking)
10 | import FoundationNetworking
11 | #endif
12 | @testable import OpenAI
13 |
14 | class URLSessionMock: URLSessionProtocol {
15 |
16 | var dataTask: DataTaskMock!
17 |
18 | func dataTask(with request: URLRequest, completionHandler: @escaping @Sendable (Data?, URLResponse?, Error?) -> Void) -> URLSessionDataTaskProtocol {
19 | dataTask.completion = completionHandler
20 | return dataTask
21 | }
22 |
23 | func dataTask(with request: URLRequest) -> URLSessionDataTaskProtocol {
24 | dataTask
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/EmbeddingsResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // EmbeddingsResult.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Response returned by the embeddings endpoint.
public struct EmbeddingsResult: Codable, Equatable {

    /// A single embedding vector for one input.
    public struct Embedding: Codable, Equatable {
        /// The object type of this entry.
        public let object: String
        /// The embedding vector components.
        public let embedding: [Double]
        /// Position of the corresponding input in the request.
        public let index: Int
    }

    /// Token accounting for the request.
    public struct Usage: Codable, Equatable {
        public let promptTokens: Int
        public let totalTokens: Int

        enum CodingKeys: String, CodingKey {
            // The API serializes these keys in snake_case.
            case promptTokens = "prompt_tokens"
            case totalTokens = "total_tokens"
        }
    }

    /// The embeddings, one per input.
    public let data: [Embedding]
    /// The model that produced the embeddings.
    public let model: Model
    /// Token usage for the request.
    public let usage: Usage
}
32 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/EditsResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // EditsResult.swift
3 | //
4 | //
5 | // Created by Aled Samuel on 14/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Response returned by the edits endpoint.
public struct EditsResult: Codable, Equatable {

    /// One edited variant of the input.
    public struct Choice: Codable, Equatable {
        /// The edited text.
        public let text: String
        /// Position of this choice in the response.
        public let index: Int
    }

    /// Token accounting for the request.
    public struct Usage: Codable, Equatable {
        public let promptTokens: Int
        public let completionTokens: Int
        public let totalTokens: Int

        enum CodingKeys: String, CodingKey {
            // The API serializes these keys in snake_case.
            case promptTokens = "prompt_tokens"
            case completionTokens = "completion_tokens"
            case totalTokens = "total_tokens"
        }
    }

    /// The object type of this response.
    public let object: String
    /// Creation timestamp (seconds since the Unix epoch — TODO confirm).
    public let created: TimeInterval
    /// The edited variants.
    public let choices: [Choice]
    /// Token usage for the request.
    public let usage: Usage
}
34 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | name: "CodeQL"
2 |
3 | on:
4 | push:
5 | branches: [ "main" ]
6 | pull_request:
7 | branches: [ "main" ]
8 |
9 | env:
10 | CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: true
11 |
12 | jobs:
13 | analyze:
14 | name: Analyze
15 | runs-on: macos-latest
16 | permissions:
17 | actions: read
18 | contents: read
19 | security-events: write
20 |
21 | strategy:
22 | fail-fast: false
23 | matrix:
24 | language: [ 'swift' ]
25 |
26 | steps:
27 | - name: Checkout repository
28 | uses: actions/checkout@v3
29 |
30 | - name: Initialize CodeQL
31 | uses: github/codeql-action/init@v2
32 | with:
33 | languages: ${{ matrix.language }}
34 |
35 | - name: Build library
36 | run: swift build
37 |
38 | - name: Perform CodeQL Analysis
39 | uses: github/codeql-action/analyze@v2
40 | with:
41 | category: "/language:${{matrix.language}}"
42 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/URLSessionProtocol.swift:
--------------------------------------------------------------------------------
1 | //
2 | // URLSessionProtocol.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 | #if canImport(FoundationNetworking)
10 | import FoundationNetworking
11 | #endif
12 |
13 | protocol URLSessionProtocol {
14 |
15 | func dataTask(with request: URLRequest, completionHandler: @escaping @Sendable (Data?, URLResponse?, Error?) -> Void) -> URLSessionDataTaskProtocol
16 | func dataTask(with request: URLRequest) -> URLSessionDataTaskProtocol
17 | }
18 |
19 | extension URLSession: URLSessionProtocol {
20 |
21 | func dataTask(with request: URLRequest) -> URLSessionDataTaskProtocol {
22 | dataTask(with: request) as URLSessionDataTask
23 | }
24 |
25 | func dataTask(with request: URLRequest, completionHandler: @escaping @Sendable (Data?, URLResponse?, Error?) -> Void) -> URLSessionDataTaskProtocol {
26 | dataTask(with: request, completionHandler: completionHandler) as URLSessionDataTask
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 MacPaw Inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/UI/ModerationChatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModerationChatView.swift
3 | // DemoChat
4 | //
5 | // Created by Aled Samuel on 26/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | public struct ModerationChatView: View {
11 | @ObservedObject var store: MiscStore
12 |
13 | @Environment(\.dateProviderValue) var dateProvider
14 | @Environment(\.idProviderValue) var idProvider
15 |
16 | public init(store: MiscStore) {
17 | self.store = store
18 | }
19 |
20 | public var body: some View {
21 | DetailView(
22 | conversation: store.moderationConversation,
23 | error: store.moderationConversationError,
24 | sendMessage: { message, _ in
25 | Task {
26 | await store.sendModerationMessage(
27 | Message(
28 | id: idProvider(),
29 | role: .user,
30 | content: message,
31 | createdAt: dateProvider()
32 | )
33 | )
34 | }
35 | }
36 | )
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/UI/Misc/MiscView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MiscView.swift
3 | // DemoChat
4 | //
5 | // Created by Aled Samuel on 22/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | public struct MiscView: View {
11 | @ObservedObject var store: MiscStore
12 |
13 | public init(store: MiscStore) {
14 | self.store = store
15 | }
16 |
17 | public var body: some View {
18 | NavigationStack {
19 | List {
20 | Section(header: Text("Models")) {
21 | NavigationLink("List Models", destination: ListModelsView(store: store))
22 | NavigationLink("Retrieve Model", destination: RetrieveModelView())
23 | }
24 | Section(header: Text("Moderations")) {
25 | NavigationLink("Moderation Chat", destination: ModerationChatView(store: store))
26 | }
27 | }
28 | .listStyle(.insetGrouped)
29 | .navigationTitle("Misc")
30 | }
31 | }
32 | }
33 |
34 | struct RetrieveModelView: View {
35 | var body: some View {
36 | Text("Retrieve Model: TBD")
37 | .font(.largeTitle)
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/Demo/App/DemoApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DemoApp.swift
3 | // Demo
4 | //
5 | // Created by Sihao Lu on 4/6/23.
6 | //
7 |
8 | import DemoChat
9 | import OpenAI
10 | import SwiftUI
11 |
12 | @main
13 | struct DemoApp: App {
14 | @AppStorage("apiKey") var apiKey: String = ""
15 | @State var isShowingAPIConfigModal: Bool = true
16 |
17 | let idProvider: () -> String
18 | let dateProvider: () -> Date
19 |
20 | init() {
21 | self.idProvider = {
22 | UUID().uuidString
23 | }
24 | self.dateProvider = Date.init
25 | }
26 |
27 | var body: some Scene {
28 | WindowGroup {
29 | Group {
30 | APIProvidedView(
31 | apiKey: $apiKey,
32 | idProvider: idProvider
33 | )
34 | }
35 | #if os(iOS)
36 | .fullScreenCover(isPresented: $isShowingAPIConfigModal) {
37 | APIKeyModalView(apiKey: $apiKey)
38 | }
39 | #elseif os(macOS)
40 | .popover(isPresented: $isShowingAPIConfigModal) {
41 | APIKeyModalView(apiKey: $apiKey)
42 | }
43 | #endif
44 | }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/Tests/OpenAITests/Mocks/DataTaskMock.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DataTaskMock.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 | #if canImport(FoundationNetworking)
10 | import FoundationNetworking
11 | #endif
12 | @testable import OpenAI
13 |
/// Test double for `URLSessionDataTaskProtocol` that synchronously delivers
/// its canned data/response/error to `completion` when `resume()` is called.
class DataTaskMock: URLSessionDataTaskProtocol {

    // Canned payload handed to `completion` on `resume()`.
    var data: Data?
    var response: URLResponse?
    var error: Error?

    // Callback to invoke with the canned values; set by the test before `resume()`.
    var completion: ((Data?, URLResponse?, Error?) -> Void)?

    func resume() {
        completion?(data, response, error)
    }
}
26 |
extension DataTaskMock {

    /// Builds a mock task that completes with `data` and an HTTP 200 response.
    static func successful(with data: Data) -> DataTaskMock {
        let okResponse = HTTPURLResponse(url: URL(fileURLWithPath: ""), statusCode: 200, httpVersion: nil, headerFields: nil)
        let task = DataTaskMock()
        task.data = data
        task.response = okResponse
        return task
    }

    /// Builds a mock task that completes with `error` and an HTTP 503 response.
    static func failed(with error: Error) -> DataTaskMock {
        let failureResponse = HTTPURLResponse(url: URL(fileURLWithPath: ""), statusCode: 503, httpVersion: nil, headerFields: nil)
        let task = DataTaskMock()
        task.error = error
        task.response = failureResponse
        return task
    }
}
43 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/CompletionsResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CompletionsResult.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Response payload of the Completions API.
public struct CompletionsResult: Codable, Equatable {

    /// Token accounting reported by the API for this request.
    public struct Usage: Codable, Equatable {
        public let promptTokens: Int
        public let completionTokens: Int
        public let totalTokens: Int

        // Maps Swift camelCase names to the API's snake_case fields.
        enum CodingKeys: String, CodingKey {
            case promptTokens = "prompt_tokens"
            case completionTokens = "completion_tokens"
            case totalTokens = "total_tokens"
        }
    }

    /// One generated completion alternative.
    public struct Choice: Codable, Equatable {
        /// The generated text.
        public let text: String
        /// Position of this choice within the parent `choices` array.
        public let index: Int
        /// Why generation stopped; nil when the API omits it.
        public let finishReason: String?

        enum CodingKeys: String, CodingKey {
            case text
            case index
            case finishReason = "finish_reason"
        }
    }

    public let id: String
    /// Object-type discriminator as returned by the API.
    public let object: String
    /// Creation time — presumably a Unix epoch timestamp per API convention; TODO confirm.
    public let created: TimeInterval
    /// Model that produced the completion.
    public let model: Model
    public let choices: [Choice]
    /// Token usage; optional because not every response includes it.
    public let usage: Usage?
}
43 |
--------------------------------------------------------------------------------
/Demo/App/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | },
8 | {
9 | "idiom" : "mac",
10 | "scale" : "1x",
11 | "size" : "16x16"
12 | },
13 | {
14 | "idiom" : "mac",
15 | "scale" : "2x",
16 | "size" : "16x16"
17 | },
18 | {
19 | "idiom" : "mac",
20 | "scale" : "1x",
21 | "size" : "32x32"
22 | },
23 | {
24 | "idiom" : "mac",
25 | "scale" : "2x",
26 | "size" : "32x32"
27 | },
28 | {
29 | "idiom" : "mac",
30 | "scale" : "1x",
31 | "size" : "128x128"
32 | },
33 | {
34 | "idiom" : "mac",
35 | "scale" : "2x",
36 | "size" : "128x128"
37 | },
38 | {
39 | "idiom" : "mac",
40 | "scale" : "1x",
41 | "size" : "256x256"
42 | },
43 | {
44 | "idiom" : "mac",
45 | "scale" : "2x",
46 | "size" : "256x256"
47 | },
48 | {
49 | "idiom" : "mac",
50 | "scale" : "1x",
51 | "size" : "512x512"
52 | },
53 | {
54 | "idiom" : "mac",
55 | "scale" : "2x",
56 | "size" : "512x512"
57 | }
58 | ],
59 | "info" : {
60 | "author" : "xcode",
61 | "version" : 1
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/ImageVariationsQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageVariationsQuery.swift
3 | //
4 | //
5 | // Created by Aled Samuel on 24/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Request payload for the image-variations endpoint.
public struct ImageVariationsQuery: Codable {
    /// The image to use as the basis for the variation(s). Must be a valid PNG file, less than 4MB, and square.
    /// (Previous doc said "to edit" — copy-paste from the image-edits query.)
    public let image: Data
    /// Filename reported for `image` in the multipart form body.
    public let fileName: String
    /// The number of images to generate. Must be between 1 and 10.
    public let n: Int?
    /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.
    public let size: String?

    public init(image: Data, fileName: String, n: Int? = nil, size: String? = nil) {
        self.image = image
        self.fileName = fileName
        self.n = n
        self.size = size
    }
}
26 |
extension ImageVariationsQuery: MultipartFormDataBodyEncodable {
    /// Serializes the query as a multipart/form-data body delimited by `boundary`.
    func encode(boundary: String) -> Data {
        let entries: [MultipartFormDataEntry] = [
            .file(paramName: "image", fileName: fileName, fileData: image, contentType: "image/png"),
            .string(paramName: "n", value: n),
            .string(paramName: "size", value: size)
        ]
        return MultipartFormDataBodyBuilder(boundary: boundary, entries: entries).build()
    }
}
37 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/JSONRequest.swift:
--------------------------------------------------------------------------------
1 | //
2 | // JSONRequest.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 12/19/22.
6 | //
7 |
8 | import Foundation
9 | #if canImport(FoundationNetworking)
10 | import FoundationNetworking
11 | #endif
12 |
/// Description of a JSON-encoded HTTP request (optional body + URL + method),
/// turned into a `URLRequest` via the `URLRequestBuildable` conformance below.
final class JSONRequest {

    /// Encoded with `JSONEncoder` into the HTTP body when non-nil.
    let body: Codable?
    let url: URL
    /// HTTP method; defaults to "POST".
    let method: String

    init(body: Codable? = nil, url: URL, method: String = "POST") {
        self.body = body
        self.url = url
        self.method = method
    }
}
25 |
extension JSONRequest: URLRequestBuildable {

    /// Builds a JSON `URLRequest` carrying the bearer token and, when provided,
    /// the organization header. Throws if the body fails to encode.
    func build(token: String, organizationIdentifier: String?, timeoutInterval: TimeInterval) throws -> URLRequest {
        var request = URLRequest(url: url, timeoutInterval: timeoutInterval)
        request.httpMethod = method
        request.setValue("application/json", forHTTPHeaderField: "Content-Type")
        request.setValue("Bearer \(token)", forHTTPHeaderField: "Authorization")
        if let organizationIdentifier {
            request.setValue(organizationIdentifier, forHTTPHeaderField: "OpenAI-Organization")
        }
        if let body {
            request.httpBody = try JSONEncoder().encode(body)
        }
        return request
    }
}
42 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/EditsQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // EditsQuery.swift
3 | //
4 | //
5 | // Created by Aled Samuel on 14/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Request payload of the Edits API.
public struct EditsQuery: Codable {
    /// ID of the model to use.
    public let model: Model
    /// The input text to use as a starting point for the edit.
    /// (Previous doc said "to get embeddings for" — copy-paste from the embeddings query.)
    public let input: String?
    /// The instruction that tells the model how to edit the prompt.
    public let instruction: String
    /// How many edits to generate for the input and instruction.
    /// (Previous doc said "images" — copy-paste from the images query.)
    public let n: Int?
    /// What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.
    public let temperature: Double?
    /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
    public let topP: Double?

    public init(model: Model, input: String?, instruction: String, n: Int? = nil, temperature: Double? = nil, topP: Double? = nil) {
        self.model = model
        self.input = input
        self.instruction = instruction
        self.n = n
        self.temperature = temperature
        self.topP = topP
    }
}
33 |
--------------------------------------------------------------------------------
/Demo/App/APIProvidedView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // APIProvidedView.swift
3 | // Demo
4 | //
5 | // Created by Sihao Lu on 4/7/23.
6 | //
7 |
8 | import DemoChat
9 | import OpenAI
10 | import SwiftUI
11 |
/// Root view that owns the API-backed stores and rebuilds their clients
/// whenever the API key changes.
struct APIProvidedView: View {
    @Binding var apiKey: String
    @StateObject var chatStore: ChatStore
    @StateObject var miscStore: MiscStore
    @State var isShowingAPIConfigModal: Bool = true

    @Environment(\.idProviderValue) var idProvider
    @Environment(\.dateProviderValue) var dateProvider

    /// - Parameters:
    ///   - apiKey: Binding to the persisted API key. Fixed: the parameter was
    ///     declared as bare `Binding` (missing the `<String>` generic argument),
    ///     which does not compile.
    ///   - idProvider: Factory for unique identifiers, forwarded to `ChatStore`.
    init(
        apiKey: Binding<String>,
        idProvider: @escaping () -> String
    ) {
        self._apiKey = apiKey
        self._chatStore = StateObject(
            wrappedValue: ChatStore(
                openAIClient: OpenAI(apiToken: apiKey.wrappedValue),
                idProvider: idProvider
            )
        )
        self._miscStore = StateObject(
            wrappedValue: MiscStore(
                openAIClient: OpenAI(apiToken: apiKey.wrappedValue)
            )
        )
    }

    var body: some View {
        ContentView(
            chatStore: chatStore,
            miscStore: miscStore
        )
        // Swap in a freshly configured client on both stores when the key changes.
        .onChange(of: apiKey) { newApiKey in
            let client = OpenAI(apiToken: newApiKey)
            chatStore.openAIClient = client
            miscStore.openAIClient = client
        }
    }
}
51 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/MultipartFormDataRequest.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MultipartFormDataRequest.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 | #if canImport(FoundationNetworking)
10 | import FoundationNetworking
11 | #endif
12 |
/// Description of a multipart/form-data HTTP request (encodable body + URL + method),
/// turned into a `URLRequest` via the `URLRequestBuildable` conformance below.
final class MultipartFormDataRequest {

    /// Serialized into the multipart body with a per-request boundary.
    let body: MultipartFormDataBodyEncodable
    let url: URL
    /// HTTP method; defaults to "POST".
    let method: String

    init(body: MultipartFormDataBodyEncodable, url: URL, method: String = "POST") {
        self.body = body
        self.url = url
        self.method = method
    }
}
25 |
extension MultipartFormDataRequest: URLRequestBuildable {

    /// Builds a multipart/form-data `URLRequest` with a fresh random boundary,
    /// the bearer token, and an optional organization header.
    func build(token: String, organizationIdentifier: String?, timeoutInterval: TimeInterval) throws -> URLRequest {
        let boundary = UUID().uuidString
        var request = URLRequest(url: url)
        request.timeoutInterval = timeoutInterval
        request.httpMethod = method
        request.setValue("multipart/form-data; boundary=\(boundary)", forHTTPHeaderField: "Content-Type")
        request.setValue("Bearer \(token)", forHTTPHeaderField: "Authorization")
        if let organizationIdentifier {
            request.setValue(organizationIdentifier, forHTTPHeaderField: "OpenAI-Organization")
        }
        request.httpBody = body.encode(boundary: boundary)
        return request
    }
}
42 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Utilities/Utilities.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Utilities.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 12/19/22.
6 | //
7 |
8 | import Foundation
9 |
/// Namespace for embedding-vector math helpers.
public struct Vector {

    /// Returns the cosine similarity between two vectors.
    ///
    /// - Parameters:
    ///   - a: The first vector
    ///   - b: The second vector
    /// - Note: Produces NaN when either vector has zero magnitude
    ///   (division by zero); callers should avoid zero vectors.
    public static func cosineSimilarity(a: [Double], b: [Double]) -> Double {
        return dot(a, b) / (mag(a) * mag(b))
    }

    /// Returns the cosine distance between two vectors, defined as
    /// `1 - cosineSimilarity(a, b)`.
    ///
    /// - Parameters:
    ///   - a: The first vector
    ///   - b: The second vector
    /// - Note: Kept as an instance method for source compatibility, even
    ///   though a static method would be consistent with `cosineSimilarity`.
    public func cosineDifference(a: [Double], b: [Double]) -> Double {
        return 1 - Self.cosineSimilarity(a: a, b: b)
    }
}

private extension Vector {

    // Removed the unused `round(_:to:)` helper — nothing in this file
    // referenced it.

    /// Dot product of two equal-length vectors.
    static func dot(_ a: [Double], _ b: [Double]) -> Double {
        assert(a.count == b.count, "Vectors must have the same dimension")
        let result = zip(a, b)
            .map { $0 * $1 }
            .reduce(0, +)

        return result
    }

    /// Euclidean magnitude (L2 norm) of `vector`.
    static func mag(_ vector: [Double]) -> Double {
        return sqrt(dot(vector, vector))
    }
}
51 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/AudioTranslationQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioTranslationQuery.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Request payload for the audio-translation endpoint.
public struct AudioTranslationQuery: Codable, Equatable {
    public typealias ResponseFormat = AudioResponseFormat

    /// Raw audio bytes, sent as the multipart "file" part (audio/mpeg).
    public let file: Data
    /// Filename reported for `file` in the multipart form body.
    public let fileName: String
    /// ID of the model to use.
    public let model: Model

    /// Desired output format (json, text, srt, vtt, verbose_json); nil lets the API default.
    public let responseFormat: Self.ResponseFormat?
    /// Optional text to guide the model's style.
    public let prompt: String?
    /// Sampling temperature; nil lets the API default.
    public let temperature: Double?

    public init(file: Data, fileName: String, model: Model, prompt: String? = nil, temperature: Double? = nil, responseFormat: Self.ResponseFormat? = nil) {
        self.file = file
        self.fileName = fileName
        self.model = model
        self.prompt = prompt
        self.temperature = temperature
        self.responseFormat = responseFormat
    }
}
30 |
extension AudioTranslationQuery: MultipartFormDataBodyEncodable {

    /// Serializes the query as a multipart/form-data body delimited by `boundary`.
    func encode(boundary: String) -> Data {
        let entries: [MultipartFormDataEntry] = [
            .file(paramName: "file", fileName: fileName, fileData: file, contentType: "audio/mpeg"),
            .string(paramName: "model", value: model),
            .string(paramName: "prompt", value: prompt),
            .string(paramName: "response_format", value: responseFormat),
            .string(paramName: "temperature", value: temperature)
        ]
        return MultipartFormDataBodyBuilder(boundary: boundary, entries: entries).build()
    }
}
44 |
--------------------------------------------------------------------------------
/Demo/App/SwiftUIAdditions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SwiftUIAdditions.swift
3 | // Demo
4 | //
5 | // Created by Sihao Lu on 4/7/23.
6 | //
7 |
8 | import SwiftUI
9 |
extension View {
    /// Applies `content` to the view when `conditional` is true; otherwise
    /// returns the view unchanged.
    /// Fixed: the generic parameter list was missing (`Content` was referenced
    /// but never declared), which does not compile.
    /// - Parameters:
    ///   - conditional: Boolean condition.
    ///   - content: Closure to run on view.
    @ViewBuilder func `if`<Content: View>(_ conditional: Bool, @ViewBuilder _ content: (Self) -> Content) -> some View {
        if conditional {
            content(self)
        } else {
            self
        }
    }

    /// Applies one of two transforms to the view depending on `conditional`.
    /// Fixed: declared the `Truthy`/`Falsy` generic parameters that were missing.
    /// - Parameters:
    ///   - conditional: Boolean condition.
    ///   - truthy: Closure to run on view if true.
    ///   - falsy: Closure to run on view if false.
    @ViewBuilder func `if`<Truthy: View, Falsy: View>(
        _ conditional: Bool = true,
        @ViewBuilder _ truthy: (Self) -> Truthy,
        @ViewBuilder else falsy: (Self) -> Falsy
    ) -> some View {
        if conditional {
            truthy(self)
        } else {
            falsy(self)
        }
    }

    /// Applies `content` with the unwrapped value when `conditional` is non-nil;
    /// otherwise returns the view unchanged.
    /// Fixed: declared the `T`/`Content` generic parameters that were missing.
    /// - Parameters:
    ///   - conditional: Optional value.
    ///   - content: Closure to run on view with unwrapped optional.
    @ViewBuilder func ifLet<T, Content: View>(_ conditional: T?, @ViewBuilder _ content: (Self, _ value: T) -> Content) -> some View {
        if let value = conditional {
            content(self, value)
        } else {
            self
        }
    }
}
52 |
53 |
--------------------------------------------------------------------------------
/Demo/App/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // Demo
4 | //
5 | // Created by Sihao Lu on 4/7/23.
6 | //
7 |
8 | import DemoChat
9 | import OpenAI
10 | import SwiftUI
11 |
/// Tab container wiring the demo's feature screens to their stores.
struct ContentView: View {
    @ObservedObject var chatStore: ChatStore
    @ObservedObject var miscStore: MiscStore
    @State private var selectedTab = 0
    @Environment(\.idProviderValue) var idProvider

    var body: some View {
        TabView(selection: $selectedTab) {
            ChatView(store: chatStore)
                .tabItem { Label("Chats", systemImage: "message") }
                .tag(0)

            TranscribeView()
                .tabItem { Label("Transcribe", systemImage: "mic") }
                .tag(1)

            ImageView()
                .tabItem { Label("Image", systemImage: "photo") }
                .tag(2)

            MiscView(store: miscStore)
                .tabItem { Label("Misc", systemImage: "ellipsis") }
                .tag(3)
        }
    }
}
52 |
/// Unused placeholder; the Chats tab is served by `ChatView` from DemoChat above.
struct ChatsView: View {
    var body: some View {
        Text("Chats")
            .font(.largeTitle)
    }
}
59 |
/// Placeholder screen for the not-yet-implemented Transcribe tab.
struct TranscribeView: View {
    var body: some View {
        Text("Transcribe: TBD")
            .font(.largeTitle)
    }
}
66 |
/// Placeholder screen for the not-yet-implemented Image tab.
struct ImageView: View {
    var body: some View {
        Text("Image: TBD")
            .font(.largeTitle)
    }
}
73 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/ChatResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatResult.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Response payload of the Chat Completions API.
public struct ChatResult: Codable, Equatable {

    /// One generated chat alternative.
    public struct Choice: Codable, Equatable {

        /// Position of this choice within the parent `choices` array.
        public let index: Int
        /// Exists only if it is a complete message.
        public let message: Chat
        /// Exists only if it is a complete message.
        public let finishReason: String?

        enum CodingKeys: String, CodingKey {
            case index
            case message
            case finishReason = "finish_reason"
        }
    }

    /// Token accounting reported by the API for this request.
    public struct Usage: Codable, Equatable {
        public let promptTokens: Int
        public let completionTokens: Int
        public let totalTokens: Int

        enum CodingKeys: String, CodingKey {
            case promptTokens = "prompt_tokens"
            case completionTokens = "completion_tokens"
            case totalTokens = "total_tokens"
        }
    }

    public let id: String
    public let object: String
    public let created: TimeInterval
    public let model: Model
    public let choices: [Choice]
    /// Token usage; optional because not every response includes it.
    public let usage: Usage?

    enum CodingKeys: String, CodingKey {
        case id
        case object
        case created
        case model
        case choices
        case usage
    }

    /// Memberwise initializer (internal).
    /// `usage` is now `Usage?` to match the stored property — previously the
    /// parameter was non-optional, so a result without usage data could not be
    /// constructed. Existing call sites passing a non-nil `Usage` still compile.
    init(id: String, object: String, created: TimeInterval, model: Model, choices: [Choice], usage: Usage?) {
        self.id = id
        self.object = object
        self.created = created
        self.model = model
        self.choices = choices
        self.usage = usage
    }
}
64 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/AudioTranscriptionQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioTranscriptionQuery.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Output formats accepted by the audio transcription/translation endpoints
/// via the `response_format` parameter.
public enum AudioResponseFormat: String, Codable, Equatable {
    case json
    case text
    case verboseJson = "verbose_json"
    case srt
    case vtt
}
17 |
/// Request payload for the audio-transcription endpoint.
public struct AudioTranscriptionQuery: Codable, Equatable {
    public typealias ResponseFormat = AudioResponseFormat

    /// Raw audio bytes, sent as the multipart "file" part (audio/mpeg).
    public let file: Data
    /// Filename reported for `file` in the multipart form body.
    public let fileName: String
    /// ID of the model to use.
    public let model: Model
    /// Desired output format; nil lets the API default.
    public let responseFormat: Self.ResponseFormat?

    /// Optional text to guide the model's style.
    public let prompt: String?
    /// Sampling temperature; nil lets the API default.
    public let temperature: Double?
    /// Language of the input audio — presumably an ISO-639-1 code per API docs; TODO confirm.
    public let language: String?

    public init(file: Data, fileName: String, model: Model, prompt: String? = nil, temperature: Double? = nil, language: String? = nil, responseFormat: Self.ResponseFormat? = nil) {
        self.file = file
        self.fileName = fileName
        self.model = model
        self.prompt = prompt
        self.temperature = temperature
        self.language = language
        self.responseFormat = responseFormat
    }
}
40 |
extension AudioTranscriptionQuery: MultipartFormDataBodyEncodable {

    /// Serializes the query as a multipart/form-data body delimited by `boundary`.
    func encode(boundary: String) -> Data {
        let entries: [MultipartFormDataEntry] = [
            .file(paramName: "file", fileName: fileName, fileData: file, contentType: "audio/mpeg"),
            .string(paramName: "model", value: model),
            .string(paramName: "prompt", value: prompt),
            .string(paramName: "temperature", value: temperature),
            .string(paramName: "language", value: language),
            .string(paramName: "response_format", value: responseFormat)
        ]
        return MultipartFormDataBodyBuilder(boundary: boundary, entries: entries).build()
    }
}
55 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/ChatStreamResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatStreamResult.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 15/05/2023.
6 | //
7 |
8 | import Foundation
9 |
/// One server-sent chunk of a streamed chat completion.
public struct ChatStreamResult: Codable, Equatable {

    public struct Choice: Codable, Equatable {
        /// Incremental message fragment; any field may be nil in a given chunk.
        public struct Delta: Codable, Equatable {
            public let content: String?
            public let role: Chat.Role?
            /// The name of the author of this message. `name` is required if role is `function`, and it should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.
            public let name: String?
            public let functionCall: ChatFunctionCall?

            enum CodingKeys: String, CodingKey {
                case role
                case content
                case name
                case functionCall = "function_call"
            }
        }

        /// Position of this choice within the parent `choices` array.
        public let index: Int
        public let delta: Delta
        /// Set on the final chunk for this choice; nil while streaming.
        public let finishReason: String?

        enum CodingKeys: String, CodingKey {
            case index
            case delta
            case finishReason = "finish_reason"
        }
    }

    public let id: String
    public let object: String
    public let created: TimeInterval
    public let model: Model
    public let choices: [Choice]

    // All keys match the wire names; kept explicit for symmetry with ChatResult.
    enum CodingKeys: String, CodingKey {
        case id
        case object
        case created
        case model
        case choices
    }

    /// Memberwise initializer (internal; presumably used by tests — verify).
    init(id: String, object: String, created: TimeInterval, model: Model, choices: [Choice]) {
        self.id = id
        self.object = object
        self.created = created
        self.model = model
        self.choices = choices
    }
}
61 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/MultipartFormDataBodyBuilder.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MultipartFormDataBodyBuilder.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Assembles a complete multipart/form-data HTTP body from a list of entries.
final class MultipartFormDataBodyBuilder {

    let boundary: String
    let entries: [MultipartFormDataEntry]

    init(boundary: String, entries: [MultipartFormDataEntry]) {
        self.boundary = boundary
        self.entries = entries
    }

    /// Concatenates every entry's section and appends the closing boundary.
    func build() -> Data {
        var httpData = Data()
        for entry in entries {
            httpData.append(entry.makeBodyData(boundary: boundary))
        }
        httpData.append("--\(boundary)--\r\n")
        return httpData
    }
}
28 |
private extension MultipartFormDataEntry {

    /// Renders this entry as one multipart section, including its leading
    /// boundary line. Entries with a nil payload produce no bytes at all.
    func makeBodyData(boundary: String) -> Data {
        var section = Data()
        switch self {
        case let .file(paramName, fileName, fileData, contentType):
            guard let fileName, let fileData else { break }
            section.append("--\(boundary)\r\n")
            section.append("Content-Disposition: form-data; name=\"\(paramName)\"; filename=\"\(fileName)\"\r\n")
            section.append("Content-Type: \(contentType)\r\n\r\n")
            section.append(fileData)
            section.append("\r\n")
        case let .string(paramName, value):
            guard let value else { break }
            section.append("--\(boundary)\r\n")
            section.append("Content-Disposition: form-data; name=\"\(paramName)\"\r\n\r\n")
            section.append("\(value)\r\n")
        }
        return section
    }
}
52 |
private extension Data {

    /// Appends the UTF-8 bytes of `string`.
    /// Uses `String.utf8` directly: UTF-8 can encode any Swift string, so this
    /// never fails — replacing the previous force-unwrapped
    /// `data(using:allowLossyConversion:)` round trip.
    mutating func append(_ string: String) {
        append(Data(string.utf8))
    }
}
62 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/ImagesQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImagesQuery.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 |
/// How generated images are returned: a hosted URL or base64-encoded JSON.
public enum ImageResponseFormat: String, Codable, Equatable {
    case url
    case b64_json
}
15 |
/// Request payload for the image-generation endpoint.
public struct ImagesQuery: Codable {
    public typealias ResponseFormat = ImageResponseFormat

    /// A text description of the desired image(s). The maximum length is 1000 characters.
    public let prompt: String

    /// ID of the model to use.
    public let model: Model?
    /// The format in which the generated images are returned
    public let responseFormat: Self.ResponseFormat?
    /// The number of images to generate. Must be between 1 and 10.
    public let n: Int?
    /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.
    public let size: String?
    /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
    public let user: String?
    /// The style of the generated images. Must be one of vivid or natural. Vivid causes the model to lean towards generating hyper-real and dramatic images. Natural causes the model to produce more natural, less hyper-real looking images. This param is only supported for dall-e-3.
    public let style: String?

    public init(prompt: String, model: Model?=nil, responseFormat: Self.ResponseFormat?=nil, n: Int?, size: String?, style: String?=nil, user:String?=nil) {
        self.style = style
        self.prompt = prompt
        self.n = n
        self.size = size
        self.model = model
        self.responseFormat = responseFormat
        self.user = user
    }

    // Only responseFormat needs renaming; the rest match the wire names.
    public enum CodingKeys: String, CodingKey {
        case model
        case prompt
        case n
        case size
        case user
        case style
        case responseFormat = "response_format"
    }
}
55 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Errors/APIError.swift:
--------------------------------------------------------------------------------
1 | //
2 | // APIError.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Client-side errors raised by this library (as opposed to server `APIError`s).
public enum OpenAIError: Error {
    /// The transport completed without returning any response data.
    case emptyData
}
13 |
/// Error object returned by the API inside the `"error"` envelope.
public struct APIError: Error, Decodable, Equatable {
    /// Human-readable description; a server-side `[String]` is joined with newlines.
    public let message: String
    public let type: String
    public let param: String?
    public let code: String?

    public init(message: String, type: String, param: String?, code: String?) {
        self.message = message
        self.type = type
        self.param = param
        self.code = code
    }

    enum CodingKeys: CodingKey {
        case message
        case type
        case param
        case code
    }

    public init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)

        // The server serializes `message` either as a single String or as an
        // array of Strings; normalize the array form by joining with "\n".
        if let lines = try? container.decode([String].self, forKey: .message) {
            self.message = lines.joined(separator: "\n")
        } else if let single = try? container.decode(String.self, forKey: .message) {
            self.message = single
        } else {
            throw DecodingError.typeMismatch(String.self, .init(codingPath: [CodingKeys.message], debugDescription: "message: expected String or [String]"))
        }

        self.type = try container.decode(String.self, forKey: .type)
        self.param = try container.decodeIfPresent(String.self, forKey: .param)
        self.code = try container.decodeIfPresent(String.self, forKey: .code)
    }
}
53 |
extension APIError: LocalizedError {

    /// Surfaces the server-provided message as the localized description.
    public var errorDescription: String? {
        return message
    }
}
60 |
/// Top-level error envelope returned by the API: `{"error": { ... }}`.
public struct APIErrorResponse: Error, Decodable, Equatable {
    public let error: APIError
}
64 |
extension APIErrorResponse: LocalizedError {

    /// Delegates to the wrapped `APIError`'s description.
    public var errorDescription: String? {
        return error.errorDescription
    }
}
71 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/ImageEditsQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageEditsQuery.swift
3 | //
4 | //
5 | // Created by Aled Samuel on 24/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Request payload for the image-edits endpoint.
public struct ImageEditsQuery: Codable {
    /// The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.
    public let image: Data
    /// Filename reported for `image` in the multipart form body.
    public let fileName: String
    /// An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.
    public let mask: Data?
    /// Filename reported for `mask`; the mask part is omitted when either is nil.
    public let maskFileName: String?
    /// A text description of the desired image(s). The maximum length is 1000 characters.
    public let prompt: String
    /// The number of images to generate. Must be between 1 and 10.
    public let n: Int?
    /// The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024.
    public let size: String?

    public init(image: Data, fileName: String, mask: Data? = nil, maskFileName: String? = nil, prompt: String, n: Int? = nil, size: String? = nil) {
        self.image = image
        self.fileName = fileName
        self.mask = mask
        self.maskFileName = maskFileName
        self.prompt = prompt
        self.n = n
        self.size = size
    }
}
34 |
extension ImageEditsQuery: MultipartFormDataBodyEncodable {
    /// Serializes the query as a multipart/form-data body delimited by `boundary`.
    /// The mask part is dropped by the builder when `mask`/`maskFileName` is nil.
    func encode(boundary: String) -> Data {
        let entries: [MultipartFormDataEntry] = [
            .file(paramName: "image", fileName: fileName, fileData: image, contentType: "image/png"),
            .file(paramName: "mask", fileName: maskFileName, fileData: mask, contentType: "image/png"),
            .string(paramName: "prompt", value: prompt),
            .string(paramName: "n", value: n),
            .string(paramName: "size", value: size)
        ]
        return MultipartFormDataBodyBuilder(boundary: boundary, entries: entries).build()
    }
}
47 |
--------------------------------------------------------------------------------
/Tests/OpenAITests/Extensions/XCTestCase+Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // XCTestCase+Extensions.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 04/04/2023.
6 | //
7 |
8 | #if canImport(Combine)
9 |
10 | import XCTest
11 | import Combine
12 |
13 | //Borrowed from here: https://www.swiftbysundell.com/articles/unit-testing-combine-based-swift-code/
@available(watchOS 6.0, *)
@available(tvOS 13.0, *)
@available(iOS 13.0, *)
extension XCTestCase {

    /// Subscribes to `publisher`, waits (up to `timeout`) for it to complete,
    /// and returns the most recent output — rethrowing any failure.
    /// Fixed: the generic parameter list (`<T: Publisher>`) and the `Result`
    /// type arguments were missing, so this did not compile.
    func awaitPublisher<T: Publisher>(
        _ publisher: T,
        timeout: TimeInterval = 10,
        file: StaticString = #file,
        line: UInt = #line
    ) throws -> T.Output {
        // This time, we use Swift's Result type to keep track
        // of the result of our Combine pipeline:
        var result: Result<T.Output, Error>?
        let expectation = self.expectation(description: "Awaiting publisher")

        let cancellable = publisher.sink(
            receiveCompletion: { completion in
                switch completion {
                case .failure(let error):
                    result = .failure(error)
                case .finished:
                    break
                }

                expectation.fulfill()
            },
            receiveValue: { value in
                result = .success(value)
            }
        )

        // Just like before, we await the expectation that we
        // created at the top of our test, and once done, we
        // also cancel our cancellable to avoid getting any
        // unused variable warnings:
        waitForExpectations(timeout: timeout)
        cancellable.cancel()

        // Here we pass the original file and line number that
        // our utility was called at, to tell XCTest to report
        // any encountered errors at that original call site:
        let unwrappedResult = try XCTUnwrap(
            result,
            "Awaited publisher did not produce any output",
            file: file,
            line: line
        )

        return try unwrappedResult.get()
    }
}
66 |
67 | #endif
68 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/UI/ChatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatView.swift
3 | // DemoChat
4 | //
5 | // Created by Sihao Lu on 3/25/23.
6 | //
7 |
8 | import Combine
9 | import SwiftUI
10 |
/// Two-column chat screen: a conversation list in the sidebar and the
/// selected conversation's messages in the detail column.
public struct ChatView: View {
    @ObservedObject var store: ChatStore

    @Environment(\.dateProviderValue) var dateProvider
    @Environment(\.idProviderValue) var idProvider

    public init(store: ChatStore) {
        self.store = store
    }

    public var body: some View {
        NavigationSplitView {
            sidebar
        } detail: {
            detailColumn
        }
    }

    /// Conversation list plus the "new conversation" toolbar button.
    /// Selection changes are routed through `store.selectConversation(_:)`
    /// rather than written to the property directly.
    private var sidebar: some View {
        ListView(
            conversations: $store.conversations,
            selectedConversationId: Binding(
                get: { store.selectedConversationID },
                set: store.selectConversation
            )
        )
        .toolbar {
            ToolbarItem(placement: .primaryAction) {
                Button {
                    store.createConversation()
                } label: {
                    Image(systemName: "plus")
                }
                .buttonStyle(.borderedProminent)
            }
        }
    }

    /// Detail pane for the selected conversation; empty when nothing is selected.
    @ViewBuilder
    private var detailColumn: some View {
        if let conversation = store.selectedConversation {
            DetailView(
                conversation: conversation,
                error: store.conversationErrors[conversation.id],
                sendMessage: { text, selectedModel in
                    // Stamp the outgoing message with injected id/date providers,
                    // then hand it to the store asynchronously.
                    Task {
                        await store.sendMessage(
                            Message(
                                id: idProvider(),
                                role: .user,
                                content: text,
                                createdAt: dateProvider()
                            ),
                            conversationId: conversation.id,
                            model: selectedModel
                        )
                    }
                }
            )
        }
    }
}
68 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | .DS_Store
6 | .swiftpm
7 |
8 | ## User settings
9 | xcuserdata/
10 |
11 | ## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
12 | *.xcscmblueprint
13 | *.xccheckout
14 |
15 | ## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
16 | build/
17 | DerivedData/
18 | *.moved-aside
19 | *.pbxuser
20 | !default.pbxuser
21 | *.mode1v3
22 | !default.mode1v3
23 | *.mode2v3
24 | !default.mode2v3
25 | *.perspectivev3
26 | !default.perspectivev3
27 |
28 | ## Obj-C/Swift specific
29 | *.hmap
30 |
31 | ## App packaging
32 | *.ipa
33 | *.dSYM.zip
34 | *.dSYM
35 |
36 | ## Playgrounds
37 | timeline.xctimeline
38 | playground.xcworkspace
39 |
40 | # Swift Package Manager
41 | #
42 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
43 | # Packages/
44 | # Package.pins
45 | # Package.resolved
46 | # *.xcodeproj
47 | #
48 | # Xcode automatically generates this directory with a .xcworkspacedata file and xcuserdata
49 | # hence it is not needed unless you have added a package configuration file to your project
50 | # .swiftpm
51 |
52 | .build/
53 |
54 | # CocoaPods
55 | #
56 | # We recommend against adding the Pods directory to your .gitignore. However
57 | # you should judge for yourself, the pros and cons are mentioned at:
58 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
59 | #
60 | # Pods/
61 | #
62 | # Add this line if you want to avoid checking in source code from the Xcode workspace
63 | # *.xcworkspace
64 |
65 | # Carthage
66 | #
67 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
68 | # Carthage/Checkouts
69 |
70 | Carthage/Build/
71 |
72 | # Accio dependency management
73 | Dependencies/
74 | .accio/
75 |
76 | # fastlane
77 | #
78 | # It is recommended to not store the screenshots in the git repo.
79 | # Instead, use fastlane to re-generate the screenshots whenever they are needed.
80 | # For more information about the recommended setup visit:
81 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
82 |
83 | fastlane/report.xml
84 | fastlane/Preview.html
85 | fastlane/screenshots/**/*.png
86 | fastlane/test_output
87 |
88 | # Code Injection
89 | #
90 | # After new code Injection tools there's a generated folder /iOSInjectionProject
91 | # https://github.com/johnno1962/injectionforxcode
92 |
93 | iOSInjectionProject/
94 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/CompletionsQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CompletionsQuery.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Request payload for the `/v1/completions` endpoint.
/// Conforming to `Streamable` lets the client flip `stream` on for
/// server-sent-event responses.
public struct CompletionsQuery: Codable, Streamable {
    /// ID of the model to use.
    public let model: Model
    /// The prompt to generate completions for. (The API also accepts arrays of
    /// strings or tokens, but this type supports a single string only.)
    public let prompt: String
    /// What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.
    public let temperature: Double?
    /// The maximum number of tokens to generate in the completion.
    public let maxTokens: Int?
    /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
    public let topP: Double?
    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
    public let frequencyPenalty: Double?
    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.
    public let presencePenalty: Double?
    /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.
    public let stop: [String]?
    /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
    public let user: String?

    // Internal flag toggled by the streaming machinery; intentionally not
    // part of the public initializer.
    var stream: Bool = false

    /// Maps camelCase property names to the API's snake_case JSON keys.
    enum CodingKeys: String, CodingKey {
        case model
        case prompt
        case stream
        case temperature
        case maxTokens = "max_tokens"
        case topP = "top_p"
        case frequencyPenalty = "frequency_penalty"
        case presencePenalty = "presence_penalty"
        case stop
        case user
    }

    /// Creates a completions query. Parameters left `nil` fall back to the
    /// server-side defaults.
    public init(model: Model, prompt: String, temperature: Double? = nil, maxTokens: Int? = nil, topP: Double? = nil, frequencyPenalty: Double? = nil, presencePenalty: Double? = nil, stop: [String]? = nil, user: String? = nil) {
        self.model = model
        self.prompt = prompt
        self.temperature = temperature
        self.maxTokens = maxTokens
        self.topP = topP
        self.frequencyPenalty = frequencyPenalty
        self.presencePenalty = presencePenalty
        self.stop = stop
        self.user = user
    }
}
57 |
--------------------------------------------------------------------------------
/Demo/Demo.xcodeproj/xcshareddata/xcschemes/Demo.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
31 |
32 |
42 |
44 |
50 |
51 |
52 |
53 |
59 |
61 |
67 |
68 |
69 |
70 |
72 |
73 |
76 |
77 |
78 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/MiscStore.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MiscStore.swift
3 | // DemoChat
4 | //
5 | // Created by Aled Samuel on 22/04/2023.
6 | //
7 |
8 | import Foundation
9 | import OpenAI
10 |
/// Observable store backing the "misc" demo screens: model listing and
/// the moderation chat.
public final class MiscStore: ObservableObject {
    public var openAIClient: OpenAIProtocol

    @Published var availableModels: [ModelResult] = []

    public init(
        openAIClient: OpenAIProtocol
    ) {
        self.openAIClient = openAIClient
    }

    // MARK: Models

    /// Refreshes `availableModels` from the API; failures are only logged.
    @MainActor
    func getModels() async {
        do {
            availableModels = try await openAIClient.models().data
        } catch {
            // TODO: Better error handling
            print(error.localizedDescription)
        }
    }

    // MARK: Moderations

    @Published var moderationConversation = Conversation(id: "", messages: [])
    @Published var moderationConversationError: Error?

    /// Appends the user's message and asks the moderation endpoint to
    /// classify it.
    @MainActor
    func sendModerationMessage(_ message: Message) async {
        moderationConversation.messages.append(message)
        await completeModerationChat(message: message)
    }

    /// Runs the moderation query for `message` and appends one assistant
    /// reply per category result, skipping replies already present.
    @MainActor
    func completeModerationChat(message: Message) async {

        moderationConversationError = nil

        do {
            let query = ModerationsQuery(
                input: message.content,
                model: .textModerationLatest
            )
            let response = try await openAIClient.moderations(query: query)

            // Snapshot taken before appending, so duplicates are judged
            // against the pre-existing transcript only.
            let priorMessages = moderationConversation.messages

            func emoji(_ flagged: Bool) -> String {
                flagged ? "🔴" : "🟢"
            }

            for result in response.results {
                let lines = [
                    "\(emoji(result.categories.hate)) Hate",
                    "\(emoji(result.categories.hateThreatening)) Hate/Threatening",
                    "\(emoji(result.categories.selfHarm)) Self-harm",
                    "\(emoji(result.categories.sexual)) Sexual",
                    "\(emoji(result.categories.sexualMinors)) Sexual/Minors",
                    "\(emoji(result.categories.violence)) Violence",
                    "\(emoji(result.categories.violenceGraphic)) Violence/Graphic"
                ]

                let reply = Message(
                    id: response.id,
                    role: .assistant,
                    content: lines.joined(separator: "\n"),
                    createdAt: message.createdAt)

                guard priorMessages.contains(reply) == false else {
                    continue
                }
                moderationConversation.messages.append(reply)
            }

        } catch {
            moderationConversationError = error
        }
    }
}
95 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Private/StreamingSession.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StreamingSession.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 18/04/2023.
6 | //
7 |
8 | import Foundation
9 | #if canImport(FoundationNetworking)
10 | import FoundationNetworking
11 | #endif
12 |
/// Delegate-based `URLSession` wrapper that forwards streamed
/// (server-sent-event style) response chunks to decoded-object,
/// error, and completion callbacks.
///
/// NOTE(review): `ResultType` is referenced below but no generic parameter
/// clause is visible in this copy — presumably
/// `StreamingSession<ResultType: Decodable>`; confirm against the original.
final class StreamingSession: NSObject, Identifiable, URLSessionDelegate, URLSessionDataDelegate {

    enum StreamingError: Error {
        /// Received bytes could not be interpreted/decoded as expected.
        case unknownContent
        case emptyContent
    }

    /// Invoked once per successfully decoded object in the stream.
    var onReceiveContent: ((StreamingSession, ResultType) -> Void)?
    /// Invoked when a chunk cannot be decoded (and is not a buffered partial chunk).
    var onProcessingError: ((StreamingSession, Error) -> Void)?
    /// Invoked when the underlying task completes, with a transport error if any.
    var onComplete: ((StreamingSession, Error?) -> Void)?

    // Sentinel payload the API sends to mark the end of the stream.
    private let streamingCompletionMarker = "[DONE]"
    private let urlRequest: URLRequest
    // NOTE(review): URLSession retains its delegate (self), so this object
    // stays alive for the session's lifetime — confirm intended lifecycle.
    private lazy var urlSession: URLSession = {
        let session = URLSession(configuration: .default, delegate: self, delegateQueue: nil)
        return session
    }()

    // Tail of the previous chunk that ended mid-JSON; prepended to the next chunk.
    private var previousChunkBuffer = ""

    init(urlRequest: URLRequest) {
        self.urlRequest = urlRequest
    }

    /// Starts the streaming request; results arrive via the callbacks above.
    func perform() {
        self.urlSession
            .dataTask(with: self.urlRequest)
            .resume()
    }

    func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
        onComplete?(self, error)
    }

    func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
        // Chunks that are not valid UTF-8 are surfaced as unknownContent.
        guard let stringContent = String(data: data, encoding: .utf8) else {
            onProcessingError?(self, StreamingError.unknownContent)
            return
        }
        processJSON(from: stringContent)
    }

}
56 |
extension StreamingSession {

    /// Splits an SSE chunk into individual `data:`-delimited JSON payloads,
    /// decodes each one, and buffers a trailing partial payload for the
    /// next chunk.
    private func processJSON(from stringContent: String) {
        // Prepend whatever was left over from the previous chunk, then split
        // on the SSE "data:" delimiter.
        // NOTE(review): a literal "data:" inside a payload would also split
        // here — presumably the API never emits one; confirm.
        let jsonObjects = "\(previousChunkBuffer)\(stringContent)"
            .components(separatedBy: "data:")
            .filter { $0.isEmpty == false }
            .map { $0.trimmingCharacters(in: .whitespacesAndNewlines) }

        previousChunkBuffer = ""

        guard jsonObjects.isEmpty == false, jsonObjects.first != streamingCompletionMarker else {
            return
        }
        jsonObjects.enumerated().forEach { (index, jsonContent) in
            // Skip the end-of-stream sentinel wherever it appears.
            guard jsonContent != streamingCompletionMarker else {
                return
            }
            guard let jsonData = jsonContent.data(using: .utf8) else {
                onProcessingError?(self, StreamingError.unknownContent)
                return
            }

            var apiError: Error? = nil
            do {
                let decoder = JSONDecoder()
                let object = try decoder.decode(ResultType.self, from: jsonData)
                onReceiveContent?(self, object)
            } catch {
                apiError = error
            }

            // Decode failure: first try to interpret the payload as an API
            // error envelope; otherwise, if this is the chunk's last payload,
            // assume it is truncated JSON and buffer it for the next chunk.
            if let apiError = apiError {
                do {
                    let decoded = try JSONDecoder().decode(APIErrorResponse.self, from: jsonData)
                    onProcessingError?(self, decoded)
                } catch {
                    if index == jsonObjects.count - 1 {
                        previousChunkBuffer = "data: \(jsonContent)" // Chunk ends in a partial JSON
                    } else {
                        onProcessingError?(self, apiError)
                    }
                }
            }
        }
    }

}
104 |
--------------------------------------------------------------------------------
/Demo/App/APIKeyModalView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // APIKeyModalView.swift
3 | // Demo
4 | //
5 | // Created by Sihao Lu on 4/7/23.
6 | //
7 |
8 | import SwiftUI
9 |
/// Modal sheet for entering/editing the OpenAI API key.
///
/// The key is edited in a local `@State` copy and written back to the bound
/// `apiKey` only when the user taps "Continue" (mandatory mode) or
/// "Close" (optional mode).
struct APIKeyModalView: View {
    @Environment(\.dismiss) var dismiss

    /// When `true`, the only way out is the "Continue" button, which stays
    /// disabled until a non-empty key has been entered.
    let isMandatory: Bool

    @Binding private var apiKey: String
    /// Local editing buffer so typing does not mutate the caller's value live.
    @State private var internalAPIKey: String

    public init(
        apiKey: Binding<String>,
        isMandatory: Bool = true
    ) {
        self._apiKey = apiKey
        self._internalAPIKey = State(initialValue: apiKey.wrappedValue)
        self.isMandatory = isMandatory
    }

    /// Border color for the key editor, resolved per platform.
    private var strokeColor: Color {
        #if os(iOS)
        return Color(uiColor: UIColor.systemGray5)
        #elseif os(macOS)
        return Color(nsColor: NSColor.lightGray)
        #else
        // Fallback so the property still returns a value on other platforms.
        // (The original #if/#elseif had no #else and would fail to compile
        // anywhere but iOS/macOS.)
        return Color.gray
        #endif
    }

    var body: some View {
        NavigationView {
            VStack(alignment: .leading, spacing: 16) {

                VStack(alignment: .leading, spacing: 8) {
                    Text(
                        "You can find and configure your OpenAI API key at"
                    )
                    .font(.caption)

                    Link(
                        "https://platform.openai.com/account/api-keys",
                        destination: URL(string: "https://platform.openai.com/account/api-keys")!
                    )
                    .font(.caption)
                }

                TextEditor(
                    text: $internalAPIKey
                )
                .frame(height: 120)
                .font(.caption)
                .padding(8)
                .background(
                    RoundedRectangle(
                        cornerRadius: 8
                    )
                    .stroke(
                        strokeColor,
                        lineWidth: 1
                    )
                )
                .padding(4)
                .background(Color.white)
                .clipShape(RoundedRectangle(cornerRadius: 8))

                if isMandatory {
                    HStack {
                        Spacer()

                        Button {
                            // Commit the edited key to the caller, then dismiss.
                            apiKey = internalAPIKey
                            dismiss()
                        } label: {
                            Text(
                                "Continue"
                            )
                            .padding(8)
                        }
                        .buttonStyle(.borderedProminent)
                        .disabled(internalAPIKey.isEmpty)

                        Spacer()
                    }
                }
            }
            .padding()
            .navigationTitle("OpenAI API Key")
            .toolbar {
                ToolbarItem(placement: .primaryAction) {
                    if isMandatory {
                        EmptyView()
                    } else {
                        // Optional mode: commit on close instead of a
                        // dedicated Continue button.
                        Button("Close") {
                            apiKey = internalAPIKey
                            dismiss()
                        }
                    }
                }
            }
        }
    }
}
108 |
struct APIKeyModalView_Previews: PreviewProvider {
    /// Wrapper that owns the `@State` backing store the binding requires.
    struct APIKeyModalView_PreviewsContainerView: View {
        @State var apiKey = ""
        let isMandatory: Bool

        var body: some View {
            APIKeyModalView(apiKey: $apiKey, isMandatory: isMandatory)
        }
    }

    /// One preview per mode: mandatory (Continue-only) and optional (Close).
    static var previews: some View {
        APIKeyModalView_PreviewsContainerView(isMandatory: true)
        APIKeyModalView_PreviewsContainerView(isMandatory: false)
    }
}
127 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAIProtocol+Combine.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 03/04/2023.
6 | //
7 |
8 | #if canImport(Combine)
9 |
10 | import Combine
11 |
// NOTE(review): publisher generic parameters (e.g. `AnyPublisher<Result, Error>`)
// appear stripped throughout this copy; confirm against the original file.
@available(iOS 13.0, *)
@available(tvOS 13.0, *)
@available(macOS 10.15, *)
@available(watchOS 6.0, *)
public extension OpenAIProtocol {

    /// Combine wrapper for the callback-based completions API.
    func completions(query: CompletionsQuery) -> AnyPublisher {
        Future {
            completions(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Streams partial completion results: one `Result` per chunk, then a
    /// `.finished`/`.failure` completion when the stream ends.
    func completionsStream(query: CompletionsQuery) -> AnyPublisher, Error> {
        let progress = PassthroughSubject, Error>()
        completionsStream(query: query) { result in
            progress.send(result)
        } completion: { error in
            if let error {
                progress.send(completion: .failure(error))
            } else {
                progress.send(completion: .finished)
            }
        }
        return progress.eraseToAnyPublisher()
    }

    /// Combine wrapper for image generation.
    func images(query: ImagesQuery) -> AnyPublisher {
        Future {
            images(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Combine wrapper for image edits.
    func imageEdits(query: ImageEditsQuery) -> AnyPublisher {
        Future {
            imageEdits(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Combine wrapper for image variations.
    func imageVariations(query: ImageVariationsQuery) -> AnyPublisher {
        Future {
            imageVariations(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Combine wrapper for embeddings.
    func embeddings(query: EmbeddingsQuery) -> AnyPublisher {
        Future {
            embeddings(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Combine wrapper for chat completions.
    func chats(query: ChatQuery) -> AnyPublisher {
        Future {
            chats(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Streams partial chat results: one `Result` per chunk, then a
    /// `.finished`/`.failure` completion when the stream ends.
    func chatsStream(query: ChatQuery) -> AnyPublisher, Error> {
        let progress = PassthroughSubject, Error>()
        chatsStream(query: query) { result in
            progress.send(result)
        } completion: { error in
            if let error {
                progress.send(completion: .failure(error))
            } else {
                progress.send(completion: .finished)
            }
        }
        return progress.eraseToAnyPublisher()
    }

    /// Combine wrapper for edits.
    func edits(query: EditsQuery) -> AnyPublisher {
        Future {
            edits(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Combine wrapper for retrieving a single model.
    func model(query: ModelQuery) -> AnyPublisher {
        Future {
            model(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Combine wrapper for listing all models.
    func models() -> AnyPublisher {
        Future {
            models(completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Combine wrapper for content moderation.
    func moderations(query: ModerationsQuery) -> AnyPublisher {
        Future {
            moderations(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Combine wrapper for audio transcription.
    func audioTranscriptions(query: AudioTranscriptionQuery) -> AnyPublisher {
        Future {
            audioTranscriptions(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }

    /// Combine wrapper for audio translation.
    func audioTranslations(query: AudioTranslationQuery) -> AnyPublisher {
        Future {
            audioTranslations(query: query, completion: $0)
        }
        .eraseToAnyPublisher()
    }
}
130 |
131 | #endif
132 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/ModerationsResult.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModerationsResult.swift
3 | //
4 | //
5 | // Created by Aled Samuel on 10/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// Response payload of the `/v1/moderations` endpoint.
public struct ModerationsResult: Codable, Equatable {

    /// Classification output for one moderated input.
    public struct CategoryResult: Codable, Equatable {

        /// Per-category boolean flags.
        public struct Categories: Codable, Equatable {
            /// Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.
            public let hate: Bool
            /// Hateful content that also includes violence or serious harm towards the targeted group.
            public let hateThreatening: Bool
            /// Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.
            public let selfHarm: Bool
            /// Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).
            public let sexual: Bool
            /// Sexual content that includes an individual who is under 18 years old.
            public let sexualMinors: Bool
            /// Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.
            public let violence: Bool
            /// Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.
            public let violenceGraphic: Bool

            // Maps camelCase names to the API's slash/hyphen JSON keys.
            enum CodingKeys: String, CodingKey {
                case hate
                case hateThreatening = "hate/threatening"
                case selfHarm = "self-harm"
                case sexual
                case sexualMinors = "sexual/minors"
                case violence
                case violenceGraphic = "violence/graphic"
            }
        }

        /// Per-category confidence scores (same keys as `Categories`).
        public struct CategoryScores: Codable, Equatable {
            /// Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.
            public let hate: Double
            /// Hateful content that also includes violence or serious harm towards the targeted group.
            public let hateThreatening: Double
            /// Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.
            public let selfHarm: Double
            /// Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).
            public let sexual: Double
            /// Sexual content that includes an individual who is under 18 years old.
            public let sexualMinors: Double
            /// Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.
            public let violence: Double
            /// Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.
            public let violenceGraphic: Double

            // Maps camelCase names to the API's slash/hyphen JSON keys.
            enum CodingKeys: String, CodingKey {
                case hate
                case hateThreatening = "hate/threatening"
                case selfHarm = "self-harm"
                case sexual
                case sexualMinors = "sexual/minors"
                case violence
                case violenceGraphic = "violence/graphic"
            }
        }

        /// Collection of per-category binary usage policies violation flags. For each category, the value is true if the model flags the corresponding category as violated, false otherwise.
        public let categories: Categories
        /// Collection of per-category raw scores output by the model, denoting the model's confidence that the input violates the OpenAI's policy for the category. The value is between 0 and 1, where higher values denote higher confidence. The scores should not be interpreted as probabilities.
        public let categoryScores: CategoryScores
        /// True if the model classifies the content as violating OpenAI's usage policies, false otherwise.
        public let flagged: Bool

        enum CodingKeys: String, CodingKey {
            case categories
            case categoryScores = "category_scores"
            case flagged
        }
    }

    /// Identifier the API assigned to this moderation request.
    public let id: String
    /// The moderation model that produced the results.
    public let model: Model
    /// Per-input classification results.
    public let results: [CategoryResult]
}
86 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at the
project's designated contact address.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/Models/Models.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Models.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 12/19/22.
6 | //
7 |
8 | import Foundation
9 |
10 | public typealias Model = String
11 | public extension Model {
12 |
13 | // Chat Completions
14 |
15 | /// More capable than any GPT-3.5 model, able to do more complex tasks, and optimized for chat. Will be updated with our latest model iteration 2 weeks after it is released.
16 | static let gpt4 = "gpt-4"
17 |
    /// GPT-4 Turbo, the latest gpt-4 model with improved instruction following, JSON mode, reproducible outputs, parallel function calling and more.
19 | /// Maximum of 4096 output tokens
20 | static let gpt4_1106_preview = "gpt-4-1106-preview"
21 |
22 | /// Ability to understand images, in addition to all other GPT-4 Turbo capabilities.
23 | static let gpt4_vision_preview = "gpt-4-vision-preview"
24 |
25 | /// Snapshot of gpt-4 from March 14th 2023. Unlike gpt-4, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023.
26 | @available(*, deprecated, message: "Please upgrade to the newer model")
27 | static let gpt4_0314 = "gpt-4-0314"
28 | /// Snapshot of gpt-4 from June 13th 2023 with function calling data. Unlike gpt-4, this model will not receive updates, and will be deprecated 3 months after a new version is released.
29 | static let gpt4_0613 = "gpt-4-0613"
30 | /// Same capabilities as the base gpt-4 mode but with 4x the context length. Will be updated with our latest model iteration.
31 | static let gpt4_32k = "gpt-4-32k"
32 | /// Snapshot of gpt-4-32 from March 14th 2023. Unlike gpt-4-32k, this model will not receive updates, and will only be supported for a three month period ending on June 14th 2023.
33 | @available(*, deprecated, message: "Please upgrade to the newer model")
34 | static let gpt4_32k_0314 = "gpt-4-32k-0314"
35 | /// Snapshot of gpt-4-32 from June 13th 2023. Unlike gpt-4-32k, this model will not receive updates, and will be deprecated 3 months after a new version is released.
36 | static let gpt4_32k_0613 = "gpt-4-32k-0613"
37 |
38 | /// The latest GPT-3.5 Turbo model with improved instruction following, JSON mode, reproducible outputs, parallel function calling and more.
39 | static let gpt3_5Turbo_1106 = "gpt-3.5-turbo-1106"
40 |
41 | /// Most capable GPT-3.5 model and optimized for chat at 1/10th the cost of text-davinci-003. Will be updated with our latest model iteration.
42 | static let gpt3_5Turbo = "gpt-3.5-turbo"
43 | /// Snapshot of gpt-3.5-turbo from March 1st 2023. Unlike gpt-3.5-turbo, this model will not receive updates, and will only be supported for a three month period ending on June 1st 2023.
44 | @available(*, deprecated, message: "Please upgrade to the newer model")
45 | static let gpt3_5Turbo0301 = "gpt-3.5-turbo-0301"
46 | /// Snapshot of gpt-3.5-turbo from June 13th 2023 with function calling data. Unlike gpt-3.5-turbo, this model will not receive updates, and will be deprecated 3 months after a new version is released.
47 | @available(*, deprecated, message: "Please upgrade to the newer model")
48 | static let gpt3_5Turbo0613 = "gpt-3.5-turbo-0613"
49 | /// Same capabilities as the standard gpt-3.5-turbo model but with 4 times the context.
50 | static let gpt3_5Turbo_16k = "gpt-3.5-turbo-16k"
51 | /// Snapshot of gpt-3.5-turbo-16k from June 13th 2023. Unlike gpt-3.5-turbo-16k, this model will not receive updates, and will be deprecated 3 months after a new version is released.
52 | static let gpt3_5Turbo_16k_0613 = "gpt-3.5-turbo-16k-0613"
53 |
54 | // Completions
55 |
56 | /// Can do any language task with better quality, longer output, and consistent instruction-following than the curie, babbage, or ada models. Also supports inserting completions within text.
57 | static let textDavinci_003 = "text-davinci-003"
58 | /// Similar capabilities to text-davinci-003 but trained with supervised fine-tuning instead of reinforcement learning.
59 | static let textDavinci_002 = "text-davinci-002"
60 | /// Very capable, faster and lower cost than Davinci.
61 | static let textCurie = "text-curie-001"
62 | /// Capable of straightforward tasks, very fast, and lower cost.
63 | static let textBabbage = "text-babbage-001"
64 | /// Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.
65 | static let textAda = "text-ada-001"
66 |
67 | // Edits
68 |
69 | static let textDavinci_001 = "text-davinci-001"
70 | static let codeDavinciEdit_001 = "code-davinci-edit-001"
71 |
72 | // Transcriptions / Translations
73 |
74 | static let whisper_1 = "whisper-1"
75 |
76 | // Image Generation
77 | static let dall_e_2 = "dall-e-2"
78 | static let dall_e_3 = "dall-e-3"
79 |
80 | // Fine Tunes
81 |
82 | /// Most capable GPT-3 model. Can do any task the other models can do, often with higher quality.
83 | static let davinci = "davinci"
84 | /// Very capable, but faster and lower cost than Davinci.
85 | static let curie = "curie"
86 | /// Capable of straightforward tasks, very fast, and lower cost.
87 | static let babbage = "babbage"
88 | /// Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.
89 | static let ada = "ada"
90 |
91 | // Embeddings
92 |
93 | static let textEmbeddingAda = "text-embedding-ada-002"
94 | static let textSearchAda = "text-search-ada-doc-001"
95 | static let textSearchBabbageDoc = "text-search-babbage-doc-001"
96 | static let textSearchBabbageQuery001 = "text-search-babbage-query-001"
97 |
98 | // Moderations
99 |
100 | /// Almost as capable as the latest model, but slightly older.
101 | static let textModerationStable = "text-moderation-stable"
102 | /// Most capable moderation model. Accuracy will be slightly higher than the stable model.
103 | static let textModerationLatest = "text-moderation-latest"
104 | static let moderation = "text-moderation-001"
105 | }
106 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/ChatStore.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatStore.swift
3 | // DemoChat
4 | //
5 | // Created by Sihao Lu on 3/25/23.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 | import OpenAI
11 |
/// Observable store that owns the list of chat conversations and drives
/// streamed chat completions through an `OpenAIProtocol` client.
public final class ChatStore: ObservableObject {
    public var openAIClient: OpenAIProtocol
    /// Produces unique identifiers for new conversations (injected for testability).
    let idProvider: () -> String

    @Published var conversations: [Conversation] = []
    /// Last error per conversation; cleared when a new completion attempt starts.
    @Published var conversationErrors: [Conversation.ID: Error] = [:]
    @Published var selectedConversationID: Conversation.ID?

    /// The currently selected conversation, if any.
    var selectedConversation: Conversation? {
        selectedConversationID.flatMap { id in
            conversations.first { $0.id == id }
        }
    }

    /// Emits the selected conversation whenever the selection changes, delivered on the main run loop.
    /// NOTE(review): `self` is captured strongly by the `map` closure; fine if the store
    /// lives for the app's lifetime — confirm if ChatStore can be deallocated while subscribed.
    var selectedConversationPublisher: AnyPublisher<Conversation?, Never> {
        $selectedConversationID.receive(on: RunLoop.main).map { id in
            self.conversations.first(where: { $0.id == id })
        }
        .eraseToAnyPublisher()
    }

    public init(
        openAIClient: OpenAIProtocol,
        idProvider: @escaping () -> String
    ) {
        self.openAIClient = openAIClient
        self.idProvider = idProvider
    }

    // MARK: - Events

    /// Appends a new, empty conversation with a freshly generated identifier.
    func createConversation() {
        let conversation = Conversation(id: idProvider(), messages: [])
        conversations.append(conversation)
    }

    func selectConversation(_ conversationId: Conversation.ID?) {
        selectedConversationID = conversationId
    }

    func deleteConversation(_ conversationId: Conversation.ID) {
        conversations.removeAll(where: { $0.id == conversationId })
    }

    /// Appends `message` to the conversation and requests a streamed completion for it.
    @MainActor
    func sendMessage(
        _ message: Message,
        conversationId: Conversation.ID,
        model: Model
    ) async {
        guard let conversationIndex = conversations.firstIndex(where: { $0.id == conversationId }) else {
            return
        }
        conversations[conversationIndex].messages.append(message)

        await completeChat(
            conversationId: conversationId,
            model: model
        )
    }

    /// Streams a chat completion for the given conversation, melding partial
    /// deltas into a single assistant message as they arrive. Errors are stored
    /// in `conversationErrors` keyed by conversation.
    @MainActor
    func completeChat(
        conversationId: Conversation.ID,
        model: Model
    ) async {
        guard let conversation = conversations.first(where: { $0.id == conversationId }) else {
            return
        }

        // Reset any previous failure before starting a new attempt.
        conversationErrors[conversationId] = nil

        do {
            guard let conversationIndex = conversations.firstIndex(where: { $0.id == conversationId }) else {
                return
            }

            // Demo tool: lets the model request weather data via function calling.
            let weatherFunction = ChatFunctionDeclaration(
                name: "getWeatherData",
                description: "Get the current weather in a given location",
                parameters: .init(
                    type: .object,
                    properties: [
                        "location": .init(type: .string, description: "The city and state, e.g. San Francisco, CA")
                    ],
                    required: ["location"]
                )
            )

            let functions = [weatherFunction]

            let chatsStream: AsyncThrowingStream<ChatStreamResult, Error> = openAIClient.chatsStream(
                query: ChatQuery(
                    model: model,
                    messages: conversation.messages.map { message in
                        Chat(role: message.role, content: message.content)
                    },
                    functions: functions
                )
            )

            var functionCallName = ""
            var functionCallArguments = ""
            for try await partialChatResult in chatsStream {
                for choice in partialChatResult.choices {
                    let existingMessages = conversations[conversationIndex].messages
                    // Function calls are also streamed, so we need to accumulate.
                    if let functionCallDelta = choice.delta.functionCall {
                        if let nameDelta = functionCallDelta.name {
                            functionCallName += nameDelta
                        }
                        if let argumentsDelta = functionCallDelta.arguments {
                            functionCallArguments += argumentsDelta
                        }
                    }
                    var messageText = choice.delta.content ?? ""
                    if let finishReason = choice.finishReason,
                       finishReason == "function_call" {
                        // Surface the accumulated function call in the transcript once the stream finishes.
                        messageText += "Function call: name=\(functionCallName) arguments=\(functionCallArguments)"
                    }
                    let message = Message(
                        id: partialChatResult.id,
                        role: choice.delta.role ?? .assistant,
                        content: messageText,
                        createdAt: Date(timeIntervalSince1970: TimeInterval(partialChatResult.created))
                    )
                    if let existingMessageIndex = existingMessages.firstIndex(where: { $0.id == partialChatResult.id }) {
                        // Meld into previous message
                        let previousMessage = existingMessages[existingMessageIndex]
                        let combinedMessage = Message(
                            id: message.id, // id stays the same for different deltas
                            role: message.role,
                            content: previousMessage.content + message.content,
                            createdAt: message.createdAt
                        )
                        conversations[conversationIndex].messages[existingMessageIndex] = combinedMessage
                    } else {
                        conversations[conversationIndex].messages.append(message)
                    }
                }
            }
        } catch {
            conversationErrors[conversationId] = error
        }
    }
}
157 |
--------------------------------------------------------------------------------
/Tests/OpenAITests/OpenAITestsCombine.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAITestsCombine.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 04/04/2023.
6 | //
7 |
8 | #if canImport(Combine)
9 |
10 | import XCTest
11 | @testable import OpenAI
12 |
@available(iOS 13.0, *)
@available(watchOS 6.0, *)
@available(tvOS 13.0, *)
final class OpenAITestsCombine: XCTestCase {

    var openAI: OpenAIProtocol!
    var urlSession: URLSessionMock!

    override func setUp() {
        super.setUp()
        urlSession = URLSessionMock()
        let configuration = OpenAI.Configuration(token: "foo", organizationIdentifier: "bar", timeoutInterval: 14)
        openAI = OpenAI(configuration: configuration, session: urlSession)
    }

    /// Completions round-trip: the stubbed payload is decoded and returned unchanged.
    func testCompletions() throws {
        let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"])
        let expected = CompletionsResult(id: "foo", object: "bar", created: 100500, model: .babbage, choices: [
            .init(text: "42 is the answer to everything", index: 0, finishReason: nil)
        ], usage: .init(promptTokens: 10, completionTokens: 10, totalTokens: 20))
        try stub(result: expected)

        XCTAssertEqual(try awaitPublisher(openAI.completions(query: query)), expected)
    }

    /// Chats round-trip through the Combine publisher.
    func testChats() throws {
        let query = ChatQuery(model: .gpt4, messages: [
            .init(role: .system, content: "You are Librarian-GPT. You know everything about the books."),
            .init(role: .user, content: "Who wrote Harry Potter?")
        ])
        let expected = ChatResult(id: "id-12312", object: "foo", created: 100, model: .gpt3_5Turbo, choices: [
            .init(index: 0, message: .init(role: .system, content: "bar"), finishReason: "baz"),
            .init(index: 0, message: .init(role: .user, content: "bar1"), finishReason: "baz1"),
            .init(index: 0, message: .init(role: .assistant, content: "bar2"), finishReason: "baz2")
        ], usage: .init(promptTokens: 100, completionTokens: 200, totalTokens: 300))
        try stub(result: expected)

        XCTAssertEqual(try awaitPublisher(openAI.chats(query: query)), expected)
    }

    /// Edits round-trip through the Combine publisher.
    func testEdits() throws {
        let query = EditsQuery(model: .gpt4, input: "What day of the wek is it?", instruction: "Fix the spelling mistakes")
        let expected = EditsResult(object: "edit", created: 1589478378, choices: [
            .init(text: "What day of the week is it?", index: 0)
        ], usage: .init(promptTokens: 25, completionTokens: 32, totalTokens: 57))
        try stub(result: expected)

        XCTAssertEqual(try awaitPublisher(openAI.edits(query: query)), expected)
    }

    /// Embeddings round-trip through the Combine publisher.
    func testEmbeddings() throws {
        let query = EmbeddingsQuery(model: .textSearchBabbageDoc, input: "The food was delicious and the waiter...")
        let expected = EmbeddingsResult(data: [
            .init(object: "id-sdasd", embedding: [0.1, 0.2, 0.3, 0.4], index: 0),
            .init(object: "id-sdasd1", embedding: [0.4, 0.1, 0.7, 0.1], index: 1),
            .init(object: "id-sdasd2", embedding: [0.8, 0.1, 0.2, 0.8], index: 2)
        ], model: .textSearchBabbageDoc, usage: .init(promptTokens: 10, totalTokens: 10))
        try stub(result: expected)

        XCTAssertEqual(try awaitPublisher(openAI.embeddings(query: query)), expected)
    }

    /// Fetching a single model returns the stubbed model description.
    func testRetrieveModel() throws {
        let query = ModelQuery(model: .gpt4)
        let expected = ModelResult(id: .gpt4, object: "model", ownedBy: "organization-owner")
        try stub(result: expected)

        XCTAssertEqual(try awaitPublisher(openAI.model(query: query)), expected)
    }

    /// Listing models returns the stubbed (empty) list.
    func testListModels() throws {
        let expected = ModelsResult(data: [], object: "model")
        try stub(result: expected)

        XCTAssertEqual(try awaitPublisher(openAI.models()), expected)
    }

    /// Moderations round-trip through the Combine publisher.
    func testModerations() throws {
        let query = ModerationsQuery(input: "Hello, world!")
        let expected = ModerationsResult(id: "foo", model: .moderation, results: [
            .init(categories: .init(hate: false, hateThreatening: false, selfHarm: false, sexual: false, sexualMinors: false, violence: false, violenceGraphic: false),
                  categoryScores: .init(hate: 0.1, hateThreatening: 0.1, selfHarm: 0.1, sexual: 0.1, sexualMinors: 0.1, violence: 0.1, violenceGraphic: 0.1),
                  flagged: false)
        ])
        try stub(result: expected)

        XCTAssertEqual(try awaitPublisher(openAI.moderations(query: query)), expected)
    }

    /// Audio transcription round-trip (multipart request, stubbed response).
    func testAudioTranscriptions() throws {
        let query = AudioTranscriptionQuery(file: Data(), fileName: "audio.m4a", model: .whisper_1)
        let expected = AudioTranscriptionResult(text: "Hello, world!")
        try stub(result: expected)

        XCTAssertEqual(try awaitPublisher(openAI.audioTranscriptions(query: query)), expected)
    }

    /// Audio translation round-trip (multipart request, stubbed response).
    func testAudioTranslations() throws {
        let query = AudioTranslationQuery(file: Data(), fileName: "audio.m4a", model: .whisper_1)
        let expected = AudioTranslationResult(text: "Hello, world!")
        try stub(result: expected)

        XCTAssertEqual(try awaitPublisher(openAI.audioTranslations(query: query)), expected)
    }
}
127 |
@available(tvOS 13.0, *)
@available(iOS 13.0, *)
@available(watchOS 6.0, *)
extension OpenAITestsCombine {

    /// Stubs the next network call so it fails with the given error.
    /// Fix: the parameter was previously shadowed by a hard-coded `APIError`,
    /// so the error passed by the caller was silently ignored.
    func stub(error: Error) {
        let task = DataTaskMock.failed(with: error)
        self.urlSession.dataTask = task
    }

    /// Stubs the next network call so it succeeds with the JSON encoding of `result`.
    /// - Throws: Any encoding error from `JSONEncoder`.
    func stub(result: Codable) throws {
        let encoder = JSONEncoder()
        let data = try encoder.encode(result)
        let task = DataTaskMock.successful(with: data)
        self.urlSession.dataTask = task
    }
}
146 |
147 | #endif
148 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAIProtocol+Async.swift
3 | //
4 | //
5 | // Created by Maxime Maheo on 10/02/2023.
6 | //
7 |
8 | import Foundation
9 |
@available(iOS 13.0, *)
@available(macOS 10.15, *)
@available(tvOS 13.0, *)
@available(watchOS 6.0, *)
public extension OpenAIProtocol {

    /// Bridges a completion-handler API into async/await.
    ///
    /// The wrapped call must invoke its completion exactly once, which matches
    /// the checked continuation's resume-exactly-once contract.
    /// `Result`-based completions map directly via `continuation.resume(with:)`.
    private func asyncCall<ResultType>(
        _ call: (@escaping (Result<ResultType, Error>) -> Void) -> Void
    ) async throws -> ResultType {
        try await withCheckedThrowingContinuation { continuation in
            call { result in
                continuation.resume(with: result)
            }
        }
    }

    /// Async variant of `completions(query:completion:)`.
    func completions(
        query: CompletionsQuery
    ) async throws -> CompletionsResult {
        try await asyncCall { completions(query: query, completion: $0) }
    }

    /// Async stream of partial completion results; finishes (or throws) when the
    /// underlying streaming request completes.
    func completionsStream(
        query: CompletionsQuery
    ) -> AsyncThrowingStream<CompletionsResult, Error> {
        AsyncThrowingStream { continuation in
            completionsStream(query: query) { result in
                continuation.yield(with: result)
            } completion: { error in
                continuation.finish(throwing: error)
            }
        }
    }

    /// Async variant of `images(query:completion:)`.
    func images(
        query: ImagesQuery
    ) async throws -> ImagesResult {
        try await asyncCall { images(query: query, completion: $0) }
    }

    /// Async variant of `imageEdits(query:completion:)`.
    func imageEdits(
        query: ImageEditsQuery
    ) async throws -> ImagesResult {
        try await asyncCall { imageEdits(query: query, completion: $0) }
    }

    /// Async variant of `imageVariations(query:completion:)`.
    func imageVariations(
        query: ImageVariationsQuery
    ) async throws -> ImagesResult {
        try await asyncCall { imageVariations(query: query, completion: $0) }
    }

    /// Async variant of `embeddings(query:completion:)`.
    func embeddings(
        query: EmbeddingsQuery
    ) async throws -> EmbeddingsResult {
        try await asyncCall { embeddings(query: query, completion: $0) }
    }

    /// Async variant of `chats(query:completion:)`.
    func chats(
        query: ChatQuery
    ) async throws -> ChatResult {
        try await asyncCall { chats(query: query, completion: $0) }
    }

    /// Async stream of partial chat results; finishes (or throws) when the
    /// underlying streaming request completes.
    func chatsStream(
        query: ChatQuery
    ) -> AsyncThrowingStream<ChatStreamResult, Error> {
        AsyncThrowingStream { continuation in
            chatsStream(query: query) { result in
                continuation.yield(with: result)
            } completion: { error in
                continuation.finish(throwing: error)
            }
        }
    }

    /// Async variant of `edits(query:completion:)`.
    func edits(
        query: EditsQuery
    ) async throws -> EditsResult {
        try await asyncCall { edits(query: query, completion: $0) }
    }

    /// Async variant of `model(query:completion:)`.
    func model(
        query: ModelQuery
    ) async throws -> ModelResult {
        try await asyncCall { model(query: query, completion: $0) }
    }

    /// Async variant of `models(completion:)`.
    func models() async throws -> ModelsResult {
        try await asyncCall { models(completion: $0) }
    }

    /// Async variant of `moderations(query:completion:)`.
    func moderations(
        query: ModerationsQuery
    ) async throws -> ModerationsResult {
        try await asyncCall { moderations(query: query, completion: $0) }
    }

    /// Async variant of `audioTranscriptions(query:completion:)`.
    func audioTranscriptions(
        query: AudioTranscriptionQuery
    ) async throws -> AudioTranscriptionResult {
        try await asyncCall { audioTranscriptions(query: query, completion: $0) }
    }

    /// Async variant of `audioTranslations(query:completion:)`.
    func audioTranslations(
        query: AudioTranslationQuery
    ) async throws -> AudioTranslationResult {
        try await asyncCall { audioTranslations(query: query, completion: $0) }
    }
}
217 |
--------------------------------------------------------------------------------
/Sources/OpenAI/OpenAI.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAI.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 9/18/22.
6 | //
7 |
8 | import Foundation
9 | #if canImport(FoundationNetworking)
10 | import FoundationNetworking
11 | #endif
12 |
/// Concrete HTTPS client for the OpenAI REST API.
final public class OpenAI: OpenAIProtocol {

    /// Values that configure the client: credentials, target host and timeout.
    public struct Configuration {

        /// OpenAI API token. See https://platform.openai.com/docs/api-reference/authentication
        public let token: String

        /// Optional OpenAI organization identifier. See https://platform.openai.com/docs/api-reference/authentication
        public let organizationIdentifier: String?

        /// API host. Set this property if you use some kind of proxy or your own server. Default is api.openai.com
        public let host: String

        /// Default request timeout
        public let timeoutInterval: TimeInterval

        public init(token: String, organizationIdentifier: String? = nil, host: String = "api.openai.com", timeoutInterval: TimeInterval = 60.0) {
            self.token = token
            self.organizationIdentifier = organizationIdentifier
            self.host = host
            self.timeoutInterval = timeoutInterval
        }
    }

    private let session: URLSessionProtocol
    // Streaming sessions are retained here until they report completion; see performSteamingRequest.
    private var streamingSessions: [NSObject] = []

    public let configuration: Configuration

    public convenience init(apiToken: String) {
        self.init(configuration: Configuration(token: apiToken), session: URLSession.shared)
    }

    public convenience init(configuration: Configuration) {
        self.init(configuration: configuration, session: URLSession.shared)
    }

    // Designated initializer; internal so tests can inject a mock session.
    init(configuration: Configuration, session: URLSessionProtocol) {
        self.configuration = configuration
        self.session = session
    }

    public convenience init(configuration: Configuration, session: URLSession = URLSession.shared) {
        self.init(configuration: configuration, session: session as URLSessionProtocol)
    }

    public func completions(query: CompletionsQuery, completion: @escaping (Result<CompletionsResult, Error>) -> Void) {
        performRequest(request: JSONRequest<CompletionsResult>(body: query, url: buildURL(path: .completions)), completion: completion)
    }

    public func completionsStream(query: CompletionsQuery, onResult: @escaping (Result<CompletionsResult, Error>) -> Void, completion: ((Error?) -> Void)?) {
        performSteamingRequest(request: JSONRequest<CompletionsResult>(body: query.makeStreamable(), url: buildURL(path: .completions)), onResult: onResult, completion: completion)
    }

    public func images(query: ImagesQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void) {
        performRequest(request: JSONRequest<ImagesResult>(body: query, url: buildURL(path: .images)), completion: completion)
    }

    public func imageEdits(query: ImageEditsQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void) {
        performRequest(request: MultipartFormDataRequest<ImagesResult>(body: query, url: buildURL(path: .imageEdits)), completion: completion)
    }

    public func imageVariations(query: ImageVariationsQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void) {
        performRequest(request: MultipartFormDataRequest<ImagesResult>(body: query, url: buildURL(path: .imageVariations)), completion: completion)
    }

    public func embeddings(query: EmbeddingsQuery, completion: @escaping (Result<EmbeddingsResult, Error>) -> Void) {
        performRequest(request: JSONRequest<EmbeddingsResult>(body: query, url: buildURL(path: .embeddings)), completion: completion)
    }

    public func chats(query: ChatQuery, completion: @escaping (Result<ChatResult, Error>) -> Void) {
        performRequest(request: JSONRequest<ChatResult>(body: query, url: buildURL(path: .chats)), completion: completion)
    }

    public func chatsStream(query: ChatQuery, onResult: @escaping (Result<ChatStreamResult, Error>) -> Void, completion: ((Error?) -> Void)?) {
        performSteamingRequest(request: JSONRequest<ChatStreamResult>(body: query.makeStreamable(), url: buildURL(path: .chats)), onResult: onResult, completion: completion)
    }

    public func edits(query: EditsQuery, completion: @escaping (Result<EditsResult, Error>) -> Void) {
        performRequest(request: JSONRequest<EditsResult>(body: query, url: buildURL(path: .edits)), completion: completion)
    }

    public func model(query: ModelQuery, completion: @escaping (Result<ModelResult, Error>) -> Void) {
        performRequest(request: JSONRequest<ModelResult>(url: buildURL(path: .models.withPath(query.model)), method: "GET"), completion: completion)
    }

    public func models(completion: @escaping (Result<ModelsResult, Error>) -> Void) {
        performRequest(request: JSONRequest<ModelsResult>(url: buildURL(path: .models), method: "GET"), completion: completion)
    }

    public func moderations(query: ModerationsQuery, completion: @escaping (Result<ModerationsResult, Error>) -> Void) {
        performRequest(request: JSONRequest<ModerationsResult>(body: query, url: buildURL(path: .moderations)), completion: completion)
    }

    public func audioTranscriptions(query: AudioTranscriptionQuery, completion: @escaping (Result<AudioTranscriptionResult, Error>) -> Void) {
        performRequest(request: MultipartFormDataRequest<AudioTranscriptionResult>(body: query, url: buildURL(path: .audioTranscriptions)), completion: completion)
    }

    public func audioTranslations(query: AudioTranslationQuery, completion: @escaping (Result<AudioTranslationResult, Error>) -> Void) {
        performRequest(request: MultipartFormDataRequest<AudioTranslationResult>(body: query, url: buildURL(path: .audioTranslations)), completion: completion)
    }
}
115 |
extension OpenAI {

    /// Builds and performs a one-shot request, decoding the response body as `ResultType`.
    ///
    /// Decoding strategy: try `ResultType` first; if that fails, attempt to decode the
    /// server's `APIErrorResponse` and surface it, otherwise surface the original
    /// decoding error. An error or missing body fails the completion immediately.
    func performRequest<ResultType: Codable>(request: any URLRequestBuildable, completion: @escaping (Result<ResultType, Error>) -> Void) {
        do {
            let request = try request.build(token: configuration.token, organizationIdentifier: configuration.organizationIdentifier, timeoutInterval: configuration.timeoutInterval)
            let task = session.dataTask(with: request) { data, _, error in
                if let error = error {
                    completion(.failure(error))
                    return
                }
                guard let data = data else {
                    completion(.failure(OpenAIError.emptyData))
                    return
                }

                var apiError: Error? = nil
                do {
                    let decoded = try JSONDecoder().decode(ResultType.self, from: data)
                    completion(.success(decoded))
                } catch {
                    apiError = error
                }

                if let apiError = apiError {
                    do {
                        // The payload didn't match ResultType — it may be an API error envelope.
                        let decoded = try JSONDecoder().decode(APIErrorResponse.self, from: data)
                        completion(.failure(decoded))
                    } catch {
                        completion(.failure(apiError))
                    }
                }
            }
            task.resume()
        } catch {
            completion(.failure(error))
        }
    }

    /// Builds and performs a server-sent-events style streaming request.
    ///
    /// The `StreamingSession` is retained in `streamingSessions` until its
    /// `onComplete` fires, since URLSession delegates are not retained otherwise.
    /// (Name keeps the historical "Steaming" spelling for source compatibility.)
    func performSteamingRequest<ResultType: Codable>(request: any URLRequestBuildable, onResult: @escaping (Result<ResultType, Error>) -> Void, completion: ((Error?) -> Void)?) {
        do {
            let request = try request.build(token: configuration.token, organizationIdentifier: configuration.organizationIdentifier, timeoutInterval: configuration.timeoutInterval)
            let session = StreamingSession<ResultType>(urlRequest: request)
            session.onReceiveContent = {_, object in
                onResult(.success(object))
            }
            session.onProcessingError = {_, error in
                onResult(.failure(error))
            }
            session.onComplete = { [weak self] object, error in
                self?.streamingSessions.removeAll(where: { $0 == object })
                completion?(error)
            }
            session.perform()
            streamingSessions.append(session)
        } catch {
            completion?(error)
        }
    }
}
175 |
extension OpenAI {

    /// Assembles the full HTTPS URL for an API path on the configured host.
    func buildURL(path: String) -> URL {
        var components = URLComponents()
        components.scheme = "https"
        components.host = configuration.host
        components.path = path
        // Scheme/host/path are always well-formed here, so the URL cannot be nil.
        return components.url!
    }
}
186 |
/// API endpoint paths, expressed as plain strings for easy composition.
typealias APIPath = String
extension APIPath {

    // Text endpoints.
    static let completions = "/v1/completions"
    static let embeddings = "/v1/embeddings"
    static let chats = "/v1/chat/completions"
    static let edits = "/v1/edits"
    static let models = "/v1/models"
    static let moderations = "/v1/moderations"

    // Audio endpoints.
    static let audioTranscriptions = "/v1/audio/transcriptions"
    static let audioTranslations = "/v1/audio/translations"

    // Image endpoints.
    static let images = "/v1/images/generations"
    static let imageEdits = "/v1/images/edits"
    static let imageVariations = "/v1/images/variations"

    /// Returns this path with `path` appended as one extra path component.
    func withPath(_ path: String) -> String {
        [self, path].joined(separator: "/")
    }
}
208 |
--------------------------------------------------------------------------------
/Demo/DemoChat/Sources/UI/DetailView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DetailView.swift
3 | // DemoChat
4 | //
5 | // Created by Sihao Lu on 3/25/23.
6 | //
7 |
8 | #if os(iOS)
9 | import UIKit
10 | #elseif os(macOS)
11 | import AppKit
12 | #endif
13 | import OpenAI
14 | import SwiftUI
15 |
/// The main chat screen: a scrollable transcript of `conversation.messages`,
/// an optional error banner, a model picker (toolbar + confirmation dialog),
/// and an input bar for composing and sending messages.
struct DetailView: View {
    /// Text currently typed into the input bar; cleared after a successful send.
    @State var inputText: String = ""
    @FocusState private var isFocused: Bool
    /// Drives presentation of the model-selection confirmation dialog.
    @State private var showsModelSelectionSheet = false
    /// Model passed to `sendMessage` for the next message.
    @State private var selectedChatModel: Model = .gpt4_0613

    private let availableChatModels: [Model] = [.gpt3_5Turbo0613, .gpt4_0613]

    let conversation: Conversation
    /// Most recent send/stream error to surface beneath the transcript, if any.
    let error: Error?
    /// Callback invoked with the trimmed message text and the selected model.
    let sendMessage: (String, Model) -> Void

    /// Input-bar background; adapts per platform.
    private var fillColor: Color {
        #if os(iOS)
        return Color(uiColor: UIColor.systemBackground)
        #elseif os(macOS)
        return Color(nsColor: NSColor.textBackgroundColor)
        #endif
    }

    /// Input-bar border color; adapts per platform.
    private var strokeColor: Color {
        #if os(iOS)
        return Color(uiColor: UIColor.systemGray5)
        #elseif os(macOS)
        return Color(nsColor: NSColor.lightGray)
        #endif
    }

    var body: some View {
        NavigationStack {
            ScrollViewReader { scrollViewProxy in
                VStack {
                    List {
                        ForEach(conversation.messages) { message in
                            ChatBubble(message: message)
                        }
                        .listRowSeparator(.hidden)
                    }
                    .listStyle(.plain)
                    .animation(.default, value: conversation.messages)
                    // Auto-scroll to the newest message is currently disabled;
                    // `scrollViewProxy` is threaded through for when it returns.
                    // .onChange(of: conversation) { newValue in
                    //     if let lastMessage = newValue.messages.last {
                    //         scrollViewProxy.scrollTo(lastMessage.id, anchor: .bottom)
                    //     }
                    // }

                    if let error = error {
                        errorMessage(error: error)
                    }

                    inputBar(scrollViewProxy: scrollViewProxy)
                }
                .navigationTitle("Chat")
                .safeAreaInset(edge: .top) {
                    // Always-visible banner showing the currently selected model.
                    HStack {
                        Text(
                            "Model: \(selectedChatModel)"
                        )
                        .font(.caption)
                        .foregroundColor(.secondary)
                        Spacer()
                    }
                    .padding(.horizontal, 16)
                    .padding(.vertical, 8)
                }
                .toolbar {
                    // NOTE(review): `.navigationBarTrailing` is unavailable on
                    // macOS toolbars — confirm this file compiles for the macOS
                    // target or gate the placement with `#if os(iOS)`.
                    ToolbarItem(placement: .navigationBarTrailing) {
                        Button(action: {
                            showsModelSelectionSheet.toggle()
                        }) {
                            Image(systemName: "cpu")
                        }
                    }
                }
                .confirmationDialog(
                    "Select model",
                    isPresented: $showsModelSelectionSheet,
                    titleVisibility: .visible,
                    actions: {
                        ForEach(availableChatModels, id: \.self) { model in
                            Button {
                                selectedChatModel = model
                            } label: {
                                // `Text(model)` implies Model is string-like
                                // (presumably a String typealias) — TODO confirm.
                                Text(model)
                            }
                        }

                        Button("Cancel", role: .cancel) {
                            showsModelSelectionSheet = false
                        }
                    },
                    message: {
                        Text(
                            "View https://platform.openai.com/docs/models/overview for details"
                        )
                        .font(.caption)
                    }
                )
            }
        }
    }

    /// Red caption rendering `error.localizedDescription` under the transcript.
    @ViewBuilder private func errorMessage(error: Error) -> some View {
        Text(
            error.localizedDescription
        )
        .font(.caption)
        .foregroundColor({
            // Immediately-invoked closure so the platform conditional can
            // produce a single Color value inline.
            #if os(iOS)
            return Color(uiColor: .systemRed)
            #elseif os(macOS)
            return Color(.systemRed)
            #endif
        }())
        .padding(.horizontal)
    }

    /// Rounded text editor plus a send button. Sending is disabled while the
    /// trimmed input is empty; both submit and tap routes go through
    /// `tapSendMessage`.
    @ViewBuilder private func inputBar(scrollViewProxy: ScrollViewProxy) -> some View {
        HStack {
            TextEditor(
                text: $inputText
            )
            // Negative padding cancels TextEditor's built-in insets so the
            // custom 12/16 padding below controls the final metrics.
            .padding(.vertical, -8)
            .padding(.horizontal, -4)
            .frame(minHeight: 22, maxHeight: 300)
            .foregroundColor(.primary)
            .padding(EdgeInsets(top: 12, leading: 16, bottom: 12, trailing: 16))
            .background(
                RoundedRectangle(
                    cornerRadius: 16,
                    style: .continuous
                )
                .fill(fillColor)
                .overlay(
                    RoundedRectangle(
                        cornerRadius: 16,
                        style: .continuous
                    )
                    .stroke(
                        strokeColor,
                        lineWidth: 1
                    )
                )
            )
            .fixedSize(horizontal: false, vertical: true)
            .onSubmit {
                withAnimation {
                    tapSendMessage(scrollViewProxy: scrollViewProxy)
                }
            }
            .padding(.leading)

            Button(action: {
                withAnimation {
                    tapSendMessage(scrollViewProxy: scrollViewProxy)
                }
            }) {
                Image(systemName: "paperplane")
                    .resizable()
                    .aspectRatio(contentMode: .fit)
                    .frame(width: 24, height: 24)
                    .padding(.trailing)
            }
            .disabled(inputText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty)
        }
        .padding(.bottom)
    }

    /// Trims the input, forwards a non-empty message through `sendMessage`,
    /// and clears the input field. No-op for whitespace-only input.
    private func tapSendMessage(
        scrollViewProxy: ScrollViewProxy
    ) {
        let message = inputText.trimmingCharacters(in: .whitespacesAndNewlines)
        if message.isEmpty {
            return
        }

        sendMessage(message, selectedChatModel)
        inputText = ""

        // Scroll-to-bottom after send is disabled alongside the onChange above.
        // if let lastMessage = conversation.messages.last {
        //     scrollViewProxy.scrollTo(lastMessage.id, anchor: .bottom)
        // }
    }
}
200 |
/// A single transcript row: renders `message.content` in a rounded bubble,
/// aligned left for assistant/function messages and right for user messages.
/// System messages render nothing.
struct ChatBubble: View {
    let message: Message

    /// Bubble fill for assistant and function messages; adapts per platform.
    private var assistantBackgroundColor: Color {
        #if os(iOS)
        return Color(uiColor: UIColor.systemGray5)
        #elseif os(macOS)
        return Color(nsColor: NSColor.lightGray)
        #endif
    }

    /// Text color inside the user's bubble.
    private var userForegroundColor: Color {
        #if os(iOS)
        return Color(uiColor: .white)
        #elseif os(macOS)
        return Color(nsColor: NSColor.white)
        #endif
    }

    /// Bubble fill for the user's bubble.
    private var userBackgroundColor: Color {
        #if os(iOS)
        return Color(uiColor: .systemBlue)
        #elseif os(macOS)
        return Color(nsColor: NSColor.systemBlue)
        #endif
    }

    var body: some View {
        HStack {
            switch message.role {
            case .assistant:
                bubbleText(background: assistantBackgroundColor)
                Spacer(minLength: 24)
            case .user:
                Spacer(minLength: 24)
                bubbleText(foreground: userForegroundColor, background: userBackgroundColor)
            case .function:
                // Function-call payloads read better in a monospaced footnote.
                bubbleText(font: .footnote.monospaced(), background: assistantBackgroundColor)
                Spacer(minLength: 24)
            case .system:
                EmptyView()
            }
        }
    }

    /// Shared bubble styling previously triplicated across the switch cases.
    /// Passing `nil` for `font`/`foreground` leaves the environment defaults in
    /// place, which is equivalent to not applying those modifiers at all.
    private func bubbleText(font: Font? = nil, foreground: Color? = nil, background: Color) -> some View {
        Text(message.content)
            .font(font)
            .foregroundColor(foreground)
            .padding(.horizontal, 16)
            .padding(.vertical, 12)
            .background(background)
            .clipShape(RoundedRectangle(cornerRadius: 16, style: .continuous))
    }
}
260 |
struct DetailView_Previews: PreviewProvider {
    /// Sample transcript exercising assistant, user, and function-call bubbles.
    private static let sampleConversation = Conversation(
        id: "1",
        messages: [
            Message(id: "1", role: .assistant, content: "Hello, how can I help you today?", createdAt: Date(timeIntervalSinceReferenceDate: 0)),
            Message(id: "2", role: .user, content: "I need help with my subscription.", createdAt: Date(timeIntervalSinceReferenceDate: 100)),
            Message(id: "3", role: .assistant, content: "Sure, what seems to be the problem with your subscription?", createdAt: Date(timeIntervalSinceReferenceDate: 200)),
            Message(id: "4", role: .function, content:
                """
                get_current_weather({
                  "location": "Glasgow, Scotland",
                  "format": "celsius"
                })
                """, createdAt: Date(timeIntervalSinceReferenceDate: 200))
        ]
    )

    static var previews: some View {
        DetailView(
            conversation: sampleConversation,
            error: nil,
            sendMessage: { _, _ in }
        )
    }
}
284 |
285 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift:
--------------------------------------------------------------------------------
1 | //
// OpenAIProtocol.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
public protocol OpenAIProtocol {
    
    /**
     This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI's language models, like the powerful GPT-3.
     
     Example:
     ```
     let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?")
     openAI.completions(query: query) { result in
       //Handle result here
     }
     ```
     
     - Parameters:
       - query: A `CompletionsQuery` object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings.
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<CompletionsResult, Error>`, will contain either the `CompletionsResult` object with the generated completions, or an error if the request failed.
    **/
    func completions(query: CompletionsQuery, completion: @escaping (Result<CompletionsResult, Error>) -> Void)
    
    /**
     This function sends a completions query to the OpenAI API and retrieves generated completions in response. The Completions API enables you to build applications using OpenAI's language models, like the powerful GPT-3. The result is returned by chunks.
     
     Example:
     ```
     let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?")
     openAI.completions(query: query) { result in
       //Handle result here
     }
     ```
     
     - Parameters:
       - query: A `CompletionsQuery` object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings.
       - onResult: A closure which receives the result when the API request finishes. The closure's parameter, `Result<CompletionsResult, Error>`, will contain either the `CompletionsResult` object with the generated completions, or an error if the request failed.
       - completion: A closure that is being called when all chunks are delivered or unrecoverable error occurred
    **/
    func completionsStream(query: CompletionsQuery, onResult: @escaping (Result<CompletionsResult, Error>) -> Void, completion: ((Error?) -> Void)?)
    
    /**
     This function sends an images query to the OpenAI API and retrieves generated images in response. The Images Generation API enables you to create various images or graphics using OpenAI's powerful deep learning models.
     
     Example:
     ```
     let query = ImagesQuery(prompt: "White cat with heterochromia sitting on the kitchen table", n: 1, size: "1024x1024")
     openAI.images(query: query) { result in
       //Handle result here
     }
     ```
     
     - Parameters:
       - query: An `ImagesQuery` object containing the input parameters for the API request. This includes the query parameters such as the text prompt, image size, and other settings.
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ImagesResult, Error>`, will contain either the `ImagesResult` object with the generated images, or an error if the request failed.
    **/
    func images(query: ImagesQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void)
    
    /**
     This function sends an image edit query to the OpenAI API and retrieves generated images in response. The Images Edit API enables you to edit images or graphics using OpenAI's powerful deep learning models.
     
     Example:
     ```
     let query = ImagesEditQuery(image: "@whitecat.png", prompt: "White cat with heterochromia sitting on the kitchen table with a bowl of food", n: 1, size: "1024x1024")
     openAI.imageEdits(query: query) { result in
       //Handle result here
     }
     ```
     
     - Parameters:
       - query: An `ImagesEditQuery` object containing the input parameters for the API request. This includes the query parameters such as the image to be edited, an image to be used a mask if applicable, text prompt, image size, and other settings.
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ImagesResult, Error>`, will contain either the `ImagesResult` object with the generated images, or an error if the request failed.
    **/
    func imageEdits(query: ImageEditsQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void)
    
    /**
     This function sends an image variation query to the OpenAI API and retrieves generated images in response. The Images Variations API enables you to create a variation of a given image using OpenAI's powerful deep learning models.
     
     Example:
     ```
     let query = ImagesVariationQuery(image: "@whitecat.png", n: 1, size: "1024x1024")
     openAI.imageVariations(query: query) { result in
       //Handle result here
     }
     ```
     
     - Parameters:
       - query: An `ImagesVariationQuery` object containing the input parameters for the API request. This includes the query parameters such as the image to use as a basis for the variation(s), image size, and other settings.
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ImagesResult, Error>`, will contain either the `ImagesResult` object with the generated images, or an error if the request failed.
    **/
    func imageVariations(query: ImageVariationsQuery, completion: @escaping (Result<ImagesResult, Error>) -> Void)
    
    /**
     This function sends an embeddings query to the OpenAI API and retrieves embeddings in response. The Embeddings API enables you to generate high-dimensional vector representations of texts, which can be used for various natural language processing tasks such as semantic similarity, clustering, and classification.
     
     Example:
     ```
     let query = EmbeddingsQuery(model: .textSearchBabbageDoc, input: "The food was delicious and the waiter...")
     openAI.embeddings(query: query) { result in
       //Handle response here
     }
     ```
     
     - Parameters:
       - query: An `EmbeddingsQuery` object containing the input parameters for the API request. This includes the list of text prompts to be converted into embeddings, the model to be used, and other settings.
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<EmbeddingsResult, Error>`, will contain either the `EmbeddingsResult` object with the generated embeddings, or an error if the request failed.
    **/
    func embeddings(query: EmbeddingsQuery, completion: @escaping (Result<EmbeddingsResult, Error>) -> Void)
    
    /**
     This function sends a chat query to the OpenAI API and retrieves chat conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI's powerful natural language models, like GPT-3.
     
     Example:
     ```
     let query = ChatQuery(model: .gpt3_5Turbo, messages: [.init(role: "user", content: "who are you")])
     openAI.chats(query: query) { result in
       //Handle response here
     }
     ```
     
     - Parameters:
       - query: A `ChatQuery` object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ChatResult, Error>`, will contain either the `ChatResult` object with the model's response to the conversation, or an error if the request failed.
    **/
    func chats(query: ChatQuery, completion: @escaping (Result<ChatResult, Error>) -> Void)
    
    /**
     This function sends a chat query to the OpenAI API and retrieves chat stream conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI's powerful natural language models, like GPT-3. The result is returned by chunks.
     
     Example:
     ```
     let query = ChatQuery(model: .gpt3_5Turbo, messages: [.init(role: "user", content: "who are you")])
     openAI.chats(query: query) { result in
       //Handle response here
     }
     ```
     
     - Parameters:
       - query: A `ChatQuery` object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.
       - onResult: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ChatStreamResult, Error>`, will contain either the `ChatStreamResult` object with the model's response to the conversation, or an error if the request failed.
       - completion: A closure that is being called when all chunks are delivered or unrecoverable error occurred
    **/
    func chatsStream(query: ChatQuery, onResult: @escaping (Result<ChatStreamResult, Error>) -> Void, completion: ((Error?) -> Void)?)
    
    /**
     This function sends an edits query to the OpenAI API and retrieves an edited version of the prompt based on the instruction given.
     
     Example:
     ```
     let query = EditsQuery(model: .gpt4, input: "What day of the wek is it?", instruction: "Fix the spelling mistakes")
     openAI.edits(query: query) { result in
       //Handle response here
     }
     ```
     
     - Parameters:
       - query: An `EditsQuery` object containing the input parameters for the API request. This includes the input to be edited, the instruction specifying how it should be edited, and other settings.
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<EditsResult, Error>`, will contain either the `EditsResult` object with the model's response to the queried edit, or an error if the request failed.
    **/
    func edits(query: EditsQuery, completion: @escaping (Result<EditsResult, Error>) -> Void)
    
    /**
     This function sends a model query to the OpenAI API and retrieves a model instance, providing owner information. The Models API in this usage enables you to gather detailed information on the model in question, like GPT-3.
     
     Example:
     ```
     let query = ModelQuery(model: .gpt3_5Turbo)
     openAI.model(query: query) { result in
       //Handle response here
     }
     ```
     
     - Parameters:
       - query: A `ModelQuery` object containing the input parameters for the API request, which is only the model to be queried.
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ModelResult, Error>`, will contain either the `ModelResult` object with more information about the model, or an error if the request failed.
    **/
    func model(query: ModelQuery, completion: @escaping (Result<ModelResult, Error>) -> Void)
    
    /**
     This function sends a models query to the OpenAI API and retrieves a list of models. The Models API in this usage enables you to list all the available models.
     
     Example:
     ```
     openAI.models() { result in
       //Handle response here
     }
     ```
     
     - Parameters:
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ModelsResult, Error>`, will contain either the `ModelsResult` object with the list of model types, or an error if the request failed.
    **/
    func models(completion: @escaping (Result<ModelsResult, Error>) -> Void)
    
    /**
     This function sends a moderations query to the OpenAI API and retrieves a list of category results to classify how text may violate OpenAI's Content Policy.
     
     Example:
     ```
     let query = ModerationsQuery(input: "I want to kill them.")
     openAI.moderations(query: query) { result in
       //Handle response here
     }
     ```
     
     - Parameters:
       - query: A `ModerationsQuery` object containing the input parameters for the API request. This includes the input text and optionally the model to be used.
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ModerationsResult, Error>`, will contain either the `ModerationsResult` object with the list of category results, or an error if the request failed.
    **/
    func moderations(query: ModerationsQuery, completion: @escaping (Result<ModerationsResult, Error>) -> Void)
    
    /**
     Transcribes audio data using OpenAI's audio transcription API and completes the operation asynchronously.
     
     - Parameter query: The `AudioTranscriptionQuery` instance, containing the information required for the transcription request.
     - Parameter completion: The completion handler to be executed upon completion of the transcription request.
                          Returns a `Result` of type `AudioTranscriptionResult` if successful, or an `Error` if an error occurs.
     **/
    func audioTranscriptions(query: AudioTranscriptionQuery, completion: @escaping (Result<AudioTranscriptionResult, Error>) -> Void)
    
    /**
     Translates audio data using OpenAI's audio translation API and completes the operation asynchronously.
     
     - Parameter query: The `AudioTranslationQuery` instance, containing the information required for the translation request.
     - Parameter completion: The completion handler to be executed upon completion of the translation request.
                          Returns a `Result` of type `AudioTranslationResult` if successful, or an `Error` if an error occurs.
     **/
    func audioTranslations(query: AudioTranslationQuery, completion: @escaping (Result<AudioTranslationResult, Error>) -> Void)
}
234 |
--------------------------------------------------------------------------------
/Tests/OpenAITests/OpenAITestsDecoder.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAITestsDecoder.swift
3 | //
4 | //
5 | // Created by Aled Samuel on 10/04/2023.
6 | //
7 |
8 | import XCTest
9 | @testable import OpenAI
10 |
11 | @available(iOS 13.0, *)
12 | @available(watchOS 6.0, *)
13 | @available(tvOS 13.0, *)
/// Round-trip decoding/encoding tests for the public API model types, using
/// canned JSON fixtures shaped like real OpenAI responses.
class OpenAITestsDecoder: XCTestCase {
    
    override func setUp() {
        super.setUp()
    }
    
    /// Decodes `jsonString` into `T` and asserts equality with `expectedValue`.
    ///
    /// NOTE(review): several fixtures below contain trailing commas (e.g. in
    /// `testChatCompletion`, `testEdits`, `testEmbeddings`). Darwin's
    /// JSONSerialization-backed decoder tolerates them — confirm this also
    /// holds wherever these tests run (Linux corelibs may be stricter).
    private func decode<T: Decodable & Equatable>(_ jsonString: String, _ expectedValue: T) throws {
        let data = jsonString.data(using: .utf8)!
        let decoded = try JSONDecoder().decode(T.self, from: data)
        XCTAssertEqual(decoded, expectedValue)
    }
    
    /// Parses encoded JSON into an NSDictionary so two serializations can be
    /// compared independent of key order. Force-casts: a non-object payload is
    /// a test bug and should crash the test.
    func jsonDataAsNSDictionary(_ data: Data) throws -> NSDictionary {
        return NSDictionary(dictionary: try JSONSerialization.jsonObject(with: data, options: []) as! [String: Any])
    }
    
    func testCompletions() async throws {
        let data = """
        {
          "id": "foo",
          "object": "text_completion",
          "created": 1589478378,
          "model": "text-davinci-003",
          "choices": [
            {
              "text": "Hello, world!",
              "index": 0,
              "logprobs": null,
              "finish_reason": "length"
            }
          ],
          "usage": {
            "prompt_tokens": 5,
            "completion_tokens": 7,
            "total_tokens": 12
          }
        }
        """
        
        let expectedValue = CompletionsResult(id: "foo", object: "text_completion", created: 1589478378, model: .textDavinci_003, choices: [
            .init(text: "Hello, world!", index: 0, finishReason: "length")
        ], usage: .init(promptTokens: 5, completionTokens: 7, totalTokens: 12))
        try decode(data, expectedValue)
    }
    
    func testImages() async throws {
        let data = """
        {
          "created": 1589478378,
          "data": [
            {
              "url": "https://foo.bar"
            },
            {
              "url": "https://bar.foo"
            },
            {
                "b64_json": "test"
            }
          ]
        }
        """
        
        let expectedValue = ImagesResult(created: 1589478378, data: [
            .init(url: "https://foo.bar", b64_json: nil),
            .init(url: "https://bar.foo", b64_json: nil),
            .init(url: nil, b64_json: "test")
        ])
        try decode(data, expectedValue)
    }
    
    func testChatCompletion() async throws {
        let data = """
        {
          "id": "chatcmpl-123",
          "object": "chat.completion",
          "created": 1677652288,
          "model": "gpt-4",
          "choices": [{
            "index": 0,
            "message": {
              "role": "assistant",
              "content": "Hello, world!",
            },
            "finish_reason": "stop"
          }],
          "usage": {
            "prompt_tokens": 9,
            "completion_tokens": 12,
            "total_tokens": 21
          }
        }
        """
        
        let expectedValue = ChatResult(id: "chatcmpl-123", object: "chat.completion", created: 1677652288, model: .gpt4, choices: [
            .init(index: 0, message: Chat(role: .assistant, content: "Hello, world!"), finishReason: "stop")
        ], usage: .init(promptTokens: 9, completionTokens: 12, totalTokens: 21))
        try decode(data, expectedValue)
    }
    
    func testImageQuery() async throws {
        let imageQuery = ImagesQuery(
            prompt: "test",
            model: .dall_e_2,
            responseFormat: .b64_json,
            n: 1,
            size: "10",
            style: "vivid",
            user: "user"
        )
        
        let expectedValue = """
        {
            "model": "dall-e-2",
            "prompt": "test",
            "n": 1,
            "size": "10",
            "style": "vivid",
            "user": "user",
            "response_format": "b64_json"
        }
        """
        
        // To compare serialized JSONs we first convert them both into NSDictionary which are comparable (unlike native swift dictionaries)
        let imageQueryAsDict = try jsonDataAsNSDictionary(JSONEncoder().encode(imageQuery))
        let expectedValueAsDict = try jsonDataAsNSDictionary(expectedValue.data(using: .utf8)!)
        
        XCTAssertEqual(imageQueryAsDict, expectedValueAsDict)
    }
    
    func testChatQueryWithFunctionCall() async throws {
        let chatQuery = ChatQuery(
            model: .gpt3_5Turbo,
            messages: [
                Chat(role: .user, content: "What's the weather like in Boston?")
            ],
            responseFormat: .init(type: .jsonObject),
            functions: [
                ChatFunctionDeclaration(
                    name: "get_current_weather",
                    description: "Get the current weather in a given location",
                    parameters:
                      JSONSchema(
                        type: .object,
                        properties: [
                          "location": .init(type: .string, description: "The city and state, e.g. San Francisco, CA"),
                          "unit": .init(type: .string, enumValues: ["celsius", "fahrenheit"])
                        ],
                        required: ["location"]
                      )
                )
            ]
        )
        let expectedValue = """
        {
          "model": "gpt-3.5-turbo",
          "messages": [
            { "role": "user", "content": "What's the weather like in Boston?" }
          ],
          "response_format": {
            "type": "json_object"
           },
          "functions": [
            {
              "name": "get_current_weather",
              "description": "Get the current weather in a given location",
              "parameters": {
                "type": "object",
                "properties": {
                  "location": {
                    "type": "string",
                    "description": "The city and state, e.g. San Francisco, CA"
                  },
                  "unit": { "type": "string", "enum": ["celsius", "fahrenheit"] }
                },
                "required": ["location"]
              }
            }
          ],
          "stream": false
        }
        """
        
        // To compare serialized JSONs we first convert them both into NSDictionary which are comparable (unlike native swift dictionaries)
        let chatQueryAsDict = try jsonDataAsNSDictionary(JSONEncoder().encode(chatQuery))
        let expectedValueAsDict = try jsonDataAsNSDictionary(expectedValue.data(using: .utf8)!)
        
        XCTAssertEqual(chatQueryAsDict, expectedValueAsDict)
    }
    
    func testChatCompletionWithFunctionCall() async throws {
        let data = """
        {
          "id": "chatcmpl-1234",
          "object": "chat.completion",
          "created": 1677652288,
          "model": "gpt-3.5-turbo",
          "choices": [
            {
              "index": 0,
              "message": {
                "role": "assistant",
                "content": null,
                "function_call": {
                  "name": "get_current_weather"
                }
              },
              "finish_reason": "function_call"
            }
          ],
          "usage": {
            "prompt_tokens": 82,
            "completion_tokens": 18,
            "total_tokens": 100
          }
        }
        """
        
        let expectedValue = ChatResult(
            id: "chatcmpl-1234",
            object: "chat.completion",
            created: 1677652288,
            model: .gpt3_5Turbo,
            choices: [
                .init(index: 0, message:
                        Chat(role: .assistant,
                             functionCall: ChatFunctionCall(name: "get_current_weather", arguments: nil)),
                      finishReason: "function_call")
            ],
            usage: .init(promptTokens: 82, completionTokens: 18, totalTokens: 100))
        try decode(data, expectedValue)
    }
    
    func testEdits() async throws {
        let data = """
        {
          "object": "edit",
          "created": 1589478378,
          "choices": [
            {
              "text": "What day of the week is it?",
              "index": 0,
            }
          ],
          "usage": {
            "prompt_tokens": 25,
            "completion_tokens": 32,
            "total_tokens": 57
          }
        }
        """
        
        let expectedValue = EditsResult(object: "edit", created: 1589478378, choices: [
            .init(text: "What day of the week is it?", index: 0)
        ], usage: .init(promptTokens: 25, completionTokens: 32, totalTokens: 57))
        try decode(data, expectedValue)
    }
    
    func testEmbeddings() async throws {
        let data = """
        {
          "object": "list",
          "data": [
            {
              "object": "embedding",
              "embedding": [
                0.0023064255,
                -0.009327292,
                -0.0028842222,
              ],
              "index": 0
            }
          ],
          "model": "text-embedding-ada-002",
          "usage": {
            "prompt_tokens": 8,
            "total_tokens": 8
          }
        }
        """
        
        let expectedValue = EmbeddingsResult(data: [
            .init(object: "embedding", embedding: [0.0023064255, -0.009327292, -0.0028842222], index: 0)
        ], model: .textEmbeddingAda, usage: .init(promptTokens: 8, totalTokens: 8))
        try decode(data, expectedValue)
    }
    
    func testModels() async throws {
        let data = """
        {
          "data": [
            {
              "id": "gpt-3.5-turbo",
              "object": "model",
              "owned_by": "organization-owner"
            },
            {
              "id": "gpt-4",
              "object": "model",
              "owned_by": "organization-owner"
            },
            {
              "id": "text-davinci-001",
              "object": "model",
              "owned_by": "openai"
            }
          ],
          "object": "list"
        }
        """
        
        let expectedValue = ModelsResult(data: [
            .init(id: .gpt3_5Turbo, object: "model", ownedBy: "organization-owner"),
            .init(id: .gpt4, object: "model", ownedBy: "organization-owner"),
            .init(id: .textDavinci_001, object: "model", ownedBy: "openai")
        ], object: "list")
        try decode(data, expectedValue)
    }
    
    func testModelType() async throws {
        let data = """
        {
          "id": "text-davinci-003",
          "object": "model",
          "owned_by": "openai"
        }
        """
        
        let expectedValue = ModelResult(id: .textDavinci_003, object: "model", ownedBy: "openai")
        try decode(data, expectedValue)
    }
    
    func testModerations() async throws {
        let data = """
        {
          "id": "modr-5MWoLO",
          "model": "text-moderation-001",
          "results": [
            {
              "categories": {
                "hate": false,
                "hate/threatening": true,
                "self-harm": false,
                "sexual": false,
                "sexual/minors": false,
                "violence": true,
                "violence/graphic": false
              },
              "category_scores": {
                "hate": 0.22714105248451233,
                "hate/threatening": 0.4132447838783264,
                "self-harm": 0.00523239187896251,
                "sexual": 0.01407341007143259,
                "sexual/minors": 0.0038522258400917053,
                "violence": 0.9223177433013916,
                "violence/graphic": 0.036865197122097015
              },
              "flagged": true
            }
          ]
        }
        """
        
        let expectedValue = ModerationsResult(id: "modr-5MWoLO", model: .moderation, results: [
            .init(categories: .init(hate: false, hateThreatening: true, selfHarm: false, sexual: false, sexualMinors: false, violence: true, violenceGraphic: false),
                  categoryScores: .init(hate: 0.22714105248451233, hateThreatening: 0.4132447838783264, selfHarm: 0.00523239187896251, sexual: 0.01407341007143259, sexualMinors: 0.0038522258400917053, violence: 0.9223177433013916, violenceGraphic: 0.036865197122097015),
                  flagged: true)
        ])
        try decode(data, expectedValue)
    }
    
    func testAudioTranscriptions() async throws {
        let data = """
        {
          "text": "Hello, world!"
        }
        """
        
        let expectedValue = AudioTranscriptionResult(text: "Hello, world!")
        try decode(data, expectedValue)
    }
    
    func testAudioTranslations() async throws {
        let data = """
        {
          "text": "Hello, world!"
        }
        """
        
        let expectedValue = AudioTranslationResult(text: "Hello, world!")
        try decode(data, expectedValue)
    }
}
407 |
--------------------------------------------------------------------------------
/Sources/OpenAI/Public/Models/ChatQuery.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatQuery.swift
3 | //
4 | //
5 | // Created by Sergii Kryvoblotskyi on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
/// The `response_format` request field: asks the model to emit either plain text
/// or a JSON object. See https://platform.openai.com/docs/guides/text-generation/json-mode
public struct ResponseFormat: Codable, Equatable {
    /// Convenience value requesting JSON-mode output.
    public static let jsonObject = ResponseFormat(type: .jsonObject)
    /// Convenience value requesting plain-text output.
    public static let text = ResponseFormat(type: .text)

    /// The requested output format.
    public let type: Self.ResponseFormatType

    /// Wire values accepted by the API for the `type` key.
    public enum ResponseFormatType: String, Codable, Equatable {
        case jsonObject = "json_object"
        case text
    }
}
22 |
/// A single message in a chat conversation.
public struct Chat: Codable, Equatable {
    /// Who authored the message.
    public let role: Role
    /// The contents of the message. `content` is required for all messages except assistant messages with function calls.
    public let content: String?
    /// The name of the author of this message. `name` is required if role is `function`, and it should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.
    public let name: String?
    /// The function call produced by the model, if any.
    public let functionCall: ChatFunctionCall?

    /// Message author roles recognized by the API.
    public enum Role: String, Codable, Equatable {
        case system
        case assistant
        case user
        case function
    }

    enum CodingKeys: String, CodingKey {
        case role
        case content
        case name
        case functionCall = "function_call"
    }

    public init(role: Role, content: String? = nil, name: String? = nil, functionCall: ChatFunctionCall? = nil) {
        self.role = role
        self.content = content
        self.name = name
        self.functionCall = functionCall
    }

    public func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        try container.encode(role, forKey: .role)
        try container.encodeIfPresent(name, forKey: .name)
        try container.encodeIfPresent(functionCall, forKey: .functionCall)

        // A function-calling response must carry an explicit `"content": null`, so
        // `content` is emitted (possibly as JSON null) whenever it is non-nil OR the
        // message is an assistant message carrying a function call.
        // See https://openai.com/blog/function-calling-and-other-api-updates
        let mustEmitContent = content != nil || (role == .assistant && functionCall != nil)
        if mustEmitContent {
            try container.encode(content, forKey: .content)
        }
    }
}
71 |
/// A function invocation emitted by the model (or echoed back to it).
public struct ChatFunctionCall: Codable, Equatable {
    /// The name of the function to call.
    public let name: String?
    /// The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.
    public let arguments: String?

    public init(name: String?, arguments: String?) {
        self.arguments = arguments
        self.name = name
    }
}
83 |
84 |
/// A (partial) JSON Schema used to describe a function's parameters.
/// See the [guide](/docs/guides/gpt/function-calling) for examples, and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format.
public struct JSONSchema: Codable, Equatable {
    /// The JSON type of the value being described (usually `.object` for a top-level schema).
    public let type: JSONType
    /// Schemas of the object's named properties.
    public let properties: [String: Property]?
    /// Names of the properties that must be present.
    public let required: [String]?
    /// Regular expression a string value must match.
    public let pattern: String?
    /// A fixed value the instance must equal.
    public let const: String?
    /// The set of allowed values (serialized under JSON Schema's `enum` keyword).
    public let enumValues: [String]?
    /// A numeric value must be an integer multiple of this number.
    public let multipleOf: Int?
    /// Inclusive lower bound for numeric values.
    /// NOTE(review): declared `Int?` here but `Double?` in `Property`/`Items` —
    /// looks like an oversight; changing it would break the public initializer.
    public let minimum: Int?
    /// Inclusive upper bound for numeric values (see note on `minimum`).
    public let maximum: Int?

    private enum CodingKeys: String, CodingKey {
        case type, properties, required, pattern, const
        case enumValues = "enum"
        case multipleOf, minimum, maximum
    }

    /// Schema for a single named property of an object.
    public struct Property: Codable, Equatable {
        /// The JSON type of this property.
        public let type: JSONType
        /// Human-readable description shown to the model.
        public let description: String?
        /// JSON Schema `format` hint (e.g. "date-time").
        public let format: String?
        /// Schema of the elements when `type` is `.array`.
        public let items: Items?
        /// Required sub-property names when `type` is `.object`.
        public let required: [String]?
        /// Regular expression a string value must match.
        public let pattern: String?
        /// A fixed value the property must equal.
        public let const: String?
        /// The set of allowed values (serialized under `enum`).
        public let enumValues: [String]?
        /// A numeric value must be an integer multiple of this number.
        public let multipleOf: Int?
        /// Inclusive lower bound for numeric values.
        public let minimum: Double?
        /// Inclusive upper bound for numeric values.
        public let maximum: Double?
        /// Minimum number of elements when `type` is `.array`.
        public let minItems: Int?
        /// Maximum number of elements when `type` is `.array`.
        public let maxItems: Int?
        /// Whether array elements must be unique.
        public let uniqueItems: Bool?

        private enum CodingKeys: String, CodingKey {
            case type, description, format, items, required, pattern, const
            case enumValues = "enum"
            case multipleOf, minimum, maximum
            case minItems, maxItems, uniqueItems
        }

        public init(type: JSONType, description: String? = nil, format: String? = nil, items: Items? = nil, required: [String]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Double? = nil, maximum: Double? = nil, minItems: Int? = nil, maxItems: Int? = nil, uniqueItems: Bool? = nil) {
            self.type = type
            self.description = description
            self.format = format
            self.items = items
            self.required = required
            self.pattern = pattern
            self.const = const
            self.enumValues = enumValues
            self.multipleOf = multipleOf
            self.minimum = minimum
            self.maximum = maximum
            self.minItems = minItems
            self.maxItems = maxItems
            self.uniqueItems = uniqueItems
        }
    }

    /// The primitive JSON Schema type names.
    public enum JSONType: String, Codable {
        case integer = "integer"
        case string = "string"
        case boolean = "boolean"
        case array = "array"
        case object = "object"
        case number = "number"
        case `null` = "null"
    }

    /// Schema for the elements of an array property.
    public struct Items: Codable, Equatable {
        /// The JSON type of each element.
        public let type: JSONType
        /// Schemas of the element's named properties when `type` is `.object`.
        public let properties: [String: Property]?
        /// Regular expression a string element must match.
        public let pattern: String?
        /// A fixed value each element must equal.
        public let const: String?
        /// The set of allowed values (serialized under `enum`).
        public let enumValues: [String]?
        /// A numeric element must be an integer multiple of this number.
        public let multipleOf: Int?
        /// Inclusive lower bound for numeric elements.
        public let minimum: Double?
        /// Inclusive upper bound for numeric elements.
        public let maximum: Double?
        /// Minimum number of elements in a nested array.
        public let minItems: Int?
        /// Maximum number of elements in a nested array.
        public let maxItems: Int?
        /// Whether nested array elements must be unique.
        public let uniqueItems: Bool?

        private enum CodingKeys: String, CodingKey {
            case type, properties, pattern, const
            case enumValues = "enum"
            case multipleOf, minimum, maximum, minItems, maxItems, uniqueItems
        }

        public init(type: JSONType, properties: [String : Property]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Double? = nil, maximum: Double? = nil, minItems: Int? = nil, maxItems: Int? = nil, uniqueItems: Bool? = nil) {
            self.type = type
            self.properties = properties
            self.pattern = pattern
            self.const = const
            self.enumValues = enumValues
            self.multipleOf = multipleOf
            self.minimum = minimum
            self.maximum = maximum
            self.minItems = minItems
            self.maxItems = maxItems
            self.uniqueItems = uniqueItems
        }
    }

    public init(type: JSONType, properties: [String : Property]? = nil, required: [String]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Int? = nil, maximum: Int? = nil) {
        self.type = type
        self.properties = properties
        self.required = required
        self.pattern = pattern
        self.const = const
        self.enumValues = enumValues
        self.multipleOf = multipleOf
        self.minimum = minimum
        self.maximum = maximum
    }
}
200 |
/// Declares a function the model may choose to call, described by a JSON Schema.
public struct ChatFunctionDeclaration: Codable, Equatable {
    /// The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.
    public let name: String

    /// The description of what the function does.
    public let description: String

    /// The parameters the functions accepts, described as a JSON Schema object.
    public let parameters: JSONSchema

    public init(name: String, description: String, parameters: JSONSchema) {
        self.parameters = parameters
        self.description = description
        self.name = name
    }
}
217 |
/// A function call payload in a query context.
/// NOTE(review): duplicates the shape of `ChatFunctionCall` and declares no public
/// initializer (the synthesized memberwise init is internal-only) — confirm it is
/// still referenced before relying on it from outside the module.
public struct ChatQueryFunctionCall: Codable, Equatable {
    /// The name of the function to call.
    public let name: String?
    /// The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.
    public let arguments: String?
}
224 |
/// A request to the chat completions endpoint.
public struct ChatQuery: Equatable, Codable, Streamable {
    /// ID of the model to use. Must be a chat-capable model; see the models endpoint for the current list.
    public let model: Model
    /// An object specifying the format that the model must output.
    public let responseFormat: ResponseFormat?
    /// The messages to generate chat completions for
    public let messages: [Chat]
    /// A list of functions the model may generate JSON inputs for.
    public let functions: [ChatFunctionDeclaration]?
    /// Controls how the model responds to function calls. "none" means the model does not call a function, and responds to the end-user. "auto" means the model can pick between and end-user or calling a function. Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. "none" is the default when no functions are present. "auto" is the default if functions are present.
    public let functionCall: FunctionCall?
    /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or top_p but not both.
    public let temperature: Double?
    /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
    public let topP: Double?
    /// How many chat completion choices to generate for each input message.
    public let n: Int?
    /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.
    public let stop: [String]?
    /// The maximum number of tokens to generate in the completion.
    public let maxTokens: Int?
    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.
    public let presencePenalty: Double?
    /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
    public let frequencyPenalty: Double?
    /// Modify the likelihood of specified tokens appearing in the completion.
    public let logitBias: [String: Int]?
    /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
    public let user: String?

    // Set through the `Streamable` machinery; selects server-sent-events delivery.
    var stream: Bool = false

    /// The `function_call` request field: "none", "auto", or a specific function by name.
    public enum FunctionCall: Codable, Equatable {
        case none
        case auto
        case function(String)

        enum CodingKeys: String, CodingKey {
            case none = "none"
            case auto = "auto"
            case function = "name"
        }

        /// Custom decoding that mirrors `encode(to:)`: accepts either the bare strings
        /// "none"/"auto" or an object of the form `{"name": "my_function"}`.
        /// Without this, the compiler-synthesized decoder cannot read the payloads the
        /// custom encoder below produces, so `Codable` round-trips of a `ChatQuery` fail.
        public init(from decoder: Decoder) throws {
            if let single = try? decoder.singleValueContainer(),
               let raw = try? single.decode(String.self) {
                switch raw {
                case CodingKeys.none.rawValue:
                    self = .none
                case CodingKeys.auto.rawValue:
                    self = .auto
                default:
                    throw DecodingError.dataCorruptedError(
                        in: single,
                        debugDescription: "Unknown function_call value: \(raw)"
                    )
                }
                return
            }
            let keyed = try decoder.container(keyedBy: CodingKeys.self)
            self = .function(try keyed.decode(String.self, forKey: .function))
        }

        public func encode(to encoder: Encoder) throws {
            switch self {
            case .none:
                // Encoded as the bare string "none".
                var container = encoder.singleValueContainer()
                try container.encode(CodingKeys.none.rawValue)
            case .auto:
                // Encoded as the bare string "auto".
                var container = encoder.singleValueContainer()
                try container.encode(CodingKeys.auto.rawValue)
            case .function(let name):
                // Encoded as {"name": "my_function"}.
                var container = encoder.container(keyedBy: CodingKeys.self)
                try container.encode(name, forKey: .function)
            }
        }
    }

    enum CodingKeys: String, CodingKey {
        case model
        case messages
        case functions
        case functionCall = "function_call"
        case temperature
        case topP = "top_p"
        case n
        case stream
        case stop
        case maxTokens = "max_tokens"
        case presencePenalty = "presence_penalty"
        case frequencyPenalty = "frequency_penalty"
        case logitBias = "logit_bias"
        case user
        case responseFormat = "response_format"
    }

    public init(model: Model, messages: [Chat], responseFormat: ResponseFormat? = nil, functions: [ChatFunctionDeclaration]? = nil, functionCall: FunctionCall? = nil, temperature: Double? = nil, topP: Double? = nil, n: Int? = nil, stop: [String]? = nil, maxTokens: Int? = nil, presencePenalty: Double? = nil, frequencyPenalty: Double? = nil, logitBias: [String : Int]? = nil, user: String? = nil, stream: Bool = false) {
        self.model = model
        self.messages = messages
        self.responseFormat = responseFormat
        self.functions = functions
        self.functionCall = functionCall
        self.temperature = temperature
        self.topP = topP
        self.n = n
        self.stop = stop
        self.maxTokens = maxTokens
        self.presencePenalty = presencePenalty
        self.frequencyPenalty = frequencyPenalty
        self.logitBias = logitBias
        self.user = user
        self.stream = stream
    }
}
319 |
--------------------------------------------------------------------------------
/Demo/Demo.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 56;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | EFBC534029DFB4EA00334182 /* DemoApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFBC533F29DFB4EA00334182 /* DemoApp.swift */; };
11 | EFBC534429DFB4EB00334182 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = EFBC534329DFB4EB00334182 /* Assets.xcassets */; };
12 | EFBC534829DFB4EB00334182 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = EFBC534729DFB4EB00334182 /* Preview Assets.xcassets */; };
13 | EFBC536429DFEC2600334182 /* DemoChat in Frameworks */ = {isa = PBXBuildFile; productRef = EFBC536329DFEC2600334182 /* DemoChat */; };
14 | EFBC536629DFFF3200334182 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFBC536529DFFF3200334182 /* ContentView.swift */; };
15 | EFBC536829E0047400334182 /* APIKeyModalView.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFBC536729E0047400334182 /* APIKeyModalView.swift */; };
16 | EFBC536C29E0105800334182 /* SwiftUIAdditions.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFBC536B29E0105800334182 /* SwiftUIAdditions.swift */; };
17 | EFE6B73329E0D47500884A87 /* APIProvidedView.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFE6B73229E0D47500884A87 /* APIProvidedView.swift */; };
18 | /* End PBXBuildFile section */
19 |
20 | /* Begin PBXFileReference section */
21 | EFBC533C29DFB4EA00334182 /* Demo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Demo.app; sourceTree = BUILT_PRODUCTS_DIR; };
22 | EFBC533F29DFB4EA00334182 /* DemoApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoApp.swift; sourceTree = ""; };
23 | EFBC534329DFB4EB00334182 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; };
24 | EFBC534529DFB4EB00334182 /* Demo.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = Demo.entitlements; sourceTree = ""; };
25 | EFBC534729DFB4EB00334182 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; };
26 | EFBC536129DFEA9900334182 /* DemoChat */ = {isa = PBXFileReference; lastKnownFileType = wrapper; path = DemoChat; sourceTree = ""; };
27 | EFBC536529DFFF3200334182 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; };
28 | EFBC536729E0047400334182 /* APIKeyModalView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIKeyModalView.swift; sourceTree = ""; };
29 | EFBC536B29E0105800334182 /* SwiftUIAdditions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SwiftUIAdditions.swift; sourceTree = ""; };
30 | EFE6B73229E0D47500884A87 /* APIProvidedView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIProvidedView.swift; sourceTree = ""; };
31 | /* End PBXFileReference section */
32 |
33 | /* Begin PBXFrameworksBuildPhase section */
34 | EFBC533929DFB4EA00334182 /* Frameworks */ = {
35 | isa = PBXFrameworksBuildPhase;
36 | buildActionMask = 2147483647;
37 | files = (
38 | EFBC536429DFEC2600334182 /* DemoChat in Frameworks */,
39 | );
40 | runOnlyForDeploymentPostprocessing = 0;
41 | };
42 | /* End PBXFrameworksBuildPhase section */
43 |
44 | /* Begin PBXGroup section */
45 | EFBC533329DFB4EA00334182 = {
46 | isa = PBXGroup;
47 | children = (
48 | EFBC535F29DFCE0700334182 /* Packages */,
49 | EFBC533E29DFB4EA00334182 /* App */,
50 | EFBC533D29DFB4EA00334182 /* Products */,
51 | EFBC536229DFEC2600334182 /* Frameworks */,
52 | );
53 | sourceTree = "";
54 | };
55 | EFBC533D29DFB4EA00334182 /* Products */ = {
56 | isa = PBXGroup;
57 | children = (
58 | EFBC533C29DFB4EA00334182 /* Demo.app */,
59 | );
60 | name = Products;
61 | sourceTree = "";
62 | };
63 | EFBC533E29DFB4EA00334182 /* App */ = {
64 | isa = PBXGroup;
65 | children = (
66 | EFBC533F29DFB4EA00334182 /* DemoApp.swift */,
67 | EFBC536729E0047400334182 /* APIKeyModalView.swift */,
68 | EFE6B73229E0D47500884A87 /* APIProvidedView.swift */,
69 | EFBC536529DFFF3200334182 /* ContentView.swift */,
70 | EFBC534329DFB4EB00334182 /* Assets.xcassets */,
71 | EFBC534529DFB4EB00334182 /* Demo.entitlements */,
72 | EFBC534629DFB4EB00334182 /* Preview Content */,
73 | EFBC536B29E0105800334182 /* SwiftUIAdditions.swift */,
74 | );
75 | path = App;
76 | sourceTree = "";
77 | };
78 | EFBC534629DFB4EB00334182 /* Preview Content */ = {
79 | isa = PBXGroup;
80 | children = (
81 | EFBC534729DFB4EB00334182 /* Preview Assets.xcassets */,
82 | );
83 | path = "Preview Content";
84 | sourceTree = "";
85 | };
86 | EFBC535F29DFCE0700334182 /* Packages */ = {
87 | isa = PBXGroup;
88 | children = (
89 | EFBC536129DFEA9900334182 /* DemoChat */,
90 | );
91 | name = Packages;
92 | sourceTree = "";
93 | };
94 | EFBC536229DFEC2600334182 /* Frameworks */ = {
95 | isa = PBXGroup;
96 | children = (
97 | );
98 | name = Frameworks;
99 | sourceTree = "";
100 | };
101 | /* End PBXGroup section */
102 |
103 | /* Begin PBXNativeTarget section */
104 | EFBC533B29DFB4EA00334182 /* Demo */ = {
105 | isa = PBXNativeTarget;
106 | buildConfigurationList = EFBC534B29DFB4EB00334182 /* Build configuration list for PBXNativeTarget "Demo" */;
107 | buildPhases = (
108 | EFBC533829DFB4EA00334182 /* Sources */,
109 | EFBC533929DFB4EA00334182 /* Frameworks */,
110 | EFBC533A29DFB4EA00334182 /* Resources */,
111 | );
112 | buildRules = (
113 | );
114 | dependencies = (
115 | );
116 | name = Demo;
117 | packageProductDependencies = (
118 | EFBC536329DFEC2600334182 /* DemoChat */,
119 | );
120 | productName = Demo;
121 | productReference = EFBC533C29DFB4EA00334182 /* Demo.app */;
122 | productType = "com.apple.product-type.application";
123 | };
124 | /* End PBXNativeTarget section */
125 |
126 | /* Begin PBXProject section */
127 | EFBC533429DFB4EA00334182 /* Project object */ = {
128 | isa = PBXProject;
129 | attributes = {
130 | BuildIndependentTargetsInParallel = 1;
131 | LastSwiftUpdateCheck = 1430;
132 | LastUpgradeCheck = 1430;
133 | TargetAttributes = {
134 | EFBC533B29DFB4EA00334182 = {
135 | CreatedOnToolsVersion = 14.3;
136 | };
137 | };
138 | };
139 | buildConfigurationList = EFBC533729DFB4EA00334182 /* Build configuration list for PBXProject "Demo" */;
140 | compatibilityVersion = "Xcode 14.0";
141 | developmentRegion = en;
142 | hasScannedForEncodings = 0;
143 | knownRegions = (
144 | en,
145 | Base,
146 | );
147 | mainGroup = EFBC533329DFB4EA00334182;
148 | productRefGroup = EFBC533D29DFB4EA00334182 /* Products */;
149 | projectDirPath = "";
150 | projectRoot = "";
151 | targets = (
152 | EFBC533B29DFB4EA00334182 /* Demo */,
153 | );
154 | };
155 | /* End PBXProject section */
156 |
157 | /* Begin PBXResourcesBuildPhase section */
158 | EFBC533A29DFB4EA00334182 /* Resources */ = {
159 | isa = PBXResourcesBuildPhase;
160 | buildActionMask = 2147483647;
161 | files = (
162 | EFBC534829DFB4EB00334182 /* Preview Assets.xcassets in Resources */,
163 | EFBC534429DFB4EB00334182 /* Assets.xcassets in Resources */,
164 | );
165 | runOnlyForDeploymentPostprocessing = 0;
166 | };
167 | /* End PBXResourcesBuildPhase section */
168 |
169 | /* Begin PBXSourcesBuildPhase section */
170 | EFBC533829DFB4EA00334182 /* Sources */ = {
171 | isa = PBXSourcesBuildPhase;
172 | buildActionMask = 2147483647;
173 | files = (
174 | EFBC536629DFFF3200334182 /* ContentView.swift in Sources */,
175 | EFBC536C29E0105800334182 /* SwiftUIAdditions.swift in Sources */,
176 | EFBC534029DFB4EA00334182 /* DemoApp.swift in Sources */,
177 | EFE6B73329E0D47500884A87 /* APIProvidedView.swift in Sources */,
178 | EFBC536829E0047400334182 /* APIKeyModalView.swift in Sources */,
179 | );
180 | runOnlyForDeploymentPostprocessing = 0;
181 | };
182 | /* End PBXSourcesBuildPhase section */
183 |
184 | /* Begin XCBuildConfiguration section */
185 | EFBC534929DFB4EB00334182 /* Debug */ = {
186 | isa = XCBuildConfiguration;
187 | buildSettings = {
188 | ALWAYS_SEARCH_USER_PATHS = NO;
189 | CLANG_ANALYZER_NONNULL = YES;
190 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
191 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
192 | CLANG_ENABLE_MODULES = YES;
193 | CLANG_ENABLE_OBJC_ARC = YES;
194 | CLANG_ENABLE_OBJC_WEAK = YES;
195 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
196 | CLANG_WARN_BOOL_CONVERSION = YES;
197 | CLANG_WARN_COMMA = YES;
198 | CLANG_WARN_CONSTANT_CONVERSION = YES;
199 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
200 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
201 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
202 | CLANG_WARN_EMPTY_BODY = YES;
203 | CLANG_WARN_ENUM_CONVERSION = YES;
204 | CLANG_WARN_INFINITE_RECURSION = YES;
205 | CLANG_WARN_INT_CONVERSION = YES;
206 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
207 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
208 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
209 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
210 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
211 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
212 | CLANG_WARN_STRICT_PROTOTYPES = YES;
213 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
214 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
215 | CLANG_WARN_UNREACHABLE_CODE = YES;
216 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
217 | COPY_PHASE_STRIP = NO;
218 | DEBUG_INFORMATION_FORMAT = dwarf;
219 | ENABLE_STRICT_OBJC_MSGSEND = YES;
220 | ENABLE_TESTABILITY = YES;
221 | GCC_C_LANGUAGE_STANDARD = gnu11;
222 | GCC_DYNAMIC_NO_PIC = NO;
223 | GCC_NO_COMMON_BLOCKS = YES;
224 | GCC_OPTIMIZATION_LEVEL = 0;
225 | GCC_PREPROCESSOR_DEFINITIONS = (
226 | "DEBUG=1",
227 | "$(inherited)",
228 | );
229 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
230 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
231 | GCC_WARN_UNDECLARED_SELECTOR = YES;
232 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
233 | GCC_WARN_UNUSED_FUNCTION = YES;
234 | GCC_WARN_UNUSED_VARIABLE = YES;
235 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
236 | MTL_FAST_MATH = YES;
237 | ONLY_ACTIVE_ARCH = YES;
238 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
239 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
240 | };
241 | name = Debug;
242 | };
243 | EFBC534A29DFB4EB00334182 /* Release */ = {
244 | isa = XCBuildConfiguration;
245 | buildSettings = {
246 | ALWAYS_SEARCH_USER_PATHS = NO;
247 | CLANG_ANALYZER_NONNULL = YES;
248 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
249 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
250 | CLANG_ENABLE_MODULES = YES;
251 | CLANG_ENABLE_OBJC_ARC = YES;
252 | CLANG_ENABLE_OBJC_WEAK = YES;
253 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
254 | CLANG_WARN_BOOL_CONVERSION = YES;
255 | CLANG_WARN_COMMA = YES;
256 | CLANG_WARN_CONSTANT_CONVERSION = YES;
257 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
258 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
259 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
260 | CLANG_WARN_EMPTY_BODY = YES;
261 | CLANG_WARN_ENUM_CONVERSION = YES;
262 | CLANG_WARN_INFINITE_RECURSION = YES;
263 | CLANG_WARN_INT_CONVERSION = YES;
264 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
265 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
266 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
267 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
268 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
269 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
270 | CLANG_WARN_STRICT_PROTOTYPES = YES;
271 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
272 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
273 | CLANG_WARN_UNREACHABLE_CODE = YES;
274 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
275 | COPY_PHASE_STRIP = NO;
276 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
277 | ENABLE_NS_ASSERTIONS = NO;
278 | ENABLE_STRICT_OBJC_MSGSEND = YES;
279 | GCC_C_LANGUAGE_STANDARD = gnu11;
280 | GCC_NO_COMMON_BLOCKS = YES;
281 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
282 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
283 | GCC_WARN_UNDECLARED_SELECTOR = YES;
284 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
285 | GCC_WARN_UNUSED_FUNCTION = YES;
286 | GCC_WARN_UNUSED_VARIABLE = YES;
287 | MTL_ENABLE_DEBUG_INFO = NO;
288 | MTL_FAST_MATH = YES;
289 | SWIFT_COMPILATION_MODE = wholemodule;
290 | SWIFT_OPTIMIZATION_LEVEL = "-O";
291 | };
292 | name = Release;
293 | };
294 | EFBC534C29DFB4EB00334182 /* Debug */ = {
295 | isa = XCBuildConfiguration;
296 | buildSettings = {
297 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
298 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
299 | CODE_SIGN_ENTITLEMENTS = App/Demo.entitlements;
300 | CODE_SIGN_STYLE = Automatic;
301 | CURRENT_PROJECT_VERSION = 1;
302 | ENABLE_PREVIEWS = YES;
303 | GENERATE_INFOPLIST_FILE = YES;
304 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
305 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
306 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
307 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
308 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
309 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
310 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
311 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
312 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
313 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
314 | IPHONEOS_DEPLOYMENT_TARGET = 16.4;
315 | LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
316 | "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
317 | MACOSX_DEPLOYMENT_TARGET = 13.3;
318 | MARKETING_VERSION = 1.0;
319 | PRODUCT_BUNDLE_IDENTIFIER = openAI.MacPaw.Demo;
320 | PRODUCT_NAME = "$(TARGET_NAME)";
321 | SDKROOT = auto;
322 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
323 | SUPPORTS_MACCATALYST = NO;
324 | SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO;
325 | SWIFT_EMIT_LOC_STRINGS = YES;
326 | SWIFT_VERSION = 5.0;
327 | TARGETED_DEVICE_FAMILY = "1,2";
328 | };
329 | name = Debug;
330 | };
331 | EFBC534D29DFB4EB00334182 /* Release */ = {
332 | isa = XCBuildConfiguration;
333 | buildSettings = {
334 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
335 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
336 | CODE_SIGN_ENTITLEMENTS = App/Demo.entitlements;
337 | CODE_SIGN_STYLE = Automatic;
338 | CURRENT_PROJECT_VERSION = 1;
339 | ENABLE_PREVIEWS = YES;
340 | GENERATE_INFOPLIST_FILE = YES;
341 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
342 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
343 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
344 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
345 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
346 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
347 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
348 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
349 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
350 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
351 | IPHONEOS_DEPLOYMENT_TARGET = 16.4;
352 | LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
353 | "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
354 | MACOSX_DEPLOYMENT_TARGET = 13.3;
355 | MARKETING_VERSION = 1.0;
356 | PRODUCT_BUNDLE_IDENTIFIER = openAI.MacPaw.Demo;
357 | PRODUCT_NAME = "$(TARGET_NAME)";
358 | SDKROOT = auto;
359 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
360 | SUPPORTS_MACCATALYST = NO;
361 | SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO;
362 | SWIFT_EMIT_LOC_STRINGS = YES;
363 | SWIFT_VERSION = 5.0;
364 | TARGETED_DEVICE_FAMILY = "1,2";
365 | };
366 | name = Release;
367 | };
368 | /* End XCBuildConfiguration section */
369 |
370 | /* Begin XCConfigurationList section */
371 | EFBC533729DFB4EA00334182 /* Build configuration list for PBXProject "Demo" */ = {
372 | isa = XCConfigurationList;
373 | buildConfigurations = (
374 | EFBC534929DFB4EB00334182 /* Debug */,
375 | EFBC534A29DFB4EB00334182 /* Release */,
376 | );
377 | defaultConfigurationIsVisible = 0;
378 | defaultConfigurationName = Release;
379 | };
380 | EFBC534B29DFB4EB00334182 /* Build configuration list for PBXNativeTarget "Demo" */ = {
381 | isa = XCConfigurationList;
382 | buildConfigurations = (
383 | EFBC534C29DFB4EB00334182 /* Debug */,
384 | EFBC534D29DFB4EB00334182 /* Release */,
385 | );
386 | defaultConfigurationIsVisible = 0;
387 | defaultConfigurationName = Release;
388 | };
389 | /* End XCConfigurationList section */
390 |
391 | /* Begin XCSwiftPackageProductDependency section */
392 | EFBC536329DFEC2600334182 /* DemoChat */ = {
393 | isa = XCSwiftPackageProductDependency;
394 | productName = DemoChat;
395 | };
396 | /* End XCSwiftPackageProductDependency section */
397 | };
398 | rootObject = EFBC533429DFB4EA00334182 /* Project object */;
399 | }
400 |
--------------------------------------------------------------------------------