├── .gitattributes
├── .gitignore
├── .spi.yml
├── .swiftpm
└── xcode
│ └── package.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ └── IDEWorkspaceChecks.plist
├── LICENSE
├── Package.resolved
├── Package.swift
├── README.md
├── Sources
└── openai-async-image-swiftui
│ ├── OpenAIAsyncImage.swift
│ ├── enum
│ ├── AsyncImageErrors.swift
│ ├── DalleModel.swift
│ ├── ImageState.swift
│ ├── OpenAIImageSize.swift
│ └── ResponseFormat.swift
│ ├── environmentKey
│ └── OpenAIAsyncImageLoaderKey.swift
│ ├── model
│ ├── Input.swift
│ └── Output.swift
│ ├── net
│ └── OpenAIImageEndpoint.swift
│ ├── protocol
│ ├── IOpenAIImageEndpoint.swift
│ └── IOpenAILoader.swift
│ └── viewModel
│ └── OpenAIDefaultLoader.swift
├── Tests
└── openai-async-image-swiftuiTests
│ └── openai_async_image_swiftuiTests.swift
└── image
├── appletv_art.png
├── sun_11.png
└── sun_watch.png
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | ## User settings
6 | xcuserdata/
7 |
8 | ## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
9 | *.xcscmblueprint
10 | *.xccheckout
11 |
12 | ## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
13 | build/
14 | DerivedData/
15 | *.moved-aside
16 | *.pbxuser
17 | !default.pbxuser
18 | *.mode1v3
19 | !default.mode1v3
20 | *.mode2v3
21 | !default.mode2v3
22 | *.perspectivev3
23 | !default.perspectivev3
24 |
25 | ## Obj-C/Swift specific
26 | *.hmap
27 |
28 | ## App packaging
29 | *.ipa
30 | *.dSYM.zip
31 | *.dSYM
32 |
33 | ## Playgrounds
34 | timeline.xctimeline
35 | playground.xcworkspace
36 |
37 | # Swift Package Manager
38 | #
39 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
40 | # Packages/
41 | # Package.pins
42 | # Package.resolved
43 | # *.xcodeproj
44 | #
45 | # Xcode automatically generates this directory with a .xcworkspacedata file and xcuserdata
46 | # hence it is not needed unless you have added a package configuration file to your project
47 | # .swiftpm
48 |
49 | .build/
50 |
51 | # CocoaPods
52 | #
53 | # We recommend against adding the Pods directory to your .gitignore. However
54 | # you should judge for yourself, the pros and cons are mentioned at:
55 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
56 | #
57 | # Pods/
58 | #
59 | # Add this line if you want to avoid checking in source code from the Xcode workspace
60 | # *.xcworkspace
61 |
62 | # Carthage
63 | #
64 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
65 | # Carthage/Checkouts
66 |
67 | Carthage/Build/
68 |
69 | # Accio dependency management
70 | Dependencies/
71 | .accio/
72 |
73 | # fastlane
74 | #
75 | # It is recommended to not store the screenshots in the git repo.
76 | # Instead, use fastlane to re-generate the screenshots whenever they are needed.
77 | # For more information about the recommended setup visit:
78 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
79 |
80 | fastlane/report.xml
81 | fastlane/Preview.html
82 | fastlane/screenshots/**/*.png
83 | fastlane/test_output
84 |
85 | # Code Injection
86 | #
87 | # After new code Injection tools there's a generated folder /iOSInjectionProject
88 | # https://github.com/johnno1962/injectionforxcode
89 |
90 | iOSInjectionProject/
91 | .DS_Store
92 |
--------------------------------------------------------------------------------
/.spi.yml:
--------------------------------------------------------------------------------
1 | version: 1
2 | builder:
3 | configs:
4 | - documentation_targets: [openai-async-image-swiftui]
5 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 | 
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Igor Shelopaev
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "pins" : [
3 | {
4 | "identity" : "async-http-client",
5 | "kind" : "remoteSourceControl",
6 | "location" : "https://github.com/swiftuiux/async-http-client.git",
7 | "state" : {
8 | "revision" : "936a6e953d3c3e05a14c3d852fea9955e57c9854",
9 | "version" : "1.5.0"
10 | }
11 | },
12 | {
13 | "identity" : "async-task",
14 | "kind" : "remoteSourceControl",
15 | "location" : "https://github.com/swiftuiux/async-task.git",
16 | "state" : {
17 | "revision" : "d05dc1ec967813392da38e3501dfe666098baaec",
18 | "version" : "1.2.5"
19 | }
20 | },
21 | {
22 | "identity" : "retry-policy-service",
23 | "kind" : "remoteSourceControl",
24 | "location" : "https://github.com/swiftuiux/retry-policy-service.git",
25 | "state" : {
26 | "revision" : "2a6a1f057fbf77337dfc73db98bd3d538127b3e2",
27 | "version" : "1.0.1"
28 | }
29 | }
30 | ],
31 | "version" : 2
32 | }
33 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version: 5.7
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 | 
4 | import PackageDescription
5 | 
6 | let package = Package(
7 |     name: "openai-async-image-swiftui",
8 |     // Minimum deployment targets match the `@available` annotations used throughout the sources.
9 |     platforms: [.macOS(.v12), .iOS(.v15), .watchOS(.v8), .tvOS(.v15)],
10 |     products: [
11 |         // The single library product exposed to client packages.
12 |         .library(
13 |             name: "openai-async-image-swiftui",
14 |             targets: ["openai-async-image-swiftui"]),
15 |     ],
16 |     dependencies: [
17 |         // Transport layer (HTTP client) and task life-cycle management helpers.
18 |         .package(url: "https://github.com/swiftuiux/async-http-client.git", from: "1.5.0"),
19 |         .package(url: "https://github.com/swiftuiux/async-task.git", from: "1.2.5")
20 |     ],
21 |     targets: [
22 |         // Main library target plus its unit-test target.
23 |         .target(
24 |             name: "openai-async-image-swiftui",
25 |             dependencies: ["async-http-client", "async-task"]),
26 |         .testTarget(
27 |             name: "openai-async-image-swiftuiTests",
28 |             dependencies: ["openai-async-image-swiftui"]),
29 |     ]
30 | )
31 | 
31 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # OpenAI DALL·E AsyncImage SwiftUI
2 |
3 | SwiftUI view that asynchronously loads and displays an OpenAI image from open API
4 |
5 | ### Please star the repository if you find continued development of this package worthwhile. This helps me understand which packages deserve more effort.
6 |
7 | [](https://swiftpackageindex.com/swiftuiux/openai-async-image-swiftui)
8 |
9 | ## [Example for the package](https://github.com/swiftuiux/openai-async-image-swiftui-example)
10 | ## [Documentation(API)](https://swiftpackageindex.com/swiftuiux/openai-async-image-swiftui/main/documentation/openai_async_image_swiftui)
11 |
12 |
13 | ## Features
14 | - [x] Supports multiple platforms: iOS, macOS, watchOS, and tvOS
15 | - [x] Customizable with SwiftUI Image properties (e.g., `renderingMode`, `resizable`, `antialiased`)
16 | - [x] Configurable transport layer via custom `Loader`
17 | - [x] Designed with interfaces, not implementations
18 | - [x] Fully leverages Swift's new concurrency model
19 |
20 | 
21 |
22 | ## How to use
23 |
24 | ### 1. Get your API key from OpenAI
25 | [Where do I find my Secret API Key?](https://help.openai.com/en/articles/4936850-where-do-i-find-my-secret-api-key)
26 |
27 |
28 | ### 2. Override the default loader in the Environment with your API key
29 |
30 | ```swift
31 | let apiKey = "your API KEY"
32 | let endpoint = OpenAIImageEndpoint.get(with: apiKey)
33 | let loader = OpenAIDefaultLoader(endpoint: endpoint)
34 | OpenAIDefaultLoaderKey.defaultValue = loader
35 | ```
36 |
37 | ### 3. Add **OpenAIAsyncImage** to your code
38 |
39 | ```swift
40 | OpenAIAsyncImage(prompt: .constant("sun"))
41 | ```
42 | or with custom **ViewBuilder**
43 |
44 | ```swift
45 | OpenAIAsyncImage(prompt: $imageText, size: .dpi1024){ state in
46 | switch state{
47 | case .loaded(let image) :
48 | image
49 | .resizable()
50 | .scaledToFill()
51 | case .loadError(let error) : Text(error.localizedDescription)
52 | case .loading : ProgressView()
53 | }
54 | }
55 | ```
56 |
57 | | Param | Description |
58 | | --- | --- |
59 | | prompt | A text description of the desired image(s). The maximum length is 1000 characters |
60 | | size | The size of the generated images. DALL·E 2: 256x256, 512x512, or 1024x1024; DALL·E 3 also supports 1792x1024 and 1024x1792 |
61 | | tpl | Custom view builder tpl |
62 | | loader | Custom loader if you need something specific|
63 |
64 | 
65 |
66 | ## Documentation(API)
67 | - You need to have Xcode 13 installed in order to have access to Documentation Compiler (DocC)
68 | - Go to Product > Build Documentation or **⌃⇧⌘ D**
69 |
70 |
71 | 
72 |
73 | ## More Stable Diffusion examples
74 |
75 | ### Replicate toolkit for swift. Set of diffusion models
76 | Announced in 2022, OpenAI's text-to-image model DALL-E 2 is a recent example of diffusion models. It uses diffusion models for both the model's prior (which produces an image embedding given a text caption) and the decoder that generates the final image.
77 | In machine learning, diffusion models, also known as diffusion probabilistic models, are a class of latent variable models. They are Markov chains trained using variational inference. The goal of diffusion models is to learn the latent structure of a dataset by modeling the way in which data points diffuse through the latent space.
78 | Diffusion models can be applied to a variety of tasks, including image denoising, inpainting, super-resolution, and image generation. For example, an image generation model would start with a random noise image and then, after having been trained reversing the diffusion process on natural images, the model would be able to generate new natural images.
79 | [Replicate kit](https://github.com/swiftuiux/replicate-kit-swift)
80 |
81 |
82 | 
83 |
84 | ### CoreML Stable Diffusion
85 | [The example app](https://github.com/swiftuiux/coreml-stable-diffusion-swift-example) for running text-to-image or image-to-image models to generate images using Apple's Core ML Stable Diffusion implementation
86 |
87 | 
88 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/OpenAIAsyncImage.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAIAsyncImage.swift
3 | //
4 | //
5 | // Created by Igor on 18.02.2023.
6 | //
7 |
8 | import SwiftUI
9 | import async_task
10 |
11 | fileprivate typealias ImageSize = OpenAIImageSize
12 | // Restored generic arguments (stripped from the original text): the task's
13 | // success value is the generated `Image` and failures are mapped to
14 | // `AsyncImageErrors` by `errorMapper` below.
15 | fileprivate typealias TaskModel = Async.SingleTask<Image, AsyncImageErrors>
13 |
14 | /// Async image component to load and show OpenAI image from OpenAI image API
15 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
16 | // Generic parameters restored: `Content` (the view produced by the template,
17 | // used at line 22) and `T` (the custom loader type, used at line 33) were
18 | // referenced but undeclared in the stripped text.
19 | public struct OpenAIAsyncImage<Content: View, T: IOpenAILoader>: View {
20 | 
21 |     /// Task model for managing image loading cycle
22 |     @StateObject private var taskModel = TaskModel(errorMapper: errorMapper)
23 | 
24 |     /// Custom view builder template type alias
25 |     public typealias ImageProcess = (ImageState) -> Content
26 | 
27 |     /// Default loader, injected from environment
28 |     @Environment(\.openAIDefaultLoader) var defaultLoader : OpenAIDefaultLoader
29 | 
30 |     // MARK: - Config
31 | 
32 |     /// A binding to the text prompt describing the desired image. The maximum length is 1000 characters
33 |     @Binding var prompt : String
34 | 
35 |     /// Optional custom loader conforming to `IOpenAILoader` protocol
36 |     let loader : T?
37 | 
38 |     /// The size of the image to be generated
39 |     let size : OpenAIImageSize
40 | 
41 |     /// Optional custom view builder template
42 |     let tpl : ImageProcess?
43 | 
44 |     /// Dall-e model type
45 |     let model : DalleModel
46 | 
47 |     // MARK: - Life cycle
48 | 
49 |     /// Initializes a view model for generating images using the OpenAI API with customizable parameters.
50 |     /// - Parameters:
51 |     ///   - prompt: A `Binding` to a `String` that represents a text description of the desired image(s).
52 |     ///     The maximum length for the prompt is 1000 characters.
53 |     ///   - size: The size of the generated images, specified as an `OpenAIImageSize`.
54 |     ///     Defaults to `.dpi256`. Must be one of `.dpi256` (256x256), `.dpi512` (512x512), or `.dpi1024` (1024x1024).
55 |     ///   - model: The `DalleModel` specifying which model to use for generating the image(s).
56 |     ///     Defaults to `.dalle2`.
57 |     ///   - tpl: A custom SwiftUI `ViewBuilder` template for processing or rendering the generated image(s).
58 |     ///   - loader: A custom loader conforming to the `IOpenAILoader` protocol, responsible for handling
59 |     ///     the image generation process, such as communicating with the OpenAI API.
60 |     public init(
61 |         prompt: Binding<String>,
62 |         size: OpenAIImageSize = .dpi256,
63 |         model: DalleModel = .dalle2,
64 |         @ViewBuilder tpl: @escaping ImageProcess,
65 |         loader: T
66 |     ) {
67 |         self._prompt = prompt
68 |         self.size = size
69 |         self.model = model
70 |         self.tpl = tpl
71 |         self.loader = loader
72 |     }
73 | 
74 |     /// The content and behavior of the view
75 |     public var body: some View {
76 |         ZStack{
77 |             let state = getState()
78 |             if let tpl {
79 |                 tpl(state)
80 |             }else{
81 |                 imageTpl(state)
82 |             }
83 |         }
84 |         .onChange(of: prompt){ _ in
85 |             start()
86 |         }
87 |         .onAppear {
88 |             start()
89 |         }
90 |         .onDisappear{
91 |             cancel()
92 |         }
93 |     }
94 | 
95 |     // MARK: - Private methods
96 | 
97 |     /// - Returns: The current image state status
98 |     private func getState () -> ImageState{
99 | 
100 |         if let image = taskModel.value { return .loaded(image) }
101 |         else if let error = taskModel.error { return .loadError(error)}
102 | 
103 |         return .loading
104 |     }
105 | 
106 |     /// Loads an image using the default loader.
107 |     /// - Parameters:
108 |     ///   - prompt: The text prompt describing the desired image content.
109 |     ///   - size: The dimensions of the generated image, specified as `ImageSize`.
110 |     ///   - model: The `DalleModel` specifying the AI model to use for image generation.
111 |     /// - Returns: A generated `Image` object if successful.
112 |     /// - Throws: An error if the image generation fails.
113 |     private func loadImageDefault(
114 |         _ prompt: String,
115 |         with size: ImageSize,
116 |         model: DalleModel
117 |     ) async throws -> Image {
118 |         try await defaultLoader.load(prompt, with: size, model: model)
119 |     }
120 | 
121 |     /// Loads an image using a provided loader, or falls back to the default loader if none is provided.
122 |     /// - Parameters:
123 |     ///   - prompt: The text prompt describing the desired image content.
124 |     ///   - size: The dimensions of the generated image, specified as `ImageSize`.
125 |     ///   - model: The `DalleModel` specifying the AI model to use for image generation.
126 |     /// - Returns: An `Image` object if successful, or `nil` if the operation fails or is cancelled.
127 |     private func loadImage(
128 |         _ prompt: String,
129 |         with size: ImageSize,
130 |         model: DalleModel
131 |     ) async throws -> Image? {
132 |         if let loader = loader {
133 |             return try await loader.load(prompt, with: size, model: model)
134 |         }
135 |         return try await loadImageDefault(prompt, with: size, model: model)
136 |     }
137 | 
138 |     /// Creates and starts a task to fetch the OpenAI image for the current prompt
139 |     private func start(){
140 |         taskModel.start{
141 |             try await loadImage(prompt, with: size, model: model)
142 |         }
143 |     }
144 | 
145 |     /// Cancel the in-flight loading task, if any
146 |     private func cancel(){
147 |         taskModel.cancel()
148 |     }
149 | }
148 |
149 | // MARK: - Public extensions -
150 |
151 | public extension OpenAIAsyncImage where Content == EmptyView, T == OpenAIDefaultLoader{
152 | 
153 |     /// Convenience initializer for creating an instance with the default loader and no custom view template.
154 |     /// - Parameters:
155 |     ///   - prompt: A `Binding` to a `String` containing the text prompt that describes the desired image content.
156 |     ///   - size: The desired size of the generated image, specified as an `OpenAIImageSize`.
157 |     ///     Defaults to `.dpi256`.
158 |     ///   - model: The `DalleModel` specifying the AI model to use for image generation. Defaults to `.dalle2`.
159 |     init(
160 |         prompt: Binding<String>,
161 |         size: OpenAIImageSize = .dpi256,
162 |         model: DalleModel = .dalle2
163 |     ) {
164 |         self._prompt = prompt
165 |         self.size = size
166 |         self.model = model
167 |         self.tpl = nil
168 |         self.loader = nil
169 |     }
170 | }
171 |
172 | public extension OpenAIAsyncImage where T == OpenAIDefaultLoader{
173 | 
174 |     /// Convenience initializer for creating an instance with the default loader and a custom view template.
175 |     /// - Parameters:
176 |     ///   - prompt: A `Binding` to a `String` containing the text prompt that describes the desired image content.
177 |     ///   - size: The desired size of the generated image, specified as an `OpenAIImageSize`. Defaults to `.dpi256`.
178 |     ///   - model: The `DalleModel` specifying the AI model to use for image generation. Defaults to `.dalle2`.
179 |     ///   - tpl: A SwiftUI `@ViewBuilder` closure that provides a custom view template for processing or rendering the generated image.
180 |     init(
181 |         prompt: Binding<String>,
182 |         size: OpenAIImageSize = .dpi256,
183 |         model: DalleModel = .dalle2,
184 |         @ViewBuilder tpl: @escaping ImageProcess
185 |     ) {
186 |         self._prompt = prompt
187 |         self.size = size
188 |         self.model = model
189 |         self.tpl = tpl
190 |         self.loader = nil
191 |     }
192 | }
193 |
194 | // MARK: - File private functions -
195 |
196 | /// Default template that renders a view for the given `ImageState`.
197 | /// - Parameter state: The current state of the image.
198 | /// - Returns: A resizable image when loaded, the error's localized description
199 | ///   on failure, or a progress indicator while loading.
200 | @ViewBuilder
201 | fileprivate func imageTpl(_ state: ImageState) -> some View {
202 |     switch state {
203 |     case let .loaded(image): image.resizable()
204 |     case let .loadError(error): Text(error.localizedDescription)
205 |     case .loading: ProgressView()
206 |     }
207 | }
210 |
211 | /// Maps a task error to the package's `AsyncImageErrors` type.
212 | /// - Parameter error: The error produced by the loading task, which may be `nil`.
213 | /// - Returns: `.cancellationError` when the task was cancelled; `nil` for every
214 | ///   other error so it is propagated unchanged.
215 | @Sendable
216 | fileprivate func errorMapper(_ error: Error?) -> AsyncImageErrors? {
217 |     error is CancellationError ? .cancellationError : nil
218 | }
223 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/enum/AsyncImageErrors.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AsyncImageErrors.swift
3 | //
4 | //
5 | // Created by Igor on 18.02.2023.
6 | //
7 |
8 | import Foundation
9 | import async_http_client
10 |
11 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
12 | /// Enum representing different errors that can occur when loading images asynchronously
13 | enum AsyncImageErrors: Error {
14 |     case imageInit // Error initializing an image from data
15 |     case clientIsNotDefined // HTTP client is not defined
16 |     case returnedNoImages // No images were returned in the response
17 |     case httpStatus(String) // HTTP status error with a message decoded from the response body
18 |     case responseError(Error) // Generic response error wrapping the underlying failure
19 |     case cancellationError // The loading task was cancelled before completion
20 | }
21 |
22 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
23 | extension AsyncImageErrors: LocalizedError {
24 |     /// Human-readable message for each case; surfaced through
25 |     /// `error.localizedDescription` by the default error view.
26 |     public var errorDescription: String? {
27 |         switch self {
28 |         case .imageInit:
29 |             return NSLocalizedString("Unable to create image from the provided data.", comment: "")
30 |         case .clientIsNotDefined:
31 |             return NSLocalizedString("Client not found. The URL might be invalid.", comment: "")
32 |         case .returnedNoImages:
33 |             return NSLocalizedString("The response did not contain any images.", comment: "")
34 |         case .httpStatus(let description):
35 |             // Message was already extracted from the API error payload
36 |             return NSLocalizedString(description, comment: "")
37 |         case .responseError(let error):
38 |             return error.localizedDescription
39 |         case .cancellationError:
40 |             return NSLocalizedString("Cancellation error.", comment: "")
41 |         }
42 |     }
43 | }
41 |
42 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
43 | extension AsyncImageErrors {
44 |     /// Converts an arbitrary request error into an `AsyncImageErrors` value.
45 |     /// - Parameter error: The error thrown while performing the request.
46 |     /// - Returns: A decoded API error when the failure is an HTTP status error
47 |     ///   carrying response data; otherwise the error wrapped in `.responseError`.
48 |     static func handleRequest(_ error: Error) -> AsyncImageErrors {
49 |         guard let httpError = error as? Http.Errors,
50 |               case let .status(_, _, payload) = httpError,
51 |               let body = payload
52 |         else {
53 |             return .responseError(error)
54 |         }
55 |         return decodeErrorResponse(from: body)
56 |     }
57 | }
56 |
57 | /// Decodes the error payload returned by the OpenAI API.
58 | /// - Parameter responseData: The raw response body to decode.
59 | /// - Returns: `.httpStatus` carrying the API's error message when the payload
60 | ///   matches the error envelope; otherwise the raw body rendered as text.
61 | fileprivate func decodeErrorResponse(from responseData: Data) -> AsyncImageErrors {
62 |     guard let wrapper = try? JSONDecoder().decode(ErrorResponseWrapper.self, from: responseData) else {
63 |         // Fall back to the raw payload so the caller still sees something useful
64 |         let fallback = String(data: responseData, encoding: .utf8) ?? "Unable to decode data"
65 |         return .httpStatus(fallback)
66 |     }
67 |     return .httpStatus(wrapper.error.message)
68 | }
68 |
69 | /// Defines the structure for the inner "error" object in the API response
70 | fileprivate struct ErrorResponse: Decodable {
71 |     let code: String?    // Machine-readable error code, when provided
72 |     let message: String  // Human-readable description of the failure
73 |     let param: String?   // Offending request parameter, if reported
74 |     let type: String     // Error category reported by the API
75 | }
76 | 
77 | /// Defines the structure for the overall response wrapper containing the error object
78 | fileprivate struct ErrorResponseWrapper: Decodable {
79 |     let error: ErrorResponse
80 | }
81 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/enum/DalleModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DalleModel.swift
3 | // openai-async-image-swiftui
4 | //
5 | // Created by Igor on 26.11.24.
6 | //
7 |
8 | /// Supported DALL·E model identifiers. Raw values are the model names
9 | /// expected by the OpenAI API.
10 | public enum DalleModel: String{
11 | 
12 |     case dalle2 = "dall-e-2" // DALL·E 2 — the package default
13 | 
14 |     case dalle3 = "dall-e-3" // DALL·E 3
15 | }
14 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/enum/ImageState.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageState.swift
3 | //
4 | //
5 | // Created by Igor on 28.02.2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | /// Enumeration representing the various states of `OpenAIAsyncImage`
11 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
12 | public enum ImageState {
13 | 
14 |     /// State when the image is currently being loaded; the default template shows a `ProgressView`
15 |     case loading
16 | 
17 |     /// State when the image has been successfully loaded; carries the generated `Image`
18 |     case loaded(Image)
19 | 
20 |     /// State when an error occurred during image generation or fetching; carries the underlying `Error`
21 |     case loadError(Error)
22 | }
23 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/enum/OpenAIImageSize.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageSize.swift
3 | //
4 | //
5 | // Created by Igor on 18.02.2023.
6 | //
7 |
8 | import Foundation
9 |
10 | /// The size of the generated images.
11 | /// The classic sizes are 256x256, 512x512, and 1024x1024; the 1792-pixel
12 | /// variants below are presumably DALL·E 3 only — confirm against the OpenAI API docs.
13 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
14 | public enum OpenAIImageSize: String, Encodable{
15 | 
16 |     case dpi256 = "256x256"
17 | 
18 |     case dpi512 = "512x512"
19 | 
20 |     case dpi1024 = "1024x1024"
21 | 
22 |     case dpi1792x1024 = "1792x1024" // landscape
23 | 
24 |     case dpi1024x1792 = "1024x1792" // portrait
25 | }
24 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/enum/ResponseFormat.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ResponseFormat.swift
3 | //
4 | //
5 | // Created by Igor on 18.02.2023.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Type of response format from OpenAI API.
11 | /// Raw values match the `response_format` field of the images endpoint.
12 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
13 | enum ResponseFormat: String,Encodable{
14 | 
15 |     case url = "url" // A URL pointing to the generated image
16 | 
17 |     case b64 = "b64_json" // Image bytes, base64-encoded inside the JSON response
18 | }
18 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/environmentKey/OpenAIAsyncImageLoaderKey.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAIAsyncImageLoaderKey.swift
3 | //
4 | //
5 | // Created by Igor on 28.02.2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | /// A key for accessing default loader in the environment
11 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
12 | public struct OpenAIDefaultLoaderKey : EnvironmentKey{
13 |     public typealias Value = OpenAIDefaultLoader
14 | 
15 |     /// Declared `var` (not `let`) so clients can install a loader configured with
16 |     /// their API key, as documented in the README
17 |     /// (`OpenAIDefaultLoaderKey.defaultValue = loader`); a `let` would make that
18 |     /// documented override a compile error.
19 |     /// NOTE(review): set this once during app start-up — writes to this static
20 |     /// are not synchronized.
21 |     public static var defaultValue = OpenAIDefaultLoader(endpoint: OpenAIImageEndpoint.get(with: ""))
22 | }
17 |
18 | public extension EnvironmentValues{
19 |     /// Accessor for the default OpenAI image loader injected via the environment
20 |     var openAIDefaultLoader: OpenAIDefaultLoader{
21 |         get { self[OpenAIDefaultLoaderKey.self] }
22 |         set { self[OpenAIDefaultLoaderKey.self] = newValue }
23 |     }
24 | }
24 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/model/Input.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAIImageRequest.swift
3 | //
4 | //
5 | // Created by Igor on 18.02.2023.
6 | // https://platform.openai.com/docs/api-reference/images
7 |
8 | import Foundation
9 |
10 |
11 | /// Input format to OpenAI API
12 | /// Given a prompt and/or an input image, the model will generate a new image.
13 | /// Property names are deliberately snake_case: they are encoded verbatim as the
14 | /// JSON field names expected by the OpenAI images endpoint.
15 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
16 | struct Input: Encodable{
17 | 
18 |     /// dall-e model name (raw value of `DalleModel`, e.g. "dall-e-2")
19 |     let model : String
20 | 
21 |     /// A text description of the desired image(s). The maximum length is 1000 characters
22 |     let prompt: String
23 | 
24 |     /// The size of the generated images (see `OpenAIImageSize` for valid values)
25 |     let size : OpenAIImageSize
26 | 
27 |     /// The format in which the generated images are returned. Must be one of url or b64_json
28 |     let response_format : ResponseFormat
29 | 
30 |     /// The number of images to generate
31 |     let n : Int
32 | }
31 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/model/Output.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Response.swift
3 | //
4 | //
5 | // Created by Igor on 18.02.2023.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Structure representing the output format for the OpenAI API response
11 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
12 | struct Output: Decodable {
13 | 
14 |     /// The creation date and time of the response in UNIX timestamp format
15 |     let created: Int
16 | 
17 |     /// An array of base64 encoded images
18 |     let data: [Base64]
19 | 
20 |     /// The first image from the received data set, or `nil` when `data` is empty
21 |     var firstImage: String? {
22 |         data.first?.b64_json
23 |     }
24 | }
25 | 
26 | /// Structure representing a base64 encoded image
27 | struct Base64: Decodable {
28 |     /// The base64 encoded image data; the property name matches the API's JSON key
29 |     let b64_json: String
30 | }
31 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/net/OpenAIImageEndpoint.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAIImageEndpoint.swift
3 | //
4 | //
5 | // Created by Igor on 18.02.2023.
6 | //
7 |
8 | import Foundation
9 |
10 | /// Struct providing specifications for accessing the OpenAI image resource
11 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
12 | public struct OpenAIImageEndpoint: IOpenAIImageEndpoint {
13 | 
14 |     // MARK: - Static Properties
15 | 
16 |     /// Static base URL for the OpenAI image resource
17 |     public static let urlString = "https://api.openai.com"
18 | 
19 |     /// Static path to the specific endpoint for generating images
20 |     public static let path = "/v1/images/generations"
21 | 
22 |     /// Creates an instance of `OpenAIImageEndpoint` with the provided API key,
23 |     /// using the static `urlString` and `path` above.
24 |     /// - Parameter apiKey: API key for accessing the OpenAI API
25 |     /// - Returns: Configured instance of `OpenAIImageEndpoint`
26 |     public static func get(with apiKey: String) -> Self {
27 |         .init(
28 |             urlString: Self.urlString,
29 |             apiKey: apiKey,
30 |             path: Self.path
31 |         )
32 |     }
33 | 
34 |     // MARK: - Instance Properties
35 | 
36 |     /// Base URL for the OpenAI image resource
37 |     public let urlString: String
38 | 
39 |     /// Path to the specific endpoint, appended to `urlString`
40 |     public let path: String
41 | 
42 |     /// API key for authentication and access to the OpenAI API
43 |     public let apiKey: String
44 | 
45 |     // MARK: - Initializer
46 | 
47 |     /// Initializes a new instance of `OpenAIImageEndpoint`
48 |     /// - Parameters:
49 |     ///   - urlString: Base URL for the OpenAI image resource
50 |     ///   - apiKey: API key for accessing the OpenAI API
51 |     ///   - path: Path to the specific endpoint
52 |     public init(urlString: String, apiKey: String, path: String) {
53 |         self.urlString = urlString
54 |         self.apiKey = apiKey
55 |         self.path = path
56 |     }
57 | }
57 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/protocol/IOpenAIImageEndpoint.swift:
--------------------------------------------------------------------------------
1 | //
2 | // IOpenAIImageEndpoint.swift
3 | //
4 | //
5 | // Created by Igor on 28.02.2023.
6 | //
7 |
8 | import Foundation
9 |
/// Contract describing everything required to reach the OpenAI image API:
/// a host, a resource path, and an API key.
@available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
public protocol IOpenAIImageEndpoint: Sendable {

    /// Host URL of the OpenAI image resource.
    var urlString: String { get }

    /// Resource path of the endpoint within the OpenAI API.
    var path: String { get }

    /// Key used to authorize requests against the OpenAI API.
    var apiKey: String { get }
}
24 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/protocol/IOpenAILoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // IOpenAILoader.swift
3 | //
4 | //
5 | // Created by Igor on 28.02.2023.
6 | //
7 |
8 | import SwiftUI
9 |
/// Contract for a loader that produces images from the OpenAI API.
@available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
public protocol IOpenAILoader {

    /// Generates an image for a text prompt.
    /// - Parameters:
    ///   - prompt: Text describing the desired image content.
    ///   - size: Dimensions of the generated image.
    ///   - model: The DALL·E model variant to generate with.
    /// - Returns: The generated `Image`.
    /// - Throws: An error when generation fails — e.g. a rejected prompt,
    ///   an unavailable model, or a network failure.
    func load(_ prompt: String, with size: OpenAIImageSize,
              model: DalleModel) async throws -> Image
}
24 |
--------------------------------------------------------------------------------
/Sources/openai-async-image-swiftui/viewModel/OpenAIDefaultLoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAIViewModel.swift
3 | //
4 | //
5 | // Created by Igor on 28.02.2023.
6 | //
7 |
8 | import SwiftUI
9 | import async_http_client
10 |
11 | #if os(iOS)
12 | import UIKit.UIImage
13 | #endif
14 |
15 | #if os(macOS)
16 | import AppKit.NSImage
17 | #endif
18 |
19 | @available(iOS 15.0, macOS 12.0, tvOS 15.0, watchOS 8.0, *)
20 | public final class OpenAIDefaultLoader: IOpenAILoader, Sendable {
21 |
22 | /// HTTP async client to handle requests
23 | private let client: Http.Proxy?
24 |
25 | /// Endpoint parameters required for making requests
26 | private let endpoint: IOpenAIImageEndpoint
27 |
28 | /// Initializes the loader with endpoint parameters
29 | /// - Parameter endpoint: Set of parameters for making requests
30 | public init(endpoint: IOpenAIImageEndpoint) {
31 | self.endpoint = endpoint
32 |
33 | guard let url = URL(string: endpoint.urlString) else {
34 | client = nil
35 | return
36 | }
37 |
38 | client = Http.Proxy(baseURL: url)
39 | }
40 |
41 | /// Asynchronously loads an image from the OpenAI API using a text prompt and specified parameters.
42 | /// - Parameters:
43 | /// - prompt: The text prompt describing the desired image content.
44 | /// - size: The dimensions of the generated image, specified as `OpenAIImageSize`.
45 | /// - model: The `DalleModel` used for generating the image.
46 | /// - Returns: A generated `Image` object based on the prompt and size.
47 | /// - Throws: An `AsyncImageErrors` if the client is undefined, the request fails,
48 | /// or the OpenAI API returns an error.
49 | public func load(
50 | _ prompt: String,
51 | with size: OpenAIImageSize,
52 | model: DalleModel
53 | ) async throws -> Image {
54 |
55 | guard let client = client else {
56 | throw AsyncImageErrors.clientIsNotDefined
57 | }
58 |
59 | do {
60 | let (path, body, headers) = prepareRequest(prompt: prompt, size: size, model: model)
61 | let result: Http.Response