├── LICENSE.md
├── PromptToImage.xcodeproj
│   ├── project.pbxproj
│   ├── project.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   ├── xcshareddata
│   │   │   └── IDEWorkspaceChecks.plist
│   │   └── xcuserdata
│   │       └── hany.xcuserdatad
│   │           └── IDEFindNavigatorScopes.plist
│   └── xcuserdata
│       └── hany.xcuserdatad
│           └── xcschemes
│               └── xcschememanagement.plist
├── PromptToImage
│   ├── AppDelegate.swift
│   ├── Assets.xcassets
│   │   ├── AccentColor.colorset
│   │   │   └── Contents.json
│   │   ├── AppIcon.appiconset
│   │   │   ├── Contents.json
│   │   │   ├── prompttoimage 1.png
│   │   │   ├── prompttoimage 2.png
│   │   │   ├── prompttoimage 3.png
│   │   │   ├── prompttoimage 4.png
│   │   │   ├── prompttoimage 5.png
│   │   │   ├── prompttoimage 6.png
│   │   │   ├── prompttoimage 7.png
│   │   │   ├── prompttoimage 8.png
│   │   │   ├── prompttoimage 9.png
│   │   │   └── prompttoimage.png
│   │   └── Contents.json
│   ├── Base.lproj
│   │   └── MainMenu.xib
│   ├── Downloader.swift
│   ├── Extensions.swift
│   ├── Global Vars.swift
│   ├── History.swift
│   ├── Info.plist
│   ├── Load SD Models.swift
│   ├── Load Upscale Model.swift
│   ├── Main Window Controller
│   │   ├── Controls Actions.swift
│   │   ├── Controls Values.swift
│   │   ├── Display Result.swift
│   │   ├── Generate Image.swift
│   │   ├── Info Popover.swift
│   │   ├── Resize Image.swift
│   │   ├── SDMainWindowController.swift
│   │   ├── SDMainWindowController.xib
│   │   ├── Settings.swift
│   │   └── Share.swift
│   ├── Models.swift
│   ├── PromptToImage.entitlements
│   ├── PromptToImageDebug.entitlements
│   ├── Stable Diffusion Resources
│   │   └── .gitkeep
│   ├── Stable Diffusion
│   │   └── ml-stable-diffusion
│   │       ├── pipeline
│   │       │   ├── AlphasCumprodCalculation.swift
│   │       │   ├── CGImage+vImage.swift
│   │       │   ├── DPMSolverMultistepScheduler.swift
│   │       │   ├── Decoder.swift
│   │       │   ├── Encoder.swift
│   │       │   ├── ManagedMLModel.swift
│   │       │   ├── Random.swift
│   │       │   ├── ResourceManaging.swift
│   │       │   ├── SafetyChecker.swift
│   │       │   ├── SampleTimer.swift
│   │       │   ├── Scheduler.swift
│   │       │   ├── StableDiffusionPipeline+Resources.swift
│   │       │   ├── StableDiffusionPipeline.swift
│   │       │   ├── TextEncoder.swift
│   │       │   └── Unet.swift
│   │       └── tokenizer
│   │           ├── BPETokenizer+Reading.swift
│   │           └── BPETokenizer.swift
│   ├── Start.swift
│   ├── Upscale Model
│   │   ├── .gitkeep
│   │   └── realesrgan512.mlmodel
│   ├── Upscaler.swift
│   └── img
│       ├── aneoff.png
│       ├── aneon.png
│       ├── bigsd-ship.png
│       ├── cpuoff.png
│       ├── cpuon.png
│       ├── gpuoff.png
│       ├── gpuon.png
│       ├── prompttoimage.png
│       ├── sd-ship.png
│       ├── testimage.png
│       └── tree.png
└── README.md
/PromptToImage.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/PromptToImage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/PromptToImage.xcodeproj/project.xcworkspace/xcuserdata/hany.xcuserdatad/IDEFindNavigatorScopes.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <array/>
5 | </plist>
6 |
--------------------------------------------------------------------------------
/PromptToImage.xcodeproj/xcuserdata/hany.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>SchemeUserState</key>
6 | 	<dict>
7 | 		<key>PromptToImage.xcscheme_^#shared#^_</key>
8 | 		<dict>
9 | 			<key>orderHint</key>
10 | 			<integer>0</integer>
11 | 		</dict>
12 | 	</dict>
13 | </dict>
14 | </plist>
15 |
--------------------------------------------------------------------------------
/PromptToImage/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 02/12/22.
6 | //
7 |
8 | import Foundation
9 | import AppKit
10 |
11 |
12 | // COREML STABLE DIFFUSION
13 |
14 | @main
15 | class AppDelegate: NSObject, NSApplicationDelegate {
16 | @IBOutlet weak var aboutWindow: NSWindow!
17 | @IBOutlet weak var aboutVersionString: NSTextField!
18 |
19 |
20 | func applicationShouldTerminateAfterLastWindowClosed(_ sender: NSApplication) -> Bool { return true }
21 | func applicationSupportsSecureRestorableState(_ app: NSApplication) -> Bool { return true }
22 | func applicationDidFinishLaunching(_ aNotification: Notification) { self.startPromptToImage() }
23 | func applicationWillTerminate(_ aNotification: Notification) { self.willTerminate() }
24 | }
25 |
26 |
27 | extension AppDelegate {
28 | @IBAction func openSettingsWindow(_ sender: Any) {
29 | guard let ctrl = wins["main"] as? SDMainWindowController else { return }
30 | ctrl.window?.beginSheet(ctrl.settingsWindow)
31 | }
32 | @IBAction func openAboutWindow(_ sender: Any) {
33 | self.aboutVersionString.stringValue = appFullVersion()
34 | self.aboutWindow.makeKeyAndOrderFront(nil)
35 | self.aboutWindow.center()
36 | }
37 | }
38 |
39 |
40 |
41 | func appVersion() -> String {
42 | return Bundle.main.infoDictionary?["CFBundleShortVersionString"] as? String ?? "4.0"
43 | }
44 | func appBuild() -> String {
45 | return Bundle.main.infoDictionary?["CFBundleVersion"] as? String ?? "1"
46 | }
47 | func appFullVersion() -> String {
48 | return "Version " + ((Bundle.main.infoDictionary?["CFBundleShortVersionString"] as? String ?? "4.0") + " (build " + (Bundle.main.infoDictionary?["CFBundleVersion"] as? String ?? "000") + ")")
49 | }
50 |
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "color" : {
5 | "platform" : "universal",
6 | "reference" : "systemBlueColor"
7 | },
8 | "idiom" : "universal"
9 | }
10 | ],
11 | "info" : {
12 | "author" : "xcode",
13 | "version" : 1
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "prompttoimage 9.png",
5 | "idiom" : "mac",
6 | "scale" : "1x",
7 | "size" : "16x16"
8 | },
9 | {
10 | "filename" : "prompttoimage 8.png",
11 | "idiom" : "mac",
12 | "scale" : "2x",
13 | "size" : "16x16"
14 | },
15 | {
16 | "filename" : "prompttoimage 7.png",
17 | "idiom" : "mac",
18 | "scale" : "1x",
19 | "size" : "32x32"
20 | },
21 | {
22 | "filename" : "prompttoimage 6.png",
23 | "idiom" : "mac",
24 | "scale" : "2x",
25 | "size" : "32x32"
26 | },
27 | {
28 | "filename" : "prompttoimage 5.png",
29 | "idiom" : "mac",
30 | "scale" : "1x",
31 | "size" : "128x128"
32 | },
33 | {
34 | "filename" : "prompttoimage 4.png",
35 | "idiom" : "mac",
36 | "scale" : "2x",
37 | "size" : "128x128"
38 | },
39 | {
40 | "filename" : "prompttoimage 3.png",
41 | "idiom" : "mac",
42 | "scale" : "1x",
43 | "size" : "256x256"
44 | },
45 | {
46 | "filename" : "prompttoimage 2.png",
47 | "idiom" : "mac",
48 | "scale" : "2x",
49 | "size" : "256x256"
50 | },
51 | {
52 | "filename" : "prompttoimage 1.png",
53 | "idiom" : "mac",
54 | "scale" : "1x",
55 | "size" : "512x512"
56 | },
57 | {
58 | "filename" : "prompttoimage.png",
59 | "idiom" : "mac",
60 | "scale" : "2x",
61 | "size" : "512x512"
62 | }
63 | ],
64 | "info" : {
65 | "author" : "xcode",
66 | "version" : 1
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 1.png
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 2.png
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 3.png
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 4.png
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 5.png
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 6.png
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 7.png
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 8.png
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage 9.png
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Assets.xcassets/AppIcon.appiconset/prompttoimage.png
--------------------------------------------------------------------------------
/PromptToImage/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/PromptToImage/Downloader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Downloader.swift
3 | // DownloaderUnzipper
4 | //
5 | // Created by hany on 26/12/22.
6 | //
7 |
8 | import Foundation
9 | import Cocoa
10 | import ZIPFoundation
11 |
12 |
13 | // MARK: Click Download model button
14 |
15 | extension SDMainWindowController {
16 |
17 | @IBAction func clickDownloadButton(_ sender: NSButton) {
18 | sender.isHidden = true
19 | let downloader = Downloader(downloadPath: customModelsDirectoryPath)
20 | downloader.startDownload(url: URL(string: defaultModelPublicURL)!)
21 | //
22 | self.progressLabel.stringValue = "Downloading Stable Diffusion model..."
23 | self.progressLabel.isHidden = false
24 | self.progressValueLabel.isHidden = false
25 | self.downloadProgr.doubleValue = 0
26 | self.downloadProgr.isHidden = false
27 | }
28 |
29 |
30 |
31 |
32 |
33 |
34 | func gotNewModel(url:URL) {
35 | DispatchQueue.main.async {
36 | self.window?.endSheet(self.downloadWindow)
37 | self.populateModelsPopup()
38 | }
39 | currentComputeUnits = defaultComputeUnits
40 | for customModelURL in installedCustomModels() {
41 | print("Attempting to load default model \(customModelURL.lastPathComponent)")
42 | currentModelResourcesURL = customModelURL
43 | createStableDiffusionPipeline(computeUnits: currentComputeUnits, url:currentModelResourcesURL)
44 | if sdPipeline != nil {
45 | print("Success loading default model \(customModelURL.lastPathComponent)")
46 | // save to user defaults
47 | UserDefaults.standard.set(currentModelResourcesURL, forKey: "modelResourcesURL")
48 | return
49 | }
50 | }
51 |
52 | }
53 |
54 |
55 |
56 | }
57 |
58 |
59 |
60 |
61 | // MARK: Downloader
62 |
63 | class Downloader : NSObject,
64 | URLSessionTaskDelegate,
65 | URLSessionDownloadDelegate {
66 |
67 | var downloadPath = NSString()
68 |
69 | convenience init(downloadPath:String) {
70 | self.init()
71 | self.downloadPath = downloadPath as NSString
72 | }
73 |
74 | var downloadTask : URLSessionDownloadTask? = nil
75 | private lazy var urlSession = URLSession(configuration: .default,
76 | delegate: self,
77 | delegateQueue: nil)
78 | // Start download
79 | func startDownload(url: URL) {
80 | let downloadTask = urlSession.downloadTask(with: url)
81 | downloadTask.resume()
82 | self.downloadTask = downloadTask
83 | }
84 |
85 | // Download Progress
86 | func urlSession(_ session: URLSession,
87 | downloadTask: URLSessionDownloadTask,
88 | didWriteData bytesWritten: Int64,
89 | totalBytesWritten: Int64,
90 | totalBytesExpectedToWrite: Int64) {
91 | if downloadTask == self.downloadTask {
92 | let calculatedProgress = Float(totalBytesWritten) / Float(totalBytesExpectedToWrite)
93 | DispatchQueue.main.async {
94 | (wins["main"] as! SDMainWindowController).downloadProgr.doubleValue = Double(calculatedProgress * 100)
95 | (wins["main"] as! SDMainWindowController).progressValueLabel.stringValue = "\(Double(Int(calculatedProgress * 1000)) / 10)%"
96 | }
97 | }
98 | }
99 |
100 |
101 | // Download finished
102 | func urlSession(_ session: URLSession,
103 | downloadTask: URLSessionDownloadTask,
104 | didFinishDownloadingTo location: URL) {
105 |
106 | // download url
107 | let downloadURL = URL(fileURLWithPath: downloadPath as String)
108 |
109 | // ZIP archive URL
110 | let archivePath = downloadPath.appendingPathComponent("archive.zip")
111 | let archiveURL = URL(fileURLWithPath: archivePath)
112 |
113 |
114 | // downloaded file: move the archive into place rather than copying it
115 | // through Data, to avoid loading a multi-GB download into memory
116 | do {
117 |     // clear any stale archive from a previous run
118 |     try? FileManager.default.removeItem(at: archiveURL)
119 |     // move the temporary download to <downloadPath>/archive.zip
120 |     try FileManager.default.moveItem(at: location, to: archiveURL)
121 |     if FileManager.default.fileExists(atPath: archivePath) {
122 |         self.unzip(aturl: archiveURL, tourl: downloadURL)
123 |     }
124 | } catch {
125 |     print("Error saving downloaded file: \(error.localizedDescription)")
126 | }
127 | }
128 |
129 |
130 | // UNZIP downloaded archive
131 | func unzip(aturl:URL, tourl:URL) {
132 | DispatchQueue.main.async {
133 | (wins["main"] as! SDMainWindowController).progressLabel.stringValue = "Unzipping archive..."
134 | (wins["main"] as! SDMainWindowController).progressValueLabel.integerValue = 0
135 | (wins["main"] as! SDMainWindowController).downloadProgr.doubleValue = 0
136 | }
137 |
138 | let progress = Progress()
139 | let progrobs = progress.observe(\.fractionCompleted) { progress, _ in
140 | DispatchQueue.main.async {
141 | (wins["main"] as! SDMainWindowController).downloadProgr.doubleValue = Double(progress.fractionCompleted * 100)
142 | (wins["main"] as! SDMainWindowController).progressValueLabel.stringValue = "\(Double(Int(progress.fractionCompleted * 1000)) / 10)%"
143 | }
144 | }
145 |
146 | do {
147 | try FileManager.default.unzipItem(at: aturl, to: tourl, progress: progress)
148 | if FileManager.default.fileExists(atPath: tourl.path) {
149 | print("SUCCESS! extracted file/directory exists")
150 | // delete ZIP archive
151 | try FileManager.default.removeItem(at: aturl)
152 | // delete "__MACOSX" metadata dir if present (try? so a missing dir does not abort model loading)
153 | let bogusDirURL = URL(fileURLWithPath: downloadPath.appendingPathComponent("__MACOSX"))
154 | try? FileManager.default.removeItem(at: bogusDirURL)
155 | // load model
156 | (wins["main"] as! SDMainWindowController).gotNewModel(url: tourl)
157 |
158 | }
159 | } catch {
160 | print("Error unzipping file: \(error.localizedDescription)")
161 | }
162 |
163 | progrobs.invalidate()
164 | }
165 |
166 |
167 |
168 |
169 |
170 |
171 | }
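
// Usage sketch (mirrors clickDownloadButton above):
//
//     let downloader = Downloader(downloadPath: customModelsDirectoryPath)
//     downloader.startDownload(url: URL(string: defaultModelPublicURL)!)
//
// Progress arrives on urlSession(_:downloadTask:didWriteData:...), and once the
// archive has been unzipped into the models directory, gotNewModel(url:) reloads
// the pipeline.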
172 |
--------------------------------------------------------------------------------
/PromptToImage/Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Extensions.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 05/12/22.
6 | //
7 |
8 | import Foundation
9 | import Cocoa
10 |
11 |
12 |
13 | extension CGImage {
14 | func resize(size:CGSize) -> CGImage? {
15 | let width: Int = Int(size.width)
16 | let height: Int = Int(size.height)
17 |
18 | let bytesPerPixel = self.bitsPerPixel / self.bitsPerComponent
19 | let destBytesPerRow = width * bytesPerPixel
20 |
21 |
22 | guard let colorSpace = self.colorSpace else { return nil }
23 | guard let context = CGContext(data: nil, width: width, height: height, bitsPerComponent: self.bitsPerComponent, bytesPerRow: destBytesPerRow, space: colorSpace, bitmapInfo: self.alphaInfo.rawValue) else { return nil }
24 |
25 | context.interpolationQuality = .high
26 | context.draw(self, in: CGRect(x: 0, y: 0, width: width, height: height))
27 |
28 | return context.makeImage()
29 | }
30 | }
31 |
32 |
33 |
34 | // MARK: CIImage
35 |
36 | extension NSImage {
37 | /// Generates a CIImage for this NSImage.
38 | /// - Returns: A CIImage optional.
39 | func ciImage() -> CIImage? {
40 | guard let data = self.tiffRepresentation,
41 | let bitmap = NSBitmapImageRep(data: data) else {
42 | return nil
43 | }
44 | let ci = CIImage(bitmapImageRep: bitmap)
45 | return ci
46 | }
47 |
48 | /// Generates an NSImage from a CIImage.
49 | /// - Parameter ciImage: The CIImage
50 | /// - Returns: An NSImage optional.
51 | static func fromCIImage(_ ciImage: CIImage) -> NSImage {
52 | let rep = NSCIImageRep(ciImage: ciImage)
53 | let nsImage = NSImage(size: rep.size)
54 | nsImage.addRepresentation(rep)
55 | return nsImage
56 | }
57 |
58 |
59 | }
60 |
61 |
62 |
63 | // MARK: URL Type Identifiers
64 |
65 | // UTI Docs: https://developer.apple.com/documentation/uniformtypeidentifiers/system-declared_uniform_type_identifiers
66 |
67 | extension URL {
68 | var typeIdentifier: String? { (try? resourceValues(forKeys: [.typeIdentifierKey]))?.typeIdentifier }
69 | var isImage: Bool { typeIdentifier == "public.image" } // exact match only: "public.jpeg" conforms to, but does not equal, "public.image"
70 | var isMovie: Bool { typeIdentifier == "public.audiovisual-content" }
71 | var isPNG: Bool { typeIdentifier == "public.png" }
72 | var isJPEG: Bool { typeIdentifier == "public.jpeg" }
73 | var isFolder: Bool { typeIdentifier == "public.folder" }
74 | var isDirectory: Bool { typeIdentifier == "public.directory" }
75 | var isCompiledCoreMLModel: Bool { self.pathExtension == "mlmodelc" }
76 | var localizedName: String? { (try? resourceValues(forKeys: [.localizedNameKey]))?.localizedName }
77 | var hasHiddenExtension: Bool {
78 | get { (try? resourceValues(forKeys: [.hasHiddenExtensionKey]))?.hasHiddenExtension == true }
79 | set {
80 | var resourceValues = URLResourceValues()
81 | resourceValues.hasHiddenExtension = newValue
82 | try? setResourceValues(resourceValues)
83 | }
84 | }
85 | }
86 |
87 |
88 |
89 |
90 | // MARK: Crop & Resize NSImage
91 |
92 | // https://gist.github.com/MaciejGad/11d8469b218817290ee77012edb46608
93 |
94 | extension NSImage {
95 |
96 | /// Returns the height of the current image.
97 | var height: CGFloat {
98 | return self.size.height
99 | }
100 |
101 | /// Returns the width of the current image.
102 | var width: CGFloat {
103 | return self.size.width
104 | }
105 |
106 | /// Returns a png representation of the current image.
107 | var PNGRepresentation: Data? {
108 | if let tiff = self.tiffRepresentation, let tiffData = NSBitmapImageRep(data: tiff) {
109 | return tiffData.representation(using: .png, properties: [:])
110 | }
111 |
112 | return nil
113 | }
114 |
115 | /// Copies the current image and resizes it to the given size.
116 | ///
117 | /// - parameter size: The size of the new image.
118 | ///
119 | /// - returns: The resized copy of the given image.
120 | func copy(size: NSSize) -> NSImage? {
121 | // Create a new rect with given width and height
122 | let frame = NSMakeRect(0, 0, size.width, size.height)
123 |
124 | // Get the best representation for the given size.
125 | guard let rep = self.bestRepresentation(for: frame, context: nil, hints: nil) else {
126 | return nil
127 | }
128 |
129 | // Create an empty image with the given size.
130 | let img = NSImage(size: size)
131 |
132 | // Set the drawing context and make sure to remove the focus before returning.
133 | img.lockFocus()
134 | defer { img.unlockFocus() }
135 |
136 | // Draw the new image
137 | if rep.draw(in: frame) {
138 | return img
139 | }
140 |
141 | // Return nil in case something went wrong.
142 | return nil
143 | }
144 |
145 | /// Copies the current image and resizes it to the size of the given NSSize, while
146 | /// maintaining the aspect ratio of the original image.
147 | ///
148 | /// - parameter size: The size of the new image.
149 | ///
150 | /// - returns: The resized copy of the given image.
151 | func resizeWhileMaintainingAspectRatioToSize(size: NSSize) -> NSImage? {
152 | let newSize: NSSize
153 |
154 | let widthRatio = size.width / self.width
155 | let heightRatio = size.height / self.height
156 |
157 | if widthRatio > heightRatio {
158 | newSize = NSSize(width: floor(self.width * widthRatio), height: floor(self.height * widthRatio))
159 | } else {
160 | newSize = NSSize(width: floor(self.width * heightRatio), height: floor(self.height * heightRatio))
161 | }
162 |
163 | return self.copy(size: newSize)
164 | }
165 |
166 | /// Copies and crops an image to the supplied size.
167 | ///
168 | /// - parameter size: The size of the new image.
169 | ///
170 | /// - returns: The cropped copy of the given image.
171 | func crop(size: NSSize) -> NSImage? {
172 | // Resize the current image, while preserving the aspect ratio.
173 | guard let resized = self.resizeWhileMaintainingAspectRatioToSize(size: size) else {
174 | return nil
175 | }
176 | // Get some points to center the cropping area.
177 | let x = floor((resized.width - size.width) / 2)
178 | let y = floor((resized.height - size.height) / 2)
179 |
180 | // Create the cropping frame.
181 | let frame = NSMakeRect(x, y, size.width, size.height)
182 |
183 | // Get the best representation of the image for the given cropping frame.
184 | guard let rep = resized.bestRepresentation(for: frame, context: nil, hints: nil) else {
185 | return nil
186 | }
187 |
188 | // Create a new image with the new size
189 | let img = NSImage(size: size)
190 |
191 | img.lockFocus()
192 | defer { img.unlockFocus() }
193 |
194 | if rep.draw(in: NSMakeRect(0, 0, size.width, size.height),
195 | from: frame,
196 | operation: NSCompositingOperation.copy,
197 | fraction: 1.0,
198 | respectFlipped: false,
199 | hints: [:]) {
200 | // Return the cropped image.
201 | return img
202 | }
203 |
204 | // Return nil in case anything fails.
205 | return nil
206 | }
207 |
208 | /// Saves the PNG representation of the current image to the HD.
209 | ///
210 | /// - parameter url: The location url to which to write the png file.
211 | func savePNGRepresentationToURL(url: URL) throws {
212 | if let png = self.PNGRepresentation {
213 | try png.write(to: url, options: .atomicWrite)
214 | }
215 | }
216 | }
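
// Usage sketch: the helpers above compose, e.g. to produce a 512x512 PNG
// thumbnail (outURL is a placeholder for any writable file URL):
//
//     if let thumb = image.crop(size: NSSize(width: 512, height: 512)) {
//         try? thumb.savePNGRepresentationToURL(url: outURL)
//     }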
217 |
--------------------------------------------------------------------------------
/PromptToImage/Global Vars.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Global Vars.swift
3 | // PromptToImage
4 | //
5 | // Created by Hany El Imam on 05/12/22.
6 | //
7 |
8 | import Foundation
9 | import CoreML
10 | import AppKit
11 |
12 |
13 |
14 |
15 | // default model remote URL
16 | let defaultModelPublicURL = "https://www.murusfirewall.com/downloads/Stable-Diffusion-2-1-SPLIT-EINSUM.zip"
17 |
18 | // dirs for custom models and history file
19 | // this is a sandboxed app; these dirs live inside the app's container in the user's home directory
20 | let customModelsDirectoryPath = "models"
21 | let customUpscalersDirectoryPath = "upscale"
22 | let historyPath = "history"
23 |
24 | // win controllers store
25 | var wins = [String:NSWindowController]()
26 |
27 | // sd status
28 | var isRunning = false
29 |
30 | // built-in stable diffusion model (MacAppStore only)
31 | let builtInModelResourcesURL : URL = Bundle.main.resourceURL!
32 | let defaultModelName = "Stable Diffusion 2.1 SPLIT EINSUM"
33 |
34 | // current model resources URL
35 | var currentModelResourcesURL : URL = Bundle.main.resourceURL!
36 | var currentModelRealName : String? = nil {
37 | didSet {
38 | DispatchQueue.main.async {
39 | (wins["main"] as! SDMainWindowController).modelCardBtn.isHidden = currentModelRealName == nil
40 | }
41 | }
42 | }
43 |
44 | // file format
45 | let savefileFormat : NSBitmapImageRep.FileType = .png
46 |
47 | // model image size
48 | var modelWidth : Double = 512
49 | var modelHeight: Double = 512
50 |
51 | // Stable Diffusion pipeline
52 | var sdPipeline : StableDiffusionPipeline? = nil
53 |
54 | // pipeline compute units
55 | let defaultComputeUnits : MLComputeUnits = .cpuAndGPU
56 | var currentComputeUnits : MLComputeUnits = .cpuAndGPU
57 |
58 | // upscaler model
59 | let defaultUpscaleModelPath = Bundle.main.path(forResource: "realesrgan512", ofType: "mlmodelc")
60 | let defaultUpscalerComputeUnits : MLComputeUnits = .cpuAndGPU
61 |
--------------------------------------------------------------------------------
/PromptToImage/History.swift:
--------------------------------------------------------------------------------
1 | //
2 | // History.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 23/12/22.
6 | //
7 |
8 | import Foundation
9 | import Cocoa
10 | import CoreML
11 |
12 |
13 | // MARK: History Item Model
14 |
15 | class HistoryItem : NSObject {
16 | @objc dynamic var modelName = String()
17 | var date = Date()
18 | var originalSize = NSSize()
19 | var upscaledSize : NSSize? = nil
20 | @objc dynamic var prompt = String()
21 | @objc dynamic var negativePrompt = String()
22 | @objc dynamic var steps = Int()
23 | @objc dynamic var guidanceScale = Float()
24 | var inputImage : CGImage? = nil
25 | var strength = Float()
26 | var sampler = String()
27 | var computeUnits = String()
28 | @objc dynamic var image = NSImage()
29 | var upscaledImage : NSImage? = nil
30 | var seed = UInt32()
31 | var upscaled = Bool()
32 |
33 | // Init history item
34 | convenience init(modelName:String,
35 | prompt:String,
36 | negativePrompt:String,
37 | steps:Int,
38 | guidanceScale:Float,
39 | inputImage:CGImage?,
40 | strength:Float,
41 | image:NSImage,
42 | upscaledImage:NSImage?,
43 | seed:UInt32,
44 | sampler:String) {
45 | self.init()
46 | self.modelName = modelName
47 | self.date = Date()
48 | self.prompt = prompt
49 | self.negativePrompt = negativePrompt
50 | self.steps = steps
51 | self.guidanceScale = guidanceScale
52 | self.inputImage = inputImage
53 | self.strength = strength
54 | self.image = image
55 | self.upscaledImage = upscaledImage
56 | self.seed = seed
57 | self.upscaled = self.upscaledImage != nil
58 | self.originalSize = self.image.size
59 | self.upscaledSize = self.upscaledImage?.size
60 | self.sampler = sampler
61 | self.computeUnits = cu2hrstr(cu: currentComputeUnits)
62 | }
63 |
64 |
65 | // Encode history item
66 | func encode() -> Data {
67 | let archiver = NSKeyedArchiver(requiringSecureCoding: true)
68 | archiver.encode(self.modelName, forKey: "modelName")
69 | archiver.encode(self.date, forKey: "date")
70 | archiver.encode(self.prompt, forKey: "prompt")
71 | archiver.encode(self.negativePrompt, forKey: "negativePrompt")
72 | archiver.encode(self.steps, forKey: "steps")
73 | archiver.encode(self.guidanceScale, forKey: "guidanceScale")
74 | archiver.encode(self.strength, forKey: "strength")
75 | archiver.encode(self.seed, forKey: "seed")
76 | archiver.encode(self.sampler, forKey: "sampler")
77 | archiver.encode(self.computeUnits, forKey: "computeUnits")
78 | archiver.encode(self.image.tiffRepresentation, forKey: "image")
79 | if let inputImage = self.inputImage {
80 | archiver.encode(NSImage(cgImage: inputImage, size: .zero).tiffRepresentation, forKey: "inputImage")
81 | }
82 | return archiver.encodedData
83 | }
84 |
85 |
86 | // Decode history item
87 | convenience init?(data:Data) {
88 | self.init()
89 | do {
90 | let unarchiver = try NSKeyedUnarchiver.init(forReadingFrom: data)
91 | unarchiver.requiresSecureCoding = false
92 | defer { unarchiver.finishDecoding() }
93 | //unarchiver.decode
94 | self.modelName = unarchiver.decodeObject(forKey: "modelName") as? String ?? String()
95 | self.sampler = unarchiver.decodeObject(forKey: "sampler") as? String ?? "-"
96 | self.computeUnits = unarchiver.decodeObject(forKey: "computeUnits") as? String ?? "-"
97 | self.date = unarchiver.decodeObject(forKey: "date") as? Date ?? Date()
98 | self.prompt = unarchiver.decodeObject(forKey: "prompt") as? String ?? String()
99 | self.negativePrompt = unarchiver.decodeObject(forKey: "negativePrompt") as? String ?? String()
100 | self.steps = unarchiver.decodeInteger(forKey: "steps")
101 | self.guidanceScale = unarchiver.decodeFloat(forKey: "guidanceScale")
102 | self.strength = unarchiver.decodeFloat(forKey: "strength")
103 | let storedSeed = (unarchiver.decodeObject(forKey: "seed") as? NSNumber) ?? (unarchiver.decodeInteger(forKey: "seed")) as NSNumber
104 | self.seed = UInt32(truncating: storedSeed)
105 |
106 | // original image
107 | if let imageData = unarchiver.decodeObject(forKey: "image") as? Data {
108 | if let image = NSImage(data: imageData) {
109 | self.image = image
110 | }
111 | }
112 | // input image
113 | if let imageData = unarchiver.decodeObject(forKey: "inputImage") as? Data {
114 | if let image = NSImage(data: imageData) {
115 | self.inputImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil)
116 | }
117 | }
118 | //
119 | self.originalSize = self.image.size
120 | self.upscaled = false
121 |
122 |
123 | } catch let error as NSError {
124 | NSLog("Error decoding history item" + error.localizedDescription)
125 | }
126 | }
127 |
128 | }
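
// Round-trip sketch: saveHistory() (below) persists items with encode(), and
// loadHistory() restores them with the failable init:
//
//     let data = item.encode()                 // -> Data for the history plist
//     let restored = HistoryItem(data: data)   // nil if decoding fails
//
// Note that upscaled images are not encoded, so restored items always come back
// with upscaled == false.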
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 | extension SDMainWindowController {
137 |
138 | // MARK: Save History
139 |
140 | func saveHistory() {
141 | if self.settings_keepHistoryBtn.state == .on {
142 | print("Saving history...")
143 | let limit = self.settings_historyLimitStepper.integerValue
144 | let dict : [String:Any] = ["history": (self.history.suffix(limit).map { $0.encode() })]
145 | var confdata : Data? = nil
146 | do {
147 | confdata = try PropertyListSerialization.data(fromPropertyList:dict,
148 | format: .xml,
149 | options: 0)
150 |
151 |
152 | } catch let error as NSError {
153 | NSLog("Error creating history data: " + error.localizedDescription)
154 | }
155 | // save
156 | guard let confdata = confdata else { return }
157 | do {
158 | try confdata.write(to: URL(fileURLWithPath: historyPath + "/PromptToImage.history"))
159 |
160 | } catch let error as NSError {
161 | NSLog("Error!!!!!!!!!!!!!!!!!!!!" + error.localizedDescription)
162 | }
163 | }
164 | }
165 |
166 |
167 |
168 | // MARK: Load History
169 |
170 | func loadHistory() {
171 | if self.settings_keepHistoryBtn.state == .on {
172 | print("loading history...")
173 | DispatchQueue.global().async {
174 | if let historydict = NSDictionary.init(contentsOfFile: historyPath + "/PromptToImage.history") {
175 | if let items = historydict["history"] as? [Data] {
176 | print("importing \(items.count) history items")
177 | for data in items {
178 | if let newitem = HistoryItem(data: data) {
179 | DispatchQueue.main.async {
180 | self.historyArrayController.addObject(newitem)
181 | }
182 | }
183 | }
184 | }
185 | }
186 | }
187 | }
188 | }
189 |
190 |
191 |
192 |
193 | // MARK: Delete History Item
194 |
195 | @IBAction func deleteSelectedHistoryItems(_ sender: Any) {
196 | self.historyArrayController.remove(contentsOf: self.historyArrayController.selectedObjects)
197 | self.imageview.isHidden = true
198 | self.imageControlsView.isHidden = true
199 | }
200 |
201 | }
202 |
--------------------------------------------------------------------------------
/PromptToImage/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>ITSAppUsesNonExemptEncryption</key>
6 | 	<false/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/PromptToImage/Load SD Models.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Load SD Model.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 05/12/22.
6 | //
7 |
8 | import Foundation
9 | import CoreML
10 | import AppKit
11 |
12 |
13 | // MARK: Create Pipeline
14 |
15 | func createStableDiffusionPipeline(computeUnits:MLComputeUnits, url:URL) {
16 | DispatchQueue.main.async {
17 | // show wait window
18 | (wins["main"] as! SDMainWindowController).waitProgr.startAnimation(nil)
19 | (wins["main"] as! SDMainWindowController).waitLabel.stringValue = "Loading Model"
20 | (wins["main"] as! SDMainWindowController).waitInfoLabel.stringValue = url == builtInModelResourcesURL ? "Built-in model" : url.lastPathComponent
21 | (wins["main"] as! SDMainWindowController).clearCUImages()
22 | (wins["main"] as! SDMainWindowController).modelsPopup.isHidden = true
23 | var custring = String()
24 | switch computeUnits {
25 | case .cpuAndNeuralEngine: custring = "CPU and Neural Engine"
26 | case .cpuAndGPU: custring = "CPU and GPU"
27 | case .cpuOnly: custring = "CPU only"
28 | default: custring = "All Compute Units"
29 | }
30 | (wins["main"] as! SDMainWindowController).waitCULabel.stringValue = custring
31 | (wins["main"] as! SDMainWindowController).window?.beginSheet((wins["main"] as! SDMainWindowController).waitWin)
32 | }
33 |
34 |
35 | // clear pipeline
36 | sdPipeline?.unloadResources()
37 | sdPipeline = nil
38 |
39 | // create Stable Diffusion pipeline from CoreML resources
40 | print("creating Stable Diffusion pipeline...")
41 | print("Model: \(url.lastPathComponent)")
42 | print("Model dir path: \(url.path(percentEncoded: false))")
43 |
44 | do {
45 | let config = MLModelConfiguration()
46 | config.computeUnits = computeUnits
47 | sdPipeline = try StableDiffusionPipeline(resourcesAt: url,
48 | configuration:config)
49 | try sdPipeline?.loadResources()
50 | DispatchQueue.main.async {
51 | (wins["main"] as! SDMainWindowController).modelsPopup.isHidden = false
52 | (wins["main"] as! SDMainWindowController).populateModelsPopup()
53 | }
54 | } catch {
55 | print("Unable to create Stable Diffusion pipeline")
56 | sdPipeline = nil
57 | DispatchQueue.main.async {
58 | (wins["main"] as! SDMainWindowController).modelsPopup.isHidden = true
59 | }
60 | }
61 |
62 |
63 |
64 | // close wait window
65 | DispatchQueue.main.async {
66 | (wins["main"] as! SDMainWindowController).window?.endSheet((wins["main"] as! SDMainWindowController).waitWin)
67 | (wins["main"] as! SDMainWindowController).enableImg2Img()
68 | (wins["main"] as! SDMainWindowController).setCUImages()
69 | }
70 |
71 | }
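
// The pipeline recipe above, stripped of UI plumbing (modelDirURL stands in
// for any directory of compiled CoreML resources, e.g. currentModelResourcesURL):
//
//     let config = MLModelConfiguration()
//     config.computeUnits = .cpuAndGPU
//     let pipeline = try StableDiffusionPipeline(resourcesAt: modelDirURL,
//                                                configuration: config)
//     try pipeline.loadResources()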
72 |
73 |
74 |
75 |
76 | // MARK: Reload Model
77 |
78 | func loadSDModel() {
79 | DispatchQueue.global().async {
80 | createStableDiffusionPipeline(computeUnits: currentComputeUnits, url:currentModelResourcesURL)
81 | if sdPipeline == nil {
82 | // error
83 | print("error creating pipeline")
84 | DispatchQueue.main.async {
85 | displayErrorAlert(txt: "Unable to create Stable Diffusion pipeline using model at url \(currentModelResourcesURL)\n\nClick the button below to dismiss this alert and restore default model")
86 | // restore default model and compute units
87 | createStableDiffusionPipeline(computeUnits: defaultComputeUnits,
88 | url: builtInModelResourcesURL)
89 | currentModelResourcesURL = builtInModelResourcesURL
90 | // set user defaults
91 | UserDefaults.standard.set(currentModelResourcesURL, forKey: "modelResourcesURL")
92 | }
93 | } else {
94 | // save to user defaults
95 | UserDefaults.standard.set(currentModelResourcesURL, forKey: "modelResourcesURL")
96 | }
97 | }
98 | }
99 |
100 |
--------------------------------------------------------------------------------
/PromptToImage/Load Upscale Model.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Load Upscale Models.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 25/12/22.
6 | //
7 |
8 | import Foundation
9 |
10 |
11 | func loadUpscalerModel() {
12 | Upscaler.shared.setupUpscaleModelFromPath(path: defaultUpscaleModelPath!, computeUnits: defaultUpscalerComputeUnits)
13 | }
14 |
15 | func loadUpscalerModel(from path:String) {
16 | Upscaler.shared.setupUpscaleModelFromPath(path: path, computeUnits: defaultUpscalerComputeUnits)
17 | }
18 |
--------------------------------------------------------------------------------
/PromptToImage/Main Window Controller/Controls Actions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Controls Actions.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 05/12/22.
6 | //
7 |
8 | import Foundation
9 | import AppKit
10 | import CoreML
11 | import AVFoundation
12 |
13 |
14 | extension SDMainWindowController {
15 |
16 |
17 | // MARK: Click Generate Image
18 |
19 | @IBAction func clickGenerateImage(_ sender: NSButton) {
20 | //guard self.promptView.stringValue != "" else { return }
21 | guard self.promptTextView.string != "" else { return }
22 | self.historyArrayController.setSelectedObjects([])
23 | self.imageview.isHidden = true
24 | self.imageControlsView.isHidden = true
25 | isRunning = true
26 | let inputImage = self.inputImageview.image?.cgImage(forProposedRect: nil, context: nil, hints: nil)
27 | // seed
28 | var seed : UInt32 = UInt32(Int.random(in: 0..<Int(UInt32.max)))
29 | if (self.seedView.integerValue > 0) {
30 | seed = UInt32(self.seedView.integerValue)
31 | }
32 | self.seedView.stringValue = String(seed)
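// Seed semantics: a positive value typed into the seed field reproduces a
// previous result; otherwise a random seed is generated and written back to
// the field so the run can be reproduced later.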
33 |
34 | // generate image
35 | self.generateImage(prompt: self.promptTextView.string, //self.promptView.stringValue,
36 | negativePrompt: self.negativePromptTextView.string, //self.negativePromptView.stringValue,
37 | startingImage: inputImage,
38 | strength: inputImage != nil ? self.strengthLabel.floatValue : Float(1),
39 | imageCount: self.imageCountSlider.integerValue, //self.imageCountStepper.integerValue,
40 | stepCount: self.stepsSlider.integerValue,
41 | seed: seed,
42 | guidanceScale: self.guidanceLabel.floatValue,
43 | scheduler: schedulerPopup.indexOfSelectedItem == 0 ? .pndmScheduler : .dpmSolverMultistepScheduler,
44 | upscale: self.upscaleCheckBox.state == .on)
45 | }
46 |
47 |
48 | // MARK: Stop generate image
49 |
50 | @IBAction func clickStop(_ sender: Any) {
51 | isRunning = false
52 | self.window?.endSheet(self.progrWin)
53 | }
54 |
55 |
56 |
57 |
58 |
59 |
60 | // move selected image to input image from collection view item
61 | @IBAction func clickCopyImageToInputImage(_ sender: NSButton) {
62 | if let pipeline = sdPipeline {
63 | if pipeline.canUseInputImage {
64 | guard let view = sender.superview?.superview else {return}
65 | let row = self.historyTableView.row(for: view)
66 | print("ROW:\(row)")
67 | self.inputImageview.image = self.history[row].image
68 | return
69 | }
70 | }
71 | displayErrorAlert(txt: "Image to Image is not available with current model: VAEEncoder.mlmodelc not found")
72 |
73 | }
74 |
75 |
76 |
77 |
78 |
79 | // MARK: Set Guidance Scale
80 |
81 | @IBAction func setGSlider(_ sender: NSSlider) {
82 | self.guidanceLabel.stringValue = "\(Double(sender.integerValue) / 100)"
83 | }
84 |
85 |
86 |
87 |
88 |
89 | // MARK: IMG2IMG Input Image Controls
90 |
91 | @IBAction func setStrength(_ sender: NSSlider) {
92 | self.strengthLabel.stringValue = "\(Double(sender.integerValue) / 100)"
93 | }
94 |
95 | @IBAction func clearInputImage(_ sender: NSButton) {
96 | self.inputImageview.image = nil
97 | }
98 |
99 |
100 | // IMPORT INPUT IMAGE FROM OPEN PANEL
101 | @IBAction func importInputImage(_ sender: NSButton) {
102 | let myFiledialog:NSOpenPanel = NSOpenPanel()
103 | myFiledialog.allowsMultipleSelection = false
104 | myFiledialog.canChooseDirectories = true
105 | myFiledialog.message = "Import Image"
106 | myFiledialog.runModal()
107 |
108 | if let url = myFiledialog.url {
109 | do {
110 | guard let typeID = try url.resourceValues(forKeys: [.typeIdentifierKey]).typeIdentifier else { return }
111 | guard let supertypes = UTType(typeID)?.supertypes else { return }
112 |
113 | if supertypes.contains(.image) {
114 | if let image = NSImage(contentsOf: url) {
115 | self.insertNewInputImage(image: image)
116 | }
117 | return
118 | }
119 | } catch {
120 | print(error.localizedDescription)
121 | }
122 | }
123 | }
124 |
125 | // IMPORT INPUT IMAGE WITH DRAG AND DROP FROM FINDER
126 | @IBAction func dragInputImage(_ sender: NSImageView) {
127 | if let draggedImage = self.inputImageview.image {
128 | self.insertNewInputImage(image: draggedImage)
129 | }
130 | }
131 |
132 |
133 | // NORMALIZE INPUT IMAGE
134 | func insertNewInputImage(image:NSImage) {
135 | self.inputImageview.image = image.resize(w: modelWidth, h: modelHeight) //image.copy(size: NSSize(width: modelWidth,height: modelHeight))
136 | }
137 |
138 |
139 |
140 | // MARK: ImageKit View Controls
141 |
142 | @IBAction func clickControlsSegmentedCtrl(_ sender: NSSegmentedControl) {
143 | switch sender.indexOfSelectedItem {
144 | case 0:
145 | self.imageview.zoomIn(self)
146 | self.zoomToFit = false
147 | self.viewZoomFactor = self.imageview.zoomFactor
148 | case 1:
149 | self.imageview.zoomOut(self)
150 | self.zoomToFit = false
151 | self.viewZoomFactor = self.imageview.zoomFactor
152 | case 2:
153 | self.imageview.zoomImageToActualSize(self)
154 | self.zoomToFit = false
155 | self.viewZoomFactor = self.imageview.zoomFactor
156 | default:
157 | self.imageview.zoomImageToFit(self)
158 | self.zoomToFit = true
159 | }
160 | }
161 |
162 |
163 |
164 | // MARK: Upscale image from imageview
165 |
166 | @IBAction func clickUpscale(_ sender: NSPopUpButton) {
167 | guard let upscalerUrl = sender.selectedItem?.representedObject as? URL else {
168 | print("import compiled CoreML upscale model from file")
169 | self.importUpscaleModel()
170 | return
171 | }
172 |
173 | guard !self.historyArrayController.selectedObjects.isEmpty else { return }
174 | let displayedHistoryItem = self.historyArrayController.selectedObjects[0] as! HistoryItem
175 |
176 | self.waitLabel.stringValue = "Upscaling image..."
177 | self.waitInfoLabel.stringValue = "Model: \(upscalerUrl.deletingPathExtension().lastPathComponent)"
178 | self.waitCULabel.stringValue = ""
179 | self.window?.beginSheet(self.waitWin)
180 |
181 | DispatchQueue.global().async {
182 |
183 | Upscaler.shared.setupUpscaleModelFromPath(path: upscalerUrl.path, computeUnits: defaultUpscalerComputeUnits)
184 |
185 | guard let upscaledImage = Upscaler.shared.upscaledImage(image: displayedHistoryItem.image) else { return }
186 | displayedHistoryItem.upscaledImage = upscaledImage
187 | displayedHistoryItem.upscaledSize = upscaledImage.size
188 | displayedHistoryItem.upscaled = true
189 | DispatchQueue.main.async {
190 | self.imageview.setImage(upscaledImage.cgImage(forProposedRect: nil, context: nil, hints: nil), imageProperties: [:])
191 | self.imageview.zoomImageToFit(self)
192 | self.zoomToFit = true
193 | self.originalUpscaledSwitch.isHidden = displayedHistoryItem.upscaledImage == nil
194 | self.originalUpscaledSwitch.selectSegment(withTag: displayedHistoryItem.upscaledImage == nil ? 0 : 1)
195 | self.window?.endSheet(self.waitWin)
196 | }
197 | }
198 | }
199 |
200 |
201 | // MARK: Switch Original/Upscaled
202 |
203 | @IBAction func clickOriginalUpscaledSwitch(_ sender: NSSegmentedControl) {
204 | guard !self.historyArrayController.selectedObjects.isEmpty else { return }
205 | let displayedHistoryitem = self.historyArrayController.selectedObjects[0] as! HistoryItem
206 | switch sender.indexOfSelectedItem {
207 | case 0: // original
208 | self.imageview.setImage(displayedHistoryitem.image.cgImage(forProposedRect: nil, context: nil, hints: nil), imageProperties: [:])
209 | // zoom
210 | if self.zoomToFit {
211 | self.imageview.zoomImageToFit(self)
212 | } else {
213 | self.imageview.zoomFactor = viewZoomFactor
214 | }
215 | default: // upscaled
216 | guard let image = displayedHistoryitem.upscaledImage else { break }
217 | self.imageview.setImage(image.cgImage(forProposedRect: nil, context: nil, hints: nil), imageProperties: [:])
218 | // zoom
219 | self.imageview.zoomImageToFit(self)
220 | self.zoomToFit = true
221 | }
222 | }
223 |
224 |
225 |
226 | // MARK: Compute Units Images
227 |
228 |
229 |
230 | func setCUImages() {
231 | self.led_cpu.image = NSImage(named:"cpuon")!
232 | self.led_cpu.isEnabled = true
233 | switch currentComputeUnits {
234 | case .cpuAndGPU:
235 | self.led_gpu.image = NSImage(named:"gpuon")!
236 | self.led_gpu.isEnabled = true
237 | self.led_ane.image = NSImage(named:"aneoff")!
238 | self.led_ane.isEnabled = false
239 | case .cpuAndNeuralEngine:
240 | self.led_gpu.image = NSImage(named:"gpuoff")!
241 | self.led_gpu.isEnabled = false
242 | self.led_ane.image = NSImage(named:"aneon")!
243 | self.led_ane.isEnabled = true
244 | default:
245 | self.led_gpu.image = NSImage(named:"gpuon")!
246 | self.led_gpu.isEnabled = true
247 | self.led_ane.image = NSImage(named:"aneon")!
248 | self.led_ane.isEnabled = true
249 | }
250 | }
251 |
252 | func clearCUImages() {
253 | self.led_cpu.image = NSImage(named:"cpuoff")!
254 | self.led_cpu.isEnabled = false
255 | self.led_ane.image = NSImage(named:"aneoff")!
256 | self.led_ane.isEnabled = false
257 | self.led_gpu.image = NSImage(named:"gpuoff")!
258 | self.led_gpu.isEnabled = false
259 | }
260 |
261 |
262 | }
263 |
264 |
265 |
266 | let dateFormatter: DateFormatter = {
267 | let formatter = DateFormatter()
268 | formatter.locale = Locale.autoupdatingCurrent
269 | formatter.timeZone = TimeZone.autoupdatingCurrent
270 | formatter.dateStyle = .long
271 | formatter.timeStyle = .medium
272 | formatter.doesRelativeDateFormatting = true
273 | return formatter
274 | }()
275 |
276 |
277 |
278 |
279 |
280 |
281 |
282 |
283 |
284 |
285 |
286 |
287 | @IBDesignable class FlatButton: NSButton {
288 | @IBInspectable var cornerRadius: CGFloat = 5
289 |
290 | @IBInspectable var dxPadding: CGFloat = 0
291 | @IBInspectable var dyPadding: CGFloat = 0
292 |
293 | @IBInspectable var backgroundColor: NSColor = .controlAccentColor
294 |
295 | @IBInspectable var imageName: String = "NSActionTemplate"
296 |
297 | override func draw(_ dirtyRect: NSRect) {
298 | // Set corner radius
299 | self.wantsLayer = true
300 | self.layer?.cornerRadius = cornerRadius
301 |
302 | // Darken background color when highlighted
303 | if isHighlighted {
304 | layer?.backgroundColor = backgroundColor.blended(
305 | withFraction: 0.2, of: .black
306 | )?.cgColor
307 | } else {
308 | layer?.backgroundColor = backgroundColor.cgColor
309 | }
310 |
311 | // Set Image
312 | imagePosition = .imageLeading
313 | //image = NSImage(named: imageName)
314 |
315 | // Reset the bounds after drawing is complete
316 | let originalBounds = self.bounds
317 | defer { self.bounds = originalBounds }
318 |
319 | // Inset bounds by padding
320 | self.bounds = originalBounds.insetBy(
321 | dx: dxPadding, dy: dyPadding
322 | )
323 |
324 | // Super
325 | super.draw(dirtyRect)
326 | }
327 | }
328 |
--------------------------------------------------------------------------------
/PromptToImage/Main Window Controller/Controls Values.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Controls Values.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 25/12/22.
6 | //
7 |
8 | import Foundation
9 | import AppKit
10 |
11 | extension SDMainWindowController {
12 |
13 |
14 |
15 | func storeControlsValues() {
16 | #if DEBUG
17 | //UserDefaults.standard.setValue(self.promptView.stringValue, forKey: "prompt")
18 | UserDefaults.standard.setValue(self.promptTextView.string, forKey: "prompt")
19 | //UserDefaults.standard.setValue(self.negativePromptView.stringValue, forKey: "negative")
20 | UserDefaults.standard.setValue(self.negativePromptTextView.string, forKey: "negative")
21 | #endif
22 | UserDefaults.standard.setValue(self.stepsSlider.doubleValue, forKey: "steps")
23 | UserDefaults.standard.setValue(self.upscaleCheckBox.state == .on, forKey: "upscale")
24 | UserDefaults.standard.setValue(self.guidanceLabel.floatValue, forKey: "guidance")
25 | UserDefaults.standard.setValue(self.settings_selectDefaultCU.state == .on, forKey: "alwaysSetDefaultCUwhenSwitchingModel")
26 | UserDefaults.standard.setValue(self.settings_keepHistoryBtn.state == .on, forKey: "keepHistory")
27 | UserDefaults.standard.setValue(self.settings_historyLimitStepper.doubleValue, forKey: "historyLimit")
28 | UserDefaults.standard.setValue(self.schedulerPopup.indexOfSelectedItem, forKey: "schedulerPopupItem")
29 | UserDefaults.standard.setValue(Float(self.viewZoomFactor), forKey: "viewZoomFactor")
30 | UserDefaults.standard.setValue(self.zoomToFit, forKey: "zoomToFit")
31 | }
32 |
33 |
34 |
35 |
36 |
37 | func readStoredControlsValues() {
38 | #if DEBUG
39 | //self.promptView.stringValue = UserDefaults.standard.value(forKey: "prompt") as? String ?? String()
40 | //self.promptTextView.string = UserDefaults.standard.value(forKey: "prompt") as? String ?? String()
41 | //self.negativePromptView.stringValue = UserDefaults.standard.value(forKey: "negative") as? String ?? String()
42 | //self.negativePromptTextView.string = UserDefaults.standard.value(forKey: "negative") as? String ?? String()
43 | #endif
44 | self.stepsSlider.integerValue = Int(UserDefaults.standard.value(forKey: "steps") as? Double ?? 25)
45 | self.stepsLabel.stringValue = String(self.stepsSlider.integerValue)
46 | self.upscaleCheckBox.state = (UserDefaults.standard.value(forKey: "upscale") as? Bool ?? true) ? .on : .off
47 | let guidance = UserDefaults.standard.value(forKey: "guidance") as? Float ?? 7.50
48 | self.guidanceSlider.doubleValue = Double(Int(guidance * 100))
49 | self.guidanceLabel.stringValue = String(guidance)
50 | self.settings_selectDefaultCU.state = (UserDefaults.standard.value(forKey: "alwaysSetDefaultCUwhenSwitchingModel") as? Bool ?? true) ? .on : .off
51 | self.settings_keepHistoryBtn.state = (UserDefaults.standard.value(forKey: "keepHistory") as? Bool ?? true) ? .on : .off
52 | self.settings_historyLimitStepper.integerValue = Int(UserDefaults.standard.value(forKey: "historyLimit") as? Double ?? 50)
53 | self.settings_historyLimitLabel.stringValue = String(self.settings_historyLimitStepper.integerValue)
54 | self.schedulerPopup.selectItem(at: UserDefaults.standard.value(forKey: "schedulerPopupItem") as? Int ?? 0)
55 | self.viewZoomFactor = UserDefaults.standard.value(forKey: "viewZoomFactor") as? CGFloat ?? 1
56 | self.zoomToFit = UserDefaults.standard.value(forKey: "zoomToFit") as? Bool ?? true
57 | }
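
// Note: the guidance slider works in integer hundredths (750 == 7.5), hence the
// * 100 here and the / 100 in setGSlider(_:); UserDefaults stores the plain Float
// shown in the label.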
58 |
59 |
60 | }
61 |
--------------------------------------------------------------------------------
/PromptToImage/Main Window Controller/Display Result.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Display Result.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 25/12/22.
6 | //
7 |
8 | import Foundation
9 | import AppKit
10 |
11 | extension SDMainWindowController {
12 |
13 |
14 | // MARK: Display Results
15 |
16 | func displayResult(images:[CGImage?],
17 | upscale:Bool,
18 | prompt:String,
19 | negativePrompt:String,
20 | startingImage: CGImage?,
21 | strength: Float,
22 | stepCount:Int,
23 | seed:UInt32,
24 | guidanceScale:Float) {
25 |
26 | if images.count == 1 {
27 | self.displaySingleImage(image: images[0], upscale: upscale, prompt: prompt, negativePrompt: negativePrompt, startingImage: startingImage, strength: strength, stepCount: stepCount, seed: seed, guidanceScale: guidanceScale)
28 | } else if images.count > 1 {
29 | self.displayMultipleImages(images: images, upscale: upscale, prompt: prompt, negativePrompt: negativePrompt, startingImage: startingImage, strength: strength, stepCount: stepCount, seed: seed, guidanceScale: guidanceScale)
30 | }
31 |
32 | }
33 |
34 |
35 |
36 |
37 | // MARK: Display Multiple Images
38 |
39 | // display collection view
40 | func displayMultipleImages(images:[CGImage?],
41 | upscale:Bool,
42 | prompt:String,
43 | negativePrompt:String,
44 | startingImage: CGImage?,
45 | strength: Float,
46 | stepCount:Int,
47 | seed:UInt32,
48 | guidanceScale:Float) {
49 |
50 | DispatchQueue.main.async {
51 | self.progrLabel.stringValue = "Upscaling images..."
52 | self.speedLabel.stringValue = "Model: \(currentUpscalerName)"
53 | self.indindicator.isHidden = false
54 | self.indicator.isHidden = true
55 | let historyCount = self.history.count
56 | let sampler = self.schedulerPopup.titleOfSelectedItem ?? String()
57 |
58 | DispatchQueue.global().async {
59 | var theseed = seed
60 | for cgimage in images {
61 | if cgimage != nil {
62 | let nsimage = NSImage(cgImage: cgimage!,size: .zero)
63 | if upscale {
64 | if let upscaledImage = Upscaler.shared.upscaledImage(image: nsimage) {
65 |
66 | // add history item
67 | let item = HistoryItem(modelName: currentModelName(),
68 | prompt: prompt,
69 | negativePrompt: negativePrompt,
70 | steps: stepCount,
71 | guidanceScale: guidanceScale,
72 | inputImage: startingImage,
73 | strength: strength,
74 | image: nsimage,
75 | upscaledImage: upscaledImage,
76 | seed: theseed,
77 | sampler: sampler)
78 | DispatchQueue.main.async {
79 | self.historyArrayController.addObject(item)
80 | if cgimage == images.last {
81 | self.historyArrayController.setSelectionIndex(historyCount)
82 | self.historyTableView.scrollRowToVisible(historyCount + 1)
83 | }
84 |
85 | }
86 | }
87 | } else {
88 |
89 | // add history item
90 | let item = HistoryItem(modelName: currentModelName(),
91 | prompt: prompt,
92 | negativePrompt: negativePrompt,
93 | steps: stepCount,
94 | guidanceScale: guidanceScale,
95 | inputImage: startingImage,
96 | strength: strength,
97 | image: nsimage,
98 | upscaledImage: nil,
99 | seed: theseed,
100 | sampler: sampler)
101 | DispatchQueue.main.async {
102 | self.historyArrayController.addObject(item)
103 | if cgimage == images.last {
104 | self.historyArrayController.setSelectionIndex(historyCount)
105 | self.historyTableView.scrollRowToVisible(historyCount + 1)
106 | }
107 |
108 | }
109 | }
110 | }
111 | theseed = theseed + 1
112 | }
113 | }
114 |
115 | //
116 | self.window?.endSheet(self.progrWin)
117 |
118 | }
119 | }
120 |
121 |
122 |
123 |
124 | // MARK: Display Single Image
125 |
126 | // display image view
127 | func displaySingleImage(image:CGImage?,
128 | upscale:Bool,
129 | prompt:String,
130 | negativePrompt:String,
131 | startingImage: CGImage?,
132 | strength: Float,
133 | stepCount:Int,
134 | seed:UInt32,
135 | guidanceScale:Float) {
136 |
137 | // bail out if the pipeline produced no image
138 | guard let image = image else {
139 |     print("ERROR image is nil")
140 |     DispatchQueue.main.async {
141 |         self.window?.endSheet(self.progrWin)
142 |     }
143 |     return
144 | }
145 |
146 | let nsimage = NSImage(cgImage: image, size: .zero)
147 | DispatchQueue.main.async {
148 | isRunning = false
149 | let sampler = self.schedulerPopup.titleOfSelectedItem ?? String()
150 |
151 | if upscale {
152 | // UPSCALE OUTPUT IMAGE
153 |
154 | self.progrLabel.stringValue = "Upscaling image..."
155 | self.speedLabel.stringValue = "Model: \(currentUpscalerName)"
156 | self.indindicator.isHidden = false
157 | self.indicator.isHidden = true
158 | print("upscaling image...")
159 | DispatchQueue.global().async {
160 | if let upscaledImage = Upscaler.shared.upscaledImage(image: nsimage) {
161 |
162 | // add history item
163 | let item = HistoryItem(modelName: currentModelName(),
164 | prompt: prompt,
165 | negativePrompt: negativePrompt,
166 | steps: stepCount,
167 | guidanceScale: guidanceScale,
168 | inputImage: startingImage,
169 | strength: strength,
170 | image: nsimage,
171 | upscaledImage: upscaledImage,
172 | seed: seed,
173 | sampler: sampler)
174 |
175 | DispatchQueue.main.async {
176 | self.historyArrayController.addObject(item)
177 | self.historyArrayController.setSelectedObjects([item])
178 | self.historyTableView.scrollToEndOfDocument(nil)
179 | // close wait window
180 | self.window?.endSheet(self.progrWin)
181 | }
182 |
183 | } else { DispatchQueue.main.async { self.window?.endSheet(self.progrWin) } } // upscaling failed: still close the wait window
184 | }
185 | } else {
186 | // add history item
187 | let item = HistoryItem(modelName: currentModelName(),
188 | prompt: prompt,
189 | negativePrompt: negativePrompt,
190 | steps: stepCount,
191 | guidanceScale: guidanceScale,
192 | inputImage: startingImage,
193 | strength: strength,
194 | image: nsimage,
195 | upscaledImage: nil,
196 | seed: seed,
197 | sampler: sampler)
198 | // already on the main queue here: update history and close the wait window
199 | self.historyArrayController.addObject(item)
200 | self.historyArrayController.setSelectedObjects([item])
201 | self.historyTableView.scrollToEndOfDocument(nil)
202 |
203 | // close wait window
204 | self.window?.endSheet(self.progrWin)
205 | }
206 |
207 |
208 | }
209 | }
210 |
211 |
212 | }
213 |
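214 | // For illustration only: the batch loop above hands out seeds sequentially,
215 | // one per generated image (base seed, base seed + 1, ...). A minimal sketch of
216 | // that convention as a pure helper (hypothetical, not referenced elsewhere):
217 | func sequentialSeeds(count: Int, startingAt baseSeed: UInt32) -> [UInt32] {
218 | (0..<UInt32(count)).map { baseSeed &+ $0 } // &+ wraps instead of trapping at UInt32.max
219 | }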
--------------------------------------------------------------------------------
/PromptToImage/Main Window Controller/Generate Image.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Generate Image.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 03/12/22.
6 | //
7 |
8 | import Foundation
9 | import Cocoa
10 | import CoreML
11 |
12 |
13 |
14 | extension SDMainWindowController {
15 |
16 |
17 |
18 |
19 | // MARK: Generate Image
20 |
21 | func generateImage(prompt:String,
22 | negativePrompt:String,
23 | startingImage: CGImage? = nil,
24 | strength: Float = 1,
25 | imageCount: Int = 1,
26 | stepCount:Int,
27 | seed:UInt32,
28 | guidanceScale:Float,
29 | scheduler:StableDiffusionScheduler,
30 | upscale:Bool) {
31 |
32 | // resize input image if needed
33 | let inputImage = self.resizeInputImage(image: startingImage)
34 |
35 | // set labels and indicators
36 | self.indicator.doubleValue = 0
37 | self.indicator.maxValue = startingImage == nil ? Double(stepCount) : Double(Float(stepCount) * strength)
38 | self.indicator.isHidden = true
39 | self.indindicator.isHidden = false
40 | self.indindicator.startAnimation(nil)
41 | self.progrLabel.stringValue = "Waiting for pipeline..."
42 | self.speedLabel.isHidden = true
43 | // show wait win
44 | self.window?.beginSheet(self.progrWin)
45 |
46 | // generate images
47 | DispatchQueue.global(qos: .userInitiated).async {
48 | do {
49 | if let pipeline = sdPipeline {
50 | // timer for performance indicator
51 | let sampleTimer = SampleTimer() // used for pipeline performance indicator
52 | sampleTimer.start()
53 |
54 | // generate images
55 | let images = try pipeline.generateImages(prompt: prompt,
56 | negativePrompt: negativePrompt,
57 | startingImage: inputImage,
58 | strength: strength,
59 | imageCount: imageCount,
60 | stepCount: stepCount,
61 | seed: seed,
62 | guidanceScale: guidanceScale,
63 | disableSafety: true,
64 | scheduler: scheduler) {
65 | progress in
66 | return self.handleProgress(progress, imageCount: imageCount, sampleTimer:sampleTimer)
67 | }
68 |
69 |
70 | // display images
71 | self.displayResult(images: images,
72 | upscale: upscale,
73 | prompt: prompt,
74 | negativePrompt: negativePrompt,
75 | startingImage: inputImage,
76 | strength: strength,
77 | stepCount: stepCount,
78 | seed: seed,
79 | guidanceScale: guidanceScale)
80 |
81 |
82 | } else {
83 | print("ERROR: cannot create pipeline")
84 | DispatchQueue.main.async { self.window?.endSheet(self.progrWin) }
85 | }
86 |
87 | } catch {
88 | print("ERROR \(error)")
89 | DispatchQueue.main.async { self.window?.endSheet(self.progrWin) }
90 | }
91 | }
92 | }
93 |
94 |
95 |
96 |
97 | // MARK: Handle Progress
98 |
99 | private func handleProgress(_ progress: StableDiffusionPipeline.Progress,
100 | imageCount:Int,
101 | sampleTimer:SampleTimer) -> Bool {
102 | DispatchQueue.main.async {
103 |
104 | //self.imageview.isHidden = false
105 | //self.imageview.setImage(progress.currentImages.last!, imageProperties: [:])
106 | // progress indicator
107 | self.indicator.doubleValue = Double(progress.step)
108 | if progress.step > 0 {
109 | self.speedLabel.isHidden = false
110 | self.progrLabel.stringValue = imageCount == 1 ? "Generating image..." : "Generating \(imageCount) images..."
111 | self.indindicator.isHidden = true
112 | self.indicator.isHidden = false
113 | }
114 | // performance indicator
115 | sampleTimer.stop()
116 | self.speedLabel.stringValue = String(format: "Speed: %.2f step/sec", 1.0 / sampleTimer.median * Double(imageCount))
117 | if progress.stepCount != progress.step { sampleTimer.start() }
118 |
119 | }
120 | return isRunning
121 | }
122 |
123 |
124 |
125 | // MARK: Resize Input Image
126 | func resizeInputImage(image:CGImage?) -> CGImage? {
127 | guard let cgimage = image else { return nil }
128 | print("original input image size: \(cgimage.width)x\(cgimage.height)")
129 | guard let nsimage = resizeImage(image: NSImage(cgImage: cgimage, size: .zero), new_width: modelWidth, new_height: modelHeight) else { return nil }
130 | let resizedCGimage = nsimage.cgImage(forProposedRect: nil, context: nil, hints: nil)
131 | print("resized input image size: \(resizedCGimage?.width ?? 0)x\(resizedCGimage?.height ?? 0)")
132 | return resizedCGimage
133 | }
134 |
135 | }
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
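144 | // For illustration only: handleProgress(_:imageCount:sampleTimer:) returns the
145 | // global `isRunning` flag to the pipeline; returning false from the progress
146 | // callback asks generateImages to stop after the current step. A hypothetical
147 | // cancel action could therefore be as small as:
148 | //
149 | // @IBAction func clickCancelGeneration(_ sender: Any) { isRunning = false }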
--------------------------------------------------------------------------------
/PromptToImage/Main Window Controller/Info Popover.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Info Popover.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 25/12/22.
6 | //
7 |
8 | import Foundation
9 | import AppKit
10 |
11 | extension SDMainWindowController {
12 |
13 |
14 |
15 | // MARK: Info Popover
16 |
17 | // display image info popover
18 | @IBAction func clickDisplayInfoPopover(_ sender: NSButton) {
19 | guard let view = sender.superview?.superview else {return}
20 | let row = self.historyTableView.row(for: view)
21 | guard row >= 0 else { return } // sender is not inside a table row
22 | self.presentPopover(originview: sender as NSView, edge: NSRectEdge.maxX, historyItem: self.history[row])
23 |
24 | }
25 |
26 |
27 |
28 | // MARK: Draw Info Popover
29 |
30 | // create info Popover
31 | func presentPopover(originview:NSView,
32 | edge:NSRectEdge?,
33 | historyItem:HistoryItem) {
34 | self.setInfoPopover(item: historyItem)
35 | self.currentHistoryItemForInfoPopover = historyItem
36 | infoPopover = NSPopover()
37 | let popoverCtrl = NSViewController()
38 | popoverCtrl.view = self.infoPopoverView
39 | infoPopover!.contentViewController = popoverCtrl
40 | infoPopover!.behavior = NSPopover.Behavior.transient
41 | infoPopover!.animates = true
42 | infoPopover!.show(relativeTo: originview.bounds, of: originview, preferredEdge: edge ?? NSRectEdge.minY)
43 | }
44 |
45 |
46 | // draw info popover
47 | func setInfoPopover(item:HistoryItem) {
48 | self.info_date.stringValue = dateFormatter.string(from: item.date)
49 | self.info_model.stringValue = item.modelName
50 | self.info_promptTextView.string = item.prompt
51 | self.info_sampler.stringValue = item.sampler
52 | self.info_negPromptTextView.string = item.negativePrompt
53 | self.info_seed.stringValue = String(item.seed)
54 | self.info_steps.stringValue = String(item.steps)
55 | self.info_guidance.stringValue = String(item.guidanceScale)
56 | self.info_strength.stringValue = String(item.strength)
57 | self.info_cu.stringValue = item.computeUnits
58 | self.info_inputImage.image = NSImage()
59 | if let cgimage = item.inputImage {
60 | self.info_inputImage.image = NSImage(cgImage: cgimage, size: .zero)
61 | self.info_inputImageView.isHidden = false
62 | } else {
63 | self.info_inputImageView.isHidden = true
64 | }
65 | let size = item.originalSize
66 | self.info_size.stringValue = "\(String(Int(size.width)))x\(String(Int(size.height)))"
67 | self.info_upscaledView.isHidden = !item.upscaled
68 | // self.info_upscaleBtn.isHidden = item.upscaled
69 | if let upscaledImage = item.upscaledImage {
70 | self.info_upscaledsize.stringValue = "\(String(Int(upscaledImage.width)))x\(String(Int(upscaledImage.height)))"
71 | }
72 | }
73 |
74 |
75 | // MARK: Delete upscaled image from Info Popover
76 |
77 | @IBAction func info_removeUpscaledImage(_ sender: Any) {
78 | if let item = self.currentHistoryItemForInfoPopover {
79 | item.upscaledImage = nil
80 | item.upscaledSize = nil
81 | item.upscaled = false
82 | if !self.historyArrayController.selectedObjects.isEmpty {
83 | if let firstItem = self.historyArrayController.selectedObjects[0] as? HistoryItem {
84 | let image = firstItem.upscaledImage ?? firstItem.image
85 | self.imageview.setImage(image.cgImage(forProposedRect: nil, context: nil, hints: nil), imageProperties: [:])
86 |
87 | if self.zoomToFit {
88 | self.imageview.zoomImageToFit(self)
89 | } else {
90 | self.imageview.zoomFactor = viewZoomFactor
91 | }
92 | }
93 | }
94 | self.setInfoPopover(item: item)
95 | }
96 | }
97 |
98 |
99 |
100 | /* Upscale image from Info Popover
101 |
102 | @IBAction func info_upscaleImage(_ sender: Any) {
103 | if let item = self.currentHistoryItemForInfoPopover {
104 | self.info_progress.isHidden = false
105 | self.info_progress.startAnimation(nil)
106 | DispatchQueue.global().async {
107 | let upscaledImage = Upscaler.shared.upscaledImage(image: item.image)
108 | DispatchQueue.main.async {
109 | item.upscaledImage = upscaledImage
110 | item.upscaledSize = upscaledImage?.size
111 | item.upscaled = true
112 |
113 | if !self.historyArrayController.selectedObjects.isEmpty {
114 | if let firstItem = self.historyArrayController.selectedObjects[0] as? HistoryItem {
115 | let image = firstItem.upscaledImage ?? firstItem.image
116 | self.imageview.setImage(image.cgImage(forProposedRect: nil, context: nil, hints: nil), imageProperties: [:])
117 | self.imageview.zoomImageToFit(self)
118 | self.zoomToFit = true
119 | }
120 | }
121 |
122 | self.info_progress.stopAnimation(nil)
123 | self.info_progress.isHidden = true
124 | self.setInfoPopover(item: item)
125 | }
126 |
127 | }
128 | }
129 | }
130 | */
131 |
132 |
133 |
134 | // MARK: info popover actions
135 |
136 | // prompt
137 | @IBAction func infoCopyPrompt(_ sender: Any) {
138 | //self.promptView.stringValue = info_promptTextView.string
139 | self.promptTextView.string = info_promptTextView.string
140 | }
141 | // negative prompt
142 | @IBAction func infoCopyNegativePrompt(_ sender: Any) {
143 | //self.negativePromptView.stringValue = info_negPromptTextView.string
144 | self.negativePromptTextView.string = info_negPromptTextView.string
145 | }
146 | // seed
147 | @IBAction func infoCopySeed(_ sender: Any) {
148 | self.seedView.stringValue = info_seed.stringValue
149 | self.seedBtn.state = .off
150 | self.seedView.isSelectable = true
151 | self.seedView.isEditable = true
152 | }
153 | // steps
154 | @IBAction func infoCopySteps(_ sender: Any) {
155 | self.stepsSlider.integerValue = info_steps.integerValue
156 | self.stepsLabel.integerValue = info_steps.integerValue
157 | }
158 | // guidance scale
159 | @IBAction func infoCopyGuidance(_ sender: Any) {
160 | self.guidanceSlider.doubleValue = info_guidance.doubleValue * 100
161 | self.guidanceLabel.stringValue = String(info_guidance.doubleValue)
162 | }
163 | // input image strength
164 | @IBAction func infoCopyStrength(_ sender: Any) {
165 | self.strengthSlider.doubleValue = info_strength.doubleValue * 100
166 | self.strengthLabel.stringValue = String(info_strength.doubleValue)
167 | }
168 | // input image
169 | @IBAction func infoCopyInputImage(_ sender: Any) {
170 | if let pipeline = sdPipeline {
171 | if pipeline.canUseInputImage {
172 | if let image = self.info_inputImage.image {
173 | self.inputImageview.image = image
174 | return
175 | }
176 | }
177 | }
178 | displayErrorAlert(txt: "Image to Image is not available with current model: VAEEncoder.mlmodelc not found")
179 | }
180 |
181 |
182 |
183 | // MARK: Copy to clipboard
184 |
185 | @IBAction func copyPromptToClipBoard(_ sender: Any) {
186 | let pasteboard = NSPasteboard.general
187 | pasteboard.declareTypes([.string], owner: nil)
188 | pasteboard.setString(self.info_promptTextView.string, forType: .string)
189 | }
190 |
191 | @IBAction func copyNegativePromptToClipBoard(_ sender: Any) {
192 | let pasteboard = NSPasteboard.general
193 | pasteboard.declareTypes([.string], owner: nil)
194 | pasteboard.setString(self.info_negPromptTextView.string, forType: .string)
195 | }
196 | }
197 |
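198 | // Note on the copy actions above: guidanceSlider and strengthSlider store the
199 | // value multiplied by 100 (integer slider ticks), while the labels keep the raw
200 | // value. For example, a guidance scale of 7.5 round-trips as:
201 | //
202 | // self.guidanceSlider.doubleValue = 7.5 * 100 // slider position
203 | // self.guidanceLabel.stringValue = "7.5"      // displayed value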
--------------------------------------------------------------------------------
/PromptToImage/Main Window Controller/Resize Image.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Resize Image.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 25/12/22.
6 | //
7 |
8 | import Foundation
9 | import AppKit
10 |
11 |
12 | func resizeImage(image:NSImage,
13 | new_width:Double,
14 | new_height:Double) -> NSImage? {
15 |
16 | let newSize = NSSize(width: new_width, height: new_height)
17 |
18 | if let bitmapRep = NSBitmapImageRep(
19 | bitmapDataPlanes: nil, pixelsWide: Int(newSize.width), pixelsHigh: Int(newSize.height),
20 | bitsPerSample: 8, samplesPerPixel: 4, hasAlpha: true, isPlanar: false,
21 | colorSpaceName: .calibratedRGB, bytesPerRow: 0, bitsPerPixel: 0
22 | ) {
23 | bitmapRep.size = newSize
24 | NSGraphicsContext.saveGraphicsState()
25 | NSGraphicsContext.current = NSGraphicsContext(bitmapImageRep: bitmapRep)
26 | image.draw(in: NSRect(x: 0, y: 0, width: newSize.width, height: newSize.height), from: .zero, operation: .copy, fraction: 1.0)
27 | NSGraphicsContext.restoreGraphicsState()
28 |
29 | let resizedImage = NSImage(size: newSize)
30 | resizedImage.addRepresentation(bitmapRep)
31 |
32 | return resizedImage
33 | }
34 | return nil
35 | }
36 |
37 |
38 | extension NSImage {
39 | func resize(w: Double, h: Double) -> NSImage {
40 | let destSize = NSMakeSize(CGFloat(w), CGFloat(h))
41 | let newImage = NSImage(size: destSize)
42 | newImage.lockFocus()
43 | self.draw(in: NSMakeRect(0, 0, destSize.width, destSize.height), from: NSMakeRect(0, 0, self.size.width, self.size.height), operation: NSCompositingOperation.sourceOver, fraction: CGFloat(1))
44 | newImage.unlockFocus()
45 | newImage.size = destSize
46 | return NSImage(data: newImage.tiffRepresentation ?? Data()) ?? newImage // avoid force-unwrap crash
47 | }
48 | }
49 |
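50 | // Usage sketch (values assumed): the Stable Diffusion models expect a fixed
51 | // input size, e.g. 512x512 for the bundled model, so callers pass the
52 | // modelWidth/modelHeight globals. Note that both helpers scale to the exact
53 | // target size and do not preserve the source aspect ratio:
54 | //
55 | // let input = resizeImage(image: original, new_width: modelWidth, new_height: modelHeight)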
--------------------------------------------------------------------------------
/PromptToImage/Main Window Controller/SDMainWindowController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SDMainWindowController.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 05/12/22.
6 | //
7 |
8 | import Cocoa
9 | import Quartz
10 |
11 | func displayErrorAlert(txt:String) {
12 | DispatchQueue.main.async {
13 | let alert = NSAlert()
14 | alert.messageText = "Error"
15 | alert.informativeText = txt
16 | alert.runModal()
17 | }
18 | }
19 |
20 |
21 | class SDMainWindowController: NSWindowController,
22 | NSWindowDelegate,
23 | NSSharingServicePickerDelegate,
24 | NSSplitViewDelegate,
25 | NSMenuDelegate,
26 | NSTableViewDelegate {
27 |
28 |
29 | @IBOutlet weak var imagescrollview: NSScrollView!
30 | @IBOutlet weak var imageview: IKImageView!
31 | @IBOutlet weak var imageControlsView: NSView!
32 | @IBOutlet weak var stepsSlider: NSSlider!
33 | @IBOutlet weak var stepsLabel: NSTextField!
34 | @IBOutlet weak var indicator: NSProgressIndicator!
35 | @IBOutlet weak var indindicator: NSProgressIndicator!
36 | @IBOutlet weak var mainBtn: NSButton!
37 | @IBOutlet var promptTextView: NSTextView!
38 | @IBOutlet var negativePromptTextView: NSTextView!
39 | //@IBOutlet weak var promptView: NSTextField!
40 | //@IBOutlet weak var negativePromptView: NSTextField!
41 | @IBOutlet weak var waitWin: NSWindow!
42 | @IBOutlet weak var waitProgr: NSProgressIndicator!
43 | @IBOutlet weak var waitLabel: NSTextField!
44 | @IBOutlet weak var waitInfoLabel: NSTextField!
45 | @IBOutlet weak var waitCULabel: NSTextField!
46 | @IBOutlet weak var unitsPopup: NSPopUpButton!
47 | @IBOutlet weak var progrWin: NSWindow!
48 | @IBOutlet weak var progrLabel: NSTextField!
49 | @IBOutlet weak var upscaleCheckBox: NSButton!
50 | @IBOutlet weak var speedLabel: NSTextField!
51 | @IBOutlet weak var guidanceLabel: NSTextField!
52 | @IBOutlet weak var guidanceSlider: NSSlider!
53 | // img2img
54 | @IBOutlet weak var img2imgView: NSView!
55 | @IBOutlet weak var strengthSlider: NSSlider!
56 | @IBOutlet weak var str_clearBtn: NSButton!
57 | @IBOutlet weak var str_importBtn: NSButton!
58 | @IBOutlet weak var str_label: NSTextField!
59 | @IBOutlet weak var strengthLabel: NSTextField!
60 | @IBOutlet weak var inputImageview: NSImageView!
61 | // images count
62 | @IBOutlet weak var imageCountSlider: NSSlider!
63 | @IBOutlet weak var imageCountLabel: NSTextField!
64 | // seed
65 | @IBOutlet weak var seedView: NSTextField!
66 | @IBOutlet weak var seedBtn: NSButton!
67 | @IBAction func switchSeedBtn(_ sender: NSButton) {
68 | self.seedView.isSelectable = sender.state == .off
69 | self.seedView.isEditable = sender.state == .off
70 | }
71 | // scheduler
72 | @IBOutlet weak var schedulerPopup: NSPopUpButton!
73 | // models
74 | @IBOutlet weak var modelsPopup: NSPopUpButton!
75 | // table view menu items
76 | @IBOutlet weak var item_saveAllSelectedImages: NSMenuItem!
77 | // history
78 | @IBOutlet weak var historyTableView: NSTableView!
79 | @objc dynamic var history = [HistoryItem]()
80 | @IBOutlet var historyArrayController: NSArrayController!
81 | @IBOutlet weak var settings_keepHistoryBtn: NSButton!
82 | var currentHistoryItemForSharePicker : HistoryItem? = nil // used for sharing a single item from item's share btn
83 | // info popover
84 | var currentHistoryItemForInfoPopover : HistoryItem? = nil
85 | var infoPopover : NSPopover? = nil
86 | @IBOutlet var infoPopoverView: NSView!
87 | @IBOutlet weak var info_date: NSTextField!
88 | @IBOutlet weak var info_seed: NSTextField!
89 | @IBOutlet weak var info_steps: NSTextField!
90 | @IBOutlet weak var info_guidance: NSTextField!
91 | @IBOutlet weak var info_inputImage: NSImageView!
92 | @IBOutlet weak var info_strength: NSTextField!
93 | @IBOutlet weak var info_size: NSTextField!
94 | @IBOutlet weak var info_upscaledsize: NSTextField!
95 | @IBOutlet weak var info_model: NSTextField!
96 | @IBOutlet weak var info_sampler: NSTextField!
97 | @IBOutlet weak var info_inputImageView: NSView!
98 | @IBOutlet weak var info_btn_copyStrength: NSButton!
99 | @IBOutlet var info_promptTextView: NSTextView!
100 | @IBOutlet var info_negPromptTextView: NSTextView!
101 | @IBOutlet weak var info_cu: NSTextField!
102 |
103 | @IBOutlet weak var info_btn_copyInputImage: NSButton!
104 | @IBOutlet var info_upscaledView: NSView!
105 |
106 | // Settings
107 | @IBOutlet var settingsWindow: NSWindow!
108 | @IBOutlet weak var modelsPopupMenu: NSMenu!
109 | @IBOutlet weak var settings_selectDefaultCU: NSButton!
110 | @IBOutlet weak var settings_historyLimitLabel: NSTextField!
111 | @IBOutlet weak var settings_historyLimitStepper: NSStepper!
112 | @IBOutlet weak var settings_downloadBtn: NSButton!
113 | // Welcome window
114 | @IBOutlet var downloadWindow: NSWindow!
115 | @IBOutlet weak var downloadProgr: NSProgressIndicator!
116 | @IBOutlet weak var progressLabel: NSTextField!
117 | @IBOutlet weak var progressValueLabel: NSTextField!
118 | @IBOutlet weak var downloadButton: NSButton!
119 |
120 | @IBAction func quitWelcome(_ sender: Any) {
121 | self.window?.endSheet(self.downloadWindow)
122 | NSApplication.shared.terminate(nil)
123 | }
124 |
125 | // model info button (show model card on Huggingface)
126 | @IBOutlet weak var modelCardBtn: NSButton!
127 | @IBAction func clickModelCardBtn(_ sender: Any) {
128 | if let name = currentModelRealName { // real name taken from Unet model
129 | let url = "https://huggingface.co/\(name)"
130 | if let url = URL(string: url) { NSWorkspace.shared.open(url) }
131 | }
132 | }
133 | // IKImageView status
134 | var viewZoomFactor : CGFloat = 1
135 | var zoomToFit : Bool = true
136 | // IKImageView image popup
137 | @IBOutlet weak var upscalePopup: NSPopUpButton!
138 | // model alert accessory view
139 | @IBOutlet var modelAlertView: NSView!
140 | @IBOutlet weak var modelAlertCUPopup: NSPopUpButton!
141 | // original/upscaled switch
142 | @IBOutlet weak var originalUpscaledSwitch: NSSegmentedControl!
143 | // compute units images
144 | @IBOutlet weak var led_cpu: NSImageView!
145 | @IBOutlet weak var led_gpu: NSImageView!
146 | @IBOutlet weak var led_ane: NSImageView!
147 |
148 | // models popup accessory view
149 | @IBOutlet var modelsPopupAccView: NSView!
150 | // open huggingface repo in browser
151 | @IBAction func openModelsRepo(_ sender: Any) {
152 | let url = "https://huggingface.co/TheMurusTeam"
153 | if let url = URL(string: url) { NSWorkspace.shared.open(url) }
154 | }
155 | @IBAction func openAppleRepo(_ sender: Any) {
156 | let url = "https://github.com/apple/ml-stable-diffusion"
157 | if let url = URL(string: url) { NSWorkspace.shared.open(url) }
158 | }
159 |
160 | // export popup
161 | @IBOutlet weak var exportPopup: NSPopUpButton!
162 | @IBOutlet weak var item_exportUpscaled: NSMenuItem!
163 | @IBOutlet weak var item_exportOriginal: NSMenuItem!
164 |
165 | // help button in main window
166 | @IBAction func clickHelp(_ sender: Any) {
167 | let url = "https://github.com/TheMurusTeam/PromptToImage"
168 | if let url = URL(string: url) { NSWorkspace.shared.open(url) }
169 | }
170 |
171 |
172 |
173 |
174 | // MARK: Init
175 |
176 | convenience init(windowNibName:String, info:[String:AnyObject]?) {
177 | self.init(windowNibName: windowNibName)
178 | NSApplication.shared.activate(ignoringOtherApps: true)
179 | self.window?.makeKeyAndOrderFront(nil)
180 | //self.window?.appearance = NSAppearance(named: .darkAqua)
181 | }
182 |
183 |
184 |
185 | // MARK: Did Load
186 |
187 | override func windowDidLoad() {
188 | super.windowDidLoad()
189 | self.setUnitsPopup()
190 | self.populateModelsPopup()
191 | self.readStoredControlsValues()
192 | self.loadHistory()
193 | // main IKImageView
194 | self.imageview.hasVerticalScroller = true
195 | self.imageview.hasHorizontalScroller = true
196 | self.imageview.autohidesScrollers = false
197 | // set prompt textViews
198 | let attributes: [NSAttributedString.Key: Any] =
199 | [.foregroundColor: NSColor.tertiaryLabelColor, .font: NSFont.systemFont(ofSize: 13)]
200 | self.promptTextView.setValue(NSAttributedString(string: "Prompt", attributes: attributes),
201 | forKey: "placeholderAttributedString")
202 | self.promptTextView.font = NSFont.systemFont(ofSize: 13)
203 | self.negativePromptTextView.setValue(NSAttributedString(string: "Negative prompt", attributes: attributes),
204 | forKey: "placeholderAttributedString")
205 | self.negativePromptTextView.font = NSFont.systemFont(ofSize: 13)
206 |
207 | // info popover prompt textviews
208 | self.info_promptTextView.font = NSFont.systemFont(ofSize: 13)
209 | self.info_negPromptTextView.font = NSFont.systemFont(ofSize: 13)
210 | }
211 |
212 |
213 |
214 |
215 | // MARK: Will Close
216 |
217 | func windowWillClose(_ notification: Notification) {
218 | saveHistory()
219 | storeControlsValues()
220 | }
221 |
222 |
223 |
224 | // MARK: Menu Delegate
225 |
226 | func menuWillOpen(_ menu: NSMenu) {
227 | if menu == self.modelsPopup.menu {
228 | // Models popup
229 | self.populateModelsPopup()
230 | } else if menu == self.unitsPopup.menu {
231 | // Compute Units popup
232 | self.setUnitsPopup()
233 | } else if menu == self.upscalePopup.menu {
234 | // Upscale popup
235 | self.populateUpscalePopup()
236 | } else if menu == self.historyTableView.menu {
237 | // history tableview contextual menu
238 | self.item_saveAllSelectedImages.isEnabled = !self.historyArrayController.selectedObjects.isEmpty
239 | } else if menu == self.exportPopup.menu {
240 | // export menu
241 | guard !self.historyArrayController.selectedObjects.isEmpty else { return }
242 | guard let displayedHistoryItem = self.historyArrayController.selectedObjects[0] as? HistoryItem else { return }
243 | self.item_exportUpscaled.isEnabled = displayedHistoryItem.upscaledImage != nil
244 | }
245 | }
246 |
247 | //
248 |
249 |
250 |
251 |
252 | // MARK: Enable/Disable IMG2IMG
253 |
254 | // display/hide img2img controls view according to pipeline parameter
255 | func enableImg2Img() {
256 | if let pipeline = sdPipeline {
257 | self.img2imgView.isHidden = !pipeline.canUseInputImage
258 | if !pipeline.canUseInputImage {
259 | self.inputImageview.image = nil
260 | }
261 | }
262 | }
263 |
264 |
265 |
266 |
267 | // MARK: TableView Selection Did Change
268 |
269 | func tableViewSelectionDidChange(_ notification: Notification) {
270 | if self.historyArrayController.selectedObjects.isEmpty {
271 | self.imageview.isHidden = true
272 | self.imageControlsView.isHidden = true
273 | } else {
274 | guard let displayedHistoryItem = self.historyArrayController.selectedObjects[0] as? HistoryItem else { return }
275 | let image = displayedHistoryItem.upscaledImage ?? displayedHistoryItem.image
276 | self.originalUpscaledSwitch.isHidden = displayedHistoryItem.upscaledImage == nil
277 | self.originalUpscaledSwitch.selectSegment(withTag: displayedHistoryItem.upscaledImage == nil ? 0 : 1)
278 | self.imageview.isHidden = false
279 | self.imageControlsView.isHidden = false
280 | var viewRect = self.imageview.visibleRect as CGRect
281 | self.imageview.setImage(image.cgImage(forProposedRect: &viewRect, context: nil, hints: nil), imageProperties: [:])
282 | // zoom
283 | if self.zoomToFit {
284 | self.imageview.zoomImageToFit(self)
285 | } else {
286 | self.imageview.zoomFactor = viewZoomFactor
287 | }
288 | }
289 | }
290 |
291 |
292 |
293 |
294 | // MARK: Window Did Resize
295 |
296 | func windowDidResize(_ notification: Notification) {
297 | if self.zoomToFit { self.imageview.zoomImageToFit(self) }
298 | }
299 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 | }
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 |
316 |
317 |
318 |
319 |
320 |
321 |
322 |
--------------------------------------------------------------------------------
/PromptToImage/Main Window Controller/Settings.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Settings.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 25/12/22.
6 | //
7 |
8 | import Foundation
9 | import AppKit
10 |
11 | extension SDMainWindowController {
12 |
13 | // MARK: show settings window
14 |
15 | @IBAction func displaySettings(_ sender: Any) {
16 | self.window?.beginSheet(self.settingsWindow)
17 | }
18 |
19 |
20 |
21 | // MARK: close settings window
22 |
23 | @IBAction func closeSettingsWindow(_ sender: Any) {
24 | self.window?.endSheet(self.settingsWindow)
25 | }
26 |
27 |
28 |
29 | // MARK: Reveal SD models dir in Finder
30 |
31 | @IBAction func clickRevealModelsInFinder(_ sender: Any) {
32 | revealCustomModelsDirInFinder()
33 | }
34 |
35 | // MARK: Reveal upscale models dir in Finder
36 |
37 | @IBAction func clickRevealUpscaleModelsInFinder(_ sender: Any) {
38 | NSWorkspace.shared.activateFileViewerSelecting([URL(fileURLWithPath: customUpscalersDirectoryPath).absoluteURL])
39 | }
40 |
41 |
42 | // MARK: Compute Units Popup
43 |
44 | func setUnitsPopup() {
45 | self.unitsPopup.itemArray.forEach { $0.state = .off }
46 | let idx: Int
47 | switch currentComputeUnits {
48 | case .cpuAndNeuralEngine: idx = 1
49 | case .cpuAndGPU: idx = 2
50 | default: idx = 3 // all
51 | }
52 | self.unitsPopup.selectItem(at: idx)
53 | self.unitsPopup.item(at: idx)?.state = .on
54 | }
55 |
56 | @IBAction func switchUnitsPopup(_ sender: NSPopUpButton) {
57 | switch sender.indexOfSelectedItem {
58 | case 1: currentComputeUnits = .cpuAndNeuralEngine; loadSDModel()
59 | case 2: currentComputeUnits = .cpuAndGPU; loadSDModel()
60 | case 3: currentComputeUnits = .all; loadSDModel()
61 | default: break
62 | }
63 |
64 | }
65 |
66 |
67 |
68 | // MARK: Switch Models Popup
69 |
70 | @IBAction func switchModelsPopup(_ sender: NSPopUpButton) {
71 | guard let modelName = sender.titleOfSelectedItem else {return}
72 | // evaluate item
73 | if let modelUrl = sender.selectedItem?.representedObject as? URL {
74 | // item has an URL, load model
75 | self.loadModelFromURL(modelName: modelName, modelUrl: modelUrl)
76 | } else {
77 | // item has no URL, import model
78 | guard let repObj = sender.selectedItem?.representedObject as? String else { return }
79 | if repObj == "import" {
80 | // import model from dir
81 | self.importModel()
82 | } /*else if repObj == "repo" {
83 | // open huggingface repo in browser
84 | let url = "https://huggingface.co/TheMurusTeam"
85 | if let url = URL(string: url) { NSWorkspace.shared.open(url) }
86 | }*/
87 | // restore selected item
88 | self.setModelsPopup()
89 | }
90 | }
91 |
92 |
93 |
94 | // MARK: Load model from URL
95 |
96 | // called when switching models popup or when loading an imported model
97 | func loadModelFromURL(modelName:String, modelUrl:URL) {
98 | let alert = NSAlert()
99 | alert.messageText = "Load model?"
100 | alert.informativeText = "Model \(modelName) will be loaded using the specified compute units."
101 | alert.addButton(withTitle: "Cancel")
102 | alert.addButton(withTitle: "Load Model")
103 | alert.accessoryView = self.modelAlertView
104 | // set alert units popup
105 | if self.settings_selectDefaultCU.state == .on {
106 | currentComputeUnits = defaultComputeUnits
107 | self.modelAlertCUPopup.selectItem(at: 1)
108 | } else {
109 | switch currentComputeUnits {
110 | case .cpuAndNeuralEngine: self.modelAlertCUPopup.selectItem(at: 0)
111 | case .cpuAndGPU: self.modelAlertCUPopup.selectItem(at: 1)
112 | default: self.modelAlertCUPopup.selectItem(at: 2) // all
113 | }
114 | }
115 | // show alert
116 | guard alert.runModal() != NSApplication.ModalResponse.alertFirstButtonReturn else {
117 | // Cancel
118 | self.setModelsPopup()
119 | return
120 | }
121 | // Load model
122 | print("loading model \(modelName)")
123 | // set compute units
124 | switch self.modelAlertCUPopup.indexOfSelectedItem {
125 | case 0: currentComputeUnits = .cpuAndNeuralEngine
126 | case 1: currentComputeUnits = .cpuAndGPU
127 | default: currentComputeUnits = .all
128 | }
129 |
130 | currentModelResourcesURL = modelUrl
131 | print("setting currentModelResourcesURL to \(currentModelResourcesURL)")
132 | // load sd model
133 | loadSDModel()
134 | }
135 |
136 |
137 | // MARK: Populate Upscale Popup
138 |
139 | func populateUpscalePopup() {
140 | guard let menu = self.upscalePopup.menu else {return}
141 | guard let firstitem = menu.item(at: 0) else {return}
142 | menu.removeAllItems()
143 | menu.addItem(firstitem)
144 | // title
145 | let title = NSMenuItem()
146 | title.title = "Select Upscale Model"
147 | menu.addItem(title)
148 | title.isEnabled = false
149 |
150 | // built-in model
151 | let item = NSMenuItem()
152 | item.title = "realesrgan512 (Default)"
153 | item.representedObject = URL(fileURLWithPath: defaultUpscaleModelPath!).absoluteURL
154 | menu.addItem(item)
155 |
156 | // custom models
157 | for upscaler in installedCustomUpscalers() {
158 | let item = NSMenuItem()
159 | item.title = upscaler.deletingPathExtension().lastPathComponent
160 | item.representedObject = upscaler.absoluteURL
161 | menu.addItem(item)
162 | }
163 |
164 | // add custom model
165 | menu.addItem(NSMenuItem.separator())
166 | let itemi = NSMenuItem()
167 | itemi.title = "Import CoreML upscale model..."
168 | itemi.representedObject = "import"
169 | menu.addItem(itemi)
170 |
171 | // set selected item
172 | if currentUpscalerName == "realesrgan512" {
173 | menu.item(at: 2)!.state = .on
174 | } else {
175 | for item in menu.items {
176 | if item.title == currentUpscalerName {
177 | item.state = .on
178 | }
179 | }
180 | }
181 | }
182 |
183 |
184 |
185 | // MARK: Populate Models Popup
186 |
187 | func populateModelsPopup() {
188 | // create menu items
189 | if let menu = self.modelsPopup.menu {
190 | menu.removeAllItems()
191 |
192 | // built-in model
193 | if builtInModelExists() {
194 | let item = NSMenuItem()
195 | item.title = "Stable Diffusion 2.1 SPLIT EINSUM (Default)"
196 | item.representedObject = builtInModelResourcesURL
197 | menu.addItem(item)
198 | menu.addItem(NSMenuItem.separator())
199 | }
200 |
201 | // default and custom models
202 | let urls = installedCustomModels()
203 | for modelurl in urls {
204 | if modelurl.isFolder {
205 | let item = NSMenuItem()
206 | item.title = modelurl.lastPathComponent
207 | item.representedObject = modelurl
208 | menu.addItem(item)
209 | }
210 | }
211 |
212 |
213 | menu.addItem(NSMenuItem.separator())
214 | let item = NSMenuItem()
215 | item.title = "Import CoreML Stable Diffusion model..."
216 | item.representedObject = "import"
217 | menu.addItem(item)
218 |
219 | // item view
220 | menu.addItem(NSMenuItem.separator())
221 | let item3 = NSMenuItem()
222 | item3.view = self.modelsPopupAccView
223 | menu.addItem(item3)
224 |
225 | // set selected item
226 | self.setModelsPopup()
227 | }
228 | }
229 |
230 |
231 |
232 |
233 |
234 | // MARK: Set Models Popup
235 |
236 | func setModelsPopup() {
237 | // set selected item
238 | if let menu = self.modelsPopup.menu {
239 | for mitem in menu.items {
240 | mitem.state = .off
241 | if let url = mitem.representedObject as? URL {
242 | if url == currentModelResourcesURL {
243 | self.modelsPopup.select(mitem)
244 | mitem.state = .on
245 | //print("current model: \(mitem.title)")
246 | }
247 | }
248 | }
249 | }
250 | }
251 |
252 |
253 | }
254 |
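255 | // For illustration only: the compute-units popups above use different row
256 | // indices for the same three MLComputeUnits choices. A hypothetical shared
257 | // table for the alert popup (rows 0..2, as wired above) would be:
258 | //
259 | // let unitsByAlertRow: [MLComputeUnits] = [.cpuAndNeuralEngine, .cpuAndGPU, .all]
260 | // currentComputeUnits = unitsByAlertRow[min(modelAlertCUPopup.indexOfSelectedItem, 2)]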
--------------------------------------------------------------------------------
/PromptToImage/Main Window Controller/Share.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Share.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 05/12/22.
6 | //
7 |
8 | import Foundation
9 | import AppKit
10 | import AVFoundation
11 |
12 | extension SDMainWindowController {
13 |
14 | // MARK: Click Share in History Item
15 |
16 | // save from table view item Share btn
17 | @IBAction func clickShareInHistoryItem(_ sender: NSButton) {
18 | guard let view = sender.superview?.superview else {return}
19 | let row = self.historyTableView.row(for: view)
20 | print("ROW:\(row)")
21 | // save
22 | let items : [NSImage] = [(self.history[row].upscaledImage ?? self.history[row].image)]
23 | self.currentHistoryItemForSharePicker = self.history[row]
24 | let sharingPicker = NSSharingServicePicker(items: items)
25 | sharingPicker.delegate = self
26 | sharingPicker.show(relativeTo: NSZeroRect,
27 | of: sender,
28 | preferredEdge: .minY)
29 |
30 | }
31 |
32 |
33 |
34 | // MARK: Sharing Picker
35 |
36 | // draw share menu
37 | func sharingServicePicker(_ sharingServicePicker: NSSharingServicePicker, sharingServicesForItems items: [Any], proposedSharingServices proposedServices: [NSSharingService]) -> [NSSharingService] {
38 |
39 | guard let historyItem = self.currentHistoryItemForSharePicker else { return [] }
40 | let btnimage = NSImage(systemSymbolName: "display.and.arrow.down", accessibilityDescription: nil) // item icon
41 | var share = proposedServices
42 |
43 | if let currentImages = items as? [NSImage] {
44 | let customService = NSSharingService(title: "Save As...", image: btnimage ?? NSImage(), alternateImage: btnimage, handler: {
45 | if currentImages.count == 1 {
46 | // write single image to file
47 | self.displaySavePanel(item:historyItem)
48 | }/* else if currentImages.count > 1 {
49 | // write multiple images to folder
50 | self.displaySavePanel(images: currentImages)
51 | } */
52 | })
53 | share.insert(customService, at: 0)
54 | }
55 | return share
56 |
57 |
58 | }
59 |
60 |
61 |
62 |
63 | // MARK: Save Panel for EXIF single image
64 |
65 | // save panel for single image with metadata
66 | func displaySavePanel(item:HistoryItem) {
67 | print("displaying save panel for single image with metadata")
68 | let image = item.upscaledImage ?? item.image
69 | guard let img = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
70 | print("invalid image")
71 | return
72 | }
73 | let panel = NSSavePanel()
74 | panel.nameFieldLabel = "Image file name:"
75 | panel.allowedContentTypes = [.png]
76 | // suggested file name
77 | panel.nameFieldStringValue = "\(String(item.prompt.prefix(50))).\(item.seed).png"
78 | // panel strings
79 | panel.title = "Save image"
80 | panel.prompt = "Save Image"
81 |
82 | panel.beginSheetModal(for: self.window!, completionHandler: { response in
83 | if response == NSApplication.ModalResponse.OK {
84 | guard let url = panel.url else { return }
85 | guard let data = CFDataCreateMutable(nil, 0) else { return }
86 | guard let destination = CGImageDestinationCreateWithData(data, UTType.png.identifier as CFString, 1, nil) else { return }
87 | let iptc = [
88 | kCGImagePropertyIPTCOriginatingProgram: "PromptToImage for macOS",
89 | kCGImagePropertyIPTCCaptionAbstract: self.metadata(item:item),
90 | kCGImagePropertyIPTCProgramVersion: "1.0"]
91 | let meta = [kCGImagePropertyIPTCDictionary: iptc]
92 | CGImageDestinationAddImage(destination, img, meta as CFDictionary)
93 | guard CGImageDestinationFinalize(destination) else { return }
94 | // save
95 | do {
96 | try (data as Data).write(to: url)
97 | } catch {
98 | print("error saving file: \(error)")
99 | }
100 | } else {
101 | print("cancel")
102 | }
103 | })
104 | }
105 |
106 |
107 | // MARK: Build metadata for IPTC
108 |
109 | private func metadata(item:HistoryItem) -> String {
110 | return "Prompt:\n\(item.prompt)\n\nNegative Prompt:\n\(item.negativePrompt)\n\nModel:\n\(item.modelName)\n\nSeed: \(item.seed)\nSteps: \(item.steps)\nGuidance Scale: \(item.guidanceScale)\nScheduler: \(item.sampler)\nimg2img: \(item.upscaledImage != nil)\n\nMade with PromptToImage for macOS"
111 | }
112 |
113 | // MARK: Click Remove in tableview contextual menu
114 |
115 | @IBAction func removeSelectedImages(_ sender: Any) {
116 | guard let items = self.historyArrayController.selectedObjects as? [HistoryItem] else { return }
117 | if !items.isEmpty { self.historyArrayController.remove(items) }
118 | }
119 |
120 | // MARK: Click Save in tableview contextual menu
121 |
122 | @IBAction func saveSelectedImages(_ sender: Any) {
123 | guard let items = self.historyArrayController.selectedObjects as? [HistoryItem] else { return }
124 | if !items.isEmpty { self.displaySavePanel(historyItems: items)}
125 | }
126 |
127 | // MARK: Save Panel for multiple images
128 |
129 | // open panel for multiple images
130 | func displaySavePanel(historyItems:[HistoryItem]) {
131 | print("displaying open panel for saving multiple images")
132 | let panel = NSOpenPanel()
133 | panel.canCreateDirectories = true
134 | panel.canChooseDirectories = true
135 | panel.canChooseFiles = false
136 | panel.message = "Select a folder"
137 | panel.prompt = "Save Selected Images"
138 |
139 |
140 | panel.beginSheetModal(for: self.window!, completionHandler: { response in
141 | if response == NSApplication.ModalResponse.OK {
142 | //
143 | self.indindicator.isHidden = false
144 | self.indindicator.startAnimation(nil)
145 | self.progrLabel.stringValue = "Saving \(historyItems.count) images..."
146 | self.speedLabel.isHidden = true
147 | self.window?.beginSheet(self.progrWin)
148 | //
149 | guard let path = panel.url?.path(percentEncoded: false) else { return }
150 |
151 | // write the files off the main thread so the progress sheet stays responsive
152 | DispatchQueue.global(qos: .userInitiated).async {
153 | for (i, item) in historyItems.enumerated() {
154 | let fullpath = path + "/\(String(item.prompt.prefix(50)))-\(i + 1).\(item.seed).png"
155 | let url = URL(filePath: fullpath)
156 | print("save \(i + 1) at \(url.absoluteString)")
157 | let nsimg = item.upscaledImage ?? item.image
158 | // skip items that cannot be encoded instead of aborting the whole batch
159 | guard let img = nsimg.cgImage(forProposedRect: nil, context: nil, hints: nil),
160 | let data = CFDataCreateMutable(nil, 0),
161 | let destination = CGImageDestinationCreateWithData(data, UTType.png.identifier as CFString, 1, nil) else { continue }
162 | let iptc = [
163 | kCGImagePropertyIPTCOriginatingProgram: "PromptToImage for macOS",
164 | kCGImagePropertyIPTCCaptionAbstract: self.metadata(item:item),
165 | kCGImagePropertyIPTCProgramVersion: "1.0"]
166 | let meta = [kCGImagePropertyIPTCDictionary: iptc]
167 | CGImageDestinationAddImage(destination, img, meta as CFDictionary)
168 | guard CGImageDestinationFinalize(destination) else { continue }
169 | // save
170 | do {
171 | try (data as Data).write(to: url)
172 | } catch {
173 | print("error saving file: \(error)")
174 | }
175 | }
176 | DispatchQueue.main.async { self.window?.endSheet(self.progrWin) }
177 | }
178 |
179 | } else {
180 | print("cancel")
181 |
182 | }
183 | })
184 |
185 | }
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 | // Click Main Share Button
204 |
205 | // main view share button
206 | @IBAction func clickMainShareButton(_ sender: NSButton) {
207 | guard let historyitems = self.historyArrayController.selectedObjects as? [HistoryItem] else { return }
208 | let images : [NSImage] = historyitems.map { $0.upscaledImage ?? $0.image }
209 | let sharingPicker = NSSharingServicePicker(items: images)
210 | sharingPicker.delegate = self
211 | sharingPicker.show(relativeTo: NSZeroRect,
212 | of: sender,
213 | preferredEdge: .minY)
214 |
215 | /*
216 | guard let historyitems = self.historyArrayController.selectedObjects as? [HistoryItem] else { return }
217 | //let images : [NSImage] = historyitems.map { $0.upscaledImage ?? $0.image }
218 | var images = [NSImage]()
219 | for hitem in historyitems {
220 | images.append(hitem.upscaledImage ?? hitem.image)
221 | }
222 | print("MAIN SHARE BUTTON items:\(images.count)")
223 | let sharingPicker = NSSharingServicePicker(items: images)
224 | sharingPicker.delegate = self
225 | sharingPicker.show(relativeTo: NSZeroRect,
226 | of: sender,
227 | preferredEdge: .minY)
228 | */
229 |
230 | }
231 |
232 |
233 |
234 |
235 | // MARK: Write To File
236 |
237 | func writeImageToFile(path: String,
238 | image: NSImage,
239 | format: NSBitmapImageRep.FileType) {
240 | guard let tiff = image.tiffRepresentation, let imageRep = NSBitmapImageRep(data: tiff) else { return }
241 | if let imageData = imageRep.representation(using: format, properties: [:]) {
242 | do {
243 | try imageData.write(to: URL(fileURLWithPath: (path.hasSuffix(".png") ? path : "\(path).png")))
244 | }catch{ print(error) }
245 | }
246 | }
247 |
248 |
249 |
250 |
251 |
252 | // MARK: - Image Popup Actions
253 |
254 | @IBAction func exportOriginalImage(_ sender: Any) {
255 | self.exportDisplayedImage(upscaled: false)
256 | }
257 | @IBAction func exportUpscaledImage(_ sender: Any) {
258 | self.exportDisplayedImage(upscaled: true)
259 | }
260 |
261 | func exportDisplayedImage(upscaled:Bool) {
262 | guard !self.historyArrayController.selectedObjects.isEmpty else { return }
263 | guard let item = self.historyArrayController.selectedObjects[0] as? HistoryItem else { return }
264 |
265 |
266 | print("displaying save panel for single image with metadata")
267 | guard let image = upscaled ? item.upscaledImage : item.image else { return }
268 | guard let img = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
269 | displayErrorAlert(txt: "Unable to create CGImage")
270 | return
271 | }
272 |
273 | let panel = NSSavePanel()
274 | panel.nameFieldLabel = "Image file name:"
275 | panel.allowedContentTypes = [.png]
276 | // suggested file name
277 | panel.nameFieldStringValue = "\(String(item.prompt.prefix(50))).\(item.seed).png"
278 | // panel strings
279 | panel.title = "Save image"
280 | panel.prompt = "Save Image"
281 |
282 | panel.beginSheetModal(for: self.window!, completionHandler: { response in
283 | if response == NSApplication.ModalResponse.OK {
284 | guard let url = panel.url else { return }
285 | guard let data = CFDataCreateMutable(nil, 0) else { return }
286 | guard let destination = CGImageDestinationCreateWithData(data, UTType.png.identifier as CFString, 1, nil) else { return }
287 | let iptc = [
288 | kCGImagePropertyIPTCOriginatingProgram: "PromptToImage for macOS",
289 | kCGImagePropertyIPTCCaptionAbstract: self.metadata(item:item),
290 | kCGImagePropertyIPTCProgramVersion: "1.0"]
291 | let meta = [kCGImagePropertyIPTCDictionary: iptc]
292 | CGImageDestinationAddImage(destination, img, meta as CFDictionary)
293 | guard CGImageDestinationFinalize(destination) else { return }
294 |
295 | // write to file
296 | do {
297 | try (data as Data).write(to: url)
298 | } catch {
299 | print("error saving file: \(error)")
300 | }
301 | } else {
302 | print("cancel")
303 | }
304 | })
305 | }
306 | }
307 |
308 |
309 |
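310 | // For illustration only: reading back the IPTC caption written by the save
311 | // panels above, using ImageIO. `url` is assumed to point at a PNG saved by
312 | // this app; a minimal sketch, not referenced elsewhere in the project:
313 | func embeddedCaption(at url: URL) -> String? {
314 | guard let source = CGImageSourceCreateWithURL(url as CFURL, nil),
315 | let props = CGImageSourceCopyPropertiesAtIndex(source, 0, nil) as? [CFString: Any],
316 | let iptc = props[kCGImagePropertyIPTCDictionary] as? [CFString: Any]
317 | else { return nil }
318 | return iptc[kCGImagePropertyIPTCCaptionAbstract] as? String
319 | }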
--------------------------------------------------------------------------------
/PromptToImage/Models.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Models.swift
3 | // PromptToImage
4 | //
5 | // Created by hany on 22/12/22.
6 | //
7 |
8 | import Foundation
9 | import Cocoa
10 | import UniformTypeIdentifiers
11 | import CoreML
12 |
13 |
14 | func builtInModelExists() -> Bool {
15 | return FileManager.default.fileExists(atPath: builtInModelResourcesURL.path + "/merges.txt") &&
16 | FileManager.default.fileExists(atPath: builtInModelResourcesURL.path + "/vocab.json") &&
17 | FileManager.default.fileExists(atPath: builtInModelResourcesURL.path + "/TextEncoder.mlmodelc") &&
18 | FileManager.default.fileExists(atPath: builtInModelResourcesURL.path + "/Unet.mlmodelc") &&
19 | FileManager.default.fileExists(atPath: builtInModelResourcesURL.path + "/VAEDecoder.mlmodelc")
20 | }
21 |
22 |
23 | func currentModelName() -> String {
24 | // model name
25 | var modelName = defaultModelName
26 | if currentModelResourcesURL.absoluteURL != builtInModelResourcesURL.absoluteURL {
27 | modelName = currentModelResourcesURL.lastPathComponent
28 | }
29 | return modelName
30 | }
31 |
32 |
33 | // MARK: Create dirs
34 | func createModelsDir() {
35 | if !FileManager.default.fileExists(atPath: customModelsDirectoryPath) {
36 | do {
37 | try FileManager.default.createDirectory(atPath: customModelsDirectoryPath, withIntermediateDirectories: true)
38 | } catch { print("error creating custom stable diffusion models directory at \(customModelsDirectoryPath)")}
39 | }
40 | }
41 | func createUpscalersDir() {
42 | if !FileManager.default.fileExists(atPath: customUpscalersDirectoryPath) {
43 | do {
44 | try FileManager.default.createDirectory(atPath: customUpscalersDirectoryPath, withIntermediateDirectories: true)
45 | } catch { print("error creating custom upscale models directory at \(customUpscalersDirectoryPath)")}
46 | }
47 | }
48 | func createHistoryDir() {
49 | if !FileManager.default.fileExists(atPath: historyPath) {
50 | do {
51 | try FileManager.default.createDirectory(atPath: historyPath, withIntermediateDirectories: true)
52 | } catch { print("error creating history directory at \(historyPath)")}
53 | }
54 | }
55 |
56 |
57 |
58 | func revealCustomModelsDirInFinder() {
59 | NSWorkspace.shared.activateFileViewerSelecting([URL(fileURLWithPath: customModelsDirectoryPath).absoluteURL])
60 | }
61 |
62 |
63 | // MARK: Get Custom SD Models List
64 |
65 | func installedCustomModels() -> [URL] {
66 | var urls = [URL]()
67 | do {
68 | let directoryContents = try FileManager.default.contentsOfDirectory(at: URL(fileURLWithPath: customModelsDirectoryPath),includingPropertiesForKeys: nil)
69 | urls = directoryContents.filter({ $0.isFolder && $0.isModelURL })
70 | } catch {}
71 | return urls
72 | }
73 |
74 |
75 |
76 | // check if model directory contains all needed files
77 | extension URL {
78 | var isModelURL: Bool {
79 | FileManager.default.fileExists(atPath: self.path + "/merges.txt") &&
80 | FileManager.default.fileExists(atPath: self.path + "/vocab.json") &&
81 | FileManager.default.fileExists(atPath: self.path + "/TextEncoder.mlmodelc") &&
82 | FileManager.default.fileExists(atPath: self.path + "/Unet.mlmodelc") &&
83 | FileManager.default.fileExists(atPath: self.path + "/VAEDecoder.mlmodelc")
84 | }
85 |
86 | }
87 |
88 |
89 | // MARK: Get custom upscale models list
90 |
91 | func installedCustomUpscalers() -> [URL] {
92 | var urls = [URL]()
93 | do {
94 | let directoryContents = try FileManager.default.contentsOfDirectory(at: URL(fileURLWithPath: customUpscalersDirectoryPath),includingPropertiesForKeys: nil)
95 | urls = directoryContents.filter({ $0.isCompiledCoreMLModel })
96 | } catch {}
97 | return urls
98 | }
99 |
100 |
101 |
102 |
103 |
104 | extension SDMainWindowController {
105 |
106 |
107 |
108 | // MARK: Import custom SD model
109 |
110 | func importModel() {
111 | let panel = NSOpenPanel()
112 | panel.canChooseFiles = false
113 | panel.canChooseDirectories = true
114 | panel.allowsMultipleSelection = false
115 | panel.nameFieldLabel = "Model folder"
116 | panel.prompt = "Import Model"
117 | panel.message = "Select a CoreML Stable Diffusion model directory"
118 | let destinationPath = URL(fileURLWithPath: customModelsDirectoryPath).absoluteURL.path
119 |
120 | panel.beginSheetModal(for: self.window!, completionHandler: { response in
121 | guard response == NSApplication.ModalResponse.OK else { return }
122 | guard let modelUrl = panel.url else { return }
123 | let modelDirName = modelUrl.lastPathComponent
124 | guard modelUrl.isModelURL else {
125 | // invalid model url, missing files?
126 | displayErrorAlert(txt: "Invalid model\n\nA CoreML Stable Diffusion model directory must include these files at least:\n- merges.txt, vocab.json, TextEncoder.mlmodelc, Unet.mlmodelc, VAEDecoder.mlmodelc")
127 | return
128 | }
129 | print("Valid <<\(modelDirName)>> model directory at path: \(modelUrl.path)")
130 |
131 | let toPath = destinationPath + "/" + modelDirName
132 | if FileManager.default.fileExists(atPath: toPath) {
133 | print("model already exists at \(toPath)")
134 | displayErrorAlert(txt: "Model already installed")
135 | return
136 | }
137 | panel.endSheet(self.window!)
138 |
139 | DispatchQueue.global().async {
140 | DispatchQueue.main.async {
141 | self.waitLabel.stringValue = "Installing model"
142 | self.waitInfoLabel.stringValue = modelDirName
143 | self.waitCULabel.stringValue = ""
144 | self.window?.beginSheet(self.waitWin)
145 |
146 | // copy model to app's custom models dir
147 | print("copying model directory \(modelUrl.path) to PromptToImage custom models directory at \(toPath)")
148 | DispatchQueue.global().async {
149 | do {
150 | try FileManager.default.copyItem(atPath: modelUrl.path, toPath: customModelsDirectoryPath + "/" + modelDirName)
151 | } catch { self.presentError(error) }
152 |
153 | DispatchQueue.main.async {
154 | self.window?.endSheet(self.waitWin)
155 |
156 | // load model
157 | self.loadModelFromURL(modelName: modelDirName, modelUrl: URL(fileURLWithPath: toPath))
158 | }
159 | }
160 | }
161 | }
162 | })
163 | }
164 |
165 |
166 |
167 | // MARK: Import custom upscale model
168 |
169 | func importUpscaleModel() {
170 | let panel = NSOpenPanel()
171 | panel.canChooseFiles = true
172 | panel.canChooseDirectories = false
173 | panel.allowsMultipleSelection = false
174 | panel.allowedFileTypes = ["com.apple.coreml.model"]
175 | panel.nameFieldLabel = "Model file"
176 | panel.prompt = "Import Upscale Model"
177 | panel.message = "Select a CoreML upscale model file"
178 | let destinationPath = URL(fileURLWithPath: customUpscalersDirectoryPath).absoluteURL.path
179 |
180 | panel.beginSheetModal(for: self.window!, completionHandler: { response in
181 | guard response == NSApplication.ModalResponse.OK else { return }
182 | guard let modelUrl = panel.url else { return }
183 | let modelName = modelUrl.lastPathComponent
184 | let compiledModelName = modelUrl.lastPathComponent + "c"
185 | let toPath = destinationPath + "/" + compiledModelName
186 | panel.endSheet(self.window!)
187 |
188 | DispatchQueue.global().async {
189 | DispatchQueue.main.async {
190 | self.waitLabel.stringValue = "Installing upscale model"
191 | self.waitInfoLabel.stringValue = modelName
192 | self.waitCULabel.stringValue = ""
193 | self.window?.beginSheet(self.waitWin)
194 |
195 | // compile CoreML model
196 | DispatchQueue.global().async {
197 | var temporaryModelURL : URL? = nil
198 | do {
199 | print("compiling model...")
200 | temporaryModelURL = try MLModel.compileModel(at: modelUrl)
201 | } catch {
202 | // close the wait sheet before reporting the compile error
203 | DispatchQueue.main.async { self.window?.endSheet(self.waitWin) }
204 | displayErrorAlert(txt: "Unable to compile CoreML model")
205 | return
206 | }
207 | // copy model to app's custom models dir
208 | guard let compiledModelUrl = temporaryModelURL else {
209 | DispatchQueue.main.async { self.window?.endSheet(self.waitWin) }
210 | displayErrorAlert(txt: "Invalid CoreML model"); return
211 | }
212 |
213 | // copy file
214 | print("copying model directory \(compiledModelUrl.path) to PromptToImage custom models directory at \(toPath)")
215 |
216 | do {
217 | try FileManager.default.copyItem(atPath: compiledModelUrl.path, toPath: toPath)
218 | } catch {
219 | DispatchQueue.main.async {
220 | self.window?.endSheet(self.waitWin)
221 | self.presentError(error)
222 | }
223 | return
224 | }
225 |
226 | DispatchQueue.main.async {
227 | self.window?.endSheet(self.waitWin)
228 | let alert = NSAlert()
229 | alert.messageText = "Done"
230 | alert.informativeText = "Upscale model \(compiledModelName) installed"
231 | alert.runModal()
232 | }
233 |
234 | }
235 |
236 | }
237 | }
238 | })
239 |
240 | }
241 |
242 |
243 |
244 |
245 | }
246 |
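247 | // Usage sketch: validating a candidate folder before importing it, mirroring
248 | // the checks importModel() performs above. The path below is a hypothetical
249 | // example:
250 | //
251 | // let candidate = URL(fileURLWithPath: "/Users/me/Downloads/my-sd-model")
252 | // if candidate.isModelURL { /* safe to copy into customModelsDirectoryPath */ }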
--------------------------------------------------------------------------------
/PromptToImage/PromptToImage.entitlements:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>com.apple.security.app-sandbox</key>
6 | <true/>
7 | <key>com.apple.security.files.user-selected.read-write</key>
8 | <true/>
9 | <key>com.apple.security.network.client</key>
10 | <true/>
11 | </dict>
12 | </plist>
13 |
--------------------------------------------------------------------------------
/PromptToImage/PromptToImageDebug.entitlements:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>com.apple.security.app-sandbox</key>
6 | <true/>
7 | <key>com.apple.security.files.user-selected.read-write</key>
8 | <true/>
9 | <key>com.apple.security.network.client</key>
10 | <true/>
11 | </dict>
12 | </plist>
13 |
--------------------------------------------------------------------------------
/PromptToImage/Stable Diffusion Resources/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Stable Diffusion Resources/.gitkeep
--------------------------------------------------------------------------------
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/AlphasCumprodCalculation.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 |
6 | public struct AlphasCumprodCalculation {
7 | public var sqrtAlphasCumprod: Float
8 | public var sqrtOneMinusAlphasCumprod: Float
9 |
10 | public init(
11 | sqrtAlphasCumprod: Float,
12 | sqrtOneMinusAlphasCumprod: Float
13 | ) {
14 | self.sqrtAlphasCumprod = sqrtAlphasCumprod
15 | self.sqrtOneMinusAlphasCumprod = sqrtOneMinusAlphasCumprod
16 | }
17 |
18 | public init(
19 | alphasCumprod: [Float],
20 | timesteps: Int = 1_000,
21 | steps: Int,
22 | strength: Float
23 | ) {
24 | let tEnc = Int(strength * Float(steps))
25 | let initTimestep = min(max(0, timesteps - timesteps / steps * (steps - tEnc) + 1), timesteps - 1)
26 | self.sqrtAlphasCumprod = alphasCumprod[initTimestep].squareRoot()
27 | self.sqrtOneMinusAlphasCumprod = (1 - alphasCumprod[initTimestep]).squareRoot()
28 | }
29 | }
30 |
31 |
--------------------------------------------------------------------------------
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/CGImage+vImage.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 | import Accelerate
6 | import CoreML
7 |
8 | @available(iOS 16.0, macOS 13.0, *)
9 | extension CGImage {
10 |
11 | typealias PixelBufferPFx1 = vImage.PixelBuffer<vImage.PlanarF>
12 | typealias PixelBufferP8x3 = vImage.PixelBuffer<vImage.Planar8x3>
13 | typealias PixelBufferIFx3 = vImage.PixelBuffer<vImage.InterleavedFx3>
14 | typealias PixelBufferI8x3 = vImage.PixelBuffer<vImage.Interleaved8x3>
15 |
16 | public enum ShapedArrayError: String, Swift.Error {
17 | case wrongNumberOfChannels
18 | case incorrectFormatsConvertingToShapedArray
19 | case vImageConverterNotInitialized
20 | }
21 |
22 | public static func fromShapedArray(_ array: MLShapedArray<Float32>) throws -> CGImage {
23 |
24 | // array is [N,C,H,W], where C==3
25 | let channelCount = array.shape[1]
26 | guard channelCount == 3 else {
27 | throw ShapedArrayError.wrongNumberOfChannels
28 | }
29 |
30 | let height = array.shape[2]
31 | let width = array.shape[3]
32 |
33 | // Normalize each channel into a float between 0 and 1.0
34 | let floatChannels = (0..<channelCount).map { i in
35 | 
36 | // Normalized channel output
37 | let cOut = PixelBufferPFx1(width: width, height:height)
38 | 
39 | // Reference this channel in the array and normalize
40 | array[0][i].withUnsafeShapedBufferPointer { ptr, _, strides in
41 | let cIn = PixelBufferPFx1(data: .init(mutating: ptr.baseAddress!),
42 | width: width, height: height,
43 | byteCountPerRow: strides[0]*4)
44 | // Map [-1.0 1.0] -> [0.0 1.0]
45 | cIn.multiply(by: 0.5, preBias: 1.0, postBias: 0.0, destination: cOut)
46 | }
47 | return cOut
48 | }
49 |
50 | // Convert to interleaved and then to UInt8
51 | let floatImage = PixelBufferIFx3(planarBuffers: floatChannels)
52 | let uint8Image = PixelBufferI8x3(width: width, height: height)
53 | floatImage.convert(to:uint8Image) // maps [0.0 1.0] -> [0 255] and clips
54 |
55 | // Convert to uint8x3 to RGB CGImage (no alpha)
56 | let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue)
57 | let cgImage = uint8Image.makeCGImage(cgImageFormat:
58 | .init(bitsPerComponent: 8,
59 | bitsPerPixel: 3*8,
60 | colorSpace: CGColorSpaceCreateDeviceRGB(),
61 | bitmapInfo: bitmapInfo)!)!
62 |
63 | return cgImage
64 | }
65 |
66 | public var plannerRGBShapedArray: MLShapedArray<Float32> {
67 | get throws {
68 | guard
69 | var sourceFormat = vImage_CGImageFormat(cgImage: self),
70 | var mediumFormat = vImage_CGImageFormat(
71 | bitsPerComponent: 8 * MemoryLayout<Float>.size,
72 | bitsPerPixel: 8 * MemoryLayout<Float>.size * 4,
73 | colorSpace: CGColorSpaceCreateDeviceRGB(),
74 | bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.first.rawValue)),
75 | let width = vImagePixelCount(exactly: self.width),
76 | let height = vImagePixelCount(exactly: self.height)
77 | else {
78 | throw ShapedArrayError.incorrectFormatsConvertingToShapedArray
79 | }
80 |
81 | var sourceImageBuffer = try vImage_Buffer(cgImage: self)
82 |
83 | var mediumDesination = try vImage_Buffer(width: Int(width), height: Int(height), bitsPerPixel: mediumFormat.bitsPerPixel)
84 |
85 | let converter = vImageConverter_CreateWithCGImageFormat(
86 | &sourceFormat,
87 | &mediumFormat,
88 | nil,
89 | vImage_Flags(kvImagePrintDiagnosticsToConsole),
90 | nil)
91 |
92 | guard let converter = converter?.takeRetainedValue() else {
93 | throw ShapedArrayError.vImageConverterNotInitialized
94 | }
95 |
96 | vImageConvert_AnyToAny(converter, &sourceImageBuffer, &mediumDesination, nil, vImage_Flags(kvImagePrintDiagnosticsToConsole))
97 |
98 | var destinationA = try vImage_Buffer(width: Int(width), height: Int(height), bitsPerPixel: 8 * UInt32(MemoryLayout<Float>.size))
99 | var destinationR = try vImage_Buffer(width: Int(width), height: Int(height), bitsPerPixel: 8 * UInt32(MemoryLayout<Float>.size))
100 | var destinationG = try vImage_Buffer(width: Int(width), height: Int(height), bitsPerPixel: 8 * UInt32(MemoryLayout<Float>.size))
101 | var destinationB = try vImage_Buffer(width: Int(width), height: Int(height), bitsPerPixel: 8 * UInt32(MemoryLayout<Float>.size))
102 |
103 | var minFloat: [Float] = [-1.0, -1.0, -1.0, -1.0]
104 | var maxFloat: [Float] = [1.0, 1.0, 1.0, 1.0]
105 |
106 | vImageConvert_ARGB8888toPlanarF(&mediumDesination, &destinationA, &destinationR, &destinationG, &destinationB, &maxFloat, &minFloat, .zero)
107 |
108 | let redData = Data(bytes: destinationR.data, count: Int(width) * Int(height) * MemoryLayout<Float>.size)
109 | let greenData = Data(bytes: destinationG.data, count: Int(width) * Int(height) * MemoryLayout<Float>.size)
110 | let blueData = Data(bytes: destinationB.data, count: Int(width) * Int(height) * MemoryLayout<Float>.size)
111 |
112 | let imageData = redData + greenData + blueData
113 |
114 | let shapedArray = MLShapedArray<Float32>(data: imageData, shape: [1, 3, Int(modelHeight), Int(modelWidth)])
115 |
116 | return shapedArray
117 | }
118 | }
119 | }
120 |
121 |
--------------------------------------------------------------------------------
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/DPMSolverMultistepScheduler.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. and The HuggingFace Team. All Rights Reserved.
3 |
4 | import Accelerate
5 | import CoreML
6 |
7 | /// A scheduler used to compute a de-noised image
8 | ///
9 | /// This implementation matches:
10 | /// [Hugging Face Diffusers DPMSolverMultistepScheduler](https://github.com/huggingface/diffusers/blob/main/src/diffusers/schedulers/scheduling_dpmsolver_multistep.py)
11 | ///
12 | /// It uses the DPM-Solver++ algorithm: [code](https://github.com/LuChengTHU/dpm-solver) [paper](https://arxiv.org/abs/2211.01095).
13 | /// Limitations:
14 | /// - Only implemented for DPM-Solver++ algorithm (not DPM-Solver).
15 | /// - Second order only.
16 | /// - Assumes the model predicts epsilon.
17 | /// - No dynamic thresholding.
18 | /// - `midpoint` solver algorithm.
19 | @available(iOS 16.2, macOS 13.1, *)
20 | public final class DPMSolverMultistepScheduler: Scheduler {
21 | public let trainStepCount: Int
22 | public let inferenceStepCount: Int
23 | public let betas: [Float]
24 | public let alphas: [Float]
25 | public let alphasCumProd: [Float]
26 | public let timeSteps: [Int]
27 |
28 | public let alpha_t: [Float]
29 | public let sigma_t: [Float]
30 | public let lambda_t: [Float]
31 |
32 | public let solverOrder = 2
33 | private(set) var lowerOrderStepped = 0
34 |
35 | /// Whether to use lower-order solvers in the final steps. Only valid for less than 15 inference steps.
36 | /// We empirically find this trick can stabilize the sampling of DPM-Solver, especially with 10 or fewer steps.
37 | public let useLowerOrderFinal = true
38 |
39 | // Stores solverOrder (2) items
40 | private(set) var modelOutputs: [MLShapedArray<Float32>] = []
41 |
42 | /// Create a scheduler that uses a second order DPM-Solver++ algorithm.
43 | ///
44 | /// - Parameters:
45 | /// - stepCount: Number of inference steps to schedule
46 | /// - trainStepCount: Number of training diffusion steps
47 | /// - betaSchedule: Method to schedule betas from betaStart to betaEnd
48 | /// - betaStart: The starting value of beta for inference
49 | /// - betaEnd: The end value for beta for inference
50 | /// - Returns: A scheduler ready for its first step
51 | public init(
52 | stepCount: Int = 50,
53 | trainStepCount: Int = 1000,
54 | betaSchedule: BetaSchedule = .scaledLinear,
55 | betaStart: Float = 0.00085,
56 | betaEnd: Float = 0.012
57 | ) {
58 | self.trainStepCount = trainStepCount
59 | self.inferenceStepCount = stepCount
60 |
61 | switch betaSchedule {
62 | case .linear:
63 | self.betas = linspace(betaStart, betaEnd, trainStepCount)
64 | case .scaledLinear:
65 | self.betas = linspace(pow(betaStart, 0.5), pow(betaEnd, 0.5), trainStepCount).map({ $0 * $0 })
66 | }
67 |
68 | self.alphas = betas.map({ 1.0 - $0 })
69 | var alphasCumProd = self.alphas
70 | for i in 1..<alphasCumProd.count {
71 | alphasCumProd[i] *= alphasCumProd[i - 1]
72 | }
73 | self.alphasCumProd = alphasCumProd
74 | 
75 | self.alpha_t = vForce.sqrt(self.alphasCumProd)
76 | self.sigma_t = vForce.sqrt(vDSP.subtract([Float](repeating: 1, count: self.alphasCumProd.count), self.alphasCumProd))
77 | self.lambda_t = zip(self.alpha_t, self.sigma_t).map { α, σ in log(α) - log(σ) }
78 | 
79 | self.timeSteps = linspace(0, Float(self.trainStepCount - 1), stepCount).reversed().map { Int(round($0)) }
80 | }
81 | 
82 | /// Convert the model output to the corresponding type the algorithm needs.
83 | /// This implementation is for second-order DPM-Solver++, assuming the model
84 | /// predicts epsilon (noise).
85 | func convertModelOutput(modelOutput: MLShapedArray<Float32>, timestep: Int, sample: MLShapedArray<Float32>) -> MLShapedArray<Float32> {
86 | assert(modelOutput.scalars.count == sample.scalars.count)
87 | let (alpha_t, sigma_t) = (self.alpha_t[timestep], self.sigma_t[timestep])
88 |
89 | // This could be optimized with a Metal kernel if we find we need to
90 | let x0_scalars = zip(modelOutput.scalars, sample.scalars).map { m, s in
91 | (s - m * sigma_t) / alpha_t
92 | }
93 | return MLShapedArray<Float32>(scalars: x0_scalars, shape: modelOutput.shape)
94 | }
95 |
96 | /// One step for the first-order DPM-Solver (equivalent to DDIM).
97 | /// See https://arxiv.org/abs/2206.00927 for the detailed derivation.
98 | /// var names and code structure mostly follow https://github.com/huggingface/diffusers/blob/main/src/diffusers/schedulers/scheduling_dpmsolver_multistep.py
99 | func firstOrderUpdate(
100 | modelOutput: MLShapedArray<Float32>,
101 | timestep: Int,
102 | prevTimestep: Int,
103 | sample: MLShapedArray<Float32>
104 | ) -> MLShapedArray<Float32> {
105 | let (p_lambda_t, lambda_s) = (Double(lambda_t[prevTimestep]), Double(lambda_t[timestep]))
106 | let p_alpha_t = Double(alpha_t[prevTimestep])
107 | let (p_sigma_t, sigma_s) = (Double(sigma_t[prevTimestep]), Double(sigma_t[timestep]))
108 | let h = p_lambda_t - lambda_s
109 | // x_t = (sigma_t / sigma_s) * sample - (alpha_t * (torch.exp(-h) - 1.0)) * model_output
110 | let x_t = weightedSum(
111 | [p_sigma_t / sigma_s, -p_alpha_t * (exp(-h) - 1)],
112 | [sample, modelOutput]
113 | )
114 | return x_t
115 | }
116 |
117 | /// One step for the second-order multistep DPM-Solver++ algorithm, using the midpoint method.
118 | /// var names and code structure mostly follow https://github.com/huggingface/diffusers/blob/main/src/diffusers/schedulers/scheduling_dpmsolver_multistep.py
119 | func secondOrderUpdate(
120 | modelOutputs: [MLShapedArray<Float32>],
121 | timesteps: [Int],
122 | prevTimestep t: Int,
123 | sample: MLShapedArray<Float32>
124 | ) -> MLShapedArray<Float32> {
125 | let (s0, s1) = (timesteps[back: 1], timesteps[back: 2])
126 | let (m0, m1) = (modelOutputs[back: 1], modelOutputs[back: 2])
127 | let (p_lambda_t, lambda_s0, lambda_s1) = (Double(lambda_t[t]), Double(lambda_t[s0]), Double(lambda_t[s1]))
128 | let p_alpha_t = Double(alpha_t[t])
129 | let (p_sigma_t, sigma_s0) = (Double(sigma_t[t]), Double(sigma_t[s0]))
130 | let (h, h_0) = (p_lambda_t - lambda_s0, lambda_s0 - lambda_s1)
131 | let r0 = h_0 / h
132 | let D0 = m0
133 |
134 | // D1 = (1.0 / r0) * (m0 - m1)
135 | let D1 = weightedSum(
136 | [1/r0, -1/r0],
137 | [m0, m1]
138 | )
139 |
140 | // See https://arxiv.org/abs/2211.01095 for detailed derivations
141 | // x_t = (
142 | // (sigma_t / sigma_s0) * sample
143 | // - (alpha_t * (torch.exp(-h) - 1.0)) * D0
144 | // - 0.5 * (alpha_t * (torch.exp(-h) - 1.0)) * D1
145 | // )
146 | let x_t = weightedSum(
147 | [p_sigma_t/sigma_s0, -p_alpha_t * (exp(-h) - 1), -0.5 * p_alpha_t * (exp(-h) - 1)],
148 | [sample, D0, D1]
149 | )
150 | return x_t
151 | }
152 |
153 | public func step(output: MLShapedArray<Float32>, timeStep t: Int, sample: MLShapedArray<Float32>) -> MLShapedArray<Float32> {
154 | let stepIndex = timeSteps.firstIndex(of: t) ?? timeSteps.count - 1
155 | let prevTimestep = stepIndex == timeSteps.count - 1 ? 0 : timeSteps[stepIndex + 1]
156 |
157 | let lowerOrderFinal = useLowerOrderFinal && stepIndex == timeSteps.count - 1 && timeSteps.count < 15
158 | let lowerOrderSecond = useLowerOrderFinal && stepIndex == timeSteps.count - 2 && timeSteps.count < 15
159 | let lowerOrder = lowerOrderStepped < 1 || lowerOrderFinal || lowerOrderSecond
160 |
161 | let modelOutput = convertModelOutput(modelOutput: output, timestep: t, sample: sample)
162 | if modelOutputs.count == solverOrder { modelOutputs.removeFirst() }
163 | modelOutputs.append(modelOutput)
164 |
165 | let prevSample: MLShapedArray<Float32>
166 | if lowerOrder {
167 | prevSample = firstOrderUpdate(modelOutput: modelOutput, timestep: t, prevTimestep: prevTimestep, sample: sample)
168 | } else {
169 | prevSample = secondOrderUpdate(
170 | modelOutputs: modelOutputs,
171 | timesteps: [timeSteps[stepIndex - 1], t],
172 | prevTimestep: prevTimestep,
173 | sample: sample
174 | )
175 | }
176 | if lowerOrderStepped < solverOrder {
177 | lowerOrderStepped += 1
178 | }
179 |
180 | return prevSample
181 | }
182 | }
183 |
--------------------------------------------------------------------------------
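
A minimal sketch of driving the solver (assumes macOS 13.1+ and these sources in the same module; a 1-scalar "latent" stands in for the real [1, 4, H/8, W/8] tensors, and zero noise is passed purely to exercise the first/second-order switching):

```swift
import CoreML

let scheduler = DPMSolverMultistepScheduler(stepCount: 10)
var sample = MLShapedArray<Float32>(scalars: [1.0], shape: [1, 1, 1, 1])

for t in scheduler.timeSteps {
    // A real pipeline would put the U-Net's predicted noise here.
    let zeroNoise = MLShapedArray<Float32>(scalars: [0.0], shape: [1, 1, 1, 1])
    sample = scheduler.step(output: zeroNoise, timeStep: t, sample: sample)
}
print(sample.scalars)
```
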
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/Decoder.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 | import CoreML
6 |
7 | /// A decoder model which produces RGB images from latent samples
8 | @available(iOS 16.2, macOS 13.1, *)
9 | public struct Decoder: ResourceManaging {
10 |
11 | /// VAE decoder model
12 | var model: ManagedMLModel
13 |
14 | /// Create decoder from Core ML model
15 | ///
16 | /// - Parameters:
17 | /// - url: Location of compiled VAE decoder Core ML model
18 | /// - configuration: configuration to be used when the model is loaded
19 | /// - Returns: A decoder that will lazily load its required resources when needed or requested
20 | public init(modelAt url: URL, configuration: MLModelConfiguration) {
21 | self.model = ManagedMLModel(modelAt: url, configuration: configuration)
22 | }
23 |
24 | /// Ensure the model has been loaded into memory
25 | public func loadResources() throws {
26 | try model.loadResources()
27 | }
28 |
29 | /// Unload the underlying model to free up memory
30 | public func unloadResources() {
31 | model.unloadResources()
32 | }
33 |
34 | /// Batch decode latent samples into images
35 | ///
36 | /// - Parameters:
37 | /// - latents: Batch of latent samples to decode
38 | /// - Returns: decoded images
39 | public func decode(_ latents: [MLShapedArray<Float32>]) throws -> [CGImage] {
40 |
41 | // Form batch inputs for model
42 | let inputs: [MLFeatureProvider] = try latents.map { sample in
43 | // Reference pipeline scales the latent samples before decoding
44 | let sampleScaled = MLShapedArray<Float32>(
45 | scalars: sample.scalars.map { $0 / 0.18215 },
46 | shape: sample.shape)
47 |
48 | let dict = [inputName: MLMultiArray(sampleScaled)]
49 | return try MLDictionaryFeatureProvider(dictionary: dict)
50 | }
51 | let batch = MLArrayBatchProvider(array: inputs)
52 |
53 | // Batch predict with model
54 | let results = try model.perform { model in
55 | try model.predictions(fromBatch: batch)
56 | }
57 |
58 | // Transform the outputs to CGImages
59 | let images: [CGImage] = try (0..<results.count).map { i in
60 | let result = results.features(at: i)
61 | let outputName = result.featureNames.first!
62 | let output = result.featureValue(for: outputName)!.multiArrayValue!
63 | return try CGImage.fromShapedArray(MLShapedArray<Float32>(converting: output))
64 | }
65 |
66 | return images
67 | }
68 |
69 | var inputName: String {
70 | try! model.perform { model in
71 | model.modelDescription.inputDescriptionsByName.first!.key
72 | }
73 | }
74 |
75 | }
76 |
--------------------------------------------------------------------------------
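
For orientation, a hedged sketch of the final decode step (decodeFinalLatent and vaeURL are hypothetical names; the latent is whatever the scheduler loop produced):

```swift
import CoreML

func decodeFinalLatent(_ latent: MLShapedArray<Float32>, vaeURL: URL) throws -> CGImage {
    let decoder = Decoder(modelAt: vaeURL, configuration: MLModelConfiguration())
    // decode() applies the 1/0.18215 latent scaling internally, so the raw
    // scheduler output is passed in unchanged.
    return try decoder.decode([latent]).first!
}
```
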
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/Encoder.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 | import CoreML
6 |
7 | @available(iOS 16.0, macOS 13.0, *)
8 | /// Encoder, currently supports image2image
9 | public struct Encoder {
10 |
11 | public enum Error: String, Swift.Error {
12 | case latentOutputNotValid
13 | case batchLatentOutputEmpty
14 | }
15 |
16 | /// VAE encoder model, plus post-processing math and scheduler noise injection
17 | var model: MLModel
18 |
19 | /// Create encoder from Core ML model
20 | ///
21 | /// - Parameters:
22 | ///     - model: Core ML model for VAE encoder
23 | public init(model: MLModel) {
24 | self.model = model
25 | }
26 |
27 | /// Prediction queue
28 | let queue = DispatchQueue(label: "encoder.predict")
29 |
30 | /// Encode an input image into the latent space
31 | /// - Parameters:
32 | ///   - image: image used for image2image
33 | ///   - diagonalNoise: random noise for `DiagonalGaussianDistribution` operation
34 | ///   - noise: random noise for initial latent space based on strength argument
35 | ///   - alphasCumprodStep: scheduler-derived scaling values, traditionally computed inline by the PyTorch Diffusers pipeline
36 | /// - Returns: The encoded latent space as MLShapedArray
37 | public func encode(
38 | image: CGImage,
39 | diagonalNoise: MLShapedArray<Float32>,
40 | noise: MLShapedArray<Float32>,
41 | alphasCumprodStep: AlphasCumprodCalculation
42 | ) throws -> MLShapedArray<Float32> {
43 | let sample = try image.plannerRGBShapedArray
44 | let sqrtAlphasCumprod = MLShapedArray<Float32>(scalars: [alphasCumprodStep.sqrtAlphasCumprod], shape: [1, 1])
45 | let sqrtOneMinusAlphasCumprod = MLShapedArray<Float32>(scalars: [alphasCumprodStep.sqrtOneMinusAlphasCumprod], shape: [1, 1])
46 |
47 | let dict: [String: Any] = [
48 | "sample": MLMultiArray(sample),
49 | "diagonalNoise": MLMultiArray(diagonalNoise),
50 | "noise": MLMultiArray(noise),
51 | "sqrtAlphasCumprod": MLMultiArray(sqrtAlphasCumprod),
52 | "sqrtOneMinusAlphasCumprod": MLMultiArray(sqrtOneMinusAlphasCumprod),
53 | ]
54 | let featureProvider = try MLDictionaryFeatureProvider(dictionary: dict)
55 |
56 | let batch = MLArrayBatchProvider(array: [featureProvider])
57 |
58 | // Batch predict with model
59 | let results = try queue.sync { try model.predictions(fromBatch: batch) }
60 |
61 | let batchLatents: [MLShapedArray<Float32>] = try (0..<results.count).map { i in
62 | let result = results.features(at: i)
63 | guard
64 | let outputName = result.featureNames.first,
65 | let outputValue = result.featureValue(for: outputName)?.multiArrayValue
66 | else {
67 | throw Error.latentOutputNotValid
68 | }
69 | return MLShapedArray<Float32>(converting: outputValue)
70 | }
71 | 
72 | guard let latents = batchLatents.first else {
73 | throw Error.batchLatentOutputEmpty
74 | }
75 | return latents
76 | }
77 | }
78 | 
--------------------------------------------------------------------------------
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/ManagedMLModel.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 | 
4 | import Foundation
5 | import CoreML
6 | 
7 | /// A class to manage and gate access to a Core ML model
8 | ///
9 | /// It will automatically load a model into memory when needed or requested
10 | /// It allows one to request to unload the model from memory
11 | @available(iOS 16.2, macOS 13.1, *)
12 | public final class ManagedMLModel: ResourceManaging {
13 | 
14 | /// The location of the model
15 | var modelURL: URL
16 | 
17 | /// The configuration to be used when the model is loaded
18 | var configuration: MLModelConfiguration
19 | 
20 | /// The loaded model (when loaded)
21 | var loadedModel: MLModel?
22 | 
23 | /// Queue to protect access to loaded model
24 | var queue: DispatchQueue
25 | 
26 | /// Create a managed model given its location and desired loaded configuration
27 | ///
28 | /// - Parameters:
29 | ///     - url: The location of the model
30 | ///     - configuration: The configuration to be used when the model is loaded/used
31 | /// - Returns: A managed model that has not been loaded
32 | public init(modelAt url: URL, configuration: MLModelConfiguration) {
33 | self.modelURL = url
34 | self.configuration = configuration
35 | self.loadedModel = nil
36 | self.queue = DispatchQueue(label: "managed.\(url.lastPathComponent)")
37 | }
38 | 
39 | /// Instantiation and load model into memory
40 | public func loadResources() throws {
41 | try queue.sync {
42 | try loadModel()
43 | }
44 | }
45 | 
46 | /// Unload the model if it can be recreated or reloaded
47 | public func unloadResources() {
48 | queue.sync {
49 | loadedModel = nil
50 | }
51 | }
52 | 
53 | /// Perform an operation with the managed model via a supplied closure.
54 | /// The model will be loaded and supplied to the closure and should only be
55 | /// used within the closure to ensure all operations are protected
56 | ///
57 | /// - Parameters:
58 | ///     - body: Closure which performs an operation on the loaded model
59 | /// - Returns: The result of the closure
60 | public func perform<R>(_ body: (MLModel) throws -> R) throws -> R {
61 | return try queue.sync {
62 | try autoreleasepool {
63 | try loadModel()
64 | return try body(loadedModel!)
65 | }
66 | }
67 | }
68 |
69 | private func loadModel() throws {
70 | if loadedModel == nil {
71 | loadedModel = try MLModel(contentsOf: modelURL,
72 | configuration: configuration)
73 | }
74 | }
75 |
76 |
77 | }
78 |
--------------------------------------------------------------------------------
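
A sketch of the lazy-load/serialize pattern ManagedMLModel provides (the .mlmodelc path is hypothetical):

```swift
import CoreML

let managed = ManagedMLModel(
    modelAt: URL(fileURLWithPath: "/path/to/TextEncoder.mlmodelc"),
    configuration: MLModelConfiguration())

// The first perform() triggers loadModel(); queue.sync keeps concurrent
// callers from racing the load, and unloadResources() can reclaim memory later.
let inputNames = try managed.perform { model in
    Array(model.modelDescription.inputDescriptionsByName.keys)
}
print(inputNames)
```
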
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/Random.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 | import CoreML
6 |
7 | /// A random source consistent with NumPy
8 | ///
9 | /// This implementation matches:
10 | /// [NumPy's older randomkit.c](https://github.com/numpy/numpy/blob/v1.0/numpy/random/mtrand/randomkit.c)
11 | ///
12 | @available(iOS 16.2, macOS 13.1, *)
13 | struct NumPyRandomSource: RandomNumberGenerator {
14 |
15 | struct State {
16 | var key = [UInt32](repeating: 0, count: 624)
17 | var pos: Int = 0
18 | var nextGauss: Double? = nil
19 | }
20 |
21 | var state: State
22 |
23 | /// Initialize with a random seed
24 | ///
25 | /// - Parameters
26 | /// - seed: Seed for underlying Mersenne Twister 19937 generator
27 | /// - Returns random source
28 | init(seed: UInt32) {
29 | state = .init()
30 | var s = seed & 0xffffffff
31 | for i in 0 ..< state.key.count {
32 | state.key[i] = s
33 | s = UInt32((UInt64(1812433253) * UInt64(s ^ (s >> 30)) + UInt64(i) + 1) & 0xffffffff)
34 | }
35 | state.pos = state.key.count
36 | state.nextGauss = nil
37 | }
38 |
39 | /// Generate next UInt32 using fast 32bit Mersenne Twister
40 | mutating func nextUInt32() -> UInt32 {
41 | let n = 624
42 | let m = 397
43 | let matrixA: UInt64 = 0x9908b0df
44 | let upperMask: UInt32 = 0x80000000
45 | let lowerMask: UInt32 = 0x7fffffff
46 |
47 | var y: UInt32
48 | if state.pos == state.key.count {
49 | for i in 0 ..< (n - m) {
50 | y = (state.key[i] & upperMask) | (state.key[i + 1] & lowerMask)
51 | state.key[i] = state.key[i + m] ^ (y >> 1) ^ UInt32((UInt64(~(y & 1)) + 1) & matrixA)
52 | }
53 | for i in (n - m) ..< (n - 1) {
54 | y = (state.key[i] & upperMask) | (state.key[i + 1] & lowerMask)
55 | state.key[i] = state.key[i + (m - n)] ^ (y >> 1) ^ UInt32((UInt64(~(y & 1)) + 1) & matrixA)
56 | }
57 | y = (state.key[n - 1] & upperMask) | (state.key[0] & lowerMask)
58 | state.key[n - 1] = state.key[m - 1] ^ (y >> 1) ^ UInt32((UInt64(~(y & 1)) + 1) & matrixA)
59 | state.pos = 0
60 | }
61 | y = state.key[state.pos]
62 | state.pos += 1
63 |
64 | y ^= (y >> 11)
65 | y ^= (y << 7) & 0x9d2c5680
66 | y ^= (y << 15) & 0xefc60000
67 | y ^= (y >> 18)
68 |
69 | return y
70 | }
71 |
72 | mutating func next() -> UInt64 {
73 | let low = nextUInt32()
74 | let high = nextUInt32()
75 | return (UInt64(high) << 32) | UInt64(low)
76 | }
77 |
78 | /// Generate next random double value
79 | mutating func nextDouble() -> Double {
80 | let a = Double(nextUInt32() >> 5)
81 | let b = Double(nextUInt32() >> 6)
82 | return (a * 67108864.0 + b) / 9007199254740992.0
83 | }
84 |
85 | /// Generate next random value from a standard normal
86 | mutating func nextGauss() -> Double {
87 | if let nextGauss = state.nextGauss {
88 | state.nextGauss = nil
89 | return nextGauss
90 | }
91 | var x1, x2, r2: Double
92 | repeat {
93 | x1 = 2.0 * nextDouble() - 1.0
94 | x2 = 2.0 * nextDouble() - 1.0
95 | r2 = x1 * x1 + x2 * x2
96 | } while r2 >= 1.0 || r2 == 0.0
97 |
98 | // Box-Muller transform
99 | let f = sqrt(-2.0 * log(r2) / r2)
100 | state.nextGauss = f * x1
101 | return f * x2
102 | }
103 |
104 | /// Generates a random value from a normal distribution with given mean and standard deviation.
105 | mutating func nextNormal(mean: Double = 0.0, stdev: Double = 1.0) -> Double {
106 | nextGauss() * stdev + mean
107 | }
108 |
109 | /// Generates an array of random values from a normal distribution with given mean and standard deviation.
110 | mutating func normalArray(count: Int, mean: Double = 0.0, stdev: Double = 1.0) -> [Double] {
111 | (0 ..< count).map { _ in nextNormal(mean: mean, stdev: stdev) }
112 | }
113 |
114 | /// Generate a shaped array with scalars from a normal distribution with given mean and standard deviation.
115 | mutating func normalShapedArray(_ shape: [Int], mean: Double = 0.0, stdev: Double = 1.0) -> MLShapedArray<Double> {
116 | let count = shape.reduce(1, *)
117 | return .init(scalars: normalArray(count: count, mean: mean, stdev: stdev), shape: shape)
118 | }
119 | }
120 |
--------------------------------------------------------------------------------
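
The seeded generator is what makes a given (seed, prompt) pair reproducible; a quick sketch (NumPyRandomSource is internal, so this only compiles inside the module):

```swift
import CoreML

var a = NumPyRandomSource(seed: 42)
var b = NumPyRandomSource(seed: 42)
// Identical seeds produce identical Gaussian latent noise.
let noiseA = a.normalShapedArray([1, 4, 64, 64])
let noiseB = b.normalShapedArray([1, 4, 64, 64])
assert(noiseA.scalars == noiseB.scalars)
```
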
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/ResourceManaging.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | /// Protocol for managing internal resources
5 | public protocol ResourceManaging {
6 |
7 | /// Request resources to be loaded and ready if possible
8 | func loadResources() throws
9 |
10 | /// Request resources are unloaded / remove from memory if possible
11 | func unloadResources()
12 | }
13 |
14 | extension ResourceManaging {
15 | /// Request resources are pre-warmed by loading and unloading
16 | func prewarmResources() throws {
17 | try loadResources()
18 | unloadResources()
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/SafetyChecker.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 | import CoreML
6 | import Accelerate
7 |
8 | /// Image safety checking model
9 | @available(iOS 16.2, macOS 13.1, *)
10 | public struct SafetyChecker: ResourceManaging {
11 |
12 | /// Safety checking Core ML model
13 | var model: ManagedMLModel
14 |
15 | /// Creates safety checker
16 | ///
17 | /// - Parameters:
18 | /// - url: Location of compiled safety checking Core ML model
19 | /// - configuration: configuration to be used when the model is loaded
20 | /// - Returns: A safety checker that will lazily load its required resources when needed or requested
21 | public init(modelAt url: URL, configuration: MLModelConfiguration) {
22 | self.model = ManagedMLModel(modelAt: url, configuration: configuration)
23 | }
24 |
25 | /// Ensure the model has been loaded into memory
26 | public func loadResources() throws {
27 | try model.loadResources()
28 | }
29 |
30 | /// Unload the underlying model to free up memory
31 | public func unloadResources() {
32 | model.unloadResources()
33 | }
34 |
35 | typealias PixelBufferPFx1 = vImage.PixelBuffer<vImage.PlanarF>
36 | typealias PixelBufferP8x1 = vImage.PixelBuffer<vImage.Planar8>
37 | typealias PixelBufferPFx3 = vImage.PixelBuffer<vImage.PlanarFx3>
38 | typealias PixelBufferP8x3 = vImage.PixelBuffer<vImage.Planar8x3>
39 | typealias PixelBufferIFx3 = vImage.PixelBuffer<vImage.InterleavedFx3>
40 | typealias PixelBufferI8x3 = vImage.PixelBuffer<vImage.Interleaved8x3>
41 | typealias PixelBufferI8x4 = vImage.PixelBuffer<vImage.Interleaved8x4>
42 |
43 | enum SafetyCheckError: Error {
44 | case imageResizeFailure
45 | case imageToFloatFailure
46 | case modelInputFailure
47 | case unexpectedModelOutput
48 | }
49 |
50 | /// Check if image is safe
51 | ///
52 | /// - Parameters:
53 | /// - image: Image to check
54 | /// - Returns: Whether the model considers the image to be safe
55 | public func isSafe(_ image: CGImage) throws -> Bool {
56 |
57 | let inputName = "clip_input"
58 | let adjustmentName = "adjustment"
59 | let imagesNames = "images"
60 |
61 | let inputInfo = try model.perform { model in
62 | model.modelDescription.inputDescriptionsByName
63 | }
64 | let inputShape = inputInfo[inputName]!.multiArrayConstraint!.shape
65 |
66 | let width = inputShape[2].intValue
67 | let height = inputShape[3].intValue
68 |
69 | let resizedImage = try resizeToRGBA(image, width: width, height: height)
70 |
71 | let bufferP8x3 = try getRGBPlanes(of: resizedImage)
72 |
73 | let arrayPFx3 = normalizeToFloatShapedArray(bufferP8x3)
74 |
75 | guard let input = try? MLDictionaryFeatureProvider(
76 | dictionary:[
77 | // Input that is analyzed for safety
78 | inputName : MLMultiArray(arrayPFx3),
79 | // No adjustment, use default threshold
80 | adjustmentName : MLMultiArray(MLShapedArray<Float32>(scalars: [0], shape: [1])),
81 | // Supplying dummy images to be filtered (will be ignored)
82 | imagesNames : MLMultiArray(shape:[1, 512, 512, 3], dataType: .float16)
83 | ]
84 | ) else {
85 | throw SafetyCheckError.modelInputFailure
86 | }
87 |
88 | let result = try model.perform { model in
89 | try model.prediction(from: input)
90 | }
91 |
92 | let output = result.featureValue(for: "has_nsfw_concepts")
93 |
94 | guard let unsafe = output?.multiArrayValue?[0].boolValue else {
95 | throw SafetyCheckError.unexpectedModelOutput
96 | }
97 |
98 | return !unsafe
99 | }
100 |
101 | func resizeToRGBA(_ image: CGImage,
102 | width: Int, height: Int) throws -> CGImage {
103 |
104 | guard let context = CGContext(
105 | data: nil,
106 | width: width,
107 | height: height,
108 | bitsPerComponent: 8,
109 | bytesPerRow: width*4,
110 | space: CGColorSpaceCreateDeviceRGB(),
111 | bitmapInfo: CGImageAlphaInfo.noneSkipLast.rawValue) else {
112 | throw SafetyCheckError.imageResizeFailure
113 | }
114 |
115 | context.interpolationQuality = .high
116 | context.draw(image, in: CGRect(x: 0, y: 0, width: width, height: height))
117 | guard let resizedImage = context.makeImage() else {
118 | throw SafetyCheckError.imageResizeFailure
119 | }
120 |
121 | return resizedImage
122 | }
123 |
124 | func getRGBPlanes(of rgbaImage: CGImage) throws -> PixelBufferP8x3 {
125 | // Reference as interleaved 8 bit vImage PixelBuffer
126 | var emptyFormat = vImage_CGImageFormat()
127 | guard let bufferI8x4 = try? PixelBufferI8x4(
128 | cgImage: rgbaImage,
129 | cgImageFormat:&emptyFormat) else {
130 | throw SafetyCheckError.imageToFloatFailure
131 | }
132 |
133 | // Drop the alpha channel, keeping RGB
134 | let bufferI8x3 = PixelBufferI8x3(width: rgbaImage.width, height:rgbaImage.height)
135 | bufferI8x4.convert(to: bufferI8x3, channelOrdering: .RGBA)
136 |
137 | // De-interleave into 8-bit planes
138 | return PixelBufferP8x3(interleavedBuffer: bufferI8x3)
139 | }
140 |
141 | func normalizeToFloatShapedArray(_ bufferP8x3: PixelBufferP8x3) -> MLShapedArray<Float32> {
142 | let width = bufferP8x3.width
143 | let height = bufferP8x3.height
144 |
145 | let means = [0.485, 0.456, 0.406] as [Float]
146 | let stds = [0.229, 0.224, 0.225] as [Float]
147 |
148 | // Convert to normalized float 1x3xWxH input (planar)
149 | let arrayPFx3 = MLShapedArray<Float32>(repeating: 0.0, shape: [1, 3, width, height])
150 | for c in 0..<3 {
151 | arrayPFx3[0][c].withUnsafeShapedBufferPointer { ptr, _, strides in
152 | let floatChannel = PixelBufferPFx1(data: .init(mutating: ptr.baseAddress!),
153 | width: width, height: height,
154 | byteCountPerRow: strides[0]*4)
155 |
156 | bufferP8x3.withUnsafePixelBuffer(at: c) { uint8Channel in
157 | uint8Channel.convert(to: floatChannel) // maps [0 255] -> [0 1]
158 | floatChannel.multiply(by: 1.0/stds[c],
159 | preBias: -means[c],
160 | postBias: 0.0,
161 | destination: floatChannel)
162 | }
163 | }
164 | }
165 | return arrayPFx3
166 | }
167 | }
168 |
--------------------------------------------------------------------------------
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/SampleTimer.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 |
6 | /// A utility for timing events and tracking time statistics
7 | ///
8 | /// Typical usage
9 | /// ```
10 | /// let timer: SampleTimer
11 | ///
12 | /// for i in 0..<count {
13 | ///     timer.start()
14 | ///     doWork()
15 | ///     timer.stop()
16 | /// }
17 | ///
18 | /// print(String(format: "mean: %.2f, var: %.2f",
19 | ///              timer.mean, timer.variance))
20 | /// ```
21 | public final class SampleTimer: Codable {
22 | 
23 | var startTime: CFAbsoluteTime?
24 | var sum: Double = 0.0
25 | var sumOfSquares: Double = 0.0
26 | var count = 0
27 | var samples: [Double] = []
28 | 
29 | public init() {}
30 | 
31 | /// Start a sample, noting the current time
32 | public func start() {
33 | startTime = CFAbsoluteTimeGetCurrent()
34 | }
35 | 
36 | /// Stop a sample and record the elapsed time
37 | @discardableResult public func stop() -> Double {
38 | guard let startTime = startTime else {
39 | return 0
40 | }
41 |
42 | let elapsed = CFAbsoluteTimeGetCurrent() - startTime
43 | sum += elapsed
44 | sumOfSquares += elapsed * elapsed
45 | count += 1
46 | samples.append(elapsed)
47 | return elapsed
48 | }
49 |
50 | /// Mean of all sampled times
51 | public var mean: Double { sum / Double(count) }
52 |
53 | /// Variance of all sampled times
54 | public var variance: Double {
55 | guard count > 1 else {
56 | return 0.0
57 | }
58 | return sumOfSquares / Double(count - 1) - mean * mean
59 | }
60 |
61 | /// Standard deviation of all sampled times
62 | public var stdev: Double { variance.squareRoot() }
63 |
64 | /// Median of all sampled times
65 | public var median: Double {
66 | let sorted = samples.sorted()
67 | let (q, r) = sorted.count.quotientAndRemainder(dividingBy: 2)
68 | if r == 0 {
69 | return (sorted[q] + sorted[q - 1]) / 2.0
70 | } else {
71 | return Double(sorted[q])
72 | }
73 | }
74 |
75 | public var allSamples: [Double] {
76 | samples
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/Scheduler.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import CoreML
5 |
6 | @available(iOS 16.2, macOS 13.1, *)
7 | public protocol Scheduler {
8 | /// Number of diffusion steps performed during training
9 | var trainStepCount: Int { get }
10 |
11 | /// Number of inference steps to be performed
12 | var inferenceStepCount: Int { get }
13 |
14 | /// Training diffusion time steps indexed by inference time step
15 | var timeSteps: [Int] { get }
16 | 
17 | /// Training diffusion time steps for a given image2image strength (full schedule when strength is nil)
18 | func calculateTimesteps(strength: Float?) -> [Int]
19 |
20 | /// Schedule of betas which controls the amount of noise added at each timestep
21 | var betas: [Float] { get }
22 |
23 | /// 1 - betas
24 | var alphas: [Float] { get }
25 |
26 | /// Cached cumulative product of alphas
27 | var alphasCumProd: [Float] { get }
28 |
29 | /// Standard deviation of the initial noise distribution
30 | var initNoiseSigma: Float { get }
31 |
32 | /// Compute a de-noised image sample and step scheduler state
33 | ///
34 | /// - Parameters:
35 | /// - output: The predicted residual noise output of learned diffusion model
36 | /// - timeStep: The current time step in the diffusion chain
37 | /// - sample: The current input sample to the diffusion model
38 | /// - Returns: Predicted de-noised sample at the previous time step
39 | /// - Postcondition: The scheduler state is updated.
40 | /// The state holds the current sample and history of model output noise residuals
41 | func step(
42 | output: MLShapedArray<Float32>,
43 | timeStep t: Int,
44 | sample s: MLShapedArray<Float32>
45 | ) -> MLShapedArray<Float32>
46 | }
47 |
48 | @available(iOS 16.2, macOS 13.1, *)
49 | public extension Scheduler {
50 | var initNoiseSigma: Float { 1 }
51 | }
52 |
53 | @available(iOS 16.2, macOS 13.1, *)
54 | public extension Scheduler {
55 | /// Compute weighted sum of shaped arrays of equal shapes
56 | ///
57 | /// - Parameters:
58 | /// - weights: The weights each array is multiplied by
59 | /// - values: The arrays to be weighted and summed
60 | /// - Returns: sum_i weights[i]*values[i]
61 | func weightedSum(_ weights: [Double], _ values: [MLShapedArray<Float32>]) -> MLShapedArray<Float32> {
62 | assert(weights.count > 1 && values.count == weights.count)
63 | assert(values.allSatisfy({ $0.scalarCount == values.first!.scalarCount }))
64 | var w = Float(weights.first!)
65 | var scalars = values.first!.scalars.map({ $0 * w })
66 | for next in 1 ..< values.count {
67 | w = Float(weights[next])
68 | let nextScalars = values[next].scalars
69 | for i in 0 ..< scalars.count {
70 | scalars[i] += w * nextScalars[i]
71 | }
72 | }
73 | return MLShapedArray<Float32>(scalars: scalars, shape: values.first!.shape)
74 | }
75 | }
76 |
77 | // MARK: - Image2Image
78 |
79 | @available(iOS 16.2, macOS 13.1, *)
80 | public extension Scheduler {
81 |
82 | func calculateAlphasCumprod(strength: Float) -> AlphasCumprodCalculation {
83 | AlphasCumprodCalculation(
84 | alphasCumprod: alphasCumProd,
85 | timesteps: trainStepCount,
86 | steps: inferenceStepCount,
87 | strength: strength)
88 | }
89 | }
90 |
91 | // MARK: - Timesteps
92 |
93 | @available(iOS 16.2, macOS 13.1, *)
94 | public extension Scheduler {
95 |
96 | func calculateTimesteps(strength: Float?) -> [Int] {
97 | guard let strength else { return timeSteps }
98 | let startStep = max(inferenceStepCount - Int(Float(inferenceStepCount) * strength), 0)
99 | let actualTimesteps = Array(timeSteps[startStep...])
100 | return actualTimesteps
101 | }
102 | }
103 |
104 | // MARK: - BetaSchedule
105 |
106 | /// How to map a beta range to a sequence of betas to step over
107 | @available(iOS 16.2, macOS 13.1, *)
108 | public enum BetaSchedule {
109 | /// Linear stepping between start and end
110 | case linear
111 | /// Steps using linspace(sqrt(start),sqrt(end))^2
112 | case scaledLinear
113 | }
114 |
115 | // MARK: - PNDMScheduler
116 |
117 | /// A scheduler used to compute a de-noised image
118 | ///
119 | /// This implementation matches:
120 | /// [Hugging Face Diffusers PNDMScheduler](https://github.com/huggingface/diffusers/blob/main/src/diffusers/schedulers/scheduling_pndm.py)
121 | ///
122 | /// This scheduler uses the pseudo linear multi-step (PLMS) method only, skipping pseudo Runge-Kutta (PRK) steps
123 | @available(iOS 16.2, macOS 13.1, *)
124 | public final class PNDMScheduler: Scheduler {
125 | public let trainStepCount: Int
126 | public let inferenceStepCount: Int
127 | public let betas: [Float]
128 | public let alphas: [Float]
129 | public let alphasCumProd: [Float]
130 | public let timeSteps: [Int]
131 |
132 | // Internal state
133 | var counter: Int
134 | var ets: [MLShapedArray<Float32>]
135 | var currentSample: MLShapedArray<Float32>?
136 |
137 | /// Create a scheduler that uses a pseudo linear multi-step (PLMS) method
138 | ///
139 | /// - Parameters:
140 | /// - stepCount: Number of inference steps to schedule
141 | /// - trainStepCount: Number of training diffusion steps
142 | /// - betaSchedule: Method to schedule betas from betaStart to betaEnd
143 | /// - betaStart: The starting value of beta for inference
144 | /// - betaEnd: The end value for beta for inference
145 | /// - Returns: A scheduler ready for its first step
146 | public init(
147 | stepCount: Int = 50,
148 | trainStepCount: Int = 1000,
149 | betaSchedule: BetaSchedule = .scaledLinear,
150 | betaStart: Float = 0.00085,
151 | betaEnd: Float = 0.012
152 | ) {
153 | self.trainStepCount = trainStepCount
154 | self.inferenceStepCount = stepCount
155 |
156 | switch betaSchedule {
157 | case .linear:
158 | self.betas = linspace(betaStart, betaEnd, trainStepCount)
159 | case .scaledLinear:
160 | self.betas = linspace(pow(betaStart, 0.5), pow(betaEnd, 0.5), trainStepCount).map({ $0 * $0 })
161 | }
162 | self.alphas = betas.map({ 1.0 - $0 })
163 | var alphasCumProd = self.alphas
164 | for i in 1..<alphasCumProd.count {
165 | alphasCumProd[i] *= alphasCumProd[i - 1]
166 | }
167 | self.alphasCumProd = alphasCumProd
168 | 
169 | let stepsOffset = 1 // For stable diffusion
170 | let stepRatio = Float(trainStepCount / stepCount)
171 | let forwardSteps = (0..<stepCount).map {
172 | Int((Float($0) * stepRatio).rounded()) + stepsOffset
173 | }
174 | 
175 | var timeSteps: [Int] = []
176 | timeSteps.append(contentsOf: forwardSteps.dropLast(1))
177 | timeSteps.append(timeSteps.last!)
178 | timeSteps.append(forwardSteps.last!)
179 | timeSteps.reverse()
180 | 
181 | self.timeSteps = timeSteps
182 | self.counter = 0
183 | self.ets = []
184 | self.currentSample = nil
185 | }
186 | 
187 | /// Compute a de-noised image sample and step scheduler state
188 | ///
189 | /// - Parameters:
190 | ///   - output: The predicted residual noise output of learned diffusion model
191 | ///   - timeStep: The current time step in the diffusion chain
192 | ///   - sample: The current input sample to the diffusion model
193 | /// - Returns: Predicted de-noised sample at the previous time step
194 | /// - Postcondition: The scheduler state is updated with the current sample
195 | public func step(
196 | output: MLShapedArray<Float32>,
197 | timeStep t: Int,
198 | sample s: MLShapedArray<Float32>
199 | ) -> MLShapedArray<Float32> {
200 |
201 | var timeStep = t
202 | let stepInc = (trainStepCount / inferenceStepCount)
203 | var prevStep = timeStep - stepInc
204 | var modelOutput = output
205 | var sample = s
206 |
207 | if counter != 1 {
208 | if ets.count > 3 {
209 | ets = Array(ets[(ets.count - 3)..<ets.count])
210 | }
211 | ets.append(output)
212 | } else {
213 | prevStep = timeStep
214 | timeStep = timeStep - stepInc
215 | }
216 | 
217 | if ets.count == 1 && counter == 0 {
218 | modelOutput = output
219 | currentSample = sample
220 | } else if ets.count == 1 && counter == 1 {
221 | modelOutput = weightedSum(
222 | [1.0/2.0, 1.0/2.0],
223 | [output, ets[back: 1]]
224 | )
225 | sample = currentSample!
226 | currentSample = nil
227 | } else if ets.count == 2 {
228 | modelOutput = weightedSum(
229 | [3.0/2.0, -1.0/2.0],
230 | [ets[back: 1], ets[back: 2]]
231 | )
232 | } else if ets.count == 3 {
233 | modelOutput = weightedSum(
234 | [23.0/12.0, -16.0/12.0, 5.0/12.0],
235 | [ets[back: 1], ets[back: 2], ets[back: 3]]
236 | )
237 | } else {
238 | modelOutput = weightedSum(
239 | [55.0/24.0, -59.0/24.0, 37.0/24.0, -9.0/24.0],
240 | [ets[back: 1], ets[back: 2], ets[back: 3], ets[back: 4]]
241 | )
242 | }
243 | 
244 | let prevSample = previousSample(sample, timeStep, prevStep, modelOutput)
245 | counter += 1
246 | return prevSample
247 | }
248 | 
249 | /// Compute sample (denoised image) at previous step given a current time step
250 | ///
251 | /// - Parameters:
252 | ///   - sample: The current input to the model x_t
253 | ///   - timeStep: The current time step t
254 | ///   - prevStep: The previous time step t-δ
255 | ///   - modelOutput: Predicted noise residual at the current time step e_θ(x_t, t)
256 | /// - Returns: Computed previous sample x_(t-δ)
257 | func previousSample(
258 | _ sample: MLShapedArray<Float32>,
259 | _ timeStep: Int,
260 | _ prevStep: Int,
261 | _ modelOutput: MLShapedArray<Float32>
262 | ) -> MLShapedArray<Float32> {
263 |
264 | // Compute x_(t−δ) using formula (9) from
265 | // "Pseudo Numerical Methods for Diffusion Models on Manifolds",
266 | // Luping Liu, Yi Ren, Zhijie Lin & Zhou Zhao.
267 | // ICLR 2022
268 | //
269 | // Notation:
270 | //
271 | // alphaProdt α_t
272 | // alphaProdtPrev α_(t−δ)
273 | // betaProdt (1 - α_t)
274 | // betaProdtPrev (1 - α_(t−δ))
275 | let alphaProdt = alphasCumProd[timeStep]
276 | let alphaProdtPrev = alphasCumProd[max(0,prevStep)]
277 | let betaProdt = 1 - alphaProdt
278 | let betaProdtPrev = 1 - alphaProdtPrev
279 |
280 | // sampleCoeff = (α_(t−δ) - α_t) divided by
281 | // denominator of x_t in formula (9) and plus 1
282 | // Note: (α_(t−δ) - α_t) / (sqrt(α_t) * (sqrt(α_(t−δ)) + sqr(α_t))) =
283 | // sqrt(α_(t−δ)) / sqrt(α_t))
284 | let sampleCoeff = sqrt(alphaProdtPrev / alphaProdt)
285 |
286 | // Denominator of e_θ(x_t, t) in formula (9)
287 | let modelOutputDenomCoeff = alphaProdt * sqrt(betaProdtPrev)
288 | + sqrt(alphaProdt * betaProdt * alphaProdtPrev)
289 |
290 | // full formula (9)
291 | let modelCoeff = -(alphaProdtPrev - alphaProdt)/modelOutputDenomCoeff
292 | let prevSample = weightedSum(
293 | [Double(sampleCoeff), Double(modelCoeff)],
294 | [sample, modelOutput]
295 | )
296 |
297 | return prevSample
298 | }
299 | }
300 |
301 | /// Evenly spaced floats between specified interval
302 | ///
303 | /// - Parameters:
304 | /// - start: Start of the interval
305 | /// - end: End of the interval
306 | /// - count: The number of floats to return between [*start*, *end*]
307 | /// - Returns: Float array with *count* elements evenly spaced between at *start* and *end*
308 | func linspace(_ start: Float, _ end: Float, _ count: Int) -> [Float] {
309 | let scale = (end - start) / Float(count - 1)
310 | return (0..<count).map { start + Float($0) * scale }
311 | }
312 | 
313 | extension Collection {
314 | /// Collection element index from the back. *self[back: 1]* yields the last element
315 | public subscript(back i: Int) -> Element {
316 | return self[index(endIndex, offsetBy: -i)]
317 | }
318 | }
319 |
--------------------------------------------------------------------------------
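
A sketch of how strength trims the schedule for image2image (counts are approximate: PNDMScheduler's timeSteps holds stepCount + 1 entries because the last forward step is repeated):

```swift
let scheduler = PNDMScheduler(stepCount: 20)
let txt2img = scheduler.calculateTimesteps(strength: nil) // full schedule
let img2img = scheduler.calculateTimesteps(strength: 0.5) // roughly the back half
print(txt2img.count, img2img.count)
```
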
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/StableDiffusionPipeline+Resources.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 | import CoreML
6 |
7 | @available(iOS 16.2, macOS 13.1, *)
8 | public extension StableDiffusionPipeline {
9 |
10 | struct ResourceURLs {
11 |
12 | public let textEncoderURL: URL
13 | public let unetURL: URL
14 | public let unetChunk1URL: URL
15 | public let unetChunk2URL: URL
16 | public let decoderURL: URL
17 | public let encoderURL: URL
18 | public let safetyCheckerURL: URL
19 | public let vocabURL: URL
20 | public let mergesURL: URL
21 |
22 | public init(resourcesAt baseURL: URL) {
23 | textEncoderURL = baseURL.appending(path: "TextEncoder.mlmodelc")
24 | unetURL = baseURL.appending(path: "Unet.mlmodelc")
25 | unetChunk1URL = baseURL.appending(path: "UnetChunk1.mlmodelc")
26 | unetChunk2URL = baseURL.appending(path: "UnetChunk2.mlmodelc")
27 | decoderURL = baseURL.appending(path: "VAEDecoder.mlmodelc")
28 | encoderURL = baseURL.appending(path: "VAEEncoder.mlmodelc")
29 | safetyCheckerURL = baseURL.appending(path: "SafetyChecker.mlmodelc")
30 | vocabURL = baseURL.appending(path: "vocab.json")
31 | mergesURL = baseURL.appending(path: "merges.txt")
32 | }
33 | }
34 |
35 | /// Create stable diffusion pipeline using model resources at a
36 | /// specified URL
37 | ///
38 | /// - Parameters:
39 | /// - baseURL: URL pointing to directory holding all model
40 | /// and tokenization resources
41 | /// - configuration: The configuration to load model resources with
42 | /// - disableSafety: Load time disable of safety to save memory
43 | /// - reduceMemory: Setup pipeline in reduced memory mode
44 | /// - Returns:
45 | /// Pipeline ready for image generation if all necessary resources loaded
46 | init(resourcesAt baseURL: URL,
47 | configuration config: MLModelConfiguration = .init(),
48 | disableSafety: Bool = false,
49 | reduceMemory: Bool = false) throws {
50 |
51 | /// Expect URL of each resource
52 | let urls = ResourceURLs(resourcesAt: baseURL)
53 |
54 | // Text tokenizer and encoder
55 | let tokenizer = try BPETokenizer(mergesAt: urls.mergesURL, vocabularyAt: urls.vocabURL)
56 | let textEncoder = TextEncoder(tokenizer: tokenizer,
57 | modelAt: urls.textEncoderURL,
58 | configuration: config)
59 |
60 | // Unet model
61 | let unet: Unet
62 | if FileManager.default.fileExists(atPath: urls.unetChunk1URL.path) &&
63 | FileManager.default.fileExists(atPath: urls.unetChunk2URL.path) {
64 | unet = Unet(chunksAt: [urls.unetChunk1URL, urls.unetChunk2URL],
65 | configuration: config)
66 | } else {
67 | unet = Unet(modelAt: urls.unetURL, configuration: config)
68 | }
69 |
70 | // Image Decoder
71 | let decoder = Decoder(modelAt: urls.decoderURL, configuration: config)
72 |
73 | // Optional safety checker
74 | var safetyChecker: SafetyChecker? = nil
75 | if !disableSafety &&
76 | FileManager.default.fileExists(atPath: urls.safetyCheckerURL.path) {
77 | safetyChecker = SafetyChecker(modelAt: urls.safetyCheckerURL, configuration: config)
78 | }
79 |
80 | // Optional Image Encoder
81 | let encoder: Encoder?
82 | if
83 | let encoderModel = try? MLModel(contentsOf: urls.encoderURL, configuration: config)
84 | {
85 | encoder = Encoder(model: encoderModel)
86 | } else {
87 | encoder = nil
88 | }
89 |
90 | // Construct pipeline
91 | self.init(textEncoder: textEncoder,
92 | unet: unet,
93 | decoder: decoder,
94 | encoder: encoder,
95 | safetyChecker: safetyChecker,
96 | reduceMemory: reduceMemory)
97 | }
98 | }
99 |
--------------------------------------------------------------------------------
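
A hedged sketch of using this initializer (the resources path is hypothetical and must contain the .mlmodelc files listed in ResourceURLs; generation itself goes through the API in StableDiffusionPipeline.swift, which is not shown in this section):

```swift
import CoreML

let config = MLModelConfiguration()
config.computeUnits = .cpuAndNeuralEngine

let pipeline = try StableDiffusionPipeline(
    resourcesAt: URL(fileURLWithPath: "/path/to/SDResources"),
    configuration: config,
    disableSafety: false,
    reduceMemory: true)
// loadResources() is defined in StableDiffusionPipeline.swift (not shown here).
try pipeline.loadResources()
```
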
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/TextEncoder.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 | import CoreML
6 |
7 | /// A model for encoding text
8 | @available(iOS 16.2, macOS 13.1, *)
9 | public struct TextEncoder: ResourceManaging {
10 |
11 | /// Text tokenizer
12 | var tokenizer: BPETokenizer
13 |
14 | /// Embedding model
15 | var model: ManagedMLModel
16 |
17 | /// Creates text encoder which embeds a tokenized string
18 | ///
19 | /// - Parameters:
20 | /// - tokenizer: Tokenizer for input text
21 | /// - url: Location of compiled text encoding Core ML model
22 | /// - configuration: configuration to be used when the model is loaded
23 | /// - Returns: A text encoder that will lazily load its required resources when needed or requested
24 | public init(tokenizer: BPETokenizer,
25 | modelAt url: URL,
26 | configuration: MLModelConfiguration) {
27 | self.tokenizer = tokenizer
28 | self.model = ManagedMLModel(modelAt: url, configuration: configuration)
29 | }
30 |
31 | /// Ensure the model has been loaded into memory
32 | public func loadResources() throws {
33 | try model.loadResources()
34 | }
35 |
36 | /// Unload the underlying model to free up memory
37 | public func unloadResources() {
38 | model.unloadResources()
39 | }
40 |
41 | /// Encode input text/string
42 | ///
43 | /// - Parameters:
44 | /// - text: Input text to be tokenized and then embedded
45 | /// - Returns: Embedding representing the input text
46 | public func encode(_ text: String) throws -> MLShapedArray<Float32> {
47 |
48 | // Get models expected input length
49 | let inputLength = inputShape.last!
50 |
51 | // Tokenize, padding to the expected length
52 | var (tokens, ids) = tokenizer.tokenize(input: text, minCount: inputLength)
53 |
54 | // Truncate if necessary
55 | if ids.count > inputLength {
56 | tokens = tokens.dropLast(tokens.count - inputLength)
57 | ids = ids.dropLast(ids.count - inputLength)
58 | let truncated = tokenizer.decode(tokens: tokens)
59 | print("Needed to truncate input '\(text)' to '\(truncated)'")
60 | }
61 |
62 | // Use the model to generate the embedding
63 | return try encode(ids: ids)
64 | }
65 |
66 | /// Prediction queue
67 | let queue = DispatchQueue(label: "textencoder.predict")
68 |
69 | func encode(ids: [Int]) throws -> MLShapedArray<Float32> {
70 | let inputName = inputDescription.name
71 | let inputShape = inputShape
72 |
73 | let floatIds = ids.map { Float32($0) }
74 | let inputArray = MLShapedArray<Float32>(scalars: floatIds, shape: inputShape)
75 | let inputFeatures = try! MLDictionaryFeatureProvider(
76 | dictionary: [inputName: MLMultiArray(inputArray)])
77 |
78 | let result = try model.perform { model in
79 | try model.prediction(from: inputFeatures)
80 | }
81 |
82 | let embeddingFeature = result.featureValue(for: "last_hidden_state")
83 | return MLShapedArray<Float32>(converting: embeddingFeature!.multiArrayValue!)
84 | }
85 |
86 | var inputDescription: MLFeatureDescription {
87 | try! model.perform { model in
88 | model.modelDescription.inputDescriptionsByName.first!.value
89 | }
90 | }
91 |
92 | var inputShape: [Int] {
93 | inputDescription.multiArrayConstraint!.shape.map { $0.intValue }
94 | }
95 |
96 | }
97 |
--------------------------------------------------------------------------------
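
A small sketch of prompt embedding (embedPrompt is a hypothetical helper; the TextEncoder is assumed built as in StableDiffusionPipeline+Resources.swift above):

```swift
import CoreML

func embedPrompt(_ prompt: String, textEncoder: TextEncoder) throws -> MLShapedArray<Float32> {
    // tokenize() pads to the model's fixed length (77 tokens for CLIP);
    // encode() truncates with a console warning if the prompt is longer.
    return try textEncoder.encode(prompt)
}
```
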
/PromptToImage/Stable Diffusion/ml-stable-diffusion/pipeline/Unet.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 | import CoreML
6 |
7 | /// U-Net noise prediction model for stable diffusion
8 | @available(iOS 16.2, macOS 13.1, *)
9 | public struct Unet: ResourceManaging {
10 |
11 | /// Model used to predict noise residuals given an input, diffusion time step, and conditional embedding
12 | ///
13 | /// It can be in the form of a single model or multiple stages
14 | var models: [ManagedMLModel]
15 |
16 | /// Creates a U-Net noise prediction model
17 | ///
18 | /// - Parameters:
19 | /// - url: Location of single U-Net compiled Core ML model
20 | /// - configuration: Configuration to be used when the model is loaded
21 | /// - Returns: U-net model that will lazily load its required resources when needed or requested
22 | public init(modelAt url: URL,
23 | configuration: MLModelConfiguration) {
24 | self.models = [ManagedMLModel(modelAt: url, configuration: configuration)]
25 | }
26 |
27 | /// Creates a U-Net noise prediction model
28 | ///
29 | /// - Parameters:
30 | /// - urls: Location of chunked U-Net via urls to each compiled chunk
31 | /// - configuration: Configuration to be used when the model is loaded
32 | /// - Returns: U-net model that will lazily load its required resources when needed or requested
33 | public init(chunksAt urls: [URL],
34 | configuration: MLModelConfiguration) {
35 | self.models = urls.map { ManagedMLModel(modelAt: $0, configuration: configuration) }
36 | }
37 |
38 | /// Load resources.
39 | public func loadResources() throws {
40 | for model in models {
41 | try model.loadResources()
42 | self.setResolutionAndName()
43 | }
44 | }
45 |
46 | /// Unload the underlying model to free up memory
47 | public func unloadResources() {
48 | for model in models {
49 | model.unloadResources()
50 | }
51 | }
52 |
53 | /// Pre-warm resources
54 | public func prewarmResources() throws {
55 | // Override default to pre-warm each model
56 | for model in models {
57 | try model.loadResources()
58 | self.setResolutionAndName()
59 | model.unloadResources()
60 | }
61 | }
62 |
63 | /// Set resolution and model real name
64 | func setResolutionAndName() {
65 | // set resolution
66 | if let shape = (self.models[0].loadedModel?.modelDescription.inputDescriptionsByName["sample"] as? MLFeatureDescription)?.multiArrayConstraint?.shape {
67 | modelWidth = Double(truncating: shape[3]) * 8
68 | modelHeight = Double(truncating: shape[2]) * 8
69 | print("Current resolution: \(String(Int(modelWidth)))x\(String(Int(modelHeight)))")
70 | } else {
71 | modelWidth = 512
72 | modelHeight = 512
73 | }
74 | // set model real name
75 | if let name = (self.models[0].loadedModel?.modelDescription.metadata[MLModelMetadataKey(rawValue: "MLModelVersionStringKey")]) as? String {
76 | print("Created pipeline for model: \(name)")
77 | currentModelRealName = name
78 | }
79 | }
80 |
81 |
82 | var latentSampleDescription: MLFeatureDescription {
83 | try! models.first!.perform { model in
84 | model.modelDescription.inputDescriptionsByName["sample"]!
85 | }
86 | }
87 |
88 | /// The expected shape of the models latent sample input
89 | public var latentSampleShape: [Int] {
90 | latentSampleDescription.multiArrayConstraint!.shape.map { $0.intValue }
91 | }
92 |
93 | /// Batch prediction noise from latent samples
94 | ///
95 | /// - Parameters:
96 | /// - latents: Batch of latent samples in an array
97 | /// - timeStep: Current diffusion timestep
98 | /// - hiddenStates: Hidden state to condition on
99 | /// - Returns: Array of predicted noise residuals
100 | func predictNoise(
101 | latents: [MLShapedArray<Float32>],
102 | timeStep: Int,
103 | hiddenStates: MLShapedArray<Float32>
104 | ) throws -> [MLShapedArray<Float32>] {
105 |
106 | // Match time step batch dimension to the model / latent samples
107 | let t = MLShapedArray<Float32>(scalars:[Float(timeStep), Float(timeStep)],shape:[2])
108 |
109 | // Form batch input to model
110 | let inputs = try latents.map {
111 | let dict: [String: Any] = [
112 | "sample" : MLMultiArray($0),
113 | "timestep" : MLMultiArray(t),
114 | "encoder_hidden_states": MLMultiArray(hiddenStates)
115 | ]
116 | return try MLDictionaryFeatureProvider(dictionary: dict)
117 | }
118 | let batch = MLArrayBatchProvider(array: inputs)
119 |
120 | // Make predictions
121 | let results = try predictions(from: batch)
122 |
123 | // Pull out the results in Float32 format
124 | let noise = (0..<results.count).map { i in
125 | 
126 | let result = results.features(at: i)
127 | let outputName = result.featureNames.first!
128 | 
129 | let outputNoise = result.featureValue(for: outputName)!.multiArrayValue!
130 | 
131 | // To conform to this func return type make sure we return float32
132 | // Use the fact that the concatenating constructor for float32
133 | // can do type conversion:
134 | let fp32Noise = MLMultiArray(
135 | concatenating: [outputNoise],
136 | axis: 0,
137 | dataType: .float32
138 | )
139 | return MLShapedArray<Float32>(fp32Noise)
140 | }
141 |
142 | return noise
143 | }
144 |
145 | func predictions(from batch: MLBatchProvider) throws -> MLBatchProvider {
146 |
147 | var results = try models.first!.perform { model in
148 | try model.predictions(fromBatch: batch)
149 | }
150 |
151 | if models.count == 1 {
152 | return results
153 | }
154 |
155 | // Manual pipeline batch prediction
156 | let inputs = batch.arrayOfFeatureValueDictionaries
157 | for stage in models.dropFirst() {
158 |
159 | // Combine the original inputs with the outputs of the last stage
160 | let next = try results.arrayOfFeatureValueDictionaries
161 | .enumerated().map { (index, dict) in
162 | let nextDict = dict.merging(inputs[index]) { (out, _) in out }
163 | return try MLDictionaryFeatureProvider(dictionary: nextDict)
164 | }
165 | let nextBatch = MLArrayBatchProvider(array: next)
166 |
167 | // Predict
168 | results = try stage.perform { model in
169 | try model.predictions(fromBatch: nextBatch)
170 | }
171 | }
172 |
173 | return results
174 | }
175 | }
176 |
177 | extension MLFeatureProvider {
178 | var featureValueDictionary: [String : MLFeatureValue] {
179 | self.featureNames.reduce(into: [String : MLFeatureValue]()) { result, name in
180 | result[name] = self.featureValue(for: name)
181 | }
182 | }
183 | }
184 |
185 | extension MLBatchProvider {
186 | var arrayOfFeatureValueDictionaries: [[String : MLFeatureValue]] {
187 | (0..<self.count).map { self.features(at: $0).featureValueDictionary }
188 | }
189 | }
190 | 
--------------------------------------------------------------------------------
/PromptToImage/Stable Diffusion/ml-stable-diffusion/tokenizer/BPETokenizer+Reading.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 | 
4 | import Foundation
5 | 
6 | @available(iOS 16.2, macOS 13.1, *)
7 | extension BPETokenizer {
8 | enum FileReadError: Error {
9 | case invalidMergeFileLine(Int)
10 | }
11 | 
12 | /// Read vocab.json file at URL into a dictionary mapping a String to its Int token id
13 | static func readVocabulary(url: URL) throws -> [String: Int] {
14 | let content = try Data(contentsOf: url)
15 | return try JSONDecoder().decode([String: Int].self, from: content)
16 | }
17 |
18 | /// Read merges.txt file at URL into a dictionary mapping bigrams to the line number/rank/priority
19 | static func readMerges(url: URL) throws -> [TokenPair: Int] {
20 | let content = try String(contentsOf: url)
21 | let lines = content.split(separator: "\n")
22 |
23 | let merges: [(TokenPair, Int)] = try lines.enumerated().compactMap { (index, line) in
24 | if line.hasPrefix("#") {
25 | return nil
26 | }
27 | let pair = line.split(separator: " ")
28 | if pair.count != 2 {
29 | throw FileReadError.invalidMergeFileLine(index+1)
30 | }
31 | return (TokenPair(String(pair[0]), String(pair[1])),index)
32 | }
33 | return [TokenPair : Int](uniqueKeysWithValues: merges)
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/PromptToImage/Stable Diffusion/ml-stable-diffusion/tokenizer/BPETokenizer.swift:
--------------------------------------------------------------------------------
1 | // For licensing see accompanying LICENSE.md file.
2 | // Copyright (C) 2022 Apple Inc. All Rights Reserved.
3 |
4 | import Foundation
5 |
6 | /// A tokenizer based on byte pair encoding.
7 | @available(iOS 16.2, macOS 13.1, *)
8 | public struct BPETokenizer {
9 | /// A dictionary that maps pairs of tokens to the rank/order of the merge.
10 | let merges: [TokenPair : Int]
11 |
12 | /// A dictionary from of tokens to identifiers.
13 | let vocabulary: [String: Int]
14 |
15 | /// The start token.
16 | let startToken: String = "<|startoftext|>"
17 |
18 | /// The end token.
19 | let endToken: String = "<|endoftext|>"
20 |
21 | /// The token used for padding
22 | let padToken: String = "<|endoftext|>"
23 |
24 | /// The unknown token.
25 | let unknownToken: String = "<|endoftext|>"
26 |
27 | var unknownTokenID: Int {
28 | vocabulary[unknownToken, default: 0]
29 | }
30 |
31 | /// Creates a tokenizer.
32 | ///
33 | /// - Parameters:
34 | /// - merges: A dictionary that maps pairs of tokens to the rank/order of the merge.
35 | /// - vocabulary: A dictionary from of tokens to identifiers.
36 | public init(merges: [TokenPair: Int], vocabulary: [String: Int]) {
37 | self.merges = merges
38 | self.vocabulary = vocabulary
39 | }
40 |
41 | /// Creates a tokenizer by loading merges and vocabulary from URLs.
42 | ///
43 | /// - Parameters:
44 | /// - mergesURL: The URL of a text file containing merges.
45 | /// - vocabularyURL: The URL of a JSON file containing the vocabulary.
46 | public init(mergesAt mergesURL: URL, vocabularyAt vocabularyURL: URL) throws {
47 | self.merges = try Self.readMerges(url: mergesURL)
48 | self.vocabulary = try! Self.readVocabulary(url: vocabularyURL)
49 | }
50 |
51 | /// Tokenizes an input string.
52 | ///
53 | /// - Parameters:
54 | /// - input: A string.
55 | /// - minCount: The minimum number of tokens to return.
56 | /// - Returns: An array of tokens and an array of token identifiers.
57 | public func tokenize(input: String, minCount: Int? = nil) -> (tokens: [String], tokenIDs: [Int]) {
58 | var tokens: [String] = []
59 |
60 | tokens.append(startToken)
61 | tokens.append(contentsOf: encode(input: input))
62 | tokens.append(endToken)
63 |
64 | // Pad if there was a min length specified
65 | if let minLen = minCount, minLen > tokens.count {
66 | tokens.append(contentsOf: repeatElement(padToken, count: minLen - tokens.count))
67 | }
68 |
69 | let ids = tokens.map({ vocabulary[$0, default: unknownTokenID] })
70 | return (tokens: tokens, tokenIDs: ids)
71 | }
72 |
73 | /// Returns the token identifier for a token.
74 | public func tokenID(for token: String) -> Int? {
75 | vocabulary[token]
76 | }
77 |
78 | /// Returns the token for a token identifier.
79 | public func token(id: Int) -> String? {
80 | vocabulary.first(where: { $0.value == id })?.key
81 | }
82 |
83 | /// Decodes a sequence of tokens into a fully formed string
84 | public func decode(tokens: [String]) -> String {
85 | String(tokens.joined())
86 | .replacingOccurrences(of: "", with: " ")
87 | .replacingOccurrences(of: startToken, with: "")
88 | .replacingOccurrences(of: endToken, with: "")
89 | }
90 |
91 | /// Encode an input string to a sequence of tokens
92 | func encode(input: String) -> [String] {
93 | let normalized = input.trimmingCharacters(in: .whitespacesAndNewlines).lowercased()
94 | let words = normalized.split(separator: " ")
95 | return words.flatMap({ encode(word: $0) })
96 | }
97 |
98 | /// Encode a single word into a sequence of tokens
99 | func encode(word: Substring) -> [String] {
100 | var tokens = word.map { String($0) }
101 | if let last = tokens.indices.last {
102 | tokens[last] = tokens[last] + ""
103 | }
104 |
105 | while true {
106 | let pairs = pairs(for: tokens)
107 | let canMerge = pairs.filter { merges[$0] != nil }
108 |
109 | if canMerge.isEmpty {
110 | break
111 | }
112 |
113 | // If multiple merges are found, use the one with the lowest rank
114 | let shouldMerge = canMerge.min { merges[$0]! < merges[$1]! }!
115 | tokens = update(tokens, merging: shouldMerge)
116 | }
117 | return tokens
118 | }
119 |
120 | /// Get the set of adjacent pairs / bigrams from a sequence of tokens
121 | func pairs(for tokens: [String]) -> Set<TokenPair> {
122 | guard tokens.count > 1 else {
123 | return Set()
124 | }
125 |
126 | var pairs = Set<TokenPair>(minimumCapacity: tokens.count - 1)
127 | var prev = tokens.first!
128 | for current in tokens.dropFirst() {
129 | pairs.insert(TokenPair(prev, current))
130 | prev = current
131 | }
132 | return pairs
133 | }
134 |
135 | /// Update the sequence of tokens by greedily merging instances of a specific bigram
136 | func update(_ tokens: [String], merging bigram: TokenPair) -> [String] {
137 | guard tokens.count > 1 else {
138 | return []
139 | }
140 |
141 | var newTokens = [String]()
142 | newTokens.reserveCapacity(tokens.count - 1)
143 |
144 | var index = 0
145 | while index < tokens.count {
146 | let remainingTokens = tokens[index...]
147 | if let startMatchIndex = remainingTokens.firstIndex(of: bigram.first) {
148 | // Found a possible match, append everything before it
149 | newTokens.append(contentsOf: tokens[index..<startMatchIndex])
150 |
151 | // If the next token completes the bigram, merge the pair
152 | if startMatchIndex < tokens.count - 1 && tokens[startMatchIndex + 1] == bigram.second {
153 | newTokens.append(bigram.first + bigram.second)
154 | index = startMatchIndex + 2
155 | } else {
156 | // Otherwise keep the first token and continue scanning
157 | newTokens.append(bigram.first)
158 | index = startMatchIndex + 1
159 | }
160 | } else {
161 | // No more matches, append the rest unmerged
162 | newTokens.append(contentsOf: tokens[index...])
163 | break
164 | }
165 | }
166 | return newTokens
167 | }
168 |
169 | /// A hashable tuple of strings
170 | public struct TokenPair: Hashable {
171 | let first: String
172 | let second: String
173 |
174 | init(_ first: String, _ second: String) {
175 | self.first = first
176 | self.second = second
177 | }
178 | }
179 | }
--------------------------------------------------------------------------------
/PromptToImage/Start.swift:
--------------------------------------------------------------------------------
17 | func ...() -> UInt64 {
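// ProcessInfo reports physicalMemory in bytes; dividing by 1_073_741_824 (2^30) yields whole GiB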
18 | return ProcessInfo.processInfo.physicalMemory / 1073741824
19 | }
20 |
21 | extension AppDelegate {
22 |
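// Startup sequence: create sandbox directories, restore settings, then load models
// with a fallback chain: last-used model -> built-in model (App Store builds only)
// -> first loadable custom model -> model download sheet.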
23 | func startPromptToImage() {
24 | print("Starting PromptToImage")
25 |
26 | // create custom directories in app sandbox if needed
27 | createModelsDir()
28 | createHistoryDir()
29 | createUpscalersDir()
30 |
31 | // set model and compute units
32 | if CGEventSource.keyState(.combinedSessionState, key: 0x3a) ||   // 0x3a = OPTION
33 | CGEventSource.keyState(.combinedSessionState, key: 0x37) {       // 0x37 = COMMAND
34 | // app was launched with OPTION or COMMAND held down: use factory settings
35 | currentModelResourcesURL = builtInModelResourcesURL
36 | currentComputeUnits = defaultComputeUnits
37 | } else {
38 | // set model url
39 | currentModelResourcesURL = UserDefaults.standard.url(forKey: "modelResourcesURL") ?? builtInModelResourcesURL
40 | // set compute units
41 | if let str = UserDefaults.standard.string(forKey: "computeUnits") {
42 | print("Compute units: \(str)")
43 | currentComputeUnits = str2cu(str: str)
44 | }
45 | }
46 |
47 | // show main window
48 | wins["main"] = SDMainWindowController(windowNibName: "SDMainWindowController", info: nil)
49 |
50 | // load models
51 | print("Built-in model exists: \(builtInModelExists())")
52 |
53 | // load default upscaler CoreML model (realesrgan512.mlmodelc)
54 | DispatchQueue.global().async {
55 | loadUpscalerModel()
56 | }
57 |
58 | // load Stable Diffusion CoreML models
59 | if !builtInModelExists() && installedCustomModels().isEmpty {
60 | // ALL MODEL DIRS EMPTY, show model download window
61 | DispatchQueue.main.async {
62 | if let ctrl = wins["main"] as? SDMainWindowController {
63 | ctrl.window?.beginSheet(ctrl.downloadWindow)
64 | }
65 | }
66 |
67 | } else {
68 | // ATTEMPT TO LOAD A MODEL
69 | DispatchQueue.global().async {
70 | // load last used model
71 | print("Load last used model from \(currentModelResourcesURL)...")
72 | createStableDiffusionPipeline(computeUnits: currentComputeUnits, url:currentModelResourcesURL)
73 |
74 | if sdPipeline == nil {
75 | print("unable to load last used model, trying built-in model at \(builtInModelResourcesURL) (appstore only)")
76 | // unable to load last used model, load built-in model if available (MacAppStore only)
77 | currentComputeUnits = defaultComputeUnits
78 | createStableDiffusionPipeline(computeUnits: defaultComputeUnits, url:builtInModelResourcesURL)
79 | }
80 |
81 | if sdPipeline == nil {
82 | print("unable to load built-in model, checking custom models dir...")
83 | // unable to load built-in model, checking custom models directory...
84 | for customModelURL in installedCustomModels() {
85 | print("Attempting to load custom model \(customModelURL.lastPathComponent)")
86 | createStableDiffusionPipeline(computeUnits: defaultComputeUnits, url:customModelURL)
87 | if sdPipeline != nil {
88 | print("Success loading model \(customModelURL.lastPathComponent)")
89 | currentModelResourcesURL = customModelURL
90 | // save to user defaults
91 | UserDefaults.standard.set(currentModelResourcesURL, forKey: "modelResourcesURL")
92 | return
93 | }
94 | }
95 | } else {
96 | // save to user defaults
97 | UserDefaults.standard.set(currentModelResourcesURL, forKey: "modelResourcesURL")
98 | }
99 |
100 | if sdPipeline == nil {
101 | // unable to load model, request user interaction
102 | print("Unable to load a Stable Diffusion model!")
103 | // show model download window
104 | DispatchQueue.main.async {
105 | if let ctrl = wins["main"] as? SDMainWindowController {
106 | ctrl.window?.beginSheet(ctrl.downloadWindow)
107 | }
108 | }
109 | }
110 | }
111 | }
112 |
113 |
114 |
115 | }
116 |
117 |
118 |
119 |
120 |
121 | func willTerminate() {
122 | UserDefaults.standard.setValue(cu2str(cu: currentComputeUnits), forKey: "computeUnits")
123 | }
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 | }
133 |
134 |
135 | // MLComputeUnits <-> String conversion helpers
136 |
137 | func cu2str(cu:MLComputeUnits) -> String {
138 | switch cu {
139 | case .cpuAndGPU: return "cpuAndGPU"
140 | case .cpuAndNeuralEngine: return "cpuAndNeuralEngine"
141 | case .cpuOnly: return "cpuOnly"
142 | default: return "all"
143 | }
144 | }
145 |
146 | func cu2hrstr(cu:MLComputeUnits) -> String {
147 | switch cu {
148 | case .cpuAndGPU: return "CPU and GPU"
149 | case .cpuAndNeuralEngine: return "CPU and Neural Engine"
150 | case .cpuOnly: return "CPU only"
151 | default: return "All Compute Units"
152 | }
153 | }
154 |
155 | func str2cu(str:String) -> MLComputeUnits {
156 | switch str {
157 | case "cpuAndGPU": return .cpuAndGPU
158 | case "cpuAndNeuralEngine": return .cpuAndNeuralEngine
159 | case "cpuOnly": return .cpuOnly
160 | default: return .all
161 | }
162 | }
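
// Sketch (not part of the original file): round-tripping compute units through
// UserDefaults with the helpers above, mirroring willTerminate() and startPromptToImage().
func exampleComputeUnitsRoundTrip() {
UserDefaults.standard.set(cu2str(cu: .cpuAndNeuralEngine), forKey: "computeUnits")
let restored = str2cu(str: UserDefaults.standard.string(forKey: "computeUnits") ?? "all")
assert(restored == .cpuAndNeuralEngine)
}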
163 |
--------------------------------------------------------------------------------
/PromptToImage/Upscale Model/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Upscale Model/.gitkeep
--------------------------------------------------------------------------------
/PromptToImage/Upscale Model/realesrgan512.mlmodel:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/Upscale Model/realesrgan512.mlmodel
--------------------------------------------------------------------------------
/PromptToImage/Upscaler.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Upscaler.swift
3 | // FreeScaler
4 | //
5 | // Created by Hany El Imam on 28/11/22.
6 | //
7 |
8 |
9 | import Foundation
10 | import Cocoa
11 | import Vision
12 | import Accelerate
13 |
14 | var currentUpscalerName : String = String()
15 |
16 | // default model: realesrgan512
17 |
18 | class Upscaler : NSObject {
19 |
20 | static let shared = Upscaler()
21 | private override init() {}
22 |
23 | let conf = MLModelConfiguration()
24 | var request: VNCoreMLRequest?
25 | var visionModel: VNCoreMLModel?
26 |
27 | var myWidth : CGFloat = 0
28 | var myHeight : CGFloat = 0
29 |
30 |
31 | // MARK: Setup CoreML Model
32 |
33 | func setupUpscaleModelFromPath(path:String,
34 | computeUnits:MLComputeUnits) {
35 | print("setting up CoreML upscale model from path \(path)")
36 | self.conf.computeUnits = computeUnits
37 |
38 | // load the CoreML model and wrap it for Vision; fail gracefully instead of crashing
39 | guard let mlModel = try? MLModel(contentsOf: URL(fileURLWithPath: path), configuration: self.conf),
40 | let visionModel = try? VNCoreMLModel(for: mlModel) else {
41 | print("Error: unable to load upscale model at \(path)")
42 | currentUpscalerName = String()
43 | return
44 | }
45 | self.visionModel = visionModel
46 | self.request = VNCoreMLRequest(model: visionModel)
47 | self.request?.imageCropAndScaleOption = .scaleFill
48 | self.request?.usesCPUOnly = false
49 | currentUpscalerName = URL(fileURLWithPath: path).deletingPathExtension().lastPathComponent
50 | }
51 |
52 |
53 |
54 |
55 | // MARK: Upscale
56 |
57 | func upscaledImage(image:NSImage) -> NSImage? {
58 | return self.predict(with: image)
59 | }
60 |
61 |
62 |
63 | func predict(with image: NSImage) -> NSImage? {
64 | self.myWidth = image.size.width
65 | self.myHeight = image.size.height
66 | guard let ciImage = image.ciImage() else { return nil }
67 | guard let request = self.request else { return nil }
68 | let handler = VNImageRequestHandler(ciImage: ciImage)
69 | try? handler.perform([request])
70 | // result
71 | if let result = request.results?.first as? VNPixelBufferObservation {
72 | // resize output pixbuf
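73 | // the built-in realesrgan512 model upscales 4x, so map the model output back to exactly 4x the input size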
73 | let factor : CGFloat = 4
74 | if let newbuffer = self.resizePixelBuffer(result.pixelBuffer,
75 | width: Int(self.myWidth * factor),
76 | height: Int(self.myHeight * factor)) {
77 | return self.pixbufferToNSImage(pixbuf: newbuffer)
78 | }
79 |
80 | }
81 | return nil
82 |
83 | }
84 |
85 |
86 |
87 | // MARK: Pixel Buffer
88 |
89 | // PIXELBUFFER TO NSIMAGE
90 | func pixbufferToNSImage(pixbuf:CVPixelBuffer) -> NSImage? {
91 | let ciimage = CIImage(cvPixelBuffer: pixbuf)
92 | let context = CIContext(options: nil)
93 | let width = CVPixelBufferGetWidth(pixbuf)
94 | let height = CVPixelBufferGetHeight(pixbuf)
95 | // createCGImage can fail; return nil instead of force-unwrapping
96 | guard let cgImage = context.createCGImage(ciimage, from: CGRect(x: 0, y: 0, width: width, height: height)) else { return nil }
97 | return NSImage(cgImage: cgImage, size: CGSize(width: width, height: height))
98 | }
100 |
101 |
102 |
103 |
104 |
105 | // RESIZE PIXELBUFFER
106 | func resizePixelBuffer(_ pixelBuffer: CVPixelBuffer,
107 | width: Int, height: Int) -> CVPixelBuffer? {
108 | return resizePixelBuffer(pixelBuffer, cropX: 0, cropY: 0,
109 | cropWidth: CVPixelBufferGetWidth(pixelBuffer),
110 | cropHeight: CVPixelBufferGetHeight(pixelBuffer),
111 | scaleWidth: width, scaleHeight: height)
112 | }
113 |
114 | func resizePixelBuffer(_ srcPixelBuffer: CVPixelBuffer,
115 | cropX: Int,
116 | cropY: Int,
117 | cropWidth: Int,
118 | cropHeight: Int,
119 | scaleWidth: Int,
120 | scaleHeight: Int) -> CVPixelBuffer? {
121 |
122 | CVPixelBufferLockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
123 | guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer) else {
124 | print("Error: could not get pixel buffer base address")
125 | CVPixelBufferUnlockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0)); return nil
126 | }
127 | let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
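// offset assumes 4 bytes per pixel (32-bit ARGB/BGRA), matching vImageScale_ARGB8888 below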
128 | let offset = cropY*srcBytesPerRow + cropX*4
129 | var srcBuffer = vImage_Buffer(data: srcData.advanced(by: offset),
130 | height: vImagePixelCount(cropHeight),
131 | width: vImagePixelCount(cropWidth),
132 | rowBytes: srcBytesPerRow)
133 |
134 | let destBytesPerRow = scaleWidth*4
135 | guard let destData = malloc(scaleHeight*destBytesPerRow) else {
136 | print("Error: out of memory")
137 | CVPixelBufferUnlockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0)); return nil
138 | }
139 | var destBuffer = vImage_Buffer(data: destData,
140 | height: vImagePixelCount(scaleHeight),
141 | width: vImagePixelCount(scaleWidth),
142 | rowBytes: destBytesPerRow)
143 |
144 | let error = vImageScale_ARGB8888(&srcBuffer, &destBuffer, nil, vImage_Flags(0))
145 | CVPixelBufferUnlockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
146 | if error != kvImageNoError {
147 | print("Error:", error)
148 | free(destData)
149 | return nil
150 | }
151 |
152 | let releaseCallback: CVPixelBufferReleaseBytesCallback = { _, ptr in
153 | if let ptr = ptr {
154 | free(UnsafeMutableRawPointer(mutating: ptr))
155 | }
156 | }
157 |
158 | let pixelFormat = CVPixelBufferGetPixelFormatType(srcPixelBuffer)
159 | var dstPixelBuffer: CVPixelBuffer?
160 | let status = CVPixelBufferCreateWithBytes(nil, scaleWidth, scaleHeight,
161 | pixelFormat, destData,
162 | destBytesPerRow, releaseCallback,
163 | nil, nil, &dstPixelBuffer)
164 | if status != kCVReturnSuccess {
165 | print("Error: could not create new pixel buffer")
166 | free(destData)
167 | return nil
168 | }
169 | return dstPixelBuffer
170 | }
171 |
172 |
173 |
174 |
175 | }
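
// Sketch (not part of the original file): end-to-end use of the shared upscaler;
// `modelPath` is a hypothetical path to a compiled CoreML upscale model (.mlmodelc).
func exampleUpscale(modelPath: String, image: NSImage) -> NSImage? {
Upscaler.shared.setupUpscaleModelFromPath(path: modelPath, computeUnits: .all)
return Upscaler.shared.upscaledImage(image: image)
}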
176 |
--------------------------------------------------------------------------------
/PromptToImage/img/aneoff.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/aneoff.png
--------------------------------------------------------------------------------
/PromptToImage/img/aneon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/aneon.png
--------------------------------------------------------------------------------
/PromptToImage/img/bigsd-ship.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/bigsd-ship.png
--------------------------------------------------------------------------------
/PromptToImage/img/cpuoff.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/cpuoff.png
--------------------------------------------------------------------------------
/PromptToImage/img/cpuon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/cpuon.png
--------------------------------------------------------------------------------
/PromptToImage/img/gpuoff.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/gpuoff.png
--------------------------------------------------------------------------------
/PromptToImage/img/gpuon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/gpuon.png
--------------------------------------------------------------------------------
/PromptToImage/img/prompttoimage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/prompttoimage.png
--------------------------------------------------------------------------------
/PromptToImage/img/sd-ship.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/sd-ship.png
--------------------------------------------------------------------------------
/PromptToImage/img/testimage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/testimage.png
--------------------------------------------------------------------------------
/PromptToImage/img/tree.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TheMurusTeam/PromptToImage/0b91aaa9f9c36d167b17e0c5afa8e245c7c7653d/PromptToImage/img/tree.png
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PromptToImage
2 | Swift/AppKit CoreML Stable Diffusion app for macOS
3 |
4 |
5 |
6 | # Features
7 | - Negative Prompt
8 | - Guidance Scale
9 | - Multiple Images
10 | - Image to Image
11 | - History
12 | - Export image with EXIF metadata
13 | - Sandboxed app
14 | - Custom Stable Diffusion models
15 | - Custom upscale models
16 | - Supports Stable Diffusion with custom output size
17 | - Built-in 4x RealESRGAN Upscaler
18 | - Default model: Stable Diffusion 2.1 SPLIT EINSUM (downloaded automatically on first launch)
19 |
20 | # Download App
21 | Beta available via Apple TestFlight here: https://testflight.apple.com/join/oMxyZ7wO
22 |
23 | # Stable Diffusion Models
24 | PromptToImage supports only models in CoreML format. This repo does not include any model.
25 | The app bundle does not include Stable Diffusion models. The first time you launch the app, the default Stable Diffusion 2.1 SPLIT EINSUM model is downloaded and installed.
26 | You can find more Stable Diffusion CoreML models designed for this app here:
27 | https://huggingface.co/TheMurusTeam
28 | Learn how to convert Stable Diffusion models to CoreML format here: https://github.com/apple/ml-stable-diffusion
29 |
30 | # Install custom Stable Diffusion models
31 | 1. Download a CoreML Stable Diffusion model. You can find some models here: https://huggingface.co/TheMurusTeam/
32 | 2. Unzip the model
33 | 3. Open the models popup button at the top of the main window, select "Import CoreML Stable Diffusion model...", then choose the model directory
34 |
35 | # Upscale models
36 | PromptToImage supports only upscale models in CoreML format. The app comes bundled with the built-in RealESRGAN upscale model. You can add custom models from the upscale popup button. You can find more upscale CoreML models designed for this app here:
37 | https://huggingface.co/TheMurusTeam
38 |
39 | # System Requirements
40 | Requires an Apple Silicon Mac running macOS Ventura 13.1 or later.
41 | Intel Macs are not supported.
42 |
43 | # About Compute Units
44 | For best compatibility, always use the default compute units, "CPU and GPU". The first time you try a new model, run it with the default compute units.
45 | CoreML Stable Diffusion models are converted with one of two attention implementations: ORIGINAL or SPLIT_EINSUM.
46 | Models converted with the ORIGINAL attention implementation must only be used with the default compute units, "CPU and GPU"; attempting to use other compute units will crash the app.
47 | Models converted with the SPLIT_EINSUM attention implementation can be used with all kinds of compute units, as sketched below.
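
At the CoreML level this maps to `MLModelConfiguration.computeUnits`. A minimal sketch (illustrative only, not the app's actual loading code; `compiledModelURL` stands for a compiled CoreML model on disk):

```swift
import CoreML

let config = MLModelConfiguration()
config.computeUnits = .cpuAndGPU             // safe default for ORIGINAL and SPLIT_EINSUM models
// config.computeUnits = .cpuAndNeuralEngine // SPLIT_EINSUM models only
// config.computeUnits = .all                // SPLIT_EINSUM models only
let model = try MLModel(contentsOf: compiledModelURL, configuration: config)
```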
48 |
49 |
50 | # Performance and energy
51 | For best performance on M1 and M2:
52 | model: Stable Diffusion 2.1 SPLIT EINSUM, compute units: CPU and Neural Engine
53 | For best performance on M1 Pro, M1 Max and M1 Ultra:
54 | model: Stable Diffusion 2.1 ORIGINAL, compute units: CPU and GPU
55 |
56 | To drastically reduce power consumption on laptops, use the default model (or any SPLIT EINSUM model) with "CPU and Neural Engine" compute units. On M1 Pro and M1 Max this is slower but far more energy efficient.
57 | To monitor compute units energy consumption you can use the free and open source app PowerMetrix, see here: https://github.com/TheMurusTeam/PowerMetrix
58 |
59 | # Benchmarks
60 | MacBook Pro 14" M1 Max, 24-core GPU, 32GB RAM (macOS 13.1):
61 | - Stable Diffusion 2.1 SPLIT EINSUM, CPU and Neural Engine: 1.8 steps/sec, 3.5 Watt
62 | - Stable Diffusion 2.1 SPLIT EINSUM, CPU and GPU: 1.95 steps/sec, 21.5 Watt
63 | - Stable Diffusion 2.1 SPLIT EINSUM, All compute units: 2.2 steps/sec, 11 Watt
64 | - Stable Diffusion 2.1 ORIGINAL, CPU and GPU: 2.7 steps/sec, 28 Watt
65 |
66 | Mac mini M1, 8-core GPU, 16GB RAM (macOS 13.1):
67 | - Stable Diffusion 2.1 SPLIT EINSUM, CPU and Neural Engine: 2.0 steps/sec, 4.7 Watt
68 | - Stable Diffusion 2.1 SPLIT EINSUM, CPU and GPU: 0.75 steps/sec, 7.5 Watt
69 | - Stable Diffusion 2.1 ORIGINAL, CPU and GPU: 0.95 steps/sec, 8.8 Watt
70 |
71 | # Known issues
72 | 1. Attempting to load an -ORIGINAL model using "CPU and Neural Engine" or "All Compute Units" fails.
73 | 2. The first time you launch the app, loading a -SPLIT_EINSUM model using "CPU and Neural Engine" may take up to 2 minutes.
74 | 3. Neural Engine performance on M1 is higher than on M1 Pro and M1 Max.
75 | 4. Models converted with the SPLIT_EINSUM attention implementation do not support resolutions other than 512x512.
76 | 5. Images shared using the default sharing picker services do not include EXIF metadata.
77 | 6. Prompt weights are not supported.
78 | 7. Some Stable Diffusion models can hang or crash when using "CPU and Neural Engine" or "All compute units".
79 |
80 | # Restore default settings
81 | Hold down the OPTION key while launching PromptToImage to restore the default compute units (CPU and GPU) and the built-in model.
82 |
83 | # Privacy
84 | This is a sandboxed app: it cannot access your personal files and data. Everything runs locally; nothing is sent over the network, and none of your data is collected.
85 |
86 | # Build
87 | To build this app you need an Apple Silicon Mac running macOS Ventura 13.1 or later and Xcode 14.2 or later.
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
--------------------------------------------------------------------------------