├── .gitignore
├── ChatGPT
├── ChatGPT.entitlements
├── ChatGPT.xcdatamodeld
│ ├── .xccurrentversion
│ └── ChatGPT.xcdatamodel
│ │ └── contents
├── ChatGPTApp.swift
├── Class
│ ├── API
│ │ ├── HuggingFace
│ │ │ ├── HuggingFace.swift
│ │ │ ├── HuggingFaceService.swift
│ │ │ ├── ImageClassification .swift
│ │ │ ├── ObjectDetection.swift
│ │ │ └── Text2Image.swift
│ │ ├── Models
│ │ │ └── Prompt.swift
│ │ └── OpenAI
│ │ │ ├── ImageModels.swift
│ │ │ ├── LanguageModels.swift
│ │ │ ├── OpenAI.swift
│ │ │ └── OpenAIService.swift
│ ├── Utility
│ │ ├── DateExtensions.swift
│ │ ├── KeyboardPublisher.swift
│ │ ├── ScrollViewDidScrollViewModifier.swift
│ │ └── Splash.swift
│ ├── View
│ │ ├── DialogueList
│ │ │ ├── DialogueListPlaceholderView.swift
│ │ │ └── DialogueSessionListView.swift
│ │ ├── MessageList
│ │ │ ├── BottomViews
│ │ │ │ ├── BottomInputView.swift
│ │ │ │ ├── ComposerInputView.swift
│ │ │ │ └── LeadingComposerView.swift
│ │ │ ├── ConversationView.swift
│ │ │ ├── DialogueSettingsView.swift
│ │ │ ├── ErrorMessageView.swift
│ │ │ ├── ImageDataMessageView.swift
│ │ │ ├── ImageMessageView.swift
│ │ │ ├── MessageBubble.swift
│ │ │ ├── MessageListView.swift
│ │ │ ├── MessageMarkdownView.swift
│ │ │ └── TextMessageView.swift
│ │ ├── Setting
│ │ │ ├── AppSettingsView.swift
│ │ │ ├── CustomPromptsView.swift
│ │ │ ├── HuggingFaceSettingsView.swift
│ │ │ ├── MacOSSettingsView.swift
│ │ │ ├── OpenAISettingsView.swift
│ │ │ └── PromptsListView.swift
│ │ └── UI
│ │ │ ├── ReplyingIndicatorView.swift
│ │ │ └── ToolTipView.swift
│ └── ViewModel
│ │ ├── Conversation.swift
│ │ └── DialogueSession.swift
├── ContentView.swift
├── Persistence.swift
├── Preview Content
│ └── Preview Assets.xcassets
│ │ └── Contents.json
└── Resource
│ ├── Assets.xcassets
│ ├── AccentColor.colorset
│ │ └── Contents.json
│ ├── AppIcon.appiconset
│ │ ├── 100.png
│ │ ├── 1024 1.png
│ │ ├── 1024.png
│ │ ├── 114.png
│ │ ├── 120.png
│ │ ├── 128.png
│ │ ├── 144.png
│ │ ├── 152.png
│ │ ├── 16.png
│ │ ├── 167.png
│ │ ├── 180.png
│ │ ├── 20.png
│ │ ├── 256 1.png
│ │ ├── 256.png
│ │ ├── 29.png
│ │ ├── 32 1.png
│ │ ├── 32.png
│ │ ├── 40.png
│ │ ├── 50.png
│ │ ├── 512 1.png
│ │ ├── 512.png
│ │ ├── 57.png
│ │ ├── 58.png
│ │ ├── 60.png
│ │ ├── 64.png
│ │ ├── 72.png
│ │ ├── 76.png
│ │ ├── 80.png
│ │ ├── 87.png
│ │ └── Contents.json
│ ├── Contents.json
│ ├── huggingface.imageset
│ │ ├── Contents.json
│ │ └── huggingface.png
│ └── openai.imageset
│ │ ├── Contents.json
│ │ └── openai.png
│ ├── chatgpt_prompts.json
│ └── zh-Hans.lproj
│ └── Localizable.strings
├── ChatGPTTests
└── ChatGPTTests.swift
├── ChatGPTUITests
├── ChatGPTUITests.swift
└── ChatGPTUITestsLaunchTests.swift
├── GPTMessage-Info.plist
├── GPTMessage.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ │ ├── IDEWorkspaceChecks.plist
│ │ └── swiftpm
│ │ └── Package.resolved
└── xcshareddata
│ └── xcschemes
│ ├── ChatGPT.xcscheme
│ └── GPTMessage.xcscheme
├── LICENSE.md
├── README.md
├── screenshot.jpg
├── screenshot1.jpg
├── screenshot_image_caption.jpg
├── screenshot_image_caption1.jpg
├── screenshot_macOS.jpg
└── screenshot_macOS_image_caption.jpg
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore,
4 | # Objective-C.gitignore & Swift.gitignore
5 | .DS_Store
6 |
7 | ## User settings
8 | xcuserdata/
9 |
10 | ## compatibility with Xcode 8 and earlier (ignoring not required starting
11 | ## Xcode 9)
12 | *.xcscmblueprint
13 | *.xccheckout
14 |
15 | ## compatibility with Xcode 3 and earlier (ignoring not required starting
16 | ## Xcode 4)
17 | build/
18 | DerivedData/
19 | *.moved-aside
20 | *.pbxuser
21 | !default.pbxuser
22 | *.mode1v3
23 | !default.mode1v3
24 | *.mode2v3
25 | !default.mode2v3
26 | *.perspectivev3
27 | !default.perspectivev3
28 |
29 | ## Obj-C/Swift specific
30 | *.hmap
31 |
32 | ## App packaging
33 | *.ipa
34 | *.dSYM.zip
35 | *.dSYM
36 |
37 | ## Playgrounds
38 | timeline.xctimeline
39 | playground.xcworkspace
40 |
41 | # Swift Package Manager
42 | #
43 | # Add this line if you want to avoid checking in source code from Swift
44 | # Package Manager dependencies.
45 | # Packages/
46 | # Package.pins
47 | # Package.resolved
48 | # *.xcodeproj
49 | #
50 | # Xcode automatically generates this directory with a .xcworkspacedata
51 | # file and xcuserdata
52 | # hence it is not needed unless you have added a package configuration
53 | # file to your project
54 | # .swiftpm
55 |
56 | .build/
57 |
58 | # CocoaPods
59 | #
60 | # We recommend against adding the Pods directory to your .gitignore.
61 | # However
62 | # you should judge for yourself, the pros and cons are mentioned at:
63 | #
64 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
65 | #
66 | # Pods/
67 | #
68 | # Add this line if you want to avoid checking in source code from the
69 | # Xcode workspace
70 | # *.xcworkspace
71 |
72 | # Carthage
73 | #
74 | # Add this line if you want to avoid checking in source code from Carthage
75 | # dependencies.
76 | # Carthage/Checkouts
77 |
78 | Carthage/Build/
79 |
80 | # Accio dependency management
81 | Dependencies/
82 | .accio/
83 |
84 | # fastlane
85 | #
86 | # It is recommended to not store the screenshots in the git repo.
87 | # Instead, use fastlane to re-generate the screenshots whenever they are
88 | # needed.
89 | # For more information about the recommended setup visit:
90 | #
91 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
92 |
93 | fastlane/report.xml
94 | fastlane/Preview.html
95 | fastlane/screenshots/**/*.png
96 | fastlane/test_output
97 |
98 | # Code Injection
99 | #
100 | # After new code Injection tools there's a generated folder
101 | /iOSInjectionProject
102 | # https://github.com/johnno1962/injectionforxcode
103 |
104 | iOSInjectionProject/
105 |
--------------------------------------------------------------------------------
/ChatGPT/ChatGPT.entitlements:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | com.apple.security.app-sandbox
6 |
7 | com.apple.security.files.user-selected.read-only
8 |
9 | com.apple.security.network.client
10 |
11 | com.apple.security.personal-information.photos-library
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/ChatGPT/ChatGPT.xcdatamodeld/.xccurrentversion:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | _XCCurrentVersionName
6 | ChatGPT.xcdatamodel
7 |
8 |
9 |
--------------------------------------------------------------------------------
/ChatGPT/ChatGPT.xcdatamodeld/ChatGPT.xcdatamodel/contents:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/ChatGPT/ChatGPTApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatGPTApp.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/22.
6 | //
7 |
8 | import SwiftUI
9 |
@main
struct ChatGPTApp: App {

    /// Shared Core Data stack injected into the view hierarchy.
    let persistenceController = PersistenceController.shared

    /// Drives the one-time launch alert asking for an OpenAI API key.
    @State var showOpenAIKeyAlert = false

    var body: some Scene {
        WindowGroup {
            ContentView()
                .onAppear() {
                    // Refresh the prompt catalogue from the remote source on launch.
                    PromptManager.shared.sync()
                    // Only ask for a key when none has been configured yet.
                    if AppConfiguration.shared.key.isEmpty {
                        showOpenAIKeyAlert = true
                    }
                }
                .alert("Enter OpenAI API Key", isPresented: $showOpenAIKeyAlert) {
                    TextField("OpenAI API Key", text: AppConfiguration.shared.$key)
                    Button("Later", role: .cancel) { }
                    // The text field binding writes straight to AppConfiguration,
                    // so confirming only needs to dismiss the alert.
                    Button("Confirm", role: .none) { }
                } message: {
                    Text("You need to set an OpenAI API Key before starting a conversation.")
                }
                .environment(\.managedObjectContext, persistenceController.container.viewContext)
        }
        #if os(macOS)
        Settings {
            MacOSSettingsView()
        }
        #endif
    }
}
42 |
--------------------------------------------------------------------------------
/ChatGPT/Class/API/HuggingFace/HuggingFace.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HuggingFace.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/6.
6 | //
7 |
8 | import SwiftUI
9 |
/// App-wide Hugging Face settings, persisted through @AppStorage (UserDefaults).
class HuggingFaceConfiguration: ObservableObject {

    static let shared = HuggingFaceConfiguration()

    /// Model path in "/{owner}/{name}" form, appended to the inference API base URL.
    @AppStorage("huggingFace.text2ImageModel") var text2ImageModelPath: String = "/stabilityai/stable-diffusion-2-1"

    /// Hugging Face User Access Token; empty until the user sets one in settings.
    @AppStorage("huggingFace.key") var key: String = ""

}
19 |
/// The Hugging Face endpoints this app talks to.
enum HuggingFaceAPI {

    case text2Image(HuggingFaceModel)
    case imageClassification(HuggingFaceModel)
    case imageCaption

    /// HTTP headers; hosted-model endpoints authenticate with the stored token,
    /// the caption space takes a plain JSON body.
    var headers: [String: String] {
        switch self {
        case .text2Image, .imageClassification:
            return ["Authorization": "Bearer \(HuggingFaceConfiguration.shared.key)"]
        case .imageCaption:
            return ["Content-Type": "application/json"]
        }
    }

    /// Path component appended to `baseURL()`.
    var path: String {
        switch self {
        case .text2Image(let model), .imageClassification(let model):
            return model.path
        case .imageCaption:
            return "/run/predict"
        }
    }

    /// Every endpoint is invoked with POST.
    var method: String { "POST" }

    func baseURL() -> String {
        switch self {
        case .text2Image, .imageClassification:
            return "https://api-inference.huggingface.co/models"
        case .imageCaption:
            return "https://lhuanyu-nlpconnect-vit-gpt2-image-captioning.hf.space"
        }
    }

}
65 |
/// A model hosted on Hugging Face, addressed by "/{owner}/{name}".
protocol HuggingFaceModel {

    var owner: String { get }
    var name: String { get }
    var path: String { get }

}

extension HuggingFaceModel {
    /// Default path built from owner and name, e.g. "/stabilityai/stable-diffusion-2-1".
    var path: String {
        "/\(owner)/\(name)"
    }
}
79 |
/// Text-to-image models published by CompVis.
enum CompVis: String, CaseIterable, HuggingFaceModel {
    case stableDiffusionV14 = "stable-diffusion-v1-4"

    var owner: String { "CompVis" }
    var name: String { rawValue }
}
90 |
/// Text-to-image models published by Stability AI.
enum StabilityAI: String, CaseIterable, HuggingFaceModel {
    case stableDiffusion2 = "stable-diffusion-2"
    case stableDiffusion21 = "stable-diffusion-2-1"
    case stableDiffusion21Unclip = "stable-diffusion-2-1-unclip"
    case stableDiffusion21UnclipSmall = "stable-diffusion-2-1-unclip-small"

    var owner: String { "stabilityai" }
    var name: String { rawValue }
}
104 |
/// Text-to-image models published by Runway ML.
enum RunwayML: String, CaseIterable, HuggingFaceModel {
    case stableDiffusionV15 = "stable-diffusion-v1-5"

    var owner: String { "runwayml" }
    var name: String { rawValue }
}
115 |
/// Text-to-image models published by hakurei.
enum Hakurei: String, CaseIterable, HuggingFaceModel {
    case waifuDiffusion = "waifu-diffusion"

    var owner: String { "hakurei" }
    var name: String { rawValue }
}
124 |
125 |
/// Catalogue of the text-to-image models offered in the settings UI.
struct HuggingFace {

    /// All known text-to-image models, grouped by publisher.
    static var text2ImageModels: [HuggingFaceModel] =
        CompVis.allCases + StabilityAI.allCases + RunwayML.allCases + Hakurei.allCases

}
136 |
/// A model identified only by its raw "/owner/name" path (user-configurable).
struct HuggingFaceModelType: HuggingFaceModel {

    var path: String

    /// First path component after the leading slash.
    var owner: String {
        let components = path.dropFirst().components(separatedBy: "/")
        return components.first ?? ""
    }

    /// Last path component.
    var name: String {
        let components = path.components(separatedBy: "/")
        return components.last ?? ""
    }

}
150 |
--------------------------------------------------------------------------------
/ChatGPT/Class/API/HuggingFace/HuggingFaceService.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HuggingFaceService.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/6.
6 | //
7 |
8 | import SwiftUI
9 |
/// Thin async client for the Hugging Face hosted-inference endpoints.
class HuggingFaceService: @unchecked Sendable {

    static let shared = HuggingFaceService()

    /// Cold models can take tens of seconds to spin up, so allow a long timeout.
    private lazy var urlSession: URLSession = {
        let configuration = URLSessionConfiguration.default
        configuration.timeoutIntervalForRequest = 60
        let session = URLSession(configuration: configuration)
        return session
    }()

    /// Decodes Hugging Face's snake_case JSON payloads.
    private let jsonDecoder: JSONDecoder = {
        let jsonDecoder = JSONDecoder()
        jsonDecoder.keyDecodingStrategy = .convertFromSnakeCase
        return jsonDecoder
    }()

    /// Builds a POST request for `api`. A raw `Data` body is sent unchanged;
    /// anything else is JSON-encoded with snake_case keys.
    /// (The generic clause was lost in transit; `body` is used both as `Data`
    /// and as an `Encodable`, so `<T: Encodable>` is the required constraint.)
    private func makeRequest<T: Encodable>(_ api: HuggingFaceAPI, body: T) throws -> URLRequest {
        let url = URL(string: api.baseURL() + api.path)!
        var urlRequest = URLRequest(url: url)
        urlRequest.httpMethod = api.method
        api.headers.forEach { urlRequest.setValue($1, forHTTPHeaderField: $0) }
        if let body = body as? Data {
            urlRequest.httpBody = body
        } else {
            let encoder = JSONEncoder()
            encoder.keyEncodingStrategy = .convertToSnakeCase
            urlRequest.httpBody = try encoder.encode(body)
        }
        return urlRequest
    }

    /// Throws unless `response` is HTTP 2xx; appends the server-reported error
    /// message when one can be decoded from `data`. Shared by all endpoints.
    private func validate(_ response: URLResponse, data: Data) throws {
        guard let httpResponse = response as? HTTPURLResponse else {
            throw String(localized: "Invalid response")
        }
        guard 200...299 ~= httpResponse.statusCode else {
            var error = String(localized: "Response Error: \(httpResponse.statusCode)")
            if let errorResponse = try? jsonDecoder.decode(HuggingFaceErrorResponse.self, from: data) {
                error.append("\n\(errorResponse.error)")
            }
            throw error
        }
    }

    /// Generates an image from `input` with the model configured in settings.
    func generateImage(_ input: String) async throws -> String {
        try await generateImage(input, api: .text2Image(
            HuggingFaceModelType(path: HuggingFaceConfiguration.shared.text2ImageModelPath)
            )
        )
    }

    /// Generates an image and returns it as a Markdown image with an inline
    /// base64 data URL, ready for the message views to render.
    func generateImage(_ input: String, api: HuggingFaceAPI) async throws -> String {
        guard !HuggingFaceConfiguration.shared.key.isEmpty else {
            throw String(localized: "HuggingFace User Access Token is not set.")
        }

        let request = try makeRequest(api, body: Text2Image(inputs: input))
        let (data, response) = try await urlSession.data(for: request)
        try validate(response, data: data)

        let base64String = data.base64EncodedString()

        if base64String.isEmpty {
            return ""
        } else {
            // The endpoint answers with raw image bytes; embed them as a Markdown
            // data URL. Media type assumed JPEG (the API's usual output) — verify.
            return "![](data:image/jpeg;base64,\(base64String))"
        }
    }

    /// Classifies `image` and returns one "label: score%" line per prediction.
    func imageClassification(_ image: Data, api: HuggingFaceAPI) async throws -> String {
        guard !HuggingFaceConfiguration.shared.key.isEmpty else {
            throw String(localized: "HuggingFace User Access Token is not set.")
        }

        let request = try makeRequest(api, body: image)
        let (data, response) = try await urlSession.data(for: request)
        try validate(response, data: data)

        let result = try jsonDecoder.decode([ImageClassification].self, from: data)
        return result.reduce("") {
            $0 + "\($1.label): \(Int(100 * $1.score))%\n"
        }
    }

    /// Asks the hosted captioning space to describe `image`.
    func createCaption(for image: Data) async throws -> String {
        let body = [
            "data" : [
                image.imageBased64String
            ]
        ]

        let request = try makeRequest(.imageCaption, body: body)
        let (data, response) = try await urlSession.data(for: request)
        try validate(response, data: data)

        let result = try jsonDecoder.decode(ImageCaptionResponse.self, from: data)
        if let caption = result.data.first {
            return caption
        } else {
            throw "Invalid Response"
        }

    }

}
136 |
/// Reply from the Gradio caption space: `data` holds the generated caption(s).
struct ImageCaptionResponse: Codable {
    var data: [String]
    // NOTE(review): non-optional — decoding fails if the space omits "duration"; verify against live responses.
    var duration: Double
}
141 |
/// Error payload from the inference API (e.g. while a model is still loading).
struct HuggingFaceErrorResponse: Codable {
    var error: String
    /// Seconds until the model is expected to be ready. Optional because the API
    /// omits it for errors other than "model is loading"; a non-optional field
    /// made the whole decode fail and silently dropped the error detail.
    var estimatedTime: Double?
}
146 |
--------------------------------------------------------------------------------
/ChatGPT/Class/API/HuggingFace/ImageClassification .swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageClassification .swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/7.
6 | //
7 |
8 | import Foundation
9 |
/// Image-classification models published by Google.
enum Google: String, HuggingFaceModel {
    case vitBasePatch16224 = "vit-base-patch16-224"

    var owner: String { "google" }
    var name: String { rawValue }
}
22 |
/// One entry of a classification response: a label and its confidence score.
struct ImageClassification: Codable {
    let label: String
    // Confidence — presumably in 0...1 (callers render it as a percentage); verify.
    let score: Double
}
27 |
--------------------------------------------------------------------------------
/ChatGPT/Class/API/HuggingFace/ObjectDetection.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ObjectDetection.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/8.
6 | //
7 |
8 | import Foundation
9 |
/// Object-detection models published by Facebook/Meta.
enum Facebook: String, HuggingFaceModel {
    case detrResnet50 = "detr-resnet-50"

    var owner: String { "facebook" }
    var name: String { rawValue }
}
22 |
/// One detected object: label, confidence score, and its bounding box.
struct ObjectDetection: Codable {
    let label: String
    let score: Double
    let box: Box
}

// Axis-aligned bounding box; units assumed to be pixels — TODO confirm against the API.
struct Box: Codable {
    let xmin: CGFloat
    let xmax: CGFloat
    let ymin: CGFloat
    let ymax: CGFloat
}
35 |
--------------------------------------------------------------------------------
/ChatGPT/Class/API/HuggingFace/Text2Image.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Text2Image.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/6.
6 | //
7 |
8 | import Foundation
9 |
/// Request body for the text-to-image inference endpoint.
struct Text2Image: Codable {
    var inputs: String
    var options: HuggingFaceOptions = .init()
}

struct HuggingFaceOptions: Codable {
    // Encoded as "wait_for_model" (the service encodes with .convertToSnakeCase);
    // waiting for a cold model usually takes 20 seconds or longer.
    var waitForModel = true
}
18 |
--------------------------------------------------------------------------------
/ChatGPT/Class/API/Models/Prompt.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Prompt.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/21.
6 | //
7 |
8 | import Foundation
9 | import SwiftCSV
10 | import SwiftUI
11 |
12 | // {
13 | // "cmd": "linux_terminal",
14 | // "act": "Linux Terminal",
15 | // "prompt": "I want you to act as a linux terminal. I will type commands and you will reply with what the terminal should show. I want you to only reply with the terminal output inside one unique code block, and nothing else. do not write explanations. do not type commands unless I instruct you to do so. when i need to tell you something in english, i will do so by putting text inside curly brackets {like this}. my first command is pwd",
16 | // "tags": [
17 | // "chatgpt-prompts"
18 | // ],
19 | // "enable": true
20 | // },
21 |
/// A reusable chat prompt. Identity is the `cmd` slug alone.
struct Prompt: Codable, Identifiable, Hashable, Equatable {
    var id: String {
        cmd
    }
    let cmd: String
    let act: String
    let prompt: String
    let tags: [String]

    static func == (lhs: Prompt, rhs: Prompt) -> Bool {
        lhs.id == rhs.id
    }

    /// Hash only the identity so it agrees with `==`. The synthesized
    /// implementation hashed every stored property, so two prompts that
    /// compared equal could hash differently — breaking Set/Dictionary
    /// deduplication (which `removeDuplicates()` relies on).
    func hash(into hasher: inout Hasher) {
        hasher.combine(id)
    }
}
35 |
/// Loads, syncs, and persists the prompt catalogue (remote CSV + user-defined prompts).
class PromptManager: ObservableObject {

    static let shared = PromptManager()

    /// Merged, de-duplicated list shown to the UI (synced + custom, sorted by `act`).
    @Published private(set) var prompts: [Prompt] = []

    /// Prompts fetched from the remote CSV source (or the cached/bundled copy).
    @Published private(set) var syncedPrompts: [Prompt] = []

    /// Prompts the user created locally.
    @Published var customPrompts = [Prompt]()

    @Published private(set) var isSyncing: Bool = false

    @AppStorage("promptSource") var promptSource: String = "https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv"

    @AppStorage("lastSyncAt") var lastSyncAt: TimeInterval = Date.distantPast.timeIntervalSince1970

    init() {
        loadCachedPrompts()
        loadCustomPrompts()
        mergePrompts()
        print("[Prompt Manager] Load local prompts. Count: \(prompts.count).")
    }

    func addCustomPrompt(_ prompt: Prompt) {
        customPrompts.append(prompt)
        mergePrompts()
        saveCustomPrompts()
    }

    func removeCustomPrompts(atOffsets indexSet: IndexSet) {
        customPrompts.remove(atOffsets: indexSet)
        // Keep the published merged list in step with the removal
        // (addCustomPrompt already did this; the remove paths did not).
        mergePrompts()
        saveCustomPrompts()
    }

    func removeCustomPrompt(_ prompt: Prompt) {
        customPrompts.removeAll {
            $0 == prompt
        }
        mergePrompts()
        saveCustomPrompts()
    }

    /// Downloads the CSV prompt source, refreshes the cache, and re-merges.
    func sync() {
        guard let url = URL(string: promptSource) else {
            return
        }
        // Set the flag before starting the task so a fast completion cannot be
        // overwritten back to `true` after it already finished.
        isSyncing = true
        let request = URLRequest(url: url)
        let task = URLSession.shared.downloadTask(with: request) { fileURL, _, error in
            DispatchQueue.main.async {
                if let error = error {
                    print(error.localizedDescription)
                } else if let fileURL = fileURL {
                    self.parseCSVFile(at: fileURL)
                }
                self.isSyncing = false
            }
        }
        task.resume()
    }

    // MARK: - Private

    private func saveCustomPrompts() {
        do {
            let data = try JSONEncoder().encode(customPrompts)
            try data.write(to: customFileURL, options: .atomic)
            print("[Prompt Manager] Write user custom prompts to \(customFileURL).")
        } catch let error {
            print(error.localizedDescription)
        }
    }

    /// Rebuilds `prompts` from the synced and custom lists (sorted, de-duplicated).
    private func mergePrompts() {
        prompts = (syncedPrompts + customPrompts).sorted(by: {
            $0.act < $1.act
        })
        prompts.removeDuplicates()
    }

    /// Cached copy first, bundled JSON as a fallback.
    private func jsonData() -> Data? {
        if let data = try? Data(contentsOf: cachedFileURL) {
            return data
        }
        if let path = Bundle.main.path(forResource: "chatgpt_prompts", ofType: "json"),
           let data = try? Data(contentsOf: URL(fileURLWithPath: path)) {
            return data
        }
        return nil
    }

    private func loadCachedPrompts() {
        if let data = jsonData(),
           let prompts = try? JSONDecoder().decode([Prompt].self, from: data) {
            syncedPrompts = prompts
            syncedPrompts.removeDuplicates()
            print("[Prompt Manager] Load cached prompts. Count: \(syncedPrompts.count).")
        }
    }

    private func loadCustomPrompts() {
        guard let data = try? Data(contentsOf: customFileURL),
              let prompts = try? JSONDecoder().decode([Prompt].self, from: data) else {
            return
        }
        customPrompts = prompts
        print("[Prompt Manager] Load user custom prompts. Count: \(customPrompts.count).")
    }

    /// Parses the downloaded CSV, publishes the result, and caches it as JSON.
    private func parseCSVFile(at url: URL) {
        do {
            let csv: CSV = try CSV(url: url)
            var prompts = [Prompt]()
            try csv.enumerateAsDict({ dic in
                if let act = dic["act"],
                   let prompt = dic["prompt"] {
                    let cmd = act.convertToSnakeCase()
                    prompts.append(.init(cmd: cmd, act: act, prompt: prompt, tags: ["chatgpt-prompts"]))
                }
            })
            syncedPrompts = prompts
            syncedPrompts.removeDuplicates()
            mergePrompts()

            print("[Prompt Manager] Sync completed. Count: \(syncedPrompts.count). Total: \(self.prompts.count).")
            let data = try JSONEncoder().encode(prompts)
            try data.write(to: cachedFileURL, options: .atomic)
            print("[Prompt Manager] Write synced prompts to \(cachedFileURL).")
            lastSyncAt = Date().timeIntervalSince1970
        } catch let error as CSVParseError {
            print(error.localizedDescription)
        } catch let error {
            print(error.localizedDescription)
        }
    }

    private var cachedFileURL: URL {
        URL.documentsDirectory.appendingPathComponent("chatgpt_prompts.json")
    }

    private var customFileURL: URL {
        URL.documentsDirectory.appendingPathComponent("custom_prompts.json")
    }

}
178 |
179 |
extension String {

    /// Lowercases, strips backticks, and joins hyphen/space-separated words
    /// with underscores: "Linux Terminal" -> "linux_terminal".
    func convertToSnakeCase() -> String {
        self
            .lowercased()
            .replacingOccurrences(of: "`", with: "")
            .components(separatedBy: CharacterSet(charactersIn: "- "))
            .joined(separator: "_")
    }

}
193 |
extension URL {

    /// The user's documents directory.
    static func documentDirectoryPath() -> URL {
        FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    }

}
204 |
205 |
extension Array where Element: Hashable {
    /// Removes duplicate elements in place, keeping the first occurrence of
    /// each, and returns the deduplicated array. Uses a Set of seen elements
    /// for O(n) — the previous reduce + contains version was O(n²).
    @discardableResult
    mutating func removeDuplicates() -> [Element] {
        var seen = Set<Element>()
        self = filter { seen.insert($0).inserted }
        return self
    }

}
219 |
--------------------------------------------------------------------------------
/ChatGPT/Class/API/OpenAI/ImageModels.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageModels.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/4.
6 | //
7 |
8 | import Foundation
9 |
/// Request body for OpenAI image generation.
struct ImageGeneration: Codable {
    /// Supported output resolutions.
    enum Size: String, Codable, CaseIterable {
        case small = "256x256"
        case middle = "512x512"
        case large = "1024x1024"
    }

    var prompt: String
    // Number of images to request.
    var n: Int = 1
    // Defaults to the size chosen in app settings.
    var size: Size = AppConfiguration.shared.imageSize
}
21 |
/// Response wrapper: one URL entry per generated image.
struct ImageGenerationResponse: Codable {
    var data: [URLResponse]
}

// NOTE(review): this name shadows Foundation.URLResponse within the module;
// consider renaming (would require updating callers).
struct URLResponse: Codable {
    var url: URL
}
29 |
--------------------------------------------------------------------------------
/ChatGPT/Class/API/OpenAI/LanguageModels.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LanguageModels.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/3.
6 | //
7 |
8 | import Foundation
9 |
10 | ///Completion
///Completion
/// Request body for the /v1/completions endpoint.
struct Command: Encodable {
    let prompt: String
    let model: String
    let maxTokens: Int
    let temperature: Double
    // Whether to stream partial results via server-sent events.
    let stream: Bool

    // Explicit keys: the API expects snake_case "max_tokens".
    enum CodingKeys: String, CodingKey {
        case prompt
        case model
        case maxTokens = "max_tokens"
        case temperature
        case stream
    }
}
26 |
27 | ///Edit
///Edit
/// Request body for the /v1/edits endpoint.
struct Instruction: Encodable {
    let instruction: String
    let model: String
    let input: String
}
33 |
34 | ///Chat
///Chat
/// One chat message. `role` is presumably "system"/"user"/"assistant" per the
/// chat API contract — confirm against the callers that build messages.
struct Message: Codable {
    let role: String
    let content: String
}
39 |
extension Character {
    /// True when every scalar of the character falls inside the basic CJK
    /// Unified Ideographs block (U+4E00...U+9FA5).
    var isChinese: Bool {
        unicodeScalars.allSatisfy { (0x4E00...0x9FA5).contains($0.value) }
    }

}

extension String {

    /// Rough token estimate: 1 per Chinese character, 0.3 per other character,
    /// truncated to an integer.
    var token: Int {
        let estimate = reduce(0.0) { total, character in
            total + (character.isChinese ? 1.0 : 0.3)
        }
        return Int(estimate)
    }

}
67 |
extension Array where Element == Message {

    /// Estimated token count of all message contents (alias of `contentCount`).
    var tokenCount: Int {
        contentCount
    }

    /// Sum of the per-content token estimates.
    var contentCount: Int { reduce(0, { $0 + $1.content.token })}
}
76 |
/// Request body for the /v1/chat/completions endpoint.
struct Chat: Codable {
    let model: String
    let temperature: Double
    let messages: [Message]
    // Whether to stream deltas via server-sent events.
    let stream: Bool
}
83 |
/// Top-level error payload: {"error": {...}}.
struct ErrorRootResponse: Decodable {
    let error: ErrorResponse
}

struct ErrorResponse: Decodable {
    let message: String
    let type: String?
}

/// One server-sent-events chunk of a streamed chat completion.
struct StreamCompletionResponse: Decodable {
    let choices: [StreamChoice]
}

// NOTE(review): `ChoiceType` is not defined in this file — confirm it exists
// elsewhere in the project or whether this type is dead code.
struct StreamResponse: Decodable {
    let choices: [ChoiceType]
}

/// Non-streamed chat completion response.
struct CompletionResponse: Decodable {
    let choices: [Choice]
    let usage: Usage?
}

/// Token accounting reported by the API.
struct Usage: Decodable {
    let promptTokens: Int?
    let completionTokens: Int?
    let totalTokens: Int?
}

/// Choice shape for text (non-chat) completions.
struct TextChoice: Decodable {
    let text: String
    let finishReason: String?
}

/// Choice shape for chat completions.
struct Choice: Decodable {
    let message: Message
    let finishReason: String?
}

struct StreamChoice: Decodable {
    let finishReason: String?
    let delta: StreamMessage
}

/// Incremental delta in a streamed chunk; fields appear only as they arrive.
struct StreamMessage: Decodable {
    let role: String?
    let content: String?
}
131 |
132 |
--------------------------------------------------------------------------------
/ChatGPT/Class/API/OpenAI/OpenAI.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAI.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/7.
6 | //
7 |
8 | import Foundation
9 |
10 | /// The type of model used to generate the output
11 | enum OpenAIModelType: String, Codable {
12 |
13 | /// A set of models that can understand and generate natural language
14 | ///
15 | /// [GPT-3 Models OpenAI API Docs](https://beta.openai.com/docs/models/gpt-3)
16 |
17 | /// Most capable GPT-3 model. Can do any task the other models can do, often with higher quality, longer output and better instruction-following. Also supports inserting completions within text.
18 | ///
19 | /// > Model Name: text-davinci-003
20 | case textDavinci = "text-davinci-003"
21 |
22 | /// Very capable, but faster and lower cost than GPT3 ``davinci``.
23 | ///
24 | /// > Model Name: text-curie-001
25 | case textCurie = "text-curie-001"
26 |
27 | /// Capable of straightforward tasks, very fast, and lower cost.
28 | ///
29 | /// > Model Name: text-babbage-001
30 | case textBabbage = "text-babbage-001"
31 |
32 | /// Capable of very simple tasks, usually the fastest model in the GPT-3 series, and lowest cost.
33 | ///
34 | /// > Model Name: text-ada-001
35 | case textAda = "text-ada-001"
36 |
37 | static var gpt3Models: [OpenAIModelType] {
38 | [.textDavinci, .textCurie, .textBabbage, .textAda]
39 | }
40 |
41 | /// A set of models that can understand and generate code, including translating natural language to code
42 | ///
43 | /// [Codex Models OpenAI API Docs](https://beta.openai.com/docs/models/codex)
44 | ///
45 | /// > Limited Beta
46 | /// Most capable Codex model. Particularly good at translating natural language to code. In addition to completing code, also supports inserting completions within code.
47 | ///
48 | /// > Model Name: code-davinci-002
49 | case codeDavinci = "code-davinci-002"
50 |
51 | /// Almost as capable as ``davinci`` Codex, but slightly faster. This speed advantage may make it preferable for real-time applications.
52 | ///
53 | /// > Model Name: code-cushman-001
54 | case codeCushman = "code-cushman-001"
55 |
56 |
57 | static var codexModels: [OpenAIModelType] {
58 | [.codeDavinci, .codeCushman]
59 | }
60 |
61 | case textDavinciEdit = "text-davinci-edit-001"
62 |
63 |
64 |
65 | static var featureModels: [OpenAIModelType] {
66 | [.textDavinciEdit]
67 | }
68 |
69 | /// A set of models for the new chat completions
70 | /// You can read the [API Docs](https://platform.openai.com/docs/api-reference/chat/create)
71 |
72 | /// Most capable GPT-3.5 model and optimized for chat at 1/10th the cost of text-davinci-003. Will be updated with our latest model iteration.
73 | /// > Model Name: gpt-3.5-turbo
74 | case chatgpt = "gpt-3.5-turbo"
75 |
76 | /// Snapshot of gpt-3.5-turbo from March 1st 2023. Unlike gpt-3.5-turbo, this model will not receive updates, and will only be supported for a three month period ending on June 1st 2023.
77 | /// > Model Name: gpt-3.5-turbo-0301
78 | case chatgpt0301 = "gpt-3.5-turbo-0301"
79 |
80 |
81 | case dalle = "dall-e"
82 |
83 |
    /// Models served by the chat completions endpoint.
    static var chatModels: [OpenAIModelType] {
        [.chatgpt, .chatgpt0301]
    }
87 |
    /// `Identifiable` conformance backed by the model's raw API name.
    var id: RawValue {
        rawValue
    }
91 |
92 | var supportedModes: [Mode] {
93 | switch self {
94 | case .textDavinci:
95 | return [.completions]
96 | case .textCurie:
97 | return [.completions]
98 | case .textBabbage:
99 | return [.completions]
100 | case .textAda:
101 | return [.completions]
102 | case .codeDavinci:
103 | return [.completions]
104 | case .codeCushman:
105 | return [.completions]
106 | case .textDavinciEdit:
107 | return [.edits]
108 | case .chatgpt:
109 | return [.chat]
110 | case .chatgpt0301:
111 | return [.chat]
112 | case .dalle:
113 | return [.image]
114 | }
115 | }
116 | }
117 |
/// The kind of OpenAI endpoint a request targets; the raw value is the
/// user-facing display name.
enum Mode: String, CaseIterable, Codable, Identifiable {
    case completions = "Completions"
    case edits = "Edits"
    case chat = "Chat"
    case image = "Image"

    /// `Identifiable` conformance backed by the display name.
    var id: RawValue {
        rawValue
    }
}
128 |
/// Endpoint routing information for each API mode.
extension Mode {
    /// URL path of the endpoint on the OpenAI API.
    var path: String {
        switch self {
        case .completions: return "/v1/completions"
        case .edits: return "/v1/edits"
        case .chat: return "/v1/chat/completions"
        case .image: return "/v1/images/generations"
        }
    }

    /// HTTP method; every endpoint used here accepts only POST.
    var method: String {
        "POST"
    }

    /// Base URL shared by all OpenAI endpoints.
    func baseURL() -> String {
        "https://api.openai.com"
    }
}
157 |
--------------------------------------------------------------------------------
/ChatGPT/Class/API/OpenAI/OpenAIService.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAIService.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/3.
6 | //
7 |
8 | import SwiftUI
9 |
/// Thin client for the OpenAI HTTP API.
///
/// Owns the message history for one dialogue session, builds the JSON bodies
/// for every `Mode`, and exposes one-shot and streaming send operations.
/// Errors are thrown as `String` values (see the `CustomNSError` conformance
/// later in this file).
class OpenAIService: @unchecked Sendable {

    init(configuration: DialogueSession.Configuration) {
        self.configuration = configuration
    }

    /// Per-session settings: API key, model, mode, temperature, system prompt.
    var configuration: DialogueSession.Configuration

    /// Full conversation history (user and assistant turns) for this session.
    var messages = [Message]()
    /// Index of the first message still inside the token budget. Everything
    /// before it is excluded from outgoing requests but kept in `messages`.
    private var trimmedMessagesIndex = 0

    private lazy var urlSession: URLSession = {
        let configuration = URLSessionConfiguration.default
        configuration.timeoutIntervalForRequest = 30
        let session = URLSession(configuration: configuration)
        return session
    }()

    /// Builds a request for `mode` (defaulting to the session's configured mode).
    private func makeRequest(with input: String, mode: Mode? = nil, stream: Bool = false) throws -> URLRequest {
        let mode = mode ?? configuration.mode
        let url = URL(string: mode.baseURL() + mode.path)!
        var urlRequest = URLRequest(url: url)
        // Use the resolved mode, not `configuration.mode`: callers may override
        // the mode for a single request (e.g. `generateImage` passes `.image`).
        urlRequest.httpMethod = mode.method
        headers.forEach { urlRequest.setValue($1, forHTTPHeaderField: $0) }
        urlRequest.httpBody = try makeJSONBody(with: input, mode: mode, stream: stream)
        return urlRequest
    }

    /// Common headers for every request, including the bearer API key.
    private var headers: [String: String] {
        [
            "Content-Type": "application/json",
            "Authorization": "Bearer \(configuration.key)"
        ]
    }

    private let jsonDecoder: JSONDecoder = {
        let jsonDecoder = JSONDecoder()
        jsonDecoder.keyDecodingStrategy = .convertFromSnakeCase
        return jsonDecoder
    }()

    /// Returns the conversation tail that fits the token budget, with `input`
    /// appended as the newest user turn and the system prompt inserted at the
    /// front. Advances `trimmedMessagesIndex` past any dropped turns.
    private func trimConversation(with input: String) -> [Message] {
        var trimmedMessages = [Message]()
        if trimmedMessagesIndex > messages.endIndex - 1 {
            // History already fully trimmed away; send only the new turn.
            trimmedMessages.append(Message(role: "user", content: input))
        } else {
            trimmedMessages += messages[trimmedMessagesIndex...]
            trimmedMessages.append(Message(role: "user", content: input))
        }

        // Token budget for the request; drop oldest turns until we fit.
        let maxToken = 4096
        print("maxToken:\(maxToken)")
        var tokenCount = trimmedMessages.tokenCount
        print("tokenCount:\(tokenCount)")
        while tokenCount > maxToken {
            print(trimmedMessages.remove(at: 0))
            trimmedMessagesIndex += 1
            print("trimmedMessagesIndex: \(trimmedMessagesIndex)")
            tokenCount = trimmedMessages.tokenCount
            print("tokenCount:\(tokenCount)")
        }

        trimmedMessages.insert(Message(role: "system", content: configuration.systemPrompt), at: 0)

        return trimmedMessages
    }

    /// Asks the model for a short title summarizing the conversation so far.
    func createTitle() async throws -> String {
        try await sendTaskMessage("Summarize our conversation, give me a title as short as possible in the language of your last response. Return the title only.")
    }

    /// Number of reply suggestions to request (fewer on the smaller iOS screen).
    private var suggestionsCount: Int {
        #if os(iOS)
        return 3
        #else
        return 5
        #endif
    }

    /// Asks the model for reply suggestions (or, for an empty conversation,
    /// starter prompts) and extracts the bracketed candidates from its reply.
    func createSuggestions() async throws -> [String] {
        var prompt = "Give me \(suggestionsCount) reply suggestions which I may use to ask you based on your last reply. Each suggestion must be in a []. Suggestions must be concise and informative, less than 6 words. If your last reply is in Chinese,your must give me Chinese suggestions. Does not include other words."
        if messages.isEmpty {
            prompt = "Give me \(suggestionsCount) prompts which I can use to chat with you based on your capabilities as an AI language model. Each prompt must be in a []. Prompts should be concise and creative, between 5 and 20 words. It must not contain these topic: weather, what's you favorite, any other personal questions. Does not include other words."
        }

        let suggestionReply = try await sendTaskMessage(prompt)
        print(suggestionReply)

        return suggestionReply.normalizedPrompts
    }

    /// Encodes the mode-specific JSON request body for `input`.
    private func makeJSONBody(with input: String, mode: Mode? = nil, stream: Bool = true) throws -> Data {
        let mode = mode ?? configuration.mode
        switch mode {
        case .chat:
            let request = Chat(model: configuration.model.rawValue, temperature: configuration.temperature,
                               messages: trimConversation(with: input), stream: stream)
            return try JSONEncoder().encode(request)
        case .edits:
            let instruct = Instruction(instruction: input, model: configuration.model.rawValue, input: "")
            return try JSONEncoder().encode(instruct)
        case .completions:
            let command = Command(prompt: input, model: configuration.model.rawValue, maxTokens: 2048 - input.count, temperature: configuration.temperature, stream: stream)
            return try JSONEncoder().encode(command)
        case .image:
            let image = ImageGeneration(prompt: input)
            return try JSONEncoder().encode(image)
        }
    }

    /// Records one completed user/assistant exchange in the history.
    func appendNewMessage(input: String, reply: String) {
        messages.append(.init(role: "user", content: input))
        messages.append(.init(role: "assistant", content: reply))
    }

    /// Entry point used by the UI: routes `input` (and an optional attached
    /// image) to captioning, image generation, or a plain chat stream.
    func sendMessage(_ input: String, data: Data? = nil) async throws -> AsyncThrowingStream<String, Error> {
        var messageText = input
        do {
            if let data = data {
                // Caption the attached image, then ask the model to rewrite the
                // caption and prompt the user about the next step.
                let caption = try await HuggingFaceService.shared.createCaption(for: data)
                print("Image Caption: \(caption)")
                let captionPrompt =
                """
                I sent you an image. There is an image description blow, write a more readable and human-friendly version based on the original description. You must also ask me how to handle the image in next step.
                \(caption)
                """
                messageText = "An image: \(caption)"
                return try await sendTaskMessageStream(
                    captionPrompt,
                    messageText: messageText,
                    temperature: 0.5
                )
            }

            if AppConfiguration.shared.isSmartModeEnabled {
                // Let the model itself decide whether this is an image prompt.
                // Best-effort: a failure here falls through to a normal chat.
                if let taskReply = try? await sendTaskMessage(
                    """
                    Determine whether the prompt below is an image generation prompt based on our conversations history:
                    \(input)
                    If it is an image generation prompt which has a high probability, remove the command words in the prompt, leave only the object with modifiers and styles needed to draw, and must return it in a [].
                    """
                ) {
                    print(taskReply)
                    if let prompt = taskReply.normalizedPrompts.first {
                        return try await generateImageStream(prompt, input: input)
                    }
                }
            } else if input.isImageGenerationPrompt {
                return try await generateImageStream(input.imagePrompt)
            }

            return try await sendMessageStream(input)
        } catch {
            // Record the failed turn so the UI can show it with an error state.
            appendNewMessage(input: messageText, reply: "")
            throw error
        }
    }

    /// Streams the model's reply to `input` using the session's mode, recording
    /// the completed exchange in `messages` when the stream finishes.
    func sendMessageStream(_ input: String) async throws -> AsyncThrowingStream<String, Error> {
        let urlRequest = try makeRequest(with: input, stream: true)
        let (result, response) = try await urlSession.bytes(for: urlRequest)
        try await validate(response, result: result)
        return replyStream(from: result, input: input)
    }

    /// Streams the reply to a standalone task prompt (the prompt itself is not
    /// added to the visible history; `messageText` is recorded instead).
    func sendTaskMessageStream(_ taskPrompt: String, messageText: String? = nil, temperature: Double = 0) async throws -> AsyncThrowingStream<String, Error> {
        let messages = [
            Message(role: "system", content: configuration.systemPrompt),
            Message(role: "user", content: taskPrompt)
        ]

        let url = URL(string: Mode.chat.baseURL() + Mode.chat.path)!
        var urlRequest = URLRequest(url: url)
        urlRequest.httpMethod = Mode.chat.method
        headers.forEach { urlRequest.setValue($1, forHTTPHeaderField: $0) }
        let requestModel = Chat(model: configuration.model.rawValue, temperature: temperature,
                                messages: messages, stream: true)
        urlRequest.httpBody = try JSONEncoder().encode(requestModel)

        let (result, response) = try await urlSession.bytes(for: urlRequest)
        try await validate(response, result: result)
        return replyStream(from: result, input: messageText ?? "")
    }

    /// Throws a readable error when `response` is not a 2xx HTTP response,
    /// draining `result` for the server's error payload.
    private func validate(_ response: URLResponse, result: URLSession.AsyncBytes) async throws {
        guard let httpResponse = response as? HTTPURLResponse else {
            throw String(localized: "Invalid response")
        }

        guard 200...299 ~= httpResponse.statusCode else {
            var errorText = ""
            for try await line in result.lines {
                errorText += line
            }

            if let data = errorText.data(using: .utf8),
               let errorResponse = try? jsonDecoder.decode(ErrorRootResponse.self, from: data).error {
                errorText = "\n\(errorResponse.message)"
            }
            throw String(localized: "Response Error: \(httpResponse.statusCode), \(errorText)")
        }
    }

    /// Wraps a validated SSE byte stream in an `AsyncThrowingStream` of reply
    /// fragments, appending the full exchange to `messages` on completion.
    private func replyStream(from result: URLSession.AsyncBytes, input: String) -> AsyncThrowingStream<String, Error> {
        AsyncThrowingStream { continuation in
            Task(priority: .userInitiated) { [weak self] in
                guard let self else { return }
                do {
                    var reply = ""
                    for try await line in result.lines {
                        // Server-sent events: payload lines start with "data: ".
                        if line.hasPrefix("data: "),
                           let data = line.dropFirst(6).data(using: .utf8),
                           let response = try? self.jsonDecoder.decode(StreamCompletionResponse.self, from: data),
                           let text = response.choices.first?.delta.content {
                            reply += text
                            continuation.yield(text)
                        }
                    }
                    self.appendNewMessage(input: input, reply: reply)
                    continuation.finish()
                } catch {
                    continuation.finish(throwing: error)
                }
            }
        }
    }

    /// Sends a non-streaming chat request and returns the reply text.
    /// Used for auxiliary tasks such as titles and suggestions.
    func sendTaskMessage(_ text: String) async throws -> String {
        let url = URL(string: Mode.chat.baseURL() + Mode.chat.path)!
        var urlRequest = URLRequest(url: url)
        urlRequest.httpMethod = Mode.chat.method
        headers.forEach { urlRequest.setValue($1, forHTTPHeaderField: $0) }
        let requestModel = Chat(model: configuration.model.rawValue, temperature: 0,
                                messages: trimConversation(with: text), stream: false)
        urlRequest.httpBody = try JSONEncoder().encode(requestModel)

        let (data, response) = try await urlSession.data(for: urlRequest)

        guard let httpResponse = response as? HTTPURLResponse else {
            throw String(localized: "Invalid response")
        }

        guard 200...299 ~= httpResponse.statusCode else {
            var error = String(localized: "Response Error: \(httpResponse.statusCode)")
            if let errorResponse = try? jsonDecoder.decode(ErrorRootResponse.self, from: data).error {
                error.append("\n\(errorResponse.message)")
            }
            throw error
        }

        let completionResponse = try jsonDecoder.decode(CompletionResponse.self, from: data)
        return completionResponse.choices.first?.message.content ?? ""
    }

    /// Generates an image via the OpenAI images endpoint and returns it as an
    /// inline Markdown image link for the Markdown-rendering message view.
    func generateImage(_ prompt: String) async throws -> String {
        let urlRequest = try makeRequest(with: prompt, mode: .image)

        let (data, response) = try await urlSession.data(for: urlRequest)

        guard let httpResponse = response as? HTTPURLResponse else {
            throw String(localized: "Invalid response")
        }

        guard 200...299 ~= httpResponse.statusCode else {
            var error = String(localized: "Response Error: \(httpResponse.statusCode)")
            if let errorResponse = try? jsonDecoder.decode(ErrorRootResponse.self, from: data).error {
                error.append("\n\(errorResponse.message)")
            }
            throw error
        }

        let imageResponse = try jsonDecoder.decode(ImageGenerationResponse.self, from: data)
        guard let url = imageResponse.data.first?.url else {
            throw String(localized: "Failed to generate image.")
        }
        // Embed the returned URL as Markdown so the message view renders it.
        // (The previous `return ")"` was a truncated Markdown image link.)
        return "![](\(url))"
    }

    /// Generates an image with the preferred service and exposes the result as
    /// a single-element stream so callers can treat it like a chat reply.
    func generateImageStream(_ prompt: String, input: String? = nil) async throws -> AsyncThrowingStream<String, Error> {
        return AsyncThrowingStream { continuation in
            Task(priority: .userInitiated) {
                do {
                    var image: String
                    switch AppConfiguration.shared.preferredText2ImageService {
                    case .openAI:
                        image = try await generateImage(prompt)
                    case .huggingFace:
                        image = try await HuggingFaceService.shared.generateImage(prompt)
                    }
                    if image.isEmpty {
                        // Record the attempt so the UI can show an error state.
                        self.appendNewMessage(input: input ?? prompt, reply: "")
                        continuation.finish(throwing: String(localized: "Invalid Response"))
                    } else {
                        continuation.yield(image)
                        continuation.finish()
                        self.appendNewMessage(input: input ?? prompt, reply: "An image")
                    }
                } catch {
                    continuation.finish(throwing: error)
                }
            }
        }
    }

    /// Clears the session's history.
    func removeAllMessages() {
        messages.removeAll()
    }
}
360 |
/// `String` doubles as a lightweight error type in this file so call sites
/// can `throw String(localized: ...)` directly.
extension String: CustomNSError {

    public var errorUserInfo: [String : Any] {
        [
            NSLocalizedDescriptionKey: self
        ]
    }

    /// True when the text starts with an explicit draw command
    /// ("draw" in any letter case, or "画").
    var isImageGenerationPrompt: Bool {
        lowercased().hasPrefix("draw") || lowercased().hasPrefix("画")
    }

    /// The prompt with its leading draw command removed.
    ///
    /// The "draw" prefix is matched case-insensitively, consistent with
    /// `isImageGenerationPrompt`. (Previously `deletingPrefix("draw")` was
    /// case-sensitive, so "Draw a cat" kept its command word.)
    var imagePrompt: String {
        if lowercased().hasPrefix("draw") {
            // Drop the four characters of "draw" regardless of their case.
            return String(dropFirst("draw".count))
        } else if hasPrefix("画") {
            return deletingPrefix("画")
        }
        return self
    }

    /// Returns the string with `prefix` removed if present (case-sensitive).
    func deletingPrefix(_ prefix: String) -> String {
        guard self.hasPrefix(prefix) else { return self }
        return String(self.dropFirst(prefix.count))
    }

    /// Extracts the unique bracketed fragments ("[...]") in order of
    /// appearance, keeping only contents longer than one character.
    var normalizedPrompts: [String] {
        var result = [String]()
        let pattern = "\\[(.*?)\\]"

        do {
            let regex = try NSRegularExpression(pattern: pattern)
            let nsText = self as NSString
            let matches = regex.matches(in: self, range: NSRange(location: 0, length: nsText.length))

            for match in matches {
                let range = match.range(at: 1)
                let content = nsText.substring(with: range)
                if !result.contains(content) && content.count > 1 {
                    result.append(content)
                }
            }
        } catch {
            print("Error creating regex: \(error.localizedDescription)")
        }
        return result
    }
}
411 |
--------------------------------------------------------------------------------
/ChatGPT/Class/Utility/DateExtensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DateExtensions.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/26.
6 | //
7 |
8 | import Foundation
9 |
extension Date {
    /// The user's current calendar.
    var calendar: Calendar { Calendar.current }

    /// Whether the date falls within today.
    var isInToday: Bool {
        calendar.isDateInToday(self)
    }

    /// Whether the date falls within yesterday.
    var isInYesterday: Bool {
        calendar.isDateInYesterday(self)
    }

    /// Shared formatting helper backing the style-based string accessors.
    private func formatted(dateStyle: DateFormatter.Style, timeStyle: DateFormatter.Style) -> String {
        let formatter = DateFormatter()
        formatter.dateStyle = dateStyle
        formatter.timeStyle = timeStyle
        return formatter.string(from: self)
    }

    /// Date-only string, e.g. "Jan 12, 2017" for `.medium`.
    ///
    /// - Parameter style: DateFormatter style (default is .medium).
    /// - Returns: date string.
    func dateString(ofStyle style: DateFormatter.Style = .medium) -> String {
        formatted(dateStyle: style, timeStyle: .none)
    }

    /// Combined date and time string, e.g. "Jan 12, 2017, 7:32:00 PM" for `.medium`.
    ///
    /// - Parameter style: DateFormatter style (default is .medium).
    /// - Returns: date and time string.
    func dateTimeString(ofStyle style: DateFormatter.Style = .medium) -> String {
        formatted(dateStyle: style, timeStyle: style)
    }

    /// iMessage-like timestamp: "Today 7:32 PM", "Yesterday 7:32 PM",
    /// otherwise a medium date with a short time.
    var iMessageDateTimeString: String {
        if isInToday {
            return String(localized: "Today") + " " + timeString(ofStyle: .short)
        }
        if isInYesterday {
            return String(localized: "Yesterday") + " " + timeString(ofStyle: .short)
        }
        return formatted(dateStyle: .medium, timeStyle: .short)
    }

    /// Time-only string, e.g. "7:37 PM" for `.short`.
    ///
    /// - Parameter style: DateFormatter style (default is .medium).
    /// - Returns: time string.
    func timeString(ofStyle style: DateFormatter.Style = .medium) -> String {
        formatted(dateStyle: .none, timeStyle: style)
    }
}
92 |
--------------------------------------------------------------------------------
/ChatGPT/Class/Utility/KeyboardPublisher.swift:
--------------------------------------------------------------------------------
1 | //
2 | // KeyboardPublisher.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/20.
6 | //
7 |
8 | #if os(iOS)
9 | import UIKit
10 | import Combine
11 |
/// Publisher to read keyboard changes.
public protocol KeyboardReadable {
    /// Emits `true` when the keyboard is about to show, `false` when it is about to hide.
    var keyboardWillChangePublisher: AnyPublisher<Bool, Never> { get }
    /// Emits `true` after the keyboard has shown, `false` after it has hidden.
    var keyboardDidChangePublisher: AnyPublisher<Bool, Never> { get }
    /// Emits the keyboard's end-frame height after it has shown.
    var keyboardHeight: AnyPublisher<CGFloat, Never> { get }
}
18 |
/// Default implementations built on `NotificationCenter` keyboard notifications.
extension KeyboardReadable {
    public var keyboardWillChangePublisher: AnyPublisher<Bool, Never> {
        // Merge show/hide into a single Bool stream: true = showing.
        Publishers.Merge(
            NotificationCenter.default
                .publisher(for: UIResponder.keyboardWillShowNotification)
                .map { _ in true },
            NotificationCenter.default
                .publisher(for: UIResponder.keyboardWillHideNotification)
                .map { _ in false }
        )
        .eraseToAnyPublisher()
    }

    public var keyboardDidChangePublisher: AnyPublisher<Bool, Never> {
        Publishers.Merge(
            NotificationCenter.default
                .publisher(for: UIResponder.keyboardDidShowNotification)
                .map { _ in true },
            NotificationCenter.default
                .publisher(for: UIResponder.keyboardDidHideNotification)
                .map { _ in false }
        )
        .eraseToAnyPublisher()
    }

    public var keyboardHeight: AnyPublisher<CGFloat, Never> {
        NotificationCenter
            .default
            .publisher(for: UIResponder.keyboardDidShowNotification)
            .map { notification in
                // Height of the keyboard's final frame; 0 if the frame is absent.
                if let keyboardFrame: NSValue = notification
                    .userInfo?[UIResponder.keyboardFrameEndUserInfoKey] as? NSValue {
                    let keyboardRectangle = keyboardFrame.cgRectValue
                    let keyboardHeight = keyboardRectangle.height
                    return keyboardHeight
                } else {
                    return 0
                }
            }
            .eraseToAnyPublisher()
    }
}
61 | #endif
62 |
--------------------------------------------------------------------------------
/ChatGPT/Class/Utility/ScrollViewDidScrollViewModifier.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ScrollViewDidScrollViewModifier.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/2.
6 | //
7 |
8 |
9 | #if os(iOS)
10 |
11 | import SwiftUI
12 | import Combine
13 |
/// Bridges UIKit scroll events into SwiftUI: observes the underlying
/// `UIScrollView`'s `contentOffset` via KVO and forwards changes to `didScroll`.
struct ScrollViewDidScrollViewModifier: ViewModifier {
    @MainActor
    class ViewModel: ObservableObject {
        // Latest observed scroll position, republished to SwiftUI.
        @Published var contentOffset: CGPoint = .zero

        var contentOffsetSubscription: AnyCancellable?

        // KVO-subscribes to the scroll view's contentOffset.
        func subscribe(scrollView: UIScrollView) {
            contentOffsetSubscription = scrollView.publisher(for: \.contentOffset).sink { [weak self] contentOffset in
                self?.contentOffset = contentOffset
            }
        }
    }

    @StateObject var viewModel = ViewModel()
    // Callback invoked with each new content offset.
    var didScroll: (CGPoint) -> Void

    func body(content: Content) -> some View {
        content
            .introspectScrollView { scrollView in
                // Subscribe only once; introspection may run on repeated layout passes.
                if viewModel.contentOffsetSubscription == nil {
                    viewModel.subscribe(scrollView: scrollView)
                }
            }
            .onReceive(viewModel.$contentOffset) { contentOffset in
                didScroll(contentOffset)
            }
    }
}
43 |
extension View {
    /// Invokes `didScroll` with the content offset whenever the underlying
    /// `UIScrollView` scrolls.
    func didScroll(_ didScroll: @escaping (CGPoint) -> Void) -> some View {
        self.modifier(ScrollViewDidScrollViewModifier(didScroll: didScroll))
    }
}
49 |
50 | #endif
51 |
--------------------------------------------------------------------------------
/ChatGPT/Class/Utility/Splash.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Splash.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/20.
6 | //
7 |
8 | import MarkdownUI
9 | import Splash
10 | import SwiftUI
11 |
/// Splash output format that renders highlighted tokens as SwiftUI `Text`.
struct TextOutputFormat: OutputFormat {
    private let theme: Splash.Theme

    init(theme: Splash.Theme) {
        self.theme = theme
    }

    func makeBuilder() -> Builder {
        Builder(theme: self.theme)
    }
}
23 |
extension TextOutputFormat {
    /// Accumulates Splash tokens as colored `Text` fragments and concatenates
    /// them into a single `Text` in `build()`.
    struct Builder: OutputBuilder {
        private let theme: Splash.Theme
        private var accumulatedText: [Text]

        fileprivate init(theme: Splash.Theme) {
            self.theme = theme
            self.accumulatedText = []
        }

        // Appends a syntax token tinted with the theme color for its type
        // (falling back to the plain-text color).
        mutating func addToken(_ token: String, ofType type: TokenType) {
            let color = self.theme.tokenColors[type] ?? self.theme.plainTextColor
            self.accumulatedText.append(Text(token)
#if os(iOS)
                .foregroundColor(.init(uiColor: color))
#endif
#if os(macOS)
                .foregroundColor(.init(nsColor: color))
#endif
            )

        }

        // Appends non-token text in the theme's plain-text color.
        mutating func addPlainText(_ text: String) {
            self.accumulatedText.append(
                Text(text)
#if os(iOS)
                    .foregroundColor(.init(uiColor: self.theme.plainTextColor))
#endif
#if os(macOS)
                    .foregroundColor(.init(nsColor: self.theme.plainTextColor))
#endif
            )
        }

        // Whitespace carries no color.
        mutating func addWhitespace(_ whitespace: String) {
            self.accumulatedText.append(Text(whitespace))
        }

        func build() -> Text {
            self.accumulatedText.reduce(Text(""), +)
        }
    }
}
68 |
69 |
/// MarkdownUI syntax highlighter backed by Splash; only Swift is highlighted,
/// all other languages pass through unstyled.
struct SplashCodeSyntaxHighlighter: CodeSyntaxHighlighter {
    // Splash's highlighter is generic over its output format; the bare
    // `SyntaxHighlighter` in the original could not compile.
    private let syntaxHighlighter: SyntaxHighlighter<TextOutputFormat>

    init(theme: Splash.Theme) {
        self.syntaxHighlighter = SyntaxHighlighter(format: TextOutputFormat(theme: theme))
    }

    func highlightCode(_ content: String, language: String?) -> Text {
        // Splash only understands Swift grammar.
        guard language?.lowercased() == "swift" else {
            return Text(content)
        }

        return self.syntaxHighlighter.highlight(content)
    }
}
85 |
extension CodeSyntaxHighlighter where Self == SplashCodeSyntaxHighlighter {
    /// Convenience factory enabling `.splash(theme:)` at MarkdownUI call sites.
    static func splash(theme: Splash.Theme) -> Self {
        SplashCodeSyntaxHighlighter(theme: theme)
    }
}
91 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/DialogueList/DialogueListPlaceholderView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DialogueListPlaceholderView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/29.
6 | //
7 |
8 | import SwiftUI
9 |
/// Empty-state placeholder: a centered message icon with a "No Message" label.
struct DialogueListPlaceholderView: View {
    var body: some View {
        VStack {
            Spacer()
            Image(systemName: "message.fill")
                .font(.system(size: 50))
                .padding()
                .foregroundColor(.secondary)
            Text("No Message")
                .font(.title3)
                .bold()
            Spacer()
        }
    }
}
25 |
// Xcode canvas preview.
struct DialogueListPlaceholderView_Previews: PreviewProvider {
    static var previews: some View {
        DialogueListPlaceholderView()
    }
}
31 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/DialogueList/DialogueSessionListView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DialogueSessionListView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/17.
6 | //
7 |
8 | import SwiftUI
9 |
/// List of dialogue sessions with per-row delete context menus.
/// iOS and macOS share the same row content but differ in where the
/// `NavigationLink` wraps it, hence the duplicated layout under `#if`.
struct DialogueSessionListView: View {
    @Environment(\.managedObjectContext) private var viewContext

#if os(iOS)
    @Environment(\.verticalSizeClass) var verticalSizeClass

    // Hide the avatar when vertical space is tight (compact height).
    private var shouldShowIcon: Bool {
        verticalSizeClass != .compact
    }
#endif


    @Binding var dialogueSessions: [DialogueSession]
    @Binding var selectedDialogueSession: DialogueSession?

    // Toggled by the parent while a reply is streaming; a change triggers a
    // re-sort of the list (see `onChange` below).
    @Binding var isReplying: Bool

    // Deletion callbacks owned by the parent view.
    var deleteHandler: (IndexSet) -> Void
    var deleteDialogueHandler: (DialogueSession) -> Void

    var body: some View {
        List(selection: $selectedDialogueSession) {
            ForEach(dialogueSessions) { session in
#if os(iOS)
                HStack {
                    if shouldShowIcon {
                        Image("openai")
                            .resizable()
                            .frame(width: 40, height: 40)
                            .cornerRadius(20)
                            .padding()
                    }
                    VStack(spacing: 4) {
                        // Title row: model name + timestamp.
                        NavigationLink(value: session) {
                            HStack {
                                Text(session.configuration.model.rawValue)
                                    .bold()
                                    .font(Font.system(.headline))
                                Spacer()
                                Text(session.date.dialogueDesc)
                                    .font(Font.system(.subheadline))
                                    .foregroundColor(.secondary)
                            }
                        }
                        // Preview of the most recent message, max two lines.
                        HStack {
                            Text(session.lastMessage)
                                .font(Font.system(.body))
                                .foregroundColor(.secondary)
                                .lineLimit(2)
                                .frame(
                                    maxWidth: .infinity,
                                    maxHeight: .infinity,
                                    alignment: .topLeading
                                )
                        }
                        .frame(height:44)
                    }
                }
                .contextMenu {
                    Button(role: .destructive) {
                        deleteDialogueHandler(session)
                        // Clear the selection when its session is deleted.
                        if session == selectedDialogueSession {
                            selectedDialogueSession = nil
                        }
                    } label: {
                        HStack {
                            Image(systemName: "trash")
                            Text("Delete")
                        }
                    }
                }
#else
                NavigationLink(value: session) {
                    HStack {
                        Image("openai")
                            .resizable()
                            .frame(width: 40, height: 40)
                            .cornerRadius(20)
                            .padding()
                        VStack(spacing: 4) {
                            // Title row: model name + timestamp.
                            HStack {
                                Text(session.configuration.model.rawValue)
                                    .bold()
                                    .font(Font.system(.headline))
                                Spacer()
                                Text(session.date.dialogueDesc)
                                    .font(Font.system(.subheadline))
                                    .foregroundColor(.secondary)
                            }
                            // Preview of the most recent message, max two lines.
                            HStack {
                                Text(session.lastMessage)
                                    .font(Font.system(.body))
                                    .foregroundColor(.secondary)
                                    .lineLimit(2)
                                    .frame(
                                        maxWidth: .infinity,
                                        maxHeight: .infinity,
                                        alignment: .topLeading
                                    )
                            }
                            .frame(height:44)
                        }
                    }
                }
                .contextMenu {
                    Button(role: .destructive) {
                        deleteDialogueHandler(session)
                        // Clear the selection when its session is deleted.
                        if session == selectedDialogueSession {
                            selectedDialogueSession = nil
                        }
                    } label: {
                        HStack {
                            Image(systemName: "trash")
                            Text("Delete")
                        }
                    }
                }

#endif
            }
            .onDelete { indexSet in
                deleteHandler(indexSet)
            }
        }
        .onAppear(perform: sortList)
#if os(iOS)
        .listStyle(.plain)
        .navigationTitle(Text("ChatGPT"))
#else
        .frame(minWidth: 300)
#endif
        .onChange(of: isReplying) { isReplying in
            updateList()
        }
    }

    // Re-sorts the list while preserving the current selection.
    private func updateList() {
        withAnimation {
            if selectedDialogueSession != nil {
                let session = selectedDialogueSession
                sortList()
                selectedDialogueSession = session
            } else {
                sortList()
            }
        }
    }

    // Most recently active sessions first.
    private func sortList() {
        dialogueSessions = dialogueSessions.sorted(by: {
            $0.date > $1.date
        })
    }
}
164 |
extension Date {

    /// Compact timestamp for a dialogue row: "Yesterday", a short time for
    /// dates within today, otherwise a short date.
    var dialogueDesc: String {
        if isInYesterday {
            return String(localized: "Yesterday")
        }
        if isInToday {
            return timeString(ofStyle: .short)
        }
        return dateString(ofStyle: .short)
    }
}
177 |
178 | import Combine
179 |
extension Published.Publisher {
    /// Re-emits values on the main run loop so subscribers observe them after
    /// the property change has been applied (didSet-like timing).
    /// `Published.Publisher` never fails, hence the `Never` failure type.
    var didSet: AnyPublisher<Value, Never> {
        // Any better ideas on how to get the didSet semantics?
        // This works, but I'm not sure if it's ideal.
        self.receive(on: RunLoop.main).eraseToAnyPublisher()
    }
}
187 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/BottomViews/BottomInputView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // BottomInputView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/23.
6 | //
7 |
8 | import SwiftUI
9 | import SwiftUIX
10 |
/// Bottom composer bar: a leading accessory button column plus the text input
/// field, with a platform-appropriate background.
struct BottomInputView: View {

    @ObservedObject var session: DialogueSession
    @Binding var isLoading: Bool
    @Environment(\.colorScheme) var colorScheme

    // Namespace for matched-geometry animations shared with the parent.
    let namespace: Namespace.ID

    @FocusState var isTextFieldFocused: Bool

    // Called with the composed text when the user sends a message.
    var send: (String) -> Void

    var body: some View {
        HStack(alignment: .bottom) {
            LeadingComposerView(session: session, isLoading: $isLoading)
                .fixedSize()
                // Nudge the accessory so it stays centered on the input row.
                .alignmentGuide(.bottom, computeValue: { d in
                    d[.bottom] - d.height * 0.5 + leadingComposerDelta
                })
                .padding([.leading])
            ZStack {
                ComposerInputView(
                    session: session,
                    isTextFieldFocused: _isTextFieldFocused,
                    namespace: namespace,
                    send: send
                )
            }
        }
#if os(iOS)
        .padding([.top, .bottom], 6)
        // Blur background matching the current appearance.
        .background{
            if colorScheme == .light {
                BlurEffectView(style: .light)
                    .edgesIgnoringSafeArea(.bottom)
            } else {
                BlurEffectView(style: .systemUltraThinMaterialDark)
                    .edgesIgnoringSafeArea(.bottom)
            }
        }
#else
        .padding(.top, 10)
        .padding(.bottom, 16)
#endif
    }


    // Platform-tuned vertical offset for the leading accessory.
    private var leadingComposerDelta: CGFloat {
#if os(iOS)
        17
#else
        16
#endif
    }


}
68 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/BottomViews/ComposerInputView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ComposerInputView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/17.
6 | //
7 |
8 |
9 | import SwiftUI
10 |
/// The text-entry capsule: text field, attachment preview, in-flight send
/// animation, and the send/mic button, inside a rounded outline.
struct ComposerInputView: View {

    @ObservedObject var session: DialogueSession
    @FocusState var isTextFieldFocused: Bool
    /// Namespace for the send-bubble matched-geometry animation.
    let namespace: Namespace.ID

    /// Called with the current input text when the send button is tapped.
    var send: (String) -> Void

    /// Send-button diameter in points.
    private var size: CGFloat {
        #if os(macOS)
        24
        #else
        26
        #endif
    }

    /// Corner radius of the composer's rounded outline.
    var radius: CGFloat {
        #if os(macOS)
        16
        #else
        17
        #endif
    }

    var body: some View {
        ZStack(alignment: .bottomTrailing) {
            textField
            // Overlay states: image mid-send, attached-image preview,
            // or the outgoing text bubble animation.
            if let data = session.sendingData {
                animationImageView(data)
            } else if let data = session.inputData {
                imageView(data)
            } else if session.isSending {
                animationTextView
            }
            sendButton
        }
        .macButtonStyle()
        .padding(4)
        .overlay(
            RoundedRectangle(cornerRadius: radius, style: .continuous)
                .stroke(.tertiary, lineWidth: 1)
                .opacity(0.7)
        )
        .padding([.trailing])
    }

    /// The text field; hidden while an image attachment is present.
    @ViewBuilder
    private var textField: some View {
        if session.inputData == nil {
            TextField("Ask anything, or type /", text: $session.input, axis: .vertical)
                .focused($isTextFieldFocused)
                .multilineTextAlignment(.leading)
                .lineLimit(1...20)
                .padding(.leading, 12)
                .padding(.trailing, size + 6)
                .frame(minHeight: size)
            #if os(macOS)
            .textFieldStyle(.plain)
            #endif
        } else {
            EmptyView()
        }
    }

    /// Outgoing image mid-send; matched geometry animates it into the list.
    @ViewBuilder
    private func animationImageView(_ data: Data) -> some View {
        HStack {
            Image(data: data)?
                .resizable()
                .scaledToFit()
                .bubbleStyle(isMyMessage: true, type: .imageData)
                .matchedGeometryEffect(id: AnimationID.senderBubble, in: namespace)
            Spacer(minLength: 80)
        }
    }

    /// Attached-image preview with a close button that clears the attachment.
    @ViewBuilder
    private func imageView(_ data: Data) -> some View {
        HStack {
            ZStack(alignment: .topTrailing) {
                Image(data: data)?
                    .resizable()
                    .scaledToFit()
                    .cornerRadius(radius)
                Button {
                    withAnimation {
                        session.inputData = nil
                    }
                } label: {
                    ZStack {
                        Color.white
                        #if os(macOS)
                            .frame(width: 16, height: 16)
                            .cornerRadius(8)
                        #else
                            .frame(width: 20, height: 20)
                            .cornerRadius(10)
                        #endif
                        Image(systemName: "xmark.circle.fill")
                            .foregroundColor(.systemGray)
                    }
                }
                .padding([.top, .trailing], 6)
            }
            Spacer(minLength: 80)
        }
    }

    /// Outgoing text mid-send; matched geometry animates it into the list.
    private var animationTextView: some View {
        Text("\(session.bubbleText)")
            .frame(maxWidth: .infinity, minHeight: radius * 2 - 8, alignment: .leading)
            .bubbleStyle(isMyMessage: true)
            .matchedGeometryEffect(id: AnimationID.senderBubble, in: namespace)
            .padding(-4)
    }

    /// Send arrow when there is content; on iOS an inert mic icon otherwise.
    @ViewBuilder
    private var sendButton: some View {
        if !session.input.isEmpty || session.inputData != nil {
            Button {
                send(session.input)
            } label: {
                Image(systemName: "arrow.up.circle.fill")
                    .resizable()
                    .scaledToFit()
                    .frame(width: size, height: size)
                    .foregroundColor(.blue)
                    .font(.body.weight(.semibold))
            }
            .keyboardShortcut(.defaultAction)
        } else {
            #if os(iOS)
            // NOTE(review): empty action — microphone input not implemented.
            Button {

            } label: {
                Image(systemName: "mic")
                    .resizable()
                    .scaledToFit()
                    .frame(width: 18, height: 18)
                    .foregroundColor(.secondary)
                    .opacity(0.7)
            }
            .offset(x:-4, y: -4)
            #endif
        }
    }

}
159 |
160 |
/// Xcode preview: an empty composer with a no-op send handler.
struct ComposerInputView_Previews: PreviewProvider {

    @Namespace static var namespace

    static var previews: some View {
        ComposerInputView(
            session: DialogueSession(),
            namespace: namespace,
            send: { _ in }
        )
    }

}
172 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/BottomViews/LeadingComposerView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LeadingComposerView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/17.
6 | //
7 |
8 | import SwiftUI
9 | import PhotosUI
10 |
/// Accessory icons to the left of the composer: a photo picker button and,
/// on iOS, a prompt-template menu.
struct LeadingComposerView: View {

    @ObservedObject var session: DialogueSession

    // NOTE(review): `selectedPromt` and `showPromptPopover` are not referenced
    // in this view's body — likely leftovers; kept to avoid interface changes.
    @State var selectedPromt: Prompt?

    @State var showPromptPopover: Bool = false

    @State var showPhotoPicker = false

    @State var imageSelection: PhotosPickerItem? = nil

    /// Set while the picked photo's data is being loaded.
    @Binding var isLoading: Bool


    /// Icon height; iOS icons are larger than macOS ones.
    private var height: CGFloat {
        #if os(iOS)
        22
        #else
        17
        #endif
    }

    var body: some View {
        HStack(spacing: 16) {
            // Opens the system photo picker.
            Button {
                showPhotoPicker = true
            } label: {
                Image(systemName: "camera.fill")
                    .resizable()
                    .scaledToFit()
                    .frame(height: height)
                    .foregroundColor(.gray)
            }
            #if os(iOS)
            // Prompt menu, hidden while an image is attached or a send is in flight.
            if session.inputData == nil && !session.isSending {
                Menu {
                    ForEach(PromptManager.shared.prompts) { promt in
                        Button {
                            session.input = promt.prompt
                        } label: {
                            Text(promt.act)
                        }
                    }
                } label: {
                    Image(systemName: "person.text.rectangle.fill")
                        .resizable()
                        .scaledToFit()
                        .frame(height: height)
                        .foregroundColor(.gray)
                        .ignoresSafeArea(.keyboard)
                }
                .menuIndicator(.hidden)
                .ignoresSafeArea(.keyboard)
            }
            #endif
        }
        .macButtonStyle()
        .padding(.horizontal, 8)
        .frame(maxHeight: 32)
        .photosPicker(isPresented: $showPhotoPicker, selection: $imageSelection, matching: .images)
        .onChange(of: imageSelection) { imageSelection in
            if let imageSelection {
                isLoading = true
                imageSelection.loadTransferable(type: Data.self) { result in
                    DispatchQueue.main.async {
                        // Always clear the spinner: the previous version only
                        // reset `isLoading` on success with non-nil data, so a
                        // load failure left it stuck at `true` forever.
                        isLoading = false
                        if case .success(let data) = result, let data {
                            withAnimation {
                                session.inputData = data
                            }
                        }
                    }
                }
            }
        }
    }

}
96 |
extension Data {

    /// Encodes the bytes as a `data:` URL string with a PNG MIME type.
    ///
    /// Fixes a stray `)` the previous implementation appended after the
    /// base64 payload, which produced an invalid data URL (`…base64,XXXX)`).
    ///
    /// NOTE(review): the MIME type is hard-coded to `image/png`; confirm the
    /// photo picker always supplies PNG data.
    var imageBased64String: String {
        "data:image/png;base64,\(base64EncodedString())"
    }

}
104 |
/// Xcode previews: the accessory bar in light and dark appearances, alone and
/// next to a capsule standing in for the composer.
struct LeadingComposerView_Previews: PreviewProvider {
    static var previews: some View {
        Group {
            LeadingComposerView(session: .init(), isLoading: .constant(false))
                .previewLayout(.fixed(width: 400.0, height: 100.0))

            LeadingComposerView(session: .init(), isLoading: .constant(false))
                .previewLayout(.fixed(width: 400.0, height: 100.0))

            HStack(alignment: .bottom) {
                LeadingComposerView(session: .init(), isLoading: .constant(false))

                Capsule()
                    .stroke(.gray, lineWidth: 2)
                    .frame(maxHeight: 50)
            }
            .preferredColorScheme(.dark)
            .previewLayout(.fixed(width: 400.0, height: 100.0))

            LeadingComposerView(session: .init(), isLoading: .constant(false))
                .preferredColorScheme(.dark)
                .previewLayout(.fixed(width: 400.0, height: 100.0))
        }
    }
}
129 |
extension View {
    /// Applies `MacButtonModifier`, which makes buttons borderless on macOS
    /// and leaves other platforms untouched.
    func macButtonStyle() -> some View {
        self.modifier(MacButtonModifier())
    }
}
135 |
/// Strips the default macOS button chrome by applying the borderless style;
/// on every other platform the content passes through unchanged.
struct MacButtonModifier: ViewModifier {

    func body(content: Content) -> some View {
        #if !os(macOS)
        content
        #else
        content
            .buttonStyle(.borderless)
        #endif
    }
}
147 |
148 |
/// Gives text fields the plain (chrome-free) style on macOS and leaves them
/// untouched elsewhere.
///
/// The previous implementation applied `.buttonStyle(.borderless)` — an
/// apparent copy-paste from `MacButtonModifier` that does not style a text
/// field at all. `.textFieldStyle(.plain)` matches how this codebase already
/// styles its macOS text fields inline (see `ComposerInputView.textField`).
struct MacTextFieldModifier: ViewModifier {

    func body(content: Content) -> some View {
        #if os(macOS)
        content
            .textFieldStyle(.plain)
        #else
        content
        #endif
    }
}
160 |
161 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/ConversationView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConversationView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/3.
6 | //
7 |
8 | import SwiftUI
9 | import SwiftUIX
10 | import Kingfisher
11 |
/// Identifiers for matched-geometry animations shared across views.
struct AnimationID {

    // Shared by the composer's outgoing bubble and the sent-message bubble so
    // the send animation interpolates between the two positions.
    static let senderBubble = "SenderBubble"

}
17 |
/// One question/answer row: optional timestamp, the sender bubble (editable
/// for the last message), and the reply bubble with retry/copy/delete menus.
struct ConversationView: View {

    /// The question/answer pair this row renders.
    let conversation: Conversation
    /// Namespace for the sender-bubble matched-geometry animation.
    let namespace: Namespace.ID
    /// Date of the previous conversation; controls timestamp visibility.
    var lastConversationDate: Date?
    /// Re-sends a (possibly edited) conversation.
    let retryHandler: (Conversation) -> Void

    // Local editing state for the sender message.
    @State var isEditing = false
    @FocusState var isFocused: Bool
    @State var editingMessage: String = ""
    /// Invoked from the context menus to delete this conversation.
    var deleteHandler: (() -> Void)?

    var body: some View {
        VStack(spacing: 0) {
            dateView
            VStack {
                message(isSender: true)
                    .padding(.leading, horizontalPadding(for: conversation.inputType)).padding(.vertical, 10)
                if conversation.reply != nil {
                    message()
                        .transition(.move(edge: .leading))
                        .padding(.trailing, horizontalPadding(for: conversation.replyType)).padding(.vertical, 10)
                }
            }
        }
        .transition(.moveAndFade)
        .padding(.horizontal, 15)
    }

    /// Inset on the far side of a bubble; images get a wider inset than text,
    /// and macOS wider than iOS.
    private func horizontalPadding(for type: MessageType) -> CGFloat {
        #if os(iOS)
        type.isImage ? 105 : 55
        #else
        type.isImage ? 205 : 105
        #endif
    }

    /// Centered timestamp, shown for the first row and whenever more than a
    /// minute has elapsed since the previous conversation.
    var dateView: some View {
        HStack {
            Spacer()
            if let lastConversationDate = lastConversationDate {
                if conversation.date.timeIntervalSince(lastConversationDate) > 60 {
                    Text(conversation.date.iMessageDateTimeString)
                        .font(.footnote)
                        .foregroundColor(.secondaryLabel)
                }
            } else {
                Text(conversation.date.iMessageDateTimeString)
                    .font(.footnote)
                    .foregroundColor(.secondaryLabel)
            }
            Spacer()
        }
        .padding(.top, 10)
    }

    // NOTE(review): not referenced anywhere in this view's visible code;
    // retry entry points check `conversation.isLast` directly instead.
    private var showRefreshButton: Bool {
        !conversation.isReplying && conversation.isLast
    }

    /// Builds either the sender or the reply bubble with its context menu
    /// (copy; regenerate for the last reply; delete when not replying).
    @ViewBuilder
    func message(isSender: Bool = false) -> some View {
        if isSender {
            senderMessage
                .contextMenu {
                    Button {
                        // Copy the attached image if present, else the text.
                        if let data = conversation.inputData {
                            KFCrossPlatformImage(data: data)?.copyToPasteboard()
                        } else {
                            conversation.input.copyToPasteboard()
                        }
                    } label: {
                        HStack {
                            Image(systemName: "doc.on.doc")
                            Text("Copy")
                        }
                    }
                    if !conversation.isReplying {
                        Button(role: .destructive) {
                            deleteHandler?()
                        } label: {
                            HStack {
                                Image(systemName: "trash")
                                Text("Delete")
                            }
                        }
                    }
                }
        } else {
            replyMessage
                .contextMenu {
                    VStack {
                        Button {
                            // Copy priority: cached remote image, raw image
                            // data, then the reply text.
                            if let imageURL = conversation.replyImageURL {
                                ImageCache.default.retrieveImage(forKey: imageURL.absoluteString) { result in
                                    switch result {
                                    case let .success(image):
                                        image.image?.copyToPasteboard()
                                        print("copied!")
                                    case .failure:
                                        break
                                    }
                                }
                            } else if let data = conversation.replyImageData {
                                KFCrossPlatformImage(data: data)?.copyToPasteboard()
                            } else {
                                conversation.reply?.copyToPasteboard()
                            }
                        } label: {
                            HStack {
                                Image(systemName: "doc.on.doc")
                                Text("Copy")
                            }
                        }
                        if conversation.isLast {
                            Button {
                                retryHandler(conversation)
                            } label: {
                                HStack {
                                    Image(systemName: "arrow.clockwise")
                                    Text("Regenerate")
                                }
                            }
                        }
                        if !conversation.isReplying {
                            Button(role: .destructive) {
                                deleteHandler?()
                            } label: {
                                HStack {
                                    Image(systemName: "trash")
                                    Text("Delete")
                                }
                            }
                        }
                    }
                }
        }
    }

    /// Right-aligned sender bubble; the last message also gets the edit
    /// button and the matched-geometry send animation.
    var senderMessage: some View {
        HStack(spacing: 0) {
            Spacer()
            if conversation.isLast {
                messageEditButton()
                senderMessageContent
                    .frame(minHeight: 24)
                    .bubbleStyle(isMyMessage: true, type: conversation.inputType)
                    .matchedGeometryEffect(id: AnimationID.senderBubble, in: namespace)
            } else {
                senderMessageContent
                    .frame(minHeight: 24)
                    .bubbleStyle(isMyMessage: true, type: conversation.inputType)
            }
        }
    }

    /// The sender bubble's interior: image, in-place editor, or plain text.
    @ViewBuilder
    var senderMessageContent: some View {
        if let data = conversation.inputData {
            ImageDataMessageView(data: data)
                .maxWidth(256)
        } else {
            if isEditing {
                TextField("", text: $editingMessage, axis: .vertical)
                    .foregroundColor(.primary)
                    .focused($isFocused)
                    .lineLimit(1...20)
                    .background(.background)
            } else {
                Text(conversation.input)
                    .textSelection(.enabled)
            }
        }
    }

    /// Pencil/checkmark toggle that edits the sender text; confirming a
    /// changed message re-sends it via `retryHandler`.
    @ViewBuilder
    func messageEditButton() -> some View {
        if conversation.isReplying || conversation.inputType.isImage {
            EmptyView()
        } else {
            Button {
                if isEditing {
                    // Only re-send when the text actually changed.
                    if editingMessage != conversation.input {
                        var message = conversation
                        message.input = editingMessage
                        retryHandler(message)
                    }
                } else {
                    editingMessage = conversation.input
                }
                isEditing.toggle()
                isFocused = isEditing
            } label: {
                if isEditing {
                    Image(systemName: "checkmark")
                } else {
                    Image(systemName: "pencil")
                }
            }
            .keyboardShortcut(isEditing ? .defaultAction : .none)
            .frame(width: 30)
            .padding(.trailing)
            .padding(.leading, -50)
        }
    }

    /// Left-aligned reply bubble; renders text, image, image data, or an
    /// error, plus a typing indicator while the reply streams in.
    var replyMessage: some View {
        HStack(spacing: 0) {
            VStack(alignment: .leading) {
                switch conversation.replyType {
                case .text:
                    TextMessageView(text: conversation.reply ?? "", isReplying: conversation.isReplying)
                case .image:
                    ImageMessageView(url: conversation.replyImageURL)
                        .maxWidth(256)
                case .imageData:
                    ImageDataMessageView(data: conversation.replyImageData)
                        .maxWidth(256)
                case .error:
                    ErrorMessageView(error: conversation.errorDesc) {
                        retryHandler(conversation)
                    }
                }
                if conversation.isReplying {
                    ReplyingIndicatorView()
                        .frame(width: 48, height: 24)
                }
            }
            .frame(minHeight: 24)
            .bubbleStyle(isMyMessage: false, type: conversation.replyType)
            retryButton
            Spacer()
        }
    }

    /// Regenerate button shown beside the last successful reply.
    @ViewBuilder
    var retryButton: some View {
        if !conversation.isReplying {
            if conversation.errorDesc == nil && conversation.isLast {
                Button {
                    retryHandler(conversation)
                } label: {
                    HStack {
                        Image(systemName: "arrow.clockwise")
                    }
                }
                .frame(width: 30)
                .padding(.leading)
                .padding(.trailing, -50)
            }
        }
    }

}
272 |
extension String {
    /// Places this string on the system pasteboard (UIKit or AppKit).
    func copyToPasteboard() {
        #if os(iOS)
        UIPasteboard.general.string = self
        #else
        let pasteboard = NSPasteboard.general
        pasteboard.clearContents()
        pasteboard.setString(self, forType: .string)
        #endif
    }
}
283 |
extension KFCrossPlatformImage {
    /// Places this image on the system pasteboard (UIKit or AppKit).
    func copyToPasteboard() {
        #if os(iOS)
        UIPasteboard.general.image = self
        #else
        let pasteboard = NSPasteboard.general
        pasteboard.clearContents()
        pasteboard.writeObjects([self])
        #endif
    }
}
294 |
extension AnyTransition {
    /// Rows slide in from the bottom; on removal they slide up while fading.
    static var moveAndFade: AnyTransition {
        let insertion = AnyTransition.move(edge: .bottom)
        let removal = AnyTransition.move(edge: .top).combined(with: .opacity)
        return .asymmetric(insertion: insertion, removal: removal)
    }
}
303 |
/// Previews covering four fixture states: streaming reply, error, empty
/// reply in flight, and a finished last message.
struct MessageRowView_Previews: PreviewProvider {

    static let message = Conversation(
        isReplying: true, isLast: false,
        input: "What is SwiftUI?",
        reply: "SwiftUI is a user interface framework that allows developers to design and develop user interfaces for iOS, macOS, watchOS, and tvOS applications using Swift, a programming language developed by Apple Inc.")

    static let message2 = Conversation(
        isReplying: false, isLast: false,
        input: "What is SwiftUI?",
        reply: "",
        errorDesc: "ChatGPT is currently not available")

    static let message3 = Conversation(
        isReplying: true, isLast: false,
        input: "What is SwiftUI?",
        reply: "")

    static let message4 = Conversation(
        isReplying: false, isLast: true,
        input: "What is SwiftUI?",
        reply: "SwiftUI is a user interface framework that allows developers to design and develop user interfaces for iOS, macOS, watchOS, and tvOS applications using Swift, a programming language developed by Apple Inc.",
        errorDesc: nil)

    @Namespace static var animation

    static var previews: some View {
        NavigationStack {
            ScrollView {
                ConversationView(conversation: message, namespace: animation, retryHandler: { message in

                })
                ConversationView(conversation: message2, namespace: animation, retryHandler: { message in

                })
                ConversationView(conversation: message3, namespace: animation, retryHandler: { message in

                })
                ConversationView(conversation: message4, namespace: animation, retryHandler: { message in

                })
            }
            .frame(width: 400)
            .previewLayout(.sizeThatFits)
        }
    }
}
351 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/DialogueSettingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DialogueSettingsView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/26.
6 | //
7 |
8 | import SwiftUI
9 |
/// Form for editing a single session's model and sampling temperature.
struct DialogueSettingsView: View {

    /// The session configuration being edited, bound to the parent.
    @Binding var configuration: DialogueSession.Configuration

    // NOTE(review): `dismiss` is not used in this view's visible code.
    @Environment(\.dismiss) var dismiss

    var body: some View {
        Form {
            Section {
                HStack {
                    Text("Model")
                        .fixedSize()
                    Spacer()
                    // Only the two chat models are offered here.
                    Picker("Model", selection: $configuration.model) {
                        ForEach([OpenAIModelType.chatgpt, .chatgpt0301], id: \.self) { model in
                            Text(model.rawValue)
                                .tag(model)
                        }
                    }
                    .labelsHidden()
                }
                VStack {
                    // Sampling temperature, limited to 0...2 in 0.1 steps.
                    Stepper(value: $configuration.temperature, in: 0...2, step: 0.1) {
                        HStack {
                            Text("Temperature")
                            Spacer()
                            Text(String(format: "%.1f", configuration.temperature))
                                .padding(.horizontal)
                                .height(32)
                                .width(60)
                                .background(Color.secondarySystemFill)
                                .cornerRadius(8)
                        }
                    }
                }
            }
        }
        .navigationTitle("Settings")
    }

}
51 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/ErrorMessageView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ErrorMessageView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/6.
6 | //
7 |
8 | import SwiftUI
9 |
/// Red error text with a "Regenerate response" button; renders nothing when
/// there is no error.
struct ErrorMessageView: View {

    /// Error text to display; `nil` hides the whole view.
    var error: String?
    /// Called when the user taps the regenerate button.
    var retryHandler: (() -> Void)?

    var body: some View {
        if let message = error {
            Text("Error: \(message)")
                .foregroundColor(.red)
                .multilineTextAlignment(.leading)
            Button(action: { retryHandler?() }) {
                Text("Regenerate response")
            }
            .foregroundColor(.accentColor)
            .padding([.top, .bottom])
        }
    }
}
30 |
struct ErrorMessageView_Previews: PreviewProvider {
    static var previews: some View {
        // Sample error text is Chinese for "request timed out".
        ErrorMessageView(error: "请求超时")
    }
}
36 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/ImageDataMessageView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageDataMessageView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/6.
6 | //
7 |
8 | import SwiftUI
9 |
/// Renders in-memory image bytes, scaled to fit; empty when there is no data.
struct ImageDataMessageView: View {

    /// Raw image bytes to decode and display.
    var data: Data?

    var body: some View {
        if let bytes = data {
            Image(data: bytes)?
                .resizable()
                .sizeToFit()
        } else {
            EmptyView()
        }
    }
}
24 |
struct ImageDataMessageView_Previews: PreviewProvider {
    static var previews: some View {
        // No data supplied, so the preview shows the empty branch.
        ImageDataMessageView()
    }
}
30 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/ImageMessageView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageMessageView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/6.
6 | //
7 |
8 | import SwiftUI
9 | import Kingfisher
10 |
/// Loads and renders a remote image with Kingfisher, showing a spinner while
/// it downloads and caching the original bytes.
struct ImageMessageView: View {

    /// Remote image location; `nil` yields Kingfisher's empty state.
    var url: URL?

    var body: some View {
        KFImage(url)
            .resizable()
            .fade(duration: 0.25)
            .placeholder { p in
                ProgressView()
            }
            // Cache the original so "Copy" can retrieve it from ImageCache.
            .cacheOriginalImage()
            .aspectRatio(contentMode: .fit)
    }
}
26 |
struct ImageMessageView_Previews: PreviewProvider {
    static var previews: some View {
        // nil URL: exercises the placeholder/empty path.
        ImageMessageView()
    }
}
32 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/MessageBubble.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessageBubble.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/16.
6 | //
7 |
8 |
9 | import SwiftUI
10 |
11 |
12 | struct BubbleShape: Shape {
13 | var myMessage : Bool
14 | func path(in rect: CGRect) -> Path {
15 | let width = rect.width
16 | let height = rect.height
17 |
18 | #if os(macOS)
19 | let bezierPath = NSBezierPath()
20 | if !myMessage {
21 | bezierPath.move(to: CGPoint(x: 20, y: height))
22 | bezierPath.line(to: CGPoint(x: width - 15, y: height))
23 | bezierPath.curve(to: CGPoint(x: width, y: height - 15), controlPoint1: CGPoint(x: width - 8, y: height), controlPoint2: CGPoint(x: width, y: height - 8))
24 | bezierPath.line(to: CGPoint(x: width, y: 15))
25 | bezierPath.curve(to: CGPoint(x: width - 15, y: 0), controlPoint1: CGPoint(x: width, y: 8), controlPoint2: CGPoint(x: width - 8, y: 0))
26 | bezierPath.line(to: CGPoint(x: 20, y: 0))
27 | bezierPath.curve(to: CGPoint(x: 5, y: 15), controlPoint1: CGPoint(x: 12, y: 0), controlPoint2: CGPoint(x: 5, y: 8))
28 | bezierPath.line(to: CGPoint(x: 5, y: height - 10))
29 | bezierPath.curve(to: CGPoint(x: 0, y: height), controlPoint1: CGPoint(x: 5, y: height - 1), controlPoint2: CGPoint(x: 0, y: height))
30 | bezierPath.line(to: CGPoint(x: -1, y: height))
31 | bezierPath.curve(to: CGPoint(x: 12, y: height - 4), controlPoint1: CGPoint(x: 4, y: height + 1), controlPoint2: CGPoint(x: 8, y: height - 1))
32 | bezierPath.curve(to: CGPoint(x: 20, y: height), controlPoint1: CGPoint(x: 15, y: height), controlPoint2: CGPoint(x: 20, y: height))
33 | } else {
34 | bezierPath.move(to: CGPoint(x: width - 20, y: height))
35 | bezierPath.line(to: CGPoint(x: 15, y: height))
36 | bezierPath.curve(to: CGPoint(x: 0, y: height - 15), controlPoint1: CGPoint(x: 8, y: height), controlPoint2: CGPoint(x: 0, y: height - 8))
37 | bezierPath.line(to: CGPoint(x: 0, y: 15))
38 | bezierPath.curve(to: CGPoint(x: 15, y: 0), controlPoint1: CGPoint(x: 0, y: 8), controlPoint2: CGPoint(x: 8, y: 0))
39 | bezierPath.line(to: CGPoint(x: width - 20, y: 0))
40 | bezierPath.curve(to: CGPoint(x: width - 5, y: 15), controlPoint1: CGPoint(x: width - 12, y: 0), controlPoint2: CGPoint(x: width - 5, y: 8))
41 | bezierPath.line(to: CGPoint(x: width - 5, y: height - 12))
42 | bezierPath.curve(to: CGPoint(x: width, y: height), controlPoint1: CGPoint(x: width - 5, y: height - 1), controlPoint2: CGPoint(x: width, y: height))
43 | bezierPath.line(to: CGPoint(x: width + 1, y: height))
44 | bezierPath.curve(to: CGPoint(x: width - 12, y: height - 4), controlPoint1: CGPoint(x: width - 4, y: height + 1), controlPoint2: CGPoint(x: width - 8, y: height - 1))
45 | bezierPath.curve(to: CGPoint(x: width - 20, y: height), controlPoint1: CGPoint(x: width - 15, y: height), controlPoint2: CGPoint(x: width - 20, y: height))
46 | }
47 | return Path(bezierPath.cgPath)
48 | #else
49 | let bezierPath = UIBezierPath()
50 | if !myMessage {
51 | bezierPath.move(to: CGPoint(x: 20, y: height))
52 | bezierPath.addLine(to: CGPoint(x: width - 15, y: height))
53 | bezierPath.addCurve(to: CGPoint(x: width, y: height - 15), controlPoint1: CGPoint(x: width - 8, y: height), controlPoint2: CGPoint(x: width, y: height - 8))
54 | bezierPath.addLine(to: CGPoint(x: width, y: 15))
55 | bezierPath.addCurve(to: CGPoint(x: width - 15, y: 0), controlPoint1: CGPoint(x: width, y: 8), controlPoint2: CGPoint(x: width - 8, y: 0))
56 | bezierPath.addLine(to: CGPoint(x: 20, y: 0))
57 | bezierPath.addCurve(to: CGPoint(x: 5, y: 15), controlPoint1: CGPoint(x: 12, y: 0), controlPoint2: CGPoint(x: 5, y: 8))
58 | bezierPath.addLine(to: CGPoint(x: 5, y: height - 10))
59 | bezierPath.addCurve(to: CGPoint(x: 0, y: height), controlPoint1: CGPoint(x: 5, y: height - 1), controlPoint2: CGPoint(x: 0, y: height))
60 | bezierPath.addLine(to: CGPoint(x: -1, y: height))
61 | bezierPath.addCurve(to: CGPoint(x: 12, y: height - 4), controlPoint1: CGPoint(x: 4, y: height + 1), controlPoint2: CGPoint(x: 8, y: height - 1))
62 | bezierPath.addCurve(to: CGPoint(x: 20, y: height), controlPoint1: CGPoint(x: 15, y: height), controlPoint2: CGPoint(x: 20, y: height))
63 | } else {
64 | bezierPath.move(to: CGPoint(x: width - 20, y: height))
65 | bezierPath.addLine(to: CGPoint(x: 15, y: height))
66 | bezierPath.addCurve(to: CGPoint(x: 0, y: height - 15), controlPoint1: CGPoint(x: 8, y: height), controlPoint2: CGPoint(x: 0, y: height - 8))
67 | bezierPath.addLine(to: CGPoint(x: 0, y: 15))
68 | bezierPath.addCurve(to: CGPoint(x: 15, y: 0), controlPoint1: CGPoint(x: 0, y: 8), controlPoint2: CGPoint(x: 8, y: 0))
69 | bezierPath.addLine(to: CGPoint(x: width - 20, y: 0))
70 | bezierPath.addCurve(to: CGPoint(x: width - 5, y: 15), controlPoint1: CGPoint(x: width - 12, y: 0), controlPoint2: CGPoint(x: width - 5, y: 8))
71 | bezierPath.addLine(to: CGPoint(x: width - 5, y: height - 12))
72 | bezierPath.addCurve(to: CGPoint(x: width, y: height), controlPoint1: CGPoint(x: width - 5, y: height - 1), controlPoint2: CGPoint(x: width, y: height))
73 | bezierPath.addLine(to: CGPoint(x: width + 1, y: height))
74 | bezierPath.addCurve(to: CGPoint(x: width - 12, y: height - 4), controlPoint1: CGPoint(x: width - 4, y: height + 1), controlPoint2: CGPoint(x: width - 8, y: height - 1))
75 | bezierPath.addCurve(to: CGPoint(x: width - 20, y: height), controlPoint1: CGPoint(x: width - 15, y: height), controlPoint2: CGPoint(x: width - 20, y: height))
76 | }
77 | return Path(bezierPath.cgPath)
78 | #endif
79 |
80 | }
81 | }
82 |
83 | #if os(macOS)
extension NSBezierPath {

    /// Core Graphics representation of this AppKit path, built by replaying
    /// each path element onto a `CGMutablePath`.
    var cgPath: CGPath {
        let path = CGMutablePath()
        var points = [CGPoint](repeating: .zero, count: 3)
        for index in 0 ..< elementCount {
            let type = element(at: index, associatedPoints: &points)
            switch type {
            case .moveTo:
                path.move(to: points[0])
            case .lineTo:
                path.addLine(to: points[0])
            case .curveTo:
                path.addCurve(to: points[2], control1: points[0], control2: points[1])
            case .closePath:
                path.closeSubpath()
            @unknown default:
                fatalError("Unknown element \(type)")
            }
        }
        return path
    }

}
103 | #endif
104 |
extension View {
    /// Wraps the view in the iMessage-style `Bubble` modifier.
    func bubbleStyle(isMyMessage: Bool, type: MessageType = .text) -> some View {
        self.modifier(Bubble(isMyMessage: isMyMessage, type: type))
    }
}
110 |
/// Styles content as a chat bubble: blue/white for the sender, gray for
/// replies; text bubbles get padding and a background, image bubbles are
/// only clipped to the bubble shape.
struct Bubble: ViewModifier {

    // True for the user's own (right-tailed, blue) bubble.
    var isMyMessage: Bool

    // Message kind; image bubbles skip the text padding/background.
    var type: MessageType = .text

    func body(content: Content) -> some View {
        switch type {
        case .text, .error:
            if isMyMessage {
                content
                    .padding([.leading, .trailing])
                    .padding(.vertical, 4)
                    .background(Color(.systemBlue))
                #if os(iOS)
                    .contentShape(.contextMenuPreview, BubbleShape(myMessage: true))
                #endif
                    .clipShape(BubbleShape(myMessage: true))
                    .foregroundColor(.white)
            } else {
                content
                    .padding([.leading, .trailing])
                    .padding(.vertical, 4)
                    .background(replyBackgroundColor)
                #if os(iOS)
                    .contentShape(.contextMenuPreview, BubbleShape(myMessage: false))
                #endif
                    .clipShape(BubbleShape(myMessage: false))
                    .foregroundColor(.primary)
            }
        case .image, .imageData:
            if isMyMessage {
                content
                    .background(.clear)
                #if os(iOS)
                    .contentShape(.contextMenuPreview, BubbleShape(myMessage: true))
                #endif
                    .clipShape(BubbleShape(myMessage: true))
                    .foregroundColor(.white)
            } else {
                content
                    .background(replyBackgroundColor)
                #if os(iOS)
                    .contentShape(.contextMenuPreview, BubbleShape(myMessage: false))
                #endif
                    .clipShape(BubbleShape(myMessage: false))
                    .foregroundColor(.primary)
            }
        }

    }

    // Light gray in light mode, dark gray in dark mode.
    private var replyBackgroundColor: Color {
        colorScheme == .light ? Color(hexadecimal: "#e9e9eb") : Color(hexadecimal: "#262529")
    }

    @Environment(\.colorScheme) var colorScheme
}
169 |
170 |
171 |
/// Previews of short and long bubbles for both sender and reply styles.
struct MessageBubble_Previews: PreviewProvider {
    static var previews: some View {
        VStack {
            HStack {
                Spacer()
                Text("He has gone")
                    .bubbleStyle(isMyMessage: true)
            }
            .padding(.leading, 55).padding(.vertical, 10)

            HStack {
                Spacer()
                Text("Here’s to the crazy ones, the misfits, the rebels, the troublemakers, the round pegs in the square holes… the ones who see things differently — they’re not fond of rules…")
                    .bubbleStyle(isMyMessage: true)
            }
            .padding(.leading, 55).padding(.vertical, 10)



            HStack {
                Text("You can quote them, disagree with them, glorify or vilify them, but the only thing you can’t do is ignore them because they change things…")
                    .bubbleStyle(isMyMessage: false)
                Spacer()
            }
            .padding(.trailing, 55).padding(.vertical, 10)

            HStack {
                Text("You can…")
                    .bubbleStyle(isMyMessage: false)
                Spacer()
            }
            .padding(.trailing, 55).padding(.vertical, 10)


        }.padding(.horizontal, 15)
    }
}
209 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/MessageMarkdownView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessageMarkdownView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/7.
6 | //
7 |
8 | import MarkdownUI
9 | import Splash
10 | import SwiftUI
11 |
/// Renders a reply as markdown with Splash syntax highlighting for code
/// blocks and async loading for embedded images.
struct MessageMarkdownView: View {
    @Environment(\.colorScheme) private var colorScheme

    /// Raw markdown to render.
    var text: String

    var body: some View {
        Markdown(MarkdownContent(text))
            .markdownCodeSyntaxHighlighter(.splash(theme: theme))
            .markdownImageProvider(.webImage)
            .textSelection(.enabled)
    }

    /// Splash highlighting theme matched to the current appearance.
    /// (The Splash theme font is ignored by the highlighter.)
    private var theme: Splash.Theme {
        colorScheme == .dark
            ? .wwdc17(withFont: .init(size: 16))
            : .sunset(withFont: .init(size: 16))
    }
}
34 |
35 |
36 | // MARK: - WebImageProvider
37 |
struct WebImageProvider: ImageProvider {
    /// Loads the markdown image at `url` asynchronously, showing a spinner
    /// while it downloads and shrinking oversized images to the container width.
    func makeImage(url: URL?) -> some View {
        let remoteImage = AsyncImage(url: url) { loaded in
            loaded.resizable()
        } placeholder: {
            ProgressView()
        }
        return ResizeToFit {
            remoteImage
        }
    }
}
50 |
extension ImageProvider where Self == WebImageProvider {
    /// Convenience accessor so call sites can write
    /// `.markdownImageProvider(.webImage)`.
    static var webImage: Self {
        WebImageProvider()
    }
}
56 |
57 | // MARK: - ResizeToFit
58 |
/// A layout that resizes its content to fit the container **only** if the
/// content width is greater than the container width.
struct ResizeToFit: Layout {
    /// Returns the subview's ideal size, scaled down proportionally when its
    /// ideal width exceeds the proposed container width. Never scales up.
    func sizeThatFits(proposal: ProposedViewSize, subviews: Subviews, cache: inout ()) -> CGSize {
        guard let view = subviews.first else {
            return .zero
        }

        var size = view.sizeThatFits(.unspecified)

        if let width = proposal.width, size.width > width, size.width > 0 {
            // Scale both dimensions by the same factor. Multiplying the height
            // (instead of dividing by the aspect ratio, as before) avoids a
            // division by zero when the subview reports a zero ideal height.
            let scale = width / size.width
            size.height *= scale
            size.width = width
        }
        return size
    }

    /// Places the single subview at the container's origin at the fitted size.
    func placeSubviews(
        in bounds: CGRect, proposal: ProposedViewSize, subviews: Subviews, cache: inout ()
    ) {
        guard let view = subviews.first else { return }
        view.place(at: bounds.origin, proposal: .init(bounds.size))
    }
}
83 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/MessageList/TextMessageView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TextMessageView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/6.
6 | //
7 |
8 | import SwiftUI
9 |
struct TextMessageView: View {

    /// Message text to render.
    var text: String
    /// True while the reply is still streaming; markdown rendering is
    /// deferred until the message is complete.
    var isReplying: Bool

    var body: some View {
        if text.isEmpty {
            EmptyView()
        } else if isReplying || !AppConfiguration.shared.isMarkdownEnabled {
            // Plain text while streaming, or when markdown is disabled.
            Text(text)
                .textSelection(.enabled)
        } else {
            MessageMarkdownView(text: text)
                .textSelection(.enabled)
        }
    }
}
29 |
struct TextMessageView_Previews: PreviewProvider {
    /// A short, completed message (plain-text path).
    static var previews: some View {
        TextMessageView(
            text: "Test",
            isReplying: false
        )
    }
}
35 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/Setting/AppSettingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SettingsView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/7.
6 | //
7 |
8 | import SwiftUI
9 |
10 |
/// Backend services available for AI features. The raw value is persisted via
/// `@AppStorage` (see `AppConfiguration.preferredText2ImageService`), so the
/// case names must remain stable.
enum AIService: String, CaseIterable {
    case openAI
    case huggingFace
}
15 |
/// App-wide user preferences persisted to `UserDefaults` via `@AppStorage`.
/// Use the `shared` singleton. The storage keys below are part of the app's
/// persisted state; do not rename them.
class AppConfiguration: ObservableObject {

    static let shared = AppConfiguration()

    // OpenAI API key.
    @AppStorage("configuration.key") var key = ""

    // Selected OpenAI language model. When the new model does not support the
    // current `mode`, snap `mode` to the model's first supported mode.
    @AppStorage("configuration.model") var model: OpenAIModelType = .chatgpt {
        didSet {
            if !model.supportedModes.contains(mode) {
                mode = model.supportedModes.first!
            }
        }
    }

    // DALL·E output resolution.
    @AppStorage("configuration.image.size") var imageSize: ImageGeneration.Size = .middle


    @AppStorage("configuration.mode") var mode: Mode = .chat

    @AppStorage("configuration.isReplySuggestionsEnabled") var isReplySuggestionsEnabled = true

    @AppStorage("configuration.isSmartModeEnabled") var isSmartModeEnabled = false

    // Sampling temperature passed to OpenAI (0...2; see OpenAISettingsView's slider).
    @AppStorage("configuration.temperature") var temperature: Double = 0.8

    @AppStorage("configuration.systemPrompt") var systemPrompt: String = "You are a helpful assistant"

    // When true, completed assistant replies are rendered as markdown.
    @AppStorage("configuration.isMarkdownEnabled") var isMarkdownEnabled: Bool = false

    // Which backend fulfils text-to-image requests.
    @AppStorage("configuration.preferredText2ImageService") var preferredText2ImageService: AIService = .openAI

}
48 |
/// App-level settings screen: general toggles plus navigation links into the
/// per-service (OpenAI / HuggingFace) and prompt settings screens.
/// `configuration` is injected so previews can supply their own instance.
///
/// Cleanup: removed the private, never-called `updateModes(_:)` helper and the
/// unused `presentationMode`/`dismiss` environment properties (environment
/// wrappers are not part of the memberwise initializer, so callers are unaffected).
struct AppSettingsView: View {

    @ObservedObject var configuration: AppConfiguration

    // NOTE(review): selectedModel/selectedMode (and models/modes) are written
    // in onAppear but never read. They are kept because removing @State
    // properties would change the struct's memberwise initializer.
    @State private var selectedModel = OpenAIModelType.chatgpt
    @State var models: [OpenAIModelType] = OpenAIModelType.chatModels

    @State private var selectedMode = Mode.chat
    @State var modes = OpenAIModelType.chatgpt.supportedModes

    @State var showAPIKey = false

    var body: some View {
        Form {
            Section("General") {
                HStack {
                    Image(systemName: "text.bubble.fill")
                        .renderingMode(.original)
                    Toggle("Markdown Enabled", isOn: $configuration.isMarkdownEnabled)
                    Spacer()
                }
                HStack {
                    Image(systemName: "paintpalette.fill")
                    Text("Text2Image")
                        .fixedSize()
                    Spacer()
                    // The picker's own label is hidden; the Text above is the
                    // visible row label.
                    Picker("Text2Image", selection: configuration.$preferredText2ImageService) {
                        ForEach(AIService.allCases, id: \.self) { service in
                            Text(service.rawValue.capitalizingFirstLetter())
                        }
                    }
                    .labelsHidden()
                }
            }
            Section("Model") {
                NavigationLink {
                    OpenAISettingsView()
                } label: {
                    HStack {
                        Image("openai")
                            .resizable()
                            .frame(width: 30, height: 30)
                        Text("OpenAI")
                    }
                }
                NavigationLink {
                    HuggingFaceSettingsView()
                } label: {
                    HStack {
                        Image("huggingface")
                            .resizable()
                            .frame(width: 30, height: 30)
                        Text("HuggingFace")
                    }
                }
            }
            Section("Prompt") {
                NavigationLink {
                    PromptsListView()
                } label: {
                    HStack {
                        Image(systemName: "arrow.clockwise")
                        Text("Sync Prompts")
                    }
                }
                NavigationLink {
                    CustomPromptsView()
                } label: {
                    HStack {
                        Image(systemName: "person.fill")
                        Text("Custom Prompts")
                    }
                }
            }
        }
        .onAppear() {
            self.selectedModel = configuration.model
            self.selectedMode = configuration.mode
        }
        .navigationTitle("Settings")
    }
}
141 |
142 |
struct AppSettingsView_Previews: PreviewProvider {
    /// Settings screen wrapped in a navigation stack, with a fresh
    /// (non-shared) configuration instance.
    static var previews: some View {
        NavigationStack {
            AppSettingsView(
                configuration: AppConfiguration()
            )
        }
    }
}
150 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/Setting/CustomPromptsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CustomPromptsView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/31.
6 | //
7 |
8 | import SwiftUI
9 |
/// Management screen for user-defined prompts: list/delete existing ones and
/// add new ones via a sheet. The layout differs substantially between iOS
/// (navigation + swipe-to-delete) and macOS (inline list with expand/delete
/// buttons).
struct CustomPromptsView: View {

    // Controls presentation of the "add prompt" sheet.
    @State var showAddPromptView = false
    @ObservedObject var manager = PromptManager.shared

    // Draft fields for the prompt being composed in the sheet.
    @State var name: String = ""

    @State var prompt: String = ""

    var body: some View {
        contenView()
        #if os(iOS)
        .navigationTitle("Custom Prompts")
        .toolbar {
            ToolbarItem {
                Button {
                    showAddPromptView = true
                } label: {
                    Image(systemName: "plus")
                }
            }
        }
        #endif
        .sheet(isPresented: $showAddPromptView) {
            #if os(iOS)
            NavigationStack {
                editingPromptView
                    .navigationTitle("Add Prompt")
                    .navigationBarTitleDisplayMode(.inline)
                    .toolbar {
                        ToolbarItem {
                            Button {
                                showAddPromptView = false
                            } label: {
                                Text("Cancel")
                            }
                        }
                    }
            }
            #else
            editingPromptView
            #endif
        }
    }

    /// Platform-specific main content: empty-state/list on iOS, list with
    /// inline add/delete/detail controls on macOS.
    /// NOTE(review): name is a typo for `contentView`; left as-is because the
    /// method is not private and may be referenced elsewhere.
    @ViewBuilder
    func contenView() -> some View {
        #if os(iOS)
        if manager.customPrompts.isEmpty {
            // Empty-state placeholder.
            VStack {
                Spacer()
                Image(systemName: "tray")
                    .font(.system(size: 50))
                    .padding()
                    .foregroundColor(.secondary)
                Text("No Prompts")
                    .font(.title3)
                    .bold()
                Spacer()
            }
        } else {
            List {
                ForEach(manager.customPrompts) { prompt in
                    NavigationLink {
                        PromptDetailView(prompt: prompt)
                    } label: {
                        Text(prompt.act)
                    }
                }
                .onDelete { indexSet in
                    withAnimation {
                        manager.removeCustomPrompts(atOffsets: indexSet)
                    }
                }
            }
        }
        #else
        VStack(alignment: .leading) {
            Button {
                showAddPromptView = true
            } label: {
                Text("Add Prompt")
            }
            List {
                Section {
                    ForEach(manager.customPrompts) { prompt in
                        VStack {
                            HStack {
                                Text(prompt.act)
                                Spacer()
                                Button {
                                    manager.removeCustomPrompt(prompt)
                                } label: {
                                    Image(systemName: "trash.circle")
                                }
                                .buttonStyle(.borderless)
                                // Toggles the inline detail row below.
                                Button {
                                    if selectedPrompt == prompt {
                                        selectedPrompt = nil
                                    } else {
                                        selectedPrompt = prompt
                                    }
                                } label: {
                                    if selectedPrompt == prompt {
                                        Image(systemName: "arrowtriangle.up.circle")
                                    } else {
                                        Image(systemName: "info.circle")
                                    }
                                }
                                .buttonStyle(.borderless)
                            }
                            if selectedPrompt == prompt {
                                PromptDetailView(prompt: prompt)
                                    .frame(maxWidth: .infinity)
                                    .background(Color.systemBackground)
                            }
                        }
                    }
                }
            }
            .listStyle(.bordered(alternatesRowBackgrounds: false))
        }
        .padding()
        #endif
    }

    // Prompt whose inline detail row is currently expanded (macOS list only).
    @State var selectedPrompt: Prompt?

    /// Form for composing a new prompt; shown inside the sheet.
    var editingPromptView: some View {
        #if os(iOS)
        Form {
            Section {
                HStack {
                    Text("Name")
                        .bold()
                    Spacer()
                    TextField("Type a shortcut name", text: $name)
                }
                HStack(alignment: .top) {
                    Text("Prompt")
                        .bold()
                    Spacer()
                    TextField("Type a prompt", text: $prompt, axis: .vertical)
                        .lineLimit(1...30)
                }
            }
            Section {
                Button {
                    showAddPromptView = false
                    addPrompt()
                } label: {
                    HStack {
                        Spacer()
                        Text("Confirm")
                        Spacer()
                    }
                }
                .disabled(name.isEmpty || prompt.isEmpty)
            }
        }
        #else
        VStack {
            HStack {
                HStack {
                    Spacer()
                    Text("Name:")
                }
                .width(60)
                Spacer()
                TextField("Type a shortcut name", text: $name)
                    .textFieldStyle(.roundedBorder)
            }
            HStack(alignment: .top) {
                HStack {
                    Spacer()
                    Text("Prompt:")
                }
                .width(60)
                Spacer()
                TextEditor(text: $prompt)
                    .border(Color.gray.opacity(0.1), width: 1)
            }
            Spacer()
            Button {
                showAddPromptView = false
                addPrompt()
            } label: {
                HStack {
                    Spacer()
                    Text("Confirm")
                    Spacer()
                }
            }
            .disabled(name.isEmpty || prompt.isEmpty)
            Button(role: .cancel) {
                showAddPromptView = false
            } label: {
                HStack {
                    Spacer()
                    Text("Cancel")
                    Spacer()
                }
            }
        }
        .minHeight(300)
        .minWidth(400)
        .padding()
        #endif

    }


    /// Persists the drafted prompt via `PromptManager`, deriving the slash
    /// command from the name. No-op when either field is empty.
    func addPrompt() {
        guard !name.isEmpty && !prompt.isEmpty else {
            return
        }
        withAnimation {
            manager.addCustomPrompt(.init(cmd: name.convertToSnakeCase(), act: name, prompt: prompt, tags: []))
        }
    }

}
232 |
struct CustomPromptsView_Previews: PreviewProvider {
    /// Custom-prompt management screen backed by the shared manager.
    static var previews: some View { CustomPromptsView() }
}
238 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/Setting/HuggingFaceSettingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HuggingFaceSettingsView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/6.
6 | //
7 |
8 | import SwiftUI
9 |
/// Settings screen for the HuggingFace text-to-image backend: model picker
/// plus API-key entry with a show/hide toggle. The body is selected per
/// platform at compile time.
struct HuggingFaceSettingsView: View {

    @ObservedObject var configuration = HuggingFaceConfiguration.shared

    // Whether the API key is shown in clear text (TextField) or masked (SecureField).
    @State var showAPIKey = false

    var body: some View {
        #if os(macOS)
        macOS
        #else
        iOS
        #endif
    }

    /// macOS layout: grouped boxes inside a scroll view.
    var macOS: some View {
        ScrollView {
            VStack(alignment: .leading) {
                Text("Text2Image")
                    .bold()
                GroupBox {
                    HStack {
                        Picker("Model", selection: configuration.$text2ImageModelPath) {
                            ForEach(HuggingFace.text2ImageModels, id: \.path) { model in
                                // dropFirst() strips the path's leading character
                                // for display.
                                Text(model.path.dropFirst())
                            }
                        }
                    }
                    .padding()
                }
                .padding(.bottom)
                GroupBox {
                    HStack {
                        Image(systemName: "key")
                        if showAPIKey {
                            TextField("", text: configuration.$key)
                                .textFieldStyle(.roundedBorder)
                        } else {
                            SecureField("", text: configuration.$key)
                                .textFieldStyle(.roundedBorder)
                        }
                        Button {
                            showAPIKey.toggle()
                        } label: {
                            if showAPIKey {
                                Image(systemName: "eye.slash")
                            } else {
                                Image(systemName: "eye")
                            }
                        }
                        .buttonStyle(.borderless)

                    }
                    .padding()
                }
                HStack {
                    Spacer()
                    Link("HuggingFace", destination: URL(string: "https://huggingface.co/")!)
                }
                Spacer()
            }
            .padding()
        }
    }

    /// iOS layout: a single Form section with the same controls.
    var iOS: some View {
        Form {
            Section {
                HStack {
                    Text("Model")
                        .fixedSize()
                    Spacer()
                    Picker("Model", selection: configuration.$text2ImageModelPath) {
                        ForEach(HuggingFace.text2ImageModels, id: \.path) { model in
                            Text(model.path.dropFirst())
                        }
                    }
                    .labelsHidden()
                }
                HStack {
                    Image(systemName: "key")
                    if showAPIKey {
                        TextField("", text: configuration.$key)
                            .truncationMode(.middle)
                    } else {
                        SecureField("", text: configuration.$key)
                            .truncationMode(.middle)
                    }
                    Button {
                        showAPIKey.toggle()
                    } label: {
                        if showAPIKey {
                            Image(systemName: "eye.slash")
                        } else {
                            Image(systemName: "eye")
                        }
                    }
                }
            } header: {
                Text("Text2Image")
            }
        }
        .navigationTitle("HuggingFace")
    }
}
114 |
struct HuggingFaceSettingsView_Previews: PreviewProvider {
    /// HuggingFace settings screen previewed outside any navigation stack.
    static var previews: some View { HuggingFaceSettingsView() }
}
120 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/Setting/MacOSSettingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MacOSSettingsView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/3.
6 | //
7 |
8 | #if os(macOS)
9 |
10 | import SwiftUI
11 |
/// Root of the macOS Settings window: three tabs (General / Model / Prompt).
struct MacOSSettingsView: View {
    var body: some View {
        TabView {
            GeneralSettingsView()
                .tabItem {
                    Label("General", systemImage: "gear")
                }

            ModelSettingsView()
                .tabItem {
                    Label("Model", systemImage: "brain.head.profile")
                }
            PromptSettingsView()
                .tabItem {
                    Label("Prompt", systemImage: "text.book.closed")
                }
        }
        // Keeps the settings window from collapsing below a usable size.
        .frame(minWidth: 700, minHeight: 400)
    }
}
32 |
33 |
/// "General" tab of the macOS settings window.
/// Layout trick: the trailing label column is drawn in a second, manually
/// offset stack overlaid via ZStack on the controls column.
struct GeneralSettingsView: View {

    @StateObject var configuration = AppConfiguration.shared

    var body: some View {
        ZStack {
            VStack(alignment: .leading) {
                Toggle("Markdown Enabled", isOn: configuration.$isMarkdownEnabled)
                    .height(20)
                // NOTE(review): this Picker is created without a label argument;
                // confirm a project extension supplies such an initializer.
                Picker(selection: configuration.$preferredText2ImageService) {
                    ForEach(AIService.allCases, id: \.self) {
                        Text($0.rawValue.capitalizingFirstLetter())
                    }
                }
                .frame(width: 180, height: 30)
                Spacer()
            }
            .padding(.top)
            HStack {
                Spacer()
                VStack(alignment: .trailing) {
                    // Empty text reserves the toggle row's height so the
                    // "Text2Image:" label lines up with the picker below it.
                    Text("")
                        .height(20)
                    Text("Text2Image:")
                        .height(30)
                    Spacer()
                }
                // Hard-coded offset positions the label column; fragile if
                // the content width changes.
                .offset(x: -295)
            }
            .frame(width: 400)
            .padding(.top)
        }
    }
}
68 |
69 |
/// "Model" tab: sidebar list of AI services with a detail pane per service.
struct ModelSettingsView: View {


    /// Sidebar entries. The lowercased raw value doubles as the asset-catalog
    /// name for the service icon.
    enum Item: String, CaseIterable, Identifiable, Hashable {
        case openAI
        case huggingFace

        var id: String { rawValue }

        @ViewBuilder
        var destination: some View {
            switch self {
            case .openAI:
                OpenAISettingsView()
            case .huggingFace:
                HuggingFaceSettingsView()
            }
        }

        var label: some View {
            HStack {
                Image(self.rawValue.lowercased())
                    .resizable()
                    .frame(width: 40, height: 40)
                Text(rawValue.capitalizingFirstLetter())
            }
        }
    }

    @State var selection: Item? = .openAI

    var body: some View {
        NavigationView {
            List(selection: $selection) {
                ForEach(Item.allCases) { item in
                    // NOTE(review): NavigationView and the tag/selection
                    // NavigationLink initializer are deprecated from macOS 13;
                    // consider NavigationSplitView when the deployment target allows.
                    NavigationLink(
                        destination: item.destination,
                        tag: item,
                        selection: $selection,
                        label: {
                            item.label
                        }
                    )
                }
            }
            .listStyle(.sidebar)
        }
    }
}
119 |
/// "Prompt" tab: sidebar switching between synced-prompt and custom-prompt
/// management screens.
struct PromptSettingsView: View {

    /// Sidebar entries.
    enum Item: String, CaseIterable, Identifiable, Hashable {
        case syncPrompts = "syncPrompts"
        case customPrompts = "customPrompts"

        var id: String { rawValue }

        var destination: some View {
            makeDestination()
        }

        // Separate @ViewBuilder function so the switch may produce differing
        // view types while `destination` stays a plain computed property.
        @ViewBuilder
        private func makeDestination() -> some View {
            switch self {
            case .syncPrompts:
                PromptsListView()
                    .padding()
            case .customPrompts:
                CustomPromptsView()
            }
        }

        var label: some View {
            switch self {
            case .syncPrompts:
                return HStack {
                    Image(systemName: "arrow.clockwise")
                    Text("Sync Prompts")
                }
            case .customPrompts:
                return HStack {
                    Image(systemName: "person")
                    Text("Custom Prompts")
                }
            }
        }
    }

    @State var selection: Item? = .syncPrompts

    var body: some View {
        NavigationView {
            List(selection: $selection) {
                ForEach(Item.allCases) { item in
                    NavigationLink(
                        destination: item.destination,
                        tag: item,
                        selection: $selection,
                        label: {
                            item.label
                        }
                    )
                }
            }
            .listStyle(.sidebar)
        }
    }

}
180 |
struct MacOSSettingsView_Previews: PreviewProvider {
    /// Full settings window with all three tabs.
    static var previews: some View { MacOSSettingsView() }
}
186 |
187 | #endif
188 |
189 |
extension String {

    /// Returns the string with only its first character uppercased; the
    /// remainder is left untouched. Empty strings come back unchanged.
    func capitalizingFirstLetter() -> String {
        guard let head = first else { return self }
        return String(head).capitalized + dropFirst()
    }

    /// In-place variant of `capitalizingFirstLetter()`.
    mutating func capitalizeFirstLetter() {
        self = capitalizingFirstLetter()
    }

}
201 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/Setting/OpenAISettingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // OpenAISettingsView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/4/7.
6 | //
7 |
8 | import SwiftUI
9 |
/// Settings screen for the OpenAI backend: language model, temperature,
/// reply-suggestion and smart-mode toggles, DALL·E image size, and API-key
/// entry. Platform-specific bodies are selected at compile time.
struct OpenAISettingsView: View {

    @StateObject var configuration = AppConfiguration.shared

    // Whether the API key is shown in clear text (TextField) or masked (SecureField).
    @State private var showAPIKey = false

    var body: some View {
        #if os(macOS)
        macOS
        #else
        iOS
        #endif
    }

    /// macOS layout: grouped boxes inside a scroll view.
    var macOS: some View {
        ScrollView {
            VStack(alignment: .leading) {
                Text("Language Model")
                    .bold()
                GroupBox {
                    HStack {
                        Text("Model")
                        Spacer()
                        Picker(selection: configuration.$model) {
                            ForEach(OpenAIModelType.chatModels, id: \.self) { model in
                                Text(model.rawValue)
                                    .tag(model)
                            }
                        }
                        .frame(width: 150)
                    }
                    .padding()
                    Divider()
                    VStack {
                        HStack {
                            Text("Temperature")
                            Spacer()
                            // Sampling temperature, clamped to OpenAI's 0...2 range.
                            Slider(value: configuration.$temperature, in: 0...2) {

                            } minimumValueLabel: {
                                Text("0")
                            } maximumValueLabel: {
                                Text("2")
                            }
                            .width(200)
                            Text(String(format: "%.2f", configuration.temperature))
                                .width(30)
                        }
                    }
                    .padding()
                    Divider()
                    VStack(alignment: .leading) {
                        Toggle(isOn: configuration.$isReplySuggestionsEnabled) {
                            HStack {
                                Text("Reply Suggestions")
                                Spacer()
                            }
                        }
                        .toggleStyle(.switch)
                        Text("ChatGPT will generate reply suggestions based on past conversations.")
                            .foregroundColor(.secondaryLabel)
                    }
                    .padding()
                    Divider()
                    VStack(alignment: .leading) {
                        Toggle(isOn: configuration.$isSmartModeEnabled) {
                            HStack {
                                Text("Smart Mode")
                                Spacer()
                            }
                        }
                        .toggleStyle(.switch)
                        Text("ChatGPT will classify your prompt and then select the most appropriate model to handle it.")
                            .foregroundColor(.secondaryLabel)
                    }
                    .padding()
                }
                .padding(.bottom)
                Text("DALL·E")
                    .bold()
                GroupBox {
                    HStack {
                        Text("Image Size")
                        Spacer()
                        Picker(selection: configuration.$imageSize) {
                            ForEach(ImageGeneration.Size.allCases, id: \.self) { model in
                                Text(model.rawValue)
                                    .tag(model)
                            }
                        }
                        .frame(width: 100)
                    }
                    .padding()
                }
                .padding(.bottom)
                GroupBox {
                    HStack {
                        Image(systemName: "key")
                        if showAPIKey {
                            TextField("", text: configuration.$key)
                                .textFieldStyle(.roundedBorder)
                        } else {
                            SecureField("", text: configuration.$key)
                                .textFieldStyle(.roundedBorder)
                        }
                        Button {
                            showAPIKey.toggle()
                        } label: {
                            if showAPIKey {
                                Image(systemName: "eye.slash")
                            } else {
                                Image(systemName: "eye")
                            }
                        }
                        .buttonStyle(.borderless)

                    }
                    .padding()
                }
                HStack {
                    Spacer()
                    Link("OpenAI Documentation", destination: URL(string: "https://platform.openai.com/docs/introduction")!)
                }
                Spacer()
            }
            .padding()
        }

    }


    /// iOS layout: a sectioned Form with the same controls.
    var iOS: some View {
        Form {
            Section {
                HStack {
                    Text("Model")
                        .fixedSize()
                    Spacer()
                    Picker("Model", selection: configuration.$model) {
                        ForEach(OpenAIModelType.chatModels, id: \.self) { model in
                            Text(model.rawValue)
                                .tag(model)
                        }
                    }
                    .labelsHidden()
                }
                VStack {
                    Stepper(value: $configuration.temperature, in: 0...2, step: 0.1) {
                        HStack {
                            Text("Temperature")
                            Spacer()
                            Text(String(format: "%.1f", configuration.temperature))
                                .padding(.horizontal)
                                .height(32)
                                .width(60)
                                .background(Color.secondarySystemFill)
                                .cornerRadius(8)
                        }
                    }
                }
                VStack(alignment: .leading) {
                    Toggle("Reply Suggestions", isOn: configuration.$isReplySuggestionsEnabled)
                }
            } header: {
                Text("Language Model")
            } footer: {
                Text("ChatGPT will generate reply suggestions based on past conversations.")
                    .foregroundColor(.secondaryLabel)
            }

            Section {
                VStack(alignment: .leading) {
                    Toggle("Smart Mode", isOn: configuration.$isSmartModeEnabled)
                }
            } footer: {
                Text("ChatGPT will classify your prompt and then select the most appropriate model to handle it.")
                    .foregroundColor(.secondaryLabel)
            }

            Section("DALL·E") {
                HStack {
                    Text("Image Size")
                        .fixedSize()
                    Spacer()
                    Picker("Model", selection: configuration.$imageSize) {
                        ForEach(ImageGeneration.Size.allCases, id: \.self) { model in
                            Text(model.rawValue)
                                .tag(model)
                        }
                    }
                    .labelsHidden()
                }
            }
            Section {
                HStack {
                    Image(systemName: "key")
                    Spacer()
                    if showAPIKey {
                        TextField("OpenAI API Key", text: $configuration.key)
                            .truncationMode(.middle)
                    } else {
                        SecureField("OpenAI API Key", text: $configuration.key)
                            .truncationMode(.middle)
                    }
                    Button {
                        showAPIKey.toggle()
                    } label: {
                        if showAPIKey {
                            Image(systemName: "eye.slash")
                        } else {
                            Image(systemName: "eye")
                        }
                    }
                }
            }
        }
        .navigationTitle("OpenAI")
    }
}
229 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/Setting/PromptsListView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PromptsListView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/31.
6 | //
7 |
8 | import SwiftUI
9 |
/// Lists prompts synced from a remote source and lets the user re-sync.
/// macOS shows an inline expanding detail row; iOS pushes a detail screen.
struct PromptsListView: View {

    @ObservedObject var manager = PromptManager.shared

    // Prompt whose inline detail row is currently expanded (macOS only).
    @State var selectedPrompt: Prompt?

    var body: some View {
        #if os(macOS)
        VStack(alignment: .leading) {
            HStack {
                Text("Source")
                TextField("", text: manager.$promptSource)
                    .textFieldStyle(.roundedBorder)
                    .truncationMode(.middle)
                Button {
                    manager.sync()
                } label: {
                    HStack {
                        Text("Sync")
                    }
                }
                .disabled(manager.isSyncing)
            }
            Text(manager.isSyncing ? "Updating..." : manager.lastSyncAt.dateDesc)
                .foregroundColor(.secondaryLabel)
            List {
                Section {
                    // Alphabetical by display name.
                    ForEach(manager.syncedPrompts.sorted(by: {
                        $0.act < $1.act
                    })) { prompt in
                        VStack {
                            HStack {
                                Text(prompt.act)
                                Spacer()
                                // Toggles the inline detail row below.
                                Button {
                                    if selectedPrompt == prompt {
                                        selectedPrompt = nil
                                    } else {
                                        selectedPrompt = prompt
                                    }
                                } label: {
                                    if selectedPrompt == prompt {
                                        Image(systemName: "arrowtriangle.up.circle")
                                    } else {
                                        Image(systemName: "info.circle")
                                    }
                                }
                                .buttonStyle(.borderless)
                            }
                            if selectedPrompt == prompt {
                                PromptDetailView(prompt: prompt)
                                    .frame(maxWidth: .infinity)
                                    .background(Color.systemBackground)
                            }
                        }
                    }
                }
            }
            .listStyle(.bordered(alternatesRowBackgrounds: false))
        }
        #else
        List {
            // NOTE(review): Section(header:footer:) is invoked here with plain
            // strings; confirm a project extension provides that initializer.
            Section(header: "", footer: manager.isSyncing ? "Updating..." : manager.lastSyncAt.dateDesc) {
                HStack {
                    Text("Source")
                    TextField("", text: manager.$promptSource)
                        .truncationMode(.middle)
                        .foregroundColor(Color.secondaryLabel)
                }
                Button {
                    manager.sync()
                } label: {
                    HStack {
                        Text("Sync")
                        if manager.isSyncing {
                            Spacer()
                            ProgressView()
                        }
                    }
                }
                .disabled(manager.isSyncing)
            }

            Section {
                ForEach(manager.syncedPrompts.sorted(by: {
                    $0.act < $1.act
                })) { prompt in
                    NavigationLink {
                        PromptDetailView(prompt: prompt)
                    } label: {
                        Text(prompt.act)
                    }

                }
            }
        }
        .navigationTitle("Prompts")
        #endif
    }
}
110 |
/// Read-only detail for a single prompt: its slash-command and full text.
struct PromptDetailView: View {

    let prompt: Prompt

    var body: some View {
        #if os(iOS)
        Form {
            Section {
                HStack {
                    Image(systemName: "terminal.fill")
                    // Slash-command form of the prompt, e.g. "/translator".
                    Text("/\(prompt.cmd)")
                }

            }
            Section("Prompt") {
                Text(prompt.prompt)
                    .textSelection(.enabled)
            }
        }
        .navigationTitle(prompt.act)
        #else
        Form {
            Section {
                HStack {
                    Image(systemName: "terminal.fill")
                    Text("/\(prompt.cmd)")
                        .textSelection(.enabled)
                    Spacer()
                }
            }
            .padding(.bottom)
            Section {
                Text(prompt.prompt)
                    .textSelection(.enabled)
            }
        }
        .padding()
        #endif
    }

}
152 |
struct PromptsListView_Previews: PreviewProvider {
    /// Synced-prompts list backed by the shared manager.
    static var previews: some View { PromptsListView() }
}
158 |
extension TimeInterval {

    /// Interprets this value as seconds since the Unix epoch.
    var date: Date {
        Date(timeIntervalSince1970: self)
    }

    /// Human-readable "last synced" description.
    /// NOTE(review): the `.distantPast` comparison only fires when the stored
    /// value is exactly `Date.distantPast.timeIntervalSince1970`; a default of
    /// 0 would render as a 1970 timestamp instead of "Never" — confirm the
    /// sentinel that `PromptManager.lastSyncAt` actually uses.
    var dateDesc: String {
        if date == .distantPast {
            return String(localized: "Never")
        }
        return String(localized: "Last updated on \(date.dateTimeString())")
    }

}
173 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/UI/ReplyingIndicatorView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ReplyingIndicatorView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/3.
6 | //
7 |
8 | import SwiftUI
9 |
/// Three pulsing dots shown while the assistant is composing a reply.
///
/// Fix: the previous version rescheduled `performAnimation()` on the main
/// queue unconditionally every 1.2 s, so the loop kept running (and mutating
/// state) forever after the view left the hierarchy. The loop is now gated by
/// `isAnimating`, which `onDisappear` clears.
struct ReplyingIndicatorView: View {

    @State private var showLeftDot = false
    @State private var showMiddleDot = false
    @State private var showRightDot = false
    // Gate for the self-rescheduling animation loop.
    @State private var isAnimating = false

    var body: some View {
        HStack {
            Circle()
                .opacity(showLeftDot ? 1 : 0)
            Circle()
                .opacity(showMiddleDot ? 1 : 0)
            Circle()
                .opacity(showRightDot ? 1 : 0)
        }
        .foregroundColor(.gray.opacity(0.5))
        .onAppear {
            isAnimating = true
            performAnimation()
        }
        .onDisappear {
            isAnimating = false
        }
    }

    /// Runs one 1.2 s left→middle→right fade cycle, then reschedules itself
    /// while `isAnimating` remains true.
    func performAnimation() {
        guard isAnimating else { return }
        let animation = Animation.easeInOut(duration: 0.4)
        withAnimation(animation) {
            showLeftDot = true
            showRightDot = false
        }

        DispatchQueue.main.asyncAfter(deadline: .now() + 0.4) {
            withAnimation(animation) {
                self.showMiddleDot = true
                self.showLeftDot = false
            }
        }

        DispatchQueue.main.asyncAfter(deadline: .now() + 0.8) {
            withAnimation(animation) {
                self.showMiddleDot = false
                self.showRightDot = true
            }
        }

        DispatchQueue.main.asyncAfter(deadline: .now() + 1.2) {
            self.performAnimation()
        }
    }
}
55 |
struct DotLoadingView_Previews: PreviewProvider {
    /// The three-dot "replying" indicator in isolation.
    static var previews: some View { ReplyingIndicatorView() }
}
61 |
62 |
--------------------------------------------------------------------------------
/ChatGPT/Class/View/UI/ToolTipView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ToolTipView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/30.
6 | //
7 |
8 | #if os(macOS)
9 | import SwiftUI
10 | import AppKit
11 |
extension View {
    /// Attaches a native AppKit tooltip with the given text to this view.
    func toolTip(_ tip: String) -> some View {
        background(
            GeometryReader { proxy in
                TooltipView(tip, geometry: proxy) { self }
            }
        )
    }
}
21 |
/// Sizes the AppKit-backed tooltip host to match the geometry of the view it
/// decorates.
///
/// Fix: the generic parameter list was missing — `Content` was referenced in
/// the `where` clause and body but never declared (angle brackets apparently
/// lost), so this could not compile. Restored `<Content>`.
private struct TooltipView<Content>: View where Content: View {
    let content: () -> Content
    let tip: String
    let geometry: GeometryProxy

    init(_ tip: String, geometry: GeometryProxy, @ViewBuilder content: @escaping () -> Content) {
        self.content = content
        self.tip = tip
        self.geometry = geometry
    }

    var body: some View {
        ToolTip(tip, content: content)
            .frame(width: geometry.size.width, height: geometry.size.height)
    }
}
38 |
/// Hosts SwiftUI `content` in an `NSHostingView` so AppKit's native
/// `toolTip` property can be set on it.
///
/// Fix: restored the stripped generics — `<Content: View>` on the struct and
/// `<Content>` on `NSHostingView` (the bare `NSHostingView` spelling cannot
/// compile).
private struct ToolTip<Content: View>: NSViewRepresentable {
    typealias NSViewType = NSHostingView<Content>

    init(_ text: String?, @ViewBuilder content: () -> Content) {
        self.text = text
        self.content = content()
    }

    let text: String?
    let content: Content

    func makeNSView(context _: Context) -> NSViewType {
        NSViewType(rootView: content)
    }

    func updateNSView(_ nsView: NSViewType, context _: Context) {
        nsView.rootView = content
        nsView.toolTip = text
    }
}
59 | #endif
60 |
--------------------------------------------------------------------------------
/ChatGPT/Class/ViewModel/Conversation.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Conversation.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/3.
6 | //
7 |
8 | import SwiftUI
9 |
/// Kind of content carried by one side of a conversation turn.
enum MessageType {
    case text
    case image
    case imageData
    case error

    /// True for the two image-carrying variants (`image`, `imageData`).
    var isImage: Bool {
        switch self {
        case .image, .imageData:
            return true
        case .text, .error:
            return false
        }
    }
}
21 |
/// One exchange in a dialogue: the user's input plus the model's reply (text,
/// image URL, raw image data, or an error description).
///
/// Fix: the string literals in `replyImageURL` and `replyImageData` were
/// corrupted (`deletingPrefix(".dropLast())`); reconstructed as
/// `"![Image]("` / `"![ImageData]("` to match the prefixes tested by
/// `inputType`/`replyType` and markdown image syntax `![Image](<payload>)`.
struct Conversation: Identifiable, Codable, Equatable {

    var id = UUID()

    // True while the model is still streaming this reply.
    var isReplying: Bool = false

    var isLast: Bool = false

    /// The user's message. Image content is embedded as markdown:
    /// "![Image](<url>)" or "![ImageData](<base64>)".
    var input: String

    var inputData: Data?

    var reply: String?

    var replyData: Data?

    // Non-nil when the request failed; takes precedence everywhere.
    var errorDesc: String?

    var date = Date()

    /// Short text shown in the dialogue list.
    var preview: String {
        if let errorDesc = errorDesc {
            return errorDesc
        }
        if reply == nil {
            return inputPreview
        }
        if replyType == .image || replyType == .imageData {
            return String(localized: "[Image]")
        }
        return reply ?? ""
    }

    private var inputPreview: String {
        if inputType == .image || inputType == .imageData {
            return String(localized: "[Image]")
        }
        return input
    }

    var inputType: MessageType {
        if inputData != nil {
            return .imageData
        }
        if input.hasPrefix("![Image]") {
            return .image
        } else if input.hasPrefix("![ImageData]") {
            return .imageData
        }
        return .text
    }

    var replyType: MessageType {
        guard errorDesc == nil else {
            return .error
        }
        guard let reply = reply else {
            return .error
        }
        if reply.hasPrefix("![Image]") {
            return .image
        } else if reply.hasPrefix("![ImageData]") {
            return .imageData
        }
        return .text
    }

    /// URL extracted from a "![Image](<url>)" reply, if any.
    var replyImageURL: URL? {
        guard replyType == .image else {
            return nil
        }
        guard let reply = reply else {
            return nil
        }
        // Strip the leading "![Image](" and the trailing ")".
        let path = String(reply.deletingPrefix("![Image](").dropLast())
        return URL(string: path)
    }

    /// Raw image bytes: either stored directly in `replyData`, or decoded from
    /// a "![ImageData](<base64>)" reply.
    var replyImageData: Data? {
        guard replyType == .imageData else {
            return nil
        }
        if let replyData = replyData {
            return replyData
        }
        guard let reply = reply else {
            return nil
        }
        let base64 = String(reply.deletingPrefix("![ImageData](").dropLast())
        return Data(base64Encoded: base64)
    }
}
114 |
115 |
extension String {

    /// Decodes the base64 payload of an `![ImageData](base64)` markdown
    /// string; returns nil when the string is not in that form or the
    /// payload is not valid base64.
    /// Fix: the prefix literal had been garbled in this file; restored to
    /// match the `![ImageData](` convention used by `Conversation`.
    var base64ImageData: Data? {
        guard hasPrefix("![ImageData](") else {
            return nil
        }
        // Drop the leading "![ImageData](" and the trailing ")".
        let base64 = String(self.deletingPrefix("![ImageData](").dropLast())
        return Data(base64Encoded: base64)
    }

}
127 |
--------------------------------------------------------------------------------
/ChatGPT/Class/ViewModel/DialogueSession.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DialogueSession.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/3.
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 | import SwiftUIX
11 | import AudioToolbox
12 |
/// Observable view model for one chat thread. Owns the message history
/// (`conversations`), drives streaming replies through `OpenAIService`,
/// and mirrors state into Core Data via `DialogueData`/`ConversationData`.
class DialogueSession: ObservableObject, Identifiable, Equatable, Hashable, Codable {

    /// Per-dialogue OpenAI settings, persisted with the dialogue. The API
    /// key is deliberately never stored here; it is read from the shared
    /// app configuration on every access.
    struct Configuration: Codable {

        var key: String {
            AppConfiguration.shared.key
        }

        var model: OpenAIModelType = .chatgpt {
            didSet {
                // Switching models can invalidate the current mode; fall
                // back to the first mode the new model supports.
                if !model.supportedModes.contains(mode) {
                    mode = model.supportedModes.first!
                }
            }
        }

        var mode: Mode = .chat

        var temperature: Double = 0.5

        var systemPrompt: String = "You are a helpful assistant"

        init() {
            // Seed each new dialogue from the app-wide defaults.
            model = AppConfiguration.shared.model
            temperature = AppConfiguration.shared.temperature
            systemPrompt = AppConfiguration.shared.systemPrompt
        }

    }

    //MARK: - Codable

    required init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        configuration = try container.decode(Configuration.self, forKey: .configuration)
        conversations = try container.decode([Conversation].self, forKey: .conversations)
        date = try container.decode(Date.self, forKey: .date)
        id = try container.decode(UUID.self, forKey: .id)
        let messages = try container.decode([Message].self, forKey: .messages)

        // Transient UI state is not part of the payload; reset explicitly
        // and rebuild the service with the decoded transcript.
        isReplying = false
        isStreaming = false
        input = ""
        service = OpenAIService(configuration: configuration)
        service.messages = messages
        initFinished = true
    }

    func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        try container.encode(configuration, forKey: .configuration)
        try container.encode(conversations, forKey: .conversations)
        try container.encode(service.messages, forKey: .messages)
        try container.encode(id, forKey: .id)
        try container.encode(date, forKey: .date)
    }

    enum CodingKeys: CodingKey {
        case configuration
        case conversations
        case messages
        case date
        case id
    }

    //MARK: - Hashable, Equatable

    // Identity is by `id` only; content changes do not affect equality.
    static func == (lhs: DialogueSession, rhs: DialogueSession) -> Bool {
        lhs.id == rhs.id
    }

    func hash(into hasher: inout Hasher) {
        hasher.combine(id)
    }

    var id = UUID()

    /// Backing Core Data row; set when the session was loaded from disk.
    var rawData: DialogueData?

    //MARK: - State

    @Published var isReplying: Bool = false
    @Published var isSending: Bool = false
    @Published var bubbleText: String = ""
    @Published var isStreaming: Bool = false
    @Published var input: String = ""
    @Published var inputData: Data?
    @Published var sendingData: Data?
    @Published var title: String = "New Chat"
    @Published var conversations: [Conversation] = [] {
        didSet {
            // Keep the session date in step with the newest message so the
            // dialogue list sorts by most recent activity.
            if let date = conversations.last?.date {
                self.date = date
            }
        }
    }
    @Published var suggestions: [String] = []
    @Published var date = Date()

    // Guards save(): a partially initialized session is never persisted.
    private var initFinished = false
    //MARK: - Properties

    @Published var configuration: Configuration = Configuration() {
        didSet {
            // Propagate new settings to the service and persist them.
            service.configuration = configuration
            save()
        }
    }

    /// Preview text of the newest conversation, for the dialogue list.
    var lastMessage: String {
        return conversations.last?.preview ?? ""
    }

    lazy var service = OpenAIService(configuration: configuration)

    init() {

    }

    //MARK: - Message Actions

    /// Sends the pending user input. When the text field is empty but an
    /// image is attached, the image is sent with a placeholder caption.
    @MainActor
    func send(scroll: ((UnitPoint) -> Void)? = nil) async {
        if input.isEmpty, let inputData = inputData {
            sendingData = inputData
            self.inputData = nil
            await send(text: "An image", data: sendingData, scroll: scroll)
        } else {
            let text = input
            input = ""
            await send(text: text, scroll: scroll)
        }
    }

    /// Clears the service transcript and the local conversation/suggestion
    /// state (animated).
    @MainActor
    func clearMessages() {
        service.removeAllMessages()
        title = "Empty"
        withAnimation { [weak self] in
            self?.removeAllConversations()
            self?.suggestions.removeAll()
        }
    }

    /// Retries a conversation: removes it (and its service-side message)
    /// and re-submits the same input.
    @MainActor
    func retry(_ conversation: Conversation, scroll: ((UnitPoint) -> Void)? = nil) async {
        removeConversation(conversation)
        service.messages.removeLast()
        await send(text: conversation.input, data: conversation.inputData, isRetry: true, scroll: scroll)
    }

    // Core Data row backing the conversation currently being streamed.
    private var lastConversationData: ConversationData?

    /// Core send path: appends a placeholder conversation, streams the
    /// reply into it chunk by chunk, then persists the final state.
    /// NOTE: statement order is load-bearing for the streaming UI —
    /// do not reorder.
    @MainActor
    private func send(text: String, data: Data? = nil, isRetry: Bool = false, scroll: ((UnitPoint) -> Void)? = nil) async {
        var streamText = ""
        var conversation = Conversation(
            isReplying: true,
            isLast: true,
            input: text,
            inputData: data,
            reply: "",
            errorDesc: nil)

        // Only the newest conversation carries the `isLast` flag.
        if conversations.count > 0 {
            conversations[conversations.endIndex-1].isLast = false
        }

        if isRetry {
            suggestions.removeAll()
            isReplying = true
            lastConversationData = appendConversation(conversation)
        } else {
            withAnimation(.easeInOut(duration: 0.25)) {
                suggestions.removeAll()
                isReplying = true
                lastConversationData = appendConversation(conversation)
                scroll?(.bottom)
            }
        }

        // System sound cue 1004 — presumably the "sent" chime; confirm on device.
        AudioServicesPlaySystemSound(1004)

        do {
            // Brief pause so the send animation completes before streaming.
            try await Task.sleep(for: .milliseconds(260))
            isSending = false
            bubbleText = ""
            sendingData = nil
            #if os(iOS)
            withAnimation {
                scroll?(.top)
                scroll?(.bottom)
            }
            #else
            scroll?(.top)
            scroll?(.bottom)
            #endif
            let stream = try await service.sendMessage(text, data: data)
            isStreaming = true
            // System sound cue 1301 — presumably the "received" chime; confirm on device.
            AudioServicesPlaySystemSound(1301)
            for try await text in stream {
                // Accumulate chunks and republish the growing reply.
                streamText += text
                conversation.reply = streamText.trimmingCharacters(in: .whitespacesAndNewlines)
                conversations[conversations.count - 1] = conversation
                #if os(iOS)
                withAnimation {
                    scroll?(.top)///for an issue of iOS 16
                    scroll?(.bottom)
                }
                #else
                scroll?(.top)
                scroll?(.bottom)/// withAnimation may cause scrollview jitter in macOS
                #endif
            }
            // Persist the completed reply before requesting suggestions.
            lastConversationData?.sync(with: conversation)
            isStreaming = false
            createSuggestions(scroll: scroll)
        } catch {
            // Record the failure on the conversation so the UI can offer retry.
            #if os(iOS)
            withAnimation {
                conversation.errorDesc = error.localizedDescription
                lastConversationData?.sync(with: conversation)
                scroll?(.bottom)
            }
            #else
            conversation.errorDesc = error.localizedDescription
            lastConversationData?.sync(with: conversation)
            scroll?(.bottom)
            #endif
        }
        // Runs on both success and failure: finalize flags and save.
        #if os(iOS)
        withAnimation {
            conversation.isReplying = false
            updateLastConversation(conversation)
            isReplying = false
            scroll?(.bottom)
            save()
        }
        #else
        conversation.isReplying = false
        updateLastConversation(conversation)
        isReplying = false
        scroll?(.bottom)
        save()
        #endif

    }

    /// Requests reply suggestions from the service (when enabled in app
    /// settings) and publishes them on the main actor.
    func createSuggestions(scroll: ((UnitPoint) -> Void)? = nil) {
        guard AppConfiguration.shared.isReplySuggestionsEnabled else {
            return
        }
        Task { @MainActor in
            do {
                let suggestions = try await service.createSuggestions()
                print(suggestions)
                #if os(iOS)
                withAnimation {
                    self.suggestions = suggestions
                    scroll?(.bottom)
                }
                #else
                self.suggestions = suggestions
                scroll?(.bottom)
                #endif
            } catch let error {
                // Suggestions are best-effort; failures are only logged.
                print(error)
            }
        }
    }

}
285 |
286 |
extension DialogueSession {

    /// Rehydrates a session from its persisted Core Data row; returns nil
    /// when any mandatory field (id, date, configuration, conversations)
    /// is missing.
    /// Fix: the element type of the `Set` casts had been stripped in this
    /// file; restored to `Set<ConversationData>` so the compactMap below
    /// type-checks.
    convenience init?(rawData: DialogueData) {
        self.init()
        guard let id = rawData.id,
              let date = rawData.date,
              let configurationData = rawData.configuration,
              let conversations = rawData.conversations as? Set<ConversationData> else {
            return nil
        }
        self.rawData = rawData
        self.id = id
        self.date = date
        if let configuration = try? JSONDecoder().decode(Configuration.self, from: configurationData) {
            self.configuration = configuration
        }

        // Map persisted rows to value-type conversations, dropping rows
        // with missing mandatory fields.
        self.conversations = conversations.compactMap { data in
            if let id = data.id,
               let input = data.input,
               let date = data.date {
                let conversation = Conversation(
                    id: id,
                    input: input,
                    inputData: data.inputData,
                    reply: data.reply,
                    replyData: data.replyData,
                    errorDesc: data.errorDesc,
                    date: date
                )
                return conversation
            } else {
                return nil
            }
        }
        // Sets are unordered; restore chronological order.
        self.conversations.sort {
            $0.date < $1.date
        }

        // Rebuild the service-side transcript; image turns are replaced by
        // a textual placeholder.
        self.conversations.forEach {
            self.service.appendNewMessage(
                input: $0.inputType.isImage ? "An image" : $0.input,
                reply: $0.replyType.isImage ? "An image" : $0.reply ?? "")
        }
        if !self.conversations.isEmpty {
            self.conversations[self.conversations.endIndex-1].isLast = true
        }
        initFinished = true
    }

    /// Appends a conversation in memory, creates its Core Data counterpart,
    /// and saves immediately.
    /// - Returns: the freshly inserted `ConversationData` row.
    @discardableResult
    func appendConversation(_ conversation: Conversation) -> ConversationData {
        conversations.append(conversation)
        let data = ConversationData(context: PersistenceController.shared.container.viewContext)
        data.id = conversation.id
        data.date = conversation.date
        data.input = conversation.input
        data.reply = conversation.reply
        // Setting the inverse relationship is sufficient for Core Data to
        // insert `data` into `rawData.conversations`. The previous
        // `rawData?.conversations?.adding(data)` call was a no-op —
        // NSSet.adding returns a new set that was discarded — so it has
        // been removed.
        data.dialogue = rawData

        do {
            try PersistenceController.shared.save()
        } catch let error {
            print(error.localizedDescription)
        }

        return data
    }

    /// Replaces the newest conversation and syncs it to Core Data.
    func updateLastConversation(_ conversation: Conversation) {
        conversations[conversations.count - 1] = conversation
        lastConversationData?.sync(with: conversation)
    }

    /// Removes the given conversation (matched by id), if present.
    func removeConversation(_ conversation: Conversation) {
        guard let index = conversations.firstIndex(where: { $0.id == conversation.id }) else {
            return
        }
        removeConversation(at: index)
    }

    /// Removes the conversation at `index` from memory and Core Data,
    /// promoting its predecessor to `isLast` when the tail was removed.
    func removeConversation(at index: Int) {
        let isLast = conversations.endIndex-1 == index
        let conversation = conversations.remove(at: index)
        if isLast && !conversations.isEmpty {
            conversations[conversations.endIndex-1].isLast = true
            suggestions.removeAll()
        }
        do {
            // Fix: restored the stripped `Set<ConversationData>` element type.
            if let conversationsSet = rawData?.conversations as? Set<ConversationData>,
               let conversationData = conversationsSet.first(where: {
                   $0.id == conversation.id
               }) {
                PersistenceController.shared.container.viewContext.delete(conversationData)
            }
            try PersistenceController.shared.save()
        } catch let error {
            print(error.localizedDescription)
        }
    }

    /// Deletes every conversation, both in memory and in Core Data.
    func removeAllConversations() {
        conversations.removeAll()
        do {
            let viewContext = PersistenceController.shared.container.viewContext
            // Fix: restored the stripped `Set<ConversationData>` element type.
            if let conversations = rawData?.conversations as? Set<ConversationData> {
                conversations.forEach(viewContext.delete)
            }
            try PersistenceController.shared.save()
        } catch let error {
            print(error.localizedDescription)
        }
    }

    /// Persists the session's date and configuration. No-op until init
    /// has finished, so partially built sessions are never saved.
    func save() {
        guard initFinished else {
            return
        }
        do {
            rawData?.date = date
            rawData?.configuration = try JSONEncoder().encode(configuration)
            try PersistenceController.shared.save()
        } catch let error {
            print(error.localizedDescription)
        }
    }

}
417 |
extension ConversationData {

    /// Copies every field of the in-memory conversation onto this managed
    /// object and immediately saves the context (failures are logged only).
    func sync(with source: Conversation) {
        id = source.id
        date = source.date
        input = source.input
        inputData = source.inputData
        reply = source.reply
        replyData = source.replyData
        errorDesc = source.errorDesc
        do {
            try PersistenceController.shared.save()
        } catch {
            print(error.localizedDescription)
        }
    }

}
436 |
--------------------------------------------------------------------------------
/ChatGPT/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/22.
6 | //
7 |
8 | import SwiftUI
9 | import CoreData
10 |
/// App root: a `NavigationSplitView` with the dialogue list in the
/// sidebar and the selected session's messages in the detail pane.
/// Materializes persisted `DialogueData` rows into `DialogueSession`
/// view models on appearance.
struct ContentView: View {
    @Environment(\.managedObjectContext) private var viewContext

    // Fix: the generic parameter had been stripped from this declaration;
    // the fetch request yields `DialogueData` rows sorted newest-first.
    @FetchRequest(
        sortDescriptors: [NSSortDescriptor(keyPath: \DialogueData.date, ascending: false)],
        animation: .default)
    private var items: FetchedResults<DialogueData>

    @StateObject var configuration = AppConfiguration.shared
    @State var dialogueSessions: [DialogueSession] = []
    @State var selectedDialogueSession: DialogueSession?

    @State var isShowSettingView = false

    @State var isReplying = false

    @State private var columnVisibility = NavigationSplitViewVisibility.doubleColumn

    var body: some View {
        NavigationSplitView(columnVisibility: $columnVisibility) {
            contentView()
                .toolbar {
                    #if os(iOS)
                    ToolbarItem(placement: .automatic) {
                        Button {
                            isShowSettingView = true
                        } label: {
                            Image(systemName: "ellipsis.circle")
                        }
                    }
                    #endif
                    ToolbarItem(placement: .automatic) {
                        Button {
                            addItem()
                        } label: {
                            Image(systemName: "square.and.pencil")
                        }
                    }
                }
        } detail: {
            ZStack {
                if let selectedDialogueSession = selectedDialogueSession {
                    MessageListView(session:selectedDialogueSession)
                        .onReceive(selectedDialogueSession.$isReplying.didSet) { isReplying in
                            self.isReplying = isReplying
                        }
                        .onReceive(selectedDialogueSession.$conversations.didSet) { conversations in
                            // NOTE(review): this true/false toggle appears to
                            // be a deliberate tickle forcing observers to
                            // refresh after the list is cleared — confirm
                            // before removing.
                            if conversations.isEmpty {
                                isReplying = true
                                isReplying = false
                            }
                        }
                }
            }
            #if os(iOS)
            .navigationBarTitleDisplayMode(.inline)
            .toolbarBackground(.visible, for: .navigationBar)
            #else
            .frame(minWidth: 500)
            #endif
        }
        .navigationSplitViewStyle(.balanced)
        #if os(macOS)
        .frame(minWidth: 800, minHeight: 500)
        .background(.secondarySystemBackground)
        #else
        .sheet(isPresented: $isShowSettingView) {
            NavigationStack {
                AppSettingsView(configuration: configuration)
                    .navigationBarTitleDisplayMode(.inline)
                    .toolbar {
                        ToolbarItem {
                            Button {
                                isShowSettingView = false
                            } label: {
                                Text("Done")
                                    .bold()
                            }
                        }
                    }
            }
        }
        #endif
        .onAppear() {
            // Build in-memory sessions from the fetched Core Data rows.
            dialogueSessions = items.compactMap {
                DialogueSession(rawData: $0)
            }
        }


    }


    /// Sidebar content: a placeholder when no dialogues exist, otherwise
    /// the session list wired to the delete handlers below.
    @ViewBuilder
    func contentView() -> some View {
        if dialogueSessions.isEmpty {
            DialogueListPlaceholderView()
        } else {
            DialogueSessionListView(
                dialogueSessions: $dialogueSessions,
                selectedDialogueSession: $selectedDialogueSession,
                isReplying: $isReplying
            ) {
                deleteItems(offsets: $0)
            } deleteDialogueHandler: {
                deleteItem($0)
            }
        }
    }


    /// Creates a new empty dialogue both in memory and in Core Data.
    private func addItem() {
        withAnimation {
            do {
                let session = DialogueSession()
                dialogueSessions.insert(session, at: 0)
                let newItem = DialogueData(context: viewContext)
                newItem.id = session.id
                newItem.date = session.date
                newItem.configuration = try JSONEncoder().encode(session.configuration)
                try PersistenceController.shared.save()
            } catch {
                // Replace this implementation with code to handle the error appropriately.
                // fatalError() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development.
                let nsError = error as NSError
                fatalError("Unresolved error \(nsError), \(nsError.userInfo)")
            }
        }
    }

    /// Deletes the dialogues at `offsets` from memory and Core Data.
    private func deleteItems(offsets: IndexSet) {
        withAnimation {
            dialogueSessions.remove(atOffsets: offsets)
            offsets.map { items[$0] }.forEach(viewContext.delete)

            do {
                try PersistenceController.shared.save()
            } catch {
                // Replace this implementation with code to handle the error appropriately.
                // fatalError() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development.
                let nsError = error as NSError
                fatalError("Unresolved error \(nsError), \(nsError.userInfo)")
            }
        }
    }

    /// Deletes one specific dialogue session and its backing row.
    private func deleteItem(_ session: DialogueSession) {
        withAnimation {
            dialogueSessions.removeAll {
                $0.id == session.id
            }
            if let item = session.rawData {
                viewContext.delete(item)
            }

            do {
                try PersistenceController.shared.save()
            } catch {
                // Replace this implementation with code to handle the error appropriately.
                // fatalError() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development.
                let nsError = error as NSError
                fatalError("Unresolved error \(nsError), \(nsError.userInfo)")
            }
        }
    }

}
178 |
--------------------------------------------------------------------------------
/ChatGPT/Persistence.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Persistence.swift
3 | // ChatGPT
4 | //
5 | // Created by LuoHuanyu on 2023/3/22.
6 | //
7 |
8 | import CoreData
9 |
/// Thin wrapper around the app's Core Data + CloudKit stack.
struct PersistenceController {
    /// Shared singleton used throughout the app.
    static let shared = PersistenceController()

    let container: NSPersistentCloudKitContainer

    /// Commits pending changes on the main-queue view context.
    /// - Throws: rethrows any Core Data save error.
    func save() throws {
        try container.viewContext.save()
        print("[CoreData] Save succeed.")
    }

    /// - Parameter inMemory: when true, points the store at /dev/null so
    ///   nothing is persisted (useful for previews and tests).
    init(inMemory: Bool = false) {
        container = NSPersistentCloudKitContainer(name: "ChatGPT")
        if inMemory {
            container.persistentStoreDescriptions.first!.url = URL(fileURLWithPath: "/dev/null")
        }
        container.loadPersistentStores(completionHandler: { (storeDescription, error) in
            if let error = error as NSError? {
                // Replace this implementation with code to handle the error appropriately.
                // fatalError() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development.

                /*
                 Typical reasons for an error here include:
                 * The parent directory does not exist, cannot be created, or disallows writing.
                 * The persistent store is not accessible, due to permissions or data protection when the device is locked.
                 * The device is out of space.
                 * The store could not be migrated to the current model version.
                 Check the error message to determine what the actual problem was.
                 */
                fatalError("Unresolved error \(error), \(error.userInfo)")
            } else {
                print("[CoreData] \(storeDescription.description)")
            }
        })
        // Merge CloudKit/background-context changes into the view context
        // automatically so the UI stays current.
        container.viewContext.automaticallyMergesChangesFromParent = true
    }
}
46 |
--------------------------------------------------------------------------------
/ChatGPT/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/100.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/100.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/1024 1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/1024 1.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/1024.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/1024.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/114.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/114.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/120.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/120.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/128.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/144.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/144.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/152.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/152.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/16.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/167.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/167.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/180.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/180.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/20.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/20.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/256 1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/256 1.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/256.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/256.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/29.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/29.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/32 1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/32 1.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/32.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/40.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/40.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/50.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/50.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/512 1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/512 1.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/512.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/57.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/57.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/58.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/58.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/60.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/60.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/64.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/64.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/72.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/72.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/76.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/76.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/80.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/80.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/87.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/87.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "40.png",
5 | "idiom" : "iphone",
6 | "scale" : "2x",
7 | "size" : "20x20"
8 | },
9 | {
10 | "filename" : "60.png",
11 | "idiom" : "iphone",
12 | "scale" : "3x",
13 | "size" : "20x20"
14 | },
15 | {
16 | "filename" : "29.png",
17 | "idiom" : "iphone",
18 | "scale" : "1x",
19 | "size" : "29x29"
20 | },
21 | {
22 | "filename" : "58.png",
23 | "idiom" : "iphone",
24 | "scale" : "2x",
25 | "size" : "29x29"
26 | },
27 | {
28 | "filename" : "87.png",
29 | "idiom" : "iphone",
30 | "scale" : "3x",
31 | "size" : "29x29"
32 | },
33 | {
34 | "filename" : "80.png",
35 | "idiom" : "iphone",
36 | "scale" : "2x",
37 | "size" : "40x40"
38 | },
39 | {
40 | "filename" : "120.png",
41 | "idiom" : "iphone",
42 | "scale" : "3x",
43 | "size" : "40x40"
44 | },
45 | {
46 | "filename" : "57.png",
47 | "idiom" : "iphone",
48 | "scale" : "1x",
49 | "size" : "57x57"
50 | },
51 | {
52 | "filename" : "114.png",
53 | "idiom" : "iphone",
54 | "scale" : "2x",
55 | "size" : "57x57"
56 | },
57 | {
58 | "filename" : "120.png",
59 | "idiom" : "iphone",
60 | "scale" : "2x",
61 | "size" : "60x60"
62 | },
63 | {
64 | "filename" : "180.png",
65 | "idiom" : "iphone",
66 | "scale" : "3x",
67 | "size" : "60x60"
68 | },
69 | {
70 | "filename" : "20.png",
71 | "idiom" : "ipad",
72 | "scale" : "1x",
73 | "size" : "20x20"
74 | },
75 | {
76 | "filename" : "40.png",
77 | "idiom" : "ipad",
78 | "scale" : "2x",
79 | "size" : "20x20"
80 | },
81 | {
82 | "filename" : "29.png",
83 | "idiom" : "ipad",
84 | "scale" : "1x",
85 | "size" : "29x29"
86 | },
87 | {
88 | "filename" : "58.png",
89 | "idiom" : "ipad",
90 | "scale" : "2x",
91 | "size" : "29x29"
92 | },
93 | {
94 | "filename" : "40.png",
95 | "idiom" : "ipad",
96 | "scale" : "1x",
97 | "size" : "40x40"
98 | },
99 | {
100 | "filename" : "80.png",
101 | "idiom" : "ipad",
102 | "scale" : "2x",
103 | "size" : "40x40"
104 | },
105 | {
106 | "filename" : "50.png",
107 | "idiom" : "ipad",
108 | "scale" : "1x",
109 | "size" : "50x50"
110 | },
111 | {
112 | "filename" : "100.png",
113 | "idiom" : "ipad",
114 | "scale" : "2x",
115 | "size" : "50x50"
116 | },
117 | {
118 | "filename" : "72.png",
119 | "idiom" : "ipad",
120 | "scale" : "1x",
121 | "size" : "72x72"
122 | },
123 | {
124 | "filename" : "144.png",
125 | "idiom" : "ipad",
126 | "scale" : "2x",
127 | "size" : "72x72"
128 | },
129 | {
130 | "filename" : "76.png",
131 | "idiom" : "ipad",
132 | "scale" : "1x",
133 | "size" : "76x76"
134 | },
135 | {
136 | "filename" : "152.png",
137 | "idiom" : "ipad",
138 | "scale" : "2x",
139 | "size" : "76x76"
140 | },
141 | {
142 | "filename" : "167.png",
143 | "idiom" : "ipad",
144 | "scale" : "2x",
145 | "size" : "83.5x83.5"
146 | },
147 | {
148 | "filename" : "1024.png",
149 | "idiom" : "ios-marketing",
150 | "scale" : "1x",
151 | "size" : "1024x1024"
152 | },
153 | {
154 | "filename" : "16.png",
155 | "idiom" : "mac",
156 | "scale" : "1x",
157 | "size" : "16x16"
158 | },
159 | {
160 | "filename" : "32.png",
161 | "idiom" : "mac",
162 | "scale" : "2x",
163 | "size" : "16x16"
164 | },
165 | {
166 | "filename" : "32 1.png",
167 | "idiom" : "mac",
168 | "scale" : "1x",
169 | "size" : "32x32"
170 | },
171 | {
172 | "filename" : "64.png",
173 | "idiom" : "mac",
174 | "scale" : "2x",
175 | "size" : "32x32"
176 | },
177 | {
178 | "filename" : "128.png",
179 | "idiom" : "mac",
180 | "scale" : "1x",
181 | "size" : "128x128"
182 | },
183 | {
184 | "filename" : "256.png",
185 | "idiom" : "mac",
186 | "scale" : "2x",
187 | "size" : "128x128"
188 | },
189 | {
190 | "filename" : "256 1.png",
191 | "idiom" : "mac",
192 | "scale" : "1x",
193 | "size" : "256x256"
194 | },
195 | {
196 | "filename" : "512.png",
197 | "idiom" : "mac",
198 | "scale" : "2x",
199 | "size" : "256x256"
200 | },
201 | {
202 | "filename" : "512 1.png",
203 | "idiom" : "mac",
204 | "scale" : "1x",
205 | "size" : "512x512"
206 | },
207 | {
208 | "filename" : "1024 1.png",
209 | "idiom" : "mac",
210 | "scale" : "2x",
211 | "size" : "512x512"
212 | }
213 | ],
214 | "info" : {
215 | "author" : "xcode",
216 | "version" : 1
217 | }
218 | }
219 |
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/huggingface.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "scale" : "1x"
6 | },
7 | {
8 | "filename" : "huggingface.png",
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/huggingface.imageset/huggingface.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/huggingface.imageset/huggingface.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/openai.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "scale" : "1x"
6 | },
7 | {
8 | "filename" : "openai.png",
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/ChatGPT/Resource/Assets.xcassets/openai.imageset/openai.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/ChatGPT/Resource/Assets.xcassets/openai.imageset/openai.png
--------------------------------------------------------------------------------
/ChatGPT/Resource/zh-Hans.lproj/Localizable.strings:
--------------------------------------------------------------------------------
1 | /*
2 | Localizable.strings
3 | ChatGPT
4 |
5 | Created by LuoHuanyu on 2023/4/3.
6 |
7 | */
8 |
9 | "Today" = "今天";
10 | "Yesterday" = "昨天";
11 | "No Message" = "没有信息";
12 | "Copy" = "复制";
13 | "Delete" = "删除";
14 | "Regenerate" = "重新生成";
15 |
16 | "Ask anything, or type /" = "问任何问题,或输入/";
17 |
18 | "Warning" = "警告";
19 | "Remove all messages?" = "删除所有信息?";
20 | "Cancel" = "取消";
21 | "Confirm" = "确认";
22 | "Done" = "完成";
23 | "Regenerate response" = "重新生成";
24 | "Error: %@" = "错误:%@";
25 | "Response Error: %@" = "响应错误:%@";
26 | "Response Error: %@, %@" = "响应错误: %@, %@";
27 | "Invalid Response" = "无效的响应";
28 | "Failed to generate image." = "生成图片失败";
29 | "HuggingFace User Access Token is not set." = "HuggingFace User Access Token 未设置";
30 |
31 | "Settings" = "设置";
32 |
33 | "Model" = "模型";
34 | "Prompt" = "提示词";
35 | "Prompts" = "提示词";
36 | "General" = "通用";
37 |
38 | "Temperature" = "温度";
39 | "Sync Prompts" = "同步提示词";
40 | "Source" = "源网址";
41 | "Sync" = "同步";
42 | "Never" = "从未更新";
43 | "Updating" = "更新中";
44 | "Last updated on %@" = "上次更新于 %@";
45 |
46 | "Custom Prompts" = "自定义提示词";
47 | "Add Prompt" = "新增提示词";
48 | "No Prompts" = "没有提示词";
49 | "Name" = "名称";
50 | "Name:" = "名称:";
51 | "Prompt:" = "提示词:";
52 | "Type a shortcut name" = "输入快捷名称";
53 | "Type a prompt" = "输入提示词内容";
54 |
55 | "Markdown Enabled" = "使用Markdown";
56 |
57 |
58 | "OpenAI" = "OpenAI";
59 | "OpenAI API Key" = "OpenAI API Key";
60 | "OpenAI Documentation" = "OpenAI 文档";
61 | "API Key" = "API Key";
62 | "ChatGPT" = "ChatGPT";
63 | "Language Model" = "语言模型";
64 | "Image Size" = "图片尺寸";
65 | "[Image]" = "[图片]";
66 | "Smart Mode" = "智能模式";
67 | "ChatGPT will classify your prompt and then select the most appropriate model to handle it." = "ChatGPT会对您的提示进行分类,然后选择最合适的模型来处理它。";
68 | "Reply Suggestions" = "回复建议";
69 | "ChatGPT will generate reply suggestions based on past conversations." = "ChatGPT将根据过去的对话生成回复建议。";
70 | "Text2Image:" = "文字生成图片:";
71 | "Text2Image" = "文字生成图片";
72 |
73 | "Later" = "稍后";
74 | "Confirm" = "确认";
75 | "Enter OpenAI API Key" = "输入OpenAI API Key";
76 | "You need set OpenAI API Key before start a conversation." = "在开始对话前,您需要输入您的OpenAI API Key。";
77 |
--------------------------------------------------------------------------------
/ChatGPTTests/ChatGPTTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatGPTTests.swift
3 | // ChatGPTTests
4 | //
5 | // Created by LuoHuanyu on 2023/3/22.
6 | //
7 |
8 | import XCTest
9 |
10 | final class ChatGPTTests: XCTestCase {
11 |
12 | override func setUpWithError() throws {
13 | // Put setup code here. This method is called before the invocation of each test method in the class.
14 | }
15 |
16 | override func tearDownWithError() throws {
17 | // Put teardown code here. This method is called after the invocation of each test method in the class.
18 | }
19 |
20 | func testExample() throws {
21 | // This is an example of a functional test case.
22 | // Use XCTAssert and related functions to verify your tests produce the correct results.
23 | // Any test you write for XCTest can be annotated as throws and async.
24 | // Mark your test throws to produce an unexpected failure when your test encounters an uncaught error.
25 | // Mark your test async to allow awaiting for asynchronous code to complete. Check the results with assertions afterwards.
26 | }
27 |
28 | func testPerformanceExample() throws {
29 | // This is an example of a performance test case.
30 | measure {
31 | // Put the code you want to measure the time of here.
32 | }
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/ChatGPTUITests/ChatGPTUITests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatGPTUITests.swift
3 | // ChatGPTUITests
4 | //
5 | // Created by LuoHuanyu on 2023/3/22.
6 | //
7 |
8 | import XCTest
9 |
10 | final class ChatGPTUITests: XCTestCase {
11 |
12 | override func setUpWithError() throws {
13 | // Put setup code here. This method is called before the invocation of each test method in the class.
14 |
15 | // In UI tests it is usually best to stop immediately when a failure occurs.
16 | continueAfterFailure = false
17 |
18 | // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
19 | }
20 |
21 | override func tearDownWithError() throws {
22 | // Put teardown code here. This method is called after the invocation of each test method in the class.
23 | }
24 |
25 | func testExample() throws {
26 | // UI tests must launch the application that they test.
27 | let app = XCUIApplication()
28 | app.launch()
29 |
30 | // Use XCTAssert and related functions to verify your tests produce the correct results.
31 | }
32 |
33 | func testLaunchPerformance() throws {
34 | if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) {
35 | // This measures how long it takes to launch your application.
36 | measure(metrics: [XCTApplicationLaunchMetric()]) {
37 | XCUIApplication().launch()
38 | }
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/ChatGPTUITests/ChatGPTUITestsLaunchTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatGPTUITestsLaunchTests.swift
3 | // ChatGPTUITests
4 | //
5 | // Created by LuoHuanyu on 2023/3/22.
6 | //
7 |
8 | import XCTest
9 |
10 | final class ChatGPTUITestsLaunchTests: XCTestCase {
11 |
12 | override class var runsForEachTargetApplicationUIConfiguration: Bool {
13 | true
14 | }
15 |
16 | override func setUpWithError() throws {
17 | continueAfterFailure = false
18 | }
19 |
20 | func testLaunch() throws {
21 | let app = XCUIApplication()
22 | app.launch()
23 |
24 | // Insert steps here to perform after app launch but before taking a screenshot,
25 | // such as logging into a test account or navigating somewhere in the app
26 |
27 | let attachment = XCTAttachment(screenshot: app.screenshot())
28 | attachment.name = "Launch Screen"
29 | attachment.lifetime = .keepAlways
30 | add(attachment)
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/GPTMessage-Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | ITSAppUsesNonExemptEncryption
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/GPTMessage.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/GPTMessage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/GPTMessage.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "pins" : [
3 | {
4 | "identity" : "kingfisher",
5 | "kind" : "remoteSourceControl",
6 | "location" : "https://github.com/onevcat/Kingfisher.git",
7 | "state" : {
8 | "revision" : "af4be924ad984cf4d16f4ae4df424e79a443d435",
9 | "version" : "7.6.2"
10 | }
11 | },
12 | {
13 | "identity" : "splash",
14 | "kind" : "remoteSourceControl",
15 | "location" : "https://github.com/JohnSundell/Splash",
16 | "state" : {
17 | "revision" : "7f4df436eb78fe64fe2c32c58006e9949fa28ad8",
18 | "version" : "0.16.0"
19 | }
20 | },
21 | {
22 | "identity" : "swift-markdown-ui",
23 | "kind" : "remoteSourceControl",
24 | "location" : "https://github.com/gonzalezreal/swift-markdown-ui",
25 | "state" : {
26 | "revision" : "4392c3cefd08db10f13ffb019d7c7a4a622824f5",
27 | "version" : "2.0.2"
28 | }
29 | },
30 | {
31 | "identity" : "swiftcsv",
32 | "kind" : "remoteSourceControl",
33 | "location" : "https://github.com/swiftcsv/SwiftCSV.git",
34 | "state" : {
35 | "revision" : "96fa14b92e88e0befdbc8bc31c7c2c9594a30060",
36 | "version" : "0.8.1"
37 | }
38 | },
39 | {
40 | "identity" : "swiftui-introspect",
41 | "kind" : "remoteSourceControl",
42 | "location" : "https://github.com/siteline/SwiftUI-Introspect.git",
43 | "state" : {
44 | "revision" : "c18951c747ab62af7c15e17a81bd37d4fd5a9979",
45 | "version" : "0.2.3"
46 | }
47 | },
48 | {
49 | "identity" : "swiftuix",
50 | "kind" : "remoteSourceControl",
51 | "location" : "https://github.com/SwiftUIX/SwiftUIX",
52 | "state" : {
53 | "revision" : "1b8b443b61b8b37fafb06c1abad9b6be244c9de9",
54 | "version" : "0.1.4"
55 | }
56 | }
57 | ],
58 | "version" : 2
59 | }
60 |
--------------------------------------------------------------------------------
/GPTMessage.xcodeproj/xcshareddata/xcschemes/ChatGPT.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
31 |
32 |
35 |
41 |
42 |
43 |
46 |
52 |
53 |
54 |
55 |
56 |
66 |
68 |
74 |
75 |
76 |
77 |
83 |
85 |
91 |
92 |
93 |
94 |
96 |
97 |
100 |
101 |
102 |
--------------------------------------------------------------------------------
/GPTMessage.xcodeproj/xcshareddata/xcschemes/GPTMessage.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
31 |
32 |
42 |
44 |
50 |
51 |
52 |
53 |
59 |
61 |
67 |
68 |
69 |
70 |
72 |
73 |
76 |
77 |
78 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Huanyu Luo
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # GPTMessage
2 | A SwiftUI app demonstrating how ChatGPT interacts with DALL·E and HuggingFace models for iOS and macOS.
3 |
4 | This is what the app looks like on iOS:
5 |
6 |
7 |
8 |
9 |
10 | And macOS:
11 |
12 |
13 |
14 |
15 | ## Features
16 | ### Chat Completion
17 |
18 | Chat Completion is driven by OpenAI's chat language models, including gpt-3.5-turbo and gpt-3.5-turbo-0301.
19 | ### Image Generation
20 |
21 | Image Generation uses OpenAI's image generation API (DALL·E) and HuggingFace's Inference API to create images.
22 |
23 | To start drawing, simply send a message beginning with "Draw". For example, you could say `Draw a close-up, studio photographic portrait of a curious-looking blue British Shorthair cat`.
24 |
25 | `Draw something` is a hardcoded prompt. However, when Smart Mode is enabled, ChatGPT will classify your prompt and select the most appropriate model to handle it. Therefore, you could ask, `Can you assist me in creating a close-up, studio photographic portrait of a curious-looking blue British Shorthair cat?`.
26 |
27 | OpenAI's DALL·E is the preferred option since it's stable and fast (but expensive). You can easily switch to Hugging Face's Inference API (like [stable-diffusion-v1-5](https://huggingface.co/runwayml/stable-diffusion-v1-5) or [stabilityai/stable-diffusion-2-1](https://huggingface.co/stabilityai/stable-diffusion-2-1)) in the settings.
28 |
29 | ### Image Caption
30 |
31 | By connecting ChatGPT with an Image Caption model such as [nlpconnect/vit-gpt2-image-captioning](https://huggingface.co/nlpconnect/vit-gpt2-image-captioning) from Hugging Face, we can easily integrate the image captioning task with the image generation task.
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 | ## Prompts
43 |
44 | Default prompts come from **[Awesome ChatGPT Prompts](https://github.com/f/awesome-chatgpt-prompts)**.
45 |
46 | ### iOS
47 |
48 | Click the person icon or type '/' to show the prompts list.
49 |
50 | ### macOS
51 |
52 | Type '/' to show the prompts list.
53 |
54 | ## Usage
55 |
56 | Set your OpenAI API key in the `AppConfiguration`.
57 |
58 | ```swift
59 | class AppConfiguration: ObservableObject {
60 |
61 | @AppStorage("configuration.key") var key = "OpenAI API Key"
62 |
63 | }
64 | ```
65 |
66 | Set your Hugging Face User Access Token in the `HuggingFaceConfiguration`.
67 |
68 | ```swift
69 | class HuggingFaceConfiguration: ObservableObject {
70 |
71 | @AppStorage("huggingFace.key") var key: String = ""
72 |
73 | }
74 | ```
75 |
--------------------------------------------------------------------------------
/screenshot.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/screenshot.jpg
--------------------------------------------------------------------------------
/screenshot1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/screenshot1.jpg
--------------------------------------------------------------------------------
/screenshot_image_caption.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/screenshot_image_caption.jpg
--------------------------------------------------------------------------------
/screenshot_image_caption1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/screenshot_image_caption1.jpg
--------------------------------------------------------------------------------
/screenshot_macOS.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/screenshot_macOS.jpg
--------------------------------------------------------------------------------
/screenshot_macOS_image_caption.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lhuanyu/GPTMessage/4850f053169b6824805461d5b20f677e796350ff/screenshot_macOS_image_caption.jpg
--------------------------------------------------------------------------------