├── .gitignore
├── Sources
└── ChatGPTUI
│ ├── Models
│ ├── VoiceType.swift
│ ├── ParserResult.swift
│ ├── VoiceChatState.swift
│ └── MessageRow.swift
│ ├── Helper
│ ├── ResponseParsingTask.swift
│ └── MarkdownAttributedString.swift
│ ├── Views
│ ├── AttributedView.swift
│ ├── DotLoadingView.swift
│ ├── CodeblockView.swift
│ ├── MessageRowView.swift
│ ├── VoiceChatView.swift
│ └── TextChatView.swift
│ └── ViewModels
│ ├── TextChatViewModel.swift
│ └── VoiceChatViewModel.swift
├── .swiftpm
└── xcode
│ └── package.xcworkspace
│ └── xcshareddata
│ └── IDEWorkspaceChecks.plist
├── Tests
└── ChatGPTUITests
│ └── ChatGPTUITests.swift
├── LICENSE
├── Package.swift
├── README.MD
└── Package.resolved
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | /.build
3 | /Packages
4 | xcuserdata/
5 | DerivedData/
6 | .swiftpm/configuration/registries.json
7 | .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
8 | .netrc
9 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Models/VoiceType.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// The OpenAI text-to-speech voice presets supported by the package.
///
/// Each case's raw value is the exact voice identifier string expected by
/// the OpenAI speech API.
public enum VoiceType: String, Codable, Hashable, Sendable, CaseIterable {
    case alloy, echo, fable, onyx, nova, shimmer
}
11 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/Tests/ChatGPTUITests/ChatGPTUITests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import ChatGPTUI
3 |
/// Placeholder test case for the ChatGPTUI package.
final class ChatGPTUITests: XCTestCase {

    /// Intentionally empty example test.
    ///
    /// XCTest documentation:
    /// https://developer.apple.com/documentation/xctest
    /// Defining test cases and test methods:
    /// https://developer.apple.com/documentation/xctest/defining_test_cases_and_test_methods
    func testExample() throws {}
}
13 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Models/ParserResult.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
/// One parsed segment of a markdown response: either plain rich text or a
/// fenced code block.
public struct ParserResult: Identifiable {

    /// Stable identity for SwiftUI `ForEach`/list use.
    public let id = UUID()
    /// The rendered rich-text content of this segment.
    public let attributedString: AttributedString
    /// Whether this segment is a fenced code block.
    public let isCodeBlock: Bool
    /// The code block's language tag, when one was specified.
    public let codeBlockLanguage: String?

    /// Creates a parsed segment.
    public init(attributedString: AttributedString, isCodeBlock: Bool, codeBlockLanguage: String?) {
        self.codeBlockLanguage = codeBlockLanguage
        self.isCodeBlock = isCodeBlock
        self.attributedString = attributedString
    }
}
16 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Helper/ResponseParsingTask.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Alfian Losari on 19/05/24.
6 | //
7 |
8 | import Foundation
9 | import Markdown
10 |
/// Worker actor that converts raw markdown text into attributed output
/// off the caller's executor.
actor ResponseParsingTask {

    /// Parses `text` as markdown and returns it alongside the attributed
    /// segments produced by `MarkdownAttributedStringParser`.
    func parse(text: String) async -> AttributedOutput {
        var parser = MarkdownAttributedStringParser()
        let segments = parser.parserResults(from: Document(parsing: text))
        return AttributedOutput(string: text, results: segments)
    }
}
21 |
22 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Views/AttributedView.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 |
/// Vertically stacks parsed markdown segments, rendering fenced code with
/// `CodeBlockView` and everything else as selectable text.
public struct AttributedView: View {

    /// The parsed segments to render, in document order.
    public let results: [ParserResult]

    /// Creates the view from parsed markdown segments.
    public init(results: [ParserResult]) {
        self.results = results
    }

    public var body: some View {
        VStack(alignment: .leading, spacing: 0) {
            ForEach(results) { segment in
                if segment.isCodeBlock {
                    CodeBlockView(parserResult: segment)
                        .padding(.bottom)
                } else {
                    Text(segment.attributedString)
                        .textSelection(.enabled)
                }
            }
        }
    }
}
26 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Alfian Losari
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version: 5.9
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
// Library product "ChatGPTUI": drop-in ChatGPT text/voice chat UI for
// iOS 17+, macOS 14+, and visionOS 1+.
//
// NOTE(review): Package.resolved in this repository pins ChatGPTSwift at
// 2.3.2, which is below the `from: "2.5.0"` requirement declared here —
// the lockfile looks stale; confirm and re-resolve.
let package = Package(
    name: "ChatGPTUI",
    platforms: [
        .iOS(.v17),
        .macOS(.v14),
        .visionOS(.v1)],
    products: [
        .library(
            name: "ChatGPTUI",
            targets: ["ChatGPTUI"]),
    ],
    dependencies: [
        // OpenAI API client (chat, transcription, speech).
        .package(url: "https://github.com/alfianlosari/ChatGPTSwift.git", from: "2.5.0"),
        // Markdown parsing for rendering responses.
        .package(url: "https://github.com/apple/swift-markdown.git", from: "0.4.0"),
        // Syntax highlighting for code blocks.
        .package(url: "https://github.com/alfianlosari/HighlighterSwift.git", from: "1.0.0"),
        // Animated waveform shown while recording/playing speech.
        .package(url: "https://github.com/alfianlosari/SiriWaveView.git", from: "1.1.0")
    ],
    targets: [
        .target(
            name: "ChatGPTUI",
            dependencies: [
                "ChatGPTSwift",
                "SiriWaveView",
                .product(name: "Highlighter", package: "HighlighterSwift"),
                .product(name: "Markdown", package: "swift-markdown")
            ]),
        .testTarget(
            name: "ChatGPTUITests",
            dependencies: ["ChatGPTUI"]),
    ]
)
37 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Models/VoiceChatState.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 |
/// The lifecycle of a single voice-chat interaction.
public enum VoiceChatState {
    case idle(ChatResponse?)
    case recordingSpeech
    case processingSpeech
    case playingSpeech(ChatResponse)
    case error(Error)

    /// `true` when no capture, processing, or playback is in progress.
    public var isIdle: Bool {
        guard case .idle = self else { return false }
        return true
    }

    /// The last response carried by the idle state, if any.
    public var idleResponse: ChatResponse? {
        guard case .idle(let response) = self else { return nil }
        return response
    }

    /// The response currently being spoken, if any.
    public var playingSpeechResponse: ChatResponse? {
        guard case .playingSpeech(let response) = self else { return nil }
        return response
    }

    /// `true` while the microphone is capturing the user's speech.
    public var isRecordingSpeech: Bool {
        guard case .recordingSpeech = self else { return false }
        return true
    }

    /// `true` while the captured speech is being transcribed and answered.
    public var isProcessingSpeech: Bool {
        guard case .processingSpeech = self else { return false }
        return true
    }

    /// `true` while synthesized speech is playing back.
    public var isPlayingSpeech: Bool {
        guard case .playingSpeech = self else { return false }
        return true
    }

    /// The failure, when the state is `.error`.
    public var error: Error? {
        guard case .error(let failure) = self else { return nil }
        return failure
    }
}
61 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Views/DotLoadingView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SwiftUIView.swift
3 | //
4 | //
5 | // Created by Alfian Losari on 19/05/24.
6 | //
7 |
8 | import SwiftUI
9 |
/// Three gray dots that fade in sequence — a "typing" indicator shown while
/// the assistant is still generating a response.
public struct DotLoadingView: View {

    // Visibility flags for each dot, toggled in sequence by `performAnimation()`.
    @State private var showCircle1 = false
    @State private var showCircle2 = false
    @State private var showCircle3 = false

    public init() {}

    public var body: some View {
        HStack {
            Circle()
                .opacity(showCircle1 ? 1 : 0)
            Circle()
                .opacity(showCircle2 ? 1 : 0)
            Circle()
                .opacity(showCircle3 ? 1 : 0)
        }
        .foregroundColor(.gray.opacity(0.5))
        .onAppear { performAnimation() }
    }

    /// Advances the dots one step every 0.4 s, then re-invokes itself after a
    /// full 1.2 s cycle so the animation loops indefinitely.
    ///
    /// NOTE(review): the recursive `asyncAfter` chain is never cancelled, so
    /// the scheduled closures keep firing after the view disappears — confirm
    /// this is acceptable for a short-lived loading indicator.
    func performAnimation() {
        let animation = Animation.easeInOut(duration: 0.4)
        withAnimation(animation) {
            self.showCircle1 = true
            self.showCircle3 = false
        }

        DispatchQueue.main.asyncAfter(deadline: .now() + 0.4) {
            withAnimation(animation) {
                self.showCircle2 = true
                self.showCircle1 = false
            }
        }

        DispatchQueue.main.asyncAfter(deadline: .now() + 0.8) {
            withAnimation(animation) {
                self.showCircle2 = false
                self.showCircle3 = true
            }
        }

        DispatchQueue.main.asyncAfter(deadline: .now() + 1.2) {
            self.performAnimation()
        }
    }
}
57 |
58 | //#Preview {
59 | // SwiftUIView()
60 | //}
61 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Models/MessageRow.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftUI
3 |
/// One sender/response pair displayed as a single row of the chat transcript.
public struct MessageRow: Identifiable {

    /// Stable identity for SwiftUI lists.
    public let id = UUID()
    /// `true` while the response for this row is still streaming in.
    public var isPrompting: Bool

    /// Asset name or remote URL string for the sender's avatar.
    public let sendImage: String?
    /// The content the user sent.
    public var send: MessageRowType
    /// Plain-text form of the sent content.
    public var sendText: String { send.text }

    /// Asset name or remote URL string for the bot's avatar.
    public let responseImage: String?
    /// The bot's response content, once any of it has arrived.
    public var response: MessageRowType?
    /// Plain-text form of the response content, if present.
    public var responseText: String? { response?.text }

    /// Error text to display when the request failed.
    public var responseError: String?

    /// Creates a transcript row.
    public init(isPrompting: Bool, sendImage: String?, send: MessageRowType, responseImage: String?, response: MessageRowType? = nil, responseError: String? = nil) {
        self.responseError = responseError
        self.response = response
        self.responseImage = responseImage
        self.send = send
        self.sendImage = sendImage
        self.isPrompting = isPrompting
    }
}
32 |
/// The renderable forms a chat message can take.
public enum MessageRowType {
    /// Markdown already parsed into attributed segments.
    case attributed(AttributedOutput)
    /// Unparsed plain text (e.g. while streaming).
    case rawText(String)
    /// A caller-supplied custom SwiftUI view.
    case customContent(() -> CustomContent)

    /// A plain-text representation of the content.
    public var text: String {
        switch self {
        case .attributed(let attributedOutput):
            "custom \(String(describing: attributedOutput))".isEmpty ? "" : attributedOutput.string
        case .rawText(let string):
            string
        case .customContent(let viewProvider):
            "custom \(String(describing: viewProvider))"
        }
    }
}
49 |
/// The result of parsing a markdown response: the original source string
/// together with its attributed segments.
public struct AttributedOutput {
    /// The raw markdown source text.
    public let string: String
    /// The parsed segments, in document order.
    public let results: [ParserResult]

    /// Creates parsed output from its source text and segments.
    public init(string: String, results: [ParserResult]) {
        self.results = results
        self.string = string
    }
}
59 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Views/CodeblockView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import Markdown
3 |
/// Colors shared with the syntax-highlighted code block rendering.
enum HighlighterConstants {
    /// Dark code-block background (RGB 38/38/38) matching the highlighter theme.
    static let color = Color(red: 38.0 / 255.0, green: 38.0 / 255.0, blue: 38.0 / 255.0)
}
7 |
/// Displays one fenced code block with a language header, horizontal
/// scrolling, and a copy-to-clipboard button.
struct CodeBlockView: View {

    /// The parsed code-block segment to render.
    let parserResult: ParserResult
    // Briefly true after a copy, to show the "Copied" confirmation.
    @State var isCopied = false

    var body: some View {
        VStack(alignment: .leading) {
            header
                .padding(.horizontal)
                .padding(.vertical, 8)
                .background(Color(red: 9/255, green: 49/255, blue: 69/255))

            ScrollView(.horizontal, showsIndicators: true) {
                Text(parserResult.attributedString)
                    .padding(.horizontal, 16)
                    .textSelection(.enabled)
            }
        }
        .background(HighlighterConstants.color)
        .cornerRadius(8)
    }

    /// Capitalized language label (when known) plus the copy button.
    var header: some View {
        HStack {
            if let codeBlockLanguage = parserResult.codeBlockLanguage {
                Text(codeBlockLanguage.capitalized)
                    .font(.headline.monospaced())
                    .foregroundColor(.white)
            }
            Spacer()
            button
        }
    }

    /// Copy button; swaps to a "Copied" confirmation for two seconds.
    @ViewBuilder
    var button: some View {
        if isCopied {
            HStack {
                Text("Copied")
                    .foregroundColor(.white)
                    .font(.subheadline.monospaced().bold())
                Image(systemName: "checkmark.circle.fill")
                    .imageScale(.large)
                    .symbolRenderingMode(.multicolor)
            }
            .frame(alignment: .trailing)
        } else {
            Button {
                // Copy the plain-text form of the code to the platform pasteboard.
                let string = NSAttributedString(parserResult.attributedString).string
                #if os(macOS)
                NSPasteboard.general.setString(string, forType: .string)
                #else
                UIPasteboard.general.string = string
                #endif

                withAnimation {
                    isCopied = true
                }
                // Revert to the copy icon after the confirmation period.
                DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
                    withAnimation {
                        isCopied = false
                    }
                }
            } label: {
                Image(systemName: "doc.on.doc")
            }
            .foregroundColor(.white)
        }
    }
}
78 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Views/MessageRowView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
/// Renders one `MessageRow`: the user's message on top and, when present,
/// the bot's response below it with error/retry and loading affordances.
public struct MessageRowView: View {

    @Environment(\.colorScheme) private var colorScheme
    /// The transcript row to display.
    let message: MessageRow
    /// Invoked when the user taps "Regenerate response" after a failure.
    let retryCallback: (MessageRow) -> Void

    /// Avatar size used for both sender and bot images.
    var imageSize: CGSize {
        CGSize(width: 25, height: 25)
    }

    public init(message: MessageRow, retryCallback: @escaping (MessageRow) -> Void) {
        self.message = message
        self.retryCallback = retryCallback
    }

    public var body: some View {
        VStack(spacing: 0) {
            // Sender bubble; background adapts to light/dark mode.
            messageRow(rowType: message.send, image: message.sendImage, bgColor: colorScheme == .light ? .white : Color(red: 52/255, green: 53/255, blue: 65/255, opacity: 0.5))

            if let response = message.response {
                Divider()
                // Bot bubble; shows the dot loader while still prompting.
                messageRow(rowType: response, image: message.responseImage, bgColor: colorScheme == .light ? .gray.opacity(0.1) : Color(red: 52/255, green: 53/255, blue: 65/255, opacity: 1), responseError: message.responseError, showDotLoading: message.isPrompting)
                Divider()
            }
        }
    }

    /// One full-width bubble: avatar + content on the given background color.
    func messageRow(rowType: MessageRowType, image: String?, bgColor: Color, responseError: String? = nil, showDotLoading: Bool = false) -> some View {
        HStack(alignment: .top, spacing: 24) {
            messageRowContent(rowType: rowType, image: image, responseError: responseError, showDotLoading: showDotLoading)
        }
        .padding(16)
        .frame(maxWidth: .infinity, alignment: .leading)
        .background(bgColor)
    }

    /// Avatar (remote URL or asset name) followed by the message content,
    /// an optional error + retry button, and an optional loading indicator.
    @ViewBuilder
    func messageRowContent(rowType: MessageRowType, image: String?, responseError: String? = nil, showDotLoading: Bool = false) -> some View {
        if let image = image {
            // An "http..." prefix means a remote image; anything else is an asset name.
            if image.hasPrefix("http"), let url = URL(string: image) {
                AsyncImage(url: url) { image in
                    image
                        .resizable()
                        .frame(width: imageSize.width, height: imageSize.height)
                } placeholder: {
                    ProgressView()
                }

            } else {
                Image(image)
                    .resizable()
                    .frame(width: imageSize.width, height: imageSize.height)
            }
        }

        VStack(alignment: .leading) {
            switch rowType {
            case .attributed(let attributedOutput):
                AttributedView(results: attributedOutput.results)

            case .rawText(let text):
                if !text.isEmpty {
                    Text(text)
                        .multilineTextAlignment(.leading)
                        .textSelection(.enabled)
                }

            case .customContent(let customViewProvider):
                customViewProvider()
            }

            if let error = responseError {
                Text("Error: \(error)")
                    .foregroundColor(.red)
                    .multilineTextAlignment(.leading)

                Button("Regenerate response") {
                    retryCallback(message)
                }
                .foregroundColor(.accentColor)
                .padding(.top)
            }

            if showDotLoading {
                DotLoadingView()
                    .frame(width: 60, height: 30)
            }
        }
    }

}
94 |
95 |
96 | //#Preview {
97 | // SwiftUIView()
98 | //}
99 |
--------------------------------------------------------------------------------
/README.MD:
--------------------------------------------------------------------------------
1 | # ChatGPT UI For SwiftUI
2 |
3 | 
4 |
5 | Simple and extensible Drop-in ChatGPT UI Solution for Apple Platforms.
6 |
7 | ## Supported Platforms
8 |
9 | - iOS 17
10 | - macOS 14
11 | - visionOS 1.x
12 |
13 | ## Installation
14 |
15 | ### Swift Package Manager
16 | - File > Swift Packages > Add Package Dependency
17 | - Copy and paste this URL
18 |
19 | ```swift
20 | https://github.com/alfianlosari/ChatGPTUI.git
21 | ```
22 |
23 | ### Import
24 | Import to your project source file.
25 |
26 | ```swift
27 | import ChatGPTUI
28 | ```
29 |
30 | ## Requirement
31 |
32 | Register for an API key from [OpenAI](https://openai.com/api), then initialize the views with that API key.
33 |
34 | ### Voice Chat
35 |
36 | ‼️ You need to add key of `Privacy - Microphone Usage Description` in `info.plist` when using this otherwise your app will crash ‼️
37 |
38 | ### macOS Users
39 |
40 | For App Sandbox, check these checkboxes in Xcode:
41 | - Network Outgoing Connections (Client)
42 | - Hardware Audio Input (For Voice Chat)
43 | - Resource Access Audio Input (For Voice Chat)
44 |
45 | ## Usage
46 |
47 | ### Text Chat View
48 |
49 | Simply initialize passing the `apiKey` in SwiftUI View
50 |
51 | ```swift
52 | var body: some View {
53 | NavigationStack {
54 | TextChatView(apiKey: apiKey)
55 | .navigationTitle("XCA ChatGPTUI")
56 | }
57 | }
58 | ```
59 |
60 | Pass these optional params for customization:
61 | - `senderImage` and `botImage` from asset or remote image URL to render as sender and bot images.
62 | - `model` ChatGPTModel enum (gpt-4o, gpt-4turbo, gpt-3.5, etc)
63 | - `systemText` system text prompt used by ChatGPT.
64 | - `temperature` Temperature used by ChatGPT for response.
65 |
66 | ```swift
67 | var body: some View {
68 | NavigationStack {
69 | TextChatView(
70 | senderImage: senderImage,
71 | botImage: botImage,
72 | model: .gpt_hyphen_4o,
73 | systemText: "You're master of Swift Programming",
74 | temperature: 1.0,
75 | apiKey: apiKey)
76 | .navigationTitle("XCA ChatGPTUI")
77 | }
78 | }
79 | ```
80 |
81 | Make sure you are in a usage tier that is eligible for GPT-4 model access. You can learn more here: [How can I access GPT-4, GPT-4 Turbo and GPT-4o?](https://help.openai.com/en/articles/7102672-how-can-i-access-gpt-4-gpt-4-turbo-and-gpt-4o).
82 |
83 | If you're not sure, just pass a GPT-3.5 model; by default it uses `gpt-3.5-turbo`.
84 |
85 | ### Voice Chat View
86 |
87 | Converse with ChatGPT using Voice. It uses 3 OpenAI APIs under the hood:
88 | - Transcribe user speech to text using the Whisper transcription API.
89 | - Prompt the ChatGPT API using the transcribed text.
90 | - Convert the response text to speech using the text-to-speech (TTS) API.
91 |
92 | ‼️ You need to add key of `Privacy - Microphone Usage Description` in `info.plist` when using this otherwise your app will crash ‼️
93 |
94 | Simply initialize passing the `apiKey` in SwiftUI View
95 |
96 | ```swift
97 | var body: some View {
98 | NavigationStack {
99 | VoiceChatView(apiKey: apiKey)
100 | .navigationTitle("XCA ChatGPTUI")
101 | }
102 | }
103 | ```
104 |
105 |
106 | Pass these optional params for customization:
107 | - `voiceType`: select the voice type from `alloy` (default), `echo`, `fable`, `onyx`, `nova`, `shimmer`
108 | - `model` ChatGPTModel enum (gpt-4o, gpt-4turbo, gpt-3.5, etc)
109 | - `systemText` system text prompt used by ChatGPT.
110 | - `temperature` Temperature used by ChatGPT for response.
111 |
112 | ```swift
113 | var body: some View {
114 | NavigationStack {
115 | VoiceChatView(
116 | voiceType: .nova,
117 | model: .gpt_hyphen_4o,
118 | systemText: "You're master of Swift Programming",
119 | temperature: 1.0,
120 | apiKey: apiKey)
121 | .navigationTitle("XCA ChatGPTUI")
122 |
123 | }
124 | }
125 | ```
126 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Views/VoiceChatView.swift:
--------------------------------------------------------------------------------
1 | import ChatGPTSwift
2 | import SiriWaveView
3 | import SwiftUI
4 |
/// Full-screen voice conversation UI: tap the microphone to record, watch
/// the waveform while recording/playing, and read the assistant's reply.
///
/// NOTE(review): the `where CustomContent == Text` clause on the first
/// initializer implies this struct is generic over `CustomContent`; the
/// generic parameter list appears to have been stripped from this copy of
/// the source — confirm against the repository.
public struct VoiceChatView: View {

    /// Drives recording, transcription, prompting, and speech playback.
    @State var vm: VoiceChatViewModel
    // Toggles the bounce effect on the processing symbol.
    @State var isSymbolAnimating = false
    /// SF Symbol shown while captured speech is being processed.
    var loadingImageSystemName = "circle.dotted.circle"

    /// Creates a voice chat view that renders responses as text/markdown.
    public init(voiceType: VoiceType = .alloy, model: ChatGPTModel = .gpt_hyphen_4o, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, renderAsMarkdown: Bool = true, apiKey: String) where CustomContent == Text {
        self.vm = .init(voiceType: voiceType, model: model, systemText: systemText, temperature: temperature, renderAsMarkdown: renderAsMarkdown, apiKey: apiKey)
    }

    /// Creates a voice chat view from a caller-configured view model.
    public init(customContentVM: VoiceChatViewModel) {
        self.vm = customContentVM
    }

    public var body: some View {
        VStack(spacing: 0) {
            // Latest response (attributed markdown, raw text, or custom view).
            ScrollView {
                if let response = vm.response {
                    switch response {
                    case .attributed(let attributedOutput):
                        AttributedView(results: attributedOutput.results)

                    case .rawText(let text):
                        if !text.isEmpty {
                            Text(text)
                                .multilineTextAlignment(.leading)
                                .textSelection(.enabled)
                        }

                    case .customContent(let customViewProvider):
                        customViewProvider()
                    }
                }
            }
            .contentMargins(.vertical, 16, for: .scrollContent)
            .frame(maxWidth: .infinity)
            .padding(.horizontal)
            .overlay { overlayView }

            if vm.response != nil {
                Divider()
            }

            // Bottom control bar: waveform during playback plus the
            // state-appropriate action button.
            HStack {
                if case .playingSpeech = self.vm.state {
                    SiriWaveView(power: $vm.audioPower)
                        .frame(height: 64)
                }

                switch vm.state {
                case .idle, .error:
                    startCaptureButton
                case .recordingSpeech:
                    cancelRecordingButton
                case .processingSpeech, .playingSpeech:
                    cancelButton
                }
            }
            .padding()
        }
    }

    /// Overlay for transient states: recording waveform, processing
    /// indicator, or error text.
    @ViewBuilder
    var overlayView: some View {
        switch vm.state {
        case .recordingSpeech:
            SiriWaveView(power: $vm.audioPower)
                .frame(height: 256)

        case .processingSpeech:
            Image(systemName: loadingImageSystemName)
                .symbolEffect(.bounce.up.byLayer, options: .repeating, value: isSymbolAnimating)
                #if os(iOS)
                .font(.system(size: 64))
                #else
                .font(.system(size: 96))
                #endif
                .onAppear { isSymbolAnimating = true }
                .onDisappear { isSymbolAnimating = false }

        case .error(let error):
            Text(error.localizedDescription)
                .foregroundStyle(.red)
                .font(.caption)
                .lineLimit(4)
                .padding(.horizontal)

        default: EmptyView()
        }
    }

    /// Microphone button that begins audio capture.
    var startCaptureButton: some View {
        Button {
            vm.startCaptureAudio()
        } label: {
            Image(systemName: "mic.circle")
                .symbolRenderingMode(.multicolor)
                #if os(iOS)
                .font(.system(size: 64))
                #else
                .font(.system(size: 96))
                #endif
        }.buttonStyle(.borderless)
    }

    /// Destructive button that discards the in-progress recording.
    var cancelRecordingButton: some View {
        Button(role: .destructive) {
            vm.cancelRecording()
        } label: {
            Image(systemName: "xmark.circle.fill")
                .symbolRenderingMode(.multicolor)
                .font(.system(size: 44))
        }.buttonStyle(.borderless)
    }

    /// Destructive button that stops processing or playback.
    var cancelButton: some View {
        Button(role: .destructive) {
            vm.cancelProcessingTask()
        } label: {
            Image(systemName: "stop.circle.fill")
                .symbolRenderingMode(.monochrome)
                .foregroundStyle(.red)
                .font(.system(size: 44))
        }.buttonStyle(.borderless)
    }
}
131 |
132 | //#Preview {
133 | // SwiftUIView()
134 | //}
135 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Views/TextChatView.swift:
--------------------------------------------------------------------------------
1 | import ChatGPTSwift
2 | import Foundation
3 | import SwiftUI
4 |
/// Scrolling text chat transcript with an input bar, send/stop buttons, and
/// a toolbar "Clear" action.
///
/// NOTE(review): the `where CustomContent == Text` clause on the first
/// initializer implies this struct is generic over `CustomContent`; the
/// generic parameter list appears to have been stripped from this copy of
/// the source — confirm against the repository.
public struct TextChatView: View {

    @Environment(\.colorScheme) var colorScheme

    /// Drives message sending, streaming, and retry.
    @State var vm: TextChatViewModel
    @FocusState var isTextFieldFocused: Bool

    /// Creates a text chat view that renders responses as text/markdown.
    public init(senderImage: String? = nil, botImage: String? = nil, useStreaming: Bool = true, model: ChatGPTModel = .gpt_hyphen_3_period_5_hyphen_turbo, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, renderAsMarkdown: Bool = true, apiKey: String) where CustomContent == Text {
        self.vm = .init(senderImage: senderImage, botImage: botImage, useStreaming: useStreaming, model: model, systemText: systemText, temperature: temperature, renderAsMarkdown: renderAsMarkdown, apiKey: apiKey)
    }

    /// Creates a text chat view from a caller-configured view model.
    public init(customContentVM: TextChatViewModel) {
        self.vm = customContentVM
    }

    public var body: some View {
        chatListView
            .toolbar {
                ToolbarItemGroup(placement: .destructiveAction) {
                    // Clearing is disabled while a response is streaming.
                    Button("Clear", role: .destructive) {
                        vm.clearMessages()
                    }
                    .disabled(vm.isPrompting)
                }
            }
    }

    /// The transcript list plus the bottom input bar, auto-scrolling to the
    /// newest message as its response streams in.
    var chatListView: some View {
        ScrollViewReader { proxy in
            VStack(spacing: 0) {
                ScrollView {
                    LazyVStack(spacing: 0) {
                        ForEach(vm.messages) { message in
                            MessageRowView(message: message) { message in
                                Task { @MainActor in
                                    await vm.retry(message: message)
                                }
                            }
                        }
                    }
                    // Tapping the transcript dismisses the keyboard.
                    .onTapGesture {
                        isTextFieldFocused = false
                    }
                }
                Divider()
                bottomView(image: vm.senderImage, proxy: proxy)
                Spacer()
            }
            .onChange(of: vm.messages.last?.responseText) { scrollToBottom(proxy: proxy) }
        }
        .background(colorScheme == .light ? .white : Color(red: 52/255, green: 53/255, blue: 65/255, opacity: 0.5))
    }

    /// Input bar: sender avatar, message field, and send (or stop-streaming)
    /// button.
    func bottomView(image: String?, proxy: ScrollViewProxy) -> some View {
        HStack(alignment: .top, spacing: 8) {
            if let image {
                // An "http..." prefix means a remote image; otherwise an asset name.
                if image.hasPrefix("http"), let url = URL(string: image) {
                    AsyncImage(url: url) { image in
                        image
                            .resizable()
                            .frame(width: 30, height: 30)
                    } placeholder: {
                        ProgressView()
                    }

                } else {
                    Image(image)
                        .resizable()
                        .frame(width: 30, height: 30)
                }
            }

            TextField("Send message", text: $vm.inputMessage, axis: .vertical)
                .autocorrectionDisabled()
                .textFieldStyle(.roundedBorder)
                .focused($isTextFieldFocused)
                .disabled(vm.isPrompting)

            if vm.isPrompting {
                // Stop button cancels the in-flight streaming response.
                Button {
                    vm.cancelStreamingResponse()
                } label: {
                    Image(systemName: "stop.circle.fill")
                        .font(.system(size: 30))
                        .symbolRenderingMode(.multicolor)
                        .foregroundColor(.red)
                }
            } else {
                Button {
                    Task { @MainActor in
                        isTextFieldFocused = false
                        scrollToBottom(proxy: proxy)
                        await vm.sendTapped()
                    }
                } label: {
                    Image(systemName: "paperplane.circle.fill")
                        .rotationEffect(.degrees(45))
                        .font(.system(size: 30))
                }
                #if os(macOS)
                .buttonStyle(.borderless)
                .keyboardShortcut(.defaultAction)
                .foregroundColor(.accentColor)
                #endif
                // Don't allow sending whitespace-only messages.
                .disabled(vm.inputMessage.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty)
            }
        }
        .padding(.horizontal, 16)
        .padding(.top, 12)
    }

    /// Scrolls the transcript so the newest message is visible.
    private func scrollToBottom(proxy: ScrollViewProxy) {
        guard let id = vm.messages.last?.id else { return }
        proxy.scrollTo(id, anchor: .bottomTrailing)
    }
}
121 |
--------------------------------------------------------------------------------
/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "originHash" : "a5f2cc5957e77582b8af3f8bd2b338297240fc279554a2505d8b80d4d51d85fc",
3 | "pins" : [
4 | {
5 | "identity" : "async-http-client",
6 | "kind" : "remoteSourceControl",
7 | "location" : "https://github.com/swift-server/async-http-client.git",
8 | "state" : {
9 | "revision" : "a22083713ee90808d527d0baa290c2fb13ca3096",
10 | "version" : "1.21.1"
11 | }
12 | },
13 | {
14 | "identity" : "chatgptswift",
15 | "kind" : "remoteSourceControl",
16 | "location" : "https://github.com/alfianlosari/ChatGPTSwift.git",
17 | "state" : {
18 | "revision" : "5d10da6f680a217ab458bea2402c41982599d525",
19 | "version" : "2.3.2"
20 | }
21 | },
22 | {
23 | "identity" : "gptencoder",
24 | "kind" : "remoteSourceControl",
25 | "location" : "https://github.com/alfianlosari/GPTEncoder.git",
26 | "state" : {
27 | "revision" : "a86968867ab4380e36b904a14c42215f71efe8b4",
28 | "version" : "1.0.4"
29 | }
30 | },
31 | {
32 | "identity" : "highlighterswift",
33 | "kind" : "remoteSourceControl",
34 | "location" : "https://github.com/alfianlosari/HighlighterSwift.git",
35 | "state" : {
36 | "revision" : "6d697f875a064dda825d943fe7f6b53edea08fe8",
37 | "version" : "1.0.0"
38 | }
39 | },
40 | {
41 | "identity" : "siriwaveview",
42 | "kind" : "remoteSourceControl",
43 | "location" : "https://github.com/alfianlosari/SiriWaveView.git",
44 | "state" : {
45 | "revision" : "711287cd8d6ef16b5dbcced5ead82d93d0cb3c88",
46 | "version" : "1.1.0"
47 | }
48 | },
49 | {
50 | "identity" : "swift-algorithms",
51 | "kind" : "remoteSourceControl",
52 | "location" : "https://github.com/apple/swift-algorithms",
53 | "state" : {
54 | "revision" : "f6919dfc309e7f1b56224378b11e28bab5bccc42",
55 | "version" : "1.2.0"
56 | }
57 | },
58 | {
59 | "identity" : "swift-atomics",
60 | "kind" : "remoteSourceControl",
61 | "location" : "https://github.com/apple/swift-atomics.git",
62 | "state" : {
63 | "revision" : "cd142fd2f64be2100422d658e7411e39489da985",
64 | "version" : "1.2.0"
65 | }
66 | },
67 | {
68 | "identity" : "swift-cmark",
69 | "kind" : "remoteSourceControl",
70 | "location" : "https://github.com/apple/swift-cmark.git",
71 | "state" : {
72 | "revision" : "3bc2f3e25df0cecc5dc269f7ccae65d0f386f06a",
73 | "version" : "0.4.0"
74 | }
75 | },
76 | {
77 | "identity" : "swift-collections",
78 | "kind" : "remoteSourceControl",
79 | "location" : "https://github.com/apple/swift-collections",
80 | "state" : {
81 | "revision" : "94cf62b3ba8d4bed62680a282d4c25f9c63c2efb",
82 | "version" : "1.1.0"
83 | }
84 | },
85 | {
86 | "identity" : "swift-http-types",
87 | "kind" : "remoteSourceControl",
88 | "location" : "https://github.com/apple/swift-http-types",
89 | "state" : {
90 | "revision" : "1ddbea1ee34354a6a2532c60f98501c35ae8edfa",
91 | "version" : "1.2.0"
92 | }
93 | },
94 | {
95 | "identity" : "swift-log",
96 | "kind" : "remoteSourceControl",
97 | "location" : "https://github.com/apple/swift-log.git",
98 | "state" : {
99 | "revision" : "e97a6fcb1ab07462881ac165fdbb37f067e205d5",
100 | "version" : "1.5.4"
101 | }
102 | },
103 | {
104 | "identity" : "swift-markdown",
105 | "kind" : "remoteSourceControl",
106 | "location" : "https://github.com/apple/swift-markdown.git",
107 | "state" : {
108 | "revision" : "4aae40bf6fff5286e0e1672329d17824ce16e081",
109 | "version" : "0.4.0"
110 | }
111 | },
112 | {
113 | "identity" : "swift-nio",
114 | "kind" : "remoteSourceControl",
115 | "location" : "https://github.com/apple/swift-nio",
116 | "state" : {
117 | "revision" : "359c461e5561d22c6334828806cc25d759ca7aa6",
118 | "version" : "2.65.0"
119 | }
120 | },
121 | {
122 | "identity" : "swift-nio-extras",
123 | "kind" : "remoteSourceControl",
124 | "location" : "https://github.com/apple/swift-nio-extras.git",
125 | "state" : {
126 | "revision" : "a3b640d7dc567225db7c94386a6e71aded1bfa63",
127 | "version" : "1.22.0"
128 | }
129 | },
130 | {
131 | "identity" : "swift-nio-http2",
132 | "kind" : "remoteSourceControl",
133 | "location" : "https://github.com/apple/swift-nio-http2.git",
134 | "state" : {
135 | "revision" : "c6afe04165c865faaa687b42c32ed76dfcc91076",
136 | "version" : "1.31.0"
137 | }
138 | },
139 | {
140 | "identity" : "swift-nio-ssl",
141 | "kind" : "remoteSourceControl",
142 | "location" : "https://github.com/apple/swift-nio-ssl.git",
143 | "state" : {
144 | "revision" : "7c381eb6083542b124a6c18fae742f55001dc2b5",
145 | "version" : "2.26.0"
146 | }
147 | },
148 | {
149 | "identity" : "swift-nio-transport-services",
150 | "kind" : "remoteSourceControl",
151 | "location" : "https://github.com/apple/swift-nio-transport-services.git",
152 | "state" : {
153 | "revision" : "38ac8221dd20674682148d6451367f89c2652980",
154 | "version" : "1.21.0"
155 | }
156 | },
157 | {
158 | "identity" : "swift-numerics",
159 | "kind" : "remoteSourceControl",
160 | "location" : "https://github.com/apple/swift-numerics.git",
161 | "state" : {
162 | "revision" : "0a5bc04095a675662cf24757cc0640aa2204253b",
163 | "version" : "1.0.2"
164 | }
165 | },
166 | {
167 | "identity" : "swift-openapi-async-http-client",
168 | "kind" : "remoteSourceControl",
169 | "location" : "https://github.com/swift-server/swift-openapi-async-http-client",
170 | "state" : {
171 | "revision" : "abfe558a66992ef1e896a577010f957915f30591",
172 | "version" : "1.0.0"
173 | }
174 | },
175 | {
176 | "identity" : "swift-openapi-runtime",
177 | "kind" : "remoteSourceControl",
178 | "location" : "https://github.com/apple/swift-openapi-runtime",
179 | "state" : {
180 | "revision" : "9a8291fa2f90cc7296f2393a99bb4824ee34f869",
181 | "version" : "1.4.0"
182 | }
183 | },
184 | {
185 | "identity" : "swift-openapi-urlsession",
186 | "kind" : "remoteSourceControl",
187 | "location" : "https://github.com/apple/swift-openapi-urlsession",
188 | "state" : {
189 | "revision" : "6efbfda5276bbbc8b4fec5d744f0ecd8c784eb47",
190 | "version" : "1.0.1"
191 | }
192 | },
193 | {
194 | "identity" : "swift-system",
195 | "kind" : "remoteSourceControl",
196 | "location" : "https://github.com/apple/swift-system.git",
197 | "state" : {
198 | "revision" : "f9266c85189c2751589a50ea5aec72799797e471",
199 | "version" : "1.3.0"
200 | }
201 | }
202 | ],
203 | "version" : 3
204 | }
205 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/ViewModels/TextChatViewModel.swift:
--------------------------------------------------------------------------------
1 | import ChatGPTSwift
2 | import Foundation
3 | import Observation
4 | import SwiftUI
5 | #if os(macOS)
6 | import Cocoa
7 | #else
8 | import UIKit
9 | #endif
10 |
@Observable
open class TextChatViewModel {

    /// Chat transcript rendered by the UI, oldest message first.
    public var messages: [MessageRow] = []

    /// Text currently typed into the input field.
    public var inputMessage = ""
    /// True while a prompt/response round-trip is in flight.
    public var isPrompting = false
    /// The in-flight send/retry task, kept so it can be cancelled.
    /// (Restored the generic arguments; a bare `Task?` does not compile.)
    public var task: Task<Void, Never>?
    public var senderImage: String?
    public var botImage: String?
    /// When true, responses are consumed via the streaming API.
    public var useStreaming = true
    /// When true, prompt and response text are parsed and shown as Markdown.
    public var renderAsMarkdown = true

    public let api: ChatGPTAPI
    public var model: ChatGPTModel
    public var systemText: String
    public var temperature: Double

    public init(messages: [MessageRow] = [], senderImage: String? = nil, botImage: String? = nil, useStreaming: Bool = true, model: ChatGPTModel = .gpt_hyphen_3_period_5_hyphen_turbo, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, renderAsMarkdown: Bool = true, apiKey: String) {
        self.messages = messages
        self.senderImage = senderImage
        self.botImage = botImage
        self.useStreaming = useStreaming
        self.model = model
        self.api = ChatGPTAPI(apiKey: apiKey)
        self.systemText = systemText
        self.renderAsMarkdown = renderAsMarkdown
        self.temperature = temperature
    }

    /// Sends the current `inputMessage` (clearing the field first), using the
    /// streaming or non-streaming path depending on `useStreaming`.
    @MainActor
    open func sendTapped() async {
        self.task = Task {
            let text = inputMessage
            inputMessage = ""
            if useStreaming {
                await send(text: text)
            } else {
                await sendWithoutStream(text: text)
            }
        }
    }

    /// Clears both the API-side conversation history and the local transcript.
    @MainActor
    open func clearMessages() {
        api.deleteHistoryList()
        withAnimation { [weak self] in
            self?.messages = []
        }
    }

    /// Cancels the in-flight response task, if any.
    open func cancelStreamingResponse() {
        self.task?.cancel()
        self.task = nil
    }

    /// Sends `text` via the streaming API and updates the last message row as
    /// chunks arrive. While streaming, Markdown is re-parsed only every
    /// `parserThresholdTextCount` characters (or when a code fence appears);
    /// in between, the unparsed suffix is appended to the last parsed segment
    /// so the UI keeps up with the stream. A final full parse runs once after
    /// the stream ends.
    @MainActor
    open func send(text: String) async {
        isPrompting = true
        var messageRow = MessageRow(
            isPrompting: true,
            sendImage: senderImage,
            send: .rawText(text),
            responseImage: botImage,
            response: .rawText(""),
            responseError: nil)

        // Append the row up front so every later `messages[count - 1]` write —
        // including the one after an early cancellation during the prompt
        // parse — targets this row instead of clobbering the previous message.
        self.messages.append(messageRow)

        var streamText = ""
        do {
            let parsingTask = ResponseParsingTask()
            if renderAsMarkdown {
                let attributedSend = await parsingTask.parse(text: text)
                try Task.checkCancellation()
                messageRow.send = .attributed(attributedSend)
            } else {
                messageRow.send = .rawText(text)
            }
            self.messages[self.messages.count - 1] = messageRow

            let parserThresholdTextCount = 64
            var currentTextCount = 0
            var currentOutput: AttributedOutput?

            let stream = try await api.sendMessageStream(text: text, model: model, systemText: systemText, temperature: temperature)
            for try await text in stream {
                streamText += text
                if renderAsMarkdown {
                    currentTextCount += text.count

                    // Re-parse when enough new text accumulated, or a code
                    // fence showed up (fences change the block structure).
                    if currentTextCount >= parserThresholdTextCount || text.contains("```") {
                        currentOutput = await parsingTask.parse(text: streamText)
                        try Task.checkCancellation()
                        currentTextCount = 0
                    }

                    if let currentOutput = currentOutput, !currentOutput.results.isEmpty {
                        // Tack the not-yet-parsed suffix onto the last parsed
                        // segment, styling it monospaced when that segment is
                        // a code block.
                        let suffixText = streamText.trimmingPrefix(currentOutput.string)
                        var results = currentOutput.results
                        let lastResult = results[results.count - 1]
                        var lastAttrString = lastResult.attributedString
                        if lastResult.isCodeBlock {
                            #if os(macOS)
                            lastAttrString.append(AttributedString(String(suffixText), attributes: .init([.font: NSFont.systemFont(ofSize: 12).apply(newTraits: .monoSpace), .foregroundColor: NSColor.white])))
                            #else
                            lastAttrString.append(AttributedString(String(suffixText), attributes: .init([.font: UIFont.systemFont(ofSize: 12).apply(newTraits: .traitMonoSpace), .foregroundColor: UIColor.white])))
                            #endif
                        } else {
                            lastAttrString.append(AttributedString(String(suffixText)))
                        }
                        results[results.count - 1] = ParserResult(attributedString: lastAttrString, isCodeBlock: lastResult.isCodeBlock, codeBlockLanguage: lastResult.codeBlockLanguage)
                        messageRow.response = .attributed(.init(string: streamText, results: results))
                    } else {
                        // Nothing parsed yet: show the raw stream as a single
                        // plain segment.
                        messageRow.response = .attributed(.init(string: streamText, results: [
                            ParserResult(attributedString: AttributedString(stringLiteral: streamText), isCodeBlock: false, codeBlockLanguage: nil)
                        ]))
                    }
                } else {
                    messageRow.response = .rawText(streamText)
                }
                self.messages[self.messages.count - 1] = messageRow
            }

            // Catch-up parse once the stream is done, covering any trailing
            // text the threshold never triggered on. (Previously this ran on
            // every chunk inside the loop, re-parsing the full accumulated
            // text each time — quadratic work that defeated the threshold.)
            if renderAsMarkdown, let currentString = currentOutput?.string, currentString != streamText {
                let output = await parsingTask.parse(text: streamText)
                try Task.checkCancellation()
                messageRow.response = .attributed(output)
            }
        } catch is CancellationError {
            messageRow.responseError = "The response was cancelled"
        } catch {
            messageRow.responseError = error.localizedDescription
        }

        if messageRow.response == nil {
            messageRow.response = .rawText(streamText)
        }

        messageRow.isPrompting = false
        self.messages[self.messages.count - 1] = messageRow
        self.isPrompting = false
    }

    /// Sends `text` with the non-streaming API and replaces the placeholder
    /// row with the complete response (parsed as Markdown when enabled).
    @MainActor
    open func sendWithoutStream(text: String) async {
        isPrompting = true
        var messageRow = MessageRow(
            isPrompting: true,
            sendImage: senderImage,
            send: .rawText(text),
            responseImage: botImage,
            response: .rawText(""),
            responseError: nil)

        self.messages.append(messageRow)

        do {
            let responseText = try await api.sendMessage(text: text, model: model, systemText: systemText, temperature: temperature)
            try Task.checkCancellation()

            if renderAsMarkdown {
                let parsingTask = ResponseParsingTask()
                let output = await parsingTask.parse(text: responseText)
                try Task.checkCancellation()
                messageRow.response = .attributed(output)
            } else {
                messageRow.response = .rawText(responseText)
            }
        } catch is CancellationError {
            // Same user-facing message as the streaming path, for consistency.
            messageRow.responseError = "The response was cancelled"
        } catch {
            messageRow.responseError = error.localizedDescription
        }

        messageRow.isPrompting = false
        self.messages[self.messages.count - 1] = messageRow
        isPrompting = false
    }

    /// Removes `message` from the transcript and re-sends its original text.
    @MainActor
    open func retry(message: MessageRow) async {
        self.task = Task {
            guard let index = messages.firstIndex(where: { $0.id == message.id }) else {
                return
            }
            self.messages.remove(at: index)
            if useStreaming {
                await send(text: message.sendText)
            } else {
                await sendWithoutStream(text: message.sendText)
            }
        }
    }

    /// Applies `updateHandler` to the last message row in place.
    /// No-ops on an empty transcript instead of trapping.
    func updateLastMessageInList(updateHandler: (inout MessageRow) -> Void) {
        guard !messages.isEmpty else { return }
        var messageRow = messages[self.messages.count - 1]
        updateHandler(&messageRow)
        self.messages[self.messages.count - 1] = messageRow
    }

}
213 |
214 |
215 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/ViewModels/VoiceChatViewModel.swift:
--------------------------------------------------------------------------------
1 | import AVFoundation
2 | import Foundation
3 | import Observation
4 | import ChatGPTSwift
5 | import SwiftUI
6 |
7 | public typealias ChatResponse = MessageRowType
8 |
@Observable
open class VoiceChatViewModel: NSObject, AVAudioRecorderDelegate, AVAudioPlayerDelegate {

    public let api: ChatGPTAPI
    public var model: ChatGPTModel
    public var systemText: String
    public var temperature: Double
    /// When true, the assistant's text reply is parsed and shown as Markdown.
    public var renderAsMarkdown = true

    /// Current phase of the record → transcribe → chat → speak pipeline.
    public var state: VoiceChatState = .idle(nil) {
        didSet {
            #if DEBUG
            print(state)
            #endif
        }
    }

    /// The latest assistant response, surfaced from the idle or playing state.
    public var response: ChatResponse? {
        state.idleResponse ?? state.playingSpeechResponse
    }

    public var selectedVoice = VoiceType.alloy
    var audioPlayer: AVAudioPlayer!
    var audioRecorder: AVAudioRecorder!
    #if !os(macOS)
    var recordingSession = AVAudioSession.sharedInstance()
    #endif
    // Fast timer feeding the waveform animation while recording.
    var animationTimer: Timer?
    // Slow timer that ends the capture after consecutive quiet readings.
    var recordingTimer: Timer?
    // Normalized 0...1 metering level consumed by the UI.
    var audioPower = 0.0
    var prevAudioPower: Double?
    /// The in-flight transcribe/chat/speech task, kept so it can be cancelled.
    /// (Restored the generic arguments; a bare `Task?` does not compile.)
    public var processingSpeechTask: Task<Void, Never>?

    /// Destination file for the microphone capture.
    var captureURL: URL {
        FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask)
            .first!.appendingPathComponent("recording.m4a")
    }

    public init(voiceType: VoiceType = .alloy, model: ChatGPTModel = .gpt_hyphen_4o, systemText: String = "You're a helpful assistant", temperature: Double = 0.6, renderAsMarkdown: Bool = true, apiKey: String) {
        self.selectedVoice = voiceType
        self.model = model
        self.systemText = systemText
        self.temperature = temperature
        self.renderAsMarkdown = renderAsMarkdown
        self.api = ChatGPTAPI(apiKey: apiKey)
        super.init()
        #if !os(macOS)
        do {
            #if os(iOS)
            try recordingSession.setCategory(.playAndRecord, options: .defaultToSpeaker)
            #else
            try recordingSession.setCategory(.playAndRecord, mode: .default)
            #endif
            try recordingSession.setActive(true)

            // [weak self]: the permission callback can fire after this view
            // model is gone; an unowned capture here would crash.
            AVAudioApplication.requestRecordPermission { [weak self] allowed in
                if !allowed {
                    self?.state = .error("Recording not allowed by the user")
                }
            }
        } catch {
            state = .error(error)
        }
        #endif
    }

    /// Starts microphone capture plus the metering and silence-detection timers.
    open func startCaptureAudio() {
        resetValues()
        state = .recordingSpeech
        do {
            audioRecorder = try AVAudioRecorder(url: captureURL,
                                                settings: [
                                                    AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
                                                    AVSampleRateKey: 12000,
                                                    AVNumberOfChannelsKey: 1,
                                                    AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
                                                ])
            audioRecorder.isMeteringEnabled = true
            audioRecorder.delegate = self
            audioRecorder.record()

            // Feeds the waveform animation with the current mic level.
            // [weak self]: repeating timers can outlive the view model.
            animationTimer = Timer.scheduledTimer(withTimeInterval: 0.2, repeats: true, block: { [weak self] _ in
                guard let self, self.audioRecorder != nil else { return }
                self.audioRecorder.updateMeters()
                let power = min(1, max(0, 1 - abs(Double(self.audioRecorder.averagePower(forChannel: 0)) / 50) ))
                self.audioPower = power
            })

            // Two consecutive quiet readings (< 0.25 then < 0.175) are taken
            // as "the user stopped talking" and end the capture.
            recordingTimer = Timer.scheduledTimer(withTimeInterval: 1.6, repeats: true, block: { [weak self] _ in
                guard let self, self.audioRecorder != nil else { return }
                self.audioRecorder.updateMeters()
                let power = min(1, max(0, 1 - abs(Double(self.audioRecorder.averagePower(forChannel: 0)) / 50) ))
                if self.prevAudioPower == nil {
                    self.prevAudioPower = power
                    return
                }
                if let prevAudioPower = self.prevAudioPower, prevAudioPower < 0.25 && power < 0.175 {
                    self.finishCaptureAudio()
                    return
                }
                self.prevAudioPower = power
            })

        } catch {
            resetValues()
            state = .error(error)
        }
    }

    /// Stops recording, loads the captured audio, and starts the pipeline.
    open func finishCaptureAudio() {
        resetValues()
        do {
            let data = try Data(contentsOf: captureURL)
            processingSpeechTask = processSpeechTask(audioData: data)
        } catch {
            state = .error(error)
            resetValues()
        }
    }

    /// Transcribes `audioData`, sends the transcript to the chat API, converts
    /// the reply to speech, and plays it back. Checks for cancellation between
    /// each network step so `cancelProcessingTask()` takes effect promptly.
    open func processSpeechTask(audioData: Data) -> Task<Void, Never> {
        // Capture self strongly: only `processingSpeechTask` (held by self)
        // references this task, so an unowned capture could crash if the view
        // model were released mid-pipeline.
        Task { @MainActor in
            do {
                self.state = .processingSpeech
                let prompt = try await self.api.generateAudioTransciptions(audioData: audioData)
                try Task.checkCancellation()

                let response = try await self.api.sendMessage(text: prompt, model: self.model, systemText: self.systemText, temperature: self.temperature)
                try Task.checkCancellation()

                let data = try await self.api.generateSpeechFrom(input: response, voice:
                        .init(rawValue: self.selectedVoice.rawValue) ?? .alloy)
                try Task.checkCancellation()

                if self.renderAsMarkdown {
                    let parsingTask = ResponseParsingTask()
                    let output = await parsingTask.parse(text: response)
                    try Task.checkCancellation()
                    try self.playAudio(data: data, response: .attributed(output))
                } else {
                    try self.playAudio(data: data, response: .rawText(response))
                }
            } catch {
                // Cancellation is user-initiated; don't surface it as an error.
                if Task.isCancelled { return }
                self.state = .error(error)
                self.resetValues()
            }
        }
    }

    /// Plays the synthesized speech and publishes the response for display.
    open func playAudio(data: Data, response: ChatResponse) throws {
        self.state = .playingSpeech(response)
        audioPlayer = try AVAudioPlayer(data: data)
        audioPlayer.isMeteringEnabled = true
        audioPlayer.delegate = self
        audioPlayer.play()

        // A scheduled Timer left the wave view frozen while the user scrolled
        // during playback, so meter updates are driven by recursive GCD
        // dispatch until a cleaner solution is found.
        self.scheduleAudioPlayerPowerUpdate()
    }

    /// Aborts an in-progress recording and returns to idle.
    open func cancelRecording() {
        resetValues()
        state = .idle(nil)
    }

    /// Cancels the processing pipeline (and any playback), keeping the last
    /// response visible when one was already playing.
    open func cancelProcessingTask() {
        processingSpeechTask?.cancel()
        processingSpeechTask = nil
        resetValues()
        if case .playingSpeech(let response) = self.state {
            state = .idle(response)
        } else {
            state = .idle(nil)
        }
    }

    // MARK: - AVAudioRecorderDelegate

    /// Resets to idle when the system ended the recording unsuccessfully.
    open func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        if !flag {
            resetValues()
            state = .idle(nil)
        }
    }

    // MARK: - AVAudioPlayerDelegate

    /// When playback finishes, returns to idle while keeping the response visible.
    open func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        resetValues()
        if let response = self.state.playingSpeechResponse {
            self.state = .idle(response)
        }
    }

    /// Polls the player's meter every 0.2 s via GCD (see `playAudio`),
    /// re-scheduling itself until the player — or the view model — goes away.
    func scheduleAudioPlayerPowerUpdate() {
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) { [weak self] in
            guard let self, let audioPlayer = self.audioPlayer else { return }
            audioPlayer.updateMeters()
            let power = min(1, max(0, 1 - abs(Double(audioPlayer.averagePower(forChannel: 0)) / 160) ))
            self.audioPower = power
            self.scheduleAudioPlayerPowerUpdate()
        }
    }

    /// Stops and releases the recorder, player, and timers, and clears meters.
    open func resetValues() {
        audioPower = 0
        prevAudioPower = nil
        audioRecorder?.stop()
        audioRecorder = nil
        audioPlayer?.stop()
        audioPlayer = nil
        recordingTimer?.invalidate()
        recordingTimer = nil
        animationTimer?.invalidate()
        animationTimer = nil
    }
}
224 |
--------------------------------------------------------------------------------
/Sources/ChatGPTUI/Helper/MarkdownAttributedString.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | #if os(macOS)
3 | import Cocoa
4 | #else
5 | import UIKit
6 | #endif
7 | import Markdown
8 | import Highlighter
9 |
10 | /// Based on the source code from Christian Selig
11 | /// https://github.com/christianselig/Markdownosaur/blob/main/Sources/Markdownosaur/Markdownosaur.swift
12 |
/// Walks a swift-markdown `Document` and renders it as `NSAttributedString`
/// content — either one combined string, or a list of `ParserResult`s that
/// keeps fenced code blocks separate from regular text so the UI can render
/// them in dedicated code views.
public struct MarkdownAttributedStringParser: MarkupVisitor {

    // Body-text point size taken from the platform's preferred body font.
    #if os(macOS)
    let baseFontSize: CGFloat = NSFont.preferredFont(forTextStyle: .body).pointSize
    #else
    let baseFontSize: CGFloat = UIFont.preferredFont(forTextStyle: .body).pointSize
    #endif

    // Syntax highlighter for fenced code blocks.
    // NOTE(review): force-unwraps Highlighter() — assumes its bundled theme
    // resources are always available; confirm this cannot fail at runtime.
    let highlighter: Highlighter = {
        let highlighter = Highlighter()!
        highlighter.setTheme("stackoverflow-dark")
        return highlighter
    }()

    // Point size used for the newline separator runs inserted between blocks.
    let newLineFontSize: CGFloat = 12

    public init() {}

    /// Renders the whole document into a single attributed string.
    public mutating func attributedString(from document: Document) -> NSAttributedString {
        return visit(document)
    }

    /// Renders the document into an ordered list of `ParserResult`s: each
    /// fenced code block becomes its own result (carrying its language), and
    /// everything between code blocks is coalesced into text results.
    mutating func parserResults(from document: Document) -> [ParserResult] {
        var results = [ParserResult]()
        var currentAttrString = NSMutableAttributedString()

        // Flushes the accumulated non-code text into `results`, bridging to
        // AttributedString via the platform attribute scope (falling back to
        // a plain string when the bridge fails).
        func appendCurrentAttrString() {
            if !currentAttrString.string.isEmpty {
                #if os(macOS)
                let currentAttrStringToAppend = (try? AttributedString(currentAttrString, including: \.appKit)) ?? AttributedString(stringLiteral: currentAttrString.string)
                #else
                let currentAttrStringToAppend = (try? AttributedString(currentAttrString, including: \.uiKit)) ?? AttributedString(stringLiteral: currentAttrString.string)
                #endif

                results.append(.init(attributedString: currentAttrStringToAppend, isCodeBlock: false, codeBlockLanguage: nil))
            }
        }

        document.children.forEach { markup in
            let attrString = visit(markup)
            if let codeBlock = markup as? CodeBlock {
                // A code block closes the current text run and is emitted as
                // its own result.
                appendCurrentAttrString()
                #if os(macOS)
                let attrStringToAppend = (try? AttributedString(attrString, including: \.appKit)) ?? AttributedString(stringLiteral: attrString.string)
                #else
                let attrStringToAppend = (try? AttributedString(attrString, including: \.uiKit)) ?? AttributedString(stringLiteral: attrString.string)
                #endif


                results.append(.init(attributedString: attrStringToAppend, isCodeBlock: true, codeBlockLanguage: codeBlock.language))
                currentAttrString = NSMutableAttributedString()
            } else {
                currentAttrString.append(attrString)
            }
        }

        appendCurrentAttrString()
        return results
    }

    /// Fallback for markup kinds without a dedicated visitor: concatenates
    /// the rendered children.
    mutating public func defaultVisit(_ markup: Markup) -> NSAttributedString {
        let result = NSMutableAttributedString()

        for child in markup.children {
            result.append(visit(child))
        }

        return result
    }

    /// Plain text rendered in the regular body font.
    mutating public func visitText(_ text: Text) -> NSAttributedString {
        #if os(macOS)
        NSAttributedString(string: text.plainText, attributes: [.font: NSFont.systemFont(ofSize: baseFontSize, weight: .regular)])
        #else
        NSAttributedString(string: text.plainText, attributes: [.font: UIFont.systemFont(ofSize: baseFontSize, weight: .regular)])
        #endif
    }

    /// *Emphasis*: renders children, then italicizes the whole run in place.
    mutating public func visitEmphasis(_ emphasis: Emphasis) -> NSAttributedString {
        let result = NSMutableAttributedString()

        for child in emphasis.children {
            result.append(visit(child))
        }

        result.applyEmphasis()

        return result
    }

    /// **Strong**: renders children, then bolds the whole run in place.
    mutating public func visitStrong(_ strong: Strong) -> NSAttributedString {
        let result = NSMutableAttributedString()

        for child in strong.children {
            result.append(visit(child))
        }

        result.applyStrong()

        return result
    }

    /// Paragraph: children plus a trailing separator when more content
    /// follows (single newline inside lists, double newline otherwise).
    mutating public func visitParagraph(_ paragraph: Paragraph) -> NSAttributedString {
        let result = NSMutableAttributedString()

        for child in paragraph.children {
            result.append(visit(child))
        }

        if paragraph.hasSuccessor {
            result.append(paragraph.isContainedInList ? .singleNewline(withFontSize: newLineFontSize) : .doubleNewline(withFontSize: newLineFontSize))
        }

        return result
    }

    /// Heading: bold text whose point size shrinks as the heading level grows.
    mutating public func visitHeading(_ heading: Heading) -> NSAttributedString {
        let result = NSMutableAttributedString()

        for child in heading.children {
            result.append(visit(child))
        }

        result.applyHeading(withLevel: heading.level)

        if heading.hasSuccessor {
            result.append(.doubleNewline(withFontSize: newLineFontSize))
        }

        return result
    }

    /// Link: renders children, then applies link styling (attaching the URL
    /// only when the destination parses).
    mutating public func visitLink(_ link: Link) -> NSAttributedString {
        let result = NSMutableAttributedString()

        for child in link.children {
            result.append(visit(child))
        }

        let url = link.destination != nil ? URL(string: link.destination!) : nil

        result.applyLink(withURL: url)

        return result
    }

    /// Inline code span: monospaced, one point smaller, tinted system pink.
    mutating public func visitInlineCode(_ inlineCode: InlineCode) -> NSAttributedString {
        #if os(macOS)
        NSAttributedString(string: inlineCode.code, attributes: [.font: NSFont.monospacedSystemFont(ofSize: baseFontSize - 1.0, weight: .regular), .foregroundColor: NSColor.systemPink])
        #else
        NSAttributedString(string: inlineCode.code, attributes: [.font: UIFont.monospacedSystemFont(ofSize: baseFontSize - 1.0, weight: .regular), .foregroundColor: UIColor.systemPink])
        #endif

    }

    /// Fenced code block: syntax-highlighted via `highlighter`, falling back
    /// to the raw code when highlighting fails for the given language.
    public func visitCodeBlock(_ codeBlock: CodeBlock) -> NSAttributedString {
        let result = NSMutableAttributedString(attributedString: highlighter.highlight(codeBlock.code, as: codeBlock.language) ?? NSAttributedString(string: codeBlock.code))

        if codeBlock.hasSuccessor {
            result.append(.singleNewline(withFontSize: newLineFontSize))
        }

        return result
    }

    /// ~~Strikethrough~~: renders children, then strikes the whole run.
    mutating public func visitStrikethrough(_ strikethrough: Strikethrough) -> NSAttributedString {
        let result = NSMutableAttributedString()

        for child in strikethrough.children {
            result.append(visit(child))
        }

        result.applyStrikethrough()

        return result
    }

    /// Bulleted list: each item gets a tab-aligned "•" prefix with left
    /// indentation that grows with nesting depth.
    mutating public func visitUnorderedList(_ unorderedList: UnorderedList) -> NSAttributedString {
        let result = NSMutableAttributedString()
        #if os(macOS)
        let font = NSFont.systemFont(ofSize: baseFontSize, weight: .regular)
        #else
        let font = UIFont.systemFont(ofSize: baseFontSize, weight: .regular)
        #endif

        for listItem in unorderedList.listItems {
            var listItemAttributes: [NSAttributedString.Key: Any] = [:]

            let listItemParagraphStyle = NSMutableParagraphStyle()

            // Tab stops: right-align the bullet at the first stop, start the
            // item text at the second; indent grows with list depth.
            let baseLeftMargin: CGFloat = 15.0
            let leftMarginOffset = baseLeftMargin + (20.0 * CGFloat(unorderedList.listDepth))
            let spacingFromIndex: CGFloat = 8.0
            let bulletWidth = ceil(NSAttributedString(string: "•", attributes: [.font: font]).size().width)
            let firstTabLocation = leftMarginOffset + bulletWidth
            let secondTabLocation = firstTabLocation + spacingFromIndex

            listItemParagraphStyle.tabStops = [
                NSTextTab(textAlignment: .right, location: firstTabLocation),
                NSTextTab(textAlignment: .left, location: secondTabLocation)
            ]

            listItemParagraphStyle.headIndent = secondTabLocation

            listItemAttributes[.paragraphStyle] = listItemParagraphStyle
            #if os(macOS)
            listItemAttributes[.font] = NSFont.systemFont(ofSize: baseFontSize, weight: .regular)
            #else
            listItemAttributes[.font] = UIFont.systemFont(ofSize: baseFontSize, weight: .regular)
            #endif
            listItemAttributes[.listDepth] = unorderedList.listDepth

            let listItemAttributedString = visit(listItem).mutableCopy() as! NSMutableAttributedString
            listItemAttributedString.insert(NSAttributedString(string: "\t•\t", attributes: listItemAttributes), at: 0)

            result.append(listItemAttributedString)
        }

        if unorderedList.hasSuccessor {
            result.append(.doubleNewline(withFontSize: newLineFontSize))
        }

        return result
    }

    /// List item: children separated by single newlines when more follow.
    mutating public func visitListItem(_ listItem: ListItem) -> NSAttributedString {
        let result = NSMutableAttributedString()

        for child in listItem.children {
            result.append(visit(child))
        }

        if listItem.hasSuccessor {
            result.append(.singleNewline(withFontSize: newLineFontSize))
        }

        return result
    }

    /// Numbered list: like the bulleted list, but prefixes each item with a
    /// "N." numeral in a monospaced-digit font so columns line up.
    mutating public func visitOrderedList(_ orderedList: OrderedList) -> NSAttributedString {
        let result = NSMutableAttributedString()

        for (index, listItem) in orderedList.listItems.enumerated() {
            var listItemAttributes: [NSAttributedString.Key: Any] = [:]
            #if os(macOS)
            let font = NSFont.systemFont(ofSize: baseFontSize, weight: .regular)
            let numeralFont = NSFont.monospacedDigitSystemFont(ofSize: baseFontSize, weight: .regular)
            #else
            let font = UIFont.systemFont(ofSize: baseFontSize, weight: .regular)
            let numeralFont = UIFont.monospacedDigitSystemFont(ofSize: baseFontSize, weight: .regular)
            #endif


            let listItemParagraphStyle = NSMutableParagraphStyle()

            // Implement a base amount to be spaced from the left side at all times to better visually differentiate it as a list
            let baseLeftMargin: CGFloat = 15.0
            let leftMarginOffset = baseLeftMargin + (20.0 * CGFloat(orderedList.listDepth))

            // Grab the highest number to be displayed and measure its width (yes normally some digits are wider than others but since we're using the numeral mono font all will be the same width in this case)
            let highestNumberInList = orderedList.childCount
            let numeralColumnWidth = ceil(NSAttributedString(string: "\(highestNumberInList).", attributes: [.font: numeralFont]).size().width)

            let spacingFromIndex: CGFloat = 8.0
            let firstTabLocation = leftMarginOffset + numeralColumnWidth
            let secondTabLocation = firstTabLocation + spacingFromIndex

            listItemParagraphStyle.tabStops = [
                NSTextTab(textAlignment: .right, location: firstTabLocation),
                NSTextTab(textAlignment: .left, location: secondTabLocation)
            ]

            listItemParagraphStyle.headIndent = secondTabLocation

            listItemAttributes[.paragraphStyle] = listItemParagraphStyle
            listItemAttributes[.font] = font
            listItemAttributes[.listDepth] = orderedList.listDepth

            let listItemAttributedString = visit(listItem).mutableCopy() as! NSMutableAttributedString

            // Same as the normal list attributes, but for prettiness in formatting we want to use the cool monospaced numeral font
            var numberAttributes = listItemAttributes
            numberAttributes[.font] = numeralFont

            let numberAttributedString = NSAttributedString(string: "\t\(index + 1).\t", attributes: numberAttributes)
            listItemAttributedString.insert(numberAttributedString, at: 0)

            result.append(listItemAttributedString)
        }

        if orderedList.hasSuccessor {
            result.append(orderedList.isContainedInList ? .singleNewline(withFontSize: newLineFontSize) : .doubleNewline(withFontSize: newLineFontSize))
        }

        return result
    }

    /// Block quote: each child is tab-indented by quote depth and tinted gray.
    mutating public func visitBlockQuote(_ blockQuote: BlockQuote) -> NSAttributedString {
        let result = NSMutableAttributedString()

        for child in blockQuote.children {
            var quoteAttributes: [NSAttributedString.Key: Any] = [:]

            let quoteParagraphStyle = NSMutableParagraphStyle()

            let baseLeftMargin: CGFloat = 15.0
            let leftMarginOffset = baseLeftMargin + (20.0 * CGFloat(blockQuote.quoteDepth))

            quoteParagraphStyle.tabStops = [NSTextTab(textAlignment: .left, location: leftMarginOffset)]

            quoteParagraphStyle.headIndent = leftMarginOffset

            quoteAttributes[.paragraphStyle] = quoteParagraphStyle
            #if os(macOS)
            quoteAttributes[.font] = NSFont.systemFont(ofSize: baseFontSize, weight: .regular)
            #else
            quoteAttributes[.font] = UIFont.systemFont(ofSize: baseFontSize, weight: .regular)
            #endif
            quoteAttributes[.listDepth] = blockQuote.quoteDepth

            let quoteAttributedString = visit(child).mutableCopy() as! NSMutableAttributedString
            quoteAttributedString.insert(NSAttributedString(string: "\t", attributes: quoteAttributes), at: 0)
            // NOTE(review): range-less addAttribute(_:value:) is not a
            // Foundation API — presumably a project convenience that applies
            // to the full range; confirm it is defined elsewhere in the file.
            #if os(macOS)
            quoteAttributedString.addAttribute(.foregroundColor, value: NSColor.systemGray)
            #else
            quoteAttributedString.addAttribute(.foregroundColor, value: UIColor.systemGray)
            #endif

            result.append(quoteAttributedString)
        }

        if blockQuote.hasSuccessor {
            result.append(.doubleNewline(withFontSize: newLineFontSize))
        }

        return result
    }
}
351 |
352 | // MARK: - Extensions Land
353 |
/// In-place styling helpers used by `MarkdownAttributedStringParser`.
/// NOTE(review): several methods call a range-less `addAttribute(_:value:)`,
/// which is not a Foundation API — presumably a project convenience that
/// applies the attribute over the full range; confirm it exists elsewhere.
extension NSMutableAttributedString {
    /// Adds the italic trait to every font run in the string.
    func applyEmphasis() {
        enumerateAttribute(.font, in: NSRange(location: 0, length: length), options: []) { value, range, stop in
            #if os(macOS)
            guard let font = value as? NSFont else { return }
            let newFont = font.apply(newTraits: .italic)
            #else
            guard let font = value as? UIFont else { return }
            let newFont = font.apply(newTraits: .traitItalic)
            #endif

            addAttribute(.font, value: newFont, range: range)
        }
    }

    /// Adds the bold trait to every font run in the string.
    func applyStrong() {
        enumerateAttribute(.font, in: NSRange(location: 0, length: length), options: []) { value, range, stop in
            #if os(macOS)
            guard let font = value as? NSFont else { return }
            let newFont = font.apply(newTraits: .bold)
            #else
            guard let font = value as? UIFont else { return }
            let newFont = font.apply(newTraits: .traitBold)
            #endif
            addAttribute(.font, value: newFont, range: range)
        }
    }

    /// Tints the text system blue and, when present, attaches the link URL.
    func applyLink(withURL url: URL?) {
        #if os(macOS)
        addAttribute(.foregroundColor, value: NSColor.systemBlue)
        #else
        addAttribute(.foregroundColor, value: UIColor.systemBlue)
        #endif

        if let url = url {
            addAttribute(.link, value: url)
        }
    }

    /// Tints the text system gray, the block-quote style.
    func applyBlockquote() {
        #if os(macOS)
        addAttribute(.foregroundColor, value: NSColor.systemGray)
        #else
        addAttribute(.foregroundColor, value: UIColor.systemGray)
        #endif
    }

    /// Bolds every font run and scales it for the heading level
    /// (28pt at level 0, shrinking 2pt per level).
    func applyHeading(withLevel headingLevel: Int) {
        enumerateAttribute(.font, in: NSRange(location: 0, length: length), options: []) { value, range, stop in
            #if os(macOS)
            guard let font = value as? NSFont else { return }
            let newFont = font.apply(newTraits: .bold, newPointSize: 28.0 - CGFloat(headingLevel * 2))
            #else
            guard let font = value as? UIFont else { return }
            let newFont = font.apply(newTraits: .traitBold, newPointSize: 28.0 - CGFloat(headingLevel * 2))
            #endif

            addAttribute(.font, value: newFont, range: range)
        }
    }

    /// Applies a single-line strikethrough to the whole string.
    func applyStrikethrough() {
        addAttribute(.strikethroughStyle, value: NSUnderlineStyle.single.rawValue)
    }
}
420 |
421 |
#if os(macOS)
extension NSFont {
    /// Returns a copy of this font whose symbolic traits are the union of the
    /// existing traits and `newTraits`, optionally resized to `newPointSize`.
    /// Falls back to `self` if AppKit cannot build a font from the descriptor.
    func apply(newTraits: NSFontDescriptor.SymbolicTraits, newPointSize: CGFloat? = nil) -> NSFont {
        let mergedTraits = fontDescriptor.symbolicTraits.union(newTraits)
        let mergedDescriptor = fontDescriptor.withSymbolicTraits(mergedTraits)
        return NSFont(descriptor: mergedDescriptor, size: newPointSize ?? pointSize) ?? self
    }
}

#else
extension UIFont {
    /// Returns a copy of this font whose symbolic traits are the union of the
    /// existing traits and `newTraits`, optionally resized to `newPointSize`.
    /// Falls back to `self` if the merged descriptor cannot be created.
    func apply(newTraits: UIFontDescriptor.SymbolicTraits, newPointSize: CGFloat? = nil) -> UIFont {
        let mergedTraits = fontDescriptor.symbolicTraits.union(newTraits)
        guard let mergedDescriptor = fontDescriptor.withSymbolicTraits(mergedTraits) else { return self }
        return UIFont(descriptor: mergedDescriptor, size: newPointSize ?? pointSize)
    }
}
#endif
443 |
444 |
445 |
extension ListItemContainer {
    /// Depth of the list if nested within others. Index starts at 0.
    var listDepth: Int {
        var depth = 0
        var ancestor: Markup? = parent

        // Count how many enclosing ancestors are themselves list containers.
        while let node = ancestor {
            if node is ListItemContainer {
                depth += 1
            }
            ancestor = node.parent
        }

        return depth
    }
}
464 |
extension BlockQuote {
    /// Depth of the quote if nested within others. Index starts at 0.
    var quoteDepth: Int {
        var depth = 0
        var ancestor: Markup? = parent

        // Count how many enclosing ancestors are themselves block quotes.
        while let node = ancestor {
            if node is BlockQuote {
                depth += 1
            }
            ancestor = node.parent
        }

        return depth
    }
}
483 |
extension NSAttributedString.Key {
    /// Custom attribute key holding a list's 0-based nesting depth
    /// (presumably set from `ListItemContainer.listDepth` during rendering — confirm at the write site).
    static let listDepth = NSAttributedString.Key("ListDepth")
    /// Custom attribute key holding a block quote's 0-based nesting depth
    /// (presumably set from `BlockQuote.quoteDepth` during rendering — confirm at the write site).
    static let quoteDepth = NSAttributedString.Key("QuoteDepth")
}
488 |
extension NSMutableAttributedString {
    /// The range spanning the entire string.
    private var entireRange: NSRange { NSRange(location: 0, length: length) }

    /// Applies `value` for `name` across the whole string.
    func addAttribute(_ name: NSAttributedString.Key, value: Any) {
        addAttribute(name, value: value, range: entireRange)
    }

    /// Applies every attribute in `attrs` across the whole string.
    func addAttributes(_ attrs: [NSAttributedString.Key : Any]) {
        addAttributes(attrs, range: entireRange)
    }
}
498 |
extension Markup {
    /// Returns true if this element has sibling elements after it.
    var hasSuccessor: Bool {
        guard let siblingCount = parent?.childCount else { return false }
        return indexInParent + 1 < siblingCount
    }

    /// True when any ancestor of this element is a list-item container.
    var isContainedInList: Bool {
        var ancestor: Markup? = parent

        while let node = ancestor {
            if node is ListItemContainer {
                return true
            }
            ancestor = node.parent
        }

        return false
    }
}
520 |
extension NSAttributedString {
    /// Builds `string` styled with the regular system font at `fontSize`.
    /// Shared by the newline factories so the platform `#if` lives in one place.
    private static func newline(_ string: String, fontSize: CGFloat) -> NSAttributedString {
        #if os(macOS)
        let font = NSFont.systemFont(ofSize: fontSize, weight: .regular)
        #else
        let font = UIFont.systemFont(ofSize: fontSize, weight: .regular)
        #endif
        return NSAttributedString(string: "\(string)", attributes: [.font: font])
    }

    /// A single "\n" separator sized to match surrounding text.
    static func singleNewline(withFontSize fontSize: CGFloat) -> NSAttributedString {
        newline("\n", fontSize: fontSize)
    }

    /// A "\n\n" paragraph break sized to match surrounding text.
    static func doubleNewline(withFontSize fontSize: CGFloat) -> NSAttributedString {
        newline("\n\n", fontSize: fontSize)
    }
}
538 |
539 |
540 |
--------------------------------------------------------------------------------