├── .github
│   ├── FUNDING.yml
│   └── workflows
│       └── xcodebuild.yml
├── .gitignore
├── CODE_OF_CONDUCT.md
├── LICENSE
├── LlamaChat.xcodeproj
│   ├── project.pbxproj
│   └── project.xcworkspace
│       ├── contents.xcworkspacedata
│       └── xcshareddata
│           ├── IDEWorkspaceChecks.plist
│           └── swiftpm
│               └── Package.resolved
├── LlamaChat
│   ├── Assets.xcassets
│   │   ├── AccentColor.colorset
│   │   │   └── Contents.json
│   │   ├── AppIcon.appiconset
│   │   │   ├── Contents.json
│   │   │   ├── icon_128x128.png
│   │   │   ├── icon_128x128@2x.png
│   │   │   ├── icon_16x16.png
│   │   │   ├── icon_16x16@2x.png
│   │   │   ├── icon_256x256.png
│   │   │   ├── icon_256x256@2x.png
│   │   │   ├── icon_32x32.png
│   │   │   ├── icon_32x32@2x.png
│   │   │   ├── icon_512x512.png
│   │   │   └── icon_512x512@2x.png
│   │   ├── Contents.json
│   │   ├── GroupedSelectionRowHover.colorset
│   │   │   └── Contents.json
│   │   └── avatars
│   │       ├── Contents.json
│   │       ├── avatar-1.imageset
│   │       │   ├── 1.png
│   │       │   ├── 1@2x.png
│   │       │   └── Contents.json
│   │       ├── avatar-2.imageset
│   │       │   ├── 2.png
│   │       │   ├── 2@2x.png
│   │       │   └── Contents.json
│   │       ├── avatar-3.imageset
│   │       │   ├── 3.png
│   │       │   ├── 3@2x.png
│   │       │   └── Contents.json
│   │       ├── avatar-4.imageset
│   │       │   ├── 4.png
│   │       │   ├── 4@2x.png
│   │       │   └── Contents.json
│   │       ├── avatar-5.imageset
│   │       │   ├── 5.png
│   │       │   ├── 5@2x.png
│   │       │   └── Contents.json
│   │       ├── avatar-6.imageset
│   │       │   ├── 6.png
│   │       │   ├── 6@2x.png
│   │       │   └── Contents.json
│   │       ├── avatar-7.imageset
│   │       │   ├── 7.png
│   │       │   ├── 7@2x.png
│   │       │   └── Contents.json
│   │       └── avatar-8.imageset
│   │           ├── 8.png
│   │           ├── 8@2x.png
│   │           └── Contents.json
│   ├── ContentView.swift
│   ├── Credits.rtf
│   ├── Info.plist
│   ├── LlamaChat.entitlements
│   ├── LlamaChat.xcconfig
│   ├── LlamaChatApp.swift
│   ├── Preview Content
│   │   └── Preview Assets.xcassets
│   │       └── Contents.json
│   ├── data
│   │   └── names.json
│   ├── error
│   │   └── LlamaChatError.swift
│   ├── extensions
│   │   ├── NSScrollView+Bottom.swift
│   │   ├── ProcessInfo+Threads.swift
│   │   └── Text+TextSelection.swift
│   ├── model
│   │   ├── chat
│   │   │   ├── ChatModel.swift
│   │   │   ├── MessagesModel.swift
│   │   │   ├── Sender.swift
│   │   │   └── messages
│   │   │       ├── ClearedContextMessage.swift
│   │   │       ├── GeneratedMessage.swift
│   │   │       ├── Message.swift
│   │   │       ├── MessageGenerationState.swift
│   │   │       └── StaticMessage.swift
│   │   ├── models
│   │   │   ├── ModelFileManager.swift
│   │   │   └── ModelParameters.swift
│   │   ├── settings
│   │   │   └── AppSettings.swift
│   │   └── sources
│   │       ├── ChatSourceType.swift
│   │       ├── ChatSources.swift
│   │       ├── ModelSize.swift
│   │       └── SourceNameGenerator.swift
│   ├── persistence
│   │   └── SerializedPayload.swift
│   ├── ui
│   │   ├── chat
│   │   │   ├── ChatInfoView.swift
│   │   │   ├── ChatListView.swift
│   │   │   ├── ChatView.swift
│   │   │   ├── MainChatView.swift
│   │   │   ├── avatar
│   │   │   │   └── AvatarView.swift
│   │   │   ├── composer
│   │   │   │   └── ComposeView.swift
│   │   │   └── messages
│   │   │       ├── ClearedContextMessageView.swift
│   │   │       ├── GeneratedMessageView.swift
│   │   │       ├── MessageBubbleView.swift
│   │   │       ├── MessagesTableView.swift
│   │   │       ├── MessagesView.swift
│   │   │       └── TypingBubbleContentView.swift
│   │   ├── components
│   │   │   ├── AvatarPickerView.swift
│   │   │   ├── BorderlessTextField.swift
│   │   │   ├── DebouncedView.swift
│   │   │   ├── DidEndEditingTextField.swift
│   │   │   ├── NonEditableTextView.swift
│   │   │   ├── NumericTextFieldWithRandomSelector.swift
│   │   │   └── Sliders.swift
│   │   ├── debug
│   │   │   ├── DebugBuildBannerView.swift
│   │   │   └── ModelContextView.swift
│   │   ├── restoration
│   │   │   └── StateRestoration.swift
│   │   ├── settings
│   │   │   ├── SettingsView.swift
│   │   │   ├── SettingsWindowPresenter.swift
│   │   │   └── tabs
│   │   │       └── sources
│   │   │           ├── ConfirmSheetDeletionContentView.swift
│   │   │           ├── GeneralSettingsView.swift
│   │   │           ├── SourcesSettingsListView.swift
│   │   │           ├── SourcesSettingsView.swift
│   │   │           └── detail
│   │   │               ├── SourceSettingsParametersView.swift
│   │   │               ├── SourceSettingsPropertiesView.swift
│   │   │               └── SourcesSettingsDetailView.swift
│   │   └── sources
│   │       ├── AddSourceContentView.swift
│   │       ├── AddSourceFlowPresentationStyle.swift
│   │       ├── configure
│   │       │   ├── ConfigureLocalGgmlModelSettingsView.swift
│   │       │   ├── ConfigureLocalModelSourceView.swift
│   │       │   ├── ConfigureLocalPyTorchModelSettingsView.swift
│   │       │   ├── ConfigureSourcePrimaryActionsView.swift
│   │       │   └── components
│   │       │       ├── ConfigureLocalModelPathSelectorView.swift
│   │       │       ├── ConfigureLocalModelSelectFormatView.swift
│   │       │       └── ConfigureLocalModelSizePickerView.swift
│   │       ├── convert
│   │       │   ├── ConvertSourcePrimaryActionsView.swift
│   │       │   ├── ConvertSourceStepView.swift
│   │       │   └── ConvertSourceView.swift
│   │       └── type
│   │           ├── SelectSourceTypeView.swift
│   │           └── SourceTypeSelectionView.swift
│   ├── util
│   │   ├── FileUtils.swift
│   │   └── ModelParameterUtils.swift
│   └── viewmodel
│       ├── MainMenu.xib
│       ├── chat
│       │   ├── ChatInfoViewModel.swift
│       │   ├── ChatListViewModel.swift
│       │   ├── ChatViewModel.swift
│       │   ├── ChatWindowContentViewModel.swift
│       │   ├── avatar
│       │   │   └── AvatarViewModel.swift
│       │   ├── composer
│       │   │   └── ComposeViewModel.swift
│       │   └── messages
│       │       ├── ClearedContextMessageViewModel.swift
│       │       ├── GeneratedMessageViewModel.swift
│       │       ├── MessageViewModel.swift
│       │       ├── MessagesViewModel.swift
│       │       └── StaticMessageViewModel.swift
│       ├── debug
│       │   └── ModelContextContentViewModel.swift
│       ├── settings
│       │   ├── GeneralSettingsViewModel.swift
│       │   ├── SettingsViewModel.swift
│       │   └── sources
│       │       ├── SourceSettingsParametersViewModel.swift
│       │       ├── SourceSettingsPropertiesViewModel.swift
│       │       ├── SourcesSettingsDetailViewModel.swift
│       │       └── SourcesSettingsViewModel.swift
│       ├── sources
│       │   ├── AddSourceViewModel.swift
│       │   ├── ConfigureSourceViewModel.swift
│       │   ├── SelectSourceTypeViewModel.swift
│       │   ├── configure
│       │   │   ├── components
│       │   │   │   ├── ConfigureLocalModelPathSelectorViewModel.swift
│       │   │   │   └── ConfigureLocalModelSizePickerViewModel.swift
│       │   │   └── settings
│       │   │       ├── ConfigureLocalGgmlModelSettingsViewModel.swift
│       │   │       ├── ConfigureLocalModelSettingsViewModel.swift
│       │   │       ├── ConfigureLocalModelSourceViewModel.swift
│       │   │       └── ConfigureLocalPyTorchModelSettingsViewModel.swift
│       │   └── convert
│       │       ├── ConvertSourceStepViewModel.swift
│       │       └── ConvertSourceViewModel.swift
│       └── updates
│           └── CheckForUpdatesViewModel.swift
├── README.md
├── Resources
│   ├── banner-a5248619.png
│   ├── dmg-background.png
│   └── screenshot.png
└── Scripts
    ├── .gitignore
    ├── bump-version.sh
    └── make-dmg.sh
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: [alexrozanski]
2 |
--------------------------------------------------------------------------------
/.github/workflows/xcodebuild.yml:
--------------------------------------------------------------------------------
1 | name: Xcode - Build and Analyze
2 |
3 | on:
4 |   push:
5 |     branches: [ "main" ]
6 |   pull_request:
7 |     branches: [ "main" ]
8 |
9 | jobs:
10 |   build:
11 |     name: Xcodebuild - Build & Analyze
12 |     runs-on: macos-latest
13 |
14 |     steps:
15 |       - name: Checkout
16 |         uses: actions/checkout@v3
17 |       - name: Set Default Scheme
18 |         run: |
19 |           scheme_list=$(xcodebuild -list -json | tr -d "\n")
20 |           default=$(echo $scheme_list | ruby -e "require 'json'; puts JSON.parse(STDIN.gets)['project']['targets'][0]")
21 |           echo $default | cat >default
22 |           echo Using default scheme: $default
23 |       - name: Build
24 |         env:
25 |           scheme: ${{ 'default' }}
26 |         run: |
27 |           if [ $scheme = default ]; then scheme=$(cat default); fi
28 |           if [ "`ls -A | grep -i \\.xcworkspace\$`" ]; then filetype_parameter="workspace" && file_to_build="`ls -A | grep -i \\.xcworkspace\$`"; else filetype_parameter="project" && file_to_build="`ls -A | grep -i \\.xcodeproj\$`"; fi
29 |           file_to_build=`echo $file_to_build | awk '{$1=$1;print}'`
30 |           xcodebuild clean build analyze -scheme "$scheme" -"$filetype_parameter" "$file_to_build" | xcpretty && exit ${PIPESTATUS[0]}
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | .DS_Store
6 |
7 | ## User settings
8 | xcuserdata/
9 |
10 | LlamaChat.xcodeproj/xcshareddata/xcschemes/LlamaChat.xcscheme
11 |
12 | ## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
13 | *.xcscmblueprint
14 | *.xccheckout
15 |
16 | ## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
17 | build/
18 | DerivedData/
19 | *.moved-aside
20 | *.pbxuser
21 | !default.pbxuser
22 | *.mode1v3
23 | !default.mode1v3
24 | *.mode2v3
25 | !default.mode2v3
26 | *.perspectivev3
27 | !default.perspectivev3
28 |
29 | ## Obj-C/Swift specific
30 | *.hmap
31 |
32 | ## App packaging
33 | *.ipa
34 | *.dSYM.zip
35 | *.dSYM
36 |
37 | ## Playgrounds
38 | timeline.xctimeline
39 | playground.xcworkspace
40 |
41 | # Swift Package Manager
42 | #
43 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
44 | # Packages/
45 | # Package.pins
46 | # Package.resolved
47 | # *.xcodeproj
48 | #
49 | # Xcode automatically generates this directory with a .xcworkspacedata file and xcuserdata
50 | # hence it is not needed unless you have added a package configuration file to your project
51 | # .swiftpm
52 |
53 | .build/
54 |
55 | # CocoaPods
56 | #
57 | # We recommend against adding the Pods directory to your .gitignore. However
58 | # you should judge for yourself, the pros and cons are mentioned at:
59 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
60 | #
61 | # Pods/
62 | #
63 | # Add this line if you want to avoid checking in source code from the Xcode workspace
64 | # *.xcworkspace
65 |
66 | # Carthage
67 | #
68 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
69 | # Carthage/Checkouts
70 |
71 | Carthage/Build/
72 |
73 | # Accio dependency management
74 | Dependencies/
75 | .accio/
76 |
77 | # fastlane
78 | #
79 | # It is recommended to not store the screenshots in the git repo.
80 | # Instead, use fastlane to re-generate the screenshots whenever they are needed.
81 | # For more information about the recommended setup visit:
82 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
83 |
84 | fastlane/report.xml
85 | fastlane/Preview.html
86 | fastlane/screenshots/**/*.png
87 | fastlane/test_output
88 |
89 | # Code Injection
90 | #
91 | # After new code Injection tools there's a generated folder /iOSInjectionProject
92 | # https://github.com/johnno1962/injectionforxcode
93 |
94 | iOSInjectionProject/
95 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | alex@rozanski.me.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Alex Rozanski
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/LlamaChat.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/LlamaChat.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/LlamaChat.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "pins" : [
3 | {
4 | "identity" : "coquille",
5 | "kind" : "remoteSourceControl",
6 | "location" : "https://github.com/alexrozanski/Coquille.git",
7 | "state" : {
8 | "branch" : "v0.3",
9 | "revision" : "4250068ef7d259e0023c099be58459e5d7dbcedd"
10 | }
11 | },
12 | {
13 | "identity" : "llama.swift",
14 | "kind" : "remoteSourceControl",
15 | "location" : "https://github.com/alexrozanski/llama.swift/",
16 | "state" : {
17 | "branch" : "v2",
18 | "revision" : "67bb47170f5e32b993b3dac79176f065fc82daf5"
19 | }
20 | },
21 | {
22 | "identity" : "sparkle",
23 | "kind" : "remoteSourceControl",
24 | "location" : "https://github.com/sparkle-project/Sparkle",
25 | "state" : {
26 | "revision" : "7907f058bcef1132c9b4af6c049cac598330a5f9",
27 | "version" : "2.4.1"
28 | }
29 | },
30 | {
31 | "identity" : "sqlite.swift",
32 | "kind" : "remoteSourceControl",
33 | "location" : "https://github.com/stephencelis/SQLite.swift.git",
34 | "state" : {
35 | "revision" : "7a2e3cd27de56f6d396e84f63beefd0267b55ccb",
36 | "version" : "0.14.1"
37 | }
38 | }
39 | ],
40 | "version" : 2
41 | }
42 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "icon_16x16.png",
5 | "idiom" : "mac",
6 | "scale" : "1x",
7 | "size" : "16x16"
8 | },
9 | {
10 | "filename" : "icon_16x16@2x.png",
11 | "idiom" : "mac",
12 | "scale" : "2x",
13 | "size" : "16x16"
14 | },
15 | {
16 | "filename" : "icon_32x32.png",
17 | "idiom" : "mac",
18 | "scale" : "1x",
19 | "size" : "32x32"
20 | },
21 | {
22 | "filename" : "icon_32x32@2x.png",
23 | "idiom" : "mac",
24 | "scale" : "2x",
25 | "size" : "32x32"
26 | },
27 | {
28 | "filename" : "icon_128x128.png",
29 | "idiom" : "mac",
30 | "scale" : "1x",
31 | "size" : "128x128"
32 | },
33 | {
34 | "filename" : "icon_128x128@2x.png",
35 | "idiom" : "mac",
36 | "scale" : "2x",
37 | "size" : "128x128"
38 | },
39 | {
40 | "filename" : "icon_256x256.png",
41 | "idiom" : "mac",
42 | "scale" : "1x",
43 | "size" : "256x256"
44 | },
45 | {
46 | "filename" : "icon_256x256@2x.png",
47 | "idiom" : "mac",
48 | "scale" : "2x",
49 | "size" : "256x256"
50 | },
51 | {
52 | "filename" : "icon_512x512.png",
53 | "idiom" : "mac",
54 | "scale" : "1x",
55 | "size" : "512x512"
56 | },
57 | {
58 | "filename" : "icon_512x512@2x.png",
59 | "idiom" : "mac",
60 | "scale" : "2x",
61 | "size" : "512x512"
62 | }
63 | ],
64 | "info" : {
65 | "author" : "xcode",
66 | "version" : 1
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_128x128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_128x128.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_128x128@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_128x128@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_16x16.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_16x16@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_16x16@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_256x256.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_256x256.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_256x256@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_256x256@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_32x32.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_32x32@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_32x32@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_512x512.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_512x512@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/AppIcon.appiconset/icon_512x512@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/GroupedSelectionRowHover.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "color" : {
5 | "color-space" : "srgb",
6 | "components" : {
7 | "alpha" : "0.050",
8 | "blue" : "0.000",
9 | "green" : "0.000",
10 | "red" : "0.000"
11 | }
12 | },
13 | "idiom" : "universal"
14 | },
15 | {
16 | "appearances" : [
17 | {
18 | "appearance" : "luminosity",
19 | "value" : "dark"
20 | }
21 | ],
22 | "color" : {
23 | "color-space" : "srgb",
24 | "components" : {
25 | "alpha" : "0.050",
26 | "blue" : "1.000",
27 | "green" : "1.000",
28 | "red" : "1.000"
29 | }
30 | },
31 | "idiom" : "universal"
32 | }
33 | ],
34 | "info" : {
35 | "author" : "xcode",
36 | "version" : 1
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-1.imageset/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-1.imageset/1.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-1.imageset/1@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-1.imageset/1@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-1.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "1.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "1@2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-2.imageset/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-2.imageset/2.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-2.imageset/2@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-2.imageset/2@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-2.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "2.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "2@2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-3.imageset/3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-3.imageset/3.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-3.imageset/3@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-3.imageset/3@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-3.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "3.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "3@2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-4.imageset/4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-4.imageset/4.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-4.imageset/4@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-4.imageset/4@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-4.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "4.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "4@2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-5.imageset/5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-5.imageset/5.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-5.imageset/5@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-5.imageset/5@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-5.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "5.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "5@2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-6.imageset/6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-6.imageset/6.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-6.imageset/6@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-6.imageset/6@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-6.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "6.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "6@2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-7.imageset/7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-7.imageset/7.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-7.imageset/7@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-7.imageset/7@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-7.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "7.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "7@2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-8.imageset/8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-8.imageset/8.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-8.imageset/8@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/LlamaChat/Assets.xcassets/avatars/avatar-8.imageset/8@2x.png
--------------------------------------------------------------------------------
/LlamaChat/Assets.xcassets/avatars/avatar-8.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "8.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "filename" : "8@2x.png",
10 | "idiom" : "universal",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "scale" : "3x"
16 | }
17 | ],
18 | "info" : {
19 | "author" : "xcode",
20 | "version" : 1
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LlamaChat/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 28/03/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ContentView: View {
11 | var body: some View {
12 | VStack {
13 | Image(systemName: "globe")
14 | .imageScale(.large)
15 | .foregroundColor(.accentColor)
16 | Text("Hello, world!")
17 | }
18 | .padding()
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/LlamaChat/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>SUFeedURL</key>
6 | 	<string>https://llamachat.app/api/appcast.xml</string>
7 | 	<key>SUPublicEDKey</key>
8 | 	<string>kYcF5CTAVZ6ljL01giZNTfMtJCDUl/DUUQ5FsibYCx8=</string>
9 | </dict>
10 | </plist>
11 |
--------------------------------------------------------------------------------
/LlamaChat/LlamaChat.entitlements:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict/>
5 | </plist>
6 |
--------------------------------------------------------------------------------
/LlamaChat/LlamaChat.xcconfig:
--------------------------------------------------------------------------------
1 | CURRENT_PROJECT_VERSION = 5
2 | MARKETING_VERSION = 1.2.0
3 |
--------------------------------------------------------------------------------
/LlamaChat/LlamaChatApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CamelApp.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 02/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | enum WindowIdentifier: String {
11 | case chat
12 | case setup
13 | case modelContext
14 | }
15 |
16 | class LlamaChatAppDelegate: NSObject, NSApplicationDelegate {
17 | func applicationDidFinishLaunching(_ notification: Notification) {
18 | ModelFileManager.shared.cleanUpUnquantizedModelFiles()
19 | }
20 | }
21 |
22 | @main
23 | struct LlamaChatApp: App {
24 | @NSApplicationDelegateAdaptor var appDelegate: LlamaChatAppDelegate
25 |
26 | @StateObject var chatSources: ChatSources
27 | @StateObject var chatModels: ChatModels
28 | @StateObject var messagesModel: MessagesModel
29 | @StateObject var stateRestoration: StateRestoration
30 |
31 | @StateObject var mainChatViewModel: MainChatViewModel
32 | @StateObject var settingsViewModel: SettingsViewModel
33 | @StateObject var checkForUpdatesViewModel = CheckForUpdatesViewModel()
34 |
35 | init() {
36 | let chatSources = ChatSources()
37 | let messagesModel = MessagesModel()
38 | let chatModels = ChatModels(messagesModel: messagesModel)
39 | let stateRestoration = StateRestoration()
40 | let settingsViewModel = SettingsViewModel(chatSources: chatSources, stateRestoration: stateRestoration)
41 |
42 | _chatSources = StateObject(wrappedValue: chatSources)
43 | _chatModels = StateObject(wrappedValue: chatModels)
44 | _messagesModel = StateObject(wrappedValue: messagesModel)
45 | _stateRestoration = StateObject(wrappedValue: stateRestoration)
46 |
47 | _mainChatViewModel = StateObject(wrappedValue: MainChatViewModel(
48 | chatSources: chatSources,
49 | chatModels: chatModels,
50 | messagesModel: messagesModel,
51 | stateRestoration: stateRestoration
52 | ))
53 | _settingsViewModel = StateObject(wrappedValue: settingsViewModel)
54 |
55 | // For deeplinking
56 | SettingsWindowPresenter.shared.settingsViewModel = settingsViewModel
57 | }
58 |
59 | var body: some Scene {
60 | Settings {
61 | SettingsView(viewModel: settingsViewModel)
62 | }
63 | .windowToolbarStyle(.expanded)
64 |
65 | Window("Chat", id: WindowIdentifier.chat.rawValue) {
66 | MainChatView(viewModel: mainChatViewModel)
67 | }
68 | .commands {
69 | CommandGroup(after: .newItem) {
70 | Button("New Chat Source", action: {
71 | mainChatViewModel.presentAddSourceSheet()
72 | })
73 | .keyboardShortcut(KeyboardShortcut(KeyEquivalent("n")))
74 | }
75 | CommandGroup(after: .appInfo) {
76 | Button("Check for Updates…", action: { checkForUpdatesViewModel.checkForUpdates() })
77 | .disabled(!checkForUpdatesViewModel.canCheckForUpdates)
78 | }
79 | }
80 |
81 | WindowGroup("Model Context", id: WindowIdentifier.modelContext.rawValue, for: ChatSource.ID.self) { $chatId in
82 | ModelContextView(chatSourceId: chatId)
83 | .environmentObject(chatSources)
84 | .environmentObject(chatModels)
85 | }
86 | // Remove the File > New menu item as this should be opened programmatically.
87 | .commandsRemoved()
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/LlamaChat/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/LlamaChat/data/names.json:
--------------------------------------------------------------------------------
1 | {
2 | "llamaNames": [
3 | "Barack O'Llama",
4 | "Kendrick Llama",
5 | "Llama Del Rey",
6 | "Llamark Zuckerberg",
7 | "Llamary Poppins",
8 | "Llamanda Seyfried",
9 | "Llamark Twain",
10 | "Llamarilyn Monwool",
11 | "Llamatilda",
12 | "Llamatthew McConaughey",
13 | "Llamadonna",
14 | "Llamarlon Brando",
15 | "Llamargot Robbie",
16 | "Llamary-Kate Olsen",
17 | "Llamartha Stewart",
18 | "Llama Mia",
19 | "Drama Llama",
20 | "Sellama Gomez",
21 | ],
22 | "alpacaNames": [
23 | "Alpacachino",
24 | "Fuzz Aldrin",
25 | "Fleece Witherspoon",
26 | "Alpacalvin Harris",
27 | "Alpacameron Diaz",
28 | "Woolly Wonka",
29 | "Wool Smith",
30 | "Alpacapone",
31 | "Alpacolin Firth",
32 | "Chewpaca",
33 | "Alpacasso",
34 |
35 | ]
36 | }
37 |
--------------------------------------------------------------------------------
/LlamaChat/error/LlamaChatError.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LlamaChatError.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 08/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | struct LlamaChatError {
11 | static let domain = "com.alexrozanski.LlamaChat.error"
12 |
13 | enum Code: Int {
14 | case failedToExecuteConversionStep = -1000
15 | }
16 |
17 | private init() {}
18 | }
19 |
--------------------------------------------------------------------------------
/LlamaChat/extensions/NSScrollView+Bottom.swift:
--------------------------------------------------------------------------------
1 | //
2 | // NSScrollView+Bottom.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 08/04/2023.
6 | //
7 |
8 | import AppKit
9 |
10 | extension NSScrollView {
11 | func isScrolledToBottom() -> Bool {
12 | return contentView.bounds.origin.y == 0
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/LlamaChat/extensions/ProcessInfo+Threads.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ProcessInfo+Threads.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 17/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | extension ProcessInfo {
11 | var defaultThreadCount: Int {
12 | let activeProcessorCount = ProcessInfo.processInfo.activeProcessorCount
13 | if activeProcessorCount < 4 {
14 | return activeProcessorCount
15 | }
16 |
17 | return max(ProcessInfo.processInfo.activeProcessorCount - 2, 4)
18 | }
19 |
20 | var threadCountRange: ClosedRange<Int> {
21 | // In practice we should be running on 4+ cores but have this as a fallback
22 | // just in case.
23 | let activeProcessorCount = ProcessInfo.processInfo.activeProcessorCount
24 | if activeProcessorCount < 4 {
25 | return 1...activeProcessorCount
26 | }
27 |
28 | return 4...activeProcessorCount
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/LlamaChat/extensions/Text+TextSelection.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Text+TextSelection.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 12/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct EnabledTextSelection: ViewModifier {
11 | func body(content: Content) -> some View {
12 | content
13 | .textSelection(.enabled)
14 | }
15 | }
16 |
17 | struct DisabledTextSelection: ViewModifier {
18 | func body(content: Content) -> some View {
19 | content
20 | .textSelection(.disabled)
21 | }
22 | }
23 |
24 | extension View {
25 | @ViewBuilder func textSelectionEnabled(_ flag: Bool) -> some View {
26 | if flag {
27 | modifier(EnabledTextSelection())
28 | } else {
29 | modifier(DisabledTextSelection())
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/LlamaChat/model/chat/MessagesModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessagesModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 | import SQLite
10 |
11 | class MessagesModel: ObservableObject {
12 | private lazy var databaseURL: URL? = {
13 | return applicationSupportDirectoryURL()?.appending(path: "messages.db")
14 | }()
15 |
16 | private lazy var db: Connection? = {
17 | return databaseURL.flatMap {
18 | do {
19 | return try Connection($0.path)
20 | } catch {
21 | print("Error getting DB connection:", error)
22 | return nil
23 | }
24 | }
25 | }()
26 |
27 | private let chatSourcesTable = Table("chat_sources")
28 | private let messagesTable = Table("messages")
29 |
30 | private let idColumn = Expression<Int64>("id")
31 |
32 | private let chatIdColumn = Expression<String>("chat_id")
33 |
34 | private let seqColumn = Expression<Int64>("seq")
35 | private let messageTypeColumn = Expression<Int>("message_type")
36 | private let chatSourceIdColumn = Expression<Int64>("chat_source_id")
37 | private let isMeColumn = Expression<Bool>("is_me")
38 | private let messageColumn = Expression<String>("message")
39 | private let sendDateColumn = Expression<Date>("send_date")
40 | private let isErrorColumn = Expression<Bool>("is_error")
41 |
42 | private func setUpSchema() {
43 | do {
44 | try db?.run(chatSourcesTable.create(ifNotExists: true) { t in
45 | t.column(idColumn, primaryKey: true)
46 | t.column(chatIdColumn, unique: true)
47 | })
48 |
49 | try db?.run(messagesTable.create(ifNotExists: true) { t in
50 | t.column(idColumn, primaryKey: true)
51 | t.column(seqColumn)
52 | t.column(messageTypeColumn)
53 | t.column(chatSourceIdColumn)
54 | t.column(isMeColumn)
55 | t.column(messageColumn)
56 | t.column(sendDateColumn)
57 | t.column(isErrorColumn)
58 | })
59 | } catch {
60 | print(error)
61 | }
62 | }
63 |
64 | init() {
65 | setUpSchema()
66 | }
67 |
68 | func loadMessages(from chatSource: ChatSource) -> [Message] {
69 | do {
70 | guard let db else { return [] }
71 |
72 | guard let chatSourceId = try getId(for: chatSource) else {
73 | return []
74 | }
75 |
76 | let messagesQuery = try db.prepare(
77 | messagesTable
78 | .where(chatSourceIdColumn == chatSourceId)
79 | .order(seqColumn.asc)
80 | )
81 |
82 | var messages = [Message]()
83 | for message in messagesQuery {
84 | let isMe = message[isMeColumn]
85 | let type = MessageType(rawValue: message[messageTypeColumn]) ?? .message
86 |
87 | switch type {
88 | case .message:
89 | messages.append(
90 | StaticMessage(
91 | content: message[messageColumn],
92 | sender: isMe ? .me : .other,
93 | sendDate: message[sendDateColumn],
94 | isError: message[isErrorColumn]
95 | )
96 | )
97 | case .clearedContext:
98 | messages.append(
99 | ClearedContextMessage(sendDate: message[sendDateColumn])
100 | )
101 | }
102 | }
103 | return messages
104 | } catch {
105 | print(error)
106 | return []
107 | }
108 | }
109 |
110 | func append(message: Message, in chatSource: ChatSource) {
111 | do {
112 | guard
113 | let db,
114 | let chatSourceId = try insertChatSourceIfNeeded(chatSource)
115 | else { return }
116 |
117 | let lastSeq = try db.scalar(messagesTable.select(seqColumn.max).where(chatSourceIdColumn == chatSourceId)) ?? 0
118 | let insert = messagesTable.insert(
119 | seqColumn <- lastSeq + 1,
120 | messageTypeColumn <- message.messageType.rawValue,
121 | chatSourceIdColumn <- chatSourceId,
122 | isMeColumn <- message.sender.isMe,
123 | messageColumn <- message.content,
124 | sendDateColumn <- message.sendDate,
125 | isErrorColumn <- message.isError
126 | )
127 | _ = try db.run(insert)
128 | } catch {
129 | print(error)
130 | }
131 | }
132 |
133 | func clearMessages(for chatSource: ChatSource) {
134 | do {
135 | guard let db, let chatSourceId = try getId(for: chatSource) else { return }
136 |
137 | let delete = messagesTable.filter(chatSourceIdColumn == chatSourceId).delete()
138 | _ = try db.run(delete)
139 | } catch {
140 | print(error)
141 | }
142 | }
143 |
144 | private func getId(for chatSource: ChatSource) throws -> Int64? {
145 | guard let db else { return nil }
146 | return try db.pluck(chatSourcesTable.select(idColumn).where(chatIdColumn == chatSource.id))?[idColumn]
147 | }
148 |
149 | private func insertChatSourceIfNeeded(_ chatSource: ChatSource) throws -> Int64? {
150 | guard let db else { return nil }
151 | if let existingId = try getId(for: chatSource) {
152 | return existingId
153 | }
154 |
155 | return try db.run(chatSourcesTable.insert(chatIdColumn <- chatSource.id))
156 | }
157 | }
158 |
--------------------------------------------------------------------------------
/LlamaChat/model/chat/Sender.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Sender.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 31/03/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | enum Sender {
11 | case me
12 | case other
13 |
14 | var isMe: Bool {
15 | switch self {
16 | case .me: return true
17 | case .other: return false
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/LlamaChat/model/chat/messages/ClearedContextMessage.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ClearedContextMessage.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 03/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | class ClearedContextMessage: Message {
11 | let id = UUID()
12 | var messageType: MessageType { return .clearedContext }
13 | var content: String { return "" }
14 | var sender: Sender { return .other }
15 | let sendDate: Date
16 | var isError: Bool { return false }
17 |
18 | init(sendDate: Date) {
19 | self.sendDate = sendDate
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/LlamaChat/model/chat/messages/GeneratedMessage.swift:
--------------------------------------------------------------------------------
1 | //
2 | // GeneratedMessage.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 31/03/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class GeneratedMessage: ObservableObject, Message {
12 | typealias CancellationHandler = () -> Void
13 |
14 | var messageType: MessageType { return .message }
15 |
16 | let id = UUID()
17 | private(set) var content: String = "" {
18 | didSet {
19 | contentDidChange.send()
20 | }
21 | }
22 | let contentDidChange = PassthroughSubject<Void, Never>()
23 | let sender: Sender
24 | let sendDate: Date
25 |
26 | @Published var isError = false
27 |
28 | @Published private(set) var state: MessageGenerationState = .none {
29 | didSet {
30 | isError = state.isError
31 | }
32 | }
33 |
34 | var cancellationHandler: CancellationHandler?
35 |
36 | init(sender: Sender, sendDate: Date) {
37 | self.sender = sender
38 | self.sendDate = sendDate
39 | }
40 |
41 | func update(contents: String) {
42 | content = contents
43 | }
44 |
45 | func append(contents: String) {
46 | if content.isEmpty {
47 | content = contents.trimmingCharactersInCharacterSetFromPrefix(.whitespacesAndNewlines)
48 | } else {
49 | content += contents
50 | }
51 | }
52 |
53 | func updateState(_ state: MessageGenerationState) {
54 | self.state = state
55 | }
56 |
57 | func cancelGeneration() {
58 | cancellationHandler?()
59 | }
60 | }
61 |
62 | private extension String {
63 | func trimmingCharactersInCharacterSetFromPrefix(_ characterSet: CharacterSet) -> String {
64 | return String(trimmingPrefix(while: { character in character.unicodeScalars.allSatisfy { scalar in characterSet.contains(scalar) } }))
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/LlamaChat/model/chat/messages/Message.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Message.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 26/03/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | enum MessageType: Int {
12 | case message = 1
13 | // Implement this as a message just to make this easier
14 | case clearedContext = 2
15 |
16 | var isClearedContext: Bool {
17 | switch self {
18 | case .message:
19 | return false
20 | case .clearedContext:
21 | return true
22 | }
23 | }
24 | }
25 |
26 | protocol Message {
27 | var id: UUID { get }
28 | var messageType: MessageType { get }
29 | var sender: Sender { get }
30 | var content: String { get }
31 | var sendDate: Date { get }
32 | var isError: Bool { get }
33 | }
34 |
--------------------------------------------------------------------------------
/LlamaChat/model/chat/messages/MessageGenerationState.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessageGenerationState.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | enum MessageGenerationState {
11 | case none
12 | case waiting
13 | case generating
14 | case cancelled
15 | case finished
16 | case error(Error)
17 |
18 | var isError: Bool {
19 | switch self {
20 | case .none, .generating, .finished, .cancelled, .waiting:
21 | return false
22 | case .error:
23 | return true
24 | }
25 | }
26 |
27 | var isWaiting: Bool {
28 | switch self {
29 | case .none, .generating, .finished, .cancelled, .error:
30 | return false
31 | case .waiting:
32 | return true
33 | }
34 | }
35 |
36 | var isFinished: Bool {
37 | switch self {
38 | case .none, .waiting, .generating, .cancelled, .error:
39 | return false
40 | case .finished:
41 | return true
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/LlamaChat/model/chat/messages/StaticMessage.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StaticMessage.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 31/03/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class StaticMessage: Message {
12 | var messageType: MessageType { return .message }
13 |
14 | let id = UUID()
15 | let content: String
16 | let sender: Sender
17 | let sendDate: Date
18 | let isError: Bool
19 |
20 | init(content: String, sender: Sender, sendDate: Date, isError: Bool) {
21 | self.content = content
22 | self.sender = sender
23 | self.sendDate = sendDate
24 | self.isError = isError
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/LlamaChat/model/models/ModelFileManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelFileManager.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 10/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | class ModelDirectory {
11 | typealias ID = String
12 |
13 | let id: ID
14 | let url: URL
15 |
16 | private var hasCleanedUp = false
17 |
18 | fileprivate init(id: ID, url: URL) {
19 | self.id = id
20 | self.url = url
21 | }
22 |
23 | func cleanUp() {
24 | do {
25 | guard !hasCleanedUp else { return }
26 |
27 | try FileManager.default.removeItem(at: url)
28 | hasCleanedUp = true
29 | } catch {
30 | print("WARNING: failed to clean up model directory")
31 | }
32 | }
33 | }
34 |
35 | class ModelFileManager {
36 | static let shared = ModelFileManager()
37 |
38 | private init() {}
39 |
40 | private var modelsDirectoryURL: URL? {
41 | return applicationSupportDirectoryURL()?.appendingPathComponent("models")
42 | }
43 |
44 | func modelDirectory(with id: ModelDirectory.ID) -> ModelDirectory? {
45 | guard let modelDirectory = modelDirectoryURL(for: id) else { return nil }
46 | return ModelDirectory(id: id, url: modelDirectory)
47 | }
48 |
49 | func makeNewModelDirectory() throws -> ModelDirectory? {
50 | let id = UUID().uuidString
51 | guard let modelDirectory = modelDirectoryURL(for: id) else { return nil }
52 | try FileManager.default.createDirectory(at: modelDirectory, withIntermediateDirectories: true)
53 |
54 | return ModelDirectory(id: id, url: modelDirectory)
55 | }
56 |
57 | private func modelDirectoryURL(for id: ModelDirectory.ID) -> URL? {
58 | guard let modelsDirectory = modelsDirectoryURL else { return nil }
59 | return modelsDirectory.appendingPathComponent(id, isDirectory: true)
60 | }
61 |
62 | // Fixes any issues caused by https://github.com/alexrozanski/LlamaChat/issues/10
63 | func cleanUpUnquantizedModelFiles() {
64 | guard let modelsDirectoryURL else { return }
65 |
66 | let enumerator = FileManager.default.enumerator(at: modelsDirectoryURL, includingPropertiesForKeys: nil, options: [])
67 | enumerator?.forEach { itemURL in
68 | guard let itemURL = itemURL as? URL else { return }
69 |
70 | // This is hardcoded by the conversion script
71 | let unquantizedModelName = "ggml-model-f16.bin"
72 |
73 | if itemURL.lastPathComponent == unquantizedModelName {
74 | do {
75 | try FileManager.default.removeItem(at: itemURL)
76 | } catch {
77 | print("WARNING: Couldn't clean up unquantized model at", itemURL)
78 | }
79 | }
80 | }
81 | }
82 | }
83 |
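// A minimal usage sketch for ModelFileManager (illustrative; the surrounding call sites are assumptions):
//
//   let directory = try ModelFileManager.shared.makeNewModelDirectory()
//   // ...copy or convert model files into directory.url...
//   directory?.cleanUp() // removes the directory again, e.g. if conversion fails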
--------------------------------------------------------------------------------
/LlamaChat/model/models/ModelParameters.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelParameters.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 20/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | class ModelParameters: ObservableObject, Codable {
11 | @Published var seedValue: Int32?
12 |
13 | @Published var contextSize: UInt
14 | @Published var numberOfTokens: UInt
15 |
16 | @Published var topP: Double
17 | @Published var topK: UInt
18 | @Published var temperature: Double
19 | @Published var batchSize: UInt
20 |
21 | @Published var lastNTokensToPenalize: UInt
22 | @Published var repeatPenalty: Double
23 |
24 | enum CodingKeys: CodingKey {
25 | case seedValue
26 | case contextSize
27 | case numberOfTokens
28 | case topP
29 | case topK
30 | case temperature
31 | case batchSize
32 | case lastNTokensToPenalize
33 | case repeatPenalty
34 | }
35 |
36 | init(
37 | seedValue: Int32?,
38 | contextSize: UInt,
39 | numberOfTokens: UInt,
40 | topP: Double,
41 | topK: UInt,
42 | temperature: Double,
43 | batchSize: UInt,
44 | lastNTokensToPenalize: UInt,
45 | repeatPenalty: Double
46 | ) {
47 | self.seedValue = seedValue
48 | self.contextSize = contextSize
49 | self.numberOfTokens = numberOfTokens
50 | self.topP = topP
51 | self.topK = topK
52 | self.temperature = temperature
53 | self.batchSize = batchSize
54 | self.lastNTokensToPenalize = lastNTokensToPenalize
55 | self.repeatPenalty = repeatPenalty
56 | }
57 |
58 | required init(from decoder: Decoder) throws {
59 | let values = try decoder.container(keyedBy: CodingKeys.self)
60 | seedValue = try values.decode(Int32?.self, forKey: .seedValue)
61 | contextSize = try values.decode(UInt.self, forKey: .contextSize)
62 | numberOfTokens = try values.decode(UInt.self, forKey: .numberOfTokens)
63 | topP = try values.decode(Double.self, forKey: .topP)
64 | topK = try values.decode(UInt.self, forKey: .topK)
65 | temperature = try values.decode(Double.self, forKey: .temperature)
66 | batchSize = try values.decode(UInt.self, forKey: .batchSize)
67 | lastNTokensToPenalize = try values.decode(UInt.self, forKey: .lastNTokensToPenalize)
68 | repeatPenalty = try values.decode(Double.self, forKey: .repeatPenalty)
69 | }
70 |
71 | func encode(to encoder: Encoder) throws {
72 | var container = encoder.container(keyedBy: CodingKeys.self)
73 | try container.encode(seedValue, forKey: .seedValue)
74 | try container.encode(contextSize, forKey: .contextSize)
75 | try container.encode(numberOfTokens, forKey: .numberOfTokens)
76 | try container.encode(topP, forKey: .topP)
77 | try container.encode(topK, forKey: .topK)
78 | try container.encode(temperature, forKey: .temperature)
79 | try container.encode(batchSize, forKey: .batchSize)
80 | try container.encode(lastNTokensToPenalize, forKey: .lastNTokensToPenalize)
81 | try container.encode(repeatPenalty, forKey: .repeatPenalty)
82 | }
83 | }
84 |
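// Note: the manual CodingKeys, init(from:) and encode(to:) above are required because each property
// is wrapped in `Published<Value>` by @Published, which is not itself Codable, so conformance cannot
// be synthesized. A round-trip then works as usual (sketch):
//
//   let data = try JSONEncoder().encode(parameters)
//   let restored = try JSONDecoder().decode(ModelParameters.self, from: data)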
--------------------------------------------------------------------------------
/LlamaChat/model/settings/AppSettings.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppSettings.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 15/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | fileprivate class SerializedAppSettingsPayload: SerializedPayload<AppSettings> {
11 | override class var valueKey: String? { return "settings" }
12 | override class var currentPayloadVersion: Int { return 1 }
13 | }
14 |
15 | class AppSettings: ObservableObject, Codable {
16 | // Number of threads to run prediction on.
17 | @Published var numThreads: Int {
18 | didSet {
19 | persistSettings()
20 | }
21 | }
22 |
23 | enum CodingKeys: CodingKey {
24 | case numThreads
25 | }
26 |
27 | static let shared: AppSettings = {
28 | guard
29 | let settingsFileURL = persistedFileURL,
30 | FileManager.default.fileExists(atPath: settingsFileURL.path)
31 | else {
32 | return makeDefaultSettings()
33 | }
34 |
35 | do {
36 | let jsonData = try Data(contentsOf: settingsFileURL)
37 | let payload = try JSONDecoder().decode(SerializedAppSettingsPayload.self, from: jsonData)
38 | return payload.value
39 | } catch {
40 |       print("Error loading settings:", error)
41 | return makeDefaultSettings()
42 | }
43 | }()
44 |
45 | fileprivate init(numThreads: Int) {
46 | self.numThreads = numThreads
47 | }
48 |
49 | required init(from decoder: Decoder) throws {
50 | let values = try decoder.container(keyedBy: CodingKeys.self)
51 | numThreads = try values.decode(Int.self, forKey: .numThreads)
52 | }
53 |
54 | func encode(to encoder: Encoder) throws {
55 | var container = encoder.container(keyedBy: CodingKeys.self)
56 | try container.encode(numThreads, forKey: .numThreads)
57 | }
58 |
59 | // MARK: - Persistence
60 |
61 | private static var persistedFileURL: URL? {
62 | return applicationSupportDirectoryURL()?.appending(path: "appSettings.json")
63 | }
64 |
65 | private func persistSettings() {
66 | guard let persistedFileURL = type(of: self).persistedFileURL else { return }
67 |
68 | let jsonEncoder = JSONEncoder()
69 | do {
70 | let json = try jsonEncoder.encode(SerializedAppSettingsPayload(value: self))
71 | try json.write(to: persistedFileURL)
72 | } catch {
73 | print("Error persisting settings:", error)
74 | }
75 | }
76 | }
77 |
78 | private func makeDefaultSettings() -> AppSettings {
79 | return AppSettings(numThreads: ProcessInfo.processInfo.defaultThreadCount)
80 | }
81 |
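// Usage sketch (illustrative): any mutation persists automatically via the didSet observer above.
//
//   AppSettings.shared.numThreads = 4 // written straight back to appSettings.json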
--------------------------------------------------------------------------------
/LlamaChat/model/sources/ChatSourceType.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatSourceType.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 30/03/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | enum ChatSourceType: String, Codable {
11 | case llama = "llama"
12 | case alpaca = "alpaca"
13 | case gpt4All = "gpt4all"
14 |
15 | var readableName: String {
16 | switch self {
17 | case .llama: return "LLaMA"
18 | case .alpaca: return "Alpaca"
19 | case .gpt4All: return "GPT4All"
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LlamaChat/model/sources/ModelSize.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelSize.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 03/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | enum ModelSize: String, Codable, Hashable, Identifiable {
11 | case unknown
12 | case size7B
13 | case size13B
14 | case size30B
15 | case size65B
16 |
17 | var id: String {
18 | return rawValue
19 | }
20 |
21 | var isUnknown: Bool {
22 | switch self {
23 | case .unknown:
24 | return true
25 | case .size7B, .size13B, .size30B, .size65B:
26 | return false
27 | }
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/LlamaChat/model/sources/SourceNameGenerator.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourceNameGenerator.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 03/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | fileprivate struct Names: Decodable {
11 | let llamaNames: [String]
12 | let alpacaNames: [String]
13 | }
14 |
15 | class SourceNameGenerator {
16 | private lazy var names: Names? = {
17 | guard let fileURL = Bundle.main.url(forResource: "names", withExtension: "json") else { return nil }
18 |
19 | do {
20 | let data = try Data(contentsOf: fileURL)
21 | return try JSONDecoder().decode(Names.self, from: data)
22 | } catch {
23 | print("Error loading source names:", error)
24 | return nil
25 | }
26 | }()
27 |
28 | var canGenerateNames: Bool {
29 | return names != nil
30 | }
31 |
32 | func generateName(for sourceType: ChatSourceType) -> String? {
33 | guard let names else { return nil }
34 |
35 | switch sourceType {
36 | case .llama:
37 | return names.llamaNames.randomElement()
38 | case .alpaca:
39 | return names.alpacaNames.randomElement()
40 | case .gpt4All:
41 | var all = names.alpacaNames
42 | all.append(contentsOf: names.llamaNames)
43 | return all.randomElement()
44 | }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/LlamaChat/persistence/SerializedPayload.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SerializedPayload.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 21/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | // Defines a Codable payload type which stores some nested Codable data alongside information
11 | // about the payload itself as well as the app version which created it.
12 | //
13 | // This is useful if an older version of LlamaChat opens data that was serialized by a newer version,
14 | // since we can handle these cases more gracefully, including displaying which app version serialized
15 | // the data.
16 | class SerializedPayload<T>: Codable where T: Codable {
17 | let value: T
18 | let payloadVersion: Int
19 | // Info about the version of the app which wrote this payload.
20 | let serializingAppVersion: Int?
21 | let serializingAppShortVersionString: String?
22 |
23 | // Override these in subclasses.
24 | class var valueKey: String? { return nil }
25 | class var currentPayloadVersion: Int { return -1 }
26 |
27 |   // Unfortunately, because the `value` key is dynamic, we have to implement this struct ourselves.
28 | struct CodingKeys: CodingKey {
29 | enum Key {
30 | case value(String?)
31 | case payloadVersion
32 | case serializingAppVersion
33 | case serializingAppShortVersionString
34 |
35 | init(string: String) {
36 | switch string {
37 | case "payloadVersion": self = .payloadVersion
38 | case "serializingAppVersion": self = .serializingAppVersion
39 | case "serializingAppShortVersionString": self = .serializingAppShortVersionString
40 | // This only works because there is one non-static key.
41 | default: self = .value(string)
42 | }
43 | }
44 |
45 | var stringValue: String {
46 | switch self {
47 | case .value(let key): return key ?? "value"
48 | case .payloadVersion: return "payloadVersion"
49 | case .serializingAppVersion: return "serializingAppVersion"
50 | case .serializingAppShortVersionString: return "serializingAppShortVersionString"
51 | }
52 | }
53 | }
54 |
55 | var key: Key
56 | static func key(_ key: Key) -> CodingKeys {
57 | return self.init(stringValue: key.stringValue)
58 | }
59 |
60 | // `CodingKey` cruft
61 | var intValue: Int? { return nil }
62 | init(stringValue: String) { self.key = .init(string: stringValue) }
63 | init?(intValue: Int) { return nil }
64 | var stringValue: String {
65 | return key.stringValue
66 | }
67 | }
68 |
69 | init(value: T) {
70 | self.value = value
71 | self.payloadVersion = type(of: self).currentPayloadVersion
72 | self.serializingAppVersion = (Bundle.main.infoDictionary?["CFBundleVersion"] as? String).flatMap { Int($0) }
73 | self.serializingAppShortVersionString = Bundle.main.infoDictionary?["CFBundleShortVersionString"] as? String
74 | }
75 |
76 | required init(from decoder: Decoder) throws {
77 | let container = try decoder.container(keyedBy: CodingKeys.self)
78 | self.value = try container.decode(T.self, forKey: .key(.value(type(of: self).valueKey)))
79 | let payloadVersion = try container.decodeIfPresent(Int.self, forKey: .key(.payloadVersion))
80 | // TODO: handle any upgrade work here?
81 | self.payloadVersion = payloadVersion ?? type(of: self).currentPayloadVersion
82 | self.serializingAppVersion = try container.decodeIfPresent(Int.self, forKey: .key(.serializingAppVersion))
83 | self.serializingAppShortVersionString = try container.decodeIfPresent(String.self, forKey: .key(.serializingAppShortVersionString))
84 | }
85 |
86 | func encode(to encoder: Encoder) throws {
87 | var container = encoder.container(keyedBy: CodingKeys.self)
88 | try container.encode(value, forKey: .key(.value(type(of: self).valueKey)))
89 | try container.encode(payloadVersion, forKey: .key(.payloadVersion))
90 | try container.encode(serializingAppVersion, forKey: .key(.serializingAppVersion))
91 | try container.encode(serializingAppShortVersionString, forKey: .key(.serializingAppShortVersionString))
92 | }
93 | }
94 |
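// For a subclass with `valueKey == "settings"` (like SerializedAppSettingsPayload above), the encoded
// JSON is shaped roughly as follows (illustrative values):
//
//   {
//     "settings": { ... },                          // the wrapped Codable value
//     "payloadVersion": 1,
//     "serializingAppVersion": 42,
//     "serializingAppShortVersionString": "1.1"
//   }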
--------------------------------------------------------------------------------
/LlamaChat/ui/chat/ChatListView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatListView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 02/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | fileprivate struct ItemView: View {
11 | @ObservedObject var viewModel: ChatListItemViewModel
12 |
13 | var body: some View {
14 | HStack {
15 | AvatarView(viewModel: viewModel.avatarViewModel, size: .medium)
16 | VStack(alignment: .leading, spacing: 4) {
17 | Text(viewModel.title)
18 | .font(.system(size: 13, weight: .semibold))
19 | Text(viewModel.modelDescription)
20 | .font(.system(size: 11))
21 | }
22 | }
23 | .padding(8)
24 | .contextMenu {
25 | Button("Configure...") {
26 | SettingsWindowPresenter.shared.present(deeplinkingTo: .sources(sourceId: viewModel.id, sourcesTab: .properties))
27 | }
28 | Divider()
29 | Button("Remove...") {
30 | viewModel.remove()
31 | }
32 | }
33 | }
34 | }
35 |
36 |
37 | struct ChatListView: View {
38 | @ObservedObject var viewModel: ChatListViewModel
39 |
40 | var body: some View {
41 | let selectionBinding = Binding(
42 | get: { viewModel.selectedSourceId },
43 | set: { viewModel.selectSource(with: $0) }
44 | )
45 | HStack {
46 | List(viewModel.items, id: \.id, selection: selectionBinding) { source in
47 | ItemView(viewModel: source)
48 | }
49 | }
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/LlamaChat/ui/chat/ChatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatView.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 26/03/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ChatView: View {
11 | var viewModel: ChatViewModel
12 |
13 | @State var presentingInfo = false
14 |
15 | var body: some View {
16 | VStack(spacing: 0) {
17 | MessagesView(viewModel: viewModel.messagesViewModel)
18 | ComposeView(viewModel: viewModel.composeViewModel)
19 | }
20 | .toolbar {
21 | Button {
22 | presentingInfo = true
23 | } label: { Image(systemName: "info.circle")}
24 | .popover(isPresented: $presentingInfo, arrowEdge: .bottom) {
25 | ChatInfoView(viewModel: viewModel.infoViewModel)
26 | }
27 | }
28 | .navigationTitle("\(viewModel.sourceName)")
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/LlamaChat/ui/chat/MainChatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MainChatView.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 26/03/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct MainChatView: View {
11 | @ObservedObject var viewModel: MainChatViewModel
12 |
13 | @State var initialWidth: Double?
14 | @State var selectedChatViewModel: ChatViewModel?
15 |
16 | init(viewModel: MainChatViewModel) {
17 | self.viewModel = viewModel
18 | _initialWidth = State(wrappedValue: viewModel.sidebarWidth)
19 | }
20 |
21 | @ViewBuilder var list: some View {
22 | GeometryReader { geometry in
23 | ChatListView(viewModel: viewModel.chatListViewModel)
24 | .overlay(
25 | Color.clear
26 | .onChange(of: geometry.size.width) { newWidth in
27 | viewModel.sidebarWidth = newWidth
28 | }
29 | )
30 | .toolbar {
31 | Spacer()
32 | Button {
33 | viewModel.presentAddSourceSheet()
34 | } label: {
35 | Image(systemName: "square.and.pencil")
36 | }
37 | }
38 | }
39 | }
40 |
41 | var body: some View {
42 | NavigationSplitView {
43 | if let initialWidth {
44 | list
45 | .frame(width: initialWidth)
46 | } else {
47 | list
48 | }
49 | } detail: {
50 | if let viewModel = selectedChatViewModel {
51 | ChatView(viewModel: viewModel)
52 | .id(viewModel.sourceId)
53 | }
54 | }
55 | .sheet(isPresented: $viewModel.sheetPresented) {
56 | if let viewModel = viewModel.sheetViewModel as? ConfirmDeleteSourceSheetViewModel {
57 | ConfirmDeleteSourceSheetContentView(viewModel: viewModel)
58 | } else if let viewModel = viewModel.sheetViewModel as? AddSourceViewModel {
59 | AddSourceContentView(viewModel: viewModel)
60 | .interactiveDismissDisabled()
61 | }
62 | }
63 | .onAppear {
64 | initialWidth = nil
65 | selectedChatViewModel = viewModel.selectedSourceId.flatMap { viewModel.makeChatViewModel(for: $0) }
66 | viewModel.presentAddSourceSheetIfNeeded()
67 | }
68 | .onChange(of: viewModel.selectedSourceId) { newSourceId in
69 | selectedChatViewModel = newSourceId.flatMap { viewModel.makeChatViewModel(for: $0) }
70 | }
71 | .onChange(of: viewModel.sheetPresented) { isPresented in
72 | if !isPresented {
73 | viewModel.sheetViewModel = nil
74 | }
75 | }
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/LlamaChat/ui/chat/avatar/AvatarView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AvatarView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 03/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct AvatarView: View {
11 | enum Size {
12 | case medium
13 | case large
14 |
15 | var sideLength: Double {
16 | switch self {
17 | case .medium: return 40
18 | case .large: return 48
19 | }
20 | }
21 |
22 | var textSideLength: Double {
23 | switch self {
24 | case .medium: return 30
25 | case .large: return 35
26 | }
27 | }
28 |
29 | var fontSize: Double {
30 | switch self {
31 | case .medium: return 20
32 | case .large: return 24
33 | }
34 | }
35 | }
36 |
37 | @ObservedObject var viewModel: AvatarViewModel
38 | var size: Size
39 |
40 | @ViewBuilder var avatarView: some View {
41 | switch viewModel.avatar {
42 | case .initials(let initials):
43 | Circle()
44 | .fill(.gray)
45 | .frame(width: size.sideLength, height: size.sideLength)
46 | .overlay {
47 | Text(initials)
48 | .font(.system(size: size.fontSize))
49 | .lineLimit(1)
50 | .minimumScaleFactor(0.5)
51 | .foregroundColor(.white)
52 | .frame(width: size.textSideLength, height: size.textSideLength)
53 | }
54 | case .image(named: let name):
55 | Image(name)
56 | .resizable()
57 | .frame(width: size.sideLength, height: size.sideLength)
58 | }
59 | }
60 |
61 | var body: some View {
62 | avatarView
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/LlamaChat/ui/chat/composer/ComposeView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ComposeView.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 26/03/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ComposeView: View {
11 | @ObservedObject var viewModel: ComposeViewModel
12 |
13 | @FocusState private var isFocused: Bool
14 |
15 | @ViewBuilder var textField: some View {
16 | let messageEmpty = viewModel.text.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty
17 | HStack(spacing: 4) {
18 | BorderlessTextField("Chat here...", text: $viewModel.text)
19 | .focused($isFocused)
20 | .disabled(!viewModel.allowedToCompose)
21 | .padding(.vertical, 4)
22 | if !messageEmpty {
23 | Button(action: {
24 | viewModel.send(message: viewModel.text)
25 | }, label: {
26 | Image(systemName: "arrow.up")
27 | .padding(3)
28 | .foregroundColor(.white)
29 | .background(.blue)
30 | .clipShape(Circle())
31 | })
32 | .buttonStyle(BorderlessButtonStyle())
33 | .keyboardShortcut(.return, modifiers: [])
34 | }
35 | }
36 | .padding(.vertical, 2)
37 | .padding(.leading, 10)
38 | .padding(.trailing, 5)
39 | .background(
40 | RoundedRectangle(cornerRadius: 15)
41 | .fill(Color(nsColor: .controlBackgroundColor))
42 | .overlay {
43 | RoundedRectangle(cornerRadius: 15)
44 | .stroke(Color(nsColor: .separatorColor))
45 | }
46 | )
47 | }
48 |
49 | var body: some View {
50 | HStack {
51 | if viewModel.canClearContext {
52 | Button(action: {
53 | viewModel.clearContext()
54 | }, label: {
55 | Image(systemName: "eraser.line.dashed")
56 | .resizable()
57 | .scaledToFit()
58 | .frame(width: 20, height: 20)
59 | })
60 | .buttonStyle(BorderlessButtonStyle())
61 | .help("Clear model context")
62 | }
63 | textField
64 | }
65 | .padding()
66 | .background(Color(nsColor: .controlBackgroundColor))
67 | .onAppear {
68 | isFocused = true
69 | }
70 | .onChange(of: viewModel.allowedToCompose) { newValue in
71 | if newValue {
72 | isFocused = true
73 | }
74 | }
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
/LlamaChat/ui/chat/messages/ClearedContextMessageView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ClearedContextMessageView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 05/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct LineView: View {
11 | var body: some View {
12 | Rectangle()
13 | .fill(.separator)
14 | .frame(height: 1)
15 | }
16 | }
17 |
18 | struct ClearedContextMessageView: View {
19 | let viewModel: ClearedContextMessageViewModel
20 |
21 | var body: some View {
22 | HStack(spacing: 8) {
23 | LineView()
24 | Text("Chat context cleared")
25 | .foregroundColor(.gray)
26 | .font(.footnote)
27 | LineView()
28 | }
29 | .padding(.vertical)
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/LlamaChat/ui/chat/messages/GeneratedMessageView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // GeneratedMessageView.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct GeneratedMessageView: View {
11 | @ObservedObject var viewModel: GeneratedMessageViewModel
12 |
13 | let availableWidth: Double?
14 |
15 | var body: some View {
16 | VStack(alignment: .leading) {
17 | HStack {
18 | DebouncedView(isVisible: showStopButton, delay: 0.5, animation: .easeInOut(duration: 0.1)) {
19 | Button(action: {
20 | viewModel.stopGeneratingContent()
21 | }) {
22 | Text("Stop generating response")
23 | .font(.footnote)
24 | .foregroundColor(.blue)
25 | }
26 | .buttonStyle(BorderlessButtonStyle())
27 | .transition(.asymmetric(insertion: .scale(scale: 0.5), removal: .move(edge: .top)))
28 | }
29 | }
30 | switch viewModel.state {
31 | case .none:
32 | EmptyView()
33 | case .waiting:
34 | TypingBubbleView()
35 | case .generating, .finished, .cancelled, .error:
36 | MessageBubbleView(sender: viewModel.sender, style: viewModel.state.isWaiting ? .typing: .regular, isError: viewModel.isError, availableWidth: availableWidth) {
37 | Text(viewModel.content)
38 | .textSelectionEnabled(viewModel.canCopyContents.value)
39 | }
40 | }
41 | }
42 | }
43 |
44 | private var showStopButton: Bool {
45 | switch viewModel.state {
46 | case .none, .waiting, .finished, .cancelled, .error:
47 | return false
48 | case .generating:
49 | return true
50 | }
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/LlamaChat/ui/chat/messages/MessagesView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessagesView.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 26/03/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct MessagesView: View {
11 | @ObservedObject var viewModel: MessagesViewModel
12 |
13 | @State private var bannerHeight = Double(0)
14 |
15 | var body: some View {
16 | MessagesTableView(messages: viewModel.messages)
17 | .frame(maxWidth: .infinity, maxHeight: .infinity)
18 | .overlay {
19 | if viewModel.isBuiltForDebug {
20 | VStack {
21 | DebugBuildBannerView()
22 | .background(
23 | GeometryReader { geometry in
24 | Color.clear.preference(key: BannerHeightKey.self, value: geometry.size.height)
25 | }
26 | )
27 | Spacer()
28 | }
29 | }
30 | }
31 | .onPreferenceChange(BannerHeightKey.self) { newHeight in
32 | bannerHeight = newHeight
33 | }
34 | }
35 | }
36 |
37 | fileprivate struct BannerHeightKey: PreferenceKey {
38 | static var defaultValue: CGFloat { 0 }
39 | static func reduce(value: inout Value, nextValue: () -> Value) {
40 | value = value + nextValue()
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/LlamaChat/ui/chat/messages/TypingBubbleContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TypingBubbleView.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 31/03/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct CircleView: View {
11 | @State var opacity: Double = 0.2
12 |
13 | let startDelay: Double
14 |
15 | var body: some View {
16 | Circle()
17 | .fill(.black)
18 | .frame(width: 8, height: 8)
19 | .opacity(opacity)
20 | .onAppear {
21 | withAnimation(.easeInOut(duration: 0.55).repeatForever(autoreverses: true).delay(startDelay)) {
22 | opacity = 0.4
23 | }
24 | }
25 | }
26 | }
27 |
28 | struct TypingBubbleContentView: View {
29 | var body: some View {
30 | HStack(spacing: 2) {
31 | CircleView(startDelay: 0)
32 | CircleView(startDelay: 0.33)
33 | CircleView(startDelay: 0.66)
34 | }
35 | .padding(.vertical, 10)
36 | .padding(.horizontal, 10)
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/LlamaChat/ui/components/AvatarPickerView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AvatarPickerView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 10/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | fileprivate struct EmptyAvatarItemView: View {
11 |   var selection: Binding<String?>
12 |
13 | @ViewBuilder var background: some View {
14 | if selection.wrappedValue == nil {
15 | Circle()
16 | .fill(.blue)
17 | .frame(width: 68, height: 68)
18 | }
19 | }
20 |
21 | var body: some View {
22 | Circle()
23 | .fill(Color(nsColor: .controlBackgroundColor))
24 | .frame(width: 62, height: 62)
25 | .overlay(
26 | Text("None")
27 | )
28 | .onTapGesture {
29 | selection.wrappedValue = nil
30 | }
31 | .background(
32 | background
33 | )
34 | }
35 | }
36 |
37 | fileprivate struct AvatarItemView: View {
38 | let resourceName: String
39 |   var selection: Binding<String?>
40 |
41 | @ViewBuilder var background: some View {
42 | if selection.wrappedValue == resourceName {
43 | Circle()
44 | .fill(.blue)
45 | .frame(width: 68, height: 68)
46 | // Resources are a bit off-center oops
47 | .padding(.top, 2)
48 | }
49 | }
50 |
51 | var body: some View {
52 | Image(resourceName)
53 | .resizable()
54 | .scaledToFit()
55 | .frame(width: 64, height: 64)
56 | .onTapGesture {
57 | selection.wrappedValue = resourceName
58 | }
59 | .background(
60 | background
61 | )
62 | }
63 | }
64 |
65 | struct AvatarPickerView: View {
66 |   var selectedAvatar: Binding<String?>
67 |
68 | @State private var isPickerPresented = false
69 |
70 | @ViewBuilder var picker: some View {
71 | if let avatarImageName = selectedAvatar.wrappedValue {
72 | Image(avatarImageName)
73 | .resizable()
74 |
75 | } else {
76 | Circle()
77 | .fill(.gray.opacity(0.2))
78 | .overlay(
79 | Image(systemName: "plus")
80 | .resizable()
81 | .scaledToFit()
82 | .frame(width: 16, height: 16)
83 | .foregroundColor(.gray)
84 | )
85 | }
86 | }
87 |
88 | var body: some View {
89 | picker
90 | .frame(width: 48, height: 48)
91 | .onTapGesture {
92 | isPickerPresented = true
93 | }
94 | .popover(isPresented: $isPickerPresented, arrowEdge: .bottom) {
95 | Grid {
96 | GridRow {
97 | EmptyAvatarItemView(selection: selectedAvatar)
98 | AvatarItemView(resourceName: "avatar-1", selection: selectedAvatar)
99 | AvatarItemView(resourceName: "avatar-2", selection: selectedAvatar)
100 | AvatarItemView(resourceName: "avatar-3", selection: selectedAvatar)
101 | }
102 | GridRow {
103 | AvatarItemView(resourceName: "avatar-4", selection: selectedAvatar)
104 | AvatarItemView(resourceName: "avatar-6", selection: selectedAvatar)
105 | AvatarItemView(resourceName: "avatar-7", selection: selectedAvatar)
106 | AvatarItemView(resourceName: "avatar-8", selection: selectedAvatar)
107 | }
108 | }
109 | .padding()
110 | }
111 | }
112 | }
113 |
--------------------------------------------------------------------------------
/LlamaChat/ui/components/BorderlessTextField.swift:
--------------------------------------------------------------------------------
1 | //
2 | // BorderlessTextField.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 31/03/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct BorderlessTextField: NSViewRepresentable {
11 | var placeholder: String
12 | @Binding var text: String
13 |
14 |   init(_ placeholder: String, text: Binding<String>) {
15 | self.placeholder = placeholder
16 | _text = text
17 | }
18 |
19 | class Coordinator: NSObject, NSTextFieldDelegate {
20 | var parent: BorderlessTextField
21 |
22 | init(_ parent: BorderlessTextField) {
23 | self.parent = parent
24 | }
25 |
26 | func controlTextDidChange(_ obj: Notification) {
27 | if let textField = obj.object as? NSTextField {
28 | parent.text = textField.stringValue
29 | }
30 | }
31 | }
32 |
33 | func makeCoordinator() -> Coordinator {
34 | Coordinator(self)
35 | }
36 |
37 | func makeNSView(context: Context) -> NSTextField {
38 | let textField = NSTextField()
39 | textField.delegate = context.coordinator
40 | textField.focusRingType = .none
41 | textField.isBordered = false
42 | textField.font = NSFont.systemFont(ofSize: 13)
43 | textField.placeholderString = placeholder
44 | return textField
45 | }
46 |
47 | func updateNSView(_ nsView: NSTextField, context: Context) {
48 | nsView.stringValue = text
49 | nsView.placeholderString = placeholder
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/LlamaChat/ui/components/DebouncedView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DebouncedView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct DebouncedView<Content>: View where Content: View {
11 | typealias ContentBuilder = () -> Content
12 |
13 | @State private var showView = false
14 |
15 | var isVisible: Bool
16 | let delay: Double
17 | var animation: Animation?
18 |
19 | let contentBuilder: ContentBuilder
20 | init(isVisible: Bool, delay: Double, animation: Animation? = nil, contentBuilder: @escaping ContentBuilder) {
21 | self.isVisible = isVisible
22 | self.delay = delay
23 | self.animation = animation
24 | self.contentBuilder = contentBuilder
25 | }
26 |
27 | var body: some View {
28 | VStack {
29 |       // Do a sanity `isVisible` check for race conditions between setting `showView` to true in the asyncAfter() and
30 | // `isVisible` becoming false.
31 | if showView && isVisible {
32 | contentBuilder()
33 | }
34 | }
35 | .animation(animation, value: showView)
36 | .onChange(of: isVisible) { newIsVisible in
37 | if newIsVisible {
38 | DispatchQueue.main.asyncAfter(deadline: .now() + delay) {
39 | self.showView = true
40 | }
41 | } else {
42 | showView = false
43 | }
44 | }
45 | .onAppear {
46 | if isVisible {
47 | DispatchQueue.main.asyncAfter(deadline: .now() + delay) {
48 | self.showView = true
49 | }
50 | }
51 | }
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/LlamaChat/ui/components/DidEndEditingTextField.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DidEndEditingTextField.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct DidEndEditingTextField: NSViewRepresentable {
11 | @Binding var text: String
12 | var didEndEditing: (String) -> Void
13 |
14 | class Coordinator: NSObject, NSTextFieldDelegate {
15 | var parent: DidEndEditingTextField
16 |
17 | init(_ parent: DidEndEditingTextField) {
18 | self.parent = parent
19 | }
20 |
21 | func controlTextDidChange(_ obj: Notification) {
22 | if let textField = obj.object as? NSTextField {
23 | parent.text = textField.stringValue
24 | }
25 | }
26 |
27 | func controlTextDidEndEditing(_ obj: Notification) {
28 | if let textField = obj.object as? NSTextField {
29 | parent.didEndEditing(textField.stringValue)
30 | }
31 | }
32 | }
33 |
34 | func makeCoordinator() -> Coordinator {
35 | Coordinator(self)
36 | }
37 |
38 | func makeNSView(context: Context) -> NSTextField {
39 | let textField = NSTextField()
40 | textField.delegate = context.coordinator
41 | return textField
42 | }
43 |
44 | func updateNSView(_ nsView: NSTextField, context: Context) {
45 | nsView.stringValue = text
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/LlamaChat/ui/components/NonEditableTextView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // NonEditableTextView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 08/04/2023.
6 | //
7 |
8 | import AppKit
9 | import SwiftUI
10 |
11 | class NonEditableTextViewModel: ObservableObject {
12 | typealias IsEmptyHandler = () -> Bool
13 | typealias AppendHandler = (NSAttributedString) -> Void
14 |
15 | private(set) var _initialString: NSMutableAttributedString
16 |
17 | enum State {
18 | case notConnected
19 | case connectedToTextView(isEmptyHandler: IsEmptyHandler, appendHandler: AppendHandler)
20 | }
21 |
22 | private var state: State = .notConnected
23 |
24 | var isEmpty: Bool {
25 | switch state {
26 | case .notConnected:
27 | return _initialString.length == 0
28 | case .connectedToTextView(isEmptyHandler: let isEmpty, appendHandler: _):
29 | return isEmpty()
30 | }
31 | }
32 |
33 | var initialString: NSAttributedString {
34 | return NSAttributedString(attributedString: _initialString)
35 | }
36 |
37 | init(string: String? = nil, attributes: [NSAttributedString.Key: Any]? = nil) {
38 | _initialString = NSMutableAttributedString(string: string ?? "", attributes: attributes)
39 | }
40 |
41 | func disconnectFromTextView() {
42 | state = .notConnected
43 | }
44 |
45 | func connect(isEmptyHandler: @escaping IsEmptyHandler, appendHandler: @escaping AppendHandler) {
46 | state = .connectedToTextView(isEmptyHandler: isEmptyHandler, appendHandler: appendHandler)
47 | }
48 |
49 | func append(attributedString: NSAttributedString) {
50 | switch state {
51 | case .notConnected:
52 | _initialString.append(attributedString)
53 | case .connectedToTextView(isEmptyHandler: _, appendHandler: let append):
54 | append(attributedString)
55 | }
56 | }
57 | }
58 |
59 | struct NonEditableTextView: NSViewRepresentable {
60 | @ObservedObject var viewModel: NonEditableTextViewModel
61 |
62 | enum ScrollBehavior {
63 | case `default`
64 | case pinToBottom
65 |
66 | var pinToBottom: Bool {
67 | switch self {
68 | case .default: return false
69 | case .pinToBottom: return true
70 | }
71 | }
72 | }
73 |
74 | let scrollBehavior: ScrollBehavior
75 |
76 | init(viewModel: NonEditableTextViewModel, scrollBehavior: ScrollBehavior = .default) {
77 | self.viewModel = viewModel
78 | self.scrollBehavior = scrollBehavior
79 | }
80 |
81 | init(string: String, font: NSFont, scrollBehavior: ScrollBehavior = .default) {
82 | viewModel = NonEditableTextViewModel(string: string, attributes: [.font: font])
83 | self.scrollBehavior = scrollBehavior
84 | }
85 |
86 | func makeNSView(context: Context) -> NSScrollView {
87 | let scrollView = NSTextView.scrollableTextView()
88 | guard let textView = scrollView.documentView as? NSTextView else {
89 | return scrollView
90 | }
91 | scrollView.documentView = textView
92 |
93 | textView.isEditable = false
94 | textView.textContainerInset = NSSize(width: 8, height: 8)
95 |
96 | context.coordinator.update(textView: textView, viewModel: viewModel)
97 |
98 | return scrollView
99 | }
100 |
101 | func updateNSView(_ nsView: NSScrollView, context: Context) {
102 | if let textView = nsView.documentView as? NSTextView {
103 | context.coordinator.update(textView: textView, viewModel: viewModel)
104 | }
105 | }
106 |
107 | class Coordinator {
108 | private var lastViewModel: NonEditableTextViewModel?
109 |
110 | let parent: NonEditableTextView
111 | init(_ parent: NonEditableTextView) {
112 | self.parent = parent
113 | }
114 |
115 | func update(textView: NSTextView, viewModel: NonEditableTextViewModel?) {
116 | guard let textStorage = textView.textStorage, let viewModel else { return }
117 |
118 | if viewModel !== lastViewModel {
119 | if let lastViewModel {
120 | lastViewModel.disconnectFromTextView()
121 | }
122 |
123 | viewModel.connect(isEmptyHandler: { [weak textStorage = textView.textStorage] in
124 | return textStorage?.length == 0
125 | }, appendHandler: { [weak self, weak textView, weak textStorage = textView.textStorage] attributedString in
126 | if let textView, textView.enclosingScrollView?.isScrolledToBottom() ?? false, self?.parent.scrollBehavior.pinToBottom ?? false {
127 | textView.scrollToEndOfDocument(nil)
128 | }
129 | textStorage?.append(attributedString)
130 | })
131 |
132 | textStorage.setAttributedString(viewModel.initialString)
133 | lastViewModel = viewModel
134 | }
135 | }
136 | }
137 |
138 | func makeCoordinator() -> Coordinator {
139 | return Coordinator(self)
140 | }
141 | }
142 |
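// A minimal usage sketch (illustrative; the attributed text is an assumption):
//
//   let output = NonEditableTextViewModel(string: "", attributes: [.font: NSFont.systemFont(ofSize: 12)])
//   output.append(attributedString: NSAttributedString(string: "new tokens..."))
//   NonEditableTextView(viewModel: output, scrollBehavior: .pinToBottom)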
--------------------------------------------------------------------------------
/LlamaChat/ui/components/Sliders.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Sliders.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 19/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | private let sliderLabelWidth = Double(40)
11 |
12 | fileprivate struct WrappedNSSlider: NSViewRepresentable {
13 |   var value: Binding<Double>
14 |   var range: ClosedRange<Double>
15 | var numberOfTickMarks: Int?
16 |
17 | func makeNSView(context: Context) -> NSSlider {
18 | let slider = NSSlider()
19 | slider.minValue = range.lowerBound
20 | slider.maxValue = range.upperBound
21 | slider.numberOfTickMarks = numberOfTickMarks ?? 0
22 | slider.target = context.coordinator
23 | slider.action = #selector(Coordinator.valueChanged(_:))
24 |
25 | return slider
26 | }
27 |
28 | func updateNSView(_ nsView: NSSlider, context: Context) {
29 | nsView.doubleValue = value.wrappedValue
30 | nsView.numberOfTickMarks = numberOfTickMarks ?? 0
31 | }
32 |
33 | func makeCoordinator() -> Coordinator {
34 | return Coordinator(self)
35 | }
36 |
37 | class Coordinator {
38 | private var lastHapticFeedbackMarkerPosition: Double?
39 |
40 | let parent: WrappedNSSlider
41 | init(_ parent: WrappedNSSlider) {
42 | self.parent = parent
43 | }
44 |
45 | @objc func valueChanged(_ sender: NSSlider) {
46 | let currentValue = sender.doubleValue
47 | if sender.numberOfTickMarks > 0 {
48 | let closest = sender.closestTickMarkValue(toValue: currentValue)
49 | if abs(closest - currentValue) < Double.ulpOfOne && lastHapticFeedbackMarkerPosition != closest {
50 | NSHapticFeedbackManager.defaultPerformer.perform(.generic, performanceTime: .drawCompleted)
51 | lastHapticFeedbackMarkerPosition = closest
52 | }
53 |
54 | if abs(closest - currentValue) > 0.01 {
55 | lastHapticFeedbackMarkerPosition = nil
56 | }
57 | }
58 |
59 | parent.value.wrappedValue = currentValue
60 | }
61 | }
62 | }
63 |
64 | struct DiscreteSliderView: View {
65 |   var value: Binding<Int>
66 |   var range: ClosedRange<Int>
67 | var isExponential: Bool = false
68 | var numberOfTickMarks: Int?
69 |
70 | var body: some View {
71 | if isExponential {
72 | let wrappedValue = Binding(
73 | // Take the log_2() value of the wrapped value and scale back to 0...1 by using the log_2() of the upper bound and lower bound.
74 | get: {
75 | let top = log2(Double(value.wrappedValue)) - log2(Double(range.lowerBound))
76 | let bottom = log2(Double(range.upperBound)) - log2(Double(range.lowerBound))
77 | return top / bottom
78 | },
79 | // Inverse function, simplified.
80 | set: { value.wrappedValue = Int(pow(Double(range.upperBound), $0) * pow(Double(range.lowerBound), (1.0 - $0))) }
81 | )
82 | HStack {
83 | Text("\(range.lowerBound)")
84 | .font(.footnote)
85 | .frame(width: sliderLabelWidth, alignment: .trailing)
86 | WrappedNSSlider(value: wrappedValue, range: 0...1, numberOfTickMarks: numberOfTickMarks)
87 | Text("\(range.upperBound, specifier: "%d")")
88 | .font(.footnote)
89 | .frame(width: sliderLabelWidth, alignment: .leading)
90 | }
91 | } else {
92 | let wrappedValue = Binding(
93 | get: { Double(value.wrappedValue) },
94 | set: { value.wrappedValue = Int($0) }
95 | )
96 | HStack {
97 | Text("\(range.lowerBound)")
98 | .font(.footnote)
99 | .frame(width: sliderLabelWidth, alignment: .trailing)
100 | WrappedNSSlider(value: wrappedValue, range: (Double(range.lowerBound)...Double(range.upperBound)), numberOfTickMarks: numberOfTickMarks)
101 | Text("\(range.upperBound, specifier: "%d")")
102 | .font(.footnote)
103 | .frame(width: sliderLabelWidth, alignment: .leading)
104 | }
105 | }
106 | }
107 | }
108 |
109 | struct ContinuousSliderView: View {
110 |   var value: Binding<Double>
111 |   var range: ClosedRange<Double>
112 | var numberOfTickMarks: Int?
113 |
114 | var body: some View {
115 | HStack {
116 | Text("\(range.lowerBound, specifier: "%.1f")")
117 | .font(.footnote)
118 | .frame(width: sliderLabelWidth, alignment: .trailing)
119 | WrappedNSSlider(value: value, range: range, numberOfTickMarks: numberOfTickMarks)
120 | Text("\(range.upperBound, specifier: "%.1f")")
121 | .font(.footnote)
122 | .frame(width: sliderLabelWidth, alignment: .leading)
123 | }
124 | }
125 | }
126 |
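// Worked example of the exponential mapping above (illustrative): for range = 128...2048, a normalized
// slider position t = 0.5 maps back to 2048^0.5 * 128^0.5 = sqrt(128 * 2048) = 512, the geometric
// midpoint, while t = 0 and t = 1 map exactly to 128 and 2048.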
--------------------------------------------------------------------------------
/LlamaChat/ui/debug/DebugBuildBannerView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DebugBuildBannerView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 05/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct DebugBuildBannerView: View {
11 | var body: some View {
12 | HStack(alignment: .firstTextBaseline, spacing: 4) {
13 | Image(systemName: "exclamationmark.circle")
14 | .fontWeight(.bold)
15 | Text("Interacting with the chat models in Debug builds is really slow. For optimal performance, rebuild for Release.")
16 | Spacer()
17 | }
18 | .padding(.vertical, 6)
19 | .padding(.horizontal, 8)
20 | .background(.red)
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/LlamaChat/ui/restoration/StateRestoration.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StateRestoration.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | protocol RestorableValue {}
11 |
12 | extension String: RestorableValue {}
13 | extension Int: RestorableValue {}
14 | extension Double: RestorableValue {}
15 | extension Bool: RestorableValue {}
16 | extension Data: RestorableValue {}
17 | extension Array: RestorableValue where Element == any RestorableValue {}
18 | extension Dictionary: RestorableValue where Key == String, Value == any RestorableValue {}
19 |
20 | protocol RestorableData<DomainKey> {
21 |   associatedtype DomainKey
22 |
23 |   func getValue<V: RestorableValue>(for key: DomainKey) -> V?
24 |   func set<V: RestorableValue>(value: V?, for key: DomainKey)
25 | }
26 |
27 | fileprivate class DomainScopedRestorableData<DomainKey>: RestorableData where DomainKey: RawRepresentable, DomainKey.RawValue == String {
28 | private let domain: String
29 | private let stateRestoration: StateRestoration
30 |
31 |   private lazy var restorationPayload: Dictionary<String, any RestorableValue> = {
32 | let persisted = stateRestoration.loadDictionaryValue(for: domain)
33 | return persisted ?? Dictionary()
34 | }() {
35 | didSet {
36 | stateRestoration.set(dictionaryValue: restorationPayload, for: domain)
37 | }
38 | }
39 |
40 | init(domain: String, stateRestoration: StateRestoration) {
41 | self.domain = domain
42 | self.stateRestoration = stateRestoration
43 | }
44 |
45 |   func getValue<V: RestorableValue>(for key: DomainKey) -> V? {
46 | return restorationPayload[key.rawValue] as? V
47 | }
48 |
49 |   func set<V: RestorableValue>(value: V?, for key: DomainKey) {
50 | if let value {
51 | restorationPayload[key.rawValue] = value
52 | } else {
53 | restorationPayload.removeValue(forKey: key.rawValue)
54 | }
55 | }
56 | }
57 |
58 | // We're not using a Scene (so can't use SceneStorage) so build out this simple state restoration class.
59 | class StateRestoration: ObservableObject {
60 |   func restorableData<DomainKey>(for domain: String) -> any RestorableData<DomainKey> where DomainKey: RawRepresentable, DomainKey.RawValue == String {
61 |     return DomainScopedRestorableData<DomainKey>(domain: domain, stateRestoration: self)
62 | }
63 |
64 | private func defaultsKey(for key: String) -> String {
65 | return "restoration.\(key)"
66 | }
67 |
68 |   fileprivate func loadDictionaryValue(for key: String) -> Dictionary<String, any RestorableValue>? {
69 | guard let rawDictionary = UserDefaults.standard.dictionary(forKey: defaultsKey(for: key)) else { return nil }
70 | return rawDictionary.compactMapValues(toRestorableValue(_:))
71 | }
72 |
73 |   fileprivate func set(dictionaryValue: Dictionary<String, any RestorableValue>, for key: String) {
74 | UserDefaults.standard.setValue(dictionaryValue, forKey: defaultsKey(for: key))
75 | }
76 |
77 | fileprivate func set(value: any RestorableValue, for key: String) {
78 | UserDefaults.standard.setValue(value, forKey: defaultsKey(for: key))
79 | }
80 | }
81 |
82 | fileprivate func toRestorableValue(_ value: Any?) -> RestorableValue? {
83 | guard let value else { return nil }
84 |
85 | if let value = value as? String {
86 | return value
87 | }
88 |
89 | if let dictionaryValue = value as? NSDictionary {
90 |     var dictionary = Dictionary<String, any RestorableValue>()
91 | for (key, value) in dictionaryValue {
92 | if let key = key as? String {
93 | toRestorableValue(value).map { dictionary[key] = $0 }
94 | }
95 | }
96 | return dictionary
97 | }
98 |
99 | if let arrayValue = value as? NSArray {
100 |     var array = Array<any RestorableValue>()
101 | for value in arrayValue {
102 | toRestorableValue(value).map { array.append($0) }
103 | }
104 | return array
105 | }
106 |
107 | if let value = value as? NSNumber {
108 | let valueType = value.objCType.pointee
109 | if valueType == boolType {
110 | return value.boolValue
111 | } else if valueType == intType {
112 | return value.intValue
113 | } else if valueType == doubleType {
114 | return value.doubleValue
115 | } else {
116 | return nil
117 | }
118 | }
119 |
120 | return nil
121 | }
122 |
123 | fileprivate let boolType = NSNumber(booleanLiteral: true).objCType.pointee
124 | fileprivate let intType = NSNumber(integerLiteral: 0).objCType.pointee
125 | fileprivate let doubleType = NSNumber(floatLiteral: 0).objCType.pointee
126 |
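// A minimal usage sketch (the key type and domain name are assumptions, not from the source):
//
//   enum ChatWindowKey: String { case sidebarWidth }
//   let restorableData: any RestorableData<ChatWindowKey> = stateRestoration.restorableData(for: "chatWindow")
//   restorableData.set(value: 250.0, for: .sidebarWidth)
//   let width: Double? = restorableData.getValue(for: .sidebarWidth)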
--------------------------------------------------------------------------------
/LlamaChat/ui/settings/SettingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SettingsView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct SettingsView: View {
11 | @ObservedObject var viewModel: SettingsViewModel
12 |
13 | init(viewModel: SettingsViewModel) {
14 | self.viewModel = viewModel
15 | }
16 |
17 | var body: some View {
18 | let selectedTabBinding = Binding(
19 | get: { viewModel.selectedTab },
20 | set: { viewModel.selectedTab = $0 }
21 | )
22 | TabView(selection: selectedTabBinding) {
23 | GeneralSettingsView(viewModel: viewModel.generalSettingsViewModel)
24 | .tabItem {
25 | Label("General", systemImage: "gearshape")
26 | }
27 | .tag(SettingsTab.general)
28 | SourcesSettingsView(viewModel: viewModel.sourcesSettingsViewModel)
29 | .tabItem {
30 | Label("Sources", systemImage: "ellipsis.bubble")
31 | }
32 | .tag(SettingsTab.sources)
33 | }
34 | .frame(minWidth: 800, idealWidth: 800, minHeight: 500, idealHeight: 500)
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/LlamaChat/ui/settings/SettingsWindowPresenter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SettingsWindowPresenter.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 17/04/2023.
6 | //
7 |
8 | import AppKit
9 |
10 | class SettingsWindowPresenter {
11 | enum SourcesTab {
12 | case properties
13 | case parameters
14 | }
15 |
16 | enum Deeplink {
17 | case general
18 | case sources(sourceId: ChatSource.ID?, sourcesTab: SourcesTab)
19 | }
20 |
21 | static let shared = SettingsWindowPresenter()
22 |
23 | var settingsViewModel: SettingsViewModel?
24 |
25 | private init() {}
26 |
27 | func present() {
28 | present(deeplinkingTo: nil)
29 | }
30 |
31 | func present(deeplinkingTo deeplink: Deeplink?) {
32 | if #available(macOS 13.0, *) {
33 | NSApp.sendAction(Selector(("showSettingsWindow:")), to: nil, from: nil)
34 | }
35 | else {
36 | NSApp.sendAction(Selector(("showPreferencesWindow:")), to: nil, from: nil)
37 | }
38 |
39 | // Not sure if this is a bug in SwiftUI but the Settings {} window is never cleaned up
40 | // so it's safe to just set this directly on settingsViewModel -- the change will be
41 | // picked up when the settings window is opened.
42 | if let deeplink {
43 | switch deeplink {
44 | case .general:
45 | settingsViewModel?.selectedTab = .general
46 | case .sources(sourceId: let sourceId, sourcesTab: let sourcesTab):
47 | let initialTab: SettingsViewModel.InitialSourcesTab
48 | switch sourcesTab {
49 | case .properties:
50 | initialTab = .properties
51 | case .parameters:
52 | initialTab = .parameters
53 | }
54 | settingsViewModel?.selectSourceInSourcesTab(forSourceWithId: sourceId, initialTab: initialTab)
55 | }
56 | }
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/LlamaChat/ui/settings/tabs/sources/ConfirmSheetDeletionContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfirmDeleteSourceSheetContentView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | class ConfirmDeleteSourceSheetViewModel: ObservableObject {
11 | let chatSource: ChatSource
12 | let chatSources: ChatSources
13 | private let closeHandler: () -> Void
14 |
15 | init(
16 | chatSource: ChatSource,
17 | chatSources: ChatSources,
18 | closeHandler: @escaping () -> Void
19 | ) {
20 | self.chatSource = chatSource
21 | self.chatSources = chatSources
22 | self.closeHandler = closeHandler
23 | }
24 |
25 | func cancel() {
26 | closeHandler()
27 | }
28 |
29 | func delete() {
30 | chatSources.remove(source: chatSource)
31 | closeHandler()
32 | }
33 | }
34 |
35 | struct ConfirmDeleteSourceSheetContentView: View {
36 | let viewModel: ConfirmDeleteSourceSheetViewModel
37 |
38 | var body: some View {
39 | VStack(alignment: .leading, spacing: 4) {
40 | Text("Remove '\(viewModel.chatSource.name)'?")
41 | .font(.headline)
42 | Text("Are you sure you want to remove '\(viewModel.chatSource.name)' as a chat source? This cannot be undone.")
43 | .lineLimit(nil)
44 | .fixedSize(horizontal: false, vertical: true)
45 | HStack(spacing: 12) {
46 | Spacer()
47 | Button("Cancel") {
48 | viewModel.cancel()
49 | }
50 | Button("Remove") {
51 | viewModel.delete()
52 | }
53 | }
54 | .padding(.top, 16)
55 | }
56 | .padding(.horizontal, 20)
57 | .padding(.vertical, 28)
58 | .frame(maxWidth: 400)
59 | .fixedSize()
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/LlamaChat/ui/settings/tabs/sources/GeneralSettingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // GeneralSettingsView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 15/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct GeneralSettingsView: View {
11 | @ObservedObject var viewModel: GeneralSettingsViewModel
12 |
13 | var body: some View {
14 | let selectedThreadCount = Binding(
15 | get: { viewModel.numThreads },
16 | set: { viewModel.numThreads = $0 }
17 | )
18 | VStack {
19 | Spacer()
20 | HStack {
21 | Spacer()
22 | HStack {
23 | Picker("Run prediction on:", selection: selectedThreadCount) {
24 | ForEach(viewModel.threadCountRange, id: \.self) { value in
25 | Text("\(value)")
26 | .tag(value)
27 | }
28 | }
29 | .fixedSize()
30 | Text("CPU threads")
31 | }
32 | Spacer()
33 | }
34 | Spacer()
35 | }
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/LlamaChat/ui/settings/tabs/sources/SourcesSettingsListView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourcesSettingsListView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct SourcesSettingsSourceItemView: View {
11 | @ObservedObject var viewModel: SourcesSettingsSourceItemViewModel
12 |
13 | var body: some View {
14 | Text(viewModel.title)
15 | }
16 | }
17 |
18 | struct SourcesSettingsListView: View {
19 | @ObservedObject var viewModel: SourcesSettingsViewModel
20 |
21 | @ViewBuilder var heading: some View {
22 | VStack(spacing: 0) {
23 | HStack {
24 | Text("Source")
25 | .font(.system(size: 11))
26 | .padding(.vertical, 8)
27 | }
28 | .frame(maxWidth: .infinity)
29 | Divider()
30 | .foregroundColor(Color(nsColor: NSColor.separatorColor))
31 | }
32 | }
33 |
34 | @ViewBuilder var actionButtons: some View {
35 | HStack(spacing: 0) {
36 | Button(action: { viewModel.showAddSourceSheet() }, label: {
37 | Image(systemName: "plus")
38 | .padding(EdgeInsets(top: 8, leading: 10, bottom: 8, trailing: 6))
39 | })
40 | .buttonStyle(BorderlessButtonStyle())
41 | Button(action: {
42 | guard let selectedSourceId = viewModel.selectedSourceId else { return }
43 | viewModel.showConfirmDeleteSourceSheet(forSourceWithId: selectedSourceId)
44 | }, label: {
45 | Image(systemName: "minus")
46 | .padding(EdgeInsets(top: 8, leading: 6, bottom: 8, trailing: 8))
47 | })
48 | .disabled(viewModel.selectedSourceId == nil)
49 | .buttonStyle(BorderlessButtonStyle())
50 | Spacer()
51 | }
52 | }
53 |
54 | var body: some View {
55 | let selectionBinding = Binding(
56 | get: { viewModel.selectedSourceId },
57 | set: { viewModel.selectedSourceId = $0 }
58 | )
59 | ZStack {
60 | List(selection: selectionBinding) {
61 | Section(header: Text("Sources").frame(maxWidth: .infinity), content: {
62 | ForEach(viewModel.sources, id: \.id) { source in
63 | SourcesSettingsSourceItemView(viewModel: source)
64 | }
65 | })
66 | }
67 | .listStyle(PlainListStyle())
68 | VStack {
69 | Spacer()
70 | actionButtons
71 | }
72 | }
73 | .border(.separator)
74 | .onAppear {
75 | viewModel.selectFirstSourceIfEmpty()
76 | }
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/LlamaChat/ui/settings/tabs/sources/SourcesSettingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourcesSettingsView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct SourcesSettingsView: View {
11 | @ObservedObject var viewModel: SourcesSettingsViewModel
12 |
13 | @ViewBuilder var detailView: some View {
14 | if let detailViewModel = viewModel.detailViewModel {
15 | SourcesSettingsDetailView(viewModel: detailViewModel)
16 | .id(detailViewModel.id)
17 | } else {
18 | Text("Select a source to configure its settings")
19 | }
20 | }
21 |
22 | var body: some View {
23 | HStack(spacing: 0) {
24 | SourcesSettingsListView(viewModel: viewModel)
25 | .padding([.top, .leading, .bottom])
26 | .frame(width: 200)
27 | detailView
28 | .padding([.top])
29 | .frame(maxWidth: .infinity)
30 | }
31 | .sheet(isPresented: $viewModel.sheetPresented) {
32 | if let viewModel = viewModel.sheetViewModel as? ConfirmDeleteSourceSheetViewModel {
33 | ConfirmDeleteSourceSheetContentView(viewModel: viewModel)
34 | } else if let viewModel = viewModel.sheetViewModel as? AddSourceViewModel {
35 | AddSourceContentView(viewModel: viewModel)
36 | .interactiveDismissDisabled()
37 | }
38 | }
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/LlamaChat/ui/settings/tabs/sources/detail/SourceSettingsPropertiesView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourceSettingsPropertiesView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 19/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | fileprivate struct NameRowView: View {
11 | @ObservedObject var viewModel: SourceSettingsPropertiesViewModel
12 |
13 | @State var name: String
14 |
15 | init(viewModel: SourceSettingsPropertiesViewModel) {
16 | self.viewModel = viewModel
17 | _name = State(wrappedValue: viewModel.name)
18 | }
19 |
20 | var body: some View {
21 | LabeledContent("Display Name") {
22 | DidEndEditingTextField(text: $name, didEndEditing: { newName in
23 | viewModel.updateName(newName)
24 | })
25 | }
26 | .onChange(of: viewModel.name) { newName in
27 | name = newName
28 | }
29 | }
30 | }
31 |
32 | fileprivate struct AvatarRowView: View {
33 | @ObservedObject var viewModel: SourceSettingsPropertiesViewModel
34 |
35 | @State var name: String
36 |
37 | init(viewModel: SourceSettingsPropertiesViewModel) {
38 | self.viewModel = viewModel
39 | _name = State(wrappedValue: viewModel.name)
40 | }
41 |
42 | var body: some View {
43 | let selectedAvatarBinding = Binding(
44 | get: { viewModel.avatarImageName },
45 | set: { viewModel.avatarImageName = $0 }
46 | )
47 | LabeledContent("Avatar") {
48 | AvatarPickerView(selectedAvatar: selectedAvatarBinding)
49 | }
50 | }
51 | }
52 |
53 |
54 | struct SourceSettingsPropertiesView: View {
55 | @ObservedObject var viewModel: SourceSettingsPropertiesViewModel
56 |
57 | var body: some View {
58 | Form {
59 | Section {
60 | NameRowView(viewModel: viewModel)
61 | AvatarRowView(viewModel: viewModel)
62 | }
63 | Section("Model") {
64 | LabeledContent("Model Type", value: viewModel.type)
65 | LabeledContent("Model Path") {
66 | HStack {
67 | Text(viewModel.modelPath)
68 | .font(.system(size: 11))
69 | .lineLimit(1)
70 | .truncationMode(.middle)
71 | .frame(maxWidth: 200)
72 | .help(viewModel.modelPath)
73 | Menu(content: {
74 | Button("Show in Finder...") {
75 | viewModel.showModelInFinder()
76 | }
77 | }, label: {
78 | Image(systemName: "ellipsis.circle")
79 | })
80 | .buttonStyle(.borderless)
81 | .menuIndicator(.hidden)
82 | }
83 | }
84 | LabeledContent("Model Size") {
85 | Text(viewModel.modelSize)
86 | }
87 | }
88 | Section("Prediction") {
89 | Toggle(isOn: $viewModel.useMlock) {
90 | VStack(alignment: .leading, spacing: 4) {
91 | Text("Keep Model in Memory")
92 | Text("Keeping the entire model in memory may lead to better performance for smaller models.")
93 | .font(.footnote)
94 | .foregroundColor(.gray)
95 | }
96 | }
97 | }
98 | }
99 | .formStyle(.grouped)
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/LlamaChat/ui/settings/tabs/sources/detail/SourcesSettingsDetailView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourcesSettingsDetailView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct SourcesSettingsDetailView: View {
11 | @ObservedObject var viewModel: SourcesSettingsDetailViewModel
12 |
13 | @ViewBuilder var tabContent: some View {
14 | switch viewModel.selectedTab {
15 | case .properties:
16 | SourceSettingsPropertiesView(viewModel: viewModel.propertiesViewModel)
17 | case .parameters:
18 | SourceSettingsParametersView(viewModel: viewModel.parametersViewModel)
19 | }
20 | }
21 |
22 | var body: some View {
23 | VStack {
24 | Picker("", selection: $viewModel.selectedTab) {
25 | ForEach(SourcesSettingsDetailViewModel.Tab.allCases, id: \.self) { tab in
26 | Text(tab.label)
27 | }
28 | }
29 | .pickerStyle(.segmented)
30 | .fixedSize()
31 | tabContent
32 | Spacer()
33 | }
34 | }
35 | }
36 |
37 | fileprivate extension SourcesSettingsDetailViewModel.Tab {
38 | var label: String {
39 | switch self {
40 | case .properties: return "Properties"
41 | case .parameters: return "Parameters"
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/AddSourceContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AddSourceSheetContentView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | fileprivate struct StepView<Content, PrimaryActions>: View where Content: View, PrimaryActions: View {
11 | @ObservedObject var viewModel: AddSourceViewModel
12 |
13 | typealias ContentBuilder = () -> Content
14 | typealias PrimaryActionsBuilder = () -> PrimaryActions
15 |
16 | @ViewBuilder let content: ContentBuilder
17 | @ViewBuilder let primaryActions: PrimaryActionsBuilder
18 |
19 | var body: some View {
20 | VStack(spacing: 0) {
21 | content()
22 | Spacer()
23 | HStack {
24 | Button("Cancel", action: { viewModel.cancel() })
25 | Spacer()
26 | primaryActions()
27 | }
28 | .padding(20)
29 | }
30 | }
31 | }
32 |
33 | struct AddSourceContentView: View {
34 | @ObservedObject var viewModel: AddSourceViewModel
35 |
36 | var body: some View {
37 | NavigationStack(path: $viewModel.navigationPath) {
38 | StepView(viewModel: viewModel, content: {
39 | SelectSourceTypeView(viewModel: viewModel.selectSourceTypeViewModel)
40 | .padding(EdgeInsets(top: 20, leading: 20, bottom: 0, trailing: 20))
41 | }, primaryActions: {})
42 | .navigationTitle("Add Chat Source")
43 | .navigationDestination(for: AddSourceStep.self) { step in
44 | switch step {
45 | case .configureSource:
46 | if let configureSourceViewModel = viewModel.configureSourceViewModel {
47 | StepView(viewModel: viewModel, content: {
48 | makeConfigureSourceView(from: configureSourceViewModel)
49 | .navigationTitle("Set up \(configureSourceViewModel.chatSourceType.readableName) model")
50 | }, primaryActions: {
51 | ConfigureSourcePrimaryActionsView(viewModel: configureSourceViewModel.primaryActionsViewModel)
52 | })
53 | }
54 | case .convertPyTorchSource:
55 | if let convertSourceViewModel = viewModel.convertSourceViewModel {
56 | StepView(viewModel: viewModel, content: {
57 | ConvertSourceView(viewModel: convertSourceViewModel)
58 | .navigationTitle("Convert PyTorch model files")
59 | }, primaryActions: {
60 | ConvertSourcePrimaryActionsView(viewModel: convertSourceViewModel)
61 | })
62 | }
63 | }
64 | }
65 | }
66 | .frame(width: 640, height: 450)
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/AddSourceFlowPresentationStyle.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AddSourceFlowPresentationStyle.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | struct AddSourceFlowPresentationStyle {
11 | let showTitle: Bool
12 | let showBackButton: Bool
13 |
14 | private init(showTitle: Bool, showBackButton: Bool) {
15 | self.showTitle = showTitle
16 | self.showBackButton = showBackButton
17 | }
18 |
19 | static var standalone = AddSourceFlowPresentationStyle(showTitle: true, showBackButton: true)
20 | static var embedded = AddSourceFlowPresentationStyle(showTitle: false, showBackButton: false)
21 | }
22 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/configure/ConfigureLocalGgmlModelSettingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalGgmlModelSettingsView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ConfigureLocalGgmlModelSettingsView: View {
11 | @ObservedObject var viewModel: ConfigureLocalGgmlModelSettingsViewModel
12 |
13 | var body: some View {
14 | VStack(alignment: .leading) {
15 | ConfigureLocalModelPathSelectorView(viewModel: viewModel.pathSelectorViewModel)
16 | Text("Select the quantized \(viewModel.chatSourceType.readableName) model path. This should be called something like '\(viewModel.exampleModelPath)'")
17 | .font(.footnote)
18 | .padding(.top, 8)
19 | }
20 | ConfigureLocalModelSizePickerView(
21 | viewModel: viewModel.modelSizePickerViewModel,
22 | enabled: viewModel.modelState.isValid,
23 | unknownModelSizeAppearance: .regular
24 | )
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/configure/ConfigureLocalModelSourceView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalModelSourceView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | fileprivate struct DisplayNameRowView: View {
11 | @ObservedObject var viewModel: ConfigureLocalModelSourceViewModel
12 |
13 | @FocusState var isNameFocused: Bool
14 |
15 | var body: some View {
16 | let nameBinding = Binding(
17 | get: { viewModel.name },
18 | set: { viewModel.name = $0 }
19 | )
20 | HStack {
21 | TextField("Display Name", text: nameBinding)
22 | .textFieldStyle(.squareBorder)
23 | .focused($isNameFocused)
24 | Button(action: {
25 | viewModel.generateName()
26 | }, label: { Image(systemName: "hands.sparkles.fill") })
27 | }
28 | .onAppear {
29 | isNameFocused = true
30 | }
31 | }
32 | }
33 |
34 | fileprivate struct AvatarRowView: View {
35 | @ObservedObject var viewModel: ConfigureLocalModelSourceViewModel
36 |
37 | @State var pickerPresented = false
38 |
39 | @ViewBuilder var picker: some View {
40 | if let avatarImageName = viewModel.avatarImageName {
41 | Image(avatarImageName)
42 | .resizable()
43 |
44 | } else {
45 | Circle()
46 | .fill(.gray.opacity(0.2))
47 | .overlay(
48 | Image(systemName: "plus")
49 | .resizable()
50 | .scaledToFit()
51 | .frame(width: 16, height: 16)
52 | .foregroundColor(.gray)
53 | )
54 | }
55 | }
56 |
57 | var body: some View {
58 | LabeledContent {
59 | let selectedAvatarBinding = Binding(
60 | get: { viewModel.avatarImageName },
61 | set: { viewModel.avatarImageName = $0 }
62 | )
63 | AvatarPickerView(selectedAvatar: selectedAvatarBinding)
64 | } label: {
65 | Text("Avatar")
66 | }
67 | }
68 | }
69 |
70 | struct ConfigureLocalModelSourceView: View {
71 | @ObservedObject var viewModel: ConfigureLocalModelSourceViewModel
72 |
73 | @State var selectedModelType: String = ""
74 |
75 | var body: some View {
76 | Form {
77 | Section {
78 | DisplayNameRowView(viewModel: viewModel)
79 | AvatarRowView(viewModel: viewModel)
80 | }
81 | ConfigureLocalModelSelectFormatView(viewModel: viewModel)
82 |
83 | if let settingsViewModel = viewModel.settingsViewModel {
84 | Section {
85 | if let settingsViewModel = settingsViewModel as? ConfigureLocalGgmlModelSettingsViewModel {
86 | ConfigureLocalGgmlModelSettingsView(viewModel: settingsViewModel)
87 | } else if let settingsViewModel = settingsViewModel as? ConfigureLocalPyTorchModelSettingsViewModel {
88 | ConfigureLocalPyTorchModelSettingsView(viewModel: settingsViewModel)
89 | }
90 | }
91 | }
92 | }
93 | .formStyle(.grouped)
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/configure/ConfigureLocalPyTorchModelSettingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalPyTorchModelSettingsView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ConfigureLocalPyTorchModelSettingsView: View {
11 | @ObservedObject var viewModel: ConfigureLocalPyTorchModelSettingsViewModel
12 |
13 | var body: some View {
14 | switch viewModel.conversionState {
15 | case .unknown:
16 | EmptyView()
17 | case .loading:
18 | LabeledContent { Text("") } label: { Text("") }
19 | .overlay(
20 | DebouncedView(isVisible: true, delay: 0.2) {
21 | ProgressView()
22 | .progressViewStyle(.circular)
23 | .controlSize(.small)
24 | }
25 | )
26 | case .canConvert(let canConvert):
27 | if canConvert {
28 | ConfigureLocalModelSizePickerView(
29 | viewModel: viewModel.modelSizePickerViewModel,
30 | unknownModelSizeAppearance: .disabled
31 | )
32 | } else {
33 | LabeledContent { Text("") } label: {
34 | Text("Cannot automatically convert PyTorch model files. Please convert manually using the conversion steps outlined in the [llama.cpp repository](https://github.com/ggerganov/llama.cpp) and import them as a GGML model file.")
35 | .lineSpacing(2)
36 | .lineLimit(nil)
37 | .fixedSize(horizontal: false, vertical: true)
38 | }
39 | }
40 | }
41 | if viewModel.showPathSelector {
42 | VStack(alignment: .leading) {
43 | ConfigureLocalModelPathSelectorView(viewModel: viewModel.pathSelectorViewModel)
44 | if let files = viewModel.files {
45 | VStack(alignment: .leading, spacing: 4) {
46 | ForEach(files, id: \.url) { file in
47 | HStack(alignment: .firstTextBaseline, spacing: 4) {
48 | Image(systemName: file.found ? "checkmark.circle.fill" : "xmark.circle.fill")
49 | .foregroundColor(file.found ? .green : .red)
50 | Text(file.url.path)
51 | .foregroundColor(.gray)
52 | .lineLimit(1)
53 | .truncationMode(.head)
54 | }
55 | }
56 | }
57 | }
58 | }
59 | }
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/configure/ConfigureSourcePrimaryActionsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureSourcePrimaryActionsView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 13/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | protocol ConfigureSourcePrimaryActionsViewModelDelegate: AnyObject {
11 | func next()
12 | }
13 |
14 | class ConfigureSourcePrimaryActionsViewModel: ObservableObject {
15 | @Published var showContinueButton: Bool = false
16 | @Published var canContinue: Bool = false
17 | @Published var nextButtonTitle: String = "Add"
18 |
19 | weak var delegate: ConfigureSourcePrimaryActionsViewModelDelegate?
20 |
21 | func next() {
22 | delegate?.next()
23 | }
24 | }
25 |
26 | struct ConfigureSourcePrimaryActionsView: View {
27 | @ObservedObject var viewModel: ConfigureSourcePrimaryActionsViewModel
28 |
29 | var body: some View {
30 | HStack {
31 | Spacer()
32 | if viewModel.showContinueButton {
33 | Button(viewModel.nextButtonTitle) {
34 | viewModel.next()
35 | }
36 | .keyboardShortcut(.return)
37 | .disabled(!viewModel.canContinue)
38 | }
39 | }
40 | }
41 | }
42 |
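
Editor's note: a minimal, hypothetical sketch (not part of the repository) showing one way the primary-actions view model above could be wired to a delegate; only the API visible in this file is assumed, and ExampleCoordinator is an invented name.

final class ExampleCoordinator: ConfigureSourcePrimaryActionsViewModelDelegate {
  // Owns the view model and reacts when the user presses the primary button.
  let actionsViewModel = ConfigureSourcePrimaryActionsViewModel()

  init() {
    actionsViewModel.delegate = self
    actionsViewModel.showContinueButton = true
    actionsViewModel.canContinue = true
    actionsViewModel.nextButtonTitle = "Add"
  }

  func next() {
    // Advance to the next step of the add-source flow here.
  }
}
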
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/configure/components/ConfigureLocalModelPathSelectorView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalModelPathSelectorView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | fileprivate extension VerticalAlignment {
11 | private enum LabelAlignment: AlignmentID {
12 | static func defaultValue(in dimension: ViewDimensions) -> CGFloat {
13 | return dimension[VerticalAlignment.center]
14 | }
15 | }
16 |
17 | static let label = VerticalAlignment(LabelAlignment.self)
18 | }
19 |
20 | fileprivate extension Alignment {
21 | static let label = Alignment(horizontal: .leading, vertical: .label)
22 | }
23 |
24 | struct ConfigureLocalModelPathSelectorView: View {
25 | @ObservedObject var viewModel: ConfigureLocalModelPathSelectorViewModel
26 |
27 | @ViewBuilder var label: some View {
28 | Text(viewModel.label)
29 | .alignmentGuide(.label) { d in
30 | d[VerticalAlignment.firstTextBaseline]
31 | }
32 | }
33 |
34 | @ViewBuilder var selectButton: some View {
35 | Button(action: {
36 | let panel = NSOpenPanel()
37 | panel.allowsMultipleSelection = viewModel.allowMultipleSelection
38 | panel.canChooseFiles = viewModel.selectionMode.canSelectFiles
39 | panel.canChooseDirectories = viewModel.selectionMode.canSelectDirectories
40 | if panel.runModal() == .OK {
41 | viewModel.modelPaths = panel.urls.map { $0.path }
42 | }
43 | }, label: {
44 | Text("Select...")
45 | })
46 | .alignmentGuide(.label) { d in
47 | d[VerticalAlignment.firstTextBaseline]
48 | }
49 | }
50 |
51 | @ViewBuilder var singlePathSelectorContent: some View {
52 | HStack(alignment: .label) {
53 | LabeledContent {
54 | VStack(alignment: .trailing, spacing: 4) {
55 | Text(viewModel.modelPaths.first ?? "No path selected")
56 | .lineLimit(1)
57 | .truncationMode(.head)
58 | .frame(maxWidth: 200, alignment: .trailing)
59 | .help(viewModel.modelPaths.first ?? "")
60 | if let errorMessage = viewModel.errorMessage {
61 | Text(errorMessage)
62 | .foregroundColor(.red)
63 | .font(.footnote)
64 | }
65 | }
66 | } label: {
67 | label
68 | }
69 | selectButton
70 | }
71 | }
72 |
73 | @ViewBuilder var multiplePathSelectorContent: some View {
74 | VStack(alignment: .leading) {
75 | HStack(alignment: .label) {
76 | LabeledContent {
77 | VStack(alignment: .trailing, spacing: 4) {
78 | Text(
79 | viewModel.modelPaths.isEmpty ? "No paths selected" : "\(viewModel.modelPaths.count) paths selected"
80 | )
81 | .frame(maxWidth: 200, alignment: .trailing)
82 | if let errorMessage = viewModel.errorMessage {
83 | Text(errorMessage)
84 | .foregroundColor(.red)
85 | .font(.footnote)
86 | }
87 | }
88 | } label: {
89 | label
90 | }
91 | selectButton
92 | }
93 | if !viewModel.modelPaths.isEmpty {
94 | VStack(alignment: .leading, spacing: 2) {
95 | ForEach(viewModel.modelPaths, id: \.self) { modelPath in
96 | Text(modelPath).foregroundColor(.gray)
97 | }
98 | }
99 | .padding(.top, 12)
100 | }
101 | }
102 | }
103 |
104 | var body: some View {
105 | if viewModel.allowMultipleSelection {
106 | multiplePathSelectorContent
107 | } else {
108 | singlePathSelectorContent
109 | }
110 | }
111 | }
112 |
113 | fileprivate extension ConfigureLocalModelPathSelectorViewModel.SelectionMode {
114 | var canSelectFiles: Bool {
115 | switch self {
116 | case .files: return true
117 | case .directories: return false
118 | }
119 | }
120 |
121 | var canSelectDirectories: Bool {
122 | switch self {
123 | case .files: return false
124 | case .directories: return true
125 | }
126 | }
127 | }
128 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/configure/components/ConfigureLocalModelSelectFormatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalModelSelectFormatView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ConfigureLocalModelSelectFormatView: View {
11 | @ObservedObject var viewModel: ConfigureLocalModelSourceViewModel
12 |
13 | var body: some View {
14 | let sourceTypeBinding = Binding(
15 | get: { viewModel.modelSourceType },
16 | set: { viewModel.select(modelSourceType: $0) }
17 | )
18 | Section {
19 | Picker("Format", selection: sourceTypeBinding) {
20 | Text("Select Format")
21 | .foregroundColor(Color(nsColor: NSColor.disabledControlTextColor))
22 | .tag(ConfigureLocalModelSourceType?(nil))
23 | ForEach(ConfigureLocalModelSourceType.allCases) { source in
24 | Text(source.label).tag(ConfigureLocalModelSourceType?(source))
25 | .frame(maxWidth: .infinity, alignment: .trailing)
26 | .multilineTextAlignment(.trailing)
27 | }
28 | }
29 | } header: {
30 | VStack(alignment: .leading, spacing: 6) {
31 | Text("Model Settings")
32 | Text(viewModel.modelSourcingDescription)
33 | .font(.system(size: 12, weight: .regular))
34 | }
35 | .padding(.bottom, 12)
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/configure/components/ConfigureLocalModelSizePickerView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalModelSizePickerView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ConfigureLocalModelSizePickerView: View {
11 | @ObservedObject var viewModel: ConfigureLocalModelSizePickerViewModel
12 |
13 | enum UnknownModelSizeAppearance {
14 | case regular
15 | case disabled
16 |
17 | var isDisabled: Bool {
18 | switch self {
19 | case .regular: return false
20 | case .disabled: return true
21 | }
22 | }
23 | }
24 |
25 | let enabled: Bool
26 | let unknownModelSizeAppearance: UnknownModelSizeAppearance
27 |
28 | init(
29 | viewModel: ConfigureLocalModelSizePickerViewModel,
30 | enabled: Bool = true,
31 | unknownModelSizeAppearance: UnknownModelSizeAppearance
32 | ) {
33 | self.viewModel = viewModel
34 | self.enabled = enabled
35 | self.unknownModelSizeAppearance = unknownModelSizeAppearance
36 | }
37 |
38 | var body: some View {
39 | let modelTypeBinding = Binding(
40 | get: { viewModel.modelSize },
41 | set: { viewModel.modelSize = $0 }
42 | )
43 | Picker("Model Size", selection: modelTypeBinding) {
44 | Text(viewModel.label(for: .unknown))
45 | .foregroundColor(unknownModelSizeAppearance.isDisabled ? Color(nsColor: NSColor.disabledControlTextColor) : nil)
46 | .tag(ModelSize.unknown)
47 | if !unknownModelSizeAppearance.isDisabled {
48 | Divider()
49 | }
50 | ForEach([ModelSize.size7B, ModelSize.size13B, ModelSize.size30B, ModelSize.size65B]) { size in
51 | Text(viewModel.label(for: size)).tag(size)
52 | }
53 | }
54 | .disabled(!enabled)
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/convert/ConvertSourcePrimaryActionsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConvertSourcePrimaryActionsView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 13/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ConvertSourcePrimaryActionsView: View {
11 | @ObservedObject var viewModel: ConvertSourceViewModel
12 |
13 | @ViewBuilder var primaryButton: some View {
14 | Button(action: {
15 | switch viewModel.state {
16 | case .finishedConverting:
17 | viewModel.finish()
18 | case .failedToConvert:
19 | viewModel.retryConversion()
20 | case .notStarted, .converting:
21 | viewModel.startConversion()
22 | }
23 | }) {
24 | switch viewModel.state {
25 | case .finishedConverting:
26 | Text("Finish")
27 | case .failedToConvert:
28 | Text("Retry")
29 | case .notStarted, .converting:
30 | Text("Start")
31 | }
32 | }
33 | .keyboardShortcut(.return)
34 | .disabled(viewModel.state.isConverting)
35 | }
36 |
37 | @ViewBuilder var stopButton: some View {
38 | Button(action: {
39 | viewModel.stopConversion()
40 | }) {
41 | Text("Stop")
42 | }
43 | }
44 |
45 | var body: some View {
46 | HStack {
47 | if viewModel.state.isConverting {
48 | stopButton
49 | }
50 | primaryButton
51 | }
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/convert/ConvertSourceStepView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConvertSourceStepView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 13/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | fileprivate struct RunTimeLabel: View {
11 | @ObservedObject var viewModel: ConvertSourceStepViewModel
12 |
13 | @ViewBuilder var label: some View {
14 | if let runTime = viewModel.runTime {
15 | switch viewModel.state {
16 | case .notStarted, .skipped, .cancelled:
17 | EmptyView()
18 | case .running:
19 | Text("Running for \(String(format: "%.1f", floor(runTime))) seconds...")
20 | .font(.footnote)
21 | .padding(.horizontal, 14)
22 | .padding(.vertical, 8)
23 | case .finished:
24 | Text("Finished in \(String(format: "%.1f", runTime)) seconds")
25 | .font(.footnote)
26 | .padding(.horizontal, 14)
27 | .padding(.vertical, 8)
28 | }
29 | }
30 | }
31 |
32 | var body: some View {
33 | label
34 | }
35 | }
36 |
37 | fileprivate struct DetailView: View {
38 | @ObservedObject var viewModel: ConvertSourceStepViewModel
39 |
40 | var body: some View {
41 | VStack(spacing: 0) {
42 | NonEditableTextView(viewModel: viewModel.textViewModel, scrollBehavior: .pinToBottom)
43 | .frame(maxWidth: .infinity)
44 | .frame(height: 100)
45 | Rectangle()
46 | .fill(Color(nsColor: .separatorColor))
47 | .frame(maxWidth: .infinity)
48 | .padding(.horizontal, 0.5)
49 | .frame(height: 1)
50 | HStack(spacing: 0) {
51 | RunTimeLabel(viewModel: viewModel)
52 | Spacer()
53 | Rectangle()
54 | .fill(Color(nsColor: .separatorColor))
55 | .frame(maxHeight: .infinity)
56 | .padding(.vertical, 0.5)
57 | .frame(width: 1)
58 | Text("Exit Code: `\(viewModel.exitCode.map { String($0) } ?? "-")`")
59 | .font(.footnote)
60 | .padding(.horizontal, 14)
61 | .padding(.vertical, 8)
62 | }
63 | }
64 | .background(Color(nsColor: .controlBackgroundColor))
65 | .mask(RoundedRectangle(cornerRadius: 4))
66 | .overlay(
67 | RoundedRectangle(cornerRadius: 4)
68 | .stroke(Color(nsColor: .separatorColor))
69 | )
70 | }
71 | }
72 |
73 | struct ConvertSourceStepView: View {
74 | @ObservedObject var viewModel: ConvertSourceStepViewModel
75 |
76 | var body: some View {
77 | VStack {
78 | LabeledContent(content: {
79 | switch viewModel.state {
80 | case .notStarted:
81 | EmptyView()
82 | case .skipped:
83 | Image(systemName: "exclamationmark.octagon")
84 | .foregroundColor(.gray)
85 | case .running:
86 | ProgressView()
87 | .progressViewStyle(.circular)
88 | .controlSize(.small)
89 | case .cancelled:
90 | Image(systemName: "xmark.circle.fill")
91 | .foregroundColor(.red)
92 | case .finished(result: let result):
93 | switch result {
94 | case .success(let exitCode):
95 | if exitCode == 0 {
96 | Image(systemName: "checkmark.circle.fill")
97 | .foregroundColor(.green)
98 | } else {
99 | Image(systemName: "xmark.circle.fill")
100 | .foregroundColor(.red)
101 | }
102 | case .failure:
103 | Image(systemName: "xmark.circle.fill")
104 | .foregroundColor(.red)
105 | }
106 | }
107 | }, label: {
108 | HStack(spacing: 4) {
109 | Button(action: {
110 | viewModel.toggleExpansion()
111 | }, label: {
112 | Image(systemName: viewModel.expanded ? "arrowtriangle.down.fill" : "arrowtriangle.forward.fill")
113 | .resizable()
114 | .scaledToFit()
115 | .foregroundColor(.gray)
116 | .frame(width: 8, height: 8)
117 | .padding(.trailing, 4)
118 | })
119 | .buttonStyle(.borderless)
120 | Text(viewModel.label)
121 | }
122 | })
123 | if viewModel.expanded {
124 | DetailView(viewModel: viewModel)
125 | }
126 | }
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/convert/ConvertSourceView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConvertSourceView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 07/04/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ConvertSourceView: View {
11 | @ObservedObject var viewModel: ConvertSourceViewModel
12 |
13 | var body: some View {
14 | Form {
15 | switch viewModel.state {
16 | case .notStarted:
17 | Section {
18 | Text("LlamaChat will convert the PyTorch model weights to the .ggml format.\n\nAdditional disk space is required since the original file(s) are left untouched.")
19 | }
20 | case .converting, .failedToConvert, .finishedConverting:
21 | ForEach(viewModel.conversionSteps, id: \.id) { stepViewModel in
22 | ConvertSourceStepView(viewModel: stepViewModel)
23 | }
24 | }
25 | }
26 | .formStyle(.grouped)
27 | .navigationBarBackButtonHidden(viewModel.state.startedConverting)
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/type/SelectSourceTypeView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SelectSourceTypeView.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 30/03/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct SelectSourceTypeView: View {
11 | @ObservedObject var viewModel: SelectSourceTypeViewModel
12 |
13 | var body: some View {
14 | VStack(alignment: .leading, spacing: 20) {
15 | VStack(alignment: .leading, spacing: 4) {
16 | Text("To start interacting with one of the models, choose a chat source based on your available model data.")
17 | .lineLimit(nil)
18 | .fixedSize(horizontal: false, vertical: true)
19 | }
20 | .padding(.horizontal)
21 | SourceTypeSelectionView(viewModel: viewModel)
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/LlamaChat/ui/sources/type/SourceTypeSelectionView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourceTypeSelectionView.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 30/03/2023.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct SourceTypeSelectionRow: View {
11 | let source: SelectSourceTypeViewModel.Source
12 | let clickHandler: () -> Void
13 |
14 | @State var isHovered = false
15 |
16 | var body: some View {
17 | HStack {
18 | VStack(alignment: .leading, spacing: 4) {
19 | Text(source.name)
20 | .fontWeight(.bold)
21 | Text(source.description)
22 | if let learnMoreLink = source.learnMoreLink {
23 | Text("Learn More")
24 | .font(.footnote)
25 | .foregroundColor(.blue)
26 | .underline()
27 | .onHover { isHovered in
28 | // TODO: Use cursor rects to make this more robust.
29 | if isHovered {
30 | NSCursor.pointingHand.set()
31 | } else {
32 | NSCursor.arrow.set()
33 | }
34 | }
35 | .onTapGesture {
36 | NSWorkspace.shared.open(learnMoreLink)
37 | }
38 | }
39 | }
40 | .padding()
41 | Spacer()
42 | Image(systemName: "chevron.right")
43 | .padding(.trailing)
44 | }
45 | .background(isHovered ? Color("GroupedSelectionRowHover") : .clear)
46 | .onHover { isHovered = $0 }
47 | .onTapGesture {
48 | clickHandler()
49 | }
50 | }
51 | }
52 |
53 | struct SourceTypeSelectionView: View {
54 | var viewModel: SelectSourceTypeViewModel
55 |
56 | var body: some View {
57 | VStack(alignment: .leading, spacing: 0) {
58 | ForEach(viewModel.sources, id: \.id) { source in
59 | SourceTypeSelectionRow(source: source, clickHandler: {
60 | viewModel.select(sourceType: source.type)
61 | })
62 | if source.id != viewModel.sources.last?.id {
63 | Divider()
64 | }
65 | }
66 | }
67 | .background(Color(nsColor: .systemGray).opacity(0.05))
68 | .mask(RoundedRectangle(cornerRadius: 4))
69 | .overlay(
70 | RoundedRectangle(cornerRadius: 4)
71 | .stroke(Color(nsColor: .separatorColor))
72 | )
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/LlamaChat/util/FileUtils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FileUtils.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 30/03/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | func applicationSupportDirectoryURL() -> URL? {
11 | guard let bundleIdentifier = Bundle.main.bundleIdentifier else { return nil }
12 |
13 | do {
14 | let url = try FileManager().url(for: .applicationSupportDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
15 | let appScopedDirectory = url.appendingPathComponent(bundleIdentifier, isDirectory: true)
16 |
17 | if !FileManager.default.fileExists(atPath: appScopedDirectory.path) {
18 | try FileManager.default.createDirectory(at: appScopedDirectory, withIntermediateDirectories: false)
19 | }
20 | return appScopedDirectory
21 | } catch {
22 | print("Error getting application support directory:", error)
23 | return nil
24 | }
25 | }
26 |
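
Editor's note: an illustrative usage sketch (not part of the repository) of the helper above; the file name below is hypothetical.

import Foundation

if let supportDirectory = applicationSupportDirectoryURL() {
  // Resolve a file inside the app-scoped Application Support directory.
  let payloadURL = supportDirectory.appendingPathComponent("sources.json") // hypothetical file name
  print(payloadURL.path)
}
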
--------------------------------------------------------------------------------
/LlamaChat/util/ModelParameterUtils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelParameterUtils.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 20/04/2023.
6 | //
7 |
8 | import Foundation
9 | import llama
10 |
11 | private let minimumContextSize = UInt(512)
12 |
13 | func defaultModelParameters(for chatSourceType: ChatSourceType) -> ModelParameters {
14 | switch chatSourceType {
15 | case .llama:
16 | return ModelParameters.from(
17 | sessionConfig: LlamaSessionConfig.configurableDefaults
18 | .withHyperparameters { hyperparameters in
19 | hyperparameters.withContextSize(hyperparameters.contextSize.map { max($0, minimumContextSize) })
20 | }
21 | .build()
22 | )
23 | case .alpaca:
24 | return ModelParameters.from(
25 | sessionConfig: AlpacaSessionConfig.configurableDefaults
26 | .withHyperparameters { hyperparameters in
27 | hyperparameters.withContextSize(hyperparameters.contextSize.map { max($0, minimumContextSize) })
28 | }
29 | .build()
30 | )
31 | case .gpt4All:
32 | return ModelParameters.from(
33 | sessionConfig: GPT4AllSessionConfig.configurableDefaults
34 | .withHyperparameters { hyperparameters in
35 | hyperparameters.withContextSize(hyperparameters.contextSize.map { max($0, minimumContextSize) })
36 | }
37 | .build()
38 | )
39 | }
40 | }
41 |
42 | fileprivate extension ModelParameters {
43 | static func from(sessionConfig: SessionConfig) -> ModelParameters {
44 | return ModelParameters(
45 | seedValue: sessionConfig.seed,
46 | contextSize: sessionConfig.hyperparameters.contextSize,
47 | numberOfTokens: sessionConfig.numTokens,
48 | topP: sessionConfig.hyperparameters.topP,
49 | topK: sessionConfig.hyperparameters.topK,
50 | temperature: sessionConfig.hyperparameters.temperature,
51 | batchSize: sessionConfig.hyperparameters.batchSize,
52 | lastNTokensToPenalize: sessionConfig.hyperparameters.lastNTokensToPenalize,
53 | repeatPenalty: sessionConfig.hyperparameters.repeatPenalty
54 | )
55 | }
56 | }
57 |
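
Editor's note: an illustrative usage sketch (not part of the repository), assuming only the function defined above.

let parameters = defaultModelParameters(for: .llama)
// The resulting ModelParameters mirror the session defaults for the source type, with the
// context size raised to at least 512 wherever the configuration specifies one.
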
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/ChatInfoViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatInfoViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 02/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class ChatInfoViewModel: ObservableObject {
12 | enum ModelStat<V> {
13 | case none
14 | case unknown
15 | case loading
16 | case value(V)
17 |
18 | func map<U>(_ transform: (_ value: V) -> ModelStat<U>) -> ModelStat<U> {
19 | switch self {
20 | case .none: return .none
21 | case .unknown: return .unknown
22 | case .loading: return .loading
23 | case .value(let value): return transform(value)
24 | }
25 | }
26 |
27 | var value: V? {
28 | switch self {
29 | case .none, .unknown, .loading:
30 | return nil
31 | case .value(let value):
32 | return value
33 | }
34 | }
35 | }
36 |
37 | private let chatModel: ChatModel
38 |
39 | var sourceId: ChatSource.ID {
40 | return chatModel.source.id
41 | }
42 |
43 | var name: String {
44 | return chatModel.source.name
45 | }
46 |
47 | var modelSize: String {
48 | switch chatModel.source.modelSize {
49 | case .unknown:
50 | return "Unknown"
51 | case .size7B:
52 | return "7B"
53 | case .size13B:
54 | return "13B"
55 | case .size30B:
56 | return "30B"
57 | case .size65B:
58 | return "65B"
59 | }
60 | }
61 |
62 | var modelType: String {
63 | return "\(chatModel.source.type.readableName) model"
64 | }
65 |
66 | @Published private(set) var context: ModelStat<String> = .none
67 | @Published private(set) var contextTokenCount: ModelStat<Int> = .none
68 | @Published private(set) var canClearMessages: Bool
69 |
70 | // Parameters
71 | @Published var seedValue: Int32?
72 | @Published var contextSize: UInt = 0
73 | @Published var numberOfTokens: UInt = 0
74 | @Published var topP: Double = 0
75 | @Published var topK: UInt = 0
76 | @Published var temperature: Double = 0
77 | @Published var batchSize: UInt = 0
78 | @Published var lastNTokensToPenalize: UInt = 0
79 | @Published var repeatPenalty: Double = 0
80 |
81 | private(set) lazy var avatarViewModel = AvatarViewModel(chatSource: chatModel.source)
82 |
83 | init(chatModel: ChatModel) {
84 | self.chatModel = chatModel
85 |
86 | canClearMessages = !chatModel.messages.isEmpty
87 |
88 | chatModel
89 | .$messages
90 | .map { !$0.isEmpty }
91 | .assign(to: &$canClearMessages)
92 |
93 | chatModel.source.$modelParameters
94 | .map { $0.$seedValue }
95 | .switchToLatest()
96 | .assign(to: &$seedValue)
97 | chatModel.source.$modelParameters
98 | .map { $0.$contextSize }
99 | .switchToLatest()
100 | .assign(to: &$contextSize)
101 | chatModel.source.$modelParameters
102 | .map { $0.$numberOfTokens }
103 | .switchToLatest()
104 | .assign(to: &$numberOfTokens)
105 | chatModel.source.$modelParameters
106 | .map { $0.$topP }
107 | .switchToLatest()
108 | .assign(to: &$topP)
109 | chatModel.source.$modelParameters
110 | .map { $0.$topK }
111 | .switchToLatest()
112 | .assign(to: &$topK)
113 | chatModel.source.$modelParameters
114 | .map { $0.$temperature }
115 | .switchToLatest()
116 | .assign(to: &$temperature)
117 | chatModel.source.$modelParameters
118 | .map { $0.$batchSize }
119 | .switchToLatest()
120 | .assign(to: &$batchSize)
121 | chatModel.source.$modelParameters
122 | .map { $0.$lastNTokensToPenalize }
123 | .switchToLatest()
124 | .assign(to: &$lastNTokensToPenalize)
125 | chatModel.source.$modelParameters
126 | .map { $0.$repeatPenalty }
127 | .switchToLatest()
128 | .assign(to: &$repeatPenalty)
129 | }
130 |
131 | func clearMessages() {
132 | Task.init {
133 | await chatModel.clearMessages()
134 | }
135 | }
136 |
137 | func showInfo() {
138 | SettingsWindowPresenter.shared.present(deeplinkingTo: .sources(sourceId: chatModel.source.id, sourcesTab: .properties))
139 | }
140 |
141 | func configureParameters() {
142 | SettingsWindowPresenter.shared.present(deeplinkingTo: .sources(sourceId: chatModel.source.id, sourcesTab: .parameters))
143 | }
144 |
145 | func loadModelStats() {
146 | context = .loading
147 | contextTokenCount = .loading
148 |
149 | Task.init {
150 | do {
151 | let context = try await chatModel.loadContext()
152 | await MainActor.run {
153 | self.context = context.contextString.map { .value($0) } ?? .none
154 | let tokenCount = context.tokens?.count
155 | self.contextTokenCount = tokenCount.map { .value($0) } ?? .none
156 | }
157 | } catch {
158 | self.context = .unknown
159 | self.contextTokenCount = .unknown
160 | }
161 | }
162 | }
163 | }
164 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/ChatListViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatSourcesViewModel.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 30/03/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class ChatListItemViewModel: ObservableObject {
12 | private let chatSource: ChatSource
13 |
14 | var id: String { chatSource.id }
15 | var modelDescription: String {
16 | var suffix: String
17 | switch chatSource.modelSize {
18 | case .unknown: suffix = ""
19 | case .size7B: suffix = " (7B)"
20 | case .size13B: suffix = " (13B)"
21 | case .size30B: suffix = " (30B)"
22 | case .size65B: suffix = " (65B)"
23 | }
24 |
25 | return "\(chatSource.type.readableName)\(suffix)"
26 | }
27 | @Published var title: String
28 |
29 | private var subscriptions = Set<AnyCancellable>()
30 |
31 | private(set) lazy var avatarViewModel = AvatarViewModel(chatSource: chatSource)
32 |
33 | private weak var chatListViewModel: ChatListViewModel?
34 |
35 | fileprivate init(chatSource: ChatSource, chatListViewModel: ChatListViewModel) {
36 | self.chatSource = chatSource
37 | self.chatListViewModel = chatListViewModel
38 | self.title = chatSource.name
39 | chatSource.$name.sink(receiveValue: { [weak self] newName in
40 | self?.title = newName
41 | }).store(in: &subscriptions)
42 | }
43 |
44 | func remove() {
45 | chatListViewModel?.removeSource(chatSource)
46 | }
47 | }
48 |
49 | class ChatListViewModel: ObservableObject {
50 | private let chatSources: ChatSources
51 | private weak var mainChatViewModel: MainChatViewModel?
52 |
53 | @Published private(set) var items: [ChatListItemViewModel]
54 | @Published private(set) var selectedSourceId: String?
55 |
56 | private var subscriptions = Set<AnyCancellable>()
57 |
58 | init(chatSources: ChatSources, mainChatViewModel: MainChatViewModel) {
59 | self.chatSources = chatSources
60 | self.mainChatViewModel = mainChatViewModel
61 |
62 | items = []
63 | items = chatSources.sources.map { ChatListItemViewModel(chatSource: $0, chatListViewModel: self) }
64 |
65 | chatSources.$sources.sink(receiveValue: { newSources in
66 | self.items = newSources.map { ChatListItemViewModel(chatSource: $0, chatListViewModel: self) }
67 | }).store(in: &subscriptions)
68 | mainChatViewModel.$selectedSourceId.sink(receiveValue: { newSelectedSourceId in
69 | self.selectedSourceId = newSelectedSourceId
70 | }).store(in: &subscriptions)
71 | }
72 |
73 | func selectSource(with id: String?) {
74 | mainChatViewModel?.selectedSourceId = id
75 | }
76 |
77 | func removeSource(_ source: ChatSource) {
78 | mainChatViewModel?.removeChatSource(source)
79 | }
80 |
81 | func itemViewModel(with sourceId: String?) -> ChatListItemViewModel? {
82 | guard let sourceId else { return nil }
83 | return items.first(where: { $0.id == sourceId })
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/ChatViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatViewModel.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 26/03/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | class ChatViewModel: ObservableObject {
11 | private let chatSource: ChatSource
12 | private let chatModels: ChatModels
13 | private let chatModel: ChatModel
14 |
15 | var sourceId: String {
16 | chatModel.source.id
17 | }
18 |
19 | var sourceName: String {
20 | chatModel.source.name
21 | }
22 |
23 | private(set) lazy var composeViewModel = ComposeViewModel(chatModel: chatModel)
24 | private(set) lazy var infoViewModel = ChatInfoViewModel(chatModel: chatModel)
25 | private(set) lazy var messagesViewModel = MessagesViewModel(chatModel: chatModel)
26 |
27 |
28 | init(chatSource: ChatSource, chatModels: ChatModels, messagesModel: MessagesModel) {
29 | self.chatSource = chatSource
30 | self.chatModels = chatModels
31 | self.chatModel = chatModels.chatModel(for: chatSource)
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/ChatWindowContentViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MainChatViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class MainChatViewModel: ObservableObject {
12 | enum RestorableKey: String {
13 | case sidebarWidth
14 | case selectedSourceId
15 | }
16 |
17 | private let chatSources: ChatSources
18 | private let chatModels: ChatModels
19 | private let messagesModel: MessagesModel
20 | private let restorableData: any RestorableData<RestorableKey>
21 |
22 | @Published var selectedSourceId: ChatSource.ID? {
23 | didSet {
24 | restorableData.set(value: selectedSourceId, for: .selectedSourceId)
25 | }
26 | }
27 | @Published var sidebarWidth: Double? {
28 | didSet {
29 | restorableData.set(value: sidebarWidth, for: .sidebarWidth)
30 | }
31 | }
32 |
33 | @Published var sheetViewModel: (any ObservableObject)?
34 | @Published var sheetPresented = false
35 |
36 | lazy private(set) var chatListViewModel = ChatListViewModel(chatSources: chatSources, mainChatViewModel: self)
37 |
38 | private var subscriptions = Set<AnyCancellable>()
39 |
40 | init(
41 | chatSources: ChatSources,
42 | chatModels: ChatModels,
43 | messagesModel: MessagesModel,
44 | stateRestoration: StateRestoration
45 | ) {
46 | self.chatSources = chatSources
47 | self.chatModels = chatModels
48 | self.messagesModel = messagesModel
49 | self.restorableData = stateRestoration.restorableData(for: "ChatWindow")
50 | _sidebarWidth = Published(initialValue: restorableData.getValue(for: .sidebarWidth) ?? 200)
51 | _selectedSourceId = Published(initialValue: restorableData.getValue(for: .selectedSourceId) ?? chatSources.sources.first?.id)
52 |
53 | // A bit hacky, but receive(on:) ensures that chatSources.sources has already been updated to its
54 | // new value by the time the sink() closure runs (otherwise it would still hold the previous value).
55 | chatSources.$sources
56 | .receive(on: DispatchQueue.main)
57 | .scan((nil as [ChatSource]?, chatSources.sources)) { (previous, current) in
58 | let lastCurrent = previous.1
59 | return (lastCurrent, current)
60 | }
61 | .sink { [weak self] (previousSources, newSources) in
62 | guard let self else { return }
63 |
64 | if newSources.count == 1 && (previousSources?.isEmpty ?? true) {
65 | self.selectedSourceId = newSources.first?.id
66 | }
67 |
68 | if !newSources.map({ $0.id }).contains(self.selectedSourceId) {
69 | if let previousIndex = previousSources?.firstIndex(where: { $0.id == self.selectedSourceId }) {
70 | let nextIndex = previousIndex > 0 ? previousIndex - 1 : previousIndex
71 | self.selectedSourceId = nextIndex < newSources.count ? newSources[nextIndex].id : nil
72 | } else {
73 | self.selectedSourceId = newSources.first?.id
74 | }
75 | }
76 | }.store(in: &subscriptions)
77 |
78 | $sheetViewModel.sink { [weak self] newSheetViewModel in
79 | self?.sheetPresented = newSheetViewModel != nil
80 | }.store(in: &subscriptions)
81 | }
82 |
83 | func makeChatViewModel(for sourceId: String) -> ChatViewModel? {
84 | guard let chatSource = chatSources.sources.first(where: { $0.id == sourceId }) else { return nil }
85 | return ChatViewModel(chatSource: chatSource, chatModels: chatModels, messagesModel: messagesModel)
86 | }
87 |
88 | func removeChatSource(_ chatSource: ChatSource) {
89 | sheetViewModel = ConfirmDeleteSourceSheetViewModel(
90 | chatSource: chatSource,
91 | chatSources: chatSources,
92 | closeHandler: { [weak self] in
93 | self?.sheetViewModel = nil
94 | }
95 | )
96 | }
97 |
98 | func presentAddSourceSheet() {
99 | sheetViewModel = AddSourceViewModel(chatSources: chatSources, closeHandler: { [weak self] _ in
100 | self?.sheetViewModel = nil
101 | })
102 | }
103 |
104 | func presentAddSourceSheetIfNeeded() {
105 | if chatSources.sources.isEmpty {
106 | presentAddSourceSheet()
107 | }
108 | }
109 | }
110 |
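
Editor's note: a standalone sketch (not part of the repository) of the scan-based previous/current pairing used in the initializer above, shown with plain integer arrays instead of chat sources.

import Combine

let sources = CurrentValueSubject<[Int], Never>([1])
let cancellable = sources
  .scan((nil as [Int]?, sources.value)) { accumulator, current in (accumulator.1, current) }
  .sink { pair in
    // pair.0 is the previously observed value, pair.1 is the latest one.
    print(pair.0 ?? [], pair.1)
  }
sources.send([1, 2]) // prints "[1] [1, 2]" after the initial "[1] [1]"
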
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/avatar/AvatarViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AvatarViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 03/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class AvatarViewModel: ObservableObject {
12 | enum Avatar {
13 | case initials(String)
14 | case image(named: String)
15 | }
16 |
17 | @Published var avatar: Avatar
18 |
19 | private let chatSource: ChatSource
20 |
21 | init(chatSource: ChatSource) {
22 | self.chatSource = chatSource
23 | avatar = makeAvatar(for: chatSource.avatarImageName, name: chatSource.name)
24 |
25 | chatSource.$avatarImageName
26 | .combineLatest(chatSource.$name)
27 | .map { newAvatarImageName, newName in
28 | makeAvatar(for: newAvatarImageName, name: newName)
29 | }
30 | .assign(to: &$avatar)
31 | }
32 | }
33 |
34 | private func makeAvatar(for avatarImageName: String?, name: String) -> AvatarViewModel.Avatar {
35 | if let avatarImageName {
36 | return .image(named: avatarImageName)
37 | } else {
38 | let initials = String(name.components(separatedBy: .whitespacesAndNewlines).map({$0.prefix(1)}).joined(separator: ""))
39 | return .initials(initials)
40 | }
41 | }
42 |
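
Editor's note: an illustrative sketch (not part of the repository) of the initials fallback implemented by makeAvatar(for:name:) above.

import Foundation

let name = "Alex Rozanski"
let initials = String(name.components(separatedBy: .whitespacesAndNewlines).map { $0.prefix(1) }.joined(separator: ""))
// initials == "AR"; this is what the .initials case carries when no avatar image is set.
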
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/composer/ComposeViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ComposeViewModel.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 26/03/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class ComposeViewModel: ObservableObject {
12 | private let chatModel: ChatModel
13 |
14 | @Published var text: String = ""
15 | @Published var allowedToCompose: Bool
16 | @Published var canClearContext: Bool
17 |
18 | private var subscriptions = Set<AnyCancellable>()
19 |
20 | init(chatModel: ChatModel) {
21 | self.chatModel = chatModel
22 | self.allowedToCompose = canCompose(for: chatModel.replyState)
23 | self.canClearContext = chatModel.canClearContext
24 |
25 | chatModel.$replyState.sink { replyState in
26 | self.allowedToCompose = canCompose(for: replyState)
27 | }.store(in: &subscriptions)
28 | chatModel.$canClearContext.sink { canClearContext in
29 | self.canClearContext = canClearContext
30 | }.store(in: &subscriptions)
31 | }
32 |
33 | func send(message: String) {
34 | chatModel.send(message: StaticMessage(content: message, sender: .me, sendDate: Date(), isError: false))
35 | text = ""
36 | }
37 |
38 | func clearContext() {
39 | chatModel.clearContext()
40 | }
41 | }
42 |
43 | private func canCompose(for replyState: ChatModel.ReplyState) -> Bool {
44 | switch replyState {
45 | case .none:
46 | return true
47 | case .responding, .waitingToRespond:
48 | return false
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/messages/ClearedContextMessageViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ClearedContextMessageViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 05/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class ClearedContextMessageViewModel: MessageViewModel {
12 | var id: UUID { message.id }
13 | var sender: Sender { message.sender }
14 | var sendDate: Date { message.sendDate }
15 |
16 | let canCopyContents = CurrentValueSubject<Bool, Never>(false)
17 |
18 | private let message: ClearedContextMessage
19 |
20 | init(message: ClearedContextMessage) {
21 | self.message = message
22 | }
23 |
24 | func copyContents() {}
25 | }
26 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/messages/GeneratedMessageViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // GeneratedMessageViewModel.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 31/03/2023.
6 | //
7 |
8 | import AppKit
9 | import Foundation
10 | import Combine
11 |
12 | class GeneratedMessageViewModel: ObservableObject, MessageViewModel {
13 | var id: UUID { message.id }
14 |
15 | private let message: GeneratedMessage
16 |
17 | @Published var content: String
18 | @Published var state: MessageGenerationState
19 | @Published var isError: Bool = false
20 |
21 | var sender: Sender { return message.sender }
22 |
23 | private var subscriptions = Set<AnyCancellable>()
24 |
25 | let canCopyContents = CurrentValueSubject<Bool, Never>(false)
26 |
27 | init(message: GeneratedMessage) {
28 | self.message = message
29 | content = message.content
30 | state = message.state
31 |
32 | message.contentDidChange.sink { [weak self] in
33 | self?.content = message.content
34 | }.store(in: &subscriptions)
35 | message.$state.sink { [weak self] newState in
36 | self?.state = newState
37 |
38 | switch newState {
39 | case .none, .error, .generating, .waiting:
40 | self?.canCopyContents.send(false)
41 | case .cancelled, .finished:
42 | self?.canCopyContents.send(true)
43 | }
44 | }.store(in: &subscriptions)
45 | message.$isError.sink { [weak self] newIsError in
46 | self?.isError = newIsError
47 | }.store(in: &subscriptions)
48 |
49 | canCopyContents.sink { [weak self] _ in self?.objectWillChange.send() }.store(in: &subscriptions)
50 | }
51 |
52 | func stopGeneratingContent() {
53 | message.cancelGeneration()
54 | }
55 |
56 | func copyContents() {
57 | switch state {
58 | case .none, .error, .generating, .waiting:
59 | break
60 | case .cancelled, .finished:
61 | NSPasteboard.general.prepareForNewContents()
62 | NSPasteboard.general.setString(content, forType: .string)
63 | }
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/messages/MessageViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessageViewModel.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | protocol MessageViewModel {
12 | var id: UUID { get }
13 |
14 | var canCopyContents: CurrentValueSubject<Bool, Never> { get }
15 |
16 | func copyContents()
17 | }
18 |
19 | class ObservableMessageViewModel: ObservableObject {
20 | private let wrapped: MessageViewModel
21 | private var subscriptions = Set<AnyCancellable>()
22 |
23 | @Published var canCopyContents: Bool
24 |
25 | var id: UUID { wrapped.id }
26 |
27 | init(_ wrapped: MessageViewModel) {
28 | self.wrapped = wrapped
29 | self.canCopyContents = wrapped.canCopyContents.value
30 | wrapped.canCopyContents
31 | .sink { [weak self] newCanCopyContents in self?.canCopyContents = newCanCopyContents }
32 | .store(in: &subscriptions)
33 | }
34 |
35 | func copyContents() {
36 | wrapped.copyContents()
37 | }
38 |
39 | func getUnderlyingViewModel() -> MessageViewModel {
40 | return wrapped
41 | }
42 |
43 | func get<T>() -> T? {
44 | return wrapped as? T
45 | }
46 | }
47 |
48 | extension ObservableMessageViewModel: Equatable {
49 | static func == (lhs: ObservableMessageViewModel, rhs: ObservableMessageViewModel) -> Bool {
50 | return lhs.id == rhs.id
51 | }
52 |
53 | }
54 |
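
Editor's note: a minimal sketch (not part of the repository) of the type-erasing wrapper above; it assumes the StaticMessage initializer used in ComposeViewModel.send(message:) and the get<T>() helper defined in this file.

import Foundation

let message = StaticMessage(content: "Hello", sender: .me, sendDate: Date(), isError: false)
let wrapped = ObservableMessageViewModel(StaticMessageViewModel(message: message))
if let staticViewModel: StaticMessageViewModel = wrapped.get() {
  staticViewModel.copyContents() // copies "Hello" to the general pasteboard
}
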
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/messages/MessagesViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessagesViewModel.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 26/03/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class MessagesViewModel: ObservableObject {
12 | private let chatModel: ChatModel
13 |
14 | #if DEBUG
15 | let isBuiltForDebug = true
16 | #else
17 | let isBuiltForDebug = false
18 | #endif
19 |
20 | @Published var messages: [ObservableMessageViewModel]
21 |
22 | private var subscriptions = Set<AnyCancellable>()
23 |
24 | init(chatModel: ChatModel) {
25 | self.chatModel = chatModel
26 | messages = []
27 | messages = makeViewModels(from: chatModel.messages, in: self)
28 | chatModel.$messages.sink { newMessages in
29 | self.messages = makeViewModels(from: newMessages, in: self)
30 | }.store(in: &subscriptions)
31 | }
32 | }
33 |
34 | private func makeViewModels(from messages: [Message], in messagesViewModel: MessagesViewModel) -> [ObservableMessageViewModel] {
35 | return messages.compactMap { message in
36 | if let staticMessage = message as? StaticMessage {
37 | return ObservableMessageViewModel(StaticMessageViewModel(message: staticMessage))
38 | } else if let generatedMessage = message as? GeneratedMessage {
39 | return ObservableMessageViewModel(GeneratedMessageViewModel(message: generatedMessage))
40 | } else if let clearedContextMessage = message as? ClearedContextMessage {
41 | return ObservableMessageViewModel(ClearedContextMessageViewModel(message: clearedContextMessage))
42 | } else {
43 | print("Unsupported message type for \(message)")
44 | return nil
45 | }
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/chat/messages/StaticMessageViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StaticMessageViewModel.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import AppKit
9 | import Foundation
10 | import Combine
11 |
12 | class StaticMessageViewModel: MessageViewModel {
13 | private let message: StaticMessage
14 |
15 | var id: UUID { message.id }
16 | var content: String {
17 | return message.content
18 | }
19 | var sender: Sender { message.sender }
20 | var isError: Bool { message.isError }
21 |
22 | let canCopyContents = CurrentValueSubject<Bool, Never>(true)
23 |
24 | init(message: StaticMessage) {
25 | self.message = message
26 | }
27 |
28 | func copyContents() {
29 | NSPasteboard.general.prepareForNewContents()
30 | NSPasteboard.general.setString(content, forType: .string)
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/debug/ModelContextContentViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelContextContentViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 03/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class ModelContextContentViewModel: ObservableObject {
12 | enum Context {
13 | case empty
14 | case context(string: String, tokens: [ChatModel.ChatContext.Token])
15 |
16 | var isEmpty: Bool {
17 | switch self {
18 | case .empty: return true
19 | case .context: return false
20 | }
21 | }
22 | }
23 |
24 | enum ContextPresentation: String, Identifiable, CaseIterable {
25 | case text
26 | case tokens
27 | case both
28 |
29 | var id: String {
30 | return rawValue
31 | }
32 | }
33 |
34 | let chatSourceId: ChatSource.ID?
35 |
36 | private var chatSource: ChatSource? {
37 | didSet {
38 | hasSource = chatSource != nil
39 | }
40 | }
41 | private var chatModel: ChatModel?
42 |
43 | var chatSources: ChatSources? {
44 | didSet {
45 | updateState()
46 | }
47 | }
48 | var chatModels: ChatModels? {
49 | didSet {
50 | updateState()
51 | }
52 | }
53 |
54 | private var chatContext: ChatModel.ChatContext? {
55 | didSet {
56 | guard let chatContext, let contextString = chatContext.contextString, let tokens = chatContext.tokens else {
57 | context = .empty
58 | return
59 | }
60 | context = .context(string: contextString, tokens: tokens)
61 | }
62 | }
63 |
64 | @Published private(set) var contextPresentation: ContextPresentation = .text
65 |
66 | @Published private(set) var hasSource = false
67 | @Published private(set) var context: Context = .empty
68 |
69 | private var contextCancellable: AnyCancellable?
70 |
71 | init(chatSourceId: ChatSource.ID?) {
72 | self.chatSourceId = chatSourceId
73 | }
74 |
75 | func updateContextPresentation(_ contextPresentation: ContextPresentation) {
76 | self.contextPresentation = contextPresentation
77 | }
78 |
79 | private func updateState() {
80 | guard let chatSources, let chatModels else {
81 | contextCancellable = nil
82 | chatContext = nil
83 | return
84 | }
85 |
86 | guard let chatSource = chatSourceId.flatMap({ chatSources.source(for: $0) }) else {
87 | contextCancellable = nil
88 | chatContext = nil
89 | return
90 | }
91 |
92 | let chatModel = chatModels.chatModel(for: chatSource)
93 | chatContext = chatModel.lastChatContext
94 |
95 | contextCancellable = chatModel.$lastChatContext.receive(on: DispatchQueue.main).sink(receiveValue: { [weak self] newContext in
96 | self?.chatContext = newContext
97 | })
98 |
99 | self.chatSource = chatSource
100 | self.chatModel = chatModel
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/settings/GeneralSettingsViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // GeneralSettingsViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 15/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | class GeneralSettingsViewModel: ObservableObject {
11 | @Published var numThreads: Int {
12 | didSet {
13 | AppSettings.shared.numThreads = numThreads
14 | }
15 | }
16 |
17 | var threadCountRange: ClosedRange<Int> {
18 | return ProcessInfo.processInfo.threadCountRange
19 | }
20 |
21 | init() {
22 | numThreads = AppSettings.shared.numThreads
23 |
24 | AppSettings.shared.$numThreads.receive(on: DispatchQueue.main).assign(to: &$numThreads)
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/settings/SettingsViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SettingsWindowViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import AppKit
9 |
10 | enum SettingsTab {
11 | case general
12 | case sources
13 | }
14 |
15 | class SettingsViewModel: ObservableObject {
16 | enum InitialSourcesTab {
17 | case properties
18 | case parameters
19 | }
20 |
21 | private let chatSources: ChatSources
22 | private let stateRestoration: StateRestoration
23 |
24 | @Published var selectedTab: SettingsTab = .general
25 |
26 | private(set) lazy var generalSettingsViewModel = GeneralSettingsViewModel()
27 | private(set) lazy var sourcesSettingsViewModel = SourcesSettingsViewModel(chatSources: chatSources, stateRestoration: stateRestoration)
28 |
29 | init(chatSources: ChatSources, stateRestoration: StateRestoration) {
30 | self.chatSources = chatSources
31 | self.stateRestoration = stateRestoration
32 | }
33 |
34 | func selectSourceInSourcesTab(forSourceWithId sourceId: ChatSource.ID?, initialTab: InitialSourcesTab) {
35 | selectedTab = .sources
36 | if let sourceId {
37 | sourcesSettingsViewModel.selectedSourceId = sourceId
38 | let tab: SourcesSettingsDetailViewModel.Tab
39 | switch initialTab {
40 | case .properties:
41 | tab = .properties
42 | case .parameters:
43 | tab = .parameters
44 | }
45 | sourcesSettingsViewModel.detailViewModel?.selectedTab = tab
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/settings/sources/SourceSettingsPropertiesViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourceSettingsPropertiesViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 20/04/2023.
6 | //
7 |
8 | import AppKit
9 | import Combine
10 |
11 | class SourceSettingsPropertiesViewModel: ObservableObject {
12 | private let source: ChatSource
13 |
14 | var modelPath: String
15 |
16 | var type: String {
17 | return source.type.readableName
18 | }
19 | var modelSize: String {
20 | switch source.modelSize {
21 | case .unknown:
22 | return "Unknown"
23 | case .size7B:
24 | return "7B"
25 | case .size13B:
26 | return "13B"
27 | case .size30B:
28 | return "30B"
29 | case .size65B:
30 | return "65B"
31 | }
32 | }
33 |
34 | @Published private(set) var name: String
35 | @Published var avatarImageName: String?
36 |
37 | @Published var useMlock: Bool
38 |
39 | private var subscriptions = Set<AnyCancellable>()
40 |
41 | init(source: ChatSource) {
42 | self.source = source
43 |
44 | modelPath = source.modelURL.path
45 | name = source.name
46 | avatarImageName = source.avatarImageName
47 | useMlock = source.useMlock
48 |
49 | source.$name.assign(to: &$name)
50 | $avatarImageName.assign(to: &source.$avatarImageName)
51 |
52 | source.$useMlock.assign(to: &$useMlock)
53 | $useMlock
54 | .removeDuplicates()
55 | .dropFirst()
56 | .sink { [weak source] in
57 | source?.useMlock = $0
58 | }
59 | .store(in: &subscriptions)
60 | }
61 |
62 | func updateName(_ newName: String) {
63 | source.name = newName
64 | }
65 |
66 | func showModelInFinder() {
67 | NSWorkspace.shared.activateFileViewerSelecting([source.modelURL])
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/settings/sources/SourcesSettingsDetailViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourcesSettingsDetailViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import AppKit
9 | import Combine
10 |
11 | class SourcesSettingsDetailViewModel: ObservableObject {
12 | enum Tab: CaseIterable {
13 | case properties
14 | case parameters
15 | }
16 |
17 | private let source: ChatSource
18 | private let stateRestoration: StateRestoration
19 |
20 | var id: ChatSource.ID { source.id }
21 |
22 | @Published var selectedTab: Tab
23 |
24 | private(set) lazy var propertiesViewModel = SourceSettingsPropertiesViewModel(source: source)
25 | private(set) lazy var parametersViewModel = SourceSettingsParametersViewModel(source: source, stateRestoration: stateRestoration)
26 |
27 | init(source: ChatSource, selectedTab: Tab?, stateRestoration: StateRestoration) {
28 | self.source = source
29 | self.selectedTab = selectedTab ?? .properties
30 | self.stateRestoration = stateRestoration
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/settings/sources/SourcesSettingsViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SourcesSettingsViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 |
11 | class SourcesSettingsSourceItemViewModel: ObservableObject {
12 | fileprivate let source: ChatSource
13 |
14 | var id: String { source.id }
15 | @Published var title: String
16 |
17 | private var subscriptions = Set<AnyCancellable>()
18 |
19 | init(source: ChatSource) {
20 | self.source = source
21 | self.title = source.name
22 | source.$name.sink(receiveValue: { [weak self] newName in
23 | self?.title = newName
24 | }).store(in: &subscriptions)
25 | }
26 | }
27 |
28 | class SourcesSettingsViewModel: ObservableObject {
29 | private let chatSources: ChatSources
30 | private let stateRestoration: StateRestoration
31 |
32 | @Published var sources: [SourcesSettingsSourceItemViewModel]
33 | @Published var selectedSourceId: ChatSource.ID? {
34 | didSet {
35 | guard let selectedSourceId, let source = chatSources.source(for: selectedSourceId) else {
36 | detailViewModel = nil
37 | return
38 | }
39 |
40 | let oldDetailViewModel = detailViewModel
41 | detailViewModel = SourcesSettingsDetailViewModel(
42 | source: source,
43 | selectedTab: oldDetailViewModel?.selectedTab ?? .properties,
44 | stateRestoration: stateRestoration
45 | )
46 | }
47 | }
48 |
49 | @Published var detailViewModel: SourcesSettingsDetailViewModel?
50 | @Published var sheetViewModel: (any ObservableObject)?
51 |
52 | @Published var sheetPresented = false
53 |
54 | private var subscriptions = Set<AnyCancellable>()
55 |
56 | init(chatSources: ChatSources, stateRestoration: StateRestoration) {
57 | self.chatSources = chatSources
58 | self.sources = chatSources.sources.map { SourcesSettingsSourceItemViewModel(source: $0) }
59 | self.stateRestoration = stateRestoration
60 |
61 | chatSources.$sources.sink(receiveValue: { [weak self] sources in
62 | self?.sources = sources.map { SourcesSettingsSourceItemViewModel(source: $0) }
63 | }).store(in: &subscriptions)
64 |
65 | // A bit hacky, but use receive(on:) so that chatSources.sources has been updated to its new value
66 | // by the time the sink runs (otherwise chatSources.sources will not have been updated yet inside `sink()`).
67 | chatSources.$sources
68 | .receive(on: DispatchQueue.main)
69 | .scan((nil as [ChatSource]?, chatSources.sources)) { (previous, current) in
70 | let lastCurrent = previous.1
71 | return (lastCurrent, current)
72 | }
73 | .sink(receiveValue: { [weak self] previousSources, newSources in
74 | guard let self else { return }
75 |
76 | if newSources.count == 1 && (previousSources?.isEmpty ?? true) {
77 | self.selectedSourceId = newSources.first?.id
78 | }
79 |
80 | if !newSources.map({ $0.id }).contains(self.selectedSourceId) {
81 | self.selectedSourceId = nil
82 | }
83 | }).store(in: &subscriptions)
84 |
85 | $sheetViewModel.sink { [weak self] newSheetViewModel in
86 | self?.sheetPresented = newSheetViewModel != nil
87 | }.store(in: &subscriptions)
88 | }
89 |
90 | func remove(_ source: ChatSource) {
91 | chatSources.remove(source: source)
92 | }
93 |
94 | func selectFirstSourceIfEmpty() {
95 | if selectedSourceId == nil {
96 | selectedSourceId = sources.first?.id
97 | }
98 | }
99 |
100 | func showAddSourceSheet() {
101 | sheetViewModel = AddSourceViewModel(chatSources: chatSources, closeHandler: { [weak self] newSource in
102 | self?.sheetViewModel = nil
103 | if let newSource {
104 | self?.selectedSourceId = newSource.id
105 | }
106 | })
107 | }
108 |
109 | func showConfirmDeleteSourceSheet(forSourceWithId sourceId: ChatSource.ID) {
110 | guard let source = chatSources.source(for: sourceId) else { return }
111 |
112 | sheetViewModel = ConfirmDeleteSourceSheetViewModel(
113 | chatSource: source,
114 | chatSources: chatSources,
115 | closeHandler: { [weak self] in
116 | self?.sheetViewModel = nil
117 | }
118 | )
119 | }
120 | }
121 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/sources/AddSourceViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AddSourceViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import Foundation
9 | import llama
10 |
11 | enum AddSourceStep: Hashable {
12 | case configureSource
13 | case convertPyTorchSource
14 | }
15 |
16 | class AddSourceViewModel: ObservableObject {
17 | typealias CloseHandler = (_ newChatSource: ChatSource?) -> Void
18 |
19 | private let chatSources: ChatSources
20 | private let closeHandler: CloseHandler
21 |
22 | @Published var navigationPath = [AddSourceStep]()
23 |
24 | private(set) lazy var selectSourceTypeViewModel: SelectSourceTypeViewModel = {
25 | return SelectSourceTypeViewModel(chatSources: chatSources) { [weak self] sourceType in
26 | self?.configureSourceViewModel = self?.makeConfigureSourceViewModel(for: sourceType)
27 | self?.navigationPath.append(.configureSource)
28 | }
29 | }()
30 |
31 | private(set) var configureSourceViewModel: ConfigureSourceViewModel?
32 | private(set) var convertSourceViewModel: ConvertSourceViewModel?
33 |
34 | private var addedModel = false
35 |
36 | init(chatSources: ChatSources, closeHandler: @escaping CloseHandler) {
37 | self.chatSources = chatSources
38 | self.closeHandler = closeHandler
39 | }
40 |
41 | deinit {
42 | if !addedModel {
43 | convertSourceViewModel?.cleanUp_DANGEROUS()
44 | }
45 | }
46 |
47 | func cancel() {
48 | closeHandler(nil)
49 | }
50 |
51 | // MARK: - Private
52 |
53 | private func makeConfigureSourceViewModel(for sourceType: ChatSourceType) -> ConfigureSourceViewModel {
54 | let nextHandler: ConfigureLocalModelSourceViewModel.NextHandler = { [weak self] configuredSource in
55 | switch configuredSource.settings {
56 | case .ggmlModel(modelURL: let modelURL, modelSize: let modelSize):
57 | self?.add(
58 | source: ChatSource(
59 | name: configuredSource.name,
60 | avatarImageName: configuredSource.avatarImageName,
61 | type: sourceType,
62 | modelURL: modelURL,
63 | modelDirectoryId: nil,
64 | modelSize: modelSize,
65 | modelParameters: defaultModelParameters(for: sourceType),
66 | useMlock: false
67 | )
68 | )
69 | case .pyTorchCheckpoints(data: let validatedData, let modelSize):
70 | self?.convertSourceViewModel = self?.makeConvertSourceViewModel(
71 | with: sourceType,
72 | configuredSource: configuredSource,
73 | modelSize: modelSize,
74 | validatedData: validatedData
75 | )
76 | self?.navigationPath.append(.convertPyTorchSource)
77 | }
78 | }
79 |
80 | switch sourceType {
81 | case .llama:
82 | return makeConfigureLocalLlamaModelSourceViewModel(nextHandler: nextHandler)
83 | case .alpaca:
84 | return makeConfigureLocalAlpacaModelSourceViewModel(nextHandler: nextHandler)
85 | case .gpt4All:
86 | return makeConfigureLocalGPT4AllModelSourceViewModel(nextHandler: nextHandler)
87 | }
88 | }
89 |
90 | private func makeConvertSourceViewModel(
91 | with sourceType: ChatSourceType,
92 | configuredSource: ConfiguredSource,
93 | modelSize: ModelSize,
94 | validatedData: ValidatedModelConversionData
95 | ) -> ConvertSourceViewModel {
96 | return ConvertSourceViewModel(
97 | data: validatedData,
98 | completionHandler: { [weak self] modelURL, modelDirectory in
99 | self?.add(
100 | source: ChatSource(
101 | name: configuredSource.name,
102 | avatarImageName: configuredSource.avatarImageName,
103 | type: sourceType,
104 | modelURL: modelURL,
105 | modelDirectoryId: modelDirectory.id,
106 | modelSize: modelSize,
107 | modelParameters: defaultModelParameters(for: sourceType),
108 | useMlock: false
109 | )
110 | )
111 | },
112 | cancelHandler: { [weak self] in self?.closeHandler(nil) }
113 | )
114 | }
115 |
116 | private func add(source: ChatSource) {
117 | guard !addedModel else { return }
118 |
119 | chatSources.add(source: source)
120 | addedModel = true
121 | closeHandler(source)
122 | }
123 | }
124 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/sources/ConfigureSourceViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureSourceViewModel.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 30/03/2023.
6 | //
7 |
8 | import SwiftUI
9 | import Combine
10 |
11 | protocol ConfigureSourceViewModel {
12 | var chatSourceType: ChatSourceType { get }
13 |
14 | var primaryActionsViewModel: ConfigureSourcePrimaryActionsViewModel { get }
15 | }
16 |
17 | func makeConfigureLocalLlamaModelSourceViewModel(
18 | nextHandler: @escaping ConfigureLocalModelSourceViewModel.NextHandler
19 | ) -> ConfigureLocalModelSourceViewModel {
20 | return ConfigureLocalModelSourceViewModel(
21 | defaultName: "LLaMA",
22 | chatSourceType: .llama,
23 | exampleGgmlModelPath: "ggml-model-q4_0.bin",
24 | nextHandler: nextHandler
25 | )
26 | }
27 |
28 | func makeConfigureLocalAlpacaModelSourceViewModel(
29 | nextHandler: @escaping ConfigureLocalModelSourceViewModel.NextHandler
30 | ) -> ConfigureLocalModelSourceViewModel {
31 | return ConfigureLocalModelSourceViewModel(
32 | defaultName: "Alpaca",
33 | chatSourceType: .alpaca,
34 | exampleGgmlModelPath: "ggml-alpaca-7b-q4.bin",
35 | nextHandler: nextHandler
36 | )
37 | }
38 |
39 | func makeConfigureLocalGPT4AllModelSourceViewModel(
40 | nextHandler: @escaping ConfigureLocalModelSourceViewModel.NextHandler
41 | ) -> ConfigureLocalModelSourceViewModel {
42 | return ConfigureLocalModelSourceViewModel(
43 | defaultName: "GPT4All",
44 | chatSourceType: .gpt4All,
45 | exampleGgmlModelPath: "gpt4all-lora-quantized.bin",
46 | nextHandler: nextHandler
47 | )
48 | }
49 |
50 | @ViewBuilder func makeConfigureSourceView(from viewModel: ConfigureSourceViewModel) -> some View {
51 | if let viewModel = viewModel as? ConfigureLocalModelSourceViewModel {
52 | ConfigureLocalModelSourceView(viewModel: viewModel)
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/sources/SelectSourceTypeViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SelectSourceTypeViewModel.swift
3 | // Camel
4 | //
5 | // Created by Alex Rozanski on 30/03/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | class SelectSourceTypeViewModel: ObservableObject {
11 | typealias SelectSourceHandler = (ChatSourceType) -> Void
12 |
13 | struct Source {
14 | let id: String
15 | let type: ChatSourceType
16 | let name: String
17 | let description: String
18 | let learnMoreLink: URL?
19 | }
20 |
21 | @Published var sources: [Source]
22 |
23 | private let chatSources: ChatSources
24 | private let selectSourceHandler: SelectSourceHandler
25 |
26 | init(chatSources: ChatSources, selectSourceHandler: @escaping SelectSourceHandler) {
27 | self.chatSources = chatSources
28 | self.selectSourceHandler = selectSourceHandler
29 |
30 | sources = [ChatSourceType.alpaca, ChatSourceType.gpt4All, ChatSourceType.llama].map { type in
31 | switch type {
32 | case .llama:
33 | return Source(
34 | id: type.rawValue,
35 | type: type,
36 | name: "LLaMA",
37 | description: "The original Facebook LLaMA Large Language Model",
38 | learnMoreLink: URL(string: "https://github.com/facebookresearch/llama")
39 | )
40 | case .alpaca:
41 | return Source(
42 | id: type.rawValue,
43 | type: type,
44 | name: "Alpaca",
45 | description: "Stanford's Alpaca model: a fine-tuned instruction-following LLaMA model",
46 | learnMoreLink: URL(string: "https://github.com/tatsu-lab/stanford_alpaca")
47 | )
48 | case .gpt4All:
49 | return Source(
50 | id: type.rawValue,
51 | type: type,
52 | name: "GPT4All",
53 | description: "Nomic AI's assistant-style LLM based on LLaMA",
54 | learnMoreLink: URL(string: "https://github.com/nomic-ai/gpt4all")
55 | )
56 | }
57 | }
58 | }
59 |
60 | func select(sourceType: ChatSourceType) {
61 | selectSourceHandler(sourceType)
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/sources/configure/components/ConfigureLocalModelPathSelectorViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalModelPathSelectorView.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | class ConfigureLocalModelPathSelectorViewModel: ObservableObject {
11 | enum SelectionMode {
12 | case files
13 | case directories
14 | }
15 |
16 | @Published var modelPaths: [String] = []
17 | @Published var errorMessage: String?
18 |
19 | var label: String {
20 | return customLabel ?? (allowMultipleSelection ? "Model Paths" : "Model Path")
21 | }
22 |
23 | let selectionMode: SelectionMode
24 | let allowMultipleSelection: Bool
25 | let customLabel: String?
26 |
27 | init(customLabel: String? = nil, selectionMode: SelectionMode = .files, allowMultipleSelection: Bool = false) {
28 | self.customLabel = customLabel
29 | self.selectionMode = selectionMode
30 | self.allowMultipleSelection = allowMultipleSelection
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/sources/configure/components/ConfigureLocalModelSizePickerViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalModelSizePickerViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import Foundation
9 |
10 | class ConfigureLocalModelSizePickerViewModel: ObservableObject {
11 | typealias LabelProvider = (_ modelSize: ModelSize, _ defaultProvider: (ModelSize) -> String) -> String
12 |
13 | @Published var modelSize: ModelSize = .unknown
14 |
15 | private let labelProvider: LabelProvider
16 |
17 | init(labelProvider: LabelProvider? = nil) {
18 | self.labelProvider = labelProvider ?? { modelSize, _ in
19 | defaultLabelProvider(modelSize)
20 | }
21 | }
22 |
23 | func label(for modelSize: ModelSize) -> String {
24 | labelProvider(modelSize, defaultLabelProvider)
25 | }
26 | }
27 |
28 | private func defaultLabelProvider(_ modelSize: ModelSize) -> String {
29 | switch modelSize {
30 | case .unknown: return "Unknown"
31 | case .size7B: return "7B"
32 | case .size13B: return "13B"
33 | case .size30B: return "30B"
34 | case .size65B: return "65B"
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/sources/configure/settings/ConfigureLocalGgmlModelSettingsViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalGgmlModelSettingsViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 | import llama
11 |
12 | private func getInvalidModelTypeReason(from error: Error) -> ConfigureLocalGgmlModelSettingsViewModel.InvalidModelTypeReason {
13 | // Reason is always stored in the underlying error
14 | guard let underlyingError = ((error as NSError).underlyingErrors as [NSError]).first(where: { $0.domain == LlamaError.Domain }) else {
15 | return .unknown
16 | }
17 |
18 | if underlyingError.code == LlamaError.Code.invalidModelBadMagic.rawValue {
19 | return .invalidFileType
20 | }
21 |
22 | if underlyingError.code == LlamaError.Code.invalidModelUnversioned.rawValue || underlyingError.code == LlamaError.Code.invalidModelUnsupportedFileVersion.rawValue {
23 | return .unsupportedModelVersion
24 | }
25 |
26 | return .unknown
27 | }
28 |
29 | class ConfigureLocalGgmlModelSettingsViewModel: ObservableObject, ConfigureLocalModelSettingsViewModel {
30 | enum InvalidModelTypeReason {
31 | case unknown
32 | case invalidFileType
33 | case unsupportedModelVersion
34 | }
35 |
36 | enum ModelState {
37 | case none
38 | case invalidPath
39 | case invalidModel(_ reason: InvalidModelTypeReason)
40 | case valid(modelURL: URL)
41 |
42 | var isValid: Bool {
43 | switch self {
44 | case .none, .invalidPath, .invalidModel:
45 | return false
46 | case .valid:
47 | return true
48 | }
49 | }
50 | }
51 |
52 | let sourceSettings = CurrentValueSubject<SourceSettings?, Never>(nil)
53 |
54 | var sourceType: ConfigureLocalModelSourceType {
55 | return .ggml
56 | }
57 |
58 | private(set) lazy var pathSelectorViewModel = ConfigureLocalModelPathSelectorViewModel()
59 | private(set) lazy var modelSizePickerViewModel = ConfigureLocalModelSizePickerViewModel(labelProvider: { modelSize, defaultProvider in
60 | switch modelSize {
61 | case .unknown:
62 | return "Not Specified"
63 | case .size7B, .size13B, .size30B, .size65B:
64 | return defaultProvider(modelSize)
65 | }
66 | })
67 |
68 | @Published private(set) var modelState: ModelState = .none
69 |
70 | var modelPath: String? { return pathSelectorViewModel.modelPaths.first }
71 | var modelSize: ModelSize? { return modelSizePickerViewModel.modelSize }
72 |
73 | let chatSourceType: ChatSourceType
74 | let exampleModelPath: String
75 |
76 | private var subscriptions = Set<AnyCancellable>()
77 |
78 | init(chatSourceType: ChatSourceType, exampleModelPath: String) {
79 | self.chatSourceType = chatSourceType
80 | self.exampleModelPath = exampleModelPath
81 |
82 | pathSelectorViewModel.$modelPaths.sink { [weak self] newPaths in
83 | guard let self, let modelPath = newPaths.first else {
84 | self?.modelState = .none
85 | return
86 | }
87 |
88 | guard FileManager().fileExists(atPath: modelPath) else {
89 | self.modelState = .invalidPath
90 | return
91 | }
92 |
93 | let modelURL = URL(fileURLWithPath: modelPath)
94 | do {
95 | try ModelUtils.validateModel(fileURL: modelURL)
96 | } catch {
97 | print(error)
98 | self.modelState = .invalidModel(getInvalidModelTypeReason(from: error))
99 | return
100 | }
101 |
102 | self.modelState = .valid(modelURL: modelURL)
103 |
104 | do {
105 | self.modelSizePickerViewModel.modelSize = (try ModelUtils.getModelType(forFileAt: URL(fileURLWithPath: modelPath))).toModelSize()
106 | } catch {
107 | print(error)
108 | }
109 | }.store(in: &subscriptions)
110 |
111 | $modelState.sink { [weak self] newModelState in
112 | switch newModelState {
113 | case .none, .valid:
114 | self?.pathSelectorViewModel.errorMessage = nil
115 | case .invalidPath:
116 | self?.pathSelectorViewModel.errorMessage = "Selected file is invalid"
117 | case .invalidModel(let reason):
118 | switch reason {
119 | case .unknown, .invalidFileType:
120 | self?.pathSelectorViewModel.errorMessage = "Selected file is not a valid model"
121 | case .unsupportedModelVersion:
122 | self?.pathSelectorViewModel.errorMessage = "Selected model is of an unsupported version"
123 | }
124 | }
125 | }.store(in: &subscriptions)
126 |
127 | $modelState
128 | .combineLatest(modelSizePickerViewModel.$modelSize)
129 | .sink { [weak self] modelState, modelSize in
130 | guard !modelSize.isUnknown else {
131 | self?.sourceSettings.send(nil)
132 | return
133 | }
134 |
135 | switch modelState {
136 | case .none, .invalidModel, .invalidPath:
137 | self?.sourceSettings.send(nil)
138 | case .valid(modelURL: let modelURL):
139 | self?.sourceSettings.send(.ggmlModel(modelURL: modelURL, modelSize: modelSize))
140 | }
141 | }.store(in: &subscriptions)
142 | }
143 | }
144 |
145 | fileprivate extension ModelType {
146 | func toModelSize() -> ModelSize {
147 | switch self {
148 | case .unknown: return .unknown
149 | case .size7B: return .size7B
150 | case .size13B: return .size13B
151 | case .size30B: return .size30B
152 | case .size65B: return .size65B
153 | }
154 | }
155 | }
156 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/sources/configure/settings/ConfigureLocalModelSettingsViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalModelSettingsViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 06/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 | import llama
11 |
12 | enum ConfigureLocalModelSourceType: String, Identifiable, CaseIterable {
13 | case pyTorch
14 | case ggml
15 |
16 | var id: String { return rawValue }
17 | }
18 |
19 | enum SourceSettings {
20 | case ggmlModel(modelURL: URL, modelSize: ModelSize)
21 | case pyTorchCheckpoints(data: ValidatedModelConversionData, modelSize: ModelSize)
22 | }
23 |
24 | protocol ConfigureLocalModelSettingsViewModel {
25 | var sourceType: ConfigureLocalModelSourceType { get }
26 | var modelSize: ModelSize? { get }
27 |
28 | var sourceSettings: CurrentValueSubject<SourceSettings?, Never> { get }
29 | }
30 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/sources/configure/settings/ConfigureLocalModelSourceViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConfigureLocalModelSourceViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 01/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Combine
10 | import SwiftUI
11 |
12 | struct ConfiguredSource {
13 | let name: String
14 | let avatarImageName: String?
15 | let settings: SourceSettings
16 | }
17 |
18 | class ConfigureLocalModelSourceViewModel: ObservableObject, ConfigureSourceViewModel {
19 | typealias NextHandler = (ConfiguredSource) -> Void
20 |
21 | private lazy var nameGenerator = SourceNameGenerator()
22 |
23 | // MARK: - Info
24 |
25 | @Published var name: String
26 | @Published var avatarImageName: String?
27 |
28 | var modelType: String {
29 | return chatSourceType.readableName
30 | }
31 |
32 | var modelSourcingDescription: LocalizedStringKey {
33 | switch chatSourceType {
34 | case .llama:
35 | return "The LLaMA model checkpoints and tokenizer are required to add this chat source. Learn more and request access to these on the [LLaMA GitHub repo](https://github.com/facebookresearch/llama)."
36 | case .alpaca:
37 | return "The Alpaca model checkpoints and tokenizer are required to add this chat source. Learn more on the [Alpaca GitHub repo](https://github.com/tatsu-lab/stanford_alpaca)."
38 | case .gpt4All:
39 | return "The GPT4All .ggml model file is required to add this chat source. Learn more on the [llama.cpp GitHub repo](https://github.com/ggerganov/llama.cpp/blob/a0caa34/README.md#using-gpt4all)."
40 | }
41 | }
42 |
43 | // MARK: - Model Settings
44 |
45 | var settingsViewModels = [ConfigureLocalModelSourceType: ConfigureLocalModelSettingsViewModel]()
46 |
47 | @Published private(set) var modelSourceType: ConfigureLocalModelSourceType? = nil {
48 | didSet {
49 | guard let modelSourceType else {
50 | settingsViewModel = nil
51 | return
52 | }
53 |
54 | if let existingModel = settingsViewModels[modelSourceType] {
55 | settingsViewModel = existingModel
56 | } else {
57 | switch modelSourceType {
58 | case .pyTorch:
59 | let viewModel = ConfigureLocalPyTorchModelSettingsViewModel(chatSourceType: chatSourceType)
60 | viewModel.determineConversionStateIfNeeded()
61 | settingsViewModels[.pyTorch] = viewModel
62 | settingsViewModel = viewModel
63 | case .ggml:
64 | let viewModel = ConfigureLocalGgmlModelSettingsViewModel(
65 | chatSourceType: chatSourceType,
66 | exampleModelPath: exampleGgmlModelPath
67 | )
68 | settingsViewModels[.ggml] = viewModel
69 | settingsViewModel = viewModel
70 | }
71 | }
72 | }
73 | }
74 |
75 | @Published private(set) var settingsViewModel: ConfigureLocalModelSettingsViewModel?
76 |
77 | // MARK: - Validation
78 |
79 | let primaryActionsViewModel: ConfigureSourcePrimaryActionsViewModel
80 |
81 | let chatSourceType: ChatSourceType
82 | let exampleGgmlModelPath: String
83 | private let nextHandler: NextHandler
84 |
85 | private var subscriptions = Set<AnyCancellable>()
86 |
87 | init(
88 | defaultName: String? = nil,
89 | chatSourceType: ChatSourceType,
90 | exampleGgmlModelPath: String,
91 | nextHandler: @escaping NextHandler
92 | ) {
93 | self.name = defaultName ?? ""
94 | self.chatSourceType = chatSourceType
95 | self.exampleGgmlModelPath = exampleGgmlModelPath
96 | self.nextHandler = nextHandler
97 | primaryActionsViewModel = ConfigureSourcePrimaryActionsViewModel()
98 | primaryActionsViewModel.delegate = self
99 |
100 | let configuredSource = $settingsViewModel
101 | .compactMap { $0 }
102 | .map { $0.sourceSettings }
103 | .switchToLatest()
104 |
105 | $name
106 | .map { !$0.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty }
107 | .combineLatest(configuredSource)
108 | .sink { [weak self] nameValid, configuredSource in
109 | self?.primaryActionsViewModel.canContinue = nameValid && configuredSource != nil
110 | }.store(in: &subscriptions)
111 |
112 | $modelSourceType
113 | .sink { [weak self] newSourceType in
114 | self?.primaryActionsViewModel.showContinueButton = newSourceType != nil
115 |
116 | if let newSourceType {
117 | switch newSourceType {
118 | case .pyTorch:
119 | self?.primaryActionsViewModel.nextButtonTitle = "Continue"
120 | case .ggml:
121 | self?.primaryActionsViewModel.nextButtonTitle = "Add"
122 | }
123 | }
124 | }.store(in: &subscriptions)
125 | }
126 |
127 | func generateName() {
128 | if let generatedName = nameGenerator.generateName(for: chatSourceType) {
129 | name = generatedName
130 | }
131 | }
132 |
133 | func select(modelSourceType: ConfigureLocalModelSourceType?) {
134 | self.modelSourceType = modelSourceType
135 | }
136 | }
137 |
138 | extension ConfigureLocalModelSourceType {
139 | var label: String {
140 | switch self {
141 | case .pyTorch: return "PyTorch Checkpoint (.pth)"
142 | case .ggml: return "GGML (.ggml)"
143 | }
144 | }
145 | }
146 |
147 | extension ConfigureLocalModelSourceViewModel: ConfigureSourcePrimaryActionsViewModelDelegate {
148 | func next() {
149 | guard let sourceSettings = settingsViewModel?.sourceSettings.value else { return }
150 | nextHandler(
151 | ConfiguredSource(name: name, avatarImageName: avatarImageName, settings: sourceSettings)
152 | )
153 | }
154 | }
155 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/sources/convert/ConvertSourceStepViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ConvertSourceStepViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 08/04/2023.
6 | //
7 |
8 | import AppKit
9 | import Foundation
10 | import Combine
11 | import llama
12 |
13 | class ConvertSourceStepViewModel: Identifiable, ObservableObject {
14 | enum State {
15 | case notStarted
16 | case skipped
17 | case running
18 | case cancelled
19 | case finished(result: Result<Int32, Error>)
20 |
21 | var canStart: Bool {
22 | switch self {
23 | case .notStarted: return true
24 | case .skipped, .running, .cancelled, .finished: return false
25 | }
26 | }
27 | }
28 |
29 | enum OutputType {
30 | case command
31 | case stdout
32 | case stderr
33 |
34 | var isCommand: Bool {
35 | switch self {
36 | case .command:
37 | return true
38 | case .stdout, .stderr:
39 | return false
40 | }
41 | }
42 | }
43 |
44 | typealias ID = String
45 |
46 | private let timer = Timer.publish(every: 0.1, on: .main, in: .common).autoconnect()
47 | private var timerSubscription: AnyCancellable?
48 | private var subscriptions = Set<AnyCancellable>()
49 |
50 | @Published private(set) var state: State = .notStarted
51 | @Published private(set) var exitCode: Int32?
52 | @Published private(set) var expanded = false
53 |
54 | @Published private(set) var textViewModel = NonEditableTextViewModel()
55 | @Published var runTime: Double?
56 |
57 | private var lastOutputType: OutputType?
58 |
59 | var label: String {
60 | switch conversionStep.type {
61 | case .checkEnvironment:
62 | return "Checking environment"
63 | case .setUpEnvironment:
64 | return "Setting up environment"
65 | case .checkDependencies:
66 | return "Checking dependencies"
67 | case .convertModel:
68 | return "Converting model"
69 | case .quantizeModel:
70 | return "Quantizing model"
71 | }
72 | }
73 |
74 | let id: ID
75 | private let conversionStep: AnyConversionStep
76 |
77 | init(conversionStep: AnyConversionStep) {
78 | self.id = UUID().uuidString
79 | self.conversionStep = conversionStep
80 |
81 | conversionStep.$state
82 | .receive(on: DispatchQueue.main)
83 | .sink { [weak self] newState in
84 | guard let self else { return }
85 |
86 | switch newState {
87 | case .notStarted:
88 | self.state = .notStarted
89 | self.exitCode = nil
90 | case .skipped:
91 | self.state = .skipped
92 | self.exitCode = nil
93 | case .running:
94 | self.state = .running
95 | self.exitCode = nil
96 | case .cancelled:
97 | self.state = .cancelled
98 | self.exitCode = nil
99 | case .finished(result: let result):
100 | if let status = try? result.get(), status.exitCode == 0 {
101 | self.state = .finished(result: .success(status.exitCode))
102 | self.exitCode = status.exitCode
103 | } else {
104 | self.state = .finished(result: .success(1))
105 | self.exitCode = Int32(1)
106 | }
107 | }
108 | }.store(in: &subscriptions)
109 |
110 | $state
111 | .receive(on: DispatchQueue.main)
112 | .map { newState in
113 | switch newState {
114 | case .notStarted, .skipped, .running, .cancelled:
115 | return nil
116 | case .finished(result: let result):
117 | switch result {
118 | case .success(let exitCode):
119 | return exitCode
120 | case .failure:
121 | return nil
122 | }
123 | }
124 | }
125 | .assign(to: &$exitCode)
126 |
127 | conversionStep.$startDate
128 | .combineLatest(conversionStep.$runUntilDate)
129 | .receive(on: DispatchQueue.main)
130 | .map { startDate, endDate in
131 | guard let startDate, let endDate else { return nil }
132 | return endDate.timeIntervalSince(startDate)
133 | }
134 | .assign(to: &$runTime)
135 |
136 | conversionStep.commandOutput.sink { [weak self] output in self?.appendOutput(string: output, outputType: .command) }.store(in: &subscriptions)
137 | conversionStep.stdoutOutput.sink { [weak self] output in self?.appendOutput(string: output, outputType: .stdout) }.store(in: &subscriptions)
138 | conversionStep.stderrOutput.sink { [weak self] output in self?.appendOutput(string: output, outputType: .stderr) }.store(in: &subscriptions)
139 | }
140 |
141 | func toggleExpansion() {
142 | expanded = !expanded
143 | }
144 |
145 | private func appendOutput(string: String, outputType: OutputType) {
146 | if outputType != lastOutputType && !textViewModel.isEmpty {
147 | textViewModel.append(attributedString: NSAttributedString(string: "\n"))
148 | }
149 |
150 | var color: NSColor?
151 | switch outputType {
152 | case .command: color = NSColor.controlTextColor
153 | case .stdout: color = .gray
154 | case .stderr: color = .red
155 | }
156 |
157 | textViewModel.append(attributedString: makeFormattedText(string: string, color: color))
158 | lastOutputType = outputType
159 | }
160 | }
161 |
162 | private func makeFormattedText(string: String, color: NSColor? = nil) -> NSAttributedString {
163 | var attributes = [NSAttributedString.Key: Any]()
164 | attributes[.font] = NSFont.monospacedSystemFont(ofSize: 12, weight: .regular)
165 | if let color {
166 | attributes[.foregroundColor] = color
167 | }
168 |
169 | return NSAttributedString(string: string, attributes: attributes)
170 | }
171 |
--------------------------------------------------------------------------------
/LlamaChat/viewmodel/updates/CheckForUpdatesViewModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CheckForUpdatesViewModel.swift
3 | // LlamaChat
4 | //
5 | // Created by Alex Rozanski on 13/04/2023.
6 | //
7 |
8 | import Foundation
9 | import Sparkle
10 |
11 | final class CheckForUpdatesViewModel: ObservableObject {
12 | @Published var canCheckForUpdates = false
13 |
14 | let updaterController: SPUStandardUpdaterController
15 |
16 | init() {
17 | updaterController = SPUStandardUpdaterController(startingUpdater: true, updaterDelegate: nil, userDriverDelegate: nil)
18 | updaterController.updater.publisher(for: \.canCheckForUpdates).assign(to: &$canCheckForUpdates)
19 | }
20 |
21 | func checkForUpdates() {
22 | updaterController.updater.checkForUpdates()
23 | }
24 | }
25 |
26 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 |
3 | Chat with your favourite LLaMA models, right on your Mac
4 |
5 |
6 | **LlamaChat** is a macOS app that allows you to chat with [LLaMA](http://github.com/facebookresearch/llama), [Alpaca](https://github.com/tatsu-lab/stanford_alpaca) and [GPT4All](https://github.com/nomic-ai/gpt4all) models all running locally on your Mac.
7 |
8 |
9 |
10 | ## 🚀 Getting Started
11 |
12 | LlamaChat requires macOS 13 Ventura, and either an Intel or Apple Silicon processor.
13 |
14 | ### Direct Download
15 |
16 | Download a `.dmg` containing the latest version [👉 here 👈](https://llamachat.app/api/download).
17 |
18 | ### Building from Source
19 |
20 | ```bash
21 | git clone https://github.com/alexrozanski/LlamaChat.git
22 | cd LlamaChat
23 | open LlamaChat.xcodeproj
24 | ```
25 |
26 | **NOTE:** LlamaChat includes [Sparkle](https://github.com/sparkle-project/Sparkle) for autoupdates, which will fail to load if LlamaChat is not signed. Ensure that you use a valid signing certificate when building and running LlamaChat.
27 |
28 | **NOTE:** model inference runs really slowly in Debug builds, so if building from source make sure that the `Build Configuration` in `LlamaChat > Edit Scheme... > Run` is set to `Release`.
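
If you'd rather build from the command line, something like the following should also work (the `LlamaChat` scheme name is an assumption here and may need adjusting to match the project's shared scheme):

```bash
# Build a Release configuration from the command line (scheme name assumed).
xcodebuild -project LlamaChat.xcodeproj -scheme LlamaChat -configuration Release build
```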
29 |
30 | ## ✨ Features
31 |
32 | - **Supported Models:** LlamaChat supports LLaMA, Alpaca and GPT4All models out of the box. Support for other models including [Vicuna](https://vicuna.lmsys.org/) and [Koala](https://bair.berkeley.edu/blog/2023/04/03/koala/) is coming soon. We are also looking for Chinese and French speakers to add support for [Chinese LLaMA/Alpaca](https://github.com/ymcui/Chinese-LLaMA-Alpaca) and [Vigogne](https://github.com/bofenghuang/vigogne).
33 | - **Flexible Model Formats:** LlamaChat is built on top of [llama.cpp](https://github.com/ggerganov/llama.cpp) and [llama.swift](https://github.com/alexrozanski/llama.swift). The app supports adding LLaMA models either as raw PyTorch checkpoints (`.pth`) or in the `.ggml` format.
34 | - **Model Conversion:** If raw PyTorch checkpoints are added, these can be converted within the app to `.ggml` files compatible with LlamaChat and llama.cpp.
35 | - **Chat History:** Chat history is persisted within the app. Both chat history and model context can be cleared at any time.
36 | - **Funky Avatars:** LlamaChat ships with [7 funky avatars](https://github.com/alexrozanski/LlamaChat/tree/main/LlamaChat/Assets.xcassets/avatars) that can be used with your chat sources.
37 | - **Advanced Source Naming:** LlamaChat uses Special Magic™ to generate playful names for your chat sources.
38 | - **Context Debugging:** For the keen ML enthusiasts, the current model context can be viewed for a chat in the info popover.
39 |
40 |
41 | ## 🔮 Models
42 |
43 | **NOTE:** LlamaChat doesn't ship with any model files and requires that you obtain these from their respective sources in accordance with the applicable terms and conditions.
44 |
45 | - **Model formats:** LlamaChat allows you to use the LLaMA family of models either in their raw PyTorch checkpoint form (`.pth`) or as a pre-converted `.ggml` file (the format used by [llama.cpp](https://github.com/ggerganov/llama.cpp), which powers LlamaChat).
46 | - **Using LLaMA models:** When importing LLaMA models in the `.pth` format:
47 | - In the conversion flow you should select the appropriate parameter size directory (e.g. `7B`, `13B`, etc.), which contains the `consolidated.NN.pth` and `params.json` files.
48 | - As per the LLaMA model release, the parent directory should contain `tokenizer.model`. E.g. to use the LLaMA-13B model, your model directory should look something like the below, and you should select the `13B` directory:
49 |
50 | ```bash
51 | .
52 | │ ...
53 | ├── 13B
54 | │ ├── checklist.chk.txt
55 | │ ├── consolidated.00.pth
56 | │ ├── consolidated.01.pth
57 | │ └── params.json
58 | │ ...
59 | └── tokenizer.model
60 | ```
61 |
62 | - **Troubleshooting:** If using `.ggml` files, make sure these are up-to-date. If you run into problems, you may need to use the conversion scripts from [llama.cpp](https://github.com/ggerganov/llama.cpp):
63 | - For the GPT4All model, you may need to use [convert-gpt4all-to-ggml.py](https://github.com/ggerganov/llama.cpp/blob/master/convert-gpt4all-to-ggml.py)
64 | - For the Alpaca model, you may need to use [convert-unversioned-ggml-to-ggml.py](https://github.com/ggerganov/llama.cpp/blob/master/convert-unversioned-ggml-to-ggml.py)
65 | - You may also need to use [migrate-ggml-2023-03-30-pr613.py](https://github.com/ggerganov/llama.cpp/blob/master/migrate-ggml-2023-03-30-pr613.py). For more information check out the [llama.cpp](https://github.com/ggerganov/llama.cpp) repo; an illustrative run is sketched below.
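
As a rough, illustrative sketch only (the paths and argument order for each script are assumptions; check the llama.cpp repo for the current usage), a conversion/migration run might look like:

```bash
# Illustrative invocations – paths and argument order are assumptions, not verified usage.
python3 convert-gpt4all-to-ggml.py path/to/gpt4all-lora-quantized.bin path/to/tokenizer.model
python3 migrate-ggml-2023-03-30-pr613.py path/to/ggml-model-q4_0.bin path/to/ggml-model-q4_0-migrated.bin
```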
66 |
67 |
68 | ## 👩‍💻 Contributing
69 |
70 | Pull Requests and Issues are welcome and much appreciated. Please make sure to adhere to the [Code of Conduct](CODE_OF_CONDUCT.md) at all times.
71 |
72 | LlamaChat is fully built using Swift and SwiftUI, and makes use of [llama.swift](https://github.com/alexrozanski/llama.swift) under the hood to run inference and perform model operations.
73 |
74 | The project is mostly built using MVVM and makes heavy use of Combine and Swift Concurrency.
75 |
76 | ## ⚖️ License
77 |
78 | LlamaChat is licensed under the [MIT license](LICENSE).
79 |
--------------------------------------------------------------------------------
/Resources/banner-a5248619.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/Resources/banner-a5248619.png
--------------------------------------------------------------------------------
/Resources/dmg-background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/Resources/dmg-background.png
--------------------------------------------------------------------------------
/Resources/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexrozanski/LlamaChat/13c4ed641ad557899cbdb8c1d3724ca61d37accc/Resources/screenshot.png
--------------------------------------------------------------------------------
/Scripts/.gitignore:
--------------------------------------------------------------------------------
1 | *.dmg
2 |
3 | tmp/
4 |
--------------------------------------------------------------------------------
/Scripts/bump-version.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | REPO_ROOT=$(cd "$(dirname "$0")/.."; pwd)
4 | CONFIG_PATH="$REPO_ROOT/LlamaChat/LlamaChat.xcconfig"
5 |
6 | # Check for major/minor/patch argument
7 | if [ "$#" -ne 1 ]; then
8 | SCRIPT=$(readlink -f "${BASH_SOURCE[0]}")
9 | BASENAME=$(basename "$SCRIPT")
10 |
11 | echo "Usage: $BASENAME "
12 | echo ""
13 | echo "Bumps the incremental build as well as the major/minor/patch version of the marketing version."
14 | exit 1
15 | fi
16 |
17 | # Check for .xcconfig file
18 | if ! test -f "$CONFIG_PATH"; then
19 | echo ".xcconfig file missing at $CONFIG_PATH"
20 | exit 1
21 | fi
22 |
23 | # Parse current version numbers
24 | CURRENT_PROJECT_VERSION=$(grep "CURRENT_PROJECT_VERSION" "$CONFIG_PATH" | cut -d' ' -f3)
25 | MARKETING_VERSION=$(grep "MARKETING_VERSION" "$CONFIG_PATH" | cut -d' ' -f3)
26 |
27 | if [[ -z "$CURRENT_PROJECT_VERSION" || -z "$MARKETING_VERSION" ]]; then
28 | echo "Error: Unable to parse version numbers from xcconfig file."
29 | exit 1
30 | fi
31 |
32 | # Get semver version components
33 | IFS='.' read -r -a VERSION_COMPONENTS <<< "$MARKETING_VERSION"
34 | if [[ "${#VERSION_COMPONENTS[@]}" -ne 3 || -z "${VERSION_COMPONENTS[0]}" || -z "${VERSION_COMPONENTS[1]}" || -z "${VERSION_COMPONENTS[2]}" ]]; then
35 | echo "Error: Invalid version number."
36 | exit 1
37 | fi
38 |
39 | # Bump incremental version
40 | NEW_CURRENT_PROJECT_VERSION=$CURRENT_PROJECT_VERSION
41 | ((NEW_CURRENT_PROJECT_VERSION++))
42 |
43 | # Bump marketing version
44 | case "$1" in
45 | "major" )
46 | NEW_MARKETING_VERSION="$((${VERSION_COMPONENTS[0]} + 1)).0.0"
47 | ;;
48 | "minor" )
49 | NEW_MARKETING_VERSION="${VERSION_COMPONENTS[0]}.$((${VERSION_COMPONENTS[1]} + 1)).0"
50 | ;;
51 | "patch" )
52 | NEW_MARKETING_VERSION="${VERSION_COMPONENTS[0]}.${VERSION_COMPONENTS[1]}.$((${VERSION_COMPONENTS[2]} + 1))"
53 | ;;
54 | * )
55 | echo "Error: Invalid component argument '$1' - must be 'major', 'minor' or 'patch'."
56 | exit 1
57 | ;;
58 | esac
59 |
60 | # Write out
61 | echo "Bumping CURRENT_PROJECT_VERSION: $CURRENT_PROJECT_VERSION -> $NEW_CURRENT_PROJECT_VERSION"
62 | echo "Bumping MARKETING_VERSION: $MARKETING_VERSION -> $NEW_MARKETING_VERSION"
63 |
64 | sed -i '' "s/CURRENT_PROJECT_VERSION = $CURRENT_PROJECT_VERSION/CURRENT_PROJECT_VERSION = $NEW_CURRENT_PROJECT_VERSION/g" "$CONFIG_PATH"
65 | sed -i '' "s/MARKETING_VERSION = $MARKETING_VERSION/MARKETING_VERSION = $NEW_MARKETING_VERSION/g" "$CONFIG_PATH"
66 |
--------------------------------------------------------------------------------
/Scripts/make-dmg.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | REPO_ROOT=$(cd "$(dirname "$0")/.."; pwd)
4 | OUTPUT_DMG=$REPO_ROOT/Scripts/LlamaChat.dmg
5 |
6 | if [ "$#" -ne 1 ]; then
7 | SCRIPT=$(readlink -f "${BASH_SOURCE[0]}")
8 | BASENAME=$(basename "$SCRIPT")
9 |
10 | echo "Usage: $BASENAME "
11 | echo ""
12 | echo "All contents of will be copied into the disk image."
13 | exit 1
14 | fi
15 |
16 | if ! [ -d "$1/LlamaChat.app" ]; then
17 | echo "Error: Missing LlamaChat.app in $1"
18 | exit 1
19 | fi
20 |
21 | echo "Checking notarization status..."
22 |
23 | if ! spctl -a -vvv -t install "$1/LlamaChat.app" &> /dev/null; then
24 | echo "Error: LlamaChat.app should be notarized before packaging into a .dmg"
25 | exit 1
26 | fi
27 |
28 | echo "Making AppIcon.icns..."
29 | rm -rf tmp
30 | mkdir -p tmp/AppIcon.iconset
31 | cp $REPO_ROOT/LlamaChat/Assets.xcassets/AppIcon.appiconset/*.png tmp/AppIcon.iconset
32 |
33 | if ! iconutil -c icns tmp/AppIcon.iconset; then
34 | echo "Error: couldn't make AppIcon.icns"
35 | exit 1
36 | fi
37 |
38 | if ! command -v create-dmg &> /dev/null; then
39 | echo "Error: missing create-dmg. Install using 'brew install create-dmg'"
40 | exit 1
41 | fi
42 |
43 | test -f "$OUTPUT_DMG" && rm "$OUTPUT_DMG"
44 | create-dmg \
45 | --volname "LlamaChat" \
46 | --volicon "tmp/AppIcon.icns" \
47 | --background "$REPO_ROOT/Resources/dmg-background.png" \
48 | --window-pos 200 120 \
49 | --window-size 650 440 \
50 | --icon-size 128 \
51 | --icon "LlamaChat.app" 188 198 \
52 | --hide-extension "LlamaChat.app" \
53 | --app-drop-link 460 198 \
54 | "$OUTPUT_DMG" \
55 | "$1"
56 |
57 | rm -rf tmp
58 |
--------------------------------------------------------------------------------