├── LICENSE
├── README.md
├── SwiftUI-AI-Wrapper.xcodeproj
│   ├── project.pbxproj
│   └── project.xcworkspace
│       └── contents.xcworkspacedata
└── SwiftUI-AI-Wrapper
    ├── Assets.xcassets
    │   ├── AccentColor.colorset
    │   │   └── Contents.json
    │   ├── AppIcon.appiconset
    │   │   └── Contents.json
    │   ├── Contents.json
    │   └── SystemAvatar.imageset
    │       ├── Contents.json
    │       └── SystemAvatar.png
    ├── CameraView.swift
    ├── ChatViews
    │   ├── ChatView.swift
    │   ├── InputMessageView.swift
    │   ├── MessageView.swift
    │   └── TypingIndicatorView.swift
    ├── ContentView.swift
    ├── Extensions
    │   ├── DateExtensions.swift
    │   ├── StringHash.swift
    │   └── UIImageExtensions.swift
    ├── HistoryView.swift
    ├── Models
    │   ├── ChatModel.swift
    │   ├── ConnectionRequest.swift
    │   └── HistoryModel.swift
    ├── PhotoPicker.swift
    ├── PhotoView.swift
    ├── Preview Content
    │   └── Preview Assets.xcassets
    │       └── Contents.json
    └── SwiftUI_AI_WrapperApp.swift

/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2024 Adam Lyttle
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # OpenAI-Wrapper-SwiftUI
2 | 
3 | An OpenAI Wrapper built in SwiftUI
4 | 
5 | [![Watch the video](https://adamlyttleapps.com/demo/OpenAI-Wrapper-SwiftUI/demo-v3.png)](https://adamlyttleapps.com/demo/OpenAI-Wrapper-SwiftUI/demo.mp4)
6 | 
7 | ## Overview
8 | 
9 | OpenAI-Wrapper-SwiftUI is a SwiftUI-based wrapper designed to leverage AI for vision-related tasks. The wrapper interacts with an OpenAI proxy script to secure API communications and protect your API key.
10 | 
11 | ## Features
12 | 
13 | * SwiftUI Integration: Seamlessly integrates with SwiftUI projects
14 | * AI Vision Capabilities: Utilize AI to perform various vision-related tasks
15 | * Secure API Communication: Protect your OpenAI API key using a proxy script
16 | * Customizable Settings: Easily configure the proxy script location and shared secret key
17 | 
18 | ## Usage
19 | 
20 | 1. **Open the project in Xcode**
21 | 2. **Update the Proxy Script Location:** In the Models > ChatModel class, update the `location` property with the URL of your OpenAI proxy script. The source code for the openai_proxy.php script used in the demo is available at: [https://github.com/adamlyttleapps/OpenAI-Proxy-PHP](https://github.com/adamlyttleapps/OpenAI-Proxy-PHP). 
22 | 23 | ``` 24 | //customize the location of the openai_proxy.php script 25 | private let location = "https://adamlyttleapps.com/demo/OpenAIProxy-PHP/openai_proxy.php" 26 | 27 | //create a shared secret key, requests to the server use an md5 hash with the shared secret 28 | private let sharedSecretKey = "secret_key" 29 | ``` 30 | 31 | 3. **Configure Privacy Settings**: If you are adding this wrapper to an existing project, ensure that you add the following keys to your Info.plist: 32 | 33 | ``` 34 | Privacy - Photo Library Usage Description 35 | Privacy - Camera Usage Description 36 | ``` 37 | 38 | These entries are necessary to request user permissions for accessing the photo library and camera. 39 | 40 | ## Contributions 41 | 42 | Contributions are welcome! Feel free to open an issue or submit a pull request on the [GitHub repository](https://github.com/adamlyttleapps/OpenAI-Wrapper-SwiftUI). 43 | 44 | ## MIT License 45 | 46 | This project is licensed under the MIT License. See the LICENSE file for more details. 47 | 48 | This README provides a clear overview of the project, detailed usage instructions, and additional sections like examples, contributions, and licensing, making it more comprehensive and user-friendly. 49 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 56; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 318DAF3D2C3BA1E900619995 /* SwiftUI_AI_WrapperApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF3C2C3BA1E900619995 /* SwiftUI_AI_WrapperApp.swift */; }; 11 | 318DAF3F2C3BA1E900619995 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF3E2C3BA1E900619995 /* ContentView.swift */; }; 12 | 318DAF412C3BA1EA00619995 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 318DAF402C3BA1EA00619995 /* Assets.xcassets */; }; 13 | 318DAF442C3BA1EA00619995 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 318DAF432C3BA1EA00619995 /* Preview Assets.xcassets */; }; 14 | 318DAF4B2C3BA22400619995 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF4A2C3BA22400619995 /* CameraView.swift */; }; 15 | 318DAF4D2C3BA29A00619995 /* ChatView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF4C2C3BA29A00619995 /* ChatView.swift */; }; 16 | 318DAF4F2C3BA2C200619995 /* MessageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF4E2C3BA2C200619995 /* MessageView.swift */; }; 17 | 318DAF512C3BA2DE00619995 /* HistoryView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF502C3BA2DE00619995 /* HistoryView.swift */; }; 18 | 318DAF542C3BA2F800619995 /* HistoryModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF532C3BA2F800619995 /* HistoryModel.swift */; }; 19 | 318DAF562C3BA31F00619995 /* ChatModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF552C3BA31F00619995 /* ChatModel.swift */; }; 20 | 318DAF5A2C3BA3F400619995 /* UIImageExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF592C3BA3F400619995 /* UIImageExtensions.swift */; }; 21 | 318DAF5D2C3BA42800619995 /* PhotoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF5C2C3BA42800619995 /* PhotoView.swift */; }; 22 | 318DAF5F2C3BA44000619995 /* TypingIndicatorView.swift in Sources */ = {isa = PBXBuildFile; fileRef 
= 318DAF5E2C3BA44000619995 /* TypingIndicatorView.swift */; }; 23 | 318DAF612C3BA56A00619995 /* InputMessageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF602C3BA56A00619995 /* InputMessageView.swift */; }; 24 | 318DAF632C3BA5C600619995 /* ConnectionRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF622C3BA5C600619995 /* ConnectionRequest.swift */; }; 25 | 318DAF652C3BA5FA00619995 /* DateExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF642C3BA5FA00619995 /* DateExtensions.swift */; }; 26 | 318DAF672C3BA62900619995 /* PhotoPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF662C3BA62900619995 /* PhotoPicker.swift */; }; 27 | 318DAF6B2C3CDE0100619995 /* StringHash.swift in Sources */ = {isa = PBXBuildFile; fileRef = 318DAF6A2C3CDE0100619995 /* StringHash.swift */; }; 28 | /* End PBXBuildFile section */ 29 | 30 | /* Begin PBXFileReference section */ 31 | 318DAF392C3BA1E900619995 /* SwiftUI-AI-Wrapper.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "SwiftUI-AI-Wrapper.app"; sourceTree = BUILT_PRODUCTS_DIR; }; 32 | 318DAF3C2C3BA1E900619995 /* SwiftUI_AI_WrapperApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SwiftUI_AI_WrapperApp.swift; sourceTree = ""; }; 33 | 318DAF3E2C3BA1E900619995 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; 34 | 318DAF402C3BA1EA00619995 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 35 | 318DAF432C3BA1EA00619995 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 36 | 318DAF4A2C3BA22400619995 /* CameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = ""; }; 37 | 318DAF4C2C3BA29A00619995 /* ChatView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatView.swift; sourceTree = ""; }; 38 | 318DAF4E2C3BA2C200619995 /* MessageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MessageView.swift; sourceTree = ""; }; 39 | 318DAF502C3BA2DE00619995 /* HistoryView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HistoryView.swift; sourceTree = ""; }; 40 | 318DAF532C3BA2F800619995 /* HistoryModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HistoryModel.swift; sourceTree = ""; }; 41 | 318DAF552C3BA31F00619995 /* ChatModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatModel.swift; sourceTree = ""; }; 42 | 318DAF592C3BA3F400619995 /* UIImageExtensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UIImageExtensions.swift; sourceTree = ""; }; 43 | 318DAF5C2C3BA42800619995 /* PhotoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoView.swift; sourceTree = ""; }; 44 | 318DAF5E2C3BA44000619995 /* TypingIndicatorView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TypingIndicatorView.swift; sourceTree = ""; }; 45 | 318DAF602C3BA56A00619995 /* InputMessageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InputMessageView.swift; sourceTree = ""; }; 46 | 318DAF622C3BA5C600619995 /* 
ConnectionRequest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConnectionRequest.swift; sourceTree = ""; }; 47 | 318DAF642C3BA5FA00619995 /* DateExtensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DateExtensions.swift; sourceTree = ""; }; 48 | 318DAF662C3BA62900619995 /* PhotoPicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoPicker.swift; sourceTree = ""; }; 49 | 318DAF6A2C3CDE0100619995 /* StringHash.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StringHash.swift; sourceTree = ""; }; 50 | /* End PBXFileReference section */ 51 | 52 | /* Begin PBXFrameworksBuildPhase section */ 53 | 318DAF362C3BA1E900619995 /* Frameworks */ = { 54 | isa = PBXFrameworksBuildPhase; 55 | buildActionMask = 2147483647; 56 | files = ( 57 | ); 58 | runOnlyForDeploymentPostprocessing = 0; 59 | }; 60 | /* End PBXFrameworksBuildPhase section */ 61 | 62 | /* Begin PBXGroup section */ 63 | 318DAF302C3BA1E900619995 = { 64 | isa = PBXGroup; 65 | children = ( 66 | 318DAF3B2C3BA1E900619995 /* SwiftUI-AI-Wrapper */, 67 | 318DAF3A2C3BA1E900619995 /* Products */, 68 | ); 69 | sourceTree = ""; 70 | }; 71 | 318DAF3A2C3BA1E900619995 /* Products */ = { 72 | isa = PBXGroup; 73 | children = ( 74 | 318DAF392C3BA1E900619995 /* SwiftUI-AI-Wrapper.app */, 75 | ); 76 | name = Products; 77 | sourceTree = ""; 78 | }; 79 | 318DAF3B2C3BA1E900619995 /* SwiftUI-AI-Wrapper */ = { 80 | isa = PBXGroup; 81 | children = ( 82 | 318DAF3C2C3BA1E900619995 /* SwiftUI_AI_WrapperApp.swift */, 83 | 318DAF3E2C3BA1E900619995 /* ContentView.swift */, 84 | 318DAF502C3BA2DE00619995 /* HistoryView.swift */, 85 | 318DAF4A2C3BA22400619995 /* CameraView.swift */, 86 | 318DAF662C3BA62900619995 /* PhotoPicker.swift */, 87 | 318DAF5C2C3BA42800619995 /* PhotoView.swift */, 88 | 318DAF522C3BA2EF00619995 /* Models */, 89 | 318DAF682C3BADFE00619995 /* ChatViews */, 90 | 318DAF5B2C3BA3FD00619995 /* Extensions */, 91 | 318DAF402C3BA1EA00619995 /* Assets.xcassets */, 92 | 318DAF422C3BA1EA00619995 /* Preview Content */, 93 | ); 94 | path = "SwiftUI-AI-Wrapper"; 95 | sourceTree = ""; 96 | }; 97 | 318DAF422C3BA1EA00619995 /* Preview Content */ = { 98 | isa = PBXGroup; 99 | children = ( 100 | 318DAF432C3BA1EA00619995 /* Preview Assets.xcassets */, 101 | ); 102 | path = "Preview Content"; 103 | sourceTree = ""; 104 | }; 105 | 318DAF522C3BA2EF00619995 /* Models */ = { 106 | isa = PBXGroup; 107 | children = ( 108 | 318DAF622C3BA5C600619995 /* ConnectionRequest.swift */, 109 | 318DAF532C3BA2F800619995 /* HistoryModel.swift */, 110 | 318DAF552C3BA31F00619995 /* ChatModel.swift */, 111 | ); 112 | path = Models; 113 | sourceTree = ""; 114 | }; 115 | 318DAF5B2C3BA3FD00619995 /* Extensions */ = { 116 | isa = PBXGroup; 117 | children = ( 118 | 318DAF592C3BA3F400619995 /* UIImageExtensions.swift */, 119 | 318DAF642C3BA5FA00619995 /* DateExtensions.swift */, 120 | 318DAF6A2C3CDE0100619995 /* StringHash.swift */, 121 | ); 122 | path = Extensions; 123 | sourceTree = ""; 124 | }; 125 | 318DAF682C3BADFE00619995 /* ChatViews */ = { 126 | isa = PBXGroup; 127 | children = ( 128 | 318DAF4C2C3BA29A00619995 /* ChatView.swift */, 129 | 318DAF602C3BA56A00619995 /* InputMessageView.swift */, 130 | 318DAF4E2C3BA2C200619995 /* MessageView.swift */, 131 | 318DAF5E2C3BA44000619995 /* TypingIndicatorView.swift */, 132 | ); 133 | path = ChatViews; 134 | sourceTree = ""; 135 | }; 136 | /* End PBXGroup section */ 137 | 138 | /* Begin PBXNativeTarget section */ 
139 | 318DAF382C3BA1E900619995 /* SwiftUI-AI-Wrapper */ = { 140 | isa = PBXNativeTarget; 141 | buildConfigurationList = 318DAF472C3BA1EA00619995 /* Build configuration list for PBXNativeTarget "SwiftUI-AI-Wrapper" */; 142 | buildPhases = ( 143 | 318DAF352C3BA1E900619995 /* Sources */, 144 | 318DAF362C3BA1E900619995 /* Frameworks */, 145 | 318DAF372C3BA1E900619995 /* Resources */, 146 | ); 147 | buildRules = ( 148 | ); 149 | dependencies = ( 150 | ); 151 | name = "SwiftUI-AI-Wrapper"; 152 | productName = "SwiftUI-AI-Wrapper"; 153 | productReference = 318DAF392C3BA1E900619995 /* SwiftUI-AI-Wrapper.app */; 154 | productType = "com.apple.product-type.application"; 155 | }; 156 | /* End PBXNativeTarget section */ 157 | 158 | /* Begin PBXProject section */ 159 | 318DAF312C3BA1E900619995 /* Project object */ = { 160 | isa = PBXProject; 161 | attributes = { 162 | BuildIndependentTargetsInParallel = 1; 163 | LastSwiftUpdateCheck = 1540; 164 | LastUpgradeCheck = 1540; 165 | TargetAttributes = { 166 | 318DAF382C3BA1E900619995 = { 167 | CreatedOnToolsVersion = 15.4; 168 | }; 169 | }; 170 | }; 171 | buildConfigurationList = 318DAF342C3BA1E900619995 /* Build configuration list for PBXProject "SwiftUI-AI-Wrapper" */; 172 | compatibilityVersion = "Xcode 14.0"; 173 | developmentRegion = en; 174 | hasScannedForEncodings = 0; 175 | knownRegions = ( 176 | en, 177 | Base, 178 | ); 179 | mainGroup = 318DAF302C3BA1E900619995; 180 | productRefGroup = 318DAF3A2C3BA1E900619995 /* Products */; 181 | projectDirPath = ""; 182 | projectRoot = ""; 183 | targets = ( 184 | 318DAF382C3BA1E900619995 /* SwiftUI-AI-Wrapper */, 185 | ); 186 | }; 187 | /* End PBXProject section */ 188 | 189 | /* Begin PBXResourcesBuildPhase section */ 190 | 318DAF372C3BA1E900619995 /* Resources */ = { 191 | isa = PBXResourcesBuildPhase; 192 | buildActionMask = 2147483647; 193 | files = ( 194 | 318DAF442C3BA1EA00619995 /* Preview Assets.xcassets in Resources */, 195 | 318DAF412C3BA1EA00619995 /* Assets.xcassets in Resources */, 196 | ); 197 | runOnlyForDeploymentPostprocessing = 0; 198 | }; 199 | /* End PBXResourcesBuildPhase section */ 200 | 201 | /* Begin PBXSourcesBuildPhase section */ 202 | 318DAF352C3BA1E900619995 /* Sources */ = { 203 | isa = PBXSourcesBuildPhase; 204 | buildActionMask = 2147483647; 205 | files = ( 206 | 318DAF632C3BA5C600619995 /* ConnectionRequest.swift in Sources */, 207 | 318DAF4D2C3BA29A00619995 /* ChatView.swift in Sources */, 208 | 318DAF512C3BA2DE00619995 /* HistoryView.swift in Sources */, 209 | 318DAF4B2C3BA22400619995 /* CameraView.swift in Sources */, 210 | 318DAF5F2C3BA44000619995 /* TypingIndicatorView.swift in Sources */, 211 | 318DAF562C3BA31F00619995 /* ChatModel.swift in Sources */, 212 | 318DAF6B2C3CDE0100619995 /* StringHash.swift in Sources */, 213 | 318DAF612C3BA56A00619995 /* InputMessageView.swift in Sources */, 214 | 318DAF652C3BA5FA00619995 /* DateExtensions.swift in Sources */, 215 | 318DAF672C3BA62900619995 /* PhotoPicker.swift in Sources */, 216 | 318DAF3F2C3BA1E900619995 /* ContentView.swift in Sources */, 217 | 318DAF5D2C3BA42800619995 /* PhotoView.swift in Sources */, 218 | 318DAF3D2C3BA1E900619995 /* SwiftUI_AI_WrapperApp.swift in Sources */, 219 | 318DAF542C3BA2F800619995 /* HistoryModel.swift in Sources */, 220 | 318DAF5A2C3BA3F400619995 /* UIImageExtensions.swift in Sources */, 221 | 318DAF4F2C3BA2C200619995 /* MessageView.swift in Sources */, 222 | ); 223 | runOnlyForDeploymentPostprocessing = 0; 224 | }; 225 | /* End PBXSourcesBuildPhase section */ 226 | 227 | /* Begin 
XCBuildConfiguration section */ 228 | 318DAF452C3BA1EA00619995 /* Debug */ = { 229 | isa = XCBuildConfiguration; 230 | buildSettings = { 231 | ALWAYS_SEARCH_USER_PATHS = NO; 232 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; 233 | CLANG_ANALYZER_NONNULL = YES; 234 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 235 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 236 | CLANG_ENABLE_MODULES = YES; 237 | CLANG_ENABLE_OBJC_ARC = YES; 238 | CLANG_ENABLE_OBJC_WEAK = YES; 239 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 240 | CLANG_WARN_BOOL_CONVERSION = YES; 241 | CLANG_WARN_COMMA = YES; 242 | CLANG_WARN_CONSTANT_CONVERSION = YES; 243 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 244 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 245 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 246 | CLANG_WARN_EMPTY_BODY = YES; 247 | CLANG_WARN_ENUM_CONVERSION = YES; 248 | CLANG_WARN_INFINITE_RECURSION = YES; 249 | CLANG_WARN_INT_CONVERSION = YES; 250 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 251 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 252 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 253 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 254 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 255 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 256 | CLANG_WARN_STRICT_PROTOTYPES = YES; 257 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 258 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 259 | CLANG_WARN_UNREACHABLE_CODE = YES; 260 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 261 | COPY_PHASE_STRIP = NO; 262 | DEBUG_INFORMATION_FORMAT = dwarf; 263 | ENABLE_STRICT_OBJC_MSGSEND = YES; 264 | ENABLE_TESTABILITY = YES; 265 | ENABLE_USER_SCRIPT_SANDBOXING = YES; 266 | GCC_C_LANGUAGE_STANDARD = gnu17; 267 | GCC_DYNAMIC_NO_PIC = NO; 268 | GCC_NO_COMMON_BLOCKS = YES; 269 | GCC_OPTIMIZATION_LEVEL = 0; 270 | GCC_PREPROCESSOR_DEFINITIONS = ( 271 | "DEBUG=1", 272 | "$(inherited)", 273 | ); 274 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 275 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 276 | GCC_WARN_UNDECLARED_SELECTOR = YES; 277 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 278 | GCC_WARN_UNUSED_FUNCTION = YES; 279 | GCC_WARN_UNUSED_VARIABLE = YES; 280 | IPHONEOS_DEPLOYMENT_TARGET = 17.5; 281 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES; 282 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; 283 | MTL_FAST_MATH = YES; 284 | ONLY_ACTIVE_ARCH = YES; 285 | SDKROOT = iphoneos; 286 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; 287 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 288 | }; 289 | name = Debug; 290 | }; 291 | 318DAF462C3BA1EA00619995 /* Release */ = { 292 | isa = XCBuildConfiguration; 293 | buildSettings = { 294 | ALWAYS_SEARCH_USER_PATHS = NO; 295 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; 296 | CLANG_ANALYZER_NONNULL = YES; 297 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 298 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 299 | CLANG_ENABLE_MODULES = YES; 300 | CLANG_ENABLE_OBJC_ARC = YES; 301 | CLANG_ENABLE_OBJC_WEAK = YES; 302 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 303 | CLANG_WARN_BOOL_CONVERSION = YES; 304 | CLANG_WARN_COMMA = YES; 305 | CLANG_WARN_CONSTANT_CONVERSION = YES; 306 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 307 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 308 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 309 | CLANG_WARN_EMPTY_BODY = YES; 310 | CLANG_WARN_ENUM_CONVERSION = YES; 311 | CLANG_WARN_INFINITE_RECURSION = YES; 312 | CLANG_WARN_INT_CONVERSION = YES; 313 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 314 | 
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 315 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 316 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 317 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 318 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 319 | CLANG_WARN_STRICT_PROTOTYPES = YES; 320 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 321 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 322 | CLANG_WARN_UNREACHABLE_CODE = YES; 323 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 324 | COPY_PHASE_STRIP = NO; 325 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 326 | ENABLE_NS_ASSERTIONS = NO; 327 | ENABLE_STRICT_OBJC_MSGSEND = YES; 328 | ENABLE_USER_SCRIPT_SANDBOXING = YES; 329 | GCC_C_LANGUAGE_STANDARD = gnu17; 330 | GCC_NO_COMMON_BLOCKS = YES; 331 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 332 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 333 | GCC_WARN_UNDECLARED_SELECTOR = YES; 334 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 335 | GCC_WARN_UNUSED_FUNCTION = YES; 336 | GCC_WARN_UNUSED_VARIABLE = YES; 337 | IPHONEOS_DEPLOYMENT_TARGET = 17.5; 338 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES; 339 | MTL_ENABLE_DEBUG_INFO = NO; 340 | MTL_FAST_MATH = YES; 341 | SDKROOT = iphoneos; 342 | SWIFT_COMPILATION_MODE = wholemodule; 343 | VALIDATE_PRODUCT = YES; 344 | }; 345 | name = Release; 346 | }; 347 | 318DAF482C3BA1EA00619995 /* Debug */ = { 348 | isa = XCBuildConfiguration; 349 | buildSettings = { 350 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 351 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 352 | CODE_SIGN_STYLE = Automatic; 353 | CURRENT_PROJECT_VERSION = 1; 354 | DEVELOPMENT_ASSET_PATHS = "\"SwiftUI-AI-Wrapper/Preview Content\""; 355 | DEVELOPMENT_TEAM = ""; 356 | ENABLE_PREVIEWS = YES; 357 | GENERATE_INFOPLIST_FILE = YES; 358 | INFOPLIST_KEY_NSCameraUsageDescription = "Access to your camera is required to take photos of objects"; 359 | INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Permission required to upload photos to the AI Wrapper"; 360 | INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; 361 | INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; 362 | INFOPLIST_KEY_UILaunchScreen_Generation = YES; 363 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; 364 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; 365 | LD_RUNPATH_SEARCH_PATHS = ( 366 | "$(inherited)", 367 | "@executable_path/Frameworks", 368 | ); 369 | MARKETING_VERSION = 1.0; 370 | PRODUCT_BUNDLE_IDENTIFIER = "SwiftUI-AI-Wrapper"; 371 | PRODUCT_NAME = "$(TARGET_NAME)"; 372 | SWIFT_EMIT_LOC_STRINGS = YES; 373 | SWIFT_VERSION = 5.0; 374 | TARGETED_DEVICE_FAMILY = "1,2"; 375 | }; 376 | name = Debug; 377 | }; 378 | 318DAF492C3BA1EA00619995 /* Release */ = { 379 | isa = XCBuildConfiguration; 380 | buildSettings = { 381 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 382 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 383 | CODE_SIGN_STYLE = Automatic; 384 | CURRENT_PROJECT_VERSION = 1; 385 | DEVELOPMENT_ASSET_PATHS = "\"SwiftUI-AI-Wrapper/Preview Content\""; 386 | DEVELOPMENT_TEAM = ""; 387 | ENABLE_PREVIEWS = YES; 388 | GENERATE_INFOPLIST_FILE = YES; 389 | INFOPLIST_KEY_NSCameraUsageDescription = "Access to your camera is required to take photos of objects"; 390 | INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Permission 
required to upload photos to the AI Wrapper"; 391 | INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; 392 | INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; 393 | INFOPLIST_KEY_UILaunchScreen_Generation = YES; 394 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; 395 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; 396 | LD_RUNPATH_SEARCH_PATHS = ( 397 | "$(inherited)", 398 | "@executable_path/Frameworks", 399 | ); 400 | MARKETING_VERSION = 1.0; 401 | PRODUCT_BUNDLE_IDENTIFIER = "SwiftUI-AI-Wrapper"; 402 | PRODUCT_NAME = "$(TARGET_NAME)"; 403 | SWIFT_EMIT_LOC_STRINGS = YES; 404 | SWIFT_VERSION = 5.0; 405 | TARGETED_DEVICE_FAMILY = "1,2"; 406 | }; 407 | name = Release; 408 | }; 409 | /* End XCBuildConfiguration section */ 410 | 411 | /* Begin XCConfigurationList section */ 412 | 318DAF342C3BA1E900619995 /* Build configuration list for PBXProject "SwiftUI-AI-Wrapper" */ = { 413 | isa = XCConfigurationList; 414 | buildConfigurations = ( 415 | 318DAF452C3BA1EA00619995 /* Debug */, 416 | 318DAF462C3BA1EA00619995 /* Release */, 417 | ); 418 | defaultConfigurationIsVisible = 0; 419 | defaultConfigurationName = Release; 420 | }; 421 | 318DAF472C3BA1EA00619995 /* Build configuration list for PBXNativeTarget "SwiftUI-AI-Wrapper" */ = { 422 | isa = XCConfigurationList; 423 | buildConfigurations = ( 424 | 318DAF482C3BA1EA00619995 /* Debug */, 425 | 318DAF492C3BA1EA00619995 /* Release */, 426 | ); 427 | defaultConfigurationIsVisible = 0; 428 | defaultConfigurationName = Release; 429 | }; 430 | /* End XCConfigurationList section */ 431 | }; 432 | rootObject = 318DAF312C3BA1E900619995 /* Project object */; 433 | } 434 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Assets.xcassets/AccentColor.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "idiom" : "universal" 5 | } 6 | ], 7 | "info" : { 8 | "author" : "xcode", 9 | "version" : 1 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "platform" : "ios", 6 | "size" : "1024x1024" 7 | } 8 | ], 9 | "info" : { 10 | "author" : "xcode", 11 | "version" : 1 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Assets.xcassets/SystemAvatar.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | 
"filename" : "SystemAvatar.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "author" : "xcode", 19 | "version" : 1 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Assets.xcassets/SystemAvatar.imageset/SystemAvatar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamlyttleapps/OpenAI-Wrapper-SwiftUI/4e7b16483e5528f80b72504a8ce20c2054d34597/SwiftUI-AI-Wrapper/Assets.xcassets/SystemAvatar.imageset/SystemAvatar.png -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/CameraView.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import SwiftUI 10 | import AVFoundation 11 | 12 | struct CameraView: View { 13 | 14 | @Binding var isActive: Bool 15 | 16 | @State var screen: GeometryProxy? 17 | 18 | @State private var captureSession: AVCaptureSession? = AVCaptureSession() 19 | @State private var photoOutput: AVCapturePhotoOutput? = AVCapturePhotoOutput() 20 | private let photoCaptureDelegate = PhotoCaptureDelegate() 21 | 22 | @State private var isProcessing: Bool = false 23 | @State private var shutterFlash: Bool = false 24 | //let onShutterFlash: () -> Void 25 | 26 | let onCaptureImage: (UIImage) -> Void 27 | 28 | 29 | 30 | //image picker stuff 31 | @State private var showImagePicker: Bool = false 32 | //@State private var imagePickerOpacity: CGFloat = 0 33 | @State private var selectedImage: UIImage? = nil 34 | @State private var filename: String? = nil 35 | 36 | @Binding var showHistory: Bool 37 | 38 | @State private var flashOn: Bool = false 39 | 40 | @Environment(\.colorScheme) var colorScheme //colorScheme == .dark ? Color.white : Color.black 41 | 42 | 43 | func toggleFlashlight() { 44 | guard let device = AVCaptureDevice.default(for: .video), device.hasTorch else { return } 45 | 46 | flashOn = !flashOn 47 | 48 | do { 49 | try device.lockForConfiguration() 50 | device.torchMode = flashOn ? .on : .off 51 | device.unlockForConfiguration() 52 | } catch { 53 | print("Flashlight could not be used") 54 | } 55 | } 56 | 57 | //let processedImage: (UIImage, String) -> Void 58 | 59 | private var screenWidth: CGFloat { 60 | if let screen = screen { 61 | return screen.size.width 62 | } 63 | else { 64 | return 0 65 | } 66 | } 67 | 68 | private var screenHeight: CGFloat { 69 | if let screen = screen { 70 | return screen.size.width 71 | } 72 | else { 73 | return 0 74 | } 75 | } 76 | 77 | 78 | var body: some View { 79 | ZStack (alignment: .topLeading) { 80 | 81 | VStack { 82 | if let captureSession = captureSession, let photoOutput = photoOutput { 83 | CameraPreviewView(captureSession: captureSession, photoOutput: photoOutput, photoCaptureDelegate: photoCaptureDelegate) 84 | .frame(width: screen?.size.width, height: screen?.size.height, alignment: .center) 85 | .onAppear { 86 | checkCameraPermission() 87 | } 88 | } 89 | } 90 | .opacity(isActive ? 
1 : (1 - 0.66)) 91 | 92 | if isProcessing { 93 | 94 | VStack { 95 | Spacer() 96 | HStack { 97 | Spacer() 98 | ProgressView() 99 | .progressViewStyle(CircularProgressViewStyle(tint: .white)) 100 | .padding(.bottom, 80) 101 | Spacer() 102 | } 103 | Spacer() 104 | } 105 | .background(shutterFlash ? Color.white : Color.black.opacity(0.66)) 106 | .onAppear { 107 | withAnimation { 108 | shutterFlash = false 109 | } 110 | } 111 | 112 | } 113 | else { 114 | 115 | HStack { 116 | Spacer() 117 | Button(action: { 118 | //show image selection 119 | self.toggleFlashlight() 120 | }) { 121 | if flashOn { 122 | Image(systemName: "bolt") 123 | .resizable() 124 | .aspectRatio(contentMode: .fit) 125 | .frame(width: 22, height: 22, alignment: .center) 126 | .clipped() 127 | .padding(.top) 128 | } 129 | else { 130 | Image(systemName: "bolt.slash") 131 | .resizable() 132 | .aspectRatio(contentMode: .fit) 133 | .frame(width: 22, height: 22, alignment: .center) 134 | .clipped() 135 | .padding(.top) 136 | } 137 | } 138 | .foregroundColor(.white) 139 | 140 | Spacer() 141 | } 142 | .padding(.horizontal) 143 | .padding(.top, (UIApplication.shared.windows.first?.safeAreaInsets.top ?? 0)) 144 | 145 | VStack { 146 | Spacer() 147 | 148 | HStack { 149 | Button(action: { 150 | //show image selection 151 | showImagePicker = true 152 | }) { 153 | Image(systemName: "photo.on.rectangle") 154 | .resizable() 155 | .aspectRatio(contentMode: .fit) 156 | .frame(width: 30, height: 30, alignment: .center) 157 | .clipped() 158 | .padding(25) 159 | } 160 | .foregroundColor(.white) 161 | .sheet(isPresented: $showImagePicker) { 162 | PhotoPicker(isPresented: $showImagePicker, selectedImage: $selectedImage, filename: $filename) 163 | .accentColor(.blue) 164 | } 165 | 166 | Spacer() 167 | Button(action: { 168 | if let photoOutput = photoOutput { 169 | print("==> capturePhoto") 170 | isProcessing = true 171 | let settings = AVCapturePhotoSettings() 172 | photoOutput.capturePhoto(with: settings, delegate: photoCaptureDelegate) 173 | //self.onShutterFlash() 174 | } 175 | }) { 176 | Image(systemName: "camera") 177 | .resizable() 178 | .aspectRatio(contentMode: .fit) 179 | .frame(width: 35, alignment: .center) 180 | .clipped() 181 | .padding(25) 182 | .background(Color.blue) 183 | .foregroundColor(.white) 184 | .clipShape(Circle()) 185 | } 186 | Spacer() 187 | // 188 | 189 | 190 | 191 | Button(action: { 192 | showHistory.toggle() 193 | }) { 194 | Image(systemName: "clock.arrow.circlepath") 195 | .resizable() 196 | .aspectRatio(contentMode: .fit) 197 | .frame(width: 30, alignment: .center) 198 | .clipped() 199 | .padding(25) 200 | } 201 | .foregroundColor(.white) 202 | .padding(.horizontal, 2.5) 203 | 204 | } 205 | .padding(.horizontal) 206 | } 207 | .padding(.bottom, 20 + (UIApplication.shared.windows.first?.safeAreaInsets.bottom ?? 
0)) 208 | 209 | //white flash shows that the photo has been taken but also hides the transition from live to still 210 | 211 | 212 | //} 213 | 214 | } 215 | 216 | } 217 | .onChange(of: isProcessing) { value in 218 | if value { 219 | shutterFlash = true 220 | } 221 | } 222 | .onChange(of: showHistory) { value in 223 | if value { 224 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.6) { 225 | isActive = false 226 | } 227 | } 228 | else { 229 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { 230 | isActive = true 231 | } 232 | } 233 | } 234 | .onChange(of: isActive) { value in 235 | if isActive { 236 | self.startCameraSession() 237 | } 238 | else { 239 | self.stopCameraSession() 240 | } 241 | } 242 | .onChange(of: selectedImage) { selectedImage in 243 | DispatchQueue.main.async { 244 | if let selectedImage = selectedImage { 245 | self.selectedImage = selectedImage 246 | DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { 247 | isProcessing = false 248 | } 249 | onCaptureImage(selectedImage) 250 | } 251 | } 252 | } 253 | .edgesIgnoringSafeArea(.all) 254 | } 255 | 256 | 257 | private func stopCameraSession() { 258 | if let captureSession = captureSession, captureSession.isRunning { 259 | DispatchQueue.global(qos: .userInitiated).async { 260 | print("==> captureSession.stopRunning()") 261 | captureSession.stopRunning() 262 | self.captureSession = nil 263 | self.photoOutput = nil 264 | } 265 | } 266 | } 267 | 268 | private func startCameraSession() { 269 | print("==> startCameraSession") 270 | if let _ = captureSession {} else { 271 | captureSession = AVCaptureSession() 272 | } 273 | if let _ = photoOutput {} else { 274 | photoOutput = AVCapturePhotoOutput() 275 | } 276 | print("==> 1") 277 | if let captureSession = captureSession, !captureSession.isRunning { 278 | print("==> 2") 279 | DispatchQueue.global(qos: .userInitiated).async { 280 | print("==> captureSession.startRunning()") 281 | captureSession.startRunning() 282 | } 283 | } 284 | } 285 | 286 | private func setupCamera() { 287 | // Initialize and configure the capture session 288 | photoCaptureDelegate.onPhotoCapture = { image in 289 | //sets maximum height of image to 1000px (update here) 290 | if let resizedImage = image.resized(toHeight: max(1000, image.size.height)) { 291 | self.selectedImage = resizedImage 292 | } 293 | 294 | } 295 | } 296 | 297 | 298 | func checkCameraPermission() { 299 | switch AVCaptureDevice.authorizationStatus(for: .video) { 300 | case .authorized: 301 | // Already authorized 302 | setupCamera() 303 | case .notDetermined: 304 | // Request permission 305 | AVCaptureDevice.requestAccess(for: .video) { granted in 306 | if granted { 307 | setupCamera() 308 | } 309 | // Handle if not granted 310 | } 311 | case .denied, .restricted: 312 | // Permission denied or restricted, handle accordingly 313 | break 314 | @unknown default: 315 | break 316 | } 317 | } 318 | 319 | 320 | } 321 | 322 | 323 | struct CameraPreviewView: UIViewRepresentable { 324 | var captureSession: AVCaptureSession 325 | var photoOutput: AVCapturePhotoOutput 326 | var photoCaptureDelegate: AVCapturePhotoCaptureDelegate 327 | 328 | func makeUIView(context: Context) -> UIView { 329 | let view = UIView(frame: UIScreen.main.bounds) 330 | 331 | // Setup capture session 332 | captureSession.sessionPreset = .photo 333 | if let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) { 334 | guard let videoDeviceInput = try? 
AVCaptureDeviceInput(device: videoDevice), 335 | captureSession.canAddInput(videoDeviceInput) else { return view } 336 | captureSession.addInput(videoDeviceInput) 337 | 338 | // Add photo output 339 | guard captureSession.canAddOutput(photoOutput) else { return view } 340 | captureSession.addOutput(photoOutput) 341 | 342 | // Add preview layer 343 | let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 344 | previewLayer.frame = view.bounds 345 | previewLayer.videoGravity = .resizeAspectFill 346 | //previewLayer.videoGravity = .resizeAspect // Change this line 347 | view.layer.addSublayer(previewLayer) 348 | 349 | // Start session 350 | DispatchQueue.global(qos: .userInitiated).async { 351 | self.captureSession.startRunning() 352 | } 353 | 354 | } 355 | 356 | return view 357 | } 358 | 359 | func updateUIView(_ uiView: UIView, context: Context) {} 360 | } 361 | 362 | class PhotoCaptureDelegate: NSObject, AVCapturePhotoCaptureDelegate { 363 | var onPhotoCapture: ((UIImage) -> Void)? 364 | 365 | func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) { 366 | guard let imageData = photo.fileDataRepresentation() else { return } 367 | if let image = UIImage(data: imageData) { 368 | onPhotoCapture?(image) 369 | } 370 | } 371 | } 372 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/ChatViews/ChatView.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import SwiftUI 10 | 11 | struct ChatView: View { 12 | 13 | @Environment(\.colorScheme) var colorScheme 14 | @Environment(\.presentationMode) var presentationMode 15 | @Binding var isPresented: Bool 16 | @ObservedObject var chat: ChatModel 17 | @State var chatTitle: String = "New Chat" 18 | 19 | @State private var showMenuButtons: Bool = false 20 | 21 | var removeChat: ((ChatModel)->Void)? 22 | 23 | var body: some View { 24 | 25 | VStack (spacing: 10) { 26 | 27 | ScrollViewReader { proxy in 28 | 29 | ScrollView { 30 | 31 | VStack (spacing: 10) { 32 | ForEach(chat.messages) { message in 33 | MessageView(message: message).id(message.id) 34 | } 35 | if chat.isSending { 36 | MessageView(message: ChatMessage( 37 | role: .system, 38 | message: "..." 39 | )) 40 | .id("typing") 41 | } 42 | } 43 | .padding(.horizontal) 44 | .onChange(of: chat.messages.count) { _ in 45 | if let lastMessage = chat.messages.last { 46 | if chat.messages.count > 2 || lastMessage.role == .user || lastMessage.message == "..." { 47 | withAnimation { 48 | proxy.scrollTo("eof", anchor: .bottom) 49 | } 50 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { 51 | withAnimation { 52 | proxy.scrollTo("eof", anchor: .bottom) 53 | } 54 | } 55 | } 56 | if lastMessage.role == .system && lastMessage.message != "..." 
{ 57 | DispatchQueue.global().async { 58 | let history = History() 59 | history.appendChat(chat) 60 | } 61 | } 62 | } 63 | } 64 | .onChange(of: chat.title) { value in 65 | if let chatTitle = chat.title, self.chatTitle == "New Chat" { 66 | self.chatTitle = chatTitle 67 | } 68 | } 69 | 70 | Rectangle() 71 | .id("eof") 72 | .frame(height: 10) 73 | .foregroundColor(.clear) 74 | 75 | } 76 | .navigationTitle(chatTitle) 77 | .navigationBarTitleDisplayMode(.inline) 78 | // 79 | } 80 | .onAppear { 81 | chatTitle = chat.title ?? "New Chat" 82 | } 83 | 84 | .toolbar { 85 | ToolbarItem(placement: .primaryAction) { 86 | //trailing buttons 87 | if let _ = removeChat { 88 | Button(action: { 89 | //action 90 | showMenuButtons = true 91 | }) { 92 | Image(systemName: "ellipsis.circle") 93 | } 94 | .actionSheet(isPresented: $showMenuButtons) { 95 | 96 | var buttons: [ActionSheet.Button] = [] 97 | 98 | if let removeChat = removeChat { 99 | buttons.append(.destructive(Text("Delete Chat"), action: { 100 | removeChat(chat) 101 | presentationMode.wrappedValue.dismiss() 102 | })) 103 | } 104 | 105 | buttons.append(.cancel()) 106 | 107 | return ActionSheet(title: Text("Chat Actions"), message: nil, 108 | buttons: buttons) 109 | } 110 | } 111 | } 112 | ToolbarItem(placement: .cancellationAction) { 113 | //leading buttons 114 | if let _ = removeChat {} else { 115 | Button(action: { 116 | //action 117 | presentationMode.wrappedValue.dismiss() 118 | }) { 119 | //Image(systenName: "multiply") 120 | Text("Close") 121 | } 122 | } 123 | } 124 | } 125 | 126 | VStack (spacing: 0) { 127 | MessageInputView(message: { message in 128 | chat.sendMessage(message: message) 129 | }) 130 | } 131 | .opacity(hasSystemMessage() ? 1 : 0) 132 | .disabled(hasSystemMessage() ? false : true) 133 | 134 | } 135 | .onAppear { 136 | if let _ = removeChat, let firstMessage = chat.messages.first { 137 | //this is loaded from history -- load the image from view 138 | if let _ = firstMessage.image {} //already loaded 139 | else { 140 | if let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first { 141 | let fileName = "\(firstMessage.id).jpg" 142 | let fileURL = documentsDirectory.appendingPathComponent(fileName) 143 | 144 | if FileManager.default.fileExists(atPath: fileURL.path), 145 | let imageData = try? 
Data(contentsOf: fileURL), 146 | let image = UIImage(data: imageData) { 147 | chat.messages[0].image = image 148 | } 149 | } 150 | } 151 | 152 | } 153 | } 154 | } 155 | 156 | func hasSystemMessage() -> Bool { 157 | 158 | if let firstSystemMessage = chat.messages.first(where: {$0.role == .system})?.message { //firstSystemMessage.contains("=") || firstSystemMessage.contains("equal") { 159 | return true 160 | } 161 | else { 162 | return false 163 | } 164 | 165 | } 166 | 167 | } 168 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/ChatViews/InputMessageView.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import SwiftUI 10 | 11 | struct MessageInputView: View { 12 | 13 | @Environment(\.colorScheme) var colorScheme 14 | 15 | @State var value: String = "" 16 | //@State private var showingSearchField: Bool = false //triggers when search textbox is to be displayed 17 | @FocusState private var isFocused: Bool 18 | @State var focusOnView: Bool = false 19 | 20 | @State private var hideSearchValue: Bool = false 21 | 22 | var onChange: ((String) -> Void)? 23 | 24 | var message: (String) -> Void 25 | 26 | @State var height: CGFloat = 30 27 | 28 | var body: some View { 29 | 30 | VStack { 31 | HStack { 32 | HStack { 33 | //Image(systemName: "magnifyingglass") 34 | if(!hideSearchValue) { 35 | ZStack (alignment: .leading) { 36 | TextEditor(text: $value) 37 | .frame(height: height) 38 | .focused($isFocused) 39 | .onAppear { 40 | if focusOnView { 41 | isFocused = true 42 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { 43 | focusOnView = false 44 | } 45 | } 46 | } 47 | .onChange(of: value) { newValue in 48 | if newValue.last == "\n" { 49 | value = String(newValue.dropLast()) 50 | sendMessage() 51 | } 52 | } 53 | 54 | Text(value.isEmpty ? "Enter message..." : value) 55 | .opacity(value.isEmpty ? 0.5 : 0) 56 | .multilineTextAlignment(.center) 57 | .padding(.leading, 5) 58 | .padding(.top, 4) 59 | .background { 60 | GeometryReader { proxy in 61 | Rectangle() 62 | .foregroundColor(.clear) 63 | .onChange(of: value) { _ in 64 | height = min(120, max(30, proxy.size.height)) 65 | } 66 | } 67 | } 68 | .onTapGesture { 69 | isFocused = true 70 | } 71 | } 72 | .offset(y: -3) 73 | } 74 | Button(action: { 75 | sendMessage() 76 | }) { 77 | Image(systemName: "paperplane") 78 | } 79 | .accentColor(colorScheme == .dark ? 
.white : .black) 80 | } 81 | .padding(.horizontal) 82 | .padding(.vertical, 10) 83 | } 84 | } 85 | /*.toolbar { 86 | ToolbarItem(placement: .keyboard) { 87 | HStack { 88 | Button(action: { 89 | isFocused = false 90 | //search = "" 91 | }) { 92 | Text("Close") 93 | } 94 | Spacer() 95 | } 96 | } 97 | }*/ 98 | .accentColor(.black) 99 | 100 | } 101 | 102 | func sendMessage() { 103 | if !value.isEmpty { 104 | message(value) 105 | value = "" 106 | } 107 | } 108 | 109 | } 110 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/ChatViews/MessageView.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import SwiftUI 10 | 11 | struct MessageView: View { 12 | @State private var confetti = 0 13 | @Environment(\.colorScheme) var colorScheme 14 | 15 | let message: ChatMessage 16 | var body: some View { 17 | HStack (alignment: .top) { 18 | if message.role == .user { 19 | Spacer() 20 | } 21 | else { 22 | Image("SystemAvatar") 23 | .resizable() 24 | .aspectRatio(contentMode: .fit) 25 | .frame(width: 45, height: 45, alignment: .center) 26 | .clipped() 27 | .mask { 28 | Circle() 29 | } 30 | } 31 | VStack (alignment: .leading) { 32 | VStack (alignment: .leading, spacing: 0) { 33 | if let image = message.image { 34 | NavigationLink(destination: PhotoView(image: image)) { 35 | Image(uiImage: image) 36 | .resizable() 37 | .aspectRatio(contentMode: .fill) 38 | .frame(width: .infinity, height: 120, alignment: .center) 39 | .clipped() 40 | } 41 | } 42 | if let message = message.message { 43 | if message == "..." && self.message.role == .system { 44 | TypingIndicatorView() 45 | .padding() 46 | .foregroundColor(.white) 47 | .background(colorScheme == .dark ? Color(.systemGray5) : Color(.systemGray6)) 48 | } 49 | else { 50 | 51 | VStack (alignment: .leading, spacing: 10) { 52 | 53 | HStack { 54 | Text(removeMessageResponses(message)) 55 | if message.contains("? location:") { 56 | Spacer() 57 | Image(systemName: "location.viewfinder") 58 | } 59 | } 60 | 61 | 62 | } 63 | .padding() 64 | .background(colorScheme == .dark ? Color(.systemGray5) : Color(.systemGray6)) 65 | } 66 | } 67 | } 68 | .contextMenu { 69 | Button(action: { 70 | if let message = message.message { 71 | UIPasteboard.general.string = message 72 | } 73 | else if let image = message.image { 74 | UIPasteboard.general.image = image 75 | } 76 | }) { 77 | Text("Copy") 78 | Image(systemName: "doc.on.doc") 79 | } 80 | /*if let image = message.message { 81 | Button(action: { 82 | ShareView(data: [message]) 83 | } 84 | 85 | }) { 86 | Text("Share") 87 | Image(systemName: "square.and.arrow.up") 88 | } 89 | }*/ 90 | } 91 | } 92 | .background(colorScheme == .dark ? Color(.systemGray5) : Color(.systemGray6)) 93 | .mask { 94 | RoundedRectangle(cornerRadius: 12) 95 | } 96 | .padding(.leading, message.role == .system ? 0 : 40) 97 | .padding(.trailing, message.role == .user ? 
0 : 40)
98 |             if message.role == .system {
99 |                 Spacer()
100 |             }
101 |         }
102 |     }
103 | 
104 | 
105 |     func removeMessageResponses(_ message: String) -> String {
106 |         var parsedMessage = message
107 | 
108 |         // Remove "<...>" tags from the message
109 |         let regex = try! NSRegularExpression(pattern: "<[^>]*>", options: [])
110 |         parsedMessage = regex.stringByReplacingMatches(in: parsedMessage, options: [], range: NSRange(location: 0, length: parsedMessage.utf16.count), withTemplate: "")
111 | 
112 | 
113 |         // Remove trailing whitespace and newline characters
114 |         parsedMessage = parsedMessage.trimmingCharacters(in: .whitespacesAndNewlines)
115 | 
116 | 
117 |         if let range = parsedMessage.range(of: "location:") {
118 |             return String(parsedMessage[..<range.lowerBound])
119 |         }
120 | 
121 | 
122 |         return parsedMessage
123 |     }
124 | 
125 | 
126 |     func extractSpecies(_ message: String) -> String {
127 |         let speciesPrefix = "Species: "
128 |         if let range = message.range(of: speciesPrefix) {
129 |             let speciesStartIndex = range.upperBound
130 |             var speciesEndIndex = speciesStartIndex
131 | 
132 |             // Find the end of the species name (either end of the string or next newline)
133 |             if let newlineRange = message[speciesStartIndex...].range(of: "\n") {
134 |                 speciesEndIndex = newlineRange.lowerBound
135 |             } else {
136 |                 speciesEndIndex = message.endIndex
137 |             }
138 | 
139 |             // Extract and return the species name
140 |             return String(message[speciesStartIndex..<speciesEndIndex])
141 |         }
142 | 
143 |         return message
144 |     }
145 | 
146 | }
147 | 
--------------------------------------------------------------------------------
/SwiftUI-AI-Wrapper/ChatViews/TypingIndicatorView.swift:
--------------------------------------------------------------------------------
1 | // AI Wrapper SwiftUI
2 | // Created by Adam Lyttle on 7/9/2024
3 | 
4 | // Make cool stuff and share your build with me:
5 | 
6 | // --> x.com/adamlyttleapps
7 | // --> github.com/adamlyttleapps
8 | 
9 | import SwiftUI
10 | 
11 | struct TypingIndicatorView: View {
12 |     let animationDelay: Double = 0.2
13 |     @State private var shouldAnimate = false
14 | 
15 |     var body: some View {
16 |         HStack (spacing: 3) {
17 |             ForEach(0..<3) { index in
18 |                 Circle()
19 |                     .frame(width: 12, height: 12)
20 |                     .foregroundColor(.black)
21 |                     .scaleEffect(shouldAnimate ? 1 : 0.5)
22 |                     .animation(
23 |                         Animation.easeInOut(duration: 0.6)
24 |                             .repeatForever(autoreverses: true)
25 |                             .delay(animationDelay * Double(index)),
26 |                         value: shouldAnimate
27 |                     )
28 |             }
29 |         }
30 |         .onAppear {
31 |             shouldAnimate = true
32 |         }
33 |     }
34 | }
35 | 
--------------------------------------------------------------------------------
/SwiftUI-AI-Wrapper/ContentView.swift:
--------------------------------------------------------------------------------
1 | // AI Wrapper SwiftUI
2 | // Created by Adam Lyttle on 7/9/2024
3 | 
4 | // Make cool stuff and share your build with me:
5 | 
6 | // --> x.com/adamlyttleapps
7 | // --> github.com/adamlyttleapps
8 | 
9 | import SwiftUI
10 | import AVFoundation
11 | import Vision
12 | 
13 | struct ContentView: View {
14 | 
15 |     @State private var chat: ChatModel?
16 |     @State private var showChatSheet: Bool = false
17 |     @State private var cameraIsActive: Bool = true
18 |     @State private var showHistory: Bool = false
19 | 
20 |     var body: some View {
21 | 
22 |         ZStack (alignment: .top) {
23 |             CameraView(isActive: $cameraIsActive, onCaptureImage: { image in
24 | 
25 | 
26 |                 //initialise the chat module first
27 |                 chat = ChatModel()
28 | 
29 |                 //wait a moment and then send the first prompt (what is this?) 
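                //note: assigning a new ChatModel to `chat` is what presents the chat sheet, via the
                //.sheet(item: $chat) modifier further down; the 0.1s delay below then sends the first
                //"What is this?" prompt with the captured image, and the 0.7s delay deactivates the
                //camera once the sheet is covering it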
30 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { 31 | if chat!.messages.count == 0 { 32 | chat!.sendMessage(message: "What is this?", image: image) 33 | } 34 | } 35 | 36 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.7) { 37 | cameraIsActive = false 38 | } 39 | 40 | showChatSheet = true 41 | 42 | }, showHistory: $showHistory) 43 | .sheet(isPresented: $showHistory) { 44 | HistoryView(isPresented: $showHistory) 45 | } 46 | .sheet(item: $chat, onDismiss: { 47 | withAnimation { 48 | cameraIsActive = true 49 | } 50 | }) { chat in 51 | NavigationView { 52 | ChatView(isPresented: $showChatSheet, chat: chat) 53 | .onChange(of: showChatSheet) { value in 54 | if !value { 55 | self.chat = nil 56 | } 57 | } 58 | } 59 | } 60 | } 61 | .edgesIgnoringSafeArea(.all) 62 | .background(Color.black) 63 | 64 | } 65 | 66 | 67 | 68 | 69 | } 70 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Extensions/DateExtensions.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import Foundation 10 | extension Date { 11 | var shortDate: String { 12 | let dateFormatter = DateFormatter() 13 | dateFormatter.dateFormat = "MMM d" 14 | return dateFormatter.string(from: self) 15 | } 16 | var shortDateTime: String { 17 | let dateFormatter = DateFormatter() 18 | dateFormatter.dateFormat = "MM/dd hh:mm:ssa" 19 | return dateFormatter.string(from: self) 20 | } 21 | var time: String { 22 | let dateFormatter = DateFormatter() 23 | dateFormatter.dateFormat = "h:mma" 24 | return dateFormatter.string(from: self) 25 | } 26 | var relativeTime: String { 27 | let interval = Int(Date().timeIntervalSince(self)) 28 | 29 | if interval < 180 { 30 | return "just now" 31 | } else if interval < 3600 { 32 | return "\(interval / 60)m ago" 33 | } else if interval < 86400 { 34 | return "\(interval / 3600)h ago" 35 | } else if interval < 31536000 { 36 | return "\(interval / 86400)d ago" 37 | } else { 38 | return "ages ago" 39 | } 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Extensions/StringHash.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import CryptoKit 10 | import Foundation 11 | 12 | extension String { 13 | func hash() -> String { 14 | let data = Data(self.utf8) 15 | let hash = Insecure.MD5.hash(data: data) 16 | let hashValue = hash.compactMap { String(format: "%02x", $0) }.joined() 17 | return hashValue 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Extensions/UIImageExtensions.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import Foundation 10 | import SwiftUI 11 | 12 | func loadUIImage(_ imageName: String) -> UIImage? 
{ 13 | if let uiImage = UIImage(named: imageName) { 14 | if let cgImage = uiImage.cgImage { 15 | return UIImage(cgImage: cgImage) 16 | } 17 | } 18 | return nil 19 | } 20 | 21 | extension UIImage { 22 | 23 | func cropped(to rect: CGRect) -> UIImage? { 24 | guard let cgImage = cgImage?.cropping(to: rect) else { return nil } 25 | return UIImage(cgImage: cgImage) 26 | } 27 | func aspectHeight(width: CGFloat) -> CGFloat { 28 | return (size.height / size.width) * width 29 | } 30 | func aspectWidth(height: CGFloat) -> CGFloat { 31 | return (size.width / size.height) * height 32 | } 33 | 34 | func getPixelColor(x: Int, y: Int) -> UIColor? { 35 | 36 | guard let cgImage = self.cgImage else { return nil } 37 | 38 | let colorSpace = CGColorSpaceCreateDeviceRGB() 39 | var pixelData: [UInt8] = [0, 0, 0, 0] 40 | 41 | if let context = CGContext(data: &pixelData, 42 | width: 1, 43 | height: 1, 44 | bitsPerComponent: 8, 45 | bytesPerRow: 4, 46 | space: colorSpace, 47 | bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue), 48 | let croppedImage = cgImage.cropping(to: CGRect(x: x, y: y, width: 1, height: 1)) { 49 | 50 | context.draw(croppedImage, in: CGRect(x: 0, y: 0, width: 1, height: 1)) 51 | let red = CGFloat(pixelData[0]) / 255.0 52 | let green = CGFloat(pixelData[1]) / 255.0 53 | let blue = CGFloat(pixelData[2]) / 255.0 54 | let alpha = CGFloat(pixelData[3]) / 255.0 55 | 56 | return UIColor(red: red, green: green, blue: blue, alpha: alpha) 57 | } 58 | 59 | return nil 60 | } 61 | 62 | 63 | var height: CGFloat { 64 | guard let cgImage = self.cgImage else { return 0 } 65 | return CGFloat(cgImage.height) 66 | } 67 | 68 | var width: CGFloat { 69 | guard let cgImage = self.cgImage else { return 0 } 70 | return CGFloat(cgImage.width) 71 | } 72 | 73 | func resized(toHeight height: CGFloat) -> UIImage? { 74 | let scale = height / self.size.height 75 | let newWidth = self.size.width * scale 76 | UIGraphicsBeginImageContext(CGSize(width: newWidth, height: height)) 77 | self.draw(in: CGRect(x: 0, y: 0, width: newWidth, height: height)) 78 | let newImage = UIGraphicsGetImageFromCurrentImageContext() 79 | UIGraphicsEndImageContext() 80 | return newImage 81 | } 82 | 83 | } 84 | 85 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/HistoryView.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import SwiftUI 10 | 11 | struct HistoryView: View { 12 | @Binding var isPresented: Bool 13 | @State private var history: History?// = History() 14 | @State private var incrementId: Int = 0 15 | var body: some View { 16 | NavigationView { 17 | List { 18 | if let history = history { 19 | if history.chats.count > 0 { 20 | ForEach(history.chats.sorted { $0.date > $1.date }, id: \.id) { chat in 21 | NavigationLink(destination: ChatView(isPresented: .constant(false), chat: chat, chatTitle: chat.title ?? "New Chat", removeChat: { chat in 22 | history.removeChat(chat) 23 | incrementId += 1 24 | })) { 25 | HStack { 26 | if let image = chat.messages.first?.image { 27 | Image(uiImage: image) 28 | .resizable() 29 | .aspectRatio(contentMode: ContentMode.fill) 30 | .frame(width: 60, height: 60, alignment: .center) 31 | .mask { 32 | RoundedRectangle(cornerRadius: 6) 33 | } 34 | } 35 | VStack (alignment: .leading) { 36 | Text(chat.title ?? 
"New Chat") 37 | Text(chat.date.shortDateTime) 38 | .opacity(0.7) 39 | .font(.footnote) 40 | } 41 | } 42 | } 43 | } 44 | } 45 | else { 46 | HStack { 47 | Spacer() 48 | Text("There's no history to display yet") 49 | Spacer() 50 | } 51 | .padding(.top, 80) 52 | .listRowSeparator(.hidden) 53 | } 54 | } 55 | else { 56 | HStack { 57 | Spacer() 58 | ProgressView() 59 | .padding(.vertical, 100) 60 | .onAppear { 61 | DispatchQueue.global().async { 62 | let history = History() 63 | DispatchQueue.main.async { 64 | self.history = history 65 | } 66 | } 67 | } 68 | Spacer() 69 | } 70 | .listRowSeparator(.hidden) 71 | } 72 | } 73 | .id(incrementId) 74 | .listStyle(PlainListStyle()) 75 | .navigationTitle("History") 76 | .navigationBarTitleDisplayMode(.inline) 77 | .toolbar { 78 | //ToolbarItem(placement: .primaryAction) { 79 | //trailing buttons 80 | /*Button(action: { 81 | //action 82 | }) { 83 | Image(systemName: "magnifyingglass") 84 | }*/ 85 | //} 86 | ToolbarItem(placement: .cancellationAction) { 87 | //leading buttons 88 | Button(action: { 89 | //action 90 | isPresented = false 91 | }) { 92 | Text("Close") 93 | } 94 | .accentColor(.blue) 95 | } 96 | } 97 | } 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Models/ChatModel.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import Foundation 10 | import SwiftUI 11 | 12 | class ChatModel: ObservableObject, Identifiable, Codable { 13 | let id: UUID 14 | @Published var messages: [ChatMessage] = [] 15 | @Published var isSending: Bool = false 16 | @Published var title: String? = nil 17 | @Published var date: Date 18 | 19 | //customize the location of the openai_proxy.php script 20 | //source code for openai_proxy.php available here: https://github.com/adamlyttleapps/OpenAI-Proxy-PHP 21 | 22 | private let location = "https://adamlyttleapps.com/demo/OpenAIProxy-PHP/openai_proxy.php" 23 | 24 | //create a shared secret key, requests to the server use an md5 hash with the shared secret 25 | private let sharedSecretKey = "secret_key" 26 | 27 | enum CodingKeys: String, CodingKey { 28 | case id 29 | case messages 30 | case isSending 31 | case title 32 | case date 33 | } 34 | 35 | init(id: UUID = UUID(), messages: [ChatMessage] = [], isSending: Bool = false, title: String? 
= nil, date: Date = Date()) { 36 | self.id = id 37 | self.messages = messages 38 | self.isSending = isSending 39 | self.title = title 40 | self.date = date 41 | } 42 | 43 | required init(from decoder: Decoder) throws { 44 | let container = try decoder.container(keyedBy: CodingKeys.self) 45 | id = try container.decode(UUID.self, forKey: .id) 46 | messages = try container.decode([ChatMessage].self, forKey: .messages) 47 | isSending = try container.decode(Bool.self, forKey: .isSending) 48 | title = try container.decodeIfPresent(String.self, forKey: .title) 49 | date = try container.decode(Date.self, forKey: .date) 50 | } 51 | 52 | func encode(to encoder: Encoder) throws { 53 | var container = encoder.container(keyedBy: CodingKeys.self) 54 | try container.encode(id, forKey: .id) 55 | try container.encode(messages, forKey: .messages) 56 | try container.encode(isSending, forKey: .isSending) 57 | try container.encode(title, forKey: .title) 58 | try container.encode(date, forKey: .date) 59 | } 60 | 61 | var messageData: String? { 62 | // Convert ChatModel instance to JSON 63 | do { 64 | let jsonData = try JSONEncoder().encode(self.messages) 65 | if let jsonString = String(data: jsonData, encoding: .utf8) { 66 | print(jsonString) 67 | return jsonString 68 | } 69 | } catch { 70 | print("Failed to encode ChatModel to JSON: \(error)") 71 | } 72 | return nil 73 | } 74 | 75 | 76 | func sendMessage(role: MessageRole = .user, message: String? = nil, image: UIImage? = nil) { 77 | 78 | appendMessage(role: role, message: message, image: image) 79 | self.isSending = true 80 | 81 | let parameters: [String: String] = [ 82 | "messages": self.messageData!, 83 | "hash": "\(self.messageData!)\(sharedSecretKey)".hash() 84 | ] 85 | 86 | let connectionRequest = ConnectionRequest() 87 | connectionRequest.fetchData(location, parameters: parameters) { [weak self] data,error in 88 | 89 | if let self = self { 90 | 91 | if let error = error, !error.isEmpty { 92 | print("ERROR") 93 | } 94 | else if let data = data { 95 | print("received data = \(data)") 96 | if let message = String(data: data, encoding: .utf8) { 97 | 98 | DispatchQueue.main.async { 99 | self.appendMessage(role: .system, message: message) 100 | self.isSending = false 101 | } 102 | } 103 | } 104 | 105 | if self.isSending { 106 | DispatchQueue.main.async { 107 | self.isSending = false 108 | } 109 | } 110 | 111 | } 112 | 113 | } 114 | 115 | 116 | 117 | } 118 | 119 | func appendMessage(role: MessageRole, message: String? = nil, image: UIImage? = nil) { 120 | self.date = Date() 121 | messages.append(ChatMessage( 122 | role: role, 123 | message: message, 124 | image: image 125 | )) 126 | } 127 | 128 | } 129 | 130 | enum MessageRole: String, Codable { 131 | case user 132 | case system 133 | } 134 | 135 | struct ChatMessage: Identifiable, Codable { 136 | let id: UUID 137 | let role: MessageRole 138 | var message: String? 139 | var image: UIImage? 140 | 141 | enum CodingKeys: String, CodingKey { 142 | case id 143 | case role 144 | case message 145 | case image 146 | } 147 | 148 | init(id: UUID = UUID(), role: MessageRole, message: String?, image: UIImage? 
= nil) { 149 | self.id = id 150 | self.role = role 151 | self.message = message 152 | self.image = image //?.jpegData(compressionQuality: 1.0) 153 | } 154 | 155 | init(from decoder: Decoder) throws { 156 | let container = try decoder.container(keyedBy: CodingKeys.self) 157 | id = try container.decode(UUID.self, forKey: .id) 158 | role = try container.decode(MessageRole.self, forKey: .role) 159 | message = try container.decodeIfPresent(String.self, forKey: .message) 160 | /*if let imageData = try container.decodeIfPresent(Data.self, forKey: .image) ?? nil { 161 | image = UIImage(data: imageData) 162 | }*/ 163 | } 164 | 165 | func encode(to encoder: Encoder) throws { 166 | var container = encoder.container(keyedBy: CodingKeys.self) 167 | try container.encode(id, forKey: .id) 168 | try container.encode(role, forKey: .role) 169 | try container.encode(message, forKey: .message) 170 | //try container.encode(image?.jpegData(compressionQuality: 1.0), forKey: .image) 171 | 172 | if let image = self.image, 173 | let resizedImage = self.resizedImage(image), 174 | let resizedImageData = resizedImage.jpegData(compressionQuality: 0.4) { 175 | let imageData = self.encodeToPercentEncodedString(resizedImageData) 176 | try container.encode(imageData, forKey: .image) 177 | } 178 | 179 | } 180 | 181 | private func resizedImage(_ image: UIImage) -> UIImage? { 182 | //increase size of image here: 183 | if image.size.height > 1000 { 184 | return image.resized(toHeight: 1000) 185 | } 186 | else { 187 | return image 188 | } 189 | } 190 | 191 | 192 | private func encodeToPercentEncodedString(_ data: Data) -> String { 193 | return data.map { String(format: "%%%02hhX", $0) }.joined() 194 | } 195 | 196 | 197 | 198 | 199 | } 200 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Models/ConnectionRequest.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import Foundation 10 | import Combine 11 | 12 | class ConnectionRequest: ObservableObject { 13 | @Published var isLoading: Bool = false 14 | var cancellable: AnyCancellable? 15 | 16 | func fetchData(_ url: String?, parameters: [String: String], completion: @escaping (Data?,String?) -> Void) { 17 | guard let urlString = url, let requestUrl = URL(string: urlString) else { 18 | //onError?("Invalid URL") 19 | completion(nil, "Invalid URL") 20 | return 21 | } 22 | 23 | //Setup connection 24 | var request = URLRequest(url: requestUrl) 25 | request.timeoutInterval = 60 26 | request.setValue("close", forHTTPHeaderField: "Connection") 27 | 28 | // Prepare a POST request 29 | request.httpMethod = "POST" 30 | request.addValue("application/x-www-form-urlencoded", forHTTPHeaderField: "Content-Type") 31 | let postString = parameters.map { "\($0.key)=\($0.value.addingPercentEncoding(withAllowedCharacters: .urlHostAllowed) ?? 
"")" }.joined(separator: "&") 32 | request.httpBody = postString.data(using: .utf8) 33 | 34 | isLoading = true 35 | 36 | print("==> fetching \(requestUrl.absoluteString)") 37 | 38 | let customQueue = DispatchQueue(label: "com.adamlyttleapps.ConnectionRequest") 39 | cancellable = URLSession.shared.dataTaskPublisher(for: request) 40 | .timeout(60, scheduler: customQueue) // 60 seconds timeout 41 | .receive(on: customQueue) 42 | .sink { completionStatus in 43 | self.isLoading = false 44 | switch completionStatus { 45 | case .failure(let error): 46 | completion(nil,error.localizedDescription) 47 | case .finished: 48 | break 49 | } 50 | } receiveValue: { data, _ in 51 | DispatchQueue.global().async { 52 | completion(data,nil) 53 | } 54 | } 55 | } 56 | 57 | deinit { 58 | cancellable?.cancel() 59 | } 60 | 61 | } 62 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Models/HistoryModel.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import Foundation 10 | 11 | class History: ObservableObject { 12 | @Published var chats: [ChatModel] 13 | init() { 14 | if let data = UserDefaults.standard.data(forKey: "chats"), 15 | let savedChats = try? JSONDecoder().decode([ChatModel].self, from: data) { 16 | chats = savedChats 17 | } else { 18 | chats = [] 19 | } 20 | } 21 | func appendChat(_ chat: ChatModel) { 22 | removeChat(chat, skipImageDeletion: true) 23 | chats.append(chat) 24 | 25 | DispatchQueue.global().async { 26 | if let firstMessage = chat.messages.first, let image = firstMessage.image { 27 | 28 | // Get the path to the documents directory 29 | if let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first { 30 | let fileName = "\(firstMessage.id).jpg" 31 | let fileURL = documentsDirectory.appendingPathComponent(fileName) 32 | 33 | 34 | // Check if the file already exists 35 | if !FileManager.default.fileExists(atPath: fileURL.path) { 36 | if let imageData = image.resized(toHeight: 300)?.jpegData(compressionQuality: 0.6) { 37 | // Write the data to the file 38 | do { 39 | try imageData.write(to: fileURL) 40 | print("Image saved successfully to \(fileURL)") 41 | } catch { 42 | print("Error saving image: \(error)") 43 | } 44 | } 45 | } else { 46 | print("File already exists at \(fileURL)") 47 | } 48 | 49 | } 50 | 51 | } 52 | } 53 | 54 | save() 55 | } 56 | func removeChat(_ chat: ChatModel, skipImageDeletion: Bool = false) { 57 | if let index = chats.firstIndex(where: { $0.id == chat.id }) { 58 | 59 | if !skipImageDeletion { 60 | if let firstMessage = chat.messages.first { 61 | //delete file 62 | if let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first { 63 | let fileName = "\(firstMessage.id).jpg" 64 | let fileURL = documentsDirectory.appendingPathComponent(fileName) 65 | 66 | do { 67 | // Check if the file exists before attempting to delete it 68 | if FileManager.default.fileExists(atPath: fileURL.path) { 69 | try FileManager.default.removeItem(at: fileURL) 70 | print("File successfully deleted.") 71 | } else { 72 | print("File does not exist.") 73 | } 74 | } catch { 75 | print("Error deleting file: \(error)") 76 | } 77 | } 78 | } 79 | } 80 | 81 | chats.remove(at: index) 82 | save() 83 | } 84 | } 85 | 86 | func save() { 
87 | DispatchQueue.global().async { 88 | if let data = try? JSONEncoder().encode(self.chats) { 89 | UserDefaults.standard.set(data, forKey: "chats") 90 | } 91 | } 92 | } 93 | 94 | } 95 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/PhotoPicker.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import SwiftUI 10 | import PhotosUI 11 | 12 | struct PhotoPicker: UIViewControllerRepresentable { 13 | @Binding var isPresented: Bool 14 | @Binding var selectedImage: UIImage? 15 | @Binding var filename: String? 16 | 17 | func makeCoordinator() -> Coordinator { 18 | return Coordinator(self) 19 | } 20 | 21 | func makeUIViewController(context: Context) -> PHPickerViewController { 22 | var config = PHPickerConfiguration(photoLibrary: PHPhotoLibrary.shared())//PHPickerConfiguration() 23 | config.filter = .images 24 | config.selectionLimit = 1 25 | let picker = PHPickerViewController(configuration: config) 26 | picker.delegate = context.coordinator 27 | return picker 28 | } 29 | 30 | func updateUIViewController(_ uiViewController: PHPickerViewController, context: Context) { 31 | } 32 | 33 | class Coordinator: NSObject, PHPickerViewControllerDelegate { 34 | let parent: PhotoPicker 35 | 36 | init(_ parent: PhotoPicker) { 37 | self.parent = parent 38 | } 39 | 40 | 41 | 42 | func picker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) { 43 | parent.isPresented = false 44 | 45 | if let result = results.first { 46 | let assetID = result.assetIdentifier 47 | let options = PHImageRequestOptions() 48 | options.isSynchronous = false 49 | options.deliveryMode = .highQualityFormat 50 | options.isNetworkAccessAllowed = true 51 | 52 | if let assetID = assetID { 53 | PHPhotoLibrary.requestAuthorization { status in 54 | if status == .authorized { 55 | let asset = PHAsset.fetchAssets(withLocalIdentifiers: [assetID], options: nil).firstObject 56 | PHImageManager.default().requestImageDataAndOrientation(for: asset!, options: options) { imageData, dataUTI, orientation, info in 57 | if let imageData = imageData { 58 | DispatchQueue.main.async { 59 | self.parent.selectedImage = UIImage(data: imageData) 60 | // Get metadata 61 | if let imageSource = CGImageSourceCreateWithData(imageData as CFData, nil), 62 | let imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [CFString: Any] { 63 | // Get the original file format 64 | let fileFormat = imageProperties[kCGImageSourceTypeIdentifierHint] 65 | print("File format: \(fileFormat ?? "Unknown")") 66 | 67 | // Get the resolution 68 | let width = imageProperties[kCGImagePropertyPixelWidth] ?? "Unknown" 69 | let height = imageProperties[kCGImagePropertyPixelHeight] ?? "Unknown" 70 | print("Resolution: \(width) x \(height)") 71 | 72 | // Get the file size 73 | let fileSize = imageProperties[kCGImagePropertyFileSize] ?? "Unknown" 74 | print("File size: \(fileSize) bytes") 75 | 76 | 77 | // Get the filename 78 | let resources = PHAssetResource.assetResources(for: asset!) 
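// The original filename comes from the asset's resources (resource.originalFilename below) rather than from the decoded image data itself.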
79 | if let resource = resources.first { 80 | let filename = resource.originalFilename 81 | print("Filename: \(filename)") 82 | DispatchQueue.main.async { 83 | self.parent.filename = filename 84 | } 85 | } 86 | 87 | } 88 | } 89 | } 90 | } 91 | } else { 92 | print("Authorization not granted") 93 | } 94 | } 95 | } 96 | } 97 | } 98 | 99 | 100 | 101 | 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/PhotoView.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import SwiftUI 10 | 11 | struct PhotoView: View { 12 | 13 | let image: UIImage 14 | 15 | var body: some View { 16 | 17 | VStack { 18 | Image(uiImage: image) 19 | .resizable() 20 | .aspectRatio(contentMode: .fit) 21 | .clipped() 22 | } 23 | 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/Preview Content/Preview Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /SwiftUI-AI-Wrapper/SwiftUI_AI_WrapperApp.swift: -------------------------------------------------------------------------------- 1 | // AI Wrapper SwiftUI 2 | // Created by Adam Lyttle on 7/9/2024 3 | 4 | // Make cool stuff and share your build with me: 5 | 6 | // --> x.com/adamlyttleapps 7 | // --> github.com/adamlyttleapps 8 | 9 | import SwiftUI 10 | 11 | @main 12 | struct SwiftUI_AI_WrapperApp: App { 13 | var body: some Scene { 14 | WindowGroup { 15 | ContentView() 16 | } 17 | } 18 | } 19 | --------------------------------------------------------------------------------
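A minimal sketch of the request signing that ChatModel.sendMessage performs before handing the request to ConnectionRequest: the messages array is serialized to JSON, the JSON is concatenated with sharedSecretKey and hashed with MD5 via the String.hash() extension, and both values are sent as the `messages` and `hash` form fields. The proxy script is expected to recompute the same hash to reject tampered or unauthorized requests; the JSON literal and the "secret_key" placeholder below are illustrative only.

```
import CryptoKit
import Foundation

// Shared secret: must match the value configured in openai_proxy.php.
// "secret_key" is the repository's placeholder, not a real secret.
let sharedSecretKey = "secret_key"

// ChatModel.messageData produces JSON along these lines (illustrative only).
let messagesJSON = #"[{"id":"F3A1B2C4","role":"user","message":"What is this?"}]"#

// Signature scheme from Extensions/StringHash.swift:
// md5(messagesJSON + sharedSecretKey), hex-encoded.
let signature = Insecure.MD5
    .hash(data: Data((messagesJSON + sharedSecretKey).utf8))
    .map { String(format: "%02x", $0) }
    .joined()

// ConnectionRequest then POSTs both values as
// application/x-www-form-urlencoded fields:
//   messages=<percent-encoded JSON>&hash=<signature>
print("hash=\(signature)")
```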