├── .gitignore
├── .swiftformat
├── .swiftpm
│   └── xcode
│       └── package.xcworkspace
│           └── xcshareddata
│               └── IDEWorkspaceChecks.plist
├── LICENSE
├── Package.resolved
├── Package.swift
├── PolyAIExample
│   ├── PolyAIExample.xcodeproj
│   │   ├── project.pbxproj
│   │   └── project.xcworkspace
│   │       ├── contents.xcworkspacedata
│   │       └── xcshareddata
│   │           ├── IDEWorkspaceChecks.plist
│   │           └── swiftpm
│   │               └── Package.resolved
│   ├── PolyAIExample
│   │   ├── ApiKeysIntroView.swift
│   │   ├── Assets.xcassets
│   │   │   ├── AccentColor.colorset
│   │   │   │   └── Contents.json
│   │   │   ├── AppIcon.appiconset
│   │   │   │   └── Contents.json
│   │   │   └── Contents.json
│   │   ├── MessageDemoObservabble.swift
│   │   ├── MessageDemoView.swift
│   │   ├── OptionsListView.swift
│   │   ├── PolyAIExample.entitlements
│   │   ├── PolyAIExampleApp.swift
│   │   └── Preview Content
│   │       └── Preview Assets.xcassets
│   │           └── Contents.json
│   ├── PolyAIExampleTests
│   │   └── PolyAIExampleTests.swift
│   └── PolyAIExampleUITests
│       ├── PolyAIExampleUITests.swift
│       └── PolyAIExampleUITestsLaunchTests.swift
├── README.md
├── Sources
│   └── PolyAI
│       ├── Interfaces
│       │   ├── Parameters
│       │   │   ├── LLMMessageParameter.swift
│       │   │   └── LLMParameter.swift
│       │   └── Response
│       │       ├── Message
│       │       │   ├── LLMMessageResponse+Anthropic.swift
│       │       │   ├── LLMMessageResponse+OpenAI.swift
│       │       │   └── LLMMessageResponse.swift
│       │       └── Stream
│       │           ├── LLMMessageStreamResponse+Anthropic.swift
│       │           ├── LLMMessageStreamResponse+OpenAI.swift
│       │           └── LLMMessageStreamResponse.swift
│       ├── PolyAI.swift
│       └── Service
│           ├── DefaultPolyAIService.swift
│           ├── PolyAIService.swift
│           └── PolyAIServiceFactory.swift
└── Tests
    └── PolyAITests
        └── PolyAITests.swift
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | /.build
3 | /Packages
4 | xcuserdata/
5 | DerivedData/
6 | .swiftpm/configuration/registries.json
7 | .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
8 | .netrc
9 |
--------------------------------------------------------------------------------
/.swiftformat:
--------------------------------------------------------------------------------
1 | --disable all
2 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 James Rochabrun
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "pins" : [
3 | {
4 | "identity" : "swiftanthropic",
5 | "kind" : "remoteSourceControl",
6 | "location" : "https://github.com/jamesrochabrun/SwiftAnthropic",
7 | "state" : {
8 | "revision" : "c921b625dcbcc0653343ae63c89d1cbfed0bd539",
9 | "version" : "2.1.3"
10 | }
11 | },
12 | {
13 | "identity" : "swiftopenai",
14 | "kind" : "remoteSourceControl",
15 | "location" : "https://github.com/jamesrochabrun/SwiftOpenAI",
16 | "state" : {
17 | "revision" : "3f5e195781e39ebbfec512118a1e43ec65fe4367",
18 | "version" : "4.0.6"
19 | }
20 | }
21 | ],
22 | "version" : 2
23 | }
24 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version: 5.9
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
6 | let package = Package(
7 | name: "PolyAI",
8 | platforms: [
9 | .iOS(.v15),
10 | .macOS(.v13)
11 | ],
12 | products: [
13 | // Products define the executables and libraries a package produces, making them visible to other packages.
14 | .library(
15 | name: "PolyAI",
16 | targets: ["PolyAI"]),
17 | ],
18 | dependencies: [
19 | // Dependencies declare other packages that this package depends on.
20 | .package(url: "https://github.com/jamesrochabrun/SwiftOpenAI", from: "4.0.6"),
21 | .package(url: "https://github.com/jamesrochabrun/SwiftAnthropic", from: "2.1.3"),
22 | ],
23 | targets: [
24 | // Targets are the basic building blocks of a package, defining a module or a test suite.
25 | // Targets can depend on other targets in this package and products from dependencies.
26 | .target(
27 | name: "PolyAI",
28 | dependencies: [
29 | .product(name: "SwiftOpenAI", package: "SwiftOpenAI"),
30 | .product(name: "SwiftAnthropic", package: "SwiftAnthropic"),
31 | ]),
32 | .testTarget(
33 | name: "PolyAITests",
34 | dependencies: ["PolyAI"]),
35 | ]
36 | )
37 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 60;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 7B7B5AD92BCCDFEB0051FF95 /* PolyAIExampleApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7B5AD82BCCDFEB0051FF95 /* PolyAIExampleApp.swift */; };
11 | 7B7B5ADD2BCCDFED0051FF95 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7B7B5ADC2BCCDFED0051FF95 /* Assets.xcassets */; };
12 | 7B7B5AE12BCCDFED0051FF95 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7B7B5AE02BCCDFED0051FF95 /* Preview Assets.xcassets */; };
13 | 7B7B5AEB2BCCDFED0051FF95 /* PolyAIExampleTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7B5AEA2BCCDFED0051FF95 /* PolyAIExampleTests.swift */; };
14 | 7B7B5AF52BCCDFED0051FF95 /* PolyAIExampleUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7B5AF42BCCDFED0051FF95 /* PolyAIExampleUITests.swift */; };
15 | 7B7B5AF72BCCDFED0051FF95 /* PolyAIExampleUITestsLaunchTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7B5AF62BCCDFED0051FF95 /* PolyAIExampleUITestsLaunchTests.swift */; };
16 | 7B7B5B052BCCE0400051FF95 /* PolyAI in Frameworks */ = {isa = PBXBuildFile; productRef = 7B7B5B042BCCE0400051FF95 /* PolyAI */; };
17 | 7B7B5B072BCCE14A0051FF95 /* ApiKeysIntroView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7B5B062BCCE14A0051FF95 /* ApiKeysIntroView.swift */; };
18 | 7B7B5B092BCCE1960051FF95 /* OptionsListView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7B5B082BCCE1960051FF95 /* OptionsListView.swift */; };
19 | 7B7B5B0B2BCCE52F0051FF95 /* MessageDemoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7B5B0A2BCCE52F0051FF95 /* MessageDemoView.swift */; };
20 | 7B7B5B0D2BCCE55E0051FF95 /* MessageDemoObservabble.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7B5B0C2BCCE55E0051FF95 /* MessageDemoObservabble.swift */; };
21 | /* End PBXBuildFile section */
22 |
23 | /* Begin PBXContainerItemProxy section */
24 | 7B7B5AE72BCCDFED0051FF95 /* PBXContainerItemProxy */ = {
25 | isa = PBXContainerItemProxy;
26 | containerPortal = 7B7B5ACD2BCCDFEB0051FF95 /* Project object */;
27 | proxyType = 1;
28 | remoteGlobalIDString = 7B7B5AD42BCCDFEB0051FF95;
29 | remoteInfo = PolyAIExample;
30 | };
31 | 7B7B5AF12BCCDFED0051FF95 /* PBXContainerItemProxy */ = {
32 | isa = PBXContainerItemProxy;
33 | containerPortal = 7B7B5ACD2BCCDFEB0051FF95 /* Project object */;
34 | proxyType = 1;
35 | remoteGlobalIDString = 7B7B5AD42BCCDFEB0051FF95;
36 | remoteInfo = PolyAIExample;
37 | };
38 | /* End PBXContainerItemProxy section */
39 |
40 | /* Begin PBXFileReference section */
41 | 7B7B5AD52BCCDFEB0051FF95 /* PolyAIExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = PolyAIExample.app; sourceTree = BUILT_PRODUCTS_DIR; };
42 | 7B7B5AD82BCCDFEB0051FF95 /* PolyAIExampleApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PolyAIExampleApp.swift; sourceTree = ""; };
43 | 7B7B5ADC2BCCDFED0051FF95 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; };
44 | 7B7B5ADE2BCCDFED0051FF95 /* PolyAIExample.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = PolyAIExample.entitlements; sourceTree = ""; };
45 | 7B7B5AE02BCCDFED0051FF95 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; };
46 | 7B7B5AE62BCCDFED0051FF95 /* PolyAIExampleTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = PolyAIExampleTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
47 | 7B7B5AEA2BCCDFED0051FF95 /* PolyAIExampleTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PolyAIExampleTests.swift; sourceTree = ""; };
48 | 7B7B5AF02BCCDFED0051FF95 /* PolyAIExampleUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = PolyAIExampleUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
49 | 7B7B5AF42BCCDFED0051FF95 /* PolyAIExampleUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PolyAIExampleUITests.swift; sourceTree = ""; };
50 | 7B7B5AF62BCCDFED0051FF95 /* PolyAIExampleUITestsLaunchTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PolyAIExampleUITestsLaunchTests.swift; sourceTree = ""; };
51 | 7B7B5B062BCCE14A0051FF95 /* ApiKeysIntroView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ApiKeysIntroView.swift; sourceTree = ""; };
52 | 7B7B5B082BCCE1960051FF95 /* OptionsListView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OptionsListView.swift; sourceTree = ""; };
53 | 7B7B5B0A2BCCE52F0051FF95 /* MessageDemoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MessageDemoView.swift; sourceTree = ""; };
54 | 7B7B5B0C2BCCE55E0051FF95 /* MessageDemoObservabble.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MessageDemoObservabble.swift; sourceTree = ""; };
55 | /* End PBXFileReference section */
56 |
57 | /* Begin PBXFrameworksBuildPhase section */
58 | 7B7B5AD22BCCDFEB0051FF95 /* Frameworks */ = {
59 | isa = PBXFrameworksBuildPhase;
60 | buildActionMask = 2147483647;
61 | files = (
62 | 7B7B5B052BCCE0400051FF95 /* PolyAI in Frameworks */,
63 | );
64 | runOnlyForDeploymentPostprocessing = 0;
65 | };
66 | 7B7B5AE32BCCDFED0051FF95 /* Frameworks */ = {
67 | isa = PBXFrameworksBuildPhase;
68 | buildActionMask = 2147483647;
69 | files = (
70 | );
71 | runOnlyForDeploymentPostprocessing = 0;
72 | };
73 | 7B7B5AED2BCCDFED0051FF95 /* Frameworks */ = {
74 | isa = PBXFrameworksBuildPhase;
75 | buildActionMask = 2147483647;
76 | files = (
77 | );
78 | runOnlyForDeploymentPostprocessing = 0;
79 | };
80 | /* End PBXFrameworksBuildPhase section */
81 |
82 | /* Begin PBXGroup section */
83 | 7B7B5ACC2BCCDFEB0051FF95 = {
84 | isa = PBXGroup;
85 | children = (
86 | 7B7B5AD72BCCDFEB0051FF95 /* PolyAIExample */,
87 | 7B7B5AE92BCCDFED0051FF95 /* PolyAIExampleTests */,
88 | 7B7B5AF32BCCDFED0051FF95 /* PolyAIExampleUITests */,
89 | 7B7B5AD62BCCDFEB0051FF95 /* Products */,
90 | );
91 | sourceTree = "";
92 | };
93 | 7B7B5AD62BCCDFEB0051FF95 /* Products */ = {
94 | isa = PBXGroup;
95 | children = (
96 | 7B7B5AD52BCCDFEB0051FF95 /* PolyAIExample.app */,
97 | 7B7B5AE62BCCDFED0051FF95 /* PolyAIExampleTests.xctest */,
98 | 7B7B5AF02BCCDFED0051FF95 /* PolyAIExampleUITests.xctest */,
99 | );
100 | name = Products;
101 | sourceTree = "";
102 | };
103 | 7B7B5AD72BCCDFEB0051FF95 /* PolyAIExample */ = {
104 | isa = PBXGroup;
105 | children = (
106 | 7B7B5AD82BCCDFEB0051FF95 /* PolyAIExampleApp.swift */,
107 | 7B7B5B062BCCE14A0051FF95 /* ApiKeysIntroView.swift */,
108 | 7B7B5B082BCCE1960051FF95 /* OptionsListView.swift */,
109 | 7B7B5B0A2BCCE52F0051FF95 /* MessageDemoView.swift */,
110 | 7B7B5B0C2BCCE55E0051FF95 /* MessageDemoObservabble.swift */,
111 | 7B7B5ADC2BCCDFED0051FF95 /* Assets.xcassets */,
112 | 7B7B5ADE2BCCDFED0051FF95 /* PolyAIExample.entitlements */,
113 | 7B7B5ADF2BCCDFED0051FF95 /* Preview Content */,
114 | );
115 | path = PolyAIExample;
116 | sourceTree = "";
117 | };
118 | 7B7B5ADF2BCCDFED0051FF95 /* Preview Content */ = {
119 | isa = PBXGroup;
120 | children = (
121 | 7B7B5AE02BCCDFED0051FF95 /* Preview Assets.xcassets */,
122 | );
123 | path = "Preview Content";
124 | sourceTree = "";
125 | };
126 | 7B7B5AE92BCCDFED0051FF95 /* PolyAIExampleTests */ = {
127 | isa = PBXGroup;
128 | children = (
129 | 7B7B5AEA2BCCDFED0051FF95 /* PolyAIExampleTests.swift */,
130 | );
131 | path = PolyAIExampleTests;
132 | sourceTree = "";
133 | };
134 | 7B7B5AF32BCCDFED0051FF95 /* PolyAIExampleUITests */ = {
135 | isa = PBXGroup;
136 | children = (
137 | 7B7B5AF42BCCDFED0051FF95 /* PolyAIExampleUITests.swift */,
138 | 7B7B5AF62BCCDFED0051FF95 /* PolyAIExampleUITestsLaunchTests.swift */,
139 | );
140 | path = PolyAIExampleUITests;
141 | sourceTree = "";
142 | };
143 | /* End PBXGroup section */
144 |
145 | /* Begin PBXNativeTarget section */
146 | 7B7B5AD42BCCDFEB0051FF95 /* PolyAIExample */ = {
147 | isa = PBXNativeTarget;
148 | buildConfigurationList = 7B7B5AFA2BCCDFED0051FF95 /* Build configuration list for PBXNativeTarget "PolyAIExample" */;
149 | buildPhases = (
150 | 7B7B5AD12BCCDFEB0051FF95 /* Sources */,
151 | 7B7B5AD22BCCDFEB0051FF95 /* Frameworks */,
152 | 7B7B5AD32BCCDFEB0051FF95 /* Resources */,
153 | );
154 | buildRules = (
155 | );
156 | dependencies = (
157 | );
158 | name = PolyAIExample;
159 | packageProductDependencies = (
160 | 7B7B5B042BCCE0400051FF95 /* PolyAI */,
161 | );
162 | productName = PolyAIExample;
163 | productReference = 7B7B5AD52BCCDFEB0051FF95 /* PolyAIExample.app */;
164 | productType = "com.apple.product-type.application";
165 | };
166 | 7B7B5AE52BCCDFED0051FF95 /* PolyAIExampleTests */ = {
167 | isa = PBXNativeTarget;
168 | buildConfigurationList = 7B7B5AFD2BCCDFED0051FF95 /* Build configuration list for PBXNativeTarget "PolyAIExampleTests" */;
169 | buildPhases = (
170 | 7B7B5AE22BCCDFED0051FF95 /* Sources */,
171 | 7B7B5AE32BCCDFED0051FF95 /* Frameworks */,
172 | 7B7B5AE42BCCDFED0051FF95 /* Resources */,
173 | );
174 | buildRules = (
175 | );
176 | dependencies = (
177 | 7B7B5AE82BCCDFED0051FF95 /* PBXTargetDependency */,
178 | );
179 | name = PolyAIExampleTests;
180 | productName = PolyAIExampleTests;
181 | productReference = 7B7B5AE62BCCDFED0051FF95 /* PolyAIExampleTests.xctest */;
182 | productType = "com.apple.product-type.bundle.unit-test";
183 | };
184 | 7B7B5AEF2BCCDFED0051FF95 /* PolyAIExampleUITests */ = {
185 | isa = PBXNativeTarget;
186 | buildConfigurationList = 7B7B5B002BCCDFED0051FF95 /* Build configuration list for PBXNativeTarget "PolyAIExampleUITests" */;
187 | buildPhases = (
188 | 7B7B5AEC2BCCDFED0051FF95 /* Sources */,
189 | 7B7B5AED2BCCDFED0051FF95 /* Frameworks */,
190 | 7B7B5AEE2BCCDFED0051FF95 /* Resources */,
191 | );
192 | buildRules = (
193 | );
194 | dependencies = (
195 | 7B7B5AF22BCCDFED0051FF95 /* PBXTargetDependency */,
196 | );
197 | name = PolyAIExampleUITests;
198 | productName = PolyAIExampleUITests;
199 | productReference = 7B7B5AF02BCCDFED0051FF95 /* PolyAIExampleUITests.xctest */;
200 | productType = "com.apple.product-type.bundle.ui-testing";
201 | };
202 | /* End PBXNativeTarget section */
203 |
204 | /* Begin PBXProject section */
205 | 7B7B5ACD2BCCDFEB0051FF95 /* Project object */ = {
206 | isa = PBXProject;
207 | attributes = {
208 | BuildIndependentTargetsInParallel = 1;
209 | LastSwiftUpdateCheck = 1500;
210 | LastUpgradeCheck = 1500;
211 | TargetAttributes = {
212 | 7B7B5AD42BCCDFEB0051FF95 = {
213 | CreatedOnToolsVersion = 15.0;
214 | };
215 | 7B7B5AE52BCCDFED0051FF95 = {
216 | CreatedOnToolsVersion = 15.0;
217 | TestTargetID = 7B7B5AD42BCCDFEB0051FF95;
218 | };
219 | 7B7B5AEF2BCCDFED0051FF95 = {
220 | CreatedOnToolsVersion = 15.0;
221 | TestTargetID = 7B7B5AD42BCCDFEB0051FF95;
222 | };
223 | };
224 | };
225 | buildConfigurationList = 7B7B5AD02BCCDFEB0051FF95 /* Build configuration list for PBXProject "PolyAIExample" */;
226 | compatibilityVersion = "Xcode 14.0";
227 | developmentRegion = en;
228 | hasScannedForEncodings = 0;
229 | knownRegions = (
230 | en,
231 | Base,
232 | );
233 | mainGroup = 7B7B5ACC2BCCDFEB0051FF95;
234 | packageReferences = (
235 | 7B7B5B032BCCE0400051FF95 /* XCLocalSwiftPackageReference ".." */,
236 | );
237 | productRefGroup = 7B7B5AD62BCCDFEB0051FF95 /* Products */;
238 | projectDirPath = "";
239 | projectRoot = "";
240 | targets = (
241 | 7B7B5AD42BCCDFEB0051FF95 /* PolyAIExample */,
242 | 7B7B5AE52BCCDFED0051FF95 /* PolyAIExampleTests */,
243 | 7B7B5AEF2BCCDFED0051FF95 /* PolyAIExampleUITests */,
244 | );
245 | };
246 | /* End PBXProject section */
247 |
248 | /* Begin PBXResourcesBuildPhase section */
249 | 7B7B5AD32BCCDFEB0051FF95 /* Resources */ = {
250 | isa = PBXResourcesBuildPhase;
251 | buildActionMask = 2147483647;
252 | files = (
253 | 7B7B5AE12BCCDFED0051FF95 /* Preview Assets.xcassets in Resources */,
254 | 7B7B5ADD2BCCDFED0051FF95 /* Assets.xcassets in Resources */,
255 | );
256 | runOnlyForDeploymentPostprocessing = 0;
257 | };
258 | 7B7B5AE42BCCDFED0051FF95 /* Resources */ = {
259 | isa = PBXResourcesBuildPhase;
260 | buildActionMask = 2147483647;
261 | files = (
262 | );
263 | runOnlyForDeploymentPostprocessing = 0;
264 | };
265 | 7B7B5AEE2BCCDFED0051FF95 /* Resources */ = {
266 | isa = PBXResourcesBuildPhase;
267 | buildActionMask = 2147483647;
268 | files = (
269 | );
270 | runOnlyForDeploymentPostprocessing = 0;
271 | };
272 | /* End PBXResourcesBuildPhase section */
273 |
274 | /* Begin PBXSourcesBuildPhase section */
275 | 7B7B5AD12BCCDFEB0051FF95 /* Sources */ = {
276 | isa = PBXSourcesBuildPhase;
277 | buildActionMask = 2147483647;
278 | files = (
279 | 7B7B5B072BCCE14A0051FF95 /* ApiKeysIntroView.swift in Sources */,
280 | 7B7B5AD92BCCDFEB0051FF95 /* PolyAIExampleApp.swift in Sources */,
281 | 7B7B5B0B2BCCE52F0051FF95 /* MessageDemoView.swift in Sources */,
282 | 7B7B5B0D2BCCE55E0051FF95 /* MessageDemoObservabble.swift in Sources */,
283 | 7B7B5B092BCCE1960051FF95 /* OptionsListView.swift in Sources */,
284 | );
285 | runOnlyForDeploymentPostprocessing = 0;
286 | };
287 | 7B7B5AE22BCCDFED0051FF95 /* Sources */ = {
288 | isa = PBXSourcesBuildPhase;
289 | buildActionMask = 2147483647;
290 | files = (
291 | 7B7B5AEB2BCCDFED0051FF95 /* PolyAIExampleTests.swift in Sources */,
292 | );
293 | runOnlyForDeploymentPostprocessing = 0;
294 | };
295 | 7B7B5AEC2BCCDFED0051FF95 /* Sources */ = {
296 | isa = PBXSourcesBuildPhase;
297 | buildActionMask = 2147483647;
298 | files = (
299 | 7B7B5AF52BCCDFED0051FF95 /* PolyAIExampleUITests.swift in Sources */,
300 | 7B7B5AF72BCCDFED0051FF95 /* PolyAIExampleUITestsLaunchTests.swift in Sources */,
301 | );
302 | runOnlyForDeploymentPostprocessing = 0;
303 | };
304 | /* End PBXSourcesBuildPhase section */
305 |
306 | /* Begin PBXTargetDependency section */
307 | 7B7B5AE82BCCDFED0051FF95 /* PBXTargetDependency */ = {
308 | isa = PBXTargetDependency;
309 | target = 7B7B5AD42BCCDFEB0051FF95 /* PolyAIExample */;
310 | targetProxy = 7B7B5AE72BCCDFED0051FF95 /* PBXContainerItemProxy */;
311 | };
312 | 7B7B5AF22BCCDFED0051FF95 /* PBXTargetDependency */ = {
313 | isa = PBXTargetDependency;
314 | target = 7B7B5AD42BCCDFEB0051FF95 /* PolyAIExample */;
315 | targetProxy = 7B7B5AF12BCCDFED0051FF95 /* PBXContainerItemProxy */;
316 | };
317 | /* End PBXTargetDependency section */
318 |
319 | /* Begin XCBuildConfiguration section */
320 | 7B7B5AF82BCCDFED0051FF95 /* Debug */ = {
321 | isa = XCBuildConfiguration;
322 | buildSettings = {
323 | ALWAYS_SEARCH_USER_PATHS = NO;
324 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
325 | CLANG_ANALYZER_NONNULL = YES;
326 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
327 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
328 | CLANG_ENABLE_MODULES = YES;
329 | CLANG_ENABLE_OBJC_ARC = YES;
330 | CLANG_ENABLE_OBJC_WEAK = YES;
331 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
332 | CLANG_WARN_BOOL_CONVERSION = YES;
333 | CLANG_WARN_COMMA = YES;
334 | CLANG_WARN_CONSTANT_CONVERSION = YES;
335 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
336 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
337 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
338 | CLANG_WARN_EMPTY_BODY = YES;
339 | CLANG_WARN_ENUM_CONVERSION = YES;
340 | CLANG_WARN_INFINITE_RECURSION = YES;
341 | CLANG_WARN_INT_CONVERSION = YES;
342 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
343 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
344 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
345 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
346 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
347 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
348 | CLANG_WARN_STRICT_PROTOTYPES = YES;
349 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
350 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
351 | CLANG_WARN_UNREACHABLE_CODE = YES;
352 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
353 | COPY_PHASE_STRIP = NO;
354 | DEBUG_INFORMATION_FORMAT = dwarf;
355 | ENABLE_STRICT_OBJC_MSGSEND = YES;
356 | ENABLE_TESTABILITY = YES;
357 | ENABLE_USER_SCRIPT_SANDBOXING = YES;
358 | GCC_C_LANGUAGE_STANDARD = gnu17;
359 | GCC_DYNAMIC_NO_PIC = NO;
360 | GCC_NO_COMMON_BLOCKS = YES;
361 | GCC_OPTIMIZATION_LEVEL = 0;
362 | GCC_PREPROCESSOR_DEFINITIONS = (
363 | "DEBUG=1",
364 | "$(inherited)",
365 | );
366 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
367 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
368 | GCC_WARN_UNDECLARED_SELECTOR = YES;
369 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
370 | GCC_WARN_UNUSED_FUNCTION = YES;
371 | GCC_WARN_UNUSED_VARIABLE = YES;
372 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
373 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
374 | MTL_FAST_MATH = YES;
375 | ONLY_ACTIVE_ARCH = YES;
376 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
377 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
378 | };
379 | name = Debug;
380 | };
381 | 7B7B5AF92BCCDFED0051FF95 /* Release */ = {
382 | isa = XCBuildConfiguration;
383 | buildSettings = {
384 | ALWAYS_SEARCH_USER_PATHS = NO;
385 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
386 | CLANG_ANALYZER_NONNULL = YES;
387 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
388 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
389 | CLANG_ENABLE_MODULES = YES;
390 | CLANG_ENABLE_OBJC_ARC = YES;
391 | CLANG_ENABLE_OBJC_WEAK = YES;
392 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
393 | CLANG_WARN_BOOL_CONVERSION = YES;
394 | CLANG_WARN_COMMA = YES;
395 | CLANG_WARN_CONSTANT_CONVERSION = YES;
396 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
397 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
398 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
399 | CLANG_WARN_EMPTY_BODY = YES;
400 | CLANG_WARN_ENUM_CONVERSION = YES;
401 | CLANG_WARN_INFINITE_RECURSION = YES;
402 | CLANG_WARN_INT_CONVERSION = YES;
403 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
404 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
405 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
406 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
407 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
408 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
409 | CLANG_WARN_STRICT_PROTOTYPES = YES;
410 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
411 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
412 | CLANG_WARN_UNREACHABLE_CODE = YES;
413 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
414 | COPY_PHASE_STRIP = NO;
415 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
416 | ENABLE_NS_ASSERTIONS = NO;
417 | ENABLE_STRICT_OBJC_MSGSEND = YES;
418 | ENABLE_USER_SCRIPT_SANDBOXING = YES;
419 | GCC_C_LANGUAGE_STANDARD = gnu17;
420 | GCC_NO_COMMON_BLOCKS = YES;
421 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
422 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
423 | GCC_WARN_UNDECLARED_SELECTOR = YES;
424 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
425 | GCC_WARN_UNUSED_FUNCTION = YES;
426 | GCC_WARN_UNUSED_VARIABLE = YES;
427 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
428 | MTL_ENABLE_DEBUG_INFO = NO;
429 | MTL_FAST_MATH = YES;
430 | SWIFT_COMPILATION_MODE = wholemodule;
431 | };
432 | name = Release;
433 | };
434 | 7B7B5AFB2BCCDFED0051FF95 /* Debug */ = {
435 | isa = XCBuildConfiguration;
436 | buildSettings = {
437 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
438 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
439 | CODE_SIGN_ENTITLEMENTS = PolyAIExample/PolyAIExample.entitlements;
440 | CODE_SIGN_STYLE = Automatic;
441 | CURRENT_PROJECT_VERSION = 1;
442 | DEVELOPMENT_ASSET_PATHS = "\"PolyAIExample/Preview Content\"";
443 | DEVELOPMENT_TEAM = CQ45U4X9K3;
444 | ENABLE_HARDENED_RUNTIME = YES;
445 | ENABLE_PREVIEWS = YES;
446 | GENERATE_INFOPLIST_FILE = YES;
447 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
448 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
449 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
450 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
451 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
452 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
453 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
454 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
455 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
456 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
457 | IPHONEOS_DEPLOYMENT_TARGET = 17.0;
458 | LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
459 | "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
460 | MACOSX_DEPLOYMENT_TARGET = 14.0;
461 | MARKETING_VERSION = 1.0;
462 | PRODUCT_BUNDLE_IDENTIFIER = jamesRochabrun.PolyAIExample;
463 | PRODUCT_NAME = "$(TARGET_NAME)";
464 | SDKROOT = auto;
465 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
466 | SWIFT_EMIT_LOC_STRINGS = YES;
467 | SWIFT_VERSION = 5.0;
468 | TARGETED_DEVICE_FAMILY = "1,2";
469 | };
470 | name = Debug;
471 | };
472 | 7B7B5AFC2BCCDFED0051FF95 /* Release */ = {
473 | isa = XCBuildConfiguration;
474 | buildSettings = {
475 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
476 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
477 | CODE_SIGN_ENTITLEMENTS = PolyAIExample/PolyAIExample.entitlements;
478 | CODE_SIGN_STYLE = Automatic;
479 | CURRENT_PROJECT_VERSION = 1;
480 | DEVELOPMENT_ASSET_PATHS = "\"PolyAIExample/Preview Content\"";
481 | DEVELOPMENT_TEAM = CQ45U4X9K3;
482 | ENABLE_HARDENED_RUNTIME = YES;
483 | ENABLE_PREVIEWS = YES;
484 | GENERATE_INFOPLIST_FILE = YES;
485 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
486 | "INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
487 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
488 | "INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
489 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
490 | "INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
491 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
492 | "INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
493 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
494 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
495 | IPHONEOS_DEPLOYMENT_TARGET = 17.0;
496 | LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
497 | "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
498 | MACOSX_DEPLOYMENT_TARGET = 14.0;
499 | MARKETING_VERSION = 1.0;
500 | PRODUCT_BUNDLE_IDENTIFIER = jamesRochabrun.PolyAIExample;
501 | PRODUCT_NAME = "$(TARGET_NAME)";
502 | SDKROOT = auto;
503 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
504 | SWIFT_EMIT_LOC_STRINGS = YES;
505 | SWIFT_VERSION = 5.0;
506 | TARGETED_DEVICE_FAMILY = "1,2";
507 | };
508 | name = Release;
509 | };
510 | 7B7B5AFE2BCCDFED0051FF95 /* Debug */ = {
511 | isa = XCBuildConfiguration;
512 | buildSettings = {
513 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
514 | BUNDLE_LOADER = "$(TEST_HOST)";
515 | CODE_SIGN_STYLE = Automatic;
516 | CURRENT_PROJECT_VERSION = 1;
517 | DEVELOPMENT_TEAM = CQ45U4X9K3;
518 | GENERATE_INFOPLIST_FILE = YES;
519 | IPHONEOS_DEPLOYMENT_TARGET = 17.0;
520 | MACOSX_DEPLOYMENT_TARGET = 14.0;
521 | MARKETING_VERSION = 1.0;
522 | PRODUCT_BUNDLE_IDENTIFIER = jamesRochabrun.PolyAIExampleTests;
523 | PRODUCT_NAME = "$(TARGET_NAME)";
524 | SDKROOT = auto;
525 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
526 | SWIFT_EMIT_LOC_STRINGS = NO;
527 | SWIFT_VERSION = 5.0;
528 | TARGETED_DEVICE_FAMILY = "1,2";
529 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/PolyAIExample.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/PolyAIExample";
530 | };
531 | name = Debug;
532 | };
533 | 7B7B5AFF2BCCDFED0051FF95 /* Release */ = {
534 | isa = XCBuildConfiguration;
535 | buildSettings = {
536 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
537 | BUNDLE_LOADER = "$(TEST_HOST)";
538 | CODE_SIGN_STYLE = Automatic;
539 | CURRENT_PROJECT_VERSION = 1;
540 | DEVELOPMENT_TEAM = CQ45U4X9K3;
541 | GENERATE_INFOPLIST_FILE = YES;
542 | IPHONEOS_DEPLOYMENT_TARGET = 17.0;
543 | MACOSX_DEPLOYMENT_TARGET = 14.0;
544 | MARKETING_VERSION = 1.0;
545 | PRODUCT_BUNDLE_IDENTIFIER = jamesRochabrun.PolyAIExampleTests;
546 | PRODUCT_NAME = "$(TARGET_NAME)";
547 | SDKROOT = auto;
548 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
549 | SWIFT_EMIT_LOC_STRINGS = NO;
550 | SWIFT_VERSION = 5.0;
551 | TARGETED_DEVICE_FAMILY = "1,2";
552 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/PolyAIExample.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/PolyAIExample";
553 | };
554 | name = Release;
555 | };
556 | 7B7B5B012BCCDFED0051FF95 /* Debug */ = {
557 | isa = XCBuildConfiguration;
558 | buildSettings = {
559 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
560 | CODE_SIGN_STYLE = Automatic;
561 | CURRENT_PROJECT_VERSION = 1;
562 | DEVELOPMENT_TEAM = CQ45U4X9K3;
563 | GENERATE_INFOPLIST_FILE = YES;
564 | IPHONEOS_DEPLOYMENT_TARGET = 17.0;
565 | MACOSX_DEPLOYMENT_TARGET = 14.0;
566 | MARKETING_VERSION = 1.0;
567 | PRODUCT_BUNDLE_IDENTIFIER = jamesRochabrun.PolyAIExampleUITests;
568 | PRODUCT_NAME = "$(TARGET_NAME)";
569 | SDKROOT = auto;
570 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
571 | SWIFT_EMIT_LOC_STRINGS = NO;
572 | SWIFT_VERSION = 5.0;
573 | TARGETED_DEVICE_FAMILY = "1,2";
574 | TEST_TARGET_NAME = PolyAIExample;
575 | };
576 | name = Debug;
577 | };
578 | 7B7B5B022BCCDFED0051FF95 /* Release */ = {
579 | isa = XCBuildConfiguration;
580 | buildSettings = {
581 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
582 | CODE_SIGN_STYLE = Automatic;
583 | CURRENT_PROJECT_VERSION = 1;
584 | DEVELOPMENT_TEAM = CQ45U4X9K3;
585 | GENERATE_INFOPLIST_FILE = YES;
586 | IPHONEOS_DEPLOYMENT_TARGET = 17.0;
587 | MACOSX_DEPLOYMENT_TARGET = 14.0;
588 | MARKETING_VERSION = 1.0;
589 | PRODUCT_BUNDLE_IDENTIFIER = jamesRochabrun.PolyAIExampleUITests;
590 | PRODUCT_NAME = "$(TARGET_NAME)";
591 | SDKROOT = auto;
592 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
593 | SWIFT_EMIT_LOC_STRINGS = NO;
594 | SWIFT_VERSION = 5.0;
595 | TARGETED_DEVICE_FAMILY = "1,2";
596 | TEST_TARGET_NAME = PolyAIExample;
597 | };
598 | name = Release;
599 | };
600 | /* End XCBuildConfiguration section */
601 |
602 | /* Begin XCConfigurationList section */
603 | 7B7B5AD02BCCDFEB0051FF95 /* Build configuration list for PBXProject "PolyAIExample" */ = {
604 | isa = XCConfigurationList;
605 | buildConfigurations = (
606 | 7B7B5AF82BCCDFED0051FF95 /* Debug */,
607 | 7B7B5AF92BCCDFED0051FF95 /* Release */,
608 | );
609 | defaultConfigurationIsVisible = 0;
610 | defaultConfigurationName = Release;
611 | };
612 | 7B7B5AFA2BCCDFED0051FF95 /* Build configuration list for PBXNativeTarget "PolyAIExample" */ = {
613 | isa = XCConfigurationList;
614 | buildConfigurations = (
615 | 7B7B5AFB2BCCDFED0051FF95 /* Debug */,
616 | 7B7B5AFC2BCCDFED0051FF95 /* Release */,
617 | );
618 | defaultConfigurationIsVisible = 0;
619 | defaultConfigurationName = Release;
620 | };
621 | 7B7B5AFD2BCCDFED0051FF95 /* Build configuration list for PBXNativeTarget "PolyAIExampleTests" */ = {
622 | isa = XCConfigurationList;
623 | buildConfigurations = (
624 | 7B7B5AFE2BCCDFED0051FF95 /* Debug */,
625 | 7B7B5AFF2BCCDFED0051FF95 /* Release */,
626 | );
627 | defaultConfigurationIsVisible = 0;
628 | defaultConfigurationName = Release;
629 | };
630 | 7B7B5B002BCCDFED0051FF95 /* Build configuration list for PBXNativeTarget "PolyAIExampleUITests" */ = {
631 | isa = XCConfigurationList;
632 | buildConfigurations = (
633 | 7B7B5B012BCCDFED0051FF95 /* Debug */,
634 | 7B7B5B022BCCDFED0051FF95 /* Release */,
635 | );
636 | defaultConfigurationIsVisible = 0;
637 | defaultConfigurationName = Release;
638 | };
639 | /* End XCConfigurationList section */
640 |
641 | /* Begin XCLocalSwiftPackageReference section */
642 | 7B7B5B032BCCE0400051FF95 /* XCLocalSwiftPackageReference ".." */ = {
643 | isa = XCLocalSwiftPackageReference;
644 | relativePath = ..;
645 | };
646 | /* End XCLocalSwiftPackageReference section */
647 |
648 | /* Begin XCSwiftPackageProductDependency section */
649 | 7B7B5B042BCCE0400051FF95 /* PolyAI */ = {
650 | isa = XCSwiftPackageProductDependency;
651 | productName = PolyAI;
652 | };
653 | /* End XCSwiftPackageProductDependency section */
654 | };
655 | rootObject = 7B7B5ACD2BCCDFEB0051FF95 /* Project object */;
656 | }
657 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 | 
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "originHash" : "c8eb6b2f915d3d6d156f171a65fbf05e9aa02f3d807715e75e83ab9646a62032",
3 | "pins" : [
4 | {
5 | "identity" : "swiftanthropic",
6 | "kind" : "remoteSourceControl",
7 | "location" : "https://github.com/jamesrochabrun/SwiftAnthropic",
8 | "state" : {
9 | "branch" : "main",
10 | "revision" : "2bfd7ca13fabc4fbb47c7a97d7a0c1b4831bffd2"
11 | }
12 | },
13 | {
14 | "identity" : "swiftopenai",
15 | "kind" : "remoteSourceControl",
16 | "location" : "https://github.com/jamesrochabrun/SwiftOpenAI",
17 | "state" : {
18 | "branch" : "main",
19 | "revision" : "e589864f644f10e5a7868174208d414f06a0d27c"
20 | }
21 | }
22 | ],
23 | "version" : 3
24 | }
25 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample/ApiKeysIntroView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ApiKeysIntroView.swift
3 | // PolyAIExample
4 | //
5 | // Created by James Rochabrun on 4/14/24.
6 | //
7 |
8 | import SwiftUI
9 | import PolyAI
10 |
11 | struct ApiKeyIntroView: View {
12 |
13 | @State private var anthropicAPIKey = ""
14 | @State private var openAIAPIKey = ""
15 | @State private var geminiAPIKey = ""
16 | @State private var groqAPIKey = ""
17 | @State private var openRouterAPIKey = ""
18 | @State private var ollamaLocalHostURL = ""
19 |
20 | @State private var anthropicConfigAdded: Bool = false
21 | @State private var openAIConfigAdded: Bool = false
22 | @State private var ollamaConfigAdded: Bool = false
23 | @State private var groqConfigAdded: Bool = false
24 | @State private var openRouterConfigAdded: Bool = false
25 | @State private var geminiConfigAdded: Bool = false
26 |
27 | @State private var configurations: [LLMConfiguration] = []
28 |
29 | private var canNotContinue: Bool {
30 | configurations.isEmpty
31 | }
32 |
33 | var body: some View {
34 | NavigationStack {
35 | VStack {
36 | Spacer()
37 | VStack(spacing: 24) {
38 | LLMConfigurationView(
39 | provider: "Anthropic",
40 | configurationAdded: $anthropicConfigAdded,
41 | apiKey: $anthropicAPIKey) {
42 | configurations.append(.anthropic(apiKey: anthropicAPIKey))
43 | }
44 | LLMConfigurationView(
45 | provider: "OpenAI",
46 | configurationAdded: $openAIConfigAdded,
47 | apiKey: $openAIAPIKey) {
48 | configurations.append(.openAI(.api(key: openAIAPIKey)))
49 | }
50 | LLMConfigurationView(
51 | provider: "Gemini",
52 | configurationAdded: $geminiConfigAdded,
53 | apiKey: $geminiAPIKey) {
54 | configurations.append(.openAI(.gemini(apiKey: geminiAPIKey)))
55 | }
56 | LLMConfigurationView(
57 | provider: "Groq",
58 | configurationAdded: $groqConfigAdded,
59 | apiKey: $groqAPIKey) {
60 | configurations.append(.openAI(.groq(apiKey: groqAPIKey)))
61 | }
62 | LLMConfigurationView(
63 | provider: "OpenRouter",
64 | configurationAdded: $openRouterConfigAdded,
65 | apiKey: $openRouterAPIKey) {
66 | configurations.append(.openAI(.openRouter(apiKey: openRouterAPIKey)))
67 | }
68 | LLMConfigurationView(
69 | provider: "Ollama",
70 | configurationAdded: $ollamaConfigAdded,
71 | apiKey: $ollamaLocalHostURL) {
72 | configurations.append(.openAI(.ollama(url: ollamaLocalHostURL)))
73 | }
74 | }
75 | .buttonStyle(.bordered)
76 | .padding()
77 | .textFieldStyle(.roundedBorder)
78 | NavigationLink(destination: OptionsListView(service: PolyAIServiceFactory.serviceWith(configurations, debugEnabled: true))) {
79 | Text("Continue")
80 | .padding()
81 | .padding(.horizontal, 48)
82 | .foregroundColor(.white)
83 | .background(
84 | Capsule()
85 | .foregroundColor(canNotContinue ? .gray.opacity(0.2) : .pink))
86 | }
87 | .disabled(canNotContinue)
88 | Spacer()
89 | }
90 | .padding()
91 | .navigationTitle("Enter API Keys or URLs")
92 | }
93 | }
94 | }
95 |
96 | struct LLMConfigurationView: View {
97 |
98 | let provider: String
99 | @Binding var configurationAdded: Bool
100 | @Binding var apiKey: String
101 | let addConfig: () -> Void
102 |
103 | var body: some View {
104 | VStack(alignment: .leading) {
105 | HStack {
106 | TextField("Enter \(provider) API Key or URL", text: $apiKey)
107 | Button {
108 | addConfig()
109 | configurationAdded = true
110 | } label: {
111 | Image(systemName: "plus")
112 | }
113 | .disabled(apiKey.isEmpty)
114 | }
115 | Text("\(provider) added to PolyAI 🚀")
116 | .opacity(configurationAdded ? 1 : 0)
117 | }
118 | }
119 | }
120 |
121 | #Preview {
122 | ApiKeyIntroView()
123 | }
124 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | },
8 | {
9 | "idiom" : "mac",
10 | "scale" : "1x",
11 | "size" : "16x16"
12 | },
13 | {
14 | "idiom" : "mac",
15 | "scale" : "2x",
16 | "size" : "16x16"
17 | },
18 | {
19 | "idiom" : "mac",
20 | "scale" : "1x",
21 | "size" : "32x32"
22 | },
23 | {
24 | "idiom" : "mac",
25 | "scale" : "2x",
26 | "size" : "32x32"
27 | },
28 | {
29 | "idiom" : "mac",
30 | "scale" : "1x",
31 | "size" : "128x128"
32 | },
33 | {
34 | "idiom" : "mac",
35 | "scale" : "2x",
36 | "size" : "128x128"
37 | },
38 | {
39 | "idiom" : "mac",
40 | "scale" : "1x",
41 | "size" : "256x256"
42 | },
43 | {
44 | "idiom" : "mac",
45 | "scale" : "2x",
46 | "size" : "256x256"
47 | },
48 | {
49 | "idiom" : "mac",
50 | "scale" : "1x",
51 | "size" : "512x512"
52 | },
53 | {
54 | "idiom" : "mac",
55 | "scale" : "2x",
56 | "size" : "512x512"
57 | }
58 | ],
59 | "info" : {
60 | "author" : "xcode",
61 | "version" : 1
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample/MessageDemoObservabble.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessageDemoObservabble.swift
3 | // PolyAIExample
4 | //
5 | // Created by James Rochabrun on 4/14/24.
6 | //
7 |
8 | import Foundation
9 | import PolyAI
10 | import SwiftUI
11 |
12 | @MainActor
13 | @Observable class MessageDemoObservable {
14 |
15 | let service: PolyAIService
16 | var message: String = ""
17 | var errorMessage: String = ""
18 | var isLoading = false
19 |
20 | init(service: PolyAIService) {
21 | self.service = service
22 | }
23 |
24 | func createMessage(
25 | parameters: LLMParameter) async throws
26 | {
27 | task = Task {
28 | do {
29 | isLoading = true
30 | let message = try await service.createMessage(parameters)
31 | isLoading = false
32 | self.message = message.contentDescription
33 | } catch {
34 | self.errorMessage = "\(error)"
35 | }
36 | }
37 | }
38 |
39 | func streamMessage(
40 | parameters: LLMParameter) async throws
41 | {
42 | task = Task {
43 | do {
44 | isLoading = true
45 | let stream = try await service.streamMessage(parameters)
46 | isLoading = false
47 | for try await result in stream {
48 | self.message += result.content ?? ""
49 | }
50 | } catch {
51 | self.errorMessage = "\(error)"
52 | }
53 | }
54 | }
55 |
56 | func cancelStream() {
57 | task?.cancel()
58 | }
59 |
60 | func clearMessage() {
61 | message = ""
62 | }
63 |
64 | // MARK: Private
65 |
66 | private var task: Task<Void, Never>? = nil
67 |
68 | }
69 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample/MessageDemoView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MessageDemoView.swift
3 | // PolyAIExample
4 | //
5 | // Created by James Rochabrun on 4/14/24.
6 | //
7 |
8 | import Foundation
9 | import PhotosUI
10 | import PolyAI
11 | import SwiftUI
12 |
13 | @MainActor
14 | struct MessageDemoView: View {
15 |
16 | let observable: MessageDemoObservable
17 | @State private var prompt = ""
18 | @State private var selectedItems: [PhotosPickerItem] = []
19 | @State private var selectedImages: [Image] = []
20 | @State private var selectedImagesEncoded: [String] = []
21 | @State private var selectedSegment: LLM = .anthropic
22 |
23 | enum LLM: String, Identifiable, CaseIterable {
24 | case openAI
25 | case anthropic
26 | case gemini
27 | case llama3
28 | case groq
29 | case openRouter
30 | case deepSeek
31 |
32 | var id: String { rawValue }
33 | }
34 |
35 | var body: some View {
36 | ScrollView {
37 | VStack {
38 | picker
39 | Text(observable.errorMessage)
40 | .foregroundColor(.red)
41 | messageView
42 | }
43 | .padding()
44 | }
45 | .overlay(
46 | Group {
47 | if observable.isLoading {
48 | ProgressView()
49 | } else {
50 | EmptyView()
51 | }
52 | }
53 | ).safeAreaInset(edge: .bottom) {
54 | VStack(spacing: 0) {
55 | selectedImagesView
56 | textArea
57 | }
58 | }
59 | }
60 |
61 | var textArea: some View {
62 | HStack(spacing: 4) {
63 | TextField("Enter prompt", text: $prompt, axis: .vertical)
64 | .textFieldStyle(.roundedBorder)
65 | .padding()
66 | photoPicker
67 | Button {
68 | Task {
69 | let parameters: LLMParameter
70 | switch selectedSegment {
71 | case .openAI:
72 | parameters = .openAI(
73 | model: .o1Preview,
74 | messages: [
75 | .init(role: .user, content: prompt)
76 | ])
77 | case .groq:
78 | parameters = .groq(
79 | model: "deepseek-r1-distill-llama-70b",
80 | messages: [
81 | .init(role: .user, content: prompt)
82 | ])
83 | case .deepSeek:
84 | parameters = .deepSeek(
85 | model: "deepseek-reasoner",
86 | messages: [
87 | .init(role: .user, content: prompt)
88 | ])
89 | case .openRouter:
90 | parameters = .openRouter(
91 | model: "deepseek/deepseek-r1:free",
92 | messages: [
93 | .init(role: .user, content: prompt)
94 | ])
95 | case .anthropic:
96 | parameters = .anthropic(
97 | model: .claude3Sonnet,
98 | messages: [
99 | .init(role: .user, content: prompt)
100 | ],
101 | maxTokens: 1024)
102 | case .gemini:
103 | parameters = .gemini(
104 | model: "gemini-1.0-pro", messages: [
105 | .init(role: .user, content: prompt)
106 | ], maxTokens: 2000)
107 | case .llama3:
108 | parameters = .ollama(
109 | model: "llama3.1",
110 | messages: [
111 | .init(role: .user, content: prompt)
112 | ],
113 | maxTokens: 1000)
114 | }
115 | try await observable.streamMessage(parameters: parameters)
116 | }
117 | } label: {
118 | Image(systemName: "paperplane")
119 | }
120 | .keyboardShortcut(.return)
121 | .buttonStyle(.bordered)
122 | }
123 | .padding()
124 | }
125 |
126 | var picker: some View {
127 | Picker("Options", selection: $selectedSegment) {
128 | ForEach(LLM.allCases) { llm in
129 | Text(llm.rawValue).tag(llm)
130 | }
131 | }
132 | .pickerStyle(SegmentedPickerStyle())
133 | .padding()
134 | }
135 |
136 | var messageView: some View {
137 | VStack(spacing: 24) {
138 | HStack {
139 | Button("Cancel") {
140 | observable.cancelStream()
141 | }
142 | Button("Clear Message") {
143 | observable.clearMessage()
144 | }
145 | }
146 | Text(observable.message)
147 | }
148 | .buttonStyle(.bordered)
149 | }
150 |
151 | var photoPicker: some View {
152 | PhotosPicker(selection: $selectedItems, matching: .images) {
153 | Image(systemName: "photo")
154 | }
155 | .onChange(of: selectedItems) {
156 | Task {
157 | selectedImages.removeAll()
158 | for item in selectedItems {
159 |
160 | if let data = try? await item.loadTransferable(type: Data.self) {
161 | if let uiImage = UIImage(data: data), let resizedImageData = uiImage.jpegData(compressionQuality: 0.7) {
162 | // Make sure the resized image is below the size limit
163 | // This is needed as Claude allows a max of 5Mb size per image.
164 | if resizedImageData.count < 5_242_880 { // 5 MB in bytes
165 | let base64String = resizedImageData.base64EncodedString()
166 | selectedImagesEncoded.append(base64String)
167 | let image = Image(uiImage: UIImage(data: resizedImageData)!)
168 | selectedImages.append(image)
169 | } else {
170 | // Handle the error - maybe resize to an even smaller size or show an error message to the user
171 | }
172 | }
173 | }
174 | }
175 | }
176 | }
177 | }
178 |
179 | var selectedImagesView: some View {
180 | HStack(spacing: 0) {
181 |     ForEach(0..<selectedImages.count, id: \.self) { index in
182 |       selectedImages[index]
183 |     }
184 |   }
185 | }
186 | }
187 | 
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample/PolyAIExample.entitlements:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>com.apple.security.app-sandbox</key>
6 | 	<true/>
7 | 	<key>com.apple.security.files.user-selected.read-only</key>
8 | 	<true/>
9 | </dict>
10 | </plist>
11 | 
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample/PolyAIExampleApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PolyAIExampleApp.swift
3 | // PolyAIExample
4 | //
5 | // Created by James Rochabrun on 4/14/24.
6 | //
7 |
8 | import SwiftUI
9 | import PolyAI
10 |
11 | @main
12 | struct PolyAIExampleApp: App {
13 | var body: some Scene {
14 | WindowGroup {
15 | ApiKeyIntroView()
16 | }
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExample/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExampleTests/PolyAIExampleTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PolyAIExampleTests.swift
3 | // PolyAIExampleTests
4 | //
5 | // Created by James Rochabrun on 4/14/24.
6 | //
7 |
8 | import XCTest
9 |
10 | final class PolyAIExampleTests: XCTestCase {
11 |
12 | override func setUpWithError() throws {
13 | // Put setup code here. This method is called before the invocation of each test method in the class.
14 | }
15 |
16 | override func tearDownWithError() throws {
17 | // Put teardown code here. This method is called after the invocation of each test method in the class.
18 | }
19 |
20 | func testExample() throws {
21 | // This is an example of a functional test case.
22 | // Use XCTAssert and related functions to verify your tests produce the correct results.
23 | // Any test you write for XCTest can be annotated as throws and async.
24 | // Mark your test throws to produce an unexpected failure when your test encounters an uncaught error.
25 | // Mark your test async to allow awaiting for asynchronous code to complete. Check the results with assertions afterwards.
26 | }
27 |
28 | func testPerformanceExample() throws {
29 | // This is an example of a performance test case.
30 | measure {
31 | // Put the code you want to measure the time of here.
32 | }
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExampleUITests/PolyAIExampleUITests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PolyAIExampleUITests.swift
3 | // PolyAIExampleUITests
4 | //
5 | // Created by James Rochabrun on 4/14/24.
6 | //
7 |
8 | import XCTest
9 |
10 | final class PolyAIExampleUITests: XCTestCase {
11 |
12 | override func setUpWithError() throws {
13 | // Put setup code here. This method is called before the invocation of each test method in the class.
14 |
15 | // In UI tests it is usually best to stop immediately when a failure occurs.
16 | continueAfterFailure = false
17 |
18 | // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
19 | }
20 |
21 | override func tearDownWithError() throws {
22 | // Put teardown code here. This method is called after the invocation of each test method in the class.
23 | }
24 |
25 | func testExample() throws {
26 | // UI tests must launch the application that they test.
27 | let app = XCUIApplication()
28 | app.launch()
29 |
30 | // Use XCTAssert and related functions to verify your tests produce the correct results.
31 | }
32 |
33 | func testLaunchPerformance() throws {
34 | if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) {
35 | // This measures how long it takes to launch your application.
36 | measure(metrics: [XCTApplicationLaunchMetric()]) {
37 | XCUIApplication().launch()
38 | }
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/PolyAIExample/PolyAIExampleUITests/PolyAIExampleUITestsLaunchTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PolyAIExampleUITestsLaunchTests.swift
3 | // PolyAIExampleUITests
4 | //
5 | // Created by James Rochabrun on 4/14/24.
6 | //
7 |
8 | import XCTest
9 |
10 | final class PolyAIExampleUITestsLaunchTests: XCTestCase {
11 |
12 | override class var runsForEachTargetApplicationUIConfiguration: Bool {
13 | true
14 | }
15 |
16 | override func setUpWithError() throws {
17 | continueAfterFailure = false
18 | }
19 |
20 | func testLaunch() throws {
21 | let app = XCUIApplication()
22 | app.launch()
23 |
24 | // Insert steps here to perform after app launch but before taking a screenshot,
25 | // such as logging into a test account or navigating somewhere in the app
26 |
27 | let attachment = XCTAttachment(screenshot: app.screenshot())
28 | attachment.name = "Launch Screen"
29 | attachment.lifetime = .keepAlways
30 | add(attachment)
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PolyAI
2 |
3 |
4 | 
5 | [MIT License](https://lbesson.mit-license.org/)
6 | [Swift](https://github.com/apple/swift)
7 | [SwiftUI](https://developer.apple.com/documentation/swiftui)
8 | [Xcode](https://developer.apple.com/xcode/)
9 | [Swift Package Manager](https://github.com/apple/swift-package-manager)
10 | [Buy me a coffee](https://buymeacoffee.com/jamesrochabrun)
11 |
12 | An open-source Swift package that simplifies LLM message completions and is designed for multi-model applications. It supports multiple providers through OpenAI-compatible APIs and the Anthropic API, letting Swift developers integrate different AI models seamlessly.
13 |
14 | ## Description
15 |
16 | ### OpenAI Compatibility
17 |
18 | Easily call various LLM APIs using the OpenAI format, with built-in support for multiple models and providers through the [SwiftOpenAI](https://github.com/jamesrochabrun/SwiftOpenAI) package.
19 |
20 | Supported Providers:
21 |
22 | - OpenAI
23 | - Azure
24 | - Groq
25 | - DeepSeek
26 | - Google Gemini
27 | - OpenRouter
28 | - Ollama
29 | - [llama3](https://ollama.com/library/llama3)
30 | - [mistral](https://ollama.com/library/mistral)
31 |
32 | Note: OpenAI-compatible configurations are identified by the `.openAI` case prefix on the `LLMConfiguration` enum.
33 |
34 | Example:
35 |
36 | ```swift
37 | .openAI(.gemini(apiKey: "your_gemini_api_key_here"))
38 | ```
39 |
40 | ### Anthropic
41 |
42 | Additionally, the Anthropic API is supported through the [SwiftAnthropic](https://github.com/jamesrochabrun/SwiftAnthropic) package.
43 |
44 | ## Table of Contents
45 |
46 | - [Installation](#installation)
47 | - [Usage](#usage)
48 | - [Message](#message)
49 | - [Collaboration](#collaboration)
50 | - [OpenAI Azure](#openAI-azure)
51 | - [Groq](#groq)
52 | - [OpenRouter](#open-router)
53 | - [DeepSeek](#deepseek)
54 | - [OpenAI AIProxy](#openai-aiproxy)
55 | - [Ollama](#ollama)
56 |
57 | ## Installation
58 |
59 | ### Swift Package Manager
60 |
61 | 1. Open your Swift project in Xcode.
62 | 2. Go to `File` -> `Add Package Dependency`.
63 | 3. In the search bar, enter [this URL](https://github.com/jamesrochabrun/PolyAI).
64 | 4. Choose the version you'd like to install.
65 | 5. Click `Add Package`.
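
Alternatively, you can declare the dependency in your own `Package.swift` manifest. A minimal sketch follows; the target name `MyApp` and the `1.0.0` version are placeholders, so point `from:` at whichever PolyAI release you want. The platform minimums match PolyAI's own manifest (iOS 15, macOS 13).

```swift
// swift-tools-version: 5.9
import PackageDescription

let package = Package(
    name: "MyApp", // placeholder target name
    platforms: [.iOS(.v15), .macOS(.v13)], // PolyAI's minimum platforms
    dependencies: [
        // Version is illustrative; use the latest PolyAI release.
        .package(url: "https://github.com/jamesrochabrun/PolyAI", from: "1.0.0")
    ],
    targets: [
        .target(
            name: "MyApp",
            dependencies: [.product(name: "PolyAI", package: "PolyAI")])
    ]
)
```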
66 |
67 | ### Important
68 |
69 | ⚠️ Please take precautions to keep your API keys secure.
70 |
71 | > Remember that your API keys are a secret! Do not share them with others or expose
72 | > them in any client-side code (browsers, apps). Production requests must be
73 | > routed through your backend server where your API keys can be securely
74 | > loaded from an environment variable or key management service.
75 |
76 | ## Functionalities
77 |
78 | - [x] Chat completions
79 | - [x] Chat completions with stream
80 | - [ ] Tool use
81 | - [ ] Image as input
82 |
83 | ## Usage
84 |
85 | To interface with different LLMs, you only need to supply the corresponding LLM configuration and adjust the parameters accordingly.
86 |
87 | First, import the PolyAI package:
88 |
89 | ```swift
90 | import PolyAI
91 | ```
92 |
93 | Then, define the LLM configurations.
94 |
95 | Currently, the package supports OpenAI, Azure, Anthropic, Gemini, Groq, DeepSeek, and OpenRouter. Additionally, you can use Ollama to run local models like Llama 3 or Mistral through OpenAI-compatible endpoints.
96 |
97 | ```swift
98 |
99 | // OpenAI
100 | let openAIConfiguration: LLMConfiguration = .openAI(.api(key: "your_openai_api_key_here"))
101 |
102 | // Gemini
103 | let geminiConfiguration: LLMConfiguration = .openAI(.gemini(apiKey: "your_gemini_api_key_here"))
104 |
105 | // Groq
106 | let groqConfiguration: LLMConfiguration = .openAI(.groq(apiKey: "your_groq_api_key_here"))
107 |
108 | // Ollama
109 | let ollamaConfiguration: LLMConfiguration = .openAI(.ollama(url: "http://localhost:11434"))
110 |
111 | // OpenRouter
112 | let openRouterConfiguration: LLMConfiguration = .openAI(.openRouter(apiKey: "your_open-router_api_key_here"))
113 |
114 | // DeepSeek
115 | let deepSeekConfiguration: LLMConfiguration = .openAI(.deepSeek(apiKey: "your_deepseek_api_key_here"))
116 |
117 | // Anthropic
118 | let anthropicConfiguration: LLMConfiguration = .anthropic(apiKey: "your_anthropic_api_key_here")
119 |
120 | let configurations = [openAIConfiguration, anthropicConfiguration, geminiConfiguration, ollamaConfiguration]
121 | ```
122 |
123 | With the configurations set, initialize the service:
124 |
125 | ```swift
126 | let service = PolyAIServiceFactory.serviceWith(configurations)
127 | ```
128 |
129 | Now, you have access to all the models offered by these providers in a single package. 🚀
130 |
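131 | If you want to inspect the underlying requests while developing, the factory also accepts a `debugEnabled` flag that logs requests when set to `true`:
132 | 
133 | ```swift
134 | let service = PolyAIServiceFactory.serviceWith(configurations, debugEnabled: true)
135 | ```
136 | 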
131 | ## Message
132 |
133 | To send a message using OpenAI:
134 |
135 | ```swift
136 | let prompt = "How are you today?"
137 | let parameters: LLMParameter = .openAI(model: .o1Preview, messages: [.init(role: .user, content: prompt)])
138 | let stream = try await service.streamMessage(parameters)
139 | ```
140 | To interact with Anthropic instead, all you need to do is change just one line of code! 🔥
141 |
142 | ```swift
143 | let prompt = "How are you today?"
144 | let parameters: LLMParameter = .anthropic(model: .claude3Sonnet, messages: [.init(role: .user, content: prompt)], maxTokens: 1024)
145 | let stream = try await service.streamMessage(parameters)
146 | ```
147 |
148 | To interact with Gemini instead, all you need to do (again) is change just one line of code! 🔥
149 |
150 | ```swift
151 | let prompt = "How are you today?"
152 | let parameters: LLMParameter = .gemini(model: "gemini-1.5-pro-latest", messages: [.init(role: .user, content: prompt)], maxTokens: 2000)
153 | let stream = try await service.streamMessage(parameters)
154 | ```
155 |
156 | To interact with local models using Ollama, all you need to do (again) is change just one line of code! 🔥
157 |
158 | ```swift
159 | let prompt = "How are you today?"
160 | let parameters: LLMParameter = .ollama(model: "llama3", messages: [.init(role: .user, content: prompt)], maxTokens: 2000)
161 | let stream = try await service.streamMessage(parameters)
162 | ```
163 |
164 | As demonstrated, simply switch the `LLMParameter` to the desired provider.
165 |
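166 | Each element of the returned stream conforms to `LLMMessageStreamResponse`, so one way to accumulate the streamed text is:
167 | 
168 | ```swift
169 | var response = ""
170 | for try await chunk in stream {
171 |   response += chunk.content ?? ""
172 | }
173 | ```
174 | 
175 | If you prefer a single, non-streamed response, use `createMessage` instead; the result conforms to `LLMMessageResponse` and exposes the generated text through `contentDescription`:
176 | 
177 | ```swift
178 | let message = try await service.createMessage(parameters)
179 | let text = message.contentDescription
180 | ```
181 | 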
166 | ## OpenAI Azure
167 |
168 | To access the OpenAI API via Azure, you can use the following configuration setup.
169 |
170 | ```swift
171 | let azureConfiguration: LLMConfiguration = .openAI(.azure(configuration: .init(resourceName: "YOUR_RESOURCE_NAME", openAIAPIKey: .apiKey("YOUR_API_KEY"), apiVersion: "THE_API_VERSION")))
172 | ```
173 |
174 | More information can be found [here](https://github.com/jamesrochabrun/SwiftOpenAI?tab=readme-ov-file#azure-openai).
175 |
176 | ## Groq
177 |
178 | To access Groq, use the following configuration setup.
179 |
180 | ```swift
181 | let groqConfiguration: LLMConfiguration = .openAI(.groq(apiKey: "your_groq_api_key_here"))
182 | ```
183 | More information can be found [here](https://github.com/jamesrochabrun/SwiftOpenAI?tab=readme-ov-file#groq).
184 |
185 | ## OpenRouter
186 |
187 | To access OpenRouter, use the following configuration setup.
188 |
189 | ```swift
190 | let openRouterConfiguration: LLMConfiguration = .openAI(.openRouter(apiKey: "your_open-router_api_key_here"))
191 | ```
192 | More information can be found [here](https://github.com/jamesrochabrun/SwiftOpenAI?tab=readme-ov-file#openrouter).
193 |
194 | ## DeepSeek
195 |
196 | To access DeepSeek, use the following configuration setup.
197 |
198 | ```swift
199 | let deepSeekConfiguration: LLMConfiguration = .openAI(.deepSeek(apiKey: "your_deepseek_api_key_here"))
200 | ```
201 | More information can be found [here](https://github.com/jamesrochabrun/SwiftOpenAI?tab=readme-ov-file#deepseek).
202 |
203 | ## OpenAI AIProxy
204 |
205 | To access the OpenAI API via AIProxy, use the following configuration setup.
206 |
207 | ```swift
208 | let aiProxyConfiguration: LLMConfiguration = .openAI(.aiProxy(aiproxyPartialKey: "hardcode_partial_key_here", aiproxyDeviceCheckBypass: "hardcode_device_check_bypass_here"))
209 | ```
210 |
211 | More information can be found [here](https://github.com/jamesrochabrun/SwiftOpenAI?tab=readme-ov-file#aiproxy).
212 |
213 | ## Ollama
214 |
215 | To interact with local models using [Ollama OpenAI compatibility endpoints](https://ollama.com/blog/openai-compatibility), use the following configuration setup.
216 |
217 | 1. Download [Ollama](https://ollama.com/) if you don't have it installed already.
218 | 2. Download the model you need, e.g. for `llama3`, run in the terminal:
219 | ```
220 | ollama pull llama3
221 | ```
222 |
223 | Once you have the model installed locally, you are ready to use PolyAI!
224 |
225 | ```swift
226 | let ollamaConfiguration: LLMConfiguration = .openAI(.ollama(url: "http://localhost:11434"))
227 | ```
228 | More information can be found [here](https://github.com/jamesrochabrun/SwiftOpenAI?tab=readme-ov-file#ollama).
229 |
230 | ## Collaboration
231 |
232 | Open a PR for any proposed change, pointing it to the `main` branch.
233 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Interfaces/Parameters/LLMMessageParameter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LLMMessageParameter.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/6/24.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: LLMMessageParameter
11 |
12 | /// A protocol defining the basic requirements for a message parameter used with LLM services.
13 | public protocol LLMMessageParameter {
14 |
15 | var role: String { get }
16 | var content: String { get }
17 | }
18 |
19 | // MARK: LLMMessage
20 |
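21 | /// A concrete, provider-agnostic message used to send content to any LLM supported by PolyAI.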
21 | public struct LLMMessage: LLMMessageParameter {
22 |
23 | /// The role of the sender in the conversation, such as "user" or "assistant".
24 | public var role: String
25 |
26 | /// The content of the message being sent.
27 | public var content: String
28 |
29 | public enum Role: String {
30 | case user
31 | case assistant
32 | case system
33 | }
34 |
35 | /// Initializes a new message with specified role and content.
36 | /// - Parameters:
37 | /// - role: The role of the sender of the message.
38 | /// - content: The content of the message.
39 | public init(
40 | role: Role,
41 | content: String)
42 | {
43 | self.role = role.rawValue
44 | self.content = content
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Interfaces/Parameters/LLMParameter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LLMParameter.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/6/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftOpenAI
10 | import SwiftAnthropic
11 |
12 | /// An enum representing the parameters required to interface with different LLM services.
13 | public enum LLMParameter {
14 |
15 | /// Represents a configuration for interacting with OpenAI's models.
16 | /// - Parameters:
17 | /// - model: The specific model of OpenAI to use.
18 | /// - messages: An array of messages to send to the model.
19 | /// - maxTokens: An optional maximum number of tokens to generate. Defaults to `nil`.
20 | case openAI(model: SwiftOpenAI.Model, messages: [LLMMessage], maxTokens: Int? = nil)
21 |
22 |   /// Represents a configuration for interacting with OpenAI's models hosted on Azure.
23 | /// - Parameters:
24 | /// - model: The specific model of OpenAI to use.
25 | /// - messages: An array of messages to send to the model.
26 | /// - maxTokens: An optional maximum number of tokens to generate. Defaults to `nil`.
27 | case azure(model: SwiftOpenAI.Model, messages: [LLMMessage], maxTokens: Int? = nil)
28 |
29 |   /// Represents a configuration for interacting with OpenAI's models through AIProxy.
30 | /// - Parameters:
31 | /// - model: The specific model of OpenAI to use.
32 | /// - messages: An array of messages to send to the model.
33 | /// - maxTokens: An optional maximum number of tokens to generate. Defaults to `nil`.
34 | case aiProxy(model: SwiftOpenAI.Model, messages: [LLMMessage], maxTokens: Int? = nil)
35 |
36 | /// Represents a configuration for interacting with Gemini's models.
37 | /// - Parameters:
38 | /// - model: The specific model of Gemini to use.
39 | /// - messages: An array of messages to send to the model.
40 | /// - maxTokens: The maximum number of tokens to generate.
41 | case gemini(model: String, messages: [LLMMessage], maxTokens: Int? = nil)
42 |
43 |   /// Represents a configuration for interacting with Groq's models.
44 | /// - Parameters:
45 | /// - model: The specific model of Groq to use.
46 | /// - messages: An array of messages to send to the model.
47 | /// - maxTokens: The maximum number of tokens to generate.
48 | case groq(model: String, messages: [LLMMessage], maxTokens: Int? = nil)
49 |
50 | /// Represents a configuration for interacting with DeepSeek's models.
51 | /// - Parameters:
52 | /// - model: The specific model of DeepSeek to use.
53 | /// - messages: An array of messages to send to the model.
54 | /// - maxTokens: The maximum number of tokens to generate.
55 | case deepSeek(model: String, messages: [LLMMessage], maxTokens: Int? = nil)
56 |
57 | /// Represents a configuration for interacting with OpenRouter's models.
58 | /// - Parameters:
59 | /// - model: The specific model of OpenRouter to use.
60 | /// - messages: An array of messages to send to the model.
61 | /// - maxTokens: The maximum number of tokens to generate.
62 | case openRouter(model: String, messages: [LLMMessage], maxTokens: Int? = nil)
63 |
64 | /// Represents a configuration for interacting with Anthropic's models.
65 | /// - Parameters:
66 | /// - model: The specific model of Anthropic to use.
67 | /// - messages: An array of messages to send to the model.
68 | /// - maxTokens: The maximum number of tokens to generate.
69 | case anthropic(model: SwiftAnthropic.Model, messages: [LLMMessage], maxTokens: Int)
70 |
71 | /// Represents a configuration for interacting with Ollama's models.
72 | /// - Parameters:
73 | /// - model: The specific model. e.g: "llama3"
74 | /// - messages: An array of messages to send to the model.
75 | /// - maxTokens: The maximum number of tokens to generate.
76 | case ollama(model: String, messages: [LLMMessage], maxTokens: Int)
77 |
78 | /// A computed property that returns the name of the LLM service based on the case.
79 | var llmService: String {
80 | switch self {
81 | case .openAI: return "OpenAI"
82 | case .anthropic: return "Anthropic"
83 | case .gemini: return "Gemini"
84 | case .ollama(let model, _, _): return model
85 | case .azure: return "Azure"
86 | case .aiProxy: return "AIProxy"
87 | case .groq: return "Groq"
88 | case .deepSeek: return "DeepSeek"
89 | case .openRouter: return "OpenRouter"
90 | }
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Interfaces/Response/Message/LLMMessageResponse+Anthropic.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LLMMessageResponse+Anthropic.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/15/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftAnthropic
10 |
11 | // MARK: Anthropic
12 |
13 | extension MessageResponse: LLMMessageResponse {
14 | public var id: String? {
15 | nil
16 | }
17 |
18 | public var model: String? {
19 | nil
20 | }
21 |
22 | public var createdAt: Int? {
23 | nil
24 | }
25 |
26 | public var contentDescription: String {
27 | content.map { contentItem in
28 | switch contentItem {
29 | case let .text(text, _):
30 | return text
31 |       case let .toolUse(toolUse):
32 |         return "Tool: \(toolUse.name)"
33 | case .thinking:
34 | return ""
35 | }
36 | }.first ?? ""
37 | }
38 |
39 | public var usageMetrics: UsageMetrics {
40 | ChatUsageMetrics(inputTokens: usage.inputTokens ?? 0, outputTokens: usage.outputTokens, totalTokens: nil)
41 | }
42 |
43 | public var tools: [ToolUsage] {
44 | []
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Interfaces/Response/Message/LLMMessageResponse+OpenAI.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LLMMessageResponse+OpenAI.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/15/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftOpenAI
10 |
11 | // MARK: OpenAI
12 |
13 | extension ChatCompletionObject: LLMMessageResponse {
14 | public var createdAt: Int? {
15 | created
16 | }
17 |
18 | public var contentDescription: String {
19 | choices?.first?.message?.content ?? ""
20 | }
21 |
22 | public var usageMetrics: UsageMetrics {
23 | ChatUsageMetrics(
24 | inputTokens: usage?.promptTokens ?? 0,
25 | outputTokens: usage?.completionTokens ?? 0,
26 | totalTokens: usage?.totalTokens ?? 0
27 | )
28 | }
29 |
30 | public var tools: [ToolUsage] {
31 | []
32 | }
33 |
34 | public var role: String {
35 | choices?.first?.message?.role ?? "unknown"
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Interfaces/Response/Message/LLMMessageResponse.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LLMMessageResponse.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/6/24.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: LLMMessageResponse
11 |
12 | /// A protocol defining the required properties for a response from an LLM service.
13 | public protocol LLMMessageResponse {
14 | /// A unique identifier for the message.
15 | var id: String? { get }
16 |
17 | /// The model of the LLM that processed the message.
18 | var model: String? { get }
19 |
20 | /// The role associated with the message, such as "user" or "assistant".
21 | var role: String { get }
22 |
23 | /// The creation time of the message as an epoch timestamp, optional.
24 | var createdAt: Int? { get }
25 |
26 | /// A description of the message's content.
27 | var contentDescription: String { get }
28 |
29 | /// Metrics detailing the usage of the LLM in terms of tokens.
30 | var usageMetrics: UsageMetrics { get }
31 |
32 | /// Tools to be used for function calling.
33 | var tools: [ToolUsage] { get }
34 | }
35 |
36 | /// A protocol defining metrics related to token usage by an LLM.
37 | public protocol UsageMetrics {
38 | /// The number of input tokens provided to the LLM.
39 | var inputTokens: Int { get }
40 |
41 | /// The number of output tokens produced by the LLM.
42 | var outputTokens: Int { get }
43 |
44 | /// The total number of tokens used, optional.
45 | var totalTokens: Int? { get }
46 | }
47 |
48 | /// A structure implementing the UsageMetrics protocol for chat applications.
49 | struct ChatUsageMetrics: UsageMetrics {
50 | let inputTokens: Int
51 | let outputTokens: Int
52 | let totalTokens: Int?
53 | }
54 |
55 | /// A protocol defining the properties required for tracking the usage of tools within an LLM service.
56 | public protocol ToolUsage {
57 | /// An optional unique identifier for the tool.
58 | var toolId: String? { get }
59 |
60 | /// The name of the tool used.
61 | var toolName: String { get }
62 |
63 | /// A dictionary containing inputs provided to the tool, if applicable.
64 | var toolInput: [String: String]? { get }
65 | }
66 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Interfaces/Response/Stream/LLMMessageStreamResponse+Anthropic.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LLMMessageStreamResponse+Anthropic.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/15/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftAnthropic
10 |
11 | // MARK: Anthropic
12 |
13 | extension MessageStreamResponse: LLMMessageStreamResponse {
14 |
15 | public var content: String? {
16 | delta?.text
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Interfaces/Response/Stream/LLMMessageStreamResponse+OpenAI.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LLMMessageStreamResponse+OpenAI.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/15/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftOpenAI
10 |
11 | // MARK: OpenAI
12 |
13 | extension ChatCompletionChunkObject: LLMMessageStreamResponse {
14 | public var content: String? {
15 | choices?.first?.delta?.content
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Interfaces/Response/Stream/LLMMessageStreamResponse.swift:
--------------------------------------------------------------------------------
1 | //
2 | // LLMMessageStreamResponse.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/14/24.
6 | //
7 |
8 | import Foundation
9 |
10 | // MARK: LLMMessageStreamResponse
11 |
12 | /// A protocol defining the essential property for a response that streams from an LLM service.
13 | public protocol LLMMessageStreamResponse {
14 | /// The content of the response, which may be nil if the response does not contain textual data.
15 | var content: String? { get }
16 | }
17 |
--------------------------------------------------------------------------------
/Sources/PolyAI/PolyAI.swift:
--------------------------------------------------------------------------------
1 | // The Swift Programming Language
2 | // https://docs.swift.org/swift-book
3 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Service/DefaultPolyAIService.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DefaultPolyAIService.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/6/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftAnthropic
10 | import SwiftOpenAI
11 |
12 | // MARK: Error
13 |
14 | enum PolyAIError: Error {
15 | case missingLLMConfiguration(String)
16 | }
17 |
18 | struct DefaultPolyAIService: PolyAIService {
19 |
20 | // OpenAI compatible
21 | private var openAIService: OpenAIService?
22 | private var azureOpenAIService: OpenAIService?
23 | private var aiProxyOpenAIService: OpenAIService?
24 | private var geminiService: OpenAIService?
25 | private var groqService: OpenAIService?
26 | private var deepSeekService: OpenAIService?
27 | private var openRouterService: OpenAIService?
28 | private var ollamaOpenAIServiceCompatible: OpenAIService?
29 |
30 | // Anthropic
31 | private var anthropicService: AnthropicService?
32 |
33 | init(configurations: [LLMConfiguration],
34 | debugEnabled: Bool = false)
35 | {
36 | for configuration in configurations {
37 | switch configuration {
38 | case .openAI(let configuration):
39 | switch configuration {
40 | case .api(let key, let organizationID, let configuration, let decoder):
41 | openAIService = OpenAIServiceFactory.service(apiKey: key, organizationID: organizationID, configuration: configuration, decoder: decoder, debugEnabled: debugEnabled)
42 |
43 | case .azure(let azureConfiguration, let urlSessionConfiguration, let decoder):
44 | azureOpenAIService = OpenAIServiceFactory.service(azureConfiguration: azureConfiguration, urlSessionConfiguration: urlSessionConfiguration, decoder: decoder, debugEnabled: debugEnabled)
45 |
46 | case .aiProxy(let aiproxyPartialKey, let aiproxyClientID):
47 | aiProxyOpenAIService = OpenAIServiceFactory.service(aiproxyPartialKey: aiproxyPartialKey, aiproxyClientID: aiproxyClientID, debugEnabled: debugEnabled)
48 |
49 | case .gemini(let apiKey, _, _):
50 | let baseURL = "https://generativelanguage.googleapis.com"
51 | let version = "v1beta"
52 |
53 | let service = OpenAIServiceFactory.service(
54 | apiKey: apiKey,
55 | overrideBaseURL: baseURL,
56 | overrideVersion: version,
57 | debugEnabled: debugEnabled)
58 | geminiService = service
59 |
60 | case .groq(apiKey: let apiKey, _, _):
61 | groqService = OpenAIServiceFactory.service(
62 | apiKey: apiKey,
63 | overrideBaseURL: "https://api.groq.com/",
64 | proxyPath: "openai",
65 | debugEnabled: debugEnabled)
66 |
67 | case .deepSeek(apiKey: let apiKey, configuration: _, decoder: _):
68 | deepSeekService = OpenAIServiceFactory.service(
69 | apiKey: apiKey,
70 | overrideBaseURL: "https://api.deepseek.com",
71 | debugEnabled: debugEnabled)
72 |
73 | case .openRouter(apiKey: let apiKey, _, _, let extraHeaders):
74 | openRouterService = OpenAIServiceFactory.service(
75 | apiKey: apiKey,
76 | overrideBaseURL: "https://openrouter.ai",
77 | proxyPath: "api",
78 | extraHeaders: extraHeaders,
79 | debugEnabled: debugEnabled)
80 |
81 | case .ollama(let url):
82 | ollamaOpenAIServiceCompatible = OpenAIServiceFactory.service(baseURL: url, debugEnabled: debugEnabled)
83 | }
84 |
85 | case .anthropic(let apiKey, let configuration, let betaHeaders):
86 | anthropicService = AnthropicServiceFactory.service(
87 | apiKey: apiKey,
88 | betaHeaders: betaHeaders,
89 | configuration: configuration,
90 | debugEnabled: debugEnabled)
91 |
92 | }
93 | }
94 | }
95 |
96 | // MARK: Message
97 |
98 | func createMessage(
99 | _ parameter: LLMParameter)
100 | async throws -> LLMMessageResponse
101 | {
102 | switch parameter {
103 | case .groq(let model, let messages, let maxTokens),
104 | .gemini(let model, let messages, let maxTokens),
105 | .deepSeek(let model, let messages, let maxTokens),
106 | .openRouter(let model, let messages, let maxTokens):
107 |
108 | let messageParams: [SwiftOpenAI.ChatCompletionParameters.Message] = messages.map { .init(role: .init(rawValue: $0.role) ?? .user, content: .text($0.content)) }
109 | let messageParameter = ChatCompletionParameters(messages: messageParams, model: .custom(model), maxTokens: maxTokens)
110 | let service = try openAIService(for: parameter)
111 | return try await service.startChat(parameters: messageParameter)
112 |
113 | case .openAI(let model, let messages, let maxTokens),
114 | .azure(let model, let messages, let maxTokens),
115 | .aiProxy(let model, let messages, let maxTokens):
116 |
117 | let messageParams: [SwiftOpenAI.ChatCompletionParameters.Message] = messages.map { .init(role: .init(rawValue: $0.role) ?? .user, content: .text($0.content)) }
118 | let messageParameter = ChatCompletionParameters(messages: messageParams, model: model, maxTokens: maxTokens)
119 | let service = try openAIService(for: parameter)
120 | return try await service.startChat(parameters: messageParameter)
121 |
122 | case .anthropic(let model, let messages, let maxTokens):
123 |
124 | guard let anthropicService else {
125 |         throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
126 | }
127 | // Remove all system messages as Anthropic uses the system message as a parameter and not as part of the messages array.
128 | let messageParams: [SwiftAnthropic.MessageParameter.Message] = messages.compactMap { message in
129 | guard message.role != "system" else {
130 | return nil // Skip "system" roles
131 | }
132 | return MessageParameter.Message(
133 | role: SwiftAnthropic.MessageParameter.Message.Role(rawValue: message.role) ?? .user,
134 | content: .text(message.content)
135 | )
136 | }
137 | let systemMessage = messages.first { $0.role == "system" }
138 | let messageParameter = MessageParameter(model: model, messages: messageParams, maxTokens: maxTokens, system: .text(systemMessage?.content ?? ""), stream: false)
139 | return try await anthropicService.createMessage(messageParameter)
140 |
141 | case .ollama(let model, let messages, let maxTokens):
142 | let messageParams: [SwiftOpenAI.ChatCompletionParameters.Message] = messages.map { .init(role: .init(rawValue: $0.role) ?? .user, content: .text($0.content)) }
143 | let messageParameter = ChatCompletionParameters(messages: messageParams, model: .custom(model), maxTokens: maxTokens)
144 | let service = try openAIService(for: parameter)
145 | return try await service.startChat(parameters: messageParameter)
146 | }
147 | }
148 |
149 | func streamMessage(
150 | _ parameter: LLMParameter)
151 |     async throws -> AsyncThrowingStream<LLMMessageStreamResponse, Error>
152 | {
153 | switch parameter {
154 | case .groq(let model, let messages, let maxTokens),
155 | .gemini(let model, let messages, let maxTokens),
156 | .deepSeek(let model, let messages, let maxTokens),
157 | .openRouter(let model, let messages, let maxTokens):
158 |
159 | let messageParams: [SwiftOpenAI.ChatCompletionParameters.Message] = messages.map { .init(role: .init(rawValue: $0.role) ?? .user, content: .text($0.content)) }
160 | let messageParameter = ChatCompletionParameters(messages: messageParams, model: .custom(model), maxTokens: maxTokens)
161 | let service = try openAIService(for: parameter)
162 | let stream = try await service.startStreamedChat(parameters: messageParameter)
163 | return try mapToLLMMessageStreamResponse(stream: stream)
164 |
165 | case .openAI(let model, let messages, let maxTokens),
166 | .azure(let model, let messages, let maxTokens),
167 | .aiProxy(let model, let messages, let maxTokens):
168 |
169 | let messageParams: [SwiftOpenAI.ChatCompletionParameters.Message] = messages.map { .init(role: .init(rawValue: $0.role) ?? .user, content: .text($0.content)) }
170 | let messageParameter = ChatCompletionParameters(messages: messageParams, model: model, maxTokens: maxTokens)
171 | let service = try openAIService(for: parameter)
172 | let stream = try await service.startStreamedChat(parameters: messageParameter)
173 | return try mapToLLMMessageStreamResponse(stream: stream)
174 |
175 | case .anthropic(let model, let messages, let maxTokens):
176 | guard let anthropicService else {
177 |         throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
178 | }
179 | // Remove all system messages as Anthropic uses the system message as a parameter and not as part of the messages array.
180 | let messageParams: [SwiftAnthropic.MessageParameter.Message] = messages.compactMap { message in
181 | guard message.role != "system" else {
182 | return nil // Skip "system" roles
183 | }
184 | return MessageParameter.Message(
185 | role: SwiftAnthropic.MessageParameter.Message.Role(rawValue: message.role) ?? .user,
186 | content: .text(message.content)
187 | )
188 | }
189 | let systemMessage = messages.first { $0.role == "system" }
190 | let messageParameter = MessageParameter(model: model, messages: messageParams, maxTokens: maxTokens, system: .text(systemMessage?.content ?? ""))
191 | let stream = try await anthropicService.streamMessage(messageParameter)
192 | return try mapToLLMMessageStreamResponse(stream: stream)
193 |
194 | case .ollama(let model, let messages, let maxTokens):
195 | let messageParams: [SwiftOpenAI.ChatCompletionParameters.Message] = messages.map { .init(role: .init(rawValue: $0.role) ?? .user, content: .text($0.content)) }
196 | let messageParameter = ChatCompletionParameters(messages: messageParams, model: .custom(model), maxTokens: maxTokens)
197 | let service = try openAIService(for: parameter)
198 | let stream = try await service.startStreamedChat(parameters: messageParameter)
199 | return try mapToLLMMessageStreamResponse(stream: stream)
200 | }
201 | }
202 |
203 |   private func mapToLLMMessageStreamResponse<T: LLMMessageStreamResponse>(stream: AsyncThrowingStream<T, Error>)
204 |     throws -> AsyncThrowingStream<LLMMessageStreamResponse, Error>
205 |   {
206 |     let mappedStream = AsyncThrowingStream<LLMMessageStreamResponse, Error> { continuation in
207 | Task {
208 | do {
209 | for try await chunk in stream {
210 | continuation.yield(chunk)
211 | }
212 | continuation.finish()
213 | } catch {
214 | continuation.finish(throwing: error)
215 | }
216 | }
217 | }
218 | return mappedStream
219 | }
220 | }
221 |
222 | extension DefaultPolyAIService {
223 |
224 | private func openAIService(
225 | for parameter: LLMParameter)
226 | throws -> OpenAIService
227 | {
228 | switch parameter {
229 | case .groq:
230 | guard let service = groqService else {
231 |         throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
232 | }
233 | return service
234 |
235 | case .gemini:
236 | guard let service = geminiService else {
237 |         throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
238 | }
239 | return service
240 |
241 | case .openAI:
242 | guard let service = openAIService else {
243 |         throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
244 | }
245 | return service
246 |
247 | case .azure:
248 | guard let service = azureOpenAIService else {
249 |         throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
250 | }
251 | return service
252 |
253 | case .aiProxy:
254 | guard let service = aiProxyOpenAIService else {
255 |         throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
256 | }
257 | return service
258 |
259 | case .ollama:
260 | guard let service = ollamaOpenAIServiceCompatible else {
261 |         throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
262 | }
263 | return service
264 |
265 |     case .deepSeek:
266 |       guard let service = deepSeekService else {
267 |         throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
268 |       }
269 |       return service
270 |     case .openRouter:
271 |       guard let service = openRouterService else {
272 |         throw PolyAIError.missingLLMConfiguration("You must provide a valid configuration for the \(parameter.llmService) API")
273 |       }
274 |       return service
275 | case .anthropic:
276 | throw PolyAIError.missingLLMConfiguration("Anthropic does not use OpenAIService")
277 | }
278 | }
279 | }
280 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Service/PolyAIService.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PolyAIService.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/6/24.
6 | //
7 |
8 | import Foundation
9 | import SwiftOpenAI
10 |
11 | /// Represents configurations for different LLM providers.
12 | public enum LLMConfiguration {
13 |
14 | case openAI(OpenAICompatible)
15 |
16 | public enum OpenAICompatible {
17 | /// Configuration for accessing OpenAI's API.
18 | /// - Parameters:
19 | /// - apiKey: The API key for authenticating requests to OpenAI.
20 | /// - organizationID: Optional organization ID for OpenAI usage.
21 | /// - configuration: The URLSession configuration to use for network requests. Defaults to `.default`.
22 | /// - decoder: The JSON decoder used for decoding responses. Defaults to a new instance of `JSONDecoder`.
23 | case api(key: String, organizationID: String? = nil, configuration: URLSessionConfiguration = .default, decoder: JSONDecoder = .init())
24 |     /// Configuration for accessing OpenAI's API through Azure.
25 | /// - Parameters:
26 | /// - configuration: The AzureOpenAIConfiguration.
27 | /// - urlSessionConfiguration: The URLSession configuration to use for network requests. Defaults to `.default`.
28 | /// - decoder: The JSON decoder used for decoding responses. Defaults to a new instance of `JSONDecoder`.
29 | case azure(configuration: AzureOpenAIConfiguration, urlSessionConfiguration: URLSessionConfiguration = .default, decoder: JSONDecoder = .init())
30 |     /// Configuration for accessing OpenAI's API through AIProxy.
31 | /// - Parameters:
32 | /// - aiproxyPartialKey: The partial key provided in the 'API Keys' section of the AIProxy dashboard.
33 | /// Please see the integration guide for acquiring your key, at https://www.aiproxy.pro/docs
34 | /// - aiproxyClientID: If your app already has client or user IDs that you want to annotate AIProxy requests
35 | /// with, you can pass a clientID here. If you do not have existing client or user IDs, leave
36 | /// the `clientID` argument out, and IDs will be generated automatically for you.
37 | case aiProxy(aiproxyPartialKey: String, aiproxyClientID: String? = nil)
38 | /// Configuration for accessing Gemini's API.
39 | /// - Parameters:
40 | /// - apiKey: The API key for authenticating requests to Gemini.
41 |     ///   - configuration: The URLSession configuration to use for network requests. Defaults to `.default`.
42 | /// - decoder: The JSON decoder used for decoding responses. Defaults to a new instance of `JSONDecoder`.
43 | case gemini(apiKey: String, configuration: URLSessionConfiguration = .default, decoder: JSONDecoder = .init())
44 | /// Configuration for accessing Groq's API.
45 | /// - Parameters:
46 |     ///   - apiKey: The API key for authenticating requests to Groq.
47 |     ///   - configuration: The URLSession configuration to use for network requests. Defaults to `.default`.
48 | /// - decoder: The JSON decoder used for decoding responses. Defaults to a new instance of `JSONDecoder`.
49 | case groq(apiKey: String, configuration: URLSessionConfiguration = .default, decoder: JSONDecoder = .init())
50 |     /// Configuration for accessing DeepSeek's API.
51 |     /// - Parameters:
52 |     ///   - apiKey: The API key for authenticating requests to DeepSeek.
53 |     ///   - configuration: The URLSession configuration to use for network requests. Defaults to `.default`.
54 | /// - decoder: The JSON decoder used for decoding responses. Defaults to a new instance of `JSONDecoder`.
55 | case deepSeek(apiKey: String, configuration: URLSessionConfiguration = .default, decoder: JSONDecoder = .init())
56 |     /// Configuration for accessing OpenRouter's API.
57 |     /// - Parameters:
58 |     ///   - apiKey: The API key for authenticating requests to OpenRouter.
59 |     ///   - configuration: The URLSession configuration to use for network requests. Defaults to `.default`.
60 | /// - decoder: The JSON decoder used for decoding responses. Defaults to a new instance of `JSONDecoder`.
61 | /// - extraHeaders: Optional. Site URL and title for rankings on openrouter.ai
62 | case openRouter(apiKey: String, configuration: URLSessionConfiguration = .default, decoder: JSONDecoder = .init(), extraHeaders: [String: String]? = nil)
63 | /// Configuration for accessing Ollama models using OpenAI endpoints compatibility.
64 | /// - Parameters:
65 | /// - url: The local host URL. e.g "http://localhost:11434"
66 | case ollama(url: String)
67 | }
68 |
69 | /// Configuration for accessing Anthropic's API.
70 | /// - Parameters:
71 | /// - apiKey: The API key for authenticating requests to Anthropic.
72 | /// - configuration: The URLSession configuration to use for network requests. Defaults to `.default`.
73 | /// - betaHeaders: An array of headers for Anthropic's beta features.
74 | case anthropic(apiKey: String, configuration: URLSessionConfiguration = .default, betaHeaders: [String]? = nil)
75 | }
76 |
77 | /// Defines the interface for a service that interacts with Large Language Models (LLMs).
78 | public protocol PolyAIService {
79 |
80 | /// Initializes a new instance with the specified configurations for LLM providers.
81 | /// - Parameters:
82 | /// - configurations: An array of `LLMConfiguration` items, each specifying settings for a different LLM provider.
83 | /// - debugEnabled: Logs requests if `true`
84 | init(configurations: [LLMConfiguration], debugEnabled: Bool)
85 |
86 | // MARK: Message
87 |
88 | /// Creates a message to an LLM based on the provided parameters.
89 | ///
90 | /// - Parameter parameter: The LLMParameter defining the LLM request details.
91 | /// - Returns: A response conforming to `LLMMessageResponse`.
92 | /// - Throws: An error if the message creation fails.
93 | func createMessage(
94 | _ parameter: LLMParameter)
95 | async throws -> LLMMessageResponse
96 |
97 |   /// Streams messages from an LLM based on the provided parameters.
98 | ///
99 | /// - Parameter parameter: The LLMParameter defining the LLM request details.
100 | /// - Returns: An asynchronous stream of responses conforming to `LLMMessageStreamResponse`.
101 | /// - Throws: An error if the streaming operation fails.
102 | func streamMessage(
103 | _ parameter: LLMParameter)
104 |   async throws -> AsyncThrowingStream<LLMMessageStreamResponse, Error>
105 | }
106 |
107 | extension LLMConfiguration {
108 |
109 | public var rawValue: String {
110 | switch self {
111 | case .openAI(let openAICompatible):
112 | switch openAICompatible {
113 | case .api: "OpenAI"
114 | case .azure: "Azure"
115 |       case .aiProxy: "AIProxy"
116 | case .gemini: "Gemini"
117 | case .groq: "Groq"
118 | case .ollama: "Ollama"
119 | case .openRouter: "OpenRouter"
120 | case .deepSeek: "DeepSeek"
121 | }
122 | case .anthropic: "Anthropic"
123 | }
124 | }
125 | }
126 |
--------------------------------------------------------------------------------
/Sources/PolyAI/Service/PolyAIServiceFactory.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PolyAIServiceFactory.swift
3 | //
4 | //
5 | // Created by James Rochabrun on 4/6/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct PolyAIServiceFactory {
11 |
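12 |   /// Builds a `PolyAIService` configured with the given provider configurations.
13 |   /// - Parameters:
14 |   ///   - configurations: An array of `LLMConfiguration` values, one per provider you want to enable.
15 |   ///   - debugEnabled: Logs requests if `true`. Defaults to `false`.
16 |   /// - Returns: A service that routes each `LLMParameter` request to the matching provider.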
12 | public static func serviceWith(
13 | _ configurations: [LLMConfiguration],
14 | debugEnabled: Bool = false)
15 | -> PolyAIService
16 | {
17 | DefaultPolyAIService(configurations: configurations, debugEnabled: debugEnabled)
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/Tests/PolyAITests/PolyAITests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import PolyAI
3 |
4 | final class PolyAITests: XCTestCase {
5 | func testExample() throws {
6 | // XCTest Documentation
7 | // https://developer.apple.com/documentation/xctest
8 |
9 | // Defining Test Cases and Test Methods
10 | // https://developer.apple.com/documentation/xctest/defining_test_cases_and_test_methods
11 | }
12 | }
13 |
--------------------------------------------------------------------------------