├── .DS_Store
├── .gitignore
├── .swiftpm
└── xcode
│ └── package.xcworkspace
│ └── contents.xcworkspacedata
├── Examples
├── SwiftUICallView.swift
└── UIKitCallViewController.swift
├── LICENSE
├── Package.resolved
├── Package.swift
├── README.md
├── Sources
├── Constants
│ └── Constants.swift
├── Models
│ ├── AppMessage.swift
│ ├── ConversationUpdate.swift
│ ├── FunctionCall.swift
│ ├── Metadata.swift
│ ├── ModelOutput.swift
│ ├── SpeechUpdate.swift
│ ├── StatusUpdate.swift
│ ├── Transcript.swift
│ ├── UserInterrupted.swift
│ ├── VapiError.swift
│ ├── VoiceInput.swift
│ └── WebCallResponse.swift
├── NetworkManager.swift
└── Vapi.swift
└── Tests
└── VapiTests.swift
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VapiAI/client-sdk-ios/ee89eeb693235842aea1150cb68626f37c7e8c41/.DS_Store
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | ## User settings
6 | xcuserdata/
7 |
8 | ## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
9 | *.xcscmblueprint
10 | *.xccheckout
11 |
12 | ## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
13 | build/
14 | DerivedData/
15 | *.moved-aside
16 | *.pbxuser
17 | !default.pbxuser
18 | *.mode1v3
19 | !default.mode1v3
20 | *.mode2v3
21 | !default.mode2v3
22 | *.perspectivev3
23 | !default.perspectivev3
24 |
25 | ## Obj-C/Swift specific
26 | *.hmap
27 |
28 | ## App packaging
29 | *.ipa
30 | *.dSYM.zip
31 | *.dSYM
32 |
33 | ## Playgrounds
34 | timeline.xctimeline
35 | playground.xcworkspace
36 |
37 | # Swift Package Manager
38 | #
39 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
40 | # Packages/
41 | # Package.pins
42 | # Package.resolved
43 | # *.xcodeproj
44 | #
45 | # Xcode automatically generates this directory with a .xcworkspacedata file and xcuserdata
46 | # hence it is not needed unless you have added a package configuration file to your project
47 | # .swiftpm
48 |
49 | .build/
50 |
51 | # CocoaPods
52 | #
53 | # We recommend against adding the Pods directory to your .gitignore. However
54 | # you should judge for yourself, the pros and cons are mentioned at:
55 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
56 | #
57 | # Pods/
58 | #
59 | # Add this line if you want to avoid checking in source code from the Xcode workspace
60 | # *.xcworkspace
61 |
62 | # Carthage
63 | #
64 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
65 | # Carthage/Checkouts
66 |
67 | Carthage/Build/
68 |
69 | # Accio dependency management
70 | Dependencies/
71 | .accio/
72 |
73 | # fastlane
74 | #
75 | # It is recommended to not store the screenshots in the git repo.
76 | # Instead, use fastlane to re-generate the screenshots whenever they are needed.
77 | # For more information about the recommended setup visit:
78 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
79 |
80 | fastlane/report.xml
81 | fastlane/Preview.html
82 | fastlane/screenshots/**/*.png
83 | fastlane/test_output
84 |
85 | # Code Injection
86 | #
87 | # After new code Injection tools there's a generated folder /iOSInjectionProject
88 | # https://github.com/johnno1962/injectionforxcode
89 |
90 | iOSInjectionProject/
91 |
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/Examples/SwiftUICallView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import Vapi
3 | import Combine
4 |
5 | class CallManager: ObservableObject {
6 | enum CallState {
7 | case started, loading, ended
8 | }
9 |
10 | @Published var callState: CallState = .ended
11 | var vapiEvents = [Vapi.Event]()
12 | private var cancellables = Set<AnyCancellable>()
13 | let vapi: Vapi
14 |
15 | init() {
16 | vapi = Vapi(
17 | publicKey: ""
18 | )
19 | }
20 |
21 | func setupVapi() {
22 | vapi.eventPublisher
23 | .sink { [weak self] event in
24 | self?.vapiEvents.append(event)
25 | switch event {
26 | case .callDidStart:
27 | self?.callState = .started
28 | case .callDidEnd:
29 | self?.callState = .ended
30 | case .speechUpdate:
31 | print(event)
32 | case .conversationUpdate:
33 | print(event)
34 | case .functionCall:
35 | print(event)
36 | case .hang:
37 | print(event)
38 | case .metadata:
39 | print(event)
40 | case .transcript:
41 | print(event)
42 | case .statusUpdate:
43 | print(event)
44 | case .modelOutput:
45 | print(event)
46 | case .userInterrupted:
47 | print(event)
48 | case .voiceInput:
49 | print(event)
50 | case .error(let error):
51 | print("Error: \(error)")
52 | }
53 | }
54 | .store(in: &cancellables)
55 | }
56 |
57 | @MainActor
58 | func handleCallAction() async {
59 | if callState == .ended {
60 | await startCall()
61 | } else {
62 | endCall()
63 | }
64 | }
65 |
66 | @MainActor
67 | func startCall() async {
68 | callState = .loading
69 | let assistant = [
70 | "model": [
71 | "provider": "openai",
72 | "model": "gpt-3.5-turbo",
73 | "messages": [
74 | ["role":"system", "content":"You are an assistant."]
75 | ],
76 | ],
77 | "firstMessage": "Hey there",
78 | "voice": "jennifer-playht"
79 | ] as [String : Any]
80 | do {
81 | _ = try await vapi.start(assistant: assistant)
82 | } catch {
83 | print("Error starting call: \(error)")
84 | callState = .ended
85 | }
86 | }
87 |
88 | func endCall() {
89 | vapi.stop()
90 | }
91 | }
92 |
93 | struct ContentView: View {
94 | @StateObject private var callManager = CallManager()
95 |
96 | var body: some View {
97 | ZStack {
98 | // Background gradient
99 | LinearGradient(gradient: Gradient(colors: [Color.blue.opacity(0.7), Color.purple.opacity(0.7)]), startPoint: .top, endPoint: .bottom)
100 | .edgesIgnoringSafeArea(.all)
101 |
102 | VStack(spacing: 20) {
103 | Text("Vapi Call Interface")
104 | .font(.largeTitle)
105 | .fontWeight(.bold)
106 | .foregroundColor(.white)
107 |
108 | Spacer()
109 |
110 | Text(callManager.callStateText)
111 | .font(.title)
112 | .fontWeight(.semibold)
113 | .foregroundColor(.white)
114 | .padding()
115 | .background(callManager.callStateColor)
116 | .cornerRadius(10)
117 |
118 | Spacer()
119 |
120 | Button(action: {
121 | Task {
122 | await callManager.handleCallAction()
123 | }
124 | }) {
125 | Text(callManager.buttonText)
126 | .font(.title2)
127 | .fontWeight(.bold)
128 | .foregroundColor(.white)
129 | .padding()
130 | .frame(maxWidth: .infinity)
131 | .background(callManager.buttonColor)
132 | .cornerRadius(10)
133 | }
134 | .disabled(callManager.callState == .loading)
135 | .padding(.horizontal, 40)
136 |
137 | Spacer()
138 | }
139 | }
140 | .onAppear {
141 | callManager.setupVapi()
142 | }
143 | }
144 | }
145 |
146 | extension CallManager {
147 | var callStateText: String {
148 | switch callState {
149 | case .started: return "Call in Progress"
150 | case .loading: return "Connecting..."
151 | case .ended: return "Call Off"
152 | }
153 | }
154 |
155 | var callStateColor: Color {
156 | switch callState {
157 | case .started: return .green.opacity(0.8)
158 | case .loading: return .orange.opacity(0.8)
159 | case .ended: return .gray.opacity(0.8)
160 | }
161 | }
162 |
163 | var buttonText: String {
164 | callState == .loading ? "Loading..." : (callState == .ended ? "Start Call" : "End Call")
165 | }
166 |
167 | var buttonColor: Color {
168 | callState == .loading ? .gray : (callState == .ended ? .green : .red)
169 | }
170 | }
171 |
--------------------------------------------------------------------------------
/Examples/UIKitCallViewController.swift:
--------------------------------------------------------------------------------
1 | import UIKit
2 | import Combine
3 | import Vapi
4 |
5 | /// Minimal UIKit example: wires a view controller to the Vapi event publisher
6 | /// and starts/stops a call from button actions.
7 | class CallViewController: UIViewController {
8 | private var vapi: Vapi?
9 | // Retains the Combine subscription for the lifetime of this controller.
10 | private var cancellables = Set<AnyCancellable>()
11 |
12 | override func viewDidLoad() {
13 | super.viewDidLoad()
14 | setupVapi()
15 | }
16 |
17 | private func setupVapi() {
18 | vapi = Vapi(publicKey: "your_public_key")
19 |
20 | vapi?.eventPublisher.sink { [weak self] event in
21 | switch event {
22 | case .callDidStart:
23 | self?.updateUIForCallStart()
24 | case .callDidEnd:
25 | self?.updateUIForCallEnd()
26 | default:
27 | break
28 | }
29 | }.store(in: &cancellables)
30 | }
31 |
32 | @IBAction func startCallPressed(_ sender: UIButton) {
33 | // start(assistantId:) is async throwing, so hop onto a Task.
34 | Task {
35 | do {
36 | _ = try await vapi?.start(assistantId: "your_assistant_id")
37 | } catch {
38 | print("Error starting call: \(error)")
39 | }
40 | }
41 | }
42 |
43 | @IBAction func stopCallPressed(_ sender: UIButton) {
44 | vapi?.stop()
45 | }
46 |
47 | private func updateUIForCallStart() {
48 | // Update UI to show call has started
49 | }
50 |
51 | private func updateUIForCallEnd() {
52 | // Update UI to show call has ended
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Vapi
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "pins" : [
3 | {
4 | "identity" : "daily-client-ios",
5 | "kind" : "remoteSourceControl",
6 | "location" : "https://github.com/daily-co/daily-client-ios",
7 | "state" : {
8 | "revision" : "1b84803a17766240007f11c553ca7debbfcef33b",
9 | "version" : "0.22.0"
10 | }
11 | }
12 | ],
13 | "version" : 2
14 | }
15 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version: 5.9
2 | import PackageDescription
3 |
4 | let package = Package(
5 | name: "Vapi",
6 | platforms: [
7 | .iOS(.v13),
8 | .macOS(.v12),
9 | ],
10 | products: [
11 | .library(
12 | name: "Vapi",
13 | targets: ["Vapi"]
14 | ),
15 | ],
16 | dependencies: [
17 | .package(url: "https://github.com/daily-co/daily-client-ios", from: "0.2.0"),
18 | ],
19 | targets: [
20 | .target(
21 | name: "Vapi",
22 | dependencies: [
23 | .product(name: "Daily", package: "daily-client-ios")
24 | ],
25 | path: "Sources"
26 | ),
27 | .testTarget(
28 | name: "VapiTests",
29 | dependencies: ["Vapi"],
30 | path: "Tests"
31 | ),
32 | ]
33 | )
34 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Vapi iOS SDK
2 |
3 | This package lets you start Vapi calls directly in your iOS app.
4 |
5 | ## Requirements
6 |
7 | - iOS 13.0 or later
8 |
9 | ## Installation
10 |
11 | ### Swift Package Manager
12 |
13 | In Xcode, go to File -> Add Packages... and enter the following URL in 'Search or Enter Package URL' textbox in the top right corner of that window: https://github.com/VapiAI/ios
14 |
15 | Pick the desired dependency rule (under “Dependency Rule”), as well as build target (under “Add to Project”) and click “Add Package”.
16 |
17 | ### In Package.swift
18 |
19 | To depend on the Vapi package, you can declare your dependency in your `Package.swift`:
20 |
21 | ```swift
22 | .package(url: "https://github.com/VapiAI/ios", branch: "main"),
23 | ```
24 |
25 | and add `"Vapi"` to your application/library target, `dependencies`, e.g. like this:
26 |
27 | ```swift
28 | .target(name: "YourApp", dependencies: [
29 | .product(name: "Vapi", package: "ios")
30 | ],
31 | ```
32 |
33 | ## App Setup
34 |
35 | You will need to update your project's Info.plist to add the following new entries:
36 |
37 | - NSMicrophoneUsageDescription (and, optionally, NSCameraUsageDescription)
38 | - UIBackgroundModes
39 |
40 | For the usage-description keys' values, provide user-facing strings explaining why your app is asking for microphone (and, optionally, camera) access.
41 |
42 | UIBackgroundModes is handled slightly differently and will resolve to an array. For its first item, specify the value voip. This ensures that audio will continue uninterrupted when your app is sent to the background.
43 |
44 | To add the new entries through Xcode, open the Info.plist and add the following four entries (Camera is optional):
45 |
46 | | Key | Type | Value |
47 | |--------------------------------------|--------|----------------------------------------------|
48 | | Privacy - Camera Usage Description | String | "Your app name needs camera access to work" |
49 | | Privacy - Microphone Usage Description| String | "Your app name needs microphone access to work" |
50 | | Required background modes | Array | 1 item |
51 | | ---> Item 0 | String | "App provides Voice over IP services" |
52 |
53 | If you view the raw file contents of Info.plist, it should look like this:
54 |
55 | ```xml
56 |
57 | ...
58 | NSMicrophoneUsageDescription
59 | Your app name needs microphone access to work
60 | NSCameraUsageDescription
61 | Your app name needs camera access to work
62 | UIBackgroundModes
63 |
64 | voip
65 | audio
66 |
67 | ...
68 |
69 | ```
70 |
71 | ## Usage
72 |
73 | ### 1. Starting a Call
74 |
75 | - **Methods:**
76 | - `start(assistantId: String, assistantOverrides: [String: Any] = [:])`
77 | - `start(assistant: [String: Any], assistantOverrides: [String: Any] = [:])`
78 | - **Description:**
79 | - Use these methods to initiate a new call. You can either start a call by passing an `assistantId` or by providing an `assistant` dictionary with specific parameters.
80 | - These methods throw an error if there's already an ongoing call to ensure that only one call is active at any time.
81 | - The `assistantOverrides` parameter is optional and allows you to override an assistant's default settings or set variables. For example, if the first message is "Hello {{name}}", you can set `assistantOverrides` to `["variableValues": ["name": "Alice"]]` to replace `{{name}}` with `Alice`.
82 |
83 | ### 2. Stopping a Call
84 |
85 | - **Method:** `stop()`
86 | - **Description:**
87 | - This method ends an ongoing call.
88 | - It's an asynchronous operation, ensuring the call is properly disconnected.
89 |
90 | ### 3. Muting a Call
91 |
92 | - **Methods:**
93 |   - `setMuted(_ muted: Bool)`
94 |   - `isMuted()`
95 | - **Description:**
96 |   - Use these methods to mute or unmute an ongoing call.
97 |   - Parameters: `muted` is a boolean value indicating whether the audio should be muted.
98 |   - `setMuted(true)` will mute the audio and `setMuted(false)` will unmute audio.
99 |
100 | ### 4. Handling Events
101 |
102 | - **Overview:** The SDK provides various events that you can listen to for handling different aspects of the call lifecycle and interactions.
103 | - **Key Events:**
104 | - `callDidStart`: Emitted when a call successfully begins.
105 | - `callDidEnd`: Emitted when a call is successfully ended.
106 | - `appMessageReceived([String: Any], from: Daily.ParticipantID)`: Occurs when a message is received during the call. Live transcripts and function calls will be sent through this.
107 | - `error(Swift.Error)`: Triggered if there's an error during the call setup or execution.
108 |
109 | ### Implementing in Your Project
110 |
111 | To see these methods in action, refer to our example files:
112 |
113 | - **SwiftUI Example:** Check the `SwiftUICallView.swift` file in the Example folder. This file demonstrates the integration of these methods in a SwiftUI view.
114 | - **UIKit Example:** Look at the `UIKitCallViewController.swift` file for an example of using the Vapi SDK within a UIKit view controller.
115 |
116 | These examples will guide you through effectively implementing and managing voice calls in your iOS applications using the Vapi SDK.
117 |
118 | ## License
119 |
120 | ```
121 | MIT License
122 |
123 | Copyright (c) 2023 Vapi Labs Inc.
124 |
125 | Permission is hereby granted, free of charge, to any person obtaining a copy
126 | of this software and associated documentation files (the "Software"), to deal
127 | in the Software without restriction, including without limitation the rights
128 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
129 | copies of the Software, and to permit persons to whom the Software is
130 | furnished to do so, subject to the following conditions:
131 |
132 | The above copyright notice and this permission notice shall be included in all
133 | copies or substantial portions of the Software.
134 |
135 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
136 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
137 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
138 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
139 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
140 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
141 | SOFTWARE.
142 | ```
143 |
--------------------------------------------------------------------------------
/Sources/Constants/Constants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Constants.swift
3 | //
4 | //
5 | // Created by CITMark on 03/09/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | extension String {
11 | /// This value represents the `username` of the remote `Daily.Participant` for the `Vapi Speaker`
12 | static let remoteParticipantVapiSpeaker = "Vapi Speaker"
13 | }
14 |
--------------------------------------------------------------------------------
/Sources/Models/AppMessage.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppMessage.swift
3 | //
4 | //
5 | // Created by Brent Whitman on 2024-01-15.
6 | //
7 |
8 | import Foundation
9 |
10 | struct AppMessage: Codable {
11 | enum MessageType: String, Codable {
12 | case hang
13 | case functionCall = "function-call"
14 | case transcript
15 | case speechUpdate = "speech-update"
16 | case metadata
17 | case conversationUpdate = "conversation-update"
18 | case modelOutput = "model-output"
19 | case statusUpdate = "status-update"
20 | case voiceInput = "voice-input"
21 | case userInterrupted = "user-interrupted"
22 | }
23 |
24 | let type: MessageType
25 | }
26 |
--------------------------------------------------------------------------------
/Sources/Models/ConversationUpdate.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | public struct Message: Codable {
4 | public enum Role: String, Codable {
5 | case user = "user"
6 | case assistant = "assistant"
7 | case system = "system"
8 | }
9 |
10 | public let role: Role
11 | public let content: String
12 | }
13 |
14 | public struct ConversationUpdate: Codable {
15 | public let conversation: [Message]
16 | }
17 |
--------------------------------------------------------------------------------
/Sources/Models/FunctionCall.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FunctionCall.swift
3 | //
4 | //
5 | // Created by Brent Whitman on 2024-01-15.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct FunctionCall {
11 | enum CodingKeys: CodingKey {
12 | case name
13 | case parameters
14 | }
15 |
16 | public let name: String
17 | public let parameters: [String: Any]
18 | }
19 |
--------------------------------------------------------------------------------
/Sources/Models/Metadata.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | public struct Metadata: Codable {
4 | public let metadata: String
5 | }
6 |
--------------------------------------------------------------------------------
/Sources/Models/ModelOutput.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | public struct ModelOutput: Codable {
4 | public let output: String
5 | }
6 |
--------------------------------------------------------------------------------
/Sources/Models/SpeechUpdate.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | public struct SpeechUpdate: Codable {
4 | public enum Status: String, Codable {
5 | case started
6 | case stopped
7 | }
8 |
9 | public enum Role: String, Codable {
10 | case assistant
11 | case user
12 | }
13 |
14 | public let status: Status
15 | public let role: Role
16 | }
17 |
--------------------------------------------------------------------------------
/Sources/Models/StatusUpdate.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | public struct StatusUpdate: Codable {
4 | public let status: String
5 | }
6 |
--------------------------------------------------------------------------------
/Sources/Models/Transcript.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Transcript.swift
3 | //
4 | //
5 | // Created by Brent Whitman on 2024-01-15.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct Transcript: Codable {
11 | public enum TranscriptType: String, Codable {
12 | case final
13 | case partial
14 | }
15 |
16 | public enum Role: String, Codable {
17 | case assistant
18 | case user
19 | }
20 |
21 | public let role: Role
22 | public let transcriptType: TranscriptType
23 | public let transcript: String
24 | }
25 |
26 |
27 |
--------------------------------------------------------------------------------
/Sources/Models/UserInterrupted.swift:
--------------------------------------------------------------------------------
1 | //
2 | // UserInterrupted.swift
3 | // Vapi
4 | //
5 | // Created by Abizar Bagasrawala on 11/1/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct UserInterrupted: Codable {}
11 |
--------------------------------------------------------------------------------
/Sources/Models/VapiError.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VapiError.swift
3 | //
4 | //
5 | // Created by Andrew Carter on 12/13/23.
6 | //
7 |
8 | import Foundation
9 |
10 | public enum VapiError: Swift.Error {
11 | case invalidURL
12 | case customError(String)
13 | case existingCallInProgress
14 | case noCallInProgress
15 | case decodingError(message: String, response: String? = nil)
16 | case invalidJsonData
17 | }
18 |
--------------------------------------------------------------------------------
/Sources/Models/VoiceInput.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VoiceInput.swift
3 | // Vapi
4 | //
5 | // Created by Abizar Bagasrawala on 11/1/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct VoiceInput: Codable {
11 | public let input: String
12 | }
13 |
--------------------------------------------------------------------------------
/Sources/Models/WebCallResponse.swift:
--------------------------------------------------------------------------------
1 | //
2 | // WebCallResponse.swift
3 | //
4 | //
5 | // Created by Andrew Carter on 12/13/23.
6 | //
7 |
8 | import Foundation
9 |
10 | public struct ArtifactPlan: Decodable {
11 | public let videoRecordingEnabled: Bool
12 | }
13 |
14 | public struct WebCallResponse: Decodable {
15 | let webCallUrl: URL
16 | public let id: String
17 | public let artifactPlan: ArtifactPlan?
18 | }
19 |
--------------------------------------------------------------------------------
/Sources/NetworkManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) Vapi
3 | //
4 |
5 | import Foundation
6 |
7 | class NetworkManager {
8 |
9 | private let session = URLSession(configuration: .default)
10 |
11 | /// Performs `request` and decodes the JSON response body into `T`.
12 | /// - Throws: `VapiError.decodingError` (carrying the raw response text) when decoding fails.
13 | func perform<T: Decodable>(request: URLRequest) async throws -> T {
14 | let (data, _) = try await session.data(for: request)
15 | do {
16 | let result = try JSONDecoder().decode(T.self, from: data)
17 | return result
18 | } catch {
19 | // Surface the raw payload to make server-side error bodies debuggable.
20 | let responseString = String(data: data, encoding: .utf8)
21 | throw VapiError.decodingError(message: error.localizedDescription, response: responseString)
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/Sources/Vapi.swift:
--------------------------------------------------------------------------------
1 | import Combine
2 | import Daily
3 | import Foundation
4 |
5 | // Define the nested message structure
6 | struct VapiMessageContent: Encodable {
7 | public let role: String
8 | public let content: String
9 | }
10 |
11 | // Define the top-level app message structure
12 | public struct VapiMessage: Encodable {
13 | public let type: String
14 | let message: VapiMessageContent
15 |
16 | public init(type: String, role: String, content: String) {
17 | self.type = type
18 | self.message = VapiMessageContent(role: role, content: content)
19 | }
20 | }
21 |
22 | public final class Vapi: CallClientDelegate {
23 |
24 | // MARK: - Supporting Types
25 |
26 | /// A configuration that contains the host URL and the client token.
27 | ///
28 | /// This configuration is serializable via `Codable`.
29 | public struct Configuration: Codable, Hashable, Sendable {
30 | public var host: String
31 | public var publicKey: String
32 | fileprivate static let defaultHost = "api.vapi.ai"
33 |
34 | init(publicKey: String, host: String) {
35 | self.host = host
36 | self.publicKey = publicKey
37 | }
38 | }
39 |
40 | public enum Event {
41 | case callDidStart
42 | case callDidEnd
43 | case transcript(Transcript)
44 | case functionCall(FunctionCall)
45 | case speechUpdate(SpeechUpdate)
46 | case metadata(Metadata)
47 | case conversationUpdate(ConversationUpdate)
48 | case statusUpdate(StatusUpdate)
49 | case modelOutput(ModelOutput)
50 | case userInterrupted(UserInterrupted)
51 | case voiceInput(VoiceInput)
52 | case hang
53 | case error(Swift.Error)
54 | }
55 |
56 | // MARK: - Properties
57 |
58 | public let configuration: Configuration
59 |
60 | fileprivate let eventSubject = PassthroughSubject<Event, Never>()
61 |
62 | private let networkManager = NetworkManager()
63 | private var call: CallClient?
64 |
65 | // MARK: - Computed Properties
66 |
67 | private var publicKey: String {
68 | configuration.publicKey
69 | }
70 |
71 | /// A Combine publisher that clients can subscribe to for API events.
72 | public var eventPublisher: AnyPublisher<Event, Never> {
73 | eventSubject.eraseToAnyPublisher()
74 | }
75 |
76 | @MainActor public var localAudioLevel: Float? {
77 | call?.localAudioLevel
78 | }
79 |
80 | @MainActor public var remoteAudioLevel: Float? {
81 | call?.remoteParticipantsAudioLevel.values.first
82 | }
83 |
84 | @MainActor public var audioDeviceType: AudioDeviceType? {
85 | call?.audioDevice
86 | }
87 |
88 | private var isMicrophoneMuted: Bool = false
89 | private var isAssistantMuted: Bool = false
90 |
91 | // MARK: - Init
92 |
93 | public init(configuration: Configuration) {
94 | self.configuration = configuration
95 |
96 | Daily.setLogLevel(.off)
97 | }
98 |
99 | public convenience init(publicKey: String) {
100 | self.init(configuration: .init(publicKey: publicKey, host: Configuration.defaultHost))
101 | }
102 |
103 | public convenience init(publicKey: String, host: String? = nil) {
104 | self.init(configuration: .init(publicKey: publicKey, host: host ?? Configuration.defaultHost))
105 | }
106 |
107 | // MARK: - Instance Methods
108 |
109 | public func start(
110 | assistantId: String, metadata: [String: Any] = [:], assistantOverrides: [String: Any] = [:]
111 | ) async throws -> WebCallResponse {
112 | guard self.call == nil else {
113 | throw VapiError.existingCallInProgress
114 | }
115 |
116 | let body = [
117 | "assistantId": assistantId, "metadata": metadata, "assistantOverrides": assistantOverrides
118 | ] as [String: Any]
119 |
120 | return try await self.startCall(body: body)
121 | }
122 |
123 | public func start(
124 | assistant: [String: Any], metadata: [String: Any] = [:], assistantOverrides: [String: Any] = [:]
125 | ) async throws -> WebCallResponse {
126 | guard self.call == nil else {
127 | throw VapiError.existingCallInProgress
128 | }
129 |
130 | let body = [
131 | "assistant": assistant, "metadata": metadata, "assistantOverrides": assistantOverrides
132 | ] as [String: Any]
133 |
134 | return try await self.startCall(body: body)
135 | }
136 |
137 | public func stop() {
138 | Task {
139 | do {
140 | try await call?.leave()
141 | call = nil
142 | } catch {
143 | self.callDidFail(with: error)
144 | }
145 | }
146 | }
147 |
148 | public func send(message: VapiMessage) async throws {
149 | do {
150 | // Use JSONEncoder to convert the message to JSON Data
151 | let jsonData = try JSONEncoder().encode(message)
152 |
153 | // Debugging: Print the JSON data to verify its format (optional)
154 | if let jsonString = String(data: jsonData, encoding: .utf8) {
155 | print(jsonString)
156 | }
157 |
158 | // Send the JSON data to all targets
159 | try await self.call?.sendAppMessage(json: jsonData, to: .all)
160 | } catch {
161 | // Handle encoding error
162 | print("Error encoding message to JSON: \(error)")
163 | throw error // Re-throw the error to be handled by the caller
164 | }
165 | }
166 |
167 | public func setMuted(_ muted: Bool) async throws {
168 | guard let call = self.call else {
169 | throw VapiError.noCallInProgress
170 | }
171 |
172 | do {
173 | try await call.setInputEnabled(.microphone, !muted)
174 | self.isMicrophoneMuted = muted
175 | if muted {
176 | print("Audio muted")
177 | } else {
178 | print("Audio unmuted")
179 | }
180 | } catch {
181 | print("Failed to set mute state: \(error)")
182 | throw error
183 | }
184 | }
185 |
186 | public func isMuted() async throws {
187 | guard let call = self.call else {
188 | throw VapiError.noCallInProgress
189 | }
190 |
191 | let shouldBeMuted = !self.isMicrophoneMuted
192 |
193 | do {
194 | try await call.setInputEnabled(.microphone, !shouldBeMuted)
195 | self.isMicrophoneMuted = shouldBeMuted
196 | if shouldBeMuted {
197 | print("Audio muted")
198 | } else {
199 | print("Audio unmuted")
200 | }
201 | } catch {
202 | print("Failed to toggle mute state: \(error)")
203 | throw error
204 | }
205 | }
206 |
/// Mutes or unmutes the assistant's audio by changing this client's
/// subscription to the assistant's microphone track.
///
/// Muting uses the `.staged` subscription state rather than unsubscribing,
/// so the response stream keeps flowing and unmuting resumes audio quickly.
/// - Parameter muted: `true` stops receiving assistant audio; `false` resumes it.
/// - Throws: `VapiError.noCallInProgress` when no call is active, or any error
///   from updating the subscription.
public func setAssistantMuted(_ muted: Bool) async throws {
    guard let call else {
        throw VapiError.noCallInProgress
    }

    do {
        let remoteParticipants = await call.participants.remote

        // First retrieve the assistant where the user name is "Vapi Speaker", this is the one we will unsubscribe from or subscribe too
        // If the assistant has not joined yet this silently returns without error.
        guard let assistant = remoteParticipants.first(where: { $0.value.info.username == .remoteParticipantVapiSpeaker })?.value else { return }

        // Then we update the subscription to `staged` if muted which means we don't receive audio
        // but we'll still receive the response. If we unmute it we set it back to `subscribed` so we start
        // receiving audio again. This is taken from Daily examples.
        _ = try await call.updateSubscriptions(
            forParticipants: .set([
                assistant.id: .set(
                    profile: .set(.base),
                    media: .set(
                        microphone: .set(
                            subscriptionState: muted ? .set(.staged) : .set(.subscribed)
                        )
                    )
                )
            ])
        )
        isAssistantMuted = muted
    } catch {
        print("Failed to set subscription state to \(muted ? "Staged" : "Subscribed") for remote assistant")
        throw error
    }
}
239 |
/// Switches the call's preferred audio device, skipping the update when the
/// requested device is already active.
/// - Parameter audioDeviceType: can either be `bluetooth`, `speakerphone`, `wired` or `earpiece`
/// - Throws: `VapiError.noCallInProgress` when no call is active, or any error
///   raised while switching the device.
public func setAudioDeviceType(_ audioDeviceType: AudioDeviceType) async throws {
    guard let call else {
        throw VapiError.noCallInProgress
    }

    // Avoid redundant device switches.
    let currentDevice = await self.audioDeviceType
    if currentDevice == audioDeviceType {
        print("Not updating AudioDeviceType because it is the same")
        return
    }

    do {
        try await call.setPreferredAudioDevice(audioDeviceType)
    } catch {
        print("Failed to change the AudioDeviceType with error: \(error)")
        throw error
    }
}
259 |
/// Creates a `CallClient`, joins the room at `url`, and — when requested —
/// starts a recording of the call. Failures are routed to `callDidFail(with:)`.
/// - Parameters:
///   - url: The room URL to join.
///   - recordVideo: When `true`, the camera is enabled and recording starts after joining.
private func joinCall(url: URL, recordVideo: Bool) {
    Task { @MainActor in
        do {
            let client = CallClient()
            client.delegate = self
            self.call = client

            // The camera is only enabled when the call should be recorded;
            // the microphone is always on.
            _ = try await client.join(
                url: url,
                settings: .init(
                    inputs: .set(
                        camera: .set(.enabled(recordVideo)),
                        microphone: .set(.enabled(true))
                    )
                )
            )

            guard recordVideo else { return }

            _ = try await client.startRecording(
                streamingSettings: .init(
                    video: .init(
                        width: 1280,
                        height: 720,
                        backgroundColor: "#FF1F2D3D"
                    )
                )
            )
        } catch {
            callDidFail(with: error)
        }
    }
}
295 |
/// Builds an absolute URL for `path` against the configured API host.
///
/// Localhost is reached over `http` on port 3001 (local dev server); every
/// other host uses `https` on the default port.
/// - Parameter path: The request path, beginning with "/".
/// - Returns: The assembled URL, or `nil` if the components are invalid.
private func makeURL(for path: String) -> URL? {
    var components = URLComponents()
    let isLocalhost = configuration.host == "localhost"
    components.scheme = isLocalhost ? "http" : "https"
    if isLocalhost {
        components.port = 3001
    }
    components.host = configuration.host
    components.path = path
    return components.url
}
309 |
/// Builds a POST request for `url` carrying the public-key bearer token and
/// a JSON content type.
private func makeURLRequest(for url: URL) -> URLRequest {
    var request = URLRequest(url: url)
    request.httpMethod = "POST"
    request.setValue("Bearer \(publicKey)", forHTTPHeaderField: "Authorization")
    request.setValue("application/json", forHTTPHeaderField: "Content-Type")
    return request
}
317 |
/// POSTs `body` to the `/call/web` endpoint and joins the returned web call.
/// - Parameter body: JSON-serializable payload describing the call to start.
/// - Returns: The decoded `WebCallResponse`.
/// - Throws: `VapiError.customError` when the URL cannot be built, the body
///   cannot be serialized, or the request fails. Every failure is also
///   reported through `callDidFail(with:)`.
private func startCall(body: [String: Any]) async throws -> WebCallResponse {
    guard let url = makeURL(for: "/call/web") else {
        callDidFail(with: VapiError.invalidURL)
        throw VapiError.customError("Unable to create web call")
    }

    var request = makeURLRequest(for: url)

    do {
        request.httpBody = try JSONSerialization.data(withJSONObject: body)
    } catch {
        self.callDidFail(with: error)
        throw VapiError.customError(error.localizedDescription)
    }

    do {
        let response: WebCallResponse = try await networkManager.perform(request: request)
        // Video recording is opt-in via the artifact plan; default to no video.
        let shouldRecordVideo = response.artifactPlan?.videoRecordingEnabled ?? false
        joinCall(url: response.webCallUrl, recordVideo: shouldRecordVideo)
        return response
    } catch {
        callDidFail(with: error)
        throw VapiError.customError(error.localizedDescription)
    }
}
343 |
/// Undoes one level of JSON string escaping applied to incoming app messages.
///
/// Some app messages arrive as a JSON-encoded *string* (wrapped in quotes,
/// with inner quotes and backslashes escaped). This strips the outer quotes
/// and unescapes the contents so the payload can be decoded as JSON.
/// - Parameter jsonData: The raw app-message bytes.
/// - Returns: A tuple of the unescaped bytes and, when the input was valid
///   UTF-8, the unescaped string. Non-UTF-8 input is returned unchanged with
///   a `nil` string.
private func unescapeAppMessage(_ jsonData: Data) -> (Data, String?) {
    guard let text = String(data: jsonData, encoding: .utf8) else {
        return (jsonData, nil)
    }

    let unescaped = text
        .trimmingCharacters(in: CharacterSet(charactersIn: "\""))  // drop wrapping quotes
        .replacingOccurrences(of: "\\\\", with: "\\")              // \\ -> \  (first, to avoid double-unescaping)
        .replacingOccurrences(of: "\\\"", with: "\"")              // \" -> "

    return (unescaped.data(using: .utf8) ?? jsonData, unescaped)
}
360 |
/// Starts reporting audio levels for the local participant.
///
/// No-op when there is no active call (`call` is `nil`).
/// - Throws: Any error raised by the underlying call client.
public func startLocalAudioLevelObserver() async throws {
    // The previous do/catch only re-threw the error, so it was removed.
    try await call?.startLocalAudioLevelObserver()
}
368 |
/// Starts reporting audio levels for all remote participants.
///
/// No-op when there is no active call (`call` is `nil`).
/// - Throws: Any error raised by the underlying call client.
public func startRemoteParticipantsAudioLevelObserver() async throws {
    // The previous do/catch only re-threw the error, so it was removed.
    try await call?.startRemoteParticipantsAudioLevelObserver()
}
376 |
377 | // MARK: - CallClientDelegate
378 |
// Invoked once the local client has joined the room. The public
// `.callDidStart` event is intentionally NOT sent here — it fires when the
// assistant sends the "listening" app message (see appMessageAsJson handling).
func callDidJoin() {
    print("Successfully joined call.")
    // Note: the call start event will be sent once the assistant has joined and is listening
}
383 |
// Invoked once the local client has left the room: publishes `.callDidEnd`
// to subscribers, then releases the call client.
func callDidLeave() {
    print("Successfully left call.")

    self.eventSubject.send(.callDidEnd)
    self.call = nil
}
390 |
// Reports a join/leave failure to subscribers via `.error`, then tears down
// the call client so a new call can be started.
func callDidFail(with error: Swift.Error) {
    print("Got error while joining/leaving call: \(error).")

    self.eventSubject.send(.error(error))
    self.call = nil
}
397 |
/// Notifies the assistant when its audio track becomes playable on this client.
///
/// When the remote "Vapi Speaker" participant's microphone reaches the
/// `.playable` state, a `{"message": "playable"}` app message is broadcast.
public func callClient(_ callClient: CallClient, participantUpdated participant: Participant) {
    let isPlayable = participant.media?.microphone.state == Daily.MediaState.playable
    // Use the shared constant instead of a hard-coded "Vapi Speaker" literal
    // so this check stays in sync with setAssistantMuted(_:).
    let isVapiSpeaker = participant.info.username == .remoteParticipantVapiSpeaker
    let shouldSendAppMessage = isPlayable && isVapiSpeaker

    guard shouldSendAppMessage else {
        return
    }

    do {
        let message: [String: Any] = ["message": "playable"]
        let jsonData = try JSONSerialization.data(withJSONObject: message, options: [])

        Task {
            do {
                try await call?.sendAppMessage(json: jsonData, to: .all)
            } catch {
                // The fire-and-forget Task previously swallowed send failures;
                // at least log them so they are visible.
                print("Error sending message: \(error.localizedDescription)")
            }
        }
    } catch {
        print("Error sending message: \(error.localizedDescription)")
    }
}
418 |
/// Routes call-state transitions to the join/leave handlers; all other
/// states are ignored.
public func callClient(_ callClient: CallClient, callStateUpdated state: CallState) {
    switch state {
    case .left:
        self.callDidLeave()
    case .joined:
        self.callDidJoin()
    default:
        break
    }
}
431 |
/// Decodes an incoming app message into a typed `Event` and publishes it on
/// `eventSubject`. Parse failures are logged and otherwise swallowed, so one
/// malformed message does not take down the delegate.
public func callClient(_ callClient: Daily.CallClient, appMessageAsJson jsonData: Data, from participantID: Daily.ParticipantID) {
    do {
        let (unescapedData, unescapedString) = unescapeAppMessage(jsonData)

        // Detect listening message first since it's a string rather than JSON;
        // it marks the moment the assistant is ready, i.e. the call start.
        guard unescapedString != "listening" else {
            eventSubject.send(.callDidStart)
            return
        }

        // Parse the JSON data generically to determine the type of event
        let decoder = JSONDecoder()
        let appMessage = try decoder.decode(AppMessage.self, from: unescapedData)
        // Parse the JSON data again, this time using the specific type
        let event: Event
        switch appMessage.type {
        case .functionCall:
            // Function-call parameters are free-form JSON, so they are extracted
            // with JSONSerialization rather than Codable.
            guard let messageDictionary = try JSONSerialization.jsonObject(with: unescapedData, options: []) as? [String: Any] else {
                throw VapiError.decodingError(message: "App message isn't a valid JSON object")
            }

            guard let functionCallDictionary = messageDictionary["functionCall"] as? [String: Any] else {
                throw VapiError.decodingError(message: "App message missing functionCall")
            }

            guard let name = functionCallDictionary[FunctionCall.CodingKeys.name.stringValue] as? String else {
                throw VapiError.decodingError(message: "App message missing name")
            }

            guard let parameters = functionCallDictionary[FunctionCall.CodingKeys.parameters.stringValue] as? [String: Any] else {
                throw VapiError.decodingError(message: "App message missing parameters")
            }


            let functionCall = FunctionCall(name: name, parameters: parameters)
            event = Event.functionCall(functionCall)
        case .hang:
            // Hang events carry no payload.
            event = Event.hang
        case .transcript:
            let transcript = try decoder.decode(Transcript.self, from: unescapedData)
            event = Event.transcript(transcript)
        case .speechUpdate:
            let speechUpdate = try decoder.decode(SpeechUpdate.self, from: unescapedData)
            event = Event.speechUpdate(speechUpdate)
        case .metadata:
            let metadata = try decoder.decode(Metadata.self, from: unescapedData)
            event = Event.metadata(metadata)
        case .conversationUpdate:
            let conv = try decoder.decode(ConversationUpdate.self, from: unescapedData)
            event = Event.conversationUpdate(conv)
        case .statusUpdate:
            let statusUpdate = try decoder.decode(StatusUpdate.self, from: unescapedData)
            event = Event.statusUpdate(statusUpdate)
        case .modelOutput:
            let modelOutput = try decoder.decode(ModelOutput.self, from: unescapedData)
            event = Event.modelOutput(modelOutput)
        case .userInterrupted:
            // User-interrupted events carry no payload either.
            let userInterrupted = UserInterrupted()
            event = Event.userInterrupted(userInterrupted)
        case .voiceInput:
            let voiceInput = try decoder.decode(VoiceInput.self, from: unescapedData)
            event = Event.voiceInput(voiceInput)
        }
        eventSubject.send(event)
    } catch {
        // Log the raw (still-escaped) payload for easier debugging.
        let messageText = String(data: jsonData, encoding: .utf8)
        print("Error parsing app message \"\(messageText ?? "")\": \(error.localizedDescription)")
    }
}
501 | }
502 |
--------------------------------------------------------------------------------
/Tests/VapiTests.swift:
--------------------------------------------------------------------------------
import XCTest
@testable import Vapi

// Placeholder test target; no SDK behavior is exercised yet.
final class VapiTests: XCTestCase {
    // TODO(review): add real coverage, e.g. unescapeAppMessage round-trips
    // and makeURL localhost/https host handling.
    func testExample() throws {}
}
7 |
--------------------------------------------------------------------------------