├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   ├── pull_request_template.md
│   └── workflows
│       └── pull_request.yml
├── .gitignore
├── LICENSE
├── Makefile
├── Package.resolved
├── Package.swift
├── README.md
├── Sources
│   └── Speechly
│       ├── Audio
│       │   ├── AudioRecorder.swift
│       │   └── AudioRecorderProtocol.swift
│       ├── AudioContext.swift
│       ├── Cache
│       │   ├── CacheProtocol.swift
│       │   └── UserDefaultsCache.swift
│       ├── Client.swift
│       ├── Entity.swift
│       ├── GRPC
│       │   ├── GRPC.swift
│       │   ├── GRPCAddress.swift
│       │   └── Promisable.swift
│       ├── Identity
│       │   ├── ApiAccessToken.swift
│       │   ├── CachingIdentityClient.swift
│       │   ├── IdentityClient.swift
│       │   └── IdentityClientProtocol.swift
│       ├── Intent.swift
│       ├── Resources
│       │   └── Images.xcassets
│       │       ├── Contents.json
│       │       ├── lock.imageset
│       │       │   ├── Contents.json
│       │       │   └── lock.pdf
│       │       ├── mic-button-frame.imageset
│       │       │   ├── Contents.json
│       │       │   └── mic-button-frame.pdf
│       │       ├── mic-button-fx.imageset
│       │       │   ├── Contents.json
│       │       │   └── mic-button-fx.pdf
│       │       ├── mic-no-permission.imageset
│       │       │   ├── Contents.json
│       │       │   └── mic-no-permission.pdf
│       │       ├── mic-no-support.imageset
│       │       │   ├── Contents.json
│       │       │   └── mic-no-support.pdf
│       │       ├── mic.imageset
│       │       │   ├── Contents.json
│       │       │   └── mic.pdf
│       │       └── power-on.imageset
│       │           ├── Contents.json
│       │           └── power-on.pdf
│       ├── SLU
│       │   ├── SluClient.swift
│       │   └── SluClientProtocol.swift
│       ├── Segment.swift
│       ├── SpeechlyProtoParseable.swift
│       ├── SpeechlyProtocol.swift
│       ├── Transcript.swift
│       └── UI
│           ├── MicrophoneButtonView.swift
│           ├── SpeechBubbleView.swift
│           └── TranscriptView.swift
├── Speechly.podspec
├── Tests
│   ├── LinuxMain.swift
│   └── SpeechlyTests
│       ├── SpeechlyTests.swift
│       └── XCTestManifests.swift
└── docs
    ├── ApiAccessToken.md
    ├── ApiAccessToken_AuthScope.md
    ├── ApiAccessToken_TokenType.md
    ├── AudioContext.md
    ├── AudioRecorder.md
    ├── AudioRecorderDelegate.md
    ├── AudioRecorderProtocol.md
    ├── AudioRecorder_AudioRecorderError.md
    ├── CacheProtocol.md
    ├── CachingIdentityClient.md
    ├── Client.md
    ├── Client_SpeechlyClientInitError.md
    ├── Entity.md
    ├── Entity_ID.md
    ├── GRPCAddress.md
    ├── GRPCAddress_ParseError.md
    ├── Home.md
    ├── IdentityClient.md
    ├── IdentityClientProtocol.md
    ├── IdentityClient_IdentityClientError.md
    ├── Intent.md
    ├── InvalidSLUState.md
    ├── MicrophoneButtonDelegate.md
    ├── MicrophoneButtonView.md
    ├── Promisable.md
    ├── Segment.md
    ├── SluClient.md
    ├── SluClientDelegate.md
    ├── SluClientProtocol.md
    ├── SluConfig.md
    ├── SpeechBubbleView.md
    ├── SpeechlyDelegate.md
    ├── SpeechlyError.md
    ├── SpeechlyProtocol.md
    ├── Transcript.md
    ├── TranscriptView.md
    ├── UserDefaultsCache.md
    ├── _Footer.md
    ├── _Sidebar.md
    ├── makeChannel(addr:group:).md
    ├── makeChannel(addr:loopCount:).md
    └── makeTokenCallOptions(token:).md
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: File a bug report
4 | title: ""
5 | labels: bug
6 | assignees: ""
7 | ---
8 |
9 | ### Describe the bug
10 |
11 | A clear and concise description of what the bug is.
12 |
13 | ### Steps to reproduce
14 |
15 | Steps to reproduce the behavior:
16 |
17 | 1. Go to '...'
18 | 2. Click on '....'
19 | 3. Scroll down to '....'
20 | 4. See error
21 |
22 | ### Expected behaviour
23 |
24 | A clear and concise description of what you expected to happen.
25 |
26 | ### Screenshots
27 |
28 | If applicable, add screenshots to help explain your problem.
29 |
30 | ### Environment
31 |
32 | - Platform: [e.g. Mobile, Desktop, React Native]
33 | - OS: [e.g. iOS]
34 | - Browser: [e.g. Chrome, Safari]
35 | - Version: [e.g. 22]
36 | - Package version: [e.g. 1.2.3]
37 |
38 | ### Additional context
39 |
40 | Add any other context about the problem here.
41 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Enhancement / Feature request
3 | about: Suggest an enhancement or request a feature from this project
4 | title: ""
5 | labels: enhancement
6 | assignees: ""
7 | ---
8 |
9 | ### What
10 |
11 | Describe the feature or enhancement that you are proposing.
12 |
13 | ### Why
14 |
15 | Describe a clear and concise reasoning behind your request. Feel free to provide example use-cases.
16 |
17 | ### How
18 |
19 | Describe your preferred solution and / or how you would implement it.
20 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ### What
2 |
3 | Describe the scope of changes in this pull request.
4 |
5 | ### Why
6 |
7 | Describe the reasoning behind this pull request.
8 |
--------------------------------------------------------------------------------
/.github/workflows/pull_request.yml:
--------------------------------------------------------------------------------
1 | name: 'Release build'
2 |
3 | on:
4 | push:
5 | branches:
6 | - 'master'
7 | pull_request:
8 | branches:
9 | - 'master'
10 |
11 | jobs:
12 | macos-build:
13 | runs-on: macos-latest
14 | steps:
15 | - name: Checkout code
16 | uses: actions/checkout@v2
17 |
18 | - name: Show Xcode version
19 | run: xcodebuild -version
20 |
21 | - name: List device simulators
22 | run: xcrun xctrace list devices
23 |
24 | - name: Install Make
25 | run: brew install make
26 |
27 | - name: Install deps
28 | run: make deps
29 |
30 | - name: Run unit tests
31 | run: make test
32 |
33 | - name: Make Release build
34 | run: make release
35 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Various settings
2 | *.pbxuser
3 | !default.pbxuser
4 | *.mode1v3
5 | !default.mode1v3
6 | *.mode2v3
7 | !default.mode2v3
8 | *.perspectivev3
9 | !default.perspectivev3
10 | xcuserdata/
11 |
12 | ## Other
13 | *.moved-aside
14 | *.xccheckout
15 | *.xcscmblueprint
16 | .DS_Store
17 |
18 | ## Obj-C/Swift specific
19 | *.hmap
20 | *.ipa
21 | *.dSYM.zip
22 | *.dSYM
23 |
24 | ## Playgrounds
25 | timeline.xctimeline
26 | playground.xcworkspace
27 |
28 | # Swift Package Manager
29 | #
30 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
31 | Packages/
32 | .build/
33 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Speechly
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # Common vars
2 | SWIFT := swift
3 | XCODE := xcodebuild
4 | BUILDPATH := ./.build
5 | DOCSPATH := ./docs
6 |
7 | # Build vars
8 | SOURCES := $(shell find ./Sources -name '*.swift')
9 | ARCHPATH := $(BUILDPATH)/artifacts
10 | BUILDFLAGS := -scheme speechly-ios-client -sdk iphoneos -destination 'generic/platform=iOS'
11 |
12 | # Test vars
13 | TESTSOURCES := $(shell find ./Tests -name '*.swift')
14 | TESTFLAGS := -scheme speechly-ios-client -sdk iphonesimulator -destination 'platform=iOS Simulator,name=iPhone 12'
15 |
16 | # Build state vars
17 | DEBUGBUILD := $(ARCHPATH)/debug.xcarchive
18 | RELEASEBUILD := $(ARCHPATH)/release.xcarchive
19 |
20 | # Common
21 |
22 | .PHONY: all
23 | all: deps test release docs
24 |
25 | .PHONY: deps
26 | deps: Package.swift
27 | $(SWIFT) package resolve
28 |
29 | .PHONY: test
30 | test:
31 | $(XCODE) test $(TESTFLAGS)
32 |
33 | .PHONY: debug
34 | debug: $(DEBUGBUILD)
35 |
36 | .PHONY: release
37 | release: $(RELEASEBUILD)
38 |
39 | .PHONY: clean
40 | clean:
41 | @$(SWIFT) package clean
42 | @rm -rf $(BUILDPATH)
43 | @rm -rf $(DOCSPATH)
44 |
45 | $(DOCSPATH): $(SOURCES)
46 | $(SWIFT) doc generate ./Sources/ --module-name Speechly --output $(DOCSPATH) --base-url ""
47 | @sed -i.bak -E 's/(\[.+\])\((.+)\)/\1(\2.md)/g' docs/*.md && rm docs/*.md.bak
48 |
49 | $(RELEASEBUILD): $(SOURCES) Package.swift
50 | $(XCODE) archive $(BUILDFLAGS) -archivePath "$(ARCHPATH)/release" -configuration Release
51 |
52 | $(DEBUGBUILD): $(SOURCES) Package.swift
53 | $(XCODE) archive $(BUILDFLAGS) -archivePath "$(ARCHPATH)/debug" -configuration Debug
54 |
--------------------------------------------------------------------------------
/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "object": {
3 | "pins": [
4 | {
5 | "package": "speechly-api",
6 | "repositoryURL": "https://github.com/speechly/api.git",
7 | "state": {
8 | "branch": null,
9 | "revision": "50609dc553a253ac238926e8e7532d03845bfee1",
10 | "version": "0.9.0"
11 | }
12 | },
13 | {
14 | "package": "grpc-swift",
15 | "repositoryURL": "https://github.com/grpc/grpc-swift.git",
16 | "state": {
17 | "branch": null,
18 | "revision": "0970e57cd2c196cf31eec55417b76b30caca1f35",
19 | "version": "1.18.0"
20 | }
21 | },
22 | {
23 | "package": "SnapKit",
24 | "repositoryURL": "https://github.com/SnapKit/SnapKit.git",
25 | "state": {
26 | "branch": null,
27 | "revision": "f222cbdf325885926566172f6f5f06af95473158",
28 | "version": "5.6.0"
29 | }
30 | },
31 | {
32 | "package": "swift-atomics",
33 | "repositoryURL": "https://github.com/apple/swift-atomics.git",
34 | "state": {
35 | "branch": null,
36 | "revision": "6c89474e62719ddcc1e9614989fff2f68208fe10",
37 | "version": "1.1.0"
38 | }
39 | },
40 | {
41 | "package": "swift-collections",
42 | "repositoryURL": "https://github.com/apple/swift-collections.git",
43 | "state": {
44 | "branch": null,
45 | "revision": "937e904258d22af6e447a0b72c0bc67583ef64a2",
46 | "version": "1.0.4"
47 | }
48 | },
49 | {
50 | "package": "swift-log",
51 | "repositoryURL": "https://github.com/apple/swift-log.git",
52 | "state": {
53 | "branch": null,
54 | "revision": "32e8d724467f8fe623624570367e3d50c5638e46",
55 | "version": "1.5.2"
56 | }
57 | },
58 | {
59 | "package": "swift-nio",
60 | "repositoryURL": "https://github.com/apple/swift-nio.git",
61 | "state": {
62 | "branch": null,
63 | "revision": "6213ba7a06febe8fef60563a4a7d26a4085783cf",
64 | "version": "2.54.0"
65 | }
66 | },
67 | {
68 | "package": "swift-nio-extras",
69 | "repositoryURL": "https://github.com/apple/swift-nio-extras.git",
70 | "state": {
71 | "branch": null,
72 | "revision": "0e0d0aab665ff1a0659ce75ac003081f2b1c8997",
73 | "version": "1.19.0"
74 | }
75 | },
76 | {
77 | "package": "swift-nio-http2",
78 | "repositoryURL": "https://github.com/apple/swift-nio-http2.git",
79 | "state": {
80 | "branch": null,
81 | "revision": "a8ccf13fa62775277a5d56844878c828bbb3be1a",
82 | "version": "1.27.0"
83 | }
84 | },
85 | {
86 | "package": "swift-nio-ssl",
87 | "repositoryURL": "https://github.com/apple/swift-nio-ssl.git",
88 | "state": {
89 | "branch": null,
90 | "revision": "e866a626e105042a6a72a870c88b4c531ba05f83",
91 | "version": "2.24.0"
92 | }
93 | },
94 | {
95 | "package": "swift-nio-transport-services",
96 | "repositoryURL": "https://github.com/apple/swift-nio-transport-services.git",
97 | "state": {
98 | "branch": null,
99 | "revision": "41f4098903878418537020075a4d8a6e20a0b182",
100 | "version": "1.17.0"
101 | }
102 | },
103 | {
104 | "package": "SwiftProtobuf",
105 | "repositoryURL": "https://github.com/apple/swift-protobuf.git",
106 | "state": {
107 | "branch": null,
108 | "revision": "f25867a208f459d3c5a06935dceb9083b11cd539",
109 | "version": "1.22.0"
110 | }
111 | }
112 | ]
113 | },
114 | "version": 1
115 | }
116 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version:5.3
2 |
3 | import PackageDescription
4 |
5 | let package = Package(
6 | name: "speechly-ios-client",
7 | platforms: [
8 | .iOS(.v12)
9 | ],
10 | products: [
11 | .library(
12 | name: "Speechly",
13 | targets: ["Speechly"]
14 | ),
15 | ],
16 | dependencies: [
17 | .package(url: "https://github.com/apple/swift-nio.git", from: "2.0.0"),
18 | .package(url: "https://github.com/grpc/grpc-swift.git", from: "1.0.0"),
19 | .package(url: "https://github.com/SnapKit/SnapKit.git", from: "5.0.0"),
20 | .package(name: "speechly-api", url: "https://github.com/speechly/api.git", from: "0.8.0"),
21 | ],
22 | targets: [
23 | .target(
24 | name: "Speechly",
25 | dependencies: [
26 | .product(name: "NIO", package: "swift-nio"),
27 | .product(name: "GRPC", package: "grpc-swift"),
28 | .product(name: "SnapKit", package: "SnapKit"),
29 | .product(name: "SpeechlyAPI", package: "speechly-api"),
30 | ],
31 | resources: [.process("Resources")]
32 | ),
33 | .testTarget(
34 | name: "SpeechlyTests",
35 | dependencies: ["Speechly"]
36 | ),
37 | ]
38 | )
39 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 
5 | 
6 |
7 | [Website](https://www.speechly.com/)
8 | ·
9 | [Docs](https://docs.speechly.com/)
10 | ·
11 | [Support](https://github.com/speechly/speechly/discussions)
12 | ·
13 | [Blog](https://www.speechly.com/blog/)
14 | ·
15 | [Login](https://api.speechly.com/dashboard/)
16 |
17 |
18 |
19 |
20 | # Speechly iOS Client
21 |
22 | 
23 | [](LICENSE)
24 |
25 | This repository contains the source code for the iOS client for [Speechly](https://www.speechly.com/?utm_source=github&utm_medium=ios-client&utm_campaign=text) SLU API. Speechly allows you to easily build applications with voice-enabled UIs.
26 |
27 | ## Installation
28 |
29 | ### Swift package dependency
30 |
31 | The client is distributed using [Swift Package Manager](https://swift.org/package-manager/), so you can use it by adding it as a dependency to your `Package.swift`:
32 |
33 | ```swift
34 | // swift-tools-version:5.3
35 |
36 | import PackageDescription
37 |
38 | let package = Package(
39 | name: "MySpeechlyApp",
40 | dependencies: [
41 | .package(name: "speechly-ios-client", url: "https://github.com/speechly/ios-client.git", from: "0.3.0"),
42 | ],
43 | targets: [
44 | .target(
45 | name: "MySpeechlyApp",
46 | dependencies: []),
47 | .testTarget(
48 | name: "MySpeechlyAppTests",
49 | dependencies: ["MySpeechlyApp"]),
50 | ]
51 | )
52 | ```
53 |
54 | And then running `swift package resolve`.
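
If your app target imports `Speechly`, remember to add the library product to that target's dependencies as well. A minimal sketch, assuming the package name used above and the library product name `Speechly` from this repository's `Package.swift`:

```swift
.target(
    name: "MySpeechlyApp",
    dependencies: [
        // The library product is named "Speechly"; the package is the
        // "speechly-ios-client" dependency declared above.
        .product(name: "Speechly", package: "speechly-ios-client"),
    ]
),
```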
55 |
56 | ### Xcode package dependency
57 |
58 | If you are using Xcode, check out the [official tutorial for adding package dependencies to your app](https://developer.apple.com/documentation/xcode/adding_package_dependencies_to_your_app).
59 |
60 | ## Client Usage
61 |
62 | The client exposes methods for starting and stopping the recognition, as well as a delegate protocol to implement for receiving recognition results. The `startContext()` method will open the microphone device and stream audio to the API, and the `stopContext()` method will close the audio context and the microphone.
63 |
64 | Note: the application's `Info.plist` needs to include key `NSMicrophoneUsageDescription` to actually enable microphone access. The value is a string that iOS presents to the user when requesting permission to access the microphone.
65 |
66 | ```swift
67 | import Foundation
68 | import Speechly
69 |
70 | class SpeechlyManager {
71 | let client: Speechly.Client
72 |
73 | public init() {
74 | client = try! Speechly.Client(
75 | // Specify your Speechly application's identifier here.
76 | appId: UUID(uuidString: "your-speechly-app-id")!,
77 | // or, if you want to use the project based login, set projectId.
78 | //projectId: UUID(uuidString: "your-speechly-project-id")!,
79 | )
80 |
81 | client.delegate = self
82 | }
83 |
84 | public func start() {
85 | // Use this to unmute the microphone and start recognising user's voice input.
86 | // You can call this when e.g. a button is pressed.
87 | // startContext accepts an optional `appId` parameter, if you need to specify it
88 | // per context.
89 | client.startContext()
90 | }
91 |
92 | public func stop() {
93 | // Use this to mute the microphone and stop recognising user's voice input.
94 | // You can call this when e.g. a button is released.
95 | client.stopContext()
96 | }
97 | }
98 |
99 | // Implement the `Speechly.SpeechlyDelegate` for reacting to recognition results.
100 | extension SpeechlyManager: SpeechlyDelegate {
101 | // (Optional) Use this method for telling the user that recognition has started.
102 | func speechlyClientDidStartContext(_: SpeechlyProtocol) {
103 | print("Speechly client has started an audio stream!")
104 | }
105 |
106 | // (Optional) Use this method for telling the user that recognition has finished.
107 | func speechlyClientDidStopContext(_: SpeechlyProtocol) {
108 | print("Speechly client has finished an audio stream!")
109 | }
110 |
111 | // Use this method for receiving recognition results.
112 | func speechlyClientDidUpdateSegment(_ client: SpeechlyProtocol, segment: Segment) {
113 | print("Received a new recognition result from Speechly!")
114 |
115 | // What the user wants the app to do, (e.g. "book" a hotel).
116 | print("Intent:", segment.intent)
117 |
118 | // How the user wants the action to be taken, (e.g. "in New York", "for tomorrow").
119 | print("Entities:", segment.entities)
120 |
121 | // The text transcript of what the user has said.
122 | // Use this to communicate to the user that your app understands them.
123 | print("Transcripts:", segment.transcripts)
124 | }
125 | }
126 | ```
127 |
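As noted above, besides the `NSMicrophoneUsageDescription` key the app also needs the user's microphone permission before audio can be captured. A minimal pre-flight check, sketched here with the standard `AVAudioSession` API (the helper below is illustrative and not part of the Speechly client):

```swift
import AVFoundation
import Foundation

// Illustrative helper: run `onGranted` only once microphone permission is available.
func withMicrophonePermission(onGranted: @escaping () -> Void) {
    let session = AVAudioSession.sharedInstance()
    switch session.recordPermission {
    case .granted:
        onGranted()
    case .undetermined:
        session.requestRecordPermission { granted in
            if granted {
                DispatchQueue.main.async { onGranted() }
            }
        }
    case .denied:
        print("Microphone access denied; the user can enable it in Settings.")
    @unknown default:
        break
    }
}

// Usage with the manager above:
// withMicrophonePermission { speechlyManager.start() }
```
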
128 | ## User Interface Components
129 |
130 | The client library also includes a couple of ready-made UI components which can be used together with `Speechly.Client`.
131 |
132 | `MicrophoneButtonView` presents a microphone button using built-in icons and visual effects, which you can replace with your own if needed. Its delegate callbacks can easily be forwarded to a `Speechly.Client` instance.
133 |
134 | `TranscriptView` visualizes the transcripts received in the `speechlyClientDidUpdateSegment` callback, automatically highlighting recognized entities. For other callbacks, see [the protocol docs](docs/SpeechlyProtocol.md).
135 |
136 | These can be used, for example, in the following way (`UIKit`):
137 |
138 | ```swift
139 | import UIKit
140 | import Speechly
141 |
142 | class ViewController: UIViewController {
143 | private let manager = SpeechlyManager()
144 |
145 | override func viewDidLoad() {
146 | super.viewDidLoad()
147 | view.backgroundColor = UIColor.white
148 | manager.addViews(view: view)
149 | }
150 | }
151 |
152 | class SpeechlyManager {
153 | private let client: Speechly.Client
154 | private let transcriptView = TranscriptView()
155 |
156 | private lazy var speechButton = MicrophoneButtonView(delegate: self)
157 |
158 | public init() {
159 | client = try! Speechly.Client(
160 | appId: UUID(uuidString: "your-speechly-app-id")!
161 | )
162 | client.delegate = self
163 | speechButton.holdToTalkText = "Hold to talk"
164 | speechButton.pressedScale = 1.5
165 | transcriptView.autohideInterval = 3
166 | }
167 |
168 | public func addViews(view: UIView) {
169 | view.addSubview(transcriptView)
170 | view.addSubview(speechButton)
171 |
172 | transcriptView.snp.makeConstraints { (make) in
173 | make.top.left.equalTo(view.safeAreaLayoutGuide).inset(20)
174 | make.right.lessThanOrEqualTo(view.safeAreaLayoutGuide).inset(20)
175 | }
176 |
177 | speechButton.snp.makeConstraints { (make) in
178 | make.centerX.equalToSuperview()
179 | make.bottom.equalTo(view.safeAreaLayoutGuide).inset(20)
180 | }
181 | }
182 |
183 | public func start() {
184 | client.startContext()
185 | }
186 |
187 | public func stop() {
188 | client.stopContext()
189 | }
190 | }
191 |
192 | extension SpeechlyManager: MicrophoneButtonDelegate {
193 | func didOpenMicrophone(_ button: MicrophoneButtonView) {
194 | self.start()
195 | }
196 | func didCloseMicrophone(_ button: MicrophoneButtonView) {
197 | self.stop()
198 | }
199 | }
200 |
201 | extension SpeechlyManager: SpeechlyDelegate {
202 | func speechlyClientDidUpdateSegment(_ client: SpeechlyProtocol, segment: Segment) {
203 | DispatchQueue.main.async {
204 | self.transcriptView.configure(segment: segment, animated: true)
205 | }
206 | }
207 | }
208 | ```
209 |
210 | For a `SwiftUI` example, check out the [Speechly iOS Client Example](https://github.com/speechly/speechly/tree/main/examples/ios-client-example) app.
211 |
212 | ## Documentation
213 |
214 | Check out [official Speechly documentation](https://docs.speechly.com/client-libraries/ios/) for tutorials and guides on how to use this client.
215 |
216 | You can also find the [speechly-ios-client documentation in the repo](docs/Home.md).
217 |
218 | ## Contributing
219 |
220 | If you want to fix a bug or add new functionality, feel free to open an issue and start the discussion. Generally it's much better to have a discussion first, before submitting a PR, since it eliminates potential design problems further on.
221 |
222 | ### Requirements
223 |
224 | - Swift 5.3+
225 | - Xcode 12+
226 | - Make
227 | - swift-doc
228 |
229 | Make sure you have Xcode and the command-line tools installed. The rest of the tools can be installed using e.g. Homebrew:
230 |
231 | ```sh
232 | brew install swift make swiftdocorg/formulae/swift-doc
233 | ```
234 |
235 | ### Building the project
236 |
237 | You can use various Make targets for building the project. Feel free to check out [the Makefile](./Makefile), but most commonly used tasks are:
238 |
239 | ```sh
240 | # Install dependencies, run tests, build release version and generate docs.
241 | # Won't do anything if everything worked fine and nothing was changed in source code / package manifest.
242 | make all
243 |
244 | # Cleans the build directory, will cause `make all` to run stuff again.
245 | make clean
246 | ```
247 |
248 | ## About Speechly
249 |
250 | Speechly is a developer tool for building real-time multimodal voice user interfaces. It enables developers and designers to enhance their current touch user interface with voice functionalities for a better user experience.
251 |
252 | #### Speechly key features
253 |
254 | - Fully streaming API
255 | - Multi modal from the ground up
256 | - Easy to configure for any use case
257 | - Fast to integrate to any touch screen application
258 | - Supports natural corrections such as "Show me red – I mean blue t-shirts"
259 | - Real time visual feedback encourages users to go on with their voice
260 |
--------------------------------------------------------------------------------
/Sources/Speechly/Audio/AudioRecorder.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import AVFoundation
3 |
4 | // MARK: - AudioRecorder definition
5 |
6 | /// An audio recorder implementation that uses AVFoundation audio engine for capturing the input.
7 | ///
8 | /// The recorder uses an audio buffer and converter for dispatching data chunks
9 | /// in the required sample rate, channel count and format.
10 | public class AudioRecorder {
11 | /// Errors thrown by the audio recorder.
12 | public enum AudioRecorderError: Error {
13 | case outputFormatError
14 | }
15 |
16 | private let _sampleRate: Double
17 | private let _channels: UInt32
18 |
19 | private let hostTimeFrequency: Double
20 | private var stopRequestedAt: UInt64?
21 |
22 | private let audioQueue: DispatchQueue
23 | private var audioEngine: AVAudioEngine
24 |
25 | private let delegateQueue: DispatchQueue
26 | private weak var _delegate: AudioRecorderDelegate? = nil
27 |
28 | /// Create a new audio recorder.
29 | ///
30 | /// - Parameters:
31 | /// - sampleRate: The sample rate to use for recording, in Hertz.
32 | /// - channels: The amount of audio channels to capture.
33 | /// - format: The audio format to use for capture (e.g. PCM16).
34 | /// - audioQueue: `DispatchQueue` to use for handling audio data from the microphone.
35 | /// - delegateQueue: `DispatchQueue` to use when calling delegate.
36 | /// - prepareOnInit: If `true`, the recorder will prepare audio engine when initialised.
37 | /// Otherwise it will be prepared separately.
38 | ///
39 | /// - Important: This initialiser will throw IFF `prepareOnInit` is set to `true`.
40 | public init(
41 | sampleRate: Double,
42 | channels: UInt32,
43 | format: AVAudioCommonFormat = .pcmFormatInt16,
44 | audioQueue: DispatchQueue = DispatchQueue(label: "com.speechly.iosclient.AudioRecorder.audioQueue"),
45 | delegateQueue: DispatchQueue = DispatchQueue(label: "com.speechly.iosclient.AudioRecorder.delegateQueue"),
46 | prepareOnInit: Bool = true
47 | ) throws {
48 | guard let outputFormat = AVAudioFormat(
49 | commonFormat: format,
50 | sampleRate: sampleRate,
51 | channels: channels,
52 | interleaved: true
53 | ) else {
54 | throw AudioRecorderError.outputFormatError
55 | }
56 |
57 | var timebaseInfo = mach_timebase_info_data_t()
58 | if mach_timebase_info(&timebaseInfo) == KERN_SUCCESS {
59 | self.hostTimeFrequency = Double(timebaseInfo.denom) / Double(timebaseInfo.numer)
60 | } else {
61 | self.hostTimeFrequency = 1
62 | }
63 |
64 | self._sampleRate = sampleRate
65 | self._channels = channels
66 | self.delegateQueue = delegateQueue
67 | self.audioQueue = audioQueue
68 | self.audioEngine = AVAudioEngine()
69 |
70 | let inputNode = self.audioEngine.inputNode
71 | let inputFormat = inputNode.outputFormat(forBus: 0)
72 | let formatConverter = AVAudioConverter(from: inputFormat, to: outputFormat)!
73 |
74 | inputNode.installTap(
75 | onBus: 0,
76 | bufferSize: AVAudioFrameCount(inputFormat.sampleRate * 0.1),
77 | format: nil
78 | ) { [weak self] (buffer, time) in
79 | self?.audioQueue.async { [weak self] in
80 | guard let self = self else { return }
81 |
82 | let outputBuffer = AVAudioPCMBuffer(
83 | pcmFormat: outputFormat,
84 | frameCapacity: AVAudioFrameCount(outputFormat.sampleRate * 0.1)
85 | )!
86 |
87 | var error: NSError? = nil
88 | formatConverter.convert(
89 | to: outputBuffer,
90 | error: &error,
91 | withInputFrom: { inNumPackets, outStatus in
92 | outStatus.pointee = AVAudioConverterInputStatus.haveData
93 | return buffer
94 | }
95 | )
96 |
97 | if error != nil {
98 | self.delegateQueue.async {
99 | self.delegate?.audioRecorderDidCatchError(self, error: error!)
100 | }
101 |
102 | self.audioEngine.stop()
103 |
104 | return
105 | }
106 |
107 | if let channelData = outputBuffer.int16ChannelData {
108 | let channels = UnsafeBufferPointer(start: channelData, count: 1)
109 | let bufferLengthInNanos = Double(NSEC_PER_SEC) * Double(outputBuffer.frameLength) / outputFormat.sampleRate
110 | let endBufferTime = time.hostTime + UInt64(bufferLengthInNanos * self.hostTimeFrequency)
111 |
112 | let data = Data(
113 | bytes: channels[0],
114 | count: Int(
115 | outputBuffer.frameLength *
116 | outputBuffer.format.streamDescription.pointee.mBytesPerFrame
117 | )
118 | )
119 |
120 | self.delegateQueue.async {
121 | self.delegate?.audioRecorderDidReceiveData(self, audioData: data)
122 | }
123 |
124 | // Check if stop has been requested, and this buffer ends after requested stop.
125 | // We won't cut the buffer as we are anyway over the actual stop,
126 | // so having sub 100ms accuracy in stopping is not relevant.
127 | if let stopRequestedAt = self.stopRequestedAt, endBufferTime >= stopRequestedAt {
128 | self.delegateQueue.async {
129 | self.delegate?.audioRecorderDidStop(self)
130 | }
131 |
132 | self.reset()
133 | }
134 | }
135 | }
136 | }
137 |
138 | if prepareOnInit {
139 | try self.prepareAudioSession()
140 | }
141 | }
142 |
143 | deinit {
144 | self.audioEngine.stop()
145 | self.audioEngine.reset()
146 | }
147 |
148 | private var isAudioSessionPrepared = false
149 | private func prepareAudioSession() throws {
150 | if self.isAudioSessionPrepared {
151 | return
152 | }
153 |
154 | let audioSession = AVAudioSession.sharedInstance()
155 | try audioSession.setCategory(.playAndRecord, mode: .voiceChat)
156 | let enableFront = true
157 |
158 | if enableFront, audioSession.availableInputs?.count == 1,
159 | let microphone = audioSession.availableInputs?.first(where: { $0.portType == .builtInMic })
160 | {
161 | if let frontDataSource = microphone.dataSources?.first(where: { $0.orientation?.rawValue == AVAudioSession.Orientation.front.rawValue }) {
162 | if frontDataSource.supportedPolarPatterns?.contains(AVAudioSession.PolarPattern(rawValue: "Cardioid")) ?? false {
163 | try frontDataSource.setPreferredPolarPattern(AVAudioSession.PolarPattern(rawValue: "Cardioid"))
164 | }
165 |
166 | try audioSession.setInputDataSource(frontDataSource)
167 | }
168 | }
169 |
170 | self.audioEngine.prepare()
171 | self.isAudioSessionPrepared = true
172 | }
173 |
174 | private func reset() {
175 | self.stopRequestedAt = nil
176 |
177 | self.audioEngine.pause()
178 | self.audioEngine.reset()
179 | }
180 | }
181 |
182 | // MARK: - AudioRecorderProtocol conformance
183 |
184 | extension AudioRecorder: AudioRecorderProtocol {
185 | public var channels: UInt32 {
186 | return self._channels
187 | }
188 |
189 | public var sampleRate: Double {
190 | return self._sampleRate
191 | }
192 |
193 | public weak var delegate: AudioRecorderDelegate? {
194 | get {
195 | return self._delegate
196 | }
197 |
198 | set(newValue) {
199 | self.delegateQueue.sync(flags: .barrier) {
200 | self.reset()
201 | self._delegate = newValue
202 | }
203 | }
204 | }
205 |
206 | public func start() throws {
207 | if self.stopRequestedAt != nil {
208 | // Force-stop the previous recording without sending the possibly missing piece of buffer.
209 | // This should be fine as we are already in a delayed stop, although it is not as accurate
210 | // as implementing buffer slicing and dicing. It can cut off the tail end of the previous
211 | // utterance, which can be problematic, but the start of a new utterance is more important
212 | // than the tail of the previous one, as it is already delayed a bit.
213 | self.delegate?.audioRecorderDidStop(self)
214 | self.reset()
215 | }
216 |
217 | try self.prepareAudioSession()
218 | try AVAudioSession.sharedInstance().setActive(true)
219 | try self.audioEngine.start()
220 | }
221 |
222 | public func stop() {
223 | // Stop only queues a timed stop half a second from now, as keeping a bit of a tail gives better
224 | // results than cutting off right at the end of a word, which is something users tend to do a lot.
225 | let halfSecondInHostNanos = UInt64(Double(NSEC_PER_SEC / 2) * self.hostTimeFrequency)
226 | self.stopRequestedAt = mach_absolute_time() + halfSecondInHostNanos
227 | }
228 |
229 | public func suspend() throws {
230 | self.audioEngine.stop()
231 | self.audioEngine.reset()
232 |
233 | try AVAudioSession.sharedInstance().setActive(false)
234 |
235 | self.stopRequestedAt = nil
236 | self.isAudioSessionPrepared = false
237 | }
238 |
239 | public func resume() throws {
240 | try self.prepareAudioSession()
241 | }
242 | }
243 |
--------------------------------------------------------------------------------
/Sources/Speechly/Audio/AudioRecorderProtocol.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | // MARK: - AudioRecorderProtocol definition.
4 |
5 | /// A protocol for capturing audio data from input sources (microphones).
6 | ///
7 | /// An audio recorder is supposed to capture audio data from a microphone
8 | /// with a pre-configured sample rate and channel count.
9 | /// It should also provide the functionality for starting and stopping the capture as well as
10 | /// preparing the recorder and resetting it to its default state.
11 | ///
12 | /// The data, errors and events should be dispatched to the delegate.
13 | public protocol AudioRecorderProtocol {
14 | /// The delegate that will receive the data, errors and events from the recorder.
15 | var delegate: AudioRecorderDelegate? { get set }
16 |
17 | /// The sample rate used for recording.
18 | var sampleRate: Double { get }
19 |
20 | /// The amount of channels captured by the recorder.
21 | var channels: UInt32 { get }
22 |
23 | /// Starts the recorder.
24 | ///
25 | /// - Important: It MUST be valid to start a non-prepared recorder.
26 | /// In that case the recorder should prepare itself on the first start.
27 | /// Also, it should be possible to call `start` consecutively multiple times.
28 | /// The semantics of such behavior are decided by the implementation.
29 | func start() throws
30 |
31 | /// Stops the recorder.
32 | ///
33 | /// - Important: It should be possible to call `stop` consecutively multiple times.
34 | /// The semantics of such behavior are decided by the implementation.
35 | func stop()
36 |
37 | /// Suspends the recorder, telling it to release any resources.
38 | func suspend() throws
39 |
40 | /// Resumes the recorder, re-initialising any resources needed for audio capture.
41 | func resume() throws
42 | }
43 |
44 | // MARK: - AudioRecorderDelegate definition.
45 |
46 | /// Delegate called when audio recorder receives some data or an error, or when it has been stopped.
47 | public protocol AudioRecorderDelegate: AnyObject {
48 | /// Called when the recorder catches an error.
49 | ///
50 | /// - Parameter error: The error which was caught.
51 | func audioRecorderDidCatchError(_ audioRecorder: AudioRecorderProtocol, error: Error)
52 |
53 | /// Called after the recorder has received some audio data.
54 | ///
55 | /// - Parameter audioData: The data chunk received from the input.
56 | func audioRecorderDidReceiveData(_ audioRecorder: AudioRecorderProtocol, audioData: Data)
57 |
58 | /// Called after the recorder has stopped recording.
59 | func audioRecorderDidStop(_ audioRecorder: AudioRecorderProtocol)
60 | }
61 |
62 | // MARK: - AudioRecorderDelegate default implementation.
63 |
64 | public extension AudioRecorderDelegate {
65 | func audioRecorderDidReceiveData(_ audioRecorder: AudioRecorderProtocol, audioData: Data) {}
66 | func audioRecorderDidCatchError(_ audioRecorder: AudioRecorderProtocol, error: Error) {}
67 | func audioRecorderDidStop(_ audioRecorder: AudioRecorderProtocol) {}
68 | }
69 |
--------------------------------------------------------------------------------
/Sources/Speechly/AudioContext.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | // MARK: - SpeechContext definition.
4 |
5 | /// The speech recognition context.
6 | ///
7 | /// A single context aggregates messages from SLU API, which correspond to the audio portion
8 | /// sent to the API within a single recognition stream.
9 | public struct AudioContext: Hashable, Identifiable {
10 | private var _segments: [Segment] = []
11 | private var _indexedSegments: [Int:Segment] = [:]
12 | private var _segmentsAreDirty: Bool = false
13 |
14 | /// The ID of the context, assigned by the API.
15 | public let id: String
16 |
17 | /// The segments belonging to the context; can be empty if nothing was recognised from the audio.
18 | public var segments: [Segment] {
19 | mutating get {
20 | if self._segmentsAreDirty {
21 | self._segments = Array(self._indexedSegments.values).sorted()
22 | self._segmentsAreDirty = false
23 | }
24 |
25 | return self._segments
26 | }
27 |
28 | set(newValue) {
29 | self._segments = newValue.sorted()
30 | self._indexedSegments = newValue.reduce(into: [Int:Segment]()) { (acc, segment) in
31 | acc[segment.segmentId] = segment
32 | }
33 | }
34 | }
35 |
36 | /// Creates a new empty speech context.
37 | ///
38 | /// - Parameter id: The identifier of the context.
39 | public init(id: String) {
40 | self.id = id
41 | }
42 |
43 | /// Creates a new speech context.
44 | ///
45 | /// - Parameters:
46 | /// - id: The identifier of the context.
47 | /// - segments: The segments which belong to the context.
48 | ///
49 | /// - Important: this initialiser does not check whether `segments` have `id` set as their `contextId` values,
50 | /// so it is possible to pass segments to this initialiser that don't belong to this context
51 | /// according to the identifiers.
52 | public init(id: String, segments: [Segment]) {
53 | self.init(id: id)
54 | self.segments = segments
55 | }
56 | }
57 |
58 | // MARK: - Comparable protocol conformance.
59 |
60 | extension AudioContext: Comparable {
61 | public static func < (lhs: AudioContext, rhs: AudioContext) -> Bool {
62 | return lhs.id < rhs.id
63 | }
64 |
65 | public static func <= (lhs: AudioContext, rhs: AudioContext) -> Bool {
66 | return lhs.id <= rhs.id
67 | }
68 |
69 | public static func >= (lhs: AudioContext, rhs: AudioContext) -> Bool {
70 | return lhs.id >= rhs.id
71 | }
72 |
73 | public static func > (lhs: AudioContext, rhs: AudioContext) -> Bool {
74 | return lhs.id > rhs.id
75 | }
76 | }
77 |
78 | // MARK: - Parsing logic implementation.
79 |
80 | extension AudioContext {
81 | mutating func addTranscripts(_ value: [Transcript], segmentId: Int) throws -> Segment {
82 | return try self.updateSegment(id: segmentId, transform: { segment in
83 | for t in value {
84 | try segment.addTranscript(t)
85 | }
86 | })
87 | }
88 |
89 | mutating func addEntities(_ value: [Entity], segmentId: Int) throws -> Segment {
90 | return try self.updateSegment(id: segmentId, transform: { segment in
91 | for e in value {
92 | try segment.addEntity(e)
93 | }
94 | })
95 | }
96 |
97 | mutating func addIntent(_ value: Intent, segmentId: Int) throws -> Segment {
98 | return try self.updateSegment(id: segmentId, transform: { segment in try segment.setIntent(value) })
99 | }
100 |
101 | mutating func finaliseSegment(segmentId: Int) throws -> Segment {
102 | return try self.updateSegment(id: segmentId, transform: { segment in try segment.finalise() })
103 | }
104 |
105 | mutating func finalise() throws -> AudioContext {
106 | for (k, v) in self._indexedSegments {
107 | if !v.isFinal {
108 | self._indexedSegments.removeValue(forKey: k)
109 | }
110 | }
111 |
112 | self._segmentsAreDirty = true
113 |
114 | return self
115 | }
116 |
117 | private mutating func updateSegment(id: Int, transform: (inout Segment) throws -> Void) rethrows -> Segment {
118 | var segment = self._indexedSegments[id] ?? Segment(segmentId: id, contextId: self.id)
119 |
120 | try transform(&segment)
121 |
122 | self._indexedSegments[id] = segment
123 | self._segmentsAreDirty = true
124 |
125 | return segment
126 | }
127 | }
128 |
129 |
--------------------------------------------------------------------------------
/Sources/Speechly/Cache/CacheProtocol.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | /// A protocol for a cache storage.
4 | ///
5 | /// The purpose of a cache storage is to persistently store string keys and values.
6 | /// The cache is used for storing things like device identifiers, authentication tokens and such.
7 | public protocol CacheProtocol {
8 | /// Adds a value with a specified key to the cache.
9 | ///
10 | /// - Parameters:
11 | /// - value: The value to store in the cache.
12 | /// - forKey: The key to use for addressing the value.
13 | func setValue(_ value: String, forKey: String)
14 |
15 | /// Retrieves the value from the cache using the provided key.
16 | ///
17 | /// - Parameters:
18 | /// - forKey: The key to use for addressing the value.
19 | /// - Returns: The value stored in the cache or `nil` if no value could be found for the key provided.
20 | func getValue(forKey: String) -> String?
21 | }
22 |
--------------------------------------------------------------------------------
/Sources/Speechly/Cache/UserDefaultsCache.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | // MARK: - UserDefaultsStorage definition.
4 |
5 | /// A cache implementation that uses `UserDefaults` as the backing storage.
6 | public class UserDefaultsCache {
7 | private let storage: UserDefaults
8 |
9 | /// Creates a new `UserDefaultsCache` instance.
10 | public convenience init() {
11 | self.init(storage: UserDefaults.standard)
12 | }
13 |
14 | /// Creates a new `UserDefaultsCache` instance.
15 | ///
16 | /// - Parameters:
17 | /// - storage: The `UserDefaults` storage to use as the backend.
18 | public init(storage: UserDefaults) {
19 | self.storage = storage
20 | }
21 | }
22 |
23 | // MARK: - CacheStorageProtocol conformance.
24 |
25 | extension UserDefaultsCache: CacheProtocol {
26 | public func setValue(_ value: String, forKey: String) {
27 | self.storage.set(value, forKey: forKey)
28 | }
29 |
30 | public func getValue(forKey: String) -> String? {
31 | return self.storage.string(forKey: forKey)
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/Sources/Speechly/Entity.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SpeechlyAPI
3 |
4 | // MARK: - SpeechEntity definition.
5 |
6 | /// A speech entity.
7 | ///
8 | /// An entity is a specific object in the phrase that falls into some kind of category,
9 | /// e.g. in a SAL example `*book book a [burger restaurant](restaurant_type) for [tomorrow](date)`
10 | /// "burger restaurant" would be an entity of type `restaurant_type`,
11 | /// and "tomorrow" would be an entity of type `date`.
12 | ///
13 | /// An entity has a start and end indices which map to the indices of `SpeechTranscript`s,
14 | /// e.g. in the example `*book book a [burger restaurant](restaurant_type) for [tomorrow](date)` it would be:
15 | ///
16 | /// * Entity "burger restaurant" - `startIndex = 2, endIndex = 3`
17 | /// * Entity "tomorrow" - `startIndex = 5, endIndex = 5`
18 | ///
19 | /// The start index is inclusive, but the end index is exclusive, i.e. the interval is `[startIndex, endIndex)`.
20 | public struct Entity: Hashable, Identifiable {
21 | /// A custom ID implementation for `SpeechEntity`.
22 | /// Since entities have two indices, start and end,
23 | /// this struct encapsulates the two for indexing and sorting purposes.
24 | public struct ID: Hashable, Comparable {
25 | /// The start index.
26 | public let start: Int
27 |
28 | /// The end index.
29 | public let end: Int
30 |
31 | public static func < (lhs: ID, rhs: ID) -> Bool {
32 | return lhs.start < rhs.start
33 | }
34 |
35 | public static func <= (lhs: ID, rhs: ID) -> Bool {
36 | return lhs.start <= rhs.start
37 | }
38 |
39 | public static func >= (lhs: ID, rhs: ID) -> Bool {
40 | return lhs.start >= rhs.start
41 | }
42 |
43 | public static func > (lhs: ID, rhs: ID) -> Bool {
44 | return lhs.start > rhs.start
45 | }
46 | }
47 |
48 | /// The identifier of the entity, unique within a `SpeechSegment`.
49 | /// Consists of the combination of start and end indices.
50 | public let id: ID
51 |
52 | /// The value of the entity, as detected by the API and defined by SAL.
53 | ///
54 | /// Given SAL `*book book a [burger restaurant](restaurant_type)` and an audio `book an italian place`,
55 | /// The value will be `italian place`.
56 | public let value: String
57 |
58 | /// The type (or class) of the entity, as detected by the API and defined by SAL.
59 | ///
60 | /// Given SAL `*book book a [burger restaurant](restaurant_type)` and an audio `book an italian place`,
61 | /// The type will be `restaurant_type`.
62 | public let type: String
63 |
64 | /// Start index of the entity, correlates with an index of some `SpeechTranscript` in a `SpeechSegment`.
65 | public let startIndex: Int
66 |
67 | /// End index of the entity, correlates with an index of some `SpeechTranscript` in a `SpeechSegment`.
68 | public let endIndex: Int
69 |
70 | /// The status of the entity.
71 | /// `true` for finalised entities, `false` otherwise.
72 | ///
73 | /// - Important: if the entity is not final, its values may change.
74 | public let isFinal: Bool
75 |
76 | /// Creates a new entity.
77 | ///
78 | /// - Parameters:
79 | /// - value: the value of the entity.
80 | /// - type: the type of the entity.
81 | /// - startIndex: the index of the beginning of the entity in a segment.
82 | /// - endIndex: the index of the end of the entity in a segment.
83 | /// - isFinal: the status of the entity.
84 | public init(value: String, type: String, startIndex: Int, endIndex: Int, isFinal: Bool) {
85 | self.value = value
86 | self.type = type
87 | self.startIndex = startIndex
88 | self.endIndex = endIndex
89 | self.isFinal = isFinal
90 | self.id = ID(start: startIndex, end: endIndex)
91 | }
92 | }
93 |
94 | // MARK: - Comparable protocol conformance.
95 |
96 | extension Entity: Comparable {
97 | public static func < (lhs: Entity, rhs: Entity) -> Bool {
98 | return lhs.id < rhs.id
99 | }
100 |
101 | public static func <= (lhs: Entity, rhs: Entity) -> Bool {
102 | return lhs.id <= rhs.id
103 | }
104 |
105 | public static func >= (lhs: Entity, rhs: Entity) -> Bool {
106 | return lhs.id >= rhs.id
107 | }
108 |
109 | public static func > (lhs: Entity, rhs: Entity) -> Bool {
110 | return lhs.id > rhs.id
111 | }
112 | }
113 |
114 | // MARK: - SluProtoParseable implementation.
115 |
116 | extension Entity: SpeechlyProtoParseable {
117 | typealias EntityProto = Speechly_Slu_V1_SLUEntity
118 |
119 | static func parseProto(message: EntityProto, isFinal: Bool) -> Entity {
120 | return self.init(
121 | value: message.value,
122 | type: message.entity,
123 | startIndex: Int(message.startPosition),
124 | endIndex: Int(message.endPosition),
125 | isFinal: isFinal
126 | )
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/Sources/Speechly/GRPC/GRPC.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import GRPC
3 | import NIO
4 |
5 | /// A function that creates a new gRPC channel for the provided address.
6 | /// It will also create a NIO eventloop group with the specified loop count.
7 | ///
8 | /// - Parameters:
9 | /// - addr: The address of the gRPC server to connect to.
10 | /// - loopCount: The number of event loops to create in the event loop group.
11 | /// - Returns: A gRPC channel connected to given server address and backed by a platform-specific eventloop group.
12 | public func makeChannel(addr: String, loopCount: Int) throws -> GRPCChannel {
13 | let group = PlatformSupport.makeEventLoopGroup(loopCount: loopCount)
14 | return try makeChannel(addr: addr, group: group)
15 | }
16 |
17 | /// A function that creates a new gRPC channel for the provided address.
18 | ///
19 | /// - Parameters:
20 | /// - addr: The address of the gRPC server to connect to.
21 | /// - group: The NIO evenloop group to use for backing the channel.
22 | /// - Returns: A gRPC channel connected to given server address and backed by given eventloop group.
23 | public func makeChannel(addr: String, group: EventLoopGroup) throws -> GRPCChannel {
24 | let address = try GRPCAddress(addr: addr)
25 | let builder = { () -> ClientConnection.Builder in
26 | switch address.secure {
27 | case true:
28 | return ClientConnection.usingPlatformAppropriateTLS(for: group)
29 | case false:
30 | return ClientConnection.insecure(group: group)
31 | }
32 | }()
33 |
34 | return builder.connect(host: address.host, port: address.port)
35 | }
36 |
37 | /// A function that creates new gRPC call options (metadata) containing an authorisation token.
38 | ///
39 | /// The resulting metadata has a pair that looks like `Authorization: Bearer ${token}`.
40 | ///
41 | /// - Parameters:
42 | /// - token: The token to use.
43 | /// - Returns: A `CallOptions` that contain custom metadata with the token as authorization bearer.
44 | public func makeTokenCallOptions(token: String) -> CallOptions {
45 | return CallOptions(
46 | customMetadata: [
47 | "Authorization": "Bearer \(token)"
48 | ]
49 | )
50 | }
51 |
52 |
--------------------------------------------------------------------------------
/Sources/Speechly/GRPC/GRPCAddress.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | /// A gRPC service address.
4 | ///
5 | /// Encapsulates together the host, the port and secure / non-secure properties for connecting to gRPC service endpoints.
6 | public struct GRPCAddress {
7 | /// Errors thrown when parsing the address.
8 | public enum ParseError: Error {
9 | /// Thrown when the address contains an invalid scheme.
10 | case unsupportedScheme
11 |
12 | /// Thrown when the address contains a URL that cannot be parsed with `URL.init(string: addr)`.
13 | case unsupportedURL
14 |
15 | /// Thrown when the address does not contain a valid host.
16 | case missingHost
17 | }
18 |
19 | /// The host of the remote gRPC service.
20 | public let host: String
21 |
22 | /// The port of the remote gRPC service.
23 | public let port: Int
24 |
25 | /// Whether the connection should use TLS.
26 | public let secure: Bool
27 |
28 | /// Creates a new gRPC address.
29 | ///
30 | /// - Parameters:
31 | /// - host: The host of the remote gRPC service.
32 | /// - port: The port of the remote gRPC service.
33 | /// - secure: Whether the connection to the service should use TLS.
34 | public init(host: String, port: Int, secure: Bool) {
35 | self.host = host
36 | self.port = port
37 | self.secure = secure
38 | }
39 |
40 | /// Creates a new gRPC address.
41 | ///
42 | /// - Parameters:
43 | /// - addr: The address of the remote gRPC service.
44 | ///
45 | /// - Important: The address should be a valid URI with one of the supported custom schemes:
46 | /// - `grpc://` represents the non-secure URI.
47 | /// - `grpc+tls://` represents a URI that should use TLS for connection.
48 | public init(addr: String) throws {
49 | let schemeIdx: String.Index
50 | let secure: Bool
51 |
52 | let r1 = addr.range(of: "grpc://")
53 | let r2 = addr.range(of: "grpc+tls://")
54 |
55 | if r1 != nil {
56 | schemeIdx = r1!.upperBound
57 | secure = false
58 | } else if r2 != nil {
59 | schemeIdx = r2!.upperBound
60 | secure = true
61 | } else {
62 | throw ParseError.unsupportedScheme
63 | }
64 |
65 | let url = URL(string: addr.suffix(from: schemeIdx).base)
66 | if url == nil {
67 | throw ParseError.unsupportedURL
68 | }
69 |
70 | if url!.host == nil {
71 | throw ParseError.missingHost
72 | }
73 |
74 | var port = secure ? 443 : 80
75 | if url!.port != nil {
76 | port = url!.port!
77 | }
78 |
79 | self.init(host: url!.host!, port: port, secure: secure)
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/Sources/Speechly/GRPC/Promisable.swift:
--------------------------------------------------------------------------------
1 | import NIO
2 |
3 | /// A protocol that defines methods for making succeeded and failed futures.
4 | public protocol Promisable {
5 | /// Creates a new succeeded future with value `value`.
6 | ///
7 | /// - Parameter value: The value to wrap in the future
8 | /// - Returns: An `EventLoopFuture` that always succeeds with `value`.
9 | func makeSucceededFuture<T>(_ value: T) -> EventLoopFuture<T>
10 |
11 | /// Creates a new failed future with error `error`.
12 | ///
13 | /// - Parameter error: The error to wrap in the future
14 | /// - Returns: An `EventLoopFuture` that always fails with `error`.
15 | func makeFailedFuture<T>(_ error: Error) -> EventLoopFuture<T>
16 | }
17 |
--------------------------------------------------------------------------------
/Sources/Speechly/Identity/ApiAccessToken.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | // MARK: - ApiAccessToken definition.
4 |
5 | /// A struct representing an access token returned by Speechly Identity service.
6 | ///
7 | /// The token is required for other application-specific Speechly APIs like Speechly SLU API.
8 | public struct ApiAccessToken: Hashable {
9 | /// Token authorisation scopes.
10 | /// They determine which services can be accessed with this token.
11 | public enum AuthScope {
12 | /// Speechly SLU service.
13 | case SLU
14 |
15 | /// Speechly WLU service.
16 | case WLU
17 | }
18 |
19 | /// Type of token, determines the possible Speechly Apps that are accessible.
20 | public enum TokenType {
21 | /// Token can be used to access a single application.
22 | case Application
23 | /// Token can be used with all applications in the project.
24 | case Project
25 | }
26 |
27 | /// Speechly application identifier.
28 | public let appId: UUID?
29 |
30 | /// Speechly project identifier
31 | public let projectId: UUID?
32 |
33 | /// Speechly device identifier.
34 | public let deviceId: UUID
35 |
36 | /// Token expiration timestamp.
37 | public let expiresAt: Date
38 |
39 | /// Authorised token scopes.
40 | public let scopes: Set<AuthScope>
41 |
42 | /// Raw token value which is passed to the services.
43 | public let tokenString: String
44 |
45 | /// Creates a new token from a raw token representation, returned by Identity API.
46 | ///
47 | /// - Important: This initialiser will return `nil` if the string value could not be decoded.
48 | ///
49 | /// - Parameter tokenString: Raw token value obtained from Identity API or cache.
50 | public init?(tokenString: String) {
51 | guard let decoded = parseToken(tokenString) else {
52 | return nil
53 | }
54 |
55 | let appId = decoded.appId.flatMap{UUID(uuidString: $0)}
56 | let projectId = decoded.projectId.flatMap{UUID(uuidString: $0)}
57 | if appId == nil && projectId == nil {
58 | return nil
59 | }
60 | guard let deviceId = UUID(uuidString: decoded.deviceId) else {
61 | return nil
62 | }
63 |
64 | self.init(
65 | appId: appId,
66 | projectId: projectId,
67 | deviceId: deviceId,
68 | expiresAt: Date(timeIntervalSince1970: TimeInterval(decoded.exp)),
69 | scopes: parseScope(decoded.scope),
70 | tokenString: tokenString
71 | )
72 | }
73 |
74 | /// Creates a new token.
75 | ///
76 | /// - Important: This initialiser WILL NOT attempt to decode and validate the `tokenString`.
77 | ///
78 | /// - Parameters:
79 | /// - appId: Speechly application identifier.
80 | /// - deviceId: Speechly device identifier.
81 | /// - expiresAt: Token expiration timestamp.
82 | /// - scopes: Authorised token scopes.
83 | /// - tokenString: Raw token value which is passed to the services.
84 | public init(appId: UUID?, projectId: UUID?, deviceId: UUID, expiresAt: Date, scopes: Set<AuthScope>, tokenString: String) {
85 | self.appId = appId
86 | self.projectId = projectId
87 | self.deviceId = deviceId
88 | self.expiresAt = expiresAt
89 | self.scopes = scopes
90 | self.tokenString = tokenString
91 | }
92 |
93 | /// Validates the token against provided identifiers and expiration time.
94 | ///
95 | /// - Parameters:
96 | /// - key: Speechly application or project identifier to match against.
97 | /// - deviceId: Speechly device identifier to match against.
98 | /// - expiresIn: Time interval within which the token should still be valid.
99 | /// - Returns: `true` if the token is valid, `false` otherwise.
100 | public func validate(key: UUID, deviceId: UUID, expiresIn: TimeInterval) -> Bool {
101 | return (self.appId == key || self.projectId == key) && self.deviceId == deviceId && self.validateExpiry(expiresIn: expiresIn)
102 | }
103 |
104 | /// Get the token key (appId or projectId) for caching or hashing.
105 | public func key() -> UUID {
106 | if let appId = self.appId {
107 | return appId
108 | }
109 | return self.projectId!
110 | }
111 |
112 | /// Validates token expiration time.
113 | ///
114 | /// - Parameters:
115 | /// - expiresIn: Time interval within which the token should still be valid.
116 | /// - Returns: `true` if the token will not expire in that time interval, `false` otherwise.
117 | public func validateExpiry(expiresIn: TimeInterval) -> Bool {
118 | return !self.expiresAt.timeIntervalSinceNow.isLessThanOrEqualTo(expiresIn)
119 | }
120 | }
121 |
122 | // MARK: - Internal token parsing logic
123 |
124 | private struct DecodedToken: Decodable {
125 | let appId: String?
126 | let projectId: String?
127 | let deviceId: String
128 | let scope: String
129 | let exp: Int
130 | }
131 |
132 | private func parseToken(_ token: String) -> DecodedToken? {
133 | guard case let parts = token.split(separator: "."), parts.count == 3 else {
134 | return nil
135 | }
136 |
137 | guard let decoded = base64Decode(String(parts[1])) else {
138 | return nil
139 | }
140 |
141 | return try? JSONDecoder().decode(DecodedToken.self, from: decoded)
142 | }
143 |
144 | private func parseScope(_ scope: String) -> Set<ApiAccessToken.AuthScope> {
145 | var scopes: Set<ApiAccessToken.AuthScope> = []
146 |
147 | for s in scope.split(separator: " ") {
148 | switch(s) {
149 | case "slu":
150 | scopes.update(with: ApiAccessToken.AuthScope.SLU)
151 | case "wlu":
152 | scopes.update(with: ApiAccessToken.AuthScope.WLU)
153 | default:
154 | continue
155 | }
156 | }
157 |
158 | return scopes
159 | }
160 |
161 | private func base64Decode(_ value: String) -> Data? {
162 | var st = value
163 | if value.count % 4 != 0 {
164 | st += String(repeating: "=", count: 4 - (value.count % 4))
165 | }
166 |
167 | return Data(base64Encoded: st)
168 | }
169 |
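A brief usage sketch of the token type above (illustrative only: `loadRawTokenSomehow()` is a placeholder for however the raw token string is obtained, and the one-hour window mirrors the cache expiry used elsewhere in the package):

    import Speechly

    // Placeholder: the raw token string would come from the Identity API or the cache.
    let rawToken: String = loadRawTokenSomehow()

    if let token = ApiAccessToken(tokenString: rawToken) {
        // Reuse the token only if it matches our identifiers and
        // stays valid for at least another hour.
        let reusable = token.validate(
            key: token.key(),
            deviceId: token.deviceId,
            expiresIn: 60 * 60
        )
        print("token scopes: \(token.scopes), reusable: \(reusable)")
    }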
--------------------------------------------------------------------------------
/Sources/Speechly/Identity/CachingIdentityClient.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import NIO
3 |
4 | // MARK: - CachingIdentityClient definition.
5 |
6 | /// A client for Speechly Identity gRPC API which provides token caching functionality.
7 | ///
8 | /// The cache is implemented as read-through and transparent for the consumer.
9 | public class CachingIdentityClient {
10 | /// The protocol constraints for backing base Identity client.
11 | public typealias PromisableClient = IdentityClientProtocol & Promisable
12 |
13 | private let baseClient: PromisableClient
14 | private let cache: CacheProtocol
15 |
16 | private let defaultExpiresIn: TimeInterval = 60 * 60
17 | private var memCache: [String: ApiAccessToken] = [:]
18 |
19 | /// Creates a new client.
20 | ///
21 | /// - Parameters:
22 | /// - baseClient: A base Identity client to use for fetching tokens.
23 | /// - cache: A cache to use for storing tokens.
24 | public init(baseClient: PromisableClient, cache: CacheProtocol) {
25 | self.baseClient = baseClient
26 | self.cache = cache
27 | }
28 | }
29 |
30 | // MARK: - IdentityClientProtocol conformance.
31 |
32 | extension CachingIdentityClient: IdentityClientProtocol {
33 | public func authenticate(appId: UUID, deviceId: UUID) -> EventLoopFuture<ApiAccessToken> {
34 | let token = loadToken(key: appId, deviceId: deviceId)
35 |
36 | if let token = token, token.validate(key: appId, deviceId: deviceId, expiresIn: defaultExpiresIn) {
37 | return self.baseClient.makeSucceededFuture(token)
38 | }
39 |
40 | return self.baseClient
41 | .authenticate(appId: appId, deviceId: deviceId)
42 | .map({ newToken in self.storeToken(token: newToken) })
43 | }
44 |
45 | public func authenticateProject(projectId: UUID, deviceId: UUID) -> EventLoopFuture<ApiAccessToken> {
46 | let token = loadToken(key: projectId, deviceId: deviceId)
47 |
48 | if let token = token, token.validate(key: projectId, deviceId: deviceId, expiresIn: defaultExpiresIn) {
49 | return self.baseClient.makeSucceededFuture(token)
50 | }
51 |
52 | return self.baseClient
53 | .authenticateProject(projectId: projectId, deviceId: deviceId)
54 | .map({ newToken in self.storeToken(token: newToken) })
55 | }
56 |
57 | private func loadToken(key: UUID, deviceId: UUID) -> ApiAccessToken? {
58 | let cacheKey = makeCacheKey(key: key, deviceId: deviceId)
59 |
60 | if let val = self.memCache[cacheKey] {
61 | return val
62 | }
63 |
64 | guard let cachedValue = cache.getValue(forKey: cacheKey) else {
65 | return nil
66 | }
67 |
68 | return ApiAccessToken(tokenString: cachedValue)
69 | }
70 |
71 | private func storeToken(token: ApiAccessToken) -> ApiAccessToken {
72 | let cacheKey = makeCacheKey(key: token.key(), deviceId: token.deviceId)
73 |
74 | self.memCache[cacheKey] = token
75 | self.cache.setValue(token.tokenString, forKey: cacheKey)
76 |
77 | return token
78 | }
79 |
80 | private func makeCacheKey(key: UUID, deviceId: UUID) -> String {
81 | return "authToken.\(key.hashValue).\(deviceId.hashValue)"
82 | }
83 | }
84 |
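A wiring sketch for the read-through cache described above (assumptions: the endpoint address and identifiers are placeholders, `UserDefaultsCache` is the `CacheProtocol` implementation from this package and is assumed to have a parameterless initialiser, and `.wait()` is used only to keep the sketch linear):

    import Foundation
    import NIO
    import Speechly

    let group = MultiThreadedEventLoopGroup(numberOfThreads: 1)
    let baseClient = try IdentityClient(addr: "grpc+tls://identity.example.com", loopGroup: group) // placeholder address
    let identityClient = CachingIdentityClient(baseClient: baseClient, cache: UserDefaultsCache())

    // The first authenticate call hits the Identity API; later calls reuse the cached
    // token while it remains valid for at least an hour (defaultExpiresIn above).
    let appId = UUID(uuidString: "00000000-0000-0000-0000-000000000000")! // placeholder appId
    let token = try identityClient.authenticate(appId: appId, deviceId: UUID()).wait()
    print("token expires at \(token.expiresAt)")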
--------------------------------------------------------------------------------
/Sources/Speechly/Identity/IdentityClient.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import NIO
3 | import GRPC
4 | import SpeechlyAPI
5 | import os.log
6 |
7 | // MARK: - IdentityClient definition.
8 |
9 | /// A client for Speechly Identity gRPC API.
10 | ///
11 | /// Exposes functionality for authenticating identifiers in exchange for API access tokens.
12 | public class IdentityClient {
13 | private let group: EventLoopGroup
14 | private let client: IdentityApiClient
15 |
16 | /// Creates a new client.
17 | ///
18 | /// - Parameters:
19 | /// - addr: The address of Speechly Identity API service.
20 | /// - loopGroup: `NIO.EventLoopGroup` to use for the client.
21 | public convenience init(addr: String, loopGroup: EventLoopGroup) throws {
22 | let channel = try makeChannel(addr: addr, group: loopGroup)
23 | let client = Speechly_Identity_V2_IdentityAPIClient(channel: channel)
24 |
25 | self.init(group: loopGroup, client: client)
26 | }
27 |
28 | /// Alias for Speechly Identity client protocol.
29 | public typealias IdentityApiClient = Speechly_Identity_V2_IdentityAPIClientProtocol
30 |
31 | /// Creates a new client.
32 | ///
33 | /// - Parameters:
34 | /// - loopGroup: `NIO.EventLoopGroup` to use for the client.
35 | /// - client: `IdentityApiClient` implementation.
36 | public init(group: EventLoopGroup, client: IdentityApiClient) {
37 | self.group = group
38 | self.client = client
39 | }
40 |
41 | deinit {
42 | do {
43 | try self.client.channel.close().wait()
44 | } catch {
45 | os_log("gRPC channel close failed: %@", log: speechly, type: .error, String(describing: error))
46 | }
47 | }
48 | }
49 |
50 | // MARK: - Promisable protocol conformance.
51 |
52 | extension IdentityClient: Promisable {
53 | public func makeFailedFuture(_ error: Error) -> EventLoopFuture<AuthToken> {
54 | return self
55 | .group.next()
56 | .makeFailedFuture(error)
57 | }
58 |
59 | public func makeSucceededFuture(_ value: AuthToken) -> EventLoopFuture<AuthToken> {
60 | return self
61 | .group.next()
62 | .makeSucceededFuture(value)
63 | }
64 | }
65 |
66 | // MARK: - IdentityClientProtocol conformance.
67 |
68 | extension IdentityClient: IdentityClientProtocol {
69 | typealias IdentityLoginRequest = Speechly_Identity_V2_LoginRequest
70 |
71 | /// Errors returned by the client.
72 | public enum IdentityClientError: Error {
73 | /// The error returned if the API returns an invalid access token.
74 | case invalidTokenPayload
75 | }
76 |
77 | public func authenticate(appId: UUID, deviceId: UUID) -> EventLoopFuture<ApiAccessToken> {
78 | let request = IdentityLoginRequest.with {
79 | $0.application.appID = appId.uuidString.lowercased()
80 | $0.deviceID = deviceId.uuidString.lowercased()
81 | }
82 |
83 | return self.client.login(request).response.flatMapThrowing { response throws in
84 | guard let token = ApiAccessToken(tokenString: response.token) else {
85 | throw IdentityClientError.invalidTokenPayload
86 | }
87 | os_log("Login: appId=%@, deviceId=%@", log: speechly, type: .debug, request.application.appID, request.deviceID)
88 |
89 | return token
90 | }
91 | }
92 |
93 | public func authenticateProject(projectId: UUID, deviceId: UUID) -> EventLoopFuture<ApiAccessToken> {
94 | let request = IdentityLoginRequest.with {
95 | $0.project.projectID = projectId.uuidString.lowercased()
96 | $0.deviceID = deviceId.uuidString.lowercased()
97 | }
98 |
99 | return self.client.login(request).response.flatMapThrowing { response throws in
100 | guard let token = ApiAccessToken(tokenString: response.token) else {
101 | throw IdentityClientError.invalidTokenPayload
102 | }
103 | os_log("Login: projectId=%@, deviceId=%@", log: speechly, type: .debug, request.project.projectID, request.deviceID)
104 |
105 | return token
106 | }
107 | }
108 | }
109 |
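A non-blocking sketch of the login flow above (the address and identifiers are placeholders; `invalidTokenPayload` is the error thrown when the returned token string cannot be decoded):

    import Foundation
    import NIO
    import Speechly

    let group = MultiThreadedEventLoopGroup(numberOfThreads: 1)
    let client = try IdentityClient(addr: "grpc+tls://identity.example.com", loopGroup: group) // placeholder address

    client.authenticate(appId: UUID(), deviceId: UUID()) // placeholder identifiers
        .whenComplete { result in
            switch result {
            case let .success(token):
                print("authenticated, token expires at \(token.expiresAt)")
            case let .failure(error):
                // Covers IdentityClientError.invalidTokenPayload as well as transport errors.
                print("authentication failed: \(error)")
            }
        }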
--------------------------------------------------------------------------------
/Sources/Speechly/Identity/IdentityClientProtocol.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import NIO
3 |
4 | /// Protocol that defines a client for Speechly Identity API.
5 | public protocol IdentityClientProtocol {
6 | /// Exchanges application and device identifiers for an access token to Speechly API.
7 | ///
8 | /// - Parameters:
9 | /// - appId: Speechly application identifier.
10 | /// - deviceId: Device identifier.
11 | /// - Returns: A future that succeeds with an access token or fails with an error if authentication fails.
12 | func authenticate(appId: UUID, deviceId: UUID) -> EventLoopFuture<ApiAccessToken>
13 |
14 | /// Exchanges project and device identifiers for an access token to Speechly API.
15 | ///
16 | /// - Parameters:
17 | /// - projectId: Speechly project identifier. All applications in the project are accessible during connection.
18 | /// - deviceId: Device identifier.
19 | /// - Returns: A future that succeeds with an access token or fails with an error if authentication fails.
20 | func authenticateProject(projectId: UUID, deviceId: UUID) -> EventLoopFuture<ApiAccessToken>
21 | }
22 |
--------------------------------------------------------------------------------
/Sources/Speechly/Intent.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SpeechlyAPI
3 |
4 | // MARK: - SpeechIntent definition.
5 |
6 | /// A speech intent.
7 | ///
8 | /// An intent is part of a phrase which defines the action of the phrase,
9 | /// e.g. a phrase "book a restaurant and send an invitation to John" contains two intents,
10 | /// "book" and "send an invitation".
11 | ///
12 | /// Intents can and should be used to dispatch the action that the user wants to do in the app
13 | /// (e.g. book a meeting, schedule a flight, reset the form).
14 | public struct Intent: Hashable {
15 | /// An empty intent. Can be used as a default value elsewhere.
16 | public static let Empty = Intent(value: "", isFinal: false)
17 |
18 | /// The value of the intent, as defined in Speechly application configuration.
19 | /// e.g. in the example `*book book a [burger restaurant](restaurant_type)` it would be `book`.
20 | public let value: String
21 |
22 | /// The status of the intent.
23 | /// `true` for finalised intents, `false` otherwise.
24 | ///
25 | /// - Important: if the intent is not final, its values may change.
26 | public let isFinal: Bool
27 |
28 | /// Creates a new intent.
29 | ///
30 | /// - Parameters:
31 | /// - value: the value of the intent.
32 | /// - isFinal: the status of the intent.
33 | public init(value: String, isFinal: Bool) {
34 | self.value = value
35 | self.isFinal = isFinal
36 | }
37 | }
38 |
39 | // MARK: - Identifiable protocol conformance.
40 |
41 | extension Intent: Identifiable {
42 | public var id: String {
43 | return self.value
44 | }
45 | }
46 |
47 | // MARK: - Comparable protocol conformance.
48 |
49 | extension Intent: Comparable {
50 | public static func < (lhs: Intent, rhs: Intent) -> Bool {
51 | return lhs.value < rhs.value
52 | }
53 |
54 | public static func <= (lhs: Intent, rhs: Intent) -> Bool {
55 | return lhs.value <= rhs.value
56 | }
57 |
58 | public static func >= (lhs: Intent, rhs: Intent) -> Bool {
59 | return lhs.value >= rhs.value
60 | }
61 |
62 | public static func > (lhs: Intent, rhs: Intent) -> Bool {
63 | return lhs.value > rhs.value
64 | }
65 | }
66 |
67 | // MARK: - SluProtoParseable implementation.
68 |
69 | extension Intent: SpeechlyProtoParseable {
70 | typealias IntentProto = Speechly_Slu_V1_SLUIntent
71 |
72 | static func parseProto(message: IntentProto, isFinal: Bool) -> Intent {
73 | return self.init(value: message.intent, isFinal: isFinal)
74 | }
75 | }
76 |
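The doc comment above suggests dispatching app actions on the intent; a small sketch (the intent names are placeholders from a hypothetical booking configuration):

    import Speechly

    func handle(_ intent: Intent) {
        // Only act on finalised intents; tentative values may still change.
        guard intent.isFinal else { return }

        switch intent.value {
        case "book":
            print("start the booking flow")
        case "cancel":
            print("cancel the current booking")
        default:
            print("unhandled intent: \(intent.value)")
        }
    }

    handle(Intent(value: "book", isFinal: true))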
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/lock.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "lock.pdf",
5 | "idiom" : "universal"
6 | }
7 | ],
8 | "info" : {
9 | "author" : "xcode",
10 | "version" : 1
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/lock.imageset/lock.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/speechly/ios-client/f2cddd13a1379a0233ad33049c1ac1baadd010dc/Sources/Speechly/Resources/Images.xcassets/lock.imageset/lock.pdf
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/mic-button-frame.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "mic-button-frame.pdf",
5 | "idiom" : "universal"
6 | }
7 | ],
8 | "info" : {
9 | "author" : "xcode",
10 | "version" : 1
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/mic-button-frame.imageset/mic-button-frame.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/speechly/ios-client/f2cddd13a1379a0233ad33049c1ac1baadd010dc/Sources/Speechly/Resources/Images.xcassets/mic-button-frame.imageset/mic-button-frame.pdf
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/mic-button-fx.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "mic-button-fx.pdf",
5 | "idiom" : "universal"
6 | }
7 | ],
8 | "info" : {
9 | "author" : "xcode",
10 | "version" : 1
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/mic-button-fx.imageset/mic-button-fx.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/speechly/ios-client/f2cddd13a1379a0233ad33049c1ac1baadd010dc/Sources/Speechly/Resources/Images.xcassets/mic-button-fx.imageset/mic-button-fx.pdf
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/mic-no-permission.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "mic-no-permission.pdf",
5 | "idiom" : "universal"
6 | }
7 | ],
8 | "info" : {
9 | "author" : "xcode",
10 | "version" : 1
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/mic-no-permission.imageset/mic-no-permission.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/speechly/ios-client/f2cddd13a1379a0233ad33049c1ac1baadd010dc/Sources/Speechly/Resources/Images.xcassets/mic-no-permission.imageset/mic-no-permission.pdf
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/mic-no-support.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "mic-no-support.pdf",
5 | "idiom" : "universal"
6 | }
7 | ],
8 | "info" : {
9 | "author" : "xcode",
10 | "version" : 1
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/mic-no-support.imageset/mic-no-support.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/speechly/ios-client/f2cddd13a1379a0233ad33049c1ac1baadd010dc/Sources/Speechly/Resources/Images.xcassets/mic-no-support.imageset/mic-no-support.pdf
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/mic.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "mic.pdf",
5 | "idiom" : "universal"
6 | }
7 | ],
8 | "info" : {
9 | "author" : "xcode",
10 | "version" : 1
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/mic.imageset/mic.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/speechly/ios-client/f2cddd13a1379a0233ad33049c1ac1baadd010dc/Sources/Speechly/Resources/Images.xcassets/mic.imageset/mic.pdf
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/power-on.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "power-on.pdf",
5 | "idiom" : "universal"
6 | }
7 | ],
8 | "info" : {
9 | "author" : "xcode",
10 | "version" : 1
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/Sources/Speechly/Resources/Images.xcassets/power-on.imageset/power-on.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/speechly/ios-client/f2cddd13a1379a0233ad33049c1ac1baadd010dc/Sources/Speechly/Resources/Images.xcassets/power-on.imageset/power-on.pdf
--------------------------------------------------------------------------------
/Sources/Speechly/SLU/SluClient.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import Dispatch
3 | import GRPC
4 | import NIO
5 | import SpeechlyAPI
6 | import os.log
7 |
8 | // MARK: - SLU service definition.
9 |
10 | /// Possible invalid states of the client, e.g. if `startContext` is called without connecting to the API first.
11 | public enum InvalidSLUState: Error {
12 | case notConnected
13 | case contextAlreadyStarted
14 | case contextNotStarted
15 | }
16 |
17 | /// An SluClientProtocol that is implemented on top of the public Speechly SLU gRPC API.
18 | /// Uses `swift-grpc` for handling gRPC streams and connectivity.
19 | public class SluClient {
20 |
21 | typealias DisconnectTimer = DispatchWorkItem
22 |
23 | private enum State {
24 | case idle
25 | case connected(DisconnectTimer, SluStream)
26 | case streaming(SluStream)
27 | }
28 |
29 | private var state: State = .idle
30 | private let group: EventLoopGroup
31 | private let client: SluApiClient
32 |
33 | private let delegateQueue: DispatchQueue
34 | private weak var _delegate: SluClientDelegate? = nil
35 |
36 | /// Creates a new client.
37 | ///
38 | /// - Parameters:
39 | /// - addr: The address of Speechly SLU API to connect to.
40 | /// - loopGroup: The `NIO.EventLoopGroup` to use in the client.
41 | /// - delegateQueue: The `DispatchQueue` to use for calling the delegate.
42 | public convenience init(
43 | addr: String,
44 | loopGroup: EventLoopGroup,
45 | delegateQueue: DispatchQueue = DispatchQueue(label: "com.speechly.iosclient.SluClient.delegateQueue")
46 | ) throws {
47 | let channel = try makeChannel(addr: addr, group: loopGroup)
48 | let client = Speechly_Slu_V1_SLUClient(channel: channel)
49 |
50 | self.init(client: client, group: loopGroup, delegateQueue: delegateQueue)
51 | }
52 |
53 | /// An alias for Speechly SLU client protocol.
54 | public typealias SluApiClient = Speechly_Slu_V1_SLUClientProtocol
55 |
56 | /// Creates a new client.
57 | ///
58 | /// - Parameters:
59 | /// - client: The `SluApiClient` to use for creating SLU streams.
60 | /// - group: The `NIO.EventLoopGroup` to use in the client.
61 | /// - delegateQueue: The `DispatchQueue` to use for calling the delegate.
62 | public init(client: SluApiClient, group: EventLoopGroup, delegateQueue: DispatchQueue) {
63 | self.client = client
64 | self.group = group
65 | self.delegateQueue = delegateQueue
66 | }
67 |
68 | deinit {
69 | do {
70 | try self.disconnect().wait()
71 | } catch {
72 | os_log("SLU stream disconnect failed: %@", log: speechly, type: .error, String(describing: error))
73 | }
74 |
75 | do {
76 | try self.client.channel.close().wait()
77 | } catch {
78 | os_log("gRPC channel close failed: %@", log: speechly, type: .error, String(describing: error))
79 | }
80 | }
81 | }
82 |
83 | // MARK: - SluClientProtocol conformance.
84 |
85 | extension SluClient: SluClientProtocol {
86 | public weak var delegate: SluClientDelegate? {
87 | get {
88 | return self._delegate
89 | }
90 |
91 | set(newDelegate) {
92 | self.delegateQueue.sync(flags: .barrier) {
93 | self._delegate = newDelegate
94 | }
95 | }
96 | }
97 |
98 | private typealias SluStream = BidirectionalStreamingCall<SluRequestProto, SluResponseProto>
99 | private typealias SluRequestProto = Speechly_Slu_V1_SLURequest
100 | private typealias SluResponseProto = Speechly_Slu_V1_SLUResponse
101 |
102 | public func connect(token: ApiAccessToken, config: SluConfig) -> EventLoopFuture<Void> {
103 | switch self.state {
104 | case .streaming, .connected(_, _):
105 | return self.group.next().makeSucceededVoidFuture()
106 | case .idle:
107 | return self.makeStream(token: token, config: config)
108 | .map { (timer, stream) in
109 | self.state = .connected(timer, stream)
110 | }
111 | }
112 | }
113 |
114 | public func disconnect() -> EventLoopFuture<Void> {
115 | switch self.state {
116 | case .streaming(let stream):
117 | return self.stopContext()
118 | .flatMap { self.stopStream(stream: stream) }
119 | .map { _ in
120 | return
121 | }
122 | case .connected(let timer, let stream):
123 | return self.stopStream(stream: stream)
124 | .map { _ in
125 | timer.cancel()
126 | return
127 | }
128 | case .idle:
129 | return self.group.next().makeSucceededVoidFuture()
130 | }
131 | }
132 |
133 | public func startContext(appId: String? = nil) -> EventLoopFuture<Void> {
134 | switch self.state {
135 | case .idle:
136 | return self.group.next().makeFailedFuture(InvalidSLUState.notConnected)
137 | case .streaming(_):
138 | return self.group.next().makeFailedFuture(InvalidSLUState.contextAlreadyStarted)
139 | case let .connected(timer, stream):
140 | timer.cancel()
141 | return stream.sendMessage(SluRequestProto.with {
142 | $0.start = SluStartProto.with {
143 | $0.appID = appId ?? ""
144 | $0.options = [SluStartOptionProto.with {
145 | $0.key = "timezone"
146 | $0.value = [TimeZone.current.identifier]
147 | }]
148 | }
149 | })
150 | .map {
151 | self.state = .streaming(stream)
152 | }
153 | }
154 | }
155 |
156 | public func stopContext() -> EventLoopFuture<Void> {
157 | switch self.state {
158 | case .idle:
159 | return self.group.next().makeFailedFuture(InvalidSLUState.notConnected)
160 | case .connected(_, _):
161 | return self.group.next().makeFailedFuture(InvalidSLUState.contextNotStarted)
162 | case let .streaming(stream):
163 | return stream
164 | .sendMessage(SluRequestProto.with {
165 | $0.stop = SluStopProto()
166 | })
167 | .map {
168 | self.state = .connected(self.makeDisconnectTimer(), stream)
169 | }
170 | }
171 | }
172 |
173 | public func resume() -> EventLoopFuture<Void> {
174 | // If there is somehow still an active stream, discard it, because it's most likely corrupted.
175 | return self.disconnect()
176 | }
177 |
178 | public func suspend() -> EventLoopFuture<Void> {
179 | return self.disconnect()
180 | }
181 |
182 | public func write(data: Data) -> EventLoopFuture<Void> {
183 | switch self.state {
184 | case .idle:
185 | return self.group.next().makeFailedFuture(InvalidSLUState.notConnected)
186 | case .connected(_, _):
187 | return self.group.next().makeFailedFuture(InvalidSLUState.contextNotStarted)
188 | case let .streaming(stream):
189 | return stream
190 | .sendMessage(SluRequestProto.with {
191 | $0.audio = data
192 | })
193 | }
194 | }
195 |
196 | private typealias SluConfigProto = Speechly_Slu_V1_SLUConfig
197 | private typealias SluStartProto = Speechly_Slu_V1_SLUStart
198 | private typealias SluStopProto = Speechly_Slu_V1_SLUStop
199 | private typealias SluStartOptionProto = Speechly_Slu_V1_SLUStart.Option
200 |
201 | private func makeStream(token: ApiAccessToken, config: SluConfig) -> EventLoopFuture<(DisconnectTimer, SluStream)> {
202 | os_log("Connecting to SLU API", log: speechly, type: .debug)
203 | let callOptions = makeTokenCallOptions(token: token.tokenString)
204 |
205 | let stream = self.client.stream(
206 | callOptions: callOptions,
207 | handler: { response in self.handleResponse(response: response) }
208 | )
209 |
210 | stream.status.whenComplete { result in
211 | switch result {
212 | case let .failure(error):
213 | self.delegateQueue.async {
214 | self.delegate?.sluClientDidCatchError(self, error: error)
215 | }
216 | case let .success(status):
217 | self.delegateQueue.async {
218 | self.delegate?.sluClientDidStopStream(self, status: status)
219 | }
220 | }
221 | self.state = .idle
222 | }
223 |
224 | return stream
225 | .sendMessage(SluRequestProto.with {
226 | $0.config = SluConfigProto.with {
227 | $0.encoding = .linear16
228 | $0.sampleRateHertz = Int32(config.sampleRate)
229 | $0.channels = Int32(config.channels)
230 | }
231 | })
232 | .map {
233 | return (self.makeDisconnectTimer(), stream)
234 | }
235 | }
236 |
237 | private func makeDisconnectTimer() -> DisconnectTimer {
238 | let task = DispatchWorkItem {
239 | do {
240 | try self.disconnect().wait()
241 | } catch {
242 | os_log("Disconnect stream failed: %@", log: speechly, type: .error, String(describing: error))
243 | }
244 | }
245 | DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 30, execute: task)
246 | return task
247 | }
248 |
249 | private func stopStream(stream: SluStream) -> EventLoopFuture<GRPCStatus> {
250 | os_log("Disconnect SLU stream", log: speechly, type: .debug)
251 | // Make a promise that's passed to stream.sendEnd().
252 | let promise = self.group.next().makePromise(of: Void.self)
253 |
254 | // Once stream is canceled, we want to wait until the server closes the stream from its end.
255 | let future: EventLoopFuture<GRPCStatus> = promise.futureResult
256 | .flatMap {
257 | self.state = .idle
258 | return stream.status
259 | }
260 |
261 | stream.sendEnd(promise: promise)
262 | // Cancel the stream.
263 | //stream.cancel(promise: promise)
264 | return future
265 | }
266 |
267 | private func handleResponse(response: SluResponseProto) -> Void {
268 | self.delegateQueue.async {
269 | let contextId = response.audioContext
270 | let segmentId = Int(response.segmentID)
271 |
272 | switch(response.streamingResponse) {
273 | case .started:
274 | self.delegate?.sluClientDidReceiveContextStart(self, contextId: contextId)
275 | case .finished:
276 | self.delegate?.sluClientDidReceiveContextStop(self, contextId: contextId)
277 | case let .tentativeTranscript(transcript):
278 | self.delegate?.sluClientDidReceiveTentativeTranscript(
279 | self, contextId: contextId, segmentId: segmentId, transcript: transcript
280 | )
281 | case let .tentativeEntities(entities):
282 | self.delegate?.sluClientDidReceiveTentativeEntities(
283 | self, contextId: contextId, segmentId: segmentId, entities: entities
284 | )
285 | case let .tentativeIntent(intent):
286 | self.delegate?.sluClientDidReceiveTentativeIntent(
287 | self, contextId: contextId, segmentId: segmentId, intent: intent
288 | )
289 | case let .transcript(transcript):
290 | self.delegate?.sluClientDidReceiveTranscript(
291 | self, contextId: contextId, segmentId: segmentId, transcript: transcript
292 | )
293 | case let .entity(entity):
294 | self.delegate?.sluClientDidReceiveEntity(
295 | self, contextId: contextId, segmentId: segmentId, entity: entity
296 | )
297 | case let .intent(intent):
298 | self.delegate?.sluClientDidReceiveIntent(
299 | self, contextId: contextId, segmentId: segmentId, intent: intent
300 | )
301 | case .segmentEnd:
302 | self.delegate?.sluClientDidReceiveSegmentEnd(self, contextId: contextId, segmentId: segmentId)
303 | default:
304 | return
305 | }
306 | }
307 | }
308 | }
309 |
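A condensed, blocking sketch of the state transitions implemented above (placeholders: the address, `token`, `audioChunk` and `myDelegate`; the `SluConfig` memberwise initialiser is internal by default, so this assumes module-internal use or an equivalent public initialiser; `.wait()` is used only to keep the sketch linear):

    import Foundation
    import NIO
    import Speechly

    let group = MultiThreadedEventLoopGroup(numberOfThreads: 1)
    let sluClient = try SluClient(addr: "grpc+tls://slu.example.com", loopGroup: group) // placeholder address
    sluClient.delegate = myDelegate // an SluClientDelegate kept alive elsewhere (the property is weak)

    let config = SluConfig(sampleRate: 16_000, channels: 1) // 16 kHz mono, 16-bit PCM

    try sluClient.connect(token: token, config: config).wait() // .idle -> .connected
    try sluClient.startContext().wait()                        // .connected -> .streaming
    try sluClient.write(data: audioChunk).wait()               // sent as an SLURequest.audio frame
    try sluClient.stopContext().wait()                         // .streaming -> .connected (30 s disconnect timer armed)
    try sluClient.disconnect().wait()                          // closes the stream; state returns to .idle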
--------------------------------------------------------------------------------
/Sources/Speechly/SLU/SluClientProtocol.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import NIO
3 | import GRPC
4 | import SpeechlyAPI
5 |
6 | // MARK: - SluClientProtocol definition.
7 |
8 | /// A protocol defining a client for Speechly SLU API.
9 | ///
10 | /// It exposes functionality for starting and stopping SLU recognition streams
11 | /// and a delegate for receiving the responses.
12 | ///
13 | /// - Important: Current approach allows only one recognition stream to be active at any time.
14 | public protocol SluClientProtocol {
15 | /// A delegate which is called when the client receives messages from the API or catches errors.
16 | var delegate: SluClientDelegate? { get set }
17 |
18 | /// Connects to the SLU API.
19 | ///
20 | /// - Important: Calling `connect` again will first disconnect and then connect again.
21 | ///
22 | /// - Parameters:
23 | /// - token: An auth token received from Speechly Identity API.
24 | /// - config: The configuration of the SLU stream.
25 | /// - Returns: A future which will be fulfilled when the stream has been connected.
26 | func connect(token: ApiAccessToken, config: SluConfig) -> EventLoopFuture<Void>
27 |
28 | /// Disconnects the current connection to the SLU API.
29 | ///
30 | /// If there is an active `Context`, it is cancelled.
31 | ///
32 | /// - Returns: A future which is fulfilled when the stream has been disconnected.
33 | func disconnect() -> EventLoopFuture<Void>
34 |
35 | /// Starts a new SLU recognition stream.
36 | ///
37 | /// - Important: Calling `startContext` again will stop the previous context and start a new one.
38 | ///
39 | /// - Parameters:
40 | /// - appId: The target appId for the audio, if not set in the token.
41 | /// - Returns: A future which will be fulfilled when the stream has been started.
42 | func startContext(appId: String?) -> EventLoopFuture<Void>
43 |
44 | /// Stops the current SLU recognition stream.
45 | ///
46 | /// - Returns: A future which will be fulfilled when the stream has been closed from the client side.
47 | func stopContext() -> EventLoopFuture<Void>
48 |
49 | /// Suspends the client by terminating any in-flight streams and disconnecting the channels.
50 | ///
51 | /// - Returns: A future which will be fulfilled when the streams and channels are cleaned up.
52 | func suspend() -> EventLoopFuture<Void>
53 |
54 | /// Resumes the client by restoring the channels and cleaning up any stale streams.
55 | ///
56 | /// - Returns: A future which will be fulfilled when the channels are restored.
57 | func resume() -> EventLoopFuture<Void>
58 |
59 | /// Writes audio data to the current stream.
60 | ///
61 | /// - Important: If there is currently no active stream, this will return a failed future,
62 | /// indicating that the write has been lost.
63 | ///
64 | /// - Parameters:
65 | /// - data: The audio data to write to the stream.
66 | /// - Returns: A future which will be fulfilled when the data has been sent.
67 | func write(data: Data) -> EventLoopFuture<Void>
68 | }
69 |
70 | /// SLU stream configuration describes the audio data sent to the stream.
71 | /// If misconfigured, the recognition stream will not produce any useful results.
72 | public struct SluConfig {
73 | /// The sample rate of the audio sent to the stream, in Hertz.
74 | public let sampleRate: Double
75 |
76 | /// The number of channels in the audio sent to the stream.
77 | public let channels: UInt32
78 | }
79 |
80 | // MARK: - SluClientDelegate definition.
81 |
82 | /// Delegate called when an SLU client receives messages from the API or catches an error.
83 | ///
84 | /// The intended use of this protocol is with `SluClientProtocol`.
85 | ///
86 | /// - Important: In order to avoid retain cycles, classes implementing this delegate
87 | /// MUST NOT maintain a strong reference to the `SluClientProtocol`.
88 | public protocol SluClientDelegate: AnyObject {
89 | /// An alias for tentative transcript message.
90 | typealias TentativeTranscript = Speechly_Slu_V1_SLUTentativeTranscript
91 |
92 | /// An alias for tentative entities message.
93 | typealias TentativeEntities = Speechly_Slu_V1_SLUTentativeEntities
94 |
95 | /// An alias for tentative intent message.
96 | typealias TentativeIntent = Speechly_Slu_V1_SLUIntent
97 |
98 | /// An alias for final transcript message.
99 | typealias Transcript = Speechly_Slu_V1_SLUTranscript
100 |
101 | /// An alias for final entity message.
102 | typealias Entity = Speechly_Slu_V1_SLUEntity
103 |
104 | /// An alias for final intent message.
105 | typealias Intent = Speechly_Slu_V1_SLUIntent
106 |
107 | /// Called when the client catches an error.
108 | ///
109 | /// - Parameters:
110 | /// - error: The error which was caught.
111 | func sluClientDidCatchError(_ sluClient: SluClientProtocol, error: Error)
112 |
113 | /// Called when a recognition stream is stopped from the server side.
114 | ///
115 | /// - Parameters:
116 | /// - status: The status that the stream was closed with.
117 | func sluClientDidStopStream(_ sluClient: SluClientProtocol, status: GRPCStatus)
118 |
119 | /// Called when a recognition stream receives an audio context start message.
120 | ///
121 | /// - Parameters:
122 | /// - contextId: The ID of the context that was started by the server.
123 | func sluClientDidReceiveContextStart(_ sluClient: SluClientProtocol, contextId: String)
124 |
125 | /// Called when a recognition stream receives an audio context stop message.
126 | ///
127 | /// - Parameters:
128 | /// - contextId: The ID of the context that was stopped by the server.
129 | func sluClientDidReceiveContextStop(_ sluClient: SluClientProtocol, contextId: String)
130 |
131 | /// Called when a recognition stream receives a segment end message.
132 | ///
133 | /// - Parameters:
134 | /// - contextId: The ID of the context that the segment belongs to.
135 | /// - segmentId: The ID of the segment which has ended.
136 | func sluClientDidReceiveSegmentEnd(
137 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int
138 | )
139 |
140 | /// Called when a recognition stream receives a tentative transcript message.
141 | ///
142 | /// - Parameters:
143 | /// - contextId: The ID of the context that the segment belongs to.
144 | /// - segmentId: The ID of the segment which the transcript belongs to.
145 | /// - transcript: The tentative transcript message.
146 | func sluClientDidReceiveTentativeTranscript(
147 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, transcript: TentativeTranscript
148 | )
149 |
150 | /// Called when a recognition stream receives a tentative entities message.
151 | ///
152 | /// - Parameters:
153 | /// - contextId: The ID of the context that the segment belongs to.
154 | /// - segmentId: The ID of the segment which the entities belong to.
155 | /// - entities: The tentative entities message.
156 | func sluClientDidReceiveTentativeEntities(
157 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, entities: TentativeEntities
158 | )
159 |
160 | /// Called when a recognition stream receives a tentative intent message.
161 | ///
162 | /// - Parameters:
163 | /// - contextId: The ID of the context that the segment belongs to.
164 | /// - segmentId: The ID of the segment which the intent belongs to.
165 | /// - intent: The tentative intent message.
166 | func sluClientDidReceiveTentativeIntent(
167 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, intent: TentativeIntent
168 | )
169 |
170 | /// Called when a recognition stream receives a final transcript message.
171 | ///
172 | /// - Parameters:
173 | /// - contextId: The ID of the context that the segment belongs to.
174 | /// - segmentId: The ID of the segment which the transcript belongs to.
175 | /// - transcript: The transcript message.
176 | func sluClientDidReceiveTranscript(
177 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, transcript: Transcript
178 | )
179 |
180 | /// Called when a recognition stream receives a final entity message.
181 | ///
182 | /// - Parameters:
183 | /// - contextId: The ID of the context that the segment belongs to.
184 | /// - segmentId: The ID of the segment which the entity belongs to.
185 | /// - entity: The entity message.
186 | func sluClientDidReceiveEntity(
187 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, entity: Entity
188 | )
189 |
190 | /// Called when a recognition stream receives a final intent message.
191 | ///
192 | /// - Parameters:
193 | /// - contextId: The ID of the context that the segment belongs to.
194 | /// - segmentId: The ID of the segment which the intent belongs to.
195 | /// - intent: The intent message.
196 | func sluClientDidReceiveIntent(
197 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, intent: Intent
198 | )
199 | }
200 |
201 | // MARK: - SluClientDelegate default implementation.
202 |
203 | public extension SluClientDelegate {
204 | func sluClientDidCatchError(_ sluClient: SluClientProtocol, error: Error){}
205 | func sluClientDidStopStream(_ sluClient: SluClientProtocol, status: GRPCStatus){}
206 | func sluClientDidReceiveContextStart(_ sluClient: SluClientProtocol, contextId: String){}
207 | func sluClientDidReceiveContextStop(_ sluClient: SluClientProtocol, contextId: String){}
208 | func sluClientDidReceiveSegmentEnd(
209 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int
210 | ){}
211 | func sluClientDidReceiveTentativeTranscript(
212 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, transcript: TentativeTranscript
213 | ){}
214 | func sluClientDidReceiveTentativeEntities(
215 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, entities: TentativeEntities
216 | ){}
217 | func sluClientDidReceiveTentativeIntent(
218 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, intent: TentativeIntent
219 | ){}
220 | func sluClientDidReceiveTranscript(
221 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, transcript: Transcript
222 | ){}
223 | func sluClientDidReceiveEntity(
224 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, entity: Entity
225 | ){}
226 | func sluClientDidReceiveIntent(
227 | _ sluClient: SluClientProtocol, contextId: String, segmentId: Int, intent: Intent
228 | ){}
229 | }
230 |
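Because the extension above provides defaults, a delegate only needs the callbacks it actually uses (assuming the defaults cover the rest). A minimal sketch, with the proto type of the transcript spelled out explicitly:

    import Speechly
    import SpeechlyAPI

    final class TranscriptLogger: SluClientDelegate {
        func sluClientDidReceiveTranscript(
            _ sluClient: SluClientProtocol, contextId: String, segmentId: Int,
            transcript: Speechly_Slu_V1_SLUTranscript
        ) {
            // `word` is the raw recognised word carried by the SLU response.
            print("[\(contextId)/\(segmentId)] \(transcript.word)")
        }

        func sluClientDidCatchError(_ sluClient: SluClientProtocol, error: Error) {
            print("SLU error: \(error)")
        }
    }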
--------------------------------------------------------------------------------
/Sources/Speechly/Segment.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | // MARK: - SpeechSegment definition.
4 |
5 | /// A segment is a part of a recognition context (or a phrase) which is defined by an intent.
6 | ///
7 | /// e.g. a phrase "book a restaurant and send an invitation to John" contains two intents,
8 | /// "book" and "send an invitation". Thus, the phrase will also contain two segments, "book a restaurant" and
9 | /// "send an invitation to John". A segment has to have exactly one intent that defines it, but it's allowed to have
10 | /// any number of entities and transcripts.
11 | ///
12 | /// A segment can be final or tentative. Final segments are guaranteed to only contain final intent, entities
13 | /// and transcripts. Tentative segments can have a mix of final and tentative parts.
14 | public struct Segment: Hashable, Identifiable {
15 | private var _entities: [Entity] = []
16 | private var _transcripts: [Transcript] = []
17 | private var _indexedEntities: [Entity.ID:Entity] = [:]
18 | private var _indexedTranscripts: [Int:Transcript] = [:]
19 |
20 | /// A unique identifier of the segment.
21 | public let id: String
22 |
23 | /// The identifier of the segment, which is unique when combined with `contextId`.
24 | public let segmentId: Int
25 |
26 | /// A unique identifier of the `SpeechContext` that the segment belongs to.
27 | public let contextId: String
28 |
29 | /// The status of the segment. `true` when the segment is finalised, `false` otherwise.
30 | public var isFinal: Bool = false
31 |
32 | /// The intent of the segment. Returns an empty tentative intent by default.
33 | public var intent: Intent = Intent.Empty
34 |
35 | /// The entities belonging to the segment.
36 | public var entities: [Entity] {
37 | get {
38 | return self._entities
39 | }
40 |
41 | set(newValue) {
42 | self._entities = newValue.sorted()
43 |
44 | self._indexedEntities = newValue.reduce(into: [Entity.ID:Entity]()) { (acc, entity) in
45 | acc[entity.id] = entity
46 | }
47 | }
48 | }
49 |
50 | /// The transcripts belonging to the segment.
51 | public var transcripts: [Transcript] {
52 | get {
53 | return self._transcripts
54 | }
55 |
56 | set(newValue) {
57 | self._transcripts = newValue.sorted()
58 |
59 | self._indexedTranscripts = newValue.reduce(into: [Int:Transcript]()) { (acc, transcript) in
60 | acc[transcript.index] = transcript
61 | }
62 | }
63 | }
64 |
65 | /// Creates a new tentative segment with empty intent, entities and transcripts.
66 | ///
67 | /// - Parameters:
68 | /// - segmentId: The identifier of the segment within a `SpeechContext`.
69 | /// - contextId: The identifier of the `SpeechContext` that this segment belongs to.
70 | public init(segmentId: Int, contextId: String) {
71 | self.segmentId = segmentId
72 | self.contextId = contextId
73 | self.id = "\(contextId)-\(segmentId)"
74 | }
75 |
76 | /// Creates a new segment with provided parameters.
77 | ///
78 | /// - Parameters:
79 | /// - segmentId: The identifier of the segment within a `SpeechContext`.
80 | /// - contextId: The identifier of the `SpeechContext` that this segment belongs to.
81 | /// - isFinal: Indicates whether the segment is final or tentative.
82 | /// - intent: The intent of the segment.
83 | /// - entities: The entities belonging to the segment.
84 | /// - transcripts: The transcripts belonging to the segment.
85 | ///
86 | /// - Important: this initialiser does not check for consistency. Passing non-final intent, entities or transcripts
87 | /// alongside `isFinal: true` will violate the guarantee that a final segment will only contain final parts.
88 | public init(
89 | segmentId: Int,
90 | contextId: String,
91 | isFinal: Bool,
92 | intent: Intent,
93 | entities: [Entity],
94 | transcripts: [Transcript]
95 | ) {
96 | self.init(segmentId: segmentId, contextId: contextId)
97 |
98 | self.isFinal = isFinal
99 | self.intent = intent
100 | self.entities = entities
101 | self.transcripts = transcripts
102 | }
103 | }
104 |
105 | // MARK: - Comparable protocol conformance.
106 |
107 | extension Segment: Comparable {
108 | public static func < (lhs: Segment, rhs: Segment) -> Bool {
109 | return lhs.id < rhs.id
110 | }
111 |
112 | public static func <= (lhs: Segment, rhs: Segment) -> Bool {
113 | return lhs.id <= rhs.id
114 | }
115 |
116 | public static func >= (lhs: Segment, rhs: Segment) -> Bool {
117 | return lhs.id >= rhs.id
118 | }
119 |
120 | public static func > (lhs: Segment, rhs: Segment) -> Bool {
121 | return lhs.id > rhs.id
122 | }
123 | }
124 |
125 | // MARK: - Parsing logic implementation.
126 |
127 | extension Segment {
128 | enum SegmentParseError: Error {
129 | case transcriptFinalised, entityFinalised, intentFinalised
130 | case emptyTranscript, emptyIntent
131 | case segmentFinalised
132 | }
133 |
134 | mutating func setIntent(_ value: Intent) throws {
135 | if self.isFinal {
136 | throw SegmentParseError.segmentFinalised
137 | }
138 |
139 | if self.intent.isFinal {
140 | throw SegmentParseError.intentFinalised
141 | }
142 |
143 | self.intent = value
144 | }
145 |
146 | mutating func addEntity(_ value: Entity) throws {
147 | if self.isFinal {
148 | throw SegmentParseError.segmentFinalised
149 | }
150 |
151 | if let e = self._indexedEntities[value.id], e.isFinal {
152 | throw SegmentParseError.entityFinalised
153 | }
154 |
155 | self._indexedEntities[value.id] = value
156 | self._entities = Array(self._indexedEntities.values).sorted()
157 | }
158 |
159 | mutating func addTranscript(_ value: Transcript) throws {
160 | if self.isFinal {
161 | throw SegmentParseError.segmentFinalised
162 | }
163 |
164 | if let t = self._indexedTranscripts[value.index], t.isFinal {
165 | throw SegmentParseError.transcriptFinalised
166 | }
167 |
168 | self._indexedTranscripts[value.index] = value
169 | self._transcripts = Array(self._indexedTranscripts.values).sorted()
170 | }
171 |
172 | mutating func finalise() throws {
173 | if self.isFinal {
174 | return
175 | }
176 |
177 | if !self.intent.isFinal {
178 | throw SegmentParseError.emptyIntent
179 | }
180 |
181 | for (k, v) in self._indexedTranscripts {
182 | if !v.isFinal {
183 | self._indexedTranscripts.removeValue(forKey: k)
184 | }
185 | }
186 |
187 | if self.transcripts.count == 0 {
188 | throw SegmentParseError.emptyTranscript
189 | }
190 |
191 | for (k, v) in self._indexedEntities {
192 | if !v.isFinal {
193 | self._indexedEntities.removeValue(forKey: k)
194 | }
195 | }
196 |
197 | self.isFinal = true
198 | self._entities = Array(self._indexedEntities.values).sorted()
199 | self._transcripts = Array(self._indexedTranscripts.values).sorted()
200 | }
201 | }
202 |
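A module-internal sketch of the parsing helpers above (they are not public API; the intent and transcript values are placeholders). `finalise()` drops any remaining tentative parts and requires a final intent and at least one final transcript:

    var segment = Segment(segmentId: 0, contextId: "context-1")

    try segment.setIntent(Intent(value: "book", isFinal: true))
    try segment.addTranscript(
        Transcript(index: 0, value: "BOOK", startOffset: 0.0, endOffset: 0.3, isFinal: true)
    )
    try segment.addTranscript(
        Transcript(index: 1, value: "TABLE", startOffset: 0.3, endOffset: 0.6, isFinal: true)
    )

    try segment.finalise()
    print(segment.isFinal) // true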
--------------------------------------------------------------------------------
/Sources/Speechly/SpeechlyProtoParseable.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SwiftProtobuf
3 |
4 | /// A protocol for data types that can be parsed from protocol buffers messages.
5 | ///
6 | /// Unfortunately there isn't a good way to restrict this protocol to a subset of messages specific to Speechly SLU API,
7 | /// hence it's using a general `SwiftProtobuf.Message` as a type.
8 | protocol SpeechlyProtoParseable {
9 | /// The message that can be parsed.
10 | associatedtype Message = SwiftProtobuf.Message
11 |
12 | /// Creates a new instance of `Self` from the `message`.
13 | ///
14 | /// - Parameters:
15 | /// - message: The protobuf message to parse.
16 | /// - isFinal: Whether the message comes from a final API response.
17 | /// - Returns: An instance of `Self`.
18 | static func parseProto(message: Message, isFinal: Bool) -> Self
19 | }
20 |
--------------------------------------------------------------------------------
/Sources/Speechly/SpeechlyProtocol.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import NIO
3 |
4 | // MARK: - SpeechClientProtocol definition.
5 |
6 | /// A speech client protocol.
7 | ///
8 | /// The purpose of a speech client is to abstract away the handling of audio recording and API streaming,
9 | /// providing the user with a high-level abstraction over the microphone speech recognition.
10 | public protocol SpeechlyProtocol {
11 | /// A delegate which is called when the client has received and parsed messages from the API.
12 | /// The delegate will also be called when the client catches an error.
13 | var delegate: SpeechlyDelegate? { get set }
14 |
15 | /// Start a new recognition context and unmute the microphone.
16 | ///
17 | /// - Parameters:
18 | /// - appId: Define a specific Speechly appId to send the audio to. Not needed if the appId can be inferred from login.
19 | ///
20 | /// - Important: Calling `start` again after another `start` call will stop the previous recognition context.
21 | /// Starting a recognition context is an asynchronous operation.
22 | /// Use the `speechlyClientDidStartContext` method in the `SpeechlyDelegate` protocol for acknowledgments from the client.
23 | func startContext(appId: String?)
24 |
25 | /// Stop current recognition context and mute the microphone.
26 | ///
27 | /// - Important: Calling `stop` again after another `stop` call is a no-op.
28 | /// Stopping a recognition context is an asynchronous operation.
29 | /// Use the `speechlyClientDidStopContext` method in the `SpeechlyDelegate` protocol for acknowledgments from the client.
30 | func stopContext()
31 |
32 | /// Suspend the client, releasing any resources and cleaning up any pending contexts.
33 | ///
34 | /// This method should be used when your application is about to enter background state.
35 | func suspend()
36 |
37 | /// Resume the client, re-initialising necessary resources to continue the operation.
38 | ///
39 | /// This method should be used when your application is about to leave background state.
40 | func resume() throws
41 | }
42 |
43 | // MARK: - SpeechClientDelegate definition.
44 |
45 | /// Delegate called when a speech client handles messages from the API or catches an error.
46 | ///
47 | /// The intended use of this protocol is with `SpeechlyProtocol`.
48 | ///
49 | /// - Important: In order to avoid retain cycles, classes implementing this delegate
50 | /// MUST NOT maintain a strong reference to the `SpeechlyProtocol`.
51 | public protocol SpeechlyDelegate: AnyObject {
52 | /// Called when the client catches an error.
53 | ///
54 | /// - Parameters:
55 | /// - error: The error which was caught.
56 | func speechlyClientDidCatchError(_ speechlyClient: SpeechlyProtocol, error: SpeechlyError)
57 |
58 | /// Called after the client has acknowledged a recognition context start.
59 | func speechlyClientDidStartContext(_ speechlyClient: SpeechlyProtocol)
60 |
61 | /// Called after the client has acknowledged a recognition context stop.
62 | func speechlyClientDidStopContext(_ speechlyClient: SpeechlyProtocol)
63 |
64 | /// Called after the client has processed an update to the current `Segment`.
65 | ///
66 | /// When the client receives messages from the API, it will use them to update the state of current speech segment,
67 | /// and dispatch the updated state to the delegate. The delegate can use these updates to react to the user input
68 | /// by using the intent, entities and transcripts contained in the segment.
69 | ///
70 | /// Only one segment is active at a time, but since the processing is asynchronous,
71 | /// it is possible to have out-of-order delivery of segments.
72 | ///
73 | /// - Parameters:
74 | /// - segment: The speech segment that has been updated.
75 | func speechlyClientDidUpdateSegment(_ speechlyClient: SpeechlyProtocol, segment: Segment)
76 |
77 | /// Called after the client has received a new transcript message from the API.
78 | ///
79 | /// - Parameters:
80 | /// - contextId: The ID of the recognition context that the transcript belongs to.
81 | /// - segmentId: The ID of the speech segment that the transcript belongs to.
82 | /// - transcript: The transcript received from the API.
83 | func speechlyClientDidReceiveTranscript(
84 | _ speechlyClient: SpeechlyProtocol, contextId: String, segmentId: Int, transcript: Transcript
85 | )
86 |
87 | /// Called after the client has received a new entity message from the API.
88 | ///
89 | /// - Parameters:
90 | /// - contextId: The ID of the recognition context that the entity belongs to.
91 | /// - segmentId: The ID of the speech segment that the entity belongs to.
92 | /// - entity: The entity received from the API.
93 | func speechlyClientDidReceiveEntity(
94 | _ speechlyClient: SpeechlyProtocol, contextId: String, segmentId: Int, entity: Entity
95 | )
96 |
97 | /// Called after the client has received a new intent message from the API.
98 | ///
99 | /// - Parameters:
100 | /// - contextId: The ID of the recognition context that the intent belongs to.
101 | /// - segmentId: The ID of the speech segment that the intent belongs to.
102 | /// - intent: The intent received from the API.
103 | func speechlyClientDidReceiveIntent(
104 | _ speechlyClient: SpeechlyProtocol, contextId: String, segmentId: Int, intent: Intent
105 | )
106 | }
107 |
108 | /// Errors caught by `SpeechlyProtocol` and dispatched to `SpeechlyDelegate`.
109 | public enum SpeechlyError: Error {
110 | /// A network-level error.
111 | /// Usually these errors are unrecoverable and require a full restart of the client.
112 | case networkError(String)
113 |
114 | /// An error within the audio recorder stack.
115 | /// Normally these errors are recoverable and do not require any special handling.
116 | /// However, these errors will result in downgraded recognition performance.
117 | case audioError(String)
118 |
119 | /// An error within the API.
120 | /// Normally these errors are recoverable, but they may result in dropped API responses.
121 | case apiError(String)
122 |
123 | /// An error within the API message parsing logic.
124 | /// These errors are fully recoverable, but will result in missed speech segment updates.
125 | case parseError(String)
126 | }
127 |
128 | // MARK: - SpeechClientDelegate default implementation.
129 |
130 | public extension SpeechlyDelegate {
131 |
132 | func speechlyClientDidStartContext(_ speechlyClient: SpeechlyProtocol) {}
133 | func speechlyClientDidStopContext(_ speechlyClient: SpeechlyProtocol) {}
134 | func speechlyClientDidCatchError(_ speechlyClient: SpeechlyProtocol, error: SpeechlyError) {}
135 | func speechlyClientDidUpdateSegment(_ speechlyClient: SpeechlyProtocol, segment: Segment) {}
136 | func speechlyClientDidReceiveTranscript(
137 | _ speechlyClient: SpeechlyProtocol, contextId: String, segmentId: Int, transcript: Transcript
138 | ) {}
139 | func speechlyClientDidReceiveEntity(
140 | _ speechlyClient: SpeechlyProtocol, contextId: String, segmentId: Int, entity: Entity
141 | ) {}
142 | func speechlyClientDidReceiveIntent(
143 | _ speechlyClient: SpeechlyProtocol, contextId: String, segmentId: Int, intent: Intent
144 | ) {}
145 | }
146 |
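A minimal delegate sketch against the protocol above; only the overridden callbacks matter, the rest fall back to the no-op defaults:

    import Speechly

    final class SegmentRenderer: SpeechlyDelegate {
        func speechlyClientDidUpdateSegment(_ speechlyClient: SpeechlyProtocol, segment: Segment) {
            // Render the words recognised so far; `isFinal` tells whether they may still change.
            let text = segment.transcripts.map { $0.value }.joined(separator: " ")
            print("[\(segment.contextId)/\(segment.segmentId)] \(text) (final: \(segment.isFinal))")
        }

        func speechlyClientDidCatchError(_ speechlyClient: SpeechlyProtocol, error: SpeechlyError) {
            print("Speechly error: \(error)")
        }
    }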
--------------------------------------------------------------------------------
/Sources/Speechly/Transcript.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import SpeechlyAPI
3 |
4 | // MARK: - SpeechTranscript definition.
5 |
6 | /// A speech transcript.
7 | ///
8 | /// A transcript is a single word in a phrase recognised from the audio.
9 | /// e.g. a phrase "two glasses" will have two transcripts, "two" and "glasses".
10 | public struct Transcript: Hashable {
11 | /// The index of the transcript in the phrase.
12 | public let index: Int
13 |
14 | /// The value of the transcript, e.g. "glasses".
15 | /// The case is not guaranteed; it is up to the consumer to decide whether to change it.
16 | public let value: String
17 |
18 | /// The time offset of the beginning of the transcript in the audio, relative to the beginning of the phrase.
19 | public let startOffset: TimeInterval
20 |
21 | /// The time offset of the end of the transcript in the audio, relative to the beginning of the phrase.
22 | public let endOffset: TimeInterval
23 |
24 | /// The status of the transcript.
25 | /// `true` for finalised transcripts, `false` otherwise.
26 | ///
27 | /// - Important: if the transcript is not final, its value may change.
28 | public let isFinal: Bool
29 |
30 | /// Creates a new transcript.
31 | ///
32 | /// - Parameters:
33 | /// - index: the index of the transcript.
34 | /// - value: the value of the transcript.
35 | /// - startOffset: the time offset of the beginning of the transcript in the phrase.
36 | /// - endOffset: the time offset of the end of the transcript in the phrase.
37 | /// - isFinal: the status of the transcript.
38 | public init(index: Int, value: String, startOffset: TimeInterval, endOffset: TimeInterval, isFinal: Bool) {
39 | self.value = value
40 | self.index = index
41 | self.startOffset = startOffset
42 | self.endOffset = endOffset
43 | self.isFinal = isFinal
44 | }
45 | }
46 |
47 | // MARK: - Identifiable protocol conformance.
48 |
49 | extension Transcript: Identifiable {
50 | public var id: Int {
51 | return self.index
52 | }
53 | }
54 |
55 | // MARK: - `Comparable` protocol conformance.
56 |
57 | extension Transcript: Comparable {
58 | public static func < (lhs: Transcript, rhs: Transcript) -> Bool {
59 | return lhs.index < rhs.index
60 | }
61 |
62 | public static func <= (lhs: Transcript, rhs: Transcript) -> Bool {
63 | return lhs.index <= rhs.index
64 | }
65 |
66 | public static func >= (lhs: Transcript, rhs: Transcript) -> Bool {
67 | return lhs.index >= rhs.index
68 | }
69 |
70 | public static func > (lhs: Transcript, rhs: Transcript) -> Bool {
71 | return lhs.index > rhs.index
72 | }
73 | }
74 |
75 | // MARK: - SluProtoParseable implementation.
76 |
77 | extension Transcript: SpeechlyProtoParseable {
78 | typealias TranscriptProto = Speechly_Slu_V1_SLUTranscript
79 |
80 | static func parseProto(message: TranscriptProto, isFinal: Bool) -> Transcript {
81 | return self.init(
82 | index: Int(message.index),
83 | value: message.word,
84 | startOffset: Double(message.startTime) / 1000,
85 | endOffset: Double(message.endTime) / 1000,
86 | isFinal: isFinal
87 | )
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/Sources/Speechly/UI/MicrophoneButtonView.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | import AVFoundation
3 | import UIKit
4 | import SnapKit
5 |
6 | public protocol MicrophoneButtonDelegate {
7 | func didOpenMicrophone(_ button: MicrophoneButtonView)
8 |
9 | func didCloseMicrophone(_ button: MicrophoneButtonView)
10 |
11 | func speechButtonImageForAuthorizationStatus(_ button: MicrophoneButtonView, status: AVAuthorizationStatus) -> UIImage?
12 | }
13 |
14 | public extension MicrophoneButtonDelegate {
15 | func speechButtonImageForAuthorizationStatus(_ button: MicrophoneButtonView, status: AVAuthorizationStatus) -> UIImage? {
16 | return nil
17 | }
18 | }
19 |
20 | public class MicrophoneButtonView: UIView {
21 |
22 | private let diameter: CGFloat
23 |
24 | var delegate: MicrophoneButtonDelegate?
25 |
26 | public init(diameter: CGFloat = 80, delegate: MicrophoneButtonDelegate) {
27 | self.diameter = diameter
28 | self.delegate = delegate
29 |
30 | super.init(frame: .zero)
31 |
32 | addSubview(contentView)
33 | addSubview(speechBubbleView)
34 |
35 | contentView.addSubview(blurEffectView)
36 | contentView.addSubview(borderView)
37 | contentView.addSubview(iconView)
38 |
39 | snp.makeConstraints { (make) in
40 | make.width.height.equalTo(diameter)
41 | }
42 |
43 | contentView.snp.makeConstraints { (make) in
44 | make.center.equalToSuperview()
45 | }
46 |
47 | blurEffectView.snp.makeConstraints { (make) in
48 | make.center.equalToSuperview()
49 | }
50 |
51 | borderView.snp.makeConstraints { (make) in
52 | make.edges.equalToSuperview()
53 | }
54 |
55 | iconView.snp.makeConstraints { (make) in
56 | make.center.equalToSuperview()
57 | }
58 |
59 | speechBubbleView.snp.makeConstraints { (make) in
60 | make.centerX.equalToSuperview()
61 | make.bottom.equalTo(snp.top).offset(-4)
62 | }
63 |
64 | let tap = UITapGestureRecognizer(target: self, action: #selector(didTap))
65 | addGestureRecognizer(tap)
66 |
67 | let press = UILongPressGestureRecognizer(target: self, action: #selector(didPress(_:)))
68 | press.minimumPressDuration = 0.1
69 | addGestureRecognizer(press)
70 |
71 | let center = NotificationCenter.default
72 | center.addObserver(forName: UIApplication.didBecomeActiveNotification, object: nil, queue: nil) { [weak self] _ in
73 | self?.initializeRotationAnimation()
74 | self?.reloadAuthorizationStatus()
75 | }
76 |
77 | speechBubbleView.hide(animated: false)
78 |
79 | func initializeState() {
80 | borderImage = image(named: "mic-button-frame")
81 |
82 | blurEffectImage = image(named: "mic-button-fx")
83 |
84 | holdToTalkText = "Hold to talk"
85 |
86 | isPressed = false
87 | }
88 |
89 | initializeState()
90 | initializeRotationAnimation()
91 |
92 | isAccessibilityElement = true
93 | accessibilityTraits = [.button]
94 | }
95 |
96 | required init?(coder: NSCoder) {
97 | fatalError("init(coder:) has not been implemented")
98 | }
99 |
100 | public var borderImage: UIImage? {
101 | didSet {
102 | borderView.image = borderImage
103 | }
104 | }
105 |
106 | public var blurEffectImage: UIImage? {
107 | didSet {
108 | blurEffectView.image = blurEffectImage
109 | }
110 | }
111 |
112 | public var holdToTalkText: String! {
113 | didSet {
114 | speechBubbleView.text = holdToTalkText.uppercased()
115 | }
116 | }
117 |
118 | public var pressedScale: CGFloat = 1.5
119 |
120 | private var normalScale: CGFloat {
121 | return diameter / borderView.intrinsicContentSize.width
122 | }
123 |
124 | public private(set) var isPressed: Bool = false {
125 | didSet {
126 | let scale = normalScale * (isPressed ? pressedScale : 1)
127 |
128 | contentView.transform = CGAffineTransform(scaleX: scale, y: scale)
129 | blurEffectView.alpha = isPressed ? 1 : 0
130 |
131 | if isPressed != oldValue {
132 | if audioAuthorizationStatus == .authorized {
133 | if let delegate = delegate {
134 | if isPressed {
135 | delegate.didOpenMicrophone(self)
136 | } else {
137 | delegate.didCloseMicrophone(self)
138 | }
139 | }
140 | } else {
141 | if isPressed {
142 | didTap()
143 | }
144 | }
145 | }
146 |
147 | if speechBubbleView.isShowing {
148 | speechBubbleView.hide()
149 | }
150 | }
151 | }
152 |
153 | private let contentView = UIView()
154 |
155 | private let iconView = UIImageView()
156 |
157 | private let borderView = UIImageView()
158 |
159 | private let blurEffectView = UIImageView()
160 |
161 | private let speechBubbleView = SpeechBubbleView()
162 |
163 | private func initializeRotationAnimation() {
164 | blurEffectView.startRotating()
165 | borderView.startRotating()
166 | }
167 |
168 | @objc private func didTap() {
169 | switch audioAuthorizationStatus {
170 | case .authorized:
171 | if speechBubbleView.isShowing {
172 | speechBubbleView.pulse()
173 | } else {
174 | speechBubbleView.show()
175 | }
176 |
177 | case .notDetermined:
178 | AVCaptureDevice.requestAccess(for: .audio) { _ in
179 | DispatchQueue.main.async {
180 | self.reloadAuthorizationStatus()
181 | }
182 | }
183 |
184 | case .denied, .restricted:
185 | let settingsURL = URL(string: UIApplication.openSettingsURLString)!
186 | UIApplication.shared.open(settingsURL, options: [:], completionHandler: nil)
187 |
188 | @unknown default:
189 | break
190 | }
191 | }
192 |
193 | @objc private func didPress(_ sender: UILongPressGestureRecognizer) {
194 | let point = sender.location(in: self)
195 | let isInside = self.point(inside: point, with: nil)
196 |
197 | let isPressed: Bool
198 |
199 | switch sender.state {
200 | case .began:
201 | isPressed = true
202 | case .ended, .cancelled:
203 | isPressed = false
204 | default:
205 | isPressed = isInside
206 | }
207 |
208 | if isPressed != self.isPressed {
209 | UIView.animate(withDuration: 0.2, delay: 0, options: .curveEaseInOut, animations: {
210 | self.isPressed = isPressed
211 | }, completion: nil)
212 | }
213 | }
214 |
215 | private var audioAuthorizationStatus: AVAuthorizationStatus {
216 | return AVCaptureDevice.authorizationStatus(for: .audio)
217 | }
218 |
219 | public func reloadAuthorizationStatus() {
220 | if let image = delegate?.speechButtonImageForAuthorizationStatus(self, status: audioAuthorizationStatus) {
221 | iconView.image = image
222 | } else {
223 | switch audioAuthorizationStatus {
224 | case .authorized:
225 | iconView.image = image(named: "mic")
226 | case .notDetermined:
227 | iconView.image = image(named: "power-on")
228 | case .denied, .restricted:
229 | iconView.image = image(named: "mic-no-permission")
230 | @unknown default:
231 | break
232 | }
233 | }
234 | }
235 |
236 | private func image(named name: String) -> UIImage? {
237 | return UIImage(named: name, in: Bundle.module, compatibleWith: nil)
238 | }
239 | }
240 |
241 | private extension UIView {
242 |
243 | func startRotating(duration: TimeInterval = 2) {
244 | let rotation = CABasicAnimation(keyPath: "transform.rotation.z")
245 | rotation.toValue = NSNumber(value: Double.pi * 2)
246 | rotation.duration = duration
247 | rotation.isCumulative = true
248 | rotation.repeatCount = .infinity
249 | layer.add(rotation, forKey: "rotation")
250 | }
251 |
252 | func stopRotating() {
253 | layer.removeAnimation(forKey: "rotation")
254 | }
255 | }
256 |
--------------------------------------------------------------------------------
/Sources/Speechly/UI/SpeechBubbleView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SpeechBubbleView.swift
3 | // Speechly
4 | //
5 | // Created by Janne Käki on 4.2.2021.
6 | //
7 |
8 | import Foundation
9 | import UIKit
10 | import SnapKit
11 |
12 | public class SpeechBubbleView: UIView {
13 |
14 | public init() {
15 | super.init(frame: .zero)
16 |
17 | pointerView.transform = CGAffineTransform(rotationAngle: CGFloat.pi / 4)
18 |
19 | addSubview(contentView)
20 |
21 | contentView.addSubview(pointerView)
22 | contentView.addSubview(textLabel)
23 |
24 | contentView.snp.makeConstraints { (make) in
25 | make.top.left.right.equalToSuperview()
26 | }
27 |
28 | pointerView.snp.makeConstraints { (make) in
29 | make.centerX.equalToSuperview()
30 | make.centerY.equalTo(contentView.snp.bottom)
31 | make.width.height.equalTo(16)
32 | make.bottom.equalTo(self).inset(4)
33 | }
34 |
35 | textLabel.snp.makeConstraints { (make) in
36 | make.top.bottom.equalToSuperview().inset(12)
37 | make.left.right.equalToSuperview().inset(24)
38 | }
39 |
40 | func initializeStyle() {
41 | font = UIFont(name: "AvenirNextCondensed-Bold", size: 17)
42 | color = UIColor.darkGray
43 | textColor = UIColor.white
44 | }
45 |
46 | initializeStyle()
47 | }
48 |
49 | required init?(coder: NSCoder) {
50 | fatalError("init(coder:) has not been implemented")
51 | }
52 |
53 | public var isShowing: Bool {
54 | return alpha > 0
55 | }
56 |
57 | public func show(animated: Bool = true) {
58 | let updates = {
59 | self.alpha = 1
60 | self.transform = .identity
61 | }
62 |
63 | if animated {
64 | UIView.animate(withDuration: 0.3, delay: 0, options: .curveEaseInOut, animations: updates, completion: { _ in
65 | self.restartAutohideTimer()
66 | })
67 | } else {
68 | updates()
69 | restartAutohideTimer()
70 | }
71 | }
72 |
73 | public func hide(animated: Bool = true) {
74 | let updates = {
75 | self.alpha = 0
76 | self.transform = CGAffineTransform(scaleX: 0.8, y: 0.8)
77 | }
78 |
79 | if animated {
80 | UIView.animate(withDuration: 0.3, delay: 0, options: .curveEaseInOut, animations: updates, completion: nil)
81 | } else {
82 | updates()
83 | }
84 | }
85 |
86 | public var autohideInterval: TimeInterval? = 3 {
87 | didSet {
88 | if autohideInterval != oldValue {
89 | restartAutohideTimer()
90 | }
91 | }
92 | }
93 |
94 | public func pulse(duration: TimeInterval = 0.5, scale: CGFloat = 1.2) {
95 | UIView.animate(withDuration: duration / 2, delay: 0, options: .curveEaseInOut, animations: {
96 | self.transform = CGAffineTransform(scaleX: scale, y: scale)
97 | }, completion: { _ in
98 | UIView.animate(withDuration: duration / 2, delay: 0, options: .curveEaseInOut, animations: {
99 | self.transform = .identity
100 | }, completion: nil)
101 | })
102 |
103 | restartAutohideTimer()
104 | }
105 |
106 | public var text: String? {
107 | get {
108 | return textLabel.text
109 | }
110 | set {
111 | textLabel.text = newValue
112 | }
113 | }
114 |
115 | public var font: UIFont! {
116 | get {
117 | return textLabel.font
118 | }
119 | set {
120 | textLabel.font = newValue
121 | }
122 | }
123 |
124 | public var textColor: UIColor! {
125 | get {
126 | return textLabel.textColor
127 | }
128 | set {
129 | textLabel.textColor = newValue
130 | }
131 | }
132 |
133 | public var color: UIColor! {
134 | didSet {
135 | contentView.backgroundColor = color
136 | pointerView.backgroundColor = color
137 | }
138 | }
139 |
140 | private let textLabel = UILabel()
141 |
142 | private let contentView = UIView()
143 |
144 | private let pointerView = UIView()
145 |
146 | private var autohideTimer: Timer?
147 |
148 | private func restartAutohideTimer() {
149 | autohideTimer?.invalidate()
150 |
151 | guard let autohideInterval = autohideInterval else {
152 | return
153 | }
154 |
155 | autohideTimer = Timer.scheduledTimer(withTimeInterval: autohideInterval, repeats: false) { [weak self] _ in
156 | guard let self = self, self.isShowing else {
157 | return
158 | }
159 | self.hide()
160 | }
161 | }
162 | }
163 |
--------------------------------------------------------------------------------
/Sources/Speechly/UI/TranscriptView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SpeechTranscriptView.swift
3 | // Speechly
4 | //
5 | // Created by Janne Käki on 5.2.2021.
6 | //
7 |
8 | import Foundation
9 | import UIKit
10 | import SnapKit
11 |
12 | public class TranscriptView: UIView {
13 |
14 | public init() {
15 | super.init(frame: .zero)
16 |
17 | backgroundColor = UIColor.black
18 |
19 | alpha = 0
20 | }
21 |
22 | required init?(coder: NSCoder) {
23 | fatalError("init(coder:) has not been implemented")
24 | }
25 |
26 | public private(set) var segment: Speechly.Segment?
27 |
28 | public func configure(segment: Speechly.Segment?, animated: Bool) {
29 | self.segment = segment
30 |
31 | reloadText(animated: animated)
32 | }
33 |
34 | public func hide(animated: Bool) {
35 | configure(segment: nil, animated: animated)
36 | }
37 |
38 | public var font: UIFont = UIFont(name: "AvenirNextCondensed-Bold", size: 20)! {
39 | didSet {
40 | reloadText()
41 | }
42 | }
43 |
44 | public var textColor: UIColor = UIColor.white {
45 | didSet {
46 | reloadText()
47 | }
48 | }
49 |
50 | public var highlightedTextColor: UIColor = UIColor(red: 30 / 255.0, green: 211 / 255.0, blue: 242 / 255.0, alpha: 1) {
51 | didSet {
52 | reloadText()
53 | }
54 | }
55 |
56 | public var autohideInterval: TimeInterval? = 3 {
57 | didSet {
58 | if autohideInterval != oldValue {
59 | restartAutohideTimer()
60 | }
61 | }
62 | }
63 |
64 | private var labels: [SpeechTranscriptLabel] = []
65 |
66 | private var autohideTimer: Timer?
67 |
68 | private func reloadText(animated: Bool = false) {
69 | if let segment = segment {
70 | for (index, transcript) in segment.transcripts.enumerated() {
71 | var label: SpeechTranscriptLabel! = (index < labels.count) ? labels[index] : nil
72 | if label == nil {
73 | label = SpeechTranscriptLabel(parent: self)
74 | addSubview(label)
75 | label.snp.makeConstraints { (make) in
76 | make.edges.equalToSuperview().inset(12)
77 | }
78 | labels.append(label)
79 | label.alpha = 0
80 | }
81 |
82 | let entity = segment.entity(for: transcript)
83 |
84 | label.configure(segment: segment, transcript: transcript, entity: entity)
85 |
86 | if animated {
87 | UIView.animate(withDuration: 0.3, delay: 0, options: .curveEaseInOut, animations: {
88 | label.alpha = 1
89 | }, completion: nil)
90 | } else {
91 | label.alpha = 1
92 | }
93 | }
94 | }
95 |
96 | for (index, label) in labels.enumerated() {
97 | if index >= (segment?.transcripts ?? []).count {
98 | if animated {
99 | UIView.animate(withDuration: 0.3, delay: 0, options: .curveEaseInOut, animations: {
100 | label.alpha = 0
101 | }, completion: { _ in
102 | label.text = " "
103 | })
104 | } else {
105 | label.text = " "
106 | label.alpha = 0
107 | }
108 | }
109 | }
110 |
111 | if animated {
112 | UIView.animate(withDuration: 0.3, delay: 0, options: .curveEaseInOut, animations: {
113 | self.alpha = (self.segment != nil) ? 1 : 0
114 | }, completion: nil)
115 | } else {
116 | alpha = (segment != nil) ? 1 : 0
117 | }
118 |
119 | restartAutohideTimer()
120 | }
121 |
122 | private func restartAutohideTimer() {
123 | autohideTimer?.invalidate()
124 |
125 | guard let autohideInterval = autohideInterval else {
126 | return
127 | }
128 |
129 | autohideTimer = Timer.scheduledTimer(withTimeInterval: autohideInterval, repeats: false) { [weak self] _ in
130 | self?.hide(animated: true)
131 | }
132 | }
133 | }
134 |
135 | class SpeechTranscriptLabel: UILabel {
136 |
137 | private(set) var transcript: Speechly.Transcript?
138 | private(set) var entity: Speechly.Entity?
139 |
140 | private unowned let parent: TranscriptView
141 |
142 | init(parent: TranscriptView) {
143 | self.parent = parent
144 |
145 | super.init(frame: .zero)
146 |
147 | text = " "
148 | numberOfLines = 0
149 | }
150 |
151 | required init?(coder: NSCoder) {
152 | fatalError("init(coder:) has not been implemented")
153 | }
154 |
155 | func configure(segment: Speechly.Segment, transcript: Speechly.Transcript, entity: Speechly.Entity?) {
156 | self.transcript = transcript
157 |
158 | let shouldHighlightEntity = entity != nil && self.entity == nil && segment.isFinal
159 | self.entity = segment.isFinal ? entity : nil
160 |
161 | attributedText = segment.attributedText(attributedBy: { (transcript, entity) in
162 | let color: UIColor
163 | if transcript == self.transcript {
164 | if segment.isFinal {
165 | color = (entity != nil) ? parent.highlightedTextColor : parent.textColor
166 | } else {
167 | color = parent.textColor
168 | }
169 | } else {
170 | color = UIColor.clear
171 | }
172 |
173 | return [
174 | .font: parent.font,
175 | .foregroundColor: color
176 | ]
177 | })
178 |
179 | if shouldHighlightEntity {
180 | UIView.animate(withDuration: 0.2, delay: 0, options: .curveEaseInOut, animations: {
181 | self.transform = CGAffineTransform(translationX: 0, y: -self.font.lineHeight / 4)
182 | }, completion: { _ in
183 | UIView.animate(withDuration: 0.2, delay: 0, options: .curveEaseInOut, animations: {
184 | self.transform = .identity
185 | }, completion: nil)
186 | })
187 | }
188 | }
189 | }
190 |
191 | extension Speechly.Segment {
192 |
193 | typealias AttributeProvider = (_ transcript: Speechly.Transcript, _ entity: Speechly.Entity?) -> [NSAttributedString.Key: Any]
194 |
195 | func entity(for transcript: Speechly.Transcript) -> Speechly.Entity? {
196 | return entities.first(where: {
197 | transcript.index >= $0.startIndex && transcript.index < $0.endIndex
198 | })
199 | }
200 |
201 | func attributedText(attributedBy attributeProvider: AttributeProvider) -> NSAttributedString {
202 | let attributedText = NSMutableAttributedString()
203 |
204 | for (index, transcript) in transcripts.enumerated() {
205 | var text = transcript.value
206 | if index > 0 {
207 | text = " " + text
208 | }
209 |
210 | let entity = self.entity(for: transcript)
211 |
212 | let attributes = attributeProvider(transcript, entity)
213 |
214 | let attributedTranscript = NSAttributedString(string: text, attributes: attributes)
215 |
216 | attributedText.append(attributedTranscript)
217 | }
218 |
219 | return attributedText
220 | }
221 | }
222 |
--------------------------------------------------------------------------------
/Speechly.podspec:
--------------------------------------------------------------------------------
1 | Pod::Spec.new do |s|
2 | s.name = 'Speechly'
3 | s.version = '0.3.2'
4 | s.summary = 'Swift iOS client for Speechly SLU API.'
5 |
6 | s.homepage = 'https://github.com/speechly/ios-client'
7 | s.license = { :type => 'MIT', :file => 'LICENSE' }
8 | s.author = { :name => 'Speechly' }
9 | s.source = { :git => 'https://github.com/speechly/ios-client.git', :tag => s.version.to_s }
10 |
11 | s.ios.deployment_target = '12.0'
12 | s.swift_version = '5.7'
13 |
14 | s.source_files = 'Sources/Speechly/**', 'Sources/Speechly/*/**'
15 | s.exclude_files = 'Sources/Speechly/UI/'
16 |
17 | s.dependency 'SpeechlyAPI'
18 | s.dependency 'SwiftNIO', '~> 2.40.0'
19 | s.dependency 'gRPC-Swift', '~> 1.8.0'
20 | end
21 |
--------------------------------------------------------------------------------
/Tests/LinuxMain.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 |
3 | import SpeechlyTests
4 |
5 | var tests = [XCTestCaseEntry]()
6 | tests += SpeechlyTests.allTests()
7 | XCTMain(tests)
8 |
--------------------------------------------------------------------------------
/Tests/SpeechlyTests/SpeechlyTests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 | @testable import Speechly
3 |
4 | final class SpeechlyTests: XCTestCase {
5 | func testExample() {
6 | let transcript = Transcript(index: 0, value: "test", startOffset: 0, endOffset: 1, isFinal: true)
7 | XCTAssertEqual(transcript.value, "test")
8 | }
9 |
10 | static var allTests = [
11 | ("testExample", testExample),
12 | ]
13 | }
14 |
--------------------------------------------------------------------------------
/Tests/SpeechlyTests/XCTestManifests.swift:
--------------------------------------------------------------------------------
1 | import XCTest
2 |
3 | #if !canImport(ObjectiveC)
4 | public func allTests() -> [XCTestCaseEntry] {
5 | return [
6 | testCase(SpeechlyTests.allTests),
7 | ]
8 | }
9 | #endif
10 |
--------------------------------------------------------------------------------
/docs/ApiAccessToken.md:
--------------------------------------------------------------------------------
1 | # ApiAccessToken
2 |
3 | A struct representing an access token returned by Speechly Identity service.
4 |
5 | ``` swift
6 | public struct ApiAccessToken: Hashable
7 | ```
8 |
9 | The token is required for other application-specific Speechly APIs, such as the Speechly SLU API.
10 |
11 | ## Inheritance
12 |
13 | `Hashable`
14 |
15 | ## Initializers
16 |
17 | ### `init?(tokenString:)`
18 |
19 | Creates a new token from a raw token representation, returned by Identity API.
20 |
21 | ``` swift
22 | public init?(tokenString: String)
23 | ```
24 |
25 | >
26 |
27 | #### Parameters
28 |
29 | - tokenString: raw token value obtained from Identity API or cache.
30 |
31 | ### `init(appId:projectId:deviceId:expiresAt:scopes:tokenString:)`
32 |
33 | Creates a new token.
34 |
35 | ``` swift
36 | public init(appId: UUID?, projectId: UUID?, deviceId: UUID, expiresAt: Date, scopes: Set, tokenString: String)
37 | ```
38 |
39 | >
40 |
41 | - tokenString: Raw token value which is passed to the services.
42 |
43 | #### Parameters
44 |
45 | - appId: Speechly application identifier.
46 | - deviceId: Speechly device identifier.
47 | - expiresAt: Token expiration timestamp.
48 | - scopes: Authorised token scopes.
49 |
50 | ## Properties
51 |
52 | ### `appId`
53 |
54 | Speechly application identifier.
55 |
56 | ``` swift
57 | let appId: UUID?
58 | ```
59 |
60 | ### `projectId`
61 |
62 | Speechly project identifier.
63 |
64 | ``` swift
65 | let projectId: UUID?
66 | ```
67 |
68 | ### `deviceId`
69 |
70 | Speechly device identifier.
71 |
72 | ``` swift
73 | let deviceId: UUID
74 | ```
75 |
76 | ### `expiresAt`
77 |
78 | Token expiration timestamp.
79 |
80 | ``` swift
81 | let expiresAt: Date
82 | ```
83 |
84 | ### `scopes`
85 |
86 | Authorised token scopes.
87 |
88 | ``` swift
89 | let scopes: Set
90 | ```
91 |
92 | ### `tokenString`
93 |
94 | Raw token value which is passed to the services.
95 |
96 | ``` swift
97 | let tokenString: String
98 | ```
99 |
100 | ## Methods
101 |
102 | ### `validate(key:deviceId:expiresIn:)`
103 |
104 | Validates the token against provided identifiers and expiration time.
105 |
106 | ``` swift
107 | public func validate(key: UUID, deviceId: UUID, expiresIn: TimeInterval) -> Bool
108 | ```
109 |
110 | #### Parameters
111 |
112 | - key: Speechly application or project identifier to match against.
113 | - deviceId: Speechly device identifier to match against.
114 | - expiresIn: Time interval within which the token should still be valid.
115 |
116 | #### Returns
117 |
118 | `true` if the token is valid, `false` otherwise.
119 |
120 | ### `key()`
121 |
122 | Get the token key (appId or projectId) for caching or hashing.
123 |
124 | ``` swift
125 | public func key() -> UUID
126 | ```
127 |
128 | ### `validateExpiry(expiresIn:)`
129 |
130 | Validates token expiration time.
131 |
132 | ``` swift
133 | public func validateExpiry(expiresIn: TimeInterval) -> Bool
134 | ```
135 |
136 | #### Parameters
137 |
138 | - expiresIn: Time interval within which the token should still be valid.
139 |
140 | #### Returns
141 |
142 | `true` if the token will not expire in that time interval, `false` otherwise.
143 |
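144 | ## Example
145 |
146 | A minimal sketch of validating a token; the raw token string and the identifiers below are placeholders:
147 |
148 | ``` swift
149 | import Foundation
150 | import Speechly
151 |
152 | // A raw token obtained from the Identity API or loaded from a cache (placeholder value).
153 | let rawToken = "<raw-token-from-identity-api>"
154 |
155 | // The initialiser is failable: it returns nil if the payload cannot be decoded.
156 | if let token = ApiAccessToken(tokenString: rawToken) {
157 |     let appId = UUID(uuidString: "00000000-0000-0000-0000-000000000000")!
158 |     let deviceId = UUID()
159 |
160 |     // Check the token against our identifiers and require it to stay valid for another hour.
161 |     if token.validate(key: appId, deviceId: deviceId, expiresIn: 3600) {
162 |         print("Token is usable until \(token.expiresAt)")
163 |     }
164 | }
165 | ```
166 |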
--------------------------------------------------------------------------------
/docs/ApiAccessToken_AuthScope.md:
--------------------------------------------------------------------------------
1 | # ApiAccessToken.AuthScope
2 |
3 | Token authorisation scopes.
4 | They determine which services can be accessed with this token.
5 |
6 | ``` swift
7 | public enum AuthScope
8 | ```
9 |
10 | ## Enumeration Cases
11 |
12 | ### `SLU`
13 |
14 | Speechly SLU service.
15 |
16 | ``` swift
17 | case SLU
18 | ```
19 |
20 | ### `WLU`
21 |
22 | Speechly WLU service.
23 |
24 | ``` swift
25 | case WLU
26 | ```
27 |
--------------------------------------------------------------------------------
/docs/ApiAccessToken_TokenType.md:
--------------------------------------------------------------------------------
1 | # ApiAccessToken.TokenType
2 |
3 | Type of token; determines which Speechly applications are accessible.
4 |
5 | ``` swift
6 | public enum TokenType
7 | ```
8 |
9 | ## Enumeration Cases
10 |
11 | ### `Application`
12 |
13 | Token can be used to access a single application.
14 |
15 | ``` swift
16 | case Application
17 | ```
18 |
19 | ### `Project`
20 |
21 | Token can be used with all applications in the project.
22 |
23 | ``` swift
24 | case Project
25 | ```
26 |
--------------------------------------------------------------------------------
/docs/AudioContext.md:
--------------------------------------------------------------------------------
1 | # AudioContext
2 |
3 | The speech recognition context.
4 |
5 | ``` swift
6 | public struct AudioContext: Hashable, Identifiable
7 | ```
8 |
9 | A single context aggregates messages from the SLU API, which correspond to the audio portion
10 | sent to the API within a single recognition stream.
11 |
12 | ## Inheritance
13 |
14 | `Comparable`, `Hashable`, `Identifiable`
15 |
16 | ## Initializers
17 |
18 | ### `init(id:)`
19 |
20 | Creates a new empty speech context.
21 |
22 | ``` swift
23 | public init(id: String)
24 | ```
25 |
26 | #### Parameters
27 |
28 | - id: The identifier of the context.
29 |
30 | ### `init(id:segments:)`
31 |
32 | Creates a new speech context.
33 |
34 | ``` swift
35 | public init(id: String, segments: [Segment])
36 | ```
37 |
38 | >
39 |
40 | #### Parameters
41 |
42 | - id: The identifier of the context.
43 | - segments: The segments which belong to the context.
44 |
45 | ## Properties
46 |
47 | ### `id`
48 |
49 | The ID of the context, assigned by the API.
50 |
51 | ``` swift
52 | let id: String
53 | ```
54 |
55 | ### `segments`
56 |
57 | The segments belonging to the context; can be empty if there was nothing recognised from the audio.
58 |
59 | ``` swift
60 | var segments: [Segment]
61 | ```
62 |
63 | ## Methods
64 |
65 | ### `<(lhs:rhs:)`
66 |
67 | ``` swift
68 | public static func <(lhs: AudioContext, rhs: AudioContext) -> Bool
69 | ```
70 |
71 | ### `<=(lhs:rhs:)`
72 |
73 | ``` swift
74 | public static func <=(lhs: AudioContext, rhs: AudioContext) -> Bool
75 | ```
76 |
77 | ### `>=(lhs:rhs:)`
78 |
79 | ``` swift
80 | public static func >=(lhs: AudioContext, rhs: AudioContext) -> Bool
81 | ```
82 |
83 | ### `>(lhs:rhs:)`
84 |
85 | ``` swift
86 | public static func >(lhs: AudioContext, rhs: AudioContext) -> Bool
87 | ```
88 |
--------------------------------------------------------------------------------
/docs/AudioRecorder.md:
--------------------------------------------------------------------------------
1 | # AudioRecorder
2 |
3 | An audio recorder implementation that uses AVFoundation audio engine for capturing the input.
4 |
5 | ``` swift
6 | public class AudioRecorder
7 | ```
8 |
9 | The recorder uses an audio buffer and converter for dispatching data chunks
10 | in the required sample rate, channel count and format.
11 |
12 | ## Inheritance
13 |
14 | [`AudioRecorderProtocol`](AudioRecorderProtocol.md)
15 |
16 | ## Initializers
17 |
18 | ### `init(sampleRate:channels:format:audioQueue:delegateQueue:prepareOnInit:)`
19 |
20 | Create a new audio recorder.
21 |
22 | ``` swift
23 | public init(sampleRate: Double, channels: UInt32, format: AVAudioCommonFormat = .pcmFormatInt16, audioQueue: DispatchQueue = DispatchQueue(label: "com.speechly.iosclient.AudioRecorder.audioQueue"), delegateQueue: DispatchQueue = DispatchQueue(label: "com.speechly.iosclient.AudioRecorder.delegateQueue"), prepareOnInit: Bool = true) throws
24 | ```
25 |
26 | >
27 |
28 | #### Parameters
29 |
30 | - sampleRate: The sample rate to use for recording, in Hertz.
31 | - channels: The amount of audio channels to capture.
32 | - format: The audio format to use for capture (e.g. PCM16).
33 | - audioQueue: `DispatchQueue` to use for handling audio data from the microphone.
34 | - delegateQueue: `DispatchQueue` to use when calling delegate.
35 | - prepareOnInit: If `true`, the recorder will prepare the audio engine when initialised. Otherwise it will be prepared separately.
36 |
37 | ## Properties
38 |
39 | ### `channels`
40 |
41 | ``` swift
42 | var channels: UInt32
43 | ```
44 |
45 | ### `sampleRate`
46 |
47 | ``` swift
48 | var sampleRate: Double
49 | ```
50 |
51 | ### `delegate`
52 |
53 | ``` swift
54 | var delegate: AudioRecorderDelegate?
55 | ```
56 |
57 | ## Methods
58 |
59 | ### `start()`
60 |
61 | ``` swift
62 | public func start() throws
63 | ```
64 |
65 | ### `stop()`
66 |
67 | ``` swift
68 | public func stop()
69 | ```
70 |
71 | ### `suspend()`
72 |
73 | ``` swift
74 | public func suspend() throws
75 | ```
76 |
77 | ### `resume()`
78 |
79 | ``` swift
80 | public func resume() throws
81 | ```
82 |
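83 | ## Example
84 |
85 | A minimal capture sketch; the sample rate and channel count are illustrative, and the delegate below only counts received bytes:
86 |
87 | ``` swift
88 | import Foundation
89 | import Speechly
90 |
91 | // A throwaway delegate that just counts the bytes it receives.
92 | final class ChunkCounter: AudioRecorderDelegate {
93 |     private(set) var byteCount = 0
94 |
95 |     func audioRecorderDidReceiveData(_ audioRecorder: AudioRecorderProtocol, audioData: Data) {
96 |         byteCount += audioData.count
97 |     }
98 |
99 |     func audioRecorderDidCatchError(_ audioRecorder: AudioRecorderProtocol, error: Error) {
100 |         print("Recorder error: \(error)")
101 |     }
102 |
103 |     func audioRecorderDidStop(_ audioRecorder: AudioRecorderProtocol) {
104 |         print("Recorder stopped after \(byteCount) bytes")
105 |     }
106 | }
107 |
108 | let counter = ChunkCounter()
109 |
110 | do {
111 |     // Use the sample rate and channel count your SLU configuration expects.
112 |     let recorder = try AudioRecorder(sampleRate: 16000, channels: 1)
113 |     recorder.delegate = counter
114 |     try recorder.start()
115 |     // ... later, once the capture is no longer needed:
116 |     recorder.stop()
117 | } catch {
118 |     print("Failed to set up audio capture: \(error)")
119 | }
120 | ```
121 |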
--------------------------------------------------------------------------------
/docs/AudioRecorderDelegate.md:
--------------------------------------------------------------------------------
1 | # AudioRecorderDelegate
2 |
3 | Delegate called when audio recorder receives some data or an error, or when it has been stopped.
4 |
5 | ``` swift
6 | public protocol AudioRecorderDelegate: class
7 | ```
8 |
9 | ## Inheritance
10 |
11 | `class`
12 |
13 | ## Requirements
14 |
15 | ### audioRecorderDidCatchError(\_:error:)
16 |
17 | Called when the recorder catches an error.
18 |
19 | ``` swift
20 | func audioRecorderDidCatchError(_ audioRecorder: AudioRecorderProtocol, error: Error)
21 | ```
22 |
23 | #### Parameters
24 |
25 | - error: The error which was caught.
26 |
27 | ### audioRecorderDidReceiveData(\_:audioData:)
28 |
29 | Called after the recorder has received some audio data.
30 |
31 | ``` swift
32 | func audioRecorderDidReceiveData(_ audioRecorder: AudioRecorderProtocol, audioData: Data)
33 | ```
34 |
35 | #### Parameters
36 |
37 | - audioData: The data chunk received from the input.
38 |
39 | ### audioRecorderDidStop(\_:)
40 |
41 | Called after the recorder has stopped recording.
42 |
43 | ``` swift
44 | func audioRecorderDidStop(_ audioRecorder: AudioRecorderProtocol)
45 | ```
46 |
--------------------------------------------------------------------------------
/docs/AudioRecorderProtocol.md:
--------------------------------------------------------------------------------
1 | # AudioRecorderProtocol
2 |
3 | A protocol for capturing audio data from input sources (microphones).
4 |
5 | ``` swift
6 | public protocol AudioRecorderProtocol
7 | ```
8 |
9 | An audio recorder is supposed to capture audio data from a microphone
10 | with a pre-configured sample rate and channel count.
11 | It should also provide the functionality for starting and stopping the capture as well as
12 | preparing the recorder and resetting it to default state
13 |
14 | The data, errors and events should be dispatched to the delegate.
15 |
16 | ## Requirements
17 |
18 | ### delegate
19 |
20 | The delegate that will receive the data, errors and events from the recorder.
21 |
22 | ``` swift
23 | var delegate: AudioRecorderDelegate?
24 | ```
25 |
26 | ### sampleRate
27 |
28 | The sample rate used for recording.
29 |
30 | ``` swift
31 | var sampleRate: Double
32 | ```
33 |
34 | ### channels
35 |
36 | The amount of channels captured by the recorder.
37 |
38 | ``` swift
39 | var channels: UInt32
40 | ```
41 |
42 | ### start()
43 |
44 | Starts the recorder.
45 |
46 | ``` swift
47 | func start() throws
48 | ```
49 |
50 | >
51 |
52 | ### stop()
53 |
54 | Stops the recorder.
55 |
56 | ``` swift
57 | func stop()
58 | ```
59 |
60 | >
61 |
62 | ### suspend()
63 |
64 | Suspends the recorder, telling it to release any resources.
65 |
66 | ``` swift
67 | func suspend() throws
68 | ```
69 |
70 | ### resume()
71 |
72 | Resumes the recorder, re-initialising any resources needed for audio capture.
73 |
74 | ``` swift
75 | func resume() throws
76 | ```
77 |
--------------------------------------------------------------------------------
/docs/AudioRecorder_AudioRecorderError.md:
--------------------------------------------------------------------------------
1 | # AudioRecorder.AudioRecorderError
2 |
3 | Errors thrown by the audio recorder.
4 |
5 | ``` swift
6 | public enum AudioRecorderError
7 | ```
8 |
9 | ## Inheritance
10 |
11 | `Error`
12 |
13 | ## Enumeration Cases
14 |
15 | ### `outputFormatError`
16 |
17 | ``` swift
18 | case outputFormatError
19 | ```
20 |
--------------------------------------------------------------------------------
/docs/CacheProtocol.md:
--------------------------------------------------------------------------------
1 | # CacheProtocol
2 |
3 | A protocol for a cache storage.
4 |
5 | ``` swift
6 | public protocol CacheProtocol
7 | ```
8 |
9 | The purpose of a cache storage is to persistently store string keys and values.
10 | The cache is used for storing things like device identifiers, authentication tokens and such.
11 |
12 | ## Requirements
13 |
14 | ### setValue(\_:forKey:)
15 |
16 | Adds a value with a specified key to the cache.
17 |
18 | ``` swift
19 | func setValue(_ value: String, forKey: String)
20 | ```
21 |
22 | #### Parameters
23 |
24 | - value: The value to store in the cache.
25 | - forKey: The key to use for addressing the value.
26 |
27 | ### getValue(forKey:)
28 |
29 | Retrieves the value from the cache using the provided key.
30 |
31 | ``` swift
32 | func getValue(forKey: String) -> String?
33 | ```
34 |
35 | #### Parameters
36 |
37 | - forKey: The key to use for addressing the value.
38 |
39 | #### Returns
40 |
41 | The value stored in the cache or `nil` if no value could be found for the key provided.
42 |
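43 | ## Example
44 |
45 | A sketch of a non-persistent conformance, e.g. for tests where the `UserDefaults`-backed cache is undesirable:
46 |
47 | ``` swift
48 | import Speechly
49 |
50 | // An in-memory cache backed by a plain dictionary.
51 | final class InMemoryCache: CacheProtocol {
52 |     private var storage: [String: String] = [:]
53 |
54 |     func setValue(_ value: String, forKey: String) {
55 |         storage[forKey] = value
56 |     }
57 |
58 |     func getValue(forKey: String) -> String? {
59 |         return storage[forKey]
60 |     }
61 | }
62 | ```
63 |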
--------------------------------------------------------------------------------
/docs/CachingIdentityClient.md:
--------------------------------------------------------------------------------
1 | # CachingIdentityClient
2 |
3 | A client for Speechly Identity gRPC API which provides token caching functionality.
4 |
5 | ``` swift
6 | public class CachingIdentityClient
7 | ```
8 |
9 | The cache is implemented as read-through and transparent for the consumer.
10 |
11 | ## Inheritance
12 |
13 | [`IdentityClientProtocol`](IdentityClientProtocol.md)
14 |
15 | ## Nested Type Aliases
16 |
17 | ### `PromisableClient`
18 |
19 | The protocol constraints for backing base Identity client.
20 |
21 | ``` swift
22 | public typealias PromisableClient = IdentityClientProtocol & Promisable
23 | ```
24 |
25 | ## Initializers
26 |
27 | ### `init(baseClient:cache:)`
28 |
29 | Creates a new client.
30 |
31 | ``` swift
32 | public init(baseClient: PromisableClient, cache: CacheProtocol)
33 | ```
34 |
35 | #### Parameters
36 |
37 | - baseClient: A base Identity client to use for fetching tokens.
38 | - cache: A cache to use for storing tokens.
39 |
40 | ## Methods
41 |
42 | ### `authenticate(appId:deviceId:)`
43 |
44 | ``` swift
45 | public func authenticate(appId: UUID, deviceId: UUID) -> EventLoopFuture
46 | ```
47 |
48 | ### `authenticateProject(projectId:deviceId:)`
49 |
50 | ``` swift
51 | public func authenticateProject(projectId: UUID, deviceId: UUID) -> EventLoopFuture
52 | ```
53 |
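54 | ## Example
55 |
56 | A wiring sketch; it assumes `UserDefaultsCache` offers a parameterless initialiser and uses the public API endpoint address seen elsewhere in these docs:
57 |
58 | ``` swift
59 | import Foundation
60 | import GRPC
61 | import NIO
62 | import Speechly
63 |
64 | do {
65 |     let group = PlatformSupport.makeEventLoopGroup(loopCount: 1)
66 |     let baseClient = try IdentityClient(addr: "grpc+tls://api.speechly.com", loopGroup: group)
67 |     let cache = UserDefaultsCache() // assumed parameterless initialiser
68 |     let client = CachingIdentityClient(baseClient: baseClient, cache: cache)
69 |
70 |     // The first call fetches a token over gRPC; later calls for the same identifiers
71 |     // are served from the cache while the stored token is still valid.
72 |     _ = client.authenticate(appId: UUID(), deviceId: UUID())
73 | } catch {
74 |     print("Failed to create the identity client: \(error)")
75 | }
76 | ```
77 |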
--------------------------------------------------------------------------------
/docs/Client.md:
--------------------------------------------------------------------------------
1 | # Client
2 |
3 | A client that implements `SpeechClientProtocol` on top of Speechly SLU API and an audio recorder.
4 |
5 | ``` swift
6 | public class Client
7 | ```
8 |
9 | The client handles both the audio and the API streams, as well as API authentication,
10 | caching access tokens and dispatching data to delegate.
11 |
12 | The client is ready to use once initialised.
13 |
14 | ## Inheritance
15 |
16 | [`AudioRecorderDelegate`](AudioRecorderDelegate), [`SluClientDelegate`](SluClientDelegate), [`SpeechlyProtocol`](SpeechlyProtocol.md)
17 |
18 | ## Initializers
19 |
20 | ### `init(appId:projectId:prepareOnInit:identityAddr:sluAddr:eventLoopGroup:delegateDispatchQueue:)`
21 |
22 | Creates a new `SpeechClient`.
23 |
24 | ``` swift
25 | public convenience init(appId: UUID? = nil, projectId: UUID? = nil, prepareOnInit: Bool = true, identityAddr: String = "grpc+tls://api.speechly.com", sluAddr: String = "grpc+tls://api.speechly.com", eventLoopGroup: EventLoopGroup = PlatformSupport.makeEventLoopGroup(loopCount: 1), delegateDispatchQueue: DispatchQueue = DispatchQueue(label: "com.speechly.Client.delegateQueue")) throws
26 | ```
27 |
28 | #### Parameters
29 |
30 | - appId: Speechly application identifier. Either appId or projectId is needed.
31 | - projectId: Speechly project identifier. Either appId or projectId is needed.
32 | - prepareOnInit: Whether the client should prepare on initialisation. Preparing means initialising the audio stack and fetching the authentication token for the API.
33 | - identityAddr: The address of Speechly Identity gRPC service. Defaults to Speechly public API endpoint.
34 | - sluAddr: The address of Speechly SLU gRPC service. Defaults to Speechly public API endpoint.
35 | - eventLoopGroup: SwiftNIO event loop group to use.
36 | - delegateDispatchQueue: `DispatchQueue` to use for dispatching calls to the delegate.
37 |
38 | ### `init(appId:projectId:prepareOnInit:sluClient:identityClient:cache:audioRecorder:delegateDispatchQueue:)`
39 |
40 | Creates a new `SpeechClient`.
41 |
42 | ``` swift
43 | public init(appId: UUID? = nil, projectId: UUID? = nil, prepareOnInit: Bool, sluClient: SluClientProtocol, identityClient: IdentityClientProtocol, cache: CacheProtocol, audioRecorder: AudioRecorderProtocol, delegateDispatchQueue: DispatchQueue) throws
44 | ```
45 |
46 | #### Parameters
47 |
48 | - appId: Speechly application identifier. Either appId or projectId is needed.
49 | - projectId: Speechly project identifier. Either appId or projectId is needed.
50 | - prepareOnInit: Whether the client should prepare on initialisation. Preparing means initialising the audio stack and fetching the authentication token for the API.
51 | - sluClient: An implementation of a client for Speechly SLU API.
52 | - identityClient: An implementation of a client for Speechly Identity API.
53 | - cache: An implementation of a cache protocol.
54 | - audioRecorder: An implementation of an audio recorder.
55 | - delegateDispatchQueue: `DispatchQueue` to use for dispatching calls to the delegate.
56 |
57 | ## Properties
58 |
59 | ### `delegate`
60 |
61 | ``` swift
62 | var delegate: SpeechlyDelegate?
63 | ```
64 |
65 | ## Methods
66 |
67 | ### `audioRecorderDidStop(_:)`
68 |
69 | ``` swift
70 | public func audioRecorderDidStop(_: AudioRecorderProtocol)
71 | ```
72 |
73 | ### `audioRecorderDidReceiveData(_:audioData:)`
74 |
75 | ``` swift
76 | public func audioRecorderDidReceiveData(_: AudioRecorderProtocol, audioData: Data)
77 | ```
78 |
79 | ### `audioRecorderDidCatchError(_:error:)`
80 |
81 | ``` swift
82 | public func audioRecorderDidCatchError(_: AudioRecorderProtocol, error: Error)
83 | ```
84 |
85 | ### `sluClientDidCatchError(_:error:)`
86 |
87 | ``` swift
88 | public func sluClientDidCatchError(_ sluClient: SluClientProtocol, error: Error)
89 | ```
90 |
91 | ### `sluClientDidStopStream(_:status:)`
92 |
93 | ``` swift
94 | public func sluClientDidStopStream(_ sluClient: SluClientProtocol, status: GRPCStatus)
95 | ```
96 |
97 | ### `sluClientDidReceiveContextStart(_:contextId:)`
98 |
99 | ``` swift
100 | public func sluClientDidReceiveContextStart(_ sluClient: SluClientProtocol, contextId: String)
101 | ```
102 |
103 | ### `sluClientDidReceiveContextStop(_:contextId:)`
104 |
105 | ``` swift
106 | public func sluClientDidReceiveContextStop(_ sluClient: SluClientProtocol, contextId: String)
107 | ```
108 |
109 | ### `sluClientDidReceiveTentativeTranscript(_:contextId:segmentId:transcript:)`
110 |
111 | ``` swift
112 | public func sluClientDidReceiveTentativeTranscript(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, transcript: SluClientDelegate.TentativeTranscript)
113 | ```
114 |
115 | ### `sluClientDidReceiveTentativeEntities(_:contextId:segmentId:entities:)`
116 |
117 | ``` swift
118 | public func sluClientDidReceiveTentativeEntities(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, entities: SluClientDelegate.TentativeEntities)
119 | ```
120 |
121 | ### `sluClientDidReceiveTentativeIntent(_:contextId:segmentId:intent:)`
122 |
123 | ``` swift
124 | public func sluClientDidReceiveTentativeIntent(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, intent: SluClientDelegate.TentativeIntent)
125 | ```
126 |
127 | ### `sluClientDidReceiveTranscript(_:contextId:segmentId:transcript:)`
128 |
129 | ``` swift
130 | public func sluClientDidReceiveTranscript(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, transcript: SluClientDelegate.Transcript)
131 | ```
132 |
133 | ### `sluClientDidReceiveEntity(_:contextId:segmentId:entity:)`
134 |
135 | ``` swift
136 | public func sluClientDidReceiveEntity(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, entity: SluClientDelegate.Entity)
137 | ```
138 |
139 | ### `sluClientDidReceiveIntent(_:contextId:segmentId:intent:)`
140 |
141 | ``` swift
142 | public func sluClientDidReceiveIntent(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, intent: SluClientDelegate.Intent)
143 | ```
144 |
145 | ### `sluClientDidReceiveSegmentEnd(_:contextId:segmentId:)`
146 |
147 | ``` swift
148 | public func sluClientDidReceiveSegmentEnd(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int)
149 | ```
150 |
151 | ### `startContext(appId:)`
152 |
153 | ``` swift
154 | public func startContext(appId: String? = nil)
155 | ```
156 |
157 | ### `stopContext()`
158 |
159 | ``` swift
160 | public func stopContext()
161 | ```
162 |
163 | ### `suspend()`
164 |
165 | ``` swift
166 | public func suspend()
167 | ```
168 |
169 | ### `resume()`
170 |
171 | ``` swift
172 | public func resume() throws
173 | ```
174 |
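175 | ## Example
176 |
177 | A minimal end-to-end sketch; the application identifier is a placeholder, and the delegate assumes `Segment` exposes its `transcripts`, as used elsewhere in this client:
178 |
179 | ``` swift
180 | import Foundation
181 | import Speechly
182 |
183 | // A delegate that prints the words of every updated segment.
184 | final class SegmentPrinter: SpeechlyDelegate {
185 |     func speechlyClientDidUpdateSegment(_ speechlyClient: SpeechlyProtocol, segment: Segment) {
186 |         print(segment.transcripts.map { $0.value }.joined(separator: " "))
187 |     }
188 | }
189 |
190 | let printer = SegmentPrinter()
191 |
192 | do {
193 |     // Replace the placeholder UUID with your Speechly application identifier.
194 |     let client = try Client(appId: UUID(uuidString: "00000000-0000-0000-0000-000000000000"))
195 |     client.delegate = printer
196 |
197 |     // Typically tied to a microphone button's press and release.
198 |     client.startContext()
199 |     // ...
200 |     client.stopContext()
201 | } catch {
202 |     print("Failed to initialise the Speechly client: \(error)")
203 | }
204 | ```
205 |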
--------------------------------------------------------------------------------
/docs/Client_SpeechlyClientInitError.md:
--------------------------------------------------------------------------------
1 | # Client.SpeechlyClientInitError
2 |
3 | Represents different error situations when initializing the SpeechlyClient.
4 |
5 | ``` swift
6 | public enum SpeechlyClientInitError
7 | ```
8 |
9 | ## Inheritance
10 |
11 | `Error`
12 |
13 | ## Enumeration Cases
14 |
15 | ### `keysMissing`
16 |
17 | no appId or projectId given.
18 |
19 | ``` swift
20 | case keysMissing
21 | ```
22 |
--------------------------------------------------------------------------------
/docs/Entity.md:
--------------------------------------------------------------------------------
1 | # Entity
2 |
3 | A speech entity.
4 |
5 | ``` swift
6 | public struct Entity: Hashable, Identifiable
7 | ```
8 |
9 | An entity is a specific object in the phrase that falls into some kind of category,
10 | e.g. in a SAL example `*book book a [burger restaurant](restaurant_type) for [tomorrow](date)`
11 | "burger restaurant" would be an entity of type `restaurant_type`,
12 | and "tomorrow" would be an entity of type `date`.
13 |
14 | An entity has a start and end indices which map to the indices of `SpeechTranscript`s,
15 | e.g. in the example `*book book a [burger restaurant](restaurant_type) for [tomorrow](date)` it would be:
16 |
17 | - Entity "burger restaurant" - `startIndex = 2, endIndex = 3`
18 |
19 | - Entity "tomorrow" - `startIndex = 5, endIndex = 5`
20 |
21 | The start index is inclusive, but the end index is exclusive, i.e. the interval is `[startIndex, endIndex)`.
22 |
23 | ## Inheritance
24 |
25 | `Comparable`, `Hashable`, `Identifiable`
26 |
27 | ## Initializers
28 |
29 | ### `init(value:type:startIndex:endIndex:isFinal:)`
30 |
31 | Creates a new entity.
32 |
33 | ``` swift
34 | public init(value: String, type: String, startIndex: Int, endIndex: Int, isFinal: Bool)
35 | ```
36 |
37 | #### Parameters
38 |
39 | - value: the value of the entity.
40 | - type: the type of the entity.
41 | - startIndex: the index of the beginning of the entity in a segment.
42 | - endIndex: the index of the end of the entity in a segment.
43 | - isFinal: the status of the entity.
44 |
45 | ## Properties
46 |
47 | ### `id`
48 |
49 | The identifier of the entity, unique within a `SpeechSegment`.
50 | Consists of the combination of start and end indices.
51 |
52 | ``` swift
53 | let id: ID
54 | ```
55 |
56 | ### `value`
57 |
58 | The value of the entity, as detected by the API and defined by SAL.
59 |
60 | ``` swift
61 | let value: String
62 | ```
63 |
64 | Given SAL `*book book a [burger restaurant](restaurant_type)` and an audio `book an italian place`,
65 | the value will be `italian place`.
66 |
67 | ### `type`
68 |
69 | The type (or class) of the entity, as detected by the API and defined by SAL.
70 |
71 | ``` swift
72 | let type: String
73 | ```
74 |
75 | Given SAL `*book book a [burger restaurant](restaurant_type)` and an audio `book an italian place`,
76 | the type will be `restaurant_type`.
77 |
78 | ### `startIndex`
79 |
80 | Start index of the entity, correlates with an index of some `SpeechTranscript` in a `SpeechSegment`.
81 |
82 | ``` swift
83 | let startIndex: Int
84 | ```
85 |
86 | ### `endIndex`
87 |
88 | End index of the entity, correlates with an index of some `SpeechTranscript` in a `SpeechSegment`.
89 |
90 | ``` swift
91 | let endIndex: Int
92 | ```
93 |
94 | ### `isFinal`
95 |
96 | The status of the entity.
97 | `true` for finalised entities, `false` otherwise.
98 |
99 | ``` swift
100 | let isFinal: Bool
101 | ```
102 |
103 | >
104 |
105 | ## Methods
106 |
107 | ### `<(lhs:rhs:)`
108 |
109 | ``` swift
110 | public static func <(lhs: Entity, rhs: Entity) -> Bool
111 | ```
112 |
113 | ### `<=(lhs:rhs:)`
114 |
115 | ``` swift
116 | public static func <=(lhs: Entity, rhs: Entity) -> Bool
117 | ```
118 |
119 | ### `>=(lhs:rhs:)`
120 |
121 | ``` swift
122 | public static func >=(lhs: Entity, rhs: Entity) -> Bool
123 | ```
124 |
125 | ### `>(lhs:rhs:)`
126 |
127 | ``` swift
128 | public static func >(lhs: Entity, rhs: Entity) -> Bool
129 | ```
130 |
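131 | ## Example
132 |
133 | A small sketch of collecting the transcripts covered by an entity, assuming the half-open `[startIndex, endIndex)` interval described above:
134 |
135 | ``` swift
136 | import Speechly
137 |
138 | // Returns the transcripts whose indices fall inside the entity's range.
139 | func transcripts(coveredBy entity: Entity, in transcripts: [Transcript]) -> [Transcript] {
140 |     return transcripts.filter { $0.index >= entity.startIndex && $0.index < entity.endIndex }
141 | }
142 | ```
143 |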
--------------------------------------------------------------------------------
/docs/Entity_ID.md:
--------------------------------------------------------------------------------
1 | # Entity.ID
2 |
3 | A custom ID implementation for `SpeechEntity`.
4 | Since entities have two indices, start and end,
5 | this struct encapsulates the two for indexing and sorting purposes.
6 |
7 | ``` swift
8 | public struct ID: Hashable, Comparable
9 | ```
10 |
11 | ## Inheritance
12 |
13 | `Comparable`, `Hashable`
14 |
15 | ## Properties
16 |
17 | ### `start`
18 |
19 | The start index.
20 |
21 | ``` swift
22 | let start: Int
23 | ```
24 |
25 | ### `end`
26 |
27 | The end index.
28 |
29 | ``` swift
30 | let end: Int
31 | ```
32 |
33 | ## Methods
34 |
35 | ### `<(lhs:rhs:)`
36 |
37 | ``` swift
38 | public static func <(lhs: ID, rhs: ID) -> Bool
39 | ```
40 |
41 | ### `<=(lhs:rhs:)`
42 |
43 | ``` swift
44 | public static func <=(lhs: ID, rhs: ID) -> Bool
45 | ```
46 |
47 | ### `>=(lhs:rhs:)`
48 |
49 | ``` swift
50 | public static func >=(lhs: ID, rhs: ID) -> Bool
51 | ```
52 |
53 | ### `>(lhs:rhs:)`
54 |
55 | ``` swift
56 | public static func >(lhs: ID, rhs: ID) -> Bool
57 | ```
58 |
--------------------------------------------------------------------------------
/docs/GRPCAddress.md:
--------------------------------------------------------------------------------
1 | # GRPCAddress
2 |
3 | A gRPC service address.
4 |
5 | ``` swift
6 | public struct GRPCAddress
7 | ```
8 |
9 | Encapsulates the host, the port, and the secure / non-secure property for connecting to gRPC service endpoints.
10 |
11 | ## Initializers
12 |
13 | ### `init(host:port:secure:)`
14 |
15 | Creates a new gRPC address.
16 |
17 | ``` swift
18 | public init(host: String, port: Int, secure: Bool)
19 | ```
20 |
21 | #### Parameters
22 |
23 | - host: The host of the remote gRPC service.
24 | - port: The port of the remote gRPC service.
25 | - secure: Whether the connection to the service should use TLS.
26 |
27 | ### `init(addr:)`
28 |
29 | Creates a new gRPC address.
30 |
31 | ``` swift
32 | public init(addr: String) throws
33 | ```
34 |
35 | >
36 |
37 | #### Parameters
38 |
39 | - addr: The address of the remote gRPC service.
40 |
41 | ## Properties
42 |
43 | ### `host`
44 |
45 | The host of the remote gRPC service.
46 |
47 | ``` swift
48 | let host: String
49 | ```
50 |
51 | ### `port`
52 |
53 | The port of the remote gRPC service.
54 |
55 | ``` swift
56 | let port: Int
57 | ```
58 |
59 | ### `secure`
60 |
61 | Whether the connection should use TLS.
62 |
63 | ``` swift
64 | let secure: Bool
65 | ```
66 |
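67 | ## Example
68 |
69 | A parsing sketch using the address format seen elsewhere in these docs:
70 |
71 | ``` swift
72 | import Speechly
73 |
74 | do {
75 |     let address = try GRPCAddress(addr: "grpc+tls://api.speechly.com")
76 |     print(address.host, address.port, address.secure)
77 | } catch let error as GRPCAddress.ParseError {
78 |     print("Could not parse the address: \(error)")
79 | } catch {
80 |     print("Unexpected error: \(error)")
81 | }
82 | ```
83 |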
--------------------------------------------------------------------------------
/docs/GRPCAddress_ParseError.md:
--------------------------------------------------------------------------------
1 | # GRPCAddress.ParseError
2 |
3 | Errors thrown when parsing the address.
4 |
5 | ``` swift
6 | public enum ParseError
7 | ```
8 |
9 | ## Inheritance
10 |
11 | `Error`
12 |
13 | ## Enumeration Cases
14 |
15 | ### `unsupportedScheme`
16 |
17 | Thrown when the address contains an invalid scheme.
18 |
19 | ``` swift
20 | case unsupportedScheme
21 | ```
22 |
23 | ### `unsupportedURL`
24 |
25 | Thrown when the address contains a URL that cannot be parsed with `URL.init(string: addr)`.
26 |
27 | ``` swift
28 | case unsupportedURL
29 | ```
30 |
31 | ### `missingHost`
32 |
33 | Thrown when the address does not contain a valid host.
34 |
35 | ``` swift
36 | case missingHost
37 | ```
38 |
--------------------------------------------------------------------------------
/docs/Home.md:
--------------------------------------------------------------------------------
1 | # Types
2 |
3 | - [AudioRecorder](AudioRecorder.md):
4 | An audio recorder implementation that uses AVFoundation audio engine for capturing the input.
5 | - [AudioRecorder.AudioRecorderError](AudioRecorder_AudioRecorderError.md):
6 | Errors thrown by the audio recorder.
7 | - [AudioContext](AudioContext.md):
8 | The speech recognition context.
9 | - [UserDefaultsCache](UserDefaultsCache.md):
10 | A cache implementation that uses `UserDefaults` as the backing storage.
11 | - [Client](Client.md):
12 | A client that implements `SpeechClientProtocol` on top of Speechly SLU API and an audio recorder.
13 | - [Client.SpeechlyClientInitError](Client_SpeechlyClientInitError.md):
14 | Represents different error situations when initializing the SpeechlyClient.
15 | - [Entity](Entity.md):
16 | A speech entity.
17 | - [Entity.ID](Entity_ID.md):
18 | A custom ID implementation for `SpeechEntity`.
19 | Since entities have two indices, start and end,
20 | this struct encapsulates the two for indexing and sorting purposes.
21 | - [GRPCAddress](GRPCAddress.md):
22 | A gRPC service address.
23 | - [GRPCAddress.ParseError](GRPCAddress_ParseError.md):
24 | Errors thrown when parsing the address.
25 | - [ApiAccessToken](ApiAccessToken.md):
26 | A struct representing an access token returned by Speechly Identity service.
27 | - [ApiAccessToken.AuthScope](ApiAccessToken_AuthScope.md):
28 | Token authorisation scopes.
29 | They determine which services can be accessed with this token.
30 | - [ApiAccessToken.TokenType](ApiAccessToken_TokenType.md):
31 | Type of token; determines which Speechly applications are accessible.
32 | - [CachingIdentityClient](CachingIdentityClient.md):
33 | A client for Speechly Identity gRPC API which provides token caching functionality.
34 | - [IdentityClient](IdentityClient.md):
35 | A client for Speechly Identity gRPC API.
36 | - [IdentityClient.IdentityClientError](IdentityClient_IdentityClientError.md):
37 | Errors returned by the client.
38 | - [Intent](Intent.md):
39 | A speech intent.
40 | - [InvalidSLUState](InvalidSLUState.md):
41 | Possible invalid states of the client, e.g. if `startContext` is called without connecting to the API first.
42 | - [SluClient](SluClient.md):
43 | An SluClientProtocol that is implemented on top of public Speechly SLU gRPC API.
44 | Uses `grpc-swift` for handling gRPC streams and connectivity.
45 | - [SluConfig](SluConfig.md):
46 | SLU stream configuration describes the audio data sent to the stream.
47 | If misconfigured, the recognition stream will not produce any useful results.
48 | - [Segment](Segment.md):
49 | A segment is a part of a recognition context (or a phrase) which is defined by an intent.
50 | - [SpeechlyError](SpeechlyError.md):
51 | Errors caught by `SpeechClientProtocol` and dispatched to `SpeechClientDelegate`.
52 | - [Transcript](Transcript.md):
53 | A speech transcript.
54 | - [MicrophoneButtonView](MicrophoneButtonView.md)
55 | - [SpeechBubbleView](SpeechBubbleView.md)
56 | - [TranscriptView](TranscriptView.md)
57 |
58 | # Protocols
59 |
60 | - [AudioRecorderProtocol](AudioRecorderProtocol.md):
61 | A protocol for capturing audio data from input sources (microphones).
62 | - [AudioRecorderDelegate](AudioRecorderDelegate.md):
63 | Delegate called when audio recorder receives some data or an error, or when it has been stopped.
64 | - [CacheProtocol](CacheProtocol.md):
65 | A protocol for a cache storage.
66 | - [Promisable](Promisable.md):
67 | A protocol that defines methods for making succeeded and failed futures.
68 | - [IdentityClientProtocol](IdentityClientProtocol.md):
69 | Protocol that defines a client for Speechly Identity API.
70 | - [SluClientProtocol](SluClientProtocol.md):
71 | A protocol defining a client for Speechly SLU API.
72 | - [SluClientDelegate](SluClientDelegate.md):
73 | Delegate called when an SLU client receives messages from the API or catches an error.
74 | The intended use of this protocol is with `SluClientProtocol`.
75 | - [SpeechlyProtocol](SpeechlyProtocol.md):
76 | A speech client protocol.
77 | - [SpeechlyDelegate](SpeechlyDelegate.md):
78 | Delegate called when a speech client handles messages from the API or catches an error.
79 | - [MicrophoneButtonDelegate](MicrophoneButtonDelegate.md)
80 |
81 | # Global Functions
82 |
83 | - [makeChannel(addr:loopCount:)](makeChannel\(addr:loopCount:\).md):
84 | A function that creates a new gRPC channel for the provided address.
85 | It will also create a NIO event loop group with the specified loop count.
86 | - [makeChannel(addr:group:)](makeChannel\(addr:group:\).md):
87 | A function that creates a new gRPC channel for the provided address.
88 | - [makeTokenCallOptions(token:)](makeTokenCallOptions\(token:\).md):
89 | A function that creates new gRPC call options (metadata) that contains an authorisation token.
90 |
--------------------------------------------------------------------------------
/docs/IdentityClient.md:
--------------------------------------------------------------------------------
1 | # IdentityClient
2 |
3 | A client for Speechly Identity gRPC API.
4 |
5 | ``` swift
6 | public class IdentityClient
7 | ```
8 |
9 | Exposes functionality for authenticating identifiers in exchange for API access tokens.
10 |
11 | ## Inheritance
12 |
13 | [`Promisable`](Promisable), [`IdentityClientProtocol`](IdentityClientProtocol.md)
14 |
15 | ## Nested Type Aliases
16 |
17 | ### `IdentityApiClient`
18 |
19 | Alias for Speechly Identity client protocol.
20 |
21 | ``` swift
22 | public typealias IdentityApiClient = Speechly_Identity_V2_IdentityAPIClientProtocol
23 | ```
24 |
25 | ## Initializers
26 |
27 | ### `init(addr:loopGroup:)`
28 |
29 | Creates a new client.
30 |
31 | ``` swift
32 | public convenience init(addr: String, loopGroup: EventLoopGroup) throws
33 | ```
34 |
35 | #### Parameters
36 |
37 | - addr: The address of Speechly Identity API service.
38 | - loopGroup: `NIO.EventLoopGroup` to use for the client.
39 |
40 | ### `init(group:client:)`
41 |
42 | Creates a new client.
43 |
44 | ``` swift
45 | public init(group: EventLoopGroup, client: IdentityApiClient)
46 | ```
47 |
48 | #### Parameters
49 |
50 | - loopGroup: `NIO.EventLoopGroup` to use for the client.
51 | - client: `IdentityApiClient` implementation.
52 |
53 | ## Methods
54 |
55 | ### `makeFailedFuture(_:)`
56 |
57 | ``` swift
58 | public func makeFailedFuture(_ error: Error) -> EventLoopFuture
59 | ```
60 |
61 | ### `makeSucceededFuture(_:)`
62 |
63 | ``` swift
64 | public func makeSucceededFuture(_ value: AuthToken) -> EventLoopFuture
65 | ```
66 |
67 | ### `authenticate(appId:deviceId:)`
68 |
69 | ``` swift
70 | public func authenticate(appId: UUID, deviceId: UUID) -> EventLoopFuture
71 | ```
72 |
73 | ### `authenticateProject(projectId:deviceId:)`
74 |
75 | ``` swift
76 | public func authenticateProject(projectId: UUID, deviceId: UUID) -> EventLoopFuture
77 | ```
78 |
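79 | ## Example
80 |
81 | A minimal usage sketch, not generated from the source. The address and the UUIDs are placeholders; use your own Speechly application and device identifiers.
82 |
83 | ``` swift
84 | import Foundation
85 | import NIO
86 | import Speechly
87 |
88 | // Placeholder address and identifiers; replace them with your own values.
89 | func authenticateExample() throws {
90 |     let loopGroup = MultiThreadedEventLoopGroup(numberOfThreads: 1)
91 |     let identityClient = try IdentityClient(
92 |         addr: "grpc+tls://api.speechly.com:443",
93 |         loopGroup: loopGroup
94 |     )
95 |
96 |     identityClient
97 |         .authenticate(appId: UUID(), deviceId: UUID())
98 |         .whenComplete { result in
99 |             switch result {
100 |             case .success(let token):
101 |                 print("Authenticated: \(token)")
102 |             case .failure(let error):
103 |                 print("Authentication failed: \(error)")
104 |             }
105 |         }
106 | }
107 | ```
108 |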
--------------------------------------------------------------------------------
/docs/IdentityClientProtocol.md:
--------------------------------------------------------------------------------
1 | # IdentityClientProtocol
2 |
3 | Protocol that defines a client for Speechly Identity API.
4 |
5 | ``` swift
6 | public protocol IdentityClientProtocol
7 | ```
8 |
9 | ## Requirements
10 |
11 | ### authenticate(appId:deviceId:)
12 |
13 | Exchanges application and device identifiers for an access token to Speechly API.
14 |
15 | ``` swift
16 | func authenticate(appId: UUID, deviceId: UUID) -> EventLoopFuture
17 | ```
18 |
19 | #### Parameters
20 |
21 | - appId: Speechly application identifier.
22 | - deviceId: Device identifier.
23 |
24 | #### Returns
25 |
26 | A future that succeeds with an access token or fails with an error if authentication fails.
27 |
28 | ### authenticateProject(projectId:deviceId:)
29 |
30 | Exchanges project and device identifiers for an access token to Speechly API.
31 |
32 | ``` swift
33 | func authenticateProject(projectId: UUID, deviceId: UUID) -> EventLoopFuture
34 | ```
35 |
36 | #### Parameters
37 |
38 | - projectId: Speechly project identifier. All applications in the project are accessible during the connection.
39 | - deviceId: Device identifier.
40 |
41 | #### Returns
42 |
43 | A future that succeeds with an access token or fails with an error if authentication fails.
44 |
--------------------------------------------------------------------------------
/docs/IdentityClient_IdentityClientError.md:
--------------------------------------------------------------------------------
1 | # IdentityClient.IdentityClientError
2 |
3 | Errors returned by the client.
4 |
5 | ``` swift
6 | public enum IdentityClientError
7 | ```
8 |
9 | ## Inheritance
10 |
11 | `Error`
12 |
13 | ## Enumeration Cases
14 |
15 | ### `invalidTokenPayload`
16 |
17 | The error returned if the API returns an invalid access token.
18 |
19 | ``` swift
20 | case invalidTokenPayload
21 | ```
22 |
--------------------------------------------------------------------------------
/docs/Intent.md:
--------------------------------------------------------------------------------
1 | # Intent
2 |
3 | A speech intent.
4 |
5 | ``` swift
6 | public struct Intent: Hashable
7 | ```
8 |
9 | An intent is part of a phrase which defines the action of the phrase,
10 | e.g. a phrase "book a restaurant and send an invitation to John" contains two intents,
11 | "book" and "send an invitation".
12 |
13 | Intents can and should be used to dispatch the action that the user wants to do in the app
14 | (e.g. book a meeting, schedule a flight, reset the form).
15 |
16 | ## Inheritance
17 |
18 | `Comparable`, `Hashable`, `Identifiable`
19 |
20 | ## Initializers
21 |
22 | ### `init(value:isFinal:)`
23 |
24 | Creates a new intent.
25 |
26 | ``` swift
27 | public init(value: String, isFinal: Bool)
28 | ```
29 |
30 | #### Parameters
31 |
32 | - value: the value of the intent.
33 | - isFinal: the status of the intent.
34 |
35 | ## Properties
36 |
37 | ### `Empty`
38 |
39 | An empty intent. Can be used as a default value in other places.
40 |
41 | ``` swift
42 | let Empty
43 | ```
44 |
45 | ### `value`
46 |
47 | The value of the intent, as defined in the Speechly application configuration.
48 | For example, in the annotation `*book book a [burger restaurant](restaurant_type)` the value would be `book`.
49 |
50 | ``` swift
51 | let value: String
52 | ```
53 |
54 | ### `isFinal`
55 |
56 | The status of the intent.
57 | `true` for finalised intents, `false` otherwise.
58 |
59 | ``` swift
60 | let isFinal: Bool
61 | ```
62 |
63 | >
64 |
65 | ### `id`
66 |
67 | ``` swift
68 | var id: String
69 | ```
70 |
71 | ## Methods
72 |
73 | ### `<(lhs:rhs:)`
74 |
75 | ``` swift
76 | public static func <(lhs: Intent, rhs: Intent) -> Bool
77 | ```
78 |
79 | ### `<=(lhs:rhs:)`
80 |
81 | ``` swift
82 | public static func <=(lhs: Intent, rhs: Intent) -> Bool
83 | ```
84 |
85 | ### `>=(lhs:rhs:)`
86 |
87 | ``` swift
88 | public static func >=(lhs: Intent, rhs: Intent) -> Bool
89 | ```
90 |
91 | ### `>(lhs:rhs:)`
92 |
93 | ``` swift
94 | public static func >(lhs: Intent, rhs: Intent) -> Bool
95 | ```
96 |
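97 | ## Example
98 |
99 | A small sketch, not generated from the source: dispatching an app action based on the intent value. The intent values used here are illustrative.
100 |
101 | ``` swift
102 | import Speechly
103 |
104 | // Dispatch on the intent value configured in the Speechly application.
105 | func handle(intent: Intent) {
106 |     guard intent.isFinal else { return } // wait for the finalised intent
107 |
108 |     switch intent.value {
109 |     case "book":
110 |         print("Booking a restaurant...")
111 |     case "send_invitation":
112 |         print("Sending an invitation...")
113 |     default:
114 |         print("Unhandled intent: \(intent.value)")
115 |     }
116 | }
117 |
118 | handle(intent: Intent(value: "book", isFinal: true))
119 | ```
120 |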
--------------------------------------------------------------------------------
/docs/InvalidSLUState.md:
--------------------------------------------------------------------------------
1 | # InvalidSLUState
2 |
3 | Possible invalid states of the client, e.g. if `startContext` is called without connecting to the API first.
4 |
5 | ``` swift
6 | public enum InvalidSLUState
7 | ```
8 |
9 | ## Inheritance
10 |
11 | `Error`
12 |
13 | ## Enumeration Cases
14 |
15 | ### `notConnected`
16 |
17 | ``` swift
18 | case notConnected
19 | ```
20 |
21 | ### `contextAlreadyStarted`
22 |
23 | ``` swift
24 | case contextAlreadyStarted
25 | ```
26 |
27 | ### `contextNotStarted`
28 |
29 | ``` swift
30 | case contextNotStarted
31 | ```
32 |
--------------------------------------------------------------------------------
/docs/MicrophoneButtonDelegate.md:
--------------------------------------------------------------------------------
1 | # MicrophoneButtonDelegate
2 |
3 | ``` swift
4 | public protocol MicrophoneButtonDelegate
5 | ```
6 |
7 | ## Requirements
8 |
9 | ### didOpenMicrophone(\_:)
10 |
11 | ``` swift
12 | func didOpenMicrophone(_ button: MicrophoneButtonView)
13 | ```
14 |
15 | ### didCloseMicrophone(\_:)
16 |
17 | ``` swift
18 | func didCloseMicrophone(_ button: MicrophoneButtonView)
19 | ```
20 |
21 | ### speechButtonImageForAuthorizationStatus(\_:status:)
22 |
23 | ``` swift
24 | func speechButtonImageForAuthorizationStatus(_ button: MicrophoneButtonView, status: AVAuthorizationStatus) -> UIImage?
25 | ```
26 |
--------------------------------------------------------------------------------
/docs/MicrophoneButtonView.md:
--------------------------------------------------------------------------------
1 | # MicrophoneButtonView
2 |
3 | ``` swift
4 | public class MicrophoneButtonView: UIView
5 | ```
6 |
7 | ## Inheritance
8 |
9 | `UIView`
10 |
11 | ## Initializers
12 |
13 | ### `init(diameter:delegate:)`
14 |
15 | ``` swift
16 | public init(diameter: CGFloat = 80, delegate: MicrophoneButtonDelegate)
17 | ```
18 |
19 | ## Properties
20 |
21 | ### `borderImage`
22 |
23 | ``` swift
24 | var borderImage: UIImage?
25 | ```
26 |
27 | ### `blurEffectImage`
28 |
29 | ``` swift
30 | var blurEffectImage: UIImage?
31 | ```
32 |
33 | ### `holdToTalkText`
34 |
35 | ``` swift
36 | var holdToTalkText: String!
37 | ```
38 |
39 | ### `pressedScale`
40 |
41 | ``` swift
42 | var pressedScale: CGFloat = 1.5
43 | ```
44 |
45 | ### `isPressed`
46 |
47 | ``` swift
48 | var isPressed: Bool = false
49 | ```
50 |
51 | ## Methods
52 |
53 | ### `reloadAuthorizationStatus()`
54 |
55 | ``` swift
56 | public func reloadAuthorizationStatus()
57 | ```
58 |
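59 | ## Example
60 |
61 | A minimal sketch, not generated from the source. It assumes that returning `nil` from the image callback makes the view fall back to its bundled assets.
62 |
63 | ``` swift
64 | import UIKit
65 | import AVFoundation
66 | import Speechly
67 |
68 | class MicViewController: UIViewController, MicrophoneButtonDelegate {
69 |     private var micButton: MicrophoneButtonView!
70 |
71 |     override func viewDidLoad() {
72 |         super.viewDidLoad()
73 |         micButton = MicrophoneButtonView(delegate: self)
74 |         view.addSubview(micButton)
75 |         micButton.reloadAuthorizationStatus()
76 |     }
77 |
78 |     func didOpenMicrophone(_ button: MicrophoneButtonView) {
79 |         // Start a recognition context here, e.g. via a Speechly client.
80 |     }
81 |
82 |     func didCloseMicrophone(_ button: MicrophoneButtonView) {
83 |         // Stop the recognition context here.
84 |     }
85 |
86 |     func speechButtonImageForAuthorizationStatus(_ button: MicrophoneButtonView, status: AVAuthorizationStatus) -> UIImage? {
87 |         return nil // assumption: nil falls back to the default image for the status
88 |     }
89 | }
90 | ```
91 |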
--------------------------------------------------------------------------------
/docs/Promisable.md:
--------------------------------------------------------------------------------
1 | # Promisable
2 |
3 | A protocol that defines methods for making succeeded and failed futures.
4 |
5 | ``` swift
6 | public protocol Promisable
7 | ```
8 |
9 | ## Requirements
10 |
11 | ### makeSucceededFuture(\_:)
12 |
13 | Creates a new succeeded future with value `value`.
14 |
15 | ``` swift
16 | func makeSucceededFuture(_ value: T) -> EventLoopFuture
17 | ```
18 |
19 | #### Parameters
20 |
21 | - value: The value to wrap in the future
22 |
23 | #### Returns
24 |
25 | An `EventLoopFuture` that always succeeds with `value`.
26 |
27 | ### makeFailedFuture(\_:)
28 |
29 | Creates a new failed future with error `error`.
30 |
31 | ``` swift
32 | func makeFailedFuture(_ error: Error) -> EventLoopFuture
33 | ```
34 |
35 | #### Parameters
36 |
37 | - error: The error to wrap in the future
38 |
39 | #### Returns
40 |
41 | An `EventLoopFuture` that always fails with `error`.
42 |
--------------------------------------------------------------------------------
/docs/Segment.md:
--------------------------------------------------------------------------------
1 | # Segment
2 |
3 | A segment is a part of a recognition context (or a phrase) which is defined by an intent.
4 |
5 | ``` swift
6 | public struct Segment: Hashable, Identifiable
7 | ```
8 |
9 | For example, a phrase "book a restaurant and send an invitation to John" contains two intents,
10 | "book" and "send an invitation". Thus, the phrase will also contain two segments, "book a restaurant" and
11 | "send an invitation to John". A segment has to have exactly one intent that defines it, but it's allowed to have
12 | any number of entities and transcripts.
13 |
14 | A segment can be final or tentative. Final segments are guaranteed to contain only a final intent, final entities
15 | and final transcripts. Tentative segments can have a mix of final and tentative parts.
16 |
17 | ## Inheritance
18 |
19 | `Comparable`, `Hashable`, `Identifiable`
20 |
21 | ## Initializers
22 |
23 | ### `init(segmentId:contextId:)`
24 |
25 | Creates a new tentative segment with empty intent, entities and transcripts.
26 |
27 | ``` swift
28 | public init(segmentId: Int, contextId: String)
29 | ```
30 |
31 | #### Parameters
32 |
33 | - segmentId: The identifier of the segment within a `SpeechContext`.
34 | - contextId: The identifier of the `SpeechContext` that this segment belongs to.
35 |
36 | ### `init(segmentId:contextId:isFinal:intent:entities:transcripts:)`
37 |
38 | Creates a new segment with provided parameters.
39 |
40 | ``` swift
41 | public init(segmentId: Int, contextId: String, isFinal: Bool, intent: Intent, entities: [Entity], transcripts: [Transcript])
42 | ```
43 |
44 | >
45 |
46 | #### Parameters
47 |
48 | - segmentId: The identifier of the segment within a `SpeechContext`.
49 | - contextId: The identifier of the `SpeechContext` that this segment belongs to.
50 | - isFinal: Indicates whether the segment is final or tentative.
51 | - intent: The intent of the segment.
52 | - entities: The entities belonging to the segment.
53 | - transcripts: The transcripts belonging to the segment.
54 |
55 | ## Properties
56 |
57 | ### `id`
58 |
59 | A unique identifier of the segment.
60 |
61 | ``` swift
62 | let id: String
63 | ```
64 |
65 | ### `segmentId`
66 |
67 | The identifier of the segment, which is unique when combined with `contextId`.
68 |
69 | ``` swift
70 | let segmentId: Int
71 | ```
72 |
73 | ### `contextId`
74 |
75 | A unique identifier of the `SpeechContext` that the segment belongs to.
76 |
77 | ``` swift
78 | let contextId: String
79 | ```
80 |
81 | ### `isFinal`
82 |
83 | The status of the segment. `true` when the segment is finalised, `false` otherwise.
84 |
85 | ``` swift
86 | var isFinal: Bool = false
87 | ```
88 |
89 | ### `intent`
90 |
91 | The intent of the segment. Returns an empty tentative intent by default.
92 |
93 | ``` swift
94 | var intent: Intent = Intent.Empty
95 | ```
96 |
97 | ### `entities`
98 |
99 | The entities belonging to the segment.
100 |
101 | ``` swift
102 | var entities: [Entity]
103 | ```
104 |
105 | ### `transcripts`
106 |
107 | The transcripts belonging to the segment.
108 |
109 | ``` swift
110 | var transcripts: [Transcript]
111 | ```
112 |
113 | ## Methods
114 |
115 | ### `<(lhs:rhs:)`
116 |
117 | ``` swift
118 | public static func <(lhs: Segment, rhs: Segment) -> Bool
119 | ```
120 |
121 | ### `<=(lhs:rhs:)`
122 |
123 | ``` swift
124 | public static func <=(lhs: Segment, rhs: Segment) -> Bool
125 | ```
126 |
127 | ### `>=(lhs:rhs:)`
128 |
129 | ``` swift
130 | public static func >=(lhs: Segment, rhs: Segment) -> Bool
131 | ```
132 |
133 | ### `>(lhs:rhs:)`
134 |
135 | ``` swift
136 | public static func >(lhs: Segment, rhs: Segment) -> Bool
137 | ```
138 |
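139 | ## Example
140 |
141 | A small sketch, not generated from the source: turning a segment into a display string and acting on it once it is final.
142 |
143 | ``` swift
144 | import Speechly
145 |
146 | // Join the segment's transcripts into a single phrase in their spoken order.
147 | func render(_ segment: Segment) -> String {
148 |     return segment.transcripts
149 |         .sorted { $0.index < $1.index }
150 |         .map { $0.value }
151 |         .joined(separator: " ")
152 | }
153 |
154 | func handle(_ segment: Segment) {
155 |     print("[\(segment.contextId)/\(segment.segmentId)] \(render(segment))")
156 |     if segment.isFinal {
157 |         // Dispatch the app action using segment.intent and segment.entities here.
158 |     }
159 | }
160 | ```
161 |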
--------------------------------------------------------------------------------
/docs/SluClient.md:
--------------------------------------------------------------------------------
1 | # SluClient
2 |
3 | An `SluClientProtocol` implementation built on top of the public Speechly SLU gRPC API.
4 | Uses `swift-grpc` for handling gRPC streams and connectivity.
5 |
6 | ``` swift
7 | public class SluClient
8 | ```
9 |
10 | ## Inheritance
11 |
12 | [`SluClientProtocol`](SluClientProtocol.md)
13 |
14 | ## Nested Type Aliases
15 |
16 | ### `SluApiClient`
17 |
18 | An alias for Speechly SLU client protocol.
19 |
20 | ``` swift
21 | public typealias SluApiClient = Speechly_Slu_V1_SLUClientProtocol
22 | ```
23 |
24 | ## Initializers
25 |
26 | ### `init(addr:loopGroup:delegateQueue:)`
27 |
28 | Creates a new client.
29 |
30 | ``` swift
31 | public convenience init(addr: String, loopGroup: EventLoopGroup, delegateQueue: DispatchQueue = DispatchQueue(label: "com.speechly.iosclient.SluClient.delegateQueue")) throws
32 | ```
33 |
34 | #### Parameters
35 |
36 | - addr: The address of Speechly SLU API to connect to.
37 | - loopGroup: The `NIO.EventLoopGroup` to use in the client.
38 | - delegateQueue: The `DispatchQueue` to use for calling the delegate.
39 |
40 | ### `init(client:group:delegateQueue:)`
41 |
42 | Creates a new client.
43 |
44 | ``` swift
45 | public init(client: SluApiClient, group: EventLoopGroup, delegateQueue: DispatchQueue)
46 | ```
47 |
48 | #### Parameters
49 |
50 | - client: The `SluApiClient` to use for creating SLU streams.
51 | - group: The `NIO.EventLoopGroup` to use in the client.
52 | - delegateQueue: The `DispatchQueue` to use for calling the delegate.
53 |
54 | ## Properties
55 |
56 | ### `delegate`
57 |
58 | ``` swift
59 | var delegate: SluClientDelegate?
60 | ```
61 |
62 | ## Methods
63 |
64 | ### `connect(token:config:)`
65 |
66 | ``` swift
67 | public func connect(token: ApiAccessToken, config: SluConfig) -> EventLoopFuture
68 | ```
69 |
70 | ### `disconnect()`
71 |
72 | ``` swift
73 | public func disconnect() -> EventLoopFuture
74 | ```
75 |
76 | ### `startContext(appId:)`
77 |
78 | ``` swift
79 | public func startContext(appId: String? = nil) -> EventLoopFuture
80 | ```
81 |
82 | ### `stopContext()`
83 |
84 | ``` swift
85 | public func stopContext() -> EventLoopFuture
86 | ```
87 |
88 | ### `resume()`
89 |
90 | ``` swift
91 | public func resume() -> EventLoopFuture
92 | ```
93 |
94 | ### `suspend()`
95 |
96 | ``` swift
97 | public func suspend() -> EventLoopFuture
98 | ```
99 |
100 | ### `write(data:)`
101 |
102 | ``` swift
103 | public func write(data: Data) -> EventLoopFuture
104 | ```
105 |
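106 | ## Example
107 |
108 | A minimal construction sketch, not generated from the source. The address is a placeholder; use the SLU API address for your environment.
109 |
110 | ``` swift
111 | import Foundation
112 | import NIO
113 | import Speechly
114 |
115 | func makeSluClient(delegate: SluClientDelegate) throws -> SluClient {
116 |     let loopGroup = MultiThreadedEventLoopGroup(numberOfThreads: 1)
117 |     let client = try SluClient(
118 |         addr: "grpc+tls://api.speechly.com:443",
119 |         loopGroup: loopGroup
120 |     )
121 |     client.delegate = delegate // keep a strong reference to the delegate elsewhere
122 |     return client
123 | }
124 | ```
125 |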
--------------------------------------------------------------------------------
/docs/SluClientDelegate.md:
--------------------------------------------------------------------------------
1 | # SluClientDelegate
2 |
3 | Delegate called when an SLU client receives messages from the API or catches an error.
4 | The intended use of this protocol is with `SluClientProtocol`.
5 |
6 | ``` swift
7 | public protocol SluClientDelegate: class
8 | ```
9 |
10 | >
11 |
12 | ## Inheritance
13 |
14 | `class`
15 |
16 | ## Requirements
17 |
18 | ### sluClientDidCatchError(\_:error:)
19 |
20 | Called when the client catches an error.
21 |
22 | ``` swift
23 | func sluClientDidCatchError(_ sluClient: SluClientProtocol, error: Error)
24 | ```
25 |
26 | #### Parameters
27 |
28 | - error: The error which was caught.
29 |
30 | ### sluClientDidStopStream(\_:status:)
31 |
32 | Called when a recognition stream is stopped from the server side.
33 |
34 | ``` swift
35 | func sluClientDidStopStream(_ sluClient: SluClientProtocol, status: GRPCStatus)
36 | ```
37 |
38 | #### Parameters
39 |
40 | - status: The status that the stream was closed with.
41 |
42 | ### sluClientDidReceiveContextStart(\_:contextId:)
43 |
44 | Called when a recognition stream receives an audio context start message.
45 |
46 | ``` swift
47 | func sluClientDidReceiveContextStart(_ sluClient: SluClientProtocol, contextId: String)
48 | ```
49 |
50 | #### Parameters
51 |
52 | - contextId: The ID of the context that was started by the server.
53 |
54 | ### sluClientDidReceiveContextStop(\_:contextId:)
55 |
56 | Called when a recognition stream receives an audio context stop message.
57 |
58 | ``` swift
59 | func sluClientDidReceiveContextStop(_ sluClient: SluClientProtocol, contextId: String)
60 | ```
61 |
62 | #### Parameters
63 |
64 | - contextId: The ID of the context that was stopped by the server.
65 |
66 | ### sluClientDidReceiveSegmentEnd(\_:contextId:segmentId:)
67 |
68 | Called when a recognition stream receives a segment end message.
69 |
70 | ``` swift
71 | func sluClientDidReceiveSegmentEnd(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int)
72 | ```
73 |
74 | #### Parameters
75 |
76 | - contextId: The ID of the context that the segment belongs to.
77 | - segmentId: The ID of the segment which has ended.
78 |
79 | ### sluClientDidReceiveTentativeTranscript(\_:contextId:segmentId:transcript:)
80 |
81 | Called when a recognition stream receives a tentative transcript message.
82 |
83 | ``` swift
84 | func sluClientDidReceiveTentativeTranscript(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, transcript: TentativeTranscript)
85 | ```
86 |
87 | #### Parameters
88 |
89 | - contextId: The ID of the context that the segment belongs to.
90 | - segmentId: The ID of the segment which the transcript belongs to.
91 | - transcript: The tentative transcript message.
92 |
93 | ### sluClientDidReceiveTentativeEntities(\_:contextId:segmentId:entities:)
94 |
95 | Called when a recognition stream receives a tentative entities message.
96 |
97 | ``` swift
98 | func sluClientDidReceiveTentativeEntities(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, entities: TentativeEntities)
99 | ```
100 |
101 | #### Parameters
102 |
103 | - contextId: The ID of the context that the segment belongs to.
104 | - segmentId: The ID of the segment which the entities belong to.
105 | - entities: The tentative entities message.
106 |
107 | ### sluClientDidReceiveTentativeIntent(\_:contextId:segmentId:intent:)
108 |
109 | Called when a recognition stream receives a tentative intent message.
110 |
111 | ``` swift
112 | func sluClientDidReceiveTentativeIntent(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, intent: TentativeIntent)
113 | ```
114 |
115 | #### Parameters
116 |
117 | - contextId: The ID of the context that the segment belongs to.
118 | - segmentId: The ID of the segment which the intent belongs to.
119 | - intent: The tentative intent message.
120 |
121 | ### sluClientDidReceiveTranscript(\_:contextId:segmentId:transcript:)
122 |
123 | Called when a recognition stream receives a final transcript message.
124 |
125 | ``` swift
126 | func sluClientDidReceiveTranscript(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, transcript: Transcript)
127 | ```
128 |
129 | #### Parameters
130 |
131 | - contextId: The ID of the context that the segment belongs to.
132 | - segmentId: The ID of the segment which the transcript belongs to.
133 | - transcript: The transcript message.
134 |
135 | ### sluClientDidReceiveEntity(\_:contextId:segmentId:entity:)
136 |
137 | Called when a recognition stream receives a final entity message.
138 |
139 | ``` swift
140 | func sluClientDidReceiveEntity(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, entity: Entity)
141 | ```
142 |
143 | #### Parameters
144 |
145 | - contextId: The ID of the context that the segment belongs to.
146 | - segmentId: The ID of the segment which the entity belongs to.
147 | - entity: The entity message.
148 |
149 | ### sluClientDidReceiveIntent(\_:contextId:segmentId:intent:)
150 |
151 | Called when a recognition stream receives a final intent message.
152 |
153 | ``` swift
154 | func sluClientDidReceiveIntent(_ sluClient: SluClientProtocol, contextId: String, segmentId: Int, intent: Intent)
155 | ```
156 |
157 | #### Parameters
158 |
159 | - contextId: The ID of the context that the segment belongs to.
160 | - segmentId: The ID of the segment which the intent belongs to.
161 | - intent: The intent message.
162 |
--------------------------------------------------------------------------------
/docs/SluClientProtocol.md:
--------------------------------------------------------------------------------
1 | # SluClientProtocol
2 |
3 | A protocol defining a client for Speechly SLU API.
4 |
5 | ``` swift
6 | public protocol SluClientProtocol
7 | ```
8 |
9 | It exposes functionality for starting and stopping SLU recognition streams
10 | and a delegate for receiving the responses.
11 |
12 | >
13 |
14 | ## Requirements
15 |
16 | ### delegate
17 |
18 | A delegate which is called when the client receives messages from the API or catches errors.
19 |
20 | ``` swift
21 | var delegate: SluClientDelegate?
22 | ```
23 |
24 | ### connect(token:config:)
25 |
26 | Connects to the SLU API.
27 |
28 | ``` swift
29 | func connect(token: ApiAccessToken, config: SluConfig) -> EventLoopFuture
30 | ```
31 |
32 | >
33 |
34 | #### Parameters
35 |
36 | - token: An auth token received from Speechly Identity API.
37 | - config: The configuration of the SLU stream.
38 |
39 | #### Returns
40 |
41 | A future which will be fulfilled when the stream has been connected.
42 |
43 | ### disconnect()
44 |
45 | Disconnects the current connection to the SLU API.
46 |
47 | ``` swift
48 | func disconnect() -> EventLoopFuture
49 | ```
50 |
51 | If there is an active `Context`, it is cancelled.
52 |
53 | #### Returns
54 |
55 | A future which is fulfilled when the stream has been disconnected.
56 |
57 | ### startContext(appId:)
58 |
59 | Starts a new SLU recognition stream.
60 |
61 | ``` swift
62 | func startContext(appId: String?) -> EventLoopFuture
63 | ```
64 |
65 | >
66 |
67 | #### Parameters
68 |
69 | - appId: The target appId for the audio, if not set in the token.
70 |
71 | #### Returns
72 |
73 | A future which will be fulfilled when the stream has been started.
74 |
75 | ### stopContext()
76 |
77 | Stops the current SLU recognition stream.
78 |
79 | ``` swift
80 | func stopContext() -> EventLoopFuture
81 | ```
82 |
83 | #### Returns
84 |
85 | A future which will be fulfilled when the stream has been closed from the client side.
86 |
87 | ### suspend()
88 |
89 | Suspends the client by terminating any in-flight streams and disconnecting the channels.
90 |
91 | ``` swift
92 | func suspend() -> EventLoopFuture
93 | ```
94 |
95 | #### Returns
96 |
97 | A future which will be fulfilled when the streams and channels are cleaned up.
98 |
99 | ### resume()
100 |
101 | Resumes the client by restoring the channels and cleaning up any stale streams.
102 |
103 | ``` swift
104 | func resume() -> EventLoopFuture
105 | ```
106 |
107 | #### Returns
108 |
109 | A future which will be fulfilled when the channels are restored.
110 |
111 | ### write(data:)
112 |
113 | Writes audio data on the current stream.
114 |
115 | ``` swift
116 | func write(data: Data) -> EventLoopFuture
117 | ```
118 |
119 | >
120 |
121 | #### Parameters
122 |
123 | - data: The audio data to write to the stream.
124 |
125 | #### Returns
126 |
127 | A future which will be fulfilled when the data has been sent.
128 |
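129 | ## Example
130 |
131 | A minimal sketch, not generated from the source: driving a single SLU context over an already-created client. The token, configuration and audio data are assumed to come from the Identity API, your app configuration and an audio recorder respectively.
132 |
133 | ``` swift
134 | import Foundation
135 | import NIO
136 | import Speechly
137 |
138 | func recognise(client: SluClientProtocol, token: ApiAccessToken, config: SluConfig, audioChunk: Data) {
139 |     client.connect(token: token, config: config)
140 |         .flatMap { _ in client.startContext(appId: nil) }
141 |         .flatMap { _ in client.write(data: audioChunk) }
142 |         .flatMap { _ in client.stopContext() }
143 |         .flatMap { _ in client.disconnect() }
144 |         .whenComplete { result in
145 |             print("SLU round trip finished: \(result)")
146 |         }
147 | }
148 | ```
149 |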
--------------------------------------------------------------------------------
/docs/SluConfig.md:
--------------------------------------------------------------------------------
1 | # SluConfig
2 |
3 | SLU stream configuration describes the audio data sent to the stream.
4 | If misconfigured, the recognition stream will not produce any useful results.
5 |
6 | ``` swift
7 | public struct SluConfig
8 | ```
9 |
10 | ## Properties
11 |
12 | ### `sampleRate`
13 |
14 | The sample rate of the audio sent to the stream, in Hertz.
15 |
16 | ``` swift
17 | let sampleRate: Double
18 | ```
19 |
20 | ### `channels`
21 |
22 | The number of channels in the audio sent to the stream.
23 |
24 | ``` swift
25 | let channels: UInt32
26 | ```
27 |
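28 | ## Example
29 |
30 | A small sketch, not generated from the source; it assumes the struct's memberwise initializer is accessible. 16 kHz mono is a typical configuration for speech audio.
31 |
32 | ``` swift
33 | import Speechly
34 |
35 | // Assumption: SluConfig exposes a memberwise initializer.
36 | let config = SluConfig(sampleRate: 16_000, channels: 1)
37 | ```
38 |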
--------------------------------------------------------------------------------
/docs/SpeechBubbleView.md:
--------------------------------------------------------------------------------
1 | # SpeechBubbleView
2 |
3 | ``` swift
4 | public class SpeechBubbleView: UIView
5 | ```
6 |
7 | ## Inheritance
8 |
9 | `UIView`
10 |
11 | ## Initializers
12 |
13 | ### `init()`
14 |
15 | ``` swift
16 | public init()
17 | ```
18 |
19 | ## Properties
20 |
21 | ### `isShowing`
22 |
23 | ``` swift
24 | var isShowing: Bool
25 | ```
26 |
27 | ### `autohideInterval`
28 |
29 | ``` swift
30 | var autohideInterval: TimeInterval? = 3
31 | ```
32 |
33 | ### `text`
34 |
35 | ``` swift
36 | var text: String?
37 | ```
38 |
39 | ### `font`
40 |
41 | ``` swift
42 | var font: UIFont!
43 | ```
44 |
45 | ### `textColor`
46 |
47 | ``` swift
48 | var textColor: UIColor!
49 | ```
50 |
51 | ### `color`
52 |
53 | ``` swift
54 | var color: UIColor!
55 | ```
56 |
57 | ## Methods
58 |
59 | ### `show(animated:)`
60 |
61 | ``` swift
62 | public func show(animated: Bool = true)
63 | ```
64 |
65 | ### `hide(animated:)`
66 |
67 | ``` swift
68 | public func hide(animated: Bool = true)
69 | ```
70 |
71 | ### `pulse(duration:scale:)`
72 |
73 | ``` swift
74 | public func pulse(duration: TimeInterval = 0.5, scale: CGFloat = 1.2)
75 | ```
76 |
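77 | ## Example
78 |
79 | A small sketch, not generated from the source: showing a hint bubble that hides itself after the configured interval.
80 |
81 | ``` swift
82 | import UIKit
83 | import Speechly
84 |
85 | let bubble = SpeechBubbleView()
86 | bubble.text = "Try saying: \"Book a table for two\""
87 | bubble.autohideInterval = 3
88 | bubble.show()
89 | // Add `bubble` to your view hierarchy and position it, e.g. above the microphone button.
90 | ```
91 |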
--------------------------------------------------------------------------------
/docs/SpeechlyDelegate.md:
--------------------------------------------------------------------------------
1 | # SpeechlyDelegate
2 |
3 | Delegate called when a speech client handles messages from the API or catches an error.
4 |
5 | ``` swift
6 | public protocol SpeechlyDelegate: class
7 | ```
8 |
9 | The intended use of this protocol is with `SpeechlyProtocol`.
10 |
11 | >
12 |
13 | ## Inheritance
14 |
15 | `class`
16 |
17 | ## Requirements
18 |
19 | ### speechlyClientDidCatchError(\_:error:)
20 |
21 | Called when the client catches an error.
22 |
23 | ``` swift
24 | func speechlyClientDidCatchError(_ speechlyClient: SpeechlyProtocol, error: SpeechlyError)
25 | ```
26 |
27 | #### Parameters
28 |
29 | - error: The error which was caught.
30 |
31 | ### speechlyClientDidStartContext(\_:)
32 |
33 | Called after the client has acknowledged a recognition context start.
34 |
35 | ``` swift
36 | func speechlyClientDidStartContext(_ speechlyClient: SpeechlyProtocol)
37 | ```
38 |
39 | ### speechlyClientDidStopContext(\_:)
40 |
41 | Called after the client has acknowledged a recognition context stop.
42 |
43 | ``` swift
44 | func speechlyClientDidStopContext(_ speechlyClient: SpeechlyProtocol)
45 | ```
46 |
47 | ### speechlyClientDidUpdateSegment(\_:segment:)
48 |
49 | Called after the client has processed an update to the current `Segment`.
50 |
51 | ``` swift
52 | func speechlyClientDidUpdateSegment(_ speechlyClient: SpeechlyProtocol, segment: Segment)
53 | ```
54 |
55 | When the client receives messages from the API, it will use them to update the state of current speech segment,
56 | and dispatch the updated state to the delegate. The delegate can use these updates to react to the user input
57 | by using the intent, entities and transcripts contained in the segment.
58 |
59 | Only one segment is active at a time, but since the processing is asynchronous,
60 | it is possible to have out-of-order delivery of segments.
61 |
62 | #### Parameters
63 |
64 | - segment: The speech segment that has been updated.
65 |
66 | ### speechlyClientDidReceiveTranscript(\_:contextId:segmentId:transcript:)
67 |
68 | Called after the client has received a new transcript message from the API.
69 |
70 | ``` swift
71 | func speechlyClientDidReceiveTranscript(_ speechlyClient: SpeechlyProtocol, contextId: String, segmentId: Int, transcript: Transcript)
72 | ```
73 |
74 | #### Parameters
75 |
76 | - contextId: The ID of the recognition context that the transcript belongs to.
77 | - segmentId: The ID of the speech segment that the transcript belongs to.
78 | - transcript: The transcript received from the API.
79 |
80 | ### speechlyClientDidReceiveEntity(\_:contextId:segmentId:entity:)
81 |
82 | Called after the client has received a new entity message from the API.
83 |
84 | ``` swift
85 | func speechlyClientDidReceiveEntity(_ speechlyClient: SpeechlyProtocol, contextId: String, segmentId: Int, entity: Entity)
86 | ```
87 |
88 | #### Parameters
89 |
90 | - contextId: The ID of the recognition context that the entity belongs to.
91 | - segmentId: The ID of the speech segment that the entity belongs to.
92 | - entity: The entity received from the API.
93 |
94 | ### speechlyClientDidReceiveIntent(\_:contextId:segmentId:intent:)
95 |
96 | Called after the client has received a new intent message from the API.
97 |
98 | ``` swift
99 | func speechlyClientDidReceiveIntent(_ speechlyClient: SpeechlyProtocol, contextId: String, segmentId: Int, intent: Intent)
100 | ```
101 |
102 | #### Parameters
103 |
104 | - contextId: The ID of the recognition context that the intent belongs to.
105 | - segmentId: The ID of the speech segment that the intent belongs to.
106 | - intent: The intent received from the API.
107 |
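108 | ## Example
109 |
110 | A minimal sketch, not generated from the source. It implements only the callbacks used here and assumes the protocol provides default no-op implementations for the rest; otherwise stub out the remaining requirements as well.
111 |
112 | ``` swift
113 | import Foundation
114 | import Speechly
115 |
116 | class SegmentPrinter: SpeechlyDelegate {
117 |     func speechlyClientDidUpdateSegment(_ speechlyClient: SpeechlyProtocol, segment: Segment) {
118 |         let text = segment.transcripts.map { $0.value }.joined(separator: " ")
119 |         print("Segment \(segment.segmentId) (final: \(segment.isFinal)): \(text)")
120 |     }
121 |
122 |     func speechlyClientDidCatchError(_ speechlyClient: SpeechlyProtocol, error: SpeechlyError) {
123 |         print("Speechly error: \(error)")
124 |     }
125 | }
126 | ```
127 |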
--------------------------------------------------------------------------------
/docs/SpeechlyError.md:
--------------------------------------------------------------------------------
1 | # SpeechlyError
2 |
3 | Errors caught by `SpeechlyProtocol` implementations and dispatched to `SpeechlyDelegate`.
4 |
5 | ``` swift
6 | public enum SpeechlyError
7 | ```
8 |
9 | ## Inheritance
10 |
11 | `Error`
12 |
13 | ## Enumeration Cases
14 |
15 | ### `networkError`
16 |
17 | A network-level error.
18 | Usually these errors are unrecoverable and require a full restart of the client.
19 |
20 | ``` swift
21 | case networkError(String)
22 | ```
23 |
24 | ### `audioError`
25 |
26 | An error within the audio recorder stack.
27 | Normally these errors are recoverable and do not require any special handling.
28 | However, these errors will result in downgraded recognition performance.
29 |
30 | ``` swift
31 | case audioError(String)
32 | ```
33 |
34 | ### `apiError`
35 |
36 | An error within the API.
37 | Normally these errors are recoverable, but they may result in dropped API responses.
38 |
39 | ``` swift
40 | case apiError(String)
41 | ```
42 |
43 | ### `parseError`
44 |
45 | An error within the API message parsing logic.
46 | These errors are fully recoverable, but will result in missed speech segment updates.
47 |
48 | ``` swift
49 | case parseError(String)
50 | ```
51 |
--------------------------------------------------------------------------------
/docs/SpeechlyProtocol.md:
--------------------------------------------------------------------------------
1 | # SpeechlyProtocol
2 |
3 | A speech client protocol.
4 |
5 | ``` swift
6 | public protocol SpeechlyProtocol
7 | ```
8 |
9 | The purpose of a speech client is to abstract away the handling of audio recording and API streaming,
10 | providing the user with a high-level abstraction over the microphone speech recognition.
11 |
12 | ## Requirements
13 |
14 | ### delegate
15 |
16 | A delegate which is called when the client has received and parsed messages from the API.
17 | The delegate will also be called when the client catches an error.
18 |
19 | ``` swift
20 | var delegate: SpeechlyDelegate?
21 | ```
22 |
23 | ### startContext(appId:)
24 |
25 | Start a new recognition context and unmute the microphone.
26 |
27 | ``` swift
28 | func startContext(appId: String?)
29 | ```
30 |
31 | >
32 |
33 | #### Parameters
34 |
35 | - appId: Define a specific Speechly appId to send the audio to. Not needed if the appId can be inferred from login.
36 |
37 | ### stopContext()
38 |
39 | Stop current recognition context and mute the microphone.
40 |
41 | ``` swift
42 | func stopContext()
43 | ```
44 |
45 | >
46 |
47 | ### suspend()
48 |
49 | Suspend the client, releasing any resources and cleaning up any pending contexts.
50 |
51 | ``` swift
52 | func suspend()
53 | ```
54 |
55 | This method should be used when your application is about to enter background state.
56 |
57 | ### resume()
58 |
59 | Resume the client, re-initialising necessary resources to continue the operation.
60 |
61 | ``` swift
62 | func resume() throws
63 | ```
64 |
65 | This method should be used when your application is about to leave background state.
66 |
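67 | ## Example
68 |
69 | A minimal lifecycle sketch, not generated from the source. `client` stands for any `SpeechlyProtocol` implementation and `delegate` for a `SpeechlyDelegate` conformance of your own.
70 |
71 | ``` swift
72 | import Speechly
73 |
74 | func wireUp(client: inout SpeechlyProtocol, delegate: SpeechlyDelegate) throws {
75 |     client.delegate = delegate
76 |
77 |     client.startContext(appId: nil) // unmute the microphone and start a context
78 |     // ... stream audio, receive delegate callbacks ...
79 |     client.stopContext()            // mute the microphone and stop the context
80 |
81 |     client.suspend()                // when the app enters the background
82 |     try client.resume()             // when the app returns to the foreground
83 | }
84 | ```
85 |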
--------------------------------------------------------------------------------
/docs/Transcript.md:
--------------------------------------------------------------------------------
1 | # Transcript
2 |
3 | A speech transcript.
4 |
5 | ``` swift
6 | public struct Transcript: Hashable
7 | ```
8 |
9 | A transcript is a single word in a phrase recognised from the audio.
10 | For example, a phrase "two glasses" will have two transcripts, "two" and "glasses".
11 |
12 | ## Inheritance
13 |
14 | `Comparable`, `Hashable`, `Identifiable`
15 |
16 | ## Initializers
17 |
18 | ### `init(index:value:startOffset:endOffset:isFinal:)`
19 |
20 | Creates a new transcript.
21 |
22 | ``` swift
23 | public init(index: Int, value: String, startOffset: TimeInterval, endOffset: TimeInterval, isFinal: Bool)
24 | ```
25 |
26 | #### Parameters
27 |
28 | - index: the index of the transcript.
29 | - value: the value of the transcript.
30 | - startOffset: the time offset of the beginning of the transcript in the phrase.
31 | - endOffset: the time offset of the end of the transcript in the phrase.
32 | - isFinal: the status of the transcript.
33 |
34 | ## Properties
35 |
36 | ### `index`
37 |
38 | The index of the transcript in the phrase.
39 |
40 | ``` swift
41 | let index: Int
42 | ```
43 |
44 | ### `value`
45 |
46 | The value of the transcript, e.g. "glasses".
47 | The case is not guaranteed; it is up to the consumer to decide whether to change it.
48 |
49 | ``` swift
50 | let value: String
51 | ```
52 |
53 | ### `startOffset`
54 |
55 | The time offset of the beginning of the transcript in the audio, relative to the beginning of the phrase.
56 |
57 | ``` swift
58 | let startOffset: TimeInterval
59 | ```
60 |
61 | ### `endOffset`
62 |
63 | The time offset of the end of the transcript in the audio, relative to the beginning of the phrase.
64 |
65 | ``` swift
66 | let endOffset: TimeInterval
67 | ```
68 |
69 | ### `isFinal`
70 |
71 | The status of the transcript.
72 | `true` for finalised transcripts, `false` otherwise.
73 |
74 | ``` swift
75 | let isFinal: Bool
76 | ```
77 |
78 | >
79 |
80 | ### `id`
81 |
82 | ``` swift
83 | var id: Int
84 | ```
85 |
86 | ## Methods
87 |
88 | ### `<(lhs:rhs:)`
89 |
90 | ``` swift
91 | public static func <(lhs: Transcript, rhs: Transcript) -> Bool
92 | ```
93 |
94 | ### `<=(lhs:rhs:)`
95 |
96 | ``` swift
97 | public static func <=(lhs: Transcript, rhs: Transcript) -> Bool
98 | ```
99 |
100 | ### `>=(lhs:rhs:)`
101 |
102 | ``` swift
103 | public static func >=(lhs: Transcript, rhs: Transcript) -> Bool
104 | ```
105 |
106 | ### `>(lhs:rhs:)`
107 |
108 | ``` swift
109 | public static func >(lhs: Transcript, rhs: Transcript) -> Bool
110 | ```
111 |
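112 | ## Example
113 |
114 | A small sketch, not generated from the source: building transcripts and joining them into a display string in phrase order.
115 |
116 | ``` swift
117 | import Foundation
118 | import Speechly
119 |
120 | let transcripts = [
121 |     Transcript(index: 0, value: "two", startOffset: 0.0, endOffset: 0.4, isFinal: true),
122 |     Transcript(index: 1, value: "glasses", startOffset: 0.4, endOffset: 0.9, isFinal: true),
123 | ]
124 |
125 | let text = transcripts
126 |     .sorted { $0.index < $1.index }
127 |     .map { $0.value }
128 |     .joined(separator: " ")
129 |
130 | print(text) // "two glasses"
131 | ```
132 |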
--------------------------------------------------------------------------------
/docs/TranscriptView.md:
--------------------------------------------------------------------------------
1 | # TranscriptView
2 |
3 | ``` swift
4 | public class TranscriptView: UIView
5 | ```
6 |
7 | ## Inheritance
8 |
9 | `UIView`
10 |
11 | ## Initializers
12 |
13 | ### `init()`
14 |
15 | ``` swift
16 | public init()
17 | ```
18 |
19 | ## Properties
20 |
21 | ### `segment`
22 |
23 | ``` swift
24 | var segment: Speechly.Segment?
25 | ```
26 |
27 | ### `font`
28 |
29 | ``` swift
30 | var font: UIFont = UIFont(name: "AvenirNextCondensed-Bold", size: 20)!
31 | ```
32 |
33 | ### `textColor`
34 |
35 | ``` swift
36 | var textColor: UIColor = UIColor.white
37 | ```
38 |
39 | ### `highlightedTextColor`
40 |
41 | ``` swift
42 | var highlightedTextColor: UIColor
43 | ```
44 |
45 | ### `autohideInterval`
46 |
47 | ``` swift
48 | var autohideInterval: TimeInterval? = 3
49 | ```
50 |
51 | ## Methods
52 |
53 | ### `configure(segment:animated:)`
54 |
55 | ``` swift
56 | public func configure(segment: Speechly.Segment?, animated: Bool)
57 | ```
58 |
59 | ### `hide(animated:)`
60 |
61 | ``` swift
62 | public func hide(animated: Bool)
63 | ```
64 |
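65 | ## Example
66 |
67 | A small sketch, not generated from the source: rendering segment updates (for example from a `SpeechlyDelegate` callback) on the main queue.
68 |
69 | ``` swift
70 | import UIKit
71 | import Speechly
72 |
73 | let transcriptView = TranscriptView()
74 | // Add `transcriptView` to your view hierarchy and constrain it as needed.
75 |
76 | func show(_ segment: Speechly.Segment) {
77 |     DispatchQueue.main.async {
78 |         transcriptView.configure(segment: segment, animated: true)
79 |     }
80 | }
81 | ```
82 |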
--------------------------------------------------------------------------------
/docs/UserDefaultsCache.md:
--------------------------------------------------------------------------------
1 | # UserDefaultsCache
2 |
3 | A cache implementation that uses `UserDefaults` as the backing storage.
4 |
5 | ``` swift
6 | public class UserDefaultsCache
7 | ```
8 |
9 | ## Inheritance
10 |
11 | [`CacheProtocol`](CacheProtocol.md)
12 |
13 | ## Initializers
14 |
15 | ### `init()`
16 |
17 | Creates a new `UserDefaultsCache` instance.
18 |
19 | ``` swift
20 | public convenience init()
21 | ```
22 |
23 | ### `init(storage:)`
24 |
25 | Creates a new `UserDefaultsCache` instance.
26 |
27 | ``` swift
28 | public init(storage: UserDefaults)
29 | ```
30 |
31 | #### Parameters
32 |
33 | - storage: The `UserDefaults` storage to use as the backend.
34 |
35 | ## Methods
36 |
37 | ### `setValue(_:forKey:)`
38 |
39 | ``` swift
40 | public func setValue(_ value: String, forKey: String)
41 | ```
42 |
43 | ### `getValue(forKey:)`
44 |
45 | ``` swift
46 | public func getValue(forKey: String) -> String?
47 | ```
48 |
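49 | ## Example
50 |
51 | A small sketch, not generated from the source. The key used here is a placeholder of our own, not one used internally by the library.
52 |
53 | ``` swift
54 | import Speechly
55 |
56 | let cache = UserDefaultsCache()
57 | cache.setValue("example-value", forKey: "com.example.speechly.cachedValue")
58 |
59 | if let value = cache.getValue(forKey: "com.example.speechly.cachedValue") {
60 |     print("Cached value: \(value)")
61 | }
62 | ```
63 |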
--------------------------------------------------------------------------------
/docs/_Footer.md:
--------------------------------------------------------------------------------
1 | Generated at 2021-04-09T07:37:18+0300 using [swift-doc](https://github.com/SwiftDocOrg/swift-doc) 1.0.0-beta.5.
2 |
--------------------------------------------------------------------------------
/docs/_Sidebar.md:
--------------------------------------------------------------------------------
1 |
2 | Types
3 |
4 | - [ApiAccessToken](ApiAccessToken.md)
5 | - [ApiAccessToken.AuthScope](ApiAccessToken_AuthScope.md)
6 | - [ApiAccessToken.TokenType](ApiAccessToken_TokenType.md)
7 | - [AudioContext](AudioContext.md)
8 | - [AudioRecorder](AudioRecorder.md)
9 | - [AudioRecorder.AudioRecorderError](AudioRecorder_AudioRecorderError.md)
10 | - [CachingIdentityClient](CachingIdentityClient.md)
11 | - [Client](Client.md)
12 | - [Client.SpeechlyClientInitError](Client_SpeechlyClientInitError.md)
13 | - [Entity](Entity.md)
14 | - [Entity.ID](Entity_ID.md)
15 | - [GRPCAddress](GRPCAddress.md)
16 | - [GRPCAddress.ParseError](GRPCAddress_ParseError.md)
17 | - [IdentityClient](IdentityClient.md)
18 | - [IdentityClient.IdentityClientError](IdentityClient_IdentityClientError.md)
19 | - [Intent](Intent.md)
20 | - [InvalidSLUState](InvalidSLUState.md)
21 | - [MicrophoneButtonView](MicrophoneButtonView.md)
22 | - [Segment](Segment.md)
23 | - [SluClient](SluClient.md)
24 | - [SluConfig](SluConfig.md)
25 | - [SpeechBubbleView](SpeechBubbleView.md)
26 | - [SpeechlyError](SpeechlyError.md)
27 | - [Transcript](Transcript.md)
28 | - [TranscriptView](TranscriptView.md)
29 | - [UserDefaultsCache](UserDefaultsCache.md)
30 |
31 |
32 |
33 |
34 | Protocols
35 |
36 | - [AudioRecorderDelegate](AudioRecorderDelegate.md)
37 | - [AudioRecorderProtocol](AudioRecorderProtocol.md)
38 | - [CacheProtocol](CacheProtocol.md)
39 | - [IdentityClientProtocol](IdentityClientProtocol.md)
40 | - [MicrophoneButtonDelegate](MicrophoneButtonDelegate.md)
41 | - [Promisable](Promisable.md)
42 | - [SluClientDelegate](SluClientDelegate.md)
43 | - [SluClientProtocol](SluClientProtocol.md)
44 | - [SpeechlyDelegate](SpeechlyDelegate.md)
45 | - [SpeechlyProtocol](SpeechlyProtocol.md)
46 |
47 |
48 |
49 |
50 | Global Functions
51 |
52 | - [makeChannel(addr:group:)](makeChannel\(addr:group:\).md)
53 | - [makeChannel(addr:loopCount:)](makeChannel\(addr:loopCount:\).md)
54 | - [makeTokenCallOptions(token:)](makeTokenCallOptions\(token:\).md)
55 |
56 |
57 |
--------------------------------------------------------------------------------
/docs/makeChannel(addr:group:).md:
--------------------------------------------------------------------------------
1 | # makeChannel(addr:group:)
2 |
3 | A function that creates a new gRPC channel for the provided address.
4 |
5 | ``` swift
6 | public func makeChannel(addr: String, group: EventLoopGroup) throws -> GRPCChannel
7 | ```
8 |
9 | ## Parameters
10 |
11 | - addr: The address of the gRPC server to connect to.
12 | - group: The NIO eventloop group to use for backing the channel.
13 |
14 | ## Returns
15 |
16 | A gRPC channel connected to the given server address and backed by the given eventloop group.
17 |
--------------------------------------------------------------------------------
/docs/makeChannel(addr:loopCount:).md:
--------------------------------------------------------------------------------
1 | # makeChannel(addr:loopCount:)
2 |
3 | A function that creates a new gRPC channel for the provided address.
4 | It will also create a NIO eventloop group with the specified loop count.
5 |
6 | ``` swift
7 | public func makeChannel(addr: String, loopCount: Int) throws -> GRPCChannel
8 | ```
9 |
10 | ## Parameters
11 |
12 | - addr: The address of the gRPC server to connect to.
13 | - loopCount: The number of event loops to create in the event loop group.
14 |
15 | ## Returns
16 |
17 | A gRPC channel connected to the given server address and backed by a platform-specific eventloop group.
18 |
--------------------------------------------------------------------------------
/docs/makeTokenCallOptions(token:).md:
--------------------------------------------------------------------------------
1 | # makeTokenCallOptions(token:)
2 |
3 | A function that creates new gRPC call options (metadata) that contains an authorisation token.
4 |
5 | ``` swift
6 | public func makeTokenCallOptions(token: String) -> CallOptions
7 | ```
8 |
9 | The resulting metadata has a pair that looks like `Authorization: Bearer ${token}`.
10 |
11 | ## Parameters
12 |
13 | - token: The token to use.
14 |
15 | ## Returns
16 |
17 | A `CallOptions` value that contains custom metadata with the token as the authorization bearer.
18 |
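19 | ## Example
20 |
21 | A small sketch, not generated from the source: building a channel and attaching an authorisation token to calls. The address and token are placeholders.
22 |
23 | ``` swift
24 | import Foundation
25 | import GRPC
26 | import NIO
27 | import Speechly
28 |
29 | func makeAuthenticatedCallOptions() throws -> (GRPCChannel, CallOptions) {
30 |     let group = MultiThreadedEventLoopGroup(numberOfThreads: 1)
31 |     let channel = try makeChannel(addr: "grpc+tls://api.speechly.com:443", group: group)
32 |     let callOptions = makeTokenCallOptions(token: "<access-token>")
33 |     return (channel, callOptions)
34 | }
35 | ```
36 |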
--------------------------------------------------------------------------------