├── .gitignore
├── LICENSE
├── README.md
└── TextAndVoice
    ├── TextAndVoice.xcodeproj
    │   ├── project.pbxproj
    │   └── project.xcworkspace
    │       └── contents.xcworkspacedata
    └── TextAndVoice
        ├── AppDelegate.swift
        ├── Assets.xcassets
        │   └── AppIcon.appiconset
        │       ├── Contents.json
        │       ├── Icon-60.png
        │       ├── Icon-60@2x.png
        │       ├── Icon-60@3x.png
        │       ├── Icon-Small29@2x.png
        │       ├── Icon-Small29@3x-1.png
        │       ├── Icon-Small29@3x.png
        │       ├── Icon-Spotlight-40.png
        │       ├── Icon-Spotlight-40@2x.png
        │       └── icon1024.png
        ├── Base.lproj
        │   ├── LaunchScreen.storyboard
        │   └── Main.storyboard
        ├── Info.plist
        ├── TextAndVoice.xcdatamodeld
        │   ├── .xccurrentversion
        │   └── TextAndVoice.xcdatamodel
        │       └── contents
        ├── ViewController.swift
        ├── VoiceTextView.swift
        ├── 再别康桥.mp3
        ├── 实时语音转换文本
        │   ├── RealDataRecordController.swift
        │   └── RealDataRecordController.xib
        ├── 文字转语音
        │   ├── TextToVoiceController.swift
        │   └── TextToVoiceController.xib
        ├── 本地语音转换文本
        │   ├── LocalVoiceController.swift
        │   └── LocalVoiceController.xib
        └── 语音转文字
            ├── VoiceToTextController.swift
            └── VoiceToTextController.xib

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Xcode
#
# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore

## Build generated
build/
DerivedData/

## Various settings
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
xcuserdata/

## Other
*.moved-aside
*.xccheckout
*.xcscmblueprint

## Obj-C/Swift specific
*.hmap
*.ipa
*.dSYM.zip
*.dSYM

## Playgrounds
timeline.xctimeline
playground.xcworkspace

# Swift Package Manager
#
# Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
# Packages/
# Package.pins
.build/

# CocoaPods
#
# We recommend against adding the Pods directory to your .gitignore. However
# you should judge for yourself, the pros and cons are mentioned at:
# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
#
# Pods/

# Carthage
#
# Add this line if you want to avoid checking in source code from Carthage dependencies.
# Carthage/Checkouts

Carthage/Build

# fastlane
#
# It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
# screenshots whenever they are needed.
# For more information about the recommended setup visit:
# https://docs.fastlane.tools/best-practices/source-control/#source-control

fastlane/report.xml
fastlane/Preview.html
fastlane/screenshots
fastlane/test_output

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2017 coderQuanjun

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# TextAndVoice
# Converting Between Speech and Text in Swift
> Related blog posts
- Jianshu: http://www.jianshu.com/p/aa4b44e192fd
- CSDN: http://blog.csdn.net/ShmilyCoder/article/details/78872486
- GitHub: https://github.com/CoderTitan/TextAndVoice

> Converting between speech and text involves two technologies: text-to-speech and speech-to-text.
- Text-to-speech has been available since iOS 7 and uses the AVFoundation framework.
- Speech-to-text relies on the `Speech` framework, which Apple introduced in iOS 10.
- The simpler of the two, text-to-speech, is covered first.
- [Demo project on GitHub](https://github.com/CoderTitan/TextAndVoice)
- The project includes:
  - Text to speech
  - Real-time speech to text
  - Local audio file to text
  - Recording audio, saving it locally, and converting it to text

## I. Text to Speech
- Text-to-speech, usually abbreviated TTS (short for `Text To Speech`): [Apple's speech synthesis documentation](https://developer.apple.com/documentation/avfoundation/speech_synthesis)
- It was added in iOS 7 and is part of the AVFoundation framework.
- The classes involved are introduced below.

### 1. `AVSpeechSynthesizer`: the speech synthesizer
#### 1-1. Properties

```
//Whether speech is currently being spoken
open var isSpeaking: Bool { get }

//Whether speech is currently paused
open var isPaused: Bool { get }
```

#### 1-2. Methods

```
//Speak an utterance
open func speak(_ utterance: AVSpeechUtterance)

//Stop speaking
open func stopSpeaking(at boundary: AVSpeechBoundary) -> Bool

//Pause speaking
open func pauseSpeaking(at boundary: AVSpeechBoundary) -> Bool

//Resume speaking
open func continueSpeaking() -> Bool

//Output channels (iOS 10 and later)
open var outputChannels: [AVAudioSessionChannelDescription]?
```

### 2. AVSpeechBoundary
- An enum describing the point at which speech may be paused or stopped

```
case immediate
//Pause or stop the speech immediately.

case word
//Pause or stop only after the current word has been spoken.
```
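Tying the last two sections together, here is a minimal sketch of how the state properties and the boundary values are typically combined. It is not taken from the demo project; the `SpeechPlayer` type and its method names are placeholders:

```
import AVFoundation

final class SpeechPlayer {
    private let synthesizer = AVSpeechSynthesizer()

    // Speak a piece of text from the beginning
    func speak(_ text: String) {
        synthesizer.speak(AVSpeechUtterance(string: text))
    }

    // Toggle between pausing and resuming, based on the synthesizer's state
    func togglePause() {
        if synthesizer.isPaused {
            synthesizer.continueSpeaking()           // resume where it left off
        } else if synthesizer.isSpeaking {
            synthesizer.pauseSpeaking(at: .word)     // finish the current word, then pause
        }
    }

    // Discard whatever is left and stop right away
    func stop() {
        synthesizer.stopSpeaking(at: .immediate)
    }
}
```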
### 3. AVSpeechUtterance
- The class that turns a piece of text into a chunk of speech; in other words, it represents one utterance to be played.

#### 3-1. Properties

```
//The voice to use
open var voice: AVSpeechSynthesisVoice?

//The plain text to speak
open var speechString: String { get }

//The attributed text to speak
@available(iOS 10.0, *)
open var attributedSpeechString: NSAttributedString { get }

//Speaking rate
open var rate: Float
//Three rate constants are provided: AVSpeechUtteranceMinimumSpeechRate, AVSpeechUtteranceMaximumSpeechRate and AVSpeechUtteranceDefaultSpeechRate

//Baseline pitch, range [0.5 - 2], default = 1
open var pitchMultiplier: Float

//Volume, range [0 - 1], default = 1
open var volume: Float

//How long to wait before this utterance starts
open var preUtteranceDelay: TimeInterval

//How long the synthesizer waits after this utterance finishes before processing the next queued one, default 0.0
open var postUtteranceDelay: TimeInterval
```

#### 3-2. Initializers

```
public init(string: String)

@available(iOS 10.0, *)
public init(attributedString string: NSAttributedString)
```

### 4. AVSpeechSynthesisVoice
- A distinct voice used for speech synthesis; voices mainly differ by language and region.
- All supported languages are listed in the appendix at the bottom.

#### 4-1. Properties

```
//The voice's language
open var language: String { get }

//The voice's unique identifier
@available(iOS 9.0, *)
open var identifier: String { get }

@available(iOS 9.0, *)
open var name: String { get }

@available(iOS 9.0, *)
open var quality: AVSpeechSynthesisVoiceQuality { get }

```

#### 4-2. Methods

```
init?(language: String?)
//Returns the voice for the specified language and locale.

class func speechVoices()
//Returns all available voices.


class func currentLanguageCode()
//Returns the language code of the user's current locale.
```


### 5. The AVSpeechSynthesizerDelegate protocol
- All delegate methods are available from iOS 7.0 onwards

```
//Speech started
optional public func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didStart utterance: AVSpeechUtterance)

//Speech finished
optional public func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didFinish utterance: AVSpeechUtterance)

//Speech paused
optional public func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didPause utterance: AVSpeechUtterance)

//Speech resumed
optional public func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didContinue utterance: AVSpeechUtterance)

//Speech cancelled
optional public func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didCancel utterance: AVSpeechUtterance)

//About to speak a range of the utterance's text
optional public func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, willSpeakRangeOfSpeechString characterRange: NSRange, utterance: AVSpeechUtterance)

```

### 6. Core code for the feature

```
//MARK: Start/stop conversion
extension TextToVoiceController{
    //Start the conversion
    fileprivate func startTranslattion(){
        //1. Create the voice to synthesize with
        let voice = AVSpeechSynthesisVoice(language: "zh-CN")
        
        //2. Create the utterance to be synthesized
        let utterance = AVSpeechUtterance(string: textView.text)
        utterance.rate = AVSpeechUtteranceDefaultSpeechRate
        utterance.voice = voice
        utterance.volume = 1
        utterance.postUtteranceDelay = 0.1
        utterance.pitchMultiplier = 1
        //Start speaking
        avSpeech.speak(utterance)
    }
    
    //Pause playback
    fileprivate func pauseTranslation(){
        avSpeech.pauseSpeaking(at: .immediate)
    }
    
    //Resume playback
    fileprivate func continueSpeek(){
        avSpeech.continueSpeaking()
    }
    
    //Cancel playback
    fileprivate func cancleSpeek(){
        avSpeech.stopSpeaking(at: .immediate)
    }
}

```
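Before moving on to speech recognition, it can be useful to check at runtime which of the voices listed in the appendix are actually installed on a device. A small sketch, not part of the demo project; the zh-CN preference is just an example:

```
import AVFoundation

// Every voice installed on the device, with its language code, name and quality
for voice in AVSpeechSynthesisVoice.speechVoices() {
    print(voice.language, voice.name, voice.quality.rawValue)
}

// The language code of the user's current locale, e.g. "zh-CN"
print(AVSpeechSynthesisVoice.currentLanguageCode())

// Prefer a Mandarin voice, falling back to the current locale if it is not installed
let preferredVoice = AVSpeechSynthesisVoice(language: "zh-CN")
    ?? AVSpeechSynthesisVoice(language: AVSpeechSynthesisVoice.currentLanguageCode())
```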
语音转文本 207 | - 在2016 WWDC大会上,Apple公司介绍了一个很好的语音识别的API,那就是Speech框架 208 | - Speech框架支持iOS10以上系统 209 | - [Speech框架官方文档](https://developer.apple.com/documentation/speech) 210 | - 下面简单介绍一下主要的操作类 211 | 212 | ### 1. `SFSpeechRecognizer`: 语音识别器 213 | - 这个类是语音识别的操作类 214 | - 用于语音识别用户权限的申请,语言环境的设置,语音模式的设置以及向Apple服务发送语音识别的请求 215 | - 初始化方法 216 | 217 | ``` 218 | //这个初始化方法将默认以设备当前的语言环境作为语音识别的语言环境 219 | public convenience init?() 220 | 221 | //根据支持的语言初始化 222 | public init?(locale: Locale) 223 | //示例 224 | let recognize = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN")) 225 | ``` 226 | - 类方法 227 | 228 | ``` 229 | //获取所有支持的语言 230 | open class func supportedLocales() -> Set 231 | 232 | //获取当前用户权限状态 233 | open class func authorizationStatus() -> SFSpeechRecognizerAuthorizationStatus 234 | 235 | //申请语音识别用户权限 236 | open class func requestAuthorization(_ handler: @escaping (SFSpeechRecognizerAuthorizationStatus) -> Swift.Void) 237 | ``` 238 | 239 | - 其他属性 240 | 241 | ``` 242 | var isAvailable: Bool 243 | //指示语音识别器是否可用 244 | 245 | var locale: Locale 246 | //当前语音识别器的语言环境 247 | 248 | class func supportedLocales() 249 | //获取语音识别所有支持的语言 250 | 251 | var queue: OperationQueue 252 | //语音识别器用于识别任务处理程序和委托消息的队列 253 | ``` 254 | 255 | - 相关方法 256 | 257 | ``` 258 | //识别与指定请求关联的音频来源的语音,使用指定的协议返回结果 259 | open func recognitionTask(with request: SFSpeechRecognitionRequest, resultHandler: @escaping (SFSpeechRecognitionResult?, Error?) -> Swift.Void) -> SFSpeechRecognitionTask 260 | 261 | //识别与指定请求关联的音频源的语音, 使用闭包结果 262 | open func recognitionTask(with request: SFSpeechRecognitionRequest, delegate: SFSpeechRecognitionTaskDelegate) -> SFSpeechRecognitionTask 263 | ``` 264 | - 代理 265 | 266 | ``` 267 | weak var delegate: SFSpeechRecognizerDelegate? { get set } 268 | 269 | //代理方法: 监视语音识别器的可用性 270 | func speechRecognizer(SFSpeechRecognizer, availabilityDidChange: Bool) 271 | ``` 272 | 273 | ### 2. `SFSpeechRecognitionRequest` 274 | - 语音识别请求类,需要通过其子类来进行实例化 275 | - 相关属性 276 | 277 | ``` 278 | 279 | var contextualStrings: [String] 280 | //一系列应该被识别的语言种类 281 | 282 | var shouldReportPartialResults: Bool 283 | //是否获取每个语句的最终结果。 284 | 285 | var taskHint: SFSpeechRecognitionTaskHint 286 | //正在执行的语音识别的类型 287 | 288 | var interactionIdentifier: String? 289 | //标识与请求关联的识别请求对象的字符串 290 | ``` 291 | - 子类 292 | - `SFSpeechURLRecognitionRequest` 293 | - `SFSpeechAudioBufferRecognitionRequest` 294 | 295 | #### 2-1. `SFSpeechURLRecognitionRequest` 296 | - 通过制定的URL路径识别本地的语音 297 | - 方法和属性 298 | 299 | ``` 300 | //创建一个语音识别请求,使用指定的URL进行初始化 301 | public init(url URL: URL) 302 | 303 | //获取当前的usl路径 304 | open var url: URL { get } 305 | ``` 306 | 307 | #### 2-2. `SFSpeechAudioBufferRecognitionRequest` 308 | - 识别音频缓冲区中提供的语音的请求 309 | - 识别即时语音, 类似于iPhone 中的Siri 310 | - [官方文档](https://developer.apple.com/documentation/speech/sfspeechaudiobufferrecognitionrequest) 311 | - 音频缓冲区相关方法属性 312 | 313 | ``` 314 | func append(AVAudioPCMBuffer) 315 | //将PCM格式的音频追加到识别请求的末尾。 316 | 317 | func appendAudioSampleBuffer(CMSampleBuffer) 318 | //将音频附加到识别请求的末尾。 319 | 320 | func endAudio() 321 | //完成输入 322 | 323 | ``` 324 | - 获取音频格式 325 | 326 | ``` 327 | var nativeAudioFormat: AVAudioFormat 328 | //用于最佳语音识别的首选音频格式。 329 | ``` 330 | 331 | ### 3. 
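A minimal sketch of how such a URL request might be used to recognize a bundled audio file. This is not the demo's LocalVoiceController code; it assumes authorization has already been granted and that a file such as the bundled 再别康桥.mp3 is in the main bundle:

```
import Speech

func recognizeLocalFile() {
    // A Mandarin recognizer; the initializer returns nil if the locale is not supported
    guard let recognizer = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN")),
          recognizer.isAvailable,
          let fileURL = Bundle.main.url(forResource: "再别康桥", withExtension: "mp3") else {
        return
    }

    let request = SFSpeechURLRecognitionRequest(url: fileURL)
    request.shouldReportPartialResults = false   // only deliver the final result

    // The returned SFSpeechRecognitionTask can be kept if you need to cancel recognition later
    _ = recognizer.recognitionTask(with: request) { result, error in
        if let error = error {
            print("Recognition failed: \(error)")
            return
        }
        guard let result = result, result.isFinal else { return }
        print(result.bestTranscription.formattedString)
    }
}
```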
#### 2-2. `SFSpeechAudioBufferRecognitionRequest`
- A request that recognizes speech supplied through an audio buffer.
- Used for live recognition, similar to Siri on the iPhone.
- [Official documentation](https://developer.apple.com/documentation/speech/sfspeechaudiobufferrecognitionrequest)
- Audio-buffer methods and properties

```
func append(AVAudioPCMBuffer)
//Appends PCM-format audio to the end of the recognition request.

func appendAudioSampleBuffer(CMSampleBuffer)
//Appends audio to the end of the recognition request.

func endAudio()
//Marks the end of audio input

```
- Audio format

```
var nativeAudioFormat: AVAudioFormat
//The preferred audio format for optimal speech recognition.
```

### 3. `SFSpeechRecognitionTask`
- The task object created for a speech recognition request.
- Lets you monitor the progress of recognition.
- Methods and properties

```
func cancel()
//Cancels the current recognition task.

var isCancelled: Bool
//Whether the recognition task has been cancelled.

var finish()
//Stops accepting new audio and finishes processing the audio already received

var isFinishing: Bool
//Whether audio input has stopped.

var state: SFSpeechRecognitionTaskState
//The current state of the recognition task.

var error: Error?
//An error object describing any error that occurred during the task.

```

#### 3-1. The `SFSpeechRecognitionTaskDelegate` protocol

```
//Called first, when speech is initially detected in the audio source
optional public func speechRecognitionDidDetectSpeech(_ task: SFSpeechRecognitionTask)

//Called whenever a new hypothesis becomes available.
//Apple's recognition service may produce several possible results for the supplied audio; this method is called once for each of them
optional public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didHypothesizeTranscription transcription: SFTranscription)

//Called when all available results have been recognized
optional public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishRecognition recognitionResult: SFSpeechRecognitionResult)

//Called when audio input is no longer accepted, i.e. when processing of the recognition task begins
optional public func speechRecognitionTaskFinishedReadingAudio(_ task: SFSpeechRecognitionTask)

//Called when the recognition task is cancelled
optional public func speechRecognitionTaskWasCancelled(_ task: SFSpeechRecognitionTask)

//Called when the recognition task completes
optional public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishSuccessfully successfully: Bool)

```

### 4. SFTranscription
- The class that holds the information for a transcribed phrase.
- A sentence you speak may be made up of several words: `formattedString` is the whole sentence, and `segments` is the collection of the individual words that make it up.

```
//A string representing the transcribed speech
open var formattedString: String { get }

//All recognized segments
open var segments: [SFTranscriptionSegment] { get }

```

### 5. SFTranscriptionSegment
- A single segment (node) within a transcription.
- Properties

```
//The recognized text for this segment
open var substring: String { get }

//The range of this segment's text within the full transcription
open var substringRange: NSRange { get }

//The timestamp of this segment within the audio
open var timestamp: TimeInterval { get }

//The duration of this segment's audio
open var duration: TimeInterval { get }

//Confidence/accuracy, between 0 and 1
open var confidence: Float { get }

//Other possible interpretations of this segment
open var alternativeSubstrings: [String] { get }

```

### 6. `SFSpeechRecognitionResult`: the recognition result
- A wrapper around the recognition result. It contains several parallel transcriptions, each with confidence values describing how accurate it is likely to be.
- The class itself is only a container; the actual recognized text lives in SFTranscription objects.

```
//The transcription with the highest confidence
@NSCopying open var bestTranscription: SFTranscription { get }

//All candidate transcriptions, sorted by decreasing confidence
open var transcriptions: [SFTranscription] { get }

//Whether recognition is complete; if true, all recognition information has been delivered
open var isFinal: Bool { get }
```
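Putting the last few classes together, a result handler might read the recognized text and the per-segment details like this. A sketch only; `handle(result:)` is a hypothetical helper, not part of the demo:

```
import Speech

func handle(result: SFSpeechRecognitionResult) {
    // The transcription Apple ranks as most likely
    print("Best guess:", result.bestTranscription.formattedString)

    // Per-word details: text, position in the sentence, timing and confidence
    // (confidence is often 0 until the result is final)
    for segment in result.bestTranscription.segments {
        print(segment.substring,
              "range:", segment.substringRange,
              "starts at:", segment.timestamp,
              "duration:", segment.duration,
              "confidence:", segment.confidence)
    }

    // Alternative transcriptions, ordered by decreasing confidence
    for transcription in result.transcriptions.dropFirst() {
        print("Alternative:", transcription.formattedString)
    }
}
```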
## III. Speech Recognition to Text
- Add the Speech framework:
  - `import Speech`
- `Info.plist` must contain the relevant permission keys:

```
Privacy - Speech Recognition Usage Description
//Speech recognition permission

Privacy - Microphone Usage Description
//Microphone permission
```
- Checking user authorization
  - Before using the Speech framework you must first obtain the user's permission.
  - Recognition is not performed only on the local iOS device; Apple's servers take part as well.
  - All of the audio data is sent to Apple's backend for processing.
  - Obtaining user authorization is therefore mandatory.


```
///Request speech recognition authorization
fileprivate func addSpeechRecordLimit(){
    SFSpeechRecognizer.requestAuthorization { (state) in
        var isEnable = false
        switch state {
        case .authorized:
            isEnable = true
            print("Speech recognition authorized")
        case .notDetermined:
            isEnable = false
            print("Speech recognition not yet authorized")
        case .denied:
            isEnable = false
            print("The user denied access to speech recognition")
        case .restricted:
            isEnable = false
            print("Speech recognition is not available on this device")
        }
        DispatchQueue.main.async {
            self.recordBtn.isEnabled = isEnable
            self.recordBtn.backgroundColor = isEnable ? UIColor(red: 255/255.0, green: 64/255.0, blue: 64/255.0, alpha: 1) : UIColor.lightGray
        }
    }
}


```
- After that, initialize the request and recognizer objects and process the audio.
- [See the demo on GitHub for the full code](https://github.com/CoderTitan/TextAndVoice)

---

## Appendix:
### Languages supported by `AVSpeechSynthesisVoice`
```
ar-SA  Saudi Arabia (Arabic)
en-ZA  South Africa (English)
nl-BE  Belgium (Dutch)
en-AU  Australia (English)
th-TH  Thailand (Thai)
de-DE  Germany (German)
en-US  United States (English)
pt-BR  Brazil (Portuguese)
pl-PL  Poland (Polish)
en-IE  Ireland (English)
el-GR  Greece (Greek)
id-ID  Indonesia (Indonesian)
sv-SE  Sweden (Swedish)
tr-TR  Turkey (Turkish)
pt-PT  Portugal (Portuguese)
ja-JP  Japan (Japanese)
ko-KR  South Korea (Korean)
hu-HU  Hungary (Hungarian)
cs-CZ  Czech Republic (Czech)
da-DK  Denmark (Danish)
es-MX  Mexico (Spanish)
fr-CA  Canada (French)
nl-NL  Netherlands (Dutch)
fi-FI  Finland (Finnish)
es-ES  Spain (Spanish)
it-IT  Italy (Italian)
he-IL  Israel (Hebrew)
no-NO  Norway (Norwegian)
ro-RO  Romania (Romanian)
zh-HK  Hong Kong (Chinese)
zh-TW  Taiwan (Chinese)
sk-SK  Slovakia (Slovak)
zh-CN  China (Chinese)
ru-RU  Russia (Russian)
en-GB  United Kingdom (English)
fr-FR  France (French)
hi-IN  India (Hindi)
```

--------------------------------------------------------------------------------
/TextAndVoice/TextAndVoice.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 48; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 47E87A651FE7BA30006DF756 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 47E87A641FE7BA30006DF756 /* AppDelegate.swift */; }; 11 | 47E87A671FE7BA30006DF756 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 47E87A661FE7BA30006DF756 /* ViewController.swift */; }; 12 | 47E87A6A1FE7BA30006DF756 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 47E87A681FE7BA30006DF756 /* Main.storyboard */; }; 13 | 47E87A6D1FE7BA30006DF756 /* TextAndVoice.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = 47E87A6B1FE7BA30006DF756 /* TextAndVoice.xcdatamodeld */; }; 14 | 47E87A6F1FE7BA30006DF756 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 47E87A6E1FE7BA30006DF756 /* Assets.xcassets */; }; 15 | 47E87A721FE7BA30006DF756 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 47E87A701FE7BA30006DF756 /* LaunchScreen.storyboard */; }; 16 | 47E87A7D1FE7C03E006DF756 /* TextToVoiceController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 47E87A7B1FE7C03E006DF756 /* TextToVoiceController.swift */; }; 17 | 47E87A7E1FE7C03E006DF756 /* TextToVoiceController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 47E87A7C1FE7C03E006DF756 /* TextToVoiceController.xib */; }; 18 | 47E87A811FE7C050006DF756 /* VoiceToTextController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 47E87A7F1FE7C050006DF756 /* VoiceToTextController.swift */; }; 19 | 47E87A821FE7C050006DF756 /* VoiceToTextController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 47E87A801FE7C050006DF756 /* VoiceToTextController.xib */; }; 20 | 47E87AAB1FEA2ADE006DF756 /* LocalVoiceController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 47E87AA91FEA2ADE006DF756 /* LocalVoiceController.swift */; }; 21 | 47E87AAC1FEA2ADE006DF756 /* LocalVoiceController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 47E87AAA1FEA2ADE006DF756 /* LocalVoiceController.xib */; }; 22 | 47E87AAF1FEA30BA006DF756 /* RealDataRecordController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 47E87AAD1FEA30BA006DF756 /* RealDataRecordController.swift */; }; 23 | 47E87AB01FEA30BA006DF756 /* RealDataRecordController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 47E87AAE1FEA30BA006DF756 /* RealDataRecordController.xib */; }; 24 | 47E87AB41FEA4097006DF756 /* 再别康桥.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 47E87AB31FEA4097006DF756 /* 再别康桥.mp3 */; }; 25 | 770060FF21BA4579001C91F6 /* VoiceTextView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 770060FE21BA4579001C91F6 /* VoiceTextView.swift */; }; 26 | /* End PBXBuildFile section */ 27 | 28 | /* Begin PBXFileReference section */ 29 | 47E87A611FE7BA30006DF756 /* TextAndVoice.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = TextAndVoice.app; sourceTree = BUILT_PRODUCTS_DIR; }; 30 | 47E87A641FE7BA30006DF756 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 31 | 47E87A661FE7BA30006DF756 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 32 | 47E87A691FE7BA30006DF756 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 33 | 47E87A6C1FE7BA30006DF756 
/* TextAndVoice.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = TextAndVoice.xcdatamodel; sourceTree = ""; }; 34 | 47E87A6E1FE7BA30006DF756 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 35 | 47E87A711FE7BA30006DF756 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 36 | 47E87A731FE7BA30006DF756 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 37 | 47E87A7B1FE7C03E006DF756 /* TextToVoiceController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TextToVoiceController.swift; sourceTree = ""; }; 38 | 47E87A7C1FE7C03E006DF756 /* TextToVoiceController.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = TextToVoiceController.xib; sourceTree = ""; }; 39 | 47E87A7F1FE7C050006DF756 /* VoiceToTextController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceToTextController.swift; sourceTree = ""; }; 40 | 47E87A801FE7C050006DF756 /* VoiceToTextController.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = VoiceToTextController.xib; sourceTree = ""; }; 41 | 47E87AA91FEA2ADE006DF756 /* LocalVoiceController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocalVoiceController.swift; sourceTree = ""; }; 42 | 47E87AAA1FEA2ADE006DF756 /* LocalVoiceController.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = LocalVoiceController.xib; sourceTree = ""; }; 43 | 47E87AAD1FEA30BA006DF756 /* RealDataRecordController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RealDataRecordController.swift; sourceTree = ""; }; 44 | 47E87AAE1FEA30BA006DF756 /* RealDataRecordController.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = RealDataRecordController.xib; sourceTree = ""; }; 45 | 47E87AB31FEA4097006DF756 /* 再别康桥.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = "再别康桥.mp3"; sourceTree = ""; }; 46 | 770060FE21BA4579001C91F6 /* VoiceTextView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceTextView.swift; sourceTree = ""; }; 47 | /* End PBXFileReference section */ 48 | 49 | /* Begin PBXFrameworksBuildPhase section */ 50 | 47E87A5E1FE7BA30006DF756 /* Frameworks */ = { 51 | isa = PBXFrameworksBuildPhase; 52 | buildActionMask = 2147483647; 53 | files = ( 54 | ); 55 | runOnlyForDeploymentPostprocessing = 0; 56 | }; 57 | /* End PBXFrameworksBuildPhase section */ 58 | 59 | /* Begin PBXGroup section */ 60 | 47E87A581FE7BA30006DF756 = { 61 | isa = PBXGroup; 62 | children = ( 63 | 47E87A631FE7BA30006DF756 /* TextAndVoice */, 64 | 47E87A621FE7BA30006DF756 /* Products */, 65 | ); 66 | sourceTree = ""; 67 | }; 68 | 47E87A621FE7BA30006DF756 /* Products */ = { 69 | isa = PBXGroup; 70 | children = ( 71 | 47E87A611FE7BA30006DF756 /* TextAndVoice.app */, 72 | ); 73 | name = Products; 74 | sourceTree = ""; 75 | }; 76 | 47E87A631FE7BA30006DF756 /* TextAndVoice */ = { 77 | isa = PBXGroup; 78 | children = ( 79 | 47E87AA81FEA2AA7006DF756 /* 本地语音转换文本 */, 80 | 47E87AA71FEA2AA7006DF756 /* 实时语音转换文本 */, 81 | 47E87A791FE7C01A006DF756 /* 文字转语音 */, 82 | 47E87A7A1FE7C01A006DF756 /* 语音转文字 */, 83 | 47E87A641FE7BA30006DF756 /* AppDelegate.swift */, 84 | 47E87A661FE7BA30006DF756 /* 
ViewController.swift */, 85 | 770060FE21BA4579001C91F6 /* VoiceTextView.swift */, 86 | 47E87A681FE7BA30006DF756 /* Main.storyboard */, 87 | 47E87A6E1FE7BA30006DF756 /* Assets.xcassets */, 88 | 47E87A701FE7BA30006DF756 /* LaunchScreen.storyboard */, 89 | 47E87AB31FEA4097006DF756 /* 再别康桥.mp3 */, 90 | 47E87A731FE7BA30006DF756 /* Info.plist */, 91 | 47E87A6B1FE7BA30006DF756 /* TextAndVoice.xcdatamodeld */, 92 | ); 93 | path = TextAndVoice; 94 | sourceTree = ""; 95 | }; 96 | 47E87A791FE7C01A006DF756 /* 文字转语音 */ = { 97 | isa = PBXGroup; 98 | children = ( 99 | 47E87A7B1FE7C03E006DF756 /* TextToVoiceController.swift */, 100 | 47E87A7C1FE7C03E006DF756 /* TextToVoiceController.xib */, 101 | ); 102 | path = "文字转语音"; 103 | sourceTree = ""; 104 | }; 105 | 47E87A7A1FE7C01A006DF756 /* 语音转文字 */ = { 106 | isa = PBXGroup; 107 | children = ( 108 | 47E87A7F1FE7C050006DF756 /* VoiceToTextController.swift */, 109 | 47E87A801FE7C050006DF756 /* VoiceToTextController.xib */, 110 | ); 111 | path = "语音转文字"; 112 | sourceTree = ""; 113 | }; 114 | 47E87AA71FEA2AA7006DF756 /* 实时语音转换文本 */ = { 115 | isa = PBXGroup; 116 | children = ( 117 | 47E87AAD1FEA30BA006DF756 /* RealDataRecordController.swift */, 118 | 47E87AAE1FEA30BA006DF756 /* RealDataRecordController.xib */, 119 | ); 120 | path = "实时语音转换文本"; 121 | sourceTree = ""; 122 | }; 123 | 47E87AA81FEA2AA7006DF756 /* 本地语音转换文本 */ = { 124 | isa = PBXGroup; 125 | children = ( 126 | 47E87AA91FEA2ADE006DF756 /* LocalVoiceController.swift */, 127 | 47E87AAA1FEA2ADE006DF756 /* LocalVoiceController.xib */, 128 | ); 129 | path = "本地语音转换文本"; 130 | sourceTree = ""; 131 | }; 132 | /* End PBXGroup section */ 133 | 134 | /* Begin PBXNativeTarget section */ 135 | 47E87A601FE7BA30006DF756 /* TextAndVoice */ = { 136 | isa = PBXNativeTarget; 137 | buildConfigurationList = 47E87A761FE7BA30006DF756 /* Build configuration list for PBXNativeTarget "TextAndVoice" */; 138 | buildPhases = ( 139 | 47E87A5D1FE7BA30006DF756 /* Sources */, 140 | 47E87A5E1FE7BA30006DF756 /* Frameworks */, 141 | 47E87A5F1FE7BA30006DF756 /* Resources */, 142 | ); 143 | buildRules = ( 144 | ); 145 | dependencies = ( 146 | ); 147 | name = TextAndVoice; 148 | productName = TextAndVoice; 149 | productReference = 47E87A611FE7BA30006DF756 /* TextAndVoice.app */; 150 | productType = "com.apple.product-type.application"; 151 | }; 152 | /* End PBXNativeTarget section */ 153 | 154 | /* Begin PBXProject section */ 155 | 47E87A591FE7BA30006DF756 /* Project object */ = { 156 | isa = PBXProject; 157 | attributes = { 158 | LastSwiftUpdateCheck = 0920; 159 | LastUpgradeCheck = 0920; 160 | ORGANIZATIONNAME = CoderJun; 161 | TargetAttributes = { 162 | 47E87A601FE7BA30006DF756 = { 163 | CreatedOnToolsVersion = 9.2; 164 | ProvisioningStyle = Manual; 165 | }; 166 | }; 167 | }; 168 | buildConfigurationList = 47E87A5C1FE7BA30006DF756 /* Build configuration list for PBXProject "TextAndVoice" */; 169 | compatibilityVersion = "Xcode 8.0"; 170 | developmentRegion = en; 171 | hasScannedForEncodings = 0; 172 | knownRegions = ( 173 | en, 174 | Base, 175 | ); 176 | mainGroup = 47E87A581FE7BA30006DF756; 177 | productRefGroup = 47E87A621FE7BA30006DF756 /* Products */; 178 | projectDirPath = ""; 179 | projectRoot = ""; 180 | targets = ( 181 | 47E87A601FE7BA30006DF756 /* TextAndVoice */, 182 | ); 183 | }; 184 | /* End PBXProject section */ 185 | 186 | /* Begin PBXResourcesBuildPhase section */ 187 | 47E87A5F1FE7BA30006DF756 /* Resources */ = { 188 | isa = PBXResourcesBuildPhase; 189 | buildActionMask = 2147483647; 190 | files = ( 191 | 
47E87AAC1FEA2ADE006DF756 /* LocalVoiceController.xib in Resources */, 192 | 47E87A7E1FE7C03E006DF756 /* TextToVoiceController.xib in Resources */, 193 | 47E87A721FE7BA30006DF756 /* LaunchScreen.storyboard in Resources */, 194 | 47E87AB41FEA4097006DF756 /* 再别康桥.mp3 in Resources */, 195 | 47E87A821FE7C050006DF756 /* VoiceToTextController.xib in Resources */, 196 | 47E87AB01FEA30BA006DF756 /* RealDataRecordController.xib in Resources */, 197 | 47E87A6F1FE7BA30006DF756 /* Assets.xcassets in Resources */, 198 | 47E87A6A1FE7BA30006DF756 /* Main.storyboard in Resources */, 199 | ); 200 | runOnlyForDeploymentPostprocessing = 0; 201 | }; 202 | /* End PBXResourcesBuildPhase section */ 203 | 204 | /* Begin PBXSourcesBuildPhase section */ 205 | 47E87A5D1FE7BA30006DF756 /* Sources */ = { 206 | isa = PBXSourcesBuildPhase; 207 | buildActionMask = 2147483647; 208 | files = ( 209 | 47E87AAF1FEA30BA006DF756 /* RealDataRecordController.swift in Sources */, 210 | 47E87AAB1FEA2ADE006DF756 /* LocalVoiceController.swift in Sources */, 211 | 47E87A7D1FE7C03E006DF756 /* TextToVoiceController.swift in Sources */, 212 | 47E87A811FE7C050006DF756 /* VoiceToTextController.swift in Sources */, 213 | 770060FF21BA4579001C91F6 /* VoiceTextView.swift in Sources */, 214 | 47E87A671FE7BA30006DF756 /* ViewController.swift in Sources */, 215 | 47E87A6D1FE7BA30006DF756 /* TextAndVoice.xcdatamodeld in Sources */, 216 | 47E87A651FE7BA30006DF756 /* AppDelegate.swift in Sources */, 217 | ); 218 | runOnlyForDeploymentPostprocessing = 0; 219 | }; 220 | /* End PBXSourcesBuildPhase section */ 221 | 222 | /* Begin PBXVariantGroup section */ 223 | 47E87A681FE7BA30006DF756 /* Main.storyboard */ = { 224 | isa = PBXVariantGroup; 225 | children = ( 226 | 47E87A691FE7BA30006DF756 /* Base */, 227 | ); 228 | name = Main.storyboard; 229 | sourceTree = ""; 230 | }; 231 | 47E87A701FE7BA30006DF756 /* LaunchScreen.storyboard */ = { 232 | isa = PBXVariantGroup; 233 | children = ( 234 | 47E87A711FE7BA30006DF756 /* Base */, 235 | ); 236 | name = LaunchScreen.storyboard; 237 | sourceTree = ""; 238 | }; 239 | /* End PBXVariantGroup section */ 240 | 241 | /* Begin XCBuildConfiguration section */ 242 | 47E87A741FE7BA30006DF756 /* Debug */ = { 243 | isa = XCBuildConfiguration; 244 | buildSettings = { 245 | ALWAYS_SEARCH_USER_PATHS = NO; 246 | CLANG_ANALYZER_NONNULL = YES; 247 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 248 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 249 | CLANG_CXX_LIBRARY = "libc++"; 250 | CLANG_ENABLE_MODULES = YES; 251 | CLANG_ENABLE_OBJC_ARC = YES; 252 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 253 | CLANG_WARN_BOOL_CONVERSION = YES; 254 | CLANG_WARN_COMMA = YES; 255 | CLANG_WARN_CONSTANT_CONVERSION = YES; 256 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 257 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 258 | CLANG_WARN_EMPTY_BODY = YES; 259 | CLANG_WARN_ENUM_CONVERSION = YES; 260 | CLANG_WARN_INFINITE_RECURSION = YES; 261 | CLANG_WARN_INT_CONVERSION = YES; 262 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 263 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 264 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 265 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 266 | CLANG_WARN_STRICT_PROTOTYPES = YES; 267 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 268 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 269 | CLANG_WARN_UNREACHABLE_CODE = YES; 270 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 271 | CODE_SIGN_IDENTITY = "iPhone Developer"; 272 | COPY_PHASE_STRIP = NO; 273 | DEBUG_INFORMATION_FORMAT = dwarf; 274 | ENABLE_STRICT_OBJC_MSGSEND = 
YES; 275 | ENABLE_TESTABILITY = YES; 276 | GCC_C_LANGUAGE_STANDARD = gnu11; 277 | GCC_DYNAMIC_NO_PIC = NO; 278 | GCC_NO_COMMON_BLOCKS = YES; 279 | GCC_OPTIMIZATION_LEVEL = 0; 280 | GCC_PREPROCESSOR_DEFINITIONS = ( 281 | "DEBUG=1", 282 | "$(inherited)", 283 | ); 284 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 285 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 286 | GCC_WARN_UNDECLARED_SELECTOR = YES; 287 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 288 | GCC_WARN_UNUSED_FUNCTION = YES; 289 | GCC_WARN_UNUSED_VARIABLE = YES; 290 | IPHONEOS_DEPLOYMENT_TARGET = 11.2; 291 | MTL_ENABLE_DEBUG_INFO = YES; 292 | ONLY_ACTIVE_ARCH = YES; 293 | SDKROOT = iphoneos; 294 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 295 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 296 | }; 297 | name = Debug; 298 | }; 299 | 47E87A751FE7BA30006DF756 /* Release */ = { 300 | isa = XCBuildConfiguration; 301 | buildSettings = { 302 | ALWAYS_SEARCH_USER_PATHS = NO; 303 | CLANG_ANALYZER_NONNULL = YES; 304 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 305 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 306 | CLANG_CXX_LIBRARY = "libc++"; 307 | CLANG_ENABLE_MODULES = YES; 308 | CLANG_ENABLE_OBJC_ARC = YES; 309 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 310 | CLANG_WARN_BOOL_CONVERSION = YES; 311 | CLANG_WARN_COMMA = YES; 312 | CLANG_WARN_CONSTANT_CONVERSION = YES; 313 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 314 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 315 | CLANG_WARN_EMPTY_BODY = YES; 316 | CLANG_WARN_ENUM_CONVERSION = YES; 317 | CLANG_WARN_INFINITE_RECURSION = YES; 318 | CLANG_WARN_INT_CONVERSION = YES; 319 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 320 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 321 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 322 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 323 | CLANG_WARN_STRICT_PROTOTYPES = YES; 324 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 325 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 326 | CLANG_WARN_UNREACHABLE_CODE = YES; 327 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 328 | CODE_SIGN_IDENTITY = "iPhone Developer"; 329 | COPY_PHASE_STRIP = NO; 330 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 331 | ENABLE_NS_ASSERTIONS = NO; 332 | ENABLE_STRICT_OBJC_MSGSEND = YES; 333 | GCC_C_LANGUAGE_STANDARD = gnu11; 334 | GCC_NO_COMMON_BLOCKS = YES; 335 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 336 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 337 | GCC_WARN_UNDECLARED_SELECTOR = YES; 338 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 339 | GCC_WARN_UNUSED_FUNCTION = YES; 340 | GCC_WARN_UNUSED_VARIABLE = YES; 341 | IPHONEOS_DEPLOYMENT_TARGET = 11.2; 342 | MTL_ENABLE_DEBUG_INFO = NO; 343 | SDKROOT = iphoneos; 344 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; 345 | VALIDATE_PRODUCT = YES; 346 | }; 347 | name = Release; 348 | }; 349 | 47E87A771FE7BA30006DF756 /* Debug */ = { 350 | isa = XCBuildConfiguration; 351 | buildSettings = { 352 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 353 | CODE_SIGN_IDENTITY = "iPhone Distribution"; 354 | CODE_SIGN_STYLE = Manual; 355 | DEVELOPMENT_TEAM = RL8C83227L; 356 | INFOPLIST_FILE = TextAndVoice/Info.plist; 357 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 358 | PRODUCT_BUNDLE_IDENTIFIER = com.candylink; 359 | PRODUCT_NAME = "$(TARGET_NAME)"; 360 | PROVISIONING_PROFILE_SPECIFIER = candylinkHoc; 361 | SWIFT_VERSION = 4.0; 362 | TARGETED_DEVICE_FAMILY = "1,2"; 363 | }; 364 | name = Debug; 365 | }; 366 | 47E87A781FE7BA30006DF756 /* Release */ = { 367 | isa = XCBuildConfiguration; 368 | buildSettings = { 369 | 
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 370 | CODE_SIGN_IDENTITY = "iPhone Distribution"; 371 | CODE_SIGN_STYLE = Manual; 372 | DEVELOPMENT_TEAM = RL8C83227L; 373 | INFOPLIST_FILE = TextAndVoice/Info.plist; 374 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 375 | PRODUCT_BUNDLE_IDENTIFIER = com.candylink; 376 | PRODUCT_NAME = "$(TARGET_NAME)"; 377 | PROVISIONING_PROFILE_SPECIFIER = candylinkHoc; 378 | SWIFT_VERSION = 4.0; 379 | TARGETED_DEVICE_FAMILY = "1,2"; 380 | }; 381 | name = Release; 382 | }; 383 | /* End XCBuildConfiguration section */ 384 | 385 | /* Begin XCConfigurationList section */ 386 | 47E87A5C1FE7BA30006DF756 /* Build configuration list for PBXProject "TextAndVoice" */ = { 387 | isa = XCConfigurationList; 388 | buildConfigurations = ( 389 | 47E87A741FE7BA30006DF756 /* Debug */, 390 | 47E87A751FE7BA30006DF756 /* Release */, 391 | ); 392 | defaultConfigurationIsVisible = 0; 393 | defaultConfigurationName = Release; 394 | }; 395 | 47E87A761FE7BA30006DF756 /* Build configuration list for PBXNativeTarget "TextAndVoice" */ = { 396 | isa = XCConfigurationList; 397 | buildConfigurations = ( 398 | 47E87A771FE7BA30006DF756 /* Debug */, 399 | 47E87A781FE7BA30006DF756 /* Release */, 400 | ); 401 | defaultConfigurationIsVisible = 0; 402 | defaultConfigurationName = Release; 403 | }; 404 | /* End XCConfigurationList section */ 405 | 406 | /* Begin XCVersionGroup section */ 407 | 47E87A6B1FE7BA30006DF756 /* TextAndVoice.xcdatamodeld */ = { 408 | isa = XCVersionGroup; 409 | children = ( 410 | 47E87A6C1FE7BA30006DF756 /* TextAndVoice.xcdatamodel */, 411 | ); 412 | currentVersion = 47E87A6C1FE7BA30006DF756 /* TextAndVoice.xcdatamodel */; 413 | path = TextAndVoice.xcdatamodeld; 414 | sourceTree = ""; 415 | versionGroupType = wrapper.xcdatamodel; 416 | }; 417 | /* End XCVersionGroup section */ 418 | }; 419 | rootObject = 47E87A591FE7BA30006DF756 /* Project object */; 420 | } 421 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // TextAndVoice 4 | // 5 | // Created by iOS_Tian on 2017/12/18. 6 | // Copyright © 2017年 CoderJun. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import CoreData 11 | 12 | @UIApplicationMain 13 | class AppDelegate: UIResponder, UIApplicationDelegate { 14 | 15 | var window: UIWindow? 16 | 17 | 18 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool { 19 | // Override point for customization after application launch. 20 | return true 21 | } 22 | 23 | func applicationWillResignActive(_ application: UIApplication) { 24 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 25 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 
26 | } 27 | 28 | func applicationDidEnterBackground(_ application: UIApplication) { 29 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 30 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 31 | } 32 | 33 | func applicationWillEnterForeground(_ application: UIApplication) { 34 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 35 | } 36 | 37 | func applicationDidBecomeActive(_ application: UIApplication) { 38 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 39 | } 40 | 41 | func applicationWillTerminate(_ application: UIApplication) { 42 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 43 | // Saves changes in the application's managed object context before the application terminates. 44 | self.saveContext() 45 | } 46 | 47 | // MARK: - Core Data stack 48 | 49 | lazy var persistentContainer: NSPersistentContainer = { 50 | /* 51 | The persistent container for the application. This implementation 52 | creates and returns a container, having loaded the store for the 53 | application to it. This property is optional since there are legitimate 54 | error conditions that could cause the creation of the store to fail. 55 | */ 56 | let container = NSPersistentContainer(name: "TextAndVoice") 57 | container.loadPersistentStores(completionHandler: { (storeDescription, error) in 58 | if let error = error as NSError? { 59 | // Replace this implementation with code to handle the error appropriately. 60 | // fatalError() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development. 61 | 62 | /* 63 | Typical reasons for an error here include: 64 | * The parent directory does not exist, cannot be created, or disallows writing. 65 | * The persistent store is not accessible, due to permissions or data protection when the device is locked. 66 | * The device is out of space. 67 | * The store could not be migrated to the current model version. 68 | Check the error message to determine what the actual problem was. 69 | */ 70 | fatalError("Unresolved error \(error), \(error.userInfo)") 71 | } 72 | }) 73 | return container 74 | }() 75 | 76 | // MARK: - Core Data Saving support 77 | 78 | func saveContext () { 79 | let context = persistentContainer.viewContext 80 | if context.hasChanges { 81 | do { 82 | try context.save() 83 | } catch { 84 | // Replace this implementation with code to handle the error appropriately. 85 | // fatalError() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development. 
86 | let nserror = error as NSError 87 | fatalError("Unresolved error \(nserror), \(nserror.userInfo)") 88 | } 89 | } 90 | } 91 | 92 | } 93 | 94 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "size" : "20x20", 5 | "idiom" : "iphone", 6 | "filename" : "Icon-Spotlight-40.png", 7 | "scale" : "2x" 8 | }, 9 | { 10 | "size" : "20x20", 11 | "idiom" : "iphone", 12 | "filename" : "Icon-60.png", 13 | "scale" : "3x" 14 | }, 15 | { 16 | "size" : "29x29", 17 | "idiom" : "iphone", 18 | "filename" : "Icon-Small29@2x.png", 19 | "scale" : "2x" 20 | }, 21 | { 22 | "size" : "29x29", 23 | "idiom" : "iphone", 24 | "filename" : "Icon-Small29@3x.png", 25 | "scale" : "3x" 26 | }, 27 | { 28 | "size" : "40x40", 29 | "idiom" : "iphone", 30 | "filename" : "Icon-Spotlight-40@2x.png", 31 | "scale" : "2x" 32 | }, 33 | { 34 | "size" : "40x40", 35 | "idiom" : "iphone", 36 | "filename" : "Icon-Small29@3x-1.png", 37 | "scale" : "3x" 38 | }, 39 | { 40 | "size" : "60x60", 41 | "idiom" : "iphone", 42 | "filename" : "Icon-60@2x.png", 43 | "scale" : "2x" 44 | }, 45 | { 46 | "size" : "60x60", 47 | "idiom" : "iphone", 48 | "filename" : "Icon-60@3x.png", 49 | "scale" : "3x" 50 | }, 51 | { 52 | "size" : "1024x1024", 53 | "idiom" : "ios-marketing", 54 | "filename" : "icon1024.png", 55 | "scale" : "1x" 56 | } 57 | ], 58 | "info" : { 59 | "version" : 1, 60 | "author" : "xcode" 61 | } 62 | } -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-60.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CoderTitan/TextAndVoice/0f0b716a6d773bdfe21f84c6848b0a407e0a7251/TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-60.png -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-60@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CoderTitan/TextAndVoice/0f0b716a6d773bdfe21f84c6848b0a407e0a7251/TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-60@2x.png -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-60@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CoderTitan/TextAndVoice/0f0b716a6d773bdfe21f84c6848b0a407e0a7251/TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-60@3x.png -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-Small29@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CoderTitan/TextAndVoice/0f0b716a6d773bdfe21f84c6848b0a407e0a7251/TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-Small29@2x.png -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-Small29@3x-1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/CoderTitan/TextAndVoice/0f0b716a6d773bdfe21f84c6848b0a407e0a7251/TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-Small29@3x-1.png -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-Small29@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CoderTitan/TextAndVoice/0f0b716a6d773bdfe21f84c6848b0a407e0a7251/TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-Small29@3x.png -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-Spotlight-40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CoderTitan/TextAndVoice/0f0b716a6d773bdfe21f84c6848b0a407e0a7251/TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-Spotlight-40.png -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-Spotlight-40@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CoderTitan/TextAndVoice/0f0b716a6d773bdfe21f84c6848b0a407e0a7251/TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/Icon-Spotlight-40@2x.png -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/icon1024.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CoderTitan/TextAndVoice/0f0b716a6d773bdfe21f84c6848b0a407e0a7251/TextAndVoice/TextAndVoice/Assets.xcassets/AppIcon.appiconset/icon1024.png -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | NSMicrophoneUsageDescription 24 | 麦克风访问权限 25 | NSSpeechRecognitionUsageDescription 26 | 语音识别访问权限 27 | UILaunchStoryboardName 28 | LaunchScreen 29 | UIMainStoryboardFile 30 | Main 
31 | UIRequiredDeviceCapabilities 32 | 33 | armv7 34 | 35 | UISupportedInterfaceOrientations 36 | 37 | UIInterfaceOrientationPortrait 38 | UIInterfaceOrientationLandscapeLeft 39 | UIInterfaceOrientationLandscapeRight 40 | 41 | UISupportedInterfaceOrientations~ipad 42 | 43 | UIInterfaceOrientationPortrait 44 | UIInterfaceOrientationPortraitUpsideDown 45 | UIInterfaceOrientationLandscapeLeft 46 | UIInterfaceOrientationLandscapeRight 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/TextAndVoice.xcdatamodeld/.xccurrentversion: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | _XCCurrentVersionName 6 | TextAndVoice.xcdatamodel 7 | 8 | 9 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/TextAndVoice.xcdatamodeld/TextAndVoice.xcdatamodel/contents: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/ViewController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.swift 3 | // TextAndVoice 4 | // 5 | // Created by iOS_Tian on 2017/12/18. 6 | // Copyright © 2017年 CoderJun. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | 12 | class ViewController: UIViewController { 13 | 14 | @IBOutlet weak var tableView: UITableView! 15 | fileprivate var titleArr = ["文字转语音", "实时语音转换文本", "本地语音转换文本", "一段录音转文字"] 16 | override func viewDidLoad() { 17 | super.viewDidLoad() 18 | 19 | title = "列表" 20 | } 21 | } 22 | 23 | // MARK: UITableViewDataSource 24 | extension ViewController: UITableViewDelegate, UITableViewDataSource{ 25 | func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { 26 | return titleArr.count 27 | } 28 | func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { 29 | var cell = tableView.dequeueReusableCell(withIdentifier: "cell") 30 | if cell == nil{ 31 | cell = UITableViewCell(style: .default, reuseIdentifier: "cell") 32 | } 33 | cell?.textLabel?.text = titleArr[indexPath.row] 34 | cell?.accessoryType = .disclosureIndicator 35 | cell?.selectionStyle = .none 36 | return cell! 37 | } 38 | func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) { 39 | let vcs = [TextToVoiceController(), RealDataRecordController(), LocalVoiceController(), VoiceToTextController()] 40 | let vc = vcs[indexPath.row] 41 | vc.title = titleArr[indexPath.row] 42 | navigationController?.pushViewController(vc, animated: true) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/VoiceTextView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // VoiceTextView.swift 3 | // TextAndVoice 4 | // 5 | // Created by quanjunt on 2018/12/7. 6 | // Copyright © 2018 CoderJun. All rights reserved. 
7 | // 8 | 9 | import UIKit 10 | 11 | class VoiceTextView: UITextView { 12 | 13 | 14 | 15 | required init?(coder aDecoder: NSCoder) { 16 | super.init(coder: aDecoder) 17 | 18 | isUserInteractionEnabled = true 19 | addGestureRecognizer(UILongPressGestureRecognizer(target: self, action: #selector(lableClick))) 20 | } 21 | 22 | override func awakeFromNib() { 23 | super.awakeFromNib() 24 | 25 | isUserInteractionEnabled = true 26 | addGestureRecognizer(UILongPressGestureRecognizer(target: self, action: #selector(lableClick))) 27 | } 28 | 29 | 30 | @objc fileprivate func lableClick() { 31 | becomeFirstResponder() 32 | 33 | let menu = UIMenuController.shared 34 | menu.setTargetRect(frame, in: superview ?? self) 35 | menu.setMenuVisible(true, animated: true) 36 | } 37 | 38 | override var canBecomeFirstResponder: Bool { 39 | return true 40 | } 41 | 42 | override func canPerformAction(_ action: Selector, withSender sender: Any?) -> Bool { 43 | if action == #selector(copy(menu:)) { 44 | return true 45 | } 46 | return false 47 | } 48 | 49 | @objc fileprivate func copy(menu: UIMenuController) { 50 | let pause = UIPasteboard.general 51 | pause.string = self.text 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/再别康桥.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CoderTitan/TextAndVoice/0f0b716a6d773bdfe21f84c6848b0a407e0a7251/TextAndVoice/TextAndVoice/再别康桥.mp3 -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/实时语音转换文本/RealDataRecordController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // RealDataRecordController.swift 3 | // TextAndVoice 4 | // 5 | // Created by iOS_Tian on 2017/12/20. 6 | // Copyright © 2017年 CoderJun. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import Speech 11 | import AVFoundation 12 | 13 | class RealDataRecordController: UIViewController { 14 | 15 | @IBOutlet weak var textLabel: UITextView! 16 | @IBOutlet weak var recordBtn: UIButton! 17 | fileprivate var recordRequest: SFSpeechAudioBufferRecognitionRequest? 18 | fileprivate var recordTask: SFSpeechRecognitionTask? 19 | fileprivate let audioEngine = AVAudioEngine() 20 | fileprivate lazy var recognizer: SFSpeechRecognizer = {// 21 | let recognize = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN")) 22 | recognize?.delegate = self 23 | return recognize! 24 | }() 25 | 26 | override func viewDidLoad() { 27 | super.viewDidLoad() 28 | 29 | addSpeechRecordLimit() 30 | } 31 | 32 | override func viewWillDisappear(_ animated: Bool) { 33 | super.viewWillDisappear(animated) 34 | stopRecognize() 35 | } 36 | 37 | //开始/停止录音 38 | @IBAction func recordAction(_ sender: UIButton) { 39 | let isStart = sender.currentTitle!.contains("开始") 40 | recordBtn.setTitle(isStart ? "停止录音" : "开始录音", for: .normal) 41 | isStart ? startRecognize() : stopRecognize() 42 | } 43 | 44 | @IBAction func labelCopClick(_ sender: Any) { 45 | let pause = UIPasteboard.general 46 | pause.string = textLabel.text 47 | } 48 | } 49 | 50 | //MARK: 录音识别 51 | extension RealDataRecordController{ 52 | //开始识别 53 | fileprivate func startRecognize(){ 54 | //1. 停止当前任务 55 | stopRecognize() 56 | 57 | //2. 
创建音频会话 58 | let session = AVAudioSession.sharedInstance() 59 | do{ 60 | try session.setCategory(AVAudioSessionCategoryRecord) 61 | try session.setMode(AVAudioSessionModeMeasurement) 62 | //激活Session 63 | try session.setActive(true, with: .notifyOthersOnDeactivation) 64 | }catch{ 65 | print("Throws:\(error)") 66 | } 67 | 68 | //3. 创建识别请求 69 | recordRequest = SFSpeechAudioBufferRecognitionRequest() 70 | 71 | let inputNode = audioEngine.inputNode 72 | 73 | //开始识别获取文字 74 | recordTask = recognizer.recognitionTask(with: recordRequest!, resultHandler: { (result, error) in 75 | if result != nil { 76 | var text = "" 77 | for trans in result!.transcriptions{ 78 | text += trans.formattedString 79 | } 80 | self.textLabel.text = text 81 | 82 | if result!.isFinal{ 83 | self.audioEngine.stop() 84 | inputNode.removeTap(onBus: 0) 85 | self.recordRequest = nil 86 | self.recordTask = nil 87 | self.recordBtn.isEnabled = true 88 | } 89 | } 90 | }) 91 | let recordFormat = inputNode.outputFormat(forBus: 0) 92 | inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordFormat, block: { (buffer, time) in 93 | self.recordRequest?.append(buffer) 94 | }) 95 | audioEngine.prepare() 96 | do { 97 | try audioEngine.start() 98 | } catch { 99 | print("Throws:\(error)") 100 | } 101 | } 102 | 103 | //停止识别 104 | fileprivate func stopRecognize(){ 105 | if recordTask != nil{ 106 | recordTask?.cancel() 107 | recordTask = nil 108 | } 109 | removeTask() 110 | } 111 | 112 | //销毁录音任务 113 | fileprivate func removeTask(){ 114 | self.audioEngine.stop() 115 | audioEngine.inputNode.removeTap(onBus: 0) 116 | self.recordRequest = nil 117 | self.recordTask = nil 118 | self.recordBtn.isEnabled = true 119 | } 120 | 121 | ///语音识别权限认证 122 | fileprivate func addSpeechRecordLimit(){ 123 | SFSpeechRecognizer.requestAuthorization { (state) in 124 | var isEnable = false 125 | switch state { 126 | case .authorized: 127 | isEnable = true 128 | print("已授权语音识别") 129 | case .notDetermined: 130 | isEnable = false 131 | print("没有授权语音识别") 132 | case .denied: 133 | isEnable = false 134 | print("用户已拒绝访问语音识别") 135 | case .restricted: 136 | isEnable = false 137 | print("不能在该设备上进行语音识别") 138 | } 139 | DispatchQueue.main.async { 140 | self.recordBtn.isEnabled = isEnable 141 | self.recordBtn.backgroundColor = isEnable ? UIColor(red: 255/255.0, green: 64/255.0, blue: 64/255.0, alpha: 1) : UIColor.lightGray 142 | } 143 | } 144 | } 145 | } 146 | 147 | //MARK: 148 | extension RealDataRecordController: SFSpeechRecognizerDelegate{ 149 | //监视语音识别器的可用性 150 | func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) { 151 | recordBtn.isEnabled = available 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/实时语音转换文本/RealDataRecordController.xib: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 35 | 45 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | -------------------------------------------------------------------------------- /TextAndVoice/TextAndVoice/文字转语音/TextToVoiceController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // TextToVoiceController.swift 3 | // TextAndVoice 4 | // 5 | // Created by iOS_Tian on 2017/12/18. 6 | // Copyright © 2017年 CoderJun. 
--------------------------------------------------------------------------------
/TextAndVoice/TextAndVoice/文字转语音/TextToVoiceController.xib:
--------------------------------------------------------------------------------
[Interface Builder XML not preserved in this dump; only the text view's preset copy (a short introduction to text-to-speech and speech-to-text shown on screen) survived extraction.]
--------------------------------------------------------------------------------
/TextAndVoice/TextAndVoice/本地语音转换文本/LocalVoiceController.swift:
--------------------------------------------------------------------------------
//
//  LocalVoiceController.swift
//  TextAndVoice
//
//  Created by iOS_Tian on 2017/12/20.
//  Copyright © 2017年 CoderJun. All rights reserved.
//

import UIKit
import Speech
import AVFoundation

class LocalVoiceController: UIViewController {

    @IBOutlet weak var textLabel: UITextView!
    @IBOutlet weak var recordButton: UIButton!

    fileprivate var recordTask: SFSpeechRecognitionTask?  // The active recognition task
    fileprivate lazy var recordRequest: SFSpeechURLRecognitionRequest = {
        let url = URL(fileURLWithPath: Bundle.main.path(forResource: "再别康桥", ofType: "mp3") ?? "")
        let recordRequest = SFSpeechURLRecognitionRequest(url: url)
        recordRequest.shouldReportPartialResults = true
        return recordRequest
    }()
    fileprivate lazy var recognizer: SFSpeechRecognizer = {
        let recognize = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN"))
        recognize?.delegate = self
        return recognize!
    }()

    override func viewDidLoad() {
        super.viewDidLoad()

        addSpeechRecordLimit()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        stopRecognize()
    }

    // Start/stop recognition
    @IBAction func recogedAction(_ sender: UIButton) {
        let isStart = sender.currentTitle!.contains("开始")
        recordButton.setTitle(isStart ? "停止语音识别" : "开始语音识别", for: .normal)
        isStart ? startRecognize() : stopRecognize()
    }

    // Copy the recognized text to the pasteboard
    @IBAction func labelCopClick(_ sender: Any) {
        let pasteboard = UIPasteboard.general
        pasteboard.string = textLabel.text
    }
}

extension LocalVoiceController {
    // Start recognition
    fileprivate func startRecognize() {
        stopRecognize()

        // Run the recognition task against the bundled audio file
        recordTask = recognizer.recognitionTask(with: recordRequest, resultHandler: { (result, error) in
            if result == nil { return }
            var text = ""
            for trans in result!.transcriptions {
                text += trans.formattedString
            }
            self.textLabel.text = text
        })
    }

    // Stop recognition
    fileprivate func stopRecognize() {
        if recordTask != nil {
            recordTask?.cancel()
            recordTask = nil
        }
    }

    /// Request speech-recognition authorization
    fileprivate func addSpeechRecordLimit() {
        SFSpeechRecognizer.requestAuthorization { (state) in
            var isEnable = false
            switch state {
            case .authorized:
                isEnable = true
                print("已授权语音识别")
            case .notDetermined:
                isEnable = false
                print("没有授权语音识别")
            case .denied:
                isEnable = false
                print("用户已拒绝访问语音识别")
            case .restricted:
                isEnable = false
                print("不能在该设备上进行语音识别")
            }
            DispatchQueue.main.async {
                self.recordButton.isEnabled = isEnable
                self.recordButton.backgroundColor = isEnable ? UIColor(red: 255/255.0, green: 64/255.0, blue: 64/255.0, alpha: 1) : UIColor.lightGray
            }
        }
    }
}


extension LocalVoiceController: SFSpeechRecognizerDelegate {
    // Availability of the speech recognizer changed
    func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
        recordButton.isEnabled = available
    }
}
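LocalVoiceController builds its `SFSpeechURLRecognitionRequest` from `Bundle.main.path(forResource:ofType:) ?? ""` and force-unwraps the recognizer, so a missing resource or an unsupported locale surfaces only as a runtime crash. A more defensive construction could look like this sketch; `makeLocalFileRequest()` is an illustrative helper, not part of the project:

```
import Speech

// Sketch: return a request only when the bundled file exists and zh-CN recognition is usable.
func makeLocalFileRequest() -> SFSpeechURLRecognitionRequest? {
    guard let url = Bundle.main.url(forResource: "再别康桥", withExtension: "mp3") else {
        print("再别康桥.mp3 is not in the app bundle")
        return nil
    }
    guard let recognizer = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN")), recognizer.isAvailable else {
        print("zh-CN speech recognition is not available on this device")
        return nil
    }
    let request = SFSpeechURLRecognitionRequest(url: url)
    request.shouldReportPartialResults = true
    return request
}
```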
--------------------------------------------------------------------------------
/TextAndVoice/TextAndVoice/本地语音转换文本/LocalVoiceController.xib:
--------------------------------------------------------------------------------
[Interface Builder XML not preserved in this dump.]
--------------------------------------------------------------------------------
/TextAndVoice/TextAndVoice/语音转文字/VoiceToTextController.swift:
--------------------------------------------------------------------------------
//
//  VoiceToTextController.swift
//  TextAndVoice
//
//  Created by iOS_Tian on 2017/12/18.
//  Copyright © 2017年 CoderJun. All rights reserved.
//

import UIKit
import AVFoundation
import Speech

class VoiceToTextController: UIViewController {

    @IBOutlet weak var recordButton: UIButton!
    @IBOutlet weak var timeLabel: UILabel!
    @IBOutlet weak var recordLabel: UITextView!
    @IBOutlet weak var speechButton: UIButton!

    //MARK: Recording/playback
    fileprivate var recordTime = 0                 // Recording duration in seconds
    fileprivate var playTime = 0                   // Remaining playback time in seconds
    fileprivate var recordTimer: Timer?            // Recording timer
    fileprivate var playTimer: Timer?              // Playback timer
    fileprivate var audioPlay = AVAudioPlayer()    // Audio player
    fileprivate var audioRecord: AVAudioRecorder?  // Audio recorder
    fileprivate lazy var audioSession: AVAudioSession = {  // Audio session
        let session = AVAudioSession.sharedInstance()
        do {
            try session.setCategory(AVAudioSessionCategoryPlayAndRecord)
            try session.setMode(AVAudioSessionModeMeasurement)
            try session.setActive(true, with: .notifyOthersOnDeactivation)
        } catch {
            print("Throws:\(error)")
        }
        return session
    }()

    //MARK: Speech recognition
    fileprivate var recordTask = SFSpeechRecognitionTask()  // The active recognition task
    fileprivate var audioEngine = AVAudioEngine()
    fileprivate lazy var recognizer: SFSpeechRecognizer = {
        let recognize = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN"))
        recognize?.delegate = self
        return recognize!
    }()

    override func viewDidLoad() {
        super.viewDidLoad()

        setupAVFoundation()
        addSpeechRecordLimit()
    }

    fileprivate func setupAVFoundation() {
        // 1. Audio encoding settings: these determine the format, quality and size of the recorded file
        let recordSetting = [
            // Sample rate: 8000/11025/22050/44100/96000 (affects audio quality)
            AVSampleRateKey: NSNumber(value: 8000),
            // Audio format
            AVFormatIDKey: NSNumber(value: kAudioFormatLinearPCM),
            // Bit depth: 8, 16, 24 or 32 (default 16)
            AVLinearPCMBitDepthKey: NSNumber(value: 16),
            // Number of channels: 1 or 2
            AVNumberOfChannelsKey: NSNumber(value: 1),
            // Encoder quality
            AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.high.rawValue))
        ]
        do {
            try audioRecord = AVAudioRecorder(url: saveDirectoryURL(), settings: recordSetting)
        } catch {
            print("Throws:\(error)")
        }
    }

    // Start/stop recording
    @IBAction func recordAction(_ sender: UIButton) {
        let isStart = sender.currentTitle!.contains("开始")
        recordButton.setTitle(isStart ? "停止录音" : "开始录音", for: .normal)
        isStart ? startRecord() : stopRecord()
    }

    // Transcribe the recorded audio file
    @IBAction func startTranslation(_ sender: UIButton) {
        let recordRequest = SFSpeechURLRecognitionRequest(url: audioRecord!.url)
        recordRequest.shouldReportPartialResults = true
        recordTask = recognizer.recognitionTask(with: recordRequest, resultHandler: { (result, error) in
            let text = result?.bestTranscription.formattedString
            self.recordLabel.text = text
        })
    }

    // Play back the recording
    @IBAction func playRecordAction(_ sender: UIButton) {
        // Start the playback timer
        playTime = recordTime
        playTimer = Timer(timeInterval: 1, target: self, selector: #selector(playTimerAction(_:)), userInfo: nil, repeats: true)
        RunLoop.main.add(playTimer!, forMode: .commonModes)

        if !audioRecord!.isRecording {
            do {
                try audioPlay = AVAudioPlayer(contentsOf: audioRecord!.url)
            } catch {
                print("Throws:\(error)")
            }
            audioPlay.play()
        }
    }

    // Copy the transcription to the pasteboard
    @IBAction func labelCopClick(_ sender: Any) {
        let pasteboard = UIPasteboard.general
        pasteboard.string = recordLabel.text
    }
}

//MARK: Speech-recognition authorization
extension VoiceToTextController {
    /// Request speech-recognition authorization
    fileprivate func addSpeechRecordLimit() {
        SFSpeechRecognizer.requestAuthorization { (state) in
            var isEnable = false
            switch state {
            case .authorized:
                isEnable = true
                print("已授权语音识别")
            case .notDetermined:
                isEnable = false
                print("没有授权语音识别")
            case .denied:
                isEnable = false
                print("用户已拒绝访问语音识别")
            case .restricted:
                isEnable = false
                print("不能在该设备上进行语音识别")
            }
            DispatchQueue.main.async {
                self.speechButton.isEnabled = isEnable
                self.speechButton.backgroundColor = isEnable ? UIColor(red: 255/255.0, green: 64/255.0, blue: 64/255.0, alpha: 1) : UIColor.lightGray
            }
        }
    }
}

//MARK: SFSpeechRecognizerDelegate
extension VoiceToTextController: SFSpeechRecognizerDelegate {

}

//MARK: Recording
extension VoiceToTextController {
    // Stop recording
    fileprivate func stopRecord() {
        if audioRecord!.isRecording {
            audioRecord?.stop()
        }
        timeLabel.text = "录音时长\(recordTime)秒"
        removeRecordTimer()
    }

    // Start recording
    fileprivate func startRecord() {
        recordTime = 0

        // 1. Start the recording timer
        recordTimer = Timer(timeInterval: 1, target: self, selector: #selector(timerAutoScroll(_:)), userInfo: nil, repeats: true)
        RunLoop.main.add(recordTimer!, forMode: .commonModes)

        // 2. Prepare to record
        audioRecord?.prepareToRecord()

        // 3. Start recording
        audioRecord?.record()
    }

    // Where the recording is saved
    fileprivate func saveDirectoryURL() -> URL {
        // Use the current time as the file name, with a .caf extension
        let currentDate = Date()
        let formatter = DateFormatter()
        formatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
        let recordName = formatter.string(from: currentDate) + ".caf"

        // Store the file in the Documents directory
        let fileManager = FileManager.default
        let url = fileManager.urls(for: .documentDirectory, in: .userDomainMask).first ?? URL(string: "")!
        return url.appendingPathComponent(recordName)
    }
}

//MARK: Timer callbacks
extension VoiceToTextController {
    // Playback timer tick
    @objc fileprivate func playTimerAction(_ sender: Timer) {
        playTime -= 1
        timeLabel.text = "\(playTime)秒"
        if playTime <= 0 {
            timeLabel.text = "播放结束"
            removePlayTimer()
        }
    }

    // Recording timer tick
    @objc fileprivate func timerAutoScroll(_ sender: Timer) {
        recordTime += 1
        timeLabel.text = "\(recordTime)秒"
    }

    // Tear down the recording timer
    fileprivate func removeRecordTimer() {
        if recordTimer != nil {
            recordTimer?.invalidate()
            recordTimer = nil
        }
    }

    // Tear down the playback timer
    fileprivate func removePlayTimer() {
        if playTimer != nil {
            playTimer?.invalidate()
            playTimer = nil
        }
    }
}
--------------------------------------------------------------------------------
/TextAndVoice/TextAndVoice/语音转文字/VoiceToTextController.xib:
--------------------------------------------------------------------------------
[Interface Builder XML not preserved in this dump.]
--------------------------------------------------------------------------------
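One detail worth flagging in VoiceToTextController: the lazy `audioSession` property, whose initializer sets the play-and-record category and activates the session, does not appear to be referenced anywhere in the code shown, so the recorder may start with whatever session state happens to be active. A minimal sketch of a fix, using the same pre-iOS-12 AVAudioSession spellings as the rest of the file, is to configure the session before `audioRecord?.record()` runs (the helper below is illustrative, not part of the project):

```
import AVFoundation

// Sketch: make sure a play-and-record session is active before AVAudioRecorder starts.
// Call it (or simply reference the existing lazy audioSession) at the top of startRecord().
func prepareSessionForRecording() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(AVAudioSessionCategoryPlayAndRecord)
        try session.setActive(true, with: .notifyOthersOnDeactivation)
    } catch {
        print("Audio session error: \(error)")
    }
}
```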