├── FZSpeakDemo.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ ├── contents.xcworkspacedata │ ├── xcshareddata │ │ └── IDEWorkspaceChecks.plist │ └── xcuserdata │ │ └── fzh.xcuserdatad │ │ └── UserInterfaceState.xcuserstate └── xcuserdata │ └── fzh.xcuserdatad │ ├── xcdebugger │ └── Breakpoints_v2.xcbkptlist │ └── xcschemes │ └── xcschememanagement.plist ├── FZSpeakDemo ├── AppDelegate.h ├── AppDelegate.m ├── Assets.xcassets │ ├── AppIcon.appiconset │ │ └── Contents.json │ └── Contents.json ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard ├── FZProgerssHud │ ├── FZProgressHudView.h │ ├── FZProgressHudView.m │ ├── alert_error_icon.png │ ├── alert_error_icon@2x.png │ ├── alert_success_icon.png │ ├── alert_success_icon@2x.png │ ├── refresh.png │ └── refresh@2x.png ├── FZSpeakClass │ ├── FZSpeakClass.h │ ├── FZSpeechEvaluator.h │ ├── FZSpeechEvaluator.m │ ├── FZSpeechRecognizer.h │ ├── FZSpeechRecognizer.m │ ├── FZSpeechSynthesizer.h │ ├── FZSpeechSynthesizer.m │ └── ISEResultXmlParser │ │ ├── ISEResult.h │ │ ├── ISEResult.m │ │ ├── ISEResultFinal.h │ │ ├── ISEResultFinal.m │ │ ├── ISEResultPhone.h │ │ ├── ISEResultPhone.m │ │ ├── ISEResultReadSentence.h │ │ ├── ISEResultReadSentence.m │ │ ├── ISEResultReadSyllable.h │ │ ├── ISEResultReadSyllable.m │ │ ├── ISEResultReadWord.h │ │ ├── ISEResultReadWord.m │ │ ├── ISEResultSentence.h │ │ ├── ISEResultSentence.m │ │ ├── ISEResultSyll.h │ │ ├── ISEResultSyll.m │ │ ├── ISEResultTools.h │ │ ├── ISEResultTools.m │ │ ├── ISEResultWord.h │ │ ├── ISEResultWord.m │ │ ├── ISEResultXmlParser.h │ │ └── ISEResultXmlParser.m ├── Info.plist ├── ViewController.h ├── ViewController.m ├── iflyMSC.framework │ ├── Headers │ │ ├── IFlyAudioSession.h │ │ ├── IFlyContact.h │ │ ├── IFlyDataUploader.h │ │ ├── IFlyDebugLog.h │ │ ├── IFlyISVDelegate.h │ │ ├── IFlyISVRecognizer.h │ │ ├── IFlyMSC.h │ │ ├── IFlyPcmRecorder.h │ │ ├── IFlyRecognizerView.h │ │ ├── IFlyRecognizerViewDelegate.h │ │ ├── IFlyResourceUtil.h │ │ ├── IFlySetting.h │ │ ├── IFlySpeechConstant.h │ │ ├── IFlySpeechError.h │ │ ├── IFlySpeechEvaluator.h │ │ ├── IFlySpeechEvaluatorDelegate.h │ │ ├── IFlySpeechEvent.h │ │ ├── IFlySpeechRecognizer.h │ │ ├── IFlySpeechRecognizerDelegate.h │ │ ├── IFlySpeechSynthesizer.h │ │ ├── IFlySpeechSynthesizerDelegate.h │ │ ├── IFlySpeechUtility.h │ │ ├── IFlyUserWords.h │ │ ├── IFlyVoiceWakeuper.h │ │ └── IFlyVoiceWakeuperDelegate.h │ └── iflyMSC └── main.m └── README.md /FZSpeakDemo.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 50; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 3A155BBA21246EC4007DC61E /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BB921246EC4007DC61E /* AppDelegate.m */; }; 11 | 3A155BBD21246EC4007DC61E /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BBC21246EC4007DC61E /* ViewController.m */; }; 12 | 3A155BC021246EC4007DC61E /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 3A155BBE21246EC4007DC61E /* Main.storyboard */; }; 13 | 3A155BC221246EC8007DC61E /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 3A155BC121246EC8007DC61E /* Assets.xcassets */; }; 14 | 3A155BC521246EC8007DC61E /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 3A155BC321246EC8007DC61E /* LaunchScreen.storyboard */; }; 15 | 3A155BC821246EC8007DC61E /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BC721246EC8007DC61E /* main.m */; }; 16 | 3A155BED21246ED6007DC61E /* FZSpeechRecognizer.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BD021246ED6007DC61E /* FZSpeechRecognizer.m */; }; 17 | 3A155BEE21246ED6007DC61E /* FZSpeechEvaluator.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BD121246ED6007DC61E /* FZSpeechEvaluator.m */; }; 18 | 3A155BEF21246ED6007DC61E /* FZSpeechSynthesizer.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BD221246ED6007DC61E /* FZSpeechSynthesizer.m */; }; 19 | 3A155BF021246ED6007DC61E /* ISEResultSyll.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BD621246ED6007DC61E /* ISEResultSyll.m */; }; 20 | 3A155BF121246ED6007DC61E /* ISEResult.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BD721246ED6007DC61E /* ISEResult.m */; }; 21 | 3A155BF221246ED6007DC61E /* ISEResultReadSyllable.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BD821246ED6007DC61E /* ISEResultReadSyllable.m */; }; 22 | 3A155BF321246ED6007DC61E /* ISEResultReadSentence.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BDA21246ED6007DC61E /* ISEResultReadSentence.m */; }; 23 | 3A155BF421246ED6007DC61E /* ISEResultReadWord.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BDB21246ED6007DC61E /* ISEResultReadWord.m */; }; 24 | 3A155BF521246ED6007DC61E /* ISEResultPhone.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BDC21246ED6007DC61E /* ISEResultPhone.m */; }; 25 | 3A155BF621246ED6007DC61E /* ISEResultSentence.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BDD21246ED6007DC61E /* ISEResultSentence.m */; }; 26 | 3A155BF721246ED6007DC61E /* ISEResultWord.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BE721246ED6007DC61E /* ISEResultWord.m */; }; 27 | 3A155BF821246ED6007DC61E /* ISEResultXmlParser.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BE821246ED6007DC61E /* ISEResultXmlParser.m */; }; 28 | 3A155BF921246ED6007DC61E /* ISEResultTools.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BE921246ED6007DC61E /* ISEResultTools.m */; }; 29 | 3A155BFA21246ED6007DC61E /* ISEResultFinal.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155BEA21246ED6007DC61E /* ISEResultFinal.m */; }; 30 | 3A155BFD21246FA2007DC61E /* iflyMSC.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3A155BFC21246FA2007DC61E /* iflyMSC.framework */; }; 31 | 3A155BFF21246FAC007DC61E /* CoreTelephony.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3A155BFE21246FAC007DC61E /* CoreTelephony.framework */; }; 32 | 3A155C0121246FB2007DC61E /* AVFoundation.framework in Frameworks */ 
= {isa = PBXBuildFile; fileRef = 3A155C0021246FB2007DC61E /* AVFoundation.framework */; }; 33 | 3A155C0321246FB9007DC61E /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3A155C0221246FB9007DC61E /* AudioToolbox.framework */; }; 34 | 3A155C0521246FBF007DC61E /* libz.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 3A155C0421246FBF007DC61E /* libz.tbd */; }; 35 | 3A155C0721246FC5007DC61E /* libc++.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 3A155C0621246FC5007DC61E /* libc++.tbd */; }; 36 | 3A155C0921246FCA007DC61E /* SystemConfiguration.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3A155C0821246FCA007DC61E /* SystemConfiguration.framework */; }; 37 | 3A155C232124704E007DC61E /* alert_success_icon.png in Resources */ = {isa = PBXBuildFile; fileRef = 3A155C1B2124704E007DC61E /* alert_success_icon.png */; }; 38 | 3A155C242124704E007DC61E /* alert_error_icon@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 3A155C1C2124704E007DC61E /* alert_error_icon@2x.png */; }; 39 | 3A155C252124704E007DC61E /* refresh@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 3A155C1D2124704E007DC61E /* refresh@2x.png */; }; 40 | 3A155C262124704E007DC61E /* FZProgressHudView.m in Sources */ = {isa = PBXBuildFile; fileRef = 3A155C1E2124704E007DC61E /* FZProgressHudView.m */; }; 41 | 3A155C272124704E007DC61E /* alert_error_icon.png in Resources */ = {isa = PBXBuildFile; fileRef = 3A155C1F2124704E007DC61E /* alert_error_icon.png */; }; 42 | 3A155C282124704E007DC61E /* alert_success_icon@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 3A155C212124704E007DC61E /* alert_success_icon@2x.png */; }; 43 | 3A155C292124704E007DC61E /* refresh.png in Resources */ = {isa = PBXBuildFile; fileRef = 3A155C222124704E007DC61E /* refresh.png */; }; 44 | /* End PBXBuildFile section */ 45 | 46 | /* Begin PBXFileReference section */ 47 | 3A155BB521246EC4007DC61E /* FZSpeakDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FZSpeakDemo.app; sourceTree = BUILT_PRODUCTS_DIR; }; 48 | 3A155BB821246EC4007DC61E /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 49 | 3A155BB921246EC4007DC61E /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 50 | 3A155BBB21246EC4007DC61E /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 51 | 3A155BBC21246EC4007DC61E /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; 52 | 3A155BBF21246EC4007DC61E /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 53 | 3A155BC121246EC8007DC61E /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 54 | 3A155BC421246EC8007DC61E /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 55 | 3A155BC621246EC8007DC61E /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 56 | 3A155BC721246EC8007DC61E /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 57 | 
3A155BCF21246ED6007DC61E /* FZSpeakClass.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FZSpeakClass.h; sourceTree = ""; }; 58 | 3A155BD021246ED6007DC61E /* FZSpeechRecognizer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FZSpeechRecognizer.m; sourceTree = ""; }; 59 | 3A155BD121246ED6007DC61E /* FZSpeechEvaluator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FZSpeechEvaluator.m; sourceTree = ""; }; 60 | 3A155BD221246ED6007DC61E /* FZSpeechSynthesizer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FZSpeechSynthesizer.m; sourceTree = ""; }; 61 | 3A155BD321246ED6007DC61E /* FZSpeechRecognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FZSpeechRecognizer.h; sourceTree = ""; }; 62 | 3A155BD421246ED6007DC61E /* FZSpeechSynthesizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FZSpeechSynthesizer.h; sourceTree = ""; }; 63 | 3A155BD621246ED6007DC61E /* ISEResultSyll.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResultSyll.m; sourceTree = ""; }; 64 | 3A155BD721246ED6007DC61E /* ISEResult.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResult.m; sourceTree = ""; }; 65 | 3A155BD821246ED6007DC61E /* ISEResultReadSyllable.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResultReadSyllable.m; sourceTree = ""; }; 66 | 3A155BD921246ED6007DC61E /* ISEResultWord.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResultWord.h; sourceTree = ""; }; 67 | 3A155BDA21246ED6007DC61E /* ISEResultReadSentence.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResultReadSentence.m; sourceTree = ""; }; 68 | 3A155BDB21246ED6007DC61E /* ISEResultReadWord.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResultReadWord.m; sourceTree = ""; }; 69 | 3A155BDC21246ED6007DC61E /* ISEResultPhone.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResultPhone.m; sourceTree = ""; }; 70 | 3A155BDD21246ED6007DC61E /* ISEResultSentence.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResultSentence.m; sourceTree = ""; }; 71 | 3A155BDE21246ED6007DC61E /* ISEResultFinal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResultFinal.h; sourceTree = ""; }; 72 | 3A155BDF21246ED6007DC61E /* ISEResultXmlParser.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResultXmlParser.h; sourceTree = ""; }; 73 | 3A155BE021246ED6007DC61E /* ISEResultTools.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResultTools.h; sourceTree = ""; }; 74 | 3A155BE121246ED6007DC61E /* ISEResult.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResult.h; sourceTree = ""; }; 75 | 3A155BE221246ED6007DC61E /* ISEResultSyll.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResultSyll.h; sourceTree = ""; }; 76 | 3A155BE321246ED6007DC61E /* ISEResultReadSyllable.h */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResultReadSyllable.h; sourceTree = ""; }; 77 | 3A155BE421246ED6007DC61E /* ISEResultPhone.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResultPhone.h; sourceTree = ""; }; 78 | 3A155BE521246ED6007DC61E /* ISEResultReadSentence.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResultReadSentence.h; sourceTree = ""; }; 79 | 3A155BE621246ED6007DC61E /* ISEResultReadWord.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResultReadWord.h; sourceTree = ""; }; 80 | 3A155BE721246ED6007DC61E /* ISEResultWord.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResultWord.m; sourceTree = ""; }; 81 | 3A155BE821246ED6007DC61E /* ISEResultXmlParser.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResultXmlParser.m; sourceTree = ""; }; 82 | 3A155BE921246ED6007DC61E /* ISEResultTools.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResultTools.m; sourceTree = ""; }; 83 | 3A155BEA21246ED6007DC61E /* ISEResultFinal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ISEResultFinal.m; sourceTree = ""; }; 84 | 3A155BEB21246ED6007DC61E /* ISEResultSentence.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ISEResultSentence.h; sourceTree = ""; }; 85 | 3A155BEC21246ED6007DC61E /* FZSpeechEvaluator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FZSpeechEvaluator.h; sourceTree = ""; }; 86 | 3A155BFC21246FA2007DC61E /* iflyMSC.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = iflyMSC.framework; path = FZSpeakDemo/iflyMSC.framework; sourceTree = ""; }; 87 | 3A155BFE21246FAC007DC61E /* CoreTelephony.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreTelephony.framework; path = System/Library/Frameworks/CoreTelephony.framework; sourceTree = SDKROOT; }; 88 | 3A155C0021246FB2007DC61E /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; 89 | 3A155C0221246FB9007DC61E /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = System/Library/Frameworks/AudioToolbox.framework; sourceTree = SDKROOT; }; 90 | 3A155C0421246FBF007DC61E /* libz.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = libz.tbd; path = usr/lib/libz.tbd; sourceTree = SDKROOT; }; 91 | 3A155C0621246FC5007DC61E /* libc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.tbd"; path = "usr/lib/libc++.tbd"; sourceTree = SDKROOT; }; 92 | 3A155C0821246FCA007DC61E /* SystemConfiguration.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = SystemConfiguration.framework; path = System/Library/Frameworks/SystemConfiguration.framework; sourceTree = SDKROOT; }; 93 | 3A155C1B2124704E007DC61E /* alert_success_icon.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = alert_success_icon.png; sourceTree = ""; }; 94 | 
3A155C1C2124704E007DC61E /* alert_error_icon@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "alert_error_icon@2x.png"; sourceTree = ""; }; 95 | 3A155C1D2124704E007DC61E /* refresh@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "refresh@2x.png"; sourceTree = ""; }; 96 | 3A155C1E2124704E007DC61E /* FZProgressHudView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FZProgressHudView.m; sourceTree = ""; }; 97 | 3A155C1F2124704E007DC61E /* alert_error_icon.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = alert_error_icon.png; sourceTree = ""; }; 98 | 3A155C202124704E007DC61E /* FZProgressHudView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FZProgressHudView.h; sourceTree = ""; }; 99 | 3A155C212124704E007DC61E /* alert_success_icon@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "alert_success_icon@2x.png"; sourceTree = ""; }; 100 | 3A155C222124704E007DC61E /* refresh.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = refresh.png; sourceTree = ""; }; 101 | /* End PBXFileReference section */ 102 | 103 | /* Begin PBXFrameworksBuildPhase section */ 104 | 3A155BB221246EC4007DC61E /* Frameworks */ = { 105 | isa = PBXFrameworksBuildPhase; 106 | buildActionMask = 2147483647; 107 | files = ( 108 | 3A155C0921246FCA007DC61E /* SystemConfiguration.framework in Frameworks */, 109 | 3A155C0721246FC5007DC61E /* libc++.tbd in Frameworks */, 110 | 3A155C0521246FBF007DC61E /* libz.tbd in Frameworks */, 111 | 3A155C0321246FB9007DC61E /* AudioToolbox.framework in Frameworks */, 112 | 3A155C0121246FB2007DC61E /* AVFoundation.framework in Frameworks */, 113 | 3A155BFF21246FAC007DC61E /* CoreTelephony.framework in Frameworks */, 114 | 3A155BFD21246FA2007DC61E /* iflyMSC.framework in Frameworks */, 115 | ); 116 | runOnlyForDeploymentPostprocessing = 0; 117 | }; 118 | /* End PBXFrameworksBuildPhase section */ 119 | 120 | /* Begin PBXGroup section */ 121 | 3A155BAC21246EC4007DC61E = { 122 | isa = PBXGroup; 123 | children = ( 124 | 3A155BB721246EC4007DC61E /* FZSpeakDemo */, 125 | 3A155BB621246EC4007DC61E /* Products */, 126 | 3A155BFB21246FA2007DC61E /* Frameworks */, 127 | ); 128 | sourceTree = ""; 129 | }; 130 | 3A155BB621246EC4007DC61E /* Products */ = { 131 | isa = PBXGroup; 132 | children = ( 133 | 3A155BB521246EC4007DC61E /* FZSpeakDemo.app */, 134 | ); 135 | name = Products; 136 | sourceTree = ""; 137 | }; 138 | 3A155BB721246EC4007DC61E /* FZSpeakDemo */ = { 139 | isa = PBXGroup; 140 | children = ( 141 | 3A155C1A2124704E007DC61E /* FZProgerssHud */, 142 | 3A155BCE21246ED6007DC61E /* FZSpeakClass */, 143 | 3A155BB821246EC4007DC61E /* AppDelegate.h */, 144 | 3A155BB921246EC4007DC61E /* AppDelegate.m */, 145 | 3A155BBB21246EC4007DC61E /* ViewController.h */, 146 | 3A155BBC21246EC4007DC61E /* ViewController.m */, 147 | 3A155BBE21246EC4007DC61E /* Main.storyboard */, 148 | 3A155BC121246EC8007DC61E /* Assets.xcassets */, 149 | 3A155BC321246EC8007DC61E /* LaunchScreen.storyboard */, 150 | 3A155BC621246EC8007DC61E /* Info.plist */, 151 | 3A155BC721246EC8007DC61E /* main.m */, 152 | ); 153 | path = FZSpeakDemo; 154 | sourceTree = ""; 155 | }; 156 | 3A155BCE21246ED6007DC61E /* FZSpeakClass */ = { 157 | isa = PBXGroup; 158 | children = ( 159 | 3A155BCF21246ED6007DC61E /* FZSpeakClass.h */, 160 | 3A155BD321246ED6007DC61E /* FZSpeechRecognizer.h */, 161 | 3A155BD021246ED6007DC61E /* FZSpeechRecognizer.m */, 
162 | 3A155BEC21246ED6007DC61E /* FZSpeechEvaluator.h */, 163 | 3A155BD121246ED6007DC61E /* FZSpeechEvaluator.m */, 164 | 3A155BD421246ED6007DC61E /* FZSpeechSynthesizer.h */, 165 | 3A155BD221246ED6007DC61E /* FZSpeechSynthesizer.m */, 166 | 3A155BD521246ED6007DC61E /* ISEResultXmlParser */, 167 | ); 168 | path = FZSpeakClass; 169 | sourceTree = ""; 170 | }; 171 | 3A155BD521246ED6007DC61E /* ISEResultXmlParser */ = { 172 | isa = PBXGroup; 173 | children = ( 174 | 3A155BD621246ED6007DC61E /* ISEResultSyll.m */, 175 | 3A155BD721246ED6007DC61E /* ISEResult.m */, 176 | 3A155BD821246ED6007DC61E /* ISEResultReadSyllable.m */, 177 | 3A155BD921246ED6007DC61E /* ISEResultWord.h */, 178 | 3A155BDA21246ED6007DC61E /* ISEResultReadSentence.m */, 179 | 3A155BDB21246ED6007DC61E /* ISEResultReadWord.m */, 180 | 3A155BDC21246ED6007DC61E /* ISEResultPhone.m */, 181 | 3A155BDD21246ED6007DC61E /* ISEResultSentence.m */, 182 | 3A155BDE21246ED6007DC61E /* ISEResultFinal.h */, 183 | 3A155BDF21246ED6007DC61E /* ISEResultXmlParser.h */, 184 | 3A155BE021246ED6007DC61E /* ISEResultTools.h */, 185 | 3A155BE121246ED6007DC61E /* ISEResult.h */, 186 | 3A155BE221246ED6007DC61E /* ISEResultSyll.h */, 187 | 3A155BE321246ED6007DC61E /* ISEResultReadSyllable.h */, 188 | 3A155BE421246ED6007DC61E /* ISEResultPhone.h */, 189 | 3A155BE521246ED6007DC61E /* ISEResultReadSentence.h */, 190 | 3A155BE621246ED6007DC61E /* ISEResultReadWord.h */, 191 | 3A155BE721246ED6007DC61E /* ISEResultWord.m */, 192 | 3A155BE821246ED6007DC61E /* ISEResultXmlParser.m */, 193 | 3A155BE921246ED6007DC61E /* ISEResultTools.m */, 194 | 3A155BEA21246ED6007DC61E /* ISEResultFinal.m */, 195 | 3A155BEB21246ED6007DC61E /* ISEResultSentence.h */, 196 | ); 197 | path = ISEResultXmlParser; 198 | sourceTree = ""; 199 | }; 200 | 3A155BFB21246FA2007DC61E /* Frameworks */ = { 201 | isa = PBXGroup; 202 | children = ( 203 | 3A155C0821246FCA007DC61E /* SystemConfiguration.framework */, 204 | 3A155C0621246FC5007DC61E /* libc++.tbd */, 205 | 3A155C0421246FBF007DC61E /* libz.tbd */, 206 | 3A155C0221246FB9007DC61E /* AudioToolbox.framework */, 207 | 3A155C0021246FB2007DC61E /* AVFoundation.framework */, 208 | 3A155BFE21246FAC007DC61E /* CoreTelephony.framework */, 209 | 3A155BFC21246FA2007DC61E /* iflyMSC.framework */, 210 | ); 211 | name = Frameworks; 212 | sourceTree = ""; 213 | }; 214 | 3A155C1A2124704E007DC61E /* FZProgerssHud */ = { 215 | isa = PBXGroup; 216 | children = ( 217 | 3A155C202124704E007DC61E /* FZProgressHudView.h */, 218 | 3A155C1E2124704E007DC61E /* FZProgressHudView.m */, 219 | 3A155C1B2124704E007DC61E /* alert_success_icon.png */, 220 | 3A155C1C2124704E007DC61E /* alert_error_icon@2x.png */, 221 | 3A155C1D2124704E007DC61E /* refresh@2x.png */, 222 | 3A155C1F2124704E007DC61E /* alert_error_icon.png */, 223 | 3A155C212124704E007DC61E /* alert_success_icon@2x.png */, 224 | 3A155C222124704E007DC61E /* refresh.png */, 225 | ); 226 | path = FZProgerssHud; 227 | sourceTree = ""; 228 | }; 229 | /* End PBXGroup section */ 230 | 231 | /* Begin PBXNativeTarget section */ 232 | 3A155BB421246EC4007DC61E /* FZSpeakDemo */ = { 233 | isa = PBXNativeTarget; 234 | buildConfigurationList = 3A155BCB21246EC8007DC61E /* Build configuration list for PBXNativeTarget "FZSpeakDemo" */; 235 | buildPhases = ( 236 | 3A155BB121246EC4007DC61E /* Sources */, 237 | 3A155BB221246EC4007DC61E /* Frameworks */, 238 | 3A155BB321246EC4007DC61E /* Resources */, 239 | ); 240 | buildRules = ( 241 | ); 242 | dependencies = ( 243 | ); 244 | name = FZSpeakDemo; 245 | productName = FZSpeakDemo; 
246 | productReference = 3A155BB521246EC4007DC61E /* FZSpeakDemo.app */; 247 | productType = "com.apple.product-type.application"; 248 | }; 249 | /* End PBXNativeTarget section */ 250 | 251 | /* Begin PBXProject section */ 252 | 3A155BAD21246EC4007DC61E /* Project object */ = { 253 | isa = PBXProject; 254 | attributes = { 255 | LastUpgradeCheck = 0940; 256 | ORGANIZATIONNAME = "付正"; 257 | TargetAttributes = { 258 | 3A155BB421246EC4007DC61E = { 259 | CreatedOnToolsVersion = 9.4.1; 260 | }; 261 | }; 262 | }; 263 | buildConfigurationList = 3A155BB021246EC4007DC61E /* Build configuration list for PBXProject "FZSpeakDemo" */; 264 | compatibilityVersion = "Xcode 9.3"; 265 | developmentRegion = en; 266 | hasScannedForEncodings = 0; 267 | knownRegions = ( 268 | en, 269 | Base, 270 | ); 271 | mainGroup = 3A155BAC21246EC4007DC61E; 272 | productRefGroup = 3A155BB621246EC4007DC61E /* Products */; 273 | projectDirPath = ""; 274 | projectRoot = ""; 275 | targets = ( 276 | 3A155BB421246EC4007DC61E /* FZSpeakDemo */, 277 | ); 278 | }; 279 | /* End PBXProject section */ 280 | 281 | /* Begin PBXResourcesBuildPhase section */ 282 | 3A155BB321246EC4007DC61E /* Resources */ = { 283 | isa = PBXResourcesBuildPhase; 284 | buildActionMask = 2147483647; 285 | files = ( 286 | 3A155C242124704E007DC61E /* alert_error_icon@2x.png in Resources */, 287 | 3A155C272124704E007DC61E /* alert_error_icon.png in Resources */, 288 | 3A155BC521246EC8007DC61E /* LaunchScreen.storyboard in Resources */, 289 | 3A155C232124704E007DC61E /* alert_success_icon.png in Resources */, 290 | 3A155BC221246EC8007DC61E /* Assets.xcassets in Resources */, 291 | 3A155C252124704E007DC61E /* refresh@2x.png in Resources */, 292 | 3A155C282124704E007DC61E /* alert_success_icon@2x.png in Resources */, 293 | 3A155C292124704E007DC61E /* refresh.png in Resources */, 294 | 3A155BC021246EC4007DC61E /* Main.storyboard in Resources */, 295 | ); 296 | runOnlyForDeploymentPostprocessing = 0; 297 | }; 298 | /* End PBXResourcesBuildPhase section */ 299 | 300 | /* Begin PBXSourcesBuildPhase section */ 301 | 3A155BB121246EC4007DC61E /* Sources */ = { 302 | isa = PBXSourcesBuildPhase; 303 | buildActionMask = 2147483647; 304 | files = ( 305 | 3A155BBD21246EC4007DC61E /* ViewController.m in Sources */, 306 | 3A155BF521246ED6007DC61E /* ISEResultPhone.m in Sources */, 307 | 3A155BC821246EC8007DC61E /* main.m in Sources */, 308 | 3A155BF821246ED6007DC61E /* ISEResultXmlParser.m in Sources */, 309 | 3A155BBA21246EC4007DC61E /* AppDelegate.m in Sources */, 310 | 3A155BF721246ED6007DC61E /* ISEResultWord.m in Sources */, 311 | 3A155BF421246ED6007DC61E /* ISEResultReadWord.m in Sources */, 312 | 3A155BF321246ED6007DC61E /* ISEResultReadSentence.m in Sources */, 313 | 3A155BEE21246ED6007DC61E /* FZSpeechEvaluator.m in Sources */, 314 | 3A155BFA21246ED6007DC61E /* ISEResultFinal.m in Sources */, 315 | 3A155BF221246ED6007DC61E /* ISEResultReadSyllable.m in Sources */, 316 | 3A155BEF21246ED6007DC61E /* FZSpeechSynthesizer.m in Sources */, 317 | 3A155BF021246ED6007DC61E /* ISEResultSyll.m in Sources */, 318 | 3A155C262124704E007DC61E /* FZProgressHudView.m in Sources */, 319 | 3A155BF921246ED6007DC61E /* ISEResultTools.m in Sources */, 320 | 3A155BF621246ED6007DC61E /* ISEResultSentence.m in Sources */, 321 | 3A155BF121246ED6007DC61E /* ISEResult.m in Sources */, 322 | 3A155BED21246ED6007DC61E /* FZSpeechRecognizer.m in Sources */, 323 | ); 324 | runOnlyForDeploymentPostprocessing = 0; 325 | }; 326 | /* End PBXSourcesBuildPhase section */ 327 | 328 | /* Begin PBXVariantGroup 
section */ 329 | 3A155BBE21246EC4007DC61E /* Main.storyboard */ = { 330 | isa = PBXVariantGroup; 331 | children = ( 332 | 3A155BBF21246EC4007DC61E /* Base */, 333 | ); 334 | name = Main.storyboard; 335 | sourceTree = ""; 336 | }; 337 | 3A155BC321246EC8007DC61E /* LaunchScreen.storyboard */ = { 338 | isa = PBXVariantGroup; 339 | children = ( 340 | 3A155BC421246EC8007DC61E /* Base */, 341 | ); 342 | name = LaunchScreen.storyboard; 343 | sourceTree = ""; 344 | }; 345 | /* End PBXVariantGroup section */ 346 | 347 | /* Begin XCBuildConfiguration section */ 348 | 3A155BC921246EC8007DC61E /* Debug */ = { 349 | isa = XCBuildConfiguration; 350 | buildSettings = { 351 | ALWAYS_SEARCH_USER_PATHS = NO; 352 | CLANG_ANALYZER_NONNULL = YES; 353 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 354 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 355 | CLANG_CXX_LIBRARY = "libc++"; 356 | CLANG_ENABLE_MODULES = YES; 357 | CLANG_ENABLE_OBJC_ARC = YES; 358 | CLANG_ENABLE_OBJC_WEAK = YES; 359 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 360 | CLANG_WARN_BOOL_CONVERSION = YES; 361 | CLANG_WARN_COMMA = YES; 362 | CLANG_WARN_CONSTANT_CONVERSION = YES; 363 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 364 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 365 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 366 | CLANG_WARN_EMPTY_BODY = YES; 367 | CLANG_WARN_ENUM_CONVERSION = YES; 368 | CLANG_WARN_INFINITE_RECURSION = YES; 369 | CLANG_WARN_INT_CONVERSION = YES; 370 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 371 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 372 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 373 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 374 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 375 | CLANG_WARN_STRICT_PROTOTYPES = YES; 376 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 377 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 378 | CLANG_WARN_UNREACHABLE_CODE = YES; 379 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 380 | CODE_SIGN_IDENTITY = "iPhone Developer"; 381 | COPY_PHASE_STRIP = NO; 382 | DEBUG_INFORMATION_FORMAT = dwarf; 383 | ENABLE_STRICT_OBJC_MSGSEND = YES; 384 | ENABLE_TESTABILITY = YES; 385 | GCC_C_LANGUAGE_STANDARD = gnu11; 386 | GCC_DYNAMIC_NO_PIC = NO; 387 | GCC_NO_COMMON_BLOCKS = YES; 388 | GCC_OPTIMIZATION_LEVEL = 0; 389 | GCC_PREPROCESSOR_DEFINITIONS = ( 390 | "DEBUG=1", 391 | "$(inherited)", 392 | ); 393 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 394 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 395 | GCC_WARN_UNDECLARED_SELECTOR = YES; 396 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 397 | GCC_WARN_UNUSED_FUNCTION = YES; 398 | GCC_WARN_UNUSED_VARIABLE = YES; 399 | IPHONEOS_DEPLOYMENT_TARGET = 11.4; 400 | MTL_ENABLE_DEBUG_INFO = YES; 401 | ONLY_ACTIVE_ARCH = YES; 402 | SDKROOT = iphoneos; 403 | }; 404 | name = Debug; 405 | }; 406 | 3A155BCA21246EC8007DC61E /* Release */ = { 407 | isa = XCBuildConfiguration; 408 | buildSettings = { 409 | ALWAYS_SEARCH_USER_PATHS = NO; 410 | CLANG_ANALYZER_NONNULL = YES; 411 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 412 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 413 | CLANG_CXX_LIBRARY = "libc++"; 414 | CLANG_ENABLE_MODULES = YES; 415 | CLANG_ENABLE_OBJC_ARC = YES; 416 | CLANG_ENABLE_OBJC_WEAK = YES; 417 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 418 | CLANG_WARN_BOOL_CONVERSION = YES; 419 | CLANG_WARN_COMMA = YES; 420 | CLANG_WARN_CONSTANT_CONVERSION = YES; 421 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 422 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 423 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 424 | 
CLANG_WARN_EMPTY_BODY = YES; 425 | CLANG_WARN_ENUM_CONVERSION = YES; 426 | CLANG_WARN_INFINITE_RECURSION = YES; 427 | CLANG_WARN_INT_CONVERSION = YES; 428 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 429 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 430 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 431 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 432 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 433 | CLANG_WARN_STRICT_PROTOTYPES = YES; 434 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 435 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 436 | CLANG_WARN_UNREACHABLE_CODE = YES; 437 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 438 | CODE_SIGN_IDENTITY = "iPhone Developer"; 439 | COPY_PHASE_STRIP = NO; 440 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 441 | ENABLE_NS_ASSERTIONS = NO; 442 | ENABLE_STRICT_OBJC_MSGSEND = YES; 443 | GCC_C_LANGUAGE_STANDARD = gnu11; 444 | GCC_NO_COMMON_BLOCKS = YES; 445 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 446 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 447 | GCC_WARN_UNDECLARED_SELECTOR = YES; 448 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 449 | GCC_WARN_UNUSED_FUNCTION = YES; 450 | GCC_WARN_UNUSED_VARIABLE = YES; 451 | IPHONEOS_DEPLOYMENT_TARGET = 11.4; 452 | MTL_ENABLE_DEBUG_INFO = NO; 453 | SDKROOT = iphoneos; 454 | VALIDATE_PRODUCT = YES; 455 | }; 456 | name = Release; 457 | }; 458 | 3A155BCC21246EC8007DC61E /* Debug */ = { 459 | isa = XCBuildConfiguration; 460 | buildSettings = { 461 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 462 | CLANG_ENABLE_OBJC_ARC = YES; 463 | CODE_SIGN_IDENTITY = "iPhone Developer"; 464 | CODE_SIGN_STYLE = Automatic; 465 | DEVELOPMENT_TEAM = 7A99RNRSDE; 466 | ENABLE_BITCODE = NO; 467 | FRAMEWORK_SEARCH_PATHS = ( 468 | "$(inherited)", 469 | "$(PROJECT_DIR)/FZSpeakDemo", 470 | ); 471 | INFOPLIST_FILE = FZSpeakDemo/Info.plist; 472 | IPHONEOS_DEPLOYMENT_TARGET = 9.0; 473 | LD_RUNPATH_SEARCH_PATHS = ( 474 | "$(inherited)", 475 | "@executable_path/Frameworks", 476 | ); 477 | PRODUCT_BUNDLE_IDENTIFIER = Reapal.FZSpeakDemo; 478 | PRODUCT_NAME = "$(TARGET_NAME)"; 479 | PROVISIONING_PROFILE_SPECIFIER = ""; 480 | TARGETED_DEVICE_FAMILY = "1,2"; 481 | }; 482 | name = Debug; 483 | }; 484 | 3A155BCD21246EC8007DC61E /* Release */ = { 485 | isa = XCBuildConfiguration; 486 | buildSettings = { 487 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 488 | CLANG_ENABLE_OBJC_ARC = YES; 489 | CODE_SIGN_IDENTITY = "iPhone Developer"; 490 | CODE_SIGN_STYLE = Automatic; 491 | DEVELOPMENT_TEAM = 7A99RNRSDE; 492 | ENABLE_BITCODE = NO; 493 | FRAMEWORK_SEARCH_PATHS = ( 494 | "$(inherited)", 495 | "$(PROJECT_DIR)/FZSpeakDemo", 496 | ); 497 | INFOPLIST_FILE = FZSpeakDemo/Info.plist; 498 | IPHONEOS_DEPLOYMENT_TARGET = 9.0; 499 | LD_RUNPATH_SEARCH_PATHS = ( 500 | "$(inherited)", 501 | "@executable_path/Frameworks", 502 | ); 503 | PRODUCT_BUNDLE_IDENTIFIER = Reapal.FZSpeakDemo; 504 | PRODUCT_NAME = "$(TARGET_NAME)"; 505 | PROVISIONING_PROFILE_SPECIFIER = ""; 506 | TARGETED_DEVICE_FAMILY = "1,2"; 507 | }; 508 | name = Release; 509 | }; 510 | /* End XCBuildConfiguration section */ 511 | 512 | /* Begin XCConfigurationList section */ 513 | 3A155BB021246EC4007DC61E /* Build configuration list for PBXProject "FZSpeakDemo" */ = { 514 | isa = XCConfigurationList; 515 | buildConfigurations = ( 516 | 3A155BC921246EC8007DC61E /* Debug */, 517 | 3A155BCA21246EC8007DC61E /* Release */, 518 | ); 519 | defaultConfigurationIsVisible = 0; 520 | defaultConfigurationName = Release; 521 | }; 522 | 3A155BCB21246EC8007DC61E /* Build configuration list for PBXNativeTarget "FZSpeakDemo" */ = 
{ 523 | isa = XCConfigurationList; 524 | buildConfigurations = ( 525 | 3A155BCC21246EC8007DC61E /* Debug */, 526 | 3A155BCD21246EC8007DC61E /* Release */, 527 | ); 528 | defaultConfigurationIsVisible = 0; 529 | defaultConfigurationName = Release; 530 | }; 531 | /* End XCConfigurationList section */ 532 | }; 533 | rootObject = 3A155BAD21246EC4007DC61E /* Project object */; 534 | } 535 | -------------------------------------------------------------------------------- /FZSpeakDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /FZSpeakDemo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /FZSpeakDemo.xcodeproj/project.xcworkspace/xcuserdata/fzh.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fuzheng0301/FZSpeak/a802d3ddebd7985ddac2bd677f4a3d9898ebda40/FZSpeakDemo.xcodeproj/project.xcworkspace/xcuserdata/fzh.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /FZSpeakDemo.xcodeproj/xcuserdata/fzh.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | -------------------------------------------------------------------------------- /FZSpeakDemo.xcodeproj/xcuserdata/fzh.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | FZSpeakDemo.xcscheme 8 | 9 | orderHint 10 | 0 11 | 12 | FZSpeakDemo.xcscheme_^#shared#^_ 13 | 14 | orderHint 15 | 0 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /FZSpeakDemo/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // FZSpeakDemo 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | @interface AppDelegate : UIResponder <UIApplicationDelegate> 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | 16 | @end 17 | 18 | -------------------------------------------------------------------------------- /FZSpeakDemo/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // FZSpeakDemo 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | #import "ViewController.h" 11 | #import "IFlyMSC/IFlyMSC.h" 12 | 13 | @interface AppDelegate () 14 | 15 | @end 16 | 17 | @implementation AppDelegate 18 | 19 | 20 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 21 | // Override point for customization after application launch.
22 | 23 | // Configure speech recognition 24 | //Set log level 25 | [IFlySetting setLogFile:LVL_NONE]; 26 | //Set whether to output log messages in Xcode console 27 | [IFlySetting showLogcat:NO]; 28 | // Initialize the iFlytek app 29 | NSString *initString = [[NSString alloc] initWithFormat:@"appid=%@",@"56d801df"]; 30 | [IFlySpeechUtility createUtility:initString]; 31 | 32 | if([UINavigationBar conformsToProtocol:@protocol(UIAppearanceContainer)]) { 33 | [UINavigationBar appearance].tintColor = [UIColor whiteColor]; 34 | [[UINavigationBar appearance] setTitleTextAttributes:@{NSFontAttributeName : [UIFont boldSystemFontOfSize:18], NSForegroundColorAttributeName : [UIColor whiteColor]}]; 35 | [[UINavigationBar appearance] setBarTintColor:[UIColor colorWithRed:(51)/255.f green:(171)/255.f blue:(160)/255.f alpha:1.f]]; 36 | [[UINavigationBar appearance] setTranslucent:NO]; 37 | } 38 | self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; 39 | ViewController *vc = [[ViewController alloc] init]; 40 | UINavigationController *navigationController = [[UINavigationController alloc] initWithRootViewController:vc]; 41 | self.window.rootViewController = navigationController; 42 | self.window.backgroundColor = [UIColor whiteColor]; 43 | [self.window makeKeyAndVisible]; 44 | 45 | return YES; 46 | } 47 | 48 | 49 | - (void)applicationWillResignActive:(UIApplication *)application { 50 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 51 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 52 | } 53 | 54 | 55 | - (void)applicationDidEnterBackground:(UIApplication *)application { 56 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 57 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 58 | } 59 | 60 | 61 | - (void)applicationWillEnterForeground:(UIApplication *)application { 62 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 63 | } 64 | 65 | 66 | - (void)applicationDidBecomeActive:(UIApplication *)application { 67 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 68 | } 69 | 70 | 71 | - (void)applicationWillTerminate:(UIApplication *)application { 72 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
73 | } 74 | 75 | 76 | @end 77 | -------------------------------------------------------------------------------- /FZSpeakDemo/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "size" : "1024x1024", 91 | "scale" : "1x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | "author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /FZSpeakDemo/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /FZSpeakDemo/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /FZSpeakDemo/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZProgerssHud/FZProgressHudView.h: -------------------------------------------------------------------------------- 1 | // 2 | // FZProgressHudView.h 3 | // 4 | // Created by Zhang Cheng on 13-12-13. 5 | // Copyright (c) 2013年 Zhang Cheng. All rights reserved. 
6 | // 7 | 8 | #import <UIKit/UIKit.h> 9 | 10 | #define SCREEN_WIDTH [UIScreen mainScreen].bounds.size.width 11 | #define SCREEN_HEIGHT [UIScreen mainScreen].bounds.size.height 12 | 13 | @interface FZProgressHudView : UIView 14 | 15 | @property (strong, nonatomic) UIView *hudView; 16 | @property (strong, nonatomic) UIImageView *statusImageView; 17 | @property (strong, nonatomic) UIImageView *activityIndicatorImageView; 18 | @property (strong, nonatomic) UILabel *statusLabel; 19 | @property (strong, nonatomic) NSTimer *timer; 20 | @property (weak, nonatomic) UIView *targetView; // the view this HUD is laid over 21 | 22 | - (id)initWithTargetView:(UIView *) view; 23 | - (void)startWork:(NSString *)workName; 24 | - (void)hideHudWithSuccess:(NSString *)successString andDuration:(NSTimeInterval)duration; 25 | - (void)hideHudWIthFailure:(NSString *)failureString andDuration:(NSTimeInterval)duration; 26 | - (void)showHudWithSuccess:(NSString *)successString andDuration:(NSTimeInterval)duration; 27 | - (void)showHudWithFailure:(NSString *)failureString andDuration:(NSTimeInterval)duration; 28 | - (void)hideHudImmediately; 29 | 30 | + (FZProgressHudView *)shareInstance; 31 | 32 | @end 33 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZProgerssHud/FZProgressHudView.m: -------------------------------------------------------------------------------- 1 | // 2 | // FZProgressHudView.m 3 | // 4 | // Created by Zhang Cheng on 13-12-13. 5 | // Copyright (c) 2013年 Zhang Cheng. All rights reserved. 6 | // 7 | 8 | #import "FZProgressHudView.h" 9 | #import <QuartzCore/QuartzCore.h> // for CALayer / CABasicAnimation 10 | 11 | #define HUDWIDTH 140 12 | #define STATUSIMAGEWIDTH 40 // width/height of the status image; adjust to match the image size 13 | #define FONT [UIFont systemFontOfSize:17] 14 | #define LABELHEIGHT 20 // default height of the text label is 20 15 | #define SPACE 8 // spacing between subviews 16 | 17 | @implementation FZProgressHudView 18 | 19 | + (FZProgressHudView *)shareInstance 20 | { 21 | static FZProgressHudView * hud = nil; 22 | @synchronized(self) { 23 | if (!hud) { 24 | hud = [[FZProgressHudView alloc]init]; 25 | } 26 | } 27 | return hud; 28 | } 29 | 30 | - (id)initWithTargetView:(UIView *) view 31 | { 32 | if (self = [super init]) 33 | { 34 | CGRect rect = CGRectMake((HUDWIDTH - STATUSIMAGEWIDTH) / 2, SPACE, STATUSIMAGEWIDTH, STATUSIMAGEWIDTH); 35 | 36 | self.activityIndicatorImageView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"refresh"]]; 37 | self.activityIndicatorImageView.hidden = YES; 38 | self.activityIndicatorImageView.frame = rect; 39 | 40 | self.statusImageView = [[UIImageView alloc] initWithFrame:rect]; 41 | self.statusImageView.hidden = YES; 42 | 43 | self.statusLabel = [[UILabel alloc] initWithFrame:CGRectZero]; 44 | self.statusLabel.numberOfLines = 0; 45 | self.statusLabel.font = FONT; 46 | self.statusLabel.backgroundColor = [UIColor clearColor]; 47 | self.statusLabel.textColor = [UIColor whiteColor]; 48 | self.statusLabel.textAlignment = NSTextAlignmentCenter; 49 | 50 | // UIBlurEffect * blur = [UIBlurEffect effectWithStyle:UIBlurEffectStyleExtraLight]; 51 | // UIVisualEffectView * effe = [[UIVisualEffectView alloc]initWithEffect:blur]; 52 | // effe.frame = rect; 53 | 54 | self.hudView = [[UIView alloc]init]; 55 | self.hudView.layer.cornerRadius = 8; 56 | self.hudView.layer.backgroundColor = [UIColor colorWithRed:0 green:0 blue:0 alpha:0.7].CGColor; 57 | [self.hudView addSubview:self.activityIndicatorImageView]; 58 | [self.hudView addSubview:self.statusLabel]; 59 | [self.hudView addSubview:self.statusImageView]; 60 | [self addSubview:self.hudView]; 61 | 62 |
self.targetView = view; 63 | self.backgroundColor = [UIColor clearColor]; 64 | 65 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(resumeAnimation) name:UIApplicationWillEnterForegroundNotification object:nil]; 66 | } 67 | return self; 68 | } 69 | 70 | #pragma mark Start a waiting task 71 | - (void)startWork:(NSString *)workName 72 | { 73 | if (self.activityIndicatorImageView.hidden == YES) 74 | { 75 | self.activityIndicatorImageView.hidden = NO; 76 | [self imageAnimation]; 77 | self.statusImageView.hidden = YES; 78 | } 79 | [self calculateContentSize:workName]; 80 | } 81 | 82 | - (void)calculateContentSize:(NSString *) workName 83 | { 84 | if (self.superview == nil) 85 | { 86 | self.frame = self.targetView.bounds; 87 | [self.targetView addSubview:self]; 88 | self.alpha = 1.0f; 89 | } 90 | CGSize size = [workName boundingRectWithSize:CGSizeMake(HUDWIDTH - 2 * SPACE, 1000) options:NSStringDrawingTruncatesLastVisibleLine|NSStringDrawingUsesLineFragmentOrigin|NSStringDrawingUsesFontLeading attributes:[NSDictionary dictionaryWithObjectsAndKeys:FONT,NSFontAttributeName, nil] context:NULL].size; 91 | CGFloat height = (size.height < LABELHEIGHT) ? LABELHEIGHT : size.height; 92 | self.statusLabel.text = workName; 93 | self.statusLabel.frame = CGRectMake(SPACE, SPACE * 2 + STATUSIMAGEWIDTH, HUDWIDTH - 2 * SPACE, size.height); 94 | self.hudView.frame = CGRectMake(0, 0, HUDWIDTH, 3 * SPACE + STATUSIMAGEWIDTH + height); 95 | 96 | CGPoint windowCenter = CGPointMake(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2); 97 | self.hudView.center = [self convertPoint:windowCenter fromView:[UIApplication sharedApplication].keyWindow]; 98 | } 99 | 100 | #pragma mark Work finished: show success or failure 101 | - (void)hideHudWithSuccess:(NSString *)successString andDuration:(NSTimeInterval)duration 102 | { 103 | [self startWork:successString]; 104 | [self hideStatusView:NO andDuration:duration]; 105 | } 106 | 107 | - (void)hideHudWIthFailure:(NSString *)failureString andDuration:(NSTimeInterval)duration 108 | { 109 | NSRange range = [failureString rangeOfString:@"请求超时"]; 110 | if ((range.length > 0) && (range.location != NSNotFound)) { 111 | failureString = @"请求超时"; 112 | } 113 | NSRange range2 = [failureString rangeOfString:@"互联网的连接"]; 114 | if ((range2.length > 0) && (range2.location != NSNotFound)) { 115 | failureString = @"网络连接失败,请稍后重试"; 116 | } 117 | 118 | [self startWork:failureString]; 119 | [self hideStatusView:YES andDuration:duration]; 120 | } 121 | 122 | #pragma mark Show a message directly 123 | - (void)showHudWithSuccess:(NSString *)successString andDuration:(NSTimeInterval)duration 124 | { 125 | [self startWork:successString]; 126 | [self hideStatusView:NO andDuration:duration]; 127 | } 128 | 129 | - (void)showHudWithFailure:(NSString *)failureString andDuration:(NSTimeInterval)duration 130 | { 131 | NSRange range = [failureString rangeOfString:@"请求超时"]; 132 | if ((range.length > 0) && (range.location != NSNotFound)) { 133 | failureString = @"请求超时"; 134 | } 135 | 136 | NSRange range2 = [failureString rangeOfString:@"互联网的连接"]; 137 | if ((range2.length > 0) && (range2.location != NSNotFound)) { 138 | failureString = @"网络连接失败,请稍后重试"; 139 | } 140 | 141 | [self startWork:failureString]; 142 | [self hideStatusView:YES andDuration:duration]; 143 | } 144 | 145 | #pragma mark Hide 146 | - (void)hideStatusView:(BOOL) isError andDuration:(NSTimeInterval)duration 147 | { 148 | self.activityIndicatorImageView.hidden = YES; 149 | self.statusImageView.hidden = NO; 150 | if (isError) 151 | self.statusImageView.image = [UIImage
imageNamed:@"alert_error_icon"]; 152 | else 153 | self.statusImageView.image = [UIImage imageNamed:@"alert_success_icon"]; 154 | 155 | if (self.timer) 156 | { 157 | [self.timer invalidate]; 158 | self.timer = nil; 159 | } 160 | self.timer = [NSTimer scheduledTimerWithTimeInterval:duration target:self selector:@selector(timerFired) userInfo:nil repeats:NO]; 161 | } 162 | 163 | - (void)timerFired 164 | { 165 | [UIView animateWithDuration:0.3 animations:^(void){ 166 | self.alpha = 0.0; 167 | }completion:^(BOOL finished){ 168 | [self removeFromSuperview]; 169 | }]; 170 | [self.timer invalidate]; 171 | self.timer = nil; 172 | } 173 | 174 | - (void)hideHudImmediately 175 | { 176 | if (self.superview) 177 | { 178 | [self.activityIndicatorImageView.layer removeAllAnimations]; 179 | self.activityIndicatorImageView.hidden = YES; 180 | [self removeFromSuperview]; 181 | } 182 | } 183 | 184 | - (void)imageAnimation 185 | { 186 | CABasicAnimation *animation = [CABasicAnimation animationWithKeyPath:@"transform.rotation.z"]; 187 | animation.toValue = [NSNumber numberWithDouble:M_PI_2]; 188 | animation.duration = 0.2f; 189 | animation.cumulative = YES; 190 | animation.repeatCount = HUGE_VALF; 191 | [self.activityIndicatorImageView.layer addAnimation:animation forKey:@"activityIndicatorAnimation"]; 192 | } 193 | 194 | - (void)resumeAnimation 195 | { 196 | if (self.superview) 197 | { 198 | [self imageAnimation]; 199 | } 200 | } 201 | 202 | - (void)dealloc 203 | { 204 | [self.timer invalidate]; 205 | self.timer = nil; 206 | [[NSNotificationCenter defaultCenter] removeObserver:self]; 207 | } 208 | 209 | @end 210 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZProgerssHud/alert_error_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fuzheng0301/FZSpeak/a802d3ddebd7985ddac2bd677f4a3d9898ebda40/FZSpeakDemo/FZProgerssHud/alert_error_icon.png -------------------------------------------------------------------------------- /FZSpeakDemo/FZProgerssHud/alert_error_icon@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fuzheng0301/FZSpeak/a802d3ddebd7985ddac2bd677f4a3d9898ebda40/FZSpeakDemo/FZProgerssHud/alert_error_icon@2x.png -------------------------------------------------------------------------------- /FZSpeakDemo/FZProgerssHud/alert_success_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fuzheng0301/FZSpeak/a802d3ddebd7985ddac2bd677f4a3d9898ebda40/FZSpeakDemo/FZProgerssHud/alert_success_icon.png -------------------------------------------------------------------------------- /FZSpeakDemo/FZProgerssHud/alert_success_icon@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fuzheng0301/FZSpeak/a802d3ddebd7985ddac2bd677f4a3d9898ebda40/FZSpeakDemo/FZProgerssHud/alert_success_icon@2x.png -------------------------------------------------------------------------------- /FZSpeakDemo/FZProgerssHud/refresh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fuzheng0301/FZSpeak/a802d3ddebd7985ddac2bd677f4a3d9898ebda40/FZSpeakDemo/FZProgerssHud/refresh.png -------------------------------------------------------------------------------- /FZSpeakDemo/FZProgerssHud/refresh@2x.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/fuzheng0301/FZSpeak/a802d3ddebd7985ddac2bd677f4a3d9898ebda40/FZSpeakDemo/FZProgerssHud/refresh@2x.png -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/FZSpeakClass.h: -------------------------------------------------------------------------------- 1 | // 2 | // FZSpeakClass.h 3 | // CorrectSpeak 4 | // 5 | // Created by 付正 on 2018/8/13. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | 9 | #import "FZSpeechEvaluator.h" 10 | #import "FZSpeechRecognizer.h" 11 | #import "FZSpeechSynthesizer.h" 12 | 13 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/FZSpeechEvaluator.h: -------------------------------------------------------------------------------- 1 | // 2 | // FZSpeechEvaluator.h 3 | // CorrectSpeak 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | // Speech evaluation 9 | 10 | #import <Foundation/Foundation.h> 11 | #import <iflyMSC/IFlyMSC.h> // import the iFlytek speech library 12 | 13 | typedef enum : NSUInteger { 14 | XF_Audio_Evaluation_Begain = 0, // recording started 15 | XF_Audio_Evaluation_Volume, // recording volume 16 | XF_Audio_Evaluation_End, // recording stopped 17 | XF_Audio_Evaluation_Cancel, // recording cancelled 18 | XF_Audio_Evaluation_Result, // evaluation result 19 | XF_Audio_Evaluation_Error, // evaluation failed 20 | } XF_Audio_Evaluation_Type; // speech evaluation states 21 | 22 | typedef void (^XFAudioEvaCallback)(XF_Audio_Evaluation_Type type, float progress, NSString *resultMsg); 23 | 24 | @interface FZSpeechEvaluator : NSObject 25 | 26 | @property (nonatomic, assign) XF_Audio_Evaluation_Type evaluationType; 27 | 28 | @property(nonatomic, strong) IFlySpeechEvaluator *iFlySpeechEvaluator; // the underlying iFlytek speech evaluator 29 | 30 | @property(nonatomic, copy) XFAudioEvaCallback xf_evacallback; 31 | 32 | + (instancetype)sharedInstance; 33 | 34 | /** 35 | Speech evaluation 36 | 37 | @param text the text to be evaluated 38 | @param callback returns the evaluation result 39 | */ 40 | + (void)xf_AudioEvaluationOfText: (NSString *)text callback:(void(^)(XF_Audio_Evaluation_Type type, float progress, NSString *resultMsg))callback; 41 | 42 | /*! 43 | * Stop recording
44 | * Calling this function stops recording and starts evaluating the recorded audio. 45 | */ 46 | - (void)stopListening; 47 | 48 | /*! 49 | * Cancel the current session 50 | */ 51 | - (void)cancel; 52 | 53 | @end 54 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/FZSpeechEvaluator.m: -------------------------------------------------------------------------------- 1 | // 2 | // FZSpeechEvaluator.m 3 | // CorrectSpeak 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | 9 | #import "FZSpeechEvaluator.h" 10 | #import "ISEResult.h" 11 | #import "ISEResultXmlParser.h" 12 | 13 | @interface FZSpeechEvaluator() <IFlySpeechEvaluatorDelegate, ISEResultXmlParserDelegate> 14 | 15 | @end 16 | 17 | @implementation FZSpeechEvaluator 18 | 19 | + (instancetype)sharedInstance { 20 | static id sharedInstance; 21 | static dispatch_once_t onceToken; 22 | dispatch_once(&onceToken, ^{ 23 | sharedInstance = [[self alloc] init]; 24 | }); 25 | return sharedInstance; 26 | } 27 | 28 | #pragma mark --- Speech evaluation 29 | /** 30 | * Lazy-loading getter 31 | */ 32 | - (IFlySpeechEvaluator *)iFlySpeechEvaluator { 33 | if (!_iFlySpeechEvaluator) { 34 | // Initialize the speech evaluator 35 | _iFlySpeechEvaluator = [IFlySpeechEvaluator sharedInstance]; 36 | _iFlySpeechEvaluator.delegate = self; 37 | // Set the evaluation language [Chinese: zh_cn, Taiwanese Chinese: zh_tw, American English: en_us] 38 | [_iFlySpeechEvaluator setParameter:@"zh_cn" forKey:[IFlySpeechConstant LANGUAGE]]; 39 | // Set the question type [read_syllable (not supported for English): single character; read_word: word; read_sentence: sentence; read_chapter (not yet open): passage] 40 | [_iFlySpeechEvaluator setParameter:@"read_sentence" forKey:[IFlySpeechConstant ISE_CATEGORY]]; 41 | // Set the encoding of the prompt text 42 | [_iFlySpeechEvaluator setParameter:@"utf-8" forKey:[IFlySpeechConstant TEXT_ENCODING]]; 43 | // Set the leading/trailing endpoint timeouts [0-10000, in ms] 44 | [_iFlySpeechEvaluator setParameter:@"5000" forKey:[IFlySpeechConstant VAD_BOS]]; // default 5000 ms 45 | [_iFlySpeechEvaluator setParameter:@"1800" forKey:[IFlySpeechConstant VAD_EOS]]; // default 1800 ms 46 | // Set the recording timeout; -1 means no limit (in ms, default 30000) 47 | [_iFlySpeechEvaluator setParameter:@"-1" forKey:[IFlySpeechConstant SPEECH_TIMEOUT]]; 48 | // Set the result level; each level carries a different amount of detail [complete: full; plain: simple] 49 | [_iFlySpeechEvaluator setParameter:@"complete" forKey:[IFlySpeechConstant ISE_RESULT_LEVEL]]; 50 | } 51 | return _iFlySpeechEvaluator; 52 | } 53 | 54 | #pragma mark --- Start speech evaluation 55 | + (void)xf_AudioEvaluationOfText:(NSString *)text callback:(void (^)(XF_Audio_Evaluation_Type type, float progress, NSString *resultMsg))callback 56 | { 57 | // 2. Start the speech evaluation (the prompt text is prefixed with a UTF-8 BOM) 58 | // NSData *textData = [text dataUsingEncoding:NSUTF8StringEncoding]; 59 | Byte bomHeader[] = { 0xEF, 0xBB, 0xBF }; 60 | NSMutableData *buffer = [NSMutableData dataWithBytes:bomHeader length:sizeof(bomHeader)]; 61 | [buffer appendData:[text dataUsingEncoding:NSUTF8StringEncoding]]; 62 | [[FZSpeechEvaluator sharedInstance].iFlySpeechEvaluator startListening:buffer params:nil]; 63 | 64 | [FZSpeechEvaluator sharedInstance].xf_evacallback = callback; 65 | } 66 | 67 | /*! 68 | * Stop recording
69 | * 调用此函数会停止录音,并开始进行语音识别 70 | */ 71 | - (void)stopListening 72 | { 73 | [_iFlySpeechEvaluator stopListening]; 74 | } 75 | 76 | /*! 77 | * 取消本次会话 78 | */ 79 | - (void)cancel 80 | { 81 | [_iFlySpeechEvaluator cancel]; 82 | } 83 | 84 | /*! 85 | * 音量和数据回调 86 | * 87 | * @param volume 音量 88 | * @param buffer 音频数据 89 | */ 90 | - (void)onVolumeChanged:(int)volume buffer:(NSData *)buffer 91 | { 92 | NSLog(@"音量..."); 93 | [FZSpeechEvaluator sharedInstance].xf_evacallback(XF_Audio_Evaluation_Volume,volume,nil); 94 | } 95 | 96 | /*! 97 | * 开始录音回调
98 | * 当调用了`startListening`函数之后,如果没有发生错误则会回调此函数。如果发生错误则回调onCompleted:函数 99 | */ 100 | - (void)onBeginOfSpeech 101 | { 102 | NSLog(@"开始录音"); 103 | [FZSpeechEvaluator sharedInstance].xf_evacallback(XF_Audio_Evaluation_Begain,0,nil); 104 | } 105 | 106 | /*! 107 | * 停止录音回调
108 | * 当调用了`stopListening`函数或者引擎内部自动检测到端点,如果没有发生错误则回调此函数。
109 | * 如果发生错误则回调onCompleted:函数 110 | */ 111 | - (void)onEndOfSpeech 112 | { 113 | NSLog(@"停止录音"); 114 | [FZSpeechEvaluator sharedInstance].xf_evacallback(XF_Audio_Evaluation_End,100,nil); 115 | } 116 | 117 | /*! 118 | * 正在取消 119 | */ 120 | - (void)onCancel 121 | { 122 | NSLog(@"正在取消"); 123 | [FZSpeechEvaluator sharedInstance].xf_evacallback(XF_Audio_Evaluation_Cancel,100,nil); 124 | } 125 | 126 | /*! 127 | * 评测错误回调 128 | * 129 | * 在进行语音评测过程中的任何时刻都有可能回调此函数,你可以根据errorCode进行相应的处理.当errorCode没有错误时,表示此次会话正常结束,否则,表示此次会话有错误发生。特别的当调用`cancel`函数时,引擎不会自动结束,需要等到回调此函数,才表示此次会话结束。在没有回调此函数之前如果重新调用了`startListenging`函数则会报错误。 130 | * 131 | * @param errorCode 错误描述类 132 | */ 133 | - (void)onCompleted:(IFlySpeechError *)errorCode 134 | { 135 | NSLog(@"评测错误"); 136 | if(errorCode && errorCode.errorCode!=0){ 137 | NSLog(@"Error:%d %@",[errorCode errorCode],[errorCode errorDesc]); 138 | } 139 | if (errorCode.errorCode == 20001) { 140 | //没有网络 141 | UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"需要访问网络" message:@"请在系统设置中开启网络服务" delegate:self cancelButtonTitle:@"取消" otherButtonTitles:@"去设置", nil]; 142 | [alertView show]; 143 | return; 144 | } 145 | [FZSpeechEvaluator sharedInstance].xf_evacallback(XF_Audio_Evaluation_Error,errorCode.errorCode,errorCode.errorDesc); 146 | } 147 | 148 | /*! 149 | * 评测结果回调
150 | * 在评测过程中可能会多次回调此函数,你最好不要在此回调函数中进行界面的更改等操作,只需要将回调的结果保存起来。 151 | * 152 | * @param results -[out] 评测结果。 153 | * @param isLast -[out] 是否最后一条结果 154 | */ 155 | - (void)onResults:(NSData *)results isLast:(BOOL)isLast 156 | { 157 | NSLog(@"评测结果"); 158 | NSString *showText = @""; 159 | 160 | const char* chResult=[results bytes]; 161 | 162 | BOOL isUTF8=[[self.iFlySpeechEvaluator parameterForKey:[IFlySpeechConstant RESULT_ENCODING]]isEqualToString:@"utf-8"]; 163 | NSString* strResults=nil; 164 | if(isUTF8){ 165 | strResults=[[NSString alloc] initWithBytes:chResult length:[results length] encoding:NSUTF8StringEncoding]; 166 | }else{ 167 | NSLog(@"result encoding: gb2312"); 168 | NSStringEncoding encoding = CFStringConvertEncodingToNSStringEncoding(kCFStringEncodingGB_18030_2000); 169 | strResults=[[NSString alloc] initWithBytes:chResult length:[results length] encoding:encoding]; 170 | } 171 | if(strResults){ 172 | showText = [showText stringByAppendingString:strResults]; 173 | } 174 | NSLog(@"评测结果:%@",showText); 175 | 176 | ISEResultXmlParser* parser=[[ISEResultXmlParser alloc] init]; 177 | parser.delegate=self; 178 | [parser parserXml:showText]; 179 | } 180 | 181 | #pragma mark - ISEResultXmlParserDelegate 182 | -(void)onISEResultXmlParser:(NSXMLParser *)parser Error:(NSError*)error{ 183 | 184 | } 185 | 186 | -(void)onISEResultXmlParserResult:(ISEResult*)result 187 | { 188 | NSLog(@"----%@",[result toString]); 189 | 190 | NSDictionary *resultDic = [self returnResultDicWithResultStr: [result toString]]; 191 | float resultScore = [[resultDic objectForKey:@"Total Score"] floatValue]; 192 | resultScore = resultScore*20; 193 | [FZSpeechEvaluator sharedInstance].xf_evacallback(XF_Audio_Evaluation_Result,resultScore,[resultDic objectForKey:@"Content"]); 194 | } 195 | 196 | #pragma mark --- 解析结果 197 | -(NSDictionary *)returnResultDicWithResultStr:(NSString *)resultStr 198 | { 199 | NSMutableDictionary *resultDict = [[NSMutableDictionary alloc]init]; 200 | //分割整体和局部 201 | NSArray *allArr = [resultStr componentsSeparatedByString:@"[Read Details]:"]; 202 | //解析外部整体解析结果部分 203 | NSArray *totalArr = [allArr[0] componentsSeparatedByString:@"[ISE Results]"]; 204 | NSMutableArray *totalArray = [NSMutableArray arrayWithArray:[[totalArr[1] stringByReplacingOccurrencesOfString:@"\n" withString:@":"] componentsSeparatedByString:@":"]]; 205 | [totalArray removeObjectAtIndex:0]; 206 | [totalArray removeObjectAtIndex:totalArray.count-1]; 207 | for (int i = 0; i < totalArray.count; i++) { 208 | [resultDict setObject:totalArray[i+1] forKey:totalArray[i]]; 209 | i++; 210 | } 211 | //解析局部数据结果部分 212 | NSArray *bodyArr = [allArr[1] componentsSeparatedByString:@"\n\n"]; 213 | NSMutableArray *bodyArray = [[NSMutableArray alloc]init]; 214 | for (int i = 0; i < bodyArr.count-1; i++) { 215 | NSString *str = [bodyArr[i] stringByReplacingOccurrencesOfString:@"\n" withString:@""]; 216 | NSArray *arr = [str componentsSeparatedByString:@"└"]; 217 | NSString *bodyResult = arr[0]; 218 | NSArray *bodyStr = [bodyResult componentsSeparatedByString:@" "]; 219 | NSMutableDictionary *bodyDict = [[NSMutableDictionary alloc]init]; 220 | [bodyDict setObject:[bodyResult substringWithRange:NSMakeRange(5, 1)] forKey:@"word"]; 221 | [bodyDict setObject:bodyStr[1] forKey:@"pinyin"]; 222 | [bodyDict setObject:[bodyStr[3] substringWithRange:NSMakeRange(4, 1)] forKey:@"Dur"]; 223 | [bodyArray addObject:bodyDict]; 224 | } 225 | 226 | [resultDict setObject:bodyArray forKey:@"bodyList"]; 227 | 228 | return resultDict; 229 | } 230 | 231 | 
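// Note: UIAlertView and -[UIApplication openURL:] (used in the delegate method just below) are
// deprecated since iOS 9 / iOS 10. A rough UIAlertController-based equivalent is sketched here for
// reference only; it additionally needs a view controller to present from, which this NSObject
// subclass does not own ("topViewController" is a placeholder):
//
//   UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"需要访问网络"
//                                                                  message:@"请在系统设置中开启网络服务"
//                                                           preferredStyle:UIAlertControllerStyleAlert];
//   [alert addAction:[UIAlertAction actionWithTitle:@"取消" style:UIAlertActionStyleCancel handler:nil]];
//   [alert addAction:[UIAlertAction actionWithTitle:@"去设置" style:UIAlertActionStyleDefault
//                                            handler:^(UIAlertAction *action) {
//       NSURL *url = [NSURL URLWithString:UIApplicationOpenSettingsURLString];
//       [[UIApplication sharedApplication] openURL:url options:@{} completionHandler:nil];
//   }]];
//   [topViewController presentViewController:alert animated:YES completion:nil];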
-(void)alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)buttonIndex 232 | { 233 | if (buttonIndex == 1) { 234 | NSURL*url = [NSURL URLWithString:UIApplicationOpenSettingsURLString]; 235 | if( [[UIApplication sharedApplication]canOpenURL:url] ) { 236 | [[UIApplication sharedApplication]openURL:url]; 237 | } 238 | } 239 | } 240 | 241 | @end 242 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/FZSpeechRecognizer.h: -------------------------------------------------------------------------------- 1 | // 2 | // FZSpeechRecognizer.h 3 | // CorrectSpeak 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | // 语音听写 9 | 10 | #import 11 | #import // 引入讯飞语音库 12 | 13 | typedef void (^XFAudioRecognizerCallback)(NSString *resText,NSError *eroor); 14 | 15 | @interface FZSpeechRecognizer : NSObject 16 | 17 | @property(nonatomic, strong) IFlySpeechRecognizer *iFlySpeechRecognizer; // 定义语音听写对象 18 | 19 | @property(nonatomic, copy) XFAudioRecognizerCallback xf_recogcallback; 20 | 21 | + (instancetype)sharedInstance; 22 | 23 | /** 24 | 语音听写 25 | 26 | @param callback 听写结果回调 27 | */ 28 | + (void)xf_AudioRecognizerResult: (void(^)(NSString *resText,NSError *error))callback; 29 | 30 | /*! 31 | * 停止录音
32 | * 调用此函数会停止录音,并开始进行语音识别 33 | */ 34 | - (void) stopListening; 35 | 36 | /*! 37 | * 取消本次会话 38 | */ 39 | - (void) cancel; 40 | 41 | @end 42 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/FZSpeechRecognizer.m: -------------------------------------------------------------------------------- 1 | // 2 | // FZSpeechRecognizer.m 3 | // CorrectSpeak 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | 9 | #import "FZSpeechRecognizer.h" 10 | 11 | @interface FZSpeechRecognizer() 12 | 13 | @end 14 | 15 | @implementation FZSpeechRecognizer 16 | 17 | + (instancetype)sharedInstance { 18 | static id sharedInstance; 19 | static dispatch_once_t onceToken; 20 | dispatch_once(&onceToken, ^{ 21 | sharedInstance = [[self alloc] init]; 22 | }); 23 | return sharedInstance; 24 | } 25 | 26 | #pragma mark --- 语音听写 27 | - (IFlySpeechRecognizer *)iFlySpeechRecognizer { 28 | if (!_iFlySpeechRecognizer) { 29 | _iFlySpeechRecognizer = [IFlySpeechRecognizer sharedInstance]; 30 | _iFlySpeechRecognizer.delegate = self; 31 | // 设置听写模式 32 | [_iFlySpeechRecognizer setParameter:@"iat" forKey:[IFlySpeechConstant IFLY_DOMAIN]]; 33 | // 设置录音保存文件名 34 | [_iFlySpeechRecognizer setParameter:@"asrview.pcm" forKey:[IFlySpeechConstant ASR_AUDIO_PATH]]; 35 | //set timeout of recording 36 | [_iFlySpeechRecognizer setParameter:@"30000" forKey:[IFlySpeechConstant SPEECH_TIMEOUT]]; 37 | //set VAD timeout of end of speech(EOS) 38 | [_iFlySpeechRecognizer setParameter:@"3000" forKey:[IFlySpeechConstant VAD_EOS]]; 39 | //set VAD timeout of beginning of speech(BOS) 40 | [_iFlySpeechRecognizer setParameter:@"3000" forKey:[IFlySpeechConstant VAD_BOS]]; 41 | //set network timeout 42 | [_iFlySpeechRecognizer setParameter:@"20000" forKey:[IFlySpeechConstant NET_TIMEOUT]]; 43 | } 44 | return _iFlySpeechRecognizer; 45 | } 46 | 47 | #pragma mark --- 语音听写 48 | + (void)xf_AudioRecognizerResult:(void(^)(NSString *resText,NSError *error))callback 49 | { 50 | // 3.开始语音听写 51 | [[FZSpeechRecognizer sharedInstance].iFlySpeechRecognizer startListening]; 52 | [FZSpeechRecognizer sharedInstance].xf_recogcallback = callback; 53 | } 54 | 55 | /*! 56 | * 停止录音
57 | * 调用此函数会停止录音,并开始进行语音识别 58 | */ 59 | - (void) stopListening 60 | { 61 | [_iFlySpeechRecognizer stopListening]; 62 | } 63 | 64 | /*! 65 | * 取消本次会话 66 | */ 67 | - (void) cancel 68 | { 69 | [_iFlySpeechRecognizer cancel]; 70 | } 71 | 72 | /*! 73 | * 识别结果回调 74 | * 75 | * 在进行语音识别过程中的任何时刻都有可能回调此函数,你可以根据errorCode进行相应的处理,当errorCode没有错误时,表示此次会话正常结束;否则,表示此次会话有错误发生。特别的当调用`cancel`函数时,引擎不会自动结束,需要等到回调此函数,才表示此次会话结束。在没有回调此函数之前如果重新调用了`startListenging`函数则会报错误。 76 | * 77 | * @param errorCode 错误描述 78 | */ 79 | - (void) onCompleted:(IFlySpeechError *) errorCode 80 | { 81 | NSLog(@"听写出错"); 82 | if (errorCode.errorCode == 0) { 83 | return; 84 | } 85 | NSString *desc = NSLocalizedString(@"fzh.correctSpeak", @""); 86 | NSDictionary *userInfo = @{ NSLocalizedDescriptionKey : desc }; 87 | NSError *resultError = [NSError errorWithDomain:errorCode.errorDesc 88 | code:errorCode.errorCode 89 | userInfo:userInfo]; 90 | [FZSpeechRecognizer sharedInstance].xf_recogcallback(nil,resultError); 91 | } 92 | 93 | /*! 94 | * 识别结果回调 95 | * 96 | * 在识别过程中可能会多次回调此函数,你最好不要在此回调函数中进行界面的更改等操作,只需要将回调的结果保存起来。
97 | * 使用results的示例如下: 98 | *
 99 |  *  - (void) onResults:(NSArray *) results{
100 |  *     NSMutableString *result = [[NSMutableString alloc] init];
101 |  *     NSDictionary *dic = [results objectAtIndex:0];
102 |  *     for (NSString *key in dic){
103 |  *        [result appendFormat:@"%@",key];//合并结果
104 |  *     }
105 |  *   }
106 |  *  
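 *  (Each key in that dictionary is itself a JSON string returned by the recognizer; this demo's
 *   -stringFromJson: further below shows one way to pull the recognized text out of its "ws"/"cw"/"w" fields.)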
107 | * 108 | * @param results -[out] 识别结果,NSArray的第一个元素为NSDictionary,NSDictionary的key为识别结果,sc为识别结果的置信度。 109 | * @param isLast -[out] 是否最后一个结果 110 | */ 111 | - (void) onResults:(NSArray *) results isLast:(BOOL)isLast 112 | { 113 | NSLog(@"听写结果"); 114 | NSMutableString *resultString = [[NSMutableString alloc] init]; 115 | NSDictionary *dic = results[0]; 116 | 117 | for (NSString *key in dic) { 118 | [resultString appendFormat:@"%@",key]; 119 | } 120 | 121 | NSString * resultFromJson = [self stringFromJson:resultString]; 122 | 123 | NSLog(@"听到的结果:%@",resultFromJson); 124 | if (resultFromJson.length == 0) { 125 | resultFromJson = @"什么都没听到呢"; 126 | } 127 | [FZSpeechRecognizer sharedInstance].xf_recogcallback(resultFromJson,nil); 128 | } 129 | 130 | #pragma mark --- 解析听到语音内容 131 | /** 132 | parse JSON data 133 | params,for example: 134 | {"sn":1,"ls":true,"bg":0,"ed":0,"ws":[{"bg":0,"cw":[{"w":"白日","sc":0}]},{"bg":0,"cw":[{"w":"依山","sc":0}]},{"bg":0,"cw":[{"w":"尽","sc":0}]},{"bg":0,"cw":[{"w":"黄河入海流","sc":0}]},{"bg":0,"cw":[{"w":"。","sc":0}]}]} 135 | **/ 136 | - (NSString *)stringFromJson:(NSString*)params 137 | { 138 | if (params == NULL) { 139 | return nil; 140 | } 141 | 142 | NSMutableString *tempStr = [[NSMutableString alloc] init]; 143 | NSDictionary *resultDic = [NSJSONSerialization JSONObjectWithData: 144 | [params dataUsingEncoding:NSUTF8StringEncoding] options:kNilOptions error:nil]; 145 | 146 | if (resultDic!= nil) { 147 | NSArray *wordArray = [resultDic objectForKey:@"ws"]; 148 | 149 | for (int i = 0; i < [wordArray count]; i++) { 150 | NSDictionary *wsDic = [wordArray objectAtIndex: i]; 151 | NSArray *cwArray = [wsDic objectForKey:@"cw"]; 152 | 153 | for (int j = 0; j < [cwArray count]; j++) { 154 | NSDictionary *wDic = [cwArray objectAtIndex:j]; 155 | NSString *str = [wDic objectForKey:@"w"]; 156 | [tempStr appendString: str]; 157 | } 158 | } 159 | } 160 | return tempStr; 161 | } 162 | 163 | @end 164 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/FZSpeechSynthesizer.h: -------------------------------------------------------------------------------- 1 | // 2 | // FZSpeechSynthesizer.h 3 | // CorrectSpeak 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | // 语音合成 9 | 10 | #import 11 | #import // 引入讯飞语音库 12 | 13 | typedef enum : NSUInteger { 14 | XF_Audio_Synthesize_Progress = 0, //合成进度 15 | XF_Audio_Speak_Begain, //开始播放 16 | XF_Audio_Speak_Progress, //播放进度 17 | XF_Audio_Speak_End, //播放结束 18 | } XF_Audio_Synthesize_Type; //语音合成 19 | 20 | typedef void (^XFAudioSynCallback)(XF_Audio_Synthesize_Type type, NSInteger progress); 21 | 22 | @interface FZSpeechSynthesizer : NSObject 23 | 24 | @property(nonatomic, strong) IFlySpeechSynthesizer *iFlySpeechSynthesizer; // 定义语音合成对象 25 | 26 | @property(nonatomic, copy) XFAudioSynCallback xf_syncallback; 27 | 28 | + (instancetype)sharedInstance; 29 | 30 | /** 31 | 语音合成 32 | 33 | @param text 合成内容 34 | @param callback 回调结果 35 | */ 36 | + (void)xf_AudioSynthesizeOfText: (NSString *)text callback:(void (^)(XF_Audio_Synthesize_Type type,NSInteger progress))callback; 37 | 38 | /** 39 | 语音合成 40 | 41 | @param text 合成内容 42 | @param people 设置发音人 43 | @param callback 回调结果 44 | */ 45 | + (void)xf_AudioSynthesizeOfText: (NSString *)text fromPeople:(NSString *)people callback:(void (^)(XF_Audio_Synthesize_Type type,NSInteger progress))callback; 46 | 47 | /*! 48 | * 暂停/恢复播放
49 | * 暂停播放之后,合成不会暂停,仍会继续,如果发生错误则会回调错误`onCompleted` 50 | * 自动判断是否是暂停状态,如果是暂停,调用后恢复播放;如果是播放,调用后暂停播放。 51 | */ 52 | - (void) resumeOrPauseSpeaking; 53 | 54 | /*! 55 | * 停止播放并停止合成 56 | */ 57 | - (void) stopSpeaking; 58 | 59 | @end 60 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/FZSpeechSynthesizer.m: -------------------------------------------------------------------------------- 1 | // 2 | // FZSpeechSynthesizer.m 3 | // CorrectSpeak 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | 9 | #import "FZSpeechSynthesizer.h" 10 | 11 | @interface FZSpeechSynthesizer() 12 | 13 | @property(nonatomic, copy) NSString *people; 14 | 15 | @end 16 | 17 | @implementation FZSpeechSynthesizer 18 | 19 | + (instancetype)sharedInstance { 20 | static id sharedInstance; 21 | static dispatch_once_t onceToken; 22 | dispatch_once(&onceToken, ^{ 23 | sharedInstance = [[self alloc] init]; 24 | }); 25 | return sharedInstance; 26 | } 27 | 28 | #pragma mark --- 语音合成 29 | /** 30 | * 懒加载getter方法 31 | */ 32 | - (IFlySpeechSynthesizer *)iFlySpeechSynthesizer { 33 | if(!_iFlySpeechSynthesizer) { 34 | // 初始化语音合成 35 | _iFlySpeechSynthesizer = [IFlySpeechSynthesizer sharedInstance]; 36 | _iFlySpeechSynthesizer.delegate = self; 37 | // 语速【0-100】 38 | [_iFlySpeechSynthesizer setParameter:@"50" forKey:[IFlySpeechConstant SPEED]]; 39 | // 音量【0-100】 40 | [_iFlySpeechSynthesizer setParameter:@"50" forKey:[IFlySpeechConstant VOLUME]]; 41 | // 发音人【小燕:xiaoyan;小宇:xiaoyu;凯瑟琳:catherine;亨利:henry;玛丽:vimary;小研:vixy;小琪:vixq;小峰:vixf;小梅:vixl;小莉:vixq;小蓉(四川话):vixr;小芸:vixyun;小坤:vixk;小强:vixqa;小莹:vixying;小新:vixx;楠楠:vinn;老孙:vils】 42 | if (!_people) { 43 | _people = @"xiaoyan"; 44 | } 45 | [_iFlySpeechSynthesizer setParameter:_people forKey:[IFlySpeechConstant VOICE_NAME]]; 46 | // 音频采样率【8000或16000】 47 | [_iFlySpeechSynthesizer setParameter:@"16000" forKey:[IFlySpeechConstant SAMPLE_RATE]]; 48 | // 保存音频路径(默认在Document目录下) 49 | [_iFlySpeechSynthesizer setParameter:@"tts.pcm" forKey:[IFlySpeechConstant TTS_AUDIO_PATH]]; 50 | //文本编码格式 51 | [_iFlySpeechSynthesizer setParameter:@"unicode" forKey:[IFlySpeechConstant TEXT_ENCODING]]; 52 | } 53 | return _iFlySpeechSynthesizer; 54 | } 55 | 56 | #pragma mark --- 合成语音 57 | + (void)xf_AudioSynthesizeOfText:(NSString *)text callback:(void (^)(XF_Audio_Synthesize_Type type,NSInteger progress))callback 58 | { 59 | // 1.开始合成说话 60 | [[FZSpeechSynthesizer sharedInstance].iFlySpeechSynthesizer startSpeaking:text]; 61 | [FZSpeechSynthesizer sharedInstance].xf_syncallback = callback; 62 | } 63 | + (void)xf_AudioSynthesizeOfText:(NSString *)text fromPeople:(NSString *)people callback:(void (^)(XF_Audio_Synthesize_Type type,NSInteger progress))callback 64 | { 65 | [[FZSpeechSynthesizer sharedInstance] setPeople:people]; 66 | [FZSpeechSynthesizer xf_AudioSynthesizeOfText:text callback:nil]; 67 | [FZSpeechSynthesizer sharedInstance].xf_syncallback = callback; 68 | } 69 | 70 | /*! 71 | * 暂停/恢复播放
72 | * 暂停播放之后,合成不会暂停,仍会继续,如果发生错误则会回调错误`onCompleted` 73 | * 自动判断是否是暂停状态,如果是暂停,调用后恢复播放;如果是播放,调用后暂停播放。 74 | */ 75 | - (void)resumeOrPauseSpeaking 76 | { 77 | if (_iFlySpeechSynthesizer.isSpeaking) { 78 | [_iFlySpeechSynthesizer pauseSpeaking]; 79 | } else { 80 | [_iFlySpeechSynthesizer resumeSpeaking]; 81 | } 82 | } 83 | 84 | /*! 85 | * 停止播放并停止合成 86 | */ 87 | - (void)stopSpeaking 88 | { 89 | if (_iFlySpeechSynthesizer.isSpeaking) { 90 | [_iFlySpeechSynthesizer stopSpeaking]; 91 | } 92 | } 93 | 94 | /** 95 | * 设置发音人 96 | */ 97 | - (void)setPeople:(NSString *)people { 98 | _people = people; 99 | _iFlySpeechSynthesizer = nil; 100 | } 101 | 102 | #pragma mark --- 语音合成代理方法 103 | /** 104 | * 合成缓冲进度【0-100】 105 | */ 106 | - (void)onBufferProgress:(int)progress message:(NSString *)msg { 107 | NSLog(@"合成缓冲进度:%d/100",progress); 108 | if ([FZSpeechSynthesizer sharedInstance].xf_syncallback != nil) { 109 | [FZSpeechSynthesizer sharedInstance].xf_syncallback(0,progress); 110 | } 111 | } 112 | /** 113 | * 合成开始 114 | */ 115 | - (void)onSpeakBegin { 116 | NSLog(@"合成播放开始!"); 117 | if ([FZSpeechSynthesizer sharedInstance].xf_syncallback != nil) { 118 | [FZSpeechSynthesizer sharedInstance].xf_syncallback(1,0); 119 | } 120 | } 121 | /** 122 | * 合成播放进度【0-100】 123 | */ 124 | - (void)onSpeakProgress:(int)progress beginPos:(int)beginPos endPos:(int)endPos { 125 | NSLog(@"合成播放进度:%d/100",progress); 126 | if ([FZSpeechSynthesizer sharedInstance].xf_syncallback != nil) { 127 | [FZSpeechSynthesizer sharedInstance].xf_syncallback(2,progress); 128 | } 129 | } 130 | /** 131 | * 合成结束 132 | */ 133 | - (void)onCompleted:(IFlySpeechError *)error { 134 | NSLog(@"合成结束!"); 135 | //语音合成 136 | if ([FZSpeechSynthesizer sharedInstance].xf_syncallback != nil) { 137 | [FZSpeechSynthesizer sharedInstance].xf_syncallback(3,100); 138 | } 139 | } 140 | 141 | @end 142 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResult.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResult.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 
6 | // 7 | // 8 | 9 | #import 10 | 11 | /** 12 | * ISE Result 13 | */ 14 | @interface ISEResult : NSObject 15 | 16 | /** 17 | * Language:English(en)、Chinese(cn) 18 | */ 19 | @property(nonatomic,strong)NSString* language; 20 | 21 | /** 22 | * Category:read_syllable(cn)、read_word、read_sentence 23 | */ 24 | @property(nonatomic,strong)NSString* category; 25 | 26 | /** 27 | * Beginning of frame,10ms per frame 28 | */ 29 | @property(nonatomic,assign)int beg_pos; 30 | 31 | /** 32 | * End of frame 33 | */ 34 | @property(nonatomic,assign)int end_pos; 35 | 36 | /** 37 | * Content of ISE 38 | */ 39 | @property(nonatomic,strong)NSString* content; 40 | 41 | /** 42 | * Total score 43 | */ 44 | @property(nonatomic,assign)float total_score; 45 | 46 | /** 47 | * Duration(cn) 48 | */ 49 | @property(nonatomic,assign)int time_len; 50 | 51 | /** 52 | * Exception info(en) 53 | */ 54 | @property(nonatomic,strong)NSString* except_info; 55 | 56 | /** 57 | * Whether or not dirty read(cn) 58 | */ 59 | @property(nonatomic,assign)BOOL is_rejected; 60 | 61 | /** 62 | * The lable of sentence in xml results 63 | */ 64 | @property(nonatomic,strong)NSMutableArray* sentences; 65 | 66 | -(NSString*) toString; 67 | 68 | @end 69 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResult.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResult.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import "ISEResult.h" 10 | 11 | @implementation ISEResult 12 | 13 | -(NSString*) toString{ 14 | return @""; 15 | } 16 | 17 | @end 18 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultFinal.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultFinal.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/7. 6 | // 7 | // 8 | 9 | #import "ISEResult.h" 10 | 11 | @interface ISEResultFinal : ISEResult 12 | 13 | @property(nonatomic,assign) int ret; 14 | 15 | -(NSString*) toString; 16 | 17 | @end 18 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultFinal.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultFinal.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/7. 6 | // 7 | // 8 | 9 | #import "ISEResultFinal.h" 10 | 11 | @implementation ISEResultFinal 12 | 13 | -(NSString*) toString{ 14 | NSString* resultString=[NSString stringWithFormat:@"Returned Value:%d,Total Score:%f",self.ret,self.total_score]; 15 | return resultString; 16 | } 17 | 18 | @end 19 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultPhone.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultPhone.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 
6 | // 7 | // 8 | 9 | #import 10 | 11 | /** 12 | * The lable of Phone in xml results 13 | */ 14 | @interface ISEResultPhone : NSObject 15 | 16 | /** 17 | * Beginning of frame,10ms per frame 18 | */ 19 | @property(nonatomic, assign)int beg_pos; 20 | 21 | /** 22 | * End of frame 23 | */ 24 | @property(nonatomic, assign)int end_pos; 25 | 26 | /** 27 | * Content of Phone 28 | */ 29 | @property(nonatomic, strong)NSString* content; 30 | 31 | /** 32 | * Read message:0(Right),16(Skip),32(Duplicate),64(Readback),128(Replace) 33 | */ 34 | @property(nonatomic, assign)int dp_message; 35 | 36 | /** 37 | * Duration(cn) 38 | */ 39 | @property(nonatomic, assign)int time_len; 40 | 41 | /** 42 | * Get the standard phonetic symbol of content(en) 43 | */ 44 | - (NSString*) getStdSymbol; 45 | 46 | 47 | @end 48 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultPhone.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultPhone.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import "ISEResultPhone.h" 10 | #import "ISEResultTools.h" 11 | 12 | @implementation ISEResultPhone 13 | 14 | /** 15 | * Get the standard phonetic symbol of content(en) 16 | */ 17 | - (NSString*) getStdSymbol{ 18 | 19 | if(self.content){ 20 | NSString* stdSymbol=[ISEResultTools toStdSymbol:self.content]; 21 | return stdSymbol?stdSymbol:self.content; 22 | } 23 | 24 | return self.content; 25 | } 26 | 27 | @end 28 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultReadSentence.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultReadSentence.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/7. 6 | // 7 | // 8 | 9 | #import "ISEResult.h" 10 | 11 | @interface ISEResultReadSentence : ISEResult 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultReadSentence.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultReadSentence.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/7. 
6 | // 7 | // 8 | 9 | #import "ISEResultReadSentence.h" 10 | #import "ISEResultTools.h" 11 | 12 | @implementation ISEResultReadSentence 13 | 14 | -(instancetype)init{ 15 | if(self=[super init]){ 16 | self.category=@"read_sentence"; 17 | } 18 | return self; 19 | } 20 | 21 | -(NSString*) toString{ 22 | NSString* buffer = [[NSString alloc] init]; 23 | 24 | if ([@"cn" isEqualToString:self.language]) { 25 | buffer=[buffer stringByAppendingFormat:@"[ISE Results]\n"]; 26 | buffer=[buffer stringByAppendingFormat:@"Content:%@\n" ,self.content]; 27 | buffer=[buffer stringByAppendingFormat:@"Duration:%d\n",self.time_len]; 28 | buffer=[buffer stringByAppendingFormat:@"Total Score:%f\n",self.total_score]; 29 | buffer=[buffer stringByAppendingFormat:@"[Read Details]:%@\n",[ISEResultTools formatDetailsForLanguageCN:self.sentences]]; 30 | 31 | } else { 32 | if (self.is_rejected) { 33 | buffer=[buffer stringByAppendingFormat:@"Dirty Read,"]; 34 | 35 | buffer=[buffer stringByAppendingFormat:@"except_info:%@\n\n",self.except_info]; 36 | } 37 | 38 | buffer=[buffer stringByAppendingFormat:@"[ISE Results]\n"]; 39 | buffer=[buffer stringByAppendingFormat:@"Content:%@\n",self.content]; 40 | // buffer=[buffer stringByAppendingFormat:@"Duration:%d\n",self.time_len]; 41 | buffer=[buffer stringByAppendingFormat:@"Total Score:%f\n",self.total_score]; 42 | buffer=[buffer stringByAppendingFormat:@"[Read Details]:%@\n",[ISEResultTools formatDetailsForLanguageEN:self.sentences]]; 43 | } 44 | 45 | return buffer; 46 | } 47 | 48 | @end 49 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultReadSyllable.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultReadSyllable.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/7. 6 | // 7 | // 8 | 9 | #import "ISEResult.h" 10 | 11 | @interface ISEResultReadSyllable : ISEResult 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultReadSyllable.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultReadSyllable.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/7. 6 | // 7 | // 8 | 9 | #import "ISEResultReadSyllable.h" 10 | #import "ISEResultTools.h" 11 | 12 | @implementation ISEResultReadSyllable 13 | 14 | 15 | -(instancetype)init{ 16 | if(self=[super init]){ 17 | self.category = @"read_syllable"; 18 | self.language = @"cn"; 19 | } 20 | return self; 21 | } 22 | 23 | -(NSString*) toString{ 24 | NSString* buffer = [[NSString alloc] init]; 25 | 26 | buffer=[buffer stringByAppendingFormat:@"[ISE Results]\n"]; 27 | buffer=[buffer stringByAppendingFormat:@"Content:%@\n" ,self.content]; 28 | buffer=[buffer stringByAppendingFormat:@"Duration:%d\n",self.time_len]; 29 | buffer=[buffer stringByAppendingFormat:@"Total Score:%f\n",self.total_score]; 30 | buffer=[buffer stringByAppendingFormat:@"[Read Details]:%@\n",[ISEResultTools formatDetailsForLanguageCN:self.sentences]]; 31 | 32 | return buffer; 33 | } 34 | 35 | @end 36 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultReadWord.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultReadWord.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/7. 
6 | // 7 | // 8 | 9 | #import "ISEResult.h" 10 | 11 | @interface ISEResultReadWord : ISEResult 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultReadWord.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultReadWord.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/7. 6 | // 7 | // 8 | 9 | #import "ISEResultReadWord.h" 10 | #import "ISEResultTools.h" 11 | 12 | @implementation ISEResultReadWord 13 | 14 | -(instancetype)init{ 15 | if(self=[super init]){ 16 | self.category=@"read_word"; 17 | } 18 | return self; 19 | } 20 | 21 | -(NSString*) toString{ 22 | NSString* buffer = [[NSString alloc] init]; 23 | 24 | if ([@"cn" isEqualToString:self.language]) { 25 | buffer=[buffer stringByAppendingFormat:@"[ISE Results]\n"]; 26 | buffer=[buffer stringByAppendingFormat:@"Content:%@\n" ,self.content]; 27 | buffer=[buffer stringByAppendingFormat:@"Duration:%d\n",self.time_len]; 28 | buffer=[buffer stringByAppendingFormat:@"Total Score:%f\n",self.total_score]; 29 | buffer=[buffer stringByAppendingFormat:@"[Read Details]:%@\n",[ISEResultTools formatDetailsForLanguageCN:self.sentences]]; 30 | 31 | } else { 32 | if (self.is_rejected) { 33 | buffer=[buffer stringByAppendingFormat:@"Dirty Read,"]; 34 | 35 | buffer=[buffer stringByAppendingFormat:@"except_info:%@\n\n",self.except_info]; 36 | } 37 | 38 | buffer=[buffer stringByAppendingFormat:@"[ISE Results]\n"]; 39 | buffer=[buffer stringByAppendingFormat:@"Content:%@\n",self.content]; 40 | // buffer=[buffer stringByAppendingFormat:@"Duration:%d\n",self.time_len]; 41 | buffer=[buffer stringByAppendingFormat:@"Total Score:%f\n",self.total_score]; 42 | buffer=[buffer stringByAppendingFormat:@"[Read Details]:%@\n",[ISEResultTools formatDetailsForLanguageEN:self.sentences]]; 43 | } 44 | 45 | return buffer; 46 | } 47 | 48 | @end 49 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultSentence.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultSentence.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import 10 | 11 | /** 12 | * The lable of sentence in xml results 13 | */ 14 | @interface ISEResultSentence : NSObject 15 | 16 | /** 17 | * Beginning of frame,10ms per frame 18 | */ 19 | @property(nonatomic, assign)int beg_pos; 20 | 21 | /** 22 | * End of frame 23 | */ 24 | @property(nonatomic, assign)int end_pos; 25 | 26 | /** 27 | * Content of Sentence 28 | */ 29 | @property(nonatomic, strong)NSString* content; 30 | 31 | /** 32 | * Total score 33 | */ 34 | @property(nonatomic, assign)float total_score; 35 | 36 | /** 37 | * Duration(cn) 38 | */ 39 | @property(nonatomic, assign)int time_len; 40 | 41 | /** 42 | * The index of Sentence(en) 43 | */ 44 | @property(nonatomic, assign)int index; 45 | 46 | /** 47 | * Count of words in Sentence(en) 48 | */ 49 | @property(nonatomic, assign)int word_count; 50 | 51 | /** 52 | * Word array in Sentence 53 | */ 54 | @property(nonatomic, strong)NSMutableArray* words; 55 | 56 | @end 57 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultSentence.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultSentence.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 
6 | // 7 | // 8 | 9 | #import "ISEResultSentence.h" 10 | 11 | @implementation ISEResultSentence 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultSyll.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultSyll.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import 10 | 11 | /** 12 | * The lable of Syll in xml results 13 | */ 14 | @interface ISEResultSyll : NSObject 15 | 16 | /** 17 | * Beginning of frame,10ms per frame 18 | */ 19 | @property(nonatomic, assign)int beg_pos; 20 | 21 | /** 22 | * End of frame 23 | */ 24 | @property(nonatomic, assign)int end_pos; 25 | 26 | /** 27 | * Content of Syll 28 | */ 29 | @property(nonatomic, strong)NSString* content; 30 | 31 | /** 32 | * Pin Yin(cn),number represents tone,5 represents light tone,for example, fen1 33 | */ 34 | @property(nonatomic, strong)NSString* symbol; 35 | 36 | /** 37 | * Read message:0(Right),16(Skip),32(Duplicate),64(Readback),128(Replace) 38 | */ 39 | @property(nonatomic, assign)int dp_message; 40 | 41 | /** 42 | * Duration(cn) 43 | */ 44 | @property(nonatomic, assign)int time_len; 45 | 46 | /** 47 | * Phonetic array in Syll 48 | */ 49 | @property(nonatomic, strong)NSMutableArray* phones; 50 | 51 | /** 52 | * Get the standard phonetic symbol of content(en) 53 | */ 54 | - (NSString*) getStdSymbol; 55 | 56 | @end 57 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultSyll.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultSyll.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import "ISEResultSyll.h" 10 | #import "ISEResultTools.h" 11 | 12 | @implementation ISEResultSyll 13 | 14 | /** 15 | * Get the standard phonetic symbol of content(en) 16 | */ 17 | - (NSString*) getStdSymbol{ 18 | 19 | NSArray* symbols=[self.content componentsSeparatedByString:@" "]; 20 | NSString* stdSymbol=[[NSString alloc] init]; 21 | 22 | for (int i = 0; i < [symbols count]; ++i) { 23 | stdSymbol = [stdSymbol stringByAppendingString:[ISEResultTools toStdSymbol:symbols[i]]]; 24 | } 25 | 26 | return stdSymbol; 27 | } 28 | 29 | @end 30 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultTools.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultTools.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import 10 | 11 | 12 | FOUNDATION_EXPORT NSString* const KCIFlyResultNormal; 13 | FOUNDATION_EXPORT NSString* const KCIFlyResultMiss; 14 | FOUNDATION_EXPORT NSString* const KCIFlyResultAdd; 15 | FOUNDATION_EXPORT NSString* const KCIFlyResultRepeat; 16 | FOUNDATION_EXPORT NSString* const KCIFlyResultReplace; 17 | 18 | FOUNDATION_EXPORT NSString* const KCIFlyResultNoise; 19 | FOUNDATION_EXPORT NSString* const KCIFlyResultMute; 20 | 21 | 22 | @interface ISEResultTools : NSObject 23 | 24 | /*! 25 | * Get the standard phonetic symbol of symbol 26 | * 27 | * @param symbol iFlytek phonetic symbol 28 | * 29 | * @return if not exit,return symbol itself 30 | */ 31 | +(NSString*) toStdSymbol:(NSString*) symbol; 32 | 33 | 34 | /*! 35 | * Get the message of dpMessage 36 | */ 37 | + (NSString*)translateDpMessageInfo:(int)dpMessage; 38 | 39 | /*! 
40 | * Get the message of content 41 | */ 42 | + (NSString*)translateContentInfo:(NSString*) content; 43 | 44 | 45 | /** 46 | * Get the format details from sentences in chinese 47 | * 48 | * @param sentences sentences in chinese 49 | * @return the format details 50 | */ 51 | + (NSString*)formatDetailsForLanguageCN:(NSArray*) sentences ; 52 | 53 | /** 54 | * Get the format details from sentences in english 55 | * 56 | * @param sentences sentences in english 57 | * @return the format details 58 | */ 59 | + (NSString*)formatDetailsForLanguageEN:(NSArray*) sentences ; 60 | 61 | @end 62 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultTools.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultTools.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import "ISEResultTools.h" 10 | #import "ISEResultPhone.h" 11 | #import "ISEResultSyll.h" 12 | #import "ISEResultWord.h" 13 | #import "ISEResultSentence.h" 14 | 15 | @implementation ISEResultTools 16 | 17 | 18 | +(NSString*) toStdSymbol:(NSString*) symbol{ 19 | 20 | if(!symbol){ 21 | return symbol; 22 | } 23 | 24 | /** 25 | * The mapping table between iFlytek phonetic symbol and standard phonetic symbol(en) 26 | */ 27 | static NSDictionary* _gISEResultPhoneHashDic; 28 | 29 | static dispatch_once_t onceToken; 30 | dispatch_once(&onceToken, ^{ 31 | _gISEResultPhoneHashDic=@{ 32 | @"aa" : @"ɑ:", 33 | @"oo" : @"ɔ", 34 | @"ae" : @"æ", 35 | @"ah" : @"ʌ", 36 | @"ao" : @"ɔ:", 37 | @"aw" : @"aʊ", 38 | @"ax" : @"ə", 39 | @"ay" : @"aɪ", 40 | @"eh" : @"e", 41 | @"er" : @"ə:", 42 | @"ey" : @"eɪ", 43 | @"ih" : @"ɪ", 44 | @"iy" : @"i:", 45 | @"ow" : @"əʊ", 46 | @"oy" : @"ɔɪ", 47 | @"uh" : @"ʊ", 48 | @"uw" : @"ʊ:", 49 | @"ch" : @"tʃ", 50 | @"dh" : @"ð", 51 | @"hh" : @"h", 52 | @"jh" : @"dʒ", 53 | @"ng" : @"ŋ", 54 | @"sh" : @"ʃ", 55 | @"th" : @"θ", 56 | @"zh" : @"ʒ", 57 | @"y" : @"j", 58 | @"d" : @"d", 59 | @"k" : @"k", 60 | @"l" : @"l", 61 | @"m" : @"m", 62 | @"n" : @"n", 63 | @"b" : @"b", 64 | @"f" : @"f", 65 | @"g" : @"g", 66 | @"p" : @"p", 67 | @"r" : @"r", 68 | @"s" : @"s", 69 | @"t" : @"t", 70 | @"v" : @"v", 71 | @"w" : @"w", 72 | @"z" : @"z", 73 | @"ar" : @"eə", 74 | @"ir" : @"iə", 75 | @"ur" : @"ʊə", 76 | @"tr" : @"tr", 77 | @"dr" : @"dr", 78 | @"ts" : @"ts", 79 | @"dz" : @"dz" 80 | }; 81 | 82 | }); 83 | 84 | NSString* stdsymbol=[_gISEResultPhoneHashDic objectForKey:symbol]; 85 | return stdsymbol?stdsymbol:symbol; 86 | 87 | } 88 | 89 | 90 | NSString* const KCIFlyResultNormal=@"Right"; 91 | NSString* const KCIFlyResultMiss=@"Skip"; 92 | NSString* const KCIFlyResultAdd=@"Duplicate"; 93 | NSString* const KCIFlyResultRepeat=@"Readback"; 94 | NSString* const KCIFlyResultReplace=@"Replace"; 95 | 96 | NSString* const KCIFlyResultNoise=@"Noise"; 97 | NSString* const KCIFlyResultMute=@"Mute"; 98 | 99 | + (NSString*)translateDpMessageInfo:(int)dpMessage { 100 | 101 | static NSDictionary* _gISEResultDpMessageHashDic; 102 | 103 | static dispatch_once_t onceToken; 104 | dispatch_once(&onceToken, ^{ 105 | _gISEResultDpMessageHashDic=@{ 106 | @0 : KCIFlyResultNormal, 107 | @16 : KCIFlyResultMiss, 108 | @32 : KCIFlyResultAdd, 109 | @64 : KCIFlyResultRepeat, 110 | @128 : KCIFlyResultReplace 111 | }; 112 | }); 113 | 114 | NSString* transDpMessage=[_gISEResultDpMessageHashDic objectForKey:[NSNumber numberWithInt:dpMessage]]; 115 | return transDpMessage; 116 | } 117 | 118 | + 
(NSString*)translateContentInfo:(NSString*) content { 119 | 120 | if(!content){ 121 | return nil; 122 | } 123 | 124 | static NSDictionary* _gISEResultContentHashDic; 125 | 126 | static dispatch_once_t onceToken; 127 | dispatch_once(&onceToken, ^{ 128 | _gISEResultContentHashDic=@{ 129 | @"sil" : KCIFlyResultMute, 130 | @"silv" : KCIFlyResultMute, 131 | @"fil" : KCIFlyResultNoise 132 | }; 133 | }); 134 | 135 | NSString* transContent=[_gISEResultContentHashDic objectForKey:content]; 136 | return transContent?transContent:content; 137 | } 138 | 139 | 140 | /** 141 | * Get the format details from sentences in chinese 142 | * 143 | * @param sentences sentences in chinese 144 | * @return the format details 145 | */ 146 | + (NSString*)formatDetailsForLanguageCN:(NSArray*) sentences { 147 | NSString* buffer =[[NSString alloc] init]; 148 | if (!sentences) { 149 | return nil; 150 | } 151 | 152 | for (ISEResultSentence* sentence in sentences ) { 153 | 154 | if (nil == sentence.words) { 155 | continue; 156 | } 157 | 158 | for (ISEResultWord* word in sentence.words) { 159 | NSString* wContent=[ISEResultTools translateContentInfo:word.content]; 160 | if ([KCIFlyResultNoise isEqualToString:wContent] || [KCIFlyResultMute isEqualToString:wContent]){ 161 | continue; 162 | } 163 | buffer=[buffer stringByAppendingFormat:@"\nWord[%@] %@ Dur:%d",wContent,word.symbol,word.time_len]; 164 | 165 | if (!word.sylls) { 166 | continue; 167 | } 168 | 169 | for (ISEResultSyll* syll in word.sylls) { 170 | NSString* syContent=[ISEResultTools translateContentInfo:[syll content]]; 171 | if ([KCIFlyResultNoise isEqualToString:syContent] || [KCIFlyResultMute isEqualToString:syContent]){ 172 | continue; 173 | } 174 | 175 | buffer=[buffer stringByAppendingFormat:@"\n└Syllable[%@] %@ Dur:%d",syContent,syll.symbol,syll.time_len]; 176 | if (!syll.phones) { 177 | continue; 178 | } 179 | 180 | for (ISEResultPhone* phone in syll.phones) { 181 | NSString* pContent=[ISEResultTools translateContentInfo:[phone content]]; 182 | NSString* pDpMessage=[ISEResultTools translateDpMessageInfo:phone.dp_message]; 183 | buffer=[buffer stringByAppendingFormat:@"\n\t└Phoneme[%@] Dur:%d Msg:%@",pContent,phone.time_len,pDpMessage]; 184 | } 185 | 186 | } 187 | buffer=[buffer stringByAppendingString:@"\n"]; 188 | } 189 | } 190 | return buffer; 191 | 192 | } 193 | 194 | /** 195 | * Get the format details from sentences in english 196 | * 197 | * @param sentences sentences in english 198 | * @return the format details 199 | */ 200 | + (NSString*)formatDetailsForLanguageEN:(NSArray*) sentences { 201 | NSString* buffer =[[NSString alloc] init]; 202 | if (!sentences) { 203 | return nil; 204 | } 205 | 206 | for (ISEResultSentence* sentence in sentences ) { 207 | NSString* sContent=[ISEResultTools translateContentInfo:sentence.content]; 208 | if ([KCIFlyResultNoise isEqualToString:sContent] || [KCIFlyResultMute isEqualToString:sContent]){ 209 | continue; 210 | } 211 | 212 | if (nil == sentence.words) { 213 | continue; 214 | } 215 | for (ISEResultWord* word in sentence.words) { 216 | NSString* wContent=[ISEResultTools translateContentInfo:word.content]; 217 | NSString* wDpMessage=[ISEResultTools translateDpMessageInfo:word.dp_message]; 218 | if ([KCIFlyResultNoise isEqualToString:wContent] || [KCIFlyResultMute isEqualToString:wContent]){ 219 | continue; 220 | } 221 | buffer=[buffer stringByAppendingFormat:@"\nWord[%@] Msg:%@ Score:%f",wContent,wDpMessage,word.total_score]; 222 | 223 | if (!word.sylls) { 224 | buffer=[buffer stringByAppendingString:@"\n"]; 225 | 
continue; 226 | } 227 | 228 | for (ISEResultSyll* syll in word.sylls) { 229 | NSString* syContent=[ISEResultTools translateContentInfo:[syll getStdSymbol]]; 230 | buffer=[buffer stringByAppendingFormat:@"\n└Syllable[%@] ",syContent]; 231 | if (!syll.phones) { 232 | continue; 233 | } 234 | 235 | for (ISEResultPhone* phone in syll.phones) { 236 | NSString* pContent=[ISEResultTools translateContentInfo:[phone getStdSymbol]]; 237 | NSString* pDpMessage=[ISEResultTools translateDpMessageInfo:phone.dp_message]; 238 | buffer=[buffer stringByAppendingFormat:@"\n\t└Phoneme[%@] Msg:%@",pContent,pDpMessage]; 239 | } 240 | 241 | } 242 | buffer=[buffer stringByAppendingString:@"\n"]; 243 | } 244 | } 245 | return buffer; 246 | } 247 | 248 | @end 249 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultWord.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultWord.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import 10 | 11 | /** 12 | * The lable of Word in xml results 13 | */ 14 | @interface ISEResultWord : NSObject 15 | 16 | /** 17 | * Beginning of frame,10ms per frame 18 | */ 19 | @property(nonatomic, assign)int beg_pos; 20 | 21 | /** 22 | * End of frame 23 | */ 24 | @property(nonatomic, assign)int end_pos; 25 | 26 | /** 27 | * Content of Word 28 | */ 29 | @property(nonatomic, strong)NSString* content; 30 | 31 | /** 32 | * Read message:0(Right),16(Skip),32(Duplicate),64(Readback),128(Replace) 33 | */ 34 | @property(nonatomic, assign)int dp_message; 35 | 36 | /** 37 | * The index of Word in chapter(en) 38 | */ 39 | @property(nonatomic, assign)int global_index; 40 | 41 | /** 42 | * The index of Word in sentense(en) 43 | */ 44 | @property(nonatomic, assign)int index; 45 | 46 | /** 47 | * Pin Yin(cn),number represents tone,5 represents light tone,for example, fen1 48 | */ 49 | @property(nonatomic, strong)NSString* symbol; 50 | 51 | /** 52 | * Duration(cn) 53 | */ 54 | @property(nonatomic, assign)int time_len; 55 | 56 | /** 57 | * Total score(en) 58 | */ 59 | @property(nonatomic, assign)float total_score; 60 | 61 | /** 62 | * Syll array in Word 63 | */ 64 | @property(nonatomic, strong)NSMutableArray* sylls; 65 | 66 | @end 67 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultWord.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultWord.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import "ISEResultWord.h" 10 | 11 | @implementation ISEResultWord 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultXmlParser.h: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultXmlParser.h 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import 10 | 11 | @class ISEResult; 12 | 13 | @protocol ISEResultXmlParserDelegate 14 | 15 | -(void)onISEResultXmlParser:(NSXMLParser *)parser Error:(NSError*)error; 16 | -(void)onISEResultXmlParserResult:(ISEResult*)result; 17 | 18 | @end 19 | 20 | @interface ISEResultXmlParser : NSObject 21 | 22 | @property (nonatomic, weak) id delegate; 23 | 24 | /*! 
25 | * parse xml results for ISE 26 | */ 27 | - (void)parserXml:(NSString*) xml; 28 | 29 | @end 30 | -------------------------------------------------------------------------------- /FZSpeakDemo/FZSpeakClass/ISEResultXmlParser/ISEResultXmlParser.m: -------------------------------------------------------------------------------- 1 | // 2 | // ISEResultXmlParser.m 3 | // IFlyMSCDemo 4 | // 5 | // Created by 张剑 on 15/3/6. 6 | // 7 | // 8 | 9 | #import "ISEResultXmlParser.h" 10 | #import "ISEResult.h" 11 | #import "ISEResultPhone.h" 12 | #import "ISEResultSyll.h" 13 | #import "ISEResultWord.h" 14 | #import "ISEResultSentence.h" 15 | #import "ISEResultFinal.h" 16 | #import "ISEResultReadSyllable.h" 17 | #import "ISEResultReadWord.h" 18 | #import "ISEResultReadSentence.h" 19 | 20 | @interface ISEResultXmlParser () 21 | 22 | @property(nonatomic,retain)ISEResult* xmlResult; 23 | 24 | @property(nonatomic,assign)BOOL isPlainResult; 25 | @property(nonatomic,assign)BOOL isRecPaperPassed; 26 | @property(nonatomic,retain)ISEResultPhone* phone; 27 | @property(nonatomic,retain)ISEResultSyll* syll; 28 | @property(nonatomic,retain)ISEResultWord* word; 29 | @property(nonatomic,retain)ISEResultSentence* sentence; 30 | 31 | @end 32 | 33 | @implementation ISEResultXmlParser 34 | 35 | void readTotalResult(ISEResult* result, NSDictionary* attrDic); 36 | ISEResultPhone* createPhone(NSDictionary* attrDic); 37 | ISEResultSyll* createSyll(NSDictionary* attrDic); 38 | ISEResultWord* createWord(NSDictionary* attrDic); 39 | ISEResultSentence* createSentence(NSDictionary* attrDic); 40 | 41 | - (void)clearAllProperty{ 42 | self.isPlainResult=NO; 43 | self.isRecPaperPassed=NO; 44 | self.phone=nil; 45 | self.syll=nil; 46 | self.word=nil; 47 | self.sentence=nil; 48 | 49 | } 50 | 51 | - (void)parserXml:(NSString*) xml{ 52 | 53 | [self clearAllProperty]; 54 | if(xml){ 55 | 56 | self.xmlResult = nil; 57 | 58 | NSData* xmlData=[xml dataUsingEncoding:NSUTF8StringEncoding]; 59 | NSXMLParser *parser = [[NSXMLParser alloc] initWithData:xmlData]; 60 | [parser setShouldProcessNamespaces:NO]; 61 | [parser setShouldReportNamespacePrefixes:NO]; 62 | [parser setShouldResolveExternalEntities:NO]; 63 | 64 | [parser setDelegate:self]; 65 | [parser parse]; 66 | } 67 | else{ 68 | if (self.delegate && [self.delegate respondsToSelector:@selector(onISEResultXmlParserResult:)]) { 69 | [self.delegate onISEResultXmlParserResult:self.xmlResult]; 70 | } 71 | } 72 | } 73 | 74 | 75 | #pragma mark - tools 76 | 77 | 78 | void readTotalResult(ISEResult* result, NSDictionary* attrDic) { 79 | result.beg_pos = [[attrDic objectForKey:@"beg_pos"] intValue]; 80 | result.end_pos = [[attrDic objectForKey:@"end_pos"] intValue]; 81 | result.content = [attrDic objectForKey:@"content"]; 82 | result.total_score = [[attrDic objectForKey:@"total_score"] floatValue]; 83 | result.time_len = [[attrDic objectForKey:@"time_len"] intValue]; 84 | result.except_info = [attrDic objectForKey:@"except_info"]; 85 | result.is_rejected = [[attrDic objectForKey:@"is_rejected"] boolValue]; 86 | } 87 | 88 | ISEResultPhone* createPhone(NSDictionary* attrDic) { 89 | ISEResultPhone* phone=[[ISEResultPhone alloc] init]; 90 | phone.beg_pos = [[attrDic objectForKey:@"beg_pos"] intValue]; 91 | phone.end_pos = [[attrDic objectForKey:@"end_pos"] intValue]; 92 | phone.content = [attrDic objectForKey:@"content"]; 93 | phone.dp_message = [[attrDic objectForKey:@"dp_message"] intValue]; 94 | phone.time_len = [[attrDic objectForKey:@"time_len"] intValue]; 95 | return phone; 96 | } 97 | 98 | 
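// The helpers below (createSyll / createWord / createSentence) follow the same pattern as
// createPhone above: each simply copies the element's XML attributes (beg_pos, end_pos, content,
// symbol, dp_message, time_len, total_score, index, ...) onto the matching model object.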
ISEResultSyll* createSyll(NSDictionary* attrDic) { 99 | ISEResultSyll* syll=[[ISEResultSyll alloc] init]; 100 | syll.beg_pos = [[attrDic objectForKey:@"beg_pos"] intValue]; 101 | syll.end_pos = [[attrDic objectForKey:@"end_pos"] intValue]; 102 | syll.content = [attrDic objectForKey:@"content"]; 103 | syll.symbol = [attrDic objectForKey:@"symbol"]; 104 | syll.dp_message = [[attrDic objectForKey:@"dp_message"] intValue]; 105 | syll.time_len = [[attrDic objectForKey:@"time_len"] intValue]; 106 | return syll; 107 | } 108 | 109 | ISEResultWord* createWord(NSDictionary* attrDic) { 110 | ISEResultWord* word=[[ISEResultWord alloc] init]; 111 | word.beg_pos = [[attrDic objectForKey:@"beg_pos"] intValue]; 112 | word.end_pos = [[attrDic objectForKey:@"end_pos"] intValue]; 113 | word.content = [attrDic objectForKey:@"content"]; 114 | word.symbol = [attrDic objectForKey:@"symbol"]; 115 | word.dp_message = [[attrDic objectForKey:@"dp_message"] intValue]; 116 | word.time_len = [[attrDic objectForKey:@"time_len"] intValue]; 117 | word.total_score = [[attrDic objectForKey:@"total_score"] floatValue]; 118 | word.global_index = [[attrDic objectForKey:@"global_index"] intValue]; 119 | word.index = [[attrDic objectForKey:@"index"] intValue]; 120 | return word; 121 | } 122 | 123 | ISEResultSentence* createSentence(NSDictionary* attrDic) { 124 | ISEResultSentence* sentence=[[ISEResultSentence alloc] init];; 125 | sentence.beg_pos = [[attrDic objectForKey:@"beg_pos"] intValue]; 126 | sentence.end_pos = [[attrDic objectForKey:@"end_pos"] intValue]; 127 | sentence.content = [attrDic objectForKey:@"content"]; 128 | sentence.time_len = [[attrDic objectForKey:@"time_len"] intValue]; 129 | sentence.index = [[attrDic objectForKey:@"index"] intValue]; 130 | sentence.word_count = [[attrDic objectForKey:@"word_count"] intValue]; 131 | return sentence; 132 | } 133 | 134 | #pragma mark - NSXMLParser delegate 135 | - (void) parserDidStartDocument:(NSXMLParser *)parser{ 136 | } 137 | 138 | - (void) parserDidEndDocument:(NSXMLParser *)parser{ 139 | } 140 | 141 | - (void) parser:(NSXMLParser *)parser 142 | didStartElement:(NSString *)elementName 143 | namespaceURI:(NSString *)namespaceURI 144 | qualifiedName:(NSString *)qualifiedName 145 | attributes:(NSDictionary *)attributeDict{ 146 | 147 | 148 | //complete 149 | if([@"rec_paper" isEqualToString:elementName]){ 150 | _isRecPaperPassed=YES; 151 | } 152 | else if([@"read_syllable" isEqualToString:elementName]){ 153 | if(!_isRecPaperPassed){ 154 | _xmlResult=[[ISEResultReadSyllable alloc] init]; 155 | } 156 | else{ 157 | readTotalResult(self.xmlResult, attributeDict); 158 | } 159 | 160 | } 161 | else if([@"read_word" isEqualToString:elementName]){ 162 | if(!_isRecPaperPassed){ 163 | _xmlResult=[[ISEResultReadWord alloc] init]; 164 | NSString* lan=[attributeDict objectForKey:@"lan"]; 165 | _xmlResult.language=lan?lan:@"cn"; 166 | } 167 | else{ 168 | readTotalResult(self.xmlResult, attributeDict); 169 | } 170 | 171 | } 172 | else if([@"read_sentence" isEqualToString:elementName]||[@"read_chapter" isEqualToString:elementName]){ 173 | if(!_isRecPaperPassed){ 174 | _xmlResult=[[ISEResultReadSentence alloc] init]; 175 | NSString* lan=[attributeDict objectForKey:@"lan"]; 176 | _xmlResult.language=lan?lan:@"cn"; 177 | } 178 | else{ 179 | readTotalResult(self.xmlResult, attributeDict); 180 | } 181 | 182 | } 183 | else if([@"sentence" isEqualToString:elementName]){ 184 | if(_xmlResult&&!_xmlResult.sentences){ 185 | _xmlResult.sentences=[[NSMutableArray alloc] init]; 186 | } 187 | 
_sentence=createSentence(attributeDict); 188 | } 189 | else if([@"word" isEqualToString:elementName]){ 190 | if(_sentence && !_sentence.words){ 191 | _sentence.words=[[NSMutableArray alloc] init]; 192 | } 193 | _word=createWord(attributeDict); 194 | } 195 | else if([@"syll" isEqualToString:elementName]){ 196 | if(_word && !_word.sylls){ 197 | _word.sylls=[[NSMutableArray alloc] init]; 198 | } 199 | _syll=createSyll(attributeDict); 200 | } 201 | else if([@"phone" isEqualToString:elementName]){ 202 | if(_syll && !_syll.phones){ 203 | _syll.phones=[[NSMutableArray alloc] init]; 204 | } 205 | _phone=createPhone(attributeDict); 206 | } 207 | 208 | //plain 209 | if([@"FinalResult" isEqualToString:elementName]){ 210 | self.isPlainResult=YES; 211 | _xmlResult = [[ISEResultFinal alloc] init]; 212 | } 213 | else if([@"ret" isEqualToString:elementName]){ 214 | [(ISEResultFinal *)_xmlResult setRet:[[attributeDict objectForKey:@"value"] intValue]]; 215 | } 216 | else if([@"total_score" isEqualToString:elementName]){ 217 | [(ISEResultFinal *)_xmlResult setTotal_score:[[attributeDict objectForKey:@"value"] floatValue]]; 218 | } 219 | else if([@"xml_result" isEqualToString:elementName]){ 220 | self.isPlainResult=NO; 221 | } 222 | 223 | } 224 | 225 | - (void) parser:(NSXMLParser *)parser foundCharacters:(NSString *)string { 226 | 227 | } 228 | 229 | - (void) parser:(NSXMLParser *)parser foundIgnorableWhitespace:(NSString *)whitespaceString{ 230 | 231 | } 232 | 233 | 234 | 235 | - (void) parser:(NSXMLParser *) parser 236 | didEndElement:(NSString *) elementName 237 | namespaceURI:(NSString *) namespaceURI 238 | qualifiedName:(NSString *) qualifiedName{ 239 | 240 | 241 | if([@"phone" isEqualToString:elementName]){ 242 | [_syll.phones addObject:_phone]; 243 | _phone=nil; 244 | } 245 | else if([@"syll" isEqualToString:elementName]){ 246 | [_word.sylls addObject:_syll]; 247 | _syll=nil; 248 | } 249 | else if([@"word" isEqualToString:elementName]){ 250 | [_sentence.words addObject:_word]; 251 | _word=nil; 252 | } 253 | else if([@"sentence" isEqualToString:elementName]){ 254 | [_xmlResult.sentences addObject:_sentence]; 255 | _sentence=nil; 256 | } 257 | else if([@"read_syllable" isEqualToString:elementName] || 258 | [@"read_word" isEqualToString:elementName] || 259 | [@"read_sentence" isEqualToString:elementName] || 260 | [@"read_chapter" isEqualToString:elementName] || 261 | [@"FinalResult" isEqualToString:elementName] ){ 262 | 263 | [parser abortParsing]; 264 | if (self.delegate && [self.delegate respondsToSelector:@selector(onISEResultXmlParserResult:)]) { 265 | [self.delegate onISEResultXmlParserResult:self.xmlResult]; 266 | } 267 | } 268 | 269 | } 270 | 271 | - (void) parser:(NSXMLParser *)parser parseErrorOccurred:(NSError *)parseError{ 272 | if (self.delegate && [self.delegate respondsToSelector:@selector(onISEResultXmlParser:Error:)]) { 273 | [self.delegate onISEResultXmlParser:parser Error:parseError]; 274 | } 275 | } 276 | 277 | @end 278 | 279 | -------------------------------------------------------------------------------- /FZSpeakDemo/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleDisplayName 8 | 语音Demo 9 | CFBundleExecutable 10 | $(EXECUTABLE_NAME) 11 | CFBundleIdentifier 12 | $(PRODUCT_BUNDLE_IDENTIFIER) 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | $(PRODUCT_NAME) 17 | CFBundlePackageType 18 | APPL 19 | CFBundleShortVersionString 20 | 1.0 21 | 
CFBundleVersion 22 | 1 23 | LSRequiresIPhoneOS 24 | 25 | NSMicrophoneUsageDescription 26 | 需要您的同意访问麦克风 27 | UILaunchStoryboardName 28 | LaunchScreen 29 | UIMainStoryboardFile 30 | Main 31 | UIRequiredDeviceCapabilities 32 | 33 | armv7 34 | 35 | UISupportedInterfaceOrientations 36 | 37 | UIInterfaceOrientationPortrait 38 | UIInterfaceOrientationLandscapeLeft 39 | UIInterfaceOrientationLandscapeRight 40 | 41 | UISupportedInterfaceOrientations~ipad 42 | 43 | UIInterfaceOrientationPortrait 44 | UIInterfaceOrientationPortraitUpsideDown 45 | UIInterfaceOrientationLandscapeLeft 46 | UIInterfaceOrientationLandscapeRight 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /FZSpeakDemo/ViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.h 3 | // FZSpeakDemo 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface ViewController : UIViewController 12 | 13 | 14 | @end 15 | 16 | -------------------------------------------------------------------------------- /FZSpeakDemo/ViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // ViewController.m 3 | // FZSpeakDemo 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | 9 | #import "ViewController.h" 10 | #import "FZSpeakClass.h" 11 | #import "FZProgressHudView.h" 12 | 13 | @interface ViewController () 14 | { 15 | UITextView *textV; 16 | NSString *textVStr; 17 | } 18 | 19 | @property (nonatomic, strong) FZProgressHudView *hudView; 20 | 21 | @end 22 | 23 | @implementation ViewController 24 | 25 | - (void)viewDidLoad { 26 | [super viewDidLoad]; 27 | // Do any additional setup after loading the view, typically from a nib. 
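    // The speech features exercised below require the iFlytek MSC SDK to be registered with an
    // appid beforehand, typically in -application:didFinishLaunchingWithOptions:, e.g.
    //   [IFlySpeechUtility createUtility:[NSString stringWithFormat:@"%@=%@", [IFlySpeechConstant APPID], @"YOUR_APPID"]];
    // ("YOUR_APPID" is a placeholder for the appid bound to the bundled iflyMSC.framework.)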
28 | 29 | self.hudView = [[FZProgressHudView alloc] initWithTargetView:[[[UIApplication sharedApplication] windows] lastObject]]; 30 | 31 | textV = [[UITextView alloc]initWithFrame:CGRectMake(10, 10, SCREEN_WIDTH-20, 150)]; 32 | textV.layer.borderWidth = 0.5; 33 | textV.layer.borderColor = [UIColor lightGrayColor].CGColor; 34 | textV.text = @"语音操作,方便生活"; 35 | [self.view addSubview:textV]; 36 | 37 | textVStr = @""; 38 | 39 | NSArray *titleArr = @[@"语音合成",@"语音评测",@"语音听写"]; 40 | for (int i = 0; i < titleArr.count; i++) { 41 | UIButton *btn = [self buttonWithTitle:titleArr[i] frame:CGRectMake(10+i*(10+(SCREEN_WIDTH-40)/3), 180, (SCREEN_WIDTH-40)/3, 40) action:@selector(didClickBtn:) AddView:self.view]; 42 | btn.tag = 1000+i; 43 | } 44 | } 45 | 46 | -(void)didClickBtn:(UIButton *)btn 47 | { 48 | if (btn.tag == 1000) { 49 | //语音合成 50 | [self audioSynthesiz]; 51 | } else if (btn.tag == 1001) { 52 | //语音评测 53 | [self AudioEvaluation]; 54 | } else { 55 | //语音听写 56 | [self AudioRecognizerResult]; 57 | } 58 | } 59 | 60 | #pragma mark --- 语音评测 61 | -(void)AudioEvaluation 62 | { 63 | //语音评测 64 | [FZSpeechEvaluator xf_AudioEvaluationOfText:textV.text callback:^(XF_Audio_Evaluation_Type type, float progress, NSString *resultMsg) { 65 | if (type == XF_Audio_Evaluation_Begain) { 66 | //开始录音 67 | [self.hudView startWork:@"开始录音"]; 68 | } else if (type == XF_Audio_Evaluation_Volume) { 69 | //录音音量 70 | [self.hudView startWork:[NSString stringWithFormat:@"录音音量:%ld",(long)progress]]; 71 | } else if (type == XF_Audio_Evaluation_End) { 72 | //停止录音 73 | [self.hudView startWork:@"停止录音"]; 74 | } else if (type == XF_Audio_Evaluation_Cancel) { 75 | //取消录音 76 | [self.hudView startWork:@"取消录音"]; 77 | } else if (type == XF_Audio_Evaluation_Result) { 78 | //评测结果 79 | if (progress == 0) { 80 | [self.hudView showHudWithFailure:@"你好像读的不对哦" andDuration:1.2]; 81 | } else { 82 | [self.hudView showHudWithSuccess:[NSString stringWithFormat:@"评测“%@”结果评分:%.2f",resultMsg,progress] andDuration:2.0]; 83 | } 84 | } else { 85 | //评测出错 86 | if (progress != 0) { 87 | [self.hudView showHudWithFailure:[NSString stringWithFormat:@"评测出错:code=%.2f,msg=%@",progress,resultMsg] andDuration:1.0]; 88 | } 89 | } 90 | }]; 91 | } 92 | 93 | #pragma mark --- 语音听写 94 | -(void)AudioRecognizerResult 95 | { 96 | //语音听写 97 | [self.hudView startWork:@"请讲话"]; 98 | self->textVStr = @""; 99 | [FZSpeechRecognizer xf_AudioRecognizerResult:^(NSString *resText, NSError *error) { 100 | if (!error) { 101 | self->textVStr = [NSString stringWithFormat:@"%@%@",self->textVStr,resText]; 102 | self->textV.text = self->textVStr; 103 | [self.hudView showHudWithSuccess:resText andDuration:2.0]; 104 | } else { 105 | [self.hudView showHudWithFailure:[NSString stringWithFormat:@"errorCode:%ld errorMsg:%@",(long)[error code],[error localizedDescription]] andDuration:1.0]; 106 | } 107 | }]; 108 | } 109 | 110 | #pragma mark --- 语音合成 111 | -(void)audioSynthesiz 112 | { 113 | //语音合成 114 | [FZSpeechSynthesizer xf_AudioSynthesizeOfText:textV.text fromPeople:@"xiaoyan" callback:^(XF_Audio_Synthesize_Type type, NSInteger progress) { 115 | if (type == XF_Audio_Synthesize_Progress) { 116 | //语音合成进度 117 | [self.hudView startWork:[NSString stringWithFormat:@"正在合成,进度:%ld / 100",(long)progress]]; 118 | } else if (type == XF_Audio_Speak_Begain) { 119 | //开始播放 120 | [self.hudView startWork:@"开始播放"]; 121 | } else if (type == XF_Audio_Speak_Progress) { 122 | //播放进度 123 | [self.hudView startWork:[NSString stringWithFormat:@"正在播放,进度:%ld / 100",(long)progress]]; 124 | } else { 125 | //播放结束 126 | [self.hudView
showHudWithSuccess:@"播放结束" andDuration:0.5]; 127 | } 128 | }]; 129 | } 130 | 131 | #pragma mark --- 创建button公共方法 132 | /**使用示例:[self buttonWithTitle:@"点 击" frame:CGRectMake((self.view.frame.size.width - 150)/2, (self.view.frame.size.height - 40)/3, 150, 40) action:@selector(didClickButton) AddView:self.view];*/ 133 | -(UIButton *)buttonWithTitle:(NSString *)title frame:(CGRect)frame action:(SEL)action AddView:(id)view 134 | { 135 | UIButton *button = [UIButton buttonWithType:UIButtonTypeSystem]; 136 | button.frame = frame; 137 | button.backgroundColor = [UIColor lightGrayColor]; 138 | [button setTitle:title forState:UIControlStateNormal]; 139 | [button addTarget:self action:action forControlEvents:UIControlEventTouchDown]; 140 | [view addSubview:button]; 141 | return button; 142 | } 143 | 144 | - (void)didReceiveMemoryWarning { 145 | [super didReceiveMemoryWarning]; 146 | // Dispose of any resources that can be recreated. 147 | } 148 | 149 | 150 | @end 151 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyAudioSession.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyAudioSession.h 3 | // MSCDemo 4 | // 5 | // Created by AlexHHC on 1/9/14. 6 | // 7 | // 8 | 9 | #import 10 | 11 | /** 12 | * 音频环境初始化,设置AVAudioSession的Category属性。 13 | */ 14 | @interface IFlyAudioSession : NSObject 15 | 16 | /** 17 | * 初始化播音环境,主要用于合成播放器。 18 | * 19 | * 此接口主要根据原来的音频环境,重新优化设置AVAudioSession的Category属性值。
20 | * 若原来的Category属性值为AVAudioSessionCategoryPlayAndRecord,则添加AVAudioSessionCategoryOptionDefaultToSpeaker|AVAudioSessionCategoryOptionAllowBluetooth选项;若为其他Category属性值且isMPCenter为NO,则设置Category属性值为AVAudioSessionCategoryPlayback,选项为AVAudioSessionCategoryOptionMixWithOthers;若为其他Category属性值且isMPCenter为YES,则保持原来的设置,不做任何更改。 21 | * 22 | * @param isMPCenter 是否初始化MPPlayerCenter:0不初始化,1初始化。此参数只在AVAudioSession的Category属性值不为AVAudioSessionCategoryPlayAndRecord时设置有效。 23 | */ 24 | +(void) initPlayingAudioSession:(BOOL)isMPCenter; 25 | 26 | /** 27 | * 初始化录音环境,主要用于识别录音器。 28 | * 29 | * 设置AVAudioSession的Category属性值为AVAudioSessionCategoryPlayAndRecord,选项为AVAudioSessionCategoryOptionDefaultToSpeaker|AVAudioSessionCategoryOptionAllowBluetooth。 30 | * 31 | * @return 成功返回YES,失败返回NO 32 | */ 33 | +(BOOL) initRecordingAudioSession; 34 | 35 | @end 36 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyContact.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyContact.h 3 | // msc 4 | // 5 | // Created by ypzhao on 13-3-1. 6 | // Copyright (c) 2013年 IFLYTEK. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | /*! 12 | * 此接口为获取通信录中的联系人。
13 | * 获取联系人是为了在进行语音识别时(sms)能更好的识别出您说的人名,联系人上传是属于个性化的一部分。 14 | */ 15 | @interface IFlyContact : NSObject 16 | 17 | /*! 18 | * 获取联系人。
19 | * 调用此方法需要添加 AddressBook.framework 和 Contacts.framework到工程中,调用此方法后可以直接将通信录中的联系人转化为语音云识别的数据结构。您可以将获取的数据通过IFlyDataUploader类,上传到语音云,我们只获取通信录中的人名。 20 | * 21 | * @return 返回联系人信息 22 | */ 23 | - (NSString *) contact; 24 | @end 25 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyDataUploader.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyDataUploader.h 3 | // MSC 4 | // 5 | // Created by ypzhao on 13-4-8. 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | 12 | @class IFlySpeechError; 13 | 14 | /*! 15 | * 数据上传类,主要用于上传语法文件或上传联系人、词表等个性化数据。 16 | */ 17 | @interface IFlyDataUploader : NSObject 18 | 19 | /*! 20 | * 数据名称 21 | */ 22 | @property(nonatomic,copy) NSString *dataName; 23 | /*! 24 | * 数据 25 | */ 26 | @property(nonatomic,copy) NSString *data; 27 | 28 | /*! 29 | * 上传完成回调 30 | * 31 | * @param result 结果 32 | * @param error 错误码 33 | */ 34 | typedef void(^IFlyUploadDataCompletionHandler)(NSString* result,IFlySpeechError * error); 35 | 36 | /*! 37 | * 上传数据 38 | * 此函数用于上传数据,下载的过程是**异步**的。 39 | * 40 | * @param completionHandler -[in] 上传完成回调 41 | * @param name -[in] 上传的内容名称,名称最好和你要上传的数据内容相关,不可以为nil 42 | * @param data -[in] 上传的数据,以utf8编码,不可以为nil 43 | */ 44 | - (void) uploadDataWithCompletionHandler:(IFlyUploadDataCompletionHandler)completionHandler name:(NSString *)name data:(NSString *)data; 45 | 46 | /*! 47 | * 设置上传数据参数 48 | * 49 | * @param parameter 参数值 50 | * @param key 参数名 51 | */ 52 | -(void) setParameter:(NSString*) parameter forKey:(NSString*) key; 53 | 54 | @end 55 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyDebugLog.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyDebugLog.h 3 | // MSC 4 | 5 | // description: 程序中的log处理类 6 | 7 | // Created by ypzhao on 12-11-22. 8 | // Copyright (c) 2012年 iflytek. All rights reserved. 9 | // 10 | 11 | #import 12 | 13 | /*! 14 | * 调试信息 15 | */ 16 | @interface IFlyDebugLog : NSObject 17 | 18 | /*! 19 | * 打印调试信息 20 | * 21 | * @param format -[in] 要打印的内容格式 22 | * @param ... -[in] 要打印的内容 23 | */ 24 | + (void) showLog:(NSString *)format, ...; 25 | 26 | /*! 27 | * 将log写入文件中 28 | */ 29 | + (void) writeLog; 30 | 31 | /*! 32 | * 设置是否显示log 33 | * 34 | * @param showLog YES:显示;NO:不显示 35 | */ 36 | + (void) setShowLog:(BOOL) showLog; 37 | @end 38 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyISVDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyISVDelegate.h 3 | // msc_UI 4 | // 5 | // Created by admin on 14-9-15. 6 | // Copyright (c) 2014年 iflytek. All rights reserved. 7 | // 8 | 9 | 10 | #import 11 | 12 | @class IFlySpeechError; 13 | 14 | /*! 15 | * 声纹回调协议 16 | */ 17 | @protocol IFlyISVDelegate 18 | 19 | /*! 20 | * 声纹结果回调 21 | * 22 | * @param dic 结果 23 | */ 24 | -(void) onResult:(NSDictionary *)dic; 25 | 26 | /*! 27 | * 错误码回调 28 | * 29 | * @param errorCode 错误码 30 | */ 31 | -(void) onCompleted:(IFlySpeechError *) errorCode; 32 | 33 | @optional 34 | 35 | /*! 36 | * 等待结果 37 | */ 38 | -(void) onRecognition; 39 | 40 | /*! 
41 | * 音量改变回调 42 | * 43 | * @param volume 音量值 44 | */ 45 | -(void) onVolumeChanged: (int)volume; 46 | 47 | @end 48 | 49 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyISVRecognizer.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyISVRecognizer.h 3 | // ISV 4 | // 5 | // Created by wangdan on 14-9-6. 6 | // Copyright (c) 2014年 IFlyTEK. All rights reserved. 7 | // 8 | 9 | 10 | #import 11 | #import "IFlyISVDelegate.h" 12 | 13 | /** 14 | * 声纹接口类 15 | */ 16 | @interface IFlyISVRecognizer : NSObject 17 | { 18 | 19 | } 20 | 21 | /*! 22 | * The delegate of IFlyISVRecognizer, responding to IFlyISVDelegate. 23 | */ 24 | @property (assign) id<IFlyISVDelegate> delegate; 25 | 26 | 27 | /*! 28 | * IFlyISVRecognizer is a singleton class. The function can be used as below:
29 | * IFlyISVRecognizer *recognizer = [IFlyISVRecognizer sharedInstance]; 30 | */ 31 | +(instancetype) sharedInstance; 32 | 33 | 34 | /*! 35 | * Generate a serial number password
36 | * Principle:
37 | * 1. The number serial does not contain the digit 1;
38 | * 2. The number serial has no repeated digits ("98765432" is right while "99876543" is wrong) 39 | * 40 | * @param length the serial number's length; the length of "98765432" is 8. Generally length is 8 and other values are forbidden 41 | */ 42 | -(NSString*) generatePassword:(int)length; 43 | 44 | 45 | 46 | /*! 47 | * Used to get the password from the server 48 | * 49 | * @param pwdt when pwdt is 1, the function will return Chinese text; while pwdt is 2, the function will return a number serial 50 | */ 51 | -(NSArray*) getPasswordList:(int)pwdt; 52 | 53 | 54 | /*! 55 | * Used to judge if the engine is listening 56 | * 57 | * @return YES: the engine is listening;
No: the engine is not listening 58 | */ 59 | -(BOOL) isListening; 60 | 61 | 62 | 63 | /*! 64 | * Used to query or delete the voiceprint model on the server 65 | * 66 | * @param cmd "del": delete model;
"que": query model; 67 | * @param authid user id, can be @"tianxia" or other; 68 | * @param pwdt voiceprint type
69 | * 1: fixed txt voiceprint code, like @"我的地盘我做主";
70 | * 2: free voiceprint code, user can speak anything, but the speech of the 5 training passes shall be the same;
71 | * 3: number serial voiceprint code, like @"98765432" and so on. 72 | * @param ptxt voiceprint txt; only fixed voiceprint and number serial have this; in free voiceprint mode this param shall be set to nil. 73 | * @param vid another voiceprint type model; user can use this to query or delete the model on the server; can be @"jakillasdfasdjjjlajlsdfhdfdsadff", totally 32 bits;
74 | * NOTES:
75 | * when vid is not nil,then the server will judge the vid first; while the vid is nil, server can still query or delete the voiceprint model by other params. 76 | */ 77 | -(BOOL) sendRequest:(NSString*)cmd authid:(NSString *)auth_id pwdt:(int)pwdt ptxt:(NSString *)ptxt vid:(NSString *)vid err:(int *)err; 78 | 79 | 80 | /*! 81 | * Set the voiceprint params 82 | * 83 | * | key | value | 84 | * |:---------------:|:-------------------------------------------------:| 85 | * | sst | @"train" or @"verify" | 86 | * | auth_id | @"tianxia" or other | 87 | * | sub | @"ivp" | 88 | * | ptxt | | 89 | * | rgn | @"5" | 90 | * | pwdt | @"1",or @"2", or @"3" | 91 | * | auf | @"audio/L16;rate=16000" or @"audio/L16;rate=8000" | 92 | * | vad_enable | @"1" or @"0" | 93 | * | vad_timeout | @"3000" | 94 | * | vad_speech_tail | @"100" | 95 | * 96 | * @param value 参数值 97 | * @param key 参数类型 98 | * 99 | * @return 设置成功返回YES,失败返回NO 100 | */ 101 | -(BOOL) setParameter:(NSString *)value forKey:(NSString *)key; 102 | 103 | 104 | 105 | /*! 106 | * Get the voiceprint params used the same as function of setParameter 107 | */ 108 | -(NSString*) getParameter:(NSString *)key; 109 | 110 | 111 | /*! 112 | * Start recording 113 | */ 114 | -(void) startListening; 115 | 116 | 117 | /*! 118 | * Stop recording 119 | */ 120 | -(void) stopListening; 121 | 122 | 123 | /*! 124 | * Cancel recording,like function stopListening 125 | */ 126 | -(void) cancel; /* cancel recognization */ 127 | 128 | 129 | 130 | 131 | @end 132 | 133 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyMSC.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyMSC.h 3 | // msc 4 | // 5 | // Created by 张剑 on 15/1/14. 6 | // Copyright (c) 2015年 iflytek. All rights reserved. 7 | // 8 | 9 | #ifndef MSC_IFlyMSC_h 10 | #define MSC_IFlyMSC_h 11 | 12 | #import "IFlyAudioSession.h" 13 | #import "IFlyContact.h" 14 | #import "IFlyDataUploader.h" 15 | #import "IFlyDebugLog.h" 16 | #import "IFlyISVDelegate.h" 17 | #import "IFlyISVRecognizer.h" 18 | #import "IFlyRecognizerView.h" 19 | #import "IFlyRecognizerViewDelegate.h" 20 | #import "IFlyResourceUtil.h" 21 | #import "IFlySetting.h" 22 | #import "IFlySpeechConstant.h" 23 | #import "IFlySpeechError.h" 24 | #import "IFlySpeechEvaluator.h" 25 | #import "IFlySpeechEvaluatorDelegate.h" 26 | #import "IFlySpeechEvent.h" 27 | #import "IFlySpeechRecognizer.h" 28 | #import "IFlySpeechRecognizerDelegate.h" 29 | #import "IFlySpeechSynthesizer.h" 30 | #import "IFlySpeechSynthesizerDelegate.h" 31 | #import "IFlySpeechUtility.h" 32 | #import "IFlyUserWords.h" 33 | #import "IFlyPcmRecorder.h" 34 | #import "IFlyVoiceWakeuper.h" 35 | #import "IFlyVoiceWakeuperDelegate.h" 36 | 37 | 38 | 39 | #endif 40 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyPcmRecorder.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyPcmRecorder.h 3 | // MSC 4 | 5 | // description: 6 | 7 | // Created by ypzhao on 12-11-15. 8 | // Copyright (c) 2012年 iflytek. All rights reserved. 9 | // 10 | 11 | #import 12 | 13 | #import 14 | #import 15 | #import 16 | #import 17 | #import 18 | 19 | 20 | @class IFlyPcmRecorder; 21 | 22 | /*! 23 | * 录音协议 24 | */ 25 | @protocol IFlyPcmRecorderDelegate 26 | 27 | /*! 
28 | * 回调音频数据 29 | * 30 | * @param buffer 音频数据 31 | * @param size 表示音频的长度 32 | */ 33 | - (void) onIFlyRecorderBuffer: (const void *)buffer bufferSize:(int)size; 34 | 35 | /*! 36 | * 回调音频的错误码 37 | * 38 | * @param recoder 录音器 39 | * @param error 错误码 40 | */ 41 | - (void) onIFlyRecorderError:(IFlyPcmRecorder*)recoder theError:(int) error; 42 | 43 | @optional 44 | 45 | /*! 46 | * 回调录音音量 47 | * 48 | * @param power 音量值 49 | */ 50 | - (void) onIFlyRecorderVolumeChanged:(int) power; 51 | 52 | @end 53 | 54 | 55 | /*! 56 | * 录音器控件 57 | */ 58 | @interface IFlyPcmRecorder : NSObject 59 | 60 | /*! 61 | * 录音委托对象 62 | */ 63 | @property (nonatomic,assign) id delegate; 64 | 65 | /*! 66 | * 用于设置是否在录音结束后发送Deactive通知,默认是YES:发送 67 | */ 68 | @property (nonatomic,assign) BOOL isNeedDeActive; 69 | 70 | /*! 71 | * 单例模式 72 | * 73 | * @return 返回录音对象单例 74 | */ 75 | + (instancetype) sharedInstance; 76 | 77 | /*! 78 | * 开始录音 79 | * 80 | * @return 开启录音成功返回YES,否则返回NO 81 | */ 82 | - (BOOL) start; 83 | 84 | /*! 85 | * 停止录音 86 | */ 87 | - (void) stop; 88 | 89 | /*! 90 | * 设置音频采样率 91 | * 92 | * @param rate -[in] 采样率,8k/16k 93 | */ 94 | - (void) setSample:(NSString *) rate; 95 | 96 | /*! 97 | * 设置录音音量回调时间间隔参数 98 | */ 99 | - (void) setPowerCycle:(float) cycle; 100 | 101 | /*! 102 | * 保存录音 103 | * 104 | * @param savePath 音频保存路径 105 | */ 106 | -(void) setSaveAudioPath:(NSString *)savePath; 107 | 108 | /*! 109 | * 录音器是否完成 110 | * 111 | * @return 录音器完全结束返回YES,否则返回NO 112 | */ 113 | -(BOOL) isCompleted; 114 | 115 | @end 116 | 117 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyRecognizerView.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyRecognizerView.h 3 | // MSC 4 | // 5 | // Created by admin on 13-4-16. 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | 12 | @protocol IFlyRecognizerViewDelegate ; 13 | 14 | /*! 15 | * 语音识别控件
16 | * 录音时触摸控件结束录音,开始识别(相当于旧版的停止);触摸其他位置,取消录音,结束会话(取消)
17 | * 出错时触摸控件,重新开启会话(相当于旧版的再说一次);触摸其他位置,取消录音,结束会话(取消) 18 | * 19 | */ 20 | @interface IFlyRecognizerView : UIView 21 | 22 | /*! 23 | * 设置委托对象 24 | */ 25 | @property(nonatomic,assign)id delegate; 26 | 27 | /*! 28 | * 初始化控件 29 | * 30 | * @param origin 控件左上角的坐标 31 | * 32 | * @return IFlyRecognizerView 对象 33 | */ 34 | - (id)initWithOrigin:(CGPoint)origin; 35 | 36 | /*! 37 | * 初始化控件 38 | * 39 | * @param center 控件中心的坐标 40 | * 41 | * @return IFlyRecognizerView 对象 42 | */ 43 | - (id) initWithCenter:(CGPoint)center; 44 | 45 | /*! 46 | * 设置横竖屏自适应 47 | * 48 | * @param autoRotate 默认值YES,横竖屏自适应 49 | */ 50 | - (void) setAutoRotate:(BOOL)autoRotate; 51 | 52 | /* 53 | * | ------------- |----------------------------------------------------------- 54 | * | 参数 | 描述 55 | * | ------------- |----------------------------------------------------------- 56 | * | domain |应用的领域: 取值为:iat、search、video、poi、music、asr; 57 | * | | iat:普通文本听写; 58 | * | | search:热词搜索; 59 | * | | video:视频音乐搜索; 60 | * | | asr:关键词识别; 61 | * | ------------- |----------------------------------------------------------- 62 | * | vad_bos |前端点检测: 静音超时时间,即用户多长时间不说话则当做超时处理; 单位:ms; 63 | * | | engine指定iat识别默认值为5000; 64 | * | | 其他情况默认值为 4000,范围 0-10000。 65 | * | ------------- |----------------------------------------------------------- 66 | * | vad_eos |后断点检测: 后端点静音检测时间,即用户停止说话多长时间内即认为不再输入, 67 | * | | 自动停止录音;单位:ms; 68 | * | | sms 识别默认值为 1800; 69 | * | | 其他默认值为 700,范围 0-10000。 70 | * | ------------- |----------------------------------------------------------- 71 | * | sample_rate |采样率:目前支持的采样率设置有 16000 和 8000。 72 | * | ------------- |----------------------------------------------------------- 73 | * | asr_ptt |标点符号设置: 默认为 1,当设置为 0 时,将返回无标点符号文本。 74 | * | ------------- |----------------------------------------------------------- 75 | * | result_type |返回结果的数据格式: 可设置为json,xml,plain,默认为json。 76 | * | ------------- |----------------------------------------------------------- 77 | * | grammarID |识别的语法id: 只针对 domain 设置为”asr”的应用。 78 | * | ------------- |----------------------------------------------------------- 79 | * | asr_audio_path|音频文件名: 设置此参数后,将会自动保存识别的录音文件。 80 | * | | 路径为Documents/(指定值)。 81 | * | | 不设置或者设置为nil,则不保存音频。 82 | * | ------------- |----------------------------------------------------------- 83 | * | params |扩展参数: 对于一些特殊的参数可在此设置,一般用于设置语义。 84 | * | ------------- |----------------------------------------------------------- 85 | * 86 | */ 87 | 88 | /*! 89 | * 设置识别引擎的参数 90 | * 91 | * 识别的引擎参数(key)取值如下:
92 | * 93 | * | 参数 | 描述 | 94 | * |-----------------|-------------------------------------------------------| 95 | * | domain | 应用的领域: 取值为:iat、search、video、poi、music、asr;
iat:普通文本听写;
search:热词搜索;
video:视频音乐搜索;
asr:关键词识别;| 96 | * | vad_bos | 前端点检测: 静音超时时间,即用户多长时间不说话则当做超时处理; 单位:ms;
engine指定iat识别默认值为5000;
其他情况默认值为 4000,范围 0-10000。| 97 | * | vad_eos | 后断点检测: 后端点静音检测时间,即用户停止说话多长时间内即认为不再输入,自动停止录音;单位:ms;
sms 识别默认值为 1800;
其他默认值为 700,范围 0-10000。| 98 | * | sample_rate | 采样率:目前支持的采样率设置有 16000 和 8000。| 99 | * | asr_ptt | 标点符号设置: 默认为 1,当设置为 0 时,将返回无标点符号文本。| 100 | * | result_type | 返回结果的数据格式: 可设置为json,xml,plain,默认为json。| 101 | * | grammarID | 识别的语法id: 只针对 domain 设置为”asr”的应用。| 102 | * | asr_audio_path | 音频文件名: 设置此参数后,将会自动保存识别的录音文件。
路径为Documents/(指定值)。
不设置或者设置为nil,则不保存音频。| 103 | * | params | 扩展参数: 对于一些特殊的参数可在此设置,一般用于设置语义。| 104 | * 105 | * @param value 参数对应的取值 106 | * @param key 识别引擎参数 107 | * 108 | * @return 成功返回YES;失败返回NO 109 | */ 110 | -(BOOL) setParameter:(NSString *) value forKey:(NSString*)key; 111 | 112 | /*! 113 | * 获取识别引擎参数 114 | * 115 | * @param key 参数key 116 | * 117 | * @return 参数值 118 | */ 119 | -(NSString*) parameterForKey:(NSString *)key; 120 | 121 | /*! 122 | * 开始识别 123 | * 124 | * @return 成功返回YES;失败返回NO 125 | */ 126 | - (BOOL)start; 127 | 128 | /*! 129 | * 取消本次识别 130 | */ 131 | - (void)cancel; 132 | 133 | 134 | @end 135 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyRecognizerViewDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyRecognizerDelegate.h 3 | // MSC 4 | // 5 | // Created by admin on 13-4-16. 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @class IFlyRecognizerView; 12 | @class IFlySpeechError; 13 | 14 | /*! 15 | * 识别回调委托 16 | */ 17 | @protocol IFlyRecognizerViewDelegate 18 | 19 | /*! 20 | * 回调返回识别结果 21 | * 22 | * @param resultArray 识别结果,NSArray的第一个元素为NSDictionary,NSDictionary的key为识别结果,sc为识别结果的置信度 23 | * @param isLast -[out] 是否最后一个结果 24 | */ 25 | - (void)onResult:(NSArray *)resultArray isLast:(BOOL) isLast; 26 | 27 | /*! 28 | * 识别结束回调 29 | * 30 | * @param error 识别结束错误码 31 | */ 32 | - (void)onCompleted: (IFlySpeechError *) error; 33 | 34 | @optional 35 | 36 | @end 37 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyResourceUtil.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyResourceUtil.h 3 | // MSCDemo 4 | // 5 | // Created by admin on 14-6-20. 6 | // Copyright (c) 2014年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | /*! 12 | * 资源工具类 13 | */ 14 | @interface IFlyResourceUtil : NSObject 15 | 16 | /*! 17 | * 获取通过MSPSetParam,启动引擎的标识 18 | * 19 | * @return 通过MSPSetParam,启动引擎的标识 20 | */ 21 | +(NSString*) ENGINE_START; 22 | 23 | /*! 24 | * 获取通过MSPSetParam,销毁引擎的标识 25 | * 26 | * @return 通过MSPSetParam,销毁引擎的标识 27 | */ 28 | +(NSString*) ENGINE_DESTROY; 29 | 30 | /*! 31 | * 获取识别引擎的资源目录标识 32 | * 33 | * @return 识别引擎的资源目录标识 34 | */ 35 | +(NSString*) ASR_RES_PATH; 36 | 37 | /*! 38 | * 得到语法构建目录 39 | * 40 | * @return 语法构建目录 41 | */ 42 | +(NSString*) GRM_BUILD_PATH; 43 | 44 | /*! 45 | * 获取合成引擎的资源目录标识,同时需要先传入voice_name方可生效 46 | * 47 | * @return 合成引擎的资源目录标识,同时需要先传入voice_name方可生效 48 | */ 49 | +(NSString*) TTS_RES_PATH; 50 | 51 | /*! 52 | * 获取唤醒资源的资源目录标识 53 | * 54 | * @return 唤醒资源的资源目录标识 55 | */ 56 | +(NSString*) IVW_RES_PATH; 57 | 58 | /*! 59 | * 语法类型 60 | * 61 | * @return 语法类型 62 | */ 63 | +(NSString*) GRAMMARTYPE; 64 | 65 | /*! 66 | * 语记SDK专用参数,用于设置本地默认资源路径 67 | * 68 | * @return 本地默认资源路径key字符串 69 | */ 70 | +(NSString*) PLUS_LOCAL_DEFAULT_RES_PATH; 71 | 72 | #pragma mark - 73 | /*! 
74 | * 资源存放路径 75 | * 76 | * @param path 设置的路径 77 | * 78 | * @return 资源目录 79 | */ 80 | +(NSString*) generateResourcePath:(NSString *)path; 81 | 82 | /** 83 | * 获得离线发音人对应的id 84 | * 85 | * @param voiceName 发音人名称 86 | * 87 | * @return 有,发音人对应的id;无,返回nil 88 | */ 89 | +(NSString*) identifierForVoiceName:(NSString*)voiceName; 90 | @end 91 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySetting.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySetting.h 3 | // MSC 4 | // 5 | // Created by iflytek on 13-4-12. 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | /*! 12 | * 日志打印等级 13 | */ 14 | typedef NS_OPTIONS(NSInteger, LOG_LEVEL){ 15 | /*! 16 | * 全部打印 17 | */ 18 | LVL_ALL = -1, 19 | /*! 20 | * 高,异常分析需要的级别 21 | */ 22 | LVL_DETAIL = 31, 23 | /*! 24 | * 中,打印基本日志信息 25 | */ 26 | LVL_NORMAL = 15, 27 | /*! 28 | * 低,只打印主要日志信息 29 | */ 30 | LVL_LOW = 7, 31 | /*! 32 | * 不打印 33 | */ 34 | LVL_NONE = 0 35 | }; 36 | 37 | /*! 38 | * 此接口为iflyMSC sdk 配置接口。
39 | * 可以获取版本号,设置日志打印等级等 40 | */ 41 | @interface IFlySetting : NSObject 42 | 43 | /*! 44 | * 获取版本号 45 | * 46 | * @return 版本号 47 | */ 48 | + (NSString *) getVersion; 49 | 50 | /*! 51 | * 获取日志等级 52 | * 53 | * @return 返回日志等级 54 | */ 55 | + (LOG_LEVEL) logLvl; 56 | 57 | /*! 58 | * 是否打印控制台log
59 | * 在软件发布时,建议关闭此log。 60 | * 61 | * @param showLog -[in] YES,打印log;NO,不打印 62 | */ 63 | + (void) showLogcat:(BOOL) showLog; 64 | 65 | /*! 66 | * 设置日志msc.log生成路径以及日志等级 67 | * 68 | * | 日志打印等级 | 描述 | 69 | * |------------------------|-----------------------------------| 70 | * | LVL_ALL | 全部打印 | 71 | * | LVL_DETAIL | 高,异常分析需要的级别 | 72 | * | LVL_NORMAL | 中,打印基本日志信息 | 73 | * | LVL_LOW | 低,只打印主要日志信息 | 74 | * | LVL_NONE | 不打印 | 75 | * 76 | * @param level -[in] 日志打印等级 77 | */ 78 | + (void) setLogFile:(LOG_LEVEL) level; 79 | 80 | /*! 81 | * 设置日志文件的路径
82 | * 日志文件默认存放在Documents目录。 83 | * 84 | * @param path -[in] 日志文件的全路径 85 | */ 86 | + (void) setLogFilePath:(NSString*) path; 87 | 88 | @end 89 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySpeechConstant.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySpeechConstant.h 3 | // MSCDemo 4 | // 5 | // Created by iflytek on 5/9/14. 6 | // Copyright (c) 2014 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | /*! 12 | * 公共常量类
13 | * 主要定义参数的key value值 14 | */ 15 | @interface IFlySpeechConstant : NSObject 16 | 17 | 18 | #pragma mark - 通用参数key 19 | 20 | /*! 21 | * 语音应用ID
22 | * 通过开发者网站申请 23 | * 24 | * @return 语音应用IDkey 25 | */ 26 | +(NSString*)APPID; 27 | 28 | 29 | /*! 30 | * 语言区域。 31 | * 32 | * @return 语言区域key。 33 | */ 34 | +(NSString*)ACCENT; 35 | 36 | /*! 37 | * 语言区域。 38 | * 39 | * @return 普通话value。 40 | */ 41 | +(NSString*)ACCENT_MANDARIN; 42 | 43 | /*! 44 | * 语言区域。 45 | * 46 | * @return 河南话value。 47 | */ 48 | +(NSString*)ACCENT_HENANESE; 49 | 50 | /*! 51 | * 语言区域。 52 | * 53 | * @return 四川话value。 54 | */ 55 | +(NSString*)ACCENT_SICHUANESE; 56 | 57 | /*! 58 | * 语言区域。 59 | * 60 | * @return 粤语value。 61 | */ 62 | +(NSString*)ACCENT_CANTONESE; 63 | 64 | /*! 65 | * 语言
66 | * 支持:zh_cn,zh_tw,en_us
67 | * 68 | * @return 语言key 69 | */ 70 | +(NSString*)LANGUAGE; 71 | 72 | /*! 73 | * 语言 74 | * 75 | * @return 中文value 76 | */ 77 | +(NSString*)LANGUAGE_CHINESE; 78 | 79 | 80 | /*! 81 | * 语言 82 | * 83 | * @return 中文台湾value 84 | */ 85 | +(NSString*)LANGUAGE_CHINESE_TW; 86 | 87 | /*! 88 | * 语言 89 | * 90 | * @return 英文value 91 | */ 92 | +(NSString*)LANGUAGE_ENGLISH; 93 | 94 | /*! 95 | * 返回结果的数据格式,可设置为json,xml,plain,默认为json。 96 | * 97 | * @return 返回结果的数据格式key 98 | */ 99 | +(NSString*)RESULT_TYPE; 100 | 101 | /*! 102 | * 应用领域。 103 | * 104 | * @return 应用领域key 105 | */ 106 | +(NSString*)IFLY_DOMAIN; 107 | 108 | /*! 109 | * 个性化数据上传类型 110 | * 111 | * @return 个性化数据上传类型key 112 | */ 113 | +(NSString*)DATA_TYPE; 114 | 115 | /*! 116 | * 语音输入超时时间
117 | * 单位:ms,默认30000 118 | * 119 | * @return 语音输入超时时间key 120 | */ 121 | +(NSString*)SPEECH_TIMEOUT; 122 | 123 | /*! 124 | * 网络连接超时时间
125 | * 单位:ms,默认20000 126 | * 127 | * @return 网络连接超时时间key 128 | */ 129 | +(NSString*)NET_TIMEOUT; 130 | 131 | /*! 132 | * 业务类型。 133 | * 134 | * @return 业务类型key。 135 | */ 136 | +(NSString*)SUBJECT; 137 | 138 | /*! 139 | * 扩展参数。 140 | * 141 | * @return 扩展参数key。 142 | */ 143 | +(NSString*)PARAMS; 144 | 145 | /*! 146 | * 加密参数 147 | * 148 | * 支持类型:ssl 加密 tcp 非加密 默认:tcp
149 | * 建议对安全性要求较高时使用ssl。 150 | * 151 | * @return 加密参数key 152 | */ 153 | +(NSString*)PROT_TYPE; 154 | 155 | /*! 156 | * ssl证书内容 157 | * 158 | * @return ssl证书内容key 159 | */ 160 | +(NSString*)SSL_CERT; 161 | 162 | /*! 163 | * 录音音量返回时间间隔。 164 | * 165 | * @return 间隔key。 166 | */ 167 | +(NSString*)POWER_CYCLE; 168 | 169 | /*! 170 | * 合成、识别、唤醒、评测、声纹等业务采样率。 171 | * 172 | * @return 合成及识别采样率key。 173 | */ 174 | +(NSString*)SAMPLE_RATE; 175 | 176 | /*! 177 | * 合成、识别、唤醒、评测、声纹等业务采样率。 178 | * 179 | * @return 合成及识别采样率8K Value。 180 | */ 181 | +(NSString*)SAMPLE_RATE_8K; 182 | 183 | /*! 184 | * 合成、识别、唤醒、评测、声纹等业务采样率。 185 | * 186 | * @return 合成及识别采样率16K Value。 187 | */ 188 | +(NSString*)SAMPLE_RATE_16K; 189 | 190 | /*! 191 | * 引擎类型。
192 | * 可选:local,cloud,auto
193 | * 默认:auto 194 | * 195 | * @return 引擎类型key。 196 | */ 197 | +(NSString*)ENGINE_TYPE; 198 | 199 | /*! 200 | * 本地识别引擎。 201 | * 202 | * @return 本地识别引擎value。 203 | */ 204 | +(NSString*)TYPE_LOCAL; 205 | 206 | /*! 207 | * 云端识别引擎。 208 | * 209 | * @return 云端识别引擎value。 210 | */ 211 | +(NSString*)TYPE_CLOUD; 212 | 213 | /*! 214 | * 混合识别引擎。 215 | * 216 | * @return 混合识别引擎value。 217 | */ 218 | +(NSString*)TYPE_MIX; 219 | 220 | /*! 221 | * 引擎根据当前配置进行选择。 222 | * 223 | * @return 引擎根据当前配置进行选择value。 224 | */ 225 | +(NSString*)TYPE_AUTO; 226 | 227 | /*! 228 | * 输入文本编码格式。 229 | * 230 | * @return 编码格式key。 231 | */ 232 | +(NSString*)TEXT_ENCODING; 233 | 234 | /*! 235 | * 结果编码格式。 236 | * 237 | * @return 结果编码格式key。 238 | */ 239 | +(NSString*)RESULT_ENCODING; 240 | 241 | /*! 242 | * 是否初始化播放器
243 | * SDK内部播放器采用音频队列实现,有部分外部需求需要自定义音频队列,可以通过此开关控制
244 | * 0:不初始化,非0或者参数为空:初始化,默认初始化 245 | * 246 | * @return 是否初始化播放器参数key 247 | */ 248 | +(NSString*)PLAYER_INIT; 249 | 250 | /*! 251 | * 是否播放器结束后发送deactive系统通知
252 | * SDK内部播放器结束后可通过此开关发送deactive系统通知,使其他被中断的音频应用解除中断
253 | * 0:不发送,非0或者参数为空:发送,默认发送 254 | * 255 | * @return 是否播放器结束后发送deactive系统通知参数key 256 | */ 257 | +(NSString*)PLAYER_DEACTIVE; 258 | 259 | /** 260 | * 是否初始化录音器
261 | * SDK内部录音器采用音频队列实现,有部分外部需求需要自定义音频队列,可以通过此开关控制
262 | * 0:不初始化,非0或者参数为空:初始化,默认初始化 263 | * 264 | * @return 是否初始化录音器参数key 265 | */ 266 | +(NSString*)RECORDER_INIT; 267 | 268 | /** 269 | * 是否录音器结束后发送deactive系统通知
270 | * SDK内部录音器结束后可通过此开关发送deactive系统通知,使其他被中断的音频应用解除中断
271 | * 0:不发送,非0或者参数为空:发送,默认发送 272 | * 273 | * @return 是否录音器结束后发送deactive系统通知参数key 274 | */ 275 | +(NSString*)RECORDER_DEACTIVE; 276 | 277 | 278 | #pragma mark - 合成相关设置key 279 | /*! 280 | * 语速
281 | * 范围 (0~100) 默认值:50 282 | * 283 | * @return 语速key 284 | */ 285 | +(NSString*)SPEED; 286 | 287 | /*! 288 | * 音调
289 | * 范围(0~100)默认值:50 290 | * 291 | * @return 音调key 292 | */ 293 | +(NSString*)PITCH; 294 | 295 | /*! 296 | * 合成录音保存路径 297 | * 298 | * 注意:只需要设置文件名则可,会自动拼接到[IFlySetting setLogFilePath]接口设置的目录后 299 | * 300 | * @return 合成录音保存路径key 301 | */ 302 | +(NSString*)TTS_AUDIO_PATH; 303 | 304 | /** 305 | * 启用VAD功能 306 | * 307 | * @return 启用VAD功能key 308 | */ 309 | +(NSString*)VAD_ENABLE; 310 | 311 | /*! 312 | * VAD前端点超时
313 | * 范围:0-10000(单位ms) 314 | * 315 | * @return VAD前端点超时key 316 | */ 317 | +(NSString*)VAD_BOS; 318 | 319 | /*! 320 | * VAD后端点超时。
321 | * 可选范围:0-10000(单位ms) 322 | * 323 | * @return VAD后端点超时key 324 | */ 325 | +(NSString*)VAD_EOS; 326 | 327 | 328 | /* 329 | * 云端支持如下发音人: 330 | * 对于网络TTS的发音人角色,不同引擎类型支持的发音人不同,使用中请注意选择。 331 | * 332 | * |--------|----------------| 333 | * | 发音人 | 参数 | 334 | * |--------|----------------| 335 | * | 小燕 | xiaoyan | 336 | * |--------|----------------| 337 | * | 小宇 | xiaoyu | 338 | * |--------|----------------| 339 | * | 凯瑟琳 | catherine | 340 | * |--------|----------------| 341 | * | 亨利 | henry | 342 | * |--------|----------------| 343 | * | 玛丽 | vimary | 344 | * |--------|----------------| 345 | * | 小研 | vixy | 346 | * |--------|----------------| 347 | * | 小琪 | vixq | 348 | * |--------|----------------| 349 | * | 小峰 | vixf | 350 | * |--------|----------------| 351 | * | 小梅 | vixl | 352 | * |--------|----------------| 353 | * | 小莉 | vixq | 354 | * |--------|----------------| 355 | * | 小蓉 | vixr | 356 | * |--------|----------------| 357 | * | 小芸 | vixyun | 358 | * |--------|----------------| 359 | * | 小坤 | vixk | 360 | * |--------|----------------| 361 | * | 小强 | vixqa | 362 | * |--------|----------------| 363 | * | 小莹 | vixyin | 364 | * |--------|----------------| 365 | * | 小新 | vixx | 366 | * |--------|----------------| 367 | * | 楠楠 | vinn | 368 | * |--------|----------------| 369 | * | 老孙 | vils | 370 | * |--------|----------------| 371 | */ 372 | 373 | /*! 374 | * 发音人 375 | * 376 | * 云端支持如下发音人:
377 | * 对于网络TTS的发音人角色,不同引擎类型支持的发音人不同,使用中请注意选择。
378 | * 379 | * | 发音人 | 参数 | 380 | * |:--------:|:----------------:| 381 | * | 小燕 | xiaoyan | 382 | * | 小宇 | xiaoyu | 383 | * | 凯瑟琳 | catherine | 384 | * | 亨利 | henry | 385 | * | 玛丽 | vimary | 386 | * | 小研 | vixy | 387 | * | 小琪 | vixq | 388 | * | 小峰 | vixf | 389 | * | 小梅 | vixl | 390 | * | 小莉 | vixq | 391 | * | 小蓉 | vixr | 392 | * | 小芸 | vixyun | 393 | * | 小坤 | vixk | 394 | * | 小强 | vixqa | 395 | * | 小莹 | vixyin | 396 | * | 小新 | vixx | 397 | * | 楠楠 | vinn | 398 | * | 老孙 | vils | 399 | * 400 | * @return 发音人key 401 | */ 402 | +(NSString*)VOICE_NAME; 403 | 404 | /*! 405 | * 发音人ID key。 406 | * 407 | * @return 发音人ID key 408 | */ 409 | +(NSString*)VOICE_ID; 410 | 411 | /*! 412 | * 发音人语种 key。 413 | * 414 | * 参数值:0:Auto 1:中文 2英文 ,默认 0. 415 | * 416 | * @return 发音人ID key 417 | */ 418 | +(NSString*)VOICE_LANG; 419 | 420 | /*! 421 | * 音量
422 | * 范围(0~100) 默认值:50 423 | * 424 | * @return 音量key 425 | */ 426 | +(NSString*)VOLUME ; 427 | 428 | /*! 429 | * 合成音频播放缓冲时间
430 | * 即缓冲多少秒音频后开始播放,如tts_buffer_time=1000;
431 | * 默认缓冲1000ms毫秒后播放。 432 | * 433 | * @return 合成音频播放缓冲时间缓冲时间key 434 | */ 435 | +(NSString*)TTS_BUFFER_TIME ; 436 | 437 | 438 | /*! 439 | * 合成数据是否即时返回 440 | * 441 | * 是否需要数据回调,为1时,当合成一段音频会通过onEvent回调返回,直接合成结束;
442 | * 设置为1为即时返回;0为非即时返回;默认值为0; 443 | * 444 | * @return 合成数据即时返回key 445 | */ 446 | +(NSString*)TTS_DATA_NOTIFY; 447 | 448 | /*! 449 | * 预合成文本 450 | * 451 | * @return 预合成文本参数key 452 | */ 453 | +(NSString*)NEXT_TEXT; 454 | 455 | /*! 456 | * 是否需要打开MPPlayingInfocenter
457 | * 是否需要初始化MPPlayerCenter的属性;0:需要初始化,1:不初始化 458 | * 459 | * @return 是否需要打开MPPlayingInfocenter 参数key 460 | */ 461 | +(NSString*)MPPLAYINGINFOCENTER; 462 | 463 | #pragma mark - 识别、听写、语义相关设置key 464 | 465 | /*! 466 | * 录音源
467 | * 录音时的录音方式,默认为麦克风,设置为1;
468 | * 如果需要外部送入音频,设置为-1,通过WriteAudio接口送入音频。 469 | * 470 | * @return 录音源key 471 | */ 472 | +(NSString*)AUDIO_SOURCE; 473 | 474 | /*! 475 | * 识别录音保存路径 476 | * 477 | * @return 识别录音保存路径key 478 | */ 479 | +(NSString*) ASR_AUDIO_PATH; 480 | 481 | /*! 482 | * 设置是否开启语义 483 | * 484 | * @return 设置是否开启语义key 485 | */ 486 | +(NSString*)ASR_SCH; 487 | 488 | /*! 489 | * 设置是否有标点符号 490 | * 491 | * @return 设置是否有标点符号key 492 | */ 493 | +(NSString*)ASR_PTT; 494 | 495 | /*! 496 | * ASR_PTT 参数值:设置带标点符号 497 | * 498 | * @return 设置是有标点符号Value 499 | */ 500 | +(NSString*)ASR_PTT_HAVEDOT; 501 | 502 | /*! 503 | * ASR_PTT 参数值:设置不带标点符号 504 | * 505 | * @return 设置是无标点符号Value 506 | */ 507 | +(NSString*)ASR_PTT_NODOT; 508 | 509 | /*! 510 | * 本地语法名称。
511 | * 本地语法名称,对应云端的有CLOUD_GRAMMAR 512 | * 513 | * @return 本地语法名称key。 514 | */ 515 | +(NSString*)LOCAL_GRAMMAR; 516 | 517 | /*! 518 | * 云端语法ID。
519 | * 云端编译语法返回的表示,早期版本使用GRAMMAR_ID,仍然兼容,但建议使用新的。 520 | * 521 | * @return 云端语法ID key。 522 | */ 523 | +(NSString*)CLOUD_GRAMMAR; 524 | 525 | /*! 526 | * 语法类型 527 | * 528 | * @return 语法类型key 529 | */ 530 | +(NSString*)GRAMMAR_TYPE; 531 | 532 | /*! 533 | * 语法内容。 534 | * 535 | * @return 语法内容key。 536 | */ 537 | +(NSString*)GRAMMAR_CONTENT; 538 | 539 | /*! 540 | * 字典内容。 541 | * 542 | * @return 字典内容key。 543 | */ 544 | +(NSString*)LEXICON_CONTENT; 545 | 546 | /*! 547 | * 字典名字。 548 | * 549 | * @return 字典名字key。 550 | */ 551 | +(NSString*)LEXICON_NAME; 552 | 553 | /*! 554 | * 语法名称列表。 555 | * 556 | * @return 语法名称列表key。 557 | */ 558 | +(NSString*)GRAMMAR_LIST; 559 | 560 | /*! 561 | * 开放语义协议版本号。
562 | * 如需使用请在http://osp.voicecloud.cn/上进行业务配置 563 | * 564 | * @return 开放语义协议版本号key。 565 | */ 566 | +(NSString*)NLP_VERSION; 567 | 568 | #pragma mark - 唤醒相关设置key 569 | /*! 570 | * 唤醒门限值。 571 | * 572 | * @return 唤醒门限值key。 573 | */ 574 | +(NSString*)IVW_THRESHOLD; 575 | 576 | /*! 577 | * 唤醒服务类型。 578 | * 579 | * @return 唤醒服务类型key。 580 | */ 581 | +(NSString*)IVW_SST; 582 | 583 | /*! 584 | * 唤醒+识别。 585 | * 586 | * @return 唤醒+识别key。 587 | */ 588 | +(NSString*)IVW_ONESHOT; 589 | 590 | /*! 591 | * 唤醒工作方式
592 | * 1:表示唤醒成功后继续录音,0:表示唤醒成功后停止录音。 593 | * 594 | * @return 唤醒工作方式key 595 | */ 596 | +(NSString*)KEEP_ALIVE; 597 | 598 | /*! 599 | * 唤醒录音保存路径 600 | * 601 | * @return 唤醒录音保存路径key 602 | */ 603 | +(NSString*) IVW_AUDIO_PATH; 604 | 605 | #pragma mark - 评测相关设置key 606 | /*! 607 | * 评测类型
608 | * 可选值:read_syllable(英文评测不支持):单字;read_word:词语;read_sentence:句子;read_chapter(待开放):篇章。 609 | * 610 | * @return 评测类型 key 611 | */ 612 | +(NSString*)ISE_CATEGORY; 613 | 614 | /*! 615 | * 评测结果等级
616 | * 可选值:complete:完整 ;plain:简单 617 | * 618 | * @return 评测结果等级 key 619 | */ 620 | +(NSString*)ISE_RESULT_LEVEL; 621 | 622 | /*! 623 | * 评测结果格式
624 | * 可选值:xml;plain 625 | * 626 | * @return 评测结果格式 key 627 | */ 628 | +(NSString*)ISE_RESULT_TYPE; 629 | 630 | /*! 631 | * 评测录音保存路径 632 | * 633 | * @return 评测录音保存路径key 634 | */ 635 | +(NSString*) ISE_AUDIO_PATH; 636 | 637 | 638 | /*! 639 | * 朗读跟踪,只对句子和篇章有效
640 | * 可选值:enable:开启;disable:关闭。 641 | * 642 | * @return 朗读跟踪 key 643 | */ 644 | +(NSString*)ISE_AUTO_TRACKING; 645 | 646 | /*! 647 | * 跟踪模式
648 | * 可选值:easy:简单;hard:复杂。 649 | * 650 | * @return 跟踪模式 key 651 | */ 652 | +(NSString*)ISE_TRACK_TYPE; 653 | 654 | #pragma mark - 语记SDK业务key 655 | /*! 656 | * 本地所有资源 657 | * 658 | * @return 本地所有资源key 659 | */ 660 | + (NSString *)PLUS_LOCAL_ALL; 661 | 662 | /*! 663 | * 本地合成资源 664 | * 665 | * @return 本地合成资源key 666 | */ 667 | + (NSString *)PLUS_LOCAL_TTS; 668 | 669 | /*! 670 | * 本地识别资源 671 | * 672 | * @return 本地识别资源key 673 | */ 674 | + (NSString *)PLUS_LOCAL_ASR; 675 | 676 | /*! 677 | * 本地唤醒资源 678 | * 679 | * @return 本地唤醒资源key 680 | */ 681 | + (NSString *)PLUS_LOCAL_IVW; 682 | 683 | #pragma mark - 身份验证业务key 684 | 685 | /*! 686 | * auth_id
687 | * 用于用户注册和登录、查询、删除等业务时标识用户身份 688 | * 689 | * @return 用户标识 690 | */ 691 | + (NSString*)MFV_AUTH_ID; 692 | 693 | /*! 694 | * 请求业务类型,可选值:mfv(默认,融合验证),ivp(声纹),ifr(人脸) 695 | * 696 | * @return 请求业务类型key 697 | */ 698 | + (NSString*)MFV_SUB; 699 | 700 | /*! 701 | * 会话类型,不同sub有不同的sst取值。
702 | * ifr:enroll,verify,identify,reenroll,query,delete
703 | * ivp:enroll(train),verify,reenroll,query,delete,download 704 | * 705 | * @return 会话类型key 706 | */ 707 | + (NSString*)MFV_SST; 708 | 709 | /*! 710 | * 融合验证模式,仅在融合验证场景下使用。可选值:sin(单一生物特征数据验证),mix(混合生物特征数据验证),agi(灵活生物特征数据验证) 711 | * 712 | * @return 融合验证模式key 713 | */ 714 | + (NSString*)MFV_VCM; 715 | 716 | /*! 717 | * 特征场景,用来说明本次验证将涉及的业务。可选值:ivp,ifr,ivp|ifr 718 | * 719 | * @return 特征场景 key 720 | */ 721 | + (NSString*)MFV_SCENES; 722 | 723 | /*! 724 | * 确认周期(affirmance cycle,单位:s),用户设置的确认超时时间(生命周期),仅在灵活融合验证场景下使用 725 | * 726 | * @return 确认周期key 727 | */ 728 | + (NSString*)MFV_AFC; 729 | 730 | /*! 731 | * 数据保存路径 732 | * 733 | * @return 数据保存路径key 734 | */ 735 | + (NSString*)MFV_DATA_PATH; 736 | 737 | /*! 738 | * 训练次数:取值2~9.无默认值,必须明确指定。 739 | * 740 | * @return 训练次数key 741 | */ 742 | + (NSString*)MFV_RGN; 743 | 744 | /*! 745 | * 声纹确认门限值,验证得分>=tsd验证通过,否则验证失败(该参数目前不支持,作为保留参数。)却只范围:0~100. 746 | * 747 | * @return 声纹确认门限值key 748 | */ 749 | + (NSString*)MFV_TSD; 750 | 751 | /*! 752 | * 密码文本。从服务端下载,比如数字密码所需要的数字串。 753 | * 754 | * @return 密码文本key 755 | */ 756 | + (NSString*)MFV_PTXT; 757 | 758 | /*! 759 | * 密码类型。取值:1(文本密码),2(自由说),3(数字密码). 760 | * 761 | * @return 密码类型key 762 | */ 763 | + (NSString*)MFV_PWDT; 764 | 765 | /*! 766 | * 取消注册。取值:0(不取消,即不生效),1(取消本次注册). 767 | * 768 | * @return 取消注册key 769 | */ 770 | + (NSString*)MFV_FIN; 771 | 772 | /*! 773 | * 等待超时时间:描述客户端等待结果的超时时间 774 | * 775 | * @return 等待超时时间:key 776 | */ 777 | + (NSString*)MFV_WTT; 778 | 779 | /*! 780 | * 数据格式
781 | * 声纹为音频采样率支持:16000和8000;人脸为图片格式,支持jpg和gif 782 | * 783 | * @return 数据格式key 784 | */ 785 | + (NSString*)MFV_DATA_FORMAT; 786 | 787 | /*! 788 | * 数据压缩编码
789 | * 声纹为;人脸支持raw,不对图片压缩 790 | * 791 | * @return 数据压缩编码key 792 | */ 793 | + (NSString*)MFV_DATA_ENCODING; 794 | 795 | #pragma mark - 人脸业务key 796 | 797 | //1. sub 取值: wfr 用途: 用于区分业务类型,web访问方式中,nginx配置不用使用,但是在结构化日志和染色日志记录中使用。 798 | //2. sst 取值: reg、verify、detect、align 用途: 指定本路会话是属于何种性质 799 | // + 人脸图像注册(reg):上传图像,验证图像的有效性,然后存储起来,作为数据源。 800 | // + 人脸图像验证(verify):通过与指定源图像比较,验证人脸相似性。 801 | // + 人脸图像检测(detect):能够检测出不同姿态方位的人脸在图中的位置。 802 | // + 人脸图像聚焦(align):在给定人脸框下自动标定出两眼、鼻尖、嘴角的坐标。 803 | //3. aue 取值: raw 用途: 图像压缩格式,现在引擎不支持图像压缩,aue只能取值raw 804 | //4. pset 取值: 整数 用途: 人脸识别验证阈值,取值可以是负数也可以是整数。 805 | //5. skip 取值: true/false 用途: 后台图片处理是否进行过滤。true表示不过滤,false表示过滤 806 | //6. gid 取值: *********** 用途: 图像模型id,如:4a6c124ed6b78436ee5aac4563f13eb5 807 | //7. appid 取值:用户申请的appid 用途: 验证用户 808 | 809 | 810 | /*! 811 | * sub 默认值:wfr
812 | * 用于区分业务类型,web访问方式中,nginx配置不用使用,但是在结构化日志和染色日志记录中使用。 813 | */ 814 | + (NSString*) FACE_SUB; 815 | 816 | /*! 817 | * WFR
818 | * sub参数的默认值 819 | */ 820 | + (NSString*) FACE_WFR; 821 | 822 | /*! 823 | * sst
824 | * 指定本路会话是属于何种性质 825 | */ 826 | + (NSString*) FACE_SST; 827 | 828 | /*! 829 | * REG
830 | * 人脸图像注册(reg):上传图像,验证图像的有效性,然后存储起来,作为数据源。 831 | */ 832 | + (NSString*) FACE_REG; 833 | 834 | /*! 835 | * VERIFY
836 | * 人脸图像验证(verify):通过与指定源图像比较,验证人脸相似性。 837 | */ 838 | + (NSString*) FACE_VERIFY; 839 | 840 | /*! 841 | * DETECT
842 | * 人脸图像检测(detect):能够检测出不同姿态方位的人脸在图中的位置。 843 | */ 844 | + (NSString*) FACE_DETECT; 845 | 846 | /*! 847 | * ALIGN
848 | * 人脸图像聚焦(align):在给定人脸框下自动标定出两眼、鼻尖、嘴角的坐标。 849 | */ 850 | + (NSString*) FACE_ALIGN; 851 | 852 | /*! 853 | * ATTR
854 | * 面部属性识别(attr):对面部属性进行识别:例如秃顶、刘海、大嘴、模糊、眼镜等。 855 | */ 856 | + (NSString*) FACE_ATTR; 857 | 858 | 859 | /*! 860 | * AUE
861 | * 图像压缩格式,现在引擎不支持图像压缩,aue只能取值raw 862 | */ 863 | + (NSString*) FACE_AUE; 864 | 865 | /*! 866 | * RAW
867 | * AUE参数的值 868 | */ 869 | + (NSString*) FACE_RAW; 870 | 871 | /*! 872 | * PSET
873 | * 人脸识别验证阈值,取值可以是负数也可以是整数。 874 | */ 875 | + (NSString*) FACE_PSET; 876 | 877 | /*! 878 | * SKIP
879 | * 后台图片处理是否进行过滤。true表示不过滤,false表示过滤,传入字符串@“true”或@“false” 880 | */ 881 | + (NSString*) FACE_SKIP; 882 | 883 | /*! 884 | * GID
885 | * 图像模型id,如:4a6c124ed6b78436ee5aac4563f13eb5 886 | */ 887 | + (NSString*) FACE_GID; 888 | 889 | /*! 890 | * auth_id
891 | * 用于用户注册和登录、查询、删除等业务时标识用户身份 892 | * 893 | * @return 用户标识 894 | */ 895 | + (NSString*)FACE_AUTH_ID; 896 | 897 | /*! 898 | * DVC
899 | * 用户设备编号,用于验证用户 900 | */ 901 | + (NSString*) FACE_DVC; 902 | 903 | @end 904 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySpeechError.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySpeechError.h 3 | // MSC 4 | // 5 | // Created by iflytek on 13-3-19. 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | #ifndef __IFlySpeechError__ 10 | #define __IFlySpeechError__ 11 | 12 | #import 13 | 14 | /*! 15 | * 错误描述类 16 | */ 17 | @interface IFlySpeechError : NSObject 18 | 19 | /*! 20 | * 错误码 21 | */ 22 | @property(nonatomic,assign) int errorCode; 23 | 24 | /*! 25 | * 错误码类型 26 | */ 27 | @property(nonatomic,assign) int errorType; 28 | 29 | /*! 30 | * 错误描述 31 | */ 32 | @property(nonatomic,retain) NSString* errorDesc; 33 | 34 | /*! 35 | * 初始化 36 | * 37 | * @param errorCode -[in] 错误码 38 | * 39 | * @return IFlySpeechError对象 40 | */ 41 | + (instancetype) initWithError:(int) errorCode; 42 | 43 | /*! 44 | * 获取错误码 45 | * 46 | * @return 错误码 47 | */ 48 | -(int) errorCode; 49 | 50 | /*! 51 | * 获取错误描述 52 | * 53 | * @return 错误描述 54 | */ 55 | - (NSString *) errorDesc; 56 | 57 | @end 58 | #endif 59 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySpeechEvaluator.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySpeechEvaluator.h 3 | // msc 4 | // 5 | // Created by jianzhang on 14-1-13 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | 10 | #import 11 | #import "IFlySpeechEvaluatorDelegate.h" 12 | 13 | #define IFLY_AUDIO_SOURCE_MIC @"1" 14 | #define IFLY_AUDIO_SOURCE_STREAM @"-1" 15 | 16 | /*! 17 | * 语音评测类 18 | */ 19 | @interface IFlySpeechEvaluator : NSObject 20 | 21 | /*! 22 | * 设置委托对象 23 | */ 24 | @property (assign) id delegate; 25 | 26 | /*! 27 | * 返回评测对象的单例 28 | * 29 | * @return 别对象的单例 30 | */ 31 | + (instancetype)sharedInstance; 32 | 33 | /*! 34 | * 销毁评测对象。 35 | * 36 | * @return 成功返回YES,失败返回NO。 37 | */ 38 | - (BOOL)destroy; 39 | 40 | /*! 41 | * 设置评测引擎的参数 42 | * 43 | * @param value 评测引擎参数值 44 | * @param key 评测引擎参数 45 | * 46 | * @return 设置的参数和取值正确返回YES,失败返回NO 47 | */ 48 | - (BOOL)setParameter:(NSString *)value forKey:(NSString *)key; 49 | 50 | 51 | /*! 52 | * 获得评测引擎的参数 53 | * 54 | * @param key 评测引擎参数 55 | * 56 | * @return key对应的参数值 57 | */ 58 | - (NSString*)parameterForKey:(NSString *)key; 59 | 60 | /*! 61 | * 开始评测
62 | * 同时只能进行一路会话,这次会话没有结束不能进行下一路会话,否则会报错 63 | * 64 | * @param data 评测的试题 65 | * @param params 评测的参数 66 | * @return 成功返回YES,失败返回NO 67 | */ 68 | - (BOOL)startListening:(NSData *)data params:(NSString *)params; 69 | 70 | /*! 71 | * 停止录音
72 | * 调用此函数会停止录音,并开始进行语音评测 73 | */ 74 | - (void)stopListening; 75 | 76 | /*! 77 | * 取消本次会话 78 | */ 79 | - (void)cancel; 80 | 81 | @end 82 | 83 | /*! 84 | * 音频流评测
85 | * 音频流评测可以将文件分段写入 86 | */ 87 | @interface IFlySpeechEvaluator(IFlyStreamISERecognizer) 88 | 89 | /*! 90 | * 写入音频流 91 | * 92 | * @param audioData 音频数据 93 | * 94 | * @return 写入成功返回YES,写入失败返回NO 95 | */ 96 | - (BOOL) writeAudio:(NSData *) audioData; 97 | 98 | @end 99 | 100 | 101 | 102 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySpeechEvaluatorDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySpeechEvaluatorDelegate.h 3 | // msc 4 | // 5 | // Created by admin on 13-6-19. 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | 10 | #import 11 | 12 | @class IFlySpeechError; 13 | 14 | /*! 15 | * 评测协议 16 | */ 17 | @protocol IFlySpeechEvaluatorDelegate 18 | 19 | /*! 20 | * 音量和数据回调 21 | * 22 | * @param volume 音量 23 | * @param buffer 音频数据 24 | */ 25 | - (void)onVolumeChanged:(int)volume buffer:(NSData *)buffer; 26 | 27 | /*! 28 | * 开始录音回调
29 | * 当调用了`startListening`函数之后,如果没有发生错误则会回调此函数。如果发生错误则回调onCompleted:函数 30 | */ 31 | - (void)onBeginOfSpeech; 32 | 33 | /*! 34 | * 停止录音回调
35 | * 当调用了`stopListening`函数或者引擎内部自动检测到断点,如果没有发生错误则回调此函数。
36 | * 如果发生错误则回调onCompleted:函数 37 | */ 38 | - (void)onEndOfSpeech; 39 | 40 | /*! 41 | * 正在取消 42 | */ 43 | - (void)onCancel; 44 | 45 | /*! 46 | * 评测错误回调 47 | * 48 | * 在进行语音评测过程中的任何时刻都有可能回调此函数,你可以根据errorCode进行相应的处理。当errorCode没有错误时,表示此次会话正常结束,否则,表示此次会话有错误发生。特别的当调用`cancel`函数时,引擎不会自动结束,需要等到回调此函数,才表示此次会话结束。在没有回调此函数之前如果重新调用了`startListening`函数则会报错误。 49 | * 50 | * @param errorCode 错误描述类 51 | */ 52 | - (void)onCompleted:(IFlySpeechError *)errorCode; 53 | 54 | /*! 55 | * 评测结果回调
56 | * 在评测过程中可能会多次回调此函数,你最好不要在此回调函数中进行界面的更改等操作,只需要将回调的结果保存起来。 57 | * 58 | * @param results -[out] 评测结果。 59 | * @param isLast -[out] 是否最后一条结果 60 | */ 61 | - (void)onResults:(NSData *)results isLast:(BOOL)isLast; 62 | 63 | @end 64 | 65 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySpeechEvent.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySpeechEvent.h 3 | // MSCDemo 4 | // 5 | // Created by admin on 14-8-12. 6 | // Copyright (c) 2014年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | /*! 12 | * 事件类型 13 | */ 14 | typedef NS_ENUM(NSUInteger,IFlySpeechEventType){ 15 | /*! 16 | * 网络状态消息
17 | * 在消息到达时,可通过onEvent的第2个参数arg1,获取当前网络连接状态值 18 | */ 19 | IFlySpeechEventTypeNetPref = 10001, 20 | /*! 21 | * 转写音频文件消息
22 | * 在录音模式下,成功创建音频文件时返回。可通过onEvent第4个参数data,指定Key为[IFlySpeechConstant IST_AUDIO_PATH],获取音频文件绝对路径.或通过[IFlySpeechTranscripter getParameter:[IFlySpeechConstant IST_AUDIO_PATH]],获取音频文件绝对路径. 23 | */ 24 | IFlySpeechEventTypeISTAudioFile = 10004, 25 | /*! 26 | * 转写已上传字节消息
27 | * 在消息到达时,通过onEvent的第二个参数arg1,获取已确认上传到服务器的字节数.若当前音频源为非写音频模式,还可通过onEvent 28 | * 的第三个参数arg2,获取当前所有音频的字节大小.录音模式时,由于所有音频字节大小会变。当停止音频输入后(等待录音时间超时[IFlySpeechConstant SPEECH_TIMEOUT],或调用[IFlySpeechTranscripter stopTranscripting]),且服务器收到所有音频时,第四个参数data,将包含完成标记的布尔值(true),可通过data调用指定KEY为KCIFlySpeechEventKeyISTUploadComplete获取。此消息可能多次返回. 29 | */ 30 | IFlySpeechEventTypeISTUploadBytes = 10006, 31 | 32 | /*! 33 | * 转写缓存剩余
34 | * 此消息仅在音频源为-1时需要关注,在调用[IFlySpeechTranscripter writeAudio]写音频时,应该关注此事件。
35 | * 此事件在调用写音频接口、及音频最后被写入底层库时分别回调一次。当事件回调时,通过onEvent的第二个参数arg1,获取当前剩余的缓存大小,当缓存小于要写入的音频时,应该先暂停写音频数据,直到下次缓存大小大于要写入的音频时。最大缓存为128KByte。
41 | * 在消息到达时,通过 onEvent的第二个参数arg1,获取当前结果需要的时间.
42 | * 此消息可能多次返回,返回时间不定,且不一定会返回. 43 | */ 44 | IFlySpeechEventTypeISTResultTime= 10008, 45 | 46 | /*! 47 | * 转写转写音频同步ID消息
48 | * 在消息到达时,通过 onEvent的第二个参数arg1,获取当前写音频同步ID.
49 | * 此消息可能多次返回. 50 | */ 51 | IFlySpeechEventTypeISTSyncID= 10009, 52 | 53 | /*! 54 | * 会话开始消息
55 | * 在会话开始成功后返回 56 | */ 57 | IFlySpeechEventTypeSessionBegin = 10010, 58 | 59 | /*! 60 | * 会话结束消息
61 | * 在会话结束前返回 62 | */ 63 | IFlySpeechEventTypeSessionEnd = 10011, 64 | 65 | /*! 66 | * 音量消息,在得到音量时抛出,暂时只有身份验证的声纹业务用到 67 | */ 68 | IFlySpeechEventTypeVolume = 10012, 69 | 70 | /*! 71 | * VAD后端点消息,在检测到VAD后端点时抛出,暂时只有身份验证的声纹业务用到 72 | */ 73 | IFlySpeechEventTypeVadEOS = 10013, 74 | 75 | /*! 76 | * 服务端会话id
77 | * 在消息到达时,可通过onEvent的第4个参数data(字典类型),指定key KCIFlySpeechEventKeySessionID,获取服务端会话id. 78 | */ 79 | IFlySpeechEventTypeSessionID = 20001, 80 | 81 | /*! 82 | * TTS合成数据消息
83 | * -(void)onEvent:(int)eventType arg0:(int)arg0 arg1:(int)arg1 data:(NSData *)eventData
84 | * 其中eventData中包含数据 85 | * 86 | */ 87 | IFlySpeechEventTypeTTSBuffer = 21001, 88 | 89 | /*! 90 | * 通知cancel方法被调用的回调 91 | * 92 | */ 93 | IFlySpeechEventTypeTTSCancel = 21002, 94 | 95 | /*! 96 | * IVW onshot 听写 or 识别结果
97 | * 在消息到达时,第2个参数arg1包含是否为最后一个结果:1为是,0为否;
98 | * 第4个参数data中包含数据,通过指定KEY为KCIFlySpeechEventKeyIVWResult获取. 99 | */ 100 | IFlySpeechEventTypeIVWResult = 22001, 101 | 102 | /*! 103 | * 开始处理录音数据 104 | * 105 | */ 106 | IFlySpeechEventTypeSpeechStart= 22002, 107 | 108 | /*! 109 | * 录音停止 110 | * 111 | */ 112 | IFlySpeechEventTypeRecordStop= 22003, 113 | 114 | /*! 115 | * 服务端音频url
116 | * 在消息到达时,第4个参数data,包含数据,通过指定KEY为KCIFlySpeechEventKeyAudioUrl获取. 117 | */ 118 | IFlySpeechEventTypeAudioUrl = 23001, 119 | 120 | /*! 121 | * 变声数据结果返回
122 | * 设置voice_change参数获取结果. 123 | */ 124 | IFlySpeechEventTypeVoiceChangeResult = 24001 125 | 126 | }; 127 | 128 | #pragma mark - keys for event data 129 | 130 | /** 131 | * 转写是否已上传完标记key 132 | */ 133 | extern NSString* const KCIFlySpeechEventKeyISTUploadComplete; 134 | 135 | /** 136 | * 服务端会话key 137 | */ 138 | extern NSString* const KCIFlySpeechEventKeySessionID; 139 | /** 140 | * TTS取音频数据key 141 | */ 142 | extern NSString* const KCIFlySpeechEventKeyTTSBuffer; 143 | /** 144 | * IVW oneshot 听写 or 识别结果 key 145 | */ 146 | extern NSString* const KCIFlySpeechEventKeyIVWResult; 147 | /** 148 | * 服务端音频url key 149 | */ 150 | extern NSString* const KCIFlySpeechEventKeyAudioUrl; 151 | 152 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySpeechRecognizer.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySpeechRecognizer.h 3 | // MSC 4 | // 5 | // Created by iflytek on 13-3-19. 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | #import "IFlySpeechRecognizerDelegate.h" 12 | 13 | #define IFLY_AUDIO_SOURCE_MIC @"1" 14 | #define IFLY_AUDIO_SOURCE_STREAM @"-1" 15 | 16 | /*! 17 | * 语音识别类
18 | * 此类现在设计为单例,你在使用中只需要创建此对象,不能调用release/dealloc函数去释放此对象。所有关于语音识别的操作都在此类中。 19 | */ 20 | @interface IFlySpeechRecognizer : NSObject 21 | 22 | /*! 23 | * 设置委托对象 24 | */ 25 | @property(nonatomic,assign) id delegate ; 26 | 27 | /*! 28 | * 返回识别对象的单例 29 | * 30 | * @return 识别对象的单例 31 | */ 32 | + (instancetype) sharedInstance; 33 | 34 | /*! 35 | * 销毁识别对象。 36 | * 37 | * @return 成功返回YES,失败返回NO 38 | */ 39 | - (BOOL) destroy; 40 | 41 | /* 42 | * | ------------- |----------------------------------------------------------- 43 | * | 参数 | 描述 44 | * | ------------- |----------------------------------------------------------- 45 | * | domain |应用的领域: 取值为:iat、search、video、poi、music、asr; 46 | * | | iat:普通文本听写; 47 | * | | search:热词搜索; 48 | * | | video:视频音乐搜索; 49 | * | | asr:关键词识别; 50 | * | ------------- |----------------------------------------------------------- 51 | * | vad_bos |前端点检测: 静音超时时间,即用户多长时间不说话则当做超时处理; 单位:ms; 52 | * | | engine指定iat识别默认值为5000; 53 | * | | 其他情况默认值为 4000,范围 0-10000。 54 | * | ------------- |----------------------------------------------------------- 55 | * | vad_eos |后断点检测: 后端点静音检测时间,即用户停止说话多长时间内即认为不再输入, 56 | * | | 自动停止录音;单位:ms; 57 | * | | sms 识别默认值为 1800; 58 | * | | 其他默认值为 700,范围 0-10000。 59 | * | ------------- |----------------------------------------------------------- 60 | * | sample_rate |采样率:目前支持的采样率设置有 16000 和 8000。 61 | * | ------------- |----------------------------------------------------------- 62 | * | asr_ptt |标点符号设置: 默认为 1,当设置为 0 时,将返回无标点符号文本。 63 | * | ------------- |----------------------------------------------------------- 64 | * | result_type |返回结果的数据格式: 可设置为json,xml,plain,默认为json。 65 | * | ------------- |----------------------------------------------------------- 66 | * | grammarID |识别的语法id: 只针对 domain 设置为”asr”的应用。 67 | * | ------------- |----------------------------------------------------------- 68 | * | asr_audio_path|音频文件名: 设置此参数后,将会自动保存识别的录音文件。 69 | * | | 路径为Documents/(指定值)。 70 | * | | 不设置或者设置为nil,则不保存音频。 71 | * | ------------- |----------------------------------------------------------- 72 | * | params |扩展参数: 对于一些特殊的参数可在此设置,一般用于设置语义。 73 | * | ------------- |----------------------------------------------------------- 74 | * 75 | */ 76 | 77 | /*! 78 | * 设置识别引擎的参数 79 | * 80 | * 识别的引擎参数(key)取值如下: 81 | * 82 | * | 参数 | 描述 | 83 | * |-----------------|-------------------------------------------------------| 84 | * | domain | 应用的领域: 取值为:iat、search、video、poi、music、asr;
iat:普通文本听写;
search:热词搜索;
video:视频音乐搜索;
asr:关键词识别;| 85 | * | vad_bos | 前端点检测: 静音超时时间,即用户多长时间不说话则当做超时处理; 单位:ms;
engine指定iat识别默认值为5000;
其他情况默认值为 4000,范围 0-10000。| 86 | * | vad_eos | 后断点检测: 后端点静音检测时间,即用户停止说话多长时间内即认为不再输入,自动停止录音;单位:ms;
sms 识别默认值为 1800;
其他默认值为 700,范围 0-10000。| 87 | * | sample_rate | 采样率:目前支持的采样率设置有 16000 和 8000。| 88 | * | asr_ptt | 标点符号设置: 默认为 1,当设置为 0 时,将返回无标点符号文本。| 89 | * | result_type | 返回结果的数据格式: 可设置为json,xml,plain,默认为json。| 90 | * | grammarID | 识别的语法id: 只针对 domain 设置为”asr”的应用。| 91 | * | asr_audio_path | 音频文件名: 设置此参数后,将会自动保存识别的录音文件。
路径为Documents/(指定值)。
不设置或者设置为nil,则不保存音频。| 92 | * | params | 扩展参数: 对于一些特殊的参数可在此设置,一般用于设置语义。| 93 | * 94 | * @param value 参数对应的取值 95 | * @param key 识别引擎参数 96 | * 97 | * @return 成功返回YES;失败返回NO 98 | */ 99 | -(BOOL) setParameter:(NSString *) value forKey:(NSString*)key; 100 | 101 | /*! 102 | * 获取识别引擎参数 103 | * 104 | * @param key 参数key 105 | * 106 | * @return 参数值 107 | */ 108 | -(NSString*) parameterForKey:(NSString *)key; 109 | 110 | /*! 111 | * 开始识别 112 | * 113 | * 同时只能进行一路会话,这次会话没有结束不能进行下一路会话,否则会报错。若有需要多次会话,请在onCompleted回调返回后请求下一路会话。 114 | * 115 | * @return 成功返回YES;失败返回NO 116 | */ 117 | - (BOOL) startListening; 118 | 119 | /*! 120 | * 停止录音
121 | * 调用此函数会停止录音,并开始进行语音识别 122 | */ 123 | - (void) stopListening; 124 | 125 | /*! 126 | * 取消本次会话 127 | */ 128 | - (void) cancel; 129 | 130 | /*! 131 | * 上传语法 132 | * 133 | * @param completionHandler 上传语法完成回调 134 | * @param grammarType 语法类型 135 | * @param grammarContent 语法内容 136 | * 137 | * @return 错误码 138 | */ 139 | - (int) buildGrammarCompletionHandler:(IFlyOnBuildFinishCompletionHandler)completionHandler 140 | grammarType:(NSString *)grammarType 141 | grammarContent:(NSString *)grammarContent; 142 | 143 | /*! 144 | * 是否正在识别 145 | */ 146 | @property (nonatomic, readonly) BOOL isListening; 147 | 148 | @end 149 | 150 | /*! 151 | * 音频流识别
152 | * 音频流识别可以将文件分段写入 153 | */ 154 | @interface IFlySpeechRecognizer(IFlyStreamRecognizer) 155 | 156 | /*! 157 | * 写入音频流 158 | * 159 | * 此方法的使用示例如下: 160 | *
[_iFlySpeechRecognizer setParameter:@"-1" forKey:@"audio_source"];
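 *  // Note: "-1" selects the external audio-stream source, "1" the microphone (cf. IFLY_AUDIO_SOURCE_STREAM / IFLY_AUDIO_SOURCE_MIC in IFlyVoiceWakeuper.h).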
161 | * [_iFlySpeechRecognizer startListening];
162 | * [_iFlySpeechRecognizer writeAudio:audioData1];
163 | * [_iFlySpeechRecognizer writeAudio:audioData2];
164 | * ...
165 | * [_iFlySpeechRecognizer stopListening];
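 *  // Per the stopListening note above, stopping here presumably marks the end of the written audio; recognition of the buffered data then completes and results arrive through the delegate callbacks.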
166 | * 
167 | * 168 | * @param audioData 音频数据 169 | * 170 | * @return 写入成功返回YES,写入失败返回NO 171 | */ 172 | - (BOOL) writeAudio:(NSData *) audioData; 173 | 174 | @end 175 | 176 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySpeechRecognizerDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySpeechRecognizerDelegate.h 3 | // MSC 4 | // 5 | // Created by ypzhao on 13-3-27. 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @class IFlySpeechError; 12 | 13 | /*! 14 | * 构建语法结束回调 15 | * 16 | * @param grammarId 语法id 17 | * @param error 错误描述 18 | */ 19 | typedef void(^IFlyOnBuildFinishCompletionHandler)(NSString* grammarId,IFlySpeechError * error); 20 | 21 | 22 | /*! 23 | * 语音识别协议
24 | * 在使用语音识别时,需要实现这个协议中的方法。 25 | */ 26 | @protocol IFlySpeechRecognizerDelegate 27 | 28 | @required 29 | 30 | /*! 31 | * 识别结束回调 32 | * 33 | * 在进行语音识别过程中的任何时刻都有可能回调此函数,你可以根据errorCode进行相应的处理:当errorCode没有错误时,表示此次会话正常结束;否则,表示此次会话有错误发生。特别地,当调用`cancel`函数时,引擎不会自动结束,需要等到回调此函数,才表示此次会话结束。在没有回调此函数之前,如果重新调用了`startListening`函数则会报错误。 34 | * 35 | * @param errorCode 错误描述 36 | */ 37 | - (void) onCompleted:(IFlySpeechError *) errorCode; 38 | 39 | /*! 40 | * 识别结果回调 41 | * 42 | * 在识别过程中可能会多次回调此函数,你最好不要在此回调函数中进行界面的更改等操作,只需要将回调的结果保存起来。
43 | * 使用results的示例如下: 44 | *

 45 |  *  - (void) onResults:(NSArray *) results{
 46 |  *     NSMutableString *result = [[NSMutableString alloc] init];
 47 |  *     NSDictionary *dic = [results objectAtIndex:0];
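 *     // dic: each key is a recognized text fragment; its value is the confidence score (sc), per the @param note below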
 48 |  *     for (NSString *key in dic){
 49 |  *        [result appendFormat:@"%@",key];//合并结果
 50 |  *     }
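 *     // cache "result" here and update the UI later (e.g. in onCompleted:), as the note above recommends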
 51 |  *   }
 52 |  *  
53 | * 54 | * @param results -[out] 识别结果,NSArray的第一个元素为NSDictionary,NSDictionary的key为识别结果,sc为识别结果的置信度。 55 | * @param isLast -[out] 是否最后一个结果 56 | */ 57 | - (void) onResults:(NSArray *) results isLast:(BOOL)isLast; 58 | 59 | @optional 60 | 61 | /*! 62 | * 音量变化回调
63 | * 在录音过程中,回调音频的音量。 64 | * 65 | * @param volume -[out] 音量,范围从0-30 66 | */ 67 | - (void) onVolumeChanged: (int)volume; 68 | 69 | /*! 70 | * 开始录音回调
71 | * 当调用了`startListening`函数之后,如果没有发生错误则会回调此函数。
72 | * 如果发生错误则回调onCompleted:函数 73 | */ 74 | - (void) onBeginOfSpeech; 75 | 76 | /*! 77 | * 停止录音回调
78 | * 当调用了`stopListening`函数或者引擎内部自动检测到后端点时,如果没有发生错误则回调此函数。
79 | * 如果发生错误则回调onCompleted:函数 80 | */ 81 | - (void) onEndOfSpeech; 82 | 83 | /*! 84 | * 取消识别回调
85 | * 当调用了`cancel`函数之后,会回调此函数。从调用cancel函数到回调onCompleted之间会有一段
86 | * 短暂的时间,您可以在此函数中处理这段时间内的界面显示。 87 | */ 88 | - (void) onCancel; 89 | 90 | #ifdef _EDUCATION_ 91 | /*! 92 | * 返回音频Key 93 | * 94 | * @param key 音频Key 95 | */ 96 | - (void) getAudioKey:(NSString *)key; 97 | 98 | #endif 99 | 100 | /*! 101 | * 扩展事件回调
102 | * 根据事件类型返回额外的数据 103 | * 104 | * @param eventType 事件类型,具体参见IFlySpeechEventType的IFlySpeechEventTypeVoiceChangeResult枚举。 105 | * @param arg0 arg0 106 | * @param arg1 arg1 107 | * @param eventData 事件数据 108 | */ 109 | - (void) onEvent:(int)eventType arg0:(int)arg0 arg1:(int)arg1 data:(NSData *)eventData; 110 | 111 | @end 112 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySpeechSynthesizer.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySpeechSynthesizer.h 3 | // MSC 4 | // 5 | // Created by 侯效林 on 16-4-22. 6 | // Copyright (c) 2016年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "IFlySpeechSynthesizerDelegate.h" 11 | 12 | 13 | /*! 14 | * 语音合成 15 | */ 16 | @interface IFlySpeechSynthesizer : NSObject 17 | 18 | /*! 19 | * 设置识别的委托对象 20 | */ 21 | @property(nonatomic,assign) id delegate; 22 | 23 | /*! 24 | * 返回合成对象的单例 25 | * 26 | * @return 合成对象 27 | */ 28 | + (instancetype) sharedInstance; 29 | 30 | /*! 31 | * 销毁合成对象。 32 | * 33 | * @return 成功返回YES,失败返回NO. 34 | */ 35 | + (BOOL) destroy; 36 | 37 | /* 38 | * | ------------- |----------------------------------------------------------- 39 | * | 参数 | 描述 40 | * | ------------- |----------------------------------------------------------- 41 | * | speed |合成语速,取值范围 0~100 42 | * | ------------- |----------------------------------------------------------- 43 | * | volume |合成的音量,取值范围 0~100 44 | * | ------------- |----------------------------------------------------------- 45 | * | voice_name |默认为”xiaoyan”;可以设置的参数列表可参考个性化发音人列表 46 | * | ------------- |----------------------------------------------------------- 47 | * | sample_rate |采样率:目前支持的采样率设置有 16000 和 8000。 48 | * | ------------- |----------------------------------------------------------- 49 | * | tts_audio_path|音频文件名 设置此参数后,将会自动保存合成的音频文件。 50 | * | |路径为Documents/(指定值)。不设置或者设置为nil,则不保存音频。 51 | * | ------------- |----------------------------------------------------------- 52 | * | params |扩展参数: 对于一些特殊的参数可在此设置。 53 | * | ------------- |----------------------------------------------------------- 54 | * 55 | */ 56 | 57 | /*! 58 | * 设置合成参数 59 | * 60 | * | 参数 | 描述 | 61 | * |-----------------|----------------------------------------------------| 62 | * | speed | 合成语速,取值范围 0~100 | 63 | * | volume | 合成的音量,取值范围 0~100 | 64 | * | voice_name | 默认为”xiaoyan”;可以设置的参数列表可参考个性化发音人列表 | 65 | * | sample_rate | 采样率:目前支持的采样率设置有 16000 和 8000。 | 66 | * | tts_audio_path | 音频文件名 设置此参数后,将会自动保存合成的音频文件。
路径为Documents/(指定值)。不设置或者设置为nil,则不保存音频。| 67 | * | params | 扩展参数: 对于一些特殊的参数可在此设置。 | 68 | * 69 | * @param value 参数取值 70 | * @param key 合成参数 71 | * 72 | * @return 设置成功返回YES,失败返回NO 73 | */ 74 | -(BOOL) setParameter:(NSString *) value forKey:(NSString*)key; 75 | 76 | /*! 77 | * 获取合成参数 78 | * 79 | * @param key 参数key 80 | * 81 | * @return 参数值 82 | */ 83 | -(NSString*) parameterForKey:(NSString *)key; 84 | 85 | /*! 86 | * 开始合成(播放)
87 | * 调用此函数进行合成,如果发生错误会回调错误`onCompleted` 88 | * 89 | * @param text 合成的文本,最大的字节数为1k 90 | */ 91 | - (void) startSpeaking:(NSString *)text; 92 | 93 | /*! 94 | * 开始合成(不播放)
95 | * 调用此函数进行合成,如果发生错误会回调错误`onCompleted` 96 | * 97 | * @param text 合成的文本,最大的字节数为1k 98 | * @param uri 合成后,保存在本地的音频路径 99 | */ 100 | -(void)synthesize:(NSString *)text toUri:(NSString*)uri; 101 | 102 | /*! 103 | * 暂停播放
104 | * 暂停播放之后,合成不会暂停,仍会继续,如果发生错误则会回调错误`onCompleted` 105 | */ 106 | - (void) pauseSpeaking; 107 | 108 | /*! 109 | * 恢复播放 110 | */ 111 | - (void) resumeSpeaking; 112 | 113 | /*! 114 | * 停止播放并停止合成 115 | */ 116 | - (void) stopSpeaking; 117 | 118 | /*! 119 | * 是否正在播放 120 | */ 121 | @property (nonatomic, readonly) BOOL isSpeaking; 122 | 123 | @end 124 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySpeechSynthesizerDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySpeechSynthesizerDelegate.h 3 | // MSC 4 | // 5 | // Created by ypzhao on 13-3-20. 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "IFlySpeechEvent.h" 11 | 12 | @class IFlySpeechError; 13 | 14 | /*! 15 | * 语音合成回调 16 | */ 17 | @protocol IFlySpeechSynthesizerDelegate 18 | 19 | @required 20 | 21 | /*! 22 | * 结束回调
23 | * 当整个合成结束之后会回调此函数 24 | * 25 | * @param error 错误码 26 | */ 27 | - (void) onCompleted:(IFlySpeechError*) error; 28 | 29 | @optional 30 | 31 | /*! 32 | * 开始合成回调 33 | */ 34 | - (void) onSpeakBegin; 35 | 36 | /*! 37 | * 缓冲进度回调 38 | * 39 | * @param progress 缓冲进度,0-100 40 | * @param msg 附件信息,此版本为nil 41 | */ 42 | - (void) onBufferProgress:(int) progress message:(NSString *)msg; 43 | 44 | /*! 45 | * 播放进度回调 46 | * 47 | * @param progress 当前播放进度,0-100 48 | * @param beginPos 当前播放文本的起始位置(按照字节计算),对于汉字(2字节)需/2处理 49 | * @param endPos 当前播放文本的结束位置(按照字节计算),对于汉字(2字节)需/2处理 50 | */ 51 | - (void) onSpeakProgress:(int) progress beginPos:(int)beginPos endPos:(int)endPos; 52 | 53 | /*! 54 | * 暂停播放回调 55 | */ 56 | - (void) onSpeakPaused; 57 | 58 | /*! 59 | * 恢复播放回调
60 | * 注意:此回调方法SDK内部不执行,播放恢复全部在onSpeakBegin中执行 61 | */ 62 | - (void) onSpeakResumed; 63 | 64 | /*! 65 | * 正在取消回调
66 | * 注意:此回调方法SDK内部不执行 67 | */ 68 | - (void) onSpeakCancel; 69 | 70 | /*! 71 | * 扩展事件回调
72 | * 根据事件类型返回额外的数据 73 | * 74 | * @param eventType 事件类型,具体参见IFlySpeechEventType枚举。目前只支持EVENT_TTS_BUFFER也就是实时返回合成音频。 75 | * @param arg0 arg0 76 | * @param arg1 arg1 77 | * @param eventData 事件数据 78 | */ 79 | - (void) onEvent:(int)eventType arg0:(int)arg0 arg1:(int)arg1 data:(NSData *)eventData; 80 | 81 | @end 82 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlySpeechUtility.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlySpeechUtility.h 3 | // MSCDemo 4 | // 5 | // Created by admin on 14-5-7. 6 | // Copyright (c) 2014年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | #define iOS_EXCLUSIVE //iOS平台独占API 12 | 13 | @class IFlySpeechError; 14 | 15 | /*! 16 | * 引擎模式 17 | */ 18 | typedef NS_ENUM(NSUInteger,IFlyEngineMode){ 19 | /*! 20 | * 云端使用MSC,本地优先使用语记 21 | */ 22 | IFlyEngineModeAuto = 0, 23 | /*! 24 | * 只使用MSC 25 | */ 26 | IFlyEngineModeMsc, 27 | /*! 28 | * 本地只使用语记(受平台限制,云端无法使用语记) 29 | */ 30 | IFlyEngineModePlus, 31 | }; 32 | 33 | 34 | /*! 35 | * 服务类型 36 | */ 37 | typedef NS_ENUM(NSUInteger,IFlySpeechPlusServiceType){ 38 | /*! 39 | * 打开语记主界面 40 | */ 41 | IFlySpeechPlusServiceTypeNone=0, 42 | /*! 43 | * 获取合成资源 44 | */ 45 | IFlySpeechPlusServiceTypeTTS, 46 | /*! 47 | * 获取识别资源(未开放) 48 | */ 49 | IFlySpeechPlusServiceTypeISR, 50 | /*! 51 | * 获取唤醒资源(未开放) 52 | */ 53 | IFlySpeechPlusServiceTypeIVW, 54 | } ; 55 | 56 | /*! 语记返回回调 57 | */ 58 | @protocol IFlySpeechplusDelegate 59 | 60 | /*! 61 | * 发生错误 62 | * 63 | * @param errorCode 错误码 64 | */ 65 | - (void)onCompleted:(int)errorCode; 66 | 67 | /*! 68 | * 服务正常结束 69 | */ 70 | - (void)onCompleted; 71 | 72 | @end 73 | 74 | /*! 75 | * 用户配置 76 | */ 77 | @interface IFlySpeechUtility : NSObject 78 | 79 | /*! 80 | * 创建用户语音配置
81 | * 注册应用请前往语音云开发者网站。
82 | * 网站:http://www.xfyun.cn 83 | * 84 | * @param params 启动参数,必须保证appid参数传入,示例:appid=123456 85 | * 86 | * @return 语音配置对象 87 | */ 88 | + (IFlySpeechUtility*) createUtility:(NSString *) params; 89 | 90 | /*! 91 | * 销毁用户配置对象 92 | * 93 | * @return 成功返回YES,失败返回NO 94 | */ 95 | +(BOOL) destroy; 96 | 97 | /*! 98 | * 获取用户配置对象 99 | * 100 | * @return 用户配置对象 101 | */ 102 | +(IFlySpeechUtility *) getUtility; 103 | 104 | /*! 105 | * 设置MSC引擎的状态参数 106 | * 107 | * @param value 参数值 108 | * @param key 参数名称 109 | * 110 | * @return 成功返回YES,失败返回NO 111 | */ 112 | -(BOOL) setParameter:(NSString *) value forKey:(NSString*)key; 113 | 114 | /*! 115 | * 获取MSC引擎状态参数 116 | * 117 | * @param key 参数名 118 | * 119 | * @return 参数值 120 | */ 121 | - (NSString *)parameterForKey:(NSString *)key; 122 | 123 | /*! 124 | * 引擎类型 125 | */ 126 | @property (nonatomic, readonly) IFlyEngineMode engineMode; 127 | 128 | /*! 129 | * 语记协议委托 130 | */ 131 | @property (nonatomic, assign) id delegate; 132 | 133 | @end 134 | 135 | /*! 136 | * 讯飞语记类别 137 | */ 138 | @interface IFlySpeechUtility (SpeechPlus) 139 | 140 | /*! 141 | * 检查讯飞语记是否安装 142 | * 143 | * @return 已安装返回YES,否则返回NO 144 | */ 145 | + (BOOL)checkServiceInstalled; 146 | 147 | /*! 148 | * 获取讯飞语记下载地址进行下载,安装完成后即可使用服务。
149 | * 下载地址需要通过[[UIApplication sharedApplication] openURL:]打开 150 | * 151 | * @return 讯飞语记在App Store下载地址 152 | */ 153 | + (NSString *)componentUrl; 154 | 155 | 156 | /*! 157 | * 注意:此接口废弃,不再需要使用
158 | * 处理语记使用URL启动第三方应用程序时传递的数据
159 | * 需要在 application:openURL:sourceApplication:annotation:或者application:handleOpenURL中调用。 160 | * 161 | * @param url 语记启动第三方应用程序时传递过来的URL 162 | * 163 | * @return 成功返回YES,失败返回NO。 164 | */ 165 | - (BOOL)handleOpenURL:(NSURL *)url iOS_EXCLUSIVE; 166 | 167 | /*! 168 | * 打开讯飞语记获取相应类型服务,0表示打开主界面 169 | * 170 | * @param serviceType 服务类型 171 | * 172 | * @return 成功打开返回YES,否则返回NO 173 | */ 174 | - (BOOL)openSpeechPlus:(IFlySpeechPlusServiceType)serviceType iOS_EXCLUSIVE; 175 | 176 | @end 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyUserWords.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyUserWords.h 3 | // MSC 4 | // 5 | // Created by ypzhao on 13-2-26. 6 | // Copyright (c) 2013年 iflytek. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | /*! 12 | * 用户词表类 13 | * 获取用户词表是为了更好的语音识别(iat),用户词表也属于个性化的一部分. 14 | */ 15 | @interface IFlyUserWords : NSObject 16 | 17 | /*! 18 | * 初始化对象 19 | * 20 | * 在进行初始化时,需要传入的格式如下: 21 | *
{\"userword\":[{\"name\":\"iflytek\",\"words\":[\"科大讯飞\",
22 |  *  \"云平台\",\"用户词条\",\"开始上传词条\"]}]}
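 *  // Hypothetical equivalent using the methods declared below (key name taken from the "name" field above, semantics assumed):
 *  //   IFlyUserWords *userWords = [[IFlyUserWords alloc] initWithJson:json]; // json = the string shown above
 *  //   NSArray *words = [userWords getWords:@"iflytek"];
 *  //   NSString *payload = [userWords toString]; // nil when the data or format is invalid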
23 | * 24 | * @param json 初始化时传入的数据 25 | * 26 | * @return IFlyUserWords对象 27 | */ 28 | - (id) initWithJson:(NSString *)json; 29 | 30 | /*! 31 | * 将数据转化为上传的数据格式 32 | * 33 | * @return 没有数据或者格式不对时返回nil 34 | */ 35 | - (NSString *) toString; 36 | 37 | /*! 38 | * 返回key对应的数据 39 | * 40 | * @param key 在putword:value中设置的key 41 | * 42 | * @return key对应的数组 43 | */ 44 | - (NSArray *) getWords: (NSString *) key; 45 | 46 | /*! 47 | * 添加一条用户词数据 48 | * 49 | * @param key 用户词对应的key 50 | * @param value 上传的用户词数据 51 | * 52 | * @return 成功返回YES,失败返回NO 53 | */ 54 | - (BOOL) putWord: (NSString *) key value:(NSString *)value; 55 | 56 | /*! 57 | * 添加一组数据 58 | * 59 | * @param key 用户词对应的key 60 | * @param words 上传的用户词数据 61 | * 62 | * @return 成功返回YES,失败返回NO 63 | */ 64 | - (BOOL) putwords: (NSString *) key words:(NSArray *)words; 65 | 66 | /*! 67 | * 是否包含key对应的用户词数据 68 | * 69 | * @param key 用户词对应的key 70 | * 71 | * @return 成功返回YES,失败返回NO 72 | */ 73 | - (BOOL) containsKey: (NSString *) key; 74 | @end 75 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyVoiceWakeuper.h: -------------------------------------------------------------------------------- 1 | // 2 | // IFlyVoiceWakeuper.h 3 | // wakeup 4 | // 5 | // Created by admin on 14-3-18. 6 | // Copyright (c) 2014年 iflytek. All rights reserved. 7 | // 8 | 9 | 10 | #import 11 | #import "IFlyVoiceWakeuperDelegate.h" 12 | 13 | #define IFLY_AUDIO_SOURCE_MIC @"1" 14 | #define IFLY_AUDIO_SOURCE_STREAM @"-1" 15 | 16 | /*! 17 | * 语音唤醒 18 | */ 19 | @interface IFlyVoiceWakeuper : NSObject 20 | 21 | /*! 22 | * 代理 23 | */ 24 | @property (nonatomic, assign) id delegate; 25 | 26 | /*! 27 | * 是否正在唤醒 28 | */ 29 | @property (nonatomic, readonly) BOOL isListening; 30 | 31 | /*! 32 | * 创建唤醒实例,采用单例模式 33 | */ 34 | + (instancetype) sharedInstance; 35 | 36 | 37 | /*! 38 | * 启动唤醒 39 | * 返回值:YES 成功,NO:失败 40 | */ 41 | -(BOOL) startListening; 42 | 43 | /*! 44 | * 停止录音 45 | */ 46 | -(BOOL) stopListening; 47 | 48 | /*! 49 | * 取消唤醒会话 50 | */ 51 | -(BOOL) cancel; 52 | 53 | /*! 54 | * 获取工作参数 55 | */ 56 | -(NSString*) getParameter:(NSString *)key; 57 | 58 | /*! 59 | * 设置工作参数
60 | * 注意:服务正在运行中时,不能设置参数 61 | */ 62 | -(BOOL) setParameter:(NSString *) value forKey:(NSString*)key; 63 | 64 | @end 65 | 66 | /*! 67 | * 音频流唤醒
68 | * 音频流唤醒可以将文件分段写入 69 | */ 70 | @interface IFlyVoiceWakeuper(IFlyStreamVoiceWakeuper) 71 | 72 | /*! 73 | * 写入音频流 74 | * 75 | * @param audioData 音频数据 76 | * 77 | * @return 写入成功返回YES,写入失败返回NO 78 | */ 79 | - (BOOL) writeAudio:(NSData *) audioData; 80 | 81 | @end 82 | 83 | 84 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/Headers/IFlyVoiceWakeuperDelegate.h: -------------------------------------------------------------------------------- 1 | 2 | // 3 | // IFlyVoiceWakeuperDel.h 4 | // wakeup 5 | // 6 | // Created by admin on 14-3-18. 7 | // Copyright (c) 2014年 iflytek. All rights reserved. 8 | // 9 | 10 | 11 | 12 | #import 13 | 14 | @class IFlySpeechError; 15 | 16 | @protocol IFlyVoiceWakeuperDelegate 17 | 18 | @optional 19 | 20 | /*! 21 | * 录音开始 22 | */ 23 | -(void) onBeginOfSpeech; 24 | 25 | /*! 26 | * 录音结束 27 | */ 28 | -(void) onEndOfSpeech; 29 | 30 | /*! 31 | * 会话错误 32 | * 33 | * @param errorCode 错误描述类, 34 | */ 35 | - (void) onCompleted:(IFlySpeechError *) error; 36 | 37 | /*! 38 | * 唤醒结果 39 | * 40 | * @param resultDic 唤醒结果字典 41 | */ 42 | -(void) onResult:(NSMutableDictionary *)resultDic; 43 | 44 | /*! 45 | * 音量反馈,返回频率与录音数据返回回调频率一致 46 | * 47 | * @param volume 音量值 48 | */ 49 | - (void) onVolumeChanged: (int)volume; 50 | 51 | /*! 52 | * 扩展事件回调
53 | * 根据事件类型返回额外的数据 54 | * 55 | @param eventType 事件类型,具体参见IFlySpeechEvent枚举。 56 | */ 57 | - (void) onEvent:(int)eventType isLast:(BOOL)isLast arg1:(int)arg1 data:(NSMutableDictionary *)eventData; 58 | 59 | @end 60 | 61 | -------------------------------------------------------------------------------- /FZSpeakDemo/iflyMSC.framework/iflyMSC: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fuzheng0301/FZSpeak/a802d3ddebd7985ddac2bd677f4a3d9898ebda40/FZSpeakDemo/iflyMSC.framework/iflyMSC -------------------------------------------------------------------------------- /FZSpeakDemo/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // FZSpeakDemo 4 | // 5 | // Created by 付正 on 2018/8/15. 6 | // Copyright © 2018年 付正. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "AppDelegate.h" 11 | 12 | int main(int argc, char * argv[]) { 13 | @autoreleasepool { 14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FZSpeak 2 | 语音听写、语音评测、语音合成 3 | 4 | # 目录 5 | 1. [引言](https://github.com/fuzheng0301/FZSpeak/blob/master/README.md#引言) 6 | 2. [前言](https://github.com/fuzheng0301/FZSpeak/blob/master/README.md#前言) 7 | 3. [正文](https://github.com/fuzheng0301/FZSpeak/blob/master/README.md#正文) 8 | 4. [集成](https://github.com/fuzheng0301/FZSpeak/blob/master/README.md#集成) 9 | 5. [语音听写](https://github.com/fuzheng0301/FZSpeak/blob/master/README.md#语音听写) 10 | 6. [语音评测](https://github.com/fuzheng0301/FZSpeak/blob/master/README.md#语音评测) 11 | 7. [语音合成](https://github.com/fuzheng0301/FZSpeak/blob/master/README.md#语音合成) 12 | 8. 
[尾声](https://github.com/fuzheng0301/FZSpeak/blob/master/README.md#尾声) 13 | 14 | 15 | ## 引言 16 | ``` 17 | 子弹短信,不仅支持语音输入、文本输入,同时还支持“语音输入、文字输出”。 18 | ``` 19 | 20 | ## 前言 21 | 之前在讯飞人脸识别的基础上做了[活体人脸识别](https://github.com/fuzheng0301/FaceRecognition),并在当时没有免费活体识别的大环境下,本着程序猿的互联网精神,在Git上第一个站出来开源出来,感谢大家的支持。 22 | 23 | 后来也一直打算拿出来讯飞的语音识别,做些事情方便大家,初衷是想做一款读书软件,后来拖延症晚期患者一直没上手。今年7月份偶然参加了一个活动,需要做一款APP参赛,后来决定做一个语音识别方面的,又重新找回讯飞语音识别,做了一个语音方面的APP。 24 | 25 | 赶巧制作过程中听闻锤子公司出了“子弹短信”,也是使用了语音识别的功能,想来后续会有很多同胞会应用到语音方面内容,故做完APP后,赶紧过来开源分享给大家。 26 | 27 | ## 正文 28 | 本次开源的语音识别是在讯飞语音的基础上,重新封装了语音评测、语音听写、语音朗读三个功能,集成更方便,使用更便捷。下面仍从集成和使用方面来讲解。 29 | 30 | ### 集成 31 | 集成可以参考[讯飞语音识别官方集成API](https://doc.xfyun.cn/msc_ios/%E9%9B%86%E6%88%90%E6%B5%81%E7%A8%8B.html)。同时在APP中需要首先初始化语音识别功能。 32 | 33 | ``` 34 | NSString *initString = [[NSString alloc] initWithFormat:@"appid=%@",@"讯飞平台注册APPID"]; 35 | 36 | [IFlySpeechUtility createUtility:initString]; 37 | ``` 38 | ### 语音听写 39 | 语音听写功能用于识别输入的语音,并输出文字。 40 | 41 | 这里封装成了一个方法,通过Block回调识别结果resText和错误信息error。 42 | 43 | ``` 44 | /** 45 | 语音听写 46 | @param callback 听写结果回调 47 | */ 48 | + (void)xf_AudioRecognizerResult: (void(^)(NSString *resText,NSError *error))callback; 49 | ``` 50 | 51 | ### 语音评测 52 | 语音评测功能中,可以设置想要评测的内容,通过用户朗读内容,机器识别并对比评测,得到朗读评分。 53 | 54 | 这里把评测中的状态分为开始录音、录音音量、停止录音、取消录音、评测结果、评测失败 6种情况,在Block回调中可通过type获取状态,并进行判断。progress为各个状态情况下的数值,为0-100之间的有理数。resultMsg为评测结果、评测失败两种情况下返回的评测结果、失败内容。 55 | 56 | ``` 57 | /** 58 | 语音评测 59 | @param text 评测内容 60 | @param callback 评测结果返回 61 | */ 62 | + (void)xf_AudioEvaluationOfText: (NSString*)text callback:(void(^)(XF_Audio_Evaluation_Type type,float progress,NSString *resultMsg))callback; 63 | ``` 64 | 65 | ### 语音合成 66 | 语音合成即语音播报,给出内容,由机器朗读内容。 67 | 68 | Block返回内容里type分为合成进度、开始播放、播放进度、播放结束四种合成状态。progress为各个阶段的进度值。 69 | 70 | 以下为默认播报语音发音人的方法: 71 | 72 | ``` 73 | /** 74 | 语音合成 75 | @param text 合成内容 76 | @param callback 回调结果 77 | */ 78 | + (void)xf_AudioSynthesizeOfText: (NSString*)text callback:(void(^)(XF_Audio_Synthesize_Type type,NSInteger progress))callback; 79 | ``` 80 | 81 | 以下为自定义语音发音人的语音合成调用方法: 82 | 83 | ``` 84 | /** 85 | 语音合成 86 | @param text 合成内容 87 | @param people 设置发音人 88 | @param callback 回调结果 89 | */ 90 | + (void)xf_AudioSynthesizeOfText: (NSString*)text fromPeople:(NSString*)people callback:(void(^)(XF_Audio_Synthesize_Type type, NSInteger progress))callback; 91 | ``` 92 | 93 | ## 尾声 94 | 如果能帮到大家,深感荣幸,感谢您的star。 95 | 96 | 97 | --------------------------------------------------------------------------------
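For a quick end-to-end reference, here is a minimal call-site sketch of the three wrapper methods documented in the README above. It assumes they are class methods exposed through a `FZSpeakClass` header (class/header name assumed) and that `[IFlySpeechUtility createUtility:]` has already been called as shown in the integration step; the sample text strings are placeholders.

```
// Minimal usage sketch (assumptions: class name FZSpeakClass, enum/type names as documented above).
#import "FZSpeakClass.h"

// 语音听写: speech-to-text, result delivered through the block.
[FZSpeakClass xf_AudioRecognizerResult:^(NSString *resText, NSError *error) {
    if (error) {
        NSLog(@"recognition failed: %@", error);
    } else {
        NSLog(@"recognized text: %@", resText);
    }
}];

// 语音评测: the user reads the given text and receives a score.
[FZSpeakClass xf_AudioEvaluationOfText:@"科大讯飞" callback:^(XF_Audio_Evaluation_Type type, float progress, NSString *resultMsg) {
    // type covers the six states listed above (start/volume/stop/cancel/result/failure).
    NSLog(@"evaluation state %ld, progress %.0f, message %@", (long)type, progress, resultMsg);
}];

// 语音合成: text-to-speech with the default voice.
[FZSpeakClass xf_AudioSynthesizeOfText:@"子弹短信" callback:^(XF_Audio_Synthesize_Type type, NSInteger progress) {
    NSLog(@"synthesis state %ld, progress %ld", (long)type, (long)progress);
}];
```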