Initialize the iFlytek speech plugin

victor-fa 2018-07-25 10:03:54 +08:00
commit 09b6667615
61 changed files with 5322 additions and 0 deletions

README.md Normal file

@ -0,0 +1,13 @@
# cordova-plugin-IFlyspeech-master

A Cordova plugin wrapping iFlytek (科大讯飞) speech capabilities: dictation (speech-to-text), speech synthesis (text-to-speech), and related speech services.

## Supported Platforms

- iOS
- Android

## Installation

cordova plugin add https://github.com/victor-fa/cordova-plugin-IFlyspeech-master.git
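## Usage (sketch)

The Android side of `plugin.xml` clobbers a global `xunfeiListenSpeaking` object, and the shipped JavaScript wrapper lives in `www/cordova-plugin-xunfeiListenSpeaking.js`, which is not shown in this commit. The snippet below is therefore only an illustrative sketch that calls the Android native layer directly through `cordova.exec`, using the service name and action/argument order found in `XunfeiListenSpeaking.java`. On iOS the native service is registered as `Speech` with different method names, so a real app should go through the plugin's own JS wrapper.

```js
// Illustrative only -- the action names and argument order come from
// XunfeiListenSpeaking.execute(); the wrapper in www/ is the supported entry point.
document.addEventListener('deviceready', function () {
  // Start dictation: args are [showDialog, usePunctuation]
  cordova.exec(
    function (text) { console.log('recognized: ' + text); },    // success: final recognized text
    function (err)  { console.error('listen error: ' + err); }, // error: iFlytek error description
    'XunfeiListenSpeaking', 'startListen', [true, true]
  );

  // Speak a sentence through the cloud TTS engine
  cordova.exec(
    function (msg) { console.log(msg); },
    function (err) { console.error(err); },
    'XunfeiListenSpeaking', 'startSpeak', ['你好,科大讯飞']
  );
});
```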

package.json Normal file

@ -0,0 +1,19 @@
{
"name": "cordova-plugin-xunfeiListenSpeaking",
"version": "0.0.1",
"cordova": {
"id": "cordova-plugin-xunfeiListenSpeaking",
"platforms": [
"android",
"ios"
]
},
"keywords": [
"ecosystem:cordova",
"cordova-android",
"cordova-ios"
],
"author": "cc",
"license": "ISC",
"description": ""
}

plugin.xml Normal file

@ -0,0 +1,275 @@
<?xml version='1.0' encoding='utf-8'?>
<plugin id="cordova-plugin-xunfeiListenSpeaking" version="0.0.1" xmlns="http://apache.org/cordova/ns/plugins/1.0" xmlns:android="http://schemas.android.com/apk/res/android">
<name>cordova-plugin-xunfeiListenSpeaking</name>
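<!-- Note: the Android and iOS implementations register different native feature names
     ("XunfeiListenSpeaking" below and "Speech" in the ios platform section), but both
     clobber the same JavaScript namespace, xunfeiListenSpeaking. -->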
<platform name = "android">
<js-module name="xunfeiListenSpeaking" src="www/cordova-plugin-xunfeiListenSpeaking.js">
<clobbers target="xunfeiListenSpeaking" />
</js-module>
<config-file target="res/xml/config.xml" parent="/*">
<feature name="XunfeiListenSpeaking" >
<param name="android-package" value="com.thomas.xunfeilistenspeaking.XunfeiListenSpeaking"/>
</feature>
</config-file>
<config-file target="AndroidManifest.xml" parent="/manifest">
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED"/>
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.READ_CONTACTS" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_SETTINGS" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<!-- Mobile analytics (iFlytek Sunflower) -->
<meta-data
android:name="IFLYTEK_APPKEY"
android:value="'5b553cd9'" />
<meta-data
android:name="IFLYTEK_CHANNEL"
android:value="Android_Demo" />
</config-file>
<config-file target="AndroidManifest.xml" parent="/manifest/application">
<activity android:name="com.thomas.xunfeilistenspeaking.XunfeiDialogActivity"
android:theme="@android:style/Theme.Translucent.NoTitleBar">
</activity>
</config-file>
<!--assets-->
<source-file src="src/android/assets/iattest.wav" target-dir="assets"/>
<source-file src="src/android/assets/call.bnf" target-dir="assets"/>
<source-file src="src/android/assets/grammar_sample.abnf" target-dir="assets"/>
<source-file src="src/android/assets/keys" target-dir="assets"/>
<source-file src="src/android/assets/userwords" target-dir="assets"/>
<source-file src="src/android/assets/iflytek/voice_bg.9.png" target-dir="assets/iflytek"/>
<source-file src="src/android/assets/iflytek/voice_empty.png" target-dir="assets/iflytek"/>
<source-file src="src/android/assets/iflytek/voice_full.png" target-dir="assets/iflytek"/>
<source-file src="src/android/assets/iflytek/waiting.png" target-dir="assets/iflytek"/>
<source-file src="src/android/assets/iflytek/warning.png" target-dir="assets/iflytek"/>
<source-file src="src/android/assets/iflytek/recognize.xml" target-dir="assets/iflytek"/>
<!--lib-->
<source-file src="src/android/libs/Msc.jar" target-dir="libs"/>
<source-file src="src/android/libs/Sunflower.jar" target-dir="libs"/>
<source-file src="src/android/libs/armeabi/libmsc.so" target-dir="libs/armeabi"/>
<source-file src="src/android/libs/arm64-v8a/libmsc.so" target-dir="libs/arm64-v8a"/>
<source-file src="src/android/libs/armeabi-v7a/libmsc.so" target-dir="libs/armeabi-v7a"/>
<!--layout-->
<source-file src="src/android/res/layout/activity_xunfei_dialog.xml" target-dir="res/layout"/>
<!--xml-->
<source-file src="src/android/res/xml/iat_setting.xml" target-dir="res/xml"/>
<source-file src="src/android/res/xml/ise_settings.xml" target-dir="res/xml"/>
<source-file src="src/android/res/xml/tts_setting.xml" target-dir="res/xml"/>
<source-file src="src/android/res/xml/understand_setting.xml" target-dir="res/xml"/>
<!--src-->
<source-file src="src/android/src/com/thomas/xunfeilistenspeaking/XunfeiListenSpeaking.java" target-dir="src/com/thomas/xunfeilistenspeaking"/>
<source-file src="src/android/src/com/thomas/xunfeilistenspeaking/IatSettings.java" target-dir="src/com/thomas/xunfeilistenspeaking"/>
<source-file src="src/android/src/com/thomas/xunfeilistenspeaking/JsonParser.java" target-dir="src/com/thomas/xunfeilistenspeaking"/>
<source-file src="src/android/src/com/thomas/xunfeilistenspeaking/SettingTextWatcher.java" target-dir="src/com/thomas/xunfeilistenspeaking"/>
<source-file src="src/android/src/com/thomas/xunfeilistenspeaking/XunfeiDialogActivity.java" target-dir="src/com/thomas/xunfeilistenspeaking"/>
<!--res-->
<config-file target="res/values/strings.xml" parent="/resources">
<!--<string name="app_name">讯飞语音示例</string>-->
<!-- Replace with the appid obtained from the iFlytek open platform (voicecloud) -->
<string name="app_id">584e7225</string>
<string name="example_explain">本示例为讯飞语音Android平台开发者提供语音听写、语法识别、语义理解和语音合成等代码样例旨在让用户能够依据该示例快速开发出基于语音接口的应用程序。</string>
<string name="text_tts_source">科大讯飞作为中国最大的智能语音技术提供商在智能语音技术领域有着长期的研究积累并在中文语音合成、语音识别、口语评测等多项技术上拥有国际领先的成果。科大讯飞是我国唯一以语音技术为产业化方向的“国家863计划成果产业化基地”…</string>
<string name="text_tts_source_en">iFLYTEK is a national key software enterprise dedicated to the research of intelligent speech and language technologies, development of software and chip products, provision of speech information services, and integration of E-government systems. The intelligent speech technology of iFLYTEK, the core technology of the company, represents the top level in the world.</string>
<string name="text_isr_abnf_hint">\t上传内容为\n\t#ABNF 1.0 gb2312;\n\tlanguage zh-CN;\n\tmode voice;\n\troot $main;\n\t$main = $place1 到$place2 ;\n\t$place1 = 北京 | 武汉 | 南京 | 天津 | 东京;\n\t$place2 = 上海 | 合肥;</string>
<string name="text_understand_hint">\t您可以说\n\t今天的天气怎么样?\n\t北京到上海的火车?\n\t有什么好吃的?\n\t上海外滩有哪些酒店?\n\n\t更多语义请登录\n\thttp://osp.voicecloud.cn/ \n\t配置您的专属语义吧!</string>
<!-- Dictation -->
<string name="text_begin">请开始说话…</string>
<string name="text_begin_recognizer">开始音频流识别</string>
<string name="text_upload_contacts">上传联系人</string>
<string name="text_upload_userwords">上传用户词表</string>
<string name="text_upload_success">上传成功</string>
<string name="text_userword_empty">词表下载失败或内容为空</string>
<string name="text_download_success">下载成功</string>
<string name="pref_key_iat_show">iat_show</string>
<string name="pref_title_iat_show">显示听写界面</string>
<string name="pref_title_iat_dwa">结果动态修正</string>
<string name="xunfei_cancel_listen">取消语音</string>
<!-- Synthesis -->
<string-array name="engine_entries">
<item>本地合成</item>
<item>在线合成</item>
</string-array>
<string-array name="engine_values">
<item>local</item>
<item>cloud</item>
</string-array>
<string-array name="voicer_cloud_entries">
<item>小燕—女青、中英、普通话</item>
<item>小宇—男青、中英、普通话</item>
<item>凯瑟琳—女青、英</item>
<item>亨利—男青、英</item>
<item>玛丽—女青、英</item>
<item>小研—女青、中英、普通话</item>
<item>小琪—女青、中英、普通话</item>
<item>小峰—男青、中英、普通话</item>
<item>小梅—女青、中英、粤语</item>
<item>小莉—女青、中英、台湾普通话</item>
<item>小蓉—女青、中、四川话</item>
<item>小芸—女青、中、东北话</item>
<item>小坤—男青、中、河南话</item>
<item>小强—男青、中、湖南话</item>
<item>小莹—女青、中、陕西话</item>
<item>小新—男童、中、普通话</item>
<item>楠楠—女童、中、普通话</item>
<item>老孙—男老、中、普通话</item>
</string-array>
<string-array name="voicer_cloud_values">
<item>xiaoyan</item>
<item>xiaoyu</item>
<item>catherine</item>
<item>henry</item>
<item>vimary</item>
<item>vixy</item>
<item>xiaoqi</item>
<item>vixf</item>
<item>xiaomei</item>
<item>xiaolin</item>
<item>xiaorong</item>
<item>xiaoqian</item>
<item>xiaokun</item>
<item>xiaoqiang</item>
<item>vixying</item>
<item>xiaoxin</item>
<item>nannan</item>
<item>vils</item>
</string-array>
<string-array name="emot_entries">
<item>正常</item>
<item>高兴</item>
<item>悲伤</item>
<item>生气</item>
</string-array>
<string-array name="emot_values">
<item>neutral</item>
<item>happy</item>
<item>sad</item>
<item>angry</item>
</string-array>
<string-array name="stream_entries">
<item>通话</item>
<item>系统</item>
<item>铃声</item>
<item>音乐</item>
<item>闹铃</item>
<item>通知</item>
</string-array>
<string-array name="stream_values">
<item>0</item>
<item>1</item>
<item>2</item>
<item>3</item>
<item>4</item>
<item>5</item>
</string-array>
<string formatted="false" name="tts_toast_format">缓冲进度为%d%%,播放进度为%d%%</string>
<!-- Language -->
<string-array name="language_entries">
<item>普通话</item>
<item>粤语</item>
<item>河南话</item>
<item>英语</item>
</string-array>
<string-array name="language_values">
<item>mandarin</item>
<item>cantonese</item>
<item>henanese</item>
<item>en_us</item>
</string-array>
<!-- Speech evaluation -->
<string name="text_en_word">"[word]\napple\nbanana\norange"</string>
<string name="text_en_sentence">"The quick brown fox jumps over the lazy dog."</string>
<string name="text_cn_syllable">"知,痴,是"</string>
<string name="text_cn_word">"磁铁,率领,脆弱,动手,古筝"</string>
<string name="text_cn_sentence">"一座座雪峰插入云霄,峰顶银光闪闪,大大小小的湖泊,像颗颗宝石镶嵌在彩带般的沟谷中。"</string>
<string-array name="ise_language_entries">
<item>英语</item>
<item>汉语</item>
</string-array>
<string-array name="ise_language_values">
<item>en_us</item>
<item>zh_cn</item>
</string-array>
<string-array name="category_entries">
<item>单字</item>
<item>词语</item>
<item>句子</item>
</string-array>
<string-array name="category_values">
<item>read_syllable</item>
<item>read_word</item>
<item>read_sentence</item>
</string-array>
<string-array name="result_level_entries">
<item>plain</item>
<item>complete</item>
</string-array>
<!-- Punctuation -->
<string-array name="punc_entries">
<item>有标点</item>
<item>无标点</item>
</string-array>
<string-array name="punc_values">
<item>1</item>
<item>0</item>
</string-array>
<string-array name="dwa_entries">
<item>开启</item>
<item>关闭</item>
</string-array>
</config-file>
</platform>
<platform name="ios">
<js-module name="speech" src="www/Speech.js">
<!-- <clobbers target="navigator.speech" />-->
<clobbers target="xunfeiListenSpeaking" />
</js-module>
<config-file parent="/*" target="config.xml">
<feature name="Speech">
<param name="ios-package" value="CDVSpeech" />
</feature>
</config-file>
<config-file target="*-Info.plist" parent="NSMicrophoneUsageDescription">
<string></string>
</config-file>
<header-file src="src/ios/CDVSpeech.h" />
<source-file src="src/ios/CDVSpeech.m" />
<framework src="AVFoundation.framework" />
<framework src="AddressBook.framework" />
<framework src="AudioToolbox.framework" />
<framework src="SystemConfiguration.framework" />
<framework src="QuartzCore.framework" />
<framework src="libz.tbd" />
<framework src="libc++.tbd" />
<framework src="UIKit.framework" />
<framework src="CoreGraphics.framework" />
<framework src="Foundation.framework" />
<framework src="CoreTelephony.framework" />
<framework src="CoreLocation.framework" />
<framework src="Contacts.framework" />
<framework src="src/ios/iflyMSC.framework" custom="true" />
</platform>
</plugin>

src/android/assets/call.bnf Normal file

@ -0,0 +1,12 @@
#BNF+IAT 1.0 UTF-8;
!grammar call;
!slot <contact>;
!slot <callPre>;
!slot <callPhone>;
!slot <callTo>;
!start <callStart>;
<callStart>:[<callPre>][<callTo>]<contact><callPhone>|[<callPre>]<callPhone>[<callTo>]<contact>;
<contact>:张海洋;
<callPre>:我要|我想|我想要;
<callPhone>:打电话;
<callTo>:给;

src/android/assets/grammar_sample.abnf Normal file

@ -0,0 +1,8 @@
#ABNF 1.0 UTF-8;
language zh-CN;
mode voice;
root $main;
$main = $place1 到 $place2;
$place1 = 北京|武汉|南京|天津|东京;
$place2 = 上海|合肥;

Binary asset files not shown (audio sample and iFlytek dialog images).

src/android/assets/keys Normal file (binary, not shown)

src/android/assets/userwords Normal file

@ -0,0 +1 @@
{"userword":[{"name":"我的常用词","words":["佳晨实业","蜀南庭苑","高兰路","复联二"]},{"name":"我的好友","words":["李馨琪","鹿晓雷","张集栋","周家莉","叶震珂","熊泽萌"]}]}

src/android/libs/Msc.jar Normal file (binary, not shown)

Binary library files not shown (Sunflower.jar and libmsc.so for each ABI).

src/android/res/layout/activity_xunfei_dialog.xml Normal file

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent">
</LinearLayout>

strings.xml Normal file

@ -0,0 +1,155 @@
<resources>
<string name="app_name">XunfeiSpeaking</string>
<!--<string name="app_name">讯飞语音示例</string>-->
<!-- Replace with the appid obtained from the iFlytek open platform (voicecloud) -->
<string name="app_id">584e7225</string>
<string name="example_explain">本示例为讯飞语音Android平台开发者提供语音听写、语法识别、语义理解和语音合成等代码样例旨在让用户能够依据该示例快速开发出基于语音接口的应用程序。</string>
<string name="text_tts_source">科大讯飞作为中国最大的智能语音技术提供商在智能语音技术领域有着长期的研究积累并在中文语音合成、语音识别、口语评测等多项技术上拥有国际领先的成果。科大讯飞是我国唯一以语音技术为产业化方向的“国家863计划成果产业化基地”…</string>
<string name="text_tts_source_en">iFLYTEK is a national key software enterprise dedicated to the research of intelligent speech and language technologies, development of software and chip products, provision of speech information services, and integration of E-government systems. The intelligent speech technology of iFLYTEK, the core technology of the company, represents the top level in the world.</string>
<string name="text_isr_abnf_hint">\t上传内容为\n\t#ABNF 1.0 gb2312;\n\tlanguage zh-CN;\n\tmode voice;\n\troot $main;\n\t$main = $place1 到$place2 ;\n\t$place1 = 北京 | 武汉 | 南京 | 天津 | 东京;\n\t$place2 = 上海 | 合肥;</string>
<string name="text_understand_hint">\t您可以说\n\t今天的天气怎么样?\n\t北京到上海的火车?\n\t有什么好吃的?\n\t上海外滩有哪些酒店?\n\n\t更多语义请登录\n\thttp://osp.voicecloud.cn/ \n\t配置您的专属语义吧!</string>
<!-- Dictation -->
<string name="text_begin">请开始说话…</string>
<string name="text_begin_recognizer">开始音频流识别</string>
<string name="text_upload_contacts">上传联系人</string>
<string name="text_upload_userwords">上传用户词表</string>
<string name="text_upload_success">上传成功</string>
<string name="text_userword_empty">词表下载失败或内容为空</string>
<string name="text_download_success">下载成功</string>
<string name="pref_key_iat_show">iat_show</string>
<string name="pref_title_iat_show">显示听写界面</string>
<string name="pref_title_iat_dwa">结果动态修正</string>
<!-- Synthesis -->
<string-array name="engine_entries">
<item>本地合成</item>
<item>在线合成</item>
</string-array>
<string-array name="engine_values">
<item>local</item>
<item>cloud</item>
</string-array>
<string-array name="voicer_cloud_entries">
<item>小燕—女青、中英、普通话</item>
<item>小宇—男青、中英、普通话</item>
<item>凯瑟琳—女青、英</item>
<item>亨利—男青、英</item>
<item>玛丽—女青、英</item>
<item>小研—女青、中英、普通话</item>
<item>小琪—女青、中英、普通话</item>
<item>小峰—男青、中英、普通话</item>
<item>小梅—女青、中英、粤语</item>
<item>小莉—女青、中英、台湾普通话</item>
<item>小蓉—女青、中、四川话</item>
<item>小芸—女青、中、东北话</item>
<item>小坤—男青、中、河南话</item>
<item>小强—男青、中、湖南话</item>
<item>小莹—女青、中、陕西话</item>
<item>小新—男童、中、普通话</item>
<item>楠楠—女童、中、普通话</item>
<item>老孙—男老、中、普通话</item>
</string-array>
<string-array name="voicer_cloud_values">
<item>xiaoyan</item>
<item>xiaoyu</item>
<item>catherine</item>
<item>henry</item>
<item>vimary</item>
<item>vixy</item>
<item>xiaoqi</item>
<item>vixf</item>
<item>xiaomei</item>
<item>xiaolin</item>
<item>xiaorong</item>
<item>xiaoqian</item>
<item>xiaokun</item>
<item>xiaoqiang</item>
<item>vixying</item>
<item>xiaoxin</item>
<item>nannan</item>
<item>vils</item>
</string-array>
<string-array name="emot_entries">
<item>正常</item>
<item>高兴</item>
<item>悲伤</item>
<item>生气</item>
</string-array>
<string-array name="emot_values">
<item>neutral</item>
<item>happy</item>
<item>sad</item>
<item>angry</item>
</string-array>
<string-array name="stream_entries">
<item>通话</item>
<item>系统</item>
<item>铃声</item>
<item>音乐</item>
<item>闹铃</item>
<item>通知</item>
</string-array>
<string-array name="stream_values">
<item>0</item>
<item>1</item>
<item>2</item>
<item>3</item>
<item>4</item>
<item>5</item>
</string-array>
<string formatted="false" name="tts_toast_format">缓冲进度为%d%%,播放进度为%d%%</string>
<!-- Language -->
<string-array name="language_entries">
<item>普通话</item>
<item>粤语</item>
<item>河南话</item>
<item>英语</item>
</string-array>
<string-array name="language_values">
<item>mandarin</item>
<item>cantonese</item>
<item>henanese</item>
<item>en_us</item>
</string-array>
<!-- Speech evaluation -->
<string name="text_en_word">"[word]\napple\nbanana\norange"</string>
<string name="text_en_sentence">"The quick brown fox jumps over the lazy dog."</string>
<string name="text_cn_syllable">"知,痴,是"</string>
<string name="text_cn_word">"磁铁,率领,脆弱,动手,古筝"</string>
<string name="text_cn_sentence">"一座座雪峰插入云霄,峰顶银光闪闪,大大小小的湖泊,像颗颗宝石镶嵌在彩带般的沟谷中。"</string>
<string-array name="ise_language_entries">
<item>英语</item>
<item>汉语</item>
</string-array>
<string-array name="ise_language_values">
<item>en_us</item>
<item>zh_cn</item>
</string-array>
<string-array name="category_entries">
<item>单字</item>
<item>词语</item>
<item>句子</item>
</string-array>
<string-array name="category_values">
<item>read_syllable</item>
<item>read_word</item>
<item>read_sentence</item>
</string-array>
<string-array name="result_level_entries">
<item>plain</item>
<item>complete</item>
</string-array>
<!-- Punctuation -->
<string-array name="punc_entries">
<item>有标点</item>
<item>无标点</item>
</string-array>
<string-array name="punc_values">
<item>1</item>
<item>0</item>
</string-array>
<string-array name="dwa_entries">
<item>开启</item>
<item>关闭</item>
</string-array>
</resources>

src/android/res/xml/iat_setting.xml Normal file

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android" >
<ListPreference
android:key="iat_language_preference"
android:title="语言设置"
android:entries="@array/language_entries"
android:entryValues="@array/language_values"
android:summary="支持:普通话,粤语,河南话,英语 "
android:defaultValue="mandarin" />
<EditTextPreference
android:key="iat_vadbos_preference"
android:title="前端点超时"
android:dialogTitle="请输入时间(0-10000)ms"
android:summary="默认值短信转写5000其他4000"
android:defaultValue="5000" />
<EditTextPreference
android:key="iat_vadeos_preference"
android:title="后端点超时"
android:dialogTitle="请输入时间(0-10000)ms"
android:summary="默认值短信转写1800其他700 "
android:defaultValue="1800" />
<ListPreference
android:key="iat_punc_preference"
android:title="标点符号"
android:entries="@array/punc_entries"
android:entryValues="@array/punc_values"
android:summary="默认值:有标点 "
android:defaultValue="1" />
<CheckBoxPreference
android:key="@string/pref_key_iat_show"
android:title="@string/pref_title_iat_show"
android:defaultValue="true" />
</PreferenceScreen>

src/android/res/xml/ise_settings.xml Normal file

@ -0,0 +1,47 @@
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android" >
<ListPreference
android:key="language"
android:title="评测语种"
android:summary=""
android:entries="@array/ise_language_entries"
android:entryValues="@array/ise_language_values"
android:defaultValue="zh_cn"/>
<ListPreference
android:key="category"
android:title="评测题型"
android:summary=""
android:entries="@array/category_entries"
android:entryValues="@array/category_values"
android:defaultValue="read_sentence"/>
<ListPreference
android:key="result_level"
android:title="结果等级"
android:summary=""
android:entries="@array/result_level_entries"
android:entryValues="@array/result_level_entries"
android:defaultValue="complete"/>
<EditTextPreference
android:key="vad_bos"
android:title="前端点超时"
android:summary=""
android:defaultValue="5000"/>
<EditTextPreference
android:key="vad_eos"
android:title="后端点超时"
android:summary=""
android:defaultValue="1800"/>
<EditTextPreference
android:key="speech_timeout"
android:title="评测超时"
android:summary=""
android:defaultValue="-1"/>
</PreferenceScreen>

src/android/res/xml/tts_setting.xml Normal file

@ -0,0 +1,29 @@
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android" >
<EditTextPreference
android:dialogTitle="请输入语速:在线(0-100)本地(0-200)"
android:key="speed_preference"
android:defaultValue="50"
android:summary="默认值50 "
android:title="语速" />
<EditTextPreference
android:dialogTitle="请输入音调(0-100)"
android:key="pitch_preference"
android:defaultValue="50"
android:summary="默认值50 "
android:title="音调" />
<EditTextPreference
android:dialogTitle="请输入音量(0-100)"
android:key="volume_preference"
android:defaultValue="50"
android:summary="默认值50 "
android:title="音量" />
<ListPreference
android:defaultValue="3"
android:entries="@array/stream_entries"
android:entryValues="@array/stream_values"
android:key="stream_preference"
android:title="音频流类型" />
</PreferenceScreen>

src/android/res/xml/understand_setting.xml Normal file

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android" >
<ListPreference
android:key="understander_language_preference"
android:title="语言设置"
android:entries="@array/language_entries"
android:entryValues="@array/language_values"
android:summary="支持:普通话,粤语,河南话,英语 "
android:defaultValue="mandarin" />
<EditTextPreference
android:key="understander_vadbos_preference"
android:title="前端点超时"
android:dialogTitle="请输入时间(0-10000)ms"
android:summary="默认值短信转写5000其他4000"
android:defaultValue="4000" />
<EditTextPreference
android:key="understander_vadeos_preference"
android:title="后端点超时"
android:dialogTitle="请输入时间(0-10000)ms"
android:summary="默认值短信转写1800其他700 "
android:defaultValue="700" />
<ListPreference
android:key="understander_punc_preference"
android:title="标点符号"
android:entries="@array/punc_entries"
android:entryValues="@array/punc_values"
android:summary="默认值:有标点 "
android:defaultValue="1" />
<!-- <CheckBoxPreference -->
<!-- android:key="nbest_preference" -->
<!-- android:title="@string/set_multiple_candidate_title" -->
<!-- android:summary="@string/set_multiple_candidate_summary" -->
<!-- android:defaultValue="false" /> -->
</PreferenceScreen>

src/android/src/com/thomas/xunfeilistenspeaking/IatSettings.java Normal file

@ -0,0 +1,39 @@
package com.thomas.xunfeilistenspeaking;
import android.os.Bundle;
import android.preference.EditTextPreference;
import android.preference.Preference;
import android.preference.Preference.OnPreferenceChangeListener;
import android.preference.PreferenceActivity;
import android.view.Window;
import com.thomas.xunfeilistenspeaking.SettingTextWatcher;
/**
 * Dictation settings screen.
 */
public class IatSettings extends PreferenceActivity implements OnPreferenceChangeListener {
public static final String PREFER_NAME = "com.iflytek.setting";
private EditTextPreference mVadbosPreference;
private EditTextPreference mVadeosPreference;
@SuppressWarnings("deprecation")
public void onCreate(Bundle savedInstanceState) {
requestWindowFeature(Window.FEATURE_NO_TITLE);
super.onCreate(savedInstanceState);
getPreferenceManager().setSharedPreferencesName(PREFER_NAME);
// addPreferencesFromResource(R.xml.iat_setting);
addPreferencesFromResource(getResources().getIdentifier("iat_setting","xml",getPackageName()));
mVadbosPreference = (EditTextPreference)findPreference("iat_vadbos_preference");
mVadbosPreference.getEditText().addTextChangedListener(new SettingTextWatcher(IatSettings.this,mVadbosPreference,0,10000));
mVadeosPreference = (EditTextPreference)findPreference("iat_vadeos_preference");
mVadeosPreference.getEditText().addTextChangedListener(new SettingTextWatcher(IatSettings.this,mVadeosPreference,0,10000));
}
@Override
public boolean onPreferenceChange(Preference preference, Object newValue) {
return true;
}
}

src/android/src/com/thomas/xunfeilistenspeaking/JsonParser.java Normal file

@ -0,0 +1,95 @@
package com.thomas.xunfeilistenspeaking;
import org.json.JSONArray;
import org.json.JSONObject;
import org.json.JSONTokener;
/**
 * Parser for iFlytek JSON recognition results.
 */
public class JsonParser {
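// Shape of the iFlytek dictation JSON this class parses (field names are taken from the code
// below; the sample values are illustrative only):
//   {"sn":1,"ws":[{"cw":[{"w":"今天","sc":0}]},{"cw":[{"w":"天气","sc":0}]}]}
//   ws = word segments, cw = candidate words, w = word text, sc = confidence score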
public static String parseIatResult(String json) {
StringBuffer ret = new StringBuffer();
try {
JSONTokener tokener = new JSONTokener(json);
JSONObject joResult = new JSONObject(tokener);
JSONArray words = joResult.getJSONArray("ws");
for (int i = 0; i < words.length(); i++) {
// By default, use the first candidate word of each result
JSONArray items = words.getJSONObject(i).getJSONArray("cw");
JSONObject obj = items.getJSONObject(0);
ret.append(obj.getString("w"));
// To get multiple candidates, iterate over the remaining items in the array
// for(int j = 0; j < items.length(); j++)
// {
// JSONObject obj = items.getJSONObject(j);
// ret.append(obj.getString("w"));
// }
}
} catch (Exception e) {
e.printStackTrace();
}
return ret.toString();
}
public static String parseGrammarResult(String json) {
StringBuffer ret = new StringBuffer();
try {
JSONTokener tokener = new JSONTokener(json);
JSONObject joResult = new JSONObject(tokener);
JSONArray words = joResult.getJSONArray("ws");
for (int i = 0; i < words.length(); i++) {
JSONArray items = words.getJSONObject(i).getJSONArray("cw");
for(int j = 0; j < items.length(); j++)
{
JSONObject obj = items.getJSONObject(j);
if(obj.getString("w").contains("nomatch"))
{
ret.append("没有匹配结果.");
return ret.toString();
}
ret.append("【结果】" + obj.getString("w"));
ret.append("【置信度】" + obj.getInt("sc"));
ret.append("\n");
}
}
} catch (Exception e) {
e.printStackTrace();
ret.append("没有匹配结果.");
}
return ret.toString();
}
public static String parseLocalGrammarResult(String json) {
StringBuffer ret = new StringBuffer();
try {
JSONTokener tokener = new JSONTokener(json);
JSONObject joResult = new JSONObject(tokener);
JSONArray words = joResult.getJSONArray("ws");
for (int i = 0; i < words.length(); i++) {
JSONArray items = words.getJSONObject(i).getJSONArray("cw");
for(int j = 0; j < items.length(); j++)
{
JSONObject obj = items.getJSONObject(j);
if(obj.getString("w").contains("nomatch"))
{
ret.append("没有匹配结果.");
return ret.toString();
}
ret.append("【结果】" + obj.getString("w"));
ret.append("\n");
}
}
ret.append("【置信度】" + joResult.optInt("sc"));
} catch (Exception e) {
e.printStackTrace();
ret.append("没有匹配结果.");
}
return ret.toString();
}
}

src/android/src/com/thomas/xunfeilistenspeaking/SettingTextWatcher.java Normal file

@ -0,0 +1,71 @@
package com.thomas.xunfeilistenspeaking;
import android.content.Context;
import android.preference.EditTextPreference;
import android.text.Editable;
import android.text.TextUtils;
import android.text.TextWatcher;
import android.widget.Toast;
import java.util.regex.Pattern;
/**
 * Restricts an EditTextPreference input to a numeric value within a given range.
 */
public class SettingTextWatcher implements TextWatcher {
private int editStart ;
private int editCount ;
private EditTextPreference mEditTextPreference;
int minValue; // minimum allowed value
int maxValue; // maximum allowed value
private Context mContext;
public SettingTextWatcher(Context context, EditTextPreference e, int min, int max) {
mContext = context;
mEditTextPreference = e;
minValue = min;
maxValue = max;
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
// Log.e("demo", "onTextChanged start:"+start+" count:"+count+" before:"+before);
editStart = start;
editCount = count;
}
@Override
public void beforeTextChanged(CharSequence s, int start, int count,int after) {
// Log.e("demo", "beforeTextChanged start:"+start+" count:"+count+" after:"+after);
}
@Override
public void afterTextChanged(Editable s) {
if (TextUtils.isEmpty(s)) {
return;
}
String content = s.toString();
// Log.e("demo", "content:"+content);
if (isNumeric(content)) {
int num = Integer.parseInt(content);
if (num > maxValue || num < minValue) {
s.delete(editStart, editStart+editCount);
mEditTextPreference.getEditText().setText(s);
Toast.makeText(mContext, "超出有效值范围", Toast.LENGTH_SHORT).show();
}
}else {
s.delete(editStart, editStart+editCount);
mEditTextPreference.getEditText().setText(s);
Toast.makeText(mContext, "只能输入数字哦", Toast.LENGTH_SHORT).show();
}
}
/**
 * Regular-expression check: whether the string consists of digits only.
 */
public static boolean isNumeric(String str){
Pattern pattern = Pattern.compile("[0-9]*");
return pattern.matcher(str).matches();
}
};

src/android/src/com/thomas/xunfeilistenspeaking/XunfeiDialogActivity.java Normal file

@ -0,0 +1,299 @@
package com.thomas.xunfeilistenspeaking;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.util.Log;
import android.widget.Toast;
import com.iflytek.cloud.*;
import com.iflytek.cloud.ui.RecognizerDialog;
import com.iflytek.cloud.ui.RecognizerDialogListener;
import com.iflytek.sunflower.FlowerCollector;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.HashMap;
import java.util.LinkedHashMap;
/**
* Created by Thomas.Wang on 2017/2/10.
*/
public class XunfeiDialogActivity extends Activity{
private static String TAG = XunfeiDialogActivity.class.getSimpleName();
// Speech recognizer (dictation) object
private SpeechRecognizer mIat;
// Built-in dictation UI dialog
private RecognizerDialog mIatDialog;
// Dictation results, keyed by sentence number (sn)
private HashMap<String, String> mIatResults = new LinkedHashMap<String, String>();
// private EditText mResultText;
private Toast mToast;
private SharedPreferences mSharedPreferences;
// Engine type (cloud by default)
private String mEngineType = SpeechConstant.TYPE_CLOUD;
private Handler mHandler = new Handler();
private boolean isShowDialog;
private String punc = "1";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(getResources().getIdentifier("activity_xunfei_dialog","layout",getPackageName()));
isShowDialog = getIntent().getBooleanExtra("isShowDialog",false);
punc = getIntent().getStringExtra("punc");
mToast = Toast.makeText(this, "", Toast.LENGTH_SHORT);
mIat = SpeechRecognizer.createRecognizer(this, mInitListener);
// Initialize the dictation dialog. If only the UI-based dictation is used, creating a SpeechRecognizer is not required.
// When using the UI dictation, place the layout files and image resources as described in notice.txt in the SDK directory.
mIatDialog = new RecognizerDialog(this, mInitListener);
mIatDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialog) {
finishThisActivity(RESULT_CANCELED,getString(getId("xunfei_cancel_listen","string")));
}
});
mSharedPreferences = getSharedPreferences(IatSettings.PREFER_NAME,
Activity.MODE_PRIVATE);
startListen();
}
private void stopListen(){
if (mIat.isListening()) {
mIat.stopListening();
}
}
private void startListen() {
if (mIat.isListening()) {
mIat.stopListening();
}
// Analytics: record the start of a dictation session
FlowerCollector.onEvent(this, "iat_recognize");
mIatResults.clear();
// 设置参数
setParam();
// boolean isShowDialog = true;
if (isShowDialog) {
// 显示听写对话框
mIatDialog.setListener(mRecognizerDialogListener);
mIatDialog.show();
// showTip(this.getString(getId("text_begin","string")));
}else {
// 不显示听写对话框
ret = mIat.startListening(mRecognizerListener);
if (ret != ErrorCode.SUCCESS) {
// showTip("听写失败,错误码:" + ret);
finishThisActivity(RESULT_CANCELED,"听写失败,错误码:" + ret);
} else {
// showTip(this.getString(getId("text_begin","string")));
}
}
}
private int getId(String idName,String type){
return getResources().getIdentifier(idName, type,getPackageName());
}
private void showTip(final String str) {
mHandler.post(new Runnable() {
@Override
public void run() {
mToast.setText(str);
mToast.show();
}
});
}
int ret = 0; // return value of SDK calls
/**
 * Listener for the dictation UI dialog.
 */
private RecognizerDialogListener mRecognizerDialogListener = new RecognizerDialogListener() {
public void onResult(RecognizerResult results, boolean isLast) {
printResult(results,isLast);
}
/**
 * Recognition error callback.
 */
public void onError(SpeechError error) {
// showTip(error.getPlainDescription(true));
finishThisActivity(RESULT_CANCELED,error.getPlainDescription(true));
}
};
private void finishThisActivity(int type ,String str){
Intent resultIntent = new Intent();
Bundle bundle = new Bundle();
bundle.putString("result", str);
resultIntent.putExtras(bundle);
setResult(type, resultIntent);
finish();
}
/**
 * Initialization listener (SpeechRecognizer init callback).
 */
private InitListener mInitListener = new InitListener() {
@Override
public void onInit(int code) {
Log.d(TAG, "SpeechRecognizer init() code = " + code);
if (code != ErrorCode.SUCCESS) {
showTip("初始化失败,错误码:" + code);
}
}
};
/**
 * Dictation listener (no-UI recognition callbacks).
 */
private RecognizerListener mRecognizerListener = new RecognizerListener() {
@Override
public void onBeginOfSpeech() {
// The SDK's internal recorder is ready; the user can start speaking
// showTip("开始说话");
}
@Override
public void onError(SpeechError error) {
// Note:
// Error code 10118 ("no speech") may mean the recording permission is denied; prompt the user to grant the app the RECORD_AUDIO permission.
// When using local (offline) capabilities via the iFlytek "Yuji" app, the user needs to grant the recording permission to that app.
// showTip(error.getPlainDescription(true));
finishThisActivity(RESULT_CANCELED,error.getPlainDescription(true));
}
@Override
public void onEndOfSpeech() {
// End of speech detected; recognition is in progress and no more audio is accepted
// showTip("结束说话");
}
@Override
public void onResult(RecognizerResult results, boolean isLast) {
Log.d(TAG, results.getResultString());
printResult(results,isLast);
}
@Override
public void onVolumeChanged(int volume, byte[] data) {
// showTip("当前正在说话,音量大小:" + volume);
// showTip("当前正在说话...");
// Log.d(TAG, "返回音频数据:" + data.length);
}
@Override
public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {
// The code below obtains the cloud session id; when something goes wrong, the id can be given to technical support to look up the session log and locate the cause.
// When local (offline) capabilities are used, the session id is null.
// if (SpeechEvent.EVENT_SESSION_ID == eventType) {
// String sid = obj.getString(SpeechEvent.KEY_EVENT_SESSION_ID);
// Log.d(TAG, "session id =" + sid);
// }
}
};
private void printResult(RecognizerResult results,boolean isLast) {
String text = JsonParser.parseIatResult(results.getResultString());
String sn = null;
// Read the sn (sentence number) field from the JSON result
try {
JSONObject resultJson = new JSONObject(results.getResultString());
sn = resultJson.optString("sn");
} catch (JSONException e) {
e.printStackTrace();
}
mIatResults.put(sn, text);
StringBuffer resultBuffer = new StringBuffer();
for (String key : mIatResults.keySet()) {
resultBuffer.append(mIatResults.get(key));
}
Log.d(TAG, "音频中文:" + resultBuffer.toString());
// mResultText.setText(resultBuffer.toString());
// mResultText.setSelection(mResultText.length());
if (isLast){
Intent resultIntent = new Intent();
Bundle bundle = new Bundle();
bundle.putString("result", resultBuffer.toString());
resultIntent.putExtras(bundle);
this.setResult(RESULT_OK, resultIntent);
finish();
}
}
/**
 * Configure the dictation parameters.
 */
public void setParam() {
// 清空参数
mIat.setParameter(SpeechConstant.PARAMS, null);
// 设置听写引擎
mIat.setParameter(SpeechConstant.ENGINE_TYPE, mEngineType);
// 设置返回结果格式
mIat.setParameter(SpeechConstant.RESULT_TYPE, "json");
String lag = mSharedPreferences.getString("iat_language_preference",
"mandarin");
if (lag.equals("en_us")) {
// 设置语言
mIat.setParameter(SpeechConstant.LANGUAGE, "en_us");
} else {
// 设置语言
mIat.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
// 设置语言区域
mIat.setParameter(SpeechConstant.ACCENT, lag);
}
// 设置语音前端点:静音超时时间即用户多长时间不说话则当做超时处理
mIat.setParameter(SpeechConstant.VAD_BOS, mSharedPreferences.getString("iat_vadbos_preference", "4000"));
// 设置语音后端点:后端点静音检测时间即用户停止说话多长时间内即认为不再输入 自动停止录音
mIat.setParameter(SpeechConstant.VAD_EOS, mSharedPreferences.getString("iat_vadeos_preference", "1000"));
// 设置标点符号,设置为"0"返回结果无标点,设置为"1"返回结果有标点
// mIat.setParameter(SpeechConstant.ASR_PTT, mSharedPreferences.getString("iat_punc_preference", "1"));
// mIat.setParameter(SpeechConstant.ASR_PTT, mSharedPreferences.getString("iat_punc_preference", punc));
mIat.setParameter(SpeechConstant.ASR_PTT, punc);
// 设置听写结果是否结果动态修正1则在听写过程中动态递增地返回结果否则只在听写结束之后返回最终结果
// 该参数暂时只对在线听写有效
// mIat.setParameter(SpeechConstant.ASR_DWA, mSharedPreferences.getString("iat_dwa_preference", "0"));
// 设置音频保存路径保存音频格式支持pcmwav设置路径为sd卡请注意WRITE_EXTERNAL_STORAGE权限
// AUDIO_FORMAT参数语记需要更新版本才能生效
mIat.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
mIat.setParameter(SpeechConstant.ASR_AUDIO_PATH, Environment.getExternalStorageDirectory() + "/msc/iat.wav");
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mIatDialog.isShowing()){
mIatDialog.dismiss();
}
}
}

src/android/src/com/thomas/xunfeilistenspeaking/XunfeiListenSpeaking.java Normal file

@ -0,0 +1,399 @@
package com.thomas.xunfeilistenspeaking;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.util.Log;
import android.widget.Toast;
import com.iflytek.cloud.*;
import com.iflytek.sunflower.FlowerCollector;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.HashMap;
import java.util.LinkedHashMap;
/**
* Created by Thomas.Wang on 17/2/9.
*/
public class XunfeiListenSpeaking extends CordovaPlugin{
private static String TAG = XunfeiListenSpeaking.class.getSimpleName();
private Context context;
private CallbackContext callbackContext;
private Toast mToast;
private Handler mHandler = new Handler();
private SpeechSynthesizer mTts;
// Speech recognizer (dictation) object
private SpeechRecognizer mIat;
private SharedPreferences mSharedPreferences;
// Engine type (cloud by default)
private String mEngineType = SpeechConstant.TYPE_CLOUD;
// Dictation results, keyed by sentence number (sn)
private HashMap<String, String> mIatResults = new LinkedHashMap<String, String>();
@Override
protected void pluginInitialize() {
super.pluginInitialize();
context = cordova.getActivity();
// SpeechUtility.createUtility(context, SpeechConstant.APPID +"=584e7225");
SpeechUtility.createUtility(context, SpeechConstant.APPID +"="+context.getString(getId("app_id","string")));
}
private int getId(String idName,String type){
return context.getResources().getIdentifier(idName, type,context.getPackageName());
}
private static final int DIALOG_ACTIVIT_CODE = 0;
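/**
 * Actions dispatched from the JavaScript layer (see execute() below):
 *   startListen(showDialog, usePunctuation) - dictation; the final text is returned via callbackContext.success()
 *   stopListen()                            - stop an in-progress dictation
 *   startSpeak(text)                        - cloud text-to-speech playback of the given text
 *   stopSpeak() / pauseSpeaking() / resumeSpeaking() - playback control
 * Missing or invalid arguments fall back to showDialog = true and punctuation enabled.
 */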
@Override
public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
this.callbackContext = callbackContext;
// Start dictation
if (action.equals("startListen")){
// boolean isShowDialog = args.getBoolean(0);
// String punc = args.getBoolean(1)?"1":"0";
boolean isShowDialog ;
try {
isShowDialog = args.getBoolean(0);
}catch (Exception e){
isShowDialog = true;
}
String punc;
try{
punc = args.getBoolean(1)?"1":"0";
}catch (Exception e){
punc = "1";
}
if (isShowDialog){
Intent intent = new Intent();
intent.setClass(context, XunfeiDialogActivity.class);
intent.putExtra("isShowDialog",isShowDialog);
intent.putExtra("punc",punc);
cordova.startActivityForResult( this,intent, DIALOG_ACTIVIT_CODE);
}else {
startListenWidthNotDialog(punc);
}
return true;
}
// Stop dictation
if (action.equals("stopListen")) {
stopListen();
return true;
}
// Start speaking (TTS)
if (action.equals("startSpeak")){
mToast = Toast.makeText(context, "", Toast.LENGTH_SHORT);
String speakMessage = args.getString(0).trim();
startSpeak(speakMessage);
return true;
}
// Stop speaking
if (action.equals("stopSpeak")){
stopSpeak();
return true;
}
// Pause speaking
if (action.equals("pauseSpeaking")){
pauseSpeaking();
return true;
}
// Resume speaking
if (action.equals("resumeSpeaking")){
resumeSpeaking();
return true;
}
return false;
}
private void stopListen(){
if (mIat!=null&&mIat.isListening()) {
mIat.stopListening();
}
}
int ret = 0; // return value of SDK calls
private void startListenWidthNotDialog(String punc){
mIat = SpeechRecognizer.createRecognizer(context, mInitListener);
mSharedPreferences = context.getSharedPreferences(IatSettings.PREFER_NAME,
Activity.MODE_PRIVATE);
if (mIat.isListening()) {
mIat.stopListening();
}
// Analytics: record the start of a dictation session
FlowerCollector.onEvent(context, "iat_recognize");
mIatResults.clear();
// 设置参数
setParam(punc);
// 不显示听写对话框
ret = mIat.startListening(mRecognizerListener);
if (ret != ErrorCode.SUCCESS) {
// showTip("听写失败,错误码:" + ret);
// finishThisActivity(RESULT_CANCELED,"听写失败,错误码:" + ret);
callbackContext.error("听写失败,错误码:" + ret);
} else {
// showTip(this.getString(getId("text_begin","string")));
}
}
/**
 * Initialization listener (SpeechRecognizer init callback).
 */
private InitListener mInitListener = new InitListener() {
@Override
public void onInit(int code) {
Log.d(TAG, "SpeechRecognizer init() code = " + code);
if (code != ErrorCode.SUCCESS) {
showTip("初始化失败,错误码:" + code);
}
}
};
/**
 * Dictation listener (no-UI recognition callbacks).
 */
private RecognizerListener mRecognizerListener = new RecognizerListener() {
@Override
public void onBeginOfSpeech() {
// 此回调表示sdk内部录音机已经准备好了用户可以开始语音输入
// showTip("开始说话");
}
@Override
public void onError(SpeechError error) {
// Tips
// 错误码10118(您没有说话)可能是录音机权限被禁需要提示用户打开应用的录音权限
// 如果使用本地功能语记需要提示用户开启语记的录音权限
// showTip(error.getPlainDescription(true));
// finishThisActivity(RESULT_CANCELED,error.getPlainDescription(true));
callbackContext.error(error.getPlainDescription(true));
}
@Override
public void onEndOfSpeech() {
// 此回调表示检测到了语音的尾端点已经进入识别过程不再接受语音输入
// showTip("结束说话");
}
@Override
public void onResult(RecognizerResult results, boolean isLast) {
Log.d(TAG, results.getResultString());
printResult(results,isLast);
}
@Override
public void onVolumeChanged(int volume, byte[] data) {
// showTip("当前正在说话,音量大小:" + volume);
// showTip("当前正在说话...");
// Log.d(TAG, "返回音频数据:" + data.length);
}
@Override
public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {
// 以下代码用于获取与云端的会话id当业务出错时将会话id提供给技术支持人员可用于查询会话日志定位出错原因
// 若使用本地能力会话id为null
// if (SpeechEvent.EVENT_SESSION_ID == eventType) {
// String sid = obj.getString(SpeechEvent.KEY_EVENT_SESSION_ID);
// Log.d(TAG, "session id =" + sid);
// }
}
};
private void printResult(RecognizerResult results, boolean isLast) {
String text = JsonParser.parseIatResult(results.getResultString());
String sn = null;
// 读取json结果中的sn字段
try {
JSONObject resultJson = new JSONObject(results.getResultString());
sn = resultJson.optString("sn");
} catch (JSONException e) {
e.printStackTrace();
}
mIatResults.put(sn, text);
StringBuffer resultBuffer = new StringBuffer();
for (String key : mIatResults.keySet()) {
resultBuffer.append(mIatResults.get(key));
}
Log.d(TAG, "音频中文:" + resultBuffer.toString());
// mResultText.setText(resultBuffer.toString());
// mResultText.setSelection(mResultText.length());
// Intent resultIntent = new Intent();
// Bundle bundle = new Bundle();
// bundle.putString("result", resultBuffer.toString());
// resultIntent.putExtras(bundle);
// this.setResult(RESULT_OK, resultIntent);
// finish();
if (isLast)
callbackContext.success(resultBuffer.toString());
}
/**
 * Configure the dictation parameters.
 */
public void setParam(String punc) {
// 清空参数
mIat.setParameter(SpeechConstant.PARAMS, null);
// 设置听写引擎
mIat.setParameter(SpeechConstant.ENGINE_TYPE, mEngineType);
// 设置返回结果格式
mIat.setParameter(SpeechConstant.RESULT_TYPE, "json");
String lag = mSharedPreferences.getString("iat_language_preference",
"mandarin");
if (lag.equals("en_us")) {
// 设置语言
mIat.setParameter(SpeechConstant.LANGUAGE, "en_us");
} else {
// 设置语言
mIat.setParameter(SpeechConstant.LANGUAGE, "zh_cn");
// 设置语言区域
mIat.setParameter(SpeechConstant.ACCENT, lag);
}
// 设置语音前端点:静音超时时间即用户多长时间不说话则当做超时处理
mIat.setParameter(SpeechConstant.VAD_BOS, mSharedPreferences.getString("iat_vadbos_preference", "4000"));
// 设置语音后端点:后端点静音检测时间即用户停止说话多长时间内即认为不再输入 自动停止录音
mIat.setParameter(SpeechConstant.VAD_EOS, mSharedPreferences.getString("iat_vadeos_preference", "1000"));
// 设置标点符号,设置为"0"返回结果无标点,设置为"1"返回结果有标点
// mIat.setParameter(SpeechConstant.ASR_PTT, mSharedPreferences.getString("iat_punc_preference", "1"));
// mIat.setParameter(SpeechConstant.ASR_PTT, mSharedPreferences.getString("iat_punc_preference", punc));
mIat.setParameter(SpeechConstant.ASR_PTT, punc);
// 设置音频保存路径保存音频格式支持pcmwav设置路径为sd卡请注意WRITE_EXTERNAL_STORAGE权限
// AUDIO_FORMAT参数语记需要更新版本才能生效
mIat.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
mIat.setParameter(SpeechConstant.ASR_AUDIO_PATH, Environment.getExternalStorageDirectory() + "/msc/iat.wav");
}
private void resumeSpeaking(){
mTts.resumeSpeaking();
}
private void pauseSpeaking(){
mTts.pauseSpeaking();
}
private void stopSpeak(){
if(mTts!=null&&mTts.isSpeaking()){
mTts.stopSpeaking();
}
}
private void startSpeak(String speakMessage) {
setSpeakParameter();
if (mTts.isSpeaking()){
mTts.stopSpeaking();
}
mTts.startSpeaking(speakMessage, mSynListener);
}
private void setSpeakParameter(){
if (mTts==null){
//1. Create the SpeechSynthesizer object; for local synthesis, pass an InitListener as the second argument
mTts = SpeechSynthesizer.createSynthesizer(context, null);
//2. Synthesis parameters; see the iFlytek MSC API manual (Android), SpeechSynthesizer
mTts.setParameter(SpeechConstant.VOICE_NAME, "xiaoyan");// voice name
mTts.setParameter(SpeechConstant.SPEED, "50");// speaking speed
mTts.setParameter(SpeechConstant.VOLUME, "80");// volume, range 0~100
mTts.setParameter(SpeechConstant.ENGINE_TYPE, SpeechConstant.TYPE_CLOUD); // use the cloud engine
// Save the synthesized audio (path can be customized); here it is saved to ./sdcard/iflytek.pcm
// Saving to the SD card requires the WRITE_EXTERNAL_STORAGE permission in AndroidManifest.xml
// Comment out the following line if the synthesized audio does not need to be saved
mTts.setParameter(SpeechConstant.TTS_AUDIO_PATH, "./sdcard/iflytek.pcm");
//3. Start synthesis
// mTts.startSpeaking("科大讯飞,让世界聆听我们的声音", mSynListener);
}
}
// Synthesis (TTS) listener
private SynthesizerListener mSynListener = new SynthesizerListener(){
// Called when the session ends; error is null when there is no error
public void onCompleted(SpeechError error) {
if (error == null) {
// showTip("播放完成");
callbackContext.success("播放完成");
} else if (error != null) {
showTip(error.getPlainDescription(true));
callbackContext.error(error.getPlainDescription(true));
}
}
// Buffering progress callback
// percent: buffering progress 0~100; beginPos/endPos: buffered range within the text; info: extra information
public void onBufferProgress(int percent, int beginPos, int endPos, String info) {}
// Playback started
public void onSpeakBegin() {
// showTip("开始播放");
}
// Playback paused
public void onSpeakPaused() {}
// Playback progress callback
// percent: playback progress 0~100; beginPos/endPos: played range within the text
public void onSpeakProgress(int percent, int beginPos, int endPos) {}
// Playback resumed
public void onSpeakResumed() {}
// Session event callback
public void onEvent(int arg0, int arg1, int arg2, Bundle arg3) {}
};
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case DIALOG_ACTIVIT_CODE:
if(resultCode == Activity.RESULT_OK){
Bundle bundle = data.getExtras();
callbackContext.success(bundle.getString("result"));
}else if (resultCode == Activity.RESULT_CANCELED){
Bundle bundle = data.getExtras();
callbackContext.error(bundle.getString("result"));
}
break;
}
}
private void showTip(final String str) {
mHandler.post(new Runnable() {
@Override
public void run() {
mToast.setText(str);
mToast.show();
}
});
}
}

src/ios/CDVSpeech.h Normal file

@ -0,0 +1,36 @@
//
// CDVSpeech.h
// ZJxunfeiDemo-OC
//
// Created by Edc.zhang on 2017/2/13.
// Copyright © 2017年 Edc.zhang. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <Cordova/CDV.h>
//#import "iflyMSC/iflyMSC.h"
#import "iflyMSC.framework/Headers/IFlyMSC.h"
@interface CDVSpeech : CDVPlugin <IFlySpeechRecognizerDelegate,IFlySpeechSynthesizerDelegate,IFlyRecognizerViewDelegate>{
}
@property (nonatomic, copy) NSString* appId;
@property (nonatomic, strong) NSString* callbackId;
@property (nonatomic, strong) IFlySpeechRecognizer* recognizer;
@property (nonatomic, strong) IFlySpeechSynthesizer* synthesizer;
@property(nonatomic,strong) IFlyRecognizerView *iflyRecognizerView;
- (void)startListening:(CDVInvokedUrlCommand*)command;
- (void)stopListening:(CDVInvokedUrlCommand*)command;
- (void)cancelListening:(CDVInvokedUrlCommand*)command;
- (void)startSpeaking:(CDVInvokedUrlCommand*)command;
- (void)pauseSpeaking:(CDVInvokedUrlCommand*)command;
- (void)resumeSpeaking:(CDVInvokedUrlCommand*)command;
- (void)stopSpeaking:(CDVInvokedUrlCommand*)command;
@end

src/ios/CDVSpeech.m Normal file

@ -0,0 +1,343 @@
//
// CDVSpeech.m
// ZJxunfeiDemo-OC
//
// Created by Edc.zhang on 2017/2/13.
// Copyright © 2017 Edc.zhang. All rights reserved.
//
#import "CDVSpeech.h"
#define STR_EVENT @"event"
#define STR_CODE @"code"
#define STR_MESSAGE @"message"
#define STR_VOLUME @"volume"
#define STR_RESULTS @"results"
#define STR_PROGRESS @"progress"
// always replace the appid and the SDK with what you get from voicecloud.cn
#define SPEECH_APP_ID @"589d270d"
@interface CDVSpeech()
- (void) fireEvent:(NSString*)event;
@end
#import "CDVSpeech.h"
@implementation CDVSpeech
- (void)login:(CDVInvokedUrlCommand*)command
{
self.callbackId = command.callbackId;
self.appId = SPEECH_APP_ID;
NSString *initString = [[NSString alloc] initWithFormat:@"appid=%@",self.appId];
[IFlySpeechUtility createUtility:initString];
}
#pragma mark -
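// Arguments of startListening, as read below: index 0 is a dictionary of extra iFlytek
// parameters (or NSNull), index 1 is a "1"/"0" flag for showing the built-in recognizer
// view ("0" = use the plain recognizer without UI), and index 2 is a "1"/"0" flag for
// punctuation in the result (asr_ptt).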
- (void)startListening:(CDVInvokedUrlCommand*)command
{
NSLog(@"Speech :: startListening");
NSDictionary* options = [command.arguments objectAtIndex:0];
// withDefault:[NSNull null]];
//UI
// BOOL isShowDialog = [command.arguments objectAtIndex:1];
NSString *isShowDialog = [NSString stringWithFormat:@"%@",[command.arguments objectAtIndex:1]];
NSString *isShowPunc = [NSString stringWithFormat:@"%@",[command.arguments objectAtIndex:2]];
if ([isShowDialog isEqualToString:@"0"]) {
if (!self.recognizer){
self.recognizer = [IFlySpeechRecognizer sharedInstance];
self.recognizer.delegate = self;
[self.recognizer setParameter:@"iat" forKey:@"domain"];
[self.recognizer setParameter:@"16000" forKey:@"sample_rate"];
[self.recognizer setParameter:@"700" forKey:@"vad_eos"];
[self.recognizer setParameter:@"0" forKey:@"plain_result"];
[self.recognizer setParameter:@"asr.pcm" forKey:@"asr_audio_path"];
//
if ([isShowPunc isEqualToString:@"0"]) {
[self.recognizer setParameter:@"0" forKey:@"asr_ptt"];
}
NSLog(@"Speech :: createRecognizer");
}
if ((NSNull *)options != [NSNull null]) {
NSArray *keys = [options allKeys];
for (NSString *key in keys) {
NSString *value = [options objectForKey:key];
[self.recognizer setParameter:value forKey:key];
}
}
//
if ([self.recognizer isListening]) {
[self.recognizer stopListening];
}
[self.recognizer startListening];
}else{
//
UIWindow *keyWindow = [UIApplication sharedApplication].keyWindow;
self.iflyRecognizerView = [[IFlyRecognizerView alloc] initWithCenter:keyWindow.center];
self.iflyRecognizerView.delegate = self;
[self.iflyRecognizerView setParameter: @"iat" forKey: [IFlySpeechConstant IFLY_DOMAIN]];
// asr_audio_path: audio save path; pass nil as the value to skip saving (defaults to the Documents directory)
[self.iflyRecognizerView setParameter:@"asrview.pcm " forKey:[IFlySpeechConstant ASR_AUDIO_PATH]];
if ([isShowPunc isEqualToString:@"0"]) {
[self.iflyRecognizerView setParameter:@"0" forKey:@"asr_ptt"];
}
[self.iflyRecognizerView start];
}
}
- (void)stopListening:(CDVInvokedUrlCommand*)command
{
NSLog(@"Speech :: stopListening");
[self.recognizer stopListening];
}
- (void)cancelListening:(CDVInvokedUrlCommand*)command
{
NSLog(@"Speech :: cancelListening");
[self.recognizer cancel];
[self.iflyRecognizerView cancel];
}
#pragma mark -
- (void)startSpeaking:(CDVInvokedUrlCommand*)command
{
NSString* text = [command.arguments objectAtIndex:0];
NSDictionary* options = [command.arguments objectAtIndex:1];
// withDefault:[NSNull null]];
NSLog(@"Speech :: startSpeaking - %@", text);
// [self.commandDelegate runInBackground:^{
if (!self.synthesizer){
self.synthesizer = [IFlySpeechSynthesizer sharedInstance];
self.synthesizer.delegate = self;
[self.synthesizer setParameter:@"50" forKey:[IFlySpeechConstant SPEED]];//, 0~100
[self.synthesizer setParameter:@"80" forKey:[IFlySpeechConstant VOLUME]];//; 0~100
[self.synthesizer setParameter:@"vixr" forKey:[IFlySpeechConstant VOICE_NAME]];//,xiaoyan
[self.synthesizer setParameter:@"8000" forKey: [IFlySpeechConstant SAMPLE_RATE]];//, 16000 8000;
[self.synthesizer setParameter:@"tts.pcm" forKey: [IFlySpeechConstant TTS_AUDIO_PATH]];
NSLog(@"Speech :: createSynthesizer");
}
if ((NSNull *)options != [NSNull null]) {
NSArray *keys = [options allKeys];
for (NSString *key in keys) {
NSString *value = [options objectForKey:key];
[self.synthesizer setParameter:value forKey:key];
}
}
if ([self.synthesizer isSpeaking]) {
[self.synthesizer stopSpeaking];
}
[self.synthesizer startSpeaking:text];
// }];
}
#pragma mark -
- (void)pauseSpeaking:(CDVInvokedUrlCommand*)command
{
NSLog(@"Speech :: pauseSpeaking");
[self.synthesizer pauseSpeaking];
}
#pragma mark -
- (void)resumeSpeaking:(CDVInvokedUrlCommand*)command
{
NSLog(@"Speech :: resumeSpeaking");
[self.synthesizer resumeSpeaking];
}
#pragma mark -
- (void)stopSpeaking:(CDVInvokedUrlCommand*)command
{
NSLog(@"Speech :: stopSpeaking");
[self.synthesizer stopSpeaking];
}
#pragma mark IFlyRecognizerViewDelegate
/*! Result callback of the recognizer view (dictation with UI).
 * IFlyRecognizerViewDelegate
 *
 * @param resultArray array of NSDictionary; each key is a result string with its confidence (sc) as the value
 * @param isLast      -[out] whether this is the last (final) result
 */
- (void)onResult:(NSArray *)resultArray isLast:(BOOL) isLast{
NSLog(@"Speech :: onResults - %@", resultArray);
if (self.callbackId) {
NSMutableString *text = [[NSMutableString alloc] init];
NSDictionary *dic = [resultArray objectAtIndex:0];
for (NSString *key in dic) {
[text appendFormat:@"%@",key];
}
NSLog(@"Recognize Result: %@",text);
// NSString * resultFromJson = [ISRDataHelper stringFromJson:text];
//
// NSLog(@"---------%@",resultFromJson);
NSDictionary* info = [NSDictionary dictionaryWithObjectsAndKeys:@"SpeechResults",STR_EVENT,text,STR_RESULTS, nil];
CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:info];
[result setKeepCallbackAsBool:YES];
[self.commandDelegate sendPluginResult:result callbackId:self.callbackId];
}
}
/*! Result callback of the recognizer (dictation without UI).
 * IFlySpeechRecognizerDelegate
 *
 * Example of extracting the text from results:
 * <pre><code>
 * - (void) onResults:(NSArray *) results{
 *   NSMutableString *result = [[NSMutableString alloc] init];
 *   NSDictionary *dic = [results objectAtIndex:0];
 *   for (NSString *key in dic){
 *     [result appendFormat:@"%@",key];
 *   }
 * }
 * </code></pre>
 *
 * @param results -[out] array of NSDictionary; each key is a result string with its confidence (sc) as the value
 * @param isLast  -[out] whether this is the last (final) result
 */
- (void) onResults:(NSArray *) results isLast:(BOOL)isLast{
NSLog(@"Speech :: onResults - %@", results);
if (self.callbackId) {
NSMutableString *text = [[NSMutableString alloc] init];
NSDictionary *dic = [results objectAtIndex:0];
for (NSString *key in dic) {
[text appendFormat:@"%@",key];
}
NSLog(@"Recognize Result: %@",text);
// NSString * resultFromJson = [ISRDataHelper stringFromJson:text];
//
// NSLog(@"---------%@",resultFromJson);
NSDictionary* info = [NSDictionary dictionaryWithObjectsAndKeys:@"SpeechResults",STR_EVENT,text,STR_RESULTS, nil];
CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:info];
[result setKeepCallbackAsBool:YES];
[self.commandDelegate sendPluginResult:result callbackId:self.callbackId];
}
}
/*! Volume changed callback.
 * IFlySpeechRecognizerDelegate
 *
 * @param volume -[out] current input volume, range 0-30
 */
- (void) onVolumeChanged:(int)volume
{
NSLog(@"Speech :: onVolumeChanged - %d", volume);
if (self.callbackId) {
NSDictionary* info = [NSDictionary dictionaryWithObjectsAndKeys:@"VolumeChanged",STR_EVENT,[NSNumber numberWithInt:volume],STR_VOLUME, nil];
CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:info];
[result setKeepCallbackAsBool:YES];
[self.commandDelegate sendPluginResult:result callbackId:self.callbackId];
}
}
/*! Called when recognition ends or an error occurs.
 *
 * @param error error information
 */
- (void)onError: (IFlySpeechError *) error{
NSLog(@"Speech :: onError - %d", error.errorCode);
if (self.callbackId) {
NSDictionary* info = [NSDictionary dictionaryWithObjectsAndKeys:@"11SpeechError",STR_EVENT,[NSNumber numberWithInt:error.errorCode],STR_CODE,error.errorDesc,STR_MESSAGE, nil];
CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:info];
[result setKeepCallbackAsBool:YES];
[self.commandDelegate sendPluginResult:result callbackId:self.callbackId];
}
}
#pragma mark IFlySpeechSynthesizerDelegate
- (void) onCompleted:(IFlySpeechError*)error
{
NSLog(@"Speech :: onCompleted - %d", error.errorCode);
if (self.callbackId) {
NSDictionary* info = [NSDictionary dictionaryWithObjectsAndKeys:@"SpeakCompleted",STR_EVENT,[NSNumber numberWithInt:error.errorCode],STR_CODE,error.errorDesc,STR_MESSAGE, nil];
CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:info];
[result setKeepCallbackAsBool:YES];
[self.commandDelegate sendPluginResult:result callbackId:self.callbackId];
}
}
- (void) onSpeakBegin
{
NSLog(@"Speech :: onSpeakBegin");
[self fireEvent:@"SpeakBegin"];
}
- (void) onBufferProgress:(int)progress message:(NSString *)msg
{
NSLog(@"Speech :: onBufferProgress - %d", progress);
if (self.callbackId) {
NSDictionary* info = [NSDictionary dictionaryWithObjectsAndKeys:@"BufferProgress",STR_EVENT,[NSNumber numberWithInt:progress],STR_PROGRESS,msg,STR_MESSAGE, nil];
CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:info];
[result setKeepCallbackAsBool:YES];
[self.commandDelegate sendPluginResult:result callbackId:self.callbackId];
}
}
- (void) onSpeakProgress:(int)progress
{
NSLog(@"Speech :: onSpeakProgress - %d", progress);
if (self.callbackId) {
NSDictionary* info = [NSDictionary dictionaryWithObjectsAndKeys:@"SpeakProgress",STR_EVENT,[NSNumber numberWithInt:progress],STR_PROGRESS, nil];
CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:info];
[result setKeepCallbackAsBool:YES];
[self.commandDelegate sendPluginResult:result callbackId:self.callbackId];
}
}
- (void) onSpeakPaused
{
NSLog(@"Speech :: onSpeakPaused");
[self fireEvent:@"SpeakPaused"];
}
- (void) onSpeakResumed
{
NSLog(@"Speech :: onSpeakResumed");
[self fireEvent:@"SpeakResumed"];
}
- (void) onSpeakCancel
{
NSLog(@"Speech :: onSpeakCancel");
[self fireEvent:@"SpeakCancel"];
}
- (void) fireEvent:(NSString*)event
{
if (self.callbackId) {
NSDictionary* info = [NSDictionary dictionaryWithObject:event forKey:STR_EVENT];
CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:info];
[result setKeepCallbackAsBool:YES];
[self.commandDelegate sendPluginResult:result callbackId:self.callbackId];
}
}
@end

View File

@ -0,0 +1,27 @@
//
// IFlyContact.h
// msc
//
// Created by ypzhao on 13-3-1.
// Copyright (c) 2013年 IFLYTEK. All rights reserved.
//
#import <Foundation/Foundation.h>
/*!
 * Contact uploader helper.
 * Collects the device contact list so it can be uploaded to the cloud and
 * improve name recognition during dictation (for example SMS dictation).
 */
@interface IFlyContact : NSObject
/*!
 * Returns the contact list as a string.
 * Reads the address book via AddressBook.framework; upload the returned
 * string with the IFlyDataUploader class.
 *
 * @return contact list string
 */
- (NSString *) contact;
@end

View File

@ -0,0 +1,54 @@
//
// IFlyDataUploader.h
// MSC
//
// Created by ypzhao on 13-4-8.
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
@class IFlySpeechError;
/*!
 * Data uploader for user resources (contacts, user words, etc.).
 */
@interface IFlyDataUploader : NSObject
/*!
 * Name of the data set being uploaded.
 */
@property(nonatomic,copy) NSString *dataName;
/*!
 * Data content to upload.
 */
@property(nonatomic,copy) NSString *data;
/*!
 * Upload completion callback.
 *
 * @param result result returned by the server
 * @param error  error information returned by the server
 */
typedef void(^IFlyUploadDataCompletionHandler)(NSString* result,IFlySpeechError * error);
/*!
 * Upload data to the iFlytek cloud.
 * The call is asynchronous; the handler is invoked when the upload finishes.
 *
 * @param completionHandler -[in] completion callback
 * @param name -[in] data name, must not be nil
 * @param data -[in] UTF-8 encoded data content, must not be nil
 */
- (void) uploadDataWithCompletionHandler:(IFlyUploadDataCompletionHandler)completionHandler name:(NSString *)name data:(NSString *)data;
/*!
 * Set an upload session parameter.
 *
 * @param parameter parameter value
 * @param key       parameter key
 */
-(void) setParameter:(NSString*) parameter forKey:(NSString*) key;
@end
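// Illustrative usage sketch, not part of the original header: uploading the
// contact string produced by IFlyContact (declared earlier in this framework)
// so the cloud can recognise names during dictation. The function name and the
// data name "contact" are assumptions made for this example.
#import "IFlyContact.h"
#import "IFlyDataUploader.h"
#import "IFlySpeechError.h"

static void UploadContactsSketch(void) {
    IFlyContact *contact = [[IFlyContact alloc] init];
    NSString *contactList = [contact contact];   // contact list string built from the address book

    IFlyDataUploader *uploader = [[IFlyDataUploader alloc] init];
    [uploader uploadDataWithCompletionHandler:^(NSString *result, IFlySpeechError *error) {
        if (error.errorCode == 0) {
            NSLog(@"contacts uploaded: %@", result);
        } else {
            NSLog(@"upload failed: %d %@", error.errorCode, error.errorDesc);
        }
    } name:@"contact" data:contactList];
}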

View File

@ -0,0 +1,37 @@
//
// IFlyDebugLog.h
// MSC
// description: 程序中的log处理类
// Created by ypzhao on 12-11-22.
// Copyright (c) 2012年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
/*!
 * Debug log helper.
 */
@interface IFlyDebugLog : NSObject
/*!
 * Print a formatted log message.
 *
 * @param format -[in] format string
 * @param ...    -[in] format arguments
 */
+ (void) showLog:(NSString *)format, ...;
/*!
 * Flush the collected log to file.
 */
+ (void) writeLog;
/*!
 * Enable or disable logging.
 *
 * @param showLog YES to print logs, NO to suppress them
 */
+ (void) setShowLog:(BOOL) showLog;
@end

View File

@ -0,0 +1,48 @@
//
// IFlyISVDelegate.h
// msc_UI
//
// Created by admin on 14-9-15.
// Copyright (c) 2014年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
@class IFlySpeechError;
/**
*
*/
@protocol IFlyISVDelegate
/**
*
*
* @param dic
*/
-(void) onResult:(NSDictionary *)dic;
/**
*
*
* @param errorCode
*/
-(void) onError:(IFlySpeechError *) errorCode;
@optional
/**
*
*/
-(void) onRecognition;
/**
*
*
* @param volume
*/
-(void) onVolumeChanged: (int)volume;
@end

View File

@ -0,0 +1,134 @@
//
// IFlyISVRecognizer.h
// ISV
//
// Created by wangdan on 14-9-6.
// Copyright (c) 2014年 IFlyTEK. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "IFlyISVDelegate.h"
/**
*
*/
@interface IFlyISVRecognizer : NSObject
{
}
@property (assign) id<IFlyISVDelegate> delegate;
/*
 * IFlyISVRecognizer is a singleton class; obtain the shared instance with:
 *     IFlyISVRecognizer *recognizer = [IFlyISVRecognizer sharedInstance];
 */
+(instancetype) sharedInstance;
/*
 * Generate a serial-number password.
 * Rules:
 *   1. the serial never contains the digit 1;
 *   2. no digit repeats ("98765432" is valid, "99876543" is not).
 * @length: length of the serial; "98765432" has length 8.
 *   Generally the length is 8 and other values are forbidden.
 */
-(NSString*) generatePassword:(int)length;
/*
 * Get the password list from the server.
 * @pwdt:
 *   when pwdt is 1 the function returns Chinese text;
 *   when pwdt is 2 it returns number serials.
 */
-(NSArray*) getPasswordList:(int)pwdt;
/*
 * Check whether the engine is currently listening.
 * Return value:
 *   YES: the engine is listening;
 *   NO : the engine is not listening.
 */
-(BOOL) isListening;
/*
 * Query or delete a voiceprint model on the server.
 * @cmd:
 *   "del": delete the model
 *   "que": query the model
 * @authid: user id, e.g. @"tianxia"
 * @pwdt: voiceprint type
 *   1: fixed-text voiceprint, e.g. @"我的地盘我做主"
 *   2: free voiceprint; the user may say anything, but the same speech
 *      must be repeated across the 5 training passes
 *   3: number-serial voiceprint, e.g. @"98765432"
 * @ptxt: voiceprint text; only fixed-text and number-serial types use it,
 *   for free voiceprint this parameter must be nil
 * @vid: voiceprint model id, e.g. @"jakillasdfasdjjjlajlsdfhdfdsadff" (32 characters);
 *   can be used to query or delete a model on the server
 * NOTES:
 *   when vid is not nil the server checks the vid first;
 *   when vid is nil the server can still query or delete the voiceprint model
 *   by the other parameters.
 */
-(BOOL) sendRequest:(NSString*)cmd authid:(NSString *)auth_id pwdt:(int)pwdt ptxt:(NSString *)ptxt vid:(NSString *)vid err:(int *)err;
/*
* set the voiceprint params
* @"sst" : @"train" or @"verify"
* @"auth_id" : @"tianxia" or ther
* @"sub" : @"ivp"
* @"ptxt" :
* @"rgn" : @"5"
* @"pwdt" : @"1",or @"2", or @"3"
* @"auf" : @"audio/L16;rate=16000" or @"audio/L16;rate=8000"
* @"vad_enable : @"1" or @"0"
* @"vad_timeout" : @"3000"
* @"vad_speech_tail": @"100"
*/
-(BOOL) setParameter:(NSString *)value forKey:(NSString *)key;
/*
* get the voiceprint params
* used the same as function of setParameter
*/
-(NSString*) getParameter:(NSString *)key;
/*
* start recording
*/
-(void) startListening;
/*
* stop recording
*/
-(void) stopListening;
/*
* cancel recording,like function stopListening
*/
-(void) cancel; /* cancel recognization */
@end
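// Illustrative usage sketch, not part of the original header: starting a
// number-serial voiceprint verification session. Parameter keys and values
// follow the comments above; the function name, auth_id value and delegate
// are assumptions made for this example.
#import "IFlyISVRecognizer.h"

static void StartVoiceprintVerifySketch(id<IFlyISVDelegate> delegate) {
    IFlyISVRecognizer *recognizer = [IFlyISVRecognizer sharedInstance];
    recognizer.delegate = delegate;
    NSString *password = [recognizer generatePassword:8];   // e.g. "98765432"
    [recognizer setParameter:@"verify" forKey:@"sst"];      // verify an enrolled model
    [recognizer setParameter:@"3" forKey:@"pwdt"];          // 3 = number-serial voiceprint
    [recognizer setParameter:password forKey:@"ptxt"];      // text the user must read aloud
    [recognizer setParameter:@"demo_user" forKey:@"auth_id"];
    if (![recognizer isListening]) {
        [recognizer startListening];                        // results arrive via onResult: on the delegate
    }
}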

View File

@ -0,0 +1,42 @@
//
// IFlyMSC.h
// msc
//
// Created by 张剑 on 15/1/14.
// Copyright (c) 2015年 iflytek. All rights reserved.
//
#ifndef MSC_IFlyMSC_h
#define MSC_IFlyMSC_h
#import "IFlyContact.h"
#import "IFlyDataUploader.h"
#import "IFlyDebugLog.h"
#import "IFlyISVDelegate.h"
#import "IFlyISVRecognizer.h"
#import "IFlyRecognizerView.h"
#import "IFlyRecognizerViewDelegate.h"
#import "IFlyResourceUtil.h"
#import "IFlySetting.h"
#import "IFlySpeechConstant.h"
#import "IFlySpeechError.h"
#import "IFlySpeechEvaluator.h"
#import "IFlySpeechEvaluatorDelegate.h"
#import "IFlySpeechEvent.h"
#import "IFlySpeechRecognizer.h"
#import "IFlySpeechRecognizerDelegate.h"
#import "IFlySpeechSynthesizer.h"
#import "IFlySpeechSynthesizerDelegate.h"
#import "IFlySpeechUnderstander.h"
#import "IFlySpeechUtility.h"
#import "IFlyTextUnderstander.h"
#import "IFlyUserWords.h"
#import "IFlyPcmRecorder.h"
#import "IFlySpeechEvaluator.h"
#import "IFlySpeechEvaluatorDelegate.h"
#import "IFlyVoiceWakeuper.h"
#import "IFlyVoiceWakeuperDelegate.h"
#endif

View File

@ -0,0 +1,104 @@
//
// IFlyPcmRecorder.h
// MSC
// description:
// Created by ypzhao on 12-11-15.
// Copyright (c) 2012年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioQueue.h>
#import <AudioToolbox/AudioFile.h>
#import <AudioToolbox/AudioServices.h>
#import <AudioToolbox/AudioConverter.h>
#import <AVFoundation/AVAudioSession.h>
@class IFlyPcmRecorder;
/**
 * Delegate protocol of the PCM recorder.
 */
@protocol IFlyPcmRecorderDelegate<NSObject>
/**
 * Audio data callback.
 *
 * @param buffer recorded PCM data
 * @param size   number of bytes in the buffer
 */
- (void) onIFlyRecorderBuffer: (const void *)buffer bufferSize:(int)size;
/**
 * Recording error callback.
 *
 * @param recoder the recorder reporting the error
 * @param error   error code
 */
- (void) onIFlyRecorderError:(IFlyPcmRecorder*)recoder theError:(int) error;
@optional
/**
 * Volume change callback.
 *
 * @param power current input volume
 */
- (void) onIFlyRecorderVolumeChanged:(int) power;
@end
/**
*
*/
@interface IFlyPcmRecorder : NSObject<AVAudioSessionDelegate>
/**
*
*/
@property (assign) id<IFlyPcmRecorderDelegate> delegate;
/**
*
*
* @return
*/
+ (instancetype) sharedInstance;
/**
*
*
* @return YESNO
*/
- (BOOL) start;
/**
*
*/
- (void) stop;
/**
*
*
* @param rate -[in] 8k/16k
*/
- (void) setSample:(NSString *) rate;
/*
*
*/
- (void) setPowerCycle:(float) cycle;
/**
*
*
* @param savePath
*/
-(void) setSaveAudioPath:(NSString *)savePath;
@end
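// Illustrative usage sketch, not part of the original header: a minimal
// recorder client. The class name, output path and the "16000" sample-rate
// string are assumptions made for this example.
#import "IFlyPcmRecorder.h"

@interface PcmCaptureSketch : NSObject <IFlyPcmRecorderDelegate>
- (void)startCapture;
@end

@implementation PcmCaptureSketch
- (void)startCapture {
    IFlyPcmRecorder *recorder = [IFlyPcmRecorder sharedInstance];
    recorder.delegate = self;
    [recorder setSample:@"16000"];                 // 8k/16k sample rate, passed as a string
    [recorder setPowerCycle:0.1];                  // how often volume callbacks fire
    [recorder setSaveAudioPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.pcm"]];
    if (![recorder start]) {
        NSLog(@"recorder failed to start");
    }
}
- (void)onIFlyRecorderBuffer:(const void *)buffer bufferSize:(int)size {
    NSLog(@"got %d bytes of PCM", size);           // e.g. feed this to a stream-mode recognizer
}
- (void)onIFlyRecorderError:(IFlyPcmRecorder *)recoder theError:(int)error {
    NSLog(@"recorder error %d", error);
}
@end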

View File

@ -0,0 +1,137 @@
//
// IFlyRecognizerView.h
// MSC
//
// Created by admin on 13-4-16.
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#import <UIKit/UIKit.h>
@protocol IFlyRecognizerViewDelegate ;
/*!
*
*
*
*
*/
@interface IFlyRecognizerView : UIView<NSObject>
/*!
*
*/
@property(nonatomic,assign)id<IFlyRecognizerViewDelegate> delegate;
/*!
*
*
* @param origin
*
* @return IFlyRecognizerView
*/
- (id)initWithOrigin:(CGPoint)origin;
/*!
*
*
* @param center
*
* @return IFlyRecognizerView
*/
- (id) initWithCenter:(CGPoint)center;
/*!
*
*
* @param autoRotate YES
*/
- (void) setAutoRotate:(BOOL)autoRotate;
/*!
 * Set a recognition session parameter. Main keys:
 * <table>
 * <thead>
 * <tr><th>parameter</th><th><em>description</em></th></tr>
 * </thead>
 * <tbody>
 * <tr><td>domain</td><td>application domain: iat (dictation), search, video, poi, music or asr (grammar-based recognition)</td></tr>
 * <tr><td>vad_bos</td><td>leading-silence timeout in ms; default 5000 for cloud iat, otherwise 4000; range 0-10000</td></tr>
 * <tr><td>vad_eos</td><td>trailing-silence length in ms that ends the utterance; default 1800 for sms, otherwise 700; range 0-10000</td></tr>
 * <tr><td>sample_rate</td><td>audio sample rate: 16000 or 8000</td></tr>
 * <tr><td>asr_ptt</td><td>punctuation in the result: 1 with punctuation, 0 without</td></tr>
 * <tr><td>result_type</td><td>result format: json, xml or plain; default json</td></tr>
 * <tr><td>grammarID</td><td>grammar id, used when domain is asr</td></tr>
 * <tr><td>asr_audio_path</td><td>file name for saving the session audio under Documents; nil disables saving</td></tr>
 * <tr><td>params</td><td>extended parameters</td></tr>
 * </tbody>
 * </table>
 * @param value parameter value
 * @param key   parameter key
 *
 * @return YES on success, NO on failure
 */
-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
/*!
*
*
* @param key key
*
* @return
*/
-(NSString*) parameterForKey:(NSString *)key;
/*!
*
*
* @return YESNO
*/
- (BOOL)start;
/*!
*
*/
- (void)cancel;
@end
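// Illustrative usage sketch, not part of the original header: presenting the
// built-in recognition dialog for Mandarin dictation. The function name is
// made up; the delegate is assumed to implement IFlyRecognizerViewDelegate
// (declared in IFlyRecognizerViewDelegate.h).
#import "IFlyRecognizerView.h"
#import "IFlyRecognizerViewDelegate.h"

static IFlyRecognizerView *ShowDictationDialogSketch(UIView *parent, id<IFlyRecognizerViewDelegate> delegate) {
    IFlyRecognizerView *view = [[IFlyRecognizerView alloc] initWithCenter:parent.center];
    view.delegate = delegate;
    [view setParameter:@"iat" forKey:@"domain"];        // dictation
    [view setParameter:@"json" forKey:@"result_type"];  // JSON delivered through onResult:isLast:
    [view setParameter:@"16000" forKey:@"sample_rate"];
    [view start];                                       // assumed to show the dialog and begin listening
    return view;
}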

View File

@ -0,0 +1,36 @@
//
// IFlyRecognizerDelegate.h
// MSC
//
// Created by admin on 13-4-16.
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
@class IFlyRecognizerView;
@class IFlySpeechError;
/*!
*
*/
@protocol IFlyRecognizerViewDelegate <NSObject>
/*!
 * Result callback.
 *
 * @param resultArray the first element is an NSDictionary whose keys are the recognized text and whose value "sc" is the confidence of the result
 * @param isLast -[out] whether this is the final result
 */
- (void)onResult:(NSArray *)resultArray isLast:(BOOL) isLast;
/*!
*
*
* @param error
*/
- (void)onError: (IFlySpeechError *) error;
@optional
@end

View File

@ -0,0 +1,90 @@
//
// IFlyResourceUtil.h
// MSCDemo
//
// Created by admin on 14-6-20.
// Copyright (c) 2014年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
/*!
*
*/
@interface IFlyResourceUtil : NSObject
/*!
* MSPSetParam
*
* @return MSPSetParam
*/
+(NSString*) ENGINE_START;
/*!
* MSPSetParam
*
* @return MSPSetParam
*/
+(NSString*) ENGINE_DESTROY;
/*!
*
*
* @return
*/
+(NSString*) ASR_RES_PATH;
/*!
*
*
* @return
*/
+(NSString*) GRM_BUILD_PATH;
/*!
* voice_name方可生效
*
* @return voice_name方可生效
*/
+(NSString*) TTS_RES_PATH;
/*!
*
*
* @return
*/
+(NSString*) IVW_RES_PATH;
/*!
*
*
* @return
*/
+(NSString*) GRAMMARTYPE;
/*!
* SDK专用参数
*
* @return key字符串
*/
+(NSString*) PLUS_LOCAL_DEFAULT_RES_PATH;
#pragma mark -
/*!
*
*
* @param path
*
* @return
*/
+(NSString*) generateResourcePath:(NSString *)path;
/**
* 线id
*
* @param voiceName
*
* @return idnil
*/
+(NSString*) identifierForVoiceName:(NSString*)voiceName;
@end

View File

@ -0,0 +1,93 @@
//
// IFlySetting.h
// MSC
//
// Created by iflytek on 13-4-12.
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
/*!
 * Log levels.
 */
typedef NS_OPTIONS(NSInteger, LOG_LEVEL){
/*!
 * log everything
 */
LVL_ALL = -1,
/*!
 * detailed logging
 */
LVL_DETAIL = 31,
/*!
 * normal logging
 */
LVL_NORMAL = 15,
/*!
 * minimal logging
 */
LVL_LOW = 7,
/*!
 * logging disabled
 */
LVL_NONE = 0
};
/*!
* iflyMSC sdk
*
*/
@interface IFlySetting : NSObject
/*!
*
*
* @return
*/
+ (NSString *) getVersion;
/*!
*
*
* @return
*/
+ (LOG_LEVEL) logLvl;
/*!
 * Enable or disable console logging.
 *
 * @param showLog -[in] YES prints SDK logs to the console; NO suppresses them
 */
+ (void) showLogcat:(BOOL) showLog;
/*!
 * Set the level of the msc.log file.
 * <table>
 * <thead>
 * <tr><th>level</th><th><em>meaning</em></th></tr>
 * </thead>
 * <tbody>
 * <tr><td>LVL_ALL</td><td>log everything</td></tr>
 * <tr><td>LVL_DETAIL</td><td>detailed logging</td></tr>
 * <tr><td>LVL_NORMAL</td><td>normal logging</td></tr>
 * <tr><td>LVL_LOW</td><td>minimal logging</td></tr>
 * <tr><td>LVL_NONE</td><td>no log file</td></tr>
 * </tbody>
 * </table>
 *
 * @param level -[in] log level
 */
+ (void) setLogFile:(LOG_LEVEL) level;
/*!
 * Set the directory where msc.log is written.
 * Defaults to the app's Documents directory.
 *
 * @param path -[in] log directory path
 */
+ (void) setLogFilePath:(NSString*) path;
@end
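// Illustrative usage sketch, not part of the original header: turning on
// console logging and writing msc.log under the app's Documents directory.
// The function name is made up for this example.
#import "IFlySetting.h"

static void ConfigureMscLoggingSketch(void) {
    NSString *documents = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
    [IFlySetting showLogcat:YES];              // echo SDK logs to the console
    [IFlySetting setLogFile:LVL_ALL];          // log everything to msc.log
    [IFlySetting setLogFilePath:documents];    // directory where msc.log is written
    NSLog(@"iflyMSC version %@", [IFlySetting getVersion]);
}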

View File

@ -0,0 +1,851 @@
//
// IFlySpeechConstant.h
// MSCDemo
//
// Created by iflytek on 5/9/14.
// Copyright (c) 2014 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
/*!
*
* key value值
*/
@interface IFlySpeechConstant : NSObject
#pragma mark - Common parameter keys
/*!
* ID
*
*
* @return IDkey
*/
+(NSString*)APPID;
/*!
*
*
* @return key
*/
+(NSString*)ACCENT;
/*!
*
*
* @return value
*/
+(NSString*)ACCENT_MANDARIN;
/*!
*
*
* @return value
*/
+(NSString*)ACCENT_HENANESE;
/*!
*
*
* @return value
*/
+(NSString*)ACCENT_CANTONESE;
/*!
*
* zh_cnzh_twen_us<br>
*
* @return key
*/
+(NSString*)LANGUAGE;
/*!
*
*
* @return value
*/
+(NSString*)LANGUAGE_CHINESE;
/*!
*
*
* @return value
*/
+(NSString*)LANGUAGE_CHINESE_TW;
/*!
*
*
* @return value
*/
+(NSString*)LANGUAGE_ENGLISH;
/*!
*
* jsonxmlplainjson
*
* @return key
*/
+(NSString*)RESULT_TYPE;
/*!
*
*
* @return key
*/
+(NSString*)IFLY_DOMAIN;
/*!
*
*
* @return key
*/
+(NSString*)DATA_TYPE;
/*!
*
* ms30000
*
* @return key
*/
+(NSString*)SPEECH_TIMEOUT;
/*!
*
* ms20000
*
* @return key
*/
+(NSString*)NET_TIMEOUT;
/*!
*
*
* @return key
*/
+(NSString*)SUBJECT;
/*!
*
*
* @return key
*/
+(NSString*)PARAMS;
/**
*
*
* ssl tcp tcp
* 使ssl
*
* @return key
*/
+(NSString*)PROT_TYPE;
/**
* ssl证书内容
*
* @return ssl证书内容key
*/
+(NSString*)SSL_CERT;
/*!
*
*
* @return key
*/
+(NSString*)POWER_CYCLE;
/*!
*
*
* @return key
*/
+(NSString*)SAMPLE_RATE;
/*!
*
*
* @return 8K Value
*/
+(NSString*)SAMPLE_RATE_8K;
/*!
*
*
* @return 16K Value
*/
+(NSString*)SAMPLE_RATE_16K;
/*!
*
* localcloudauto
* auto
*
* @return key
*/
+(NSString*)ENGINE_TYPE;
/*!
*
*
* @return value
*/
+(NSString*)TYPE_LOCAL;
/*!
*
*
* @return value
*/
+(NSString*)TYPE_CLOUD;
/*!
*
*
* @return value
*/
+(NSString*)TYPE_MIX;
/*!
*
*
* @return value
*/
+(NSString*)TYPE_AUTO;
/*!
*
*
* @return key
*/
+(NSString*)TEXT_ENCODING;
/*!
*
*
* @return key
*/
+(NSString*)RESULT_ENCODING;
/**
*
* SDK内部播放器采用音频队列实现
* 0:0:
* @return key
*/
+(NSString*)PLAYER_INIT;
/**
*
* SDK内部录音器采用音频队列实现
* 0:0:
* @return key
*/
+(NSString*)RECORDER_INIT;
#pragma mark - Synthesis (TTS) parameter keys
/*!
*
* 0~100 :50
*
* @return key
*/
+(NSString*)SPEED;
/*!
*
* 0~100:50
*
* @return key
*/
+(NSString*)PITCH;
/*!
*
*
* @return key
* @ IFlySetting setLogFilePath接口设置的目录后
*/
+(NSString*)TTS_AUDIO_PATH;
/**
* VAD功能
*
* @return VAD功能key
*/
+(NSString*)VAD_ENABLE;
/*!
* VAD前端点超时
* 0-10000(ms)
*
* @return VAD前端点超时key
*/
+(NSString*)VAD_BOS;
/*!
* VAD后端点超时
* 0-10000(ms)
*
* @return VAD后端点超时key
*/
+(NSString*)VAD_EOS;
/*!
 * Synthesis speaker (voice_name). Common values (the display names below
 * match the mapping kept in www/Speech.js of this plugin):
 * <table>
 * <thead>
 * <tr><th>speaker</th><th><em>value</em></th></tr>
 * </thead>
 * <tbody>
 * <tr><td>小燕 (Mandarin, female)</td><td>xiaoyan</td></tr>
 * <tr><td>小宇 (Mandarin, male)</td><td>xiaoyu</td></tr>
 * <tr><td>Catherine (US English)</td><td>catherine</td></tr>
 * <tr><td>Henry (US English)</td><td>henry</td></tr>
 * <tr><td>Mary (British English)</td><td>vimary</td></tr>
 * <tr><td>小研</td><td>vixy</td></tr>
 * <tr><td>小琪</td><td>vixq</td></tr>
 * <tr><td>小峰</td><td>vixf</td></tr>
 * <tr><td>香港小梅 (Cantonese)</td><td>vixm</td></tr>
 * <tr><td>台湾小莉 (Taiwanese Mandarin)</td><td>vixl</td></tr>
 * <tr><td>四川妹纸 (Sichuan dialect)</td><td>vixr</td></tr>
 * <tr><td>东北小芸 (Northeastern dialect)</td><td>vixyun</td></tr>
 * <tr><td>河南小坤 (Henan dialect)</td><td>vixk</td></tr>
 * <tr><td>湖南小强 (Hunan dialect)</td><td>vixqa</td></tr>
 * <tr><td>陕西小莹 (Shaanxi dialect)</td><td>vixying</td></tr>
 * <tr><td>蜡笔小新</td><td>vixx</td></tr>
 * <tr><td>楠楠 (child voice)</td><td>vinn</td></tr>
 * <tr><td>孙大爷 (elderly voice)</td><td>vils</td></tr>
 * </tbody>
 * </table>
 *
 * @return voice_name parameter key
 */
+(NSString*)VOICE_NAME;
/*!
* ID key
* @return ID key
*/
+(NSString*)VOICE_ID;
/*!
* key
* 0:Auto 1: 2 0.
* @return ID key
*/
+(NSString*)VOICE_LANG;
/*!
*
* 0~100 :50
*
* @return key
*/
+(NSString*)VOLUME ;
/*!
*
* tts_buffer_time=1000;
* 1000ms毫秒后播放
*
* @return key
*/
+(NSString*)TTS_BUFFER_TIME ;
/** 合成数据即时返回
*/
/**
*
* 1onEvent回调返回
* 100
*
* @return key
*/
+(NSString*)TTS_DATA_NOTIFY;
/**
*
*
* @return key
*/
+(NSString*)NEXT_TEXT;
/**
* MPPlayingInfocenter
* MPPlayerCenter的属性;0:1:
*
* @return MPPlayingInfocenter key
*/
+(NSString*)MPPLAYINGINFOCENTER;
#pragma mark - Recognition, dictation and semantic parameter keys
/*!
*
* 1
* -1WriteAudio接口送入音频
*
* @return key
*/
+(NSString*)AUDIO_SOURCE;
/*!
*
*
* @return key
*/
+(NSString*) ASR_AUDIO_PATH;
/*!
*
*
* @return key
*/
+(NSString*)ASR_SCH;
/*!
*
*
* @return key
*/
+(NSString*)ASR_PTT;
/*!
* ASR_PTT
*
* @return Value
*/
+(NSString*)ASR_PTT_HAVEDOT;
/*!
* ASR_PTT
*
* @return Value
*/
+(NSString*)ASR_PTT_NODOT;
/*!
*
* CLOUD_GRAMMAR
*
* @return key
*/
+(NSString*)LOCAL_GRAMMAR;
/*!
* ID
* 使GRAMMAR_ID使
*
* @return ID key
*/
+(NSString*)CLOUD_GRAMMAR;
/*!
*
*
* @return key
*/
+(NSString*)GRAMMAR_TYPE;
/*!
*
*
* @return key
*/
+(NSString*)GRAMMAR_CONTENT;
/*!
*
*
* @return key
*/
+(NSString*)LEXICON_CONTENT;
/*!
*
*
* @return key
*/
+(NSString*)LEXICON_NAME;
/*!
*
*
* @return key
*/
+(NSString*)GRAMMAR_LIST;
/*!
*
* 使http://osp.voicecloud.cn/上进行业务配置
*
* @return key
*/
+(NSString*)NLP_VERSION;
#pragma mark - Wake-up parameter keys
/*!
*
*
* @return key
*/
+(NSString*)IVW_THRESHOLD;
/*!
*
*
* @return key
*/
+(NSString*)IVW_SST;
/*!
* +
*
* @return +key
*/
+(NSString*)IVW_ONESHOT;
/*!
*
* 10
*
* @return key
*/
+(NSString*)KEEP_ALIVE;
#pragma mark - Evaluation (ISE) parameter keys
/*!
* <br>
* read_syllable():;read_word:;read_sentence:;read_chapter():
*
* @return key
*/
+(NSString*)ISE_CATEGORY;
/*!
* <br>
* complete plain
*
* @return key
*/
+(NSString*)ISE_RESULT_LEVEL;
/*!
*
* xml;plain
*
* @return key
*/
+(NSString*)ISE_RESULT_TYPE;
/*!
*
*
* @return key
*/
+(NSString*) ISE_AUDIO_PATH;
/*!
* <br>
* enable:;disable:
*
* @return key
*/
+(NSString*)ISE_AUTO_TRACKING;
/*!
* <br>
* easy:;hard:
*
* @return key
*/
+(NSString*)ISE_TRACK_TYPE;
#pragma mark - Speech+ (语记) SDK keys
/**
*
*
* @return key
*/
+ (NSString *)PLUS_LOCAL_ALL;
/**
*
*
* @return key
*/
+ (NSString *)PLUS_LOCAL_TTS;
/**
*
*
* @return key
*/
+ (NSString *)PLUS_LOCAL_ASR;
/**
*
*
* @return key
*/
+ (NSString *)PLUS_LOCAL_IVW;
#pragma mark - Identity verification keys
/**
* auth_id
*
*
* @return
*/
+ (NSString*)MFV_AUTH_ID;
/**
* mfvivpifr
*
* @return key
*/
+ (NSString*)MFV_SUB;
/**
* sub有不同的sst取值
* ifrenrollverifyidentifyreenrollquerydelete
* ivpenrolltrainverifyreenrollquerydeletedownload
*
* @return key
*/
+ (NSString*)MFV_SST;
/**
* 使sinmixagi
*
* @return key
*/
+ (NSString*)MFV_VCM;
/**
* ivpifrivp|ifr
*
* @return key
*/
+ (NSString*)MFV_SCENES;
/**
* (affirmance cycles)使
*
* @return key
*/
+ (NSString*)MFV_AFC;
/**
*
*
* @return key
*/
+ (NSString*)MFV_DATA_PATH;
/**
* 2~9.
*
* @return key
*/
+ (NSString*)MFV_RGN;
/**
* >=tsd验证通过,0~100.
*
* @return key
*/
+ (NSString*)MFV_TSD;
/**
*
*
* @return key
*/
+ (NSString*)MFV_PTXT;
/**
* 1(),2(),3().
*
* @return key
*/
+ (NSString*)MFV_PWDT;
/**
* 0(),1().
*
* @return key
*/
+ (NSString*)MFV_FIN;
/**
* :
*
* @return :key
*/
+ (NSString*)MFV_WTT;
/**
*
* 160008000;jpg和gif
*
* @return key
*/
+ (NSString*)MFV_DATA_FORMAT;
/**
*
* ;raw
*
* @return key
*/
+ (NSString*)MFV_DATA_ENCODING;
#pragma mark - Face recognition keys
//1. sub value: wfr. Purpose: distinguishes the business type; not needed in the nginx configuration for web access, but used in structured and trace logging.
//2. sst value: reg, verify, detect or align. Purpose: specifies what kind of session this is:
// + face registration (reg): upload an image, check its validity and store it as the reference data;
// + face verification (verify): compare against the specified reference image to verify similarity;
// + face detection (detect): locate faces in the image regardless of pose and orientation;
// + face alignment (align): within a given face box, mark the coordinates of the eyes, nose tip and mouth corners.
//3. aue value: raw. Purpose: image compression format; the engine currently does not support compression, so aue can only be raw.
//4. pset value: integer. Purpose: face verification threshold; may be negative or positive.
//5. skip value: true/false. Purpose: whether backend image processing skips filtering; true means no filtering, false means filtering.
//6. gid value: *********** Purpose: image model id, e.g. 4a6c124ed6b78436ee5aac4563f13eb5.
//7. appid value: the appid requested by the user. Purpose: authenticates the user.
/** sub. Default: wfr.
 * Distinguishes the business type; not needed in the nginx configuration for web access, but used in logging.
 */
+ (NSString*) FACE_SUB;
/** WFR
* sub参数的默认值
*/
+ (NSString*) FACE_WFR;
/** sst
*
*/
+ (NSString*) FACE_SST;
/** REG
* (reg)
*/
+ (NSString*) FACE_REG;
/** VERIFY
* (verify)
*/
+ (NSString*) FACE_VERIFY;
/** DETECT
* (detect)姿
*/
+ (NSString*) FACE_DETECT;
/** ALIGN
* (align)
*/
+ (NSString*) FACE_ALIGN;
/** ATTR
* (attr)
*/
+ (NSString*) FACE_ATTR;
/** AUE
* aue只能取值raw
*/
+ (NSString*) FACE_AUE;
/** RAW
* AUE参数的值
*/
+ (NSString*) FACE_RAW;
/** PSET
*
*/
+ (NSString*) FACE_PSET;
/** SKIP
* true表示不过滤false表示过滤@true@false
*/
+ (NSString*) FACE_SKIP;
/** GID
* id4a6c124ed6b78436ee5aac4563f13eb5
*/
+ (NSString*) FACE_GID;
/**
* auth_id
*
*
* @return
*/
+ (NSString*)FACE_AUTH_ID;
/** DVC
* ,
*/
+ (NSString*) FACE_DVC;
@end
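// Illustrative usage sketch, not part of the original header: using the
// constant key accessors above instead of raw strings when configuring a
// session. IFlySpeechSynthesizer comes from IFlySpeechSynthesizer.h in this
// same framework; the function name is made up for this example.
#import "IFlySpeechConstant.h"
#import "IFlySpeechSynthesizer.h"

static void ApplyCommonTtsParamsSketch(void) {
    IFlySpeechSynthesizer *tts = [IFlySpeechSynthesizer sharedInstance];
    [tts setParameter:@"xiaoyan" forKey:[IFlySpeechConstant VOICE_NAME]];
    [tts setParameter:@"50" forKey:[IFlySpeechConstant SPEED]];    // 0~100, default 50
    [tts setParameter:@"80" forKey:[IFlySpeechConstant VOLUME]];   // 0~100
    [tts setParameter:[IFlySpeechConstant SAMPLE_RATE_16K]
                forKey:[IFlySpeechConstant SAMPLE_RATE]];
    [tts setParameter:[IFlySpeechConstant TYPE_CLOUD]
                forKey:[IFlySpeechConstant ENGINE_TYPE]];
}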

View File

@ -0,0 +1,58 @@
//
// IFlySpeechError.h
// MSC
//
// Created by iflytek on 13-3-19.
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#ifndef __IFlySpeechError__
#define __IFlySpeechError__
#import <Foundation/Foundation.h>
/*!
*
*/
@interface IFlySpeechError : NSObject
/*!
*
*/
@property(nonatomic,assign) int errorCode;
/*!
*
*/
@property(nonatomic,assign) int errorType;
/*!
*
*/
@property(nonatomic,retain) NSString* errorDesc;
/*!
*
*
* @param errorCode -[in]
*
* @return IFlySpeechError对象
*/
+ (instancetype) initWithError:(int) errorCode;
/*!
*
*
* @return
*/
-(int) errorCode;
/*!
*
*
* @return
*/
- (NSString *) errorDesc;
@end
#endif

View File

@ -0,0 +1,78 @@
//
// IFlySpeechEvaluator.h
// msc
//
// Created by jianzhang on 14-1-13
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "IFlySpeechEvaluatorDelegate.h"
/*!
*
*/
@interface IFlySpeechEvaluator : NSObject <IFlySpeechEvaluatorDelegate>
/*!
*
*/
@property (assign) id <IFlySpeechEvaluatorDelegate> delegate;
/*!
*
*
* @return
*/
+ (instancetype)sharedInstance;
/*!
*
*
* @return YESNO
*/
- (BOOL)destroy;
/*!
*
*
* @param value
* @param key
*
* @return YES,NO
*/
- (BOOL)setParameter:(NSString *)value forKey:(NSString *)key;
/*!
*
*
* @param key
*
* @return key对应的参数值
*/
- (NSString*)parameterForKey:(NSString *)key;
/*!
*
* ,
*
* @param data
* @param params
*/
- (void)startListening:(NSData *)data params:(NSString *)params;
/*!
*
*
*/
- (void)stopListening;
/*!
*
*/
- (void)cancel;
@end
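// Illustrative usage sketch, not part of the original header: starting a
// read_sentence evaluation. Treating the NSData argument as the UTF-8 text to
// be read aloud, and passing nil for params, are assumptions; the stripped
// comments above no longer say what startListening:params: expects.
#import "IFlySpeechEvaluator.h"
#import "IFlySpeechConstant.h"

static void StartEvaluationSketch(id<IFlySpeechEvaluatorDelegate> delegate) {
    IFlySpeechEvaluator *evaluator = [IFlySpeechEvaluator sharedInstance];
    evaluator.delegate = delegate;
    [evaluator setParameter:@"read_sentence" forKey:[IFlySpeechConstant ISE_CATEGORY]];
    [evaluator setParameter:@"zh_cn" forKey:[IFlySpeechConstant LANGUAGE]];
    NSData *text = [@"今天天气怎么样" dataUsingEncoding:NSUTF8StringEncoding];
    [evaluator startListening:text params:nil];   // results are delivered through onResults:isLast:
}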

View File

@ -0,0 +1,66 @@
//
// IFlySpeechEvaluatorDelegate.h
// msc
//
// Created by admin on 13-6-19.
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
@class IFlySpeechError;
/*!
*
*/
@protocol IFlySpeechEvaluatorDelegate <NSObject>
/*!
*
*
* @param volume
* @param buffer
*/
- (void)onVolumeChanged:(int)volume buffer:(NSData *)buffer;
/*!
*
* `startListening`onError:
*/
- (void)onBeginOfSpeech;
/*!
*
* `stopListening`
* onError:
*/
- (void)onEndOfSpeech;
/*!
*
*/
- (void)onCancel;
/*!
*
* errorCode进行相应的处理.
* errorCode没有错误时
* `cancel`
* `startListenging`
*
* @param errorCode
*/
- (void)onError:(IFlySpeechError *)errorCode;
/*!
*
*
*
* @param results -[out]
* @param isLast -[out]
*/
- (void)onResults:(NSData *)results isLast:(BOOL)isLast;
@end

View File

@ -0,0 +1,171 @@
//
// IFlySpeechEvent.h
// MSCDemo
//
// Created by admin on 14-8-12.
// Copyright (c) 2014年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
/*!
*
*/
typedef NS_ENUM(NSUInteger,IFlySpeechEventType){
/*!
*
* ,onEvent的第2个参数arg1,
*/
IFlySpeechEventTypeNetPref = 10001,
/**
*
* onEvent
* 4data Key为[IFlySpeechConstant IST_AUDIO_PATH],.
* [IFlySpeechTranscripter getParameter:[IFlySpeechConstant IST_AUDIO_PATH]],
* .
*/
IFlySpeechEventTypeISTAudioFile = 10004,
/**
*
* ,onEvent
* arg1,.
* ,onEvent
* arg2,.
*
* [IFlySpeechConstant SPEECH_TIMEOUT]
* [IFlySpeechTranscripter stopTranscripting]
* data(true)data调用
* KEY为KCIFlySpeechEventKeyISTUploadComplete获取
* .
*/
IFlySpeechEventTypeISTUploadBytes = 10006,
/**
*
* -1
* [IFlySpeechTranscripter writeAudio]
* onEvent
* arg1,.
* 128KByte
*/
IFlySpeechEventTypeISTCacheLeft = 10007,
/**
*
* , onEvent
* arg1,.
* .
*/
IFlySpeechEventTypeISTResultTime= 10008,
/**
* ID消息
* , onEvent
* arg1,ID.
* .
*/
IFlySpeechEventTypeISTSyncID= 10009,
/**
*
*
*/
IFlySpeechEventTypeSessionBegin = 10010,
/**
*
*
*/
IFlySpeechEventTypeSessionEnd = 10011,
/**
*
*/
IFlySpeechEventTypeVolume = 10012,
/**
* VAD后端点消息VAD后端点时抛出
*/
IFlySpeechEventTypeVadEOS = 10013,
/*!
* id
* ,onEvent的第4个参数data()
* key KCIFlySpeechEventKeySessionID,id.
*/
IFlySpeechEventTypeSessionID = 20001,
/*!
* TTS合成数据消息
* -(void)onEvent:(int)eventType arg0:(int)arg0 arg1:(int)arg1 data:(NSData *)eventData
* eventData中包含数据
*
*/
IFlySpeechEventTypeTTSBuffer = 21001,
/*!
* cancel方法被调用的回调
*
*/
IFlySpeechEventTypeTTSCancel = 21002,
/*!
* IVW onshot or
* ,2arg1包含是否为最后一个结果:1,0;
* 4data中包含数据KEY为KCIFlySpeechEventKeyIVWResult获取.
*/
IFlySpeechEventTypeIVWResult = 22001,
/*!
*
*
*/
IFlySpeechEventTypeSpeechStart= 22002,
/*!
*
*
*/
IFlySpeechEventTypeRecordStop= 22003,
/*!
* url
* ,
* 4data,,
* KEY为KCIFlySpeechEventKeyAudioUrl获取.
*/
IFlySpeechEventTypeAudioUrl = 23001,
/*!
*
*
* voice_change参数获取结果.
*/
IFlySpeechEventTypeVoiceChangeResult = 24001
};
#pragma mark - keys for event data
/**
* key
*/
extern NSString* const KCIFlySpeechEventKeyISTUploadComplete;
/**
* key
*/
extern NSString* const KCIFlySpeechEventKeySessionID;
/**
* TTS取音频数据key
*/
extern NSString* const KCIFlySpeechEventKeyTTSBuffer;
/**
* IVW oneshot or key
*/
extern NSString* const KCIFlySpeechEventKeyIVWResult;
/**
* url key
*/
extern NSString* const KCIFlySpeechEventKeyAudioUrl;

View File

@ -0,0 +1,174 @@
//
// IFlySpeechRecognizer.h
// MSC
//
// Created by iflytek on 13-3-19.
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "IFlySpeechRecognizerDelegate.h"
#define IFLY_AUDIO_SOURCE_MIC @"1"
#define IFLY_AUDIO_SOURCE_STREAM @"-1"
/*!
*
* 使release/dealloc函数去释放此对象
*/
@interface IFlySpeechRecognizer : NSObject<IFlySpeechRecognizerDelegate>
/** 设置委托对象 */
@property(nonatomic,assign) id<IFlySpeechRecognizerDelegate> delegate ;
/*!
*
*
* @return
*/
+ (instancetype) sharedInstance;
/*!
*
*
* @return YES,NO
*/
- (BOOL) destroy;
/*!
 * Set a recognition session parameter (a usage sketch follows this header).
 * Main keys:
 * <table>
 * <thead>
 * <tr><th>parameter</th><th><em>description</em></th></tr>
 * </thead>
 * <tbody>
 * <tr><td>domain</td><td>application domain: iat (dictation), search, video, poi, music or asr (grammar-based recognition)</td></tr>
 * <tr><td>vad_bos</td><td>leading-silence timeout in ms; default 5000 for cloud iat, otherwise 4000; range 0-10000</td></tr>
 * <tr><td>vad_eos</td><td>trailing-silence length in ms that ends the utterance; default 1800 for sms, otherwise 700; range 0-10000</td></tr>
 * <tr><td>sample_rate</td><td>audio sample rate: 16000 or 8000</td></tr>
 * <tr><td>asr_ptt</td><td>punctuation in the result: 1 with punctuation, 0 without</td></tr>
 * <tr><td>result_type</td><td>result format: json, xml or plain; default json</td></tr>
 * <tr><td>grammarID</td><td>grammar id, used when domain is asr</td></tr>
 * <tr><td>asr_audio_path</td><td>file name for saving the session audio under Documents; nil disables saving</td></tr>
 * <tr><td>params</td><td>extended parameters</td></tr>
 * </tbody>
 * </table>
 * @param value parameter value
 * @param key   parameter key
 *
 * @return YES on success, NO on failure
 */
-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
/*!
*
*
* @param key key
*
* @return
*/
-(NSString*) parameterForKey:(NSString *)key;
/*!
*
*
* onError回调返回后请求下一路回话
*
* @return YESNO
*/
- (BOOL) startListening;
/*!
*
*
*/
- (void) stopListening;
/*!
*
*/
- (void) cancel;
/*!
*
*
* @param completionHandler
* @param grammarType
* @param grammarContent
*
* @return
*/
- (int) buildGrammarCompletionHandler:(IFlyOnBuildFinishCompletionHandler)completionHandler
grammarType:(NSString *)grammarType
grammarContent:(NSString *)grammarContent;
/** 是否正在识别
*/
@property (nonatomic, readonly) BOOL isListening;
@end
/*!
*
*
*/
@interface IFlySpeechRecognizer(IFlyStreamRecognizer)
/*!
*
* 使:
* <pre><code>[_iFlySpeechRecognizer setParameter:@"audio_source" value:@"-1"];
* [_iFlySpeechRecognizer startListening];
* [_iFlySpeechRecognizer writeAudio:audioData1];
* [_iFlySpeechRecognizer writeAudio:audioData2];
* ...
* [_iFlySpeechRecognizer stopListening];
* </code></pre>
*
* @param audioData
*
* @return YESNO
*/
- (BOOL) writeAudio:(NSData *) audioData;
@end
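// Illustrative usage sketch, not part of the original header: a plain cloud
// dictation session driven from the microphone. The delegate is assumed to
// implement IFlySpeechRecognizerDelegate; the function name is made up.
#import "IFlySpeechRecognizer.h"
#import "IFlySpeechConstant.h"

static void StartDictationSketch(id<IFlySpeechRecognizerDelegate> delegate) {
    IFlySpeechRecognizer *asr = [IFlySpeechRecognizer sharedInstance];
    asr.delegate = delegate;
    [asr setParameter:@"iat" forKey:[IFlySpeechConstant IFLY_DOMAIN]];
    [asr setParameter:@"json" forKey:[IFlySpeechConstant RESULT_TYPE]];
    [asr setParameter:@"4000" forKey:[IFlySpeechConstant VAD_BOS]];
    [asr setParameter:@"1000" forKey:[IFlySpeechConstant VAD_EOS]];
    [asr setParameter:IFLY_AUDIO_SOURCE_MIC forKey:[IFlySpeechConstant AUDIO_SOURCE]];
    if (![asr startListening]) {
        NSLog(@"startListening refused - check onError: for the reason");
    }
}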

View File

@ -0,0 +1,112 @@
//
// IFlySpeechRecognizerDelegate.h
// MSC
//
// Created by ypzhao on 13-3-27.
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
@class IFlySpeechError;
/*!
*
*
* @param grammarId id
* @param error
*/
typedef void(^IFlyOnBuildFinishCompletionHandler)(NSString* grammarId,IFlySpeechError * error);
/*!
*
* 使.
*/
@protocol IFlySpeechRecognizerDelegate <NSObject>
@required
/*!
*
* errorCode进行相应的处理
* errorCode没有错误时
* `cancel`
* `startListenging`
*
* @param errorCode
*/
- (void) onError:(IFlySpeechError *) errorCode;
/*!
*
*
* 使results的示例如下
* <pre><code>
* - (void) onResults:(NSArray *) results{
* NSMutableString *result = [[NSMutableString alloc] init];
* NSDictionary *dic = [results objectAtIndex:0];
* for (NSString *key in dic){
* [result appendFormat:@"%@",key];//合并结果
* }
* }
* </code></pre>
*
* @param results -[out] NSArray的第一个元素为NSDictionaryNSDictionary的key为识别结果sc为识别结果的置信度
* @param isLast -[out]
*/
- (void) onResults:(NSArray *) results isLast:(BOOL)isLast;
@optional
/*!
*
*
*
* @param volume -[out] 0-30
*/
- (void) onVolumeChanged: (int)volume;
/*!
*
* `startListening`
* onError:
*/
- (void) onBeginOfSpeech;
/*!
*
* `stopListening`
* onError:
*/
- (void) onEndOfSpeech;
/*!
*
* `cancel`cancel函数和回调onError之前会有一个
*
*/
- (void) onCancel;
#ifdef _EDUCATION_
/**
* Key
*
* @param key Key
*/
- (void) getAudioKey:(NSString *)key;
#endif
/**
*
*
*
* @param eventType IFlySpeechEventType的IFlySpeechEventTypeVoiceChangeResult枚举
* @param arg0 arg0
* @param arg1 arg1
* @param eventData
*/
- (void) onEvent:(int)eventType arg0:(int)arg0 arg1:(int)arg1 data:(NSData *)eventData;
@end

View File

@ -0,0 +1,128 @@
//
// IFlySpeechSynthesizer.h
// MSC
//
// Created by 侯效林 on 16-4-22.
// Copyright (c) 2016年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "IFlySpeechSynthesizerDelegate.h"
/*!
*
*/
@interface IFlySpeechSynthesizer : NSObject
/*!
*
*/
@property(nonatomic,assign) id<IFlySpeechSynthesizerDelegate> delegate;
/*!
*
*
* @return
*/
+ (instancetype) sharedInstance;
/*!
*
*
* @return YES,NO.
*/
+ (BOOL) destroy;
/*!
 * Set a synthesis session parameter. Main keys:
 * <table>
 * <thead>
 * <tr><th>parameter</th><th><em>description</em></th></tr>
 * </thead>
 * <tbody>
 * <tr><td>speed</td><td>speaking rate, 0~100</td></tr>
 * <tr><td>volume</td><td>volume, 0~100</td></tr>
 * <tr><td>voice_name</td><td>speaker, default xiaoyan</td></tr>
 * <tr><td>sample_rate</td><td>audio sample rate: 16000 or 8000</td></tr>
 * <tr><td>tts_audio_path</td><td>file name for saving the synthesized audio under Documents; nil disables saving</td></tr>
 * <tr><td>params</td><td>extended parameters</td></tr>
 * </tbody>
 * </table>
 *
 * @param value parameter value
 * @param key   parameter key
 *
 * @return YES on success, NO on failure
 */
-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
/*!
*
*
* @param key key
*
* @return
*/
-(NSString*) parameterForKey:(NSString *)key;
/*!
* ()
* `onCompleted`
*
* @param text ,1k
*/
- (void) startSpeaking:(NSString *)text;
/*!
* ()
* `onCompleted`
*
* @param text ,1k
* @param uri
*/
-(void)synthesize:(NSString *)text toUri:(NSString*)uri;
/*!
*
* `onCompleted`
*/
- (void) pauseSpeaking;
/*!
*
*/
- (void) resumeSpeaking;
/*!
*
*/
- (void) stopSpeaking;
/*!
*
*/
@property (nonatomic, readonly) BOOL isSpeaking;
@end
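// Illustrative usage sketch, not part of the original header: speaking a short
// string, and synthesizing to a file instead of the speaker. Function names
// and the output path are made up; parameter keys follow the table above.
#import "IFlySpeechSynthesizer.h"

static void SpeakSketch(id<IFlySpeechSynthesizerDelegate> delegate) {
    IFlySpeechSynthesizer *tts = [IFlySpeechSynthesizer sharedInstance];
    tts.delegate = delegate;
    [tts setParameter:@"xiaoyan" forKey:@"voice_name"];
    [tts setParameter:@"50" forKey:@"speed"];
    [tts startSpeaking:@"欢迎使用讯飞语音合成"];   // onCompleted: fires when playback finishes
}

static void SynthesizeToFileSketch(void) {
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"demo.pcm"];
    [[IFlySpeechSynthesizer sharedInstance] synthesize:@"你好" toUri:path];   // no playback, audio written to path
}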

View File

@ -0,0 +1,80 @@
//
// IFlySpeechSynthesizerDelegate.h
// MSC
//
// Created by ypzhao on 13-3-20.
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "IFlySpeechEvent.h"
@class IFlySpeechError;
/**
*
*/
@protocol IFlySpeechSynthesizerDelegate <NSObject>
@required
/**
*
*
*
* @param error
*/
- (void) onCompleted:(IFlySpeechError*) error;
@optional
/**
*
*/
- (void) onSpeakBegin;
/**
*
*
* @param progress 0-100
* @param msg nil
*/
- (void) onBufferProgress:(int) progress message:(NSString *)msg;
/**
*
*
* @param progress 0-100
* @param beginPos 0-100
* @param endPos 0-100
*/
- (void) onSpeakProgress:(int) progress beginPos:(int)beginPos endPos:(int)endPos;
/**
*
*/
- (void) onSpeakPaused;
/**
*
*/
- (void) onSpeakResumed;
/**
*
* `cancel`
*/
- (void) onSpeakCancel;
/**
*
*
*
* @param eventType IFlySpeechEventType枚举EVENT_TTS_BUFFER也就是实时返回合成音频
* @param arg0 arg0
* @param arg1 arg1
* @param eventData
*/
- (void) onEvent:(int)eventType arg0:(int)arg0 arg1:(int)arg1 data:(NSData *)eventData;
@end

View File

@ -0,0 +1,134 @@
//
// IFlySpeechUnderstander.h
// MSC
//
// Created by iflytek on 2014-03-12.
// Copyright (c) 2014年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
@class IFlySpeechError;
@protocol IFlySpeechRecognizerDelegate;
/*!
*
*/
@interface IFlySpeechUnderstander : NSObject
/*!
*
*/
@property (readonly) BOOL isUnderstanding;
/*!
*
*/
@property(nonatomic,retain) id<IFlySpeechRecognizerDelegate> delegate ;
/*!
*
*
* @return
*/
+(instancetype) sharedInstance;
/*!
*
* onError回调返回后请求下一路回话
*
* @return YESNO
*/
- (BOOL) startListening;
/*!
*
*
*/
- (void) stopListening;
/*!
*
*/
- (void) cancel;
/*!
 * Set an understanding session parameter. Main keys:
 * <table>
 * <thead>
 * <tr><th>parameter</th><th><em>description</em></th></tr>
 * </thead>
 * <tbody>
 * <tr><td>domain</td><td>application domain: iat (dictation), search, video, poi, music or asr (grammar-based recognition)</td></tr>
 * <tr><td>vad_bos</td><td>leading-silence timeout in ms; default 5000 for cloud iat, otherwise 4000; range 0-10000</td></tr>
 * <tr><td>vad_eos</td><td>trailing-silence length in ms that ends the utterance; default 1800 for sms, otherwise 700; range 0-10000</td></tr>
 * <tr><td>sample_rate</td><td>audio sample rate: 16000 or 8000</td></tr>
 * <tr><td>asr_ptt</td><td>punctuation in the result: 1 with punctuation, 0 without</td></tr>
 * <tr><td>result_type</td><td>result format: json, xml or plain; default json</td></tr>
 * <tr><td>grammarID</td><td>grammar id, used when domain is asr</td></tr>
 * <tr><td>asr_audio_path</td><td>file name for saving the session audio under Documents; nil disables saving</td></tr>
 * <tr><td>params</td><td>extended parameters</td></tr>
 * </tbody>
 * </table>
 * @param value parameter value
 * @param key   parameter key
 *
 * @return YES on success, NO on failure
 */
-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
/*!
*
*
* @param audioData
*
* @return YESNO
*/
- (BOOL) writeAudio:(NSData *) audioData;
/*!
*
*
* @return YESNO
*/
- (BOOL) destroy;
@end
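// Illustrative usage sketch, not part of the original header: one-shot speech
// understanding from the microphone; recognition and semantics come back
// through the shared IFlySpeechRecognizerDelegate onResults:isLast: callback.
// The function name is made up for this example.
#import "IFlySpeechUnderstander.h"
#import "IFlySpeechRecognizerDelegate.h"
#import "IFlySpeechConstant.h"

static void StartUnderstandingSketch(id<IFlySpeechRecognizerDelegate> delegate) {
    IFlySpeechUnderstander *understander = [IFlySpeechUnderstander sharedInstance];
    understander.delegate = delegate;
    [understander setParameter:@"json" forKey:[IFlySpeechConstant RESULT_TYPE]];
    [understander setParameter:@"zh_cn" forKey:[IFlySpeechConstant LANGUAGE]];
    if (!understander.isUnderstanding) {
        [understander startListening];
    }
}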

View File

@ -0,0 +1,184 @@
//
// IFlySpeechUtility.h
// MSCDemo
//
// Created by admin on 14-5-7.
// Copyright (c) 2014年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
#define iOS_EXCLUSIVE //iOS平台独占API
@class IFlySpeechError;
/**
*
*/
typedef NS_ENUM(NSUInteger,IFlyEngineMode){
/**
* 使MSC使
*/
IFlyEngineModeAuto = 0,
/**
* 使MSC
*/
IFlyEngineModeMsc,
/**
* 使(使
*/
IFlyEngineModePlus,
};
/**
*
*/
typedef NS_ENUM(NSUInteger,IFlySpeechPlusServiceType){
/**
*
*/
IFlySpeechPlusServiceTypeNone=0,
/**
*
*/
IFlySpeechPlusServiceTypeTTS,
/**
*
*/
IFlySpeechPlusServiceTypeISR,
/**
*
*/
IFlySpeechPlusServiceTypeIVW,
} ;
/** 语记返回回调
*/
@protocol IFlySpeechplusDelegate <NSObject>
/**
*
*
* @param errorCode
*/
- (void)onError:(int)errorCode;
/**
*
*/
- (void)onCompleted;
@end
/**
*
*/
@interface IFlySpeechUtility : NSObject
/*!
*
* <br>
* http://www.xfyun.cn
*
* @param params appid参数传入appid=123456
*
* @return
*/
+ (IFlySpeechUtility*) createUtility:(NSString *) params;
/*!
*
*
* @return YES,NO
*/
+(BOOL) destroy;
/*!
*
*
* @return
*/
+(IFlySpeechUtility *) getUtility;
/*!
* MSC引擎的状态参数
*
* @param value
* @param key
*
* @return YES,NO
*/
-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
/**
* MSC引擎状态参数
*
* @param key
*
* @return
*/
- (NSString *)parameterForKey:(NSString *)key;
/**
*
*/
@property (nonatomic, readonly) IFlyEngineMode engineMode;
/**
*
*/
@property (nonatomic, assign) id<IFlySpeechplusDelegate> delegate;
@end
/**
*
*/
@interface IFlySpeechUtility (SpeechPlus)
/**
*
*
* @return YESNO
*/
+ (BOOL)checkServiceInstalled;
/**
* 使
* [[UIApplication sharedApplication] openUrl:]
*
* @return App Store下载地址
*/
+ (NSString *)componentUrl;
/**
* 使
* 使URL启动第三方应用程序时传递的数据
* application:openURL:sourceApplication:annotation:application:handleOpenURL中调用
*
* @param url URL
*
* @return YESNO
*/
- (BOOL)handleOpenURL:(NSURL *)url iOS_EXCLUSIVE;
/**
* 0
*
* @param serviceType
*
* @return YESNO
*/
- (BOOL)openSpeechPlus:(IFlySpeechPlusServiceType)serviceType iOS_EXCLUSIVE;
@end
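// Illustrative usage sketch, not part of the original header: initialize the
// SDK once, early in application startup, before any other iflyMSC object is
// used. "YOUR_APPID" is a placeholder for the appid issued at http://www.xfyun.cn.
#import "IFlySpeechUtility.h"

static void InitializeMscSketch(void) {
    [IFlySpeechUtility createUtility:@"appid=YOUR_APPID"];
    if ([IFlySpeechUtility getUtility] == nil) {
        NSLog(@"MSC initialization failed");
    }
}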

View File

@ -0,0 +1,57 @@
//
// TextUnderstand.h
// MSCDemo
//
// Created by iflytek on 4/24/14.
// Copyright (c) 2014 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
@class IFlySpeechError;
/*!
*
*
* @param result
* @param error
*/
typedef void(^IFlyUnderstandTextCompletionHandler)(NSString* result, IFlySpeechError * error);
/*!
*
*/
@interface IFlyTextUnderstander : NSObject
/*!
*
*/
@property (readonly, atomic) __block BOOL isUnderstanding;
/*!
*
*
*
* @param text
* @param completionHandler
*
* @return
*/
-(int) understandText:(NSString*)text withCompletionHandler:(IFlyUnderstandTextCompletionHandler) completionHandler;
/*!
*
*
* @param value
* @param key
*
* @return YESNO
*/
-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
/*!
*
*/
-(void)cancel;
@end
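// Illustrative usage sketch, not part of the original header: sending plain
// text to the semantic-understanding service and logging the returned result.
// The function name and the sample sentence are made up for this example.
#import "IFlyTextUnderstander.h"
#import "IFlySpeechError.h"

static void UnderstandTextSketch(void) {
    IFlyTextUnderstander *understander = [[IFlyTextUnderstander alloc] init];
    [understander understandText:@"合肥明天的天气怎么样"
            withCompletionHandler:^(NSString *result, IFlySpeechError *error) {
        if (error.errorCode == 0) {
            NSLog(@"semantic result: %@", result);
        } else {
            NSLog(@"understand failed: %d", error.errorCode);
        }
    }];
}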

View File

@ -0,0 +1,73 @@
//
// IFlyUserWords.h
// MSC
//
// Created by ypzhao on 13-2-26.
// Copyright (c) 2013年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
/*!
*
* (iat).
*/
@interface IFlyUserWords : NSObject
/*!
*
*
* <pre><code>{\"userword\":[{\"name\":\"iflytek\",\"words\":[\"科大讯飞\",
* \"云平台\",\"用户词条\",\"开始上传词条\"]}]}</code></pre>
*
* @param json
*
* @return IFlyUserWords对象
*/
- (id) initWithJson:(NSString *)json;
/*!
*
*
* @return nil
*/
- (NSString *) toString;
/*!
* key对应的数据
*
* @param key putword:value中设置的key
*
* @return key对应的数组
*/
- (NSArray *) getWords: (NSString *) key;
/*!
*
*
* @param key key
* @param value
*
* @return YES,NO
*/
- (BOOL) putWord: (NSString *) key value:(NSString *)value;
/*!
*
*
* @param key key
* @param words
*
* @return YES,NO
*/
- (BOOL) putwords: (NSString *) key words:(NSArray *)words;
/*!
* key对应的用户词数据
*
* @param key key
*
* @return YES,NO
*/
- (BOOL) containsKey: (NSString *) key;
@end
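// Illustrative usage sketch, not part of the original header: building a
// user-word list and handing the resulting JSON to IFlyDataUploader (declared
// earlier in this framework). Using plain init for an empty list, and the data
// name "userword", are assumptions made for this example.
#import "IFlyUserWords.h"
#import "IFlyDataUploader.h"
#import "IFlySpeechError.h"

static void UploadUserWordsSketch(void) {
    IFlyUserWords *userWords = [[IFlyUserWords alloc] init];
    [userWords putwords:@"hotwords" words:@[@"科大讯飞", @"云平台"]];
    NSString *json = [userWords toString];

    IFlyDataUploader *uploader = [[IFlyDataUploader alloc] init];
    [uploader uploadDataWithCompletionHandler:^(NSString *result, IFlySpeechError *error) {
        NSLog(@"user words uploaded, error=%d", error.errorCode);
    } name:@"userword" data:json];
}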

View File

@ -0,0 +1,179 @@
//
// IFlyVoiceWakeuper.h
// wakeup
//
// Created by admin on 14-3-18.
// Copyright (c) 2014年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "IFlyVoiceWakeuperDelegate.h"
/**
 Version 1.0.0.1
 Measured on an iPhone 5s: roughly 3 MB of memory, CPU usage below 12%.

 Typical usage: obtain the instance with sharedInstance, configure it with
 setParameter, then start it:
     [_voiceWakeuper setParameter:@"wake" forKey:@"sst"];
         // sst: "wake" starts wake-up detection, "enroll" starts enrollment (training)
     [_voiceWakeuper setParameter:@"m_wakeupResPath" forKey:@"ivw_wake_list"];
         // ivw_wake_list: path of the wake-up word resource
     [_voiceWakeuper setParameter:@"holdValue" forKey:@"ivw_threshold"];
         // ivw_threshold: wake-up threshold; word ids and thresholds are defined by the resource
 startListening starts the service, stopListening stops it, cancel aborts it.

 Parameter and result keys:
     SESSION_TYPE     @"sst"            // service type: wake = wake-up, enroll = enrollment
     WAKEUP           @"wake"           // wake-up
     ENROLL           @"enroll"         // enrollment
     IVW_THRESHOLD    @"ivw_threshold"  // threshold of each wake-up word
     PARAM            @"params"         // extended parameters
     IVW_WORD_PATH    @"ivw_word_path"  // e.g. ivw_word_path=/abc/123/newpath.irf
     KEEP_ALIVE       @"keep_alive"     // 0: stop after one result, 1: keep listening
     FOCUS_TYPE       @"focus_type"     // result type returned for wake-up and enrollment: wake / enroll
     STATUS           @"status"         // service status
     SUCESS           @"success"        // succeeded
     FAILED           @"failed"         // failed
     DONE             @"done"           // training finished
     ID               @"id"             // id of the wake-up result
     THRESHOLD        @"threshold"      // threshold of the training resource
     SCORE            @"score"          // confidence of the result
     NUM              @"num"            // number of successful training passes
     BOS              @"bos"            // leading endpoint
     EOS              @"eos"            // trailing endpoint
     AUDIO_SOURCE     @"audio_source"   // -1: audio is supplied through WriteAudio
     MERGE_RES_ACTION @"merge"          // merge resources
 */
@interface IFlyVoiceWakeuper : NSObject
{
}
@property(assign) id<IFlyVoiceWakeuperDelegate> delegate;
/**
*/
+ (instancetype) sharedInstance;
/**
:YES NO
*/
-(BOOL) startListening;
/**
cancel的区别
*/
-(BOOL) stopListening;
/**
*/
-(NSString*) getParameter:(NSString *)key;
/**
*/
-(BOOL) setParameter:(NSString *) value forKey:(NSString*)key;
/**
*/
-(BOOL) cancel;
/**
*/
//+(int) updateWords:(NSString *)action params:(NSString*) params;
/**
*/
//-(int) writeAudio:(const void*)buffer offset:(int)offset length:(int)length;
@property (readonly) BOOL isListening;
@end
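// Illustrative usage sketch, not part of the original header: starting wake-up
// word detection. The resource path is a placeholder and the "ivw_threshold"
// value format is an assumption; the comment block above only documents it
// partially.
#import "IFlyVoiceWakeuper.h"
#import "IFlyVoiceWakeuperDelegate.h"

static void StartWakeupSketch(id<IFlyVoiceWakeuperDelegate> delegate, NSString *wakeResourcePath) {
    IFlyVoiceWakeuper *wakeuper = [IFlyVoiceWakeuper sharedInstance];
    wakeuper.delegate = delegate;
    [wakeuper setParameter:@"wake" forKey:@"sst"];             // wake = detect, enroll = train
    [wakeuper setParameter:wakeResourcePath forKey:@"ivw_wake_list"];
    [wakeuper setParameter:@"0:20" forKey:@"ivw_threshold"];   // "<wordId>:<threshold>", assumed format
    if (!wakeuper.isListening) {
        [wakeuper startListening];                             // matches arrive via onResult:
    }
}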

View File

@ -0,0 +1,84 @@
//
// IFlyVoiceWakeuperDel.h
// wakeup
//
// Created by admin on 14-3-18.
// Copyright (c) 2014年 iflytek. All rights reserved.
//
#import <Foundation/Foundation.h>
@class IFlySpeechError;
/**
 * Callbacks: onError, onBeginOfSpeech, onVolumeChanged, onEndOfSpeech, onResult.
 *
 * Wake-up result fields:
 *   focus_type = wake
 *   wakeup_result_id = 0
 *   wakeup_result_Score = 60
 *
 * Enrollment (training) result fields:
 *   focus_type = enroll
 *   enroll_success_num = 1
 *   current_enroll_status = success/failed
 *   wakeup_result_Score = 60
 *   threshold = 10
 */
@protocol IFlyVoiceWakeuperDelegate <NSObject>
@optional
/**
*/
-(void) onBeginOfSpeech;
/**
*/
-(void) onEndOfSpeech;
/**
 * Error callback.
 * @param error error information
 */
- (void) onError:(IFlySpeechError *) error;
/**
 * Result callback.
 * @param resultArray result dictionary (id, score, status, etc.)
 */
-(void) onResult:(NSMutableDictionary *)resultArray;
/**
 * Volume change callback.
 * @param volume current input volume
 */
- (void) onVolumeChanged: (int)volume;
/** 扩展事件回调
@param eventType IFlySpeechEvent枚举
*/
- (void) onEvent:(int)eventType isLast:(BOOL)isLast arg1:(int)arg1 data:(NSMutableDictionary *)eventData;
@end

Binary file not shown.

137
www/Speech.js Normal file
View File

@ -0,0 +1,137 @@
/**
* Created by Edc.zhang on 2017/2/13.
*/
var cordova = require('cordova'),
channel = require('cordova/channel'),
exec = require('cordova/exec');
var Speech = function() {
this.channels = {
'SyncContact': channel.create('SyncContact'),
'UpdateUserWord': channel.create('UpdateUserWord'),
'SpeechError': channel.create('SpeechError'),
'SpeechResults': channel.create('SpeechResults'),
'VolumeChanged': channel.create('VolumeChanged'),
'SpeechBegin': channel.create('SpeechBegin'),
'SpeechEnd': channel.create('SpeechEnd'),
'SpeechCancel': channel.create('SpeechCancel'),
'SpeakCompleted': channel.create('SpeakCompleted'),
'SpeakBegin': channel.create('SpeakBegin'),
'SpeakProgress': channel.create('SpeakProgress'),
'SpeakPaused': channel.create('SpeakPaused'),
'SpeakResumed': channel.create('SpeakResumed'),
'SpeakCancel': channel.create('SpeakCancel'),
'BufferProgress': channel.create('BufferProgress')
};
this.voice_names = {
'xiaoyan' : '小燕',
'xiaoyu' : '小宇',
'vixy' : '小研',
'vixq' : '小琪',
'vixf' : '小峰',
'vixm' : '香港小梅',
'vixl' : '台湾小莉',
'vixr' : '四川妹纸',
'vixyun' : '东北小芸',
'vixk' : '河南小坤',
'vixqa' : '湖南小强',
'vixying' : '陕西小莹',
'vixx' : '蜡笔小新',
'vinn' : '楠楠',
'vils' : '孙大爷',
'Catherine' : '美国Catherine',
'henry' : '美国Henry',
'vimary' : '英国Mary',
'Mariane' : '法国Mariane',
'Guli' : '维族Guli',
'Allabent' : '俄国Allabent',
'Gabriela' : '西班牙Gabriela',
'Abha' : '印度Abha',
'XiaoYun' : '越南XiaoYun'
};
this.login();
this.msg = "";
};
Speech.prototype = {
_eventHandler: function(info) {
if (info.event in this.channels) {
this.channels[info.event].fire(info);
}
},
addEventListener: function(event, f, c) {
if (event in this.channels) {
this.channels[event].subscribe(f, c || this);
}
},
removeEventListener: function(event, f) {
if (event in this.channels) {
this.channels[event].unsubscribe(f);
}
},
login: function() {
// closure variable for local function to use
var speech = this;
// the callback will be saved in the session for later use
var callback = function(info) {
speech._eventHandler(info);
};
exec(callback, callback, 'Speech', 'login', []);
// Accumulate iFlytek dictation results: sn is the sentence index (1 starts a
// new utterance), ws holds the word segments, cw[0].w is the best candidate
// for each segment, and ls marks the last chunk of the utterance.
function parseResults( e ) {
var data = JSON.parse( e.results );
if(data.sn == 1) speech.msg = ""; // first chunk: reset the accumulated text
var ws = data.ws;
for( var i=0; i<ws.length; i++ ) {
var word = ws[i].cw[0].w; // take the top candidate for each segment
speech.msg += word;
}
if(data.ls == true) { // final chunk: hand the full text to the caller
console.log( speech.msg );
if(typeof speech.onspeakcallback === 'function') {
speech.onspeakcallback( speech.msg );
}
}
}
this.addEventListener('SpeechResults', parseResults );
},
startListen: function(func,fail,isShow,isShowPunc) {
this.onspeakcallback = func;
exec(null, null, 'Speech', 'startListening', [{language:'zh_cn', accent:'mandarin'},isShow,isShowPunc]);
},
stopListen: function() {
exec(null, null, 'Speech', 'stopListening', []);
},
cancelListening: function() {
exec(null, null, 'Speech', 'cancelListening', []);
},
startSpeak: function(success,error,text) {
exec(null, null, 'Speech', 'startSpeaking', [text, {voice_name: 'xiaoyan'}]);
},
pauseSpeaking: function() {
exec(null, null, 'Speech', 'pauseSpeaking', []);
},
resumeSpeaking: function() {
exec(null, null, 'Speech', 'resumeSpeaking', []);
},
stopSpeak: function() {
exec(null, null, 'Speech', 'stopSpeaking', []);
}
};
module.exports = new Speech();

View File

@ -0,0 +1,26 @@
var exec = require('cordova/exec');
var xunfeiListenSpeaking = {
startListen:function (success,error,isShowDialog,isShowPunc){
exec(success,error,"XunfeiListenSpeaking","startListen",[isShowDialog,isShowPunc]);
},
stopListen:function(){
exec(null,null,"XunfeiListenSpeaking","stopListen",[]);
},
startSpeak:function(success,error,speakMessage){
exec(success,error,"XunfeiListenSpeaking","startSpeak",[speakMessage]);
},
stopSpeak:function(){
exec(null,null,"XunfeiListenSpeaking","stopSpeak",[]);
},
pauseSpeaking: function() {
exec(null, null, 'XunfeiListenSpeaking', 'pauseSpeaking', []);
},
resumeSpeaking: function() {
exec(null, null, 'XunfeiListenSpeaking', 'resumeSpeaking', []);
}
};
module.exports = xunfeiListenSpeaking;