第二步其他配置:根据不同版本配置可能有所不同。
build.gradle(app)
plugins {
    id 'com.android.application'
}

android {
    // Release signing config.
    // NOTE(review): credentials are hard-coded for demo purposes; move them out of
    // version control (gradle.properties / environment variables) for production.
    signingConfigs {
        config {
            keyAlias 'xf2022'
            keyPassword 'xf2022'
            storeFile file('../xf2022.jks')
            storePassword 'xf2022'
        }
    }

    compileSdk 32

    defaultConfig {
        applicationId "com.example.clientapplication"
        minSdk 21
        targetSdk 32
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    // Pick up the iFlytek native .so libraries bundled under app/libs.
    sourceSets {
        main {
            jniLibs.srcDirs = ['libs']
        }
    }

    repositories {
        maven {
            url "https://jitpack.io"
        }
    }

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }
}

dependencies {
    implementation 'androidx.appcompat:appcompat:1.3.0'
    implementation 'com.google.android.material:material:1.4.0'
    implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
    // FIX: use a forward slash so the path resolves on every OS —
    // the original 'libs\\Msc.jar' only worked on Windows.
    implementation files('libs/Msc.jar')
    testImplementation 'junit:junit:4.13.2'
    androidTestImplementation 'androidx.test.ext:junit:1.1.3'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
    implementation "org.java-websocket:Java-WebSocket:1.5.1"
    implementation 'org.greenrobot:eventbus:3.1.1'
    implementation 'com.google.code.gson:gson:2.6.2'
}
settings.gradle
// Resolve Gradle plugins from the plugin portal, Google's Maven repo, and Maven Central.
pluginManagement {
repositories {
gradlePluginPortal()
google()
mavenCentral()
}
}
// Project name and included modules.
rootProject.name = "ClientApplication"
include ':app'
gradle.properties
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Enables namespacing of each library's R class so that its R class includes only the
# resources declared in the library itself and none from the library's dependencies,
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true
android.enableJetifier=true
android.injected.testOnly=false
org.gradle.daemon=true
org.gradle.parallel=true
build.gradle
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
// Aliyun mirrors are listed first to speed up resolution in mainland China;
// mavenCentral() is kept below as a fallback.
//google()
//mavenCentral()
maven { url 'https://maven.aliyun.com/repository/public/'}
maven { url 'https://maven.aliyun.com/repository/google/'}
maven { url 'https://maven.aliyun.com/repository/jcenter/'}
mavenLocal()
mavenCentral()
}
dependencies {
classpath "com.android.tools.build:gradle:4.2.2"
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
//google()
//mavenCentral()
maven { url 'https://maven.aliyun.com/repository/public/'}
maven { url 'https://maven.aliyun.com/repository/google/'}
maven { url 'https://maven.aliyun.com/repository/jcenter/'}
mavenLocal()
mavenCentral()
}
}
// The plugins DSL is unused because this project applies the Android plugin
// via the buildscript classpath above (AGP 4.2.2 style).
//plugins {
// id 'com.android.application' version '7.2.1' apply false
// id 'com.android.library' version '7.2.1' apply false
//}
// Standard clean task: deletes the root build directory.
task clean(type: Delete) {
delete rootProject.buildDir
}
第三步:使用
工具类
package com.example.clientapplication;
import android.content.Context;
import android.util.Log;
import com.example.clientapplication.Util.FucUtil;
import com.iflytek.cloud.ErrorCode;
import com.iflytek.cloud.GrammarListener;
import com.iflytek.cloud.InitListener;
import com.iflytek.cloud.SpeechConstant;
import com.iflytek.cloud.SpeechError;
import com.iflytek.cloud.SpeechRecognizer;
import com.iflytek.cloud.util.ResourceUtil;
/**
* author:android开发练习生
* 离线命令词对象构建类
*/
public class RecognizerUtil {
/**
* 返回一个构建好的离线命令词对象
*
* @param context 上下文对象
* @param mAsr 离线命令词对象
* @param path 文件保存路径
*/
public static SpeechRecognizer getAsr(Context context , SpeechRecognizer mAsr , String path){
InitListener mInitListener = new InitListener() {
@Override
public void onInit(int code) {
Log.e("buggg", "SpeechRecognizer init() code1111 = " + code);
if (code != ErrorCode.SUCCESS) {
//showTip("初始化失败,错误码:" + code + ",请点击网址https://www.xfyun.cn/document/error-code查询解决方案");
}
}
};
GrammarListener mGrammarListener = new GrammarListener() {
@Override
public void onBuildFinish(String grammarId, SpeechError error) {
if (error == null) {
Log.d("buggg", "构建成功"+ grammarId);
} else {
Log.d("buggg", "构建失败");
}
}
};
if (mInitListener != null){
mAsr = SpeechRecognizer.createRecognizer(context, mInitListener);
if (mAsr == null) {
Log.e("buggg", "masr is null");
} else {
Log.e("buggg", "now is setAsrParam");
// 设置文本编码格式
mAsr.setParameter(SpeechConstant.TEXT_ENCODING, "utf-8");
// 清空参数
mAsr.setParameter(SpeechConstant.PARAMS, null);
// 设置识别引擎
mAsr.setParameter(SpeechConstant.ENGINE_TYPE, SpeechConstant.TYPE_LOCAL);
// 设置本地识别资源
mAsr.setParameter(ResourceUtil.ASR_RES_PATH,ResourceUtil.generateResourcePath(context, ResourceUtil.RESOURCE_TYPE.assets, "asr/common.jet"));
// 设置语法构建路径
mAsr.setParameter(ResourceUtil.GRM_BUILD_PATH, path);
// 设置返回结果格式
mAsr.setParameter(SpeechConstant.RESULT_TYPE, "json");
// 设置本地识别使用语法id
mAsr.setParameter(SpeechConstant.LOCAL_GRAMMAR, "call");
// 设置识别的门限值
mAsr.setParameter(SpeechConstant.MIXED_THRESHOLD, "30");
// 使用8k音频的时候请解开注释
// mAsr.setParameter(SpeechConstant.SAMPLE_RATE, "8000");
//只有设置这个属性为1时,VAD_BOS VAD_EOS才会生效,且RecognizerListener.onVolumeChanged才有音量返回默认:1
mAsr.setParameter(SpeechConstant.VAD_ENABLE, "1");
// 设置语音前端点:静音超时时间,即用户多长时间不说话则当做超时处理1000~10000
//当音版的前静音时长超过设置值时,SDK就会认为音频输入已完成。
// 此时,SDK会通过RecognizerListener.onError(SpeechError)结束会话,
// 并携带错误码为ErrorCode.MSP_ERROR_NO_DATA的错误信息。
mAsr.setParameter(SpeechConstant.VAD_BOS, "5000");
// 设置语音后端点:后端点静音检测时间,即用户停止说话多长时间内即认为不再输入, 自动停止录音0~10000
mAsr.setParameter(SpeechConstant.VAD_EOS, "200");
// 设置音频保存路径,保存音频格式支持pcm、wav,设置路径为sd卡请注意WRITE_EXTERNAL_STORAGE权限
// 注:AUDIO_FORMAT参数语记需要更新版本才能生效
mAsr.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
//设置识别会话被中断时(如当前会话未结束就开启了新会话等),
//是否通过RecognizerListener.onError(com.iflytek.cloud.SpeechError)回调ErrorCode.ERROR_INTERRUPT错误。
//默认false [null,true,false]
mAsr.setParameter(SpeechConstant.ASR_INTERRUPT_ERROR, "false");
Log.e("buggg", "masr is not null");
//int ret=buildAsrGrammar("call.bnf",mGrammarListener);
String strLocalGrammar = FucUtil.readFile(context,"call.bnf", "utf-8");
int ret = mAsr.buildGrammar("bnf", strLocalGrammar, mGrammarListener);
if (ret != ErrorCode.SUCCESS) {
Log.e("buggg", "creat not success");
}
}}
else {
Log.e("buggg", "initlistener is null");
}
return mAsr;
}
//讯飞模块初始化
//grmPath = getExternalFilesDir("msc").getAbsolutePath() + "/xfMsc";
// 初始化识别对象
//mAsr = SpeechRecognizer.createRecognizer(this, mInitListener);
// 初始化语法、命令词
//讯飞模块初始化-end
// public int buildAsrGrammar(String strFile,GrammarListener grammarListener)
// {
// Log.e("buggg", "masr is not null");
// String strLocalGrammar = FucUtil.readFile(context, strFile, "utf-8");
// int ret = mAsr.buildGrammar("bnf", strLocalGrammar, grammarListener);
// return ret;
// }
//
/**
* 参数设置
* 内部录音识别工作模式
*
* @return
*/
// public void setAsrParam() {
// Log.e("buggg", "now is setAsrParam");
// // 设置文本编码格式
// mAsr.setParameter(SpeechConstant.TEXT_ENCODING, "utf-8");
// // 清空参数
// mAsr.setParameter(SpeechConstant.PARAMS, null);
// // 设置识别引擎
// mAsr.setParameter(SpeechConstant.ENGINE_TYPE, SpeechConstant.TYPE_LOCAL);
// // 设置本地识别资源
// mAsr.setParameter(ResourceUtil.ASR_RES_PATH, getResourcePath());
// // 设置语法构建路径
// mAsr.setParameter(ResourceUtil.GRM_BUILD_PATH, path);
// // 设置返回结果格式
// mAsr.setParameter(SpeechConstant.RESULT_TYPE, "json");
// // 设置本地识别使用语法id
// mAsr.setParameter(SpeechConstant.LOCAL_GRAMMAR, "call");
// // 设置识别的门限值
// mAsr.setParameter(SpeechConstant.MIXED_THRESHOLD, "30");
// // 使用8k音频的时候请解开注释
mAsr.setParameter(SpeechConstant.SAMPLE_RATE, "8000");
//
//
// //只有设置这个属性为1时,VAD_BOS VAD_EOS才会生效,且RecognizerListener.onVolumeChanged才有音量返回默认:1
// mAsr.setParameter(SpeechConstant.VAD_ENABLE, "1");
// // 设置语音前端点:静音超时时间,即用户多长时间不说话则当做超时处理1000~10000
// //当音版的前静音时长超过设置值时,SDK就会认为音频输入已完成。
// // 此时,SDK会通过RecognizerListener.onError(SpeechError)结束会话,
// // 并携带错误码为ErrorCode.MSP_ERROR_NO_DATA的错误信息。
// mAsr.setParameter(SpeechConstant.VAD_BOS, "5000");
// // 设置语音后端点:后端点静音检测时间,即用户停止说话多长时间内即认为不再输入, 自动停止录音0~10000
// mAsr.setParameter(SpeechConstant.VAD_EOS, "500");
// // 设置音频保存路径,保存音频格式支持pcm、wav,设置路径为sd卡请注意WRITE_EXTERNAL_STORAGE权限
// // 注:AUDIO_FORMAT参数语记需要更新版本才能生效
// mAsr.setParameter(SpeechConstant.AUDIO_FORMAT, "wav");
// //设置识别会话被中断时(如当前会话未结束就开启了新会话等),
// //是否通过RecognizerListener.onError(com.iflytek.cloud.SpeechError)回调ErrorCode.ERROR_INTERRUPT错误。
// //默认false [null,true,false]
// mAsr.setParameter(SpeechConstant.ASR_INTERRUPT_ERROR, "false");
// }
//讯飞模块-获取识别资源路径
// private String getResourcePath() {
// StringBuffer tempBuffer = new StringBuffer();
// //识别通用资源
// tempBuffer.append(ResourceUtil.generateResourcePath(context, ResourceUtil.RESOURCE_TYPE.assets, "asr/common.jet"));
// return tempBuffer.toString();
// }
}
主页面使用
//Directory where the ASR grammar/model files are stored
mAsrPath = getExternalFilesDir("msc").getAbsolutePath() + "/xfMsc";
//Create the ASR recognizer (see RecognizerUtil.getAsr)
mAsr = RecognizerUtil.getAsr(this,mAsr,mAsrPath);
/**
 * ASR recognition listener.
 */
mRecognizerListener = new RecognizerListener() {
@Override
public void onVolumeChanged(int volume, byte[] data) {
//Volume-change callback
//showTip("当前正在说话,音量大小:" + volume);
//Log.d("buggg", "返回音频数据:" + data.length);
}
@Override
public void onResult(final RecognizerResult result, boolean isLast) {
//Called when recognition results are returned
if (null != result && !TextUtils.isEmpty(result.getResultString())) {
Log.d("buggg", "recognizer result:" + result.getResultString());
mAsrContent = JsonParser.parseGrammarResult(result.getResultString());
mWebSocketContent = JsonParser.parseGrammarResultId(result.getResultString());
mTvShow.setText(mAsrContent + mWebSocketContent);
} else {
Log.d("buggg", "recognizer result : null");
}
int ret = mAsr.startListening(mRecognizerListener);//restart listening for continuous recognition
if (ret != ErrorCode.SUCCESS) {
showTip("开启监听失败");
}
}
@Override
public void onEndOfSpeech() {
// Trailing end point detected: recording stopped, recognition in progress; no more audio is accepted
Log.d("buggg", "结束录音正在识别");
}
@Override
public void onBeginOfSpeech() {
// The SDK's internal recorder is ready; the user may start speaking
Log.d("buggg", "开始说话开始说话");
}
@Override
public void onError(SpeechError error) {
//Called when an error occurs; restart listening so recognition keeps running
showTip("识别错误:" + error.getErrorCode());
int ret = mAsr.startListening(mRecognizerListener);//restart listening for continuous recognition
if (ret != ErrorCode.SUCCESS) {
showTip("开启监听失败");
}
}
@Override
public void onEvent(int eventType, int arg1, int arg2, Bundle obj) {
// The code below retrieves the cloud session id (useful when reporting issues to support).
// For local (offline) recognition the session id is null.
// if (SpeechEvent.EVENT_SESSION_ID == eventType) {
// String sid = obj.getString(SpeechEvent.KEY_EVENT_SESSION_ID);
// Log.d(TAG, "session id =" + sid);
// }
}
};
退出时释放资源
if (null != mAsr) {
// Release the recognizer when leaving: cancel any in-flight session, then destroy it
mAsr.cancel();
mAsr.destroy();}
第四步:由于命令词识别返回的数据为json模式,应使用确定的方式接收解析
其返回的数据格式如下
{
"sn":1,
"ls":true,
"bg":0,
"ed":0,
"ws":[
{"bg":0,"cw":[{"w":"今天","sc":0}]},
{"bg":0,"cw":[{"w":"的","sc":0}]},
{"bg":0,"cw":[{"w":"天气","sc":0}]},
{"bg":0,"cw":[{"w":"怎么样","sc":0}]},
{"bg":0,"cw":[{"w":"。","sc":0}]}
]
}
解析代码如下:分别返回识别内容和id
package com.example.clientapplication.Util;
import com.example.clientapplication.Bean.WebSocketBean;
import com.google.gson.Gson;
import org.json.JSONArray;
import org.json.JSONObject;
import org.json.JSONTokener;
/**
 * author: android开发练习生
 * Parses the JSON results returned by offline command-word recognition.
 */
public class JsonParser {
    // Confidence threshold: results whose "sc" is at or below this are treated as no match.
    private static final int mConfidence = 60;

    /**
     * Extracts the recognized command text from a recognition result.
     *
     * @param json the raw JSON result string
     * @return "【结果】&lt;text&gt;\n" on success, or a "no match" message on rejection/error
     */
    public static String parseGrammarResult(String json) {
        StringBuilder ret = new StringBuilder();
        try {
            JSONObject joResult = new JSONObject(new JSONTokener(json));
            // Reject low-confidence results outright.
            if (joResult.getInt("sc") <= mConfidence) {
                return ret.append("没有匹配结果.").append("\n").toString();
            }
            JSONArray words = joResult.getJSONArray("ws");
            ret.append("【结果】");
            for (int i = 0; i < words.length(); i++) {
                JSONObject wsItem = words.getJSONObject(i);
                JSONArray items = wsItem.getJSONArray("cw");
                // Local multi-candidate results are sorted by confidence;
                // the first candidate is the best one.
                JSONObject obj = items.getJSONObject(0);
                if (obj.getString("w").contains("nomatch")) {
                    ret.append("没有匹配结果.");
                    return ret.toString();
                }
                ret.append(obj.getString("w"));
            }
            ret.append("\n");
        } catch (Exception e) {
            e.printStackTrace();
            ret.append("没有匹配结果.");
        }
        return ret.toString();
    }

    /**
     * Extracts the command id (and optional parameters) from a recognition result
     * and serializes them as a {@link WebSocketBean} JSON string.
     *
     * @param json the raw JSON result string
     * @return the serialized bean on success, or a "no match" message on rejection/error
     */
    public static String parseGrammarResultId(String json) {
        WebSocketBean webSocketBean = new WebSocketBean();
        StringBuilder ret = new StringBuilder();
        String[] parameter = new String[2];
        try {
            JSONObject joResult = new JSONObject(new JSONTokener(json));
            // Reject low-confidence results outright.
            if (joResult.getInt("sc") <= mConfidence) {
                return ret.append("没有匹配结果").toString();
            }
            JSONArray words = joResult.getJSONArray("ws");
            for (int i = 0; i < words.length(); i++) {
                JSONArray items = words.getJSONObject(i).getJSONArray("cw");
                for (int j = 0; j < items.length(); j++) {
                    JSONObject obj = items.getJSONObject(j);
                    String beanId = obj.getString("id");
                    if (obj.getString("w").contains("nomatch")) {
                        ret.append("没有匹配结果.");
                        return ret.toString();
                    } else if (beanId.length() == 7) {
                        // 7 chars: first 5 are the command id, the remainder is a parameter.
                        webSocketBean.setId(beanId.substring(0, 5));
                        parameter[0] = beanId.substring(5);
                        webSocketBean.setParameter(parameter);
                    } else if (beanId.length() == 5) {
                        // 5 chars: a plain command id with no parameter.
                        webSocketBean.setId(beanId);
                    } else {
                        // Any other length: the whole id becomes the second parameter.
                        parameter[1] = beanId;
                        webSocketBean.setParameter(parameter);
                    }
                }
            }
            ret.append(new Gson().toJson(webSocketBean));
        } catch (Exception e) {
            e.printStackTrace();
            ret.append("没有匹配结果.");
        }
        return ret.toString();
    }
}
注:不是官网给的解决办法,但能够成功实现