Usage of ai.saiy.android.nlu.bluemix.ResolveBluemix in the project Saiy-PS by brandall76:
the onMessage method of the RecognitionBluemix class.
/**
 * Callback invoked for each message received on the IBM Bluemix (Watson) speech
 * websocket. The payload is either an interim state notification or a JSON
 * transcription result containing partial or final hypotheses.
 *
 * @param message the raw JSON payload from the recognition service
 */
@Override
public void onMessage(final String message) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "onMessage: " + message);
    }

    final NLUBluemix nluBluemix = parseResponse(message);

    if (nluBluemix == null) {
        if (DEBUG) {
            MyLog.w(CLS_NAME, "onMessage: nluBluemix null");
        }
        return;
    }

    if (UtilsString.notNaked(nluBluemix.getState())) {
        // A bare state notification (e.g. "listening") carries no results to process.
        if (DEBUG) {
            MyLog.d(CLS_NAME, "Status message: " + nluBluemix.getState());
        }
        return;
    }

    // A fresh transcription payload: discard anything accumulated from prior messages.
    partialArray.clear();
    resultsArray.clear();
    confidenceArray.clear();
    bundle.clear();

    final List<Result> results = nluBluemix.getResults();

    if (!UtilsList.notNaked(results)) {
        if (DEBUG) {
            MyLog.w(CLS_NAME, "onMessage: nluBluemix results naked");
        }
        return;
    }

    if (DEBUG) {
        MyLog.i(CLS_NAME, "results size: " + results.size());
    }

    if (detectFinal(results)) {
        handleFinalResults(message, nluBluemix, results);
    } else {
        handlePartialResults(results);
    }
}

/**
 * Deserialise the raw websocket payload into an {@link NLUBluemix} object.
 *
 * @param message the raw JSON payload
 * @return the parsed object, or null if the payload could not be deserialised
 */
private NLUBluemix parseResponse(final String message) {
    final Gson gson = new GsonBuilder().disableHtmlEscaping().create();
    try {
        return gson.fromJson(message, NLUBluemix.class);
    } catch (final JsonSyntaxException e) {
        if (DEBUG) {
            MyLog.w(CLS_NAME, "JsonSyntaxException");
            e.printStackTrace();
        }
    } catch (final Exception e) {
        // Deliberately broad: a malformed payload must never crash the recognition
        // session. (The previous explicit NullPointerException catch was redundant
        // with this clause and has been folded in.)
        if (DEBUG) {
            MyLog.w(CLS_NAME, "Exception");
            e.printStackTrace();
        }
    }
    return null;
}

/**
 * Deliver interim (non-final) hypotheses to the listener as partial results.
 *
 * @param results the non-empty list of interim recognition results
 */
private void handlePartialResults(final List<Result> results) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "onMessage: have partial");
    }

    extractAlternatives(results, false);

    bundle.putStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION, partialArray);
    listener.onPartialResults(bundle);
}

/**
 * Deliver final hypotheses: either straight to the listener, or through
 * {@link ResolveBluemix} when local NLU resolution is required.
 *
 * @param message    the raw JSON payload (forwarded when serving remotely)
 * @param nluBluemix the parsed payload
 * @param results    the non-empty list of final recognition results
 */
private void handleFinalResults(final String message, final NLUBluemix nluBluemix,
                                final List<Result> results) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "onMessage: have final");
    }

    haveFinal.set(true);

    if (doError.get()) {
        stopListening();
    }

    extractAlternatives(results, true);

    Recognition.setState(Recognition.State.IDLE);

    // Hoisted: the SpeechRecognizer bundle contract requires a primitive float[],
    // and the conversion was previously duplicated across both branches below.
    final float[] confidences = ArrayUtils.toPrimitive(confidenceArray.toArray(new Float[0]), 0.0F);

    if (languageModel == SaiyDefaults.LanguageModel.IBM) {
        if (DEBUG) {
            MyLog.i(CLS_NAME, "final: nlu required");
        }
        if (servingRemote) {
            bundle.putString(Request.RESULTS_NLU, message);
            bundle.putFloatArray(SpeechRecognizer.CONFIDENCE_SCORES, confidences);
            bundle.putStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION, resultsArray);
            listener.onResults(bundle);
        } else {
            new ResolveBluemix(mic.getContext(), sl, UtilsLocale.stringToLocale(vrLocale.toString()),
                    ttsLocale, confidences, resultsArray).unpack(nluBluemix);
        }
    } else {
        if (DEBUG) {
            MyLog.i(CLS_NAME, "final: nlu not required");
        }
        bundle.putFloatArray(SpeechRecognizer.CONFIDENCE_SCORES, confidences);
        bundle.putStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION, resultsArray);
        listener.onResults(bundle);
    }
}

/**
 * Flatten the alternatives of each result into the shared member arrays.
 * Final results populate both the transcript and confidence arrays; partial
 * results populate only the partial transcript array.
 *
 * @param results the recognition results to unpack
 * @param isFinal true when these are final hypotheses, false for interim ones
 */
private void extractAlternatives(final List<Result> results, final boolean isFinal) {
    // Log labels distinguish partial from final output. (The previous code
    // mislabelled partial hypotheses as "final result".)
    final String label = isFinal ? "final" : "partial";

    for (final Result result : results) {
        final List<Alternative> alternatives = result.getAlternatives();
        if (DEBUG) {
            MyLog.i(CLS_NAME, label + " alternatives size: " + alternatives.size());
        }
        for (final Alternative alternative : alternatives) {
            if (DEBUG) {
                MyLog.i(CLS_NAME, label + " result: " + alternative.getTranscript()
                        + " ~ " + alternative.getConfidence());
            }
            if (isFinal) {
                confidenceArray.add(alternative.getConfidence());
                resultsArray.add(alternative.getTranscript().trim());
            } else {
                partialArray.add(alternative.getTranscript().trim());
            }
        }
    }
}
Aggregations