Use of ai.saiy.android.nlu.wit.NLUWit in the project Saiy-PS by brandall76.
From the class NLUCoerce, method coerce:
/**
* Coerce the NLP results into a generic {@link CommandRequest} object, validating the minimal
* requirements for each implementation.
*/
public void coerce() {
if (nluProvider instanceof NLUMicrosoft) {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: instanceof NLUMicrosoft");
}
if (validateNLUNLUMicrosoft((NLUMicrosoft) nluProvider)) {
for (final Intent i : ((NLUMicrosoft) nluProvider).getIntents()) {
if (i.getScore() > NLUMicrosoft.MIN_THRESHOLD) {
commandRequest.setCC(NLUConstants.intentToCC(i.getIntent()));
if (!commandRequest.getCC().equals(CC.COMMAND_UNKNOWN)) {
final NLUMicrosoftHelper microsoftHelper = new NLUMicrosoftHelper();
commandRequest = microsoftHelper.prepareCommand(mContext, commandRequest, getSupportedLanguage(), ((NLUMicrosoft) nluProvider).getEntities());
if (commandRequest.isResolved()) {
break;
}
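// Note: the unconditional break below means only the first intent scoring above the threshold is examined, whether or not it resolved.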
break;
} else {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: COMMAND_UNKNOWN");
}
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
} else {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: below threshold: " + i.getScore());
}
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
}
if (!commandRequest.isResolved()) {
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
} else {
if (DEBUG) {
MyLog.w(CLS_NAME, "coerce: NLUMicrosoft validation failed");
}
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
} else if (nluProvider instanceof NLUNuance) {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: instanceof NLUNuance");
}
if (validateNLUNuance((NLUNuance) nluProvider)) {
for (final Interpretation interpretation : ((NLUNuance) nluProvider).getInterpretations()) {
if (interpretation.getAction().getIntent().getConfidence() > NLUNuance.MIN_THRESHOLD) {
commandRequest.setCC(NLUConstants.intentToCC(interpretation.getAction().getIntent().getValue()));
if (!commandRequest.getCC().equals(CC.COMMAND_UNKNOWN)) {
final NLUNuanceHelper nuanceHelper = new NLUNuanceHelper();
commandRequest = nuanceHelper.prepareCommand(mContext, commandRequest, getSupportedLanguage(), interpretation.getConcept());
if (commandRequest.isResolved()) {
break;
}
} else {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: COMMAND_UNKNOWN");
}
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
} else {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: below threshold: " + interpretation.getAction().getIntent().getConfidence());
}
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
}
if (!commandRequest.isResolved()) {
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
} else {
if (DEBUG) {
MyLog.w(CLS_NAME, "coerce: NLUNuance validation failed");
}
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
} else if (nluProvider instanceof NLUAPIAI) {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: instanceof NLUAPIAI");
}
if (validateNLUAPIAI((NLUAPIAI) nluProvider)) {
commandRequest.setCC(NLUConstants.intentToCC(((NLUAPIAI) nluProvider).getIntent()));
if (!commandRequest.getCC().equals(CC.COMMAND_UNKNOWN)) {
final NLUAPIAIHelper apiaiHelper = new NLUAPIAIHelper();
commandRequest = apiaiHelper.prepareCommand(mContext, commandRequest, getSupportedLanguage(), ((NLUAPIAI) nluProvider).getParameters());
if (!commandRequest.isResolved()) {
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
} else {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: COMMAND_UNKNOWN");
}
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
} else {
if (DEBUG) {
MyLog.w(CLS_NAME, "coerce: NLUAPIAI validation failed");
}
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
} else if (nluProvider instanceof NLUWit) {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: instanceof NLUWit");
}
// TODO
} else if (nluProvider instanceof NLUBluemix) {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: instanceof NLUBluemix");
}
// TODO
} else {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: instanceof NLUSaiy");
}
if (validateNLUSaiy((NLUSaiy) nluProvider)) {
for (final ai.saiy.android.nlu.saiy.Intent intent : ((NLUSaiy) nluProvider).getIntents()) {
commandRequest.setCC(NLUConstants.intentToCC(intent.getIntent()));
if (!commandRequest.getCC().equals(CC.COMMAND_UNKNOWN)) {
final NLUSaiyHelper saiyHelper = new NLUSaiyHelper();
commandRequest = saiyHelper.prepareCommand(mContext, commandRequest, getSupportedLanguage(), intent.getEntities());
if (commandRequest.isResolved()) {
break;
}
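// As above: only the first Saiy intent is examined before the loop exits.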
break;
} else {
if (DEBUG) {
MyLog.i(CLS_NAME, "coerce: COMMAND_UNKNOWN");
}
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
}
if (!commandRequest.isResolved()) {
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
} else {
if (DEBUG) {
MyLog.w(CLS_NAME, "coerce: NLUSaiy validation failed");
}
commandRequest.setCC(CC.COMMAND_UNKNOWN);
commandRequest.setResolved(false);
}
}
commandRequest.setResultsArray(getResultsArray());
new Quantum(mContext).executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, commandRequest);
}
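The NLUWit and NLUBluemix branches above are still TODO stubs. A minimal sketch of how the Wit branch might mirror the other providers is shown below; the validateNLUWit and NLUWitHelper names, and the getIntent()/getEntities() accessors on NLUWit, are assumptions for illustration only and are not confirmed by the project source (the RecognitionWit snippet further down suggests Wit results may instead be unpacked via ResolveWit):
} else if (nluProvider instanceof NLUWit) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "coerce: instanceof NLUWit");
    }
    // Hypothetical names: validateNLUWit, NLUWitHelper, getIntent(), getEntities()
    if (validateNLUWit((NLUWit) nluProvider)) {
        commandRequest.setCC(NLUConstants.intentToCC(((NLUWit) nluProvider).getIntent()));
        if (!commandRequest.getCC().equals(CC.COMMAND_UNKNOWN)) {
            commandRequest = new NLUWitHelper().prepareCommand(mContext, commandRequest,
                    getSupportedLanguage(), ((NLUWit) nluProvider).getEntities());
        }
    }
    if (!commandRequest.isResolved()) {
        commandRequest.setCC(CC.COMMAND_UNKNOWN);
        commandRequest.setResolved(false);
    }
}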
Use of ai.saiy.android.nlu.wit.NLUWit in the project Saiy-PS by brandall76.
From the class RecognitionWit, method startListening:
public void startListening() {
if (DEBUG) {
MyLog.i(CLS_NAME, "startRecording");
}
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
isRecording.set(true);
pauseDetector.begin();
final int bufferSize = saiyRecorder.getBufferSize();
if (DEBUG) {
MyLog.i(CLS_NAME, "bufferSize: " + bufferSize);
}
final byte[] buffer = new byte[bufferSize];
switch(saiyRecorder.initialise()) {
case AudioRecord.STATE_INITIALIZED:
try {
urlConnection = (HttpsURLConnection) new URL(WitConfiguration.WIT_SPEECH_URL).openConnection();
urlConnection.setAllowUserInteraction(false);
urlConnection.setInstanceFollowRedirects(true);
urlConnection.setRequestMethod(Constants.HTTP_POST);
urlConnection.setRequestProperty(CONTENT_TYPE, HEADER_CONTENT_TYPE);
urlConnection.setRequestProperty(AUTHORIZATION, BEARER_ + accessToken);
urlConnection.setRequestProperty(ACCEPT_HEADER, ACCEPT_VERSION);
urlConnection.setRequestProperty(N_HEADER, "5");
urlConnection.setUseCaches(false);
urlConnection.setDoOutput(true);
urlConnection.setRequestProperty(TRANSFER_ENCODING, CHUNKED);
urlConnection.setChunkedStreamingMode(0);
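// A chunk length of 0 means the default chunk size is used for the streamed request body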
urlConnection.connect();
outputStream = urlConnection.getOutputStream();
ssp.play(ssp.getBeepStart());
switch(saiyRecorder.startRecording()) {
case AudioRecord.RECORDSTATE_RECORDING:
{
if (DEBUG) {
MyLog.i(CLS_NAME, "AudioRecord.RECORDSTATE_RECORDING");
}
int bufferReadResult;
int count = 0;
while (isRecording.get() && saiyRecorder != null && saiyRecorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
if (count == 0) {
if (DEBUG) {
MyLog.i(CLS_NAME, "Recording Started");
}
Recognition.setState(Recognition.State.LISTENING);
listener.onReadyForSpeech(null);
listener.onBeginningOfSpeech();
count++;
}
if (saiyRecorder != null) {
bufferReadResult = saiyRecorder.read(buffer);
listener.onBufferReceived(buffer);
if (!pauseDetector.hasDetected()) {
pauseDetector.addLength(buffer, bufferReadResult);
pauseDetector.monitor();
}
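// Stream the recorded PCM chunk to the open Wit connection as it is read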
for (int i = 0; i < bufferReadResult; i++) {
outputStream.write(buffer[i]);
}
}
}
audioShutdown();
final int responseCode = urlConnection.getResponseCode();
if (DEBUG) {
MyLog.d(CLS_NAME, "responseCode: " + responseCode);
}
if (responseCode != HttpsURLConnection.HTTP_OK) {
if (DEBUG) {
MyLog.e(CLS_NAME, "audioThread ErrorStream: " + UtilsString.streamToString(urlConnection.getErrorStream()));
}
listener.onError(SpeechRecognizer.ERROR_NETWORK);
} else {
inputStream = urlConnection.getInputStream();
final String response = UtilsString.streamToString(inputStream);
final Gson gson = new GsonBuilder().disableHtmlEscaping().create();
final NLUWit nluWit = gson.fromJson(response, NLUWit.class);
final ArrayList<String> resultsArray = new ArrayList<>();
resultsArray.add(nluWit.getText());
final float[] floatsArray = new float[1];
floatsArray[0] = nluWit.getConfidence();
final Bundle results = new Bundle();
if (languageModel == SaiyDefaults.LanguageModel.WIT) {
if (DEBUG) {
MyLog.i(CLS_NAME, "final: nlu required");
}
if (servingRemote) {
results.putString(Request.RESULTS_NLU, response);
results.putStringArrayList(Request.RESULTS_RECOGNITION, resultsArray);
results.putFloatArray(Request.CONFIDENCE_SCORES, floatsArray);
listener.onResults(results);
} else {
new ResolveWit(mContext, sl, UtilsLocale.stringToLocale(vrLocale.toString()), ttsLocale, floatsArray, resultsArray).unpack(nluWit);
}
} else {
if (DEBUG) {
MyLog.i(CLS_NAME, "final: nlu not required");
}
results.putStringArrayList(Request.RESULTS_RECOGNITION, resultsArray);
results.putFloatArray(Request.CONFIDENCE_SCORES, floatsArray);
listener.onResults(results);
}
}
}
break;
case AudioRecord.ERROR:
default:
if (DEBUG) {
MyLog.w(CLS_NAME, "AudioRecord.ERROR");
}
listener.onError(SpeechRecognizer.ERROR_AUDIO);
break;
}
} catch (final MalformedURLException e) {
if (DEBUG) {
MyLog.e(CLS_NAME, "MalformedURLException");
e.printStackTrace();
}
} catch (final ParseException e) {
if (DEBUG) {
MyLog.w(CLS_NAME, "ParseException");
e.printStackTrace();
}
} catch (final UnknownHostException e) {
if (DEBUG) {
MyLog.w(CLS_NAME, "UnknownHostException");
e.printStackTrace();
}
} catch (final IOException e) {
if (DEBUG) {
MyLog.e(CLS_NAME, "IOException");
e.printStackTrace();
}
} catch (final IllegalStateException e) {
if (DEBUG) {
MyLog.e(CLS_NAME, "IllegalStateException");
e.printStackTrace();
}
} catch (final NullPointerException e) {
if (DEBUG) {
MyLog.w(CLS_NAME, "NullPointerException");
e.printStackTrace();
}
} catch (final Exception e) {
if (DEBUG) {
MyLog.w(CLS_NAME, "Exception");
e.printStackTrace();
}
} finally {
closeConnection();
}
audioShutdown();
break;
case AudioRecord.STATE_UNINITIALIZED:
if (DEBUG) {
MyLog.w(CLS_NAME, "AudioRecord.STATE_UNINITIALIZED");
}
listener.onError(SpeechRecognizer.ERROR_AUDIO);
break;
}
}
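For reference, the Gson call above only requires NLUWit to expose the recognised text and a confidence score. A minimal sketch of such a model is shown below, assuming Wit's "_text" and "confidence" JSON fields; the actual class in the project may well carry additional outcome and entity data:
import com.google.gson.annotations.SerializedName;

public class NLUWit {

    // Assumed field names: Wit's speech endpoint has historically returned "_text" and "confidence"
    @SerializedName("_text")
    private String text;

    @SerializedName("confidence")
    private float confidence;

    public String getText() {
        return text;
    }

    public float getConfidence() {
        return confidence;
    }
}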