Usage example of ai.saiy.android.nlu.wit.ResolveWit from the project Saiy-PS by brandall76:
the startListening method of the RecognitionWit class.
/**
 * Begins microphone capture and streams the raw audio, chunked, to the Wit speech
 * endpoint over HTTPS. Blocks on the calling thread until recording stops (the
 * pause detector fires or {@code isRecording} is cleared externally), then reads
 * the HTTP response, parses it as {@code NLUWit} JSON and dispatches results —
 * or the appropriate {@link SpeechRecognizer} error code — to {@code listener}.
 * <p>
 * Side effects: mutates {@code urlConnection}, {@code outputStream} and
 * {@code inputStream}; plays the start beep; raises the thread's audio priority.
 * The connection is always released via {@code closeConnection()} in the
 * {@code finally} block, and the recorder via {@code audioShutdown()}.
 */
public void startListening() {
    if (DEBUG) {
        // Fix: previously logged "startRecording", which did not match the method name.
        MyLog.i(CLS_NAME, "startListening");
    }

    // Low-latency priority for the capture loop below.
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

    isRecording.set(true);
    pauseDetector.begin();

    final int bufferSize = saiyRecorder.getBufferSize();
    if (DEBUG) {
        MyLog.i(CLS_NAME, "bufferSize: " + bufferSize);
    }

    final byte[] buffer = new byte[bufferSize];

    switch (saiyRecorder.initialise()) {

        case AudioRecord.STATE_INITIALIZED:

            try {

                // Chunked streaming so audio is posted as it is captured rather
                // than buffered in full. 0 = platform default chunk size.
                urlConnection = (HttpsURLConnection) new URL(WitConfiguration.WIT_SPEECH_URL).openConnection();
                urlConnection.setAllowUserInteraction(false);
                urlConnection.setInstanceFollowRedirects(true);
                urlConnection.setRequestMethod(Constants.HTTP_POST);
                urlConnection.setRequestProperty(CONTENT_TYPE, HEADER_CONTENT_TYPE);
                urlConnection.setRequestProperty(AUTHORIZATION, BEARER_ + accessToken);
                urlConnection.setRequestProperty(ACCEPT_HEADER, ACCEPT_VERSION);
                urlConnection.setRequestProperty(N_HEADER, "5");
                urlConnection.setUseCaches(false);
                urlConnection.setDoOutput(true);
                urlConnection.setRequestProperty(TRANSFER_ENCODING, CHUNKED);
                urlConnection.setChunkedStreamingMode(0);
                urlConnection.connect();

                outputStream = urlConnection.getOutputStream();

                // Audible cue that capture has begun.
                ssp.play(ssp.getBeepStart());

                switch (saiyRecorder.startRecording()) {

                    case AudioRecord.RECORDSTATE_RECORDING: {

                        if (DEBUG) {
                            MyLog.i(CLS_NAME, "AudioRecord.RECORDSTATE_RECORDING");
                        }

                        int bufferReadResult;
                        int count = 0;

                        // Capture loop: runs until stopped externally or the recorder
                        // leaves the RECORDING state (e.g. after audioShutdown()).
                        while (isRecording.get() && saiyRecorder != null
                                && saiyRecorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {

                            if (count == 0) {
                                // First pass only: announce the listening state to callers.
                                if (DEBUG) {
                                    MyLog.i(CLS_NAME, "Recording Started");
                                }

                                Recognition.setState(Recognition.State.LISTENING);
                                listener.onReadyForSpeech(null);
                                listener.onBeginningOfSpeech();
                                count++;
                            }

                            // Re-check: audioShutdown() on another thread may null the recorder.
                            if (saiyRecorder != null) {
                                bufferReadResult = saiyRecorder.read(buffer);
                                listener.onBufferReceived(buffer);

                                if (!pauseDetector.hasDetected()) {
                                    pauseDetector.addLength(buffer, bufferReadResult);
                                    pauseDetector.monitor();
                                }

                                // Fix: bulk write instead of the previous byte-at-a-time
                                // loop (one stream call per byte). AudioRecord.read()
                                // returns a negative error code on failure; the guard
                                // makes the old loop's implicit skip-on-error explicit.
                                if (bufferReadResult > 0) {
                                    outputStream.write(buffer, 0, bufferReadResult);
                                }
                            }
                        }

                        // Stop the recorder before blocking on the server's response.
                        audioShutdown();

                        final int responseCode = urlConnection.getResponseCode();

                        if (DEBUG) {
                            MyLog.d(CLS_NAME, "responseCode: " + responseCode);
                        }

                        if (responseCode != HttpsURLConnection.HTTP_OK) {
                            if (DEBUG) {
                                MyLog.e(CLS_NAME, "audioThread ErrorStream: "
                                        + UtilsString.streamToString(urlConnection.getErrorStream()));
                            }
                            listener.onError(SpeechRecognizer.ERROR_NETWORK);
                        } else {

                            inputStream = urlConnection.getInputStream();
                            final String response = UtilsString.streamToString(inputStream);

                            final Gson gson = new GsonBuilder().disableHtmlEscaping().create();
                            final NLUWit nluWit = gson.fromJson(response, NLUWit.class);

                            final ArrayList<String> resultsArray = new ArrayList<>();
                            resultsArray.add(nluWit.getText());

                            final float[] floatsArray = new float[1];
                            floatsArray[0] = nluWit.getConfidence();

                            final Bundle results = new Bundle();

                            if (languageModel == SaiyDefaults.LanguageModel.WIT) {
                                if (DEBUG) {
                                    MyLog.i(CLS_NAME, "final: nlu required");
                                }

                                if (servingRemote) {
                                    // Remote caller resolves the NLU payload itself.
                                    results.putString(Request.RESULTS_NLU, response);
                                    results.putStringArrayList(Request.RESULTS_RECOGNITION, resultsArray);
                                    results.putFloatArray(Request.CONFIDENCE_SCORES, floatsArray);
                                    listener.onResults(results);
                                } else {
                                    // Resolve the Wit NLU response locally.
                                    new ResolveWit(mContext, sl, UtilsLocale.stringToLocale(vrLocale.toString()),
                                            ttsLocale, floatsArray, resultsArray).unpack(nluWit);
                                }
                            } else {
                                if (DEBUG) {
                                    MyLog.i(CLS_NAME, "final: nlu not required");
                                }

                                results.putStringArrayList(Request.RESULTS_RECOGNITION, resultsArray);
                                results.putFloatArray(Request.CONFIDENCE_SCORES, floatsArray);
                                listener.onResults(results);
                            }
                        }
                    }

                        break;
                    case AudioRecord.ERROR:
                    default:
                        if (DEBUG) {
                            MyLog.w(CLS_NAME, "AudioRecord.ERROR");
                        }
                        listener.onError(SpeechRecognizer.ERROR_AUDIO);
                        break;
                }

            // NOTE(review): MalformedURLException and UnknownHostException are
            // IOException subclasses, so they must stay ordered before IOException.
            } catch (final MalformedURLException e) {
                if (DEBUG) {
                    MyLog.e(CLS_NAME, "MalformedURLException");
                    e.printStackTrace();
                }
            } catch (final ParseException e) {
                if (DEBUG) {
                    MyLog.w(CLS_NAME, "ParseException");
                    e.printStackTrace();
                }
            } catch (final UnknownHostException e) {
                if (DEBUG) {
                    MyLog.w(CLS_NAME, "UnknownHostException");
                    e.printStackTrace();
                }
            } catch (final IOException e) {
                if (DEBUG) {
                    MyLog.e(CLS_NAME, "IOException");
                    e.printStackTrace();
                }
            } catch (final IllegalStateException e) {
                if (DEBUG) {
                    MyLog.e(CLS_NAME, "IllegalStateException");
                    e.printStackTrace();
                }
            } catch (final NullPointerException e) {
                if (DEBUG) {
                    MyLog.w(CLS_NAME, "NullPointerException");
                    e.printStackTrace();
                }
            } catch (final Exception e) {
                if (DEBUG) {
                    MyLog.w(CLS_NAME, "Exception");
                    e.printStackTrace();
                }
            } finally {
                // Always release the connection, regardless of outcome.
                closeConnection();
            }

            // Idempotent when already shut down by the success path above.
            audioShutdown();
            break;
        case AudioRecord.STATE_UNINITIALIZED:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "AudioRecord.STATE_UNINITIALIZED");
            }
            listener.onError(SpeechRecognizer.ERROR_AUDIO);
            break;
    }
}
Aggregations