Use of ai.saiy.android.cognitive.emotion.provider.beyondverbal.containers.StartResponse in project Saiy-PS by brandall76.
From the class BVStartRequest, method getId:
/**
 * Method to get a recording identifier.
 *
 * @return a {@link Pair}, of which the first parameter denotes success and the second a
 * {@link StartResponse} object containing the recording id.
 * <p>
 * If the request was unsuccessful, the second parameter may be null.
 */
public Pair<Boolean, StartResponse> getId(@NonNull final JSONObject body) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "getId");
    }

    final RequestFuture<JSONObject> future = RequestFuture.newFuture();
    final RequestQueue queue = Volley.newRequestQueue(mContext);
    queue.start();

    final JsonObjectRequest jsonObjReq = new JsonObjectRequest(Request.Method.POST, START_URL,
            body, future, new Response.ErrorListener() {
        @Override
        public void onErrorResponse(final VolleyError error) {
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onErrorResponse: " + error.toString());
                BVStartRequest.this.verboseError(error);
            }
            queue.stop();
        }
    }) {
        @Override
        public Map<String, String> getHeaders() throws AuthFailureError {
            final Map<String, String> params = new HashMap<>();
            params.put(CHARSET, ENCODING);
            params.put(AUTHORIZATION, BEARER_ + token);
            return params;
        }
    };

    jsonObjReq.setRetryPolicy(new DefaultRetryPolicy(DefaultRetryPolicy.DEFAULT_TIMEOUT_MS * 2,
            DefaultRetryPolicy.DEFAULT_MAX_RETRIES, DefaultRetryPolicy.DEFAULT_BACKOFF_MULT));
    queue.add(jsonObjReq);

    JSONObject response = null;

    try {
        response = future.get(THREAD_TIMEOUT, TimeUnit.SECONDS);
    } catch (final InterruptedException e) {
        if (DEBUG) {
            MyLog.w(CLS_NAME, "execute: InterruptedException");
            e.printStackTrace();
        }
    } catch (final ExecutionException e) {
        if (DEBUG) {
            MyLog.w(CLS_NAME, "execute: ExecutionException");
            e.printStackTrace();
        }
    } catch (final TimeoutException e) {
        if (DEBUG) {
            MyLog.w(CLS_NAME, "execute: TimeoutException");
            e.printStackTrace();
        }
    } finally {
        queue.stop();
    }

    if (response != null) {
        if (DEBUG) {
            MyLog.i(CLS_NAME, "response: " + response);
        }

        final Gson gson = new GsonBuilder().disableHtmlEscaping().create();
        final StartResponse startResponse = gson.fromJson(response.toString(), StartResponse.class);

        if (DEBUG) {
            MyLog.i(CLS_NAME, "onResponse: getStatus: " + startResponse.getStatus());
        }

        if (startResponse.isSuccessful()) {
            if (DEBUG) {
                MyLog.i(CLS_NAME, "onResponse: getRecordingId: " + startResponse.getRecordingId());
            }
        } else {
            if (DEBUG) {
                MyLog.i(CLS_NAME, "onResponse: getReason: " + startResponse.getReason());
            }
        }

        return new Pair<>(true, startResponse);
    } else {
        if (DEBUG) {
            MyLog.w(CLS_NAME, "response: failed");
        }

        return new Pair<>(false, null);
    }
}
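The Gson call above deserialises the raw JSON response into the StartResponse container. The container itself is not shown on this page; the following is a minimal sketch of what it plausibly looks like, inferred from the getters used above. The @SerializedName field names and the "success" literal are assumptions, not the project's confirmed source.

import com.google.gson.annotations.SerializedName;

public class StartResponse {

    // Assumption: the Beyond Verbal API reports "success" in the status field.
    private static final String SUCCESS = "success";

    @SerializedName("status")
    private String status;

    @SerializedName("recordingId")
    private String recordingId;

    @SerializedName("reason")
    private String reason;

    public String getStatus() {
        return status;
    }

    public String getRecordingId() {
        return recordingId;
    }

    public String getReason() {
        return reason;
    }

    public boolean isSuccessful() {
        return SUCCESS.equals(status);
    }
}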
Use of ai.saiy.android.cognitive.emotion.provider.beyondverbal.containers.StartResponse in project Saiy-PS by brandall76.
From the class BeyondVerbal, method stream:
public void stream() {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "stream");
    }

    final String token = getToken();

    if (UtilsString.notNaked(token)) {

        final Pair<Boolean, StartResponse> startRequest = new BVStartRequest(mContext, token)
                .getId(new StartRequestBody(AudioConfig.getDefault(), MetaData.getEmpty(),
                        SupportedLanguageBV.getSupportedLanguage(sl.getLocale())).prepare());

        if (startRequest.first) {
            if (startRequest.second.isSuccessful()) {

                final String recordingId = startRequest.second.getRecordingId();

                if (mic.isAvailable()) {
                    new BVStreamAudio(mic, sl, token, recordingId).stream();
                } else {
                    if (DEBUG) {
                        MyLog.w(CLS_NAME, "mic unavailable");
                    }
                    Recognition.setState(Recognition.State.IDLE);
                    onError();
                }
            } else {
                if (DEBUG) {
                    MyLog.w(CLS_NAME, "startRequest.second.isSuccessful()");
                }
                onError();
            }
        } else {
            if (DEBUG) {
                MyLog.w(CLS_NAME, "startRequest.first");
            }
            onError();
        }
    } else {
        if (DEBUG) {
            MyLog.w(CLS_NAME, "token naked");
        }
        onError();
    }
}
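Note that getId blocks on RequestFuture.get for up to THREAD_TIMEOUT seconds, so stream() must not be invoked on the Android main thread, where that wait would risk an ANR. A minimal dispatch sketch follows; the executor wiring and the beyondVerbal instance are illustrative assumptions, not the project's actual threading code.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

final ExecutorService executor = Executors.newSingleThreadExecutor();

executor.execute(new Runnable() {
    @Override
    public void run() {
        // stream() blocks internally on the Volley RequestFuture, so it is
        // dispatched to a background thread here.
        beyondVerbal.stream();
    }
});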