Use of ai.saiy.android.service.helper.LocalRequest in project Saiy-PS by brandall76.
The class BVStreamAudio, method onError:
@Override
public void onError(int error) {
    if (DEBUG) {
        MyLog.w(CLS_NAME, "onError");
    }

    mic.getRecognitionListener().onComplete();
    Recognition.setState(Recognition.State.IDLE);

    // Treat an interrupted microphone as a user cancellation
    if (mic.isInterrupted()) {
        error = Speaker.ERROR_USER_CANCELLED;
    }

    // Announce the error to the user via a speak-only request
    final LocalRequest localRequest = new LocalRequest(mic.getContext());
    localRequest.setSupportedLanguage(sl);
    localRequest.setAction(LocalRequest.ACTION_SPEAK_ONLY);
    localRequest.setTTSLocale(SPH.getTTSLocale(mic.getContext()));
    localRequest.setVRLocale(SPH.getVRLocale(mic.getContext()));

    switch (error) {
        case Speaker.ERROR_USER_CANCELLED:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onError ERROR_USER_CANCELLED");
            }
            localRequest.setUtterance(SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.cancelled));
            break;
        case Speaker.ERROR_NETWORK:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onError ERROR_NETWORK");
            }
            localRequest.setUtterance(PersonalityResponse.getNoNetwork(mic.getContext(), sl));
            break;
        case Speaker.ERROR_AUDIO:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onError ERROR_AUDIO");
            }
            localRequest.setUtterance(SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.error_audio));
            break;
        case Speaker.ERROR_FILE:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onError ERROR_FILE");
            }
            localRequest.setUtterance(SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.error_audio));
            break;
        default:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onError default");
            }
            localRequest.setUtterance(SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.error_audio));
            break;
    }

    localRequest.execute();
}
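The same speak-only construction recurs in every error handler on this page. As a point of reference, here is a minimal sketch of how that boilerplate could be factored out; newSpeakRequest is a hypothetical helper name, not part of Saiy-PS, and it assumes sl is the project's SupportedLanguage type and that the SPH locale getters behave as shown above.

// Hypothetical helper (not in Saiy-PS): builds the speak-only LocalRequest
// used by the onError handlers on this page.
private static LocalRequest newSpeakRequest(final Context ctx, final SupportedLanguage sl, final String utterance) {
    final LocalRequest localRequest = new LocalRequest(ctx);
    localRequest.setSupportedLanguage(sl);
    localRequest.setAction(LocalRequest.ACTION_SPEAK_ONLY);
    localRequest.setTTSLocale(SPH.getTTSLocale(ctx));
    localRequest.setVRLocale(SPH.getVRLocale(ctx));
    localRequest.setUtterance(utterance);
    return localRequest;
}

With such a helper, the ERROR_USER_CANCELLED branch above would reduce to newSpeakRequest(mic.getContext(), sl, SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.cancelled)).execute().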
Use of ai.saiy.android.service.helper.LocalRequest in project Saiy-PS by brandall76.
The class MotionHelper, method reactMotion:
/**
 * Check if we need to react to the detected ActivityRecognition type.
 *
 * @param ctx    the application context
 * @param motion the detected {@link Motion} object
 */
private static void reactMotion(@NonNull final Context ctx, @NonNull final Motion motion) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "reactMotion");
    }

    switch (motion.getType()) {
        case DetectedActivity.WALKING:
            break;
        case DetectedActivity.IN_VEHICLE:
            if (DEBUG) {
                MyLog.i(CLS_NAME, "reactMotion: IN_VEHICLE");
            }
            // Start hotword detection only if the user has enabled it for driving
            if (SPH.getHotwordDriving(ctx)) {
                if (DEBUG) {
                    MyLog.i(CLS_NAME, "reactMotion: IN_VEHICLE: enabled");
                }
                final LocalRequest request = new LocalRequest(ctx);
                request.prepareDefault(LocalRequest.ACTION_START_HOTWORD, null);
                request.execute();
            } else {
                if (DEBUG) {
                    MyLog.i(CLS_NAME, "reactMotion: IN_VEHICLE: disabled");
                }
            }
            break;
        case DetectedActivity.ON_BICYCLE:
            break;
        case DetectedActivity.ON_FOOT:
            break;
        case DetectedActivity.RUNNING:
            break;
        case DetectedActivity.STILL:
            break;
        case DetectedActivity.TILTING:
            break;
        case DetectedActivity.UNKNOWN:
            break;
        default:
            break;
    }
}
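For context, the DetectedActivity constants in the switch come from Google Play Services activity recognition. Below is a rough sketch of how such an update typically arrives before being wrapped in the project's Motion object and handed to reactMotion; the receiver class, the confidence threshold and the handleDetectedType stub are illustrative assumptions, not the project's actual wiring.

// Illustrative only: receiving ActivityRecognition updates via the standard
// Play Services intent callback (com.google.android.gms.location).
public class MotionReceiver extends BroadcastReceiver {

    @Override
    public void onReceive(final Context context, final Intent intent) {
        if (ActivityRecognitionResult.hasResult(intent)) {
            final ActivityRecognitionResult result = ActivityRecognitionResult.extractResult(intent);
            final DetectedActivity activity = result.getMostProbableActivity();
            // Hypothetical threshold: ignore low-confidence detections
            if (activity.getConfidence() >= 75) {
                // Saiy-PS would wrap this in its Motion object before calling
                // MotionHelper; that wrapping step is omitted here.
                handleDetectedType(context, activity.getType());
            }
        }
    }

    private void handleDetectedType(final Context ctx, final int type) {
        // e.g. DetectedActivity.IN_VEHICLE triggers the hotword request above
    }
}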
Use of ai.saiy.android.service.helper.LocalRequest in project Saiy-PS by brandall76.
The class SpeakerIdentification, method onError:
@Override
public void onError(int error) {
    if (DEBUG) {
        MyLog.w(CLS_NAME, "onError");
    }

    mic.getRecognitionListener().onComplete();
    Recognition.setState(Recognition.State.IDLE);

    // Treat an interrupted microphone as a user cancellation
    if (mic.isInterrupted()) {
        error = Speaker.ERROR_USER_CANCELLED;
    }

    // Announce the error to the user via a speak-only request
    final LocalRequest localRequest = new LocalRequest(mic.getContext());
    localRequest.setSupportedLanguage(sl);
    localRequest.setAction(LocalRequest.ACTION_SPEAK_ONLY);
    localRequest.setTTSLocale(SPH.getTTSLocale(mic.getContext()));
    localRequest.setVRLocale(SPH.getVRLocale(mic.getContext()));

    switch (error) {
        case Speaker.ERROR_USER_CANCELLED:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onError ERROR_USER_CANCELLED");
            }
            localRequest.setUtterance(SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.cancelled));
            break;
        case Speaker.ERROR_NETWORK:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onError ERROR_NETWORK");
            }
            localRequest.setUtterance(PersonalityResponse.getNoNetwork(mic.getContext(), sl));
            break;
        case Speaker.ERROR_AUDIO:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onError ERROR_AUDIO");
            }
            localRequest.setUtterance(SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.error_audio));
            break;
        case Speaker.ERROR_FILE:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onError ERROR_FILE");
            }
            localRequest.setUtterance(SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.error_audio));
            break;
        default:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "onError default");
            }
            localRequest.setUtterance(SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.error_audio));
            break;
    }

    localRequest.execute();
}
Use of ai.saiy.android.service.helper.LocalRequest in project Saiy-PS by brandall76.
The class SpeakerIdentification, method onFileWriteComplete:
@Override
public void onFileWriteComplete(final boolean success) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "onFileWriteComplete: " + success);
    }

    if (!mic.isInterrupted()) {
        if (success) {
            // Tell the user verification is underway, then stream the recorded file
            final LocalRequest localRequest = new LocalRequest(mic.getContext());
            localRequest.setSupportedLanguage(sl);
            localRequest.setAction(LocalRequest.ACTION_SPEAK_ONLY);
            localRequest.setTTSLocale(SPH.getTTSLocale(mic.getContext()));
            localRequest.setVRLocale(SPH.getVRLocale(mic.getContext()));
            localRequest.setUtterance(SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.vocal_notify_verify));
            localRequest.execute();

            new ValidateID(mic, sl, apiKey, profileId, shortAudio, mic.getFile()).stream();
        } else {
            onError(Speaker.ERROR_FILE);
        }
    } else {
        if (DEBUG) {
            MyLog.i(CLS_NAME, "onFileWriteComplete: mic interrupted");
        }
    }
}
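MotionHelper above uses the shorter prepareDefault call instead of the full setter chain. If, as the null second argument there suggests, prepareDefault(int action, String utterance) also applies the user's stored TTS and VR locales, the notification request in onFileWriteComplete could be condensed as sketched below; treat that signature and behaviour as an assumption drawn from this page rather than a confirmed API.

// Assumption: prepareDefault(action, utterance) applies the user's default
// locales, as implied by the MotionHelper usage above. Verify against the
// LocalRequest source before relying on it.
final LocalRequest localRequest = new LocalRequest(mic.getContext());
localRequest.prepareDefault(LocalRequest.ACTION_SPEAK_ONLY,
        SaiyResourcesHelper.getStringResource(mic.getContext(), sl, R.string.vocal_notify_verify));
localRequest.execute();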