Use of ai.saiy.android.processing.Outcome in project Saiy-PS by brandall76.
The class CommandUserName, method getResponse.
/**
* Resolve the required command returning an {@link Outcome} object
*
* @param ctx the application context
* @param voiceData ArrayList<String> containing the voice data
* @param sl the {@link SupportedLanguage} we are using to analyse the voice data.
* @param cr the {@link CommandRequest}
* @return {@link Outcome} containing everything we need to respond to the command.
*/
public Outcome getResponse(@NonNull final Context ctx, @NonNull final ArrayList<String> voiceData,
                           @NonNull final SupportedLanguage sl, @NonNull final CommandRequest cr) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "voiceData: " + voiceData.size() + " : " + voiceData.toString());
    }
    then = System.nanoTime();
    final Outcome outcome = new Outcome();
    String name;
    if (cr.isResolved()) {
        if (DEBUG) {
            MyLog.i(CLS_NAME, "isResolved: true");
        }
        final CommandUserNameValues cunv = (CommandUserNameValues) cr.getVariableData();
        name = cunv.getName();
    } else {
        if (DEBUG) {
            MyLog.i(CLS_NAME, "isResolved: false");
        }
        name = new CommandUserNameLocal().getResponse(ctx, voiceData, sl);
    }
    if (UtilsString.notNaked(name)) {
        if (DEBUG) {
            MyLog.v(CLS_NAME, "name: " + name);
        }
        final String response;
        final String currentName = SPH.getUserName(ctx);
        if (DEBUG) {
            MyLog.v(CLS_NAME, "currentName: " + currentName);
        }
        if (currentName.matches(name)) {
            response = String.format(PersonalityResponse.getUserNameRepeat(ctx, sl), currentName);
        } else {
            SPH.setUserName(ctx, name);
            response = String.format(PersonalityResponse.getUserName(ctx, sl), name);
            if (DEBUG) {
                MyLog.v(CLS_NAME, "response: " + response);
            }
        }
        outcome.setUtterance(response);
        outcome.setOutcome(Outcome.SUCCESS);
    } else {
        if (DEBUG) {
            MyLog.w(CLS_NAME, "name naked");
        }
        name = PersonalityResponse.getUserNameError(ctx, sl);
        outcome.setUtterance(name);
        outcome.setOutcome(Outcome.FAILURE);
        return returnOutcome(outcome);
    }
    return returnOutcome(outcome);
}
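For context, a caller of this method is expected to branch on the returned Outcome. The fragment below is a sketch only: the getters are assumed to mirror the setters used above, and speak() stands in for whatever TTS hook the caller owns; none of it is taken from the project.

// Hedged sketch of a caller consuming the Outcome returned above. getOutcome() and
// getUtterance() are assumptions mirroring the setters used in the method, and speak()
// is a hypothetical TTS hook.
final Outcome outcome = commandUserName.getResponse(ctx, voiceData, sl, cr);
if (outcome.getOutcome() == Outcome.FAILURE) {
    // The recognised name was empty ("naked"): the utterance already holds the error prompt.
    MyLog.w("Caller", "user name unresolved");
}
speak(outcome.getUtterance());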
Use of ai.saiy.android.processing.Outcome in project Saiy-PS by brandall76.
The class CommandBattery, method getResponse.
/**
* Resolve the required command returning an {@link Outcome} object
*
* @param ctx the application context
* @param voiceData ArrayList<String> containing the voice data
* @param sl the {@link SupportedLanguage} we are using to analyse the voice data.
* @param cr the {@link CommandRequest}
* @return {@link Outcome} containing everything we need to respond to the command.
*/
public Outcome getResponse(@NonNull final Context ctx, @NonNull final ArrayList<String> voiceData,
                           @NonNull final SupportedLanguage sl, @NonNull final CommandRequest cr) {
    if (DEBUG) {
        MyLog.i(CLS_NAME, "voiceData: " + voiceData.size() + " : " + voiceData.toString());
    }
    final long then = System.nanoTime();
    Outcome outcome = new Outcome();
    if (cr.isResolved()) {
        if (DEBUG) {
            MyLog.i(CLS_NAME, "isResolved: true");
        }
        final CommandBatteryValues cbv = (CommandBatteryValues) cr.getVariableData();
        final CommandBatteryValues.Type type = cbv.getType();
        outcome = new BatteryInformation(ctx, sl, outcome, cbv.getTypeString()).getInfo(type);
    } else {
        if (DEBUG) {
            MyLog.i(CLS_NAME, "isResolved: false");
        }
        outcome = new CommandBatteryLocal().getResponse(ctx, voiceData, sl);
    }
    if (DEBUG) {
        MyLog.getElapsed(CLS_NAME, then);
    }
    return outcome;
}
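CommandBattery follows the same two-branch shape as the other resolvers: a pre-resolved CommandRequest supplies the already-extracted battery type, otherwise the raw voice data is re-parsed locally. A stripped-down, self-contained sketch of that dispatch pattern is below; every name in it is an illustrative stand-in, not a Saiy class.

import java.util.ArrayList;

// Illustrative stand-ins only; the project uses CommandRequest, CommandBatteryValues,
// BatteryInformation and CommandBatteryLocal for the same shape.
public final class ResolverDispatchSketch {

    public String resolve(final boolean isResolved, final String extractedType,
                          final ArrayList<String> voiceData) {
        if (isResolved) {
            // The remote NLP layer already extracted the battery type: use it directly.
            return "battery " + extractedType + " from pre-resolved values";
        }
        // Otherwise fall back to parsing the raw voice data on-device.
        return "battery info parsed locally from " + voiceData.size() + " phrases";
    }
}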
Use of ai.saiy.android.processing.Outcome in project Saiy-PS by brandall76.
The class CommandCustom, method getResponse.
/**
* Action the custom command request and return the {@link Outcome}
*
* @param ctx the application context
* @param customCommand the identified {@link CustomCommand}
* @param sl the {@link SupportedLanguage}
* @param cr the {@link CommandRequest}
* @return the created {@link Outcome}
*/
public Outcome getResponse(@NonNull final Context ctx, @NonNull final CustomCommand customCommand,
                           @NonNull final SupportedLanguage sl, @NonNull final CommandRequest cr) {
    final long then = System.nanoTime();
    if (DEBUG) {
        MyLog.i(CLS_NAME, "getCustomAction: " + customCommand.getCustomAction().name());
        MyLog.i(CLS_NAME, "isExactMatch: " + customCommand.isExactMatch());
        MyLog.i(CLS_NAME, "getKeyphrase: " + customCommand.getKeyphrase());
        MyLog.i(CLS_NAME, "getResponseError: " + customCommand.getResponseError());
        MyLog.i(CLS_NAME, "getResponseSuccess: " + customCommand.getResponseSuccess());
        MyLog.i(CLS_NAME, "getTTSLocale: " + customCommand.getTTSLocale());
        MyLog.i(CLS_NAME, "getVRLocale: " + customCommand.getVRLocale());
        MyLog.i(CLS_NAME, "getAction: " + customCommand.getAction());
        MyLog.i(CLS_NAME, "getScore: " + customCommand.getScore());
        MyLog.i(CLS_NAME, "getCommandConstant: " + customCommand.getCommandConstant().name());
        MyLog.i(CLS_NAME, "getIntent: " + customCommand.getIntent());
        MyLog.i(CLS_NAME, "getExtraText: " + customCommand.getExtraText());
        if (customCommand.getAlgorithm() != null) {
            MyLog.i(CLS_NAME, "getAlgorithm: " + customCommand.getAlgorithm().name());
        }
    }
    final Outcome outcome = new Outcome();
    switch (customCommand.getCustomAction()) {
        case CUSTOM_SPEECH:
            if (DEBUG) {
                MyLog.i(CLS_NAME, CCC.CUSTOM_SPEECH.name());
            }
            outcome.setUtterance(customCommand.getResponseSuccess());
            outcome.setAction(customCommand.getAction());
            outcome.setOutcome(Outcome.SUCCESS);
            break;
        case CUSTOM_DISPLAY_CONTACT:
            if (DEBUG) {
                MyLog.i(CLS_NAME, CCC.CUSTOM_DISPLAY_CONTACT.name());
            }
            break;
        case CUSTOM_TASKER_TASK:
            if (DEBUG) {
                MyLog.i(CLS_NAME, CCC.CUSTOM_TASKER_TASK.name());
            }
            break;
        case CUSTOM_ACTIVITY:
            if (DEBUG) {
                MyLog.i(CLS_NAME, CCC.CUSTOM_ACTIVITY.name());
            }
            Intent intent = null;
            try {
                intent = Intent.parseUri(customCommand.getIntent(), 0);
                if (DEBUG) {
                    MyLog.i(CLS_NAME, "Intent:" + intent.toUri(0));
                    examineIntent(intent);
                }
            } catch (final URISyntaxException e) {
                if (DEBUG) {
                    MyLog.w(CLS_NAME, "Intent.parseUri: URISyntaxException");
                    e.printStackTrace();
                }
            }
            if (intent != null && ExecuteIntent.executeIntent(ctx, intent)) {
                if (DEBUG) {
                    MyLog.i(CLS_NAME, "Execute remoteIntent success");
                }
                outcome.setUtterance(UtilsString.notNaked(customCommand.getResponseSuccess())
                        ? customCommand.getResponseSuccess() : SaiyRequestParams.SILENCE);
                outcome.setAction(customCommand.getAction());
                outcome.setOutcome(Outcome.SUCCESS);
            } else {
                if (DEBUG) {
                    MyLog.i(CLS_NAME, "Execute remoteIntent failed");
                }
                outcome.setUtterance(UtilsString.notNaked(customCommand.getResponseError())
                        ? customCommand.getResponseError() : SaiyRequestParams.SILENCE);
                outcome.setAction(customCommand.getAction());
                outcome.setOutcome(Outcome.FAILURE);
            }
            break;
        case CUSTOM_CALL_CONTACT:
            if (DEBUG) {
                MyLog.i(CLS_NAME, CCC.CUSTOM_CALL_CONTACT.name());
            }
            break;
        case CUSTOM_LAUNCH_APPLICATION:
            if (DEBUG) {
                MyLog.i(CLS_NAME, CCC.CUSTOM_LAUNCH_APPLICATION.name());
            }
            break;
        case CUSTOM_LAUNCH_SHORTCUT:
            if (DEBUG) {
                MyLog.i(CLS_NAME, CCC.CUSTOM_LAUNCH_SHORTCUT.name());
            }
            break;
        case CUSTOM_SEARCHABLE:
            if (DEBUG) {
                MyLog.i(CLS_NAME, CCC.CUSTOM_SEARCHABLE.name());
            }
            break;
        case CUSTOM_INTENT_SERVICE:
            if (DEBUG) {
                MyLog.i(CLS_NAME, CCC.CUSTOM_INTENT_SERVICE.name());
            }
            Intent remoteIntent = null;
            try {
                remoteIntent = Intent.parseUri(customCommand.getIntent(), 0);
                if (DEBUG) {
                    MyLog.i(CLS_NAME, "remoteIntent:" + remoteIntent.toUri(0));
                    examineIntent(remoteIntent);
                }
            } catch (final URISyntaxException e) {
                if (DEBUG) {
                    MyLog.w(CLS_NAME, "remoteIntent.parseUri: URISyntaxException");
                    e.printStackTrace();
                }
            } catch (final NullPointerException e) {
                if (DEBUG) {
                    MyLog.w(CLS_NAME, "remoteIntent.parseUri: NullPointerException");
                    e.printStackTrace();
                }
            }
            if (remoteIntent != null) {
                final Pair<Boolean, String> pair = UtilsApplication.getAppNameFromPackage(ctx, remoteIntent.getPackage());
                if (pair.first) {
                    Bundle bundle = remoteIntent.getExtras();
                    if (bundle == null) {
                        bundle = new Bundle();
                    }
                    bundle.putStringArrayList(Request.RESULTS_RECOGNITION, cr.getResultsArray());
                    bundle.putFloatArray(Request.CONFIDENCE_SCORES, cr.getConfidenceArray());
                    remoteIntent.putExtras(bundle);
                    final String appName = pair.second;
                    if (ExecuteIntent.startService(ctx, remoteIntent)) {
                        final String verboseWords;
                        if (SPH.getRemoteCommandVerbose(ctx) >= CUSTOM_COMMAND_VERBOSE_LIMIT) {
                            verboseWords = SaiyRequestParams.SILENCE;
                        } else {
                            SPH.incrementRemoteCommandVerbose(ctx);
                            verboseWords = PersonalityResponse.getRemoteSuccess(ctx, sl, appName);
                        }
                        outcome.setUtterance(verboseWords);
                        outcome.setAction(customCommand.getAction());
                        outcome.setOutcome(Outcome.SUCCESS);
                    } else {
                        if (DEBUG) {
                            MyLog.w(CLS_NAME, "Execute remoteIntent failed");
                        }
                        outcome.setUtterance(PersonalityResponse.getErrorRemoteFailed(ctx, sl, appName));
                        outcome.setAction(LocalRequest.ACTION_SPEAK_ONLY);
                        outcome.setOutcome(Outcome.FAILURE);
                    }
                } else {
                    if (DEBUG) {
                        MyLog.w(CLS_NAME, "remoteIntent package name unknown");
                    }
                    outcome.setUtterance(PersonalityResponse.getErrorRemoteFailedUnknown(ctx, sl));
                    outcome.setAction(LocalRequest.ACTION_SPEAK_ONLY);
                    outcome.setOutcome(Outcome.FAILURE);
                }
            } else {
                if (DEBUG) {
                    MyLog.i(CLS_NAME, "remoteIntent failed");
                }
                outcome.setUtterance(PersonalityResponse.getErrorRemoteFailedUnknown(ctx, sl));
                outcome.setAction(LocalRequest.ACTION_SPEAK_ONLY);
                outcome.setOutcome(Outcome.FAILURE);
            }
            break;
        default:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "DEFAULT");
            }
            break;
    }
    if (DEBUG) {
        MyLog.getElapsed(CLS_NAME, then);
    }
    return outcome;
}
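The CUSTOM_INTENT_SERVICE branch shows a reusable step: parse the stored intent URI, then attach the recognition results and confidence scores as extras before starting the target service. The helper below is a hedged, stand-alone sketch of that step using the standard android.speech.SpeechRecognizer keys; the project itself uses its own Request.RESULTS_RECOGNITION and Request.CONFIDENCE_SCORES constants, and buildRemoteIntent is not a Saiy method.

import android.content.Intent;
import android.os.Bundle;
import android.speech.SpeechRecognizer;

import java.net.URISyntaxException;
import java.util.ArrayList;

final class RemoteIntentSketch {

    // Parse a stored intent URI and attach the recognition payload, mirroring the
    // CUSTOM_INTENT_SERVICE branch above. Returns null if the URI cannot be parsed.
    static Intent buildRemoteIntent(final String intentUri, final ArrayList<String> results,
                                    final float[] confidenceScores) {
        final Intent remoteIntent;
        try {
            remoteIntent = Intent.parseUri(intentUri, 0);
        } catch (final URISyntaxException | NullPointerException e) {
            return null;
        }

        Bundle bundle = remoteIntent.getExtras();
        if (bundle == null) {
            bundle = new Bundle();
        }
        // Standard Android keys; Saiy uses its own Request constants for the same purpose.
        bundle.putStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION, results);
        bundle.putFloatArray(SpeechRecognizer.CONFIDENCE_SCORES, confidenceScores);
        remoteIntent.putExtras(bundle);

        return remoteIntent;
    }
}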
Use of ai.saiy.android.processing.Outcome in project Saiy-PS by brandall76.
The class CommandEmotion, method getResponse.
/**
* Resolve the required command returning an {@link Outcome} object
*
* @param ctx the application context
* @param sl the {@link SupportedLanguage} we are using to build the response.
* @return {@link Outcome} containing everything we need to respond to the command.
*/
public Outcome getResponse(@NonNull final Context ctx, @NonNull final SupportedLanguage sl) {
    final long then = System.nanoTime();
    final Outcome outcome = new Outcome();
    outcome.setOutcome(Outcome.SUCCESS);
    switch (SPH.getEmotionCommandVerbose(ctx)) {
        case 0:
        case COMMAND_EMOTION_EXTRA_VERBOSE_LIMIT:
            if (DEBUG) {
                MyLog.i(CLS_NAME, "COMMAND_EMOTION_EXTRA_VERBOSE_LIMIT");
            }
            outcome.setUtterance(PersonalityResponse.getBeyondVerbalExtraVerboseResponse(ctx, sl));
            SPH.incrementEmotionCommandVerbose(ctx);
            break;
        case COMMAND_EMOTION_VERBOSE_LIMIT:
            if (DEBUG) {
                MyLog.i(CLS_NAME, "COMMAND_EMOTION_VERBOSE_LIMIT");
            }
            outcome.setUtterance(PersonalityResponse.getBeyondVerbalVerboseResponse(ctx, sl));
            SPH.incrementEmotionCommandVerbose(ctx);
            break;
        default:
            if (DEBUG) {
                MyLog.i(CLS_NAME, "Standard response");
            }
            outcome.setUtterance(PersonalityResponse.getBeyondVerbalIntroResponse(ctx, sl));
            break;
    }
    if (DEBUG) {
        MyLog.getElapsed(CommandEmotion.class.getSimpleName(), then);
    }
    return outcome;
}
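The switch above implements a small throttling pattern: the first invocations receive progressively shorter explanations, and once the counter passes the verbose limits only the standard intro is spoken. Below is a minimal, self-contained sketch of the same idea backed directly by SharedPreferences; SPH in the project wraps its own preference helper, and every name and limit here is illustrative.

import android.content.Context;
import android.content.SharedPreferences;

final class VerboseThrottleSketch {

    private static final String PREFS = "verbose_sketch";
    private static final String KEY_COUNT = "emotion_verbose_count";
    private static final int EXTRA_VERBOSE_LIMIT = 0; // first use: full explanation
    private static final int VERBOSE_LIMIT = 1;       // second use: shorter reminder

    static String getUtterance(final Context ctx) {
        final SharedPreferences prefs = ctx.getSharedPreferences(PREFS, Context.MODE_PRIVATE);
        final int count = prefs.getInt(KEY_COUNT, 0);

        final String utterance;
        switch (count) {
            case EXTRA_VERBOSE_LIMIT:
                utterance = "Full explanation of the emotion analysis feature";
                prefs.edit().putInt(KEY_COUNT, count + 1).apply();
                break;
            case VERBOSE_LIMIT:
                utterance = "Shorter reminder of how the feature works";
                prefs.edit().putInt(KEY_COUNT, count + 1).apply();
                break;
            default:
                // Counter is no longer incremented, so this branch stays stable from now on.
                utterance = "Standard intro response";
                break;
        }
        return utterance;
    }
}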
Use of ai.saiy.android.processing.Outcome in project Saiy-PS by brandall76.
The class CommandVocalRecognition, method getResponse.
/**
* Resolve the required command returning an {@link Outcome} object
*
* @param ctx the application context
* @param sl the {@link SupportedLanguage} we are using to build the response.
* @return {@link Outcome} containing everything we need to respond to the command.
*/
public Outcome getResponse(@NonNull final Context ctx, @NonNull final SupportedLanguage sl) {
    final long then = System.nanoTime();
    final Outcome outcome = new Outcome();
    outcome.setQubit(new Qubit());
    final SaiyAccountList saiyAccountList = SaiyAccountHelper.getAccounts(ctx);
    if (saiyAccountList != null && saiyAccountList.size() > 0) {
        if (DEBUG) {
            MyLog.v(CLS_NAME, "saiyAccountList.size: " + saiyAccountList.size());
        }
        switch (saiyAccountList.size()) {
            case 1:
            default:
                final SaiyAccount saiyAccount = saiyAccountList.getSaiyAccountList().get(0);
                if (saiyAccount != null) {
                    final ProfileItem profileItem = saiyAccount.getProfileItem();
                    if (profileItem != null) {
                        final String profileId = profileItem.getId();
                        if (UtilsString.notNaked(profileId)) {
                            if (DEBUG) {
                                MyLog.d(CLS_NAME, "profileId: " + profileId);
                            }
                            final Speaker.Status status = Speaker.Status.getStatus(profileItem.getStatus());
                            if (DEBUG) {
                                MyLog.d(CLS_NAME, "status: " + status.name());
                            }
                            switch (status) {
                                case SUCCEEDED:
                                    outcome.setUtterance(SaiyResourcesHelper.getStringResource(ctx, sl,
                                            R.string.speech_enroll_instructions_15));
                                    outcome.setAction(LocalRequest.ACTION_SPEAK_LISTEN);
                                    outcome.setOutcome(Outcome.SUCCESS);
                                    outcome.setExtra(profileId);
                                    break;
                                default:
                                    if (DEBUG) {
                                        MyLog.w(CLS_NAME, "enrollment status");
                                    }
                                    outcome.setOutcome(Outcome.FAILURE);
                                    outcome.setUtterance(SaiyResourcesHelper.getStringResource(ctx, sl,
                                            R.string.error_vi_status));
                                    break;
                            }
                        } else {
                            if (DEBUG) {
                                MyLog.w(CLS_NAME, "profile id naked");
                            }
                            outcome.setOutcome(Outcome.FAILURE);
                            outcome.setUtterance(SaiyResourcesHelper.getStringResource(ctx, sl, R.string.error_vi_status));
                        }
                    } else {
                        if (DEBUG) {
                            MyLog.w(CLS_NAME, "profile item null");
                        }
                        outcome.setOutcome(Outcome.FAILURE);
                        outcome.setUtterance(SaiyResourcesHelper.getStringResource(ctx, sl, R.string.error_vi_status));
                    }
                } else {
                    if (DEBUG) {
                        MyLog.w(CLS_NAME, "account null");
                    }
                    outcome.setOutcome(Outcome.FAILURE);
                    outcome.setUtterance(SaiyResourcesHelper.getStringResource(ctx, sl, R.string.error_vi_no_account));
                }
                break;
        }
    } else {
        if (DEBUG) {
            MyLog.w(CLS_NAME, "no accounts");
        }
        outcome.setOutcome(Outcome.FAILURE);
        outcome.setUtterance(SaiyResourcesHelper.getStringResource(ctx, sl, R.string.error_vi_no_account));
    }
    if (DEBUG) {
        MyLog.getElapsed(CLS_NAME, then);
    }
    return outcome;
}
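The nested null checks above all reduce to one question: is there an enrolled profile id whose status is SUCCEEDED? One possible refactoring, sketched below, extracts that question into an early-return helper; it uses only the accessors already visible in the method and is not project code.

// Sketch only: a possible flattening of the nested checks above into a helper that
// returns the enrolled profile id, or null when any precondition fails.
private static String getEnrolledProfileId(final SaiyAccountList saiyAccountList) {
    if (saiyAccountList == null || saiyAccountList.size() == 0) {
        return null;
    }
    final SaiyAccount saiyAccount = saiyAccountList.getSaiyAccountList().get(0);
    if (saiyAccount == null) {
        return null;
    }
    final ProfileItem profileItem = saiyAccount.getProfileItem();
    if (profileItem == null) {
        return null;
    }
    final String profileId = profileItem.getId();
    if (!UtilsString.notNaked(profileId)) {
        return null;
    }
    final Speaker.Status status = Speaker.Status.getStatus(profileItem.getStatus());
    return status == Speaker.Status.SUCCEEDED ? profileId : null;
}

Note that this collapses the two distinct error cases (no account versus enrollment status) into a single null, so the caller would still need to decide which error utterance to speak; it is shown only to make the happy path easier to follow.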