Use of com.amazonaws.services.elasticmapreduce.util.StepFactory in project herd by FINRAOS.
Class EmrPigStepHelper, method getEmrStepConfig:
/**
 * Builds the EMR step configuration for the specified Pig script step.
 *
 * @param step the step object; must be an {@link EmrPigStep}
 *
 * @return the AWS step configuration that runs the Pig script with any supplied arguments
 */
@Override
public StepConfig getEmrStepConfig(Object step) {
    EmrPigStep emrPigStep = (EmrPigStep) step;

    // Cancel the execution and wait by default; continue only when the user explicitly opted in.
    ActionOnFailure onFailure = Boolean.TRUE.equals(emrPigStep.isContinueOnError()) ? ActionOnFailure.CONTINUE : ActionOnFailure.CANCEL_AND_WAIT;

    String stepName = emrPigStep.getStepName().trim();
    String scriptLocation = emrPigStep.getScriptLocation().trim();

    // No arguments were supplied for the Pig script, so run it as-is.
    if (CollectionUtils.isEmpty(emrPigStep.getScriptArguments())) {
        return new StepConfig().withName(stepName).withActionOnFailure(onFailure)
            .withHadoopJarStep(new StepFactory().newRunPigScriptStep(scriptLocation));
    }

    // Arguments were supplied, so pass them through to the Pig script.
    return new StepConfig().withName(stepName).withActionOnFailure(onFailure)
        .withHadoopJarStep(new StepFactory().newRunPigScriptStep(scriptLocation,
            emrPigStep.getScriptArguments().toArray(new String[emrPigStep.getScriptArguments().size()])));
}
Use of com.amazonaws.services.elasticmapreduce.util.StepFactory in project herd by FINRAOS.
Class EmrDaoImpl, method getStepConfig:
/**
 * Builds the list of EMR step configurations for the given cluster definition: optional Hive and Pig
 * installation steps followed by any user-defined hadoop jar steps.
 *
 * @param emrClusterDefinition the EMR cluster definition
 *
 * @return the step configurations for every step implied by the definition (possibly empty, never null)
 */
private List<StepConfig> getStepConfig(EmrClusterDefinition emrClusterDefinition) {
    List<StepConfig> stepConfigs = new ArrayList<>();
    StepFactory stepFactory = new StepFactory();

    // Add a Hive installation step when a Hive version is specified.
    // Installation failures terminate the job flow since later steps depend on the application.
    String hiveVersion = emrClusterDefinition.getHiveVersion();
    if (StringUtils.isNotBlank(hiveVersion)) {
        stepConfigs.add(new StepConfig().withName("Hive " + hiveVersion)
            .withActionOnFailure(ActionOnFailure.TERMINATE_JOB_FLOW)
            .withHadoopJarStep(stepFactory.newInstallHiveStep(hiveVersion)));
    }

    // Add a Pig installation step when a Pig version is specified.
    String pigVersion = emrClusterDefinition.getPigVersion();
    if (StringUtils.isNotBlank(pigVersion)) {
        stepConfigs.add(new StepConfig().withName("Pig " + pigVersion)
            .withActionOnFailure(ActionOnFailure.TERMINATE_JOB_FLOW)
            .withHadoopJarStep(stepFactory.newInstallPigStep(pigVersion)));
    }

    // Append any user-defined hadoop jar steps from the definition.
    if (!CollectionUtils.isEmpty(emrClusterDefinition.getHadoopJarSteps())) {
        for (HadoopJarStep jarStep : emrClusterDefinition.getHadoopJarSteps()) {
            stepConfigs.add(emrHelper.getEmrHadoopJarStepConfig(jarStep.getStepName(), jarStep.getJarLocation(),
                jarStep.getMainClass(), jarStep.getScriptArguments(), jarStep.isContinueOnError()));
        }
    }

    return stepConfigs;
}
Use of com.amazonaws.services.elasticmapreduce.util.StepFactory in project herd by FINRAOS.
Class EmrHiveStepHelper, method getEmrStepConfig:
/**
 * Builds the EMR step configuration for the specified Hive script step.
 *
 * @param step the step object; must be an {@link EmrHiveStep}
 *
 * @return the AWS step configuration that runs the Hive script, passing each script argument via the "-d" option
 */
@Override
public StepConfig getEmrStepConfig(Object step) {
    EmrHiveStep hiveStep = (EmrHiveStep) step;

    // Cancel the execution and wait by default; continue only when the user explicitly opted in.
    ActionOnFailure onFailure = Boolean.TRUE.equals(hiveStep.isContinueOnError()) ? ActionOnFailure.CONTINUE : ActionOnFailure.CANCEL_AND_WAIT;

    String stepName = hiveStep.getStepName().trim();
    String scriptLocation = hiveStep.getScriptLocation().trim();

    // No arguments were supplied for the Hive script, so run it as-is.
    if (CollectionUtils.isEmpty(hiveStep.getScriptArguments())) {
        return new StepConfig().withName(stepName).withActionOnFailure(onFailure)
            .withHadoopJarStep(new StepFactory().newRunHiveScriptStep(scriptLocation));
    }

    // Each script argument is passed to Hive as a "-d" variable definition.
    List<String> hiveArguments = new ArrayList<>(2 * hiveStep.getScriptArguments().size());
    for (String scriptArgument : hiveStep.getScriptArguments()) {
        hiveArguments.add("-d");
        hiveArguments.add(scriptArgument);
    }

    return new StepConfig().withName(stepName).withActionOnFailure(onFailure)
        .withHadoopJarStep(new StepFactory().newRunHiveScriptStep(scriptLocation,
            hiveArguments.toArray(new String[hiveArguments.size()])));
}
Aggregations