Use of com.thinkbiganalytics.spark.multiexec.SparkApplicationCommand in project kylo by Teradata.
From the class ExecuteSparkApps, method getMainArgs:
/* (non-Javadoc)
 * @see com.thinkbiganalytics.nifi.v2.spark.ExecuteSparkJob#getMainArgs(org.apache.nifi.processor.ProcessContext, org.apache.nifi.flowfile.FlowFile)
 */
@Override
protected String[] getMainArgs(ProcessContext context, FlowFile flowFile) {
    SparkApplicationCommandsBuilder listBldr = new SparkApplicationCommandsBuilder();
    // The app-names property is a comma-separated list; each name keys into this.appCommands.
    String namesCsv = context.getProperty(this.appNamesPropDescriptor).evaluateAttributeExpressions(flowFile).getValue();
    for (String appName : parseAppNames(namesCsv)) {
        AppCommand cmd = this.appCommands.get(appName);
        SparkCommandBuilder cmdBldr = listBldr.application(cmd.name);
        // The application class may contain NiFi expression language, so evaluate it against the flow file.
        PropertyValue classProp = context.newPropertyValue(cmd.appClass);
        cmdBldr.className(classProp.evaluateAttributeExpressions(flowFile).getValue());
        // Named arguments: evaluate each value before adding it to the command.
        for (Entry<NamedArgument, String> entry : cmd.namedArgs.entrySet()) {
            PropertyValue valueProp = context.newPropertyValue(entry.getValue());
            cmdBldr.addArgument(entry.getKey().argName, valueProp.evaluateAttributeExpressions(flowFile).getValue());
        }
        // Positional arguments: sort by declared position, then evaluate and append in order.
        cmd.positionalArgs.entrySet().stream()
            .sorted((e1, e2) -> Integer.compare(e1.getKey().position, e2.getKey().position))
            .map(entry -> context.newPropertyValue(entry.getValue()))
            .forEach(prop -> cmdBldr.addArgument(prop.evaluateAttributeExpressions(flowFile).getValue()));
        cmdBldr.add();
    }
    List<SparkApplicationCommand> commands = listBldr.build();
    String[] args = MultiSparkExecArguments.createCommandLine(commands);
    // Log the whole argument array as one placeholder; the original "{} {}" with a single
    // array argument risked a placeholder/argument mismatch. Requires java.util.Arrays.
    getLog().info("Spark main args: {}", new Object[] { Arrays.toString(args) });
    return args;
}
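For reference, the builder can also be exercised outside of NiFi. The following is a minimal sketch, not taken from the Kylo sources: it uses only the SparkApplicationCommandsBuilder calls visible in the snippet above (application, className, addArgument, add, build) plus MultiSparkExecArguments.createCommandLine. The application names, main classes, and argument values are hypothetical, and the exact import location of SparkCommandBuilder is an assumption.

// Minimal sketch (hypothetical names and values throughout). Assumes
// SparkCommandBuilder lives alongside SparkApplicationCommandsBuilder in
// com.thinkbiganalytics.spark.multiexec.
import java.util.List;
import com.thinkbiganalytics.spark.multiexec.MultiSparkExecArguments;
import com.thinkbiganalytics.spark.multiexec.SparkApplicationCommand;
import com.thinkbiganalytics.spark.multiexec.SparkApplicationCommandsBuilder;

public class MultiExecSketch {
    public static void main(String[] argv) {
        SparkApplicationCommandsBuilder listBldr = new SparkApplicationCommandsBuilder();

        // A hypothetical "validator" app with one named and one positional argument.
        SparkCommandBuilder validator = listBldr.application("validator");
        validator.className("com.example.ValidatorApp");
        validator.addArgument("inputPath", "/data/in");   // named argument
        validator.addArgument("strict");                  // positional argument
        validator.add();

        // A second hypothetical app with no extra arguments.
        SparkCommandBuilder profiler = listBldr.application("profiler");
        profiler.className("com.example.ProfilerApp");
        profiler.add();

        // Serialize all commands into the main-args form returned by getMainArgs above.
        List<SparkApplicationCommand> commands = listBldr.build();
        String[] args = MultiSparkExecArguments.createCommandLine(commands);
        System.out.println(String.join(" ", args));
    }
}

The NiFi method above does the same work, except that every value first passes through evaluateAttributeExpressions so flow-file attributes can parameterize the generated command line.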