Usage of io.cdap.cdap.api.spark.AbstractSpark in project cdap by caskdata.
Example: the ExternalSparkProgram class, method initialize().
@Override
protected void initialize() throws Exception {
  SparkClientContext context = getContext();
  // Deserialize the pipeline phase spec that was stored as a program property at deploy time.
  // Note: reuse the 'context' local instead of calling getContext() a second time.
  BatchPhaseSpec phaseSpec = GSON.fromJson(
    context.getSpecification().getProperty(Constants.PIPELINEID), BatchPhaseSpec.class);

  // Propagate all pipeline-level properties into the Spark configuration for this run.
  SparkConf sparkConf = new SparkConf();
  for (Map.Entry<String, String> pipelineProperty : phaseSpec.getPipelineProperties().entrySet()) {
    sparkConf.set(pipelineProperty.getKey(), pipelineProperty.getValue());
  }
  context.setSparkConf(sparkConf);

  String stageName = context.getSpecification().getProperty(STAGE_NAME);
  Class<?> externalProgramClass = context.loadPluginClass(stageName);
  // If the external program implements Spark, instantiate it and call initialize() to provide full lifecycle support
  if (Spark.class.isAssignableFrom(externalProgramClass)) {
    MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(
      new BasicArguments(context), context.getLogicalStartTime(), context, context, context.getNamespace());
    delegateSpark = context.newPluginInstance(stageName, macroEvaluator);
    if (delegateSpark instanceof AbstractSpark) {
      // noinspection unchecked
      ((AbstractSpark) delegateSpark).initialize(context);
    }
  }
}
Usage of io.cdap.cdap.api.spark.AbstractSpark in project cdap by caskdata.
Example: the SparkRuntimeService class, method initialize().
/**
 * Invokes the program's initialization lifecycle: {@link Spark#beforeSubmit(SparkClientContext)}
 * for pre-3.5 Spark programs, or {@link ProgramLifecycle#initialize} otherwise.
 */
@SuppressWarnings("unchecked")
private void initialize() throws Exception {
  context.setState(new ProgramState(ProgramStatus.INITIALIZING, null));

  // AbstractSpark implements final initialize(context) and requires subclass to
  // implement initialize(), whereas programs that directly implement Spark have
  // the option to override initialize(context) (if they implement ProgramLifeCycle)
  TransactionControl defaultControl = runtimeContext.getDefaultTxControl();
  final TransactionControl effectiveControl;
  if (spark instanceof AbstractSpark) {
    effectiveControl =
      Transactions.getTransactionControl(defaultControl, AbstractSpark.class, spark, "initialize");
  } else if (spark instanceof ProgramLifecycle) {
    effectiveControl =
      Transactions.getTransactionControl(defaultControl, Spark.class, spark, "initialize",
                                         SparkClientContext.class);
  } else {
    effectiveControl = defaultControl;
  }
  runtimeContext.initializeProgram(programLifecycle, effectiveControl, false);
}
Aggregations