Use of co.cask.cdap.api.spark.AbstractSpark in project cdap by caskdata: class SparkRuntimeService, method initialize().
/**
 * Calls the {@link Spark#beforeSubmit(SparkClientContext)} for the pre 3.5 Spark programs, calls
 * the {@link ProgramLifecycle#initialize} otherwise.
 *
 * @throws Exception if program initialization fails
 */
@SuppressWarnings("unchecked")
private void initialize() throws Exception {
  context.setState(new ProgramState(ProgramStatus.INITIALIZING, null));

  // AbstractSpark implements final initialize(context) and requires subclass to
  // implement initialize(), whereas programs that directly implement Spark have
  // the option to override initialize(context) (if they implement ProgramLifeCycle).
  // Resolve which method carries the transaction-control annotation accordingly.
  final TransactionControl txControl;
  if (spark instanceof AbstractSpark) {
    // Look up the no-arg initialize() declared by the AbstractSpark subclass.
    txControl = Transactions.getTransactionControl(
        TransactionControl.IMPLICIT, AbstractSpark.class, spark, "initialize");
  } else if (spark instanceof ProgramLifecycle) {
    // Direct ProgramLifecycle implementor: inspect initialize(SparkClientContext).
    txControl = Transactions.getTransactionControl(
        TransactionControl.IMPLICIT, Spark.class, spark, "initialize", SparkClientContext.class);
  } else {
    // Plain Spark program with no lifecycle hook: fall back to implicit transactions.
    txControl = TransactionControl.IMPLICIT;
  }

  runtimeContext.initializeProgram(programLifecycle, txControl, false);
}
Use of co.cask.cdap.api.spark.AbstractSpark in project cdap by caskdata: class ExternalSparkProgram, method initialize().
/**
 * Prepares the Spark configuration and, when the configured plugin is itself a
 * Spark program, instantiates it as the delegate so it receives full lifecycle support.
 *
 * @throws Exception if the plugin class cannot be loaded or instantiated
 */
@Override
protected void initialize() throws Exception {
  SparkClientContext clientContext = getContext();

  // Raise the PermGen ceiling for both the driver and the executors, keeping
  // whatever extra JVM options were already configured for each.
  SparkConf conf = new SparkConf();
  for (String optionKey : new String[] {
      "spark.driver.extraJavaOptions", "spark.executor.extraJavaOptions"}) {
    conf.set(optionKey, "-XX:MaxPermSize=256m " + conf.get(optionKey, ""));
  }
  clientContext.setSparkConf(conf);

  String stageName = clientContext.getSpecification().getProperty(STAGE_NAME);
  Class<?> pluginClass = clientContext.loadPluginClass(stageName);

  // Only Spark plugins get instantiated here; anything else has no lifecycle to drive.
  if (!Spark.class.isAssignableFrom(pluginClass)) {
    return;
  }

  MacroEvaluator evaluator = new DefaultMacroEvaluator(
      new BasicArguments(clientContext), clientContext.getLogicalStartTime(),
      clientContext, clientContext.getNamespace());
  delegateSpark = clientContext.newPluginInstance(stageName, evaluator);
  if (delegateSpark instanceof AbstractSpark) {
    // noinspection unchecked
    ((AbstractSpark) delegateSpark).initialize(clientContext);
  }
}
Aggregations