use of io.cdap.cdap.api.ProgramLifecycle in project cdap by caskdata.
the class MapReduceRuntimeService method destroy.
/**
 * Calls the destroy method of {@link ProgramLifecycle}.
 */
private void destroy() {
  TransactionControl defaultTxControl = context.getDefaultTxControl();
  TransactionControl txControl = mapReduce instanceof ProgramLifecycle
    ? Transactions.getTransactionControl(defaultTxControl, MapReduce.class, mapReduce, "destroy")
    : defaultTxControl;
  context.destroyProgram(programLifecycle, txControl, false);
  if (emitFieldLineage()) {
    try {
      // here we cannot call context.flushRecord() since the WorkflowNodeState will need to record and store
      // the lineage information
      FieldLineageInfo info = new FieldLineageInfo(context.getFieldLineageOperations());
      fieldLineageWriter.write(mapReduceRunId, info);
    } catch (Throwable t) {
      LOG.warn("Failed to emit the field lineage operations for MapReduce {}", mapReduceRunId, t);
    }
  }
}
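The transaction control for destroy() comes from the program class itself: Transactions.getTransactionControl looks up the @TransactionPolicy annotation declared on the program's destroy() method and falls back to the default when none is present. A minimal sketch of the program side, assuming the standard CDAP annotations (MyMapReduce is a hypothetical class name):

import io.cdap.cdap.api.annotation.TransactionControl;
import io.cdap.cdap.api.annotation.TransactionPolicy;
import io.cdap.cdap.api.mapreduce.AbstractMapReduce;

public class MyMapReduce extends AbstractMapReduce {
  @Override
  @TransactionPolicy(TransactionControl.EXPLICIT)
  public void destroy() {
    // hypothetical cleanup that manages its own transactions; the annotation
    // above is what makes the destroy() call run with EXPLICIT control
  }
}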
use of io.cdap.cdap.api.ProgramLifecycle in project cdap by caskdata.
the class MapperWrapper method run.
@SuppressWarnings("unchecked")
@Override
public void run(Context context) throws IOException, InterruptedException {
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader);
  BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context);
  String program = basicMapReduceContext.getProgramName();
  final MapTaskMetricsWriter mapTaskMetricsWriter =
    new MapTaskMetricsWriter(basicMapReduceContext.getProgramMetrics(), context);
  // this is a hook for periodic flushing of changes buffered by datasets (to avoid OOME)
  WrappedMapper.Context flushingContext =
    createAutoFlushingContext(context, basicMapReduceContext, mapTaskMetricsWriter);
  basicMapReduceContext.setHadoopContext(flushingContext);
  InputSplit inputSplit = context.getInputSplit();
  if (inputSplit instanceof MultiInputTaggedSplit) {
    basicMapReduceContext.setInputContext(InputContexts.create((MultiInputTaggedSplit) inputSplit));
  }
  ClassLoader programClassLoader = classLoader.getProgramClassLoader();
  Mapper delegate =
    createMapperInstance(programClassLoader, getWrappedMapper(context.getConfiguration()), context, program);
  // injecting runtime components, like datasets, etc.
  try {
    Reflections.visit(delegate, delegate.getClass(),
                      new PropertyFieldSetter(basicMapReduceContext.getSpecification().getProperties()),
                      new MetricsFieldSetter(basicMapReduceContext.getMetrics()),
                      new DataSetFieldSetter(basicMapReduceContext));
  } catch (Throwable t) {
    Throwable rootCause = Throwables.getRootCause(t);
    USERLOG.error("Failed to initialize program '{}' with error: {}. Please check the system logs for more details.",
                  program, rootCause.getMessage(), rootCause);
    throw new IOException(String.format("Failed to inject fields to %s", delegate.getClass()), t);
  }
  ClassLoader oldClassLoader;
  if (delegate instanceof ProgramLifecycle) {
    oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
    try {
      ((ProgramLifecycle) delegate).initialize(new MapReduceLifecycleContext(basicMapReduceContext));
    } catch (Exception e) {
      Throwable rootCause = Throwables.getRootCause(e);
      USERLOG.error("Failed to initialize program '{}' with error: {}. Please check the system logs for more details.",
                    program, rootCause.getMessage(), rootCause);
      throw new IOException(String.format("Failed to initialize mapper with %s", basicMapReduceContext), e);
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  }
  oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
  try {
    delegate.run(flushingContext);
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }
  // transaction is not finished, but we need all operations to be dispatched (some could be buffered in
  // memory by tx agent)
  try {
    basicMapReduceContext.flushOperations();
  } catch (Exception e) {
    throw new IOException("Failed to flush operations at the end of mapper of " + basicMapReduceContext, e);
  }
  // Close all writers created by MultipleOutputs
  basicMapReduceContext.closeMultiOutputs();
  if (delegate instanceof ProgramLifecycle) {
    oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
    try {
      ((ProgramLifecycle<? extends RuntimeContext>) delegate).destroy();
    } catch (Exception e) {
      LOG.error("Error during destroy of mapper {}", basicMapReduceContext, e);
      // Do nothing, try to finish
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  }
  // Emit metrics one final time
  mapTaskMetricsWriter.reportMetrics();
}
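For reference, a sketch of the user-facing side this wrapper drives: a mapper that implements ProgramLifecycle gets initialize() before run() and destroy() after it, each invoked under the program class loader as above. MyMapper and its key/value types are hypothetical; the raw MapReduceTaskContext type parameter follows the usage in CDAP's own examples:

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import io.cdap.cdap.api.ProgramLifecycle;
import io.cdap.cdap.api.mapreduce.MapReduceTaskContext;

public class MyMapper extends Mapper<LongWritable, Text, Text, LongWritable>
    implements ProgramLifecycle<MapReduceTaskContext> {

  @Override
  public void initialize(MapReduceTaskContext context) throws Exception {
    // invoked by MapperWrapper before run(); look up datasets, read runtime args, etc.
  }

  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    context.write(value, key);
  }

  @Override
  public void destroy() {
    // invoked by MapperWrapper after run(); release per-task resources
  }
}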
use of io.cdap.cdap.api.ProgramLifecycle in project cdap by caskdata.
the class BasicMapReduceTaskContext method close.
@Override
public void close() {
  super.close();
  // destroy all of the ProgramLifecycles registered
  RuntimeException ex = null;
  for (ProgramLifecycle programLifecycle : programLifecycles) {
    try {
      programLifecycle.destroy();
    } catch (RuntimeException e) {
      if (ex == null) {
        ex = new RuntimeException(e);
      } else {
        ex.addSuppressed(e);
      }
    }
  }
  if (ex != null) {
    throw ex;
  }
}
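Because the first failure becomes the cause of the rethrown exception and later ones are attached with addSuppressed(), no destroy() error is lost. A caller-side sketch of surfacing them (closeTask, taskContext, and LOG are stand-ins, not CDAP code):

void closeTask(BasicMapReduceTaskContext taskContext) {
  try {
    taskContext.close();
  } catch (RuntimeException e) {
    // the first destroy() failure is the cause; later ones ride along as suppressed
    LOG.error("At least one ProgramLifecycle.destroy() failed", e.getCause());
    for (Throwable suppressed : e.getSuppressed()) {
      LOG.error("Additional destroy() failure", suppressed);
    }
    throw e;
  }
}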
use of io.cdap.cdap.api.ProgramLifecycle in project cdap by caskdata.
the class SparkRuntimeService method destroy.
/**
 * Calls the destroy or onFinish method of {@link ProgramLifecycle}.
 */
private void destroy(final ProgramState state) {
  context.setState(state);
  TransactionControl defaultTxControl = runtimeContext.getDefaultTxControl();
  TransactionControl txControl = spark instanceof ProgramLifecycle
    ? Transactions.getTransactionControl(defaultTxControl, Spark.class, spark, "destroy")
    : defaultTxControl;
  runtimeContext.destroyProgram(programLifecycle, txControl, false);
  if (emitFieldLineage()) {
    try {
      // here we cannot call context.flushRecord() since the WorkflowNodeState will need to record and store
      // the lineage information
      FieldLineageInfo info = new FieldLineageInfo(runtimeContext.getFieldLineageOperations());
      fieldLineageWriter.write(runtimeContext.getProgramRunId(), info);
    } catch (Throwable t) {
      LOG.warn("Failed to emit the field lineage operations for Spark {}", runtimeContext.getProgramRunId(), t);
    }
  }
}
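The lookup mirrors the MapReduce case: a Spark program can annotate its destroy() method to choose the transaction control. A minimal sketch under the same assumptions (MySpark and MySparkMain are hypothetical names):

import io.cdap.cdap.api.annotation.TransactionControl;
import io.cdap.cdap.api.annotation.TransactionPolicy;
import io.cdap.cdap.api.spark.AbstractSpark;

public class MySpark extends AbstractSpark {
  @Override
  protected void configure() {
    setMainClass(MySparkMain.class);  // MySparkMain is a stand-in main class
  }

  @Override
  @TransactionPolicy(TransactionControl.EXPLICIT)
  public void destroy() {
    // cleanup that manages its own transactions, matching the EXPLICIT policy
  }
}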
use of io.cdap.cdap.api.ProgramLifecycle in project cdap by caskdata.
the class WorkflowDriver method destroyWorkflow.
@SuppressWarnings("unchecked")
private void destroyWorkflow() {
  if (!(workflow instanceof ProgramLifecycle)) {
    return;
  }
  final TransactionControl txControl =
    Transactions.getTransactionControl(workflowContext.getDefaultTxControl(), Workflow.class, workflow, "destroy");
  basicWorkflowToken.setCurrentNode(workflowSpec.getName());
  workflowContext.destroyProgram((ProgramLifecycle) workflow, txControl, false);
  try {
    workflowStateWriter.setWorkflowToken(workflowRunId, basicWorkflowToken);
  } catch (Throwable t) {
    LOG.error("Failed to store the final workflow token of Workflow {}", workflowRunId, t);
  }
  if (ProgramStatus.COMPLETED != workflowContext.getState().getStatus()) {
    return;
  }
  writeFieldLineage(workflowContext);
}
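On the program side, a custom workflow picks up this hook by satisfying the instanceof check above. A hypothetical sketch, assuming the program extends AbstractWorkflow (which supplies the ProgramLifecycle methods; MyWorkflow and the node name are stand-ins):

import io.cdap.cdap.api.ProgramStatus;
import io.cdap.cdap.api.workflow.AbstractWorkflow;

public class MyWorkflow extends AbstractWorkflow {
  @Override
  protected void configure() {
    addMapReduce("MyMapReduce");  // stand-in node name
  }

  @Override
  public void destroy() {
    // destroyWorkflow() above invokes this before persisting the final token
    if (getContext().getState().getStatus() == ProgramStatus.FAILED) {
      // e.g. put a diagnostic value into the workflow token
    }
  }
}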