Use of co.cask.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
The class MapperWrapper, method run().
@SuppressWarnings("unchecked")
@Override
public void run(Context context) throws IOException, InterruptedException {
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader);
  BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context);
  String program = basicMapReduceContext.getProgramName();

  // This is a hook for periodic flushing of changes buffered by datasets (to avoid OOME)
  WrappedMapper.Context flushingContext = createAutoFlushingContext(context, basicMapReduceContext);
  basicMapReduceContext.setHadoopContext(flushingContext);

  InputSplit inputSplit = context.getInputSplit();
  if (inputSplit instanceof MultiInputTaggedSplit) {
    basicMapReduceContext.setInputContext(InputContexts.create((MultiInputTaggedSplit) inputSplit));
  }

  ClassLoader programClassLoader = classLoader.getProgramClassLoader();
  Mapper delegate = createMapperInstance(programClassLoader, getWrappedMapper(context.getConfiguration()),
                                         context, program);

  // Inject runtime components, like datasets, etc.
  try {
    Reflections.visit(delegate, delegate.getClass(),
                      new PropertyFieldSetter(basicMapReduceContext.getSpecification().getProperties()),
                      new MetricsFieldSetter(basicMapReduceContext.getMetrics()),
                      new DataSetFieldSetter(basicMapReduceContext));
  } catch (Throwable t) {
    Throwable rootCause = Throwables.getRootCause(t);
    USERLOG.error("Failed to initialize program '{}' with error: {}. Please check the system logs for more details.",
                  program, rootCause.getMessage(), rootCause);
    throw new IOException(String.format("Failed to inject fields to %s", delegate.getClass()), t);
  }

  ClassLoader oldClassLoader;
  if (delegate instanceof ProgramLifecycle) {
    oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
    try {
      ((ProgramLifecycle) delegate).initialize(new MapReduceLifecycleContext(basicMapReduceContext));
    } catch (Exception e) {
      Throwable rootCause = Throwables.getRootCause(e);
      USERLOG.error("Failed to initialize program '{}' with error: {}. Please check the system logs for more details.",
                    program, rootCause.getMessage(), rootCause);
      throw new IOException(String.format("Failed to initialize mapper with %s", basicMapReduceContext), e);
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  }

  oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
  try {
    delegate.run(flushingContext);
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }

  // The transaction is not finished yet, but we want all operations to be dispatched
  // (some could still be buffered in memory by the tx agent)
  try {
    basicMapReduceContext.flushOperations();
  } catch (Exception e) {
    throw new IOException("Failed to flush operations at the end of mapper of " + basicMapReduceContext, e);
  }

  // Close all writers created by MultipleOutputs
  basicMapReduceContext.closeMultiOutputs();

  if (delegate instanceof ProgramLifecycle) {
    oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
    try {
      ((ProgramLifecycle<? extends RuntimeContext>) delegate).destroy();
    } catch (Exception e) {
      LOG.error("Error during destroy of mapper {}", basicMapReduceContext, e);
      // Do nothing, try to finish
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  }
}
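The heart of this listing is the Reflections.visit call: it walks the delegate's class hierarchy and hands each field to a chain of setters, with PropertyFieldSetter filling property-annotated fields from the program specification's properties map. Below is a minimal, self-contained sketch of that injection pattern using plain JDK reflection; the @Prop annotation, PropertyInjectionSketch, and inject are hypothetical stand-ins for illustration, not CDAP APIs.

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.util.Map;

// Hypothetical stand-in for a field-injection marker like CDAP's @Property.
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@interface Prop {
}

public final class PropertyInjectionSketch {

  // Walks the class hierarchy and assigns each @Prop-annotated String field
  // from the properties map, keyed by field name. This mirrors the role a
  // field setter plays inside a visitor like Reflections.visit.
  static void inject(Object target, Map<String, String> properties) throws IllegalAccessException {
    for (Class<?> type = target.getClass(); type != null; type = type.getSuperclass()) {
      for (Field field : type.getDeclaredFields()) {
        if (field.isAnnotationPresent(Prop.class) && field.getType() == String.class) {
          field.setAccessible(true);
          field.set(target, properties.get(field.getName()));
        }
      }
    }
  }

  // A tiny target class with one injectable field.
  static final class MyMapperLike {
    @Prop
    String threshold;
  }

  public static void main(String[] args) throws Exception {
    MyMapperLike mapper = new MyMapperLike();
    inject(mapper, Map.of("threshold", "42"));
    System.out.println(mapper.threshold); // prints 42
  }
}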
Use of co.cask.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
The class ReducerWrapper, method run().
@SuppressWarnings("unchecked")
@Override
public void run(Context context) throws IOException, InterruptedException {
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader);
  BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context);

  // This is a hook for periodic flushing of changes buffered by datasets (to avoid OOME)
  WrappedReducer.Context flushingContext = createAutoFlushingContext(context, basicMapReduceContext);
  basicMapReduceContext.setHadoopContext(flushingContext);

  String userReducer = context.getConfiguration().get(ATTR_REDUCER_CLASS);
  ClassLoader programClassLoader = classLoader.getProgramClassLoader();
  Reducer delegate = createReducerInstance(programClassLoader, userReducer);

  // Inject runtime components, like datasets, etc.
  try {
    Reflections.visit(delegate, delegate.getClass(),
                      new PropertyFieldSetter(basicMapReduceContext.getSpecification().getProperties()),
                      new MetricsFieldSetter(basicMapReduceContext.getMetrics()),
                      new DataSetFieldSetter(basicMapReduceContext));
  } catch (Throwable t) {
    LOG.error("Failed to inject fields to {}.", delegate.getClass(), t);
    throw Throwables.propagate(t);
  }

  ClassLoader oldClassLoader;
  if (delegate instanceof ProgramLifecycle) {
    oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
    try {
      ((ProgramLifecycle) delegate).initialize(new MapReduceLifecycleContext(basicMapReduceContext));
    } catch (Exception e) {
      LOG.error("Failed to initialize reducer with {}", basicMapReduceContext, e);
      throw Throwables.propagate(e);
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  }

  oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
  try {
    delegate.run(flushingContext);
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }

  // The transaction is not finished yet, but we want all operations to be dispatched
  // (some could still be buffered in memory by the tx agent)
  try {
    basicMapReduceContext.flushOperations();
  } catch (Exception e) {
    LOG.error("Failed to flush operations at the end of reducer of " + basicMapReduceContext, e);
    throw Throwables.propagate(e);
  }

  // Close all writers created by MultipleOutputs
  basicMapReduceContext.closeMultiOutputs();

  if (delegate instanceof ProgramLifecycle) {
    oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
    try {
      ((ProgramLifecycle<? extends RuntimeContext>) delegate).destroy();
    } catch (Exception e) {
      LOG.error("Error during destroy of reducer {}", basicMapReduceContext, e);
      // Do nothing, try to finish
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  }
}
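Note how both wrappers bracket every call into user code with the same save, swap, restore sequence on the thread context classloader, so that user classes resolve against the program classloader and the Hadoop framework's loader is always put back. A minimal sketch of that helper, assuming only the JDK; swapContextClassLoader is our name, standing in for the contract of CDAP's ClassLoaders.setContextClassLoader.

public final class ClassLoaderSwapSketch {

  // Sets the current thread's context classloader and returns the previous one,
  // so the caller can restore it in a finally block.
  static ClassLoader swapContextClassLoader(ClassLoader newClassLoader) {
    Thread thread = Thread.currentThread();
    ClassLoader old = thread.getContextClassLoader();
    thread.setContextClassLoader(newClassLoader);
    return old;
  }

  public static void main(String[] args) {
    ClassLoader programClassLoader = ClassLoaderSwapSketch.class.getClassLoader();
    ClassLoader old = swapContextClassLoader(programClassLoader);
    try {
      // Any code here (e.g. delegate.run(...)) resolves classes through programClassLoader.
      System.out.println(Thread.currentThread().getContextClassLoader() == programClassLoader);
    } finally {
      // Always restore, even if the delegate throws.
      swapContextClassLoader(old);
    }
  }
}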
Use of co.cask.cdap.common.lang.PropertyFieldSetter in project cdap by caskdata.
The class FlowletProgramRunner, method run().
@SuppressWarnings("unchecked")
@Override
public ProgramController run(Program program, ProgramOptions options) {
  BasicFlowletContext flowletContext = null;
  try {
    // Extract and verify parameters
    String flowletName = options.getName();
    int instanceId = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID, "-1"));
    Preconditions.checkArgument(instanceId >= 0, "Missing instance Id");
    int instanceCount = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCES, "0"));
    Preconditions.checkArgument(instanceCount > 0, "Invalid or missing instance count");

    RunId runId = ProgramRunners.getRunId(options);
    ApplicationSpecification appSpec = program.getApplicationSpecification();
    Preconditions.checkNotNull(appSpec, "Missing application specification.");
    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.FLOW, "Only FLOW process type is supported.");
    String processorName = program.getName();
    Preconditions.checkNotNull(processorName, "Missing processor name.");

    FlowSpecification flowSpec = appSpec.getFlows().get(processorName);
    FlowletDefinition flowletDef = flowSpec.getFlowlets().get(flowletName);
    Preconditions.checkNotNull(flowletDef, "Definition missing for flowlet \"%s\"", flowletName);
    Class<?> clz = Class.forName(flowletDef.getFlowletSpec().getClassName(), true, program.getClassLoader());
    Preconditions.checkArgument(Flowlet.class.isAssignableFrom(clz), "%s is not a Flowlet.", clz);

    // Setup dataset framework context, if required
    ProgramId programId = program.getId();
    FlowletId flowletId = programId.flowlet(flowletName);
    ProgramRunId run = programId.run(runId);
    ProgramContext programContext = new BasicProgramContext(run, flowletId);
    if (dsFramework instanceof ProgramContextAware) {
      ((ProgramContextAware) dsFramework).setContext(programContext);
    }
    Class<? extends Flowlet> flowletClass = (Class<? extends Flowlet>) clz;

    // Creates flowlet context
    flowletContext = new BasicFlowletContext(program, options, flowletId, instanceId, instanceCount,
                                             flowletDef.getDatasets(), flowletDef.getFlowletSpec(),
                                             metricsCollectionService, discoveryServiceClient, txClient,
                                             dsFramework, secureStore, secureStoreManager, messageService, cConf);

    // Creates tx related objects
    DataFabricFacade dataFabricFacade = dataFabricFacadeFactory.create(program, flowletContext.getDatasetCache());
    if (dataFabricFacade instanceof ProgramContextAware) {
      ((ProgramContextAware) dataFabricFacade).setContext(programContext);
    }

    // Creates QueueSpecification
    Table<Node, String, Set<QueueSpecification>> queueSpecs =
      new SimpleQueueSpecificationGenerator(new ApplicationId(program.getNamespaceId(), program.getApplicationId()))
        .create(flowSpec);

    Flowlet flowlet = new InstantiatorFactory(false).get(TypeToken.of(flowletClass)).create();
    TypeToken<? extends Flowlet> flowletType = TypeToken.of(flowletClass);

    // Set the context classloader to the cdap classloader. It is needed for the DatumWriterFactory
    // to be able to load cdap classes
    Thread.currentThread().setContextClassLoader(FlowletProgramRunner.class.getClassLoader());

    // Inject DataSet, OutputEmitter, Metric fields
    ImmutableList.Builder<ProducerSupplier> queueProducerSupplierBuilder = ImmutableList.builder();
    Reflections.visit(flowlet, flowlet.getClass(),
                      new PropertyFieldSetter(flowletDef.getFlowletSpec().getProperties()),
                      new DataSetFieldSetter(flowletContext),
                      new MetricsFieldSetter(flowletContext.getMetrics()),
                      new OutputEmitterFieldSetter(outputEmitterFactory(flowletContext, flowletName, dataFabricFacade,
                                                                        queueProducerSupplierBuilder, queueSpecs)));

    ImmutableList.Builder<ConsumerSupplier<?>> queueConsumerSupplierBuilder = ImmutableList.builder();
    Collection<ProcessSpecification<?>> processSpecs =
      createProcessSpecification(flowletContext, flowletType, processMethodFactory(flowlet),
                                 processSpecificationFactory(flowletContext, dataFabricFacade, queueReaderFactory,
                                                             flowletName, queueSpecs, queueConsumerSupplierBuilder,
                                                             createSchemaCache(program)),
                                 Lists.<ProcessSpecification<?>>newLinkedList());
    List<ConsumerSupplier<?>> consumerSuppliers = queueConsumerSupplierBuilder.build();

    // Create the flowlet driver
    AtomicReference<FlowletProgramController> controllerRef = new AtomicReference<>();
    Service serviceHook = createServiceHook(flowletName, consumerSuppliers, controllerRef);
    FlowletRuntimeService driver = new FlowletRuntimeService(flowlet, flowletContext, processSpecs,
                                                             createCallback(flowlet, flowletDef.getFlowletSpec()),
                                                             dataFabricFacade, serviceHook);
    FlowletProgramController controller = new FlowletProgramController(program.getId(), flowletName, flowletContext,
                                                                       driver, queueProducerSupplierBuilder.build(),
                                                                       consumerSuppliers);
    controllerRef.set(controller);

    LOG.info("Starting flowlet: {}", flowletContext);
    driver.start();
    LOG.info("Flowlet started: {}", flowletContext);
    return controller;
  } catch (Exception e) {
    // Startup failed before the controller took ownership, so release the resources
    // of the flowlet context here.
    if (flowletContext != null) {
      flowletContext.close();
    }
    throw Throwables.propagate(e);
  }
}
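The catch block shows the acquire-then-guard shape of the whole method: the flowlet context is created early, and until driver.start() succeeds and the controller takes ownership, the runner itself is responsible for closing it on any failure. A condensed sketch of that cleanup pattern; the names FakeContext and run are ours for illustration, not CDAP APIs.

// A condensed shape of the runner's error handling: acquire the context first,
// and close it if anything later in the startup sequence throws.
public final class StartupCleanupSketch {

  static final class FakeContext implements AutoCloseable {
    @Override
    public void close() {
      System.out.println("context closed");
    }
  }

  static void run(boolean failStartup) {
    FakeContext context = null;
    try {
      context = new FakeContext();
      if (failStartup) {
        throw new IllegalStateException("driver failed to start");
      }
      System.out.println("driver started; controller now owns the context");
    } catch (Exception e) {
      // Startup failed before anyone else took ownership, so release the
      // context here; otherwise its resources would leak.
      if (context != null) {
        context.close();
      }
      throw new RuntimeException(e);
    }
  }

  public static void main(String[] args) {
    try {
      run(true); // prints "context closed", then reports the failure
    } catch (RuntimeException e) {
      System.out.println("startup failed: " + e.getCause().getMessage());
    }
  }
}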