Use of co.cask.cdap.app.runtime.ProgramController in project cdap by caskdata.
Class WorkerProgramRunner, method run:
@Override
public ProgramController run(Program program, ProgramOptions options) {
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  Preconditions.checkNotNull(appSpec, "Missing application specification.");
  int instanceId = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID, "-1"));
  Preconditions.checkArgument(instanceId >= 0, "Missing instance Id");
  int instanceCount = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCES, "0"));
  Preconditions.checkArgument(instanceCount > 0, "Invalid or missing instance count");
  RunId runId = ProgramRunners.getRunId(options);
  ProgramType programType = program.getType();
  Preconditions.checkNotNull(programType, "Missing processor type.");
  Preconditions.checkArgument(programType == ProgramType.WORKER, "Only Worker process type is supported.");
  WorkerSpecification workerSpec = appSpec.getWorkers().get(program.getName());
  Preconditions.checkArgument(workerSpec != null, "Missing Worker specification for %s", program.getId());
  String instances = options.getArguments().getOption(ProgramOptionConstants.INSTANCES,
                                                      String.valueOf(workerSpec.getInstances()));
  WorkerSpecification newWorkerSpec = new WorkerSpecification(workerSpec.getClassName(), workerSpec.getName(),
                                                              workerSpec.getDescription(), workerSpec.getProperties(),
                                                              workerSpec.getDatasets(), workerSpec.getResources(),
                                                              Integer.valueOf(instances));

  // Set up the dataset framework context, if required
  if (datasetFramework instanceof ProgramContextAware) {
    ProgramId programId = program.getId();
    ((ProgramContextAware) datasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
  }

  final PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
  try {
    BasicWorkerContext context = new BasicWorkerContext(newWorkerSpec, program, options, cConf, instanceId,
                                                        instanceCount, metricsCollectionService, datasetFramework,
                                                        txClient, discoveryServiceClient, streamWriterFactory,
                                                        pluginInstantiator, secureStore, secureStoreManager,
                                                        messagingService);
    WorkerDriver worker = new WorkerDriver(program, newWorkerSpec, context);

    // Add a service listener to make sure the plugin instantiator is closed when the worker driver finishes.
    worker.addListener(createRuntimeServiceListener(Collections.singleton((Closeable) pluginInstantiator)),
                       Threads.SAME_THREAD_EXECUTOR);

    ProgramController controller = new WorkerControllerServiceAdapter(worker, program.getId().run(runId),
                                                                      workerSpec.getName() + "-" + instanceId);
    worker.start();
    return controller;
  } catch (Throwable t) {
    Closeables.closeQuietly(pluginInstantiator);
    throw t;
  }
}
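The ProgramController returned by run is what callers use to observe and stop the worker. Below is a minimal caller-side sketch, not part of the CDAP source above; it assumes the CDAP and Twill classes are on the classpath, and the package of AbstractListener is an assumption.

import co.cask.cdap.app.runtime.ProgramController;
import co.cask.cdap.internal.app.runtime.AbstractListener; // package assumed
import com.google.common.util.concurrent.Futures;
import org.apache.twill.common.Threads;

public final class WorkerControllerUsage {

  // Sketch only: observe the worker's lifecycle through the controller, then request a stop.
  static void observeAndStop(final ProgramController controller) {
    controller.addListener(new AbstractListener() {
      @Override
      public void completed() {
        System.out.println("Worker run " + controller.getRunId().getId() + " completed");
      }

      @Override
      public void error(Throwable cause) {
        cause.printStackTrace();
      }
    }, Threads.SAME_THREAD_EXECUTOR);

    // stop() returns a future; block until the stop request has taken effect.
    Futures.getUnchecked(controller.stop());
  }
}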
Use of co.cask.cdap.app.runtime.ProgramController in project cdap by caskdata.
Class DefaultProgramWorkflowRunner, method runAndWait:
private void runAndWait(ProgramRunner programRunner, Program program, ProgramOptions options) throws Exception {
  Closeable closeable = createCloseable(programRunner, program);

  // Publish the program's starting state
  RunId runId = ProgramRunners.getRunId(options);
  String twillRunId = options.getArguments().getOption(ProgramOptionConstants.TWILL_RUN_ID);
  programStateWriter.start(program.getId().run(runId), options, twillRunId);

  ProgramController controller;
  try {
    controller = programRunner.run(program, options);
  } catch (Throwable t) {
    // If there is any exception when running the program, close the program to release resources.
    // Otherwise resources are released when the execution completes.
    programStateWriter.error(program.getId().run(runId), t);
    Closeables.closeQuietly(closeable);
    throw t;
  }

  blockForCompletion(closeable, controller);

  if (controller instanceof WorkflowTokenProvider) {
    updateWorkflowToken(((WorkflowTokenProvider) controller).getWorkflowToken());
  } else {
    // This shouldn't happen
    throw new IllegalStateException("No WorkflowToken available after program completed: " + program.getId());
  }
}
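The createCloseable helper is referenced above but not included in this excerpt. A plausible sketch of what such a helper could look like is shown below; it simply combines the runner and the program into one Closeable and closes whichever of them actually implements Closeable. This is an assumption, not the actual CDAP implementation.

import java.io.Closeable;
import java.io.IOException;

// Plausible sketch only: release both objects through a single Closeable.
private Closeable createCloseable(final Object programRunner, final Object program) {
  return new Closeable() {
    @Override
    public void close() throws IOException {
      closeIfCloseable(programRunner);
      closeIfCloseable(program);
    }
  };
}

private static void closeIfCloseable(Object o) throws IOException {
  if (o instanceof Closeable) {
    ((Closeable) o).close();
  }
}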
Use of co.cask.cdap.app.runtime.ProgramController in project cdap by caskdata.
Class DefaultProgramWorkflowRunner, method blockForCompletion:
/**
 * Adds a listener to the {@link ProgramController} and blocks for completion.
 *
 * @param closeable a {@link Closeable} to close when the program execution completes
 * @param controller the {@link ProgramController} for the program
 * @throws Exception if the execution failed
 */
private void blockForCompletion(final Closeable closeable, final ProgramController controller) throws Exception {
  // Execute the program.
  final SettableFuture<Void> completion = SettableFuture.create();
  controller.addListener(new AbstractListener() {
    @Override
    public void init(ProgramController.State currentState, @Nullable Throwable cause) {
      switch (currentState) {
        case COMPLETED:
          completed();
          break;
        case KILLED:
          killed();
          break;
        case ERROR:
          error(cause);
          break;
      }
    }

    @Override
    public void completed() {
      Closeables.closeQuietly(closeable);
      nodeStates.put(nodeId, new WorkflowNodeState(nodeId, NodeStatus.COMPLETED, controller.getRunId().getId(), null));
      completion.set(null);
    }

    @Override
    public void killed() {
      Closeables.closeQuietly(closeable);
      nodeStates.put(nodeId, new WorkflowNodeState(nodeId, NodeStatus.KILLED, controller.getRunId().getId(), null));
      completion.set(null);
    }

    @Override
    public void error(Throwable cause) {
      Closeables.closeQuietly(closeable);
      nodeStates.put(nodeId, new WorkflowNodeState(nodeId, NodeStatus.FAILED, controller.getRunId().getId(), cause));
      completion.setException(cause);
    }
  }, Threads.SAME_THREAD_EXECUTOR);

  // Block for completion.
  try {
    completion.get();
  } catch (ExecutionException e) {
    Throwable cause = e.getCause();
    if (cause instanceof Exception) {
      throw (Exception) cause;
    }
    throw Throwables.propagate(cause);
  } catch (InterruptedException e) {
    try {
      Futures.getUnchecked(controller.stop());
    } catch (Throwable t) {
      // no-op
    }
    // reset the interrupt
    Thread.currentThread().interrupt();
  }
}
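The core pattern in blockForCompletion is bridging a callback-style listener to a blocking wait with Guava's SettableFuture. A minimal stand-alone illustration of that bridge, independent of any CDAP class, is shown here (Guava on the classpath is the only requirement):

import com.google.common.util.concurrent.SettableFuture;

public final class SettableFutureBridge {
  public static void main(String[] args) throws Exception {
    final SettableFuture<Void> completion = SettableFuture.create();

    // Simulate an asynchronous completion callback firing on another thread.
    new Thread(new Runnable() {
      @Override
      public void run() {
        completion.set(null);   // or completion.setException(cause) on failure
      }
    }).start();

    completion.get();           // blocks until set() or setException() is called
    System.out.println("Async work finished");
  }
}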
Use of co.cask.cdap.app.runtime.ProgramController in project cdap by caskdata.
Class WorkerProgramRunnerTest, method testWorkerDatasetWithMetrics:
@Test
public void testWorkerDatasetWithMetrics() throws Throwable {
  final ApplicationWithPrograms app = AppFabricTestHelper.deployApplicationWithManager(AppWithWorker.class, TEMP_FOLDER_SUPPLIER);
  ProgramController controller = startProgram(app, AppWithWorker.TableWriter.class);

  // validate worker wrote the "initialize" and "run" rows
  final TransactionExecutor executor = txExecutorFactory.createExecutor(datasetCache);

  // wait at most 5 seconds until the "RUN" row is set (indicates the worker has started running)
  Tasks.waitFor(AppWithWorker.RUN, new Callable<String>() {
    @Override
    public String call() throws Exception {
      return executor.execute(new Callable<String>() {
        @Override
        public String call() throws Exception {
          KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
          return Bytes.toString(kvTable.read(AppWithWorker.RUN));
        }
      });
    }
  }, 5, TimeUnit.SECONDS);

  stopProgram(controller);

  txExecutorFactory.createExecutor(datasetCache.getTransactionAwares()).execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
      Assert.assertEquals(AppWithWorker.RUN, Bytes.toString(kvTable.read(AppWithWorker.RUN)));
      Assert.assertEquals(AppWithWorker.INITIALIZE, Bytes.toString(kvTable.read(AppWithWorker.INITIALIZE)));
      Assert.assertEquals(AppWithWorker.STOP, Bytes.toString(kvTable.read(AppWithWorker.STOP)));
    }
  });

  // validate that the table emitted metrics
  Tasks.waitFor(3L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      Collection<MetricTimeSeries> metrics = metricStore.query(
        new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                            "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                            ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getEntityName(),
                                            Constants.Metrics.Tag.APP, AppWithWorker.NAME,
                                            Constants.Metrics.Tag.WORKER, AppWithWorker.WORKER,
                                            Constants.Metrics.Tag.DATASET, AppWithWorker.DATASET),
                            Collections.<String>emptyList()));
      if (metrics.isEmpty()) {
        return 0L;
      }
      Assert.assertEquals(1, metrics.size());
      MetricTimeSeries ts = metrics.iterator().next();
      Assert.assertEquals(1, ts.getTimeValues().size());
      return ts.getTimeValues().get(0).getValue();
    }
  }, 5L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
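The test relies on Tasks.waitFor to poll a Callable until it returns the expected value or a timeout elapses. A generic, JDK-only polling helper in the same spirit is sketched below; it is an illustration of the pattern, not the CDAP Tasks implementation.

import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public final class Polling {

  // Repeatedly evaluate the callable until it returns the expected value or the timeout elapses.
  public static <T> void waitFor(T expected, Callable<T> actual,
                                 long timeout, TimeUnit timeoutUnit,
                                 long pollInterval, TimeUnit pollUnit) throws Exception {
    long deadline = System.nanoTime() + timeoutUnit.toNanos(timeout);
    do {
      if (expected.equals(actual.call())) {
        return;
      }
      pollUnit.sleep(pollInterval);
    } while (System.nanoTime() < deadline);
    throw new TimeoutException("Value did not become " + expected + " within " + timeout + " " + timeoutUnit);
  }
}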
Use of co.cask.cdap.app.runtime.ProgramController in project cdap by caskdata.
Class WorkerProgramRunnerTest, method startProgram:
private ProgramController startProgram(ApplicationWithPrograms app, Class<?> programClass) throws Throwable {
  final AtomicReference<Throwable> errorCause = new AtomicReference<>();
  final ProgramController controller = AppFabricTestHelper.submit(app, programClass.getName(),
                                                                  new BasicArguments(), TEMP_FOLDER_SUPPLIER);
  runningPrograms.add(controller);
  controller.addListener(new AbstractListener() {
    @Override
    public void error(Throwable cause) {
      errorCause.set(cause);
    }

    @Override
    public void killed() {
      errorCause.set(new RuntimeException("Killed"));
    }
  }, Threads.SAME_THREAD_EXECUTOR);

  Tasks.waitFor(ProgramController.State.ALIVE, new Callable<ProgramController.State>() {
    @Override
    public ProgramController.State call() throws Exception {
      Throwable t = errorCause.get();
      if (t != null) {
        Throwables.propagateIfInstanceOf(t, Exception.class);
        throw Throwables.propagate(t);
      }
      return controller.getState();
    }
  }, 30, TimeUnit.SECONDS);

  return controller;
}