use of co.cask.cdap.internal.app.runtime.SimpleProgramOptions in project cdap by caskdata.
the class DistributedProgramRunner method addArtifactPluginFiles.
private ProgramOptions addArtifactPluginFiles(ProgramOptions options, Map<String, LocalizeResource> localizeResources,
                                              File tempDir) throws IOException {
  Arguments systemArgs = options.getArguments();
  if (!systemArgs.hasOption(ProgramOptionConstants.PLUGIN_DIR)) {
    return options;
  }

  File localDir = new File(systemArgs.getOption(ProgramOptionConstants.PLUGIN_DIR));
  File archiveFile = new File(tempDir, "artifacts.jar");
  BundleJarUtil.createJar(localDir, archiveFile);

  // Localize plugins to two files, one expanded into a directory, one not.
  localizeResources.put("artifacts", new LocalizeResource(archiveFile, true));
  localizeResources.put("artifacts_archive.jar", new LocalizeResource(archiveFile, false));

  Map<String, String> newSystemArgs = Maps.newHashMap(systemArgs.asMap());
  newSystemArgs.put(ProgramOptionConstants.PLUGIN_DIR, "artifacts");
  newSystemArgs.put(ProgramOptionConstants.PLUGIN_ARCHIVE, "artifacts_archive.jar");

  return new SimpleProgramOptions(options.getProgramId(), new BasicArguments(newSystemArgs),
                                  options.getUserArguments(), options.isDebug());
}
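The caller's side is not shown in this snippet; the following is a minimal sketch, assuming an options variable of type ProgramOptions is in scope:

  Map<String, LocalizeResource> localizeResources = new HashMap<>();
  File tempDir = Files.createTempDir(); // Guava scratch directory; any temp directory works
  ProgramOptions rewritten = addArtifactPluginFiles(options, localizeResources, tempDir);
  // The map now carries the same jar twice: "artifacts" will be expanded into a directory
  // on the remote container, while "artifacts_archive.jar" is kept as a plain jar. The
  // rewritten system arguments point PLUGIN_DIR at the expanded copy.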
use of co.cask.cdap.internal.app.runtime.SimpleProgramOptions in project cdap by caskdata.
the class ProgramNotificationSubscriberServiceTest method testAppSpecNotRequiredToWriteState.
@Test
public void testAppSpecNotRequiredToWriteState() throws Exception {
  Injector injector = AppFabricTestHelper.getInjector();
  CConfiguration cConf = injector.getInstance(CConfiguration.class);
  ProgramNotificationSubscriberService programNotificationSubscriberService =
      injector.getInstance(ProgramNotificationSubscriberService.class);
  programNotificationSubscriberService.startAndWait();

  DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
  TransactionExecutorFactory txExecutorFactory = injector.getInstance(TransactionExecutorFactory.class);
  DatasetId storeTable = NamespaceId.SYSTEM.dataset(Constants.AppMetaStore.TABLE);
  Table table = DatasetsUtil.getOrCreateDataset(datasetFramework, storeTable, Table.class.getName(),
                                                DatasetProperties.EMPTY, Collections.<String, String>emptyMap());
  final AppMetadataStore metadataStoreDataset = new AppMetadataStore(table, cConf, new AtomicBoolean(false));
  final TransactionExecutor txnl =
      txExecutorFactory.createExecutor(Collections.singleton((TransactionAware) metadataStoreDataset));

  ProgramStateWriter programStateWriter = injector.getInstance(ProgramStateWriter.class);
  ProgramId programId = NamespaceId.DEFAULT.app("someapp").program(ProgramType.SERVICE, "s");
  ProgramOptions programOptions = new SimpleProgramOptions(programId);
  final ProgramRunId runId = programId.run(RunIds.generate());

  programStateWriter.start(runId, programOptions, null);
  Tasks.waitFor(ProgramRunStatus.STARTING, () -> txnl.execute(() -> {
    RunRecordMeta meta = metadataStoreDataset.getRun(runId);
    return meta == null ? null : meta.getStatus();
  }), 10, TimeUnit.SECONDS);

  programStateWriter.running(runId, UUID.randomUUID().toString());
  Tasks.waitFor(ProgramRunStatus.RUNNING, () -> txnl.execute(() -> {
    RunRecordMeta meta = metadataStoreDataset.getRun(runId);
    return meta == null ? null : meta.getStatus();
  }), 10, TimeUnit.SECONDS);

  programStateWriter.killed(runId);
  Tasks.waitFor(ProgramRunStatus.KILLED, () -> txnl.execute(() -> {
    RunRecordMeta meta = metadataStoreDataset.getRun(runId);
    return meta == null ? null : meta.getStatus();
  }), 10, TimeUnit.SECONDS);
}
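The three Tasks.waitFor blocks differ only in the expected status; a hypothetical helper (not part of the actual test class) would make that explicit:

  private void waitForRunStatus(ProgramRunStatus expected, TransactionExecutor txnl,
                                AppMetadataStore store, ProgramRunId runId) throws Exception {
    Tasks.waitFor(expected, () -> txnl.execute(() -> {
      RunRecordMeta meta = store.getRun(runId);
      return meta == null ? null : meta.getStatus();
    }), 10, TimeUnit.SECONDS);
  }

Each assertion then reduces to a single call such as waitForRunStatus(ProgramRunStatus.KILLED, txnl, metadataStoreDataset, runId).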
use of co.cask.cdap.internal.app.runtime.SimpleProgramOptions in project cdap by caskdata.
the class MapReduceTaskContextProvider method createCacheLoader.
/**
* Creates a {@link CacheLoader} for the task context cache.
*/
private CacheLoader<ContextCacheKey, BasicMapReduceTaskContext> createCacheLoader(final Injector injector) {
  final DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
  final DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
  final SecureStore secureStore = injector.getInstance(SecureStore.class);
  final SecureStoreManager secureStoreManager = injector.getInstance(SecureStoreManager.class);
  final MessagingService messagingService = injector.getInstance(MessagingService.class);
  // Multiple instances of BasicMapReduceTaskContext can share the same program.
  final AtomicReference<Program> programRef = new AtomicReference<>();

  return new CacheLoader<ContextCacheKey, BasicMapReduceTaskContext>() {
    @Override
    public BasicMapReduceTaskContext load(ContextCacheKey key) throws Exception {
      TaskAttemptID taskAttemptId = key.getTaskAttemptID();
      // taskAttemptId can be null if this is used from an org.apache.hadoop.mapreduce.Partitioner or
      // an org.apache.hadoop.io.RawComparator, in which case we get the JobId from the conf. Note that the
      // JobId isn't in the conf for the OutputCommitter#setupJob method, in which case we use the taskAttemptId.
      Path txFile = MainOutputCommitter.getTxFile(key.getConfiguration(),
                                                  taskAttemptId != null ? taskAttemptId.getJobID() : null);
      FileSystem fs = txFile.getFileSystem(key.getConfiguration());
      Preconditions.checkArgument(fs.exists(txFile));

      Transaction tx;
      try (FSDataInputStream txFileInputStream = fs.open(txFile)) {
        byte[] txByteArray = ByteStreams.toByteArray(txFileInputStream);
        tx = new TransactionCodec().decode(txByteArray);
      }

      MapReduceContextConfig contextConfig = new MapReduceContextConfig(key.getConfiguration());
      MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(key.getConfiguration());

      Program program = programRef.get();
      if (program == null) {
        // Creating the program is relatively cheap, so just create it and do a compare-and-set.
        programRef.compareAndSet(null, createProgram(contextConfig, classLoader.getProgramClassLoader()));
        program = programRef.get();
      }

      WorkflowProgramInfo workflowInfo = contextConfig.getWorkflowProgramInfo();
      DatasetFramework programDatasetFramework = workflowInfo == null
        ? datasetFramework
        : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo,
                                                                   program.getApplicationSpecification());
      // Set up the dataset framework context, if required.
      if (programDatasetFramework instanceof ProgramContextAware) {
        ProgramRunId programRunId = program.getId().run(ProgramRunners.getRunId(contextConfig.getProgramOptions()));
        ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programRunId));
      }

      MapReduceSpecification spec = program.getApplicationSpecification().getMapReduce().get(program.getName());
      MetricsCollectionService metricsCollectionService = null;
      MapReduceMetrics.TaskType taskType = null;
      String taskId = null;
      ProgramOptions options = contextConfig.getProgramOptions();

      // taskAttemptId is null when this is called from a Partitioner or a RawComparator (see above).
      if (taskAttemptId != null) {
        taskId = taskAttemptId.getTaskID().toString();
        if (MapReduceMetrics.TaskType.hasType(taskAttemptId.getTaskType())) {
          taskType = MapReduceMetrics.TaskType.from(taskAttemptId.getTaskType());
          // The metrics collection service is only needed for mapper and reducer tasks.
          metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
          // Narrow the user arguments to those scoped to this task type (e.g. "task.mapper.<key>").
          options = new SimpleProgramOptions(options.getProgramId(), options.getArguments(),
                                             new BasicArguments(RuntimeArguments.extractScope(
                                               "task", taskType.toString().toLowerCase(),
                                               contextConfig.getProgramOptions().getUserArguments().asMap())),
                                             options.isDebug());
        }
      }

      CConfiguration cConf = injector.getInstance(CConfiguration.class);
      TransactionSystemClient txClient = injector.getInstance(TransactionSystemClient.class);
      return new BasicMapReduceTaskContext(
        program, options, cConf, taskType, taskId, spec, workflowInfo, discoveryServiceClient,
        metricsCollectionService, txClient, tx, programDatasetFramework, classLoader.getPluginInstantiator(),
        contextConfig.getLocalizedResources(), secureStore, secureStoreManager, authorizationEnforcer,
        authenticationContext, messagingService, mapReduceClassLoader);
    }
  };
}
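For context, a CacheLoader like this is typically handed to a Guava LoadingCache. The sketch below is illustrative only; the eviction policy is an assumption, not CDAP's actual configuration:

  LoadingCache<ContextCacheKey, BasicMapReduceTaskContext> taskContexts =
      CacheBuilder.newBuilder()
          .expireAfterAccess(1, TimeUnit.MINUTES) // hypothetical eviction policy
          .build(createCacheLoader(injector));
  // Each task attempt fetches (or lazily creates) its context; get() may throw
  // ExecutionException if the loader fails.
  BasicMapReduceTaskContext context = taskContexts.get(key);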
use of co.cask.cdap.internal.app.runtime.SimpleProgramOptions in project cdap by caskdata.
the class WorkflowDriver method executeCustomAction.
private void executeCustomAction(final WorkflowActionNode node, InstantiatorFactory instantiator,
                                 final ClassLoader classLoader, WorkflowToken token) throws Exception {
  CustomActionExecutor customActionExecutor;
  // The node has a CustomActionSpecification, so it must represent a CustomAction added in 3.5.0.
  // Create an instance of the CustomActionExecutor using a CustomActionContext.
  WorkflowProgramInfo info = new WorkflowProgramInfo(workflowSpec.getName(), node.getNodeId(),
                                                     workflowRunId.getRun(), node.getNodeId(),
                                                     (BasicWorkflowToken) token);
  ProgramOptions actionOptions =
      new SimpleProgramOptions(programOptions.getProgramId(), programOptions.getArguments(),
                               new BasicArguments(RuntimeArguments.extractScope(
                                 ACTION_SCOPE, node.getNodeId(), programOptions.getUserArguments().asMap())));
  BasicCustomActionContext context =
      new BasicCustomActionContext(program, actionOptions, cConf, node.getCustomActionSpecification(), info,
                                   metricsCollectionService, datasetFramework, txClient, discoveryServiceClient,
                                   pluginInstantiator, secureStore, secureStoreManager, messagingService);
  customActionExecutor = new CustomActionExecutor(context, instantiator, classLoader);

  status.put(node.getNodeId(), node);
  runtimeStore.addWorkflowNodeState(workflowRunId, new WorkflowNodeStateDetail(node.getNodeId(), NodeStatus.RUNNING));
  Throwable failureCause = null;
  try {
    customActionExecutor.execute();
  } catch (Throwable t) {
    failureCause = t;
    throw t;
  } finally {
    status.remove(node.getNodeId());
    runtimeStore.updateWorkflowToken(workflowRunId, token);
    NodeStatus nodeStatus = failureCause == null ? NodeStatus.COMPLETED : NodeStatus.FAILED;
    nodeStates.put(node.getNodeId(), new WorkflowNodeState(node.getNodeId(), nodeStatus, null, failureCause));
    BasicThrowable defaultThrowable = failureCause == null ? null : new BasicThrowable(failureCause);
    runtimeStore.addWorkflowNodeState(workflowRunId,
                                      new WorkflowNodeStateDetail(node.getNodeId(), nodeStatus, null,
                                                                  defaultThrowable));
  }
}
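The RuntimeArguments.extractScope call is what narrows the workflow's user arguments to a single action node. An illustrative sketch, assuming ACTION_SCOPE is the "action" prefix and using hypothetical key names:

  Map<String, String> userArgs = ImmutableMap.of(
      "action.mynode.threshold", "10",  // scoped to the node named "mynode"
      "threshold", "5");                // unscoped default
  Map<String, String> scoped = RuntimeArguments.extractScope("action", "mynode", userArgs);
  // scoped.get("threshold") returns "10": the node-scoped value overrides the default.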
use of co.cask.cdap.internal.app.runtime.SimpleProgramOptions in project cdap by caskdata.
the class ProgramLifecycleService method startInternal.
/**
 * Starts a Program.
 *
 * Note that this method can only be called through an internal service; it does not perform an authorization
 * check before starting the program.
 *
 * @param programId the {@link ProgramId program} to start
 * @param systemArgs system arguments
 * @param userArgs user arguments
 * @param debug enable debug mode
 * @return {@link ProgramRuntimeService.RuntimeInfo}
 * @throws IOException if there is an error starting the program
 * @throws ProgramNotFoundException if the program is not found
 * @throws UnauthorizedException if the logged-in user is not authorized to start the program. To start a program,
 *   a user requires {@link Action#EXECUTE} on the program
 * @throws Exception if there were other exceptions while checking whether the current user is authorized to start
 *   the program
 */
public ProgramRuntimeService.RuntimeInfo startInternal(final ProgramId programId,
                                                       final Map<String, String> systemArgs,
                                                       final Map<String, String> userArgs,
                                                       boolean debug) throws Exception {
  LOG.info("{} tries to start {} Program {}", authenticationContext.getPrincipal().getName(),
           programId.getType(), programId.getProgram());
  ProgramDescriptor programDescriptor = store.loadProgram(programId);
  BasicArguments systemArguments = new BasicArguments(systemArgs);
  BasicArguments userArguments = new BasicArguments(userArgs);
  return runtimeService.run(programDescriptor,
                            new SimpleProgramOptions(programId, systemArguments, userArguments, debug));
}
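A hypothetical internal caller, with illustrative identifiers:

  ProgramId programId = NamespaceId.DEFAULT.app("myapp").program(ProgramType.SERVICE, "myservice");
  ProgramRuntimeService.RuntimeInfo info = programLifecycleService.startInternal(
      programId,
      Collections.<String, String>emptyMap(),     // system arguments
      ImmutableMap.of("input.path", "/tmp/in"),   // user arguments (hypothetical key)
      false);                                     // debug disabled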