Use of co.cask.cdap.app.runtime.Arguments in project cdap by caskdata.
Example: class SparkProgramRunner, method run.
/**
 * Starts the given Spark program and returns a controller for it.
 *
 * <p>Resources created along the way (plugin instantiator, runtime context) are pushed onto a
 * {@code Deque} of {@link Closeable}s; on any failure before the service is started they are all
 * closed via {@code closeAllQuietly}. On success, a listener created by
 * {@code createRuntimeServiceListener(closeables)} is attached to the runtime service —
 * presumably to close the same resources when the service terminates (confirm in
 * createRuntimeServiceListener).
 */
@Override
public ProgramController run(Program program, ProgramOptions options) {
// Get the RunId first. It is used for the creation of the ClassLoader closing thread.
Arguments arguments = options.getArguments();
RunId runId = ProgramRunners.getRunId(options);
// addFirst() is used below so that the most recently created resource is closed first.
Deque<Closeable> closeables = new LinkedList<>();
try {
// Extract and verify parameters
ApplicationSpecification appSpec = program.getApplicationSpecification();
Preconditions.checkNotNull(appSpec, "Missing application specification.");
ProgramType processorType = program.getType();
Preconditions.checkNotNull(processorType, "Missing processor type.");
Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only Spark process type is supported.");
SparkSpecification spec = appSpec.getSpark().get(program.getName());
Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());
// Host on which this program runner executes; required by the runtime context and submitter.
String host = options.getArguments().getOption(ProgramOptionConstants.HOST);
Preconditions.checkArgument(host != null, "No hostname is provided");
// Get the WorkflowProgramInfo if it is started by Workflow
WorkflowProgramInfo workflowInfo = WorkflowProgramInfo.create(arguments);
// When launched from a Workflow, dataset names may be remapped via the workflow-aware framework.
DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);
// Setup dataset framework context, if required
if (programDatasetFramework instanceof ProgramContextAware) {
ProgramId programId = program.getId();
((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
}
// Plugin instantiator may be null when the program uses no plugins; only track it if created.
PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
if (pluginInstantiator != null) {
closeables.addFirst(pluginInstantiator);
}
SparkRuntimeContext runtimeContext = new SparkRuntimeContext(new Configuration(hConf), program, options, cConf, host, txClient, programDatasetFramework, discoveryServiceClient, metricsCollectionService, streamAdmin, workflowInfo, pluginInstantiator, secureStore, secureStoreManager, authorizationEnforcer, authenticationContext, messagingService, serviceAnnouncer, pluginFinder, locationFactory);
closeables.addFirst(runtimeContext);
// Instantiate the user's Spark program class via the instantiator factory.
Spark spark;
try {
spark = new InstantiatorFactory(false).get(TypeToken.of(program.<Spark>getMainClass())).create();
} catch (Exception e) {
LOG.error("Failed to instantiate Spark class for {}", spec.getClassName(), e);
throw Throwables.propagate(e);
}
// Local mode submits in-process; distributed mode submits to the cluster scheduler queue.
SparkSubmitter submitter = SparkRuntimeContextConfig.isLocal(hConf) ? new LocalSparkSubmitter() : new DistributedSparkSubmitter(hConf, locationFactory, host, runtimeContext, options.getArguments().getOption(Constants.AppFabric.APP_SCHEDULER_QUEUE));
Service sparkRuntimeService = new SparkRuntimeService(cConf, spark, getPluginArchive(options), runtimeContext, submitter, locationFactory);
// The listener takes over responsibility for closing the tracked closeables once attached.
sparkRuntimeService.addListener(createRuntimeServiceListener(closeables), Threads.SAME_THREAD_EXECUTOR);
ProgramController controller = new SparkProgramController(sparkRuntimeService, runtimeContext);
LOG.debug("Starting Spark Job. Context: {}", runtimeContext);
// In local mode or on a Kerberos-enabled cluster, start as the current user; otherwise
// impersonate the configured HDFS user so the job can access HDFS.
if (SparkRuntimeContextConfig.isLocal(hConf) || UserGroupInformation.isSecurityEnabled()) {
sparkRuntimeService.start();
} else {
ProgramRunners.startAsUser(cConf.get(Constants.CFG_HDFS_USER), sparkRuntimeService);
}
return controller;
} catch (Throwable t) {
// Any failure before a successful start: release everything acquired so far, newest first.
closeAllQuietly(closeables);
throw Throwables.propagate(t);
}
}
Use of co.cask.cdap.app.runtime.Arguments in project cdap by caskdata.
Example: class FlowTest, method testAppWithArgs.
/**
 * Deploys {@code ArgumentCheckApp}, starts all of its programs with the runtime argument
 * {@code arg=test}, and verifies the service responds with 200 — which only happens when the
 * argument was delivered (the service's initialize would otherwise have thrown).
 *
 * <p>Fixes over the previous version: the {@link HttpURLConnection} is now disconnected, and the
 * started controllers are stopped in a {@code finally} block so programs do not leak across tests
 * when an assertion fails.
 */
@Test
public void testAppWithArgs() throws Exception {
  final ApplicationWithPrograms app = AppFabricTestHelper.deployApplicationWithManager(ArgumentCheckApp.class, TEMP_FOLDER_SUPPLIER);
  // Only running flow is good. But, in case of service, we need to send something to service as it's lazy loading
  List<ProgramController> controllers = Lists.newArrayList();
  for (ProgramDescriptor programDescriptor : app.getPrograms()) {
    Arguments userArgs = new BasicArguments(ImmutableMap.of("arg", "test"));
    controllers.add(AppFabricTestHelper.submit(app, programDescriptor.getSpecification().getClassName(), userArgs, TEMP_FOLDER_SUPPLIER));
  }
  try {
    DiscoveryServiceClient discoveryServiceClient = AppFabricTestHelper.getInjector().getInstance(DiscoveryServiceClient.class);
    String discoverableName = String.format("service.%s.%s.%s", DefaultId.NAMESPACE.getNamespace(), "ArgumentCheckApp", "SimpleService");
    Discoverable discoverable = new RandomEndpointStrategy(discoveryServiceClient.discover(discoverableName)).pick(5, TimeUnit.SECONDS);
    Assert.assertNotNull(discoverable);
    URL url = new URL(String.format("http://%s:%d/v3/namespaces/default/apps/%s/services/%s/methods/%s", discoverable.getSocketAddress().getHostName(), discoverable.getSocketAddress().getPort(), "ArgumentCheckApp", "SimpleService", "ping"));
    HttpURLConnection urlConn = (HttpURLConnection) url.openConnection();
    try {
      // this would fail had the service been started without the argument (initialize would have thrown)
      Assert.assertEquals(200, urlConn.getResponseCode());
    } finally {
      // Release the underlying connection; previously it was never disconnected.
      urlConn.disconnect();
    }
  } finally {
    // Stop every program we started, even when an assertion above fails.
    for (ProgramController controller : controllers) {
      controller.stop().get();
    }
  }
}
Use of co.cask.cdap.app.runtime.Arguments in project cdap by caskdata.
Example: class ProgramOptionsCodec, method deserialize.
/**
 * Reconstructs a {@link ProgramOptions} from its JSON form: the "programId", "arguments",
 * "userArguments", and "debug" members produced by {@code serialize}.
 */
@Override
public ProgramOptions deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
  JsonObject object = json.getAsJsonObject();
  // Delegate each structured member back to the Gson context for type-aware decoding.
  ProgramId programId = context.deserialize(object.get("programId"), ProgramId.class);
  Arguments systemArguments = context.deserialize(object.get("arguments"), Arguments.class);
  Arguments userArguments = context.deserialize(object.get("userArguments"), Arguments.class);
  boolean debugEnabled = object.get("debug").getAsBoolean();
  return new SimpleProgramOptions(programId, systemArguments, userArguments, debugEnabled);
}
Use of co.cask.cdap.app.runtime.Arguments in project cdap by caskdata.
Example: class ProgramOptionsCodec, method serialize.
/**
 * Writes a {@link ProgramOptions} as a JSON object with "programId", "arguments",
 * "userArguments", and "debug" members — the mirror image of {@code deserialize}.
 */
@Override
public JsonElement serialize(ProgramOptions src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject result = new JsonObject();
  // Structured members go through the Gson context so registered adapters apply.
  result.add("programId", context.serialize(src.getProgramId(), ProgramId.class));
  result.add("arguments", context.serialize(src.getArguments(), Arguments.class));
  result.add("userArguments", context.serialize(src.getUserArguments(), Arguments.class));
  result.addProperty("debug", src.isDebug());
  return result;
}
Use of co.cask.cdap.app.runtime.Arguments in project cdap by caskdata.
Example: class DistributedProgramRunner, method addArtifactPluginFiles.
/**
 * If the program has a plugin directory, bundles it into a jar, registers it for container
 * localization (both expanded and as-is), and returns new {@link ProgramOptions} whose system
 * arguments point at the container-local names. Otherwise returns the options unchanged.
 *
 * @param options the original program options
 * @param localizeResources map to receive the resources to localize; mutated by this method
 * @param tempDir directory in which the temporary archive jar is created
 * @throws IOException if creating the archive jar fails
 */
private ProgramOptions addArtifactPluginFiles(ProgramOptions options, Map<String, LocalizeResource> localizeResources, File tempDir) throws IOException {
  Arguments sysArgs = options.getArguments();
  // Nothing to localize when no plugin directory was provided.
  if (!sysArgs.hasOption(ProgramOptionConstants.PLUGIN_DIR)) {
    return options;
  }
  // Bundle the local plugin directory into a single archive jar.
  File pluginDir = new File(sysArgs.getOption(ProgramOptionConstants.PLUGIN_DIR));
  File pluginArchive = new File(tempDir, "artifacts.jar");
  BundleJarUtil.createJar(pluginDir, pluginArchive);
  // Localize plugins to two files, one expanded into a directory, one not.
  localizeResources.put("artifacts", new LocalizeResource(pluginArchive, true));
  localizeResources.put("artifacts_archive.jar", new LocalizeResource(pluginArchive, false));
  // Rewrite the system arguments so the program sees the container-local locations.
  Map<String, String> updatedArgs = Maps.newHashMap(sysArgs.asMap());
  updatedArgs.put(ProgramOptionConstants.PLUGIN_DIR, "artifacts");
  updatedArgs.put(ProgramOptionConstants.PLUGIN_ARCHIVE, "artifacts_archive.jar");
  return new SimpleProgramOptions(options.getProgramId(), new BasicArguments(updatedArgs), options.getUserArguments(), options.isDebug());
}
Aggregations