Example usage of co.cask.cdap.proto.id.ArtifactId in project cdap (by caskdata):
from class AbstractAppCreationService, method createAppAndStartProgram.
/**
 * Deploys the application described by the given artifact summary and then starts
 * every program listed in {@code programIdMap} with its configured runtime arguments.
 *
 * @param artifactSummary summary of the artifact the app should be created from
 * @throws Exception if the deployment or a non-conflicting program start fails
 */
private void createAppAndStartProgram(ArtifactSummary artifactSummary) throws Exception {
  LOG.info("Creating and Starting {} App with config : {}", appId.getApplication(), appConfig);
  // Resolve the artifact either in the SYSTEM namespace or in the app's own namespace,
  // depending on the scope declared in the summary.
  ArtifactId artifactId;
  if (artifactSummary.getScope().equals(ArtifactScope.SYSTEM)) {
    artifactId = NamespaceId.SYSTEM.artifact(artifactSummary.getName(), artifactSummary.getVersion());
  } else {
    artifactId = appId.getNamespaceId().artifact(artifactSummary.getName(), artifactSummary.getVersion());
  }
  applicationLifecycleService.deployApp(appId.getParent(), appId.getApplication(), appId.getVersion(),
                                        Id.Artifact.fromEntityId(artifactId), appConfig,
                                        new DefaultProgramTerminator());
  for (Map.Entry<ProgramId, Map<String, String>> entry : programIdMap.entrySet()) {
    try {
      programLifecycleService.start(entry.getKey(), entry.getValue(), false);
    } catch (IOException ex) {
      // Might happen if the program is being started in parallel through UI
      LOG.debug("Tried to start {} program but had a conflict. {}", entry.getKey(), ex.getMessage());
    }
  }
}
Example usage of co.cask.cdap.proto.id.ArtifactId in project cdap (by caskdata):
from class Spark2Test, method deploy.
/**
 * Deploys the given application class into the given namespace as a
 * {@code 1.0-SNAPSHOT} artifact and returns a manager for the deployed app.
 *
 * @param namespaceId namespace to deploy into
 * @param appClass application class; its simple name is used as both artifact and app name
 * @return manager for the deployed application
 */
private ApplicationManager deploy(NamespaceId namespaceId, Class<? extends Application> appClass) throws Exception {
  String appName = appClass.getSimpleName();
  ArtifactId artifactId = new ArtifactId(namespaceId.getNamespace(), appName, "1.0-SNAPSHOT");
  addArtifact(artifactId, ARTIFACTS.get(appClass));
  ArtifactSummary summary = new ArtifactSummary(artifactId.getArtifact(), artifactId.getVersion());
  AppRequest<?> appRequest = new AppRequest<>(summary, null);
  return deployApplication(namespaceId.app(appName), appRequest);
}
Example usage of co.cask.cdap.proto.id.ArtifactId in project cdap (by caskdata):
from class StandaloneTester, method addSystemArtifact.
/**
 * Adds a system artifact to CDAP instance that is used for testing.
 *
 * @param name artifact name
 * @param version artifact version
 * @param artifactFile jar file containing the artifact
 * @param parentArtifacts parent artifact ranges, or {@code null} if the artifact has no parents
 * @throws Exception if the artifact cannot be added to the repository
 */
public void addSystemArtifact(String name, ArtifactVersion version, File artifactFile,
                              @Nullable Set<ArtifactRange> parentArtifacts) throws Exception {
  // System artifacts always live in the SYSTEM namespace.
  ArtifactId systemArtifact = NamespaceId.SYSTEM.artifact(name, version.getVersion());
  ArtifactRepository repository = standaloneMain.getInjector().getInstance(DefaultArtifactRepository.class);
  repository.addArtifact(Id.Artifact.fromEntityId(systemArtifact), artifactFile, parentArtifacts, null);
}
Example usage of co.cask.cdap.proto.id.ArtifactId in project cdap (by caskdata):
from class EntityIdKeyHelper, method getTargetIdIdFromKey.
/**
 * Reconstructs a {@link NamespacedEntityId} from the remaining string parts of an MDS key.
 * The {@code type} string selects which entity kind to decode; within each branch the
 * key parts are consumed in the exact order they were originally written.
 *
 * @param keySplitter splitter positioned at the entity-specific portion of the key
 * @param type serialized type tag, as stored in {@code TYPE_MAP}
 * @return the decoded entity id
 * @throws IllegalArgumentException if {@code type} does not match any known entity kind
 */
public static NamespacedEntityId getTargetIdIdFromKey(MDSKey.Splitter keySplitter, String type) {
  if (type.equals(TYPE_MAP.get(NamespaceId.class))) {
    String namespace = keySplitter.getString();
    return new NamespaceId(namespace);
  } else if (type.equals(TYPE_MAP.get(ProgramId.class))) {
    String namespace = keySplitter.getString();
    String application = keySplitter.getString();
    String programType = keySplitter.getString();
    String program = keySplitter.getString();
    return new ProgramId(namespace, application, programType, program);
  } else if (type.equals(TYPE_MAP.get(ApplicationId.class))) {
    String namespace = keySplitter.getString();
    String application = keySplitter.getString();
    return new ApplicationId(namespace, application);
  } else if (type.equals(TYPE_MAP.get(ArtifactId.class))) {
    String namespace = keySplitter.getString();
    String artifactName = keySplitter.getString();
    String artifactVersion = keySplitter.getString();
    return new ArtifactId(namespace, artifactName, artifactVersion);
  } else if (type.equals(TYPE_MAP.get(DatasetId.class))) {
    String namespace = keySplitter.getString();
    String dataset = keySplitter.getString();
    return new DatasetId(namespace, dataset);
  } else if (type.equals(TYPE_MAP.get(StreamId.class))) {
    String namespace = keySplitter.getString();
    String stream = keySplitter.getString();
    return new StreamId(namespace, stream);
  } else if (type.equals(TYPE_MAP.get(StreamViewId.class))) {
    String namespace = keySplitter.getString();
    String stream = keySplitter.getString();
    String view = keySplitter.getString();
    return new StreamViewId(namespace, stream, view);
  }
  throw new IllegalArgumentException("Illegal Type " + type + " of metadata source.");
}
Example usage of co.cask.cdap.proto.id.ArtifactId in project cdap (by caskdata):
from class TestFrameworkTestRun, method testAppWithPlugin.
/**
 * End-to-end test of an app that uses a plugin artifact: deploys the app and plugin
 * artifacts, then verifies that the worker, service, workflow and Spark programs all
 * run successfully and that their plugin-derived side effects (dataset contents and
 * workflow metrics) are observable.
 */
@Test
public void testAppWithPlugin() throws Exception {
  // Deploy the app artifact and a plugin artifact that extends it.
  ArtifactId artifactId = NamespaceId.DEFAULT.artifact("app-with-plugin", "1.0.0-SNAPSHOT");
  addAppArtifact(artifactId, AppWithPlugin.class);
  ArtifactId pluginArtifactId = NamespaceId.DEFAULT.artifact("test-plugin", "1.0.0-SNAPSHOT");
  addPluginArtifact(pluginArtifactId, artifactId, ToStringPlugin.class);
  ApplicationId appId = NamespaceId.DEFAULT.app("AppWithPlugin");
  // Parameterized AppRequest (was a raw type) to match usage elsewhere and avoid
  // unchecked warnings; no config object is supplied.
  AppRequest<?> createRequest =
    new AppRequest<>(new ArtifactSummary(artifactId.getArtifact(), artifactId.getVersion()));
  ApplicationManager appManager = deployApplication(appId, createRequest);

  // Worker: start it and wait until it stops, then confirm a COMPLETED run exists.
  final WorkerManager workerManager = appManager.getWorkerManager(AppWithPlugin.WORKER);
  workerManager.start();
  workerManager.waitForStatus(false, 5, 1);
  Tasks.waitFor(false, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      return workerManager.getHistory(ProgramRunStatus.COMPLETED).isEmpty();
    }
  }, 5, TimeUnit.SECONDS, 10, TimeUnit.MILLISECONDS);

  // Service: start, hit an endpoint, stop, then confirm a KILLED run exists.
  final ServiceManager serviceManager = appManager.getServiceManager(AppWithPlugin.SERVICE);
  serviceManager.start();
  serviceManager.waitForStatus(true, 1, 10);
  URL serviceURL = serviceManager.getServiceURL(5, TimeUnit.SECONDS);
  callServiceGet(serviceURL, "dummy");
  serviceManager.stop();
  serviceManager.waitForStatus(false, 1, 10);
  Tasks.waitFor(false, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      return serviceManager.getHistory(ProgramRunStatus.KILLED).isEmpty();
    }
  }, 5, TimeUnit.SECONDS, 10, TimeUnit.MILLISECONDS);

  // Workflow: run to completion, then verify the value it wrote and its destroy metric.
  WorkflowManager workflowManager = appManager.getWorkflowManager(AppWithPlugin.WORKFLOW);
  workflowManager.start();
  workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  List<RunRecord> runRecords = workflowManager.getHistory();
  Assert.assertNotEquals(ProgramRunStatus.FAILED, runRecords.get(0).getStatus());
  DataSetManager<KeyValueTable> workflowTableManager = getDataset(AppWithPlugin.WORKFLOW_TABLE);
  String value = Bytes.toString(workflowTableManager.get().read("val"));
  Assert.assertEquals(AppWithPlugin.TEST, value);
  Map<String, String> workflowTags = ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, NamespaceId.DEFAULT.getNamespace(), Constants.Metrics.Tag.APP, "AppWithPlugin", Constants.Metrics.Tag.WORKFLOW, AppWithPlugin.WORKFLOW, Constants.Metrics.Tag.RUN_ID, runRecords.get(0).getPid());
  getMetricsManager().waitForTotalMetricCount(workflowTags, String.format("user.destroy.%s", AppWithPlugin.WORKFLOW), 1, 60, TimeUnit.SECONDS);

  // Testing Spark Plugins. First send some data to stream for the Spark program to process
  StreamManager streamManager = getStreamManager(AppWithPlugin.SPARK_STREAM);
  for (int i = 0; i < 5; i++) {
    streamManager.send("Message " + i);
  }
  SparkManager sparkManager = appManager.getSparkManager(AppWithPlugin.SPARK).start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 2, TimeUnit.MINUTES);

  // Verify the Spark result: one row per message, transformed by the plugin.
  DataSetManager<Table> dataSetManager = getDataset(AppWithPlugin.SPARK_TABLE);
  Table table = dataSetManager.get();
  try (Scanner scanner = table.scan(null, null)) {
    for (int i = 0; i < 5; i++) {
      Row row = scanner.next();
      Assert.assertNotNull(row);
      String expected = "Message " + i + " " + AppWithPlugin.TEST;
      Assert.assertEquals(expected, Bytes.toString(row.getRow()));
      Assert.assertEquals(expected, Bytes.toString(row.get(expected)));
    }
    // There shouldn't be any more rows in the table.
    Assert.assertNull(scanner.next());
  }
}
End of aggregated usage examples for co.cask.cdap.proto.id.ArtifactId.