use of io.cdap.cdap.proto.ApplicationDetail in project cdap by caskdata.
the class LocalApplicationDetailFetcher method list.
/**
* Get a list of {@link ApplicationDetail} for all applications in the given namespace
*
* @param namespace the name of the namespace to get the list of applications
* @return a list of {@link ApplicationDetail} for all applications in the given namespace
* @throws IOException if failed to get the list of {@link ApplicationDetail}
* @throws NamespaceNotFoundException if the given namespace doesn't exist
*/
@Override
public List<ApplicationDetail> list(String namespace) throws IOException, NamespaceNotFoundException {
NamespaceId namespaceId = new NamespaceId(namespace);
List<ApplicationDetail> detailList = Collections.emptyList();
try {
// Check the existence of the namespace explicitly so a NamespaceNotFoundException is thrown if it doesn't exist.
if (!namespaceQueryAdmin.exists(namespaceId)) {
throw new NamespaceNotFoundException(namespaceId);
}
detailList = applicationLifecycleService.getApps(namespaceId);
} catch (Exception e) {
Throwables.propagateIfPossible(e, NamespaceNotFoundException.class, IOException.class);
throw new IOException(e);
}
return detailList;
}
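A minimal usage sketch for the fetcher method above, assuming an already-constructed LocalApplicationDetailFetcher (e.g. injected) and an existing "default" namespace; the printApplications helper is hypothetical and not part of the original source.
// Sketch only: "fetcher" is an assumed, already-available LocalApplicationDetailFetcher.
void printApplications(LocalApplicationDetailFetcher fetcher) throws IOException, NamespaceNotFoundException {
// Throws NamespaceNotFoundException if the "default" namespace does not exist.
List<ApplicationDetail> details = fetcher.list("default");
for (ApplicationDetail detail : details) {
// Each detail carries the application name and the artifact it was created from.
System.out.println(detail.getName() + " -> " + detail.getArtifact().getVersion());
}
}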
use of io.cdap.cdap.proto.ApplicationDetail in project cdap by caskdata.
the class DataPipelineTest method testSimpleUpgradePipelinesWithArtifactScope.
/* Tests upgrade for a deployed application. Also tests the artifact scope parameter for only considering artifacts
in a given scope.
1. Deploy an application with the older application artifact (1.0.0) and the older filter plugin version (1.0.0).
2. Add new versions of the application artifact (0.0.9, 1.1.0, 1.2.0) and filter plugin artifacts (1.0.5, 1.1.0) in
SYSTEM scope (in test class setup).
3. Also deploy a snapshot version 1.0.8 of the plugin artifact in USER scope.
4. Upgrade the older deployed application with the artifact scope set to USER for the upgrade.
5. Verify that after upgrading, the application artifact and the filter plugin artifact are upgraded to the latest
versions in the config, and that the filter plugin uses the snapshot plugin version 1.0.8 from USER scope.
*/
@Test
public void testSimpleUpgradePipelinesWithArtifactScope() throws Exception {
ArtifactSelectorConfig currentArtifactSelector = new ArtifactSelectorConfig(ArtifactScope.USER.name(), "test-plugins", "1.0.0");
Engine engine = Engine.MAPREDUCE;
String sourceName = "testSource" + engine.name();
String sinkName = "testSink" + engine.name();
ETLBatchConfig etlConfig = ETLBatchConfig.builder()
  .setEngine(engine)
  .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
  .addStage(new ETLStage("filter",
    PluggableFilterTransform.getPlugin(ValueFilter.NAME,
      ValueFilter.getProperties("${field}", "${value}"), currentArtifactSelector)))
  .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
  .addConnection("source", "filter")
  .addConnection("filter", "sink")
  .build();
AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
ApplicationId appId = NamespaceId.DEFAULT.app("sparkProgramTest");
// Deploy app with artifact version 1.0.0.
ApplicationManager appManager = deployApplication(appId, appRequest);
ApplicationDetail oldAppDetail = getAppDetail(appId);
ETLBatchConfig oldBatchConfig = GSON.fromJson(oldAppDetail.getConfiguration(), ETLBatchConfig.class);
Map<String, ETLStage> oldStageMap = oldBatchConfig.getStages().stream().collect(Collectors.toMap(ETLStage::getName, e -> e));
// Upgrade application with artifact scope as USER.
appManager.upgrade(Collections.singleton(ArtifactScope.USER.toString()), false);
ApplicationDetail upgradedAppDetail = getAppDetail(appId);
ETLBatchConfig newBatchConfig = GSON.fromJson(upgradedAppDetail.getConfiguration(), ETLBatchConfig.class);
Map<String, ETLStage> newStageMap = newBatchConfig.getStages().stream().collect(Collectors.toMap(ETLStage::getName, e -> e));
// Compare stages that should be same after upgrade.
Assert.assertEquals(oldStageMap.get("source"), newStageMap.get("source"));
Assert.assertEquals(oldStageMap.get("sink"), newStageMap.get("sink"));
// Verify that after the upgrade, the application's artifact version is upgraded to the latest available version.
Assert.assertEquals(UPGRADE_APP_ARTIFACT_ID_2.getVersion(), upgradedAppDetail.getArtifact().getVersion());
// Check that the filter stage's plugin artifact is upgraded to the snapshot version 1.0.8 from USER scope.
ETLPlugin upgradedPlugin = newStageMap.get("filter").getPlugin();
Assert.assertEquals("1.0.8", upgradedPlugin.getArtifactConfig().getVersion());
Assert.assertEquals(ArtifactScope.USER, ArtifactScope.valueOf(upgradedPlugin.getArtifactConfig().getScope().toUpperCase()));
}
use of io.cdap.cdap.proto.ApplicationDetail in project cdap by caskdata.
the class DataPipelineTest method testSimpleUpgradePipelines.
/* Tests upgrade for a deployed application.
1. Deploy an application with the older application artifact (1.0.0) and the older filter plugin version (1.0.0).
2. Add new versions of the application artifact (0.0.9, 1.1.0, 1.2.0) and filter plugin artifacts (1.0.5, 1.1.0) in
SYSTEM scope (in test class setup).
3. Upgrade the older deployed application.
4. Verify that after upgrading, the application artifact and the filter plugin artifact are upgraded to the latest
versions in the config.
*/
@Test
public void testSimpleUpgradePipelines() throws Exception {
ArtifactSelectorConfig currentArtifactSelector = new ArtifactSelectorConfig(ArtifactScope.USER.name(), "test-plugins", "1.0.0");
Engine engine = Engine.MAPREDUCE;
String sourceName = "testSource" + engine.name();
String sinkName = "testSink" + engine.name();
ETLBatchConfig etlConfig = ETLBatchConfig.builder()
  .setEngine(engine)
  .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
  .addStage(new ETLStage("filter",
    PluggableFilterTransform.getPlugin(ValueFilter.NAME,
      ValueFilter.getProperties("${field}", "${value}"), currentArtifactSelector)))
  .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
  .addConnection("source", "filter")
  .addConnection("filter", "sink")
  .build();
AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
ApplicationId appId = NamespaceId.DEFAULT.app("sparkProgramTest");
// Deploy app with artifact version 1.0.0.
ApplicationManager appManager = deployApplication(appId, appRequest);
ApplicationDetail oldAppDetail = getAppDetail(appId);
ETLBatchConfig oldBatchConfig = GSON.fromJson(oldAppDetail.getConfiguration(), ETLBatchConfig.class);
Map<String, ETLStage> oldStageMap = oldBatchConfig.getStages().stream().collect(Collectors.toMap(ETLStage::getName, e -> e));
// Upgrade application.
appManager.upgrade();
ApplicationDetail upgradedAppDetail = getAppDetail(appId);
ETLBatchConfig newBatchConfig = GSON.fromJson(upgradedAppDetail.getConfiguration(), ETLBatchConfig.class);
Map<String, ETLStage> newStageMap = newBatchConfig.getStages().stream().collect(Collectors.toMap(ETLStage::getName, e -> e));
// Compare stages that should be same after upgrade.
Assert.assertEquals(oldStageMap.get("source"), newStageMap.get("source"));
Assert.assertEquals(oldStageMap.get("sink"), newStageMap.get("sink"));
// Verify that after the upgrade, the application's artifact version is upgraded to the latest available version.
Assert.assertEquals(UPGRADE_APP_ARTIFACT_ID_2.getVersion(), upgradedAppDetail.getArtifact().getVersion());
// Check that the filter stage's plugin artifact is upgraded to the latest version, 1.1.0, from SYSTEM scope.
ETLPlugin upgradedPlugin = newStageMap.get("filter").getPlugin();
Assert.assertEquals("1.1.0", upgradedPlugin.getArtifactConfig().getVersion());
Assert.assertEquals(ArtifactScope.SYSTEM, ArtifactScope.valueOf(upgradedPlugin.getArtifactConfig().getScope().toUpperCase()));
}
use of io.cdap.cdap.proto.ApplicationDetail in project cdap by caskdata.
the class DataPipelineTest method testSimpleUpgradePipelinesWithSnapshotArtifact.
/* Tests upgrade for a deployed application. Also tests that SNAPSHOT artifacts are considered for upgrade.
1. Deploy an application with the older application artifact (1.0.0) and the older filter plugin version (1.0.0).
2. Add new versions of the application artifact (0.0.9, 1.1.0, 1.2.0) and filter plugin artifacts (1.0.5, 1.1.0).
3. Also deploy a snapshot version of the app artifact, 1.3.0-SNAPSHOT, with plugin artifact 1.1.1-SNAPSHOT bound to it.
4. Upgrade the older deployed application.
5. Verify that after upgrading, the application artifact and the filter plugin artifact are upgraded to the latest
versions in the config, using the snapshot versions for both.
*/
@Test
public void testSimpleUpgradePipelinesWithSnapshotArtifact() throws Exception {
ArtifactSelectorConfig currentArtifactSelector = new ArtifactSelectorConfig(ArtifactScope.USER.name(), "test-plugins", "1.0.0");
Engine engine = Engine.MAPREDUCE;
String sourceName = "testSource" + engine.name();
String sinkName = "testSink" + engine.name();
ETLBatchConfig etlConfig = ETLBatchConfig.builder()
  .setEngine(engine)
  .addStage(new ETLStage("source", MockSource.getPlugin(sourceName)))
  .addStage(new ETLStage("filter",
    PluggableFilterTransform.getPlugin(ValueFilter.NAME,
      ValueFilter.getProperties("${field}", "${value}"), currentArtifactSelector)))
  .addStage(new ETLStage("sink", MockSink.getPlugin(sinkName)))
  .addConnection("source", "filter")
  .addConnection("filter", "sink")
  .build();
AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
ApplicationId appId = NamespaceId.DEFAULT.app("sparkProgramTest");
// Deploy app with artifact version 1.0.0.
ApplicationManager appManager = deployApplication(appId, appRequest);
ApplicationDetail oldAppDetail = getAppDetail(appId);
ETLBatchConfig oldBatchConfig = GSON.fromJson(oldAppDetail.getConfiguration(), ETLBatchConfig.class);
Map<String, ETLStage> oldStageMap = oldBatchConfig.getStages().stream().collect(Collectors.toMap(ETLStage::getName, e -> e));
// Upgrade application with allowSnapshot set to true.
appManager.upgrade(Collections.emptySet(), true);
ApplicationDetail upgradedAppDetail = getAppDetail(appId);
ETLBatchConfig newBatchConfig = GSON.fromJson(upgradedAppDetail.getConfiguration(), ETLBatchConfig.class);
Map<String, ETLStage> newStageMap = newBatchConfig.getStages().stream().collect(Collectors.toMap(ETLStage::getName, e -> e));
// Compare stages that should be same after upgrade.
Assert.assertEquals(oldStageMap.get("source"), newStageMap.get("source"));
Assert.assertEquals(oldStageMap.get("sink"), newStageMap.get("sink"));
// Verify that after the upgrade, the application's artifact version is upgraded to the latest available version.
Assert.assertEquals(UPGRADE_APP_ARTIFACT_ID_3_SNAPSHOT.getVersion(), upgradedAppDetail.getArtifact().getVersion());
// Check that the filter stage's plugin artifact is upgraded to the snapshot version 1.1.1-SNAPSHOT from USER scope.
ETLPlugin upgradedPlugin = newStageMap.get("filter").getPlugin();
Assert.assertEquals("1.1.1-SNAPSHOT", upgradedPlugin.getArtifactConfig().getVersion());
Assert.assertEquals(ArtifactScope.USER, ArtifactScope.valueOf(upgradedPlugin.getArtifactConfig().getScope().toUpperCase()));
}
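For reference, the three upgrade tests above differ mainly in which overload of ApplicationManager.upgrade they call; a hedged summary sketch, assuming appManager is the ApplicationManager returned by deployApplication, is:
// Default upgrade, as in testSimpleUpgradePipelines.
appManager.upgrade();
// Upgrade with candidate artifacts restricted to USER scope, as in testSimpleUpgradePipelinesWithArtifactScope.
appManager.upgrade(Collections.singleton(ArtifactScope.USER.toString()), false);
// Upgrade with allowSnapshot set to true so SNAPSHOT artifacts are considered,
// as in testSimpleUpgradePipelinesWithSnapshotArtifact.
appManager.upgrade(Collections.emptySet(), true);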
use of io.cdap.cdap.proto.ApplicationDetail in project cdap by caskdata.
the class AppLifecycleHttpHandler method getApplicationDetails.
/**
* Gets {@link ApplicationDetail} for a set of applications. It expects a POST body containing an array of objects,
* with each object specifying the application id and an optional version. E.g.
*
* <pre>
* {@code
* [
* {"appId":"XYZ", "version":"1.2.3"},
* {"appId":"ABC"},
* {"appId":"FOO", "version":"2.3.4"},
* ]
* }
* </pre>
* The response will be an array of {@link BatchApplicationDetail} objects, each indicating either a success (200) or
* a failure for the corresponding requested application, in the same order as the request.
*/
@POST
@Path("/appdetail")
public void getApplicationDetails(FullHttpRequest request, HttpResponder responder, @PathParam("namespace-id") String namespace) throws Exception {
List<ApplicationId> appIds = decodeAndValidateBatchApplication(validateNamespace(namespace), request);
Map<ApplicationId, ApplicationDetail> details = applicationLifecycleService.getAppDetails(appIds);
List<BatchApplicationDetail> result = new ArrayList<>();
for (ApplicationId appId : appIds) {
ApplicationDetail detail = details.get(appId);
if (detail == null) {
result.add(new BatchApplicationDetail(new NotFoundException(appId)));
} else {
result.add(new BatchApplicationDetail(detail));
}
}
responder.sendJson(HttpResponseStatus.OK, GSON.toJson(result));
}
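A hedged client-side sketch for exercising the batch endpoint above: the /v3/namespaces/{namespace-id} path prefix and the localhost:11015 router address are assumptions, while the /appdetail suffix and the request body shape come from the javadoc.
// Standalone sketch: POSTs the batch request described in the javadoc above.
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class AppDetailBatchClient {
  public static void main(String[] args) throws Exception {
    // Request body: an array of {appId, optional version} objects, as in the javadoc example.
    String body = "[{\"appId\":\"XYZ\",\"version\":\"1.2.3\"},{\"appId\":\"ABC\"}]";
    // Assumed base URL; the handler itself only declares the /appdetail suffix.
    URL url = new URL("http://localhost:11015/v3/namespaces/default/appdetail");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", "application/json");
    try (OutputStream os = conn.getOutputStream()) {
      os.write(body.getBytes(StandardCharsets.UTF_8));
    }
    // On success (200), the body is a JSON array of BatchApplicationDetail entries,
    // one per requested application, in request order.
    System.out.println("HTTP " + conn.getResponseCode());
  }
}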