use of co.cask.cdap.proto.id.ApplicationId in project cdap by caskdata.
the class LocalArtifactLoaderStage method process.
/**
* Instantiates the Application class and calls configure() on it to generate the {@link ApplicationSpecification}.
*
* @param deploymentInfo information needed to deploy the application, such as the artifact to create it from
* and the application config to use.
*/
@Override
public void process(AppDeploymentInfo deploymentInfo) throws Exception {
  ArtifactId artifactId = deploymentInfo.getArtifactId();
  Location artifactLocation = deploymentInfo.getArtifactLocation();
  String appClassName = deploymentInfo.getAppClassName();
  String appVersion = deploymentInfo.getApplicationVersion();
  String configString = deploymentInfo.getConfigString();
  EntityImpersonator classLoaderImpersonator = new EntityImpersonator(artifactId, impersonator);
  ClassLoader artifactClassLoader = artifactRepository.createArtifactClassLoader(artifactLocation,
                                                                                 classLoaderImpersonator);
  getContext().setProperty(LocalApplicationManager.ARTIFACT_CLASSLOADER_KEY, artifactClassLoader);
  InMemoryConfigurator inMemoryConfigurator =
    new InMemoryConfigurator(cConf, Id.Namespace.fromEntityId(deploymentInfo.getNamespaceId()),
                             Id.Artifact.fromEntityId(artifactId), appClassName, artifactRepository,
                             artifactClassLoader, deploymentInfo.getApplicationName(),
                             deploymentInfo.getApplicationVersion(), configString);
  ListenableFuture<ConfigResponse> result = inMemoryConfigurator.config();
  ConfigResponse response = result.get(120, TimeUnit.SECONDS);
  if (response.getExitCode() != 0) {
    throw new IllegalArgumentException("Failed to configure application: " + deploymentInfo);
  }
  ApplicationSpecification specification = adapter.fromJson(response.get());
  ApplicationId applicationId;
  if (appVersion == null) {
    applicationId = deploymentInfo.getNamespaceId().app(specification.getName());
  } else {
    applicationId = deploymentInfo.getNamespaceId().app(specification.getName(), appVersion);
  }
  authorizationEnforcer.enforce(applicationId, authenticationContext.getPrincipal(), Action.ADMIN);
  emit(new ApplicationDeployable(deploymentInfo.getArtifactId(), deploymentInfo.getArtifactLocation(),
                                 applicationId, specification, store.getApplication(applicationId),
                                 ApplicationDeployScope.USER, deploymentInfo.getOwnerPrincipal(),
                                 deploymentInfo.canUpdateSchedules()));
}
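The version branch above simply chooses between the two NamespaceId.app(...) overloads. A minimal self-contained sketch of that ID construction, using the NamespaceId.DEFAULT constant that also appears in the test snippet further down (the application name and version literals are illustrative):

import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.NamespaceId;

public class ApplicationIdSketch {
  public static void main(String[] args) {
    // Without an explicit version, a default version is used for the app
    ApplicationId unversioned = NamespaceId.DEFAULT.app("myApp");
    // With an explicit version, several versions of the same app can coexist
    ApplicationId versioned = NamespaceId.DEFAULT.app("myApp", "1.0.0");
    System.out.println(unversioned + " vs " + versioned);
  }
}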
use of co.cask.cdap.proto.id.ApplicationId in project cdap by caskdata.
the class ApplicationVerificationStage method process.
/**
* Receives an input containing application specification and location
* and verifies both.
*
* @param input An instance of {@link ApplicationDeployable}
*/
@Override
public void process(ApplicationDeployable input) throws Exception {
  Preconditions.checkNotNull(input);
  ApplicationSpecification specification = input.getSpecification();
  ApplicationId appId = input.getApplicationId();
  // verify that the owner principal is valid if one was given
  if (input.getOwnerPrincipal() != null) {
    SecurityUtil.validateKerberosPrincipal(input.getOwnerPrincipal());
  }
  Collection<ApplicationId> allAppVersionsAppIds = store.getAllAppVersionsAppIds(appId);
  // verify that the owner is the same as for existing versions of this app
  if (!allAppVersionsAppIds.isEmpty()) {
    verifyOwner(appId, input.getOwnerPrincipal());
  }
  verifySpec(appId, specification);
  // Verify the owner of each dataset/stream at this stage, even though creation would fail later
  // anyway if the owner differs: otherwise we could create a few datasets/streams and then fail
  // because some dataset/stream already exists with a different owner.
  verifyData(appId, specification, input.getOwnerPrincipal());
  verifyPrograms(appId, specification);
  // Emit the input to the next stage.
  emit(input);
}
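verifyOwner itself is not shown in this snippet. A plausible sketch of the mismatch check it performs, assuming a hypothetical ownerAdmin collaborator that can look up the owner recorded for existing versions of the app (all names below are illustrative, not the actual CDAP implementation):

// Hypothetical sketch: reject redeployment when the requested owner differs from the recorded one.
private void verifyOwner(ApplicationId appId, @Nullable KerberosPrincipalId requestedOwner) throws Exception {
  // ownerAdmin is an assumed collaborator, not shown in the snippet above
  KerberosPrincipalId existingOwner = ownerAdmin.getOwner(appId);
  if (!Objects.equals(existingOwner, requestedOwner)) {
    throw new IllegalArgumentException(String.format(
      "Application '%s' already exists with owner '%s' and cannot be redeployed with owner '%s'",
      appId, existingOwner, requestedOwner));
  }
}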
use of co.cask.cdap.proto.id.ApplicationId in project cdap by caskdata.
the class DeleteAndCreateSchedulesStage method process.
@Override
public void process(final ApplicationWithPrograms input) throws Exception {
  if (!input.canUpdateSchedules()) {
    // if we can't update schedules, emit and return
    emit(input);
    return;
  }
  ApplicationId appId = input.getApplicationId();
  // Get the set of new schedules from the app spec
  Set<ProgramSchedule> newSchedules = getProgramScheduleSet(appId, input.getSpecification());
  for (ProgramSchedule schedule : programScheduler.listSchedules(appId)) {
    if (newSchedules.contains(schedule)) {
      // The schedule already exists; remove it from newSchedules so it is not re-added
      newSchedules.remove(schedule);
      continue;
    }
    // Delete the existing schedule since it is not present in newSchedules
    programScheduler.deleteSchedule(schedule.getScheduleId());
  }
  // Add the remaining (truly new) schedules
  for (ProgramSchedule schedule : newSchedules) {
    addSchedule(schedule);
  }
  // Emit the input to the next stage.
  emit(input);
}
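The loop above implements a common set-reconciliation pattern: anything present in both sets is kept, anything only in the existing set is deleted, and whatever remains in the desired set is added. A minimal self-contained sketch with plain java.util collections, using String elements in place of ProgramSchedule:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ReconcileSketch {
  public static void main(String[] args) {
    Set<String> desired = new HashSet<>(Arrays.asList("a", "b", "c")); // newSchedules
    List<String> existing = Arrays.asList("b", "c", "d");              // listSchedules(appId)
    for (String schedule : existing) {
      if (desired.remove(schedule)) {
        continue; // present in both: keep as-is
      }
      System.out.println("delete " + schedule); // exists but is no longer desired
    }
    for (String schedule : desired) {
      System.out.println("add " + schedule); // desired but does not yet exist
    }
  }
}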
use of co.cask.cdap.proto.id.ApplicationId in project cdap by caskdata.
the class DefaultPreviewManager method start.
@Override
public ApplicationId start(NamespaceId namespace, AppRequest<?> appRequest) throws Exception {
  ApplicationId previewApp = namespace.app(PREFIX + System.currentTimeMillis());
  Injector injector = createPreviewInjector(previewApp);
  PreviewRunner runner = injector.getInstance(PreviewRunner.class);
  if (runner instanceof Service) {
    ((Service) runner).startAndWait();
  }
  try {
    runner.startPreview(new PreviewRequest<>(getProgramIdFromRequest(previewApp, appRequest), appRequest));
  } catch (Exception e) {
    if (runner instanceof Service) {
      stopQuietly((Service) runner);
    }
    removePreviewDir(previewApp);
    throw e;
  }
  appInjectors.put(previewApp, injector);
  return previewApp;
}
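stopQuietly is not shown here. A plausible sketch, assuming the same Guava Service API whose startAndWait() appears above; the point is that a failure during cleanup must never mask the original exception being rethrown (the logger and method body are illustrative):

// Hypothetical sketch: stop the runner service without letting a stop failure hide the real error.
private void stopQuietly(Service service) {
  try {
    service.stopAndWait();
  } catch (Exception e) {
    LOG.warn("Failed to stop the preview runner service", e);
  }
}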
use of co.cask.cdap.proto.id.ApplicationId in project cdap by caskdata.
the class DataPipelineTest method testExternalDatasetTracking.
private void testExternalDatasetTracking(Engine engine, boolean backwardsCompatible) throws Exception {
  String suffix = engine.name() + (backwardsCompatible ? "-bc" : "");
  // Define input/output datasets
  String expectedExternalDatasetInput = "fileInput-" + suffix;
  String expectedExternalDatasetOutput = "fileOutput-" + suffix;
  // Define input/output directories
  File inputDir = TMP_FOLDER.newFolder("input-" + suffix);
  String inputFile = "input-file1.txt";
  File outputDir = TMP_FOLDER.newFolder("output-" + suffix);
  File outputSubDir1 = new File(outputDir, "subdir1");
  File outputSubDir2 = new File(outputDir, "subdir2");
  if (!backwardsCompatible) {
    // Assert that there are no external datasets
    Assert.assertNull(getDataset(NamespaceId.DEFAULT.dataset(expectedExternalDatasetInput)).get());
    Assert.assertNull(getDataset(NamespaceId.DEFAULT.dataset(expectedExternalDatasetOutput)).get());
  }
  ETLBatchConfig.Builder builder = ETLBatchConfig.builder("* * * * *");
  ETLBatchConfig etlConfig = builder
    .setEngine(engine)
    .addStage(new ETLStage("source", MockExternalSource.getPlugin(expectedExternalDatasetInput,
                                                                  inputDir.getAbsolutePath())))
    .addStage(new ETLStage("sink1", MockExternalSink.getPlugin(
      backwardsCompatible ? null : expectedExternalDatasetOutput, "dir1", outputSubDir1.getAbsolutePath())))
    .addStage(new ETLStage("sink2", MockExternalSink.getPlugin(
      backwardsCompatible ? null : expectedExternalDatasetOutput, "dir2", outputSubDir2.getAbsolutePath())))
    .addConnection("source", "sink1")
    .addConnection("source", "sink2")
    .build();
  AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig);
  ApplicationId appId = NamespaceId.DEFAULT.app("ExternalDatasetApp-" + suffix);
  ApplicationManager appManager = deployApplication(appId, appRequest);
  Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
  StructuredRecord recordBob = StructuredRecord.builder(schema).set("name", "bob").build();
  StructuredRecord recordJane = StructuredRecord.builder(schema).set("name", "jane").build();
  ImmutableList<StructuredRecord> allInput = ImmutableList.of(recordSamuel, recordBob, recordJane);
  // Create input files
  MockExternalSource.writeInput(new File(inputDir, inputFile).getAbsolutePath(), allInput);
  WorkflowManager workflowManager = appManager.getWorkflowManager(SmartWorkflow.NAME);
  workflowManager.start();
  workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  List<RunRecord> history = workflowManager.getHistory();
  // There should be only one completed run
  Assert.assertEquals(1, history.size());
  Assert.assertEquals(ProgramRunStatus.COMPLETED, history.get(0).getStatus());
  // Assert output
  Assert.assertEquals(allInput, MockExternalSink.readOutput(outputSubDir1.getAbsolutePath()));
  Assert.assertEquals(allInput, MockExternalSink.readOutput(outputSubDir2.getAbsolutePath()));
  if (!backwardsCompatible) {
    // Assert that external datasets got created
    Assert.assertNotNull(getDataset(NamespaceId.DEFAULT.dataset(expectedExternalDatasetInput)).get());
    Assert.assertNotNull(getDataset(NamespaceId.DEFAULT.dataset(expectedExternalDatasetOutput)).get());
  }
}
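This private helper is presumably driven by @Test methods that cover each engine and compatibility combination; a plausible sketch of such drivers (the test names are illustrative, not necessarily the ones in DataPipelineTest):

@Test
public void testExternalDatasetTrackingSpark() throws Exception {
  testExternalDatasetTracking(Engine.SPARK, false);
}

@Test
public void testBackwardsCompatibleExternalDatasetTrackingMapReduce() throws Exception {
  // backwardsCompatible = true exercises the null external-dataset-name path in MockExternalSink
  testExternalDatasetTracking(Engine.MAPREDUCE, true);
}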