use of co.cask.cdap.proto.artifact.preview.PreviewConfig in project cdap by caskdata.
the class PreviewDataPipelineTest method testDataPipelinePreviewRun.
private void testDataPipelinePreviewRun(Engine engine) throws Exception {
  PreviewManager previewManager = getPreviewManager();
  String sourceTableName = "singleInput";
  String sinkTableName = "singleOutput";
  Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  /*
   * source --> transform --> sink
   */
  ETLBatchConfig etlConfig = ETLBatchConfig.builder("* * * * *")
    .addStage(new ETLStage("source", MockSource.getPlugin(sourceTableName, schema)))
    .addStage(new ETLStage("transform", IdentityTransform.getPlugin()))
    .addStage(new ETLStage("sink", MockSink.getPlugin(sinkTableName)))
    .addConnection("source", "transform")
    .addConnection("transform", "sink")
    .setEngine(engine)
    .setNumOfRecordsPreview(100)
    .build();
  // Construct the preview config with the program name, program type, runtime arguments and timeout (in minutes).
  PreviewConfig previewConfig = new PreviewConfig(SmartWorkflow.NAME, ProgramType.WORKFLOW,
                                                  Collections.<String, String>emptyMap(), 10);
  // Create the table for the mock source.
  addDatasetInstance(Table.class.getName(), sourceTableName,
                     DatasetProperties.of(ImmutableMap.of("schema", schema.toString())));
  DataSetManager<Table> inputManager = getDataset(NamespaceId.DEFAULT.dataset(sourceTableName));
  StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
  StructuredRecord recordBob = StructuredRecord.builder(schema).set("name", "bob").build();
  MockSource.writeInput(inputManager, ImmutableList.of(recordSamuel, recordBob));
  AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT_RANGE, etlConfig, previewConfig);
  // Start the preview and get the corresponding PreviewRunner.
  ApplicationId previewId = previewManager.start(NamespaceId.DEFAULT, appRequest);
  final PreviewRunner previewRunner = previewManager.getRunner(previewId);
  // Wait for the preview status to go into COMPLETED.
  Tasks.waitFor(PreviewStatus.Status.COMPLETED, new Callable<PreviewStatus.Status>() {
    @Override
    public PreviewStatus.Status call() throws Exception {
      PreviewStatus status = previewRunner.getStatus();
      return status == null ? null : status.getStatus();
    }
  }, 5, TimeUnit.MINUTES);
  // Each of the stages "source", "transform" and "sink" should have two records in the PreviewStore.
  checkPreviewStore(previewRunner, "source", 2);
  checkPreviewStore(previewRunner, "transform", 2);
  checkPreviewStore(previewRunner, "sink", 2);
  // Validate the metrics for the preview.
  validateMetric(2, previewId, "source.records.in", previewRunner);
  validateMetric(2, previewId, "source.records.out", previewRunner);
  validateMetric(2, previewId, "transform.records.in", previewRunner);
  validateMetric(2, previewId, "transform.records.out", previewRunner);
  validateMetric(2, previewId, "sink.records.in", previewRunner);
  validateMetric(2, previewId, "sink.records.out", previewRunner);
  // Check that the sink table is not created in the real space.
  DataSetManager<Table> sinkManager = getDataset(sinkTableName);
  Assert.assertNull(sinkManager.get());
  deleteDatasetInstance(NamespaceId.DEFAULT.dataset(sourceTableName));
}
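The checkPreviewStore and validateMetric helpers are not shown on this page. Based on the PreviewRunner#getData call used in PreviewDataStreamsTest further down (which returns a Map<String, List<JsonElement>> keyed by DATA_TRACER_PROPERTY), a minimal sketch of what checkPreviewStore could look like follows; the body is an assumption, not the project's actual implementation:

  private void checkPreviewStore(PreviewRunner previewRunner, String stageName, int expectedCount) throws Exception {
    // Fetch the records captured for the stage and compare the count under the tracer key.
    Map<String, List<JsonElement>> data = previewRunner.getData(stageName);
    List<JsonElement> records = data == null ? null : data.get(DATA_TRACER_PROPERTY);
    Assert.assertNotNull(records);
    Assert.assertEquals(expectedCount, records.size());
  }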
use of co.cask.cdap.proto.artifact.preview.PreviewConfig in project cdap by caskdata.
the class PreviewDataPipelineTest method testPreviewFailedRun.
private void testPreviewFailedRun(Engine engine) throws Exception {
  PreviewManager previewManager = getPreviewManager();
  String sourceTableName = "singleInput";
  String sinkTableName = "singleOutput";
  Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  /*
   * source --> transform --> sink
   */
  ETLBatchConfig etlConfig = ETLBatchConfig.builder("* * * * *")
    .addStage(new ETLStage("source", MockSource.getPlugin(sourceTableName, schema)))
    .addStage(new ETLStage("transform", ExceptionTransform.getPlugin("name", "samuel")))
    .addStage(new ETLStage("sink", MockSink.getPlugin(sinkTableName)))
    .addConnection("source", "transform")
    .addConnection("transform", "sink")
    .setNumOfRecordsPreview(100)
    .setEngine(engine)
    .build();
  // Construct the preview config with the program name and program type.
  PreviewConfig previewConfig = new PreviewConfig(SmartWorkflow.NAME, ProgramType.WORKFLOW,
                                                  Collections.<String, String>emptyMap(), 10);
  // Create the table for the mock source.
  addDatasetInstance(Table.class.getName(), sourceTableName,
                     DatasetProperties.of(ImmutableMap.of("schema", schema.toString())));
  DataSetManager<Table> inputManager = getDataset(NamespaceId.DEFAULT.dataset(sourceTableName));
  StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
  StructuredRecord recordBob = StructuredRecord.builder(schema).set("name", "bob").build();
  MockSource.writeInput(inputManager, "1", recordSamuel);
  MockSource.writeInput(inputManager, "2", recordBob);
  AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig, previewConfig);
  // Start the preview and get the corresponding PreviewRunner.
  ApplicationId previewId = previewManager.start(NamespaceId.DEFAULT, appRequest);
  final PreviewRunner previewRunner = previewManager.getRunner(previewId);
  // Wait for the preview status to go into RUN_FAILED.
  Tasks.waitFor(PreviewStatus.Status.RUN_FAILED, new Callable<PreviewStatus.Status>() {
    @Override
    public PreviewStatus.Status call() throws Exception {
      PreviewStatus status = previewRunner.getStatus();
      return status == null ? null : status.getStatus();
    }
  }, 5, TimeUnit.MINUTES);
  // The stage "source" should have both records in the PreviewStore, while "transform" and "sink"
  // should each have one record fewer than the source.
  checkPreviewStore(previewRunner, "source", 2);
  checkPreviewStore(previewRunner, "transform", 1);
  checkPreviewStore(previewRunner, "sink", 1);
  // Validate the metrics for the preview.
  validateMetric(2, previewId, "source.records.in", previewRunner);
  validateMetric(2, previewId, "source.records.out", previewRunner);
  validateMetric(2, previewId, "transform.records.in", previewRunner);
  validateMetric(1, previewId, "transform.records.out", previewRunner);
  validateMetric(1, previewId, "sink.records.in", previewRunner);
  validateMetric(1, previewId, "sink.records.out", previewRunner);
  // Check that the sink table is not created in the real space.
  DataSetManager<Table> sinkManager = getDataset(sinkTableName);
  Assert.assertNull(sinkManager.get());
  deleteDatasetInstance(NamespaceId.DEFAULT.dataset(sourceTableName));
}
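ExceptionTransform.getPlugin("name", "samuel") configures a transform that fails on the record whose "name" field is "samuel", which is why the transform and sink stages each hold one record fewer than the source and the run ends in RUN_FAILED. A sketch of the core logic of such a transform, assuming the standard CDAP Transform API; the plugin registration and configuration plumbing are omitted and the exception type is illustrative:

  public class ExceptionTransform extends Transform<StructuredRecord, StructuredRecord> {
    private final String fieldName;  // e.g. "name"
    private final String badValue;   // e.g. "samuel"

    public ExceptionTransform(String fieldName, String badValue) {
      this.fieldName = fieldName;
      this.badValue = badValue;
    }

    @Override
    public void transform(StructuredRecord input, Emitter<StructuredRecord> emitter) throws Exception {
      // Fail on the matching record; pass every other record through unchanged.
      if (badValue.equals(input.get(fieldName))) {
        throw new IllegalStateException("Intentional failure for " + fieldName + " = " + badValue);
      }
      emitter.emit(input);
    }
  }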
use of co.cask.cdap.proto.artifact.preview.PreviewConfig in project cdap by caskdata.
the class DefaultPreviewRunner method startPreview.
@Override
public void startPreview(PreviewRequest<?> previewRequest) throws Exception {
  namespaceAdmin.create(new NamespaceMeta.Builder().setName(previewRequest.getProgram().getNamespaceId()).build());
  programId = previewRequest.getProgram();
  AppRequest<?> request = previewRequest.getAppRequest();
  ArtifactSummary artifactSummary = request.getArtifact();
  ApplicationId preview = programId.getParent();
  DataTracerFactoryProvider.setDataTracerFactory(preview, dataTracerFactory);
  String config = request.getConfig() == null ? null : GSON.toJson(request.getConfig());
  try {
    applicationLifecycleService.deployApp(preview.getParent(), preview.getApplication(), preview.getVersion(),
                                          artifactSummary, config, NOOP_PROGRAM_TERMINATOR, null,
                                          request.canUpdateSchedules());
  } catch (Exception e) {
    this.status = new PreviewStatus(PreviewStatus.Status.DEPLOY_FAILED, new BasicThrowable(e), null, null);
    throw e;
  }
  final PreviewConfig previewConfig = previewRequest.getAppRequest().getPreview();
  ProgramController controller = programLifecycleService.start(
    programId, previewConfig == null ? Collections.<String, String>emptyMap() : previewConfig.getRuntimeArgs(), false);
  controller.addListener(new AbstractListener() {
    @Override
    public void init(ProgramController.State currentState, @Nullable Throwable cause) {
      setStatus(new PreviewStatus(PreviewStatus.Status.RUNNING, null, System.currentTimeMillis(), null));
      // Only set up a timer if there is a timeout setting. Guard against a null previewConfig,
      // which is possible since the start call above already treats it as optional.
      if (previewConfig != null && previewConfig.getTimeout() != null) {
        timer = new Timer();
        final int timeOutMinutes = previewConfig.getTimeout();
        timer.schedule(new TimerTask() {
          @Override
          public void run() {
            try {
              LOG.info("Stopping the preview since it has reached its running time of {} minutes.", timeOutMinutes);
              // Set the flag before stopping: the killed() callback may fire while stopPreview()
              // is still in progress, and it must report KILLED_BY_TIMER rather than KILLED.
              killedByTimer = true;
              stopPreview();
            } catch (Exception e) {
              killedByTimer = false;
              LOG.debug("Error shutting down the preview run with id: {}", programId, e);
            }
          }
        }, TimeUnit.MINUTES.toMillis(timeOutMinutes));
      }
    }

    @Override
    public void completed() {
      setStatus(new PreviewStatus(PreviewStatus.Status.COMPLETED, null, status.getStartTime(), System.currentTimeMillis()));
      shutDownUnrequiredServices();
    }

    @Override
    public void killed() {
      if (!killedByTimer) {
        setStatus(new PreviewStatus(PreviewStatus.Status.KILLED, null, status.getStartTime(), System.currentTimeMillis()));
      } else {
        setStatus(new PreviewStatus(PreviewStatus.Status.KILLED_BY_TIMER, null, status.getStartTime(), System.currentTimeMillis()));
      }
      shutDownUnrequiredServices();
    }

    @Override
    public void error(Throwable cause) {
      setStatus(new PreviewStatus(PreviewStatus.Status.RUN_FAILED, new BasicThrowable(cause), status.getStartTime(), System.currentTimeMillis()));
      shutDownUnrequiredServices();
    }
  }, Threads.SAME_THREAD_EXECUTOR);
  runId = controller.getProgramRunId();
}
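Since the listener is registered with Threads.SAME_THREAD_EXECUTOR, the callbacks run on the program controller's own thread, so setStatus must be cheap and thread-safe relative to the timer thread. In new code a single-threaded ScheduledExecutorService is usually preferred over java.util.Timer; a sketch of the same timeout under that assumption (an alternative, not what DefaultPreviewRunner actually does):

  // Alternative timeout sketch using a ScheduledExecutorService instead of Timer.
  ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
  ScheduledFuture<?> timeout = scheduler.schedule(() -> {
    try {
      killedByTimer = true;
      stopPreview();
    } catch (Exception e) {
      killedByTimer = false;
      LOG.debug("Error shutting down the preview run with id: {}", programId, e);
    }
  }, timeOutMinutes, TimeUnit.MINUTES);
  // On normal completion: timeout.cancel(false); scheduler.shutdown();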
use of co.cask.cdap.proto.artifact.preview.PreviewConfig in project cdap by caskdata.
the class CreateAppCommand method perform.
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
  ApplicationId appId = parseApplicationId(arguments);
  String artifactName = arguments.get(ArgumentName.ARTIFACT_NAME.toString());
  String artifactVersion = arguments.get(ArgumentName.ARTIFACT_VERSION.toString());
  ArtifactScope artifactScope = ArtifactScope.valueOf(arguments.get(ArgumentName.SCOPE.toString()).toUpperCase());
  ArtifactSummary artifact = new ArtifactSummary(artifactName, artifactVersion, artifactScope);
  JsonObject config = new JsonObject();
  String ownerPrincipal = null;
  Boolean updateSchedules = null;
  PreviewConfig previewConfig = null;
  String configPath = arguments.getOptional(ArgumentName.APP_CONFIG_FILE.toString());
  if (configPath != null) {
    File configFile = resolver.resolvePathToFile(configPath);
    try (FileReader reader = new FileReader(configFile)) {
      AppRequest<JsonObject> appRequest = GSON.fromJson(reader, configType);
      config = appRequest.getConfig();
      ownerPrincipal = appRequest.getOwnerPrincipal();
      previewConfig = appRequest.getPreview();
      updateSchedules = appRequest.canUpdateSchedules();
    }
  }
  AppRequest<JsonObject> appRequest = new AppRequest<>(artifact, config, previewConfig, ownerPrincipal, updateSchedules);
  applicationClient.deploy(appId, appRequest);
  output.println("Successfully created application");
}
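CreateAppCommand reads the optional app config file as a serialized AppRequest and then rebuilds the request before deploying. A sketch of the programmatic equivalent, using only the constructors that appear on this page; the artifact name, version and scope are illustrative values, not ones taken from the project:

  // Build the same AppRequest in code instead of reading it from a config file.
  ArtifactSummary artifact = new ArtifactSummary("cdap-data-pipeline", "4.1.1", ArtifactScope.SYSTEM);
  JsonObject config = new JsonObject();
  PreviewConfig preview = new PreviewConfig(SmartWorkflow.NAME, ProgramType.WORKFLOW,
                                            Collections.<String, String>emptyMap(), 10);
  // ownerPrincipal left null; updateSchedules set to true.
  AppRequest<JsonObject> request = new AppRequest<>(artifact, config, preview, null, true);
  applicationClient.deploy(appId, request);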
use of co.cask.cdap.proto.artifact.preview.PreviewConfig in project cdap by caskdata.
the class PreviewDataStreamsTest method testDataStreamsPreviewRun.
@Test
public void testDataStreamsPreviewRun() throws Exception {
  PreviewManager previewManager = getPreviewManager();
  String sinkTableName = "singleOutput";
  Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  List<StructuredRecord> records = new ArrayList<>();
  StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
  StructuredRecord recordBob = StructuredRecord.builder(schema).set("name", "bob").build();
  StructuredRecord recordTest = StructuredRecord.builder(schema).set("name", "test").build();
  records.add(recordSamuel);
  records.add(recordBob);
  records.add(recordTest);
  /*
   * source --> transform --> sink
   */
  DataStreamsConfig etlConfig = DataStreamsConfig.builder()
    .addStage(new ETLStage("source", MockSource.getPlugin(schema, records)))
    .addStage(new ETLStage("transform", IdentityTransform.getPlugin()))
    .addStage(new ETLStage("sink", MockSink.getPlugin(sinkTableName)))
    .addConnection("source", "transform")
    .addConnection("transform", "sink")
    .setNumOfRecordsPreview(100)
    .setBatchInterval("1s")
    .build();
  // Construct the preview config with the program name, program type and a one-minute timeout.
  PreviewConfig previewConfig = new PreviewConfig(DataStreamsSparkLauncher.NAME, ProgramType.SPARK,
                                                  Collections.<String, String>emptyMap(), 1);
  AppRequest<DataStreamsConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig, previewConfig);
  // Start the preview and get the corresponding PreviewRunner.
  ApplicationId previewId = previewManager.start(NamespaceId.DEFAULT, appRequest);
  final PreviewRunner previewRunner = previewManager.getRunner(previewId);
  // Wait until all three records have been processed by the sink.
  Tasks.waitFor(true, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      Map<String, List<JsonElement>> data = previewRunner.getData("sink");
      return data != null && data.get(DATA_TRACER_PROPERTY) != null && data.get(DATA_TRACER_PROPERTY).size() == 3;
    }
  }, 1, TimeUnit.MINUTES);
  // Check the data in the source and transform stages.
  checkPreviewStore(previewRunner, "source", 3);
  checkPreviewStore(previewRunner, "transform", 3);
  // Wait for the pipeline to be shut down by the timer.
  TimeUnit.MINUTES.sleep(1);
  Tasks.waitFor(PreviewStatus.Status.KILLED_BY_TIMER, new Callable<PreviewStatus.Status>() {
    @Override
    public PreviewStatus.Status call() throws Exception {
      return previewRunner.getStatus().getStatus();
    }
  }, 1, TimeUnit.MINUTES);
  // Validate the metrics for the preview.
  validateMetric(3, previewId, "source.records.out", previewRunner);
  validateMetric(3, previewId, "transform.records.in", previewRunner);
  validateMetric(3, previewId, "transform.records.out", previewRunner);
  validateMetric(3, previewId, "sink.records.in", previewRunner);
  validateMetric(3, previewId, "sink.records.out", previewRunner);
  // Check that the sink table is not created in the real space.
  DataSetManager<Table> sinkManager = getDataset(sinkTableName);
  Assert.assertNull(sinkManager.get());
}
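All of the tests above poll with Tasks.waitFor and an anonymous Callable; under Java 8 the same pattern collapses to a lambda. A sketch, assuming the same Tasks.waitFor overload used throughout:

  // Java 8 form of the status-polling pattern used in these tests.
  Tasks.waitFor(PreviewStatus.Status.COMPLETED, () -> {
    PreviewStatus status = previewRunner.getStatus();
    return status == null ? null : status.getStatus();
  }, 5, TimeUnit.MINUTES);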