Use of io.cdap.cdap.proto.artifact.preview.PreviewConfig in project cdap by caskdata.
From the class PreviewDataPipelineTest, method testDataPipelinePreviewStop:
private void testDataPipelinePreviewStop(Engine engine, @Nullable Long sleepInMillis) throws Exception {
  PreviewManager previewManager = getPreviewManager();
  String sourceTableName = "singleInput";
  String sinkTableName = "singleOutput";
  Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  /*
   * source --> transform --> sink
   */
  ETLPlugin sourcePlugin = sleepInMillis == null
    ? MockSource.getPlugin(sourceTableName, schema)
    : MockSource.getPlugin(sourceTableName, schema, sleepInMillis);
  ETLBatchConfig etlConfig = ETLBatchConfig.builder()
    .addStage(new ETLStage("source", sourcePlugin))
    .addStage(new ETLStage("transform", IdentityTransform.getPlugin()))
    .addStage(new ETLStage("sink", MockSink.getPlugin(sinkTableName)))
    .addConnection("source", "transform")
    .addConnection("transform", "sink")
    .setEngine(engine)
    .setNumOfRecordsPreview(100)
    .build();
  // Construct the preview config with the program name and program type.
  PreviewConfig previewConfig =
    new PreviewConfig(SmartWorkflow.NAME, ProgramType.WORKFLOW, Collections.<String, String>emptyMap(), 10);
  // Create the table for the mock source.
  addDatasetInstance(Table.class.getName(), sourceTableName,
                     DatasetProperties.of(ImmutableMap.of("schema", schema.toString())));
  DataSetManager<Table> inputManager = getDataset(NamespaceId.DEFAULT.dataset(sourceTableName));
  StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
  StructuredRecord recordBob = StructuredRecord.builder(schema).set("name", "bob").build();
  MockSource.writeInput(inputManager, ImmutableList.of(recordSamuel, recordBob));
  AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT_RANGE, etlConfig, previewConfig);
  // Start the preview and get the corresponding preview id.
  ApplicationId previewId = previewManager.start(NamespaceId.DEFAULT, appRequest);
  if (sleepInMillis != null) {
    // Wait for the preview status to reach RUNNING.
    Tasks.waitFor(PreviewStatus.Status.RUNNING, () -> {
      PreviewStatus status = previewManager.getStatus(previewId);
      return status == null ? null : status.getStatus();
    }, 5, TimeUnit.MINUTES);
  }
  previewManager.stopPreview(previewId);
  // Wait for the preview status to reach KILLED.
  Tasks.waitFor(PreviewStatus.Status.KILLED, () -> {
    PreviewStatus status = previewManager.getStatus(previewId);
    return status == null ? null : status.getStatus();
  }, 5, TimeUnit.MINUTES);
  // Check that the sink table is not created in the real space.
  DataSetManager<Table> sinkManager = getDataset(sinkTableName);
  Assert.assertNull(sinkManager.get());
  deleteDatasetInstance(NamespaceId.DEFAULT.dataset(sourceTableName));
  Assert.assertTrue(previewManager.getRunId(previewId) == null || sleepInMillis != null);
}
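The PreviewConfig constructor used above takes four arguments: the name of the program to preview, its program type, a map of runtime arguments, and a timeout. The timeout is interpreted in minutes, as the DefaultPreviewRunner excerpt at the end of this page shows (getTimeout() feeds a TimeUnit.MINUTES wait). A minimal annotated sketch of the same call:

  // Sketch: the four PreviewConfig constructor arguments, as used in this test.
  PreviewConfig previewConfig = new PreviewConfig(
    SmartWorkflow.NAME,                     // program to run in preview mode
    ProgramType.WORKFLOW,                   // type of that program
    Collections.<String, String>emptyMap(), // runtime arguments for the preview run
    10);                                    // timeout, in minutes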
Use of io.cdap.cdap.proto.artifact.preview.PreviewConfig in project cdap by caskdata.
From the class PreviewDataStreamsTest, method testDataStreamsPreviewRun:
@Test
public void testDataStreamsPreviewRun() throws Exception {
  PreviewManager previewManager = getPreviewManager();
  String sinkTableName = "singleOutput";
  Schema schema = Schema.recordOf("testRecord", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  List<StructuredRecord> records = new ArrayList<>();
  StructuredRecord recordSamuel = StructuredRecord.builder(schema).set("name", "samuel").build();
  StructuredRecord recordBob = StructuredRecord.builder(schema).set("name", "bob").build();
  StructuredRecord recordTest = StructuredRecord.builder(schema).set("name", "test").build();
  records.add(recordSamuel);
  records.add(recordBob);
  records.add(recordTest);
  /*
   * source --> transform --> sink
   */
  DataStreamsConfig etlConfig = DataStreamsConfig.builder()
    .addStage(new ETLStage("source", MockSource.getPlugin(schema, records)))
    .addStage(new ETLStage("transform", IdentityTransform.getPlugin()))
    .addStage(new ETLStage("sink", MockSink.getPlugin(sinkTableName)))
    .addConnection("source", "transform")
    .addConnection("transform", "sink")
    .setNumOfRecordsPreview(100)
    .setBatchInterval("1s")
    .setCheckpointDir("file://" + TMP_FOLDER.getRoot().toPath().toString())
    .build();
  // Construct the preview config with the program name and program type.
  PreviewConfig previewConfig =
    new PreviewConfig(DataStreamsSparkLauncher.NAME, ProgramType.SPARK, Collections.<String, String>emptyMap(), 1);
  AppRequest<DataStreamsConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig, previewConfig);
  // Start the preview and get the corresponding preview id.
  ApplicationId previewId = previewManager.start(NamespaceId.DEFAULT, appRequest);
  // Wait for the preview to be running and for the records to be processed by the sink.
  Tasks.waitFor(true, new Callable<Boolean>() {
    @Override
    public Boolean call() throws Exception {
      Map<String, List<JsonElement>> data = previewManager.getData(previewId, "sink");
      return data != null && data.get(DATA_TRACER_PROPERTY) != null && data.get(DATA_TRACER_PROPERTY).size() == 3;
    }
  }, 1, TimeUnit.MINUTES);
  // Check the data in the source and transform stages.
  checkPreviewStore(previewManager, previewId, "source", 3);
  checkPreviewStore(previewManager, previewId, "transform", 3);
  // Wait for the pipeline to be shut down by the timer.
  TimeUnit.MINUTES.sleep(1);
  Tasks.waitFor(PreviewStatus.Status.KILLED_BY_TIMER, new Callable<PreviewStatus.Status>() {
    @Override
    public PreviewStatus.Status call() throws Exception {
      return previewManager.getStatus(previewId).getStatus();
    }
  }, 1, TimeUnit.MINUTES);
  // Validate the metrics for the preview.
  validateMetric(3, previewId, "source.records.out", previewManager);
  validateMetric(3, previewId, "transform.records.in", previewManager);
  validateMetric(3, previewId, "transform.records.out", previewManager);
  validateMetric(3, previewId, "sink.records.in", previewManager);
  validateMetric(3, previewId, "sink.records.out", previewManager);
  // Check that the sink table is not created in the real space.
  DataSetManager<Table> sinkManager = getDataset(sinkTableName);
  Assert.assertNull(sinkManager.get());
}
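The checkPreviewStore helper is not part of this excerpt. Based on the previewManager.getData(previewId, "sink") call in the wait loop above, a plausible sketch of it (the method body here is an assumption) looks like:

  // Hypothetical sketch of the checkPreviewStore helper used above, modeled
  // on the previewManager.getData(...) call in the wait loop.
  private void checkPreviewStore(PreviewManager previewManager, ApplicationId previewId,
                                 String tracerName, int expectedNumber) throws Exception {
    Map<String, List<JsonElement>> data = previewManager.getData(previewId, tracerName);
    List<JsonElement> tracedRecords = data == null ? null : data.get(DATA_TRACER_PROPERTY);
    Assert.assertNotNull(tracedRecords);
    Assert.assertEquals(expectedNumber, tracedRecords.size());
  }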
Use of io.cdap.cdap.proto.artifact.preview.PreviewConfig in project cdap by caskdata.
From the class PreviewServiceMainTest, method testPreviewSimpleApp:
@Test
public void testPreviewSimpleApp() throws Exception {
  // Build the app.
  LocationFactory locationFactory = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  Location appJar = AppJarHelper.createDeploymentJar(locationFactory, PreviewTestApp.class);
  // Deploy the app.
  String artifactName = PreviewTestApp.class.getSimpleName();
  String artifactVersion = "1.0.0-SNAPSHOT";
  deployArtifact(appJar, artifactName, artifactVersion);
  // Run a preview.
  ArtifactSummary artifactSummary = new ArtifactSummary(artifactName, artifactVersion);
  PreviewConfig previewConfig =
    new PreviewConfig(PreviewTestApp.TestWorkflow.NAME, ProgramType.WORKFLOW, Collections.emptyMap(), 2);
  AppRequest appRequest = new AppRequest<>(artifactSummary, null, previewConfig);
  ApplicationId previewId = runPreview(appRequest);
  // Wait for the preview to complete.
  waitForPreview(previewId);
  // Verify the result of the preview run.
  URL url = getRouterBaseURI()
    .resolve(String.format("/v3/namespaces/default/previews/%s/tracers/%s",
                           previewId.getApplication(), PreviewTestApp.TRACER_NAME))
    .toURL();
  HttpResponse response = HttpRequests.execute(HttpRequest.get(url).build(), getHttpRequestConfig());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, response.getResponseCode());
  Map<String, List<String>> tracerData = GSON.fromJson(response.getResponseBodyAsString(),
                                                       new TypeToken<Map<String, List<String>>>() { }.getType());
  Assert.assertEquals(Collections.singletonMap(PreviewTestApp.TRACER_KEY,
                                               Collections.singletonList(PreviewTestApp.TRACER_VAL)),
                      tracerData);
  // Clean up.
  deleteArtfiact(artifactName, artifactVersion);
}
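The waitForPreview helper is also outside this excerpt. Assuming a preview status REST endpoint alongside the tracers endpoint used above (the /status path and the response check below are assumptions, not confirmed by this page), it could be implemented as:

  // Hypothetical sketch of waitForPreview: poll an assumed preview status
  // endpoint until the preview reports a terminal COMPLETED status.
  private void waitForPreview(ApplicationId previewId) throws Exception {
    URL url = getRouterBaseURI()
      .resolve(String.format("/v3/namespaces/default/previews/%s/status", previewId.getApplication()))
      .toURL();
    Tasks.waitFor(true, () -> {
      HttpResponse response = HttpRequests.execute(HttpRequest.get(url).build(), getHttpRequestConfig());
      return response.getResponseCode() == HttpURLConnection.HTTP_OK
        && response.getResponseBodyAsString().contains("COMPLETED");
    }, 5, TimeUnit.MINUTES);
  }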
Use of io.cdap.cdap.proto.artifact.preview.PreviewConfig in project cdap by caskdata.
From the class CreateAppCommand, method perform:
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
  ApplicationId appId = parseApplicationId(arguments);
  String artifactName = arguments.get(ArgumentName.ARTIFACT_NAME.toString());
  String artifactVersion = arguments.get(ArgumentName.ARTIFACT_VERSION.toString());
  ArtifactScope artifactScope = ArtifactScope.valueOf(arguments.get(ArgumentName.SCOPE.toString()).toUpperCase());
  ArtifactSummary artifact = new ArtifactSummary(artifactName, artifactVersion, artifactScope);
  JsonObject config = new JsonObject();
  String ownerPrincipal = null;
  Boolean updateSchedules = null;
  PreviewConfig previewConfig = null;
  String configPath = arguments.getOptional(ArgumentName.APP_CONFIG_FILE.toString());
  if (configPath != null) {
    File configFile = resolver.resolvePathToFile(configPath);
    try (FileReader reader = new FileReader(configFile)) {
      AppRequest<JsonObject> appRequest = GSON.fromJson(reader, configType);
      config = appRequest.getConfig();
      ownerPrincipal = appRequest.getOwnerPrincipal();
      previewConfig = appRequest.getPreview();
      updateSchedules = appRequest.canUpdateSchedules();
    }
  }
  AppRequest<JsonObject> appRequest = new AppRequest<>(artifact, config, previewConfig, ownerPrincipal, updateSchedules);
  applicationClient.deploy(appId, appRequest);
  output.println("Successfully created application");
}
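The configType field is not shown in this excerpt. Since GSON.fromJson(reader, configType) is assigned to an AppRequest<JsonObject>, it is presumably a reflective Gson type token along these lines (an assumption, using java.lang.reflect.Type and com.google.gson.reflect.TypeToken):

  // Assumed definition of configType, inferred from the fromJson call above.
  private static final Type configType = new TypeToken<AppRequest<JsonObject>>() { }.getType();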
Use of io.cdap.cdap.proto.artifact.preview.PreviewConfig in project cdap by caskdata.
From the class DefaultPreviewRunner, method trackPreviewTimeout:
private void trackPreviewTimeout(PreviewRequest previewRequest, AtomicBoolean timeout,
                                 CompletableFuture<PreviewRequest> resultFuture) {
  ProgramId programId = previewRequest.getProgram();
  long timeoutMins = Optional.ofNullable(previewRequest.getAppRequest().getPreview())
    .map(PreviewConfig::getTimeout)
    .map(Integer::longValue)
    .orElse(PREVIEW_TIMEOUT);
  Thread timeoutThread = Threads.createDaemonThreadFactory("preview-timeout-" + programId).newThread(() -> {
    try {
      Uninterruptibles.getUninterruptibly(resultFuture, timeoutMins, TimeUnit.MINUTES);
    } catch (ExecutionException e) {
      // Ignore. This means the preview completed with a failure.
    } catch (TimeoutException e) {
      // Timeout: kill the preview.
      timeout.set(true);
      try {
        stopPreview(programId);
      } catch (Exception ex) {
        LOG.warn("Failed to stop preview upon timeout of {} minutes", timeoutMins, ex);
      }
    }
  });
  timeoutThread.start();
}
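The pattern here is worth noting: the daemon thread blocks on the preview's result future for at most timeoutMins minutes, exits quietly if the preview finishes on its own (successfully or with a failure), and stops the preview only when the wait genuinely times out. A self-contained illustration of the same pattern using only java.util.concurrent (all names here are illustrative, not CDAP APIs):

  import java.util.concurrent.CompletableFuture;
  import java.util.concurrent.ExecutionException;
  import java.util.concurrent.TimeUnit;
  import java.util.concurrent.TimeoutException;

  public class TimeoutWatcher {
    // Block on the future for a bounded time and run the cancellation action
    // only when the wait genuinely times out.
    static void watch(CompletableFuture<?> future, long timeoutMins, Runnable onTimeout) {
      Thread t = new Thread(() -> {
        try {
          future.get(timeoutMins, TimeUnit.MINUTES);
        } catch (ExecutionException | InterruptedException e) {
          // The future completed with a failure or the wait was interrupted:
          // either way the work is done, so there is nothing to stop.
        } catch (TimeoutException e) {
          onTimeout.run(); // Deadline passed: trigger the stop action.
        }
      });
      // A daemon thread, like the one created via Threads.createDaemonThreadFactory
      // above, so a pending watcher never blocks JVM shutdown.
      t.setDaemon(true);
      t.start();
    }
  }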