Example usage of io.cdap.cdap.proto.artifact.preview.PreviewConfig in the project cdap by caskdata:
the class DataPipelineConnectionTest, method testUsingConnections.
/**
 * End-to-end check that a pipeline can reference its source and sink plugins through named
 * connections, that macro values in connection properties are resolved from runtime arguments,
 * and that modifying a connection redirects a subsequent run to new tables without redeploying
 * the application.
 *
 * @param engine the execution engine to run the pipeline with; also used to suffix names so
 *               runs for different engines do not collide
 * @throws Exception if any preview, deploy, run, or dataset operation fails
 */
private void testUsingConnections(Engine engine) throws Exception {
String sourceConnName = "sourceConn " + engine;
String sinkConnName = "sinkConn " + engine;
String srcTableName = "src" + engine;
String sinkTableName = "sink" + engine;
// add some bad json object to the property
// "key1" is a macro (${badval}) whose value is a JSON string supplied at runtime below
addConnection(sourceConnName, new ConnectionCreationRequest("", new PluginInfo("test", "dummy", null, ImmutableMap.of("tableName", srcTableName, "key1", "${badval}"), new ArtifactSelectorConfig())));
addConnection(sinkConnName, new ConnectionCreationRequest("", new PluginInfo("test", "dummy", null, ImmutableMap.of("tableName", sinkTableName, "key1", "${badval}"), new ArtifactSelectorConfig())));
// add json string to the runtime arguments to ensure plugin can get instantiated under such condition
Map<String, String> runtimeArguments = Collections.singletonMap("badval", "{\"a\" : 1}");
// source -> sink
// Both stages reference their plugin indirectly via the connection name rather than inline properties.
ETLBatchConfig config = ETLBatchConfig.builder().setEngine(engine).addStage(new ETLStage("source", MockSource.getPluginUsingConnection(sourceConnName))).addStage(new ETLStage("sink", MockSink.getPluginUsingConnection(sinkConnName))).addConnection("source", "sink").build();
Schema schema = Schema.recordOf("x", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
StructuredRecord samuel = StructuredRecord.builder(schema).set("name", "samuel").build();
StructuredRecord dwayne = StructuredRecord.builder(schema).set("name", "dwayne").build();
// add the dataset by the test, the source won't create it since table name is macro enabled
addDatasetInstance(NamespaceId.DEFAULT.dataset(srcTableName), Table.class.getName());
DataSetManager<Table> sourceTable = getDataset(srcTableName);
MockSource.writeInput(sourceTable, ImmutableList.of(samuel, dwayne));
// verify preview can run successfully using connections
PreviewManager previewManager = getPreviewManager();
PreviewConfig previewConfig = new PreviewConfig(SmartWorkflow.NAME, ProgramType.WORKFLOW, runtimeArguments, 10);
// Start the preview and get the corresponding PreviewRunner.
ApplicationId previewId = previewManager.start(NamespaceId.DEFAULT, new AppRequest<>(APP_ARTIFACT, config, previewConfig));
// Wait for the preview status go into COMPLETED.
Tasks.waitFor(PreviewStatus.Status.COMPLETED, new Callable<PreviewStatus.Status>() {
@Override
public PreviewStatus.Status call() throws Exception {
PreviewStatus status = previewManager.getStatus(previewId);
// status can be null before the preview run is registered; keep polling in that case
return status == null ? null : status.getStatus();
}
}, 5, TimeUnit.MINUTES);
// Now deploy and run the same config as a real (non-preview) application.
AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, config);
ApplicationId appId = NamespaceId.DEFAULT.app("testApp" + engine);
ApplicationManager appManager = deployApplication(appId, appRequest);
// start the actual pipeline run
WorkflowManager manager = appManager.getWorkflowManager(SmartWorkflow.NAME);
manager.startAndWaitForGoodRun(runtimeArguments, ProgramRunStatus.COMPLETED, 3, TimeUnit.MINUTES);
DataSetManager<Table> sinkTable = getDataset(sinkTableName);
List<StructuredRecord> outputRecords = MockSink.readOutput(sinkTable);
// compare as sets: record order in the sink is not guaranteed
Assert.assertEquals(ImmutableSet.of(dwayne, samuel), new HashSet<>(outputRecords));
// modify the connection to use a new table name for source and sink
String newSrcTableName = "new" + srcTableName;
String newSinkTableName = "new" + sinkTableName;
// re-adding under the same connection name overwrites the stored connection
addConnection(sourceConnName, new ConnectionCreationRequest("", new PluginInfo("test", "dummy", null, Collections.singletonMap("tableName", newSrcTableName), new ArtifactSelectorConfig())));
addConnection(sinkConnName, new ConnectionCreationRequest("", new PluginInfo("test", "dummy", null, Collections.singletonMap("tableName", newSinkTableName), new ArtifactSelectorConfig())));
addDatasetInstance(NamespaceId.DEFAULT.dataset(newSrcTableName), Table.class.getName());
StructuredRecord newRecord1 = StructuredRecord.builder(schema).set("name", "john").build();
StructuredRecord newRecord2 = StructuredRecord.builder(schema).set("name", "tom").build();
sourceTable = getDataset(newSrcTableName);
MockSource.writeInput(sourceTable, ImmutableList.of(newRecord1, newRecord2));
// run the program again, it should use the new table to read and write
manager.start(runtimeArguments);
// wait for 2 completed runs total: the first run above plus this one
manager.waitForRuns(ProgramRunStatus.COMPLETED, 2, 3, TimeUnit.MINUTES);
sinkTable = getDataset(newSinkTableName);
outputRecords = MockSink.readOutput(sinkTable);
Assert.assertEquals(ImmutableSet.of(newRecord1, newRecord2), new HashSet<>(outputRecords));
// clean up connections and all four tables created by this test
deleteConnection(sourceConnName);
deleteConnection(sinkConnName);
deleteDatasetInstance(NamespaceId.DEFAULT.dataset(srcTableName));
deleteDatasetInstance(NamespaceId.DEFAULT.dataset(sinkTableName));
deleteDatasetInstance(NamespaceId.DEFAULT.dataset(newSrcTableName));
deleteDatasetInstance(NamespaceId.DEFAULT.dataset(newSinkTableName));
}
Example usage of io.cdap.cdap.proto.artifact.preview.PreviewConfig in the project cdap by caskdata:
the class PreviewDataPipelineTest, method testMultiplePhase.
/**
 * Verifies that a preview run of a multi-phase pipeline (two joins fed by three sources through
 * identity transforms) completes and produces the expected record counts in the preview store
 * and metrics.
 *
 * @param engine the execution engine to run the pipeline with; also used to suffix dataset and
 *               stage names so runs for different engines do not collide
 * @throws Exception if dataset creation, preview start, or status polling fails
 */
private void testMultiplePhase(Engine engine) throws Exception {
/*
* source1 ----> t1 ------
* | --> innerjoin ----> t4 ------
* source2 ----> t2 ------ |
* | ---> outerjoin --> sink1
* |
* source3 -------------------- t3 ------------------------
*/
PreviewManager previewManager = getPreviewManager();
Schema inputSchema1 = Schema.recordOf("customerRecord", Schema.Field.of("customer_id", Schema.of(Schema.Type.STRING)), Schema.Field.of("customer_name", Schema.of(Schema.Type.STRING)));
Schema inputSchema2 = Schema.recordOf("itemRecord", Schema.Field.of("item_id", Schema.of(Schema.Type.STRING)), Schema.Field.of("item_price", Schema.of(Schema.Type.LONG)), Schema.Field.of("cust_id", Schema.of(Schema.Type.STRING)), Schema.Field.of("cust_name", Schema.of(Schema.Type.STRING)));
Schema inputSchema3 = Schema.recordOf("transactionRecord", Schema.Field.of("t_id", Schema.of(Schema.Type.STRING)), Schema.Field.of("c_id", Schema.of(Schema.Type.STRING)), Schema.Field.of("i_id", Schema.of(Schema.Type.STRING)));
// outer-join output: every field is nullable because rows may not match on both sides
Schema outSchema2 = Schema.recordOf("join.output", Schema.Field.of("t_id", Schema.nullableOf(Schema.of(Schema.Type.STRING))), Schema.Field.of("c_id", Schema.nullableOf(Schema.of(Schema.Type.STRING))), Schema.Field.of("i_id", Schema.nullableOf(Schema.of(Schema.Type.STRING))), Schema.Field.of("customer_id", Schema.nullableOf(Schema.of(Schema.Type.STRING))), Schema.Field.of("customer_name", Schema.nullableOf(Schema.of(Schema.Type.STRING))), Schema.Field.of("item_id", Schema.nullableOf(Schema.of(Schema.Type.STRING))), Schema.Field.of("item_price", Schema.nullableOf(Schema.of(Schema.Type.LONG))), Schema.Field.of("cust_id", Schema.nullableOf(Schema.of(Schema.Type.STRING))), Schema.Field.of("cust_name", Schema.nullableOf(Schema.of(Schema.Type.STRING))));
String source1MulitJoinInput = "multiJoinSource1-" + engine;
String source2MultiJoinInput = "multiJoinSource2-" + engine;
String source3MultiJoinInput = "multiJoinSource3-" + engine;
String outputName = "multiJoinOutput-" + engine;
String sinkName = "multiJoinOutputSink-" + engine;
String outerJoinName = "multiJoinOuter-" + engine;
// wire up the DAG shown in the diagram above; preview is capped at 100 records per source
ETLBatchConfig etlConfig = ETLBatchConfig.builder().addStage(new ETLStage("source1", MockSource.getPlugin(source1MulitJoinInput, inputSchema1))).addStage(new ETLStage("source2", MockSource.getPlugin(source2MultiJoinInput, inputSchema2))).addStage(new ETLStage("source3", MockSource.getPlugin(source3MultiJoinInput, inputSchema3))).addStage(new ETLStage("t1", IdentityTransform.getPlugin())).addStage(new ETLStage("t2", IdentityTransform.getPlugin())).addStage(new ETLStage("t3", IdentityTransform.getPlugin())).addStage(new ETLStage("t4", IdentityTransform.getPlugin())).addStage(new ETLStage("innerjoin", MockJoiner.getPlugin("t1.customer_id=t2.cust_id", "t1,t2", ""))).addStage(new ETLStage(outerJoinName, MockJoiner.getPlugin("t4.item_id=t3.i_id", "", ""))).addStage(new ETLStage(sinkName, MockSink.getPlugin(outputName))).addConnection("source1", "t1").addConnection("source2", "t2").addConnection("source3", "t3").addConnection("t1", "innerjoin").addConnection("t2", "innerjoin").addConnection("innerjoin", "t4").addConnection("t3", outerJoinName).addConnection("t4", outerJoinName).addConnection(outerJoinName, sinkName).setEngine(engine).setNumOfRecordsPreview(100).build();
// Construct the preview config with the program name and program type
PreviewConfig previewConfig = new PreviewConfig(SmartWorkflow.NAME, ProgramType.WORKFLOW, Collections.<String, String>emptyMap(), 10);
// Create the table for the mock source
addDatasetInstance(Table.class.getName(), source1MulitJoinInput, DatasetProperties.of(ImmutableMap.of("schema", inputSchema1.toString())));
addDatasetInstance(Table.class.getName(), source2MultiJoinInput, DatasetProperties.of(ImmutableMap.of("schema", inputSchema2.toString())));
addDatasetInstance(Table.class.getName(), source3MultiJoinInput, DatasetProperties.of(ImmutableMap.of("schema", inputSchema3.toString())));
AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, etlConfig, previewConfig);
// Start the preview and get the corresponding PreviewRunner.
ApplicationId previewId = previewManager.start(NamespaceId.DEFAULT, appRequest);
// data is ingested after the preview is started; the preview reads it once running
ingestData(inputSchema1, inputSchema2, inputSchema3, source1MulitJoinInput, source2MultiJoinInput, source3MultiJoinInput);
// Wait for the preview status go into COMPLETED.
Tasks.waitFor(PreviewStatus.Status.COMPLETED, new Callable<PreviewStatus.Status>() {
@Override
public PreviewStatus.Status call() throws Exception {
PreviewStatus status = previewManager.getStatus(previewId);
// status can be null before the preview run is registered; keep polling in that case
return status == null ? null : status.getStatus();
}
}, 5, TimeUnit.MINUTES);
// 3 joined records are expected at the sink; check both the preview store and metrics
checkPreviewStore(previewManager, previewId, sinkName, 3);
validateMetric(3L, previewId, sinkName + ".records.in", previewManager);
}
Example usage of io.cdap.cdap.proto.artifact.preview.PreviewConfig in the project cdap by caskdata:
the class DefaultPreviewManager, method getProgramIdFromRequest.
/**
 * Resolves the program to run for a preview request from the request's preview configuration.
 *
 * @param preview the preview application id the resulting program belongs to
 * @param request the app request carrying the preview configuration
 * @return the program id built from the preview config's program type and name
 * @throws BadRequestException if the request has no preview config, or if the config is missing
 *         the program name or the program type
 */
private ProgramId getProgramIdFromRequest(ApplicationId preview, AppRequest<?> request) throws BadRequestException {
  PreviewConfig previewConfig = request.getPreview();
  if (previewConfig == null) {
    throw new BadRequestException("Preview config cannot be null");
  }
  String programName = previewConfig.getProgramName();
  ProgramType programType = previewConfig.getProgramType();
  if (programName == null || programType == null) {
    // A missing name/type is a malformed request, so report it as a BadRequestException
    // like the null-config case above, instead of an unchecked IllegalArgumentException.
    throw new BadRequestException("ProgramName or ProgramType cannot be null.");
  }
  return preview.program(programType, programName);
}
Example usage of io.cdap.cdap.proto.artifact.preview.PreviewConfig in the project cdap by caskdata:
the class DefaultPreviewRequestQueueTest, method testPreviewRequestQueue.
/**
 * Exercises the preview request queue: polling an empty queue, FIFO ordering of polled requests,
 * preservation of the submitting principal, position reporting via {@code positionOf}, and the
 * capacity limit (adding beyond capacity must throw {@link IllegalStateException}).
 */
@Test
public void testPreviewRequestQueue() {
  PreviewConfig previewConfig = new PreviewConfig("WordCount", ProgramType.WORKFLOW, null, null);
  AppRequest<?> testRequest = new AppRequest<>(new ArtifactSummary("test", "1.0"), null, previewConfig);
  byte[] pollerInfo = Bytes.toBytes("runner-1");

  // Polling an empty queue yields nothing.
  Optional<PreviewRequest> requestOptional = previewRequestQueue.poll(pollerInfo);
  Assert.assertFalse(requestOptional.isPresent());

  // A queued request comes back with the program id derived from its preview config.
  ApplicationId app1 = new ApplicationId("default", RunIds.generate().getId());
  PreviewRequest request = new PreviewRequest(app1, testRequest, null);
  previewRequestQueue.add(request);
  requestOptional = previewRequestQueue.poll(pollerInfo);
  Assert.assertTrue(requestOptional.isPresent());
  request = requestOptional.get();
  ProgramId programId1 = new ProgramId(app1, ProgramType.WORKFLOW, "WordCount");
  Assert.assertEquals(programId1, request.getProgram());

  // The principal attached at submission time is preserved through the queue.
  Principal principal = new Principal("userFoo", Principal.PrincipalType.USER, new Credential("userFooCredential", Credential.CredentialType.EXTERNAL));
  PreviewRequest requestWithPrinciple = new PreviewRequest(app1, testRequest, principal);
  previewRequestQueue.add(requestWithPrinciple);
  requestOptional = previewRequestQueue.poll(pollerInfo);
  Assert.assertTrue(requestOptional.isPresent());
  request = requestOptional.get();
  Assert.assertEquals(principal, request.getPrincipal());
  requestOptional = previewRequestQueue.poll(pollerInfo);
  Assert.assertFalse(requestOptional.isPresent());

  // positionOf reflects FIFO order of the waiting requests.
  ApplicationId app2 = new ApplicationId("default", RunIds.generate().getId());
  request = new PreviewRequest(app2, testRequest, null);
  previewRequestQueue.add(request);
  Assert.assertEquals(0, previewRequestQueue.positionOf(app2));
  ApplicationId app3 = new ApplicationId("default", RunIds.generate().getId());
  request = new PreviewRequest(app3, testRequest, null);
  previewRequestQueue.add(request);
  Assert.assertEquals(1, previewRequestQueue.positionOf(app3));

  // Adding beyond the queue's capacity must be rejected.
  ApplicationId app4 = new ApplicationId("default", RunIds.generate().getId());
  request = new PreviewRequest(app4, testRequest, null);
  try {
    previewRequestQueue.add(request);
    Assert.fail("Expected IllegalStateException when adding a request to a full queue");
  } catch (IllegalStateException e) {
    // expected: the queue is at capacity
  }

  // The rejected request is not queued; the two waiting requests drain in FIFO order.
  requestOptional = previewRequestQueue.poll(pollerInfo);
  Assert.assertTrue(requestOptional.isPresent());
  request = requestOptional.get();
  ProgramId programId2 = new ProgramId(app2, ProgramType.WORKFLOW, "WordCount");
  Assert.assertEquals(programId2, request.getProgram());
  requestOptional = previewRequestQueue.poll(pollerInfo);
  Assert.assertTrue(requestOptional.isPresent());
  request = requestOptional.get();
  ProgramId programId3 = new ProgramId(app3, ProgramType.WORKFLOW, "WordCount");
  Assert.assertEquals(programId3, request.getProgram());
  requestOptional = previewRequestQueue.poll(pollerInfo);
  Assert.assertFalse(requestOptional.isPresent());
}
Example usage of io.cdap.cdap.proto.artifact.preview.PreviewConfig in the project cdap by caskdata:
the class PreviewServiceMainTest, method testPreviewAppWithPlugin.
/**
 * Runs a preview of an app that loads a plugin from a separately deployed plugin artifact, then
 * verifies the preview's tracer data contains the value the plugin produced.
 *
 * @throws Exception if artifact deployment, the preview run, or HTTP verification fails
 */
@Test
public void testPreviewAppWithPlugin() throws Exception {
  // Build the app
  LocationFactory locationFactory = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  Location appJar = AppJarHelper.createDeploymentJar(locationFactory, PreviewTestAppWithPlugin.class);
  String appArtifactName = PreviewTestAppWithPlugin.class.getSimpleName() + "_artifact";
  String artifactVersion = "1.0.0-SNAPSHOT";
  // Deploy the app
  deployArtifact(appJar, appArtifactName, artifactVersion);
  HttpResponse response;
  // Build plugin artifact exporting the package of the plugin class
  Manifest manifest = new Manifest();
  manifest.getMainAttributes().put(ManifestFields.EXPORT_PACKAGE, ConstantCallable.class.getPackage().getName());
  Location pluginJar = PluginJarHelper.createPluginJar(locationFactory, manifest, ConstantCallable.class);
  // Deploy plugin artifact, declaring it extends the app artifact's version range
  String pluginArtifactName = ConstantCallable.class.getSimpleName() + "_artifact";
  URL pluginArtifactUrl = getRouterBaseURI().resolve(String.format("/v3/namespaces/default/artifacts/%s", pluginArtifactName)).toURL();
  response = HttpRequests.execute(HttpRequest.post(pluginArtifactUrl).withBody((ContentProvider<? extends InputStream>) pluginJar::getInputStream).addHeader("Artifact-Extends", String.format("%s[1.0.0-SNAPSHOT,10.0.0]", appArtifactName)).addHeader("Artifact-Version", artifactVersion).build(), getHttpRequestConfig());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, response.getResponseCode());
  // Run a preview whose app config makes the plugin emit this value
  String expectedOutput = "output_value";
  ArtifactId appArtifactId = new ArtifactId(appArtifactName, new ArtifactVersion(artifactVersion), ArtifactScope.USER);
  ArtifactSummary artifactSummary = ArtifactSummary.from(appArtifactId);
  PreviewConfig previewConfig = new PreviewConfig(PreviewTestAppWithPlugin.TestWorkflow.NAME, ProgramType.WORKFLOW, Collections.emptyMap(), 2);
  PreviewTestAppWithPlugin.Conf appConf = new PreviewTestAppWithPlugin.Conf(ConstantCallable.NAME, Collections.singletonMap("val", expectedOutput));
  // Parameterize the request instead of using the raw AppRequest type
  AppRequest<PreviewTestAppWithPlugin.Conf> appRequest = new AppRequest<>(artifactSummary, appConf, previewConfig);
  ApplicationId previewId = runPreview(appRequest);
  // Wait for preview to complete
  waitForPreview(previewId);
  // Verify the result of preview run
  // NOTE(review): the tracer name comes from PreviewTestApp while the rest of this test uses
  // PreviewTestAppWithPlugin — confirm the two classes intentionally share the tracer name.
  URL url = getRouterBaseURI().resolve(String.format("/v3/namespaces/default/previews/%s/tracers/%s", previewId.getApplication(), PreviewTestApp.TRACER_NAME)).toURL();
  response = HttpRequests.execute(HttpRequest.get(url).build(), getHttpRequestConfig());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, response.getResponseCode());
  Map<String, List<String>> tracerData = GSON.fromJson(response.getResponseBodyAsString(), new TypeToken<Map<String, List<String>>>() {
  }.getType());
  Assert.assertEquals(Collections.singletonMap(PreviewTestAppWithPlugin.TRACER_KEY, Collections.singletonList(expectedOutput)), tracerData);
}
Aggregations