Use of io.cdap.cdap.etl.proto.ArtifactSelectorConfig in project cdap by caskdata.
The class ETLStage, method upgradeStage.
// Used by the UpgradeTool to upgrade a 3.4.x stage to 3.5.x, which may include an update of the plugin artifact.
@Deprecated
public ETLStage upgradeStage(UpgradeContext upgradeContext) {
  ArtifactSelectorConfig artifactSelectorConfig =
    upgradeContext.getPluginArtifact(plugin.getType(), plugin.getName());
  io.cdap.cdap.etl.proto.v2.ETLPlugin etlPlugin = new io.cdap.cdap.etl.proto.v2.ETLPlugin(
    plugin.getName(), plugin.getType(), plugin.getProperties(),
    artifactSelectorConfig, plugin.getLabel());
  return new io.cdap.cdap.etl.proto.v2.ETLStage(name, etlPlugin);
}
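A minimal sketch of driving this method, assuming getPluginArtifact(type, name), as used above, is the context's only lookup; the stage variable oldStage, the artifact name "my-plugins", and the version "1.5.0" are hypothetical:
UpgradeContext upgradeContext = new UpgradeContext() {
  @Override
  public ArtifactSelectorConfig getPluginArtifact(String pluginType, String pluginName) {
    // Hypothetical policy: pin every plugin to a fixed system-scope artifact.
    return new ArtifactSelectorConfig("SYSTEM", "my-plugins", "1.5.0");
  }
};
io.cdap.cdap.etl.proto.v2.ETLStage upgraded = oldStage.upgradeStage(upgradeContext);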
Use of io.cdap.cdap.etl.proto.ArtifactSelectorConfig in project cdap by caskdata.
The class ETLStage, method upgradePlugin.
/**
 * Upgrades the plugin used in the stage.
 * 1. If the plugin uses a fixed version and a plugin artifact with a higher version is found in SYSTEM scope,
 *    use the new plugin.
 * 2. If the plugin uses a version range and a plugin artifact with a higher version is found in SYSTEM scope,
 *    move the upper bound of the range to include the new plugin artifact, and change the plugin scope.
 *    If the new plugin is already in range, leave the range unchanged. (Note: the range is not changed even
 *    if the new plugin is in a different scope.)
 *
 * @param updateContext context providing helper functions such as getPluginArtifacts.
 * @return the updated plugin to use in the upgraded stage, or the current plugin if no upgrade applies.
 */
private ETLPlugin upgradePlugin(ApplicationUpdateContext updateContext) throws Exception {
  // Find the plugin with the maximum version among the available candidates.
  Optional<ArtifactId> newPluginCandidate =
    updateContext.getPluginArtifacts(plugin.getType(), plugin.getName(), null).stream()
      .max(Comparator.comparing(ArtifactId::getVersion));
  if (!newPluginCandidate.isPresent()) {
    // TODO: Consider throwing an exception here.
    return plugin;
  }
  ArtifactId newPlugin = newPluginCandidate.get();
  String newVersion = getUpgradedVersionString(newPlugin);
  // If getUpgradedVersionString returns null, the candidate plugin is not valid for upgrade.
  if (newVersion == null) {
    return plugin;
  }
  ArtifactSelectorConfig newArtifactSelectorConfig =
    new ArtifactSelectorConfig(newPlugin.getScope().name(), newPlugin.getName(), newVersion);
  return new io.cdap.cdap.etl.proto.v2.ETLPlugin(
    plugin.getName(), plugin.getType(), plugin.getProperties(),
    newArtifactSelectorConfig, plugin.getLabel());
}
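To make the rule in the javadoc concrete, here is a simplified, hypothetical model of what getUpgradedVersionString decides; it is not CDAP's implementation, and it compares versions lexicographically only to keep the sketch short (real code would use ArtifactVersion ordering):
// Returns the version string for the upgraded plugin, or null if the
// candidate does not justify an upgrade. A range is written "[lower,upper)".
static String upgradedVersionString(String currentVersion, String candidateVersion) {
  boolean isRange = currentVersion.startsWith("[") || currentVersion.startsWith("(");
  if (!isRange) {
    // Case 1: fixed version. Upgrade only to a strictly newer candidate.
    return candidateVersion.compareTo(currentVersion) > 0 ? candidateVersion : null;
  }
  String upper = currentVersion.substring(1, currentVersion.length() - 1).split(",")[1].trim();
  if (candidateVersion.compareTo(upper) > 0) {
    // Case 2: candidate lies above the range. Widen the upper bound to include it.
    return currentVersion.substring(0, currentVersion.indexOf(',')) + "," + candidateVersion + "]";
  }
  // Candidate already within range: keep the current plugin unchanged.
  return null;
}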
Use of io.cdap.cdap.etl.proto.ArtifactSelectorConfig in project cdap by caskdata.
The class ETLStage, method upgradeStage.
public io.cdap.cdap.etl.proto.v2.ETLStage upgradeStage(String type, UpgradeContext upgradeContext) {
  if (errorDatasetName != null) {
    throw new IllegalStateException(String.format(
      "Cannot upgrade stage '%s'. Error datasets have been replaced by error collectors. "
        + "Please connect stage '%s' to an error collector, then connect the error collector to a sink.",
      name, name));
  }
  ArtifactSelectorConfig artifactSelectorConfig = upgradeContext.getPluginArtifact(type, plugin.getName());
  if (artifactSelectorConfig == null) {
    artifactSelectorConfig = plugin.getArtifact();
  }
  io.cdap.cdap.etl.proto.v2.ETLPlugin etlPlugin = new io.cdap.cdap.etl.proto.v2.ETLPlugin(
    plugin.getName(), type, plugin.getProperties(), artifactSelectorConfig);
  return new io.cdap.cdap.etl.proto.v2.ETLStage(name, etlPlugin);
}
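The exception points at the replacement wiring. A hedged sketch of that post-upgrade shape using the v2 builder; the error collector plugin name "ErrorCollector" and type "errortransform" are assumptions, and parsePlugin/errorSinkPlugin are hypothetical stage plugins:
// Hypothetical error collector stage; name, type, and null artifact are assumptions.
io.cdap.cdap.etl.proto.v2.ETLPlugin collectorPlugin = new io.cdap.cdap.etl.proto.v2.ETLPlugin(
  "ErrorCollector", "errortransform", Collections.emptyMap(), null);
ETLBatchConfig upgraded = ETLBatchConfig.builder()
  // The stage that used to write to an error dataset.
  .addStage(new io.cdap.cdap.etl.proto.v2.ETLStage("parse", parsePlugin))
  // Its errors now flow through an error collector into a sink.
  .addStage(new io.cdap.cdap.etl.proto.v2.ETLStage("errors", collectorPlugin))
  .addStage(new io.cdap.cdap.etl.proto.v2.ETLStage("errorSink", errorSinkPlugin))
  .addConnection("parse", "errors")
  .addConnection("errors", "errorSink")
  .build();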
Use of io.cdap.cdap.etl.proto.ArtifactSelectorConfig in project cdap by caskdata.
The class DataPipelineConnectionTest, method testConnectionSpec.
@Test
public void testConnectionSpec() throws Exception {
  File directory = TEMP_FOLDER.newFolder();
  String conn = "test_connection2";
  // In setup we add "-mocks" as the suffix for the artifact id.
  ConnectionCreationRequest creationRequest = new ConnectionCreationRequest(
    "", new PluginInfo(FileConnector.NAME, Connector.PLUGIN_TYPE, null, Collections.emptyMap(),
                       new ArtifactSelectorConfig("system", APP_ARTIFACT_ID.getArtifact() + "-mocks",
                                                  APP_ARTIFACT_ID.getVersion())));
  addConnection(conn, creationRequest);
  ConnectorDetail connectorDetail = getConnectionSpec(conn, directory.getCanonicalPath(), null, null);
  Assert.assertTrue(connectorDetail.getRelatedPlugins().size() > 1);
  connectorDetail = getConnectionSpec(conn, directory.getCanonicalPath(), "dummyPlugin", "batchsource");
  Assert.assertEquals(0, connectorDetail.getRelatedPlugins().size());
  connectorDetail = getConnectionSpec(conn, directory.getCanonicalPath(), "", "batchsource");
  Assert.assertEquals(1, connectorDetail.getRelatedPlugins().size());
  deleteConnection(conn);
}
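For reference, the two ArtifactSelectorConfig shapes this page exercises: the no-arg form (used in the macro test below) matches any artifact, while the (scope, name, version) form narrows the match. The name and version here are hypothetical, and the version may also be a range string per the upgrade logic above:
// Matches any available artifact for the plugin.
ArtifactSelectorConfig matchAny = new ArtifactSelectorConfig();
// Pins the plugin to a specific system-scope artifact (hypothetical name/version).
ArtifactSelectorConfig pinned = new ArtifactSelectorConfig("system", "my-artifact-mocks", "1.0.0");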
Use of io.cdap.cdap.etl.proto.ArtifactSelectorConfig in project cdap by caskdata.
The class DataPipelineConnectionTest, method testConnectionsWithPluginMacros.
private void testConnectionsWithPluginMacros(Engine engine) throws Exception {
  String sourceConnName = "sourceConnPluginMacros " + engine;
  String transformConnName = "transformConnPluginMacros " + engine;
  String sinkConnName = "sinkConnPluginMacros " + engine;
  String srcTableName = "srcPluginMacros" + engine;
  String sinkTableName = "sinkPluginMacros" + engine;
  addConnection(sourceConnName, new ConnectionCreationRequest(
    "", new PluginInfo("test", "dummy", null,
                       Collections.singletonMap("tableName", "${srcTable}"), new ArtifactSelectorConfig())));
  addConnection(transformConnName, new ConnectionCreationRequest(
    "", new PluginInfo("test", "dummy", null,
                       ImmutableMap.of("plugin1", "${plugin1}", "plugin1Type", "${plugin1Type}"),
                       new ArtifactSelectorConfig())));
  addConnection(sinkConnName, new ConnectionCreationRequest(
    "", new PluginInfo("test", "dummy", null,
                       Collections.singletonMap("tableName", "${sinkTable}"), new ArtifactSelectorConfig())));
  // source -> pluginValidation transform -> sink
  ETLBatchConfig config = ETLBatchConfig.builder()
    .setEngine(engine)
    .addStage(new ETLStage("source", MockSource.getPluginUsingConnection(sourceConnName)))
    .addStage(new ETLStage("transform", PluginValidationTransform.getPluginUsingConnection(
      transformConnName, "${plugin2}", "${plugin2Type}")))
    .addStage(new ETLStage("sink", MockSink.getPluginUsingConnection(sinkConnName)))
    .addConnection("source", "transform")
    .addConnection("transform", "sink")
    .build();
  // runtime arguments
  Map<String, String> runtimeArguments = ImmutableMap.<String, String>builder()
    .put("srcTable", srcTableName)
    .put("sinkTable", sinkTableName)
    .put("plugin1", "Identity")
    .put("plugin1Type", Transform.PLUGIN_TYPE)
    .put("plugin2", "Double")
    .put("plugin2Type", Transform.PLUGIN_TYPE)
    .build();
  Schema schema = Schema.recordOf("x", Schema.Field.of("name", Schema.of(Schema.Type.STRING)));
  StructuredRecord samuel = StructuredRecord.builder(schema).set("name", "samuel").build();
  StructuredRecord dwayne = StructuredRecord.builder(schema).set("name", "dwayne").build();
  addDatasetInstance(NamespaceId.DEFAULT.dataset(srcTableName), Table.class.getName());
  DataSetManager<Table> sourceTable = getDataset(srcTableName);
  MockSource.writeInput(sourceTable, ImmutableList.of(samuel, dwayne));
  // Verify that preview can run successfully using connections.
  PreviewManager previewManager = getPreviewManager();
  PreviewConfig previewConfig = new PreviewConfig(SmartWorkflow.NAME, ProgramType.WORKFLOW, runtimeArguments, 10);
  // Start the preview and get the corresponding PreviewRunner.
  ApplicationId previewId =
    previewManager.start(NamespaceId.DEFAULT, new AppRequest<>(APP_ARTIFACT, config, previewConfig));
  // Wait for the preview status to go into COMPLETED.
  Tasks.waitFor(PreviewStatus.Status.COMPLETED, () -> {
    PreviewStatus status = previewManager.getStatus(previewId);
    return status == null ? null : status.getStatus();
  }, 5, TimeUnit.MINUTES);
  AppRequest<ETLBatchConfig> appRequest = new AppRequest<>(APP_ARTIFACT, config);
  ApplicationId appId = NamespaceId.DEFAULT.app("testConnectionsWithPluginMacros" + engine);
  ApplicationManager appManager = deployApplication(appId, appRequest);
  // Start the actual pipeline run.
  WorkflowManager manager = appManager.getWorkflowManager(SmartWorkflow.NAME);
  manager.startAndWaitForGoodRun(runtimeArguments, ProgramRunStatus.COMPLETED, 3, TimeUnit.MINUTES);
  DataSetManager<Table> sinkTable = getDataset(sinkTableName);
  List<StructuredRecord> outputRecords = MockSink.readOutput(sinkTable);
  Assert.assertEquals(ImmutableSet.of(dwayne, samuel), new HashSet<>(outputRecords));
  deleteConnection(sourceConnName);
  deleteConnection(sinkConnName);
  deleteConnection(transformConnName);
  deleteDatasetInstance(NamespaceId.DEFAULT.dataset(srcTableName));
  deleteDatasetInstance(NamespaceId.DEFAULT.dataset(sinkTableName));
}
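The connection properties above are macros such as ${srcTable} that get resolved from the run's arguments; a hypothetical stand-in for that substitution (not CDAP's macro evaluator):
static String resolveMacros(String value, Map<String, String> runtimeArguments) {
  // Replace each ${key} token with its runtime-argument value.
  for (Map.Entry<String, String> e : runtimeArguments.entrySet()) {
    value = value.replace("${" + e.getKey() + "}", e.getValue());
  }
  return value;
}
// e.g. resolveMacros("${srcTable}", runtimeArguments) yields srcTableName.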