Use of io.cdap.cdap.etl.proto.ArtifactSelectorConfig in project cdap by caskdata.
The class ConnectionStoreTest, method testConflict.
@Test
public void testConflict() throws Exception {
  // put a connection in the store
  NamespaceSummary namespace = new NamespaceSummary("default", "", 0L);
  ConnectionId connectionId = new ConnectionId(namespace, "my_conn");
  Connection connection = new Connection("my_conn", "GCS", "GCS connection", false, false, 0L, 0L,
    new PluginInfo("GCS", "connector", "Google Cloud Platform", ImmutableMap.of("project", "abc"),
      new ArtifactSelectorConfig("SYSTEM", "google-cloud", "1.0.0")));
  connectionStore.saveConnection(connectionId, connection, false);

  // a different name that evaluates to the same id, saved with overwrite set to false, should fail
  try {
    connectionId = new ConnectionId(namespace, "my conn");
    connection = new Connection("my conn", "GCS", "GCS connection", false, false, 0L, 0L,
      new PluginInfo("GCS", "connector", "Google Cloud Platform", ImmutableMap.of("project", "abc"),
        new ArtifactSelectorConfig("SYSTEM", "google-cloud", "1.0.0")));
    connectionStore.saveConnection(connectionId, connection, false);
    Assert.fail();
  } catch (ConnectionConflictException e) {
    // expected
  }

  // updating with the same name should also fail
  try {
    connectionId = new ConnectionId(namespace, "my_conn");
    connection = new Connection("my conn", "GCS", "GCS connection", false, false, 0L, 0L,
      new PluginInfo("GCS", "connector", "Google Cloud Platform", ImmutableMap.of("project", "abc"),
        new ArtifactSelectorConfig("SYSTEM", "google-cloud", "1.0.0")));
    connectionStore.saveConnection(connectionId, connection, false);
    Assert.fail();
  } catch (ConnectionConflictException e) {
    // expected
  }

  // check that a pre-configured connection cannot be updated, even with overwrite set to true
  connectionId = new ConnectionId(namespace, "default conn");
  connection = new Connection("default conn", "GCS", "GCS connection", true, false, 0L, 0L,
    new PluginInfo("GCS", "connector", "Google Cloud Platform", ImmutableMap.of("project", "abc"),
      new ArtifactSelectorConfig("SYSTEM", "google-cloud", "1.0.0")));
  connectionStore.saveConnection(connectionId, connection, false);
  try {
    connection = new Connection("default conn", "BigQuery", "", false, false, 0L, 0L,
      new PluginInfo("BigQuery", "connector", "", Collections.emptyMap(), new ArtifactSelectorConfig()));
    connectionStore.saveConnection(connectionId, connection, true);
    Assert.fail();
  } catch (ConnectionConflictException e) {
    // expected
  }

  // and a pre-configured connection cannot be deleted
  try {
    connectionStore.deleteConnection(connectionId);
    Assert.fail();
  } catch (ConnectionConflictException e) {
    // expected
  }
}
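The conflict between "my conn" and "my_conn" above hinges on how ConnectionId canonicalizes a display name into an id. A minimal sketch of that kind of normalization, using a hypothetical canonicalize helper (the real logic lives inside ConnectionId and may differ in detail):

// Hypothetical sketch: both spellings normalize to the same id, which is why
// the second save above conflicts with the first.
static String canonicalize(String name) {
  // trim, then replace every non-alphanumeric character with an underscore
  return name.trim().replaceAll("[^a-zA-Z0-9]", "_");
}
// canonicalize("my conn") -> "my_conn"
// canonicalize("my_conn") -> "my_conn"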
Use of io.cdap.cdap.etl.proto.ArtifactSelectorConfig in project cdap by caskdata.
The class TrackedPluginSelector, method select.
@Nullable
@Override
public Map.Entry<ArtifactId, PluginClass> select(SortedMap<ArtifactId, PluginClass> plugins) {
  // record the latest available artifact as an upgrade suggestion
  ArtifactId latestArtifact = plugins.tailMap(plugins.lastKey()).entrySet().iterator().next().getKey();
  suggestion = new ArtifactSelectorConfig(latestArtifact.getScope().name(), latestArtifact.getName(),
    latestArtifact.getVersion().getVersion());
  // let the delegate make the actual selection and remember what it chose
  Map.Entry<ArtifactId, PluginClass> selected = delegate.select(plugins);
  selectedArtifact = selected == null ? null : selected.getKey();
  return selected;
}
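Since the map is sorted, tailMap(plugins.lastKey()) is a one-entry view starting at the greatest key, so the latestArtifact expression is equivalent to the simpler form below (shown only as an observation; the project's code uses the longer form):

// Equivalent, simpler way to read the greatest (latest) key of a SortedMap:
ArtifactId latestArtifact = plugins.lastKey();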
Use of io.cdap.cdap.etl.proto.ArtifactSelectorConfig in project cdap by caskdata.
The class SampleResponseCodecTest, method testCodec.
@Test
public void testCodec() throws Exception {
  // schema with all types
  Schema schema = Schema.recordOf(
    "schema",
    Schema.Field.of("f1", Schema.of(Schema.Type.INT)),
    Schema.Field.of("f2", Schema.of(Schema.Type.STRING)),
    Schema.Field.of("f3", Schema.of(Schema.Type.LONG)),
    Schema.Field.of("f4", Schema.of(Schema.Type.DOUBLE)),
    Schema.Field.of("f5", Schema.of(Schema.Type.BYTES)),
    Schema.Field.of("f6", Schema.of(Schema.Type.BOOLEAN)),
    Schema.Field.of("f7", Schema.of(Schema.Type.FLOAT)),
    Schema.Field.of("f8", Schema.of(Schema.LogicalType.DATE)),
    Schema.Field.of("f9", Schema.of(Schema.LogicalType.TIMESTAMP_MICROS)),
    Schema.Field.of("f10", Schema.of(Schema.LogicalType.TIMESTAMP_MILLIS)),
    Schema.Field.of("f11", Schema.of(Schema.LogicalType.TIME_MICROS)),
    Schema.Field.of("f12", Schema.of(Schema.LogicalType.TIME_MILLIS)),
    Schema.Field.of("f13", Schema.decimalOf(3, 2)),
    Schema.Field.of("f14", Schema.of(Schema.LogicalType.DATETIME)),
    Schema.Field.of("n1", Schema.nullableOf(Schema.of(Schema.Type.INT))),
    Schema.Field.of("n2", Schema.nullableOf(Schema.of(Schema.Type.STRING))),
    Schema.Field.of("n3", Schema.nullableOf(Schema.of(Schema.Type.LONG))),
    Schema.Field.of("n4", Schema.nullableOf(Schema.of(Schema.Type.DOUBLE))),
    Schema.Field.of("n5", Schema.nullableOf(Schema.of(Schema.Type.BYTES))),
    Schema.Field.of("n6", Schema.nullableOf(Schema.of(Schema.Type.BOOLEAN))),
    Schema.Field.of("n7", Schema.nullableOf(Schema.of(Schema.Type.FLOAT))),
    Schema.Field.of("n8", Schema.nullableOf(Schema.of(Schema.LogicalType.DATE))),
    Schema.Field.of("n9", Schema.nullableOf(Schema.of(Schema.LogicalType.TIMESTAMP_MICROS))),
    Schema.Field.of("n10", Schema.nullableOf(Schema.of(Schema.LogicalType.TIMESTAMP_MILLIS))),
    Schema.Field.of("n11", Schema.nullableOf(Schema.of(Schema.LogicalType.TIME_MICROS))),
    Schema.Field.of("n12", Schema.nullableOf(Schema.of(Schema.LogicalType.TIME_MILLIS))),
    Schema.Field.of("n13", Schema.nullableOf(Schema.decimalOf(3, 2))),
    Schema.Field.of("n14", Schema.nullableOf(Schema.of(Schema.LogicalType.DATETIME))));

  // record1: all nullable fields are null
  StructuredRecord record1 = StructuredRecord.builder(schema)
    .set("f1", 1).set("f2", "aaa").set("f3", 1L).set("f4", 0d)
    .set("f5", ByteBuffer.wrap("test".getBytes(Charsets.UTF_8)))
    .set("f6", true).set("f7", 0f)
    .setDate("f8", LocalDate.now())
    .setTimestamp("f9", ZonedDateTime.now())
    .setTimestamp("f10", ZonedDateTime.now())
    .setTime("f11", LocalTime.now())
    .setTime("f12", LocalTime.now())
    .set("f13", ByteBuffer.wrap(new BigDecimal(new BigInteger("111"), 2).unscaledValue().toByteArray()))
    .setDateTime("f14", LocalDateTime.now())
    .build();

  // record2: all fields are filled
  StructuredRecord record2 = StructuredRecord.builder(schema)
    .set("f1", 1).set("f2", "aaa").set("f3", 1L).set("f4", 0d)
    .set("f5", ByteBuffer.wrap("test".getBytes(Charsets.UTF_8)))
    .set("f6", true).set("f7", 0f)
    .setDate("f8", LocalDate.now())
    .setTimestamp("f9", ZonedDateTime.now())
    .setTimestamp("f10", ZonedDateTime.now())
    .setTime("f11", LocalTime.now())
    .setTime("f12", LocalTime.now())
    .set("f13", ByteBuffer.wrap(new BigDecimal(new BigInteger("111"), 2).unscaledValue().toByteArray()))
    .setDateTime("f14", LocalDateTime.now())
    .set("n1", 1).set("n2", "aaa").set("n3", 1L).set("n4", 0d)
    .set("n5", ByteBuffer.wrap("test".getBytes(Charsets.UTF_8)))
    .set("n6", true).set("n7", 0f)
    .setDate("n8", LocalDate.now())
    .setTimestamp("n9", ZonedDateTime.now())
    .setTimestamp("n10", ZonedDateTime.now())
    .setTime("n11", LocalTime.now())
    .setTime("n12", LocalTime.now())
    .set("n13", ByteBuffer.wrap(new BigDecimal(new BigInteger("111"), 2).unscaledValue().toByteArray()))
    .setDateTime("n14", LocalDateTime.now())
    .build();

  List<StructuredRecord> sample = ImmutableList.of(record1, record2);
  SampleResponse sampleResponse = new SampleResponse(
    new ConnectorDetail(ImmutableSet.of(
      new PluginDetail("file", "batchsource", ImmutableMap.of("k1", "v1", "k2", "v2"),
        new ArtifactSelectorConfig(), schema))),
    schema, sample);
  String jsonString = GSON.toJson(sampleResponse);
  SampleResponse deserialized = GSON.fromJson(jsonString, SampleResponse.class);
  Assert.assertEquals(sampleResponse, deserialized);
}
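The GSON instance used for the round trip has to be built with the codec under test; a minimal sketch of a plausible setup, assuming SampleResponseCodec is registered as a Gson type adapter for SampleResponse (the real test's wiring may differ):

// Hypothetical setup sketch: register the codec so the response, including its
// StructuredRecord sample, survives a JSON round trip.
private static final Gson GSON = new GsonBuilder()
  .registerTypeAdapter(SampleResponse.class, new SampleResponseCodec())
  .create();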
Use of io.cdap.cdap.etl.proto.ArtifactSelectorConfig in project cdap by caskdata.
The class ETLBatchConfigTest, method testUpgrade (upgrading to the v2 proto).
@Test
public void testUpgrade() throws Exception {
  final ArtifactSelectorConfig artifact = new ArtifactSelectorConfig("SYSTEM", "universal", "1.0.0");
  ETLStage source = new ETLStage("source", new Plugin("DataGenerator", ImmutableMap.of("p1", "v1"), artifact), null);
  io.cdap.cdap.etl.proto.v2.ETLStage sourceNew = from(source, BatchSource.PLUGIN_TYPE);

  ETLStage transform1 = new ETLStage("transform1", new Plugin("Script", ImmutableMap.of("script", "something"), null));
  io.cdap.cdap.etl.proto.v2.ETLStage transform1New = from(transform1, Transform.PLUGIN_TYPE);
  ETLStage transform2 = new ETLStage("transform2", new Plugin("Script", null, null));
  io.cdap.cdap.etl.proto.v2.ETLStage transform2New = from(transform2, Transform.PLUGIN_TYPE);
  ETLStage transform3 = new ETLStage("transform3", new Plugin("Validator", ImmutableMap.of("p1", "v1", "p2", "v2")), null);
  io.cdap.cdap.etl.proto.v2.ETLStage transform3New = from(transform3, Transform.PLUGIN_TYPE);

  ETLStage sink1 = new ETLStage("sink1", new Plugin("Table", ImmutableMap.of("rowkey", "xyz"), artifact), null);
  io.cdap.cdap.etl.proto.v2.ETLStage sink1New = from(sink1, BatchSink.PLUGIN_TYPE);
  ETLStage sink2 = new ETLStage("sink2", new Plugin("HDFS", ImmutableMap.of("name", "abc"), artifact), null);
  io.cdap.cdap.etl.proto.v2.ETLStage sink2New = from(sink2, BatchSink.PLUGIN_TYPE);

  Set<Connection> connections = new HashSet<>();
  connections.add(new Connection(sourceNew.getName(), transform1New.getName()));
  connections.add(new Connection(transform1New.getName(), transform2New.getName()));
  connections.add(new Connection(transform2New.getName(), transform3New.getName()));
  connections.add(new Connection(transform3New.getName(), sink1New.getName()));
  connections.add(new Connection(transform3New.getName(), sink2New.getName()));

  String schedule = "*/5 * * * *";
  Resources resources = new Resources(1024, 1);
  ETLBatchConfig config = ETLBatchConfig.builder(schedule)
    .setSource(source)
    .addSink(sink1)
    .addSink(sink2)
    .addTransform(transform1)
    .addTransform(transform2)
    .addTransform(transform3)
    .addConnections(connections)
    .setResources(resources)
    .setDriverResources(resources)
    .build();
  io.cdap.cdap.etl.proto.v2.ETLBatchConfig configNew = io.cdap.cdap.etl.proto.v2.ETLBatchConfig.builder(schedule)
    .addStage(sourceNew)
    .addStage(sink1New)
    .addStage(sink2New)
    .addStage(transform1New)
    .addStage(transform2New)
    .addStage(transform3New)
    .addConnections(connections)
    .setResources(resources)
    .setDriverResources(resources)
    .build();

  Assert.assertEquals(configNew, config.upgrade(new UpgradeContext() {
    @Nullable
    @Override
    public ArtifactSelectorConfig getPluginArtifact(String pluginType, String pluginName) {
      return null;
    }
  }));
}
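The from(...) helper used above is defined elsewhere in the test class; a minimal sketch of what such a conversion could look like, assuming the v2 ETLStage wraps an ETLPlugin carrying the stage's plugin type (a hypothetical reconstruction, not the project's actual helper):

// Hypothetical reconstruction of the conversion helper: carry the old stage's
// plugin name, properties, and artifact into a v2 stage of the given plugin type.
private static io.cdap.cdap.etl.proto.v2.ETLStage from(ETLStage stage, String pluginType) {
  Plugin plugin = stage.getPlugin();
  return new io.cdap.cdap.etl.proto.v2.ETLStage(
    stage.getName(),
    new ETLPlugin(plugin.getName(), pluginType, plugin.getProperties(), plugin.getArtifact()));
}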
Use of io.cdap.cdap.etl.proto.ArtifactSelectorConfig in project cdap by caskdata.
The class ETLBatchConfigTest, method testUpgrade (upgrading to the v1 proto).
@Test
public void testUpgrade() throws Exception {
  final ArtifactSelectorConfig artifact = new ArtifactSelectorConfig("SYSTEM", "universal", "1.0.0");
  ETLStage source = new ETLStage("DataGenerator", ImmutableMap.of("p1", "v1"), null);
  io.cdap.cdap.etl.proto.v1.ETLStage sourceNew = new io.cdap.cdap.etl.proto.v1.ETLStage(
    "DataGenerator.1", new Plugin(source.getName(), source.getProperties(), artifact), source.getErrorDatasetName());
  ETLStage transform1 = new ETLStage("Script", ImmutableMap.of("script", "something"), null);
  io.cdap.cdap.etl.proto.v1.ETLStage transform1New = new io.cdap.cdap.etl.proto.v1.ETLStage(
    "Script.2", new Plugin(transform1.getName(), transform1.getProperties(), artifact), transform1.getErrorDatasetName());
  ETLStage transform2 = new ETLStage("Script", null, null);
  io.cdap.cdap.etl.proto.v1.ETLStage transform2New = new io.cdap.cdap.etl.proto.v1.ETLStage(
    "Script.3", new Plugin(transform2.getName(), transform2.getProperties(), artifact), transform2.getErrorDatasetName());
  ETLStage transform3 = new ETLStage("Validator", ImmutableMap.of("p1", "v1", "p2", "v2"), "errorDS");
  io.cdap.cdap.etl.proto.v1.ETLStage transform3New = new io.cdap.cdap.etl.proto.v1.ETLStage(
    "Validator.4", new Plugin(transform3.getName(), transform3.getProperties(), artifact), transform3.getErrorDatasetName());
  ETLStage sink1 = new ETLStage("Table", ImmutableMap.of("rowkey", "xyz"), null);
  io.cdap.cdap.etl.proto.v1.ETLStage sink1New = new io.cdap.cdap.etl.proto.v1.ETLStage(
    "Table.5", new Plugin(sink1.getName(), sink1.getProperties(), artifact), sink1.getErrorDatasetName());
  ETLStage sink2 = new ETLStage("HDFS", ImmutableMap.of("name", "abc"), null);
  io.cdap.cdap.etl.proto.v1.ETLStage sink2New = new io.cdap.cdap.etl.proto.v1.ETLStage(
    "HDFS.6", new Plugin(sink2.getName(), sink2.getProperties(), artifact), sink2.getErrorDatasetName());
  ETLStage action = new ETLStage("Email", ImmutableMap.of("email", "slj@example.com"), null);
  io.cdap.cdap.etl.proto.v1.ETLStage actionNew = new io.cdap.cdap.etl.proto.v1.ETLStage(
    "Email.1", new Plugin(action.getName(), action.getProperties(), artifact), action.getErrorDatasetName());

  List<Connection> connections = new ArrayList<>();
  connections.add(new Connection(sourceNew.getName(), transform1New.getName()));
  connections.add(new Connection(transform1New.getName(), transform2New.getName()));
  connections.add(new Connection(transform2New.getName(), transform3New.getName()));
  connections.add(new Connection(transform3New.getName(), sink1New.getName()));
  connections.add(new Connection(transform3New.getName(), sink2New.getName()));

  String schedule = "*/5 * * * *";
  Resources resources = new Resources(1024, 1);
  ETLBatchConfig config = new ETLBatchConfig(schedule, source, ImmutableList.of(sink1, sink2),
    ImmutableList.of(transform1, transform2, transform3), resources, ImmutableList.of(action));
  io.cdap.cdap.etl.proto.v1.ETLBatchConfig configNew = io.cdap.cdap.etl.proto.v1.ETLBatchConfig.builder(schedule)
    .setSource(sourceNew)
    .addSink(sink1New)
    .addSink(sink2New)
    .addTransform(transform1New)
    .addTransform(transform2New)
    .addTransform(transform3New)
    .addConnections(connections)
    .setResources(resources)
    .setDriverResources(resources)
    .addAction(actionNew)
    .build();

  Assert.assertEquals(configNew, config.upgrade(new UpgradeContext() {
    @Nullable
    @Override
    public ArtifactSelectorConfig getPluginArtifact(String pluginType, String pluginName) {
      return new ArtifactSelectorConfig(ArtifactScope.SYSTEM.name(), "universal", "1.0.0");
    }
  }));
}
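Unlike the previous example, the UpgradeContext here supplies a concrete artifact for every plugin, and the expected stage names show the upgrade's naming scheme: each stage is renamed to pluginName.N with a running number (actions apparently numbered separately, e.g. Email.1). A minimal sketch of that scheme, as a hypothetical helper reconstructed from the expected values rather than the project's actual upgrade code:

// Hypothetical sketch of the naming scheme visible in the expected stages:
// "DataGenerator.1", "Script.2", "Script.3", "Validator.4", "Table.5", "HDFS.6".
private static String upgradedStageName(String pluginName, int stageNumber) {
  return pluginName + '.' + stageNumber;
}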