use of com.google.cloud.teleport.spanner.common.Type in project DataflowTemplates by GoogleCloudPlatform.
the class AvroSchemaToDdlConverter method toTable.
public Table toTable(String tableName, Schema schema) {
  if (tableName == null) {
    tableName = schema.getName();
  }
  LOG.debug("Converting to Ddl tableName {}", tableName);
  Table.Builder table = Table.builder(dialect);
  table.name(tableName);
  for (Schema.Field f : schema.getFields()) {
    Column.Builder column = table.column(f.name());
    String sqlType = f.getProp("sqlType");
    String expression = f.getProp("generationExpression");
    if (expression != null) {
      // This is a generated column.
      if (Strings.isNullOrEmpty(sqlType)) {
        throw new IllegalArgumentException(
            "Property sqlType is missing for generated column " + f.name());
      }
      String notNull = f.getProp("notNull");
      if (notNull == null) {
        throw new IllegalArgumentException(
            "Property notNull is missing for generated column " + f.name());
      }
      column.parseType(sqlType).notNull(Boolean.parseBoolean(notNull)).generatedAs(expression);
      String stored = f.getProp("stored");
      if (stored == null) {
        throw new IllegalArgumentException(
            "Property stored is missing for generated column " + f.name());
      }
      if (Boolean.parseBoolean(stored)) {
        column.stored();
      }
    } else {
      boolean nullable = false;
      Schema avroType = f.schema();
      if (avroType.getType() == Schema.Type.UNION) {
        Schema unpacked = unpackNullable(avroType);
        nullable = unpacked != null;
        if (nullable) {
          avroType = unpacked;
        }
      }
      if (Strings.isNullOrEmpty(sqlType)) {
        Type spannerType = inferType(avroType, true);
        sqlType = toString(spannerType, true);
      }
      String defaultExpression = f.getProp("defaultExpression");
      column.parseType(sqlType).notNull(!nullable).defaultExpression(defaultExpression);
    }
    ImmutableList.Builder<String> columnOptions = ImmutableList.builder();
    for (int i = 0; ; i++) {
      String spannerOption = f.getProp("spannerOption_" + i);
      if (spannerOption == null) {
        break;
      }
      columnOptions.add(spannerOption);
    }
    column.columnOptions(columnOptions.build());
    column.endColumn();
  }
  for (int i = 0; ; i++) {
    String spannerPrimaryKey = schema.getProp("spannerPrimaryKey_" + i);
    if (spannerPrimaryKey == null) {
      break;
    }
    if (spannerPrimaryKey.endsWith(" ASC")) {
      String name = spannerPrimaryKey.substring(0, spannerPrimaryKey.length() - 4);
      table.primaryKey().asc(unescape(name, dialect)).end();
    } else if (spannerPrimaryKey.endsWith(" DESC")) {
      String name = spannerPrimaryKey.substring(0, spannerPrimaryKey.length() - 5);
      table.primaryKey().desc(unescape(name, dialect)).end();
    } else {
      throw new IllegalArgumentException("Cannot parse spannerPrimaryKey " + spannerPrimaryKey);
    }
  }
  table.indexes(getNumberedPropsWithPrefix(schema, "spannerIndex_"));
  table.foreignKeys(getNumberedPropsWithPrefix(schema, "spannerForeignKey_"));
  table.checkConstraints(getNumberedPropsWithPrefix(schema, "spannerCheckConstraint_"));
  // Table parent options.
  String spannerParent = schema.getProp("spannerParent");
  if (!Strings.isNullOrEmpty(spannerParent)) {
    table.interleaveInParent(spannerParent);
    // Process the on delete action.
    String onDeleteAction = schema.getProp("spannerOnDeleteAction");
    if (onDeleteAction == null) {
      // Preserve behavior for old versions of exporter that did not provide the
      // spannerOnDeleteAction property.
      onDeleteAction = "no action";
    }
    if (onDeleteAction.equals("cascade")) {
      table.onDeleteCascade();
    } else if (!onDeleteAction.equals("no action")) {
      // This is an unknown on delete action.
      throw new IllegalArgumentException("Unsupported ON DELETE action " + onDeleteAction);
    }
  }
  return table.build();
}
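For context, a minimal usage sketch of this converter; the .avsc file name and the Dialect constructor argument are illustrative assumptions, not taken from the snippet above:

// Hedged sketch: parse an exported Avro schema and convert it back into a Spanner table.
// Schema.Parser().parse(File) throws IOException.
Schema avroSchema = new Schema.Parser().parse(new File("users.avsc"));
AvroSchemaToDdlConverter converter = new AvroSchemaToDdlConverter(Dialect.GOOGLE_STANDARD_SQL);
// Passing a null name makes toTable fall back to schema.getName().
Table table = converter.toTable(null, avroSchema);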
use of com.google.cloud.teleport.spanner.common.Type in project DataflowTemplates by GoogleCloudPlatform.
the class SpannerRecordConverterTest method numerics.
@Test
public void numerics() {
  Ddl ddl =
      Ddl.builder()
          .createTable("numerictable")
          .column("id").int64().notNull().endColumn()
          .column("numeric").type(Type.numeric()).endColumn()
          .column("numeric_arr").type(Type.array(Type.numeric())).endColumn()
          .primaryKey().asc("id").end()
          .endTable()
          .build();
  Schema schema = converter.convert(ddl).iterator().next();
  SpannerRecordConverter recordConverter = new SpannerRecordConverter(schema);
  String[] numericArrValues = {null, "-25398514232141142.012479", null, "1999999999.1246"};
  Struct struct =
      Struct.newBuilder()
          .set("id").to(1L)
          .set("numeric").to("-9305028.140032")
          .set("numeric_arr").toStringArray(Lists.newArrayList(numericArrValues))
          .build();
  GenericRecord avroRecord = recordConverter.convert(struct);
  List<ByteBuffer> expectedNumericArr =
      Stream.of(numericArrValues)
          .map(x -> x == null ? null : ByteBuffer.wrap(NumericUtils.stringToBytes(x)))
          .collect(Collectors.toList());
  assertThat(avroRecord.get("id"), equalTo(1L));
  assertThat(avroRecord.get("numeric"), equalTo(ByteBuffer.wrap(NumericUtils.stringToBytes("-9305028.140032"))));
  assertThat(avroRecord.get("numeric_arr"), equalTo(expectedNumericArr));
}
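The expected ByteBuffers above come from NumericUtils.stringToBytes. A minimal sketch of the assumed encoding, under the assumption that GoogleSQL NUMERIC is serialized the way Avro's decimal(38, 9) logical type prescribes (two's-complement bytes of the unscaled value at scale 9); this is an illustration, not the project's actual NumericUtils code:

// Assumed encoding: NUMERIC string -> BigDecimal at scale 9 -> unscaled two's-complement bytes.
static byte[] numericToBytes(String value) {
  return new java.math.BigDecimal(value).setScale(9).unscaledValue().toByteArray();
}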
use of com.google.cloud.teleport.spanner.common.Type in project DataflowTemplates by GoogleCloudPlatform.
the class SpannerRecordConverterTest method pgNumerics.
@Test
public void pgNumerics() {
  Ddl ddl =
      Ddl.builder(Dialect.POSTGRESQL)
          .createTable("numerictable")
          .column("id").pgInt8().notNull().endColumn()
          .column("numeric1").type(Type.pgNumeric()).endColumn()
          .column("numeric2").type(Type.pgNumeric()).endColumn()
          .column("numeric3").type(Type.pgNumeric()).endColumn()
          .column("numeric_arr").type(Type.pgArray(Type.pgNumeric())).endColumn()
          .primaryKey().asc("id").end()
          .endTable()
          .build();
  Schema schema = converter.convert(ddl).iterator().next();
  SpannerRecordConverter recordConverter = new SpannerRecordConverter(schema, Dialect.POSTGRESQL);
  // Build the widest representable PG.NUMERIC values: (PG_MAX_PRECISION - PG_MAX_SCALE)
  // integer digits of 9, a decimal point, then PG_MAX_SCALE fractional digits of 9.
  StringBuilder maxPgNumeric = new StringBuilder();
  StringBuilder minPgNumeric = new StringBuilder("-");
  for (int i = 0; i < NumericUtils.PG_MAX_PRECISION - NumericUtils.PG_MAX_SCALE; i++) {
    maxPgNumeric.append("9");
    minPgNumeric.append("9");
  }
  maxPgNumeric.append(".");
  minPgNumeric.append(".");
  for (int i = 0; i < NumericUtils.PG_MAX_SCALE; i++) {
    maxPgNumeric.append("9");
    minPgNumeric.append("9");
  }
  String[] pgNumericArrValues = {null, "NaN", null, maxPgNumeric.toString(), minPgNumeric.toString()};
  Struct struct =
      Struct.newBuilder()
          .set("id").to(1L)
          .set("numeric1").to("-9305028.140032")
          .set("numeric2").to("-25398514232141142.012479")
          .set("numeric3").to("1999999999.1246")
          .set("numeric_arr").toStringArray(Lists.newArrayList(pgNumericArrValues))
          .build();
  GenericRecord avroRecord = recordConverter.convert(struct);
  List<ByteBuffer> expectedPgNumericArr =
      Stream.of(pgNumericArrValues)
          .map(x -> x == null ? null : ByteBuffer.wrap(NumericUtils.pgStringToBytes(x)))
          .collect(Collectors.toList());
  assertThat(avroRecord.get("id"), equalTo(1L));
  assertThat(avroRecord.get("numeric1"), equalTo(ByteBuffer.wrap(NumericUtils.pgStringToBytes("-9305028.140032"))));
  assertThat(avroRecord.get("numeric2"), equalTo(ByteBuffer.wrap(NumericUtils.pgStringToBytes("-25398514232141142.012479"))));
  assertThat(avroRecord.get("numeric3"), equalTo(ByteBuffer.wrap(NumericUtils.pgStringToBytes("1999999999.1246"))));
  assertThat(avroRecord.get("numeric_arr"), equalTo(expectedPgNumericArr));
}
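Those StringBuilder loops materialize the widest representable PG.NUMERIC (all nines at maximum precision and scale); the array also exercises "NaN", which PG.NUMERIC accepts but GoogleSQL NUMERIC does not. On Java 11+ the same boundary strings can be built more directly:

// Equivalent boundary construction using String.repeat (Java 11+).
String intDigits = "9".repeat(NumericUtils.PG_MAX_PRECISION - NumericUtils.PG_MAX_SCALE);
String fracDigits = "9".repeat(NumericUtils.PG_MAX_SCALE);
String maxPgNumeric = intDigits + "." + fracDigits;
String minPgNumeric = "-" + maxPgNumeric;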
use of com.google.cloud.teleport.spanner.common.Type in project DataflowTemplates by GoogleCloudPlatform.
the class SpannerTableFilterTest method individualTableSelection_selectsOnlyChosenTable.
@Test
public void individualTableSelection_selectsOnlyChosenTable() throws Exception {
  Ddl ddl =
      Ddl.builder()
          .createTable("Users")
          .column("first_name").string().max().endColumn()
          .column("last_name").string().size(5).endColumn()
          .column("age").int64().endColumn()
          .primaryKey().asc("first_name").desc("last_name").end()
          .endTable()
          .createTable("AllTYPES")
          .column("first_name").string().max().endColumn()
          .column("last_name").string().size(5).endColumn()
          .column("id").int64().notNull().endColumn()
          .column("bool_field").bool().endColumn()
          .column("int64_field").int64().endColumn()
          .column("float64_field").float64().endColumn()
          .column("string_field").string().max().endColumn()
          .column("bytes_field").bytes().max().endColumn()
          .column("timestamp_field").timestamp().endColumn()
          .column("date_field").date().endColumn()
          .column("arr_bool_field").type(Type.array(Type.bool())).endColumn()
          .column("arr_int64_field").type(Type.array(Type.int64())).endColumn()
          .column("arr_float64_field").type(Type.array(Type.float64())).endColumn()
          .column("arr_string_field").type(Type.array(Type.string())).max().endColumn()
          .column("arr_bytes_field").type(Type.array(Type.bytes())).max().endColumn()
          .column("arr_timestamp_field").type(Type.array(Type.timestamp())).endColumn()
          .column("arr_date_field").type(Type.array(Type.date())).endColumn()
          .primaryKey().asc("first_name").desc("last_name").asc("id").end()
          .interleaveInParent("Users")
          .onDeleteCascade()
          .endTable()
          .build();
  List<String> filteredTables =
      getFilteredTables(ddl, ImmutableList.of(usersTable)).stream()
          .map(t -> t.name())
          .collect(Collectors.toList());
  List<String> expectedFilteredTables = ImmutableList.of(usersTable);
  assertEquals(expectedFilteredTables, filteredTables);
}
use of com.google.cloud.teleport.spanner.common.Type in project DataflowTemplates by GoogleCloudPlatform.
the class SpannerTableFilterTest method filterWithAllAncestorsSelection_selectsChosenTableWithAllParents.
@Test
public void filterWithAllAncestorsSelection_selectsChosenTableWithAllParents() throws Exception {
  Ddl ddl =
      Ddl.builder()
          .createTable("table_c")
          .column("first_name").string().max().endColumn()
          .column("last_name").string().size(5).endColumn()
          .column("age").int64().endColumn()
          .primaryKey().asc("first_name").desc("last_name").end()
          .endTable()
          .createTable("table_b")
          .column("first_name").string().max().endColumn()
          .column("last_name").string().size(5).endColumn()
          .column("id").int64().notNull().endColumn()
          .column("bool_field").bool().endColumn()
          .column("int64_field").int64().endColumn()
          .column("float64_field").float64().endColumn()
          .primaryKey().asc("first_name").desc("last_name").asc("id").end()
          .interleaveInParent("table_c")
          .onDeleteCascade()
          .endTable()
          .createTable("table_a")
          .column("first_name").string().max().endColumn()
          .column("last_name").string().size(5).endColumn()
          .column("string_field").string().max().endColumn()
          .column("bytes_field").bytes().max().endColumn()
          .column("timestamp_field").timestamp().endColumn()
          .column("date_field").date().endColumn()
          .column("arr_bool_field").type(Type.array(Type.bool())).endColumn()
          .column("arr_int64_field").type(Type.array(Type.int64())).endColumn()
          .column("arr_float64_field").type(Type.array(Type.float64())).endColumn()
          .column("arr_string_field").type(Type.array(Type.string())).max().endColumn()
          .column("arr_bytes_field").type(Type.array(Type.bytes())).max().endColumn()
          .column("arr_timestamp_field").type(Type.array(Type.timestamp())).endColumn()
          .column("arr_date_field").type(Type.array(Type.date())).endColumn()
          .primaryKey().asc("first_name").desc("last_name").asc("id").end()
          .interleaveInParent("table_b")
          .onDeleteCascade()
          .endTable()
          .build();
  List<String> filteredTables =
      getFilteredTables(ddl, ImmutableList.of(tableA)).stream()
          .map(t -> t.name())
          .collect(Collectors.toList());
  List<String> expectedFilteredTables = ImmutableList.of(tableA, tableB, tableC);
  Collections.sort(filteredTables);
  assertEquals(expectedFilteredTables, filteredTables);
}
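The expected list contains both ancestors because an interleaved table cannot be exported without its parent chain. A hedged sketch of that walk, assuming Ddl.table(String) and Table.interleaveInParent() accessors that mirror the builder calls above; this illustrates the idea, not the project's actual getFilteredTables implementation:

// Collect a table plus every ancestor reachable through interleave-in-parent links.
// Uses java.util.LinkedHashSet and java.util.Set; stops on a missing parent or a cycle.
static Set<String> withAncestors(Ddl ddl, String tableName) {
  Set<String> names = new LinkedHashSet<>();
  String current = tableName;
  while (current != null && names.add(current)) {
    Table t = ddl.table(current);
    current = (t == null) ? null : t.interleaveInParent();
  }
  return names;
}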