Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform.
From the class ImportFromAvroTest, method changeStreams:
@Test
public void changeStreams() throws Exception {
  Map<String, Schema> avroFiles = new HashMap<>();
  avroFiles.put(
      "ChangeStreamAll.avro",
      SchemaBuilder.record("ChangeStreamAll")
          .prop("spannerChangeStreamForClause", "FOR ALL")
          .prop("spannerOption_0", "retention_period=\"7d\"")
          .prop("spannerOption_1", "value_capture_type=\"OLD_AND_NEW_VALUES\"")
          .fields()
          .endRecord());
  avroFiles.put(
      "ChangeStreamEmpty.avro",
      SchemaBuilder.record("ChangeStreamEmpty")
          .prop("spannerChangeStreamForClause", "")
          .fields()
          .endRecord());
  avroFiles.put(
      "ChangeStreamTable.avro",
      SchemaBuilder.record("ChangeStreamTable")
          .prop("spannerChangeStreamForClause", "FOR T")
          .fields()
          .endRecord());
  avroFiles.put(
      "ChangeStreamColumns.avro",
      SchemaBuilder.record("ChangeStreamColumns")
          .prop("spannerChangeStreamForClause", "FOR T(c1, c2)")
          .fields()
          .endRecord());
  avroFiles.put(
      "ChangeStreamKeyOnly.avro",
      SchemaBuilder.record("ChangeStreamKeyOnly")
          .prop("spannerChangeStreamForClause", "FOR T()")
          .fields()
          .endRecord());

  ExportProtos.Export.Builder exportProtoBuilder = ExportProtos.Export.newBuilder();
  for (Entry<String, Schema> entry : avroFiles.entrySet()) {
    String fileName = entry.getKey();
    Schema schema = entry.getValue();
    exportProtoBuilder.addChangeStreams(
        ExportProtos.Export.Table.newBuilder()
            .setName(schema.getName())
            .addDataFiles(fileName)
            .build());
    // Create the Avro files to be imported.
    File avroFile = tmpDir.newFile(fileName);
    try (DataFileWriter<GenericRecord> fileWriter =
        new DataFileWriter<>(new GenericDatumWriter<>(schema))) {
      fileWriter.create(schema, avroFile);
    }
  }

  // Create the database manifest file.
  ExportProtos.Export exportProto = exportProtoBuilder.build();
  File manifestFile = tmpDir.newFile("spanner-export.json");
  String manifestFileLocation = manifestFile.getParent();
  Files.write(
      manifestFile.toPath(),
      JsonFormat.printer().print(exportProto).getBytes(StandardCharsets.UTF_8));

  // Create the target database.
  String spannerSchema =
      "CREATE TABLE `T` ("
          + "`id` INT64 NOT NULL,"
          + "`c1` BOOL,"
          + "`c2` INT64,"
          + ") PRIMARY KEY (`id`)";
  spannerServer.createDatabase(dbName, Collections.singleton(spannerSchema));

  // Run the import pipeline.
  importPipeline.apply(
      "Import",
      new ImportTransform(
          spannerServer.getSpannerConfig(dbName),
          ValueProvider.StaticValueProvider.of(manifestFileLocation),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(30)));
  PipelineResult importResult = importPipeline.run();
  importResult.waitUntilFinish();

  Ddl ddl;
  try (ReadOnlyTransaction ctx = spannerServer.getDbClient(dbName).readOnlyTransaction()) {
    ddl = new InformationSchemaScanner(ctx).scan();
  }
  assertThat(
      ddl.prettyPrint(),
      equalToCompressingWhiteSpace(
          "CREATE TABLE `T` ("
              + " `id` INT64 NOT NULL,"
              + " `c1` BOOL,"
              + " `c2` INT64,"
              + " ) PRIMARY KEY (`id` ASC)"
              + " CREATE CHANGE STREAM `ChangeStreamAll`"
              + " FOR ALL"
              + " OPTIONS (retention_period=\"7d\", value_capture_type=\"OLD_AND_NEW_VALUES\")"
              + " CREATE CHANGE STREAM `ChangeStreamColumns`"
              + " FOR `T`(`c1`, `c2`)"
              + " CREATE CHANGE STREAM `ChangeStreamEmpty`"
              + " CREATE CHANGE STREAM `ChangeStreamKeyOnly`"
              + " FOR `T`()"
              + " CREATE CHANGE STREAM `ChangeStreamTable`"
              + " FOR `T`"));
}
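The test relies on the importer translating Avro record properties into CREATE CHANGE STREAM statements. That mapping can be illustrated in isolation. The assembleDdl helper below is a hypothetical sketch written for this page, not part of the template code; it uses only the record name, the spannerChangeStreamForClause property, and the numbered spannerOption_N properties seen in the test.

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class ChangeStreamDdlSketch {

  // Hypothetical helper: reconstruct the DDL statement implied by the Avro properties.
  static String assembleDdl(Schema schema) {
    StringBuilder ddl =
        new StringBuilder("CREATE CHANGE STREAM `").append(schema.getName()).append("`");
    String forClause = schema.getProp("spannerChangeStreamForClause");
    if (forClause != null && !forClause.isEmpty()) {
      ddl.append(" ").append(forClause);
    }
    StringBuilder options = new StringBuilder();
    for (int i = 0; schema.getProp("spannerOption_" + i) != null; i++) {
      if (i > 0) {
        options.append(", ");
      }
      options.append(schema.getProp("spannerOption_" + i));
    }
    if (options.length() > 0) {
      ddl.append(" OPTIONS (").append(options).append(")");
    }
    return ddl.toString();
  }

  public static void main(String[] args) {
    Schema schema =
        SchemaBuilder.record("ChangeStreamAll")
            .prop("spannerChangeStreamForClause", "FOR ALL")
            .prop("spannerOption_0", "retention_period=\"7d\"")
            .fields()
            .endRecord();
    // Prints: CREATE CHANGE STREAM `ChangeStreamAll` FOR ALL OPTIONS (retention_period="7d")
    System.out.println(assembleDdl(schema));
  }
}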
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform.
From the class SpannerRecordConverterTest, method numerics:
@Test
public void numerics() {
  Ddl ddl =
      Ddl.builder()
          .createTable("numerictable")
          .column("id").int64().notNull().endColumn()
          .column("numeric").type(Type.numeric()).endColumn()
          .column("numeric_arr").type(Type.array(Type.numeric())).endColumn()
          .primaryKey().asc("id").end()
          .endTable()
          .build();
  Schema schema = converter.convert(ddl).iterator().next();
  SpannerRecordConverter recordConverter = new SpannerRecordConverter(schema);
  String[] numericArrValues = {null, "-25398514232141142.012479", null, "1999999999.1246"};
  Struct struct =
      Struct.newBuilder()
          .set("id").to(1L)
          .set("numeric").to("-9305028.140032")
          .set("numeric_arr").toStringArray(Lists.newArrayList(numericArrValues))
          .build();
  GenericRecord avroRecord = recordConverter.convert(struct);
  // NUMERIC values land in the Avro record as byte buffers; null elements are preserved.
  List<ByteBuffer> expectedNumericArr =
      Stream.of(numericArrValues)
          .map(x -> x == null ? null : ByteBuffer.wrap(NumericUtils.stringToBytes(x)))
          .collect(Collectors.toList());
  assertThat(avroRecord.get("id"), equalTo(1L));
  assertThat(
      avroRecord.get("numeric"),
      equalTo(ByteBuffer.wrap(NumericUtils.stringToBytes("-9305028.140032"))));
  assertThat(avroRecord.get("numeric_arr"), equalTo(expectedNumericArr));
}
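The ByteBuffer comparison works because the converter stores NUMERIC columns as Avro decimal bytes. Assuming the encoding is the big-endian two's-complement unscaled value at Spanner NUMERIC's fixed scale of 9 (an assumption inferred from the decimal(38, 9) export schema, not stated in the test), the buffer can be decoded by hand:

import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;

// Sketch: decode an Avro decimal buffer back into a plain decimal string.
// Assumes scale 9 and a big-endian two's-complement unscaled value.
static String decodeNumeric(ByteBuffer buffer) {
  byte[] bytes = new byte[buffer.remaining()];
  buffer.duplicate().get(bytes);  // duplicate() leaves the original position untouched
  return new BigDecimal(new BigInteger(bytes), 9).stripTrailingZeros().toPlainString();
}

Applied to the "numeric" field above, this would recover "-9305028.140032".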
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform.
From the class SpannerRecordConverterTest, method pgArrays:
@Test
public void pgArrays() {
  Ddl ddl =
      Ddl.builder(Dialect.POSTGRESQL)
          .createTable("users")
          .column("id").pgInt8().notNull().endColumn()
          .column("ints").type(Type.pgArray(Type.pgInt8())).endColumn()
          .column("varchars").type(Type.pgArray(Type.pgVarchar())).max().endColumn()
          .column("texts").type(Type.pgArray(Type.pgText())).endColumn()
          .column("ts").type(Type.pgArray(Type.pgTimestamptz())).endColumn()
          .column("date").type(Type.pgArray(Type.pgDate())).endColumn()
          .primaryKey().asc("id").end()
          .endTable()
          .build();
  Schema schema = converter.convert(ddl).iterator().next();
  SpannerRecordConverter recordConverter = new SpannerRecordConverter(schema, Dialect.POSTGRESQL);
  Struct struct =
      Struct.newBuilder()
          .set("id").to(1L)
          .set("ints").toInt64Array(Lists.newArrayList(1L, null, 2L))
          .set("varchars").toStringArray(Lists.newArrayList(null, null, "one"))
          .set("texts").toStringArray(Lists.newArrayList(null, null, "two"))
          .set("ts").toTimestampArray(Lists.newArrayList(null, null, Timestamp.ofTimeMicroseconds(10L)))
          .set("date").toDateArray(Lists.newArrayList(null, null, Date.fromYearMonthDay(2018, 2, 2)))
          .build();
  GenericRecord avroRecord = recordConverter.convert(struct);
  assertThat(avroRecord.get("id"), equalTo(1L));
  assertThat(avroRecord.get("ints"), equalTo(Arrays.asList(1L, null, 2L)));
  assertThat(avroRecord.get("varchars"), equalTo(Arrays.asList(null, null, "one")));
  assertThat(avroRecord.get("texts"), equalTo(Arrays.asList(null, null, "two")));
  // Dates and timestamps are expected as ISO-8601 strings in the Avro record.
  assertThat(avroRecord.get("date"), equalTo(Arrays.asList(null, null, "2018-02-02")));
  assertThat(avroRecord.get("ts"), equalTo(Arrays.asList(null, null, "1970-01-01T00:00:00.000010000Z")));
}
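When debugging conversions like this one, it helps to see exactly how each PostgreSQL column landed in the generated Avro schema. A minimal sketch using the standard Avro API; for the nullable columns the printed field schemas should be unions that include "null":

// Dump each field of the generated schema, e.g. inside the test after convert().
for (Schema.Field field : schema.getFields()) {
  System.out.println(field.name() + " -> " + field.schema());
}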
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform.
From the class SpannerRecordConverterTest, method arrays:
@Test
public void arrays() {
  Ddl ddl =
      Ddl.builder()
          .createTable("users")
          .column("id").int64().notNull().endColumn()
          .column("ints").type(Type.array(Type.int64())).endColumn()
          .column("strings").type(Type.array(Type.string())).max().endColumn()
          .column("ts").type(Type.array(Type.timestamp())).endColumn()
          .column("date").type(Type.array(Type.date())).endColumn()
          .primaryKey().asc("id").end()
          .endTable()
          .build();
  Schema schema = converter.convert(ddl).iterator().next();
  SpannerRecordConverter recordConverter = new SpannerRecordConverter(schema);
  Struct struct =
      Struct.newBuilder()
          .set("id").to(1L)
          .set("ints").toInt64Array(Lists.newArrayList(1L, null, 2L))
          .set("strings").toStringArray(Lists.newArrayList(null, null, "one"))
          .set("ts").toTimestampArray(Lists.newArrayList(null, null, Timestamp.ofTimeMicroseconds(10L)))
          .set("date").toDateArray(Lists.newArrayList(null, null, Date.fromYearMonthDay(2018, 2, 2)))
          .build();
  GenericRecord avroRecord = recordConverter.convert(struct);
  assertThat(avroRecord.get("id"), equalTo(1L));
  assertThat(avroRecord.get("ints"), equalTo(Arrays.asList(1L, null, 2L)));
  assertThat(avroRecord.get("strings"), equalTo(Arrays.asList(null, null, "one")));
  assertThat(avroRecord.get("date"), equalTo(Arrays.asList(null, null, "2018-02-02")));
  assertThat(avroRecord.get("ts"), equalTo(Arrays.asList(null, null, "1970-01-01T00:00:00.000010000Z")));
}
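One way to sanity-check a converted record is to feed it back through Avro serialization against the same schema, which generally fails if the record does not conform. A sketch reusing the Avro writer classes already seen in the import test above; the temp-file handling is illustrative only:

// Serialize the converted record to verify it conforms to the generated schema.
File out = File.createTempFile("users", ".avro");
try (DataFileWriter<GenericRecord> writer =
    new DataFileWriter<>(new GenericDatumWriter<>(schema))) {
  writer.create(schema, out);
  writer.append(avroRecord);  // throws if the record cannot be written under the schema
}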
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform.
From the class SpannerRecordConverterTest, method pgNumerics:
@Test
public void pgNumerics() {
  Ddl ddl =
      Ddl.builder(Dialect.POSTGRESQL)
          .createTable("numerictable")
          .column("id").pgInt8().notNull().endColumn()
          .column("numeric1").type(Type.pgNumeric()).endColumn()
          .column("numeric2").type(Type.pgNumeric()).endColumn()
          .column("numeric3").type(Type.pgNumeric()).endColumn()
          .column("numeric_arr").type(Type.pgArray(Type.pgNumeric())).endColumn()
          .primaryKey().asc("id").end()
          .endTable()
          .build();
  Schema schema = converter.convert(ddl).iterator().next();
  SpannerRecordConverter recordConverter = new SpannerRecordConverter(schema, Dialect.POSTGRESQL);
  // Build the largest and smallest representable PG NUMERIC literals digit by digit.
  StringBuilder maxPgNumeric = new StringBuilder();
  StringBuilder minPgNumeric = new StringBuilder("-");
  for (int i = 0; i < NumericUtils.PG_MAX_PRECISION - NumericUtils.PG_MAX_SCALE; i++) {
    maxPgNumeric.append("9");
    minPgNumeric.append("9");
  }
  maxPgNumeric.append(".");
  minPgNumeric.append(".");
  for (int i = 0; i < NumericUtils.PG_MAX_SCALE; i++) {
    maxPgNumeric.append("9");
    minPgNumeric.append("9");
  }
  String[] pgNumericArrValues = {null, "NaN", null, maxPgNumeric.toString(), minPgNumeric.toString()};
  Struct struct =
      Struct.newBuilder()
          .set("id").to(1L)
          .set("numeric1").to("-9305028.140032")
          .set("numeric2").to("-25398514232141142.012479")
          .set("numeric3").to("1999999999.1246")
          .set("numeric_arr").toStringArray(Lists.newArrayList(pgNumericArrValues))
          .build();
  GenericRecord avroRecord = recordConverter.convert(struct);
  List<ByteBuffer> expectedPgNumericArr =
      Stream.of(pgNumericArrValues)
          .map(x -> x == null ? null : ByteBuffer.wrap(NumericUtils.pgStringToBytes(x)))
          .collect(Collectors.toList());
  assertThat(avroRecord.get("id"), equalTo(1L));
  assertThat(
      avroRecord.get("numeric1"),
      equalTo(ByteBuffer.wrap(NumericUtils.pgStringToBytes("-9305028.140032"))));
  assertThat(
      avroRecord.get("numeric2"),
      equalTo(ByteBuffer.wrap(NumericUtils.pgStringToBytes("-25398514232141142.012479"))));
  assertThat(
      avroRecord.get("numeric3"),
      equalTo(ByteBuffer.wrap(NumericUtils.pgStringToBytes("1999999999.1246"))));
  assertThat(avroRecord.get("numeric_arr"), equalTo(expectedPgNumericArr));
}
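The two digit-building loops produce boundary literals with PG_MAX_PRECISION - PG_MAX_SCALE integer digits followed by PG_MAX_SCALE fractional digits. On Java 11+, an equivalent construction is more direct:

// Equivalent boundary-literal construction using String.repeat (Java 11+).
String intDigits = "9".repeat(NumericUtils.PG_MAX_PRECISION - NumericUtils.PG_MAX_SCALE);
String fracDigits = "9".repeat(NumericUtils.PG_MAX_SCALE);
String maxPgNumeric = intDigits + "." + fracDigits;
String minPgNumeric = "-" + maxPgNumeric;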