Use of com.google.cloud.teleport.spanner.ddl.InformationSchemaScanner in project DataflowTemplates by GoogleCloudPlatform.
From the class CopyDbTest, method readDdl:
/* Returns the Ddl representing a Spanner database, given the database name as a String. */
private Ddl readDdl(String db) {
  DatabaseClient dbClient = spannerServer.getDbClient(db);
  Ddl ddl;
  try (ReadOnlyTransaction ctx = dbClient.readOnlyTransaction()) {
    ddl = new InformationSchemaScanner(ctx).scan();
  }
  return ddl;
}
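A minimal usage sketch follows (an illustration, not repository code), assuming the test class exposes the same spannerServer rule and a dbName field as the other snippets on this page; the table and test name are hypothetical. It creates a database from one DDL statement, scans it back with readDdl, and compares the pretty-printed schema.

@Test
public void scansCreatedSchema() throws Exception {
  // Hypothetical table; `spannerServer` and `dbName` are assumed from the surrounding test class.
  String statement =
      "CREATE TABLE `Users` (" + "`id` INT64 NOT NULL," + "`name` STRING(MAX),"
          + ") PRIMARY KEY (`id`)";
  spannerServer.createDatabase(dbName, Collections.singleton(statement));
  Ddl ddl = readDdl(dbName);
  // The scanner normalizes the schema on output, e.g. it prints the ASC key order explicitly.
  assertThat(
      ddl.prettyPrint(),
      equalToCompressingWhiteSpace(
          "CREATE TABLE `Users` (" + " `id` INT64 NOT NULL," + " `name` STRING(MAX),"
              + " ) PRIMARY KEY (`id` ASC)"));
}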
Use of com.google.cloud.teleport.spanner.ddl.InformationSchemaScanner in project DataflowTemplates by GoogleCloudPlatform.
From the class ImportFromAvroTest, method runTest:
private void runTest(Schema avroSchema, String spannerSchema, Iterable<GenericRecord> records)
    throws Exception {
  // Create the Avro file to be imported.
  String fileName = "avroFile.avro";
  ExportProtos.Export exportProto =
      ExportProtos.Export.newBuilder()
          .addTables(
              ExportProtos.Export.Table.newBuilder()
                  .setName("AvroTable")
                  .addDataFiles(fileName)
                  .build())
          .addDatabaseOptions(
              ExportProtos.Export.DatabaseOption.newBuilder()
                  .setOptionName("version_retention_period")
                  .setOptionValue("\"4d\"")
                  .build())
          .build();
  JsonFormat.printer().print(exportProto);
  File manifestFile = tmpDir.newFile("spanner-export.json");
  String manifestFileLocation = manifestFile.getParent();
  Files.write(
      manifestFile.toPath(),
      JsonFormat.printer().print(exportProto).getBytes(StandardCharsets.UTF_8));
  File avroFile = tmpDir.newFile(fileName);
  try (DataFileWriter<GenericRecord> fileWriter =
      new DataFileWriter<>(new GenericDatumWriter<>(avroSchema))) {
    fileWriter.create(avroSchema, avroFile);
    for (GenericRecord r : records) {
      fileWriter.append(r);
    }
    fileWriter.flush();
  }
  // Create the target database.
  spannerServer.createDatabase(dbName, Collections.singleton(spannerSchema));
  // Run the import pipeline.
  importPipeline.apply(
      "Import",
      new ImportTransform(
          spannerServer.getSpannerConfig(dbName),
          ValueProvider.StaticValueProvider.of(manifestFileLocation),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(30)));
  PipelineResult importResult = importPipeline.run();
  importResult.waitUntilFinish();
  Ddl ddl;
  try (ReadOnlyTransaction ctx = spannerServer.getDbClient(dbName).readOnlyTransaction()) {
    ddl = new InformationSchemaScanner(ctx).scan();
  }
  assertThat(ddl.databaseOptions().size(), is(1));
  ExportProtos.Export.DatabaseOption dbOption = ddl.databaseOptions().get(0);
  assertThat(dbOption.getOptionName(), is("version_retention_period"));
  assertThat(dbOption.getOptionValue(), is("4d"));
}
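A hypothetical caller of this helper might look like the sketch below (not taken from the repository). It builds a one-record Avro input whose record name matches the "AvroTable" entry registered in the manifest above, plus a matching Spanner DDL statement; GenericRecordBuilder and the column names are assumptions made for illustration.

@Test
public void importsSimpleTable() throws Exception {
  // The manifest in runTest registers the table as "AvroTable",
  // so both the Avro record and the Spanner table use that name.
  Schema avroSchema =
      SchemaBuilder.record("AvroTable")
          .fields()
          .requiredLong("id")
          .optionalString("name")
          .endRecord();
  GenericRecord record =
      new GenericRecordBuilder(avroSchema).set("id", 1L).set("name", "a").build();
  String spannerSchema =
      "CREATE TABLE `AvroTable` (" + "`id` INT64 NOT NULL," + "`name` STRING(MAX),"
          + ") PRIMARY KEY (`id`)";
  runTest(avroSchema, spannerSchema, Collections.singletonList(record));
}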
Use of com.google.cloud.teleport.spanner.ddl.InformationSchemaScanner in project DataflowTemplates by GoogleCloudPlatform.
From the class ImportFromAvroTest, test method changeStreams:
@Test
public void changeStreams() throws Exception {
  Map<String, Schema> avroFiles = new HashMap<>();
  avroFiles.put(
      "ChangeStreamAll.avro",
      SchemaBuilder.record("ChangeStreamAll")
          .prop("spannerChangeStreamForClause", "FOR ALL")
          .prop("spannerOption_0", "retention_period=\"7d\"")
          .prop("spannerOption_1", "value_capture_type=\"OLD_AND_NEW_VALUES\"")
          .fields().endRecord());
  avroFiles.put(
      "ChangeStreamEmpty.avro",
      SchemaBuilder.record("ChangeStreamEmpty")
          .prop("spannerChangeStreamForClause", "").fields().endRecord());
  avroFiles.put(
      "ChangeStreamTable.avro",
      SchemaBuilder.record("ChangeStreamTable")
          .prop("spannerChangeStreamForClause", "FOR T").fields().endRecord());
  avroFiles.put(
      "ChangeStreamColumns.avro",
      SchemaBuilder.record("ChangeStreamColumns")
          .prop("spannerChangeStreamForClause", "FOR T(c1, c2)").fields().endRecord());
  avroFiles.put(
      "ChangeStreamKeyOnly.avro",
      SchemaBuilder.record("ChangeStreamKeyOnly")
          .prop("spannerChangeStreamForClause", "FOR T()").fields().endRecord());
  ExportProtos.Export.Builder exportProtoBuilder = ExportProtos.Export.newBuilder();
  for (Entry<String, Schema> entry : avroFiles.entrySet()) {
    String fileName = entry.getKey();
    Schema schema = entry.getValue();
    exportProtoBuilder.addChangeStreams(
        ExportProtos.Export.Table.newBuilder()
            .setName(schema.getName()).addDataFiles(fileName).build());
    // Create the Avro files to be imported.
    File avroFile = tmpDir.newFile(fileName);
    try (DataFileWriter<GenericRecord> fileWriter =
        new DataFileWriter<>(new GenericDatumWriter<>(schema))) {
      fileWriter.create(schema, avroFile);
    }
  }
  // Create the database manifest file.
  ExportProtos.Export exportProto = exportProtoBuilder.build();
  File manifestFile = tmpDir.newFile("spanner-export.json");
  String manifestFileLocation = manifestFile.getParent();
  Files.write(
      manifestFile.toPath(),
      JsonFormat.printer().print(exportProto).getBytes(StandardCharsets.UTF_8));
  // Create the target database.
  String spannerSchema =
      "CREATE TABLE `T` (" + "`id` INT64 NOT NULL," + "`c1` BOOL," + "`c2` INT64,"
          + ") PRIMARY KEY (`id`)";
  spannerServer.createDatabase(dbName, Collections.singleton(spannerSchema));
  // Run the import pipeline.
  importPipeline.apply(
      "Import",
      new ImportTransform(
          spannerServer.getSpannerConfig(dbName),
          ValueProvider.StaticValueProvider.of(manifestFileLocation),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(30)));
  PipelineResult importResult = importPipeline.run();
  importResult.waitUntilFinish();
  Ddl ddl;
  try (ReadOnlyTransaction ctx = spannerServer.getDbClient(dbName).readOnlyTransaction()) {
    ddl = new InformationSchemaScanner(ctx).scan();
  }
  assertThat(
      ddl.prettyPrint(),
      equalToCompressingWhiteSpace(
          "CREATE TABLE `T` (" + " `id` INT64 NOT NULL," + " `c1` BOOL," + " `c2` INT64,"
              + " ) PRIMARY KEY (`id` ASC)"
              + " CREATE CHANGE STREAM `ChangeStreamAll`" + " FOR ALL"
              + " OPTIONS (retention_period=\"7d\", value_capture_type=\"OLD_AND_NEW_VALUES\")"
              + " CREATE CHANGE STREAM `ChangeStreamColumns`" + " FOR `T`(`c1`, `c2`)"
              + " CREATE CHANGE STREAM `ChangeStreamEmpty`"
              + " CREATE CHANGE STREAM `ChangeStreamKeyOnly`" + " FOR `T`()"
              + " CREATE CHANGE STREAM `ChangeStreamTable`" + " FOR `T`"));
}
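The five schemas above differ only in record name and FOR clause (plus option properties on the first), so the pattern could be captured in a small factory such as the hypothetical sketch below, which is not part of the test:

// Hypothetical helper, for illustration only: builds a change-stream Avro schema from a name
// and FOR clause; per-stream options such as retention_period would still be set via prop().
private static Schema changeStreamSchema(String name, String forClause) {
  return SchemaBuilder.record(name)
      .prop("spannerChangeStreamForClause", forClause)
      .fields()
      .endRecord();
}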
Use of com.google.cloud.teleport.spanner.ddl.InformationSchemaScanner in project DataflowTemplates by GoogleCloudPlatform.
From the class ExportRelatedTablesCheckTest, method readDdl:
/* Returns the Ddl representing a Spanner database, given the database name as a String. */
private Ddl readDdl(String db) {
  DatabaseClient dbClient = spannerServer.getDbClient(db);
  Ddl ddl;
  try (ReadOnlyTransaction ctx = dbClient.readOnlyTransaction()) {
    ddl = new InformationSchemaScanner(ctx).scan();
  }
  return ddl;
}
Use of com.google.cloud.teleport.spanner.ddl.InformationSchemaScanner in project DataflowTemplates by GoogleCloudPlatform.
From the class ImportFromAvroTest, method runTest (overload with a Dialect parameter):
private void runTest(
    Schema avroSchema, String spannerSchema, Iterable<GenericRecord> records, Dialect dialect)
    throws Exception {
  // Create the Avro file to be imported.
  String fileName = "avroFile.avro";
  ExportProtos.Export exportProto =
      ExportProtos.Export.newBuilder()
          .addTables(
              ExportProtos.Export.Table.newBuilder()
                  .setName("AvroTable")
                  .addDataFiles(fileName)
                  .build())
          .addDatabaseOptions(
              ExportProtos.Export.DatabaseOption.newBuilder()
                  .setOptionName("version_retention_period")
                  .setOptionValue(dialect == Dialect.GOOGLE_STANDARD_SQL ? "\"4d\"" : "'4d'")
                  .build())
          .setDialect(ProtoDialect.valueOf(dialect.name()))
          .build();
  JsonFormat.printer().print(exportProto);
  File manifestFile = tmpDir.newFile("spanner-export.json");
  String manifestFileLocation = manifestFile.getParent();
  Files.write(
      manifestFile.toPath(),
      JsonFormat.printer().print(exportProto).getBytes(StandardCharsets.UTF_8));
  File avroFile = tmpDir.newFile(fileName);
  try (DataFileWriter<GenericRecord> fileWriter =
      new DataFileWriter<>(new GenericDatumWriter<>(avroSchema))) {
    fileWriter.create(avroSchema, avroFile);
    for (GenericRecord r : records) {
      fileWriter.append(r);
    }
    fileWriter.flush();
  }
  // Create the target database.
  switch (dialect) {
    case GOOGLE_STANDARD_SQL:
      spannerServer.createDatabase(dbName, Collections.singleton(spannerSchema));
      break;
    case POSTGRESQL:
      spannerServer.createPgDatabase(dbName, Collections.singleton(spannerSchema));
      break;
    default:
      throw new IllegalArgumentException("Unrecognized dialect: " + dialect);
  }
  // Run the import pipeline.
  importPipeline.apply(
      "Import",
      new ImportTransform(
          spannerServer.getSpannerConfig(dbName),
          ValueProvider.StaticValueProvider.of(manifestFileLocation),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(true),
          ValueProvider.StaticValueProvider.of(30)));
  PipelineResult importResult = importPipeline.run();
  importResult.waitUntilFinish();
  Ddl ddl;
  try (ReadOnlyTransaction ctx = spannerServer.getDbClient(dbName).readOnlyTransaction()) {
    ddl = new InformationSchemaScanner(ctx, dialect).scan();
  }
  assertThat(ddl.databaseOptions().size(), is(1));
  ExportProtos.Export.DatabaseOption dbOption = ddl.databaseOptions().get(0);
  assertThat(dbOption.getOptionName(), is("version_retention_period"));
  assertThat(dbOption.getOptionValue(), is("4d"));
}
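A hypothetical PostgreSQL-dialect caller (again an illustration, not repository code) would pass Dialect.POSTGRESQL together with PG-dialect DDL; the Avro side is unchanged from the GoogleSQL sketch earlier on this page.

@Test
public void importsSimpleTablePg() throws Exception {
  // Same hypothetical AvroTable as before; only the DDL dialect and the Dialect argument change.
  Schema avroSchema =
      SchemaBuilder.record("AvroTable")
          .fields()
          .requiredLong("id")
          .optionalString("name")
          .endRecord();
  GenericRecord record =
      new GenericRecordBuilder(avroSchema).set("id", 1L).set("name", "a").build();
  String spannerSchema =
      "CREATE TABLE \"AvroTable\" (" + "\"id\" bigint NOT NULL," + "\"name\" character varying,"
          + "PRIMARY KEY (\"id\"))";
  runTest(avroSchema, spannerSchema, Collections.singletonList(record), Dialect.POSTGRESQL);
}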