Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform: class SpannerTableFilterTest, method basicInterleavedTableFilterSelection_selectsChosenAndParentTables.
@Test
public void basicInterleavedTableFilterSelection_selectsChosenAndParentTables() throws Exception {
Ddl ddl =
    Ddl.builder()
        .createTable("Users")
        .column("first_name").string().max().endColumn()
        .column("last_name").string().size(5).endColumn()
        .column("age").int64().endColumn()
        .primaryKey().asc("first_name").desc("last_name").end()
        .endTable()
        .createTable("AllTYPES")
        .column("first_name").string().max().endColumn()
        .column("last_name").string().size(5).endColumn()
        .column("id").int64().notNull().endColumn()
        .column("bool_field").bool().endColumn()
        .column("int64_field").int64().endColumn()
        .column("float64_field").float64().endColumn()
        .column("string_field").string().max().endColumn()
        .column("bytes_field").bytes().max().endColumn()
        .column("timestamp_field").timestamp().endColumn()
        .column("date_field").date().endColumn()
        .column("arr_bool_field").type(Type.array(Type.bool())).endColumn()
        .column("arr_int64_field").type(Type.array(Type.int64())).endColumn()
        .column("arr_float64_field").type(Type.array(Type.float64())).endColumn()
        .column("arr_string_field").type(Type.array(Type.string())).max().endColumn()
        .column("arr_bytes_field").type(Type.array(Type.bytes())).max().endColumn()
        .column("arr_timestamp_field").type(Type.array(Type.timestamp())).endColumn()
        .column("arr_date_field").type(Type.array(Type.date())).endColumn()
        .primaryKey().asc("first_name").desc("last_name").asc("id").end()
        .interleaveInParent("Users")
        .onDeleteCascade()
        .endTable()
        .build();
List<String> filteredTables =
    getFilteredTables(ddl, ImmutableList.of(allTypesTable)).stream()
        .map(t -> t.name())
        .collect(Collectors.toList());
List<String> expectedFilteredTables = ImmutableList.of(allTypesTable, usersTable);
Collections.sort(filteredTables);
assertEquals(expectedFilteredTables, filteredTables);
}
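The same parent-inclusion rule is easier to see on a trimmed-down schema. The following is a minimal sketch, not taken from the test class: it reuses only the builder calls and the getFilteredTables helper shown above, the Singers/Albums table names are illustrative, and it assumes the filter pulls in the interleave parent of any selected child exactly as the test above asserts.
// Minimal sketch (assumption: getFilteredTables behaves as asserted above, i.e. the
// interleave parent of every selected table is also returned).
Ddl smallDdl =
    Ddl.builder()
        .createTable("Singers")
        .column("singer_id").int64().endColumn()
        .primaryKey().asc("singer_id").end()
        .endTable()
        .createTable("Albums")
        .column("singer_id").int64().endColumn()
        .column("album_id").int64().endColumn()
        .primaryKey().asc("singer_id").asc("album_id").end()
        .interleaveInParent("Singers")
        .onDeleteCascade()
        .endTable()
        .build();

// Selecting only the child "Albums" should yield both "Albums" and its parent "Singers".
List<String> selected =
    getFilteredTables(smallDdl, ImmutableList.of("Albums")).stream()
        .map(t -> t.name())
        .collect(Collectors.toList());
Collections.sort(selected);
assertEquals(ImmutableList.of("Albums", "Singers"), selected);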
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform: class SpannerTableFilterTest, method filterWithPartialAncestorsSelection_selectsChosenAndParentTable.
@Test
public void filterWithPartialAncestorsSelection_selectsChosenAndParentTable() throws Exception {
Ddl ddl =
    Ddl.builder()
        .createTable("table_c")
        .column("first_name").string().max().endColumn()
        .column("last_name").string().size(5).endColumn()
        .column("age").int64().endColumn()
        .primaryKey().asc("first_name").desc("last_name").end()
        .endTable()
        .createTable("table_b")
        .column("first_name").string().max().endColumn()
        .column("last_name").string().size(5).endColumn()
        .column("id").int64().notNull().endColumn()
        .column("bool_field").bool().endColumn()
        .column("int64_field").int64().endColumn()
        .column("float64_field").float64().endColumn()
        .primaryKey().asc("first_name").desc("last_name").asc("id").end()
        .interleaveInParent("table_c")
        .onDeleteCascade()
        .endTable()
        .createTable("table_a")
        .column("first_name").string().max().endColumn()
        .column("last_name").string().size(5).endColumn()
        .column("string_field").string().max().endColumn()
        .column("bytes_field").bytes().max().endColumn()
        .column("timestamp_field").timestamp().endColumn()
        .column("date_field").date().endColumn()
        .column("arr_bool_field").type(Type.array(Type.bool())).endColumn()
        .column("arr_int64_field").type(Type.array(Type.int64())).endColumn()
        .column("arr_float64_field").type(Type.array(Type.float64())).endColumn()
        .column("arr_string_field").type(Type.array(Type.string())).max().endColumn()
        .column("arr_bytes_field").type(Type.array(Type.bytes())).max().endColumn()
        .column("arr_timestamp_field").type(Type.array(Type.timestamp())).endColumn()
        .column("arr_date_field").type(Type.array(Type.date())).endColumn()
        .primaryKey().asc("first_name").desc("last_name").asc("id").end()
        .interleaveInParent("table_b")
        .onDeleteCascade()
        .endTable()
        .build();
List<String> filteredTables =
    getFilteredTables(ddl, ImmutableList.of(tableB)).stream()
        .map(t -> t.name())
        .collect(Collectors.toList());
List<String> expectedFilteredTables = ImmutableList.of(tableB, tableC);
Collections.sort(filteredTables);
assertEquals(expectedFilteredTables, filteredTables);
}
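A complementary check, sketched below under an assumption the two tests above imply for one level but do not assert for deeper hierarchies: that ancestors are included transitively, so selecting only the leaf table_a would return all three tables. The sketch is written as a continuation inside the same test method (the ddl built above still in scope) and uses literal table names rather than the class constants.
// Sketch only; assumes ancestors are included transitively up the interleave chain.
List<String> leafSelection =
    getFilteredTables(ddl, ImmutableList.of("table_a")).stream()
        .map(t -> t.name())
        .collect(Collectors.toList());
Collections.sort(leafSelection);
assertEquals(ImmutableList.of("table_a", "table_b", "table_c"), leafSelection);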
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform: class TextImportTransformTest, method readImportManifestInvalidTable.
@Test
public void readImportManifestInvalidTable() throws Exception {
Path f11 = Files.createTempFile("table1-file", "1");
Path manifestFile = Files.createTempFile("import-manifest", ".json");
Charset charset = Charset.forName("UTF-8");
try (BufferedWriter writer = Files.newBufferedWriter(manifestFile, charset)) {
String jsonString =
    String.format(
        "{\"tables\": ["
            + "{\"table_name\": \"NON_EXIST_TABLE\","
            + "\"file_patterns\":[\"%s\"]}"
            + "]}",
        f11.toString());
writer.write(jsonString, 0, jsonString.length());
} catch (IOException e) {
e.printStackTrace();
}
ValueProvider<String> importManifest =
    ValueProvider.StaticValueProvider.of(manifestFile.toString());
PCollectionView<Ddl> ddlView =
    pipeline.apply("ddl", Create.of(getTestDdl())).apply(View.asSingleton());
PCollection<KV<String, String>> tableAndFiles =
    pipeline
        .apply("Read manifest file", new ReadImportManifest(importManifest))
        .apply("Resolve data files", new ResolveDataFiles(importManifest, ddlView));
try {
pipeline.run();
} catch (PipelineExecutionException e) {
assertThat(
    e.getMessage(),
    equalTo(
        "java.lang.RuntimeException: Table NON_EXIST_TABLE not found in the database. "
            + "Table must be pre-created in database"));
}
}
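The manifest setup is the same boilerplate in each of the manifest tests here: create a temporary JSON file and write a tables/file_patterns document into it. A hedged sketch of a helper that factors this out follows; the method name writeManifest is hypothetical and not part of TextImportTransformTest, and it assumes java.nio.charset.StandardCharsets, java.util.List, and java.util.stream.Collectors are available alongside the imports the tests above already use.
// Hypothetical helper (not in the original test class): writes a single-table import
// manifest in the same JSON shape the tests above build inline with String.format.
private static Path writeManifest(String tableName, List<String> filePatterns) throws IOException {
  Path manifestFile = Files.createTempFile("import-manifest", ".json");
  String patterns =
      filePatterns.stream().map(p -> "\"" + p + "\"").collect(Collectors.joining(","));
  String jsonString =
      String.format(
          "{\"tables\": [{\"table_name\": \"%s\",\"file_patterns\":[%s]}]}",
          tableName, patterns);
  try (BufferedWriter writer =
      Files.newBufferedWriter(manifestFile, StandardCharsets.UTF_8)) {
    writer.write(jsonString, 0, jsonString.length());
  }
  return manifestFile;
}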
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform: class TextImportTransformTest, method readImportManifest.
@Test
public void readImportManifest() throws Exception {
Path f11 = Files.createTempFile("table1-file", "1");
Path f12 = Files.createTempFile("table1-file", "2");
Path f13 = Files.createTempFile("table1-file", "3");
Path f21 = Files.createTempFile("table2-file", "1");
Path f22 = Files.createTempFile("table2-file", "2");
String tempDir = f11.getParent().toString();
Path manifestFile = Files.createTempFile("import-manifest", ".json");
Charset charset = Charset.forName("UTF-8");
try (BufferedWriter writer = Files.newBufferedWriter(manifestFile, charset)) {
String jsonString =
    String.format(
        "{\"tables\": ["
            + "{\"table_name\": \"table1\","
            + "\"file_patterns\":[\"%s\",\"%s\"]},"
            + "{\"table_name\": \"table2\","
            + "\"file_patterns\":[\"%s\"]}"
            + "]}",
        f11.toString(), f12.toString(), f21.toString());
writer.write(jsonString, 0, jsonString.length());
} catch (IOException e) {
e.printStackTrace();
}
ValueProvider<String> importManifest =
    ValueProvider.StaticValueProvider.of(manifestFile.toString());
PCollectionView<Ddl> ddlView =
    pipeline.apply("ddl", Create.of(getTestDdl())).apply(View.asSingleton());
PCollection<KV<String, String>> tableAndFiles =
    pipeline
        .apply("Read manifest file", new ReadImportManifest(importManifest))
        .apply("Resolve data files", new ResolveDataFiles(importManifest, ddlView));
// Validates that only the file patterns specified in manifest will be returned.
// E.g., f13 and f22 are not in the tableAndFiles result.
PAssert.that(tableAndFiles)
    .containsInAnyOrder(
        KV.of("table1", f11.toString()),
        KV.of("table1", f12.toString()),
        KV.of("table2", f21.toString()));
pipeline.run();
}
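Using the hypothetical writeManifest helper sketched earlier, the happy-path check reduces to a few lines. This is a sketch under the same assumptions as the surrounding tests (the same pipeline rule and getTestDdl() fixture), not an additional test from the project.
// Sketch: single-table manifest resolved end to end, reusing the test fixtures above.
@Test
public void readImportManifestSingleTable_sketch() throws Exception {
  Path f1 = Files.createTempFile("table1-file", "1");
  Path manifestFile = writeManifest("table1", ImmutableList.of(f1.toString()));
  ValueProvider<String> importManifest =
      ValueProvider.StaticValueProvider.of(manifestFile.toString());
  PCollectionView<Ddl> ddlView =
      pipeline.apply("ddl", Create.of(getTestDdl())).apply(View.asSingleton());
  PCollection<KV<String, String>> tableAndFiles =
      pipeline
          .apply("Read manifest file", new ReadImportManifest(importManifest))
          .apply("Resolve data files", new ResolveDataFiles(importManifest, ddlView));
  PAssert.that(tableAndFiles).containsInAnyOrder(KV.of("table1", f1.toString()));
  pipeline.run();
}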
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform: class TextImportTransformTest, method readImportManifestInvalidManifestFormat.
@Test(expected = PipelineExecutionException.class)
public void readImportManifestInvalidManifestFormat() throws Exception {
Path f11 = Files.createTempFile("table1-file", "1");
Path f12 = Files.createTempFile("table1-file", "2");
Path f13 = Files.createTempFile("table1-file", "3");
String tempDir = f11.getParent().toString();
Path manifestFile = Files.createTempFile("import-manifest", ".json");
Charset charset = Charset.forName("UTF-8");
try (BufferedWriter writer = Files.newBufferedWriter(manifestFile, charset)) {
// An invalid json string (missing the ending close "}").
String jsonString =
    String.format(
        "{\"tables\": ["
            + "{\"table_name\": \"table1\","
            + "\"file_patterns\":[\"%s\",\"%s\"]},"
            + "{\"table_name\": \"table2\","
            + "\"file_patterns\":[\"*\"]}"
            + "]",
        f11.toString(), f12.toString());
writer.write(jsonString, 0, jsonString.length());
} catch (IOException e) {
e.printStackTrace();
}
ValueProvider<String> importManifest =
    ValueProvider.StaticValueProvider.of(manifestFile.toString());
PCollectionView<Ddl> ddlView =
    pipeline.apply("ddl", Create.of(getTestDdl())).apply(View.asSingleton());
PCollection<KV<String, String>> tableAndFiles =
    pipeline
        .apply("Read manifest file", new ReadImportManifest(importManifest))
        .apply("Resolve data files", new ResolveDataFiles(importManifest, ddlView));
pipeline.run();
}
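As a variation, a hedged sketch of the same malformed-manifest check written without the expected-exception annotation, mirroring the try/catch style of readImportManifestInvalidTable above. The exact failure message depends on the JSON parsing inside ReadImportManifest, so the sketch only asserts that the pipeline fails; it assumes org.junit.Assert.fail is statically imported alongside the assertions already used.
// Sketch (same fixtures as above): fail explicitly if no exception surfaces instead of
// relying on @Test(expected = PipelineExecutionException.class).
try {
  pipeline.run();
  fail("Expected the pipeline to fail on the malformed manifest");
} catch (PipelineExecutionException e) {
  // Message text is parser-dependent; the presence of the failure is enough here.
}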