Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform.
From class TextImportTransformTest, method readImportManifestPartialMatching: a manifest that references both existing files and a pattern that matches nothing should still resolve the files that do exist.
@Test
public void readImportManifestPartialMatching() throws Exception {
  Path f11 = Files.createTempFile("table1-file", "1");
  Path f12 = Files.createTempFile("table1-file", "2");
  Path manifestFile = Files.createTempFile("import-manifest", ".json");
  Charset charset = StandardCharsets.US_ASCII;
  try (BufferedWriter writer = Files.newBufferedWriter(manifestFile, charset)) {
    String jsonString =
        String.format(
            "{\"tables\": ["
                + "{\"table_name\": \"table1\","
                + "\"file_patterns\":[\"%s\",\"%s\"]},"
                + "{\"table_name\": \"table2\","
                + "\"file_patterns\":[\"NOT_FOUND_FILE_PATTERN_\"]}"
                + "]}",
            f11.toString(), f12.toString());
    writer.write(jsonString, 0, jsonString.length());
  }
  ValueProvider<String> importManifest =
      ValueProvider.StaticValueProvider.of(manifestFile.toString());
  PCollectionView<Ddl> ddlView =
      pipeline.apply("ddl", Create.of(getTestDdl())).apply(View.asSingleton());
  PCollection<KV<String, String>> tableAndFiles =
      pipeline
          .apply("Read manifest file", new ReadImportManifest(importManifest))
          .apply("Resolve data files", new ResolveDataFiles(importManifest, ddlView));
  // table2's pattern matches nothing, so only table1's two files should be resolved.
  PAssert.that(tableAndFiles)
      .containsInAnyOrder(KV.of("table1", f11.toString()), KV.of("table1", f12.toString()));
  pipeline.run();
}
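For readability, here is the manifest the test writes, pretty-printed (content identical to the string built above, whitespace aside); at runtime the two %s placeholders are filled with the paths of the temp files f11 and f12:

{
  "tables": [
    {"table_name": "table1", "file_patterns": ["%s", "%s"]},
    {"table_name": "table2", "file_patterns": ["NOT_FOUND_FILE_PATTERN_"]}
  ]
}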
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform.
From class TextImportTransformTest, method readPgImportManifestTypeMismatch: a manifest column type that disagrees with the PostgreSQL-dialect DDL must fail the pipeline.
@Test
public void readPgImportManifestTypeMismatch() throws Exception {
  Path f11 = Files.createTempFile("table1-file", "1");
  Path manifestFile = Files.createTempFile("import-manifest", ".json");
  Charset charset = StandardCharsets.UTF_8;
  try (BufferedWriter writer = Files.newBufferedWriter(manifestFile, charset)) {
    String jsonString =
        String.format(
            "{\"tables\": ["
                + "{\"table_name\": \"table1\","
                + "\"file_patterns\": [\"%s\"],"
                + "\"columns\": [{\"column_name\": \"int_col\", \"type_name\": \"text\"}]}"
                + "]}",
            f11.toString());
    writer.write(jsonString, 0, jsonString.length());
  }
  ValueProvider<String> importManifest =
      ValueProvider.StaticValueProvider.of(manifestFile.toString());
  PCollectionView<Ddl> ddlView =
      pipeline.apply("ddl", Create.of(getPgTestDdl())).apply(View.asSingleton());
  PCollection<KV<String, String>> tableAndFiles =
      pipeline
          .apply("Read manifest file", new ReadImportManifest(importManifest))
          .apply("Resolve data files", new ResolveDataFiles(importManifest, ddlView));
  // The manifest declares int_col as "text" while the database column is PG_INT8,
  // so resolving the data files must fail.
  PipelineExecutionException thrown =
      assertThrows(PipelineExecutionException.class, () -> pipeline.run());
  assertThat(
      thrown.getMessage(),
      equalTo(
          "java.lang.RuntimeException: Mismatching type: Table table1 Column int_col [PG_INT8"
              + " from DB and text from manifest]"));
}
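The manifest written above, pretty-printed (%s is replaced with the f11 temp-file path):

{
  "tables": [
    {
      "table_name": "table1",
      "file_patterns": ["%s"],
      "columns": [{"column_name": "int_col", "type_name": "text"}]
    }
  ]
}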
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform.
From class TextImportTransformTest, method readPgImportManifestTypeMustMatch: when every manifest column type matches the PostgreSQL-dialect DDL, the pipeline runs successfully.
@Test
public void readPgImportManifestTypeMustMatch() throws Exception {
  Path f11 = Files.createTempFile("table1-file", "1");
  Path manifestFile = Files.createTempFile("import-manifest", ".json");
  Charset charset = StandardCharsets.UTF_8;
  try (BufferedWriter writer = Files.newBufferedWriter(manifestFile, charset)) {
    String jsonString =
        String.format(
            "{\"tables\": [{\"table_name\": \"table1\",\"file_patterns\": [\"%s\"],\"columns\":"
                + " [{\"column_name\": \"int_col\", \"type_name\": \"bigint\"},"
                + " {\"column_name\":\"str_10_col\", \"type_name\": \"character varying(10)\"},"
                + " {\"column_name\":\"float_col\", \"type_name\": \"double precision\"},"
                + " {\"column_name\":\"bool_col\", \"type_name\": \"boolean\"},"
                + " {\"column_name\": \"byte_col\", \"type_name\": \"bytea\"},"
                + " {\"column_name\": \"timestamp_col\","
                + " \"type_name\":\"timestamp with time zone\"},"
                + " {\"column_name\": \"numeric_col\", \"type_name\": \"numeric\"},"
                + " {\"column_name\": \"date_col\", \"type_name\": \"date\"}]}]}",
            f11.toString());
    writer.write(jsonString, 0, jsonString.length());
  }
  ValueProvider<String> importManifest =
      ValueProvider.StaticValueProvider.of(manifestFile.toString());
  PCollectionView<Ddl> ddlView =
      pipeline.apply("ddl", Create.of(getPgTestDdl())).apply(View.asSingleton());
  PCollection<KV<String, String>> tableAndFiles =
      pipeline
          .apply("Read manifest file", new ReadImportManifest(importManifest))
          .apply("Resolve data files", new ResolveDataFiles(importManifest, ddlView));
  pipeline.run();
}
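Pretty-printed, the manifest pairs each column with a PostgreSQL-dialect type name; because all of them agree with the DDL from getPgTestDdl(), the pipeline completes without error:

{
  "tables": [
    {
      "table_name": "table1",
      "file_patterns": ["%s"],
      "columns": [
        {"column_name": "int_col", "type_name": "bigint"},
        {"column_name": "str_10_col", "type_name": "character varying(10)"},
        {"column_name": "float_col", "type_name": "double precision"},
        {"column_name": "bool_col", "type_name": "boolean"},
        {"column_name": "byte_col", "type_name": "bytea"},
        {"column_name": "timestamp_col", "type_name": "timestamp with time zone"},
        {"column_name": "numeric_col", "type_name": "numeric"},
        {"column_name": "date_col", "type_name": "date"}
      ]
    }
  ]
}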
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform.
From class TextImportTransformTest, method readImportManifestGeneratedColumnCannotBeImported: a manifest that names a generated column must be rejected.
@Test
public void readImportManifestGeneratedColumnCannotBeImported() throws Exception {
  Path f31 = Files.createTempFile("table3-file", "1");
  Path manifestFile = Files.createTempFile("import-manifest", ".json");
  Charset charset = StandardCharsets.UTF_8;
  try (BufferedWriter writer = Files.newBufferedWriter(manifestFile, charset)) {
    String jsonString =
        String.format(
            "{\"tables\": ["
                + "{\"table_name\": \"table3\","
                + "\"file_patterns\": [\"%s\"],"
                + "\"columns\": [{\"column_name\": \"gen_col\", \"type_name\": \"INT64\"}]}"
                + "]}",
            f31.toString());
    writer.write(jsonString, 0, jsonString.length());
  }
  ValueProvider<String> importManifest =
      ValueProvider.StaticValueProvider.of(manifestFile.toString());
  PCollectionView<Ddl> ddlView =
      pipeline.apply("ddl", Create.of(getTestDdl())).apply(View.asSingleton());
  PCollection<KV<String, String>> tableAndFiles =
      pipeline
          .apply("Read manifest file", new ReadImportManifest(importManifest))
          .apply("Resolve data files", new ResolveDataFiles(importManifest, ddlView));
  // gen_col is a generated column in the test DDL, so the import must be rejected.
  PipelineExecutionException thrown =
      assertThrows(PipelineExecutionException.class, () -> pipeline.run());
  assertThat(
      thrown.getMessage(),
      equalTo(
          "java.lang.RuntimeException: Column gen_col in manifest is a generated column "
              + "in DB table table3. Generated columns cannot be imported."));
}
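The manifest for this case, pretty-printed (%s is the f31 temp-file path):

{
  "tables": [
    {
      "table_name": "table3",
      "file_patterns": ["%s"],
      "columns": [{"column_name": "gen_col", "type_name": "INT64"}]
    }
  ]
}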
Use of com.google.cloud.teleport.spanner.ddl.Ddl in project DataflowTemplates by GoogleCloudPlatform.
From class AvroSchemaToDdlConverterTest, method columnOptions: column-level spannerOption_N attributes in an Avro schema are carried into the OPTIONS clause of the generated DDL.
@Test
public void columnOptions() {
  String avroString =
      "{"
          + " \"type\" : \"record\","
          + " \"name\" : \"Users\","
          + " \"namespace\" : \"spannertest\","
          + " \"fields\" : [ {"
          + " \"name\" : \"id\","
          + " \"type\" : \"long\","
          + " \"sqlType\" : \"INT64\""
          + " }, {"
          + " \"name\" : \"first_name\","
          + " \"type\" : [ \"null\", \"string\" ],"
          + " \"sqlType\" : \"STRING(10)\","
          + " \"spannerOption_0\" : \"allow_commit_timestamp=TRUE\","
          + " \"spannerOption_1\" : \"my_random_opt=\\\"1\\\"\""
          + " }],"
          + " \"googleStorage\" : \"CloudSpanner\","
          + " \"spannerParent\" : \"\","
          + " \"googleFormatVersion\" : \"booleans\","
          + " \"spannerPrimaryKey_0\" : \"`id` ASC\""
          + "}";
  Schema schema = new Schema.Parser().parse(avroString);
  AvroSchemaToDdlConverter converter = new AvroSchemaToDdlConverter();
  Ddl ddl = converter.toDdl(Collections.singleton(schema));
  assertThat(ddl.allTables(), hasSize(1));
  assertThat(
      ddl.prettyPrint(),
      equalToCompressingWhiteSpace(
          "CREATE TABLE `Users` ("
              + " `id` INT64 NOT NULL,"
              + " `first_name` STRING(10) "
              + " OPTIONS (allow_commit_timestamp=TRUE,my_random_opt=\"1\"),"
              + " ) PRIMARY KEY (`id` ASC)"));
}
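The Avro schema above, pretty-printed (content identical to avroString); the spannerOption_N attributes on first_name are what the converter emits as that column's OPTIONS clause:

{
  "type": "record",
  "name": "Users",
  "namespace": "spannertest",
  "fields": [
    {"name": "id", "type": "long", "sqlType": "INT64"},
    {
      "name": "first_name",
      "type": ["null", "string"],
      "sqlType": "STRING(10)",
      "spannerOption_0": "allow_commit_timestamp=TRUE",
      "spannerOption_1": "my_random_opt=\"1\""
    }
  ],
  "googleStorage": "CloudSpanner",
  "spannerParent": "",
  "googleFormatVersion": "booleans",
  "spannerPrimaryKey_0": "`id` ASC"
}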