Use of com.google.cloud.teleport.v2.transforms.ErrorConverters.FailedStringToTableRowFn in the project DataflowTemplates by GoogleCloudPlatform.
The example is the testFailedStringMessageToTableRowFn method of the ErrorConvertersTest class.
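For context, the snippet below sketches the imports and the Beam TestPipeline field that the test method relies on. The package paths for the Teleport-specific FailsafeElement and FailsafeElementCoder classes are assumptions based on the project layout and may differ in the repository.

import static com.google.common.truth.Truth.assertThat;

import com.google.api.services.bigquery.model.TableRow;
import com.google.cloud.teleport.v2.transforms.ErrorConverters.FailedStringToTableRowFn;
import com.google.cloud.teleport.v2.values.FailsafeElement;      // assumed package path
import com.google.cloud.teleport.v2.coders.FailsafeElementCoder; // assumed package path
import org.apache.beam.sdk.coders.CoderRegistry;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TimestampedValue;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Instant;
import org.junit.Rule;
import org.junit.Test;

// The test class provides the Beam test pipeline referenced as "pipeline" below.
@Rule public final transient TestPipeline pipeline = TestPipeline.create();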
/**
 * Tests that {@link ErrorConverters.FailedStringToTableRowFn} properly formats failed String
 * objects into {@link TableRow} objects to save to BigQuery.
 */
@Test
public void testFailedStringMessageToTableRowFn() {
  // Test input
  final String message = "Super secret";
  final String errorMessage = "Failed to parse input JSON";
  final String stacktrace = "Error at com.google.cloud.teleport.TextToBigQueryStreaming";

  final FailsafeElement<String, String> input =
      FailsafeElement.of(message, message)
          .setErrorMessage(errorMessage)
          .setStacktrace(stacktrace);

  final Instant timestamp =
      new DateTime(2022, 2, 22, 22, 22, 22, 222, DateTimeZone.UTC).toInstant();

  // Register the coder for the pipeline. This prevents having to invoke .setCoder() on
  // many transforms.
  FailsafeElementCoder<String, String> coder =
      FailsafeElementCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());

  CoderRegistry coderRegistry = pipeline.getCoderRegistry();
  coderRegistry.registerCoderForType(coder.getEncodedTypeDescriptor(), coder);

  // Build pipeline
  PCollection<TableRow> output =
      pipeline
          .apply(
              "CreateInput",
              Create.timestamped(TimestampedValue.of(input, timestamp)).withCoder(coder))
          .apply("FailedRecordToTableRow", ParDo.of(new FailedStringToTableRowFn()));

  // Assert
  PAssert.that(output)
      .satisfies(
          collection -> {
            final TableRow result = collection.iterator().next();
            assertThat(result.get("timestamp")).isEqualTo("2022-02-22 22:22:22.222000");
            assertThat(result.get("attributes")).isNull();
            assertThat(result.get("payloadString")).isEqualTo(message);
            assertThat(result.get("payloadBytes")).isNotNull();
            assertThat(result.get("errorMessage")).isEqualTo(errorMessage);
            assertThat(result.get("stacktrace")).isEqualTo(stacktrace);
            return null;
          });

  // Execute pipeline
  pipeline.run();
}
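In a streaming template, the TableRow elements produced by FailedStringToTableRowFn are meant to be written to a BigQuery dead-letter table. A minimal sketch of such a write using the standard BigQueryIO sink follows; the deadletterTable table spec and the dispositions are assumptions for illustration, not the template's exact configuration.

// Hypothetical dead-letter write; requires org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.
output.apply(
    "WriteFailedRecords",
    BigQueryIO.writeTableRows()
        .to(deadletterTable) // e.g. "project:dataset.error_records" (assumed table spec)
        .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_NEVER)
        .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND));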