Example of using org.apache.beam.sdk.transforms.display.DisplayData in the Apache Beam project.
Taken from the class TextIOTest, method testReadDisplayData.
@Test
public void testReadDisplayData() {
  // Read transform configured with a file pattern and BZIP2 compression.
  TextIO.Read readTransform =
      TextIO.read()
          .from("foo.*")
          .withCompressionType(BZIP2);

  DisplayData data = DisplayData.from(readTransform);

  // Both configured options should surface as display items.
  assertThat(data, hasDisplayItem("filePattern", "foo.*"));
  assertThat(data, hasDisplayItem("compressionType", BZIP2.toString()));
}
Example of using org.apache.beam.sdk.transforms.display.DisplayData in the Apache Beam project.
Taken from the class TextIOTest, method testWriteDisplayData.
@Test
public void testWriteDisplayData() {
  // Configure every display-relevant option on the write transform.
  TextIO.Write writeTransform =
      TextIO.write()
          .to("/foo")
          .withSuffix("bar")
          .withShardNameTemplate("-SS-of-NN-")
          .withNumShards(100)
          .withFooter("myFooter")
          .withHeader("myHeader");

  DisplayData data = DisplayData.from(writeTransform);

  // Each configured option must be registered as a display item.
  assertThat(data, hasDisplayItem("filePrefix", "/foo"));
  assertThat(data, hasDisplayItem("fileSuffix", "bar"));
  assertThat(data, hasDisplayItem("fileHeader", "myHeader"));
  assertThat(data, hasDisplayItem("fileFooter", "myFooter"));
  assertThat(data, hasDisplayItem("shardNameTemplate", "-SS-of-NN-"));
  assertThat(data, hasDisplayItem("numShards", 100));
  assertThat(data, hasDisplayItem("writableByteChannelFactory", "UNCOMPRESSED"));
}
Example of using org.apache.beam.sdk.transforms.display.DisplayData in the Apache Beam project.
Taken from the class HadoopInputFormatIOTest, method testReadDisplayData.
/**
 * This test validates functionality of
 * {@link HadoopInputFormatIO.HadoopInputFormatBoundedSource#populateDisplayData()
 * populateDisplayData()}.
 */
@Test
public void testReadDisplayData() {
  HadoopInputFormatBoundedSource<Text, Employee> source =
      new HadoopInputFormatBoundedSource<Text, Employee>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null, // No key translation required.
          null, // No value translation required.
          new SerializableSplit());

  DisplayData data = DisplayData.from(source);

  // The Hadoop configuration entries should be mirrored into the display data.
  assertThat(
      data,
      hasDisplayItem(
          "mapreduce.job.inputformat.class",
          serConf.get().get("mapreduce.job.inputformat.class")));
  assertThat(data, hasDisplayItem("key.class", serConf.get().get("key.class")));
  assertThat(data, hasDisplayItem("value.class", serConf.get().get("value.class")));
}
Example of using org.apache.beam.sdk.transforms.display.DisplayData in the Apache Beam project.
Taken from the class AvroIOTest, method testWriteDisplayData.
@Test
public void testWriteDisplayData() {
  // Configure every display-relevant option on the Avro write transform.
  AvroIO.Write<GenericClass> writeTransform =
      AvroIO.write(GenericClass.class)
          .to("/foo")
          .withShardNameTemplate("-SS-of-NN-")
          .withSuffix("bar")
          .withNumShards(100)
          .withCodec(CodecFactory.snappyCodec());

  DisplayData data = DisplayData.from(writeTransform);

  // Each configured option must be registered as a display item.
  assertThat(data, hasDisplayItem("filePrefix", "/foo"));
  assertThat(data, hasDisplayItem("shardNameTemplate", "-SS-of-NN-"));
  assertThat(data, hasDisplayItem("fileSuffix", "bar"));
  assertThat(data, hasDisplayItem("schema", GenericClass.class));
  assertThat(data, hasDisplayItem("numShards", 100));
  assertThat(data, hasDisplayItem("codec", CodecFactory.snappyCodec().toString()));
}
Example of using org.apache.beam.sdk.transforms.display.DisplayData in the Apache Beam project.
Taken from the class AvroIOTest, method testReadDisplayData.
// TODO: for Write only, test withSuffix,
// withShardNameTemplate and withoutSharding.
@Test
public void testReadDisplayData() {
  AvroIO.Read<String> readTransform = AvroIO.read(String.class).from("/foo.*");

  DisplayData data = DisplayData.from(readTransform);

  // The configured file pattern should surface as a display item.
  assertThat(data, hasDisplayItem("filePattern", "/foo.*"));
}
Aggregations