Example usage of org.apache.beam.sdk.io.AvroGeneratedUser in the Apache Beam project.
From class SnowflakeIOReadTest, method setup().
@BeforeClass
public static void setup() {
  // Seed the in-memory Snowflake stand-in with two CSV rows; the equivalent
  // Avro records are kept in avroTestData for assertions in the read tests.
  List<String> csvRows = Arrays.asList("Paul,51,red", "Jackson,41,green");
  FakeSnowflakeDatabase.createTableWithElements(FAKE_TABLE, csvRows);
  avroTestData =
      ImmutableList.of(
          new AvroGeneratedUser("Paul", 51, "red"),
          new AvroGeneratedUser("Jackson", 41, "green"));
  // Pipeline options point at a placeholder server/bucket; the fake service never connects.
  options.setServerName("NULL.snowflakecomputing.com");
  options.setStorageIntegrationName("STORAGE_INTEGRATION");
  options.setStagingBucketName(BUCKET_NAME);
  dataSourceConfiguration =
      SnowflakeIO.DataSourceConfiguration.create(new FakeSnowflakeBasicDataSource())
          .withServerName(options.getServerName());
  snowflakeService = new FakeSnowflakeBatchServiceImpl();
}
Example usage of org.apache.beam.sdk.io.AvroGeneratedUser in the Apache Beam project.
From class PubsubIOTest, method testAvroSpecificRecord().
@Test
public void testAvroSpecificRecord() {
  // Three users (two with a null favorite color, one with a null favorite number)
  // are pushed through the fake Pub/Sub client as Avro specific records.
  List<AvroGeneratedUser> expected =
      ImmutableList.of(
          new AvroGeneratedUser("Bob", 256, null),
          new AvroGeneratedUser("Alice", 128, null),
          new AvroGeneratedUser("Ted", null, "white"));
  setupTestClient(expected, AvroCoder.of(AvroGeneratedUser.class));
  PubsubIO.Read<AvroGeneratedUser> source =
      PubsubIO.readAvrosWithBeamSchema(AvroGeneratedUser.class)
          .fromSubscription(SUBSCRIPTION.getPath())
          .withClock(CLOCK)
          .withClientFactory(clientFactory);
  PCollection<AvroGeneratedUser> read = readPipeline.apply(source);
  PAssert.that(read).containsInAnyOrder(expected);
  readPipeline.run();
}
Example usage of org.apache.beam.sdk.io.AvroGeneratedUser in the Apache Beam project.
From class PubsubIOTest, method testAvroGenericRecords().
@Test
public void testAvroGenericRecords() {
  // AvroGeneratedUser instances double as GenericRecords, so the same fixtures
  // can seed a generic-record read keyed off the explicit SCHEMA.
  AvroCoder<GenericRecord> genericCoder = AvroCoder.of(GenericRecord.class, SCHEMA);
  List<GenericRecord> expected =
      ImmutableList.of(
          new AvroGeneratedUser("Bob", 256, null),
          new AvroGeneratedUser("Alice", 128, null),
          new AvroGeneratedUser("Ted", null, "white"));
  setupTestClient(expected, genericCoder);
  PCollection<GenericRecord> read =
      readPipeline.apply(
          PubsubIO.readAvroGenericRecords(SCHEMA)
              .fromSubscription(SUBSCRIPTION.getPath())
              .withClock(CLOCK)
              .withClientFactory(clientFactory));
  PAssert.that(read).containsInAnyOrder(expected);
  readPipeline.run();
}
Example usage of org.apache.beam.sdk.io.AvroGeneratedUser in the Apache Beam project.
From class KafkaIOTest, method testReadAvroSpecificRecordsWithConfluentSchemaRegistry().
@Test
public void testReadAvroSpecificRecordsWithConfluentSchemaRegistry() {
  final int numElements = 100;
  final String topic = "my_topic";
  final String schemaRegistryUrl = "mock://my-scope-name";
  final String valueSchemaSubject = topic + "-value";

  // Expected output: integer keys 0..99 paired with generated Avro users.
  List<KV<Integer, AvroGeneratedUser>> expected = new ArrayList<>();
  for (int i = 0; i < numElements; i++) {
    expected.add(KV.of(i, new AvroGeneratedUser("ValueName" + i, i, "color" + i)));
  }

  // Keys are plain big-endian ints; values are Avro-serialized against the
  // mock schema registry, exercising the Confluent deserializer path.
  KafkaIO.Read<Integer, AvroGeneratedUser> reader =
      KafkaIO.<Integer, AvroGeneratedUser>read()
          .withBootstrapServers("localhost:9092")
          .withTopic(topic)
          .withKeyDeserializer(IntegerDeserializer.class)
          .withValueDeserializer(
              mockDeserializerProvider(schemaRegistryUrl, valueSchemaSubject, null))
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  ImmutableList.of(topic),
                  1,
                  numElements,
                  OffsetResetStrategy.EARLIEST,
                  keyIndex -> ByteBuffer.wrap(new byte[4]).putInt(keyIndex).array(),
                  new ValueAvroSerializableFunction(topic, schemaRegistryUrl)))
          .withMaxNumRecords(numElements);

  PCollection<KV<Integer, AvroGeneratedUser>> input = p.apply(reader.withoutMetadata());
  PAssert.that(input).containsInAnyOrder(expected);
  p.run();
}
Example usage of org.apache.beam.sdk.io.AvroGeneratedUser in the Apache Beam project.
From class KafkaIOTest, method testReadAvroGenericRecordsWithConfluentSchemaRegistry().
@Test
public void testReadAvroGenericRecordsWithConfluentSchemaRegistry() {
  final int numElements = 100;
  final String topic = "my_topic";
  final String schemaRegistryUrl = "mock://my-scope-name";
  final String keySchemaSubject = topic + "-key";
  final String valueSchemaSubject = topic + "-value";

  // Both sides of each KV are Avro records, deserialized as GenericRecords.
  List<KV<GenericRecord, GenericRecord>> expected = new ArrayList<>();
  for (int i = 0; i < numElements; i++) {
    expected.add(
        KV.of(
            new AvroGeneratedUser("KeyName" + i, i, "color" + i),
            new AvroGeneratedUser("ValueName" + i, i, "color" + i)));
  }

  // Keys and values are serialized against the mock registry under separate subjects.
  KafkaIO.Read<GenericRecord, GenericRecord> reader =
      KafkaIO.<GenericRecord, GenericRecord>read()
          .withBootstrapServers("localhost:9092")
          .withTopic(topic)
          .withKeyDeserializer(
              mockDeserializerProvider(schemaRegistryUrl, keySchemaSubject, null))
          .withValueDeserializer(
              mockDeserializerProvider(schemaRegistryUrl, valueSchemaSubject, null))
          .withConsumerFactoryFn(
              new ConsumerFactoryFn(
                  ImmutableList.of(topic),
                  1,
                  numElements,
                  OffsetResetStrategy.EARLIEST,
                  new KeyAvroSerializableFunction(topic, schemaRegistryUrl),
                  new ValueAvroSerializableFunction(topic, schemaRegistryUrl)))
          .withMaxNumRecords(numElements);

  PCollection<KV<GenericRecord, GenericRecord>> input = p.apply(reader.withoutMetadata());
  PAssert.that(input).containsInAnyOrder(expected);
  p.run();
}
Aggregations