Use of org.apache.beam.sdk.io.common.TestRow.DeterministicallyConstructTestRowFn in project Beam by Apache.
From the class SqsIOIT, method testWriteThenRead:
@Test
public void testWriteThenRead() {
  int rows = env.options().getNumberOfRows();
  // Write test dataset to SQS.
  pipelineWrite
      .apply("Generate Sequence", GenerateSequence.from(0).to(rows))
      .apply("Prepare TestRows", ParDo.of(new DeterministicallyConstructTestRowFn()))
      .apply("Prepare SQS message", MapElements.into(requestType).via(sqsQueue::messageRequest))
      .apply("Write to SQS", SqsIO.write());
  // Read test dataset from SQS.
  PCollection<String> output = pipelineRead
      .apply("Read from SQS", SqsIO.read().withQueueUrl(sqsQueue.url).withMaxNumRecords(rows))
      .apply("Extract body", MapElements.into(strings()).via(SqsMessage::getBody));
  PAssert.thatSingleton(output.apply("Count All", Count.globally())).isEqualTo((long) rows);
  PAssert.that(output.apply(Combine.globally(new HashingFn()).withoutDefaults()))
      .containsInAnyOrder(getExpectedHashForRowCount(rows));
  pipelineWrite.run();
  pipelineRead.run();
}
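The "Prepare TestRows" step is where DeterministicallyConstructTestRowFn does its work: it turns each generated sequence number into the same TestRow on every run, which is why the HashingFn assertion can compare against a precomputed value from getExpectedHashForRowCount. A minimal sketch of the idea, assuming TestRow.fromSeed(int) is the deterministic factory used by Beam's io-common test utilities:

// Sketch only; the real class is a nested static class of org.apache.beam.sdk.io.common.TestRow.
static class DeterministicallyConstructTestRowFn extends DoFn<Long, TestRow> {
  @ProcessElement
  public void processElement(ProcessContext c) {
    // The same input element always yields the same TestRow, so content hashes are reproducible.
    c.output(TestRow.fromSeed(c.element().intValue()));
  }
}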
Use of org.apache.beam.sdk.io.common.TestRow.DeterministicallyConstructTestRowFn in project Beam by Apache.
From the class SnsIOIT, method testWriteThenRead:
@Test
public void testWriteThenRead() {
  ITOptions opts = env.options();
  int rows = opts.getNumberOfRows();
  // Write test dataset to SNS.
  pipelineWrite
      .apply("Generate Sequence", GenerateSequence.from(0).to(rows))
      .apply("Prepare TestRows", ParDo.of(new DeterministicallyConstructTestRowFn()))
      .apply("SNS request", MapElements.into(publishRequests).via(resources::publishRequest))
      .apply("Write to SNS",
          SnsIO.write()
              .withTopicName(resources.snsTopic)
              .withResultOutputTag(new TupleTag<>())
              .withAWSClientsProvider(
                  opts.getAwsCredentialsProvider().getCredentials().getAWSAccessKeyId(),
                  opts.getAwsCredentialsProvider().getCredentials().getAWSSecretKey(),
                  Regions.fromName(opts.getAwsRegion()),
                  opts.getAwsServiceEndpoint()));
  // Read test dataset from SQS.
  PCollection<String> output = pipelineRead
      .apply("Read from SQS", SqsIO.read().withQueueUrl(resources.sqsQueue).withMaxNumRecords(rows))
      .apply("Extract message", MapElements.into(strings()).via(SnsIOIT::extractMessage));
  PAssert.thatSingleton(output.apply("Count All", Count.globally())).isEqualTo((long) rows);
  PAssert.that(output.apply(Combine.globally(new HashingFn()).withoutDefaults()))
      .containsInAnyOrder(getExpectedHashForRowCount(rows));
  pipelineWrite.run();
  pipelineRead.run();
}
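Because the writes go to SNS but the reads come back through an SQS queue, the test needs an extractMessage helper to unwrap the notification envelope that lands on the queue. The sketch below is a hypothetical illustration of that unwrapping, not the actual SnsIOIT code; it assumes the queue message body is the standard SNS JSON notification whose "Message" field carries the published payload:

// Hypothetical helper, not the actual test code: pull the original payload out of the
// SNS notification JSON delivered to the SQS queue, using Jackson's ObjectMapper.
private static String extractMessage(Message sqsMessage) {
  try {
    return new ObjectMapper().readTree(sqsMessage.getBody()).get("Message").asText();
  } catch (IOException e) {
    throw new RuntimeException("Failed to parse SNS notification", e);
  }
}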
Use of org.apache.beam.sdk.io.common.TestRow.DeterministicallyConstructTestRowFn in project Beam by Apache.
From the class SqsIOIT, method testWriteThenRead:
@Test
public void testWriteThenRead() {
  int rows = env.options().getNumberOfRows();
  // Write test dataset to SQS.
  pipelineWrite
      .apply("Generate Sequence", GenerateSequence.from(0).to(rows))
      .apply("Prepare TestRows", ParDo.of(new DeterministicallyConstructTestRowFn()))
      .apply("Prepare SQS message", MapElements.into(requestType).via(sqsQueue::messageRequest))
      .apply("Write to SQS", SqsIO.write());
  // Read test dataset from SQS.
  PCollection<String> output = pipelineRead
      .apply("Read from SQS", SqsIO.read().withQueueUrl(sqsQueue.url).withMaxNumRecords(rows))
      .apply("Extract body", MapElements.into(strings()).via(Message::getBody));
  PAssert.thatSingleton(output.apply("Count All", Count.globally())).isEqualTo((long) rows);
  PAssert.that(output.apply(Combine.globally(new HashingFn()).withoutDefaults()))
      .containsInAnyOrder(getExpectedHashForRowCount(rows));
  pipelineWrite.run();
  pipelineRead.run();
}
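The sqsQueue::messageRequest reference in the "Prepare SQS message" step points at a helper on the test's queue fixture that wraps each TestRow in a SendMessageRequest. A hypothetical sketch for this AWS SDK v1 variant, assuming the row's name() value is used as the message body (the same value the HashingFn assertion later hashes):

// Hypothetical fixture method, not the actual test code: build a SendMessageRequest
// for the test queue with the TestRow's name as the message body.
SendMessageRequest messageRequest(TestRow row) {
  return new SendMessageRequest(url, row.name());
}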