use of org.apache.beam.sdk.io.aws2.kinesis.KinesisIO.Read in project beam by apache.
the class KinesisIOReadTest method testBuildWithCredentialsProvider.
@Test
public void testBuildWithCredentialsProvider() {
  // Building a read with explicit credentials and region must surface both
  // in the resulting client configuration.
  AwsCredentialsProvider credentials = DefaultCredentialsProvider.create();
  Region awsRegion = Region.US_EAST_1;

  Read read = KinesisIO.read().withAWSClientsProvider(credentials, awsRegion);

  // No endpoint was supplied, hence the trailing null.
  assertThat(read.getClientConfiguration())
      .isEqualTo(ClientConfiguration.create(credentials, awsRegion, null));
}
use of org.apache.beam.sdk.io.aws2.kinesis.KinesisIO.Read in project beam by apache.
the class KinesisIOReadTest method readFromShards.
/**
 * Runs the test pipeline over a bounded Kinesis read and asserts its output.
 *
 * @param fn customizes the base {@link Read} transform before it is applied
 * @param expected the records the pipeline is expected to emit, in any order
 */
private void readFromShards(Function<Read, Read> fn, Iterable<Record> expected) {
  // Base source: start from the beginning of the stream, bounded by
  // SHARDS * SHARD_EVENTS records so the pipeline terminates.
  Read source =
      KinesisIO.read()
          .withStreamName("stream")
          .withInitialPositionInStream(TRIM_HORIZON)
          .withArrivalTimeWatermarkPolicy()
          .withMaxNumRecords(SHARDS * SHARD_EVENTS);

  PCollection<Record> records = p.apply(fn.apply(source)).apply(ParDo.of(new ToRecord()));
  PAssert.that(records).containsInAnyOrder(expected);
  p.run();
}
use of org.apache.beam.sdk.io.aws2.sqs.SqsIO.Read in project beam by apache.
the class SqsIOReadTest method testBuildWithCredentialsProviderAndRegionAndEndpoint.
@Test
public void testBuildWithCredentialsProviderAndRegionAndEndpoint() {
  // Configure all three client knobs: credentials, region, and a custom endpoint.
  AwsCredentialsProvider credentials = DefaultCredentialsProvider.create();
  Region awsRegion = Region.US_EAST_1;
  URI serviceEndpoint = URI.create("localhost:9999");

  Read read = SqsIO.read().withSqsClientProvider(credentials, awsRegion.id(), serviceEndpoint);

  // All three values must round-trip into the read's client configuration.
  assertThat(read.clientConfiguration())
      .isEqualTo(ClientConfiguration.create(credentials, awsRegion, serviceEndpoint));
}
use of org.apache.beam.sdk.io.aws2.sns.SnsIO (together with org.apache.beam.sdk.io.aws2.sqs.SqsIO) in project beam by apache.
the class SnsIOIT method testWriteThenRead.
@Test
public void testWriteThenRead() {
  ITOptions options = env.options();
  int rows = options.getNumberOfRows();

  // Publish the deterministically generated test rows to the SNS topic.
  pipelineWrite
      .apply("Generate Sequence", GenerateSequence.from(0).to(rows))
      .apply("Prepare TestRows", ParDo.of(new DeterministicallyConstructTestRowFn()))
      .apply(
          "Write to SNS",
          SnsIO.<TestRow>write()
              .withTopicArn(resources.snsTopic)
              .withPublishRequestBuilder(r -> PublishRequest.builder().message(r.name())));

  // Read the dataset back via the SQS queue (presumably wired to the topic
  // by `resources` — verify against the test fixture setup).
  PCollection<String> output =
      pipelineRead
          .apply(
              "Read from SQS",
              SqsIO.read().withQueueUrl(resources.sqsQueue).withMaxNumRecords(rows))
          .apply("Extract message", MapElements.into(strings()).via(SnsIOIT::extractMessage));

  // Check both the element count and a content hash of everything read back.
  PAssert.thatSingleton(output.apply("Count All", Count.globally())).isEqualTo((long) rows);
  PAssert.that(output.apply(Combine.globally(new HashingFn()).withoutDefaults()))
      .containsInAnyOrder(getExpectedHashForRowCount(rows));

  pipelineWrite.run();
  pipelineRead.run();
}
use of org.apache.beam.sdk.io.aws2.s3.S3FileSystem in project beam by apache.
the class S3FileSystemTest method testWriteAndRead.
@Test
public void testWriteAndRead() throws IOException {
  S3FileSystem s3FileSystem = buildMockedS3FileSystem(s3Config("mys3"), client);
  client.createBucket(CreateBucketRequest.builder().bucket("testbucket").build());

  // Use non-zero content so the final assertion can distinguish real data from
  // an untouched (zero-initialized) read buffer. The original all-zero payload
  // masked a write bug: allocate()+put() left position == limit, so
  // writableByteChannel.write(bb) wrote 0 bytes yet the test still passed.
  byte[] writtenArray = new byte[] {1, 2, 3, 4, 5};
  // wrap() keeps position at 0, so every byte is "remaining" for the channel.
  ByteBuffer bb = ByteBuffer.wrap(writtenArray);

  // First create an object and write data to it; close the channel to flush.
  S3ResourceId path = S3ResourceId.fromUri("mys3://testbucket/foo/bar.txt");
  try (WritableByteChannel writableByteChannel =
      s3FileSystem.create(
          path,
          CreateOptions.StandardCreateOptions.builder().setMimeType("application/text").build())) {
    writableByteChannel.write(bb);
  }

  // Now read the same object back.
  ByteBuffer bb2 = ByteBuffer.allocate(writtenArray.length);
  try (ReadableByteChannel open = s3FileSystem.open(path)) {
    open.read(bb2);
  }

  // assertArrayEquals takes (expected, actual); the original call had them swapped,
  // which would have produced a misleading failure message.
  assertArrayEquals(writtenArray, bb2.array());
}
Aggregations