Use of com.google.cloud.spanner.Partition in project beam by apache.
From the class SpannerIOReadTest, method runReadWithPriority:
@Test
public void runReadWithPriority() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  Read read =
      SpannerIO.read()
          .withSpannerConfig(spannerConfig)
          .withTable("users")
          .withColumns("id", "name")
          .withTimestampBound(timestampBound)
          .withLowPriority();
  PCollection<Struct> one = pipeline.apply("read q", read);
  FakeBatchTransactionId id = new FakeBatchTransactionId("runReadTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeReadPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionRead(
          any(PartitionOptions.class), eq("users"), eq(KeySet.all()),
          eq(Arrays.asList("id", "name")), any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)));
  PAssert.that(one).containsInAnyOrder(FAKE_ROWS);
  assertEquals(RpcPriority.LOW, read.getSpannerConfig().getRpcPriority().get());
  pipeline.run();
}
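The test above only checks that withLowPriority() is reflected in the resulting SpannerConfig as RpcPriority.LOW. For context, here is a minimal sketch of how the same option might be used in a user pipeline; it is illustrative only, and the project, instance, and database IDs are placeholders rather than values from the test.

// Sketch only: a low-priority SpannerIO read in a standalone pipeline.
// The project/instance/database IDs are placeholders.
import com.google.cloud.spanner.Struct;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.gcp.spanner.SpannerConfig;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.values.PCollection;

public class LowPriorityReadExample {
  public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
    SpannerConfig config =
        SpannerConfig.create()
            .withProjectId("my-project")      // placeholder
            .withInstanceId("my-instance")    // placeholder
            .withDatabaseId("my-database");   // placeholder
    PCollection<Struct> rows =
        p.apply(
            "LowPriorityRead",
            SpannerIO.read()
                .withSpannerConfig(config)
                .withTable("users")
                .withColumns("id", "name")
                .withLowPriority());          // batch reads are issued at RpcPriority.LOW
    p.run().waitUntilFinish();
  }
}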
Use of com.google.cloud.spanner.Partition in project beam by apache.
From the class SpannerIOReadTest, method readPipeline:
@Test
public void readPipeline() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  PCollection<Struct> one =
      pipeline.apply("read q",
          SpannerIO.read()
              .withSpannerConfig(spannerConfig)
              .withQuery("SELECT * FROM users")
              .withTimestampBound(timestampBound));
  FakeBatchTransactionId txId = new FakeBatchTransactionId("readPipelineTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(txId);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeQueryPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionQuery(
          any(PartitionOptions.class), eq(Statement.of("SELECT * FROM users")),
          any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 6)));
  PAssert.that(one).containsInAnyOrder(FAKE_ROWS);
  pipeline.run();
}
Use of com.google.cloud.spanner.Partition in project beam by apache.
From the class SpannerIOReadTest, method runQuery:
@Test
public void runQuery() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  PCollection<Struct> one =
      pipeline.apply("read q",
          SpannerIO.read()
              .withSpannerConfig(spannerConfig)
              .withQuery("SELECT * FROM users")
              .withTimestampBound(timestampBound));
  FakeBatchTransactionId id = new FakeBatchTransactionId("runQueryTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeQueryPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionQuery(
          any(PartitionOptions.class), eq(Statement.of("SELECT * FROM users")),
          any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 6)));
  PAssert.that(one).containsInAnyOrder(FAKE_ROWS);
  pipeline.run();
}
Use of com.google.cloud.spanner.Partition in project beam by apache.
From the class SpannerIOReadTest, method testReadMetricsSucceed:
@Test
public void testReadMetricsSucceed() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  pipeline.apply("read q",
      SpannerIO.read()
          .withSpannerConfig(spannerConfig)
          .withTable("users")
          .withColumns("id", "name")
          .withTimestampBound(timestampBound));
  FakeBatchTransactionId id = new FakeBatchTransactionId("runReadTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeReadPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionRead(
          any(PartitionOptions.class), eq("users"), eq(KeySet.all()),
          eq(Arrays.asList("id", "name")), any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)));
  pipeline.run();
  verifyMetricWasSet("test", "aaa", "123", "ok", null, 3);
}
Use of com.google.cloud.spanner.Partition in project google-cloud-java by GoogleCloudPlatform.
From the class BatchClientSnippets, method partitionRead:
void partitionRead() {
  // [START partition_read]
  final BatchReadOnlyTransaction txn =
      batchClient.batchReadOnlyTransaction(TimestampBound.strong());
  List<Partition> partitions =
      txn.partitionRead(
          PartitionOptions.getDefaultInstance(),
          "Singers",
          KeySet.all(),
          Arrays.asList("SingerId", "FirstName", "LastName"));
  for (final Partition p : partitions) {
    try (ResultSet results = txn.execute(p)) {
      while (results.next()) {
        long singerId = results.getLong(0);
        String firstName = results.getString(1);
        String lastName = results.getString(2);
        System.out.println("[" + singerId + "] " + firstName + " " + lastName);
      }
    }
  }
  // [END partition_read]
}
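A BatchReadOnlyTransaction can also partition a SQL query rather than a key-range read, which is the path the Beam tests above exercise through partitionQuery. The sketch below mirrors the partition_read snippet using partitionQuery; it is illustrative only (not taken from BatchClientSnippets) and assumes the same batchClient field and Singers schema as the snippet above.

void partitionQuery() {
  // Illustrative sketch: partition a query instead of a key-range read.
  final BatchReadOnlyTransaction txn =
      batchClient.batchReadOnlyTransaction(TimestampBound.strong());
  List<Partition> partitions =
      txn.partitionQuery(
          PartitionOptions.getDefaultInstance(),
          Statement.of("SELECT SingerId, FirstName, LastName FROM Singers"));
  for (final Partition p : partitions) {
    // Each partition can be executed independently, e.g. on a different worker.
    try (ResultSet results = txn.execute(p)) {
      while (results.next()) {
        System.out.println(
            "[" + results.getLong(0) + "] " + results.getString(1) + " " + results.getString(2));
      }
    }
  }
}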