Use of com.google.cloud.spanner.FakeBatchTransactionId in project beam by apache: class SpannerIOReadTest, method runQuery.
@Test
public void runQuery() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  PCollection<Struct> one =
      pipeline.apply(
          "read q",
          SpannerIO.read()
              .withSpannerConfig(spannerConfig)
              .withQuery("SELECT * FROM users")
              .withTimestampBound(timestampBound));
  FakeBatchTransactionId id = new FakeBatchTransactionId("runQueryTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeQueryPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionQuery(
          any(PartitionOptions.class),
          eq(Statement.of("SELECT * FROM users")),
          any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 6)));
  PAssert.that(one).containsInAnyOrder(FAKE_ROWS);
  pipeline.run();
}
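All of these tests share fixtures that this page does not show: mockBatchTx, serviceFactory, FAKE_TYPE, and FAKE_ROWS. A minimal sketch of how such fixtures can be declared with Mockito and the Spanner value classes follows; the column names and row values are illustrative assumptions, not the actual Beam test data (serviceFactory is sketched after the last example below).

import static org.mockito.Mockito.mock;

import com.google.cloud.spanner.BatchReadOnlyTransaction;
import com.google.cloud.spanner.Struct;
import com.google.cloud.spanner.Type;
import com.google.cloud.spanner.Type.StructField;
import java.util.Arrays;
import java.util.List;

// Schema and rows handed back by the stubbed partitions (illustrative values);
// six rows so that subList(0, 2), subList(2, 4), subList(4, 6) cover them all.
private static final Type FAKE_TYPE =
    Type.struct(StructField.of("id", Type.int64()), StructField.of("name", Type.string()));

private static final List<Struct> FAKE_ROWS =
    Arrays.asList(
        Struct.newBuilder().set("id").to(1L).set("name").to("a").build(),
        Struct.newBuilder().set("id").to(2L).set("name").to("b").build(),
        Struct.newBuilder().set("id").to(3L).set("name").to("c").build(),
        Struct.newBuilder().set("id").to(4L).set("name").to("d").build(),
        Struct.newBuilder().set("id").to(5L).set("name").to("e").build(),
        Struct.newBuilder().set("id").to(6L).set("name").to("f").build());

// The batch read-only transaction that every test stubs with Mockito.
private final BatchReadOnlyTransaction mockBatchTx = mock(BatchReadOnlyTransaction.class);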
Use of com.google.cloud.spanner.FakeBatchTransactionId in project beam by apache: class SpannerIOReadTest, method testReadMetricsSucceed.
@Test
public void testReadMetricsSucceed() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  pipeline.apply(
      "read q",
      SpannerIO.read()
          .withSpannerConfig(spannerConfig)
          .withTable("users")
          .withColumns("id", "name")
          .withTimestampBound(timestampBound));
  FakeBatchTransactionId id = new FakeBatchTransactionId("runReadTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeReadPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionRead(
          any(PartitionOptions.class), eq("users"), eq(KeySet.all()),
          eq(Arrays.asList("id", "name")), any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)));
  pipeline.run();
  verifyMetricWasSet("test", "aaa", "123", "ok", null, 3);
}
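verifyMetricWasSet is a helper of SpannerIOReadTest that is not shown on this page; judging from the call sites, its arguments carry the project, instance, and database IDs plus a status label, an optional query name, and the expected request count. The sketch below only illustrates the kind of assertion it could make, assuming the test captures PipelineResult pipelineResult = pipeline.run() and that the request counter is published under a hypothetical "spanner_api_requests"/<status> metric name; the real helper inspects Beam's per-request service-call metrics, which carry these values as labels.

import static org.junit.Assert.assertEquals;

import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.metrics.MetricNameFilter;
import org.apache.beam.sdk.metrics.MetricQueryResults;
import org.apache.beam.sdk.metrics.MetricResult;
import org.apache.beam.sdk.metrics.MetricsFilter;

// Illustrative only: sums the attempted count of a hypothetical
// "spanner_api_requests"/<status> counter and compares it to the expectation.
// queryName is ignored in this simplified sketch.
private void verifyMetricWasSet(
    String project, String instance, String database,
    String status, String queryName, long expectedCount) {
  MetricQueryResults results =
      pipelineResult
          .metrics()
          .queryMetrics(
              MetricsFilter.builder()
                  .addNameFilter(MetricNameFilter.named("spanner_api_requests", status))
                  .build());
  long attempted = 0;
  for (MetricResult<Long> counter : results.getCounters()) {
    attempted += counter.getAttempted();
  }
  assertEquals(expectedCount, attempted);
}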
Use of com.google.cloud.spanner.FakeBatchTransactionId in project beam by apache: class SpannerIOReadTest, method runReadTest.
private void runReadTest(SpannerConfig spannerConfig) throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  PCollection<Struct> one =
      pipeline.apply(
          "read q",
          SpannerIO.read()
              .withSpannerConfig(spannerConfig)
              .withTable("users")
              .withColumns("id", "name")
              .withTimestampBound(timestampBound));
  FakeBatchTransactionId id = new FakeBatchTransactionId("runReadTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeReadPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionRead(
          any(PartitionOptions.class), eq("users"), eq(KeySet.all()),
          eq(Arrays.asList("id", "name")), any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)));
  PAssert.that(one).containsInAnyOrder(FAKE_ROWS);
  pipeline.run();
}
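runReadTest is a private helper rather than a test: callers supply the SpannerConfig to exercise. getSpannerConfig() is not shown on this page either; a plausible sketch of it and of a caller is below. The project, instance, and database IDs are guesses chosen to line up with the verifyMetricWasSet calls in the other examples, and readWithDefaultConfig is an invented test name.

// Hypothetical: routes SpannerIO through the fake service factory so the
// mocked BatchClient is used instead of a real Spanner backend.
private SpannerConfig getSpannerConfig() {
  return SpannerConfig.create()
      .withProjectId("test")
      .withInstanceId("aaa")
      .withDatabaseId("123")
      .withServiceFactory(serviceFactory);
}

@Test
public void readWithDefaultConfig() throws Exception {
  runReadTest(getSpannerConfig());
}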
Use of com.google.cloud.spanner.FakeBatchTransactionId in project beam by apache: class SpannerIOReadTest, method testQueryMetricsFail.
@Test
public void testQueryMetricsFail() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  pipeline.apply(
      "read q",
      SpannerIO.read()
          .withSpannerConfig(spannerConfig)
          .withQuery("SELECT * FROM users")
          .withQueryName("queryName")
          .withTimestampBound(timestampBound));
  FakeBatchTransactionId id = new FakeBatchTransactionId("runQueryTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeQueryPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionQuery(
          any(PartitionOptions.class),
          eq(Statement.of("SELECT * FROM users")),
          any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenThrow(
          SpannerExceptionFactory.newSpannerException(
              ErrorCode.DEADLINE_EXCEEDED, "Simulated Timeout 1"));
  try {
    pipeline.run();
  } catch (PipelineExecutionException e) {
    if (e.getCause() instanceof SpannerException
        && ((SpannerException) e.getCause()).getErrorCode().getGrpcStatusCode()
            == Code.DEADLINE_EXCEEDED) {
      // expected
    } else {
      throw e;
    }
  }
  verifyMetricWasSet("test", "aaa", "123", "deadline_exceeded", null, 1);
  verifyMetricWasSet("test", "aaa", "123", "ok", null, 0);
}
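The try/catch above only tolerates a DEADLINE_EXCEEDED failure and rethrows anything else; if the pipeline happens to succeed, the metric checks catch it afterwards. With JUnit 4.13+ the same intent can be written more compactly using assertThrows and assertEquals from org.junit.Assert, which also fails immediately when no exception is thrown; a sketch:

// Equivalent expectation using org.junit.Assert.assertThrows (JUnit 4.13+).
PipelineExecutionException e =
    assertThrows(PipelineExecutionException.class, () -> pipeline.run());
SpannerException cause = (SpannerException) e.getCause();
assertEquals(ErrorCode.DEADLINE_EXCEEDED, cause.getErrorCode());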
Use of com.google.cloud.spanner.FakeBatchTransactionId in project beam by apache: class SpannerIOReadTest, method testQueryMetricsSucceed.
@Test
public void testQueryMetricsSucceed() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  pipeline.apply(
      "read q",
      SpannerIO.read()
          .withSpannerConfig(spannerConfig)
          .withQuery("SELECT * FROM users")
          .withQueryName("queryName")
          .withTimestampBound(timestampBound));
  FakeBatchTransactionId id = new FakeBatchTransactionId("runQueryTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeQueryPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionQuery(
          any(PartitionOptions.class),
          eq(Statement.of("SELECT * FROM users")),
          any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)));
  pipeline.run();
  verifyMetricWasSet("test", "aaa", "123", "deadline_exceeded", null, 0);
  verifyMetricWasSet("test", "aaa", "123", "ok", null, 2);
}
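Every example stubs serviceFactory.mockBatchClient(): the tests swap a fake ServiceFactory into the SpannerConfig so that SpannerIO obtains a mocked Spanner client instead of contacting the real service. A simplified sketch of such a factory is below; Beam's actual FakeServiceFactory additionally keeps its mocks in static state so they survive serialization of the transform.

import static org.mockito.Mockito.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.withSettings;

import com.google.cloud.ServiceFactory;
import com.google.cloud.spanner.BatchClient;
import com.google.cloud.spanner.DatabaseId;
import com.google.cloud.spanner.Spanner;
import com.google.cloud.spanner.SpannerOptions;
import java.io.Serializable;

// Simplified stand-in for the fake factory assumed by the tests above.
static class FakeServiceFactory implements ServiceFactory<Spanner, SpannerOptions>, Serializable {
  private final Spanner mockSpanner = mock(Spanner.class, withSettings().serializable());
  private final BatchClient mockBatchClient =
      mock(BatchClient.class, withSettings().serializable());

  FakeServiceFactory() {
    // Any database handed to the mocked Spanner client resolves to the mocked BatchClient.
    when(mockSpanner.getBatchClient(any(DatabaseId.class))).thenReturn(mockBatchClient);
  }

  @Override
  public Spanner create(SpannerOptions options) {
    return mockSpanner;
  }

  BatchClient mockBatchClient() {
    return mockBatchClient;
  }
}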