Use of com.google.cloud.spanner.FakeBatchTransactionId in project beam by apache.
From class SpannerIOReadTest, method testReadMetricsFail.
@Test
public void testReadMetricsFail() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  pipeline.apply(
      "read q",
      SpannerIO.read()
          .withSpannerConfig(spannerConfig)
          .withTable("users")
          .withColumns("id", "name")
          .withTimestampBound(timestampBound));
  FakeBatchTransactionId id = new FakeBatchTransactionId("runReadTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  // Partitioning succeeds, but executing the single partition fails.
  Partition fakePartition =
      FakePartitionFactory.createFakeReadPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionRead(
          any(PartitionOptions.class), eq("users"), eq(KeySet.all()),
          eq(Arrays.asList("id", "name")), any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenThrow(
          SpannerExceptionFactory.newSpannerException(
              ErrorCode.DEADLINE_EXCEEDED, "Simulated Timeout 1"));
  try {
    pipeline.run();
  } catch (PipelineExecutionException e) {
    if (e.getCause() instanceof SpannerException
        && ((SpannerException) e.getCause()).getErrorCode().getGrpcStatusCode()
            == Code.DEADLINE_EXCEEDED) {
      // expected
    } else {
      throw e;
    }
  }
  // The simulated failure must be counted in the service-call metrics.
  verifyMetricWasSet("test", "aaa", "123", "deadline_exceeded", null, 1);
  verifyMetricWasSet("test", "aaa", "123", "ok", null, 0);
}
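This test calls a getSpannerConfig() helper defined elsewhere in SpannerIOReadTest. A minimal sketch of what it plausibly looks like; the ids ("test", "123", "aaa") are assumptions taken from the verifyMetricWasSet arguments above, and their mapping to project/instance/database is a guess:

private SpannerConfig getSpannerConfig() {
  return SpannerConfig.create()
      .withProjectId("test")
      .withInstanceId("123")
      .withDatabaseId("aaa")
      .withServiceFactory(serviceFactory); // route all client calls to the test fakes
}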
Use of com.google.cloud.spanner.FakeBatchTransactionId in project beam by apache.
From class SpannerIOReadTest, method runQueryWithPriority.
@Test
public void runQueryWithPriority() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  Read read =
      SpannerIO.read()
          .withSpannerConfig(spannerConfig)
          .withQuery("SELECT * FROM users")
          .withTimestampBound(timestampBound)
          .withHighPriority();
  PCollection<Struct> one = pipeline.apply("read q", read);
  FakeBatchTransactionId id = new FakeBatchTransactionId("runQueryTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  // Two partitions, each answered by one chunk of the fake rows.
  Partition fakePartition =
      FakePartitionFactory.createFakeQueryPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionQuery(
          any(PartitionOptions.class), eq(Statement.of("SELECT * FROM users")),
          any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 6)));
  PAssert.that(one).containsInAnyOrder(FAKE_ROWS);
  assertEquals(RpcPriority.HIGH, read.getSpannerConfig().getRpcPriority().get());
  pipeline.run();
}
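withHighPriority() is one of two convenience setters on SpannerIO.Read that record an RpcPriority on the transform's SpannerConfig, which the assertion above reads back via getRpcPriority(). A short sketch of both, reusing the spannerConfig from the test; the HIGH/LOW mappings are confirmed by the assertions in this test and in runReadWithPriority below:

Read highRead = SpannerIO.read()
    .withSpannerConfig(spannerConfig)
    .withQuery("SELECT * FROM users")
    .withHighPriority(); // getRpcPriority() resolves to RpcPriority.HIGH
Read lowRead = SpannerIO.read()
    .withSpannerConfig(spannerConfig)
    .withQuery("SELECT * FROM users")
    .withLowPriority(); // getRpcPriority() resolves to RpcPriority.LOW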
Use of com.google.cloud.spanner.FakeBatchTransactionId in project beam by apache.
From class SpannerIOReadTest, method readAllPipeline.
@Test
public void readAllPipeline() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  PCollectionView<Transaction> tx =
      pipeline.apply(
          "tx", SpannerIO.createTransaction().withSpannerConfig(spannerConfig).withTimestampBound(timestampBound));
  // Mix a query-based and a table-based ReadOperation in the same readAll().
  PCollection<ReadOperation> reads =
      pipeline.apply(
          Create.of(
              ReadOperation.create().withQuery("SELECT * FROM users"),
              ReadOperation.create().withTable("users").withColumns("id", "name")));
  PCollection<Struct> one =
      reads.apply("read all", SpannerIO.readAll().withSpannerConfig(spannerConfig).withTransaction(tx));
  BatchTransactionId txId = new FakeBatchTransactionId("tx");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(txId);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound)).thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeReadPartition(ByteString.copyFromUtf8("partition"));
  when(mockBatchTx.partitionQuery(
          any(PartitionOptions.class), eq(Statement.of("SELECT * FROM users")),
          any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition));
  when(mockBatchTx.partitionRead(
          any(PartitionOptions.class), eq("users"), eq(KeySet.all()),
          eq(Arrays.asList("id", "name")), any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)));
  PAssert.that(one).containsInAnyOrder(FAKE_ROWS);
  pipeline.run();
}
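ReadOperation is the unit of work for SpannerIO.readAll(): each element describes either a SQL query or a table read. Note that the table-based operation above never sets a key set, yet the partitionRead stub matches eq(KeySet.all()), so reading all keys is the default. A small sketch of the two shapes; the withKeySet call is an assumption and merely spells out that default:

ReadOperation byQuery = ReadOperation.create().withQuery("SELECT * FROM users");
ReadOperation byTable =
    ReadOperation.create()
        .withTable("users")
        .withColumns("id", "name")
        .withKeySet(KeySet.all()); // assumed setter; equivalent to the default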
Use of com.google.cloud.spanner.FakeBatchTransactionId in project beam by apache.
From class SpannerIOReadTest, method runReadWithPriority.
@Test
public void runReadWithPriority() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  Read read =
      SpannerIO.read()
          .withSpannerConfig(spannerConfig)
          .withTable("users")
          .withColumns("id", "name")
          .withTimestampBound(timestampBound)
          .withLowPriority();
  PCollection<Struct> one = pipeline.apply("read q", read);
  FakeBatchTransactionId id = new FakeBatchTransactionId("runReadTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  // Three partitions, each answered by one chunk of two fake rows.
  Partition fakePartition =
      FakePartitionFactory.createFakeReadPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionRead(
          any(PartitionOptions.class), eq("users"), eq(KeySet.all()),
          eq(Arrays.asList("id", "name")), any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)));
  PAssert.that(one).containsInAnyOrder(FAKE_ROWS);
  assertEquals(RpcPriority.LOW, read.getSpannerConfig().getRpcPriority().get());
  pipeline.run();
}
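FAKE_TYPE and FAKE_ROWS are fixtures defined elsewhere in SpannerIOReadTest; the subList calls above imply six rows matching the "id", "name" columns. A plausible reconstruction using the Spanner client's Type and Struct builders; the concrete column values are made up:

private static final Type FAKE_TYPE =
    Type.struct(
        Type.StructField.of("id", Type.int64()),
        Type.StructField.of("name", Type.string()));
private static final List<Struct> FAKE_ROWS =
    Arrays.asList(
        Struct.newBuilder().set("id").to(1L).set("name").to("one").build(),
        Struct.newBuilder().set("id").to(2L).set("name").to("two").build(),
        Struct.newBuilder().set("id").to(3L).set("name").to("three").build(),
        Struct.newBuilder().set("id").to(4L).set("name").to("four").build(),
        Struct.newBuilder().set("id").to(5L).set("name").to("five").build(),
        Struct.newBuilder().set("id").to(6L).set("name").to("six").build());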
Use of com.google.cloud.spanner.FakeBatchTransactionId in project beam by apache.
From class SpannerIOReadTest, method readPipeline.
@Test
public void readPipeline() throws Exception {
  Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
  TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
  SpannerConfig spannerConfig = getSpannerConfig();
  PCollection<Struct> one =
      pipeline.apply(
          "read q",
          SpannerIO.read()
              .withSpannerConfig(spannerConfig)
              .withQuery("SELECT * FROM users")
              .withTimestampBound(timestampBound));
  FakeBatchTransactionId txId = new FakeBatchTransactionId("readPipelineTest");
  when(mockBatchTx.getBatchTransactionId()).thenReturn(txId);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
      .thenReturn(mockBatchTx);
  when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
      .thenReturn(mockBatchTx);
  Partition fakePartition =
      FakePartitionFactory.createFakeQueryPartition(ByteString.copyFromUtf8("one"));
  when(mockBatchTx.partitionQuery(
          any(PartitionOptions.class), eq(Statement.of("SELECT * FROM users")),
          any(ReadQueryUpdateTransactionOption.class)))
      .thenReturn(Arrays.asList(fakePartition, fakePartition));
  when(mockBatchTx.execute(any(Partition.class)))
      .thenReturn(
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
          ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 6)));
  PAssert.that(one).containsInAnyOrder(FAKE_ROWS);
  pipeline.run();
}
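All five tests lean on the same scaffolding, defined elsewhere in SpannerIOReadTest. A minimal sketch of the assumed wiring: Beam's TestPipeline as a JUnit rule, plus a fake service factory whose mockBatchClient() hands back mocks, and a Mockito mock BatchReadOnlyTransaction. FakeServiceFactory and the setup details are assumptions, not the verbatim Beam source:

@Rule public final transient TestPipeline pipeline = TestPipeline.create();

private FakeServiceFactory serviceFactory; // assumed fake exposing mockBatchClient()
private BatchReadOnlyTransaction mockBatchTx;

@Before
public void setUp() throws Exception {
  serviceFactory = new FakeServiceFactory();
  mockBatchTx = Mockito.mock(BatchReadOnlyTransaction.class);
}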