use of com.google.bigtable.admin.v2.TableName in project java-bigtable by googleapis.
the class BaseBigtableTableAdminClientTest method deleteTableTest.
@Test
public void deleteTableTest() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  mockBigtableTableAdmin.addResponse(expectedResponse);
  TableName name = TableName.of("[PROJECT]", "[INSTANCE]", "[TABLE]");
  client.deleteTable(name);
  List<AbstractMessage> actualRequests = mockBigtableTableAdmin.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  DeleteTableRequest actualRequest = ((DeleteTableRequest) actualRequests.get(0));
  Assert.assertEquals(name.toString(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
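Beyond the mock-backed assertion above, the same deleteTable surface can be exercised against a real service. A minimal sketch, assuming default credentials are available to BaseBigtableTableAdminClient.create(); the project, instance, and table IDs are placeholders:

// Sketch only: placeholder IDs; the client is closed via try-with-resources.
try (BaseBigtableTableAdminClient client = BaseBigtableTableAdminClient.create()) {
  TableName name = TableName.of("my-project", "my-instance", "my-table");
  client.deleteTable(name);
}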
use of com.google.bigtable.admin.v2.TableName in project java-bigtable by googleapis.
the class BaseBigtableTableAdminClientTest method generateConsistencyTokenTest.
@Test
public void generateConsistencyTokenTest() throws Exception {
  GenerateConsistencyTokenResponse expectedResponse =
      GenerateConsistencyTokenResponse.newBuilder()
          .setConsistencyToken("consistencyToken-1985152319")
          .build();
  mockBigtableTableAdmin.addResponse(expectedResponse);
  TableName name = TableName.of("[PROJECT]", "[INSTANCE]", "[TABLE]");
  GenerateConsistencyTokenResponse actualResponse = client.generateConsistencyToken(name);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockBigtableTableAdmin.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  GenerateConsistencyTokenRequest actualRequest =
      ((GenerateConsistencyTokenRequest) actualRequests.get(0));
  Assert.assertEquals(name.toString(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
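The token returned by generateConsistencyToken is normally fed back into a consistency check. A short sketch of that loop, assuming the companion checkConsistency(TableName, String) method on the same generated client; all IDs are placeholders:

// Sketch: poll until replication has caught up with writes issued before the token.
TableName name = TableName.of("my-project", "my-instance", "my-table");
String token = client.generateConsistencyToken(name).getConsistencyToken();
while (!client.checkConsistency(name, token).getConsistent()) {
  Thread.sleep(1000L); // back off before re-checking
}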
use of com.google.bigtable.admin.v2.TableName in project java-bigtable by googleapis.
the class BaseBigtableTableAdminClientTest method snapshotTableExceptionTest2.
@Test
public void snapshotTableExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockBigtableTableAdmin.addException(exception);
  try {
    TableName name = TableName.of("[PROJECT]", "[INSTANCE]", "[TABLE]");
    String cluster = "cluster872092154";
    String snapshotId = "snapshotId-1113817601";
    String description = "description-1724546052";
    client.snapshotTableAsync(name, cluster, snapshotId, description).get();
    Assert.fail("No exception raised");
  } catch (ExecutionException e) {
    Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
    InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
    Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
  }
}
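For contrast with the failure path asserted above, a happy-path sketch of the same snapshotTableAsync call; the IDs and description are placeholders, and the blocking get() is for illustration only:

// Sketch: on success the long-running operation resolves to a Snapshot.
TableName name = TableName.of("my-project", "my-instance", "my-table");
Snapshot snapshot =
    client.snapshotTableAsync(name, "my-cluster", "my-snapshot", "nightly backup").get();
System.out.println("Created " + snapshot.getName());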
use of com.google.bigtable.admin.v2.TableName in project java-bigtable by googleapis.
the class BaseBigtableTableAdminClientTest method deleteTableExceptionTest.
@Test
public void deleteTableExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockBigtableTableAdmin.addException(exception);
  try {
    TableName name = TableName.of("[PROJECT]", "[INSTANCE]", "[TABLE]");
    client.deleteTable(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
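Note the difference from the async test above: the synchronous call surfaces the mapped ApiException directly instead of wrapping it in an ExecutionException. A hedged sketch of application-side handling, assuming a missing table maps to NotFoundException; IDs are placeholders:

// Sketch: treat a missing table as already deleted, let anything else propagate.
try {
  client.deleteTable(TableName.of("my-project", "my-instance", "my-table"));
} catch (NotFoundException e) {
  // Table was already gone; nothing to do.
}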
use of com.google.cloud.bigquery.storage.v1.TableName in project dataproc-templates by GoogleCloudPlatform.
the class PubSubToBQ method writeToBQ.
public static void writeToBQ(
    JavaDStream<SparkPubsubMessage> pubSubStream,
    String outputProjectID,
    String pubSubBQOutputDataset,
    String PubSubBQOutputTable,
    Integer batchSize) {
  pubSubStream.foreachRDD(new VoidFunction<JavaRDD<SparkPubsubMessage>>() {
    @Override
    public void call(JavaRDD<SparkPubsubMessage> sparkPubsubMessageJavaRDD) throws Exception {
      sparkPubsubMessageJavaRDD.foreachPartition(new VoidFunction<Iterator<SparkPubsubMessage>>() {
        @Override
        public void call(Iterator<SparkPubsubMessage> sparkPubsubMessageIterator) throws Exception {
          // Resolve the destination table's schema, then open a Storage Write API stream to it.
          BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
          Table table = bigquery.getTable(pubSubBQOutputDataset, PubSubBQOutputTable);
          TableName parentTable = TableName.of(outputProjectID, pubSubBQOutputDataset, PubSubBQOutputTable);
          Schema schema = table.getDefinition().getSchema();
          JsonStreamWriter writer = JsonStreamWriter.newBuilder(parentTable.toString(), schema).build();
          // Buffer incoming messages into a JSONArray and flush whenever batchSize is reached.
          JSONArray jsonArr = new JSONArray();
          while (sparkPubsubMessageIterator.hasNext()) {
            SparkPubsubMessage message = sparkPubsubMessageIterator.next();
            JSONObject record = new JSONObject(new String(message.getData()));
            jsonArr.put(record);
            if (jsonArr.length() == batchSize) {
              ApiFuture<AppendRowsResponse> future = writer.append(jsonArr);
              AppendRowsResponse response = future.get();
              jsonArr = new JSONArray();
            }
          }
          // Flush any remaining records smaller than a full batch.
          if (jsonArr.length() > 0) {
            ApiFuture<AppendRowsResponse> future = writer.append(jsonArr);
            AppendRowsResponse response = future.get();
          }
          writer.close();
        }
      });
    }
  });
}
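The writer above is only closed on the success path. A stand-alone sketch of the same write path with try-with-resources, assuming the default stream and the same JsonStreamWriter.newBuilder(String, Schema) overload used above; project, dataset, table, and field names are placeholders:

// Sketch only: propagates checked exceptions from build(), append(), and get().
static void appendOneBatch() throws Exception {
  BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  Schema schema = bigquery.getTable("my_dataset", "my_table").getDefinition().getSchema();
  TableName parentTable = TableName.of("my-project", "my_dataset", "my_table");
  // JsonStreamWriter implements AutoCloseable, so the stream is released even if append() fails.
  try (JsonStreamWriter writer =
      JsonStreamWriter.newBuilder(parentTable.toString(), schema).build()) {
    JSONArray batch = new JSONArray();
    batch.put(new JSONObject().put("event", "example"));
    ApiFuture<AppendRowsResponse> future = writer.append(batch);
    future.get(); // block until the append is acknowledged; throws on failure
  }
}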