Use of com.google.cloud.bigquery.storage.v1beta2.TableName in project gapic-generator-java by googleapis.
The class SyncCheckAndMutateRowTablenameBytestringRowfilterListmutationListmutation, method syncCheckAndMutateRowTablenameBytestringRowfilterListmutationListmutation:
public static void syncCheckAndMutateRowTablenameBytestringRowfilterListmutationListmutation()
    throws Exception {
  // It may require modifications to work in your environment.
  try (BaseBigtableDataClient baseBigtableDataClient = BaseBigtableDataClient.create()) {
    TableName tableName = TableName.of("[PROJECT]", "[INSTANCE]", "[TABLE]");
    ByteString rowKey = ByteString.EMPTY;
    RowFilter predicateFilter = RowFilter.newBuilder().build();
    List<Mutation> trueMutations = new ArrayList<>();
    List<Mutation> falseMutations = new ArrayList<>();
    CheckAndMutateRowResponse response =
        baseBigtableDataClient.checkAndMutateRow(
            tableName, rowKey, predicateFilter, trueMutations, falseMutations);
  }
}
Use of com.google.cloud.bigquery.storage.v1beta2.TableName in project gapic-generator-java by googleapis.
The class SyncCheckAndMutateRowTablenameBytestringRowfilterListmutationListmutationString, method syncCheckAndMutateRowTablenameBytestringRowfilterListmutationListmutationString:
public static void syncCheckAndMutateRowTablenameBytestringRowfilterListmutationListmutationString()
    throws Exception {
  // It may require modifications to work in your environment.
  try (BaseBigtableDataClient baseBigtableDataClient = BaseBigtableDataClient.create()) {
    TableName tableName = TableName.of("[PROJECT]", "[INSTANCE]", "[TABLE]");
    ByteString rowKey = ByteString.EMPTY;
    RowFilter predicateFilter = RowFilter.newBuilder().build();
    List<Mutation> trueMutations = new ArrayList<>();
    List<Mutation> falseMutations = new ArrayList<>();
    String appProfileId = "appProfileId704923523";
    CheckAndMutateRowResponse response =
        baseBigtableDataClient.checkAndMutateRow(
            tableName, rowKey, predicateFilter, trueMutations, falseMutations, appProfileId);
  }
}
Use of com.google.cloud.bigquery.storage.v1beta2.TableName in project gapic-generator-java by googleapis.
The class SyncMutateRowTablenameBytestringListmutation, method syncMutateRowTablenameBytestringListmutation:
public static void syncMutateRowTablenameBytestringListmutation() throws Exception {
  // It may require modifications to work in your environment.
  try (BaseBigtableDataClient baseBigtableDataClient = BaseBigtableDataClient.create()) {
    TableName tableName = TableName.of("[PROJECT]", "[INSTANCE]", "[TABLE]");
    ByteString rowKey = ByteString.EMPTY;
    List<Mutation> mutations = new ArrayList<>();
    MutateRowResponse response = baseBigtableDataClient.mutateRow(tableName, rowKey, mutations);
  }
}
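In the generated snippets above the mutation lists are left empty. Below is a minimal sketch, assuming the Mutation type in these snippets is the protocol buffer com.google.bigtable.v2.Mutation accepted by BaseBigtableDataClient, of how a single SetCell mutation could be added to one of those lists; the column family, qualifier, and value are placeholders:

import com.google.bigtable.v2.Mutation;
import com.google.protobuf.ByteString;
import java.util.ArrayList;
import java.util.List;

public class BuildMutationsSketch {
  static List<Mutation> buildTrueMutations() {
    // Placeholder family name, qualifier, and value; adjust to the destination table's schema.
    Mutation setCell =
        Mutation.newBuilder()
            .setSetCell(
                Mutation.SetCell.newBuilder()
                    .setFamilyName("cf")
                    .setColumnQualifier(ByteString.copyFromUtf8("greeting"))
                    .setTimestampMicros(-1) // -1 asks the server to assign the timestamp.
                    .setValue(ByteString.copyFromUtf8("hello")))
            .build();
    List<Mutation> mutations = new ArrayList<>();
    mutations.add(setCell);
    return mutations;
  }
}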
Use of com.google.cloud.bigquery.storage.v1beta2.TableName in project java-bigquerystorage by googleapis.
The class WritePendingStream, method writePendingStream:
public static void writePendingStream(String projectId, String datasetName, String tableName)
    throws DescriptorValidationException, InterruptedException, IOException {
  try (BigQueryWriteClient client = BigQueryWriteClient.create()) {
    // Initialize a write stream for the specified table.
    // For more information on WriteStream.Type, see:
    // https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1/WriteStream.Type.html
    WriteStream stream = WriteStream.newBuilder().setType(WriteStream.Type.PENDING).build();
    TableName parentTable = TableName.of(projectId, datasetName, tableName);
    CreateWriteStreamRequest createWriteStreamRequest =
        CreateWriteStreamRequest.newBuilder()
            .setParent(parentTable.toString())
            .setWriteStream(stream)
            .build();
    WriteStream writeStream = client.createWriteStream(createWriteStreamRequest);

    // https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.html
    try (JsonStreamWriter writer =
        JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema()).build()) {
      // Write two batches to the stream, each with 10 JSON records.
      for (int i = 0; i < 2; i++) {
        // Create a JSON object that is compatible with the table schema.
        JSONArray jsonArr = new JSONArray();
        for (int j = 0; j < 10; j++) {
          JSONObject record = new JSONObject();
          record.put("col1", String.format("batch-record %03d-%03d", i, j));
          jsonArr.put(record);
        }
        ApiFuture<AppendRowsResponse> future = writer.append(jsonArr);
        AppendRowsResponse response = future.get();
      }
      FinalizeWriteStreamResponse finalizeResponse =
          client.finalizeWriteStream(writeStream.getName());
      System.out.println("Rows written: " + finalizeResponse.getRowCount());
    }

    // Commit the streams.
    BatchCommitWriteStreamsRequest commitRequest =
        BatchCommitWriteStreamsRequest.newBuilder()
            .setParent(parentTable.toString())
            .addWriteStreams(writeStream.getName())
            .build();
    BatchCommitWriteStreamsResponse commitResponse = client.batchCommitWriteStreams(commitRequest);
    // If the response does not have a commit time, it means the commit operation failed.
    if (!commitResponse.hasCommitTime()) {
      for (StorageError err : commitResponse.getStreamErrorsList()) {
        System.out.println(err.getErrorMessage());
      }
      throw new RuntimeException("Error committing the streams");
    }
    System.out.println("Appended and committed records successfully.");
  } catch (ExecutionException e) {
    // If the wrapped exception is a StatusRuntimeException, check the state of the operation.
    // If the state is INTERNAL, CANCELLED, or ABORTED, you can retry. For more information, see:
    // https://grpc.github.io/grpc-java/javadoc/io/grpc/StatusRuntimeException.html
    System.out.println(e);
  }
}
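A minimal sketch of how writePendingStream could be invoked; the project, dataset, and table identifiers below are placeholders:

public static void main(String[] args) throws Exception {
  // Placeholder identifiers; replace them with a real project, dataset, and table.
  String projectId = "my-project";
  String datasetName = "my_dataset";
  String tableName = "my_table";
  writePendingStream(projectId, datasetName, tableName);
}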
Use of com.google.cloud.bigquery.storage.v1beta2.TableName in project java-bigquerystorage by googleapis.
The class AppendCompleteCallback, method writeToDefaultStream:
// writeToDefaultStream: Writes records from the source file to the destination table.
public static void writeToDefaultStream(
    String projectId, String datasetName, String tableName, String dataFile)
    throws DescriptorValidationException, InterruptedException, IOException {
  BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  // Get the schema of the destination table and convert it to the equivalent BigQueryStorage type.
  Table table = bigquery.getTable(datasetName, tableName);
  Schema schema = table.getDefinition().getSchema();
  TableSchema tableSchema = BqToBqStorageSchemaConverter.convertTableSchema(schema);
  // Use the JSON stream writer to send records in JSON format: read JSON data from the source
  // file and send it to the Write API.
  TableName parentTable = TableName.of(projectId, datasetName, tableName);
  try (JsonStreamWriter writer =
          JsonStreamWriter.newBuilder(parentTable.toString(), tableSchema).build();
      BufferedReader reader = new BufferedReader(new FileReader(dataFile))) {
    String line = reader.readLine();
    while (line != null) {
      // As a best practice, send batches of records instead of single records at a time.
      JSONArray jsonArr = new JSONArray();
      for (int i = 0; i < 100; i++) {
        JSONObject record = new JSONObject(line);
        jsonArr.put(record);
        line = reader.readLine();
        if (line == null) {
          break;
        }
      }
      // Append the batch.
      ApiFuture<AppendRowsResponse> future = writer.append(jsonArr);
      // The append method is asynchronous. Rather than waiting for the method to complete,
      // which can hurt performance, register a completion callback and continue streaming.
      ApiFutures.addCallback(future, new AppendCompleteCallback(), MoreExecutors.directExecutor());
    }
  }
}
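The AppendCompleteCallback registered above is not shown on this page. Below is a minimal sketch of what such a callback could look like, implementing ApiFutureCallback<AppendRowsResponse>; the logging in onSuccess and onFailure is illustrative only:

import com.google.api.core.ApiFutureCallback;
import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;

class AppendCompleteCallback implements ApiFutureCallback<AppendRowsResponse> {
  @Override
  public void onSuccess(AppendRowsResponse response) {
    // The append succeeded; the response may carry the offset at which the rows were written.
    System.out.println("Append success");
  }

  @Override
  public void onFailure(Throwable throwable) {
    // In a real application, consider retrying or recording the rows that failed to append.
    System.out.println("Append failed: " + throwable);
  }
}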