Usage example of com.google.cloud.bigquery.TableId in the project google-cloud-java by GoogleCloudPlatform:
class CreateTableAndLoadData, method main.
/**
 * Example: create a BigQuery table if it does not exist, then load CSV data
 * into it from a Cloud Storage URI and report the load job's outcome.
 */
public static void main(String... args) throws InterruptedException, TimeoutException {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    TableId tableId = TableId.of("dataset", "table");
    Table table = bigquery.getTable(tableId);
    if (table == null) {
        System.out.println("Creating table " + tableId);
        // Example schema: a single INTEGER column.
        Field integerField = Field.of("fieldName", Field.Type.integer());
        Schema schema = Schema.of(integerField);
        table = bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema)));
    }
    System.out.println("Loading data into table " + tableId);
    Job loadJob = table.load(FormatOptions.csv(), "gs://bucket/path");
    // waitFor() returns null if the job no longer exists on the service;
    // guard before dereferencing its status to avoid an NPE.
    loadJob = loadJob.waitFor();
    if (loadJob == null) {
        System.out.println("Job no longer exists");
    } else if (loadJob.getStatus().getError() != null) {
        System.out.println("Job completed with errors");
    } else {
        System.out.println("Job succeeded");
    }
}
Usage example of com.google.cloud.bigquery.TableId in the project google-cloud-java by GoogleCloudPlatform:
class BigQuerySnippets, method getTableFromId.
/**
 * Example of retrieving a table by its fully-qualified identifier
 * (project, dataset, and table name).
 */
// [TARGET getTable(TableId, TableOption...)]
// [VARIABLE "my_project_id"]
// [VARIABLE "my_dataset_name"]
// [VARIABLE "my_table_name"]
public Table getTableFromId(String projectId, String datasetName, String tableName) {
    // [START getTableFromId]
    Table table = bigquery.getTable(TableId.of(projectId, datasetName, tableName));
    // [END getTableFromId]
    return table;
}
Usage example of com.google.cloud.bigquery.TableId in the project google-cloud-java by GoogleCloudPlatform:
class BigQuerySnippets, method insertAll.
/**
 * Example of streaming rows into a table directly, without running a load job.
 */
// [TARGET insertAll(InsertAllRequest)]
// [VARIABLE "my_dataset_name"]
// [VARIABLE "my_table_name"]
public InsertAllResponse insertAll(String datasetName, String tableName) {
    // [START insertAll]
    TableId tableId = TableId.of(datasetName, tableName);
    // Values of the row to insert
    Map<String, Object> rowContent = new HashMap<>();
    rowContent.put("booleanField", true);
    // Bytes are passed in base64
    rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
    // Records are passed as a map
    Map<String, Object> nestedRecord = new HashMap<>();
    nestedRecord.put("stringField", "Hello, World!");
    rowContent.put("recordField", nestedRecord);
    InsertAllRequest request =
        InsertAllRequest.newBuilder(tableId).addRow("rowId", rowContent).build();
    InsertAllResponse response = bigquery.insertAll(request);
    if (response.hasErrors()) {
        // If any of the insertions failed, this lets you inspect the errors
        for (Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
            // inspect row error
        }
    }
    // [END insertAll]
    return response;
}
Usage example of com.google.cloud.bigquery.TableId in the project google-cloud-java by GoogleCloudPlatform:
class BigQuerySnippets, method writeToTable.
/**
 * Example of creating a channel with which to write to a table.
 *
 * <p>Closing the channel finalizes the upload and creates the load job; the row
 * count is read from that job's statistics once it completes.
 *
 * @return the number of rows the load job wrote to the table
 */
// [TARGET writer(WriteChannelConfiguration)]
// [VARIABLE "my_dataset_name"]
// [VARIABLE "my_table_name"]
// [VARIABLE "StringValue1\nStringValue2\n"]
public long writeToTable(String datasetName, String tableName, String csvData) throws IOException, InterruptedException, TimeoutException {
    // [START writeToTable]
    TableId tableId = TableId.of(datasetName, tableName);
    WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
    TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
    // Write data to writer
    try {
        writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
    } finally {
        // Must always close: closing flushes the upload and starts the load job.
        writer.close();
    }
    // Get load job
    Job job = writer.getJob();
    // waitFor() returns null if the job no longer exists on the service;
    // fail explicitly instead of NPE-ing on getStatistics() below.
    job = job.waitFor();
    if (job == null) {
        throw new IllegalStateException("Load job no longer exists");
    }
    LoadStatistics stats = job.getStatistics();
    return stats.getOutputRows();
    // [END writeToTable]
}
Usage example of com.google.cloud.bigquery.TableId in the project components by Talend:
class BigQueryDatasetRuntime, method getSchema.
/**
 * Resolves the dataset's schema from either a named table or a query.
 *
 * <p>This lookup is needed for both read and write, because the schema cannot be
 * recovered from the in-flight data: BigQueryIO.Read yields TableRow objects that
 * carry no schema of their own, so we ask the BigQuery client up front.
 *
 * @return the Avro schema inferred from the BigQuery row schema
 */
@Override
public Schema getSchema() {
    BigQuery bigquery = BigQueryConnection.createClient(properties.getDatastoreProperties());
    com.google.cloud.bigquery.Schema rowSchema = null;
    switch(properties.sourceType.getValue()) {
    case TABLE_NAME:
        {
            String project = properties.getDatastoreProperties().projectName.getValue();
            String dataset = properties.bqDataset.getValue();
            String tableName = properties.tableName.getValue();
            TableId tableId = TableId.of(project, dataset, tableName);
            Table table = bigquery.getTable(tableId);
            if (table == null) {
                ComponentException.build(CommonErrorCodes.UNEXPECTED_EXCEPTION).setAndThrow("Table not found:" + tableId.toString());
            }
            rowSchema = table.getDefinition().getSchema();
            break;
        }
    case QUERY:
        {
            QueryRequest queryRequest =
                QueryRequest.newBuilder(properties.query.getValue()).setUseLegacySql(properties.useLegacySql.getValue()).build();
            QueryResponse queryResponse = bigquery.query(queryRequest);
            rowSchema = queryResponse.getResult().getSchema();
            break;
        }
    default:
        throw new RuntimeException("To be implemented: " + properties.sourceType.getValue());
    }
    return BigQueryAvroRegistry.get().inferSchema(rowSchema);
}
Aggregations