Use of com.google.api.services.bigquery.model.TableReference in the Apache Beam project: the WriteTables class, processElement method.
// Runs a single BigQuery load job for one (destination, partition) group of temp files,
// then emits the (table destination, table reference) pair and deletes the temp files.
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
// Give the user-supplied DynamicDestinations access to side inputs.
dynamicDestinations.setSideInputAccessorFromProcessContext(c);
DestinationT destination = c.element().getKey().getKey();
// Schema for this destination is carried as a JSON string in the schemas side input.
TableSchema tableSchema = BigQueryHelpers.fromJsonString(c.sideInput(schemasView).get(destination), TableSchema.class);
TableDestination tableDestination = dynamicDestinations.getTable(destination);
TableReference tableReference = tableDestination.getTableReference();
// Fill in the default project from pipeline options when the destination omits one.
if (Strings.isNullOrEmpty(tableReference.getProjectId())) {
tableReference.setProjectId(c.getPipelineOptions().as(BigQueryOptions.class).getProject());
tableDestination = new TableDestination(tableReference, tableDestination.getTableDescription());
}
Integer partition = c.element().getKey().getShardNumber();
List<String> partitionFiles = Lists.newArrayList(c.element().getValue());
// Deterministic job id derived from the shared token, destination, and partition number.
String jobIdPrefix = BigQueryHelpers.createJobId(c.sideInput(jobIdToken), tableDestination, partition);
// With multiple partitions, each load targets a temporary table named after the job id;
// the temp tables are presumably merged into the final table downstream — confirm with caller.
// NOTE(review): this mutates the same TableReference object that tableDestination may wrap
// (it was passed to the TableDestination constructor above) — statement order matters here.
if (!singlePartition) {
tableReference.setTableId(jobIdPrefix);
}
load(bqServices.getJobService(c.getPipelineOptions().as(BigQueryOptions.class)), bqServices.getDatasetService(c.getPipelineOptions().as(BigQueryOptions.class)), jobIdPrefix, tableReference, tableSchema, partitionFiles, writeDisposition, createDisposition, tableDestination.getTableDescription());
c.output(KV.of(tableDestination, BigQueryHelpers.toJsonString(tableReference)));
// Best-effort cleanup of the staged files after a successful load.
removeTemporaryFiles(partitionFiles);
}
Use of com.google.api.services.bigquery.model.TableReference in the Apache Beam project: the BigQueryTableSource class, createReader method.
// Builds a bounded reader that streams TableRows directly from the configured table.
@Override
public BoundedReader<TableRow> createReader(PipelineOptions options) throws IOException {
  // The JSON table spec is provided lazily; it must be resolvable by read time.
  checkState(jsonTable.isAccessible());
  TableReference table =
      BigQueryIO.JSON_FACTORY.fromString(jsonTable.get(), TableReference.class);
  BigQueryOptions bigQueryOptions = options.as(BigQueryOptions.class);
  return new BigQueryReader(this, bqServices.getReaderFromTable(bigQueryOptions, table));
}
Use of com.google.api.services.bigquery.model.TableReference in the Apache Beam project: the BigQueryTableSource class, getTableToExtract method.
// Decodes the JSON-encoded table spec into a TableReference, substituting the
// default project from options when the spec does not name one.
@Override
protected TableReference getTableToExtract(BigQueryOptions bqOptions) throws IOException {
  checkState(jsonTable.isAccessible());
  return setDefaultProjectIfAbsent(
      bqOptions, BigQueryIO.JSON_FACTORY.fromString(jsonTable.get(), TableReference.class));
}
Use of com.google.api.services.bigquery.model.TableReference in the Apache Beam project: the CreateTables class, possibleCreateTable method.
/**
 * Creates the destination table if the create disposition permits it and this worker has not
 * already recorded the table as created.
 *
 * @param options pipeline options used to obtain a {@link DatasetService}
 * @param tableDestination destination whose table spec, reference, and description are used
 * @param tableSchema schema to apply when the table must be created
 * @throws InterruptedException if the BigQuery call is interrupted
 * @throws IOException if the BigQuery call fails
 */
private void possibleCreateTable(BigQueryOptions options, TableDestination tableDestination, TableSchema tableSchema) throws InterruptedException, IOException {
  String tableSpec = tableDestination.getTableSpec();
  TableReference tableReference = tableDestination.getTableReference();
  String tableDescription = tableDestination.getTableDescription();
  // Reference the enum constant through its type rather than the instance field
  // (the original `createDisposition.CREATE_NEVER` was a static access via instance).
  if (createDisposition != CreateDisposition.CREATE_NEVER && !createdTables.contains(tableSpec)) {
    synchronized (createdTables) {
      // Another thread may have succeeded in creating the table in the meanwhile, so
      // check again. This check isn't needed for correctness, but we add it to prevent
      // every thread from attempting a create and overwhelming our BigQuery quota.
      if (!createdTables.contains(tableSpec)) {
        // Only acquire the service once we know work is actually needed.
        DatasetService datasetService = bqServices.getDatasetService(options);
        if (datasetService.getTable(tableReference) == null) {
          datasetService.createTable(
              new Table()
                  .setTableReference(tableReference)
                  .setSchema(tableSchema)
                  .setDescription(tableDescription));
        }
        createdTables.add(tableSpec);
      }
    }
  }
}
Use of com.google.api.services.bigquery.model.TableReference in the Apache Beam project: the BigQueryServicesImplTest class, testCreateTableRetry method.
/**
 * Tests that {@link BigQueryServicesImpl} retries quota rate limited attempts.
 */
@Test
public void testCreateTableRetry() throws IOException {
  TableReference tableRef =
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table");
  Table table = new Table().setTableReference(tableRef);
  // First response is 403 rate limited, second response has valid payload.
  setupMockResponses(
      firstResponse -> {
        when(firstResponse.getStatusCode()).thenReturn(403);
        when(firstResponse.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(firstResponse.getContent())
            .thenReturn(toStream(errorWithReasonAndStatus("rateLimitExceeded", 403)));
      },
      secondResponse -> {
        when(secondResponse.getContentType()).thenReturn(Json.MEDIA_TYPE);
        when(secondResponse.getStatusCode()).thenReturn(200);
        when(secondResponse.getContent()).thenReturn(toStream(table));
      });
  BigQueryServicesImpl.DatasetServiceImpl datasetService =
      new BigQueryServicesImpl.DatasetServiceImpl(bigquery, null, PipelineOptionsFactory.create());
  Table created =
      datasetService.tryCreateTable(
          table, new RetryBoundedBackOff(BackOff.ZERO_BACKOFF, 3), Sleeper.DEFAULT);
  assertEquals(table, created);
  verifyAllResponsesAreRead();
  assertNotNull(created.getTableReference());
  expectedLogs.verifyInfo(
      "Quota limit reached when creating table project:dataset.table, retrying up to 5 minutes");
}
Aggregations