Use of com.google.api.services.bigquery.model.JobConfigurationQuery in the Apache Beam project:
class BigQueryTableRowIteratorTest, method testQueryFailed.
/**
 * Verifies that when the query fails, the user gets a useful exception and the temporary dataset
 * is cleaned up. Also verifies that the temporary table (which is never created) is not
 * erroneously attempted to be deleted.
 */
@Test
public void testQueryFailed() throws IOException {
// Stub the job insert so every execute() attempt throws; four throwables are queued to match
// the four execute() calls verified below (evidently retried by the iterator).
String errorReason = "bad query";
Exception exception = new IOException(errorReason);
when(mockJobsInsert.execute()).thenThrow(exception, exception, exception, exception);
JobConfigurationQuery queryConfig = new JobConfigurationQuery().setQuery("NOT A QUERY");
try (BigQueryTableRowIterator iterator = BigQueryTableRowIterator.fromQuery(queryConfig, "project", mockClient)) {
iterator.open();
// open() must propagate the failure; reaching this line means it did not.
fail();
} catch (Exception expected) {
// Verify the message explains the cause and reports the offending query text.
assertThat(expected.getMessage(), containsString("Error"));
assertThat(expected.getMessage(), containsString("NOT A QUERY"));
assertThat(expected.getCause().getMessage(), containsString(errorReason));
}
// jobs() touched once and one insert request built; execute() was attempted four times and
// never succeeded, so the job is never polled and no temp table deletion is attempted.
verify(mockClient, times(1)).jobs();
verify(mockJobs).insert(anyString(), any(Job.class));
verify(mockJobsInsert, times(4)).execute();
}
Use of com.google.api.services.bigquery.model.JobConfigurationQuery in the Apache Beam project:
class BigQueryTableRowIteratorTest, method testReadFromQuery.
/**
 * Verifies that a query that completes successfully yields the expected data, and that the
 * temporary dataset and temporary table holding the results are both cleaned up afterwards.
 */
@Test
public void testReadFromQuery() throws IOException, InterruptedException {
  // Stub job insertion: the dry-run job is returned first, then the actually inserted job.
  JobStatistics2 dryRunQueryStats =
      new JobStatistics2().setReferencedTables(ImmutableList.of(new TableReference()));
  Job dryRunJob = new Job().setStatistics(new JobStatistics().setQuery(dryRunQueryStats));
  Job insertedJob = new Job().setJobReference(new JobReference());
  when(mockJobsInsert.execute()).thenReturn(dryRunJob, insertedJob);

  // Stub job polling: the job is already DONE and wrote its results to a temp table.
  TableReference tempTableRef = new TableReference();
  tempTableRef.setProjectId("project");
  tempTableRef.setDatasetId("tempdataset");
  tempTableRef.setTableId("temptable");
  JobConfigurationQuery completedQueryConfig =
      new JobConfigurationQuery().setDestinationTable(tempTableRef);
  Job polledJob =
      new Job()
          .setJobReference(new JobReference())
          .setStatus(new JobStatus().setState("DONE"))
          .setConfiguration(new JobConfiguration().setQuery(completedQueryConfig));
  when(mockJobsGet.execute()).thenReturn(polledJob);

  // Stub the table schema fetch: the table's location first, then its schema.
  when(mockTablesGet.execute()).thenReturn(tableWithLocation(), tableWithBasicSchema());

  byte[] photoBytes = "photograph".getBytes();
  String encodedPhoto = BaseEncoding.base64().encode(photoBytes);

  // Stub the table data fetch to return exactly one row.
  when(mockTabledataList.execute())
      .thenReturn(
          rawDataList(
              rawRow(
                  "Arthur",
                  42,
                  encodedPhoto,
                  "2000-01-01",
                  "2000-01-01 00:00:00.000005",
                  "00:00:00.000005")));

  // Run the query and check the single returned row field by field.
  String query =
      "SELECT name, count, photo, anniversary_date, "
          + "anniversary_datetime, anniversary_time from table";
  JobConfigurationQuery queryConfig = new JobConfigurationQuery().setQuery(query);
  try (BigQueryTableRowIterator iterator =
      BigQueryTableRowIterator.fromQuery(queryConfig, "project", mockClient)) {
    iterator.open();
    assertTrue(iterator.advance());
    TableRow row = iterator.getCurrent();

    assertTrue(row.containsKey("name"));
    assertTrue(row.containsKey("answer"));
    assertTrue(row.containsKey("photo"));
    assertTrue(row.containsKey("anniversary_date"));
    assertTrue(row.containsKey("anniversary_datetime"));
    assertTrue(row.containsKey("anniversary_time"));
    assertEquals("Arthur", row.get("name"));
    assertEquals(42, row.get("answer"));
    assertEquals(encodedPhoto, row.get("photo"));
    assertEquals("2000-01-01", row.get("anniversary_date"));
    assertEquals("2000-01-01 00:00:00.000005", row.get("anniversary_datetime"));
    assertEquals("00:00:00.000005", row.get("anniversary_time"));

    assertFalse(iterator.advance());
  }

  // The temporary dataset is created before the query and deleted afterwards.
  verify(mockClient, times(2)).datasets();
  verify(mockDatasets).insert(anyString(), any(Dataset.class));
  verify(mockDatasetsInsert).execute();
  verify(mockDatasets).delete(anyString(), anyString());
  verify(mockDatasetsDelete).execute();
  // Two job inserts (dry run + real run), then a single poll.
  verify(mockClient, times(3)).jobs();
  verify(mockJobs, times(2)).insert(anyString(), any(Job.class));
  verify(mockJobsInsert, times(2)).execute();
  verify(mockJobs).get(anyString(), anyString());
  verify(mockJobsGet).execute();
  // The temp table is fetched after the query finishes and deleted after reading.
  verify(mockClient, times(3)).tables();
  verify(mockTables, times(2)).get(anyString(), anyString(), anyString());
  verify(mockTablesGet, times(2)).execute();
  verify(mockTables).delete(anyString(), anyString(), anyString());
  verify(mockTablesDelete).execute();
  // The row data itself is read exactly once.
  verify(mockClient).tabledata();
  verify(mockTabledata).list("project", "tempdataset", "temptable");
  verify(mockTabledataList).execute();
}
Use of com.google.api.services.bigquery.model.JobConfigurationQuery in the google-cloud-java project (by GoogleCloudPlatform):
class QueryJobConfiguration, method toPb.
@Override
com.google.api.services.bigquery.model.JobConfiguration toPb() {
  // Translate this immutable configuration into its API protocol-buffer representation.
  // Optional settings are copied only when present, leaving the rest unset (null) in the pb.
  com.google.api.services.bigquery.model.JobConfiguration jobConfigPb =
      new com.google.api.services.bigquery.model.JobConfiguration();
  JobConfigurationQuery queryPb = new JobConfigurationQuery();
  queryPb.setQuery(query);
  jobConfigPb.setDryRun(dryRun());
  if (allowLargeResults != null) {
    queryPb.setAllowLargeResults(allowLargeResults);
  }
  if (createDisposition != null) {
    queryPb.setCreateDisposition(createDisposition.toString());
  }
  if (destinationTable != null) {
    queryPb.setDestinationTable(destinationTable.toPb());
  }
  if (defaultDataset != null) {
    queryPb.setDefaultDataset(defaultDataset.toPb());
  }
  if (flattenResults != null) {
    queryPb.setFlattenResults(flattenResults);
  }
  if (priority != null) {
    queryPb.setPriority(priority.toString());
  }
  if (tableDefinitions != null) {
    // Lazily-transformed view is fine here: the pb setter consumes it immediately.
    queryPb.setTableDefinitions(
        Maps.transformValues(tableDefinitions, ExternalTableDefinition.TO_EXTERNAL_DATA_FUNCTION));
  }
  if (useQueryCache != null) {
    queryPb.setUseQueryCache(useQueryCache);
  }
  if (userDefinedFunctions != null) {
    queryPb.setUserDefinedFunctionResources(
        Lists.transform(userDefinedFunctions, UserDefinedFunction.TO_PB_FUNCTION));
  }
  if (writeDisposition != null) {
    queryPb.setWriteDisposition(writeDisposition.toString());
  }
  if (useLegacySql != null) {
    queryPb.setUseLegacySql(useLegacySql);
  }
  if (maximumBillingTier != null) {
    queryPb.setMaximumBillingTier(maximumBillingTier);
  }
  if (schemaUpdateOptions != null) {
    // Schema update options are serialized by enum constant name.
    ImmutableList.Builder<String> optionNames = new ImmutableList.Builder<>();
    for (JobInfo.SchemaUpdateOption option : schemaUpdateOptions) {
      optionNames.add(option.name());
    }
    queryPb.setSchemaUpdateOptions(optionNames.build());
  }
  return jobConfigPb.setQuery(queryPb);
}
Use of com.google.api.services.bigquery.model.JobConfigurationQuery in the Apache Beam project:
class BigqueryClient, method queryUnflattened.
/**
 * Performs a query without flattening results.
 *
 * <p>Routes the query through a randomly named temporary table in a randomly named temporary
 * dataset (required for unflattened / large results), polls for completion, reads the rows back,
 * and deletes the temporary dataset when done — even if the query fails.
 *
 * @param query the BigQuery SQL to execute
 * @param projectId the project that hosts the temporary dataset and runs the job
 * @param typed if {@code true}, convert each row's raw values using the result schema
 * @return the query result rows; empty (never {@code null}) if the query produced no rows
 * @throws IOException if a BigQuery API call fails
 * @throws InterruptedException if interrupted while waiting between completion polls
 */
@Nonnull
public List<TableRow> queryUnflattened(String query, String projectId, boolean typed) throws IOException, InterruptedException {
  Random rnd = new Random(System.currentTimeMillis());
  String temporaryDatasetId = "_dataflow_temporary_dataset_" + rnd.nextInt(1000000);
  String temporaryTableId = "dataflow_temporary_table_" + rnd.nextInt(1000000);
  TableReference tempTableReference =
      new TableReference()
          .setProjectId(projectId)
          .setDatasetId(temporaryDatasetId)
          .setTableId(temporaryTableId);
  createNewDataset(projectId, temporaryDatasetId);
  try {
    createNewTable(
        projectId, temporaryDatasetId, new Table().setTableReference(tempTableReference));
    JobConfigurationQuery jcQuery =
        new JobConfigurationQuery()
            .setFlattenResults(false)
            .setAllowLargeResults(true)
            .setDestinationTable(tempTableReference)
            .setQuery(query);
    JobConfiguration jc = new JobConfiguration().setQuery(jcQuery);
    Job job = new Job().setConfiguration(jc);
    Job insertedJob = bqClient.jobs().insert(projectId, job).execute();
    GetQueryResultsResponse qResponse;
    while (true) {
      qResponse =
          bqClient
              .jobs()
              .getQueryResults(projectId, insertedJob.getJobReference().getJobId())
              .execute();
      // Boolean.TRUE.equals guards against a null jobComplete field (unboxing NPE).
      if (Boolean.TRUE.equals(qResponse.getJobComplete())) {
        break;
      }
      // Back off between polls instead of spinning in a tight request loop.
      Thread.sleep(500);
    }
    final TableSchema schema = qResponse.getSchema();
    final List<TableRow> rows = qResponse.getRows();
    if (rows == null) {
      // getQueryResults reports null rows for an empty result; honor the @Nonnull contract.
      return java.util.Collections.emptyList();
    }
    return !typed
        ? rows
        : rows.stream()
            .map(r -> getTypedTableRow(schema.getFields(), r))
            .collect(Collectors.toList());
  } finally {
    // Always clean up the temporary dataset, even when table creation, the query, or
    // polling throws.
    deleteDataset(projectId, temporaryDatasetId);
  }
}
Use of com.google.api.services.bigquery.model.JobConfigurationQuery in the Apache Beam project:
class BigQueryTableRowIteratorTest, method testReadFromQueryNoTables.
/**
 * Verifies that a query referencing no tables at all can still be read.
 */
@Test
public void testReadFromQueryNoTables() throws IOException, InterruptedException {
  // Stub job insertion: a dry run referencing no tables, then the actually inserted job.
  Job dryRunJob = new Job().setStatistics(new JobStatistics().setQuery(new JobStatistics2()));
  Job insertedJob = new Job().setJobReference(new JobReference());
  when(mockJobsInsert.execute()).thenReturn(dryRunJob, insertedJob);

  // Stub job polling: the job is already DONE and wrote its results to a temp table.
  TableReference tempTableRef = new TableReference();
  tempTableRef.setProjectId("project");
  tempTableRef.setDatasetId("tempdataset");
  tempTableRef.setTableId("temptable");
  JobConfigurationQuery completedQueryConfig =
      new JobConfigurationQuery().setDestinationTable(tempTableRef);
  Job polledJob =
      new Job()
          .setJobReference(new JobReference())
          .setStatus(new JobStatus().setState("DONE"))
          .setConfiguration(new JobConfiguration().setQuery(completedQueryConfig));
  when(mockJobsGet.execute()).thenReturn(polledJob);

  // Stub the table schema fetch for the no-table query.
  when(mockTablesGet.execute()).thenReturn(noTableQuerySchema());

  byte[] photoBytes = "photograph".getBytes();
  String encodedPhoto = BaseEncoding.base64().encode(photoBytes);

  // Stub the table data fetch to return exactly one row.
  when(mockTabledataList.execute())
      .thenReturn(rawDataList(rawRow("Arthur", 42, encodedPhoto)));

  // Run the query and check the single returned row field by field.
  String query =
      String.format("SELECT \"Arthur\" as name, 42 as count, \"%s\" as photo", encodedPhoto);
  JobConfigurationQuery queryConfig = new JobConfigurationQuery().setQuery(query);
  try (BigQueryTableRowIterator iterator =
      BigQueryTableRowIterator.fromQuery(queryConfig, "project", mockClient)) {
    iterator.open();
    assertTrue(iterator.advance());
    TableRow row = iterator.getCurrent();

    assertTrue(row.containsKey("name"));
    assertTrue(row.containsKey("count"));
    assertTrue(row.containsKey("photo"));
    assertEquals("Arthur", row.get("name"));
    assertEquals(42, row.get("count"));
    assertEquals(encodedPhoto, row.get("photo"));

    assertFalse(iterator.advance());
  }

  // The temporary dataset is created before the query and deleted afterwards.
  verify(mockClient, times(2)).datasets();
  verify(mockDatasets).insert(anyString(), any(Dataset.class));
  verify(mockDatasetsInsert).execute();
  verify(mockDatasets).delete(anyString(), anyString());
  verify(mockDatasetsDelete).execute();
  // Two job inserts (dry run + real run), then a single poll.
  verify(mockClient, times(3)).jobs();
  verify(mockJobs, times(2)).insert(anyString(), any(Job.class));
  verify(mockJobsInsert, times(2)).execute();
  verify(mockJobs).get(anyString(), anyString());
  verify(mockJobsGet).execute();
  // The temp table is fetched once after the query finishes and deleted after reading.
  verify(mockClient, times(2)).tables();
  verify(mockTables, times(1)).get(anyString(), anyString(), anyString());
  verify(mockTablesGet, times(1)).execute();
  verify(mockTables).delete(anyString(), anyString(), anyString());
  verify(mockTablesDelete).execute();
  // The row data itself is read exactly once.
  verify(mockClient).tabledata();
  verify(mockTabledata).list("project", "tempdataset", "temptable");
  verify(mockTabledataList).execute();
}
Aggregations