use of com.google.api.services.bigquery.model.Dataset in project beam by apache.
the class BigQueryTableRowIteratorTest method testReadFromQuery.
/**
* Verifies that when the query runs, the correct data is returned and the temporary dataset and
* table are both cleaned up.
*/
@Test
public void testReadFromQuery() throws IOException, InterruptedException {
  // Mock job inserting.
  Job dryRunJob = new Job().setStatistics(new JobStatistics().setQuery(
      new JobStatistics2().setReferencedTables(ImmutableList.of(new TableReference()))));
  Job insertedJob = new Job().setJobReference(new JobReference());
  when(mockJobsInsert.execute()).thenReturn(dryRunJob, insertedJob);
  // Mock job polling.
  JobStatus status = new JobStatus().setState("DONE");
  JobConfigurationQuery resultQueryConfig = new JobConfigurationQuery().setDestinationTable(
      new TableReference()
          .setProjectId("project")
          .setDatasetId("tempdataset")
          .setTableId("temptable"));
  Job getJob = new Job()
      .setJobReference(new JobReference())
      .setStatus(status)
      .setConfiguration(new JobConfiguration().setQuery(resultQueryConfig));
  when(mockJobsGet.execute()).thenReturn(getJob);
  // Mock table schema fetch.
  when(mockTablesGet.execute()).thenReturn(tableWithLocation(), tableWithBasicSchema());
  byte[] photoBytes = "photograph".getBytes();
  String photoBytesEncoded = BaseEncoding.base64().encode(photoBytes);
  // Mock table data fetch.
  when(mockTabledataList.execute()).thenReturn(rawDataList(rawRow(
      "Arthur", 42, photoBytesEncoded,
      "2000-01-01", "2000-01-01 00:00:00.000005", "00:00:00.000005")));
  // Run query and verify
  String query = "SELECT name, count, photo, anniversary_date, "
      + "anniversary_datetime, anniversary_time from table";
  JobConfigurationQuery queryConfig = new JobConfigurationQuery().setQuery(query);
  try (BigQueryTableRowIterator iterator =
      BigQueryTableRowIterator.fromQuery(queryConfig, "project", mockClient)) {
    iterator.open();
    assertTrue(iterator.advance());
    TableRow row = iterator.getCurrent();
    assertTrue(row.containsKey("name"));
    assertTrue(row.containsKey("answer"));
    assertTrue(row.containsKey("photo"));
    assertTrue(row.containsKey("anniversary_date"));
    assertTrue(row.containsKey("anniversary_datetime"));
    assertTrue(row.containsKey("anniversary_time"));
    assertEquals("Arthur", row.get("name"));
    assertEquals(42, row.get("answer"));
    assertEquals(photoBytesEncoded, row.get("photo"));
    assertEquals("2000-01-01", row.get("anniversary_date"));
    assertEquals("2000-01-01 00:00:00.000005", row.get("anniversary_datetime"));
    assertEquals("00:00:00.000005", row.get("anniversary_time"));
    assertFalse(iterator.advance());
  }
  // Temp dataset created and later deleted.
  verify(mockClient, times(2)).datasets();
  verify(mockDatasets).insert(anyString(), any(Dataset.class));
  verify(mockDatasetsInsert).execute();
  verify(mockDatasets).delete(anyString(), anyString());
  verify(mockDatasetsDelete).execute();
  // Job inserted to run the query, polled once.
  verify(mockClient, times(3)).jobs();
  verify(mockJobs, times(2)).insert(anyString(), any(Job.class));
  verify(mockJobsInsert, times(2)).execute();
  verify(mockJobs).get(anyString(), anyString());
  verify(mockJobsGet).execute();
  // Temp table get after query finish, deleted after reading.
  verify(mockClient, times(3)).tables();
  verify(mockTables, times(2)).get(anyString(), anyString(), anyString());
  verify(mockTablesGet, times(2)).execute();
  verify(mockTables).delete(anyString(), anyString(), anyString());
  verify(mockTablesDelete).execute();
  // Table data read.
  verify(mockClient).tabledata();
  verify(mockTabledata).list("project", "tempdataset", "temptable");
  verify(mockTabledataList).execute();
}
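The verify(...) calls at the end of the test pin down the raw client traffic the iterator is expected to generate around its temporary dataset. A minimal sketch of that create-then-delete sequence against the low-level com.google.api.services.bigquery client (the bigquery client variable and the literal ids here are placeholders for illustration, not part of the test):

  // Create a temporary dataset to hold the query's destination table
  // (the datasets().insert(...) call the test verifies).
  Dataset tempDataset = new Dataset().setDatasetReference(
      new DatasetReference().setProjectId("project").setDatasetId("tempdataset"));
  bigquery.datasets().insert("project", tempDataset).execute();

  // ... run the query with the temp table as destination and read the rows ...

  // Clean up afterwards (the tables().delete(...) and datasets().delete(...) calls the test verifies).
  bigquery.tables().delete("project", "tempdataset", "temptable").execute();
  bigquery.datasets().delete("project", "tempdataset").execute();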
use of com.google.api.services.bigquery.model.Dataset in project google-cloud-java by GoogleCloudPlatform.
the class AclTest method testOf.
@Test
public void testOf() {
  Acl acl = Acl.of(Group.ofAllAuthenticatedUsers(), Role.READER);
  assertEquals(Group.ofAllAuthenticatedUsers(), acl.getEntity());
  assertEquals(Role.READER, acl.getRole());
  Dataset.Access pb = acl.toPb();
  assertEquals(acl, Acl.fromPb(pb));
  View view = new View(TableId.of("project", "dataset", "view"));
  acl = Acl.of(view);
  assertEquals(view, acl.getEntity());
  assertEquals(null, acl.getRole());
}
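For reference, roughly what the round-tripped Dataset.Access carries at the API level; this is a sketch of the expected mapping written directly against the generated model classes, not code from the test:

  // Approximate low-level equivalent of Acl.of(Group.ofAllAuthenticatedUsers(), Role.READER).
  Dataset.Access allAuthenticated = new Dataset.Access()
      .setSpecialGroup("allAuthenticatedUsers")
      .setRole("READER");

  // Approximate low-level equivalent of Acl.of(view): an authorized view entry carries no role.
  Dataset.Access viewAccess = new Dataset.Access().setView(
      new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("view"));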
use of com.google.api.services.bigquery.model.Dataset in project beam by apache.
the class ExampleUtils method setupBigQueryTable.
private void setupBigQueryTable(String projectId, String datasetId, String tableId, TableSchema schema) throws IOException {
  if (bigQueryClient == null) {
    bigQueryClient = newBigQueryClient(options.as(BigQueryOptions.class)).build();
  }
  Datasets datasetService = bigQueryClient.datasets();
  if (executeNullIfNotFound(datasetService.get(projectId, datasetId)) == null) {
    Dataset newDataset = new Dataset().setDatasetReference(
        new DatasetReference().setProjectId(projectId).setDatasetId(datasetId));
    datasetService.insert(projectId, newDataset).execute();
  }
  Tables tableService = bigQueryClient.tables();
  Table table = executeNullIfNotFound(tableService.get(projectId, datasetId, tableId));
  if (table == null) {
    Table newTable = new Table().setSchema(schema).setTableReference(
        new TableReference().setProjectId(projectId).setDatasetId(datasetId).setTableId(tableId));
    tableService.insert(projectId, datasetId, newTable).execute();
  } else if (!table.getSchema().equals(schema)) {
    throw new RuntimeException("Table exists and schemas do not match, expecting: "
        + schema.toPrettyString() + ", actual: " + table.getSchema().toPrettyString());
  }
}
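setupBigQueryTable relies on an executeNullIfNotFound helper that is not shown in this snippet. A plausible sketch, assuming the helper simply maps a 404 response to null so callers can probe for existence:

  // Hypothetical sketch of the helper used above: execute the request and treat
  // "404 Not Found" as "resource does not exist" rather than as an error.
  private static <T> T executeNullIfNotFound(AbstractGoogleClientRequest<T> request) throws IOException {
    try {
      return request.execute();
    } catch (GoogleJsonResponseException e) {
      if (e.getStatusCode() == 404) {
        return null;
      }
      throw e;
    }
  }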
use of com.google.api.services.bigquery.model.Dataset in project google-cloud-java by GoogleCloudPlatform.
the class AclTest method testViewEntity.
@Test
public void testViewEntity() {
  TableId viewId = TableId.of("project", "dataset", "view");
  View entity = new View(viewId);
  assertEquals(viewId, entity.getId());
  assertEquals(Type.VIEW, entity.getType());
  Dataset.Access pb = entity.toPb();
  assertEquals(entity, Entity.fromPb(pb));
}
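As context for where these entries end up, a sketch (not from the test) of attaching the serialized view entry to a raw Dataset through its access list:

  Dataset dataset = new Dataset()
      .setDatasetReference(new DatasetReference().setProjectId("project").setDatasetId("dataset"))
      .setAccess(ImmutableList.of(new Dataset.Access().setView(
          new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("view"))));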
use of com.google.api.services.bigquery.model.Dataset in project google-cloud-java by GoogleCloudPlatform.
the class HttpBigQueryRpc method listDatasets.
@Override
public Tuple<String, Iterable<Dataset>> listDatasets(String projectId, Map<Option, ?> options) {
  try {
    DatasetList datasetsList = bigquery.datasets().list(projectId)
        .setAll(Option.ALL_DATASETS.getBoolean(options))
        .setMaxResults(Option.MAX_RESULTS.getLong(options))
        .setPageToken(Option.PAGE_TOKEN.getString(options))
        .execute();
    Iterable<DatasetList.Datasets> datasets = datasetsList.getDatasets();
    return Tuple.of(datasetsList.getNextPageToken(), Iterables.transform(
        datasets != null ? datasets : ImmutableList.<DatasetList.Datasets>of(),
        new Function<DatasetList.Datasets, Dataset>() {
          @Override
          public Dataset apply(DatasetList.Datasets datasetPb) {
            return new Dataset().setDatasetReference(datasetPb.getDatasetReference())
                .setFriendlyName(datasetPb.getFriendlyName())
                .setId(datasetPb.getId())
                .setKind(datasetPb.getKind());
          }
        }));
  } catch (IOException ex) {
    throw translate(ex);
  }
}
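listDatasets returns a single page of results plus a continuation token. A sketch of paging through every dataset in a project with the same generated client (the initialized bigquery client and the project id are assumptions for illustration):

  String pageToken = null;
  do {
    DatasetList page = bigquery.datasets().list("my-project")
        .setMaxResults(100L)
        .setPageToken(pageToken)
        .execute();
    if (page.getDatasets() != null) {
      for (DatasetList.Datasets summary : page.getDatasets()) {
        System.out.println(summary.getId());
      }
    }
    pageToken = page.getNextPageToken();
  } while (pageToken != null);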