use of com.google.api.services.bigquery.model.TableReference in project beam by apache.
the class BigQueryInsertErrorCoderTest method testDecodeEncodeEqual.
@Test
public void testDecodeEncodeEqual() throws Exception {
  BigQueryInsertError value =
      new BigQueryInsertError(
          new TableRow().setF(Collections.singletonList(new TableCell().setV("Value"))),
          new TableDataInsertAllResponse.InsertErrors()
              .setIndex(0L)
              .setErrors(Collections.singletonList(
                  new ErrorProto()
                      .setReason("a Reason")
                      .setLocation("A location")
                      .setMessage("A message")
                      .setDebugInfo("The debug info"))),
          new TableReference()
              .setProjectId("dummy-project-id")
              .setDatasetId("dummy-dataset-id")
              .setTableId("dummy-table-id"));
  CoderProperties.coderDecodeEncodeEqual(TEST_CODER, value);
}
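The TEST_CODER constant used above is declared elsewhere in the test class and is not shown in this snippet. A minimal sketch of that declaration, assuming the coder under test is BigQueryInsertErrorCoder from the same package:

// Assumed field in BigQueryInsertErrorCoderTest; BigQueryInsertErrorCoder.of() supplies
// the coder instance that the encode/decode round-trip property is exercised against.
private static final Coder<BigQueryInsertError> TEST_CODER = BigQueryInsertErrorCoder.of();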
use of com.google.api.services.bigquery.model.TableReference in project beam by apache.
the class BigQueryIOReadTest method testBuildSourceWithTableReference.
@Test
public void testBuildSourceWithTableReference() {
  TableReference table =
      new TableReference()
          .setProjectId("foo.com:project")
          .setDatasetId("somedataset")
          .setTableId("sometable");
  BigQueryIO.Read read = BigQueryIO.read().from(table);
  checkReadTableObject(read, "foo.com:project", "somedataset", "sometable");
}
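For comparison, the same read could be configured from a table spec string instead of a TableReference object; a brief sketch (the variable name is illustrative, and domain-scoped project IDs keep their domain prefix in the spec):

// Equivalent configuration via a table spec string of the form project:dataset.table.
BigQueryIO.Read readFromSpec = BigQueryIO.read().from("foo.com:project:somedataset.sometable");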
use of com.google.api.services.bigquery.model.TableReference in project beam by apache.
the class BigQueryIOReadTest method testBigQueryQuerySourceInitSplit.
@Test
public void testBigQueryQuerySourceInitSplit() throws Exception {
  PipelineOptions options = PipelineOptionsFactory.create();
  BigQueryOptions bqOptions = options.as(BigQueryOptions.class);
  bqOptions.setProject("project");
  TableReference sourceTableRef = BigQueryHelpers.parseTableSpec("project:dataset.table");
  fakeDatasetService.createDataset(
      sourceTableRef.getProjectId(), sourceTableRef.getDatasetId(),
      "asia-northeast1", "Fake plastic tree^H^H^H^Htables", null);
  fakeDatasetService.createTable(
      new Table().setTableReference(sourceTableRef).setLocation("asia-northeast1"));
  Table queryResultTable =
      new Table()
          .setSchema(new TableSchema().setFields(ImmutableList.of(
              new TableFieldSchema().setName("name").setType("STRING"),
              new TableFieldSchema().setName("number").setType("INTEGER"))));
  List<TableRow> expected =
      ImmutableList.of(
          new TableRow().set("name", "a").set("number", 1L),
          new TableRow().set("name", "b").set("number", 2L),
          new TableRow().set("name", "c").set("number", 3L),
          new TableRow().set("name", "d").set("number", 4L),
          new TableRow().set("name", "e").set("number", 5L),
          new TableRow().set("name", "f").set("number", 6L));
  String encodedQuery = FakeBigQueryServices.encodeQueryResult(queryResultTable, expected);
  String stepUuid = "testStepUuid";
  TableReference tempTableReference =
      createTempTableReference(
          bqOptions.getProject(),
          BigQueryResourceNaming.createJobIdPrefix(options.getJobName(), stepUuid, JobType.QUERY),
          Optional.empty());
  fakeJobService.expectDryRunQuery(
      bqOptions.getProject(),
      encodedQuery,
      new JobStatistics()
          .setQuery(new JobStatistics2()
              .setTotalBytesProcessed(100L)
              .setReferencedTables(ImmutableList.of(sourceTableRef, tempTableReference))));
  BoundedSource<TableRow> bqSource =
      BigQueryQuerySourceDef.create(
              fakeBqServices,
              ValueProvider.StaticValueProvider.of(encodedQuery),
              true /* flattenResults */,
              true /* useLegacySql */,
              QueryPriority.BATCH,
              null, null, null)
          .toSource(stepUuid, TableRowJsonCoder.of(), BigQueryIO.TableRowParser.INSTANCE, false);
  options.setTempLocation(testFolder.getRoot().getAbsolutePath());
  List<TableRow> read =
      convertStringsToLong(
          SourceTestUtils.readFromSplitsOfSource(bqSource, 0L /* ignored */, options));
  assertThat(read, containsInAnyOrder(Iterables.toArray(expected, TableRow.class)));
  List<? extends BoundedSource<TableRow>> sources = bqSource.split(100, options);
  assertEquals(2, sources.size());
}
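convertStringsToLong is a helper defined elsewhere in the test class and not shown here. A plausible sketch, assuming it exists to undo the JSON round-trip in which BigQuery returns INTEGER fields as strings:

// Assumed helper: normalizes the "number" field back to Long so rows read from the
// fake service can be compared against the expected TableRows.
private List<TableRow> convertStringsToLong(List<TableRow> toConvert) {
  List<TableRow> converted = new ArrayList<>();
  for (TableRow row : toConvert) {
    TableRow copy = row.clone();
    copy.set("number", Long.parseLong((String) row.get("number")));
    converted.add(copy);
  }
  return converted;
}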
use of com.google.api.services.bigquery.model.TableReference in project beam by apache.
the class BigQueryIOStorageQueryTest method testQuerySourceInitialSplitWithBigQueryProject_EmptyResult.
@Test
@ProjectOverride
public void testQuerySourceInitialSplitWithBigQueryProject_EmptyResult() throws Exception {
  TableReference sourceTableRef =
      BigQueryHelpers.parseTableSpec("bigquery-project-id:dataset.table");
  fakeDatasetService.createDataset(
      sourceTableRef.getProjectId(), sourceTableRef.getDatasetId(),
      "asia-northeast1", "Fake plastic tree^H^H^H^Htables", null);
  fakeDatasetService.createTable(
      new Table().setTableReference(sourceTableRef).setLocation("asia-northeast1"));
  Table queryResultTable = new Table().setSchema(TABLE_SCHEMA).setNumBytes(0L);
  String encodedQuery = FakeBigQueryServices.encodeQueryResult(queryResultTable);
  fakeJobService.expectDryRunQuery(
      options.getBigQueryProject(),
      encodedQuery,
      new JobStatistics()
          .setQuery(new JobStatistics2()
              .setTotalBytesProcessed(1024L * 1024L)
              .setReferencedTables(ImmutableList.of(sourceTableRef))));
  String stepUuid = "testStepUuid";
  TableReference tempTableReference =
      createTempTableReference(
          options.getBigQueryProject(),
          BigQueryResourceNaming.createJobIdPrefix(options.getJobName(), stepUuid, JobType.QUERY),
          Optional.empty());
  CreateReadSessionRequest expectedRequest =
      CreateReadSessionRequest.newBuilder()
          .setParent("projects/" + options.getBigQueryProject())
          .setReadSession(
              ReadSession.newBuilder()
                  .setTable(BigQueryHelpers.toTableResourceName(tempTableReference))
                  .setDataFormat(DataFormat.AVRO))
          .setMaxStreamCount(10)
          .build();
  ReadSession emptyReadSession = ReadSession.newBuilder().build();
  StorageClient fakeStorageClient = mock(StorageClient.class);
  when(fakeStorageClient.createReadSession(expectedRequest)).thenReturn(emptyReadSession);
  BigQueryStorageQuerySource<TableRow> querySource =
      BigQueryStorageQuerySource.create(
          stepUuid,
          ValueProvider.StaticValueProvider.of(encodedQuery),
          /* flattenResults = */ true,
          /* useLegacySql = */ true,
          /* priority = */ QueryPriority.BATCH,
          /* location = */ null,
          /* queryTempDataset = */ null,
          /* kmsKey = */ null,
          DataFormat.AVRO,
          new TableRowParser(),
          TableRowJsonCoder.of(),
          new FakeBigQueryServices()
              .withDatasetService(fakeDatasetService)
              .withJobService(fakeJobService)
              .withStorageClient(fakeStorageClient));
  List<? extends BoundedSource<TableRow>> sources = querySource.split(1024L, options);
  assertTrue(sources.isEmpty());
}
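The table name in the expected request comes from BigQueryHelpers.toTableResourceName. Assuming it follows the BigQuery Storage API resource naming convention, the value produced for the temp table reference would look like this sketch:

// Assumed resource form of the temporary table placed in the expected read session.
String tableResourceName = String.format(
    "projects/%s/datasets/%s/tables/%s",
    tempTableReference.getProjectId(),
    tempTableReference.getDatasetId(),
    tempTableReference.getTableId());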
use of com.google.api.services.bigquery.model.TableReference in project beam by apache.
the class BigQueryIOStorageQueryTest method doQuerySourceInitialSplit.
private void doQuerySourceInitialSplit(
    long bundleSize, int requestedStreamCount, int expectedStreamCount) throws Exception {
  TableReference sourceTableRef = BigQueryHelpers.parseTableSpec("project:dataset.table");
  fakeDatasetService.createDataset(
      sourceTableRef.getProjectId(), sourceTableRef.getDatasetId(),
      "asia-northeast1", "Fake plastic tree^H^H^H^Htables", null);
  fakeDatasetService.createTable(
      new Table().setTableReference(sourceTableRef).setLocation("asia-northeast1"));
  Table queryResultTable =
      new Table()
          .setSchema(new TableSchema().setFields(ImmutableList.of(
              new TableFieldSchema().setName("name").setType("STRING"),
              new TableFieldSchema().setName("number").setType("INTEGER"))))
          .setNumBytes(1024L * 1024L);
  String encodedQuery = FakeBigQueryServices.encodeQueryResult(queryResultTable);
  fakeJobService.expectDryRunQuery(
      options.getProject(),
      encodedQuery,
      new JobStatistics()
          .setQuery(new JobStatistics2()
              .setTotalBytesProcessed(1024L * 1024L)
              .setReferencedTables(ImmutableList.of(sourceTableRef))));
  String stepUuid = "testStepUuid";
  TableReference tempTableReference =
      createTempTableReference(
          options.getProject(),
          BigQueryResourceNaming.createJobIdPrefix(options.getJobName(), stepUuid, JobType.QUERY),
          Optional.empty());
  CreateReadSessionRequest expectedRequest =
      CreateReadSessionRequest.newBuilder()
          .setParent("projects/" + options.getProject())
          .setReadSession(
              ReadSession.newBuilder()
                  .setTable(BigQueryHelpers.toTableResourceName(tempTableReference)))
          .setMaxStreamCount(requestedStreamCount)
          .build();
  Schema sessionSchema =
      SchemaBuilder.record("__root__")
          .fields()
          .name("name").type().nullable().stringType().noDefault()
          .name("number").type().nullable().longType().noDefault()
          .endRecord();
  ReadSession.Builder builder =
      ReadSession.newBuilder()
          .setAvroSchema(AvroSchema.newBuilder().setSchema(sessionSchema.toString()))
          .setDataFormat(DataFormat.AVRO);
  for (int i = 0; i < expectedStreamCount; i++) {
    builder.addStreams(ReadStream.newBuilder().setName("stream-" + i));
  }
  StorageClient fakeStorageClient = mock(StorageClient.class);
  when(fakeStorageClient.createReadSession(expectedRequest)).thenReturn(builder.build());
  BigQueryStorageQuerySource<TableRow> querySource =
      BigQueryStorageQuerySource.create(
          stepUuid,
          ValueProvider.StaticValueProvider.of(encodedQuery),
          /* flattenResults = */ true,
          /* useLegacySql = */ true,
          /* priority = */ QueryPriority.BATCH,
          /* location = */ null,
          /* queryTempDataset = */ null,
          /* kmsKey = */ null,
          null,
          new TableRowParser(),
          TableRowJsonCoder.of(),
          new FakeBigQueryServices()
              .withDatasetService(fakeDatasetService)
              .withJobService(fakeJobService)
              .withStorageClient(fakeStorageClient));
  List<? extends BoundedSource<TableRow>> sources = querySource.split(bundleSize, options);
  assertEquals(expectedStreamCount, sources.size());
}
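doQuerySourceInitialSplit is parameterized so individual tests can vary the desired bundle size, the stream count expected in the CreateReadSessionRequest, and the number of streams the fake session returns. A hypothetical caller, with illustrative values chosen so that the 1 MiB query result divided by the bundle size matches the requested stream count:

// Hypothetical test: a 1 MiB result split into 64 KiB bundles should request 16 streams,
// and the source should yield one child source per stream in the fake read session.
@Test
public void testQuerySourceInitialSplit() throws Exception {
  doQuerySourceInitialSplit(64 * 1024L, 16, 16);
}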