Use of com.google.api.services.bigquery.model.TableReference in project beam by apache.
In class BigQueryIOTest, method testWriteValidatesDataset.
private void testWriteValidatesDataset(boolean unbounded) throws Exception {
  String projectId = "someproject";
  String datasetId = "somedataset";
  BigQueryOptions options = TestPipeline.testingPipelineOptions().as(BigQueryOptions.class);
  options.setProject(projectId);
  FakeBigQueryServices fakeBqServices = new FakeBigQueryServices()
      .withJobService(new FakeJobService())
      .withDatasetService(new FakeDatasetService());
  Pipeline p = TestPipeline.create(options);
  TableReference tableRef = new TableReference();
  tableRef.setDatasetId(datasetId);
  tableRef.setTableId("sometable");
  PCollection<TableRow> tableRows;
  if (unbounded) {
    tableRows = p.apply(GenerateSequence.from(0))
        .apply(MapElements.via(new SimpleFunction<Long, TableRow>() {
          @Override
          public TableRow apply(Long input) {
            return null;
          }
        }))
        .setCoder(TableRowJsonCoder.of());
  } else {
    tableRows = p.apply(Create.empty(TableRowJsonCoder.of()));
  }
  thrown.expect(RuntimeException.class);
  // The message will be one of the following, depending on the execution environment.
  thrown.expectMessage(Matchers.either(Matchers.containsString("Unable to confirm BigQuery dataset presence"))
      .or(Matchers.containsString("BigQuery dataset not found for table")));
  tableRows.apply(BigQueryIO.writeTableRows()
      .to(tableRef)
      .withCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
      .withSchema(new TableSchema())
      .withTestServices(fakeBqServices));
  p.run();
}
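
The test above deliberately leaves the project ID unset on the TableReference so that validation must resolve it from the pipeline options. As a companion illustration, a minimal sketch (not from the Beam repo; the helper name fullyQualifiedSpec is hypothetical) of building a fully qualified reference and rendering it with BigQueryHelpers.toTableSpec:

static String fullyQualifiedSpec() {
  // Fully qualified, unlike the tableRef above, which omits the project ID.
  TableReference ref = new TableReference()
      .setProjectId("someproject")
      .setDatasetId("somedataset")
      .setTableId("sometable");
  // Renders the "project:dataset.table" string form.
  return BigQueryHelpers.toTableSpec(ref);  // "someproject:somedataset.sometable"
}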
Use of com.google.api.services.bigquery.model.TableReference in project beam by apache.
In class BigQueryIOTest, method testTableParsing_noProjectId.
@Test
public void testTableParsing_noProjectId() {
  TableReference ref = BigQueryHelpers.parseTableSpec("data_set.table_name");
  Assert.assertNull(ref.getProjectId());
  Assert.assertEquals("data_set", ref.getDatasetId());
  Assert.assertEquals("table_name", ref.getTableId());
}
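
For contrast, a hedged companion test (not part of the excerpt; the method name testTableParsing_withProjectId is hypothetical) covering the fully qualified "project:dataset.table" form that parseTableSpec also accepts, as seen in testInsertAll below:

@Test
public void testTableParsing_withProjectId() {
  TableReference ref = BigQueryHelpers.parseTableSpec("some_project:data_set.table_name");
  Assert.assertEquals("some_project", ref.getProjectId());
  Assert.assertEquals("data_set", ref.getDatasetId());
  Assert.assertEquals("table_name", ref.getTableId());
}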
Use of com.google.api.services.bigquery.model.TableReference in project beam by apache.
In class BigQueryUtilTest, method testInsertAll.
@Test
public void testInsertAll() throws Exception {
  // Build up a list of indices to fail on each invocation. This should result in
  // 5 calls to insertAll.
  List<List<Long>> errorsIndices = new ArrayList<>();
  errorsIndices.add(Arrays.asList(0L, 5L, 10L, 15L, 20L));
  errorsIndices.add(Arrays.asList(0L, 2L, 4L));
  errorsIndices.add(Arrays.asList(0L, 2L));
  errorsIndices.add(new ArrayList<Long>());
  onInsertAll(errorsIndices);
  TableReference ref = BigQueryHelpers.parseTableSpec("project:dataset.table");
  DatasetServiceImpl datasetService = new DatasetServiceImpl(mockClient, options, 5);
  List<ValueInSingleWindow<TableRow>> rows = new ArrayList<>();
  List<String> ids = new ArrayList<>();
  for (int i = 0; i < 25; ++i) {
    rows.add(ValueInSingleWindow.of(rawRow("foo", 1234), GlobalWindow.TIMESTAMP_MAX_VALUE,
        GlobalWindow.INSTANCE, PaneInfo.ON_TIME_AND_ONLY_FIRING));
    ids.add("");
  }
  long totalBytes = 0;
  try {
    totalBytes = datasetService.insertAll(ref, rows, ids, InsertRetryPolicy.alwaysRetry(), null);
  } finally {
    verifyInsertAll(5);
    // Each of the 25 rows is 23 bytes: "{f=[{v=foo}, {v=1234}]}"
    assertEquals("Incorrect byte count", 25L * 23L, totalBytes);
  }
}
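
The byte-count assertion above leans on each serialized row being exactly 23 characters. A standalone check of that arithmetic (an illustration only, not part of the test class):

// The per-row rendering that insertAll measures, per the comment in the test.
String rendered = "{f=[{v=foo}, {v=1234}]}";
assertEquals(23, rendered.length());          // bytes per row
assertEquals(575L, 25L * rendered.length());  // 25 rows * 23 bytes = expected totalBytes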
Use of com.google.api.services.bigquery.model.TableReference in project beam by apache.
In class BigQueryUtilTest, method testTableGet.
@Test
public void testTableGet() throws InterruptedException, IOException {
  onTableGet(basicTableSchema());
  TableDataList dataList = new TableDataList().setTotalRows(0L);
  onTableList(dataList);
  BigQueryServicesImpl.DatasetServiceImpl services =
      new BigQueryServicesImpl.DatasetServiceImpl(mockClient, options);
  services.getTable(new TableReference()
      .setProjectId("project").setDatasetId("dataset").setTableId("table"));
  verifyTableGet();
}
Use of com.google.api.services.bigquery.model.TableReference in project beam by apache.
In class FakeJobService, method runCopyJob.
private JobStatus runCopyJob(JobConfigurationTableCopy copy)
    throws InterruptedException, IOException {
  List<TableReference> sources = copy.getSourceTables();
  TableReference destination = copy.getDestinationTable();
  WriteDisposition writeDisposition = WriteDisposition.valueOf(copy.getWriteDisposition());
  CreateDisposition createDisposition = CreateDisposition.valueOf(copy.getCreateDisposition());
  Table existingTable = datasetService.getTable(destination);
  if (!validateDispositions(existingTable, createDisposition, writeDisposition)) {
    return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
  }
  // Concatenate the rows of every source table, then write them to the destination.
  List<TableRow> allRows = Lists.newArrayList();
  for (TableReference source : sources) {
    allRows.addAll(datasetService.getAllRows(
        source.getProjectId(), source.getDatasetId(), source.getTableId()));
  }
  datasetService.createTable(new Table().setTableReference(destination));
  datasetService.insertAll(destination, allRows, null);
  return new JobStatus().setState("DONE");
}
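
validateDispositions is a private FakeJobService helper not shown in this excerpt. A minimal sketch of what it plausibly checks, based on BigQuery's documented create/write disposition semantics (an assumed reconstruction, not the repo's actual code):

// Assumed reconstruction: enforce CREATE_NEVER and WRITE_EMPTY/WRITE_TRUNCATE semantics.
private boolean validateDispositions(
    Table table, CreateDisposition createDisposition, WriteDisposition writeDisposition)
    throws InterruptedException, IOException {
  if (table == null) {
    // CREATE_NEVER requires the destination table to already exist.
    return createDisposition != CreateDisposition.CREATE_NEVER;
  }
  if (writeDisposition == WriteDisposition.WRITE_TRUNCATE) {
    // Replace existing contents: drop the table so the copy starts from empty.
    datasetService.deleteTable(table.getTableReference());
  } else if (writeDisposition == WriteDisposition.WRITE_EMPTY) {
    // WRITE_EMPTY fails when the destination already holds rows.
    TableReference ref = table.getTableReference();
    return datasetService.getAllRows(
        ref.getProjectId(), ref.getDatasetId(), ref.getTableId()).isEmpty();
  }
  return true;
}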