Example usage of org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition in the Apache Beam project:
the FakeJobService class, method runLoadJob.
/**
 * Simulates a BigQuery load job against the fake dataset service.
 *
 * <p>Validates the job's create/write dispositions against the destination table's current
 * state, then reads newline-delimited JSON rows from the staged source files registered for
 * this job and inserts them into the destination.
 *
 * @param jobRef identifies the job; used to look up the staged source files
 * @param load the load configuration (destination, schema, dispositions, source format)
 * @return a {@code JobStatus} in state {@code "DONE"} on success, or {@code "FAILED"} with an
 *     error result when the dispositions are not valid for the destination's current state
 * @throws InterruptedException if a fake dataset-service call is interrupted
 * @throws IOException if reading a staged source file fails
 */
private JobStatus runLoadJob(JobReference jobRef, JobConfigurationLoad load) throws InterruptedException, IOException {
  TableReference destTable = load.getDestinationTable();
  TableSchema destSchema = load.getSchema();
  List<ResourceId> stagedFiles = filesForLoadJobs.get(jobRef.getProjectId(), jobRef.getJobId());
  WriteDisposition writeMode = WriteDisposition.valueOf(load.getWriteDisposition());
  CreateDisposition createMode = CreateDisposition.valueOf(load.getCreateDisposition());
  // This fake only supports newline-delimited JSON source files.
  checkArgument(load.getSourceFormat().equals("NEWLINE_DELIMITED_JSON"));
  Table currentTable = datasetService.getTable(destTable);
  if (!validateDispositions(currentTable, createMode, writeMode)) {
    return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
  }
  // NOTE(review): createTable is called unconditionally; presumably the fake dataset service
  // treats re-creation of an existing table as a no-op — confirm against FakeDatasetService.
  datasetService.createTable(new Table().setTableReference(destTable).setSchema(destSchema));
  List<TableRow> loadedRows = Lists.newArrayList();
  for (ResourceId stagedFile : stagedFiles) {
    loadedRows.addAll(readRows(stagedFile.toString()));
  }
  datasetService.insertAll(destTable, loadedRows, null);
  return new JobStatus().setState("DONE");
}
Example usage of org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition in the Apache Beam project:
the FakeJobService class, method runCopyJob.
/**
 * Simulates a BigQuery table-copy job against the fake dataset service.
 *
 * <p>Validates the job's create/write dispositions against the destination table's current
 * state, gathers every row from each source table, then creates the destination and inserts
 * the combined rows into it.
 *
 * @param copy the copy configuration (source tables, destination, dispositions)
 * @return a {@code JobStatus} in state {@code "DONE"} on success, or {@code "FAILED"} with an
 *     error result when the dispositions are not valid for the destination's current state
 * @throws InterruptedException if a fake dataset-service call is interrupted
 * @throws IOException if reading from the fake dataset service fails
 */
private JobStatus runCopyJob(JobConfigurationTableCopy copy) throws InterruptedException, IOException {
  List<TableReference> sourceTables = copy.getSourceTables();
  TableReference destTable = copy.getDestinationTable();
  WriteDisposition writeMode = WriteDisposition.valueOf(copy.getWriteDisposition());
  CreateDisposition createMode = CreateDisposition.valueOf(copy.getCreateDisposition());
  Table currentTable = datasetService.getTable(destTable);
  if (!validateDispositions(currentTable, createMode, writeMode)) {
    return new JobStatus().setState("FAILED").setErrorResult(new ErrorProto());
  }
  // Accumulate the contents of every source table before writing anything.
  List<TableRow> combinedRows = Lists.newArrayList();
  for (TableReference sourceTable : sourceTables) {
    combinedRows.addAll(
        datasetService.getAllRows(
            sourceTable.getProjectId(), sourceTable.getDatasetId(), sourceTable.getTableId()));
  }
  // NOTE(review): unlike runLoadJob, no schema is set on the created destination table —
  // presumably the fake dataset service tolerates schemaless tables; confirm.
  datasetService.createTable(new Table().setTableReference(destTable));
  datasetService.insertAll(destTable, combinedRows, null);
  return new JobStatus().setState("DONE");
}
Aggregations