Use of bio.terra.service.dataset.AssetTable in the jade-data-repo project by DataBiosphere.
Class SnapshotService, method conjureSnapshotTablesFromAsset.
/**
 * Magic up the snapshot tables and snapshot map from the asset tables and columns.
 * This method sets the table lists into snapshot and snapshotSource.
 *
 * @param asset the one and only asset specification for this snapshot
 * @param snapshot snapshot to point back to and fill in
 * @param snapshotSource snapshotSource to point back to and fill in
 */
private void conjureSnapshotTablesFromAsset(
        AssetSpecification asset, Snapshot snapshot, SnapshotSource snapshotSource) {
    List<SnapshotTable> tableList = new ArrayList<>();
    List<SnapshotMapTable> mapTableList = new ArrayList<>();

    for (AssetTable assetTable : asset.getAssetTables()) {
        // Create early so we can hook up back pointers.
        SnapshotTable table = new SnapshotTable();
        // Build the column lists in parallel, so we can easily connect the
        // map column to the snapshot column.
        List<Column> columnList = new ArrayList<>();
        List<SnapshotMapColumn> mapColumnList = new ArrayList<>();

        for (AssetColumn assetColumn : assetTable.getColumns()) {
            Column column = new Column(assetColumn.getDatasetColumn());
            columnList.add(column);
            mapColumnList.add(
                new SnapshotMapColumn()
                    .fromColumn(assetColumn.getDatasetColumn())
                    .toColumn(column));
        }

        table.name(assetTable.getTable().getName()).columns(columnList);
        tableList.add(table);
        mapTableList.add(
            new SnapshotMapTable()
                .fromTable(assetTable.getTable())
                .toTable(table)
                .snapshotMapColumns(mapColumnList));
    }

    snapshotSource.snapshotMapTables(mapTableList);
    snapshot.snapshotTables(tableList);
}
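
The loop above builds the snapshot tables and the snapshot map as parallel lists, so each map column can point at the snapshot column created in the same iteration. Below is a minimal, self-contained sketch of that pattern; Col and MapCol are simplified stand-ins for illustration only, not the actual jade-data-repo model classes or their APIs.

import java.util.ArrayList;
import java.util.List;

// Illustrative stand-in for a column; not the repo's Column class.
class Col {
    final String name;
    Col(String name) { this.name = name; }
}

// Illustrative stand-in for a map entry with fluent setters, mirroring the
// SnapshotMapColumn-style chaining used in the snippet above.
class MapCol {
    Col from;
    Col to;
    MapCol fromColumn(Col c) { this.from = c; return this; }
    MapCol toColumn(Col c) { this.to = c; return this; }
}

public class ParallelListSketch {
    public static void main(String[] args) {
        List<Col> datasetColumns = List.of(new Col("sample_id"), new Col("file_ref"));

        // Build the snapshot columns and the map entries in the same loop so each
        // map entry can reference the snapshot column created for it.
        List<Col> snapshotColumns = new ArrayList<>();
        List<MapCol> mapColumns = new ArrayList<>();
        for (Col datasetColumn : datasetColumns) {
            Col snapshotColumn = new Col(datasetColumn.name);
            snapshotColumns.add(snapshotColumn);
            mapColumns.add(new MapCol().fromColumn(datasetColumn).toColumn(snapshotColumn));
        }

        for (MapCol m : mapColumns) {
            System.out.println(m.from.name + " -> " + m.to.name);
        }
    }
}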
Use of bio.terra.service.dataset.AssetTable in the jade-data-repo project by DataBiosphere.
Class BigQueryPdao, method queryForRowIds.
// Insert the rowIds into the snapshot row ids table and then kick off the rest of the
// relationship walking. Once we have the row ids in addition to the asset spec, this
// should look familiar to wAsset.
public void queryForRowIds(AssetSpecification assetSpecification, Snapshot snapshot, String sqlQuery)
        throws InterruptedException {
    BigQueryProject bigQueryProject = bigQueryProjectForSnapshot(snapshot);
    BigQuery bigQuery = bigQueryProject.getBigQuery();
    String snapshotName = snapshot.getName();
    Dataset dataset = snapshot.getSnapshotSources().get(0).getDataset();
    String datasetBqDatasetName = prefixName(dataset.getName());
    String projectId = bigQueryProject.getProjectId();

    try {
        // create the snapshot BQ dataset
        snapshotCreateBQDataset(bigQueryProject, snapshot);

        // create a temp table holding all of the row ids selected by the query
        bigQueryProject.createTable(snapshotName, PDAO_TEMP_TABLE, tempTableSchema());
        QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(sqlQuery)
            .setDestinationTable(TableId.of(snapshotName, PDAO_TEMP_TABLE))
            .setWriteDisposition(JobInfo.WriteDisposition.WRITE_APPEND)
            .build();
        try {
            final TableResult query = bigQuery.query(queryConfig);
            // validate that the query returned at least one row
            if (query.getTotalRows() < 1) {
                // should this be a different error?
                throw new InvalidQueryException("Query returned 0 results");
            }
        } catch (InterruptedException ie) {
            throw new PdaoException("Append query unexpectedly interrupted", ie);
        }

        // join on the root table to validate that the dataset's rootTable.rowid is never null
        // and thus matches the PDAO_ROW_ID_COLUMN
        AssetTable rootAssetTable = assetSpecification.getRootTable();
        Table rootTable = rootAssetTable.getTable();
        String datasetTableName = rootTable.getName();
        String rootTableId = rootTable.getId().toString();

        ST sqlTemplate = new ST(joinTablesToTestForMissingRowIds);
        sqlTemplate.add("snapshotDatasetName", snapshotName);
        sqlTemplate.add("tempTable", PDAO_TEMP_TABLE);
        sqlTemplate.add("datasetDatasetName", datasetBqDatasetName);
        sqlTemplate.add("datasetTable", datasetTableName);
        sqlTemplate.add("commonColumn", PDAO_ROW_ID_COLUMN);

        TableResult result = bigQueryProject.query(sqlTemplate.render());
        FieldValueList mismatchedCount = result.getValues().iterator().next();
        Long mismatchedCountLong = mismatchedCount.get(0).getLongValue();
        if (mismatchedCountLong > 0) {
            throw new MismatchedValueException("Query results did not match dataset root row ids");
        }

        // TODO: should this be pulled up to the top of queryForRowIds() or added to the
        // snapshotCreateBQDataset() helper?
        bigQueryProject.createTable(snapshotName, PDAO_ROW_ID_TABLE, rowIdTableSchema());

        // Populate root row ids; this must happen before the relationship walk.
        // NOTE: when we have multiple sources, we can put this into a loop.
        // Insert into the PDAO_ROW_ID_TABLE the literal that is the table id
        // and then all the row ids from the temp table.
        ST sqlLoadTemplate = new ST(loadRootRowIdsFromTempTableTemplate);
        sqlLoadTemplate.add("project", projectId);
        sqlLoadTemplate.add("snapshot", snapshotName);
        sqlLoadTemplate.add("dataset", datasetBqDatasetName);
        sqlLoadTemplate.add("tableId", rootTableId);
        // this is the disc from classic asset
        sqlLoadTemplate.add("commonColumn", PDAO_ROW_ID_COLUMN);
        sqlLoadTemplate.add("tempTable", PDAO_TEMP_TABLE);
        bigQueryProject.query(sqlLoadTemplate.render());

        // ST sqlValidateTemplate = new ST(validateRowIdsForRootTemplate);
        // TODO: do we want to reuse this validation? If yes, maybe mismatchedCount / query should be updated.

        // walk and populate relationship table row ids
        List<WalkRelationship> walkRelationships = WalkRelationship.ofAssetSpecification(assetSpecification);
        walkRelationships(datasetBqDatasetName, snapshotName, walkRelationships, rootTableId, projectId, bigQuery);

        // create the snapshot views over the selected row ids
        snapshotViewCreation(datasetBqDatasetName, snapshotName, snapshot, projectId, bigQuery, bigQueryProject);
    } catch (PdaoException ex) {
        // TODO: what if the query is invalid? Seems like there might be more to catch here.
        throw new PdaoException("createSnapshot failed", ex);
    }
}
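
The SQL used by this method lives in StringTemplate (ST) templates such as joinTablesToTestForMissingRowIds and loadRootRowIdsFromTempTableTemplate, whose text is not part of this snippet. Below is a hedged, self-contained sketch of how the root-row-id load might be templated and rendered; the template string, table names, and attribute values are illustrative guesses, not the repo's actual SQL or constants.

import org.stringtemplate.v4.ST;

public class RowIdTemplateSketch {
    // Hypothetical template text; the real loadRootRowIdsFromTempTableTemplate in
    // BigQueryPdao may differ. The idea is to insert the root table id as a literal
    // alongside every row id captured in the temp table.
    private static final String LOAD_ROOT_ROW_IDS_TEMPLATE =
        "INSERT INTO `<project>.<snapshot>.datarepo_row_ids` (datarepo_table_id, <commonColumn>) "
            + "SELECT '<tableId>' AS datarepo_table_id, T.<commonColumn> "
            + "FROM `<project>.<snapshot>.<tempTable>` AS T";

    public static void main(String[] args) {
        // Illustrative values standing in for projectId, snapshotName, rootTableId,
        // PDAO_ROW_ID_COLUMN, and PDAO_TEMP_TABLE.
        ST sqlLoadTemplate = new ST(LOAD_ROOT_ROW_IDS_TEMPLATE);
        sqlLoadTemplate.add("project", "my-gcp-project");
        sqlLoadTemplate.add("snapshot", "my_snapshot");
        sqlLoadTemplate.add("tableId", "11111111-2222-3333-4444-555555555555");
        sqlLoadTemplate.add("commonColumn", "datarepo_row_id");
        sqlLoadTemplate.add("tempTable", "datarepo_temp");
        System.out.println(sqlLoadTemplate.render());
    }
}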