Example usage of org.apache.asterix.metadata.entities.Dataset in the Apache AsterixDB project.
From the class MetadataManagerUtil, method lookupSourceInMetadata:
/**
 * Resolves a {@link DataSourceId} to a {@link DatasetDataSource} by looking up the
 * dataset, its item/meta types, and its node-group domain in the metadata.
 *
 * @param mdTxnCtx the metadata transaction context used for all metadata lookups
 * @param aqlId    the (dataverse name, datasource name) identifier to resolve
 * @return the resolved dataset data source
 * @throws AlgebricksException if no dataset with the given id exists
 */
public static DataSource lookupSourceInMetadata(MetadataTransactionContext mdTxnCtx, DataSourceId aqlId)
        throws AlgebricksException {
    Dataset dataset = findDataset(mdTxnCtx, aqlId.getDataverseName(), aqlId.getDatasourceName());
    if (dataset == null) {
        throw new AlgebricksException("Datasource with id " + aqlId + " was not found.");
    }
    IAType itemType = findType(mdTxnCtx, dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    // NOTE(review): presumably null for datasets without a meta part — findType appears to tolerate that.
    IAType metaItemType = findType(mdTxnCtx, dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
    INodeDomain domain = findNodeDomain(mdTxnCtx, dataset.getNodeGroupName());
    // Enum constants are singletons: compare with '==' rather than equals() (typo-safe,
    // null-safe on the right-hand side, and flagged by the compiler on type mismatch).
    byte datasourceType = dataset.getDatasetType() == DatasetType.EXTERNAL
            ? DataSource.Type.EXTERNAL_DATASET
            : DataSource.Type.INTERNAL_DATASET;
    return new DatasetDataSource(aqlId, dataset, itemType, metaItemType, datasourceType,
            dataset.getDatasetDetails(), domain);
}
Example usage of org.apache.asterix.metadata.entities.Dataset in the Apache AsterixDB project.
From the class MetadataProvider, method getUpsertRuntime:
/**
 * Builds the primary-index upsert runtime for the given dataset data source.
 * <p>
 * Constructs the field permutation handed to the upsert operator, laid out as
 * {@code [primary keys, record, filter field, additional non-filter fields]}, where each
 * entry is the position of the corresponding variable in {@code inputSchema}.
 *
 * @param dataSource               identifies the target dataset (dataverse + dataset name)
 * @param inputSchema              schema used to resolve variable positions
 * @param typeEnv                  type environment (unused here, required by the interface)
 * @param primaryKeys              primary-key variables, in key order
 * @param payload                  variable carrying the record to upsert
 * @param filterKeys               filter-field variables; first entry is used when the dataset has a filter field
 * @param additionalNonFilterFields extra fields appended after the filter, may be {@code null}
 * @param recordDesc               record descriptor of the operator's input
 * @param context                  job-gen context supplying the missing-writer factory
 * @param spec                     job specification the operator is added to
 * @return the upsert operator descriptor and its partition constraint
 * @throws AlgebricksException if the dataset cannot be found in the metadata
 */
@Override
public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getUpsertRuntime(
        IDataSource<DataSourceId> dataSource, IOperatorSchema inputSchema, IVariableTypeEnvironment typeEnv,
        List<LogicalVariable> primaryKeys, LogicalVariable payload, List<LogicalVariable> filterKeys,
        List<LogicalVariable> additionalNonFilterFields, RecordDescriptor recordDesc, JobGenContext context,
        JobSpecification spec) throws AlgebricksException {
    String datasetName = dataSource.getId().getDatasourceName();
    Dataset dataset = findDataset(dataSource.getId().getDataverseName(), datasetName);
    if (dataset == null) {
        throw new AlgebricksException(
                "Unknown dataset " + datasetName + " in dataverse " + dataSource.getId().getDataverseName());
    }
    // A write job remains a "temporary dataset" job only while every dataset it writes is temporary.
    boolean temp = dataset.getDatasetDetails().isTemp();
    isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob && temp;
    int numKeys = primaryKeys.size();
    int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
    int numOfAdditionalFields = additionalNonFilterFields == null ? 0 : additionalNonFilterFields.size();
    // Move key fields to front. Layout: [keys, record, filter, additional fields].
    int[] fieldPermutation = new int[numKeys + 1 + numFilterFields + numOfAdditionalFields];
    int i = 0;
    // Set the keys' permutations. (A bloomFilterKeyFields array used to be built here but was
    // never consumed by createPrimaryIndexUpsertOp; the dead local has been removed.)
    for (LogicalVariable varKey : primaryKeys) {
        fieldPermutation[i++] = inputSchema.findVariable(varKey);
    }
    // Set the record permutation.
    fieldPermutation[i++] = inputSchema.findVariable(payload);
    // Set the filter's permutation (at most one filter field per dataset).
    if (numFilterFields > 0) {
        fieldPermutation[i++] = inputSchema.findVariable(filterKeys.get(0));
    }
    // Append any additional non-filter fields after the filter.
    if (additionalNonFilterFields != null) {
        for (LogicalVariable var : additionalNonFilterFields) {
            fieldPermutation[i++] = inputSchema.findVariable(var);
        }
    }
    return DatasetUtil.createPrimaryIndexUpsertOp(spec, this, dataset, recordDesc, fieldPermutation,
            context.getMissingWriterFactory());
}
Aggregations