use of org.apache.asterix.metadata.MetadataException in project asterixdb by apache.
the class SubscribeFeedStatement method initialize.
public void initialize(MetadataTransactionContext mdTxnCtx) throws MetadataException {
    this.query = new Query(false);
    EntityId sourceFeedId = connectionRequest.getReceivingFeedId();
    Feed subscriberFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, sourceFeedId.getDataverse(),
            sourceFeedId.getEntityName());
    if (subscriberFeed == null) {
        throw new IllegalStateException("Subscriber feed " + sourceFeedId + " not found.");
    }
    String feedOutputType = getOutputType(mdTxnCtx);
    // Translate the subscription request into an AQL insert statement over feed-collect.
    StringBuilder builder = new StringBuilder();
    builder.append("use dataverse " + sourceFeedId.getDataverse() + ";\n");
    builder.append("set " + FunctionUtil.IMPORT_PRIVATE_FUNCTIONS + " '" + Boolean.TRUE + "';\n");
    builder.append("set " + FeedActivityDetails.FEED_POLICY_NAME + " '" + connectionRequest.getPolicy() + "';\n");
    builder.append("insert into dataset " + connectionRequest.getTargetDataset() + " ");
    builder.append(" ( for $x in feed-collect ('" + sourceFeedId.getDataverse() + "','"
            + sourceFeedId.getEntityName() + "','" + connectionRequest.getReceivingFeedId().getEntityName() + "','"
            + connectionRequest.getSubscriptionLocation().name() + "','" + connectionRequest.getTargetDataset()
            + "','" + feedOutputType + "')");
    List<FunctionSignature> functionsToApply = connectionRequest.getFunctionsToApply();
    if (functionsToApply == null || functionsToApply.isEmpty()) {
        builder.append(" return $x");
    } else {
        // Chain the applied functions: each let-binding consumes the previous variable.
        String rValueName = "x";
        String lValueName = "y";
        int variableIndex = 0;
        for (FunctionSignature appliedFunction : functionsToApply) {
            Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
            variableIndex++;
            switch (function.getLanguage().toUpperCase()) {
                // AQL and Java functions are inlined identically.
                case Function.LANGUAGE_AQL:
                case Function.LANGUAGE_JAVA:
                    builder.append(" let $" + lValueName + variableIndex + ":=" + function.getName() + "($"
                            + rValueName + ")");
                    rValueName = lValueName + variableIndex;
                    break;
            }
            builder.append("\n");
        }
        builder.append("return $" + lValueName + variableIndex);
    }
    builder.append(")");
    builder.append(";");
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Connect feed statement translated to\n" + builder.toString());
    }
    IParser parser = parserFactory.createParser(new StringReader(builder.toString()));
    try {
        List<Statement> statements = parser.parse();
        query = ((InsertStatement) statements.get(INSERT_STATEMENT_POS)).getQuery();
    } catch (CompilationException pe) {
        throw new MetadataException(pe);
    }
}
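To make the translation concrete, this is roughly the AQL text the builder produces for a hypothetical request with no applied functions. The two set-parameter names below stand in for the values of FunctionUtil.IMPORT_PRIVATE_FUNCTIONS and FeedActivityDetails.FEED_POLICY_NAME, and the dataverse, feed, dataset, type, policy, and subscription-location names are invented for illustration:

use dataverse feeds;
set import-private-functions 'true';
set feed-policy 'Basic';
insert into dataset Tweets  ( for $x in feed-collect ('feeds','TwitterFeed','TwitterFeed','SOURCE_FEED_INTAKE','Tweets','TweetType') return $x);

The parser then turns this text back into statements, and the Query of the resulting InsertStatement becomes this SubscribeFeedStatement's query.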
use of org.apache.asterix.metadata.MetadataException in project asterixdb by apache.
the class DatasetUtil method createPrimaryIndexUpsertOp.
/**
 * Creates a primary index upsert operator for a given dataset.
 *
 * @param spec
 *            the job specification.
 * @param metadataProvider
 *            the metadata provider.
 * @param dataset
 *            the dataset to upsert.
 * @param inputRecordDesc
 *            the record descriptor of an input tuple.
 * @param fieldPermutation
 *            the field permutation according to the input.
 * @param missingWriterFactory
 *            the factory for customizing missing value serialization.
 * @return a primary index upsert operator and its location constraints.
 * @throws AlgebricksException
 */
public static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> createPrimaryIndexUpsertOp(
        JobSpecification spec, MetadataProvider metadataProvider, Dataset dataset,
        RecordDescriptor inputRecordDesc, int[] fieldPermutation, IMissingWriterFactory missingWriterFactory)
        throws AlgebricksException {
    int numKeys = dataset.getPrimaryKeys().size();
    int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
    ARecordType itemType = (ARecordType) metadataProvider.findType(dataset);
    ARecordType metaItemType = (ARecordType) metadataProvider.findMetaType(dataset);
    try {
        // The primary index shares its name with the dataset.
        Index primaryIndex = metadataProvider.getIndex(dataset.getDataverseName(), dataset.getDatasetName(),
                dataset.getDatasetName());
        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                metadataProvider.getSplitProviderAndConstraints(dataset);
        // prepare callback
        JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
        int[] primaryKeyFields = new int[numKeys];
        for (int i = 0; i < numKeys; i++) {
            primaryKeyFields[i] = i;
        }
        boolean hasSecondaries =
                metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName()).size() > 1;
        IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
        IModificationOperationCallbackFactory modificationCallbackFactory = dataset.getModificationCallbackFactory(
                storageComponentProvider, primaryIndex, jobId, IndexOperation.UPSERT, primaryKeyFields);
        ISearchOperationCallbackFactory searchCallbackFactory = dataset.getSearchCallbackFactory(
                storageComponentProvider, primaryIndex, jobId, IndexOperation.UPSERT, primaryKeyFields);
        IIndexDataflowHelperFactory idfh = new IndexDataflowHelperFactory(
                storageComponentProvider.getStorageManager(), splitsAndConstraint.first);
        // The output tuple is the input fields preceded by the previous record, and optionally the
        // previous meta part and the previous filter value.
        ITypeTraits[] outputTypeTraits = new ITypeTraits[inputRecordDesc.getFieldCount()
                + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
        ISerializerDeserializer<?>[] outputSerDes = new ISerializerDeserializer[inputRecordDesc.getFieldCount()
                + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
        // add the previous record first
        int f = 0;
        outputSerDes[f] = FormatUtils.getDefaultFormat().getSerdeProvider().getSerializerDeserializer(itemType);
        outputTypeTraits[f] = FormatUtils.getDefaultFormat().getTypeTraitProvider().getTypeTrait(itemType);
        f++;
        // add the previous meta second
        if (dataset.hasMetaPart()) {
            outputSerDes[f] =
                    FormatUtils.getDefaultFormat().getSerdeProvider().getSerializerDeserializer(metaItemType);
            outputTypeTraits[f] =
                    FormatUtils.getDefaultFormat().getTypeTraitProvider().getTypeTrait(metaItemType);
            f++;
        }
        // add the previous filter third
        int fieldIdx = -1;
        if (numFilterFields > 0) {
            String filterField = DatasetUtil.getFilterField(dataset).get(0);
            String[] fieldNames = itemType.getFieldNames();
            int i = 0;
            for (; i < fieldNames.length; i++) {
                if (fieldNames[i].equals(filterField)) {
                    break;
                }
            }
            fieldIdx = i;
            outputTypeTraits[f] = FormatUtils.getDefaultFormat().getTypeTraitProvider()
                    .getTypeTrait(itemType.getFieldTypes()[fieldIdx]);
            outputSerDes[f] = FormatUtils.getDefaultFormat().getSerdeProvider()
                    .getSerializerDeserializer(itemType.getFieldTypes()[fieldIdx]);
            f++;
        }
        // append the input fields after the previous-version fields
        for (int j = 0; j < inputRecordDesc.getFieldCount(); j++) {
            outputTypeTraits[j + f] = inputRecordDesc.getTypeTraits()[j];
            outputSerDes[j + f] = inputRecordDesc.getFields()[j];
        }
        RecordDescriptor outputRecordDesc = new RecordDescriptor(outputSerDes, outputTypeTraits);
        LSMPrimaryUpsertOperatorDescriptor op = new LSMPrimaryUpsertOperatorDescriptor(spec, outputRecordDesc,
                fieldPermutation, idfh, missingWriterFactory, modificationCallbackFactory, searchCallbackFactory,
                dataset.getFrameOpCallbackFactory(), numKeys, itemType, fieldIdx, hasSecondaries);
        return new Pair<>(op, splitsAndConstraint.second);
    } catch (MetadataException me) {
        throw new AlgebricksException(me);
    }
}
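A minimal usage sketch, assuming the caller has already assembled the job specification, metadata provider, dataset, input record descriptor, field permutation, and missing-writer factory (all of those variables are assumptions here); AlgebricksPartitionConstraintHelper is one way to apply the returned constraint:

// Hedged sketch; the surrounding variables are assumed to exist.
Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> upsert =
        DatasetUtil.createPrimaryIndexUpsertOp(spec, metadataProvider, dataset, inputRecordDesc,
                fieldPermutation, missingWriterFactory);
// Output layout: previous record, [previous meta], [previous filter], then the input fields.
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, upsert.first, upsert.second);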
use of org.apache.asterix.metadata.MetadataException in project asterixdb by apache.
the class InvertedIndexPOperator method contributeRuntimeOperator.
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    AbstractUnnestMapOperator unnestMapOp = (AbstractUnnestMapOperator) op;
    ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
    if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
        throw new IllegalStateException(
                "Expected a function-call expression, found " + unnestExpr.getExpressionTag());
    }
    AbstractFunctionCallExpression unnestFuncExpr = (AbstractFunctionCallExpression) unnestExpr;
    if (unnestFuncExpr.getFunctionIdentifier() != BuiltinFunctions.INDEX_SEARCH) {
        return;
    }
    InvertedIndexJobGenParams jobGenParams = new InvertedIndexJobGenParams();
    jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments());
    MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
    Dataset dataset;
    try {
        dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
    } catch (MetadataException e) {
        throw new AlgebricksException(e);
    }
    int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
    int[] minFilterFieldIndexes = getKeyIndexes(unnestMapOp.getMinFilterVars(), inputSchemas);
    int[] maxFilterFieldIndexes = getKeyIndexes(unnestMapOp.getMaxFilterVars(), inputSchemas);
    boolean retainNull = false;
    if (op.getOperatorTag() == LogicalOperatorTag.LEFT_OUTER_UNNEST_MAP) {
        // By nature, LEFT_OUTER_UNNEST_MAP should generate null values for non-matching tuples.
        retainNull = true;
    }
    // Build runtime.
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> invIndexSearch = buildInvertedIndexRuntime(
            metadataProvider, context, builder.getJobSpec(), unnestMapOp, opSchema,
            jobGenParams.getRetainInput(), retainNull, jobGenParams.getDatasetName(), dataset,
            jobGenParams.getIndexName(), jobGenParams.getSearchKeyType(), keyIndexes,
            jobGenParams.getSearchModifierType(), jobGenParams.getSimilarityThreshold(), minFilterFieldIndexes,
            maxFilterFieldIndexes, jobGenParams.getIsFullTextSearch());
    // Contribute operator in hyracks job.
    builder.contributeHyracksOperator(unnestMapOp, invIndexSearch.first);
    builder.contributeAlgebricksPartitionConstraint(invIndexSearch.first, invIndexSearch.second);
    ILogicalOperator srcExchange = unnestMapOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(srcExchange, 0, unnestMapOp, 0);
}
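getKeyIndexes(...) is used above but not shown in this excerpt. A plausible sketch, assuming it simply maps each variable to its column in the first input schema (the project's actual implementation may differ):

// Assumed helper: resolve each logical variable to its position in the input schema.
private static int[] getKeyIndexes(List<LogicalVariable> varList, IOperatorSchema[] inputSchemas) {
    if (varList == null) {
        return null;
    }
    int[] indexes = new int[varList.size()];
    for (int i = 0; i < varList.size(); i++) {
        indexes[i] = inputSchemas[0].findVariable(varList.get(i));
    }
    return indexes;
}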
use of org.apache.asterix.metadata.MetadataException in project asterixdb by apache.
the class MetadataProvider method buildRtreeRuntime.
public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildRtreeRuntime(JobSpecification jobSpec,
        List<LogicalVariable> outputVars, IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv,
        JobGenContext context, boolean retainInput, boolean retainMissing, Dataset dataset, String indexName,
        int[] keyFields, int[] minFilterFieldIndexes, int[] maxFilterFieldIndexes) throws AlgebricksException {
    try {
        int numPrimaryKeys = dataset.getPrimaryKeys().size();
        Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
                dataset.getDatasetName(), indexName);
        if (secondaryIndex == null) {
            throw new AlgebricksException(
                    "Code generation error: no index " + indexName + " for dataset " + dataset.getDatasetName());
        }
        RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc =
                getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
        int[] primaryKeyFields = new int[numPrimaryKeys];
        for (int i = 0; i < numPrimaryKeys; i++) {
            primaryKeyFields[i] = i;
        }
        ISearchOperationCallbackFactory searchCallbackFactory = dataset.getSearchCallbackFactory(
                storaegComponentProvider, secondaryIndex, jobId, IndexOperation.SEARCH, primaryKeyFields);
        RTreeSearchOperatorDescriptor rtreeSearchOp;
        IIndexDataflowHelperFactory indexDataflowHelperFactory =
                new IndexDataflowHelperFactory(storaegComponentProvider.getStorageManager(), spPc.first);
        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
            rtreeSearchOp = new RTreeSearchOperatorDescriptor(jobSpec, outputRecDesc, keyFields, true, true,
                    indexDataflowHelperFactory, retainInput, retainMissing, context.getMissingWriterFactory(),
                    searchCallbackFactory, minFilterFieldIndexes, maxFilterFieldIndexes, false);
        } else {
            // Create the operator for an external dataset, pinning the dataset version for this query.
            rtreeSearchOp = new ExternalRTreeSearchOperatorDescriptor(jobSpec, outputRecDesc, keyFields, true,
                    true, indexDataflowHelperFactory, retainInput, retainMissing,
                    context.getMissingWriterFactory(), searchCallbackFactory, minFilterFieldIndexes,
                    maxFilterFieldIndexes, ExternalDatasetsRegistry.INSTANCE.getAndLockDatasetVersion(dataset, this));
        }
        return new Pair<>(rtreeSearchOp, spPc.second);
    } catch (MetadataException me) {
        throw new AlgebricksException(me);
    }
}
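For comparison with the inverted-index example above, a sketch of how a physical operator's contributeRuntimeOperator(...) might consume buildRtreeRuntime; the unnestMapOp and parameter variables are assumptions:

// Hedged sketch; mirrors the contribution pattern shown in InvertedIndexPOperator above.
Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> rtreeSearch =
        metadataProvider.buildRtreeRuntime(builder.getJobSpec(), outputVars, opSchema, typeEnv, context,
                retainInput, retainMissing, dataset, indexName, keyFields, minFilterFieldIndexes,
                maxFilterFieldIndexes);
builder.contributeHyracksOperator(unnestMapOp, rtreeSearch.first);
builder.contributeAlgebricksPartitionConstraint(rtreeSearch.first, rtreeSearch.second);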