Example 16 with MetadataException

Use of org.apache.asterix.metadata.MetadataException in project asterixdb by apache.

The class SubscribeFeedStatement, method initialize.

public void initialize(MetadataTransactionContext mdTxnCtx) throws MetadataException {
    this.query = new Query(false);
    EntityId sourceFeedId = connectionRequest.getReceivingFeedId();
    Feed subscriberFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, connectionRequest.getReceivingFeedId().getDataverse(), connectionRequest.getReceivingFeedId().getEntityName());
    if (subscriberFeed == null) {
        // report the feed id we looked up, not the null reference itself
        throw new IllegalStateException("Subscriber feed " + connectionRequest.getReceivingFeedId() + " not found.");
    }
    String feedOutputType = getOutputType(mdTxnCtx);
    StringBuilder builder = new StringBuilder();
    builder.append("use dataverse " + sourceFeedId.getDataverse() + ";\n");
    builder.append("set" + " " + FunctionUtil.IMPORT_PRIVATE_FUNCTIONS + " " + "'" + Boolean.TRUE + "'" + ";\n");
    builder.append("set" + " " + FeedActivityDetails.FEED_POLICY_NAME + " " + "'" + connectionRequest.getPolicy() + "'" + ";\n");
    builder.append("insert into dataset " + connectionRequest.getTargetDataset() + " ");
    builder.append(" (" + " for $x in feed-collect ('" + sourceFeedId.getDataverse() + "'" + "," + "'" + sourceFeedId.getEntityName() + "'" + "," + "'" + connectionRequest.getReceivingFeedId().getEntityName() + "'" + "," + "'" + connectionRequest.getSubscriptionLocation().name() + "'" + "," + "'" + connectionRequest.getTargetDataset() + "'" + "," + "'" + feedOutputType + "'" + ")");
    List<FunctionSignature> functionsToApply = connectionRequest.getFunctionsToApply();
    // guard against a null list as well; the loop below would otherwise throw an NPE
    if (functionsToApply == null || functionsToApply.isEmpty()) {
        builder.append(" return $x");
    } else {
        Function function;
        String rValueName = "x";
        String lValueName = "y";
        int variableIndex = 0;
        for (FunctionSignature appliedFunction : functionsToApply) {
            function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
            variableIndex++;
            switch (function.getLanguage().toUpperCase()) {
                case Function.LANGUAGE_AQL:
                case Function.LANGUAGE_JAVA:
                    // AQL and Java functions are both applied through the same let binding
                    builder.append(" let " + "$" + lValueName + variableIndex + ":=" + function.getName() + "(" + "$" + rValueName + ")");
                    rValueName = lValueName + variableIndex;
                    break;
            }
            builder.append("\n");
        }
        builder.append("return $" + lValueName + variableIndex);
    }
    builder.append(")");
    builder.append(";");
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Connect feed statement translated to\n" + builder.toString());
    }
    IParser parser = parserFactory.createParser(new StringReader(builder.toString()));
    List<Statement> statements;
    try {
        statements = parser.parse();
        query = ((InsertStatement) statements.get(INSERT_STATEMENT_POS)).getQuery();
    } catch (CompilationException pe) {
        throw new MetadataException(pe);
    }
}
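
For context, here is a minimal sketch of how this method is typically driven, assuming stmt is an already-constructed SubscribeFeedStatement and using the standard MetadataManager transaction entry points (exception handling simplified):

MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
try {
    // translate the connection request into an insert-from-feed query
    stmt.initialize(mdTxnCtx);
    // the generated query is now available for compilation
    Query subscriptionQuery = stmt.getQuery();
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (MetadataException e) {
    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
    throw e;
}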
Also used: CompilationException (org.apache.asterix.common.exceptions.CompilationException), Query (org.apache.asterix.lang.common.statement.Query), InsertStatement (org.apache.asterix.lang.common.statement.InsertStatement), Statement (org.apache.asterix.lang.common.base.Statement), FunctionSignature (org.apache.asterix.common.functions.FunctionSignature), MetadataException (org.apache.asterix.metadata.MetadataException), EntityId (org.apache.asterix.active.EntityId), Function (org.apache.asterix.metadata.entities.Function), StringReader (java.io.StringReader), Feed (org.apache.asterix.metadata.entities.Feed), IParser (org.apache.asterix.lang.common.base.IParser)

Example 17 with MetadataException

Use of org.apache.asterix.metadata.MetadataException in project asterixdb by apache.

The class DatasetUtil, method createPrimaryIndexUpsertOp.

/**
     * Creates a primary index upsert operator for a given dataset.
     *
     * @param spec
     *            the job specification.
     * @param metadataProvider
     *            the metadata provider.
     * @param dataset
     *            the dataset to upsert.
     * @param inputRecordDesc
     *            the record descriptor for an input tuple.
     * @param fieldPermutation
     *            the field permutation according to the input.
     * @param missingWriterFactory
     *            the factory for customizing missing value serialization.
     * @return a primary index upsert operator and its location constraints.
     * @throws AlgebricksException
     */
public static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> createPrimaryIndexUpsertOp(JobSpecification spec, MetadataProvider metadataProvider, Dataset dataset, RecordDescriptor inputRecordDesc, int[] fieldPermutation, IMissingWriterFactory missingWriterFactory) throws AlgebricksException {
    int numKeys = dataset.getPrimaryKeys().size();
    int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
    ARecordType itemType = (ARecordType) metadataProvider.findType(dataset);
    ARecordType metaItemType = (ARecordType) metadataProvider.findMetaType(dataset);
    try {
        Index primaryIndex = metadataProvider.getIndex(dataset.getDataverseName(), dataset.getDatasetName(), dataset.getDatasetName());
        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider.getSplitProviderAndConstraints(dataset);
        // prepare callback
        JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
        int[] primaryKeyFields = new int[numKeys];
        for (int i = 0; i < numKeys; i++) {
            primaryKeyFields[i] = i;
        }
        boolean hasSecondaries = metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName()).size() > 1;
        IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
        IModificationOperationCallbackFactory modificationCallbackFactory = dataset.getModificationCallbackFactory(storageComponentProvider, primaryIndex, jobId, IndexOperation.UPSERT, primaryKeyFields);
        ISearchOperationCallbackFactory searchCallbackFactory = dataset.getSearchCallbackFactory(storageComponentProvider, primaryIndex, jobId, IndexOperation.UPSERT, primaryKeyFields);
        IIndexDataflowHelperFactory idfh = new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), splitsAndConstraint.first);
        LSMPrimaryUpsertOperatorDescriptor op;
        ITypeTraits[] outputTypeTraits = new ITypeTraits[inputRecordDesc.getFieldCount() + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
        ISerializerDeserializer<?>[] outputSerDes = new ISerializerDeserializer[inputRecordDesc.getFieldCount() + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
        // add the previous record first
        int f = 0;
        outputSerDes[f] = FormatUtils.getDefaultFormat().getSerdeProvider().getSerializerDeserializer(itemType);
        // also set the type trait for the previous record, mirroring the meta part below
        outputTypeTraits[f] = FormatUtils.getDefaultFormat().getTypeTraitProvider().getTypeTrait(itemType);
        f++;
        // add the previous meta second
        if (dataset.hasMetaPart()) {
            outputSerDes[f] = FormatUtils.getDefaultFormat().getSerdeProvider().getSerializerDeserializer(metaItemType);
            outputTypeTraits[f] = FormatUtils.getDefaultFormat().getTypeTraitProvider().getTypeTrait(metaItemType);
            f++;
        }
        // add the previous filter third
        int fieldIdx = -1;
        if (numFilterFields > 0) {
            String filterField = DatasetUtil.getFilterField(dataset).get(0);
            String[] fieldNames = itemType.getFieldNames();
            int i = 0;
            for (; i < fieldNames.length; i++) {
                if (fieldNames[i].equals(filterField)) {
                    break;
                }
            }
            fieldIdx = i;
            outputTypeTraits[f] = FormatUtils.getDefaultFormat().getTypeTraitProvider().getTypeTrait(itemType.getFieldTypes()[fieldIdx]);
            outputSerDes[f] = FormatUtils.getDefaultFormat().getSerdeProvider().getSerializerDeserializer(itemType.getFieldTypes()[fieldIdx]);
            f++;
        }
        for (int j = 0; j < inputRecordDesc.getFieldCount(); j++) {
            outputTypeTraits[j + f] = inputRecordDesc.getTypeTraits()[j];
            outputSerDes[j + f] = inputRecordDesc.getFields()[j];
        }
        RecordDescriptor outputRecordDesc = new RecordDescriptor(outputSerDes, outputTypeTraits);
        op = new LSMPrimaryUpsertOperatorDescriptor(spec, outputRecordDesc, fieldPermutation, idfh, missingWriterFactory, modificationCallbackFactory, searchCallbackFactory, dataset.getFrameOpCallbackFactory(), numKeys, itemType, fieldIdx, hasSecondaries);
        return new Pair<>(op, splitsAndConstraint.second);
    } catch (MetadataException me) {
        throw new AlgebricksException(me);
    }
}
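
A hedged usage sketch: callers typically wire the returned operator into the job together with its partition constraint, roughly as below (the variables spec, metadataProvider, dataset, inputRecordDesc, fieldPermutation, and missingWriterFactory are assumed to already be in scope):

Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> upsert =
        DatasetUtil.createPrimaryIndexUpsertOp(spec, metadataProvider, dataset,
                inputRecordDesc, fieldPermutation, missingWriterFactory);
// apply the location constraint so the upsert runs on the dataset's partitions
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec,
        upsert.first, upsert.second);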
Also used: LSMPrimaryUpsertOperatorDescriptor (org.apache.asterix.runtime.operators.LSMPrimaryUpsertOperatorDescriptor), IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider), RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), Index (org.apache.asterix.metadata.entities.Index), AMutableString (org.apache.asterix.om.base.AMutableString), AString (org.apache.asterix.om.base.AString), MetadataException (org.apache.asterix.metadata.MetadataException), AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint), IIndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory), IndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory), JobId (org.apache.asterix.common.transactions.JobId), Pair (org.apache.hyracks.algebricks.common.utils.Pair), IStorageComponentProvider (org.apache.asterix.common.context.IStorageComponentProvider), ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), JobEventListenerFactory (org.apache.asterix.runtime.job.listener.JobEventListenerFactory), ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer), ISearchOperationCallbackFactory (org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory), IModificationOperationCallbackFactory (org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory), ARecordType (org.apache.asterix.om.types.ARecordType)

Example 18 with MetadataException

Use of org.apache.asterix.metadata.MetadataException in project asterixdb by apache.

The class InvertedIndexPOperator, method contributeRuntimeOperator.

@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException {
    AbstractUnnestMapOperator unnestMapOp = (AbstractUnnestMapOperator) op;
    ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
    if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
        throw new IllegalStateException("Expected a function-call expression under the unnest-map operator.");
    }
    AbstractFunctionCallExpression unnestFuncExpr = (AbstractFunctionCallExpression) unnestExpr;
    if (unnestFuncExpr.getFunctionIdentifier() != BuiltinFunctions.INDEX_SEARCH) {
        return;
    }
    InvertedIndexJobGenParams jobGenParams = new InvertedIndexJobGenParams();
    jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments());
    MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
    Dataset dataset;
    try {
        dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
    } catch (MetadataException e) {
        throw new AlgebricksException(e);
    }
    int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
    int[] minFilterFieldIndexes = getKeyIndexes(unnestMapOp.getMinFilterVars(), inputSchemas);
    int[] maxFilterFieldIndexes = getKeyIndexes(unnestMapOp.getMaxFilterVars(), inputSchemas);
    boolean retainNull = false;
    if (op.getOperatorTag() == LogicalOperatorTag.LEFT_OUTER_UNNEST_MAP) {
        // By nature, LEFT_OUTER_UNNEST_MAP should generate null values for non-matching tuples.
        retainNull = true;
    }
    // Build runtime.
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> invIndexSearch = buildInvertedIndexRuntime(metadataProvider, context, builder.getJobSpec(), unnestMapOp, opSchema, jobGenParams.getRetainInput(), retainNull, jobGenParams.getDatasetName(), dataset, jobGenParams.getIndexName(), jobGenParams.getSearchKeyType(), keyIndexes, jobGenParams.getSearchModifierType(), jobGenParams.getSimilarityThreshold(), minFilterFieldIndexes, maxFilterFieldIndexes, jobGenParams.getIsFullTextSearch());
    // Contribute operator in hyracks job.
    builder.contributeHyracksOperator(unnestMapOp, invIndexSearch.first);
    builder.contributeAlgebricksPartitionConstraint(invIndexSearch.first, invIndexSearch.second);
    ILogicalOperator srcExchange = unnestMapOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(srcExchange, 0, unnestMapOp, 0);
}
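
The getKeyIndexes helper used above is not shown in this excerpt; an illustrative version (a sketch, not necessarily the exact project source) maps each logical variable to its column position in the first input schema via IOperatorSchema.findVariable:

private static int[] getKeyIndexes(List<LogicalVariable> keyVarList, IOperatorSchema[] inputSchemas) {
    if (keyVarList == null) {
        // the min/max filter variable lists may be absent
        return null;
    }
    int[] keyIndexes = new int[keyVarList.size()];
    for (int i = 0; i < keyVarList.size(); i++) {
        // findVariable returns the variable's position in the schema
        keyIndexes[i] = inputSchemas[0].findVariable(keyVarList.get(i));
    }
    return keyIndexes;
}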
Also used: Dataset (org.apache.asterix.metadata.entities.Dataset), AbstractFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), MetadataException (org.apache.asterix.metadata.MetadataException), InvertedIndexJobGenParams (org.apache.asterix.optimizer.rules.am.InvertedIndexJobGenParams), ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression), MetadataProvider (org.apache.asterix.metadata.declared.MetadataProvider), IOperatorDescriptor (org.apache.hyracks.api.dataflow.IOperatorDescriptor), AbstractUnnestMapOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractUnnestMapOperator), AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint)

Example 19 with MetadataException

Use of org.apache.asterix.metadata.MetadataException in project asterixdb by apache.

The class MetadataProvider, method buildRtreeRuntime.

public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildRtreeRuntime(JobSpecification jobSpec, List<LogicalVariable> outputVars, IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv, JobGenContext context, boolean retainInput, boolean retainMissing, Dataset dataset, String indexName, int[] keyFields, int[] minFilterFieldIndexes, int[] maxFilterFieldIndexes) throws AlgebricksException {
    try {
        int numPrimaryKeys = dataset.getPrimaryKeys().size();
        Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(), dataset.getDatasetName(), indexName);
        if (secondaryIndex == null) {
            throw new AlgebricksException("Code generation error: no index " + indexName + " for dataset " + dataset.getDatasetName());
        }
        RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc = getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
        int[] primaryKeyFields = new int[numPrimaryKeys];
        for (int i = 0; i < numPrimaryKeys; i++) {
            primaryKeyFields[i] = i;
        }
        ISearchOperationCallbackFactory searchCallbackFactory = dataset.getSearchCallbackFactory(storaegComponentProvider, secondaryIndex, jobId, IndexOperation.SEARCH, primaryKeyFields);
        RTreeSearchOperatorDescriptor rtreeSearchOp;
        IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(storaegComponentProvider.getStorageManager(), spPc.first);
        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
            rtreeSearchOp = new RTreeSearchOperatorDescriptor(jobSpec, outputRecDesc, keyFields, true, true, indexDataflowHelperFactory, retainInput, retainMissing, context.getMissingWriterFactory(), searchCallbackFactory, minFilterFieldIndexes, maxFilterFieldIndexes, false);
        } else {
            // Create the operator
            rtreeSearchOp = new ExternalRTreeSearchOperatorDescriptor(jobSpec, outputRecDesc, keyFields, true, true, indexDataflowHelperFactory, retainInput, retainMissing, context.getMissingWriterFactory(), searchCallbackFactory, minFilterFieldIndexes, maxFilterFieldIndexes, ExternalDatasetsRegistry.INSTANCE.getAndLockDatasetVersion(dataset, this));
        }
        return new Pair<>(rtreeSearchOp, spPc.second);
    } catch (MetadataException me) {
        throw new AlgebricksException(me);
    }
}
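
A hedged sketch of how a physical operator typically consumes this pair during job generation, mirroring the inverted-index example above (builder, unnestMapOp, and the argument values are assumed to be in scope):

Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> rtreeSearch =
        metadataProvider.buildRtreeRuntime(builder.getJobSpec(), outputVars, opSchema,
                typeEnv, context, retainInput, retainMissing, dataset, indexName,
                keyFields, minFilterFieldIndexes, maxFilterFieldIndexes);
// register the operator and pin it to the index's partitions
builder.contributeHyracksOperator(unnestMapOp, rtreeSearch.first);
builder.contributeAlgebricksPartitionConstraint(rtreeSearch.first, rtreeSearch.second);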
Also used: RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), ExternalRTreeSearchOperatorDescriptor (org.apache.asterix.external.operators.ExternalRTreeSearchOperatorDescriptor), Index (org.apache.asterix.metadata.entities.Index), IDataSourceIndex (org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex), AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint), DatasetCardinalityHint (org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetCardinalityHint), AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint), MetadataException (org.apache.asterix.metadata.MetadataException), ISearchOperationCallbackFactory (org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory), IIndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory), RTreeSearchOperatorDescriptor (org.apache.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor), IndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory), Pair (org.apache.hyracks.algebricks.common.utils.Pair)

Aggregations

MetadataException (org.apache.asterix.metadata.MetadataException)19 AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)12 Index (org.apache.asterix.metadata.entities.Index)9 AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint)8 Dataset (org.apache.asterix.metadata.entities.Dataset)7 Pair (org.apache.hyracks.algebricks.common.utils.Pair)7 IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider)7 IIndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory)7 IndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory)7 RemoteException (java.rmi.RemoteException)6 IDataSourceIndex (org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex)6 DatasetCardinalityHint (org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetCardinalityHint)5 ACIDException (org.apache.asterix.common.exceptions.ACIDException)4 MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext)4 ARecordType (org.apache.asterix.om.types.ARecordType)4 AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint)4 LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable)4 RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor)4 IAType (org.apache.asterix.om.types.IAType)3 JobEventListenerFactory (org.apache.asterix.runtime.job.listener.JobEventListenerFactory)3