
Example 66 with AlgebricksException

Use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.

From class SetAsterixPhysicalOperatorsRule, method computeDefaultPhysicalOp.

private static void computeDefaultPhysicalOp(AbstractLogicalOperator op, IOptimizationContext context) throws AlgebricksException {
    PhysicalOptimizationConfig physicalOptimizationConfig = context.getPhysicalOptimizationConfig();
    if (op.getOperatorTag().equals(LogicalOperatorTag.GROUP)) {
        GroupByOperator gby = (GroupByOperator) op;
        if (gby.getNestedPlans().size() == 1) {
            ILogicalPlan p0 = gby.getNestedPlans().get(0);
            if (p0.getRoots().size() == 1) {
                Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
                if (((AbstractLogicalOperator) (r0.getValue())).getOperatorTag().equals(LogicalOperatorTag.AGGREGATE)) {
                    AggregateOperator aggOp = (AggregateOperator) r0.getValue();
                    boolean serializable = true;
                    for (Mutable<ILogicalExpression> exprRef : aggOp.getExpressions()) {
                        AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) exprRef.getValue();
                        if (!BuiltinFunctions.isAggregateFunctionSerializable(expr.getFunctionIdentifier())) {
                            serializable = false;
                            break;
                        }
                    }
                    if ((gby.getAnnotations().get(OperatorAnnotations.USE_HASH_GROUP_BY) == Boolean.TRUE || gby.getAnnotations().get(OperatorAnnotations.USE_EXTERNAL_GROUP_BY) == Boolean.TRUE)) {
                        boolean setToExternalGby = false;
                        if (serializable) {
                            // If serializable, try the external group-by.
                            // Check whether each serialized aggregation function has a corresponding intermediate aggregate.
                            boolean hasIntermediateAgg = true;
                            IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context.getMergeAggregationExpressionFactory();
                            List<LogicalVariable> originalVariables = aggOp.getVariables();
                            List<Mutable<ILogicalExpression>> aggExprs = aggOp.getExpressions();
                            int aggNum = aggExprs.size();
                            for (int i = 0; i < aggNum; i++) {
                                AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) aggExprs.get(i).getValue();
                                AggregateFunctionCallExpression serialAggExpr = BuiltinFunctions.makeSerializableAggregateFunctionExpression(expr.getFunctionIdentifier(), expr.getArguments());
                                if (mergeAggregationExpressionFactory.createMergeAggregation(originalVariables.get(i), serialAggExpr, context) == null) {
                                    hasIntermediateAgg = false;
                                    break;
                                }
                            }
                            // Check whether there are multiple aggregates in the sub plan.
                            // Currently, we don't support multiple aggregates in one external group-by.
                            boolean multipleAggOpsFound = false;
                            ILogicalOperator r1Logical = aggOp;
                            while (r1Logical.hasInputs()) {
                                r1Logical = r1Logical.getInputs().get(0).getValue();
                                if (r1Logical.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
                                    multipleAggOpsFound = true;
                                    break;
                                }
                            }
                            if (hasIntermediateAgg && !multipleAggOpsFound) {
                                for (int i = 0; i < aggNum; i++) {
                                    AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) aggExprs.get(i).getValue();
                                    AggregateFunctionCallExpression serialAggExpr = BuiltinFunctions.makeSerializableAggregateFunctionExpression(expr.getFunctionIdentifier(), expr.getArguments());
                                    aggOp.getExpressions().get(i).setValue(serialAggExpr);
                                }
                                ExternalGroupByPOperator externalGby = new ExternalGroupByPOperator(gby.getGroupByList(), physicalOptimizationConfig.getMaxFramesExternalGroupBy(), (long) physicalOptimizationConfig.getMaxFramesExternalGroupBy() * physicalOptimizationConfig.getFrameSize());
                                generateMergeAggregationExpressions(gby, context);
                                op.setPhysicalOperator(externalGby);
                                setToExternalGby = true;
                            }
                        }
                        if (!setToExternalGby) {
                            // if not serializable or no intermediate agg, use pre-clustered group-by
                            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> gbyList = gby.getGroupByList();
                            List<LogicalVariable> columnList = new ArrayList<LogicalVariable>(gbyList.size());
                            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gbyList) {
                                ILogicalExpression expr = p.second.getValue();
                                if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
                                    VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
                                    columnList.add(varRef.getVariableReference());
                                }
                            }
                            op.setPhysicalOperator(new PreclusteredGroupByPOperator(columnList, gby.isGroupAll()));
                        }
                    }
                } else if (((AbstractLogicalOperator) (r0.getValue())).getOperatorTag().equals(LogicalOperatorTag.RUNNINGAGGREGATE)) {
                    List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> gbyList = gby.getGroupByList();
                    List<LogicalVariable> columnList = new ArrayList<LogicalVariable>(gbyList.size());
                    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gbyList) {
                        ILogicalExpression expr = p.second.getValue();
                        if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
                            VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
                            columnList.add(varRef.getVariableReference());
                        }
                    }
                    op.setPhysicalOperator(new PreclusteredGroupByPOperator(columnList, gby.isGroupAll()));
                } else {
                    throw new AlgebricksException("Unsupported nested operator within a group-by: " + ((AbstractLogicalOperator) (r0.getValue())).getOperatorTag().name());
                }
            }
        }
    }
    if (op.getPhysicalOperator() == null) {
        switch(op.getOperatorTag()) {
            case INNERJOIN:
                {
                    JoinUtils.setJoinAlgorithmAndExchangeAlgo((InnerJoinOperator) op, context);
                    break;
                }
            case LEFTOUTERJOIN:
                {
                    JoinUtils.setJoinAlgorithmAndExchangeAlgo((LeftOuterJoinOperator) op, context);
                    break;
                }
            case UNNEST_MAP:
            case LEFT_OUTER_UNNEST_MAP:
                {
                    ILogicalExpression unnestExpr = ((AbstractUnnestMapOperator) op).getExpressionRef().getValue();
                    if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
                        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
                        FunctionIdentifier fid = f.getFunctionIdentifier();
                        if (!fid.equals(BuiltinFunctions.INDEX_SEARCH)) {
                            throw new IllegalStateException();
                        }
                        AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
                        jobGenParams.readFromFuncArgs(f.getArguments());
                        MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
                        DataSourceId dataSourceId = new DataSourceId(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
                        Dataset dataset = mp.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
                        IDataSourceIndex<String, DataSourceId> dsi = mp.findDataSourceIndex(jobGenParams.getIndexName(), dataSourceId);
                        INodeDomain storageDomain = mp.findNodeDomain(dataset.getNodeGroupName());
                        if (dsi == null) {
                            throw new AlgebricksException("Could not find index " + jobGenParams.getIndexName() + " for dataset " + dataSourceId);
                        }
                        IndexType indexType = jobGenParams.getIndexType();
                        boolean requiresBroadcast = jobGenParams.getRequiresBroadcast();
                        switch(indexType) {
                            case BTREE:
                                {
                                    BTreeJobGenParams btreeJobGenParams = new BTreeJobGenParams();
                                    btreeJobGenParams.readFromFuncArgs(f.getArguments());
                                    op.setPhysicalOperator(new BTreeSearchPOperator(dsi, storageDomain, requiresBroadcast, btreeJobGenParams.isPrimaryIndex(), btreeJobGenParams.isEqCondition(), btreeJobGenParams.getLowKeyVarList(), btreeJobGenParams.getHighKeyVarList()));
                                    break;
                                }
                            case RTREE:
                                {
                                    op.setPhysicalOperator(new RTreeSearchPOperator(dsi, storageDomain, requiresBroadcast));
                                    break;
                                }
                            case SINGLE_PARTITION_WORD_INVIX:
                            case SINGLE_PARTITION_NGRAM_INVIX:
                                {
                                    op.setPhysicalOperator(new InvertedIndexPOperator(dsi, storageDomain, requiresBroadcast, false));
                                    break;
                                }
                            case LENGTH_PARTITIONED_WORD_INVIX:
                            case LENGTH_PARTITIONED_NGRAM_INVIX:
                                {
                                    op.setPhysicalOperator(new InvertedIndexPOperator(dsi, storageDomain, requiresBroadcast, true));
                                    break;
                                }
                            default:
                                {
                                    throw new NotImplementedException(indexType + " indexes are not implemented.");
                                }
                        }
                    }
                    break;
                }
        }
    }
    if (op.hasNestedPlans()) {
        AbstractOperatorWithNestedPlans nested = (AbstractOperatorWithNestedPlans) op;
        for (ILogicalPlan p : nested.getNestedPlans()) {
            setPhysicalOperators(p, context);
        }
    }
    for (Mutable<ILogicalOperator> opRef : op.getInputs()) {
        computeDefaultPhysicalOp((AbstractLogicalOperator) opRef.getValue(), context);
    }
}
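
The group-by branch above reduces to a single decision: when the USE_HASH_GROUP_BY or USE_EXTERNAL_GROUP_BY hint is present, ExternalGroupByPOperator is chosen only if every aggregate in the nested plan is serializable, each serialized aggregate has a corresponding intermediate (merge) aggregate, and the sub-plan contains exactly one AGGREGATE operator; otherwise the rule falls back to PreclusteredGroupByPOperator. The stand-alone sketch below mirrors only that control flow; the class, enum, and parameter names are invented for illustration and are not part of Algebricks.

// Hypothetical sketch of the physical group-by choice made above; not Algebricks API.
final class GroupByPhysicalChoice {

    enum Choice { EXTERNAL_GROUP_BY, PRE_CLUSTERED_GROUP_BY }

    // Mirrors the branch guarded by the USE_HASH_GROUP_BY / USE_EXTERNAL_GROUP_BY hints.
    static Choice choose(boolean serializable, boolean hasIntermediateAgg, boolean multipleAggOpsFound) {
        if (serializable && hasIntermediateAgg && !multipleAggOpsFound) {
            return Choice.EXTERNAL_GROUP_BY;
        }
        // Not serializable, no merge aggregate, or more than one aggregate in the sub-plan:
        // fall back to the pre-clustered (sort-based) group-by.
        return Choice.PRE_CLUSTERED_GROUP_BY;
    }

    public static void main(String[] args) {
        System.out.println(choose(true, true, false));  // EXTERNAL_GROUP_BY
        System.out.println(choose(true, false, false)); // PRE_CLUSTERED_GROUP_BY
    }
}
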
Also used: IMergeAggregationExpressionFactory(org.apache.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory), PreclusteredGroupByPOperator(org.apache.hyracks.algebricks.core.algebra.operators.physical.PreclusteredGroupByPOperator), NotImplementedException(org.apache.hyracks.algebricks.common.exceptions.NotImplementedException), ArrayList(java.util.ArrayList), LeftOuterJoinOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator), InnerJoinOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator), AccessMethodJobGenParams(org.apache.asterix.optimizer.rules.am.AccessMethodJobGenParams), AggregateOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator), IDataSourceIndex(org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex), List(java.util.List), IndexType(org.apache.asterix.common.config.DatasetConfig.IndexType), PhysicalOptimizationConfig(org.apache.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig), Pair(org.apache.hyracks.algebricks.common.utils.Pair), AbstractOperatorWithNestedPlans(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans), LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), AggregateFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression), GroupByOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator), AbstractLogicalOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator), RTreeSearchPOperator(org.apache.asterix.algebra.operators.physical.RTreeSearchPOperator), Dataset(org.apache.asterix.metadata.entities.Dataset), ILogicalOperator(org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), AbstractFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression), BTreeJobGenParams(org.apache.asterix.optimizer.rules.am.BTreeJobGenParams), AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), INodeDomain(org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain), InvertedIndexPOperator(org.apache.asterix.algebra.operators.physical.InvertedIndexPOperator), BTreeSearchPOperator(org.apache.asterix.algebra.operators.physical.BTreeSearchPOperator), Mutable(org.apache.commons.lang3.mutable.Mutable), FunctionIdentifier(org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier), ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression), MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider), VariableReferenceExpression(org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression), ILogicalPlan(org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan), DataSourceId(org.apache.asterix.metadata.declared.DataSourceId), ExternalGroupByPOperator(org.apache.hyracks.algebricks.core.algebra.operators.physical.ExternalGroupByPOperator)

Example 67 with AlgebricksException

Use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.

From class IntroduceLSMComponentFilterRule, method getDataset.

private Dataset getDataset(AbstractLogicalOperator op, IOptimizationContext context) throws AlgebricksException {
    AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
    while (descendantOp != null) {
        if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
            DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
            DataSource ds = (DataSource) dataSourceScanOp.getDataSource();
            if (ds.getDatasourceType() != DataSource.Type.INTERNAL_DATASET) {
                return null;
            }
            return ((DatasetDataSource) ds).getDataset();
        } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
            UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
            ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
            if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
                FunctionIdentifier fid = f.getFunctionIdentifier();
                String dataverseName;
                String datasetName;
                if (BuiltinFunctions.EXTERNAL_LOOKUP.equals(fid)) {
                    dataverseName = AccessMethodUtils.getStringConstant(f.getArguments().get(0));
                    datasetName = AccessMethodUtils.getStringConstant(f.getArguments().get(1));
                } else if (fid.equals(BuiltinFunctions.INDEX_SEARCH)) {
                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
                    jobGenParams.readFromFuncArgs(f.getArguments());
                    dataverseName = jobGenParams.dataverseName;
                    datasetName = jobGenParams.datasetName;
                } else {
                    throw new AlgebricksException("Unexpected function for Unnest Map: " + fid);
                }
                return ((MetadataProvider) context.getMetadataProvider()).findDataset(dataverseName, datasetName);
            }
        }
        if (descendantOp.getInputs().isEmpty()) {
            break;
        }
        descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
    }
    return null;
}
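
The traversal pattern used by getDataset, following input 0 of each operator until a DATASOURCESCAN or a qualifying UNNEST_MAP (or a leaf) is reached, recurs in many rewrite rules. Below is a minimal sketch of that walk in isolation, using the ILogicalOperator interface already shown above plus a caller-supplied predicate; the helper class itself is hypothetical and not part of the AsterixDB code base.

import java.util.function.Predicate;

import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;

// Hypothetical helper that isolates the "descend along input 0" walk used by getDataset().
final class OperatorChainWalker {

    static ILogicalOperator findOnPrimaryInputChain(ILogicalOperator start, Predicate<ILogicalOperator> matches) {
        ILogicalOperator current = start;
        while (current != null) {
            if (matches.test(current)) {
                return current;
            }
            if (current.getInputs().isEmpty()) {
                return null; // reached a leaf operator without a match
            }
            // As in getDataset(), only the first (index 0) input is followed.
            current = current.getInputs().get(0).getValue();
        }
        return null;
    }
}

getDataset is effectively this walk with a predicate matching DATASOURCESCAN or an UNNEST_MAP over an external-lookup or index-search call, followed by the metadata lookup once a match is found.
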
Also used: FunctionIdentifier(org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier), ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression), AbstractLogicalOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator), DataSourceScanOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator), UnnestMapOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator), AbstractFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression), AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), DatasetDataSource(org.apache.asterix.metadata.declared.DatasetDataSource), DataSource(org.apache.asterix.metadata.declared.DataSource)

Example 68 with AlgebricksException

Use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.

From class InvertedIndexAccessMethod, method isEditDistanceFuncSelectOptimizable.

private boolean isEditDistanceFuncSelectOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) throws AlgebricksException {
    // Check for panic in selection query.
    // TODO: Panic also depends on prePost which is currently hardcoded to be true.
    AsterixConstantValue listOrStrConstVal = (AsterixConstantValue) ((ConstantExpression) optFuncExpr.getConstantExpr(0)).getValue();
    IAObject listOrStrObj = listOrStrConstVal.getObject();
    ATypeTag typeTag = listOrStrObj.getType().getTypeTag();
    if (!isEditDistanceFuncCompatible(typeTag, index.getIndexType())) {
        return false;
    }
    AsterixConstantValue intConstVal = (AsterixConstantValue) ((ConstantExpression) optFuncExpr.getConstantExpr(1)).getValue();
    IAObject intObj = intConstVal.getObject();
    AInt32 edThresh = null;
    // Cast the numeric input to the INTEGER type, regardless of its original numeric type.
    try {
        edThresh = (AInt32) ATypeHierarchy.convertNumericTypeObject(intObj, ATypeTag.INTEGER);
    } catch (HyracksDataException e) {
        throw new AlgebricksException(e);
    }
    int mergeThreshold = 0;
    if (typeTag == ATypeTag.STRING) {
        AString astr = (AString) listOrStrObj;
        // Compute the merge threshold depending on whether the query grams include pre- and post-fixing.
        if (optFuncExpr.containsPartialField()) {
            mergeThreshold = (astr.getStringValue().length() - index.getGramLength() + 1) - edThresh.getIntegerValue() * index.getGramLength();
        } else {
            mergeThreshold = (astr.getStringValue().length() + index.getGramLength() - 1) - edThresh.getIntegerValue() * index.getGramLength();
        }
    }
    if ((typeTag == ATypeTag.ARRAY) && (index.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX || index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX)) {
        IACollection alist = (IACollection) listOrStrObj;
        // Compute merge threshold.
        mergeThreshold = alist.size() - edThresh.getIntegerValue();
    }
    if (mergeThreshold <= 0) {
        // We cannot use index to optimize expr.
        return false;
    }
    return true;
}
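
The merge-threshold arithmetic above decides whether the inverted index can still prune candidates under the given edit-distance bound: for an n-gram index on a string of length len with gram length q and threshold ed, it is (len - q + 1) - ed * q when the query matches a partial field (no pre-/post-fixing of grams) and (len + q - 1) - ed * q when the grams are pre- and post-fixed; for a keyword (word) index on a list it is listSize - ed. A result of zero or less is the panic case and the method returns false. A small stand-alone restatement of that arithmetic follows; the names are illustrative only, not AsterixDB API.

// Illustrative restatement of the merge-threshold computation above; not AsterixDB API.
final class EditDistanceMergeThreshold {

    static int ngramThreshold(int stringLength, int gramLength, int edThresh, boolean partialField) {
        int numGrams = partialField
                ? stringLength - gramLength + 1   // partial field: grams are not pre-/post-fixed
                : stringLength + gramLength - 1;  // full field: pre- and post-fixed grams included
        return numGrams - edThresh * gramLength;
    }

    static int keywordThreshold(int listSize, int edThresh) {
        return listSize - edThresh;
    }

    public static void main(String[] args) {
        // 7-character string, 3-grams, ed <= 1: (7 + 3 - 1) - 1 * 3 = 6, so the index is usable.
        System.out.println(ngramThreshold(7, 3, 1, false));
        // Same string, ed <= 3: (7 + 3 - 1) - 3 * 3 = 0, the panic case; the index is not used.
        System.out.println(ngramThreshold(7, 3, 3, false));
    }
}
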
Also used: AsterixConstantValue(org.apache.asterix.om.constants.AsterixConstantValue), ATypeTag(org.apache.asterix.om.types.ATypeTag), IAObject(org.apache.asterix.om.base.IAObject), IACollection(org.apache.asterix.om.base.IACollection), AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), AString(org.apache.asterix.om.base.AString), AInt32(org.apache.asterix.om.base.AInt32), HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException)

Example 69 with AlgebricksException

Use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.

From class IntroduceSelectAccessMethodRule, method intersectAllSecondaryIndexes.

/**
     * Constructs all applicable secondary-index access paths in the given selection plan and
     * intersects them with an INTERSECT operator that feeds a common primary-index search.
     * When only one index is applicable, a single access path is constructed instead.
     */
private boolean intersectAllSecondaryIndexes(List<Pair<IAccessMethod, Index>> chosenIndexes, Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs, IOptimizationContext context) throws AlgebricksException {
    Pair<IAccessMethod, Index> chosenIndex = null;
    Optional<Pair<IAccessMethod, Index>> primaryIndex = chosenIndexes.stream().filter(pair -> pair.second.isPrimaryIndex()).findFirst();
    if (chosenIndexes.size() == 1) {
        chosenIndex = chosenIndexes.get(0);
    } else if (primaryIndex.isPresent()) {
        // A primary index is among the candidates; choose it directly over the secondary indexes.
        chosenIndex = primaryIndex.get();
    }
    if (chosenIndex != null) {
        AccessMethodAnalysisContext analysisCtx = analyzedAMs.get(chosenIndex.first);
        return chosenIndex.first.applySelectPlanTransformation(afterSelectRefs, selectRef, subTree, chosenIndex.second, analysisCtx, context);
    }
    // Intersect all secondary indexes, and postpone the primary index search.
    Mutable<ILogicalExpression> conditionRef = selectOp.getCondition();
    List<ILogicalOperator> subRoots = new ArrayList<>();
    for (Pair<IAccessMethod, Index> pair : chosenIndexes) {
        AccessMethodAnalysisContext analysisCtx = analyzedAMs.get(pair.first);
        subRoots.add(pair.first.createSecondaryToPrimaryPlan(conditionRef, subTree, null, pair.second, analysisCtx, AccessMethodUtils.retainInputs(subTree.getDataSourceVariables(), subTree.getDataSourceRef().getValue(), afterSelectRefs), false, subTree.getDataSourceRef().getValue().getInputs().get(0).getValue().getExecutionMode() == ExecutionMode.UNPARTITIONED, context));
    }
    // Connect each secondary index utilization plan to a common intersect operator.
    ILogicalOperator primaryUnnestOp = connectAll2ndarySearchPlanWithIntersect(subRoots, context);
    subTree.getDataSourceRef().setValue(primaryUnnestOp);
    return primaryUnnestOp != null;
}
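
The selection at the top of the method is worth isolating: a single applicable index is used directly, a primary index among several candidates wins outright, and only when all candidates are secondary indexes does the rule build one secondary-to-primary plan per index and connect them with an INTERSECT. The sketch below mirrors just that choice; the Candidate class is an invented stand-in for Pair<IAccessMethod, Index>, not a real AsterixDB type.

import java.util.List;
import java.util.Optional;

// Hypothetical stand-in for Pair<IAccessMethod, Index>; only the primary-index flag matters here.
final class Candidate {
    final String indexName;
    final boolean primaryIndex;

    Candidate(String indexName, boolean primaryIndex) {
        this.indexName = indexName;
        this.primaryIndex = primaryIndex;
    }
}

final class IndexChoice {

    // Returns the single index to apply, or Optional.empty() when all candidates are
    // secondary indexes and their access paths should be intersected.
    static Optional<Candidate> chooseSingleIndex(List<Candidate> chosenIndexes) {
        if (chosenIndexes.size() == 1) {
            return Optional.of(chosenIndexes.get(0));
        }
        return chosenIndexes.stream().filter(c -> c.primaryIndex).findFirst();
    }
}
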
Also used: OperatorPropertiesUtil(org.apache.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil), CommitOperator(org.apache.asterix.algebra.operators.CommitOperator), IOptimizationContext(org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext), HashMap(java.util.HashMap), LogicalExpressionTag(org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag), ArrayList(java.util.ArrayList), AbstractFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression), OrderOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator), Map(java.util.Map), Index(org.apache.asterix.metadata.entities.Index), MutableObject(org.apache.commons.lang3.mutable.MutableObject), DelegateOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator), IntersectOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator), LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), LogicalOperatorTag(org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag), ILogicalOperator(org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression), List(java.util.List), MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider), TreeMap(java.util.TreeMap), IVariableTypeEnvironment(org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment), AbstractLogicalOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator), SelectOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator), Mutable(org.apache.commons.lang3.mutable.Mutable), Optional(java.util.Optional), VariableReferenceExpression(org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression), Pair(org.apache.hyracks.algebricks.common.utils.Pair), ExecutionMode(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode), FunctionIdentifier(org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier)

Example 70 with AlgebricksException

Use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.

From class QueryTranslator, method handleConnectFeedStatement.

private void handleConnectFeedStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    FeedConnection fc;
    ConnectFeedStatement cfs = (ConnectFeedStatement) stmt;
    String dataverseName = getActiveDataverse(cfs.getDataverseName());
    String feedName = cfs.getFeedName();
    String datasetName = cfs.getDatasetName().getValue();
    String policyName = cfs.getPolicy();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // Check whether feed is alive
    ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
    ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
    if (activeEventHandler.getActiveEntityListener(new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName)) != null) {
        throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, feedName);
    }
    // Transaction handling
    MetadataLockManager.INSTANCE.connectFeedBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName, dataverseName + "." + feedName);
    try {
        // validation
        FeedMetadataUtil.validateIfDatasetExists(metadataProvider, dataverseName, datasetName, mdTxnCtx);
        Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName, metadataProvider.getMetadataTxnContext());
        ARecordType outputType = FeedMetadataUtil.getOutputType(feed, feed.getAdapterConfiguration(), ExternalDataConstants.KEY_TYPE_NAME);
        List<FunctionSignature> appliedFunctions = cfs.getAppliedFunctions();
        for (FunctionSignature func : appliedFunctions) {
            if (MetadataManager.INSTANCE.getFunction(mdTxnCtx, func) == null) {
                throw new CompilationException(ErrorCode.FEED_CONNECT_FEED_APPLIED_INVALID_FUNCTION, func.getName());
            }
        }
        fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(), dataverseName, feedName, datasetName);
        if (fc != null) {
            throw new AlgebricksException("Feed " + feedName + " is already connected to dataset " + datasetName);
        }
        fc = new FeedConnection(dataverseName, feedName, datasetName, appliedFunctions, policyName, outputType.toString());
        MetadataManager.INSTANCE.addFeedConnection(metadataProvider.getMetadataTxnContext(), fc);
        // Increase function reference count.
        for (FunctionSignature funcSig : appliedFunctions) {
            // The function should already be cached in the metadata manager, so this lookup is not expensive.
            Function func = MetadataManager.INSTANCE.getFunction(mdTxnCtx, funcSig);
            func.reference();
            MetadataManager.INSTANCE.updateFunction(mdTxnCtx, func);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
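
The handler follows the metadata-transaction discipline used throughout QueryTranslator: begin a transaction, acquire the metadata locks, validate and mutate inside the try block, commit at its end, abort and rethrow on any exception, and always release the locks in the finally block. The sketch below captures only that shape; the Txn and Locks interfaces are invented stand-ins for MetadataTransactionContext and the provider's lock list, not real AsterixDB types.

// Hypothetical interfaces standing in for MetadataTransactionContext and the lock list;
// only the begin / commit / abort / unlock shape of the handler above is mirrored.
interface Txn {
    void commit() throws Exception;
    void abort();
}

interface Locks {
    void unlock();
}

final class MetadataTxnTemplate {

    interface Body {
        void run(Txn txn) throws Exception;
    }

    static void runInMetadataTxn(Txn txn, Locks locks, Body body) throws Exception {
        try {
            body.run(txn); // validation and metadata mutations
            txn.commit();  // commit only if the body succeeded
        } catch (Exception e) {
            txn.abort();   // roll the metadata transaction back, then surface the error
            throw e;
        } finally {
            locks.unlock(); // locks are released whether or not the transaction committed
        }
    }
}
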
Also used: CompilationException(org.apache.asterix.common.exceptions.CompilationException), FeedConnection(org.apache.asterix.metadata.entities.FeedConnection), AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext), ConnectFeedStatement(org.apache.asterix.lang.common.statement.ConnectFeedStatement), FunctionSignature(org.apache.asterix.common.functions.FunctionSignature), ACIDException(org.apache.asterix.common.exceptions.ACIDException), MetadataException(org.apache.asterix.metadata.MetadataException), HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException), IOException(java.io.IOException), RemoteException(java.rmi.RemoteException), AsterixException(org.apache.asterix.common.exceptions.AsterixException), EntityId(org.apache.asterix.active.EntityId), Function(org.apache.asterix.metadata.entities.Function), ActiveLifecycleListener(org.apache.asterix.active.ActiveLifecycleListener), ActiveJobNotificationHandler(org.apache.asterix.active.ActiveJobNotificationHandler), ARecordType(org.apache.asterix.om.types.ARecordType), Feed(org.apache.asterix.metadata.entities.Feed)

Aggregations

AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 134
MetadataException (org.apache.asterix.metadata.MetadataException): 42
ArrayList (java.util.ArrayList): 39
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 39
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression): 38
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 37
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 36
IOException (java.io.IOException): 35
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 33
Dataset (org.apache.asterix.metadata.entities.Dataset): 31
IAType (org.apache.asterix.om.types.IAType): 31
Mutable (org.apache.commons.lang3.mutable.Mutable): 30
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator): 30
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 28
Index (org.apache.asterix.metadata.entities.Index): 26
RemoteException (java.rmi.RemoteException): 25
ACIDException (org.apache.asterix.common.exceptions.ACIDException): 24
AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint): 23
MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext): 22
AString (org.apache.asterix.om.base.AString): 21