Example 6 with Identifier

Use of org.apache.asterix.lang.common.struct.Identifier in project asterixdb by apache.

From class SqlppGroupByVisitor, method visit:

@Override
public Expression visit(GroupbyClause gc, ILangExpression arg) throws CompilationException {
    // Puts all FROM binding variables into withVarMap.
    FromClause fromClause = (FromClause) arg;
    Collection<VariableExpr> fromBindingVars = fromClause == null ? new ArrayList<>() : SqlppVariableUtil.getBindingVariables(fromClause);
    Map<Expression, VariableExpr> withVarMap = new HashMap<>();
    for (VariableExpr fromBindingVar : fromBindingVars) {
        VariableExpr varExpr = new VariableExpr();
        varExpr.setIsNewVar(false);
        varExpr.setVar(fromBindingVar.getVar());
        VariableExpr newVarExpr = (VariableExpr) SqlppRewriteUtil.deepCopy(varExpr);
        withVarMap.put(varExpr, newVarExpr);
    }
    // Sets the field list for the group variable.
    List<Pair<Expression, Identifier>> groupFieldList = new ArrayList<>();
    if (!gc.hasGroupFieldList()) {
        for (VariableExpr varExpr : fromBindingVars) {
            Pair<Expression, Identifier> varIdPair = new Pair<>(new VariableExpr(varExpr.getVar()), SqlppVariableUtil.toUserDefinedVariableName(varExpr.getVar()));
            groupFieldList.add(varIdPair);
        }
    } else {
        for (Pair<Expression, Identifier> groupField : gc.getGroupFieldList()) {
            Expression newFieldExpr = groupField.first.accept(this, arg);
            groupFieldList.add(new Pair<>(newFieldExpr, groupField.second));
            // Adds a field binding variable into withVarMap.
            VariableExpr bindingVar = new VariableExpr(new VarIdentifier(SqlppVariableUtil.toInternalVariableName(groupField.second.getValue())));
            withVarMap.put(newFieldExpr, bindingVar);
        }
    }
    gc.setGroupFieldList(groupFieldList);
    // Sets the group variable.
    if (!gc.hasGroupVar()) {
        VariableExpr groupVar = new VariableExpr(context.newVariable());
        gc.setGroupVar(groupVar);
    }
    // Adds the group variable into the "with" (i.e., re-binding) variable list.
    VariableExpr gbyVarRef = new VariableExpr(gc.getGroupVar().getVar());
    gbyVarRef.setIsNewVar(false);
    withVarMap.put(gbyVarRef, (VariableExpr) SqlppRewriteUtil.deepCopy(gbyVarRef));
    gc.setWithVarMap(withVarMap);
    // Call super.visit(...) to scope variables.
    return super.visit(gc, arg);
}
Also used : HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Identifier(org.apache.asterix.lang.common.struct.Identifier) VarIdentifier(org.apache.asterix.lang.common.struct.VarIdentifier) FromClause(org.apache.asterix.lang.sqlpp.clause.FromClause) ILangExpression(org.apache.asterix.lang.common.base.ILangExpression) Expression(org.apache.asterix.lang.common.base.Expression) SelectExpression(org.apache.asterix.lang.sqlpp.expression.SelectExpression) VarIdentifier(org.apache.asterix.lang.common.struct.VarIdentifier) VariableExpr(org.apache.asterix.lang.common.expression.VariableExpr) GbyVariableExpressionPair(org.apache.asterix.lang.common.expression.GbyVariableExpressionPair) Pair(org.apache.hyracks.algebricks.common.utils.Pair)
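
The interesting move above is turning each FROM binding variable into an (expression, Identifier) group field. Below is a minimal sketch of that per-variable step, not taken from the page; it assumes AsterixDB on the classpath and that SqlppVariableUtil lives in org.apache.asterix.lang.sqlpp.util (the import is not shown above).

import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.expression.VariableExpr;
import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.common.struct.VarIdentifier;
import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
import org.apache.hyracks.algebricks.common.utils.Pair;

public class GroupFieldSketch {

    // Builds the default group field for one binding variable: a reference to
    // the variable paired with its user-visible name as an Identifier.
    public static Pair<Expression, Identifier> toGroupField(VariableExpr bindingVar) {
        VariableExpr ref = new VariableExpr(bindingVar.getVar());
        ref.setIsNewVar(false); // reference the existing variable, don't bind a new one
        Identifier fieldName = SqlppVariableUtil.toUserDefinedVariableName(bindingVar.getVar());
        return new Pair<>(ref, fieldName);
    }

    public static void main(String[] args) {
        // "x" is a hypothetical user variable; toInternalVariableName adds the
        // internal prefix that toUserDefinedVariableName later strips off.
        VariableExpr x = new VariableExpr(
                new VarIdentifier(SqlppVariableUtil.toInternalVariableName("x")));
        System.out.println(toGroupField(x).second.getValue()); // expected: x
    }
}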

Example 7 with Identifier

Use of org.apache.asterix.lang.common.struct.Identifier in project asterixdb by apache.

From class QueryTranslator, method handleCreateNodeGroupStatement:

protected void handleCreateNodeGroupStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    NodegroupDecl stmtCreateNodegroup = (NodegroupDecl) stmt;
    String ngName = stmtCreateNodegroup.getNodegroupName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), ngName);
    try {
        NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, ngName);
        if (ng != null) {
            if (!stmtCreateNodegroup.getIfNotExists()) {
                throw new AlgebricksException("A nodegroup with this name " + ngName + " already exists.");
            }
        } else {
            List<Identifier> ncIdentifiers = stmtCreateNodegroup.getNodeControllerNames();
            List<String> ncNames = new ArrayList<>(ncIdentifiers.size());
            for (Identifier id : ncIdentifiers) {
                ncNames.add(id.getValue());
            }
            MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(ngName, ncNames));
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : NodegroupDecl(org.apache.asterix.lang.common.statement.NodegroupDecl) Identifier(org.apache.asterix.lang.common.struct.Identifier) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) ArrayList(java.util.ArrayList) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) NodeGroup(org.apache.asterix.metadata.entities.NodeGroup)
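
As the loop above suggests, Identifier here behaves as a thin wrapper around a String: it is constructed from a name and unwrapped with getValue(). A minimal sketch of that unwrap step in isolation (the node names are hypothetical; the real list comes from NodegroupDecl):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.asterix.lang.common.struct.Identifier;

public class IdentifierUnwrapSketch {
    public static void main(String[] args) {
        // Hypothetical node controller names.
        List<Identifier> ncIdentifiers = Arrays.asList(new Identifier("nc1"), new Identifier("nc2"));
        List<String> ncNames = new ArrayList<>(ncIdentifiers.size());
        for (Identifier id : ncIdentifiers) {
            ncNames.add(id.getValue()); // unwrap to the plain node name
        }
        System.out.println(ncNames); // [nc1, nc2]
    }
}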

Example 8 with Identifier

Use of org.apache.asterix.lang.common.struct.Identifier in project asterixdb by apache.

From class LangExpressionToPlanTranslator, method visit:

@Override
public Pair<ILogicalOperator, LogicalVariable> visit(GroupbyClause gc, Mutable<ILogicalOperator> tupSource) throws CompilationException {
    Mutable<ILogicalOperator> topOp = tupSource;
    if (gc.hasGroupVar()) {
        List<Pair<Expression, Identifier>> groupFieldList = gc.getGroupFieldList();
        List<Mutable<ILogicalExpression>> groupRecordConstructorArgList = new ArrayList<>();
        for (Pair<Expression, Identifier> groupField : groupFieldList) {
            ILogicalExpression groupFieldNameExpr = langExprToAlgExpression(new LiteralExpr(new StringLiteral(groupField.second.getValue())), topOp).first;
            groupRecordConstructorArgList.add(new MutableObject<>(groupFieldNameExpr));
            ILogicalExpression groupFieldExpr = langExprToAlgExpression(groupField.first, topOp).first;
            groupRecordConstructorArgList.add(new MutableObject<>(groupFieldExpr));
        }
        LogicalVariable groupVar = context.newVarFromExpression(gc.getGroupVar());
        AssignOperator groupVarAssignOp = new AssignOperator(groupVar, new MutableObject<>(new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR), groupRecordConstructorArgList)));
        groupVarAssignOp.getInputs().add(topOp);
        topOp = new MutableObject<>(groupVarAssignOp);
    }
    GroupByOperator gOp = new GroupByOperator();
    for (GbyVariableExpressionPair ve : gc.getGbyPairList()) {
        VariableExpr vexpr = ve.getVar();
        LogicalVariable v = vexpr == null ? context.newVar() : context.newVarFromExpression(vexpr);
        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(ve.getExpr(), topOp);
        gOp.addGbyExpression(v, eo.first);
        topOp = eo.second;
    }
    for (GbyVariableExpressionPair ve : gc.getDecorPairList()) {
        VariableExpr vexpr = ve.getVar();
        LogicalVariable v = vexpr == null ? context.newVar() : context.newVarFromExpression(vexpr);
        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(ve.getExpr(), topOp);
        gOp.addDecorExpression(v, eo.first);
        topOp = eo.second;
    }
    gOp.getInputs().add(topOp);
    for (Entry<Expression, VariableExpr> entry : gc.getWithVarMap().entrySet()) {
        Pair<ILogicalExpression, Mutable<ILogicalOperator>> listifyInput = langExprToAlgExpression(entry.getKey(), new MutableObject<>(new NestedTupleSourceOperator(new MutableObject<>(gOp))));
        List<Mutable<ILogicalExpression>> flArgs = new ArrayList<>(1);
        flArgs.add(new MutableObject<>(listifyInput.first));
        AggregateFunctionCallExpression fListify = BuiltinFunctions.makeAggregateFunctionExpression(BuiltinFunctions.LISTIFY, flArgs);
        LogicalVariable aggVar = context.newVar();
        AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(aggVar), mkSingletonArrayList(new MutableObject<>(fListify)));
        agg.getInputs().add(listifyInput.second);
        ILogicalPlan plan = new ALogicalPlanImpl(new MutableObject<>(agg));
        gOp.getNestedPlans().add(plan);
        // Hide the variable that was part of the "with", replacing it with
        // the one bound by the aggregation op.
        context.setVar(entry.getValue(), aggVar);
    }
    gOp.setGroupAll(gc.isGroupAll());
    gOp.getAnnotations().put(OperatorAnnotations.USE_HASH_GROUP_BY, gc.hasHashGroupByHint());
    return new Pair<>(gOp, null);
}
Also used : NestedTupleSourceOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator) ArrayList(java.util.ArrayList) Identifier(org.apache.asterix.lang.common.struct.Identifier) FunctionIdentifier(org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier) ALogicalPlanImpl(org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl) AggregateOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator) LiteralExpr(org.apache.asterix.lang.common.expression.LiteralExpr) GbyVariableExpressionPair(org.apache.asterix.lang.common.expression.GbyVariableExpressionPair) Pair(org.apache.hyracks.algebricks.common.utils.Pair) QuantifiedPair(org.apache.asterix.lang.common.struct.QuantifiedPair) ScalarFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression) MutableObject(org.apache.commons.lang3.mutable.MutableObject) LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) AggregateFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression) GroupByOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator) ILogicalOperator(org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator) AssignOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator) Mutable(org.apache.commons.lang3.mutable.Mutable) ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) StringLiteral(org.apache.asterix.lang.common.literal.StringLiteral) ILangExpression(org.apache.asterix.lang.common.base.ILangExpression) AggregateFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression) Expression(org.apache.asterix.lang.common.base.Expression) VariableReferenceExpression(org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression) QuantifiedExpression(org.apache.asterix.lang.common.expression.QuantifiedExpression) ScalarFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression) ConstantExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression) ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) AbstractFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression) UnnestingFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression) GbyVariableExpressionPair(org.apache.asterix.lang.common.expression.GbyVariableExpressionPair) ILogicalPlan(org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan) VariableExpr(org.apache.asterix.lang.common.expression.VariableExpr)
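
Note how the group-variable record is assembled: OPEN_RECORD_CONSTRUCTOR receives a flat argument list that interleaves a field-name expression with a field-value expression for each group field. A stand-in sketch of that layout in plain Java, with Strings playing the role of the Mutable<ILogicalExpression> arguments above:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class RecordConstructorArgSketch {

    // Interleaves names and values the way the loop above builds
    // groupRecordConstructorArgList: name, value, name, value, ...
    public static List<String> interleave(Map<String, String> fields) {
        List<String> args = new ArrayList<>(fields.size() * 2);
        for (Map.Entry<String, String> field : fields.entrySet()) {
            args.add(field.getKey());   // slot for the field-name expression
            args.add(field.getValue()); // slot for the field-value expression
        }
        return args;
    }

    public static void main(String[] args) {
        Map<String, String> fields = new LinkedHashMap<>();
        fields.put("deptId", "$deptId");
        fields.put("city", "$city");
        System.out.println(interleave(fields)); // [deptId, $deptId, city, $city]
    }
}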

Example 9 with Identifier

Use of org.apache.asterix.lang.common.struct.Identifier in project asterixdb by apache.

From class QueryTranslator, method handleDataverseDropStatement:

protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
    String dataverseName = stmtDelete.getDataverseName().getValue();
    if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
        throw new HyracksDataException(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME + " dataverse can't be dropped");
    }
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.acquireDataverseWriteLock(metadataProvider.getLocks(), dataverseName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dv == null) {
            if (stmtDelete.getIfExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
            }
        }
        // #. disconnect all feeds from any datasets in the dataverse.
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
        IActiveEntityEventsListener[] activeListeners = activeEventHandler.getEventListeners();
        Identifier dvId = new Identifier(dataverseName);
        MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse(), metadataProvider.getStorageComponentProvider());
        tempMdProvider.setConfig(metadataProvider.getConfig());
        for (IActiveEntityEventsListener listener : activeListeners) {
            EntityId activeEntityId = listener.getEntityId();
            if (activeEntityId.getExtensionName().equals(Feed.EXTENSION_NAME) && activeEntityId.getDataverse().equals(dataverseName)) {
                tempMdProvider.getLocks().reset();
                stopFeedBeforeDelete(new Pair<>(dvId, new Identifier(activeEntityId.getEntityName())), tempMdProvider);
                // prepare job to remove feed log storage
                jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(metadataProvider, MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
            }
        }
        // #. prepare jobs which will drop corresponding datasets with indexes.
        List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
        for (Dataset dataset : datasets) {
            String datasetName = dataset.getDatasetName();
            DatasetType dsType = dataset.getDatasetType();
            if (dsType == DatasetType.INTERNAL) {
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (Index index : indexes) {
                    jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, dataset));
                }
            } else {
                // External dataset
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, dataset));
                    } else {
                        jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider, dataset));
                    }
                }
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(dataset);
            }
        }
        jobsToExecute.add(DataverseUtil.dropDataverseJobSpec(dv, metadataProvider));
        // #. mark PendingDropOp on the dataverse record by
        // first, deleting the dataverse record from the DATAVERSE_DATASET
        // second, inserting the dataverse record with the PendingDropOp value into the
        // DATAVERSE_DATASET
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP));
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // #. finally, delete the dataverse.
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        // Drops all node groups that are no longer needed.
        for (Dataset dataset : datasets) {
            String nodeGroup = dataset.getNodeGroupName();
            MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
            if (MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup) != null) {
                MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodeGroup, true);
            }
        }
        if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
            activeDataverse = null;
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
                activeDataverse = null;
            }
            // remove all the indexes in the NCs
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception, since the metadata still needs to be compensated.
                e.addSuppressed(e2);
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            try {
                MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending dataverse(" + dataverseName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) ArrayList(java.util.ArrayList) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) DatasetType(org.apache.asterix.common.config.DatasetConfig.DatasetType) Index(org.apache.asterix.metadata.entities.Index) DataverseDropStatement(org.apache.asterix.lang.common.statement.DataverseDropStatement) ActiveLifecycleListener(org.apache.asterix.active.ActiveLifecycleListener) Identifier(org.apache.asterix.lang.common.struct.Identifier) JobSpecification(org.apache.hyracks.api.job.JobSpecification) ActiveJobNotificationHandler(org.apache.asterix.active.ActiveJobNotificationHandler) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) Dataverse(org.apache.asterix.metadata.entities.Dataverse) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) DatasetNodegroupCardinalityHint(org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) IActiveEntityEventsListener(org.apache.asterix.active.IActiveEntityEventsListener) EntityId(org.apache.asterix.active.EntityId) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider)
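
The bulk of this method is the recurring QueryTranslator drop pattern: commit a record marked PendingDropOp, run the physical drop jobs outside any metadata transaction, then finalize the drop in a fresh transaction, compensating if a job fails in between. A schematic sketch of just that protocol, with hypothetical hooks standing in for the MetadataManager, JobUtils, and lock calls:

// Schematic sketch of the two-transaction "pending op" protocol; the abstract
// hooks are hypothetical stand-ins, not the real MetadataManager API.
public abstract class PendingOpDropSketch {

    protected abstract void begin();                             // begin a metadata transaction
    protected abstract void commit() throws Exception;           // commit the current transaction
    protected abstract void abortTxn(Exception cause);           // abort the current transaction
    protected abstract void markPendingDrop() throws Exception;  // re-insert record with PendingDropOp
    protected abstract void runPhysicalJobs() throws Exception;  // run drop jobs on the NCs
    protected abstract void finalDrop() throws Exception;        // remove the metadata record

    public final void drop() throws Exception {
        begin();
        boolean activeTxn = true;
        boolean pendingRecorded = false;
        try {
            markPendingDrop();
            commit(); // make the pending state durable before touching storage
            activeTxn = false;
            pendingRecorded = true;
            runPhysicalJobs(); // runs outside any metadata transaction
            begin();
            activeTxn = true;
            finalDrop();
            commit();
        } catch (Exception e) {
            if (activeTxn) {
                abortTxn(e);
            }
            if (pendingRecorded) {
                // Compensate: the physical state may already be gone, so remove
                // the pending record rather than leave it dangling.
                begin();
                try {
                    finalDrop();
                    commit();
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    abortTxn(e2);
                }
            }
            throw e;
        }
    }
}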

Example 10 with Identifier

Use of org.apache.asterix.lang.common.struct.Identifier in project asterixdb by apache.

From class QueryTranslator, method handleCreateDatasetStatement:

public void handleCreateDatasetStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    DatasetDecl dd = (DatasetDecl) stmt;
    String dataverseName = getActiveDataverse(dd.getDataverse());
    String datasetName = dd.getName().getValue();
    DatasetType dsType = dd.getDatasetType();
    String itemTypeDataverseName = getActiveDataverse(dd.getItemTypeDataverse());
    String itemTypeName = dd.getItemTypeName().getValue();
    String metaItemTypeDataverseName = getActiveDataverse(dd.getMetaItemTypeDataverse());
    String metaItemTypeName = dd.getMetaItemTypeName().getValue();
    Identifier ngNameId = dd.getNodegroupName();
    String nodegroupName = ngNameId == null ? null : ngNameId.getValue();
    String compactionPolicy = dd.getCompactionPolicy();
    Map<String, String> compactionPolicyProperties = dd.getCompactionPolicyProperties();
    boolean defaultCompactionPolicy = compactionPolicy == null;
    boolean temp = dd.getDatasetDetailsDecl().isTemp();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.createDatasetBegin(metadataProvider.getLocks(), dataverseName, itemTypeDataverseName, itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName, metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
    Dataset dataset = null;
    try {
        IDatasetDetails datasetDetails = null;
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds != null) {
            if (dd.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
            }
        }
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), itemTypeDataverseName, itemTypeName);
        if (dt == null) {
            throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
        }
        String ngName = ngNameId != null ? ngNameId.getValue() : configureNodegroupForDataset(appCtx, dd.getHints(), dataverseName, datasetName, metadataProvider);
        if (compactionPolicy == null) {
            compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
            compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
        } else {
            validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
        }
        switch(dd.getDatasetType()) {
            case INTERNAL:
                IAType itemType = dt.getDatatype();
                if (itemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset type has to be a record type.");
                }
                IAType metaItemType = null;
                if (metaItemTypeDataverseName != null && metaItemTypeName != null) {
                    metaItemType = metadataProvider.findType(metaItemTypeDataverseName, metaItemTypeName);
                }
                if (metaItemType != null && metaItemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset meta type has to be a record type.");
                }
                ARecordType metaRecType = (ARecordType) metaItemType;
                List<List<String>> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs();
                List<Integer> keySourceIndicators = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getKeySourceIndicators();
                boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
                ARecordType aRecordType = (ARecordType) itemType;
                List<IAType> partitioningTypes = ValidateUtil.validatePartitioningExpressions(aRecordType, metaRecType, partitioningExprs, keySourceIndicators, autogenerated);
                List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
                if (filterField != null) {
                    ValidateUtil.validateFilterField(aRecordType, filterField);
                }
                if (defaultCompactionPolicy && filterField != null) {
                    // If the dataset has a filter and the user didn't specify a merge
                    // policy, pick correlated-prefix as the default merge policy.
                    compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
                    compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
                }
                datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE, InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs, keySourceIndicators, partitioningTypes, autogenerated, filterField, temp);
                break;
            case EXTERNAL:
                String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
                Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
                datasetDetails = new ExternalDatasetDetails(adapter, properties, new Date(), TransactionState.COMMIT);
                break;
            default:
                throw new CompilationException("Unknown datatype " + dd.getDatasetType());
        }
        // #. initialize DatasetIdFactory if it is not initialized.
        if (!DatasetIdFactory.isInitialized()) {
            DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
        }
        // #. add a new dataset with PendingAddOp
        dataset = new Dataset(dataverseName, datasetName, itemTypeDataverseName, itemTypeName, metaItemTypeDataverseName, metaItemTypeName, ngName, compactionPolicy, compactionPolicyProperties, datasetDetails, dd.getHints(), dsType, DatasetIdFactory.generateDatasetId(), MetadataUtil.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        if (dd.getDatasetType() == DatasetType.INTERNAL) {
            JobSpecification jobSpec = DatasetUtil.createDatasetJobSpec(dataset, metadataProvider);
            // #. make metadataTxn commit before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);
            // #. runJob
            JobUtils.runJob(hcc, jobSpec, true);
            // #. begin new metadataTxn
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
        }
        // #. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
        MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
        dataset.setPendingOp(MetadataUtil.PENDING_NO_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // #. execute compensation operations
            // remove the index in NC
            // [Notice]
            // As long as we updated(and committed) metadata, we should remove any effect of the job
            // because an exception occurs during runJob.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                JobSpecification jobSpec = DatasetUtil.dropDatasetJobSpec(dataset, metadataProvider);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ExternalDetailsDecl(org.apache.asterix.lang.common.statement.ExternalDetailsDecl) DatasetType(org.apache.asterix.common.config.DatasetConfig.DatasetType) IDatasetDetails(org.apache.asterix.metadata.IDatasetDetails) Datatype(org.apache.asterix.metadata.entities.Datatype) DatasetDecl(org.apache.asterix.lang.common.statement.DatasetDecl) Identifier(org.apache.asterix.lang.common.struct.Identifier) ExternalDatasetDetails(org.apache.asterix.metadata.entities.ExternalDatasetDetails) ArrayList(java.util.ArrayList) List(java.util.List) JobSpecification(org.apache.hyracks.api.job.JobSpecification) MutableObject(org.apache.commons.lang3.mutable.MutableObject) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) InternalDatasetDetails(org.apache.asterix.metadata.entities.InternalDatasetDetails) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) Date(java.util.Date) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) InternalDetailsDecl(org.apache.asterix.lang.common.statement.InternalDetailsDecl) ARecordType(org.apache.asterix.om.types.ARecordType) IAType(org.apache.asterix.om.types.IAType)
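
Merge-policy selection above has two defaults: the global default policy, and a correlated-prefix default for datasets that declare a filter field (checked via the defaultCompactionPolicy flag captured before the user's null policy is overwritten). A minimal sketch of that decision; the two policy-name strings are assumed values rather than read from GlobalConfig:

public final class MergePolicyDefaultsSketch {

    // Values assumed to match GlobalConfig's constants.
    private static final String DEFAULT_POLICY = "prefix";
    private static final String FILTERED_DATASET_POLICY = "correlated-prefix";

    private MergePolicyDefaultsSketch() {
    }

    public static String choose(String userPolicy, boolean hasFilterField) {
        if (userPolicy != null) {
            return userPolicy; // an explicit policy wins (validated elsewhere)
        }
        // Filtered datasets default to the correlated-prefix policy;
        // everything else gets the global default.
        return hasFilterField ? FILTERED_DATASET_POLICY : DEFAULT_POLICY;
    }

    public static void main(String[] args) {
        System.out.println(choose(null, false));      // prefix
        System.out.println(choose(null, true));       // correlated-prefix
        System.out.println(choose("constant", true)); // constant
    }
}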

Aggregations

Identifier (org.apache.asterix.lang.common.struct.Identifier): 20
ArrayList (java.util.ArrayList): 10
Expression (org.apache.asterix.lang.common.base.Expression): 8
VariableExpr (org.apache.asterix.lang.common.expression.VariableExpr): 8
VarIdentifier (org.apache.asterix.lang.common.struct.VarIdentifier): 8
GbyVariableExpressionPair (org.apache.asterix.lang.common.expression.GbyVariableExpressionPair): 6
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 5
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 5
ILangExpression (org.apache.asterix.lang.common.base.ILangExpression): 5
IOException (java.io.IOException): 4
RemoteException (java.rmi.RemoteException): 4
HashMap (java.util.HashMap): 4
ACIDException (org.apache.asterix.common.exceptions.ACIDException): 4
QuantifiedExpression (org.apache.asterix.lang.common.expression.QuantifiedExpression): 4
SelectExpression (org.apache.asterix.lang.sqlpp.expression.SelectExpression): 4
MetadataException (org.apache.asterix.metadata.MetadataException): 4
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 4
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 4
FunctionIdentifier (org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier): 4
IDataset (org.apache.asterix.common.metadata.IDataset): 3