Example 36 with Index

Use of org.apache.asterix.metadata.entities.Index in project asterixdb by apache.

Class IntroduceLSMComponentFilterRule, method rewritePost:

@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
    if (!checkIfRuleIsApplicable(opRef, context)) {
        return false;
    }
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    typeEnvironment = context.getOutputTypeEnvironment(op);
    ILogicalExpression condExpr = ((SelectOperator) op).getCondition().getValue();
    AccessMethodAnalysisContext analysisCtx = analyzeCondition(condExpr, context, typeEnvironment);
    if (analysisCtx.getMatchedFuncExprs().isEmpty()) {
        return false;
    }
    Dataset dataset = getDataset(op, context);
    List<String> filterFieldName = null;
    ARecordType recType = null;
    if (dataset != null && dataset.getDatasetType() == DatasetType.INTERNAL) {
        filterFieldName = DatasetUtil.getFilterField(dataset);
        IAType itemType = ((MetadataProvider) context.getMetadataProvider()).findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
        if (itemType.getTypeTag() == ATypeTag.OBJECT) {
            recType = (ARecordType) itemType;
        }
    }
    if (filterFieldName == null || recType == null) {
        return false;
    }
    List<Index> datasetIndexes = ((MetadataProvider) context.getMetadataProvider()).getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
    List<IOptimizableFuncExpr> optFuncExprs = new ArrayList<>();
    for (int i = 0; i < analysisCtx.getMatchedFuncExprs().size(); i++) {
        IOptimizableFuncExpr optFuncExpr = analysisCtx.getMatchedFuncExpr(i);
        boolean found = findMacthedExprFieldName(optFuncExpr, op, dataset, recType, datasetIndexes, context);
        if (found && optFuncExpr.getFieldName(0).equals(filterFieldName)) {
            optFuncExprs.add(optFuncExpr);
        }
    }
    if (optFuncExprs.isEmpty()) {
        return false;
    }
    changePlan(optFuncExprs, op, dataset, context);
    OperatorPropertiesUtil.typeOpRec(opRef, context);
    context.addToDontApplySet(this, op);
    return true;
}
Also used:
AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator)
Dataset (org.apache.asterix.metadata.entities.Dataset)
ArrayList (java.util.ArrayList)
Index (org.apache.asterix.metadata.entities.Index)
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression)
MetadataProvider (org.apache.asterix.metadata.declared.MetadataProvider)
ARecordType (org.apache.asterix.om.types.ARecordType)
IAType (org.apache.asterix.om.types.IAType)
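
The rule above only fires when a matched function expression constrains exactly the dataset's filter field: optFuncExpr.getFieldName(0) is compared against the filter field path with equals(). Below is a minimal, self-contained sketch of that matching step; FuncExpr, selectFilterExprs, and the sample field names are hypothetical stand-ins for the AsterixDB types, not the project's API.

import java.util.ArrayList;
import java.util.List;

public class FilterFieldMatch {

    // Hypothetical stand-in for IOptimizableFuncExpr: it records the field path
    // (a list of name components) that the expression constrains.
    record FuncExpr(List<String> fieldName) {
    }

    // Keep only the expressions that constrain exactly the dataset's filter field,
    // mirroring the loop over analysisCtx.getMatchedFuncExprs() in rewritePost.
    static List<FuncExpr> selectFilterExprs(List<FuncExpr> matched, List<String> filterField) {
        List<FuncExpr> result = new ArrayList<>();
        for (FuncExpr e : matched) {
            if (e.fieldName().equals(filterField)) {
                result.add(e);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        List<FuncExpr> matched = List.of(
                new FuncExpr(List.of("timestamp")),
                new FuncExpr(List.of("user", "id")));
        // Only the predicate on the filter field ("timestamp") survives.
        System.out.println(selectFilterExprs(matched, List.of("timestamp")));
    }
}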

Example 37 with Index

Use of org.apache.asterix.metadata.entities.Index in project asterixdb by apache.

Class QueryTranslator, method handleDataverseDropStatement:

protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
    String dataverseName = stmtDelete.getDataverseName().getValue();
    if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
        throw new HyracksDataException(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME + " dataverse can't be dropped");
    }
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.acquireDataverseWriteLock(metadataProvider.getLocks(), dataverseName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dv == null) {
            if (stmtDelete.getIfExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
            }
        }
        // #. disconnect all feeds from any datasets in the dataverse.
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
        IActiveEntityEventsListener[] activeListeners = activeEventHandler.getEventListeners();
        Identifier dvId = new Identifier(dataverseName);
        MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse(), metadataProvider.getStorageComponentProvider());
        tempMdProvider.setConfig(metadataProvider.getConfig());
        for (IActiveEntityEventsListener listener : activeListeners) {
            EntityId activeEntityId = listener.getEntityId();
            if (activeEntityId.getExtensionName().equals(Feed.EXTENSION_NAME) && activeEntityId.getDataverse().equals(dataverseName)) {
                tempMdProvider.getLocks().reset();
                stopFeedBeforeDelete(new Pair<>(dvId, new Identifier(activeEntityId.getEntityName())), tempMdProvider);
                // prepare job to remove feed log storage
                jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(metadataProvider, MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
            }
        }
        // #. prepare jobs which will drop corresponding datasets with indexes.
        List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
        for (Dataset dataset : datasets) {
            String datasetName = dataset.getDatasetName();
            DatasetType dsType = dataset.getDatasetType();
            if (dsType == DatasetType.INTERNAL) {
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (Index index : indexes) {
                    jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, dataset));
                }
            } else {
                // External dataset
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, dataset));
                    } else {
                        jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider, dataset));
                    }
                }
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(dataset);
            }
        }
        jobsToExecute.add(DataverseUtil.dropDataverseJobSpec(dv, metadataProvider));
        // #. mark PendingDropOp on the dataverse record by
        // first, deleting the dataverse record from the DATAVERSE_DATASET
        // second, inserting the dataverse record with the PendingDropOp value into the
        // DATAVERSE_DATASET
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP));
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // #. finally, delete the dataverse.
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        // Drop all node groups that are no longer needed
        for (Dataset dataset : datasets) {
            String nodeGroup = dataset.getNodeGroupName();
            MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
            if (MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup) != null) {
                MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodeGroup, true);
            }
        }
        if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
            activeDataverse = null;
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
                activeDataverse = null;
            }
            // remove all the indexes in the NCs
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception, since the metadata still needs to be compensated.
                e.addSuppressed(e2);
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            try {
                MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is in an inconsistent state: the pending dataverse (" + dataverseName + ") could not be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used:
ProgressState (org.apache.asterix.common.utils.JobUtils.ProgressState)
ArrayList (java.util.ArrayList)
MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext)
DatasetType (org.apache.asterix.common.config.DatasetConfig.DatasetType)
Index (org.apache.asterix.metadata.entities.Index)
DataverseDropStatement (org.apache.asterix.lang.common.statement.DataverseDropStatement)
ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener)
Identifier (org.apache.asterix.lang.common.struct.Identifier)
JobSpecification (org.apache.hyracks.api.job.JobSpecification)
ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler)
IHyracksDataset (org.apache.hyracks.api.dataset.IHyracksDataset)
IDataset (org.apache.asterix.common.metadata.IDataset)
Dataset (org.apache.asterix.metadata.entities.Dataset)
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)
Dataverse (org.apache.asterix.metadata.entities.Dataverse)
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)
DatasetNodegroupCardinalityHint (org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint)
AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint)
ACIDException (org.apache.asterix.common.exceptions.ACIDException)
MetadataException (org.apache.asterix.metadata.MetadataException)
CompilationException (org.apache.asterix.common.exceptions.CompilationException)
IOException (java.io.IOException)
RemoteException (java.rmi.RemoteException)
AsterixException (org.apache.asterix.common.exceptions.AsterixException)
IActiveEntityEventsListener (org.apache.asterix.active.IActiveEntityEventsListener)
EntityId (org.apache.asterix.active.EntityId)
MetadataProvider (org.apache.asterix.metadata.declared.MetadataProvider)
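
handleDataverseDropStatement follows the metadata "pending op" protocol: one transaction marks the dataverse record as pending-drop, the physical drop jobs run while no metadata transaction is active, and a second transaction removes the record, with compensation in the catch block if a failure happens after the pending record was added. The sketch below abstracts that control flow; Txn, Job, Metadata, and drop are hypothetical stand-ins, assuming nothing about AsterixDB's actual signatures.

import java.util.List;

public class PendingDropProtocol {

    // Hypothetical stand-ins for MetadataTransactionContext and JobSpecification.
    interface Txn {
        void commit() throws Exception;
        void abort() throws Exception;
    }

    interface Job {
        void run() throws Exception;
    }

    interface Metadata {
        Txn begin();
        void markPendingDrop(Txn txn, String entity) throws Exception;
        void dropRecord(Txn txn, String entity) throws Exception;
    }

    // Mirrors the structure of handleDataverseDropStatement: transaction 1 marks
    // the record pending-drop, the jobs run with no transaction active, and
    // transaction 2 deletes the record; on failure, a cleanup transaction
    // compensates for the pending record.
    static void drop(Metadata md, String entity, List<Job> jobs) throws Exception {
        Txn txn = md.begin();
        boolean active = true;
        boolean pendingMarked = false;
        try {
            md.markPendingDrop(txn, entity);
            txn.commit();
            active = false;
            pendingMarked = true;
            for (Job job : jobs) {
                job.run(); // physical drops; no metadata transaction is open here
            }
            txn = md.begin();
            active = true;
            md.dropRecord(txn, entity);
            txn.commit();
        } catch (Exception e) {
            if (active) {
                txn.abort();
            }
            if (pendingMarked) {
                // Compensate: the pending record must still be removed.
                Txn cleanup = md.begin();
                try {
                    md.dropRecord(cleanup, entity);
                    cleanup.commit();
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    cleanup.abort();
                }
            }
            throw e;
        }
    }
}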

Example 38 with Index

Use of org.apache.asterix.metadata.entities.Index in project asterixdb by apache.

Class QueryTranslator, method prepareRunExternalRuntime:

// Prepares to run a program on an external runtime.
protected void prepareRunExternalRuntime(MetadataProvider metadataProvider, IHyracksClientConnection hcc, RunStatement pregelixStmt, String dataverseNameFrom, String dataverseNameTo, String datasetNameFrom, String datasetNameTo, MetadataTransactionContext mdTxnCtx) throws Exception {
    // Validates the source/sink dataverses and datasets.
    Dataset fromDataset = metadataProvider.findDataset(dataverseNameFrom, datasetNameFrom);
    if (fromDataset == null) {
        throw new CompilationException("The source dataset " + datasetNameFrom + " in dataverse " + dataverseNameFrom + " could not be found for the Run command");
    }
    Dataset toDataset = metadataProvider.findDataset(dataverseNameTo, datasetNameTo);
    if (toDataset == null) {
        throw new CompilationException("The sink dataset " + datasetNameTo + " in dataverse " + dataverseNameTo + " could not be found for the Run command");
    }
    try {
        // Find the primary index of the sink dataset.
        Index toIndex = null;
        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseNameTo, pregelixStmt.getDatasetNameTo().getValue());
        for (Index index : indexes) {
            if (index.isPrimaryIndex()) {
                toIndex = index;
                break;
            }
        }
        if (toIndex == null) {
            throw new AlgebricksException("Tried to access non-existing dataset: " + datasetNameTo);
        }
        // Cleans up the sink dataset -- Drop and then Create.
        DropDatasetStatement dropStmt = new DropDatasetStatement(new Identifier(dataverseNameTo), pregelixStmt.getDatasetNameTo(), true);
        this.handleDatasetDropStatement(metadataProvider, dropStmt, hcc);
        IDatasetDetailsDecl idd = new InternalDetailsDecl(toIndex.getKeyFieldNames(), toIndex.getKeyFieldSourceIndicators(), false, null, toDataset.getDatasetDetails().isTemp());
        DatasetDecl createToDataset = new DatasetDecl(new Identifier(dataverseNameTo), pregelixStmt.getDatasetNameTo(), new Identifier(toDataset.getItemTypeDataverseName()), new Identifier(toDataset.getItemTypeName()), new Identifier(toDataset.getMetaItemTypeDataverseName()), new Identifier(toDataset.getMetaItemTypeName()), new Identifier(toDataset.getNodeGroupName()), toDataset.getCompactionPolicy(), toDataset.getCompactionPolicyProperties(), toDataset.getHints(), toDataset.getDatasetType(), idd, false);
        this.handleCreateDatasetStatement(metadataProvider, createToDataset, hcc);
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, e.getMessage(), e);
        throw new AlgebricksException("Error cleaning the result dataset. This should not happen.");
    }
    // Flushes source dataset.
    FlushDatasetUtil.flushDataset(hcc, metadataProvider, dataverseNameFrom, datasetNameFrom, datasetNameFrom);
}
Also used:
IDatasetDetailsDecl (org.apache.asterix.lang.common.statement.IDatasetDetailsDecl)
InternalDetailsDecl (org.apache.asterix.lang.common.statement.InternalDetailsDecl)
CompilationException (org.apache.asterix.common.exceptions.CompilationException)
DatasetDecl (org.apache.asterix.lang.common.statement.DatasetDecl)
Identifier (org.apache.asterix.lang.common.struct.Identifier)
IHyracksDataset (org.apache.hyracks.api.dataset.IHyracksDataset)
IDataset (org.apache.asterix.common.metadata.IDataset)
Dataset (org.apache.asterix.metadata.entities.Dataset)
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)
DropDatasetStatement (org.apache.asterix.lang.common.statement.DropDatasetStatement)
Index (org.apache.asterix.metadata.entities.Index)
ACIDException (org.apache.asterix.common.exceptions.ACIDException)
MetadataException (org.apache.asterix.metadata.MetadataException)
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)
IOException (java.io.IOException)
RemoteException (java.rmi.RemoteException)
AsterixException (org.apache.asterix.common.exceptions.AsterixException)
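
Finding the primary index above is a linear scan for the single Index whose isPrimaryIndex() returns true. Below is a self-contained sketch of the same lookup written as a stream pipeline; the nested Index record and findPrimary are hypothetical stand-ins for the metadata entity, and IllegalStateException replaces AlgebricksException to keep the example dependency-free.

import java.util.List;

public class PrimaryIndexLookup {

    // Hypothetical stand-in for org.apache.asterix.metadata.entities.Index.
    record Index(String name, boolean isPrimaryIndex) {
    }

    // The same linear scan as in prepareRunExternalRuntime, as a stream;
    // orElseThrow replaces the explicit null check.
    static Index findPrimary(List<Index> indexes, String datasetName) {
        return indexes.stream()
                .filter(Index::isPrimaryIndex)
                .findFirst()
                .orElseThrow(() -> new IllegalStateException(
                        "Tried to access non-existing dataset: " + datasetName));
    }

    public static void main(String[] args) {
        List<Index> indexes = List.of(
                new Index("sec_idx", false),
                new Index("pk_idx", true));
        System.out.println(findPrimary(indexes, "toDataset").name()); // prints pk_idx
    }
}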

Example 39 with Index

Use of org.apache.asterix.metadata.entities.Index in project asterixdb by apache.

Class QueryTranslator, method handleCompactStatement:

protected void handleCompactStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    CompactStatement compactStatement = (CompactStatement) stmt;
    String dataverseName = getActiveDataverse(compactStatement.getDataverseName());
    String datasetName = compactStatement.getDatasetName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.compactBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
    try {
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName + ".");
        }
        // Prepare jobs to compact the dataset and its indexes
        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
        if (indexes.isEmpty()) {
            throw new AlgebricksException("Cannot compact the extrenal dataset " + datasetName + " because it has no indexes");
        }
        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dataverseName);
        jobsToExecute.add(DatasetUtil.compactDatasetJobSpec(dataverse, datasetName, metadataProvider));
        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            for (Index index : indexes) {
                if (index.isSecondaryIndex()) {
                    jobsToExecute.add(IndexUtil.buildSecondaryIndexCompactJobSpec(ds, index, metadataProvider));
                }
            }
        } else {
            prepareCompactJobsForExternalDataset(indexes, ds, jobsToExecute, metadataProvider);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        // #. run the jobs
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used:
IHyracksDataset (org.apache.hyracks.api.dataset.IHyracksDataset)
IDataset (org.apache.asterix.common.metadata.IDataset)
Dataset (org.apache.asterix.metadata.entities.Dataset)
ArrayList (java.util.ArrayList)
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)
MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext)
Index (org.apache.asterix.metadata.entities.Index)
Dataverse (org.apache.asterix.metadata.entities.Dataverse)
ACIDException (org.apache.asterix.common.exceptions.ACIDException)
MetadataException (org.apache.asterix.metadata.MetadataException)
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)
CompilationException (org.apache.asterix.common.exceptions.CompilationException)
IOException (java.io.IOException)
RemoteException (java.rmi.RemoteException)
AsterixException (org.apache.asterix.common.exceptions.AsterixException)
CompactStatement (org.apache.asterix.lang.common.statement.CompactStatement)
JobSpecification (org.apache.hyracks.api.job.JobSpecification)
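
Like the other handlers, handleCompactStatement wraps its metadata work in a begin/commit pair and aborts only when the transaction is still active (the bActiveTxn flag). That recurring shape can be captured once, as in the hedged sketch below; Txn, TxnManager, TxnBody, and inTransaction are hypothetical names, not AsterixDB API.

public class MetadataTxnTemplate {

    // Hypothetical stand-ins for MetadataManager's transaction API.
    interface Txn {
        void commit() throws Exception;
        void abort() throws Exception;
    }

    interface TxnManager {
        Txn begin();
    }

    interface TxnBody<T> {
        T run(Txn txn) throws Exception;
    }

    // The shape repeated by handleCompactStatement and its siblings: begin,
    // do the metadata work, commit, and abort only if the transaction is
    // still active.
    static <T> T inTransaction(TxnManager mgr, TxnBody<T> body) throws Exception {
        Txn txn = mgr.begin();
        boolean active = true;
        try {
            T result = body.run(txn);
            txn.commit();
            active = false;
            return result;
        } catch (Exception e) {
            if (active) {
                txn.abort();
            }
            throw e;
        }
    }
}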

Example 40 with Index

Use of org.apache.asterix.metadata.entities.Index in project asterixdb by apache.

Class RebalanceUtil, method dropDatasetFiles:

private static void dropDatasetFiles(Dataset dataset, MetadataProvider metadataProvider, IHyracksClientConnection hcc) throws Exception {
    List<JobSpecification> jobs = new ArrayList<>();
    List<Index> indexes = metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
    for (Index index : indexes) {
        jobs.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, dataset));
    }
    for (JobSpecification jobSpec : jobs) {
        JobUtils.runJob(hcc, jobSpec, true);
    }
}
Also used:
ArrayList (java.util.ArrayList)
Index (org.apache.asterix.metadata.entities.Index)
JobSpecification (org.apache.hyracks.api.job.JobSpecification)
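
dropDatasetFiles illustrates a prepare-then-run split: every drop job spec is built first, and only then are the jobs executed, so job construction (which reads metadata) finishes before any physical work starts. A small self-contained sketch of the same shape; Index, JobSpec, Cluster, and dropAll here are hypothetical stand-ins.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

public class PrepareThenRun {

    // Hypothetical stand-ins for Index, JobSpecification, and the cluster connection.
    record Index(String name) {
    }

    record JobSpec(String description) {
    }

    interface Cluster {
        void run(JobSpec spec) throws Exception;
    }

    // Mirrors dropDatasetFiles: build every drop job first, then execute them in
    // order, so no job runs while specs are still being derived from metadata.
    static void dropAll(List<Index> indexes, Function<Index, JobSpec> buildDrop, Cluster hcc)
            throws Exception {
        List<JobSpec> jobs = new ArrayList<>();
        for (Index index : indexes) {
            jobs.add(buildDrop.apply(index));
        }
        for (JobSpec spec : jobs) {
            hcc.run(spec);
        }
    }

    public static void main(String[] args) throws Exception {
        List<Index> indexes = List.of(new Index("pk_idx"), new Index("sec_idx"));
        dropAll(indexes,
                idx -> new JobSpec("drop " + idx.name()),
                spec -> System.out.println("running: " + spec.description()));
    }
}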

Aggregations

Index (org.apache.asterix.metadata.entities.Index): 53 uses
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 26 uses
Dataset (org.apache.asterix.metadata.entities.Dataset): 25 uses
ArrayList (java.util.ArrayList): 24 uses
MetadataException (org.apache.asterix.metadata.MetadataException): 20 uses
AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint): 16 uses
ARecordType (org.apache.asterix.om.types.ARecordType): 15 uses
IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider): 15 uses
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 14 uses
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 14 uses
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 13 uses
IIndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory): 13 uses
IndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory): 13 uses
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 12 uses
IAType (org.apache.asterix.om.types.IAType): 12 uses
IDataSourceIndex (org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex): 12 uses
IOException (java.io.IOException): 11 uses
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 11 uses
List (java.util.List): 10 uses
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 10 uses