Search in sources:

Example 1 with DataverseDropStatement

Use of org.apache.asterix.lang.common.statement.DataverseDropStatement in project asterixdb by apache.

The class AbstractLangTranslator defines the method validateOperation:

public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt) throws AsterixException {
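    // Block, for a bounded number of wait cycles, until the cluster is ACTIVE and
    // global recovery has completed before validating the statement.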
    if (!(ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE) && ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted())) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        try {
            ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE, maxWaitCycles, TimeUnit.SECONDS);
        } catch (HyracksDataException e) {
            throw new AsterixException(e);
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to be " + ClusterState.ACTIVE);
            }
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
            throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state." + "\n One or more Node Controllers have left or haven't joined yet.\n");
        } else {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
            }
        }
    }
    if (ClusterStateManager.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
        throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state." + "\n One or more Node Controllers have left.\n");
    }
    if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        int waitCycleCount = 0;
        try {
            while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
                Thread.sleep(1000);
                waitCycleCount++;
            }
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery ");
            }
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
            throw new AsterixException("Cluster Global recovery is not yet complete and the system is in " + ClusterState.ACTIVE + " state");
        }
    }
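    // With the cluster usable, reject statements that would modify the reserved
    // Metadata dataverse, and validate any dataset hints on CREATE DATASET.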
    boolean invalidOperation = false;
    String message = null;
    String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
    switch(stmt.getKind()) {
        case Statement.Kind.INSERT:
            InsertStatement insertStmt = (InsertStatement) stmt;
            if (insertStmt.getDataverseName() != null) {
                dataverse = insertStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Insert operation is not permitted in dataverse " + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DELETE:
            DeleteStatement deleteStmt = (DeleteStatement) stmt;
            if (deleteStmt.getDataverseName() != null) {
                dataverse = deleteStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Delete operation is not permitted in dataverse " + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATAVERSE_DROP:
            DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName().getValue());
            if (invalidOperation) {
                message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
            }
            break;
        case Statement.Kind.DATASET_DROP:
            DropDatasetStatement dropStmt = (DropDatasetStatement) stmt;
            if (dropStmt.getDataverseName() != null) {
                dataverse = dropStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Cannot drop a dataset belonging to the dataverse:" + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATASET_DECL:
            DatasetDecl datasetStmt = (DatasetDecl) stmt;
            Map<String, String> hints = datasetStmt.getHints();
            if (hints != null && !hints.isEmpty()) {
                Pair<Boolean, String> validationResult;
                StringBuilder errorMsgBuffer = new StringBuilder();
                for (Entry<String, String> hint : hints.entrySet()) {
                    validationResult = DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
                    if (!validationResult.first) {
                        errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue() + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
                        errorMsgBuffer.append(" \n");
                    }
                }
                invalidOperation = errorMsgBuffer.length() > 0;
                if (invalidOperation) {
                    message = errorMsgBuffer.toString();
                }
            }
            break;
        default:
            break;
    }
    if (invalidOperation) {
        throw new AsterixException("Invalid operation - " + message);
    }
}
Also used:
DropDatasetStatement(org.apache.asterix.lang.common.statement.DropDatasetStatement)
DeleteStatement(org.apache.asterix.lang.common.statement.DeleteStatement)
HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException)
InsertStatement(org.apache.asterix.lang.common.statement.InsertStatement)
DataverseDropStatement(org.apache.asterix.lang.common.statement.DataverseDropStatement)
DatasetDecl(org.apache.asterix.lang.common.statement.DatasetDecl)
AsterixException(org.apache.asterix.common.exceptions.AsterixException)
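The essence of the check above fits in a few lines: statement kinds that would modify the reserved metadata dataverse are rejected before execution. Below is a minimal, self-contained sketch of that guard, for illustration only; ReservedNamespaceGuard and OperationKind are hypothetical stand-ins, not AsterixDB API, and the "Metadata" literal stands in for whatever MetadataConstants.METADATA_DATAVERSE_NAME resolves to.

import java.util.EnumSet;
import java.util.Set;

public final class ReservedNamespaceGuard {

    // Hypothetical stand-in for Statement.Kind.
    public enum OperationKind { INSERT, DELETE, DATAVERSE_DROP, DATASET_DROP, OTHER }

    // Assumed value of the reserved system dataverse name.
    private static final String METADATA_DATAVERSE = "Metadata";

    private static final Set<OperationKind> GUARDED = EnumSet.of(
            OperationKind.INSERT, OperationKind.DELETE,
            OperationKind.DATAVERSE_DROP, OperationKind.DATASET_DROP);

    private ReservedNamespaceGuard() {
    }

    // Throws if the operation kind is guarded and targets the reserved dataverse.
    public static void validate(OperationKind kind, String targetDataverse) {
        if (GUARDED.contains(kind) && METADATA_DATAVERSE.equals(targetDataverse)) {
            throw new IllegalArgumentException("Invalid operation - " + kind
                    + " is not permitted in dataverse " + METADATA_DATAVERSE);
        }
    }
}

For example, ReservedNamespaceGuard.validate(OperationKind.INSERT, "Metadata") throws, while the same call against a user dataverse returns normally.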

Example 2 with DataverseDropStatement

Use of org.apache.asterix.lang.common.statement.DataverseDropStatement in project asterixdb by apache.

The class QueryTranslator defines the method handleDataverseDropStatement:

protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
    String dataverseName = stmtDelete.getDataverseName().getValue();
    if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
        throw new HyracksDataException(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME + " dataverse can't be dropped");
    }
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
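    // Hold the dataverse write lock for the entire drop; it is released in the finally block.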
    MetadataLockManager.INSTANCE.acquireDataverseWriteLock(metadataProvider.getLocks(), dataverseName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dv == null) {
            if (stmtDelete.getIfExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
            }
        }
        // #. disconnect all feeds from any datasets in the dataverse.
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
        IActiveEntityEventsListener[] activeListeners = activeEventHandler.getEventListeners();
        Identifier dvId = new Identifier(dataverseName);
        MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse(), metadataProvider.getStorageComponentProvider());
        tempMdProvider.setConfig(metadataProvider.getConfig());
        for (IActiveEntityEventsListener listener : activeListeners) {
            EntityId activeEntityId = listener.getEntityId();
            if (activeEntityId.getExtensionName().equals(Feed.EXTENSION_NAME) && activeEntityId.getDataverse().equals(dataverseName)) {
                tempMdProvider.getLocks().reset();
                stopFeedBeforeDelete(new Pair<>(dvId, new Identifier(activeEntityId.getEntityName())), tempMdProvider);
                // prepare job to remove feed log storage
                jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(metadataProvider, MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
            }
        }
        // #. prepare jobs which will drop corresponding datasets with indexes.
        List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
        for (Dataset dataset : datasets) {
            String datasetName = dataset.getDatasetName();
            DatasetType dsType = dataset.getDatasetType();
            if (dsType == DatasetType.INTERNAL) {
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (Index index : indexes) {
                    jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, dataset));
                }
            } else {
                // External dataset
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, dataset));
                    } else {
                        jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider, dataset));
                    }
                }
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(dataset);
            }
        }
        jobsToExecute.add(DataverseUtil.dropDataverseJobSpec(dv, metadataProvider));
        // #. mark PendingDropOp on the dataverse record by
        // first, deleting the dataverse record from the DATAVERSE_DATASET
        // second, inserting the dataverse record with the PendingDropOp value into the
        // DATAVERSE_DATASET
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP));
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
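        // Run the physical drop jobs (the boolean flag requests waiting for completion).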
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // #. finally, delete the dataverse.
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        // Drop all node groups that are no longer needed.
        for (Dataset dataset : datasets) {
            String nodeGroup = dataset.getNodeGroupName();
            MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
            if (MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup) != null) {
                MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodeGroup, true);
            }
        }
        if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
            activeDataverse = null;
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
                activeDataverse = null;
            }
            // remove all the indexes in the NCs
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception; the metadata still needs to be compensated.
                e.addSuppressed(e2);
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            try {
                MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is in an inconsistent state: pending dataverse (" + dataverseName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used:
ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState)
ArrayList(java.util.ArrayList)
MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext)
DatasetType(org.apache.asterix.common.config.DatasetConfig.DatasetType)
Index(org.apache.asterix.metadata.entities.Index)
DataverseDropStatement(org.apache.asterix.lang.common.statement.DataverseDropStatement)
ActiveLifecycleListener(org.apache.asterix.active.ActiveLifecycleListener)
Identifier(org.apache.asterix.lang.common.struct.Identifier)
JobSpecification(org.apache.hyracks.api.job.JobSpecification)
ActiveJobNotificationHandler(org.apache.asterix.active.ActiveJobNotificationHandler)
IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset)
IDataset(org.apache.asterix.common.metadata.IDataset)
Dataset(org.apache.asterix.metadata.entities.Dataset)
AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)
Dataverse(org.apache.asterix.metadata.entities.Dataverse)
HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException)
DatasetNodegroupCardinalityHint(org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint)
AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint)
ACIDException(org.apache.asterix.common.exceptions.ACIDException)
MetadataException(org.apache.asterix.metadata.MetadataException)
CompilationException(org.apache.asterix.common.exceptions.CompilationException)
IOException(java.io.IOException)
RemoteException(java.rmi.RemoteException)
AsterixException(org.apache.asterix.common.exceptions.AsterixException)
IActiveEntityEventsListener(org.apache.asterix.active.IActiveEntityEventsListener)
EntityId(org.apache.asterix.active.EntityId)
MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider)
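handleDataverseDropStatement follows a pending-op protocol: one metadata transaction rewrites the dataverse record with the PendingDropOp flag, the physical drop jobs then run with no metadata transaction open, and a second transaction deletes the record for good; if anything fails after the flag was committed, the catch block compensates by re-running the jobs and removing the pending record. The sketch below captures only that shape; MetadataStore, Job, and TwoPhaseDrop are hypothetical names, not AsterixDB API.

import java.util.List;

public final class TwoPhaseDrop {

    // Hypothetical metadata facade.
    public interface MetadataStore {
        void begin();
        void commit();
        void abort();
        void markPendingDrop(String dataverseName); // delete + reinsert with a pending-drop flag
        void delete(String dataverseName);          // final removal of the record
    }

    // Hypothetical physical drop job (dataset, index, or feed-log storage).
    public interface Job {
        void run() throws Exception;
    }

    public static void dropDataverse(MetadataStore md, String dataverseName, List<Job> jobs)
            throws Exception {
        boolean activeTxn = false;
        boolean pendingRecorded = false;
        try {
            // Phase 1: durably record the intent to drop.
            md.begin();
            activeTxn = true;
            md.markPendingDrop(dataverseName);
            md.commit();
            activeTxn = false;
            pendingRecorded = true;
            // Phase 2: run the physical drops with no metadata transaction open.
            for (Job job : jobs) {
                job.run();
            }
            // Phase 3: remove the pending record.
            md.begin();
            activeTxn = true;
            md.delete(dataverseName);
            md.commit();
            activeTxn = false;
        } catch (Exception e) {
            if (activeTxn) {
                md.abort();
            }
            if (pendingRecorded) {
                // Compensate: finish the physical drops, then remove the pending record,
                // so the metadata does not keep pointing at half-dropped storage.
                try {
                    for (Job job : jobs) {
                        job.run();
                    }
                } catch (Exception e2) {
                    e.addSuppressed(e2); // keep going; the record must still be removed
                }
                md.begin();
                try {
                    md.delete(dataverseName);
                    md.commit();
                } catch (Exception e2) {
                    md.abort();
                    e.addSuppressed(e2);
                }
            }
            throw e;
        }
    }

    private TwoPhaseDrop() {
    }
}

The key property is that the pending record is committed before any physical state changes, so a crash at any point leaves either a fully present or an explicitly pending dataverse, rather than a silently half-dropped one.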

Aggregations

AsterixException (org.apache.asterix.common.exceptions.AsterixException): 2 usages
DataverseDropStatement (org.apache.asterix.lang.common.statement.DataverseDropStatement): 2 usages
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 2 usages
IOException (java.io.IOException): 1 usage
RemoteException (java.rmi.RemoteException): 1 usage
ArrayList (java.util.ArrayList): 1 usage
ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler): 1 usage
ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener): 1 usage
EntityId (org.apache.asterix.active.EntityId): 1 usage
IActiveEntityEventsListener (org.apache.asterix.active.IActiveEntityEventsListener): 1 usage
DatasetType (org.apache.asterix.common.config.DatasetConfig.DatasetType): 1 usage
ACIDException (org.apache.asterix.common.exceptions.ACIDException): 1 usage
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 1 usage
IDataset (org.apache.asterix.common.metadata.IDataset): 1 usage
ProgressState (org.apache.asterix.common.utils.JobUtils.ProgressState): 1 usage
DatasetDecl (org.apache.asterix.lang.common.statement.DatasetDecl): 1 usage
DeleteStatement (org.apache.asterix.lang.common.statement.DeleteStatement): 1 usage
DropDatasetStatement (org.apache.asterix.lang.common.statement.DropDatasetStatement): 1 usage
InsertStatement (org.apache.asterix.lang.common.statement.InsertStatement): 1 usage
Identifier (org.apache.asterix.lang.common.struct.Identifier): 1 usage