Search in sources :

Example 1 with DeleteStatement

Use of org.apache.asterix.lang.common.statement.DeleteStatement in the project asterixdb by Apache.

From the class AbstractLangTranslator, method validateOperation:

/**
 * Validates that the given statement may be executed in the current cluster state
 * and that it does not perform a prohibited operation on the Metadata dataverse.
 * <p>
 * The method first waits (bounded by the configured max wait time) for the cluster
 * to become {@link ClusterState#ACTIVE} and for global recovery to complete, then
 * rejects inserts/deletes/drops targeting the Metadata dataverse and validates any
 * dataset-declaration hints.
 *
 * @param appCtx           CC application context; supplies the max-wait configuration
 * @param defaultDataverse the session's default dataverse; may be {@code null}
 * @param stmt             the statement to validate
 * @throws AsterixException if the cluster is unusable, global recovery did not
 *                          complete in time, or the operation is not permitted
 */
public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt) throws AsterixException {
    // Wait (bounded) for the cluster to become ACTIVE if it is not ready yet.
    if (!(ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE) && ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted())) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        try {
            ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE, maxWaitCycles, TimeUnit.SECONDS);
        } catch (HyracksDataException e) {
            throw new AsterixException(e);
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to be " + ClusterState.ACTIVE);
            }
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
            throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state." + "\n One or more Node Controllers have left or haven't joined yet.\n");
        } else {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
            }
        }
    }
    if (ClusterStateManager.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
        throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state." + "\n One or more Node Controllers have left.\n");
    }
    // Poll (once per second, up to maxWaitCycles) for global recovery to finish.
    if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        int waitCycleCount = 0;
        try {
            while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
                Thread.sleep(1000);
                waitCycleCount++;
            }
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery ");
            }
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
            throw new AsterixException("Cluster Global recovery is not yet complete and the system is in " + ClusterState.ACTIVE + " state");
        }
    }
    boolean invalidOperation = false;
    String message = null;
    // The statement's explicit dataverse (if any) overrides the session default.
    String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
    switch(stmt.getKind()) {
        case Statement.Kind.INSERT:
            InsertStatement insertStmt = (InsertStatement) stmt;
            if (insertStmt.getDataverseName() != null) {
                dataverse = insertStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Insert operation is not permitted in dataverse " + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DELETE:
            DeleteStatement deleteStmt = (DeleteStatement) stmt;
            if (deleteStmt.getDataverseName() != null) {
                dataverse = deleteStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Delete operation is not permitted in dataverse " + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATAVERSE_DROP:
            DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName().getValue());
            if (invalidOperation) {
                message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
            }
            break;
        case Statement.Kind.DATASET_DROP:
            DropDatasetStatement dropStmt = (DropDatasetStatement) stmt;
            if (dropStmt.getDataverseName() != null) {
                dataverse = dropStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Cannot drop a dataset belonging to the dataverse:" + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATASET_DECL:
            DatasetDecl datasetStmt = (DatasetDecl) stmt;
            Map<String, String> hints = datasetStmt.getHints();
            if (hints != null && !hints.isEmpty()) {
                // Collect all hint-validation failures into a single message.
                // StringBuilder over StringBuffer: no cross-thread sharing here.
                StringBuilder errorMsgBuffer = new StringBuilder();
                for (Entry<String, String> hint : hints.entrySet()) {
                    Pair<Boolean, String> validationResult = DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
                    if (!validationResult.first) {
                        errorMsgBuffer.append("Dataset: ").append(datasetStmt.getName().getValue())
                                .append(" error in processing hint: ").append(hint.getKey())
                                .append(' ').append(validationResult.second);
                        errorMsgBuffer.append(" \n");
                    }
                }
                invalidOperation = errorMsgBuffer.length() > 0;
                if (invalidOperation) {
                    message = errorMsgBuffer.toString();
                }
            }
            break;
        default:
            break;
    }
    if (invalidOperation) {
        throw new AsterixException("Invalid operation - " + message);
    }
}
Also used: DropDatasetStatement(org.apache.asterix.lang.common.statement.DropDatasetStatement) DeleteStatement(org.apache.asterix.lang.common.statement.DeleteStatement) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) InsertStatement(org.apache.asterix.lang.common.statement.InsertStatement) DataverseDropStatement(org.apache.asterix.lang.common.statement.DataverseDropStatement) DatasetDecl(org.apache.asterix.lang.common.statement.DatasetDecl) AsterixException(org.apache.asterix.common.exceptions.AsterixException)

Example 2 with DeleteStatement

Use of org.apache.asterix.lang.common.statement.DeleteStatement in the project asterixdb by Apache.

From the class QueryTranslator, method handleDeleteStatement:

/**
 * Compiles (and, unless {@code compileOnly} is set, executes) a DELETE statement.
 * <p>
 * A metadata transaction is opened for compilation and committed before the job
 * runs; on any failure while the transaction is still active it is aborted. The
 * dataset-level insert/delete/upsert lock is always released in the finally block.
 *
 * @param metadataProvider provider bound to the metadata transaction for compilation
 * @param stmt             the statement; must be a {@link DeleteStatement}
 * @param hcc              client connection used to run the compiled job
 * @param compileOnly      when {@code true}, compile but do not execute the job
 * @return the compiled job specification, or {@code null} if compilation produced none
 * @throws Exception on compilation or execution failure (after aborting any active txn)
 */
public JobSpecification handleDeleteStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc, boolean compileOnly) throws Exception {
    DeleteStatement deleteStmt = (DeleteStatement) stmt;
    String dataverseName = getActiveDataverse(deleteStmt.getDataverseName());
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean txnActive = true;
    metadataProvider.setMetadataTxnContext(txnCtx);
    MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(metadataProvider.getLocks(), dataverseName + "." + deleteStmt.getDatasetName());
    try {
        metadataProvider.setWriteTransaction(true);
        // Lower the AST delete into its compiled form before query rewriting.
        CompiledDeleteStatement compiledDelete = new CompiledDeleteStatement(deleteStmt.getVariableExpr(), dataverseName, deleteStmt.getDatasetName().getValue(), deleteStmt.getCondition(), deleteStmt.getVarCounter(), deleteStmt.getQuery());
        JobSpecification spec = rewriteCompileQuery(hcc, metadataProvider, compiledDelete.getQuery(), compiledDelete);
        // Commit metadata work before running the job; the txn is no longer active.
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
        txnActive = false;
        if (spec != null && !compileOnly) {
            JobUtils.runJob(hcc, spec, true);
        }
        return spec;
    } catch (Exception e) {
        // Only abort if the metadata transaction was not yet committed.
        if (txnActive) {
            abort(e, e, txnCtx);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used: CompiledDeleteStatement(org.apache.asterix.translator.CompiledStatements.CompiledDeleteStatement) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) JobSpecification(org.apache.hyracks.api.job.JobSpecification) CompiledDeleteStatement(org.apache.asterix.translator.CompiledStatements.CompiledDeleteStatement) DeleteStatement(org.apache.asterix.lang.common.statement.DeleteStatement) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException)

Aggregations

AsterixException (org.apache.asterix.common.exceptions.AsterixException)2 DeleteStatement (org.apache.asterix.lang.common.statement.DeleteStatement)2 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)2 IOException (java.io.IOException)1 RemoteException (java.rmi.RemoteException)1 ACIDException (org.apache.asterix.common.exceptions.ACIDException)1 CompilationException (org.apache.asterix.common.exceptions.CompilationException)1 DatasetDecl (org.apache.asterix.lang.common.statement.DatasetDecl)1 DataverseDropStatement (org.apache.asterix.lang.common.statement.DataverseDropStatement)1 DropDatasetStatement (org.apache.asterix.lang.common.statement.DropDatasetStatement)1 InsertStatement (org.apache.asterix.lang.common.statement.InsertStatement)1 MetadataException (org.apache.asterix.metadata.MetadataException)1 MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext)1 CompiledDeleteStatement (org.apache.asterix.translator.CompiledStatements.CompiledDeleteStatement)1 AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)1 JobSpecification (org.apache.hyracks.api.job.JobSpecification)1