
Example 91 with AlgebricksException

use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.

the class ResultUtil method apiErrorHandler.

public static void apiErrorHandler(PrintWriter out, Exception e) {
    // Map known exception types to API error codes; 99 is the catch-all default.
    int errorCode = 99;
    if (e instanceof ParseException) {
        errorCode = 2;
    } else if (e instanceof AlgebricksException) {
        errorCode = 3;
    } else if (e instanceof HyracksDataException) {
        errorCode = 4;
    }
    ObjectNode errorResp = ResultUtil.getErrorResponse(errorCode, extractErrorMessage(e), extractErrorSummary(e), extractFullStackTrace(e));
    out.write(errorResp.toString());
}
Also used : ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) ParseException(org.apache.http.ParseException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException)
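
A minimal sketch of how such a handler could be driven; the StringWriter capture and the helper method are illustrative assumptions, not part of ResultUtil:

// Hypothetical helper: capture the JSON error document that apiErrorHandler
// writes for a given exception (an AlgebricksException maps to code 3 above).
static String renderError(Exception e) {
    java.io.StringWriter buffer = new java.io.StringWriter();
    java.io.PrintWriter out = new java.io.PrintWriter(buffer);
    ResultUtil.apiErrorHandler(out, e);
    out.flush();
    return buffer.toString();
}
// Usage: String json = renderError(new AlgebricksException("simulated failure"));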

Example 92 with AlgebricksException

use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.

the class QueryTranslator method executeExternalShellProgram.

// Executes an external shell program, scanning its output for known error patterns.
protected int executeExternalShellProgram(ProcessBuilder pb) throws IOException, AlgebricksException, InterruptedException {
    Process process = pb.start();
    try (BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
        String line;
        while ((line = in.readLine()) != null) {
            LOGGER.info(line);
            if (line.contains("Exception") || line.contains("Error")) {
                LOGGER.severe(line);
                if (line.contains("Connection refused")) {
                    throw new AlgebricksException("The connection to your Pregelix cluster was refused. Is it running? " + "Is the port in the query correct?");
                }
                if (line.contains("Could not find or load main class")) {
                    throw new AlgebricksException("The main class of your Pregelix query was not found. " + "Is the path to your .jar file correct?");
                }
                if (line.contains("ClassNotFoundException")) {
                    throw new AlgebricksException("The vertex class of your Pregelix query was not found. " + "Does it exist? Is the spelling correct?");
                }
            }
        }
        // Wait for the process to terminate so that exitValue() below is well-defined.
        process.waitFor();
    }
    // Gets the exit value of the program.
    return process.exitValue();
}
Also used : InputStreamReader(java.io.InputStreamReader) BufferedReader(java.io.BufferedReader) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)
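
A hedged usage sketch; the command line is invented, and the redirectErrorStream call is an assumption (the method scans only stdout, so folding stderr into it is one way to expose error lines to the scan):

// Hypothetical caller: launch the external client and surface a non-zero exit.
ProcessBuilder pb = new ProcessBuilder("bin/pregelix", "examples/pagerank.jar");
pb.redirectErrorStream(true); // merge stderr into stdout for the "Exception"/"Error" scan
int exitCode = executeExternalShellProgram(pb);
if (exitCode != 0) {
    throw new AlgebricksException("External program exited with code " + exitCode);
}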

Example 93 with AlgebricksException

use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.

the class QueryTranslator method handleDataverseDropStatement.

protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
    String dataverseName = stmtDelete.getDataverseName().getValue();
    if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
        throw new HyracksDataException(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME + " dataverse can't be dropped");
    }
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.acquireDataverseWriteLock(metadataProvider.getLocks(), dataverseName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dv == null) {
            if (stmtDelete.getIfExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
            }
        }
        // #. disconnect all feeds from any datasets in the dataverse.
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
        IActiveEntityEventsListener[] activeListeners = activeEventHandler.getEventListeners();
        Identifier dvId = new Identifier(dataverseName);
        MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse(), metadataProvider.getStorageComponentProvider());
        tempMdProvider.setConfig(metadataProvider.getConfig());
        for (IActiveEntityEventsListener listener : activeListeners) {
            EntityId activeEntityId = listener.getEntityId();
            if (activeEntityId.getExtensionName().equals(Feed.EXTENSION_NAME) && activeEntityId.getDataverse().equals(dataverseName)) {
                tempMdProvider.getLocks().reset();
                stopFeedBeforeDelete(new Pair<>(dvId, new Identifier(activeEntityId.getEntityName())), tempMdProvider);
                // prepare job to remove feed log storage
                jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(metadataProvider, MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
            }
        }
        // #. prepare jobs which will drop corresponding datasets with indexes.
        List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
        for (Dataset dataset : datasets) {
            String datasetName = dataset.getDatasetName();
            DatasetType dsType = dataset.getDatasetType();
            if (dsType == DatasetType.INTERNAL) {
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (Index index : indexes) {
                    jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, dataset));
                }
            } else {
                // External dataset
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, dataset));
                    } else {
                        jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider, dataset));
                    }
                }
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(dataset);
            }
        }
        jobsToExecute.add(DataverseUtil.dropDataverseJobSpec(dv, metadataProvider));
        // #. mark PendingDropOp on the dataverse record: first delete the dataverse
        // record from the DATAVERSE_DATASET, then re-insert it with the PendingDropOp
        // value set.
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP));
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // #. finally, delete the dataverse.
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        // Drop all node groups that are no longer needed.
        for (Dataset dataset : datasets) {
            String nodeGroup = dataset.getNodeGroupName();
            MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
            if (MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup) != null) {
                MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodeGroup, true);
            }
        }
        if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
            activeDataverse = null;
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
                activeDataverse = null;
            }
            // remove all the indexes in the NCs
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception, since the metadata still needs to be compensated.
                e.addSuppressed(e2);
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            try {
                MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is in an inconsistent state: pending dataverse (" + dataverseName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) ArrayList(java.util.ArrayList) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) DatasetType(org.apache.asterix.common.config.DatasetConfig.DatasetType) Index(org.apache.asterix.metadata.entities.Index) DataverseDropStatement(org.apache.asterix.lang.common.statement.DataverseDropStatement) ActiveLifecycleListener(org.apache.asterix.active.ActiveLifecycleListener) Identifier(org.apache.asterix.lang.common.struct.Identifier) JobSpecification(org.apache.hyracks.api.job.JobSpecification) ActiveJobNotificationHandler(org.apache.asterix.active.ActiveJobNotificationHandler) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) Dataverse(org.apache.asterix.metadata.entities.Dataverse) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) DatasetNodegroupCardinalityHint(org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) IActiveEntityEventsListener(org.apache.asterix.active.IActiveEntityEventsListener) EntityId(org.apache.asterix.active.EntityId) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider)
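
The structure to note is the compensation protocol: commit a PendingDropOp copy of the record first, run the physical drop jobs outside any metadata transaction, then remove the record in a second transaction; the catch block replays the jobs and the final drop if the method fails after the marker was committed. A hedged invocation sketch, assuming DataverseDropStatement exposes an (Identifier, boolean ifExists) constructor:

// Hypothetical call site inside the statement-dispatch loop.
Statement stmt = new DataverseDropStatement(new Identifier("tpch"), true /* if exists */);
handleDataverseDropStatement(metadataProvider, stmt, hcc);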

Example 94 with AlgebricksException

use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.

the class QueryTranslator method handleCreateFeedPolicyStatement.

protected void handleCreateFeedPolicyStatement(MetadataProvider metadataProvider, Statement stmt) throws AlgebricksException, HyracksDataException {
    String dataverse;
    String policy;
    FeedPolicyEntity newPolicy = null;
    MetadataTransactionContext mdTxnCtx = null;
    CreateFeedPolicyStatement cfps = (CreateFeedPolicyStatement) stmt;
    dataverse = getActiveDataverse(null);
    policy = cfps.getPolicyName();
    MetadataLockManager.INSTANCE.createFeedPolicyBegin(metadataProvider.getLocks(), dataverse, dataverse + "." + policy);
    try {
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, policy);
        if (feedPolicy != null) {
            if (cfps.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A policy with this name " + policy + " already exists.");
            }
        }
        boolean extendingExisting = cfps.getSourcePolicyName() != null;
        String description = cfps.getDescription() == null ? "" : cfps.getDescription();
        if (extendingExisting) {
            FeedPolicyEntity sourceFeedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, cfps.getSourcePolicyName());
            if (sourceFeedPolicy == null) {
                sourceFeedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(), MetadataConstants.METADATA_DATAVERSE_NAME, cfps.getSourcePolicyName());
                if (sourceFeedPolicy == null) {
                    throw new AlgebricksException("Unknown policy " + cfps.getSourcePolicyName());
                }
            }
            Map<String, String> policyProperties = sourceFeedPolicy.getProperties();
            policyProperties.putAll(cfps.getProperties());
            newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
        } else {
            Properties prop = new Properties();
            try (InputStream stream = new FileInputStream(cfps.getSourcePolicyFile())) {
                prop.load(stream);
            } catch (Exception e) {
                throw new AlgebricksException("Unable to read policy file " + cfps.getSourcePolicyFile(), e);
            }
            Map<String, String> policyProperties = new HashMap<>();
            for (Entry<Object, Object> entry : prop.entrySet()) {
                policyProperties.put((String) entry.getKey(), (String) entry.getValue());
            }
            newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
        }
        MetadataManager.INSTANCE.addFeedPolicy(mdTxnCtx, newPolicy);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (RemoteException | ACIDException e) {
        abort(e, e, mdTxnCtx);
        throw new HyracksDataException(e);
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : HashMap(java.util.HashMap) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Properties(java.util.Properties) ClusterProperties(org.apache.asterix.common.config.ClusterProperties) ExternalProperties(org.apache.asterix.common.config.ExternalProperties) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) CreateFeedPolicyStatement(org.apache.asterix.lang.common.statement.CreateFeedPolicyStatement) FeedPolicyEntity(org.apache.asterix.metadata.entities.FeedPolicyEntity) MutableObject(org.apache.commons.lang3.mutable.MutableObject)
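
The file-based branch delegates parsing to java.util.Properties, so any file Properties.load accepts will work. A minimal sketch, assuming a hypothetical my-policy.properties whose keys are invented for illustration (they are not actual AsterixDB policy parameters):

// my-policy.properties might contain, for example:
//   flowcontrol.enabled=true
//   max.spill.to.disk.bytes=1048576
Properties prop = new Properties();
try (InputStream stream = new FileInputStream("my-policy.properties")) {
    prop.load(stream); // same parsing applied to cfps.getSourcePolicyFile() above
}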

Example 95 with AlgebricksException

use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.

the class QueryTranslator method rewriteCompileInsertUpsert.

private JobSpecification rewriteCompileInsertUpsert(IClusterInfoCollector clusterInfoCollector, MetadataProvider metadataProvider, InsertStatement insertUpsert) throws RemoteException, AlgebricksException, ACIDException {
    // Insert/upsert statement rewriting (happens under the same ongoing metadata transaction)
    Pair<IReturningStatement, Integer> rewrittenResult = apiFramework.reWriteQuery(declaredFunctions, metadataProvider, insertUpsert, sessionOutput);
    InsertStatement rewrittenInsertUpsert = (InsertStatement) rewrittenResult.first;
    String dataverseName = getActiveDataverse(rewrittenInsertUpsert.getDataverseName());
    String datasetName = rewrittenInsertUpsert.getDatasetName().getValue();
    CompiledInsertStatement clfrqs;
    switch(insertUpsert.getKind()) {
        case Statement.Kind.INSERT:
            clfrqs = new CompiledInsertStatement(dataverseName, datasetName, rewrittenInsertUpsert.getQuery(), rewrittenInsertUpsert.getVarCounter(), rewrittenInsertUpsert.getVar(), rewrittenInsertUpsert.getReturnExpression());
            break;
        case Statement.Kind.UPSERT:
            clfrqs = new CompiledUpsertStatement(dataverseName, datasetName, rewrittenInsertUpsert.getQuery(), rewrittenInsertUpsert.getVarCounter(), rewrittenInsertUpsert.getVar(), rewrittenInsertUpsert.getReturnExpression());
            break;
        default:
            throw new AlgebricksException("Unsupported statement type " + rewrittenInsertUpsert.getKind());
    }
    // Insert/upsert statement compilation (happens under the same ongoing metadata transaction)
    return apiFramework.compileQuery(clusterInfoCollector, metadataProvider, rewrittenInsertUpsert.getQuery(), rewrittenResult.second, datasetName, sessionOutput, clfrqs);
}
Also used : CompiledUpsertStatement(org.apache.asterix.translator.CompiledStatements.CompiledUpsertStatement) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) IReturningStatement(org.apache.asterix.lang.common.base.IReturningStatement) CompiledInsertStatement(org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement) InsertStatement(org.apache.asterix.lang.common.statement.InsertStatement)
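
A hedged call-site sketch: rewrite and compile an upsert, then run the resulting job. parseInsertStatement is a hypothetical stand-in for the parser, and an open metadata transaction is assumed:

// All names are illustrative except JobUtils.runJob, which also appears in
// the drop-dataverse example above.
InsertStatement upsert = parseInsertStatement("upsert into ds ...");
JobSpecification spec = rewriteCompileInsertUpsert(clusterInfoCollector, metadataProvider, upsert);
JobUtils.runJob(hcc, spec, true); // execute on the cluster via the client connection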

Aggregations

AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 134 uses
MetadataException (org.apache.asterix.metadata.MetadataException): 42 uses
ArrayList (java.util.ArrayList): 39 uses
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 39 uses
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression): 38 uses
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 37 uses
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 36 uses
IOException (java.io.IOException): 35 uses
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 33 uses
Dataset (org.apache.asterix.metadata.entities.Dataset): 31 uses
IAType (org.apache.asterix.om.types.IAType): 31 uses
Mutable (org.apache.commons.lang3.mutable.Mutable): 30 uses
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator): 30 uses
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 28 uses
Index (org.apache.asterix.metadata.entities.Index): 26 uses
RemoteException (java.rmi.RemoteException): 25 uses
ACIDException (org.apache.asterix.common.exceptions.ACIDException): 24 uses
AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint): 23 uses
MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext): 22 uses
AString (org.apache.asterix.om.base.AString): 21 uses