Example 6 with CompilationException

Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.

From class SocketServerInputStreamFactory, method configure:

@Override
public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException, CompilationException {
    try {
        sockets = new ArrayList<>();
        String modeValue = configuration.get(ExternalDataConstants.KEY_MODE);
        if (modeValue != null) {
            mode = Mode.valueOf(modeValue.trim().toUpperCase());
        }
        String socketsValue = configuration.get(ExternalDataConstants.KEY_SOCKETS);
        if (socketsValue == null) {
            throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_NOT_PROPERLY_CONFIGURED);
        }
        Map<InetAddress, Set<String>> ncMap = RuntimeUtils.getNodeControllerMap((ICcApplicationContext) serviceCtx.getApplicationContext());
        List<String> ncs = RuntimeUtils.getAllNodeControllers((ICcApplicationContext) serviceCtx.getApplicationContext());
        String[] socketsArray = socketsValue.split(",");
        Random random = new Random();
        for (String socket : socketsArray) {
            String[] socketTokens = socket.split(":");
            String host = socketTokens[0].trim();
            int port = Integer.parseInt(socketTokens[1].trim());
            Pair<String, Integer> p = null;
            switch(mode) {
                case IP:
                    Set<String> ncsOnIp = ncMap.get(InetAddress.getByName(host));
                    if ((ncsOnIp == null) || ncsOnIp.isEmpty()) {
                        throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_INVALID_HOST_NC, "host", host, StringUtils.join(ncMap.keySet(), ", "));
                    }
                    String[] ncArray = ncsOnIp.toArray(new String[] {});
                    String nc = ncArray[random.nextInt(ncArray.length)];
                    p = new Pair<>(nc, port);
                    break;
                case NC:
                    p = new Pair<>(host, port);
                    if (!ncs.contains(host)) {
                        throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_INVALID_HOST_NC, "NC", host, StringUtils.join(ncs, ", "));
                    }
                    break;
            }
            sockets.add(p);
        }
    } catch (CompilationException e) {
        throw e;
    } catch (HyracksDataException | UnknownHostException e) {
        throw new AsterixException(e);
    } catch (Exception e) {
        throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_NOT_PROPERLY_CONFIGURED);
    }
}
Also used: CompilationException (org.apache.asterix.common.exceptions.CompilationException), ICcApplicationContext (org.apache.asterix.common.dataflow.ICcApplicationContext), Set (java.util.Set), UnknownHostException (java.net.UnknownHostException), AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), AsterixException (org.apache.asterix.common.exceptions.AsterixException), IOException (java.io.IOException), Random (java.util.Random), InetAddress (java.net.InetAddress)
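
The configure method above accepts a comma-separated list of host:port pairs through ExternalDataConstants.KEY_SOCKETS. A minimal standalone sketch of just that parsing step follows; SocketListParser, SocketLocation, and the IllegalArgumentException fallback are hypothetical stand-ins for illustration (the real method reports failures through CompilationException error codes, as shown above).

import java.util.ArrayList;
import java.util.List;

// Hypothetical standalone sketch of the "sockets" parsing in configure():
// a comma-separated list of host:port pairs is split, trimmed, and validated.
public class SocketListParser {

    // Simple holder; AsterixDB itself uses org.apache.hyracks.algebricks.common.utils.Pair.
    static final class SocketLocation {
        final String host;
        final int port;
        SocketLocation(String host, int port) {
            this.host = host;
            this.port = port;
        }
        @Override
        public String toString() {
            return host + ":" + port;
        }
    }

    static List<SocketLocation> parse(String socketsValue) {
        if (socketsValue == null) {
            // configure() raises FEED_METADATA_SOCKET_ADAPTOR_SOCKET_NOT_PROPERLY_CONFIGURED here
            throw new IllegalArgumentException("'sockets' property is missing");
        }
        List<SocketLocation> result = new ArrayList<>();
        for (String socket : socketsValue.split(",")) {
            String[] tokens = socket.split(":");
            if (tokens.length != 2) {
                throw new IllegalArgumentException("expected host:port, got: " + socket);
            }
            result.add(new SocketLocation(tokens[0].trim(), Integer.parseInt(tokens[1].trim())));
        }
        return result;
    }

    public static void main(String[] args) {
        // e.g. the value of ExternalDataConstants.KEY_SOCKETS in the adapter configuration
        System.out.println(parse("127.0.0.1:10001, nc2:10002"));
    }
}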

Example 7 with CompilationException

Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.

From class QueryTranslator, method handleIndexDropStatement:

protected void handleIndexDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
    String datasetName = stmtIndexDrop.getDatasetName().getValue();
    String dataverseName = getActiveDataverse(stmtIndexDrop.getDataverseName());
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.dropIndexBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
    String indexName = null;
    // For external index
    boolean dropFilesIndex = false;
    try {
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
        IActiveEntityEventsListener[] listeners = activeEventHandler.getEventListeners();
        StringBuilder builder = null;
        for (IActiveEntityEventsListener listener : listeners) {
            if (listener.isEntityUsingDataset(ds)) {
                if (builder == null) {
                    builder = new StringBuilder();
                }
                builder.append(new FeedConnectionId(listener.getEntityId(), datasetName) + "\n");
            }
        }
        if (builder != null) {
            throw new CompilationException("Dataset " + datasetName + " is currently being fed into by the following active entities: " + builder.toString());
        }
        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (stmtIndexDrop.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
            }
            // #. prepare a job to drop the index in NC.
            jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds));
            // #. mark PendingDropOp on the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, indexName, index.getIndexType(), index.getKeyFieldNames(), index.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
            // #. commit the existing transaction before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
            for (JobSpecification jobSpec : jobsToExecute) {
                JobUtils.runJob(hcc, jobSpec, true);
            }
            // #. begin a new transaction
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            // #. finally, delete the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
        } else {
            // External dataset
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (stmtIndexDrop.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
            } else if (ExternalIndexingOperations.isFileIndex(index)) {
                throw new AlgebricksException("Dropping a dataset's files index is not allowed.");
            }
            // #. prepare a job to drop the index in NC.
            jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds));
            List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
            if (datasetIndexes.size() == 2) {
                dropFilesIndex = true;
                // only one index + the files index, we need to delete both of the indexes
                for (Index externalIndex : datasetIndexes) {
                    if (ExternalIndexingOperations.isFileIndex(externalIndex)) {
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, ds));
                        // #. mark PendingDropOp on the existing files index
                        MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, externalIndex.getIndexName());
                        MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, externalIndex.getIndexName(), externalIndex.getIndexType(), externalIndex.getKeyFieldNames(), externalIndex.getKeyFieldSourceIndicators(), externalIndex.getKeyFieldTypes(), externalIndex.isEnforcingKeyFileds(), externalIndex.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
                    }
                }
            }
            // #. mark PendingDropOp on the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, indexName, index.getIndexType(), index.getKeyFieldNames(), index.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
            // #. commit the existing transaction before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
            for (JobSpecification jobSpec : jobsToExecute) {
                JobUtils.runJob(hcc, jobSpec, true);
            }
            // #. begin a new transaction
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            // #. finally, delete the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (dropFilesIndex) {
                // delete the files index too
                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
                MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
            }
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // remove all the indexes in the NCs
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception since the metadata still needs to be compensated
                e.addSuppressed(e2);
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
                if (dropFilesIndex) {
                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
                }
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is in an inconsistent state: pending index (" + dataverseName + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used: ProgressState (org.apache.asterix.common.utils.JobUtils.ProgressState), CompilationException (org.apache.asterix.common.exceptions.CompilationException), IHyracksDataset (org.apache.hyracks.api.dataset.IHyracksDataset), IDataset (org.apache.asterix.common.metadata.IDataset), Dataset (org.apache.asterix.metadata.entities.Dataset), ArrayList (java.util.ArrayList), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext), Index (org.apache.asterix.metadata.entities.Index), ACIDException (org.apache.asterix.common.exceptions.ACIDException), MetadataException (org.apache.asterix.metadata.MetadataException), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), IOException (java.io.IOException), RemoteException (java.rmi.RemoteException), AsterixException (org.apache.asterix.common.exceptions.AsterixException), IActiveEntityEventsListener (org.apache.asterix.active.IActiveEntityEventsListener), ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener), IndexDropStatement (org.apache.asterix.lang.common.statement.IndexDropStatement), FeedConnectionId (org.apache.asterix.external.feed.management.FeedConnectionId), JobSpecification (org.apache.hyracks.api.job.JobSpecification), ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler)
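
The bulk of handleIndexDropStatement follows a two-transaction protocol: mark the index PENDING_DROP_OP and commit, run the physical drop jobs outside any metadata transaction, then delete the record in a fresh transaction. Here is a condensed sketch of that pattern under assumed stand-in types; MetadataStore and Job are hypothetical, not AsterixDB interfaces.

import java.util.List;

// Hypothetical condensed sketch of the two-transaction "pending drop" protocol
// that handleIndexDropStatement follows above.
public class PendingDropProtocol {

    interface MetadataStore {
        void begin();
        void markPendingDrop(String index);   // dropIndex + addIndex(..., PENDING_DROP_OP)
        void delete(String index);            // the final dropIndex
        void commit();
    }

    interface Job {
        void run() throws Exception;          // stands in for JobUtils.runJob(hcc, jobSpec, true)
    }

    static void dropIndex(MetadataStore md, List<Job> jobs, String index) throws Exception {
        // 1. mark the index PENDING_DROP_OP and commit, so a crash leaves a recoverable marker
        md.begin();
        md.markPendingDrop(index);
        md.commit();
        // 2. run the physical drop jobs on the node controllers outside any metadata txn
        for (Job job : jobs) {
            job.run();
        }
        // 3. in a fresh transaction, remove the metadata record for good
        md.begin();
        md.delete(index);
        md.commit();
    }
}

Committing the pending marker before running the jobs is what lets the catch block above compensate after a failure: the PENDING_DROP_OP record tells recovery exactly which index was mid-drop.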

Example 8 with CompilationException

Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.

From class QueryTranslator, method handleDisconnectFeedStatement:

protected void handleDisconnectFeedStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    DisconnectFeedStatement cfs = (DisconnectFeedStatement) stmt;
    String dataverseName = getActiveDataverse(cfs.getDataverseName());
    String datasetName = cfs.getDatasetName().getValue();
    String feedName = cfs.getFeedName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
    ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
    // Check whether feed is alive
    if (activeEventHandler.getActiveEntityListener(new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName)) != null) {
        throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, feedName);
    }
    MetadataLockManager.INSTANCE.disconnectFeedBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName, dataverseName + "." + cfs.getFeedName());
    try {
        FeedMetadataUtil.validateIfDatasetExists(metadataProvider, dataverseName, cfs.getDatasetName().getValue(), mdTxnCtx);
        FeedMetadataUtil.validateIfFeedExists(dataverseName, cfs.getFeedName().getValue(), mdTxnCtx);
        FeedConnection fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(), dataverseName, feedName, datasetName);
        if (fc == null) {
            throw new CompilationException("Feed " + feedName + " is currently not connected to " + cfs.getDatasetName().getValue() + ". Invalid operation!");
        }
        MetadataManager.INSTANCE.dropFeedConnection(mdTxnCtx, dataverseName, feedName, datasetName);
        for (FunctionSignature functionSignature : fc.getAppliedFunctions()) {
            Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, functionSignature);
            function.dereference();
            MetadataManager.INSTANCE.updateFunction(mdTxnCtx, function);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used: EntityId (org.apache.asterix.active.EntityId), CompilationException (org.apache.asterix.common.exceptions.CompilationException), Function (org.apache.asterix.metadata.entities.Function), ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener), FeedConnection (org.apache.asterix.metadata.entities.FeedConnection), DisconnectFeedStatement (org.apache.asterix.lang.common.statement.DisconnectFeedStatement), MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext), ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler), FunctionSignature (org.apache.asterix.common.functions.FunctionSignature), ACIDException (org.apache.asterix.common.exceptions.ACIDException), MetadataException (org.apache.asterix.metadata.MetadataException), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), IOException (java.io.IOException), RemoteException (java.rmi.RemoteException), AsterixException (org.apache.asterix.common.exceptions.AsterixException)
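
The disconnect path above enforces two preconditions (the feed must not be alive, and a connection record must exist) before dropping the connection and releasing each applied function's reference. A hypothetical condensed sketch of that flow; FeedRegistry, Connection, and FunctionRef are stand-ins for illustration, not AsterixDB classes.

import java.util.List;

// Hypothetical sketch of the disconnect flow: refuse to change connectivity while
// the feed is alive, then drop the connection and release the applied functions.
public class DisconnectSketch {

    interface FunctionRef {
        void dereference();  // mirrors Function.dereference() + updateFunction above
    }

    interface Connection {
        List<FunctionRef> appliedFunctions();
    }

    interface FeedRegistry {
        boolean isAlive(String feed);
        Connection find(String feed, String dataset);  // null if not connected
        void drop(String feed, String dataset);
    }

    static void disconnect(FeedRegistry registry, String feed, String dataset) {
        if (registry.isAlive(feed)) {
            // QueryTranslator raises FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED here
            throw new IllegalStateException("feed " + feed + " is alive; stop it first");
        }
        Connection fc = registry.find(feed, dataset);
        if (fc == null) {
            throw new IllegalStateException("feed " + feed + " is not connected to " + dataset);
        }
        registry.drop(feed, dataset);
        // release one reference per function the connection applied
        for (FunctionRef f : fc.appliedFunctions()) {
            f.dereference();
        }
    }
}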

Example 9 with CompilationException

Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.

From class QueryTranslator, method configureNodegroupForDataset:

protected static String configureNodegroupForDataset(ICcApplicationContext appCtx, Map<String, String> hints, String dataverseName, String datasetName, MetadataProvider metadataProvider) throws Exception {
    Set<String> allNodes = ClusterStateManager.INSTANCE.getParticipantNodes();
    Set<String> selectedNodes = new LinkedHashSet<>();
    String hintValue = hints.get(DatasetNodegroupCardinalityHint.NAME);
    if (hintValue == null) {
        selectedNodes.addAll(allNodes);
    } else {
        int nodegroupCardinality;
        boolean valid = DatasetHints.validate(appCtx, DatasetNodegroupCardinalityHint.NAME, hintValue).first;
        if (!valid) {
            throw new CompilationException("Incorrect use of hint: " + DatasetNodegroupCardinalityHint.NAME);
        } else {
            nodegroupCardinality = Integer.parseInt(hintValue);
        }
        List<String> allNodeList = new ArrayList<>(allNodes);
        Collections.shuffle(allNodeList);
        selectedNodes.addAll(allNodeList.subList(0, nodegroupCardinality));
    }
    // Creates the associated node group for the dataset.
    return DatasetUtil.createNodeGroupForNewDataset(dataverseName, datasetName, selectedNodes, metadataProvider);
}
Also used: LinkedHashSet (java.util.LinkedHashSet), CompilationException (org.apache.asterix.common.exceptions.CompilationException), ArrayList (java.util.ArrayList), DatasetNodegroupCardinalityHint (org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint), AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint)
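
The selection logic above reduces to: no hint means use every participant node; a valid hint means draw a random subset of that cardinality. A self-contained sketch of just that step follows; NodegroupSelection and its IllegalArgumentException are assumptions for illustration, since the real method delegates validation to DatasetHints.validate and reports misuse as a CompilationException.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Hypothetical standalone sketch of the node selection in configureNodegroupForDataset.
public class NodegroupSelection {

    static Set<String> selectNodes(Set<String> allNodes, Integer cardinalityHint) {
        Set<String> selected = new LinkedHashSet<>();
        if (cardinalityHint == null) {
            // no hint: the node group spans every participant node
            selected.addAll(allNodes);
        } else {
            if (cardinalityHint < 1 || cardinalityHint > allNodes.size()) {
                throw new IllegalArgumentException("invalid cardinality: " + cardinalityHint);
            }
            // hint present: shuffle and take a random subset of the requested size
            List<String> shuffled = new ArrayList<>(allNodes);
            Collections.shuffle(shuffled);
            selected.addAll(shuffled.subList(0, cardinalityHint));
        }
        return selected;
    }

    public static void main(String[] args) {
        Set<String> cluster = new LinkedHashSet<>(Arrays.asList("nc1", "nc2", "nc3", "nc4"));
        System.out.println(selectNodes(cluster, 2));  // e.g. [nc3, nc1]
    }
}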

Example 10 with CompilationException

Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.

From class QueryTranslator, method validateIfResourceIsActiveInFeed:

protected void validateIfResourceIsActiveInFeed(Dataset dataset) throws CompilationException {
    StringBuilder builder = null;
    ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
    ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
    IActiveEntityEventsListener[] listeners = activeEventHandler.getEventListeners();
    for (IActiveEntityEventsListener listener : listeners) {
        if (listener.isEntityUsingDataset(dataset)) {
            if (builder == null) {
                builder = new StringBuilder();
            }
            builder.append(listener.getEntityId()).append('\n');
        }
    }
    if (builder != null) {
        throw new CompilationException("Dataset " + dataset.getDataverseName() + "." + dataset.getDatasetName() + " is currently being fed into by the following active entities:\n" + builder.toString());
    }
}
Also used: IActiveEntityEventsListener (org.apache.asterix.active.IActiveEntityEventsListener), CompilationException (org.apache.asterix.common.exceptions.CompilationException), ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener), ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler)
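
Note the accumulate-then-fail shape of the method: rather than throwing on the first active entity found, it gathers every offender into a single message. The same pattern in generic form, as a hypothetical sketch (ActiveUsageCheck and the IllegalStateException are stand-ins for the CompilationException used above):

import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;

// Hypothetical generic form of the check above: collect every active entity that
// uses the dataset and fail once with a message listing all of them.
public class ActiveUsageCheck {

    static <T> void failIfAnyMatch(List<T> entities, Predicate<T> usesDataset, String dataset) {
        StringBuilder builder = null;
        for (T entity : entities) {
            if (usesDataset.test(entity)) {
                if (builder == null) {
                    builder = new StringBuilder();
                }
                builder.append(entity).append('\n');
            }
        }
        if (builder != null) {
            throw new IllegalStateException(
                    "Dataset " + dataset + " is currently being fed into by:\n" + builder);
        }
    }

    public static void main(String[] args) {
        failIfAnyMatch(Arrays.asList("feedA", "feedB"), f -> false, "dv.ds");  // passes
    }
}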

Aggregations

Classes co-occurring with CompilationException across the indexed examples (usage counts):

CompilationException (org.apache.asterix.common.exceptions.CompilationException): 44
ArrayList (java.util.ArrayList): 13
IAType (org.apache.asterix.om.types.IAType): 12
ARecordType (org.apache.asterix.om.types.ARecordType): 9
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 9
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 8
IOException (java.io.IOException): 7
List (java.util.List): 7
FunctionSignature (org.apache.asterix.common.functions.FunctionSignature): 7
MetadataException (org.apache.asterix.metadata.MetadataException): 7
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 7
ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits): 7
RemoteException (java.rmi.RemoteException): 6
ACIDException (org.apache.asterix.common.exceptions.ACIDException): 6
Expression (org.apache.asterix.lang.common.base.Expression): 6
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 6
IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory): 6
ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener): 5
IDataset (org.apache.asterix.common.metadata.IDataset): 5
Dataset (org.apache.asterix.metadata.entities.Dataset): 5