Example 11 with Feed

Use of org.apache.asterix.metadata.entities.Feed in project asterixdb by apache.

In class QueryTranslator, method handleCreateFeedStatement:

protected void handleCreateFeedStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    CreateFeedStatement cfs = (CreateFeedStatement) stmt;
    String dataverseName = getActiveDataverse(cfs.getDataverseName());
    String feedName = cfs.getFeedName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.createFeedBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + feedName);
    Feed feed = null;
    try {
        feed = MetadataManager.INSTANCE.getFeed(metadataProvider.getMetadataTxnContext(), dataverseName, feedName);
        if (feed != null) {
            if (cfs.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A feed with this name " + feedName + " already exists.");
            }
        }
        String adaptorName = cfs.getAdaptorName();
        feed = new Feed(dataverseName, feedName, adaptorName, cfs.getAdaptorConfiguration());
        FeedMetadataUtil.validateFeed(feed, mdTxnCtx, appCtx);
        MetadataManager.INSTANCE.addFeed(metadataProvider.getMetadataTxnContext(), feed);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used: CreateFeedStatement (org.apache.asterix.lang.common.statement.CreateFeedStatement), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext), ACIDException (org.apache.asterix.common.exceptions.ACIDException), MetadataException (org.apache.asterix.metadata.MetadataException), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), CompilationException (org.apache.asterix.common.exceptions.CompilationException), IOException (java.io.IOException), RemoteException (java.rmi.RemoteException), AsterixException (org.apache.asterix.common.exceptions.AsterixException), Feed (org.apache.asterix.metadata.entities.Feed)
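
For context, the example above follows the usual create-entity pattern in the metadata layer: begin a metadata transaction, acquire the dataverse and feed locks, check whether the feed already exists, validate and add the new Feed, then commit. The following is a minimal sketch of that flow using only the calls visible in the snippet; the dataverse, feed, and adapter names are hypothetical placeholders, and the Map-based adapter configuration type is assumed.

// Hedged sketch of the create-feed metadata flow shown above; not the actual
// QueryTranslator code. Names are placeholders, and the real path also takes
// MetadataLockManager locks and calls FeedMetadataUtil.validateFeed.
void createFeedIfAbsent() throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        Feed existing = MetadataManager.INSTANCE.getFeed(mdTxnCtx, "SampleDataverse", "SampleFeed");
        if (existing == null) {
            Map<String, String> configuration = new HashMap<>(); // adapter settings from the DDL would go here
            Feed feed = new Feed("SampleDataverse", "SampleFeed", "socket_adapter", configuration);
            MetadataManager.INSTANCE.addFeed(mdTxnCtx, feed);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // QueryTranslator calls abort(e, e, mdTxnCtx) here before rethrowing
        throw e;
    }
}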

Example 12 with Feed

Use of org.apache.asterix.metadata.entities.Feed in project asterixdb by apache.

In class MetadataNode, method dropDataverse:

@Override
public void dropDataverse(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
    try {
        confirmDataverseCanBeDeleted(jobId, dataverseName);
        List<Dataset> dataverseDatasets;
        Dataset ds;
        dataverseDatasets = getDataverseDatasets(jobId, dataverseName);
        // Drop all datasets in this dataverse.
        for (int i = 0; i < dataverseDatasets.size(); i++) {
            ds = dataverseDatasets.get(i);
            dropDataset(jobId, dataverseName, ds.getDatasetName());
        }
        // After dropping datasets, drop datatypes.
        List<Datatype> dataverseDatatypes;
        // As a side effect, acquires an S lock on the 'datatype' dataset
        // on behalf of txnId.
        dataverseDatatypes = getDataverseDatatypes(jobId, dataverseName);
        // Drop all types in this dataverse.
        for (int i = 0; i < dataverseDatatypes.size(); i++) {
            forceDropDatatype(jobId, dataverseName, dataverseDatatypes.get(i).getDatatypeName());
        }
        // As a side effect, acquires an S lock on the 'Function' dataset
        // on behalf of txnId.
        List<Function> dataverseFunctions = getDataverseFunctions(jobId, dataverseName);
        // Drop all functions in this dataverse.
        for (Function function : dataverseFunctions) {
            dropFunction(jobId, new FunctionSignature(dataverseName, function.getName(), function.getArity()));
        }
        // As a side effect, acquires an S lock on the 'Adapter' dataset
        // on behalf of txnId.
        List<DatasourceAdapter> dataverseAdapters = getDataverseAdapters(jobId, dataverseName);
        // Drop all adapters in this dataverse.
        for (DatasourceAdapter adapter : dataverseAdapters) {
            dropAdapter(jobId, dataverseName, adapter.getAdapterIdentifier().getName());
        }
        List<Feed> dataverseFeeds;
        List<FeedConnection> feedConnections;
        Feed feed;
        dataverseFeeds = getDataverseFeeds(jobId, dataverseName);
        // Drop all feeds and connections in this dataverse.
        for (int i = 0; i < dataverseFeeds.size(); i++) {
            feed = dataverseFeeds.get(i);
            feedConnections = getFeedConnections(jobId, dataverseName, feed.getFeedName());
            for (FeedConnection feedConnection : feedConnections) {
                dropFeedConnection(jobId, dataverseName, feed.getFeedName(), feedConnection.getDatasetName());
            }
            dropFeed(jobId, dataverseName, feed.getFeedName());
        }
        List<FeedPolicyEntity> feedPolicies = getDataversePolicies(jobId, dataverseName);
        if (feedPolicies != null && feedPolicies.size() > 0) {
            // Drop all feed ingestion policies in this dataverse.
            for (FeedPolicyEntity feedPolicy : feedPolicies) {
                dropFeedPolicy(jobId, dataverseName, feedPolicy.getPolicyName());
            }
        }
        // Delete the dataverse entry from the 'dataverse' dataset.
        ITupleReference searchKey = createTuple(dataverseName);
        // As a side effect, acquires an S lock on the 'dataverse' dataset
        // on behalf of txnId.
        ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, searchKey);
        deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
    // TODO: Change this to be a BTree specific exception, e.g.,
    // BTreeKeyDoesNotExistException.
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
            throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
Also used: DatasourceAdapter (org.apache.asterix.metadata.entities.DatasourceAdapter), FeedConnection (org.apache.asterix.metadata.entities.FeedConnection), ExtensionMetadataDataset (org.apache.asterix.metadata.api.ExtensionMetadataDataset), Dataset (org.apache.asterix.metadata.entities.Dataset), FunctionSignature (org.apache.asterix.common.functions.FunctionSignature), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), Datatype (org.apache.asterix.metadata.entities.Datatype), ACIDException (org.apache.asterix.common.exceptions.ACIDException), Function (org.apache.asterix.metadata.entities.Function), FeedPolicyEntity (org.apache.asterix.metadata.entities.FeedPolicyEntity), ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference), Feed (org.apache.asterix.metadata.entities.Feed)
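
On the caller's side, this MetadataNode method is reached through MetadataManager inside a metadata transaction, in the same begin/commit/abort shape as Example 11. Below is a minimal sketch of such a caller; the dataverse name is a hypothetical placeholder, and the MetadataManager.INSTANCE.dropDataverse and abortTransaction wrappers are assumed counterparts of the node-level method rather than calls shown in this snippet.

// Hedged sketch: dropping a dataverse from the client side.
// Assumes MetadataManager.INSTANCE exposes dropDataverse(ctx, name) and
// abortTransaction(ctx); "SampleDataverse" is a hypothetical placeholder.
void dropSampleDataverse() throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, "SampleDataverse");
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        throw e;
    }
}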

Example 13 with Feed

Use of org.apache.asterix.metadata.entities.Feed in project asterixdb by apache.

In class MetadataNode, method getFeed:

@Override
public Feed getFeed(JobId jobId, String dataverse, String feedName) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverse, feedName);
        FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(false);
        List<Feed> results = new ArrayList<>();
        IValueExtractor<Feed> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        searchIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, searchKey, valueExtractor, results);
        if (!results.isEmpty()) {
            return results.get(0);
        }
        return null;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
Also used: MetadataEntityValueExtractor (org.apache.asterix.metadata.valueextractors.MetadataEntityValueExtractor), ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference), ArrayList (java.util.ArrayList), FeedTupleTranslator (org.apache.asterix.metadata.entitytupletranslators.FeedTupleTranslator), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), Feed (org.apache.asterix.metadata.entities.Feed)
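
Callers normally do not invoke this JobId-based method directly; Examples 11 and 14 reach it through MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverse, feedName) and treat a null result as "no such feed". A minimal sketch of that lookup pattern, with hypothetical names:

// Hedged sketch of the lookup pattern used in Examples 11 and 14;
// "SampleDataverse" and "SampleFeed" are hypothetical placeholders.
Feed feed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, "SampleDataverse", "SampleFeed");
if (feed == null) {
    // not found: Example 11 creates the feed, Example 14 throws IllegalStateException
}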

Example 14 with Feed

Use of org.apache.asterix.metadata.entities.Feed in project asterixdb by apache.

In class SubscribeFeedStatement, method initialize:

public void initialize(MetadataTransactionContext mdTxnCtx) throws MetadataException {
    this.query = new Query(false);
    EntityId sourceFeedId = connectionRequest.getReceivingFeedId();
    Feed subscriberFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, connectionRequest.getReceivingFeedId().getDataverse(), connectionRequest.getReceivingFeedId().getEntityName());
    if (subscriberFeed == null) {
        throw new IllegalStateException(" Subscriber feed " + subscriberFeed + " not found.");
    }
    String feedOutputType = getOutputType(mdTxnCtx);
    StringBuilder builder = new StringBuilder();
    builder.append("use dataverse " + sourceFeedId.getDataverse() + ";\n");
    builder.append("set" + " " + FunctionUtil.IMPORT_PRIVATE_FUNCTIONS + " " + "'" + Boolean.TRUE + "'" + ";\n");
    builder.append("set" + " " + FeedActivityDetails.FEED_POLICY_NAME + " " + "'" + connectionRequest.getPolicy() + "'" + ";\n");
    builder.append("insert into dataset " + connectionRequest.getTargetDataset() + " ");
    builder.append(" (" + " for $x in feed-collect ('" + sourceFeedId.getDataverse() + "'" + "," + "'" + sourceFeedId.getEntityName() + "'" + "," + "'" + connectionRequest.getReceivingFeedId().getEntityName() + "'" + "," + "'" + connectionRequest.getSubscriptionLocation().name() + "'" + "," + "'" + connectionRequest.getTargetDataset() + "'" + "," + "'" + feedOutputType + "'" + ")");
    List<FunctionSignature> functionsToApply = connectionRequest.getFunctionsToApply();
    if ((functionsToApply != null) && functionsToApply.isEmpty()) {
        builder.append(" return $x");
    } else {
        Function function;
        String rValueName = "x";
        String lValueName = "y";
        int variableIndex = 0;
        for (FunctionSignature appliedFunction : functionsToApply) {
            function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
            variableIndex++;
            switch (function.getLanguage().toUpperCase()) {
                case Function.LANGUAGE_AQL:
                case Function.LANGUAGE_JAVA:
                    // identical handling: chain the next "let" variable off the previous one
                    builder.append(" let " + "$" + lValueName + variableIndex + ":=" + function.getName() + "(" + "$" + rValueName + ")");
                    rValueName = lValueName + variableIndex;
                    break;
            }
            builder.append("\n");
        }
        builder.append("return $" + lValueName + variableIndex);
    }
    builder.append(")");
    builder.append(";");
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Connect feed statement translated to\n" + builder.toString());
    }
    IParser parser = parserFactory.createParser(new StringReader(builder.toString()));
    List<Statement> statements;
    try {
        statements = parser.parse();
        query = ((InsertStatement) statements.get(INSERT_STATEMENT_POS)).getQuery();
    } catch (CompilationException pe) {
        throw new MetadataException(pe);
    }
}
Also used: CompilationException (org.apache.asterix.common.exceptions.CompilationException), Query (org.apache.asterix.lang.common.statement.Query), InsertStatement (org.apache.asterix.lang.common.statement.InsertStatement), Statement (org.apache.asterix.lang.common.base.Statement), FunctionSignature (org.apache.asterix.common.functions.FunctionSignature), MetadataException (org.apache.asterix.metadata.MetadataException), EntityId (org.apache.asterix.active.EntityId), Function (org.apache.asterix.metadata.entities.Function), StringReader (java.io.StringReader), Feed (org.apache.asterix.metadata.entities.Feed), IParser (org.apache.asterix.lang.common.base.IParser)
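
To make the string building above concrete, here is roughly the statement it assembles for a hypothetical connection request that applies a single function. Every name below is a placeholder, the two set parameters stand for the values of FunctionUtil.IMPORT_PRIVATE_FUNCTIONS and FeedActivityDetails.FEED_POLICY_NAME, and the subscription location is whatever connectionRequest.getSubscriptionLocation().name() returns; the exact whitespace differs from the builder's output.

use dataverse SourceDataverse;
set <import-private-functions parameter> 'true';
set <feed-policy parameter> 'SamplePolicy';
insert into dataset TargetDataset (
    for $x in feed-collect ('SourceDataverse', 'SampleFeed', 'SampleFeed', '<subscription location>', 'TargetDataset', 'SampleOutputType')
    let $y1 := sampleFunction($x)
    return $y1
);

The parser then turns this text into a list of statements, and the Query of the InsertStatement at INSERT_STATEMENT_POS becomes the query executed for the feed connection.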

Aggregations

Feed (org.apache.asterix.metadata.entities.Feed): 14 usages
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 7 usages
MetadataException (org.apache.asterix.metadata.MetadataException): 6 usages
RemoteException (java.rmi.RemoteException): 5 usages
ArrayList (java.util.ArrayList): 5 usages
EntityId (org.apache.asterix.active.EntityId): 5 usages
ACIDException (org.apache.asterix.common.exceptions.ACIDException): 5 usages
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 5 usages
FeedConnection (org.apache.asterix.metadata.entities.FeedConnection): 5 usages
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 5 usages
IOException (java.io.IOException): 4 usages
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 4 usages
MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext): 4 usages
ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler): 3 usages
ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener): 3 usages
FunctionSignature (org.apache.asterix.common.functions.FunctionSignature): 3 usages
Dataset (org.apache.asterix.metadata.entities.Dataset): 3 usages
Function (org.apache.asterix.metadata.entities.Function): 3 usages
FeedEventsListener (org.apache.asterix.external.feed.management.FeedEventsListener): 2 usages
FeedTupleTranslator (org.apache.asterix.metadata.entitytupletranslators.FeedTupleTranslator): 2 usages