Search in sources :

Example 11 with FeedConnection

use of org.apache.asterix.metadata.entities.FeedConnection in project asterixdb by apache.

The following example is from the class QueryTranslator, method handleConnectFeedStatement.

/**
 * Handles a CONNECT FEED statement.
 * <p>
 * Validates that the feed is not currently active, that the target dataset, the feed and
 * every applied function exist, and that no connection between this feed and dataset is
 * already recorded. On success a new {@link FeedConnection} is persisted and the reference
 * count of each applied function is incremented, all inside one metadata transaction.
 *
 * @param metadataProvider provider used to read and update metadata
 * @param stmt the statement to execute; must be a {@link ConnectFeedStatement}
 * @throws Exception if validation fails or the metadata transaction must be aborted
 */
private void handleConnectFeedStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    FeedConnection fc;
    ConnectFeedStatement cfs = (ConnectFeedStatement) stmt;
    String dataverseName = getActiveDataverse(cfs.getDataverseName());
    String feedName = cfs.getFeedName();
    String datasetName = cfs.getDatasetName().getValue();
    String policyName = cfs.getPolicy();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // Reject the request while the feed is alive: connectivity of a running feed
    // must not be changed.
    ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
    ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
    if (activeEventHandler.getActiveEntityListener(new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName)) != null) {
        throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, feedName);
    }
    // Acquire the metadata locks covering the dataverse, the dataset and the feed.
    MetadataLockManager.INSTANCE.connectFeedBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName, dataverseName + "." + feedName);
    try {
        // Validation: the dataset, the feed and every applied function must exist.
        FeedMetadataUtil.validateIfDatasetExists(metadataProvider, dataverseName, datasetName, mdTxnCtx);
        Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName, metadataProvider.getMetadataTxnContext());
        ARecordType outputType = FeedMetadataUtil.getOutputType(feed, feed.getAdapterConfiguration(), ExternalDataConstants.KEY_TYPE_NAME);
        List<FunctionSignature> appliedFunctions = cfs.getAppliedFunctions();
        for (FunctionSignature func : appliedFunctions) {
            if (MetadataManager.INSTANCE.getFunction(mdTxnCtx, func) == null) {
                throw new CompilationException(ErrorCode.FEED_CONNECT_FEED_APPLIED_INVALID_FUNCTION, func.getName());
            }
        }
        fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(), dataverseName, feedName, datasetName);
        if (fc != null) {
            // Message fixed: was "Feed<name> is already connected dataset <ds>".
            throw new AlgebricksException("Feed " + feedName + " is already connected to dataset " + datasetName);
        }
        fc = new FeedConnection(dataverseName, feedName, datasetName, appliedFunctions, policyName, outputType.toString());
        MetadataManager.INSTANCE.addFeedConnection(metadataProvider.getMetadataTxnContext(), fc);
        // Increase the reference count of every applied function so none of them can
        // be dropped while this connection exists.
        for (FunctionSignature funcSig : appliedFunctions) {
            // The function should be cached in the Metadata manager, so this lookup is not expensive.
            Function func = MetadataManager.INSTANCE.getFunction(mdTxnCtx, funcSig);
            func.reference();
            MetadataManager.INSTANCE.updateFunction(mdTxnCtx, func);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : CompilationException(org.apache.asterix.common.exceptions.CompilationException) FeedConnection(org.apache.asterix.metadata.entities.FeedConnection) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ConnectFeedStatement(org.apache.asterix.lang.common.statement.ConnectFeedStatement) FunctionSignature(org.apache.asterix.common.functions.FunctionSignature) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) EntityId(org.apache.asterix.active.EntityId) Function(org.apache.asterix.metadata.entities.Function) ActiveLifecycleListener(org.apache.asterix.active.ActiveLifecycleListener) ActiveJobNotificationHandler(org.apache.asterix.active.ActiveJobNotificationHandler) ARecordType(org.apache.asterix.om.types.ARecordType) Feed(org.apache.asterix.metadata.entities.Feed)

Example 12 with FeedConnection

use of org.apache.asterix.metadata.entities.FeedConnection in project asterixdb by apache.

The following example is from the class IntroduceRandomPartitioningFeedComputationRule, method rewritePre.

/**
 * Rewrites an ASSIGN that sits directly on a feed datasource scan by splicing a
 * random-partition exchange between the two, provided the feed connection has at
 * least one applied function. The ASSIGN's physical operator is then constrained
 * to the cardinality reported by the feed datasource.
 *
 * @return {@code true} if the plan was modified, {@code false} otherwise
 */
@Override
public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
    ILogicalOperator operator = opRef.getValue();
    if (!operator.getOperatorTag().equals(LogicalOperatorTag.ASSIGN)) {
        return false;
    }
    ILogicalOperator input = operator.getInputs().get(0).getValue();
    if (!input.getOperatorTag().equals(LogicalOperatorTag.DATASOURCESCAN)) {
        return false;
    }
    DataSourceScanOperator scan = (DataSourceScanOperator) input;
    DataSource source = (DataSource) scan.getDataSource();
    if (source.getDatasourceType() != DataSource.Type.FEED) {
        return false;
    }
    final FeedDataSource feedSource = (FeedDataSource) source;
    FeedConnection connection = feedSource.getFeedConnection();
    if (connection.getAppliedFunctions() == null || connection.getAppliedFunctions().isEmpty()) {
        // No applied functions: nothing to repartition for.
        return false;
    }
    // Node domain whose cardinality is the feed's compute cardinality.
    INodeDomain computeDomain = new INodeDomain() {

        @Override
        public boolean sameAs(INodeDomain other) {
            return other == this;
        }

        @Override
        public Integer cardinality() {
            return feedSource.getComputeCardinality();
        }
    };
    ExchangeOperator exchange = new ExchangeOperator();
    exchange.setPhysicalOperator(new RandomPartitionExchangePOperator(computeDomain));
    // Splice the exchange between the ASSIGN and the scan.
    operator.getInputs().get(0).setValue(exchange);
    exchange.getInputs().add(new MutableObject<ILogicalOperator>(scan));
    ExecutionMode mode = ((AbstractLogicalOperator) scan).getExecutionMode();
    exchange.setExecutionMode(mode);
    exchange.computeDeliveredPhysicalProperties(context);
    context.computeAndSetTypeEnvironmentForOperator(exchange);
    // Constrain the ASSIGN's physical operator to the same cardinality.
    AssignOperator assign = (AssignOperator) opRef.getValue();
    AssignPOperator assignPhysical = (AssignPOperator) assign.getPhysicalOperator();
    assignPhysical.setCardinalityConstraint(computeDomain.cardinality());
    return true;
}
Also used : AbstractLogicalOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator) FeedConnection(org.apache.asterix.metadata.entities.FeedConnection) ILogicalOperator(org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator) RandomPartitionExchangePOperator(org.apache.hyracks.algebricks.core.algebra.operators.physical.RandomPartitionExchangePOperator) ExchangeOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator) INodeDomain(org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain) ExecutionMode(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode) AssignOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator) FeedDataSource(org.apache.asterix.metadata.declared.FeedDataSource) DataSource(org.apache.asterix.metadata.declared.DataSource) FeedDataSource(org.apache.asterix.metadata.declared.FeedDataSource) AssignPOperator(org.apache.hyracks.algebricks.core.algebra.operators.physical.AssignPOperator) DataSourceScanOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator)

Example 13 with FeedConnection

use of org.apache.asterix.metadata.entities.FeedConnection in project asterixdb by apache.

The following example is from the class QueryTranslator, method handleStartFeedStatement.

/**
 * Handles a START FEED statement: builds and submits the Hyracks job that runs the
 * feed and all of its recorded connections, registering a FeedEventsListener so the
 * runtime can track the feed's lifecycle. Throws if the feed is already started.
 *
 * @param metadataProvider provider used to read metadata within a transaction
 * @param stmt the statement to execute; must be a StartFeedStatement
 * @param hcc client connection used to submit the compiled job
 * @throws Exception if the feed is already started, validation fails, or job submission fails
 */
private void handleStartFeedStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    StartFeedStatement sfs = (StartFeedStatement) stmt;
    String dataverseName = getActiveDataverse(sfs.getDataverseName());
    String feedName = sfs.getFeedName().getValue();
    // Transaction handling
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // Runtime handler
    EntityId entityId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
    // Load the feed and all of its connections from metadata.
    // NOTE(review): "getFeedConections" is spelled this way in the MetadataManager API.
    Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName, metadataProvider.getMetadataTxnContext());
    List<FeedConnection> feedConnections = MetadataManager.INSTANCE.getFeedConections(metadataProvider.getMetadataTxnContext(), dataverseName, feedName);
    ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
    IStorageComponentProvider storageComponentProvider = new StorageComponentProvider();
    DefaultStatementExecutorFactory qtFactory = new DefaultStatementExecutorFactory();
    ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
    ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
    // An existing listener for this entity means the feed is already running.
    FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(entityId);
    if (listener != null) {
        throw new AlgebricksException("Feed " + feedName + " is started already.");
    }
    // Acquire locks on the feed and every connected dataset before starting.
    MetadataLockManager.INSTANCE.startFeedBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + feedName, feedConnections);
    try {
        // Collect the datasets targeted by this feed's connections.
        List<IDataset> datasets = new ArrayList<>();
        for (FeedConnection connection : feedConnections) {
            Dataset ds = metadataProvider.findDataset(connection.getDataverseName(), connection.getDatasetName());
            datasets.add(ds);
        }
        // Compile the feed job (left: job spec, right: partition constraint / locations).
        org.apache.commons.lang3.tuple.Pair<JobSpecification, AlgebricksAbsolutePartitionConstraint> jobInfo = FeedOperations.buildStartFeedJob(sessionOutput, metadataProvider, feed, feedConnections, compilationProvider, storageComponentProvider, qtFactory, hcc);
        JobSpecification feedJob = jobInfo.getLeft();
        // Register the lifecycle listener before submitting so no events are missed.
        listener = new FeedEventsListener(appCtx, entityId, datasets, jobInfo.getRight().getLocations());
        activeEventHandler.registerListener(listener);
        IActiveEventSubscriber eventSubscriber = listener.subscribe(ActivityState.STARTED);
        feedJob.setProperty(ActiveJobNotificationHandler.ACTIVE_ENTITY_PROPERTY_NAME, entityId);
        // Optionally block for completion depending on the WAIT_FOR_COMPLETION config flag.
        JobUtils.runJob(hcc, feedJob, Boolean.valueOf(metadataProvider.getConfig().get(StartFeedStatement.WAIT_FOR_COMPLETION)));
        // Wait until the feed reaches the STARTED state.
        eventSubscriber.sync();
        LOGGER.log(Level.INFO, "Submitted");
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        // Roll back the listener registration if startup failed.
        if (listener != null) {
            activeEventHandler.unregisterListener(listener);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : FeedEventsListener(org.apache.asterix.external.feed.management.FeedEventsListener) ArrayList(java.util.ArrayList) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ActiveLifecycleListener(org.apache.asterix.active.ActiveLifecycleListener) ILangCompilationProvider(org.apache.asterix.compiler.provider.ILangCompilationProvider) IDataset(org.apache.asterix.common.metadata.IDataset) JobSpecification(org.apache.hyracks.api.job.JobSpecification) ActiveJobNotificationHandler(org.apache.asterix.active.ActiveJobNotificationHandler) Feed(org.apache.asterix.metadata.entities.Feed) AqlCompilationProvider(org.apache.asterix.compiler.provider.AqlCompilationProvider) IStorageComponentProvider(org.apache.asterix.common.context.IStorageComponentProvider) FeedConnection(org.apache.asterix.metadata.entities.FeedConnection) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) IActiveEventSubscriber(org.apache.asterix.active.IActiveEventSubscriber) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) IStorageComponentProvider(org.apache.asterix.common.context.IStorageComponentProvider) StorageComponentProvider(org.apache.asterix.file.StorageComponentProvider) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) EntityId(org.apache.asterix.active.EntityId) 
StartFeedStatement(org.apache.asterix.lang.common.statement.StartFeedStatement) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint)

Example 14 with FeedConnection

use of org.apache.asterix.metadata.entities.FeedConnection in project asterixdb by apache.

The following example is from the class MetadataNode, method dropDataverse.

/**
 * Drops a dataverse and, transitively, every metadata entity it contains: datasets,
 * datatypes, functions, adapters, feeds (including their connections) and feed
 * policies, before finally deleting the dataverse tuple itself.
 *
 * @param jobId the metadata transaction's job id
 * @param dataverseName name of the dataverse to drop
 * @throws MetadataException if the dataverse does not exist or a drop step fails
 * @throws RemoteException on RMI failure
 */
@Override
public void dropDataverse(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
    try {
        confirmDataverseCanBeDeleted(jobId, dataverseName);
        // Drop all datasets in this dataverse.
        for (Dataset dataset : getDataverseDatasets(jobId, dataverseName)) {
            dropDataset(jobId, dataverseName, dataset.getDatasetName());
        }
        // After dropping datasets, drop the datatypes. As a side effect this acquires
        // an S lock on the 'datatype' dataset on behalf of txnId.
        for (Datatype datatype : getDataverseDatatypes(jobId, dataverseName)) {
            forceDropDatatype(jobId, dataverseName, datatype.getDatatypeName());
        }
        // Drop all functions in this dataverse. As a side effect this acquires an
        // S lock on the 'Function' dataset on behalf of txnId.
        for (Function function : getDataverseFunctions(jobId, dataverseName)) {
            dropFunction(jobId, new FunctionSignature(dataverseName, function.getName(), function.getArity()));
        }
        // Drop all adapters in this dataverse. As a side effect this acquires an
        // S lock on the 'Adapter' dataset on behalf of txnId.
        for (DatasourceAdapter adapter : getDataverseAdapters(jobId, dataverseName)) {
            dropAdapter(jobId, dataverseName, adapter.getAdapterIdentifier().getName());
        }
        // Drop all feeds and their connections in this dataverse.
        for (Feed feed : getDataverseFeeds(jobId, dataverseName)) {
            for (FeedConnection connection : getFeedConnections(jobId, dataverseName, feed.getFeedName())) {
                dropFeedConnection(jobId, dataverseName, feed.getFeedName(), connection.getDatasetName());
            }
            dropFeed(jobId, dataverseName, feed.getFeedName());
        }
        // Drop all feed ingestion policies in this dataverse.
        List<FeedPolicyEntity> feedPolicies = getDataversePolicies(jobId, dataverseName);
        if (feedPolicies != null && !feedPolicies.isEmpty()) {
            for (FeedPolicyEntity feedPolicy : feedPolicies) {
                dropFeedPolicy(jobId, dataverseName, feedPolicy.getPolicyName());
            }
        }
        // Finally delete the dataverse entry from the 'dataverse' dataset. As a side
        // effect this acquires an S lock on the 'dataverse' dataset on behalf of txnId.
        ITupleReference searchKey = createTuple(dataverseName);
        ITupleReference dataverseTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, searchKey);
        deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, dataverseTuple);
    // TODO: Change this to be a BTree specific exception, e.g.,
    // BTreeKeyDoesNotExistException.
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
            throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
Also used : DatasourceAdapter(org.apache.asterix.metadata.entities.DatasourceAdapter) FeedConnection(org.apache.asterix.metadata.entities.FeedConnection) ExtensionMetadataDataset(org.apache.asterix.metadata.api.ExtensionMetadataDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) FunctionSignature(org.apache.asterix.common.functions.FunctionSignature) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) Datatype(org.apache.asterix.metadata.entities.Datatype) ACIDException(org.apache.asterix.common.exceptions.ACIDException) Function(org.apache.asterix.metadata.entities.Function) FeedPolicyEntity(org.apache.asterix.metadata.entities.FeedPolicyEntity) ITupleReference(org.apache.hyracks.dataflow.common.data.accessors.ITupleReference) Feed(org.apache.asterix.metadata.entities.Feed)

Aggregations

FeedConnection (org.apache.asterix.metadata.entities.FeedConnection)14 ArrayList (java.util.ArrayList)6 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)6 Feed (org.apache.asterix.metadata.entities.Feed)5 RemoteException (java.rmi.RemoteException)4 ACIDException (org.apache.asterix.common.exceptions.ACIDException)4 FunctionSignature (org.apache.asterix.common.functions.FunctionSignature)4 AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)4 IOException (java.io.IOException)3 ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler)3 ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener)3 EntityId (org.apache.asterix.active.EntityId)3 AsterixException (org.apache.asterix.common.exceptions.AsterixException)3 CompilationException (org.apache.asterix.common.exceptions.CompilationException)3 MetadataException (org.apache.asterix.metadata.MetadataException)3 MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext)3 Dataset (org.apache.asterix.metadata.entities.Dataset)3 Function (org.apache.asterix.metadata.entities.Function)3 List (java.util.List)2 FeedDataSource (org.apache.asterix.metadata.declared.FeedDataSource)2