Search in sources :

Example 21 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

The class QueryTranslator, method handleCreateNodeGroupStatement.

/**
 * Handles a CREATE NODEGROUP statement: registers a new node group in the
 * metadata catalog unless one with the same name already exists.
 *
 * @param metadataProvider provider whose transaction context and locks are used
 * @param stmt             the parsed statement; must be a {@link NodegroupDecl}
 * @throws AlgebricksException if the nodegroup exists and IF NOT EXISTS was not given
 * @throws Exception           any metadata-layer failure; the transaction is aborted first
 */
protected void handleCreateNodeGroupStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    NodegroupDecl nodegroupDecl = (NodegroupDecl) stmt;
    String nodegroupName = nodegroupDecl.getNodegroupName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // Serialize concurrent DDL on the same nodegroup name.
    MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodegroupName);
    try {
        NodeGroup existingGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodegroupName);
        if (existingGroup == null) {
            // Collect the node controller names declared by the statement.
            List<Identifier> ncIdentifiers = nodegroupDecl.getNodeControllerNames();
            List<String> ncNames = new ArrayList<>(ncIdentifiers.size());
            for (Identifier ncId : ncIdentifiers) {
                ncNames.add(ncId.getValue());
            }
            MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(nodegroupName, ncNames));
        } else if (!nodegroupDecl.getIfNotExists()) {
            throw new AlgebricksException("A nodegroup with this name " + nodegroupName + " already exists.");
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // Roll back the metadata transaction before propagating the failure.
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : NodegroupDecl(org.apache.asterix.lang.common.statement.NodegroupDecl) Identifier(org.apache.asterix.lang.common.struct.Identifier) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) ArrayList(java.util.ArrayList) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) NodeGroup(org.apache.asterix.metadata.entities.NodeGroup)

Example 22 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

The class ExternalLibraryUtils, method installLibraryIfNeeded.

/**
     * Installs the library found in {@code libraryDir} into the metadata catalog for
     * the given dataverse, unless it is already installed and was not just un-installed.
     * Each element of a library is installed as part of a transaction. Any
     * failure in installing an element does not affect installation of other
     * libraries (the failure is logged and the transaction aborted, not rethrown).
     *
     * @param dataverse       name of the dataverse the library belongs to
     * @param libraryDir      directory containing the library's jars and its single XML descriptor
     * @param uninstalledLibs map from dataverse name to names of libraries just un-installed
     */
protected static void installLibraryIfNeeded(String dataverse, final File libraryDir, Map<String, List<String>> uninstalledLibs) throws Exception {
    String libraryName = libraryDir.getName().trim();
    List<String> uninstalledLibsInDv = uninstalledLibs.get(dataverse);
    // was this library just un-installed?
    boolean wasUninstalled = uninstalledLibsInDv != null && uninstalledLibsInDv.contains(libraryName);
    MetadataTransactionContext mdTxnCtx = null;
    try {
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        Library libraryInMetadata = MetadataManager.INSTANCE.getLibrary(mdTxnCtx, dataverse, libraryName);
        if (libraryInMetadata != null && !wasUninstalled) {
            // exists in metadata and was not un-installed, we return.
            // Another place which shows that our metadata transactions are broken
            // (we didn't call commit before!!!)
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return;
        }
        // Add library
        MetadataManager.INSTANCE.addLibrary(mdTxnCtx, new Library(dataverse, libraryName));
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("Added library " + libraryName + " to Metadata");
        }
        // Get the descriptor: the library must ship exactly one *.xml descriptor file.
        String[] libraryDescriptors = libraryDir.list((dir, name) -> name.endsWith(".xml"));
        if (libraryDescriptors == null) {
            // list() returns null when libraryDir is not a readable directory.
            throw new IOException("Unable to list files in directory " + libraryDir);
        }
        if (libraryDescriptors.length == 0) {
            // should be fine. library was installed but its content was not added to metadata
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return;
        } else if (libraryDescriptors.length > 1) {
            throw new IllegalStateException("More than 1 library descriptors defined");
        }
        ExternalLibrary library = getLibrary(new File(libraryDir + File.separator + libraryDescriptors[0]));
        // Get the dataverse; create it (pending no-op) if it does not exist yet.
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
        if (dv == null) {
            MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverse, NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, MetadataUtil.PENDING_NO_OP));
        }
        // Add functions declared by the descriptor, namespaced as libraryName#functionName.
        if (library.getLibraryFunctions() != null) {
            for (LibraryFunction function : library.getLibraryFunctions().getLibraryFunction()) {
                String[] fargs = function.getArguments().trim().split(",");
                List<String> args = new ArrayList<>();
                for (String arg : fargs) {
                    args.add(arg);
                }
                Function f = new Function(dataverse, libraryName + "#" + function.getName().trim(), args.size(), args, function.getReturnType().trim(), function.getDefinition().trim(), library.getLanguage().trim(), function.getFunctionType().trim(), 0);
                MetadataManager.INSTANCE.addFunction(mdTxnCtx, f);
                if (LOGGER.isLoggable(Level.INFO)) {
                    LOGGER.info("Installed function: " + libraryName + "#" + function.getName().trim());
                }
            }
        }
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("Installed functions in library :" + libraryName);
        }
        // Add adapters declared by the descriptor, namespaced as libraryName#adapterName.
        if (library.getLibraryAdapters() != null) {
            for (LibraryAdapter adapter : library.getLibraryAdapters().getLibraryAdapter()) {
                String adapterFactoryClass = adapter.getFactoryClass().trim();
                String adapterName = libraryName + "#" + adapter.getName().trim();
                AdapterIdentifier aid = new AdapterIdentifier(dataverse, adapterName);
                DatasourceAdapter dsa = new DatasourceAdapter(aid, adapterFactoryClass, IDataSourceAdapter.AdapterType.EXTERNAL);
                MetadataManager.INSTANCE.addAdapter(mdTxnCtx, dsa);
                if (LOGGER.isLoggable(Level.INFO)) {
                    LOGGER.info("Installed adapter: " + adapterName);
                }
            }
        }
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("Installed adapters in library :" + libraryName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (LOGGER.isLoggable(Level.SEVERE)) {
            LOGGER.log(Level.SEVERE, "Exception in installing library " + libraryName, e);
        }
        // Guard against NPE: beginTransaction() itself may have been the failure,
        // in which case mdTxnCtx is still null and there is nothing to abort.
        if (mdTxnCtx != null) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
    }
}
Also used : DatasourceAdapter(org.apache.asterix.metadata.entities.DatasourceAdapter) ArrayList(java.util.ArrayList) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) IOException(java.io.IOException) Dataverse(org.apache.asterix.metadata.entities.Dataverse) LibraryFunction(org.apache.asterix.external.library.LibraryFunction) AsterixException(org.apache.asterix.common.exceptions.AsterixException) ACIDException(org.apache.asterix.common.exceptions.ACIDException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) Function(org.apache.asterix.metadata.entities.Function) LibraryFunction(org.apache.asterix.external.library.LibraryFunction) ExternalLibrary(org.apache.asterix.external.library.ExternalLibrary) AdapterIdentifier(org.apache.asterix.external.dataset.adapter.AdapterIdentifier) Library(org.apache.asterix.metadata.entities.Library) ExternalLibrary(org.apache.asterix.external.library.ExternalLibrary) LibraryAdapter(org.apache.asterix.external.library.LibraryAdapter) File(java.io.File)

Example 23 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

The class FilePartition, method get.

/**
 * Handles an HTTP GET that returns, as JSON, the file-split layout of a dataset
 * identified by the {@code dataverseName} and {@code datasetName} request parameters.
 * Also flushes the dataset's cached contents to the file system before responding.
 */
@Override
protected void get(IServletRequest request, IServletResponse response) {
    response.setStatus(HttpResponseStatus.OK);
    try {
        HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, "Failure setting content type", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        response.writer().write(e.toString());
        return;
    }
    PrintWriter out = response.writer();
    try {
        ObjectMapper om = new ObjectMapper();
        ObjectNode jsonResponse = om.createObjectNode();
        String dataverseName = request.getParameter("dataverseName");
        String datasetName = request.getParameter("datasetName");
        if (dataverseName == null || datasetName == null) {
            jsonResponse.put("error", "Parameter dataverseName or datasetName is null,");
            out.write(jsonResponse.toString());
            return;
        }
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        // Metadata transaction begins.
        MetadataManager.INSTANCE.init();
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // Retrieves file splits of the dataset.
        MetadataProvider metadataProvider = new MetadataProvider(appCtx, null, new StorageComponentProvider());
        // Tracks whether the metadata transaction reached commit, so the finally
        // block can abort it on any other exit path (exception or early return).
        boolean committed = false;
        try {
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
            if (dataset == null) {
                jsonResponse.put("error", "Dataset " + datasetName + " does not exist in " + "dataverse " + dataverseName);
                out.write(jsonResponse.toString());
                out.flush();
                return;
            }
            boolean temp = dataset.getDatasetDetails().isTemp();
            FileSplit[] fileSplits = metadataProvider.splitsForIndex(mdTxnCtx, dataset, datasetName);
            ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
            List<List<String>> primaryKeys = dataset.getPrimaryKeys();
            // Build a comma-separated primary-key string, e.g. "a,b,c".
            StringBuilder pkStrBuf = new StringBuilder();
            for (List<String> keys : primaryKeys) {
                for (String key : keys) {
                    pkStrBuf.append(key).append(",");
                }
            }
            // Drop the trailing comma; guard against a dataset with no primary keys,
            // where an unconditional delete(length-1, length) would throw.
            if (pkStrBuf.length() > 0) {
                pkStrBuf.setLength(pkStrBuf.length() - 1);
            }
            // Constructs the returned json object.
            formResponseObject(jsonResponse, fileSplits, recordType, pkStrBuf.toString(), temp, hcc.getNodeControllerInfos());
            // Flush the cached contents of the dataset to file system.
            FlushDatasetUtil.flushDataset(hcc, metadataProvider, dataverseName, datasetName, datasetName);
            // Metadata transaction commits.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            committed = true;
            // Writes file splits.
            out.write(jsonResponse.toString());
        } finally {
            try {
                // Previously the transaction was leaked on every non-commit exit;
                // abort it so the metadata subsystem does not hold it open.
                if (!committed) {
                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                }
            } finally {
                metadataProvider.getLocks().unlock();
            }
        }
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Failure handling a request", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        out.write(e.toString());
    } finally {
        out.flush();
    }
}
Also used : IHyracksClientConnection(org.apache.hyracks.api.client.IHyracksClientConnection) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) Dataset(org.apache.asterix.metadata.entities.Dataset) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) StorageComponentProvider(org.apache.asterix.file.StorageComponentProvider) IOException(java.io.IOException) FileSplit(org.apache.hyracks.api.io.FileSplit) IOException(java.io.IOException) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider) List(java.util.List) ARecordType(org.apache.asterix.om.types.ARecordType) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) PrintWriter(java.io.PrintWriter)

Example 24 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

The class RebalanceApiServlet, method getAllDatasetsForRebalance.

/**
 * Collects, across every dataverse, the datasets that should be rebalanced.
 * Runs inside a single metadata transaction that is committed on success and
 * aborted (with the exception rethrown) on failure.
 *
 * @return the datasets eligible for rebalance, in dataverse iteration order
 */
private List<Dataset> getAllDatasetsForRebalance() throws Exception {
    List<Dataset> result = new ArrayList<>();
    MetadataTransactionContext txnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        for (Dataverse dataverse : MetadataManager.INSTANCE.getDataverses(txnCtx)) {
            result.addAll(getDatasetsInDataverseForRebalance(dataverse.getDataverseName(), txnCtx));
        }
        MetadataManager.INSTANCE.commitTransaction(txnCtx);
    } catch (Exception e) {
        // Roll back before propagating so the metadata txn is never leaked.
        MetadataManager.INSTANCE.abortTransaction(txnCtx);
        throw e;
    }
    return result;
}
Also used : Dataset(org.apache.asterix.metadata.entities.Dataset) ArrayList(java.util.ArrayList) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Dataverse(org.apache.asterix.metadata.entities.Dataverse)

Example 25 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

The class QueryTranslator, method handleDataverseDropStatement.

/**
 * Handles a DROP DATAVERSE statement. The drop proceeds in two metadata transactions:
 * first the dataverse record is replaced with a PENDING_DROP_OP marker and the physical
 * drop jobs are run; then a second transaction removes the record and any now-unused
 * node groups. On failure after the pending marker was added, a compensation path
 * re-runs the jobs and removes the marker so metadata stays consistent.
 *
 * Fix over the previous revision: the active-dataverse checks compared Strings with
 * {@code ==} (reference identity), which only works for interned strings; they now
 * use {@code equals}.
 */
protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
    String dataverseName = stmtDelete.getDataverseName().getValue();
    if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
        throw new HyracksDataException(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME + " dataverse can't be dropped");
    }
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.acquireDataverseWriteLock(metadataProvider.getLocks(), dataverseName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dv == null) {
            if (stmtDelete.getIfExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
            }
        }
        // # disconnect all feeds from any datasets in the dataverse.
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
        IActiveEntityEventsListener[] activeListeners = activeEventHandler.getEventListeners();
        Identifier dvId = new Identifier(dataverseName);
        MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse(), metadataProvider.getStorageComponentProvider());
        tempMdProvider.setConfig(metadataProvider.getConfig());
        for (IActiveEntityEventsListener listener : activeListeners) {
            EntityId activeEntityId = listener.getEntityId();
            if (activeEntityId.getExtensionName().equals(Feed.EXTENSION_NAME) && activeEntityId.getDataverse().equals(dataverseName)) {
                tempMdProvider.getLocks().reset();
                stopFeedBeforeDelete(new Pair<>(dvId, new Identifier(activeEntityId.getEntityName())), tempMdProvider);
                // prepare job to remove feed log storage
                jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(metadataProvider, MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
            }
        }
        // #. prepare jobs which will drop corresponding datasets with indexes.
        List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
        for (Dataset dataset : datasets) {
            String datasetName = dataset.getDatasetName();
            DatasetType dsType = dataset.getDatasetType();
            if (dsType == DatasetType.INTERNAL) {
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (Index index : indexes) {
                    jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, dataset));
                }
            } else {
                // External dataset
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, dataset));
                    } else {
                        jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider, dataset));
                    }
                }
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(dataset);
            }
        }
        jobsToExecute.add(DataverseUtil.dropDataverseJobSpec(dv, metadataProvider));
        // #. mark PendingDropOp on the dataverse record by
        // first, deleting the dataverse record from the DATAVERSE_DATASET
        // second, inserting the dataverse record with the PendingDropOp value into the
        // DATAVERSE_DATASET
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP));
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // #. finally, delete the dataverse.
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        // Drops all node groups that no longer needed
        for (Dataset dataset : datasets) {
            String nodeGroup = dataset.getNodeGroupName();
            MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
            if (MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup) != null) {
                MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodeGroup, true);
            }
        }
        // Clear the session's active dataverse if it was the one just dropped.
        // (Was a reference comparison with ==; use equals for value equality.)
        if (activeDataverse != null && dataverseName.equals(activeDataverse.getDataverseName())) {
            activeDataverse = null;
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // (Was a reference comparison with ==; use equals for value equality.)
            if (activeDataverse != null && dataverseName.equals(activeDataverse.getDataverseName())) {
                activeDataverse = null;
            }
            // remove the all indexes in NC
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do no throw exception since still the metadata needs to be compensated.
                e.addSuppressed(e2);
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            try {
                MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending dataverse(" + dataverseName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) ArrayList(java.util.ArrayList) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) DatasetType(org.apache.asterix.common.config.DatasetConfig.DatasetType) Index(org.apache.asterix.metadata.entities.Index) DataverseDropStatement(org.apache.asterix.lang.common.statement.DataverseDropStatement) ActiveLifecycleListener(org.apache.asterix.active.ActiveLifecycleListener) Identifier(org.apache.asterix.lang.common.struct.Identifier) JobSpecification(org.apache.hyracks.api.job.JobSpecification) ActiveJobNotificationHandler(org.apache.asterix.active.ActiveJobNotificationHandler) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) Dataverse(org.apache.asterix.metadata.entities.Dataverse) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) DatasetNodegroupCardinalityHint(org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) IActiveEntityEventsListener(org.apache.asterix.active.IActiveEntityEventsListener) EntityId(org.apache.asterix.active.EntityId) 
MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider)

Aggregations

MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext)41 RemoteException (java.rmi.RemoteException)32 ACIDException (org.apache.asterix.common.exceptions.ACIDException)32 IOException (java.io.IOException)30 AsterixException (org.apache.asterix.common.exceptions.AsterixException)30 MetadataException (org.apache.asterix.metadata.MetadataException)30 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)29 AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)28 CompilationException (org.apache.asterix.common.exceptions.CompilationException)27 Dataset (org.apache.asterix.metadata.entities.Dataset)14 JobSpecification (org.apache.hyracks.api.job.JobSpecification)12 ArrayList (java.util.ArrayList)11 Dataverse (org.apache.asterix.metadata.entities.Dataverse)11 IDataset (org.apache.asterix.common.metadata.IDataset)8 IHyracksDataset (org.apache.hyracks.api.dataset.IHyracksDataset)8 ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler)7 ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener)7 EntityId (org.apache.asterix.active.EntityId)6 Function (org.apache.asterix.metadata.entities.Function)6 ARecordType (org.apache.asterix.om.types.ARecordType)6