
Example 26 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

the class QueryTranslator method handleCreateFeedPolicyStatement.

protected void handleCreateFeedPolicyStatement(MetadataProvider metadataProvider, Statement stmt) throws AlgebricksException, HyracksDataException {
    String dataverse;
    String policy;
    FeedPolicyEntity newPolicy = null;
    MetadataTransactionContext mdTxnCtx = null;
    CreateFeedPolicyStatement cfps = (CreateFeedPolicyStatement) stmt;
    dataverse = getActiveDataverse(null);
    policy = cfps.getPolicyName();
    MetadataLockManager.INSTANCE.createFeedPolicyBegin(metadataProvider.getLocks(), dataverse, dataverse + "." + policy);
    try {
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, policy);
        if (feedPolicy != null) {
            if (cfps.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A policy with this name " + policy + " already exists.");
            }
        }
        boolean extendingExisting = cfps.getSourcePolicyName() != null;
        String description = cfps.getDescription() == null ? "" : cfps.getDescription();
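        // A policy either extends an existing policy, overriding selected
        // properties, or is loaded wholesale from a properties file.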
        if (extendingExisting) {
            FeedPolicyEntity sourceFeedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, cfps.getSourcePolicyName());
            if (sourceFeedPolicy == null) {
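                // not found in the active dataverse; fall back to the built-in
                // policies in the Metadata dataverse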
                sourceFeedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(), MetadataConstants.METADATA_DATAVERSE_NAME, cfps.getSourcePolicyName());
                if (sourceFeedPolicy == null) {
                    throw new AlgebricksException("Unknown policy " + cfps.getSourcePolicyName());
                }
            }
            Map<String, String> policyProperties = sourceFeedPolicy.getProperties();
            policyProperties.putAll(cfps.getProperties());
            newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
        } else {
            Properties prop = new Properties();
            try (InputStream stream = new FileInputStream(cfps.getSourcePolicyFile())) {
                prop.load(stream);
            } catch (Exception e) {
                throw new AlgebricksException("Unable to read policy file " + cfps.getSourcePolicyFile(), e);
            }
            Map<String, String> policyProperties = new HashMap<>();
            for (Entry<Object, Object> entry : prop.entrySet()) {
                policyProperties.put((String) entry.getKey(), (String) entry.getValue());
            }
            newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
        }
        MetadataManager.INSTANCE.addFeedPolicy(mdTxnCtx, newPolicy);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (RemoteException | ACIDException e) {
        abort(e, e, mdTxnCtx);
        throw new HyracksDataException(e);
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : HashMap(java.util.HashMap) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Properties(java.util.Properties) ClusterProperties(org.apache.asterix.common.config.ClusterProperties) ExternalProperties(org.apache.asterix.common.config.ExternalProperties) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) CreateFeedPolicyStatement(org.apache.asterix.lang.common.statement.CreateFeedPolicyStatement) FeedPolicyEntity(org.apache.asterix.metadata.entities.FeedPolicyEntity) MutableObject(org.apache.commons.lang3.mutable.MutableObject)
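
All of these handlers follow one transaction-and-lock skeleton: take the metadata locks, begin a metadata transaction, do the work, commit, abort on failure, and always release the locks in a finally block. Below is a minimal sketch of that skeleton, using only the MetadataManager, MetadataProvider, and abort(...) calls visible in the examples; handleExampleStatement is an illustrative name, not AsterixDB API.

protected void handleExampleStatement(MetadataProvider metadataProvider) throws Exception {
    // 1. acquire the metadata locks appropriate to the statement, e.g.
    //    MetadataLockManager.INSTANCE.createFeedPolicyBegin(...)
    MetadataTransactionContext mdTxnCtx = null;
    try {
        // 2. begin a metadata transaction and attach it to the provider
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // 3. read, validate, and mutate metadata entities here
        // 4. commit on success
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // 5. roll back the metadata transaction before rethrowing
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        // 6. release the locks whether the transaction committed or aborted
        metadataProvider.getLocks().unlock();
    }
}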

Example 27 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

the class QueryTranslator method handleUseDataverseStatement.

protected Dataverse handleUseDataverseStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    DataverseDecl dvd = (DataverseDecl) stmt;
    String dvName = dvd.getDataverseName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
        if (dv == null) {
            throw new MetadataException("Unknown dataverse " + dvName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        return dv;
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw new MetadataException(e);
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : DataverseDecl(org.apache.asterix.lang.common.statement.DataverseDecl) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Dataverse(org.apache.asterix.metadata.entities.Dataverse) MetadataException(org.apache.asterix.metadata.MetadataException) ACIDException(org.apache.asterix.common.exceptions.ACIDException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException)
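
Note the ordering difference from Example 26: here the metadata transaction is begun before the dataverse read lock is acquired, whereas Example 26 takes its locks first. Purely as an illustration, the lock-first variant of the same lookup would read as follows; whether the two orderings are interchangeable for this statement is an assumption, not something the source states.

// Illustrative lock-first variant of the lookup above.
MetadataLockManager.INSTANCE.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
MetadataTransactionContext mdTxnCtx = null;
try {
    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dvName);
    // dv may be null here; validate as in the handler above
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
    abort(e, e, mdTxnCtx);
    throw e;
} finally {
    metadataProvider.getLocks().unlock();
}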

Example 28 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

the class QueryTranslator method handleFunctionDropStatement.

protected void handleFunctionDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    FunctionDropStatement stmtDropFunction = (FunctionDropStatement) stmt;
    FunctionSignature signature = stmtDropFunction.getFunctionSignature();
    signature.setNamespace(getActiveDataverseName(signature.getNamespace()));
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.functionStatementBegin(metadataProvider.getLocks(), signature.getNamespace(), signature.getNamespace() + "." + signature.getName());
    try {
        Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
        if (function == null) {
            if (!stmtDropFunction.getIfExists()) {
                throw new AlgebricksException("Unknonw function " + signature);
            }
        } else if (function.getReferenceCount() != 0) {
            throw new AlgebricksException("Function " + signature + " is being used. It cannot be dropped.");
        } else {
            MetadataManager.INSTANCE.dropFunction(mdTxnCtx, signature);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : Function(org.apache.asterix.metadata.entities.Function) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) FunctionDropStatement(org.apache.asterix.lang.common.statement.FunctionDropStatement) FunctionSignature(org.apache.asterix.common.functions.FunctionSignature) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException)
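
The body above is the drop-statement guard used throughout QueryTranslator: a missing entity is an error only when IF EXISTS was not given, and an entity that is still referenced cannot be dropped. A compressed sketch of the guard follows, with ifExists standing in for stmtDropFunction.getIfExists(); note that every non-throwing path, including the IF EXISTS no-op, still commits the transaction.

// Compressed sketch of the drop guard; ifExists is an illustrative placeholder.
Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
if (function == null && !ifExists) {
    throw new AlgebricksException("Unknown function " + signature);
} else if (function != null && function.getReferenceCount() != 0) {
    throw new AlgebricksException("Function " + signature + " is being used. It cannot be dropped.");
} else if (function != null) {
    MetadataManager.INSTANCE.dropFunction(mdTxnCtx, signature);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);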

Example 29 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

the class QueryTranslator method handleCreateDatasetStatement.

public void handleCreateDatasetStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    DatasetDecl dd = (DatasetDecl) stmt;
    String dataverseName = getActiveDataverse(dd.getDataverse());
    String datasetName = dd.getName().getValue();
    DatasetType dsType = dd.getDatasetType();
    String itemTypeDataverseName = getActiveDataverse(dd.getItemTypeDataverse());
    String itemTypeName = dd.getItemTypeName().getValue();
    String metaItemTypeDataverseName = getActiveDataverse(dd.getMetaItemTypeDataverse());
    String metaItemTypeName = dd.getMetaItemTypeName().getValue();
    Identifier ngNameId = dd.getNodegroupName();
    String nodegroupName = ngNameId == null ? null : ngNameId.getValue();
    String compactionPolicy = dd.getCompactionPolicy();
    Map<String, String> compactionPolicyProperties = dd.getCompactionPolicyProperties();
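    // remember whether the user specified a merge policy before defaults are
    // applied further down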
    boolean defaultCompactionPolicy = compactionPolicy == null;
    boolean temp = dd.getDatasetDetailsDecl().isTemp();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.createDatasetBegin(metadataProvider.getLocks(), dataverseName, itemTypeDataverseName, itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName, metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
    Dataset dataset = null;
    try {
        IDatasetDetails datasetDetails = null;
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds != null) {
            if (dd.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
            }
        }
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), itemTypeDataverseName, itemTypeName);
        if (dt == null) {
            throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
        }
        String ngName = ngNameId != null ? ngNameId.getValue() : configureNodegroupForDataset(appCtx, dd.getHints(), dataverseName, datasetName, metadataProvider);
        if (compactionPolicy == null) {
            compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
            compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
        } else {
            validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
        }
        switch(dd.getDatasetType()) {
            case INTERNAL:
                IAType itemType = dt.getDatatype();
                if (itemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset type has to be a record type.");
                }
                IAType metaItemType = null;
                if (metaItemTypeDataverseName != null && metaItemTypeName != null) {
                    metaItemType = metadataProvider.findType(metaItemTypeDataverseName, metaItemTypeName);
                }
                if (metaItemType != null && metaItemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset meta type has to be a record type.");
                }
                ARecordType metaRecType = (ARecordType) metaItemType;
                InternalDetailsDecl internalDetails = (InternalDetailsDecl) dd.getDatasetDetailsDecl();
                List<List<String>> partitioningExprs = internalDetails.getPartitioningExprs();
                List<Integer> keySourceIndicators = internalDetails.getKeySourceIndicators();
                boolean autogenerated = internalDetails.isAutogenerated();
                ARecordType aRecordType = (ARecordType) itemType;
                List<IAType> partitioningTypes = ValidateUtil.validatePartitioningExpressions(aRecordType, metaRecType, partitioningExprs, keySourceIndicators, autogenerated);
                List<String> filterField = internalDetails.getFilterField();
                if (filterField != null) {
                    ValidateUtil.validateFilterField(aRecordType, filterField);
                }
                if (defaultCompactionPolicy && filterField != null) {
                    // If the dataset has a filter and the user didn't specify a merge
                    // policy, pick correlated-prefix as the default merge policy.
                    // (compactionPolicy was already defaulted above, so test the flag
                    // captured before the defaults were applied.)
                    compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
                    compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
                }
                datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE, InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs, keySourceIndicators, partitioningTypes, autogenerated, filterField, temp);
                break;
            case EXTERNAL:
                ExternalDetailsDecl externalDetails = (ExternalDetailsDecl) dd.getDatasetDetailsDecl();
                String adapter = externalDetails.getAdapter();
                Map<String, String> properties = externalDetails.getProperties();
                datasetDetails = new ExternalDatasetDetails(adapter, properties, new Date(), TransactionState.COMMIT);
                break;
            default:
                throw new CompilationException("Unknown dataset type " + dd.getDatasetType());
        }
        // #. initialize DatasetIdFactory if it is not initialized.
        if (!DatasetIdFactory.isInitialized()) {
            DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
        }
        // #. add a new dataset with PendingAddOp
        dataset = new Dataset(dataverseName, datasetName, itemTypeDataverseName, itemTypeName, metaItemTypeDataverseName, metaItemTypeName, ngName, compactionPolicy, compactionPolicyProperties, datasetDetails, dd.getHints(), dsType, DatasetIdFactory.generateDatasetId(), MetadataUtil.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        if (dd.getDatasetType() == DatasetType.INTERNAL) {
            JobSpecification jobSpec = DatasetUtil.createDatasetJobSpec(dataset, metadataProvider);
            // #. make metadataTxn commit before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);
            // #. runJob
            JobUtils.runJob(hcc, jobSpec, true);
            // #. begin new metadataTxn
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
        }
        // #. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
        MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
        dataset.setPendingOp(MetadataUtil.PENDING_NO_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // #. execute compensation operations
            // remove the index in NC
            // [Notice]
            // As long as we updated(and committed) metadata, we should remove any effect of the job
            // because an exception occurs during runJob.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                JobSpecification jobSpec = DatasetUtil.dropDatasetJobSpec(dataset, metadataProvider);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is in an inconsistent state: pending dataset (" + dataverseName + "." + datasetName + ") could not be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ExternalDetailsDecl(org.apache.asterix.lang.common.statement.ExternalDetailsDecl) DatasetType(org.apache.asterix.common.config.DatasetConfig.DatasetType) IDatasetDetails(org.apache.asterix.metadata.IDatasetDetails) Datatype(org.apache.asterix.metadata.entities.Datatype) DatasetDecl(org.apache.asterix.lang.common.statement.DatasetDecl) Identifier(org.apache.asterix.lang.common.struct.Identifier) ExternalDatasetDetails(org.apache.asterix.metadata.entities.ExternalDatasetDetails) ArrayList(java.util.ArrayList) List(java.util.List) JobSpecification(org.apache.hyracks.api.job.JobSpecification) MutableObject(org.apache.commons.lang3.mutable.MutableObject) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) InternalDatasetDetails(org.apache.asterix.metadata.entities.InternalDatasetDetails) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) Date(java.util.Date) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) InternalDetailsDecl(org.apache.asterix.lang.common.statement.InternalDetailsDecl) ARecordType(org.apache.asterix.om.types.ARecordType) IAType(org.apache.asterix.om.types.IAType)
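
Example 29 is the most involved of these handlers because it interleaves metadata transactions with a Hyracks job. The commit/run/begin dance reduces to the pending-op protocol sketched below; this is a distilled sketch of the code above, not a literal excerpt, and jobSpec construction is elided.

// 1. write the dataset with a PENDING_ADD_OP flag and commit, so the in-flight
//    creation is visible to other metadata transactions
MetadataManager.INSTANCE.addDataset(mdTxnCtx, dataset); // created with PENDING_ADD_OP
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false; // a job failure past this point must not abort a committed txn
// 2. run the physical Hyracks job outside any metadata transaction
JobUtils.runJob(hcc, jobSpec, true);
// 3. in a fresh transaction, swap the pending record for the final one
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
dataset.setPendingOp(MetadataUtil.PENDING_NO_OP);
MetadataManager.INSTANCE.addDataset(mdTxnCtx, dataset);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
// On failure after step 1, the catch block compensates in two stages: run a
// drop job to remove any on-disk artifact, then delete the pending record.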

Example 30 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

the class QueryTranslator method handleDeleteStatement.

public JobSpecification handleDeleteStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc, boolean compileOnly) throws Exception {
    DeleteStatement stmtDelete = (DeleteStatement) stmt;
    String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(metadataProvider.getLocks(), dataverseName + "." + stmtDelete.getDatasetName());
    try {
        metadataProvider.setWriteTransaction(true);
        CompiledDeleteStatement clfrqs = new CompiledDeleteStatement(stmtDelete.getVariableExpr(), dataverseName, stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getVarCounter(), stmtDelete.getQuery());
        JobSpecification jobSpec = rewriteCompileQuery(hcc, metadataProvider, clfrqs.getQuery(), clfrqs);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        if (jobSpec != null && !compileOnly) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
        return jobSpec;
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : CompiledDeleteStatement(org.apache.asterix.translator.CompiledStatements.CompiledDeleteStatement) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) JobSpecification(org.apache.hyracks.api.job.JobSpecification) DeleteStatement(org.apache.asterix.lang.common.statement.DeleteStatement) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException)
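
The bActiveTxn flag in Examples 29 and 30 exists because the metadata transaction must commit before the job runs: once committed, a later job failure must not trigger abort on a finished transaction. The compileOnly flag additionally lets callers obtain the JobSpecification without executing it. A condensed sketch of that tail end, assuming the same API as the example:

MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false; // from here on there is no transaction left to abort
if (jobSpec != null && !compileOnly) {
    // failures here propagate without calling abort(), since the txn is done
    JobUtils.runJob(hcc, jobSpec, true);
}
return jobSpec; // compileOnly callers get the spec without execution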

Aggregations

MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext): 41 usages
RemoteException (java.rmi.RemoteException): 32 usages
ACIDException (org.apache.asterix.common.exceptions.ACIDException): 32 usages
IOException (java.io.IOException): 30 usages
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 30 usages
MetadataException (org.apache.asterix.metadata.MetadataException): 30 usages
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 29 usages
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 28 usages
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 27 usages
Dataset (org.apache.asterix.metadata.entities.Dataset): 14 usages
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 12 usages
ArrayList (java.util.ArrayList): 11 usages
Dataverse (org.apache.asterix.metadata.entities.Dataverse): 11 usages
IDataset (org.apache.asterix.common.metadata.IDataset): 8 usages
IHyracksDataset (org.apache.hyracks.api.dataset.IHyracksDataset): 8 usages
ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler): 7 usages
ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener): 7 usages
EntityId (org.apache.asterix.active.EntityId): 6 usages
Function (org.apache.asterix.metadata.entities.Function): 6 usages
ARecordType (org.apache.asterix.om.types.ARecordType): 6 usages