Example 21 with Dataset

Use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.

From the class QueryTranslator, method handleIndexDropStatement.

protected void handleIndexDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
    String datasetName = stmtIndexDrop.getDatasetName().getValue();
    String dataverseName = getActiveDataverse(stmtIndexDrop.getDataverseName());
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.dropIndexBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
    String indexName = null;
    // for external datasets: tracks whether the files index must also be dropped
    boolean dropFilesIndex = false;
    try {
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
        IActiveEntityEventsListener[] listeners = activeEventHandler.getEventListeners();
        StringBuilder builder = null;
        for (IActiveEntityEventsListener listener : listeners) {
            if (listener.isEntityUsingDataset(ds)) {
                if (builder == null) {
                    builder = new StringBuilder();
                }
                builder.append(new FeedConnectionId(listener.getEntityId(), datasetName) + "\n");
            }
        }
        if (builder != null) {
            throw new CompilationException("Dataset" + datasetName + " is currently being fed into by the following active entities: " + builder.toString());
        }
        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (stmtIndexDrop.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
            }
            // #. prepare a job to drop the index in NC.
            jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds));
            // #. mark PendingDropOp on the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, indexName, index.getIndexType(), index.getKeyFieldNames(), index.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
            // #. commit the existing transaction before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
            for (JobSpecification jobSpec : jobsToExecute) {
                JobUtils.runJob(hcc, jobSpec, true);
            }
            // #. begin a new transaction
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            // #. finally, delete the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
        } else {
            // External dataset
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (stmtIndexDrop.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
            } else if (ExternalIndexingOperations.isFileIndex(index)) {
                throw new AlgebricksException("Dropping a dataset's files index is not allowed.");
            }
            // #. prepare a job to drop the index in NC.
            jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds));
            List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
            if (datasetIndexes.size() == 2) {
                dropFilesIndex = true;
                // only one user index remains besides the files index, so both must be deleted
                for (Index externalIndex : datasetIndexes) {
                    if (ExternalIndexingOperations.isFileIndex(externalIndex)) {
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, ds));
                        // #. mark PendingDropOp on the existing files index
                        MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, externalIndex.getIndexName());
                        MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, externalIndex.getIndexName(), externalIndex.getIndexType(), externalIndex.getKeyFieldNames(), externalIndex.getKeyFieldSourceIndicators(), externalIndex.getKeyFieldTypes(), externalIndex.isEnforcingKeyFileds(), externalIndex.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
                    }
                }
            }
            // #. mark PendingDropOp on the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, indexName, index.getIndexType(), index.getKeyFieldNames(), index.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
            // #. commit the existing transaction before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
            for (JobSpecification jobSpec : jobsToExecute) {
                JobUtils.runJob(hcc, jobSpec, true);
            }
            // #. begin a new transaction
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            // #. finally, delete the existing index
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (dropFilesIndex) {
                // delete the files index too
                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
                MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
            }
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // remove all the indexes in the NCs
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception since the metadata still needs to be compensated
                e.addSuppressed(e2);
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
                if (dropFilesIndex) {
                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
                }
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) ArrayList(java.util.ArrayList) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Index(org.apache.asterix.metadata.entities.Index) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) IActiveEntityEventsListener(org.apache.asterix.active.IActiveEntityEventsListener) ActiveLifecycleListener(org.apache.asterix.active.ActiveLifecycleListener) IndexDropStatement(org.apache.asterix.lang.common.statement.IndexDropStatement) FeedConnectionId(org.apache.asterix.external.feed.management.FeedConnectionId) JobSpecification(org.apache.hyracks.api.job.JobSpecification) ActiveJobNotificationHandler(org.apache.asterix.active.ActiveJobNotificationHandler)
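
The method above follows the two-phase drop protocol used throughout QueryTranslator: mark the index record PENDING_DROP_OP and commit, run the physical drop jobs on the node controllers outside any metadata transaction, then delete the record in a fresh transaction; the catch block compensates if a failure happens in between. Below is a minimal, self-contained sketch of that control flow. MetadataStore and its methods, and the Runnable jobs, are hypothetical stand-ins for MetadataManager and the Hyracks job specifications, not the project's API.

import java.util.List;

// Minimal sketch of the two-phase "pending drop" protocol, under the assumptions above.
public class PendingDropSketch {

    interface MetadataStore {
        void begin();
        void commit();
        void abort();
        void markPendingDrop(String indexName); // re-add the record flagged PENDING_DROP_OP
        void delete(String indexName);          // remove the record for good
    }

    static void dropIndex(MetadataStore md, String indexName, List<Runnable> physicalDropJobs) throws Exception {
        boolean activeTxn = true;
        boolean pendingRecorded = false;
        md.begin();
        try {
            // Phase 1: record the intent and commit, so a crash leaves a visible
            // PENDING_DROP_OP record that recovery can finish or roll back.
            md.markPendingDrop(indexName);
            md.commit();
            activeTxn = false;
            pendingRecorded = true;
            // Physical drops run outside any metadata transaction.
            for (Runnable job : physicalDropJobs) {
                job.run();
            }
            // Phase 2: delete the metadata record in a fresh transaction.
            md.begin();
            activeTxn = true;
            md.delete(indexName);
            md.commit();
        } catch (Exception e) {
            if (activeTxn) {
                md.abort();
            }
            if (pendingRecorded) {
                // Compensate: remove the pending record so metadata stays consistent.
                md.begin();
                md.delete(indexName);
                md.commit();
            }
            throw e;
        }
    }
}

The invariant is that the pending record is committed before any physical state is destroyed, so a crash between the two phases always leaves enough metadata for recovery to finish or undo the drop.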

Example 22 with Dataset

Use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.

From the class GlobalRecoveryManager, method recoverDataset.

private MetadataTransactionContext recoverDataset(ICcApplicationContext appCtx, MetadataTransactionContext mdTxnCtx, Dataverse dataverse) throws Exception {
    if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
        MetadataProvider metadataProvider = new MetadataProvider(appCtx, dataverse, componentProvider);
        try {
            List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverse.getDataverseName());
            for (Dataset dataset : datasets) {
                if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
                    // External dataset
                    // Get indexes
                    List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataset.getDataverseName(), dataset.getDatasetName());
                    // Get the state of the dataset
                    ExternalDatasetDetails dsd = (ExternalDatasetDetails) dataset.getDatasetDetails();
                    TransactionState datasetState = dsd.getState();
                    if (!indexes.isEmpty()) {
                        if (datasetState == TransactionState.BEGIN) {
                            List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, dataset);
                            // 1. delete all pending files
                            for (ExternalFile file : files) {
                                if (file.getPendingOp() != ExternalFilePendingOp.NO_OP) {
                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                                }
                            }
                        }
                        // 2. clean artifacts in NCs
                        metadataProvider.setMetadataTxnContext(mdTxnCtx);
                        JobSpecification jobSpec = ExternalIndexingOperations.buildAbortOp(dataset, indexes, metadataProvider);
                        executeHyracksJob(jobSpec);
                        // 3. correct the dataset state
                        ((ExternalDatasetDetails) dataset.getDatasetDetails()).setState(TransactionState.COMMIT);
                        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                    } else if (datasetState == TransactionState.READY_TO_COMMIT) {
                        List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, dataset);
                        // if ready to commit, roll forward
                        // 1. commit indexes in NCs
                        metadataProvider.setMetadataTxnContext(mdTxnCtx);
                        JobSpecification jobSpec = ExternalIndexingOperations.buildRecoverOp(dataset, indexes, metadataProvider);
                        executeHyracksJob(jobSpec);
                        // 2. add pending files in metadata
                        for (ExternalFile file : files) {
                            if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                                file.setPendingOp(ExternalFilePendingOp.NO_OP);
                                MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
                            } else if (file.getPendingOp() == ExternalFilePendingOp.DROP_OP) {
                                // find original file
                                for (ExternalFile originalFile : files) {
                                    if (originalFile.getFileName().equals(file.getFileName())) {
                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, originalFile);
                                        break;
                                    }
                                }
                            } else if (file.getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
                                // find original file
                                for (ExternalFile originalFile : files) {
                                    if (originalFile.getFileName().equals(file.getFileName())) {
                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, originalFile);
                                        originalFile.setSize(file.getSize());
                                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, originalFile);
                                    }
                                }
                            }
                        }
                        // 3. correct the dataset state
                        ((ExternalDatasetDetails) dataset.getDatasetDetails()).setState(TransactionState.COMMIT);
                        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                    }
                }
            }
        } finally {
            metadataProvider.getLocks().unlock();
        }
    }
    return mdTxnCtx;
}
Also used : TransactionState(org.apache.asterix.common.config.DatasetConfig.TransactionState) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider) Dataset(org.apache.asterix.metadata.entities.Dataset) ExternalDatasetDetails(org.apache.asterix.metadata.entities.ExternalDatasetDetails) Index(org.apache.asterix.metadata.entities.Index) List(java.util.List) JobSpecification(org.apache.hyracks.api.job.JobSpecification) ExternalFile(org.apache.asterix.external.indexing.ExternalFile)
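
In the READY_TO_COMMIT branch above, recovery rolls the dataset forward by reconciling each pending file record against its committed counterpart: a pending ADD_OP becomes a committed file, a pending DROP_OP removes the original, and a pending APPEND_OP carries the new size over to the original. The sketch below models that reconciliation in isolation; PendingOp and FileRecord are hypothetical stand-ins for ExternalFilePendingOp and ExternalFile, and an in-memory map stands in for the metadata store.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RollForwardSketch {
    enum PendingOp { NO_OP, ADD_OP, DROP_OP, APPEND_OP }

    static final class FileRecord {
        final String name;
        long size;
        PendingOp op;
        FileRecord(String name, long size, PendingOp op) { this.name = name; this.size = size; this.op = op; }
    }

    // Returns the committed file list after applying every pending op, mirroring
    // what the READY_TO_COMMIT branch does against the metadata store.
    static List<FileRecord> rollForward(List<FileRecord> files) {
        Map<String, FileRecord> committed = new HashMap<>();
        for (FileRecord f : files) {
            if (f.op == PendingOp.NO_OP) {   // start from the already-committed files
                committed.put(f.name, f);
            }
        }
        for (FileRecord f : files) {
            switch (f.op) {
                case ADD_OP:                 // pending add becomes a committed file
                    f.op = PendingOp.NO_OP;
                    committed.put(f.name, f);
                    break;
                case DROP_OP:                // pending drop removes the original
                    committed.remove(f.name);
                    break;
                case APPEND_OP:              // pending append carries the new size over
                    FileRecord orig = committed.get(f.name);
                    if (orig != null) {
                        orig.size = f.size;
                    }
                    break;
                default:
                    break;
            }
        }
        return new ArrayList<>(committed.values());
    }
}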

Example 23 with Dataset

Use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.

From the class QueryTranslator, method doDropDataset.

public static void doDropDataset(String dataverseName, String datasetName, MetadataProvider metadataProvider, boolean ifExists, IHyracksClientConnection hcc, boolean dropCorrespondingNodeGroup) throws Exception {
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    MutableObject<MetadataTransactionContext> mdTxnCtx = new MutableObject<>(MetadataManager.INSTANCE.beginTransaction());
    MutableBoolean bActiveTxn = new MutableBoolean(true);
    metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    try {
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds == null) {
            if (ifExists) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
                return;
            } else {
                throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName + ".");
            }
        }
        ds.drop(metadataProvider, mdTxnCtx, jobsToExecute, bActiveTxn, progress, hcc, dropCorrespondingNodeGroup);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
    } catch (Exception e) {
        if (bActiveTxn.booleanValue()) {
            abort(e, e, mdTxnCtx.getValue());
        }
        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // remove all the indexes in the NCs
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception since the metadata still needs to be compensated
                e.addSuppressed(e2);
            }
            // remove the record from the metadata.
            mdTxnCtx.setValue(MetadataManager.INSTANCE.beginTransaction());
            metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx.getValue());
                throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) MutableBoolean(org.apache.commons.lang3.mutable.MutableBoolean) ArrayList(java.util.ArrayList) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) JobSpecification(org.apache.hyracks.api.job.JobSpecification) MutableObject(org.apache.commons.lang3.mutable.MutableObject)
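
doDropDataset takes MutableObject and MutableBoolean holders because Dataset.drop may replace the metadata transaction context and advance the progress state internally, and the catch block here must see those updates to compensate correctly. The sketch below shows what a typical caller might look like; it is modeled on the index-drop handler in Example 21, and the DropDatasetStatement accessors and lock call are assumptions, not verified project code.

// Hypothetical caller sketch; accessor and lock names are modeled on Example 21.
protected void handleDatasetDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    DropDatasetStatement stmtDrop = (DropDatasetStatement) stmt;
    String dataverseName = getActiveDataverse(stmtDrop.getDataverseName());
    String datasetName = stmtDrop.getDatasetName().getValue();
    // take the same dataset lock the index-drop path takes before touching metadata
    MetadataLockManager.INSTANCE.dropDatasetBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
    try {
        // true: also drop the node group created for this dataset
        doDropDataset(dataverseName, datasetName, metadataProvider, stmtDrop.getIfExists(), hcc, true);
    } finally {
        metadataProvider.getLocks().unlock();
    }
}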

Example 24 with Dataset

Use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.

From the class MetadataProvider, method findDataSourceIndex.

@Override
public IDataSourceIndex<String, DataSourceId> findDataSourceIndex(String indexId, DataSourceId dataSourceId) throws AlgebricksException {
    DataSource source = findDataSource(dataSourceId);
    Dataset dataset = ((DatasetDataSource) source).getDataset();
    Index secondaryIndex = getIndex(dataset.getDataverseName(), dataset.getDatasetName(), indexId);
    return (secondaryIndex != null)
            ? new DataSourceIndex(secondaryIndex, dataset.getDataverseName(), dataset.getDatasetName(), this)
            : null;
}
Also used : Dataset(org.apache.asterix.metadata.entities.Dataset) IDataSourceIndex(org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex) Index(org.apache.asterix.metadata.entities.Index) IDataSourceIndex(org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex) IDataSource(org.apache.hyracks.algebricks.core.algebra.metadata.IDataSource)
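
Note the contract: findDataSourceIndex returns null rather than throwing when the dataset has no index with the given name, so optimizer rules can probe for candidate secondary indexes cheaply. A small hypothetical helper that makes the null check explicit (the helper name is illustrative):

// Hypothetical helper; only the null contract of findDataSourceIndex is assumed.
private boolean hasSecondaryIndex(MetadataProvider metadataProvider, String indexName, DataSourceId dataSourceId) throws AlgebricksException {
    // null means "no index with that name on the dataset", not an error
    return metadataProvider.findDataSourceIndex(indexName, dataSourceId) != null;
}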

Example 25 with Dataset

Use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.

From the class MetadataProvider, method getBTreeRuntime.

private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBTreeRuntime(String dataverseName, String datasetName, String indexName, IOperatorSchema propagatedSchema, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys, List<LogicalVariable> additionalNonKeyFields, AsterixTupleFilterFactory filterFactory, RecordDescriptor inputRecordDesc, JobGenContext context, JobSpecification spec, IndexOperation indexOp, boolean bulkload, List<LogicalVariable> prevSecondaryKeys, List<LogicalVariable> prevAdditionalFilteringKeys) throws AlgebricksException {
    Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName);
    boolean temp = dataset.getDatasetDetails().isTemp();
    isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob && temp;
    int numKeys = primaryKeys.size() + secondaryKeys.size();
    int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
    // generate field permutations
    int[] fieldPermutation = new int[numKeys + numFilterFields];
    int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
    int i = 0;
    int j = 0;
    for (LogicalVariable varKey : secondaryKeys) {
        int idx = propagatedSchema.findVariable(varKey);
        fieldPermutation[i] = idx;
        i++;
    }
    for (LogicalVariable varKey : primaryKeys) {
        int idx = propagatedSchema.findVariable(varKey);
        fieldPermutation[i] = idx;
        modificationCallbackPrimaryKeyFields[j] = i;
        i++;
        j++;
    }
    if (numFilterFields > 0) {
        int idx = propagatedSchema.findVariable(additionalNonKeyFields.get(0));
        fieldPermutation[numKeys] = idx;
    }
    int[] prevFieldPermutation = null;
    if (indexOp == IndexOperation.UPSERT) {
        // generate field permutations for prev record
        prevFieldPermutation = new int[numKeys + numFilterFields];
        int k = 0;
        for (LogicalVariable varKey : prevSecondaryKeys) {
            int idx = propagatedSchema.findVariable(varKey);
            prevFieldPermutation[k] = idx;
            k++;
        }
        for (LogicalVariable varKey : primaryKeys) {
            int idx = propagatedSchema.findVariable(varKey);
            prevFieldPermutation[k] = idx;
            k++;
        }
        // Filter can only be one field!
        if (numFilterFields > 0) {
            int idx = propagatedSchema.findVariable(prevAdditionalFilteringKeys.get(0));
            prevFieldPermutation[numKeys] = idx;
        }
    }
    try {
        // Index parameters.
        Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(), dataset.getDatasetName(), indexName);
        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
        // prepare callback
        JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
        IModificationOperationCallbackFactory modificationCallbackFactory = dataset.getModificationCallbackFactory(storaegComponentProvider, secondaryIndex, jobId, indexOp, modificationCallbackPrimaryKeyFields);
        IIndexDataflowHelperFactory idfh = new IndexDataflowHelperFactory(storaegComponentProvider.getStorageManager(), splitsAndConstraint.first);
        IOperatorDescriptor op;
        if (bulkload) {
            long numElementsHint = getCardinalityPerPartitionHint(dataset);
            op = new TreeIndexBulkLoadOperatorDescriptor(spec, inputRecordDesc, fieldPermutation, GlobalConfig.DEFAULT_TREE_FILL_FACTOR, false, numElementsHint, false, idfh);
        } else if (indexOp == IndexOperation.UPSERT) {
            op = new LSMSecondaryUpsertOperatorDescriptor(spec, inputRecordDesc, fieldPermutation, idfh, filterFactory, modificationCallbackFactory, prevFieldPermutation);
        } else {
            op = new LSMTreeInsertDeleteOperatorDescriptor(spec, inputRecordDesc, fieldPermutation, indexOp, idfh, filterFactory, false, modificationCallbackFactory);
        }
        return new Pair<>(op, splitsAndConstraint.second);
    } catch (Exception e) {
        throw new AlgebricksException(e);
    }
}
Also used : LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) Dataset(org.apache.asterix.metadata.entities.Dataset) IFileSplitProvider(org.apache.hyracks.dataflow.std.file.IFileSplitProvider) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) Index(org.apache.asterix.metadata.entities.Index) IDataSourceIndex(org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex) JobEventListenerFactory(org.apache.asterix.runtime.job.listener.JobEventListenerFactory) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) DatasetCardinalityHint(org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetCardinalityHint) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) LSMSecondaryUpsertOperatorDescriptor(org.apache.asterix.runtime.operators.LSMSecondaryUpsertOperatorDescriptor) IIndexDataflowHelperFactory(org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory) IOperatorDescriptor(org.apache.hyracks.api.dataflow.IOperatorDescriptor) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) IModificationOperationCallbackFactory(org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory) LSMTreeInsertDeleteOperatorDescriptor(org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor) TreeIndexBulkLoadOperatorDescriptor(org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor) IndexDataflowHelperFactory(org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory) IIndexDataflowHelperFactory(org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory) JobId(org.apache.asterix.common.transactions.JobId) Pair(org.apache.hyracks.algebricks.common.utils.Pair)
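
The permutation arrays built above map each output position of the index-maintenance operator, laid out as secondary keys, then primary keys, then the single optional filter field, to the column where that value sits in the incoming tuple, via propagatedSchema.findVariable. The self-contained sketch below reproduces that layout with List.indexOf standing in for the schema lookup; all names in it are illustrative.

import java.util.Arrays;
import java.util.List;

public class FieldPermutationSketch {
    static int[] buildPermutation(List<String> schema, List<String> secondaryKeys, List<String> primaryKeys, String filterField) {
        int numKeys = primaryKeys.size() + secondaryKeys.size();
        int numFilterFields = filterField == null ? 0 : 1;
        int[] fieldPermutation = new int[numKeys + numFilterFields];
        int i = 0;
        for (String key : secondaryKeys) {   // secondary keys come first
            fieldPermutation[i++] = schema.indexOf(key);
        }
        for (String key : primaryKeys) {     // then primary keys
            fieldPermutation[i++] = schema.indexOf(key);
        }
        if (numFilterFields > 0) {           // the single optional filter field is last
            fieldPermutation[numKeys] = schema.indexOf(filterField);
        }
        return fieldPermutation;
    }

    public static void main(String[] args) {
        // Input tuple columns [id, name, age, ts]; an index on (name) with primary
        // key (id) and filter field ts yields the permutation [1, 0, 3].
        int[] p = buildPermutation(Arrays.asList("id", "name", "age", "ts"),
                Arrays.asList("name"), Arrays.asList("id"), "ts");
        System.out.println(Arrays.toString(p)); // prints [1, 0, 3]
    }
}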

Aggregations

Dataset (org.apache.asterix.metadata.entities.Dataset)77 ArrayList (java.util.ArrayList)33 AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)32 Index (org.apache.asterix.metadata.entities.Index)25 LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable)23 MetadataException (org.apache.asterix.metadata.MetadataException)19 ARecordType (org.apache.asterix.om.types.ARecordType)19 IAType (org.apache.asterix.om.types.IAType)18 ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression)18 List (java.util.List)17 ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator)16 RemoteException (java.rmi.RemoteException)15 AsterixException (org.apache.asterix.common.exceptions.AsterixException)15 MetadataProvider (org.apache.asterix.metadata.declared.MetadataProvider)15 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)15 IOException (java.io.IOException)14 MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext)14 CompilationException (org.apache.asterix.common.exceptions.CompilationException)13 AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint)12 ACIDException (org.apache.asterix.common.exceptions.ACIDException)11