Search in sources:

Example 1 with Dataset

use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.

From the class QueryTranslator, method handleExternalDatasetRefreshStatement:

/**
 * Handles a REFRESH EXTERNAL DATASET statement: re-synchronizes the metadata
 * snapshot of an external dataset (its file list and all of its indexes) with the
 * current contents of the external file system.
 *
 * The refresh is a multi-phase protocol spanning several metadata transactions:
 * 1) compare the recorded snapshot with the file system; if up to date, just bump
 *    the refresh timestamp; 2) record the file deltas plus a transaction dataset
 *    record and run the index-update jobs on the NCs; 3) mark READY_TO_COMMIT,
 *    run the commit job, then finalize the pending file records and mark COMMIT.
 * The local {@code transactionState} records how far the protocol progressed so
 * the catch block can decide between rethrowing, aborting-and-compensating, or
 * reporting an unrecoverable inconsistent state.
 *
 * @param metadataProvider carries the metadata transaction context and lock list
 * @param stmt             must be a {@link RefreshExternalDatasetStatement}
 * @param hcc              connection used to run the generated Hyracks jobs
 * @throws Exception on metadata/compilation/job failures; throws
 *         {@link IllegalStateException} when a failure occurs past the point of
 *         safe rollback (commit phase or abort cleanup failed)
 */
protected void handleExternalDatasetRefreshStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    RefreshExternalDatasetStatement stmtRefresh = (RefreshExternalDatasetStatement) stmt;
    String dataverseName = getActiveDataverse(stmtRefresh.getDataverseName());
    String datasetName = stmtRefresh.getDatasetName().getValue();
    // COMMIT here means "no durable changes made yet"; advanced as phases complete.
    TransactionState transactionState = TransactionState.COMMIT;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    JobSpecification spec = null;
    Dataset ds = null;
    List<ExternalFile> metadataFiles = null;
    List<ExternalFile> deletedFiles = null;
    List<ExternalFile> addedFiles = null;
    List<ExternalFile> appendedFiles = null;
    List<Index> indexes = null;
    // Copy of the dataset record that carries the refresh-transaction state.
    Dataset transactionDataset = null;
    boolean lockAquired = false;
    boolean success = false;
    MetadataLockManager.INSTANCE.refreshDatasetBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
    try {
        ds = metadataProvider.findDataset(dataverseName, datasetName);
        // Dataset exists?
        if (ds == null) {
            throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }
        // Refresh only applies to external datasets.
        if (ds.getDatasetType() != DatasetType.EXTERNAL) {
            throw new AlgebricksException("dataset " + datasetName + " in dataverse " + dataverseName + " is not an external dataset");
        }
        // Without at least one index there is nothing to refresh.
        indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
        if (indexes.isEmpty()) {
            throw new AlgebricksException("External dataset " + datasetName + " in dataverse " + dataverseName + " doesn't have any index");
        }
        // Record transaction time; becomes the dataset's new refresh timestamp.
        Date txnTime = new Date();
        // Acquire the dataset refresh lock; released in the finally clause.
        ExternalDatasetsRegistry.INSTANCE.refreshBegin(ds);
        lockAquired = true;
        // Snapshot of external files currently recorded in the metadata.
        metadataFiles = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, ds);
        deletedFiles = new ArrayList<>();
        addedFiles = new ArrayList<>();
        appendedFiles = new ArrayList<>();
        // Compare the metadata snapshot with the external file system; the three
        // delta lists are populated as a side effect of this call.
        if (ExternalIndexingOperations.isDatasetUptodate(ds, metadataFiles, addedFiles, deletedFiles, appendedFiles)) {
            // Nothing changed: only record the new refresh timestamp.
            ((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(txnTime);
            MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            // latch will be released in the finally clause
            return;
        }
        // At this point data has changed in the external file system: record the
        // refresh transaction in the metadata and start the update protocol.
        transactionDataset = ExternalIndexingOperations.createTransactionDataset(ds);
        // Replace the dataset record with the transaction-carrying copy.
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, transactionDataset);
        // Record the delta files (with their pending ops) in the metadata.
        for (ExternalFile file : addedFiles) {
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }
        for (ExternalFile file : appendedFiles) {
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }
        // Deletions are also recorded as (pending) file entries at this stage.
        for (ExternalFile file : deletedFiles) {
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }
        // Create the files-index update job.
        spec = ExternalIndexingOperations.buildFilesIndexUpdateOp(ds, metadataFiles, addedFiles, appendedFiles, metadataProvider);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        transactionState = TransactionState.BEGIN;
        // Run the files-index update job (outside any metadata transaction).
        JobUtils.runJob(hcc, spec, true);
        // Update every secondary index (the files index was handled above).
        for (Index index : indexes) {
            if (!ExternalIndexingOperations.isFileIndex(index)) {
                spec = ExternalIndexingOperations.buildIndexUpdateOp(ds, index, metadataFiles, addedFiles, appendedFiles, metadataProvider);
                JobUtils.runJob(hcc, spec, true);
            }
        }
        // All index updates completed successfully; prepare the commit job.
        spec = ExternalIndexingOperations.buildCommitJob(ds, indexes, metadataProvider);
        // Start a new transaction and durably record the decision to commit.
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        bActiveTxn = true;
        ((ExternalDatasetDetails) transactionDataset.getDatasetDetails()).setState(TransactionState.READY_TO_COMMIT);
        ((ExternalDatasetDetails) transactionDataset.getDatasetDetails()).setRefreshTimestamp(txnTime);
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, transactionDataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        transactionState = TransactionState.READY_TO_COMMIT;
        // We don't release the latch since this job is expected to be quick.
        JobUtils.runJob(hcc, spec, true);
        // Start a new metadata transaction to record the final state.
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        bActiveTxn = true;
        // Resolve each previously-recorded file against the pending appends.
        for (ExternalFile file : metadataFiles) {
            if (file.getPendingOp() == ExternalFilePendingOp.DROP_OP) {
                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
            } else if (file.getPendingOp() == ExternalFilePendingOp.NO_OP) {
                Iterator<ExternalFile> iterator = appendedFiles.iterator();
                while (iterator.hasNext()) {
                    ExternalFile appendedFile = iterator.next();
                    if (file.getFileName().equals(appendedFile.getFileName())) {
                        // delete the existing file record
                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                        // delete the pending appended-file record
                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, appendedFile);
                        // re-add the original file carrying the appended information
                        appendedFile.setFileNumber(file.getFileNumber());
                        appendedFile.setPendingOp(ExternalFilePendingOp.NO_OP);
                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, appendedFile);
                        // Iterator.remove keeps the list consistent while iterating.
                        iterator.remove();
                    }
                }
            }
        }
        // Remove the deleted-files delta records.
        for (ExternalFile file : deletedFiles) {
            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
        }
        // Re-insert added files with their pending flag cleared.
        for (ExternalFile file : addedFiles) {
            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
            file.setPendingOp(ExternalFilePendingOp.NO_OP);
            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
        }
        // Mark the refresh transaction as complete.
        ((ExternalDatasetDetails) transactionDataset.getDatasetDetails()).setState(TransactionState.COMMIT);
        MetadataManager.INSTANCE.updateDataset(mdTxnCtx, transactionDataset);
        // Commit the final metadata transaction.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        success = true;
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (transactionState == TransactionState.READY_TO_COMMIT) {
            // Commit decision was durably recorded but the commit phase failed;
            // there is no safe rollback from here.
            throw new IllegalStateException("System is inconsistent state: commit of (" + dataverseName + "." + datasetName + ") refresh couldn't carry out the commit phase", e);
        }
        if (transactionState == TransactionState.COMMIT) {
            // Nothing durable was changed; everything should be clean.
            throw e;
        }
        if (transactionState == TransactionState.BEGIN) {
            // The refresh transaction failed mid-flight:
            // (a) clean NC-side transaction components via an abort job.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            spec = ExternalIndexingOperations.buildAbortOp(ds, indexes, metadataProvider);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            try {
                JobUtils.runJob(hcc, spec, true);
            } catch (Exception e2) {
                // The abort job itself failed; state cannot be repaired here.
                e.addSuppressed(e2);
                throw new IllegalStateException("System is in inconsistent state. Failed to abort refresh", e);
            }
            // (b) drop the delta records and restore the dataset to committed.
            try {
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                for (ExternalFile file : deletedFiles) {
                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                }
                for (ExternalFile file : addedFiles) {
                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                }
                for (ExternalFile file : appendedFiles) {
                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                }
                // Restore the original (pre-transaction) dataset record.
                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                abort(e, e2, mdTxnCtx);
                e.addSuppressed(e2);
                throw new IllegalStateException("System is in inconsistent state. Failed to drop delta files", e);
            }
        }
    } finally {
        // Release the refresh lock only if we actually acquired it.
        if (lockAquired) {
            ExternalDatasetsRegistry.INSTANCE.refreshEnd(ds, success);
        }
        metadataProvider.getLocks().unlock();
    }
}
Also used : TransactionState(org.apache.asterix.common.config.DatasetConfig.TransactionState) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Index(org.apache.asterix.metadata.entities.Index) ExternalFile(org.apache.asterix.external.indexing.ExternalFile) Date(java.util.Date) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) RefreshExternalDatasetStatement(org.apache.asterix.lang.common.statement.RefreshExternalDatasetStatement) ExternalDatasetDetails(org.apache.asterix.metadata.entities.ExternalDatasetDetails) Iterator(java.util.Iterator) JobSpecification(org.apache.hyracks.api.job.JobSpecification)

Example 2 with Dataset

use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.

From the class QueryTranslator, method handleIndexDropStatement:

/**
 * Handles DROP INDEX for both internal and external datasets.
 *
 * Protocol: mark the index record PENDING_DROP_OP and commit, run the NC-side
 * drop job(s), then delete the index record in a fresh transaction. For an
 * external dataset whose only remaining indexes are one secondary index plus the
 * files index, the files index (and the dataset's external-file records) are
 * dropped as well. On failure after the pending record was added, the catch
 * block re-runs the drop jobs best-effort and compensates the metadata.
 *
 * FIX: when re-adding the files index with PENDING_DROP_OP, the original code
 * built the new Index from a mix of {@code externalIndex} and {@code index}
 * accessors (key field types / enforcing flag came from the secondary index
 * being dropped); the record is now built consistently from {@code externalIndex}.
 *
 * @param metadataProvider carries the metadata transaction context and lock list
 * @param stmt             must be an {@link IndexDropStatement}
 * @param hcc              connection used to run the generated drop jobs
 * @throws Exception on metadata/job failures; {@link IllegalStateException} when
 *                   metadata compensation fails and the system is inconsistent
 */
protected void handleIndexDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
    String datasetName = stmtIndexDrop.getDatasetName().getValue();
    String dataverseName = getActiveDataverse(stmtIndexDrop.getDataverseName());
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.dropIndexBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
    String indexName = null;
    // For an external dataset: whether the files index must be dropped too.
    boolean dropFilesIndex = false;
    try {
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }
        // Refuse to drop an index while an active entity (e.g. a feed) uses the dataset.
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
        IActiveEntityEventsListener[] listeners = activeEventHandler.getEventListeners();
        StringBuilder builder = null;
        for (IActiveEntityEventsListener listener : listeners) {
            if (listener.isEntityUsingDataset(ds)) {
                if (builder == null) {
                    builder = new StringBuilder();
                }
                builder.append(new FeedConnectionId(listener.getEntityId(), datasetName) + "\n");
            }
        }
        if (builder != null) {
            throw new CompilationException("Dataset" + datasetName + " is currently being fed into by the following active entities: " + builder.toString());
        }
        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (stmtIndexDrop.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
            }
            // #. prepare a job to drop the index in NC.
            jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds));
            // #. mark PendingDropOp on the existing index record.
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, indexName, index.getIndexType(), index.getKeyFieldNames(), index.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
            // #. commit the existing transaction before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
            for (JobSpecification jobSpec : jobsToExecute) {
                JobUtils.runJob(hcc, jobSpec, true);
            }
            // #. begin a new transaction.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            // #. finally, delete the existing index record.
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
        } else {
            // External dataset.
            indexName = stmtIndexDrop.getIndexName().getValue();
            Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (index == null) {
                if (stmtIndexDrop.getIfExists()) {
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    return;
                } else {
                    throw new AlgebricksException("There is no index with this name " + indexName + ".");
                }
            } else if (ExternalIndexingOperations.isFileIndex(index)) {
                throw new AlgebricksException("Dropping a dataset's files index is not allowed.");
            }
            // #. prepare a job to drop the index in NC.
            jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds));
            List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
            if (datasetIndexes.size() == 2) {
                // Only this index plus the files index remain: drop both.
                dropFilesIndex = true;
                for (Index externalIndex : datasetIndexes) {
                    if (ExternalIndexingOperations.isFileIndex(externalIndex)) {
                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, ds));
                        // #. mark PendingDropOp on the existing files index record.
                        MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, externalIndex.getIndexName());
                        // FIX: rebuild the files index record from externalIndex only
                        // (was mixing in the secondary index's key field types/flag).
                        MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, externalIndex.getIndexName(), externalIndex.getIndexType(), externalIndex.getKeyFieldNames(), externalIndex.getKeyFieldSourceIndicators(), externalIndex.getKeyFieldTypes(), externalIndex.isEnforcingKeyFileds(), externalIndex.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
                    }
                }
            }
            // #. mark PendingDropOp on the existing index record.
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, indexName, index.getIndexType(), index.getKeyFieldNames(), index.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
            // #. commit the existing transaction before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
            for (JobSpecification jobSpec : jobsToExecute) {
                JobUtils.runJob(hcc, jobSpec, true);
            }
            // #. begin a new transaction.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            // #. finally, delete the existing index record.
            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
            if (dropFilesIndex) {
                // Delete the files index and the dataset's external-file records too.
                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
                MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
            }
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // Remove the index artifacts on the NCs (best effort).
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // Do not throw: the metadata still needs to be compensated.
                e.addSuppressed(e2);
            }
            // Remove the pending record(s) from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
                if (dropFilesIndex) {
                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
                }
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) ArrayList(java.util.ArrayList) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Index(org.apache.asterix.metadata.entities.Index) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) IActiveEntityEventsListener(org.apache.asterix.active.IActiveEntityEventsListener) ActiveLifecycleListener(org.apache.asterix.active.ActiveLifecycleListener) IndexDropStatement(org.apache.asterix.lang.common.statement.IndexDropStatement) FeedConnectionId(org.apache.asterix.external.feed.management.FeedConnectionId) JobSpecification(org.apache.hyracks.api.job.JobSpecification) ActiveJobNotificationHandler(org.apache.asterix.active.ActiveJobNotificationHandler)

Example 3 with Dataset

use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.

From the class GlobalRecoveryManager, method recoverDataset:

/**
 * Performs crash recovery for the external datasets of one dataverse: datasets
 * whose refresh transaction was interrupted are either rolled back (state BEGIN)
 * or rolled forward (state READY_TO_COMMIT). The metadata dataverse is skipped.
 *
 * FIXES over the original:
 * - The READY_TO_COMMIT branch was attached to {@code if (!indexes.isEmpty())},
 *   so roll-forward only ran for datasets WITHOUT indexes (and then built a
 *   recover job over an empty index list), while the abort path ran for every
 *   indexed dataset regardless of its state. Both branches now require a
 *   non-empty index list and dispatch on the dataset's transaction state.
 * - The "correct the dataset state" step sat inside the per-file loop, committing
 *   the metadata transaction once per pending file (and never when the file list
 *   was empty). It now runs exactly once, after all files are processed.
 *
 * @param appCtx    application context used to build the MetadataProvider
 * @param mdTxnCtx  current metadata transaction context; may be replaced when a
 *                  recovery step commits, so the (possibly new) context is returned
 * @param dataverse the dataverse whose datasets are examined
 * @return the metadata transaction context to continue with
 * @throws Exception on metadata access or recovery-job failures
 */
private MetadataTransactionContext recoverDataset(ICcApplicationContext appCtx, MetadataTransactionContext mdTxnCtx, Dataverse dataverse) throws Exception {
    if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
        MetadataProvider metadataProvider = new MetadataProvider(appCtx, dataverse, componentProvider);
        try {
            List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverse.getDataverseName());
            for (Dataset dataset : datasets) {
                if (dataset.getDatasetType() != DatasetType.EXTERNAL) {
                    // Only external datasets carry a refresh-transaction state.
                    continue;
                }
                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataset.getDataverseName(), dataset.getDatasetName());
                if (indexes.isEmpty()) {
                    // No indexes -> no NC artifacts to recover.
                    continue;
                }
                ExternalDatasetDetails dsd = (ExternalDatasetDetails) dataset.getDatasetDetails();
                TransactionState datasetState = dsd.getState();
                if (datasetState == TransactionState.BEGIN) {
                    // Refresh started but never reached the commit decision: roll back.
                    List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, dataset);
                    // 1. delete all pending file records
                    for (ExternalFile file : files) {
                        if (file.getPendingOp() != ExternalFilePendingOp.NO_OP) {
                            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                        }
                    }
                    // 2. clean transaction artifacts in the NCs
                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
                    JobSpecification jobSpec = ExternalIndexingOperations.buildAbortOp(dataset, indexes, metadataProvider);
                    executeHyracksJob(jobSpec);
                    // 3. correct the dataset state and commit once
                    ((ExternalDatasetDetails) dataset.getDatasetDetails()).setState(TransactionState.COMMIT);
                    MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                } else if (datasetState == TransactionState.READY_TO_COMMIT) {
                    // Commit decision was recorded: roll forward.
                    List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, dataset);
                    // 1. commit indexes in the NCs
                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
                    JobSpecification jobSpec = ExternalIndexingOperations.buildRecoverOp(dataset, indexes, metadataProvider);
                    executeHyracksJob(jobSpec);
                    // 2. resolve pending file records in the metadata
                    for (ExternalFile file : files) {
                        if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
                            // Re-add the file with its pending flag cleared.
                            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                            file.setPendingOp(ExternalFilePendingOp.NO_OP);
                            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
                        } else if (file.getPendingOp() == ExternalFilePendingOp.DROP_OP) {
                            // Find the original record and drop both.
                            // NOTE(review): matches by file name only — presumably the
                            // original NO_OP record shares the name; confirm upstream.
                            for (ExternalFile originalFile : files) {
                                if (originalFile.getFileName().equals(file.getFileName())) {
                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, originalFile);
                                    break;
                                }
                            }
                        } else if (file.getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
                            // Fold the appended size into the original record.
                            for (ExternalFile originalFile : files) {
                                if (originalFile.getFileName().equals(file.getFileName())) {
                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, originalFile);
                                    originalFile.setSize(file.getSize());
                                    MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, originalFile);
                                }
                            }
                        }
                    }
                    // 3. correct the dataset state and commit once
                    // (moved out of the per-file loop: previously committed per file
                    // and was skipped entirely when the file list was empty)
                    ((ExternalDatasetDetails) dataset.getDatasetDetails()).setState(TransactionState.COMMIT);
                    MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                }
            }
        } finally {
            metadataProvider.getLocks().unlock();
        }
    }
    return mdTxnCtx;
}
Also used : TransactionState(org.apache.asterix.common.config.DatasetConfig.TransactionState) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider) Dataset(org.apache.asterix.metadata.entities.Dataset) ExternalDatasetDetails(org.apache.asterix.metadata.entities.ExternalDatasetDetails) Index(org.apache.asterix.metadata.entities.Index) List(java.util.List) JobSpecification(org.apache.hyracks.api.job.JobSpecification) ExternalFile(org.apache.asterix.external.indexing.ExternalFile)

Example 4 with Dataset

use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.

From the class QueryTranslator, method doDropDataset:

/**
 * Drops a dataset within a metadata transaction, delegating the heavy lifting to
 * {@code Dataset.drop}. If the dataset is missing and {@code ifExists} is set,
 * the call is a no-op. When a failure happens after a pending-drop record was
 * added to the metadata, the catch block re-runs the NC drop jobs best-effort
 * and then removes the dataset record in a compensating transaction.
 *
 * @param dataverseName              dataverse containing the dataset
 * @param datasetName                name of the dataset to drop
 * @param metadataProvider           carries the metadata transaction context
 * @param ifExists                   when true, a missing dataset is not an error
 * @param hcc                        connection used to run compensation jobs
 * @param dropCorrespondingNodeGroup whether the dataset's node group is dropped too
 * @throws Exception on metadata/job failures; {@link IllegalStateException} when
 *                   the pending dataset record cannot be compensated
 */
public static void doDropDataset(String dataverseName, String datasetName, MetadataProvider metadataProvider, boolean ifExists, IHyracksClientConnection hcc, boolean dropCorrespondingNodeGroup) throws Exception {
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    MutableObject<MetadataTransactionContext> txnCtx = new MutableObject<>(MetadataManager.INSTANCE.beginTransaction());
    MutableBoolean activeTxn = new MutableBoolean(true);
    List<JobSpecification> specs = new ArrayList<>();
    metadataProvider.setMetadataTxnContext(txnCtx.getValue());
    try {
        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
        if (dataset == null) {
            // Missing dataset: silently succeed only when IF EXISTS was given.
            if (!ifExists) {
                throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName + ".");
            }
            MetadataManager.INSTANCE.commitTransaction(txnCtx.getValue());
            return;
        }
        // Dataset.drop mutates txnCtx/activeTxn/progress as it advances.
        dataset.drop(metadataProvider, txnCtx, specs, activeTxn, progress, hcc, dropCorrespondingNodeGroup);
        MetadataManager.INSTANCE.commitTransaction(txnCtx.getValue());
    } catch (Exception e) {
        if (activeTxn.booleanValue()) {
            abort(e, e, txnCtx.getValue());
        }
        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // First remove the NC-side artifacts (best effort).
            try {
                for (JobSpecification spec : specs) {
                    JobUtils.runJob(hcc, spec, true);
                }
            } catch (Exception nested) {
                // Keep going: the metadata still needs to be compensated.
                e.addSuppressed(nested);
            }
            // Then remove the pending dataset record from the metadata.
            txnCtx.setValue(MetadataManager.INSTANCE.beginTransaction());
            metadataProvider.setMetadataTxnContext(txnCtx.getValue());
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
                MetadataManager.INSTANCE.commitTransaction(txnCtx.getValue());
            } catch (Exception nested) {
                e.addSuppressed(nested);
                abort(e, nested, txnCtx.getValue());
                throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) MutableBoolean(org.apache.commons.lang3.mutable.MutableBoolean) ArrayList(java.util.ArrayList) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) JobSpecification(org.apache.hyracks.api.job.JobSpecification) MutableObject(org.apache.commons.lang3.mutable.MutableObject)

Example 5 with Dataset

use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.

From the class MetadataProvider, method findDataSourceIndex:

@Override
public IDataSourceIndex<String, DataSourceId> findDataSourceIndex(String indexId, DataSourceId dataSourceId) throws AlgebricksException {
    // Resolve the dataset backing this data source, then look up the index by name.
    Dataset dataset = ((DatasetDataSource) findDataSource(dataSourceId)).getDataset();
    Index resolved = getIndex(dataset.getDataverseName(), dataset.getDatasetName(), indexId);
    if (resolved == null) {
        // No such index on this dataset.
        return null;
    }
    return new DataSourceIndex(resolved, dataset.getDataverseName(), dataset.getDatasetName(), this);
}
Also used : Dataset(org.apache.asterix.metadata.entities.Dataset) IDataSourceIndex(org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex) Index(org.apache.asterix.metadata.entities.Index) IDataSourceIndex(org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex) IDataSource(org.apache.hyracks.algebricks.core.algebra.metadata.IDataSource)

Aggregations

Dataset (org.apache.asterix.metadata.entities.Dataset)77 ArrayList (java.util.ArrayList)33 AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)32 Index (org.apache.asterix.metadata.entities.Index)25 LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable)23 MetadataException (org.apache.asterix.metadata.MetadataException)19 ARecordType (org.apache.asterix.om.types.ARecordType)19 IAType (org.apache.asterix.om.types.IAType)18 ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression)18 List (java.util.List)17 ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator)16 RemoteException (java.rmi.RemoteException)15 AsterixException (org.apache.asterix.common.exceptions.AsterixException)15 MetadataProvider (org.apache.asterix.metadata.declared.MetadataProvider)15 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)15 IOException (java.io.IOException)14 MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext)14 CompilationException (org.apache.asterix.common.exceptions.CompilationException)13 AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint)12 ACIDException (org.apache.asterix.common.exceptions.ACIDException)11