Use of org.apache.asterix.active.IActiveEntityEventsListener in project asterixdb by apache.
The class QueryTranslator, method handleIndexDropStatement.
protected void handleIndexDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
String datasetName = stmtIndexDrop.getDatasetName().getValue();
String dataverseName = getActiveDataverse(stmtIndexDrop.getDataverseName());
ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
List<JobSpecification> jobsToExecute = new ArrayList<>();
MetadataLockManager.INSTANCE.dropIndexBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
String indexName = null;
// For external index
boolean dropFilesIndex = false;
try {
Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
if (ds == null) {
throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
}
ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
IActiveEntityEventsListener[] listeners = activeEventHandler.getEventListeners();
StringBuilder builder = null;
for (IActiveEntityEventsListener listener : listeners) {
if (listener.isEntityUsingDataset(ds)) {
if (builder == null) {
builder = new StringBuilder();
}
builder.append(new FeedConnectionId(listener.getEntityId(), datasetName) + "\n");
}
}
if (builder != null) {
throw new CompilationException("Dataset" + datasetName + " is currently being fed into by the following active entities: " + builder.toString());
}
if (ds.getDatasetType() == DatasetType.INTERNAL) {
indexName = stmtIndexDrop.getIndexName().getValue();
Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
if (index == null) {
if (stmtIndexDrop.getIfExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("There is no index with this name " + indexName + ".");
}
}
// #. prepare a job to drop the index in NC.
jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds));
// #. mark PendingDropOp on the existing index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, indexName, index.getIndexType(), index.getKeyFieldNames(), index.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
// #. commit the existing transaction before calling runJob.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
// #. begin a new transaction
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. finally, delete the existing index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
} else {
// External dataset
indexName = stmtIndexDrop.getIndexName().getValue();
Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
if (index == null) {
if (stmtIndexDrop.getIfExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("There is no index with this name " + indexName + ".");
}
} else if (ExternalIndexingOperations.isFileIndex(index)) {
throw new AlgebricksException("Dropping a dataset's files index is not allowed.");
}
// #. prepare a job to drop the index in NC.
jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds));
List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
if (datasetIndexes.size() == 2) {
dropFilesIndex = true;
// only one index + the files index, we need to delete both of the indexes
for (Index externalIndex : datasetIndexes) {
if (ExternalIndexingOperations.isFileIndex(externalIndex)) {
jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, ds));
// #. mark PendingDropOp on the existing files index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, externalIndex.getIndexName());
MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, externalIndex.getIndexName(), externalIndex.getIndexType(), externalIndex.getKeyFieldNames(), externalIndex.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), externalIndex.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
}
}
}
// #. mark PendingDropOp on the existing index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(dataverseName, datasetName, indexName, index.getIndexType(), index.getKeyFieldNames(), index.getKeyFieldSourceIndicators(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index.isPrimaryIndex(), MetadataUtil.PENDING_DROP_OP));
// #. commit the existing transaction before calling runJob.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
// #. begin a new transaction
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. finally, delete the existing index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
if (dropFilesIndex) {
// delete the files index too
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
}
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
// remove all the indexes in NC
try {
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
} catch (Exception e2) {
// do not throw the exception since the metadata still needs to be compensated.
e.addSuppressed(e2);
}
// remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
if (dropFilesIndex) {
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
}
}
throw e;
} finally {
metadataProvider.getLocks().unlock();
ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
}
}
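The recurring pattern across these usages: before any destructive DDL, the handler asks ActiveJobNotificationHandler for every registered IActiveEntityEventsListener and aborts if any of them reports the dataset as in use. Below is a minimal sketch of that check factored into a helper, assuming only the getEventListeners(), isEntityUsingDataset(), and getEntityId() calls shown above; the helper name and its standalone placement are illustrative, not part of QueryTranslator.

private static void ensureNoActiveEntityUsesDataset(ActiveJobNotificationHandler activeEventHandler,
        Dataset ds) throws CompilationException {
    StringBuilder users = null;
    for (IActiveEntityEventsListener listener : activeEventHandler.getEventListeners()) {
        if (listener.isEntityUsingDataset(ds)) {
            if (users == null) {
                users = new StringBuilder();
            }
            // the entity id identifies the feed (or other active job) holding the dataset
            users.append(listener.getEntityId()).append('\n');
        }
    }
    if (users != null) {
        throw new CompilationException("Dataset " + ds.getDatasetName()
                + " is currently being used by the following active entities:\n" + users);
    }
}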
Use of org.apache.asterix.active.IActiveEntityEventsListener in project asterixdb by apache.
The class QueryTranslator, method validateIfResourceIsActiveInFeed.
protected void validateIfResourceIsActiveInFeed(Dataset dataset) throws CompilationException {
StringBuilder builder = null;
ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
IActiveEntityEventsListener[] listeners = activeEventHandler.getEventListeners();
for (IActiveEntityEventsListener listener : listeners) {
if (listener.isEntityUsingDataset(dataset)) {
if (builder == null) {
builder = new StringBuilder();
}
builder.append(listener.getEntityId() + "\n");
}
}
if (builder != null) {
throw new CompilationException("Dataset " + dataset.getDataverseName() + "." + dataset.getDatasetName() + " is currently being " + "fed into by the following active entities.\n" + builder.toString());
}
}
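The same loop can be exercised in isolation with a mocked listener. A minimal test sketch, assuming JUnit 4 and Mockito are on the classpath and that Dataset is mockable; the class name, entity names, and the plain "Feed" extension string (the real code uses Feed.EXTENSION_NAME) are illustrative only.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.asterix.active.EntityId;
import org.apache.asterix.active.IActiveEntityEventsListener;
import org.apache.asterix.metadata.entities.Dataset;
import org.junit.Assert;
import org.junit.Test;

public class ActiveEntityUsageCheckTest {

    @Test
    public void datasetUsedByActiveEntityIsReported() {
        Dataset dataset = mock(Dataset.class);
        IActiveEntityEventsListener busy = mock(IActiveEntityEventsListener.class);
        when(busy.isEntityUsingDataset(dataset)).thenReturn(true);
        // assumes the three-argument EntityId(extension, dataverse, entity) constructor
        when(busy.getEntityId()).thenReturn(new EntityId("Feed", "SocialData", "TwitterFeed"));

        // Replicates the loop body of validateIfResourceIsActiveInFeed above.
        StringBuilder builder = null;
        for (IActiveEntityEventsListener listener : new IActiveEntityEventsListener[] { busy }) {
            if (listener.isEntityUsingDataset(dataset)) {
                if (builder == null) {
                    builder = new StringBuilder();
                }
                builder.append(listener.getEntityId()).append('\n');
            }
        }
        Assert.assertNotNull("the active entity should have been reported", builder);
    }
}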
Use of org.apache.asterix.active.IActiveEntityEventsListener in project asterixdb by apache.
The class Dataset, method drop.
/**
* Drop this dataset
*
* @param metadataProvider
* metadata provider that can be used to get metadata info and runtimes
* @param mdTxnCtx
* the transaction context
* @param jobsToExecute
* a list of jobs to be executed as part of the drop operation
* @param bActiveTxn
* whether the metadata transaction is ongoing
* @param progress
* a mutable progress state used for error handling during the drop operation
* @param hcc
* a client connection to hyracks master for job execution
* @throws Exception
* if an error occurs during the drop process or if the dataset can't be dropped for any reason
*/
public void drop(MetadataProvider metadataProvider, MutableObject<MetadataTransactionContext> mdTxnCtx, List<JobSpecification> jobsToExecute, MutableBoolean bActiveTxn, MutableObject<ProgressState> progress, IHyracksClientConnection hcc, boolean dropCorrespondingNodeGroup) throws Exception {
Map<FeedConnectionId, Pair<JobSpecification, Boolean>> disconnectJobList = new HashMap<>();
if (getDatasetType() == DatasetType.INTERNAL) {
// prepare job spec(s) that would disconnect any active feeds involving the dataset.
ActiveLifecycleListener activeListener = (ActiveLifecycleListener) metadataProvider.getApplicationContext().getActiveLifecycleListener();
IActiveEntityEventsListener[] activeListeners = activeListener.getNotificationHandler().getEventListeners();
for (IActiveEntityEventsListener listener : activeListeners) {
if (listener.isEntityUsingDataset(this)) {
throw new CompilationException(ErrorCode.COMPILATION_CANT_DROP_ACTIVE_DATASET, RecordUtil.toFullyQualifiedName(dataverseName, datasetName), listener.getEntityId().toString());
}
}
// #. prepare jobs to drop the dataset and the indexes in NC
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx.getValue(), dataverseName, datasetName);
for (int j = 0; j < indexes.size(); j++) {
if (indexes.get(j).isSecondaryIndex()) {
jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(j), metadataProvider, this));
}
}
jobsToExecute.add(DatasetUtil.dropDatasetJobSpec(this, metadataProvider));
// #. mark the existing dataset as PendingDropOp
MetadataManager.INSTANCE.dropDataset(mdTxnCtx.getValue(), dataverseName, datasetName);
MetadataManager.INSTANCE.addDataset(mdTxnCtx.getValue(), new Dataset(dataverseName, datasetName, getItemTypeDataverseName(), getItemTypeName(), getMetaItemTypeDataverseName(), getMetaItemTypeName(), getNodeGroupName(), getCompactionPolicy(), getCompactionPolicyProperties(), getDatasetDetails(), getHints(), getDatasetType(), getDatasetId(), MetadataUtil.PENDING_DROP_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
bActiveTxn.setValue(false);
progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);
// # disconnect the feeds
for (Pair<JobSpecification, Boolean> p : disconnectJobList.values()) {
JobUtils.runJob(hcc, p.first, true);
}
// #. run the jobs
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
mdTxnCtx.setValue(MetadataManager.INSTANCE.beginTransaction());
bActiveTxn.setValue(true);
metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
} else {
// External dataset
ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(this);
// #. prepare jobs to drop the dataset and the indexes in NC
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx.getValue(), dataverseName, datasetName);
for (int j = 0; j < indexes.size(); j++) {
if (ExternalIndexingOperations.isFileIndex(indexes.get(j))) {
jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(j), metadataProvider, this));
} else {
jobsToExecute.add(DatasetUtil.buildDropFilesIndexJobSpec(metadataProvider, this));
}
}
// #. mark the existing dataset as PendingDropOp
MetadataManager.INSTANCE.dropDataset(mdTxnCtx.getValue(), dataverseName, datasetName);
MetadataManager.INSTANCE.addDataset(mdTxnCtx.getValue(), new Dataset(dataverseName, datasetName, getItemTypeDataverseName(), getItemTypeName(), getNodeGroupName(), getCompactionPolicy(), getCompactionPolicyProperties(), getDatasetDetails(), getHints(), getDatasetType(), getDatasetId(), MetadataUtil.PENDING_DROP_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
bActiveTxn.setValue(false);
progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);
// #. run the jobs
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
if (!indexes.isEmpty()) {
ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(this);
}
mdTxnCtx.setValue(MetadataManager.INSTANCE.beginTransaction());
bActiveTxn.setValue(true);
metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
}
// #. finally, delete the dataset.
MetadataManager.INSTANCE.dropDataset(mdTxnCtx.getValue(), dataverseName, datasetName);
// Drops the associated nodegroup if it is no longer used by any other dataset.
if (dropCorrespondingNodeGroup) {
MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroupName);
MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx.getValue(), nodeGroupName, true);
}
}
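Dataset.drop follows the same two-phase protocol as the QueryTranslator handlers: it first checks the active-entity listeners, then marks the dataset record PENDING_DROP_OP, commits, runs the NC jobs, and reopens a transaction for the final delete. Because the method may swap the metadata transaction underneath the caller, the context, the active-transaction flag, and the progress state are passed as mutable wrappers. A hedged caller sketch based on the signature above; variable names are illustrative and error handling is elided.

// assumes an open metadata transaction in mdTxnCtx and a MetadataProvider / hcc already in scope
MutableObject<MetadataTransactionContext> mdTxnCtxRef = new MutableObject<>(mdTxnCtx);
MutableBoolean activeTxn = new MutableBoolean(true);
MutableObject<ProgressState> progressRef = new MutableObject<>(ProgressState.NO_PROGRESS);
List<JobSpecification> jobsToExecute = new ArrayList<>();

dataset.drop(metadataProvider, mdTxnCtxRef, jobsToExecute, activeTxn, progressRef, hcc,
        true /* dropCorrespondingNodeGroup */);

// drop() may have committed and reopened the transaction, so continue with
// mdTxnCtxRef.getValue() rather than the original mdTxnCtx.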
Use of org.apache.asterix.active.IActiveEntityEventsListener in project asterixdb by apache.
The class QueryTranslator, method handleDataverseDropStatement.
protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
String dataverseName = stmtDelete.getDataverseName().getValue();
if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
throw new HyracksDataException(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME + " dataverse can't be dropped");
}
ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
List<JobSpecification> jobsToExecute = new ArrayList<>();
MetadataLockManager.INSTANCE.acquireDataverseWriteLock(metadataProvider.getLocks(), dataverseName);
try {
Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
if (dv == null) {
if (stmtDelete.getIfExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
}
}
// # disconnect all feeds from any datasets in the dataverse.
ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
IActiveEntityEventsListener[] activeListeners = activeEventHandler.getEventListeners();
Identifier dvId = new Identifier(dataverseName);
MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse(), metadataProvider.getStorageComponentProvider());
tempMdProvider.setConfig(metadataProvider.getConfig());
for (IActiveEntityEventsListener listener : activeListeners) {
EntityId activeEntityId = listener.getEntityId();
if (activeEntityId.getExtensionName().equals(Feed.EXTENSION_NAME) && activeEntityId.getDataverse().equals(dataverseName)) {
tempMdProvider.getLocks().reset();
stopFeedBeforeDelete(new Pair<>(dvId, new Identifier(activeEntityId.getEntityName())), tempMdProvider);
// prepare job to remove feed log storage
jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(metadataProvider, MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
}
}
// #. prepare jobs which will drop corresponding datasets with indexes.
List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
for (Dataset dataset : datasets) {
String datasetName = dataset.getDatasetName();
DatasetType dsType = dataset.getDatasetType();
if (dsType == DatasetType.INTERNAL) {
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
for (Index index : indexes) {
jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, dataset));
}
} else {
// External dataset
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
for (int k = 0; k < indexes.size(); k++) {
if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, dataset));
} else {
jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider, dataset));
}
}
ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(dataset);
}
}
jobsToExecute.add(DataverseUtil.dropDataverseJobSpec(dv, metadataProvider));
// #. mark PendingDropOp on the dataverse record by
// first, deleting the dataverse record from the DATAVERSE_DATASET
// second, inserting the dataverse record with the PendingDropOp value into the
// DATAVERSE_DATASET
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. finally, delete the dataverse.
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
// Drops all node groups that are no longer needed
for (Dataset dataset : datasets) {
String nodeGroup = dataset.getNodeGroupName();
MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
if (MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup) != null) {
MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodeGroup, true);
}
}
if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
activeDataverse = null;
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
activeDataverse = null;
}
// remove all the indexes in NC
try {
for (JobSpecification jobSpec : jobsToExecute) {
JobUtils.runJob(hcc, jobSpec, true);
}
} catch (Exception e2) {
// do not throw the exception since the metadata still needs to be compensated.
e.addSuppressed(e2);
}
// remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
try {
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is inconsistent state: pending dataverse(" + dataverseName + ") couldn't be removed from the metadata", e);
}
}
throw e;
} finally {
metadataProvider.getLocks().unlock();
ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
}
}
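Within the dataverse drop, the listener array is filtered down to feed entities that belong to the dataverse being dropped, and each matching feed is stopped before its log storage is removed. A minimal sketch of that filter, assuming only the EntityId accessors already used above; the helper name is hypothetical.

private static List<IActiveEntityEventsListener> activeFeedsInDataverse(
        IActiveEntityEventsListener[] listeners, String dataverseName) {
    List<IActiveEntityEventsListener> result = new ArrayList<>();
    for (IActiveEntityEventsListener listener : listeners) {
        EntityId entityId = listener.getEntityId();
        // keep only feed entities registered under the dataverse being dropped
        if (Feed.EXTENSION_NAME.equals(entityId.getExtensionName())
                && dataverseName.equals(entityId.getDataverse())) {
            result.add(listener);
        }
    }
    return result;
}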