Use of org.apache.asterix.lang.common.statement.DataverseDropStatement in project asterixdb by apache.
The class AbstractLangTranslator, method validateOperation:
public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt)
        throws AsterixException {
    if (!(ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)
            && ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted())) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        try {
            ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE, maxWaitCycles, TimeUnit.SECONDS);
        } catch (HyracksDataException e) {
            throw new AsterixException(e);
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to be " + ClusterState.ACTIVE);
            }
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
            throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state."
                    + "\n One or more Node Controllers have left or haven't joined yet.\n");
        } else {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
            }
        }
    }
    if (ClusterStateManager.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
        throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state."
                + "\n One or more Node Controllers have left.\n");
    }
    if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        int waitCycleCount = 0;
        try {
            while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
                Thread.sleep(1000);
                waitCycleCount++;
            }
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery");
            }
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
            throw new AsterixException("Cluster Global recovery is not yet complete and the system is in "
                    + ClusterState.ACTIVE + " state");
        }
    }
    boolean invalidOperation = false;
    String message = null;
    String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
    switch (stmt.getKind()) {
        case Statement.Kind.INSERT:
            InsertStatement insertStmt = (InsertStatement) stmt;
            if (insertStmt.getDataverseName() != null) {
                dataverse = insertStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Insert operation is not permitted in dataverse "
                        + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DELETE:
            DeleteStatement deleteStmt = (DeleteStatement) stmt;
            if (deleteStmt.getDataverseName() != null) {
                dataverse = deleteStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Delete operation is not permitted in dataverse "
                        + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATAVERSE_DROP:
            DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
            invalidOperation =
                    MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName().getValue());
            if (invalidOperation) {
                message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
            }
            break;
        case Statement.Kind.DATASET_DROP:
            DropDatasetStatement dropStmt = (DropDatasetStatement) stmt;
            if (dropStmt.getDataverseName() != null) {
                dataverse = dropStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Cannot drop a dataset belonging to the dataverse:"
                        + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATASET_DECL:
            DatasetDecl datasetStmt = (DatasetDecl) stmt;
            Map<String, String> hints = datasetStmt.getHints();
            if (hints != null && !hints.isEmpty()) {
                Pair<Boolean, String> validationResult = null;
                StringBuffer errorMsgBuffer = new StringBuffer();
                for (Entry<String, String> hint : hints.entrySet()) {
                    validationResult = DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
                    if (!validationResult.first) {
                        errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue()
                                + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
                        errorMsgBuffer.append(" \n");
                    }
                }
                invalidOperation = errorMsgBuffer.length() > 0;
                if (invalidOperation) {
                    message = errorMsgBuffer.toString();
                }
            }
            break;
        default:
            break;
    }
    if (invalidOperation) {
        throw new AsterixException("Invalid operation - " + message);
    }
}
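For context, here is a minimal sketch of how this validation hook is typically reached: each parsed statement is checked against the cluster state and the Metadata-dataverse guard before it is compiled. The loop and the variable names (statements, activeDataverse) are illustrative assumptions, not the actual AsterixDB call site.

for (Statement stmt : statements) {
    // Throws AsterixException if the cluster is not ACTIVE, global recovery
    // has not completed, or the statement targets the Metadata dataverse.
    validateOperation(appCtx, activeDataverse, stmt);
    // ... rewrite, compile, and execute the statement ...
}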
Use of org.apache.asterix.lang.common.statement.DataverseDropStatement in project asterixdb by apache.
The class QueryTranslator, method handleDataverseDropStatement:
protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
    String dataverseName = stmtDelete.getDataverseName().getValue();
    if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
        throw new HyracksDataException(
                MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME + " dataverse can't be dropped");
    }
    ProgressState progress = ProgressState.NO_PROGRESS;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.acquireDataverseWriteLock(metadataProvider.getLocks(), dataverseName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dv == null) {
            if (stmtDelete.getIfExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
            }
        }
        // #. disconnect all feeds from any datasets in the dataverse.
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
        IActiveEntityEventsListener[] activeListeners = activeEventHandler.getEventListeners();
        Identifier dvId = new Identifier(dataverseName);
        MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse(),
                metadataProvider.getStorageComponentProvider());
        tempMdProvider.setConfig(metadataProvider.getConfig());
        for (IActiveEntityEventsListener listener : activeListeners) {
            EntityId activeEntityId = listener.getEntityId();
            if (activeEntityId.getExtensionName().equals(Feed.EXTENSION_NAME)
                    && activeEntityId.getDataverse().equals(dataverseName)) {
                tempMdProvider.getLocks().reset();
                stopFeedBeforeDelete(new Pair<>(dvId, new Identifier(activeEntityId.getEntityName())),
                        tempMdProvider);
                // prepare a job to remove the feed log storage
                jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(metadataProvider,
                        MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
            }
        }
        // #. prepare jobs which will drop the corresponding datasets with their indexes.
        List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
        for (Dataset dataset : datasets) {
            String datasetName = dataset.getDatasetName();
            DatasetType dsType = dataset.getDatasetType();
            if (dsType == DatasetType.INTERNAL) {
                List<Index> indexes =
                        MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (Index index : indexes) {
                    jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, dataset));
                }
            } else {
                // external dataset
                List<Index> indexes =
                        MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
                for (int k = 0; k < indexes.size(); k++) {
                    if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
                        jobsToExecute.add(
                                ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, dataset));
                    } else {
                        jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(k), metadataProvider, dataset));
                    }
                }
                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(dataset);
            }
        }
        jobsToExecute.add(DataverseUtil.dropDataverseJobSpec(dv, metadataProvider));
        // #. mark PendingDropOp on the dataverse record by
        // first, deleting the dataverse record from the DATAVERSE_DATASET,
        // second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
                new Dataverse(dataverseName, dv.getDataFormat(), MetadataUtil.PENDING_DROP_OP));
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // #. finally, delete the dataverse.
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
        // drop all node groups that are no longer needed
        for (Dataset dataset : datasets) {
            String nodeGroup = dataset.getNodeGroupName();
            MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
            if (MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup) != null) {
                MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodeGroup, true);
            }
        }
        if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
            activeDataverse = null;
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            if (activeDataverse != null && activeDataverse.getDataverseName().equals(dataverseName)) {
                activeDataverse = null;
            }
            // remove all the indexes in the NCs
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception, since the metadata still needs to be compensated
                e.addSuppressed(e2);
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            try {
                MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is in an inconsistent state: pending dataverse("
                        + dataverseName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
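This handler is reached from the statement dispatch in QueryTranslator. The following is a hedged sketch of that dispatch; the real switch covers many more statement kinds, and the example SQL++ text in the comment is illustrative:

switch (stmt.getKind()) {
    case Statement.Kind.DATAVERSE_DROP:
        // e.g. a statement parsed from: DROP DATAVERSE myDataverse IF EXISTS;
        handleDataverseDropStatement(metadataProvider, stmt, hcc);
        break;
    default:
        break;
}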