Use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.
Class QueryTranslator, method handleTypeDropStatement.
protected void handleTypeDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    TypeDropStatement stmtTypeDrop = (TypeDropStatement) stmt;
    String dataverseName = getActiveDataverse(stmtTypeDrop.getDataverseName());
    String typeName = stmtTypeDrop.getTypeName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.dropTypeBegin(metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + typeName);
    try {
        Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
        if (dt == null) {
            if (!stmtTypeDrop.getIfExists()) {
                throw new AlgebricksException("There is no datatype with this name " + typeName + ".");
            }
        } else {
            MetadataManager.INSTANCE.dropDatatype(mdTxnCtx, dataverseName, typeName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
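The handlers on this page all follow the same metadata-transaction discipline: begin a transaction, register it with the MetadataProvider, take the statement-specific metadata locks, do the work and commit inside a try block, abort and rethrow on failure, and always release the locks in finally. The following is a minimal sketch of that recurring shape, assuming the same QueryTranslator context as the snippets on this page; the runWithMetadataTxn name and the elided work are illustrative placeholders, not part of the actual class.

// Sketch only: distills the begin/commit/abort/unlock pattern shared by the handlers on this page.
protected void runWithMetadataTxn(MetadataProvider metadataProvider) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // ... acquire the statement-specific locks via MetadataLockManager.INSTANCE here ...
    try {
        // ... perform the metadata reads and writes for the statement ...
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx); // roll back the metadata transaction
        throw e; // propagate the original failure to the caller
    } finally {
        metadataProvider.getLocks().unlock(); // locks are released whether or not the commit succeeded
    }
}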
Use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.
Class QueryTranslator, method handleDisconnectFeedStatement.
protected void handleDisconnectFeedStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    DisconnectFeedStatement cfs = (DisconnectFeedStatement) stmt;
    String dataverseName = getActiveDataverse(cfs.getDataverseName());
    String datasetName = cfs.getDatasetName().getValue();
    String feedName = cfs.getFeedName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
    ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
    // Check whether feed is alive
    if (activeEventHandler.getActiveEntityListener(new EntityId(Feed.EXTENSION_NAME, dataverseName,
            feedName)) != null) {
        throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, feedName);
    }
    MetadataLockManager.INSTANCE.disconnectFeedBegin(metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + datasetName, dataverseName + "." + cfs.getFeedName());
    try {
        FeedMetadataUtil.validateIfDatasetExists(metadataProvider, dataverseName,
                cfs.getDatasetName().getValue(), mdTxnCtx);
        FeedMetadataUtil.validateIfFeedExists(dataverseName, cfs.getFeedName().getValue(), mdTxnCtx);
        FeedConnection fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(),
                dataverseName, feedName, datasetName);
        if (fc == null) {
            throw new CompilationException("Feed " + feedName + " is currently not connected to "
                    + cfs.getDatasetName().getValue() + ". Invalid operation!");
        }
        MetadataManager.INSTANCE.dropFeedConnection(mdTxnCtx, dataverseName, feedName, datasetName);
        for (FunctionSignature functionSignature : fc.getAppliedFunctions()) {
            Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, functionSignature);
            function.dereference();
            MetadataManager.INSTANCE.updateFunction(mdTxnCtx, function);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.
Class QueryTranslator, method handleDropFeedPolicyStatement.
protected void handleDropFeedPolicyStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    FeedPolicyDropStatement stmtFeedPolicyDrop = (FeedPolicyDropStatement) stmt;
    String dataverseName = getActiveDataverse(stmtFeedPolicyDrop.getDataverseName());
    String policyName = stmtFeedPolicyDrop.getPolicyName().getValue();
    MetadataLockManager.INSTANCE.dropFeedPolicyBegin(metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + policyName);
    try {
        FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverseName, policyName);
        if (feedPolicy == null) {
            if (!stmtFeedPolicyDrop.getIfExists()) {
                throw new AlgebricksException("Unknown policy " + policyName + " in dataverse " + dataverseName);
            }
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return;
        }
        MetadataManager.INSTANCE.dropFeedPolicy(mdTxnCtx, dataverseName, policyName);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.
Class QueryTranslator, method handleLoadStatement.
protected void handleLoadStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc)
        throws Exception {
    LoadStatement loadStmt = (LoadStatement) stmt;
    String dataverseName = getActiveDataverse(loadStmt.getDataverseName());
    String datasetName = loadStmt.getDatasetName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.modifyDatasetBegin(metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + datasetName);
    try {
        CompiledLoadFromFileStatement cls = new CompiledLoadFromFileStatement(dataverseName,
                loadStmt.getDatasetName().getValue(), loadStmt.getAdapter(), loadStmt.getProperties(),
                loadStmt.dataIsAlreadySorted());
        JobSpecification spec = apiFramework.compileQuery(hcc, metadataProvider, null, 0, null, sessionOutput, cls);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        if (spec != null) {
            JobUtils.runJob(hcc, spec, true);
        }
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
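handleLoadStatement differs from the purely metadata-bound handlers above: the metadata transaction is committed as soon as the load job has been compiled, and only then is the Hyracks job executed, so the bActiveTxn flag ensures abort is called only while the transaction is still open. Below is a condensed sketch of that compile-then-run shape under the same QueryTranslator assumptions as above; compileLoadJob is an illustrative placeholder for the apiFramework.compileQuery call, and lock handling is elided.

// Sketch of the compile-then-execute shape used by handleLoadStatement (locking elided).
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
    JobSpecification spec = compileLoadJob(mdTxnCtx); // placeholder for apiFramework.compileQuery(...)
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    bActiveTxn = false; // past this point a failure must not trigger abort()
    if (spec != null) {
        JobUtils.runJob(hcc, spec, true); // the runtime job runs outside the metadata transaction
    }
} catch (Exception e) {
    if (bActiveTxn) {
        abort(e, e, mdTxnCtx); // abort only if the metadata transaction is still active
    }
    throw e;
}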
Use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.
Class QueryTranslator, method doDropDataset.
public static void doDropDataset(String dataverseName, String datasetName, MetadataProvider metadataProvider,
        boolean ifExists, IHyracksClientConnection hcc, boolean dropCorrespondingNodeGroup) throws Exception {
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    MutableObject<MetadataTransactionContext> mdTxnCtx =
            new MutableObject<>(MetadataManager.INSTANCE.beginTransaction());
    MutableBoolean bActiveTxn = new MutableBoolean(true);
    metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    try {
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds == null) {
            if (ifExists) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
                return;
            } else {
                throw new AlgebricksException("There is no dataset with this name " + datasetName
                        + " in dataverse " + dataverseName + ".");
            }
        }
        ds.drop(metadataProvider, mdTxnCtx, jobsToExecute, bActiveTxn, progress, hcc, dropCorrespondingNodeGroup);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
    } catch (Exception e) {
        if (bActiveTxn.booleanValue()) {
            abort(e, e, mdTxnCtx.getValue());
        }
        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // remove all of the dataset's indexes on the NCs
            try {
                for (JobSpecification jobSpec : jobsToExecute) {
                    JobUtils.runJob(hcc, jobSpec, true);
                }
            } catch (Exception e2) {
                // do not throw the exception, since the metadata still needs to be compensated
                e.addSuppressed(e2);
            }
            // remove the record from the metadata.
            mdTxnCtx.setValue(MetadataManager.INSTANCE.beginTransaction());
            metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx.getValue());
                throw new IllegalStateException("System is in an inconsistent state: pending dataset ("
                        + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
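Because doDropDataset is static and receives the MetadataProvider explicitly, a DROP DATASET handler can resolve the names, take the locks, and delegate to it. The following is a hedged sketch of such a caller, modeled on the other drop handlers in this section; DropDatasetStatement, dropDatasetBegin, and getIfExists are assumed names by analogy, not confirmed by the snippets above.

// Illustrative caller: resolve names, lock, and delegate to doDropDataset.
// DropDatasetStatement and dropDatasetBegin are assumed by analogy with the other handlers on this page.
protected void handleDatasetDropStatement(MetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    DropDatasetStatement stmtDelete = (DropDatasetStatement) stmt;
    String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
    String datasetName = stmtDelete.getDatasetName().getValue();
    MetadataLockManager.INSTANCE.dropDatasetBegin(metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + datasetName);
    try {
        doDropDataset(dataverseName, datasetName, metadataProvider, stmtDelete.getIfExists(), hcc, true);
    } finally {
        metadataProvider.getLocks().unlock();
    }
}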