Use of org.apache.asterix.lang.common.statement.DropDatasetStatement in project asterixdb by apache.
The class QueryTranslator, method handleDatasetDropStatement.
public void handleDatasetDropStatement(MetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws Exception {
    DropDatasetStatement stmtDelete = (DropDatasetStatement) stmt;
    String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
    String datasetName = stmtDelete.getDatasetName().getValue();
    // Acquire the metadata locks needed for the drop, and always release them when done.
    MetadataLockManager.INSTANCE.dropDatasetBegin(metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + datasetName);
    try {
        doDropDataset(dataverseName, datasetName, metadataProvider, stmtDelete.getIfExists(), hcc, true);
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
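For orientation, here is a minimal sketch of how a caller could construct the statement and dispatch it through the translator. The dataverse and dataset names, and the queryTranslator, metadataProvider, and hcc instances, are assumptions for illustration; the constructor arguments mirror the usage shown in prepareRunExternalRuntime further below.

// Hedged sketch: build a DROP DATASET statement and hand it to the translator.
// 'queryTranslator', 'metadataProvider', and 'hcc' are assumed to be set up elsewhere.
DropDatasetStatement drop = new DropDatasetStatement(
        new Identifier("MyDataverse"),   // hypothetical dataverse name
        new Identifier("MyDataset"),     // hypothetical dataset name
        true);                           // IF EXISTS flag, read back via getIfExists()
queryTranslator.handleDatasetDropStatement(metadataProvider, drop, hcc);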
Use of org.apache.asterix.lang.common.statement.DropDatasetStatement in project asterixdb by apache.
The class AbstractLangTranslator, method validateOperation.
public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt)
        throws AsterixException {
    // Make sure the cluster is ACTIVE and global recovery has completed before validating the statement.
    if (!(ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)
            && ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted())) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        try {
            ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE, maxWaitCycles, TimeUnit.SECONDS);
        } catch (HyracksDataException e) {
            throw new AsterixException(e);
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to be " + ClusterState.ACTIVE);
            }
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
            throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state."
                    + "\n One or more Node Controllers have left or haven't joined yet.\n");
        } else {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
            }
        }
    }
    if (ClusterStateManager.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
        throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state."
                + "\n One or more Node Controllers have left.\n");
    }
    if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        int waitCycleCount = 0;
        try {
            while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
                Thread.sleep(1000);
                waitCycleCount++;
            }
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery ");
            }
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
            throw new AsterixException("Cluster Global recovery is not yet complete and the system is in "
                    + ClusterState.ACTIVE + " state");
        }
    }
    // Reject statements that modify or drop objects in the Metadata dataverse,
    // and dataset declarations that carry invalid hints.
    boolean invalidOperation = false;
    String message = null;
    String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
    switch (stmt.getKind()) {
        case Statement.Kind.INSERT:
            InsertStatement insertStmt = (InsertStatement) stmt;
            if (insertStmt.getDataverseName() != null) {
                dataverse = insertStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Insert operation is not permitted in dataverse "
                        + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DELETE:
            DeleteStatement deleteStmt = (DeleteStatement) stmt;
            if (deleteStmt.getDataverseName() != null) {
                dataverse = deleteStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Delete operation is not permitted in dataverse "
                        + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATAVERSE_DROP:
            DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
            invalidOperation =
                    MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName().getValue());
            if (invalidOperation) {
                message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
            }
            break;
        case Statement.Kind.DATASET_DROP:
            DropDatasetStatement dropStmt = (DropDatasetStatement) stmt;
            if (dropStmt.getDataverseName() != null) {
                dataverse = dropStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Cannot drop a dataset belonging to the dataverse:"
                        + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATASET_DECL:
            DatasetDecl datasetStmt = (DatasetDecl) stmt;
            Map<String, String> hints = datasetStmt.getHints();
            if (hints != null && !hints.isEmpty()) {
                Pair<Boolean, String> validationResult = null;
                StringBuffer errorMsgBuffer = new StringBuffer();
                for (Entry<String, String> hint : hints.entrySet()) {
                    validationResult = DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
                    if (!validationResult.first) {
                        errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue()
                                + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
                        errorMsgBuffer.append(" \n");
                    }
                }
                invalidOperation = errorMsgBuffer.length() > 0;
                if (invalidOperation) {
                    message = errorMsgBuffer.toString();
                }
            }
            break;
        default:
            break;
    }
    if (invalidOperation) {
        throw new AsterixException("Invalid operation - " + message);
    }
}
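As an illustration of the DATASET_DROP branch above, the following hedged sketch shows a drop statement that targets the Metadata dataverse being rejected; appCtx and defaultDataverse are assumed to come from the surrounding compilation context, and the dataset name is a placeholder.

// Illustrative only: dropping a dataset in the Metadata dataverse is an invalid operation.
DropDatasetStatement dropInMetadata = new DropDatasetStatement(
        new Identifier(MetadataConstants.METADATA_DATAVERSE_NAME),
        new Identifier("SomeDataset"),   // hypothetical dataset name
        false);
try {
    validateOperation(appCtx, defaultDataverse, dropInMetadata);
} catch (AsterixException e) {
    // Expected message: "Invalid operation - Cannot drop a dataset belonging to the dataverse:"
    //                   + MetadataConstants.METADATA_DATAVERSE_NAME
}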
Use of org.apache.asterix.lang.common.statement.DropDatasetStatement in project asterixdb by apache.
The class QueryTranslator, method prepareRunExternalRuntime.
// Prepares to run a program on external runtime.
protected void prepareRunExternalRuntime(MetadataProvider metadataProvider, IHyracksClientConnection hcc,
        RunStatement pregelixStmt, String dataverseNameFrom, String dataverseNameTo, String datasetNameFrom,
        String datasetNameTo, MetadataTransactionContext mdTxnCtx) throws Exception {
    // Validates the source/sink dataverses and datasets.
    Dataset fromDataset = metadataProvider.findDataset(dataverseNameFrom, datasetNameFrom);
    if (fromDataset == null) {
        throw new CompilationException("The source dataset " + datasetNameFrom + " in dataverse "
                + dataverseNameFrom + " could not be found for the Run command");
    }
    Dataset toDataset = metadataProvider.findDataset(dataverseNameTo, datasetNameTo);
    if (toDataset == null) {
        throw new CompilationException("The sink dataset " + datasetNameTo + " in dataverse "
                + dataverseNameTo + " could not be found for the Run command");
    }
    try {
        // Find the primary index of the sink dataset.
        Index toIndex = null;
        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseNameTo,
                pregelixStmt.getDatasetNameTo().getValue());
        for (Index index : indexes) {
            if (index.isPrimaryIndex()) {
                toIndex = index;
                break;
            }
        }
        if (toIndex == null) {
            throw new AlgebricksException("Tried to access non-existing dataset: " + datasetNameTo);
        }
        // Cleans up the sink dataset -- Drop and then Create.
        DropDatasetStatement dropStmt =
                new DropDatasetStatement(new Identifier(dataverseNameTo), pregelixStmt.getDatasetNameTo(), true);
        this.handleDatasetDropStatement(metadataProvider, dropStmt, hcc);
        IDatasetDetailsDecl idd = new InternalDetailsDecl(toIndex.getKeyFieldNames(),
                toIndex.getKeyFieldSourceIndicators(), false, null, toDataset.getDatasetDetails().isTemp());
        DatasetDecl createToDataset = new DatasetDecl(new Identifier(dataverseNameTo),
                pregelixStmt.getDatasetNameTo(), new Identifier(toDataset.getItemTypeDataverseName()),
                new Identifier(toDataset.getItemTypeName()),
                new Identifier(toDataset.getMetaItemTypeDataverseName()),
                new Identifier(toDataset.getMetaItemTypeName()), new Identifier(toDataset.getNodeGroupName()),
                toDataset.getCompactionPolicy(), toDataset.getCompactionPolicyProperties(), toDataset.getHints(),
                toDataset.getDatasetType(), idd, false);
        this.handleCreateDatasetStatement(metadataProvider, createToDataset, hcc);
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, e.getMessage(), e);
        throw new AlgebricksException("Error cleaning the result dataset. This should not happen.");
    }
    // Flushes source dataset.
    FlushDatasetUtil.flushDataset(hcc, metadataProvider, dataverseNameFrom, datasetNameFrom, datasetNameFrom);
}
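Note the design choice here: the sink dataset is recycled rather than truncated. The drop is issued with the IF EXISTS flag set to true, so a missing sink is not an error, and the re-created dataset copies its keys, item types, node group, compaction policy, and hints from the existing metadata, so the external runtime always writes into an empty dataset of the same shape. A hedged sketch of a call site follows; every name below is a placeholder for illustration, and the metadataProvider, hcc, pregelixStmt, and mdTxnCtx values are assumed to be supplied by the RUN statement handler.

// Hypothetical call site; all dataverse/dataset names are placeholders.
prepareRunExternalRuntime(metadataProvider, hcc, pregelixStmt,
        "SourceDataverse", "SinkDataverse",
        "SourceDataset", "SinkDataset",
        mdTxnCtx);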