Use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.
The class QueryTranslator, method handlePregelixStatement.
/**
 * Handles a RUN statement by launching an external Pregelix job that reads from one
 * dataset and writes its result into another.
 *
 * The metadata transaction is committed *before* the external process is executed, so
 * a Pregelix failure never leaves a dangling metadata transaction; the catch block
 * only aborts the transaction while it is still active.
 *
 * @param metadataProvider carries the metadata transaction context and the lock list
 * @param stmt the statement to handle; must be a {@code RunStatement}
 * @param hcc Hyracks client connection used while preparing the target dataset
 * @throws Exception if preparation fails or the external Pregelix job exits non-zero
 */
protected void handlePregelixStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
RunStatement pregelixStmt = (RunStatement) stmt;
// True while the metadata transaction is still open; flipped to false right after commit.
boolean bActiveTxn = true;
String dataverseNameFrom = getActiveDataverse(pregelixStmt.getDataverseNameFrom());
String dataverseNameTo = getActiveDataverse(pregelixStmt.getDataverseNameTo());
String datasetNameFrom = pregelixStmt.getDatasetNameFrom().getValue();
String datasetNameTo = pregelixStmt.getDatasetNameTo().getValue();
// Qualify the target dataset name with its dataverse unless it is already fully qualified.
String fullyQualifiedDatasetNameTo = DatasetUtil.isFullyQualifiedName(datasetNameTo) ? datasetNameTo : dataverseNameTo + '.' + datasetNameTo;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// The job writes into the target dataset, so take the same locks as an insert/delete/upsert.
MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(metadataProvider.getLocks(), fullyQualifiedDatasetNameTo);
try {
// Prepares the target dataset for the external runtime (see prepareRunExternalRuntime).
prepareRunExternalRuntime(metadataProvider, hcc, pregelixStmt, dataverseNameFrom, dataverseNameTo, datasetNameFrom, datasetNameTo, mdTxnCtx);
String pregelixHomeKey = "PREGELIX_HOME";
// Finds PREGELIX_HOME in system environment variables.
String pregelixHome = System.getenv(pregelixHomeKey);
// Finds PREGELIX_HOME in Java properties.
if (pregelixHome == null) {
pregelixHome = System.getProperty(pregelixHomeKey);
}
// Finds PREGELIX_HOME in AsterixDB configuration.
if (pregelixHome == null) {
// Since there is a default value for PREGELIX_HOME in CompilerProperties,
// pregelixHome can never be null.
pregelixHome = appCtx.getCompilerProperties().getPregelixHome();
}
// Constructs the pregelix command line.
List<String> cmd = constructPregelixCommand(pregelixStmt, dataverseNameFrom, datasetNameFrom, dataverseNameTo, datasetNameTo);
ProcessBuilder pb = new ProcessBuilder(cmd);
pb.directory(new File(pregelixHome));
pb.redirectErrorStream(true);
// Commit metadata changes before running the external job; the job itself is not part
// of the metadata transaction.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
// Executes the Pregelix command.
int resultState = executeExternalShellProgram(pb);
// Checks the return state of the external Pregelix command.
if (resultState != 0) {
throw new AlgebricksException("Something went wrong executing your Pregelix Job. Perhaps the Pregelix cluster " + "needs to be restarted. " + "Check the following things: Are the datatypes of Asterix and Pregelix matching? " + "Is the server configuration correct (node names, buffer sizes, framesize)? " + "Check the logfiles for more details.");
}
} catch (Exception e) {
// Only abort a transaction that was never committed.
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
throw e;
} finally {
// Always release the metadata locks acquired above, committed or not.
metadataProvider.getLocks().unlock();
}
}
Use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.
The class QueryTranslator, method handleCompactStatement.
/**
 * Handles a COMPACT statement: builds job specifications that compact the dataset and
 * every one of its indexes, commits the metadata transaction, then runs the jobs.
 *
 * Fixes applied in review: corrected the user-facing error message typo
 * ("extrenal" -> "external") and used the local {@code mdTxnCtx} consistently for all
 * metadata calls (it is the same context that was set on the provider above).
 *
 * @param metadataProvider carries the metadata transaction context and the lock list
 * @param stmt the statement to handle; must be a {@code CompactStatement}
 * @param hcc Hyracks client connection used to run the compaction jobs
 * @throws Exception if the dataset does not exist, has no indexes, or a job fails
 */
protected void handleCompactStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    CompactStatement compactStatement = (CompactStatement) stmt;
    String dataverseName = getActiveDataverse(compactStatement.getDataverseName());
    String datasetName = compactStatement.getDatasetName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    // True while the metadata transaction is still open; flipped to false right after commit.
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.compactBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
    try {
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName + ".");
        }
        // Prepare jobs to compact the dataset and its indexes.
        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
        if (indexes.isEmpty()) {
            throw new AlgebricksException("Cannot compact the external dataset " + datasetName + " because it has no indexes");
        }
        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        jobsToExecute.add(DatasetUtil.compactDatasetJobSpec(dataverse, datasetName, metadataProvider));
        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            // Internal datasets: compact every secondary index individually.
            for (Index index : indexes) {
                if (index.isSecondaryIndex()) {
                    jobsToExecute.add(IndexUtil.buildSecondaryIndexCompactJobSpec(ds, index, metadataProvider));
                }
            }
        } else {
            // External datasets have their own compaction job preparation.
            prepareCompactJobsForExternalDataset(indexes, ds, jobsToExecute, metadataProvider);
        }
        // Commit before running the jobs; job execution is not part of the metadata transaction.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        // #. run the jobs
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
    } catch (Exception e) {
        // Only abort a transaction that was never committed.
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.
The class QueryTranslator, method handleCreateTypeStatement.
/**
 * Handles a CREATE TYPE statement: resolves the target dataverse, verifies the type
 * does not already exist (honoring IF NOT EXISTS) and is not a builtin type, then
 * computes the type from its definition and persists it in the metadata.
 *
 * @param metadataProvider carries the metadata transaction context and the lock list
 * @param stmt the statement to handle; must be a {@code TypeDecl}
 * @throws Exception if the dataverse is unknown, the type exists (without IF NOT
 *         EXISTS), the name shadows a builtin type, or the metadata update fails
 */
protected void handleCreateTypeStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    TypeDecl typeDecl = (TypeDecl) stmt;
    String dataverseName = getActiveDataverse(typeDecl.getDataverseName());
    String typeName = typeDecl.getIdent().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.createTypeBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + typeName);
    try {
        // The target dataverse must exist.
        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dataverse == null) {
            throw new AlgebricksException("Unknown dataverse " + dataverseName);
        }
        Datatype existingType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
        if (existingType != null) {
            // Already present: only an error when IF NOT EXISTS was not specified.
            if (!typeDecl.getIfNotExists()) {
                throw new AlgebricksException("A datatype with this name " + typeName + " already exists.");
            }
        } else if (BuiltinTypeMap.getBuiltinType(typeName) != null) {
            // Builtin type names are reserved.
            throw new AlgebricksException("Cannot redefine builtin type " + typeName + ".");
        } else {
            // Translate the type definition and store the resolved type.
            Map<TypeSignature, IAType> computedTypes = TypeTranslator.computeTypes(mdTxnCtx, typeDecl.getTypeDef(), typeDecl.getIdent().getValue(), dataverseName);
            TypeSignature signature = new TypeSignature(dataverseName, typeName);
            IAType resolvedType = computedTypes.get(signature);
            MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, typeName, resolvedType, false));
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.
The class RebalanceUtil, method rebalanceSwitch.
/**
 * Atomically replaces the rebalance source dataset with the rebalance target in the
 * metadata: drops the source's files, rewrites the dataset entry to point at the
 * target, and removes the source's now-unreferenced node group.
 *
 * If the source dataset was concurrently dropped, the target's generated files are
 * discarded instead and nothing else is changed.
 *
 * @param source the dataset being rebalanced away from
 * @param target the freshly built rebalance target dataset
 * @param metadataProvider carries the metadata transaction context and the lock list
 * @param hcc Hyracks client connection used to drop dataset files
 * @throws Exception if any metadata or file-drop operation fails
 */
private static void rebalanceSwitch(Dataset source, Dataset target, MetadataProvider metadataProvider, IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext txnCtx = metadataProvider.getMetadataTxnContext();
    // Take the metadata write lock on the source/target dataset before touching anything.
    writeLockDataset(metadataProvider.getLocks(), source);
    Dataset existing = MetadataManagerUtil.findDataset(txnCtx, source.getDataverseName(), source.getDatasetName());
    if (existing == null) {
        // The dataset has already been dropped; discard the generated target files.
        dropDatasetFiles(target, metadataProvider, hcc);
        return;
    }
    // Remove the physical files of the source dataset.
    dropDatasetFiles(source, metadataProvider, hcc);
    // Point the metadata entry at the rebalanced (target) dataset.
    MetadataManager.INSTANCE.updateDataset(txnCtx, target);
    // The source's node group is no longer referenced; drop it under its write lock.
    String obsoleteNodeGroup = source.getNodeGroupName();
    MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), obsoleteNodeGroup);
    MetadataManager.INSTANCE.dropNodegroup(txnCtx, obsoleteNodeGroup, true);
}
Use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.
The class RebalanceUtil, method rebalance.
/**
 * Rebalances an existing dataset to a list of target nodes.
 *
 * Runs in two metadata transactions: the first builds the rebalance target dataset on
 * a new node group; the second switches the metadata entry so the target replaces the
 * source. Each transaction is aborted on failure and the lock list is reset after it.
 *
 * @param dataverseName,
 *            the dataverse name.
 * @param datasetName,
 *            the dataset name.
 * @param targetNcNames,
 *            the list of target nodes.
 * @param metadataProvider,
 *            the metadata provider.
 * @param hcc,
 *            the reusable hyracks connection.
 * @throws Exception
 */
public static void rebalance(String dataverseName, String datasetName, Set<String> targetNcNames, MetadataProvider metadataProvider, IHyracksClientConnection hcc) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
Dataset sourceDataset;
Dataset targetDataset;
// NOTE(review): this comment appears truncated; presumably locking ensures
// that no one can drop the rebalance source dataset — confirm against upstream.
try {
// The source dataset.
sourceDataset = metadataProvider.findDataset(dataverseName, datasetName);
// If the source dataset doesn't exist, then it's a no-op.
if (sourceDataset == null) {
return;
}
Set<String> sourceNodes = new HashSet<>(metadataProvider.findNodes(sourceDataset.getNodeGroupName()));
// No-op if the source nodes are identical to the target nodes.
if (sourceNodes.equals(targetNcNames)) {
return;
}
// Creates a node group for rebalance.
String nodeGroupName = DatasetUtil.createNodeGroupForNewDataset(sourceDataset.getDataverseName(), sourceDataset.getDatasetName(), sourceDataset.getRebalanceCount() + 1, targetNcNames, metadataProvider);
// The target dataset for rebalance.
targetDataset = new Dataset(sourceDataset, true, nodeGroupName);
// Rebalances the source dataset into the target dataset.
rebalance(sourceDataset, targetDataset, metadataProvider, hcc);
// Complete the metadata transaction.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
// Reset (rather than unlock) so the second transaction below starts with a clean lock list.
metadataProvider.getLocks().reset();
}
// Starts another transaction for switching the metadata entity.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
// Atomically switches the rebalance target to become the source dataset.
rebalanceSwitch(sourceDataset, targetDataset, metadataProvider, hcc);
// Complete the metadata transaction.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
metadataProvider.getLocks().reset();
}
}
Aggregations