Use of org.apache.asterix.metadata.entities.Dataverse in project asterixdb by apache.
The class GlobalRecoveryManager, method startGlobalRecovery.
@Override
public void startGlobalRecovery(ICcApplicationContext appCtx) {
    // perform global recovery if state changed to active
    final ClusterState newState = ClusterStateManager.INSTANCE.getState();
    boolean needToRecover = !newState.equals(state) && (newState == ClusterState.ACTIVE);
    if (needToRecover) {
        setState(newState);
        ccServiceCtx.getControllerService().getExecutor().submit(() -> {
            LOGGER.info("Starting Global Recovery");
            MetadataTransactionContext mdTxnCtx = null;
            try {
                MetadataManager.INSTANCE.init();
                // Loop over datasets
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                for (Dataverse dataverse : MetadataManager.INSTANCE.getDataverses(mdTxnCtx)) {
                    mdTxnCtx = recoverDataset(appCtx, mdTxnCtx, dataverse);
                }
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e) {
                // This needs to be fixed <-- Needs to shutdown the system -->
                /*
                 * Note: Throwing this illegal state exception will terminate this thread
                 * and feeds listeners will not be notified.
                 */
                LOGGER.log(Level.SEVERE, "Global recovery was not completed successfully: ", e);
                if (mdTxnCtx != null) {
                    try {
                        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                    } catch (Exception e1) {
                        LOGGER.log(Level.SEVERE, "Exception in aborting", e1);
                        e1.addSuppressed(e);
                        throw new IllegalStateException(e1);
                    }
                }
            }
            ClusterStateManager.INSTANCE.setGlobalRecoveryCompleted(true);
            LOGGER.info("Global Recovery Completed");
        });
    }
}
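The recovery task above follows the usual metadata-transaction pattern: begin a transaction, do the work, commit, and abort on failure. A minimal sketch of that pattern in isolation, using only the MetadataManager calls already visible in the snippet; the runRecoveryInMetadataTxn and doRecoveryWork names are hypothetical placeholders, not methods from the project:

private void runRecoveryInMetadataTxn() throws Exception {
    MetadataTransactionContext mdTxnCtx = null;
    try {
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        doRecoveryWork(mdTxnCtx); // hypothetical placeholder for the per-dataverse work
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // only abort if the transaction was actually started
        if (mdTxnCtx != null) {
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        }
        throw e;
    }
}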
Use of org.apache.asterix.metadata.entities.Dataverse in project asterixdb by apache.
The class QueryTranslator, method handleCreateDataverseStatement.
protected void handleCreateDataverseStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
    String dvName = stmtCreateDataverse.getDataverseName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
        if (dv != null) {
            if (stmtCreateDataverse.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
            }
        }
        MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(),
                new Dataverse(dvName, stmtCreateDataverse.getFormat(), MetadataUtil.PENDING_NO_OP));
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
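Once the statement is parsed, the handler boils down to an existence check followed by addDataverse with the PENDING_NO_OP flag. The same steps can be sketched directly against MetadataManager; this is a simplified illustration only, assuming the caller supplies dvName and a data format class name (formatClassName), and omitting the dataverse read lock the real handler takes:

private void ensureDataverseExists(String dvName, String formatClassName) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        if (MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dvName) == null) {
            // PENDING_NO_OP marks the record as fully committed, as in the handler above
            MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
                    new Dataverse(dvName, formatClassName, MetadataUtil.PENDING_NO_OP));
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        throw e;
    }
}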
Use of org.apache.asterix.metadata.entities.Dataverse in project asterixdb by apache.
The class MetadataBootstrap, method startDDLRecovery.
/**
 * Perform recovery of DDL operations metadata records
 */
public static void startDDLRecovery() throws MetadataException {
    // #. clean up any record which has pendingAdd/DelOp flag
    // as traversing all records from DATAVERSE_DATASET to DATASET_DATASET, and then to INDEX_DATASET.
    MetadataTransactionContext mdTxnCtx = null;
    MetadataManager.INSTANCE.acquireWriteLatch();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Starting DDL recovery ...");
    }
    try {
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx);
        for (Dataverse dataverse : dataverses) {
            recoverDataverse(mdTxnCtx, dataverse);
        }
        // the commit wasn't there before. yet, everything was working correctly!!!!!!!!!!!
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("Completed DDL recovery.");
        }
    } catch (Exception e) {
        try {
            if (IS_DEBUG_MODE) {
                LOGGER.log(Level.SEVERE, "Failure during DDL recovery", e);
            }
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        } catch (Exception e2) {
            e.addSuppressed(e2);
        }
        throw new MetadataException(e);
    } finally {
        MetadataManager.INSTANCE.releaseWriteLatch();
    }
}
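The body of recoverDataverse is not shown on this page. Going by the pending-operation convention visible in handleCreateDataverseStatement, a plausible shape is to drop any dataverse whose record still carries a pending flag and otherwise descend into its datasets. The following is a hedged sketch, not the project's actual implementation; the dropDataverse and getDataverseDatasets calls and the recoverDataset helper are assumptions:

private static void recoverDataverse(MetadataTransactionContext mdTxnCtx, Dataverse dataverse) throws Exception {
    if (dataverse.getPendingOp() != MetadataUtil.PENDING_NO_OP) {
        // a pending create/drop never completed; remove the dataverse record
        MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverse.getDataverseName());
        return;
    }
    // otherwise apply the same pending-op check to each dataset of the dataverse
    for (Dataset dataset : MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverse.getDataverseName())) {
        recoverDataset(mdTxnCtx, dataset); // hypothetical helper mirroring the dataverse-level check
    }
}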
Use of org.apache.asterix.metadata.entities.Dataverse in project asterixdb by apache.
The class DataverseTupleTranslator, method getMetadataEntityFromTuple.
@Override
public Dataverse getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
    byte[] serRecord = frameTuple.getFieldData(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = frameTuple.getFieldStart(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = frameTuple.getFieldLength(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord dataverseRecord = recordSerDes.deserialize(in);
    return new Dataverse(((AString) dataverseRecord.getValueByPos(0)).getStringValue(),
            ((AString) dataverseRecord.getValueByPos(1)).getStringValue(),
            ((AInt32) dataverseRecord.getValueByPos(3)).getIntegerValue());
}
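Positions 0, 1, and 3 of the stored record feed the Dataverse constructor (name, data format, pending operation); position 2, presumably a timestamp, is not carried into the entity. A minimal usage sketch, assuming a translator instance and a scanned frameTuple are available from a metadata index scan, and assuming the entity getters mirror the constructor arguments:

Dataverse dv = translator.getMetadataEntityFromTuple(frameTuple);
String name = dv.getDataverseName(); // record position 0
String format = dv.getDataFormat();  // record position 1
int pendingOp = dv.getPendingOp();   // record position 3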
Use of org.apache.asterix.metadata.entities.Dataverse in project asterixdb by apache.
The class ExternalLibraryUtils, method uninstallLibrary.
/**
 * Remove the library from metadata completely.
 * TODO Currently, external libraries only include functions and adapters. We need to extend this to include:
 * 1. external data source
 * 2. data parser
 *
 * @param dataverse
 * @param libraryName
 * @return true if the library was found and removed, false otherwise
 * @throws AsterixException
 * @throws RemoteException
 * @throws ACIDException
 */
protected static boolean uninstallLibrary(String dataverse, String libraryName)
        throws AsterixException, RemoteException, ACIDException {
    MetadataTransactionContext mdTxnCtx = null;
    try {
        // begin transaction
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // make sure the dataverse exists
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
        if (dv == null) {
            return false;
        }
        // make sure the library exists
        Library library = MetadataManager.INSTANCE.getLibrary(mdTxnCtx, dataverse, libraryName);
        if (library == null) {
            return false;
        }
        // get the dataverse functions
        List<Function> functions = MetadataManager.INSTANCE.getDataverseFunctions(mdTxnCtx, dataverse);
        for (Function function : functions) {
            // does the function belong to the library?
            if (function.getName().startsWith(libraryName + "#")) {
                // drop the function
                MetadataManager.INSTANCE.dropFunction(mdTxnCtx,
                        new FunctionSignature(dataverse, function.getName(), function.getArity()));
            }
        }
        // get the dataverse adapters
        List<DatasourceAdapter> adapters = MetadataManager.INSTANCE.getDataverseAdapters(mdTxnCtx, dataverse);
        for (DatasourceAdapter adapter : adapters) {
            // does the adapter belong to the library?
            if (adapter.getAdapterIdentifier().getName().startsWith(libraryName + "#")) {
                // remove the adapter (note: we didn't check whether any feeds use this adapter)
                MetadataManager.INSTANCE.dropAdapter(mdTxnCtx, dataverse, adapter.getAdapterIdentifier().getName());
            }
        }
        // drop the library itself
        MetadataManager.INSTANCE.dropLibrary(mdTxnCtx, dataverse, libraryName);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        throw new AsterixException(e);
    }
    return true;
}
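Functions and adapters are tied to a library purely by the "libraryName#" prefix on their names, so uninstalling means filtering on that prefix and dropping each match before the library record itself. A minimal caller-side sketch; the dataverse and library names here are illustrative only:

// returns false when either the dataverse or the library does not exist
boolean removed = uninstallLibrary("externaldv", "testlib");
if (!removed) {
    LOGGER.info("Library testlib was not installed in dataverse externaldv");
}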