Use of org.apache.asterix.common.exceptions.ACIDException in project asterixdb by apache:
the class AbstractCheckpointManager, method getLatest.
/**
 * Returns the most recent {@link Checkpoint} found in the checkpoint directory,
 * or {@code null} if no checkpoint files exist.
 *
 * @return the latest checkpoint, or {@code null} when none are present
 * @throws ACIDException if any checkpoint file cannot be read
 */
@Override
public Checkpoint getLatest() throws ACIDException {
    // Read all checkpoint objects from the existing checkpoint files
    File[] checkpoints = checkpointDir.listFiles(filter);
    if (checkpoints == null || checkpoints.length == 0) {
        // No checkpoint directory or no checkpoint files yet
        return null;
    }
    List<Checkpoint> checkpointObjectList = new ArrayList<>(checkpoints.length);
    for (File file : checkpoints) {
        try {
            LOGGER.log(Level.WARNING, "Reading snapshot file: " + file.getAbsolutePath());
            // Checkpoint files are JSON; decode explicitly as UTF-8 rather than
            // relying on the platform-default charset.
            String jsonString = new String(Files.readAllBytes(file.toPath()),
                    java.nio.charset.StandardCharsets.UTF_8);
            checkpointObjectList.add(Checkpoint.fromJson(jsonString));
        } catch (IOException e) {
            throw new ACIDException("Failed to read the checkpoint file " + file.getAbsolutePath(), e);
        }
    }
    // Sort checkpoints so the most recent one (by timestamp) is first.
    // NOTE(review): relies on Checkpoint's Comparable ordering placing the
    // newest checkpoint at index 0 — confirm against Checkpoint#compareTo.
    Collections.sort(checkpointObjectList);
    return checkpointObjectList.get(0);
}
Use of org.apache.asterix.common.exceptions.ACIDException in project asterixdb by apache:
the class LogFlusher, method syncAppendToLogTail.
/**
 * Appends {@code logRecord} to the in-memory log tail, rolling over to the next
 * log page (and, if needed, the next log file) first when the record would not
 * fit. Synchronized so appends and the LSN counter advance atomically.
 *
 * NOTE(review): statement order here is significant — the page/file rollover
 * must happen before append, and appendLSN is advanced only after the record
 * (and its LSN, for FLUSH/marker records) has been written.
 *
 * @param logRecord the record to append
 * @throws ACIDException if an aborted transaction attempts to write a
 *             non-abort log record
 */
protected synchronized void syncAppendToLogTail(ILogRecord logRecord) throws ACIDException {
if (logRecord.getLogType() != LogType.FLUSH) {
ITransactionContext txnCtx = logRecord.getTxnCtx();
// An aborted transaction may only write ABORT records.
if (txnCtx.getTxnState() == ITransactionManager.ABORTED && logRecord.getLogType() != LogType.ABORT) {
throw new ACIDException("Aborted job(" + txnCtx.getJobId() + ") tried to write non-abort type log record.");
}
}
/**
 * To eliminate the case where the modulo of the next appendLSN = 0 (the next
 * appendLSN = the first LSN of the next log file), we do not allow a log to be
 * written at the last offset of the current file.
 */
final int logSize = logRecord.getLogSize();
// Make sure the log will not exceed the log file size
if (getLogFileOffset(appendLSN.get()) + logSize >= logFileSize) {
// Record would spill past (or exactly fill) the current file: start a new
// log file, which also requires a fresh page.
prepareNextLogFile();
prepareNextPage(logSize);
} else if (!appendPage.hasSpace(logSize)) {
// File has room but the current page does not: start a new page.
prepareNextPage(logSize);
}
appendPage.append(logRecord, appendLSN.get());
if (logRecord.getLogType() == LogType.FLUSH) {
// FLUSH records carry their own LSN; set it now that the position is known.
logRecord.setLSN(appendLSN.get());
}
if (logRecord.isMarker()) {
// Notify marker listeners of the LSN at which the record was appended.
logRecord.logAppended(appendLSN.get());
}
// Advance the tail LSN past the record just appended.
appendLSN.addAndGet(logSize);
}
Use of org.apache.asterix.common.exceptions.ACIDException in project asterixdb by apache:
the class FeedMetadataUtil, method getOutputType.
/**
 * Resolves the output record type named by {@code configuration.get(key)}.
 * The value may be either {@code "typeName"} (resolved in the feed's
 * dataverse) or {@code "dataverse.typeName"}.
 *
 * @param feed the feed whose dataverse is the default for unqualified names
 * @param configuration feed configuration properties
 * @param key configuration key holding the (possibly qualified) type name
 * @return the resolved record type, or {@code null} if the key is absent
 * @throws MetadataException if the datatype is missing, is not an OBJECT
 *             type, or the metadata lookup fails
 * @throws IllegalArgumentException if the value has more than one '.'
 */
public static ARecordType getOutputType(IFeed feed, Map<String, String> configuration, String key) throws MetadataException {
    ARecordType outputType = null;
    String fqOutputType = configuration.get(key);
    if (fqOutputType == null) {
        return null;
    }
    String[] dataverseAndType = fqOutputType.split("[.]");
    String dataverseName;
    String datatypeName;
    if (dataverseAndType.length == 1) {
        // Unqualified name: resolve in the feed's own dataverse.
        datatypeName = dataverseAndType[0];
        dataverseName = feed.getDataverseName();
    } else if (dataverseAndType.length == 2) {
        dataverseName = dataverseAndType[0];
        datatypeName = dataverseAndType[1];
    } else {
        throw new IllegalArgumentException("Invalid value for the parameter " + key);
    }
    MetadataTransactionContext ctx = null;
    MetadataManager.INSTANCE.acquireReadLatch();
    try {
        ctx = MetadataManager.INSTANCE.beginTransaction();
        Datatype t = MetadataManager.INSTANCE.getDatatype(ctx, dataverseName, datatypeName);
        if (t == null || t.getDatatype().getTypeTag() != ATypeTag.OBJECT) {
            throw new MetadataException(ErrorCode.FEED_METADATA_UTIL_UNEXPECTED_FEED_DATATYPE, datatypeName);
        }
        outputType = (ARecordType) t.getDatatype();
        MetadataManager.INSTANCE.commitTransaction(ctx);
    } catch (ACIDException | RemoteException e) {
        // Roll back the metadata transaction if one was started; keep any
        // abort failure as a suppressed exception so the root cause survives.
        if (ctx != null) {
            try {
                MetadataManager.INSTANCE.abortTransaction(ctx);
            } catch (ACIDException | RemoteException e2) {
                e.addSuppressed(e2);
            }
        }
        // BUG FIX: this throw used to be inside the (ctx != null) block, so a
        // failure in beginTransaction() itself was silently swallowed and
        // null was returned. It must be thrown unconditionally.
        throw new MetadataException(ErrorCode.FEED_CREATE_FEED_DATATYPE_ERROR, e, datatypeName);
    } finally {
        MetadataManager.INSTANCE.releaseReadLatch();
    }
    return outputType;
}
Use of org.apache.asterix.common.exceptions.ACIDException in project asterixdb by apache:
the class ExternalLibraryUtils, method uninstallLibrary.
/**
 * Remove the library from metadata completely.
 * TODO Currently, external libraries only include functions and adapters. we need to extend this to include:
 * 1. external data source
 * 2. data parser
 *
 * @param dataverse name of the dataverse containing the library
 * @param libraryName name of the library to remove
 * @return true if the library was found and removed, false otherwise
 * @throws AsterixException if any metadata operation fails
 * @throws RemoteException
 * @throws ACIDException
 */
protected static boolean uninstallLibrary(String dataverse, String libraryName) throws AsterixException, RemoteException, ACIDException {
    MetadataTransactionContext mdTxnCtx = null;
    try {
        // begin transaction
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // make sure dataverse exists
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
        if (dv == null) {
            return false;
        }
        // make sure library exists
        Library library = MetadataManager.INSTANCE.getLibrary(mdTxnCtx, dataverse, libraryName);
        if (library == null) {
            return false;
        }
        // drop all functions belonging to the library (name prefix "<library>#")
        List<Function> functions = MetadataManager.INSTANCE.getDataverseFunctions(mdTxnCtx, dataverse);
        for (Function function : functions) {
            if (function.getName().startsWith(libraryName + "#")) {
                MetadataManager.INSTANCE.dropFunction(mdTxnCtx, new FunctionSignature(dataverse, function.getName(), function.getArity()));
            }
        }
        // drop all adapters belonging to the library (same prefix convention)
        List<DatasourceAdapter> adapters = MetadataManager.INSTANCE.getDataverseAdapters(mdTxnCtx, dataverse);
        for (DatasourceAdapter adapter : adapters) {
            if (adapter.getAdapterIdentifier().getName().startsWith(libraryName + "#")) {
                // remove adapter <! we didn't check if there are feeds which use this adapter>
                MetadataManager.INSTANCE.dropAdapter(mdTxnCtx, dataverse, adapter.getAdapterIdentifier().getName());
            }
        }
        // drop the library itself
        MetadataManager.INSTANCE.dropLibrary(mdTxnCtx, dataverse, libraryName);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // BUG FIX: guard against mdTxnCtx == null (beginTransaction itself may
        // have thrown) and keep any failure of the abort as a suppressed
        // exception instead of letting it mask the original cause.
        if (mdTxnCtx != null) {
            try {
                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
            }
        }
        throw new AsterixException(e);
    }
    return true;
}
Use of org.apache.asterix.common.exceptions.ACIDException in project asterixdb by apache:
the class MetadataNode, method addAdapter.
/**
 * Inserts the given adapter into the 'Adapter' metadata dataset.
 *
 * @param jobId the metadata transaction's job id
 * @param adapter the adapter to register
 * @throws MetadataException if an adapter with the same name already exists
 *             in the target dataverse, or if the insert fails
 * @throws RemoteException
 */
@Override
public void addAdapter(JobId jobId, DatasourceAdapter adapter) throws MetadataException, RemoteException {
    try {
        // Insert into the 'Adapter' dataset.
        DatasourceAdapterTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getAdapterTupleTranslator(true);
        ITupleReference adapterTuple = tupleReaderWriter.getTupleFromMetadataEntity(adapter);
        insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, adapterTuple);
    } catch (HyracksDataException e) {
        // A duplicate-key error from the index means the adapter name is taken.
        if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
            // BUG FIX: message grammar ("A adapter with this name X already
            // exists" -> "An adapter with name X already exists").
            throw new MetadataException("An adapter with name " + adapter.getAdapterIdentifier().getName() + " already exists in dataverse '" + adapter.getAdapterIdentifier().getNamespace() + "'.", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
Aggregations