use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
the class MetadataNode method forceDropDatatype.
private void forceDropDatatype(JobId jobId, String dataverseName, String datatypeName) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverseName, datatypeName);
        // Searches the index for the tuple to be deleted. Acquires an S
        // lock on the 'datatype' dataset.
        ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey);
        deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, tuple);
        // TODO: Change this to be a BTree specific exception, e.g.,
        // BTreeKeyDoesNotExistException.
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
            throw new MetadataException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
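The catch block above is the error-dispatch idiom shared by every MetadataNode drop method in this listing: a HyracksDataException whose component is ErrorCode.HYRACKS and whose code is UPDATE_OR_DELETE_NON_EXISTENT_KEY is translated into a user-facing "doesn't exist" MetadataException, while any other failure is wrapped unchanged. A minimal, self-contained sketch of the same dispatch; FakeDataException and the constant values below are hypothetical stand-ins for HyracksDataException and org.apache.hyracks.api.exceptions.ErrorCode, and only the dispatch shape mirrors the real code:

    // Self-contained sketch; the class and constants are hypothetical stand-ins.
    public class ErrorDispatchSketch {
        static final String HYRACKS = "HYR"; // stand-in component id
        static final int UPDATE_OR_DELETE_NON_EXISTENT_KEY = 77; // made-up code value

        // Stand-in for HyracksDataException: carries a component and an int code.
        static class FakeDataException extends Exception {
            final String component;
            final int errorCode;

            FakeDataException(String component, int errorCode) {
                this.component = component;
                this.errorCode = errorCode;
            }

            String getComponent() { return component; }
            int getErrorCode() { return errorCode; }
        }

        static void drop(String name, boolean exists) throws Exception {
            try {
                if (!exists) {
                    // The index layer reports a delete of a non-existent key.
                    throw new FakeDataException(HYRACKS, UPDATE_OR_DELETE_NON_EXISTENT_KEY);
                }
                // ... delete the tuple ...
            } catch (FakeDataException e) {
                // Same shape as the catch block above: a missing key becomes a
                // user-facing error; everything else is wrapped unchanged.
                if (e.getComponent().equals(HYRACKS)
                        && e.getErrorCode() == UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
                    throw new Exception("Cannot drop '" + name + "' because it doesn't exist", e);
                } else {
                    throw new Exception(e);
                }
            }
        }

        public static void main(String[] args) {
            try {
                drop("myType", false);
            } catch (Exception e) {
                System.out.println(e.getMessage()); // Cannot drop 'myType' because it doesn't exist
            }
        }
    }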
use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
the class MetadataNode method dropFeed.
@Override
public void dropFeed(JobId jobId, String dataverse, String feedName) throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverse, feedName);
        // Searches the index for the tuple to be deleted. Acquires an S
        // lock on the 'Feed' dataset.
        ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FEED_DATASET, searchKey);
        deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, tuple);
        // TODO: Change this to be a BTree specific exception, e.g.,
        // BTreeKeyDoesNotExistException.
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
            throw new MetadataException("Cannot drop feed '" + feedName + "' because it doesn't exist", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
the class MetadataNode method getDataverses.
@Override
public List<Dataverse> getDataverses(JobId jobId) throws MetadataException, RemoteException {
    try {
        DataverseTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataverseTupleTranslator(false);
        IValueExtractor<Dataverse> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        List<Dataverse> results = new ArrayList<>();
        searchIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, null, valueExtractor, results);
        return results;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
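getDataverses illustrates the read path: a tuple translator wrapped in a MetadataEntityValueExtractor turns each index tuple into a typed Dataverse entity, and passing a null search key to searchIndex scans the entire DATAVERSE_DATASET. A hedged caller sketch follows; the MetadataNode handle and the JobId of an open metadata transaction are assumed to already exist (their acquisition is elided), and getDataverseName is assumed to be the entity's accessor:

    // Sketch only: metadataNode and jobId come from an already-open metadata
    // transaction; imports match those of the snippets above.
    static void printDataverseNames(MetadataNode metadataNode, JobId jobId)
            throws MetadataException, RemoteException {
        for (Dataverse dv : metadataNode.getDataverses(jobId)) {
            System.out.println(dv.getDataverseName()); // accessor name is an assumption
        }
    }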
use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
the class MetadataNode method createTuple.
// TODO: Can use Hyracks' TupleUtils for this, once we switch to a newer
// Hyracks version.
public static ITupleReference createTuple(String... fields) {
    @SuppressWarnings("unchecked")
    ISerializerDeserializer<AString> stringSerde =
            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
    AMutableString aString = new AMutableString("");
    ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
    for (String s : fields) {
        aString.setValue(s);
        try {
            stringSerde.serialize(aString, tupleBuilder.getDataOutput());
        } catch (HyracksDataException e) {
            // This should never happen when serializing into an in-memory tuple builder.
            throw new IllegalStateException("Failed to create search tuple. This should never happen", e);
        }
        tupleBuilder.addFieldEndOffset();
    }
    ArrayTupleReference tuple = new ArrayTupleReference();
    tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
    return tuple;
}
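Since createTuple is public and static, building a metadata search key is a one-liner: each argument becomes one serialized ASTRING field, in order, exactly as the drop methods above use it. A short usage sketch; the dataverse and type names are illustrative:

    // Two-field key like the ones passed to getTupleToBeDeleted above;
    // "MyDataverse" and "MyType" are illustrative names.
    ITupleReference searchKey = MetadataNode.createTuple("MyDataverse", "MyType");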
use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
the class MetadataNode method dropAdapter.
@Override
public void dropAdapter(JobId jobId, String dataverseName, String adapterName) throws MetadataException, RemoteException {
    DatasourceAdapter adapter = getAdapter(jobId, dataverseName, adapterName);
    if (adapter == null) {
        throw new MetadataException("Cannot drop adapter '" + adapterName + "' because it doesn't exist.");
    }
    try {
        // Delete entry from the 'Adapter' dataset.
        ITupleReference searchKey = createTuple(dataverseName, adapterName);
        // Searches the index for the tuple to be deleted. Acquires an S
        // lock on the 'Adapter' dataset.
        ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, searchKey);
        deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, datasetTuple);
        // TODO: Change this to be a BTree specific exception, e.g.,
        // BTreeKeyDoesNotExistException.
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
            throw new MetadataException("Cannot drop adapter '" + adapterName + "' because it doesn't exist", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}