Use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
The class MetadataNode, method addDatatype.
@Override
public void addDatatype(JobId jobId, Datatype datatype) throws MetadataException, RemoteException {
    try {
        DatatypeTupleTranslator tupleReaderWriter =
                tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, true);
        ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(datatype);
        insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, tuple);
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
            throw new MetadataException(
                    "A datatype with name '" + datatype.getDatatypeName() + "' already exists.", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
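The notable part of this method is the error-translation idiom: the low-level HyracksDataException is inspected for its component and error code, and only the duplicate-key case is mapped to a user-facing message while everything else is rethrown wrapped. Below is a minimal, self-contained sketch of that idiom; the exception classes and the HYRACKS/DUPLICATE_KEY constants here are hypothetical stand-ins for the real Hyracks types, not the actual API.

    public class ErrorTranslationSketch {
        static class DataException extends Exception {
            final String component;
            final int errorCode;
            DataException(String component, int errorCode) {
                super("component=" + component + ", code=" + errorCode);
                this.component = component;
                this.errorCode = errorCode;
            }
        }

        static class MetadataException extends Exception {
            MetadataException(String msg, Throwable cause) { super(msg, cause); }
            MetadataException(Throwable cause) { super(cause); }
        }

        static final String HYRACKS = "HYR"; // hypothetical component id
        static final int DUPLICATE_KEY = 33; // hypothetical error code

        static void addDatatype(String name) throws MetadataException {
            try {
                insertTuple(name);
            } catch (DataException e) {
                // Map the storage-level duplicate-key error to a domain-level
                // message, keeping the original exception as the cause.
                if (HYRACKS.equals(e.component) && e.errorCode == DUPLICATE_KEY) {
                    throw new MetadataException("A datatype with name '" + name + "' already exists.", e);
                }
                throw new MetadataException(e);
            }
        }

        static void insertTuple(String name) throws DataException {
            // Simulate the index rejecting a duplicate primary key.
            throw new DataException(HYRACKS, DUPLICATE_KEY);
        }

        public static void main(String[] args) {
            try {
                addDatatype("point");
            } catch (MetadataException e) {
                System.out.println(e.getMessage()); // "A datatype with name 'point' already exists."
            }
        }
    }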
Use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
The class MetadataNode, method addEntity.
// TODO(amoudi): make all metadata operations go through the generic methods
/**
 * Add entity to index
 *
 * @param jobId the id of the metadata transaction
 * @param entity the metadata entity to add
 * @param tupleTranslator translates the entity into an index tuple
 * @param index the metadata index to insert into
 * @throws MetadataException
 */
private <T> void addEntity(JobId jobId, T entity, IMetadataEntityTupleTranslator<T> tupleTranslator,
        IMetadataIndex index) throws MetadataException {
    try {
        ITupleReference tuple = tupleTranslator.getTupleFromMetadataEntity(entity);
        insertTupleIntoIndex(jobId, index, tuple);
    } catch (HyracksDataException | ACIDException e) {
        throw new MetadataException(e);
    }
}
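The TODO suggests routing specialized methods such as addDatatype through this generic helper. A hedged sketch of what that might look like, reusing only names visible on this page (this is not actual AsterixDB code):

    @Override
    public void addDatatype(JobId jobId, Datatype datatype) throws MetadataException, RemoteException {
        // Delegate tuple translation and insertion to the generic helper.
        // Note: the datatype-specific duplicate-key message in the original
        // addDatatype would be lost unless addEntity grew an error-translation hook.
        addEntity(jobId, datatype,
                tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, true),
                MetadataPrimaryIndexes.DATATYPE_DATASET);
    }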
Use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
The class MetadataNode, method updateDataset.
@Override
public void updateDataset(JobId jobId, Dataset dataset) throws MetadataException, RemoteException {
    try {
        // This method deletes the previous entry of the dataset and inserts the new one.
        // Delete the entry from the 'dataset' dataset.
        ITupleReference searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
        // Searches the index for the tuple to be deleted. Acquires an S
        // lock on the 'dataset' dataset.
        ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey);
        deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
        // The previous tuple is gone; insert the updated one into the 'dataset' dataset.
        DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(true);
        datasetTuple = tupleReaderWriter.getTupleFromMetadataEntity(dataset);
        insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
    } catch (HyracksDataException | ACIDException e) {
        throw new MetadataException(e);
    }
}
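updateDataset is a delete-then-insert against the same primary index. In the spirit of the generic addEntity above, the same pattern could be captured once; the following is a hypothetical sketch (upsertEntity does not exist in the code shown), built only from methods that appear in these snippets:

    private <T> void upsertEntity(JobId jobId, T entity, ITupleReference searchKey,
            IMetadataEntityTupleTranslator<T> tupleTranslator, IMetadataIndex index) throws MetadataException {
        try {
            // Search for the old tuple (acquires an S lock), delete it, then
            // insert the tuple built from the updated entity.
            ITupleReference oldTuple = getTupleToBeDeleted(jobId, index, searchKey);
            deleteTupleFromIndex(jobId, index, oldTuple);
            insertTupleIntoIndex(jobId, index, tupleTranslator.getTupleFromMetadataEntity(entity));
        } catch (HyracksDataException | ACIDException e) {
            throw new MetadataException(e);
        }
    }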
Use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
The class ReplicaResourcesManager, method getLaggingReplicaIndexesId2PathMap.
public Map<Long, String> getLaggingReplicaIndexesId2PathMap(String replicaId, long targetLSN) throws IOException {
    Map<Long, String> laggingReplicaIndexes = new HashMap<Long, String>();
    try {
        // for every index in the replica
        Set<File> remoteIndexes = getReplicaIndexes(replicaId);
        for (File indexFolder : remoteIndexes) {
            if (getReplicaIndexMaxLSN(indexFolder) < targetLSN) {
                File localResource = new File(indexFolder + File.separator
                        + PersistentLocalResourceRepository.METADATA_FILE_NAME);
                LocalResource resource = PersistentLocalResourceRepository.readLocalResource(localResource);
                laggingReplicaIndexes.put(resource.getId(), indexFolder.getAbsolutePath());
            }
        }
    } catch (HyracksDataException e) {
        e.printStackTrace();
    }
    return laggingReplicaIndexes;
}
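The core of this method is an LSN comparison: an index lags if its maximum LSN is below the replication target. Note also that the catch block only prints the stack trace, so a read failure yields a silently incomplete map. A minimal standalone sketch of the filter follows; the IndexInfo record is a hypothetical stand-in for the per-index metadata the real code reads via readLocalResource.

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class LagFilterSketch {
        // Hypothetical stand-in for the metadata file stored in each index folder.
        record IndexInfo(long resourceId, String path, long maxLSN) {}

        static Map<Long, String> laggingIndexes(List<IndexInfo> indexes, long targetLSN) {
            Map<Long, String> lagging = new HashMap<>();
            for (IndexInfo idx : indexes) {
                if (idx.maxLSN() < targetLSN) { // behind the target: needs to catch up
                    lagging.put(idx.resourceId(), idx.path());
                }
            }
            return lagging;
        }

        public static void main(String[] args) {
            Map<Long, String> result = laggingIndexes(
                    List.of(new IndexInfo(1L, "/indexes/a", 90L), new IndexInfo(2L, "/indexes/b", 120L)), 100L);
            System.out.println(result); // {1=/indexes/a}
        }
    }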
Use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb by apache.
The class OverlapBinsDescriptor, method createEvaluatorFactory.
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {
        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            return new IScalarEvaluator() {
                private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
                private final DataOutput out = resultStorage.getDataOutput();
                private final IPointable argPtr0 = new VoidPointable();
                private final IPointable argPtr1 = new VoidPointable();
                private final IPointable argPtr2 = new VoidPointable();
                private final IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
                private final IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
                private final IScalarEvaluator eval2 = args[2].createScalarEvaluator(ctx);

                // for output
                private OrderedListBuilder listBuilder = new OrderedListBuilder();
                private ArrayBackedValueStorage listStorage = new ArrayBackedValueStorage();
                protected final AOrderedListType intListType = new AOrderedListType(BuiltinType.AINTERVAL, null);
                private final AMutableInterval aInterval = new AMutableInterval(0, 0, (byte) -1);

                @SuppressWarnings("unchecked")
                private final ISerializerDeserializer<AInterval> intervalSerde =
                        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINTERVAL);
                private final GregorianCalendarSystem gregCalSys = GregorianCalendarSystem.getInstance();

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    eval0.evaluate(tuple, argPtr0);
                    eval1.evaluate(tuple, argPtr1);
                    eval2.evaluate(tuple, argPtr2);
                    byte[] bytes0 = argPtr0.getByteArray();
                    int offset0 = argPtr0.getStartOffset();
                    ATypeTag type0 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes0[offset0]);
                    long intervalStart;
                    long intervalEnd;
                    byte intervalTypeTag;
                    if (type0 == ATypeTag.INTERVAL) {
                        intervalStart = AIntervalSerializerDeserializer.getIntervalStart(bytes0, offset0 + 1);
                        intervalEnd = AIntervalSerializerDeserializer.getIntervalEnd(bytes0, offset0 + 1);
                        intervalTypeTag = AIntervalSerializerDeserializer.getIntervalTimeType(bytes0, offset0 + 1);
                        if (intervalTypeTag == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
                            intervalStart = intervalStart * GregorianCalendarSystem.CHRONON_OF_DAY;
                        }
                    } else {
                        throw new TypeMismatchException(getIdentifier(), 0, bytes0[offset0],
                                ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
                    }
                    // get the anchor instance time
                    byte[] bytes1 = argPtr1.getByteArray();
                    int offset1 = argPtr1.getStartOffset();
                    ATypeTag type1 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes1[offset1]);
                    if (intervalTypeTag != bytes1[offset1]) {
                        throw new IncompatibleTypeException(getIdentifier(), intervalTypeTag, bytes1[offset1]);
                    }
                    long anchorTime;
                    switch (type1) {
                        case DATE:
                            anchorTime = ADateSerializerDeserializer.getChronon(bytes1, offset1 + 1)
                                    * GregorianCalendarSystem.CHRONON_OF_DAY;
                            break;
                        case TIME:
                            anchorTime = ATimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
                            break;
                        case DATETIME:
                            anchorTime = ADateTimeSerializerDeserializer.getChronon(bytes1, offset1 + 1);
                            break;
                        default:
                            throw new TypeMismatchException(getIdentifier(), 1, bytes1[offset1],
                                    ATypeTag.SERIALIZED_DATE_TYPE_TAG, ATypeTag.SERIALIZED_TIME_TYPE_TAG,
                                    ATypeTag.SERIALIZED_DATETIME_TYPE_TAG);
                    }
                    byte[] bytes2 = argPtr2.getByteArray();
                    int offset2 = argPtr2.getStartOffset();
                    ATypeTag type2 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes2[offset2]);
                    int yearMonth = 0;
                    long dayTime = 0;
                    long firstBinIndex;
                    switch (type2) {
                        case YEARMONTHDURATION:
                            yearMonth = AYearMonthDurationSerializerDeserializer.getYearMonth(bytes2, offset2 + 1);
                            int yearStart = gregCalSys.getYear(anchorTime);
                            int monthStart = gregCalSys.getMonthOfYear(anchorTime, yearStart);
                            int yearToBin = gregCalSys.getYear(intervalStart);
                            int monthToBin = gregCalSys.getMonthOfYear(intervalStart, yearToBin);
                            int totalMonths = (yearToBin - yearStart) * 12 + (monthToBin - monthStart);
                            firstBinIndex = totalMonths / yearMonth
                                    + ((totalMonths < 0 && totalMonths % yearMonth != 0) ? -1 : 0);
                            if (firstBinIndex > Integer.MAX_VALUE) {
                                throw new OverflowException(getIdentifier());
                            }
                            if (firstBinIndex < Integer.MIN_VALUE) {
                                throw new UnderflowException(getIdentifier());
                            }
                            break;
                        case DAYTIMEDURATION:
                            dayTime = ADayTimeDurationSerializerDeserializer.getDayTime(bytes2, offset2 + 1);
                            long totalChronon = intervalStart - anchorTime;
                            firstBinIndex = totalChronon / dayTime
                                    + ((totalChronon < 0 && totalChronon % dayTime != 0) ? -1 : 0);
                            break;
                        default:
                            throw new TypeMismatchException(getIdentifier(), 2, bytes2[offset2],
                                    ATypeTag.SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG,
                                    ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG);
                    }
                    long binStartChronon;
                    long binEndChronon;
                    int binOffset;
                    listBuilder.reset(intListType);
                    try {
                        if (intervalTypeTag == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
                            binOffset = 0;
                            do {
                                binStartChronon = DurationArithmeticOperations.addDuration(anchorTime,
                                        yearMonth * (int) (firstBinIndex + binOffset),
                                        dayTime * (firstBinIndex + binOffset), false);
                                binEndChronon = DurationArithmeticOperations.addDuration(anchorTime,
                                        yearMonth * ((int) (firstBinIndex + binOffset) + 1),
                                        dayTime * ((firstBinIndex + binOffset) + 1), false);
                                binStartChronon = binStartChronon / GregorianCalendarSystem.CHRONON_OF_DAY
                                        + ((binStartChronon < 0
                                                && binStartChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0)
                                                        ? -1 : 0);
                                binEndChronon = binEndChronon / GregorianCalendarSystem.CHRONON_OF_DAY
                                        + ((binEndChronon < 0
                                                && binEndChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0)
                                                        ? -1 : 0);
                                aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
                                listStorage.reset();
                                intervalSerde.serialize(aInterval, listStorage.getDataOutput());
                                listBuilder.addItem(listStorage);
                                binOffset++;
                            } while (binEndChronon < intervalEnd);
                        } else if (intervalTypeTag == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
                            if (yearMonth != 0) {
                                throw new InvalidDataFormatException(getIdentifier(),
                                        ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
                            }
                            binOffset = 0;
                            binStartChronon = DurationArithmeticOperations.addDuration(anchorTime,
                                    yearMonth * (int) (firstBinIndex + binOffset),
                                    dayTime * (firstBinIndex + binOffset), true);
                            binEndChronon = DurationArithmeticOperations.addDuration(anchorTime,
                                    yearMonth * ((int) (firstBinIndex + binOffset) + 1),
                                    dayTime * ((firstBinIndex + binOffset) + 1), true);
                            if (binStartChronon < 0 || binStartChronon >= GregorianCalendarSystem.CHRONON_OF_DAY) {
                                // avoid the case where a time bin starts before 00:00:00 or no earlier than 24:00:00
                                throw new InvalidDataFormatException(getIdentifier(),
                                        ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
                            }
                            while (!((binStartChronon < intervalStart && binEndChronon <= intervalStart)
                                    || (binStartChronon >= intervalEnd && binEndChronon > intervalEnd))) {
                                aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
                                listStorage.reset();
                                intervalSerde.serialize(aInterval, listStorage.getDataOutput());
                                listBuilder.addItem(listStorage);
                                binOffset++;
                                binStartChronon = DurationArithmeticOperations.addDuration(anchorTime,
                                        yearMonth * (int) (firstBinIndex + binOffset),
                                        dayTime * (firstBinIndex + binOffset), true);
                                binEndChronon = DurationArithmeticOperations.addDuration(anchorTime,
                                        yearMonth * ((int) (firstBinIndex + binOffset) + 1),
                                        dayTime * ((firstBinIndex + binOffset) + 1), true);
                                if (binStartChronon == GregorianCalendarSystem.CHRONON_OF_DAY) {
                                    break;
                                }
                                if (binEndChronon < binStartChronon) {
                                    throw new InvalidDataFormatException(getIdentifier(),
                                            ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG);
                                }
                            }
                        } else if (intervalTypeTag == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
                            binOffset = 0;
                            do {
                                binStartChronon = DurationArithmeticOperations.addDuration(anchorTime,
                                        yearMonth * (int) (firstBinIndex + binOffset),
                                        dayTime * (firstBinIndex + binOffset), false);
                                binEndChronon = DurationArithmeticOperations.addDuration(anchorTime,
                                        yearMonth * ((int) (firstBinIndex + binOffset) + 1),
                                        dayTime * ((firstBinIndex + binOffset) + 1), false);
                                aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
                                listStorage.reset();
                                intervalSerde.serialize(aInterval, listStorage.getDataOutput());
                                listBuilder.addItem(listStorage);
                                binOffset++;
                            } while (binEndChronon < intervalEnd);
                        } else {
                            throw new TypeMismatchException(getIdentifier(), 0, bytes0[offset0],
                                    ATypeTag.SERIALIZED_DATE_TYPE_TAG, ATypeTag.SERIALIZED_TIME_TYPE_TAG,
                                    ATypeTag.SERIALIZED_DATETIME_TYPE_TAG);
                        }
                        listBuilder.write(out, true);
                    } catch (IOException e1) {
                        throw new HyracksDataException(e1);
                    }
                    result.set(resultStorage);
                }
            };
        }
    };
}
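Both duration branches compute firstBinIndex with the same correction term: Java's integer division truncates toward zero, so when the offset from the anchor is negative and not an exact multiple of the bin width, the quotient must be decremented by one to get the floor. A standalone sketch of that arithmetic (the chronon values and bin width below are illustrative):

    public class BinIndexSketch {
        static long firstBinIndex(long intervalStart, long anchorTime, long binWidth) {
            long totalChronon = intervalStart - anchorTime;
            // Same floor-division idiom as the DAYTIMEDURATION branch above.
            return totalChronon / binWidth
                    + ((totalChronon < 0 && totalChronon % binWidth != 0) ? -1 : 0);
        }

        public static void main(String[] args) {
            // Interval starts 2500 chronons before the anchor; bins are 1000 wide.
            System.out.println(firstBinIndex(-2500, 0, 1000)); // -3
            System.out.println(Math.floorDiv(-2500, 1000));    // -3, the same floor
            System.out.println(-2500 / 1000);                  // -2, plain truncation
        }
    }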