Use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb (Apache): class DataGenOperatorDescriptor, method createPushRuntime.
/**
 * Creates the push runtime that generates {@code numRecords} random tuples and
 * streams them to the downstream writer. Only integer and UTF-8 string fields
 * are supported; the field at index {@code uniqueField} (if any) is de-duplicated.
 *
 * @param ctx                task context used to allocate the output frame
 * @param recordDescProvider unused here; the output descriptor comes from {@code outRecDescs[0]}
 * @param partition          partition index (unused)
 * @param nPartitions        total partition count (unused)
 * @throws HyracksDataException if a record exceeds the frame size, an unsupported
 *         field type is encountered, or uniqueness cannot be achieved
 */
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
    final FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
    final RecordDescriptor recDesc = outRecDescs[0];
    final ArrayTupleBuilder tb = new ArrayTupleBuilder(recDesc.getFields().length);
    final Random rnd = new Random(randomSeed);
    final int maxUniqueAttempts = 20;
    return new AbstractUnaryOutputSourceOperatorNodePushable() {
        // for quick & dirty exclusion of duplicates
        // WARNING: could contain numRecord entries and use a lot of memory
        HashSet<String> stringHs = new HashSet<>();
        HashSet<Integer> intHs = new HashSet<>();

        @Override
        public void initialize() throws HyracksDataException {
            try {
                writer.open();
                for (int i = 0; i < numRecords; i++) {
                    tb.reset();
                    for (int j = 0; j < recDesc.getFieldCount(); j++) {
                        genField(tb, j);
                    }
                    // If the tuple does not fit, flush the frame and retry once;
                    // a second failure means the single tuple is larger than a frame.
                    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                        appender.write(writer, true);
                        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                            throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
                        }
                    }
                }
                appender.write(writer, true);
            } catch (Throwable th) {
                writer.fail();
                throw new HyracksDataException(th);
            } finally {
                writer.close();
            }
        }

        /**
         * Generates one random field value and appends it (with its end offset)
         * to the tuple builder. Integer and UTF-8 string fields are supported.
         */
        private void genField(ArrayTupleBuilder tb, int fieldIndex) throws HyracksDataException {
            DataOutput dos = tb.getDataOutput();
            if (recDesc.getFields()[fieldIndex] instanceof IntegerSerializerDeserializer) {
                int val = -1;
                if (fieldIndex == uniqueField) {
                    int attempt = 0;
                    while (attempt < maxUniqueAttempts) {
                        // Math.floorMod (not Math.abs(..) % ..): abs(Integer.MIN_VALUE)
                        // is negative, which could previously produce values < intMinVal.
                        int tmp = Math.floorMod(rnd.nextInt(), intMaxVal - intMinVal) + intMinVal;
                        if (intHs.contains(tmp)) {
                            attempt++;
                        } else {
                            val = tmp;
                            intHs.add(val);
                            break;
                        }
                    }
                    if (attempt == maxUniqueAttempts) {
                        throw new HyracksDataException("MaxUnique attempts reached in datagen");
                    }
                } else {
                    val = Math.floorMod(rnd.nextInt(), intMaxVal - intMinVal) + intMinVal;
                }
                recDesc.getFields()[fieldIndex].serialize(val, dos);
                tb.addFieldEndOffset();
            } else if (recDesc.getFields()[fieldIndex] instanceof UTF8StringSerializerDeserializer) {
                String val = null;
                if (fieldIndex == uniqueField) {
                    int attempt = 0;
                    while (attempt < maxUniqueAttempts) {
                        String tmp = randomString(maxStrLen, rnd);
                        if (stringHs.contains(tmp)) {
                            attempt++;
                        } else {
                            val = tmp;
                            stringHs.add(val);
                            break;
                        }
                    }
                    if (attempt == maxUniqueAttempts) {
                        throw new HyracksDataException("MaxUnique attempts reached in datagen");
                    }
                } else {
                    val = randomString(maxStrLen, rnd);
                }
                recDesc.getFields()[fieldIndex].serialize(val, dos);
                tb.addFieldEndOffset();
            } else {
                throw new HyracksDataException("Type unsupported in data generator. Only integers and strings allowed");
            }
        }

        /**
         * Builds a random hex-character string of at most {@code length} characters
         * (shorter if the random hex seed string is shorter).
         */
        private String randomString(int length, Random random) {
            String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
            StringBuilder strBuilder = new StringBuilder();
            for (int i = 0; i < s.length() && i < length; i++) {
                // floorMod guards against the negative result of abs(Integer.MIN_VALUE),
                // which could previously make charAt throw StringIndexOutOfBoundsException.
                strBuilder.append(s.charAt(Math.floorMod(random.nextInt(), s.length())));
            }
            return strBuilder.toString();
        }
    };
}
Use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb (Apache): class RunMergingFrameReader, method createEntryComparator.
/**
 * Builds the comparator used to order run entries during the merge:
 * first by normalized key (compared as unsigned 32-bit values), then by
 * the configured sort fields, and finally by run id as a tie-breaker.
 */
private Comparator<ReferenceEntry> createEntryComparator(final IBinaryComparator[] comparators) {
    return new Comparator<ReferenceEntry>() {
        @Override
        public int compare(ReferenceEntry left, ReferenceEntry right) {
            // Normalized keys are unsigned; widen to long before comparing.
            final int leftKey = left.getNormalizedKey();
            final int rightKey = right.getNormalizedKey();
            if (leftKey != rightKey) {
                final long leftUnsigned = ((long) leftKey) & 0xffffffffL;
                final long rightUnsigned = ((long) rightKey) & 0xffffffffL;
                return leftUnsigned < rightUnsigned ? -1 : 1;
            }
            // Equal prefixes: fall back to a full comparison of each sort field.
            final byte[] leftBytes = left.getAccessor().getBuffer().array();
            final byte[] rightBytes = right.getAccessor().getBuffer().array();
            final int[] leftPointers = left.getTPointers();
            final int[] rightPointers = right.getTPointers();
            for (int f = 0; f < sortFields.length; ++f) {
                final int cmp;
                try {
                    cmp = comparators[f].compare(leftBytes, leftPointers[2 * f + 1], leftPointers[2 * f + 2],
                            rightBytes, rightPointers[2 * f + 1], rightPointers[2 * f + 2]);
                } catch (HyracksDataException e) {
                    // Comparator.compare cannot throw checked exceptions; rethrow unchecked.
                    throw new IllegalArgumentException(e);
                }
                if (cmp != 0) {
                    return cmp;
                }
            }
            // Fully equal tuples: keep a stable order by run id.
            final int leftRun = left.getRunid();
            final int rightRun = right.getRunid();
            return leftRun < rightRun ? -1 : (leftRun == rightRun ? 0 : 1);
        }
    };
}
Use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb (Apache): class LSMBTreeWithBuddySortedCursor, method reset.
/**
 * Resets this cursor: clears the depleted flags, re-opens a range search on every
 * underlying B-tree, and advances each cursor to its first tuple (marking trees
 * with no matches as depleted). Always ends the harness search when the cursor
 * is open, even on failure.
 *
 * @throws HyracksDataException if re-searching or advancing any B-tree cursor fails
 */
@Override
public void reset() throws HyracksDataException {
    depletedBtreeCursors = new boolean[numberOfTrees];
    foundNext = false;
    try {
        for (int i = 0; i < numberOfTrees; i++) {
            btreeCursors[i].reset();
            btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
            if (btreeCursors[i].hasNext()) {
                btreeCursors[i].next();
            } else {
                depletedBtreeCursors[i] = true;
            }
        }
    } catch (Exception e) {
        // No printStackTrace(): the cause is preserved in the wrapped exception
        // and will be logged by whoever handles it.
        throw new HyracksDataException("error while resetting the btrees of the lsm btree with buddy btree", e);
    } finally {
        if (open) {
            lsmHarness.endSearch(opCtx);
        }
    }
}
Use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb (Apache): class LSMBTreeWithBuddyFileManager, method getTransactionFileReferenceForCommit.
/**
 * Locates the files of a pending transaction for commit. Expects at most one
 * transaction lock in {@code baseDir}; deletes the lock, then resolves the
 * B-tree, buddy B-tree, and bloom filter files belonging to the transaction.
 *
 * @return file references for the transaction's component, or {@code null} if
 *         there is no pending transaction
 * @throws HyracksDataException if multiple locks exist, the transaction is
 *         incomplete, the lock cannot be deleted, or an unknown file is found
 */
@Override
public LSMComponentFileReferences getTransactionFileReferenceForCommit() throws HyracksDataException {
    FilenameFilter transactionFilter;
    File dir = new File(baseDir);
    String[] files = dir.list(transactionFileNameFilter);
    // File.list() returns null when baseDir does not exist or is not a directory;
    // treat that the same as "no pending transaction" instead of throwing NPE.
    if (files == null || files.length == 0) {
        return null;
    }
    if (files.length != 1) {
        throw new HyracksDataException("More than one transaction lock found:" + files.length);
    } else {
        transactionFilter = getTransactionFileFilter(true);
        String txnFileName = dir.getPath() + File.separator + files[0];
        // get the actual transaction files
        files = dir.list(transactionFilter);
        if (files.length < 3) {
            throw new HyracksDataException("LSM Btree with buddy transaction has less than 3 files :" + files.length);
        }
        try {
            Files.delete(Paths.get(txnFileName));
        } catch (IOException e) {
            // Preserve the underlying cause instead of discarding it.
            throw new HyracksDataException("Failed to delete transaction lock :" + txnFileName, e);
        }
    }
    File bTreeFile = null;
    File buddyBTreeFile = null;
    File bloomFilterFile = null;
    for (String fileName : files) {
        if (fileName.endsWith(BTREE_STRING)) {
            bTreeFile = new File(dir.getPath() + File.separator + fileName);
        } else if (fileName.endsWith(BUDDY_BTREE_STRING)) {
            buddyBTreeFile = new File(dir.getPath() + File.separator + fileName);
        } else if (fileName.endsWith(BLOOM_FILTER_STRING)) {
            bloomFilterFile = new File(dir.getPath() + File.separator + fileName);
        } else {
            throw new HyracksDataException("unrecognized file found = " + fileName);
        }
    }
    FileReference bTreeFileRef = ioManager.resolveAbsolutePath(bTreeFile.getAbsolutePath());
    FileReference buddyBTreeFileRef = ioManager.resolveAbsolutePath(buddyBTreeFile.getAbsolutePath());
    FileReference bloomFilterFileRef = ioManager.resolveAbsolutePath(bloomFilterFile.getAbsolutePath());
    return new LSMComponentFileReferences(bTreeFileRef, buddyBTreeFileRef, bloomFilterFileRef);
}
Use of org.apache.hyracks.api.exceptions.HyracksDataException in project asterixdb (Apache): class AbstractLSMDiskComponentWithBuddyBulkLoader, method delete.
/**
 * Bulk-loads an antimatter (delete) tuple into the buddy B-tree, registering it
 * with the bloom filter builder and the component filter. Duplicate-key errors
 * are tolerated (deleting a key more than once is legal); any other failure
 * cleans up partially-written artifacts before propagating.
 *
 * @param tuple the tuple identifying the key to delete
 * @throws HyracksDataException on any failure other than a duplicate key
 */
@Override
public void delete(ITupleReference tuple) throws HyracksDataException {
    try {
        final ITupleReference loadTuple;
        if (indexTuple == null) {
            loadTuple = tuple;
        } else {
            // Project the incoming tuple down to the index fields first.
            indexTuple.reset(tuple);
            loadTuple = indexTuple;
        }
        buddyBTreeBulkLoader.add(loadTuple);
        if (bloomFilterBuilder != null) {
            bloomFilterBuilder.add(loadTuple);
        }
        // The component filter sees the original (unprojected) tuple.
        updateFilter(tuple);
    } catch (HyracksDataException e) {
        // deleting a key multiple times is OK
        if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
            cleanupArtifacts();
            throw e;
        }
    } catch (Exception e) {
        cleanupArtifacts();
        throw e;
    }
    if (isEmptyComponent) {
        isEmptyComponent = false;
    }
}
Aggregations