use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.
the class DatasetTupleTranslatorTest method test.
@Test
public void test() throws MetadataException, IOException {
    // Exercise all three cases for the key source indicator: primary record (0),
    // meta record (1), and unspecified (null).
    Integer[] indicators = { 0, 1, null };
    for (Integer indicator : indicators) {
        Map<String, String> compactionPolicyProperties = new HashMap<>();
        compactionPolicyProperties.put("max-mergable-component-size", "1073741824");
        compactionPolicyProperties.put("max-tolerance-component-count", "3");
        InternalDatasetDetails details = new InternalDatasetDetails(FileStructure.BTREE,
                PartitioningStrategy.HASH, Collections.singletonList(Collections.singletonList("row_id")),
                Collections.singletonList(Collections.singletonList("row_id")),
                indicator == null ? null : Collections.singletonList(indicator),
                Collections.singletonList(BuiltinType.AINT64), false, Collections.emptyList(), false);
        Dataset dataset = new Dataset("test", "log", "foo", "LogType", "CB", "MetaType",
                "DEFAULT_NG_ALL_NODES", "prefix", compactionPolicyProperties, details,
                Collections.emptyMap(), DatasetType.INTERNAL, 115, 0);
        DatasetTupleTranslator dtTranslator = new DatasetTupleTranslator(true);
        // Round trip: metadata entity -> tuple -> metadata entity.
        ITupleReference tuple = dtTranslator.getTupleFromMetadataEntity(dataset);
        Dataset deserializedDataset = dtTranslator.getMetadataEntityFromTuple(tuple);
        Assert.assertEquals(dataset.getMetaItemTypeDataverseName(),
                deserializedDataset.getMetaItemTypeDataverseName());
        Assert.assertEquals(dataset.getMetaItemTypeName(), deserializedDataset.getMetaItemTypeName());
        if (indicator == null) {
            // A null indicator is normalized to [0] by the round trip.
            Assert.assertEquals(Collections.singletonList(0),
                    ((InternalDatasetDetails) deserializedDataset.getDatasetDetails()).getKeySourceIndicator());
        } else {
            Assert.assertEquals(((InternalDatasetDetails) dataset.getDatasetDetails()).getKeySourceIndicator(),
                    ((InternalDatasetDetails) deserializedDataset.getDatasetDetails()).getKeySourceIndicator());
        }
    }
}
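The core of the test is a serialize/deserialize round trip through the translator: entity to ITupleReference and back, then field-by-field comparison. Below is a generic sketch of that pattern with a hypothetical Translator interface and method names (the real calls are getTupleFromMetadataEntity and getMetadataEntityFromTuple, as used above):

import java.util.Objects;

// Hypothetical interface; toTuple/fromTuple stand in for the translator's
// getTupleFromMetadataEntity/getMetadataEntityFromTuple pair.
interface Translator<E, T> {
    T toTuple(E entity) throws Exception;
    E fromTuple(T tuple) throws Exception;
}

final class RoundTrip {
    // Serializes the entity, deserializes it back, and checks that a chosen
    // projection of the entity (e.g. one field) survives the round trip.
    static <E, T, V> void assertRoundTrip(Translator<E, T> translator, E entity,
            java.util.function.Function<E, V> projection) throws Exception {
        E copy = translator.fromTuple(translator.toTuple(entity));
        if (!Objects.equals(projection.apply(entity), projection.apply(copy))) {
            throw new AssertionError("round trip changed " + projection.apply(entity)
                    + " to " + projection.apply(copy));
        }
    }
}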
use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.
the class BTree method deleteLeaf.
private boolean deleteLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx)
        throws Exception {
    // Simply delete the tuple, and don't do any rebalancing. This means there
    // could be underflow, even an empty page that is still pointed to by an
    // interior node.
    if (ctx.getLeafFrame().getTupleCount() == 0) {
        throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
    }
    int tupleIndex = ctx.getLeafFrame().findDeleteTupleIndex(tuple);
    ITupleReference beforeTuple = ctx.getLeafFrame().getMatchingKeyTuple(tuple, tupleIndex);
    ctx.getModificationCallback().found(beforeTuple, tuple);
    ctx.getLeafFrame().delete(tuple, tupleIndex);
    return false;
}
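Stripped of the Hyracks types, the delete path is: locate the tuple, capture its before-image for the modification callback, then physically remove it. Here is a minimal sketch against hypothetical single-purpose interfaces (the real IBTreeLeafFrame and IModificationOperationCallback carry more state than this):

// Hypothetical, simplified leaf-frame and callback interfaces.
interface LeafFrame<T> {
    int findDeleteTupleIndex(T key) throws Exception; // fails if the key is absent
    T getMatchingKeyTuple(T key, int index);
    void delete(T key, int index);
}

interface ModificationCallback<T> {
    void found(T before, T after); // before-image hook, e.g. for logging or locking
}

final class LeafDelete {
    static <T> void delete(LeafFrame<T> frame, ModificationCallback<T> cb, T tuple)
            throws Exception {
        int index = frame.findDeleteTupleIndex(tuple);      // locate the victim
        T before = frame.getMatchingKeyTuple(tuple, index); // capture the before-image
        cb.found(before, tuple);                            // notify before mutating
        frame.delete(tuple, index);                         // physical delete
    }
}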
use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.
the class BTree method updateLeaf.
private boolean updateLeaf(ITupleReference tuple, int oldTupleIndex, int pageId, BTreeOpContext ctx)
        throws Exception {
    FrameOpSpaceStatus spaceStatus = ctx.getLeafFrame().hasSpaceUpdate(tuple, oldTupleIndex);
    ITupleReference beforeTuple = ctx.getLeafFrame().getMatchingKeyTuple(tuple, oldTupleIndex);
    boolean restartOp = false;
    switch (spaceStatus) {
        case SUFFICIENT_INPLACE_SPACE: {
            ctx.getModificationCallback().found(beforeTuple, tuple);
            ctx.getLeafFrame().update(tuple, oldTupleIndex, true);
            ctx.getSplitKey().reset();
            break;
        }
        case EXPAND: {
            // TODO: avoid repeated calculation of tuple size
            // TODO: in-place update on expand
            // Delete the old tuple, compact the frame, and insert the new tuple.
            ctx.getModificationCallback().found(beforeTuple, tuple);
            ctx.getLeafFrame().delete(tuple, oldTupleIndex);
            ctx.getLeafFrame().compact();
            ctx.getLeafFrame().ensureCapacity(bufferCache, tuple, ctx);
            int targetTupleIndex = ctx.getLeafFrame().findInsertTupleIndex(tuple);
            ctx.getLeafFrame().insert(tuple, targetTupleIndex);
            ctx.getSplitKey().reset();
            break;
        }
        case SUFFICIENT_CONTIGUOUS_SPACE: {
            ctx.getModificationCallback().found(beforeTuple, tuple);
            ctx.getLeafFrame().update(tuple, oldTupleIndex, false);
            ctx.getSplitKey().reset();
            break;
        }
        case SUFFICIENT_SPACE: {
            // Delete the old tuple, compact the frame, and insert the new tuple.
            ctx.getModificationCallback().found(beforeTuple, tuple);
            ctx.getLeafFrame().delete(tuple, oldTupleIndex);
            ctx.getLeafFrame().compact();
            int targetTupleIndex = ctx.getLeafFrame().findInsertTupleIndex(tuple);
            ctx.getLeafFrame().insert(tuple, targetTupleIndex);
            ctx.getSplitKey().reset();
            break;
        }
        case INSUFFICIENT_SPACE: {
            restartOp = performLeafSplit(pageId, tuple, ctx, oldTupleIndex);
            break;
        }
        default: {
            throw new IllegalStateException("NYI: " + spaceStatus);
        }
    }
    return restartOp;
}
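For quick reference, the five space statuses reduce to three families of strategies: rewrite in place, delete-compact-insert, or split. The annotated enum below is a paraphrase for illustration, not the real org.apache.hyracks FrameOpSpaceStatus type:

// Illustrative summary of the dispatch above (not the Hyracks enum).
enum FrameUpdateStrategy {
    SUFFICIENT_INPLACE_SPACE,    // overwrite the tuple where it sits (in place)
    SUFFICIENT_CONTIGUOUS_SPACE, // rewrite into the frame's contiguous free space
    EXPAND,                      // grow the frame, then delete + compact + insert
    SUFFICIENT_SPACE,            // delete + compact + insert within the frame
    INSUFFICIENT_SPACE           // no room: split the leaf, possibly restarting the op
}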
use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.
the class FramewriterTest method mockIndexCursors.
private IIndexCursor[] mockIndexCursors() throws HyracksDataException {
    ITupleReference[] tuples = mockTuples();
    IIndexCursor[] cursors = new IIndexCursor[tuples.length * 2];
    int j = 0;
    for (int i = 0; i < tuples.length; i++) {
        // A well-behaved cursor: yields its tuple twice, then is exhausted.
        IIndexCursor cursor = Mockito.mock(IIndexCursor.class);
        Mockito.when(cursor.hasNext()).thenReturn(true, true, false);
        Mockito.when(cursor.getTuple()).thenReturn(tuples[i]);
        cursors[j] = cursor;
        j++;
        // A faulty twin: identical iteration behavior, but close() throws.
        cursor = Mockito.mock(IIndexCursor.class);
        Mockito.when(cursor.hasNext()).thenReturn(true, true, false);
        Mockito.when(cursor.getTuple()).thenReturn(tuples[i]);
        Mockito.doThrow(new HyracksDataException("Failed to close cursor")).when(cursor).close();
        cursors[j] = cursor;
        j++;
    }
    return cursors;
}
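A consumer of these mocks loops on hasNext()/next()/getTuple() and closes the cursor in a finally block; with hasNext() stubbed to (true, true, false), each cursor yields its tuple twice and then reports exhaustion, and every odd-indexed cursor throws from close(). Here is a sketch of such a drain loop (illustrative, not part of FramewriterTest; import paths assume the same source tree as the snippets above):

import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;

final class CursorDrain {
    static void drain(IIndexCursor cursor) throws HyracksDataException {
        try {
            while (cursor.hasNext()) {                  // stubbed: true, true, false
                cursor.next();                          // advance the cursor
                ITupleReference t = cursor.getTuple();  // same mock tuple each time
                // ... hand t to the frame writer under test ...
            }
        } finally {
            cursor.close(); // the odd-indexed mocks throw here
        }
    }
}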
use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.
the class BTree method performLeafSplit.
private boolean performLeafSplit(int pageId, ITupleReference tuple, BTreeOpContext ctx, int updateTupleIndex)
        throws Exception {
    // Lock is released in unsetSmPages(), after the structure modification (SM)
    // has fully completed.
    if (!treeLatch.writeLock().tryLock()) {
        return true;
    } else {
        int tempSmoCount = smoCounter.get();
        if (tempSmoCount != ctx.getSmoCount()) {
            treeLatch.writeLock().unlock();
            return true;
        }
    }
    int rightPageId = freePageManager.takePage(ctx.getMetaFrame());
    ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
    rightNode.acquireWriteLatch();
    try {
        IBTreeLeafFrame rightFrame = ctx.createLeafFrame();
        rightFrame.setPage(rightNode);
        rightFrame.initBuffer((byte) 0);
        rightFrame.setMultiComparator(ctx.getCmp());
        // Perform an update (delete + insert) if updateTupleIndex != -1.
        if (updateTupleIndex != -1) {
            ITupleReference beforeTuple = ctx.getLeafFrame().getMatchingKeyTuple(tuple, updateTupleIndex);
            ctx.getModificationCallback().found(beforeTuple, tuple);
            ctx.getLeafFrame().delete(tuple, updateTupleIndex);
        } else {
            ctx.getModificationCallback().found(null, tuple);
        }
        ctx.getLeafFrame().split(rightFrame, tuple, ctx.getSplitKey(), ctx, bufferCache);
        ctx.getSmPages().add(pageId);
        ctx.getSmPages().add(rightPageId);
        ctx.getLeafFrame().setSmFlag(true);
        rightFrame.setSmFlag(true);
        rightFrame.setNextLeaf(ctx.getLeafFrame().getNextLeaf());
        ctx.getLeafFrame().setNextLeaf(rightPageId);
        rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
        ctx.getLeafFrame().setPageLsn(ctx.getLeafFrame().getPageLsn() + 1);
        ctx.getSplitKey().setPages(pageId, rightPageId);
    } catch (Exception e) {
        treeLatch.writeLock().unlock();
        throw e;
    } finally {
        rightNode.releaseWriteLatch(true);
        bufferCache.unpin(rightNode);
    }
    return false;
}
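The entry guard at the top of the method is the subtle part: the split must not block while holding latches, and it must detect a concurrent structure modification (SMO) that completed between the operation's read of the SMO counter and the split attempt. Below is a minimal sketch of that guard with illustrative names (treeLatch and smoCounter mirror the fields used above; this is not the BTree class itself):

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantReadWriteLock;

final class SmoGuard {
    private final ReentrantReadWriteLock treeLatch = new ReentrantReadWriteLock();
    private final AtomicInteger smoCounter = new AtomicInteger();

    // Returns true if the caller must restart its operation instead of
    // proceeding with the split; on false, the write lock is held.
    boolean tryEnter(int smoCountSeenByOp) {
        if (!treeLatch.writeLock().tryLock()) {
            return true; // another SMO is in progress; retry from the top
        }
        if (smoCounter.get() != smoCountSeenByOp) {
            treeLatch.writeLock().unlock();
            return true; // the tree changed under us; retry with fresh state
        }
        return false; // safe to split; the lock is released once SM pages are unset
    }
}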