Example 81 with ITupleReference

Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.

From the class DatasetTupleTranslatorTest, method test.

@Test
public void test() throws MetadataException, IOException {
    Integer[] indicators = { 0, 1, null };
    for (Integer indicator : indicators) {
        Map<String, String> compactionPolicyProperties = new HashMap<>();
        compactionPolicyProperties.put("max-mergable-component-size", "1073741824");
        compactionPolicyProperties.put("max-tolerance-component-count", "3");
        InternalDatasetDetails details = new InternalDatasetDetails(FileStructure.BTREE, PartitioningStrategy.HASH,
                Collections.singletonList(Collections.singletonList("row_id")),
                Collections.singletonList(Collections.singletonList("row_id")),
                indicator == null ? null : Collections.singletonList(indicator),
                Collections.singletonList(BuiltinType.AINT64), false, Collections.emptyList(), false);
        Dataset dataset = new Dataset("test", "log", "foo", "LogType", "CB", "MetaType", "DEFAULT_NG_ALL_NODES",
                "prefix", compactionPolicyProperties, details, Collections.emptyMap(), DatasetType.INTERNAL, 115, 0);
        DatasetTupleTranslator dtTranslator = new DatasetTupleTranslator(true);
        ITupleReference tuple = dtTranslator.getTupleFromMetadataEntity(dataset);
        Dataset deserializedDataset = dtTranslator.getMetadataEntityFromTuple(tuple);
        Assert.assertEquals(dataset.getMetaItemTypeDataverseName(), deserializedDataset.getMetaItemTypeDataverseName());
        Assert.assertEquals(dataset.getMetaItemTypeName(), deserializedDataset.getMetaItemTypeName());
        if (indicator == null) {
            Assert.assertEquals(Collections.singletonList(new Integer(0)),
                    ((InternalDatasetDetails) deserializedDataset.getDatasetDetails()).getKeySourceIndicator());
        } else {
            Assert.assertEquals(((InternalDatasetDetails) dataset.getDatasetDetails()).getKeySourceIndicator(),
                    ((InternalDatasetDetails) deserializedDataset.getDatasetDetails()).getKeySourceIndicator());
        }
    }
}
Also used : HashMap(java.util.HashMap) Dataset(org.apache.asterix.metadata.entities.Dataset) InternalDatasetDetails(org.apache.asterix.metadata.entities.InternalDatasetDetails) ITupleReference(org.apache.hyracks.dataflow.common.data.accessors.ITupleReference) Test(org.junit.Test)
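
For reference, the round trip above ends with an ITupleReference whose contents can only be read through its field accessors. A minimal sketch of walking those fields, assuming nothing beyond the ITupleReference interface itself (the printFields helper name is made up for illustration):

// Illustrative helper: dump the raw byte layout of each field of a tuple.
private static void printFields(ITupleReference tuple) {
    for (int i = 0; i < tuple.getFieldCount(); i++) {
        byte[] data = tuple.getFieldData(i);   // backing array holding field i
        int start = tuple.getFieldStart(i);    // offset of field i within that array
        int length = tuple.getFieldLength(i);  // serialized length of field i
        System.out.println("field " + i + ": start=" + start + ", length=" + length + ", array=" + data.length + " bytes");
    }
}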

Example 82 with ITupleReference

Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.

From the class BTree, method deleteLeaf.

private boolean deleteLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
    // Simply delete the tuple and do no rebalancing; this can leave an underflowed
    // (or even empty) page that is still pointed to by an interior node.
    if (ctx.getLeafFrame().getTupleCount() == 0) {
        throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
    }
    int tupleIndex = ctx.getLeafFrame().findDeleteTupleIndex(tuple);
    ITupleReference beforeTuple = ctx.getLeafFrame().getMatchingKeyTuple(tuple, tupleIndex);
    ctx.getModificationCallback().found(beforeTuple, tuple);
    ctx.getLeafFrame().delete(tuple, tupleIndex);
    return false;
}
Also used : ITupleReference(org.apache.hyracks.dataflow.common.data.accessors.ITupleReference)

Example 83 with ITupleReference

Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.

From the class BTree, method updateLeaf.

private boolean updateLeaf(ITupleReference tuple, int oldTupleIndex, int pageId, BTreeOpContext ctx) throws Exception {
    FrameOpSpaceStatus spaceStatus = ctx.getLeafFrame().hasSpaceUpdate(tuple, oldTupleIndex);
    ITupleReference beforeTuple = ctx.getLeafFrame().getMatchingKeyTuple(tuple, oldTupleIndex);
    boolean restartOp = false;
    switch(spaceStatus) {
        case SUFFICIENT_INPLACE_SPACE:
            {
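                // Enough free space at the tuple's existing slot: overwrite the old tuple in place.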
                ctx.getModificationCallback().found(beforeTuple, tuple);
                ctx.getLeafFrame().update(tuple, oldTupleIndex, true);
                ctx.getSplitKey().reset();
                break;
            }
        case EXPAND:
            {
                // TODO: avoid repeated calculation of tuple size
                // TODO: in-place update on expand
                // Delete the old tuple, compact the frame, and insert the new tuple.
                ctx.getModificationCallback().found(beforeTuple, tuple);
                ctx.getLeafFrame().delete(tuple, oldTupleIndex);
                ctx.getLeafFrame().compact();
                ctx.getLeafFrame().ensureCapacity(bufferCache, tuple, ctx);
                int targetTupleIndex = ctx.getLeafFrame().findInsertTupleIndex(tuple);
                ctx.getLeafFrame().insert(tuple, targetTupleIndex);
                ctx.getSplitKey().reset();
                break;
            }
        case SUFFICIENT_CONTIGUOUS_SPACE:
            {
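                // Enough contiguous free space in the frame: write the new tuple into that space instead of updating in place.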
                ctx.getModificationCallback().found(beforeTuple, tuple);
                ctx.getLeafFrame().update(tuple, oldTupleIndex, false);
                ctx.getSplitKey().reset();
                break;
            }
        case SUFFICIENT_SPACE:
            {
                // Delete the old tuple, compact the frame, and insert the new tuple.
                ctx.getModificationCallback().found(beforeTuple, tuple);
                ctx.getLeafFrame().delete(tuple, oldTupleIndex);
                ctx.getLeafFrame().compact();
                int targetTupleIndex = ctx.getLeafFrame().findInsertTupleIndex(tuple);
                ctx.getLeafFrame().insert(tuple, targetTupleIndex);
                ctx.getSplitKey().reset();
                break;
            }
        case INSUFFICIENT_SPACE:
            {
                restartOp = performLeafSplit(pageId, tuple, ctx, oldTupleIndex);
                break;
            }
        default:
            {
                throw new IllegalStateException("NYI: " + spaceStatus);
            }
    }
    return restartOp;
}
Also used : ITupleReference(org.apache.hyracks.dataflow.common.data.accessors.ITupleReference) FrameOpSpaceStatus(org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus)

Example 84 with ITupleReference

Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.

From the class FramewriterTest, method mockIndexCursors.

private IIndexCursor[] mockIndexCursors() throws HyracksDataException {
    ITupleReference[] tuples = mockTuples();
    IIndexCursor[] cursors = new IIndexCursor[tuples.length * 2];
    int j = 0;
    for (int i = 0; i < tuples.length; i++) {
        IIndexCursor cursor = Mockito.mock(IIndexCursor.class);
        Mockito.when(cursor.hasNext()).thenReturn(true, true, false);
        Mockito.when(cursor.getTuple()).thenReturn(tuples[i]);
        cursors[j] = cursor;
        j++;
        cursor = Mockito.mock(IIndexCursor.class);
        Mockito.when(cursor.hasNext()).thenReturn(true, true, false);
        Mockito.when(cursor.getTuple()).thenReturn(tuples[i]);
        Mockito.doThrow(new HyracksDataException("Failed to close cursor")).when(cursor).close();
        cursors[j] = cursor;
        j++;
    }
    return cursors;
}
Also used : ITupleReference(org.apache.hyracks.dataflow.common.data.accessors.ITupleReference) IIndexCursor(org.apache.hyracks.storage.common.IIndexCursor) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException)
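
Each mocked cursor reports hasNext() as true twice before returning false and always yields the same tuple; every second cursor also throws on close(). A minimal sketch of how such a cursor is typically drained, assuming only the IIndexCursor and ITupleReference interfaces (the drain helper name is illustrative):

// Illustrative consumer: advance the cursor until exhausted and count its tuples.
private static int drain(IIndexCursor cursor) throws HyracksDataException {
    int count = 0;
    try {
        while (cursor.hasNext()) {
            cursor.next();                      // position the cursor on the next tuple
            ITupleReference tuple = cursor.getTuple();
            count++;                            // a real caller would copy or process 'tuple' here
        }
    } finally {
        cursor.close();                         // with the mocks above, this throws for every second cursor
    }
    return count;
}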

Example 85 with ITupleReference

Use of org.apache.hyracks.dataflow.common.data.accessors.ITupleReference in project asterixdb by apache.

From the class BTree, method performLeafSplit.

private boolean performLeafSplit(int pageId, ITupleReference tuple, BTreeOpContext ctx, int updateTupleIndex) throws Exception {
    // Lock is released in unsetSmPages(), after sm has fully completed.
    if (!treeLatch.writeLock().tryLock()) {
        return true;
    } else {
        int tempSmoCount = smoCounter.get();
        if (tempSmoCount != ctx.getSmoCount()) {
            treeLatch.writeLock().unlock();
            return true;
        }
    }
    int rightPageId = freePageManager.takePage(ctx.getMetaFrame());
    ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
    rightNode.acquireWriteLatch();
    try {
        IBTreeLeafFrame rightFrame = ctx.createLeafFrame();
        rightFrame.setPage(rightNode);
        rightFrame.initBuffer((byte) 0);
        rightFrame.setMultiComparator(ctx.getCmp());
        // Perform an update (delete + insert) if the updateTupleIndex != -1
        if (updateTupleIndex != -1) {
            ITupleReference beforeTuple = ctx.getLeafFrame().getMatchingKeyTuple(tuple, updateTupleIndex);
            ctx.getModificationCallback().found(beforeTuple, tuple);
            ctx.getLeafFrame().delete(tuple, updateTupleIndex);
        } else {
            ctx.getModificationCallback().found(null, tuple);
        }
        ctx.getLeafFrame().split(rightFrame, tuple, ctx.getSplitKey(), ctx, bufferCache);
        ctx.getSmPages().add(pageId);
        ctx.getSmPages().add(rightPageId);
        ctx.getLeafFrame().setSmFlag(true);
        rightFrame.setSmFlag(true);
        rightFrame.setNextLeaf(ctx.getLeafFrame().getNextLeaf());
        ctx.getLeafFrame().setNextLeaf(rightPageId);
        rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
        ctx.getLeafFrame().setPageLsn(ctx.getLeafFrame().getPageLsn() + 1);
        ctx.getSplitKey().setPages(pageId, rightPageId);
    } catch (Exception e) {
        treeLatch.writeLock().unlock();
        throw e;
    } finally {
        rightNode.releaseWriteLatch(true);
        bufferCache.unpin(rightNode);
    }
    return false;
}
Also used : ICachedPage(org.apache.hyracks.storage.common.buffercache.ICachedPage) IBTreeLeafFrame(org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame) ITupleReference(org.apache.hyracks.dataflow.common.data.accessors.ITupleReference) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException)

Aggregations

ITupleReference (org.apache.hyracks.dataflow.common.data.accessors.ITupleReference) 149
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException) 80
ArrayList (java.util.ArrayList) 40
ACIDException (org.apache.asterix.common.exceptions.ACIDException) 31
MetadataEntityValueExtractor (org.apache.asterix.metadata.valueextractors.MetadataEntityValueExtractor) 26
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) 20
Test (org.junit.Test) 20
RangePredicate (org.apache.hyracks.storage.am.btree.impls.RangePredicate) 18
IIndexCursor (org.apache.hyracks.storage.common.IIndexCursor) 18
Dataset (org.apache.asterix.metadata.entities.Dataset) 10
MultiComparator (org.apache.hyracks.storage.common.MultiComparator) 10
CheckTuple (org.apache.hyracks.storage.am.common.CheckTuple) 8
ITreeIndexAccessor (org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor) 8
ITreeIndexCursor (org.apache.hyracks.storage.am.common.api.ITreeIndexCursor) 8
ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder) 7
ILSMDiskComponentBulkLoader (org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentBulkLoader) 7
SearchPredicate (org.apache.hyracks.storage.am.rtree.impls.SearchPredicate) 7
ExtensionMetadataDataset (org.apache.asterix.metadata.api.ExtensionMetadataDataset) 6
Datatype (org.apache.asterix.metadata.entities.Datatype) 6
ArrayTupleReference (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference) 6
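
Several of the aggregated classes above (ArrayTupleBuilder, ArrayTupleReference, ISerializerDeserializer) are the usual building blocks for constructing an ITupleReference by hand in tests. A minimal sketch, assuming the standard Hyracks marshalling serializers from org.apache.hyracks.dataflow.common.data.marshalling and java.io.DataOutput; the buildTuple name and the field values are illustrative only:

// Illustrative only: build a two-field (int, string) tuple and expose it as an ITupleReference.
private static ITupleReference buildTuple() throws HyracksDataException {
    ArrayTupleBuilder builder = new ArrayTupleBuilder(2);
    DataOutput out = builder.getDataOutput();
    builder.reset();
    IntegerSerializerDeserializer.INSTANCE.serialize(42, out);
    builder.addFieldEndOffset();                // close out field 0
    new UTF8StringSerializerDeserializer().serialize("row_id", out);
    builder.addFieldEndOffset();                // close out field 1
    ArrayTupleReference tuple = new ArrayTupleReference();
    tuple.reset(builder.getFieldEndOffsets(), builder.getByteArray());
    return tuple;                               // ArrayTupleReference implements ITupleReference
}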