Use of org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference in project asterixdb by Apache: the open() method of the class LSMInsertDeleteOperatorNodePushable.
@Override
public void open() throws HyracksDataException {
    accessor = new FrameTupleAccessor(inputRecDesc);
    writeBuffer = new VSizeFrame(ctx);
    appender = new FrameTupleAppender(writeBuffer);
    indexHelper.open();
    lsmIndex = (AbstractLSMIndex) indexHelper.getIndexInstance();
    try {
        if (isPrimary && ctx.getSharedObject() != null) {
            PrimaryIndexLogMarkerCallback callback = new PrimaryIndexLogMarkerCallback(lsmIndex);
            TaskUtil.putInSharedMap(ILogMarkerCallback.KEY_MARKER_CALLBACK, callback, ctx);
        }
        writer.open();
        modCallback =
                modOpCallbackFactory.createModificationOperationCallback(indexHelper.getResource(), ctx, this);
        indexAccessor = lsmIndex.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
        if (tupleFilterFactory != null) {
            tupleFilter = tupleFilterFactory.createTupleFilter(ctx);
            frameTuple = new FrameTupleReference();
        }
        INcApplicationContext runtimeCtx =
                (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
        LSMIndexUtil.checkAndSetFirstLSN(lsmIndex, runtimeCtx.getTransactionSubsystem().getLogManager());
    } catch (Throwable th) {
        throw new HyracksDataException(th);
    }
}
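The frameTuple created above only comes into play on the per-frame insert/delete path: when a tuple filter was configured, the reference is pointed at each incoming tuple before the filter decides whether the tuple reaches the index. The following is a minimal, hypothetical sketch of that pattern; the loop variable, the insert-only dispatch, and the pass-through flush at the end are simplifications, not the exact asterixdb code.

@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    accessor.reset(buffer);                      // re-point the accessor at the new frame
    int tupleCount = accessor.getTupleCount();
    try {
        for (int i = 0; i < tupleCount; i++) {
            if (tupleFilter != null) {
                frameTuple.reset(accessor, i);   // zero-copy view of tuple i in the frame
                if (!tupleFilter.accept(frameTuple)) {
                    continue;                    // skip tuples rejected by the filter
                }
            }
            tuple.reset(accessor, i);            // 'tuple' is assumed to be another FrameTupleReference field
            indexAccessor.insert(tuple);         // a real operator would dispatch on the configured operation
        }
    } catch (Exception e) {
        throw new HyracksDataException(e);
    }
    FrameUtils.flushFrame(buffer, writer);       // forward the frame downstream unchanged
}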
Use of org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference in project asterixdb by Apache: the open() method of the class IndexSearchOperatorNodePushable.
@Override
public void open() throws HyracksDataException {
    writer.open();
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    accessor = new FrameTupleAccessor(inputRecDesc);
    if (retainMissing) {
        int fieldCount = getFieldCount();
        nonMatchTupleBuild = new ArrayTupleBuilder(fieldCount);
        buildMissingTuple(fieldCount, nonMatchTupleBuild, nonMatchWriter);
    } else {
        nonMatchTupleBuild = null;
    }
    if (appendIndexFilter) {
        int numIndexFilterFields = index.getNumOfFilterFields();
        nonFilterTupleBuild = new ArrayTupleBuilder(numIndexFilterFields);
        buildMissingTuple(numIndexFilterFields, nonFilterTupleBuild, nonMatchWriter);
    }
    try {
        searchPred = createSearchPredicate();
        tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
        dos = tb.getDataOutput();
        appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
        ISearchOperationCallback searchCallback =
                searchCallbackFactory.createSearchOperationCallback(indexHelper.getResource().getId(), ctx, null);
        indexAccessor = index.createAccessor(NoOpOperationCallback.INSTANCE, searchCallback);
        cursor = createCursor();
        if (retainInput) {
            frameTuple = new FrameTupleReference();
        }
    } catch (Exception e) {
        throw new HyracksDataException(e);
    }
}
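The frameTuple allocated under retainInput above is used only when search results are written out: it is reset to the probing input tuple so that the input's fields can be copied in front of every matching index entry. A simplified, hypothetical sketch of that write loop follows; the field names and the appender call follow common Hyracks usage, not necessarily the literal asterixdb method, and the broad throws clause is kept because the cursor and stream calls throw different checked exceptions.

private void writeSearchResults(int tupleIndex) throws Exception {
    while (cursor.hasNext()) {
        cursor.next();
        tb.reset();
        if (retainInput) {
            frameTuple.reset(accessor, tupleIndex);      // point at the current probe tuple
            for (int i = 0; i < frameTuple.getFieldCount(); i++) {
                dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
                tb.addFieldEndOffset();
            }
        }
        ITupleReference matched = cursor.getTuple();     // the index entry found by the search
        for (int i = 0; i < matched.getFieldCount(); i++) {
            dos.write(matched.getFieldData(i), matched.getFieldStart(i), matched.getFieldLength(i));
            tb.addFieldEndOffset();
        }
        FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
    }
}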
Use of org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference in project asterixdb by Apache: the open() method of the class LSMPrimaryUpsertOperatorNodePushable.
// we have the permutation which has [pk locations, record location, optional:filter-location]
// the index -> we don't need anymore data?
// we need to use the primary index opTracker and secondary indexes callbacks for insert/delete since the lock would
// have been obtained through searchForUpsert operation
@Override
public void open() throws HyracksDataException {
    accessor = new FrameTupleAccessor(inputRecDesc);
    writeBuffer = new VSizeFrame(ctx);
    writer.open();
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    try {
        if (ctx.getSharedObject() != null) {
            PrimaryIndexLogMarkerCallback callback = new PrimaryIndexLogMarkerCallback((AbstractLSMIndex) index);
            TaskUtil.putInSharedMap(ILogMarkerCallback.KEY_MARKER_CALLBACK, callback, ctx);
        }
        missingTupleBuilder = new ArrayTupleBuilder(1);
        DataOutput out = missingTupleBuilder.getDataOutput();
        try {
            missingWriter.writeMissing(out);
        } catch (IOException e) {
            throw new HyracksDataException(e);
        }
        missingTupleBuilder.addFieldEndOffset();
        searchPred = createSearchPredicate();
        tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
        dos = tb.getDataOutput();
        appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
        modCallback =
                modOpCallbackFactory.createModificationOperationCallback(indexHelper.getResource(), ctx, this);
        abstractModCallback = (AbstractIndexModificationOperationCallback) modCallback;
        searchCallback = (LockThenSearchOperationCallback) searchCallbackFactory
                .createSearchOperationCallback(indexHelper.getResource().getId(), ctx, this);
        indexAccessor = index.createAccessor(abstractModCallback, searchCallback);
        lsmAccessor = (LSMTreeIndexAccessor) indexAccessor;
        cursor = indexAccessor.createSearchCursor(false);
        frameTuple = new FrameTupleReference();
        INcApplicationContext appCtx =
                (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
        LSMIndexUtil.checkAndSetFirstLSN((AbstractLSMIndex) index, appCtx.getTransactionSubsystem().getLogManager());
        frameOpCallback = new IFrameOperationCallback() {
            IFrameOperationCallback callback =
                    frameOpCallbackFactory.createFrameOperationCallback(ctx, (ILSMIndexAccessor) indexAccessor);

            @Override
            public void frameCompleted() throws HyracksDataException {
                callback.frameCompleted();
                appender.write(writer, true);
            }
        };
    } catch (Exception e) {
        indexHelper.close();
        throw new HyracksDataException(e);
    }
}
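The pieces set up above meet in the per-frame upsert flow, which is only outlined here because the real method is much longer: each input tuple is looked up first (under searchCallback, which also takes the lock), the previous record or the prebuilt MISSING is appended to the output, the new record is upserted through lsmAccessor, and frameOpCallback.frameCompleted() fires once the whole frame is done, flushing appender to writer as shown in open(). A heavily simplified, hypothetical outline, not the actual asterixdb implementation:

@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    accessor.reset(buffer);
    int tupleCount = accessor.getTupleCount();
    for (int i = 0; i < tupleCount; i++) {
        frameTuple.reset(accessor, i);        // current input tuple: [pk fields, record, optional filter]
        // 1. search for a previous version of the key through 'cursor'/'searchCallback'
        // 2. append the previous record (or the prebuilt MISSING) plus the new record to 'tb'
        // 3. upsert the new record through 'lsmAccessor'
        // ... details omitted in this sketch ...
    }
    frameOpCallback.frameCompleted();         // flushes 'appender' to 'writer', see the callback in open()
}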
Use of org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference in project asterixdb by Apache: the createOneOutputPushRuntime() method of the class AggregateRuntimeFactory.
@Override
public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx)
        throws HyracksDataException {
    return new AbstractOneInputOneOutputOneFramePushRuntime() {

        private IAggregateEvaluator[] aggregs = new IAggregateEvaluator[aggregFactories.length];
        private IPointable result = VoidPointable.FACTORY.createPointable();
        private ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(aggregs.length);
        private boolean first = true;
        private boolean isOpen = false;

        @Override
        public void open() throws HyracksDataException {
            if (first) {
                first = false;
                initAccessAppendRef(ctx);
                for (int i = 0; i < aggregFactories.length; i++) {
                    aggregs[i] = aggregFactories[i].createAggregateEvaluator(ctx);
                }
            }
            for (int i = 0; i < aggregFactories.length; i++) {
                aggregs[i].init();
            }
            isOpen = true;
            writer.open();
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            tAccess.reset(buffer);
            int nTuple = tAccess.getTupleCount();
            for (int t = 0; t < nTuple; t++) {
                tRef.reset(tAccess, t);
                processTuple(tRef);
            }
        }

        @Override
        public void close() throws HyracksDataException {
            if (isOpen) {
                try {
                    computeAggregate();
                    appendToFrameFromTupleBuilder(tupleBuilder);
                } finally {
                    super.close();
                }
            }
        }

        private void computeAggregate() throws HyracksDataException {
            tupleBuilder.reset();
            for (int f = 0; f < aggregs.length; f++) {
                aggregs[f].finish(result);
                tupleBuilder.addField(result.getByteArray(), result.getStartOffset(), result.getLength());
            }
        }

        private void processTuple(FrameTupleReference tupleRef) throws HyracksDataException {
            for (int f = 0; f < aggregs.length; f++) {
                aggregs[f].step(tupleRef);
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            if (isOpen) {
                writer.fail();
            }
        }
    };
}
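The detail worth noting is that tRef, set up by initAccessAppendRef() in the base class, is a single reusable FrameTupleReference: resetting it per tuple costs little more than two field assignments, and the aggregate evaluators only ever see the IFrameTupleReference interface. A hypothetical standalone helper showing the same iteration pattern outside the runtime hierarchy (names are illustrative, not from the asterixdb source):

// Feed every tuple of one frame to an already-initialized aggregate evaluator.
static void stepOverFrame(ByteBuffer frame, RecordDescriptor recDesc, IAggregateEvaluator agg)
        throws HyracksDataException {
    FrameTupleAccessor accessor = new FrameTupleAccessor(recDesc);
    FrameTupleReference ref = new FrameTupleReference();    // reused for every tuple, nothing is copied
    accessor.reset(frame);
    for (int t = 0; t < accessor.getTupleCount(); t++) {
        ref.reset(accessor, t);                              // point at tuple t inside the frame
        agg.step(ref);                                       // the evaluator reads fields through the reference
    }
}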
Use of org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference in project asterixdb by Apache: the createOneOutputPushRuntime() method of the class AssignRuntimeFactory.
@Override
public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx)
        throws HyracksDataException {
    final int[] projectionToOutColumns = new int[projectionList.length];
    for (int j = 0; j < projectionList.length; j++) {
        projectionToOutColumns[j] = Arrays.binarySearch(outColumns, projectionList[j]);
    }
    return new AbstractOneInputOneOutputOneFramePushRuntime() {

        private IPointable result = VoidPointable.FACTORY.createPointable();
        private IScalarEvaluator[] eval = new IScalarEvaluator[evalFactories.length];
        private ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(projectionList.length);
        private boolean first = true;
        private boolean isOpen = false;
        private int tupleIndex = 0;

        @Override
        public void open() throws HyracksDataException {
            if (first) {
                initAccessAppendRef(ctx);
                first = false;
                int n = evalFactories.length;
                for (int i = 0; i < n; i++) {
                    eval[i] = evalFactories[i].createScalarEvaluator(ctx);
                }
            }
            isOpen = true;
            writer.open();
        }

        @Override
        public void close() throws HyracksDataException {
            if (isOpen) {
                super.close();
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            // what if nTuple is 0?
            tAccess.reset(buffer);
            int nTuple = tAccess.getTupleCount();
            if (nTuple < 1) {
                if (nTuple < 0) {
                    throw new HyracksDataException("Negative number of tuples in the frame: " + nTuple);
                }
                appender.flush(writer);
            } else {
                if (nTuple > 1) {
                    for (; tupleIndex < nTuple - 1; tupleIndex++) {
                        tRef.reset(tAccess, tupleIndex);
                        produceTuple(tupleBuilder, tAccess, tupleIndex, tRef);
                        appendToFrameFromTupleBuilder(tupleBuilder);
                    }
                }
                if (tupleIndex < nTuple) {
                    tRef.reset(tAccess, tupleIndex);
                    produceTuple(tupleBuilder, tAccess, tupleIndex, tRef);
                    if (flushFramesRapidly) {
                        // Whenever all the tuples in the incoming frame have been consumed, the assign operator
                        // will push its frame to the next operator; i.e., it won't wait until the frame gets full.
                        appendToFrameFromTupleBuilder(tupleBuilder, true);
                    } else {
                        appendToFrameFromTupleBuilder(tupleBuilder);
                    }
                } else {
                    if (flushFramesRapidly) {
                        flushAndReset();
                    }
                }
            }
            tupleIndex = 0;
        }

        private void produceTuple(ArrayTupleBuilder tb, IFrameTupleAccessor accessor, int tIndex,
                FrameTupleReference tupleRef) throws HyracksDataException {
            try {
                tb.reset();
                for (int f = 0; f < projectionList.length; f++) {
                    int k = projectionToOutColumns[f];
                    if (k >= 0) {
                        eval[k].evaluate(tupleRef, result);
                        tb.addField(result.getByteArray(), result.getStartOffset(), result.getLength());
                    } else {
                        tb.addField(accessor, tIndex, projectionList[f]);
                    }
                }
            } catch (HyracksDataException e) {
                throw HyracksDataException.create(ErrorCode.ERROR_PROCESSING_TUPLE, e, tupleIndex);
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            if (isOpen) {
                super.fail();
            }
        }

        @Override
        public void flush() throws HyracksDataException {
            appender.flush(writer);
        }
    };
}
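As in the aggregate runtime, tRef is just a movable window over the input frame; the scalar evaluators never copy the tuple, they read fields through the reference and write their output into an IPointable. A hypothetical helper condensing that evaluate-and-collect step (illustrative names, not from the asterixdb source):

// Evaluate one expression against tuple 't' of a frame and return the result bytes.
static byte[] evaluateOnTuple(FrameTupleAccessor accessor, int t, IScalarEvaluator eval)
        throws HyracksDataException {
    FrameTupleReference ref = new FrameTupleReference();
    IPointable result = VoidPointable.FACTORY.createPointable();
    ref.reset(accessor, t);                  // no copy: the reference only records (accessor, tupleIndex)
    eval.evaluate(ref, result);              // the evaluator writes its output into 'result'
    return Arrays.copyOfRange(result.getByteArray(), result.getStartOffset(),
            result.getStartOffset() + result.getLength());
}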