Use of org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable in project asterixdb by apache.
From class ExternalLookupOperatorDescriptor, method createPushRuntime:
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
        final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
        throws HyracksDataException {
    // Create a file index accessor to be used for file lookup operations
    final ExternalFileIndexAccessor snapshotAccessor = new ExternalFileIndexAccessor(
            dataflowHelperFactory.create(ctx, partition), searchOpCallbackFactory, version);
    return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {

        // The adapter that uses the file index along with the incoming tuples to access files in HDFS
        private LookupAdapter<?> adapter;

        private boolean indexOpen = false;

        @Override
        public void open() throws HyracksDataException {
            try {
                adapter = adapterFactory.createAdapter(ctx, partition,
                        recordDescProvider.getInputRecordDescriptor(getActivityId(), 0), snapshotAccessor, writer);
                // Open the file index accessor here
                snapshotAccessor.open();
                indexOpen = true;
                adapter.open();
            } catch (Throwable th) {
                throw new HyracksDataException(th);
            }
        }

        @Override
        public void close() throws HyracksDataException {
            HyracksDataException hde = null;
            if (indexOpen) {
                try {
                    snapshotAccessor.close();
                } catch (Throwable th) {
                    hde = new HyracksDataException(th);
                }
                try {
                    adapter.close();
                } catch (Throwable th) {
                    if (hde == null) {
                        hde = new HyracksDataException(th);
                    } else {
                        hde.addSuppressed(th);
                    }
                }
                // Re-throw the first failure, with any later failures attached as suppressed
                if (hde != null) {
                    throw hde;
                }
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            try {
                adapter.fail();
            } catch (Throwable th) {
                throw new HyracksDataException(th);
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            try {
                adapter.nextFrame(buffer);
            } catch (Throwable th) {
                throw new HyracksDataException(th);
            }
        }

        @Override
        public void flush() throws HyracksDataException {
            adapter.flush();
        }
    };
}
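The close() above shows a cleanup idiom worth calling out: each resource is closed in its own try block, the first failure is remembered, later failures are attached via addSuppressed, and only then is an exception rethrown, so one failing close cannot hide another. Below is a minimal, framework-free sketch of the same idiom; it is not AsterixDB code, and the names CloseAllSketch and closeAll are illustrative.

import java.util.Arrays;
import java.util.List;

public final class CloseAllSketch {
    // Close every resource, remember the first failure, attach later ones as suppressed.
    static void closeAll(List<AutoCloseable> resources) throws Exception {
        Exception first = null;
        for (AutoCloseable r : resources) {
            try {
                r.close();
            } catch (Exception e) {
                if (first == null) {
                    first = e;               // remember the first failure
                } else {
                    first.addSuppressed(e);  // keep later failures visible
                }
            }
        }
        if (first != null) {
            throw first;
        }
    }

    public static void main(String[] args) throws Exception {
        closeAll(Arrays.asList(() -> System.out.println("index closed"),
                () -> System.out.println("adapter closed")));
    }
}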
Use of org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable in project asterixdb by apache.
From class LimitOperatorDescriptor, method createPushRuntime:
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
        final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
        throws HyracksDataException {
    return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {

        private FrameTupleAccessor fta;
        private int currentSize;
        private boolean finished;

        @Override
        public void open() throws HyracksDataException {
            fta = new FrameTupleAccessor(outRecDescs[0]);
            currentSize = 0;
            finished = false;
            writer.open();
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            if (!finished) {
                fta.reset(buffer);
                int count = fta.getTupleCount();
                if ((currentSize + count) > outputLimit) {
                    // The limit falls inside this frame: copy only the tuples still needed
                    FrameTupleAppender partialAppender = new FrameTupleAppender(new VSizeFrame(ctx));
                    int copyCount = outputLimit - currentSize;
                    for (int i = 0; i < copyCount; i++) {
                        FrameUtils.appendToWriter(writer, partialAppender, fta, i);
                        currentSize++;
                    }
                    partialAppender.write(writer, false);
                    finished = true;
                } else {
                    // The whole frame fits under the limit: forward it unchanged
                    FrameUtils.flushFrame(buffer, writer);
                    currentSize += count;
                }
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            writer.fail();
        }

        @Override
        public void close() throws HyracksDataException {
            writer.close();
        }

        @Override
        public void flush() throws HyracksDataException {
            writer.flush();
        }
    };
}
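For reference, the counting logic in nextFrame() can be stated independently of frames and appenders: forward whole batches while the running count stays at or under the limit, and when a batch would cross the limit, copy only the tuples still needed and ignore everything after. A minimal sketch of that logic over plain lists follows; it is not AsterixDB code, and LimitSketch and limitFrames are illustrative names.

import java.util.ArrayList;
import java.util.List;

public final class LimitSketch {
    static <T> List<List<T>> limitFrames(List<List<T>> frames, int outputLimit) {
        List<List<T>> out = new ArrayList<>();
        int currentSize = 0;
        boolean finished = false;
        for (List<T> frame : frames) {
            if (finished) {
                break;
            }
            int count = frame.size();
            if (currentSize + count > outputLimit) {
                // Partial frame: keep only the tuples needed to reach the limit
                int copyCount = outputLimit - currentSize;
                out.add(new ArrayList<>(frame.subList(0, copyCount)));
                currentSize += copyCount;
                finished = true;
            } else {
                // Whole frame fits under the limit: forward it unchanged
                out.add(frame);
                currentSize += count;
            }
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(limitFrames(List.of(List.of(1, 2, 3), List.of(4, 5, 6)), 4));
        // prints [[1, 2, 3], [4]]
    }
}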
Use of org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable in project asterixdb by apache.
From class AlgebricksMetaOperatorDescriptor, method createOneInputOneOutputPushRuntime:
private IOperatorNodePushable createOneInputOneOutputPushRuntime(final IHyracksTaskContext ctx,
        final IRecordDescriptorProvider recordDescProvider) {
    return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {

        private IFrameWriter startOfPipeline;

        @Override
        public void open() throws HyracksDataException {
            if (startOfPipeline == null) {
                // Assemble the micro-operator pipeline lazily, on the first open()
                RecordDescriptor pipelineOutputRecordDescriptor =
                        outputArity > 0 ? AlgebricksMetaOperatorDescriptor.this.outRecDescs[0] : null;
                RecordDescriptor pipelineInputRecordDescriptor = recordDescProvider
                        .getInputRecordDescriptor(AlgebricksMetaOperatorDescriptor.this.getActivityId(), 0);
                PipelineAssembler pa = new PipelineAssembler(pipeline, inputArity, outputArity,
                        pipelineInputRecordDescriptor, pipelineOutputRecordDescriptor);
                startOfPipeline = pa.assemblePipeline(writer, ctx);
            }
            startOfPipeline.open();
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            startOfPipeline.nextFrame(buffer);
        }

        @Override
        public void close() throws HyracksDataException {
            startOfPipeline.close();
        }

        @Override
        public void fail() throws HyracksDataException {
            startOfPipeline.fail();
        }

        @Override
        public void flush() throws HyracksDataException {
            startOfPipeline.flush();
        }

        @Override
        public String toString() {
            return AlgebricksMetaOperatorDescriptor.this.toString();
        }
    };
}
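The interesting detail here is the lazy assembly in open(): the chain of micro-operators is built exactly once, on the first open(), and the resulting head writer (startOfPipeline) is reused for every subsequent call; everything else simply forwards the IFrameWriter lifecycle to that head. A minimal sketch of that build-once-then-push idea, independent of Hyracks, follows; it is not AsterixDB code, and LazyPipelineSketch and its members are illustrative.

import java.util.List;
import java.util.function.UnaryOperator;

public final class LazyPipelineSketch {
    private UnaryOperator<String> head;                 // assembled chain, built once
    private final List<UnaryOperator<String>> stages;   // micro-operators, applied in list order

    LazyPipelineSketch(List<UnaryOperator<String>> stages) {
        this.stages = stages;
    }

    String push(String frame) {
        if (head == null) {
            // Assemble the pipeline only when the first frame arrives
            UnaryOperator<String> chain = s -> s;
            for (UnaryOperator<String> stage : stages) {
                UnaryOperator<String> prev = chain;
                chain = s -> stage.apply(prev.apply(s));
            }
            head = chain;
        }
        return head.apply(frame);
    }

    public static void main(String[] args) {
        LazyPipelineSketch p = new LazyPipelineSketch(List.of(s -> s + "-a", s -> s + "-b"));
        System.out.println(p.push("frame1")); // prints frame1-a-b
    }
}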