Usage example of org.apache.hyracks.api.comm.IFrameWriter from the Apache AsterixDB project: class FramewriterTest, method createOutputWriters.
/**
 * Creates mock {@link IFrameWriter}s covering the full cross product of
 * open/nextFrame/fail/close behaviors (each one of: normal, exception, error),
 * i.e. 3^4 = 81 distinct writers.
 *
 * @return one mocked writer per behavior combination
 * @throws Exception if configuring a mock fails
 */
public IFrameWriter[] createOutputWriters() throws Exception {
    CountAnswer[] opens = new CountAnswer[] { openNormal, openException, openError };
    CountAnswer[] nextFrames = new CountAnswer[] { nextFrameNormal, nextFrameException, nextFrameError };
    CountAnswer[] fails = new CountAnswer[] { failNormal, failException, failError };
    CountAnswer[] closes = new CountAnswer[] { closeNormal, closeException, closeError };
    // Presize: the number of combinations is known up front.
    List<IFrameWriter> outputWriters = new ArrayList<>(opens.length * nextFrames.length * fails.length * closes.length);
    for (CountAnswer openAnswer : opens) {
        for (CountAnswer nextFrameAnswer : nextFrames) {
            for (CountAnswer failAnswer : fails) {
                for (CountAnswer closeAnswer : closes) {
                    IFrameWriter writer = Mockito.mock(IFrameWriter.class);
                    Mockito.doAnswer(openAnswer).when(writer).open();
                    Mockito.doAnswer(nextFrameAnswer).when(writer).nextFrame(Mockito.any());
                    Mockito.doAnswer(failAnswer).when(writer).fail();
                    Mockito.doAnswer(closeAnswer).when(writer).close();
                    outputWriters.add(writer);
                }
            }
        }
    }
    // Idiomatic toArray: passing an empty array lets the collection allocate
    // the exactly-sized result (recommended form of Collection.toArray).
    return outputWriters.toArray(new IFrameWriter[0]);
}
Usage example of org.apache.hyracks.api.comm.IFrameWriter from the Apache AsterixDB project: class FramewriterTest, method testBTreeSearchOperatorNodePushable.
/**
 * Drives every writer under test against every mocked downstream writer,
 * repeating the whole matrix once per appender configuration.
 * The {@code counter} field records the number of fully completed
 * appender iterations (so on an exception it reflects the last finished one).
 */
private void testBTreeSearchOperatorNodePushable() throws Exception {
    /*
     * coverage
     * in open(){
     * writer.open() succeeds vs. throws exception vs. throws error
     * indexHelper.open() succeeds vs. throws exception
     * createAccessor() succeeds vs. throws exception
     * }
     * in nextFrame(){
     * indexAccessor.search succeeds vs. throws exception
     * writeSearchResults succeeds vs. throws exception vs. throws error
     * }
     * in fail(){
     * writer.fail() succeeds, throws exception, or throws error
     * }
     * in close(){
     * appender.close() succeeds, throws exception, or throws error
     * }
     */
    counter = 0;
    // Idiomatic for-loop replaces the manual while/i++ counter; i runs
    // 1..NUMBER_OF_APPENDERS exactly as the original incremented-at-top loop did.
    for (int i = 1; i <= NUMBER_OF_APPENDERS; i++) {
        ByteBuffer buffer = mockByteBuffer();
        IFrameWriter[] outputFrameWriters = createOutputWriters();
        for (IFrameWriter outputWriter : outputFrameWriters) {
            IFrameWriter[] underTest = createWriters();
            for (IFrameWriter writer : underTest) {
                ((AbstractUnaryOutputOperatorNodePushable) writer).setOutputFrameWriter(0, outputWriter,
                        mockRecordDescriptor());
                testWriter(writer, buffer);
            }
        }
        // Mark this appender iteration as fully completed.
        counter = i;
    }
}
Usage example of org.apache.hyracks.api.comm.IFrameWriter from the Apache AsterixDB project: class AlgebricksMetaOperatorDescriptor, method createSourceInputPushRuntime.
/**
 * Creates the push runtime for a source (zero-input) meta operator: the inner
 * micro-op pipeline is assembled on initialize() and driven through its
 * open/fail/close lifecycle.
 *
 * @param ctx the task context used to assemble the pipeline
 * @return a source push runtime wrapping the assembled pipeline
 */
private IOperatorNodePushable createSourceInputPushRuntime(final IHyracksTaskContext ctx) {
    return new AbstractUnaryOutputSourceOperatorNodePushable() {
        @Override
        public void initialize() throws HyracksDataException {
            // An output record descriptor only exists when the operator has outputs.
            RecordDescriptor outDesc = null;
            if (outputArity > 0) {
                outDesc = AlgebricksMetaOperatorDescriptor.this.outRecDescs[0];
            }
            PipelineAssembler assembler =
                    new PipelineAssembler(pipeline, inputArity, outputArity, null, outDesc);
            IFrameWriter head = assembler.assemblePipeline(writer, ctx);
            // Lifecycle contract: fail() on any exception from open(), always close().
            try {
                head.open();
            } catch (Exception e) {
                head.fail();
                throw e;
            } finally {
                head.close();
            }
        }
    };
}
Usage example of org.apache.hyracks.api.comm.IFrameWriter from the Apache AsterixDB project: class AlgebricksMetaOperatorDescriptor, method createOneInputOneOutputPushRuntime.
/**
 * Creates the push runtime for a one-input/one-output meta operator. The inner
 * micro-op pipeline is assembled lazily on the first open() and cached; all
 * IFrameWriter calls are then delegated to the pipeline head.
 *
 * @param ctx the task context used to assemble the pipeline
 * @param recordDescProvider supplies the operator's input record descriptor
 * @return a unary-input/unary-output push runtime wrapping the pipeline
 */
private IOperatorNodePushable createOneInputOneOutputPushRuntime(final IHyracksTaskContext ctx,
        final IRecordDescriptorProvider recordDescProvider) {
    return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
        // Head of the lazily assembled pipeline; null until the first open().
        private IFrameWriter pipelineHead;

        // Assembles the micro-op pipeline, wiring input and (optional) output
        // record descriptors.
        private IFrameWriter assemble() throws HyracksDataException {
            RecordDescriptor outDesc =
                    outputArity > 0 ? AlgebricksMetaOperatorDescriptor.this.outRecDescs[0] : null;
            RecordDescriptor inDesc = recordDescProvider
                    .getInputRecordDescriptor(AlgebricksMetaOperatorDescriptor.this.getActivityId(), 0);
            PipelineAssembler assembler =
                    new PipelineAssembler(pipeline, inputArity, outputArity, inDesc, outDesc);
            return assembler.assemblePipeline(writer, ctx);
        }

        @Override
        public void open() throws HyracksDataException {
            if (pipelineHead == null) {
                pipelineHead = assemble();
            }
            pipelineHead.open();
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            pipelineHead.nextFrame(buffer);
        }

        @Override
        public void close() throws HyracksDataException {
            pipelineHead.close();
        }

        @Override
        public void fail() throws HyracksDataException {
            pipelineHead.fail();
        }

        @Override
        public void flush() throws HyracksDataException {
            pipelineHead.flush();
        }

        @Override
        public String toString() {
            return AlgebricksMetaOperatorDescriptor.this.toString();
        }
    };
}
Usage example of org.apache.hyracks.api.comm.IFrameWriter from the Apache AsterixDB project: class SubplanRuntimeFactory, method createOneOutputPushRuntime.
@Override
public AbstractOneInputOneOutputPushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx) throws HyracksDataException {
RecordDescriptor pipelineOutputRecordDescriptor = null;
final PipelineAssembler pa = new PipelineAssembler(pipeline, 1, 1, inputRecordDesc, pipelineOutputRecordDescriptor);
final IMissingWriter[] nullWriters = new IMissingWriter[missingWriterFactories.length];
for (int i = 0; i < missingWriterFactories.length; i++) {
nullWriters[i] = missingWriterFactories[i].createMissingWriter();
}
return new AbstractOneInputOneOutputOneFramePushRuntime() {
/**
* Computes the outer product between a given tuple and the frames
* passed.
*/
class TupleOuterProduct implements IFrameWriter {
private boolean smthWasWritten = false;
private FrameTupleAccessor ta = new FrameTupleAccessor(pipeline.getRecordDescriptors()[pipeline.getRecordDescriptors().length - 1]);
private ArrayTupleBuilder tb = new ArrayTupleBuilder(nullWriters.length);
@Override
public void open() throws HyracksDataException {
smthWasWritten = false;
}
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
ta.reset(buffer);
int nTuple = ta.getTupleCount();
for (int t = 0; t < nTuple; t++) {
appendConcat(tRef.getFrameTupleAccessor(), tRef.getTupleIndex(), ta, t);
}
smthWasWritten = true;
}
@Override
public void close() throws HyracksDataException {
if (!smthWasWritten) {
// the case when we need to write nulls
appendNullsToTuple();
appendToFrameFromTupleBuilder(tb);
}
}
@Override
public void fail() throws HyracksDataException {
writer.fail();
}
private void appendNullsToTuple() throws HyracksDataException {
tb.reset();
int n0 = tRef.getFieldCount();
for (int f = 0; f < n0; f++) {
tb.addField(tRef.getFrameTupleAccessor(), tRef.getTupleIndex(), f);
}
DataOutput dos = tb.getDataOutput();
for (int i = 0; i < nullWriters.length; i++) {
nullWriters[i].writeMissing(dos);
tb.addFieldEndOffset();
}
}
}
IFrameWriter endPipe = new TupleOuterProduct();
NestedTupleSourceRuntime startOfPipeline = (NestedTupleSourceRuntime) pa.assemblePipeline(endPipe, ctx);
boolean first = true;
@Override
public void open() throws HyracksDataException {
writer.open();
if (first) {
first = false;
initAccessAppendRef(ctx);
}
}
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
tAccess.reset(buffer);
int nTuple = tAccess.getTupleCount();
for (int t = 0; t < nTuple; t++) {
tRef.reset(tAccess, t);
startOfPipeline.writeTuple(buffer, t);
try {
startOfPipeline.open();
} catch (Exception e) {
startOfPipeline.fail();
throw e;
} finally {
startOfPipeline.close();
}
}
}
@Override
public void flush() throws HyracksDataException {
writer.flush();
}
};
}
Aggregations