Use of org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference in the Apache asterixdb project.
Example: the HDFSWriteOperatorDescriptor class, method createPushRuntime.
/**
 * Creates the sink runtime that writes each incoming tuple to an HDFS file.
 * One output file named "part-&lt;partition&gt;" is created per partition under the
 * job's configured output directory.
 *
 * Fixes over the previous version:
 * - removed a stray empty statement after the inputRd initializer;
 * - close() now closes the HDFS stream even if the tuple writer fails to close;
 * - the context class loader is restored if open() throws (close() is not
 *   called by the framework after a failed open).
 */
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
        final IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
        throws HyracksDataException {
    return new AbstractUnaryInputSinkOperatorNodePushable() {

        // HDFS output stream for this partition's "part-<partition>" file.
        private FSDataOutputStream dos;

        private RecordDescriptor inputRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);

        private FrameTupleAccessor accessor = new FrameTupleAccessor(inputRd);

        private FrameTupleReference tuple = new FrameTupleReference();

        private ITupleWriter tupleWriter;

        // Saved context class loader; restored in close() or on open() failure.
        private ClassLoader ctxCL;

        @Override
        public void open() throws HyracksDataException {
            // Swap in this class's loader so Hadoop code that resolves classes
            // through the thread context loader can find them.
            ctxCL = Thread.currentThread().getContextClassLoader();
            Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
            JobConf conf = confFactory.getConf();
            String outputDirPath = FileOutputFormat.getOutputPath(conf).toString();
            String fileName = outputDirPath + File.separator + "part-" + partition;
            tupleWriter = tupleWriterFactory.getTupleWriter(ctx, partition, nPartitions);
            try {
                FileSystem dfs = FileSystem.get(conf);
                // overwrite=true: replace any pre-existing part file.
                dos = dfs.create(new Path(fileName), true);
                tupleWriter.open(dos);
            } catch (Exception e) {
                // close() will not be invoked after a failed open(), so restore
                // the class loader here to avoid leaking it onto the thread.
                Thread.currentThread().setContextClassLoader(ctxCL);
                throw new HyracksDataException(e);
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            accessor.reset(buffer);
            int tupleCount = accessor.getTupleCount();
            for (int i = 0; i < tupleCount; i++) {
                tuple.reset(accessor, i);
                tupleWriter.write(dos, tuple);
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            // Nothing to roll back; the partially-written part file is left as-is.
        }

        @Override
        public void close() throws HyracksDataException {
            try {
                // Close the writer first; always close the stream afterwards,
                // even if the writer's close throws.
                try {
                    tupleWriter.close(dos);
                } finally {
                    if (dos != null) {
                        dos.close();
                    }
                }
            } catch (Exception e) {
                throw new HyracksDataException(e);
            } finally {
                Thread.currentThread().setContextClassLoader(ctxCL);
            }
        }
    };
}
Use of org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference in the Apache asterixdb project.
Example: the ExternalRTreeSearchOperatorNodePushable class, method open.
// We override this method to specify the searched version of the index
@Override
public void open() throws HyracksDataException {
// Open the downstream writer first so results can be forwarded once the cursor is ready.
writer.open();
accessor = new FrameTupleAccessor(inputRecDesc);
indexHelper.open();
index = indexHelper.getIndexInstance();
if (retainMissing) {
// Pre-build a tuple with one "missing" value per output field, to be appended
// for input tuples that have no matching index entry.
int fieldCount = getFieldCount();
nonMatchTupleBuild = new ArrayTupleBuilder(fieldCount);
DataOutput out = nonMatchTupleBuild.getDataOutput();
for (int i = 0; i < fieldCount; i++) {
try {
nonMatchWriter.writeMissing(out);
} catch (IOException e) {
throw new HyracksDataException(e);
}
nonMatchTupleBuild.addFieldEndOffset();
}
} else {
nonMatchTupleBuild = null;
}
// Cast is what makes the version-aware createAccessor call below possible.
ExternalRTree rTreeIndex = (ExternalRTree) index;
try {
searchPred = createSearchPredicate();
tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
dos = tb.getDataOutput();
appender = new FrameTupleAppender(new VSizeFrame(ctx));
ISearchOperationCallback searchCallback = searchCallbackFactory.createSearchOperationCallback(indexHelper.getResource().getId(), ctx, null);
// The next line is the reason we override this method...
// The right thing to do would be to change the signature of createAccessor
indexAccessor = rTreeIndex.createAccessor(searchCallback, version);
cursor = createCursor();
if (retainInput) {
// Reusable reference onto the input frame, needed only when input tuples
// are carried through to the output.
frameTuple = new FrameTupleReference();
}
} catch (Exception e) {
throw new HyracksDataException(e);
}
}
Use of org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference in the Apache asterixdb project.
Example: the SerializableAggregatorDescriptorFactory class, method createAggregator.
/**
 * Creates an IAggregatorDescriptor whose per-aggregate state is serialized
 * inline in the group-by tuple: the first keys.length fields are the grouping
 * keys, followed by one state field per aggregate evaluator.
 */
@Override
public IAggregatorDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor, RecordDescriptor outRecordDescriptor, int[] keyFields, final int[] keyFieldsInPartialResults) throws HyracksDataException {
final int[] keys = keyFields;
/**
 * one IAggregatorDescriptor instance per Gby operator
 */
return new IAggregatorDescriptor() {
// Reusable reference to the current input tuple.
private FrameTupleReference ftr = new FrameTupleReference();
// Lazily created evaluators, one per aggregate function factory.
private ISerializedAggregateEvaluator[] aggs = new ISerializedAggregateEvaluator[aggFactories.length];
// Index of the first aggregate-state field (states come right after the keys).
private int offsetFieldIndex = keys.length;
// Serialized byte length of each aggregate's state, measured in init().
private int[] stateFieldLength = new int[aggFactories.length];
@Override
public AggregateState createAggregateStates() {
// All state lives serialized in the tuple itself, so the AggregateState is empty.
return new AggregateState();
}
@Override
public void init(ArrayTupleBuilder tb, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
DataOutput output = tb.getDataOutput();
ftr.reset(accessor, tIndex);
// Phase 1: write each aggregate's initial state into the builder and record its length.
for (int i = 0; i < aggs.length; i++) {
int begin = tb.getSize();
if (aggs[i] == null) {
aggs[i] = aggFactories[i].createAggregateEvaluator(ctx);
}
aggs[i].init(output);
tb.addFieldEndOffset();
stateFieldLength[i] = tb.getSize() - begin;
}
// doing initial aggregate
ftr.reset(accessor, tIndex);
// Phase 2: apply the first tuple to each freshly written state, in place in
// the builder's byte array.
for (int i = 0; i < aggs.length; i++) {
byte[] data = tb.getByteArray();
// Start of state i = end offset of the preceding field (keys + earlier states);
// 0 when there are no keys and i == 0.
int prevFieldPos = i + keys.length - 1;
int start = prevFieldPos >= 0 ? tb.getFieldEndOffsets()[prevFieldPos] : 0;
aggs[i].step(ftr, data, start, stateFieldLength[i]);
}
}
@Override
public void aggregate(IFrameTupleAccessor accessor, int tIndex, IFrameTupleAccessor stateAccessor, int stateTupleIndex, AggregateState state) throws HyracksDataException {
ftr.reset(accessor, tIndex);
int stateTupleStart = stateAccessor.getTupleStartOffset(stateTupleIndex);
int fieldSlotLength = stateAccessor.getFieldSlotsLength();
// Update each serialized state in place inside the state frame's buffer.
for (int i = 0; i < aggs.length; i++) {
byte[] data = stateAccessor.getBuffer().array();
// Absolute buffer offset of state field i (states start at field index keys.length).
int start = stateAccessor.getFieldStartOffset(stateTupleIndex, i + keys.length) + stateTupleStart + fieldSlotLength;
aggs[i].step(ftr, data, start, stateFieldLength[i]);
}
}
@Override
public boolean outputPartialResult(ArrayTupleBuilder tb, IFrameTupleAccessor stateAccessor, int tIndex, AggregateState state) throws HyracksDataException {
byte[] data = stateAccessor.getBuffer().array();
int startOffset = stateAccessor.getTupleStartOffset(tIndex);
int aggFieldOffset = stateAccessor.getFieldStartOffset(tIndex, offsetFieldIndex);
// Absolute offset of the first aggregate state field within the buffer.
int refOffset = startOffset + stateAccessor.getFieldSlotsLength() + aggFieldOffset;
int start = refOffset;
// States are laid out back-to-back; advance by each recorded state length.
// NOTE(review): relies on stateFieldLength having been populated by init()
// on this same instance — confirm the runtime guarantees that ordering.
for (int i = 0; i < aggs.length; i++) {
aggs[i].finishPartial(data, start, stateFieldLength[i], tb.getDataOutput());
start += stateFieldLength[i];
tb.addFieldEndOffset();
}
return true;
}
@Override
public boolean outputFinalResult(ArrayTupleBuilder tb, IFrameTupleAccessor stateAccessor, int tIndex, AggregateState state) throws HyracksDataException {
byte[] data = stateAccessor.getBuffer().array();
int startOffset = stateAccessor.getTupleStartOffset(tIndex);
int aggFieldOffset = stateAccessor.getFieldStartOffset(tIndex, offsetFieldIndex);
int refOffset = startOffset + stateAccessor.getFieldSlotsLength() + aggFieldOffset;
int start = refOffset;
// Same layout walk as outputPartialResult, but emitting final values.
for (int i = 0; i < aggs.length; i++) {
aggs[i].finish(data, start, stateFieldLength[i], tb.getDataOutput());
start += stateFieldLength[i];
tb.addFieldEndOffset();
}
return true;
}
@Override
public void reset() {
// No instance state to clear between groups; states live in the tuples.
}
@Override
public void close() {
reset();
}
};
}
Use of org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference in the Apache asterixdb project.
Example: the AbstractOneInputOneOutputOneFramePushRuntime class, method initAccessAppendRef.
// Initializes the frame accessor/appender (via initAccessAppend) and additionally
// creates the reusable tuple reference (tRef) used to point into the input frame.
protected final void initAccessAppendRef(IHyracksTaskContext ctx) throws HyracksDataException {
initAccessAppend(ctx);
tRef = new FrameTupleReference();
}
Use of org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference in the Apache asterixdb project.
Example: the IndexInsertUpdateDeleteOperatorNodePushable class, method open.
// Acquires the index instance and wires up the modification callback and accessor
// before any frames arrive. The optional tuple filter (and its frame reference)
// is created only when a filter factory was configured.
@Override
public void open() throws HyracksDataException {
accessor = new FrameTupleAccessor(inputRecDesc);
writeBuffer = new VSizeFrame(ctx);
indexHelper.open();
index = indexHelper.getIndexInstance();
try {
writer.open();
LocalResource resource = indexHelper.getResource();
modCallback = modOpCallbackFactory.createModificationOperationCallback(resource, ctx, this);
// NoOp search callback: this operator only modifies; it never searches.
indexAccessor = index.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
if (tupleFilterFactory != null) {
tupleFilter = tupleFilterFactory.createTupleFilter(ctx);
// Reusable reference handed to the filter for each input tuple.
frameTuple = new FrameTupleReference();
}
} catch (Exception e) {
throw new HyracksDataException(e);
}
}
Aggregations