Use of org.apache.hyracks.dataflow.std.group.AggregateState in project asterixdb by apache.
The class NestedPlansAccumulatingAggregatorFactory, method createAggregator.
@Override
public IAggregatorDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDesc, RecordDescriptor outRecordDescriptor, int[] keys, int[] partialKeys) throws HyracksDataException {
    final AggregatorOutput outputWriter = new AggregatorOutput(subplans, keyFieldIdx.length, decorFieldIdx.length);
    final NestedTupleSourceRuntime[] pipelines = new NestedTupleSourceRuntime[subplans.length];
    for (int i = 0; i < subplans.length; i++) {
        pipelines[i] = (NestedTupleSourceRuntime) assemblePipeline(subplans[i], outputWriter, ctx);
    }
    return new IAggregatorDescriptor() {

        @Override
        public void init(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            ArrayTupleBuilder tb = outputWriter.getTupleBuilder();
            tb.reset();
            for (int i = 0; i < keyFieldIdx.length; ++i) {
                tb.addField(accessor, tIndex, keyFieldIdx[i]);
            }
            for (int i = 0; i < decorFieldIdx.length; ++i) {
                tb.addField(accessor, tIndex, decorFieldIdx[i]);
            }
            for (int i = 0; i < pipelines.length; ++i) {
                pipelines[i].open();
            }
            // aggregate the first tuple
            for (int i = 0; i < pipelines.length; i++) {
                pipelines[i].writeTuple(accessor.getBuffer(), tIndex);
            }
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, IFrameTupleAccessor stateAccessor, int stateTupleIndex, AggregateState state) throws HyracksDataException {
            for (int i = 0; i < pipelines.length; i++) {
                pipelines[i].writeTuple(accessor.getBuffer(), tIndex);
            }
        }

        @Override
        public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor, int tIndex, AggregateState state) throws HyracksDataException {
            for (int i = 0; i < pipelines.length; i++) {
                outputWriter.setInputIdx(i);
                pipelines[i].close();
            }
            // outputWriter.writeTuple(appender);
            tupleBuilder.reset();
            ArrayTupleBuilder tb = outputWriter.getTupleBuilder();
            byte[] data = tb.getByteArray();
            int[] fieldEnds = tb.getFieldEndOffsets();
            int start = 0;
            int offset;
            for (int i = 0; i < fieldEnds.length; i++) {
                if (i > 0) {
                    start = fieldEnds[i - 1];
                }
                offset = fieldEnds[i] - start;
                tupleBuilder.addField(data, start, offset);
            }
            return true;
        }

        @Override
        public AggregateState createAggregateStates() {
            return new AggregateState();
        }

        @Override
        public void reset() {
        }

        @Override
        public boolean outputPartialResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            throw new IllegalStateException("this method should not be called");
        }

        @Override
        public void close() {
        }
    };
}
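In outputFinalResult above, the finished group tuple is copied field by field out of the output writer's ArrayTupleBuilder: the builder stores one packed byte array plus an array of field end offsets, and each field's start is the previous field's end. Below is a minimal, Hyracks-free sketch of that offset arithmetic; the class name, data layout, and values are illustrative assumptions, not part of the AsterixDB code.

import java.util.Arrays;

// Hypothetical sketch: a tuple as one byte[] plus field end offsets, with each
// field's (start, length) derived from the previous field's end offset.
public class FieldEndOffsetDemo {
    public static void main(String[] args) {
        // Assumed layout: three fields of 2, 3, and 1 bytes packed back to back.
        byte[] data = { 1, 2, 10, 11, 12, 99 };
        int[] fieldEnds = { 2, 5, 6 };

        int start = 0;
        for (int i = 0; i < fieldEnds.length; i++) {
            if (i > 0) {
                start = fieldEnds[i - 1]; // field i starts where field i-1 ended
            }
            int length = fieldEnds[i] - start;
            byte[] field = Arrays.copyOfRange(data, start, start + length);
            System.out.println("field " + i + " = " + Arrays.toString(field));
        }
    }
}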
Use of org.apache.hyracks.dataflow.std.group.AggregateState in project asterixdb by apache.
The class NestedPlansRunningAggregatorFactory, method createAggregator.
/* (non-Javadoc)
 * @see org.apache.hyracks.dataflow.std.group.IAggregatorDescriptorFactory#createAggregator(org.apache.hyracks.api.context.IHyracksTaskContext, org.apache.hyracks.api.dataflow.value.RecordDescriptor, org.apache.hyracks.api.dataflow.value.RecordDescriptor, int[], int[])
 */
@Override
public IAggregatorDescriptor createAggregator(final IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor, RecordDescriptor outRecordDescriptor, int[] keyFields, int[] keyFieldsInPartialResults, final IFrameWriter writer) throws HyracksDataException {
    final RunningAggregatorOutput outputWriter = new RunningAggregatorOutput(ctx, subplans, keyFieldIdx.length, decorFieldIdx.length, writer);
    final NestedTupleSourceRuntime[] pipelines = new NestedTupleSourceRuntime[subplans.length];
    for (int i = 0; i < subplans.length; i++) {
        pipelines[i] = (NestedTupleSourceRuntime) assemblePipeline(subplans[i], outputWriter, ctx);
    }
    final ArrayTupleBuilder gbyTb = outputWriter.getGroupByTupleBuilder();
    return new IAggregatorDescriptor() {

        @Override
        public void init(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            for (int i = 0; i < pipelines.length; ++i) {
                pipelines[i].open();
            }
            gbyTb.reset();
            for (int i = 0; i < keyFieldIdx.length; ++i) {
                gbyTb.addField(accessor, tIndex, keyFieldIdx[i]);
            }
            for (int i = 0; i < decorFieldIdx.length; ++i) {
                gbyTb.addField(accessor, tIndex, decorFieldIdx[i]);
            }
            // aggregate the first tuple
            for (int i = 0; i < pipelines.length; i++) {
                outputWriter.setInputIdx(i);
                pipelines[i].writeTuple(accessor.getBuffer(), tIndex);
            }
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, IFrameTupleAccessor stateAccessor, int stateTupleIndex, AggregateState state) throws HyracksDataException {
            for (int i = 0; i < pipelines.length; i++) {
                outputWriter.setInputIdx(i);
                pipelines[i].writeTuple(accessor.getBuffer(), tIndex);
            }
        }

        @Override
        public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            for (int i = 0; i < pipelines.length; ++i) {
                outputWriter.setInputIdx(i);
                pipelines[i].close();
            }
            return false;
        }

        @Override
        public AggregateState createAggregateStates() {
            return new AggregateState();
        }

        @Override
        public void reset() {
        }

        @Override
        public boolean outputPartialResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            throw new IllegalStateException("this method should not be called");
        }

        @Override
        public void close() {
        }
    };
}
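Unlike the accumulating variant, the running factory's nested pipelines write their results straight to the downstream IFrameWriter, so outputFinalResult only closes the pipelines and returns false to signal that it appended nothing to the caller's tuple builder. A hypothetical, Hyracks-free sketch of that push-based shape follows; the class, method names, and writer callback are illustrative assumptions.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

// Hypothetical sketch of the "running" pattern: results are pushed to a writer
// when the group is closed, rather than returned through the tuple builder.
public class RunningAggregatorSketch {
    private final List<Integer> buffered = new ArrayList<>(); // stands in for the nested pipelines
    private final Consumer<String> writer;                    // stands in for the downstream frame writer

    public RunningAggregatorSketch(Consumer<String> writer) {
        this.writer = writer;
    }

    void init(int firstValue) {          // open pipelines, aggregate the first tuple
        buffered.add(firstValue);
    }

    void aggregate(int value) {          // feed subsequent tuples of the group
        buffered.add(value);
    }

    boolean outputFinalResult() {        // closing the "pipeline" pushes output downstream
        writer.accept("group result: " + buffered);
        buffered.clear();
        return false;                    // nothing was written into the caller's tuple builder
    }

    public static void main(String[] args) {
        RunningAggregatorSketch agg = new RunningAggregatorSketch(System.out::println);
        agg.init(7);
        agg.aggregate(8);
        agg.outputFinalResult();
    }
}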
Use of org.apache.hyracks.dataflow.std.group.AggregateState in project asterixdb by apache.
The class SerializableAggregatorDescriptorFactory, method createAggregator.
@Override
public IAggregatorDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor, RecordDescriptor outRecordDescriptor, int[] keyFields, final int[] keyFieldsInPartialResults) throws HyracksDataException {
    final int[] keys = keyFields;
    /**
     * one IAggregatorDescriptor instance per Gby operator
     */
    return new IAggregatorDescriptor() {

        private FrameTupleReference ftr = new FrameTupleReference();
        private ISerializedAggregateEvaluator[] aggs = new ISerializedAggregateEvaluator[aggFactories.length];
        private int offsetFieldIndex = keys.length;
        private int[] stateFieldLength = new int[aggFactories.length];

        @Override
        public AggregateState createAggregateStates() {
            return new AggregateState();
        }

        @Override
        public void init(ArrayTupleBuilder tb, IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
            DataOutput output = tb.getDataOutput();
            ftr.reset(accessor, tIndex);
            for (int i = 0; i < aggs.length; i++) {
                int begin = tb.getSize();
                if (aggs[i] == null) {
                    aggs[i] = aggFactories[i].createAggregateEvaluator(ctx);
                }
                aggs[i].init(output);
                tb.addFieldEndOffset();
                stateFieldLength[i] = tb.getSize() - begin;
            }
            // doing initial aggregate
            ftr.reset(accessor, tIndex);
            for (int i = 0; i < aggs.length; i++) {
                byte[] data = tb.getByteArray();
                int prevFieldPos = i + keys.length - 1;
                int start = prevFieldPos >= 0 ? tb.getFieldEndOffsets()[prevFieldPos] : 0;
                aggs[i].step(ftr, data, start, stateFieldLength[i]);
            }
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, IFrameTupleAccessor stateAccessor, int stateTupleIndex, AggregateState state) throws HyracksDataException {
            ftr.reset(accessor, tIndex);
            int stateTupleStart = stateAccessor.getTupleStartOffset(stateTupleIndex);
            int fieldSlotLength = stateAccessor.getFieldSlotsLength();
            for (int i = 0; i < aggs.length; i++) {
                byte[] data = stateAccessor.getBuffer().array();
                int start = stateAccessor.getFieldStartOffset(stateTupleIndex, i + keys.length) + stateTupleStart + fieldSlotLength;
                aggs[i].step(ftr, data, start, stateFieldLength[i]);
            }
        }

        @Override
        public boolean outputPartialResult(ArrayTupleBuilder tb, IFrameTupleAccessor stateAccessor, int tIndex, AggregateState state) throws HyracksDataException {
            byte[] data = stateAccessor.getBuffer().array();
            int startOffset = stateAccessor.getTupleStartOffset(tIndex);
            int aggFieldOffset = stateAccessor.getFieldStartOffset(tIndex, offsetFieldIndex);
            int refOffset = startOffset + stateAccessor.getFieldSlotsLength() + aggFieldOffset;
            int start = refOffset;
            for (int i = 0; i < aggs.length; i++) {
                aggs[i].finishPartial(data, start, stateFieldLength[i], tb.getDataOutput());
                start += stateFieldLength[i];
                tb.addFieldEndOffset();
            }
            return true;
        }

        @Override
        public boolean outputFinalResult(ArrayTupleBuilder tb, IFrameTupleAccessor stateAccessor, int tIndex, AggregateState state) throws HyracksDataException {
            byte[] data = stateAccessor.getBuffer().array();
            int startOffset = stateAccessor.getTupleStartOffset(tIndex);
            int aggFieldOffset = stateAccessor.getFieldStartOffset(tIndex, offsetFieldIndex);
            int refOffset = startOffset + stateAccessor.getFieldSlotsLength() + aggFieldOffset;
            int start = refOffset;
            for (int i = 0; i < aggs.length; i++) {
                aggs[i].finish(data, start, stateFieldLength[i], tb.getDataOutput());
                start += stateFieldLength[i];
                tb.addFieldEndOffset();
            }
            return true;
        }

        @Override
        public void reset() {
        }

        @Override
        public void close() {
            reset();
        }
    };
}
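Here the aggregate state is not an object at all: each evaluator serializes its running state as a fixed-length byte field stored in the state tuple right after the group-by keys, with stateFieldLength[i] recording each field's length so later steps can update the bytes in place. A minimal, Hyracks-free sketch of that in-place binary update follows; the class name, offsets, and 4-byte layout are illustrative assumptions.

import java.nio.ByteBuffer;

// Hypothetical sketch of a "serialized" count aggregate: the running state is a
// fixed-length byte region inside a larger tuple buffer, updated in place at a
// known offset, as the real evaluators do for fields located after the keys.
public class SerializedCountSketch {
    static final int STATE_LENGTH = 4; // a single int, mirroring stateFieldLength[i]

    static void init(byte[] tuple, int stateOffset) {
        ByteBuffer.wrap(tuple).putInt(stateOffset, 0);
    }

    static void step(byte[] tuple, int stateOffset) {
        ByteBuffer buf = ByteBuffer.wrap(tuple);
        buf.putInt(stateOffset, buf.getInt(stateOffset) + 1);
    }

    static int finish(byte[] tuple, int stateOffset) {
        return ByteBuffer.wrap(tuple).getInt(stateOffset);
    }

    public static void main(String[] args) {
        // Assumed layout: 8 bytes of key material followed by the 4-byte state field.
        byte[] stateTuple = new byte[8 + STATE_LENGTH];
        int stateOffset = 8;
        init(stateTuple, stateOffset);
        step(stateTuple, stateOffset);
        step(stateTuple, stateOffset);
        System.out.println("count = " + finish(stateTuple, stateOffset)); // prints 2
    }
}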
Use of org.apache.hyracks.dataflow.std.group.AggregateState in project asterixdb by apache.
The class AvgFieldGroupAggregatorFactory, method createAggregator.
/*
 * (non-Javadoc)
 *
 * @see org.apache.hyracks.dataflow.std.aggregations.
 * IFieldAggregateDescriptorFactory
 * #createAggregator(org.apache.hyracks.api.context.IHyracksTaskContext,
 * org.apache.hyracks.api.dataflow.value.RecordDescriptor,
 * org.apache.hyracks.api.dataflow.value.RecordDescriptor)
 */
@Override
public IFieldAggregateDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor, RecordDescriptor outRecordDescriptor) throws HyracksDataException {
    return new IFieldAggregateDescriptor() {

        @Override
        public void reset() {
        }

        @Override
        public void outputPartialResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state) throws HyracksDataException {
            int sum, count;
            if (!useObjectState) {
                sum = IntegerPointable.getInteger(data, offset);
                count = IntegerPointable.getInteger(data, offset + 4);
            } else {
                Integer[] fields = (Integer[]) state.state;
                sum = fields[0];
                count = fields[1];
            }
            try {
                fieldOutput.writeInt(sum);
                fieldOutput.writeInt(count);
            } catch (IOException e) {
                throw new HyracksDataException("I/O exception when writing aggregation to the output buffer.");
            }
        }

        @Override
        public void outputFinalResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state) throws HyracksDataException {
            int sum, count;
            if (!useObjectState) {
                sum = IntegerPointable.getInteger(data, offset);
                count = IntegerPointable.getInteger(data, offset + 4);
            } else {
                Integer[] fields = (Integer[]) state.state;
                sum = fields[0];
                count = fields[1];
            }
            try {
                fieldOutput.writeFloat((float) sum / count);
            } catch (IOException e) {
                throw new HyracksDataException("I/O exception when writing aggregation to the output buffer.");
            }
        }

        @Override
        public void init(IFrameTupleAccessor accessor, int tIndex, DataOutput fieldOutput, AggregateState state) throws HyracksDataException {
            int sum = 0;
            int count = 0;
            int tupleOffset = accessor.getTupleStartOffset(tIndex);
            int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
            sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
            count += 1;
            if (!useObjectState) {
                try {
                    fieldOutput.writeInt(sum);
                    fieldOutput.writeInt(count);
                } catch (IOException e) {
                    throw new HyracksDataException("I/O exception when initializing the aggregator.");
                }
            } else {
                state.state = new Integer[] { sum, count };
            }
        }

        @Override
        public void close() {
            // TODO Auto-generated method stub
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, byte[] data, int offset, AggregateState state) throws HyracksDataException {
            int sum = 0, count = 0;
            int tupleOffset = accessor.getTupleStartOffset(tIndex);
            int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
            sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
            count += 1;
            if (!useObjectState) {
                ByteBuffer buf = ByteBuffer.wrap(data);
                sum += buf.getInt(offset);
                count += buf.getInt(offset + 4);
                buf.putInt(offset, sum);
                buf.putInt(offset + 4, count);
            } else {
                Integer[] fields = (Integer[]) state.state;
                sum += fields[0];
                count += fields[1];
                state.state = new Integer[] { sum, count };
            }
        }

        @Override
        public boolean needsObjectState() {
            return useObjectState;
        }

        @Override
        public boolean needsBinaryState() {
            return !useObjectState;
        }

        @Override
        public AggregateState createState() {
            return new AggregateState(new Integer[] { 0, 0 });
        }
    };
}
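The avg aggregator keeps a (sum, count) pair, either as 8 bytes of binary state or as an Integer[] inside AggregateState, and the final result is (float) sum / count. Below is a minimal sketch of just the binary-state path, stripped of the Hyracks accessors; the class name and offsets are illustrative assumptions.

import java.nio.ByteBuffer;

// Hypothetical sketch of the avg aggregator's binary state: 8 bytes holding
// (sum, count), updated in place and finished as (float) sum / count.
public class AvgBinaryStateSketch {
    static void init(byte[] state, int offset, int firstValue) {
        ByteBuffer buf = ByteBuffer.wrap(state);
        buf.putInt(offset, firstValue);     // sum
        buf.putInt(offset + 4, 1);          // count
    }

    static void aggregate(byte[] state, int offset, int value) {
        ByteBuffer buf = ByteBuffer.wrap(state);
        buf.putInt(offset, buf.getInt(offset) + value);
        buf.putInt(offset + 4, buf.getInt(offset + 4) + 1);
    }

    static float outputFinalResult(byte[] state, int offset) {
        ByteBuffer buf = ByteBuffer.wrap(state);
        return (float) buf.getInt(offset) / buf.getInt(offset + 4);
    }

    public static void main(String[] args) {
        byte[] state = new byte[8];
        init(state, 0, 10);
        aggregate(state, 0, 20);
        aggregate(state, 0, 30);
        System.out.println("avg = " + outputFinalResult(state, 0)); // prints 20.0
    }
}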
Use of org.apache.hyracks.dataflow.std.group.AggregateState in project asterixdb by apache.
The class CountFieldAggregatorFactory, method createAggregator.
/*
 * (non-Javadoc)
 *
 * @see org.apache.hyracks.dataflow.std.aggregations.
 * IFieldAggregateDescriptorFactory
 * #createAggregator(org.apache.hyracks.api.context.IHyracksTaskContext,
 * org.apache.hyracks.api.dataflow.value.RecordDescriptor,
 * org.apache.hyracks.api.dataflow.value.RecordDescriptor)
 */
@Override
public IFieldAggregateDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor, RecordDescriptor outRecordDescriptor) throws HyracksDataException {
    return new IFieldAggregateDescriptor() {

        @Override
        public void reset() {
        }

        @Override
        public void outputPartialResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state) throws HyracksDataException {
            int count;
            if (!useObjectState) {
                count = IntegerPointable.getInteger(data, offset);
            } else {
                count = (Integer) state.state;
            }
            try {
                fieldOutput.writeInt(count);
            } catch (IOException e) {
                throw new HyracksDataException("I/O exception when writing aggregation to the output buffer.");
            }
        }

        @Override
        public void outputFinalResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state) throws HyracksDataException {
            int count;
            if (!useObjectState) {
                count = IntegerPointable.getInteger(data, offset);
            } else {
                count = (Integer) state.state;
            }
            try {
                fieldOutput.writeInt(count);
            } catch (IOException e) {
                throw new HyracksDataException("I/O exception when writing aggregation to the output buffer.");
            }
        }

        @Override
        public void init(IFrameTupleAccessor accessor, int tIndex, DataOutput fieldOutput, AggregateState state) throws HyracksDataException {
            int count = 1;
            if (!useObjectState) {
                try {
                    fieldOutput.writeInt(count);
                } catch (IOException e) {
                    throw new HyracksDataException("I/O exception when initializing the aggregator.");
                }
            } else {
                state.state = count;
            }
        }

        public boolean needsObjectState() {
            return useObjectState;
        }

        public boolean needsBinaryState() {
            return !useObjectState;
        }

        public AggregateState createState() {
            return new AggregateState(new Integer(0));
        }

        @Override
        public void close() {
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, byte[] data, int offset, AggregateState state) throws HyracksDataException {
            int count = 1;
            if (!useObjectState) {
                ByteBuffer buf = ByteBuffer.wrap(data);
                count += buf.getInt(offset);
                buf.putInt(offset, count);
            } else {
                count += (Integer) state.state;
                state.state = count;
            }
        }
    };
}
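All of these field aggregators share the same lifecycle imposed by the group-by operator: init on the first tuple of a group, aggregate for every further tuple, then one final output per group. A hypothetical, self-contained sketch of that driving loop for a simple count follows; the class, map-based grouping, and key values are illustrative assumptions, not the Hyracks operator.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical driver sketch: init on a group's first tuple, aggregate on the
// rest, and emit one (key, count) result per group at the end.
public class CountLifecycleSketch {
    public static void main(String[] args) {
        List<String> keys = List.of("a", "a", "b", "a", "b");
        Map<String, Integer> state = new LinkedHashMap<>();

        for (String key : keys) {
            if (!state.containsKey(key)) {
                state.put(key, 1);                  // init: the first tuple counts as 1
            } else {
                state.put(key, state.get(key) + 1); // aggregate: bump the stored count
            }
        }
        // outputFinalResult: emit one (key, count) pair per group
        state.forEach((key, count) -> System.out.println(key + " -> " + count));
    }
}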