Use of org.apache.hyracks.api.dataflow.value.IResultSerializer in project asterixdb by apache.
The class ResultWriterOperatorDescriptor, method createPushRuntime:
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
        IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
        throws HyracksDataException {
    final IDatasetPartitionManager dpm = ctx.getDatasetPartitionManager();
    final IFrame frame = new VSizeFrame(ctx);
    final FrameOutputStream frameOutputStream = new FrameOutputStream(ctx.getInitialFrameSize());
    frameOutputStream.reset(frame, true);
    PrintStream printStream = new PrintStream(frameOutputStream);
    final RecordDescriptor outRecordDesc = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
    final IResultSerializer resultSerializer = resultSerializerFactory.createResultSerializer(outRecordDesc, printStream);
    final FrameTupleAccessor frameTupleAccessor = new FrameTupleAccessor(outRecordDesc);
    return new AbstractUnaryInputSinkOperatorNodePushable() {
        private IFrameWriter datasetPartitionWriter;
        private boolean failed = false;

        @Override
        public void open() throws HyracksDataException {
            try {
                datasetPartitionWriter = dpm.createDatasetPartitionWriter(ctx, rsId, ordered, asyncMode, partition, nPartitions);
                datasetPartitionWriter.open();
                resultSerializer.init();
            } catch (HyracksException e) {
                throw HyracksDataException.create(e);
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            frameTupleAccessor.reset(buffer);
            for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
                resultSerializer.appendTuple(frameTupleAccessor, tIndex);
                if (!frameOutputStream.appendTuple()) {
                    // The output frame is full: flush it to the partition writer,
                    // then re-serialize the current tuple into the now-empty frame.
                    frameOutputStream.flush(datasetPartitionWriter);
                    resultSerializer.appendTuple(frameTupleAccessor, tIndex);
                    frameOutputStream.appendTuple();
                }
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            failed = true;
            datasetPartitionWriter.fail();
        }

        @Override
        public void close() throws HyracksDataException {
            try {
                // Flush any tuples still buffered in the output frame.
                if (!failed && frameOutputStream.getTupleCount() > 0) {
                    frameOutputStream.flush(datasetPartitionWriter);
                }
            } catch (Exception e) {
                datasetPartitionWriter.fail();
                throw e;
            } finally {
                datasetPartitionWriter.close();
            }
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append("{ ");
            sb.append("\"rsId\": \"").append(rsId).append("\", ");
            sb.append("\"ordered\": ").append(ordered).append(", ");
            sb.append("\"asyncMode\": ").append(asyncMode).append(" }");
            return sb.toString();
        }
    };
}
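The pushable returned above follows the standard Hyracks IFrameWriter lifecycle: open once, nextFrame once per incoming frame, fail on error, close at the end. Below is a minimal driver sketch of that call sequence; the method pushOneFrame and the arguments it takes are assumptions for illustration, not code from the project.

// Hypothetical driver sketch, not AsterixDB code: shows how a caller would
// exercise the pushable's IFrameWriter contract for a single input frame.
void pushOneFrame(ResultWriterOperatorDescriptor descriptor, IHyracksTaskContext ctx,
        IRecordDescriptorProvider recordDescProvider, ByteBuffer frameBuffer) throws HyracksDataException {
    IOperatorNodePushable pushable = descriptor.createPushRuntime(ctx, recordDescProvider, 0, 1);
    IFrameWriter writer = pushable.getInputFrameWriter(0); // sink pushable accepts frames on input 0
    writer.open();
    try {
        writer.nextFrame(frameBuffer); // one call per full frame of serialized tuples
    } catch (HyracksDataException e) {
        writer.fail();                 // mark the result partition as failed before closing
        throw e;
    } finally {
        writer.close();                // flushes any buffered tuples and closes the partition
    }
}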
Use of org.apache.hyracks.api.dataflow.value.IResultSerializer in project asterixdb by apache.
The class ResultSerializerFactoryProvider, method getResultSerializerFactoryProvider:
public IResultSerializerFactory getResultSerializerFactoryProvider() {
    return new IResultSerializerFactory() {
        private static final long serialVersionUID = 1L;

        @Override
        public IResultSerializer createResultSerializer(final RecordDescriptor recordDesc, final PrintStream printStream) {
            return new IResultSerializer() {
                private static final long serialVersionUID = 1L;

                ByteBufferInputStream bbis = new ByteBufferInputStream();
                DataInputStream di = new DataInputStream(bbis);

                @Override
                public void init() throws HyracksDataException {
                }

                @Override
                public boolean appendTuple(IFrameTupleAccessor tAccess, int tIdx) throws HyracksDataException {
                    int start = tAccess.getTupleStartOffset(tIdx) + tAccess.getFieldSlotsLength();
                    bbis.setByteBuffer(tAccess.getBuffer(), start);
                    Object[] record = new Object[recordDesc.getFieldCount()];
                    for (int i = 0; i < record.length; ++i) {
                        Object instance = recordDesc.getFields()[i].deserialize(di);
                        if (i == 0) {
                            printStream.print(String.valueOf(instance));
                        } else {
                            printStream.print(", " + String.valueOf(instance));
                        }
                    }
                    printStream.println();
                    return true;
                }
            };
        }
    };
}
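The provider above is effectively a comma-separated text serializer: it positions a ByteBufferInputStream at the tuple's data, deserializes each field with the RecordDescriptor's serializers, and prints the values. A custom IResultSerializerFactory can reuse that pattern and change only the rendering; the sketch below is a hypothetical tab-separated variant (getTabSeparatedResultSerializerFactory is not part of AsterixDB) built only against the interfaces already shown above.

// Hypothetical variant, not AsterixDB code: same field-by-field deserialization,
// but fields are separated by tabs instead of ", ".
public IResultSerializerFactory getTabSeparatedResultSerializerFactory() {
    return new IResultSerializerFactory() {
        private static final long serialVersionUID = 1L;

        @Override
        public IResultSerializer createResultSerializer(final RecordDescriptor recordDesc, final PrintStream printStream) {
            return new IResultSerializer() {
                private static final long serialVersionUID = 1L;

                private final ByteBufferInputStream bbis = new ByteBufferInputStream();
                private final DataInputStream di = new DataInputStream(bbis);

                @Override
                public void init() throws HyracksDataException {
                    // No per-partition state to initialize in this sketch.
                }

                @Override
                public boolean appendTuple(IFrameTupleAccessor tAccess, int tIdx) throws HyracksDataException {
                    // Skip the field-slot header and point the stream at the tuple's data.
                    int start = tAccess.getTupleStartOffset(tIdx) + tAccess.getFieldSlotsLength();
                    bbis.setByteBuffer(tAccess.getBuffer(), start);
                    for (int i = 0; i < recordDesc.getFieldCount(); ++i) {
                        if (i > 0) {
                            printStream.print('\t');
                        }
                        printStream.print(String.valueOf(recordDesc.getFields()[i].deserialize(di)));
                    }
                    printStream.println();
                    return true;
                }
            };
        }
    };
}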