Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class RecordDataFlowController, method start:
@Override
public void start(IFrameWriter writer) throws HyracksDataException {
    try {
        ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
        tupleForwarder.initialize(ctx, writer);
        while (recordReader.hasNext()) {
            IRawRecord<? extends T> record = recordReader.next();
            tb.reset();
            dataParser.parse(record, tb.getDataOutput());
            tb.addFieldEndOffset();
            appendOtherTupleFields(tb);
            tupleForwarder.addTuple(tb);
        }
        tupleForwarder.close();
        recordReader.close();
    } catch (Exception e) {
        throw new HyracksDataException(e);
    }
}
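This is the canonical ArrayTupleBuilder loop: reset, write each field's bytes to the builder's DataOutput, and seal the field with addFieldEndOffset(). Below is a minimal, self-contained sketch of that loop using two stock Hyracks serializers from hyracks-dataflow-common; the class name TupleBuildSketch and the sample values are illustrative and not part of the project.

import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;

public class TupleBuildSketch {
    public static void main(String[] args) throws HyracksDataException {
        // One builder is reused across tuples: reset() clears the previous
        // tuple's bytes and field-end offsets, exactly as in the loop above.
        ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
        UTF8StringSerializerDeserializer stringSerde = new UTF8StringSerializerDeserializer();
        String[] names = { "alice", "bob" };
        for (int i = 0; i < names.length; i++) {
            tb.reset();
            // Field 0: serialized string, then mark the field boundary.
            stringSerde.serialize(names[i], tb.getDataOutput());
            tb.addFieldEndOffset();
            // Field 1: serialized integer.
            IntegerSerializerDeserializer.INSTANCE.serialize(i, tb.getDataOutput());
            tb.addFieldEndOffset();
            System.out.println("tuple " + i + " occupies " + tb.getSize() + " bytes");
        }
    }
}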
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class LookupAdapter, method configurePropagation:
private void configurePropagation(IMissingWriterFactory iNullWriterFactory) {
    if (propagateInput) {
        // This LookupAdapter generates an external record as its output.
        // Thus, we add 1.
        tb = new ArrayTupleBuilder(tupleAccessor.getFieldCount() + 1);
        frameTuple = new FrameTupleReference();
    } else {
        tb = new ArrayTupleBuilder(1);
    }
    if (retainNull) {
        IMissingWriter missingWriter = iNullWriterFactory.createMissingWriter();
        missingTupleBuild = new ArrayTupleBuilder(1);
        DataOutput out = missingTupleBuild.getDataOutput();
        try {
            missingWriter.writeMissing(out);
        } catch (IOException e) {
            // Rethrow instead of swallowing: a half-built missing tuple
            // would otherwise surface later as corrupt output.
            throw new IllegalStateException(e);
        }
    } else {
        missingTupleBuild = null;
    }
}
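The retainNull branch builds its single-field "missing" tuple once, so the same bytes can be appended for every unmatched lookup key. A minimal sketch of that idea follows; the one-byte MISSING_TAG constant is a hypothetical stand-in for whatever bytes the configured IMissingWriter actually emits.

import java.io.DataOutput;
import java.io.IOException;

import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

public class MissingTupleSketch {
    // Hypothetical placeholder for the serialized MISSING value; the real
    // byte layout is whatever IMissingWriter.writeMissing() produces.
    private static final byte MISSING_TAG = 0x25;

    public static ArrayTupleBuilder buildMissingTuple() throws IOException {
        // Built once and never reset, so the tuple can be appended to the
        // output repeatedly for every lookup miss.
        ArrayTupleBuilder missingTuple = new ArrayTupleBuilder(1);
        DataOutput out = missingTuple.getDataOutput();
        out.writeByte(MISSING_TAG);
        missingTuple.addFieldEndOffset();
        return missingTuple;
    }
}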
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class MultiFieldsAggregatorFactory, method createAggregator:
/*
* (non-Javadoc)
*
* @see
* org.apache.hyracks.dataflow.std.aggregations.IAggregatorDescriptorFactory
* #createAggregator(org.apache.hyracks.api.context.IHyracksTaskContext,
* org.apache.hyracks.api.dataflow.value.RecordDescriptor,
* org.apache.hyracks.api.dataflow.value.RecordDescriptor)
*/
@Override
public IAggregatorDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor,
        RecordDescriptor outRecordDescriptor, final int[] keyFields, final int[] keyFieldsInPartialResults)
        throws HyracksDataException {
    final IFieldAggregateDescriptor[] aggregators = new IFieldAggregateDescriptor[aggregatorFactories.length];
    for (int i = 0; i < aggregators.length; i++) {
        aggregators[i] = aggregatorFactories[i].createAggregator(ctx, inRecordDescriptor, outRecordDescriptor);
    }
    if (this.keys == null) {
        this.keys = keyFields;
    }
    return new IAggregatorDescriptor() {

        @Override
        public void reset() {
            for (int i = 0; i < aggregators.length; i++) {
                aggregators[i].reset();
            }
        }

        @Override
        public boolean outputPartialResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor,
                int tIndex, AggregateState state) throws HyracksDataException {
            DataOutput dos = tupleBuilder.getDataOutput();
            int tupleOffset = stateAccessor.getTupleStartOffset(tIndex);
            for (int i = 0; i < aggregators.length; i++) {
                int fieldOffset = stateAccessor.getFieldStartOffset(tIndex, keys.length + i);
                aggregators[i].outputPartialResult(dos, stateAccessor.getBuffer().array(),
                        fieldOffset + stateAccessor.getFieldSlotsLength() + tupleOffset,
                        ((AggregateState[]) state.state)[i]);
                tupleBuilder.addFieldEndOffset();
            }
            return true;
        }

        @Override
        public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor,
                int tIndex, AggregateState state) throws HyracksDataException {
            DataOutput dos = tupleBuilder.getDataOutput();
            int tupleOffset = stateAccessor.getTupleStartOffset(tIndex);
            for (int i = 0; i < aggregators.length; i++) {
                if (aggregators[i].needsBinaryState()) {
                    int fieldOffset = stateAccessor.getFieldStartOffset(tIndex, keys.length + i);
                    aggregators[i].outputFinalResult(dos, stateAccessor.getBuffer().array(),
                            tupleOffset + stateAccessor.getFieldSlotsLength() + fieldOffset,
                            ((AggregateState[]) state.state)[i]);
                } else {
                    aggregators[i].outputFinalResult(dos, null, 0, ((AggregateState[]) state.state)[i]);
                }
                tupleBuilder.addFieldEndOffset();
            }
            return true;
        }

        @Override
        public void init(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor accessor, int tIndex,
                AggregateState state) throws HyracksDataException {
            DataOutput dos = tupleBuilder.getDataOutput();
            for (int i = 0; i < aggregators.length; i++) {
                aggregators[i].init(accessor, tIndex, dos, ((AggregateState[]) state.state)[i]);
                if (aggregators[i].needsBinaryState()) {
                    tupleBuilder.addFieldEndOffset();
                }
            }
        }

        @Override
        public AggregateState createAggregateStates() {
            AggregateState[] states = new AggregateState[aggregators.length];
            for (int i = 0; i < states.length; i++) {
                states[i] = aggregators[i].createState();
            }
            return new AggregateState(states);
        }

        @Override
        public void close() {
            for (int i = 0; i < aggregators.length; i++) {
                aggregators[i].close();
            }
        }

        @Override
        public void aggregate(IFrameTupleAccessor accessor, int tIndex, IFrameTupleAccessor stateAccessor,
                int stateTupleIndex, AggregateState state) throws HyracksDataException {
            if (stateAccessor != null) {
                int stateTupleOffset = stateAccessor.getTupleStartOffset(stateTupleIndex);
                int fieldIndex = 0;
                for (int i = 0; i < aggregators.length; i++) {
                    if (aggregators[i].needsBinaryState()) {
                        int stateFieldOffset =
                                stateAccessor.getFieldStartOffset(stateTupleIndex, keys.length + fieldIndex);
                        aggregators[i].aggregate(accessor, tIndex, stateAccessor.getBuffer().array(),
                                stateTupleOffset + stateAccessor.getFieldSlotsLength() + stateFieldOffset,
                                ((AggregateState[]) state.state)[i]);
                        fieldIndex++;
                    } else {
                        aggregators[i].aggregate(accessor, tIndex, null, 0, ((AggregateState[]) state.state)[i]);
                    }
                }
            } else {
                for (int i = 0; i < aggregators.length; i++) {
                    aggregators[i].aggregate(accessor, tIndex, null, 0, ((AggregateState[]) state.state)[i]);
                }
            }
        }
    };
}
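Every offset computation in the descriptor above is the same three-term sum: a field's absolute position in the frame buffer is the tuple's start offset, plus the field-slot header that stores the per-field end offsets, plus the field's own relative start. A small helper, hypothetical but built only from real IFrameTupleAccessor methods, makes that arithmetic explicit:

import org.apache.hyracks.api.comm.IFrameTupleAccessor;

public final class FieldOffsets {
    private FieldOffsets() {
    }

    // Absolute byte offset of field fIdx of tuple tIdx in the accessor's
    // frame buffer: tuple start + field-slot header + field's relative start.
    // This is the arithmetic the aggregator repeats for each binary state
    // field at position keys.length + i.
    public static int absoluteFieldOffset(IFrameTupleAccessor accessor, int tIdx, int fIdx) {
        return accessor.getTupleStartOffset(tIdx)
                + accessor.getFieldSlotsLength()
                + accessor.getFieldStartOffset(tIdx, fIdx);
    }
}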
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class ClassAdToADMTest, method testSchemaful:
@SuppressWarnings("rawtypes")
public void testSchemaful() {
    try {
        File file = new File("target/classad-with-temporals.adm");
        File expected = new File(
                getClass().getResource("/classad/results/classad-with-temporals.adm").toURI().getPath());
        FileUtils.deleteQuietly(file);
        PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
        String[] recordFieldNames = { "GlobalJobId", "Owner", "ClusterId", "ProcId", "RemoteWallClockTime",
                "CompletionDate", "QDate", "JobCurrentStartDate", "JobStartDate",
                "JobCurrentStartExecutingDate" };
        IAType[] recordFieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32,
                BuiltinType.AINT32, BuiltinType.ADURATION, BuiltinType.ADATETIME, BuiltinType.ADATETIME,
                BuiltinType.ADATETIME, BuiltinType.ADATETIME, BuiltinType.ADATETIME };
        ARecordType recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
        int numOfTupleFields = 1;
        ISerializerDeserializer[] serdes = new ISerializerDeserializer[1];
        serdes[0] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
        IPrinterFactory[] printerFactories = new IPrinterFactory[1];
        printerFactories[0] = ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
        // create output descriptor
        IPrinter[] printers = new IPrinter[printerFactories.length];
        for (int i = 0; i < printerFactories.length; i++) {
            printers[i] = printerFactories[i].createPrinter();
        }
        ClassAdObjectPool objectPool = new ClassAdObjectPool();
        String[] files = new String[] { "/classad/classad-with-temporals.classads" };
        ClassAdParser parser = new ClassAdParser(recordType, false, false, false, null, null, null, objectPool);
        ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
        for (String path : files) {
            List<Path> paths = new ArrayList<>();
            Map<String, String> config = new HashMap<>();
            config.put(ExternalDataConstants.KEY_RECORD_START, "[");
            config.put(ExternalDataConstants.KEY_RECORD_END, "]");
            paths.add(Paths.get(getClass().getResource(path).toURI()));
            FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
            LocalFSInputStream in = new LocalFSInputStream(watcher);
            SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader();
            recordReader.configure(in, config);
            while (recordReader.hasNext()) {
                tb.reset();
                IRawRecord<char[]> record = recordReader.next();
                parser.parse(record, tb.getDataOutput());
                tb.addFieldEndOffset();
                printTuple(tb, printers, printStream);
            }
            recordReader.close();
            printStream.close();
            Assert.assertTrue(FileUtils.contentEquals(file, expected));
        }
    } catch (Throwable th) {
        System.err.println("TEST FAILED");
        th.printStackTrace();
        Assert.fail(th.getMessage());
    }
    System.err.println("TEST PASSED");
}
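The printTuple helper invoked in the loop is defined elsewhere in the test class. A plausible sketch, assuming it simply walks the builder's field boundaries and hands each field's bytes to the matching IPrinter, might look like this (the class name PrintTupleSketch is illustrative):

import java.io.PrintStream;

import org.apache.hyracks.algebricks.data.IPrinter;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

public class PrintTupleSketch {
    // Field i of the builder spans [previous end offset, end offset i)
    // within the builder's backing byte array.
    static void printTuple(ArrayTupleBuilder tb, IPrinter[] printers, PrintStream printStream)
            throws HyracksDataException {
        int[] ends = tb.getFieldEndOffsets();
        int start = 0;
        for (int i = 0; i < printers.length; i++) {
            printers[i].print(tb.getByteArray(), start, ends[i] - start, printStream);
            printStream.println();
            start = ends[i];
        }
    }
}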
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class BinaryTokenizerOperatorNodePushable, method open:
@Override
public void open() throws HyracksDataException {
    writer.open();
    accessor = new FrameTupleAccessor(inputRecDesc);
    builder = new ArrayTupleBuilder(outputRecDesc.getFieldCount());
    builderData = builder.getFieldData();
    appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
}
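open() only wires up the machinery; the work happens later in nextFrame(), where built tuples are appended to the frame and full frames are flushed downstream. A hedged sketch of that append-and-flush cycle, using only the appender, builder, and writer initialized above (the helper name appendTuple is illustrative, not the pushable's actual method):

import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;

public class AppendSketch {
    // Appends the builder's current tuple; when the frame is full, flushes
    // it to the downstream writer, clears it, and retries once.
    static void appendTuple(IFrameWriter writer, FrameTupleAppender appender, ArrayTupleBuilder builder)
            throws HyracksDataException {
        if (!appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
            appender.write(writer, true);
            if (!appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
                throw new HyracksDataException("tuple is larger than an empty frame");
            }
        }
    }
}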