Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class RecordWithMetaTest, method runTest.
@SuppressWarnings({ "unchecked", "rawtypes" })
// @Test commented out due to ASTERIXDB-1881
public void runTest() throws Exception {
    File file = new File("target/beer.adm");
    File expected = new File(getClass().getResource("/openbeerdb/beer.txt").toURI().getPath());
    try {
        FileUtils.deleteQuietly(file);
        PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
        // create key type
        IAType[] keyTypes = { BuiltinType.ASTRING };
        String keyName = "id";
        List<String> keyNameAsList = new ArrayList<>(1);
        keyNameAsList.add(keyName);
        // create record type
        String[] recordFieldNames = {};
        IAType[] recordFieldTypes = {};
        recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
        // create the meta type
        String[] metaFieldNames = { keyName, "flags", "expiration", "cas", "rev", "vbid", "dtype" };
        IAType[] metaFieldTypes = { BuiltinType.ASTRING, BuiltinType.AINT32, BuiltinType.AINT64,
                BuiltinType.AINT64, BuiltinType.AINT32, BuiltinType.AINT32, BuiltinType.AINT32 };
        ARecordType metaType = new ARecordType("meta", metaFieldNames, metaFieldTypes, true);
        int valueIndex = 4;
        char delimiter = ',';
        int numOfTupleFields = 3;
        int[] pkIndexes = { 0 };
        int[] pkIndicators = { 1 };
        List<Path> paths = new ArrayList<>();
        paths.add(Paths.get(getClass().getResource("/openbeerdb/beer.csv").toURI()));
        FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
        // create input stream
        LocalFSInputStream inputStream = new LocalFSInputStream(watcher);
        // create record reader
        Map<String, String> config = new HashMap<>();
        config.put(ExternalDataConstants.KEY_HEADER, "true");
        config.put(ExternalDataConstants.KEY_QUOTE, ExternalDataConstants.DEFAULT_QUOTE);
        LineRecordReader lineReader = new LineRecordReader();
        lineReader.configure(inputStream, config);
        // create the CSV-to-record-with-metadata-and-PK converter
        CSVToRecordWithMetadataAndPKConverter recordConverter = new CSVToRecordWithMetadataAndPKConverter(
                valueIndex, delimiter, metaType, recordType, pkIndicators, pkIndexes, keyTypes);
        // create the value parser (ADM in this case)
        ADMDataParser valueParser = new ADMDataParser(recordType, false);
        // create parser
        RecordWithMetadataParser parser = new RecordWithMetadataParser(metaType, valueParser, recordConverter);
        // create serializer/deserializer and printer factories
        ISerializerDeserializer[] serdes = new ISerializerDeserializer[keyTypes.length + 2];
        IPrinterFactory[] printerFactories = new IPrinterFactory[keyTypes.length + 2];
        for (int i = 0; i < keyTypes.length; i++) {
            serdes[i + 2] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(keyTypes[i]);
            printerFactories[i + 2] = ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(keyTypes[i]);
        }
        serdes[0] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
        serdes[1] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(metaType);
        printerFactories[0] = ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
        printerFactories[1] = ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(metaType);
        // create output descriptor
        IPrinter[] printers = new IPrinter[printerFactories.length];
        for (int i = 0; i < printerFactories.length; i++) {
            printers[i] = printerFactories[i].createPrinter();
        }
        ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
        while (lineReader.hasNext()) {
            IRawRecord<char[]> record = lineReader.next();
            tb.reset();
            parser.parse(record, tb.getDataOutput());
            tb.addFieldEndOffset();
            parser.parseMeta(tb.getDataOutput());
            tb.addFieldEndOffset();
            parser.appendLastParsedPrimaryKeyToTuple(tb);
            // print tuple
            printTuple(tb, printers, printStream);
        }
        lineReader.close();
        printStream.close();
        Assert.assertTrue(FileUtils.contentEquals(file, expected));
    } catch (Throwable th) {
        System.err.println("TEST FAILED");
        th.printStackTrace();
        throw th;
    } finally {
        FileUtils.deleteQuietly(file);
    }
    System.err.println("TEST PASSED.");
}
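All the snippets on this page share the same build cycle: reset() once per tuple, serialize each field into the builder's DataOutput, then close the field with addFieldEndOffset(). A minimal, self-contained sketch of that cycle follows; the class name, field values, and writeUTF encoding are illustrative stand-ins, not what the test above actually writes.

import java.io.DataOutput;
import java.io.IOException;

import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

public class TupleBuildSketch {

    // Build one three-field tuple the same way the while-loop above does:
    // reset once, then serialize-and-close each field in order.
    static ArrayTupleBuilder buildTuple() throws IOException {
        ArrayTupleBuilder tb = new ArrayTupleBuilder(3); // numOfTupleFields
        tb.reset(); // discard the offsets and bytes of the previous tuple
        DataOutput dos = tb.getDataOutput();
        dos.writeUTF("record"); // stand-in for parser.parse(record, dos)
        tb.addFieldEndOffset(); // field 0 complete
        dos.writeUTF("meta"); // stand-in for parser.parseMeta(dos)
        tb.addFieldEndOffset(); // field 1 complete
        dos.writeUTF("pk"); // stand-in for the appended primary key
        tb.addFieldEndOffset(); // field 2 complete
        return tb; // getByteArray()/getSize() now describe the finished tuple
    }
}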
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class LSMPrimaryUpsertOperatorNodePushable, method open.
// we have the permutation, which has [pk locations, record location, optional:filter-location]
// the index -> we don't need any more data?
// we need to use the primary index opTracker and secondary indexes' callbacks for insert/delete
// since the lock would have been obtained through the searchForUpsert operation
@Override
public void open() throws HyracksDataException {
    accessor = new FrameTupleAccessor(inputRecDesc);
    writeBuffer = new VSizeFrame(ctx);
    writer.open();
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    try {
        if (ctx.getSharedObject() != null) {
            PrimaryIndexLogMarkerCallback callback = new PrimaryIndexLogMarkerCallback((AbstractLSMIndex) index);
            TaskUtil.putInSharedMap(ILogMarkerCallback.KEY_MARKER_CALLBACK, callback, ctx);
        }
        missingTupleBuilder = new ArrayTupleBuilder(1);
        DataOutput out = missingTupleBuilder.getDataOutput();
        try {
            missingWriter.writeMissing(out);
        } catch (IOException e) {
            throw new HyracksDataException(e);
        }
        missingTupleBuilder.addFieldEndOffset();
        searchPred = createSearchPredicate();
        tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
        dos = tb.getDataOutput();
        appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
        modCallback = modOpCallbackFactory.createModificationOperationCallback(indexHelper.getResource(), ctx, this);
        abstractModCallback = (AbstractIndexModificationOperationCallback) modCallback;
        searchCallback = (LockThenSearchOperationCallback) searchCallbackFactory
                .createSearchOperationCallback(indexHelper.getResource().getId(), ctx, this);
        indexAccessor = index.createAccessor(abstractModCallback, searchCallback);
        lsmAccessor = (LSMTreeIndexAccessor) indexAccessor;
        cursor = indexAccessor.createSearchCursor(false);
        frameTuple = new FrameTupleReference();
        INcApplicationContext appCtx =
                (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
        LSMIndexUtil.checkAndSetFirstLSN((AbstractLSMIndex) index, appCtx.getTransactionSubsystem().getLogManager());
        frameOpCallback = new IFrameOperationCallback() {
            IFrameOperationCallback callback =
                    frameOpCallbackFactory.createFrameOperationCallback(ctx, (ILSMIndexAccessor) indexAccessor);

            @Override
            public void frameCompleted() throws HyracksDataException {
                callback.frameCompleted();
                appender.write(writer, true);
            }
        };
    } catch (Exception e) {
        indexHelper.close();
        throw new HyracksDataException(e);
    }
}
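The tb and appender created in open() meet later, when finished tuples are pushed downstream. The following is a minimal sketch of that hand-off under the usual Hyracks append-retry idiom; the class name, method name, and error message are illustrative, and the appender/writer pair is assumed to be wired up as in open() above.

import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;

public class AppendSketch {

    // Append a finished ArrayTupleBuilder tuple to the current output frame;
    // if the frame is full, push it downstream (as frameCompleted() above does)
    // and retry once on the now-empty frame.
    static void appendTuple(FrameTupleAppender appender, IFrameWriter writer, ArrayTupleBuilder tb)
            throws HyracksDataException {
        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
            appender.write(writer, true); // flush the full frame and clear it
            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                throw new HyracksDataException("tuple is too large for an empty frame");
            }
        }
    }
}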
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class AggregateRuntimeFactory, method createOneOutputPushRuntime.
@Override
public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx)
        throws HyracksDataException {
    return new AbstractOneInputOneOutputOneFramePushRuntime() {

        private IAggregateEvaluator[] aggregs = new IAggregateEvaluator[aggregFactories.length];
        private IPointable result = VoidPointable.FACTORY.createPointable();
        private ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(aggregs.length);
        private boolean first = true;
        private boolean isOpen = false;

        @Override
        public void open() throws HyracksDataException {
            if (first) {
                first = false;
                initAccessAppendRef(ctx);
                for (int i = 0; i < aggregFactories.length; i++) {
                    aggregs[i] = aggregFactories[i].createAggregateEvaluator(ctx);
                }
            }
            for (int i = 0; i < aggregFactories.length; i++) {
                aggregs[i].init();
            }
            isOpen = true;
            writer.open();
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            tAccess.reset(buffer);
            int nTuple = tAccess.getTupleCount();
            for (int t = 0; t < nTuple; t++) {
                tRef.reset(tAccess, t);
                processTuple(tRef);
            }
        }

        @Override
        public void close() throws HyracksDataException {
            if (isOpen) {
                try {
                    computeAggregate();
                    appendToFrameFromTupleBuilder(tupleBuilder);
                } finally {
                    super.close();
                }
            }
        }

        private void computeAggregate() throws HyracksDataException {
            tupleBuilder.reset();
            for (int f = 0; f < aggregs.length; f++) {
                aggregs[f].finish(result);
                tupleBuilder.addField(result.getByteArray(), result.getStartOffset(), result.getLength());
            }
        }

        private void processTuple(FrameTupleReference tupleRef) throws HyracksDataException {
            for (int f = 0; f < aggregs.length; f++) {
                aggregs[f].step(tupleRef);
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            if (isOpen) {
                writer.fail();
            }
        }
    };
}
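Note that computeAggregate() uses the one-call form addField(bytes, start, length) rather than the DataOutput route from the first example. The two are interchangeable ways to add a single field; the following sketch shows the equivalence (the class and method names are illustrative, and HyracksDataException is covered by the IOException clause since it extends it).

import java.io.DataOutput;
import java.io.IOException;

import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

public class AddFieldSketch {

    // One-call form: copy pre-serialized bytes and close the field.
    static void oneCall(ArrayTupleBuilder tb, byte[] bytes, int start, int length) throws IOException {
        tb.addField(bytes, start, length);
    }

    // Equivalent two-step form: stream the same bytes, then close the field.
    static void twoStep(ArrayTupleBuilder tb, byte[] bytes, int start, int length) throws IOException {
        DataOutput dos = tb.getDataOutput();
        dos.write(bytes, start, length);
        tb.addFieldEndOffset();
    }
}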
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class StreamSelectRuntimeFactory, method createOneOutputPushRuntime.
@Override
public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx) {
    final IBinaryBooleanInspector bbi = binaryBooleanInspectorFactory.createBinaryBooleanInspector(ctx);
    return new AbstractOneInputOneOutputOneFieldFramePushRuntime() {

        private IPointable p = VoidPointable.FACTORY.createPointable();
        private IScalarEvaluator eval;
        private IMissingWriter missingWriter = null;
        private ArrayTupleBuilder missingTupleBuilder = null;
        private boolean isOpen = false;

        @Override
        public void open() throws HyracksDataException {
            if (eval == null) {
                initAccessAppendFieldRef(ctx);
                eval = cond.createScalarEvaluator(ctx);
            }
            isOpen = true;
            writer.open();
            // prepare the missing-tuple builder
            if (retainMissing && missingWriter == null) {
                missingWriter = missingWriterFactory.createMissingWriter();
                missingTupleBuilder = new ArrayTupleBuilder(1);
                DataOutput out = missingTupleBuilder.getDataOutput();
                missingWriter.writeMissing(out);
                missingTupleBuilder.addFieldEndOffset();
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            if (isOpen) {
                super.fail();
            }
        }

        @Override
        public void close() throws HyracksDataException {
            if (isOpen) {
                try {
                    flushIfNotFailed();
                } finally {
                    writer.close();
                }
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            tAccess.reset(buffer);
            int nTuple = tAccess.getTupleCount();
            for (int t = 0; t < nTuple; t++) {
                tRef.reset(tAccess, t);
                eval.evaluate(tRef, p);
                if (bbi.getBooleanValue(p.getByteArray(), p.getStartOffset(), p.getLength())) {
                    if (projectionList != null) {
                        appendProjectionToFrame(t, projectionList);
                    } else {
                        appendTupleToFrame(t);
                    }
                } else if (retainMissing) {
                    for (int i = 0; i < tRef.getFieldCount(); i++) {
                        if (i == missingPlaceholderVariableIndex) {
                            appendField(missingTupleBuilder.getByteArray(), 0, missingTupleBuilder.getSize());
                        } else {
                            appendField(tAccess, t, i);
                        }
                    }
                }
            }
        }

        @Override
        public void flush() throws HyracksDataException {
            appender.flush(writer);
        }
    };
}
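Here the one-field MISSING tuple is built once in open() and its raw bytes are spliced into the output per row. This works because getByteArray()/getSize() expose only the concatenated field bytes (the field end offsets are tracked separately), so a one-field builder holds exactly one field's worth of bytes. A minimal sketch, with a hypothetical one-byte stand-in for the serialized MISSING value:

import java.io.IOException;

import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

public class MissingFieldSketch {

    // Build the one-field placeholder once; its raw bytes can then be spliced
    // into any output tuple as a single field, as the loop above does.
    static ArrayTupleBuilder buildPlaceholder() throws IOException {
        ArrayTupleBuilder one = new ArrayTupleBuilder(1);
        one.reset();
        one.getDataOutput().writeByte(0); // stand-in for missingWriter.writeMissing(out)
        one.addFieldEndOffset();
        // one.getByteArray() holds just the serialized field, one.getSize() bytes long:
        // exactly the (bytes, 0, size) triple that appendField(...) above consumes.
        return one;
    }
}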
Use of org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder in project asterixdb by apache.
The class AssignRuntimeFactory, method createOneOutputPushRuntime.
@Override
public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx)
        throws HyracksDataException {
    final int[] projectionToOutColumns = new int[projectionList.length];
    for (int j = 0; j < projectionList.length; j++) {
        projectionToOutColumns[j] = Arrays.binarySearch(outColumns, projectionList[j]);
    }
    return new AbstractOneInputOneOutputOneFramePushRuntime() {

        private IPointable result = VoidPointable.FACTORY.createPointable();
        private IScalarEvaluator[] eval = new IScalarEvaluator[evalFactories.length];
        private ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(projectionList.length);
        private boolean first = true;
        private boolean isOpen = false;
        private int tupleIndex = 0;

        @Override
        public void open() throws HyracksDataException {
            if (first) {
                initAccessAppendRef(ctx);
                first = false;
                int n = evalFactories.length;
                for (int i = 0; i < n; i++) {
                    eval[i] = evalFactories[i].createScalarEvaluator(ctx);
                }
            }
            isOpen = true;
            writer.open();
        }

        @Override
        public void close() throws HyracksDataException {
            if (isOpen) {
                super.close();
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            // what if nTuple is 0?
            tAccess.reset(buffer);
            int nTuple = tAccess.getTupleCount();
            if (nTuple < 1) {
                if (nTuple < 0) {
                    throw new HyracksDataException("Negative number of tuples in the frame: " + nTuple);
                }
                appender.flush(writer);
            } else {
                if (nTuple > 1) {
                    for (; tupleIndex < nTuple - 1; tupleIndex++) {
                        tRef.reset(tAccess, tupleIndex);
                        produceTuple(tupleBuilder, tAccess, tupleIndex, tRef);
                        appendToFrameFromTupleBuilder(tupleBuilder);
                    }
                }
                if (tupleIndex < nTuple) {
                    tRef.reset(tAccess, tupleIndex);
                    produceTuple(tupleBuilder, tAccess, tupleIndex, tRef);
                    if (flushFramesRapidly) {
                        // Once all the tuples in the incoming frame have been consumed, the assign
                        // operator pushes its frame to the next operator; it does not wait for the
                        // frame to fill up.
                        appendToFrameFromTupleBuilder(tupleBuilder, true);
                    } else {
                        appendToFrameFromTupleBuilder(tupleBuilder);
                    }
                } else if (flushFramesRapidly) {
                    flushAndReset();
                }
            }
            tupleIndex = 0;
        }

        private void produceTuple(ArrayTupleBuilder tb, IFrameTupleAccessor accessor, int tIndex,
                FrameTupleReference tupleRef) throws HyracksDataException {
            try {
                tb.reset();
                for (int f = 0; f < projectionList.length; f++) {
                    int k = projectionToOutColumns[f];
                    if (k >= 0) {
                        eval[k].evaluate(tupleRef, result);
                        tb.addField(result.getByteArray(), result.getStartOffset(), result.getLength());
                    } else {
                        tb.addField(accessor, tIndex, projectionList[f]);
                    }
                }
            } catch (HyracksDataException e) {
                throw HyracksDataException.create(ErrorCode.ERROR_PROCESSING_TUPLE, e, tupleIndex);
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            if (isOpen) {
                super.fail();
            }
        }

        @Override
        public void flush() throws HyracksDataException {
            appender.flush(writer);
        }
    };
}
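produceTuple() mixes two addField overloads: evaluated columns are copied out of the evaluator's IPointable, while pass-through columns are copied straight from the input frame with addField(accessor, tIndex, fIdx), no deserialization involved. A reduced sketch of the pure pass-through case (the class and method names are illustrative, and the accessor is assumed to have been reset on a frame):

import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

public final class ProjectSketch {

    // Copy a subset of the input columns into a fresh tuple, byte for byte.
    static void project(ArrayTupleBuilder tb, IFrameTupleAccessor accessor, int tIndex, int[] columns)
            throws HyracksDataException {
        tb.reset();
        for (int c : columns) {
            tb.addField(accessor, tIndex, c); // raw copy of column c of tuple tIndex
        }
    }
}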