Use of org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor in project asterixdb by apache.
The class PlainFileWriterOperatorDescriptor, method createPushRuntime.
/*
 * (non-Javadoc)
 *
 * @see
 * org.apache.hyracks.api.dataflow.IActivityNode#createPushRuntime(edu.
 * uci.ics.hyracks.api.context.IHyracksContext,
 * org.apache.hyracks.api.job.IOperatorEnvironment,
 * org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider, int,
 * int)
 */
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider,
        final int partition, int nPartitions) throws HyracksDataException {
    // Output files
    final FileSplit[] splits = fileSplitProvider.getFileSplits();
    IIOManager ioManager = ctx.getIoManager();
    // Frame accessor
    final FrameTupleAccessor frameTupleAccessor =
            new FrameTupleAccessor(recordDescProvider.getInputRecordDescriptor(getActivityId(), 0));
    // Record descriptor
    final RecordDescriptor recordDescriptor = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
    return new AbstractUnaryInputSinkOperatorNodePushable() {

        private BufferedWriter out;
        private ByteBufferInputStream bbis;
        private DataInputStream di;

        @Override
        public void open() throws HyracksDataException {
            try {
                out = new BufferedWriter(new FileWriter(splits[partition].getFile(ioManager)));
                bbis = new ByteBufferInputStream();
                di = new DataInputStream(bbis);
            } catch (Exception e) {
                throw new HyracksDataException(e);
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            try {
                frameTupleAccessor.reset(buffer);
                for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
                    // Skip the field-slot area so the stream starts at the tuple's data.
                    int start = frameTupleAccessor.getTupleStartOffset(tIndex)
                            + frameTupleAccessor.getFieldSlotsLength();
                    bbis.setByteBuffer(buffer, start);
                    Object[] record = new Object[recordDescriptor.getFieldCount()];
                    for (int i = 0; i < record.length; ++i) {
                        Object instance = recordDescriptor.getFields()[i].deserialize(di);
                        if (i == 0) {
                            out.write(String.valueOf(instance));
                        } else {
                            out.write(delim + String.valueOf(instance));
                        }
                    }
                    out.write("\n");
                }
            } catch (IOException ex) {
                throw new HyracksDataException(ex);
            }
        }

        @Override
        public void fail() throws HyracksDataException {
        }

        @Override
        public void close() throws HyracksDataException {
            try {
                out.close();
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }
    };
}
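The snippet above reads whole records by pointing a ByteBufferInputStream at each tuple's start offset and deserializing every field in order. When only a single field is needed, FrameTupleAccessor can also hand out raw field boundaries directly. A minimal sketch of that alternative, assuming a frame buffer whose first field is an integer (the names buffer and recDesc are illustrative, not taken from the code above):

FrameTupleAccessor fta = new FrameTupleAccessor(recDesc);
fta.reset(buffer);
for (int t = 0; t < fta.getTupleCount(); t++) {
    // Absolute offset of field 0: tuple start + field-slot area + relative field start.
    int fieldStart = fta.getTupleStartOffset(t) + fta.getFieldSlotsLength() + fta.getFieldStartOffset(t, 0);
    int key = IntegerPointable.getInteger(fta.getBuffer().array(), fieldStart);
    // ... use key without deserializing the rest of the tuple ...
}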
Use of org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor in project asterixdb by apache.
The class OptimizedHybridHashJoin, method createInMemoryJoiner.
private void createInMemoryJoiner(int inMemTupCount) throws HyracksDataException {
    ISerializableTable table = new SerializableHashTable(inMemTupCount, ctx, bufferManagerForHashTable);
    this.inMemJoiner = new InMemoryHashJoin(ctx, inMemTupCount, new FrameTupleAccessor(probeRd), probeHpc,
            new FrameTupleAccessor(buildRd), buildRd, buildHpc,
            new FrameTuplePairComparator(probeKeys, buildKeys, comparators), isLeftOuter, nonMatchWriters, table,
            predEvaluator, isReversed, bufferManagerForHashTable);
}
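One FrameTupleAccessor wraps each side of the join, so key fields of a probe tuple can be compared against a build tuple in place, without deserializing either record. A minimal sketch of how a FrameTuplePairComparator like the one constructed above is applied; probeFrame, buildFrame, and the tuple indices are placeholders, not names from this class:

FrameTupleAccessor probeAcc = new FrameTupleAccessor(probeRd);
FrameTupleAccessor buildAcc = new FrameTupleAccessor(buildRd);
probeAcc.reset(probeFrame);
buildAcc.reset(buildFrame);
FrameTuplePairComparator cmp = new FrameTuplePairComparator(probeKeys, buildKeys, comparators);
if (cmp.compare(probeAcc, probeTupleIndex, buildAcc, buildTupleIndex) == 0) {
    // Keys match: this probe/build pair joins.
}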
Use of org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor in project asterixdb by apache.
The class HDFSWriteOperatorDescriptor, method createPushRuntime.
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
        final IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
        throws HyracksDataException {
    return new AbstractUnaryInputSinkOperatorNodePushable() {

        private FSDataOutputStream dos;
        private RecordDescriptor inputRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
        private FrameTupleAccessor accessor = new FrameTupleAccessor(inputRd);
        private FrameTupleReference tuple = new FrameTupleReference();
        private ITupleWriter tupleWriter;
        private ClassLoader ctxCL;

        @Override
        public void open() throws HyracksDataException {
            ctxCL = Thread.currentThread().getContextClassLoader();
            Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
            Job conf = confFactory.getConf();
            String outputPath = FileOutputFormat.getOutputPath(conf).toString();
            String fileName = outputPath + File.separator + "part-" + partition;
            tupleWriter = tupleWriterFactory.getTupleWriter(ctx, partition, nPartitions);
            try {
                FileSystem dfs = FileSystem.get(conf.getConfiguration());
                dos = dfs.create(new Path(fileName), true);
                tupleWriter.open(dos);
            } catch (Exception e) {
                throw new HyracksDataException(e);
            }
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            accessor.reset(buffer);
            int tupleCount = accessor.getTupleCount();
            for (int i = 0; i < tupleCount; i++) {
                tuple.reset(accessor, i);
                tupleWriter.write(dos, tuple);
            }
        }

        @Override
        public void fail() throws HyracksDataException {
        }

        @Override
        public void close() throws HyracksDataException {
            try {
                tupleWriter.close(dos);
                dos.close();
            } catch (Exception e) {
                throw new HyracksDataException(e);
            } finally {
                Thread.currentThread().setContextClassLoader(ctxCL);
            }
        }
    };
}
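Here FrameTupleReference acts as a zero-copy cursor: tuple.reset(accessor, i) only records offsets into the frame, so the tuple writer serializes bytes straight out of the incoming buffer. A minimal sketch of reading one field through such a reference (names illustrative):

FrameTupleReference ref = new FrameTupleReference();
ref.reset(accessor, tupleIndex);
byte[] data = ref.getFieldData(0); // the frame's backing array, not a copy
int start = ref.getFieldStart(0);  // offset of field 0 within that array
int len = ref.getFieldLength(0);   // length of field 0 in bytes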
Use of org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor in project asterixdb by apache.
The class BTreeStatsTest, method test01.
@Test
public void test01() throws Exception {
    TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
    IBufferCache bufferCache = harness.getBufferCache();
    IFileMapProvider fmp = harness.getFileMapProvider();

    // declare fields
    int fieldCount = 2;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;

    // declare keys
    int keyFieldCount = 1;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);

    TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
    ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
    ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
    ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();

    IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
    IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
    ITreeIndexMetadataFrame metaFrame = metaFrameFactory.createFrame();

    IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
    BTree btree = new BTree(bufferCache, fmp, freePageManager, interiorFrameFactory, leafFrameFactory,
            cmpFactories, fieldCount, harness.getFileReference());
    btree.create();
    btree.activate();

    Random rnd = new Random();
    rnd.setSeed(50);
    long start = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("INSERTING INTO TREE");
    }

    IFrame frame = new VSizeFrame(ctx);
    FrameTupleAppender appender = new FrameTupleAppender();
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    DataOutput dos = tb.getDataOutput();
    ISerializerDeserializer[] recDescSers =
            { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
    IFrameTupleAccessor accessor = new FrameTupleAccessor(recDesc);
    accessor.reset(frame.getBuffer());
    FrameTupleReference tuple = new FrameTupleReference();
    ITreeIndexAccessor indexAccessor =
            btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);

    // insert 100,000 random keys
    for (int i = 0; i < 100000; i++) {
        int f0 = rnd.nextInt() % 100000;
        int f1 = 5;
        tb.reset();
        IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
        tb.addFieldEndOffset();
        IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
        tb.addFieldEndOffset();
        appender.reset(frame, true);
        appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
        tuple.reset(accessor, 0);
        if (LOGGER.isLoggable(Level.INFO)) {
            if (i % 10000 == 0) {
                long end = System.currentTimeMillis();
                LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
            }
        }
        try {
            indexAccessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                e.printStackTrace();
                throw e;
            }
        }
    }

    int fileId = fmp.lookupFileId(harness.getFileReference());
    TreeIndexStatsGatherer statsGatherer =
            new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId, btree.getRootPageId());
    TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("\n" + stats.toString());
    }

    TreeIndexBufferCacheWarmup bufferCacheWarmup =
            new TreeIndexBufferCacheWarmup(bufferCache, freePageManager, fileId);
    bufferCacheWarmup.warmup(leafFrame, metaFrame, new int[] { 1, 2 }, new int[] { 2, 5 });

    btree.deactivate();
    btree.destroy();
    bufferCache.close();
}
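A detail worth noting in the insert loop above: accessor.reset(frame.getBuffer()) is called only once, before the loop. That works because the accessor keeps a reference to the frame's ByteBuffer, and appender.reset(frame, true) rewrites that same buffer in place on every iteration, so tuple.reset(accessor, 0) always sees the freshly appended tuple at slot 0. This holds here because the fixed-size two-integer tuples never force the VSizeFrame to grow; if the frame ever reallocated its buffer, the accessor would need to be reset again.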
Use of org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor in project asterixdb by apache.
The class FieldPrefixNSMTest, method createTuple.
private ITupleReference createTuple(IHyracksTaskContext ctx, int f0, int f1, int f2, boolean print)
        throws HyracksDataException {
    if (print && LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
    }
    IFrame buf = new VSizeFrame(ctx);
    FrameTupleAppender appender = new FrameTupleAppender(buf);
    ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
    DataOutput dos = tb.getDataOutput();
    @SuppressWarnings("rawtypes")
    ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
    IFrameTupleAccessor accessor = new FrameTupleAccessor(recDesc);
    accessor.reset(buf.getBuffer());
    FrameTupleReference tuple = new FrameTupleReference();
    tb.reset();
    IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
    tb.addFieldEndOffset();
    IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
    tb.addFieldEndOffset();
    IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
    tb.addFieldEndOffset();
    appender.reset(buf, true);
    appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
    tuple.reset(accessor, 0);
    return tuple;
}
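The returned tuple is only a view: FrameTupleReference records offsets into the frame allocated inside this method rather than copying bytes, so the reference remains usable only while that frame's buffer is still reachable and has not been overwritten by a later append.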