use of org.apache.hyracks.api.comm.VSizeFrame in project asterixdb by apache.
the class InputHandlerTest method testMemoryFixedSizeFrameNoDiskNoDiscard.
/*
* Spill = false;
* Discard = false;
* Fixed size frames
*/
@Test
public void testMemoryFixedSizeFrameNoDiskNoDiscard() {
    try {
        IHyracksTaskContext ctx = TestUtils.create(DEFAULT_FRAME_SIZE);
        // No spill, no discard
        FeedPolicyAccessor fpa = createFeedPolicyAccessor(false, false, 0L, DISCARD_ALLOWANCE);
        // Non-active writer
        TestControlledFrameWriter writer = FrameWriterTestUtils.create(DEFAULT_FRAME_SIZE, false);
        writer.freeze();
        // FramePool
        ConcurrentFramePool framePool = new ConcurrentFramePool(NODE_ID, FEED_MEM_BUDGET, DEFAULT_FRAME_SIZE);
        FeedRuntimeInputHandler handler = createInputHandler(ctx, writer, fpa, framePool);
        handler.open();
        VSizeFrame frame = new VSizeFrame(ctx);
        // Add frames NUM_FRAMES times to exhaust the memory budget
        for (int i = 0; i < NUM_FRAMES; i++) {
            handler.nextFrame(frame.getBuffer());
        }
        // The next call should block, so we run it in a different thread
        Future<?> result = EXECUTOR.submit(new Pusher(frame.getBuffer(), handler));
        // Check that nextFrame didn't return
        if (result.isDone()) {
            Assert.fail();
        } else {
            // Check that no records were discarded
            Assert.assertEquals(0, handler.getNumDiscarded());
            // Check that no records were spilled
            Assert.assertEquals(0, handler.getNumSpilled());
            // Check that the input handler subscribed to the framePool
            // and that the number of stalls is not greater than 1
            Assert.assertTrue(handler.getNumStalled() <= 1);
            writer.kick();
        }
        result.get();
        writer.unfreeze();
        handler.close();
    } catch (Throwable th) {
        th.printStackTrace();
        Assert.fail();
    }
    Assert.assertNull(cause);
}
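The Pusher runnable submitted to EXECUTOR is defined elsewhere in InputHandlerTest. A minimal sketch of what it plausibly looks like, assuming it simply forwards its buffer to the handler and records any failure in the test's cause field (both assumptions, not confirmed by this excerpt):

// Hypothetical sketch of the Pusher helper; the real class may differ.
private final class Pusher implements Runnable {
    private final ByteBuffer buffer;
    private final FeedRuntimeInputHandler handler;

    Pusher(ByteBuffer buffer, FeedRuntimeInputHandler handler) {
        this.buffer = buffer;
        this.handler = handler;
    }

    @Override
    public void run() {
        try {
            handler.nextFrame(buffer); // blocks until the frame pool has room
        } catch (HyracksDataException e) {
            cause = e; // checked by Assert.assertNull(cause) at the end of each test
        }
    }
}

Running the push on a separate thread is what lets the test distinguish a blocked nextFrame (result.isDone() is false) from one that returned immediately.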
use of org.apache.hyracks.api.comm.VSizeFrame in project asterixdb by apache.
the class InputHandlerTest method testMemoryFixedSizeFrameNoSpillWithDiscard.
/*
* Spill = false;
* Discard = true; discard only 5%
* Fixed size frames
*/
@Test
public void testMemoryFixedSizeFrameNoSpillWithDiscard() {
    try {
        int discardTestFrames = 100;
        IHyracksTaskContext ctx = TestUtils.create(DEFAULT_FRAME_SIZE);
        // No spill, discard enabled with a 5% allowance
        FeedPolicyAccessor fpa = createFeedPolicyAccessor(false, true, DEFAULT_FRAME_SIZE, DISCARD_ALLOWANCE);
        // Non-active writer
        TestControlledFrameWriter writer = FrameWriterTestUtils.create(DEFAULT_FRAME_SIZE, false);
        writer.freeze();
        // FramePool
        ConcurrentFramePool framePool =
                new ConcurrentFramePool(NODE_ID, discardTestFrames * DEFAULT_FRAME_SIZE, DEFAULT_FRAME_SIZE);
        FeedRuntimeInputHandler handler = createInputHandler(ctx, writer, fpa, framePool);
        handler.open();
        VSizeFrame frame = new VSizeFrame(ctx);
        // Add frames discardTestFrames times to exhaust the memory budget
        for (int i = 0; i < discardTestFrames; i++) {
            handler.nextFrame(frame.getBuffer());
        }
        // The next few calls (about 5% of the total) should NOT block; they should discard instead
        double numDiscarded = 0.0;
        boolean nextShouldDiscard =
                ((numDiscarded + 1.0) / (handler.getTotal() + 1.0)) <= fpa.getMaxFractionDiscard();
        while (nextShouldDiscard) {
            handler.nextFrame(frame.getBuffer());
            numDiscarded++;
            nextShouldDiscard =
                    ((numDiscarded + 1.0) / (handler.getTotal() + 1.0)) <= fpa.getMaxFractionDiscard();
        }
        // The next call should block since it would exceed the discard allowance
        Future<?> result = EXECUTOR.submit(new Pusher(frame.getBuffer(), handler));
        if (result.isDone()) {
            Assert.fail("The producer should switch to stall mode since it is exceeding the discard allowance");
        } else {
            // Check that the expected number of frames was discarded
            Assert.assertEquals((int) numDiscarded, handler.getNumDiscarded());
            // Check that no frames were spilled
            Assert.assertEquals(0, handler.getNumSpilled());
        }
        // Consume the in-memory frames
        writer.unfreeze();
        result.get();
        handler.close();
        Assert.assertEquals(discardTestFrames + 1, writer.nextFrameCount());
    } catch (Throwable th) {
        th.printStackTrace();
        Assert.fail();
    }
    Assert.assertNull(cause);
}
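The loop's condition encodes the discard allowance: a frame may be discarded only while (discarded + 1) / (total + 1) stays at or below the maximum discard fraction. A standalone sketch of that arithmetic, assuming DISCARD_ALLOWANCE is 0.05 as the 5% comment above suggests and that getTotal() counts every frame received:

// Illustrative helper mirroring the test's stopping condition.
static boolean nextShouldDiscard(double numDiscarded, long total, double maxFractionDiscard) {
    return (numDiscarded + 1.0) / (total + 1.0) <= maxFractionDiscard;
}

// With 100 accepted frames and a 0.05 allowance, roughly 5 extra frames
// can be discarded before the producer must stall:
// nextShouldDiscard(4, 104, 0.05) == true   (5 / 105 ≈ 0.048)
// nextShouldDiscard(5, 105, 0.05) == false  (6 / 106 ≈ 0.057)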
use of org.apache.hyracks.api.comm.VSizeFrame in project asterixdb by apache.
the class WordTupleParserFactory method createTupleParser.
@Override
public ITupleParser createTupleParser(final IHyracksTaskContext ctx) {
    return new ITupleParser() {

        @Override
        public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
            try {
                FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
                ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
                DataOutput dos = tb.getDataOutput();
                IValueParser utf8StringParser = UTF8StringParserFactory.INSTANCE.createValueParser();
                WordCursor cursor = new WordCursor(new InputStreamReader(in));
                while (cursor.nextWord()) {
                    tb.reset();
                    utf8StringParser.parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart, dos);
                    tb.addFieldEndOffset();
                    FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0,
                            tb.getSize());
                }
                appender.write(writer, true);
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }
    };
}
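Driving the returned ITupleParser needs only an InputStream and an IFrameWriter. A hedged harness sketch, assuming ctx is an IHyracksTaskContext from the surrounding scope; the no-op sink writer here is hypothetical, not part of the project:

// Feeds a short string through the parser and drops the produced frames.
InputStream in = new ByteArrayInputStream("hello hyracks world".getBytes(StandardCharsets.UTF_8));
ITupleParser parser = new WordTupleParserFactory().createTupleParser(ctx);
parser.parse(in, new IFrameWriter() {
    @Override
    public void open() {
    }

    @Override
    public void nextFrame(ByteBuffer buffer) {
        // a real writer would forward or inspect the frame here
    }

    public void flush() throws HyracksDataException {
        // no-op; kept for Hyracks versions where IFrameWriter declares flush()
    }

    @Override
    public void fail() {
    }

    @Override
    public void close() {
    }
});

Each word becomes a single-field UTF-8 tuple, and the appender.write(writer, true) at the end of parse flushes the final partially filled frame.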
use of org.apache.hyracks.api.comm.VSizeFrame in project asterixdb by apache.
the class TextKeyValueParserFactory method createKeyValueParser.
@Override
public IKeyValueParser<LongWritable, Text> createKeyValueParser(final IHyracksTaskContext ctx)
        throws HyracksDataException {
    final ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
    final FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
    return new IKeyValueParser<LongWritable, Text>() {

        @Override
        public void open(IFrameWriter writer) {
        }

        @Override
        public void parse(LongWritable key, Text value, IFrameWriter writer, String fileString)
                throws HyracksDataException {
            tb.reset();
            tb.addField(value.getBytes(), 0, value.getLength());
            FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
        }

        @Override
        public void close(IFrameWriter writer) throws HyracksDataException {
            appender.write(writer, false);
        }
    };
}
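The parser's lifecycle mirrors a frame writer: open once, parse per record, close to flush the shared appender. A hedged sketch of driving it, where ctx, writer, and the record values are placeholders:

// Hypothetical driver pushing two HDFS text records through the parser.
IKeyValueParser<LongWritable, Text> parser =
        new TextKeyValueParserFactory().createKeyValueParser(ctx);
parser.open(writer);
parser.parse(new LongWritable(0), new Text("first line"), writer, "part-00000");
parser.parse(new LongWritable(11), new Text("second line"), writer, "part-00000");
parser.close(writer); // flushes the last partially filled frame to the writer

Note that the tuple builder and appender are created once in the factory method and shared across all parse calls, so tuples from many records can be packed into the same frame.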
use of org.apache.hyracks.api.comm.VSizeFrame in project asterixdb by apache.
the class AbstractMultiNCIntegrationTest method runTest.
protected void runTest(JobSpecification spec) throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info(spec.toJSON().asText());
    }
    JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info(jobId.toString());
    }
    int nReaders = 1;
    FrameManager resultDisplayFrameMgr = new FrameManager(spec.getFrameSize());
    VSizeFrame resultFrame = new VSizeFrame(resultDisplayFrameMgr);
    IFrameTupleAccessor frameTupleAccessor = new ResultFrameTupleAccessor();
    if (!spec.getResultSetIds().isEmpty()) {
        IHyracksDataset hyracksDataset = new HyracksDataset(hcc, spec.getFrameSize(), nReaders);
        IHyracksDatasetReader reader = hyracksDataset.createReader(jobId, spec.getResultSetIds().get(0));
        ObjectMapper om = new ObjectMapper();
        ArrayNode resultRecords = om.createArrayNode();
        ByteBufferInputStream bbis = new ByteBufferInputStream();
        int readSize = reader.read(resultFrame);
        while (readSize > 0) {
            try {
                frameTupleAccessor.reset(resultFrame.getBuffer());
                for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
                    int start = frameTupleAccessor.getTupleStartOffset(tIndex);
                    int length = frameTupleAccessor.getTupleEndOffset(tIndex) - start;
                    bbis.setByteBuffer(resultFrame.getBuffer(), start);
                    byte[] recordBytes = new byte[length];
                    bbis.read(recordBytes, 0, length);
                    resultRecords.add(new String(recordBytes, 0, length));
                }
            } finally {
                try {
                    bbis.close();
                } catch (IOException e) {
                    throw new HyracksDataException(e);
                }
            }
            readSize = reader.read(resultFrame);
        }
    }
    hcc.waitForCompletion(jobId);
    dumpOutputFiles();
}
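The read loop depends on VSizeFrame being growable: reader.read can enlarge the frame when an incoming result frame is bigger than the current buffer. A small sketch of that resizing behavior in isolation; the 32 KB size is arbitrary, and the capacity check is an assumption about how growth rounds to multiples of the minimum frame size:

// Minimal sketch of VSizeFrame growth, independent of the test above.
FrameManager mgr = new FrameManager(32768);
VSizeFrame frame = new VSizeFrame(mgr); // starts at the minimum frame size
frame.ensureFrameSize(2 * mgr.getInitialFrameSize()); // grows the backing buffer
assert frame.getBuffer().capacity() >= 2 * mgr.getInitialFrameSize();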