Use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.
In the class AbstractSortRunGenerator, method flushFramesToRun:
protected void flushFramesToRun() throws HyracksDataException {
    getSorter().sort();
    RunFileWriter runWriter = getRunFileWriter();
    IFrameWriter flushWriter = getFlushableFrameWriter(runWriter);
    flushWriter.open();
    try {
        getSorter().flush(flushWriter);
    } finally {
        flushWriter.close();
    }
    generatedRunFileReaders.add(runWriter.createDeleteOnCloseReader());
    getSorter().reset();
}
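The method follows the producer side of the IFrameWriter contract: open() before any data is pushed, then close() in a finally block so the run file is released even if the flush fails. A minimal sketch of that protocol, using only the IFrameWriter methods exercised on this page (the helper class, its name, and its parameters are illustrative, not part of Hyracks):

import java.nio.ByteBuffer;
import java.util.List;

import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.exceptions.HyracksDataException;

public final class FramePusher {

    // Push a batch of frames to any IFrameWriter, honoring the open/close lifecycle.
    static void pushFrames(IFrameWriter writer, List<ByteBuffer> frames) throws HyracksDataException {
        writer.open();
        try {
            for (ByteBuffer frame : frames) {
                writer.nextFrame(frame);
            }
            // ask the writer to propagate anything it has buffered downstream
            writer.flush();
        } finally {
            // always close, even when nextFrame or flush throws
            writer.close();
        }
    }

    private FramePusher() {
    }
}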
Use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.
In the class DelimitedDataTupleParserFactory, method createTupleParser:
@Override
public ITupleParser createTupleParser(final IHyracksTaskContext ctx) {
    return new ITupleParser() {

        @Override
        public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
            try {
                IValueParser[] valueParsers = new IValueParser[valueParserFactories.length];
                for (int i = 0; i < valueParserFactories.length; ++i) {
                    valueParsers[i] = valueParserFactories[i].createValueParser();
                }
                IFrame frame = new VSizeFrame(ctx);
                FrameTupleAppender appender = new FrameTupleAppender();
                appender.reset(frame, true);
                ArrayTupleBuilder tb = new ArrayTupleBuilder(valueParsers.length);
                DataOutput dos = tb.getDataOutput();
                FieldCursorForDelimitedDataParser cursor =
                        new FieldCursorForDelimitedDataParser(new InputStreamReader(in), fieldDelimiter, quote);
                while (cursor.nextRecord()) {
                    tb.reset();
                    for (int i = 0; i < valueParsers.length; ++i) {
                        if (!cursor.nextField()) {
                            break;
                        }
                        // Eliminate double quotes in the field that we are going to parse
                        if (cursor.isDoubleQuoteIncludedInThisField) {
                            cursor.eliminateDoubleQuote(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
                            cursor.fEnd -= cursor.doubleQuoteCount;
                            cursor.isDoubleQuoteIncludedInThisField = false;
                        }
                        valueParsers[i].parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart, dos);
                        tb.addFieldEndOffset();
                    }
                    FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0,
                            tb.getSize());
                }
                appender.write(writer, true);
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }
    };
}
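Each delimited record is assembled with an ArrayTupleBuilder, appended through FrameUtils.appendToWriter (which forwards the current frame to the writer when it fills up), and the final partial frame is pushed by appender.write(writer, true). A hedged sketch of how the resulting parser might be driven over an in-memory record; the class name, the sample record, and the surrounding wiring are illustrative, and the package locations of ITupleParser/ITupleParserFactory are assumed:

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.std.file.ITupleParser;
import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;

public final class DelimitedParseExample {

    // Parse one '|'-delimited record from memory and push the resulting frames downstream.
    static void parseOneRecord(ITupleParserFactory factory, IHyracksTaskContext ctx, IFrameWriter writer)
            throws HyracksDataException {
        ITupleParser parser = factory.createTupleParser(ctx);
        InputStream in = new ByteArrayInputStream("1|2.5|true|hello\n".getBytes(StandardCharsets.UTF_8));
        writer.open();
        try {
            parser.parse(in, writer); // builds tuples field by field and appends them to frames
        } finally {
            writer.close();
        }
    }

    private DelimitedParseExample() {
    }
}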
Use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.
In the class ConnectorDescriptorWithMessagingTest, method testMessageLargerThanEmptyFrame:
@Test
public void testMessageLargerThanEmptyFrame() throws Exception {
    try {
        List<Integer> routing = Arrays.asList(0, 1, 2, 3, 4);
        IConnectorDescriptorRegistry connDescRegistry = Mockito.mock(IConnectorDescriptorRegistry.class);
        ITuplePartitionComputerFactory partitionComputerFactory = new TestPartitionComputerFactory(routing);
        MToNPartitioningWithMessageConnectorDescriptor connector =
                new MToNPartitioningWithMessageConnectorDescriptor(connDescRegistry, partitionComputerFactory);
        IHyracksTaskContext ctx = TestUtils.create(DEFAULT_FRAME_SIZE);
        VSizeFrame message = new VSizeFrame(ctx);
        VSizeFrame tempBuffer = new VSizeFrame(ctx);
        TaskUtil.putInSharedMap(HyracksConstants.KEY_MESSAGE, message, ctx);
        writeRandomMessage(message, MessagingFrameTupleAppender.MARKER_MESSAGE, DEFAULT_FRAME_SIZE + 1);
        ISerializerDeserializer<?>[] serdes = new ISerializerDeserializer<?>[] {
                Integer64SerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
                BooleanSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() };
        RecordDescriptor rDesc = new RecordDescriptor(serdes);
        TestPartitionWriterFactory partitionWriterFactory = new TestPartitionWriterFactory();
        IFrameWriter partitioner = connector.createPartitioner(ctx, rDesc, partitionWriterFactory, CURRENT_PRODUCER,
                NUMBER_OF_CONSUMERS, NUMBER_OF_CONSUMERS);
        partitioner.open();
        FrameTupleAccessor fta = new FrameTupleAccessor(rDesc);
        List<TestFrameWriter> recipients = new ArrayList<>();
        for (IFrameWriter writer : partitionWriterFactory.getWriters().values()) {
            recipients.add((TestFrameWriter) writer);
        }
        partitioner.flush();
        for (TestFrameWriter writer : recipients) {
            Assert.assertEquals(writer.nextFrameCount(), 1);
            fta.reset(writer.getLastFrame());
            Assert.assertEquals(fta.getTupleCount(), 1);
            FeedUtils.processFeedMessage(writer.getLastFrame(), tempBuffer, fta);
            Assert.assertEquals(MessagingFrameTupleAppender.MARKER_MESSAGE,
                    MessagingFrameTupleAppender.getMessageType(tempBuffer));
        }
        message.getBuffer().clear();
        message.getBuffer().put(MessagingFrameTupleAppender.ACK_REQ_FEED_MESSAGE);
        message.getBuffer().flip();
        partitioner.flush();
        for (TestFrameWriter writer : recipients) {
            Assert.assertEquals(writer.nextFrameCount(), 2);
            fta.reset(writer.getLastFrame());
            Assert.assertEquals(fta.getTupleCount(), 1);
            FeedUtils.processFeedMessage(writer.getLastFrame(), tempBuffer, fta);
            Assert.assertEquals(MessagingFrameTupleAppender.ACK_REQ_FEED_MESSAGE,
                    MessagingFrameTupleAppender.getMessageType(tempBuffer));
        }
        message.getBuffer().clear();
        message.getBuffer().put(MessagingFrameTupleAppender.NULL_FEED_MESSAGE);
        message.getBuffer().flip();
        partitioner.flush();
        for (TestFrameWriter writer : recipients) {
            Assert.assertEquals(writer.nextFrameCount(), 3);
            fta.reset(writer.getLastFrame());
            Assert.assertEquals(fta.getTupleCount(), 1);
            FeedUtils.processFeedMessage(writer.getLastFrame(), tempBuffer, fta);
            Assert.assertEquals(MessagingFrameTupleAppender.NULL_FEED_MESSAGE,
                    MessagingFrameTupleAppender.getMessageType(tempBuffer));
        }
        partitioner.close();
        for (TestFrameWriter writer : recipients) {
            Assert.assertEquals(writer.nextFrameCount(), 4);
            Assert.assertEquals(writer.closeCount(), 1);
        }
    } catch (Throwable th) {
        th.printStackTrace();
        throw th;
    }
}
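Each recipient in this test is a TestFrameWriter, a test double that records how often its lifecycle methods were called and keeps the last frame it received. That utility is not shown on this page; a rough, illustrative stand-in (not the actual TestFrameWriter) that implements the full IFrameWriter contract, assuming open/nextFrame/flush/fail/close is the complete interface, could look like this:

import java.nio.ByteBuffer;

import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.exceptions.HyracksDataException;

// Illustrative stand-in: counts lifecycle calls and copies the last frame so a
// test can make assertions like the ones above.
public class RecordingFrameWriter implements IFrameWriter {
    private int nextFrameCount;
    private int closeCount;
    private ByteBuffer lastFrame;

    @Override
    public void open() throws HyracksDataException {
        // nothing to acquire for an in-memory recorder
    }

    @Override
    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
        nextFrameCount++;
        ByteBuffer copy = ByteBuffer.allocate(buffer.capacity());
        ByteBuffer source = buffer.duplicate();
        source.rewind();
        copy.put(source);
        copy.flip();
        lastFrame = copy;
    }

    @Override
    public void flush() throws HyracksDataException {
        // nothing buffered locally
    }

    @Override
    public void fail() throws HyracksDataException {
        // a fuller test double might count failures as well
    }

    @Override
    public void close() throws HyracksDataException {
        closeCount++;
    }

    public int nextFrameCount() {
        return nextFrameCount;
    }

    public int closeCount() {
        return closeCount;
    }

    public ByteBuffer getLastFrame() {
        return lastFrame;
    }
}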
Use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.
In the class StartTasksWork, method run:
@Override
public void run() {
    Task task = null;
    try {
        NCServiceContext serviceCtx = ncs.getContext();
        Joblet joblet = getOrCreateLocalJoblet(deploymentId, jobId, serviceCtx, acgBytes);
        final ActivityClusterGraph acg = joblet.getActivityClusterGraph();
        IRecordDescriptorProvider rdp = new IRecordDescriptorProvider() {

            @Override
            public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
                ActivityCluster ac = acg.getActivityMap().get(aid);
                IConnectorDescriptor conn = ac.getActivityOutputMap().get(aid).get(outputIndex);
                return ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }

            @Override
            public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
                ActivityCluster ac = acg.getActivityMap().get(aid);
                IConnectorDescriptor conn = ac.getActivityInputMap().get(aid).get(inputIndex);
                return ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }
        };
        for (TaskAttemptDescriptor td : taskDescriptors) {
            TaskAttemptId taId = td.getTaskAttemptId();
            TaskId tid = taId.getTaskId();
            ActivityId aid = tid.getActivityId();
            ActivityCluster ac = acg.getActivityMap().get(aid);
            IActivity han = ac.getActivityMap().get(aid);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Initializing " + taId + " -> " + han);
            }
            final int partition = tid.getPartition();
            List<IConnectorDescriptor> inputs = ac.getActivityInputMap().get(aid);
            task = new Task(joblet, taId, han.getClass().getName(), ncs.getExecutor(), ncs,
                    createInputChannels(td, inputs));
            IOperatorNodePushable operator = han.createPushRuntime(task, rdp, partition, td.getPartitionCount());
            List<IPartitionCollector> collectors = new ArrayList<>();
            if (inputs != null) {
                for (int i = 0; i < inputs.size(); ++i) {
                    IConnectorDescriptor conn = inputs.get(i);
                    IConnectorPolicy cPolicy = connectorPoliciesMap.get(conn.getConnectorId());
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("input: " + i + ": " + conn.getConnectorId());
                    }
                    RecordDescriptor recordDesc = ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                    IPartitionCollector collector =
                            createPartitionCollector(td, partition, task, i, conn, recordDesc, cPolicy);
                    collectors.add(collector);
                }
            }
            List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(aid);
            if (outputs != null) {
                for (int i = 0; i < outputs.size(); ++i) {
                    final IConnectorDescriptor conn = outputs.get(i);
                    RecordDescriptor recordDesc = ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                    IConnectorPolicy cPolicy = connectorPoliciesMap.get(conn.getConnectorId());
                    IPartitionWriterFactory pwFactory =
                            createPartitionWriterFactory(task, cPolicy, jobId, conn, partition, taId, flags);
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("output: " + i + ": " + conn.getConnectorId());
                    }
                    IFrameWriter writer = conn.createPartitioner(task, recordDesc, pwFactory, partition,
                            td.getPartitionCount(), td.getOutputPartitionCounts()[i]);
                    operator.setOutputFrameWriter(i, writer, recordDesc);
                }
            }
            task.setTaskRuntime(collectors.toArray(new IPartitionCollector[collectors.size()]), operator);
            joblet.addTask(task);
            task.start();
        }
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Failure starting a task", e);
        // notify cc of start task failure
        List<Exception> exceptions = new ArrayList<>();
        ExceptionUtils.setNodeIds(exceptions, ncs.getId());
        ncs.getWorkQueue().schedule(new NotifyTaskFailureWork(ncs, task, exceptions));
    }
}
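For each activity output, conn.createPartitioner yields an IFrameWriter, and the operator only ever sees that interface through setOutputFrameWriter. Because of that, the writer can be wrapped before it is handed over, for example to count the frames an operator emits. A hedged sketch of such a decorator; the class name and the logging are illustrative, and nothing in Hyracks requires this wrapper:

import java.nio.ByteBuffer;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.exceptions.HyracksDataException;

// Illustrative decorator: forwards every call to the wrapped partition writer
// while counting the frames that pass through. It could be interposed before
// operator.setOutputFrameWriter(i, writer, recordDesc) for debugging.
public class FrameCountingWriter implements IFrameWriter {
    private static final Logger LOGGER = Logger.getLogger(FrameCountingWriter.class.getName());
    private final IFrameWriter delegate;
    private long frames;

    public FrameCountingWriter(IFrameWriter delegate) {
        this.delegate = delegate;
    }

    @Override
    public void open() throws HyracksDataException {
        delegate.open();
    }

    @Override
    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
        frames++;
        delegate.nextFrame(buffer);
    }

    @Override
    public void flush() throws HyracksDataException {
        delegate.flush();
    }

    @Override
    public void fail() throws HyracksDataException {
        delegate.fail();
    }

    @Override
    public void close() throws HyracksDataException {
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("forwarded " + frames + " frames");
        }
        delegate.close();
    }
}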
Use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.
In the class FramewriterTest, method setUp:
@Before
public void setUp() throws Exception {
    // Mock static methods
    PowerMockito.mockStatic(BTreeUtils.class);
    PowerMockito.when(BTreeUtils.getSearchMultiComparator(Matchers.any(), Matchers.any()))
            .thenReturn(mockMultiComparator());
    PowerMockito.mockStatic(FrameUtils.class);
    // Custom implementation of FrameUtils that pushes to the next writer immediately
    PowerMockito.when(FrameUtils.appendToWriter(Matchers.any(IFrameWriter.class),
            Matchers.any(IFrameTupleAppender.class), Matchers.any(IFrameTupleAccessor.class), Matchers.anyInt(),
            Matchers.anyInt())).thenAnswer(new Answer<Integer>() {

                @Override
                public Integer answer(InvocationOnMock invocation) throws Throwable {
                    Object[] args = invocation.getArguments();
                    IFrameWriter writer = (IFrameWriter) args[0];
                    writer.nextFrame(EMPTY_BUFFER);
                    return BUFFER_SIZE;
                }
            });
    // Create global mocks for FrameTupleAccessor and ArrayTupleBuilder
    FrameTupleAccessor frameAccessor = Mockito.mock(FrameTupleAccessor.class);
    Mockito.when(frameAccessor.getTupleCount()).thenReturn(RECORDS_PER_FRAME);
    // Global custom implementations for FrameTupleAppender.
    // Since there are two appenders, each test needs to be run twice.
    FrameTupleAppender[] appenders = mockAppenders();
    // Mock all instances of a class. Note that the class calling these constructors must be prepared as well.
    PowerMockito.whenNew(FrameTupleAccessor.class).withAnyArguments().thenReturn(frameAccessor);
    PowerMockito.whenNew(FrameTupleAppender.class).withAnyArguments().thenAnswer(new Answer<FrameTupleAppender>() {

        @Override
        public FrameTupleAppender answer(InvocationOnMock invocation) throws Throwable {
            counter++;
            if (counter % 2 == 1) {
                return appenders[0];
            }
            return appenders[1];
        }
    });
}
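With appendToWriter stubbed this way, any code under test that appends a tuple immediately triggers nextFrame on the downstream writer and reports BUFFER_SIZE bytes written. A hypothetical follow-up test illustrating that behavior inside the same test class; the test name and the local mocks are illustrative, while BUFFER_SIZE and EMPTY_BUFFER are the constants already used in setUp:

@Test
public void stubbedAppendPushesImmediately() throws Exception {
    // All arguments match the Matchers.any(...) stubbing above, so the mocked
    // FrameUtils.appendToWriter forwards exactly one frame to the downstream writer.
    IFrameWriter downstream = Mockito.mock(IFrameWriter.class);
    IFrameTupleAppender appender = Mockito.mock(IFrameTupleAppender.class);
    IFrameTupleAccessor accessor = Mockito.mock(IFrameTupleAccessor.class);
    int bytesWritten = FrameUtils.appendToWriter(downstream, appender, accessor, 0, 0);
    Assert.assertEquals(BUFFER_SIZE, bytesWritten);
    Mockito.verify(downstream).nextFrame(EMPTY_BUFFER);
}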