Example 11 with IFrameWriter

use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.

the class AbstractSortRunGenerator method flushFramesToRun.

protected void flushFramesToRun() throws HyracksDataException {
    getSorter().sort();
    RunFileWriter runWriter = getRunFileWriter();
    IFrameWriter flushWriter = getFlushableFrameWriter(runWriter);
    flushWriter.open();
    try {
        getSorter().flush(flushWriter);
    } finally {
        flushWriter.close();
    }
    generatedRunFileReaders.add(runWriter.createDeleteOnCloseReader());
    getSorter().reset();
}
Also used : IFrameWriter(org.apache.hyracks.api.comm.IFrameWriter) RunFileWriter(org.apache.hyracks.dataflow.common.io.RunFileWriter)
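
The run writer above is driven through the standard IFrameWriter lifecycle: open(), any number of nextFrame() calls, fail() if something goes wrong, and close() in a finally block. The following is a minimal caller-side sketch of that contract; it is illustrative code, not taken from the AsterixDB sources, and pushFrames and its frame list are hypothetical names.

private static void pushFrames(IFrameWriter writer, List<ByteBuffer> frames) throws HyracksDataException {
    writer.open();
    try {
        for (ByteBuffer frame : frames) {
            // hand the consumer one complete frame per call
            writer.nextFrame(frame);
        }
    } catch (Exception e) {
        // signal the consumer that the stream is being aborted before closing it
        writer.fail();
        throw new HyracksDataException(e);
    } finally {
        // close() must always run, even when the producer failed
        writer.close();
    }
}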

Example 12 with IFrameWriter

use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.

the class DelimitedDataTupleParserFactory method createTupleParser.

@Override
public ITupleParser createTupleParser(final IHyracksTaskContext ctx) {
    return new ITupleParser() {

        @Override
        public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
            try {
                IValueParser[] valueParsers = new IValueParser[valueParserFactories.length];
                for (int i = 0; i < valueParserFactories.length; ++i) {
                    valueParsers[i] = valueParserFactories[i].createValueParser();
                }
                IFrame frame = new VSizeFrame(ctx);
                FrameTupleAppender appender = new FrameTupleAppender();
                appender.reset(frame, true);
                ArrayTupleBuilder tb = new ArrayTupleBuilder(valueParsers.length);
                DataOutput dos = tb.getDataOutput();
                FieldCursorForDelimitedDataParser cursor = new FieldCursorForDelimitedDataParser(new InputStreamReader(in), fieldDelimiter, quote);
                while (cursor.nextRecord()) {
                    tb.reset();
                    for (int i = 0; i < valueParsers.length; ++i) {
                        if (!cursor.nextField()) {
                            break;
                        }
                        // Eliminate double quotes in the field that we are going to parse
                        if (cursor.isDoubleQuoteIncludedInThisField) {
                            cursor.eliminateDoubleQuote(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
                            cursor.fEnd -= cursor.doubleQuoteCount;
                            cursor.isDoubleQuoteIncludedInThisField = false;
                        }
                        valueParsers[i].parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart, dos);
                        tb.addFieldEndOffset();
                    }
                    FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
                }
                appender.write(writer, true);
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
        }
    };
}
Also used : IFrameWriter(org.apache.hyracks.api.comm.IFrameWriter) DataOutput(java.io.DataOutput) InputStreamReader(java.io.InputStreamReader) IFrame(org.apache.hyracks.api.comm.IFrame) InputStream(java.io.InputStream) ArrayTupleBuilder(org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder) IOException(java.io.IOException) VSizeFrame(org.apache.hyracks.api.comm.VSizeFrame) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) FrameTupleAppender(org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender) IValueParser(org.apache.hyracks.dataflow.common.data.parsers.IValueParser)
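
A hedged usage sketch of the parser produced above: the caller obtains an ITupleParser from the factory and pushes a delimited stream at a downstream IFrameWriter. The factory, task context, writer, and input bytes below (tupleParserFactory, ctx, writer, csvBytes) are assumed to exist and are named only for illustration.

ITupleParser parser = tupleParserFactory.createTupleParser(ctx);
writer.open();
try {
    // parse() reads delimited records from the stream, builds tuples, and appends frames to the writer
    parser.parse(new ByteArrayInputStream(csvBytes), writer);
} finally {
    writer.close();
}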

Example 13 with IFrameWriter

use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.

the class ConnectorDescriptorWithMessagingTest method testMessageLargerThanEmptyFrame.

@Test
public void testMessageLargerThanEmptyFrame() throws Exception {
    try {
        List<Integer> routing = Arrays.asList(0, 1, 2, 3, 4);
        IConnectorDescriptorRegistry connDescRegistry = Mockito.mock(IConnectorDescriptorRegistry.class);
        ITuplePartitionComputerFactory partitionComputerFactory = new TestPartitionComputerFactory(routing);
        MToNPartitioningWithMessageConnectorDescriptor connector = new MToNPartitioningWithMessageConnectorDescriptor(connDescRegistry, partitionComputerFactory);
        IHyracksTaskContext ctx = TestUtils.create(DEFAULT_FRAME_SIZE);
        VSizeFrame message = new VSizeFrame(ctx);
        VSizeFrame tempBuffer = new VSizeFrame(ctx);
        TaskUtil.putInSharedMap(HyracksConstants.KEY_MESSAGE, message, ctx);
        writeRandomMessage(message, MessagingFrameTupleAppender.MARKER_MESSAGE, DEFAULT_FRAME_SIZE + 1);
        ISerializerDeserializer<?>[] serdes = new ISerializerDeserializer<?>[] { Integer64SerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE, BooleanSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() };
        RecordDescriptor rDesc = new RecordDescriptor(serdes);
        TestPartitionWriterFactory partitionWriterFactory = new TestPartitionWriterFactory();
        IFrameWriter partitioner = connector.createPartitioner(ctx, rDesc, partitionWriterFactory, CURRENT_PRODUCER, NUMBER_OF_CONSUMERS, NUMBER_OF_CONSUMERS);
        partitioner.open();
        FrameTupleAccessor fta = new FrameTupleAccessor(rDesc);
        List<TestFrameWriter> recipients = new ArrayList<>();
        for (IFrameWriter writer : partitionWriterFactory.getWriters().values()) {
            recipients.add((TestFrameWriter) writer);
        }
        partitioner.flush();
        for (TestFrameWriter writer : recipients) {
            Assert.assertEquals(writer.nextFrameCount(), 1);
            fta.reset(writer.getLastFrame());
            Assert.assertEquals(fta.getTupleCount(), 1);
            FeedUtils.processFeedMessage(writer.getLastFrame(), tempBuffer, fta);
            Assert.assertEquals(MessagingFrameTupleAppender.MARKER_MESSAGE, MessagingFrameTupleAppender.getMessageType(tempBuffer));
        }
        message.getBuffer().clear();
        message.getBuffer().put(MessagingFrameTupleAppender.ACK_REQ_FEED_MESSAGE);
        message.getBuffer().flip();
        partitioner.flush();
        for (TestFrameWriter writer : recipients) {
            Assert.assertEquals(writer.nextFrameCount(), 2);
            fta.reset(writer.getLastFrame());
            Assert.assertEquals(fta.getTupleCount(), 1);
            FeedUtils.processFeedMessage(writer.getLastFrame(), tempBuffer, fta);
            Assert.assertEquals(MessagingFrameTupleAppender.ACK_REQ_FEED_MESSAGE, MessagingFrameTupleAppender.getMessageType(tempBuffer));
        }
        message.getBuffer().clear();
        message.getBuffer().put(MessagingFrameTupleAppender.NULL_FEED_MESSAGE);
        message.getBuffer().flip();
        partitioner.flush();
        for (TestFrameWriter writer : recipients) {
            Assert.assertEquals(writer.nextFrameCount(), 3);
            fta.reset(writer.getLastFrame());
            Assert.assertEquals(fta.getTupleCount(), 1);
            FeedUtils.processFeedMessage(writer.getLastFrame(), tempBuffer, fta);
            Assert.assertEquals(MessagingFrameTupleAppender.NULL_FEED_MESSAGE, MessagingFrameTupleAppender.getMessageType(tempBuffer));
        }
        partitioner.close();
        for (TestFrameWriter writer : recipients) {
            Assert.assertEquals(writer.nextFrameCount(), 4);
            Assert.assertEquals(writer.closeCount(), 1);
        }
    } catch (Throwable th) {
        th.printStackTrace();
        throw th;
    }
}
Also used : ITuplePartitionComputerFactory(org.apache.hyracks.api.dataflow.value.ITuplePartitionComputerFactory) MToNPartitioningWithMessageConnectorDescriptor(org.apache.hyracks.dataflow.std.connectors.MToNPartitioningWithMessageConnectorDescriptor) IFrameWriter(org.apache.hyracks.api.comm.IFrameWriter) RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) TestFrameWriter(org.apache.hyracks.api.test.TestFrameWriter) ArrayList(java.util.ArrayList) UTF8StringSerializerDeserializer(org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer) VSizeFrame(org.apache.hyracks.api.comm.VSizeFrame) ISerializerDeserializer(org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) IConnectorDescriptorRegistry(org.apache.hyracks.api.job.IConnectorDescriptorRegistry) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) FrameTupleAccessor(org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor) Test(org.junit.Test)
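
TestFrameWriter is the harness class the test uses to observe how the partitioner drives each recipient. Below is a minimal sketch of a recipient in the same spirit, counting the IFrameWriter callbacks and keeping a copy of the last frame; it is an illustration only, not the actual org.apache.hyracks.api.test.TestFrameWriter.

class CountingFrameWriter implements IFrameWriter {

    private int nextFrameCount;
    private int closeCount;
    private ByteBuffer lastFrame;

    @Override
    public void open() throws HyracksDataException {
        // nothing to allocate in this sketch
    }

    @Override
    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
        nextFrameCount++;
        // copy the whole frame with absolute gets so the test can inspect it later
        lastFrame = ByteBuffer.allocate(buffer.capacity());
        for (int i = 0; i < buffer.capacity(); i++) {
            lastFrame.put(i, buffer.get(i));
        }
    }

    @Override
    public void flush() throws HyracksDataException {
        // frames are treated as delivered as soon as nextFrame() is called in this sketch
    }

    @Override
    public void fail() throws HyracksDataException {
        // no failure bookkeeping in this sketch
    }

    @Override
    public void close() throws HyracksDataException {
        closeCount++;
    }

    public int nextFrameCount() {
        return nextFrameCount;
    }

    public int closeCount() {
        return closeCount;
    }

    public ByteBuffer getLastFrame() {
        return lastFrame;
    }
}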

Example 14 with IFrameWriter

use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.

the class StartTasksWork method run.

@Override
public void run() {
    Task task = null;
    try {
        NCServiceContext serviceCtx = ncs.getContext();
        Joblet joblet = getOrCreateLocalJoblet(deploymentId, jobId, serviceCtx, acgBytes);
        final ActivityClusterGraph acg = joblet.getActivityClusterGraph();
        IRecordDescriptorProvider rdp = new IRecordDescriptorProvider() {

            @Override
            public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
                ActivityCluster ac = acg.getActivityMap().get(aid);
                IConnectorDescriptor conn = ac.getActivityOutputMap().get(aid).get(outputIndex);
                return ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }

            @Override
            public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
                ActivityCluster ac = acg.getActivityMap().get(aid);
                IConnectorDescriptor conn = ac.getActivityInputMap().get(aid).get(inputIndex);
                return ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }
        };
        for (TaskAttemptDescriptor td : taskDescriptors) {
            TaskAttemptId taId = td.getTaskAttemptId();
            TaskId tid = taId.getTaskId();
            ActivityId aid = tid.getActivityId();
            ActivityCluster ac = acg.getActivityMap().get(aid);
            IActivity han = ac.getActivityMap().get(aid);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Initializing " + taId + " -> " + han);
            }
            final int partition = tid.getPartition();
            List<IConnectorDescriptor> inputs = ac.getActivityInputMap().get(aid);
            task = new Task(joblet, taId, han.getClass().getName(), ncs.getExecutor(), ncs, createInputChannels(td, inputs));
            IOperatorNodePushable operator = han.createPushRuntime(task, rdp, partition, td.getPartitionCount());
            List<IPartitionCollector> collectors = new ArrayList<>();
            if (inputs != null) {
                for (int i = 0; i < inputs.size(); ++i) {
                    IConnectorDescriptor conn = inputs.get(i);
                    IConnectorPolicy cPolicy = connectorPoliciesMap.get(conn.getConnectorId());
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("input: " + i + ": " + conn.getConnectorId());
                    }
                    RecordDescriptor recordDesc = ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                    IPartitionCollector collector = createPartitionCollector(td, partition, task, i, conn, recordDesc, cPolicy);
                    collectors.add(collector);
                }
            }
            List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(aid);
            if (outputs != null) {
                for (int i = 0; i < outputs.size(); ++i) {
                    final IConnectorDescriptor conn = outputs.get(i);
                    RecordDescriptor recordDesc = ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                    IConnectorPolicy cPolicy = connectorPoliciesMap.get(conn.getConnectorId());
                    IPartitionWriterFactory pwFactory = createPartitionWriterFactory(task, cPolicy, jobId, conn, partition, taId, flags);
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("output: " + i + ": " + conn.getConnectorId());
                    }
                    IFrameWriter writer = conn.createPartitioner(task, recordDesc, pwFactory, partition, td.getPartitionCount(), td.getOutputPartitionCounts()[i]);
                    operator.setOutputFrameWriter(i, writer, recordDesc);
                }
            }
            task.setTaskRuntime(collectors.toArray(new IPartitionCollector[collectors.size()]), operator);
            joblet.addTask(task);
            task.start();
        }
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Failure starting a task", e);
        // notify cc of start task failure
        List<Exception> exceptions = new ArrayList<>();
        ExceptionUtils.setNodeIds(exceptions, ncs.getId());
        ncs.getWorkQueue().schedule(new NotifyTaskFailureWork(ncs, task, exceptions));
    }
}
Also used : IFrameWriter(org.apache.hyracks.api.comm.IFrameWriter) Task(org.apache.hyracks.control.nc.Task) TaskId(org.apache.hyracks.api.dataflow.TaskId) RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) ActivityId(org.apache.hyracks.api.dataflow.ActivityId) ArrayList(java.util.ArrayList) IRecordDescriptorProvider(org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider) Joblet(org.apache.hyracks.control.nc.Joblet) IActivity(org.apache.hyracks.api.dataflow.IActivity) NCServiceContext(org.apache.hyracks.control.nc.application.NCServiceContext) INCServiceContext(org.apache.hyracks.api.application.INCServiceContext) List(java.util.List) ArrayList(java.util.ArrayList) IConnectorDescriptor(org.apache.hyracks.api.dataflow.IConnectorDescriptor) IPartitionCollector(org.apache.hyracks.api.comm.IPartitionCollector) TaskAttemptId(org.apache.hyracks.api.dataflow.TaskAttemptId) IConnectorPolicy(org.apache.hyracks.api.dataflow.connectors.IConnectorPolicy) IPartitionWriterFactory(org.apache.hyracks.api.comm.IPartitionWriterFactory) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) UnknownHostException(java.net.UnknownHostException) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) ActivityCluster(org.apache.hyracks.api.job.ActivityCluster) TaskAttemptDescriptor(org.apache.hyracks.control.common.job.TaskAttemptDescriptor) ActivityClusterGraph(org.apache.hyracks.api.job.ActivityClusterGraph) IOperatorNodePushable(org.apache.hyracks.api.dataflow.IOperatorNodePushable)
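
The anonymous IRecordDescriptorProvider above is what each activity receives when its push runtime is built. A hedged sketch of how a typical createPushRuntime implementation consumes it is shown below; the field activityId and the helper buildPushable are hypothetical, and the signature follows the call made in the loop above.

@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, IRecordDescriptorProvider rdp,
        int partition, int nPartitions) throws HyracksDataException {
    // look up the record layouts of this activity's ports by activity id and port index
    RecordDescriptor inDesc = rdp.getInputRecordDescriptor(activityId, 0);
    RecordDescriptor outDesc = rdp.getOutputRecordDescriptor(activityId, 0);
    // build the operator's runtime against those layouts (hypothetical helper)
    return buildPushable(ctx, inDesc, outDesc, partition, nPartitions);
}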

Example 15 with IFrameWriter

use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.

the class FramewriterTest method setUp.

@Before
public void setUp() throws Exception {
    // Mock static methods
    PowerMockito.mockStatic(BTreeUtils.class);
    PowerMockito.when(BTreeUtils.getSearchMultiComparator(Matchers.any(), Matchers.any())).thenReturn(mockMultiComparator());
    PowerMockito.mockStatic(FrameUtils.class);
    // Custom implementation for FrameUtils that pushes to the next frame immediately
    PowerMockito.when(FrameUtils.appendToWriter(Matchers.any(IFrameWriter.class), Matchers.any(IFrameTupleAppender.class), Matchers.any(IFrameTupleAccessor.class), Matchers.anyInt(), Matchers.anyInt())).thenAnswer(new Answer<Integer>() {

        @Override
        public Integer answer(InvocationOnMock invocation) throws Throwable {
            Object[] args = invocation.getArguments();
            IFrameWriter writer = (IFrameWriter) args[0];
            writer.nextFrame(EMPTY_BUFFER);
            return BUFFER_SIZE;
        }
    });
    // create global mock for FrameTupleAccessor, ArrayTupleBuilder
    FrameTupleAccessor frameAccessor = Mockito.mock(FrameTupleAccessor.class);
    Mockito.when(frameAccessor.getTupleCount()).thenReturn(RECORDS_PER_FRAME);
    // Global custom implementations for FrameTupleAppender
    // since we have two appenders, each test needs to run twice
    FrameTupleAppender[] appenders = mockAppenders();
    // Mock all instances of a class <Note that you need to prepare the class calling this constructor as well>
    PowerMockito.whenNew(FrameTupleAccessor.class).withAnyArguments().thenReturn(frameAccessor);
    PowerMockito.whenNew(FrameTupleAppender.class).withAnyArguments().thenAnswer(new Answer<FrameTupleAppender>() {

        @Override
        public FrameTupleAppender answer(InvocationOnMock invocation) throws Throwable {
            counter++;
            if (counter % 2 == 1) {
                return appenders[0];
            }
            return appenders[1];
        }
    });
}
Also used : IFrameWriter(org.apache.hyracks.api.comm.IFrameWriter) InvocationOnMock(org.mockito.invocation.InvocationOnMock) IFrameTupleAccessor(org.apache.hyracks.api.comm.IFrameTupleAccessor) FrameTupleAppender(org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender) IFrameTupleAppender(org.apache.hyracks.api.comm.IFrameTupleAppender) IFrameTupleAppender(org.apache.hyracks.api.comm.IFrameTupleAppender) FrameTupleAccessor(org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor) IFrameTupleAccessor(org.apache.hyracks.api.comm.IFrameTupleAccessor) Before(org.junit.Before)

Aggregations

IFrameWriter (org.apache.hyracks.api.comm.IFrameWriter): 32 usages
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 15 usages
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor): 13 usages
VSizeFrame (org.apache.hyracks.api.comm.VSizeFrame): 8 usages
ByteBuffer (java.nio.ByteBuffer): 6 usages
ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder): 6 usages
FrameTupleAppender (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender): 6 usages
IOException (java.io.IOException): 5 usages
ArrayList (java.util.ArrayList): 5 usages
FrameTupleAccessor (org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor): 5 usages
IHyracksTaskContext (org.apache.hyracks.api.context.IHyracksTaskContext): 4 usages
HyracksException (org.apache.hyracks.api.exceptions.HyracksException): 4 usages
DataOutput (java.io.DataOutput): 3 usages
InputStream (java.io.InputStream): 3 usages
IPushRuntime (org.apache.hyracks.algebricks.runtime.base.IPushRuntime): 3 usages
IFrame (org.apache.hyracks.api.comm.IFrame): 3 usages
AbstractOperatorNodePushable (org.apache.hyracks.dataflow.std.base.AbstractOperatorNodePushable): 3 usages
InputStreamReader (java.io.InputStreamReader): 2 usages
Semaphore (java.util.concurrent.Semaphore): 2 usages
ExternalFile (org.apache.asterix.external.indexing.ExternalFile): 2 usages