Example 11 with Context

Use of org.apache.flume.Context in project phoenix by apache.

Class PhoenixConsumer, method initializeSerializer.

/**
     * Initializes the event serializer for Kafka messages.
     * @param context the Flume context carrying the consumer configuration
     * @param eventSerializerType the serializer type name, or a fully qualified serializer class name
     */
private void initializeSerializer(final Context context, final String eventSerializerType) {
    String serializerClazz = null;
    EventSerializers eventSerializer = null;
    try {
        eventSerializer = EventSerializers.valueOf(eventSerializerType.toUpperCase());
    } catch (IllegalArgumentException iae) {
        // not a known serializer type; treat the value as a fully qualified class name
        serializerClazz = eventSerializerType;
    }
    final Context serializerContext = new Context();
    serializerContext.putAll(context.getSubProperties(FlumeConstants.CONFIG_SERIALIZER_PREFIX));
    copyPropertiesToSerializerContext(context, serializerContext);
    try {
        @SuppressWarnings("unchecked") Class<? extends EventSerializer> clazz = null;
        if (serializerClazz == null) {
            clazz = (Class<? extends EventSerializer>) Class.forName(eventSerializer.getClassName());
        } else {
            clazz = (Class<? extends EventSerializer>) Class.forName(serializerClazz);
        }
        serializer = clazz.newInstance();
        serializer.configure(serializerContext);
    } catch (Exception e) {
        logger.error("Could not instantiate event serializer.", e);
        Throwables.propagate(e);
    }
}
Also used : Context(org.apache.flume.Context) EventSerializers(org.apache.phoenix.flume.serializer.EventSerializers) SQLException(java.sql.SQLException) IOException(java.io.IOException)
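
The serializer context above is built with Context.getSubProperties, which copies only the keys under a given prefix and strips that prefix before the serializer sees them; the helper copyPropertiesToSerializerContext (not shown here) then copies selected top-level properties from the consumer context into the same serializer context. A minimal sketch of the prefix behavior, assuming the prefix constant resolves to "serializer." (the property names below are made up for illustration):

import org.apache.flume.Context;

public class SubPropertiesSketch {
    public static void main(String[] args) {
        Context context = new Context();
        // hypothetical properties; only the "serializer."-prefixed one lands in the sub-map
        context.put("serializer.rowkeyType", "uuid");
        context.put("table", "EVENTS");
        Context serializerContext = new Context();
        serializerContext.putAll(context.getSubProperties("serializer."));
        // prints "uuid": the key is exposed to the serializer without its prefix
        System.out.println(serializerContext.getString("rowkeyType"));
    }
}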

Example 12 with Context

Use of org.apache.flume.Context in project rocketmq-externals by apache.

Class RocketMQSinkTest, method testNullEvent.

@Test
public void testNullEvent() throws MQClientException, InterruptedException, EventDeliveryException, RemotingException, MQBrokerException, UnsupportedEncodingException {
    // start sink
    Context context = new Context();
    context.put(NAME_SERVER_CONFIG, nameServer);
    context.put(TAG_CONFIG, tag);
    RocketMQSink sink = new RocketMQSink();
    Configurables.configure(sink, context);
    MemoryChannel channel = new MemoryChannel();
    Configurables.configure(channel, context);
    sink.setChannel(channel);
    sink.start();
    Sink.Status status = sink.process();
    assertEquals(Sink.Status.BACKOFF, status);
    sink.stop();
}
Also used : Context(org.apache.flume.Context) MemoryChannel(org.apache.flume.channel.MemoryChannel) Sink(org.apache.flume.Sink) Test(org.junit.Test)
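
With nothing in the memory channel, process() has no event to take, so the sink reports BACKOFF and the test asserts exactly that. A minimal sketch of the sink-side pattern that produces this status, illustrating the general Flume sink contract rather than RocketMQSink's actual implementation:

import org.apache.flume.Channel;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Sink;
import org.apache.flume.Transaction;

public class BackoffSketch {
    static Sink.Status drainOne(Channel channel) throws EventDeliveryException {
        Transaction tx = channel.getTransaction();
        tx.begin();
        try {
            Event event = channel.take();
            if (event == null) {
                // empty channel: commit the empty transaction and tell the runner to back off
                tx.commit();
                return Sink.Status.BACKOFF;
            }
            // ... deliver the event to the destination here ...
            tx.commit();
            return Sink.Status.READY;
        } catch (Exception e) {
            tx.rollback();
            throw new EventDeliveryException("delivery failed", e);
        } finally {
            tx.close();
        }
    }
}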

Example 13 with Context

Use of org.apache.flume.Context in project rocketmq-externals by apache.

Class RocketMQSinkTest, method testBatchEvent.

@Test
public void testBatchEvent() throws MQClientException, InterruptedException, EventDeliveryException, RemotingException, MQBrokerException, UnsupportedEncodingException {
    // start sink
    Context context = new Context();
    context.put(NAME_SERVER_CONFIG, nameServer);
    context.put(TAG_CONFIG, tag);
    context.put(BATCH_SIZE_CONFIG, String.valueOf(batchSize));
    RocketMQSink sink = new RocketMQSink();
    Configurables.configure(sink, context);
    MemoryChannel channel = new MemoryChannel();
    Configurables.configure(channel, context);
    sink.setChannel(channel);
    sink.start();
    // mock flume source
    Map<String, String> msgs = new HashMap<>();
    Transaction tx = channel.getTransaction();
    tx.begin();
    int sendNum = 0;
    for (int i = 0; i < batchSize; i++) {
        String sendMsg = "\"Hello RocketMQ\"" + "," + DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss:SSS");
        Event event = EventBuilder.withBody(sendMsg.getBytes(), null);
        channel.put(event);
        log.info("publish message : {}", sendMsg);
        String[] sendMsgKv = sendMsg.split(",");
        msgs.put(sendMsgKv[1], sendMsgKv[0]);
        sendNum++;
        Thread.sleep(10);
    }
    log.info("send message num={}", sendNum);
    tx.commit();
    tx.close();
    Sink.Status status = sink.process();
    if (status == Sink.Status.BACKOFF) {
        fail("Error");
    }
    sink.stop();
    // consume messages
    consumer = new DefaultMQPullConsumer(consumerGroup);
    consumer.setNamesrvAddr(nameServer);
    consumer.setMessageModel(MessageModel.BROADCASTING);
    consumer.registerMessageQueueListener(TOPIC_DEFAULT, null);
    consumer.start();
    int receiveNum = 0;
    String receiveMsg = null;
    Set<MessageQueue> queues = consumer.fetchSubscribeMessageQueues(TOPIC_DEFAULT);
    for (MessageQueue queue : queues) {
        long offset = getMessageQueueOffset(queue);
        PullResult pullResult = consumer.pull(queue, tag, offset, batchSize);
        if (pullResult.getPullStatus() == PullStatus.FOUND) {
            for (MessageExt message : pullResult.getMsgFoundList()) {
                byte[] body = message.getBody();
                receiveMsg = new String(body, "UTF-8");
                String[] receiveMsgKv = receiveMsg.split(",");
                msgs.remove(receiveMsgKv[1]);
                log.info("receive message : {}", receiveMsg);
                receiveNum++;
            }
            long nextBeginOffset = pullResult.getNextBeginOffset();
            putMessageQueueOffset(queue, nextBeginOffset);
        }
    }
    log.info("receive message num={}", receiveNum);
    // wait for processQueueTable init
    Thread.sleep(1000);
    consumer.shutdown();
    assertEquals(0, msgs.size());
}
Also used : Context(org.apache.flume.Context) MemoryChannel(org.apache.flume.channel.MemoryChannel) HashMap(java.util.HashMap) Date(java.util.Date) DefaultMQPullConsumer(org.apache.rocketmq.client.consumer.DefaultMQPullConsumer) PullResult(org.apache.rocketmq.client.consumer.PullResult) MessageExt(org.apache.rocketmq.common.message.MessageExt) Transaction(org.apache.flume.Transaction) Sink(org.apache.flume.Sink) MessageQueue(org.apache.rocketmq.common.message.MessageQueue) Event(org.apache.flume.Event) Test(org.junit.Test)
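
The test relies on two helpers, getMessageQueueOffset and putMessageQueueOffset, that are not part of this excerpt. A minimal sketch of what such helpers typically look like against DefaultMQPullConsumer, assuming the test's consumer field and not necessarily matching the project's exact code:

private long getMessageQueueOffset(MessageQueue queue) throws MQClientException {
    // read the last committed consume offset; start from 0 when nothing has been stored yet
    long offset = consumer.fetchConsumeOffset(queue, false);
    return offset < 0 ? 0 : offset;
}

private void putMessageQueueOffset(MessageQueue queue, long nextBeginOffset) throws MQClientException {
    // remember where the next pull for this queue should begin
    consumer.updateConsumeOffset(queue, nextBeginOffset);
}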

Example 14 with Context

Use of org.apache.flume.Context in project apex-malhar by apache.

Class ColumnFilteringFormattingInterceptorTest, method testInterceptEventWithPrefix.

@Test
public void testInterceptEventWithPrefix() {
    HashMap<String, String> contextMap = new HashMap<String, String>();
    contextMap.put(ColumnFilteringInterceptor.Constants.SRC_SEPARATOR, Byte.toString((byte) 2));
    contextMap.put(ColumnFilteringFormattingInterceptor.Constants.COLUMNS_FORMATTER, "\001{1}\001{2}\001{3}\001");
    ColumnFilteringFormattingInterceptor.Builder builder = new ColumnFilteringFormattingInterceptor.Builder();
    builder.configure(new Context(contextMap));
    Interceptor interceptor = builder.build();
    assertArrayEquals("Six Fields", "\001\001Second\001\001".getBytes(), interceptor.intercept(new InterceptorTestHelper.MyEvent("First\002\002Second\002\002\002".getBytes())).getBody());
}
Also used : Context(org.apache.flume.Context) HashMap(java.util.HashMap) Interceptor(org.apache.flume.interceptor.Interceptor) Test(org.junit.Test)
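
The assertion becomes clearer once the separators are spelled out: the source separator is \002, the output separator in the formatter is \001, and each {n} placeholder pulls a column from the split body (the expected value implies zero-based indexing). A small standalone illustration of that mapping, not the interceptor's actual implementation:

public class ColumnMappingSketch {
    public static void main(String[] args) {
        String body = "First\002\002Second\002\002\002";
        // split on the source separator; String.split drops trailing empty columns
        String[] columns = body.split("\002");               // ["First", "", "Second"]
        String col1 = columns.length > 1 ? columns[1] : "";  // ""
        String col2 = columns.length > 2 ? columns[2] : "";  // "Second"
        String col3 = columns.length > 3 ? columns[3] : "";  // ""
        String formatted = "\001" + col1 + "\001" + col2 + "\001" + col3 + "\001";
        // prints true: the result equals the expected body in the assertion above
        System.out.println("\001\001Second\001\001".equals(formatted));
    }
}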

Example 15 with Context

Use of org.apache.flume.Context in project MSEC by Tencent.

Class TestProfobufSource, method testBasic.

@Test
public void testBasic() throws Exception {
    Context context = new Context();
    context.put("bind", "localhost");
    context.put("port", "45673");
    context.put("threads", "1");
    ProtobufSource source = new ProtobufSource();
    source.configure(context);
    Map<String, String> channelContext = new HashMap<>();
    channelContext.put("capacity", "1000000");
    // for faster tests
    channelContext.put("keep-alive", "0");
    Channel channel = new MemoryChannel();
    Configurables.configure(channel, new Context(channelContext));
    Sink sink = new LoggerSink();
    sink.setChannel(channel);
    sink.start();
    DefaultSinkProcessor proc = new DefaultSinkProcessor();
    proc.setSinks(Collections.singletonList(sink));
    SinkRunner sinkRunner = new SinkRunner(proc);
    sinkRunner.start();
    ChannelSelector rcs = new ReplicatingChannelSelector();
    rcs.setChannels(Collections.singletonList(channel));
    ChannelProcessor chp = new ChannelProcessor(rcs);
    source.setChannelProcessor(chp);
    source.start();
    Thread.sleep(5000);
    source.stop();
    sinkRunner.stop();
    sink.stop();
}
Also used : Context(org.apache.flume.Context) MemoryChannel(org.apache.flume.channel.MemoryChannel) HashMap(java.util.HashMap) Channel(org.apache.flume.Channel) SinkRunner(org.apache.flume.SinkRunner) ChannelProcessor(org.apache.flume.channel.ChannelProcessor) ProtobufSource(org.ngse.source.protobuf.ProtobufSource) DefaultSinkProcessor(org.apache.flume.sink.DefaultSinkProcessor) ReplicatingChannelSelector(org.apache.flume.channel.ReplicatingChannelSelector) LoggerSink(org.apache.flume.sink.LoggerSink) Sink(org.apache.flume.Sink) ChannelSelector(org.apache.flume.ChannelSelector) Test(org.junit.Test)
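
The test assembles the whole pipeline by hand: a ReplicatingChannelSelector routes to the single MemoryChannel, a ChannelProcessor fronts that selector for the source, and a SinkRunner drives the LoggerSink through a DefaultSinkProcessor on its own thread. A minimal sketch of how a source typically hands a decoded event to the processor it was given, illustrating the Flume source contract rather than ProtobufSource's actual code:

import java.nio.charset.StandardCharsets;

import org.apache.flume.Event;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.event.EventBuilder;

public class SourceHandoffSketch {
    static void handOff(ChannelProcessor processor) {
        // build an event from the decoded payload and let the processor route it to the channel(s)
        Event event = EventBuilder.withBody("decoded payload".getBytes(StandardCharsets.UTF_8));
        processor.processEvent(event);
    }
}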

Aggregations

Context (org.apache.flume.Context) 39
Test (org.junit.Test) 24
MemoryChannel (org.apache.flume.channel.MemoryChannel) 19
Channel (org.apache.flume.Channel) 16
PhoenixSink (org.apache.phoenix.flume.sink.PhoenixSink) 12
Transaction (org.apache.flume.Transaction) 11
Event (org.apache.flume.Event) 9
HashMap (java.util.HashMap) 8
NullPhoenixSink (org.apache.phoenix.flume.sink.NullPhoenixSink) 8
Interceptor (org.apache.flume.interceptor.Interceptor) 7
Properties (java.util.Properties) 6
Connection (java.sql.Connection) 5
ResultSet (java.sql.ResultSet) 5
Sink (org.apache.flume.Sink) 5
Date (java.util.Date) 3
ChannelException (org.apache.flume.ChannelException) 3
ChannelSelector (org.apache.flume.ChannelSelector) 3
ChannelProcessor (org.apache.flume.channel.ChannelProcessor) 3
ReplicatingChannelSelector (org.apache.flume.channel.ReplicatingChannelSelector) 3
ThreadContext (org.apache.logging.log4j.ThreadContext) 3