Use of org.apache.flume.Context in project phoenix by apache — from the class PhoenixConsumer, method initializeSerializer.
/**
 * Initializes the serializer for kafka messages.
 *
 * <p>Resolution order: {@code eventSerializerType} is first matched case-insensitively
 * against the built-in {@link EventSerializers} enum constants; if it is not one of
 * them, it is treated as a fully qualified class name of an {@link EventSerializer}
 * implementation.
 *
 * @param context the agent configuration; sub-properties under
 *     {@code FlumeConstants.CONFIG_SERIALIZER_PREFIX} are copied into the
 *     serializer's own context before {@code configure} is called
 * @param eventSerializerType an {@link EventSerializers} constant name or a fully
 *     qualified serializer class name
 */
private void initializeSerializer(final Context context, final String eventSerializerType) {
    String serializerClazz = null;
    EventSerializers eventSerializer = null;
    try {
        eventSerializer = EventSerializers.valueOf(eventSerializerType.toUpperCase());
    } catch (IllegalArgumentException iae) {
        // Not a known enum constant — assume the caller supplied a class name.
        serializerClazz = eventSerializerType;
    }
    final Context serializerContext = new Context();
    serializerContext.putAll(context.getSubProperties(FlumeConstants.CONFIG_SERIALIZER_PREFIX));
    copyPropertiesToSerializerContext(context, serializerContext);
    try {
        final String className =
            (serializerClazz == null) ? eventSerializer.getClassName() : serializerClazz;
        @SuppressWarnings("unchecked")
        final Class<? extends EventSerializer> clazz =
            (Class<? extends EventSerializer>) Class.forName(className);
        // getDeclaredConstructor().newInstance() instead of the deprecated
        // Class.newInstance(), which bypasses compile-time checked-exception handling.
        serializer = clazz.getDeclaredConstructor().newInstance();
        serializer.configure(serializerContext);
    } catch (Exception e) {
        logger.error("Could not instantiate event serializer.", e);
        Throwables.propagate(e);
    }
}
Use of org.apache.flume.Context in project rocketmq-externals by apache — from the class RocketMQSinkTest, method testNullEvent.
@Test
public void testNullEvent() throws MQClientException, InterruptedException, EventDeliveryException, RemotingException, MQBrokerException, UnsupportedEncodingException {
    // Configure and start a RocketMQ sink backed by a (deliberately empty) memory channel.
    Context ctx = new Context();
    ctx.put(NAME_SERVER_CONFIG, nameServer);
    ctx.put(TAG_CONFIG, tag);
    RocketMQSink rocketMqSink = new RocketMQSink();
    Configurables.configure(rocketMqSink, ctx);
    MemoryChannel memoryChannel = new MemoryChannel();
    Configurables.configure(memoryChannel, ctx);
    rocketMqSink.setChannel(memoryChannel);
    rocketMqSink.start();

    // With no events available, process() must report BACKOFF rather than fail.
    Sink.Status result = rocketMqSink.process();
    assertEquals(result, Sink.Status.BACKOFF);
    rocketMqSink.stop();
}
Use of org.apache.flume.Context in project rocketmq-externals by apache — from the class RocketMQSinkTest, method testBatchEvent.
/**
 * End-to-end batch test: publishes {@code batchSize} events through the sink in one
 * channel transaction, then pulls them back with a consumer and verifies that every
 * message round-trips (the expectation map must be empty at the end).
 */
@Test
public void testBatchEvent() throws MQClientException, InterruptedException, EventDeliveryException, RemotingException, MQBrokerException, UnsupportedEncodingException {
    // Start the sink with an explicit batch size.
    Context context = new Context();
    context.put(NAME_SERVER_CONFIG, nameServer);
    context.put(TAG_CONFIG, tag);
    context.put(BATCH_SIZE_CONFIG, String.valueOf(batchSize));
    RocketMQSink sink = new RocketMQSink();
    Configurables.configure(sink, context);
    MemoryChannel channel = new MemoryChannel();
    Configurables.configure(channel, context);
    sink.setChannel(channel);
    sink.start();

    // Mock a flume source: put batchSize events into the channel in one transaction.
    // Each body is "<payload>,<timestamp>"; msgs maps timestamp -> payload so the
    // consumer loop below can tick entries off as they are received.
    Map<String, String> msgs = new HashMap<>();
    Transaction tx = channel.getTransaction();
    tx.begin();
    int sendNum = 0;
    for (int i = 0; i < batchSize; i++) {
        // Pattern fix: "dd" is day-of-month; the original "DD" is day-of-year.
        String sendMsg = "\"Hello RocketMQ\"" + "," + DateFormatUtils.format(new Date(), "yyyy-MM-dd hh:mm:ss:SSSS");
        Event event = EventBuilder.withBody(sendMsg.getBytes(), null);
        channel.put(event);
        log.info("publish message : {}", sendMsg);
        String[] sendMsgKv = sendMsg.split(",");
        msgs.put(sendMsgKv[1], sendMsgKv[0]);
        sendNum++;
        Thread.sleep(10); // keep each timestamp (map key) unique
    }
    log.info("send message num={}", sendNum);
    tx.commit();
    tx.close();
    Sink.Status status = sink.process();
    if (status == Sink.Status.BACKOFF) {
        fail("Error");
    }
    sink.stop();

    // Consume the messages back, removing each received one from the expectation map.
    consumer = new DefaultMQPullConsumer(consumerGroup);
    consumer.setNamesrvAddr(nameServer);
    consumer.setMessageModel(MessageModel.valueOf("BROADCASTING"));
    consumer.registerMessageQueueListener(TOPIC_DEFAULT, null);
    consumer.start();
    int receiveNum = 0;
    String receiveMsg = null;
    Set<MessageQueue> queues = consumer.fetchSubscribeMessageQueues(TOPIC_DEFAULT);
    for (MessageQueue queue : queues) {
        long offset = getMessageQueueOffset(queue);
        PullResult pullResult = consumer.pull(queue, tag, offset, batchSize);
        if (pullResult.getPullStatus() == PullStatus.FOUND) {
            for (MessageExt message : pullResult.getMsgFoundList()) {
                byte[] body = message.getBody();
                receiveMsg = new String(body, "UTF-8");
                String[] receiveMsgKv = receiveMsg.split(",");
                msgs.remove(receiveMsgKv[1]);
                log.info("receive message : {}", receiveMsg);
                receiveNum++;
            }
            long nextBeginOffset = pullResult.getNextBeginOffset();
            putMessageQueueOffset(queue, nextBeginOffset);
        }
    }
    log.info("receive message num={}", receiveNum);

    // Wait for processQueueTable init before shutting the consumer down.
    Thread.sleep(1000);
    consumer.shutdown();
    // JUnit convention: expected value first, actual second.
    assertEquals(0, msgs.size());
}
Use of org.apache.flume.Context in project apex-malhar by apache — from the class ColumnFilteringFormattingInterceptorTest, method testInterceptEventWithPrefix.
@Test
public void testInterceptEventWithPrefix() {
    // Interceptor config: source separator byte 0x02, formatter that wraps
    // columns 1-3 in \001 delimiters (so the output carries a leading prefix).
    Map<String, String> params = new HashMap<String, String>();
    params.put(ColumnFilteringInterceptor.Constants.SRC_SEPARATOR, Byte.toString((byte) 2));
    params.put(ColumnFilteringFormattingInterceptor.Constants.COLUMNS_FORMATTER, "\001{1}\001{2}\001{3}\001");

    ColumnFilteringFormattingInterceptor.Builder interceptorBuilder = new ColumnFilteringFormattingInterceptor.Builder();
    interceptorBuilder.configure(new Context(params));
    Interceptor interceptor = interceptorBuilder.build();

    // Only the second of the six source fields is populated; the formatter
    // should emit it between the configured \001 delimiters.
    byte[] intercepted = interceptor.intercept(new InterceptorTestHelper.MyEvent("First\002\002Second\002\002\002".getBytes())).getBody();
    assertArrayEquals("Six Fields", "\001\001Second\001\001".getBytes(), intercepted);
}
Use of org.apache.flume.Context in project MSEC by Tencent — from the class TestProfobufSource, method testBasic.
/**
 * Smoke test: wires a ProtobufSource to a logger sink through a memory channel,
 * runs the pipeline for five seconds, then tears everything down. There are no
 * assertions — the test passes if start/run/stop complete without throwing.
 */
@Test
public void testBasic() throws Exception {
    // Source listening on a fixed local port with a single worker thread.
    Context context = new Context();
    context.put("bind", "localhost");
    context.put("port", "45673");
    context.put("threads", "1");
    ProtobufSource source = new ProtobufSource();
    source.configure(context);

    // Diamond operator instead of the raw-typed "new HashMap()" of the original.
    Map<String, String> channelContext = new HashMap<>();
    channelContext.put("capacity", "1000000");
    // for faster tests
    channelContext.put("keep-alive", "0");
    Channel channel = new MemoryChannel();
    Configurables.configure(channel, new Context(channelContext));

    // Drain the channel into a logger sink driven by its own runner thread.
    Sink sink = new LoggerSink();
    sink.setChannel(channel);
    sink.start();
    DefaultSinkProcessor proc = new DefaultSinkProcessor();
    proc.setSinks(Collections.singletonList(sink));
    SinkRunner sinkRunner = new SinkRunner(proc);
    sinkRunner.start();

    // Replicating selector over the single channel feeds the source's processor.
    ChannelSelector rcs = new ReplicatingChannelSelector();
    rcs.setChannels(Collections.singletonList(channel));
    ChannelProcessor chp = new ChannelProcessor(rcs);
    source.setChannelProcessor(chp);
    source.start();

    // Let the pipeline run briefly, then stop components in reverse order.
    Thread.sleep(5000);
    source.stop();
    sinkRunner.stop();
    sink.stop();
}
Aggregations