Use of org.apache.flume.channel.ChannelProcessor in project nifi by apache: the class ExecuteFlumeSource, method onScheduled.
@OnScheduled
public void onScheduled(final ProcessContext context) {
    try {
        // Build the Flume source from the processor's configured name and type.
        final String sourceName = context.getProperty(SOURCE_NAME).getValue();
        final String sourceType = context.getProperty(SOURCE_TYPE).getValue();
        source = SOURCE_FACTORY.create(sourceName, sourceType);

        // Apply the agent's Flume configuration to the freshly created source.
        final String flumeConfig = context.getProperty(FLUME_CONFIG).getValue();
        final String agentName = context.getProperty(AGENT_NAME).getValue();
        Configurables.configure(source, getFlumeSourceContext(flumeConfig, agentName, sourceName));

        // Pollable sources are wired to the internal channel and started here.
        // NOTE(review): non-pollable (event-driven) sources are presumably handled
        // elsewhere in this processor — confirm before relying on this.
        if (source instanceof PollableSource) {
            source.setChannelProcessor(new ChannelProcessor(new NifiChannelSelector(pollableSourceChannel)));
            source.start();
        }
    } catch (Throwable th) {
        getLogger().error("Error creating source", th);
        throw Throwables.propagate(th);
    }
}
Use of org.apache.flume.channel.ChannelProcessor in project apex-malhar by apache: the class HdfsTestSource, method start.
@Override
public void start() {
    super.start();
    emitTimer = new Timer();
    final ChannelProcessor channelProcessor = getChannelProcessor();
    // Once per second, read up to `rate` lines from the current data file and hand
    // them to the channel processor as a single batch; advance through the file list
    // until every file has been consumed, then cancel the timer.
    emitTimer.scheduleAtFixedRate(new TimerTask() {
        @Override
        public void run() {
            int lineCount = 0;
            events.clear();
            try {
                while (lineCount < rate && !finished) {
                    String line = br.readLine();
                    if (line == null) {
                        // Current file exhausted; close it and open the next one.
                        logger.debug("completed file {}", currentFile);
                        br.close();
                        currentFile++;
                        if (currentFile == dataFiles.size()) {
                            logger.info("finished all files");
                            finished = true;
                            break;
                        }
                        Path filePath = new Path(dataFiles.get(currentFile));
                        // Explicit UTF-8: relying on the platform default charset is
                        // non-portable (pre-Java-18 InputStreamReader uses it by default).
                        br = new BufferedReader(new InputStreamReader(
                                new GzipCompressorInputStream(fs.open(filePath)),
                                java.nio.charset.StandardCharsets.UTF_8));
                        logger.info("opening file {}. {}", currentFile, filePath);
                        continue;
                    }
                    lineCount++;
                    // Explicit UTF-8 for the same portability reason as above.
                    Event flumeEvent = EventBuilder.withBody(
                            line.getBytes(java.nio.charset.StandardCharsets.UTF_8));
                    events.add(flumeEvent);
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            if (!events.isEmpty()) {
                channelProcessor.processEventBatch(events);
            }
            if (finished) {
                emitTimer.cancel();
            }
        }
    }, 0, 1000);
}
Use of org.apache.flume.channel.ChannelProcessor in project apex-malhar by apache: the class TestSource, method start.
/**
 * Starts a fixed-rate timer (one tick per second) that replays the in-memory
 * {@code cache} to the channel processor, emitting {@code rate} events per tick
 * and wrapping around the cache as a circular buffer.
 */
@Override
public void start() {
super.start();
emitTimer = new Timer();
final ChannelProcessor channel = getChannelProcessor();
final int cacheSize = cache.size();
emitTimer.scheduleAtFixedRate(new TimerTask() {
@Override
public void run() {
// Exclusive end index of this tick's batch within the circular cache.
int lastIndex = startIndex + rate;
if (lastIndex > cacheSize) {
// Wrap-around: emit the tail of the cache first, then (if rate exceeds the
// cache size) whole copies of the cache, and finally the leading remainder.
lastIndex -= cacheSize;
processBatch(channel, cache.subList(startIndex, cacheSize));
startIndex = 0;
while (lastIndex > cacheSize) {
processBatch(channel, cache);
lastIndex -= cacheSize;
}
processBatch(channel, cache.subList(0, lastIndex));
} else {
// Batch fits without wrapping.
processBatch(channel, cache.subList(startIndex, lastIndex));
}
// NOTE(review): when lastIndex lands exactly on cacheSize, the next tick takes the
// wrap branch and emits an empty subList(cacheSize, cacheSize) — confirm that
// processBatch (defined elsewhere) tolerates an empty batch.
startIndex = lastIndex;
}
}, 0, 1000);
}
Use of org.apache.flume.channel.ChannelProcessor in project rocketmq-externals by apache: the class RocketMQSourceTest, method testEvent.
@Test
public void testEvent() throws EventDeliveryException, MQBrokerException, MQClientException, InterruptedException, UnsupportedEncodingException {
    // Publish a test message to the broker.
    DefaultMQProducer producer = new DefaultMQProducer(producerGroup);
    producer.setNamesrvAddr(nameServer);
    // Fixed pattern: the original "yyyy-MM-DD hh:mm:ss" used DD (day-of-year)
    // and hh (12-hour clock); dd/HH give the intended calendar day and 24-hour time.
    String sendMsg = "\"Hello Flume\"" + "," + DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss");
    try {
        producer.start();
        Message msg = new Message(TOPIC_DEFAULT, tag, sendMsg.getBytes(java.nio.charset.StandardCharsets.UTF_8));
        SendResult sendResult = producer.send(msg);
        log.info("publish message : {}, sendResult:{}", sendMsg, sendResult);
    } catch (Exception e) {
        // Preserve the cause so broker-side failures stay diagnosable.
        throw new MQClientException("Failed to publish messages", e);
    } finally {
        producer.shutdown();
    }

    // Start the source wired to an in-memory channel.
    Context context = new Context();
    context.put(NAME_SERVER_CONFIG, nameServer);
    context.put(TAG_CONFIG, tag);
    Channel channel = new MemoryChannel();
    Configurables.configure(channel, context);
    List<Channel> channels = new ArrayList<>();
    channels.add(channel);
    ChannelSelector channelSelector = new ReplicatingChannelSelector();
    channelSelector.setChannels(channels);
    ChannelProcessor channelProcessor = new ChannelProcessor(channelSelector);
    RocketMQSource source = new RocketMQSource();
    source.setChannelProcessor(channelProcessor);
    Configurables.configure(source, context);
    source.start();
    PollableSource.Status status = source.process();
    if (status == PollableSource.Status.BACKOFF) {
        fail("Error");
    }
    /*
     * Wait for processQueueTable init.
     */
    Thread.sleep(1000);
    source.stop();

    /*
     * Mock a Flume sink: take the event back out of the channel.
     */
    Transaction transaction = channel.getTransaction();
    transaction.begin();
    Event event = channel.take();
    if (event == null) {
        transaction.commit();
        transaction.close();
        fail("Error");
    }
    byte[] body = event.getBody();
    String receiveMsg = new String(body, java.nio.charset.StandardCharsets.UTF_8);
    log.info("receive message : {}", receiveMsg);
    assertEquals(sendMsg, receiveMsg);
    // Fixed leak: the original never committed/closed the transaction on the
    // success path, leaving the channel transaction open.
    transaction.commit();
    transaction.close();
}
Use of org.apache.flume.channel.ChannelProcessor in project logging-log4j2 by apache: the class FlumeAppenderTest, method setUp.
@Before
public void setUp() throws Exception {
    // Fresh Avro source backed by an in-memory channel for each test.
    eventSource = new AvroSource();
    channel = new MemoryChannel();
    Configurables.configure(channel, new Context());

    avroLogger = (Logger) LogManager.getLogger("avrologger");
    // Drop every other appender attached to this logger so the test only
    // exercises the Avro appender.
    removeAppenders(avroLogger);

    // Bind the source to a free port on all interfaces.
    testPort = String.valueOf(AvailablePortFinder.getNextAvailable());
    final Context sourceContext = new Context();
    sourceContext.put("port", testPort);
    sourceContext.put("bind", "0.0.0.0");
    Configurables.configure(eventSource, sourceContext);

    // Wire the source to the channel through a replicating selector.
    final List<Channel> channelList = new ArrayList<>();
    channelList.add(channel);
    final ChannelSelector selector = new ReplicatingChannelSelector();
    selector.setChannels(channelList);
    eventSource.setChannelProcessor(new ChannelProcessor(selector));

    eventSource.start();
    Assert.assertTrue("Reached start or error", LifecycleController.waitForOneOf(eventSource, LifecycleState.START_OR_ERROR));
    Assert.assertEquals("Server is started", LifecycleState.START, eventSource.getLifecycleState());
}
Aggregations