Use of com.linkedin.databus.core.util.IdNamePair in project databus by LinkedIn.
From the class TestGenericDispatcher, method testOneWindowTwoGroupedConsumersHappyPath.
@Test(groups = { "small", "functional" })
public void testOneWindowTwoGroupedConsumersHappyPath() {
final Logger log = Logger.getLogger("TestGenericDispatcher.testOneWindowTwoGroupedConsumersHappyPath");
log.info("start");
int source1EventsNum = 2;
int source2EventsNum = 2;
Hashtable<Long, AtomicInteger> keyCounts = new Hashtable<Long, AtomicInteger>();
Hashtable<Short, AtomicInteger> srcidCounts = new Hashtable<Short, AtomicInteger>();
final TestGenericDispatcherEventBuffer eventsBuf = new TestGenericDispatcherEventBuffer(_generic100KBufferStaticConfig);
eventsBuf.start(0);
eventsBuf.startEvents();
initBufferWithEvents(eventsBuf, 1, source1EventsNum, (short) 1, keyCounts, srcidCounts);
initBufferWithEvents(eventsBuf, 1 + source1EventsNum, source2EventsNum, (short) 2, keyCounts, srcidCounts);
eventsBuf.endEvents(100L);
DatabusStreamConsumer mockConsumer = new EventCountingConsumer(new StateVerifyingStreamConsumer(null), keyCounts, srcidCounts);
DatabusStreamConsumer mockConsumer2 = new EventCountingConsumer(new StateVerifyingStreamConsumer(null), keyCounts, srcidCounts);
DatabusCombinedConsumer sdccMockConsumer = new SelectingDatabusCombinedConsumer(mockConsumer);
DatabusCombinedConsumer sdccMockConsumer2 = new SelectingDatabusCombinedConsumer(mockConsumer2);
List<String> sources = new ArrayList<String>();
Map<Long, IdNamePair> sourcesMap = new HashMap<Long, IdNamePair>();
for (int i = 1; i <= 3; ++i) {
IdNamePair sourcePair = new IdNamePair((long) i, "source" + i);
sources.add(sourcePair.getName());
sourcesMap.put(sourcePair.getId(), sourcePair);
}
DatabusV2ConsumerRegistration consumerReg = new DatabusV2ConsumerRegistration(Arrays.asList(sdccMockConsumer, sdccMockConsumer2), sources, null);
List<DatabusV2ConsumerRegistration> allRegistrations = Arrays.asList(consumerReg);
MultiConsumerCallback callback = new MultiConsumerCallback(allRegistrations, Executors.newFixedThreadPool(2), 1000, new StreamConsumerCallbackFactory(null, null), null, null, null, null);
callback.setSourceMap(sourcesMap);
List<DatabusSubscription> subs = DatabusSubscription.createSubscriptionList(sources);
RelayDispatcher dispatcher = new RelayDispatcher("dispatcher", _genericRelayConnStaticConfig, subs, new InMemoryPersistenceProvider(), eventsBuf, callback, null, null, null, null, null);
Thread dispatcherThread = new Thread(dispatcher);
//dispatcherThread.setDaemon(true);
dispatcherThread.start();
HashMap<Long, List<RegisterResponseEntry>> schemaMap = new HashMap<Long, List<RegisterResponseEntry>>();
List<RegisterResponseEntry> l1 = new ArrayList<RegisterResponseEntry>();
List<RegisterResponseEntry> l2 = new ArrayList<RegisterResponseEntry>();
List<RegisterResponseEntry> l3 = new ArrayList<RegisterResponseEntry>();
l1.add(new RegisterResponseEntry(1L, (short) 1, SOURCE1_SCHEMA_STR));
l2.add(new RegisterResponseEntry(2L, (short) 1, SOURCE2_SCHEMA_STR));
l3.add(new RegisterResponseEntry(3L, (short) 1, SOURCE3_SCHEMA_STR));
schemaMap.put(1L, l1);
schemaMap.put(2L, l2);
schemaMap.put(3L, l3);
dispatcher.enqueueMessage(SourcesMessage.createSetSourcesIdsMessage(sourcesMap.values()));
dispatcher.enqueueMessage(SourcesMessage.createSetSourcesSchemasMessage(schemaMap));
try {
Thread.sleep(2000);
} catch (InterruptedException ie) {
// ignore; proceed to shutdown and verification
}
dispatcher.shutdown();
for (long i = 1; i <= source1EventsNum + source2EventsNum; ++i) {
assertEquals("correct amount of callbacks for key " + i, 1, keyCounts.get(i).intValue());
}
assertEquals("correct amount of callbacks for srcid 1", source1EventsNum, srcidCounts.get((short) 1).intValue());
assertEquals("correct amount of callbacks for srcid 2", source2EventsNum, srcidCounts.get((short) 2).intValue());
verifyNoLocks(null, eventsBuf);
log.info("end\n");
}
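The grouped behavior exercised above hinges on how the registration is constructed; for contrast with the independent-consumer test further below, here is a minimal sketch of the two variants, reusing the consumer and source names from these tests:
// Grouped: both consumers share one DatabusV2ConsumerRegistration, so a window's events are
// divided between them and each key triggers exactly one callback across the group
// (this is what the keyCounts assertions above verify).
DatabusV2ConsumerRegistration grouped = new DatabusV2ConsumerRegistration(Arrays.asList(sdccMockConsumer, sdccMockConsumer2), sources, null);
// Independent: one registration per consumer, so every consumer sees every event
// (the setup used by testOneWindowTwoIndependentConsumersHappyPath below).
List<DatabusV2ConsumerRegistration> independent = Arrays.asList(new DatabusV2ConsumerRegistration(sdccMockConsumer, sources, null), new DatabusV2ConsumerRegistration(sdccMockConsumer2, sources, null));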
Use of com.linkedin.databus.core.util.IdNamePair in project databus by LinkedIn.
From the class TestGenericDispatcher, method runPartialWindowCheckpointPersistence.
/**
* @param numEvents number of events in the buffer
* @param maxWindowSize window size, expressed as a number of events
* @param numFailWindow the nth end-of-window that will fail
* @throws Exception
*/
void runPartialWindowCheckpointPersistence(int numEvents, int maxWindowSize, int numFailWindow) throws Exception {
/* Experiment setup */
int payloadSize = 20;
int numCheckpoints = numEvents / maxWindowSize;
/* Consumer creation */
//set up the consumer to fail at the nth end-of-window callback (see numFailEndWindow below)
int timeTakenForDataEventInMs = 1;
int timeTakenForControlEventInMs = 1;
int numFailCheckpointEvent = 0;
int numFailDataEvent = 0;
int numFailEndWindow = numFailWindow;
int numFailures = 1;
//fail at the specified window with no retries; the dispatcher should stop after having written one window
//but having checkpointed the other, thanks to a very small checkpoint-frequency threshold
TimeoutTestConsumer tConsumer = new TimeoutTestConsumer(timeTakenForDataEventInMs, timeTakenForControlEventInMs, numFailCheckpointEvent, numFailDataEvent, numFailEndWindow, numFailures);
HashMap<Long, List<RegisterResponseEntry>> schemaMap = new HashMap<Long, List<RegisterResponseEntry>>();
short srcId = 1;
List<RegisterResponseEntry> l1 = new ArrayList<RegisterResponseEntry>();
l1.add(new RegisterResponseEntry(1L, srcId, SOURCE1_SCHEMA_STR));
schemaMap.put(1L, l1);
Map<Long, IdNamePair> sourcesMap = new HashMap<Long, IdNamePair>();
List<String> sources = new ArrayList<String>();
for (int i = 1; i <= 1; ++i) {
IdNamePair sourcePair = new IdNamePair((long) i, "source" + i);
sources.add(sourcePair.getName());
sourcesMap.put(sourcePair.getId(), sourcePair);
}
long consumerTimeBudgetMs = 60 * 1000;
DatabusV2ConsumerRegistration consumerReg = new DatabusV2ConsumerRegistration(tConsumer, sources, null);
List<DatabusV2ConsumerRegistration> allRegistrations = Arrays.asList(consumerReg);
//Single threaded execution of consumer
MultiConsumerCallback mConsumer = new MultiConsumerCallback(allRegistrations, Executors.newFixedThreadPool(1), consumerTimeBudgetMs, new StreamConsumerCallbackFactory(null, null), null, null, null, null);
/* Generate events */
Vector<DbusEvent> srcTestEvents = new Vector<DbusEvent>();
Vector<Short> srcIdList = new Vector<Short>();
srcIdList.add(srcId);
DbusEventGenerator evGen = new DbusEventGenerator(15000, srcIdList);
//Assumption: generates events with non-decreasing timestamps
Assert.assertTrue(evGen.generateEvents(numEvents, maxWindowSize, 512, payloadSize, true, srcTestEvents) > 0);
int totalSize = 0;
int maxSize = 0;
for (DbusEvent e : srcTestEvents) {
totalSize += e.size();
maxSize = (e.size() > maxSize) ? e.size() : maxSize;
}
/* Source configuration */
double thresholdChkptPct = 5.0;
DatabusSourcesConnection.Config conf = new DatabusSourcesConnection.Config();
conf.setCheckpointThresholdPct(thresholdChkptPct);
conf.getDispatcherRetries().setMaxRetryNum(0);
conf.setFreeBufferThreshold(maxSize);
conf.setConsumerTimeBudgetMs(consumerTimeBudgetMs);
int freeBufferThreshold = conf.getFreeBufferThreshold();
DatabusSourcesConnection.StaticConfig connConfig = conf.build();
//make the buffer large enough to hold the data; the control events that contain checkpoints are large
int producerBufferSize = totalSize * 2 + numCheckpoints * 10 * maxSize * 5 + freeBufferThreshold;
int individualBufferSize = producerBufferSize;
int indexSize = producerBufferSize / 10;
int stagingBufferSize = producerBufferSize;
/*Event Buffer creation */
TestGenericDispatcherEventBuffer dataEventsBuffer = new TestGenericDispatcherEventBuffer(getConfig(producerBufferSize, individualBufferSize, indexSize, stagingBufferSize, AllocationPolicy.HEAP_MEMORY, QueuePolicy.BLOCK_ON_WRITE));
List<DatabusSubscription> subs = DatabusSubscription.createSubscriptionList(sources);
/* Generic Dispatcher creation */
InMemoryPersistenceProvider cpPersister = new InMemoryPersistenceProvider();
TestDispatcher<DatabusCombinedConsumer> dispatcher = new TestDispatcher<DatabusCombinedConsumer>("OnlinePartialWindowCheckpointPersistence", connConfig, subs, cpPersister, dataEventsBuffer, mConsumer, true);
/* Launch writer */
DbusEventAppender eventProducer = new DbusEventAppender(srcTestEvents, dataEventsBuffer, 0, null);
Thread tEmitter = new Thread(eventProducer);
//be generous; use the worst case for the number of control events
long waitTimeMs = (numEvents * timeTakenForDataEventInMs + numEvents * timeTakenForControlEventInMs) * 4;
tEmitter.start();
tEmitter.join(waitTimeMs);
/* Launch dispatcher */
Thread tDispatcher = new Thread(dispatcher);
tDispatcher.start();
/* Now initialize this state machine */
dispatcher.enqueueMessage(SourcesMessage.createSetSourcesIdsMessage(sourcesMap.values()));
dispatcher.enqueueMessage(SourcesMessage.createSetSourcesSchemasMessage(schemaMap));
//wait for the dispatcher to finish reading the events
tDispatcher.join(waitTimeMs);
Assert.assertFalse(tEmitter.isAlive());
Assert.assertFalse(tDispatcher.isAlive());
LOG.info("tConsumer: " + tConsumer);
HashMap<List<String>, Checkpoint> cps = cpPersister.getCheckpoints();
for (Map.Entry<List<String>, Checkpoint> i : cps.entrySet()) {
Checkpoint cp = i.getValue();
LOG.info("checkpoint=" + cp);
Assert.assertEquals(cp.getWindowOffset().longValue(), -1L);
//check that the last checkpoint SCN seen by the consumer is higher than the persisted SCN
Assert.assertTrue(tConsumer.getLastSeenCheckpointScn() > cp.getWindowScn());
//the latest event seen should be newer than (or at least as new as) the checkpoint
Assert.assertTrue(tConsumer.getLastTsInNanosOfEvent() >= tConsumer.getLastTsInNanosOfWindow());
if (tConsumer.getLastSeenWindowScn() > 0) {
Assert.assertEquals(cp.getWindowScn(), tConsumer.getLastSeenWindowScn());
//check that the timestamp in the checkpoint matches that of the last completed window (the timestamp of the window's last event)
Assert.assertEquals(tConsumer.getLastTsInNanosOfWindow(), cp.getTsNsecs());
} else {
//not even one window was processed before error; expect uninitialized timestamp
Assert.assertEquals(Checkpoint.UNSET_TS_NSECS, cp.getTsNsecs());
}
}
}
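No caller of this helper appears in this snippet; a hypothetical test wrapping it might look like the sketch below (the test name and argument values are illustrative, not from the original source):
@Test(groups = { "small", "functional" })
public void testPartialWindowCheckpointPersistenceSketch() throws Exception {
// e.g. 100 events, windows of 20 events, fail at the 3rd end-of-window (values are illustrative)
runPartialWindowCheckpointPersistence(100, 20, 3);
}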
Use of com.linkedin.databus.core.util.IdNamePair in project databus by LinkedIn.
From the class TestGenericDispatcher, method testOneWindowTwoIndependentConsumersHappyPath.
@Test(groups = { "small", "functional" })
public void testOneWindowTwoIndependentConsumersHappyPath() {
final Logger log = Logger.getLogger("TestGenericDispatcher.testOneWindowTwoIndependentConsumersHappyPath");
log.setLevel(Level.INFO);
log.info("start");
final Level saveLevel = Logger.getLogger("com.linkedin.databus.client").getLevel();
//Logger.getLogger("com.linkedin.databus.client").setLevel(Level.DEBUG);
final int source1EventsNum = 2;
final int source2EventsNum = 2;
final Hashtable<Long, AtomicInteger> keyCounts = new Hashtable<Long, AtomicInteger>();
final Hashtable<Short, AtomicInteger> srcidCounts = new Hashtable<Short, AtomicInteger>();
final TestGenericDispatcherEventBuffer eventsBuf = new TestGenericDispatcherEventBuffer(_generic100KBufferStaticConfig);
eventsBuf.start(0);
eventsBuf.startEvents();
initBufferWithEvents(eventsBuf, 1, source1EventsNum, (short) 1, keyCounts, srcidCounts);
initBufferWithEvents(eventsBuf, 1 + source1EventsNum, source2EventsNum, (short) 2, keyCounts, srcidCounts);
eventsBuf.endEvents(100L);
Hashtable<Long, AtomicInteger> keyCounts2 = new Hashtable<Long, AtomicInteger>();
Hashtable<Short, AtomicInteger> srcidCounts2 = new Hashtable<Short, AtomicInteger>();
for (Long key : keyCounts.keySet()) {
keyCounts2.put(key, new AtomicInteger(0));
}
for (Short srcid : srcidCounts.keySet()) {
srcidCounts2.put(srcid, new AtomicInteger(0));
}
DatabusStreamConsumer mockConsumer = new EventCountingConsumer(new StateVerifyingStreamConsumer(null), keyCounts, srcidCounts);
DatabusStreamConsumer mockConsumer2 = new EventCountingConsumer(new StateVerifyingStreamConsumer(null), keyCounts2, srcidCounts2);
SelectingDatabusCombinedConsumer sdccMockConsumer = new SelectingDatabusCombinedConsumer(mockConsumer);
SelectingDatabusCombinedConsumer sdccMockConsumer2 = new SelectingDatabusCombinedConsumer(mockConsumer2);
List<String> sources = new ArrayList<String>();
Map<Long, IdNamePair> sourcesMap = new HashMap<Long, IdNamePair>();
for (int i = 1; i <= 3; ++i) {
IdNamePair sourcePair = new IdNamePair((long) i, "source" + i);
sources.add(sourcePair.getName());
sourcesMap.put(sourcePair.getId(), sourcePair);
}
DatabusV2ConsumerRegistration consumerReg = new DatabusV2ConsumerRegistration(sdccMockConsumer, sources, null);
DatabusV2ConsumerRegistration consumer2Reg = new DatabusV2ConsumerRegistration(sdccMockConsumer2, sources, null);
List<DatabusV2ConsumerRegistration> allRegistrations = Arrays.asList(consumerReg, consumer2Reg);
MultiConsumerCallback callback = new MultiConsumerCallback(allRegistrations, Executors.newFixedThreadPool(2), 1000, new StreamConsumerCallbackFactory(null, null), null, null, null, null);
callback.setSourceMap(sourcesMap);
List<DatabusSubscription> subs = DatabusSubscription.createSubscriptionList(sources);
final RelayDispatcher dispatcher = new RelayDispatcher("dispatcher", _genericRelayConnStaticConfig, subs, new InMemoryPersistenceProvider(), eventsBuf, callback, null, null, null, null, null);
Thread dispatcherThread = new Thread(dispatcher);
dispatcherThread.setDaemon(true);
log.info("starting dispatcher thread");
dispatcherThread.start();
HashMap<Long, List<RegisterResponseEntry>> schemaMap = new HashMap<Long, List<RegisterResponseEntry>>();
List<RegisterResponseEntry> l1 = new ArrayList<RegisterResponseEntry>();
List<RegisterResponseEntry> l2 = new ArrayList<RegisterResponseEntry>();
List<RegisterResponseEntry> l3 = new ArrayList<RegisterResponseEntry>();
l1.add(new RegisterResponseEntry(1L, (short) 1, SOURCE1_SCHEMA_STR));
l2.add(new RegisterResponseEntry(2L, (short) 1, SOURCE2_SCHEMA_STR));
l3.add(new RegisterResponseEntry(3L, (short) 1, SOURCE3_SCHEMA_STR));
schemaMap.put(1L, l1);
schemaMap.put(2L, l2);
schemaMap.put(3L, l3);
dispatcher.enqueueMessage(SourcesMessage.createSetSourcesIdsMessage(sourcesMap.values()));
dispatcher.enqueueMessage(SourcesMessage.createSetSourcesSchemasMessage(schemaMap));
log.info("starting event dispatch");
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return null != dispatcher.getDispatcherState().getEventsIterator() && !dispatcher.getDispatcherState().getEventsIterator().hasNext();
}
}, "all events processed", 5000, log);
dispatcher.shutdown();
log.info("all events processed");
for (long i = 1; i <= source1EventsNum + source2EventsNum; ++i) {
assertEquals("correct amount of callbacks for key " + i, 1, keyCounts.get(i).intValue());
assertEquals("correct amount of callbacks for key " + i, 1, keyCounts2.get(i).intValue());
}
assertEquals("correct amount of callbacks for srcid 1", source1EventsNum, srcidCounts.get((short) 1).intValue());
assertEquals("correct amount of callbacks for srcid 2", source2EventsNum, srcidCounts.get((short) 2).intValue());
assertEquals("correct amount of callbacks for srcid 1", source1EventsNum, srcidCounts2.get((short) 1).intValue());
assertEquals("correct amount of callbacks for srcid 2", source2EventsNum, srcidCounts2.get((short) 2).intValue());
verifyNoLocks(null, eventsBuf);
Logger.getLogger("com.linkedin.databus.client").setLevel(saveLevel);
log.info("end\n");
}
Use of com.linkedin.databus.core.util.IdNamePair in project databus by LinkedIn.
From the class TestGenericDispatcher, method testLargeWindowCheckpointFrequency.
@Test(groups = { "small", "functional" })
public void testLargeWindowCheckpointFrequency() throws Exception {
final Logger log = Logger.getLogger("TestGenericDispatcher.testLargeWindowCheckpointFrequency");
log.info("start");
/* Consumer creation */
int timeTakenForEventInMs = 1;
DatabusStreamConsumer tConsumer = new TimeoutTestConsumer(timeTakenForEventInMs);
DatabusCombinedConsumer sdccTConsumer = new SelectingDatabusCombinedConsumer(tConsumer);
HashMap<Long, List<RegisterResponseEntry>> schemaMap = new HashMap<Long, List<RegisterResponseEntry>>();
short srcId = 1;
List<RegisterResponseEntry> l1 = new ArrayList<RegisterResponseEntry>();
l1.add(new RegisterResponseEntry(1L, srcId, SOURCE1_SCHEMA_STR));
schemaMap.put(1L, l1);
Map<Long, IdNamePair> sourcesMap = new HashMap<Long, IdNamePair>();
List<String> sources = new ArrayList<String>();
for (int i = 1; i <= 1; ++i) {
IdNamePair sourcePair = new IdNamePair((long) i, "source" + i);
sources.add(sourcePair.getName());
sourcesMap.put(sourcePair.getId(), sourcePair);
}
DatabusV2ConsumerRegistration consumerReg = new DatabusV2ConsumerRegistration(sdccTConsumer, sources, null);
List<DatabusV2ConsumerRegistration> allRegistrations = Arrays.asList(consumerReg);
MultiConsumerCallback mConsumer = new MultiConsumerCallback(allRegistrations, Executors.newFixedThreadPool(2), 1000, new StreamConsumerCallbackFactory(null, null), null, null, null, null);
/* Source configuration */
double thresholdChkptPct = 50.0;
DatabusSourcesConnection.Config conf = new DatabusSourcesConnection.Config();
conf.setCheckpointThresholdPct(thresholdChkptPct);
DatabusSourcesConnection.StaticConfig connConfig = conf.build();
/* Generate events */
Vector<DbusEvent> srcTestEvents = new Vector<DbusEvent>();
Vector<Short> srcIdList = new Vector<Short>();
srcIdList.add(srcId);
int numEvents = 100;
int payloadSize = 20;
int maxWindowSize = 80;
DbusEventGenerator evGen = new DbusEventGenerator(0, srcIdList);
Assert.assertTrue(evGen.generateEvents(numEvents, maxWindowSize, 512, payloadSize, srcTestEvents) > 0);
int size = 0;
for (DbusEvent e : srcTestEvents) {
if (!e.isControlMessage()) {
size = e.size();
break;
}
}
//make buffer large enough to hold data
int producerBufferSize = (int) (numEvents * size * 1.1);
int individualBufferSize = producerBufferSize;
int indexSize = producerBufferSize / 10;
int stagingBufferSize = producerBufferSize;
/*Event Buffer creation */
final TestGenericDispatcherEventBuffer dataEventsBuffer = new TestGenericDispatcherEventBuffer(getConfig(producerBufferSize, individualBufferSize, indexSize, stagingBufferSize, AllocationPolicy.HEAP_MEMORY, QueuePolicy.BLOCK_ON_WRITE));
List<DatabusSubscription> subs = DatabusSubscription.createSubscriptionList(sources);
/* Generic Dispatcher creation */
TestDispatcher<DatabusCombinedConsumer> dispatcher = new TestDispatcher<DatabusCombinedConsumer>("freqCkpt", connConfig, subs, new InMemoryPersistenceProvider(), dataEventsBuffer, mConsumer, true);
/* Launch writer */
DbusEventAppender eventProducer = new DbusEventAppender(srcTestEvents, dataEventsBuffer, null);
Thread tEmitter = new Thread(eventProducer);
tEmitter.start();
tEmitter.join();
/* Launch dispatcher */
Thread tDispatcher = new Thread(dispatcher);
tDispatcher.start();
/* Now initialize this state machine */
dispatcher.enqueueMessage(SourcesMessage.createSetSourcesIdsMessage(sourcesMap.values()));
dispatcher.enqueueMessage(SourcesMessage.createSetSourcesSchemasMessage(schemaMap));
Thread.sleep(2000);
System.out.println("Number of checkpoints = " + dispatcher.getNumCheckPoints());
Assert.assertTrue(dispatcher.getNumCheckPoints() == 3);
dispatcher.shutdown();
verifyNoLocks(null, dataEventsBuffer);
log.info("end\n");
}
Use of com.linkedin.databus.core.util.IdNamePair in project databus by LinkedIn.
From the class TestGenericDispatcher, method testShutdownBeforeRollback.
/**
* 1. The dispatcher is dispatching two windows of events.
* 2. The first window is consumed successfully.
* 3. The second window's onStartDataEventSequence() callback of the registered consumer is blocked (interruptibly),
* causing the dispatcher to wait.
* 4. At this instant the dispatcher is shut down. The callback is made to return a failure status, which would
* cause a rollback in the normal scenario.
* 5. As the shutdown message is passed, the blocked callback is expected to be interrupted and no rollback
* calls MUST be made.
*/
@Test(groups = { "small", "functional" })
public void testShutdownBeforeRollback() throws Exception {
final Logger log = Logger.getLogger("TestGenericDispatcher.testShutdownBeforeRollback");
log.setLevel(Level.INFO);
//log.getRoot().setLevel(Level.DEBUG);
LOG.info("start");
// generate events
Vector<Short> srcIdList = new Vector<Short>();
srcIdList.add((short) 1);
DbusEventGenerator evGen = new DbusEventGenerator(0, srcIdList);
Vector<DbusEvent> srcTestEvents = new Vector<DbusEvent>();
final int numEvents = 8;
final int numEventsPerWindow = 4;
final int payloadSize = 200;
Assert.assertTrue(evGen.generateEvents(numEvents, numEventsPerWindow, 500, payloadSize, srcTestEvents) > 0);
// find out how much data we need to stream for the failure
// account for the EOW event which is < payload size
int win1Size = payloadSize - 1;
for (DbusEvent e : srcTestEvents) {
win1Size += e.size();
}
//serialize the events to a buffer so they can be sent to the client
final TestGenericDispatcherEventBuffer srcEventsBuf = new TestGenericDispatcherEventBuffer(_generic100KBufferStaticConfig);
DbusEventAppender eventProducer = new DbusEventAppender(srcTestEvents, srcEventsBuf, null, true);
Thread tEmitter = new Thread(eventProducer);
tEmitter.start();
//Create destination (client) buffer
final TestGenericDispatcherEventBuffer destEventsBuf = new TestGenericDispatcherEventBuffer(_generic100KBufferStaticConfig);
/**
* Consumer with the ability to wait on a latch during onStartDataEventSequence().
*/
class TimeoutDESConsumer extends TimeoutTestConsumer {
private final CountDownLatch latch = new CountDownLatch(1);
private int _countStartWindow = 0;
private final int _failedRequestNumber;
public TimeoutDESConsumer(int failedRequestNumber) {
super(1, 1, 0, 0, 0, 0);
_failedRequestNumber = failedRequestNumber;
}
public CountDownLatch getLatch() {
return latch;
}
@Override
public ConsumerCallbackResult onStartDataEventSequence(SCN startScn) {
_countStartWindow++;
if (_countStartWindow == _failedRequestNumber) {
// Wait for the latch to open
try {
latch.await();
} catch (InterruptedException e) {
// ignore; fall through and report a fatal error
}
return ConsumerCallbackResult.ERROR_FATAL;
}
return super.onStartDataEventSequence(startScn);
}
@Override
public ConsumerCallbackResult onDataEvent(DbusEvent e, DbusEventDecoder eventDecoder) {
return ConsumerCallbackResult.SUCCESS;
}
public int getNumBeginWindowCalls() {
return _countStartWindow;
}
}
//Create dispatcher
//fail on second window
final TimeoutDESConsumer mockConsumer = new TimeoutDESConsumer(2);
SelectingDatabusCombinedConsumer sdccMockConsumer = new SelectingDatabusCombinedConsumer((DatabusStreamConsumer) mockConsumer);
List<String> sources = new ArrayList<String>();
Map<Long, IdNamePair> sourcesMap = new HashMap<Long, IdNamePair>();
for (int i = 1; i <= 3; ++i) {
IdNamePair sourcePair = new IdNamePair((long) i, "source" + i);
sources.add(sourcePair.getName());
sourcesMap.put(sourcePair.getId(), sourcePair);
}
DatabusV2ConsumerRegistration consumerReg = new DatabusV2ConsumerRegistration(sdccMockConsumer, sources, null);
List<DatabusV2ConsumerRegistration> allRegistrations = Arrays.asList(consumerReg);
final ConsumerCallbackStats callbackStats = new ConsumerCallbackStats(0, "test", "test", true, false, null);
final UnifiedClientStats unifiedStats = new UnifiedClientStats(0, "test", "test.unified");
// 100 ms budget
MultiConsumerCallback callback = new MultiConsumerCallback(allRegistrations, Executors.newFixedThreadPool(2), 100, new StreamConsumerCallbackFactory(callbackStats, unifiedStats), callbackStats, unifiedStats, null, null);
callback.setSourceMap(sourcesMap);
List<DatabusSubscription> subs = DatabusSubscription.createSubscriptionList(sources);
final RelayDispatcher dispatcher = new RelayDispatcher("dispatcher", _genericRelayConnStaticConfig, subs, new InMemoryPersistenceProvider(), destEventsBuf, callback, null, null, null, null, null);
final Thread dispatcherThread = new Thread(dispatcher);
log.info("starting dispatcher thread");
dispatcherThread.start();
// Generate RegisterResponse entries for the schemas
HashMap<Long, List<RegisterResponseEntry>> schemaMap = new HashMap<Long, List<RegisterResponseEntry>>();
List<RegisterResponseEntry> l1 = new ArrayList<RegisterResponseEntry>();
List<RegisterResponseEntry> l2 = new ArrayList<RegisterResponseEntry>();
List<RegisterResponseEntry> l3 = new ArrayList<RegisterResponseEntry>();
l1.add(new RegisterResponseEntry(1L, (short) 1, SOURCE1_SCHEMA_STR));
l2.add(new RegisterResponseEntry(2L, (short) 1, SOURCE2_SCHEMA_STR));
l3.add(new RegisterResponseEntry(3L, (short) 1, SOURCE3_SCHEMA_STR));
schemaMap.put(1L, l1);
schemaMap.put(2L, l2);
schemaMap.put(3L, l3);
// Enqueue necessary messages before starting dispatch
dispatcher.enqueueMessage(SourcesMessage.createSetSourcesIdsMessage(sourcesMap.values()));
dispatcher.enqueueMessage(SourcesMessage.createSetSourcesSchemasMessage(schemaMap));
log.info("starting event dispatch");
//comm channels between reader and writer
Pipe pipe = Pipe.open();
Pipe.SinkChannel writerStream = pipe.sink();
Pipe.SourceChannel readerStream = pipe.source();
writerStream.configureBlocking(true);
/*
* Needed for DbusEventBuffer.readEvents() to exit its loop when no more data is available.
* With the Pipe mimicking ChunkedBodyReadableByteChannel, we need to make the Pipe non-blocking on the
* reader side to achieve the behavior that ChunkedBodyReadableByteChannel provides.
*/
readerStream.configureBlocking(false);
//Event writer - Relay in the real world
Checkpoint cp = Checkpoint.createFlexibleCheckpoint();
//Event readers - Clients in the real world
//Checkpoint pullerCheckpoint = Checkpoint.createFlexibleCheckpoint();
DbusEventsStatisticsCollector clientStats = new DbusEventsStatisticsCollector(0, "client", true, false, null);
DbusEventBufferReader reader = new DbusEventBufferReader(destEventsBuf, readerStream, null, clientStats);
UncaughtExceptionTrackingThread tReader = new UncaughtExceptionTrackingThread(reader, "Reader");
tReader.setDaemon(true);
tReader.start();
try {
log.info("send both windows");
StreamEventsResult streamRes = srcEventsBuf.streamEvents(cp, writerStream, new StreamEventsArgs(win1Size));
// the extra 2 are EOP (end-of-window) control events, presumably
Assert.assertEquals("num events streamed should equal total number of events plus 2", numEvents + 2, streamRes.getNumEventsStreamed());
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return 2 == mockConsumer.getNumBeginWindowCalls();
}
}, "second window processing started", 5000, log);
dispatcher.shutdown();
// remove the barrier
mockConsumer.getLatch().countDown();
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return !dispatcherThread.isAlive();
}
}, "Ensure Dispatcher thread is shutdown", 5000, log);
TestUtil.assertWithBackoff(new ConditionCheck() {
@Override
public boolean check() {
return 0 == mockConsumer.getNumRollbacks();
}
}, "Ensure No Rollback is called", 10, log);
} finally {
reader.stop();
}
log.info("end\n");
}