Use of org.openkilda.wfm.topology.TestKafkaConsumer in project open-kilda by telstra.
The class CacheTopologyTest, method setupOnce:
@BeforeClass
public static void setupOnce() throws Exception {
    AbstractStormTest.setupOnce();

    // Flows shared by the test cases in this class.
    flows.add(firstFlow);
    flows.add(secondFlow);

    // Build the cache topology and submit it to the embedded Storm cluster.
    topology = new CacheTopology(makeLaunchEnvironment());
    StormTopology stormTopology = topology.createTopology();
    Config config = stormConfig();
    cluster.submitTopology(CacheTopologyTest.class.getSimpleName(), config, stormTopology);

    // Kafka consumers used by the tests to read the topology's output topics.
    teConsumer = new TestKafkaConsumer(
            topology.getConfig().getKafkaTopoEngTopic(), Destination.TOPOLOGY_ENGINE,
            kafkaProperties(UUID.nameUUIDFromBytes(Destination.TOPOLOGY_ENGINE.toString().getBytes()).toString()));
    teConsumer.start();

    flowConsumer = new TestKafkaConsumer(
            topology.getConfig().getKafkaFlowTopic(), Destination.WFM,
            kafkaProperties(UUID.nameUUIDFromBytes(Destination.WFM.toString().getBytes()).toString()));
    flowConsumer.start();

    ctrlConsumer = new TestKafkaConsumer(
            topology.getConfig().getKafkaCtrlTopic(), Destination.CTRL_CLIENT,
            kafkaProperties(UUID.nameUUIDFromBytes(Destination.CTRL_CLIENT.toString().getBytes()).toString()));
    ctrlConsumer.start();
}
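The string handed to kafkaProperties(...) in each constructor is a name-based (type 3) UUID computed from the Destination name, so it is deterministic across runs; presumably it ends up as the consumer group id. A standalone illustration of that expression (nothing here is open-kilda specific beyond the Destination enum):

// UUID.nameUUIDFromBytes produces the same UUID for the same input bytes,
// so each Destination always maps to the same identifier.
String wfmId = UUID.nameUUIDFromBytes(Destination.WFM.toString().getBytes()).toString();
String ctrlId = UUID.nameUUIDFromBytes(Destination.CTRL_CLIENT.toString().getBytes()).toString();
// wfmId and ctrlId differ from each other, but each is stable across test runs.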
Use of org.openkilda.wfm.topology.TestKafkaConsumer in project open-kilda by telstra.
The class OFELinkBoltFloodTest, method warmBoltOnHighLoadedTopic:
@Test(timeout = 5000 * 60)
public void warmBoltOnHighLoadedTopic() throws Exception {
    topology = new OFEventWFMTopology(makeLaunchEnvironment());

    teConsumer = new TestKafkaConsumer(
            topology.getConfig().getKafkaTopoEngTopic(),
            kafkaProperties(UUID.nameUUIDFromBytes(Destination.TOPOLOGY_ENGINE.toString().getBytes()).toString()));
    teConsumer.start();

    // Number of messages to put into the topic before the bolt starts.
    final int floodSize = 100000;
    SwitchInfoData data = new SwitchInfoData("switchId", SwitchState.ADDED, "address", "hostname",
            "description", "controller");
    InfoMessage message = new InfoMessage(data, System.currentTimeMillis(), UUID.randomUUID().toString());

    // Flooding.
    sendMessages(message, topology.getConfig().getKafkaTopoDiscoTopic(), floodSize);

    StormTopology stormTopology = topology.createTopology();
    Config config = stormConfig();
    cluster.submitTopology(OFELinkBoltFloodTest.class.getSimpleName(), config, stormTopology);

    NetworkInfoData dump = new NetworkInfoData("test", Collections.emptySet(), Collections.emptySet(),
            Collections.emptySet(), Collections.emptySet());
    InfoMessage info = new InfoMessage(dump, 0, DEFAULT_CORRELATION_ID, Destination.WFM);
    String request = objectMapper.writeValueAsString(info);

    // Send the dump message to the topic at offset floodSize + 1.
    kProducer.pushMessage(topology.getConfig().getKafkaTopoDiscoTopic(), request);

    // Wait for all flooded messages to come through.
    int polled = 0;
    while (polled < floodSize) {
        if (teConsumer.pollMessage() != null) {
            ++polled;
        }
    }
    assertEquals(floodSize, polled);
}
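The sendMessages(...) helper used for the flood is not shown here. A plausible sketch, assuming it serializes the message once with the test's objectMapper and pushes it repeatedly through the same kProducer used later in the method (the real open-kilda helper may differ):

// Hypothetical reconstruction of the flooding helper; parameter types are
// inferred from the call site above.
private void sendMessages(InfoMessage message, String topic, int count) throws Exception {
    String json = objectMapper.writeValueAsString(message);
    for (int i = 0; i < count; i++) {
        kProducer.pushMessage(topic, json);
    }
}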
Use of org.openkilda.wfm.topology.TestKafkaConsumer in project open-kilda by telstra.
The class FlowTopologyTest, method setupOnce:
@BeforeClass
public static void setupOnce() throws Exception {
    AbstractStormTest.setupOnce();

    flowTopology = new FlowTopology(makeLaunchEnvironment(), new PathComputerAuth());
    topologyConfig = flowTopology.getConfig();
    StormTopology stormTopology = flowTopology.createTopology();
    Config config = stormConfig();
    cluster.submitTopology(FlowTopologyTest.class.getSimpleName(), config, stormTopology);

    nbConsumer = new TestKafkaConsumer(
            topologyConfig.getKafkaNorthboundTopic(), Destination.NORTHBOUND,
            kafkaProperties(UUID.nameUUIDFromBytes(Destination.NORTHBOUND.toString().getBytes()).toString()));
    nbConsumer.start();

    ofsConsumer = new TestKafkaConsumer(
            topologyConfig.getKafkaSpeakerTopic(), Destination.CONTROLLER,
            kafkaProperties(UUID.nameUUIDFromBytes(Destination.CONTROLLER.toString().getBytes()).toString()));
    ofsConsumer.start();

    cacheConsumer = new TestKafkaConsumer(
            topologyConfig.getKafkaTopoCacheTopic(), null,
            kafkaProperties(UUID.nameUUIDFromBytes(Destination.TOPOLOGY_ENGINE.toString().getBytes()).toString()));
    cacheConsumer.start();

    // teResponseConsumer = new TestKafkaConsumer(topologyConfig.getKafkaTopoEngTopic(),
    teResponseConsumer = new TestKafkaConsumer(
            topologyConfig.getKafkaFlowTopic(), Destination.WFM,
            kafkaProperties(UUID.nameUUIDFromBytes(Destination.WFM.toString().getBytes()).toString()));
    teResponseConsumer.start();

    ctrlConsumer = new TestKafkaConsumer(
            flowTopology.getConfig().getKafkaCtrlTopic(), Destination.CTRL_CLIENT,
            kafkaProperties(UUID.nameUUIDFromBytes(Destination.CTRL_CLIENT.toString().getBytes()).toString()));
    ctrlConsumer.start();

    // Give the submitted topology time to start before the tests run.
    Utils.sleep(10000);
}
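Because these consumers are shared static fields, tests typically need to discard messages left over from a previous case before issuing new requests. A minimal sketch of such a cleanup, assuming pollMessage() returns null when nothing is buffered (as in the flood test above); clear(...) itself is a hypothetical helper, not part of TestKafkaConsumer:

// Hypothetical between-test cleanup; not taken from the open-kilda sources.
private static void clear(TestKafkaConsumer... consumers) throws Exception {
    for (TestKafkaConsumer consumer : consumers) {
        // Keep polling until the consumer has nothing more buffered.
        while (consumer.pollMessage() != null) {
            // Drop stale messages produced during the previous test case.
        }
    }
}

// For example, from an @After method:
// clear(nbConsumer, ofsConsumer, cacheConsumer, teResponseConsumer, ctrlConsumer);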