Use of org.apache.metron.integration.InMemoryComponent in the Apache Metron project.
Example: the startIndex method of the class ElasticsearchSearchIntegrationTest.
@Override
protected InMemoryComponent startIndex() throws Exception {
    // Stand up an in-memory Elasticsearch instance on HTTP port 9211,
    // backed by the test's configured index directory, and start it
    // before handing it back to the caller.
    InMemoryComponent elasticsearch =
        new ElasticSearchComponent.Builder()
            .withHttpPort(9211)
            .withIndexDir(new File(indexDir))
            .build();
    elasticsearch.start();
    return elasticsearch;
}
Use of org.apache.metron.integration.InMemoryComponent in the Apache Metron project.
Example: the test method of the class IndexingIntegrationTest.
/**
 * End-to-end test of the indexing topology: uploads configs to ZooKeeper, submits
 * the Flux topology, writes the sample messages to the indexing Kafka topic, and
 * asserts that the documents emitted by the topology match the input messages
 * (after field-name conversion).
 *
 * @throws Exception if any infrastructure component fails to start or process
 */
@Test
public void test() throws Exception {
    preTest();
    final List<byte[]> inputMessages = TestUtils.readSampleData(sampleParsedPath);

    // Topology configuration. Plain construction avoids the double-brace
    // anonymous-subclass idiom, which creates a throwaway subclass holding a
    // hidden reference to the enclosing test instance.
    final Properties topologyProperties = new Properties();
    topologyProperties.setProperty("indexing_kafka_start", "UNCOMMITTED_EARLIEST");
    topologyProperties.setProperty("kafka_security_protocol", "PLAINTEXT");
    topologyProperties.setProperty("topology_auto_credentials", "[]");
    topologyProperties.setProperty("indexing_workers", "1");
    topologyProperties.setProperty("indexing_acker_executors", "0");
    topologyProperties.setProperty("indexing_topology_worker_childopts", "");
    topologyProperties.setProperty("indexing_topology_max_spout_pending", "");
    topologyProperties.setProperty("indexing_input_topic", Constants.INDEXING_TOPIC);
    topologyProperties.setProperty("indexing_error_topic", ERROR_TOPIC);
    topologyProperties.setProperty("indexing_kafka_spout_parallelism", "1");
    topologyProperties.setProperty("indexing_writer_parallelism", "1");
    setAdditionalProperties(topologyProperties);

    final ZKServerComponent zkServerComponent = getZKServerComponent(topologyProperties);
    List<KafkaComponent.Topic> topics = new ArrayList<>();
    topics.add(new KafkaComponent.Topic(Constants.INDEXING_TOPIC, 1));
    topics.add(new KafkaComponent.Topic(ERROR_TOPIC, 1));
    final KafkaComponent kafkaComponent = getKafkaComponent(topologyProperties, topics);

    // Parse the raw sample messages into maps so we can compare them against the
    // documents the topology ultimately produces.
    List<Map<String, Object>> inputDocs = new ArrayList<>(inputMessages.size());
    for (byte[] b : inputMessages) {
        inputDocs.add(JSONUtils.INSTANCE.load(new String(b), JSONUtils.MAP_SUPPLIER));
    }

    // Flipped to true by the config-upload callback once the index is ready.
    final AtomicBoolean isLoaded = new AtomicBoolean(false);
    ConfigUploadComponent configUploadComponent = new ConfigUploadComponent()
        .withTopologyProperties(topologyProperties)
        .withGlobalConfigsPath(TestConstants.SAMPLE_CONFIG_PATH)
        .withEnrichmentConfigsPath(TestConstants.SAMPLE_CONFIG_PATH)
        .withIndexingConfigsPath(TestConstants.SAMPLE_CONFIG_PATH)
        .withPostStartCallback(component -> {
            try {
                waitForIndex(component.getTopologyProperties()
                    .getProperty(ZKServerComponent.ZOOKEEPER_PROPERTY));
            } catch (Exception e) {
                // NOTE(review): failures here are only logged and the test proceeds
                // anyway (best-effort wait); consider failing fast instead.
                e.printStackTrace();
            }
            isLoaded.set(true);
        });

    FluxTopologyComponent fluxComponent = new FluxTopologyComponent.Builder()
        .withTopologyLocation(new File(getFluxPath()))
        .withTopologyName("test")
        .withTemplateLocation(new File(getTemplatePath()))
        .withTopologyProperties(topologyProperties)
        .build();

    ComponentRunner runner = null;
    InMemoryComponent searchComponent = getSearchComponent(topologyProperties);
    ComponentRunner.Builder componentBuilder = new ComponentRunner.Builder();
    componentBuilder = componentBuilder
        .withComponent("zk", zkServerComponent)
        .withComponent("kafka", kafkaComponent)
        .withComponent("config", configUploadComponent)
        .withComponent("storm", fluxComponent)
        .withMillisecondsBetweenAttempts(1500)
        .withNumRetries(NUM_RETRIES)
        .withMaxTimeMS(TOTAL_TIME_MS);
    if (searchComponent != null) {
        // BUGFIX: register the component we already created and null-checked; the
        // original called getSearchComponent(...) a second time here, registering a
        // different instance and discarding the first.
        componentBuilder = componentBuilder
            .withComponent("search", searchComponent)
            .withCustomShutdownOrder(new String[] { "search", "storm", "config", "kafka", "zk" });
    } else {
        componentBuilder = componentBuilder
            .withCustomShutdownOrder(new String[] { "storm", "config", "kafka", "zk" });
    }
    runner = componentBuilder.build();
    try {
        runner.start();
        // Block until the config-upload post-start callback signals readiness.
        while (!isLoaded.get()) {
            Thread.sleep(100);
        }
        fluxComponent.submitTopology();
        kafkaComponent.writeMessages(Constants.INDEXING_TOPIC, inputMessages);
        List<Map<String, Object>> docs = cleanDocs(runner.process(getProcessor(inputMessages)));
        // BUGFIX: JUnit's contract is assertEquals(expected, actual); the original
        // had the arguments swapped, which produces a misleading failure message.
        Assert.assertEquals(inputMessages.size(), docs.size());
        // assert that our input docs are equivalent to the output docs, converting
        // the input docs keys based on the field name converter
        assertInputDocsMatchOutputs(inputDocs, docs, getFieldNameConverter());
    } finally {
        if (runner != null) {
            runner.stop();
        }
    }
}
Aggregations