Search in sources :

Example 1 with ParserTopologyComponent

Use of org.apache.metron.parsers.integration.components.ParserTopologyComponent in project metron by apache.

From the class WriterBoltIntegrationTest, method test.

@Test
public void test() throws UnableToStartException, IOException, ParseException {
    UnitTestHelper.setLog4jLevel(CSVParser.class, org.apache.log4j.Level.FATAL);
    final String sensorType = "dummy";
    final List<byte[]> inputMessages = new ArrayList<byte[]>() {

        {
            add(Bytes.toBytes("valid,foo"));
            add(Bytes.toBytes("invalid,foo"));
            add(Bytes.toBytes("error"));
        }
    };
    final Properties topologyProperties = new Properties();
    final ZKServerComponent zkServerComponent = getZKServerComponent(topologyProperties);
    final KafkaComponent kafkaComponent = getKafkaComponent(topologyProperties, new ArrayList<KafkaComponent.Topic>() {

        {
            add(new KafkaComponent.Topic(sensorType, 1));
            add(new KafkaComponent.Topic(ERROR_TOPIC, 1));
            add(new KafkaComponent.Topic(Constants.ENRICHMENT_TOPIC, 1));
        }
    });
    topologyProperties.setProperty("kafka.broker", kafkaComponent.getBrokerList());
    ConfigUploadComponent configUploadComponent = new ConfigUploadComponent()
            .withTopologyProperties(topologyProperties)
            .withGlobalConfig(globalConfig)
            .withParserSensorConfig(sensorType, JSONUtils.INSTANCE.load(parserConfig, SensorParserConfig.class));
    ParserTopologyComponent parserTopologyComponent = new ParserTopologyComponent.Builder()
            .withSensorType(sensorType)
            .withTopologyProperties(topologyProperties)
            .withBrokerUrl(kafkaComponent.getBrokerList())
            .build();
    // UnitTestHelper.verboseLogging();
    ComponentRunner runner = new ComponentRunner.Builder()
            .withComponent("zk", zkServerComponent)
            .withComponent("kafka", kafkaComponent)
            .withComponent("config", configUploadComponent)
            .withComponent("storm", parserTopologyComponent)
            .withMillisecondsBetweenAttempts(5000)
            .withNumRetries(10)
            .withCustomShutdownOrder(new String[] { "storm", "config", "kafka", "zk" })
            .build();
    try {
        runner.start();
        kafkaComponent.writeMessages(sensorType, inputMessages);
        ProcessorResult<Map<String, List<JSONObject>>> result = runner.process(getProcessor());
        Map<String, List<JSONObject>> outputMessages = result.getResult();
        Assert.assertEquals(2, outputMessages.size());
        Assert.assertEquals(1, outputMessages.get(Constants.ENRICHMENT_TOPIC).size());
        Assert.assertEquals("valid", outputMessages.get(Constants.ENRICHMENT_TOPIC).get(0).get("action"));
        Assert.assertEquals(2, outputMessages.get(ERROR_TOPIC).size());
        JSONObject invalidMessage = outputMessages.get(ERROR_TOPIC).get(0);
        Assert.assertEquals(Constants.ErrorType.PARSER_INVALID.getType(), invalidMessage.get(Constants.ErrorFields.ERROR_TYPE.getName()));
        JSONObject rawMessage = JSONUtils.INSTANCE.load((String) invalidMessage.get(Constants.ErrorFields.RAW_MESSAGE.getName()), JSONObject.class);
        Assert.assertEquals("foo", rawMessage.get("dummy"));
        Assert.assertEquals("invalid", rawMessage.get("action"));
        JSONObject errorMessage = outputMessages.get(ERROR_TOPIC).get(1);
        Assert.assertEquals(Constants.ErrorType.PARSER_ERROR.getType(), errorMessage.get(Constants.ErrorFields.ERROR_TYPE.getName()));
        Assert.assertEquals("error", errorMessage.get(Constants.ErrorFields.RAW_MESSAGE.getName()));
    // It's unclear if we need a rawMessageBytes field so commenting out for now
    // Assert.assertTrue(Arrays.equals(listToBytes(errorMessage.get(Constants.ErrorFields.RAW_MESSAGE_BYTES.getName())), "error".getBytes()));
    } finally {
        if (runner != null) {
            runner.stop();
        }
    }
}
Also used : KafkaComponent(org.apache.metron.integration.components.KafkaComponent) ZKServerComponent(org.apache.metron.integration.components.ZKServerComponent) SensorParserConfig(org.apache.metron.common.configuration.SensorParserConfig) JSONObject(org.json.simple.JSONObject) ConfigUploadComponent(org.apache.metron.enrichment.integration.components.ConfigUploadComponent) ParserTopologyComponent(org.apache.metron.parsers.integration.components.ParserTopologyComponent) Test(org.junit.Test)
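
The getProcessor() call in this test is not part of the excerpt. Below is a minimal sketch of what such a processor could look like, following the Processor/ProcessorResult pattern visible in Example 2. The ComponentRunner.getComponent(name, clazz) and KafkaComponent.readMessages(topic) helpers it relies on are assumptions, not confirmed by this excerpt.

protected Processor<Map<String, List<JSONObject>>> getProcessor() {
    // Sketch only: groups parser output by topic so the assertions above can
    // look messages up via outputMessages.get(topic).
    return new Processor<Map<String, List<JSONObject>>>() {

        Map<String, List<JSONObject>> messagesByTopic = null;

        @Override
        public ReadinessState process(ComponentRunner runner) {
            // Assumed helpers: ComponentRunner.getComponent(...) and KafkaComponent.readMessages(...).
            KafkaComponent kafka = runner.getComponent("kafka", KafkaComponent.class);
            List<byte[]> enriched = kafka.readMessages(Constants.ENRICHMENT_TOPIC);
            List<byte[]> errors = kafka.readMessages(ERROR_TOPIC);
            // One valid message and two failures are expected before the result is ready.
            if (enriched.size() < 1 || errors.size() < 2) {
                return ReadinessState.NOT_READY;
            }
            messagesByTopic = new HashMap<>();
            messagesByTopic.put(Constants.ENRICHMENT_TOPIC, toJson(enriched));
            messagesByTopic.put(ERROR_TOPIC, toJson(errors));
            return ReadinessState.READY;
        }

        @Override
        public ProcessorResult<Map<String, List<JSONObject>>> getResult() {
            ProcessorResult.Builder<Map<String, List<JSONObject>>> builder = new ProcessorResult.Builder();
            return builder.withResult(messagesByTopic).build();
        }

        private List<JSONObject> toJson(List<byte[]> raw) {
            List<JSONObject> out = new ArrayList<>();
            for (byte[] bytes : raw) {
                try {
                    out.add(JSONUtils.INSTANCE.load(new String(bytes), JSONObject.class));
                } catch (IOException e) {
                    throw new IllegalStateException("Unable to parse message: " + new String(bytes), e);
                }
            }
            return out;
        }
    };
}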

Example 2 with ParserTopologyComponent

Use of org.apache.metron.parsers.integration.components.ParserTopologyComponent in project metron by apache.

From the class SimpleHbaseEnrichmentWriterIntegrationTest, method test.

@Test
public void test() throws UnableToStartException, IOException {
    final String sensorType = "dummy";
    final List<byte[]> inputMessages = new ArrayList<byte[]>() {

        {
            add(Bytes.toBytes("col11,col12,col13"));
            add(Bytes.toBytes("col21,col22,col23"));
            add(Bytes.toBytes("col31,col32,col33"));
        }
    };
    MockHBaseTableProvider.addToCache(sensorType, "cf");
    final Properties topologyProperties = new Properties();
    final ZKServerComponent zkServerComponent = getZKServerComponent(topologyProperties);
    final KafkaComponent kafkaComponent = getKafkaComponent(topologyProperties, new ArrayList<KafkaComponent.Topic>() {

        {
            add(new KafkaComponent.Topic(sensorType, 1));
        }
    });
    topologyProperties.setProperty("kafka.broker", kafkaComponent.getBrokerList());
    ConfigUploadComponent configUploadComponent = new ConfigUploadComponent()
            .withTopologyProperties(topologyProperties)
            .withGlobalConfigsPath(TestConstants.SAMPLE_CONFIG_PATH)
            .withParserSensorConfig(sensorType, JSONUtils.INSTANCE.load(parserConfig, SensorParserConfig.class));
    ParserTopologyComponent parserTopologyComponent = new ParserTopologyComponent.Builder()
            .withSensorType(sensorType)
            .withTopologyProperties(topologyProperties)
            .withBrokerUrl(kafkaComponent.getBrokerList())
            .build();
    // UnitTestHelper.verboseLogging();
    ComponentRunner runner = new ComponentRunner.Builder()
            .withComponent("zk", zkServerComponent)
            .withComponent("kafka", kafkaComponent)
            .withComponent("config", configUploadComponent)
            .withComponent("storm", parserTopologyComponent)
            .withMillisecondsBetweenAttempts(5000)
            .withCustomShutdownOrder(new String[] { "storm", "config", "kafka", "zk" })
            .withNumRetries(10)
            .build();
    try {
        runner.start();
        kafkaComponent.writeMessages(sensorType, inputMessages);
        ProcessorResult<List<LookupKV<EnrichmentKey, EnrichmentValue>>> result = runner.process(new Processor<List<LookupKV<EnrichmentKey, EnrichmentValue>>>() {

            List<LookupKV<EnrichmentKey, EnrichmentValue>> messages = null;

            @Override
            public ReadinessState process(ComponentRunner runner) {
                MockHTable table = (MockHTable) MockHBaseTableProvider.getFromCache(sensorType);
                if (table != null && table.size() == inputMessages.size()) {
                    EnrichmentConverter converter = new EnrichmentConverter();
                    messages = new ArrayList<>();
                    try {
                        for (Result r : table.getScanner(Bytes.toBytes("cf"))) {
                            messages.add(converter.fromResult(r, "cf"));
                        }
                    } catch (IOException e) {
                        // Ignore scan failures here and return READY with whatever rows were read.
                    }
                    return ReadinessState.READY;
                }
                return ReadinessState.NOT_READY;
            }

            @Override
            public ProcessorResult<List<LookupKV<EnrichmentKey, EnrichmentValue>>> getResult() {
                ProcessorResult.Builder<List<LookupKV<EnrichmentKey, EnrichmentValue>>> builder = new ProcessorResult.Builder();
                return builder.withResult(messages).build();
            }
        });
        Set<String> validIndicators = new HashSet<>(ImmutableList.of("col12", "col22", "col32"));
        Map<String, Map<String, String>> validMetadata = new HashMap<String, Map<String, String>>() {

            {
                put("col12", new HashMap<String, String>() {

                    {
                        put("col1", "col11");
                        put("col3", "col13");
                    }
                });
                put("col22", new HashMap<String, String>() {

                    {
                        put("col1", "col21");
                        put("col3", "col23");
                    }
                });
                put("col32", new HashMap<String, String>() {

                    {
                        put("col1", "col31");
                        put("col3", "col33");
                    }
                });
            }
        };
        for (LookupKV<EnrichmentKey, EnrichmentValue> kv : result.getResult()) {
            Assert.assertTrue(validIndicators.contains(kv.getKey().indicator));
            Assert.assertEquals(kv.getValue().getMetadata().get("source.type"), "dummy");
            Assert.assertNotNull(kv.getValue().getMetadata().get("timestamp"));
            Assert.assertNotNull(kv.getValue().getMetadata().get("original_string"));
            Map<String, String> metadata = validMetadata.get(kv.getKey().indicator);
            for (Map.Entry<String, String> x : metadata.entrySet()) {
                Assert.assertEquals(kv.getValue().getMetadata().get(x.getKey()), x.getValue());
            }
            Assert.assertEquals(metadata.size() + 4, kv.getValue().getMetadata().size());
        }
    } finally {
        if (runner != null) {
            runner.stop();
        }
    }
}
Also used : KafkaComponent(org.apache.metron.integration.components.KafkaComponent) ZKServerComponent(org.apache.metron.integration.components.ZKServerComponent) SensorParserConfig(org.apache.metron.common.configuration.SensorParserConfig) Result(org.apache.hadoop.hbase.client.Result) EnrichmentConverter(org.apache.metron.enrichment.converter.EnrichmentConverter) ConfigUploadComponent(org.apache.metron.enrichment.integration.components.ConfigUploadComponent) ParserTopologyComponent(org.apache.metron.parsers.integration.components.ParserTopologyComponent) ImmutableList(com.google.common.collect.ImmutableList) EnrichmentValue(org.apache.metron.enrichment.converter.EnrichmentValue) IOException(java.io.IOException) MockHTable(org.apache.metron.hbase.mock.MockHTable) EnrichmentKey(org.apache.metron.enrichment.converter.EnrichmentKey) LookupKV(org.apache.metron.enrichment.lookup.LookupKV) Test(org.junit.Test)
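
The processor in this example rebuilds LookupKV records with EnrichmentConverter.fromResult. As a point of reference, the sketch below round-trips a single record through the same mock table. It is illustrative only and assumes the converter's toPut(columnFamily, key, value) counterpart as well as the EnrichmentKey(type, indicator) and EnrichmentValue(metadata) constructors.

// Illustrative sketch, not part of the test: write one enrichment record and read it
// back the same way the Processor above does. toPut(...) and the key/value
// constructors used here are assumptions.
public static void enrichmentRoundTrip() throws IOException {
    MockHBaseTableProvider.addToCache("dummy", "cf");
    MockHTable table = (MockHTable) MockHBaseTableProvider.getFromCache("dummy");

    EnrichmentKey key = new EnrichmentKey("dummy", "col12");
    EnrichmentValue value = new EnrichmentValue(new HashMap<String, Object>() {
        {
            put("col1", "col11");
            put("col3", "col13");
        }
    });

    EnrichmentConverter converter = new EnrichmentConverter();
    // Serialize the key/value pair into a Put against column family "cf"...
    table.put(converter.toPut("cf", key, value));

    // ...and scan it back into LookupKV objects, as in the Processor above.
    for (Result r : table.getScanner(Bytes.toBytes("cf"))) {
        LookupKV<EnrichmentKey, EnrichmentValue> kv = converter.fromResult(r, "cf");
        System.out.println(kv.getKey().indicator + " -> " + kv.getValue().getMetadata());
    }
}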

Aggregations

SensorParserConfig (org.apache.metron.common.configuration.SensorParserConfig): 2
ConfigUploadComponent (org.apache.metron.enrichment.integration.components.ConfigUploadComponent): 2
KafkaComponent (org.apache.metron.integration.components.KafkaComponent): 2
ZKServerComponent (org.apache.metron.integration.components.ZKServerComponent): 2
ParserTopologyComponent (org.apache.metron.parsers.integration.components.ParserTopologyComponent): 2
Test (org.junit.Test): 2
ImmutableList (com.google.common.collect.ImmutableList): 1
IOException (java.io.IOException): 1
Result (org.apache.hadoop.hbase.client.Result): 1
EnrichmentConverter (org.apache.metron.enrichment.converter.EnrichmentConverter): 1
EnrichmentKey (org.apache.metron.enrichment.converter.EnrichmentKey): 1
EnrichmentValue (org.apache.metron.enrichment.converter.EnrichmentValue): 1
LookupKV (org.apache.metron.enrichment.lookup.LookupKV): 1
MockHTable (org.apache.metron.hbase.mock.MockHTable): 1
JSONObject (org.json.simple.JSONObject): 1