Use of org.apache.metron.enrichment.converter.EnrichmentKey in project metron by apache.
The class SimpleHbaseEnrichmentWriter, method write:
@Override
public BulkWriterResponse write(String sensorType, WriterConfiguration configurations, Iterable<Tuple> tuples, List<JSONObject> messages) throws Exception {
  Map<String, Object> sensorConfig = configurations.getSensorConfig(sensorType);
  HTableInterface table = getTable(sensorConfig);
  KeyTransformer transformer = getTransformer(sensorConfig);
  Object enrichmentTypeObj = Configurations.ENRICHMENT_TYPE.get(sensorConfig);
  String enrichmentType = enrichmentTypeObj == null ? null : enrichmentTypeObj.toString();
  Set<String> valueColumns = new HashSet<>(getColumns(Configurations.VALUE_COLUMNS.get(sensorConfig), true));
  List<Put> puts = new ArrayList<>();
  for (JSONObject message : messages) {
    EnrichmentKey key = getKey(message, transformer, enrichmentType);
    EnrichmentValue value = getValue(message, transformer.keySet, valueColumns);
    if (key == null || value == null) {
      continue;
    }
    Put put = converter.toPut(this.cf, key, value);
    if (put != null) {
      LOG.debug("Put: {Column Family: '{}', Key: '{}', Value: '{}'}", this.cf, key, value);
      puts.add(put);
    }
  }
  BulkWriterResponse response = new BulkWriterResponse();
  try {
    table.put(puts);
  } catch (Exception e) {
    response.addAllErrors(e, tuples);
    return response;
  }
  // Can return no errors, because put will throw Exception on error.
  response.addAllSuccesses(tuples);
  return response;
}
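For context, here is a minimal sketch of the round trip through EnrichmentConverter that the writer above relies on: toPut serializes an (EnrichmentKey, EnrichmentValue) pair into an HBase Put against a column family, and fromPut reverses it into a LookupKV. The type name "malicious_ip", the indicator, and the metadata contents below are illustrative only, not taken from the snippets on this page.

import java.util.HashMap;
import org.apache.hadoop.hbase.client.Put;
import org.apache.metron.enrichment.converter.EnrichmentConverter;
import org.apache.metron.enrichment.converter.EnrichmentKey;
import org.apache.metron.enrichment.converter.EnrichmentValue;
import org.apache.metron.enrichment.lookup.LookupKV;

public class ConverterRoundTripSketch {
  public static void main(String[] args) throws Exception {
    EnrichmentConverter converter = new EnrichmentConverter();
    // An enrichment entry is keyed by (type, indicator) and carries a metadata map.
    EnrichmentKey key = new EnrichmentKey("malicious_ip", "10.0.2.3");
    EnrichmentValue value = new EnrichmentValue(new HashMap<String, Object>() {{
      put("source", "example_feed");  // illustrative metadata
    }});
    // Serialize to an HBase Put against column family "cf"...
    Put put = converter.toPut("cf", key, value);
    // ...and read it back into a LookupKV, mirroring what the tests on this
    // page do with converter.fromResult when scanning the table.
    LookupKV<EnrichmentKey, EnrichmentValue> kv = converter.fromPut(put, "cf");
    System.out.println(kv.getKey().indicator + " -> " + kv.getValue().getMetadata());
  }
}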
Use of org.apache.metron.enrichment.converter.EnrichmentKey in project metron by apache.
The class ThreatIntelAdapterTest, method setup:
@Before
public void setup() throws Exception {
  final MockHTable trackerTable = (MockHTable) MockHBaseTableProvider.addToCache(atTableName, cf);
  final MockHTable threatIntelTable = (MockHTable) MockHBaseTableProvider.addToCache(threatIntelTableName, cf);
  EnrichmentHelper.INSTANCE.load(threatIntelTable, cf, new ArrayList<LookupKV<EnrichmentKey, EnrichmentValue>>() {
    {
      add(new LookupKV<>(new EnrichmentKey("10.0.2.3", "10.0.2.3"), new EnrichmentValue(new HashMap<>())));
    }
  });
  BloomAccessTracker bat = new BloomAccessTracker(threatIntelTableName, 100, 0.03);
  PersistentAccessTracker pat = new PersistentAccessTracker(threatIntelTableName, "0", trackerTable, cf, bat, 0L);
  lookup = new EnrichmentLookup(threatIntelTable, cf, pat);
  JSONParser jsonParser = new JSONParser();
  expectedMessage = (JSONObject) jsonParser.parse(expectedMessageString);
}
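EnrichmentHelper.INSTANCE.load is a test utility that seeds the mock table. A plausible minimal implementation, assuming it does nothing more than convert each LookupKV to a Put via EnrichmentConverter and write it, is sketched below; the enum name is hypothetical, not Metron's actual helper.

import java.io.IOException;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.metron.enrichment.converter.EnrichmentConverter;
import org.apache.metron.enrichment.converter.EnrichmentKey;
import org.apache.metron.enrichment.converter.EnrichmentValue;
import org.apache.metron.enrichment.lookup.LookupKV;

// A sketch of a loader in the spirit of EnrichmentHelper: convert each
// LookupKV to a Put and write it to the given column family.
public enum EnrichmentLoaderSketch {
  INSTANCE;

  private final EnrichmentConverter converter = new EnrichmentConverter();

  public void load(HTableInterface table, String cf,
                   Iterable<LookupKV<EnrichmentKey, EnrichmentValue>> entries) throws IOException {
    for (LookupKV<EnrichmentKey, EnrichmentValue> entry : entries) {
      table.put(converter.toPut(cf, entry.getKey(), entry.getValue()));
    }
  }
}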
Use of org.apache.metron.enrichment.converter.EnrichmentKey in project metron by apache.
The class SimpleHbaseEnrichmentWriterIntegrationTest, method test:
@Test
public void test() throws UnableToStartException, IOException {
  final String sensorType = "dummy";
  final List<byte[]> inputMessages = new ArrayList<byte[]>() {
    {
      add(Bytes.toBytes("col11,col12,col13"));
      add(Bytes.toBytes("col21,col22,col23"));
      add(Bytes.toBytes("col31,col32,col33"));
    }
  };
  MockHBaseTableProvider.addToCache(sensorType, "cf");
  final Properties topologyProperties = new Properties();
  final ZKServerComponent zkServerComponent = getZKServerComponent(topologyProperties);
  final KafkaComponent kafkaComponent = getKafkaComponent(topologyProperties, new ArrayList<KafkaComponent.Topic>() {
    {
      add(new KafkaComponent.Topic(sensorType, 1));
    }
  });
  topologyProperties.setProperty("kafka.broker", kafkaComponent.getBrokerList());
  ConfigUploadComponent configUploadComponent = new ConfigUploadComponent()
      .withTopologyProperties(topologyProperties)
      .withGlobalConfigsPath(TestConstants.SAMPLE_CONFIG_PATH)
      .withParserSensorConfig(sensorType, JSONUtils.INSTANCE.load(parserConfig, SensorParserConfig.class));
  ParserTopologyComponent parserTopologyComponent = new ParserTopologyComponent.Builder()
      .withSensorType(sensorType)
      .withTopologyProperties(topologyProperties)
      .withBrokerUrl(kafkaComponent.getBrokerList())
      .build();
  // UnitTestHelper.verboseLogging();
  ComponentRunner runner = new ComponentRunner.Builder()
      .withComponent("zk", zkServerComponent)
      .withComponent("kafka", kafkaComponent)
      .withComponent("config", configUploadComponent)
      .withComponent("storm", parserTopologyComponent)
      .withMillisecondsBetweenAttempts(5000)
      .withCustomShutdownOrder(new String[] { "storm", "config", "kafka", "zk" })
      .withNumRetries(10)
      .build();
  try {
    runner.start();
    kafkaComponent.writeMessages(sensorType, inputMessages);
    ProcessorResult<List<LookupKV<EnrichmentKey, EnrichmentValue>>> result = runner.process(new Processor<List<LookupKV<EnrichmentKey, EnrichmentValue>>>() {
      List<LookupKV<EnrichmentKey, EnrichmentValue>> messages = null;

      @Override
      public ReadinessState process(ComponentRunner runner) {
        MockHTable table = (MockHTable) MockHBaseTableProvider.getFromCache(sensorType);
        if (table != null && table.size() == inputMessages.size()) {
          EnrichmentConverter converter = new EnrichmentConverter();
          messages = new ArrayList<>();
          try {
            for (Result r : table.getScanner(Bytes.toBytes("cf"))) {
              messages.add(converter.fromResult(r, "cf"));
            }
          } catch (IOException e) {
            // The mock table is in-memory, so a scan failure is unexpected; any
            // partial result is kept and the assertions below will surface it.
          }
          return ReadinessState.READY;
        }
        return ReadinessState.NOT_READY;
      }

      @Override
      public ProcessorResult<List<LookupKV<EnrichmentKey, EnrichmentValue>>> getResult() {
        ProcessorResult.Builder<List<LookupKV<EnrichmentKey, EnrichmentValue>>> builder = new ProcessorResult.Builder<>();
        return builder.withResult(messages).build();
      }
    });
    Set<String> validIndicators = new HashSet<>(ImmutableList.of("col12", "col22", "col32"));
    Map<String, Map<String, String>> validMetadata = new HashMap<String, Map<String, String>>() {
      {
        put("col12", new HashMap<String, String>() {
          {
            put("col1", "col11");
            put("col3", "col13");
          }
        });
        put("col22", new HashMap<String, String>() {
          {
            put("col1", "col21");
            put("col3", "col23");
          }
        });
        put("col32", new HashMap<String, String>() {
          {
            put("col1", "col31");
            put("col3", "col33");
          }
        });
      }
    };
    for (LookupKV<EnrichmentKey, EnrichmentValue> kv : result.getResult()) {
      Assert.assertTrue(validIndicators.contains(kv.getKey().indicator));
      Assert.assertEquals("dummy", kv.getValue().getMetadata().get("source.type"));
      Assert.assertNotNull(kv.getValue().getMetadata().get("timestamp"));
      Assert.assertNotNull(kv.getValue().getMetadata().get("original_string"));
      Map<String, String> metadata = validMetadata.get(kv.getKey().indicator);
      for (Map.Entry<String, String> x : metadata.entrySet()) {
        Assert.assertEquals(x.getValue(), kv.getValue().getMetadata().get(x.getKey()));
      }
      Assert.assertEquals(metadata.size() + 4, kv.getValue().getMetadata().size());
    }
  } finally {
    if (runner != null) {
      runner.stop();
    }
  }
}
Use of org.apache.metron.enrichment.converter.EnrichmentKey in project metron by apache.
The class SimpleHBaseAdapterTest, method setup:
@Before
public void setup() throws Exception {
  final MockHTable trackerTable = (MockHTable) MockHBaseTableProvider.addToCache(atTableName, cf);
  final MockHTable hbaseTable = (MockHTable) MockHBaseTableProvider.addToCache(hbaseTableName, cf);
  EnrichmentHelper.INSTANCE.load(hbaseTable, cf, new ArrayList<LookupKV<EnrichmentKey, EnrichmentValue>>() {
    {
      add(new LookupKV<>(new EnrichmentKey(PLAYFUL_CLASSIFICATION_TYPE, "10.0.2.3"), new EnrichmentValue(PLAYFUL_ENRICHMENT)));
    }
  });
  EnrichmentHelper.INSTANCE.load(hbaseTable, cf1, new ArrayList<LookupKV<EnrichmentKey, EnrichmentValue>>() {
    {
      add(new LookupKV<>(new EnrichmentKey(CF1_CLASSIFICATION_TYPE, "10.0.2.4"), new EnrichmentValue(CF1_ENRICHMENT)));
    }
  });
  BloomAccessTracker bat = new BloomAccessTracker(hbaseTableName, 100, 0.03);
  PersistentAccessTracker pat = new PersistentAccessTracker(hbaseTableName, "0", trackerTable, cf, bat, 0L);
  lookup = new EnrichmentLookup(hbaseTable, cf, pat);
  JSONParser jsonParser = new JSONParser();
  expectedMessage = (JSONObject) jsonParser.parse(expectedMessageString);
}
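Once the lookup is wired to the table and its access tracker, a test can probe it by key. The fragment below is a hedged sketch, assuming the Lookup base class exposes exists(key, context, logAccess) and an EnrichmentLookup.HBaseContext pairing the table with its column family; the exact signatures may differ by Metron version, and hbaseTable would need to be reachable from the test body.

// Hypothetical probe against the lookup built in setup(); exists(...) and
// HBaseContext are assumptions about the Lookup API, not verbatim Metron code.
EnrichmentKey probe = new EnrichmentKey(PLAYFUL_CLASSIFICATION_TYPE, "10.0.2.3");
boolean found = lookup.exists(probe, new EnrichmentLookup.HBaseContext(hbaseTable, cf), false);
// logAccess=false keeps the BloomAccessTracker from recording this test read.
Assert.assertTrue(found);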
Use of org.apache.metron.enrichment.converter.EnrichmentKey in project metron by apache.
The class EnrichmentIntegrationTest, method test:
@Test
public void test() throws Exception {
  final String cf = "cf";
  final String trackerHBaseTableName = "tracker";
  final String threatIntelTableName = "threat_intel";
  final String enrichmentsTableName = "enrichments";
  final Properties topologyProperties = new Properties() {
    {
      setProperty("enrichment_workers", "1");
      setProperty("enrichment_acker_executors", "0");
      setProperty("enrichment_topology_worker_childopts", "");
      setProperty("topology_auto_credentials", "[]");
      setProperty("enrichment_topology_max_spout_pending", "");
      setProperty("enrichment_kafka_start", "UNCOMMITTED_EARLIEST");
      setProperty("kafka_security_protocol", "PLAINTEXT");
      setProperty("enrichment_input_topic", Constants.ENRICHMENT_TOPIC);
      setProperty("enrichment_output_topic", Constants.INDEXING_TOPIC);
      setProperty("enrichment_error_topic", ERROR_TOPIC);
      setProperty("threatintel_error_topic", ERROR_TOPIC);
      setProperty("enrichment_join_cache_size", "1000");
      setProperty("threatintel_join_cache_size", "1000");
      setProperty("enrichment_hbase_provider_impl", MockHBaseTableProvider.class.getName());
      setProperty("enrichment_hbase_table", enrichmentsTableName);
      setProperty("enrichment_hbase_cf", cf);
      setProperty("enrichment_host_known_hosts",
          "[{\"ip\":\"10.1.128.236\", \"local\":\"YES\", \"type\":\"webserver\", \"asset_value\" : \"important\"},"
        + "{\"ip\":\"10.1.128.237\", \"local\":\"UNKNOWN\", \"type\":\"unknown\", \"asset_value\" : \"important\"},"
        + "{\"ip\":\"10.60.10.254\", \"local\":\"YES\", \"type\":\"printer\", \"asset_value\" : \"important\"},"
        + "{\"ip\":\"10.0.2.15\", \"local\":\"YES\", \"type\":\"printer\", \"asset_value\" : \"important\"}]");
      setProperty("threatintel_hbase_table", threatIntelTableName);
      setProperty("threatintel_hbase_cf", cf);
      setProperty("enrichment_kafka_spout_parallelism", "1");
      setProperty("enrichment_split_parallelism", "1");
      setProperty("enrichment_stellar_parallelism", "1");
      setProperty("enrichment_join_parallelism", "1");
      setProperty("threat_intel_split_parallelism", "1");
      setProperty("threat_intel_stellar_parallelism", "1");
      setProperty("threat_intel_join_parallelism", "1");
      setProperty("kafka_writer_parallelism", "1");
    }
  };
  final ZKServerComponent zkServerComponent = getZKServerComponent(topologyProperties);
  final KafkaComponent kafkaComponent = getKafkaComponent(topologyProperties, new ArrayList<KafkaComponent.Topic>() {
    {
      add(new KafkaComponent.Topic(Constants.ENRICHMENT_TOPIC, 1));
      add(new KafkaComponent.Topic(Constants.INDEXING_TOPIC, 1));
      add(new KafkaComponent.Topic(ERROR_TOPIC, 1));
    }
  });
  String globalConfigStr = null;
  {
    File globalConfig = new File(new File(TestConstants.SAMPLE_CONFIG_PATH), "global.json");
    Map<String, Object> config = JSONUtils.INSTANCE.load(globalConfig, JSONUtils.MAP_SUPPLIER);
    config.put(SimpleHBaseEnrichmentFunctions.TABLE_PROVIDER_TYPE_CONF, MockHBaseTableProvider.class.getName());
    config.put(SimpleHBaseEnrichmentFunctions.ACCESS_TRACKER_TYPE_CONF, "PERSISTENT_BLOOM");
    config.put(PersistentBloomTrackerCreator.Config.PERSISTENT_BLOOM_TABLE, trackerHBaseTableName);
    config.put(PersistentBloomTrackerCreator.Config.PERSISTENT_BLOOM_CF, cf);
    config.put(GeoLiteDatabase.GEO_HDFS_FILE, geoHdfsFile.getAbsolutePath());
    globalConfigStr = JSONUtils.INSTANCE.toJSON(config, true);
  }
  ConfigUploadComponent configUploadComponent = new ConfigUploadComponent()
      .withTopologyProperties(topologyProperties)
      .withGlobalConfig(globalConfigStr)
      .withEnrichmentConfigsPath(TestConstants.SAMPLE_CONFIG_PATH);
  // create MockHBaseTables
  final MockHTable trackerTable = (MockHTable) MockHBaseTableProvider.addToCache(trackerHBaseTableName, cf);
  final MockHTable threatIntelTable = (MockHTable) MockHBaseTableProvider.addToCache(threatIntelTableName, cf);
  EnrichmentHelper.INSTANCE.load(threatIntelTable, cf, new ArrayList<LookupKV<EnrichmentKey, EnrichmentValue>>() {
    {
      add(new LookupKV<>(new EnrichmentKey(MALICIOUS_IP_TYPE, "10.0.2.3"), new EnrichmentValue(new HashMap<>())));
    }
  });
  final MockHTable enrichmentTable = (MockHTable) MockHBaseTableProvider.addToCache(enrichmentsTableName, cf);
  EnrichmentHelper.INSTANCE.load(enrichmentTable, cf, new ArrayList<LookupKV<EnrichmentKey, EnrichmentValue>>() {
    {
      add(new LookupKV<>(new EnrichmentKey(PLAYFUL_CLASSIFICATION_TYPE, "10.0.2.3"), new EnrichmentValue(PLAYFUL_ENRICHMENT)));
    }
  });
  FluxTopologyComponent fluxComponent = new FluxTopologyComponent.Builder()
      .withTopologyLocation(new File(fluxPath()))
      .withTopologyName("test")
      .withTemplateLocation(new File(templatePath))
      .withTopologyProperties(topologyProperties)
      .build();
  // UnitTestHelper.verboseLogging();
  ComponentRunner runner = new ComponentRunner.Builder()
      .withComponent("zk", zkServerComponent)
      .withComponent("kafka", kafkaComponent)
      .withComponent("config", configUploadComponent)
      .withComponent("storm", fluxComponent)
      .withMillisecondsBetweenAttempts(15000)
      .withCustomShutdownOrder(new String[] { "storm", "config", "kafka", "zk" })
      .withNumRetries(10)
      .build();
  try {
    runner.start();
    fluxComponent.submitTopology();
    kafkaComponent.writeMessages(Constants.ENRICHMENT_TOPIC, inputMessages);
    ProcessorResult<Map<String, List<Map<String, Object>>>> result = runner.process(getProcessor());
    Map<String, List<Map<String, Object>>> outputMessages = result.getResult();
    List<Map<String, Object>> docs = outputMessages.get(Constants.INDEXING_TOPIC);
    Assert.assertEquals(inputMessages.size(), docs.size());
    validateAll(docs);
    List<Map<String, Object>> errors = outputMessages.get(ERROR_TOPIC);
    Assert.assertEquals(inputMessages.size(), errors.size());
    validateErrors(errors);
  } finally {
    runner.stop();
  }
}