Use of org.apache.metron.common.writer.BulkMessage in project metron by apache.
In class SimpleHBaseEnrichmentWriterTest, method testBatchOneNormalPath.
@Test
public void testBatchOneNormalPath() throws Exception {
  final String sensorType = "dummy";
  SimpleHbaseEnrichmentWriter writer = new SimpleHbaseEnrichmentWriter();
  WriterConfiguration configuration = createConfig(1, new HashMap<String, Object>(BASE_WRITER_CONFIG) {
    {
      put(SimpleHbaseEnrichmentWriter.Configurations.KEY_COLUMNS.getKey(), "ip");
    }
  });
  writer.configure(sensorType, configuration);
  writer.write(SENSOR_TYPE, configuration, new ArrayList<BulkMessage<JSONObject>>() {
    {
      add(new BulkMessage<>("messageId", new JSONObject(ImmutableMap.of("ip", "localhost", "user", "cstella", "foo", "bar"))));
    }
  });
  List<LookupKV<EnrichmentKey, EnrichmentValue>> values = getValues();
  assertEquals(1, values.size());
  assertEquals("localhost", values.get(0).getKey().indicator);
  assertEquals("cstella", values.get(0).getValue().getMetadata().get("user"));
  assertEquals("bar", values.get(0).getValue().getMetadata().get("foo"));
  assertEquals(2, values.get(0).getValue().getMetadata().size());
}
Use of org.apache.metron.common.writer.BulkMessage in project metron by apache.
In class KafkaWriterTest, method testWriteShouldReturnErrorsOnFailedFlush.
@Test
public void testWriteShouldReturnErrorsOnFailedFlush() throws Exception {
  KafkaWriter writer = spy(new KafkaWriter());
  writer.setKafkaProducer(kafkaProducer);
  List<BulkMessage<JSONObject>> messages = new ArrayList<>();
  JSONObject message1 = new JSONObject();
  message1.put("value", "message1");
  JSONObject message2 = new JSONObject();
  message2.put("value", "message2");
  messages.add(new BulkMessage<>("messageId1", message1));
  messages.add(new BulkMessage<>("messageId2", message2));
  doReturn(Optional.of("topic1")).when(writer).getKafkaTopic(message1);
  doReturn(Optional.of("topic2")).when(writer).getKafkaTopic(message2);
  Future future1 = mock(Future.class);
  Future future2 = mock(Future.class);
  when(kafkaProducer.send(new ProducerRecord<String, String>("topic1", "{\"value\":\"message1\"}"))).thenReturn(future1);
  when(kafkaProducer.send(new ProducerRecord<String, String>("topic2", "{\"value\":\"message2\"}"))).thenReturn(future2);
  InterruptException throwable = new InterruptException("kafka flush exception");
  doThrow(throwable).when(kafkaProducer).flush();
  BulkWriterResponse response = new BulkWriterResponse();
  response.addAllErrors(throwable, Arrays.asList(new MessageId("messageId1"), new MessageId("messageId2")));
  assertEquals(response, writer.write(SENSOR_TYPE, createConfiguration(new HashMap<>()), messages));
  verify(kafkaProducer, times(1)).flush();
  verify(kafkaProducer, times(1)).send(new ProducerRecord<String, String>("topic1", "{\"value\":\"message1\"}"));
  verify(kafkaProducer, times(1)).send(new ProducerRecord<String, String>("topic2", "{\"value\":\"message2\"}"));
  verifyNoMoreInteractions(kafkaProducer);
}
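The pattern this test verifies — a failed flush reports every message in the batch as an error caused by the same exception — can be expressed as a small standalone helper. The sketch below is not Metron code; it only uses the BulkMessage, MessageId, and BulkWriterResponse calls already shown in this example, and the class and method names are placeholders.

import java.util.List;
import java.util.stream.Collectors;

import org.apache.metron.common.writer.BulkMessage;
import org.apache.metron.common.writer.BulkWriterResponse;
import org.apache.metron.common.writer.MessageId;

public class FlushErrorSketch {

  // Hypothetical helper: when a flush fails, report every id in the batch as an error
  // caused by the same throwable, mirroring the expected response built in the test above.
  public static <T> BulkWriterResponse errorAll(Throwable cause, List<BulkMessage<T>> batch) {
    BulkWriterResponse response = new BulkWriterResponse();
    List<MessageId> ids = batch.stream()
        .map(BulkMessage::getId)
        .collect(Collectors.toList());
    response.addAllErrors(cause, ids);
    return response;
  }
}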
Use of org.apache.metron.common.writer.BulkMessage in project metron by apache.
In class BulkWriterComponentTest, method flushShouldAckMissingTuples.
@Test
public void flushShouldAckMissingTuples() throws Exception {
  BulkWriterComponent<JSONObject> bulkWriterComponent = new BulkWriterComponent<>(Collections.singletonList(flushPolicy));
  BulkMessageWriter<JSONObject> bulkMessageWriter = mock(BulkMessageWriter.class);
  MessageId successId = new MessageId("successId");
  MessageId errorId = new MessageId("errorId");
  MessageId missingId = new MessageId("missingId");
  JSONObject successMessage = new JSONObject();
  successMessage.put("name", "success");
  JSONObject errorMessage = new JSONObject();
  errorMessage.put("name", "error");
  JSONObject missingMessage = new JSONObject();
  missingMessage.put("name", "missing");
  List<BulkMessage<JSONObject>> allMessages = new ArrayList<BulkMessage<JSONObject>>() {
    {
      add(new BulkMessage<>(successId, successMessage));
      add(new BulkMessage<>(errorId, errorMessage));
      add(new BulkMessage<>(missingId, missingMessage));
    }
  };
  BulkWriterResponse bulkWriterResponse = new BulkWriterResponse();
  bulkWriterResponse.addSuccess(successId);
  Throwable throwable = mock(Throwable.class);
  bulkWriterResponse.addError(throwable, errorId);
  when(bulkMessageWriter.write(sensorType, configurations, allMessages)).thenReturn(bulkWriterResponse);
  bulkWriterComponent.flush(sensorType, bulkMessageWriter, configurations, allMessages);
  BulkWriterResponse expectedResponse = new BulkWriterResponse();
  expectedResponse.addSuccess(successId);
  expectedResponse.addError(throwable, errorId);
  expectedResponse.addSuccess(missingId);
  verify(flushPolicy, times(1)).onFlush(sensorType, expectedResponse);
  verifyNoMoreInteractions(flushPolicy);
}
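The flushPolicy mocked here implements org.apache.metron.writer.FlushPolicy, which BulkWriterComponent consults on every write and notifies after every flush (see the write(...) javadoc further down). Below is a rough sketch of what a count-based policy could look like; the shouldFlush signature and the WriterConfiguration.getBatchSize accessor are inferred from the surrounding code rather than copied from Metron, so treat them as assumptions.

import java.util.List;

import org.apache.metron.common.configuration.writer.WriterConfiguration;
import org.apache.metron.common.writer.BulkMessage;
import org.apache.metron.common.writer.BulkWriterResponse;
import org.apache.metron.writer.FlushPolicy;
import org.json.simple.JSONObject;

public class MaxBatchSizePolicy implements FlushPolicy<JSONObject> {

  // Assumed signature: flush once the pending batch reaches the sensor's configured batch size.
  @Override
  public boolean shouldFlush(String sensorType, WriterConfiguration configurations, List<BulkMessage<JSONObject>> messages) {
    return messages.size() >= configurations.getBatchSize(sensorType);
  }

  // Invoked with the BulkWriterResponse after a flush, as exercised by the test above.
  @Override
  public void onFlush(String sensorType, BulkWriterResponse response) {
    // Nothing to reset for a pure size-based policy; a time-based policy would restart its timer here.
  }
}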
Use of org.apache.metron.common.writer.BulkMessage in project metron by apache.
In class SimpleHBaseEnrichmentWriterTest, method testFilteredKey.
@Test
public void testFilteredKey() throws Exception {
  final String sensorType = "dummy";
  SimpleHbaseEnrichmentWriter writer = new SimpleHbaseEnrichmentWriter();
  WriterConfiguration configuration = createConfig(1, new HashMap<String, Object>(BASE_WRITER_CONFIG) {
    {
      put(SimpleHbaseEnrichmentWriter.Configurations.KEY_COLUMNS.getKey(), "ip");
      put(SimpleHbaseEnrichmentWriter.Configurations.VALUE_COLUMNS.getKey(), "user");
    }
  });
  writer.configure(sensorType, configuration);
  writer.write(SENSOR_TYPE, configuration, new ArrayList<BulkMessage<JSONObject>>() {
    {
      add(new BulkMessage<>("messageId", new JSONObject(ImmutableMap.of("ip", "localhost", "user", "cstella", "foo", "bar"))));
    }
  });
  List<LookupKV<EnrichmentKey, EnrichmentValue>> values = getValues();
  assertEquals(1, values.size());
  assertEquals("localhost", values.get(0).getKey().indicator);
  assertEquals("cstella", values.get(0).getValue().getMetadata().get("user"));
  assertNull(values.get(0).getValue().getMetadata().get("foo"));
  assertEquals(1, values.get(0).getValue().getMetadata().size());
}
Use of org.apache.metron.common.writer.BulkMessage in project metron by apache.
In class BulkWriterComponent, method write.
/**
 * Accepts a message to be written and stores it in an internal cache of messages. Iterates
 * through {@link org.apache.metron.writer.FlushPolicy} implementations to determine if a batch
 * should be flushed.
 * @param sensorType sensor type
 * @param bulkWriterMessage message to be written
 * @param bulkMessageWriter writer that will do the actual writing
 * @param configurations writer configurations
 */
public void write(String sensorType, BulkMessage<MESSAGE_T> bulkWriterMessage, BulkMessageWriter<MESSAGE_T> bulkMessageWriter, WriterConfiguration configurations) {
  List<BulkMessage<MESSAGE_T>> messages = sensorMessageCache.getOrDefault(sensorType, new ArrayList<>());
  sensorMessageCache.put(sensorType, messages);
  // if a sensor type is disabled flush all pending messages and discard the new message
  if (!configurations.isEnabled(sensorType)) {
    // flush pending messages
    flush(sensorType, bulkMessageWriter, configurations, messages);
    // Include the new message for any post-processing but don't write it
    BulkWriterResponse response = new BulkWriterResponse();
    response.addSuccess(bulkWriterMessage.getId());
    onFlush(sensorType, response);
  } else {
    messages.add(bulkWriterMessage);
    applyShouldFlush(sensorType, bulkMessageWriter, configurations, sensorMessageCache.get(sensorType));
  }
}
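A minimal usage sketch of the contract documented above: the caller hands messages over one at a time and lets the registered FlushPolicy implementations decide when the underlying writer is actually invoked. The class, method, and parameter names below are placeholders, and the package locations are taken from the other examples in this section rather than verified against Metron.

import java.util.List;

import org.apache.metron.common.configuration.writer.WriterConfiguration;
import org.apache.metron.common.writer.BulkMessage;
import org.apache.metron.common.writer.BulkMessageWriter;
import org.apache.metron.writer.BulkWriterComponent;
import org.json.simple.JSONObject;

public class WriteLoopSketch {

  // Feed a batch of messages into the component; flushing is decided per call by the
  // configured flush policies, or forced when the sensor is disabled (see write(...) above).
  public static void writeAll(BulkWriterComponent<JSONObject> component,
                              String sensorType,
                              List<BulkMessage<JSONObject>> batch,
                              BulkMessageWriter<JSONObject> writer,
                              WriterConfiguration configurations) {
    for (BulkMessage<JSONObject> message : batch) {
      component.write(sensorType, message, writer, configurations);
    }
  }
}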