Use of org.komamitsu.fluency.ingester.Ingester in project fluency by komamitsu.
From the class FluentdIngesterTest, method ingestWithoutAck:
@Test
void ingestWithoutAck() throws IOException {
    Ingester ingester = new FluentdIngester(new FluentdIngester.Config(), fluentdSender);
    ingester.ingest(TAG, ByteBuffer.wrap(DATA));

    verify(fluentdSender, times(1)).send(byteBuffersArgumentCaptor.capture());
    List<ByteBuffer> byteBuffers = byteBuffersArgumentCaptor.getAllValues().get(0);
    byte[] ingested = getIngestedData(byteBuffers);

    // The ingested payload should be a three-element array: [tag, packed entries, option map]
    MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(ingested);
    ImmutableArrayValue arrayValue = unpacker.unpackValue().asArrayValue();
    assertEquals(3, arrayValue.size());
    assertEquals(TAG, arrayValue.get(0).asStringValue().asString());
    assertArrayEquals(DATA, arrayValue.get(1).asRawValue().asByteArray());

    Map<Value, Value> options = arrayValue.get(2).asMapValue().map();
    assertEquals(1, options.size());
    assertEquals(DATA.length, options.get(ValueFactory.newString("size")).asIntegerValue().asInt());
}
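The layout asserted above can be produced directly with msgpack-java. A minimal sketch of packing the same [tag, packed entries, option] triple by hand; this only mirrors what the test verifies, not FluentdIngester's actual implementation, and the helper name and the use of a bin header for the entries are illustrative assumptions:

import org.msgpack.core.MessageBufferPacker;
import org.msgpack.core.MessagePack;

static byte[] packForwardEntry(String tag, byte[] packedEntries) throws IOException {
    try (MessageBufferPacker packer = MessagePack.newDefaultBufferPacker()) {
        packer.packArrayHeader(3);
        packer.packString(tag);                         // element 0: tag
        packer.packBinaryHeader(packedEntries.length);  // element 1: already-packed event records
        packer.writePayload(packedEntries);
        packer.packMapHeader(1);                        // element 2: option map with the payload size
        packer.packString("size");
        packer.packInt(packedEntries.length);
        return packer.toByteArray();
    }
}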
Use of org.komamitsu.fluency.ingester.Ingester in project fluency by komamitsu.
From the class BufferTest, method testFileBackup:
@Test
void testFileBackup() throws IOException {
    bufferConfig.setFileBackupDir(System.getProperty("java.io.tmpdir"));
    bufferConfig.setFileBackupPrefix("FileBackupTest");

    // Just for cleaning up backup files left over from previous runs
    try (Buffer buffer = new Buffer(bufferConfig, recordFormatter)) {
        buffer.clearBackupFiles();
    }
    try (Buffer buffer = new Buffer(bufferConfig, recordFormatter)) {
        assertEquals(0, buffer.getBufferedDataSize());
    }

    long currentTime = System.currentTimeMillis() / 1000;
    Map<String, Object> event0 = ImmutableMap.of("name", "a", "age", 42);
    Map<String, Object> event1 = ImmutableMap.of("name", "b", "age", 99);

    // Append events and close without flushing so that they are backed up to files
    try (Buffer buffer = new Buffer(bufferConfig, recordFormatter)) {
        buffer.append("foo", currentTime, event0);
        buffer.append("bar", currentTime, event1);
    }

    // A fresh Buffer should restore the backed-up events and flush them to the Ingester
    Ingester ingester = mock(Ingester.class);
    try (Buffer buffer = new Buffer(bufferConfig, recordFormatter)) {
        buffer.flushInternal(ingester, true);
    }

    ObjectMapper objectMapper = new ObjectMapper();
    for (Tuple<String, Map<String, Object>> tagAndEvent :
            ImmutableList.of(new Tuple<>("foo", event0), new Tuple<>("bar", event1))) {
        ArgumentCaptor<ByteBuffer> byteBufferArgumentCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
        verify(ingester, times(1)).ingest(eq(tagAndEvent.getFirst()), byteBufferArgumentCaptor.capture());

        ByteBuffer byteBuffer = byteBufferArgumentCaptor.getValue();
        byte[] bytes = new byte[byteBuffer.remaining()];
        byteBuffer.get(bytes);

        Map<String, Object> map =
                objectMapper.readValue(bytes, new TypeReference<Map<String, Object>>() {});
        assertEquals(tagAndEvent.getSecond(), map);
    }
}
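In application code, the file-backup behavior exercised above is typically enabled through the builder rather than by constructing Buffer directly. A minimal sketch, assuming FluencyBuilderForFluentd exposes setFileBackupDir and a build(host, port) overload:

FluencyBuilderForFluentd builder = new FluencyBuilderForFluentd();
builder.setFileBackupDir(System.getProperty("java.io.tmpdir"));  // back up unflushed chunks to files
Fluency fluency = builder.build("127.0.0.1", 24224);

Map<String, Object> event = ImmutableMap.of("name", "a", "age", 42);
fluency.emit("foo", event);
fluency.close();  // chunks that could not be flushed are written to the backup dir and restored on the next start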
Use of org.komamitsu.fluency.ingester.Ingester in project fluency by komamitsu.
From the class FluencyTest, method testWaitUntilFlusherTerminated:
@ParameterizedTest
@CsvSource({ "1, false", "3, true" })
void testWaitUntilFlusherTerminated(int waitUntilFlusherTerm, boolean expected)
        throws IOException, InterruptedException {
    flusherConfig.setWaitUntilTerminated(1);

    // Wait before actually closing the Buffer, so that flusher termination takes about two seconds
    int waitBeforeCloseMillis = 2000;
    Buffer buffer = spy(new Buffer(bufferConfig, new JsonRecordFormatter()));
    doAnswer(invocation -> {
        long start = System.currentTimeMillis();
        try {
            TimeUnit.MILLISECONDS.sleep(waitBeforeCloseMillis);
        } catch (InterruptedException e) {
            // If interrupted, keep sleeping for the remaining time
            long rest = waitBeforeCloseMillis - (System.currentTimeMillis() - start);
            if (rest > 0) {
                try {
                    TimeUnit.MILLISECONDS.sleep(rest);
                } catch (InterruptedException e1) {
                    // Give up waiting
                }
            }
        }
        return null;
    }).doCallRealMethod().when(buffer).close();

    Flusher flusher = new Flusher(flusherConfig, buffer, ingester);
    Fluency fluency = new Fluency(buffer, flusher);
    fluency.emit("foo.bar", new HashMap<>());
    fluency.close();

    // Only the longer timeout should observe the flusher's termination
    assertThat(fluency.waitUntilFlusherTerminated(waitUntilFlusherTerm), is(expected));
}
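The same calls support a graceful shutdown in application code; a short sketch of the usual sequence (the 10-second timeout is an arbitrary choice):

fluency.flush();                                // request an immediate flush of buffered events
fluency.close();                                // stop accepting new events and begin flusher shutdown
if (!fluency.waitUntilFlusherTerminated(10)) {  // block for up to 10 seconds
    // The flusher did not finish in time; some buffered events may not have been sent
}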
Use of org.komamitsu.fluency.ingester.Ingester in project fluency by komamitsu.
From the class FluencyBuilderForAwsS3Test, method buildWithDefaultConfig:
@ParameterizedTest
@EnumSource(FluencyBuilderForAwsS3.FormatType.class)
void buildWithDefaultConfig(FluencyBuilderForAwsS3.FormatType formatType) {
    FluencyBuilderForAwsS3 builder = builderWithDefaultConfig;
    AwsS3Sender sender = mock(AwsS3Sender.class);
    doReturn(sender).when(builder).createSender(any(AwsS3Sender.Config.class));
    builder.setFormatType(formatType);

    Fluency fluency = builder.build();
    assertEquals(fluency, this.fluency);

    // The sender should be created with the default configuration
    ArgumentCaptor<AwsS3Sender.Config> configArgumentCaptor = ArgumentCaptor.forClass(AwsS3Sender.Config.class);
    verify(builder, times(1)).createSender(configArgumentCaptor.capture());
    AwsS3Sender.Config senderConfig = configArgumentCaptor.getValue();
    assertNull(senderConfig.getEndpoint());
    assertNull(senderConfig.getRegion());
    assertNull(senderConfig.getAwsAccessKeyId());
    assertNull(senderConfig.getAwsSecretAccessKey());
    assertEquals(10, senderConfig.getRetryMax());
    assertEquals(1000, senderConfig.getRetryIntervalMs());
    assertEquals(30000, senderConfig.getMaxRetryIntervalMs());
    assertEquals(2.0, senderConfig.getRetryFactor());
    assertEquals(8192, senderConfig.getWorkBufSize());
    assertTrue(senderConfig.isCompressionEnabled());

    // The record formatter and the S3 key suffix should match the requested format type
    ArgumentCaptor<RecordFormatter> recordFormatterArgumentCaptor = ArgumentCaptor.forClass(RecordFormatter.class);
    ArgumentCaptor<Ingester> ingesterArgumentCaptor = ArgumentCaptor.forClass(Ingester.class);
    verify(builder, times(1)).buildFromIngester(recordFormatterArgumentCaptor.capture(), ingesterArgumentCaptor.capture());
    RecordFormatter recordFormatter = recordFormatterArgumentCaptor.getValue();

    Class<? extends AwsS3RecordFormatter> expectedAwsS3RecordFormatter = null;
    String expectedS3KeySuffix = null;
    switch (formatType) {
        case MESSAGE_PACK:
            expectedAwsS3RecordFormatter = MessagePackRecordFormatter.class;
            expectedS3KeySuffix = ".msgpack.gz";
            break;
        case JSONL:
            expectedAwsS3RecordFormatter = JsonlRecordFormatter.class;
            expectedS3KeySuffix = ".jsonl.gz";
            break;
        case CSV:
            expectedAwsS3RecordFormatter = CsvRecordFormatter.class;
            expectedS3KeySuffix = ".csv.gz";
            break;
    }
    assertEquals(expectedAwsS3RecordFormatter, recordFormatter.getClass());

    AwsS3Ingester ingester = (AwsS3Ingester) ingesterArgumentCaptor.getValue();
    assertEquals(sender, ingester.getSender());
    DefaultS3DestinationDecider destinationDecider =
            (DefaultS3DestinationDecider) ingester.getS3DestinationDecider();
    assertNull(destinationDecider.getKeyPrefix());
    assertEquals(expectedS3KeySuffix, destinationDecider.getKeySuffix());
    assertEquals(UTC, destinationDecider.getZoneId());

    // Default buffer and flusher configurations
    ArgumentCaptor<Buffer.Config> bufferConfigArgumentCaptor = ArgumentCaptor.forClass(Buffer.Config.class);
    ArgumentCaptor<Flusher.Config> flusherConfigArgumentCaptor = ArgumentCaptor.forClass(Flusher.Config.class);
    verify(builder, times(1)).createFluency(eq(recordFormatter), eq(ingester),
            bufferConfigArgumentCaptor.capture(), flusherConfigArgumentCaptor.capture());
    Buffer.Config bufferConfig = bufferConfigArgumentCaptor.getValue();
    assertEquals(512 * 1024 * 1024, bufferConfig.getMaxBufferSize());
    assertEquals(4 * 1024 * 1024, bufferConfig.getChunkInitialSize());
    assertEquals(64 * 1024 * 1024, bufferConfig.getChunkRetentionSize());
    assertEquals(30 * 1000, bufferConfig.getChunkRetentionTimeMillis());
}
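For comparison with the defaults asserted above, a minimal sketch of building an S3-backed Fluency instance with this builder; the tag and record fields are arbitrary, and since credentials and region are left unset here, how they are resolved depends on AwsS3Sender's defaults:

FluencyBuilderForAwsS3 builder = new FluencyBuilderForAwsS3();
builder.setFormatType(FluencyBuilderForAwsS3.FormatType.JSONL);  // records are written as gzipped JSON Lines
Fluency fluency = builder.build();

Map<String, Object> event = ImmutableMap.of("name", "a", "age", 42);
fluency.emit("logs.app", event);
fluency.close();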