Use of com.amazon.dataprepper.model.record.Record in project data-prepper by opensearch-project.
The class StdInSourceTests, method testStdInSourceSuccessfulWriteToBuffer:
@Test
public void testStdInSourceSuccessfulWriteToBuffer() {
    final Queue<Record<Event>> bufferQueue = new LinkedList<>();
    final TestBuffer buffer = new TestBuffer(bufferQueue, 1);
    final StdInSource stdInSource = new StdInSource(TEST_WRITE_TIMEOUT, TEST_PIPELINE_NAME);
    // the buffer starts empty; starting the source should write exactly one record
    assertThat(buffer.size(), is(equalTo(0)));
    stdInSource.start(buffer);
    assertThat(buffer.size(), is(equalTo(1)));
    // read the record back and verify its "message" field carries the stdin content
    final Map.Entry<Collection<Record<Event>>, CheckpointState> readResult = buffer.read(TEST_WRITE_TIMEOUT);
    final Collection<Record<Event>> recordsFromBuffer = readResult.getKey();
    assertThat(recordsFromBuffer.size(), is(equalTo(1)));
    recordsFromBuffer.forEach(actualRecord ->
            assertThat(actualRecord.getData().get("message", String.class), is(equalTo(READ_CONTENT))));
}
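TestBuffer is a helper from the project's test sources rather than a published class. As a rough stand-in, an in-memory buffer with the shape the test exercises (queue-backed, size(), write, batched read returning a CheckpointState) might look like the sketch below; the internals, the meaning of the second constructor argument, and the CheckpointState(int) constructor are assumptions inferred from the test, not the project's actual helper.

import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Queue;

import com.amazon.dataprepper.model.CheckpointState;

// Illustrative stand-in for the project's TestBuffer helper: an in-memory buffer
// backed by the queue passed in, returning at most batchSize records per read.
class SimpleTestBuffer<T> {
    private final Queue<T> queue;
    private final int batchSize; // assumed interpretation of the second constructor argument

    SimpleTestBuffer(final Queue<T> queue, final int batchSize) {
        this.queue = queue;
        this.batchSize = batchSize;
    }

    public void write(final T record, final int timeoutInMillis) {
        queue.add(record); // unbounded here; the real helper may enforce capacity and timeouts
    }

    public Map.Entry<Collection<T>, CheckpointState> read(final int timeoutInMillis) {
        final List<T> records = new ArrayList<>();
        while (!queue.isEmpty() && records.size() < batchSize) {
            records.add(queue.poll());
        }
        // CheckpointState(int) is assumed to track the number of records handed out
        return new AbstractMap.SimpleEntry<>(records, new CheckpointState(records.size()));
    }

    public int size() {
        return queue.size();
    }
}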
Use of com.amazon.dataprepper.model.record.Record in project data-prepper by opensearch-project.
The class BlockingBufferTests, method testBufferIsEmpty:
@Test
public void testBufferIsEmpty() {
    final PluginSetting completePluginSetting = completePluginSettingForBlockingBuffer();
    final BlockingBuffer<Record<String>> blockingBuffer = new BlockingBuffer<>(completePluginSetting);
    assertTrue(blockingBuffer.isEmpty());
}
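completePluginSettingForBlockingBuffer() is a fixture helper shared by the BlockingBuffer tests here. A hedged sketch of what it plausibly builds follows; the attribute keys and values are assumptions chosen for illustration, picked so that batch_size is smaller than the five records written in testBatchRead below, which is what makes the partial/final read split observable.

import java.util.HashMap;
import java.util.Map;

import com.amazon.dataprepper.model.configuration.PluginSetting;

// Hedged sketch of the fixture helper; the real test's keys and values may differ.
private PluginSetting completePluginSettingForBlockingBuffer() {
    final Map<String, Object> settings = new HashMap<>();
    settings.put("buffer_size", 1024); // total capacity of the blocking queue (assumed key)
    settings.put("batch_size", 3);     // maximum records returned by a single read (assumed key)
    final PluginSetting pluginSetting = new PluginSetting("bounded_blocking", settings);
    pluginSetting.setPipelineName("test-pipeline");
    return pluginSetting;
}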
Use of com.amazon.dataprepper.model.record.Record in project data-prepper by opensearch-project.
The class BlockingBufferTests, method testBatchRead:
@Test
public void testBatchRead() throws Exception {
    final PluginSetting completePluginSetting = completePluginSettingForBlockingBuffer();
    final BlockingBuffer<Record<String>> blockingBuffer = new BlockingBuffer<>(completePluginSetting);
    assertThat(blockingBuffer, notNullValue());
    // write more records than a single batch can hold
    final int testSize = 5;
    for (int i = 0; i < testSize; i++) {
        Record<String> record = new Record<>("TEST" + i);
        blockingBuffer.write(record, TEST_WRITE_TIMEOUT);
    }
    // first read: returns exactly batch_size records plus a matching CheckpointState
    final Map.Entry<Collection<Record<String>>, CheckpointState> partialReadResult = blockingBuffer.read(TEST_BATCH_READ_TIMEOUT);
    final Collection<Record<String>> partialRecords = partialReadResult.getKey();
    final CheckpointState partialCheckpointState = partialReadResult.getValue();
    final int expectedBatchSize = (Integer) completePluginSetting.getAttributeFromSettings(ATTRIBUTE_BATCH_SIZE);
    assertThat(partialRecords.size(), is(expectedBatchSize));
    assertEquals(expectedBatchSize, partialCheckpointState.getNumRecordsToBeChecked());
    int i = 0;
    for (Record<String> record : partialRecords) {
        assertThat(record.getData(), equalTo("TEST" + i));
        i++;
    }
    // second read: drains the remaining testSize - batch_size records, still in insertion order
    final Map.Entry<Collection<Record<String>>, CheckpointState> finalReadResult = blockingBuffer.read(TEST_BATCH_READ_TIMEOUT);
    final Collection<Record<String>> finalBatch = finalReadResult.getKey();
    final CheckpointState finalCheckpointState = finalReadResult.getValue();
    assertThat(finalBatch.size(), is(testSize - expectedBatchSize));
    assertEquals(testSize - expectedBatchSize, finalCheckpointState.getNumRecordsToBeChecked());
    for (Record<String> record : finalBatch) {
        assertThat(record.getData(), equalTo("TEST" + i)); // i continues counting from the first loop
        i++;
    }
}
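The assertions above pin down the batch contract: each read hands back at most batch_size records together with a CheckpointState counting them, and a second read drains the remainder in insertion order. In a pipeline the consumer would acknowledge each batch back to the buffer once processed; a minimal sketch of that cycle, assuming a checkpoint(CheckpointState) method on the buffer (the println is a stand-in for real downstream processing):

// Sketch of the read/checkpoint cycle implied by the CheckpointState assertions above.
final Map.Entry<Collection<Record<String>>, CheckpointState> batch =
        blockingBuffer.read(TEST_BATCH_READ_TIMEOUT);
for (final Record<String> record : batch.getKey()) {
    System.out.println(record.getData()); // placeholder for actual processing
}
blockingBuffer.checkpoint(batch.getValue()); // acknowledge the records just read (assumed API)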
Use of com.amazon.dataprepper.model.record.Record in project data-prepper by opensearch-project.
The class AggregateProcessorIT, method getBatchOfEvents:
private List<Record<Event>> getBatchOfEvents() {
    final List<Record<Event>> events = new ArrayList<>();
    for (int i = 0; i < NUM_EVENTS_PER_BATCH; i++) {
        // cycling through NUM_UNIQUE_EVENTS_PER_BATCH maps yields duplicate events to aggregate
        final Map<String, Object> eventMap = getEventMap(i % NUM_UNIQUE_EVENTS_PER_BATCH);
        final Event event = JacksonEvent.builder().withEventType("event").withData(eventMap).build();
        uniqueEventMaps.add(eventMap);
        events.add(new Record<>(event));
    }
    return events;
}
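getEventMap, uniqueEventMaps, and the NUM_* constants come from the surrounding AggregateProcessorIT fixture. A purely hypothetical shape for the helper, showing why keying it by i % NUM_UNIQUE_EVENTS_PER_BATCH gives the aggregate processor duplicate events to group (the field names and values here are invented, not the project's):

import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch: one distinct payload per unique-event index, so repeated
// indices produce identical maps that the aggregate processor can bucket together.
private Map<String, Object> getEventMap(final int uniqueIndex) {
    final Map<String, Object> eventMap = new HashMap<>();
    eventMap.put("sourceIp", "10.0.0." + uniqueIndex);
    eventMap.put("destinationIp", "192.168.0." + uniqueIndex);
    eventMap.put("port", 8000 + uniqueIndex);
    return eventMap;
}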
Use of com.amazon.dataprepper.model.record.Record in project data-prepper by opensearch-project.
The class OTelTraceGrpcServiceTest, method export_BufferTimeout_responseObserverOnError_withOTLPRecordType:
@Test
public void export_BufferTimeout_responseObserverOnError_withOTLPRecordType() throws Exception {
    objectUnderTest = generateOTelTraceGrpcService(new OTelProtoCodec.OTelProtoDecoder(), RecordType.OTLP);
    // force the buffer write to time out so the export path takes its error branch
    doThrow(new TimeoutException()).when(buffer).write(any(Record.class), anyInt());
    objectUnderTest.export(SUCCESS_REQUEST, responseObserver);
    // the client must see onError (never onNext/onCompleted) and metrics must record the timeout
    verify(buffer, times(1)).write(any(Record.class), anyInt());
    verify(responseObserver, times(0)).onNext(any());
    verify(responseObserver, times(0)).onCompleted();
    verify(responseObserver, times(1)).onError(statusExceptionArgumentCaptor.capture());
    verify(timeoutCounter, times(1)).increment();
    verify(requestsReceivedCounter, times(1)).increment();
    verifyNoInteractions(successRequestsCounter);
    verifyNoInteractions(badRequestsCounter);
    verifyNoInteractions(requestsTooLargeCounter);
    // payload size and processing duration are still recorded on the failure path
    final ArgumentCaptor<Double> payloadLengthCaptor = ArgumentCaptor.forClass(Double.class);
    verify(payloadSizeSummary, times(1)).record(payloadLengthCaptor.capture());
    assertThat(payloadLengthCaptor.getValue().intValue(), equalTo(SUCCESS_REQUEST.getSerializedSize()));
    verify(requestProcessDuration, times(1)).record(ArgumentMatchers.<Runnable>any());
    // a timed-out write surfaces to the gRPC client as RESOURCE_EXHAUSTED
    StatusException capturedStatusException = statusExceptionArgumentCaptor.getValue();
    assertThat(capturedStatusException.getStatus().getCode(), equalTo(Status.RESOURCE_EXHAUSTED.getCode()));
}
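The test leans on a Mockito fixture: buffer, responseObserver, the metric mocks, and the captor are all fields of the test class. A hedged sketch of that wiring follows; the field names mirror the test body, but the generic types and import paths are assumptions based on the OTLP trace export API and Micrometer metrics, and the real test's declarations may differ.

import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;

import com.amazon.dataprepper.model.buffer.Buffer;
import com.amazon.dataprepper.model.record.Record;

import io.grpc.StatusException;
import io.grpc.stub.StreamObserver;
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.DistributionSummary;
import io.micrometer.core.instrument.Timer;
import io.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse;

// Illustrative fixture wiring; initialized by the Mockito runner or openMocks().
@Mock
private Buffer<Record<Object>> buffer;
@Mock
private StreamObserver<ExportTraceServiceResponse> responseObserver;
@Mock
private Counter requestsReceivedCounter;
@Mock
private Counter successRequestsCounter;
@Mock
private Counter badRequestsCounter;
@Mock
private Counter requestsTooLargeCounter;
@Mock
private Counter timeoutCounter;
@Mock
private DistributionSummary payloadSizeSummary;
@Mock
private Timer requestProcessDuration;
@Captor
private ArgumentCaptor<StatusException> statusExceptionArgumentCaptor;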