Use of co.cask.common.io.ByteBufferInputStream in project cdap by caskdata.
Class QueueToStreamConsumer, method poll.
@Override
public DequeueResult<StreamEvent> poll(int maxEvents, long timeout, TimeUnit timeoutUnit) throws IOException, InterruptedException {
  final DequeueResult<byte[]> result = consumer.dequeue(maxEvents);

  // Decode each byte array into a stream event
  ImmutableList.Builder<StreamEvent> builder = ImmutableList.builder();
  for (byte[] content : result) {
    try {
      builder.add(STREAM_EVENT_CODEC.decodePayload(content));
    } catch (Throwable t) {
      // If decoding fails, the payload may have been written with the old (pre-2.1) stream codec.
      // Try decoding with the old one.
      ByteBuffer buffer = ByteBuffer.wrap(content);
      SchemaHash schemaHash = new SchemaHash(buffer);
      Preconditions.checkArgument(schemaHash.equals(StreamEventDataCodec.STREAM_DATA_SCHEMA.getSchemaHash()),
                                  "Schema from payload not matching with StreamEventData schema.");
      Decoder decoder = new BinaryDecoder(new ByteBufferInputStream(buffer));
      // In the old schema, the timestamp is not recorded.
      builder.add(new StreamEvent(StreamEventDataCodec.decode(decoder), 0));
    }
  }

  final List<StreamEvent> events = builder.build();
  return new DequeueResult<StreamEvent>() {
    @Override
    public boolean isEmpty() {
      return events.isEmpty();
    }

    @Override
    public void reclaim() {
      result.reclaim();
    }

    @Override
    public int size() {
      return events.size();
    }

    @Override
    public Iterator<StreamEvent> iterator() {
      return events.iterator();
    }
  };
}
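The fallback path above wraps a raw ByteBuffer in a ByteBufferInputStream so the binary decoder can read it as an ordinary InputStream. A minimal standalone sketch of that wrapping, assuming only the standard InputStream contract (the class name and payload below are illustrative):

import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import co.cask.common.io.ByteBufferInputStream;

public final class WrapBufferSketch {
  public static void main(String[] args) throws Exception {
    // Wrap an in-memory payload and read it back through the InputStream API.
    ByteBuffer buffer = ByteBuffer.wrap("payload bytes".getBytes(StandardCharsets.UTF_8));
    try (InputStream in = new ByteBufferInputStream(buffer)) {
      byte[] copy = new byte[buffer.remaining()];   // size captured before any read
      int read = in.read(copy);
      System.out.println(read + " bytes read: " + new String(copy, 0, read, StandardCharsets.UTF_8));
    }
  }
}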
Use of co.cask.common.io.ByteBufferInputStream in project cdap by caskdata.
Class StreamEventCodecTest, method testEncodeDecodeWithDatumDecoder.
@Test
public void testEncodeDecodeWithDatumDecoder() throws UnsupportedTypeException, IOException {
  StreamEvent event = new StreamEvent(Maps.<String, String>newHashMap(),
                                      ByteBuffer.wrap("Event string".getBytes(Charsets.UTF_8)));
  StreamEventCodec codec = new StreamEventCodec();
  ByteBuffer payload = ByteBuffer.wrap(codec.encodePayload(event));
  SchemaHash schemaHash = new SchemaHash(payload);
  Schema schema = new ReflectionSchemaGenerator().generate(StreamEvent.class);
  Assert.assertEquals(schema.getSchemaHash(), schemaHash);

  StreamEvent decoded = new ReflectionDatumReader<>(schema, TypeToken.of(StreamEvent.class))
    .read(new BinaryDecoder(new ByteBufferInputStream(payload)), schema);
  Assert.assertEquals(event.getHeaders(), decoded.getHeaders());
  Assert.assertEquals(event.getBody(), decoded.getBody());
}
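The test above drops down to the raw datum reader to verify the wire format. Application code can stay at the codec level instead. A minimal companion sketch, assuming the StreamEventCodec here is the same class used as STREAM_EVENT_CODEC in the first example (so it exposes both encodePayload and decodePayload) and reusing the imports of the test above:

@Test
public void testCodecRoundTrip() throws Exception {
  // Encode a StreamEvent to bytes and decode it back with the same codec (hypothetical companion test).
  StreamEventCodec codec = new StreamEventCodec();
  StreamEvent original = new StreamEvent(Maps.<String, String>newHashMap(),
                                         ByteBuffer.wrap("hello".getBytes(Charsets.UTF_8)));
  byte[] payload = codec.encodePayload(original);
  StreamEvent roundTripped = codec.decodePayload(payload);
  Assert.assertEquals(original.getHeaders(), roundTripped.getHeaders());
  Assert.assertEquals(original.getBody(), roundTripped.getBody());
}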
Use of co.cask.common.io.ByteBufferInputStream in project cdap by caskdata.
Class StreamBodyConsumerTestBase, method testChunkedContent.
@Test
public void testChunkedContent() throws Exception {
  int recordCount = 1000;

  // Generate a file for upload
  ContentInfo contentInfo = generateFile(recordCount);

  final Map<String, String> contentHeaders = Maps.newHashMap();
  final TestContentWriter contentWriter = new TestContentWriter();
  BodyConsumer bodyConsumer = createBodyConsumer(new ContentWriterFactory() {
    @Override
    public StreamId getStream() {
      return new StreamId("test_namespace", "test-stream");
    }

    @Override
    public ContentWriter create(Map<String, String> headers) throws IOException {
      contentHeaders.putAll(headers);
      return contentWriter;
    }
  });
  TestHttpResponder responder = new TestHttpResponder();

  // Feed the file content in small chunks
  sendChunks(contentInfo.getContentSupplier(), 10, bodyConsumer, responder);

  // Verify the processing completed correctly
  Assert.assertTrue(contentWriter.waitForClose(5, TimeUnit.SECONDS));
  Assert.assertEquals(HttpResponseStatus.OK, responder.getResponseStatus());
  Assert.assertEquals(recordCount, contentWriter.getEvents());
  Assert.assertTrue(contentInfo.verify(contentHeaders, new InputSupplier<InputStream>() {
    @Override
    public InputStream getInput() throws IOException {
      return new ByteBufferInputStream(contentWriter.getContent().duplicate());
    }
  }));
}
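The verification step wraps contentWriter.getContent().duplicate() rather than the buffer itself, so reading through the stream only advances the duplicate's position. A minimal sketch of that effect, reusing the imports of the test above and assuming ByteBufferInputStream consumes the buffer it is given like an ordinary InputStream:

ByteBuffer content = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4 });
InputStream in = new ByteBufferInputStream(content.duplicate());
while (in.read() != -1) {
  // Drain the duplicated view.
}
// The original buffer's position is untouched, so it can still be read or verified again.
Assert.assertEquals(4, content.remaining());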
Use of co.cask.common.io.ByteBufferInputStream in project cdap by caskdata.
Class MessagingMetricsCollectionServiceTest, method assertMetricsFromMessaging.
private void assertMetricsFromMessaging(final Schema schema, ReflectionDatumReader recordReader,
                                        Table<String, String, Long> expected)
  throws InterruptedException, TopicNotFoundException, IOException {

  // Consume from the messaging service
  final Map<String, MetricValues> metrics = Maps.newHashMap();
  ByteBufferInputStream is = new ByteBufferInputStream(null);
  for (int i = 0; i < PARTITION_SIZE; i++) {
    TopicId topicId = NamespaceId.SYSTEM.topic(TOPIC_PREFIX + i);
    try (CloseableIterator<RawMessage> iterator = messagingService.prepareFetch(topicId).fetch()) {
      while (iterator.hasNext()) {
        RawMessage message = iterator.next();
        MetricValues metricsRecord =
          (MetricValues) recordReader.read(new BinaryDecoder(is.reset(ByteBuffer.wrap(message.getPayload()))), schema);
        StringBuilder flattenContext = new StringBuilder();
        // Sort the tags so the flattened context matches the expected results
        Map<String, String> tags = Maps.newTreeMap();
        tags.putAll(metricsRecord.getTags());
        for (Map.Entry<String, String> tag : tags.entrySet()) {
          flattenContext.append(tag.getKey()).append(".").append(tag.getValue()).append(".");
        }
        // Remove the trailing "."
        if (flattenContext.length() > 0) {
          flattenContext.deleteCharAt(flattenContext.length() - 1);
        }
        metrics.put(flattenContext.toString(), metricsRecord);
      }
    } catch (IOException e) {
      LOG.info("Failed to decode message to MetricValue. Skipped. {}", e.getMessage());
    }
  }
  Assert.assertEquals(expected.rowKeySet().size(), metrics.size());
  checkReceivedMetrics(expected, metrics);
}
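Constructing the stream with a null buffer and re-pointing it with reset(...) on every message lets a single ByteBufferInputStream serve the whole fetch loop instead of allocating one stream per payload. A minimal sketch of that reuse pattern, assuming (as the inline call above suggests) that reset(ByteBuffer) returns the same stream re-pointed at the new buffer; the payload list is illustrative, and java.util.Arrays is assumed alongside the imports above:

// One stream instance reused across payloads.
ByteBufferInputStream is = new ByteBufferInputStream(null);
for (byte[] payload : Arrays.asList("a".getBytes(Charsets.UTF_8), "bb".getBytes(Charsets.UTF_8))) {
  InputStream in = is.reset(ByteBuffer.wrap(payload));   // same instance, new backing buffer
  System.out.println("first byte: " + in.read());
}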
Use of co.cask.common.io.ByteBufferInputStream in project cdap by caskdata.
Class MetricsMessageCallback, method onReceived.
@Override
public long onReceived(Iterator<FetchedMessage> messages) {
  // Decode the metrics records.
  ByteBufferInputStream is = new ByteBufferInputStream(null);
  List<MetricValues> records = Lists.newArrayList();
  long nextOffset = 0L;
  while (messages.hasNext()) {
    FetchedMessage input = messages.next();
    nextOffset = input.getNextOffset();
    try {
      MetricValues metricValues = recordReader.read(new BinaryDecoder(is.reset(input.getPayload())), recordSchema);
      records.add(metricValues);
    } catch (IOException e) {
      LOG.warn("Failed to decode message to MetricValue. Skipped. {}", e.getMessage());
    }
  }

  if (records.isEmpty()) {
    LOG.info("No records to process.");
    return nextOffset;
  }

  long now = System.currentTimeMillis();
  try {
    addProcessingStats(records, now);
    metricStore.add(records);
  } catch (Exception e) {
    // SimpleKafkaConsumer will log the error and continue on past these messages
    throw new RuntimeException("Failed to add metrics data to a store", e);
  }
  recordsProcessed += records.size();

  // Avoid logging more than once a minute
  if (now > lastLoggedMillis + TimeUnit.MINUTES.toMillis(1)) {
    lastLoggedMillis = now;
    LOG.debug("{} metrics records processed. Last record time: {}.",
              recordsProcessed, records.get(records.size() - 1).getTimestamp());
  }
  return nextOffset;
}
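Unlike the test above, this callback passes the ByteBuffer from input.getPayload() straight to reset(...) without wrapping a byte[] first; reset(ByteBuffer) accepts either shape. A minimal sketch of both, with illustrative buffers and the same assumption that reset(...) returns the re-pointed stream:

ByteBufferInputStream is = new ByteBufferInputStream(null);
InputStream fromBytes = is.reset(ByteBuffer.wrap(new byte[] { 1, 2, 3 }));  // raw byte[] payload, wrapped first
InputStream fromBuffer = is.reset(ByteBuffer.allocate(16));                 // payload already exposed as a ByteBuffer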