Use of com.hortonworks.streamline.streams.StreamlineEvent in project streamline by hortonworks.
From the class AvroKafkaSpoutTranslator, method apply:
@Override
public List<Object> apply(ConsumerRecord<Object, ByteBuffer> consumerRecord) {
    // Deserialize the Avro payload into a map of field names to values,
    // using the configured reader schema version.
    Map<String, Object> keyValues = (Map<String, Object>) deserializer()
            .deserialize(new ByteBufferInputStream(consumerRecord.value()), readerSchemaVersion);
    StreamlineEvent streamlineEvent = StreamlineEventImpl.builder()
            .putAll(keyValues)
            .dataSourceId(dataSourceId)
            .build();
    // KafkaTuple extends Values (a List<Object>), so it can be returned directly;
    // routedTo tags the tuple with the stream it should be emitted on.
    KafkaTuple kafkaTuple = new KafkaTuple(streamlineEvent);
    kafkaTuple.routedTo(outputStream);
    return kafkaTuple;
}
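For context, apply here satisfies the storm-kafka-client RecordTranslator contract, which also requires getFieldsFor and, optionally, streams. Below is a minimal, hedged sketch of a translator built on the same StreamlineEventImpl builder; the class name, stream name, and data source id are illustrative assumptions, and it presumes the consumer value is already a deserialized field map rather than Avro bytes:

import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.storm.kafka.spout.KafkaTuple;
import org.apache.storm.kafka.spout.RecordTranslator;
import org.apache.storm.tuple.Fields;

import com.hortonworks.streamline.streams.StreamlineEvent;
import com.hortonworks.streamline.streams.common.StreamlineEventImpl;

public class SimpleEventTranslator implements RecordTranslator<Object, Map<String, Object>> {
    private static final String STREAM = "default"; // illustrative stream name

    @Override
    public List<Object> apply(ConsumerRecord<Object, Map<String, Object>> record) {
        StreamlineEvent event = StreamlineEventImpl.builder()
                .putAll(record.value())        // assumes the value is already a field map
                .dataSourceId("myDataSource")  // illustrative data source id
                .build();
        return new KafkaTuple(event).routedTo(STREAM);
    }

    @Override
    public Fields getFieldsFor(String stream) {
        // Streamline bolts consume the event under this single well-known field name.
        return new Fields(StreamlineEvent.STREAMLINE_EVENT);
    }

    @Override
    public List<String> streams() {
        return Collections.singletonList(STREAM);
    }
}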
Use of com.hortonworks.streamline.streams.StreamlineEvent in project streamline by hortonworks.
From the class StreamlineEventSerializerTest, method runPrimitiveTest:
private void runPrimitiveTest(Map<String, Object> data, Schema schema, Object expectedValue) {
    StreamlineEvent streamlineEvent = StreamlineEventImpl.builder().fieldsAndValues(data).dataSourceId("dataSourceId").build();
    // For a primitive (non-record) schema, getAvroRecord is expected to return the bare value.
    Assert.assertEquals(expectedValue, StreamlineEventSerializer.getAvroRecord(streamlineEvent, schema));
}
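A plausible caller (a hedged sketch; the actual test iterates over PRIMITIVE_VALUES and SCHEMA_TYPES arrays that are not shown in this excerpt) passes a single-entry map together with the matching primitive schema:

    // Illustrative invocation: a single BOOLEAN field should come back as the bare value.
    Map<String, Object> data = new HashMap<>();
    data.put("field1", true);
    runPrimitiveTest(data, Schema.create(Schema.Type.BOOLEAN), true);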
Use of com.hortonworks.streamline.streams.StreamlineEvent in project streamline by hortonworks.
From the class StreamlineEventSerializerTest, method testRecordWithFixed:
@Test
public void testRecordWithFixed() {
    List<Schema.Field> fields = new ArrayList<>();
    Map<String, Object> data = new HashMap<>();
    for (int i = 0; i < PRIMITIVE_VALUES.length; ++i) {
        Schema.Field field = new Schema.Field(NAMES[i], Schema.create(SCHEMA_TYPES[i]), null, null);
        fields.add(field);
        data.put(NAMES[i], PRIMITIVE_VALUES[i]);
    }
    // add fixed to test case
    fields.add(new Schema.Field("fixed", Schema.createFixed("fixedSchema", null, null, 10), null, null));
    data.put("fixed", "bytes".getBytes());
    // add array to test case
    fields.add(new Schema.Field("array", Schema.createArray(Schema.create(Schema.Type.INT)), null, null));
    List<Integer> integerList = new ArrayList<>();
    integerList.add(1);
    integerList.add(2);
    data.put("array", integerList);
    Schema schema = Schema.createRecord(fields);
    GenericRecord expected = new GenericData.Record(schema);
    for (int i = 0; i < PRIMITIVE_VALUES.length; ++i) {
        expected.put(NAMES[i], PRIMITIVE_VALUES[i]);
    }
    expected.put("fixed", new GenericData.Fixed(Schema.createFixed("fixedSchema", null, null, 10), "bytes".getBytes()));
    expected.put("array", new GenericData.Array<Integer>(Schema.createArray(Schema.create(Schema.Type.INT)), integerList));
    StreamlineEvent streamlineEvent = StreamlineEventImpl.builder().fieldsAndValues(data).dataSourceId("dataSourceId").build();
    Assert.assertEquals(expected, StreamlineEventSerializer.getAvroRecord(streamlineEvent, schema));
}
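As a smaller, self-contained illustration of the byte[]-to-GenericData.Fixed mapping this test relies on (a reduced sketch, not additional project code; it assumes getAvroRecord behaves as asserted above):

    // Record schema with a single fixed(10) field, mirroring the test's construction.
    List<Schema.Field> fixedOnly = new ArrayList<>();
    fixedOnly.add(new Schema.Field("fixed", Schema.createFixed("fixedSchema", null, null, 10), null, null));
    Schema recordSchema = Schema.createRecord(fixedOnly);

    Map<String, Object> input = new HashMap<>();
    input.put("fixed", "bytes".getBytes());
    StreamlineEvent event = StreamlineEventImpl.builder().fieldsAndValues(input).dataSourceId("dataSourceId").build();

    GenericRecord want = new GenericData.Record(recordSchema);
    want.put("fixed", new GenericData.Fixed(Schema.createFixed("fixedSchema", null, null, 10), "bytes".getBytes()));
    Assert.assertEquals(want, StreamlineEventSerializer.getAvroRecord(event, recordSchema));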
Use of com.hortonworks.streamline.streams.StreamlineEvent in project streamline by hortonworks.
From the class TestRealtimeJoinBolt, method makeStreamLineEventStream:
// NOTE: Streamline-specific
private static ArrayList<Tuple> makeStreamLineEventStream(String streamName, String[] fieldNames, Object[][] records) {
    MockTopologyContext mockContext = new MockTopologyContext(new String[] { StreamlineEvent.STREAMLINE_EVENT });
    ArrayList<Tuple> result = new ArrayList<>(records.length);
    // convert each record into a HashMap using fieldNames as keys
    for (Object[] record : records) {
        HashMap<String, Object> recordMap = new HashMap<>(fieldNames.length);
        for (int i = 0; i < fieldNames.length; i++) {
            recordMap.put(fieldNames[i], record[i]);
        }
        StreamlineEvent streamLineEvent = StreamlineEventImpl.builder()
                .fieldsAndValues(recordMap)
                .dataSourceId("multiple sources")
                .build();
        ArrayList<Object> tupleValues = new ArrayList<>(1);
        tupleValues.add(streamLineEvent);
        TupleImpl tuple = new TupleImpl(mockContext, tupleValues, 0, streamName);
        result.add(tuple);
    }
    return result;
}
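A hypothetical call (the stream name, field names, and records below are made-up test data):

    String[] fieldNames = { "userId", "name" };
    Object[][] users = {
            { 1, "alice" },
            { 2, "bob" }
    };
    // Each resulting Tuple carries one StreamlineEvent with userId and name fields.
    ArrayList<Tuple> userStream = makeStreamLineEventStream("users", fieldNames, users);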
Use of com.hortonworks.streamline.streams.StreamlineEvent in project streamline by hortonworks.
From the class WindowRulesBoltTest, method testCountBasedWindowWithGroupbyUnordered:
@Test
public void testCountBasedWindowWithGroupbyUnordered() throws Exception {
    String rulesJson = readFile("/window-rule-groupby-unordered.json");
    RulesProcessor rulesProcessor = Utils.createObjectFromJson(rulesJson, RulesProcessor.class);
    Window windowConfig = rulesProcessor.getRules().get(0).getWindow();
    WindowRulesBolt wb = new WindowRulesBolt(rulesJson, RuleProcessorRuntime.ScriptType.SQL);
    wb.withWindowConfig(windowConfig);
    WindowedBoltExecutor wbe = new WindowedBoltExecutor(wb);
    Map<String, Object> conf = wb.getComponentConfiguration();
    wbe.prepare(conf, mockContext, mockCollector);
    wbe.execute(getNextTuple(10));
    wbe.execute(getNextTuple(15));
    wbe.execute(getNextTuple(11));
    wbe.execute(getNextTuple(16));
    // JMockit Verifications block: capture what the bolt emitted on the mocked collector.
    new Verifications() {
        {
            String streamId;
            Collection<Tuple> anchors;
            List<List<Object>> tuples = new ArrayList<>();
            mockCollector.emit(streamId = withCapture(), anchors = withCapture(), withCapture(tuples));
            Assert.assertEquals(2, tuples.size());
            // StreamlineEvent implements Map<String, Object>, so grouped fields can be read directly.
            Map<String, Object> fieldsAndValues = (StreamlineEvent) tuples.get(0).get(0);
            Assert.assertEquals(2, fieldsAndValues.get("deptid"));
            Assert.assertEquals(110, fieldsAndValues.get("salary_MAX"));
            fieldsAndValues = (StreamlineEvent) tuples.get(1).get(0);
            Assert.assertEquals(3, fieldsAndValues.get("deptid"));
            Assert.assertEquals(160, fieldsAndValues.get("salary_MAX"));
        }
    };
}
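The getNextTuple helper is not included in this excerpt. A plausible shape, chosen here so that the deptid and salary_MAX assertions above work out (deptid = i / 5, salary = i * 10, the field names, and the stream name are all assumptions), reusing the test's mocked topology context:

    // Assumed helper (not in this excerpt): wraps one employee record in a StreamlineEvent tuple.
    private Tuple getNextTuple(int i) {
        Map<String, Object> fields = new HashMap<>();
        fields.put("empid", i);
        fields.put("deptid", i / 5);   // 10, 11 -> dept 2; 15, 16 -> dept 3
        fields.put("salary", i * 10);  // max per dept: 110 and 160, matching the assertions
        StreamlineEvent event = StreamlineEventImpl.builder()
                .fieldsAndValues(fields)
                .dataSourceId("dsrc")
                .build();
        List<Object> values = new ArrayList<>();
        values.add(event);
        return new TupleImpl(mockContext, values, 0, "inputStream");
    }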