Usage example of com.hortonworks.registries.common.Schema in the project streamline by hortonworks.
From the class SchemaTest, method testfromMapData:
@Test
public void testfromMapData() throws Exception {
    // Build an input map with a scalar field, a homogenous array and a mixed-type array.
    Map<String, Object> data = new LinkedHashMap<>();
    data.put("stringField", "test");
    data.put("arrayField", Arrays.asList(1, 2, 3));
    data.put("arrayField2", Arrays.asList(1, "abc"));

    List<Schema.Field> fields = Schema.fromMapData(data).getFields();

    // Scalar field maps to a STRING field.
    Schema.Field stringField = fields.get(0);
    assertEquals("stringField", stringField.getName());
    assertEquals(Type.STRING, stringField.getType());

    // All-integer array: a single member type, flagged homogenous.
    Schema.ArrayField homogenousArray = (Schema.ArrayField) fields.get(1);
    assertEquals("arrayField", homogenousArray.getName());
    assertEquals(Type.ARRAY, homogenousArray.getType());
    assertEquals(1, homogenousArray.getMembers().size());
    assertEquals(Type.INTEGER, homogenousArray.getMembers().get(0).getType());
    assertTrue(homogenousArray.isHomogenous());

    // Mixed int/string array: one member type per element, not homogenous.
    Schema.ArrayField mixedArray = (Schema.ArrayField) fields.get(2);
    assertEquals("arrayField2", mixedArray.getName());
    assertEquals(Type.ARRAY, mixedArray.getType());
    assertEquals(Type.INTEGER, mixedArray.getMembers().get(0).getType());
    assertEquals(Type.STRING, mixedArray.getMembers().get(1).getType());
    assertFalse(mixedArray.isHomogenous());
}
Usage example of com.hortonworks.registries.common.Schema in the project streamline by hortonworks.
From the class SchemaValueConverterTest, method convertMapValueDoesNotHaveRequiredField:
// Converting a map that is missing one of the schema's fields must fail validation.
@Test(expected = SchemaValidationFailedException.class)
public void convertMapValueDoesNotHaveRequiredField() {
    Schema schema = Schema.of(
            Schema.Field.of("a", Schema.Type.BINARY),
            Schema.Field.of("b", Schema.Type.STRING),
            Schema.Field.of("c", Schema.Type.ARRAY));

    // Field "b" is deliberately omitted from the value map.
    Map<String, Object> value = new HashMap<>();
    value.put("a", new byte[] { 0x01, 0x02 });
    value.put("c", Collections.singletonList("hello"));

    SchemaValueConverter.convertMap(schema, value);
}
Usage example of com.hortonworks.registries.common.Schema in the project streamline by hortonworks.
From the class CustomProcessorBolt, method process:
@Override
protected void process(Tuple input, StreamlineEvent event) {
try {
// Event handed to the custom processor; either the raw event or a remapped copy.
StreamlineEvent toProcess = null;
// No input-schema mapping configured for this stream -> pass the event through unchanged.
if (inputSchemaMap == null || inputSchemaMap.isEmpty() || !inputSchemaMap.containsKey(input.getSourceStreamId())) {
toProcess = event;
} else {
// Create a new mapped event based on mapping to pass it to CP implementation
// entry.getKey() is the CP-facing field name, entry.getValue() the incoming
// event's field name; fields absent from the event are silently dropped.
Map<String, Object> mappedEventMap = new HashMap<>();
for (Map.Entry<String, String> entry : inputSchemaMap.get(input.getSourceStreamId()).entrySet()) {
if (event.get(entry.getValue()) != null) {
mappedEventMap.put(entry.getKey(), event.get(entry.getValue()));
}
}
toProcess = StreamlineEventImpl.builder().from(event).sourceStream(input.getSourceStreamId()).fieldsAndValues(mappedEventMap).build();
}
List<StreamlineEvent> results = customProcessorRuntime.process(toProcess);
if (results != null) {
// NOTE(review): takes the first (and seemingly only) entry of outputSchema —
// presumably custom processors declare exactly one output stream; confirm, and
// note this throws NoSuchElementException if outputSchema is empty.
Schema schema = outputSchema.values().iterator().next();
String outputStream = outputSchema.keySet().iterator().next();
for (StreamlineEvent e : results) {
Map<String, Object> newFieldsAndValues = new HashMap<>();
// event and CP defined output schema. UI will make sure that the fields are from one of the two sets.
for (Schema.Field field : schema.getFields()) {
// value has to be present either in the input event
// (fallback) or in the CP's result event (preferred).
newFieldsAndValues.put(field.getName(), e.containsKey(field.getName()) ? e.get(field.getName()) : event.get(field.getName()));
}
// Re-anchor the result on the declared output stream and emit it anchored to
// the input tuple so acking/failing propagates.
StreamlineEvent toEmit = StreamlineEventImpl.builder().from(e).sourceStream(outputStream).fieldsAndValues(newFieldsAndValues).build();
collector.emit(outputStream, input, new Values(toEmit));
}
}
} catch (ProcessingException e) {
LOG.error("Custom Processor threw a ProcessingException. ", e);
throw new RuntimeException(e);
}
}
Usage example of com.hortonworks.registries.common.Schema in the project streamline by hortonworks.
From the class NormalizationBoltTest, method createBulkNormalizationProcessor:
public static NormalizationProcessor createBulkNormalizationProcessor(String outputStreamId) throws NormalizationException, IOException {
    // Input schema the bulk normalization script operates on.
    Schema inputSchema = Schema.of(
            Schema.Field.of("temp", Schema.Type.INTEGER),
            Schema.Field.of("foo", Schema.Type.STRING));
    BulkNormalizationConfig config = new BulkNormalizationConfig(inputSchema, getBulkScriptText());

    // One normalization config keyed by the input stream id.
    Map<String, NormalizationConfig> streamConfigs = new HashMap<>();
    streamConfigs.put(INPUT_STREAM_ID, config);

    // Processor in bulk mode, declaring a single output stream.
    Stream outputStream = new Stream(outputStreamId, OUTPUT_SCHEMA_FIELDS);
    NormalizationProcessor processor = new NormalizationProcessor(streamConfigs, outputStream, NormalizationProcessor.Type.bulk);
    processor.addOutputStream(outputStream);
    return processor;
}
Usage example of com.hortonworks.registries.common.Schema in the project streamline by hortonworks.
From the class AvroStreamlineSchemaConverter, method convertAvroSchemaToStreamlineSchema:
/**
 * Converts the given {@code avroSchemaText} into a JSON representation of the
 * equivalent streamline schema fields ({@link Schema.Field}).
 *
 * @param avroSchemaText avro schema text to convert
 * @return JSON string of the streamline schema fields for the given {@code avroSchemaText}
 * @throws JsonProcessingException if any error occurs in generating json for generated streams schema fields.
 */
public static String convertAvroSchemaToStreamlineSchema(String avroSchemaText) throws JsonProcessingException {
    LOG.debug("Generating streams schema for given avro schema [{}]", avroSchemaText);
    org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(avroSchemaText);
    Schema.Field rootField = generateStreamsSchemaField(avroSchema);
    // A root record is flattened: streamline schema uses its child fields as the
    // top-level elements; any other root kind becomes a single-element field list.
    List<Schema.Field> effectiveFields = rootField instanceof Schema.NestedField
            ? ((Schema.NestedField) rootField).getFields()
            : Collections.singletonList(rootField);
    return new ObjectMapper().writeValueAsString(effectiveFields);
}
Aggregations