Use of org.codehaus.jackson.map.ObjectMapper in project databus by linkedin.
In class TestDbusEventBufferMult, method convertToPhysicalSourceConfig:
public PhysicalSourceConfig convertToPhysicalSourceConfig(String str) {
  _mapper = new ObjectMapper();
  InputStreamReader isr = new InputStreamReader(IOUtils.toInputStream(str));
  PhysicalSourceConfig pConfig = null;
  try {
    pConfig = _mapper.readValue(isr, PhysicalSourceConfig.class);
  } catch (JsonParseException e) {
    fail("Failed parsing", e);
  } catch (JsonMappingException e) {
    fail("Failed parsing", e);
  } catch (IOException e) {
    fail("Failed parsing", e);
  }
  try {
    isr.close();
  } catch (IOException e) {
    fail("Failed", e);
  }
  return pConfig;
}
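For comparison, a minimal sketch (not the project's code) of the same conversion that binds directly from the String: the Jackson 1.x ObjectMapper also accepts a String source, and JsonParseException and JsonMappingException both extend IOException, so the three catch blocks can collapse into one.

// Sketch only, assuming the same PhysicalSourceConfig class and a local mapper.
public PhysicalSourceConfig convertToPhysicalSourceConfig(String str) {
  ObjectMapper mapper = new ObjectMapper();
  try {
    // readValue(String, Class) parses and binds in one call; no reader to close.
    return mapper.readValue(str, PhysicalSourceConfig.class);
  } catch (IOException e) {
    // Also covers JsonParseException and JsonMappingException.
    fail("Failed parsing", e);
    return null;
  }
}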
Use of org.codehaus.jackson.map.ObjectMapper in project databus by linkedin.
In class TestDbusEventBufferPersistence, method createBufferMult:
private DbusEventBufferMult createBufferMult(DbusEventBuffer.StaticConfig config)
    throws IOException, InvalidConfigException {
  ObjectMapper mapper = new ObjectMapper();
  InputStreamReader isr = new InputStreamReader(IOUtils.toInputStream(TestDbusEventBufferMult._configSource1));
  PhysicalSourceConfig pConfig1 = mapper.readValue(isr, PhysicalSourceConfig.class);
  isr.close();
  isr = new InputStreamReader(IOUtils.toInputStream(TestDbusEventBufferMult._configSource2));
  PhysicalSourceConfig pConfig2 = mapper.readValue(isr, PhysicalSourceConfig.class);
  PhysicalSourceStaticConfig pStatConf1 = pConfig1.build();
  PhysicalSourceStaticConfig pStatConf2 = pConfig2.build();
  PhysicalSourceStaticConfig[] _physConfigs = new PhysicalSourceStaticConfig[] { pStatConf1, pStatConf2 };
  return new DbusEventBufferMult(_physConfigs, config, new DbusEventV2Factory());
}
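Note that the second InputStreamReader above is never closed. Since ObjectMapper is reusable and can bind straight from a String, a hedged sketch of the same construction without any readers (assuming _configSource1 and _configSource2 hold the JSON config strings, as above):

// Sketch only; not the project's code.
private DbusEventBufferMult createBufferMult(DbusEventBuffer.StaticConfig config)
    throws IOException, InvalidConfigException {
  ObjectMapper mapper = new ObjectMapper();
  PhysicalSourceStaticConfig pStatConf1 =
      mapper.readValue(TestDbusEventBufferMult._configSource1, PhysicalSourceConfig.class).build();
  PhysicalSourceStaticConfig pStatConf2 =
      mapper.readValue(TestDbusEventBufferMult._configSource2, PhysicalSourceConfig.class).build();
  return new DbusEventBufferMult(new PhysicalSourceStaticConfig[] { pStatConf1, pStatConf2 },
      config, new DbusEventV2Factory());
}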
Use of org.codehaus.jackson.map.ObjectMapper in project databus by linkedin.
In class TestDbusKeyCompositeFilter, method testDbusKeyRangeFilter:
@Test
public void testDbusKeyRangeFilter() throws Exception {
  KeyFilterConfigHolder.Config partConf = new KeyFilterConfigHolder.Config();
  partConf.setType("RANGE");
  KeyRangeFilterConfig.Config rangeConf = new KeyRangeFilterConfig.Config();
  rangeConf.setSize(100);
  rangeConf.setPartitions("[0,3-4]");
  partConf.setRange(rangeConf);
  DbusKeyFilter filter = new DbusKeyFilter(new KeyFilterConfigHolder(partConf.build()));
  List<DbusEvent> dbusEvents = new ArrayList<DbusEvent>();
  List<Long> keys = new ArrayList<Long>();
  for (long i = 0; i < 1000; ++i) {
    keys.add(i);
  }
  generateEvents(1000, (short) 1, keys, dbusEvents);
  List<DbusEvent> expPassedEvents = new ArrayList<DbusEvent>();
  List<DbusEvent> expFailedEvents = new ArrayList<DbusEvent>();
  for (DbusEvent event : dbusEvents) {
    long key = event.key();
    if ((key < 100) || ((key >= 300) && (key < 500)))
      expPassedEvents.add(event);
    else
      expFailedEvents.add(event);
  }
  List<DbusEvent> passedEvents = new ArrayList<DbusEvent>();
  List<DbusEvent> failedEvents = new ArrayList<DbusEvent>();
  for (DbusEvent event : dbusEvents) {
    if (filter.allow(event)) {
      passedEvents.add(event);
    } else {
      failedEvents.add(event);
    }
  }
  System.out.println("Passed Event Size :" + passedEvents.size());
  System.out.println("Failed Event Size :" + failedEvents.size());
  assertEquals("Passed Size", expPassedEvents.size(), passedEvents.size());
  assertEquals("Failed Size", expFailedEvents.size(), failedEvents.size());
  for (int i = 0; i < passedEvents.size(); ++i) {
    assertEquals("Passed Element " + i, expPassedEvents.get(i), passedEvents.get(i));
  }
  // Iterate over the failed list here, not the passed list.
  for (int i = 0; i < failedEvents.size(); ++i) {
    assertEquals("Failed Element " + i, expFailedEvents.get(i), failedEvents.get(i));
  }
  ObjectMapper objMapper = new ObjectMapper();
  String objStr = objMapper.writeValueAsString(filter);
  System.out.println("KeyRangeFilter :" + objStr);
  DbusKeyFilter filter2 = KeyFilterConfigJSONFactory.parseDbusKeyFilter(objStr);
  String objStr2 = objMapper.writeValueAsString(filter2);
  System.out.println("KeyRangeFilter2 :" + objStr2);
  assertEquals("KeyRangeFilter JSON Serialization Test", objStr, objStr2);
}
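The expected-pass set in this test encodes the range filter's bucket arithmetic: with size 100, a key maps to bucket key / 100, and the partition spec "[0,3-4]" admits buckets 0, 3 and 4, i.e. keys 0-99 and 300-499. A small sketch of that check, as an assumption about how the range filter interprets its config, inferred from the expectations above:

// Hedged sketch of the membership test implied by expPassedEvents.
static boolean inRangePartitions(long key, long bucketSize, long[] allowedBuckets) {
  long bucket = key / bucketSize;          // bucketSize = 100 in the test
  for (long b : allowedBuckets) {          // allowedBuckets = {0, 3, 4} from "[0,3-4]"
    if (bucket == b) {
      return true;
    }
  }
  return false;
}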
Use of org.codehaus.jackson.map.ObjectMapper in project databus by linkedin.
In class TestDbusPhysicalPartitionEventBuffer, method deserializePhysicalSourceConfigWithoutEbuffer:
@Test
public void deserializePhysicalSourceConfigWithoutEbuffer()
    throws JSONException, JsonParseException, JsonMappingException, IOException {
  JSONObject jsonObject = new JSONObject(PhysicalSourcesConfigWithoutEBuffer.toString());
  Assert.assertEquals(jsonObject.get("dbusEventBuffer"), JSONObject.NULL);
  ObjectMapper mapper = new ObjectMapper();
  PhysicalSourceConfig config = mapper.readValue(jsonObject.toString(), PhysicalSourceConfig.class);
  Assert.assertEquals(config.isDbusEventBufferSet(), false);
}
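A side note on the first assertion: org.json represents an explicit JSON null as the JSONObject.NULL sentinel rather than a Java null, which is what the test compares against. A minimal standalone sketch of that behavior (not project code):

// An explicit null in the document is stored as the JSONObject.NULL sentinel.
JSONObject obj = new JSONObject("{\"dbusEventBuffer\": null}");
boolean explicitNull = JSONObject.NULL.equals(obj.get("dbusEventBuffer"));  // true
boolean present = obj.has("dbusEventBuffer");                               // true; get() on a missing key throws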
Use of org.codehaus.jackson.map.ObjectMapper in project databus by linkedin.
In class FieldToAvro, method buildAvroSchema:
public String buildAvroSchema(String namespace, String topRecordAvroName, String topRecordDatabaseName,
                              String[][] headers, TableTypeInfo topRecordTypeInfo) {
  if (namespace == null)
    throw new IllegalArgumentException("namespace should not be null.");
  if (topRecordAvroName == null)
    throw new IllegalArgumentException("topRecordAvroName should not be null.");
  if (topRecordDatabaseName == null)
    throw new IllegalArgumentException("topRecordDatabaseName should not be null.");
  if (topRecordTypeInfo == null)
    throw new IllegalArgumentException("topRecordTypeInfo should not be null.");
  FieldInfo fieldInfo = new FieldInfo(topRecordDatabaseName, topRecordTypeInfo, -1);
  Map<String, Object> field = fieldToAvro(fieldInfo, true);
  // Overwrite the name with the nice Java record name
  field.put("name", topRecordAvroName);
  // Add namespace
  field.put("namespace", namespace);
  // Add doc and serialize to JSON
  try {
    SimpleDateFormat df = new SimpleDateFormat("MMM dd, yyyy hh:mm:ss a zzz");
    field.put("doc", "Auto-generated Avro schema for " + topRecordDatabaseName + ". Generated at "
        + df.format(new Date(System.currentTimeMillis())));
    ObjectMapper mapper = new ObjectMapper();
    JsonFactory factory = new JsonFactory();
    StringWriter writer = new StringWriter();
    JsonGenerator jgen = factory.createJsonGenerator(writer);
    jgen.useDefaultPrettyPrinter();
    mapper.writeValue(jgen, field);
    return writer.getBuffer().toString();
  } catch (Exception ex) {
    throw new RuntimeException(ex);
  }
}
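For orientation, the map serialized here follows the standard Avro record-schema layout; a hypothetical illustration of the top-level keys (names and values are illustrative, and the real "fields" entries come from fieldToAvro and the table metadata):

// Hypothetical example of the shape buildAvroSchema emits; not project code.
Map<String, Object> record = new LinkedHashMap<String, Object>();
record.put("type", "record");                              // standard Avro record schema
record.put("name", "MyTable");                             // topRecordAvroName
record.put("namespace", "com.example.events");             // namespace argument
record.put("doc", "Auto-generated Avro schema for MY_TABLE. Generated at ...");
record.put("fields", new ArrayList<Object>());             // one entry per column, from fieldToAvro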