Example use of org.codehaus.jackson.JsonFactory in the Apache HBase project:
class TestTableScan, method testStreamingJSON.
@Test
public void testStreamingJSON() throws Exception {
// Test scanning particular columns with limit.
StringBuilder builder = new StringBuilder();
builder.append("/*");
builder.append("?");
builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
builder.append("&");
builder.append(Constants.SCAN_LIMIT + "=20");
Response response = client.get("/" + TABLE + builder.toString(), Constants.MIMETYPE_JSON);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
// Reuse the same ObjectMapper the JAX-RS provider would use for CellSetModel,
// so JAXB annotations on the model are honored during deserialization.
ObjectMapper mapper = new JacksonJaxbJsonProvider().locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
CellSetModel model = mapper.readValue(response.getStream(), CellSetModel.class);
int count = TestScannerResource.countCellSet(model);
assertEquals(20, count);
checkRowsNotNull(model);
//Test scanning with no limit.
builder = new StringBuilder();
builder.append("/*");
builder.append("?");
builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_2);
response = client.get("/" + TABLE + builder.toString(), Constants.MIMETYPE_JSON);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
model = mapper.readValue(response.getStream(), CellSetModel.class);
count = TestScannerResource.countCellSet(model);
assertEquals(expectedRows2, count);
checkRowsNotNull(model);
//Test with start row and end row.
builder = new StringBuilder();
builder.append("/*");
builder.append("?");
builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
builder.append("&");
builder.append(Constants.SCAN_START_ROW + "=aaa");
builder.append("&");
builder.append(Constants.SCAN_END_ROW + "=aay");
response = client.get("/" + TABLE + builder.toString(), Constants.MIMETYPE_JSON);
assertEquals(200, response.getCode());
count = 0;
// This time parse the response incrementally with a streaming JsonParser
// (rather than materializing the whole CellSetModel), to exercise the
// server's streaming JSON output. The factory is bound to the JAXB-aware
// mapper so readValueAs(RowModel.class) works below.
JsonFactory jfactory = new JsonFactory(mapper);
JsonParser jParser = jfactory.createJsonParser(response.getStream());
boolean found = false;
// Walk the token stream until the top-level object closes. `found` is set
// once a START_ARRAY has been seen (presumably the "Row" array of the
// CellSet document — TODO confirm against the wire format); after that,
// each START_OBJECT is read as one RowModel.
while (jParser.nextToken() != JsonToken.END_OBJECT) {
if (jParser.getCurrentToken() == JsonToken.START_OBJECT && found) {
RowModel row = jParser.readValueAs(RowModel.class);
assertNotNull(row.getKey());
// count advances once per cell; rows "aaa".."aay" (end row exclusive)
// with one cell each should give keys "aaa" at count 0 and "aax" at 23.
for (int i = 0; i < row.getCells().size(); i++) {
if (count == 0) {
assertEquals("aaa", Bytes.toString(row.getKey()));
}
if (count == 23) {
assertEquals("aax", Bytes.toString(row.getKey()));
}
count++;
}
jParser.skipChildren();
} else {
found = jParser.getCurrentToken() == JsonToken.START_ARRAY;
}
}
// 24 rows ("aaa" through "aax") x 1 cell per row.
assertEquals(24, count);
}
Example use of org.codehaus.jackson.JsonFactory in the LinkedIn Databus project:
class JsonUtils, method toJsonString.
/**
 * Serializes a bean as JSON.
 * @param <T> the bean type
 * @param bean the bean to serialize
 * @param pretty a flag if the output is to be pretty printed
 * @return the JSON string
 * @throws JsonGenerationException if the JSON structure cannot be generated
 * @throws JsonMappingException if the bean cannot be mapped to JSON
 * @throws IOException on a lower-level write error
 */
public static <T> String toJsonString(T bean, boolean pretty) throws JsonGenerationException, JsonMappingException, IOException {
  // The factory must be created with an ObjectMapper so the generator's
  // writeObject() below can serialize arbitrary beans.
  JsonFactory jsonFactory = new JsonFactory(new ObjectMapper());
  StringWriter out = new StringWriter(1000);
  JsonGenerator jsonGenerator = jsonFactory.createJsonGenerator(out);
  if (pretty) {
    jsonGenerator.useDefaultPrettyPrinter();
  }
  jsonGenerator.writeObject(bean);
  // Flush the GENERATOR, not the writer: JsonGenerator buffers output
  // internally, and StringWriter.flush() is a no-op, so flushing only `out`
  // (as the original code did) can leave trailing data unwritten and return
  // a truncated string.
  jsonGenerator.flush();
  return out.toString();
}
Example use of org.codehaus.jackson.JsonFactory in the LinkedIn Pinot project:
class JSONRecordReader, method init.
@Override
public void init() throws Exception {
  // NOTE(review): FileReader decodes with the platform default charset; JSON
  // data is conventionally UTF-8 — confirm how the data files are encoded
  // before tightening this to an explicit charset.
  final Reader reader = new FileReader(_dataFile);
  try {
    // Lazily iterate the file as a stream of Map records.
    _parser = new JsonFactory().createJsonParser(reader);
    _iterator = new ObjectMapper().readValues(_parser, Map.class);
  } catch (Exception e) {
    // If parser/iterator construction fails, no reference to the reader
    // escapes this method, so close it here to avoid leaking the file handle.
    reader.close();
    throw e;
  }
}
Example use of org.codehaus.jackson.JsonFactory in the LinkedIn Databus project:
class DbusEventAvroDecoder, method dumpEventValueInJSON.
// Decodes the Avro-encoded payload of a DbusEvent (using the schema looked up
// by the event's schema ID) and writes it to `out` as a JSON object wrapping
// the source ID, opcode, partition ID, and decoded value.
public void dumpEventValueInJSON(DbusEvent e, OutputStream out) {
byte[] md5 = new byte[16];
e.schemaId(md5);
SchemaId schemaId = new SchemaId(md5);
VersionedSchema sourceSchema = _schemaSet.getById(schemaId);
// Copy the event's value bytes out of the (possibly shared) ByteBuffer.
ByteBuffer valueBuffer = e.value();
byte[] valueBytes = new byte[valueBuffer.remaining()];
valueBuffer.get(valueBytes);
try {
Schema schema = sourceSchema.getSchema();
DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
// binDecoder appears to be a reusable/cached decoder holder (ThreadLocal-like)
// — reuse avoids per-call allocation. TODO confirm its declaration.
binDecoder.set(DecoderFactory.defaultFactory().createBinaryDecoder(valueBytes, binDecoder.get()));
Object datum = reader.read(null, binDecoder.get());
DatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
JsonGenerator g = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
// write the src ID
g.writeStartObject();
g.writeFieldName(SRC_ID_FIELD_NAME);
g.writeNumber(e.getSourceId());
g.writeFieldName(OPCODE_FIELD_NAME);
g.writeString(e.getOpcode().toString());
g.writeFieldName("partId");
g.writeNumber(Integer.valueOf(e.getPartitionId()));
g.writeFieldName(VALUE_FIELD_NAME);
// Emit the decoded Avro datum under VALUE_FIELD_NAME via a JsonEncoder
// that shares this generator.
writer.write(datum, new JsonEncoder(schema, g));
// NOTE(review): three writeEndObject() calls follow for one writeStartObject().
// The comment in the try/catch below says this compensates for the Avro
// JsonEncoder sometimes leaving the generator's object context unclosed.
// Deliberate workaround — do not "simplify" without testing against the
// affected Avro version.
g.writeEndObject();
g.writeEndObject();
try {
g.writeEndObject();
} catch (JsonGenerationException e_json) {
// ignore the error as some how avro JsonEncoder may some times missing two }
}
g.flush();
} catch (IOException e1) {
// Best-effort dump: log and return rather than propagate.
LOG.error("event value serialization error; event = " + e, e1);
}
}
Example use of org.codehaus.jackson.JsonFactory in the LinkedIn Databus project:
class AvroConverter, method convert.
// Converts the Avro records read from `in` and writes them to `out` in the
// configured output format (binary, or JSON with optional pretty printing).
public void convert(InputStream in, OutputStream out) throws IOException {
  // Encode JSON output explicitly as UTF-8. The original bare
  // OutputStreamWriter(out) used the platform default charset, which
  // produces non-UTF-8 JSON on some platforms.
  JsonGenerator jsonGenerator = (new JsonFactory()).createJsonGenerator(new OutputStreamWriter(out, "UTF-8"));
  if (AvroFormat.JSON == _outputFormat) {
    jsonGenerator.useDefaultPrettyPrinter();
  }
  List<GenericRecord> result = convert(in);
  // Binary goes straight to the stream; JSON goes through the generator.
  Encoder outputEncoder = (AvroFormat.BINARY == _outputFormat) ? new BinaryEncoder(out) : new JsonEncoder(_outputSchema, jsonGenerator);
  GenericDatumWriter<GenericRecord> genericWriter = new GenericDatumWriter<GenericRecord>(_outputSchema);
  for (GenericRecord r : result) {
    genericWriter.write(r, outputEncoder);
  }
  // Flush the encoder (which flushes its underlying sink) and then the
  // caller's stream so all buffered output is pushed out.
  outputEncoder.flush();
  out.flush();
}
Aggregations