Use of org.codehaus.jackson.JsonParser in project OpenRefine by OpenRefine.
The class Cell, method loadStreaming.
public static Cell loadStreaming(String s, Pool pool) throws Exception {
    JsonFactory jsonFactory = new JsonFactory();
    JsonParser jp = jsonFactory.createJsonParser(s);
    if (jp.nextToken() != JsonToken.START_OBJECT) {
        return null;
    }
    return loadStreaming(jp, pool);
}
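For context, a minimal sketch of a call site for this method. The JSON literal, the no-arg Pool constructor, and the public value field are assumptions for illustration, not guaranteed OpenRefine API details:

// Hypothetical call site for Cell.loadStreaming; JSON shape is assumed.
import com.google.refine.model.Cell;
import com.google.refine.util.Pool;

public class CellLoadDemo {
    public static void main(String[] args) throws Exception {
        Pool pool = new Pool();
        Cell cell = Cell.loadStreaming("{\"v\":\"hello\"}", pool);
        // loadStreaming returns null when the input is not a JSON object.
        System.out.println(cell == null ? "not a JSON object" : "loaded: " + cell.value);
    }
}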
Use of org.codehaus.jackson.JsonParser in project eiger by wlloyd.
The class SSTableImport, method importSorted.
public static int importSorted(String jsonFile, ColumnFamily columnFamily, String ssTablePath, IPartitioner<?> partitioner) throws IOException {
    // already imported keys count
    int importedKeys = 0;
    long start = System.currentTimeMillis();
    JsonParser parser = getParser(jsonFile);
    if (keyCountToImport == null) {
        keyCountToImport = 0;
        System.out.println("Counting keys to import, please wait... (NOTE: to skip this use -n <num_keys>)");
        // START_OBJECT
        parser.nextToken();
        while (parser.nextToken() != null) {
            parser.nextToken();
            parser.skipChildren();
            if (parser.getCurrentName() == null)
                continue;
            keyCountToImport++;
        }
    }
    System.out.printf("Importing %s keys...%n", keyCountToImport);
    // renewing parser
    parser = getParser(jsonFile);
    SSTableWriter writer = new SSTableWriter(ssTablePath, keyCountToImport);
    int lineNumber = 1;
    DecoratedKey prevStoredKey = null;
    while (parser.nextToken() != null) {
        String key = parser.getCurrentName();
        if (key != null) {
            String tokenName = parser.nextToken().name();
            if (tokenName.equals("START_ARRAY")) {
                if (columnFamily.getType() == ColumnFamilyType.Super) {
                    throw new RuntimeException("Can't write Standard columns to the Super Column Family.");
                }
                List<?> columns = parser.readValueAs(new TypeReference<List<?>>() {
                });
                addToStandardCF(columns, columnFamily);
            } else if (tokenName.equals("START_OBJECT")) {
                if (columnFamily.getType() == ColumnFamilyType.Standard) {
                    throw new RuntimeException("Can't write Super columns to the Standard Column Family.");
                }
                Map<?, ?> columns = parser.readValueAs(new TypeReference<Map<?, ?>>() {
                });
                addToSuperCF(columns, columnFamily);
            } else {
                throw new UnsupportedOperationException("Only Array or Hash allowed as row content.");
            }
            DecoratedKey currentKey = partitioner.decorateKey(hexToBytes(key));
            if (prevStoredKey != null && prevStoredKey.compareTo(currentKey) != -1) {
                System.err.printf("Line %d: Key %s is greater than previous, collection is not sorted properly. Aborting import. You might need to delete SSTables manually.%n", lineNumber, key);
                return -1;
            }
            // saving decorated key
            writer.append(currentKey, columnFamily);
            columnFamily.clear();
            prevStoredKey = currentKey;
            importedKeys++;
            lineNumber++;
            long current = System.currentTimeMillis();
            if (current - start >= 5000) { // 5 secs.
                System.out.printf("Currently imported %d keys.%n", importedKeys);
                start = current;
            }
            if (keyCountToImport == importedKeys)
                break;
        }
    }
    writer.closeAndOpenReader();
    return importedKeys;
}
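The counting pass above relies on a common streaming idiom: step to each field name, then skipChildren() over the value so nothing is materialized. A standalone sketch of just that idiom, under the assumption of a single root JSON object:

// Count top-level fields of a JSON object without building a tree.
// Self-contained sketch using the same org.codehaus.jackson API.
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;

public class KeyCounter {
    public static int countTopLevelKeys(String json) throws Exception {
        JsonParser parser = new JsonFactory().createJsonParser(json);
        parser.nextToken(); // consume START_OBJECT
        int keys = 0;
        while (parser.nextToken() == JsonToken.FIELD_NAME) {
            parser.nextToken(); // move to the field's value
            parser.skipChildren(); // no-op for scalars, skips whole arrays/objects
            keys++;
        }
        return keys;
    }
}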
Use of org.codehaus.jackson.JsonParser in project stanbol by apache.
The class AnalyzedTextParser, method parse.
/**
 * Parses {@link AnalysedText} {@link Span}s including annotations from the
 * {@link InputStream}. The {@link AnalysedText} instance that is going to
 * be enriched with the parsed data needs to be passed in. In the simplest case
 * the caller can create an empty instance by using a
 * {@link AnalysedTextFactory}.
 * @param in The stream to read the data from
 * @param charset the {@link Charset} used by the stream
 * @param at The {@link AnalysedText} instance used to add the data to
 * @return the parsed {@link AnalysedText} instance enriched with the
 * information parsed from the Stream
 * @throws IOException on any error while reading or parsing the data
 * from the Stream
 */
public AnalysedText parse(InputStream in, Charset charset, final AnalysedText at) throws IOException {
    if (in == null) {
        throw new IllegalArgumentException("The parsed InputStream MUST NOT be NULL!");
    }
    if (charset == null) {
        charset = UTF8;
    }
    JsonParser parser = mapper.getJsonFactory().createJsonParser(new InputStreamReader(in, charset));
    if (parser.nextToken() != JsonToken.START_OBJECT) {
        // start object
        throw new IOException("JSON serialized AnalyzedTexts MUST use a JSON Object as Root!");
    }
    if (!parser.nextFieldName(new SerializedString("spans"))) {
        throw new IOException("JSON serialized AnalyzedText MUST define the 'spans' field as first entry in the root JSON object!");
    }
    if (parser.nextValue() != JsonToken.START_ARRAY) {
        throw new IOException("The value of the 'spans' field MUST BE a JSON Array!");
    }
    boolean first = true;
    while (parser.nextValue() == JsonToken.START_OBJECT) {
        if (first) {
            parseAnalyzedTextSpan(parser.readValueAsTree(), at);
            first = false;
        } else {
            parseSpan(at, parser.readValueAsTree());
        }
    }
    return at;
}
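The notable calls here are nextFieldName(SerializableString), which advances and matches a field name in one step, and nextValue(), which skips over field names straight to value tokens. A minimal standalone sketch of the same pattern, with the JSON shape assumed for illustration; the parser is created from an ObjectMapper so readValueAsTree() has a codec:

import java.io.IOException;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;
import org.codehaus.jackson.io.SerializedString;
import org.codehaus.jackson.map.ObjectMapper;

public class SpansReader {
    // Reads {"spans":[{...},{...}]} and prints each span object as a tree.
    public static void read(String json) throws IOException {
        JsonParser parser = new ObjectMapper().getJsonFactory().createJsonParser(json);
        if (parser.nextToken() != JsonToken.START_OBJECT) {
            throw new IOException("root must be a JSON object");
        }
        if (!parser.nextFieldName(new SerializedString("spans"))) {
            throw new IOException("'spans' must be the first field");
        }
        if (parser.nextValue() != JsonToken.START_ARRAY) {
            throw new IOException("'spans' must be an array");
        }
        while (parser.nextValue() == JsonToken.START_OBJECT) {
            System.out.println(parser.readValueAsTree());
        }
    }
}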
Use of org.codehaus.jackson.JsonParser in project hive by apache.
The class JsonSerDe, method deserialize.
/**
 * Takes a JSON string in Text form, and has to return an object representation of
 * it that's readable by the corresponding object inspector.
 *
 * For this implementation, since we're using the jackson parser, we can construct
 * our own object implementation, and we use HCatRecord for it
 */
@Override
public Object deserialize(Writable blob) throws SerDeException {
    Text t = (Text) blob;
    JsonParser p;
    List<Object> r = new ArrayList<Object>(Collections.nCopies(columnNames.size(), null));
    try {
        p = jsonFactory.createJsonParser(new ByteArrayInputStream((t.getBytes())));
        if (p.nextToken() != JsonToken.START_OBJECT) {
            throw new IOException("Start token not found where expected");
        }
        JsonToken token;
        while (((token = p.nextToken()) != JsonToken.END_OBJECT) && (token != null)) {
            // iterate through each token, and create appropriate object here.
            populateRecord(r, token, p, schema);
        }
    } catch (JsonParseException e) {
        LOG.warn("Error [{}] parsing json text [{}].", e, t);
        throw new SerDeException(e);
    } catch (IOException e) {
        LOG.warn("Error [{}] parsing json text [{}].", e, t);
        throw new SerDeException(e);
    }
    return new DefaultHCatRecord(r);
}
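populateRecord() is not shown here; as a rough sketch of the token loop it plugs into, the following reads a flat JSON object into a Map instead of an HCatRecord (class and field names are illustrative, not Hive's):

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;

public class FlatJsonReader {
    // Reads one flat JSON object, e.g. {"a":1,"b":"x"}, into a Map.
    public static Map<String, String> read(String json) throws IOException {
        JsonParser p = new JsonFactory().createJsonParser(json);
        if (p.nextToken() != JsonToken.START_OBJECT) {
            throw new IOException("Start token not found where expected");
        }
        Map<String, String> record = new HashMap<String, String>();
        JsonToken token;
        while ((token = p.nextToken()) != JsonToken.END_OBJECT && token != null) {
            // token is the FIELD_NAME; advance to its value and store it as text.
            String field = p.getCurrentName();
            p.nextToken();
            record.put(field, p.getText());
        }
        return record;
    }
}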
Use of org.codehaus.jackson.JsonParser in project hive by apache.
The class JSONMessageFactory, method getJsonTree.
public static ObjectNode getJsonTree(String eventMessage) throws Exception {
    JsonParser jsonParser = (new JsonFactory()).createJsonParser(eventMessage);
    ObjectMapper mapper = new ObjectMapper();
    return mapper.readValue(jsonParser, ObjectNode.class);
}
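A sketch of a call site; the event payload and its field names are made up for illustration and are not a real metastore notification message:

import org.codehaus.jackson.node.ObjectNode;

public class MessageDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical payload; real HMS notification messages carry many more fields.
        ObjectNode node = JSONMessageFactory.getJsonTree("{\"eventType\":\"CREATE_TABLE\",\"db\":\"default\"}");
        System.out.println(node.get("eventType").getTextValue()); // prints CREATE_TABLE
    }
}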