Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode in project asterixdb by apache.
The class ABinary, method toJSON().
@Override
public ObjectNode toJSON() {
    ObjectMapper om = new ObjectMapper();
    ObjectNode json = om.createObjectNode();
    int start = getStart();
    ArrayNode byteArray = om.createArrayNode();
    for (int i = 0; i < getLength(); i++) {
        byteArray.add(bytes[start + i]);
    }
    json.set("ABinary", byteArray);
    return json;
}
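A minimal round-trip sketch, not part of the asterixdb sources and assuming the plain com.fasterxml Jackson API, that reads the "ABinary" array produced above back into a byte[]:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

// Hypothetical helper, for illustration only: the reverse of toJSON() above.
public class ABinaryJsonRoundTrip {
    public static byte[] fromJson(String json) throws Exception {
        ObjectMapper om = new ObjectMapper();
        JsonNode root = om.readTree(json);
        ArrayNode byteArray = (ArrayNode) root.get("ABinary");
        byte[] bytes = new byte[byteArray.size()];
        for (int i = 0; i < byteArray.size(); i++) {
            // each element was written as a byte widened to int, so the cast is lossless
            bytes[i] = (byte) byteArray.get(i).asInt();
        }
        return bytes;
    }
}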
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode in project swagger-core by swagger-api.
The class PropertyDeserializer, method getEnum().
private static List<String> getEnum(JsonNode node, PropertyBuilder.PropertyId type) {
    final List<String> result = new ArrayList<String>();
    JsonNode detailNode = getDetailNode(node, type);
    if (detailNode != null) {
        ArrayNode an = (ArrayNode) detailNode;
        for (JsonNode child : an) {
            if (child instanceof TextNode || child instanceof NumericNode
                    || child instanceof IntNode || child instanceof LongNode
                    || child instanceof DoubleNode || child instanceof FloatNode) {
                result.add(child.asText());
            }
        }
    }
    return result.isEmpty() ? null : result;
}
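Since getEnum and getDetailNode are private to PropertyDeserializer, here is a self-contained sketch (not swagger-core code) of the same filtering over a plain Jackson ArrayNode, keeping only textual and numeric elements:

import java.util.ArrayList;
import java.util.List;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

public class EnumExtractionSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode schema = mapper.readTree("{\"enum\": [\"A\", \"B\", 3, true, null]}");
        List<String> values = new ArrayList<String>();
        JsonNode detail = schema.get("enum");
        if (detail instanceof ArrayNode) {
            for (JsonNode child : detail) {
                // isTextual()/isNumber() stand in for the instanceof chain above
                if (child.isTextual() || child.isNumber()) {
                    values.add(child.asText());
                }
            }
        }
        System.out.println(values); // [A, B, 3] -- the boolean and the null are skipped
    }
}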
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode in project swagger-core by swagger-api.
The class PropertyDeserializer, method getRequired().
private static List<String> getRequired(JsonNode node, PropertyBuilder.PropertyId type) {
    List<String> result = new ArrayList<String>();
    final JsonNode detailNode = getDetailNode(node, type);
    if (detailNode == null) {
        return result;
    }
    if (detailNode.isArray()) {
        ArrayNode arrayNode = (ArrayNode) detailNode;
        Iterator<JsonNode> fieldNameIter = arrayNode.iterator();
        while (fieldNameIter.hasNext()) {
            JsonNode item = fieldNameIter.next();
            result.add(item.asText());
        }
        return result;
    } else {
        throw new RuntimeException("Required property should be a list");
    }
}
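The same private-method caveat applies here; a compact sketch (hypothetical names, not swagger-core API) of the array-or-fail contract that getRequired enforces:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

public class RequiredExtractionSketch {
    static List<String> readRequired(JsonNode detailNode) {
        List<String> result = new ArrayList<String>();
        if (detailNode == null) {
            return result;
        }
        if (detailNode.isArray()) {
            Iterator<JsonNode> it = ((ArrayNode) detailNode).iterator();
            while (it.hasNext()) {
                result.add(it.next().asText());
            }
            return result;
        }
        throw new RuntimeException("Required property should be a list");
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        System.out.println(readRequired(mapper.readTree("[\"id\", \"name\"]"))); // [id, name]
        readRequired(mapper.readTree("\"id\""));                                 // throws RuntimeException
    }
}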
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode in project realm-java by realm.
The class RealmListNYTimesMultimediumDeserializer, method deserialize().
@Override
public List<NYTimesMultimedium> deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
    RealmList<NYTimesMultimedium> list = new RealmList<>();
    TreeNode treeNode = jp.getCodec().readTree(jp);
    if (!(treeNode instanceof ArrayNode)) {
        return list;
    }
    ArrayNode arrayNode = (ArrayNode) treeNode;
    for (JsonNode node : arrayNode) {
        NYTimesMultimedium nyTimesMultimedium = objectMapper.treeToValue(node, NYTimesMultimedium.class);
        list.add(nyTimesMultimedium);
    }
    return list;
}
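One common way to wire such a deserializer, assumed here rather than taken from the realm-java sources, is the @JsonDeserialize annotation on the model field it targets; the class and field names below are illustrative:

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.realm.RealmList;
import io.realm.RealmObject;

public class NYTimesStory extends RealmObject {
    // Jackson hands the "multimedia" JSON array to the custom deserializer above
    @JsonDeserialize(using = RealmListNYTimesMultimediumDeserializer.class)
    private RealmList<NYTimesMultimedium> multimedia;

    public RealmList<NYTimesMultimedium> getMultimedia() {
        return multimedia;
    }
}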
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode in project flink by apache.
The class JobDetailsHandlerTest, method compareJobDetails().
private static void compareJobDetails(AccessExecutionGraph originalJob, String json) throws IOException {
    JsonNode result = ArchivedJobGenerationUtils.mapper.readTree(json);
    Assert.assertEquals(originalJob.getJobID().toString(), result.get("jid").asText());
    Assert.assertEquals(originalJob.getJobName(), result.get("name").asText());
    Assert.assertEquals(originalJob.isStoppable(), result.get("isStoppable").asBoolean());
    Assert.assertEquals(originalJob.getState().name(), result.get("state").asText());
    Assert.assertEquals(originalJob.getStatusTimestamp(JobStatus.CREATED), result.get("start-time").asLong());
    Assert.assertEquals(originalJob.getStatusTimestamp(originalJob.getState()), result.get("end-time").asLong());
    Assert.assertEquals(originalJob.getStatusTimestamp(originalJob.getState()) - originalJob.getStatusTimestamp(JobStatus.CREATED), result.get("duration").asLong());
    JsonNode timestamps = result.get("timestamps");
    for (JobStatus status : JobStatus.values()) {
        Assert.assertEquals(originalJob.getStatusTimestamp(status), timestamps.get(status.name()).asLong());
    }
    ArrayNode tasks = (ArrayNode) result.get("vertices");
    int x = 0;
    for (AccessExecutionJobVertex expectedTask : originalJob.getVerticesTopologically()) {
        JsonNode task = tasks.get(x);
        Assert.assertEquals(expectedTask.getJobVertexId().toString(), task.get("id").asText());
        Assert.assertEquals(expectedTask.getName(), task.get("name").asText());
        Assert.assertEquals(expectedTask.getParallelism(), task.get("parallelism").asInt());
        Assert.assertEquals(expectedTask.getAggregateState().name(), task.get("status").asText());
        Assert.assertEquals(3, task.get("start-time").asLong());
        Assert.assertEquals(5, task.get("end-time").asLong());
        Assert.assertEquals(2, task.get("duration").asLong());
        JsonNode subtasksPerState = task.get("tasks");
        Assert.assertEquals(0, subtasksPerState.get(ExecutionState.CREATED.name()).asInt());
        Assert.assertEquals(0, subtasksPerState.get(ExecutionState.SCHEDULED.name()).asInt());
        Assert.assertEquals(0, subtasksPerState.get(ExecutionState.DEPLOYING.name()).asInt());
        Assert.assertEquals(0, subtasksPerState.get(ExecutionState.RUNNING.name()).asInt());
        Assert.assertEquals(1, subtasksPerState.get(ExecutionState.FINISHED.name()).asInt());
        Assert.assertEquals(0, subtasksPerState.get(ExecutionState.CANCELING.name()).asInt());
        Assert.assertEquals(0, subtasksPerState.get(ExecutionState.CANCELED.name()).asInt());
        Assert.assertEquals(0, subtasksPerState.get(ExecutionState.FAILED.name()).asInt());
        long expectedNumBytesIn = 0;
        long expectedNumBytesOut = 0;
        long expectedNumRecordsIn = 0;
        long expectedNumRecordsOut = 0;
        for (AccessExecutionVertex vertex : expectedTask.getTaskVertices()) {
            IOMetrics ioMetrics = vertex.getCurrentExecutionAttempt().getIOMetrics();
            expectedNumBytesIn += ioMetrics.getNumBytesInLocal() + ioMetrics.getNumBytesInRemote();
            expectedNumBytesOut += ioMetrics.getNumBytesOut();
            expectedNumRecordsIn += ioMetrics.getNumRecordsIn();
            expectedNumRecordsOut += ioMetrics.getNumRecordsOut();
        }
        JsonNode metrics = task.get("metrics");
        Assert.assertEquals(expectedNumBytesIn, metrics.get("read-bytes").asLong());
        Assert.assertEquals(expectedNumBytesOut, metrics.get("write-bytes").asLong());
        Assert.assertEquals(expectedNumRecordsIn, metrics.get("read-records").asLong());
        Assert.assertEquals(expectedNumRecordsOut, metrics.get("write-records").asLong());
        x++;
    }
    Assert.assertEquals(1, tasks.size());
    JsonNode statusCounts = result.get("status-counts");
    Assert.assertEquals(0, statusCounts.get(ExecutionState.CREATED.name()).asInt());
    Assert.assertEquals(0, statusCounts.get(ExecutionState.SCHEDULED.name()).asInt());
    Assert.assertEquals(0, statusCounts.get(ExecutionState.DEPLOYING.name()).asInt());
    Assert.assertEquals(1, statusCounts.get(ExecutionState.RUNNING.name()).asInt());
    Assert.assertEquals(0, statusCounts.get(ExecutionState.FINISHED.name()).asInt());
    Assert.assertEquals(0, statusCounts.get(ExecutionState.CANCELING.name()).asInt());
    Assert.assertEquals(0, statusCounts.get(ExecutionState.CANCELED.name()).asInt());
    Assert.assertEquals(0, statusCounts.get(ExecutionState.FAILED.name()).asInt());
    Assert.assertEquals(ArchivedJobGenerationUtils.mapper.readTree(originalJob.getJsonPlan()), result.get("plan"));
}
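A standalone sketch, not Flink code, showing the same kind of traversal over a "vertices" ArrayNode that the assertions above walk, here just totaling a per-vertex read-bytes metric:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

public class VertexMetricsSumSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"vertices\": ["
                + "{\"name\": \"source\", \"metrics\": {\"read-bytes\": 10}},"
                + "{\"name\": \"sink\", \"metrics\": {\"read-bytes\": 32}}]}";
        JsonNode root = new ObjectMapper().readTree(json);
        ArrayNode vertices = (ArrayNode) root.get("vertices");
        long totalReadBytes = 0;
        for (JsonNode vertex : vertices) {
            totalReadBytes += vertex.get("metrics").get("read-bytes").asLong();
        }
        System.out.println(totalReadBytes); // 42
    }
}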