Use of org.codehaus.jackson.JsonNode in project camel by apache.
The class GeoCoderProducer, method processCurrentLocation.
protected void processCurrentLocation(Exchange exchange) throws Exception {
    LOG.debug("Geocode for current address");
    String json = exchange.getContext().getTypeConverter().mandatoryConvertTo(String.class, new URL("http://freegeoip.net/json/"));
    if (isEmpty(json)) {
        throw new IllegalStateException("Got the unexpected value '" + json + "' for the geolocation");
    }
    LOG.debug("Geocode response {}", json);
    exchange.getIn().setHeader(GeoCoderConstants.STATUS, GeocoderStatus.OK);
    ObjectMapper mapper = new ObjectMapper();
    JsonNode node = mapper.readValue(json, JsonNode.class);
    JsonNode latitudeNode = notNull(node.get("latitude"), "latitude");
    JsonNode longitudeNode = notNull(node.get("longitude"), "longitude");
    String resLatlng = latitudeNode.asText() + "," + longitudeNode.asText();
    exchange.getIn().setHeader(GeoCoderConstants.LATLNG, resLatlng);
    JsonNode countryCode = node.get("country_code");
    JsonNode countryName = node.get("country_name");
    if (countryCode != null) {
        exchange.getIn().setHeader(GeoCoderConstants.COUNTRY_SHORT, countryCode.asText());
    }
    if (countryName != null) {
        exchange.getIn().setHeader(GeoCoderConstants.COUNTRY_LONG, countryName.asText());
    }
    JsonNode regionCode = node.get("region_code");
    JsonNode regionName = node.get("region_name");
    if (regionCode != null) {
        exchange.getIn().setHeader(GeoCoderConstants.REGION_CODE, regionCode.asText());
    }
    if (regionName != null) {
        exchange.getIn().setHeader(GeoCoderConstants.REGION_NAME, regionName.asText());
    }
    JsonNode city = node.get("city");
    if (city != null) {
        exchange.getIn().setHeader(GeoCoderConstants.CITY, city.asText());
    }
    // should we include body
    if (!endpoint.isHeadersOnly()) {
        exchange.getIn().setBody(json);
    }
}
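The pattern above is: convert the payload to a String, bind it to a generic JsonNode tree, and read individual fields with null checks so that optional fields do not break the exchange. The following is a minimal, self-contained sketch of that same idiom outside of Camel, using the org.codehaus.jackson (Jackson 1.x) artifact named in the heading; the class name and the JSON literal are made up for illustration and are not part of the Camel source.

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;

public class GeoJsonParseSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical payload in the shape returned by the freegeoip service.
        String json = "{\"latitude\":52.52,\"longitude\":13.405,"
                + "\"country_code\":\"DE\",\"country_name\":\"Germany\",\"city\":\"Berlin\"}";

        ObjectMapper mapper = new ObjectMapper();
        // Same call used in the producer: bind the raw text to a generic tree node.
        JsonNode node = mapper.readValue(json, JsonNode.class);

        // Mandatory fields: fail fast if they are missing.
        JsonNode latitude = node.get("latitude");
        JsonNode longitude = node.get("longitude");
        if (latitude == null || longitude == null) {
            throw new IllegalStateException("latitude/longitude missing in: " + json);
        }
        System.out.println("latlng = " + latitude.asText() + "," + longitude.asText());

        // Optional fields: only read them when present, mirroring the null checks above.
        JsonNode city = node.get("city");
        if (city != null) {
            System.out.println("city = " + city.asText());
        }
    }
}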
Use of org.codehaus.jackson.JsonNode in project flink by apache.
The class JobCancellationWithSavepointHandlersTest, method testFailedCancellation.
/**
 * Tests response when a request fails.
 */
@Test
public void testFailedCancellation() throws Exception {
    JobID jobId = new JobID();
    ExecutionGraphHolder holder = mock(ExecutionGraphHolder.class);
    ExecutionGraph graph = mock(ExecutionGraph.class);
    CheckpointCoordinator coord = mock(CheckpointCoordinator.class);
    when(holder.getExecutionGraph(eq(jobId), any(ActorGateway.class))).thenReturn(graph);
    when(graph.getCheckpointCoordinator()).thenReturn(coord);
    JobCancellationWithSavepointHandlers handlers = new JobCancellationWithSavepointHandlers(holder, EC);
    JobCancellationWithSavepointHandlers.TriggerHandler trigger = handlers.getTriggerHandler();
    JobCancellationWithSavepointHandlers.InProgressHandler progress = handlers.getInProgressHandler();
    Map<String, String> params = new HashMap<>();
    params.put("jobid", jobId.toString());
    params.put("targetDirectory", "custom-directory");
    ActorGateway jobManager = mock(ActorGateway.class);
    // Failed response from the job manager
    Future<Object> future = Futures.failed(new Exception("Test Exception"));
    when(jobManager.ask(any(Object.class), any(FiniteDuration.class))).thenReturn(future);
    // Trigger
    trigger.handleRequest(params, Collections.<String, String>emptyMap(), jobManager);
    verify(jobManager).ask(eq(new CancelJobWithSavepoint(jobId, "custom-directory")), any(FiniteDuration.class));
    // Query progress
    params.put("requestId", "1");
    FullHttpResponse response = progress.handleRequest(params, Collections.<String, String>emptyMap(), jobManager);
    assertEquals(HttpResponseStatus.INTERNAL_SERVER_ERROR, response.getStatus());
    assertEquals("application/json", response.headers().get(HttpHeaders.Names.CONTENT_TYPE));
    assertEquals(Integer.toString(response.content().readableBytes()), response.headers().get(HttpHeaders.Names.CONTENT_LENGTH));
    String json = response.content().toString(Charset.forName("UTF-8"));
    JsonNode root = new ObjectMapper().readTree(json);
    assertEquals("failed", root.get("status").getValueAsText());
    assertEquals("1", root.get("request-id").getValueAsText());
    assertEquals("Test Exception", root.get("cause").getValueAsText());
}
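The assertions at the end of the test parse the response body into a Jackson 1.x tree instead of comparing raw strings, which keeps them independent of field ordering and whitespace in the serialized JSON. Below is a minimal, detached sketch of that assertion idiom; the class name and the JSON literal are invented for illustration and stand in for the handler's real output.

import static org.junit.Assert.assertEquals;

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.Test;

public class JsonResponseAssertionSketch {

    @Test
    public void parsesErrorResponse() throws Exception {
        // Hypothetical body in the shape the in-progress handler produces on failure.
        String json = "{\"status\":\"failed\",\"request-id\":\"1\",\"cause\":\"Test Exception\"}";

        // Parse once into a tree, then assert on individual fields regardless of their order.
        JsonNode root = new ObjectMapper().readTree(json);
        assertEquals("failed", root.get("status").getValueAsText());
        assertEquals("1", root.get("request-id").getValueAsText());
        assertEquals("Test Exception", root.get("cause").getValueAsText());
    }
}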
Use of org.codehaus.jackson.JsonNode in project hive by apache.
The class TypeInfoToSchema, method getFields.
private List<Schema.Field> getFields(Schema.Field schemaField) {
    List<Schema.Field> fields = new ArrayList<Schema.Field>();
    JsonNode nullDefault = JsonNodeFactory.instance.nullNode();
    if (schemaField.schema().getType() == Schema.Type.RECORD) {
        for (Schema.Field field : schemaField.schema().getFields()) {
            fields.add(new Schema.Field(field.name(), field.schema(), field.doc(), nullDefault));
        }
    } else {
        fields.add(new Schema.Field(schemaField.name(), schemaField.schema(), schemaField.doc(), nullDefault));
    }
    return fields;
}
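Here the null JsonNode is passed as the Avro field default, which the Avro specification only accepts when the field's type permits null (typically a union with "null" listed first). The following standalone sketch shows that combination, assuming an Avro 1.7.x/1.8.x release where Schema.Field still takes an org.codehaus.jackson.JsonNode default; the class, record, and field names are illustrative only.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.avro.Schema;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.node.JsonNodeFactory;

public class NullableFieldSketch {

    public static void main(String[] args) {
        JsonNode nullDefault = JsonNodeFactory.instance.nullNode();

        // A field schema that accepts null: ["null", "string"]. Null must come first
        // in the union for a null default to be valid per the Avro spec.
        Schema nullableString = Schema.createUnion(Arrays.asList(
                Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.STRING)));

        List<Schema.Field> fields = new ArrayList<Schema.Field>();
        fields.add(new Schema.Field("name", nullableString, "optional name", nullDefault));

        Schema record = Schema.createRecord("Person", null, "example", false);
        record.setFields(fields);
        System.out.println(record.toString(true));
    }
}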
Use of org.codehaus.jackson.JsonNode in project SimianArmy by Netflix.
The class EddaASGJanitorCrawler, method getASGResourcesInRegion.
private List<Resource> getASGResourcesInRegion(String region, String... asgNames) {
    String url = eddaClient.getBaseUrl(region) + "/aws/autoScalingGroups;";
    if (asgNames != null && asgNames.length != 0) {
        url += StringUtils.join(asgNames, ',');
        LOGGER.info(String.format("Getting ASGs in region %s for %d ids", region, asgNames.length));
    } else {
        LOGGER.info(String.format("Getting all ASGs in region %s", region));
    }
    url += ";_expand:(autoScalingGroupName,createdTime,maxSize,suspendedProcesses:(processName,suspensionReason),"
            + "tags:(key,value),instances:(instanceId),loadBalancerNames,launchConfigurationName)";
    JsonNode jsonNode = null;
    try {
        jsonNode = eddaClient.getJsonNodeFromUrl(url);
    } catch (Exception e) {
        LOGGER.error(String.format("Failed to get JSON node from edda for ASGs in region %s.", region), e);
    }
    if (jsonNode == null || !jsonNode.isArray()) {
        throw new RuntimeException(String.format("Failed to get valid document from %s, got: %s", url, jsonNode));
    }
    Map<String, Long> lcNameToCreationTime = getLaunchConfigCreationTimes(region);
    List<Resource> resources = Lists.newArrayList();
    for (Iterator<JsonNode> it = jsonNode.getElements(); it.hasNext(); ) {
        resources.add(parseJsonElementToresource(region, it.next(), lcNameToCreationTime));
    }
    return resources;
}
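The crawler expects the Edda endpoint to return a JSON array, checks isArray(), and then walks the children with the Jackson 1.x getElements() iterator. A self-contained sketch of that traversal is below; the trimmed-down array literal stands in for the Edda response, and the eddaClient call and Resource parsing are intentionally omitted.

import java.util.Iterator;

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;

public class ArrayNodeTraversalSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical, trimmed-down Edda-style response.
        String json = "[{\"autoScalingGroupName\":\"asg-a\",\"maxSize\":4},"
                + "{\"autoScalingGroupName\":\"asg-b\",\"maxSize\":2}]";

        JsonNode jsonNode = new ObjectMapper().readTree(json);
        if (jsonNode == null || !jsonNode.isArray()) {
            throw new RuntimeException("Expected a JSON array, got: " + jsonNode);
        }

        // Jackson 1.x exposes the children of an array node via getElements().
        for (Iterator<JsonNode> it = jsonNode.getElements(); it.hasNext(); ) {
            JsonNode elem = it.next();
            System.out.println(elem.get("autoScalingGroupName").getTextValue()
                    + " maxSize=" + elem.get("maxSize").getIntValue());
        }
    }
}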
Use of org.codehaus.jackson.JsonNode in project SimianArmy by Netflix.
The class EddaASGJanitorCrawler, method getLaunchConfigCreationTimes.
private Map<String, Long> getLaunchConfigCreationTimes(String region) {
    LOGGER.info(String.format("Getting launch configuration creation times in region %s", region));
    String url = eddaClient.getBaseUrl(region) + "/aws/launchConfigurations;_expand:(launchConfigurationName,createdTime)";
    JsonNode jsonNode = null;
    try {
        jsonNode = eddaClient.getJsonNodeFromUrl(url);
    } catch (Exception e) {
        LOGGER.error(String.format("Failed to get JSON node from edda for lc creation times in region %s.", region), e);
    }
    if (jsonNode == null || !jsonNode.isArray()) {
        throw new RuntimeException(String.format("Failed to get valid document from %s, got: %s", url, jsonNode));
    }
    Map<String, Long> nameToCreationTime = Maps.newHashMap();
    for (Iterator<JsonNode> it = jsonNode.getElements(); it.hasNext(); ) {
        JsonNode elem = it.next();
        nameToCreationTime.put(elem.get("launchConfigurationName").getTextValue(), elem.get("createdTime").getLongValue());
    }
    return nameToCreationTime;
}
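Each element of the launchConfigurations array is reduced to a (name, createdTime) pair through the typed getters getTextValue() and getLongValue(). A standalone sketch of that reduction follows, with a made-up payload; since get() returns null for absent fields, the sketch also skips incomplete elements rather than risk a NullPointerException, which is a defensive addition and not part of the SimianArmy code above.

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;

public class LaunchConfigTimesSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical payload in the shape requested by the _expand matrix parameter.
        String json = "[{\"launchConfigurationName\":\"lc-1\",\"createdTime\":1467331200000},"
                + "{\"launchConfigurationName\":\"lc-2\",\"createdTime\":1467417600000}]";

        JsonNode jsonNode = new ObjectMapper().readTree(json);
        Map<String, Long> nameToCreationTime = new HashMap<String, Long>();
        for (Iterator<JsonNode> it = jsonNode.getElements(); it.hasNext(); ) {
            JsonNode elem = it.next();
            JsonNode name = elem.get("launchConfigurationName");
            JsonNode created = elem.get("createdTime");
            // Skip incomplete elements instead of dereferencing a null node.
            if (name != null && created != null) {
                nameToCreationTime.put(name.getTextValue(), created.getLongValue());
            }
        }
        System.out.println(nameToCreationTime);
    }
}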