Use of com.fasterxml.jackson.core.JsonProcessingException in project kafka by apache.
The class VerifiableProducer, method toJsonString:
private String toJsonString(Map<String, Object> data) {
    String json;
    try {
        ObjectMapper mapper = new ObjectMapper();
        json = mapper.writeValueAsString(data);
    } catch (JsonProcessingException e) {
        json = "Bad data can't be written as json: " + e.getMessage();
    }
    return json;
}
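As a hedged, standalone sketch of the same pattern (not part of the Kafka source; the class name SerializationSketch and the sample map contents are hypothetical), the version below reuses a single ObjectMapper, which is the more common idiom since the instance is thread-safe once configured and construction is relatively costly:

import java.util.LinkedHashMap;
import java.util.Map;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical, self-contained sketch; not part of the Kafka source.
public class SerializationSketch {
    // Shared mapper: thread-safe after configuration, cheaper than one per call.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    static String toJsonString(Map<String, Object> data) {
        try {
            return MAPPER.writeValueAsString(data);
        } catch (JsonProcessingException e) {
            // Same fallback behaviour as the snippet above: return a diagnostic string.
            return "Bad data can't be written as json: " + e.getMessage();
        }
    }

    public static void main(String[] args) {
        Map<String, Object> data = new LinkedHashMap<>();
        data.put("name", "producer-1");
        data.put("acked", 42);
        System.out.println(toJsonString(data)); // {"name":"producer-1","acked":42}
    }
}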
Use of com.fasterxml.jackson.core.JsonProcessingException in project pinot by linkedin.
The class AlertJobRunnerV2, method createTasks:
private List<Long> createTasks(DateTime monitoringWindowStartTime, DateTime monitoringWindowEndTime) {
    List<Long> taskIds = new ArrayList<>();
    try {
        List<AlertTaskInfo> tasks = taskGenerator.createAlertTasksV2(alertJobContext, monitoringWindowStartTime, monitoringWindowEndTime);
        for (AlertTaskInfo taskInfo : tasks) {
            String taskInfoJson = null;
            try {
                taskInfoJson = OBJECT_MAPPER.writeValueAsString(taskInfo);
            } catch (JsonProcessingException e) {
                LOG.error("Exception when converting AlertTaskInfo {} to jsonString", taskInfo, e);
            }
            TaskDTO taskSpec = new TaskDTO();
            taskSpec.setTaskType(TaskConstants.TaskType.ALERT2);
            taskSpec.setJobName(alertJobContext.getJobName());
            taskSpec.setStatus(TaskConstants.TaskStatus.WAITING);
            taskSpec.setStartTime(System.currentTimeMillis());
            taskSpec.setTaskInfo(taskInfoJson);
            taskSpec.setJobId(alertJobContext.getJobExecutionId());
            long taskId = taskDAO.save(taskSpec);
            taskIds.add(taskId);
            LOG.info("Created alert task {} with taskId {}", taskSpec, taskId);
        }
    } catch (Exception e) {
        LOG.error("Exception in creating alert tasks", e);
    }
    return taskIds;
}
Use of com.fasterxml.jackson.core.JsonProcessingException in project pinot by linkedin.
The class DetectionJobRunner, method createTasks:
private List<Long> createTasks(DetectionJobContext detectionJobContext, List<DateTime> monitoringWindowStartTimes, List<DateTime> monitoringWindowEndTimes) {
    List<Long> taskIds = new ArrayList<>();
    try {
        List<DetectionTaskInfo> tasks = taskGenerator.createDetectionTasks(detectionJobContext, monitoringWindowStartTimes, monitoringWindowEndTimes);
        for (DetectionTaskInfo taskInfo : tasks) {
            String taskInfoJson = null;
            try {
                taskInfoJson = OBJECT_MAPPER.writeValueAsString(taskInfo);
            } catch (JsonProcessingException e) {
                LOG.error("Exception when converting DetectionTaskInfo {} to jsonString", taskInfo, e);
            }
            TaskDTO taskSpec = new TaskDTO();
            taskSpec.setTaskType(TaskType.ANOMALY_DETECTION);
            taskSpec.setJobName(detectionJobContext.getJobName());
            taskSpec.setStatus(TaskStatus.WAITING);
            taskSpec.setStartTime(System.currentTimeMillis());
            taskSpec.setTaskInfo(taskInfoJson);
            taskSpec.setJobId(detectionJobContext.getJobExecutionId());
            long taskId = DAO_REGISTRY.getTaskDAO().save(taskSpec);
            taskIds.add(taskId);
            LOG.info("Created anomalyTask {} with taskId {}", taskSpec, taskId);
        }
    } catch (Exception e) {
        LOG.error("Exception in creating detection tasks", e);
    }
    return taskIds;
}
Use of com.fasterxml.jackson.core.JsonProcessingException in project pinot by linkedin.
The class DataCompletenessJobRunner, method createTasks:
public List<Long> createTasks() {
    List<Long> taskIds = new ArrayList<>();
    try {
        LOG.info("Creating data completeness checker tasks");
        List<DataCompletenessTaskInfo> dataCompletenessTasks = createDataCompletenessTasks(dataCompletenessJobContext);
        LOG.info("DataCompleteness tasks {}", dataCompletenessTasks);
        for (DataCompletenessTaskInfo taskInfo : dataCompletenessTasks) {
            String taskInfoJson = null;
            try {
                taskInfoJson = OBJECT_MAPPER.writeValueAsString(taskInfo);
            } catch (JsonProcessingException e) {
                LOG.error("Exception when converting DataCompletenessTaskInfo {} to jsonString", taskInfo, e);
            }
            TaskDTO taskSpec = new TaskDTO();
            taskSpec.setTaskType(TaskType.DATA_COMPLETENESS);
            taskSpec.setJobName(dataCompletenessJobContext.getJobName());
            taskSpec.setStatus(TaskStatus.WAITING);
            taskSpec.setStartTime(System.currentTimeMillis());
            taskSpec.setTaskInfo(taskInfoJson);
            taskSpec.setJobId(dataCompletenessJobContext.getJobExecutionId());
            long taskId = DAO_REGISTRY.getTaskDAO().save(taskSpec);
            taskIds.add(taskId);
            LOG.info("Created dataCompleteness task {} with taskId {}", taskSpec, taskId);
        }
    } catch (Exception e) {
        LOG.error("Exception in creating data completeness tasks", e);
    }
    return taskIds;
}
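The three Pinot runners above repeat the same "serialize the task info, log on failure, persist the TaskDTO" pattern. A minimal sketch of extracting just the serialization step into a shared helper is shown below; the class and method names are hypothetical and not part of the Pinot codebase, and it assumes SLF4J for logging as the runners do:

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical helper, for illustration only; not Pinot API.
public final class TaskInfoSerializer {
    private static final Logger LOG = LoggerFactory.getLogger(TaskInfoSerializer.class);
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    private TaskInfoSerializer() {
    }

    // Returns the JSON form of the task info, or null if serialization fails,
    // mirroring the "log and continue with a null taskInfo" behaviour of the runners above.
    public static String toJsonOrNull(Object taskInfo) {
        try {
            return OBJECT_MAPPER.writeValueAsString(taskInfo);
        } catch (JsonProcessingException e) {
            LOG.error("Exception when converting {} to jsonString", taskInfo, e);
            return null;
        }
    }
}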
Use of com.fasterxml.jackson.core.JsonProcessingException in project druid by druid-io.
The class JobHelper, method getURIFromSegment:
public static URI getURIFromSegment(DataSegment dataSegment) {
    // There is no good way around this...
    // TODO: add getURI() to URIDataPuller
    final Map<String, Object> loadSpec = dataSegment.getLoadSpec();
    final String type = loadSpec.get("type").toString();
    final URI segmentLocURI;
    if ("s3_zip".equals(type)) {
        segmentLocURI = URI.create(String.format("s3n://%s/%s", loadSpec.get("bucket"), loadSpec.get("key")));
    } else if ("hdfs".equals(type)) {
        segmentLocURI = URI.create(loadSpec.get("path").toString());
    } else if ("google".equals(type)) {
        // Segment names contain : in their path.
        // Google Cloud Storage supports : but Hadoop does not.
        // This becomes an issue when re-indexing using the current segments.
        // The Hadoop getSplits code doesn't understand the : and returns "Relative path in absolute URI".
        // This could be fixed using the same code that generates path names for hdfs segments using
        // getHdfsStorageDir. But that wouldn't fix this issue for people who already have segments with ":".
        // Because of this we just URL encode the : making everything work as it should.
        segmentLocURI = URI.create(String.format("gs://%s/%s", loadSpec.get("bucket"), loadSpec.get("path").toString().replace(":", "%3A")));
    } else if ("local".equals(type)) {
        try {
            segmentLocURI = new URI("file", null, loadSpec.get("path").toString(), null, null);
        } catch (URISyntaxException e) {
            throw new ISE(e, "Unable to form simple file uri");
        }
    } else {
        try {
            throw new IAE("Cannot figure out loadSpec %s", HadoopDruidConverterConfig.jsonMapper.writeValueAsString(loadSpec));
        } catch (JsonProcessingException e) {
            throw new ISE("Cannot write Map with json mapper");
        }
    }
    return segmentLocURI;
}
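To make the branch logic concrete, the sketch below shows the URI strings the s3_zip and google branches would produce for sample loadSpec maps. The bucket and path values are hypothetical, and the sketch reproduces only the formatting expressions from the method above rather than calling the Druid API:

import java.net.URI;
import java.util.HashMap;
import java.util.Map;

// Hypothetical sample values; illustrates only the s3_zip and google branches.
public class LoadSpecUriSketch {
    public static void main(String[] args) {
        Map<String, Object> s3Spec = new HashMap<>();
        s3Spec.put("type", "s3_zip");
        s3Spec.put("bucket", "my-bucket");
        s3Spec.put("key", "datasource/2017/index.zip");
        // s3_zip branch: s3n://<bucket>/<key>
        URI s3Uri = URI.create(String.format("s3n://%s/%s", s3Spec.get("bucket"), s3Spec.get("key")));
        System.out.println(s3Uri); // s3n://my-bucket/datasource/2017/index.zip

        Map<String, Object> gcsSpec = new HashMap<>();
        gcsSpec.put("type", "google");
        gcsSpec.put("bucket", "my-bucket");
        gcsSpec.put("path", "datasource/2017-01-01T00:00:00.000Z/index.zip");
        // google branch: ':' is percent-encoded so Hadoop path handling accepts the URI.
        URI gcsUri = URI.create(String.format("gs://%s/%s", gcsSpec.get("bucket"), gcsSpec.get("path").toString().replace(":", "%3A")));
        System.out.println(gcsUri); // gs://my-bucket/datasource/2017-01-01T00%3A00%3A00.000Z/index.zip
    }
}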