Use of org.codehaus.jackson.map.ObjectMapper in project pinot by linkedin.
In the class SegmentMetadataImpl, the method toJson:
/**
 * Converts segment metadata to json.
 * @param columnFilter list only the columns in the set. Lists all the columns if
 *                     the parameter value is null
 * @return json representation of segment metadata
 */
public JSONObject toJson(@Nullable Set<String> columnFilter) throws JSONException {
  JSONObject rootMeta = new JSONObject();
  try {
    rootMeta.put("segmentName", _segmentName);
    rootMeta.put("schemaName", _schema != null ? _schema.getSchemaName() : JSONObject.NULL);
    rootMeta.put("crc", _crc);
    rootMeta.put("creationTimeMillis", _creationTime);
    TimeZone timeZone = TimeZone.getTimeZone("UTC");
    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss:SSS' UTC'");
    dateFormat.setTimeZone(timeZone);
    String creationTimeStr = _creationTime != Long.MIN_VALUE ? dateFormat.format(new Date(_creationTime)) : "";
    rootMeta.put("creationTimeReadable", creationTimeStr);
    rootMeta.put("timeGranularitySec", _timeGranularity != null ? _timeGranularity.getStandardSeconds() : null);
    if (_timeInterval == null) {
      rootMeta.put("startTimeMillis", (String) null);
      rootMeta.put("startTimeReadable", "null");
      rootMeta.put("endTimeMillis", (String) null);
      rootMeta.put("endTimeReadable", "null");
    } else {
      rootMeta.put("startTimeMillis", _timeInterval.getStartMillis());
      rootMeta.put("startTimeReadable", _timeInterval.getStart().toString());
      rootMeta.put("endTimeMillis", _timeInterval.getEndMillis());
      rootMeta.put("endTimeReadable", _timeInterval.getEnd().toString());
    }
    rootMeta.put("pushTimeMillis", _pushTime);
    String pushTimeStr = _pushTime != Long.MIN_VALUE ? dateFormat.format(new Date(_pushTime)) : "";
    rootMeta.put("pushTimeReadable", pushTimeStr);
    rootMeta.put("refreshTimeMillis", _refreshTime);
    String refreshTimeStr = _refreshTime != Long.MIN_VALUE ? dateFormat.format(new Date(_refreshTime)) : "";
    rootMeta.put("refreshTimeReadable", refreshTimeStr);
    rootMeta.put("segmentVersion", _segmentVersion.toString());
    rootMeta.put("hasStarTree", hasStarTree());
    rootMeta.put("creatorName", _creatorName == null ? JSONObject.NULL : _creatorName);
    rootMeta.put("paddingCharacter", String.valueOf(_paddingCharacter));
    rootMeta.put("hllLog2m", _hllLog2m);
    JSONArray columnsJson = new JSONArray();
    ObjectMapper mapper = new ObjectMapper();
    for (String column : _allColumns) {
      if (columnFilter != null && !columnFilter.contains(column)) {
        continue;
      }
      ColumnMetadata columnMetadata = _columnMetadataMap.get(column);
      // Serialize each column's metadata with Jackson, then re-parse the string into a JSONObject.
      JSONObject columnJson = new JSONObject(mapper.writeValueAsString(columnMetadata));
      columnsJson.put(columnJson);
    }
    rootMeta.put("columns", columnsJson);
    return rootMeta;
  } catch (Exception e) {
    LOGGER.error("Failed to convert field to json for segment: {}", _segmentName, e);
    throw new RuntimeException("Failed to convert segment metadata to json", e);
  }
}
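The ObjectMapper usage here is the per-column loop: each ColumnMetadata bean is serialized with Jackson and the resulting string is re-parsed into an org.json JSONObject so it can be nested under "columns". Below is a minimal, self-contained sketch of that pattern; ColumnInfo is a hypothetical stand-in for ColumnMetadata, not a Pinot class.

import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONArray;
import org.json.JSONObject;

public class ColumnJsonSketch {

  // Hypothetical bean standing in for ColumnMetadata.
  public static class ColumnInfo {
    private final String columnName;
    private final int cardinality;

    public ColumnInfo(String columnName, int cardinality) {
      this.columnName = columnName;
      this.cardinality = cardinality;
    }

    public String getColumnName() { return columnName; }
    public int getCardinality() { return cardinality; }
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    JSONArray columnsJson = new JSONArray();
    for (ColumnInfo column : new ColumnInfo[] { new ColumnInfo("daysSinceEpoch", 366) }) {
      // Jackson produces a JSON string; JSONObject re-parses it so the column
      // can be nested inside the larger metadata object.
      columnsJson.put(new JSONObject(mapper.writeValueAsString(column)));
    }
    JSONObject rootMeta = new JSONObject();
    rootMeta.put("segmentName", "mySegment_0");
    rootMeta.put("columns", columnsJson);
    System.out.println(rootMeta.toString(2));
  }
}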
Use of org.codehaus.jackson.map.ObjectMapper in project pinot by linkedin.
In the class HybridClusterScanComparisonIntegrationTest, the method runQuery:
protected void runQuery(String pqlQuery, ScanBasedQueryProcessor scanBasedQueryProcessor, boolean displayStatus, String scanResult) throws Exception {
  JSONObject scanJson;
  if (scanResult == null) {
    QueryResponse scanResponse = scanBasedQueryProcessor.processQuery(pqlQuery);
    // Serialize the scan-based response with Jackson so it can be logged and re-parsed for comparison.
    String scanRspStr = new ObjectMapper().writeValueAsString(scanResponse);
    if (_scanRspFileWriter != null) {
      if (scanRspStr.contains("\n")) {
        throw new RuntimeException("We don't handle new lines in json responses yet. The reader will parse newline as separator between query responses");
      }
      _scanRspFileWriter.write(scanRspStr + "\n");
    }
    scanJson = new JSONObject(scanRspStr);
  } else {
    scanJson = new JSONObject(scanResult);
  }
  JSONObject pinotJson = postQuery(pqlQuery);
  QueryComparison.setCompareNumDocs(false);
  try {
    QueryComparison.ComparisonStatus comparisonStatus = QueryComparison.compareWithEmpty(pinotJson, scanJson);
    if (comparisonStatus.equals(QueryComparison.ComparisonStatus.FAILED)) {
      _compareStatusFileWriter.write("\nQuery comparison failed for query " + _nQueriesRead + ":" + pqlQuery + "\n" + "Scan json: " + scanJson + "\n" + "Pinot json: " + pinotJson + "\n");
      _failedQueries.getAndIncrement();
    } else {
      _successfulQueries.getAndIncrement();
      if (comparisonStatus.equals(QueryComparison.ComparisonStatus.EMPTY)) {
        _emptyResults.getAndIncrement();
      } else if (_logMatchingResults) {
        _compareStatusFileWriter.write("\nMatched for query:" + pqlQuery + "\n" + scanJson + "\n");
      }
    }
    _compareStatusFileWriter.flush();
  } catch (Exception e) {
    _compareStatusFileWriter.write("Caught exception while running query comparison, failed for query " + pqlQuery + "\n" + "Scan json: " + scanJson + "\n" + "Pinot json: " + pinotJson + "\n");
    _failedQueries.getAndIncrement();
    _compareStatusFileWriter.flush();
  }
  int totalQueries = _successfulQueries.get() + _failedQueries.get();
  if (displayStatus || totalQueries % 5000 == 0) {
    doDisplayStatus(totalQueries);
  }
}
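The test writes each serialized scan response as a single line of JSON, which is why it rejects responses containing newlines: the reader treats a newline as the separator between query responses. A rough sketch of that one-response-per-line round trip follows, using a hypothetical QueryStats bean and file name in place of the real QueryResponse and writer.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.FileWriter;
import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONObject;

public class ResponseLogSketch {

  // Hypothetical stand-in for the scan processor's QueryResponse.
  public static class QueryStats {
    private final long numDocsScanned = 42;
    public long getNumDocsScanned() { return numDocsScanned; }
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    String line = mapper.writeValueAsString(new QueryStats());
    if (line.contains("\n")) {
      // Same guard as the test: an embedded newline would break the one-per-line format.
      throw new RuntimeException("Serialized response contains a newline");
    }

    FileWriter writer = new FileWriter("scan-responses.txt");
    writer.write(line + "\n");
    writer.close();

    BufferedReader reader = new BufferedReader(new FileReader("scan-responses.txt"));
    JSONObject parsed = new JSONObject(reader.readLine());  // re-parse one response for comparison
    reader.close();
    System.out.println(parsed.getLong("numDocsScanned"));
  }
}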
Use of org.codehaus.jackson.map.ObjectMapper in project head by mifos.
In the class CenterRESTController, the method createCenterMaping:
private ObjectMapper createCenterMaping() {
  ObjectMapper om = new ObjectMapper();
  // Register mix-in classes so the Jackson annotations live outside the DTOs themselves.
  om.getDeserializationConfig().addMixInAnnotations(CreateCenterDetailsDto.class, CenterCreationDetailMixIn.class);
  om.getDeserializationConfig().addMixInAnnotations(CreationAddresDto.class, CreationAddresDtoMixIn.class);
  om.getDeserializationConfig().addMixInAnnotations(CreationFeeDto.class, CreationFeeDtoMixIn.class);
  om.getDeserializationConfig().addMixInAnnotations(CreationMeetingDto.class, CreationMeetingDtoMixIn.class);
  // Accept numbers with leading zeros (e.g. "01") in the incoming JSON.
  om.getJsonFactory().configure(JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS, true);
  return om;
}
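Mix-ins let the mifos DTOs stay free of Jackson annotations: the annotations are declared on a separate mix-in class and attached to the target type when the mapper is configured. A minimal sketch of the same registration call, with a hypothetical AddressDto/AddressMixIn pair standing in for the project's DTO and mix-in classes:

import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.map.ObjectMapper;

public class MixInSketch {

  // Hypothetical DTO standing in for CreationAddresDto; it carries no Jackson annotations.
  public static class AddressDto {
    private String city;
    public String getCity() { return city; }
    public void setCity(String city) { this.city = city; }
  }

  // Mix-in: annotations declared here are applied to AddressDto at runtime.
  @JsonIgnoreProperties(ignoreUnknown = true)
  abstract static class AddressMixIn {
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper om = new ObjectMapper();
    om.getDeserializationConfig().addMixInAnnotations(AddressDto.class, AddressMixIn.class);
    // "extra" is not a property of AddressDto; the mix-in tells Jackson to ignore it.
    AddressDto dto = om.readValue("{\"city\":\"Pune\",\"extra\":1}", AddressDto.class);
    System.out.println(dto.getCity());
  }
}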
Use of org.codehaus.jackson.map.ObjectMapper in project head by mifos.
In the class CollectionSheetRESTController, the method saveCollectionSheet:
@RequestMapping(value = "/collectionsheet/save", method = RequestMethod.POST)
@ResponseBody
public Map<String, Object> saveCollectionSheet(@RequestBody JSONSaveCollectionsheet request) throws Throwable {
Map<String, Object> map = new HashMap<String, Object>();
ObjectMapper om = createObjectMapper();
List<InvalidSaveCollectionSheetReason> reasons = new ArrayList<InvalidSaveCollectionSheetReason>();
CollectionSheetErrorsDto errors = null;
SaveCollectionSheetDto saveCollectionSheetDto = null;
try {
saveCollectionSheetDto = om.readValue(request.getJson(), SaveCollectionSheetDto.class);
} catch (JsonMappingException e) {
if (e.getCause() instanceof SaveCollectionSheetException) {
reasons.addAll(((SaveCollectionSheetException) e.getCause()).getInvalidSaveCollectionSheetReasons());
} else {
throw e.getCause();
}
}
if (saveCollectionSheetDto != null) {
try {
errors = collectionSheetServiceFacade.saveCollectionSheet(saveCollectionSheetDto);
map.put("errors", errors != null ? errors.getErrorText() : null);
} catch (MifosRuntimeException e) {
map.put("errors", e.getMessage());
}
}
map.put("invalidCollectionSheet", reasons);
return map;
}
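The only ObjectMapper call here is the binding step: readValue turns the posted JSON into a SaveCollectionSheetDto, and binding problems surface as JsonMappingException, whose cause the controller inspects for domain-specific validation failures. A simplified sketch of that readValue/JsonMappingException pattern, using a hypothetical PaymentDto in place of SaveCollectionSheetDto:

import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;

public class ReadValueSketch {

  // Hypothetical DTO standing in for SaveCollectionSheetDto.
  public static class PaymentDto {
    private String accountId;
    private double amount;
    public String getAccountId() { return accountId; }
    public void setAccountId(String accountId) { this.accountId = accountId; }
    public double getAmount() { return amount; }
    public void setAmount(double amount) { this.amount = amount; }
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper om = new ObjectMapper();
    PaymentDto dto = null;
    try {
      // The non-numeric "amount" value makes the binding fail with JsonMappingException.
      dto = om.readValue("{\"accountId\":\"0002\",\"amount\":\"not-a-number\"}", PaymentDto.class);
    } catch (JsonMappingException e) {
      // The controller above checks e.getCause() for a SaveCollectionSheetException;
      // here we simply report the mapping problem.
      System.out.println("Mapping failed: " + e.getMessage());
    }
    System.out.println(dto == null ? "binding failed, no DTO" : dto.getAccountId());
  }
}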
Use of org.codehaus.jackson.map.ObjectMapper in project head by mifos.
In the class GroupRESTController, the method createGroup:
@RequestMapping(value = "group/create", method = RequestMethod.POST)
@ResponseBody
public Map<String, String> createGroup(@RequestBody String request) throws Throwable {
ObjectMapper om = createGroupMapping();
CreateGroupCreationDetailDto creationDetail = null;
MeetingBO meetingBO = null;
try {
creationDetail = om.readValue(request, CreateGroupCreationDetailDto.class);
} catch (JsonMappingException e) {
throw e.getCause();
}
validate(creationDetail);
meetingBO = (MeetingBO) creationDetail.getMeeting().toBO();
GroupCreationDetail group = createGroup(creationDetail);
CustomerDetailsDto groupDetails = groupServiceFacade.createNewGroup(group, meetingBO.toDto());
GroupInformationDto groupInfo = groupServiceFacade.getGroupInformationDto(groupDetails.getGlobalCustNum());
Map<String, String> map = new HashMap<String, String>();
map.put("status", "success");
map.put("globalCusNum", groupInfo.getGroupDisplay().getGlobalCustNum());
map.put("accountNum", groupInfo.getCustomerAccountSummary().getGlobalAccountNum());
map.put("address", groupInfo.getAddress().getDisplayAddress());
map.put("city", groupInfo.getAddress().getCity());
map.put("state", groupInfo.getAddress().getState());
map.put("country", groupInfo.getAddress().getCountry());
map.put("postal code", groupInfo.getAddress().getZip());
map.put("phone", groupInfo.getAddress().getPhoneNumber());
map.put("dispalyName", groupInfo.getGroupDisplay().getDisplayName());
map.put("externalId", groupInfo.getGroupDisplay().getExternalId());
map.put("loanOfficer", groupInfo.getGroupDisplay().getLoanOfficerName());
return map;
}