Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project hadoop by apache.
The class StartupProgressServlet, method doGet:
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    resp.setContentType("application/json; charset=UTF-8");
    StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext(getServletContext());
    StartupProgressView view = prog.createView();
    JsonGenerator json = new JsonFactory().createGenerator(resp.getWriter());
    try {
        json.writeStartObject();
        json.writeNumberField(ELAPSED_TIME, view.getElapsedTime());
        json.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete());
        json.writeArrayFieldStart(PHASES);
        for (Phase phase : view.getPhases()) {
            json.writeStartObject();
            json.writeStringField(NAME, phase.getName());
            json.writeStringField(DESC, phase.getDescription());
            json.writeStringField(STATUS, view.getStatus(phase).toString());
            json.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete(phase));
            json.writeNumberField(ELAPSED_TIME, view.getElapsedTime(phase));
            writeStringFieldIfNotNull(json, FILE, view.getFile(phase));
            writeNumberFieldIfDefined(json, SIZE, view.getSize(phase));
            json.writeArrayFieldStart(STEPS);
            for (Step step : view.getSteps(phase)) {
                json.writeStartObject();
                StepType type = step.getType();
                if (type != null) {
                    json.writeStringField(NAME, type.getName());
                    json.writeStringField(DESC, type.getDescription());
                }
                json.writeNumberField(COUNT, view.getCount(phase, step));
                writeStringFieldIfNotNull(json, FILE, step.getFile());
                writeNumberFieldIfDefined(json, SIZE, step.getSize());
                json.writeNumberField(TOTAL, view.getTotal(phase, step));
                json.writeNumberField(PERCENT_COMPLETE, view.getPercentComplete(phase, step));
                json.writeNumberField(ELAPSED_TIME, view.getElapsedTime(phase, step));
                json.writeEndObject();
            }
            json.writeEndArray();
            json.writeEndObject();
        }
        json.writeEndArray();
        json.writeEndObject();
    } finally {
        IOUtils.cleanup(LOG, json);
    }
}
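The same nested object/array pattern can be tried in isolation. Below is a minimal, self-contained sketch (not the Hadoop code) that streams one object containing an array of objects and closes the generator in a finally block; the field names and values are made up for illustration. It uses the vanilla com.fasterxml.jackson.core package, which exposes the same API as the flink-shaded relocation.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import java.io.IOException;
import java.io.StringWriter;

public class NestedJsonSketch {
    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        JsonGenerator json = new JsonFactory().createGenerator(out);
        try {
            json.writeStartObject();
            json.writeNumberField("elapsedTime", 1234L);          // top-level scalar field
            json.writeArrayFieldStart("phases");                  // "phases": [ ...
            json.writeStartObject();
            json.writeStringField("name", "LoadingFsImage");      // illustrative phase entry
            json.writeNumberField("percentComplete", 0.5f);
            json.writeEndObject();
            json.writeEndArray();
            json.writeEndObject();
        } finally {
            json.close();                                         // flushes and releases buffers
        }
        System.out.println(out); // {"elapsedTime":1234,"phases":[{"name":"LoadingFsImage","percentComplete":0.5}]}
    }
}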
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project pinpoint by naver.
The class MappingJackson2JsonpView, method renderMergedOutputModel:
@Override
protected void renderMergedOutputModel(Map<String, Object> model, HttpServletRequest request, HttpServletResponse response) throws Exception {
    Object value = filterModel(model);
    JsonGenerator generator = this.objectMapper.getFactory().createGenerator(response.getOutputStream(), this.encoding);
    if (this.prefixJson) {
        generator.writeRaw("{} && ");
    }
    final String callBackParameter = getCallBackParameter(request);
    if (StringUtils.isEmpty(callBackParameter)) {
        this.objectMapper.writeValue(generator, value);
    } else {
        generator.writeRaw(callBackParameter);
        generator.writeRaw(cbPrefix);
        this.objectMapper.writeValue(generator, value);
        generator.writeRaw(cbSuffix);
        generator.writeRaw(cbEnd);
    }
    generator.flush();
}
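The key idea is that writeRaw emits uninterpreted text around a value serialized by the ObjectMapper onto the same generator, producing a JSONP-style callback wrapper. A minimal sketch of that idea follows; the "(" and ");" wrapper strings are assumptions standing in for the cbPrefix/cbSuffix/cbEnd constants, and the callback name is hard-coded instead of being read from the request.

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Collections;

public class JsonpSketch {
    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        StringWriter out = new StringWriter();
        JsonGenerator generator = mapper.getFactory().createGenerator(out);
        String callback = "handleResponse";                       // normally taken from the request
        generator.writeRaw(callback);
        generator.writeRaw("(");                                  // assumed equivalent of cbPrefix
        mapper.writeValue(generator, Collections.singletonMap("status", "ok"));
        generator.writeRaw(");");                                 // assumed equivalent of cbSuffix + cbEnd
        generator.flush();
        System.out.println(out); // handleResponse({"status":"ok"});
    }
}

Note that ObjectMapper.writeValue(JsonGenerator, Object) does not close the generator, which is what makes it possible to keep writing raw text after the value.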
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project killbill by killbill.
The class AdminResource, method getQueueEntries:
@GET
@Path("/queues")
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Get queues entries", response = Response.class)
@ApiResponses(value = {})
public Response getQueueEntries(@QueryParam("accountId") final String accountIdStr,
                                @QueryParam("queueName") final String queueName,
                                @QueryParam("serviceName") final String serviceName,
                                @QueryParam("withHistory") @DefaultValue("true") final Boolean withHistory,
                                @QueryParam("minDate") final String minDateOrNull,
                                @QueryParam("maxDate") final String maxDateOrNull,
                                @QueryParam("withInProcessing") @DefaultValue("true") final Boolean withInProcessing,
                                @QueryParam("withBusEvents") @DefaultValue("true") final Boolean withBusEvents,
                                @QueryParam("withNotifications") @DefaultValue("true") final Boolean withNotifications,
                                @javax.ws.rs.core.Context final HttpServletRequest request) {
    final TenantContext tenantContext = context.createContext(request);
    final Long tenantRecordId = recordIdApi.getRecordId(tenantContext.getTenantId(), ObjectType.TENANT, tenantContext);
    final Long accountRecordId = Strings.isNullOrEmpty(accountIdStr) ? null : recordIdApi.getRecordId(UUID.fromString(accountIdStr), ObjectType.ACCOUNT, tenantContext);
    // Limit search results by default
    final DateTime minDate = Strings.isNullOrEmpty(minDateOrNull) ? clock.getUTCNow().minusDays(2) : DATE_TIME_FORMATTER.parseDateTime(minDateOrNull).toDateTime(DateTimeZone.UTC);
    final DateTime maxDate = Strings.isNullOrEmpty(maxDateOrNull) ? clock.getUTCNow().plusDays(2) : DATE_TIME_FORMATTER.parseDateTime(maxDateOrNull).toDateTime(DateTimeZone.UTC);
    final StreamingOutput json = new StreamingOutput() {
        @Override
        public void write(final OutputStream output) throws IOException, WebApplicationException {
            Iterator<BusEventWithMetadata<BusEvent>> busEventsIterator = null;
            Iterator<NotificationEventWithMetadata<NotificationEvent>> notificationsIterator = null;
            try {
                final JsonGenerator generator = mapper.getFactory().createGenerator(output);
                generator.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
                generator.writeStartObject();
                if (withBusEvents) {
                    generator.writeFieldName("busEvents");
                    generator.writeStartArray();
                    busEventsIterator = getBusEvents(withInProcessing, withHistory, minDate, maxDate, accountRecordId, tenantRecordId).iterator();
                    while (busEventsIterator.hasNext()) {
                        final BusEventWithMetadata<BusEvent> busEvent = busEventsIterator.next();
                        generator.writeObject(new BusEventWithRichMetadata(busEvent));
                    }
                    generator.writeEndArray();
                }
                if (withNotifications) {
                    generator.writeFieldName("notifications");
                    generator.writeStartArray();
                    notificationsIterator = getNotifications(queueName, serviceName, withInProcessing, withHistory, minDate, maxDate, accountRecordId, tenantRecordId).iterator();
                    while (notificationsIterator.hasNext()) {
                        final NotificationEventWithMetadata<NotificationEvent> notification = notificationsIterator.next();
                        generator.writeObject(notification);
                    }
                    generator.writeEndArray();
                }
                generator.writeEndObject();
                generator.close();
            } finally {
                // In case the client goes away (IOException), make sure to close the underlying DB connection
                if (busEventsIterator != null) {
                    while (busEventsIterator.hasNext()) {
                        busEventsIterator.next();
                    }
                }
                if (notificationsIterator != null) {
                    while (notificationsIterator.hasNext()) {
                        notificationsIterator.next();
                    }
                }
            }
        }
    };
    return Response.status(Status.OK).entity(json).build();
}
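Two details carry the weight here: AUTO_CLOSE_TARGET is disabled so that closing the generator does not close the container-managed OutputStream, and entries are streamed one by one instead of being materialized into a list. A minimal, self-contained sketch of just those two points (not the Kill Bill resource) is shown below; the field name "entries" and the sample data are made up.

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Iterator;

public class StreamingEntriesSketch {
    static final ObjectMapper MAPPER = new ObjectMapper();

    static void writeEntries(OutputStream output, Iterator<?> entries) throws IOException {
        JsonGenerator generator = MAPPER.getFactory().createGenerator(output);
        generator.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);   // keep the stream open
        generator.writeStartObject();
        generator.writeFieldName("entries");
        generator.writeStartArray();
        while (entries.hasNext()) {
            generator.writeObject(entries.next());    // delegates to the mapper acting as the codec
        }
        generator.writeEndArray();
        generator.writeEndObject();
        generator.close();                            // flushes the generator, leaves output open
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writeEntries(out, Arrays.asList("a", "b").iterator());
        System.out.println(out); // {"entries":["a","b"]}
    }
}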
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project druid by druid-io.
The class SqlResource, method doPost:
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response doPost(final SqlQuery sqlQuery) throws SQLException, IOException {
    // This is not integrated with the experimental authorization framework.
    // (Non-trivial since we don't know the dataSources up-front)
    final PlannerResult plannerResult;
    final DateTimeZone timeZone;
    try (final DruidPlanner planner = plannerFactory.createPlanner(sqlQuery.getContext())) {
        plannerResult = planner.plan(sqlQuery.getQuery());
        timeZone = planner.getPlannerContext().getTimeZone();
        // Remember which columns are time-typed, so we can emit ISO8601 instead of millis values.
        final List<RelDataTypeField> fieldList = plannerResult.rowType().getFieldList();
        final boolean[] timeColumns = new boolean[fieldList.size()];
        final boolean[] dateColumns = new boolean[fieldList.size()];
        for (int i = 0; i < fieldList.size(); i++) {
            final SqlTypeName sqlTypeName = fieldList.get(i).getType().getSqlTypeName();
            timeColumns[i] = sqlTypeName == SqlTypeName.TIMESTAMP;
            dateColumns[i] = sqlTypeName == SqlTypeName.DATE;
        }
        final Yielder<Object[]> yielder0 = Yielders.each(plannerResult.run());
        try {
            return Response.ok(new StreamingOutput() {
                @Override
                public void write(final OutputStream outputStream) throws IOException, WebApplicationException {
                    Yielder<Object[]> yielder = yielder0;
                    try (final JsonGenerator jsonGenerator = jsonMapper.getFactory().createGenerator(outputStream)) {
                        jsonGenerator.writeStartArray();
                        while (!yielder.isDone()) {
                            final Object[] row = yielder.get();
                            jsonGenerator.writeStartObject();
                            for (int i = 0; i < fieldList.size(); i++) {
                                final Object value;
                                if (timeColumns[i]) {
                                    value = ISODateTimeFormat.dateTime().print(Calcites.calciteTimestampToJoda((long) row[i], timeZone));
                                } else if (dateColumns[i]) {
                                    value = ISODateTimeFormat.dateTime().print(Calcites.calciteDateToJoda((int) row[i], timeZone));
                                } else {
                                    value = row[i];
                                }
                                jsonGenerator.writeObjectField(fieldList.get(i).getName(), value);
                            }
                            jsonGenerator.writeEndObject();
                            yielder = yielder.next(null);
                        }
                        jsonGenerator.writeEndArray();
                        jsonGenerator.flush();
                        // End with CRLF
                        outputStream.write('\r');
                        outputStream.write('\n');
                    } finally {
                        yielder.close();
                    }
                }
            }).build();
        } catch (Throwable e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder0.close();
            throw Throwables.propagate(e);
        }
    } catch (Exception e) {
        log.warn(e, "Failed to handle query: %s", sqlQuery);
        final Exception exceptionToReport;
        if (e instanceof RelOptPlanner.CannotPlanException) {
            exceptionToReport = new ISE("Cannot build plan for query: %s", sqlQuery.getQuery());
        } else {
            exceptionToReport = e;
        }
        return Response.serverError().type(MediaType.APPLICATION_JSON_TYPE).entity(jsonMapper.writeValueAsBytes(QueryInterruptedException.wrapIfNeeded(exceptionToReport))).build();
    }
}
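Stripped of the planner and yielder machinery, the serialization loop is simply "one JSON object per row inside a top-level array, generator in try-with-resources, trailing CRLF". The sketch below shows only that loop under those assumptions; the column names and rows are invented for illustration and nothing Druid-specific is used.

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class RowStreamingSketch {
    static void writeRows(OutputStream outputStream, String[] columns, Object[][] rows) throws IOException {
        ObjectMapper jsonMapper = new ObjectMapper();
        try (JsonGenerator jsonGenerator = jsonMapper.getFactory().createGenerator(outputStream)) {
            jsonGenerator.writeStartArray();
            for (Object[] row : rows) {
                jsonGenerator.writeStartObject();
                for (int i = 0; i < columns.length; i++) {
                    jsonGenerator.writeObjectField(columns[i], row[i]);   // column name -> cell value
                }
                jsonGenerator.writeEndObject();
            }
            jsonGenerator.writeEndArray();
            jsonGenerator.flush();
            // End with CRLF, mirroring the resource above
            outputStream.write('\r');
            outputStream.write('\n');
        }
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writeRows(out, new String[]{"cnt", "dim"}, new Object[][]{{10L, "a"}, {7L, "b"}});
        System.out.println(out); // [{"cnt":10,"dim":"a"},{"cnt":7,"dim":"b"}]
    }
}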
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project aem-core-wcm-components by Adobe-Marketing-Cloud.
The class PageSerializerTest, method serialize:
@Test
public void serialize() throws Exception {
    Page page = mock(Page.class);
    when(page.getName()).thenReturn(PAGE_NAME);
    when(page.getTitle()).thenReturn(PAGE_TITLE);
    when(page.getPageTitle()).thenReturn(PAGE_TITLE);
    when(page.getPath()).thenReturn(PAGE_PATH);
    when(page.getDescription()).thenReturn(PAGE_DESCRIPTION);
    JsonGenerator jsonGenerator = mock(JsonGenerator.class);
    SerializerProvider serializerProvider = mock(SerializerProvider.class);
    PageSerializer pageSerializer = new PageSerializer(Page.class);
    pageSerializer.serialize(page, jsonGenerator, serializerProvider);
    verify(jsonGenerator).writeStartObject();
    verify(jsonGenerator).writeStringField(PageSerializer.JSON_KEY_NAME, page.getName());
    verify(jsonGenerator).writeStringField(PageSerializer.JSON_KEY_TITLE, page.getTitle());
    verify(jsonGenerator).writeStringField(PageSerializer.JSON_KEY_PAGE_TITLE, page.getPageTitle());
    verify(jsonGenerator).writeStringField(PageSerializer.JSON_KEY_PATH, page.getPath());
    verify(jsonGenerator).writeStringField(PageSerializer.JSON_KEY_DESCRIPTION, page.getDescription());
    verify(jsonGenerator).writeEndObject();
    verifyNoMoreInteractions(jsonGenerator);
}
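The test mocks the JsonGenerator and verifies the sequence of write calls made by a custom serializer. For readers who have not written one, here is a minimal sketch of such a serializer and its registration, not the actual PageSerializer: the Page interface, field names, and module wiring below are simplified stand-ins.

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;

public class PageSerializerSketch {
    interface Page {                                   // simplified stand-in for the WCM Page API
        String getName();
        String getTitle();
    }

    static class PageSerializer extends StdSerializer<Page> {
        PageSerializer(Class<Page> type) {
            super(type);
        }

        @Override
        public void serialize(Page page, JsonGenerator json, SerializerProvider provider) throws IOException {
            // Write selected properties through the generator's field methods
            json.writeStartObject();
            json.writeStringField("name", page.getName());
            json.writeStringField("title", page.getTitle());
            json.writeEndObject();
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper()
                .registerModule(new SimpleModule().addSerializer(Page.class, new PageSerializer(Page.class)));
        Page page = new Page() {
            public String getName() { return "home"; }
            public String getTitle() { return "Home"; }
        };
        System.out.println(mapper.writeValueAsString(page)); // {"name":"home","title":"Home"}
    }
}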