Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project data-prep by Talend: class XlsRunnable, method run().
/**
 * Serializes the configured sheet of the XLS workbook to {@code jsonOutput} as a JSON array.
 * <p>
 * Sheet resolution order: the sheet named in the metadata, then (for auto-generated
 * "sheet-&lt;i&gt;" names) the sheet at index i, then the first sheet as a last resort.
 *
 * @see Runnable#run()
 */
@Override
public void run() {
    // try-with-resources: Workbook is Closeable and was previously leaked on every run.
    try (Workbook workbook = WorkbookFactory.create(rawContent)) {
        JsonGenerator generator = jsonFactory.createGenerator(jsonOutput);
        // if no sheet name just get the first one (take it easy mate :-) )
        Sheet sheet = isEmpty(metadata.getSheetName()) ? workbook.getSheetAt(0) : workbook.getSheet(metadata.getSheetName());
        if (sheet == null) {
            // auto generated sheet name so take care!! "sheet-" + i
            if (StringUtils.startsWith(metadata.getSheetName(), "sheet-")) {
                String sheetNumberStr = StringUtils.removeStart(metadata.getSheetName(), "sheet-");
                // parseInt avoids the needless boxing of Integer.valueOf
                sheet = workbook.getSheetAt(Integer.parseInt(sheetNumberStr));
            }
            // still null so use the first one
            if (sheet == null) {
                sheet = workbook.getSheetAt(0);
            }
        }
        generator.writeStartArray();
        List<ColumnMetadata> columns = metadata.getRowMetadata().getColumns();
        serializeColumns(workbook, generator, sheet, columns);
        generator.writeEndArray();
        generator.flush();
    } catch (Exception e) {
        // Consumer may very well interrupt consumption of stream (in case of limit(n) use for sampling).
        // This is not an issue as consumer is allowed to partially consumes results, it's up to the
        // consumer to ensure data it consumed is consistent.
        LOG.debug("Unable to continue serialization for {}. Skipping remaining content.", metadata.getId(), e);
    } finally {
        try {
            // closing the pipe unblocks the consumer even when serialization failed
            jsonOutput.close();
        } catch (IOException e) {
            LOG.error("Unable to close output", e);
        }
    }
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project data-prep by Talend: class XlsxStreamRunnable, method run().
/**
 * Streams the rows of an XLSX sheet to {@code jsonOutput} as a JSON array of objects,
 * one object per data row, keyed by column id. Header rows are skipped and the row
 * count is capped by {@code limit} when positive.
 *
 * @see Runnable#run()
 */
@Override
public void run() {
    try {
        final JsonGenerator generator = jsonFactory.createGenerator(jsonOutput);
        final Workbook workbook = StreamingReader.builder() //
                .bufferSize(4096) //
                .rowCacheSize(1) //
                .open(rawContent);
        try {
            final String sheetName = metadata.getSheetName();
            final Sheet sheet = StringUtils.isEmpty(sheetName) ? workbook.getSheetAt(0) : workbook.getSheet(sheetName);
            generator.writeStartArray();
            for (Row row : sheet) {
                final int rowNum = row.getRowNum();
                if (limit > 0 && rowNum > limit) {
                    break; // sampling cap reached
                }
                if (XlsSerializer.isHeaderLine(rowNum, metadata.getRowMetadata().getColumns())) {
                    continue; // header rows are metadata, not data
                }
                generator.writeStartObject();
                // data quality Analyzer doesn't like to not have all columns even if we don't have any values
                // so create so field with empty value otherwise we get exceptions
                int columnIndex = 0;
                for (ColumnMetadata columnMetadata : metadata.getRowMetadata().getColumns()) {
                    final Cell cell = row.getCell(columnIndex);
                    // StringUtils.EMPTY
                    final String cellValue = cell == null ? null : cell.getStringCellValue();
                    generator.writeFieldName(columnMetadata.getId());
                    if (cellValue == null) {
                        generator.writeNull();
                    } else {
                        generator.writeString(cellValue);
                    }
                    columnIndex++;
                }
                generator.writeEndObject();
            }
            generator.writeEndArray();
            generator.flush();
        } finally {
            workbook.close();
        }
    } catch (Exception e) {
        // Consumer may very well interrupt consumption of stream (in case of limit(n) use for sampling).
        // This is not an issue as consumer is allowed to partially consumes results, it's up to the
        // consumer to ensure data it consumed is consistent.
        LOG.debug("Unable to continue serialization for {}. Skipping remaining content.", metadata.getId(), e);
    } finally {
        try {
            jsonOutput.close();
        } catch (IOException e) {
            LOG.error("Unable to close output", e);
        }
    }
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project data-prep by Talend: class CSVSerializer, method serialize().
/**
 * Serializes the CSV raw content to JSON asynchronously, returning a piped stream the
 * caller reads from while a background task writes into it.
 * <p>
 * Bug fix: parameter extraction (e.g. {@code separator.charAt(0)}, which NPEs when the
 * separator parameter is absent) used to run OUTSIDE the try/finally in the background
 * task; any failure there left {@code jsonOutput} open and the consumer blocked forever
 * on the {@link PipedInputStream}. All work now happens inside the guarded region.
 *
 * @param rawContent the raw CSV content.
 * @param metadata the dataset metadata (encoding, separator, header line count...).
 * @param limit maximum number of lines to serialize (&lt;= 0 means no limit).
 * @return an input stream to read the serialized JSON from.
 */
@Override
public InputStream serialize(InputStream rawContent, DataSetMetadata metadata, long limit) {
    try {
        PipedInputStream pipe = new PipedInputStream();
        PipedOutputStream jsonOutput = new PipedOutputStream(pipe);
        // Serialize asynchronously for better performance (especially if caller doesn't consume all, see sampling).
        Runnable r = () -> {
            try {
                final Map<String, String> parameters = metadata.getContent().getParameters();
                final String separator = parameters.get(CSVFormatFamily.SEPARATOR_PARAMETER);
                final char actualSeparator = separator.charAt(0);
                final char textEnclosureChar = getFromParameters(parameters, TEXT_ENCLOSURE_CHAR, defaultTextEnclosure);
                final char escapeChar = getFromParameters(parameters, CSVFormatFamily.ESCAPE_CHAR, defaultEscapeChar);
                try (InputStreamReader input = new InputStreamReader(rawContent, metadata.getEncoding());
                        CSVReader reader = new CSVReader(input, actualSeparator, textEnclosureChar, escapeChar)) {
                    JsonGenerator generator = new JsonFactory().createGenerator(jsonOutput);
                    int i = 0;
                    while (i++ < metadata.getContent().getNbLinesInHeader()) {
                        // Skip all header lines
                        reader.readNext();
                    }
                    generator.writeStartArray();
                    writeLineContent(reader, metadata, generator, separator, limit);
                    generator.writeEndArray();
                    generator.flush();
                }
            } catch (Exception e) {
                // Consumer may very well interrupt consumption of stream (in case of limit(n) use for sampling).
                // This is not an issue as consumer is allowed to partially consumes results, it's up to the
                // consumer to ensure data it consumed is consistent.
                LOGGER.debug("Unable to continue serialization for {}. Skipping remaining content.", metadata.getId(), e);
            } finally {
                try {
                    // always close the pipe so the reading side never hangs
                    jsonOutput.close();
                } catch (IOException e) {
                    LOGGER.error("Unable to close output", e);
                }
            }
        };
        executor.execute(r);
        return pipe;
    } catch (IOException e) {
        throw new TDPException(CommonErrorCodes.UNABLE_TO_SERIALIZE_TO_JSON, e);
    }
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project java by wavefrontHQ: class JsonMetricsGenerator, method generateJsonMetrics().
/**
 * Writes the metrics of {@code registry} to {@code outputStream} as UTF-8 encoded JSON.
 *
 * @param outputStream destination stream; not closed by this method as far as visible here.
 * @param registry the metrics registry to serialize.
 * @param includeVMMetrics whether JVM metrics are included (forwarded to writeJson).
 * @param includeBuildMetrics whether build metrics are included (forwarded to writeJson).
 * @param clearMetrics whether metrics are cleared after serialization (forwarded to writeJson).
 * @param metricTranslator optional translator applied to metrics (forwarded to writeJson).
 * @throws IOException if writing JSON fails.
 */
public static void generateJsonMetrics(OutputStream outputStream, MetricsRegistry registry, boolean includeVMMetrics, boolean includeBuildMetrics, boolean clearMetrics, MetricTranslator metricTranslator) throws IOException {
JsonGenerator json = factory.createGenerator(outputStream, JsonEncoding.UTF8);
// NOTE(review): the generator is neither flushed nor closed here — presumably
// writeJson does that itself; confirm, otherwise buffered output may be lost.
writeJson(json, registry, includeVMMetrics, includeBuildMetrics, clearMetrics, null, metricTranslator);
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator in project keycloak by keycloak: class ExportUtils, method exportFederatedUsersToStream().
/**
 * Exports the given federated users of a realm to {@code os} as a UTF-8 JSON document of the
 * form {@code {"realm": ..., "federatedUsers": [...]}}. Pretty-prints when the mapper has
 * {@link SerializationFeature#INDENT_OUTPUT} enabled.
 * <p>
 * Bug fix: closing the generator in a {@code finally} block could mask an in-flight
 * serialization exception when {@code close()} itself threw. Try-with-resources keeps the
 * primary exception and attaches the close failure as a suppressed exception instead.
 * Note: {@code JsonGenerator.close()} also closes {@code os} by default
 * (AUTO_CLOSE_TARGET), same as before.
 *
 * @param session the Keycloak session.
 * @param realm the realm the users belong to.
 * @param usersToExport ids of the federated users to export.
 * @param mapper the object mapper whose factory and settings are used.
 * @param os destination stream.
 * @param options export options forwarded to per-user export.
 * @throws IOException if JSON generation fails.
 */
public static void exportFederatedUsersToStream(KeycloakSession session, RealmModel realm, List<String> usersToExport, ObjectMapper mapper, OutputStream os, ExportOptions options) throws IOException {
    JsonFactory factory = mapper.getFactory();
    try (JsonGenerator generator = factory.createGenerator(os, JsonEncoding.UTF8)) {
        if (mapper.isEnabled(SerializationFeature.INDENT_OUTPUT)) {
            generator.useDefaultPrettyPrinter();
        }
        generator.writeStartObject();
        generator.writeStringField("realm", realm.getName());
        generator.writeFieldName("federatedUsers");
        generator.writeStartArray();
        for (String userId : usersToExport) {
            UserRepresentation userRep = ExportUtils.exportFederatedUser(session, realm, userId, options);
            generator.writeObject(userRep);
        }
        generator.writeEndArray();
        generator.writeEndObject();
    }
}
Aggregations