Use of org.apache.commons.csv.CSVParser in project ArachneCentralAPI by OHDSI.
From class SubmissionHelper, method parseCsvDataframeToJson:
private JsonObject parseCsvDataframeToJson(String filepath) throws IOException {

    final JsonObject resultInfo = new JsonObject();
    // try-with-resources so the parser is closed once the records are read
    try (CSVParser parser = CSVParser.parse(
            contentStorageService.getContentByFilepath(filepath),
            Charset.defaultCharset(),
            CSVFormat.DEFAULT.withHeader())) {
        final Map<String, Integer> headerMap = parser.getHeaderMap();
        final List<CSVRecord> csvRecordList = parser.getRecords();

        JsonArray jsonHeaders = new JsonArray();
        headerMap.forEach((key, value) -> jsonHeaders.add(key));
        resultInfo.add("headers", jsonHeaders);

        JsonArray jsonRecords = new JsonArray();
        csvRecordList.forEach(record -> {
            final JsonObject jsonRecord = new JsonObject();
            for (Map.Entry<String, Integer> entry : headerMap.entrySet()) {
                final String key = entry.getKey();
                final String value = record.get(entry.getValue());
                // Caution: isCreatable() also accepts hex literals such as "0x1F",
                // which Float.parseFloat() rejects with a NumberFormatException
                if (NumberUtils.isCreatable(value)) {
                    jsonRecord.addProperty(key, Float.parseFloat(value));
                } else {
                    jsonRecord.addProperty(key, value);
                }
            }
            jsonRecords.add(jsonRecord);
        });
        resultInfo.add("records", jsonRecords);
    }
    return resultInfo;
}
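A minimal, self-contained sketch of the same headers-plus-records transformation over an in-memory CSV; the DataframeJsonDemo class, its two-row input, and the printed output are illustrative assumptions, not part of the ArachneCentralAPI code:

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.math.NumberUtils;

import java.io.StringReader;
import java.util.Map;

public class DataframeJsonDemo {

    public static void main(String[] args) throws Exception {
        // Hypothetical two-row dataframe; the first line feeds getHeaderMap()
        String csv = "metric,value\nrowCount,42\nlabel,demo\n";
        JsonObject resultInfo = new JsonObject();
        try (CSVParser parser = new CSVParser(new StringReader(csv), CSVFormat.DEFAULT.withHeader())) {
            Map<String, Integer> headerMap = parser.getHeaderMap();

            JsonArray headers = new JsonArray();
            headerMap.keySet().forEach(headers::add);
            resultInfo.add("headers", headers);

            JsonArray records = new JsonArray();
            for (CSVRecord record : parser) {
                JsonObject row = new JsonObject();
                for (String name : headerMap.keySet()) {
                    String value = record.get(name);
                    if (NumberUtils.isCreatable(value)) {
                        row.addProperty(name, Float.parseFloat(value));
                    } else {
                        row.addProperty(name, value);
                    }
                }
                records.add(row);
            }
            resultInfo.add("records", records);
        }
        // {"headers":["metric","value"],
        //  "records":[{"metric":"rowCount","value":42.0},{"metric":"label","value":"demo"}]}
        System.out.println(resultInfo);
    }
}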
Use of org.apache.commons.csv.CSVParser in project opennms by OpenNMS.
From class MeasurementQueryExecutorRemoteIT, method testReportHwForecast:
@Test
public void testReportHwForecast() throws IOException, JRException {

    createReport("Forecast", new ReportFiller() {

        @Override
        public void fill(Map<String, Object> params) throws Exception {
            params.put(JRParameter.IS_IGNORE_PAGINATION, true);
            params.put("MEASUREMENT_URL", "http://localhost:9999/opennms/rest/measurements");
            params.put("dsName", "ifInOctets");
            params.put("startDate", "1414602000000");
            params.put("endDate", "1417046400000");
        }
    });

    // Verify the results of the generated report
    Table<Integer, String, Double> forecasts = TreeBasedTable.create();
    try (FileReader reader = new FileReader(createFileName("Forecast", "csv"));
         CSVParser parser = new CSVParser(reader, CSVFormat.RFC4180.withHeader())) {
        int k = 0;
        for (CSVRecord record : parser) {
            try {
                Double fit = Double.parseDouble(record.get("HWFit"));
                Double lwr = Double.parseDouble(record.get("HWLwr"));
                Double upr = Double.parseDouble(record.get("HWUpr"));
                if (Double.isNaN(fit)) {
                    continue;
                }
                forecasts.put(k, "fit", fit);
                forecasts.put(k, "lwr", lwr);
                forecasts.put(k, "upr", upr);
                k++;
            } catch (NumberFormatException e) {
                // Skip rows whose forecast columns are not numeric
            }
        }
    }

    Assert.assertEquals(340, forecasts.rowKeySet().size());
    // First fitted value
    Assert.assertEquals(432.526086422424, forecasts.get(0, "fit"), 0.00001);
    // Last fitted value for which there is a known data point
    Assert.assertEquals(24079.4692522087, forecasts.get(327, "fit"), 0.00001);
    // First forecasted value
    Assert.assertEquals(22245.5417010936, forecasts.get(328, "fit"), 0.00001);
}
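The verification step collects each forecast triple into a Guava Table keyed by row index and series name. A standalone sketch of that access pattern follows; the class name and the numbers are invented for illustration and are not the report's actual values:

import com.google.common.collect.Table;
import com.google.common.collect.TreeBasedTable;

public class ForecastTableDemo {

    public static void main(String[] args) {
        // Row key: sample index, column key: series name, cell: value
        Table<Integer, String, Double> forecasts = TreeBasedTable.create();
        forecasts.put(0, "fit", 432.5);
        forecasts.put(0, "lwr", 400.0);
        forecasts.put(0, "upr", 460.0);

        System.out.println(forecasts.get(0, "fit"));      // 432.5
        System.out.println(forecasts.rowKeySet().size()); // 1
        System.out.println(forecasts.row(0));             // {fit=432.5, lwr=400.0, upr=460.0}
    }
}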
Use of org.apache.commons.csv.CSVParser in project camel by apache.
From class CsvRecordConvertersTest, method setUp:
@Before
public void setUp() throws Exception {

    CSVFormat format = CSVFormat.DEFAULT.withHeader("A", "B", "C");
    CSVParser parser = new CSVParser(new StringReader("1,2,3"), format);
    List<CSVRecord> records = parser.getRecords();
    record = records.get(0);
}
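Because the header names are passed explicitly to withHeader("A", "B", "C"), the line "1,2,3" is parsed as a data record whose fields are addressable by column name. A small sketch contrasting this with the no-argument withHeader(), where the first row itself becomes the header; the demo class is hypothetical:

import java.io.StringReader;
import java.util.List;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class ExplicitHeaderDemo {

    public static void main(String[] args) throws Exception {
        // Explicit names: "1,2,3" is data, addressable by column name
        CSVFormat format = CSVFormat.DEFAULT.withHeader("A", "B", "C");
        try (CSVParser parser = new CSVParser(new StringReader("1,2,3"), format)) {
            CSVRecord record = parser.getRecords().get(0);
            System.out.println(record.get("B")); // 2
        }

        // No-arg withHeader(): the first row becomes the header,
        // so the same input yields zero data records
        try (CSVParser parser = new CSVParser(new StringReader("1,2,3"), CSVFormat.DEFAULT.withHeader())) {
            List<CSVRecord> records = parser.getRecords();
            System.out.println(records.size()); // 0
        }
    }
}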
Use of org.apache.commons.csv.CSVParser in project jackrabbit-oak by apache.
From class CSVFileBinaryResourceProvider, method getBinaries:
@Override
public FluentIterable<BinaryResource> getBinaries(final String path) throws IOException {

    CSVParser parser = CSVParser.parse(dataFile, Charsets.UTF_8, FORMAT);
    // The parser streams records lazily, so it is registered with the
    // closer rather than closed before the iterable is consumed
    closer.register(parser);
    return FluentIterable.from(parser)
            .transform(new RecordTransformer())
            .filter(notNull())
            .filter(new Predicate<BinaryResource>() {

                @Override
                public boolean apply(BinaryResource input) {
                    return PathUtils.isAncestor(path, input.getPath());
                }
            });
}
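CSVParser.parse(dataFile, ...) returns a parser that reads rows on demand, which is what makes the lazy FluentIterable above work. A plain-Java sketch of the same streaming idea without Guava; the file name and the "path" column are assumptions for illustration:

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class LazyCsvDemo {

    public static void main(String[] args) throws IOException {
        // "binaries.csv" is a hypothetical input with a "path" column
        File dataFile = new File("binaries.csv");
        try (CSVParser parser = CSVParser.parse(dataFile, StandardCharsets.UTF_8,
                CSVFormat.DEFAULT.withHeader())) {
            // CSVParser implements Iterable<CSVRecord>, so rows are read
            // from disk one at a time rather than all at once
            for (CSVRecord record : parser) {
                String path = record.get("path");
                if (path.startsWith("/content")) {
                    System.out.println(path);
                }
            }
        }
    }
}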
Use of org.apache.commons.csv.CSVParser in project phoenix by apache.
From class CSVCommonsLoaderIT, method testCSVCommonsUpsertEncapsulatedControlChars:
@Test
public void testCSVCommonsUpsertEncapsulatedControlChars() throws Exception {

    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + ENCAPSULATED_CHARS_TABLE
                + "(MYKEY VARCHAR NOT NULL PRIMARY KEY, MYVALUE VARCHAR);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, ENCAPSULATED_CHARS_TABLE,
                Collections.<String>emptyList(), true);
        csvUtil.upsert(new StringReader(CSV_VALUES_ENCAPSULATED_CONTROL_CHARS_WITH_HEADER));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement(
                "SELECT MYKEY, MYVALUE FROM " + ENCAPSULATED_CHARS_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(CSV_VALUES_ENCAPSULATED_CONTROL_CHARS_WITH_HEADER),
                csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null) {
            parser.close();
        }
        if (conn != null) {
            conn.close();
        }
    }
}
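The fixture drives quoted (encapsulated) control characters through the loader. Commons CSV's quoting lets a single field carry delimiters and line breaks; a self-contained sketch with an invented input string and demo class:

import java.io.StringReader;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class QuotedFieldsDemo {

    public static void main(String[] args) throws Exception {
        // Quoted fields may contain commas and newlines; the parser
        // unwraps each into a single field value
        String csv = "MYKEY,MYVALUE\nk1,\"a,b\"\nk2,\"line1\nline2\"";
        try (CSVParser parser = new CSVParser(new StringReader(csv), CSVFormat.DEFAULT.withHeader())) {
            for (CSVRecord record : parser) {
                // Prints "k1 -> a,b", then "k2 -> line1\nline2"
                System.out.println(record.get("MYKEY") + " -> " + record.get("MYVALUE"));
            }
        }
    }
}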