Search in sources :

Example 56 with CSVRecord

use of org.apache.commons.csv.CSVRecord in project webapp by elimu-ai.

In class CsvContentExtractionHelper, the method getLettersFromCsvBackup:

/**
 * Reads {@link Letter} entities from a CSV backup file.
 * <p />
 * For information on how the CSV files were generated, see {@link LetterCsvExportController#handleRequest}.
 *
 * @param csvFile the CSV backup file with columns: id, text, diacritic, usage_count
 * @param soundDao DAO for sound lookups (currently unused; kept for interface compatibility)
 * @return the letters parsed from the file, or an empty list if the file could not be read
 */
public static List<Letter> getLettersFromCsvBackup(File csvFile, SoundDao soundDao) {
    logger.info("getLettersFromCsvBackup");
    List<Letter> letters = new ArrayList<>();
    Path csvFilePath = Paths.get(csvFile.toURI());
    logger.info("csvFilePath: " + csvFilePath);
    CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("id", "text", "diacritic", "usage_count").withSkipHeaderRecord();
    // try-with-resources: the reader and parser were previously never closed (resource leak)
    try (Reader reader = Files.newBufferedReader(csvFilePath);
            CSVParser csvParser = new CSVParser(reader, csvFormat)) {
        for (CSVRecord csvRecord : csvParser) {
            logger.info("csvRecord: " + csvRecord);
            Letter letter = new Letter();
            String text = csvRecord.get("text");
            letter.setText(text);
            // parseBoolean/parseInt avoid needless boxing; setters autobox as needed
            boolean diacritic = Boolean.parseBoolean(csvRecord.get("diacritic"));
            letter.setDiacritic(diacritic);
            Integer usageCount = Integer.parseInt(csvRecord.get("usage_count"));
            letter.setUsageCount(usageCount);
            letters.add(letter);
        }
    } catch (IOException ex) {
        // Best-effort: log and return whatever was parsed so far
        logger.error(ex);
    }
    return letters;
}
Also used : Letter(ai.elimu.model.content.Letter) Path(java.nio.file.Path) CSVParser(org.apache.commons.csv.CSVParser) ArrayList(java.util.ArrayList) Reader(java.io.Reader) CSVFormat(org.apache.commons.csv.CSVFormat) CSVRecord(org.apache.commons.csv.CSVRecord) IOException(java.io.IOException)

Example 57 with CSVRecord

use of org.apache.commons.csv.CSVRecord in project webapp by elimu-ai.

In class CsvContentExtractionHelper, the method getStoryBooksFromCsvBackup:

/**
 * Reads storybooks (including nested chapters and paragraphs) from a CSV backup file.
 * <p />
 * For information on how the CSV files were generated, see {@link StoryBookCsvExportController#handleRequest}.
 * <p />
 * Also see {@link #getStoryBookChaptersFromCsvBackup}
 *
 * @param csvFile the CSV backup file; the "chapters" column contains a JSON array of chapters,
 *                each with a "storyBookParagraphs" JSON array
 * @return the storybooks parsed from the file, or an empty list if the file could not be read
 */
public static List<StoryBookGson> getStoryBooksFromCsvBackup(File csvFile) {
    logger.info("getStoryBooksFromCsvBackup");
    List<StoryBookGson> storyBookGsons = new ArrayList<>();
    Path csvFilePath = Paths.get(csvFile.toURI());
    logger.info("csvFilePath: " + csvFilePath);
    CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("id", "title", "description", "content_license", "attribution_url", "reading_level", "cover_image_id", "chapters").withSkipHeaderRecord();
    // try-with-resources: the reader and parser were previously never closed (resource leak)
    try (Reader reader = Files.newBufferedReader(csvFilePath);
            CSVParser csvParser = new CSVParser(reader, csvFormat)) {
        for (CSVRecord csvRecord : csvParser) {
            logger.info("csvRecord: " + csvRecord);
            // Convert from CSV to GSON
            StoryBookGson storyBookGson = new StoryBookGson();
            String title = csvRecord.get("title");
            storyBookGson.setTitle(title);
            String description = csvRecord.get("description");
            storyBookGson.setDescription(description);
            if (StringUtils.isNotBlank(csvRecord.get("content_license"))) {
                // Parsed to validate the value; TODO: set on storyBookGson once the GSON model supports it
                ContentLicense contentLicense = ContentLicense.valueOf(csvRecord.get("content_license"));
            // storyBookGson.setContentLicense(contentLicense);
            }
            // TODO: attribution_url is read but not yet applied to the GSON model
            String attributionUrl = csvRecord.get("attribution_url");
            if (StringUtils.isNotBlank(csvRecord.get("reading_level"))) {
                ReadingLevel readingLevel = ReadingLevel.valueOf(csvRecord.get("reading_level"));
                storyBookGson.setReadingLevel(readingLevel);
            }
            if (StringUtils.isNotBlank(csvRecord.get("cover_image_id"))) {
                // Parsed to validate the value; TODO: resolve and set the cover image
                Long coverImageId = Long.valueOf(csvRecord.get("cover_image_id"));
            // storyBookGson.setCoverImage();
            }
            // The "chapters" column holds a nested JSON structure: chapters -> paragraphs
            List<StoryBookChapterGson> storyBookChapterGsons = new ArrayList<>();
            JSONArray chaptersJsonArray = new JSONArray(csvRecord.get("chapters"));
            logger.info("chaptersJsonArray: " + chaptersJsonArray);
            for (int i = 0; i < chaptersJsonArray.length(); i++) {
                JSONObject chapterJsonObject = chaptersJsonArray.getJSONObject(i);
                logger.info("chapterJsonObject: " + chapterJsonObject);
                StoryBookChapterGson storyBookChapterGson = new StoryBookChapterGson();
                storyBookChapterGson.setSortOrder(chapterJsonObject.getInt("sortOrder"));
                List<StoryBookParagraphGson> storyBookParagraphGsons = new ArrayList<>();
                JSONArray paragraphsJsonArray = chapterJsonObject.getJSONArray("storyBookParagraphs");
                logger.info("paragraphsJsonArray: " + paragraphsJsonArray);
                for (int j = 0; j < paragraphsJsonArray.length(); j++) {
                    JSONObject paragraphJsonObject = paragraphsJsonArray.getJSONObject(j);
                    logger.info("paragraphJsonObject: " + paragraphJsonObject);
                    StoryBookParagraphGson storyBookParagraphGson = new StoryBookParagraphGson();
                    storyBookParagraphGson.setSortOrder(paragraphJsonObject.getInt("sortOrder"));
                    storyBookParagraphGson.setOriginalText(paragraphJsonObject.getString("originalText"));
                    // TODO: setWords
                    storyBookParagraphGsons.add(storyBookParagraphGson);
                }
                storyBookChapterGson.setStoryBookParagraphs(storyBookParagraphGsons);
                storyBookChapterGsons.add(storyBookChapterGson);
            }
            storyBookGson.setStoryBookChapters(storyBookChapterGsons);
            storyBookGsons.add(storyBookGson);
        }
    } catch (IOException ex) {
        // Best-effort: log and return whatever was parsed so far
        logger.error(ex);
    }
    return storyBookGsons;
}
Also used : Path(java.nio.file.Path) ReadingLevel(ai.elimu.model.v2.enums.ReadingLevel) StoryBookParagraphGson(ai.elimu.model.v2.gson.content.StoryBookParagraphGson) StoryBookGson(ai.elimu.model.v2.gson.content.StoryBookGson) ArrayList(java.util.ArrayList) JSONArray(org.json.JSONArray) Reader(java.io.Reader) IOException(java.io.IOException) ContentLicense(ai.elimu.model.enums.ContentLicense) JSONObject(org.json.JSONObject) CSVParser(org.apache.commons.csv.CSVParser) CSVFormat(org.apache.commons.csv.CSVFormat) CSVRecord(org.apache.commons.csv.CSVRecord) StoryBookChapterGson(ai.elimu.model.v2.gson.content.StoryBookChapterGson)

Example 58 with CSVRecord

use of org.apache.commons.csv.CSVRecord in project kylo by Teradata.

In class CSVFileSchemaParser, the method populateSchema:

/**
 * Builds a {@link DefaultFileSchema} from up to the first 10 rows of the parsed CSV.
 * <p>
 * Row 0 establishes the field list (using header names when {@code headerRow} is set,
 * otherwise generated "Col_N" names); subsequent rows contribute sample values.
 *
 * @param parser the CSV parser positioned at the start of the file
 * @return the populated schema
 */
private DefaultFileSchema populateSchema(CSVParser parser) {
    DefaultFileSchema fileSchema = new DefaultFileSchema();
    int i = 0;
    ArrayList<Field> fields = new ArrayList<>();
    for (CSVRecord record : parser) {
        // Only sample the first 10 rows
        if (i > 9) {
            break;
        }
        int size = record.size();
        for (int j = 0; j < size; j++) {
            DefaultField field = null;
            if (i == 0) {
                field = new DefaultField();
                if (headerRow) {
                    field.setName(record.get(j));
                } else {
                    // 1-based generated column names: Col_1, Col_2, ...
                    field.setName("Col_" + (j + 1));
                }
                fields.add(field);
            } else {
                try {
                    field = (DefaultField) fields.get(j);
                    field.getSampleValues().add(StringUtils.defaultString(record.get(j), ""));
                } catch (IndexOutOfBoundsException e) {
                    // Row has more columns than the header row; skip the extra cell.
                    // Fixed: SLF4J placeholders are {}, not [?] — the arguments were never substituted
                    LOG.warn("Sample file has potential sparse column problem at row [{}] field [{}]", i + 1, j + 1);
                }
            }
        }
        i++;
    }
    fileSchema.setFields(fields);
    return fileSchema;
}
Also used : DefaultField(com.thinkbiganalytics.discovery.model.DefaultField) Field(com.thinkbiganalytics.discovery.schema.Field) DefaultFileSchema(com.thinkbiganalytics.discovery.model.DefaultFileSchema) ArrayList(java.util.ArrayList) CSVRecord(org.apache.commons.csv.CSVRecord) DefaultField(com.thinkbiganalytics.discovery.model.DefaultField)

Example 59 with CSVRecord

use of org.apache.commons.csv.CSVRecord in project drools-wb by kiegroup.

In class ScenarioCsvImportExport, the method importData:

/**
 * Imports scenario data from raw CSV text into a clone of the given model.
 * <p>
 * The first {@code HEADER_SIZE} records are treated as header rows and skipped;
 * each remaining record must have exactly one cell per fact mapping.
 *
 * @param raw the raw CSV content
 * @param originalScesimModel the model whose descriptor defines the expected columns
 * @return a clone of the model populated with the imported rows
 * @throws IOException if the CSV content cannot be parsed
 * @throws IllegalArgumentException if the header is missing or a row has the wrong number of cells
 */
public <T extends AbstractScesimData> AbstractScesimModel<T> importData(String raw, AbstractScesimModel<T> originalScesimModel) throws IOException {
    // try-with-resources: the parser was previously never closed (resource leak)
    try (CSVParser csvParser = CSVFormat.DEFAULT.parse(new StringReader(raw))) {
        AbstractScesimModel<T> toReturn = originalScesimModel.cloneModel();
        toReturn.clearDatas();
        List<FactMapping> factMappings = toReturn.getScesimModelDescriptor().getUnmodifiableFactMappings();
        List<CSVRecord> csvRecords = csvParser.getRecords();
        if (csvRecords.size() < HEADER_SIZE) {
            throw new IllegalArgumentException("Malformed file, missing header");
        }
        // Drop the header rows; only data rows remain
        csvRecords = csvRecords.subList(HEADER_SIZE, csvRecords.size());
        for (CSVRecord csvRecord : csvRecords) {
            T scesimDataToFill = toReturn.addData();
            if (csvRecord.size() != factMappings.size()) {
                throw new IllegalArgumentException("Malformed row " + csvRecord);
            }
            for (int i = 0; i < factMappings.size(); i += 1) {
                FactMapping factMapping = factMappings.get(i);
                // Empty cells are imported as null rather than empty string
                String valueToImport = "".equals(csvRecord.get(i)) ? null : csvRecord.get(i);
                scesimDataToFill.addMappingValue(factMapping.getFactIdentifier(), factMapping.getExpressionIdentifier(), valueToImport);
            }
        }
        return toReturn;
    }
}
Also used : FactMapping(org.drools.scenariosimulation.api.model.FactMapping) CSVParser(org.apache.commons.csv.CSVParser) StringReader(java.io.StringReader) CSVRecord(org.apache.commons.csv.CSVRecord)

Example 60 with CSVRecord

use of org.apache.commons.csv.CSVRecord in project ArachneCentralAPI by OHDSI.

In class SubmissionHelper, the method parseCsvDataframeToJson:

/**
 * Parses a CSV dataframe stored at the given path into a JSON object with
 * "headers" (array of column names) and "records" (array of row objects).
 * Numeric-looking cell values are emitted as numbers, everything else as strings.
 *
 * @param filepath content-storage path of the CSV file (first row is the header)
 * @return JSON representation of the dataframe
 * @throws IOException if the content cannot be read or parsed
 */
private JsonObject parseCsvDataframeToJson(String filepath) throws IOException {
    final JsonObject resultInfo = new JsonObject();
    // try-with-resources: the parser was previously never closed (resource leak)
    try (CSVParser parser = CSVParser.parse(contentStorageService.getContentByFilepath(filepath), Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader())) {
        final Map<String, Integer> headerMap = parser.getHeaderMap();
        final List<CSVRecord> csvRecordList = parser.getRecords();
        JsonArray jsonHeaders = new JsonArray();
        headerMap.forEach((key, value) -> jsonHeaders.add(key));
        resultInfo.add("headers", jsonHeaders);
        JsonArray jsonRecords = new JsonArray();
        // "record" is a restricted identifier in modern Java; renamed for forward compatibility
        csvRecordList.forEach(csvRecord -> {
            final JsonObject jsonRecord = new JsonObject();
            for (Map.Entry<String, Integer> entry : headerMap.entrySet()) {
                final String key = entry.getKey();
                final String value = csvRecord.get(entry.getValue());
                if (NumberUtils.isCreatable(value)) {
                    // NOTE(review): Float may lose precision for large/long decimals — consider Double
                    jsonRecord.addProperty(key, Float.parseFloat(value));
                } else {
                    jsonRecord.addProperty(key, value);
                }
            }
            jsonRecords.add(jsonRecord);
        });
        resultInfo.add("records", jsonRecords);
    }
    return resultInfo;
}
Also used : JsonArray(com.google.gson.JsonArray) CSVParser(org.apache.commons.csv.CSVParser) JsonObject(com.google.gson.JsonObject) CSVRecord(org.apache.commons.csv.CSVRecord) Map(java.util.Map) HashMap(java.util.HashMap)

Aggregations

CSVRecord (org.apache.commons.csv.CSVRecord)127 CSVParser (org.apache.commons.csv.CSVParser)71 IOException (java.io.IOException)40 CSVFormat (org.apache.commons.csv.CSVFormat)40 ArrayList (java.util.ArrayList)36 Reader (java.io.Reader)24 StringReader (java.io.StringReader)22 InputStreamReader (java.io.InputStreamReader)18 FileReader (java.io.FileReader)16 Test (org.junit.Test)14 Path (java.nio.file.Path)13 HashMap (java.util.HashMap)11 File (java.io.File)10 PreparedStatement (java.sql.PreparedStatement)10 InputStream (java.io.InputStream)9 ResultSet (java.sql.ResultSet)9 PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection)9 CSVCommonsLoader (org.apache.phoenix.util.CSVCommonsLoader)9 BufferedReader (java.io.BufferedReader)8 Map (java.util.Map)7