Use of ai.elimu.model.content.Sound in project webapp by elimu-ai.
The class DbContentImportHelper, method performDatabaseContentImport.
/**
 * Extracts educational content from the CSV files in {@code src/main/resources/db/content_<Environment>/<Language>/} and
 * stores it in the database.
 *
 * @param environment The environment from which to import the database content.
 * @param language The language to use during the import.
 * @param webApplicationContext Context needed to access DAOs.
 */
public synchronized void performDatabaseContentImport(Environment environment, Language language, WebApplicationContext webApplicationContext) {
    logger.info("performDatabaseContentImport");
    logger.info("environment: " + environment + ", language: " + language);
    if (!((environment == Environment.TEST) || (environment == Environment.PROD))) {
        throw new IllegalArgumentException("Database content can only be imported from the TEST environment or from the PROD environment");
    }
    String contentDirectoryPath = "db" + File.separator + "content_" + environment + File.separator + language.toString().toLowerCase();
    logger.info("contentDirectoryPath: \"" + contentDirectoryPath + "\"");
    URL contentDirectoryURL = getClass().getClassLoader().getResource(contentDirectoryPath);
    logger.info("contentDirectoryURL: " + contentDirectoryURL);
    if (contentDirectoryURL == null) {
        logger.warn("The content directory was not found. Aborting content import.");
        return;
    }
    File contentDirectory = new File(contentDirectoryURL.getPath());
    logger.info("contentDirectory: " + contentDirectory);
    contributorDao = (ContributorDao) webApplicationContext.getBean("contributorDao");
    Contributor contributor = new Contributor();
    contributor.setEmail("dev@elimu.ai");
    contributor.setFirstName("Dev");
    contributor.setLastName("Contributor");
    contributor.setRoles(new HashSet<>(Arrays.asList(Role.CONTRIBUTOR, Role.EDITOR, Role.ANALYST, Role.ADMIN)));
    contributor.setRegistrationTime(Calendar.getInstance());
    contributorDao.create(contributor);
    // Extract and import Letters from CSV file in src/main/resources/
    File lettersCsvFile = new File(contentDirectory, "letters.csv");
    List<Letter> letters = CsvContentExtractionHelper.getLettersFromCsvBackup(lettersCsvFile, soundDao);
    logger.info("letters.size(): " + letters.size());
    letterDao = (LetterDao) webApplicationContext.getBean("letterDao");
    letterContributionEventDao = (LetterContributionEventDao) webApplicationContext.getBean("letterContributionEventDao");
    for (Letter letter : letters) {
        letterDao.create(letter);
        LetterContributionEvent letterContributionEvent = new LetterContributionEvent();
        letterContributionEvent.setContributor(contributor);
        letterContributionEvent.setLetter(letter);
        letterContributionEvent.setRevisionNumber(1);
        letterContributionEvent.setTime(Calendar.getInstance());
        letterContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
        letterContributionEvent.setPlatform(Platform.WEBAPP);
        letterContributionEventDao.create(letterContributionEvent);
    }
    // Extract and import Sounds from CSV file in src/main/resources/
    File soundsCsvFile = new File(contentDirectory, "sounds.csv");
    List<Sound> sounds = CsvContentExtractionHelper.getSoundsFromCsvBackup(soundsCsvFile);
    logger.info("sounds.size(): " + sounds.size());
    soundDao = (SoundDao) webApplicationContext.getBean("soundDao");
    for (Sound sound : sounds) {
        soundDao.create(sound);
    }
    // Extract and import letter-sound correspondences in src/main/resources/
    File letterToAllophoneMappingsCsvFile = new File(contentDirectory, "letter-sound-correspondences.csv");
    List<LetterSoundCorrespondence> letterSoundCorrespondences = CsvContentExtractionHelper.getLetterSoundCorrespondencesFromCsvBackup(letterToAllophoneMappingsCsvFile, letterDao, soundDao, letterSoundCorrespondenceDao);
logger.info("letterSoundCorrespondences.size(): " + letterSoundCorrespondences.size());
letterSoundCorrespondenceDao = (LetterSoundCorrespondenceDao) webApplicationContext.getBean("letterSoundCorrespondenceDao");
letterSoundCorrespondenceContributionEventDao = (LetterSoundCorrespondenceContributionEventDao) webApplicationContext.getBean("letterSoundCorrespondenceContributionEventDao");
for (LetterSoundCorrespondence letterSoundCorrespondence : letterSoundCorrespondences) {
letterSoundCorrespondenceDao.create(letterSoundCorrespondence);
LetterSoundCorrespondenceContributionEvent letterSoundCorrespondenceContributionEvent = new LetterSoundCorrespondenceContributionEvent();
letterSoundCorrespondenceContributionEvent.setContributor(contributor);
letterSoundCorrespondenceContributionEvent.setLetterSoundCorrespondence(letterSoundCorrespondence);
letterSoundCorrespondenceContributionEvent.setRevisionNumber(1);
letterSoundCorrespondenceContributionEvent.setTime(Calendar.getInstance());
letterSoundCorrespondenceContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
letterSoundCorrespondenceContributionEvent.setPlatform(Platform.WEBAPP);
letterSoundCorrespondenceContributionEventDao.create(letterSoundCorrespondenceContributionEvent);
}
// Extract and import Words from CSV file in src/main/resources/
File wordsCsvFile = new File(contentDirectory, "words.csv");
List<Word> words = CsvContentExtractionHelper.getWordsFromCsvBackup(wordsCsvFile, letterDao, soundDao, letterSoundCorrespondenceDao, wordDao);
logger.info("words.size(): " + words.size());
wordDao = (WordDao) webApplicationContext.getBean("wordDao");
wordContributionEventDao = (WordContributionEventDao) webApplicationContext.getBean("wordContributionEventDao");
for (Word word : words) {
wordDao.create(word);
WordContributionEvent wordContributionEvent = new WordContributionEvent();
wordContributionEvent.setContributor(contributor);
wordContributionEvent.setWord(word);
wordContributionEvent.setRevisionNumber(1);
wordContributionEvent.setTime(Calendar.getInstance());
wordContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
wordContributionEvent.setPlatform(Platform.WEBAPP);
wordContributionEventDao.create(wordContributionEvent);
}
// Extract and import Numbers from CSV file in src/main/resources/
File numbersCsvFile = new File(contentDirectory, "numbers.csv");
List<Number> numbers = CsvContentExtractionHelper.getNumbersFromCsvBackup(numbersCsvFile, wordDao);
logger.info("numbers.size(): " + numbers.size());
numberDao = (NumberDao) webApplicationContext.getBean("numberDao");
numberContributionEventDao = (NumberContributionEventDao) webApplicationContext.getBean("numberContributionEventDao");
for (Number number : numbers) {
numberDao.create(number);
NumberContributionEvent numberContributionEvent = new NumberContributionEvent();
numberContributionEvent.setContributor(contributor);
numberContributionEvent.setNumber(number);
numberContributionEvent.setRevisionNumber(1);
numberContributionEvent.setTime(Calendar.getInstance());
numberContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
numberContributionEvent.setPlatform(Platform.WEBAPP);
numberContributionEventDao.create(numberContributionEvent);
}
// Extract and import Syllables from CSV file in src/main/resources/
// TODO
// Extract and import Emojis from CSV file in src/main/resources/
File emojisCsvFile = new File(contentDirectory, "emojis.csv");
List<Emoji> emojis = CsvContentExtractionHelper.getEmojisFromCsvBackup(emojisCsvFile, wordDao);
logger.info("emojis.size(): " + emojis.size());
emojiDao = (EmojiDao) webApplicationContext.getBean("emojiDao");
for (Emoji emoji : emojis) {
emojiDao.create(emoji);
}
// Extract and import Images from CSV file in src/main/resources/
// TODO
// Extract and import Audios from CSV file in src/main/resources/
// TODO
// Extract and import StoryBooks from CSV file in src/main/resources/
File storyBooksCsvFile = new File(contentDirectory, "storybooks.csv");
List<StoryBookGson> storyBookGsons = CsvContentExtractionHelper.getStoryBooksFromCsvBackup(storyBooksCsvFile);
logger.info("storyBookGsons.size(): " + storyBookGsons.size());
storyBookDao = (StoryBookDao) webApplicationContext.getBean("storyBookDao");
storyBookChapterDao = (StoryBookChapterDao) webApplicationContext.getBean("storyBookChapterDao");
storyBookParagraphDao = (StoryBookParagraphDao) webApplicationContext.getBean("storyBookParagraphDao");
storyBookContributionEventDao = (StoryBookContributionEventDao) webApplicationContext.getBean("storyBookContributionEventDao");
for (StoryBookGson storyBookGson : storyBookGsons) {
// Convert from GSON to JPA
StoryBook storyBook = new StoryBook();
storyBook.setTitle(storyBookGson.getTitle());
storyBook.setDescription(storyBookGson.getDescription());
// TODO: storyBook.setContentLicense();
// TODO: storyBook.setAttributionUrl();
storyBook.setReadingLevel(storyBookGson.getReadingLevel());
storyBookDao.create(storyBook);
for (StoryBookChapterGson storyBookChapterGson : storyBookGson.getStoryBookChapters()) {
// Convert from GSON to JPA
StoryBookChapter storyBookChapter = new StoryBookChapter();
storyBookChapter.setStoryBook(storyBook);
storyBookChapter.setSortOrder(storyBookChapterGson.getSortOrder());
// TODO: storyBookChapter.setImage();
storyBookChapterDao.create(storyBookChapter);
for (StoryBookParagraphGson storyBookParagraphGson : storyBookChapterGson.getStoryBookParagraphs()) {
// Convert from GSON to JPA
StoryBookParagraph storyBookParagraph = new StoryBookParagraph();
storyBookParagraph.setStoryBookChapter(storyBookChapter);
storyBookParagraph.setSortOrder(storyBookParagraphGson.getSortOrder());
storyBookParagraph.setOriginalText(storyBookParagraphGson.getOriginalText());
List<String> wordsInOriginalText = WordExtractionHelper.getWords(storyBookParagraph.getOriginalText(), language);
logger.info("wordsInOriginalText.size(): " + wordsInOriginalText.size());
List<Word> paragraphWords = new ArrayList<>();
logger.info("paragraphWords.size(): " + paragraphWords.size());
for (String wordInOriginalText : wordsInOriginalText) {
logger.info("wordInOriginalText: \"" + wordInOriginalText + "\"");
wordInOriginalText = wordInOriginalText.toLowerCase();
logger.info("wordInOriginalText (lower-case): \"" + wordInOriginalText + "\"");
Word word = wordDao.readByText(wordInOriginalText);
logger.info("word: " + word);
paragraphWords.add(word);
}
storyBookParagraph.setWords(paragraphWords);
storyBookParagraphDao.create(storyBookParagraph);
}
}
StoryBookContributionEvent storyBookContributionEvent = new StoryBookContributionEvent();
storyBookContributionEvent.setContributor(contributor);
storyBookContributionEvent.setStoryBook(storyBook);
storyBookContributionEvent.setRevisionNumber(1);
storyBookContributionEvent.setTime(Calendar.getInstance());
storyBookContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
storyBookContributionEvent.setPlatform(Platform.WEBAPP);
storyBookContributionEventDao.create(storyBookContributionEvent);
}
// Extract and import Videos from CSV file in src/main/resources/
// TODO
String analyticsDirectoryPath = "db" + File.separator + "analytics_" + environment + File.separator + language.toString().toLowerCase();
logger.info("analyticsDirectoryPath: \"" + analyticsDirectoryPath + "\"");
URL analyticsDirectoryURL = getClass().getClassLoader().getResource(analyticsDirectoryPath);
logger.info("analyticsDirectoryURL: " + analyticsDirectoryURL);
if (analyticsDirectoryURL == null) {
logger.warn("The analytics directory was not found. Aborting analytics import.");
return;
}
File analyticsDirectory = new File(analyticsDirectoryURL.getPath());
logger.info("analyticsDirectory: " + analyticsDirectory);
// Extract and import LetterLearningEvents from CSV file in src/main/resources/
// TODO
// Extract and import WordLearningEvents from CSV file in src/main/resources/
// TODO
// Extract and import StoryBookLearningEvents from CSV file in src/main/resources/
File storyBookLearningEventsCsvFile = new File(analyticsDirectory, "storybook-learning-events.csv");
applicationDao = (ApplicationDao) webApplicationContext.getBean("applicationDao");
List<StoryBookLearningEvent> storyBookLearningEvents = CsvAnalyticsExtractionHelper.getStoryBookLearningEventsFromCsvBackup(storyBookLearningEventsCsvFile, applicationDao, storyBookDao);
logger.info("storyBookLearningEvents.size(): " + storyBookLearningEvents.size());
storyBookLearningEventDao = (StoryBookLearningEventDao) webApplicationContext.getBean("storyBookLearningEventDao");
for (StoryBookLearningEvent storyBookLearningEvent : storyBookLearningEvents) {
storyBookLearningEventDao.create(storyBookLearningEvent);
}
logger.info("Content import complete");
}
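For context, the sketch below shows one way this helper might be wired into application startup. The listener class, the Language.ENG value and the way the WebApplicationContext is obtained are illustrative assumptions, not code taken from the webapp project.

// Hypothetical startup hook (sketch only, not project code)
public class TestContentLoaderListener implements javax.servlet.ServletContextListener {

    @Override
    public void contextInitialized(javax.servlet.ServletContextEvent event) {
        // Look up the Spring context that backs the servlet context
        org.springframework.web.context.WebApplicationContext webApplicationContext =
                org.springframework.web.context.support.WebApplicationContextUtils.getWebApplicationContext(event.getServletContext());

        // Import the TEST content for one language (enum value assumed for illustration)
        new DbContentImportHelper().performDatabaseContentImport(Environment.TEST, Language.ENG, webApplicationContext);
    }

    @Override
    public void contextDestroyed(javax.servlet.ServletContextEvent event) {
        // Nothing to clean up
    }
}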
Use of ai.elimu.model.content.Sound in project webapp by elimu-ai.
The class CsvContentExtractionHelper, method getLetterSoundCorrespondencesFromCsvBackup.
/**
 * For information on how the CSV files were generated, see {@link LetterSoundCorrespondenceCsvExportController#handleRequest}.
 */
public static List<LetterSoundCorrespondence> getLetterSoundCorrespondencesFromCsvBackup(File csvFile, LetterDao letterDao, SoundDao soundDao, LetterSoundCorrespondenceDao letterSoundCorrespondenceDao) {
    logger.info("getLetterSoundCorrespondencesFromCsvBackup");
    List<LetterSoundCorrespondence> letterSoundCorrespondences = new ArrayList<>();
    Path csvFilePath = Paths.get(csvFile.toURI());
    logger.info("csvFilePath: " + csvFilePath);
    try {
        Reader reader = Files.newBufferedReader(csvFilePath);
        CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("id", "letter_ids", "letter_texts", "sound_ids", "sound_values_ipa", "usage_count").withSkipHeaderRecord();
        CSVParser csvParser = new CSVParser(reader, csvFormat);
        for (CSVRecord csvRecord : csvParser) {
            logger.info("csvRecord: " + csvRecord);
            LetterSoundCorrespondence letterSoundCorrespondence = new LetterSoundCorrespondence();
            JSONArray letterIdsJsonArray = new JSONArray(csvRecord.get("letter_ids"));
            logger.info("letterIdsJsonArray: " + letterIdsJsonArray);
            JSONArray letterTextsJsonArray = new JSONArray(csvRecord.get("letter_texts"));
            logger.info("letterTextsJsonArray: " + letterTextsJsonArray);
            List<Letter> letters = new ArrayList<>();
            for (int i = 0; i < letterTextsJsonArray.length(); i++) {
                String letterText = letterTextsJsonArray.getString(i);
                logger.info("Looking up Letter with text '" + letterText + "'");
                Letter letter = letterDao.readByText(letterText);
                logger.info("letter.getId(): " + letter.getId());
                letters.add(letter);
            }
            letterSoundCorrespondence.setLetters(letters);
            JSONArray soundIdsJsonArray = new JSONArray(csvRecord.get("sound_ids"));
            logger.info("soundIdsJsonArray: " + soundIdsJsonArray);
            JSONArray soundValuesIpaJsonArray = new JSONArray(csvRecord.get("sound_values_ipa"));
            logger.info("soundValuesIpaJsonArray: " + soundValuesIpaJsonArray);
            List<Sound> sounds = new ArrayList<>();
            for (int i = 0; i < soundValuesIpaJsonArray.length(); i++) {
                String soundValueIpa = soundValuesIpaJsonArray.getString(i);
                logger.info("Looking up Sound with IPA value /" + soundValueIpa + "/");
                Sound sound = soundDao.readByValueIpa(soundValueIpa);
                logger.info("sound.getId(): " + sound.getId());
                sounds.add(sound);
            }
            letterSoundCorrespondence.setSounds(sounds);
            Integer usageCount = Integer.valueOf(csvRecord.get("usage_count"));
            letterSoundCorrespondence.setUsageCount(usageCount);
            letterSoundCorrespondences.add(letterSoundCorrespondence);
        }
    } catch (IOException ex) {
        logger.error(ex);
    }
    return letterSoundCorrespondences;
}
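To make the parsing above concrete, a letter-sound-correspondences.csv backup accepted by this method looks roughly like the following. The values are made up for illustration; the letter and sound columns hold JSON arrays, which is why those fields appear quoted, with embedded quotes doubled, in the CSV:

id,letter_ids,letter_texts,sound_ids,sound_values_ipa,usage_count
1,[5],"[""a""]",[3],"[""æ""]",104
2,"[20,8]","[""t"",""h""]",[17],"[""θ""]",37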
Use of ai.elimu.model.content.Sound in project webapp by elimu-ai.
The class CsvContentExtractionHelper, method getWordsFromCsvBackup.
/**
 * For information on how the CSV files were generated, see {@link WordCsvExportController#handleRequest}.
 */
public static List<Word> getWordsFromCsvBackup(File csvFile, LetterDao letterDao, SoundDao soundDao, LetterSoundCorrespondenceDao letterSoundCorrespondenceDao, WordDao wordDao) {
    logger.info("getWordsFromCsvBackup");
    List<Word> words = new ArrayList<>();
    Path csvFilePath = Paths.get(csvFile.toURI());
    logger.info("csvFilePath: " + csvFilePath);
    try {
        Reader reader = Files.newBufferedReader(csvFilePath);
        CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("id", "text", "letter_sound_correspondences", "usage_count", "word_type", "spelling_consistency", "root_word_id", "root_word_text").withSkipHeaderRecord();
        CSVParser csvParser = new CSVParser(reader, csvFormat);
        for (CSVRecord csvRecord : csvParser) {
            logger.info("csvRecord: " + csvRecord);
            Word word = new Word();
            String text = csvRecord.get("text");
            word.setText(text);
            JSONArray letterSoundCorrespondencesJsonArray = new JSONArray(csvRecord.get("letter_sound_correspondences"));
            logger.info("letterSoundCorrespondencesJsonArray: " + letterSoundCorrespondencesJsonArray);
            List<LetterSoundCorrespondence> letterSoundCorrespondences = new ArrayList<>();
            for (int i = 0; i < letterSoundCorrespondencesJsonArray.length(); i++) {
                JSONObject letterSoundCorrespondenceJsonObject = letterSoundCorrespondencesJsonArray.getJSONObject(i);
                logger.info("letterSoundCorrespondenceJsonObject: " + letterSoundCorrespondenceJsonObject);
                List<Letter> letters = new ArrayList<>();
                JSONArray lettersJsonArray = letterSoundCorrespondenceJsonObject.getJSONArray("letters");
                for (int j = 0; j < lettersJsonArray.length(); j++) {
                    Letter letter = letterDao.readByText(lettersJsonArray.getString(j));
                    letters.add(letter);
                }
                List<Sound> sounds = new ArrayList<>();
                JSONArray soundsJsonArray = letterSoundCorrespondenceJsonObject.getJSONArray("sounds");
                for (int j = 0; j < soundsJsonArray.length(); j++) {
                    Sound sound = soundDao.readByValueIpa(soundsJsonArray.getString(j));
                    sounds.add(sound);
                }
                LetterSoundCorrespondence letterSoundCorrespondence = letterSoundCorrespondenceDao.read(letters, sounds);
                logger.info("letterSoundCorrespondence.getId(): " + letterSoundCorrespondence.getId());
                letterSoundCorrespondences.add(letterSoundCorrespondence);
            }
            word.setLetterSoundCorrespondences(letterSoundCorrespondences);
            Integer usageCount = Integer.valueOf(csvRecord.get("usage_count"));
            word.setUsageCount(usageCount);
            if (StringUtils.isNotBlank(csvRecord.get("word_type"))) {
                WordType wordType = WordType.valueOf(csvRecord.get("word_type"));
                word.setWordType(wordType);
            }
            if (StringUtils.isNotBlank(csvRecord.get("spelling_consistency"))) {
                SpellingConsistency spellingConsistency = SpellingConsistency.valueOf(csvRecord.get("spelling_consistency"));
                word.setSpellingConsistency(spellingConsistency);
            }
            // TODO: Store rootWords _after_ all Words have been stored
            // if (StringUtils.isNotBlank(csvRecord.get("root_word_text"))) {
            //     String rootWordText = csvRecord.get("root_word_text");
            //     Word rootWord = wordDao.readByText(language, rootWordText);
            //     word.setRootWord(rootWord);
            // }
            words.add(word);
        }
    } catch (IOException ex) {
        logger.error(ex);
    }
    return words;
}
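As a point of reference for the parsing above, the letter_sound_correspondences column of words.csv holds a JSON array with one object per letter-sound correspondence of the word, each object carrying a "letters" array (letter texts) and a "sounds" array (IPA values). For a hypothetical word "cat", the unescaped column value would look roughly like this:

[
  {"letters": ["c"], "sounds": ["k"]},
  {"letters": ["a"], "sounds": ["æ"]},
  {"letters": ["t"], "sounds": ["t"]}
]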
Use of ai.elimu.model.content.Sound in project webapp by elimu-ai.
The class CsvContentExtractionHelper, method getSoundsFromCsvBackup.
/**
 * For information on how the CSV files were generated, see {@link SoundCsvExportController#handleRequest}.
 */
public static List<Sound> getSoundsFromCsvBackup(File csvFile) {
    logger.info("getSoundsFromCsvBackup");
    List<Sound> sounds = new ArrayList<>();
    Path csvFilePath = Paths.get(csvFile.toURI());
    logger.info("csvFilePath: " + csvFilePath);
    try {
        Reader reader = Files.newBufferedReader(csvFilePath);
        CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("id", "value_ipa", "value_sampa", "audio_id", "diacritic", "sound_type", "usage_count").withSkipHeaderRecord();
        CSVParser csvParser = new CSVParser(reader, csvFormat);
        for (CSVRecord csvRecord : csvParser) {
            logger.info("csvRecord: " + csvRecord);
            Sound sound = new Sound();
            String valueIpa = csvRecord.get("value_ipa");
            sound.setValueIpa(valueIpa);
            String valueSampa = csvRecord.get("value_sampa");
            sound.setValueSampa(valueSampa);
            boolean diacritic = Boolean.valueOf(csvRecord.get("diacritic"));
            sound.setDiacritic(diacritic);
            if (StringUtils.isNotBlank(csvRecord.get("sound_type"))) {
                SoundType soundType = SoundType.valueOf(csvRecord.get("sound_type"));
                sound.setSoundType(soundType);
            }
            Integer usageCount = Integer.valueOf(csvRecord.get("usage_count"));
            sound.setUsageCount(usageCount);
            sounds.add(sound);
        }
    } catch (IOException ex) {
        logger.error(ex);
    }
    return sounds;
}
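For reference, a sounds.csv backup that this method accepts would look roughly like the following. The rows are made up for illustration: value_sampa is the X-SAMPA equivalent of the IPA value, the audio_id column may be empty, and the SoundType enum constants shown here are assumed:

id,value_ipa,value_sampa,audio_id,diacritic,sound_type,usage_count
1,æ,{,,false,VOWEL,104
2,t,t,,false,CONSONANT,89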
Use of ai.elimu.model.content.Sound in project webapp by elimu-ai.
The class LetterSoundCorrespondenceCsvExportController, method handleRequest.
@RequestMapping(value = "/letter-sound-correspondences.csv", method = RequestMethod.GET)
public void handleRequest(HttpServletResponse response, OutputStream outputStream) throws IOException {
    logger.info("handleRequest");
    List<LetterSoundCorrespondence> letterSoundCorrespondences = letterSoundCorrespondenceDao.readAllOrderedByUsage();
    logger.info("letterSoundCorrespondences.size(): " + letterSoundCorrespondences.size());
    CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("id", "letter_ids", "letter_texts", "sound_ids", "sound_values_ipa", "usage_count");
    StringWriter stringWriter = new StringWriter();
    CSVPrinter csvPrinter = new CSVPrinter(stringWriter, csvFormat);
    for (LetterSoundCorrespondence letterSoundCorrespondence : letterSoundCorrespondences) {
        logger.info("letterSoundCorrespondence.getId(): \"" + letterSoundCorrespondence.getId() + "\"");
        JSONArray letterIdsJsonArray = new JSONArray();
        int index = 0;
        for (Letter letter : letterSoundCorrespondence.getLetters()) {
            letterIdsJsonArray.put(index, letter.getId());
            index++;
        }
        JSONArray letterTextsJsonArray = new JSONArray();
        index = 0;
        for (Letter letter : letterSoundCorrespondence.getLetters()) {
            letterTextsJsonArray.put(index, letter.getText());
            index++;
        }
        JSONArray soundIdsJsonArray = new JSONArray();
        index = 0;
        for (Sound sound : letterSoundCorrespondence.getSounds()) {
            soundIdsJsonArray.put(index, sound.getId());
            index++;
        }
        JSONArray soundValuesIpaJsonArray = new JSONArray();
        index = 0;
        for (Sound sound : letterSoundCorrespondence.getSounds()) {
            soundValuesIpaJsonArray.put(index, sound.getValueIpa());
            index++;
        }
        csvPrinter.printRecord(letterSoundCorrespondence.getId(), letterIdsJsonArray, letterTextsJsonArray, soundIdsJsonArray, soundValuesIpaJsonArray, letterSoundCorrespondence.getUsageCount());
        csvPrinter.flush();
    }
    String csvFileContent = stringWriter.toString();
    response.setContentType("text/csv");
    byte[] bytes = csvFileContent.getBytes();
    response.setContentLength(bytes.length);
    try {
        outputStream.write(bytes);
        outputStream.flush();
        outputStream.close();
    } catch (IOException ex) {
        logger.error(ex);
    }
}
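Finally, a minimal sketch of how a client could download this export. The host, port and context path are placeholders, and any class-level @RequestMapping prefix on the controller (not shown above) would also need to be included in the path:

// Hypothetical client-side download of the CSV export (sketch only, not project code)
public static void main(String[] args) {
    org.springframework.web.client.RestTemplate restTemplate = new org.springframework.web.client.RestTemplate();
    String baseUrl = "http://localhost:8080"; // placeholder host and port
    String csv = restTemplate.getForObject(baseUrl + "/letter-sound-correspondences.csv", String.class);
    // Each record printed by the controller above becomes one line of this String
    System.out.println(csv);
}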