Use of ai.elimu.model.content.LetterSoundCorrespondence in the webapp project by elimu-ai.
Class SoundUsageCountScheduler, method execute():
// At 06:30 every day
@Scheduled(cron = "00 30 06 * * *")
public synchronized void execute() {
    logger.info("execute");

    logger.info("Calculating usage count of Sounds");

    // Long = Sound ID
    // Integer = Usage count
    Map<Long, Integer> soundFrequencyMap = new HashMap<>();

    // Summarize the usage count of each Word's Sounds based on the LetterSoundCorrespondence's
    // usage count (see LetterSoundCorrespondenceUsageCountScheduler).
    List<Word> words = wordDao.readAllOrdered();
    logger.info("words.size(): " + words.size());
    for (Word word : words) {
        for (LetterSoundCorrespondence letterSoundCorrespondence : word.getLetterSoundCorrespondences()) {
            for (Sound sound : letterSoundCorrespondence.getSounds()) {
                soundFrequencyMap.put(sound.getId(), soundFrequencyMap.getOrDefault(sound.getId(), 0) + letterSoundCorrespondence.getUsageCount());
            }
        }
    }

    // Update each Sound's usage count in the database
    for (Long soundId : soundFrequencyMap.keySet()) {
        Sound sound = soundDao.read(soundId);
        sound.setUsageCount(soundFrequencyMap.get(soundId));
        soundDao.update(sound);
    }

    logger.info("execute complete");
}
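The nested loops above accumulate a per-Sound total with getOrDefault followed by put. The same accumulation can be written with Map.merge, which combines the lookup and the update in one call. The standalone sketch below runs on hypothetical (soundId, usageCount) pairs and is purely illustrative; it is not part of the webapp code.

import java.util.HashMap;
import java.util.Map;

public class SoundUsageCountSketch {

    public static void main(String[] args) {
        // Hypothetical (soundId, usageCount) pairs, standing in for the values read
        // from each Word's LetterSoundCorrespondences in the scheduler above.
        long[][] observations = { {1L, 5}, {2L, 3}, {1L, 7} };

        Map<Long, Integer> soundFrequencyMap = new HashMap<>();
        for (long[] observation : observations) {
            long soundId = observation[0];
            int usageCount = (int) observation[1];
            // merge() adds the new usage count to any previously accumulated value,
            // equivalent to the getOrDefault() + put() pair in execute().
            soundFrequencyMap.merge(soundId, usageCount, Integer::sum);
        }

        System.out.println(soundFrequencyMap); // e.g. {1=12, 2=3}
    }
}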
Use of ai.elimu.model.content.LetterSoundCorrespondence in the webapp project by elimu-ai.
Class WordCreateController, method autoSelectLetterSoundCorrespondences():
private void autoSelectLetterSoundCorrespondences(Word word) {
    logger.info("autoSelectLetterSoundCorrespondences");

    String wordText = word.getText();

    List<LetterSoundCorrespondence> letterSoundCorrespondences = new ArrayList<>();
    List<LetterSoundCorrespondence> allLetterSoundCorrespondencesOrderedByLettersLength = letterSoundCorrespondenceDao.readAllOrderedByLettersLength();
    while (StringUtils.isNotBlank(wordText)) {
        logger.info("wordText: \"" + wordText + "\"");
        boolean isMatch = false;
        for (LetterSoundCorrespondence letterSoundCorrespondence : allLetterSoundCorrespondencesOrderedByLettersLength) {
            String letterSoundCorrespondenceLetters = letterSoundCorrespondence.getLetters().stream().map(Letter::getText).collect(Collectors.joining());
            logger.info("letterSoundCorrespondenceLetters: \"" + letterSoundCorrespondenceLetters + "\"");
            if (wordText.startsWith(letterSoundCorrespondenceLetters)) {
                isMatch = true;
                logger.info("Found match at the beginning of \"" + wordText + "\"");
                letterSoundCorrespondences.add(letterSoundCorrespondence);

                // Remove the match from the word
                wordText = wordText.substring(letterSoundCorrespondenceLetters.length());

                break;
            }
        }
        if (!isMatch) {
            // Skip auto-selection for the subsequent letters
            break;
        }
    }

    word.setLetterSoundCorrespondences(letterSoundCorrespondences);
}
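This is a greedy prefix match: the DAO method name suggests the correspondences are ordered by the length of their letters, presumably longest first, so that a multi-letter grapheme such as "sh" wins over its single-letter prefix "s". The standalone sketch below shows the same matching strategy over plain strings; the grapheme list and the example word are invented and do not come from the webapp.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class GreedyGraphemeMatchSketch {

    public static void main(String[] args) {
        // Hypothetical graphemes, ordered by length (longest first), standing in for
        // the concatenated letter texts of each LetterSoundCorrespondence.
        List<String> graphemes = Arrays.asList("sh", "ee", "p", "s", "h", "e");

        String wordText = "sheep";
        List<String> selected = new ArrayList<>();

        while (!wordText.isEmpty()) {
            boolean isMatch = false;
            for (String grapheme : graphemes) {
                if (wordText.startsWith(grapheme)) {
                    selected.add(grapheme);
                    // Remove the matched grapheme from the beginning of the remaining text
                    wordText = wordText.substring(grapheme.length());
                    isMatch = true;
                    break;
                }
            }
            if (!isMatch) {
                // No grapheme matches the beginning of the remaining text; stop, as the controller does
                break;
            }
        }

        System.out.println(selected); // [sh, ee, p]
    }
}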
Use of ai.elimu.model.content.LetterSoundCorrespondence in the webapp project by elimu-ai.
Class WordCsvExportController, method handleRequest():
@RequestMapping(value = "/words.csv", method = RequestMethod.GET)
public void handleRequest(HttpServletResponse response, OutputStream outputStream) throws IOException {
    logger.info("handleRequest");

    List<Word> words = wordDao.readAllOrderedByUsage();
    logger.info("words.size(): " + words.size());

    CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("id", "text", "letter_sound_correspondences", "usage_count", "word_type", "spelling_consistency", "root_word_id", "root_word_text");
    StringWriter stringWriter = new StringWriter();
    CSVPrinter csvPrinter = new CSVPrinter(stringWriter, csvFormat);

    for (Word word : words) {
        logger.info("word.getText(): \"" + word.getText() + "\"");

        JSONArray letterSoundCorrespondencesJsonArray = new JSONArray();
        int index = 0;
        for (LetterSoundCorrespondence letterSoundCorrespondence : word.getLetterSoundCorrespondences()) {
            JSONObject letterSoundCorrespondenceJsonObject = new JSONObject();
            letterSoundCorrespondenceJsonObject.put("id", letterSoundCorrespondence.getId());

            String[] lettersArray = new String[letterSoundCorrespondence.getLetters().size()];
            for (int i = 0; i < lettersArray.length; i++) {
                lettersArray[i] = letterSoundCorrespondence.getLetters().get(i).getText();
            }
            letterSoundCorrespondenceJsonObject.put("letters", lettersArray);

            String[] soundsArray = new String[letterSoundCorrespondence.getSounds().size()];
            for (int i = 0; i < soundsArray.length; i++) {
                soundsArray[i] = letterSoundCorrespondence.getSounds().get(i).getValueIpa();
            }
            letterSoundCorrespondenceJsonObject.put("sounds", soundsArray);

            letterSoundCorrespondenceJsonObject.put("usageCount", letterSoundCorrespondence.getUsageCount());

            letterSoundCorrespondencesJsonArray.put(index, letterSoundCorrespondenceJsonObject);
            index++;
        }

        Long rootWordId = null;
        String rootWordText = null;
        if (word.getRootWord() != null) {
            rootWordId = word.getRootWord().getId();
            rootWordText = word.getRootWord().getText();
        }

        csvPrinter.printRecord(word.getId(), word.getText(), letterSoundCorrespondencesJsonArray, word.getUsageCount(), word.getWordType(), word.getSpellingConsistency(), rootWordId, rootWordText);
        csvPrinter.flush();
    }

    String csvFileContent = stringWriter.toString();
    response.setContentType("text/csv");
    byte[] bytes = csvFileContent.getBytes();
    response.setContentLength(bytes.length);
    try {
        outputStream.write(bytes);
        outputStream.flush();
        outputStream.close();
    } catch (IOException ex) {
        logger.error(ex);
    }
}
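The export stores each Word's letter-sound correspondences as a JSON array inside a single CSV column. The standalone sketch below shows how one such record could be produced with Apache Commons CSV and org.json, the two libraries used above; the sample values (id 1, the word "ship", the grapheme "sh" with sound "ʃ") are invented for illustration.

import java.io.IOException;
import java.io.StringWriter;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.json.JSONArray;
import org.json.JSONObject;

public class CsvWithJsonColumnSketch {

    public static void main(String[] args) throws IOException {
        // Hypothetical letter-sound correspondence, serialized the same way as in handleRequest()
        JSONObject correspondence = new JSONObject();
        correspondence.put("id", 1L);
        correspondence.put("letters", new String[] {"s", "h"});
        correspondence.put("sounds", new String[] {"ʃ"});
        correspondence.put("usageCount", 42);

        JSONArray correspondences = new JSONArray();
        correspondences.put(correspondence);

        StringWriter stringWriter = new StringWriter();
        CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("id", "text", "letter_sound_correspondences");
        try (CSVPrinter csvPrinter = new CSVPrinter(stringWriter, csvFormat)) {
            // The JSONArray is written as one (quoted) CSV field via its toString()
            csvPrinter.printRecord(1L, "ship", correspondences);
        }

        System.out.println(stringWriter);
    }
}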
Use of ai.elimu.model.content.LetterSoundCorrespondence in the webapp project by elimu-ai.
Class WordEditController, method autoSelectLetterSoundCorrespondences():
private void autoSelectLetterSoundCorrespondences(Word word) {
    logger.info("autoSelectLetterSoundCorrespondences");

    String wordText = word.getText();

    List<LetterSoundCorrespondence> letterSoundCorrespondences = new ArrayList<>();
    List<LetterSoundCorrespondence> allLetterSoundCorrespondencesOrderedByLettersLength = letterSoundCorrespondenceDao.readAllOrderedByLettersLength();
    while (StringUtils.isNotBlank(wordText)) {
        logger.info("wordText: \"" + wordText + "\"");
        boolean isMatch = false;
        for (LetterSoundCorrespondence letterSoundCorrespondence : allLetterSoundCorrespondencesOrderedByLettersLength) {
            String letterSoundCorrespondenceLetters = letterSoundCorrespondence.getLetters().stream().map(Letter::getText).collect(Collectors.joining());
            logger.info("letterSoundCorrespondenceLetters: \"" + letterSoundCorrespondenceLetters + "\"");
            if (wordText.startsWith(letterSoundCorrespondenceLetters)) {
                isMatch = true;
                logger.info("Found match at the beginning of \"" + wordText + "\"");
                letterSoundCorrespondences.add(letterSoundCorrespondence);

                // Remove the match from the word
                wordText = wordText.substring(letterSoundCorrespondenceLetters.length());

                break;
            }
        }
        if (!isMatch) {
            // Skip auto-selection for the subsequent letters
            break;
        }
    }

    word.setLetterSoundCorrespondences(letterSoundCorrespondences);
}
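WordCreateController and WordEditController contain identical copies of this method. One option, shown here only as a refactoring sketch and not present in the current webapp code, is to move the matching logic into a shared helper that both controllers can call; the class name, its placement, and the import packages for Letter and StringUtils are assumptions.

// Assumed packages: LetterSoundCorrespondence's package is given on this page,
// Letter's and StringUtils' are assumptions.
import ai.elimu.model.content.Letter;
import ai.elimu.model.content.LetterSoundCorrespondence;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;

public class LetterSoundCorrespondenceMatcher {

    /**
     * Greedily matches letter-sound correspondences against the beginning of the word text
     * (letters concatenated, longest first) and returns the selected correspondences.
     */
    public static List<LetterSoundCorrespondence> autoSelect(String wordText,
            List<LetterSoundCorrespondence> correspondencesOrderedByLettersLength) {
        List<LetterSoundCorrespondence> selected = new ArrayList<>();
        while (StringUtils.isNotBlank(wordText)) {
            boolean isMatch = false;
            for (LetterSoundCorrespondence correspondence : correspondencesOrderedByLettersLength) {
                String letters = correspondence.getLetters().stream()
                        .map(Letter::getText)
                        .collect(Collectors.joining());
                if (wordText.startsWith(letters)) {
                    selected.add(correspondence);
                    wordText = wordText.substring(letters.length());
                    isMatch = true;
                    break;
                }
            }
            if (!isMatch) {
                // No correspondence matches the remaining text; stop auto-selection
                break;
            }
        }
        return selected;
    }
}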
Use of ai.elimu.model.content.LetterSoundCorrespondence in the webapp project by elimu-ai.
Class DbContentImportHelper, method performDatabaseContentImport():
/**
 * Extracts educational content from the CSV files in {@code src/main/resources/db/content_TEST/<Language>/} and
 * stores it in the database.
 *
 * @param environment The environment from which to import the database content.
 * @param language The language to use during the import.
 * @param webApplicationContext Context needed to access DAOs.
 */
public synchronized void performDatabaseContentImport(Environment environment, Language language, WebApplicationContext webApplicationContext) {
    logger.info("performDatabaseContentImport");

    logger.info("environment: " + environment + ", language: " + language);

    if (!((environment == Environment.TEST) || (environment == Environment.PROD))) {
        throw new IllegalArgumentException("Database content can only be imported from the TEST environment or from the PROD environment");
    }

    String contentDirectoryPath = "db" + File.separator + "content_" + environment + File.separator + language.toString().toLowerCase();
    logger.info("contentDirectoryPath: \"" + contentDirectoryPath + "\"");
    URL contentDirectoryURL = getClass().getClassLoader().getResource(contentDirectoryPath);
    logger.info("contentDirectoryURL: " + contentDirectoryURL);
    if (contentDirectoryURL == null) {
        logger.warn("The content directory was not found. Aborting content import.");
        return;
    }
    File contentDirectory = new File(contentDirectoryURL.getPath());
    logger.info("contentDirectory: " + contentDirectory);

    contributorDao = (ContributorDao) webApplicationContext.getBean("contributorDao");
    Contributor contributor = new Contributor();
    contributor.setEmail("dev@elimu.ai");
    contributor.setFirstName("Dev");
    contributor.setLastName("Contributor");
    contributor.setRoles(new HashSet<>(Arrays.asList(Role.CONTRIBUTOR, Role.EDITOR, Role.ANALYST, Role.ADMIN)));
    contributor.setRegistrationTime(Calendar.getInstance());
    contributorDao.create(contributor);
    // Extract and import Letters from CSV file in src/main/resources/
    File lettersCsvFile = new File(contentDirectory, "letters.csv");
    soundDao = (SoundDao) webApplicationContext.getBean("soundDao");
    List<Letter> letters = CsvContentExtractionHelper.getLettersFromCsvBackup(lettersCsvFile, soundDao);
    logger.info("letters.size(): " + letters.size());
    letterDao = (LetterDao) webApplicationContext.getBean("letterDao");
    letterContributionEventDao = (LetterContributionEventDao) webApplicationContext.getBean("letterContributionEventDao");
    for (Letter letter : letters) {
        letterDao.create(letter);

        LetterContributionEvent letterContributionEvent = new LetterContributionEvent();
        letterContributionEvent.setContributor(contributor);
        letterContributionEvent.setLetter(letter);
        letterContributionEvent.setRevisionNumber(1);
        letterContributionEvent.setTime(Calendar.getInstance());
        letterContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
        letterContributionEvent.setPlatform(Platform.WEBAPP);
        letterContributionEventDao.create(letterContributionEvent);
    }

    // Extract and import Sounds from CSV file in src/main/resources/
    File soundsCsvFile = new File(contentDirectory, "sounds.csv");
    List<Sound> sounds = CsvContentExtractionHelper.getSoundsFromCsvBackup(soundsCsvFile);
    logger.info("sounds.size(): " + sounds.size());
    for (Sound sound : sounds) {
        soundDao.create(sound);
    }
    // Extract and import letter-sound correspondences from CSV file in src/main/resources/
    File letterToAllophoneMappingsCsvFile = new File(contentDirectory, "letter-sound-correspondences.csv");
    letterSoundCorrespondenceDao = (LetterSoundCorrespondenceDao) webApplicationContext.getBean("letterSoundCorrespondenceDao");
    List<LetterSoundCorrespondence> letterSoundCorrespondences = CsvContentExtractionHelper.getLetterSoundCorrespondencesFromCsvBackup(letterToAllophoneMappingsCsvFile, letterDao, soundDao, letterSoundCorrespondenceDao);
    logger.info("letterSoundCorrespondences.size(): " + letterSoundCorrespondences.size());
    letterSoundCorrespondenceContributionEventDao = (LetterSoundCorrespondenceContributionEventDao) webApplicationContext.getBean("letterSoundCorrespondenceContributionEventDao");
    for (LetterSoundCorrespondence letterSoundCorrespondence : letterSoundCorrespondences) {
        letterSoundCorrespondenceDao.create(letterSoundCorrespondence);

        LetterSoundCorrespondenceContributionEvent letterSoundCorrespondenceContributionEvent = new LetterSoundCorrespondenceContributionEvent();
        letterSoundCorrespondenceContributionEvent.setContributor(contributor);
        letterSoundCorrespondenceContributionEvent.setLetterSoundCorrespondence(letterSoundCorrespondence);
        letterSoundCorrespondenceContributionEvent.setRevisionNumber(1);
        letterSoundCorrespondenceContributionEvent.setTime(Calendar.getInstance());
        letterSoundCorrespondenceContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
        letterSoundCorrespondenceContributionEvent.setPlatform(Platform.WEBAPP);
        letterSoundCorrespondenceContributionEventDao.create(letterSoundCorrespondenceContributionEvent);
    }
    // Extract and import Words from CSV file in src/main/resources/
    File wordsCsvFile = new File(contentDirectory, "words.csv");
    wordDao = (WordDao) webApplicationContext.getBean("wordDao");
    List<Word> words = CsvContentExtractionHelper.getWordsFromCsvBackup(wordsCsvFile, letterDao, soundDao, letterSoundCorrespondenceDao, wordDao);
    logger.info("words.size(): " + words.size());
    wordContributionEventDao = (WordContributionEventDao) webApplicationContext.getBean("wordContributionEventDao");
    for (Word word : words) {
        wordDao.create(word);

        WordContributionEvent wordContributionEvent = new WordContributionEvent();
        wordContributionEvent.setContributor(contributor);
        wordContributionEvent.setWord(word);
        wordContributionEvent.setRevisionNumber(1);
        wordContributionEvent.setTime(Calendar.getInstance());
        wordContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
        wordContributionEvent.setPlatform(Platform.WEBAPP);
        wordContributionEventDao.create(wordContributionEvent);
    }
    // Extract and import Numbers from CSV file in src/main/resources/
    File numbersCsvFile = new File(contentDirectory, "numbers.csv");
    List<Number> numbers = CsvContentExtractionHelper.getNumbersFromCsvBackup(numbersCsvFile, wordDao);
    logger.info("numbers.size(): " + numbers.size());
    numberDao = (NumberDao) webApplicationContext.getBean("numberDao");
    numberContributionEventDao = (NumberContributionEventDao) webApplicationContext.getBean("numberContributionEventDao");
    for (Number number : numbers) {
        numberDao.create(number);

        NumberContributionEvent numberContributionEvent = new NumberContributionEvent();
        numberContributionEvent.setContributor(contributor);
        numberContributionEvent.setNumber(number);
        numberContributionEvent.setRevisionNumber(1);
        numberContributionEvent.setTime(Calendar.getInstance());
        numberContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
        numberContributionEvent.setPlatform(Platform.WEBAPP);
        numberContributionEventDao.create(numberContributionEvent);
    }

    // Extract and import Syllables from CSV file in src/main/resources/
    // TODO

    // Extract and import Emojis from CSV file in src/main/resources/
    File emojisCsvFile = new File(contentDirectory, "emojis.csv");
    List<Emoji> emojis = CsvContentExtractionHelper.getEmojisFromCsvBackup(emojisCsvFile, wordDao);
    logger.info("emojis.size(): " + emojis.size());
    emojiDao = (EmojiDao) webApplicationContext.getBean("emojiDao");
    for (Emoji emoji : emojis) {
        emojiDao.create(emoji);
    }
    // Extract and import Images from CSV file in src/main/resources/
    // TODO

    // Extract and import Audios from CSV file in src/main/resources/
    // TODO

    // Extract and import StoryBooks from CSV file in src/main/resources/
    File storyBooksCsvFile = new File(contentDirectory, "storybooks.csv");
    List<StoryBookGson> storyBookGsons = CsvContentExtractionHelper.getStoryBooksFromCsvBackup(storyBooksCsvFile);
    logger.info("storyBookGsons.size(): " + storyBookGsons.size());
    storyBookDao = (StoryBookDao) webApplicationContext.getBean("storyBookDao");
    storyBookChapterDao = (StoryBookChapterDao) webApplicationContext.getBean("storyBookChapterDao");
    storyBookParagraphDao = (StoryBookParagraphDao) webApplicationContext.getBean("storyBookParagraphDao");
    storyBookContributionEventDao = (StoryBookContributionEventDao) webApplicationContext.getBean("storyBookContributionEventDao");
    for (StoryBookGson storyBookGson : storyBookGsons) {
        // Convert from GSON to JPA
        StoryBook storyBook = new StoryBook();
        storyBook.setTitle(storyBookGson.getTitle());
        storyBook.setDescription(storyBookGson.getDescription());
        // TODO: storyBook.setContentLicense();
        // TODO: storyBook.setAttributionUrl();
        storyBook.setReadingLevel(storyBookGson.getReadingLevel());
        storyBookDao.create(storyBook);

        for (StoryBookChapterGson storyBookChapterGson : storyBookGson.getStoryBookChapters()) {
            // Convert from GSON to JPA
            StoryBookChapter storyBookChapter = new StoryBookChapter();
            storyBookChapter.setStoryBook(storyBook);
            storyBookChapter.setSortOrder(storyBookChapterGson.getSortOrder());
            // TODO: storyBookChapter.setImage();
            storyBookChapterDao.create(storyBookChapter);

            for (StoryBookParagraphGson storyBookParagraphGson : storyBookChapterGson.getStoryBookParagraphs()) {
                // Convert from GSON to JPA
                StoryBookParagraph storyBookParagraph = new StoryBookParagraph();
                storyBookParagraph.setStoryBookChapter(storyBookChapter);
                storyBookParagraph.setSortOrder(storyBookParagraphGson.getSortOrder());
                storyBookParagraph.setOriginalText(storyBookParagraphGson.getOriginalText());

                List<String> wordsInOriginalText = WordExtractionHelper.getWords(storyBookParagraph.getOriginalText(), language);
                logger.info("wordsInOriginalText.size(): " + wordsInOriginalText.size());
                List<Word> paragraphWords = new ArrayList<>();
                logger.info("paragraphWords.size(): " + paragraphWords.size());
                for (String wordInOriginalText : wordsInOriginalText) {
                    logger.info("wordInOriginalText: \"" + wordInOriginalText + "\"");
                    wordInOriginalText = wordInOriginalText.toLowerCase();
                    logger.info("wordInOriginalText (lower-case): \"" + wordInOriginalText + "\"");
                    Word word = wordDao.readByText(wordInOriginalText);
                    logger.info("word: " + word);
                    paragraphWords.add(word);
                }
                storyBookParagraph.setWords(paragraphWords);

                storyBookParagraphDao.create(storyBookParagraph);
            }
        }

        StoryBookContributionEvent storyBookContributionEvent = new StoryBookContributionEvent();
        storyBookContributionEvent.setContributor(contributor);
        storyBookContributionEvent.setStoryBook(storyBook);
        storyBookContributionEvent.setRevisionNumber(1);
        storyBookContributionEvent.setTime(Calendar.getInstance());
        storyBookContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
        storyBookContributionEvent.setPlatform(Platform.WEBAPP);
        storyBookContributionEventDao.create(storyBookContributionEvent);
    }
    // Extract and import Videos from CSV file in src/main/resources/
    // TODO

    String analyticsDirectoryPath = "db" + File.separator + "analytics_" + environment + File.separator + language.toString().toLowerCase();
    logger.info("analyticsDirectoryPath: \"" + analyticsDirectoryPath + "\"");
    URL analyticsDirectoryURL = getClass().getClassLoader().getResource(analyticsDirectoryPath);
    logger.info("analyticsDirectoryURL: " + analyticsDirectoryURL);
    if (analyticsDirectoryURL == null) {
        logger.warn("The analytics directory was not found. Aborting analytics import.");
        return;
    }
    File analyticsDirectory = new File(analyticsDirectoryURL.getPath());
    logger.info("analyticsDirectory: " + analyticsDirectory);

    // Extract and import LetterLearningEvents from CSV file in src/main/resources/
    // TODO

    // Extract and import WordLearningEvents from CSV file in src/main/resources/
    // TODO

    // Extract and import StoryBookLearningEvents from CSV file in src/main/resources/
    File storyBookLearningEventsCsvFile = new File(analyticsDirectory, "storybook-learning-events.csv");
    applicationDao = (ApplicationDao) webApplicationContext.getBean("applicationDao");
    List<StoryBookLearningEvent> storyBookLearningEvents = CsvAnalyticsExtractionHelper.getStoryBookLearningEventsFromCsvBackup(storyBookLearningEventsCsvFile, applicationDao, storyBookDao);
    logger.info("storyBookLearningEvents.size(): " + storyBookLearningEvents.size());
    storyBookLearningEventDao = (StoryBookLearningEventDao) webApplicationContext.getBean("storyBookLearningEventDao");
    for (StoryBookLearningEvent storyBookLearningEvent : storyBookLearningEvents) {
        storyBookLearningEventDao.create(storyBookLearningEvent);
    }

    logger.info("Content import complete");
}
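CsvContentExtractionHelper itself is not part of this aggregation. As a rough illustration of what parsing one of these CSV backups could look like, the standalone sketch below reads a sounds.csv file with Apache Commons CSV, the same library the export controller above uses. The file path, the column names ("id", "value_ipa"), and the use of withFirstRecordAsHeader() are assumptions for the sketch, not the webapp's actual implementation, which resolves the real files from the classpath.

import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class SoundsCsvParseSketch {

    public static void main(String[] args) throws IOException {
        // Hypothetical local file; the import helper loads the real CSV from src/main/resources/ instead
        try (Reader reader = new FileReader("sounds.csv");
             CSVParser csvParser = new CSVParser(reader, CSVFormat.DEFAULT.withFirstRecordAsHeader())) {
            for (CSVRecord csvRecord : csvParser) {
                // Assumed columns; the actual backup format may differ
                String id = csvRecord.get("id");
                String valueIpa = csvRecord.get("value_ipa");
                System.out.println("id: " + id + ", valueIpa: " + valueIpa);
            }
        }
    }
}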