Use of ai.elimu.model.v2.enums.Language in project webapp by elimu-ai.
Class DbContentImportHelper, method performDatabaseContentImport:
/**
* Extracts educational content from the CSV files in {@code src/main/resources/db/content_<Environment>/<Language>/} and
* stores it in the database.
*
* @param environment The environment from which to import the database content.
* @param language The language to use during the import.
* @param webApplicationContext Context needed to access DAOs.
*/
public synchronized void performDatabaseContentImport(Environment environment, Language language, WebApplicationContext webApplicationContext) {
logger.info("performDatabaseContentImport");
logger.info("environment: " + environment + ", language: " + language);
if (!((environment == Environment.TEST) || (environment == Environment.PROD))) {
throw new IllegalArgumentException("Database content can only be imported from the TEST environment or from the PROD environment");
}
String contentDirectoryPath = "db" + File.separator + "content_" + environment + File.separator + language.toString().toLowerCase();
logger.info("contentDirectoryPath: \"" + contentDirectoryPath + "\"");
URL contentDirectoryURL = getClass().getClassLoader().getResource(contentDirectoryPath);
logger.info("contentDirectoryURL: " + contentDirectoryURL);
if (contentDirectoryURL == null) {
logger.warn("The content directory was not found. Aborting content import.");
return;
}
File contentDirectory = new File(contentDirectoryURL.getPath());
logger.info("contentDirectory: " + contentDirectory);
contributorDao = (ContributorDao) webApplicationContext.getBean("contributorDao");
Contributor contributor = new Contributor();
contributor.setEmail("dev@elimu.ai");
contributor.setFirstName("Dev");
contributor.setLastName("Contributor");
contributor.setRoles(new HashSet<>(Arrays.asList(Role.CONTRIBUTOR, Role.EDITOR, Role.ANALYST, Role.ADMIN)));
contributor.setRegistrationTime(Calendar.getInstance());
contributorDao.create(contributor);
// Extract and import Letters from CSV file in src/main/resources/
File lettersCsvFile = new File(contentDirectory, "letters.csv");
// Resolve the DAOs before they are used (getLettersFromCsvBackup needs soundDao)
soundDao = (SoundDao) webApplicationContext.getBean("soundDao");
letterDao = (LetterDao) webApplicationContext.getBean("letterDao");
letterContributionEventDao = (LetterContributionEventDao) webApplicationContext.getBean("letterContributionEventDao");
List<Letter> letters = CsvContentExtractionHelper.getLettersFromCsvBackup(lettersCsvFile, soundDao);
logger.info("letters.size(): " + letters.size());
for (Letter letter : letters) {
letterDao.create(letter);
LetterContributionEvent letterContributionEvent = new LetterContributionEvent();
letterContributionEvent.setContributor(contributor);
letterContributionEvent.setLetter(letter);
letterContributionEvent.setRevisionNumber(1);
letterContributionEvent.setTime(Calendar.getInstance());
letterContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
letterContributionEvent.setPlatform(Platform.WEBAPP);
letterContributionEventDao.create(letterContributionEvent);
}
// Extract and import Sounds from CSV file in src/main/resources/
File soundsCsvFile = new File(contentDirectory, "sounds.csv");
List<Sound> sounds = CsvContentExtractionHelper.getSoundsFromCsvBackup(soundsCsvFile);
logger.info("sounds.size(): " + sounds.size());
soundDao = (SoundDao) webApplicationContext.getBean("soundDao");
for (Sound sound : sounds) {
soundDao.create(sound);
}
// Extract and import LetterSoundCorrespondences from CSV file in src/main/resources/
File letterSoundCorrespondencesCsvFile = new File(contentDirectory, "letter-sound-correspondences.csv");
// Resolve the DAOs before they are passed to the extraction helper
letterSoundCorrespondenceDao = (LetterSoundCorrespondenceDao) webApplicationContext.getBean("letterSoundCorrespondenceDao");
letterSoundCorrespondenceContributionEventDao = (LetterSoundCorrespondenceContributionEventDao) webApplicationContext.getBean("letterSoundCorrespondenceContributionEventDao");
List<LetterSoundCorrespondence> letterSoundCorrespondences = CsvContentExtractionHelper.getLetterSoundCorrespondencesFromCsvBackup(letterSoundCorrespondencesCsvFile, letterDao, soundDao, letterSoundCorrespondenceDao);
logger.info("letterSoundCorrespondences.size(): " + letterSoundCorrespondences.size());
for (LetterSoundCorrespondence letterSoundCorrespondence : letterSoundCorrespondences) {
letterSoundCorrespondenceDao.create(letterSoundCorrespondence);
LetterSoundCorrespondenceContributionEvent letterSoundCorrespondenceContributionEvent = new LetterSoundCorrespondenceContributionEvent();
letterSoundCorrespondenceContributionEvent.setContributor(contributor);
letterSoundCorrespondenceContributionEvent.setLetterSoundCorrespondence(letterSoundCorrespondence);
letterSoundCorrespondenceContributionEvent.setRevisionNumber(1);
letterSoundCorrespondenceContributionEvent.setTime(Calendar.getInstance());
letterSoundCorrespondenceContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
letterSoundCorrespondenceContributionEvent.setPlatform(Platform.WEBAPP);
letterSoundCorrespondenceContributionEventDao.create(letterSoundCorrespondenceContributionEvent);
}
// Extract and import Words from CSV file in src/main/resources/
File wordsCsvFile = new File(contentDirectory, "words.csv");
// Resolve the DAOs before they are passed to the extraction helper
wordDao = (WordDao) webApplicationContext.getBean("wordDao");
wordContributionEventDao = (WordContributionEventDao) webApplicationContext.getBean("wordContributionEventDao");
List<Word> words = CsvContentExtractionHelper.getWordsFromCsvBackup(wordsCsvFile, letterDao, soundDao, letterSoundCorrespondenceDao, wordDao);
logger.info("words.size(): " + words.size());
for (Word word : words) {
wordDao.create(word);
WordContributionEvent wordContributionEvent = new WordContributionEvent();
wordContributionEvent.setContributor(contributor);
wordContributionEvent.setWord(word);
wordContributionEvent.setRevisionNumber(1);
wordContributionEvent.setTime(Calendar.getInstance());
wordContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
wordContributionEvent.setPlatform(Platform.WEBAPP);
wordContributionEventDao.create(wordContributionEvent);
}
// Extract and import Numbers from CSV file in src/main/resources/
File numbersCsvFile = new File(contentDirectory, "numbers.csv");
List<Number> numbers = CsvContentExtractionHelper.getNumbersFromCsvBackup(numbersCsvFile, wordDao);
logger.info("numbers.size(): " + numbers.size());
numberDao = (NumberDao) webApplicationContext.getBean("numberDao");
numberContributionEventDao = (NumberContributionEventDao) webApplicationContext.getBean("numberContributionEventDao");
for (Number number : numbers) {
numberDao.create(number);
NumberContributionEvent numberContributionEvent = new NumberContributionEvent();
numberContributionEvent.setContributor(contributor);
numberContributionEvent.setNumber(number);
numberContributionEvent.setRevisionNumber(1);
numberContributionEvent.setTime(Calendar.getInstance());
numberContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
numberContributionEvent.setPlatform(Platform.WEBAPP);
numberContributionEventDao.create(numberContributionEvent);
}
// Extract and import Syllables from CSV file in src/main/resources/
// TODO
// Extract and import Emojis from CSV file in src/main/resources/
File emojisCsvFile = new File(contentDirectory, "emojis.csv");
List<Emoji> emojis = CsvContentExtractionHelper.getEmojisFromCsvBackup(emojisCsvFile, wordDao);
logger.info("emojis.size(): " + emojis.size());
emojiDao = (EmojiDao) webApplicationContext.getBean("emojiDao");
for (Emoji emoji : emojis) {
emojiDao.create(emoji);
}
// Extract and import Images from CSV file in src/main/resources/
// TODO
// Extract and import Audios from CSV file in src/main/resources/
// TODO
// Extract and import StoryBooks from CSV file in src/main/resources/
File storyBooksCsvFile = new File(contentDirectory, "storybooks.csv");
List<StoryBookGson> storyBookGsons = CsvContentExtractionHelper.getStoryBooksFromCsvBackup(storyBooksCsvFile);
logger.info("storyBookGsons.size(): " + storyBookGsons.size());
storyBookDao = (StoryBookDao) webApplicationContext.getBean("storyBookDao");
storyBookChapterDao = (StoryBookChapterDao) webApplicationContext.getBean("storyBookChapterDao");
storyBookParagraphDao = (StoryBookParagraphDao) webApplicationContext.getBean("storyBookParagraphDao");
storyBookContributionEventDao = (StoryBookContributionEventDao) webApplicationContext.getBean("storyBookContributionEventDao");
for (StoryBookGson storyBookGson : storyBookGsons) {
// Convert from GSON to JPA
StoryBook storyBook = new StoryBook();
storyBook.setTitle(storyBookGson.getTitle());
storyBook.setDescription(storyBookGson.getDescription());
// TODO: storyBook.setContentLicense();
// TODO: storyBook.setAttributionUrl();
storyBook.setReadingLevel(storyBookGson.getReadingLevel());
storyBookDao.create(storyBook);
for (StoryBookChapterGson storyBookChapterGson : storyBookGson.getStoryBookChapters()) {
// Convert from GSON to JPA
StoryBookChapter storyBookChapter = new StoryBookChapter();
storyBookChapter.setStoryBook(storyBook);
storyBookChapter.setSortOrder(storyBookChapterGson.getSortOrder());
// TODO: storyBookChapter.setImage();
storyBookChapterDao.create(storyBookChapter);
for (StoryBookParagraphGson storyBookParagraphGson : storyBookChapterGson.getStoryBookParagraphs()) {
// Convert from GSON to JPA
StoryBookParagraph storyBookParagraph = new StoryBookParagraph();
storyBookParagraph.setStoryBookChapter(storyBookChapter);
storyBookParagraph.setSortOrder(storyBookParagraphGson.getSortOrder());
storyBookParagraph.setOriginalText(storyBookParagraphGson.getOriginalText());
List<String> wordsInOriginalText = WordExtractionHelper.getWords(storyBookParagraph.getOriginalText(), language);
logger.info("wordsInOriginalText.size(): " + wordsInOriginalText.size());
List<Word> paragraphWords = new ArrayList<>();
logger.info("paragraphWords.size(): " + paragraphWords.size());
for (String wordInOriginalText : wordsInOriginalText) {
logger.info("wordInOriginalText: \"" + wordInOriginalText + "\"");
wordInOriginalText = wordInOriginalText.toLowerCase();
logger.info("wordInOriginalText (lower-case): \"" + wordInOriginalText + "\"");
Word word = wordDao.readByText(wordInOriginalText);
logger.info("word: " + word);
paragraphWords.add(word);
}
storyBookParagraph.setWords(paragraphWords);
storyBookParagraphDao.create(storyBookParagraph);
}
}
StoryBookContributionEvent storyBookContributionEvent = new StoryBookContributionEvent();
storyBookContributionEvent.setContributor(contributor);
storyBookContributionEvent.setStoryBook(storyBook);
storyBookContributionEvent.setRevisionNumber(1);
storyBookContributionEvent.setTime(Calendar.getInstance());
storyBookContributionEvent.setTimeSpentMs((long) (Math.random() * 10) * 60000L);
storyBookContributionEvent.setPlatform(Platform.WEBAPP);
storyBookContributionEventDao.create(storyBookContributionEvent);
}
// Extract and import Videos from CSV file in src/main/resources/
// TODO
String analyticsDirectoryPath = "db" + File.separator + "analytics_" + environment + File.separator + language.toString().toLowerCase();
logger.info("analyticsDirectoryPath: \"" + analyticsDirectoryPath + "\"");
URL analyticsDirectoryURL = getClass().getClassLoader().getResource(analyticsDirectoryPath);
logger.info("analyticsDirectoryURL: " + analyticsDirectoryURL);
if (analyticsDirectoryURL == null) {
logger.warn("The analytics directory was not found. Aborting analytics import.");
return;
}
File analyticsDirectory = new File(analyticsDirectoryURL.getPath());
logger.info("analyticsDirectory: " + analyticsDirectory);
// Extract and import LetterLearningEvents from CSV file in src/main/resources/
// TODO
// Extract and import WordLearningEvents from CSV file in src/main/resources/
// TODO
// Extract and import StoryBookLearningEvents from CSV file in src/main/resources/
File storyBookLearningEventsCsvFile = new File(analyticsDirectory, "storybook-learning-events.csv");
applicationDao = (ApplicationDao) webApplicationContext.getBean("applicationDao");
List<StoryBookLearningEvent> storyBookLearningEvents = CsvAnalyticsExtractionHelper.getStoryBookLearningEventsFromCsvBackup(storyBookLearningEventsCsvFile, applicationDao, storyBookDao);
logger.info("storyBookLearningEvents.size(): " + storyBookLearningEvents.size());
storyBookLearningEventDao = (StoryBookLearningEventDao) webApplicationContext.getBean("storyBookLearningEventDao");
for (StoryBookLearningEvent storyBookLearningEvent : storyBookLearningEvents) {
storyBookLearningEventDao.create(storyBookLearningEvent);
}
logger.info("Content import complete");
}
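For orientation, a minimal sketch of how this helper might be invoked once the Spring WebApplicationContext exists. The ServletContextListener wrapper, the direct instantiation of DbContentImportHelper, and the Environment.TEST / Language.ENG arguments are illustrative assumptions, not taken from the project:

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;

// DbContentImportHelper, Environment and Language are the project classes used above (imports assumed).
public class ContentImportBootstrap implements ServletContextListener {

    @Override
    public void contextInitialized(ServletContextEvent event) {
        WebApplicationContext webApplicationContext =
                WebApplicationContextUtils.getRequiredWebApplicationContext(event.getServletContext());
        // Example arguments; a real deployment would derive these from configuration
        new DbContentImportHelper().performDatabaseContentImport(Environment.TEST, Language.ENG, webApplicationContext);
    }

    @Override
    public void contextDestroyed(ServletContextEvent event) {
        // Nothing to clean up
    }
}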
Use of ai.elimu.model.v2.enums.Language in project webapp by elimu-ai.
Class CsvContentExtractionHelper, method getWordsFromCsvBackup:
/**
* For information on how the CSV files were generated, see {@link WordCsvExportController#handleRequest}.
*/
public static List<Word> getWordsFromCsvBackup(File csvFile, LetterDao letterDao, SoundDao soundDao, LetterSoundCorrespondenceDao letterSoundCorrespondenceDao, WordDao wordDao) {
logger.info("getWordsFromCsvBackup");
List<Word> words = new ArrayList<>();
Path csvFilePath = Paths.get(csvFile.toURI());
logger.info("csvFilePath: " + csvFilePath);
CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("id", "text", "letter_sound_correspondences", "usage_count", "word_type", "spelling_consistency", "root_word_id", "root_word_text").withSkipHeaderRecord();
// Use try-with-resources so the reader and parser are closed even if parsing fails
try (Reader reader = Files.newBufferedReader(csvFilePath);
CSVParser csvParser = new CSVParser(reader, csvFormat)) {
for (CSVRecord csvRecord : csvParser) {
logger.info("csvRecord: " + csvRecord);
Word word = new Word();
String text = csvRecord.get("text");
word.setText(text);
JSONArray letterSoundCorrespondencesJsonArray = new JSONArray(csvRecord.get("letter_sound_correspondences"));
logger.info("letterSoundCorrespondencesJsonArray: " + letterSoundCorrespondencesJsonArray);
List<LetterSoundCorrespondence> letterSoundCorrespondences = new ArrayList<>();
for (int i = 0; i < letterSoundCorrespondencesJsonArray.length(); i++) {
JSONObject letterSoundCorrespondenceJsonObject = letterSoundCorrespondencesJsonArray.getJSONObject(i);
logger.info("letterSoundCorrespondenceJsonObject: " + letterSoundCorrespondenceJsonObject);
List<Letter> letters = new ArrayList<>();
JSONArray lettersJsonArray = letterSoundCorrespondenceJsonObject.getJSONArray("letters");
for (int j = 0; j < lettersJsonArray.length(); j++) {
Letter letter = letterDao.readByText(lettersJsonArray.getString(j));
letters.add(letter);
}
List<Sound> sounds = new ArrayList<>();
JSONArray soundsJsonArray = letterSoundCorrespondenceJsonObject.getJSONArray("sounds");
for (int j = 0; j < soundsJsonArray.length(); j++) {
Sound sound = soundDao.readByValueIpa(soundsJsonArray.getString(j));
sounds.add(sound);
}
LetterSoundCorrespondence letterSoundCorrespondence = letterSoundCorrespondenceDao.read(letters, sounds);
logger.info("letterSoundCorrespondence.getId(): " + letterSoundCorrespondence.getId());
letterSoundCorrespondences.add(letterSoundCorrespondence);
}
word.setLetterSoundCorrespondences(letterSoundCorrespondences);
Integer usageCount = Integer.valueOf(csvRecord.get("usage_count"));
word.setUsageCount(usageCount);
if (StringUtils.isNotBlank(csvRecord.get("word_type"))) {
WordType wordType = WordType.valueOf(csvRecord.get("word_type"));
word.setWordType(wordType);
}
if (StringUtils.isNotBlank(csvRecord.get("spelling_consistency"))) {
SpellingConsistency spellingConsistency = SpellingConsistency.valueOf(csvRecord.get("spelling_consistency"));
word.setSpellingConsistency(spellingConsistency);
}
// TODO: Store rootWords _after_ all Words have been stored
// if (StringUtils.isNotBlank(csvRecord.get("root_word_text"))) {
// String rootWordText = csvRecord.get("root_word_text");
// Word rootWord = wordDao.readByText(language, rootWordText);
// word.setRootWord(rootWord);
// }
words.add(word);
}
} catch (IOException ex) {
logger.error(ex);
}
return words;
}
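To make the column layout concrete, here is a small self-contained sketch that parses one words.csv-style record with the same header and prints the fields this method reads. The sample row is invented for illustration, not taken from an actual elimu-ai backup:

import java.io.StringReader;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class WordsCsvSample {
    public static void main(String[] args) throws Exception {
        // One hypothetical data row; the JSON in letter_sound_correspondences is CSV-quoted ("" escapes ")
        String csv = "1,cat,\"[{\"\"letters\"\":[\"\"c\"\"],\"\"sounds\"\":[\"\"k\"\"]}]\",42,,CONSISTENT,,\n";
        CSVFormat csvFormat = CSVFormat.DEFAULT
                .withHeader("id", "text", "letter_sound_correspondences", "usage_count",
                        "word_type", "spelling_consistency", "root_word_id", "root_word_text");
        try (CSVParser csvParser = new CSVParser(new StringReader(csv), csvFormat)) {
            for (CSVRecord csvRecord : csvParser) {
                System.out.println("text: " + csvRecord.get("text"));
                System.out.println("letter_sound_correspondences: " + csvRecord.get("letter_sound_correspondences"));
                System.out.println("usage_count: " + csvRecord.get("usage_count"));
            }
        }
    }
}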
Use of ai.elimu.model.v2.enums.Language in project webapp by elimu-ai.
Class SyllableUsageCountScheduler, method execute:
// At 07:30 every morning
@Scheduled(cron = "00 30 07 * * *")
public synchronized void execute() {
logger.info("execute");
logger.info("Calculating usage count for Syllables");
Map<String, Integer> syllableFrequencyMap = new HashMap<>();
Language language = Language.valueOf(ConfigHelper.getProperty("content.language"));
List<StoryBook> storyBooks = storyBookDao.readAllOrdered();
logger.info("storyBooks.size(): " + storyBooks.size());
for (StoryBook storyBook : storyBooks) {
logger.info("storyBook.getTitle(): " + storyBook.getTitle());
List<String> paragraphs = new ArrayList<>();
List<StoryBookChapter> storyBookChapters = storyBookChapterDao.readAll(storyBook);
for (StoryBookChapter storyBookChapter : storyBookChapters) {
List<StoryBookParagraph> storyBookParagraphs = storyBookParagraphDao.readAll(storyBookChapter);
for (StoryBookParagraph storyBookParagraph : storyBookParagraphs) {
paragraphs.add(storyBookParagraph.getOriginalText());
}
}
Map<String, Integer> syllableFrequencyMapForBook = SyllableFrequencyHelper.getSyllableFrequency(paragraphs, language);
syllableFrequencyMapForBook.keySet().forEach(syllableText -> syllableFrequencyMap.put(syllableText, syllableFrequencyMap.getOrDefault(syllableText, 0) + syllableFrequencyMapForBook.get(syllableText)));
}
logger.info("syllableFrequencyMap: " + syllableFrequencyMap);
for (String syllableText : syllableFrequencyMap.keySet()) {
// Skip syllables that are actual words
// TODO: add logic to Word editing
Word word = wordDao.readByText(syllableText);
if (word != null) {
continue;
}
// TODO: add support for trigrams
if (syllableText.length() != 2) {
continue;
}
Syllable existingSyllable = syllableDao.readByText(syllableText);
if (existingSyllable == null) {
Syllable syllable = new Syllable();
syllable.setTimeLastUpdate(Calendar.getInstance());
syllable.setText(syllableText);
syllable.setUsageCount(syllableFrequencyMap.get(syllableText));
syllableDao.create(syllable);
} else {
existingSyllable.setUsageCount(syllableFrequencyMap.get(syllableText));
syllableDao.update(existingSyllable);
}
}
logger.info("execute complete");
}
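The per-book accumulation above (getOrDefault plus put) can also be expressed with Map.merge. A minimal self-contained sketch of that equivalent pattern, using invented syllable counts:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SyllableFrequencyMergeSketch {
    public static void main(String[] args) {
        Map<String, Integer> syllableFrequencyMap = new HashMap<>();
        // Two hypothetical per-book frequency maps
        List<Map<String, Integer>> perBookMaps = List.of(Map.of("ma", 3, "ta", 1), Map.of("ma", 2, "la", 4));
        for (Map<String, Integer> syllableFrequencyMapForBook : perBookMaps) {
            // merge() adds the per-book count to any existing total, or inserts it if the syllable is new
            syllableFrequencyMapForBook.forEach((syllableText, count) ->
                    syllableFrequencyMap.merge(syllableText, count, Integer::sum));
        }
        System.out.println(syllableFrequencyMap); // e.g. {la=4, ta=1, ma=5}
    }
}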
Use of ai.elimu.model.v2.enums.Language in project webapp by elimu-ai.
Class StoryBookEditController, method handleSubmit:
@RequestMapping(value = "/{id}", method = RequestMethod.POST)
public String handleSubmit(@Valid StoryBook storyBook, BindingResult result, Model model, HttpServletRequest request, HttpSession session) {
logger.info("handleSubmit");
StoryBook existingStoryBook = storyBookDao.readByTitle(storyBook.getTitle());
if ((existingStoryBook != null) && !existingStoryBook.getId().equals(storyBook.getId())) {
result.rejectValue("title", "NonUnique");
}
if (result.hasErrors()) {
model.addAttribute("storyBook", storyBook);
model.addAttribute("timeStart", System.currentTimeMillis());
model.addAttribute("contentLicenses", ContentLicense.values());
List<Image> coverImages = imageDao.readAllOrdered();
model.addAttribute("coverImages", coverImages);
model.addAttribute("readingLevels", ReadingLevel.values());
List<StoryBookChapter> storyBookChapters = storyBookChapterDao.readAll(storyBook);
model.addAttribute("storyBookChapters", storyBookChapters);
// Map<StoryBookChapter.id, List<StoryBookParagraph>>
Map<Long, List<StoryBookParagraph>> paragraphsPerStoryBookChapterMap = new HashMap<>();
for (StoryBookChapter storyBookChapter : storyBookChapters) {
paragraphsPerStoryBookChapterMap.put(storyBookChapter.getId(), storyBookParagraphDao.readAll(storyBookChapter));
}
model.addAttribute("paragraphsPerStoryBookChapterMap", paragraphsPerStoryBookChapterMap);
List<String> paragraphs = new ArrayList<>();
for (StoryBookChapter storyBookChapter : storyBookChapters) {
List<StoryBookParagraph> storyBookParagraphs = storyBookParagraphDao.readAll(storyBookChapter);
for (StoryBookParagraph storyBookParagraph : storyBookParagraphs) {
paragraphs.add(storyBookParagraph.getOriginalText());
}
}
model.addAttribute("storyBookContributionEvents", storyBookContributionEventDao.readAll(storyBook));
model.addAttribute("storyBookPeerReviewEvents", storyBookPeerReviewEventDao.readAll(storyBook));
Language language = Language.valueOf(ConfigHelper.getProperty("content.language"));
Map<String, Integer> wordFrequencyMap = WordFrequencyHelper.getWordFrequency(paragraphs, language);
model.addAttribute("wordFrequencyMap", wordFrequencyMap);
Map<String, Word> wordMap = new HashMap<>();
for (Word word : wordDao.readAllOrdered()) {
wordMap.put(word.getText(), word);
}
model.addAttribute("wordMap", wordMap);
model.addAttribute("emojisByWordId", getEmojisByWordId());
Map<String, Integer> letterFrequencyMap = LetterFrequencyHelper.getLetterFrequency(paragraphs, language);
model.addAttribute("letterFrequencyMap", letterFrequencyMap);
Map<String, Letter> letterMap = new HashMap<>();
for (Letter letter : letterDao.readAllOrdered()) {
letterMap.put(letter.getText(), letter);
}
model.addAttribute("letterMap", letterMap);
return "content/storybook/edit";
} else {
storyBook.setTimeLastUpdate(Calendar.getInstance());
storyBook.setRevisionNumber(storyBook.getRevisionNumber() + 1);
storyBookDao.update(storyBook);
StoryBookContributionEvent storyBookContributionEvent = new StoryBookContributionEvent();
storyBookContributionEvent.setContributor((Contributor) session.getAttribute("contributor"));
storyBookContributionEvent.setTime(Calendar.getInstance());
storyBookContributionEvent.setStoryBook(storyBook);
storyBookContributionEvent.setRevisionNumber(storyBook.getRevisionNumber());
storyBookContributionEvent.setComment(StringUtils.abbreviate(request.getParameter("contributionComment"), 1000));
storyBookContributionEvent.setTimeSpentMs(System.currentTimeMillis() - Long.valueOf(request.getParameter("timeStart")));
storyBookContributionEvent.setPlatform(Platform.WEBAPP);
storyBookContributionEventDao.create(storyBookContributionEvent);
String contentUrl = "http://" + EnvironmentContextLoaderListener.PROPERTIES.getProperty("content.language").toLowerCase() + ".elimu.ai/content/storybook/edit/" + storyBook.getId();
String embedThumbnailUrl = null;
if (storyBook.getCoverImage() != null) {
embedThumbnailUrl = "http://" + EnvironmentContextLoaderListener.PROPERTIES.getProperty("content.language").toLowerCase() + ".elimu.ai/image/" + storyBook.getCoverImage().getId() + "_r" + storyBook.getCoverImage().getRevisionNumber() + "." + storyBook.getCoverImage().getImageFormat().toString().toLowerCase();
}
DiscordHelper.sendChannelMessage("Storybook edited: " + contentUrl, "\"" + storyBookContributionEvent.getStoryBook().getTitle() + "\"", "Comment: \"" + storyBookContributionEvent.getComment() + "\"", null, embedThumbnailUrl);
// Refresh REST API cache
storyBooksJsonService.refreshStoryBooksJSONArray();
return "redirect:/content/storybook/list#" + storyBook.getId();
}
}
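Both this controller and WordCreateController below compute setTimeSpentMs from a "timeStart" request parameter that was placed in the model when the form was rendered. A minimal standalone sketch of that round trip; the sleep merely stands in for the contributor editing the form:

public class TimeSpentSketch {
    public static void main(String[] args) throws InterruptedException {
        long timeStart = System.currentTimeMillis(); // value added to the Model as "timeStart" when rendering the form
        Thread.sleep(250); // stands in for the time the contributor spends editing in the browser
        long timeSpentMs = System.currentTimeMillis() - timeStart; // what handleSubmit stores via setTimeSpentMs(...)
        System.out.println("timeSpentMs: " + timeSpentMs);
    }
}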
Use of ai.elimu.model.v2.enums.Language in project webapp by elimu-ai.
Class WordCreateController, method handleSubmit:
@RequestMapping(method = RequestMethod.POST)
public String handleSubmit(HttpServletRequest request, HttpSession session, @Valid Word word, BindingResult result, Model model) {
logger.info("handleSubmit");
Word existingWord = wordDao.readByText(word.getText());
if (existingWord != null) {
result.rejectValue("text", "NonUnique");
}
if (StringUtils.containsAny(word.getText(), " ")) {
result.rejectValue("text", "WordSpace");
}
if (result.hasErrors()) {
model.addAttribute("word", word);
model.addAttribute("timeStart", request.getParameter("timeStart"));
// TODO: sort by letter(s) text
model.addAttribute("letterSoundCorrespondences", letterSoundCorrespondenceDao.readAllOrderedByUsage());
model.addAttribute("rootWords", wordDao.readAllOrdered());
model.addAttribute("emojisByWordId", getEmojisByWordId());
model.addAttribute("wordTypes", WordType.values());
model.addAttribute("spellingConsistencies", SpellingConsistency.values());
model.addAttribute("audio", audioDao.readByTranscription(word.getText()));
return "content/word/create";
} else {
word.setTimeLastUpdate(Calendar.getInstance());
wordDao.create(word);
WordContributionEvent wordContributionEvent = new WordContributionEvent();
wordContributionEvent.setContributor((Contributor) session.getAttribute("contributor"));
wordContributionEvent.setTime(Calendar.getInstance());
wordContributionEvent.setWord(word);
wordContributionEvent.setRevisionNumber(word.getRevisionNumber());
wordContributionEvent.setComment(StringUtils.abbreviate(request.getParameter("contributionComment"), 1000));
wordContributionEvent.setTimeSpentMs(System.currentTimeMillis() - Long.valueOf(request.getParameter("timeStart")));
wordContributionEvent.setPlatform(Platform.WEBAPP);
wordContributionEventDao.create(wordContributionEvent);
String contentUrl = "http://" + EnvironmentContextLoaderListener.PROPERTIES.getProperty("content.language").toLowerCase() + ".elimu.ai/content/word/edit/" + word.getId();
DiscordHelper.sendChannelMessage("Word created: " + contentUrl, "\"" + wordContributionEvent.getWord().getText() + "\"", "Comment: \"" + wordContributionEvent.getComment() + "\"", null, null);
// Note: updating the list of Words in StoryBookParagraphs is handled by the ParagraphWordScheduler
// Label Image with Word of matching title
Image matchingImage = imageDao.read(word.getText());
if (matchingImage != null) {
Set<Word> labeledWords = matchingImage.getWords();
if (!labeledWords.contains(word)) {
labeledWords.add(word);
matchingImage.setWords(labeledWords);
imageDao.update(matchingImage);
}
}
// Delete syllables that are actual words
Syllable syllable = syllableDao.readByText(word.getText());
if (syllable != null) {
syllableDao.delete(syllable);
}
// Generate Audio for this Word (if it has not been done already)
List<Audio> audios = audioDao.readAll(word);
if (audios.isEmpty()) {
Calendar timeStart = Calendar.getInstance();
Language language = Language.valueOf(ConfigHelper.getProperty("content.language"));
try {
byte[] audioBytes = GoogleCloudTextToSpeechHelper.synthesizeText(word.getText(), language);
logger.info("audioBytes: " + audioBytes);
if (audioBytes != null) {
Audio audio = new Audio();
audio.setTimeLastUpdate(Calendar.getInstance());
audio.setContentType(AudioFormat.MP3.getContentType());
audio.setWord(word);
audio.setTitle("word-id-" + word.getId());
audio.setTranscription(word.getText());
audio.setBytes(audioBytes);
// TODO: Convert from byte[] to File, and extract audio duration
audio.setDurationMs(null);
audio.setAudioFormat(AudioFormat.MP3);
audioDao.create(audio);
audios.add(audio);
model.addAttribute("audios", audios);
AudioContributionEvent audioContributionEvent = new AudioContributionEvent();
audioContributionEvent.setContributor((Contributor) session.getAttribute("contributor"));
audioContributionEvent.setTime(Calendar.getInstance());
audioContributionEvent.setAudio(audio);
audioContributionEvent.setRevisionNumber(audio.getRevisionNumber());
audioContributionEvent.setComment("Google Cloud Text-to-Speech (🤖 auto-generated comment)️");
audioContributionEvent.setTimeSpentMs(System.currentTimeMillis() - timeStart.getTimeInMillis());
audioContributionEvent.setPlatform(Platform.WEBAPP);
audioContributionEventDao.create(audioContributionEvent);
}
} catch (Exception ex) {
logger.error(ex);
}
}
return "redirect:/content/word/list#" + word.getId();
}
}