Use of ai.elimu.model.content.StoryBookChapter in project webapp by elimu-ai.
The class LetterUsageCountScheduler, method execute:
// At 06:15 every day
@Scheduled(cron = "00 15 06 * * *")
public synchronized void execute() {
    logger.info("execute");

    logger.info("Calculating usage count for Letters");
    Map<String, Integer> letterFrequencyMap = new HashMap<>();

    Language language = Language.valueOf(ConfigHelper.getProperty("content.language"));

    List<StoryBook> storyBooks = storyBookDao.readAllOrdered();
    logger.info("storyBooks.size(): " + storyBooks.size());
    for (StoryBook storyBook : storyBooks) {
        logger.info("storyBook.getTitle(): " + storyBook.getTitle());

        List<String> paragraphs = new ArrayList<>();
        List<StoryBookChapter> storyBookChapters = storyBookChapterDao.readAll(storyBook);
        for (StoryBookChapter storyBookChapter : storyBookChapters) {
            List<StoryBookParagraph> storyBookParagraphs = storyBookParagraphDao.readAll(storyBookChapter);
            for (StoryBookParagraph storyBookParagraph : storyBookParagraphs) {
                paragraphs.add(storyBookParagraph.getOriginalText());
            }
        }

        Map<String, Integer> letterFrequencyMapForBook = LetterFrequencyHelper.getLetterFrequency(paragraphs, language);
        letterFrequencyMapForBook.keySet().forEach(letterText ->
                letterFrequencyMap.put(letterText, letterFrequencyMap.getOrDefault(letterText, 0) + letterFrequencyMapForBook.get(letterText)));
    }
    logger.info("letterFrequencyMap: " + letterFrequencyMap);

    for (String letterText : letterFrequencyMap.keySet()) {
        Letter existingLetter = letterDao.readByText(letterText);
        if (existingLetter != null) {
            existingLetter.setUsageCount(letterFrequencyMap.get(letterText));
            letterDao.update(existingLetter);
        }
    }

    logger.info("execute complete");
}
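The per-book accumulation above combines getOrDefault and put; the same merge can be expressed more compactly with Map.merge. The sketch below is illustrative only and assumes, as in the scheduler, that each per-book map relates a letter text to its count within one book:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Sketch only: merging per-book letter counts into one global map, equivalent
    // to the getOrDefault accumulation in LetterUsageCountScheduler above.
    public class LetterFrequencyMergeSketch {

        public static Map<String, Integer> merge(List<Map<String, Integer>> perBookMaps) {
            Map<String, Integer> totalFrequencyMap = new HashMap<>();
            for (Map<String, Integer> perBookMap : perBookMaps) {
                // Map.merge adds the book's count to any total already stored for the same letter text
                perBookMap.forEach((letterText, count) -> totalFrequencyMap.merge(letterText, count, Integer::sum));
            }
            return totalFrequencyMap;
        }
    }

For example, merge(List.of(Map.of("a", 3), Map.of("a", 2, "b", 1))) yields a map with a=5 and b=1.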
Use of ai.elimu.model.content.StoryBookChapter in project webapp by elimu-ai.
The class WordUsageCountScheduler, method execute:
// At 06:00 every day
@Scheduled(cron = "00 00 06 * * *")
public synchronized void execute() {
    logger.info("execute");

    logger.info("Calculating usage count for Words");
    Map<String, Integer> wordFrequencyMap = new HashMap<>();

    Language language = Language.valueOf(ConfigHelper.getProperty("content.language"));

    List<StoryBook> storyBooks = storyBookDao.readAllOrdered();
    logger.info("storyBooks.size(): " + storyBooks.size());
    for (StoryBook storyBook : storyBooks) {
        logger.info("storyBook.getTitle(): " + storyBook.getTitle());

        List<String> paragraphs = new ArrayList<>();
        List<StoryBookChapter> storyBookChapters = storyBookChapterDao.readAll(storyBook);
        for (StoryBookChapter storyBookChapter : storyBookChapters) {
            List<StoryBookParagraph> storyBookParagraphs = storyBookParagraphDao.readAll(storyBookChapter);
            for (StoryBookParagraph storyBookParagraph : storyBookParagraphs) {
                paragraphs.add(storyBookParagraph.getOriginalText());
            }
        }

        Map<String, Integer> wordFrequencyMapForBook = WordFrequencyHelper.getWordFrequency(paragraphs, language);
        wordFrequencyMapForBook.keySet().forEach(word ->
                wordFrequencyMap.put(word, wordFrequencyMap.getOrDefault(word, 0) + wordFrequencyMapForBook.get(word)));
    }

    for (String key : wordFrequencyMap.keySet()) {
        String wordLowerCase = key.toLowerCase();
        logger.info("wordLowerCase: \"" + wordLowerCase + "\"");
        Word word = wordDao.readByText(wordLowerCase);
        if (word != null) {
            word.setUsageCount(wordFrequencyMap.get(wordLowerCase));
            wordDao.update(word);
        }
    }

    logger.info("execute complete");
}
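In the update loop above, the Word entity is looked up by the lowercased key, but the usage count is also fetched from wordFrequencyMap with that lowercased string. If WordFrequencyHelper preserves the original casing of words (an assumption, not something shown in this excerpt), counts for differently cased forms would sit under separate keys and the lowercase lookup could return null. A minimal sketch of collapsing the map onto lowercased keys before the update, under that assumption:

    import java.util.HashMap;
    import java.util.Map;

    // Sketch only: fold word counts onto lowercased keys so that e.g. "The" and
    // "the" contribute to a single total, and a lowercase lookup always matches.
    public class WordFrequencyNormalizationSketch {

        public static Map<String, Integer> toLowerCaseKeys(Map<String, Integer> wordFrequencyMap) {
            Map<String, Integer> normalizedMap = new HashMap<>();
            wordFrequencyMap.forEach((word, count) ->
                    normalizedMap.merge(word.toLowerCase(), count, Integer::sum));
            return normalizedMap;
        }
    }

With a normalized map, normalizedMap.get(wordLowerCase) is non-null for every iterated key, so the usage count written to the Word entity cannot silently become null.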
Use of ai.elimu.model.content.StoryBookChapter in project webapp by elimu-ai.
The class StringToStoryBookChapterConverter, method convert:
/**
 * Convert StoryBookChapter id to StoryBookChapter entity
 */
public StoryBookChapter convert(String id) {
    if (StringUtils.isBlank(id)) {
        return null;
    } else {
        Long storyBookChapterId = Long.parseLong(id);
        StoryBookChapter storyBookChapter = storyBookChapterDao.read(storyBookChapterId);
        return storyBookChapter;
    }
}
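This converter only takes effect once it is registered with Spring's conversion service; the project's actual wiring is not shown in this excerpt. The snippet below is a hypothetical Java-config registration (the configuration class name and constructor injection are assumptions), assuming the converter implements Spring's Converter<String, StoryBookChapter>:

    import org.springframework.context.annotation.Configuration;
    import org.springframework.format.FormatterRegistry;
    import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;

    // Hypothetical configuration class (not part of the project): registers the
    // converter so that Spring MVC can bind a String id from a request parameter
    // or path variable directly to a StoryBookChapter entity.
    @Configuration
    public class ConversionConfigSketch implements WebMvcConfigurer {

        private final StringToStoryBookChapterConverter stringToStoryBookChapterConverter;

        public ConversionConfigSketch(StringToStoryBookChapterConverter stringToStoryBookChapterConverter) {
            this.stringToStoryBookChapterConverter = stringToStoryBookChapterConverter;
        }

        @Override
        public void addFormatters(FormatterRegistry registry) {
            registry.addConverter(stringToStoryBookChapterConverter);
        }
    }

Note also that Long.parseLong throws NumberFormatException for a non-numeric id, so callers are expected to pass either a blank string or a valid numeric id.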
Use of ai.elimu.model.content.StoryBookChapter in project webapp by elimu-ai.
The class StoryBookEditController, method handleRequest:
@RequestMapping(value = "/{id}", method = RequestMethod.GET)
public String handleRequest(Model model, @PathVariable Long id) {
    logger.info("handleRequest");

    StoryBook storyBook = storyBookDao.read(id);
    model.addAttribute("storyBook", storyBook);
    model.addAttribute("timeStart", System.currentTimeMillis());
    model.addAttribute("contentLicenses", ContentLicense.values());

    List<Image> coverImages = imageDao.readAllOrdered();
    model.addAttribute("coverImages", coverImages);
    model.addAttribute("readingLevels", ReadingLevel.values());

    List<StoryBookChapter> storyBookChapters = storyBookChapterDao.readAll(storyBook);
    model.addAttribute("storyBookChapters", storyBookChapters);

    // Map<StoryBookChapter.id, List<StoryBookParagraph>>
    Map<Long, List<StoryBookParagraph>> paragraphsPerStoryBookChapterMap = new HashMap<>();
    for (StoryBookChapter storyBookChapter : storyBookChapters) {
        paragraphsPerStoryBookChapterMap.put(storyBookChapter.getId(), storyBookParagraphDao.readAll(storyBookChapter));
    }
    model.addAttribute("paragraphsPerStoryBookChapterMap", paragraphsPerStoryBookChapterMap);

    List<String> paragraphs = new ArrayList<>();
    for (StoryBookChapter storyBookChapter : storyBookChapters) {
        List<StoryBookParagraph> storyBookParagraphs = storyBookParagraphDao.readAll(storyBookChapter);
        for (StoryBookParagraph storyBookParagraph : storyBookParagraphs) {
            paragraphs.add(storyBookParagraph.getOriginalText());
        }
    }

    model.addAttribute("storyBookContributionEvents", storyBookContributionEventDao.readAll(storyBook));
    model.addAttribute("storyBookPeerReviewEvents", storyBookPeerReviewEventDao.readAll(storyBook));

    Language language = Language.valueOf(ConfigHelper.getProperty("content.language"));

    Map<String, Integer> wordFrequencyMap = WordFrequencyHelper.getWordFrequency(paragraphs, language);
    model.addAttribute("wordFrequencyMap", wordFrequencyMap);

    Map<String, Word> wordMap = new HashMap<>();
    for (Word word : wordDao.readAllOrdered()) {
        wordMap.put(word.getText(), word);
    }
    model.addAttribute("wordMap", wordMap);

    model.addAttribute("emojisByWordId", getEmojisByWordId());

    Map<String, Integer> letterFrequencyMap = LetterFrequencyHelper.getLetterFrequency(paragraphs, language);
    model.addAttribute("letterFrequencyMap", letterFrequencyMap);

    Map<String, Letter> letterMap = new HashMap<>();
    for (Letter letter : letterDao.readAllOrdered()) {
        letterMap.put(letter.getText(), letter);
    }
    model.addAttribute("letterMap", letterMap);

    return "content/storybook/edit";
}
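The loop that flattens all chapter paragraphs into a single list of strings appears here and in both schedulers above. Purely as an illustration (the class below does not exist in the project), the shared logic could be extracted into one helper that relies on the same DAO methods shown in these snippets:

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical helper, not part of the webapp project: collects the original
    // text of every paragraph in every chapter of a StoryBook. Project-specific
    // imports (StoryBook, StoryBookChapter, StoryBookParagraph, DAOs) are assumed.
    public class StoryBookTextExtractor {

        private final StoryBookChapterDao storyBookChapterDao;
        private final StoryBookParagraphDao storyBookParagraphDao;

        public StoryBookTextExtractor(StoryBookChapterDao storyBookChapterDao,
                                      StoryBookParagraphDao storyBookParagraphDao) {
            this.storyBookChapterDao = storyBookChapterDao;
            this.storyBookParagraphDao = storyBookParagraphDao;
        }

        public List<String> extractParagraphTexts(StoryBook storyBook) {
            List<String> paragraphs = new ArrayList<>();
            for (StoryBookChapter storyBookChapter : storyBookChapterDao.readAll(storyBook)) {
                for (StoryBookParagraph storyBookParagraph : storyBookParagraphDao.readAll(storyBookChapter)) {
                    paragraphs.add(storyBookParagraph.getOriginalText());
                }
            }
            return paragraphs;
        }
    }

The schedulers and the edit controller could then all call extractParagraphTexts(storyBook) before handing the result to WordFrequencyHelper or LetterFrequencyHelper.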
Use of ai.elimu.model.content.StoryBookChapter in project webapp by elimu-ai.
The class StoryBookCsvExportController, method handleRequest:
@RequestMapping(value = "/storybooks.csv", method = RequestMethod.GET)
public void handleRequest(HttpServletResponse response, OutputStream outputStream) throws IOException {
    logger.info("handleRequest");

    List<StoryBook> storyBooks = storyBookDao.readAllOrdered();
    logger.info("storyBooks.size(): " + storyBooks.size());

    CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader("id", "title", "description", "content_license", "attribution_url", "reading_level", "cover_image_id", "chapters");
    StringWriter stringWriter = new StringWriter();
    CSVPrinter csvPrinter = new CSVPrinter(stringWriter, csvFormat);

    for (StoryBook storyBook : storyBooks) {
        logger.info("storyBook.getTitle(): \"" + storyBook.getTitle() + "\"");

        Long coverImageId = null;
        if (storyBook.getCoverImage() != null) {
            coverImageId = storyBook.getCoverImage().getId();
        }

        // Store chapters as JSON objects
        JSONArray chaptersJsonArray = new JSONArray();
        List<StoryBookChapter> storyBookChapters = storyBookChapterDao.readAll(storyBook);
        logger.info("storyBookChapters.size(): " + storyBookChapters.size());
        for (StoryBookChapter storyBookChapter : storyBookChapters) {
            logger.info("storyBookChapter.getId(): " + storyBookChapter.getId());
            StoryBookChapterGson storyBookChapterGson = JpaToGsonConverter.getStoryBookChapterGson(storyBookChapter);
            // TODO: move this code block to JpaToGsonConverter?
            if (storyBookChapterGson.getImage() != null) {
                ImageGson imageGsonWithIdOnly = new ImageGson();
                imageGsonWithIdOnly.setId(storyBookChapterGson.getImage().getId());
                storyBookChapterGson.setImage(imageGsonWithIdOnly);
            }

            // Store paragraphs as JSON objects
            List<StoryBookParagraphGson> storyBookParagraphs = new ArrayList<>();
            logger.info("storyBookParagraphs.size(): " + storyBookParagraphs.size());
            for (StoryBookParagraph storyBookParagraph : storyBookParagraphDao.readAll(storyBookChapter)) {
                logger.info("storyBookParagraph.getId(): " + storyBookParagraph.getId());
                StoryBookParagraphGson storyBookParagraphGson = JpaToGsonConverter.getStoryBookParagraphGson(storyBookParagraph);
                storyBookParagraphGson.setWords(null);
                storyBookParagraphs.add(storyBookParagraphGson);
            }
            storyBookChapterGson.setStoryBookParagraphs(storyBookParagraphs);

            String json = new Gson().toJson(storyBookChapterGson);
            JSONObject jsonObject = new JSONObject(json);
            logger.info("jsonObject: " + jsonObject);
            chaptersJsonArray.put(jsonObject);
        }
        logger.info("chaptersJsonArray: " + chaptersJsonArray);

        csvPrinter.printRecord(
                storyBook.getId(),
                storyBook.getTitle(),
                storyBook.getDescription(),
                storyBook.getContentLicense(),
                storyBook.getAttributionUrl(),
                storyBook.getReadingLevel(),
                coverImageId,
                chaptersJsonArray
        );
        csvPrinter.flush();
    }

    String csvFileContent = stringWriter.toString();

    response.setContentType("text/csv");
    byte[] bytes = csvFileContent.getBytes();
    response.setContentLength(bytes.length);
    try {
        outputStream.write(bytes);
        outputStream.flush();
        outputStream.close();
    } catch (IOException ex) {
        logger.error(ex);
    }
}
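The export above nests each book's chapters as a JSON array inside a single CSV column. As an illustration of consuming that format (the file path and the sortOrder field on the chapter JSON are assumptions, not taken from this excerpt), a row could be read back with the same Commons CSV and org.json libraries:

    import java.io.FileReader;
    import java.io.Reader;
    import org.apache.commons.csv.CSVFormat;
    import org.apache.commons.csv.CSVParser;
    import org.apache.commons.csv.CSVRecord;
    import org.json.JSONArray;
    import org.json.JSONObject;

    // Illustrative sketch: reading the exported storybooks.csv back and parsing
    // the "chapters" column that the controller above serializes as a JSON array.
    // Column names match the CSV header used in the export; the file path is assumed.
    public class StoryBookCsvReaderSketch {

        public static void main(String[] args) throws Exception {
            try (Reader reader = new FileReader("storybooks.csv");
                 CSVParser csvParser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader().withSkipHeaderRecord())) {
                for (CSVRecord record : csvParser) {
                    String title = record.get("title");
                    JSONArray chaptersJsonArray = new JSONArray(record.get("chapters"));
                    System.out.println(title + " has " + chaptersJsonArray.length() + " chapter(s)");
                    for (int i = 0; i < chaptersJsonArray.length(); i++) {
                        JSONObject chapterJsonObject = chaptersJsonArray.getJSONObject(i);
                        // opt() returns null rather than throwing if the assumed field is absent
                        System.out.println("  chapter sortOrder: " + chapterJsonObject.opt("sortOrder"));
                    }
                }
            }
        }
    }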