Use of ai.elimu.model.v2.enums.Language in project webapp by elimu-ai.
The class WordEditController, method handleRequest:
@RequestMapping(value = "/{id}", method = RequestMethod.GET)
public String handleRequest(HttpSession session, Model model, @PathVariable Long id) {
    logger.info("handleRequest");
    Word word = wordDao.read(id);
    if (word.getLetterSoundCorrespondences().isEmpty()) {
        autoSelectLetterSoundCorrespondences(word);
        // TODO: display information message to the Contributor that the letter-sound correspondences were auto-selected, and that they should be verified
    }
    model.addAttribute("word", word);
    model.addAttribute("timeStart", System.currentTimeMillis());
    // TODO: sort by letter(s) text
    model.addAttribute("letterSoundCorrespondences", letterSoundCorrespondenceDao.readAllOrderedByUsage());
    model.addAttribute("rootWords", wordDao.readAllOrdered());
    model.addAttribute("emojisByWordId", getEmojisByWordId());
    model.addAttribute("wordTypes", WordType.values());
    model.addAttribute("spellingConsistencies", SpellingConsistency.values());
    model.addAttribute("wordContributionEvents", wordContributionEventDao.readAll(word));
    model.addAttribute("wordPeerReviewEvents", wordPeerReviewEventDao.readAll(word));
    List<Audio> audios = audioDao.readAll(word);
    model.addAttribute("audios", audios);
    // Generate Audio for this Word (if it has not been done already)
    if (audios.isEmpty()) {
        Calendar timeStart = Calendar.getInstance();
        Language language = Language.valueOf(ConfigHelper.getProperty("content.language"));
        try {
            byte[] audioBytes = GoogleCloudTextToSpeechHelper.synthesizeText(word.getText(), language);
            logger.info("audioBytes: " + audioBytes);
            if (audioBytes != null) {
                Audio audio = new Audio();
                audio.setTimeLastUpdate(Calendar.getInstance());
                audio.setContentType(AudioFormat.MP3.getContentType());
                audio.setWord(word);
                audio.setTitle("word_" + word.getText());
                audio.setTranscription(word.getText());
                audio.setBytes(audioBytes);
                // TODO: Convert from byte[] to File, and extract audio duration
                audio.setDurationMs(null);
                audio.setAudioFormat(AudioFormat.MP3);
                audioDao.create(audio);
                audios.add(audio);
                model.addAttribute("audios", audios);
                AudioContributionEvent audioContributionEvent = new AudioContributionEvent();
                audioContributionEvent.setContributor((Contributor) session.getAttribute("contributor"));
                audioContributionEvent.setTime(Calendar.getInstance());
                audioContributionEvent.setAudio(audio);
                audioContributionEvent.setRevisionNumber(audio.getRevisionNumber());
audioContributionEvent.setComment("Google Cloud Text-to-Speech (🤖 auto-generated comment)️");
                audioContributionEvent.setTimeSpentMs(System.currentTimeMillis() - timeStart.getTimeInMillis());
                audioContributionEvent.setPlatform(Platform.WEBAPP);
                audioContributionEventDao.create(audioContributionEvent);
            }
        } catch (Exception ex) {
            logger.error(ex);
        }
    }
    // Look up variants (inflections) of the same word
model.addAttribute("wordInflections", wordDao.readInflections(word));
// Look up Multimedia content that has been labeled with this Word
// TODO: labeled Audios
List<Emoji> labeledEmojis = emojiDao.readAllLabeled(word);
model.addAttribute("labeledEmojis", labeledEmojis);
List<Image> labeledImages = imageDao.readAllLabeled(word);
model.addAttribute("labeledImages", labeledImages);
// TODO: labeled Videos
// Look up StoryBook Paragraphs that contain this Word
List<StoryBookParagraph> storyBookParagraphsContainingWord = storyBookParagraphDao.readAllContainingWord(word.getText());
model.addAttribute("storyBookParagraphsContainingWord", storyBookParagraphsContainingWord);
return "content/word/edit";
}
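The TODO about extracting the audio duration could be handled by writing the synthesized bytes to a temporary file and reading the track length with an MP3 tagging library. A minimal sketch, assuming the mp3agic library (com.mpatric:mp3agic) is added as a dependency; the helper name is hypothetical and not part of the existing webapp:

// Sketch only: assumes mp3agic on the classpath; not existing webapp code.
private Long extractDurationMs(byte[] audioBytes) {
    try {
        java.io.File tempFile = java.io.File.createTempFile("audio", ".mp3");
        tempFile.deleteOnExit();
        java.nio.file.Files.write(tempFile.toPath(), audioBytes);
        // Mp3File parses the MP3 frames and exposes the playback length in milliseconds
        com.mpatric.mp3agic.Mp3File mp3File = new com.mpatric.mp3agic.Mp3File(tempFile.getAbsolutePath());
        return mp3File.getLengthInMilliseconds();
    } catch (Exception ex) {
        logger.error(ex);
        return null;
    }
}

The returned value could then replace the audio.setDurationMs(null) call above.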
Use of ai.elimu.model.v2.enums.Language in project webapp by elimu-ai.
The class StoryBookParagraphEditController, method handleRequest:
@RequestMapping(value = "/{id}", method = RequestMethod.GET)
public String handleRequest(Model model, @PathVariable Long id, HttpSession session) {
    logger.info("handleRequest");
    StoryBookParagraph storyBookParagraph = storyBookParagraphDao.read(id);
    logger.info("storyBookParagraph: " + storyBookParagraph);
    model.addAttribute("storyBookParagraph", storyBookParagraph);
    // Generate Audio for this StoryBookParagraph (if it has not been done already)
    List<Audio> paragraphAudios = audioDao.readAll(storyBookParagraph);
    if (paragraphAudios.isEmpty()) {
        Calendar timeStart = Calendar.getInstance();
        Language language = Language.valueOf(ConfigHelper.getProperty("content.language"));
        try {
            byte[] audioBytes = GoogleCloudTextToSpeechHelper.synthesizeText(storyBookParagraph.getOriginalText(), language);
            logger.info("audioBytes: " + audioBytes);
            if (audioBytes != null) {
                Audio audio = new Audio();
                audio.setTimeLastUpdate(Calendar.getInstance());
                audio.setContentType(AudioFormat.MP3.getContentType());
                audio.setStoryBookParagraph(storyBookParagraph);
                audio.setTitle("storybook-" + storyBookParagraph.getStoryBookChapter().getStoryBook().getId() + "-ch-" + (storyBookParagraph.getStoryBookChapter().getSortOrder() + 1) + "-par-" + (storyBookParagraph.getSortOrder() + 1));
                audio.setTranscription(storyBookParagraph.getOriginalText());
                audio.setBytes(audioBytes);
                // TODO: Convert from byte[] to File, and extract audio duration
                audio.setDurationMs(null);
                audio.setAudioFormat(AudioFormat.MP3);
                audioDao.create(audio);
                AudioContributionEvent audioContributionEvent = new AudioContributionEvent();
                audioContributionEvent.setContributor((Contributor) session.getAttribute("contributor"));
                audioContributionEvent.setTime(Calendar.getInstance());
                audioContributionEvent.setAudio(audio);
                audioContributionEvent.setRevisionNumber(audio.getRevisionNumber());
audioContributionEvent.setComment("Google Cloud Text-to-Speech (🤖 auto-generated comment)️");
                audioContributionEvent.setTimeSpentMs(System.currentTimeMillis() - timeStart.getTimeInMillis());
                audioContributionEvent.setPlatform(Platform.WEBAPP);
                audioContributionEventDao.create(audioContributionEvent);
                paragraphAudios = audioDao.readAll(storyBookParagraph);
            }
        } catch (Exception ex) {
            logger.error(ex);
        }
    }
    model.addAttribute("audios", paragraphAudios);
    model.addAttribute("timeStart", System.currentTimeMillis());
    return "content/storybook/paragraph/edit";
}
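This audio-generation block mirrors the one in WordEditController almost line for line. If the duplication becomes a maintenance burden, the shared part could be pulled into a small factory; the sketch below is a hypothetical refactoring, not existing webapp code, and reuses only entities and helpers that already appear above:

// Hypothetical helper, shown only to illustrate how the duplicated block could be shared.
public class TextToSpeechAudioFactory {

    // Synthesizes speech for the given text and wraps it in an unsaved Audio entity,
    // or returns null if synthesis produced no bytes.
    public static Audio createAudio(String text, String title, Language language) throws Exception {
        byte[] audioBytes = GoogleCloudTextToSpeechHelper.synthesizeText(text, language);
        if (audioBytes == null) {
            return null;
        }
        Audio audio = new Audio();
        audio.setTimeLastUpdate(Calendar.getInstance());
        audio.setContentType(AudioFormat.MP3.getContentType());
        audio.setTitle(title);
        audio.setTranscription(text);
        audio.setBytes(audioBytes);
        // Duration extraction is still a TODO in both controllers
        audio.setDurationMs(null);
        audio.setAudioFormat(AudioFormat.MP3);
        return audio;
    }
}

Each controller would then only set the entity-specific association (setWord or setStoryBookParagraph), persist via audioDao, and record the AudioContributionEvent.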
Use of ai.elimu.model.v2.enums.Language in project KaellyBot by Kaysoro.
The class HelpCommand, method request:
@Override
public boolean request(IMessage message) {
    if (super.request(message)) {
        String prefixe = getPrefixMdEscaped(message);
        Language lg = Translator.getLanguageFrom(message.getChannel());
        Matcher m = getMatcher(message);
        m.find();
        StringBuilder st = new StringBuilder();
        boolean argumentFound = m.group(1) != null && m.group(1).replaceAll("^\\s+", "").length() > 0;
        for (Command command : CommandManager.getCommands()) {
            if (command.isPublic() && !command.isAdmin() && (message.getChannel().isPrivate() || !command.isForbidden(Guild.getGuild(message.getGuild())))) {
                if (!argumentFound)
                    st.append(command.help(lg, prefixe)).append("\n");
                else if (command.getName().equals(m.group(1).trim())) {
                    st.append(command.helpDetailed(lg, prefixe));
                    break;
                }
            }
        }
        if (argumentFound && st.length() == 0)
            notFoundCmd.throwException(message, this, lg);
        else
            Message.sendText(message.getChannel(), st.toString());
    }
    return false;
}
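The argumentFound check above treats an argument made up only of whitespace as absent. A self-contained illustration of that check follows; the regex pattern used here is an assumption for the demo, not the pattern HelpCommand actually compiles:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ArgumentCheckDemo {
    public static void main(String[] args) {
        // Hypothetical pattern: the command name followed by an optional argument group.
        Pattern pattern = Pattern.compile("^!help(\\s+.*)?$");
        for (String input : new String[] { "!help", "!help   ", "!help item" }) {
            Matcher m = pattern.matcher(input);
            m.find();
            boolean argumentFound = m.group(1) != null && m.group(1).replaceAll("^\\s+", "").length() > 0;
            System.out.println(input + " -> argumentFound=" + argumentFound);
        }
    }
}

Only the last input reports argumentFound=true, which is what lets the command fall through to the full help listing when no argument is given.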
Use of ai.elimu.model.v2.enums.Language in project KaellyBot by Kaysoro.
The class ItemCommand, method request:
@Override
public boolean request(IMessage message) {
    if (super.request(message)) {
        Matcher m = getMatcher(message);
        Language lg = Translator.getLanguageFrom(message.getChannel());
        m.find();
        if (message.getChannel().getModifiedPermissions(ClientConfig.DISCORD().getOurUser()).contains(Permissions.USE_EXTERNAL_EMOJIS) && ClientConfig.DISCORD().getOurUser().getPermissionsForGuild(message.getGuild()).contains(Permissions.USE_EXTERNAL_EMOJIS)) {
            String normalName = Normalizer.normalize(m.group(2).trim(), Normalizer.Form.NFD).replaceAll("\\p{InCombiningDiacriticalMarks}+", "").toLowerCase();
            BestMatcher matcher = new BestMatcher(normalName);
            try {
                for (TypeEquipment equip : TypeEquipment.values()) {
                    String[] names = equip.getNames(lg);
                    gatherData(message, matcher, names, normalName, equip, notFoundItem);
                }
                if (matcher.isEmpty())
                    for (SuperTypeEquipment type : SuperTypeEquipment.values()) matcher.evaluateAll(getListRequestableFrom(getSearchURL(type.getUrl(lg), normalName, null, lg), message, notFoundItem));
                if (matcher.isUnique()) {
                    // We have found it!
                    Embedded item = Item.getItem(lg, Translator.getLabel(lg, "game.url") + matcher.getBest().getUrl());
                    if (m.group(1) != null)
                        Message.sendEmbed(message.getChannel(), item.getMoreEmbedObject(lg));
                    else
                        Message.sendEmbed(message.getChannel(), item.getEmbedObject(lg));
                } else if (!matcher.isEmpty()) {
                    // Too many items match the query
                    tooMuchItems.throwException(message, this, lg, matcher.getBests());
                } else {
                    // No item found
                    notFoundItem.throwException(message, this, lg);
                }
            } catch (IOException e) {
                ExceptionManager.manageIOException(e, message, this, lg, notFoundItem);
            }
            return true;
        } else
            noExternalEmojiPermission.throwException(message, this, lg);
    }
    return false;
}
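The item lookup strips diacritics before matching, so accented and unaccented spellings of the same query resolve to the same normalized key. The same normalization in isolation (the input string is just an illustration, not a real item name):

import java.text.Normalizer;

public class NormalizeDemo {
    public static void main(String[] args) {
        String raw = "Épée Corruptée";
        // Decompose accented characters, drop the combining marks, then lowercase.
        String normalName = Normalizer.normalize(raw, Normalizer.Form.NFD)
                .replaceAll("\\p{InCombiningDiacriticalMarks}+", "")
                .toLowerCase();
        System.out.println(normalName); // prints "epee corruptee"
    }
}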
Use of ai.elimu.model.v2.enums.Language in project KaellyBot by Kaysoro.
The class MapCommand, method request:
@Override
public boolean request(IMessage message) {
    if (super.request(message)) {
        Language lg = Translator.getLanguageFrom(message.getChannel());
        List<String> classicMaps = new ArrayList<>();
        for (int i = 1; i < 18; i++) classicMaps.add(String.valueOf(i));
        List<String> maps = new ArrayList<>();
        Matcher m = getMatcher(message);
        m.find();
        if (m.group(1) == null && m.group(2) == null)
            maps.addAll(classicMaps);
        else if (m.group(2) != null) {
            String[] text = m.group(2).trim().toUpperCase().split("\\s+");
            for (String value : text) {
                value = getNumberValue(value);
                if (value != null)
                    maps.add(value);
            }
        } else {
            new BadUseCommandDiscordException().throwException(message, this, lg);
            return false;
        }
        if (m.group(1) == null && maps.isEmpty()) {
            new BadUseCommandDiscordException().throwException(message, this, lg);
            return false;
        } else if (m.group(1) != null) {
            classicMaps.removeAll(maps);
            maps = classicMaps;
        }
        String number = maps.get(new Random().nextInt(maps.size()));
        String url = Constants.turnamentMapImg.replace("{number}", number);
        String[] punchlines = Translator.getLabel(lg, "map.punchline").split(";");
        String punchline = punchlines[new Random().nextInt(punchlines.length)];
        EmbedBuilder builder = new EmbedBuilder();
        builder.withTitle(Translator.getLabel(lg, "map.embed.title") + " " + numberToRoman(number));
        builder.withDescription(punchline);
        builder.withImage(url);
        builder.withColor(new Random().nextInt(16777216));
        Message.sendEmbed(message.getChannel(), builder.build());
    }
    return false;
}