Use of me.devsaki.hentoid.database.CollectionDAO in project Hentoid by avluis.
In class SplashActivity, method hasToMigrateAPI29.
/**
 * Test if the ImageFiles stored in the DB have their URIs filled
* If not, it indicates the collection has not been updated to fit the Android 10 I/O update
*
* @return True if a migration has to happen; false if not
*/
private boolean hasToMigrateAPI29() {
    CollectionDAO dao = new ObjectBoxDAO(this);
    try {
        long imagesKO = dao.countOldStoredContent();
        Timber.d("Splash / API 29 migration detector : %s books KO", imagesKO);
        return imagesKO > 0;
    } finally {
        dao.cleanup();
    }
}
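Every snippet on this page repeats the same lifecycle: construct an ObjectBoxDAO, query it, and release it in a finally block so the underlying store is freed even if the query throws. That repetition could be factored out; a minimal sketch under that assumption (withDao and its Function-based signature are hypothetical, not Hentoid API; only CollectionDAO, ObjectBoxDAO, countOldStoredContent and cleanup come from the snippet above):

import java.util.function.Function;

// Hypothetical helper factoring out the acquire / query / cleanup pattern shown above
static <T> T withDao(Context context, Function<CollectionDAO, T> block) {
    CollectionDAO dao = new ObjectBoxDAO(context);
    try {
        return block.apply(dao);
    } finally {
        dao.cleanup(); // release the store even if the query throws
    }
}

// Usage : the migration test becomes a one-liner
boolean hasToMigrate = withDao(this, dao -> dao.countOldStoredContent() > 0);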
Use of me.devsaki.hentoid.database.CollectionDAO in project Hentoid by avluis.
In class ImportWorker, method startImport.
/**
* Import books from known source folders
*
* @param rename True if the user has asked for a folder renaming when calling import from Preferences
* @param cleanNoJSON True if the user has asked for a cleanup of folders with no JSONs when calling import from Preferences
* @param cleanNoImages True if the user has asked for a cleanup of folders with no images when calling import from Preferences
*/
private void startImport(boolean rename, boolean cleanNoJSON, boolean cleanNoImages) {
    booksOK = 0;
    booksKO = 0;
    nbFolders = 0;
    List<LogHelper.LogEntry> log = new ArrayList<>();
    Context context = getApplicationContext();

    // Stop downloads; it can get messy if downloading _and_ refresh / import happen at the same time
    EventBus.getDefault().post(new DownloadEvent(DownloadEvent.Type.EV_PAUSE));

    DocumentFile rootFolder = FileHelper.getFolderFromTreeUriString(context, Preferences.getStorageUri());
    if (null == rootFolder) {
        Timber.e("Root folder is not defined (%s)", Preferences.getStorageUri());
        return;
    }

    List<DocumentFile> bookFolders = new ArrayList<>();
    CollectionDAO dao = new ObjectBoxDAO(context);
    try (FileExplorer explorer = new FileExplorer(context, Uri.parse(Preferences.getStorageUri()))) {
        // 1st pass : Import groups JSON
        // Flag existing groups for cleanup
        dao.flagAllGroups(Grouping.CUSTOM);
        DocumentFile groupsFile = explorer.findFile(context, rootFolder, Consts.GROUPS_JSON_FILE_NAME);
        if (groupsFile != null)
            importGroups(context, groupsFile, dao, log);
        else
            trace(Log.INFO, STEP_GROUPS, log, "No groups file found");

        // 2nd pass : count subfolders of every site folder
        List<DocumentFile> siteFolders = explorer.listFolders(context, rootFolder);
        int foldersProcessed = 1;
        for (DocumentFile f : siteFolders) {
            bookFolders.addAll(explorer.listFolders(context, f));
            eventProgress(STEP_2_BOOK_FOLDERS, siteFolders.size(), foldersProcessed++, 0);
        }
        eventComplete(STEP_2_BOOK_FOLDERS, siteFolders.size(), siteFolders.size(), 0, null);
        notificationManager.notify(new ImportProgressNotification(context.getResources().getString(R.string.starting_import), 0, 0));

        // 3rd pass : scan every folder for a JSON file or subdirectories
        String enabled = context.getResources().getString(R.string.enabled);
        String disabled = context.getResources().getString(R.string.disabled);
        trace(Log.DEBUG, 0, log, "Import books starting - initial detected count : %s", bookFolders.size() + "");
        trace(Log.INFO, 0, log, "Rename folders %s", (rename ? enabled : disabled));
        trace(Log.INFO, 0, log, "Remove folders with no JSONs %s", (cleanNoJSON ? enabled : disabled));
        trace(Log.INFO, 0, log, "Remove folders with no images %s", (cleanNoImages ? enabled : disabled));

        // Cleanup previously detected duplicates
        DuplicatesDAO duplicatesDAO = new DuplicatesDAO(context);
        try {
            duplicatesDAO.clearEntries();
        } finally {
            duplicatesDAO.cleanup();
        }

        // Flag DB content for cleanup
        dao.flagAllInternalBooks();
        dao.flagAllErrorBooksWithJson();

        for (int i = 0; i < bookFolders.size(); i++) {
            if (isStopped())
                throw new InterruptedException();
            importFolder(context, explorer, dao, bookFolders, bookFolders.get(i), log, rename, cleanNoJSON, cleanNoImages);
        }
        trace(Log.INFO, STEP_3_BOOKS, log, "Import books complete - %s OK; %s KO; %s final count", booksOK + "", booksKO + "", bookFolders.size() - nbFolders + "");
        eventComplete(STEP_3_BOOKS, bookFolders.size(), booksOK, booksKO, null);

        // 4th pass : Import queue & bookmarks JSON
        DocumentFile queueFile = explorer.findFile(context, rootFolder, Consts.QUEUE_JSON_FILE_NAME);
        if (queueFile != null)
            importQueue(context, queueFile, dao, log);
        else
            trace(Log.INFO, STEP_4_QUEUE_FINAL, log, "No queue file found");

        DocumentFile bookmarksFile = explorer.findFile(context, rootFolder, Consts.BOOKMARKS_JSON_FILE_NAME);
        if (bookmarksFile != null)
            importBookmarks(context, bookmarksFile, dao, log);
        else
            trace(Log.INFO, STEP_4_QUEUE_FINAL, log, "No bookmarks file found");
    } catch (IOException | InterruptedException e) {
        Timber.w(e);
        // Restore interrupted state
        Thread.currentThread().interrupt();
    } finally {
        // Write log in root folder
        DocumentFile logFile = LogHelper.writeLog(context, buildLogInfo(rename || cleanNoJSON || cleanNoImages, log));
        dao.deleteAllFlaggedBooks(true);
        dao.deleteAllFlaggedGroups();
        dao.cleanup();
        eventComplete(STEP_4_QUEUE_FINAL, bookFolders.size(), booksOK, booksKO, logFile);
        notificationManager.notify(new ImportCompleteNotification(booksOK, booksKO));
    }
}
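The flag calls before the loop and the deleteAllFlagged* calls in the finally block form a mark-and-sweep cleanup: everything known to the DB is marked stale, each importFolder() pass presumably clears the mark on the books it finds on disk, and whatever is still marked at the end is swept away. A self-contained sketch of that shape (FlaggedStore and every name in it are hypothetical stand-ins, not Hentoid APIs):

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

// Schematic mark-and-sweep, mirroring flagAllInternalBooks / importFolder / deleteAllFlaggedBooks
final class FlaggedStore {
    private final Map<String, Boolean> flagged = new HashMap<>();

    void add(String id) { flagged.put(id, false); }
    void flagAll() { flagged.replaceAll((id, f) -> true); }        // 1. mark everything as stale
    void unflag(String id) { flagged.put(id, false); }             // 2. confirmed present on disk
    void deleteAllFlagged() { flagged.values().removeIf(f -> f); } // 3. sweep the leftovers
    Set<String> ids() { return flagged.keySet(); }
}

public class MarkAndSweepDemo {
    public static void main(String[] args) {
        FlaggedStore store = new FlaggedStore();
        store.add("bookA");
        store.add("bookB");
        store.flagAll();
        store.unflag("bookA");     // the import pass would do this for each folder it recognizes
        store.deleteAllFlagged();  // "bookB" had no matching folder -> removed
        System.out.println(store.ids()); // [bookA]
    }
}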
Use of me.devsaki.hentoid.database.CollectionDAO in project Hentoid by avluis.
In class ContentHelper, method findDuplicate.
/**
* Find the best match for the given Content inside the library and queue
*
* @param context Context to use
* @param content Content to find the duplicate for
 * @param pHash Cover perceptual hash to use as an override for the given Content's cover hash; pass Long.MIN_VALUE not to override
* @param dao DAO to use
* @return Pair containing
 * - Left side : Best match for the given Content inside the library and queue
 * - Right side : Similarity score (between 0 and 1; 1 = 100%)
*/
@Nullable
public static ImmutablePair<Content, Float> findDuplicate(@NonNull final Context context, @NonNull final Content content, long pHash, @NonNull final CollectionDAO dao) {
    // First find good rough candidates by searching for the longest word in the title
    String[] words = StringHelper.cleanMultipleSpaces(StringHelper.cleanup(content.getTitle())).split(" ");
    Optional<String> longestWord = Stream.of(words).sorted((o1, o2) -> Integer.compare(o1.length(), o2.length())).findLast();
    if (longestWord.isEmpty())
        return null;

    int[] contentStatuses = ArrayUtils.addAll(libraryStatus, queueTabStatus);
    List<Content> roughCandidates = dao.searchTitlesWith(longestWord.get(), contentStatuses);
    if (roughCandidates.isEmpty())
        return null;

    // Compute cover hashes for selected candidates
    for (Content c : roughCandidates)
        if (0 == c.getCover().getImageHash())
            computeAndSaveCoverHash(context, c, dao);

    // Refine by running the actual duplicate detection algorithm against the rough candidates
    List<DuplicateEntry> entries = new ArrayList<>();
    StringSimilarity cosine = new Cosine();
    // TODO make useLanguage a setting ?
    DuplicateHelper.DuplicateCandidate reference = new DuplicateHelper.DuplicateCandidate(content, true, true, false, pHash);
    List<DuplicateHelper.DuplicateCandidate> candidates = Stream.of(roughCandidates).map(c -> new DuplicateHelper.DuplicateCandidate(c, true, true, false, Long.MIN_VALUE)).toList();
    for (DuplicateHelper.DuplicateCandidate candidate : candidates) {
        DuplicateEntry entry = DuplicateHelper.Companion.processContent(reference, candidate, true, true, true, false, true, 2, cosine);
        if (entry != null)
            entries.add(entry);
    }

    // Sort by similarity and size (unfortunately, Comparator.comparing is API24...)
    Optional<DuplicateEntry> bestMatch = Stream.of(entries).sorted(DuplicateEntry::compareTo).findFirst();
    if (bestMatch.isPresent()) {
        Content resultContent = dao.selectContent(bestMatch.get().getDuplicateId());
        float resultScore = bestMatch.get().calcTotalScore();
        return new ImmutablePair<>(resultContent, resultScore);
    }
    return null;
}
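The rough-candidate step trades recall for speed: rather than scoring every library entry, it only fetches titles containing the longest word of the cleaned-up input title, then runs the expensive cover-hash and cosine-similarity comparison on that short list. A stand-alone sketch of the pre-filter using java.util streams (the original's Stream appears to be the Lightweight-Stream-API; the regex below is a rough approximation of StringHelper.cleanup() plus cleanMultipleSpaces()):

import java.util.Arrays;
import java.util.Comparator;
import java.util.Optional;

// Longest-word pre-filter as in findDuplicate; the whitespace regex stands in
// for the StringHelper cleanup calls.
public class LongestWordFilter {
    static Optional<String> longestWord(String title) {
        return Arrays.stream(title.trim().replaceAll("\\s+", " ").split(" "))
                .max(Comparator.comparingInt(String::length));
    }

    public static void main(String[] args) {
        System.out.println(longestWord("my   example doujinshi title")); // Optional[doujinshi]
    }
}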
Use of me.devsaki.hentoid.database.CollectionDAO in project Hentoid by avluis.
In class ContentHelper, method mergeContents.
/**
* Merge the given list of Content into one single new Content with the given title
 * NB : The Contents in the given list are _not_ removed
*
* @param context Context to use
* @param contentList List of Content to merge together
* @param newTitle Title of the new merged Content
* @param dao DAO to use
* @throws ContentNotProcessedException If something terrible happens
*/
public static void mergeContents(@NonNull Context context, @NonNull List<Content> contentList, @NonNull String newTitle, @NonNull final CollectionDAO dao) throws ContentNotProcessedException {
    Helper.assertNonUiThread();

    // New book inherits properties of the first content of the list
    // which takes "precedence" as the 1st chapter
    Content firstContent = contentList.get(0);

    // Initiate a new Content
    Content mergedContent = new Content();
    mergedContent.setSite(firstContent.getSite());
    mergedContent.setUrl(firstContent.getUrl());
    // Suffix the unique site ID so the merged book is not treated as a copy of firstContent
    mergedContent.setUniqueSiteId(firstContent.getUniqueSiteId() + "_");
    mergedContent.setDownloadMode(firstContent.getDownloadMode());
    mergedContent.setTitle(newTitle);
    mergedContent.setCoverImageUrl(firstContent.getCoverImageUrl());
    mergedContent.setUploadDate(firstContent.getUploadDate());
    mergedContent.setDownloadDate(Instant.now().toEpochMilli());
    mergedContent.setStatus(firstContent.getStatus());
    mergedContent.setFavourite(firstContent.isFavourite());
    mergedContent.setBookPreferences(firstContent.getBookPreferences());
    mergedContent.setManuallyMerged(true);

    // Merge attributes
    List<Attribute> mergedAttributes = Stream.of(contentList).flatMap(c -> Stream.of(c.getAttributes())).toList();
    mergedContent.addAttributes(mergedAttributes);

    // Create destination folder for new content
    DocumentFile targetFolder;
    // External library root for external content
    if (mergedContent.getStatus().equals(StatusContent.EXTERNAL)) {
        DocumentFile externalRootFolder = FileHelper.getFolderFromTreeUriString(context, Preferences.getExternalLibraryUri());
        if (null == externalRootFolder || !externalRootFolder.exists())
            throw new ContentNotProcessedException(mergedContent, "Could not create target directory : external root unreachable");

        ImmutablePair<String, String> bookFolderName = formatBookFolderName(mergedContent);
        // First try finding the folder with new naming...
        targetFolder = FileHelper.findFolder(context, externalRootFolder, bookFolderName.left);
        if (null == targetFolder) {
            // ...then with old (sanitized) naming...
            targetFolder = FileHelper.findFolder(context, externalRootFolder, bookFolderName.right);
            if (null == targetFolder) {
                // ...if not, create a new folder with the new naming...
                targetFolder = externalRootFolder.createDirectory(bookFolderName.left);
                if (null == targetFolder) {
                    // ...if it fails, create a new folder with the old naming
                    targetFolder = externalRootFolder.createDirectory(bookFolderName.right);
                }
            }
        }
    } else {
        // Hentoid download folder for non-external content
        targetFolder = ContentHelper.getOrCreateContentDownloadDir(context, mergedContent);
    }
    if (null == targetFolder || !targetFolder.exists())
        throw new ContentNotProcessedException(mergedContent, "Could not create target directory");
    mergedContent.setStorageUri(targetFolder.getUri().toString());

    // Renumber all picture files and dispatch chapters
    long nbImages = Stream.of(contentList).map(Content::getImageFiles).withoutNulls().flatMap(Stream::of).filter(ImageFile::isReadable).count();
    int nbMaxDigits = (int) (Math.floor(Math.log10(nbImages)) + 1);
    List<ImageFile> mergedImages = new ArrayList<>();
    List<Chapter> mergedChapters = new ArrayList<>();
    ImageFile firstCover = firstContent.getCover();
    ImageFile coverPic = ImageFile.newCover(firstCover.getUrl(), firstCover.getStatus());
    boolean isError = false;
    try {
        // Set cover
        if (isInLibrary(coverPic.getStatus())) {
            String extension = HttpHelper.getExtensionFromUri(firstCover.getFileUri());
            Uri newUri = FileHelper.copyFile(context, Uri.parse(firstCover.getFileUri()), targetFolder.getUri(), firstCover.getMimeType(), firstCover.getName() + "." + extension);
            if (newUri != null)
                coverPic.setFileUri(newUri.toString());
            else
                Timber.w("Could not move file %s", firstCover.getFileUri());
        }
        mergedImages.add(coverPic);

        // Merge images and chapters
        int chapterOrder = 0;
        int pictureOrder = 1;
        int nbProcessedPics = 1;
        Chapter newChapter;
        for (Content c : contentList) {
            if (null == c.getImageFiles())
                continue;
            newChapter = null;
            // Create a default "content chapter" that represents the original book before merging
            Chapter contentChapter = new Chapter(chapterOrder++, c.getGalleryUrl(), c.getTitle());
            contentChapter.setUniqueId(c.getUniqueSiteId() + "-" + contentChapter.getOrder());
            for (ImageFile img : c.getImageFiles()) {
                if (!img.isReadable())
                    continue;
                ImageFile newImg = new ImageFile(img);
                // Force working on a new picture
                newImg.setId(0);
                // Clear content
                newImg.getContent().setTarget(null);
                newImg.setOrder(pictureOrder++);
                newImg.computeName(nbMaxDigits);
                Chapter chapLink = img.getLinkedChapter();
                if (null == chapLink) {
                    // No chapter -> set content chapter
                    newChapter = contentChapter;
                } else {
                    if (chapLink.getUniqueId().isEmpty())
                        chapLink.populateUniqueId();
                    if (null == newChapter || !chapLink.getUniqueId().equals(newChapter.getUniqueId()))
                        newChapter = Chapter.fromChapter(chapLink).setOrder(chapterOrder++);
                }
                if (!mergedChapters.contains(newChapter))
                    mergedChapters.add(newChapter);
                newImg.setChapter(newChapter);
                // If it exists, move the picture to the merged book's folder
                if (isInLibrary(newImg.getStatus())) {
                    String extension = HttpHelper.getExtensionFromUri(img.getFileUri());
                    Uri newUri = FileHelper.copyFile(context, Uri.parse(img.getFileUri()), targetFolder.getUri(), newImg.getMimeType(), newImg.getName() + "." + extension);
                    if (newUri != null)
                        newImg.setFileUri(newUri.toString());
                    else
                        Timber.w("Could not move file %s", img.getFileUri());
                    EventBus.getDefault().post(new ProcessEvent(ProcessEvent.EventType.PROGRESS, R.id.generic_progress, 0, nbProcessedPics++, 0, (int) nbImages));
                }
                mergedImages.add(newImg);
            }
        }
    } catch (IOException e) {
        Timber.w(e);
        isError = true;
    }

    if (!isError) {
        mergedContent.setImageFiles(mergedImages);
        // Chapters have to be attached to Content too
        mergedContent.setChapters(mergedChapters);
        mergedContent.setQtyPages(mergedImages.size() - 1);
        mergedContent.computeSize();

        DocumentFile jsonFile = ContentHelper.createContentJson(context, mergedContent);
        if (jsonFile != null)
            mergedContent.setJsonUri(jsonFile.getUri().toString());

        // Save new content (incl. non-custom group operations)
        ContentHelper.addContent(context, dao, mergedContent);

        // Merge custom groups and update
        // Merged book can be a member of one custom group only
        Optional<Group> customGroup = Stream.of(contentList).flatMap(c -> Stream.of(c.groupItems)).map(GroupItem::getGroup).withoutNulls().distinct().filter(g -> g.grouping.equals(Grouping.CUSTOM)).findFirst();
        if (customGroup.isPresent())
            GroupHelper.moveContentToCustomGroup(mergedContent, customGroup.get(), dao);
    }
    EventBus.getDefault().post(new ProcessEvent(ProcessEvent.EventType.COMPLETE, R.id.generic_progress, 0, (int) nbImages, 0, (int) nbImages));
}
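The renumbering above sizes its zero-padding with nbMaxDigits = floor(log10(nbImages)) + 1, i.e. the digit count of the total page count, so the renamed files sort lexicographically in reading order. A minimal sketch of that naming scheme (padName is a hypothetical stand-in for ImageFile.computeName(); it is not Hentoid code):

// Zero-padded page naming implied by nbMaxDigits = floor(log10(nbImages)) + 1
public class PageNaming {
    // Hypothetical stand-in for ImageFile.computeName(nbMaxDigits)
    static String padName(int order, long totalImages) {
        int digits = (int) Math.floor(Math.log10(totalImages)) + 1;
        return String.format("%0" + digits + "d", order);
    }

    public static void main(String[] args) {
        System.out.println(padName(7, 1234));   // 0007
        System.out.println(padName(465, 1234)); // 0465 -> lexicographic order == reading order
    }
}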
Use of me.devsaki.hentoid.database.CollectionDAO in project Hentoid by avluis.
In class AppStartup, method createBookmarksJson.
// Creates the JSON file for bookmarks if it doesn't exist
private static void createBookmarksJson(@NonNull final Context context, ObservableEmitter<Float> emitter) {
    Timber.i("Create bookmarks JSON : start");
    try {
        DocumentFile appRoot = FileHelper.getFolderFromTreeUriString(context, Preferences.getStorageUri());
        if (appRoot != null) {
            DocumentFile bookmarksJson = FileHelper.findFile(context, appRoot, Consts.BOOKMARKS_JSON_FILE_NAME);
            if (null == bookmarksJson) {
                Timber.i("Create bookmarks JSON : creating JSON");
                CollectionDAO dao = new ObjectBoxDAO(context);
                try {
                    Helper.updateBookmarksJson(context, dao);
                } finally {
                    dao.cleanup();
                }
            } else {
                Timber.i("Create bookmarks JSON : already exists");
            }
        }
    } finally {
        emitter.onComplete();
    }
    Timber.i("Create bookmarks JSON : done");
}
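createBookmarksJson reports through an RxJava ObservableEmitter, and calling emitter.onComplete() in the finally block guarantees the stream terminates even when file access fails. A hypothetical caller-side wiring (the subscription below is an illustrative assumption, not code from AppStartup):

import io.reactivex.Observable;

// Hypothetical wiring : Observable.create hands createBookmarksJson its emitter,
// so the onComplete() in the method's finally block ends this stream.
Observable<Float> startupTask = Observable.create(emitter -> createBookmarksJson(context, emitter));
startupTask.subscribe(
        progress -> Timber.v("startup progress : %f", progress),
        Timber::w,                                   // onError
        () -> Timber.i("bookmarks JSON check done")  // onComplete
);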