Search in sources:

Example 16 with IMPORT

Use of com.ichi2.async.CollectionTask.TASK_TYPE.IMPORT in project AnkiChinaAndroid by ankichinateam.

In class ImportTest, method testAnki2DiffmodelTemplates.

@Test
public void testAnki2DiffmodelTemplates() throws IOException, JSONException, ImportExportException {
    // different from the above as this one tests only the template text being
    // changed, not the number of cards/fields
    // import the first version of the model
    String tmp = Shared.getTestFilePath(InstrumentationRegistry.getInstrumentation().getTargetContext(), "diffmodeltemplates-1.apkg");
    AnkiPackageImporter imp = new AnkiPackageImporter(testCol, tmp);
    imp.setDupeOnSchemaChange(true);
    imp.run();
    // then the version with updated template
    tmp = Shared.getTestFilePath(InstrumentationRegistry.getInstrumentation().getTargetContext(), "diffmodeltemplates-2.apkg");
    imp = new AnkiPackageImporter(testCol, tmp);
    imp.setDupeOnSchemaChange(true);
    imp.run();
    // collection should contain the note we imported
    assertEquals(1, testCol.noteCount());
    // the front template should contain the text added in the 2nd package
    Long tcid = testCol.findCards("").get(0);
    Note tnote = testCol.getCard(tcid).note();
    assertTrue(testCol.findTemplates(tnote).get(0).getString("qfmt").contains("Changed Front Template"));
}
Also used: AnkiPackageImporter(com.ichi2.libanki.importer.AnkiPackageImporter) Note(com.ichi2.libanki.Note) Test(org.junit.Test)
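
The two imports above differ only in the package name, so the pattern can be folded into a small test helper. The sketch below reuses only identifiers already visible in the example (Shared, InstrumentationRegistry, AnkiPackageImporter, testCol); the helper name importWithDupeOnSchemaChange is introduced here purely for illustration and is not part of the project.

// Minimal sketch of a reusable test helper, assuming the same fixtures as above
// (Collection is com.ichi2.libanki.Collection). The helper name is hypothetical.
private void importWithDupeOnSchemaChange(Collection col, String assetName)
        throws IOException, ImportExportException {
    String path = Shared.getTestFilePath(
            InstrumentationRegistry.getInstrumentation().getTargetContext(), assetName);
    AnkiPackageImporter imp = new AnkiPackageImporter(col, path);
    // keep duplicate notes instead of skipping them when the model schema changed
    imp.setDupeOnSchemaChange(true);
    imp.run();
}

// Usage in the test body:
// importWithDupeOnSchemaChange(testCol, "diffmodeltemplates-1.apkg");
// importWithDupeOnSchemaChange(testCol, "diffmodeltemplates-2.apkg");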

Example 17 with IMPORT

Use of com.ichi2.async.CollectionTask.TASK_TYPE.IMPORT in project AnkiChinaAndroid by ankichinateam.

In class ImportTest, method testApkg.

@Test
public void testApkg() throws IOException, ImportExportException {
    List<String> expected;
    List<String> actual;
    String apkg = Shared.getTestFilePath(InstrumentationRegistry.getInstrumentation().getTargetContext(), "media.apkg");
    Importer imp = new AnkiPackageImporter(testCol, apkg);
    expected = Collections.emptyList();
    actual = Arrays.asList(new File(testCol.getMedia().dir()).list());
    actual.retainAll(expected);
    assertEquals(actual.size(), expected.size());
    imp.run();
    expected = Collections.singletonList("foo.wav");
    actual = Arrays.asList(new File(testCol.getMedia().dir()).list());
    actual.retainAll(expected);
    assertEquals(expected.size(), actual.size());
    // import again should be idempotent in terms of media
    testCol.remCards(testCol.getDb().queryLongList("select id from cards"));
    imp = new AnkiPackageImporter(testCol, apkg);
    imp.run();
    expected = Collections.singletonList("foo.wav");
    actual = Arrays.asList(new File(testCol.getMedia().dir()).list());
    actual.retainAll(expected);
    assertEquals(expected.size(), actual.size());
    // but if the local file has different data, it will rename
    testCol.remCards(testCol.getDb().queryLongList("select id from cards"));
    FileOutputStream os;
    os = new FileOutputStream(new File(testCol.getMedia().dir(), "foo.wav"), false);
    os.write("xyz".getBytes());
    os.close();
    imp = new AnkiPackageImporter(testCol, apkg);
    imp.run();
    assertEquals(2, new File(testCol.getMedia().dir()).list().length);
}
Also used: AnkiPackageImporter(com.ichi2.libanki.importer.AnkiPackageImporter) FileOutputStream(java.io.FileOutputStream) File(java.io.File) Anki2Importer(com.ichi2.libanki.importer.Anki2Importer) Importer(com.ichi2.libanki.importer.Importer) NoteImporter(com.ichi2.libanki.importer.NoteImporter) TextImporter(com.ichi2.libanki.importer.TextImporter) Test(org.junit.Test)
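
The assertions above check media-directory contents by intersecting the directory listing with an expected list and comparing sizes, which verifies that every expected file is present without constraining the rest of the directory. A minimal JDK/JUnit sketch of the same idiom follows; note that Arrays.asList returns a fixed-size view, so the listing is copied into an ArrayList before retainAll, and a null listing (missing directory) is treated as empty. The helper name assertContainsAll is hypothetical.

// Sketch of the intersection check used above, with defensive copies.
// Needs java.io.File, java.util.*, and import static org.junit.Assert.assertEquals.
private static void assertContainsAll(File mediaDir, List<String> expected) {
    String[] listing = mediaDir.list();
    // a missing or unreadable directory lists as null; treat it as empty
    List<String> actual = (listing == null)
            ? new ArrayList<>()
            : new ArrayList<>(Arrays.asList(listing));
    // keep only the names we are looking for, then compare counts
    actual.retainAll(expected);
    assertEquals(expected.size(), actual.size());
}

// e.g. assertContainsAll(new File(testCol.getMedia().dir()), Collections.singletonList("foo.wav"));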

Example 18 with IMPORT

Use of com.ichi2.async.CollectionTask.TASK_TYPE.IMPORT in project AnkiChinaAndroid by ankichinateam.

In class Anki2Importer, method _importNotes.

/**
 * Notes
 * ***********************************************************
 */
private void _importNotes() {
    // build guid -> (id,mod,mid) hash & map of existing note ids
    mNotes = new HashMap<>();
    Set<Long> existing = new HashSet<>();
    Cursor cur = null;
    try {
        cur = mDst.getDb().getDatabase().query("select id, guid, mod, mid from notes", null);
        while (cur.moveToNext()) {
            long id = cur.getLong(0);
            String guid = cur.getString(1);
            long mod = cur.getLong(2);
            long mid = cur.getLong(3);
            mNotes.put(guid, new Object[] { id, mod, mid });
            existing.add(id);
        }
    } finally {
        if (cur != null) {
            cur.close();
        }
    }
    // we may need to rewrite the guid if the model schemas don't match,
    // so we need to keep track of the changes for the card import stage
    mChangedGuids = new HashMap<>();
    // we ignore updates to changed schemas. we need to note the ignored
    // guids, so we avoid importing invalid cards
    mIgnoredGuids = new HashMap<>();
    // iterate over source collection
    ArrayList<Object[]> add = new ArrayList<>();
    int totalAddCount = 0;
    final int thresExecAdd = 1000;
    ArrayList<Object[]> update = new ArrayList<>();
    int totalUpdateCount = 0;
    final int thresExecUpdate = 1000;
    ArrayList<Long> dirty = new ArrayList<>();
    int totalDirtyCount = 0;
    final int thresExecDirty = 1000;
    int usn = mDst.usn();
    int dupes = 0;
    ArrayList<String> dupesIgnored = new ArrayList<>();
    try {
        mDst.getDb().getDatabase().beginTransaction();
        cur = mSrc.getDb().getDatabase().query("select * from notes", null);
        // Counters for progress updates
        int total = cur.getCount();
        boolean largeCollection = total > 200;
        int onePercent = total / 100;
        int i = 0;
        while (cur.moveToNext()) {
            // turn the db result into a mutable list
            Object[] note = new Object[] { cur.getLong(0), cur.getString(1), cur.getLong(2), cur.getLong(3), cur.getInt(4), cur.getString(5), cur.getString(6), cur.getString(7), cur.getLong(8), cur.getInt(9), cur.getString(10) };
            boolean shouldAdd = _uniquifyNote(note);
            if (shouldAdd) {
                // ensure id is unique
                while (existing.contains(note[0])) {
                    note[0] = ((Long) note[0]) + 999;
                }
                existing.add((Long) note[0]);
                // bump usn
                note[4] = usn;
                // update media references in case of dupes
                note[6] = _mungeMedia((Long) note[MID], (String) note[6]);
                add.add(note);
                dirty.add((Long) note[0]);
                // note we have the added guid
                mNotes.put((String) note[GUID], new Object[] { note[0], note[3], note[MID] });
            } else {
                // a duplicate or changed schema - safe to update?
                dupes += 1;
                if (mAllowUpdate) {
                    Object[] n = mNotes.get(note[GUID]);
                    long oldNid = (Long) n[0];
                    long oldMod = (Long) n[1];
                    long oldMid = (Long) n[2];
                    // will update if incoming note more recent
                    if (oldMod < (Long) note[MOD]) {
                        // safe if note types identical
                        if (oldMid == (Long) note[MID]) {
                            // incoming note should use existing id
                            note[0] = oldNid;
                            note[4] = usn;
                            note[6] = _mungeMedia((Long) note[MID], (String) note[6]);
                            update.add(note);
                            dirty.add((Long) note[0]);
                        } else {
                            dupesIgnored.add(String.format("%s: %s", mCol.getModels().get(oldMid).getString("name"), ((String) note[6]).replace("\u001f", ",")));
                            mIgnoredGuids.put((String) note[GUID], true);
                        }
                    }
                }
            }
            i++;
            // add to col partially, so as to avoid OOM
            if (add.size() >= thresExecAdd) {
                totalAddCount += add.size();
                addNotes(add);
                add.clear();
                Timber.d("add notes: %d", totalAddCount);
            }
            // add to col partially, so as to avoid OOM
            if (update.size() >= thresExecUpdate) {
                totalUpdateCount += update.size();
                updateNotes(update);
                update.clear();
                Timber.d("update notes: %d", totalUpdateCount);
            }
            // add to col partially, so as to avoid OOM
            if (dirty.size() >= thresExecDirty) {
                totalDirtyCount += dirty.size();
                long[] das = Utils.collection2Array(dirty);
                mDst.updateFieldCache(das);
                mDst.getTags().registerNotes(das);
                dirty.clear();
                Timber.d("dirty notes: %d", totalDirtyCount);
            }
            if (total != 0 && (!largeCollection || i % onePercent == 0)) {
                // Calls to publishProgress are reasonably expensive due to res.getString()
                publishProgress(i * 100 / total, 0, 0);
            }
        }
        publishProgress(100, 0, 0);
        // summarize partial add/update/dirty results for total values
        totalAddCount += add.size();
        totalUpdateCount += update.size();
        totalDirtyCount += dirty.size();
        if (dupes > 0) {
            mLog.add(getRes().getString(R.string.import_update_details, totalUpdateCount, dupes));
            if (dupesIgnored.size() > 0) {
                mLog.add(getRes().getString(R.string.import_update_ignored));
            }
        }
        // export info for calling code
        mDupes = dupes;
        mAdded = totalAddCount;
        mUpdated = totalUpdateCount;
        Timber.d("add notes total:    %d", totalAddCount);
        Timber.d("update notes total: %d", totalUpdateCount);
        Timber.d("dirty notes total:  %d", totalDirtyCount);
        // add to col (for last chunk)
        addNotes(add);
        add.clear();
        updateNotes(update);
        update.clear();
        mDst.getDb().getDatabase().setTransactionSuccessful();
    } finally {
        if (cur != null) {
            cur.close();
        }
        if (mDst.getDb().getDatabase().inTransaction()) {
            try {
                mDst.getDb().getDatabase().endTransaction();
            } catch (Exception e) {
                Timber.w(e);
            }
        }
    }
    long[] das = Utils.collection2Array(dirty);
    mDst.updateFieldCache(das);
    mDst.getTags().registerNotes(das);
}
Also used: ArrayList(java.util.ArrayList) Cursor(android.database.Cursor) ConfirmModSchemaException(com.ichi2.anki.exception.ConfirmModSchemaException) ImportExportException(com.ichi2.anki.exception.ImportExportException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) HashSet(java.util.HashSet)
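
_importNotes buffers rows and flushes every 1000 (thresExecAdd, thresExecUpdate, thresExecDirty) so that a large source collection never keeps all pending rows in memory at once. The same idea can be expressed as a small generic buffer that flushes through a callback once it reaches a threshold; the sketch below is illustrative only and is not a class in either project.

// Illustrative sketch of the threshold-based flushing used in _importNotes.
// Plain Java, no Android dependencies.
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

final class ChunkedBuffer<T> {
    private final List<T> buffer = new ArrayList<>();
    private final int threshold;
    private final Consumer<List<T>> flushAction;
    private int total = 0;

    ChunkedBuffer(int threshold, Consumer<List<T>> flushAction) {
        this.threshold = threshold;
        this.flushAction = flushAction;
    }

    void add(T item) {
        buffer.add(item);
        if (buffer.size() >= threshold) {
            flush();
        }
    }

    // also called once after the loop, for the last partial chunk
    void flush() {
        if (buffer.isEmpty()) {
            return;
        }
        total += buffer.size();
        flushAction.accept(new ArrayList<>(buffer));
        buffer.clear();
    }

    int total() {
        return total;
    }
}

In _importNotes the three flush actions would correspond to addNotes(add), updateNotes(update), and the updateFieldCache/registerNotes pair for the dirty note ids, with a final flush() after the cursor loop for the last partial chunk.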

Example 19 with IMPORT

Use of com.ichi2.async.CollectionTask.TASK_TYPE.IMPORT in project AnkiChinaAndroid by ankichinateam.

In class AnkiPackageImporter, method run.

@Override
public void run() throws ImportExportException {
    publishProgress(0, 0, 0);
    File tempDir = new File(new File(mCol.getPath()).getParent(), "tmpzip");
    // self.col into Anki.
    Collection tmpCol;
    Timber.d("Attempting to import package %s", mFile);
    String tmpApkgPath = "";
    if (mFile.endsWith(".card")) {
        tmpApkgPath = mFile.replace(".card", ".apkg");
        AESUtil.decryptionFile(mFile, tmpApkgPath);
        mFile = tmpApkgPath;
    }
    try {
        // We extract the zip contents into a temporary directory and do a little more
        // validation than the desktop client to ensure the extracted collection is an apkg.
        String colname = "collection.anki21";
        try {
            // extract the deck from the zip file
            try {
                mZip = new ZipFile(new File(mFile));
            } catch (FileNotFoundException fileNotFound) {
                // The cache can be cleared between copying the file in and importing. This is temporary
                if (fileNotFound.getMessage().contains("ENOENT")) {
                    mLog.add(getRes().getString(R.string.import_log_file_cache_cleared));
                    return;
                }
                // displays: failed to unzip
                throw fileNotFound;
            }
            // v2 scheduler?
            if (mZip.getEntry(colname) == null) {
                colname = CollectionHelper.COLLECTION_FILENAME;
            }
            // Make sure we have sufficient free space
            long uncompressedSize = Utils.calculateUncompressedSize(mZip);
            long availableSpace = Utils.determineBytesAvailable(mCol.getPath());
            Timber.d("Total uncompressed size will be: %d", uncompressedSize);
            Timber.d("Total available size is:         %d", availableSpace);
            if (uncompressedSize > availableSpace) {
                Timber.e("Not enough space to unzip, need %d, available %d", uncompressedSize, availableSpace);
                mLog.add(getRes().getString(R.string.import_log_insufficient_space, uncompressedSize, availableSpace));
                return;
            }
            // The filename that we extract should be collection.anki2
            // Importing collection.anki21 fails due to some media regexes expecting collection.anki2.
            // We follow how Anki does it and fix the problem here.
            HashMap<String, String> mediaToFileNameMap = new HashMap<>();
            mediaToFileNameMap.put(colname, CollectionHelper.COLLECTION_FILENAME);
            Utils.unzipFiles(mZip, tempDir.getAbsolutePath(), new String[] { colname, "media" }, mediaToFileNameMap);
            colname = CollectionHelper.COLLECTION_FILENAME;
        } catch (IOException e) {
            Timber.e(e, "Failed to unzip apkg.");
            AnkiDroidApp.sendExceptionReport(e, "AnkiPackageImporter::run() - unzip");
            mLog.add(getRes().getString(R.string.import_log_failed_unzip, e.getLocalizedMessage()));
            return;
        }
        String colpath = new File(tempDir, colname).getAbsolutePath();
        if (!(new File(colpath)).exists()) {
            mLog.add(getRes().getString(R.string.import_log_failed_copy_to, colpath));
            return;
        }
        tmpCol = Storage.Collection(mContext, colpath);
        try {
            if (!tmpCol.validCollection()) {
                mLog.add(getRes().getString(R.string.import_log_failed_validate));
                return;
            }
        } finally {
            if (tmpCol != null) {
                tmpCol.close();
            }
        }
        mFile = colpath;
        // we need the media dict in advance, and we'll need a map of fname ->
        // number to use during the import
        File mediaMapFile = new File(tempDir, "media");
        mNameToNum = new HashMap<>();
        String dirPath = tmpCol.getMedia().dir();
        File dir = new File(dirPath);
        // We need the opposite mapping in AnkiDroid since our extraction method requires it.
        Map<String, String> numToName = new HashMap<>();
        try (JsonReader jr = new JsonReader(new FileReader(mediaMapFile))) {
            jr.beginObject();
            // v in anki
            String name;
            // k in anki
            String num;
            while (jr.hasNext()) {
                num = jr.nextName();
                name = jr.nextString();
                File file = new File(dir, name);
                if (!Utils.isInside(file, dir)) {
                    throw (new RuntimeException("Invalid file"));
                }
                Utils.nfcNormalized(num);
                mNameToNum.put(name, num);
                numToName.put(num, name);
            }
            jr.endObject();
        } catch (FileNotFoundException e) {
            Timber.e("Apkg did not contain a media dict. No media will be imported.");
        } catch (IOException e) {
            Timber.e("Malformed media dict. Media import will be incomplete.");
        }
        // run anki2 importer
        super.run();
        // import static media
        for (Map.Entry<String, String> entry : mNameToNum.entrySet()) {
            String file = entry.getKey();
            String c = entry.getValue();
            if (!file.startsWith("_") && !file.startsWith("latex-")) {
                continue;
            }
            File path = new File(mCol.getMedia().dir(), Utils.nfcNormalized(file));
            if (!path.exists()) {
                try {
                    Utils.unzipFiles(mZip, mCol.getMedia().dir(), new String[] { c }, numToName);
                } catch (IOException e) {
                    Timber.e("Failed to extract static media file. Ignoring.");
                }
            }
        }
    } finally {
        long availableSpace = Utils.determineBytesAvailable(mCol.getPath());
        Timber.d("Total available size is: %d", availableSpace);
        // Clean up our temporary files
        if (tempDir.exists()) {
            BackupManager.removeDir(tempDir);
        }
    }
    publishProgress(100, 100, 100);
// if(!tmpApkgPath.isEmpty()){
// new File(tmpApkgPath).delete();
// }
}
Also used: HashMap(java.util.HashMap) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) ZipFile(org.apache.commons.compress.archivers.zip.ZipFile) Collection(com.ichi2.libanki.Collection) JsonReader(com.google.gson.stream.JsonReader) FileReader(java.io.FileReader) File(java.io.File) Map(java.util.Map)
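
The media file inside an apkg is a single JSON object mapping zip entry names ("0", "1", ...) to real media file names. Below is a self-contained sketch of reading it with Gson's streaming JsonReader, with a canonical-path containment check playing the role of Utils.isInside; the MediaMap class and its read method are hypothetical names introduced here for illustration.

// Standalone sketch of parsing the apkg "media" map (zip entry name -> file name).
// Uses only Gson's JsonReader and the JDK; not part of either project.
import com.google.gson.stream.JsonReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

final class MediaMap {
    static Map<String, String> read(File mediaMapFile, File mediaDir) throws IOException {
        Map<String, String> numToName = new HashMap<>();
        try (JsonReader jr = new JsonReader(new FileReader(mediaMapFile))) {
            jr.beginObject();
            while (jr.hasNext()) {
                String num = jr.nextName();    // zip entry name, e.g. "0"
                String name = jr.nextString(); // real media file name
                // reject names that would escape the media directory ("zip slip")
                File target = new File(mediaDir, name);
                if (!target.getCanonicalPath().startsWith(mediaDir.getCanonicalPath() + File.separator)) {
                    throw new IOException("Invalid file name in media map: " + name);
                }
                numToName.put(num, name);
            }
            jr.endObject();
        }
        return numToName;
    }
}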

Example 20 with IMPORT

Use of com.ichi2.async.CollectionTask.TASK_TYPE.IMPORT in project Anki-Android by ankidroid.

In class Anki2Importer, method _importCards.

/**
 * Cards
 * ***********************************************************
 */
private void _importCards() {
    if (mMustResetLearning) {
        try {
            mSrc.changeSchedulerVer(2);
        } catch (ConfirmModSchemaException e) {
            throw new RuntimeException("Changing the scheduler of an import should not cause schema modification", e);
        }
    }
    // build map of guid -> (ord -> cid) and used id cache
    /*
         * Since we can't use a tuple as a key in Java, we resort to indexing twice with nested maps.
         * Python: (guid, ord) -> cid
         * Java: guid -> ord -> cid
         */
    int nbCard = mDst.cardCount();
    Map<String, Map<Integer, Long>> cardsByGuid = HashUtil.HashMapInit(nbCard);
    Set<Long> existing = HashUtil.HashSetInit(nbCard);
    try (Cursor cur = mDst.getDb().query("select f.guid, c.ord, c.id from cards c, notes f " + "where c.nid = f.id")) {
        while (cur.moveToNext()) {
            String guid = cur.getString(0);
            int ord = cur.getInt(1);
            long cid = cur.getLong(2);
            existing.add(cid);
            if (cardsByGuid.containsKey(guid)) {
                cardsByGuid.get(guid).put(ord, cid);
            } else {
                // The size is at most the number of card type in the note type.
                Map<Integer, Long> map = new HashMap<>();
                map.put(ord, cid);
                cardsByGuid.put(guid, map);
            }
        }
    }
    // loop through src
    int nbCardsToImport = mSrc.cardCount();
    List<Object[]> cards = new ArrayList<>(nbCardsToImport);
    int totalCardCount = 0;
    final int thresExecCards = 1000;
    List<Object[]> revlog = new ArrayList<>(mSrc.getSched().logCount());
    int totalRevlogCount = 0;
    final int thresExecRevlog = 1000;
    int usn = mDst.usn();
    long aheadBy = mSrc.getSched().getToday() - mDst.getSched().getToday();
    mDst.getDb().getDatabase().beginTransaction();
    try (Cursor cur = mSrc.getDb().query("select f.guid, c.id, c.did, c.ord, c.type, c.queue, c.due, c.ivl, c.factor, c.reps, c.lapses, c.left, c.odue, c.odid, c.flags, c.data from cards c, notes f " + "where c.nid = f.id")) {
        // Counters for progress updates
        int total = cur.getCount();
        boolean largeCollection = total > 200;
        int onePercent = total / 100;
        int i = 0;
        while (cur.moveToNext()) {
            String guid = cur.getString(0);
            long cid = cur.getLong(1);
            // To keep track of card id in source
            long scid = cid;
            long did = cur.getLong(2);
            int ord = cur.getInt(3);
            @Consts.CARD_TYPE int type = cur.getInt(4);
            @Consts.CARD_QUEUE int queue = cur.getInt(5);
            long due = cur.getLong(6);
            long ivl = cur.getLong(7);
            long factor = cur.getLong(8);
            int reps = cur.getInt(9);
            int lapses = cur.getInt(10);
            int left = cur.getInt(11);
            long odue = cur.getLong(12);
            long odid = cur.getLong(13);
            int flags = cur.getInt(14);
            String data = cur.getString(15);
            if (mIgnoredGuids.contains(guid)) {
                continue;
            }
            // does the card's note exist in dst col?
            if (!mNotes.containsKey(guid)) {
                continue;
            }
            NoteTriple dnid = mNotes.get(guid);
            // does the card already exist in the dst col?
            if (cardsByGuid.containsKey(guid) && cardsByGuid.get(guid).containsKey(ord)) {
                // fixme: in future, could update if newer mod time
                continue;
            }
            // ensure the card id is unique
            while (existing.contains(cid)) {
                cid += 999;
            }
            existing.add(cid);
            // update cid, nid, etc
            long nid = mNotes.get(guid).mNid;
            did = _did(did);
            long mod = mCol.getTime().intTime();
            // review cards have a due date relative to collection
            if (queue == QUEUE_TYPE_REV || queue == QUEUE_TYPE_DAY_LEARN_RELEARN || type == CARD_TYPE_REV) {
                due -= aheadBy;
            }
            // odue needs updating too
            if (odue != 0) {
                odue -= aheadBy;
            }
            // if odid true, convert card from filtered to normal
            if (odid != 0) {
                // odid
                odid = 0;
                // odue
                due = odue;
                odue = 0;
                // queue
                if (type == CARD_TYPE_LRN) {
                    // type
                    queue = QUEUE_TYPE_NEW;
                } else {
                    queue = type;
                }
                // type
                if (type == CARD_TYPE_LRN) {
                    type = CARD_TYPE_NEW;
                }
            }
            cards.add(new Object[] { cid, nid, did, ord, mod, usn, type, queue, due, ivl, factor, reps, lapses, left, odue, odid, flags, data });
            // we need to import revlog, rewriting card ids and bumping usn
            try (Cursor cur2 = mSrc.getDb().query("select * from revlog where cid = " + scid)) {
                while (cur2.moveToNext()) {
                    Object[] rev = new Object[] { cur2.getLong(0), cur2.getLong(1), cur2.getInt(2), cur2.getInt(3), cur2.getLong(4), cur2.getLong(5), cur2.getLong(6), cur2.getLong(7), cur2.getInt(8) };
                    rev[1] = cid;
                    rev[2] = mDst.usn();
                    revlog.add(rev);
                }
            }
            i++;
            // apply card changes partially
            if (cards.size() >= thresExecCards) {
                totalCardCount += cards.size();
                insertCards(cards);
                cards.clear();
                Timber.d("add cards: %d", totalCardCount);
            }
            // apply revlog changes partially
            if (revlog.size() >= thresExecRevlog) {
                totalRevlogCount += revlog.size();
                insertRevlog(revlog);
                revlog.clear();
                Timber.d("add revlog: %d", totalRevlogCount);
            }
            if (total != 0 && (!largeCollection || i % onePercent == 0)) {
                publishProgress(100, i * 100 / total, 0);
            }
        }
        publishProgress(100, 100, 0);
        // count total values
        totalCardCount += cards.size();
        totalRevlogCount += revlog.size();
        Timber.d("add cards total:  %d", totalCardCount);
        Timber.d("add revlog total: %d", totalRevlogCount);
        // apply (for last chunk)
        insertCards(cards);
        cards.clear();
        insertRevlog(revlog);
        revlog.clear();
        mLog.add(getRes().getString(R.string.import_complete_count, totalCardCount));
        mDst.getDb().getDatabase().setTransactionSuccessful();
    } finally {
        DB.safeEndInTransaction(mDst.getDb());
    }
}
Also used: HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Cursor(android.database.Cursor) ConfirmModSchemaException(com.ichi2.anki.exception.ConfirmModSchemaException) Map(java.util.Map)
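
When odid is non-zero the source card sits in a filtered (dynamic) deck, and _importCards converts it back to a normal card: the saved due date in odue is restored, the filtered-deck bookkeeping is cleared, and a learning card falls back to new. Below is a pure-Java sketch of just that branch; the CardState holder and the numeric constants are defined locally to keep the snippet self-contained and mirror Anki's usual scheduler values rather than the project's Consts definitions.

// Illustrative, self-contained version of the filtered-deck reset in _importCards.
final class FilteredCardReset {
    static final int CARD_TYPE_NEW = 0;
    static final int CARD_TYPE_LRN = 1;
    static final int QUEUE_TYPE_NEW = 0;

    static final class CardState {
        long due;   // due date (or position for new cards)
        long odue;  // original due, saved while the card is in a filtered deck
        long odid;  // original deck id; 0 means the card is not in a filtered deck
        int type;   // 0 = new, 1 = learning, 2 = review
        int queue;  // 0 = new, 1 = learning, 2 = review, 3 = day learn/relearn
    }

    static void convertToNormal(CardState c) {
        if (c.odid == 0) {
            return; // not from a filtered deck, nothing to do
        }
        // restore the original due date and drop the filtered-deck bookkeeping
        c.due = c.odue;
        c.odue = 0;
        c.odid = 0;
        // a learning card is reset to new; other types keep a queue equal to their type
        c.queue = (c.type == CARD_TYPE_LRN) ? QUEUE_TYPE_NEW : c.type;
        if (c.type == CARD_TYPE_LRN) {
            c.type = CARD_TYPE_NEW;
        }
    }
}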

Aggregations

File (java.io.File) 11
Test (org.junit.Test) 9
Collection (com.ichi2.libanki.Collection) 8
AnkiPackageImporter (com.ichi2.libanki.importer.AnkiPackageImporter) 7
IOException (java.io.IOException) 7
Note (com.ichi2.libanki.Note) 6
TextImporter (com.ichi2.libanki.importer.TextImporter) 6
FileNotFoundException (java.io.FileNotFoundException) 6
HashMap (java.util.HashMap) 6
Resources (android.content.res.Resources) 5
ConfirmModSchemaException (com.ichi2.anki.exception.ConfirmModSchemaException) 5
Anki2Importer (com.ichi2.libanki.importer.Anki2Importer) 5
ArrayList (java.util.ArrayList) 5
Cursor (android.database.Cursor) 4
InstrumentedTest (com.ichi2.anki.tests.InstrumentedTest) 4
Importer (com.ichi2.libanki.importer.Importer) 4
NoteImporter (com.ichi2.libanki.importer.NoteImporter) 4
FileOutputStream (java.io.FileOutputStream) 4
ImportExportException (com.ichi2.anki.exception.ImportExportException) 3
List (java.util.List) 3