2012-12-21 08:51:59 +01:00
|
|
|
# coding: utf-8
|
|
|
|
|
2019-12-25 05:18:34 +01:00
|
|
|
import os
|
2020-01-02 15:01:44 +01:00
|
|
|
from tempfile import NamedTemporaryFile
|
2019-12-25 22:36:26 +01:00
|
|
|
|
2020-04-06 12:24:17 +02:00
|
|
|
import pytest
|
|
|
|
|
2019-12-25 05:18:34 +01:00
|
|
|
from anki.importing import (
|
|
|
|
Anki2Importer,
|
|
|
|
AnkiPackageImporter,
|
2019-12-25 22:36:26 +01:00
|
|
|
MnemosyneImporter,
|
|
|
|
SupermemoXmlImporter,
|
|
|
|
TextImporter,
|
2019-12-25 05:18:34 +01:00
|
|
|
)
|
2019-12-25 22:36:26 +01:00
|
|
|
from tests.shared import getEmptyCol, getUpgradeDeckPath
|
2012-12-21 08:51:59 +01:00
|
|
|
|
|
|
|
# Directory containing this test module; used to build paths to the
# fixture files under support/.
testDir = os.path.dirname(__file__)

# NOTE(review): these two globals are never assigned or read anywhere in
# this file chunk — presumably leftovers from an older test setup; verify
# before removing.
srcNotes = None
srcCards = None
|
|
|
|
|
2012-12-21 08:51:59 +01:00
|
|
|
|
2020-03-16 19:18:37 +01:00
|
|
|
def clear_tempfile(tf):
    """Close and delete a ``NamedTemporaryFile``, ignoring filesystem errors.

    On Windows an open ``NamedTemporaryFile`` cannot be reopened or removed,
    so the tests create it with ``delete=False`` and clean it up here:
    https://stackoverflow.com/questions/23212435/permission-denied-to-write-to-my-temporary-file

    Args:
        tf: the temporary file object (may already be closed or removed).
    """
    try:
        tf.close()
        os.unlink(tf.name)
    except OSError:
        # Best-effort cleanup only. Narrowed from a bare ``except:`` so that
        # e.g. KeyboardInterrupt is no longer swallowed; the Windows failure
        # this guards against raises PermissionError, a subclass of OSError.
        pass
|
|
|
|
|
|
|
|
|
2012-12-21 08:51:59 +01:00
|
|
|
def test_anki2_mediadupes():
    """Importing .anki2 media: identical content is deduped, differing
    content is renamed with the source model id as a suffix."""
    col = getEmptyCol()
    # add a note that references a sound
    n = col.newNote()
    n["Front"] = "[sound:foo.mp3]"
    mid = n.model()["id"]
    col.addNote(n)
    # add that sound to media folder
    with open(os.path.join(col.media.dir(), "foo.mp3"), "w") as f:
        f.write("foo")
    col.close()
    # it should be imported correctly into an empty deck
    empty = getEmptyCol()
    imp = Anki2Importer(empty, col.path)
    imp.run()
    assert os.listdir(empty.media.dir()) == ["foo.mp3"]
    # and importing again will not duplicate, as the file content matches
    empty.remove_cards_and_orphaned_notes(empty.db.list("select id from cards"))
    imp = Anki2Importer(empty, col.path)
    imp.run()
    assert os.listdir(empty.media.dir()) == ["foo.mp3"]
    n = empty.getNote(empty.db.scalar("select id from notes"))
    assert "foo.mp3" in n.fields[0]
    # if the local file content is different, an import should trigger a
    # rename
    empty.remove_cards_and_orphaned_notes(empty.db.list("select id from cards"))
    with open(os.path.join(empty.media.dir(), "foo.mp3"), "w") as f:
        f.write("bar")
    imp = Anki2Importer(empty, col.path)
    imp.run()
    assert sorted(os.listdir(empty.media.dir())) == ["foo.mp3", "foo_%s.mp3" % mid]
    n = empty.getNote(empty.db.scalar("select id from notes"))
    assert "_" in n.fields[0]
    # if the localized media file already exists, we rewrite the note and
    # media
    empty.remove_cards_and_orphaned_notes(empty.db.list("select id from cards"))
    with open(os.path.join(empty.media.dir(), "foo.mp3"), "w") as f:
        f.write("bar")
    imp = Anki2Importer(empty, col.path)
    imp.run()
    # (a byte-for-byte duplicate of this assertion was removed here)
    assert sorted(os.listdir(empty.media.dir())) == ["foo.mp3", "foo_%s.mp3" % mid]
    n = empty.getNote(empty.db.scalar("select id from notes"))
    assert "_" in n.fields[0]
|
|
|
|
|
2019-12-25 05:18:34 +01:00
|
|
|
|
2012-12-21 08:51:59 +01:00
|
|
|
def test_apkg():
    """Importing an .apkg: media comes across, re-import is idempotent,
    and conflicting local media content forces a rename."""
    col = getEmptyCol()
    apkg = str(os.path.join(testDir, "support/media.apkg"))
    importer = AnkiPackageImporter(col, apkg)
    # the media folder starts out empty
    assert os.listdir(col.media.dir()) == []
    importer.run()
    assert os.listdir(col.media.dir()) == ["foo.wav"]
    # importing again should be idempotent in terms of media
    col.remove_cards_and_orphaned_notes(col.db.list("select id from cards"))
    importer = AnkiPackageImporter(col, apkg)
    importer.run()
    assert os.listdir(col.media.dir()) == ["foo.wav"]
    # but if the local file has different data, it will rename
    col.remove_cards_and_orphaned_notes(col.db.list("select id from cards"))
    with open(os.path.join(col.media.dir(), "foo.wav"), "w") as fobj:
        fobj.write("xyz")
    importer = AnkiPackageImporter(col, apkg)
    importer.run()
    assert len(os.listdir(col.media.dir())) == 2
|
2012-12-21 08:51:59 +01:00
|
|
|
|
2019-12-25 05:18:34 +01:00
|
|
|
|
2014-06-28 20:51:30 +02:00
|
|
|
def test_anki2_diffmodel_templates():
    """Unlike the count-based tests, this checks that only the template
    *text* changed between the two packages and that the change lands."""
    dst = getEmptyCol()
    # import the first version of the model
    pkg_path = getUpgradeDeckPath("diffmodeltemplates-1.apkg")
    importer = AnkiPackageImporter(dst, pkg_path)
    importer.dupeOnSchemaChange = True
    importer.run()
    # then the version with the updated template
    pkg_path = getUpgradeDeckPath("diffmodeltemplates-2.apkg")
    importer = AnkiPackageImporter(dst, pkg_path)
    importer.dupeOnSchemaChange = True
    importer.run()
    # the collection should contain the note we imported
    assert dst.noteCount() == 1
    # and its front template should contain the text added in the 2nd package
    card_id = dst.findCards("")[0]  # only 1 note in collection
    imported_note = dst.getCard(card_id).note()
    assert "Changed Front Template" in imported_note.cards()[0].template()["qfmt"]
|
2019-12-25 05:18:34 +01:00
|
|
|
|
2014-06-28 20:51:30 +02:00
|
|
|
|
2013-08-08 06:01:47 +02:00
|
|
|
def test_anki2_updates():
    """Re-importing the same package counts dupes; a package with a newer
    version of the note overwrites the existing one."""
    # create a new empty deck
    dst = getEmptyCol()
    pkg = getUpgradeDeckPath("update1.apkg")
    importer = AnkiPackageImporter(dst, pkg)
    importer.run()
    assert (importer.dupes, importer.added, importer.updated) == (0, 1, 0)
    # importing again should be idempotent
    importer = AnkiPackageImporter(dst, pkg)
    importer.run()
    assert (importer.dupes, importer.added, importer.updated) == (1, 0, 0)
    # importing a newer note should update
    assert dst.noteCount() == 1
    assert dst.db.scalar("select flds from notes").startswith("hello")
    pkg = getUpgradeDeckPath("update2.apkg")
    importer = AnkiPackageImporter(dst, pkg)
    importer.run()
    assert (importer.dupes, importer.added, importer.updated) == (0, 0, 1)
    assert dst.noteCount() == 1
    assert dst.db.scalar("select flds from notes").startswith("goodbye")
|
|
|
|
|
2019-12-25 05:18:34 +01:00
|
|
|
|
2012-12-21 08:51:59 +01:00
|
|
|
def test_csv():
    """TextImporter basics: problem logging, update-on-reimport, tag
    preservation for unmapped columns, add-only and dupes import modes."""
    col = getEmptyCol()
    path = str(os.path.join(testDir, "support/text-2fields.txt"))
    importer = TextImporter(col, path)
    importer.initMapping()
    importer.run()
    # four problems - too many & too few fields, a missing front, and a
    # duplicate entry
    assert len(importer.log) == 5
    assert importer.total == 5
    # if we run the import again, it should update instead
    importer.run()
    assert len(importer.log) == 10
    assert importer.total == 5
    # but importing should not clobber tags if they're unmapped
    n = col.getNote(col.db.scalar("select id from notes"))
    n.addTag("test")
    n.flush()
    importer.run()
    n.load()
    assert n.tags == ["test"]
    # if add-only mode, count will be 0
    importer.importMode = 1
    importer.run()
    assert importer.total == 0
    # and if dupes mode, will reimport everything
    assert col.cardCount() == 5
    importer.importMode = 2
    importer.run()
    # includes repeated field
    assert importer.total == 6
    assert col.cardCount() == 11
    col.close()
|
2012-12-21 08:51:59 +01:00
|
|
|
|
2019-12-25 05:18:34 +01:00
|
|
|
|
2012-12-21 08:51:59 +01:00
|
|
|
def test_csv2():
    """A CSV update with unmapped fields must leave those fields untouched."""
    col = getEmptyCol()
    models = col.models
    current = models.current()
    third_field = models.newField("Three")
    models.addField(current, third_field)
    models.save(current)
    n = col.newNote()
    n["Front"] = "1"
    n["Back"] = "2"
    n["Three"] = "3"
    col.addNote(n)
    # an update with unmapped fields should not clobber those fields
    path = str(os.path.join(testDir, "support/text-update.txt"))
    importer = TextImporter(col, path)
    importer.initMapping()
    importer.run()
    n.load()
    # only the mapped "Back" column was rewritten by the update
    assert n["Front"] == "1"
    assert n["Back"] == "x"
    assert n["Three"] == "3"
    col.close()
|
2012-12-21 08:51:59 +01:00
|
|
|
|
2019-12-25 05:18:34 +01:00
|
|
|
|
2020-01-02 15:01:44 +01:00
|
|
|
def test_tsv_tag_modified():
    """``tagModified`` is added to updated notes alongside existing tags."""
    col = getEmptyCol()
    models = col.models
    model = models.current()
    extra_field = models.newField("Top")
    models.addField(model, extra_field)
    models.save(model)
    n = col.newNote()
    n["Front"] = "1"
    n["Back"] = "2"
    n["Top"] = "3"
    n.addTag("four")
    col.addNote(n)

    # delete=False so the importer can reopen the file on Windows:
    # https://stackoverflow.com/questions/23212435/permission-denied-to-write-to-my-temporary-file
    with NamedTemporaryFile(mode="w", delete=False) as tf:
        tf.write("1\tb\tc\n")
        tf.flush()
        importer = TextImporter(col, tf.name)
        importer.initMapping()
        importer.tagModified = "boom"
        importer.run()
        clear_tempfile(tf)

    n.load()
    assert n["Front"] == "1"
    assert n["Back"] == "b"
    assert n["Top"] == "c"
    assert "four" in n.tags
    assert "boom" in n.tags
    assert len(n.tags) == 2
    assert importer.updateCount == 1

    col.close()
|
2020-01-02 15:01:44 +01:00
|
|
|
|
|
|
|
|
|
|
|
def test_tsv_tag_multiple_tags():
    """``tagModified`` may hold several space-separated tags; they are
    merged (without duplication) into the note's existing tags."""
    col = getEmptyCol()
    mm = col.models
    m = mm.current()
    fld = mm.newField("Top")
    mm.addField(m, fld)
    mm.save(m)
    n = col.newNote()
    n["Front"] = "1"
    n["Back"] = "2"
    n["Top"] = "3"
    n.addTag("four")
    n.addTag("five")
    col.addNote(n)

    # https://stackoverflow.com/questions/23212435/permission-denied-to-write-to-my-temporary-file
    with NamedTemporaryFile(mode="w", delete=False) as tf:
        tf.write("1\tb\tc\n")
        tf.flush()
        i = TextImporter(col, tf.name)
        i.initMapping()
        i.tagModified = "five six"
        i.run()
        clear_tempfile(tf)

    n.load()
    assert n["Front"] == "1"
    assert n["Back"] == "b"
    assert n["Top"] == "c"
    # "five" existed already, so the merged set is four/five/six.
    # sorted() already returns a list, so the list() wrappers were redundant.
    assert sorted(n.tags) == sorted(["four", "five", "six"])

    col.close()
|
2020-01-02 15:01:44 +01:00
|
|
|
|
|
|
|
|
|
|
|
def test_csv_tag_only_if_modified():
    """A no-op import (identical data) must neither apply ``tagModified``
    nor bump ``updateCount``."""
    col = getEmptyCol()
    models = col.models
    model = models.current()
    extra_field = models.newField("Left")
    models.addField(model, extra_field)
    models.save(model)
    n = col.newNote()
    n["Front"] = "1"
    n["Back"] = "2"
    n["Left"] = "3"
    col.addNote(n)

    # https://stackoverflow.com/questions/23212435/permission-denied-to-write-to-my-temporary-file
    with NamedTemporaryFile(mode="w", delete=False) as tf:
        tf.write("1,2,3\n")
        tf.flush()
        importer = TextImporter(col, tf.name)
        importer.initMapping()
        importer.tagModified = "right"
        importer.run()
        clear_tempfile(tf)

    n.load()
    # nothing changed, so no tag and no recorded update
    assert n.tags == []
    assert importer.updateCount == 0

    col.close()
|
2020-01-02 15:01:44 +01:00
|
|
|
|
|
|
|
|
2020-04-06 12:24:17 +02:00
|
|
|
@pytest.mark.filterwarnings("ignore:Using or importing the ABCs")
def test_supermemo_xml_01_unicode():
    """SuperMemo XML import: scheduling data survives the conversion."""
    col = getEmptyCol()
    xml_path = str(os.path.join(testDir, "support/supermemo1.xml"))
    importer = SupermemoXmlImporter(col, xml_path)
    # importer.META.logToStdOutput = True
    importer.run()
    assert importer.total == 1
    card_id = col.db.scalar("select id from cards")
    card = col.getCard(card_id)
    # Applies A Factor-to-E Factor conversion
    assert card.factor == 2879
    assert card.reps == 7
    col.close()
|
2012-12-21 08:51:59 +01:00
|
|
|
|
2019-12-25 05:18:34 +01:00
|
|
|
|
2012-12-21 08:51:59 +01:00
|
|
|
def test_mnemo():
    """Mnemosyne import: card count, tags, and card states come across."""
    col = getEmptyCol()
    db_path = str(os.path.join(testDir, "support/mnemo.db"))
    importer = MnemosyneImporter(col, db_path)
    importer.run()
    assert col.cardCount() == 7
    assert "a_longer_tag" in col.tags.all()
    # exactly one imported card remains in state type 0
    assert col.db.scalar("select count() from cards where type = 0") == 1
    col.close()
|