From 289f0a6452cdc655c53a1d527b837ba4a56613d7 Mon Sep 17 00:00:00 2001 From: Damien Elmes Date: Tue, 8 Jan 2013 09:04:35 +0900 Subject: [PATCH] automatically fix 1.2 errors when upgrading --- README | 2 +- anki/importing/anki1.py | 11 +- anki/upgrade.py | 68 +- aqt/importing.py | 4 +- oldanki/README | 2 + oldanki/__init__.py | 58 + oldanki/cards.py | 311 ++ oldanki/db.py | 149 + oldanki/deck.py | 4522 ++++++++++++++++++++++++++++++ oldanki/errors.py | 49 + oldanki/exporting.py | 274 ++ oldanki/facts.py | 157 ++ oldanki/fonts.py | 55 + oldanki/graphs.py | 406 +++ oldanki/history.py | 75 + oldanki/hooks.py | 65 + oldanki/lang.py | 59 + oldanki/latex.py | 136 + oldanki/media.py | 286 ++ oldanki/models.py | 220 ++ oldanki/sound.py | 364 +++ oldanki/stats.py | 560 ++++ oldanki/stdmodels.py | 51 + oldanki/sync.py | 1236 ++++++++ oldanki/tags.py | 59 + oldanki/template/LICENSE | 20 + oldanki/template/README.anki | 6 + oldanki/template/README.rst | 78 + oldanki/template/__init__.py | 7 + oldanki/template/template.py | 156 ++ oldanki/template/view.py | 116 + oldanki/utils.py | 297 ++ tests/support/anki12-broken.anki | Bin 0 -> 64512 bytes tests/support/anki12.anki | Bin 196608 -> 64512 bytes tests/test_importing.py | 3 +- tests/test_upgrade.py | 16 +- 36 files changed, 9839 insertions(+), 39 deletions(-) create mode 100644 oldanki/README create mode 100644 oldanki/__init__.py create mode 100644 oldanki/cards.py create mode 100644 oldanki/db.py create mode 100644 oldanki/deck.py create mode 100644 oldanki/errors.py create mode 100644 oldanki/exporting.py create mode 100644 oldanki/facts.py create mode 100644 oldanki/fonts.py create mode 100644 oldanki/graphs.py create mode 100644 oldanki/history.py create mode 100644 oldanki/hooks.py create mode 100644 oldanki/lang.py create mode 100644 oldanki/latex.py create mode 100644 oldanki/media.py create mode 100644 oldanki/models.py create mode 100644 oldanki/sound.py create mode 100644 oldanki/stats.py create mode 100644 
oldanki/stdmodels.py create mode 100644 oldanki/sync.py create mode 100644 oldanki/tags.py create mode 100644 oldanki/template/LICENSE create mode 100644 oldanki/template/README.anki create mode 100644 oldanki/template/README.rst create mode 100644 oldanki/template/__init__.py create mode 100644 oldanki/template/template.py create mode 100644 oldanki/template/view.py create mode 100644 oldanki/utils.py create mode 100644 tests/support/anki12-broken.anki diff --git a/README b/README index f152da926..f2f14be3d 100644 --- a/README +++ b/README @@ -7,7 +7,7 @@ Prerequisites To install the prerequisites on Ubuntu/Debian, please use the following command: -sudo apt-get install python-qt4 mplayer lame libportaudio2 +sudo apt-get install python-qt4 mplayer lame libportaudio2 python-sqlalchemy If you're on another distribution the packages may be named differently, so please consult your package manager. diff --git a/anki/importing/anki1.py b/anki/importing/anki1.py index 9292d441c..6a18a2323 100644 --- a/anki/importing/anki1.py +++ b/anki/importing/anki1.py @@ -12,14 +12,17 @@ class Anki1Importer(Anki2Importer): def run(self): u = Upgrader() # check - if not u.check(self.file): + res = u.check(self.file) + if res == "invalid": self.log.append(_( - "File is old or damaged; please run Tools>Advanced>Check DB " - "in Anki 1.2 first.")) + "File is invalid. 
Please restore from backup.")) raise Exception("invalidFile") # upgrade + if res != "ok": + self.log.append( + "Problems fixed during upgrade:\n***\n%s\n***\n" % res) try: - deck = u.upgrade(self.file) + deck = u.upgrade() except: traceback.print_exc() self.log.append(traceback.format_exc()) diff --git a/anki/upgrade.py b/anki/upgrade.py index 0c71e2d71..d7e2ad2a5 100644 --- a/anki/upgrade.py +++ b/anki/upgrade.py @@ -18,34 +18,35 @@ from anki.storage import _addSchema, _getColVars, _addColVars, \ class Upgrader(object): def __init__(self): - pass + self.tmppath = None - # Upgrading - ###################################################################### - - def upgrade(self, path): - self.path = path - self._openDB(path) - self._upgradeSchema() - self._openCol() - self._upgradeRest() - return self.col - - # Integrity checking + # Integrity checking & initial setup ###################################################################### def check(self, path): - "True if deck looks ok." - with DB(path) as db: - return self._check(db) + "Returns 'ok', 'invalid', or log of fixes applied." + # copy into a temp file before we open + self.tmppath = tmpfile(suffix=".anki2") + shutil.copy(path, self.tmppath) + # run initial check + with DB(self.tmppath) as db: + res = self._check(db) + # needs fixing? + if res not in ("ok", "invalid"): + res = self._fix(self.tmppath) + # don't allow .upgrade() if invalid + if res == "invalid": + os.unlink(self.tmppath) + self.tmppath = None + return res def _check(self, db): # corrupt? try: if db.scalar("pragma integrity_check") != "ok": - return + return "invalid" except: - return + return "invalid" # old version? 
if db.scalar("select version from decks") < 65: return @@ -98,18 +99,35 @@ f.id = cards.factId)"""): select id from cards where type != (case when type >= 0 then relativeDelay else relativeDelay - 3 end)"""): return - return True + return "ok" - # DB/Deck opening + def _fix(self, path): + from oldanki import DeckStorage + try: + deck = DeckStorage.Deck(path, backup=False) + except: + # if we can't open the file, it's invalid + return "invalid" + # run a db check + res = deck.fixIntegrity() + if "Database file is damaged" in res: + # we can't recover from a corrupt db + return "invalid" + # other errors are non-fatal + deck.close() + return res + + # Upgrading ###################################################################### - def _openDB(self, path): - self.tmppath = tmpfile(suffix=".anki2") - shutil.copy(path, self.tmppath) + def upgrade(self): + assert self.tmppath self.db = DB(self.tmppath) - - def _openCol(self): + self._upgradeSchema() self.col = _Collection(self.db) + self._upgradeRest() + self.tmppath = None + return self.col # Schema upgrade ###################################################################### diff --git a/aqt/importing.py b/aqt/importing.py index 67c78ade5..6050636a1 100644 --- a/aqt/importing.py +++ b/aqt/importing.py @@ -297,9 +297,7 @@ backup, please see the 'Backups' section of the user manual.""")) except Exception, e: if "invalidFile" in unicode(e): msg = _("""\ -Invalid file. Please run a DB check in Anki 1.2 and try again.""") - msg += _(""" \ -Even if the DB check reports 'no problems found', a subsequent import should work.""") +Invalid file. Please restore from backup.""") showWarning(msg) elif "readonly" in unicode(e): showWarning(_("""\ diff --git a/oldanki/README b/oldanki/README new file mode 100644 index 000000000..edc4df461 --- /dev/null +++ b/oldanki/README @@ -0,0 +1,2 @@ +This is libanki 1.2.11, for the purposes of fixing problems when upgrading +1.2.x decks. 
diff --git a/oldanki/__init__.py b/oldanki/__init__.py new file mode 100644 index 000000000..56ea01a09 --- /dev/null +++ b/oldanki/__init__.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Anki (libanki) +==================== + +Open a deck: + + deck = oldanki.DeckStorage.Deck(path) + +Get a card: + + card = deck.getCard() + if not card: + # deck is finished + +Show the card: + + print card.question, card.answer + +Answer the card: + + deck.answerCard(card, ease) + +Edit the card: + + fields = card.fact.model.fieldModels + for field in fields: + card.fact[field.name] = "newvalue" + card.fact.setModified(textChanged=True, deck=deck) + deck.setModified() + +Get all cards via ORM (slow): + + from oldanki.cards import Card + cards = deck.s.query(Card).all() + +Get all q/a/ids via SQL (fast): + + cards = deck.s.all("select id, question, answer from cards") + +Save & close: + + deck.save() + deck.close() +""" +__docformat__ = 'restructuredtext' + +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + pass + +version = "1.2.11" + +from oldanki.deck import DeckStorage diff --git a/oldanki/cards.py b/oldanki/cards.py new file mode 100644 index 000000000..c4b4d70eb --- /dev/null +++ b/oldanki/cards.py @@ -0,0 +1,311 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Cards +==================== +""" +__docformat__ = 'restructuredtext' + +import time, sys, math, random +from oldanki.db import * +from oldanki.models import CardModel, Model, FieldModel, formatQA +from oldanki.facts import Fact, factsTable, Field +from oldanki.utils import parseTags, findTag, stripHTML, genID, hexifyID +from oldanki.media import updateMediaCount, mediaFiles + +MAX_TIMER = 60 + +# Cards +########################################################################## + 
+cardsTable = Table( + 'cards', metadata, + Column('id', Integer, primary_key=True), + Column('factId', Integer, ForeignKey("facts.id"), nullable=False), + Column('cardModelId', Integer, ForeignKey("cardModels.id"), nullable=False), + Column('created', Float, nullable=False, default=time.time), + Column('modified', Float, nullable=False, default=time.time), + Column('tags', UnicodeText, nullable=False, default=u""), + Column('ordinal', Integer, nullable=False), + # cached - changed on fact update + Column('question', UnicodeText, nullable=False, default=u""), + Column('answer', UnicodeText, nullable=False, default=u""), + # default to 'normal' priority; + # this is indexed in deck.py as we need to create a reverse index + Column('priority', Integer, nullable=False, default=2), + Column('interval', Float, nullable=False, default=0), + Column('lastInterval', Float, nullable=False, default=0), + Column('due', Float, nullable=False, default=time.time), + Column('lastDue', Float, nullable=False, default=0), + Column('factor', Float, nullable=False, default=2.5), + Column('lastFactor', Float, nullable=False, default=2.5), + Column('firstAnswered', Float, nullable=False, default=0), + # stats + Column('reps', Integer, nullable=False, default=0), + Column('successive', Integer, nullable=False, default=0), + Column('averageTime', Float, nullable=False, default=0), + Column('reviewTime', Float, nullable=False, default=0), + Column('youngEase0', Integer, nullable=False, default=0), + Column('youngEase1', Integer, nullable=False, default=0), + Column('youngEase2', Integer, nullable=False, default=0), + Column('youngEase3', Integer, nullable=False, default=0), + Column('youngEase4', Integer, nullable=False, default=0), + Column('matureEase0', Integer, nullable=False, default=0), + Column('matureEase1', Integer, nullable=False, default=0), + Column('matureEase2', Integer, nullable=False, default=0), + Column('matureEase3', Integer, nullable=False, default=0), + 
Column('matureEase4', Integer, nullable=False, default=0), + # this duplicates the above data, because there's no way to map imported + # data to the above + Column('yesCount', Integer, nullable=False, default=0), + Column('noCount', Integer, nullable=False, default=0), + # obsolete + Column('spaceUntil', Float, nullable=False, default=0), + # relativeDelay is reused as type without scheduling (ie, it remains 0-2 + # even if card is suspended, etc) + Column('relativeDelay', Float, nullable=False, default=0), + Column('isDue', Boolean, nullable=False, default=0), # obsolete + Column('type', Integer, nullable=False, default=2), + Column('combinedDue', Integer, nullable=False, default=0)) + +class Card(object): + "A card." + + def __init__(self, fact=None, cardModel=None, created=None): + self.tags = u"" + self.id = genID() + # new cards start as new & due + self.type = 2 + self.relativeDelay = self.type + self.timerStarted = False + self.timerStopped = False + self.modified = time.time() + if created: + self.created = created + self.due = created + else: + self.due = self.modified + self.combinedDue = self.due + if fact: + self.fact = fact + if cardModel: + self.cardModel = cardModel + # for non-orm use + self.cardModelId = cardModel.id + self.ordinal = cardModel.ordinal + + def rebuildQA(self, deck, media=True): + # format qa + d = {} + for f in self.fact.model.fieldModels: + d[f.name] = (f.id, self.fact[f.name]) + qa = formatQA(None, self.fact.modelId, d, self.splitTags(), + self.cardModel, deck) + # find old media references + files = {} + for type in ("question", "answer"): + for f in mediaFiles(getattr(self, type) or ""): + if f in files: + files[f] -= 1 + else: + files[f] = -1 + # update q/a + self.question = qa['question'] + self.answer = qa['answer'] + # determine media delta + for type in ("question", "answer"): + for f in mediaFiles(getattr(self, type)): + if f in files: + files[f] += 1 + else: + files[f] = 1 + # update media counts if we're attached to 
deck + if media: + for (f, cnt) in files.items(): + updateMediaCount(deck, f, cnt) + self.setModified() + + def setModified(self): + self.modified = time.time() + + def startTimer(self): + self.timerStarted = time.time() + + def stopTimer(self): + self.timerStopped = time.time() + + def thinkingTime(self): + return (self.timerStopped or time.time()) - self.timerStarted + + def totalTime(self): + return time.time() - self.timerStarted + + def genFuzz(self): + "Generate a random offset to spread intervals." + self.fuzz = random.uniform(0.95, 1.05) + + def htmlQuestion(self, type="question", align=True): + div = '''
<div class="card%s" id="cm%s%s">%s</div>
''' % ( + type[0], type[0], hexifyID(self.cardModelId), + getattr(self, type)) + # add outer div & alignment (with tables due to qt's html handling) + if not align: + return div + attr = type + 'Align' + if getattr(self.cardModel, attr) == 0: + align = "center" + elif getattr(self.cardModel, attr) == 1: + align = "left" + else: + align = "right" + return (("
<center><table width=95%%><tr><td align=%s>" % align) + + div + "</td></tr></table></center>
") + + def htmlAnswer(self, align=True): + return self.htmlQuestion(type="answer", align=align) + + def updateStats(self, ease, state): + self.reps += 1 + if ease > 1: + self.successive += 1 + else: + self.successive = 0 + delay = min(self.totalTime(), MAX_TIMER) + self.reviewTime += delay + if self.averageTime: + self.averageTime = (self.averageTime + delay) / 2.0 + else: + self.averageTime = delay + # we don't track first answer for cards + if state == "new": + state = "young" + # update ease and yes/no count + attr = state + "Ease%d" % ease + setattr(self, attr, getattr(self, attr) + 1) + if ease < 2: + self.noCount += 1 + else: + self.yesCount += 1 + if not self.firstAnswered: + self.firstAnswered = time.time() + self.setModified() + + def splitTags(self): + return (self.fact.tags, self.fact.model.tags, self.cardModel.name) + + def allTags(self): + "Non-canonified string of all tags." + return (self.fact.tags + "," + + self.fact.model.tags) + + def hasTag(self, tag): + return findTag(tag, parseTags(self.allTags())) + + def fromDB(self, s, id): + r = s.first("""select +id, factId, cardModelId, created, modified, tags, ordinal, question, answer, +priority, interval, lastInterval, due, lastDue, factor, +lastFactor, firstAnswered, reps, successive, averageTime, reviewTime, +youngEase0, youngEase1, youngEase2, youngEase3, youngEase4, +matureEase0, matureEase1, matureEase2, matureEase3, matureEase4, +yesCount, noCount, spaceUntil, isDue, type, combinedDue +from cards where id = :id""", id=id) + if not r: + return + (self.id, + self.factId, + self.cardModelId, + self.created, + self.modified, + self.tags, + self.ordinal, + self.question, + self.answer, + self.priority, + self.interval, + self.lastInterval, + self.due, + self.lastDue, + self.factor, + self.lastFactor, + self.firstAnswered, + self.reps, + self.successive, + self.averageTime, + self.reviewTime, + self.youngEase0, + self.youngEase1, + self.youngEase2, + self.youngEase3, + self.youngEase4, + 
self.matureEase0, + self.matureEase1, + self.matureEase2, + self.matureEase3, + self.matureEase4, + self.yesCount, + self.noCount, + self.spaceUntil, + self.isDue, + self.type, + self.combinedDue) = r + return True + + def toDB(self, s): + "Write card to DB." + s.execute("""update cards set +modified=:modified, +tags=:tags, +interval=:interval, +lastInterval=:lastInterval, +due=:due, +lastDue=:lastDue, +factor=:factor, +lastFactor=:lastFactor, +firstAnswered=:firstAnswered, +reps=:reps, +successive=:successive, +averageTime=:averageTime, +reviewTime=:reviewTime, +youngEase0=:youngEase0, +youngEase1=:youngEase1, +youngEase2=:youngEase2, +youngEase3=:youngEase3, +youngEase4=:youngEase4, +matureEase0=:matureEase0, +matureEase1=:matureEase1, +matureEase2=:matureEase2, +matureEase3=:matureEase3, +matureEase4=:matureEase4, +yesCount=:yesCount, +noCount=:noCount, +spaceUntil = :spaceUntil, +isDue = 0, +type = :type, +combinedDue = :combinedDue, +relativeDelay = :relativeDelay, +priority = :priority +where id=:id""", self.__dict__) + +mapper(Card, cardsTable, properties={ + 'cardModel': relation(CardModel), + 'fact': relation(Fact, backref="cards", primaryjoin= + cardsTable.c.factId == factsTable.c.id), + }) + +mapper(Fact, factsTable, properties={ + 'model': relation(Model), + 'fields': relation(Field, backref="fact", order_by=Field.ordinal), + }) + + +# Card deletions +########################################################################## + +cardsDeletedTable = Table( + 'cardsDeleted', metadata, + Column('cardId', Integer, ForeignKey("cards.id"), + nullable=False), + Column('deletedTime', Float, nullable=False)) diff --git a/oldanki/db.py b/oldanki/db.py new file mode 100644 index 000000000..24f59de60 --- /dev/null +++ b/oldanki/db.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +DB tools +==================== + +SessionHelper is a wrapper for the standard 
sqlalchemy session, which provides +some convenience routines, and manages transactions itself. + +object_session() is a replacement for the standard object_session(), which +provides the features of SessionHelper, and avoids taking out another +transaction. +""" +__docformat__ = 'restructuredtext' + +try: + from pysqlite2 import dbapi2 as sqlite +except ImportError: + try: + from sqlite3 import dbapi2 as sqlite + except: + raise Exception("Please install pysqlite2 or python2.5") + +from sqlalchemy import (Table, Integer, Float, Column, MetaData, + ForeignKey, Boolean, String, Date, + UniqueConstraint, Index, PrimaryKeyConstraint) +from sqlalchemy import create_engine +from sqlalchemy.orm import mapper, sessionmaker as _sessionmaker, relation, backref, \ + object_session as _object_session, class_mapper +from sqlalchemy.sql import select, text, and_ +from sqlalchemy.exc import DBAPIError, OperationalError +from sqlalchemy.pool import NullPool +import sqlalchemy + +# some users are still on 0.4.x.. +import warnings +warnings.filterwarnings('ignore', 'Use session.add()') +warnings.filterwarnings('ignore', 'Use session.expunge_all()') + +# sqlalchemy didn't handle the move to unicodetext nicely +try: + from sqlalchemy import UnicodeText +except ImportError: + from sqlalchemy import Unicode + UnicodeText = Unicode + +from oldanki.hooks import runHook + +# shared metadata +metadata = MetaData() + +# this class assumes the provided session is called with transactional=False +class SessionHelper(object): + "Add some convenience routines to a session." 
+ + def __init__(self, session, lock=False, transaction=True): + self._session = session + self._lock = lock + self._transaction = transaction + if self._transaction: + self._session.begin() + if self._lock: + self._lockDB() + self._seen = True + + def save(self, obj): + # compat + if sqlalchemy.__version__.startswith("0.4."): + self._session.save(obj) + else: + self._session.add(obj) + + def clear(self): + # compat + if sqlalchemy.__version__.startswith("0.4."): + self._session.clear() + else: + self._session.expunge_all() + + def update(self, obj): + # compat + if sqlalchemy.__version__.startswith("0.4."): + self._session.update(obj) + else: + self._session.add(obj) + + def execute(self, *a, **ka): + x = self._session.execute(*a, **ka) + runHook("dbFinished") + return x + + def __getattr__(self, k): + return getattr(self.__dict__['_session'], k) + + def scalar(self, sql, **args): + return self.execute(text(sql), args).scalar() + + def all(self, sql, **args): + return self.execute(text(sql), args).fetchall() + + def first(self, sql, **args): + c = self.execute(text(sql), args) + r = c.fetchone() + c.close() + return r + + def column0(self, sql, **args): + return [x[0] for x in self.execute(text(sql), args).fetchall()] + + def statement(self, sql, **kwargs): + "Execute a statement without returning any results. Flush first." + return self.execute(text(sql), kwargs) + + def statements(self, sql, data): + "Execute a statement across data. Flush first." + return self.execute(text(sql), data) + + def __repr__(self): + return repr(self._session) + + def commit(self): + self._session.commit() + if self._transaction: + self._session.begin() + if self._lock: + self._lockDB() + + def _lockDB(self): + "Take out a write lock." 
+ self._session.execute(text("update decks set modified=modified")) + +def object_session(*args): + s = _object_session(*args) + if s: + return SessionHelper(s, transaction=False) + return None + +def sessionmaker(*args, **kwargs): + if sqlalchemy.__version__ < "0.5": + if 'autocommit' in kwargs: + kwargs['transactional'] = not kwargs['autocommit'] + del kwargs['autocommit'] + else: + if 'transactional' in kwargs: + kwargs['autocommit'] = not kwargs['transactional'] + del kwargs['transactional'] + return _sessionmaker(*args, **kwargs) diff --git a/oldanki/deck.py b/oldanki/deck.py new file mode 100644 index 000000000..9af60d919 --- /dev/null +++ b/oldanki/deck.py @@ -0,0 +1,4522 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +The Deck +==================== +""" +__docformat__ = 'restructuredtext' + +import tempfile, time, os, random, sys, re, stat, shutil +import types, traceback, datetime +from anki.utils import json as simplejson + +from oldanki.db import * +from oldanki.lang import _, ngettext +from oldanki.errors import DeckAccessError +from oldanki.stdmodels import BasicModel +from oldanki.utils import parseTags, tidyHTML, genID, ids2str, hexifyID, \ + canonifyTags, joinTags, addTags, checksum +from oldanki.history import CardHistoryEntry +from oldanki.models import Model, CardModel, formatQA +from oldanki.stats import dailyStats, globalStats, genToday +from oldanki.fonts import toPlatformFont +from oldanki.tags import initTagTables, tagIds +from operator import itemgetter +from itertools import groupby +from oldanki.hooks import runHook, hookEmpty +from oldanki.template import render +from oldanki.media import updateMediaCount, mediaFiles, \ + rebuildMediaDir +import oldanki.latex # sets up hook + +# ensure all the DB metadata in other files is loaded before proceeding +import oldanki.models, oldanki.facts, oldanki.cards, oldanki.stats +import oldanki.history, 
oldanki.media + +# the current code set type -= 3 for manually suspended cards, and += 3*n +# for temporary suspends, (where n=1 for bury, n=2 for review/cram). +# This way we don't need to recalculate priorities when enabling the cards +# again, and paves the way for an arbitrary number of priorities in the +# future. But until all clients are upgraded, we need to keep munging the +# priorities to prevent older clients from getting confused +# PRIORITY_REVEARLY = -1 +# PRIORITY_BURIED = -2 +# PRIORITY_SUSPENDED = -3 + +# priorities +PRIORITY_HIGH = 4 +PRIORITY_MED = 3 +PRIORITY_NORM = 2 +PRIORITY_LOW = 1 +PRIORITY_NONE = 0 +# rest +MATURE_THRESHOLD = 21 +NEW_CARDS_DISTRIBUTE = 0 +NEW_CARDS_LAST = 1 +NEW_CARDS_FIRST = 2 +NEW_CARDS_RANDOM = 0 +NEW_CARDS_OLD_FIRST = 1 +NEW_CARDS_NEW_FIRST = 2 +REV_CARDS_OLD_FIRST = 0 +REV_CARDS_NEW_FIRST = 1 +REV_CARDS_DUE_FIRST = 2 +REV_CARDS_RANDOM = 3 +SEARCH_TAG = 0 +SEARCH_TYPE = 1 +SEARCH_PHRASE = 2 +SEARCH_FID = 3 +SEARCH_CARD = 4 +SEARCH_DISTINCT = 5 +SEARCH_FIELD = 6 +SEARCH_FIELD_EXISTS = 7 +SEARCH_QA = 8 +SEARCH_PHRASE_WB = 9 +DECK_VERSION = 65 + +deckVarsTable = Table( + 'deckVars', metadata, + Column('key', UnicodeText, nullable=False, primary_key=True), + Column('value', UnicodeText)) + +# parts of the code assume we only have one deck +decksTable = Table( + 'decks', metadata, + Column('id', Integer, primary_key=True), + Column('created', Float, nullable=False, default=time.time), + Column('modified', Float, nullable=False, default=time.time), + Column('description', UnicodeText, nullable=False, default=u""), + Column('version', Integer, nullable=False, default=DECK_VERSION), + Column('currentModelId', Integer, ForeignKey("models.id")), + # syncName stores an md5sum of the deck path when syncing is enabled. If + # it doesn't match the current deck path, the deck has been moved, + # and syncing is disabled on load. 
+ Column('syncName', UnicodeText), + Column('lastSync', Float, nullable=False, default=0), + # scheduling + ############## + # initial intervals + Column('hardIntervalMin', Float, nullable=False, default=1.0), + Column('hardIntervalMax', Float, nullable=False, default=1.1), + Column('midIntervalMin', Float, nullable=False, default=3.0), + Column('midIntervalMax', Float, nullable=False, default=5.0), + Column('easyIntervalMin', Float, nullable=False, default=7.0), + Column('easyIntervalMax', Float, nullable=False, default=9.0), + # delays on failure + Column('delay0', Integer, nullable=False, default=600), + # days to delay mature fails + Column('delay1', Integer, nullable=False, default=0), + Column('delay2', Float, nullable=False, default=0.0), + # collapsing future cards + Column('collapseTime', Integer, nullable=False, default=1), + # priorities & postponing + Column('highPriority', UnicodeText, nullable=False, default=u"PriorityVeryHigh"), + Column('medPriority', UnicodeText, nullable=False, default=u"PriorityHigh"), + Column('lowPriority', UnicodeText, nullable=False, default=u"PriorityLow"), + Column('suspended', UnicodeText, nullable=False, default=u""), # obsolete + # 0 is random, 1 is by input date + Column('newCardOrder', Integer, nullable=False, default=1), + # when to show new cards + Column('newCardSpacing', Integer, nullable=False, default=NEW_CARDS_DISTRIBUTE), + # limit the number of failed cards in play + Column('failedCardMax', Integer, nullable=False, default=20), + # number of new cards to show per day + Column('newCardsPerDay', Integer, nullable=False, default=20), + # currently unused + Column('sessionRepLimit', Integer, nullable=False, default=0), + Column('sessionTimeLimit', Integer, nullable=False, default=600), + # stats offset + Column('utcOffset', Float, nullable=False, default=-1), + # count cache + Column('cardCount', Integer, nullable=False, default=0), + Column('factCount', Integer, nullable=False, default=0), + 
Column('failedNowCount', Integer, nullable=False, default=0), # obsolete + Column('failedSoonCount', Integer, nullable=False, default=0), + Column('revCount', Integer, nullable=False, default=0), + Column('newCount', Integer, nullable=False, default=0), + # rev order + Column('revCardOrder', Integer, nullable=False, default=0)) + +class Deck(object): + "Top-level object. Manages facts, cards and scheduling information." + + factorFour = 1.3 + initialFactor = 2.5 + minimumAverage = 1.7 + maxScheduleTime = 36500 + + def __init__(self, path=None): + "Create a new deck." + # a limit of 1 deck in the table + self.id = 1 + # db session factory and instance + self.Session = None + self.s = None + + def _initVars(self): + self.tmpMediaDir = None + self.mediaPrefix = "" + self.lastTags = u"" + self.lastLoaded = time.time() + self.undoEnabled = False + self.sessionStartReps = 0 + self.sessionStartTime = 0 + self.lastSessionStart = 0 + self.queueLimit = 200 + # if most recent deck var not defined, make sure defaults are set + if not self.s.scalar("select 1 from deckVars where key = 'revSpacing'"): + self.setVarDefault("suspendLeeches", True) + self.setVarDefault("leechFails", 16) + self.setVarDefault("perDay", True) + self.setVarDefault("newActive", "") + self.setVarDefault("revActive", "") + self.setVarDefault("newInactive", self.suspended) + self.setVarDefault("revInactive", self.suspended) + self.setVarDefault("newSpacing", 60) + self.setVarDefault("mediaURL", "") + self.setVarDefault("latexPre", """\ +\\documentclass[12pt]{article} +\\special{papersize=3in,5in} +\\usepackage[utf8]{inputenc} +\\usepackage{amssymb,amsmath} +\\pagestyle{empty} +\\setlength{\\parindent}{0in} +\\begin{document} +""") + self.setVarDefault("latexPost", "\\end{document}") + self.setVarDefault("revSpacing", 0.1) + self.updateCutoff() + self.setupStandardScheduler() + + def modifiedSinceSave(self): + return self.modified > self.lastLoaded + + # Queue management + 
########################################################################## + + def setupStandardScheduler(self): + self.getCardId = self._getCardId + self.fillFailedQueue = self._fillFailedQueue + self.fillRevQueue = self._fillRevQueue + self.fillNewQueue = self._fillNewQueue + self.rebuildFailedCount = self._rebuildFailedCount + self.rebuildRevCount = self._rebuildRevCount + self.rebuildNewCount = self._rebuildNewCount + self.requeueCard = self._requeueCard + self.timeForNewCard = self._timeForNewCard + self.updateNewCountToday = self._updateNewCountToday + self.cardQueue = self._cardQueue + self.finishScheduler = None + self.answerCard = self._answerCard + self.cardLimit = self._cardLimit + self.answerPreSave = None + self.spaceCards = self._spaceCards + self.scheduler = "standard" + # restore any cards temporarily suspended by alternate schedulers + try: + self.resetAfterReviewEarly() + except OperationalError, e: + # will fail if deck hasn't been upgraded yet + pass + + def fillQueues(self): + self.fillFailedQueue() + self.fillRevQueue() + self.fillNewQueue() + + def rebuildCounts(self): + # global counts + self.cardCount = self.s.scalar("select count(*) from cards") + self.factCount = self.s.scalar("select count(*) from facts") + # due counts + self.rebuildFailedCount() + self.rebuildRevCount() + self.rebuildNewCount() + + def _cardLimit(self, active, inactive, sql): + yes = parseTags(self.getVar(active)) + no = parseTags(self.getVar(inactive)) + if yes: + yids = tagIds(self.s, yes).values() + nids = tagIds(self.s, no).values() + return sql.replace( + "where", + "where +c.id in (select cardId from cardTags where " + "tagId in %s) and +c.id not in (select cardId from " + "cardTags where tagId in %s) and" % ( + ids2str(yids), + ids2str(nids))) + elif no: + nids = tagIds(self.s, no).values() + return sql.replace( + "where", + "where +c.id not in (select cardId from cardTags where " + "tagId in %s) and" % ids2str(nids)) + else: + return sql + + def 
_rebuildFailedCount(self): + # This is a count of all failed cards within the current day cutoff. + # The cards may not be ready for review yet, but can still be + # displayed if failedCardsMax is reached. + self.failedSoonCount = self.s.scalar( + self.cardLimit( + "revActive", "revInactive", + "select count(*) from cards c where type = 0 " + "and combinedDue < :lim"), lim=self.failedCutoff) + + def _rebuildRevCount(self): + self.revCount = self.s.scalar( + self.cardLimit( + "revActive", "revInactive", + "select count(*) from cards c where type = 1 " + "and combinedDue < :lim"), lim=self.dueCutoff) + + def _rebuildNewCount(self): + self.newCount = self.s.scalar( + self.cardLimit( + "newActive", "newInactive", + "select count(*) from cards c where type = 2 " + "and combinedDue < :lim"), lim=self.dueCutoff) + self.updateNewCountToday() + self.spacedCards = [] + + def _updateNewCountToday(self): + self.newCountToday = max(min( + self.newCount, self.newCardsPerDay - + self.newCardsDoneToday()), 0) + + def _fillFailedQueue(self): + if self.failedSoonCount and not self.failedQueue: + self.failedQueue = self.s.all( + self.cardLimit( + "revActive", "revInactive", """ +select c.id, factId, combinedDue from cards c where +type = 0 and combinedDue < :lim order by combinedDue +limit %d""" % self.queueLimit), lim=self.failedCutoff) + self.failedQueue.reverse() + + def _fillRevQueue(self): + if self.revCount and not self.revQueue: + self.revQueue = self.s.all( + self.cardLimit( + "revActive", "revInactive", """ +select c.id, factId from cards c where +type = 1 and combinedDue < :lim order by %s +limit %d""" % (self.revOrder(), self.queueLimit)), lim=self.dueCutoff) + self.revQueue.reverse() + + def _fillNewQueue(self): + if self.newCountToday and not self.newQueue and not self.spacedCards: + self.newQueue = self.s.all( + self.cardLimit( + "newActive", "newInactive", """ +select c.id, factId from cards c where +type = 2 and combinedDue < :lim order by %s +limit %d""" % 
(self.newOrder(), self.queueLimit)), lim=self.dueCutoff) + self.newQueue.reverse() + + def queueNotEmpty(self, queue, fillFunc, new=False): + while True: + self.removeSpaced(queue, new) + if queue: + return True + fillFunc() + if not queue: + return False + + def removeSpaced(self, queue, new=False): + popped = [] + delay = None + while queue: + fid = queue[-1][1] + if fid in self.spacedFacts: + # still spaced + id = queue.pop()[0] + # assuming 10 cards/minute, track id if likely to expire + # before queue refilled + if new and self.newSpacing < self.queueLimit * 6: + popped.append(id) + delay = self.spacedFacts[fid] + else: + if popped: + self.spacedCards.append((delay, popped)) + return + + def revNoSpaced(self): + return self.queueNotEmpty(self.revQueue, self.fillRevQueue) + + def newNoSpaced(self): + return self.queueNotEmpty(self.newQueue, self.fillNewQueue, True) + + def _requeueCard(self, card, oldSuc): + newType = None + try: + if card.reps == 1: + if self.newFromCache: + # fetched from spaced cache + newType = 2 + cards = self.spacedCards.pop(0)[1] + # reschedule the siblings + if len(cards) > 1: + self.spacedCards.append( + (time.time() + self.newSpacing, cards[1:])) + else: + # fetched from normal queue + newType = 1 + self.newQueue.pop() + elif oldSuc == 0: + self.failedQueue.pop() + else: + self.revQueue.pop() + except: + raise Exception("""\ +requeueCard() failed. Please report this along with the steps you take to +produce the problem. 
+ +Counts %d %d %d +Queue %d %d %d +Card info: %d %d %d +New type: %s""" % (self.failedSoonCount, self.revCount, self.newCountToday, + len(self.failedQueue), len(self.revQueue), + len(self.newQueue), + card.reps, card.successive, oldSuc, `newType`)) + + def revOrder(self): + return ("priority desc, interval desc", + "priority desc, interval", + "priority desc, combinedDue", + "priority desc, factId, ordinal")[self.revCardOrder] + + def newOrder(self): + return ("priority desc, due", + "priority desc, due", + "priority desc, due desc")[self.newCardOrder] + + def rebuildTypes(self): + "Rebuild the type cache. Only necessary on upgrade." + # set canonical type first + self.s.statement(""" +update cards set +relativeDelay = (case +when successive then 1 when reps then 0 else 2 end) +""") + # then current type based on that + self.s.statement(""" +update cards set +type = (case +when type >= 0 then relativeDelay else relativeDelay - 3 end) +""") + + def _cardQueue(self, card): + return self.cardType(card) + + def cardType(self, card): + "Return the type of the current card (what queue it's in)" + if card.successive: + return 1 + elif card.reps: + return 0 + else: + return 2 + + def updateCutoff(self): + d = datetime.datetime.utcfromtimestamp( + time.time() - self.utcOffset) + datetime.timedelta(days=1) + d = datetime.datetime(d.year, d.month, d.day) + newday = self.utcOffset - time.timezone + d += datetime.timedelta(seconds=newday) + cutoff = time.mktime(d.timetuple()) + # cutoff must not be in the past + while cutoff < time.time(): + cutoff += 86400 + # cutoff must not be more than 24 hours in the future + cutoff = min(time.time() + 86400, cutoff) + self.failedCutoff = cutoff + if self.getBool("perDay"): + self.dueCutoff = cutoff + else: + self.dueCutoff = time.time() + + def reset(self): + # setup global/daily stats + self._globalStats = globalStats(self) + self._dailyStats = dailyStats(self) + # recheck counts + self.rebuildCounts() + # empty queues; will be refilled 
by getCard() + self.failedQueue = [] + self.revQueue = [] + self.newQueue = [] + self.spacedFacts = {} + # determine new card distribution + if self.newCardSpacing == NEW_CARDS_DISTRIBUTE: + if self.newCountToday: + self.newCardModulus = ( + (self.newCountToday + self.revCount) / self.newCountToday) + # if there are cards to review, ensure modulo >= 2 + if self.revCount: + self.newCardModulus = max(2, self.newCardModulus) + else: + self.newCardModulus = 0 + else: + self.newCardModulus = 0 + # recache css + self.rebuildCSS() + # spacing for delayed cards - not to be confused with newCardSpacing + # above + self.newSpacing = self.getFloat('newSpacing') + self.revSpacing = self.getFloat('revSpacing') + + def checkDay(self): + # check if the day has rolled over + if genToday(self) != self._dailyStats.day: + self.updateCutoff() + self.reset() + + # Review early + ########################################################################## + + def setupReviewEarlyScheduler(self): + self.fillRevQueue = self._fillRevEarlyQueue + self.rebuildRevCount = self._rebuildRevEarlyCount + self.finishScheduler = self._onReviewEarlyFinished + self.answerPreSave = self._reviewEarlyPreSave + self.scheduler = "reviewEarly" + + def _reviewEarlyPreSave(self, card, ease): + if ease > 1: + # prevent it from appearing in next queue fill + card.type += 6 + + def resetAfterReviewEarly(self): + "Put temporarily suspended cards back into play. 
Caller must .reset()" + # FIXME: can ignore priorities in the future + ids = self.s.column0( + "select id from cards where type between 6 and 8 or priority = -1") + if ids: + self.updatePriorities(ids) + self.s.statement( + "update cards set type = type - 6 where type between 6 and 8") + self.flushMod() + + def _onReviewEarlyFinished(self): + # clean up buried cards + self.resetAfterReviewEarly() + # and go back to regular scheduler + self.setupStandardScheduler() + + def _rebuildRevEarlyCount(self): + # in the future it would be nice to skip the first x days of due cards + self.revCount = self.s.scalar( + self.cardLimit( + "revActive", "revInactive", """ +select count() from cards c where type = 1 and combinedDue > :now +"""), now=self.dueCutoff) + + def _fillRevEarlyQueue(self): + if self.revCount and not self.revQueue: + self.revQueue = self.s.all( + self.cardLimit( + "revActive", "revInactive", """ +select id, factId from cards c where type = 1 and combinedDue > :lim +order by combinedDue limit %d""" % self.queueLimit), lim=self.dueCutoff) + self.revQueue.reverse() + + # Learn more + ########################################################################## + + def setupLearnMoreScheduler(self): + self.rebuildNewCount = self._rebuildLearnMoreCount + self.updateNewCountToday = self._updateLearnMoreCountToday + self.finishScheduler = self.setupStandardScheduler + self.scheduler = "learnMore" + + def _rebuildLearnMoreCount(self): + self.newCount = self.s.scalar( + self.cardLimit( + "newActive", "newInactive", + "select count(*) from cards c where type = 2 " + "and combinedDue < :lim"), lim=self.dueCutoff) + self.spacedCards = [] + + def _updateLearnMoreCountToday(self): + self.newCountToday = self.newCount + + # Cramming + ########################################################################## + + def setupCramScheduler(self, active, order): + self.getCardId = self._getCramCardId + self.activeCramTags = active + self.cramOrder = order + self.rebuildNewCount = 
self._rebuildCramNewCount + self.rebuildRevCount = self._rebuildCramCount + self.rebuildFailedCount = self._rebuildFailedCramCount + self.fillRevQueue = self._fillCramQueue + self.fillFailedQueue = self._fillFailedCramQueue + self.finishScheduler = self.setupStandardScheduler + self.failedCramQueue = [] + self.requeueCard = self._requeueCramCard + self.cardQueue = self._cramCardQueue + self.answerCard = self._answerCramCard + self.spaceCards = self._spaceCramCards + # reuse review early's code + self.answerPreSave = self._cramPreSave + self.cardLimit = self._cramCardLimit + self.scheduler = "cram" + + def _cramPreSave(self, card, ease): + # prevent it from appearing in next queue fill + card.lastInterval = self.cramLastInterval + card.type += 6 + + def _spaceCramCards(self, card): + self.spacedFacts[card.factId] = time.time() + self.newSpacing + + def _answerCramCard(self, card, ease): + self.cramLastInterval = card.lastInterval + self._answerCard(card, ease) + if ease == 1: + self.failedCramQueue.insert(0, [card.id, card.factId]) + + def _getCramCardId(self, check=True): + self.checkDay() + self.fillQueues() + if self.failedCardMax and self.failedSoonCount >= self.failedCardMax: + return self.failedQueue[-1][0] + # card due for review? 
+ if self.revNoSpaced(): + return self.revQueue[-1][0] + if self.failedQueue: + return self.failedQueue[-1][0] + if check: + # collapse spaced cards before reverting back to old scheduler + self.reset() + return self.getCardId(False) + # if we're in a custom scheduler, we may need to switch back + if self.finishScheduler: + self.finishScheduler() + self.reset() + return self.getCardId() + + def _cramCardQueue(self, card): + if self.revQueue and self.revQueue[-1][0] == card.id: + return 1 + else: + return 0 + + def _requeueCramCard(self, card, oldSuc): + if self.cardQueue(card) == 1: + self.revQueue.pop() + else: + self.failedCramQueue.pop() + + def _rebuildCramNewCount(self): + self.newCount = 0 + self.newCountToday = 0 + + def _cramCardLimit(self, active, inactive, sql): + # inactive is (currently) ignored + if isinstance(active, list): + return sql.replace( + "where", "where +c.id in " + ids2str(active) + " and") + else: + yes = parseTags(active) + if yes: + yids = tagIds(self.s, yes).values() + return sql.replace( + "where ", + "where +c.id in (select cardId from cardTags where " + "tagId in %s) and " % ids2str(yids)) + else: + return sql + + def _fillCramQueue(self): + if self.revCount and not self.revQueue: + self.revQueue = self.s.all(self.cardLimit( + self.activeCramTags, "", """ +select id, factId from cards c +where type between 0 and 2 +order by %s +limit %s""" % (self.cramOrder, self.queueLimit))) + self.revQueue.reverse() + + def _rebuildCramCount(self): + self.revCount = self.s.scalar(self.cardLimit( + self.activeCramTags, "", + "select count(*) from cards c where type between 0 and 2")) + + def _rebuildFailedCramCount(self): + self.failedSoonCount = len(self.failedCramQueue) + + def _fillFailedCramQueue(self): + self.failedQueue = self.failedCramQueue + + # Getting the next card + ########################################################################## + + def getCard(self, orm=True): + "Return the next card object, or None." 
+ id = self.getCardId() + if id: + return self.cardFromId(id, orm) + else: + self.stopSession() + + def _getCardId(self, check=True): + "Return the next due card id, or None." + self.checkDay() + self.fillQueues() + self.updateNewCountToday() + if self.failedQueue: + # failed card due? + if self.delay0: + if self.failedQueue[-1][2] + self.delay0 < time.time(): + return self.failedQueue[-1][0] + # failed card queue too big? + if (self.failedCardMax and + self.failedSoonCount >= self.failedCardMax): + return self.failedQueue[-1][0] + # distribute new cards? + if self.newNoSpaced() and self.timeForNewCard(): + return self.getNewCard() + # card due for review? + if self.revNoSpaced(): + return self.revQueue[-1][0] + # new cards left? + if self.newCountToday: + id = self.getNewCard() + if id: + return id + if check: + # check for expired cards, or new day rollover + self.updateCutoff() + self.reset() + return self.getCardId(check=False) + # display failed cards early/last + if not check and self.showFailedLast() and self.failedQueue: + return self.failedQueue[-1][0] + # if we're in a custom scheduler, we may need to switch back + if self.finishScheduler: + self.finishScheduler() + self.reset() + return self.getCardId() + + # Get card: helper functions + ########################################################################## + + def _timeForNewCard(self): + "True if it's time to display a new card when distributing." 
+ if not self.newCountToday: + return False + if self.newCardSpacing == NEW_CARDS_LAST: + return False + if self.newCardSpacing == NEW_CARDS_FIRST: + return True + # force review if there are very high priority cards + if self.revQueue: + if self.s.scalar( + "select 1 from cards where id = :id and priority = 4", + id = self.revQueue[-1][0]): + return False + if self.newCardModulus: + return self._dailyStats.reps % self.newCardModulus == 0 + else: + return False + + def getNewCard(self): + src = None + if (self.spacedCards and + self.spacedCards[0][0] < time.time()): + # spaced card has expired + src = 0 + elif self.newQueue: + # card left in new queue + src = 1 + elif self.spacedCards: + # card left in spaced queue + src = 0 + else: + # only cards spaced to another day left + return + if src == 0: + cards = self.spacedCards[0][1] + self.newFromCache = True + return cards[0] + else: + self.newFromCache = False + return self.newQueue[-1][0] + + def showFailedLast(self): + return self.collapseTime or not self.delay0 + + def cardFromId(self, id, orm=False): + "Given a card ID, return a card, and start the card timer." 
+ if orm: + card = self.s.query(oldanki.cards.Card).get(id) + if not card: + return + card.timerStopped = False + else: + card = oldanki.cards.Card() + if not card.fromDB(self.s, id): + return + card.deck = self + card.genFuzz() + card.startTimer() + return card + + # Answering a card + ########################################################################## + + def _answerCard(self, card, ease): + undoName = _("Answer Card") + self.setUndoStart(undoName) + now = time.time() + # old state + oldState = self.cardState(card) + oldQueue = self.cardQueue(card) + lastDelaySecs = time.time() - card.combinedDue + lastDelay = lastDelaySecs / 86400.0 + oldSuc = card.successive + # update card details + last = card.interval + card.interval = self.nextInterval(card, ease) + card.lastInterval = last + if card.reps: + # only update if card was not new + card.lastDue = card.due + card.due = self.nextDue(card, ease, oldState) + card.isDue = 0 + card.lastFactor = card.factor + card.spaceUntil = 0 + if not self.finishScheduler: + # don't update factor in custom schedulers + self.updateFactor(card, ease) + # spacing + self.spaceCards(card) + # adjust counts for current card + if ease == 1: + if card.due < self.failedCutoff: + self.failedSoonCount += 1 + if oldQueue == 0: + self.failedSoonCount -= 1 + elif oldQueue == 1: + self.revCount -= 1 + else: + self.newCount -= 1 + # card stats + oldanki.cards.Card.updateStats(card, ease, oldState) + # update type & ensure past cutoff + card.type = self.cardType(card) + card.relativeDelay = card.type + if ease != 1: + card.due = max(card.due, self.dueCutoff+1) + # allow custom schedulers to munge the card + if self.answerPreSave: + self.answerPreSave(card, ease) + # save + card.combinedDue = card.due + card.toDB(self.s) + # global/daily stats + oldanki.stats.updateAllStats(self.s, self._globalStats, self._dailyStats, + card, ease, oldState) + # review history + entry = CardHistoryEntry(card, ease, lastDelay) + entry.writeSQL(self.s) + 
self.modified = now + # remove from queue + self.requeueCard(card, oldSuc) + # leech handling - we need to do this after the queue, as it may cause + # a reset() + isLeech = self.isLeech(card) + if isLeech: + self.handleLeech(card) + runHook("cardAnswered", card.id, isLeech) + self.setUndoEnd(undoName) + + def _spaceCards(self, card): + new = time.time() + self.newSpacing + self.s.statement(""" +update cards set +combinedDue = (case +when type = 1 then combinedDue + 86400 * (case + when interval*:rev < 1 then 0 + else interval*:rev + end) +when type = 2 then :new +end), +modified = :now, isDue = 0 +where id != :id and factId = :factId +and combinedDue < :cut +and type between 1 and 2""", + id=card.id, now=time.time(), factId=card.factId, + cut=self.dueCutoff, new=new, rev=self.revSpacing) + # update local cache of seen facts + self.spacedFacts[card.factId] = new + + def isLeech(self, card): + no = card.noCount + fmax = self.getInt('leechFails') + if not fmax: + return + return ( + # failed + not card.successive and + # greater than fail threshold + no >= fmax and + # at least threshold/2 reps since last time + (fmax - no) % (max(fmax/2, 1)) == 0) + + def handleLeech(self, card): + self.refreshSession() + scard = self.cardFromId(card.id, True) + tags = scard.fact.tags + tags = addTags("Leech", tags) + scard.fact.tags = canonifyTags(tags) + scard.fact.setModified(textChanged=True, deck=self) + self.updateFactTags([scard.fact.id]) + self.s.flush() + self.s.expunge(scard) + if self.getBool('suspendLeeches'): + self.suspendCards([card.id]) + self.reset() + self.refreshSession() + + # Interval management + ########################################################################## + + def nextInterval(self, card, ease): + "Return the next interval for CARD given EASE." 
+ delay = self._adjustedDelay(card, ease) + return self._nextInterval(card, delay, ease) + + def _nextInterval(self, card, delay, ease): + interval = card.interval + factor = card.factor + # if shown early + if delay < 0: + # FIXME: this should recreate lastInterval from interval / + # lastFactor, or we lose delay information when reviewing early + interval = max(card.lastInterval, card.interval + delay) + if interval < self.midIntervalMin: + interval = 0 + delay = 0 + # if interval is less than mid interval, use presets + if ease == 1: + interval *= self.delay2 + if interval < self.hardIntervalMin: + interval = 0 + elif interval == 0: + if ease == 2: + interval = random.uniform(self.hardIntervalMin, + self.hardIntervalMax) + elif ease == 3: + interval = random.uniform(self.midIntervalMin, + self.midIntervalMax) + elif ease == 4: + interval = random.uniform(self.easyIntervalMin, + self.easyIntervalMax) + else: + # if not cramming, boost initial 2 + if (interval < self.hardIntervalMax and + interval > 0.166): + mid = (self.midIntervalMin + self.midIntervalMax) / 2.0 + interval = mid / factor + # multiply last interval by factor + if ease == 2: + interval = (interval + delay/4) * 1.2 + elif ease == 3: + interval = (interval + delay/2) * factor + elif ease == 4: + interval = (interval + delay) * factor * self.factorFour + fuzz = random.uniform(0.95, 1.05) + interval *= fuzz + if self.maxScheduleTime: + interval = min(interval, self.maxScheduleTime) + return interval + + def nextIntervalStr(self, card, ease, short=False): + "Return the next interval for CARD given EASE as a string." + int = self.nextInterval(card, ease) + return oldanki.utils.fmtTimeSpan(int*86400, short=short) + + def nextDue(self, card, ease, oldState): + "Return time when CARD will expire given EASE." 
+ if ease == 1: + # 600 is a magic value which means no bonus, and is used to ease + # upgrades + cram = self.scheduler == "cram" + if (not cram and oldState == "mature" + and self.delay1 and self.delay1 != 600): + # user wants a bonus of 1+ days. put the failed cards at the + # start of the future day, so that failures that day will come + # after the waiting cards + return self.failedCutoff + (self.delay1 - 1)*86400 + else: + due = 0 + else: + due = card.interval * 86400.0 + return due + time.time() + + def updateFactor(self, card, ease): + "Update CARD's factor based on EASE." + card.lastFactor = card.factor + if not card.reps: + # card is new, inherit beginning factor + card.factor = self.averageFactor + if card.successive and not self.cardIsBeingLearnt(card): + if ease == 1: + card.factor -= 0.20 + elif ease == 2: + card.factor -= 0.15 + if ease == 4: + card.factor += 0.10 + card.factor = max(1.3, card.factor) + + def _adjustedDelay(self, card, ease): + "Return an adjusted delay value for CARD based on EASE." + if self.cardIsNew(card): + return 0 + if card.reps and not card.successive: + return 0 + if card.combinedDue <= self.dueCutoff: + return (self.dueCutoff - card.due) / 86400.0 + else: + return (self.dueCutoff - card.combinedDue) / 86400.0 + + def resetCards(self, ids): + "Reset progress on cards in IDS." 
+ self.s.statement(""" +update cards set interval = :new, lastInterval = 0, lastDue = 0, +factor = 2.5, reps = 0, successive = 0, averageTime = 0, reviewTime = 0, +youngEase0 = 0, youngEase1 = 0, youngEase2 = 0, youngEase3 = 0, +youngEase4 = 0, matureEase0 = 0, matureEase1 = 0, matureEase2 = 0, +matureEase3 = 0,matureEase4 = 0, yesCount = 0, noCount = 0, +spaceUntil = 0, type = 2, relativeDelay = 2, +combinedDue = created, modified = :now, due = created, isDue = 0 +where id in %s""" % ids2str(ids), now=time.time(), new=0) + if self.newCardOrder == NEW_CARDS_RANDOM: + # we need to re-randomize now + self.randomizeNewCards(ids) + self.flushMod() + self.refreshSession() + + def randomizeNewCards(self, cardIds=None): + "Randomize 'due' on all new cards." + now = time.time() + query = "select distinct factId from cards where reps = 0" + if cardIds: + query += " and id in %s" % ids2str(cardIds) + fids = self.s.column0(query) + data = [{'fid': fid, + 'rand': random.uniform(0, now), + 'now': now} for fid in fids] + self.s.statements(""" +update cards +set due = :rand + ordinal, +combinedDue = :rand + ordinal, +modified = :now +where factId = :fid +and relativeDelay = 2""", data) + + def orderNewCards(self): + "Set 'due' to card creation time." + self.s.statement(""" +update cards set +due = created, +combinedDue = created, +modified = :now +where relativeDelay = 2""", now=time.time()) + + def rescheduleCards(self, ids, min, max): + "Reset cards and schedule with new interval in days (min, max)." 
+ self.resetCards(ids) + vals = [] + for id in ids: + r = random.uniform(min*86400, max*86400) + vals.append({ + 'id': id, + 'due': r + time.time(), + 'int': r / 86400.0, + 't': time.time(), + }) + self.s.statements(""" +update cards set +interval = :int, +due = :due, +combinedDue = :due, +reps = 1, +successive = 1, +yesCount = 1, +firstAnswered = :t, +type = 1, +relativeDelay = 1, +isDue = 0 +where id = :id""", vals) + self.flushMod() + + # Times + ########################################################################## + + def nextDueMsg(self): + next = self.earliestTime() + if next: + # all new cards except suspended + newCount = self.newCardsDueBy(self.dueCutoff + 86400) + newCardsTomorrow = min(newCount, self.newCardsPerDay) + cards = self.cardsDueBy(self.dueCutoff + 86400) + msg = _('''\ + +At this time tomorrow:
+%(wait)s
+%(new)s''') % { + 'new': ngettext("There will be %d new card.", + "There will be %d new cards.", + newCardsTomorrow) % newCardsTomorrow, + 'wait': ngettext("There will be %s review.", + "There will be %s reviews.", cards) % cards, + } + if next > (self.dueCutoff+86400) and not newCardsTomorrow: + msg = (_("The next review is in %s.") % + self.earliestTimeStr()) + else: + msg = _("No cards are due.") + return msg + + def earliestTime(self): + """Return the time of the earliest card. +This may be in the past if the deck is not finished. +If the deck has no (enabled) cards, return None. +Ignore new cards.""" + earliestRev = self.s.scalar(self.cardLimit("revActive", "revInactive", """ +select combinedDue from cards c where type = 1 +order by combinedDue +limit 1""")) + earliestFail = self.s.scalar(self.cardLimit("revActive", "revInactive", """ +select combinedDue+%d from cards c where type = 0 +order by combinedDue +limit 1""" % self.delay0)) + if earliestRev and earliestFail: + return min(earliestRev, earliestFail) + elif earliestRev: + return earliestRev + else: + return earliestFail + + def earliestTimeStr(self, next=None): + """Return the relative time to the earliest card as a string.""" + if next == None: + next = self.earliestTime() + if not next: + return _("unknown") + diff = next - time.time() + return oldanki.utils.fmtTimeSpan(diff) + + def cardsDueBy(self, time): + "Number of cards due at TIME. Ignore new cards" + return self.s.scalar( + self.cardLimit( + "revActive", "revInactive", + "select count(*) from cards c where type between 0 and 1 " + "and combinedDue < :lim"), lim=time) + + def newCardsDueBy(self, time): + "Number of new cards due at TIME." 
+ return self.s.scalar( + self.cardLimit( + "newActive", "newInactive", + "select count(*) from cards c where type = 2 " + "and combinedDue < :lim"), lim=time) + + def deckFinishedMsg(self): + spaceSusp = "" + c= self.spacedCardCount() + if c: + spaceSusp += ngettext( + 'There is %d delayed card.', + 'There are %d delayed cards.', c) % c + c2 = self.hiddenCards() + if c2: + if spaceSusp: + spaceSusp += "
" + spaceSusp += _( + "Some cards are inactive or suspended.") + if spaceSusp: + spaceSusp = "

" + spaceSusp + return _('''\ +
+

Congratulations!

You have finished for now.

+%(next)s +%(spaceSusp)s +
''') % { + "next": self.nextDueMsg(), + "spaceSusp": spaceSusp, + } + + # Priorities + ########################################################################## + + def updateAllPriorities(self, partial=False, dirty=True): + "Update all card priorities if changed. Caller must .reset()" + new = self.updateTagPriorities() + if not partial: + new = self.s.all("select id, priority as pri from tags") + cids = self.s.column0( + "select distinct cardId from cardTags where tagId in %s" % + ids2str([x['id'] for x in new])) + self.updatePriorities(cids, dirty=dirty) + + def updateTagPriorities(self): + "Update priority setting on tags table." + # make sure all priority tags exist + for s in (self.lowPriority, self.medPriority, + self.highPriority): + tagIds(self.s, parseTags(s)) + tags = self.s.all("select tag, id, priority from tags") + tags = [(x[0].lower(), x[1], x[2]) for x in tags] + up = {} + for (type, pri) in ((self.lowPriority, 1), + (self.medPriority, 3), + (self.highPriority, 4)): + for tag in parseTags(type.lower()): + up[tag] = pri + new = [] + for (tag, id, pri) in tags: + if tag in up and up[tag] != pri: + new.append({'id': id, 'pri': up[tag]}) + elif tag not in up and pri != 2: + new.append({'id': id, 'pri': 2}) + self.s.statements( + "update tags set priority = :pri where id = :id", + new) + return new + + def updatePriorities(self, cardIds, suspend=[], dirty=True): + "Update priorities for cardIds. Caller must .reset()." 
+ # any tags to suspend + if suspend: + ids = tagIds(self.s, suspend) + self.s.statement( + "update tags set priority = 0 where id in %s" % + ids2str(ids.values())) + if len(cardIds) > 1000: + limit = "" + else: + limit = "and cardTags.cardId in %s" % ids2str(cardIds) + cards = self.s.all(""" +select cardTags.cardId, +case +when max(tags.priority) > 2 then max(tags.priority) +when min(tags.priority) = 1 then 1 +else 2 end +from cardTags, tags +where cardTags.tagId = tags.id +%s +group by cardTags.cardId""" % limit) + if dirty: + extra = ", modified = :m " + else: + extra = "" + for pri in range(5): + cs = [c[0] for c in cards if c[1] == pri] + if cs: + # catch review early & buried but not suspended + self.s.statement(( + "update cards set priority = :pri %s where id in %s " + "and priority != :pri and priority >= -2") % ( + extra, ids2str(cs)), pri=pri, m=time.time()) + + def updatePriority(self, card): + "Update priority on a single card." + self.s.flush() + self.updatePriorities([card.id]) + + # Suspending + ########################################################################## + + # when older clients are upgraded, we can remove the code which touches + # priorities & isDue + + def suspendCards(self, ids): + "Suspend cards. Caller must .reset()" + self.startProgress() + self.s.statement(""" +update cards +set type = relativeDelay - 3, +priority = -3, modified = :t, isDue=0 +where type >= 0 and id in %s""" % ids2str(ids), t=time.time()) + self.flushMod() + self.finishProgress() + + def unsuspendCards(self, ids): + "Unsuspend cards. Caller must .reset()" + self.startProgress() + self.s.statement(""" +update cards set type = relativeDelay, priority=0, modified=:t +where type < 0 and id in %s""" % + ids2str(ids), t=time.time()) + self.updatePriorities(ids) + self.flushMod() + self.finishProgress() + + def buryFact(self, fact): + "Bury all cards for fact until next session. 
Caller must .reset()" + for card in fact.cards: + if card.type in (0,1,2): + card.priority = -2 + card.type += 3 + card.isDue = 0 + self.flushMod() + + # Counts + ########################################################################## + + def hiddenCards(self): + "Assumes queue finished. True if some due cards have not been shown." + return self.s.scalar(""" +select 1 from cards where combinedDue < :now +and type between 0 and 1 limit 1""", now=self.dueCutoff) + + def newCardsDoneToday(self): + return (self._dailyStats.newEase0 + + self._dailyStats.newEase1 + + self._dailyStats.newEase2 + + self._dailyStats.newEase3 + + self._dailyStats.newEase4) + + def spacedCardCount(self): + "Number of spaced cards." + return self.s.scalar(""" +select count(cards.id) from cards where +combinedDue > :now and due < :now""", now=time.time()) + + def isEmpty(self): + return not self.cardCount + + def matureCardCount(self): + return self.s.scalar( + "select count(id) from cards where interval >= :t ", + t=MATURE_THRESHOLD) + + def youngCardCount(self): + return self.s.scalar( + "select count(id) from cards where interval < :t " + "and reps != 0", t=MATURE_THRESHOLD) + + def newCountAll(self): + "All new cards, including spaced." + return self.s.scalar( + "select count(id) from cards where relativeDelay = 2") + + def seenCardCount(self): + return self.s.scalar( + "select count(id) from cards where relativeDelay between 0 and 1") + + # Card predicates + ########################################################################## + + def cardState(self, card): + if self.cardIsNew(card): + return "new" + elif card.interval > MATURE_THRESHOLD: + return "mature" + return "young" + + def cardIsNew(self, card): + "True if a card has never been seen before." + return card.reps == 0 + + def cardIsBeingLearnt(self, card): + "True if card should use present intervals." + return card.lastInterval < 7 + + def cardIsYoung(self, card): + "True if card is not new and not mature." 
+ return (not self.cardIsNew(card) and + not self.cardIsMature(card)) + + def cardIsMature(self, card): + return card.interval >= MATURE_THRESHOLD + + # Stats + ########################################################################## + + def getStats(self, short=False): + "Return some commonly needed stats." + stats = oldanki.stats.getStats(self.s, self._globalStats, self._dailyStats) + # add scheduling related stats + stats['new'] = self.newCountToday + stats['failed'] = self.failedSoonCount + stats['rev'] = self.revCount + if stats['dAverageTime']: + stats['timeLeft'] = oldanki.utils.fmtTimeSpan( + self.getETA(stats), pad=0, point=1, short=short) + else: + stats['timeLeft'] = _("Unknown") + return stats + + def getETA(self, stats): + # rev + new cards first, account for failures + count = stats['rev'] + stats['new'] + count *= 1 + stats['gYoungNo%'] / 100.0 + left = count * stats['dAverageTime'] + # failed - higher time per card for higher amount of cards + failedBaseMulti = 1.5 + failedMod = 0.07 + failedBaseCount = 20 + factor = (failedBaseMulti + + (failedMod * (stats['failed'] - failedBaseCount))) + left += stats['failed'] * stats['dAverageTime'] * factor + return left + + # Facts + ########################################################################## + + def newFact(self, model=None): + "Return a new fact with the current model." + if model is None: + model = self.currentModel + return oldanki.facts.Fact(model) + + def addFact(self, fact, reset=True): + "Add a fact to the deck. Return list of new cards." 
+ if not fact.model: + fact.model = self.currentModel + # validate + fact.assertValid() + fact.assertUnique(self.s) + # check we have card models available + cms = self.availableCardModels(fact) + if not cms: + return None + # proceed + cards = [] + self.s.save(fact) + # update field cache + self.factCount += 1 + self.flushMod() + isRandom = self.newCardOrder == NEW_CARDS_RANDOM + if isRandom: + due = random.uniform(0, time.time()) + t = time.time() + for cardModel in cms: + created = fact.created + 0.00001*cardModel.ordinal + card = oldanki.cards.Card(fact, cardModel, created) + if isRandom: + card.due = due + card.combinedDue = due + self.flushMod() + cards.append(card) + # update card q/a + fact.setModified(True, self) + self.updateFactTags([fact.id]) + # this will call reset() which will update counts + self.updatePriorities([c.id for c in cards]) + # keep track of last used tags for convenience + self.lastTags = fact.tags + self.flushMod() + if reset: + self.reset() + return fact + + def availableCardModels(self, fact, checkActive=True): + "List of active card models that aren't empty for FACT." 
+ models = [] + for cardModel in fact.model.cardModels: + if cardModel.active or not checkActive: + ok = True + for (type, format) in [("q", cardModel.qformat), + ("a", cardModel.aformat)]: + # compat + format = re.sub("%\((.+?)\)s", "{{\\1}}", format) + empty = {} + local = {}; local.update(fact) + local['tags'] = u"" + local['Tags'] = u"" + local['cardModel'] = u"" + local['modelName'] = u"" + for k in local.keys(): + empty[k] = u"" + empty["text:"+k] = u"" + local["text:"+k] = local[k] + empty['tags'] = "" + local['tags'] = fact.tags + try: + if (render(format, local) == + render(format, empty)): + ok = False + break + except (KeyError, TypeError, ValueError): + ok = False + break + if ok or type == "a" and cardModel.allowEmptyAnswer: + models.append(cardModel) + return models + + def addCards(self, fact, cardModelIds): + "Caller must flush first, flushMod after, rebuild priorities." + ids = [] + for cardModel in self.availableCardModels(fact, False): + if cardModel.id not in cardModelIds: + continue + if self.s.scalar(""" +select count(id) from cards +where factId = :fid and cardModelId = :cmid""", + fid=fact.id, cmid=cardModel.id) == 0: + # enough for 10 card models assuming 0.00001 timer precision + card = oldanki.cards.Card( + fact, cardModel, + fact.created+0.0001*cardModel.ordinal) + self.updateCardTags([card.id]) + self.updatePriority(card) + self.cardCount += 1 + self.newCount += 1 + ids.append(card.id) + + if ids: + fact.setModified(textChanged=True, deck=self) + self.setModified() + return ids + + def factIsInvalid(self, fact): + "True if existing fact is invalid. Returns the error." + try: + fact.assertValid() + fact.assertUnique(self.s) + except FactInvalidError, e: + return e + + def factUseCount(self, factId): + "Return number of cards referencing a given fact id." + return self.s.scalar("select count(id) from cards where factId = :id", + id=factId) + + def deleteFact(self, factId): + "Delete a fact. Removes any associated cards. Don't flush." 
+ self.s.flush() + # remove any remaining cards + self.s.statement("insert into cardsDeleted select id, :time " + "from cards where factId = :factId", + time=time.time(), factId=factId) + self.s.statement( + "delete from cards where factId = :id", id=factId) + # and then the fact + self.deleteFacts([factId]) + self.setModified() + + def deleteFacts(self, ids): + "Bulk delete facts by ID; don't touch cards. Caller must .reset()." + if not ids: + return + self.s.flush() + now = time.time() + strids = ids2str(ids) + self.s.statement("delete from facts where id in %s" % strids) + self.s.statement("delete from fields where factId in %s" % strids) + data = [{'id': id, 'time': now} for id in ids] + self.s.statements("insert into factsDeleted values (:id, :time)", data) + self.setModified() + + def deleteDanglingFacts(self): + "Delete any facts without cards. Return deleted ids." + ids = self.s.column0(""" +select facts.id from facts +where facts.id not in (select distinct factId from cards)""") + self.deleteFacts(ids) + return ids + + def previewFact(self, oldFact, cms=None): + "Duplicate fact and generate cards for preview. Don't add to deck." + # check we have card models available + if cms is None: + cms = self.availableCardModels(oldFact, checkActive=True) + if not cms: + return [] + fact = self.cloneFact(oldFact) + # proceed + cards = [] + for cardModel in cms: + card = oldanki.cards.Card(fact, cardModel) + cards.append(card) + fact.setModified(textChanged=True, deck=self, media=False) + return cards + + def cloneFact(self, oldFact): + "Copy fact into new session." + model = self.s.query(Model).get(oldFact.model.id) + fact = self.newFact(model) + for field in fact.fields: + fact[field.name] = oldFact[field.name] + fact.tags = oldFact.tags + return fact + + # Cards + ########################################################################## + + def deleteCard(self, id): + "Delete a card given its id. Delete any unused facts. Don't flush." 
+ self.deleteCards([id]) + + def deleteCards(self, ids): + "Bulk delete cards by ID. Caller must .reset()" + if not ids: + return + self.s.flush() + now = time.time() + strids = ids2str(ids) + self.startProgress() + # grab fact ids + factIds = self.s.column0("select factId from cards where id in %s" + % strids) + # drop from cards + self.s.statement("delete from cards where id in %s" % strids) + # note deleted + data = [{'id': id, 'time': now} for id in ids] + self.s.statements("insert into cardsDeleted values (:id, :time)", data) + # gather affected tags + tags = self.s.column0( + "select tagId from cardTags where cardId in %s" % + strids) + # delete + self.s.statement("delete from cardTags where cardId in %s" % strids) + # find out if they're used by anything else + unused = [] + for tag in tags: + if not self.s.scalar( + "select 1 from cardTags where tagId = :d limit 1", d=tag): + unused.append(tag) + # delete unused + self.s.statement("delete from tags where id in %s and priority = 2" % + ids2str(unused)) + # remove any dangling facts + self.deleteDanglingFacts() + self.refreshSession() + self.flushMod() + self.finishProgress() + + # Models + ########################################################################## + + def addModel(self, model): + if model not in self.models: + self.models.append(model) + self.currentModel = model + self.flushMod() + + def deleteModel(self, model): + "Delete MODEL, and all its cards/facts. Caller must .reset()." 
+ if self.s.scalar("select count(id) from models where id=:id", + id=model.id): + # delete facts/cards + self.currentModel + self.deleteCards(self.s.column0(""" +select cards.id from cards, facts where +facts.modelId = :id and +facts.id = cards.factId""", id=model.id)) + # then the model + self.models.remove(model) + self.s.delete(model) + self.s.flush() + if self.currentModel == model: + self.currentModel = self.models[0] + self.s.statement("insert into modelsDeleted values (:id, :time)", + id=model.id, time=time.time()) + self.flushMod() + self.refreshSession() + self.setModified() + + def modelUseCount(self, model): + "Return number of facts using model." + return self.s.scalar("select count(facts.modelId) from facts " + "where facts.modelId = :id", + id=model.id) + + def deleteEmptyModels(self): + for model in self.models: + if not self.modelUseCount(model): + self.deleteModel(model) + + def rebuildCSS(self): + # css for all fields + def _genCSS(prefix, row): + (id, fam, siz, col, align, rtl, pre) = row + t = "" + if fam: t += 'font-family:"%s";' % toPlatformFont(fam) + if siz: t += 'font-size:%dpx;' % siz + if col: t += 'color:%s;' % col + if rtl == "rtl": + t += "direction:rtl;unicode-bidi:embed;" + if pre: + t += "white-space:pre-wrap;" + if align != -1: + if align == 0: align = "center" + elif align == 1: align = "left" + else: align = "right" + t += 'text-align:%s;' % align + if t: + t = "%s%s {%s}\n" % (prefix, hexifyID(id), t) + return t + css = "".join([_genCSS(".fm", row) for row in self.s.all(""" +select id, quizFontFamily, quizFontSize, quizFontColour, -1, + features, editFontFamily from fieldModels""")]) + cardRows = self.s.all(""" +select id, null, null, null, questionAlign, 0, 0 from cardModels""") + css += "".join([_genCSS("#cmq", row) for row in cardRows]) + css += "".join([_genCSS("#cma", row) for row in cardRows]) + css += "".join([".cmb%s {background:%s;}\n" % + (hexifyID(row[0]), row[1]) for row in self.s.all(""" +select id, lastFontColour 
from cardModels""")]) + self.css = css + self.setVar("cssCache", css, mod=False) + self.addHexCache() + return css + + def addHexCache(self): + ids = self.s.column0(""" +select id from fieldModels union +select id from cardModels union +select id from models""") + cache = {} + for id in ids: + cache[id] = hexifyID(id) + self.setVar("hexCache", simplejson.dumps(cache), mod=False) + + def copyModel(self, oldModel): + "Add a new model to DB based on MODEL." + m = Model(_("%s copy") % oldModel.name) + for f in oldModel.fieldModels: + f = f.copy() + m.addFieldModel(f) + for c in oldModel.cardModels: + c = c.copy() + m.addCardModel(c) + for attr in ("tags", "spacing", "initialSpacing"): + setattr(m, attr, getattr(oldModel, attr)) + self.addModel(m) + return m + + def changeModel(self, factIds, newModel, fieldMap, cardMap): + "Caller must .reset()" + self.s.flush() + fids = ids2str(factIds) + changed = False + # field remapping + if fieldMap: + changed = True + self.startProgress(len(fieldMap)+2) + seen = {} + for (old, new) in fieldMap.items(): + self.updateProgress(_("Changing fields...")) + seen[new] = 1 + if new: + # can rename + self.s.statement(""" +update fields set +fieldModelId = :new, +ordinal = :ord +where fieldModelId = :old +and factId in %s""" % fids, new=new.id, ord=new.ordinal, old=old.id) + else: + # no longer used + self.s.statement(""" +delete from fields where factId in %s +and fieldModelId = :id""" % fids, id=old.id) + # new + for field in newModel.fieldModels: + self.updateProgress() + if field not in seen: + d = [{'id': genID(), + 'fid': f, + 'fmid': field.id, + 'ord': field.ordinal} + for f in factIds] + self.s.statements(''' +insert into fields +(id, factId, fieldModelId, ordinal, value) +values +(:id, :fid, :fmid, :ord, "")''', d) + # fact modtime + self.updateProgress() + self.s.statement(""" +update facts set +modified = :t, +modelId = :id +where id in %s""" % fids, t=time.time(), id=newModel.id) + self.finishProgress() + # template remapping + 
self.startProgress(len(cardMap)+4) + toChange = [] + self.updateProgress(_("Changing cards...")) + for (old, new) in cardMap.items(): + if not new: + # delete + self.s.statement(""" +delete from cards +where cardModelId = :cid and +factId in %s""" % fids, cid=old.id) + elif old != new: + # gather ids so we can rename x->y and y->x + ids = self.s.column0(""" +select id from cards where +cardModelId = :id and factId in %s""" % fids, id=old.id) + toChange.append((new, ids)) + for (new, ids) in toChange: + self.updateProgress() + self.s.statement(""" +update cards set +cardModelId = :new, +ordinal = :ord +where id in %s""" % ids2str(ids), new=new.id, ord=new.ordinal) + self.updateProgress() + self.updateCardQACacheFromIds(factIds, type="facts") + self.flushMod() + self.updateProgress() + cardIds = self.s.column0( + "select id from cards where factId in %s" % + ids2str(factIds)) + self.updateCardTags(cardIds) + self.updateProgress() + self.updatePriorities(cardIds) + self.updateProgress() + self.refreshSession() + self.finishProgress() + + # Fields + ########################################################################## + + def allFields(self): + "Return a list of all possible fields across all models." 
+ return self.s.column0("select distinct name from fieldmodels") + + def deleteFieldModel(self, model, field): + self.startProgress() + self.s.statement("delete from fields where fieldModelId = :id", + id=field.id) + self.s.statement("update facts set modified = :t where modelId = :id", + id=model.id, t=time.time()) + model.fieldModels.remove(field) + # update q/a formats + for cm in model.cardModels: + types = ("%%(%s)s" % field.name, + "%%(text:%s)s" % field.name, + # new style + "<<%s>>" % field.name, + "<>" % field.name) + for t in types: + for fmt in ('qformat', 'aformat'): + setattr(cm, fmt, getattr(cm, fmt).replace(t, "")) + self.updateCardsFromModel(model) + model.setModified() + self.flushMod() + self.finishProgress() + + def addFieldModel(self, model, field): + "Add FIELD to MODEL and update cards." + model.addFieldModel(field) + # commit field to disk + self.s.flush() + self.s.statement(""" +insert into fields (factId, fieldModelId, ordinal, value) +select facts.id, :fmid, :ordinal, "" from facts +where facts.modelId = :mid""", fmid=field.id, mid=model.id, ordinal=field.ordinal) + # ensure facts are marked updated + self.s.statement(""" +update facts set modified = :t where modelId = :mid""" + , t=time.time(), mid=model.id) + model.setModified() + self.flushMod() + + def renameFieldModel(self, model, field, newName): + "Change FIELD's name in MODEL and update FIELD in all facts." + for cm in model.cardModels: + types = ("%%(%s)s", + "%%(text:%s)s", + # new styles + "{{%s}}", + "{{text:%s}}", + "{{#%s}}", + "{{^%s}}", + "{{/%s}}") + for t in types: + for fmt in ('qformat', 'aformat'): + setattr(cm, fmt, getattr(cm, fmt).replace(t%field.name, + t%newName)) + field.name = newName + model.setModified() + self.flushMod() + + def fieldModelUseCount(self, fieldModel): + "Return the number of cards using fieldModel." 
+ return self.s.scalar(""" +select count(id) from fields where +fieldModelId = :id and value != "" +""", id=fieldModel.id) + + def rebuildFieldOrdinals(self, modelId, ids): + """Update field ordinal for all fields given field model IDS. +Caller must update model modtime.""" + self.s.flush() + strids = ids2str(ids) + self.s.statement(""" +update fields +set ordinal = (select ordinal from fieldModels where id = fieldModelId) +where fields.fieldModelId in %s""" % strids) + # dirty associated facts + self.s.statement(""" +update facts +set modified = strftime("%s", "now") +where modelId = :id""", id=modelId) + self.flushMod() + + # Card models + ########################################################################## + + def cardModelUseCount(self, cardModel): + "Return the number of cards using cardModel." + return self.s.scalar(""" +select count(id) from cards where +cardModelId = :id""", id=cardModel.id) + + def deleteCardModel(self, model, cardModel): + "Delete all cards that use CARDMODEL from the deck." + cards = self.s.column0("select id from cards where cardModelId = :id", + id=cardModel.id) + self.deleteCards(cards) + model.cardModels.remove(cardModel) + model.setModified() + self.flushMod() + + def updateCardsFromModel(self, model, dirty=True): + "Update all card question/answer when model changes." + ids = self.s.all(""" +select cards.id, cards.cardModelId, cards.factId, facts.modelId from +cards, facts where +cards.factId = facts.id and +facts.modelId = :id""", id=model.id) + if not ids: + return + self.updateCardQACache(ids, dirty) + + def updateCardsFromFactIds(self, ids, dirty=True): + "Update all card question/answer when model changes." 
+ ids = self.s.all(""" +select cards.id, cards.cardModelId, cards.factId, facts.modelId from +cards, facts where +cards.factId = facts.id and +facts.id in %s""" % ids2str(ids)) + if not ids: + return + self.updateCardQACache(ids, dirty) + + def updateCardQACacheFromIds(self, ids, type="cards"): + "Given a list of card or fact ids, update q/a cache." + if type == "facts": + # convert to card ids + ids = self.s.column0( + "select id from cards where factId in %s" % ids2str(ids)) + rows = self.s.all(""" +select c.id, c.cardModelId, f.id, f.modelId +from cards as c, facts as f +where c.factId = f.id +and c.id in %s""" % ids2str(ids)) + self.updateCardQACache(rows) + + def updateCardQACache(self, ids, dirty=True): + "Given a list of (cardId, cardModelId, factId, modId), update q/a cache." + if dirty: + mod = ", modified = %f" % time.time() + else: + mod = "" + # tags + cids = ids2str([x[0] for x in ids]) + tags = dict([(x[0], x[1:]) for x in + self.splitTagsList( + where="and cards.id in %s" % cids)]) + facts = {} + # fields + for k, g in groupby(self.s.all(""" +select fields.factId, fieldModels.name, fieldModels.id, fields.value +from fields, fieldModels where fields.factId in %s and +fields.fieldModelId = fieldModels.id +order by fields.factId""" % ids2str([x[2] for x in ids])), + itemgetter(0)): + facts[k] = dict([(r[1], (r[2], r[3])) for r in g]) + # card models + cms = {} + for c in self.s.query(CardModel).all(): + cms[c.id] = c + pend = [formatQA(cid, mid, facts[fid], tags[cid], cms[cmid], self) + for (cid, cmid, fid, mid) in ids] + if pend: + # find existing media references + files = {} + for txt in self.s.column0( + "select question || answer from cards where id in %s" % + cids): + for f in mediaFiles(txt): + if f in files: + files[f] -= 1 + else: + files[f] = -1 + # determine ref count delta + for p in pend: + for type in ("question", "answer"): + txt = p[type] + for f in mediaFiles(txt): + if f in files: + files[f] += 1 + else: + files[f] = 1 + # update 
references - this could be more efficient + for (f, cnt) in files.items(): + if not cnt: + continue + updateMediaCount(self, f, cnt) + # update q/a + self.s.execute(""" + update cards set + question = :question, answer = :answer + %s + where id = :id""" % mod, pend) + # update fields cache + self.updateFieldCache(facts.keys()) + if dirty: + self.flushMod() + + def updateFieldCache(self, fids): + "Add stripped HTML cache for sorting/searching." + try: + all = self.s.all( + ("select factId, group_concat(value, ' ') from fields " + "where factId in %s group by factId") % ids2str(fids)) + except: + # older sqlite doesn't support group_concat. this code taken from + # the wm port + all=[] + for factId in fids: + values=self.s.all("select value from fields where value is not NULL and factId=%(factId)i" % {"factId": factId}) + value_list=[] + for row in values: + value_list.append(row[0]) + concatenated_values=' '.join(value_list) + all.append([factId, concatenated_values]) + r = [] + from oldanki.utils import stripHTMLMedia + for a in all: + r.append({'id':a[0], 'v':stripHTMLMedia(a[1])}) + self.s.statements( + "update facts set spaceUntil=:v where id=:id", r) + + def rebuildCardOrdinals(self, ids): + "Update all card models in IDS. Caller must update model modtime." 
+ self.s.flush() + strids = ids2str(ids) + self.s.statement(""" +update cards set +ordinal = (select ordinal from cardModels where id = cardModelId), +modified = :now +where cardModelId in %s""" % strids, now=time.time()) + self.flushMod() + + def changeCardModel(self, cardIds, newCardModelId): + self.s.statement(""" +update cards set cardModelId = :newId +where id in %s""" % ids2str(cardIds), newId=newCardModelId) + self.updateCardQACacheFromIds(cardIds) + self.flushMod() + + # Tags: querying + ########################################################################## + + def tagsList(self, where="", priority=", cards.priority", kwargs={}): + "Return a list of (cardId, allTags, priority)" + return self.s.all(""" +select cards.id, facts.tags || " " || models.tags || " " || +cardModels.name %s from cards, facts, models, cardModels where +cards.factId == facts.id and facts.modelId == models.id +and cards.cardModelId = cardModels.id %s""" % (priority, where), + **kwargs) + + return self.s.all(""" +select cards.id, facts.tags || " " || models.tags || " " || +cardModels.name %s from cards, facts, models, cardModels where +cards.factId == facts.id and facts.modelId == models.id +and cards.cardModelId = cardModels.id %s""" % (priority, where)) + + def splitTagsList(self, where=""): + return self.s.all(""" +select cards.id, facts.tags, models.tags, cardModels.name +from cards, facts, models, cardModels where +cards.factId == facts.id and facts.modelId == models.id +and cards.cardModelId = cardModels.id +%s""" % where) + + def cardsWithNoTags(self): + return self.s.column0(""" +select cards.id from cards, facts where +facts.tags = "" +and cards.factId = facts.id""") + + def cardsWithTags(self, tagStr, search="and"): + tagIds = [] + # get ids + for tag in tagStr.split(" "): + tag = tag.replace("*", "%") + if "%" in tag: + ids = self.s.column0( + "select id from tags where tag like :tag", tag=tag) + if search == "and" and not ids: + return [] + tagIds.append(ids) + else: + id 
= self.s.scalar( + "select id from tags where tag = :tag", tag=tag) + if search == "and" and not id: + return [] + tagIds.append(id) + # search for any + if search == "or": + return self.s.column0( + "select cardId from cardTags where tagId in %s" % + ids2str(tagIds)) + else: + # search for all + l = [] + for ids in tagIds: + if isinstance(ids, types.ListType): + l.append("select cardId from cardTags where tagId in %s" % + ids2str(ids)) + else: + l.append("select cardId from cardTags where tagId = %d" % + ids) + q = " intersect ".join(l) + return self.s.column0(q) + + def allTags(self): + return self.s.column0("select tag from tags order by tag") + + def allTags_(self, where=""): + t = self.s.column0("select tags from facts %s" % where) + t += self.s.column0("select tags from models") + t += self.s.column0("select name from cardModels") + return sorted(list(set(parseTags(joinTags(t))))) + + def allUserTags(self): + return sorted(list(set(parseTags(joinTags(self.s.column0( + "select tags from facts")))))) + + def factTags(self, ids): + return self.s.all(""" +select id, tags from facts +where id in %s""" % ids2str(ids)) + + # Tags: caching + ########################################################################## + + def updateFactTags(self, factIds): + self.updateCardTags(self.s.column0( + "select id from cards where factId in %s" % + ids2str(factIds))) + + def updateModelTags(self, modelId): + self.updateCardTags(self.s.column0(""" +select cards.id from cards, facts where +cards.factId = facts.id and +facts.modelId = :id""", id=modelId)) + + def updateCardTags(self, cardIds=None): + self.s.flush() + if cardIds is None: + self.s.statement("delete from cardTags") + self.s.statement("delete from tags") + tids = tagIds(self.s, self.allTags_()) + rows = self.splitTagsList() + else: + self.s.statement("delete from cardTags where cardId in %s" % + ids2str(cardIds)) + fids = ids2str(self.s.column0( + "select factId from cards where id in %s" % + ids2str(cardIds))) + tids 
= tagIds(self.s, self.allTags_( + where="where id in %s" % fids)) + rows = self.splitTagsList( + where="and facts.id in %s" % fids) + d = [] + for (id, fact, model, templ) in rows: + for tag in parseTags(fact): + d.append({"cardId": id, + "tagId": tids[tag.lower()], + "src": 0}) + for tag in parseTags(model): + d.append({"cardId": id, + "tagId": tids[tag.lower()], + "src": 1}) + for tag in parseTags(templ): + d.append({"cardId": id, + "tagId": tids[tag.lower()], + "src": 2}) + if d: + self.s.statements(""" +insert into cardTags +(cardId, tagId, src) values +(:cardId, :tagId, :src)""", d) + self.s.execute( + "delete from tags where priority = 2 and id not in "+ + "(select distinct tagId from cardTags)") + + def updateTagsForModel(self, model): + cards = self.s.all(""" +select cards.id, cards.cardModelId from cards, facts where +facts.modelId = :m and cards.factId = facts.id""", m=model.id) + cardIds = [x[0] for x in cards] + factIds = self.s.column0(""" +select facts.id from facts where +facts.modelId = :m""", m=model.id) + cmtags = " ".join([cm.name for cm in model.cardModels]) + tids = tagIds(self.s, parseTags(model.tags) + + parseTags(cmtags)) + self.s.statement(""" +delete from cardTags where cardId in %s +and src in (1, 2)""" % ids2str(cardIds)) + d = [] + for tag in parseTags(model.tags): + for id in cardIds: + d.append({"cardId": id, + "tagId": tids[tag.lower()], + "src": 1}) + cmtags = {} + for cm in model.cardModels: + cmtags[cm.id] = parseTags(cm.name) + for c in cards: + for tag in cmtags[c[1]]: + d.append({"cardId": c[0], + "tagId": tids[tag.lower()], + "src": 2}) + if d: + self.s.statements(""" +insert into cardTags +(cardId, tagId, src) values +(:cardId, :tagId, :src)""", d) + self.s.statement(""" +delete from tags where id not in (select distinct tagId from cardTags) +and priority = 2 +""") + + # Tags: adding/removing in bulk + ########################################################################## + # these could be optimized to use the tag cache 
in the future + + def addTags(self, ids, tags): + "Add tags in bulk. Caller must .reset()" + self.startProgress() + tlist = self.factTags(ids) + newTags = parseTags(tags) + now = time.time() + pending = [] + for (id, tags) in tlist: + oldTags = parseTags(tags) + tmpTags = list(set(oldTags + newTags)) + if tmpTags != oldTags: + pending.append( + {'id': id, 'now': now, 'tags': " ".join(tmpTags)}) + self.s.statements(""" +update facts set +tags = :tags, +modified = :now +where id = :id""", pending) + factIds = [c['id'] for c in pending] + cardIds = self.s.column0( + "select id from cards where factId in %s" % + ids2str(factIds)) + self.updateCardQACacheFromIds(factIds, type="facts") + self.updateCardTags(cardIds) + self.updatePriorities(cardIds) + self.flushMod() + self.finishProgress() + self.refreshSession() + + def deleteTags(self, ids, tags): + "Delete tags in bulk. Caller must .reset()" + self.startProgress() + tlist = self.factTags(ids) + newTags = parseTags(tags) + now = time.time() + pending = [] + for (id, tags) in tlist: + oldTags = parseTags(tags) + tmpTags = oldTags[:] + for tag in newTags: + try: + tmpTags.remove(tag) + except ValueError: + pass + if tmpTags != oldTags: + pending.append( + {'id': id, 'now': now, 'tags': " ".join(tmpTags)}) + self.s.statements(""" +update facts set +tags = :tags, +modified = :now +where id = :id""", pending) + factIds = [c['id'] for c in pending] + cardIds = self.s.column0( + "select id from cards where factId in %s" % + ids2str(factIds)) + self.updateCardQACacheFromIds(factIds, type="facts") + self.updateCardTags(cardIds) + self.updatePriorities(cardIds) + self.flushMod() + self.finishProgress() + self.refreshSession() + + # Find + ########################################################################## + + def allFMFields(self, tolower=False): + fields = [] + try: + fields = self.s.column0( + "select distinct name from fieldmodels order by name") + except: + fields = [] + if tolower is True: + for i, v in 
enumerate(fields): + fields[i] = v.lower() + return fields + + def _parseQuery(self, query): + tokens = [] + res = [] + + allowedfields = self.allFMFields(True) + def addSearchFieldToken(field, value, isNeg, filter): + if field.lower() in allowedfields: + res.append((field + ':' + value, isNeg, SEARCH_FIELD, filter)) + elif field in ['question', 'answer']: + res.append((field + ':' + value, isNeg, SEARCH_QA, filter)) + else: + for p in phraselog: + res.append((p['value'], p['is_neg'], p['type'], p['filter'])) + # break query into words or phraselog + # an extra space is added so the loop never ends in the middle + # completing a token + for match in re.findall( + r'(-)?\'(([^\'\\]|\\.)*)\'|(-)?"(([^"\\]|\\.)*)"|(-)?([^ ]+)|([ ]+)', + query + ' '): + type = ' ' + if match[1]: type = "'" + elif match[4]: type = '"' + + value = (match[1] or match[4] or match[7]) + isNeg = (match[0] == '-' or match[3] == '-' or match[6] == '-') + + tokens.append({'type': type, 'value': value, 'is_neg': isNeg, + 'filter': ('wb' if type == "'" else 'none')}) + intoken = isNeg = False + field = '' #name of the field for field related commands + phraselog = [] #log of phrases in case potential command is not a commad + for c, token in enumerate(tokens): + doprocess = True # only look for commands when this is true + #prevent cases such as "field" : value as being processed as a command + if len(token['value']) == 0: + if intoken is True and type == SEARCH_FIELD and field: + #case: fieldname: any thing here check for existance of fieldname + addSearchFieldToken(field, '*', isNeg, 'none') + phraselog = [] # reset phrases since command is completed + intoken = doprocess = False + if intoken is True: + if type == SEARCH_FIELD_EXISTS: + #case: field:"value" + res.append((token['value'], isNeg, type, 'none')) + intoken = doprocess = False + elif type == SEARCH_FIELD and field: + #case: fieldname:"value" + addSearchFieldToken( + field, token['value'], isNeg, token['filter']) + intoken = doprocess 
= False + + elif type == SEARCH_FIELD and not field: + #case: "fieldname":"name" or "field" anything + if token['value'].startswith(":") and len(phraselog) == 1: + #we now know a colon is next, so mark it as field + # and keep looking for the value + field = phraselog[0]['value'] + parts = token['value'].split(':', 1) + phraselog.append( + {'value': token['value'], 'is_neg': False, + 'type': SEARCH_PHRASE, 'filter': token['filter']}) + if parts[1]: + #value is included with the :, so wrap it up + addSearchFieldToken(field, parts[1], isNeg, 'none') + intoken = doprocess = False + doprocess = False + else: + #case: "fieldname"string/"fieldname"tag:name + intoken = False + if intoken is False and doprocess is False: + #command has been fully processed + phraselog = [] # reset phraselog, since we used it for a command + if intoken is False: + #include any non-command related phrases in the query + for p in phraselog: res.append( + (p['value'], p['is_neg'], p['type'], p['filter'])) + phraselog = [] + if intoken is False and doprocess is True: + field = '' + isNeg = token['is_neg'] + if token['value'].startswith("tag:"): + token['value'] = token['value'][4:] + type = SEARCH_TAG + elif token['value'].startswith("is:"): + token['value'] = token['value'][3:].lower() + type = SEARCH_TYPE + elif token['value'].startswith("fid:") and len(token['value']) > 4: + dec = token['value'][4:] + try: + int(dec) + token['value'] = token['value'][4:] + except: + try: + for d in dec.split(","): + int(d) + token['value'] = token['value'][4:] + except: + token['value'] = "0" + type = SEARCH_FID + elif token['value'].startswith("card:"): + token['value'] = token['value'][5:] + type = SEARCH_CARD + elif token['value'].startswith("show:"): + token['value'] = token['value'][5:].lower() + type = SEARCH_DISTINCT + elif token['value'].startswith("field:"): + type = SEARCH_FIELD_EXISTS + parts = token['value'][6:].split(':', 1) + field = parts[0] + if len(parts) == 1 and parts[0]: + token['value'] 
= parts[0] + elif len(parts) == 1 and not parts[0]: + intoken = True + else: + type = SEARCH_FIELD + intoken = True + parts = token['value'].split(':', 1) + + phraselog.append( + {'value': token['value'], 'is_neg': isNeg, + 'type': SEARCH_PHRASE, 'filter': token['filter']}) + if len(parts) == 2 and parts[0]: + field = parts[0] + if parts[1]: + #simple fieldname:value case - no need to look for more data + addSearchFieldToken(field, parts[1], isNeg, 'none') + intoken = doprocess = False + + if intoken is False: phraselog = [] + if intoken is False and doprocess is True: + res.append((token['value'], isNeg, type, token['filter'])) + return res + + def findCards(self, query): + (q, cmquery, showdistinct, filters, args) = self.findCardsWhere(query) + (factIdList, cardIdList) = self.findCardsMatchingFilters(filters) + query = "select id from cards" + hasWhere = False + if q: + query += " where " + q + hasWhere = True + if cmquery['pos'] or cmquery['neg']: + if hasWhere is False: + query += " where " + hasWhere = True + else: query += " and " + if cmquery['pos']: + query += (" factId in(select distinct factId from cards "+ + "where id in (" + cmquery['pos'] + ")) ") + query += " and id in(" + cmquery['pos'] + ") " + if cmquery['neg']: + query += (" factId not in(select distinct factId from "+ + "cards where id in (" + cmquery['neg'] + ")) ") + if factIdList is not None: + if hasWhere is False: + query += " where " + hasWhere = True + else: query += " and " + query += " factId IN %s" % ids2str(factIdList) + if cardIdList is not None: + if hasWhere is False: + query += " where " + hasWhere = True + else: query += " and " + query += " id IN %s" % ids2str(cardIdList) + if showdistinct: + query += " group by factId" + #print query, args + return self.s.column0(query, **args) + + def findCardsWhere(self, query): + (tquery, fquery, qquery, fidquery, cmquery, sfquery, qaquery, + showdistinct, filters, args) = self._findCards(query) + q = "" + x = [] + if tquery: + x.append(" id 
in (%s)" % tquery) + if fquery: + x.append(" factId in (%s)" % fquery) + if qquery: + x.append(" id in (%s)" % qquery) + if fidquery: + x.append(" id in (%s)" % fidquery) + if sfquery: + x.append(" factId in (%s)" % sfquery) + if qaquery: + x.append(" id in (%s)" % qaquery) + if x: + q += " and ".join(x) + return q, cmquery, showdistinct, filters, args + + def findCardsMatchingFilters(self, filters): + factFilters = [] + fieldFilters = {} + cardFilters = {} + + factFilterMatches = [] + fieldFilterMatches = [] + cardFilterMatches = [] + + if (len(filters) > 0): + for filter in filters: + if filter['scope'] == 'fact': + regexp = re.compile( + r'\b' + re.escape(filter['value']) + r'\b', flags=re.I) + factFilters.append( + {'value': filter['value'], 'regexp': regexp, + 'is_neg': filter['is_neg']}) + if filter['scope'] == 'field': + fieldName = filter['field'].lower() + if (fieldName in fieldFilters) is False: + fieldFilters[fieldName] = [] + regexp = re.compile( + r'\b' + re.escape(filter['value']) + r'\b', flags=re.I) + fieldFilters[fieldName].append( + {'value': filter['value'], 'regexp': regexp, + 'is_neg': filter['is_neg']}) + if filter['scope'] == 'card': + fieldName = filter['field'].lower() + if (fieldName in cardFilters) is False: + cardFilters[fieldName] = [] + regexp = re.compile(r'\b' + re.escape(filter['value']) + + r'\b', flags=re.I) + cardFilters[fieldName].append( + {'value': filter['value'], 'regexp': regexp, + 'is_neg': filter['is_neg']}) + + if len(factFilters) > 0: + fquery = '' + args = {} + for filter in factFilters: + c = len(args) + if fquery: + if filter['is_neg']: fquery += " except " + else: fquery += " intersect " + elif filter['is_neg']: fquery += "select id from fields except " + + value = filter['value'].replace("*", "%") + args["_ff_%d" % c] = "%"+value+"%" + + fquery += ( + "select id from fields where value like "+ + ":_ff_%d escape '\\'" % c) + + rows = self.s.execute( + 'select factId, value from fields where id in (' + + fquery + 
')', args) + while (1): + row = rows.fetchone() + if row is None: break + doesMatch = False + for filter in factFilters: + res = filter['regexp'].search(row[1]) + if ((filter['is_neg'] is False and res) or + (filter['is_neg'] is True and res is None)): + factFilterMatches.append(row[0]) + + if len(fieldFilters) > 0: + sfquery = '' + args = {} + for field, filters in fieldFilters.iteritems(): + for filter in filters: + c = len(args) + if sfquery: + if filter['is_neg']: sfquery += " except " + else: sfquery += " intersect " + elif filter['is_neg']: sfquery += "select id from fields except " + field = field.replace("*", "%") + value = filter['value'].replace("*", "%") + args["_ff_%d" % c] = "%"+value+"%" + + ids = self.s.column0( + "select id from fieldmodels where name like "+ + ":field escape '\\'", field=field) + sfquery += ("select id from fields where "+ + "fieldModelId in %s and value like "+ + ":_ff_%d escape '\\'") % (ids2str(ids), c) + + rows = self.s.execute( + 'select f.factId, f.value, fm.name from fields as f '+ + 'left join fieldmodels as fm ON (f.fieldModelId = '+ + 'fm.id) where f.id in (' + sfquery + ')', args) + while (1): + row = rows.fetchone() + if row is None: break + field = row[2].lower() + doesMatch = False + if field in fieldFilters: + for filter in fieldFilters[field]: + res = filter['regexp'].search(row[1]) + if ((filter['is_neg'] is False and res) or + (filter['is_neg'] is True and res is None)): + fieldFilterMatches.append(row[0]) + + + if len(cardFilters) > 0: + qaquery = '' + args = {} + for field, filters in cardFilters.iteritems(): + for filter in filters: + c = len(args) + if qaquery: + if filter['is_neg']: qaquery += " except " + else: qaquery += " intersect " + elif filter['is_neg']: qaquery += "select id from cards except " + value = value.replace("*", "%") + args["_ff_%d" % c] = "%"+value+"%" + + if field == 'question': + qaquery += "select id from cards where question " + qaquery += "like :_ff_%d escape '\\'" % c + else: + 
qaquery += "select id from cards where answer " + qaquery += "like :_ff_%d escape '\\'" % c + + rows = self.s.execute( + 'select id, question, answer from cards where id IN (' + + qaquery + ')', args) + while (1): + row = rows.fetchone() + if row is None: break + doesMatch = False + if field in cardFilters: + rowValue = row[1] if field == 'question' else row[2] + for filter in cardFilters[field]: + res = filter['regexp'].search(rowValue) + if ((filter['is_neg'] is False and res) or + (filter['is_neg'] is True and res is None)): + cardFilterMatches.append(row[0]) + + factIds = None + if len(factFilters) > 0 or len(fieldFilters) > 0: + factIds = [] + factIds.extend(factFilterMatches) + factIds.extend(fieldFilterMatches) + + cardIds = None + if len(cardFilters) > 0: + cardIds = [] + cardIds.extend(cardFilterMatches) + + return (factIds, cardIds) + + def _findCards(self, query): + "Find facts matching QUERY." + tquery = "" + fquery = "" + qquery = "" + fidquery = "" + cmquery = { 'pos': '', 'neg': '' } + sfquery = qaquery = "" + showdistinct = False + filters = [] + args = {} + for c, (token, isNeg, type, filter) in enumerate(self._parseQuery(query)): + if type == SEARCH_TAG: + # a tag + if tquery: + if isNeg: + tquery += " except " + else: + tquery += " intersect " + elif isNeg: + tquery += "select id from cards except " + if token == "none": + tquery += """ +select cards.id from cards, facts where facts.tags = '' and cards.factId = facts.id """ + else: + token = token.replace("*", "%") + ids = self.s.column0(""" +select id from tags where tag like :tag escape '\\'""", tag=token) + tquery += """ +select cardId from cardTags where cardTags.tagId in %s""" % ids2str(ids) + elif type == SEARCH_TYPE: + if qquery: + if isNeg: + qquery += " except " + else: + qquery += " intersect " + elif isNeg: + qquery += "select id from cards except " + if token in ("rev", "new", "failed"): + if token == "rev": + n = 1 + elif token == "new": + n = 2 + else: + n = 0 + qquery += "select id 
from cards where type = %d" % n + elif token == "delayed": + qquery += ("select id from cards where " + "due < %d and combinedDue > %d and " + "type in (0,1,2)") % ( + self.dueCutoff, self.dueCutoff) + elif token == "suspended": + qquery += ("select id from cards where " + "priority = -3") + elif token == "leech": + qquery += ( + "select id from cards where noCount >= (select value " + "from deckvars where key = 'leechFails')") + else: # due + qquery += ("select id from cards where " + "type in (0,1) and combinedDue < %d") % self.dueCutoff + elif type == SEARCH_FID: + if fidquery: + if isNeg: + fidquery += " except " + else: + fidquery += " intersect " + elif isNeg: + fidquery += "select id from cards except " + fidquery += "select id from cards where factId in (%s)" % token + elif type == SEARCH_CARD: + token = token.replace("*", "%") + ids = self.s.column0(""" +select id from tags where tag like :tag escape '\\'""", tag=token) + if isNeg: + if cmquery['neg']: + cmquery['neg'] += " intersect " + cmquery['neg'] += """ +select cardId from cardTags where src = 2 and cardTags.tagId in %s""" % ids2str(ids) + else: + if cmquery['pos']: + cmquery['pos'] += " intersect " + cmquery['pos'] += """ +select cardId from cardTags where src = 2 and cardTags.tagId in %s""" % ids2str(ids) + elif type == SEARCH_FIELD or type == SEARCH_FIELD_EXISTS: + field = value = '' + if type == SEARCH_FIELD: + parts = token.split(':', 1); + if len(parts) == 2: + field = parts[0] + value = parts[1] + elif type == SEARCH_FIELD_EXISTS: + field = token + value = '*' + if (type == SEARCH_FIELD and filter != 'none'): + if field and value: + filters.append( + {'scope': 'field', 'type': filter, + 'field': field, 'value': value, 'is_neg': isNeg}) + else: + if field and value: + if sfquery: + if isNeg: + sfquery += " except " + else: + sfquery += " intersect " + elif isNeg: + sfquery += "select id from facts except " + field = field.replace("*", "%") + value = value.replace("*", "%") + args["_ff_%d" % c] 
= "%"+value+"%" + ids = self.s.column0(""" +select id from fieldmodels where name like :field escape '\\'""", field=field) + sfquery += """ +select factId from fields where fieldModelId in %s and +value like :_ff_%d escape '\\'""" % (ids2str(ids), c) + elif type == SEARCH_QA: + field = value = '' + parts = token.split(':', 1); + if len(parts) == 2: + field = parts[0] + value = parts[1] + if (filter != 'none'): + if field and value: + filters.append( + {'scope': 'card', 'type': filter, 'field': field, + 'value': value, 'is_neg': isNeg}) + else: + if field and value: + if qaquery: + if isNeg: + qaquery += " except " + else: + qaquery += " intersect " + elif isNeg: + qaquery += "select id from cards except " + value = value.replace("*", "%") + args["_ff_%d" % c] = "%"+value+"%" + + if field == 'question': + qaquery += """ +select id from cards where question like :_ff_%d escape '\\'""" % c + else: + qaquery += """ +select id from cards where answer like :_ff_%d escape '\\'""" % c + elif type == SEARCH_DISTINCT: + if isNeg is False: + showdistinct = True if token == "one" else False + else: + showdistinct = False if token == "one" else True + else: + if (filter != 'none'): + filters.append( + {'scope': 'fact', 'type': filter, + 'value': token, 'is_neg': isNeg}) + else: + if fquery: + if isNeg: + fquery += " except " + else: + fquery += " intersect " + elif isNeg: + fquery += "select id from facts except " + token = token.replace("*", "%") + args["_ff_%d" % c] = "%"+token+"%" + fquery += """ +select id from facts where spaceUntil like :_ff_%d escape '\\'""" % c + return (tquery, fquery, qquery, fidquery, cmquery, sfquery, + qaquery, showdistinct, filters, args) + + # Find and replace + ########################################################################## + + def findReplace(self, factIds, src, dst, isRe=False, field=None): + "Find and replace fields in a fact." 
+ # find + s = "select id, factId, value from fields where factId in %s" + if isRe: + isRe = re.compile(src) + else: + s += " and value like :v" + if field: + s += " and fieldModelId = :fmid" + rows = self.s.all(s % ids2str(factIds), + v="%"+src.replace("%", "%%")+"%", + fmid=field) + modded = [] + if isRe: + modded = [ + {'id': id, 'fid': fid, 'val': re.sub(isRe, dst, val)} + for (id, fid, val) in rows + if isRe.search(val)] + else: + modded = [ + {'id': id, 'fid': fid, 'val': val.replace(src, dst)} + for (id, fid, val) in rows + if val.find(src) != -1] + # update + self.s.statements( + 'update fields set value = :val where id = :id', modded) + self.updateCardQACacheFromIds([f['fid'] for f in modded], + type="facts") + return len(set([f['fid'] for f in modded])) + + # Find duplicates + ########################################################################## + + def findDuplicates(self, fmids): + data = self.s.all( + "select factId, value from fields where fieldModelId in %s" % + ids2str(fmids)) + vals = {} + for (fid, val) in data: + if not val.strip(): + continue + if val not in vals: + vals[val] = [fid] + else: + vals[val].append(fid) + return [(k,v) for (k,v) in vals.items() if len(v) > 1] + + # Progress info + ########################################################################## + + def startProgress(self, max=0, min=0, title=None): + self.enableProgressHandler() + runHook("startProgress", max, min, title) + self.s.flush() + + def updateProgress(self, label=None, value=None): + runHook("updateProgress", label, value) + + def finishProgress(self): + runHook("updateProgress") + runHook("finishProgress") + self.disableProgressHandler() + + def progressHandler(self): + if (time.time() - self.progressHandlerCalled) < 0.2: + return + self.progressHandlerCalled = time.time() + if self.progressHandlerEnabled: + runHook("dbProgress") + + def enableProgressHandler(self): + self.progressHandlerEnabled = True + + def disableProgressHandler(self): + 
self.progressHandlerEnabled = False + + # Notifications + ########################################################################## + + def notify(self, msg): + "Send a notice to all listeners, or display on stdout." + if hookEmpty("notify"): + pass + else: + runHook("notify", msg) + + # File-related + ########################################################################## + + def name(self): + if not self.path: + return u"untitled" + n = os.path.splitext(os.path.basename(self.path))[0] + assert '/' not in n + assert '\\' not in n + return n + + # Session handling + ########################################################################## + + def startSession(self): + self.lastSessionStart = self.sessionStartTime + self.sessionStartTime = time.time() + self.sessionStartReps = self.getStats()['dTotal'] + + def stopSession(self): + self.sessionStartTime = 0 + + def sessionLimitReached(self): + if not self.sessionStartTime: + # not started + return False + if (self.sessionTimeLimit and time.time() > + (self.sessionStartTime + self.sessionTimeLimit)): + return True + if (self.sessionRepLimit and self.sessionRepLimit <= + self.getStats()['dTotal'] - self.sessionStartReps): + return True + return False + + # Meta vars + ########################################################################## + + def getInt(self, key, type=int): + ret = self.s.scalar("select value from deckVars where key = :k", + k=key) + if ret is not None: + ret = type(ret) + return ret + + def getFloat(self, key): + return self.getInt(key, float) + + def getBool(self, key): + ret = self.s.scalar("select value from deckVars where key = :k", + k=key) + if ret is not None: + # hack to work around ankidroid bug + if ret.lower() == "true": + return True + elif ret.lower() == "false": + return False + else: + ret = not not int(ret) + return ret + + def getVar(self, key): + "Return value for key as string, or None." 
+ return self.s.scalar("select value from deckVars where key = :k", + k=key) + + def setVar(self, key, value, mod=True): + if self.s.scalar(""" +select value = :value from deckVars +where key = :key""", key=key, value=value): + return + # can't use insert or replace as it confuses the undo code + if self.s.scalar("select 1 from deckVars where key = :key", key=key): + self.s.statement("update deckVars set value=:value where key = :key", + key=key, value=value) + else: + self.s.statement("insert into deckVars (key, value) " + "values (:key, :value)", key=key, value=value) + if mod: + self.setModified() + + def setVarDefault(self, key, value): + if not self.s.scalar( + "select 1 from deckVars where key = :key", key=key): + self.s.statement("insert into deckVars (key, value) " + "values (:key, :value)", key=key, value=value) + + # Failed card handling + ########################################################################## + + def setFailedCardPolicy(self, idx): + if idx == 5: + # custom + return + self.collapseTime = 0 + self.failedCardMax = 0 + if idx == 0: + d = 600 + self.collapseTime = 1 + self.failedCardMax = 20 + elif idx == 1: + d = 0 + elif idx == 2: + d = 600 + elif idx == 3: + d = 28800 + elif idx == 4: + d = 259200 + self.delay0 = d + self.delay1 = 0 + + def getFailedCardPolicy(self): + if self.delay1: + return 5 + d = self.delay0 + if self.collapseTime == 1: + if d == 600 and self.failedCardMax == 20: + return 0 + return 5 + if d == 0 and self.failedCardMax == 0: + return 1 + elif d == 600: + return 2 + elif d == 28800: + return 3 + elif d == 259200: + return 4 + return 5 + + # Media + ########################################################################## + + def mediaDir(self, create=False): + "Return the media directory if exists. None if couldn't create." 
+ if self.path: + if self.mediaPrefix: + dir = os.path.join( + self.mediaPrefix, os.path.basename(self.path)) + else: + dir = self.path + dir = re.sub("(?i)\.(oldanki)$", ".media", dir) + if create == None: + # don't create, but return dir + return dir + if not os.path.exists(dir) and create: + try: + os.makedirs(dir) + except OSError: + # permission denied + return None + else: + # memory-backed; need temp store + if not self.tmpMediaDir and create: + self.tmpMediaDir = tempfile.mkdtemp(prefix="oldanki") + dir = self.tmpMediaDir + if not dir or not os.path.exists(dir): + return None + # change to the current dir + os.chdir(dir) + return dir + + def addMedia(self, path): + """Add PATH to the media directory. +Return new path, relative to media dir.""" + return oldanki.media.copyToMedia(self, path) + + def renameMediaDir(self, oldPath): + "Copy oldPath to our current media dir. " + assert os.path.exists(oldPath) + newPath = self.mediaDir(create=None) + # copytree doesn't want the dir to exist + try: + shutil.copytree(oldPath, newPath) + except: + # FIXME: should really remove everything in old dir instead of + # giving up + pass + + # DB helpers + ########################################################################## + + def save(self): + "Commit any pending changes to disk." + if self.lastLoaded == self.modified: + return + self.lastLoaded = self.modified + self.s.commit() + + def close(self): + if self.s: + self.s.rollback() + self.s.clear() + self.s.close() + self.engine.dispose() + runHook("deckClosed") + + def rollback(self): + "Roll back the current transaction and reset session state." + self.s.rollback() + self.s.clear() + self.s.update(self) + self.s.refresh(self) + + def refreshSession(self): + "Flush and expire all items from the session." + self.s.flush() + self.s.expire_all() + + def openSession(self): + "Open a new session. Assumes old session is already closed." 
+ self.s = SessionHelper(self.Session(), lock=self.needLock) + self.s.update(self) + self.refreshSession() + + def closeSession(self): + "Close the current session, saving any changes. Do nothing if no session." + if self.s: + self.save() + try: + self.s.expunge(self) + except: + import sys + sys.stderr.write("ERROR expunging deck..\n") + self.s.close() + self.s = None + + def setModified(self, newTime=None): + #import traceback; traceback.print_stack() + self.modified = newTime or time.time() + + def flushMod(self): + "Mark modified and flush to DB." + self.setModified() + self.s.flush() + + def saveAs(self, newPath): + "Returns new deck. Old connection is closed without saving." + oldMediaDir = self.mediaDir() + self.s.flush() + # remove new deck if it exists + try: + os.unlink(newPath) + except OSError: + pass + self.startProgress() + # copy tables, avoiding implicit commit on current db + DeckStorage.Deck(newPath, backup=False).close() + new = sqlite.connect(newPath) + for table in self.s.column0( + "select name from sqlite_master where type = 'table'"): + if table.startswith("sqlite_"): + continue + new.execute("delete from %s" % table) + cols = [str(x[1]) for x in new.execute( + "pragma table_info('%s')" % table).fetchall()] + q = "select 'insert into %(table)s values(" + q += ",".join(["'||quote(\"" + col + "\")||'" for col in cols]) + q += ")' from %(table)s" + q = q % {'table': table} + c = 0 + for row in self.s.execute(q): + new.execute(row[0]) + if c % 1000: + self.updateProgress() + c += 1 + # save new, close both + new.commit() + new.close() + self.close() + # open again in orm + newDeck = DeckStorage.Deck(newPath, backup=False) + # move media + if oldMediaDir: + newDeck.renameMediaDir(oldMediaDir) + # forget sync name + newDeck.syncName = None + newDeck.s.commit() + # and return the new deck + self.finishProgress() + return newDeck + + # Syncing + ########################################################################## + # toggling does not bump 
deck mod time, since it may happen on upgrade, + # and the variable is not synced + + def enableSyncing(self): + self.syncName = unicode(checksum(self.path.encode("utf-8"))) + self.s.commit() + + def disableSyncing(self): + self.syncName = None + self.s.commit() + + def syncingEnabled(self): + return self.syncName + + def checkSyncHash(self): + if self.syncName and self.syncName != checksum(self.path.encode("utf-8")): + self.notify(_("""\ +Because '%s' has been moved or copied, automatic synchronisation \ +has been disabled (ERR-0100). + +You can disable this check in Settings>Preferences>Network.""") % self.name()) + self.disableSyncing() + self.syncName = None + + # DB maintenance + ########################################################################## + + def recoverCards(self, ids): + "Put cards with damaged facts into new facts." + # create a new model in case the user has modified a previous one + from oldanki.stdmodels import RecoveryModel + m = RecoveryModel() + last = self.currentModel + self.addModel(m) + def repl(s): + # strip field model text + return re.sub("(.*?)", "\\1", s) + # add new facts, pointing old card at new fact + for (id, q, a) in self.s.all(""" +select id, question, answer from cards +where id in %s""" % ids2str(ids)): + f = self.newFact() + f['Question'] = repl(q) + f['Answer'] = repl(a) + try: + f.tags = self.s.scalar(""" +select group_concat(tag, " ") from tags t, cardTags ct +where cardId = :cid and ct.tagId = t.id""", cid=id) or u"" + except: + raise Exception("Your sqlite is too old.") + cards = self.addFact(f) + # delete the freshly created card and point old card to this fact + self.s.statement("delete from cards where id = :id", + id=f.cards[0].id) + self.s.statement(""" +update cards set factId = :fid, cardModelId = :cmid, ordinal = 0 +where id = :id""", fid=f.id, cmid=m.cardModels[0].id, id=id) + # restore old model + self.currentModel = last + + def fixIntegrity(self, quick=False): + "Fix some problems and rebuild caches. 
Caller must .reset()" + self.s.commit() + self.resetUndo() + problems = [] + recover = False + if quick: + num = 4 + else: + num = 9 + self.startProgress(num) + self.updateProgress(_("Checking integrity...")) + if self.s.scalar("pragma integrity_check") != "ok": + self.finishProgress() + return _("Database file is damaged.\n" + "Please restore from automatic backup (see FAQ).") + # ensure correct views and indexes are available + self.updateProgress() + DeckStorage._addViews(self) + DeckStorage._addIndices(self) + # does the user have a model? + self.updateProgress(_("Checking schema...")) + if not self.s.scalar("select count(id) from models"): + self.addModel(BasicModel()) + problems.append(_("Deck was missing a model")) + # is currentModel pointing to a valid model? + if not self.s.all(""" +select decks.id from decks, models where +decks.currentModelId = models.id"""): + self.currentModelId = self.models[0].id + problems.append(_("The current model didn't exist")) + # fields missing a field model + ids = self.s.column0(""" +select id from fields where fieldModelId not in ( +select distinct id from fieldModels)""") + if ids: + self.s.statement("delete from fields where id in %s" % + ids2str(ids)) + problems.append(ngettext("Deleted %d field with missing field model", + "Deleted %d fields with missing field model", len(ids)) % + len(ids)) + # facts missing a field? + ids = self.s.column0(""" +select distinct facts.id from facts, fieldModels where +facts.modelId = fieldModels.modelId and fieldModels.id not in +(select fieldModelId from fields where factId = facts.id)""") + if ids: + self.deleteFacts(ids) + problems.append(ngettext("Deleted %d fact with missing fields", + "Deleted %d facts with missing fields", len(ids)) % + len(ids)) + # cards missing a fact? 
+ ids = self.s.column0(""" +select id from cards where factId not in (select id from facts)""") + if ids: + recover = True + self.recoverCards(ids) + problems.append(ngettext("Recovered %d card with missing fact", + "Recovered %d cards with missing fact", len(ids)) % + len(ids)) + # cards missing a card model? + ids = self.s.column0(""" +select id from cards where cardModelId not in +(select id from cardModels)""") + if ids: + recover = True + self.recoverCards(ids) + problems.append(ngettext("Recovered %d card with no card template", + "Recovered %d cards with no card template", len(ids)) % + len(ids)) + # cards with a card model from the wrong model + ids = self.s.column0(""" +select id from cards where cardModelId not in (select cm.id from +cardModels cm, facts f where cm.modelId = f.modelId and +f.id = cards.factId)""") + if ids: + recover = True + self.recoverCards(ids) + problems.append(ngettext("Recovered %d card with wrong card template", + "Recovered %d cards with wrong card template", len(ids)) % + len(ids)) + # facts missing a card? + ids = self.deleteDanglingFacts() + if ids: + problems.append(ngettext("Deleted %d fact with no cards", + "Deleted %d facts with no cards", len(ids)) % + len(ids)) + # dangling fields? 
+ ids = self.s.column0(""" +select id from fields where factId not in (select id from facts)""") + if ids: + self.s.statement( + "delete from fields where id in %s" % ids2str(ids)) + problems.append(ngettext("Deleted %d dangling field", + "Deleted %d dangling fields", len(ids)) % + len(ids)) + self.s.flush() + if not quick: + self.updateProgress() + # these sometimes end up null on upgrade + self.s.statement("update models set source = 0 where source is null") + self.s.statement( + "update cardModels set allowEmptyAnswer = 1, typeAnswer = '' " + "where allowEmptyAnswer is null or typeAnswer is null") + # fix tags + self.updateProgress(_("Rebuilding tag cache...")) + self.updateCardTags() + # fix any priorities + self.updateProgress(_("Updating priorities...")) + self.updateAllPriorities(dirty=False) + # make sure + self.updateProgress(_("Updating ordinals...")) + self.s.statement(""" +update fields set ordinal = (select ordinal from fieldModels +where id = fieldModelId)""") + # fix problems with stripping html + self.updateProgress(_("Rebuilding QA cache...")) + fields = self.s.all("select id, value from fields") + newFields = [] + for (id, value) in fields: + newFields.append({'id': id, 'value': tidyHTML(value)}) + self.s.statements( + "update fields set value=:value where id=:id", + newFields) + # regenerate question/answer cache + for m in self.models: + self.updateCardsFromModel(m, dirty=False) + # force a full sync + self.s.flush() + self.s.statement("update cards set modified = :t", t=time.time()) + self.s.statement("update facts set modified = :t", t=time.time()) + self.s.statement("update models set modified = :t", t=time.time()) + self.lastSync = 0 + # rebuild + self.updateProgress(_("Rebuilding types...")) + self.rebuildTypes() + # update deck and save + if not quick: + self.flushMod() + self.save() + self.refreshSession() + self.finishProgress() + if problems: + if recover: + problems.append("\n" + _("""\ +Cards with corrupt or missing facts have been 
placed into new facts. \ +Your scheduling info and card content has been preserved, but the \ +original layout of the facts has been lost.""")) + return "\n".join(problems) + return "ok" + + def optimize(self): + oldSize = os.stat(self.path)[stat.ST_SIZE] + self.s.commit() + self.s.statement("vacuum") + self.s.statement("analyze") + newSize = os.stat(self.path)[stat.ST_SIZE] + return oldSize - newSize + + # Undo/redo + ########################################################################## + + def initUndo(self): + # note this code ignores 'unique', as it's an sqlite reserved word + self.undoStack = [] + self.redoStack = [] + self.undoEnabled = True + self.s.statement( + "create temporary table undoLog (seq integer primary key not null, sql text)") + tables = self.s.column0( + "select name from sqlite_master where type = 'table'") + for table in tables: + if table in ("undoLog", "sqlite_stat1"): + continue + columns = [r[1] for r in + self.s.all("pragma table_info(%s)" % table)] + # insert + self.s.statement(""" +create temp trigger _undo_%(t)s_it +after insert on %(t)s begin +insert into undoLog values +(null, 'delete from %(t)s where rowid = ' || new.rowid); end""" % {'t': table}) + # update + sql = """ +create temp trigger _undo_%(t)s_ut +after update on %(t)s begin +insert into undoLog values (null, 'update %(t)s """ % {'t': table} + sep = "set " + for c in columns: + if c == "unique": + continue + sql += "%(s)s%(c)s=' || quote(old.%(c)s) || '" % { + 's': sep, 'c': c} + sep = "," + sql += " where rowid = ' || old.rowid); end" + self.s.statement(sql) + # delete + sql = """ +create temp trigger _undo_%(t)s_dt +before delete on %(t)s begin +insert into undoLog values (null, 'insert into %(t)s (rowid""" % {'t': table} + for c in columns: + sql += ",\"%s\"" % c + sql += ") values (' || old.rowid ||'" + for c in columns: + if c == "unique": + sql += ",1" + continue + sql += ",' || quote(old.%s) ||'" % c + sql += ")'); end" + self.s.statement(sql) + + def 
undoName(self): + for n in reversed(self.undoStack): + if n: + return n[0] + + def redoName(self): + return self.redoStack[-1][0] + + def undoAvailable(self): + if not self.undoEnabled: + return + for r in reversed(self.undoStack): + if r: + return True + + def redoAvailable(self): + return self.undoEnabled and self.redoStack + + def resetUndo(self): + try: + self.s.statement("delete from undoLog") + except: + pass + self.undoStack = [] + self.redoStack = [] + + def setUndoBarrier(self): + if not self.undoStack or self.undoStack[-1] is not None: + self.undoStack.append(None) + + def setUndoStart(self, name, merge=False): + if not self.undoEnabled: + return + self.s.flush() + if merge and self.undoStack: + if self.undoStack[-1] and self.undoStack[-1][0] == name: + # merge with last entry? + return + start = self._latestUndoRow() + self.undoStack.append([name, start, None]) + + def setUndoEnd(self, name): + if not self.undoEnabled: + return + self.s.flush() + end = self._latestUndoRow() + while self.undoStack[-1] is None: + # strip off barrier + self.undoStack.pop() + self.undoStack[-1][2] = end + if self.undoStack[-1][1] == self.undoStack[-1][2]: + self.undoStack.pop() + else: + self.redoStack = [] + runHook("undoEnd") + + def _latestUndoRow(self): + return self.s.scalar("select max(rowid) from undoLog") or 0 + + def _undoredo(self, src, dst): + self.s.flush() + while 1: + u = src.pop() + if u: + break + (start, end) = (u[1], u[2]) + if end is None: + end = self._latestUndoRow() + sql = self.s.column0(""" +select sql from undoLog where +seq > :s and seq <= :e order by seq desc""", s=start, e=end) + mod = len(sql) / 35 + if mod: + self.startProgress(36) + self.updateProgress(_("Processing...")) + newstart = self._latestUndoRow() + for c, s in enumerate(sql): + if mod and not c % mod: + self.updateProgress() + self.engine.execute(s) + newend = self._latestUndoRow() + dst.append([u[0], newstart, newend]) + if mod: + self.finishProgress() + + def undo(self): + "Undo the 
last action(s). Caller must .reset()" + self._undoredo(self.undoStack, self.redoStack) + self.refreshSession() + runHook("postUndoRedo") + + def redo(self): + "Redo the last action(s). Caller must .reset()" + self._undoredo(self.redoStack, self.undoStack) + self.refreshSession() + runHook("postUndoRedo") + + # Dynamic indices + ########################################################################## + + def updateDynamicIndices(self): + indices = { + 'intervalDesc': + '(type, priority desc, interval desc, factId, combinedDue)', + 'intervalAsc': + '(type, priority desc, interval, factId, combinedDue)', + 'randomOrder': + '(type, priority desc, factId, ordinal, combinedDue)', + 'dueAsc': + '(type, priority desc, due, factId, combinedDue)', + 'dueDesc': + '(type, priority desc, due desc, factId, combinedDue)', + } + # determine required + required = [] + if self.revCardOrder == REV_CARDS_OLD_FIRST: + required.append("intervalDesc") + if self.revCardOrder == REV_CARDS_NEW_FIRST: + required.append("intervalAsc") + if self.revCardOrder == REV_CARDS_RANDOM: + required.append("randomOrder") + if (self.revCardOrder == REV_CARDS_DUE_FIRST or + self.newCardOrder == NEW_CARDS_OLD_FIRST or + self.newCardOrder == NEW_CARDS_RANDOM): + required.append("dueAsc") + if (self.newCardOrder == NEW_CARDS_NEW_FIRST): + required.append("dueDesc") + # add/delete + analyze = False + for (k, v) in indices.items(): + n = "ix_cards_%s2" % k + if k in required: + if not self.s.scalar( + "select 1 from sqlite_master where name = :n", n=n): + self.s.statement( + "create index %s on cards %s" % + (n, v)) + analyze = True + else: + # leave old indices for older clients + #self.s.statement("drop index if exists ix_cards_%s" % k) + self.s.statement("drop index if exists %s" % n) + if analyze: + self.s.statement("analyze") + +# Shared decks +########################################################################## + +sourcesTable = Table( + 'sources', metadata, + Column('id', Integer, 
nullable=False, primary_key=True), + Column('name', UnicodeText, nullable=False, default=u""), + Column('created', Float, nullable=False, default=time.time), + Column('lastSync', Float, nullable=False, default=0), + # -1 = never check, 0 = always check, 1+ = number of seconds passed. + # not currently exposed in the GUI + Column('syncPeriod', Integer, nullable=False, default=0)) + +# Maps +########################################################################## + +mapper(Deck, decksTable, properties={ + 'currentModel': relation(oldanki.models.Model, primaryjoin= + decksTable.c.currentModelId == + oldanki.models.modelsTable.c.id), + 'models': relation(oldanki.models.Model, post_update=True, + primaryjoin= + decksTable.c.id == + oldanki.models.modelsTable.c.deckId), + }) + +# Deck storage +########################################################################## + +numBackups = 30 +backupDir = os.path.expanduser("~/.oldanki/backups") + +class DeckStorage(object): + + def Deck(path=None, backup=True, lock=True, pool=True, rebuild=True): + "Create a new deck or attach to an existing one." 
+ create = True + if path is None: + sqlpath = None + else: + path = os.path.abspath(path) + # check if we need to init + if os.path.exists(path): + create = False + # sqlite needs utf8 + sqlpath = path.encode("utf-8") + try: + (engine, session) = DeckStorage._attach(sqlpath, create, pool) + s = session() + if create: + ver = 999 + metadata.create_all(engine) + deck = DeckStorage._init(s) + else: + ver = s.scalar("select version from decks limit 1") + if ver < 19: + for st in ( + "decks add column newCardsPerDay integer not null default 20", + "decks add column sessionRepLimit integer not null default 100", + "decks add column sessionTimeLimit integer not null default 1800", + "decks add column utcOffset numeric(10, 2) not null default 0", + "decks add column cardCount integer not null default 0", + "decks add column factCount integer not null default 0", + "decks add column failedNowCount integer not null default 0", + "decks add column failedSoonCount integer not null default 0", + "decks add column revCount integer not null default 0", + "decks add column newCount integer not null default 0", + "decks add column revCardOrder integer not null default 0", + "cardModels add column allowEmptyAnswer boolean not null default 1", + "cardModels add column typeAnswer text not null default ''"): + try: + s.execute("alter table " + st) + except: + pass + if ver < DECK_VERSION: + metadata.create_all(engine) + deck = s.query(Deck).get(1) + if not deck: + raise DeckAccessError(_("Deck missing core table"), + type="nocore") + # attach db vars + deck.path = path + deck.engine = engine + deck.Session = session + deck.needLock = lock + deck.progressHandlerCalled = 0 + deck.progressHandlerEnabled = False + if pool: + try: + deck.engine.raw_connection().set_progress_handler( + deck.progressHandler, 100) + except: + print "please install pysqlite 2.4 for better progress dialogs" + deck.engine.execute("pragma locking_mode = exclusive") + deck.s = SessionHelper(s, lock=lock) + # force 
a write lock + deck.s.execute("update decks set modified = modified") + needUnpack = False + if deck.utcOffset in (-1, -2): + # do the rest later + needUnpack = deck.utcOffset == -1 + # make sure we do this before initVars + DeckStorage._setUTCOffset(deck) + deck.created = time.time() + if ver < 27: + initTagTables(deck.s) + if create: + # new-style file format + deck.s.commit() + deck.s.execute("pragma legacy_file_format = off") + deck.s.execute("pragma default_cache_size= 20000") + deck.s.execute("vacuum") + # add views/indices + initTagTables(deck.s) + DeckStorage._addViews(deck) + DeckStorage._addIndices(deck) + deck.s.statement("analyze") + deck._initVars() + deck.updateTagPriorities() + else: + if backup: + DeckStorage.backup(deck, path) + deck._initVars() + try: + deck = DeckStorage._upgradeDeck(deck, path) + except: + traceback.print_exc() + deck.fixIntegrity() + deck = DeckStorage._upgradeDeck(deck, path) + except OperationalError, e: + engine.dispose() + if (str(e.orig).startswith("database table is locked") or + str(e.orig).startswith("database is locked")): + raise DeckAccessError(_("File is in use by another process"), + type="inuse") + else: + raise e + if not rebuild: + # minimal startup + deck._globalStats = globalStats(deck) + deck._dailyStats = dailyStats(deck) + return deck + if needUnpack: + deck.startProgress() + DeckStorage._addIndices(deck) + for m in deck.models: + deck.updateCardsFromModel(m) + deck.finishProgress() + oldMod = deck.modified + # fix a bug with current model being unset + if not deck.currentModel and deck.models: + deck.currentModel = deck.models[0] + # ensure the necessary indices are available + deck.updateDynamicIndices() + # FIXME: temporary code for upgrade + # - ensure cards suspended on older clients are recognized + deck.s.statement(""" +update cards set type = type - 3 where type between 0 and 2 and priority = -3""") + # - new delay1 handling + if deck.delay1 > 7: + deck.delay1 = 0 + # unsuspend buried/rev early - 
can remove priorities in the future + ids = deck.s.column0( + "select id from cards where type > 2 or priority between -2 and -1") + if ids: + deck.updatePriorities(ids) + deck.s.statement( + "update cards set type = relativeDelay where type > 2") + deck.s.commit() + # check if deck has been moved, and disable syncing + deck.checkSyncHash() + # determine starting factor for new cards + deck.averageFactor = (deck.s.scalar( + "select avg(factor) from cards where type = 1") + or Deck.initialFactor) + deck.averageFactor = max(deck.averageFactor, Deck.minimumAverage) + # rebuild queue + deck.reset() + # make sure we haven't accidentally bumped the modification time + assert deck.modified == oldMod + return deck + Deck = staticmethod(Deck) + + def _attach(path, create, pool=True): + "Attach to a file, initializing DB" + if path is None: + path = "sqlite://" + else: + path = "sqlite:///" + path + if pool: + # open and lock connection for single use + from sqlalchemy.pool import SingletonThreadPool + # temporary tables are effectively useless with the default + # settings in 0.7, so we need to force the pool class + engine = create_engine(path, connect_args={'timeout': 0}, + poolclass=SingletonThreadPool) + else: + # no pool & concurrent access w/ timeout + engine = create_engine(path, + poolclass=NullPool, + connect_args={'timeout': 60}) + session = sessionmaker(bind=engine, + autoflush=False, + autocommit=True) + return (engine, session) + _attach = staticmethod(_attach) + + def _init(s): + "Add a new deck to the database. Return saved deck." + deck = Deck() + if sqlalchemy.__version__.startswith("0.4."): + s.save(deck) + else: + s.add(deck) + s.flush() + return deck + _init = staticmethod(_init) + + def _addIndices(deck): + "Add indices to the DB." 
+ # counts, failed cards + deck.s.statement(""" +create index if not exists ix_cards_typeCombined on cards +(type, combinedDue, factId)""") + # scheduler-agnostic type + deck.s.statement(""" +create index if not exists ix_cards_relativeDelay on cards +(relativeDelay)""") + # index on modified, to speed up sync summaries + deck.s.statement(""" +create index if not exists ix_cards_modified on cards +(modified)""") + deck.s.statement(""" +create index if not exists ix_facts_modified on facts +(modified)""") + # priority - temporary index to make compat code faster. this can be + # removed when all clients are on 1.2, as can the ones below + deck.s.statement(""" +create index if not exists ix_cards_priority on cards +(priority)""") + # average factor + deck.s.statement(""" +create index if not exists ix_cards_factor on cards +(type, factor)""") + # card spacing + deck.s.statement(""" +create index if not exists ix_cards_factId on cards (factId)""") + # stats + deck.s.statement(""" +create index if not exists ix_stats_typeDay on stats (type, day)""") + # fields + deck.s.statement(""" +create index if not exists ix_fields_factId on fields (factId)""") + deck.s.statement(""" +create index if not exists ix_fields_fieldModelId on fields (fieldModelId)""") + deck.s.statement(""" +create index if not exists ix_fields_value on fields (value)""") + # media + deck.s.statement(""" +create unique index if not exists ix_media_filename on media (filename)""") + deck.s.statement(""" +create index if not exists ix_media_originalPath on media (originalPath)""") + # deletion tracking + deck.s.statement(""" +create index if not exists ix_cardsDeleted_cardId on cardsDeleted (cardId)""") + deck.s.statement(""" +create index if not exists ix_modelsDeleted_modelId on modelsDeleted (modelId)""") + deck.s.statement(""" +create index if not exists ix_factsDeleted_factId on factsDeleted (factId)""") + deck.s.statement(""" +create index if not exists ix_mediaDeleted_factId on mediaDeleted 
(mediaId)""") + # tags + txt = "create unique index if not exists ix_tags_tag on tags (tag)" + try: + deck.s.statement(txt) + except: + deck.s.statement(""" +delete from tags where exists (select 1 from tags t2 where tags.tag = t2.tag +and tags.rowid > t2.rowid)""") + deck.s.statement(txt) + deck.s.statement(""" +create index if not exists ix_cardTags_tagCard on cardTags (tagId, cardId)""") + deck.s.statement(""" +create index if not exists ix_cardTags_cardId on cardTags (cardId)""") + _addIndices = staticmethod(_addIndices) + + def _addViews(deck): + "Add latest version of SQL views to DB." + s = deck.s + # old views + s.statement("drop view if exists failedCards") + s.statement("drop view if exists revCardsOld") + s.statement("drop view if exists revCardsNew") + s.statement("drop view if exists revCardsDue") + s.statement("drop view if exists revCardsRandom") + s.statement("drop view if exists acqCardsRandom") + s.statement("drop view if exists acqCardsOld") + s.statement("drop view if exists acqCardsNew") + # failed cards + s.statement(""" +create view failedCards as +select * from cards +where type = 0 and isDue = 1 +order by type, isDue, combinedDue +""") + # rev cards + s.statement(""" +create view revCardsOld as +select * from cards +where type = 1 and isDue = 1 +order by priority desc, interval desc""") + s.statement(""" +create view revCardsNew as +select * from cards +where type = 1 and isDue = 1 +order by priority desc, interval""") + s.statement(""" +create view revCardsDue as +select * from cards +where type = 1 and isDue = 1 +order by priority desc, due""") + s.statement(""" +create view revCardsRandom as +select * from cards +where type = 1 and isDue = 1 +order by priority desc, factId, ordinal""") + # new cards + s.statement(""" +create view acqCardsOld as +select * from cards +where type = 2 and isDue = 1 +order by priority desc, due""") + s.statement(""" +create view acqCardsNew as +select * from cards +where type = 2 and isDue = 1 +order by 
priority desc, due desc""") + _addViews = staticmethod(_addViews) + + def _upgradeDeck(deck, path): + "Upgrade deck to the latest version." + if deck.version < DECK_VERSION: + prog = True + deck.startProgress() + deck.updateProgress(_("Upgrading Deck...")) + if deck.utcOffset == -1: + # we're opening a shared deck with no indices - we'll need + # them if we want to rebuild the queue + DeckStorage._addIndices(deck) + oldmod = deck.modified + else: + prog = False + deck.path = path + if deck.version == 0: + # new columns + try: + deck.s.statement(""" + alter table cards add column spaceUntil float not null default 0""") + deck.s.statement(""" + alter table cards add column relativeDelay float not null default 0.0""") + deck.s.statement(""" + alter table cards add column isDue boolean not null default 0""") + deck.s.statement(""" + alter table cards add column type integer not null default 0""") + deck.s.statement(""" + alter table cards add column combinedDue float not null default 0""") + # update cards.spaceUntil based on old facts + deck.s.statement(""" + update cards + set spaceUntil = (select (case + when cards.id = facts.lastCardId + then 0 + else facts.spaceUntil + end) from cards as c, facts + where c.factId = facts.id + and cards.id = c.id)""") + deck.s.statement(""" + update cards + set combinedDue = max(due, spaceUntil) + """) + except: + print "failed to upgrade" + # rebuild with new file format + deck.s.commit() + deck.s.execute("pragma legacy_file_format = off") + deck.s.execute("vacuum") + # add views/indices + DeckStorage._addViews(deck) + DeckStorage._addIndices(deck) + # rebuild type and delay cache + deck.rebuildTypes() + deck.reset() + # bump version + deck.version = 1 + # optimize indices + deck.s.statement("analyze") + if deck.version == 1: + # fix indexes and views + deck.s.statement("drop index if exists ix_cards_newRandomOrder") + deck.s.statement("drop index if exists ix_cards_newOrderedOrder") + DeckStorage._addViews(deck) + 
DeckStorage._addIndices(deck) + deck.rebuildTypes() + # optimize indices + deck.s.statement("analyze") + deck.version = 2 + if deck.version == 2: + # compensate for bug in 0.9.7 by rebuilding isDue and priorities + deck.s.statement("update cards set isDue = 0") + deck.updateAllPriorities(dirty=False) + # compensate for bug in early 0.9.x where fieldId was not unique + deck.s.statement("update fields set id = random()") + deck.version = 3 + if deck.version == 3: + # remove conflicting and unused indexes + deck.s.statement("drop index if exists ix_cards_isDueCombined") + deck.s.statement("drop index if exists ix_facts_lastCardId") + deck.s.statement("drop index if exists ix_cards_successive") + deck.s.statement("drop index if exists ix_cards_priority") + deck.s.statement("drop index if exists ix_cards_reps") + deck.s.statement("drop index if exists ix_cards_due") + deck.s.statement("drop index if exists ix_stats_type") + deck.s.statement("drop index if exists ix_stats_day") + deck.s.statement("drop index if exists ix_factsDeleted_cardId") + deck.s.statement("drop index if exists ix_modelsDeleted_cardId") + DeckStorage._addIndices(deck) + deck.s.statement("analyze") + deck.version = 4 + if deck.version == 4: + # decks field upgraded earlier + deck.version = 5 + if deck.version == 5: + # new spacing + deck.newCardSpacing = NEW_CARDS_DISTRIBUTE + deck.version = 6 + # low priority cards now stay in same queue + deck.rebuildTypes() + if deck.version == 6: + # removed 'new cards first' option, so order has changed + deck.newCardSpacing = NEW_CARDS_DISTRIBUTE + deck.version = 7 + # 8 upgrade code removed as obsolete> + if deck.version < 9: + # back up the media dir again, just in case + shutil.copytree(deck.mediaDir(create=True), + deck.mediaDir() + "-old-%s" % + hash(time.time())) + # backup media + media = deck.s.all(""" +select filename, size, created, originalPath, description from media""") + # fix mediaDeleted definition + deck.s.execute("drop table mediaDeleted") + 
deck.s.execute("drop table media") + metadata.create_all(deck.engine) + # restore + h = [] + for row in media: + h.append({ + 'id': genID(), + 'filename': row[0], + 'size': row[1], + 'created': row[2], + 'originalPath': row[3], + 'description': row[4]}) + if h: + deck.s.statements(""" +insert into media values ( +:id, :filename, :size, :created, :originalPath, :description)""", h) + deck.version = 9 + if deck.version < 10: + deck.s.statement(""" +alter table models add column source integer not null default 0""") + deck.version = 10 + if deck.version < 11: + DeckStorage._setUTCOffset(deck) + deck.version = 11 + deck.s.commit() + if deck.version < 12: + deck.s.statement("drop index if exists ix_cards_revisionOrder") + deck.s.statement("drop index if exists ix_cards_newRandomOrder") + deck.s.statement("drop index if exists ix_cards_newOrderedOrder") + deck.s.statement("drop index if exists ix_cards_markExpired") + deck.s.statement("drop index if exists ix_cards_failedIsDue") + deck.s.statement("drop index if exists ix_cards_failedOrder") + deck.s.statement("drop index if exists ix_cards_type") + deck.s.statement("drop index if exists ix_cards_priority") + DeckStorage._addViews(deck) + DeckStorage._addIndices(deck) + deck.s.statement("analyze") + if deck.version < 13: + deck.reset() + deck.rebuildCounts() + # regenerate question/answer cache + for m in deck.models: + deck.updateCardsFromModel(m, dirty=False) + deck.version = 13 + if deck.version < 14: + deck.s.statement(""" +update cards set interval = 0 +where interval < 1""") + deck.version = 14 + if deck.version < 15: + deck.delay1 = deck.delay0 + deck.delay2 = 0.0 + deck.version = 15 + if deck.version < 16: + deck.version = 16 + if deck.version < 17: + deck.s.statement("drop view if exists acqCards") + deck.s.statement("drop view if exists futureCards") + deck.s.statement("drop view if exists revCards") + deck.s.statement("drop view if exists typedCards") + deck.s.statement("drop view if exists failedCardsNow") + 
deck.s.statement("drop view if exists failedCardsSoon") + deck.s.statement("drop index if exists ix_cards_revisionOrder") + deck.s.statement("drop index if exists ix_cards_newRandomOrder") + deck.s.statement("drop index if exists ix_cards_newOrderedOrder") + deck.s.statement("drop index if exists ix_cards_combinedDue") + # add new views + DeckStorage._addViews(deck) + DeckStorage._addIndices(deck) + deck.version = 17 + if deck.version < 18: + deck.s.statement( + "create table undoLog (seq integer primary key, sql text)") + deck.version = 18 + deck.s.commit() + DeckStorage._addIndices(deck) + deck.s.statement("analyze") + if deck.version < 19: + # permanent undo log causes various problems, revert to temp + deck.s.statement("drop table undoLog") + deck.sessionTimeLimit = 600 + deck.sessionRepLimit = 0 + deck.version = 19 + deck.s.commit() + if deck.version < 20: + DeckStorage._addViews(deck) + DeckStorage._addIndices(deck) + deck.version = 20 + deck.s.commit() + if deck.version < 21: + deck.s.statement("vacuum") + deck.s.statement("analyze") + deck.version = 21 + deck.s.commit() + if deck.version < 22: + deck.s.statement( + 'update cardModels set typeAnswer = ""') + deck.version = 22 + deck.s.commit() + if deck.version < 23: + try: + deck.s.execute("drop table undoLog") + except: + pass + deck.version = 23 + deck.s.commit() + if deck.version < 24: + deck.s.statement( + "update cardModels set lastFontColour = '#ffffff'") + deck.version = 24 + deck.s.commit() + if deck.version < 25: + deck.s.statement("drop index if exists ix_cards_priorityDue") + deck.s.statement("drop index if exists ix_cards_priorityDueReal") + DeckStorage._addViews(deck) + DeckStorage._addIndices(deck) + deck.updateDynamicIndices() + deck.version = 25 + deck.s.commit() + if deck.version < 26: + # no spaces in tags anymore, as separated by space + def munge(tags): + tags = re.sub(", ?", "--tmp--", tags) + tags = re.sub(" - ", "-", tags) + tags = re.sub(" ", "-", tags) + tags = re.sub("--tmp--", " 
", tags) + tags = canonifyTags(tags) + return tags + rows = deck.s.all('select id, tags from facts') + d = [] + for (id, tags) in rows: + d.append({ + 'i': id, + 't': munge(tags), + }) + deck.s.statements( + "update facts set tags = :t where id = :i", d) + for k in ('highPriority', 'medPriority', + 'lowPriority', 'suspended'): + x = getattr(deck, k) + setattr(deck, k, munge(x)) + for m in deck.models: + for cm in m.cardModels: + cm.name = munge(cm.name) + m.tags = munge(m.tags) + deck.updateCardsFromModel(m, dirty=False) + deck.version = 26 + deck.s.commit() + deck.s.statement("vacuum") + if deck.version < 27: + DeckStorage._addIndices(deck) + deck.updateCardTags() + deck.updateAllPriorities(dirty=False) + deck.version = 27 + deck.s.commit() + if deck.version < 28: + deck.s.statement("pragma default_cache_size= 20000") + deck.version = 28 + deck.s.commit() + if deck.version < 30: + # remove duplicates from review history + deck.s.statement(""" +delete from reviewHistory where id not in ( +select min(id) from reviewHistory group by cardId, time);""") + deck.version = 30 + deck.s.commit() + if deck.version < 31: + # recreate review history table + deck.s.statement("drop index if exists ix_reviewHistory_unique") + schema = """ +CREATE TABLE %s ( +cardId INTEGER NOT NULL, +time NUMERIC(10, 2) NOT NULL, +lastInterval NUMERIC(10, 2) NOT NULL, +nextInterval NUMERIC(10, 2) NOT NULL, +ease INTEGER NOT NULL, +delay NUMERIC(10, 2) NOT NULL, +lastFactor NUMERIC(10, 2) NOT NULL, +nextFactor NUMERIC(10, 2) NOT NULL, +reps NUMERIC(10, 2) NOT NULL, +thinkingTime NUMERIC(10, 2) NOT NULL, +yesCount NUMERIC(10, 2) NOT NULL, +noCount NUMERIC(10, 2) NOT NULL, +PRIMARY KEY (cardId, time))""" + deck.s.statement(schema % "revtmp") + deck.s.statement(""" +insert into revtmp +select cardId, time, lastInterval, nextInterval, ease, delay, lastFactor, +nextFactor, reps, thinkingTime, yesCount, noCount from reviewHistory""") + deck.s.statement("drop table reviewHistory") + 
metadata.create_all(deck.engine) + deck.s.statement( + "insert into reviewHistory select * from revtmp") + deck.s.statement("drop table revtmp") + deck.version = 31 + deck.s.commit() + deck.s.statement("vacuum") + if deck.version < 32: + deck.s.execute("drop index if exists ix_cardTags_tagId") + deck.s.execute("drop index if exists ix_cardTags_cardId") + DeckStorage._addIndices(deck) + deck.s.execute("analyze") + deck.version = 32 + deck.s.commit() + if deck.version < 33: + deck.s.execute("drop index if exists ix_tags_tag") + DeckStorage._addIndices(deck) + deck.version = 33 + deck.s.commit() + if deck.version < 34: + deck.s.execute("drop view if exists acqCardsRandom") + deck.s.execute("drop index if exists ix_cards_factId") + DeckStorage._addIndices(deck) + deck.updateDynamicIndices() + deck.version = 34 + deck.s.commit() + if deck.version < 36: + deck.s.statement("drop index if exists ix_cards_priorityDue") + DeckStorage._addIndices(deck) + deck.s.execute("analyze") + deck.version = 36 + deck.s.commit() + if deck.version < 37: + if deck.getFailedCardPolicy() == 1: + deck.failedCardMax = 0 + deck.version = 37 + deck.s.commit() + if deck.version < 39: + deck.reset() + # manually suspend all suspended cards + ids = deck.findCards("tag:suspended") + if ids: + # unrolled from suspendCards() to avoid marking dirty + deck.s.statement( + "update cards set isDue=0, priority=-3 " + "where id in %s" % ids2str(ids)) + deck.rebuildCounts() + # suspended tag obsolete - don't do this yet + deck.suspended = re.sub(u" ?Suspended ?", u"", deck.suspended) + deck.updateTagPriorities() + deck.version = 39 + deck.s.commit() + if deck.version < 40: + # now stores media url + deck.s.statement("update models set features = ''") + deck.version = 40 + deck.s.commit() + if deck.version < 43: + deck.s.statement("update fieldModels set features = ''") + deck.version = 43 + deck.s.commit() + if deck.version < 44: + # leaner indices + deck.s.statement("drop index if exists ix_cards_factId") + 
deck.version = 44 + deck.s.commit() + if deck.version < 48: + deck.updateFieldCache(deck.s.column0("select id from facts")) + deck.version = 48 + deck.s.commit() + if deck.version < 50: + # more new type handling + deck.rebuildTypes() + deck.version = 50 + deck.s.commit() + if deck.version < 52: + dname = deck.name() + sname = deck.syncName + if sname and dname != sname: + deck.notify(_("""\ +When syncing, Anki now uses the same deck name on the server as the deck \ +name on your computer. Because you had '%(dname)s' set to sync to \ +'%(sname)s' on the server, syncing has been temporarily disabled. + +If you want to keep your changes to the online version, please use \ +File>Download>Personal Deck to download the online version. + +If you want to keep the version on your computer, please enable \ +syncing again via Settings>Deck Properties>Synchronisation. + +If you have syncing disabled in the preferences, you can ignore \ +this message. (ERR-0101)""") % { + 'sname':sname, 'dname':dname}) + deck.disableSyncing() + elif sname: + deck.enableSyncing() + deck.version = 52 + deck.s.commit() + if deck.version < 53: + if deck.getBool("perDay"): + if deck.hardIntervalMin == 0.333: + deck.hardIntervalMin = max(1.0, deck.hardIntervalMin) + deck.hardIntervalMax = max(1.1, deck.hardIntervalMax) + deck.version = 53 + deck.s.commit() + if deck.version < 54: + # broken versions of the DB orm die if this is a bool with a + # non-int value + deck.s.statement("update fieldModels set editFontFamily = 1"); + deck.version = 54 + deck.s.commit() + if deck.version < 57: + deck.version = 57 + deck.s.commit() + if deck.version < 61: + # do our best to upgrade templates to the new style + txt = '''\ +%s''' + for m in deck.models: + unstyled = [] + for fm in m.fieldModels: + # find which fields had explicit formatting + if fm.quizFontFamily or fm.quizFontSize or fm.quizFontColour: + pass + else: + unstyled.append(fm.name) + # fill out missing info + fm.quizFontFamily = fm.quizFontFamily or 
u"Arial" + fm.quizFontSize = fm.quizFontSize or 20 + fm.quizFontColour = fm.quizFontColour or "#000000" + fm.editFontSize = fm.editFontSize or 20 + unstyled = set(unstyled) + for cm in m.cardModels: + # embed the old font information into card templates + cm.qformat = txt % ( + cm.questionFontFamily, + cm.questionFontSize, + cm.questionFontColour, + cm.qformat) + cm.aformat = txt % ( + cm.answerFontFamily, + cm.answerFontSize, + cm.answerFontColour, + cm.aformat) + # escape fields that had no previous styling + for un in unstyled: + cm.qformat = cm.qformat.replace("%("+un+")s", "{{{%s}}}"%un) + cm.aformat = cm.aformat.replace("%("+un+")s", "{{{%s}}}"%un) + # rebuild q/a for the above & because latex has changed + for m in deck.models: + deck.updateCardsFromModel(m, dirty=False) + # rebuild the media db based on new format + rebuildMediaDir(deck, dirty=False) + deck.version = 61 + deck.s.commit() + if deck.version < 62: + # updated indices + for d in ("intervalDesc", "intervalAsc", "randomOrder", + "dueAsc", "dueDesc"): + deck.s.statement("drop index if exists ix_cards_%s2" % d) + deck.s.statement("drop index if exists ix_cards_typeCombined") + DeckStorage._addIndices(deck) + deck.updateDynamicIndices() + deck.s.execute("vacuum") + deck.version = 62 + deck.s.commit() + if deck.version < 64: + # remove old static indices, as all clients should be libanki1.2+ + for d in ("ix_cards_duePriority", + "ix_cards_priorityDue"): + deck.s.statement("drop index if exists %s" % d) + # remove old dynamic indices + for d in ("intervalDesc", "intervalAsc", "randomOrder", + "dueAsc", "dueDesc"): + deck.s.statement("drop index if exists ix_cards_%s" % d) + deck.s.execute("analyze") + deck.version = 64 + deck.s.commit() + # note: we keep the priority index for now + if deck.version < 65: + # we weren't correctly setting relativeDelay when answering cards + # in previous versions, so ensure everything is set correctly + deck.rebuildTypes() + deck.version = 65 + deck.s.commit() + # 
executing a pragma here is very slow on large decks, so we store + # our own record + if not deck.getInt("pageSize") == 4096: + deck.s.commit() + deck.s.execute("pragma page_size = 4096") + deck.s.execute("pragma legacy_file_format = 0") + deck.s.execute("vacuum") + deck.setVar("pageSize", 4096, mod=False) + deck.s.commit() + if prog: + assert deck.modified == oldmod + deck.finishProgress() + return deck + _upgradeDeck = staticmethod(_upgradeDeck) + + def _setUTCOffset(deck): + # 4am + deck.utcOffset = time.timezone + 60*60*4 + _setUTCOffset = staticmethod(_setUTCOffset) + + def backup(deck, path): + """Path must not be unicode.""" + if not numBackups: + return + def escape(path): + path = os.path.abspath(path) + path = path.replace("\\", "!") + path = path.replace("/", "!") + path = path.replace(":", "") + return path + escp = escape(path) + # make sure backup dir exists + try: + os.makedirs(backupDir) + except (OSError, IOError): + pass + # find existing backups + gen = re.sub("\.oldanki$", ".backup-(\d+).oldanki", re.escape(escp)) + backups = [] + for file in os.listdir(backupDir): + m = re.match(gen, file) + if m: + backups.append((int(m.group(1)), file)) + backups.sort() + # check if last backup is the same + if backups: + latest = os.path.join(backupDir, backups[-1][1]) + if int(deck.modified) == int( + os.stat(latest)[stat.ST_MTIME]): + return + # check integrity + if not deck.s.scalar("pragma integrity_check") == "ok": + raise DeckAccessError(_("Deck is corrupt."), type="corrupt") + # get next num + if not backups: + n = 1 + else: + n = backups[-1][0] + 1 + # do backup + newpath = os.path.join(backupDir, os.path.basename( + re.sub("\.oldanki$", ".backup-%s.oldanki" % n, escp))) + shutil.copy2(path, newpath) + # set mtimes to be identical + if deck.modified: + os.utime(newpath, (deck.modified, deck.modified)) + # remove if over + if len(backups) + 1 > numBackups: + delete = len(backups) + 1 - numBackups + delete = backups[:delete] + for file in delete: + 
os.unlink(os.path.join(backupDir, file[1])) + backup = staticmethod(backup) + +def newCardOrderLabels(): + return { + 0: _("Show new cards in random order"), + 1: _("Show new cards in order added"), + 2: _("Show new cards in reverse order added"), + } + +def newCardSchedulingLabels(): + return { + 0: _("Spread new cards out through reviews"), + 1: _("Show new cards after all other cards"), + 2: _("Show new cards before reviews"), + } + +def revCardOrderLabels(): + return { + 0: _("Review cards from largest interval"), + 1: _("Review cards from smallest interval"), + 2: _("Review cards in order due"), + 3: _("Review cards in random order"), + } + +def failedCardOptionLabels(): + return { + 0: _("Show failed cards soon"), + 1: _("Show failed cards at end"), + 2: _("Show failed cards in 10 minutes"), + 3: _("Show failed cards in 8 hours"), + 4: _("Show failed cards in 3 days"), + 5: _("Custom failed cards handling"), + } diff --git a/oldanki/errors.py b/oldanki/errors.py new file mode 100644 index 000000000..dad83a63f --- /dev/null +++ b/oldanki/errors.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Errors +============================== +""" +__docformat__ = 'restructuredtext' + +class Error(Exception): + def __init__(self, message="", **data): + self.data = data + self._message = message + def __str__(self): + m = self._message + if self.data: + m += ": %s" % repr(self.data) + return m + +class DeckAccessError(Error): + pass + +class ImportFileError(Error): + "Unable to load file to import from." + pass + +class ImportFormatError(Error): + "Unable to determine pattern in text file." + pass + +class ImportEncodingError(Error): + "The file was not in utf-8." + pass + +class ExportFileError(Error): + "Unable to save file." + pass + +class SyncError(Error): + "A problem occurred during syncing." 
+ pass + +# facts, models +class FactInvalidError(Error): + """A fact was invalid/not unique according to the model. +'field' defines the problem field. +'type' defines the type of error ('fieldEmpty', 'fieldNotUnique')""" + pass diff --git a/oldanki/exporting.py b/oldanki/exporting.py new file mode 100644 index 000000000..27c2554d3 --- /dev/null +++ b/oldanki/exporting.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Exporting support +============================== +""" +__docformat__ = 'restructuredtext' + +import itertools, time, re, os, HTMLParser +from operator import itemgetter +from oldanki import DeckStorage +from oldanki.cards import Card +from oldanki.sync import SyncClient, SyncServer, copyLocalMedia +from oldanki.lang import _ +from oldanki.utils import findTag, parseTags, stripHTML, ids2str +from oldanki.tags import tagIds +from oldanki.db import * + +class Exporter(object): + def __init__(self, deck): + self.deck = deck + self.limitTags = [] + self.limitCardIds = [] + + def exportInto(self, path): + self._escapeCount = 0 + file = open(path, "wb") + self.doExport(file) + file.close() + + def escapeText(self, text, removeFields=False): + "Escape newlines and tabs, and strip Anki HTML." + from BeautifulSoup import BeautifulSoup as BS + text = text.replace("\n", "
") + text = text.replace("\t", " " * 8) + if removeFields: + # beautifulsoup is slow + self._escapeCount += 1 + if self._escapeCount % 100 == 0: + self.deck.updateProgress() + try: + s = BS(text) + all = s('span', {'class': re.compile("fm.*")}) + for e in all: + e.replaceWith("".join([unicode(x) for x in e.contents])) + text = unicode(s) + except HTMLParser.HTMLParseError: + pass + return text + + def cardIds(self): + "Return all cards, limited by tags or provided ids." + if self.limitCardIds: + return self.limitCardIds + if not self.limitTags: + cards = self.deck.s.column0("select id from cards") + else: + d = tagIds(self.deck.s, self.limitTags, create=False) + cards = self.deck.s.column0( + "select cardId from cardTags where tagid in %s" % + ids2str(d.values())) + self.count = len(cards) + return cards + +class AnkiExporter(Exporter): + + key = _("Anki Deck (*.oldanki)") + ext = ".oldanki" + + def __init__(self, deck): + Exporter.__init__(self, deck) + self.includeSchedulingInfo = False + self.includeMedia = True + + def exportInto(self, path): + n = 3 + if not self.includeSchedulingInfo: + n += 1 + self.deck.startProgress(n) + self.deck.updateProgress(_("Exporting...")) + try: + os.unlink(path) + except (IOError, OSError): + pass + self.newDeck = DeckStorage.Deck(path) + client = SyncClient(self.deck) + server = SyncServer(self.newDeck) + client.setServer(server) + client.localTime = self.deck.modified + client.remoteTime = 0 + self.deck.s.flush() + # set up a custom change list and sync + lsum = self.localSummary() + rsum = server.summary(0) + self.deck.updateProgress() + payload = client.genPayload((lsum, rsum)) + self.deck.updateProgress() + res = server.applyPayload(payload) + if not self.includeSchedulingInfo: + self.deck.updateProgress() + self.newDeck.s.statement(""" +delete from reviewHistory""") + self.newDeck.s.statement(""" +update cards set +interval = 0, +lastInterval = 0, +due = created, +lastDue = 0, +factor = 2.5, +firstAnswered = 0, +reps = 0, 
+successive = 0, +averageTime = 0, +reviewTime = 0, +youngEase0 = 0, +youngEase1 = 0, +youngEase2 = 0, +youngEase3 = 0, +youngEase4 = 0, +matureEase0 = 0, +matureEase1 = 0, +matureEase2 = 0, +matureEase3 = 0, +matureEase4 = 0, +yesCount = 0, +noCount = 0, +spaceUntil = 0, +type = 2, +relativeDelay = 2, +combinedDue = created, +modified = :now +""", now=time.time()) + self.newDeck.s.statement(""" +delete from stats""") + # media + if self.includeMedia: + server.deck.mediaPrefix = "" + copyLocalMedia(client.deck, server.deck) + # need to save manually + self.newDeck.rebuildCounts() + self.newDeck.updateAllPriorities() + self.exportedCards = self.newDeck.cardCount + self.newDeck.utcOffset = -1 + self.newDeck.s.commit() + self.newDeck.close() + self.deck.finishProgress() + + def localSummary(self): + cardIds = self.cardIds() + cStrIds = ids2str(cardIds) + cards = self.deck.s.all(""" +select id, modified from cards +where id in %s""" % cStrIds) + facts = self.deck.s.all(""" +select facts.id, facts.modified from cards, facts where +facts.id = cards.factId and +cards.id in %s""" % cStrIds) + models = self.deck.s.all(""" +select models.id, models.modified from models, facts where +facts.modelId = models.id and +facts.id in %s""" % ids2str([f[0] for f in facts])) + media = self.deck.s.all(""" +select id, created from media""") + return { + # cards + "cards": cards, + "delcards": [], + # facts + "facts": facts, + "delfacts": [], + # models + "models": models, + "delmodels": [], + # media + "media": media, + "delmedia": [], + } + +class TextCardExporter(Exporter): + + key = _("Text files (*.txt)") + ext = ".txt" + + def __init__(self, deck): + Exporter.__init__(self, deck) + self.includeTags = False + + def doExport(self, file): + ids = self.cardIds() + strids = ids2str(ids) + self.deck.startProgress((len(ids) + 1) / 50) + self.deck.updateProgress(_("Exporting...")) + cards = self.deck.s.all(""" +select cards.question, cards.answer, cards.id from cards +where cards.id in %s 
+order by cards.created""" % strids) + self.deck.updateProgress() + if self.includeTags: + self.cardTags = dict(self.deck.s.all(""" +select cards.id, facts.tags from cards, facts +where cards.factId = facts.id +and cards.id in %s +order by cards.created""" % strids)) + out = u"\n".join(["%s\t%s%s" % ( + self.escapeText(c[0], removeFields=True), + self.escapeText(c[1], removeFields=True), + self.tags(c[2])) + for c in cards]) + if out: + out += "\n" + file.write(out.encode("utf-8")) + self.deck.finishProgress() + + def tags(self, id): + if self.includeTags: + return "\t" + ", ".join(parseTags(self.cardTags[id])) + return "" + +class TextFactExporter(Exporter): + + key = _("Text files (*.txt)") + ext = ".txt" + + def __init__(self, deck): + Exporter.__init__(self, deck) + self.includeTags = False + + def doExport(self, file): + cardIds = self.cardIds() + self.deck.startProgress() + self.deck.updateProgress(_("Exporting...")) + facts = self.deck.s.all(""" +select factId, value, facts.created from facts, fields +where +facts.id in +(select distinct factId from cards +where cards.id in %s) +and facts.id = fields.factId +order by factId, ordinal""" % ids2str(cardIds)) + txt = "" + self.deck.updateProgress() + if self.includeTags: + self.factTags = dict(self.deck.s.all( + "select id, tags from facts where id in %s" % + ids2str([fact[0] for fact in facts]))) + groups = itertools.groupby(facts, itemgetter(0)) + groups = [[x for x in y[1]] for y in groups] + groups = [(group[0][2], + "\t".join([self.escapeText(x[1]) for x in group]) + + self.tags(group[0][0])) + for group in groups] + self.deck.updateProgress() + groups.sort(key=itemgetter(0)) + out = [ret[1] for ret in groups] + self.count = len(out) + out = "\n".join(out) + file.write(out.encode("utf-8")) + self.deck.finishProgress() + + def tags(self, id): + if self.includeTags: + return "\t" + self.factTags[id] + return "" + +# Export modules +########################################################################## + 
+def exporters(): + return ( + (_("Anki Deck (*.oldanki)"), AnkiExporter), + (_("Cards in tab-separated text file (*.txt)"), TextCardExporter), + (_("Facts in tab-separated text file (*.txt)"), TextFactExporter)) diff --git a/oldanki/facts.py b/oldanki/facts.py new file mode 100644 index 000000000..bfa060329 --- /dev/null +++ b/oldanki/facts.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Facts +======== +""" +__docformat__ = 'restructuredtext' + +import time +from oldanki.db import * +from oldanki.errors import * +from oldanki.models import Model, FieldModel, fieldModelsTable +from oldanki.utils import genID, stripHTMLMedia +from oldanki.hooks import runHook + +# Fields in a fact +########################################################################## + +fieldsTable = Table( + 'fields', metadata, + Column('id', Integer, primary_key=True), + Column('factId', Integer, ForeignKey("facts.id"), nullable=False), + Column('fieldModelId', Integer, ForeignKey("fieldModels.id"), + nullable=False), + Column('ordinal', Integer, nullable=False), + Column('value', UnicodeText, nullable=False)) + +class Field(object): + "A field in a fact." 
+ + def __init__(self, fieldModel=None): + if fieldModel: + self.fieldModel = fieldModel + self.ordinal = fieldModel.ordinal + self.value = u"" + self.id = genID() + + def getName(self): + return self.fieldModel.name + name = property(getName) + +mapper(Field, fieldsTable, properties={ + 'fieldModel': relation(FieldModel) + }) + +# Facts: a set of fields and a model +########################################################################## +# mapped in cards.py + +factsTable = Table( + 'facts', metadata, + Column('id', Integer, primary_key=True), + Column('modelId', Integer, ForeignKey("models.id"), nullable=False), + Column('created', Float, nullable=False, default=time.time), + Column('modified', Float, nullable=False, default=time.time), + Column('tags', UnicodeText, nullable=False, default=u""), + # spaceUntil is reused as a html-stripped cache of the fields + Column('spaceUntil', UnicodeText, nullable=False, default=u""), + # obsolete + Column('lastCardId', Integer, ForeignKey( + "cards.id", use_alter=True, name="lastCardIdfk"))) + +class Fact(object): + "A single fact. Fields exposed as dict interface." + + def __init__(self, model=None): + self.model = model + self.id = genID() + if model: + for fm in model.fieldModels: + self.fields.append(Field(fm)) + self.new = True + + def isNew(self): + return getattr(self, 'new', False) + + def keys(self): + return [field.name for field in self.fields] + + def values(self): + return [field.value for field in self.fields] + + def __getitem__(self, key): + try: + return [f.value for f in self.fields if f.name == key][0] + except IndexError: + raise KeyError(key) + + def __setitem__(self, key, value): + try: + [f for f in self.fields if f.name == key][0].value = value + except IndexError: + raise KeyError + + def get(self, key, default): + try: + return self[key] + except (IndexError, KeyError): + return default + + def assertValid(self): + "Raise an error if required fields are empty." 
+ for field in self.fields: + if not self.fieldValid(field): + raise FactInvalidError(type="fieldEmpty", + field=field.name) + + def fieldValid(self, field): + return not (field.fieldModel.required and not field.value.strip()) + + def assertUnique(self, s): + "Raise an error if duplicate fields are found." + for field in self.fields: + if not self.fieldUnique(field, s): + raise FactInvalidError(type="fieldNotUnique", + field=field.name) + + def fieldUnique(self, field, s): + if not field.fieldModel.unique: + return True + req = ("select value from fields " + "where fieldModelId = :fmid and value = :val") + if field.id: + req += " and id != %s" % field.id + return not s.scalar(req, val=field.value, fmid=field.fieldModel.id) + + def focusLost(self, field): + runHook('fact.focusLost', self, field) + + def setModified(self, textChanged=False, deck=None, media=True): + "Mark modified and update cards." + self.modified = time.time() + if textChanged: + if not deck: + # FIXME: compat code + import ankiqt + if not getattr(ankiqt, 'setModWarningShown', None): + import sys; sys.stderr.write( + "plugin needs to pass deck to fact.setModified()") + ankiqt.setModWarningShown = True + deck = ankiqt.mw.deck + assert deck + self.spaceUntil = stripHTMLMedia(u" ".join( + self.values())) + for card in self.cards: + card.rebuildQA(deck) + +# Fact deletions +########################################################################## + +factsDeletedTable = Table( + 'factsDeleted', metadata, + Column('factId', Integer, ForeignKey("facts.id"), + nullable=False), + Column('deletedTime', Float, nullable=False)) diff --git a/oldanki/fonts.py b/oldanki/fonts.py new file mode 100644 index 000000000..d3de3f512 --- /dev/null +++ b/oldanki/fonts.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Fonts - mapping to/from platform-specific fonts 
+============================================================== +""" + +import sys + +# set this to 'all', to get all fonts in a list +policy="platform" + +mapping = [ + [u"Mincho", u"MS Mincho", "win32"], + [u"Mincho", u"MS 明朝", "win32"], + [u"Mincho", u"ヒラギノ明朝 Pro W3", "mac"], + [u"Mincho", u"Kochi Mincho", "linux"], + [u"Mincho", u"東風明朝", "linux"], + ] + +def platform(): + if sys.platform == "win32": + return "win32" + elif sys.platform.startswith("darwin"): + return "mac" + else: + return "linux" + +def toCanonicalFont(family): + "Turn a platform-specific family into a canonical one." + for (s, p, type) in mapping: + if family == p: + return s + return family + +def toPlatformFont(family): + "Turn a canonical font into a platform-specific one." + if policy == "all": + return allFonts(family) + ltype = platform() + for (s, p, type) in mapping: + if family == s and type == ltype: + return p + return family + +def substitutions(): + "Return a tuple mapping canonical fonts to platform ones." 
+ type = platform() + return [(s, p) for (s, p, t) in mapping if t == type] + +def allFonts(family): + ret = ", ".join([p for (s, p, t) in mapping if s == family]) + return ret or family diff --git a/oldanki/graphs.py b/oldanki/graphs.py new file mode 100644 index 000000000..a4b6648ab --- /dev/null +++ b/oldanki/graphs.py @@ -0,0 +1,406 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Graphs of deck statistics +============================== +""" +__docformat__ = 'restructuredtext' + +import os, sys, time +import oldanki.stats +from oldanki.lang import _ + +import datetime + +#colours for graphs +dueYoungC = "#ffb380" +dueMatureC = "#ff5555" +dueCumulC = "#ff8080" + +reviewNewC = "#80ccff" +reviewYoungC = "#3377ff" +reviewMatureC = "#0000ff" +reviewTimeC = "#0fcaff" + +easesNewC = "#80b3ff" +easesYoungC = "#5555ff" +easesMatureC = "#0f5aff" + +addedC = "#b3ff80" +firstC = "#b380ff" +intervC = "#80e5ff" + +# support frozen distribs +if sys.platform.startswith("darwin"): + try: + del os.environ['MATPLOTLIBDATA'] + except: + pass + +try: + from matplotlib.figure import Figure +except UnicodeEncodeError: + # haven't tracked down the cause of this yet, but reloading fixes it + try: + from matplotlib.figure import Figure + except ImportError: + pass +except ImportError: + pass + +def graphsAvailable(): + return 'matplotlib' in sys.modules + +class DeckGraphs(object): + + def __init__(self, deck, width=8, height=3, dpi=75, selective=True): + self.deck = deck + self.stats = None + self.width = width + self.height = height + self.dpi = dpi + self.selective = selective + + def calcStats (self): + if not self.stats: + days = {} + daysYoung = {} + daysMature = {} + months = {} + next = {} + lowestInDay = 0 + self.endOfDay = self.deck.failedCutoff + t = time.time() + young = """ +select interval, combinedDue from cards c +where relativeDelay between 0 and 1 and type >= 0 and interval <= 
21""" + mature = """ +select interval, combinedDue +from cards c where relativeDelay = 1 and type >= 0 and interval > 21""" + if self.selective: + young = self.deck._cardLimit("revActive", "revInactive", + young) + mature = self.deck._cardLimit("revActive", "revInactive", + mature) + young = self.deck.s.all(young) + mature = self.deck.s.all(mature) + for (src, dest) in [(young, daysYoung), + (mature, daysMature)]: + for (interval, due) in src: + day=int(round(interval)) + days[day] = days.get(day, 0) + 1 + indays = int(((due - self.endOfDay) / 86400.0) + 1) + next[indays] = next.get(indays, 0) + 1 # type-agnostic stats + dest[indays] = dest.get(indays, 0) + 1 # type-specific stats + if indays < lowestInDay: + lowestInDay = indays + self.stats = {} + self.stats['next'] = next + self.stats['days'] = days + self.stats['daysByType'] = {'young': daysYoung, + 'mature': daysMature} + self.stats['months'] = months + self.stats['lowestInDay'] = lowestInDay + + dayReps = self.deck.s.all(""" +select day, + matureEase0+matureEase1+matureEase2+matureEase3+matureEase4 as matureReps, + reps-(newEase0+newEase1+newEase2+newEase3+newEase4) as combinedYoungReps, + reps as combinedNewReps +from stats +where type = 1""") + + dayTimes = self.deck.s.all(""" +select day, reviewTime as reviewTime +from stats +where type = 1""") + + todaydt = self.deck._dailyStats.day + for dest, source in [("dayRepsNew", "combinedNewReps"), + ("dayRepsYoung", "combinedYoungReps"), + ("dayRepsMature", "matureReps")]: + self.stats[dest] = dict( + map(lambda dr: (-(todaydt -datetime.date( + *(int(x)for x in dr["day"].split("-")))).days, dr[source]), dayReps)) + + self.stats['dayTimes'] = dict( + map(lambda dr: (-(todaydt -datetime.date( + *(int(x)for x in dr["day"].split("-")))).days, dr["reviewTime"]/60.0), dayTimes)) + + def nextDue(self, days=30): + self.calcStats() + fig = Figure(figsize=(self.width, self.height), dpi=self.dpi) + graph = fig.add_subplot(111) + dayslists = [self.stats['next'], 
self.stats['daysByType']['mature']] + + for dayslist in dayslists: + self.addMissing(dayslist, self.stats['lowestInDay'], days) + + argl = [] + + for dayslist in dayslists: + dl = [x for x in dayslist.items() if x[0] <= days] + argl.extend(list(self.unzip(dl))) + + self.varGraph(graph, days, [dueYoungC, dueMatureC], *argl) + + cheat = fig.add_subplot(111) + b1 = cheat.bar(0, 0, color = dueYoungC) + b2 = cheat.bar(1, 0, color = dueMatureC) + + cheat.legend([b1, b2], [ + "Young", + "Mature"], loc='upper right') + + graph.set_xlim(xmin=self.stats['lowestInDay'], xmax=days+1) + graph.set_xlabel("Day (0 = today)") + graph.set_ylabel("Cards Due") + + return fig + + def workDone(self, days=30): + self.calcStats() + + for type in ["dayRepsNew", "dayRepsYoung", "dayRepsMature"]: + self.addMissing(self.stats[type], -days, 0) + + fig = Figure(figsize=(self.width, self.height), dpi=self.dpi) + graph = fig.add_subplot(111) + + args = sum((self.unzip(self.stats[type].items(), limit=days, reverseLimit=True) for type in ["dayRepsMature", "dayRepsYoung", "dayRepsNew"][::-1]), []) + + self.varGraph(graph, days, [reviewNewC, reviewYoungC, reviewMatureC], *args) + + cheat = fig.add_subplot(111) + b1 = cheat.bar(-3, 0, color = reviewNewC) + b2 = cheat.bar(-4, 0, color = reviewYoungC) + b3 = cheat.bar(-5, 0, color = reviewMatureC) + + cheat.legend([b1, b2, b3], [ + "New", + "Young", + "Mature"], loc='upper left') + + graph.set_xlim(xmin=-days+1, xmax=1) + graph.set_ylim(ymax=max(max(a for a in args[1::2])) + 10) + graph.set_xlabel("Day (0 = today)") + graph.set_ylabel("Cards Answered") + + return fig + + def timeSpent(self, days=30): + self.calcStats() + fig = Figure(figsize=(self.width, self.height), dpi=self.dpi) + times = self.stats['dayTimes'] + self.addMissing(times, -days+1, 0) + times = self.unzip([(day,y) for (day,y) in times.items() + if day + days >= 0]) + graph = fig.add_subplot(111) + self.varGraph(graph, days, reviewTimeC, *times) + graph.set_xlim(xmin=-days+1, xmax=1) + 
graph.set_ylim(ymax=max(a for a in times[1]) + 0.1) + graph.set_xlabel("Day (0 = today)") + graph.set_ylabel("Minutes") + return fig + + def cumulativeDue(self, days=30): + self.calcStats() + fig = Figure(figsize=(self.width, self.height), dpi=self.dpi) + graph = fig.add_subplot(111) + self.addMissing(self.stats['next'], 0, days-1) + dl = [x for x in self.stats['next'].items() if x[0] <= days] + (x, y) = self.unzip(dl) + count=0 + y = list(y) + for i in range(len(x)): + count = count + y[i] + if i == 0: + continue + y[i] = count + if x[i] > days: + break + self._filledGraph(graph, days, dueCumulC, 1, x, y) + graph.set_xlim(xmin=self.stats['lowestInDay'], xmax=days-1) + graph.set_ylim(ymax=graph.get_ylim()[1]+10) + graph.set_xlabel("Day (0 = today)") + graph.set_ylabel("Cards Due") + return fig + + def intervalPeriod(self, days=30): + self.calcStats() + fig = Figure(figsize=(self.width, self.height), dpi=self.dpi) + ints = self.stats['days'] + self.addMissing(ints, 0, days) + intervals = self.unzip(ints.items(), limit=days) + graph = fig.add_subplot(111) + self.varGraph(graph, days, intervC, *intervals) + graph.set_xlim(xmin=0, xmax=days+1) + graph.set_xlabel("Card Interval") + graph.set_ylabel("Number of Cards") + return fig + + def addedRecently(self, numdays=30, attr='created'): + self.calcStats() + days = {} + fig = Figure(figsize=(self.width, self.height), dpi=self.dpi) + limit = self.endOfDay - (numdays) * 86400 + res = self.deck.s.column0("select %s from cards where %s >= %f" % + (attr, attr, limit)) + for r in res: + d = int((r - self.endOfDay) / 86400.0) + days[d] = days.get(d, 0) + 1 + self.addMissing(days, -numdays+1, 0) + graph = fig.add_subplot(111) + intervals = self.unzip(days.items()) + if attr == 'created': + colour = addedC + else: + colour = firstC + self.varGraph(graph, numdays, colour, *intervals) + graph.set_xlim(xmin=-numdays+1, xmax=1) + graph.set_xlabel("Day (0 = today)") + if attr == 'created': + graph.set_ylabel("Cards Added") + else: + 
graph.set_ylabel("Cards First Answered") + return fig + + def addMissing(self, dic, min, max): + for i in range(min, max+1): + if not i in dic: + dic[i] = 0 + + def unzip(self, tuples, fillFix=True, limit=None, reverseLimit=False): + tuples.sort(cmp=lambda x,y: cmp(x[0], y[0])) + if limit: + if reverseLimit: + tuples = tuples[-limit:] + else: + tuples = tuples[:limit+1] + new = zip(*tuples) + return new + + def varGraph(self, graph, days, colours=["b"], *args): + if len(args[0]) < 120: + return self.barGraph(graph, days, colours, *args) + else: + return self.filledGraph(graph, days, colours, *args) + + def filledGraph(self, graph, days, colours=["b"], *args): + self._filledGraph(graph, days, colours, 0, *args) + + def _filledGraph(self, graph, days, colours, lw, *args): + if isinstance(colours, str): + colours = [colours] + for triplet in [(args[n], args[n + 1], colours[n / 2]) for n in range(0, len(args), 2)]: + x = list(triplet[0]) + y = list(triplet[1]) + c = triplet[2] + lowest = 99999 + highest = -lowest + for i in range(len(x)): + if x[i] < lowest: + lowest = x[i] + if x[i] > highest: + highest = x[i] + # ensure the filled area reaches the bottom + x.insert(0, lowest - 1) + y.insert(0, 0) + x.append(highest + 1) + y.append(0) + # plot + graph.fill(x, y, c, lw=lw) + graph.grid(True) + graph.set_ylim(ymin=0, ymax=max(2, graph.get_ylim()[1])) + + def barGraph(self, graph, days, colours, *args): + if isinstance(colours, str): + colours = [colours] + lim = None + for triplet in [(args[n], args[n + 1], colours[n / 2]) for n in range(0, len(args), 2)]: + x = list(triplet[0]) + y = list(triplet[1]) + c = triplet[2] + lw = 0 + if lim is None: + lim = (x[0], x[-1]) + length = (lim[1] - lim[0]) + if len(args) > 4: + if length <= 30: + lw = 1 + else: + if length <= 90: + lw = 1 + lowest = 99999 + highest = -lowest + for i in range(len(x)): + if x[i] < lowest: + lowest = x[i] + if x[i] > highest: + highest = x[i] + graph.bar(x, y, color=c, width=1, linewidth=lw) + 
graph.grid(True) + graph.set_ylim(ymin=0, ymax=max(2, graph.get_ylim()[1])) + import numpy as np + if length > 10: + step = length / 10.0 + # python's range() won't accept float step args, so we do it manually + if lim[0] < 0: + ticks = [int(lim[1] - step * x) for x in range(10)] + else: + ticks = [int(lim[0] + step * x) for x in range(10)] + else: + ticks = list(xrange(lim[0], lim[1]+1)) + graph.set_xticks(np.array(ticks) + 0.5) + graph.set_xticklabels([str(int(x)) for x in ticks]) + for tick in graph.xaxis.get_major_ticks(): + tick.tick1On = False + tick.tick2On = False + + def easeBars(self): + fig = Figure(figsize=(3, 3), dpi=self.dpi) + graph = fig.add_subplot(111) + types = ("new", "young", "mature") + enum = 5 + offset = 0 + arrsize = 16 + arr = [0] * arrsize + n = 0 + colours = [easesNewC, easesYoungC, easesMatureC] + bars = [] + gs = oldanki.stats.globalStats(self.deck) + for type in types: + total = (getattr(gs, type + "Ease0") + + getattr(gs, type + "Ease1") + + getattr(gs, type + "Ease2") + + getattr(gs, type + "Ease3") + + getattr(gs, type + "Ease4")) + setattr(gs, type + "Ease1", getattr(gs, type + "Ease0") + + getattr(gs, type + "Ease1")) + setattr(gs, type + "Ease0", -1) + for e in range(1, enum): + try: + arr[e+offset] = (getattr(gs, type + "Ease%d" % e) + / float(total)) * 100 + 1 + except ZeroDivisionError: + arr[e+offset] = 0 + bars.append(graph.bar(range(arrsize), arr, width=1.0, + color=colours[n], align='center')) + arr = [0] * arrsize + offset += 5 + n += 1 + x = ([""] + [str(n) for n in range(1, enum)]) * 3 + graph.legend([p[0] for p in bars], ("New", + "Young", + "Mature"), + 'upper left') + graph.set_ylim(ymax=100) + graph.set_xlim(xmax=15) + graph.set_xticks(range(arrsize)) + graph.set_xticklabels(x) + graph.set_ylabel("% of Answers") + graph.set_xlabel("Answer Buttons") + graph.grid(True) + return fig diff --git a/oldanki/history.py b/oldanki/history.py new file mode 100644 index 000000000..e243af7fb --- /dev/null +++ 
b/oldanki/history.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +History - keeping a record of all reviews +========================================== + +If users run 'check db', duplicate records will be inserted into the DB - I +really should have used the time stamp as the key. You can remove them by +keeping the lowest id for any given timestamp. +""" + +__docformat__ = 'restructuredtext' + +import time +from oldanki.db import * + +reviewHistoryTable = Table( + 'reviewHistory', metadata, + Column('cardId', Integer, nullable=False), + Column('time', Float, nullable=False, default=time.time), + Column('lastInterval', Float, nullable=False), + Column('nextInterval', Float, nullable=False), + Column('ease', Integer, nullable=False), + Column('delay', Float, nullable=False), + Column('lastFactor', Float, nullable=False), + Column('nextFactor', Float, nullable=False), + Column('reps', Float, nullable=False), + Column('thinkingTime', Float, nullable=False), + Column('yesCount', Float, nullable=False), + Column('noCount', Float, nullable=False), + PrimaryKeyConstraint("cardId", "time")) + +class CardHistoryEntry(object): + "Create after rescheduling card." 
+ + def __init__(self, card=None, ease=None, delay=None): + if not card: + return + self.cardId = card.id + self.lastInterval = card.lastInterval + self.nextInterval = card.interval + self.lastFactor = card.lastFactor + self.nextFactor = card.factor + self.reps = card.reps + self.yesCount = card.yesCount + self.noCount = card.noCount + self.ease = ease + self.delay = delay + self.thinkingTime = card.thinkingTime() + + def writeSQL(self, s): + s.statement(""" +insert into reviewHistory +(cardId, lastInterval, nextInterval, ease, delay, lastFactor, +nextFactor, reps, thinkingTime, yesCount, noCount, time) +values ( +:cardId, :lastInterval, :nextInterval, :ease, :delay, +:lastFactor, :nextFactor, :reps, :thinkingTime, :yesCount, :noCount, +:time)""", + cardId=self.cardId, + lastInterval=self.lastInterval, + nextInterval=self.nextInterval, + ease=self.ease, + delay=self.delay, + lastFactor=self.lastFactor, + nextFactor=self.nextFactor, + reps=self.reps, + thinkingTime=self.thinkingTime, + yesCount=self.yesCount, + noCount=self.noCount, + time=time.time()) + +mapper(CardHistoryEntry, reviewHistoryTable) diff --git a/oldanki/hooks.py b/oldanki/hooks.py new file mode 100644 index 000000000..6fa146ed8 --- /dev/null +++ b/oldanki/hooks.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Hooks - hook management and tools for extending Anki +============================================================================== + +To find available hooks, grep for runHook in the source code. + +Instrumenting allows you to modify functions that don't have hooks available. +If you call wrap() with pos='around', the original function will not be called +automatically but can be called with _old(). +""" + +# Hooks +############################################################################## + +_hooks = {} + +def runHook(hook, *args): + "Run all functions on hook." 
+ hook = _hooks.get(hook, None) + if hook: + for func in hook: + func(*args) + +def runFilter(hook, arg, *args): + hook = _hooks.get(hook, None) + if hook: + for func in hook: + arg = func(arg, *args) + return arg + +def addHook(hook, func): + "Add a function to hook. Ignore if already on hook." + if not _hooks.get(hook, None): + _hooks[hook] = [] + if func not in _hooks[hook]: + _hooks[hook].append(func) + +def removeHook(hook, func): + "Remove a function if is on hook." + hook = _hooks.get(hook, []) + if func in hook: + hook.remove(func) + +def hookEmpty(hook): + return not _hooks.get(hook) + +# Instrumenting +############################################################################## + +def wrap(old, new, pos="after"): + "Override an existing function." + def repl(*args, **kwargs): + if pos == "after": + old(*args, **kwargs) + return new(*args, **kwargs) + elif pos == "before": + new(*args, **kwargs) + return old(*args, **kwargs) + else: + return new(_old=old, *args, **kwargs) + return repl diff --git a/oldanki/lang.py b/oldanki/lang.py new file mode 100644 index 000000000..578b6ee1c --- /dev/null +++ b/oldanki/lang.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Internationalisation +===================== +""" +__docformat__ = 'restructuredtext' + +import os, sys +import gettext +import threading + +threadLocal = threading.local() + +# global defaults +currentLang = None +currentTranslation = None + +def localTranslation(): + "Return the translation local to this thread, or the default." 
+ if getattr(threadLocal, 'currentTranslation', None): + return threadLocal.currentTranslation + else: + return currentTranslation + +def _(str): + return localTranslation().ugettext(str) + +def ngettext(single, plural, n): + return localTranslation().ungettext(single, plural, n) + +def setLang(lang, local=True): + base = os.path.dirname(os.path.abspath(__file__)) + localeDir = os.path.join(base, "locale") + if not os.path.exists(localeDir): + localeDir = os.path.join( + os.path.dirname(sys.argv[0]), "locale") + trans = gettext.translation('libanki', localeDir, + languages=[lang], + fallback=True) + if local: + threadLocal.currentLang = lang + threadLocal.currentTranslation = trans + else: + global currentLang, currentTranslation + currentLang = lang + currentTranslation = trans + +def getLang(): + "Return the language local to this thread, or the default." + if getattr(threadLocal, 'currentLang', None): + return threadLocal.currentLang + else: + return currentLang + +if not currentTranslation: + setLang("en_US", local=False) diff --git a/oldanki/latex.py b/oldanki/latex.py new file mode 100644 index 000000000..76041ee36 --- /dev/null +++ b/oldanki/latex.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Latex support +============================== +""" +__docformat__ = 'restructuredtext' + +import re, tempfile, os, sys, shutil, cgi, subprocess +from oldanki.utils import genID, checksum, call +from oldanki.hooks import addHook +from htmlentitydefs import entitydefs +from oldanki.lang import _ + +latexDviPngCmd = ["dvipng", "-D", "200", "-T", "tight"] + +regexps = { + "standard": re.compile(r"\[latex\](.+?)\[/latex\]", re.DOTALL | re.IGNORECASE), + "expression": re.compile(r"\[\$\](.+?)\[/\$\]", re.DOTALL | re.IGNORECASE), + "math": re.compile(r"\[\$\$\](.+?)\[/\$\$\]", re.DOTALL | re.IGNORECASE), + } + +tmpdir = tempfile.mkdtemp(prefix="oldanki") + +# add 
standard tex install location to osx +if sys.platform == "darwin": + os.environ['PATH'] += ":/usr/texbin" + +def renderLatex(deck, text, build=True): + "Convert TEXT with embedded latex tags to image links." + for match in regexps['standard'].finditer(text): + text = text.replace(match.group(), imgLink(deck, match.group(1), + build)) + for match in regexps['expression'].finditer(text): + text = text.replace(match.group(), imgLink( + deck, "$" + match.group(1) + "$", build)) + for match in regexps['math'].finditer(text): + text = text.replace(match.group(), imgLink( + deck, + "\\begin{displaymath}" + match.group(1) + "\\end{displaymath}", + build)) + return text + +def stripLatex(text): + for match in regexps['standard'].finditer(text): + text = text.replace(match.group(), "") + for match in regexps['expression'].finditer(text): + text = text.replace(match.group(), "") + for match in regexps['math'].finditer(text): + text = text.replace(match.group(), "") + return text + +def latexImgFile(deck, latexCode): + key = checksum(latexCode) + return "latex-%s.png" % key + +def mungeLatex(deck, latex): + "Convert entities, fix newlines, convert to utf8, and wrap pre/postamble." 
+ for match in re.compile("&([a-z]+);", re.IGNORECASE).finditer(latex): + if match.group(1) in entitydefs: + latex = latex.replace(match.group(), entitydefs[match.group(1)]) + latex = re.sub("", "\n", latex) + latex = (deck.getVar("latexPre") + "\n" + + latex + "\n" + + deck.getVar("latexPost")) + latex = latex.encode("utf-8") + return latex + +def buildImg(deck, latex): + log = open(os.path.join(tmpdir, "latex_log.txt"), "w+") + texpath = os.path.join(tmpdir, "tmp.tex") + texfile = file(texpath, "w") + texfile.write(latex) + texfile.close() + # make sure we have a valid mediaDir + mdir = deck.mediaDir(create=True) + oldcwd = os.getcwd() + if sys.platform == "win32": + si = subprocess.STARTUPINFO() + try: + si.dwFlags |= subprocess.STARTF_USESHOWWINDOW + except: + si.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW + else: + si = None + try: + os.chdir(tmpdir) + def errmsg(type): + msg = _("Error executing %s.\n") % type + try: + log = open(os.path.join(tmpdir, "latex_log.txt")).read() + msg += "
" + cgi.escape(log) + "
" + except: + msg += _("Have you installed latex and dvipng?") + pass + return msg + if call(["latex", "-interaction=nonstopmode", + "tmp.tex"], stdout=log, stderr=log, startupinfo=si): + return (False, errmsg("latex")) + if call(latexDviPngCmd + ["tmp.dvi", "-o", "tmp.png"], + stdout=log, stderr=log, startupinfo=si): + return (False, errmsg("dvipng")) + # add to media + target = latexImgFile(deck, latex) + shutil.copy2(os.path.join(tmpdir, "tmp.png"), + os.path.join(mdir, target)) + return (True, target) + finally: + os.chdir(oldcwd) + +def imageForLatex(deck, latex, build=True): + "Return an image that represents 'latex', building if necessary." + imageFile = latexImgFile(deck, latex) + ok = True + if build and (not imageFile or not os.path.exists(imageFile)): + (ok, imageFile) = buildImg(deck, latex) + if not ok: + return (False, imageFile) + return (True, imageFile) + +def imgLink(deck, latex, build=True): + "Parse LATEX and return a HTML image representing the output." + munged = mungeLatex(deck, latex) + (ok, img) = imageForLatex(deck, munged, build) + if ok: + return '%s' % (img, latex) + else: + return img + +def formatQA(html, type, cid, mid, fact, tags, cm, deck): + return renderLatex(deck, html) + +# setup q/a filter +addHook("formatQA", formatQA) diff --git a/oldanki/media.py b/oldanki/media.py new file mode 100644 index 000000000..aea124181 --- /dev/null +++ b/oldanki/media.py @@ -0,0 +1,286 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Media support +==================== +""" +__docformat__ = 'restructuredtext' + +import os, shutil, re, urllib2, time, tempfile, unicodedata, urllib +from oldanki.db import * +from oldanki.utils import checksum, genID +from oldanki.lang import _ + +# other code depends on this order, so don't reorder +regexps = ("(?i)(\[sound:([^]]+)\])", + "(?i)(]+src=[\"']?([^\"'>]+)[\"']?[^>]*>)") + +# Tables 
+########################################################################## + +mediaTable = Table( + 'media', metadata, + Column('id', Integer, primary_key=True, nullable=False), + Column('filename', UnicodeText, nullable=False), + # reused as reference count + Column('size', Integer, nullable=False), + # treated as modification date, not creation date + Column('created', Float, nullable=False), + # reused as md5sum. empty string if file doesn't exist on disk + Column('originalPath', UnicodeText, nullable=False, default=u""), + # older versions stored original filename here, so we'll leave it for now + # in case we add a feature to rename media back to its original name. in + # the future we may want to zero this to save space + Column('description', UnicodeText, nullable=False, default=u"")) + +class Media(object): + pass + +mapper(Media, mediaTable) + +mediaDeletedTable = Table( + 'mediaDeleted', metadata, + Column('mediaId', Integer, ForeignKey("cards.id"), + nullable=False), + Column('deletedTime', Float, nullable=False)) + +# File handling +########################################################################## + +def copyToMedia(deck, path): + """Copy PATH to MEDIADIR, and return new filename. + +If a file with the same md5sum exists in the DB, return that. +If a file with the same name exists, return a unique name. 
+This does not modify the media table.""" + # see if have duplicate contents + newpath = deck.s.scalar( + "select filename from media where originalPath = :cs", + cs=checksum(open(path, "rb").read())) + # check if this filename already exists + if not newpath: + base = os.path.basename(path) + mdir = deck.mediaDir(create=True) + newpath = uniquePath(mdir, base) + shutil.copy2(path, newpath) + return os.path.basename(newpath) + +def uniquePath(dir, base): + # remove any dangerous characters + base = re.sub(r"[][<>:/\\&]", "", base) + # find a unique name + (root, ext) = os.path.splitext(base) + def repl(match): + n = int(match.group(1)) + return " (%d)" % (n+1) + while True: + path = os.path.join(dir, root + ext) + if not os.path.exists(path): + break + reg = " \((\d+)\)$" + if not re.search(reg, root): + root = root + " (1)" + else: + root = re.sub(reg, repl, root) + return path + +# DB routines +########################################################################## + +def updateMediaCount(deck, file, count=1): + mdir = deck.mediaDir() + if deck.s.scalar( + "select 1 from media where filename = :file", file=file): + deck.s.statement( + "update media set size = size + :c, created = :t where filename = :file", + file=file, c=count, t=time.time()) + elif count > 0: + try: + sum = unicode( + checksum(open(os.path.join(mdir, file), "rb").read())) + except: + sum = u"" + deck.s.statement(""" +insert into media (id, filename, size, created, originalPath, description) +values (:id, :file, :c, :mod, :sum, '')""", + id=genID(), file=file, c=count, mod=time.time(), + sum=sum) + +def removeUnusedMedia(deck): + ids = deck.s.column0("select id from media where size = 0") + for id in ids: + deck.s.statement("insert into mediaDeleted values (:id, :t)", + id=id, t=time.time()) + deck.s.statement("delete from media where size = 0") + +# String manipulation +########################################################################## + +def mediaFiles(string, remote=False): + l = 
[] + for reg in regexps: + for (full, fname) in re.findall(reg, string): + isLocal = not re.match("(https?|ftp)://", fname.lower()) + if not remote and isLocal: + l.append(fname) + elif remote and not isLocal: + l.append(fname) + return l + +def stripMedia(txt): + for reg in regexps: + txt = re.sub(reg, "", txt) + return txt + +def escapeImages(string): + def repl(match): + tag = match.group(1) + fname = match.group(2) + if re.match("(https?|ftp)://", fname): + return tag + return tag.replace( + fname, urllib.quote(fname.encode("utf-8"))) + return re.sub(regexps[1], repl, string) + +# Rebuilding DB +########################################################################## + +def rebuildMediaDir(deck, delete=False, dirty=True): + mdir = deck.mediaDir() + if not mdir: + return (0, 0) + deck.startProgress(title=_("Check Media DB")) + # set all ref counts to 0 + deck.s.statement("update media set size = 0") + # look through cards for media references + refs = {} + normrefs = {} + def norm(s): + if isinstance(s, unicode): + return unicodedata.normalize('NFD', s) + return s + for (question, answer) in deck.s.all( + "select question, answer from cards"): + for txt in (question, answer): + for f in mediaFiles(txt): + if f in refs: + refs[f] += 1 + else: + refs[f] = 1 + normrefs[norm(f)] = True + # update ref counts + for (file, count) in refs.items(): + updateMediaCount(deck, file, count) + # find unused media + unused = [] + for file in os.listdir(mdir): + path = os.path.join(mdir, file) + if not os.path.isfile(path): + # ignore directories + continue + nfile = norm(file) + if nfile not in normrefs: + unused.append(file) + # optionally delete + if delete: + for f in unused: + path = os.path.join(mdir, f) + os.unlink(path) + # remove entries in db for unused media + removeUnusedMedia(deck) + # check md5s are up to date + update = [] + for (file, created, md5) in deck.s.all( + "select filename, created, originalPath from media"): + path = os.path.join(mdir, file) + if not 
os.path.exists(path): + if md5: + update.append({'f':file, 'sum':u"", 'c':time.time()}) + else: + sum = unicode( + checksum(open(os.path.join(mdir, file), "rb").read())) + if md5 != sum: + update.append({'f':file, 'sum':sum, 'c':time.time()}) + if update: + deck.s.statements(""" +update media set originalPath = :sum, created = :c where filename = :f""", + update) + # update deck and get return info + if dirty: + deck.flushMod() + nohave = deck.s.column0("select filename from media where originalPath = ''") + deck.finishProgress() + return (nohave, unused) + +# Download missing +########################################################################## + +def downloadMissing(deck): + urlbase = deck.getVar("mediaURL") + if not urlbase: + return None + mdir = deck.mediaDir(create=True) + deck.startProgress() + missing = 0 + grabbed = 0 + for c, (f, sum) in enumerate(deck.s.all( + "select filename, originalPath from media")): + path = os.path.join(mdir, f) + if not os.path.exists(path): + try: + rpath = urlbase + f + url = urllib2.urlopen(rpath) + open(f, "wb").write(url.read()) + grabbed += 1 + except: + if sum: + # the file is supposed to exist + deck.finishProgress() + return (False, rpath) + else: + # ignore and keep going + missing += 1 + deck.updateProgress(label=_("File %d...") % (grabbed+missing)) + deck.finishProgress() + return (True, grabbed, missing) + +# Convert remote links to local ones +########################################################################## + +def downloadRemote(deck): + mdir = deck.mediaDir(create=True) + refs = {} + deck.startProgress() + for (question, answer) in deck.s.all( + "select question, answer from cards"): + for txt in (question, answer): + for f in mediaFiles(txt, remote=True): + refs[f] = True + + tmpdir = tempfile.mkdtemp(prefix="oldanki") + failed = [] + passed = [] + for c, link in enumerate(refs.keys()): + try: + path = os.path.join(tmpdir, os.path.basename(link)) + url = urllib2.urlopen(link) + open(path, 
"wb").write(url.read()) + newpath = copyToMedia(deck, path) + passed.append([link, newpath]) + except: + failed.append(link) + deck.updateProgress(label=_("Download %d...") % c) + for (url, name) in passed: + deck.s.statement( + "update fields set value = replace(value, :url, :name)", + url=url, name=name) + deck.updateProgress(label=_("Updating references...")) + deck.updateProgress(label=_("Updating cards...")) + # rebuild entire q/a cache + for m in deck.models: + deck.updateCardsFromModel(m, dirty=True) + deck.finishProgress() + deck.flushMod() + return (passed, failed) diff --git a/oldanki/models.py b/oldanki/models.py new file mode 100644 index 000000000..e825e4710 --- /dev/null +++ b/oldanki/models.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Model - define the way in which facts are added and shown +========================================================== + +- Field models +- Card models +- Models + +""" + +import time, re +from sqlalchemy.ext.orderinglist import ordering_list +from oldanki.db import * +from oldanki.utils import genID, canonifyTags +from oldanki.fonts import toPlatformFont +from oldanki.utils import parseTags, hexifyID, checksum, stripHTML +from oldanki.lang import _ +from oldanki.hooks import runFilter +from oldanki.template import render +from copy import copy + +def alignmentLabels(): + return { + 0: _("Center"), + 1: _("Left"), + 2: _("Right"), + } + +# Field models +########################################################################## + +fieldModelsTable = Table( + 'fieldModels', metadata, + Column('id', Integer, primary_key=True), + Column('ordinal', Integer, nullable=False), + Column('modelId', Integer, ForeignKey('models.id'), nullable=False), + Column('name', UnicodeText, nullable=False), + Column('description', UnicodeText, nullable=False, default=u""), # obsolete + # reused as RTL marker + Column('features', 
UnicodeText, nullable=False, default=u""), + Column('required', Boolean, nullable=False, default=True), + Column('unique', Boolean, nullable=False, default=True), # sqlite keyword + Column('numeric', Boolean, nullable=False, default=False), + # display + Column('quizFontFamily', UnicodeText, default=u"Arial"), + Column('quizFontSize', Integer, default=20), + Column('quizFontColour', String(7)), + Column('editFontFamily', UnicodeText, default=u"1"), # reused as
 toggle
+    Column('editFontSize', Integer, default=20))
+
+class FieldModel(object):
+    "The definition of one field in a fact."
+
+    # NOTE(review): legacy Anki 1.2 (Python 2) code carried verbatim for the
+    # upgrade path; only comments are touched here.
+
+    def __init__(self, name=u"", required=True, unique=True):
+        self.name = name
+        self.required = required
+        self.unique = unique
+        # client-generated id, not left to the DB to assign
+        self.id = genID()
+
+    def copy(self):
+        # Copy every mapped column, then detach the clone: fresh id, no
+        # parent model, so it can be attached to a different Model.
+        new = FieldModel()
+        for p in class_mapper(FieldModel).iterate_properties:
+            setattr(new, p.key, getattr(self, p.key))
+        new.id = genID()
+        new.model = None
+        return new
+
+# classic (pre-declarative) SQLAlchemy mapping onto fieldModels
+mapper(FieldModel, fieldModelsTable)
+
+# Card models
+##########################################################################
+
+# Schema for card templates; several columns are obsolete or repurposed,
+# as noted inline (kept for on-disk compatibility with 1.2 decks).
+cardModelsTable = Table(
+    'cardModels', metadata,
+    Column('id', Integer, primary_key=True),
+    Column('ordinal', Integer, nullable=False),
+    Column('modelId', Integer, ForeignKey('models.id'), nullable=False),
+    Column('name', UnicodeText, nullable=False),
+    Column('description', UnicodeText, nullable=False, default=u""), # obsolete
+    Column('active', Boolean, nullable=False, default=True),
+    # formats: question/answer/last(not used)
+    Column('qformat', UnicodeText, nullable=False),
+    Column('aformat', UnicodeText, nullable=False),
+    Column('lformat', UnicodeText),
+    # question/answer editor format (not used yet)
+    Column('qedformat', UnicodeText),
+    Column('aedformat', UnicodeText),
+    Column('questionInAnswer', Boolean, nullable=False, default=False),
+    # unused
+    Column('questionFontFamily', UnicodeText, default=u"Arial"),
+    Column('questionFontSize', Integer, default=20),
+    Column('questionFontColour', String(7), default=u"#000000"),
+    # used for both question & answer
+    Column('questionAlign', Integer, default=0),
+    # unused
+    Column('answerFontFamily', UnicodeText, default=u"Arial"),
+    Column('answerFontSize', Integer, default=20),
+    Column('answerFontColour', String(7), default=u"#000000"),
+    Column('answerAlign', Integer, default=0),
+    Column('lastFontFamily', UnicodeText, default=u"Arial"),
+    Column('lastFontSize', Integer, default=20),
+    # used as background colour
+    Column('lastFontColour', String(7), default=u"#FFFFFF"),
+    Column('editQuestionFontFamily', UnicodeText, default=None),
+    Column('editQuestionFontSize', Integer, default=None),
+    Column('editAnswerFontFamily', UnicodeText, default=None),
+    Column('editAnswerFontSize', Integer, default=None),
+    # empty answer
+    Column('allowEmptyAnswer', Boolean, nullable=False, default=True),
+    Column('typeAnswer', UnicodeText, nullable=False, default=u""))
+
+class CardModel(object):
+    """Represents how to generate the front and back of a card."""
+    def __init__(self, name=u"", qformat=u"q", aformat=u"a", active=True):
+        # qformat/aformat are template strings rendered by formatQA()
+        self.name = name
+        self.qformat = qformat
+        self.aformat = aformat
+        self.active = active
+        # client-generated id, mirroring FieldModel.__init__
+        self.id = genID()
+
+    def copy(self):
+        # Column-by-column clone, detached from its parent model.
+        new = CardModel()
+        for p in class_mapper(CardModel).iterate_properties:
+            setattr(new, p.key, getattr(self, p.key))
+        new.id = genID()
+        new.model = None
+        return new
+
+mapper(CardModel, cardModelsTable)
+
+def formatQA(cid, mid, fact, tags, cm, deck):
+    "Return a dict of {id, question, answer}"
+    d = {'id': cid}
+    fields = {}
+    for (k, v) in fact.items():
+        fields["text:"+k] = stripHTML(v[1])
+        if v[1]:
+            fields[k] = '%s' % (
+                hexifyID(v[0]), v[1])
+        else:
+            fields[k] = u""
+    fields['tags'] = tags[0]
+    fields['Tags'] = tags[0]
+    fields['modelTags'] = tags[1]
+    fields['cardModel'] = tags[2]
+    # render q & a
+    ret = []
+    for (type, format) in (("question", cm.qformat),
+                           ("answer", cm.aformat)):
+        # convert old style
+        format = re.sub("%\((.+?)\)s", "{{\\1}}", format)
+        # allow custom rendering functions & info
+        fields = runFilter("prepareFields", fields, cid, mid, fact, tags, cm, deck)
+        html = render(format, fields)
+        d[type] = runFilter("formatQA", html, type, cid, mid, fact, tags, cm, deck)
+    return d
+
+# Model table
+##########################################################################
+
+# Schema for models (note the deferred FK to decks to break the
+# decks<->models creation cycle via use_alter).
+modelsTable = Table(
+    'models', metadata,
+    Column('id', Integer, primary_key=True),
+    Column('deckId', Integer, ForeignKey("decks.id", use_alter=True, name="deckIdfk")),
+    Column('created', Float, nullable=False, default=time.time),
+    Column('modified', Float, nullable=False, default=time.time),
+    Column('tags', UnicodeText, nullable=False, default=u""),
+    Column('name', UnicodeText, nullable=False),
+    Column('description', UnicodeText, nullable=False, default=u""), # obsolete
+    Column('features', UnicodeText, nullable=False, default=u""), # used as mediaURL
+    Column('spacing', Float, nullable=False, default=0.1), # obsolete
+    Column('initialSpacing', Float, nullable=False, default=60), # obsolete
+    Column('source', Integer, nullable=False, default=0))
+
+class Model(object):
+    "Defines the way a fact behaves, what fields it can contain, etc."
+    def __init__(self, name=u""):
+        self.name = name
+        self.id = genID()
+
+    def setModified(self):
+        # bump the modification stamp; callers decide when this counts
+        self.modified = time.time()
+
+    def addFieldModel(self, field):
+        "Add a field model."
+        # ordering_list on the relation assigns field.ordinal automatically
+        self.fieldModels.append(field)
+        s = object_session(self)
+        if s:
+            # flush immediately so the new row gets an id (if attached)
+            s.flush()
+
+    def addCardModel(self, card):
+        "Add a card model."
+        self.cardModels.append(card)
+        s = object_session(self)
+        if s:
+            s.flush()
+
+# fieldModels/cardModels are kept ordered by their 'ordinal' column and
+# deleted with their parent model (delete-orphan cascade).
+mapper(Model, modelsTable, properties={
+    'fieldModels': relation(FieldModel, backref='model',
+                             collection_class=ordering_list('ordinal'),
+                             order_by=[fieldModelsTable.c.ordinal],
+                            cascade="all, delete-orphan"),
+    'cardModels': relation(CardModel, backref='model',
+                           collection_class=ordering_list('ordinal'),
+                           order_by=[cardModelsTable.c.ordinal],
+                           cascade="all, delete-orphan"),
+       })
+
+# Model deletions
+##########################################################################
+
+# Tombstone table recording deleted model ids for sync purposes.
+modelsDeletedTable = Table(
+    'modelsDeleted', metadata,
+    Column('modelId', Integer, ForeignKey("models.id"),
+           nullable=False),
+    Column('deletedTime', Float, nullable=False))
diff --git a/oldanki/sound.py b/oldanki/sound.py
new file mode 100644
index 000000000..b93ac72e6
--- /dev/null
+++ b/oldanki/sound.py
@@ -0,0 +1,364 @@
+# -*- coding: utf-8 -*-
+# Copyright: Damien Elmes 
+# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
+
+"""\
+Sound support
+==============================
+"""
+__docformat__ = 'restructuredtext'
+
+import re, sys, threading, time, subprocess, os, signal, errno, atexit
+import tempfile, shutil
+from oldanki.hooks import addHook, runHook
+
+# Shared utils
+##########################################################################
+
+def playFromText(text):
+    # Queue every [sound:...] reference found in TEXT for playback.
+    for match in re.findall("\[sound:(.*?)\]", text):
+        play(match)
+
+def stripSounds(text):
+    # Return TEXT with all [sound:...] tags removed.
+    return re.sub("\[sound:.*?\]", "", text)
+
+def hasSound(text):
+    # True if TEXT contains at least one [sound:...] tag.
+    return re.search("\[sound:.*?\]", text) is not None
+
+##########################################################################
+
+# the amount of noise to cancel
+NOISE_AMOUNT = "0.1"
+# the amount of amplification
+NORM_AMOUNT = "-3"
+# the amount of bass
+BASS_AMOUNT = "+0"
+# the amount to fade at end
+FADE_AMOUNT = "0.25"
+
+# path to a sox noise profile; empty means none recorded yet
+noiseProfile = ""
+
+# intermediate filenames used by the recording pipeline (cwd-relative)
+processingSrc = "rec.wav"
+processingDst = "rec.mp3"
+processingChain = []
+recFiles = ["rec2.wav", "rec3.wav"]
+
+# NOTE(review): this module-level cmd appears unused — checkForNoiseProfile()
+# rebuilds the same command locally before installing it in the chain.
+cmd = ["sox", processingSrc, "rec2.wav"]
+# recording post-processing: [0] is filled in by checkForNoiseProfile() /
+# generateNoiseProfile(); then normalise/bass/fade; then encode to mp3.
+processingChain = [
+    None, # placeholder
+    ["sox", "rec2.wav", "rec3.wav", "norm", NORM_AMOUNT,
+     "bass", BASS_AMOUNT, "fade", FADE_AMOUNT],
+    ["lame", "rec3.wav", processingDst, "--noreplaygain", "--quiet"],
+    ]
+
+# temp dir for re-encoded media paths on windows; None elsewhere
+tmpdir = None
+
+# don't show box on windows
+if sys.platform == "win32":
+    si = subprocess.STARTUPINFO()
+    try:
+        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+    except:
+        # python2.7+
+        si.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW
+    # tmp dir for non-hashed media
+    tmpdir = unicode(
+        tempfile.mkdtemp(prefix="oldanki"), sys.getfilesystemencoding())
+else:
+    si = None
+
+if sys.platform.startswith("darwin"):
+    # make sure lame, which is installed in /usr/local/bin, is in the path
+    os.environ['PATH'] += ":" + "/usr/local/bin"
+    dir = os.path.dirname(os.path.abspath(__file__))
+    dir = os.path.abspath(dir + "/../../../..")
+    os.environ['PATH'] += ":" + dir + "/audio"
+
+def retryWait(proc):
+    # osx throws interrupted system call errors frequently
+    # (EINTR surfaces as OSError from wait(); just retry until it returns)
+    while 1:
+        try:
+            return proc.wait()
+        except OSError:
+            continue
+
+# Noise profiles
+##########################################################################
+
+def checkForNoiseProfile():
+    # Install the first stage of processingChain: plain sox conversion, plus
+    # noise reduction when a saved profile exists. On OSX the whole chain is
+    # replaced by a single lame encode.
+    global processingChain
+    if sys.platform.startswith("darwin"):
+        # not currently supported
+        processingChain = [
+            ["lame", "rec.wav", "rec.mp3", "--noreplaygain", "--quiet"]]
+    else:
+        cmd = ["sox", processingSrc, "rec2.wav"]
+        if os.path.exists(noiseProfile):
+            cmd = cmd + ["noisered", noiseProfile, NOISE_AMOUNT]
+        processingChain[0] = cmd
+
+def generateNoiseProfile():
+    # Build a sox noise profile from the current recording: trim out a
+    # 1.5s sample, run noiseprof over it, then enable noisered in the chain.
+    try:
+        os.unlink(noiseProfile)
+    except OSError:
+        # no previous profile to remove
+        pass
+    retryWait(subprocess.Popen(
+        ["sox", processingSrc, recFiles[0], "trim", "1.5", "1.5"],
+        startupinfo=si))
+    retryWait(subprocess.Popen(["sox", recFiles[0], recFiles[1],
+                                "noiseprof", noiseProfile],
+                               startupinfo=si))
+    processingChain[0] = ["sox", processingSrc, "rec2.wav",
+                          "noisered", noiseProfile, NOISE_AMOUNT]
+
+# Mplayer settings
+##########################################################################
+
+# NOTE(review): startswith("win32") here vs the exact == "win32" check
+# earlier in this module — presumably equivalent in practice; verify.
+if sys.platform.startswith("win32"):
+    mplayerCmd = ["mplayer.exe", "-ao", "win32", "-really-quiet"]
+    dir = os.path.dirname(os.path.abspath(sys.argv[0]))
+    os.environ['PATH'] += ";" + dir
+    os.environ['PATH'] += ";" + dir + "\\..\\win\\top" # for testing
+else:
+    mplayerCmd = ["mplayer", "-really-quiet"]
+
+# Mplayer in slave mode
+##########################################################################
+
+# shared state between queueMplayer() and the two worker threads below
+mplayerQueue = []
+mplayerManager = None
+mplayerReader = None
+mplayerEvt = threading.Event()
+# when set, the next queued file interrupts playback instead of appending
+mplayerClear = False
+
+class MplayerReader(threading.Thread):
+    "Read any debugging info to prevent mplayer from blocking."
+
+    def run(self):
+        while 1:
+            mplayerEvt.wait()
+            try:
+                # drain stdout so the pipe buffer never fills up
+                mplayerManager.mplayer.stdout.read()
+            except:
+                # player may not exist / may have exited; just loop
+                pass
+
+class MplayerMonitor(threading.Thread):
+    # Owns a single mplayer process in slave mode, feeding it files from
+    # mplayerQueue whenever mplayerEvt is set.
+
+    def run(self):
+        global mplayerClear
+        self.mplayer = None
+        self.deadPlayers = []
+        while 1:
+            mplayerEvt.wait()
+            if mplayerQueue:
+                # ensure started
+                if not self.mplayer:
+                    self.startProcess()
+                # loop through files to play
+                while mplayerQueue:
+                    item = mplayerQueue.pop(0)
+                    if mplayerClear:
+                        # no " 1": replace current playback instead of
+                        # appending to mplayer's internal playlist
+                        mplayerClear = False
+                        extra = ""
+                    else:
+                        extra = " 1"
+                    cmd = 'loadfile "%s"%s\n' % (item, extra)
+                    try:
+                        self.mplayer.stdin.write(cmd)
+                    except:
+                        # mplayer has quit and needs restarting
+                        self.deadPlayers.append(self.mplayer)
+                        self.mplayer = None
+                        self.startProcess()
+                        self.mplayer.stdin.write(cmd)
+            # wait() on finished processes. we don't want to block on the
+            # wait, so we keep trying each time we're reactivated
+            def clean(pl):
+                if pl.poll() is not None:
+                    pl.wait()
+                    return False
+                else:
+                    return True
+            self.deadPlayers = [pl for pl in self.deadPlayers if clean(pl)]
+            mplayerEvt.clear()
+
+    def kill(self):
+        # Ask the player to quit; reaping happens later in run() via clean().
+        if not self.mplayer:
+            return
+        try:
+            self.mplayer.stdin.write("quit\n")
+            self.deadPlayers.append(self.mplayer)
+        except:
+            # pipe already closed; nothing to do
+            pass
+        self.mplayer = None
+
+    def startProcess(self):
+        try:
+            cmd = mplayerCmd + ["-slave", "-idle"]
+            self.mplayer = subprocess.Popen(
+                cmd, startupinfo=si, stdin=subprocess.PIPE,
+                stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+        except OSError:
+            # clear the event so queueMplayer() doesn't spin waiting on us
+            mplayerEvt.clear()
+            raise Exception("Audio player not found")
+
+def queueMplayer(path):
+    # Queue PATH for playback on the mplayer worker thread.
+    ensureMplayerThreads()
+    # wait until the monitor finished its previous batch
+    while mplayerEvt.isSet():
+        time.sleep(0.1)
+    if tmpdir and os.path.exists(path):
+        # mplayer on windows doesn't like the encoding, so we create a
+        # temporary file instead. oddly, foreign characters in the dirname
+        # don't seem to matter.
+        (fd, name) = tempfile.mkstemp(suffix=os.path.splitext(path)[1],
+                                      dir=tmpdir)
+        f = os.fdopen(fd, "wb")
+        f.write(open(path, "rb").read())
+        f.close()
+        # it wants unix paths, too!
+        path = name.replace("\\", "/")
+        path = path.encode(sys.getfilesystemencoding())
+    else:
+        path = path.encode("utf-8")
+    mplayerQueue.append(path)
+    # wake the monitor thread
+    mplayerEvt.set()
+    runHook("soundQueued")
+
+def clearMplayerQueue():
+    # Make the next queued file interrupt current playback (see monitor).
+    global mplayerClear
+    mplayerClear = True
+    mplayerEvt.set()
+
+def ensureMplayerThreads():
+    # Lazily start the monitor and stdout-drain threads, once per process.
+    global mplayerManager, mplayerReader
+    if not mplayerManager:
+        mplayerManager = MplayerMonitor()
+        mplayerManager.daemon = True
+        mplayerManager.start()
+        mplayerReader = MplayerReader()
+        mplayerReader.daemon = True
+        mplayerReader.start()
+
+def stopMplayer():
+    # Stop playback if the player was ever started; safe to call anytime.
+    if not mplayerManager:
+        return
+    mplayerManager.kill()
+
+def onExit():
+    # Remove the windows temp-media dir created at import time, if any.
+    if tmpdir:
+        shutil.rmtree(tmpdir)
+
+# stop audio when a deck closes; clean up temp files on interpreter exit
+addHook("deckClosed", stopMplayer)
+atexit.register(onExit)
+
+# PyAudio recording
+##########################################################################
+
+# pyaudio is optional; when missing, these names stay undefined and
+# PyAudioThreadedRecorder.run() reports the problem at use time.
+try:
+    import pyaudio
+    import wave
+
+    PYAU_FORMAT = pyaudio.paInt16
+    PYAU_CHANNELS = 1
+    PYAU_RATE = 44100
+    PYAU_INPUT_INDEX = None
+except:
+    pass
+
+class _Recorder(object):
+
+    def postprocess(self, encode=True):
+        # Run the sox/lame pipeline over the raw recording. With
+        # encode=False the final lame step is skipped (wav output only).
+        self.encode = encode
+        for c in processingChain:
+            #print c
+            if not self.encode and c[0] == 'lame':
+                continue
+            # non-zero exit status from any stage aborts with a hint
+            ret = retryWait(subprocess.Popen(c, startupinfo=si))
+            if ret:
+                raise Exception(_("""
+Error processing audio.
+
+If you're on Linux and don't have sox 14.1+, you
+need to disable normalization. See the wiki.
+
+Command was:\n""") + u" ".join(c))
+
+class PyAudioThreadedRecorder(threading.Thread):
+    # Capture microphone input to processingSrc until .finish is set.
+
+    def __init__(self):
+        threading.Thread.__init__(self)
+        self.finish = False
+
+    def run(self):
+        chunk = 1024
+        try:
+            p = pyaudio.PyAudio()
+        except NameError:
+            # the optional import above failed
+            raise Exception(
+                "Pyaudio not installed (recording not supported on OSX10.3)")
+        stream = p.open(format=PYAU_FORMAT,
+                        channels=PYAU_CHANNELS,
+                        rate=PYAU_RATE,
+                        input=True,
+                        input_device_index=PYAU_INPUT_INDEX,
+                        frames_per_buffer=chunk)
+        all = []
+        while not self.finish:
+            try:
+                data = stream.read(chunk)
+            except IOError, e:
+                # drop overflowed buffers, re-raise anything else
+                # (python2 idiom: e[1] is the errno-like second arg)
+                if e[1] == pyaudio.paInputOverflowed:
+                    data = None
+                else:
+                    raise
+            if data:
+                all.append(data)
+        stream.close()
+        p.terminate()
+        # write the accumulated frames as a wav for postprocess()
+        data = ''.join(all)
+        wf = wave.open(processingSrc, 'wb')
+        wf.setnchannels(PYAU_CHANNELS)
+        wf.setsampwidth(p.get_sample_size(PYAU_FORMAT))
+        wf.setframerate(PYAU_RATE)
+        wf.writeframes(data)
+        wf.close()
+
+class PyAudioRecorder(_Recorder):
+    # User-facing recorder: start()/stop() drive the capture thread,
+    # file() returns the processed result after postprocess().
+
+    def __init__(self):
+        # clear leftovers from a previous recording
+        for t in recFiles + [processingSrc, processingDst]:
+            try:
+                os.unlink(t)
+            except OSError:
+                pass
+        self.encode = False
+
+    def start(self):
+        self.thread = PyAudioThreadedRecorder()
+        self.thread.start()
+
+    def stop(self):
+        self.thread.finish = True
+        self.thread.join()
+
+    def file(self):
+        if self.encode:
+            # timestamped name so repeated recordings don't collide
+            tgt = "rec%d.mp3" % time.time()
+            os.rename(processingDst, tgt)
+            return tgt
+        else:
+            # unencoded path: last intermediate wav in the chain
+            return recFiles[1]
+
+# Audio interface
+##########################################################################
+
+# indirection points so a GUI can swap in its own player/recorder backends
+_player = queueMplayer
+_queueEraser = clearMplayerQueue
+
+def play(path):
+    _player(path)
+
+def clearAudioQueue():
+    _queueEraser()
+
+Recorder = PyAudioRecorder
diff --git a/oldanki/stats.py b/oldanki/stats.py
new file mode 100644
index 000000000..e2568c308
--- /dev/null
+++ b/oldanki/stats.py
@@ -0,0 +1,560 @@
+# -*- coding: utf-8 -*-
+# Copyright: Damien Elmes 
+# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
+
+"""\
+Statistical tracking and reports
+=================================
+"""
+__docformat__ = 'restructuredtext'
+
+# we track statistics over the life of the deck, and per-day
+STATS_LIFE = 0
+STATS_DAY = 1
+
+import unicodedata, time, sys, os, datetime
+import oldanki, oldanki.utils
+from datetime import date
+from oldanki.db import *
+from oldanki.lang import _, ngettext
+from oldanki.utils import canonifyTags, ids2str
+from oldanki.hooks import runFilter
+
+# Tracking stats on the DB
+##########################################################################
+
+# One row per (type, day): type 0 = lifetime totals, type 1 = per-day.
+# The *Ease columns count answers by card maturity and ease button 0-4.
+statsTable = Table(
+    'stats', metadata,
+    Column('id', Integer, primary_key=True),
+    Column('type', Integer, nullable=False),
+    Column('day', Date, nullable=False),
+    Column('reps', Integer, nullable=False, default=0),
+    Column('averageTime', Float, nullable=False, default=0),
+    Column('reviewTime', Float, nullable=False, default=0),
+    # next two columns no longer used
+    Column('distractedTime', Float, nullable=False, default=0),
+    Column('distractedReps', Integer, nullable=False, default=0),
+    Column('newEase0', Integer, nullable=False, default=0),
+    Column('newEase1', Integer, nullable=False, default=0),
+    Column('newEase2', Integer, nullable=False, default=0),
+    Column('newEase3', Integer, nullable=False, default=0),
+    Column('newEase4', Integer, nullable=False, default=0),
+    Column('youngEase0', Integer, nullable=False, default=0),
+    Column('youngEase1', Integer, nullable=False, default=0),
+    Column('youngEase2', Integer, nullable=False, default=0),
+    Column('youngEase3', Integer, nullable=False, default=0),
+    Column('youngEase4', Integer, nullable=False, default=0),
+    Column('matureEase0', Integer, nullable=False, default=0),
+    Column('matureEase1', Integer, nullable=False, default=0),
+    Column('matureEase2', Integer, nullable=False, default=0),
+    Column('matureEase3', Integer, nullable=False, default=0),
+    Column('matureEase4', Integer, nullable=False, default=0))
+
+class Stats(object):
+    # In-memory mirror of one stats row; fromDB/create/toDB do the raw SQL
+    # themselves rather than going through the ORM session.
+    def __init__(self):
+        self.day = None
+        self.reps = 0
+        self.averageTime = 0
+        self.reviewTime = 0
+        self.distractedTime = 0
+        self.distractedReps = 0
+        self.newEase0 = 0
+        self.newEase1 = 0
+        self.newEase2 = 0
+        self.newEase3 = 0
+        self.newEase4 = 0
+        self.youngEase0 = 0
+        self.youngEase1 = 0
+        self.youngEase2 = 0
+        self.youngEase3 = 0
+        self.youngEase4 = 0
+        self.matureEase0 = 0
+        self.matureEase1 = 0
+        self.matureEase2 = 0
+        self.matureEase3 = 0
+        self.matureEase4 = 0
+
+    def fromDB(self, s, id):
+        # Load row ID; the tuple unpack below must match the column order
+        # of statsTable exactly.
+        r = s.first("select * from stats where id = :id", id=id)
+        (self.id,
+         self.type,
+         self.day,
+         self.reps,
+         self.averageTime,
+         self.reviewTime,
+         self.distractedTime,
+         self.distractedReps,
+         self.newEase0,
+         self.newEase1,
+         self.newEase2,
+         self.newEase3,
+         self.newEase4,
+         self.youngEase0,
+         self.youngEase1,
+         self.youngEase2,
+         self.youngEase3,
+         self.youngEase4,
+         self.matureEase0,
+         self.matureEase1,
+         self.matureEase2,
+         self.matureEase3,
+         self.matureEase4) = r
+        # the raw driver hands back "YYYY-MM-DD"; convert to a date object
+        self.day = datetime.date(*[int(i) for i in self.day.split("-")])
+
+    def create(self, s, type, day):
+        # Insert a zeroed row for (type, day) and read back its id.
+        self.type = type
+        self.day = day
+        s.execute("""insert into stats
+(type, day, reps, averageTime, reviewTime, distractedTime, distractedReps,
+newEase0, newEase1, newEase2, newEase3, newEase4, youngEase0, youngEase1,
+youngEase2, youngEase3, youngEase4, matureEase0, matureEase1, matureEase2,
+matureEase3, matureEase4) values (:type, :day, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)""", self.__dict__)
+        self.id = s.scalar(
+            "select id from stats where type = :type and day = :day",
+            type=type, day=day)
+
+    def toDB(self, s):
+        # Persist all counters; note the obsolete distracted* columns are
+        # deliberately not written back.
+        assert self.id
+        s.execute("""update stats set
+type=:type,
+day=:day,
+reps=:reps,
+averageTime=:averageTime,
+reviewTime=:reviewTime,
+newEase0=:newEase0,
+newEase1=:newEase1,
+newEase2=:newEase2,
+newEase3=:newEase3,
+newEase4=:newEase4,
+youngEase0=:youngEase0,
+youngEase1=:youngEase1,
+youngEase2=:youngEase2,
+youngEase3=:youngEase3,
+youngEase4=:youngEase4,
+matureEase0=:matureEase0,
+matureEase1=:matureEase1,
+matureEase2=:matureEase2,
+matureEase3=:matureEase3,
+matureEase4=:matureEase4
+where id = :id""", self.__dict__)
+
+mapper(Stats, statsTable)
+
+def genToday(deck):
+    # "Today" in the deck's local day boundary (deck.utcOffset seconds).
+    return datetime.datetime.utcfromtimestamp(
+        time.time() - deck.utcOffset).date()
+
+def updateAllStats(s, gs, ds, card, ease, oldState):
+    "Update global and daily statistics."
+    updateStats(s, gs, card, ease, oldState)
+    updateStats(s, ds, card, ease, oldState)
+
+def updateStats(s, stats, card, ease, oldState):
+    # Fold one answer into STATS and persist it. oldState is one of
+    # "new"/"young"/"mature"; ease is the 0-4 answer button.
+    stats.reps += 1
+    delay = card.totalTime()
+    if delay >= 60:
+        # cap runaway answer times at one minute
+        stats.reviewTime += 60
+    else:
+        stats.reviewTime += delay
+        # NOTE(review): averageTime is only recomputed on the <60s branch,
+        # so capped reviews don't refresh it — presumably intentional, but
+        # worth confirming against the original scheduler.
+        stats.averageTime = (
+            stats.reviewTime / float(stats.reps))
+    # update eases
+    attr = oldState + "Ease%d" % ease
+    setattr(stats, attr, getattr(stats, attr) + 1)
+    stats.toDB(s)
+
+def globalStats(deck):
+    # Fetch (or lazily create) the single lifetime stats row for DECK.
+    s = deck.s
+    type = STATS_LIFE
+    today = genToday(deck)
+    id = s.scalar("select id from stats where type = :type",
+                  type=type)
+    stats = Stats()
+    if id:
+        stats.fromDB(s, id)
+        return stats
+    else:
+        stats.create(s, type, today)
+    # create() path only: mirror the type on the in-memory object
+    stats.type = type
+    return stats
+
+def dailyStats(deck):
+    # Fetch (or lazily create) today's stats row for DECK.
+    s = deck.s
+    type = STATS_DAY
+    today = genToday(deck)
+    id = s.scalar("select id from stats where type = :type and day = :day",
+                  type=type, day=today)
+    stats = Stats()
+    if id:
+        stats.fromDB(s, id)
+        return stats
+    else:
+        stats.create(s, type, today)
+    return stats
+
+def summarizeStats(stats, pre=""):
+    "Generate percentages and total counts for STATS. Optionally prefix."
+    # Ease 0-1 count as "no", ease 2-4 as "yes". Keys look like
+    # <pre>NewYes, <pre>YesTotal, <pre>NewYes%, <pre>Total, etc.
+    cardTypes = ("new", "young", "mature")
+    h = {}
+    # total counts
+    ###############
+    for type in cardTypes:
+        # total yes/no for type, eg. gNewYes
+        h[pre + type.capitalize() + "No"] = (getattr(stats, type + "Ease0") +
+                                             getattr(stats, type + "Ease1"))
+        h[pre + type.capitalize() + "Yes"] = (getattr(stats, type + "Ease2") +
+                                              getattr(stats, type + "Ease3") +
+                                              getattr(stats, type + "Ease4"))
+        # total for type, eg. gNewTotal
+        h[pre + type.capitalize() + "Total"] = (
+            h[pre + type.capitalize() + "No"] +
+            h[pre + type.capitalize() + "Yes"])
+    # total yes/no, eg. gYesTotal
+    for answer in ("yes", "no"):
+        num = 0
+        for type in cardTypes:
+            num += h[pre + type.capitalize() + answer.capitalize()]
+        h[pre + answer.capitalize() + "Total"] = num
+    # total over all, eg. gTotal
+    num = 0
+    for type in cardTypes:
+        num += h[pre + type.capitalize() + "Total"]
+    h[pre + "Total"] = num
+    # percentages
+    ##############
+    for type in cardTypes:
+        # total yes/no % by type, eg. gNewYes%
+        for answer in ("yes", "no"):
+            setPercentage(h, pre + type.capitalize() + answer.capitalize(),
+                          pre + type.capitalize())
+    for answer in ("yes", "no"):
+        # total yes/no, eg. gYesTotal%
+        setPercentage(h, pre + answer.capitalize() + "Total", pre)
+    h[pre + 'AverageTime'] = stats.averageTime
+    h[pre + 'ReviewTime'] = stats.reviewTime
+    return h
+
+def setPercentage(h, a, b):
+    # h[a + "%"] = h[a] as a percentage of h[b + "Total"]; 0 when empty.
+    try:
+        h[a + "%"] = (h[a] / float(h[b + "Total"])) * 100
+    except ZeroDivisionError:
+        h[a + "%"] = 0
+
+def getStats(s, gs, ds):
+    "Return a handy dictionary exposing a number of internal stats."
+    # gs = lifetime ("g" prefix), ds = today's ("d" prefix); s is unused here
+    h = {}
+    h.update(summarizeStats(gs, "g"))
+    h.update(summarizeStats(ds, "d"))
+    return h
+
+# Card stats
+##########################################################################
+
+class CardStats(object):
+
+    def __init__(self, deck, card):
+        self.deck = deck
+        self.card = card
+
+    def report(self):
+        c = self.card
+        fmt = oldanki.utils.fmtTimeSpan
+        fmtFloat = oldanki.utils.fmtFloat
+        self.txt = ""
+        self.addLine(_("Added"), self.strTime(c.created))
+        if c.firstAnswered:
+            self.addLine(_("First Review"), self.strTime(c.firstAnswered))
+        self.addLine(_("Changed"), self.strTime(c.modified))
+        if c.reps:
+            next = time.time() - c.combinedDue
+            if next > 0:
+                next = _("%s ago") % fmt(next)
+            else:
+                next = _("in %s") % fmt(abs(next))
+            self.addLine(_("Due"), next)
+        self.addLine(_("Interval"), fmt(c.interval * 86400))
+        self.addLine(_("Ease"), fmtFloat(c.factor, point=2))
+        if c.lastDue:
+            last = _("%s ago") % fmt(time.time() - c.lastDue)
+            self.addLine(_("Last Due"), last)
+        if c.interval != c.lastInterval:
+            # don't show the last interval if it hasn't been updated yet
+            self.addLine(_("Last Interval"), fmt(c.lastInterval * 86400))
+        self.addLine(_("Last Ease"), fmtFloat(c.lastFactor, point=2))
+        if c.reps:
+            self.addLine(_("Reviews"), "%d/%d (s=%d)" % (
+                c.yesCount, c.reps, c.successive))
+        avg = fmt(c.averageTime, point=2)
+        self.addLine(_("Average Time"),avg)
+        total = fmt(c.reviewTime, point=2)
+        self.addLine(_("Total Time"), total)
+        self.addLine(_("Model Tags"), c.fact.model.tags)
+        self.addLine(_("Card Template") + " "*5, c.cardModel.name)
+        self.txt += "
" + return self.txt + + def addLine(self, k, v): + self.txt += "%s%s" % (k, v) + + def strTime(self, tm): + s = oldanki.utils.fmtTimeSpan(time.time() - tm) + return _("%s ago") % s + +# Deck stats (specific to the 'sched' scheduler) +########################################################################## + +class DeckStats(object): + + def __init__(self, deck): + self.deck = deck + + def report(self): + "Return an HTML string with a report." + fmtPerc = oldanki.utils.fmtPercentage + fmtFloat = oldanki.utils.fmtFloat + if self.deck.isEmpty(): + return _("Please add some cards first.") + "

" + d = self.deck + html="

" + _("Deck Statistics") + "

" + html += _("Deck created: %s ago
") % self.createdTimeStr() + total = d.cardCount + new = d.newCountAll() + young = d.youngCardCount() + old = d.matureCardCount() + newP = new / float(total) * 100 + youngP = young / float(total) * 100 + oldP = old / float(total) * 100 + stats = d.getStats() + (stats["new"], stats["newP"]) = (new, newP) + (stats["old"], stats["oldP"]) = (old, oldP) + (stats["young"], stats["youngP"]) = (young, youngP) + html += _("Total number of cards:") + " %d
" % total + html += _("Total number of facts:") + " %d

" % d.factCount + + html += "" + _("Card Maturity") + "
" + html += _("Mature cards: ") + " %(old)d (%(oldP)s)
" % { + 'old': stats['old'], 'oldP' : fmtPerc(stats['oldP'])} + html += _("Young cards: ") + " %(young)d (%(youngP)s)
" % { + 'young': stats['young'], 'youngP' : fmtPerc(stats['youngP'])} + html += _("Unseen cards:") + " %(new)d (%(newP)s)
" % { + 'new': stats['new'], 'newP' : fmtPerc(stats['newP'])} + avgInt = self.getAverageInterval() + if avgInt: + html += _("Average interval: ") + ("%s ") % fmtFloat(avgInt) + _("days") + html += "
" + html += "
" + html += "" + _("Correct Answers") + "
" + html += _("Mature cards: ") + " " + fmtPerc(stats['gMatureYes%']) + ( + " " + _("(%(partOf)d of %(totalSum)d)") % { + 'partOf' : stats['gMatureYes'], + 'totalSum' : stats['gMatureTotal'] } + "
") + html += _("Young cards: ") + " " + fmtPerc(stats['gYoungYes%']) + ( + " " + _("(%(partOf)d of %(totalSum)d)") % { + 'partOf' : stats['gYoungYes'], + 'totalSum' : stats['gYoungTotal'] } + "
") + html += _("First-seen cards:") + " " + fmtPerc(stats['gNewYes%']) + ( + " " + _("(%(partOf)d of %(totalSum)d)") % { + 'partOf' : stats['gNewYes'], + 'totalSum' : stats['gNewTotal'] } + "

") + + # average pending time + existing = d.cardCount - d.newCountToday + def tr(a, b): + return "%s%s" % (a, b) + def repsPerDay(reps,days): + retval = ("%d " % reps) + ngettext("rep", "reps", reps) + retval += ("/%d " % days) + ngettext("day", "days", days) + return retval + if existing and avgInt: + html += "" + _("Recent Work") + "" + if sys.platform.startswith("darwin"): + html += "" + else: + html += "
" + html += tr(_("In last week"), repsPerDay( + self.getRepsDone(-7, 0), + self.getDaysReviewed(-7, 0))) + html += tr(_("In last month"), repsPerDay( + self.getRepsDone(-30, 0), + self.getDaysReviewed(-30, 0))) + html += tr(_("In last 3 months"), repsPerDay( + self.getRepsDone(-92, 0), + self.getDaysReviewed(-92, 0))) + html += tr(_("In last 6 months"), repsPerDay( + self.getRepsDone(-182, 0), + self.getDaysReviewed(-182, 0))) + html += tr(_("In last year"), repsPerDay( + self.getRepsDone(-365, 0), + self.getDaysReviewed(-365, 0))) + html += tr(_("Deck life"), repsPerDay( + self.getRepsDone(-13000, 0), + self.getDaysReviewed(-13000, 0))) + html += "
" + + html += "

" + _("Average Daily Reviews") + "" + if sys.platform.startswith("darwin"): + html += "" + else: + html += "
" + html += tr(_("Deck life"), ("%s ") % ( + fmtFloat(self.getSumInverseRoundInterval())) + _("cards/day")) + html += tr(_("In next week"), ("%s ") % ( + fmtFloat(self.getWorkloadPeriod(7))) + _("cards/day")) + html += tr(_("In next month"), ("%s ") % ( + fmtFloat(self.getWorkloadPeriod(30))) + _("cards/day")) + html += tr(_("In last week"), ("%s ") % ( + fmtFloat(self.getPastWorkloadPeriod(7))) + _("cards/day")) + html += tr(_("In last month"), ("%s ") % ( + fmtFloat(self.getPastWorkloadPeriod(30))) + _("cards/day")) + html += tr(_("In last 3 months"), ("%s ") % ( + fmtFloat(self.getPastWorkloadPeriod(92))) + _("cards/day")) + html += tr(_("In last 6 months"), ("%s ") % ( + fmtFloat(self.getPastWorkloadPeriod(182))) + _("cards/day")) + html += tr(_("In last year"), ("%s ") % ( + fmtFloat(self.getPastWorkloadPeriod(365))) + _("cards/day")) + html += "
" + + html += "

" + _("Average Added") + "" + if sys.platform.startswith("darwin"): + html += "" + else: + html += "
" + html += tr(_("Deck life"), _("%(a)s/day, %(b)s/mon") % { + 'a': fmtFloat(self.newAverage()), 'b': fmtFloat(self.newAverage()*30)}) + np = self.getNewPeriod(7) + html += tr(_("In last week"), _("%(a)d (%(b)s/day)") % ( + {'a': np, 'b': fmtFloat(np / float(7))})) + np = self.getNewPeriod(30) + html += tr(_("In last month"), _("%(a)d (%(b)s/day)") % ( + {'a': np, 'b': fmtFloat(np / float(30))})) + np = self.getNewPeriod(92) + html += tr(_("In last 3 months"), _("%(a)d (%(b)s/day)") % ( + {'a': np, 'b': fmtFloat(np / float(92))})) + np = self.getNewPeriod(182) + html += tr(_("In last 6 months"), _("%(a)d (%(b)s/day)") % ( + {'a': np, 'b': fmtFloat(np / float(182))})) + np = self.getNewPeriod(365) + html += tr(_("In last year"), _("%(a)d (%(b)s/day)") % ( + {'a': np, 'b': fmtFloat(np / float(365))})) + html += "
" + + html += "

" + _("Average New Seen") + "" + if sys.platform.startswith("darwin"): + html += "" + else: + html += "
" + np = self.getFirstPeriod(7) + html += tr(_("In last week"), _("%(a)d (%(b)s/day)") % ( + {'a': np, 'b': fmtFloat(np / float(7))})) + np = self.getFirstPeriod(30) + html += tr(_("In last month"), _("%(a)d (%(b)s/day)") % ( + {'a': np, 'b': fmtFloat(np / float(30))})) + np = self.getFirstPeriod(92) + html += tr(_("In last 3 months"), _("%(a)d (%(b)s/day)") % ( + {'a': np, 'b': fmtFloat(np / float(92))})) + np = self.getFirstPeriod(182) + html += tr(_("In last 6 months"), _("%(a)d (%(b)s/day)") % ( + {'a': np, 'b': fmtFloat(np / float(182))})) + np = self.getFirstPeriod(365) + html += tr(_("In last year"), _("%(a)d (%(b)s/day)") % ( + {'a': np, 'b': fmtFloat(np / float(365))})) + html += "
" + + html += "

" + _("Card Ease") + "
" + html += _("Lowest factor: %.2f") % d.s.scalar( + "select min(factor) from cards") + "
" + html += _("Average factor: %.2f") % d.s.scalar( + "select avg(factor) from cards") + "
" + html += _("Highest factor: %.2f") % d.s.scalar( + "select max(factor) from cards") + "
" + + html = runFilter("deckStats", html) + return html + + def getDaysReviewed(self, start, finish): + now = datetime.datetime.today() + x = now + datetime.timedelta(start) + y = now + datetime.timedelta(finish) + return self.deck.s.scalar( + "select count() from stats where " + "day >= :x and day <= :y and reps > 0", + x=x, y=y) + + def getRepsDone(self, start, finish): + now = datetime.datetime.today() + x = time.mktime((now + datetime.timedelta(start)).timetuple()) + y = time.mktime((now + datetime.timedelta(finish)).timetuple()) + return self.deck.s.scalar( + "select count() from reviewHistory where time >= :x and time <= :y", + x=x, y=y) + + def getAverageInterval(self): + return self.deck.s.scalar( + "select sum(interval) / count(interval) from cards " + "where cards.reps > 0") or 0 + + def intervalReport(self, intervals, labels, total): + boxes = self.splitIntoIntervals(intervals) + keys = boxes.keys() + keys.sort() + html = "" + for key in keys: + html += ("%s" + + "%d%s") % ( + labels[key], + boxes[key], + fmtPerc(boxes[key] / float(total) * 100)) + return html + + def splitIntoIntervals(self, intervals): + boxes = {} + n = 0 + for i in range(len(intervals) - 1): + (min, max) = (intervals[i], intervals[i+1]) + for c in self.deck: + if c.interval > min and c.interval <= max: + boxes[n] = boxes.get(n, 0) + 1 + n += 1 + return boxes + + def newAverage(self): + "Average number of new cards added each day." 
+ return self.deck.cardCount / max(1, self.ageInDays()) + + def createdTimeStr(self): + return oldanki.utils.fmtTimeSpan(time.time() - self.deck.created) + + def ageInDays(self): + return (time.time() - self.deck.created) / 86400.0 + + def getSumInverseRoundInterval(self): + return self.deck.s.scalar( + "select sum(1/round(max(interval, 1)+0.5)) from cards " + "where cards.reps > 0 " + "and priority > 0") or 0 + + def getWorkloadPeriod(self, period): + cutoff = time.time() + 86400 * period + return (self.deck.s.scalar(""" +select count(id) from cards +where combinedDue < :cutoff +and priority > 0 and relativeDelay in (0,1)""", cutoff=cutoff) or 0) / float(period) + + def getPastWorkloadPeriod(self, period): + cutoff = time.time() - 86400 * period + return (self.deck.s.scalar(""" +select count(*) from reviewHistory +where time > :cutoff""", cutoff=cutoff) or 0) / float(period) + + def getNewPeriod(self, period): + cutoff = time.time() - 86400 * period + return (self.deck.s.scalar(""" +select count(id) from cards +where created > :cutoff""", cutoff=cutoff) or 0) + + def getFirstPeriod(self, period): + cutoff = time.time() - 86400 * period + return (self.deck.s.scalar(""" +select count(*) from reviewHistory +where reps = 1 and time > :cutoff""", cutoff=cutoff) or 0) diff --git a/oldanki/stdmodels.py b/oldanki/stdmodels.py new file mode 100644 index 000000000..610da6af3 --- /dev/null +++ b/oldanki/stdmodels.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Standard Models. +============================================================== + +Plugins can add to the 'models' dict to provide more standard +models. 
+""" + +from oldanki.models import Model, CardModel, FieldModel +from oldanki.lang import _ + +models = {} + +def byName(name): + fn = models.get(name) + if fn: + return fn() + raise ValueError("No such model available!") + +def names(): + return models.keys() + +# Basic +########################################################################## + +def BasicModel(): + m = Model(_('Basic')) + m.addFieldModel(FieldModel(u'Front', True, True)) + m.addFieldModel(FieldModel(u'Back', False, False)) + m.addCardModel(CardModel(u'Forward', u'%(Front)s', u'%(Back)s')) + m.addCardModel(CardModel(u'Reverse', u'%(Back)s', u'%(Front)s', + active=False)) + m.tags = u"Basic" + return m + +models['Basic'] = BasicModel + +# Recovery +########################################################################## + +def RecoveryModel(): + m = Model(_('Recovery')) + m.addFieldModel(FieldModel(u'Question', False, False)) + m.addFieldModel(FieldModel(u'Answer', False, False)) + m.addCardModel(CardModel(u'Single', u'{{{Question}}}', u'{{{Answer}}}')) + m.tags = u"Recovery" + return m diff --git a/oldanki/sync.py b/oldanki/sync.py new file mode 100644 index 000000000..e5279e7c6 --- /dev/null +++ b/oldanki/sync.py @@ -0,0 +1,1236 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Synchronisation +============================== + +Support for keeping two decks synchronized. Both local syncing and syncing +over HTTP are supported. + +Server implements the following calls: + +getDecks(): return a list of deck names & modtimes +summary(lastSync): a list of all objects changed after lastSync +applyPayload(payload): apply any sent changes and return any changed remote + objects +finish(): save deck on server after payload applied and response received +createDeck(name): create a deck on the server + +Full sync support is not documented yet. 
+""" +__docformat__ = 'restructuredtext' + +import zlib, re, urllib, urllib2, socket, time, shutil +from anki.utils import json as simplejson +import os, base64, httplib, sys, tempfile, httplib, types +from datetime import date +import oldanki, oldanki.deck, oldanki.cards +from oldanki.db import sqlite +from oldanki.errors import * +from oldanki.models import Model, FieldModel, CardModel +from oldanki.facts import Fact, Field +from oldanki.cards import Card +from oldanki.stats import Stats, globalStats +from oldanki.history import CardHistoryEntry +from oldanki.stats import globalStats +from oldanki.utils import ids2str, hexifyID, checksum +from oldanki.media import mediaFiles +from oldanki.lang import _ +from hooks import runHook + +if simplejson.__version__ < "1.7.3": + raise Exception("SimpleJSON must be 1.7.3 or later.") + +CHUNK_SIZE = 32768 +MIME_BOUNDARY = "Anki-sync-boundary" +# live +SYNC_URL = "http://ankiweb.net/sync/" +SYNC_HOST = "ankiweb.net"; SYNC_PORT = 80 +# testing +#SYNC_URL = "http://localhost:8001/sync/" +#SYNC_HOST = "localhost"; SYNC_PORT = 8001 + + +KEYS = ("models", "facts", "cards", "media") + +########################################################################## +# Monkey-patch httplib to incrementally send instead of chewing up large +# amounts of memory, and track progress. 
+ +sendProgressHook = None + +def incrementalSend(self, strOrFile): + if self.sock is None: + if self.auto_open: + self.connect() + else: + raise NotConnected() + if self.debuglevel > 0: + print "send:", repr(str) + try: + if (isinstance(strOrFile, str) or + isinstance(strOrFile, unicode)): + self.sock.sendall(strOrFile) + else: + cnt = 0 + t = time.time() + while 1: + if sendProgressHook and time.time() - t > 1: + sendProgressHook(cnt) + t = time.time() + data = strOrFile.read(CHUNK_SIZE) + cnt += len(data) + if not data: + break + self.sock.sendall(data) + except socket.error, v: + if v[0] == 32: # Broken pipe + self.close() + raise + +httplib.HTTPConnection.send = incrementalSend + +def fullSyncProgressHook(cnt): + runHook("fullSyncProgress", "fromLocal", cnt) + +########################################################################## + +class SyncTools(object): + + def __init__(self, deck=None): + self.deck = deck + self.diffs = {} + self.serverExcludedTags = [] + self.timediff = 0 + + # Control + ########################################################################## + + def setServer(self, server): + self.server = server + + def sync(self): + "Sync two decks locally. Reimplement this for finer control." + if not self.prepareSync(0): + return + sums = self.summaries() + payload = self.genPayload(sums) + res = self.server.applyPayload(payload) + self.applyPayloadReply(res) + self.deck.reset() + + def prepareSync(self, timediff): + "Sync setup. True if sync needed." 
+ self.localTime = self.modified() + self.remoteTime = self.server.modified() + if self.localTime == self.remoteTime: + return False + l = self._lastSync(); r = self.server._lastSync() + # set lastSync to the lower of the two sides, and account for slow + # clocks & assume it took up to 10 seconds for the reply to arrive + self.deck.lastSync = min(l, r) - timediff - 10 + return True + + def summaries(self): + return (self.summary(self.deck.lastSync), + self.server.summary(self.deck.lastSync)) + + def genPayload(self, summaries): + (lsum, rsum) = summaries + self.preSyncRefresh() + payload = {} + # first, handle models, facts and cards + for key in KEYS: + diff = self.diffSummary(lsum, rsum, key) + payload["added-" + key] = self.getObjsFromKey(diff[0], key) + payload["deleted-" + key] = diff[1] + payload["missing-" + key] = diff[2] + self.deleteObjsFromKey(diff[3], key) + # handle the remainder + if self.localTime > self.remoteTime: + payload['stats'] = self.bundleStats() + payload['history'] = self.bundleHistory() + payload['sources'] = self.bundleSources() + # finally, set new lastSync and bundle the deck info + payload['deck'] = self.bundleDeck() + return payload + + def applyPayload(self, payload): + reply = {} + self.preSyncRefresh() + # model, facts and cards + for key in KEYS: + k = 'added-' + key + # send back any requested + if k in payload: + reply[k] = self.getObjsFromKey( + payload['missing-' + key], key) + self.updateObjsFromKey(payload['added-' + key], key) + self.deleteObjsFromKey(payload['deleted-' + key], key) + # send back deck-related stuff if it wasn't sent to us + if not 'deck' in payload: + reply['stats'] = self.bundleStats() + reply['history'] = self.bundleHistory() + reply['sources'] = self.bundleSources() + # finally, set new lastSync and bundle the deck info + reply['deck'] = self.bundleDeck() + else: + self.updateDeck(payload['deck']) + self.updateStats(payload['stats']) + self.updateHistory(payload['history']) + if 'sources' in payload: + 
self.updateSources(payload['sources']) + self.postSyncRefresh() + cardIds = [x[0] for x in payload['added-cards']] + self.deck.updateCardTags(cardIds) + # rebuild priorities on server + self.rebuildPriorities(cardIds, self.serverExcludedTags) + return reply + + def applyPayloadReply(self, reply): + # model, facts and cards + for key in KEYS: + k = 'added-' + key + # old version may not send media + if k in reply: + self.updateObjsFromKey(reply['added-' + key], key) + # deck + if 'deck' in reply: + self.updateDeck(reply['deck']) + self.updateStats(reply['stats']) + self.updateHistory(reply['history']) + if 'sources' in reply: + self.updateSources(reply['sources']) + self.postSyncRefresh() + # rebuild priorities on client + cardIds = [x[0] for x in reply['added-cards']] + self.deck.updateCardTags(cardIds) + self.rebuildPriorities(cardIds) + if self.missingFacts() != 0: + raise Exception( + "Facts missing after sync. Please run Tools>Advanced>Check DB.") + + def missingFacts(self): + return self.deck.s.scalar( + "select count() from cards where factId "+ + "not in (select id from facts)"); + + def rebuildPriorities(self, cardIds, suspend=[]): + self.deck.updateAllPriorities(partial=True, dirty=False) + self.deck.updatePriorities(cardIds, suspend=suspend, dirty=False) + + def postSyncRefresh(self): + "Flush changes to DB, and reload object associations." 
+ self.deck.s.flush() + self.deck.s.refresh(self.deck) + self.deck.currentModel + + def preSyncRefresh(self): + # ensure global stats are available (queue may not be built) + self.deck._globalStats = globalStats(self.deck) + + def payloadChanges(self, payload): + h = { + 'lf': len(payload['added-facts']['facts']), + 'rf': len(payload['missing-facts']), + 'lc': len(payload['added-cards']), + 'rc': len(payload['missing-cards']), + 'lm': len(payload['added-models']), + 'rm': len(payload['missing-models']), + } + if self.localTime > self.remoteTime: + h['ls'] = _('all') + h['rs'] = 0 + else: + h['ls'] = 0 + h['rs'] = _('all') + return h + + def payloadChangeReport(self, payload): + p = self.payloadChanges(payload) + return _("""\ + + + + + + + +
Added/Changed   Here   Server
Cards%(lc)d%(rc)d
Facts%(lf)d%(rf)d
Models%(lm)d%(rm)d
Stats%(ls)s%(rs)s
""") % p + + # Summaries + ########################################################################## + + def summary(self, lastSync): + "Generate a full summary of modtimes for two-way syncing." + # client may have selected an earlier sync time + self.deck.lastSync = lastSync + # ensure we're flushed first + self.deck.s.flush() + return { + # cards + "cards": self.realLists(self.deck.s.all( + "select id, modified from cards where modified > :mod", + mod=lastSync)), + "delcards": self.realLists(self.deck.s.all( + "select cardId, deletedTime from cardsDeleted " + "where deletedTime > :mod", mod=lastSync)), + # facts + "facts": self.realLists(self.deck.s.all( + "select id, modified from facts where modified > :mod", + mod=lastSync)), + "delfacts": self.realLists(self.deck.s.all( + "select factId, deletedTime from factsDeleted " + "where deletedTime > :mod", mod=lastSync)), + # models + "models": self.realLists(self.deck.s.all( + "select id, modified from models where modified > :mod", + mod=lastSync)), + "delmodels": self.realLists(self.deck.s.all( + "select modelId, deletedTime from modelsDeleted " + "where deletedTime > :mod", mod=lastSync)), + # media + "media": self.realLists(self.deck.s.all( + "select id, created from media where created > :mod", + mod=lastSync)), + "delmedia": self.realLists(self.deck.s.all( + "select mediaId, deletedTime from mediaDeleted " + "where deletedTime > :mod", mod=lastSync)), + } + + # Diffing + ########################################################################## + + def diffSummary(self, localSummary, remoteSummary, key): + # list of ids on both ends + lexists = localSummary[key] + ldeleted = localSummary["del"+key] + rexists = remoteSummary[key] + rdeleted = remoteSummary["del"+key] + ldeletedIds = dict(ldeleted) + rdeletedIds = dict(rdeleted) + # to store the results + locallyEdited = [] + locallyDeleted = [] + remotelyEdited = [] + remotelyDeleted = [] + # build a hash of all ids, with value (localMod, remoteMod). 
+ # deleted/nonexisting cards are marked with a modtime of None. + ids = {} + for (id, mod) in rexists: + ids[id] = [None, mod] + for (id, mod) in rdeleted: + ids[id] = [None, None] + for (id, mod) in lexists: + if id in ids: + ids[id][0] = mod + else: + ids[id] = [mod, None] + for (id, mod) in ldeleted: + if id in ids: + ids[id][0] = None + else: + ids[id] = [None, None] + # loop through the hash, determining differences + for (id, (localMod, remoteMod)) in ids.items(): + if localMod and remoteMod: + # changed/existing on both sides + if localMod < remoteMod: + remotelyEdited.append(id) + elif localMod > remoteMod: + locallyEdited.append(id) + elif localMod and not remoteMod: + # if it's missing on server or newer here, sync + if (id not in rdeletedIds or + rdeletedIds[id] < localMod): + locallyEdited.append(id) + else: + remotelyDeleted.append(id) + elif remoteMod and not localMod: + # if it's missing locally or newer there, sync + if (id not in ldeletedIds or + ldeletedIds[id] < remoteMod): + remotelyEdited.append(id) + else: + locallyDeleted.append(id) + else: + if id in ldeletedIds and id not in rdeletedIds: + locallyDeleted.append(id) + elif id in rdeletedIds and id not in ldeletedIds: + remotelyDeleted.append(id) + return (locallyEdited, locallyDeleted, + remotelyEdited, remotelyDeleted) + + # Models + ########################################################################## + + def getModels(self, ids, updateModified=False): + return [self.bundleModel(id, updateModified) for id in ids] + + def bundleModel(self, id, updateModified): + "Return a model representation suitable for transport." 
+ mod = self.deck.s.query(Model).get(id) + # force load of lazy attributes + mod.fieldModels; mod.cardModels + m = self.dictFromObj(mod) + m['fieldModels'] = [self.bundleFieldModel(fm) for fm in m['fieldModels']] + m['cardModels'] = [self.bundleCardModel(fm) for fm in m['cardModels']] + if updateModified: + m['modified'] = time.time() + return m + + def bundleFieldModel(self, fm): + d = self.dictFromObj(fm) + if 'model' in d: del d['model'] + return d + + def bundleCardModel(self, cm): + d = self.dictFromObj(cm) + if 'model' in d: del d['model'] + return d + + def updateModels(self, models): + for model in models: + local = self.getModel(model['id']) + # avoid overwriting any existing card/field models + fms = model['fieldModels']; del model['fieldModels'] + cms = model['cardModels']; del model['cardModels'] + self.applyDict(local, model) + self.mergeFieldModels(local, fms) + self.mergeCardModels(local, cms) + self.deck.s.statement( + "delete from modelsDeleted where modelId in %s" % + ids2str([m['id'] for m in models])) + + def getModel(self, id, create=True): + "Return a local model with same ID, or create." 
+ id = int(id) + for l in self.deck.models: + if l.id == id: + return l + if not create: + return + m = Model() + self.deck.models.append(m) + return m + + def mergeFieldModels(self, model, fms): + ids = [] + for fm in fms: + local = self.getFieldModel(model, fm) + self.applyDict(local, fm) + ids.append(fm['id']) + for fm in model.fieldModels: + if fm.id not in ids: + self.deck.deleteFieldModel(model, fm) + + def getFieldModel(self, model, remote): + id = int(remote['id']) + for fm in model.fieldModels: + if fm.id == id: + return fm + fm = FieldModel() + model.addFieldModel(fm) + return fm + + def mergeCardModels(self, model, cms): + ids = [] + for cm in cms: + local = self.getCardModel(model, cm) + if not 'allowEmptyAnswer' in cm or cm['allowEmptyAnswer'] is None: + cm['allowEmptyAnswer'] = True + self.applyDict(local, cm) + ids.append(cm['id']) + for cm in model.cardModels: + if cm.id not in ids: + self.deck.deleteCardModel(model, cm) + + def getCardModel(self, model, remote): + id = int(remote['id']) + for cm in model.cardModels: + if cm.id == id: + return cm + cm = CardModel() + model.addCardModel(cm) + return cm + + def deleteModels(self, ids): + for id in ids: + model = self.getModel(id, create=False) + if model: + self.deck.deleteModel(model) + + # Facts + ########################################################################## + + def getFacts(self, ids, updateModified=False): + if updateModified: + modified = time.time() + else: + modified = "modified" + factIds = ids2str(ids) + return { + 'facts': self.realLists(self.deck.s.all(""" +select id, modelId, created, %s, tags, spaceUntil, lastCardId from facts +where id in %s""" % (modified, factIds))), + 'fields': self.realLists(self.deck.s.all(""" +select id, factId, fieldModelId, ordinal, value from fields +where factId in %s""" % factIds)) + } + + def updateFacts(self, factsdict): + facts = factsdict['facts'] + fields = factsdict['fields'] + if not facts: + return + # update facts first + dlist = [{ + 
'id': f[0], + 'modelId': f[1], + 'created': f[2], + 'modified': f[3], + 'tags': f[4], + 'spaceUntil': f[5] or "", + 'lastCardId': f[6] + } for f in facts] + self.deck.s.execute(""" +insert or replace into facts +(id, modelId, created, modified, tags, spaceUntil, lastCardId) +values +(:id, :modelId, :created, :modified, :tags, :spaceUntil, :lastCardId)""", dlist) + # now fields + dlist = [{ + 'id': f[0], + 'factId': f[1], + 'fieldModelId': f[2], + 'ordinal': f[3], + 'value': f[4] + } for f in fields] + # delete local fields since ids may have changed + self.deck.s.execute( + "delete from fields where factId in %s" % + ids2str([f[0] for f in facts])) + # then update + self.deck.s.execute(""" +insert into fields +(id, factId, fieldModelId, ordinal, value) +values +(:id, :factId, :fieldModelId, :ordinal, :value)""", dlist) + self.deck.s.statement( + "delete from factsDeleted where factId in %s" % + ids2str([f[0] for f in facts])) + + def deleteFacts(self, ids): + self.deck.deleteFacts(ids) + + # Cards + ########################################################################## + + def getCards(self, ids): + return self.realLists(self.deck.s.all(""" +select id, factId, cardModelId, created, modified, tags, ordinal, +priority, interval, lastInterval, due, lastDue, factor, +firstAnswered, reps, successive, averageTime, reviewTime, youngEase0, +youngEase1, youngEase2, youngEase3, youngEase4, matureEase0, +matureEase1, matureEase2, matureEase3, matureEase4, yesCount, noCount, +question, answer, lastFactor, spaceUntil, type, combinedDue, relativeDelay +from cards where id in %s""" % ids2str(ids))) + + def updateCards(self, cards): + if not cards: + return + # FIXME: older clients won't send this, so this is temp compat code + def getType(row): + if len(row) > 36: + return row[36] + if row[15]: + return 1 + elif row[14]: + return 0 + return 2 + dlist = [{'id': c[0], + 'factId': c[1], + 'cardModelId': c[2], + 'created': c[3], + 'modified': c[4], + 'tags': c[5], + 'ordinal': 
c[6], + 'priority': c[7], + 'interval': c[8], + 'lastInterval': c[9], + 'due': c[10], + 'lastDue': c[11], + 'factor': c[12], + 'firstAnswered': c[13], + 'reps': c[14], + 'successive': c[15], + 'averageTime': c[16], + 'reviewTime': c[17], + 'youngEase0': c[18], + 'youngEase1': c[19], + 'youngEase2': c[20], + 'youngEase3': c[21], + 'youngEase4': c[22], + 'matureEase0': c[23], + 'matureEase1': c[24], + 'matureEase2': c[25], + 'matureEase3': c[26], + 'matureEase4': c[27], + 'yesCount': c[28], + 'noCount': c[29], + 'question': c[30], + 'answer': c[31], + 'lastFactor': c[32], + 'spaceUntil': c[33], + 'type': c[34], + 'combinedDue': c[35], + 'rd': getType(c) + } for c in cards] + self.deck.s.execute(""" +insert or replace into cards +(id, factId, cardModelId, created, modified, tags, ordinal, +priority, interval, lastInterval, due, lastDue, factor, +firstAnswered, reps, successive, averageTime, reviewTime, youngEase0, +youngEase1, youngEase2, youngEase3, youngEase4, matureEase0, +matureEase1, matureEase2, matureEase3, matureEase4, yesCount, noCount, +question, answer, lastFactor, spaceUntil, type, combinedDue, +relativeDelay, isDue) +values +(:id, :factId, :cardModelId, :created, :modified, :tags, :ordinal, +:priority, :interval, :lastInterval, :due, :lastDue, :factor, +:firstAnswered, :reps, :successive, :averageTime, :reviewTime, :youngEase0, +:youngEase1, :youngEase2, :youngEase3, :youngEase4, :matureEase0, +:matureEase1, :matureEase2, :matureEase3, :matureEase4, :yesCount, +:noCount, :question, :answer, :lastFactor, :spaceUntil, +:type, :combinedDue, :rd, 0)""", dlist) + self.deck.s.statement( + "delete from cardsDeleted where cardId in %s" % + ids2str([c[0] for c in cards])) + + def deleteCards(self, ids): + self.deck.deleteCards(ids) + + # Deck/stats/history + ########################################################################## + + def bundleDeck(self): + # ensure modified is not greater than server time + if getattr(self, "server", None) and getattr( + 
self.server, "timestamp", None): + self.deck.modified = min(self.deck.modified,self.server.timestamp) + # and ensure lastSync is greater than modified + self.deck.lastSync = max(time.time(), self.deck.modified+1) + d = self.dictFromObj(self.deck) + del d['Session'] + del d['engine'] + del d['s'] + del d['path'] + del d['syncName'] + del d['version'] + if 'newQueue' in d: + del d['newQueue'] + del d['failedQueue'] + del d['revQueue'] + # these may be deleted before bundling + if 'css' in d: del d['css'] + if 'models' in d: del d['models'] + if 'currentModel' in d: del d['currentModel'] + keys = d.keys() + for k in keys: + if isinstance(d[k], types.MethodType): + del d[k] + d['meta'] = self.realLists(self.deck.s.all("select * from deckVars")) + return d + + def updateDeck(self, deck): + if 'meta' in deck: + meta = deck['meta'] + for (k,v) in meta: + self.deck.s.statement(""" +insert or replace into deckVars +(key, value) values (:k, :v)""", k=k, v=v) + del deck['meta'] + self.applyDict(self.deck, deck) + + def bundleStats(self): + def bundleStat(stat): + s = self.dictFromObj(stat) + s['day'] = s['day'].toordinal() + del s['id'] + return s + lastDay = date.fromtimestamp(max(0, self.deck.lastSync - 60*60*24)) + ids = self.deck.s.column0( + "select id from stats where type = 1 and day >= :day", day=lastDay) + stat = Stats() + def statFromId(id): + stat.fromDB(self.deck.s, id) + return stat + stats = { + 'global': bundleStat(self.deck._globalStats), + 'daily': [bundleStat(statFromId(id)) for id in ids], + } + return stats + + def updateStats(self, stats): + stats['global']['day'] = date.fromordinal(stats['global']['day']) + self.applyDict(self.deck._globalStats, stats['global']) + self.deck._globalStats.toDB(self.deck.s) + for record in stats['daily']: + record['day'] = date.fromordinal(record['day']) + stat = Stats() + id = self.deck.s.scalar("select id from stats where " + "type = :type and day = :day", + type=1, day=record['day']) + if id: + stat.fromDB(self.deck.s, 
id) + else: + stat.create(self.deck.s, 1, record['day']) + self.applyDict(stat, record) + stat.toDB(self.deck.s) + + def bundleHistory(self): + return self.realLists(self.deck.s.all(""" +select cardId, time, lastInterval, nextInterval, ease, delay, +lastFactor, nextFactor, reps, thinkingTime, yesCount, noCount +from reviewHistory where time > :ls""", + ls=self.deck.lastSync)) + + def updateHistory(self, history): + dlist = [{'cardId': h[0], + 'time': h[1], + 'lastInterval': h[2], + 'nextInterval': h[3], + 'ease': h[4], + 'delay': h[5], + 'lastFactor': h[6], + 'nextFactor': h[7], + 'reps': h[8], + 'thinkingTime': h[9], + 'yesCount': h[10], + 'noCount': h[11]} for h in history] + if not dlist: + return + self.deck.s.statements(""" +insert or ignore into reviewHistory +(cardId, time, lastInterval, nextInterval, ease, delay, +lastFactor, nextFactor, reps, thinkingTime, yesCount, noCount) +values +(:cardId, :time, :lastInterval, :nextInterval, :ease, :delay, +:lastFactor, :nextFactor, :reps, :thinkingTime, :yesCount, :noCount)""", + dlist) + + def bundleSources(self): + return self.realLists(self.deck.s.all("select * from sources")) + + def updateSources(self, sources): + for s in sources: + self.deck.s.statement(""" +insert or replace into sources values +(:id, :name, :created, :lastSync, :syncPeriod)""", + id=s[0], + name=s[1], + created=s[2], + lastSync=s[3], + syncPeriod=s[4]) + + # Media metadata + ########################################################################## + + def getMedia(self, ids): + return [tuple(row) for row in self.deck.s.all(""" +select id, filename, size, created, originalPath, description +from media where id in %s""" % ids2str(ids))] + + def updateMedia(self, media): + meta = [] + for m in media: + # build meta + meta.append({ + 'id': m[0], + 'filename': m[1], + 'size': m[2], + 'created': m[3], + 'originalPath': m[4], + 'description': m[5]}) + # apply metadata + if meta: + self.deck.s.statements(""" +insert or replace into media (id, 
filename, size, created, +originalPath, description) +values (:id, :filename, :size, :created, :originalPath, +:description)""", meta) + self.deck.s.statement( + "delete from mediaDeleted where mediaId in %s" % + ids2str([m[0] for m in media])) + + def deleteMedia(self, ids): + sids = ids2str(ids) + files = self.deck.s.column0( + "select filename from media where id in %s" % sids) + self.deck.s.statement(""" +insert into mediaDeleted +select id, :now from media +where media.id in %s""" % sids, now=time.time()) + self.deck.s.execute( + "delete from media where id in %s" % sids) + + # One-way syncing (sharing) + ########################################################################## + + def syncOneWay(self, lastSync): + "Sync two decks one way." + payload = self.server.genOneWayPayload(lastSync) + self.applyOneWayPayload(payload) + self.deck.reset() + + def syncOneWayDeckName(self): + return (self.deck.s.scalar("select name from sources where id = :id", + id=self.server.deckName) or + hexifyID(int(self.server.deckName))) + + def prepareOneWaySync(self): + "Sync setup. True if sync needed. Not used for local sync." + srcID = self.server.deckName + (lastSync, syncPeriod) = self.deck.s.first( + "select lastSync, syncPeriod from sources where id = :id", id=srcID) + if self.server.modified() <= lastSync: + return + self.deck.lastSync = lastSync + return True + + def genOneWayPayload(self, lastSync): + "Bundle all added or changed objects since the last sync." 
+ p = {} + # facts + factIds = self.deck.s.column0( + "select id from facts where modified > :l", l=lastSync) + p['facts'] = self.getFacts(factIds, updateModified=True) + # models + modelIds = self.deck.s.column0( + "select id from models where modified > :l", l=lastSync) + p['models'] = self.getModels(modelIds, updateModified=True) + # media + mediaIds = self.deck.s.column0( + "select id from media where created > :l", l=lastSync) + p['media'] = self.getMedia(mediaIds) + # cards + cardIds = self.deck.s.column0( + "select id from cards where modified > :l", l=lastSync) + p['cards'] = self.realLists(self.getOneWayCards(cardIds)) + return p + + def applyOneWayPayload(self, payload): + keys = [k for k in KEYS if k != "cards"] + # model, facts, media + for key in keys: + self.updateObjsFromKey(payload[key], key) + # models need their source tagged + for m in payload["models"]: + self.deck.s.statement("update models set source = :s " + "where id = :id", + s=self.server.deckName, + id=m['id']) + # cards last, handled differently + t = time.time() + try: + self.updateOneWayCards(payload['cards']) + except KeyError: + sys.stderr.write("Subscribed to a broken deck. " + "Try removing your deck subscriptions.") + t = 0 + # update sync time + self.deck.s.statement( + "update sources set lastSync = :t where id = :id", + id=self.server.deckName, t=t) + self.deck.modified = time.time() + + def getOneWayCards(self, ids): + "The minimum information necessary to generate one way cards." 
+ return self.deck.s.all( + "select id, factId, cardModelId, ordinal, created from cards " + "where id in %s" % ids2str(ids)) + + def updateOneWayCards(self, cards): + if not cards: + return + t = time.time() + dlist = [{'id': c[0], 'factId': c[1], 'cardModelId': c[2], + 'ordinal': c[3], 'created': c[4], 't': t} for c in cards] + # add any missing cards + self.deck.s.statements(""" +insert or ignore into cards +(id, factId, cardModelId, created, modified, tags, ordinal, +priority, interval, lastInterval, due, lastDue, factor, +firstAnswered, reps, successive, averageTime, reviewTime, youngEase0, +youngEase1, youngEase2, youngEase3, youngEase4, matureEase0, +matureEase1, matureEase2, matureEase3, matureEase4, yesCount, noCount, +question, answer, lastFactor, spaceUntil, isDue, type, combinedDue, +relativeDelay) +values +(:id, :factId, :cardModelId, :created, :t, "", :ordinal, +1, 0, 0, :created, 0, 2.5, +0, 0, 0, 0, 0, 0, +0, 0, 0, 0, 0, +0, 0, 0, 0, 0, +0, "", "", 2.5, 0, 0, 2, :t, 2)""", dlist) + # update q/as + models = dict(self.deck.s.all(""" +select cards.id, models.id +from cards, facts, models +where cards.factId = facts.id +and facts.modelId = models.id +and cards.id in %s""" % ids2str([c[0] for c in cards]))) + self.deck.s.flush() + self.deck.updateCardQACache( + [(c[0], c[2], c[1], models[c[0]]) for c in cards]) + # rebuild priorities on client + cardIds = [c[0] for c in cards] + self.deck.updateCardTags(cardIds) + self.rebuildPriorities(cardIds) + + # Tools + ########################################################################## + + def modified(self): + return self.deck.modified + + def _lastSync(self): + return self.deck.lastSync + + def unstuff(self, data): + "Uncompress and convert to unicode." + return simplejson.loads(unicode(zlib.decompress(data), "utf8")) + + def stuff(self, data): + "Convert into UTF-8 and compress." 
+ return zlib.compress(simplejson.dumps(data)) + + def dictFromObj(self, obj): + "Return a dict representing OBJ without any hidden db fields." + return dict([(k,v) for (k,v) in obj.__dict__.items() + if not k.startswith("_")]) + + def applyDict(self, obj, dict): + "Apply each element in DICT to OBJ in a way the ORM notices." + for (k,v) in dict.items(): + setattr(obj, k, v) + + def realLists(self, result): + "Convert an SQLAlchemy response into a list of real lists." + return [list(x) for x in result] + + def getObjsFromKey(self, ids, key): + return getattr(self, "get" + key.capitalize())(ids) + + def deleteObjsFromKey(self, ids, key): + return getattr(self, "delete" + key.capitalize())(ids) + + def updateObjsFromKey(self, ids, key): + return getattr(self, "update" + key.capitalize())(ids) + + # Full sync + ########################################################################## + + def needFullSync(self, sums): + if self.deck.lastSync <= 0: + return True + for sum in sums: + for l in sum.values(): + if len(l) > 1000: + return True + if self.deck.s.scalar( + "select count() from reviewHistory where time > :ls", + ls=self.deck.lastSync) > 1000: + return True + lastDay = date.fromtimestamp(max(0, self.deck.lastSync - 60*60*24)) + if self.deck.s.scalar( + "select count() from stats where day >= :day", + day=lastDay) > 100: + return True + return False + + def prepareFullSync(self): + t = time.time() + # ensure modified is not greater than server time + self.deck.modified = min(self.deck.modified, self.server.timestamp) + self.deck.s.commit() + self.deck.close() + fields = { + "p": self.server.password, + "u": self.server.username, + "d": self.server.deckName.encode("utf-8"), + } + if self.localTime > self.remoteTime: + return ("fromLocal", fields, self.deck.path) + else: + return ("fromServer", fields, self.deck.path) + + def fullSync(self): + ret = self.prepareFullSync() + if ret[0] == "fromLocal": + self.fullSyncFromLocal(ret[1], ret[2]) + else: + 
self.fullSyncFromServer(ret[1], ret[2]) + + def fullSyncFromLocal(self, fields, path): + global sendProgressHook + try: + # write into a temporary file, since POST needs content-length + src = open(path, "rb") + (fd, name) = tempfile.mkstemp(prefix="oldanki") + tmp = open(name, "w+b") + # post vars + for (key, value) in fields.items(): + tmp.write('--' + MIME_BOUNDARY + "\r\n") + tmp.write('Content-Disposition: form-data; name="%s"\r\n' % key) + tmp.write('\r\n') + tmp.write(value) + tmp.write('\r\n') + # file header + tmp.write('--' + MIME_BOUNDARY + "\r\n") + tmp.write( + 'Content-Disposition: form-data; name="deck"; filename="deck"\r\n') + tmp.write('Content-Type: application/octet-stream\r\n') + tmp.write('\r\n') + # data + comp = zlib.compressobj() + while 1: + data = src.read(CHUNK_SIZE) + if not data: + tmp.write(comp.flush()) + break + tmp.write(comp.compress(data)) + src.close() + tmp.write('\r\n--' + MIME_BOUNDARY + '--\r\n\r\n') + size = tmp.tell() + tmp.seek(0) + # open http connection + runHook("fullSyncStarted", size) + headers = { + 'Content-type': 'multipart/form-data; boundary=%s' % + MIME_BOUNDARY, + 'Content-length': str(size), + 'Host': SYNC_HOST, + } + req = urllib2.Request(SYNC_URL + "fullup?v=2", tmp, headers) + try: + sendProgressHook = fullSyncProgressHook + res = urllib2.urlopen(req).read() + assert res.startswith("OK") + # update lastSync + c = sqlite.connect(path) + c.execute("update decks set lastSync = ?", + (res[3:],)) + c.commit() + c.close() + finally: + sendProgressHook = None + tmp.close() + os.close(fd) + os.unlink(name) + finally: + runHook("fullSyncFinished") + + def fullSyncFromServer(self, fields, path): + try: + runHook("fullSyncStarted", 0) + fields = urllib.urlencode(fields) + src = urllib.urlopen(SYNC_URL + "fulldown", fields) + (fd, tmpname) = tempfile.mkstemp(dir=os.path.dirname(path), + prefix="fullsync") + tmp = open(tmpname, "wb") + decomp = zlib.decompressobj() + cnt = 0 + while 1: + data = src.read(CHUNK_SIZE) + if 
not data: + tmp.write(decomp.flush()) + break + tmp.write(decomp.decompress(data)) + cnt += CHUNK_SIZE + runHook("fullSyncProgress", "fromServer", cnt) + src.close() + tmp.close() + os.close(fd) + # if we were successful, overwrite old deck + os.unlink(path) + os.rename(tmpname, path) + # reset the deck name + c = sqlite.connect(path) + c.execute("update decks set syncName = ?", + [checksum(path.encode("utf-8"))]) + c.commit() + c.close() + finally: + runHook("fullSyncFinished") + +# Local syncing +########################################################################## + + +class SyncServer(SyncTools): + + def __init__(self, deck=None): + SyncTools.__init__(self, deck) + +class SyncClient(SyncTools): + + pass + +# HTTP proxy: act as a server and direct requests to the real server +########################################################################## + +class HttpSyncServerProxy(SyncServer): + + def __init__(self, user, passwd): + SyncServer.__init__(self) + self.decks = None + self.deckName = None + self.username = user + self.password = passwd + self.protocolVersion = 5 + self.sourcesToCheck = [] + + def connect(self, clientVersion=""): + "Check auth, protocol & grab deck list." 
+ if not self.decks: + import socket + socket.setdefaulttimeout(30) + d = self.runCmd("getDecks", + libanki=oldanki.version, + client=clientVersion, + sources=simplejson.dumps(self.sourcesToCheck), + pversion=self.protocolVersion) + socket.setdefaulttimeout(None) + if d['status'] != "OK": + raise SyncError(type="authFailed", status=d['status']) + self.decks = d['decks'] + self.timestamp = d['timestamp'] + self.timediff = abs(self.timestamp - time.time()) + + def hasDeck(self, deckName): + self.connect() + return deckName in self.decks.keys() + + def availableDecks(self): + self.connect() + return self.decks.keys() + + def createDeck(self, deckName): + ret = self.runCmd("createDeck", name=deckName.encode("utf-8")) + if not ret or ret['status'] != "OK": + raise SyncError(type="createFailed") + self.decks[deckName] = [0, 0] + + def summary(self, lastSync): + return self.runCmd("summary", + lastSync=self.stuff(lastSync)) + + def genOneWayPayload(self, lastSync): + return self.runCmd("genOneWayPayload", + lastSync=self.stuff(lastSync)) + + def modified(self): + self.connect() + return self.decks[self.deckName][0] + + def _lastSync(self): + self.connect() + return self.decks[self.deckName][1] + + def applyPayload(self, payload): + return self.runCmd("applyPayload", + payload=self.stuff(payload)) + + def finish(self): + assert self.runCmd("finish") == "OK" + + def runCmd(self, action, **args): + data = {"p": self.password, + "u": self.username, + "v": 2} + if self.deckName: + data['d'] = self.deckName.encode("utf-8") + else: + data['d'] = None + data.update(args) + data = urllib.urlencode(data) + try: + f = urllib2.urlopen(SYNC_URL + action, data) + except (urllib2.URLError, socket.error, socket.timeout, + httplib.BadStatusLine), e: + raise SyncError(type="connectionError", + exc=`e`) + ret = f.read() + if not ret: + raise SyncError(type="noResponse") + try: + return self.unstuff(ret) + except Exception, e: + raise SyncError(type="connectionError", + exc=`e`) + +# HTTP 
server: respond to proxy requests and return data +########################################################################## + +class HttpSyncServer(SyncServer): + def __init__(self): + SyncServer.__init__(self) + self.decks = {} + self.deck = None + + def summary(self, lastSync): + return self.stuff(SyncServer.summary( + self, float(zlib.decompress(lastSync)))) + + def applyPayload(self, payload): + return self.stuff(SyncServer.applyPayload(self, + self.unstuff(payload))) + + def genOneWayPayload(self, lastSync): + return self.stuff(SyncServer.genOneWayPayload( + self, float(zlib.decompress(lastSync)))) + + def getDecks(self, libanki, client, sources, pversion): + return self.stuff({ + "status": "OK", + "decks": self.decks, + "timestamp": time.time(), + }) + + def createDeck(self, name): + "Create a deck on the server. Not implemented." + return self.stuff("OK") + +# Local media copying +########################################################################## + +def copyLocalMedia(src, dst): + srcDir = src.mediaDir() + if not srcDir: + return + dstDir = dst.mediaDir(create=True) + files = os.listdir(srcDir) + # find media references + used = {} + for col in ("question", "answer"): + txt = dst.s.column0(""" +select %(c)s from cards where +%(c)s like '%% +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Tags +==================== +""" +__docformat__ = 'restructuredtext' + + +from oldanki.db import * + +#src 0 = fact +#src 1 = model +#src 2 = card model + +# Tables +########################################################################## + +def initTagTables(s): + try: + s.statement(""" +create table tags ( +id integer not null, +tag text not null collate nocase, +priority integer not null default 2, +primary key(id))""") + s.statement(""" +create table cardTags ( +id integer not null, +cardId integer not null, +tagId integer not null, +src integer not null, +primary key(id))""") + except: + pass + +def tagId(s, tag, 
create=True): + "Return ID for tag, creating if necessary." + id = s.scalar("select id from tags where tag = :tag", tag=tag) + if id or not create: + return id + s.statement(""" +insert or ignore into tags +(tag) values (:tag)""", tag=tag) + return s.scalar("select id from tags where tag = :tag", tag=tag) + +def tagIds(s, tags, create=True): + "Return an ID for all tags, creating if necessary." + ids = {} + if create: + s.statements("insert or ignore into tags (tag) values (:tag)", + [{'tag': t} for t in tags]) + tagsD = dict([(x.lower(), y) for (x, y) in s.all(""" +select tag, id from tags +where tag in (%s)""" % ",".join([ + "'%s'" % t.replace("'", "''") for t in tags]))]) + return tagsD diff --git a/oldanki/template/LICENSE b/oldanki/template/LICENSE new file mode 100644 index 000000000..2745bcce5 --- /dev/null +++ b/oldanki/template/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2009 Chris Wanstrath + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/oldanki/template/README.anki b/oldanki/template/README.anki new file mode 100644 index 000000000..efd8d0969 --- /dev/null +++ b/oldanki/template/README.anki @@ -0,0 +1,6 @@ +Anki uses a modified version of Pystache to provide Mustache-like syntax. +Behaviour is a little different from standard Mustache: + +- {{text}} returns text verbatim with no HTML escaping +- {{{text}}} strips an outer span tag +- partial rendering is disabled for security reasons diff --git a/oldanki/template/README.rst b/oldanki/template/README.rst new file mode 100644 index 000000000..c972a62cd --- /dev/null +++ b/oldanki/template/README.rst @@ -0,0 +1,78 @@ +======== +Pystache +======== + +Inspired by ctemplate_ and et_, Mustache_ is a +framework-agnostic way to render logic-free views. + +As ctemplates says, "It emphasizes separating logic from presentation: +it is impossible to embed application logic in this template language." + +Pystache is a Python implementation of Mustache. Pystache requires +Python 2.6. + +Documentation +============= + +The different Mustache tags are documented at `mustache(5)`_. + +Install It +========== + +:: + + pip install pystache + + +Use It +====== + +:: + + >>> import pystache + >>> pystache.render('Hi {{person}}!', {'person': 'Mom'}) + 'Hi Mom!' + +You can also create dedicated view classes to hold your view logic. + +Here's your simple.py:: + + import pystache + class Simple(pystache.View): + def thing(self): + return "pizza" + +Then your template, simple.mustache:: + + Hi {{thing}}! + +Pull it together:: + + >>> Simple().render() + 'Hi pizza!' + + +Test It +======= + +nose_ works great! :: + + pip install nose + cd pystache + nosetests + + +Author +====== + +:: + + context = { 'author': 'Chris Wanstrath', 'email': 'chris@ozmm.org' } + pystache.render("{{author}} :: {{email}}", context) + + +.. _ctemplate: http://code.google.com/p/google-ctemplate/ +.. _et: http://www.ivan.fomichev.name/2008/05/erlang-template-engine-prototype.html +.. 
_Mustache: http://defunkt.github.com/mustache/ +.. _mustache(5): http://defunkt.github.com/mustache/mustache.5.html +.. _nose: http://somethingaboutorange.com/mrl/projects/nose/0.11.1/testing.html \ No newline at end of file diff --git a/oldanki/template/__init__.py b/oldanki/template/__init__.py new file mode 100644 index 000000000..8b30ab788 --- /dev/null +++ b/oldanki/template/__init__.py @@ -0,0 +1,7 @@ +from oldanki.template.template import Template +from oldanki.template.view import View + +def render(template, context=None, **kwargs): + context = context and context.copy() or {} + context.update(kwargs) + return Template(template, context).render() diff --git a/oldanki/template/template.py b/oldanki/template/template.py new file mode 100644 index 000000000..531fcc6e0 --- /dev/null +++ b/oldanki/template/template.py @@ -0,0 +1,156 @@ +import re +import cgi +import collections + +modifiers = {} +def modifier(symbol): + """Decorator for associating a function with a Mustache tag modifier. + + @modifier('P') + def render_tongue(self, tag_name=None, context=None): + return ":P %s" % tag_name + + {{P yo }} => :P yo + """ + def set_modifier(func): + modifiers[symbol] = func + return func + return set_modifier + + +def get_or_attr(obj, name, default=None): + try: + return obj[name] + except KeyError: + return default + except: + try: + return getattr(obj, name) + except AttributeError: + return default + + +class Template(object): + # The regular expression used to find a #section + section_re = None + + # The regular expression used to find a tag. 
+ tag_re = None + + # Opening tag delimiter + otag = '{{' + + # Closing tag delimiter + ctag = '}}' + + def __init__(self, template, context=None): + self.template = template + self.context = context or {} + self.compile_regexps() + + def render(self, template=None, context=None, encoding=None): + """Turns a Mustache template into something wonderful.""" + template = template or self.template + context = context or self.context + + template = self.render_sections(template, context) + result = self.render_tags(template, context) + if encoding is not None: + result = result.encode(encoding) + return result + + def compile_regexps(self): + """Compiles our section and tag regular expressions.""" + tags = { 'otag': re.escape(self.otag), 'ctag': re.escape(self.ctag) } + + section = r"%(otag)s[\#|^]([^\}]*)%(ctag)s(.+?)%(otag)s/\1%(ctag)s" + self.section_re = re.compile(section % tags, re.M|re.S) + + tag = r"%(otag)s(#|=|&|!|>|\{)?(.+?)\1?%(ctag)s+" + self.tag_re = re.compile(tag % tags) + + def render_sections(self, template, context): + """Expands sections.""" + while 1: + match = self.section_re.search(template) + if match is None: + break + + section, section_name, inner = match.group(0, 1, 2) + section_name = section_name.strip() + + it = get_or_attr(context, section_name, None) + replacer = '' + # if it and isinstance(it, collections.Callable): + # replacer = it(inner) + if it and not hasattr(it, '__iter__'): + if section[2] != '^': + replacer = inner + elif it and hasattr(it, 'keys') and hasattr(it, '__getitem__'): + if section[2] != '^': + replacer = self.render(inner, it) + elif it: + insides = [] + for item in it: + insides.append(self.render(inner, item)) + replacer = ''.join(insides) + elif not it and section[2] == '^': + replacer = inner + + template = template.replace(section, replacer) + + return template + + def render_tags(self, template, context): + """Renders all the tags in a template for a context.""" + while 1: + match = self.tag_re.search(template) 
+ if match is None: + break + + tag, tag_type, tag_name = match.group(0, 1, 2) + tag_name = tag_name.strip() + try: + func = modifiers[tag_type] + replacement = func(self, tag_name, context) + template = template.replace(tag, replacement) + except: + return u"{{invalid template}}" + + return template + + @modifier('{') + def render_tag(self, tag_name, context): + """Given a tag name and context, finds, escapes, and renders the tag.""" + raw = get_or_attr(context, tag_name, '') + if not raw and raw is not 0: + return '' + return re.sub("^(.*)", "\\1", raw) + + @modifier('!') + def render_comment(self, tag_name=None, context=None): + """Rendering a comment always returns nothing.""" + return '' + + @modifier(None) + def render_unescaped(self, tag_name=None, context=None): + """Render a tag without escaping it.""" + return unicode(get_or_attr(context, tag_name, '{unknown field %s}' % tag_name)) + + # @modifier('>') + # def render_partial(self, tag_name=None, context=None): + # """Renders a partial within the current context.""" + # # Import view here to avoid import loop + # from pystache.view import View + + # view = View(context=context) + # view.template_name = tag_name + + # return view.render() + + @modifier('=') + def render_delimiter(self, tag_name=None, context=None): + """Changes the Mustache delimiter.""" + self.otag, self.ctag = tag_name.split(' ') + self.compile_regexps() + return '' diff --git a/oldanki/template/view.py b/oldanki/template/view.py new file mode 100644 index 000000000..405405cb0 --- /dev/null +++ b/oldanki/template/view.py @@ -0,0 +1,116 @@ +from oldanki.template import Template +import os.path +import re + +class View(object): + # Path where this view's template(s) live + template_path = '.' + + # Extension for templates + template_extension = 'mustache' + + # The name of this template. If none is given the View will try + # to infer it based on the class name. + template_name = None + + # Absolute path to the template itself. 
Pystache will try to guess + # if it's not provided. + template_file = None + + # Contents of the template. + template = None + + # Character encoding of the template file. If None, Pystache will not + # do any decoding of the template. + template_encoding = None + + def __init__(self, template=None, context=None, **kwargs): + self.template = template + self.context = context or {} + + # If the context we're handed is a View, we want to inherit + # its settings. + if isinstance(context, View): + self.inherit_settings(context) + + if kwargs: + self.context.update(kwargs) + + def inherit_settings(self, view): + """Given another View, copies its settings.""" + if view.template_path: + self.template_path = view.template_path + + if view.template_name: + self.template_name = view.template_name + + def load_template(self): + if self.template: + return self.template + + if self.template_file: + return self._load_template() + + name = self.get_template_name() + '.' + self.template_extension + + if isinstance(self.template_path, basestring): + self.template_file = os.path.join(self.template_path, name) + return self._load_template() + + for path in self.template_path: + self.template_file = os.path.join(path, name) + if os.path.exists(self.template_file): + return self._load_template() + + raise IOError('"%s" not found in "%s"' % (name, ':'.join(self.template_path),)) + + + def _load_template(self): + f = open(self.template_file, 'r') + try: + template = f.read() + if self.template_encoding: + template = unicode(template, self.template_encoding) + finally: + f.close() + return template + + def get_template_name(self, name=None): + """TemplatePartial => template_partial + Takes a string but defaults to using the current class' name or + the `template_name` attribute + """ + if self.template_name: + return self.template_name + + if not name: + name = self.__class__.__name__ + + def repl(match): + return '_' + match.group(0).lower() + + return re.sub('[A-Z]', repl, name)[1:] + 
+ def __contains__(self, needle): + return needle in self.context or hasattr(self, needle) + + def __getitem__(self, attr): + val = self.get(attr, None) + if not val: + raise KeyError("No such key.") + return val + + def get(self, attr, default): + attr = self.context.get(attr, getattr(self, attr, default)) + + if hasattr(attr, '__call__'): + return attr() + else: + return attr + + def render(self, encoding=None): + template = self.load_template() + return Template(template, self).render(encoding=encoding) + + def __str__(self): + return self.render() diff --git a/oldanki/utils.py b/oldanki/utils.py new file mode 100644 index 000000000..7d8331012 --- /dev/null +++ b/oldanki/utils.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +# Copyright: Damien Elmes +# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html + +"""\ +Miscellaneous utilities +============================== +""" +__docformat__ = 'restructuredtext' + +import re, os, random, time, types, math, htmlentitydefs, subprocess + +try: + import hashlib + md5 = hashlib.md5 +except ImportError: + import md5 + md5 = md5.new + +from oldanki.db import * +from oldanki.lang import _, ngettext +import locale, sys + +if sys.version_info[1] < 5: + def format_string(a, b): + return a % b + locale.format_string = format_string + +# Time handling +############################################################################## + +timeTable = { + "years": lambda n: ngettext("%s year", "%s years", n), + "months": lambda n: ngettext("%s month", "%s months", n), + "days": lambda n: ngettext("%s day", "%s days", n), + "hours": lambda n: ngettext("%s hour", "%s hours", n), + "minutes": lambda n: ngettext("%s minute", "%s minutes", n), + "seconds": lambda n: ngettext("%s second", "%s seconds", n), + } + +afterTimeTable = { + "years": lambda n: ngettext("%s year", "%s years", n), + "months": lambda n: ngettext("%s month", "%s months", n), + "days": lambda n: ngettext("%s day", "%s days", n), + "hours": lambda n: 
ngettext("%s hour", "%s hours", n), + "minutes": lambda n: ngettext("%s minute", "%s minutes", n), + "seconds": lambda n: ngettext("%s second", "%s seconds", n), + } + +shortTimeTable = { + "years": _("%sy"), + "months": _("%sm"), + "days": _("%sd"), + "hours": _("%sh"), + "minutes": _("%sm"), + "seconds": _("%ss"), + } + +def fmtTimeSpan(time, pad=0, point=0, short=False, after=False): + "Return a string representing a time span (eg '2 days')." + (type, point) = optimalPeriod(time, point) + time = convertSecondsTo(time, type) + if not point: + time = math.floor(time) + if short: + fmt = shortTimeTable[type] + else: + if after: + fmt = afterTimeTable[type](_pluralCount(time, point)) + else: + fmt = timeTable[type](_pluralCount(time, point)) + timestr = "%(a)d.%(b)df" % {'a': pad, 'b': point} + return locale.format_string("%" + (fmt % timestr), time) + +def optimalPeriod(time, point): + if abs(time) < 60: + type = "seconds" + point -= 1 + elif abs(time) < 3599: + type = "minutes" + elif abs(time) < 60 * 60 * 24: + type = "hours" + elif abs(time) < 60 * 60 * 24 * 30: + type = "days" + elif abs(time) < 60 * 60 * 24 * 365: + type = "months" + point += 1 + else: + type = "years" + point += 1 + return (type, max(point, 0)) + +def convertSecondsTo(seconds, type): + if type == "seconds": + return seconds + elif type == "minutes": + return seconds / 60.0 + elif type == "hours": + return seconds / 3600.0 + elif type == "days": + return seconds / 86400.0 + elif type == "months": + return seconds / 2592000.0 + elif type == "years": + return seconds / 31536000.0 + assert False + +def _pluralCount(time, point): + if point: + return 2 + return math.floor(time) + +# Locale +############################################################################## + +def fmtPercentage(float_value, point=1): + "Return float with percentage sign" + fmt = '%' + "0.%(b)df" % {'b': point} + return locale.format_string(fmt, float_value) + "%" + +def fmtFloat(float_value, point=1): + "Return a string 
with decimal separator according to current locale" + fmt = '%' + "0.%(b)df" % {'b': point} + return locale.format_string(fmt, float_value) + +# HTML +############################################################################## + +def stripHTML(s): + s = re.sub("(?s).*?", "", s) + s = re.sub("(?s).*?", "", s) + s = re.sub("<.*?>", "", s) + s = entsToTxt(s) + return s + +def stripHTMLAlt(s): + "Strip HTML, preserving img alt text." + s = re.sub("]*alt=[\"']?([^\"'>]+)[\"']?[^>]*>", "\\1", s) + return stripHTML(s) + +def stripHTMLMedia(s): + "Strip HTML but keep media filenames" + s = re.sub("]+)[\"']? ?/?>", " \\1 ", s) + return stripHTML(s) + +def tidyHTML(html): + "Remove cruft like body tags and return just the important part." + # contents of body - no head or html tags + html = re.sub(u".*(.*)", + "\\1", html.replace("\n", u"")) + # strip superfluous Qt formatting + html = re.sub(u"(?:-qt-table-type: root; )?" + "margin-top:\d+px; margin-bottom:\d+px; margin-left:\d+px; " + "margin-right:\d+px;(?: -qt-block-indent:0; " + "text-indent:0px;)?", u"", html) + html = re.sub(u"-qt-paragraph-type:empty;", u"", html) + # strip leading space in style statements, and remove if no contents + html = re.sub(u'style=" ', u'style="', html) + html = re.sub(u' style=""', u"", html) + # convert P tags into SPAN and/or BR + html = re.sub(u'(.*?)

', u'\\2
', html) + html = re.sub(u'

(.*?)

', u'\\1
', html) + html = re.sub(u'
$', u'', html) + html = re.sub(u"^
(.*)
$", u"\\1", html) + # this is being added by qt's html editor, and leads to unwanted spaces + html = re.sub(u"^

(.*?)

$", u'\\1', html) + html = re.sub(u"^
$", "", html) + return html + +def entsToTxt(html): + def fixup(m): + text = m.group(0) + if text[:2] == "&#": + # character reference + try: + if text[:3] == "&#x": + return unichr(int(text[3:-1], 16)) + else: + return unichr(int(text[2:-1])) + except ValueError: + pass + else: + # named entity + try: + text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]) + except KeyError: + pass + return text # leave as is + return re.sub("&#?\w+;", fixup, html) + +# IDs +############################################################################## + +def genID(static=[]): + "Generate a random, unique 64bit ID." + # 23 bits of randomness, 41 bits of current time + # random rather than a counter to ensure efficient btree + t = long(time.time()*1000) + if not static: + static.extend([t, {}]) + else: + if static[0] != t: + static[0] = t + static[1] = {} + while 1: + rand = random.getrandbits(23) + if rand not in static[1]: + static[1][rand] = True + break + x = rand << 41 | t + # turn into a signed long + if x >= 9223372036854775808L: + x -= 18446744073709551616L + return x + +def hexifyID(id): + if id < 0: + id += 18446744073709551616L + return "%x" % id + +def dehexifyID(id): + id = int(id, 16) + if id >= 9223372036854775808L: + id -= 18446744073709551616L + return id + +def ids2str(ids): + """Given a list of integers, return a string '(int1,int2,.)' + +The caller is responsible for ensuring only integers are provided. +This is safe if you use sqlite primary key columns, which are guaranteed +to be integers.""" + return "(%s)" % ",".join([str(i) for i in ids]) + +# Tags +############################################################################## + +def parseTags(tags): + "Parse a string and return a list of tags." + tags = re.split(" |, ?", tags) + return [t.strip() for t in tags if t.strip()] + +def joinTags(tags): + return u" ".join(tags) + +def canonifyTags(tags): + "Strip leading/trailing/superfluous commas and duplicates." 
+ tags = [t.lstrip(":") for t in set(parseTags(tags))] + return joinTags(sorted(tags)) + +def findTag(tag, tags): + "True if TAG is in TAGS. Ignore case." + if not isinstance(tags, types.ListType): + tags = parseTags(tags) + return tag.lower() in [t.lower() for t in tags] + +def addTags(tagstr, tags): + "Add tags if they don't exist." + currentTags = parseTags(tags) + for tag in parseTags(tagstr): + if not findTag(tag, currentTags): + currentTags.append(tag) + return joinTags(currentTags) + +def deleteTags(tagstr, tags): + "Delete tags if they don't exists." + currentTags = parseTags(tags) + for tag in parseTags(tagstr): + try: + currentTags.remove(tag) + except ValueError: + pass + return joinTags(currentTags) + +# Misc +############################################################################## + +def checksum(data): + return md5(data).hexdigest() + +def call(argv, wait=True, **kwargs): + try: + o = subprocess.Popen(argv, **kwargs) + except OSError: + # command not found + return -1 + if wait: + while 1: + try: + ret = o.wait() + except OSError: + # interrupted system call + continue + break + else: + ret = 0 + return ret diff --git a/tests/support/anki12-broken.anki b/tests/support/anki12-broken.anki new file mode 100644 index 0000000000000000000000000000000000000000..530815fefe76e76225c6179aacdb646ce4a453b6 GIT binary patch literal 64512 zcmeHw4R{;XmFUdev3~p;lemg;M8-0~7O@jawq+@h#BO32Lt@hW0NsQ&iZpgaWXaLU zB*x8BnV(SpN=qrwK+0~nWnsTAEPU;^bPIjyqp#2(0s*@1TUb6C3S`T63x)Q*-S+jJ zb7!tJBh4rVLJ4H%vo&+?J@?#m&)jo=&OP_crcbU*=45Fslbwj=q;5{+c%JK*B#z^1 z;D0szvnvb_BFBwLS_ID$D%@+hu3s#!0suF82EkU7|0cg9Z;;o?tK=8t1@bdBx&j8v zq~*lnS>xroWI7@5QFf)E8l$nPT!w~66LNg#1+lC$8e~sEIwxLJ&pSOH4}bkkF19@- zE4f%sp+CZ!je`T52c^veA73{p(I}~-+MP^D!y}sqKQXvb8aaQnG_qygx=yLuotvJN zO%oEaX=yFu(L-nDNyQY}7rRW(#>VB%$qCsf4XrzW0E*BRSP@tNup+Sl zU_~T|&Qw;eL}d}eip(N}6`e&0D?(KWXy>Z}@Dn%_ewMVs&!T4d`B)?TG}ggSZ8iM3 zobXdcFf5X@Ir2ktE7=7P_GNRx=D?gd&@C?H+^xLR>2kT;@DD%q=5mDs!C+UQyDJzW 
z{R{usp8M{*@9+CVU;N*n0kl4XnGm}4VwnPc-GUCWhEjztz=Va>Z~e`$BtY0Ns`nm# zA?MPl;W1MgHGyzd|Gd{yj)({ zolJN3Cex5MrN~g?PMCmgoyv_xE}luIC#Q09I&MgtiA^ZV^u+c~xWfRu0}>}8i;|m8 z$usiAWNsRZk#i|IJ)YY!1BqFjf&!&80b0yRUIgyNQ*|@IJMOvF+ z{-xDD99R~pcapX`V8?J8rgq88ahU$bIB4@;Pz^ znIv&?5jmHvAWMlCKlH0%;B?^BpV2Q2wigdpL5C zOp}Q43Ow4E&4Ck*11qcQ9I(uGJDpCq)8+2^TKlr=U%NFs@XRIm?_c;vp#O}YzVxiC zp|c;Ayl@rnzUV_P2yYM}yVD6vb@=C9hs5U~`>H=;_S>J1JhJ*XTb5q=`&a(0KlG(- z&pq;29O*y(N3R|EN!QB-JQ`l1f5E5sV*$cyQXq(Ty7;HB-Uk@{2NC0K*S@?B2)Z_K z*9vDjf=2l>6v2>y2q;Nzr&~wTw%5I9-~UgHBw_XNp`QQw;9uWdifQ#5(ckt1rj>kw z5adjekZrFo`u)Z`SD!caX49Wm9xluNe;WT0=|ELr6}g2Y|4yDI-zE3LqkY*NusQI- zaiB?bIQh~Zu8PJMrCaT0$O3>ZlX(hS+Q*3{v1&IXQ2jr5A;(=PGzlAsm)k@RzW1t& zSDpncg7$X)nqE5jfprn=W1R!rRhjIxA>c4hgA8fvTqqvPCWaH{+e10nI5XQf162?# z+H!Ihrc9WqUwG^Or=B=a%Yl~D>#eH;MdiRV+g^7&_7c3;Ii6MX58a(GpOGID4_n>1l4}uWnM;a~W98D--m$#sICxRanx| z{TiKC0yku=*KB|;IhHhCvgTsrO8Gj?e6?6{X*ULoVlYBAL8ho?HD>GOWICAx>ln@W z#B9~9F49fcsO5{vxxNd0Jr$Vyt{7R7Ibde3;DXr9mfIJf%4We_XFaxql2__Ooxun! z-PMIQzGWXOdP;N*(6;LHJ=Ij>HQTOhTWjT%45|vXIcb3gLre1+(o~HXjnYwKvQglqml^$#d{$Up5D94*a<} zu$N!LIh}R9gM-~o9xeg4IlGp$)wx|f?-B&~;oa&?!1e#`y&g9hzmnju8~E(+cEXNh zov`{1bqYYLoNnQw4WQo4CUesl$l2+0lH)sASpU2(vwOh;;i88haEJoI>K2Z*_;#SK zUDKPdPo~WV1S3prk15PV@|bA!AQo6a2|s--Y78*uc%Z|KO$SW-ewq}tP70X~n(<63 z6`NGFQ#+ck*{H#EqD(c8=0}$gmG#N}C2! z81qem#=v4!P9UoF$W&7p4Uqymbuc%z08&8t4J^L0DqR4OCw&SX=G{$D@(Pj5n|=MJlx)^k66 zGY_--;RD4?+g@)x_=nib|9ssmw|z~TkYi~u3z5{|)+6~FkIe2nlE3ES_MhdSLp9>P z&#IVOOp1pE#Pt_kGM33m+hbW(|2L20=J8ST1&*6NqlT)}u3Adf7&vEA=+8WZ+JZ(^ zP~j9b52o|IcctJl*Od8CyCrW)=aOLPQ$8O?Wev`yblQ|M+i5WMD4Rbq6^!-7>8cBA zou0Nf{#x)A0GaEV<0%HF`E)5%(_`UXAd^j?frlyUDl_)D>J7810zN?)KogTYYOyYW{GJysh3U0P1AgU^zxxh8jPhTk}1v1ohsH?0-lp8 ztP>0=YceS?B=SiY3~XF;&cMcwKKkN(aFi}*ijgIhVKkdRuQ_LM&3RHsq1MAAQb(Xu z3i@dwTB?s#+ccHc+n8vWm>;`gUAw5x>1l1{Zzi;1;i^V$SH-9K#%5JacxSB&y14}? 
z|5e1v!O6b^aN@5Y9_`EKz{$pe<==YgYlmUneBwX@=W?{x)w$iSW}a*2*+4wFXA(A7 zP?Z6CvG|TmNWCpMfqqLhvym!8P}^r^*|Q(Sr!Broc`H|e}eEg z3Rnmvd}j&=(az7pOS^pZbwrQJA?i>oL^?ZOMmwDhgco>A}JnTV3pV? z$6#a`rx`+OHk5x+IGrOmlGB9y;laLa4wP^p&)xCRf2)oBG-$M-n)VL>2(;T4_K!K- zb;WiT&y3^NVkW($qbNCkWl4wD1pUg{+q0{dbZFAySJc+n7ND0|V2EDwEsuW=+xU&b zX;j?Y-o>VrF!|Uhd)WQigSTTxfgpY7B`g1?uPNYPz)>A ztU`vWs=IZ+Z5D~o~YCb(*VhB%xaNGrQhD{eMpEr6NMBN+}% zESpOjvRceg8G?aSavXM5G_5b4+r}tWro|SM$_p)~Srn-Pm=s74Dwxf`SweYXEXATI zoMwMgW3y>RN&(7CK!^crP8gHRvx5|5b`PR24i$;TOGdYC44|ypRag`F%JCbg_-u8+ z-gB|BnZjvOVN836%LEkM<8Yz>AD;XcF|CA~-xB4l{Mt(oU|)Fe!4A%WstwnyJkGmZ zaHbMYRl>~$0qvqs&Q&kRL5U%?eB;0&x>bqFjOrIWpHmM>-t*$wuy#*N)Njj5-`GSj zB!|P%kQ~`AM*@jJ$hS%bpq^F$0NQNVZW(sgax21tz*)XkJI-E?7*@&Ua5T?~WG^7T>?Ds_o#QFkTpN_>cKDLQ1|E`+ckd2eCz(h-@a6yMUJKw%5klHOt_P*=Wv3fyTt2y>#0KUi^BzVgn|kOrOBX# zpiz|+f~YanY2l2@q=E*+3gS3w3@d5ZSpQwFNmPf^^7u*3;DU|mhI#=XJ+qMpYIAti zc||>;LKT6^LIVfYxOdIMqHfWOhm|T`bS+`1fZ3R67^e)S8kD6a%1$#StJWk8*)+wn zdi-wLJf~;L5`I=aen*ouBRc(avw@91eUfTVhcQ-60YlEYo6~8CtO*V@ssy*ucTLX) zY6R<_Pc;&(Pf`h{F~@~shRHy(CaaXkDC$Yg;>XO`1*d+@2FJ|mA^{#rOb)JMwza4p zE>A-yX0j#;)UtAjrkdr029;H`Numr#ILk3x`q@>c;&=!ZS0bhc7w^#xZ^4O;m5Z7J zfR%@u0zjy+I55beuK-}>pQZp{;hx4Yqloro4o4%|x zJJoEOs^1POZz14N8JuE4I{>LflO+>~q(a-R*&@->>$5MOnb;1~#stl3M%OVrQvC>M z5LRCyMS;#i>qgmnLnF4rLgH9m7LNaJ;Y^PF1?eLTg#Q$NEPP$KQrHGj_GNQG$AON2 z{=wC_9(nGt1J)z_^5_5W4`5clN?V8#gDCK}H|D_zU!!`#_9uA=VySTL+{JqWJCPN@rtd!2=r1VrO)maT`67C)rqNF$;3WN`-rNKG7B6n67_615aO9^=_HkHar zAqq4R%T7z+?in>Z{(S2K>N0v(^-q8#Zw9*9{WJr8EEaYQOlidQVGH6-%yWxe8@~|@P&8vn zq0T9CUY>dP5o#SG*8%|4hEb@k!qAXvVklILBIn_$GasReh+G%Xj*PRj|6-#ca!U%) z=mLN=@KB_iL4qd$l>b_?f+N2nFTxqXN69~c{NG4EO;YeZknBY24;)u!zj*uh7(b#yRAiRj+Q4Ej_@H+Xwj)+ z^U^XPYA6$xg4NBUhijmY#)6Ft#U)$=omnvgYCK{yBPtaBnApDF17019KG4g)S8PZf z4faYQD5D9>pe7uFh6Q3P*MM`!^|Yovwe!Umu0ivm9awsb=v~vqJ|a_woh6J0gJCIn zTBGRc^56tq0p`#O-4tL5U}oW6D7{N+L{DD};$*ZKR|sLG z8OW&?J<<6_FvGkY;Jj`u+X%y^!oV1>t2~)4l8gpJ;vNfeh8~#L~je+z!!0{h^jr%e3b+QlSfA|9yXuEiu10OyPoL0p< 
z9S&-~ea};WxSbg>u%Wt;tXdE=lL9tbFeKL>Wyx%+;H`24a%M@*aI;a~TjkPH_b+_p z3T7L@W)a2I+pgU|!fX)O1VEMlZ*t@se@k z^&>REdW{<5cCbF8hPWK8N5E&J{Dc4R*Wv5GkC6w+owgtFlc*)xRXssCFrPTp@%{Rv zS2M{j#yu`FIc9PD7d}40 zCd1}{&4D>{fXP3)^#tfP+o$A5W=cQ#<2dKPNMiFH5sYas(s*@aLI#n7vy)?TKMrR7a zviUc+08Ji#iqWLzP*t(?`3!TMDufQBYAK>d`$ra0tc98uu@0+78|~Iu!JvlOpGX~u zZ#d!Kyg*(Xoa$3Qeg_t(CO4p+$9-7G(mDW$B}(4h%tq$+kH? zE8F>eU7+2f_O1R&vFk zxkIyyvpI0Ga-d0sT?2u+=U@weV=NoDjj?P1Um(KP07JmW`voHG2(U;tS1?4fX@V;M zw*K#AZGd*YPYe#sCk_W&r!(2aIJOhWIsttg>jE&2t>>ArV;s{3ux>>k$9f8se~}#I z$Q$G(@-#fym(2m210Nm^G>Gn2J`@NBy8_)^!3dZH>cW9OVho3cdePPDX7GV9%Df^q;;#Y+@Hm)~Z4sl1%ycywG+~b!- z3vp~*-@OPuooqRba4gQh?Hgrro`cRp92@`JQZGTA|5?n-;=a9b6OFUw{~f$w8`S1N z1rDHmF^@!gIb{Bh^zzDl0O@6&#WC4Ld@N4icQ9u~dbwtC`u>E)>H8w)t%>y~RQ`n* zasK}cvVjbc5NQ7Eg+D-)ec2rNuybG$S?(zFOUBxMBWczWnY7L$9tU*+#zf{6(&EtE zlrib8C-WWP-irw-2Ejualg0czakj~EB6#*J@O_g@_M zU;H@#9C-S;jdYN|AzvlmDyiMprPPx5ih^w-e3LB(o;6Z3esr4JPWGwBh&xX&v6)1B zSmv5TbHbsIT3%|ySV%m6m~wJLE18;+B0=vfa?KpHU}BqEqVmLPV4nu1QAT;FMFmTF zqEFP-L>~=Co%l|`s55bf)6?xOv0B}%bKrpnrx*^E7Q+&lrw?YUiojro92*}_fM-3` zT=g&79iE`qXoXD+QPUZ!Zp$)MIAakS?tFa*Fbq8PeTp#*6O>Vd%M2RTV}`y2kQISd zpuPc_B{?`_RN|e~>tPmbinX9>zisd!=-=aaqA_FeyE&4H7I133S$h0p%c z{l90x1L(KNJ>+)sS#mkqNwz_Pec2qaIWRX4)Qdbg0Awb(E*f;Qppyn2%ox|92AN%N zEe%#N(_b*t15{PaN>@~a%;2}021SOdst$nNBFn2LvaCc6vT|zFAj9NVRLz>)*C#KE&}z@-m3F~G#wp$}AHfJwAaLmX}vzyv1TqCP+{ zz~sDEAE?0q6ZvX=fXcsv^l{+*_fhctJ3~GN0sFE!U~`}n2U?vx?{dJ=J%JhRvTuVk z0kndsKgtBL2*HeU(ZL@&4`e-prDP7r%MH3X2*yD-Tbl6q5!~bh7kb0 z1hVc3$A1TLa^zpY^Y_)@^E(Iu`?5K3a&f>9^6aGY%;eZgu1g_{&N|hcMfo;D)3 zUp5CUI1nar&AK%X-*{J6`o<>4#)7?}L`aTomm`5hAmm#G_ONF!hc9ubSFw?7PT9ap zRm;W((I`olmKIJ#3Z?Z#qq1+6YTv8nWaFAifx(a*4oCHLf*G%tjR{yjBF@UCMV4ng z5$t<4%GpRirwW7Nua=EXLQwu~{l7(Hx1((ie8f3`^6vsyU*vb>-^mN$|L+mf8@Bh>aSK;~pe+BRV2gyTV`*REV3{iv!g)c#Zec2p%pE)qEmbA`?4Sd-JQRMepn`H8m@5V+nLUvNaf*%w0Sa z%jS~tl)Sfkn=&cKld;syWNcE-D#^>`mEFm7XKykMX;X>}CGL!k%Uh>%W08wzlIh8* zoScpu(q>{4iZVU1y%X*evD^+woP;b&ZaO8;$P<&fX)H$0fg#*@ZpREHX3<6nD4hw= 
zVz$fU$#jwa>KZrk)=^WfEgRQ0x=3qNTHdYIJsemTsCSaKI$+0e8qCy^m&x@G2m=TU z*H)3mm_9&LYefh_sv4>$q`3iV3ZHTcHVCA(F(u3K9Ye8XN(qKf;fcRNnF5xi6YDTu zR)T2%$8#5O_;>uSeeHFk_og**MOhPrVwR0fY!5`EQEick%Qk6dER)W4jm0LCsp%EI zfh@cv-&r&c-_nZSz~r8@cJD~$Ha29|wr7OJm zh%OxK4)g&|ToKQuvd5sVbje#n-H|K5aOBE+kIeqVkt=U{<@TFix$`b*LpCE_*gaQ% zw#6rQMS3E^c%PnR{JwElER`HjuZTlymb1Wlgp4Jka(t|~jFx7sAVYnjK%Y)AU=7(F zijO6UCnqlGnG!PXd9vzHe~=UC)5(O zoKj2Ba#Bq}QT|2#F-}+@5dIl>u&)mt2kIKhV)znZHa868&Yr-sd8B=qjxX!rW8}GH zCcS)%0)yZ3L`+`3Hk+B;p4qc}!_@XvGQNBuy))U+K)g*jq-}zZw-% zHA415EOs#-yJ~E(!P@-?TvPa1AWn+a3B!q4fI3Ls(i;f@CE?y4gpEYU!r|@3ZcBxYL?dBPibZ;(eSLjFAefR{D!df78)_Zt4MOeu z!jbN150ry=K?>BFREJ9f*!eyxK$^Rnze-1vIf11x{c73tMn^yxXVJfgnsY2 zo4I0TcQt;=h|uqh4_yK*@As&8p^!B{^yJdQ2*d8ww?$<#jJ=#%VLf zVpv7k9p8=m^}c-nlg~dxtCA>tHaOZ)E`wbJ~{ag=qmv&e>cTiYXsLHWmRBz zR+ajrDF1@cfb!p9CHi*!NyCBJmMm{_mpP<5#Lm+H2`zrey!F1iEZ&({}r|9?9Q$WOmhq*WAR;U@Wj^nsf6xPJF0ZS@Zg1#E?SBy)ry>w z<2mU}X)K$WP`{5*y?ck8l_fl~C9Q-HBgfJSDXFZT!ced}lTFB3Y5TNPa+nJaQk~Q3 z@U%7Ypmu@zue=J4&UP=+wJ#G7|C8 z7&r^49hUST2l~|0c>a8Sm&){UWcnEBdE^A;yY5lXbY1kEm>#fVx_XX|z74~1wfrf| zRDv={5NN4bE6_da3DQN+iJ+hrK_#t#9TcQT`to(lRifx8*jbSyMf%{;j3|hHUx+FS zjh;!+uo-FaXhsh<{ke$BT$u!-g7QyQx4?4L1&ZS@5)c>+w(@~ zuQL;@Nvk2VR;{KS*M`kb@WL38Ovh3u(9o2x>(+HeYz%~sM)#;^k}i5q9C`<%B<&ji EKh-rEE&u=k literal 0 HcmV?d00001 diff --git a/tests/support/anki12.anki b/tests/support/anki12.anki index 042ecf4f4ca7fb6ce0efc235bb3540ca93ba3530..8a20fc0944adeb6036a0d1525903aa13b1c000f8 100644 GIT binary patch delta 3215 zcmai03s98T6~5=3eZulsfdJyNAU*(5P(ci$KolD-q%;~$f)ldBYKTS<78TscquOBA zn)Nr{SRZJdYBZBV+!~V%W{V}0`WPWa6WdY=PMT_bP!p51)`Xsam$h-GN&neB_jT@h z-E+Qs8yZXv|D=?8KP@PC*yEN|R6S>N#7!0&0U&bY;)DHAsJHy=9OjmF^bI(X)l9tS5 zbOMtR(M%pNGcgA-F$FL&=$U9q9cw6C&}n*!R#LXx9GXJPcx#5zYaOL5=Jud{e#duS`w zQzezs<20ADXdGE6khk|^oSBl0)iu?X_7!CXc6;eF_Ua(ALUl!zW6p{)`x>({H)m>) zi7ZfQTWX(IUS|(cnsTy+nQ1IMXD=(a6;zbk9OV@&LY4NMhfO5r3Ne?8sY}duxiU$k zk8}^1^rUoOHz>~EAM!>*i-!btP%TZxPtk48-GS~yk30n?QUrZQ=gC8d$!#ubB1I{Q 
zQ94V^5#bu0-uS_W7ba}FlGKscv*`MWuOHp594?6&-KDv}lQX_iCoS2nZ=pA|VxbTV zF#?OoBIZ$t%k>awl)``{(76jc&jIL_2BQUx%3fouvd0h&0m^a1DV@4iXgDl)Xj`Qg zhI$QJJv9q@pPG5aNlNopC%bt=h#Oc)bHr7btBr-HoQe($n)zsc-u(Qy`FRf&)|{HHV{n)VIChwNr_`T zwyLsfFNtu^yfV^jM5JQ75d%@~=Womgh*mmpw!v_vW?+*^6;r;#VXs1T0zoX%*S|cV4dXh3J90%|$9>kyFB1~rgLQ&p|g;(Tjv5+7YD~yp{u`p7;W#yqZ z77l(-uH4I-9YvrG6SD;WzJx6}5!c~OKC~k)SIV~ju@ED7Rlz>6C=DMs$`jQP3$e1d z8V-Y1{@ejSfjAjg3lZ{~1BxNu*I!fvbLD~6V3ujMpq1@4kju6}9kUHv1(qn)9RoQ9 z4Nxz=M{m<>T=l;sCp%L{(pdrx)ou1*v6bizdcp49&3#+gMr(rnUhWFuZcolX-87zcZFcpT2FU5yK<~2a$ z@spcjCWUZuFlcttL3)AZ%wZK7WR2B=UZ=yno~K(6b>DrtO?OzW>GOt#o4CyZIlBdB zWLvaGW>R@tO21s-=kb@iMQa#Z-4%7BzV~16^UlZqIR4jTxqYchUT%Se5IQF4Hha;_ z&Yq-Wtb0F|m1x)q-oc)#UFx7Lkc>LNu0S&A{F2qaAA?jZ*Qu~@ zoiVnP+Gw{I9vmdwx3DCILBB( zVz^KKQ6Gx`iOw=69%aV8B+vFix;)wn!{gNx4-h4Sf5#w@Go=kRxD4CLN}C0jlaSNa zz(^P;2UkO+hV#Ehrmlhn80AZ-XI8-sX|06^Wcw=4rLn%*VXI*pB+5rt!())7?u?S{ ztKlJbhx<A%+n14i zcYs-)1CTN=T*D`v>xB;?{J{>_@WJ{g{ZYSf(jUnm2>&2kym^pa_`@Jf52Oyk*LOdC zKpkwt8zehIa8?8r+>5??aO9w;sesofk#txwMHX2xQSP&%{RcTyFz3v)RgyI zDL{9xPUV{|m=K`#mui4K--0PKf;FH6jh`vdH}vxu)i|2SeqSvyDD9-p{3fa*J1fd3 zxw93QN_#7&59M#BV5EGPQF1LkL7B{xKr-FK2YtzdBjs)nCd(!dM#&Q%j1TkWBVU{! 
zF|cl6NS&x&owreHx^TX1>SE^CyD&*Up^}ffaD1o^(*&KO5WI+caVe&;f)S{hLZv!c z5&+{&ewgyl#LMG@I83H}i-mH>Bp!Iu3C2JxeF(RgURq zmd?<$c>o=8Dr%T;y;=Z&VQ}oA?|D=@AwXpXu|S9gE?0s3&&fw9RGrd5r@(L|=z&fa z{9ADEP0U8$l1LJifzQIu@4flYm{b{vAu3}C?(Vk{JP9_xe|jK4Qcv_gakulaid|9ytWdWEf1&YA`>EDX}S7pEnidO zuq~}tiP}z;pF3d;4&^>%%H2-L8u9-&q{4VP@_BfXan1caZ09O@cpYqoGX>I#vVerIF?2n$#IrHAT z@6NsV-TUs>toj?bYLz)@)+4!P)lU0DcV)Sw+CG-}5lTsx-A)J**_+BFEBUV z=uy(IbNF3S7&Ukd$vG6QwooeJc-vu|8DXU&Rz$SpyzqFc_tb_TrBuKI|LuD&_v>(s zfAmNu5G-tvO)@kQ9+C;{fCX?Cl06ptHO^zVUl<-LILs8s;RbOWsAN(k6-7ys372A%+IojQ+WGCCgWqQA zJaaF-&Uv1@xPm4aRX$)Ma09-EGtdK_5DM+^Dm1_|kO<453?75&V1fyE_za#kH6)Fj zEORzm<*uxrR#EC)Zn1E&wA@)*=E!xIII7Fs6@F&UN|!noIcJqU?F0)KZ>V~*YN@lL zG}q}YS?sLBM3{g}Ap<*Ms;pPq2vg)Kn4o;Ecq{=Mbw4u71&*D+*wYp?pPB&)oP?cl z8TNz667tNj<%Vpieu)?s_CPOxa!h^yq}$nj#qFDpP1wf=@`6Z@tzN@+f5>6&*qZ+)JyUN28)5gq8f-B0ZZ4@I#T?F_d>`5-kHgiIYr;*WoRD&EC3l^V z)_*C43*U-I3`vSzIZevY!sqU}`jzUe$A9d_nsa|Re!8+QW7FDsOqJqcMr z8&i2%4#iW#-EiaU>4fGd- z0|V(Za-XZ5M8e4abor;s(6aB=jx~*zWLA;ygkQ=9odg~ zigF+4FmiS%wRrBI9Yv+GifU)&ZyheQJQm=wKgurDF(-_U@ce$x6iWT^;M@)xfKSe= z_fuVfS7HdPg9xQviMiX(cHXJ;#7sX+17w4d2;yd9JY#y#kST69%F1Wj*G-%!e&$k| zW@;sAn~x8$>7Q(B|D@}7ED;1zl%F;f;Sl9 zI!5S%0{9hV_>w=tVaBqLrbdw;h^~r@;X3(Zez)GLe^&pgkRq%WJ{J?kRpJTherc_A z%}{9g#Q1=5m&sx(H~q`J+6&j4Y z%XiXHoL4b}j`UJ>Rus@M?<(0norZf?>)ct?ibvgPShPNxMtJu&t)E0~-qn@$S-7Jj z7$0iLppm$;VGfPLl*SOe((oXSW~=!$hK<3vy>T)fglgn)|64c;dp6IeGgKx91kV)RsE;v_+1X6C4%ljhYk&XcJZqt#dj7zFKHYPTV%C2=liDJh5q@a$?dlx9S%X$lJdF-p^!V0TmCkZ5K;rZv-EHDZUa zXlA}G%}IV*s?=EE!RBlebQAakSW%S3Ailhbdv zDxANt(CvOFWEKPCPpIGmM`g6@OE0K;%02qMj(hcc|0m=@eHi2scp3_s(tm;z@B}MI zZ$cil!6ta#M|t~7E>q#DK!sr^_JX_yn!KQ1^J?QDRs#)~cPI^CI23{J9*VWA<}ibk zz*?w*N|uq-;Q5SYiTBta#}0VeSNcW{kMyLYvuDwezov7rk9GB1;0u@w>30*PhD_z+ zVKa#r1(8Kd+Q!0@`&JSyp=I|i?EPlv)tuqz8Q+`3(p6V)q>d15=}o}?-Vpp|^m)9^=G0T8@Lax8(k>YfpC11owx9EY diff --git a/tests/test_importing.py b/tests/test_importing.py index 74412df4d..ec44a8a46 100644 --- a/tests/test_importing.py +++ b/tests/test_importing.py @@ -17,7 +17,8 @@ 
def test_anki2(): # get the deck to import tmp = getUpgradeDeckPath() u = Upgrader() - src = u.upgrade(tmp) + u.check(tmp) + src = u.upgrade() srcpath = src.path srcNotes = src.noteCount() srcCards = src.cardCount() diff --git a/tests/test_upgrade.py b/tests/test_upgrade.py index 00d9bbb6f..c293a9118 100644 --- a/tests/test_upgrade.py +++ b/tests/test_upgrade.py @@ -10,16 +10,21 @@ from anki.utils import checksum def test_check(): dst = getUpgradeDeckPath() u = Upgrader() - assert u.check(dst) + assert u.check(dst) == "ok" # if it's corrupted, will fail open(dst, "w+").write("foo") - assert not u.check(dst) + assert u.check(dst) == "invalid" + # the upgrade should be able to fix non-fatal errors - + # test with a deck that has cards with missing notes + dst = getUpgradeDeckPath("anki12-broken.anki") + assert "with missing fact" in u.check(dst) def test_upgrade1(): dst = getUpgradeDeckPath() csum = checksum(open(dst).read()) u = Upgrader() - deck = u.upgrade(dst) + u.check(dst) + deck = u.upgrade() # src file must not have changed assert csum == checksum(open(dst).read()) # creation time should have been adjusted @@ -45,14 +50,15 @@ def test_upgrade1(): def test_upgrade1_due(): dst = getUpgradeDeckPath("anki12-due.anki") u = Upgrader() - deck = u.upgrade(dst) + u.check(dst) + deck = u.upgrade() assert not deck.db.scalar("select 1 from cards where due != 1") def test_invalid_ords(): dst = getUpgradeDeckPath("invalid-ords.anki") u = Upgrader() u.check(dst) - deck = u.upgrade(dst) + deck = u.upgrade() assert deck.db.scalar("select count() from cards where ord = 0") == 1 assert deck.db.scalar("select count() from cards where ord = 1") == 1