merge pylib references

parent 8fcfdb57ab
commit 9d1b6231d7
@@ -1,5 +1,5 @@
 [settings]
-skip=aqt/forms,backend_pb2.py,backend_pb2.pyi,fluent_pb2.py,fluent_pb2.pyi,rsbackend_gen.py,hooks_gen.py
+skip=aqt/forms,backend_pb2.py,backend_pb2.pyi,fluent_pb2.py,fluent_pb2.pyi,rsbackend_gen.py,hooks_gen.py,genbackend.py
 profile=black
 multi_line_output=3
 include_trailing_comma=True

@@ -22,7 +22,6 @@ from anki.consts import *
 from anki.dbproxy import DBProxy
 from anki.decks import DeckManager
 from anki.errors import AnkiError
-from anki.lang import _
 from anki.media import MediaManager, media_paths_from_col_path
 from anki.models import ModelManager
 from anki.notes import Note
@@ -574,7 +573,12 @@ table.review-log {{ {revlog_style} }}
         old = self._undo[2]
         self.clearUndo()
         wasLeech = card.note().hasTag("leech") or False
-        self._undo = [1, _("Review"), old + [copy.copy(card)], wasLeech]
+        self._undo = [
+            1,
+            self.tr(TR.SCHEDULING_REVIEW),
+            old + [copy.copy(card)],
+            wasLeech,
+        ]
 
     def _undoReview(self) -> Any:
         data = self._undo[2]

@@ -1,9 +1,12 @@
 # Copyright: Ankitects Pty Ltd and contributors
 # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-from typing import Any, Dict
+from __future__ import annotations
 
-from anki.lang import _
+from typing import Any, Dict, Optional
+
+import anki
+from anki.rsbackend import TR
 
 # whether new cards should be mixed with reviews, or shown first or last
 NEW_CARDS_DISTRIBUTE = 0
@@ -88,30 +91,45 @@ REVLOG_CRAM = 3
 ##########################################################################
 
 
-def newCardOrderLabels() -> Dict[int, Any]:
+def _tr(col: Optional[anki.collection.Collection]):
+    if col:
+        return col.tr
+    else:
+        print("routine in consts.py should be passed col")
+        from anki.lang import tr_legacyglobal
+
+        return tr_legacyglobal
+
+
+def newCardOrderLabels(col: Optional[anki.collection.Collection]) -> Dict[int, Any]:
+    tr = _tr(col)
     return {
-        0: _("Show new cards in random order"),
-        1: _("Show new cards in order added"),
+        0: tr(TR.SCHEDULING_SHOW_NEW_CARDS_IN_RANDOM_ORDER),
+        1: tr(TR.SCHEDULING_SHOW_NEW_CARDS_IN_ORDER_ADDED),
     }
 
 
-def newCardSchedulingLabels() -> Dict[int, Any]:
+def newCardSchedulingLabels(
+    col: Optional[anki.collection.Collection],
+) -> Dict[int, Any]:
+    tr = _tr(col)
     return {
-        0: _("Mix new cards and reviews"),
-        1: _("Show new cards after reviews"),
-        2: _("Show new cards before reviews"),
+        0: tr(TR.SCHEDULING_MIX_NEW_CARDS_AND_REVIEWS),
+        1: tr(TR.SCHEDULING_SHOW_NEW_CARDS_AFTER_REVIEWS),
+        2: tr(TR.SCHEDULING_SHOW_NEW_CARDS_BEFORE_REVIEWS),
     }
 
 
-def dynOrderLabels() -> Dict[int, Any]:
+def dynOrderLabels(col: Optional[anki.collection.Collection]) -> Dict[int, Any]:
+    tr = _tr(col)
     return {
-        0: _("Oldest seen first"),
-        1: _("Random"),
-        2: _("Increasing intervals"),
-        3: _("Decreasing intervals"),
-        4: _("Most lapses"),
-        5: _("Order added"),
-        6: _("Order due"),
-        7: _("Latest added first"),
-        8: _("Relative overdueness"),
+        0: tr(TR.DECKS_OLDEST_SEEN_FIRST),
+        1: tr(TR.DECKS_RANDOM),
+        2: tr(TR.DECKS_INCREASING_INTERVALS),
+        3: tr(TR.DECKS_DECREASING_INTERVALS),
+        4: tr(TR.DECKS_MOST_LAPSES),
+        5: tr(TR.DECKS_ORDER_ADDED),
+        6: tr(TR.DECKS_ORDER_DUE),
+        7: tr(TR.DECKS_LATEST_ADDED_FIRST),
+        8: tr(TR.DECKS_RELATIVE_OVERDUENESS),
     }

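A rough sketch of how callers are expected to use the updated helpers: passing an open collection (here `col`, an assumed `anki.collection.Collection`) gives labels translated via col.tr(), while legacy callers that omit it hit the tr_legacyglobal() fallback above.

from anki.consts import dynOrderLabels, newCardOrderLabels

# preferred: pass the open collection so labels come from col.tr()
items = list(newCardOrderLabels(col).values())

# legacy: omitting the collection prints a warning and falls back to
# tr_legacyglobal(), which uses the process-wide backend in anki.lang
legacy_items = list(dynOrderLabels(None).values())
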
@@ -11,8 +11,13 @@ import anki  # pylint: disable=unused-import
 import anki.backend_pb2 as pb
 from anki.consts import *
 from anki.errors import DeckRenameError
-from anki.lang import _
-from anki.rsbackend import DeckTreeNode, NotFoundError, from_json_bytes, to_json_bytes
+from anki.rsbackend import (
+    TR,
+    DeckTreeNode,
+    NotFoundError,
+    from_json_bytes,
+    to_json_bytes,
+)
 from anki.utils import ids2str, intTime
 
 # legacy code may pass this in as the type argument to .id()
@@ -379,7 +384,7 @@ class DeckManager:
         deck = self.get(did, default=default)
         if deck:
             return deck["name"]
-        return _("[no deck]")
+        return self.col.tr(TR.DECKS_NO_DECK)
 
     def nameOrNone(self, did: int) -> Optional[str]:
         deck = self.get(did, default=False)

@@ -13,7 +13,7 @@ from zipfile import ZipFile
 
 from anki import hooks
 from anki.collection import Collection
-from anki.lang import _
+from anki.rsbackend import TR
 from anki.utils import ids2str, namedtmp, splitFields, stripHTML
 
 
@@ -92,12 +92,12 @@ class Exporter:
 
 class TextCardExporter(Exporter):
 
-    key = lambda self: _("Cards in Plain Text")
     ext = ".txt"
     includeHTML = True
 
     def __init__(self, col) -> None:
         Exporter.__init__(self, col)
+        self.key = col.tr(TR.EXPORTING_CARDS_IN_PLAIN_TEXT)
 
     def doExport(self, file) -> None:
         ids = sorted(self.cardIds())
@@ -122,7 +122,6 @@ class TextCardExporter(Exporter):
 
 class TextNoteExporter(Exporter):
 
-    key = lambda self: _("Notes in Plain Text")
     ext = ".txt"
     includeTags = True
     includeHTML = True
@@ -130,6 +129,7 @@ class TextNoteExporter(Exporter):
     def __init__(self, col: Collection) -> None:
         Exporter.__init__(self, col)
         self.includeID = False
+        self.key = col.tr(TR.EXPORTING_NOTES_IN_PLAIN_TEXT)
 
     def doExport(self, file: BufferedWriter) -> None:
         cardIds = self.cardIds()
@@ -164,13 +164,13 @@ where cards.id in %s)"""
 
 class AnkiExporter(Exporter):
 
-    key = lambda self: _("Anki 2.0 Deck")
     ext = ".anki2"
     includeSched: Union[bool, None] = False
     includeMedia = True
 
     def __init__(self, col: Collection) -> None:
         Exporter.__init__(self, col)
+        self.key = col.tr(TR.EXPORTING_ANKI_20_DECK)
 
     def deckIds(self) -> List[int]:
         if self.cids:
@@ -313,11 +313,11 @@ class AnkiExporter(Exporter):
 
 class AnkiPackageExporter(AnkiExporter):
 
-    key = lambda self: _("Anki Deck Package")
     ext = ".apkg"
 
     def __init__(self, col: Collection) -> None:
         AnkiExporter.__init__(self, col)
+        self.key = col.tr(TR.EXPORTING_ANKI_DECK_PACKAGE)
 
     def exportInto(self, path: str) -> None:
         # open a zip file
@@ -395,13 +395,13 @@ class AnkiPackageExporter(AnkiExporter):
 
 class AnkiCollectionPackageExporter(AnkiPackageExporter):
 
-    key = lambda self: _("Anki Collection Package")
     ext = ".colpkg"
     verbatim = True
     includeSched = None
 
     def __init__(self, col):
         AnkiPackageExporter.__init__(self, col)
+        self.key = col.tr(TR.EXPORTING_ANKI_COLLECTION_PACKAGE)
 
     def doExport(self, z, path):
         "Export collection. Caller must re-open afterwards."

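The exporter labels are no longer class-level lambdas around _(); each __init__ now stores the already-translated string on the instance. A minimal sketch of reading it, assuming the exporters live in anki.exporting and `col` is an open collection:

from anki.exporting import AnkiPackageExporter

exporter = AnkiPackageExporter(col)
print(exporter.key)  # translated label, e.g. "Anki Deck Package"
print(exporter.ext)  # ".apkg"
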
@@ -7,12 +7,16 @@ from anki.importing.csvfile import TextImporter
 from anki.importing.mnemo import MnemosyneImporter
 from anki.importing.pauker import PaukerImporter
 from anki.importing.supermemo_xml import SupermemoXmlImporter  # type: ignore
-from anki.lang import _
+from anki.lang import _, tr_legacyglobal
+from anki.rsbackend import TR
 
 Importers = (
-    (_("Text separated by tabs or semicolons (*)"), TextImporter),
-    (_("Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip)"), AnkiPackageImporter),
-    (_("Mnemosyne 2.0 Deck (*.db)"), MnemosyneImporter),
-    (_("Supermemo XML export (*.xml)"), SupermemoXmlImporter),
-    (_("Pauker 1.8 Lesson (*.pau.gz)"), PaukerImporter),
+    (tr_legacyglobal(TR.IMPORTING_TEXT_SEPARATED_BY_TABS_OR_SEMICOLONS), TextImporter),
+    (
+        tr_legacyglobal(TR.IMPORTING_PACKAGED_ANKI_DECKCOLLECTION_APKG_COLPKG_ZIP),
+        AnkiPackageImporter,
+    ),
+    (tr_legacyglobal(TR.IMPORTING_MNEMOSYNE_20_DECK_DB), MnemosyneImporter),
+    (tr_legacyglobal(TR.IMPORTING_SUPERMEMO_XML_EXPORT_XML), SupermemoXmlImporter),
+    (tr_legacyglobal(TR.IMPORTING_PAUKER_18_LESSON_PAUGZ), PaukerImporter),
 )

@@ -9,7 +9,7 @@ from anki.collection import Collection
 from anki.consts import *
 from anki.decks import DeckManager
 from anki.importing.base import Importer
-from anki.lang import _
+from anki.rsbackend import TR
 from anki.utils import intTime, joinFields, splitFields
 
 GUID = 1
@@ -131,22 +131,25 @@ class Anki2Importer(Importer):
             else:
                 dupesIdentical.append(note)
 
-        self.log.append(_("Notes found in file: %d") % total)
+        self.log.append(self.dst.tr(TR.IMPORTING_NOTES_FOUND_IN_FILE, val="%s") % total)
 
         if dupesIgnored:
             self.log.append(
-                _("Notes that could not be imported as note type has changed: %d")
+                self.dst.tr(TR.IMPORTING_NOTES_THAT_COULD_NOT_BE_IMPORTED, val="%s")
                 % len(dupesIgnored)
             )
         if update:
             self.log.append(
-                _("Notes updated, as file had newer version: %d") % len(update)
+                self.dst.tr(TR.IMPORTING_NOTES_UPDATED_AS_FILE_HAD_NEWER, val="%s")
+                % len(update)
             )
         if add:
-            self.log.append(_("Notes added from file: %d") % len(add))
+            self.log.append(
+                self.dst.tr(TR.IMPORTING_NOTES_ADDED_FROM_FILE, val="%s") % len(add)
+            )
         if dupesIdentical:
             self.log.append(
-                _("Notes skipped, as they're already in your collection: %d")
+                self.dst.tr(TR.IMPORTING_NOTES_SKIPPED_AS_THEYRE_ALREADY_IN, val="%s")
                 % len(dupesIdentical)
             )
 
@@ -154,16 +157,16 @@ class Anki2Importer(Importer):
 
         if dupesIgnored:
             for row in dupesIgnored:
-                self._logNoteRow(_("Skipped"), row)
+                self._logNoteRow(self.dst.tr(TR.IMPORTING_SKIPPED), row)
         if update:
             for row in update:
-                self._logNoteRow(_("Updated"), row)
+                self._logNoteRow(self.dst.tr(TR.IMPORTING_UPDATED), row)
         if add:
             for row in add:
-                self._logNoteRow(_("Added"), row)
+                self._logNoteRow(self.dst.tr(TR.ADDING_ADDED), row)
         if dupesIdentical:
             for row in dupesIdentical:
-                self._logNoteRow(_("Identical"), row)
+                self._logNoteRow(self.dst.tr(TR.IMPORTING_IDENTICAL), row)
 
         # export info for calling code
         self.dupes = len(dupesIdentical)

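The val="%s" arguments above keep the old %-formatting call sites working: the Fluent message is rendered with a literal %s in place of { $val }, and the existing % operator then substitutes the count. A small sketch, assuming an open collection `col`:

from anki.rsbackend import TR

total = 3
# render "Notes found in file: %s", then fill in the count as before
line = col.tr(TR.IMPORTING_NOTES_FOUND_IN_FILE, val="%s") % total
# the value could instead be passed straight to Fluent
line = col.tr(TR.IMPORTING_NOTES_FOUND_IN_FILE, val=total)
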
@@ -7,7 +7,7 @@ from typing import Any, List, Optional, TextIO, Union
 
 from anki.collection import Collection
 from anki.importing.noteimp import ForeignNote, NoteImporter
-from anki.lang import _
+from anki.rsbackend import TR
 
 
 class TextImporter(NoteImporter):
@@ -41,7 +41,9 @@ class TextImporter(NoteImporter):
                 if len(row) != self.numFields:
                     if row:
                         log.append(
-                            _("'%(row)s' had %(num1)d fields, " "expected %(num2)d")
+                            self.col.tr(
+                                TR.IMPORTING_ROWS_HAD_NUM1D_FIELDS_EXPECTED_NUM2D
+                            )
                             % {
                                 "row": " ".join(row),
                                 "num1": len(row),
@@ -53,7 +55,7 @@ class TextImporter(NoteImporter):
                 note = self.noteFromFields(row)
                 notes.append(note)
         except (csv.Error) as e:
-            log.append(_("Aborted: %s") % str(e))
+            log.append(self.col.tr(TR.IMPORTING_ABORTED, val="%s") % str(e))
         self.log = log
         self.ignored = ignored
         self.close()

@@ -7,7 +7,8 @@ from typing import cast
 
 from anki.db import DB
 from anki.importing.noteimp import ForeignCard, ForeignNote, NoteImporter
-from anki.lang import _, ngettext
+from anki.lang import ngettext
+from anki.rsbackend import TR
 from anki.stdmodels import addBasicModel, addClozeModel
 
 
@@ -21,7 +22,9 @@ class MnemosyneImporter(NoteImporter):
         db = DB(self.file)
         ver = db.scalar("select value from global_variables where key='version'")
         if not ver.startswith("Mnemosyne SQL 1") and ver not in ("2", "3"):
-            self.log.append(_("File version unknown, trying import anyway."))
+            self.log.append(
+                self.col.tr(TR.IMPORTING_FILE_VERSION_UNKNOWN_TRYING_IMPORT_ANYWAY)
+            )
         # gather facts into temp objects
         curid = None
         notes = {}

@@ -7,7 +7,8 @@ from typing import Dict, List, Optional, Tuple, Union
 from anki.collection import Collection
 from anki.consts import NEW_CARDS_RANDOM, STARTING_FACTOR
 from anki.importing.base import Importer
-from anki.lang import _, ngettext
+from anki.lang import ngettext
+from anki.rsbackend import TR
 from anki.utils import (
     fieldChecksum,
     guid64,
@@ -135,8 +136,10 @@ class NoteImporter(Importer):
         # loop through the notes
         updates = []
         updateLog = []
-        updateLogTxt = _("First field matched: %s")
-        dupeLogTxt = _("Added duplicate with first field: %s")
+        updateLogTxt = self.col.tr(TR.IMPORTING_FIRST_FIELD_MATCHED, val="%s")
+        dupeLogTxt = self.col.tr(
+            TR.IMPORTING_ADDED_DUPLICATE_WITH_FIRST_FIELD, val="%s"
+        )
         new = []
         self._ids: List[int] = []
         self._cards: List[Tuple] = []
@@ -153,12 +156,17 @@ class NoteImporter(Importer):
             csum = fieldChecksum(fld0)
             # first field must exist
             if not fld0:
-                self.log.append(_("Empty first field: %s") % " ".join(n.fields))
+                self.log.append(
+                    self.col.tr(TR.IMPORTING_EMPTY_FIRST_FIELD, val="%s")
+                    % " ".join(n.fields)
+                )
                 continue
             # earlier in import?
             if fld0 in firsts and self.importMode != ADD_MODE:
                 # duplicates in source file; log and ignore
-                self.log.append(_("Appeared twice in file: %s") % fld0)
+                self.log.append(
+                    self.col.tr(TR.IMPORTING_APPEARED_TWICE_IN_FILE, val="%s") % fld0
+                )
                 continue
             firsts[fld0] = True
             # already exists?

@@ -148,7 +148,7 @@ current_catalog: Optional[
 ] = None
 
 # the current Fluent translation instance
-current_i18n: Optional[anki.rsbackend.RustBackend]
+current_i18n: Optional[anki.rsbackend.RustBackend] = None
 
 # path to locale folder
 locale_folder = ""
@@ -161,6 +161,14 @@ def _(str: str) -> str:
     return str
 
 
+def tr_legacyglobal(*args, **kwargs) -> str:
+    "Should use col.tr() instead."
+    if current_i18n:
+        return current_i18n.translate(*args, **kwargs)
+    else:
+        return "tr_legacyglobal() called without active backend"
+
+
 def ngettext(single: str, plural: str, n: int) -> str:
     if current_catalog:
         return current_catalog.ngettext(single, plural, n)

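tr_legacyglobal() covers strings that are evaluated at import time, before any collection is open — the Importers tuple in anki.importing above is the main user in this commit. A minimal usage sketch:

from anki.lang import tr_legacyglobal
from anki.rsbackend import TR

# resolved through the process-wide backend (current_i18n); if no backend
# has been set up yet, a placeholder string is returned instead
label = tr_legacyglobal(TR.IMPORTING_MNEMOSYNE_20_DECK_DB)
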
@@ -11,9 +11,8 @@ from typing import Any, List, Optional, Tuple
 
 import anki
 from anki import hooks
-from anki.lang import _
 from anki.models import NoteType
-from anki.rsbackend import pb
+from anki.rsbackend import TR, pb
 from anki.template import TemplateRenderContext, TemplateRenderOutput
 from anki.utils import call, isMac, namedtmp, tmpdir
 
@@ -129,15 +128,7 @@ def _save_latex_image(
         # don't mind if the sequence is only part of a command
        bad_re = "\\" + bad + "[^a-zA-Z]"
         if re.search(bad_re, tmplatex):
-            return (
-                _(
-                    """\
-For security reasons, '%s' is not allowed on cards. You can still use \
-it by placing the command in a different package, and importing that \
-package in the LaTeX header instead."""
-                )
-                % bad
-            )
+            return col.tr(TR.MEDIA_FOR_SECURITY_REASONS_IS_NOT, val=bad)
 
     # commands to use
     if svg:
@@ -160,7 +151,7 @@ package in the LaTeX header instead."""
         os.chdir(tmpdir())
         for latexCmd in latexCmds:
             if call(latexCmd, stdout=log, stderr=log):
-                return _errMsg(latexCmd[0], texpath)
+                return _errMsg(col, latexCmd[0], texpath)
         # add to media
         with open(png_or_svg, "rb") as file:
             data = file.read()
@@ -172,9 +163,9 @@ package in the LaTeX header instead."""
         log.close()
 
 
-def _errMsg(type: str, texpath: str) -> Any:
-    msg = (_("Error executing %s.") % type) + "<br>"
-    msg += (_("Generated file: %s") % texpath) + "<br>"
+def _errMsg(col: anki.collection.Collection, type: str, texpath: str) -> Any:
+    msg = (col.tr(TR.MEDIA_ERROR_EXECUTING, val="%s") % type) + "<br>"
+    msg += (col.tr(TR.MEDIA_GENERATED_FILE, val="%s") % texpath) + "<br>"
     try:
         with open(namedtmp("latex_log.txt", rm=False)) as f:
             log = f.read()
@@ -182,7 +173,7 @@ def _errMsg(type: str, texpath: str) -> Any:
             raise Exception()
         msg += "<small><pre>" + html.escape(log) + "</pre></small>"
     except:
-        msg += _("Have you installed latex and dvipng/dvisvgm?")
+        msg += col.tr(TR.MEDIA_HAVE_YOU_INSTALLED_LATEX_AND_DVIPNGDVISVGM)
     return msg
 
 

@@ -11,8 +11,14 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
 import anki  # pylint: disable=unused-import
 import anki.backend_pb2 as pb
 from anki.consts import *
-from anki.lang import _
-from anki.rsbackend import NotFoundError, StockNoteType, from_json_bytes, to_json_bytes
+from anki.lang import without_unicode_isolation
+from anki.rsbackend import (
+    TR,
+    NotFoundError,
+    StockNoteType,
+    from_json_bytes,
+    to_json_bytes,
+)
 from anki.utils import checksum, ids2str, intTime, joinFields, splitFields
 
 # types
@@ -260,7 +266,9 @@ class ModelManager:
     def copy(self, m: NoteType) -> NoteType:
         "Copy, save and return."
         m2 = copy.deepcopy(m)
-        m2["name"] = _("%s copy") % m2["name"]
+        m2["name"] = without_unicode_isolation(
+            self.col.tr(TR.NOTETYPES_COPY, val=m2["name"])
+        )
         m2["id"] = 0
         self.add(m2)
         return m2

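Fluent wraps interpolated values such as the note type name in Unicode isolation characters, which is why the copied name goes through without_unicode_isolation() here and why the latex tests below strip them before substring checks. A rough illustration, assuming the helper simply removes the U+2068/U+2069 isolation marks:

from anki.lang import without_unicode_isolation

rendered = "\u2068Basic\u2069 copy"  # roughly what col.tr() can return
assert without_unicode_isolation(rendered) == "Basic copy"
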
@@ -25,9 +25,9 @@ from anki import hooks
 from anki.cards import Card
 from anki.consts import *
 from anki.decks import Deck, DeckConfig, DeckManager, QueueConfig
-from anki.lang import _
 from anki.notes import Note
 from anki.rsbackend import (
+    TR,
     CountsForDeckToday,
     DeckTreeNode,
     FormatTimeSpanContext,
@@ -1258,7 +1258,7 @@ due = (case when odue>0 then odue else due end), odue = 0, odid = 0, usn = ? whe
         "Return the next interval for CARD as a string."
         ivl_secs = self.nextIvl(card, ease)
         if not ivl_secs:
-            return _("(end)")
+            return self.col.tr(TR.SCHEDULING_END)
         s = self.col.format_timespan(ivl_secs, FormatTimeSpanContext.ANSWER_BUTTONS)
         if ivl_secs < self.col.conf["collapseTime"]:
             s = "<" + s

@@ -3,6 +3,7 @@
 import os
 import shutil
 
+from anki.lang import without_unicode_isolation
 from tests.shared import getEmptyCol
 
 
@@ -20,7 +21,8 @@ def test_latex():
     assert len(os.listdir(col.media.dir())) == 0
     # check the error message
     msg = note.cards()[0].q()
-    assert "executing nolatex" in msg
+    print(msg)
+    assert "executing nolatex" in without_unicode_isolation(msg)
     assert "installed" in msg
     # check if we have latex installed, and abort test if we don't
     if not shutil.which("latex") or not shutil.which("dvipng"):
@@ -91,5 +93,5 @@ def _test_includes_bad_command(bad):
     note = col.newNote()
     note["Front"] = "[latex]%s[/latex]" % bad
     col.addNote(note)
-    q = note.cards()[0].q()
+    q = without_unicode_isolation(note.cards()[0].q())
     return ("'%s' is not allowed on cards" % bad in q, "Card content: %s" % q)

@@ -5,10 +5,10 @@ import os
 import re
 import sys
 
-import stringcase
-
 import pylib.anki.backend_pb2 as pb
 
+import stringcase
+
 TYPE_DOUBLE = 1
 TYPE_FLOAT = 2
 TYPE_INT64 = 3

@@ -57,7 +57,7 @@ class DeckConf(QDialog):
         import anki.consts as cs
 
         f = self.form
-        f.newOrder.addItems(list(cs.newCardOrderLabels().values()))
+        f.newOrder.addItems(list(cs.newCardOrderLabels(self.mw.col).values()))
         qconnect(f.newOrder.currentIndexChanged, self.onNewOrderChanged)
 
         # Conf list

@@ -44,8 +44,8 @@ class DeckConf(QDialog):
     def initialSetup(self):
         import anki.consts as cs
 
-        self.form.order.addItems(list(cs.dynOrderLabels().values()))
-        self.form.order_2.addItems(list(cs.dynOrderLabels().values()))
+        self.form.order.addItems(list(cs.dynOrderLabels(self.mw.col).values()))
+        self.form.order_2.addItems(list(cs.dynOrderLabels(self.mw.col).values()))
 
         qconnect(self.form.resched.stateChanged, self._onReschedToggled)
 

@@ -86,7 +86,7 @@ class Preferences(QDialog):
         else:
             f.hwAccel.setChecked(self.mw.pm.glMode() != "software")
 
-        f.newSpread.addItems(list(c.newCardSchedulingLabels().values()))
+        f.newSpread.addItems(list(c.newCardSchedulingLabels(self.mw.col).values()))
 
         f.useCurrent.setCurrentIndex(int(not qc.get("addToCur", True)))
 

@@ -237,6 +237,11 @@ def migrate_entry(entry):
     if len(files) == 1 and "stats" in files:
         return None
 
+    for e in entry.occurrences:
+        if "importing/__init__.py" in e[0]:
+            files = ["importing"]
+            break
+
     files2 = set()
     for file in files:
         if file == "stats":

@@ -3,8 +3,8 @@
 import glob, re, json, stringcase
 
 files = (
-    # glob.glob("../../pylib/**/*.py", recursive=True) +
-    glob.glob("../../qt/**/*.py", recursive=True)
+    glob.glob("../../pylib/**/*.py", recursive=True)
+    # glob.glob("../../qt/**/*.py", recursive=True)
 )
 string_re = re.compile(r'_\(\s*(".*?")\s*\)')
 
@@ -15,6 +15,7 @@ blacklist = {
     "Label1",
     "After pressing OK, you can choose which tags to include.",
     "Filter/Cram",
+    "Show %s",
     # previewer.py needs updating to fix these
     "Shortcut key: R",
     "Shortcut key: B",
@@ -34,9 +35,9 @@ def repl(m):
 
     if "%d" in text or "%s" in text:
         # replace { $val } with %s for compat with old code
-        return f'tr(TR.{screaming}, val="%s")'
+        return f'tr_legacyglobal(TR.{screaming}, val="%s")'
 
-    return f"tr(TR.{screaming})"
+    return f"tr_legacyglobal(TR.{screaming})"
 
 
 for file in files:
@@ -46,6 +47,7 @@ for file in files:
     buf2 = string_re.sub(repl, buf)
     if buf != buf2:
         lines = buf2.split("\n")
-        lines.insert(3, "from aqt.utils import tr, TR")
+        lines.insert(3, "from anki.rsbackend import TR")
+        lines.insert(3, "from anki.lang import tr_legacyglobal")
         buf2 = "\n".join(lines)
     open(file, "w").write(buf2)

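The net effect of string_re/repl on a migrated source line, illustrated on one of the strings handled above (the SCREAMING_SNAKE constant name comes from machinery outside this hunk, so the exact TR name here is taken from the importer changes earlier in the diff):

# before the rewrite
self.log.append(_("Aborted: %s") % str(e))
# after string_re.sub(repl, buf): the %s is kept via val="%s" for compatibility
self.log.append(tr_legacyglobal(TR.IMPORTING_ABORTED, val="%s") % str(e))
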
@@ -27,18 +27,23 @@ importing-map-to = Map to { $val }
 importing-map-to-tags = Map to Tags
 importing-mapped-to = mapped to <b>{ $val }</b>
 importing-mapped-to-tags = mapped to <b>Tags</b>
+importing-mnemosyne-20-deck-db = Mnemosyne 2.0 Deck (*.db)
 importing-multicharacter-separators-are-not-supported-please = Multi-character separators are not supported. Please enter one character only.
 importing-notes-added-from-file = Notes added from file: { $val }
 importing-notes-found-in-file = Notes found in file: { $val }
 importing-notes-skipped-as-theyre-already-in = Notes skipped, as they're already in your collection: { $val }
 importing-notes-that-could-not-be-imported = Notes that could not be imported as note type has changed: { $val }
 importing-notes-updated-as-file-had-newer = Notes updated, as file had newer version: { $val }
+importing-packaged-anki-deckcollection-apkg-colpkg-zip = Packaged Anki Deck/Collection (*.apkg *.colpkg *.zip)
+importing-pauker-18-lesson-paugz = Pauker 1.8 Lesson (*.pau.gz)
 importing-rows-had-num1d-fields-expected-num2d = '%(row)s' had %(num1)d fields, expected %(num2)d
 importing-selected-file-was-not-in-utf8 = Selected file was not in UTF-8 format. Please see the importing section of the manual.
 importing-semicolon = Semicolon
 importing-skipped = Skipped
+importing-supermemo-xml-export-xml = Supermemo XML export (*.xml)
 importing-tab = Tab
 importing-tag-modified-notes = Tag modified notes:
+importing-text-separated-by-tabs-or-semicolons = Text separated by tabs or semicolons (*)
 importing-the-first-field-of-the-note = The first field of the note type must be mapped.
 importing-the-provided-file-is-not-a = The provided file is not a valid .apkg file.
 importing-this-file-does-not-appear-to = This file does not appear to be a valid .apkg file. If you're getting this error from a file downloaded from AnkiWeb, chances are that your download failed. Please try again, and if the problem persists, please try again with a different browser.
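For the keys in this hunk, the Fluent identifiers map directly onto the generated TR constants used in the Python changes above: uppercase the key and replace dashes with underscores. For example:

# importing-notes-found-in-file  ->  TR.IMPORTING_NOTES_FOUND_IN_FILE
col.tr(TR.IMPORTING_NOTES_FOUND_IN_FILE, val=10)
# -> roughly "Notes found in file: 10"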