# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

"""
Helper for running tasks on background threads.

See QueryOp() and CollectionOp() for higher-level routines.
"""

from __future__ import annotations

from concurrent.futures import Future
from concurrent.futures.thread import ThreadPoolExecutor
from threading import Lock
from typing import Any, Callable

import aqt
from anki.collection import Progress
from aqt.progress import ProgressUpdate
from aqt.qt import *

Closure = Callable[[], None]


class TaskManager(QObject):
    _closures_pending = pyqtSignal()

    def __init__(self, mw: aqt.AnkiQt) -> None:
        QObject.__init__(self)
        self.mw = mw.weakref()
        self._no_collection_executor = ThreadPoolExecutor()
        # a single worker thread, so collection-bound tasks run one at a time
        self._collection_executor = ThreadPoolExecutor(max_workers=1)
        self._closures: list[Closure] = []
        self._closures_lock = Lock()
        qconnect(self._closures_pending, self._on_closures_pending)

    def run_on_main(self, closure: Closure) -> None:
        "Run the provided closure on the main thread."
        with self._closures_lock:
            self._closures.append(closure)
        self._closures_pending.emit()  # type: ignore

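    # Illustrative usage sketch, not part of the module proper: a background task
    # can hand UI work back to the main thread (mw.taskman is assumed to be this
    # TaskManager instance on the running aqt.AnkiQt):
    #
    #     mw.taskman.run_on_main(lambda: mw.progress.finish())
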
    def run_in_background(
        self,
        task: Callable,
        on_done: Callable[[Future], None] | None = None,
        args: dict[str, Any] | None = None,
        uses_collection: bool = True,
    ) -> Future:
        """Use QueryOp()/CollectionOp() in new code.

        Run task on a background thread.

        If on_done is provided, it will be called on the main thread with
        the completed future.

        Args, if provided, will be passed on as keyword arguments to the task callable.

        Tasks that access the collection are serialized. If you're doing things that
        don't require the collection (e.g. network requests), you can pass
        uses_collection=False to allow multiple tasks to run in parallel."""
        # Before we launch a background task, ensure any pending on_done closures are
        # run on main. Qt's signal/slot system will have posted a notification, but it
        # may not have been processed yet. The on_done() closures may make small queries
        # to the database that we want to run first - if we delay them until after the
        # background task starts, and it takes out a long-running lock on the database,
        # the UI thread will hang until the end of the op.
        self._on_closures_pending()

        if args is None:
            args = {}

        executor = (
            self._collection_executor
            if uses_collection
            else self._no_collection_executor
        )
        fut = executor.submit(task, **args)

        if on_done is not None:
            fut.add_done_callback(
                lambda future: self.run_on_main(lambda: on_done(future))
            )

        return fut

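    # Illustrative usage sketch, not part of the module proper: count_notes and
    # on_count_done are hypothetical, and tooltip is assumed to be imported from
    # aqt.utils. Keyword arguments for the task go in `args`; the completed Future
    # is handed to on_done on the main thread.
    #
    #     def count_notes(query: str) -> int:
    #         return len(mw.col.find_notes(query))
    #
    #     def on_count_done(fut: Future) -> None:
    #         tooltip(f"{fut.result()} notes")  # .result() re-raises task errors
    #
    #     mw.taskman.run_in_background(
    #         count_notes, on_count_done, args={"query": "deck:Default"}
    #     )
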
    def with_progress(
        self,
        task: Callable,
        on_done: Callable[[Future], None] | None = None,
        parent: QWidget | None = None,
        label: str | None = None,
        immediate: bool = False,
        uses_collection: bool = True,
    ) -> None:
        "Use QueryOp()/CollectionOp() in new code."
        self.mw.progress.start(parent=parent, label=label, immediate=immediate)

        def wrapped_done(fut: Future) -> None:
            self.mw.progress.finish()
            if on_done:
                on_done(fut)

        self.run_in_background(task, wrapped_done, uses_collection=uses_collection)

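    # Illustrative usage sketch, not part of the module proper: rebuild_cache and
    # on_rebuilt are hypothetical. with_progress() opens a progress window for the
    # duration of the task and closes it before on_done runs.
    #
    #     def on_rebuilt(fut: Future) -> None:
    #         fut.result()  # surface any exception raised by the task
    #
    #     mw.taskman.with_progress(
    #         rebuild_cache, on_rebuilt, parent=mw, label="Rebuilding...", immediate=True
    #     )
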
    def with_backend_progress(
        self,
        task: Callable,
        progress_update: Callable[[Progress, ProgressUpdate], None],
        on_done: Callable[[Future], None] | None = None,
        parent: QWidget | None = None,
        start_label: str | None = None,
        uses_collection: bool = True,
    ) -> None:
        """Like with_progress(), but the progress window is driven by backend
        progress reports passed to progress_update."""
        self.mw.progress.start_with_backend_updates(
            progress_update,
            parent=parent,
            start_label=start_label,
        )

        def wrapped_done(fut: Future) -> None:
            self.mw.progress.finish()
            # allow the event loop to close the window before we proceed
            if on_done:
                self.mw.progress.single_shot(
                    100, lambda: on_done(fut), requires_collection=False
                )

        self.run_in_background(task, wrapped_done, uses_collection=uses_collection)

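    # Illustrative usage sketch, not part of the module proper: export_task and
    # on_export_done are hypothetical, and the fields read from Progress (and set
    # on ProgressUpdate) depend on the particular backend op, so they are elided.
    #
    #     def update_progress(progress: Progress, update: ProgressUpdate) -> None:
    #         ...  # translate the backend's Progress into a ProgressUpdate
    #
    #     mw.taskman.with_backend_progress(
    #         export_task, update_progress, on_done=on_export_done, parent=mw
    #     )
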
    def _on_closures_pending(self) -> None:
        """Run any pending closures. This runs in the main thread."""
        with self._closures_lock:
            closures = self._closures
            self._closures = []

        for closure in closures:
            closure()