add Collection struct, and get media check working again

- media check no longer needs collection to be closed
- use savepoints for operations initiated by Rust, so they are
atomic without forcing a commit
This commit is contained in:
Damien Elmes 2020-03-05 14:35:48 +10:00
parent 2cd7885ec0
commit ae06b9e446
15 changed files with 281 additions and 130 deletions

View File

@ -264,10 +264,10 @@ crt=?, mod=?, scm=?, dty=?, usn=?, ls=?, conf=?""",
def reopen(self) -> None:
"Reconnect to DB (after changing threads, etc)."
raise Exception("fixme")
if not self.db:
#self.db = DBProxy(self.path)
self.media.connect()
self._openLog()
# if not self.db:
# # self.db = DBProxy(self.path)
# self.media.connect()
# self._openLog()
def rollback(self) -> None:
self.db.rollback()

View File

@ -8,6 +8,7 @@ from typing import Any, Iterable, List, Optional, Sequence, Union
import anki
# fixme: threads
# fixme: col.reopen()
# fixme: setAutocommit()
# fixme: transaction/lock handling

View File

@ -171,8 +171,11 @@ class MediaManager:
##########################################################################
def check(self) -> MediaCheckOutput:
"This should be called while the collection is closed."
return self.col.backend.check_media()
output = self.col.backend.check_media()
# files may have been renamed on disk, so an undo at this point could
# break file references
self.col.save()
return output
def render_all_latex(
self, progress_cb: Optional[Callable[[int], bool]] = None

View File

@ -19,7 +19,7 @@ from anki.stdmodels import (
addForwardOptionalReverse,
addForwardReverse,
)
from anki.utils import intTime, isWin
from anki.utils import intTime
class ServerData:

View File

@ -73,8 +73,6 @@ def test_deckIntegration():
with open(os.path.join(d.media.dir(), "foo.jpg"), "w") as f:
f.write("test")
# check media
d.close()
ret = d.media.check()
d.reopen()
assert ret.missing == ["fake2.png"]
assert ret.unused == ["foo.jpg"]

View File

@ -40,7 +40,6 @@ class MediaChecker:
def check(self) -> None:
self.progress_dialog = self.mw.progress.start()
hooks.bg_thread_progress_callback.append(self._on_progress)
self.mw.col.close()
self.mw.taskman.run_in_background(self._check, self._on_finished)
def _on_progress(self, proceed: bool, progress: Progress) -> bool:
@ -61,7 +60,6 @@ class MediaChecker:
hooks.bg_thread_progress_callback.remove(self._on_progress)
self.mw.progress.finish()
self.progress_dialog = None
self.mw.col.reopen()
exc = future.exception()
if isinstance(exc, Interrupted):

View File

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::err::Result;
use crate::storage::SqliteStorage;
use crate::storage::StorageContext;
use rusqlite::types::{FromSql, FromSqlError, ToSql, ToSqlOutput, ValueRef};
use serde_derive::{Deserialize, Serialize};
@ -58,28 +58,28 @@ impl FromSql for SqlValue {
}
}
pub(super) fn db_command_bytes(db: &SqliteStorage, input: &[u8]) -> Result<String> {
pub(super) fn db_command_bytes(ctx: &StorageContext, input: &[u8]) -> Result<String> {
let req: DBRequest = serde_json::from_slice(input)?;
let resp = match req {
DBRequest::Query { sql, args } => db_query(db, &sql, &args)?,
DBRequest::Query { sql, args } => db_query(ctx, &sql, &args)?,
DBRequest::Begin => {
db.begin()?;
ctx.begin_trx()?;
DBResult::None
}
DBRequest::Commit => {
db.commit()?;
ctx.commit_trx()?;
DBResult::None
}
DBRequest::Rollback => {
db.rollback()?;
ctx.rollback_trx()?;
DBResult::None
}
};
Ok(serde_json::to_string(&resp)?)
}
pub(super) fn db_query(db: &SqliteStorage, sql: &str, args: &[SqlValue]) -> Result<DBResult> {
let mut stmt = db.db.prepare_cached(sql)?;
pub(super) fn db_query(ctx: &StorageContext, sql: &str, args: &[SqlValue]) -> Result<DBResult> {
let mut stmt = ctx.db.prepare_cached(sql)?;
let columns = stmt.column_count();

View File

@ -4,16 +4,16 @@
use crate::backend::dbproxy::db_command_bytes;
use crate::backend_proto::backend_input::Value;
use crate::backend_proto::{Empty, RenderedTemplateReplacement, SyncMediaIn};
use crate::collection::{open_collection, Collection};
use crate::err::{AnkiError, NetworkErrorKind, Result, SyncErrorKind};
use crate::i18n::{tr_args, FString, I18n};
use crate::latex::{extract_latex, extract_latex_expanding_clozes, ExtractedLatex};
use crate::log::{default_logger, Logger};
use crate::log::default_logger;
use crate::media::check::MediaChecker;
use crate::media::sync::MediaSyncProgress;
use crate::media::MediaManager;
use crate::sched::cutoff::{local_minutes_west_for_stamp, sched_timing_today};
use crate::sched::timespan::{answer_button_time, learning_congrats, studied_today, time_span};
use crate::storage::SqliteStorage;
use crate::template::{
render_card, without_legacy_template_directives, FieldMap, FieldRequirements, ParsedTemplate,
RenderedNode,
@ -31,14 +31,12 @@ mod dbproxy;
pub type ProtoProgressCallback = Box<dyn Fn(Vec<u8>) -> bool + Send>;
pub struct Backend {
col: SqliteStorage,
col: Collection,
#[allow(dead_code)]
col_path: PathBuf,
media_folder: PathBuf,
media_db: String,
progress_callback: Option<ProtoProgressCallback>,
pub i18n: I18n,
log: Logger,
}
enum Progress<'a> {
@ -124,7 +122,7 @@ pub fn init_backend(init_msg: &[u8]) -> std::result::Result<Backend, String> {
log::terminal(),
);
let col = SqliteStorage::open_or_create(Path::new(&input.collection_path), input.server)
let col = open_collection(&input.collection_path, input.server, i18n, logger)
.map_err(|e| format!("Unable to open collection: {:?}", e))?;
match Backend::new(
@ -132,8 +130,6 @@ pub fn init_backend(init_msg: &[u8]) -> std::result::Result<Backend, String> {
&input.collection_path,
&input.media_folder_path,
&input.media_db_path,
i18n,
logger,
) {
Ok(backend) => Ok(backend),
Err(e) => Err(format!("{:?}", e)),
@ -142,12 +138,10 @@ pub fn init_backend(init_msg: &[u8]) -> std::result::Result<Backend, String> {
impl Backend {
pub fn new(
col: SqliteStorage,
col: Collection,
col_path: &str,
media_folder: &str,
media_db: &str,
i18n: I18n,
log: Logger,
) -> Result<Backend> {
Ok(Backend {
col,
@ -155,11 +149,13 @@ impl Backend {
media_folder: media_folder.into(),
media_db: media_db.into(),
progress_callback: None,
i18n,
log,
})
}
pub fn i18n(&self) -> &I18n {
&self.col.i18n
}
/// Decode a request, process it, and return the encoded result.
pub fn run_command_bytes(&mut self, req: &[u8]) -> Vec<u8> {
let mut buf = vec![];
@ -169,7 +165,7 @@ impl Backend {
Err(_e) => {
// unable to decode
let err = AnkiError::invalid_input("couldn't decode backend request");
let oerr = anki_error_to_proto_error(err, &self.i18n);
let oerr = anki_error_to_proto_error(err, &self.col.i18n);
let output = pb::BackendOutput {
value: Some(oerr.into()),
};
@ -187,12 +183,12 @@ impl Backend {
let oval = if let Some(ival) = input.value {
match self.run_command_inner(ival) {
Ok(output) => output,
Err(err) => anki_error_to_proto_error(err, &self.i18n).into(),
Err(err) => anki_error_to_proto_error(err, &self.col.i18n).into(),
}
} else {
anki_error_to_proto_error(
AnkiError::invalid_input("unrecognized backend input value"),
&self.i18n,
&self.col.i18n,
)
.into()
};
@ -237,12 +233,12 @@ impl Backend {
Value::StudiedToday(input) => OValue::StudiedToday(studied_today(
input.cards as usize,
input.seconds as f32,
&self.i18n,
&self.col.i18n,
)),
Value::CongratsLearnMsg(input) => OValue::CongratsLearnMsg(learning_congrats(
input.remaining as usize,
input.next_due,
&self.i18n,
&self.col.i18n,
)),
Value::EmptyTrash(_) => {
self.empty_trash()?;
@ -257,7 +253,7 @@ impl Backend {
fn fire_progress_callback(&self, progress: Progress) -> bool {
if let Some(cb) = &self.progress_callback {
let bytes = progress_to_proto_bytes(progress, &self.i18n);
let bytes = progress_to_proto_bytes(progress, &self.col.i18n);
cb(bytes)
} else {
true
@ -337,7 +333,7 @@ impl Backend {
&input.answer_template,
&fields,
input.card_ordinal as u16,
&self.i18n,
&self.col.i18n,
)?;
// return
@ -415,7 +411,7 @@ impl Backend {
};
let mut rt = Runtime::new().unwrap();
rt.block_on(mgr.sync_media(callback, &input.endpoint, &input.hkey, self.log.clone()))
rt.block_on(mgr.sync_media(callback, &input.endpoint, &input.hkey, self.col.log.clone()))
}
fn check_media(&self) -> Result<pb::MediaCheckOut> {
@ -423,7 +419,8 @@ impl Backend {
|progress: usize| self.fire_progress_callback(Progress::MediaCheck(progress as u32));
let mgr = MediaManager::new(&self.media_folder, &self.media_db)?;
let mut checker = MediaChecker::new(&mgr, &self.col_path, callback, &self.i18n, &self.log);
self.col.transact(None, |ctx| {
let mut checker = MediaChecker::new(ctx, &mgr, callback);
let mut output = checker.check()?;
let report = checker.summarize_output(&mut output);
@ -434,6 +431,7 @@ impl Backend {
report,
have_trash: output.trash_count > 0,
})
})
}
fn remove_media_files(&self, fnames: &[String]) -> Result<()> {
@ -454,7 +452,7 @@ impl Backend {
.map(|(k, v)| (k.as_str(), translate_arg_to_fluent_val(&v)))
.collect();
self.i18n.trn(key, map)
self.col.i18n.trn(key, map)
}
fn format_time_span(&self, input: pb::FormatTimeSpanIn) -> String {
@ -463,12 +461,14 @@ impl Backend {
None => return "".to_string(),
};
match context {
pb::format_time_span_in::Context::Precise => time_span(input.seconds, &self.i18n, true),
pb::format_time_span_in::Context::Precise => {
time_span(input.seconds, &self.col.i18n, true)
}
pb::format_time_span_in::Context::Intervals => {
time_span(input.seconds, &self.i18n, false)
time_span(input.seconds, &self.col.i18n, false)
}
pb::format_time_span_in::Context::AnswerButtons => {
answer_button_time(input.seconds, &self.i18n)
answer_button_time(input.seconds, &self.col.i18n)
}
}
}
@ -478,9 +478,11 @@ impl Backend {
|progress: usize| self.fire_progress_callback(Progress::MediaCheck(progress as u32));
let mgr = MediaManager::new(&self.media_folder, &self.media_db)?;
let mut checker = MediaChecker::new(&mgr, &self.col_path, callback, &self.i18n, &self.log);
self.col.transact(None, |ctx| {
let mut checker = MediaChecker::new(ctx, &mgr, callback);
checker.empty_trash()
})
}
fn restore_trash(&self) -> Result<()> {
@ -488,13 +490,15 @@ impl Backend {
|progress: usize| self.fire_progress_callback(Progress::MediaCheck(progress as u32));
let mgr = MediaManager::new(&self.media_folder, &self.media_db)?;
let mut checker = MediaChecker::new(&mgr, &self.col_path, callback, &self.i18n, &self.log);
self.col.transact(None, |ctx| {
let mut checker = MediaChecker::new(ctx, &mgr, callback);
checker.restore_trash()
})
}
pub fn db_command(&self, input: &[u8]) -> Result<String> {
db_command_bytes(&self.col, input)
db_command_bytes(&self.col.storage.context(self.col.server), input)
}
}

83
rslib/src/collection.rs Normal file
View File

@ -0,0 +1,83 @@
use crate::err::Result;
use crate::i18n::I18n;
use crate::log::Logger;
use crate::storage::{SqliteStorage, StorageContext};
use std::path::Path;
/// Open (or create) the collection database at `path`, bundling it with
/// the server flag, translations and logger into a `Collection`.
pub fn open_collection<P: AsRef<Path>>(
    path: P,
    server: bool,
    i18n: I18n,
    log: Logger,
) -> Result<Collection> {
    Ok(Collection {
        storage: SqliteStorage::open_or_create(path.as_ref())?,
        server,
        i18n,
        log,
    })
}
/// Owns the open collection database plus the per-collection state
/// (server flag, translations, logger) that requests need access to.
pub struct Collection {
pub(crate) storage: SqliteStorage,
// true when this instance is acting as the sync server
pub(crate) server: bool,
pub(crate) i18n: I18n,
pub(crate) log: Logger,
}
// Placeholder describing a collection-level operation; no variants exist
// yet — presumably intended for a future undo mechanism (see transact()).
pub(crate) enum CollectionOp {}
/// Short-lived, per-request bundle of a storage context with borrowed
/// i18n/logging handles; created by Collection::with_ctx()/transact().
pub(crate) struct RequestContext<'a> {
pub storage: StorageContext<'a>,
pub i18n: &'a I18n,
pub log: &'a Logger,
}
impl Collection {
    /// Run `func` with a RequestContext that lives only for the duration
    /// of the call. The context caches prepared SQL statements, so it
    /// should be passed down the call tree rather than recreated.
    ///
    /// Intended for read-only work; use transact() for mutations so they
    /// are wrapped in a savepoint.
    pub(crate) fn with_ctx<F, R>(&self, func: F) -> Result<R>
    where
        F: FnOnce(&mut RequestContext) -> Result<R>,
    {
        func(&mut RequestContext {
            storage: self.storage.context(self.server),
            i18n: &self.i18n,
            log: &self.log,
        })
    }

    /// Run `func` inside a savepoint. The savepoint is released when both
    /// `func` and the commit succeed, and rolled back if either fails.
    /// Note that a failure while rolling back masks the original error.
    pub(crate) fn transact<F, R>(&self, op: Option<CollectionOp>, func: F) -> Result<R>
    where
        F: FnOnce(&mut RequestContext) -> Result<R>,
    {
        self.with_ctx(|ctx| {
            ctx.storage.begin_rust_trx()?;
            match func(ctx) {
                Ok(val) => match ctx.storage.commit_rust_op(op) {
                    Ok(()) => Ok(val),
                    Err(commit_err) => {
                        ctx.storage.rollback_rust_trx()?;
                        Err(commit_err)
                    }
                },
                Err(err) => {
                    ctx.storage.rollback_rust_trx()?;
                    Err(err)
                }
            }
        })
    }
}

View File

@ -11,6 +11,7 @@ pub fn version() -> &'static str {
pub mod backend;
pub mod cloze;
pub mod collection;
pub mod err;
pub mod i18n;
pub mod latex;

View File

@ -1,14 +1,12 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::collection::RequestContext;
use crate::err::{AnkiError, DBErrorKind, Result};
use crate::i18n::{tr_args, tr_strs, FString, I18n};
use crate::i18n::{tr_args, tr_strs, FString};
use crate::latex::extract_latex_expanding_clozes;
use crate::log::{debug, Logger};
use crate::media::col::{
for_every_note, get_note_types, mark_collection_modified, open_or_create_collection_db,
set_note, Note,
};
use crate::log::debug;
use crate::media::col::{for_every_note, get_note_types, mark_collection_modified, set_note, Note};
use crate::media::database::MediaDatabaseContext;
use crate::media::files::{
data_for_file, filename_if_normalized, trash_folder, MEDIA_SYNC_FILESIZE_LIMIT,
@ -19,14 +17,13 @@ use coarsetime::Instant;
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::{HashMap, HashSet};
use std::path::Path;
use std::{borrow::Cow, fs, io};
lazy_static! {
static ref REMOTE_FILENAME: Regex = Regex::new("(?i)^https?://").unwrap();
}
#[derive(Debug, PartialEq)]
#[derive(Debug, PartialEq, Clone)]
pub struct MediaCheckOutput {
pub unused: Vec<String>,
pub missing: Vec<String>,
@ -49,34 +46,28 @@ pub struct MediaChecker<'a, P>
where
P: FnMut(usize) -> bool,
{
ctx: &'a RequestContext<'a>,
mgr: &'a MediaManager,
col_path: &'a Path,
progress_cb: P,
checked: usize,
progress_updated: Instant,
i18n: &'a I18n,
log: &'a Logger,
}
impl<P> MediaChecker<'_, P>
where
P: FnMut(usize) -> bool,
{
pub fn new<'a>(
pub(crate) fn new<'a>(
ctx: &'a RequestContext<'a>,
mgr: &'a MediaManager,
col_path: &'a Path,
progress_cb: P,
i18n: &'a I18n,
log: &'a Logger,
) -> MediaChecker<'a, P> {
MediaChecker {
ctx,
mgr,
col_path,
progress_cb,
checked: 0,
progress_updated: Instant::now(),
i18n,
log,
}
}
@ -100,7 +91,7 @@ where
pub fn summarize_output(&self, output: &mut MediaCheckOutput) -> String {
let mut buf = String::new();
let i = &self.i18n;
let i = &self.ctx.i18n;
// top summary area
if output.trash_count > 0 {
@ -279,7 +270,7 @@ where
}
})?;
let fname = self.mgr.add_file(ctx, disk_fname, &data)?;
debug!(self.log, "renamed"; "from"=>disk_fname, "to"=>&fname.as_ref());
debug!(self.ctx.log, "renamed"; "from"=>disk_fname, "to"=>&fname.as_ref());
assert_ne!(fname.as_ref(), disk_fname);
// remove the original file
@ -373,7 +364,7 @@ where
self.mgr
.add_file(&mut self.mgr.dbctx(), fname.as_ref(), &data)?;
} else {
debug!(self.log, "file disappeared while restoring trash"; "fname"=>fname.as_ref());
debug!(self.ctx.log, "file disappeared while restoring trash"; "fname"=>fname.as_ref());
}
fs::remove_file(dentry.path())?;
}
@ -387,14 +378,11 @@ where
&mut self,
renamed: &HashMap<String, String>,
) -> Result<HashSet<String>> {
let mut db = open_or_create_collection_db(self.col_path)?;
let trx = db.transaction()?;
let mut referenced_files = HashSet::new();
let note_types = get_note_types(&trx)?;
let note_types = get_note_types(&self.ctx.storage.db)?;
let mut collection_modified = false;
for_every_note(&trx, |note| {
for_every_note(&self.ctx.storage.db, |note| {
self.checked += 1;
if self.checked % 10 == 0 {
self.maybe_fire_progress_cb()?;
@ -407,7 +395,7 @@ where
})?;
if fix_and_extract_media_refs(note, &mut referenced_files, renamed)? {
// note was modified, needs saving
set_note(&trx, note, nt)?;
set_note(&self.ctx.storage.db, note, nt)?;
collection_modified = true;
}
@ -417,8 +405,7 @@ where
})?;
if collection_modified {
mark_collection_modified(&trx)?;
trx.commit()?;
mark_collection_modified(&self.ctx.storage.db)?;
}
Ok(referenced_files)
@ -512,18 +499,18 @@ fn extract_latex_refs(note: &Note, seen_files: &mut HashSet<String>, svg: bool)
#[cfg(test)]
mod test {
use crate::collection::{open_collection, Collection};
use crate::err::Result;
use crate::i18n::I18n;
use crate::log;
use crate::log::Logger;
use crate::media::check::{MediaCheckOutput, MediaChecker};
use crate::media::files::trash_folder;
use crate::media::MediaManager;
use std::path::{Path, PathBuf};
use std::path::Path;
use std::{fs, io};
use tempfile::{tempdir, TempDir};
fn common_setup() -> Result<(TempDir, MediaManager, PathBuf, Logger, I18n)> {
fn common_setup() -> Result<(TempDir, MediaManager, Collection)> {
let dir = tempdir()?;
let media_dir = dir.path().join("media");
fs::create_dir(&media_dir)?;
@ -537,15 +524,16 @@ mod test {
let mgr = MediaManager::new(&media_dir, media_db)?;
let log = log::terminal();
let i18n = I18n::new(&["zz"], "dummy", log.clone());
Ok((dir, mgr, col_path, log, i18n))
let col = open_collection(col_path, false, i18n, log)?;
Ok((dir, mgr, col))
}
#[test]
fn media_check() -> Result<()> {
let (_dir, mgr, col_path, log, i18n) = common_setup()?;
let (_dir, mgr, col) = common_setup()?;
// add some test files
fs::write(&mgr.media_folder.join("zerobytes"), "")?;
@ -556,8 +544,13 @@ mod test {
fs::write(&mgr.media_folder.join("unused.jpg"), "foo")?;
let progress = |_n| true;
let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n, &log);
let mut output = checker.check()?;
let (output, report) = col.transact(None, |ctx| {
let mut checker = MediaChecker::new(&ctx, &mgr, progress);
let output = checker.check()?;
let summary = checker.summarize_output(&mut output.clone());
Ok((output, summary))
})?;
assert_eq!(
output,
@ -577,7 +570,6 @@ mod test {
assert!(fs::metadata(&mgr.media_folder.join("foo[.jpg")).is_err());
assert!(fs::metadata(&mgr.media_folder.join("foo.jpg")).is_ok());
let report = checker.summarize_output(&mut output);
assert_eq!(
report,
"Missing files: 1
@ -617,14 +609,16 @@ Unused: unused.jpg
#[test]
fn trash_handling() -> Result<()> {
let (_dir, mgr, col_path, log, i18n) = common_setup()?;
let (_dir, mgr, col) = common_setup()?;
let trash_folder = trash_folder(&mgr.media_folder)?;
fs::write(trash_folder.join("test.jpg"), "test")?;
let progress = |_n| true;
let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n, &log);
checker.restore_trash()?;
col.transact(None, |ctx| {
let mut checker = MediaChecker::new(&ctx, &mgr, progress);
checker.restore_trash()
})?;
// file should have been moved to media folder
assert_eq!(files_in_dir(&trash_folder), Vec::<String>::new());
@ -635,7 +629,10 @@ Unused: unused.jpg
// if we repeat the process, restoring should do the same thing if the contents are equal
fs::write(trash_folder.join("test.jpg"), "test")?;
checker.restore_trash()?;
col.transact(None, |ctx| {
let mut checker = MediaChecker::new(&ctx, &mgr, progress);
checker.restore_trash()
})?;
assert_eq!(files_in_dir(&trash_folder), Vec::<String>::new());
assert_eq!(
files_in_dir(&mgr.media_folder),
@ -644,7 +641,10 @@ Unused: unused.jpg
// but rename if required
fs::write(trash_folder.join("test.jpg"), "test2")?;
checker.restore_trash()?;
col.transact(None, |ctx| {
let mut checker = MediaChecker::new(&ctx, &mgr, progress);
checker.restore_trash()
})?;
assert_eq!(files_in_dir(&trash_folder), Vec::<String>::new());
assert_eq!(
files_in_dir(&mgr.media_folder),
@ -659,13 +659,17 @@ Unused: unused.jpg
#[test]
fn unicode_normalization() -> Result<()> {
let (_dir, mgr, col_path, log, i18n) = common_setup()?;
let (_dir, mgr, col) = common_setup()?;
fs::write(&mgr.media_folder.join("ぱぱ.jpg"), "nfd encoding")?;
let progress = |_n| true;
let mut checker = MediaChecker::new(&mgr, &col_path, progress, &i18n, &log);
let mut output = checker.check()?;
let mut output = col.transact(None, |ctx| {
let mut checker = MediaChecker::new(&ctx, &mgr, progress);
checker.check()
})?;
output.missing.sort();
if cfg!(target_vendor = "apple") {

View File

@ -11,7 +11,6 @@ use serde_aux::field_attributes::deserialize_number_from_string;
use serde_derive::Deserialize;
use std::collections::HashMap;
use std::convert::TryInto;
use std::path::Path;
#[derive(Debug)]
pub(super) struct Note {
@ -45,19 +44,6 @@ fn field_checksum(text: &str) -> u32 {
u32::from_be_bytes(digest[..4].try_into().unwrap())
}
pub(super) fn open_or_create_collection_db(path: &Path) -> Result<Connection> {
let db = Connection::open(path)?;
db.pragma_update(None, "locking_mode", &"exclusive")?;
db.pragma_update(None, "page_size", &4096)?;
db.pragma_update(None, "cache_size", &(-40 * 1024))?;
db.pragma_update(None, "legacy_file_format", &false)?;
db.pragma_update(None, "journal", &"wal")?;
db.set_prepared_statement_cache_capacity(5);
Ok(db)
}
#[derive(Deserialize, Debug)]
pub(super) struct NoteType {
#[serde(deserialize_with = "deserialize_number_from_string")]

View File

@ -1,3 +1,3 @@
mod sqlite;
pub(crate) use sqlite::SqliteStorage;
pub(crate) use sqlite::{SqliteStorage, StorageContext};

View File

@ -1,9 +1,11 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::collection::CollectionOp;
use crate::err::Result;
use crate::err::{AnkiError, DBErrorKind};
use crate::time::i64_unix_timestamp;
use crate::time::i64_unix_secs;
use crate::types::Usn;
use rusqlite::{params, Connection, NO_PARAMS};
use std::path::{Path, PathBuf};
@ -15,8 +17,9 @@ const SCHEMA_MAX_VERSION: u8 = 11;
pub struct SqliteStorage {
// currently crate-visible for dbproxy
pub(crate) db: Connection,
// fixme: stored in wrong location?
path: PathBuf,
server: bool,
}
fn open_or_create_collection_db(path: &Path) -> Result<Connection> {
@ -59,17 +62,14 @@ fn trace(s: &str) {
}
impl SqliteStorage {
pub(crate) fn open_or_create(path: &Path, server: bool) -> Result<Self> {
pub(crate) fn open_or_create(path: &Path) -> Result<Self> {
let db = open_or_create_collection_db(path)?;
let (create, ver) = schema_version(&db)?;
if create {
db.prepare_cached("begin exclusive")?.execute(NO_PARAMS)?;
db.execute_batch(include_str!("schema11.sql"))?;
db.execute(
"update col set crt=?, ver=?",
params![i64_unix_timestamp(), ver],
)?;
db.execute("update col set crt=?, ver=?", params![i64_unix_secs(), ver])?;
db.prepare_cached("commit")?.execute(NO_PARAMS)?;
} else {
if ver > SCHEMA_MAX_VERSION {
@ -89,30 +89,103 @@ impl SqliteStorage {
let storage = Self {
db,
path: path.to_owned(),
server,
};
Ok(storage)
}
pub(crate) fn begin(&self) -> Result<()> {
pub(crate) fn context(&self, server: bool) -> StorageContext {
StorageContext::new(&self.db, server)
}
}
/// A per-request view onto the open database connection, carrying the
/// server flag and a lazily-fetched USN cache.
pub(crate) struct StorageContext<'a> {
// crate-visible so dbproxy can prepare statements directly
pub(crate) db: &'a Connection,
#[allow(dead_code)]
server: bool,
// populated on first call to usn() when running as the server
#[allow(dead_code)]
usn: Option<Usn>,
}
impl StorageContext<'_> {
// Construct a context over the given connection; the USN cache starts
// empty and is filled lazily on first use.
fn new(db: &Connection, server: bool) -> StorageContext {
StorageContext {
db,
server,
usn: None,
}
}
// Standard transaction start/stop
//////////////////////////////////////
/// Begin a standard exclusive transaction (used by externally-initiated
/// DB commands via the DB proxy).
pub(crate) fn begin_trx(&self) -> Result<()> {
self.db
.prepare_cached("begin exclusive")?
.execute(NO_PARAMS)?;
Ok(())
}
pub(crate) fn commit(&self) -> Result<()> {
pub(crate) fn commit_trx(&self) -> Result<()> {
if !self.db.is_autocommit() {
self.db.prepare_cached("commit")?.execute(NO_PARAMS)?;
}
Ok(())
}
pub(crate) fn rollback(&self) -> Result<()> {
pub(crate) fn rollback_trx(&self) -> Result<()> {
if !self.db.is_autocommit() {
self.db.execute("rollback", NO_PARAMS)?;
}
Ok(())
}
// Savepoints
//////////////////////////////////////////
//
// This is necessary at the moment because Anki's current architecture uses
// long-running transactions as an undo mechanism. Once a proper undo
// mechanism has been added to all existing functionality, we could
// transition these to standard commits.
/// Open the "rust" savepoint, so Rust-initiated operations are atomic
/// without forcing a commit of any enclosing transaction.
pub(crate) fn begin_rust_trx(&self) -> Result<()> {
self.db
.prepare_cached("savepoint rust")?
.execute(NO_PARAMS)?;
Ok(())
}

/// Release (commit) the "rust" savepoint.
pub(crate) fn commit_rust_trx(&self) -> Result<()> {
self.db.prepare_cached("release rust")?.execute(NO_PARAMS)?;
Ok(())
}

/// Commit a Rust-level operation. The op argument is currently unused —
/// presumably reserved for a future undo mechanism; for now this just
/// releases the savepoint.
pub(crate) fn commit_rust_op(&self, _op: Option<CollectionOp>) -> Result<()> {
self.commit_rust_trx()
}

/// Roll back to the "rust" savepoint, undoing the current operation.
pub(crate) fn rollback_rust_trx(&self) -> Result<()> {
self.db
.prepare_cached("rollback to rust")?
.execute(NO_PARAMS)?;
Ok(())
}
//////////////////////////////////////////
/// Return the update sequence number to tag changes with.
/// When running as the server, the value is read from the col table on
/// first use and cached for the lifetime of this context; clients
/// always use -1.
#[allow(dead_code)]
pub(crate) fn usn(&mut self) -> Result<Usn> {
if self.server {
if self.usn.is_none() {
self.usn = Some(
self.db
.prepare_cached("select usn from col")?
.query_row(NO_PARAMS, |row| row.get(0))?,
);
}
Ok(*self.usn.as_ref().unwrap())
} else {
Ok(-1)
}
}
}

View File

@ -79,7 +79,7 @@ impl Backend {
let out_string = self
.backend
.db_command(in_bytes)
.map_err(|e| DBError::py_err(e.localized_description(&self.backend.i18n)))?;
.map_err(|e| DBError::py_err(e.localized_description(&self.backend.i18n())))?;
let out_obj = PyBytes::new(py, out_string.as_bytes());
Ok(out_obj.into())