drop separate RequestContext/StorageContext

Damien Elmes 2020-03-29 09:58:33 +10:00
parent 2810d3883b
commit fdeca610b0
11 changed files with 254 additions and 314 deletions

View File

@@ -2,7 +2,7 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 use crate::err::Result;
-use crate::storage::StorageContext;
+use crate::storage::SqliteStorage;
 use rusqlite::types::{FromSql, FromSqlError, ToSql, ToSqlOutput, ValueRef};
 use rusqlite::OptionalExtension;
 use serde_derive::{Deserialize, Serialize};
@@ -67,7 +67,7 @@ impl FromSql for SqlValue {
     }
 }
-pub(super) fn db_command_bytes(ctx: &StorageContext, input: &[u8]) -> Result<String> {
+pub(super) fn db_command_bytes(ctx: &SqliteStorage, input: &[u8]) -> Result<String> {
     let req: DBRequest = serde_json::from_slice(input)?;
     let resp = match req {
         DBRequest::Query {
@@ -98,7 +98,7 @@ pub(super) fn db_command_bytes(ctx: &StorageContext, input: &[u8]) -> Result<Str
     Ok(serde_json::to_string(&resp)?)
 }
-pub(super) fn db_query_row(ctx: &StorageContext, sql: &str, args: &[SqlValue]) -> Result<DBResult> {
+pub(super) fn db_query_row(ctx: &SqliteStorage, sql: &str, args: &[SqlValue]) -> Result<DBResult> {
     let mut stmt = ctx.db.prepare_cached(sql)?;
     let columns = stmt.column_count();
@@ -122,7 +122,7 @@ pub(super) fn db_query_row(ctx: &StorageContext, sql: &str, args: &[SqlValue]) -
     Ok(DBResult::Rows(rows))
 }
-pub(super) fn db_query(ctx: &StorageContext, sql: &str, args: &[SqlValue]) -> Result<DBResult> {
+pub(super) fn db_query(ctx: &SqliteStorage, sql: &str, args: &[SqlValue]) -> Result<DBResult> {
     let mut stmt = ctx.db.prepare_cached(sql)?;
     let columns = stmt.column_count();
@@ -141,7 +141,7 @@ pub(super) fn db_query(ctx: &StorageContext, sql: &str, args: &[SqlValue]) -> Re
 }
 pub(super) fn db_execute_many(
-    ctx: &StorageContext,
+    ctx: &SqliteStorage,
     sql: &str,
     args: &[Vec<SqlValue>],
 ) -> Result<DBResult> {

View File

@@ -590,12 +590,11 @@ impl Backend {
     }
     pub fn db_command(&self, input: &[u8]) -> Result<String> {
-        self.with_col(|col| col.with_ctx(|ctx| db_command_bytes(&ctx.storage, input)))
+        self.with_col(|col| db_command_bytes(&col.storage, input))
     }
     fn search_cards(&self, input: pb::SearchCardsIn) -> Result<pb::SearchCardsOut> {
         self.with_col(|col| {
-            col.with_ctx(|ctx| {
             let order = if let Some(order) = input.order {
                 use pb::sort_order::Value as V;
                 match order.value {
@@ -611,27 +610,24 @@ impl Backend {
             } else {
                 SortMode::FromConfig
             };
-            let cids = search_cards(ctx, &input.search, order)?;
+            let cids = search_cards(col, &input.search, order)?;
             Ok(pb::SearchCardsOut {
                 card_ids: cids.into_iter().map(|v| v.0).collect(),
             })
         })
-        })
     }
     fn search_notes(&self, input: pb::SearchNotesIn) -> Result<pb::SearchNotesOut> {
         self.with_col(|col| {
-            col.with_ctx(|ctx| {
-                let nids = search_notes(ctx, &input.search)?;
+            let nids = search_notes(col, &input.search)?;
             Ok(pb::SearchNotesOut {
                 note_ids: nids.into_iter().map(|v| v.0).collect(),
             })
         })
-        })
     }
     fn get_card(&self, cid: i64) -> Result<pb::GetCardOut> {
-        let card = self.with_col(|col| col.with_ctx(|ctx| ctx.storage.get_card(CardID(cid))))?;
+        let card = self.with_col(|col| col.storage.get_card(CardID(cid)))?;
         Ok(pb::GetCardOut {
             card: card.map(card_to_pb),
         })

View File

@@ -5,7 +5,7 @@ use crate::decks::DeckID;
 use crate::define_newtype;
 use crate::err::{AnkiError, Result};
 use crate::notes::NoteID;
-use crate::{collection::RequestContext, timestamp::TimestampSecs, types::Usn};
+use crate::{collection::Collection, timestamp::TimestampSecs, types::Usn};
 use num_enum::TryFromPrimitive;
 use serde_repr::{Deserialize_repr, Serialize_repr};
@@ -86,7 +86,7 @@ impl Default for Card {
     }
 }
-impl RequestContext<'_> {
+impl Collection {
     pub(crate) fn update_card(&mut self, card: &mut Card) -> Result<()> {
         if card.id.0 == 0 {
             return Err(AnkiError::invalid_input("card id not set"));

View File

@@ -4,7 +4,7 @@
 use crate::err::{AnkiError, Result};
 use crate::i18n::I18n;
 use crate::log::Logger;
-use crate::storage::{SqliteStorage, StorageContext};
+use crate::storage::SqliteStorage;
 use std::path::PathBuf;
 pub fn open_collection<P: Into<PathBuf>>(
@@ -16,24 +16,30 @@ pub fn open_collection<P: Into<PathBuf>>(
     log: Logger,
 ) -> Result<Collection> {
     let col_path = path.into();
-    let storage = SqliteStorage::open_or_create(&col_path)?;
+    let storage = SqliteStorage::open_or_create(&col_path, server)?;
     let col = Collection {
         storage,
         col_path,
         media_folder: media_folder.into(),
         media_db: media_db.into(),
-        server,
         i18n,
         log,
-        state: CollectionState::Normal,
+        state: CollectionState {
+            task_state: CollectionTaskState::Normal,
+        },
     };
     Ok(col)
 }
+#[derive(Debug)]
+pub struct CollectionState {
+    task_state: CollectionTaskState,
+}
 #[derive(Debug, PartialEq)]
-pub enum CollectionState {
+pub enum CollectionTaskState {
     Normal,
     // in this state, the DB must not be closed
     MediaSyncRunning,
@@ -45,7 +51,6 @@ pub struct Collection {
     pub(crate) col_path: PathBuf,
     pub(crate) media_folder: PathBuf,
     pub(crate) media_db: PathBuf,
-    pub(crate) server: bool,
     pub(crate) i18n: I18n,
     pub(crate) log: Logger,
     state: CollectionState,
@@ -53,63 +58,35 @@ pub struct Collection {
 pub(crate) enum CollectionOp {}
-pub(crate) struct RequestContext<'a> {
-    pub storage: StorageContext<'a>,
-    pub i18n: &'a I18n,
-    pub log: &'a Logger,
-    pub should_commit: bool,
-}
 impl Collection {
-    /// Call the provided closure with a RequestContext that exists for
-    /// the duration of the call. The request will cache prepared sql
-    /// statements, so should be passed down the call tree.
-    ///
-    /// This function should be used for read-only requests. To mutate
-    /// the database, use transact() instead.
-    pub(crate) fn with_ctx<F, R>(&self, func: F) -> Result<R>
-    where
-        F: FnOnce(&mut RequestContext) -> Result<R>,
-    {
-        let mut ctx = RequestContext {
-            storage: self.storage.context(self.server),
-            i18n: &self.i18n,
-            log: &self.log,
-            should_commit: true,
-        };
-        func(&mut ctx)
-    }
     /// Execute the provided closure in a transaction, rolling back if
     /// an error is returned.
-    pub(crate) fn transact<F, R>(&self, op: Option<CollectionOp>, func: F) -> Result<R>
+    pub(crate) fn transact<F, R>(&mut self, op: Option<CollectionOp>, func: F) -> Result<R>
     where
-        F: FnOnce(&mut RequestContext) -> Result<R>,
+        F: FnOnce(&mut Collection) -> Result<R>,
     {
-        self.with_ctx(|ctx| {
-            ctx.storage.begin_rust_trx()?;
-            let mut res = func(ctx);
-            if res.is_ok() && ctx.should_commit {
-                if let Err(e) = ctx.storage.mark_modified() {
+        self.storage.begin_rust_trx()?;
+        let mut res = func(self);
+        if res.is_ok() {
+            if let Err(e) = self.storage.mark_modified() {
                 res = Err(e);
-                } else if let Err(e) = ctx.storage.commit_rust_op(op) {
+            } else if let Err(e) = self.storage.commit_rust_op(op) {
                 res = Err(e);
             }
         }
-            if res.is_err() || !ctx.should_commit {
-                ctx.storage.rollback_rust_trx()?;
+        if res.is_err() {
+            self.storage.rollback_rust_trx()?;
         }
         res
-        })
     }
     pub(crate) fn set_media_sync_running(&mut self) -> Result<()> {
-        if self.state == CollectionState::Normal {
-            self.state = CollectionState::MediaSyncRunning;
+        if self.state.task_state == CollectionTaskState::Normal {
+            self.state.task_state = CollectionTaskState::MediaSyncRunning;
             Ok(())
         } else {
             Err(AnkiError::invalid_input("media sync already running"))
@@ -117,8 +94,8 @@ impl Collection {
     }
     pub(crate) fn set_media_sync_finished(&mut self) -> Result<()> {
-        if self.state == CollectionState::MediaSyncRunning {
-            self.state = CollectionState::Normal;
+        if self.state.task_state == CollectionTaskState::MediaSyncRunning {
+            self.state.task_state = CollectionTaskState::Normal;
             Ok(())
         } else {
             Err(AnkiError::invalid_input("media sync not running"))
@@ -126,6 +103,6 @@ impl Collection {
     }
     pub(crate) fn can_close(&self) -> bool {
-        self.state == CollectionState::Normal
+        self.state.task_state == CollectionTaskState::Normal
     }
 }
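The transact() closure now receives the Collection itself, so storage calls and helpers such as update_card() (see card.rs above) hang off a single object. A minimal usage sketch, not part of this commit; save_card is an illustrative name:

    use crate::card::Card;
    use crate::collection::Collection;
    use crate::err::Result;

    // Sketch only: returning Err from the closure rolls the transaction back,
    // while Ok marks the collection modified and commits.
    fn save_card(col: &mut Collection, card: &mut Card) -> Result<()> {
        col.transact(None, |col| col.update_card(card))
    }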

View File

@@ -1,7 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-use crate::collection::RequestContext;
+use crate::collection::Collection;
 use crate::err::{AnkiError, DBErrorKind, Result};
 use crate::i18n::{tr_args, tr_strs, FString};
 use crate::latex::extract_latex_expanding_clozes;
@@ -44,26 +44,26 @@ struct MediaFolderCheck {
     oversize: Vec<String>,
 }
-pub struct MediaChecker<'a, 'b, P>
+pub struct MediaChecker<'a, P>
 where
     P: FnMut(usize) -> bool,
 {
-    ctx: &'a mut RequestContext<'b>,
+    ctx: &'a mut Collection,
     mgr: &'a MediaManager,
     progress_cb: P,
     checked: usize,
     progress_updated: Instant,
 }
-impl<P> MediaChecker<'_, '_, P>
+impl<P> MediaChecker<'_, P>
 where
     P: FnMut(usize) -> bool,
 {
-    pub(crate) fn new<'a, 'b>(
-        ctx: &'a mut RequestContext<'b>,
+    pub(crate) fn new<'a>(
+        ctx: &'a mut Collection,
         mgr: &'a MediaManager,
         progress_cb: P,
-    ) -> MediaChecker<'a, 'b, P> {
+    ) -> MediaChecker<'a, P> {
         MediaChecker {
             ctx,
             mgr,
@@ -411,10 +411,6 @@ where
             Ok(())
         })?;
-        if !collection_modified {
-            self.ctx.should_commit = false;
-        }
         Ok(referenced_files)
     }
 }
@@ -561,7 +557,7 @@ pub(crate) mod test {
     #[test]
     fn media_check() -> Result<()> {
-        let (_dir, mgr, col) = common_setup()?;
+        let (_dir, mgr, mut col) = common_setup()?;
         // add some test files
         fs::write(&mgr.media_folder.join("zerobytes"), "")?;
@@ -637,7 +633,7 @@ Unused: unused.jpg
     #[test]
     fn trash_handling() -> Result<()> {
-        let (_dir, mgr, col) = common_setup()?;
+        let (_dir, mgr, mut col) = common_setup()?;
         let trash_folder = trash_folder(&mgr.media_folder)?;
         fs::write(trash_folder.join("test.jpg"), "test")?;
@@ -687,7 +683,7 @@ Unused: unused.jpg
     #[test]
     fn unicode_normalization() -> Result<()> {
-        let (_dir, mgr, col) = common_setup()?;
+        let (_dir, mgr, mut col) = common_setup()?;
         fs::write(&mgr.media_folder.join("ぱぱ.jpg"), "nfd encoding")?;
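With the extra lifetime gone, callers hand the checker a mutable Collection borrow directly. A call-site sketch, assuming the existing check() entry point and module paths are unchanged by this commit; run_check is an illustrative name:

    use crate::collection::Collection;
    use crate::err::Result;
    use crate::media::check::MediaChecker;
    use crate::media::MediaManager;

    // Sketch only: the progress callback returns true to keep the check running.
    fn run_check(col: &mut Collection, mgr: &MediaManager) -> Result<()> {
        let mut checker = MediaChecker::new(col, mgr, |_checked| true);
        let _output = checker.check()?;
        Ok(())
    }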

View File

@@ -4,7 +4,7 @@
 use super::{parser::Node, sqlwriter::node_to_sql};
 use crate::card::CardID;
 use crate::card::CardType;
-use crate::collection::RequestContext;
+use crate::collection::Collection;
 use crate::config::SortKind;
 use crate::err::Result;
 use crate::search::parser::parse;
@@ -18,7 +18,7 @@ pub(crate) enum SortMode {
 }
 pub(crate) fn search_cards<'a, 'b>(
-    req: &'a mut RequestContext<'b>,
+    req: &'b mut Collection,
     search: &'a str,
     order: SortMode,
 ) -> Result<Vec<CardID>> {
@@ -96,7 +96,7 @@ fn write_order(sql: &mut String, kind: &SortKind, reverse: bool) -> Result<()> {
 // In the future these items should be moved from JSON into separate SQL tables,
 // - for now we use a temporary deck to sort them.
-fn prepare_sort(req: &mut RequestContext, kind: &SortKind) -> Result<()> {
+fn prepare_sort(req: &mut Collection, kind: &SortKind) -> Result<()> {
     use SortKind::*;
     match kind {
         CardDeck | NoteType => {
@@ -139,14 +139,14 @@ fn prepare_sort(req: &mut RequestContext, kind: &SortKind) -> Result<()> {
     Ok(())
 }
-fn prepare_sort_order_table(req: &mut RequestContext) -> Result<()> {
+fn prepare_sort_order_table(req: &mut Collection) -> Result<()> {
     req.storage
         .db
         .execute_batch(include_str!("sort_order.sql"))?;
     Ok(())
 }
-fn prepare_sort_order_table2(req: &mut RequestContext) -> Result<()> {
+fn prepare_sort_order_table2(req: &mut Collection) -> Result<()> {
     req.storage
         .db
         .execute_batch(include_str!("sort_order2.sql"))?;

View File

@@ -2,15 +2,12 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 use super::{parser::Node, sqlwriter::node_to_sql};
-use crate::collection::RequestContext;
+use crate::collection::Collection;
 use crate::err::Result;
 use crate::notes::NoteID;
 use crate::search::parser::parse;
-pub(crate) fn search_notes<'a, 'b>(
-    req: &'a mut RequestContext<'b>,
-    search: &'a str,
-) -> Result<Vec<NoteID>> {
+pub(crate) fn search_notes<'a>(req: &'a mut Collection, search: &'a str) -> Result<Vec<NoteID>> {
     let top_node = Node::Group(parse(search)?);
     let (sql, args) = node_to_sql(req, &top_node)?;

View File

@@ -10,26 +10,26 @@ use crate::notes::field_checksum;
 use crate::notetypes::NoteTypeID;
 use crate::text::matches_wildcard;
 use crate::text::without_combining;
-use crate::{collection::RequestContext, text::strip_html_preserving_image_filenames};
+use crate::{collection::Collection, text::strip_html_preserving_image_filenames};
 use std::fmt::Write;
-struct SqlWriter<'a, 'b> {
-    req: &'a mut RequestContext<'b>,
+struct SqlWriter<'a> {
+    col: &'a mut Collection,
     sql: String,
     args: Vec<String>,
 }
-pub(super) fn node_to_sql(req: &mut RequestContext, node: &Node) -> Result<(String, Vec<String>)> {
+pub(super) fn node_to_sql(req: &mut Collection, node: &Node) -> Result<(String, Vec<String>)> {
     let mut sctx = SqlWriter::new(req);
     sctx.write_node_to_sql(&node)?;
     Ok((sctx.sql, sctx.args))
 }
-impl SqlWriter<'_, '_> {
-    fn new<'a, 'b>(req: &'a mut RequestContext<'b>) -> SqlWriter<'a, 'b> {
+impl SqlWriter<'_> {
+    fn new(col: &mut Collection) -> SqlWriter<'_> {
         let sql = String::new();
         let args = vec![];
-        SqlWriter { req, sql, args }
+        SqlWriter { col, sql, args }
     }
     fn write_node_to_sql(&mut self, node: &Node) -> Result<()> {
@@ -129,7 +129,7 @@ impl SqlWriter<'_, '_> {
     }
     fn write_rated(&mut self, days: u32, ease: Option<u8>) -> Result<()> {
-        let today_cutoff = self.req.storage.timing_today()?.next_day_at;
+        let today_cutoff = self.col.storage.timing_today()?.next_day_at;
         let days = days.min(365) as i64;
         let target_cutoff_ms = (today_cutoff - 86_400 * days) * 1_000;
         write!(
@@ -148,7 +148,7 @@ impl SqlWriter<'_, '_> {
     }
     fn write_prop(&mut self, op: &str, kind: &PropertyKind) -> Result<()> {
-        let timing = self.req.storage.timing_today()?;
+        let timing = self.col.storage.timing_today()?;
         match kind {
             PropertyKind::Due(days) => {
                 let day = days + (timing.days_elapsed as i32);
@@ -173,7 +173,7 @@ impl SqlWriter<'_, '_> {
     }
     fn write_state(&mut self, state: &StateKind) -> Result<()> {
-        let timing = self.req.storage.timing_today()?;
+        let timing = self.col.storage.timing_today()?;
         match state {
             StateKind::New => write!(self.sql, "c.type = {}", CardQueue::New as i8),
             StateKind::Review => write!(self.sql, "c.type = {}", CardQueue::Review as i8),
@@ -212,14 +212,14 @@ impl SqlWriter<'_, '_> {
             "filtered" => write!(self.sql, "c.odid > 0").unwrap(),
             deck => {
                 let all_decks: Vec<_> = self
-                    .req
+                    .col
                     .storage
                     .all_decks()?
                     .into_iter()
                     .map(|(_, v)| v)
                     .collect();
                 let dids_with_children = if deck == "current" {
-                    let config = self.req.storage.all_config()?;
+                    let config = self.col.storage.all_config()?;
                     let mut dids_with_children = vec![config.current_deck_id];
                     let current = get_deck(&all_decks, config.current_deck_id)
                         .ok_or_else(|| AnkiError::invalid_input("invalid current deck"))?;
@@ -251,7 +251,7 @@ impl SqlWriter<'_, '_> {
                 write!(self.sql, "c.ord = {}", n).unwrap();
             }
             TemplateKind::Name(name) => {
-                let note_types = self.req.storage.all_note_types()?;
+                let note_types = self.col.storage.all_note_types()?;
                 let mut id_ords = vec![];
                 for nt in note_types.values() {
                     for tmpl in &nt.templates {
@@ -280,7 +280,7 @@ impl SqlWriter<'_, '_> {
     fn write_note_type(&mut self, nt_name: &str) -> Result<()> {
         let mut ntids: Vec<_> = self
-            .req
+            .col
             .storage
             .all_note_types()?
             .values()
@@ -295,7 +295,7 @@ impl SqlWriter<'_, '_> {
     }
     fn write_single_field(&mut self, field_name: &str, val: &str, is_re: bool) -> Result<()> {
-        let note_types = self.req.storage.all_note_types()?;
+        let note_types = self.col.storage.all_note_types()?;
         let mut field_map = vec![];
         for nt in note_types.values() {
@@ -354,7 +354,7 @@ impl SqlWriter<'_, '_> {
     }
     fn write_added(&mut self, days: u32) -> Result<()> {
-        let timing = self.req.storage.timing_today()?;
+        let timing = self.col.storage.timing_today()?;
         let cutoff = (timing.next_day_at - (86_400 * (days as i64))) * 1_000;
         write!(self.sql, "c.id > {}", cutoff).unwrap();
         Ok(())
@@ -384,7 +384,11 @@ where
 #[cfg(test)]
 mod test {
     use super::ids_to_string;
-    use crate::{collection::open_collection, i18n::I18n, log};
+    use crate::{
+        collection::{open_collection, Collection},
+        i18n::I18n,
+        log,
+    };
     use std::{fs, path::PathBuf};
     use tempfile::tempdir;
@@ -409,7 +413,7 @@ mod test {
     use super::*;
     // shortcut
-    fn s(req: &mut RequestContext, search: &str) -> (String, Vec<String>) {
+    fn s(req: &mut Collection, search: &str) -> (String, Vec<String>) {
         let node = Node::Group(parse(search).unwrap());
         node_to_sql(req, &node).unwrap()
     }
@@ -423,7 +427,7 @@ mod test {
         fs::write(&col_path, MEDIACHECK_ANKI2).unwrap();
         let i18n = I18n::new(&[""], "", log::terminal());
-        let col = open_collection(
+        let mut col = open_collection(
             &col_path,
             &PathBuf::new(),
             &PathBuf::new(),
@@ -433,7 +437,7 @@ mod test {
         )
         .unwrap();
-        col.with_ctx(|ctx| {
+        let ctx = &mut col;
         // unqualified search
         assert_eq!(
             s(ctx, "test"),
@@ -505,8 +509,7 @@ mod test {
         assert_eq!(
             s(ctx, "dupes:123,test"),
             (
-                "((n.mid = 123 and n.csum = 2840236005 and field_at_index(n.flds, 0) = ?)"
-                    .into(),
+                "((n.mid = 123 and n.csum = 2840236005 and field_at_index(n.flds, 0) = ?)".into(),
                 vec!["test".into()]
             )
         );
@@ -573,10 +576,6 @@ mod test {
             ("(n.flds regexp ?)".into(), vec![r"(?i)\bone".into()])
         );
-        Ok(())
-        })
-        .unwrap();
         Ok(())
     }
 }

View File

@@ -31,8 +31,8 @@ impl FromSql for CardQueue {
     }
 }
-impl super::StorageContext<'_> {
-    pub fn get_card(&mut self, cid: CardID) -> Result<Option<Card>> {
+impl super::SqliteStorage {
+    pub fn get_card(&self, cid: CardID) -> Result<Option<Card>> {
         let mut stmt = self.db.prepare_cached(include_str!("get_card.sql"))?;
         stmt.query_row(params![cid], |row| {
             Ok(Card {
@@ -60,7 +60,7 @@ impl super::StorageContext<'_> {
         .map_err(Into::into)
     }
-    pub(crate) fn update_card(&mut self, card: &Card) -> Result<()> {
+    pub(crate) fn update_card(&self, card: &Card) -> Result<()> {
         let mut stmt = self.db.prepare_cached(include_str!("update_card.sql"))?;
         stmt.execute(params![
             card.nid,
@@ -85,7 +85,7 @@ impl super::StorageContext<'_> {
         Ok(())
     }
-    pub(crate) fn add_card(&mut self, card: &mut Card) -> Result<()> {
+    pub(crate) fn add_card(&self, card: &mut Card) -> Result<()> {
         let now = TimestampMillis::now().0;
         let mut stmt = self.db.prepare_cached(include_str!("add_card.sql"))?;
         stmt.execute(params![
@@ -120,12 +120,11 @@ mod test {
     #[test]
     fn add_card() {
-        let storage = SqliteStorage::open_or_create(Path::new(":memory:")).unwrap();
-        let mut ctx = storage.context(false);
+        let storage = SqliteStorage::open_or_create(Path::new(":memory:"), false).unwrap();
         let mut card = Card::default();
-        ctx.add_card(&mut card).unwrap();
+        storage.add_card(&mut card).unwrap();
         let id1 = card.id;
-        ctx.add_card(&mut card).unwrap();
+        storage.add_card(&mut card).unwrap();
         assert_ne!(id1, card.id);
     }
 }

View File

@@ -1,4 +1,4 @@
 mod card;
 mod sqlite;
-pub(crate) use sqlite::{SqliteStorage, StorageContext};
+pub(crate) use sqlite::SqliteStorage;

View File

@@ -38,6 +38,9 @@ pub struct SqliteStorage {
     // currently crate-visible for dbproxy
     pub(crate) db: Connection,
+    server: bool,
+    usn: Option<Usn>,
     // fixme: stored in wrong location?
     path: PathBuf,
 }
@@ -163,7 +166,7 @@ fn trace(s: &str) {
 }
 impl SqliteStorage {
-    pub(crate) fn open_or_create(path: &Path) -> Result<Self> {
+    pub(crate) fn open_or_create(path: &Path, server: bool) -> Result<Self> {
         let db = open_or_create_collection_db(path)?;
         let (create, ver) = schema_version(&db)?;
@@ -193,34 +196,13 @@ impl SqliteStorage {
         let storage = Self {
             db,
             path: path.to_owned(),
+            server,
+            usn: None,
         };
         Ok(storage)
     }
-    pub(crate) fn context(&self, server: bool) -> StorageContext {
-        StorageContext::new(&self.db, server)
-    }
-}
-pub(crate) struct StorageContext<'a> {
-    pub(crate) db: &'a Connection,
-    server: bool,
-    usn: Option<Usn>,
-    timing_today: Option<SchedTimingToday>,
-}
-impl StorageContext<'_> {
-    fn new(db: &Connection, server: bool) -> StorageContext {
-        StorageContext {
-            db,
-            server,
-            usn: None,
-            timing_today: None,
-        }
-    }
     // Standard transaction start/stop
     //////////////////////////////////////
@@ -276,8 +258,6 @@ impl StorageContext<'_> {
         Ok(())
     }
-    //////////////////////////////////////////
     pub(crate) fn mark_modified(&self) -> Result<()> {
         self.db
             .prepare_cached("update col set mod=?")?
@@ -330,9 +310,7 @@ impl StorageContext<'_> {
         Ok(note_types)
     }
-    #[allow(dead_code)]
-    pub(crate) fn timing_today(&mut self) -> Result<SchedTimingToday> {
-        if self.timing_today.is_none() {
+    pub(crate) fn timing_today(&self) -> Result<SchedTimingToday> {
         let crt: i64 = self
             .db
             .prepare_cached("select crt from col")?
@@ -340,14 +318,12 @@ impl StorageContext<'_> {
         let conf = self.all_config()?;
         let now_offset = if self.server { conf.local_offset } else { None };
-        self.timing_today = Some(sched_timing_today(
+        Ok(sched_timing_today(
             crt,
             TimestampSecs::now().0,
             conf.creation_offset,
             now_offset,
             conf.rollover,
-        ));
-        }
-        Ok(*self.timing_today.as_ref().unwrap())
+        ))
     }
 }
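In the same spirit as the add_card test above, a small sketch of driving the merged storage type directly; timing_smoke_test is an illustrative name, and the assertion only checks that a day cutoff was computed:

    use std::path::Path;

    // Sketch only: the server flag is now supplied at open time, and
    // timing_today() recomputes the day boundaries on every call.
    fn timing_smoke_test() -> Result<()> {
        let storage = SqliteStorage::open_or_create(Path::new(":memory:"), false)?;
        let timing = storage.timing_today()?;
        assert!(timing.next_day_at > 0);
        Ok(())
    }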