drop separate RequestContext/StorageContext
parent 2810d3883b
commit fdeca610b0
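Note: the diff below removes the two per-request wrapper types. Code that previously built a RequestContext (with a StorageContext inside it) now works on the Collection and its SqliteStorage directly. A minimal before/after sketch, using the get_card handler from the backend hunk further down:

// before: a throwaway context was built for every request
let card = self.with_col(|col| col.with_ctx(|ctx| ctx.storage.get_card(CardID(cid))))?;

// after: the collection owns its storage, so handlers reach it directly
let card = self.with_col(|col| col.storage.get_card(CardID(cid)))?;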
@@ -2,7 +2,7 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
 use crate::err::Result;
-use crate::storage::StorageContext;
+use crate::storage::SqliteStorage;
 use rusqlite::types::{FromSql, FromSqlError, ToSql, ToSqlOutput, ValueRef};
 use rusqlite::OptionalExtension;
 use serde_derive::{Deserialize, Serialize};
@@ -67,7 +67,7 @@ impl FromSql for SqlValue {
 }
 }
 
-pub(super) fn db_command_bytes(ctx: &StorageContext, input: &[u8]) -> Result<String> {
+pub(super) fn db_command_bytes(ctx: &SqliteStorage, input: &[u8]) -> Result<String> {
 let req: DBRequest = serde_json::from_slice(input)?;
 let resp = match req {
 DBRequest::Query {
@@ -98,7 +98,7 @@ pub(super) fn db_command_bytes(ctx: &StorageContext, input: &[u8]) -> Result<Str
 Ok(serde_json::to_string(&resp)?)
 }
 
-pub(super) fn db_query_row(ctx: &StorageContext, sql: &str, args: &[SqlValue]) -> Result<DBResult> {
+pub(super) fn db_query_row(ctx: &SqliteStorage, sql: &str, args: &[SqlValue]) -> Result<DBResult> {
 let mut stmt = ctx.db.prepare_cached(sql)?;
 let columns = stmt.column_count();
 
@@ -122,7 +122,7 @@ pub(super) fn db_query_row(ctx: &StorageContext, sql: &str, args: &[SqlValue]) -
 Ok(DBResult::Rows(rows))
 }
 
-pub(super) fn db_query(ctx: &StorageContext, sql: &str, args: &[SqlValue]) -> Result<DBResult> {
+pub(super) fn db_query(ctx: &SqliteStorage, sql: &str, args: &[SqlValue]) -> Result<DBResult> {
 let mut stmt = ctx.db.prepare_cached(sql)?;
 let columns = stmt.column_count();
 
@@ -141,7 +141,7 @@ pub(super) fn db_query(ctx: &StorageContext, sql: &str, args: &[SqlValue]) -> Re
 }
 
 pub(super) fn db_execute_many(
-ctx: &StorageContext,
+ctx: &SqliteStorage,
 sql: &str,
 args: &[Vec<SqlValue>],
 ) -> Result<DBResult> {
@@ -590,48 +590,44 @@ impl Backend {
 }
 
 pub fn db_command(&self, input: &[u8]) -> Result<String> {
-self.with_col(|col| col.with_ctx(|ctx| db_command_bytes(&ctx.storage, input)))
+self.with_col(|col| db_command_bytes(&col.storage, input))
 }
 
 fn search_cards(&self, input: pb::SearchCardsIn) -> Result<pb::SearchCardsOut> {
 self.with_col(|col| {
-col.with_ctx(|ctx| {
 let order = if let Some(order) = input.order {
 use pb::sort_order::Value as V;
 match order.value {
 Some(V::None(_)) => SortMode::NoOrder,
 Some(V::Custom(s)) => SortMode::Custom(s),
 Some(V::FromConfig(_)) => SortMode::FromConfig,
 Some(V::Builtin(b)) => SortMode::Builtin {
 kind: sort_kind_from_pb(b.kind),
 reverse: b.reverse,
 },
 None => SortMode::FromConfig,
 }
 } else {
 SortMode::FromConfig
 };
-let cids = search_cards(ctx, &input.search, order)?;
+let cids = search_cards(col, &input.search, order)?;
 Ok(pb::SearchCardsOut {
 card_ids: cids.into_iter().map(|v| v.0).collect(),
 })
-})
 })
 }
 
 fn search_notes(&self, input: pb::SearchNotesIn) -> Result<pb::SearchNotesOut> {
 self.with_col(|col| {
-col.with_ctx(|ctx| {
-let nids = search_notes(ctx, &input.search)?;
+let nids = search_notes(col, &input.search)?;
 Ok(pb::SearchNotesOut {
 note_ids: nids.into_iter().map(|v| v.0).collect(),
 })
-})
 })
 }
 
 fn get_card(&self, cid: i64) -> Result<pb::GetCardOut> {
-let card = self.with_col(|col| col.with_ctx(|ctx| ctx.storage.get_card(CardID(cid))))?;
+let card = self.with_col(|col| col.storage.get_card(CardID(cid)))?;
 Ok(pb::GetCardOut {
 card: card.map(card_to_pb),
 })
@@ -5,7 +5,7 @@ use crate::decks::DeckID;
 use crate::define_newtype;
 use crate::err::{AnkiError, Result};
 use crate::notes::NoteID;
-use crate::{collection::RequestContext, timestamp::TimestampSecs, types::Usn};
+use crate::{collection::Collection, timestamp::TimestampSecs, types::Usn};
 use num_enum::TryFromPrimitive;
 use serde_repr::{Deserialize_repr, Serialize_repr};
 
@@ -86,7 +86,7 @@ impl Default for Card {
 }
 }
 
-impl RequestContext<'_> {
+impl Collection {
 pub(crate) fn update_card(&mut self, card: &mut Card) -> Result<()> {
 if card.id.0 == 0 {
 return Err(AnkiError::invalid_input("card id not set"));
@@ -4,7 +4,7 @@
 use crate::err::{AnkiError, Result};
 use crate::i18n::I18n;
 use crate::log::Logger;
-use crate::storage::{SqliteStorage, StorageContext};
+use crate::storage::SqliteStorage;
 use std::path::PathBuf;
 
 pub fn open_collection<P: Into<PathBuf>>(
@@ -16,24 +16,30 @@ pub fn open_collection<P: Into<PathBuf>>(
 log: Logger,
 ) -> Result<Collection> {
 let col_path = path.into();
-let storage = SqliteStorage::open_or_create(&col_path)?;
+let storage = SqliteStorage::open_or_create(&col_path, server)?;
 
 let col = Collection {
 storage,
 col_path,
 media_folder: media_folder.into(),
 media_db: media_db.into(),
-server,
 i18n,
 log,
-state: CollectionState::Normal,
+state: CollectionState {
+task_state: CollectionTaskState::Normal,
+},
 };
 
 Ok(col)
 }
 
+#[derive(Debug)]
+pub struct CollectionState {
+task_state: CollectionTaskState,
+}
+
 #[derive(Debug, PartialEq)]
-pub enum CollectionState {
+pub enum CollectionTaskState {
 Normal,
 // in this state, the DB must not be closed
 MediaSyncRunning,
@@ -45,7 +51,6 @@ pub struct Collection {
 pub(crate) col_path: PathBuf,
 pub(crate) media_folder: PathBuf,
 pub(crate) media_db: PathBuf,
-pub(crate) server: bool,
 pub(crate) i18n: I18n,
 pub(crate) log: Logger,
 state: CollectionState,
@@ -53,63 +58,35 @@ pub struct Collection {
 
 pub(crate) enum CollectionOp {}
 
-pub(crate) struct RequestContext<'a> {
-pub storage: StorageContext<'a>,
-pub i18n: &'a I18n,
-pub log: &'a Logger,
-pub should_commit: bool,
-}
-
 impl Collection {
-/// Call the provided closure with a RequestContext that exists for
-/// the duration of the call. The request will cache prepared sql
-/// statements, so should be passed down the call tree.
-///
-/// This function should be used for read-only requests. To mutate
-/// the database, use transact() instead.
-pub(crate) fn with_ctx<F, R>(&self, func: F) -> Result<R>
-where
-F: FnOnce(&mut RequestContext) -> Result<R>,
-{
-let mut ctx = RequestContext {
-storage: self.storage.context(self.server),
-i18n: &self.i18n,
-log: &self.log,
-should_commit: true,
-};
-func(&mut ctx)
-}
-
 /// Execute the provided closure in a transaction, rolling back if
 /// an error is returned.
-pub(crate) fn transact<F, R>(&self, op: Option<CollectionOp>, func: F) -> Result<R>
+pub(crate) fn transact<F, R>(&mut self, op: Option<CollectionOp>, func: F) -> Result<R>
 where
-F: FnOnce(&mut RequestContext) -> Result<R>,
+F: FnOnce(&mut Collection) -> Result<R>,
 {
-self.with_ctx(|ctx| {
-ctx.storage.begin_rust_trx()?;
+self.storage.begin_rust_trx()?;
 
-let mut res = func(ctx);
+let mut res = func(self);
 
-if res.is_ok() && ctx.should_commit {
-if let Err(e) = ctx.storage.mark_modified() {
+if res.is_ok() {
+if let Err(e) = self.storage.mark_modified() {
 res = Err(e);
-} else if let Err(e) = ctx.storage.commit_rust_op(op) {
+} else if let Err(e) = self.storage.commit_rust_op(op) {
 res = Err(e);
 }
 }
 
-if res.is_err() || !ctx.should_commit {
-ctx.storage.rollback_rust_trx()?;
+if res.is_err() {
+self.storage.rollback_rust_trx()?;
 }
 
 res
-})
 }
 
 pub(crate) fn set_media_sync_running(&mut self) -> Result<()> {
-if self.state == CollectionState::Normal {
-self.state = CollectionState::MediaSyncRunning;
+if self.state.task_state == CollectionTaskState::Normal {
+self.state.task_state = CollectionTaskState::MediaSyncRunning;
 Ok(())
 } else {
 Err(AnkiError::invalid_input("media sync already running"))
@@ -117,8 +94,8 @@ impl Collection {
 }
 
 pub(crate) fn set_media_sync_finished(&mut self) -> Result<()> {
-if self.state == CollectionState::MediaSyncRunning {
-self.state = CollectionState::Normal;
+if self.state.task_state == CollectionTaskState::MediaSyncRunning {
+self.state.task_state = CollectionTaskState::Normal;
 Ok(())
 } else {
 Err(AnkiError::invalid_input("media sync not running"))
@@ -126,6 +103,6 @@ impl Collection {
 }
 
 pub(crate) fn can_close(&self) -> bool {
-self.state == CollectionState::Normal
+self.state.task_state == CollectionTaskState::Normal
 }
 }
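Note: with with_ctx() gone, transact() is the remaining write path: it opens a transaction on self.storage, hands the closure a &mut Collection, and commits on Ok or rolls back on Err; the old should_commit escape hatch is dropped. A usage sketch, assuming a &mut Collection and the update_card method from the card.rs hunk above (the wrapper function name is illustrative only):

fn save_card(col: &mut Collection, card: &mut Card) -> Result<()> {
    // an Err returned from the closure rolls the whole transaction back
    col.transact(None, |col| col.update_card(card))
}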
@@ -1,7 +1,7 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
-use crate::collection::RequestContext;
+use crate::collection::Collection;
 use crate::err::{AnkiError, DBErrorKind, Result};
 use crate::i18n::{tr_args, tr_strs, FString};
 use crate::latex::extract_latex_expanding_clozes;
@@ -44,26 +44,26 @@ struct MediaFolderCheck {
 oversize: Vec<String>,
 }
 
-pub struct MediaChecker<'a, 'b, P>
+pub struct MediaChecker<'a, P>
 where
 P: FnMut(usize) -> bool,
 {
-ctx: &'a mut RequestContext<'b>,
+ctx: &'a mut Collection,
 mgr: &'a MediaManager,
 progress_cb: P,
 checked: usize,
 progress_updated: Instant,
 }
 
-impl<P> MediaChecker<'_, '_, P>
+impl<P> MediaChecker<'_, P>
 where
 P: FnMut(usize) -> bool,
 {
-pub(crate) fn new<'a, 'b>(
-ctx: &'a mut RequestContext<'b>,
+pub(crate) fn new<'a>(
+ctx: &'a mut Collection,
 mgr: &'a MediaManager,
 progress_cb: P,
-) -> MediaChecker<'a, 'b, P> {
+) -> MediaChecker<'a, P> {
 MediaChecker {
 ctx,
 mgr,
@@ -411,10 +411,6 @@ where
 Ok(())
 })?;
 
-if !collection_modified {
-self.ctx.should_commit = false;
-}
-
 Ok(referenced_files)
 }
 }
@@ -561,7 +557,7 @@ pub(crate) mod test {
 
 #[test]
 fn media_check() -> Result<()> {
-let (_dir, mgr, col) = common_setup()?;
+let (_dir, mgr, mut col) = common_setup()?;
 
 // add some test files
 fs::write(&mgr.media_folder.join("zerobytes"), "")?;
@@ -637,7 +633,7 @@ Unused: unused.jpg
 
 #[test]
 fn trash_handling() -> Result<()> {
-let (_dir, mgr, col) = common_setup()?;
+let (_dir, mgr, mut col) = common_setup()?;
 let trash_folder = trash_folder(&mgr.media_folder)?;
 fs::write(trash_folder.join("test.jpg"), "test")?;
 
@@ -687,7 +683,7 @@ Unused: unused.jpg
 
 #[test]
 fn unicode_normalization() -> Result<()> {
-let (_dir, mgr, col) = common_setup()?;
+let (_dir, mgr, mut col) = common_setup()?;
 
 fs::write(&mgr.media_folder.join("ぱぱ.jpg"), "nfd encoding")?;
 
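Note: MediaChecker loses its second lifetime because it now borrows the Collection itself rather than a RequestContext scoped to one. A construction sketch based on the new() signature above; variable names are illustrative and mgr is a MediaManager as in the existing tests:

let mut checker = MediaChecker::new(&mut col, &mgr, |_checked| true);
// the checker holds the &mut Collection until it is dropped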
@@ -4,7 +4,7 @@
 use super::{parser::Node, sqlwriter::node_to_sql};
 use crate::card::CardID;
 use crate::card::CardType;
-use crate::collection::RequestContext;
+use crate::collection::Collection;
 use crate::config::SortKind;
 use crate::err::Result;
 use crate::search::parser::parse;
@@ -18,7 +18,7 @@ pub(crate) enum SortMode {
 }
 
 pub(crate) fn search_cards<'a, 'b>(
-req: &'a mut RequestContext<'b>,
+req: &'b mut Collection,
 search: &'a str,
 order: SortMode,
 ) -> Result<Vec<CardID>> {
@@ -96,7 +96,7 @@ fn write_order(sql: &mut String, kind: &SortKind, reverse: bool) -> Result<()> {
 
 // In the future these items should be moved from JSON into separate SQL tables,
 // - for now we use a temporary deck to sort them.
-fn prepare_sort(req: &mut RequestContext, kind: &SortKind) -> Result<()> {
+fn prepare_sort(req: &mut Collection, kind: &SortKind) -> Result<()> {
 use SortKind::*;
 match kind {
 CardDeck | NoteType => {
@@ -139,14 +139,14 @@ fn prepare_sort(req: &mut RequestContext, kind: &SortKind) -> Result<()> {
 Ok(())
 }
 
-fn prepare_sort_order_table(req: &mut RequestContext) -> Result<()> {
+fn prepare_sort_order_table(req: &mut Collection) -> Result<()> {
 req.storage
 .db
 .execute_batch(include_str!("sort_order.sql"))?;
 Ok(())
 }
 
-fn prepare_sort_order_table2(req: &mut RequestContext) -> Result<()> {
+fn prepare_sort_order_table2(req: &mut Collection) -> Result<()> {
 req.storage
 .db
 .execute_batch(include_str!("sort_order2.sql"))?;
@@ -2,15 +2,12 @@
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 
 use super::{parser::Node, sqlwriter::node_to_sql};
-use crate::collection::RequestContext;
+use crate::collection::Collection;
 use crate::err::Result;
 use crate::notes::NoteID;
 use crate::search::parser::parse;
 
-pub(crate) fn search_notes<'a, 'b>(
-req: &'a mut RequestContext<'b>,
-search: &'a str,
-) -> Result<Vec<NoteID>> {
+pub(crate) fn search_notes<'a>(req: &'a mut Collection, search: &'a str) -> Result<Vec<NoteID>> {
 let top_node = Node::Group(parse(search)?);
 let (sql, args) = node_to_sql(req, &top_node)?;
 
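Note: both search entry points now take the collection mutably instead of a RequestContext. A short usage sketch with arbitrary example queries, assuming a mut col: Collection in scope:

let cids = search_cards(&mut col, "deck:current", SortMode::FromConfig)?;
let nids = search_notes(&mut col, "tag:leech")?;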
@@ -10,26 +10,26 @@ use crate::notes::field_checksum;
 use crate::notetypes::NoteTypeID;
 use crate::text::matches_wildcard;
 use crate::text::without_combining;
-use crate::{collection::RequestContext, text::strip_html_preserving_image_filenames};
+use crate::{collection::Collection, text::strip_html_preserving_image_filenames};
 use std::fmt::Write;
 
-struct SqlWriter<'a, 'b> {
-req: &'a mut RequestContext<'b>,
+struct SqlWriter<'a> {
+col: &'a mut Collection,
 sql: String,
 args: Vec<String>,
 }
 
-pub(super) fn node_to_sql(req: &mut RequestContext, node: &Node) -> Result<(String, Vec<String>)> {
+pub(super) fn node_to_sql(req: &mut Collection, node: &Node) -> Result<(String, Vec<String>)> {
 let mut sctx = SqlWriter::new(req);
 sctx.write_node_to_sql(&node)?;
 Ok((sctx.sql, sctx.args))
 }
 
-impl SqlWriter<'_, '_> {
-fn new<'a, 'b>(req: &'a mut RequestContext<'b>) -> SqlWriter<'a, 'b> {
+impl SqlWriter<'_> {
+fn new(col: &mut Collection) -> SqlWriter<'_> {
 let sql = String::new();
 let args = vec![];
-SqlWriter { req, sql, args }
+SqlWriter { col, sql, args }
 }
 
 fn write_node_to_sql(&mut self, node: &Node) -> Result<()> {
@@ -129,7 +129,7 @@ impl SqlWriter<'_, '_> {
 }
 
 fn write_rated(&mut self, days: u32, ease: Option<u8>) -> Result<()> {
-let today_cutoff = self.req.storage.timing_today()?.next_day_at;
+let today_cutoff = self.col.storage.timing_today()?.next_day_at;
 let days = days.min(365) as i64;
 let target_cutoff_ms = (today_cutoff - 86_400 * days) * 1_000;
 write!(
@@ -148,7 +148,7 @@ impl SqlWriter<'_, '_> {
 }
 
 fn write_prop(&mut self, op: &str, kind: &PropertyKind) -> Result<()> {
-let timing = self.req.storage.timing_today()?;
+let timing = self.col.storage.timing_today()?;
 match kind {
 PropertyKind::Due(days) => {
 let day = days + (timing.days_elapsed as i32);
@@ -173,7 +173,7 @@ impl SqlWriter<'_, '_> {
 }
 
 fn write_state(&mut self, state: &StateKind) -> Result<()> {
-let timing = self.req.storage.timing_today()?;
+let timing = self.col.storage.timing_today()?;
 match state {
 StateKind::New => write!(self.sql, "c.type = {}", CardQueue::New as i8),
 StateKind::Review => write!(self.sql, "c.type = {}", CardQueue::Review as i8),
@@ -212,14 +212,14 @@ impl SqlWriter<'_, '_> {
 "filtered" => write!(self.sql, "c.odid > 0").unwrap(),
 deck => {
 let all_decks: Vec<_> = self
-.req
+.col
 .storage
 .all_decks()?
 .into_iter()
 .map(|(_, v)| v)
 .collect();
 let dids_with_children = if deck == "current" {
-let config = self.req.storage.all_config()?;
+let config = self.col.storage.all_config()?;
 let mut dids_with_children = vec![config.current_deck_id];
 let current = get_deck(&all_decks, config.current_deck_id)
 .ok_or_else(|| AnkiError::invalid_input("invalid current deck"))?;
@@ -251,7 +251,7 @@ impl SqlWriter<'_, '_> {
 write!(self.sql, "c.ord = {}", n).unwrap();
 }
 TemplateKind::Name(name) => {
-let note_types = self.req.storage.all_note_types()?;
+let note_types = self.col.storage.all_note_types()?;
 let mut id_ords = vec![];
 for nt in note_types.values() {
 for tmpl in &nt.templates {
@@ -280,7 +280,7 @@ impl SqlWriter<'_, '_> {
 
 fn write_note_type(&mut self, nt_name: &str) -> Result<()> {
 let mut ntids: Vec<_> = self
-.req
+.col
 .storage
 .all_note_types()?
 .values()
@@ -295,7 +295,7 @@ impl SqlWriter<'_, '_> {
 }
 
 fn write_single_field(&mut self, field_name: &str, val: &str, is_re: bool) -> Result<()> {
-let note_types = self.req.storage.all_note_types()?;
+let note_types = self.col.storage.all_note_types()?;
 
 let mut field_map = vec![];
 for nt in note_types.values() {
@@ -354,7 +354,7 @@ impl SqlWriter<'_, '_> {
 }
 
 fn write_added(&mut self, days: u32) -> Result<()> {
-let timing = self.req.storage.timing_today()?;
+let timing = self.col.storage.timing_today()?;
 let cutoff = (timing.next_day_at - (86_400 * (days as i64))) * 1_000;
 write!(self.sql, "c.id > {}", cutoff).unwrap();
 Ok(())
@@ -384,7 +384,11 @@ where
 #[cfg(test)]
 mod test {
 use super::ids_to_string;
-use crate::{collection::open_collection, i18n::I18n, log};
+use crate::{
+collection::{open_collection, Collection},
+i18n::I18n,
+log,
+};
 use std::{fs, path::PathBuf};
 use tempfile::tempdir;
 
@@ -409,7 +413,7 @@ mod test {
 use super::*;
 
 // shortcut
-fn s(req: &mut RequestContext, search: &str) -> (String, Vec<String>) {
+fn s(req: &mut Collection, search: &str) -> (String, Vec<String>) {
 let node = Node::Group(parse(search).unwrap());
 node_to_sql(req, &node).unwrap()
 }
@@ -423,7 +427,7 @@ mod test {
 fs::write(&col_path, MEDIACHECK_ANKI2).unwrap();
 
 let i18n = I18n::new(&[""], "", log::terminal());
-let col = open_collection(
+let mut col = open_collection(
 &col_path,
 &PathBuf::new(),
 &PathBuf::new(),
@@ -433,149 +437,144 @@ mod test {
 )
 .unwrap();
 
-col.with_ctx(|ctx| {
+let ctx = &mut col;
 // unqualified search
 assert_eq!(
 s(ctx, "test"),
 (
 "((n.sfld like ?1 escape '\\' or n.flds like ?1 escape '\\'))".into(),
 vec!["%test%".into()]
 )
 );
 assert_eq!(s(ctx, "te%st").1, vec!["%te%st%".to_string()]);
 // user should be able to escape sql wildcards
 assert_eq!(s(ctx, r#"te\%s\_t"#).1, vec!["%te\\%s\\_t%".to_string()]);
 
 // qualified search
 assert_eq!(
 s(ctx, "front:te*st"),
 (
 concat!(
 "(((n.mid = 1581236385344 and field_at_index(n.flds, 0) like ?1) or ",
 "(n.mid = 1581236385345 and field_at_index(n.flds, 0) like ?1) or ",
 "(n.mid = 1581236385346 and field_at_index(n.flds, 0) like ?1) or ",
 "(n.mid = 1581236385347 and field_at_index(n.flds, 0) like ?1)))"
 )
 .into(),
 vec!["te%st".into()]
 )
 );
 
 // added
 let timing = ctx.storage.timing_today().unwrap();
 assert_eq!(
 s(ctx, "added:3").0,
 format!("(c.id > {})", (timing.next_day_at - (86_400 * 3)) * 1_000)
 );
 
 // deck
 assert_eq!(s(ctx, "deck:default"), ("(c.did in (1))".into(), vec![],));
 assert_eq!(s(ctx, "deck:current"), ("(c.did in (1))".into(), vec![],));
 assert_eq!(s(ctx, "deck:missing"), ("(c.did in ())".into(), vec![],));
 assert_eq!(s(ctx, "deck:d*"), ("(c.did in (1))".into(), vec![],));
 assert_eq!(s(ctx, "deck:filtered"), ("(c.odid > 0)".into(), vec![],));
 
 // card
 assert_eq!(s(ctx, "card:front"), ("(false)".into(), vec![],));
 assert_eq!(
 s(ctx, r#""card:card 1""#),
 (
 concat!(
 "(((n.mid = 1581236385344 and c.ord = 0) or ",
 "(n.mid = 1581236385345 and c.ord = 0) or ",
 "(n.mid = 1581236385346 and c.ord = 0) or ",
 "(n.mid = 1581236385347 and c.ord = 0)))"
 )
 .into(),
 vec![],
 )
 );
 
 // IDs
 assert_eq!(s(ctx, "mid:3"), ("(n.mid = 3)".into(), vec![]));
 assert_eq!(s(ctx, "nid:3"), ("(n.id in (3))".into(), vec![]));
 assert_eq!(s(ctx, "nid:3,4"), ("(n.id in (3,4))".into(), vec![]));
 assert_eq!(s(ctx, "cid:3,4"), ("(c.id in (3,4))".into(), vec![]));
 
 // flags
 assert_eq!(s(ctx, "flag:2"), ("((c.flags & 7) == 2)".into(), vec![]));
 assert_eq!(s(ctx, "flag:0"), ("((c.flags & 7) == 0)".into(), vec![]));
 
 // dupes
 assert_eq!(
 s(ctx, "dupes:123,test"),
 (
-"((n.mid = 123 and n.csum = 2840236005 and field_at_index(n.flds, 0) = ?)"
-.into(),
+"((n.mid = 123 and n.csum = 2840236005 and field_at_index(n.flds, 0) = ?)".into(),
 vec!["test".into()]
 )
 );
 
 // tags
 assert_eq!(
 s(ctx, "tag:one"),
 ("(n.tags like ? escape '\\')".into(), vec!["% one %".into()])
 );
 assert_eq!(
 s(ctx, "tag:o*e"),
 ("(n.tags like ? escape '\\')".into(), vec!["% o%e %".into()])
 );
 assert_eq!(s(ctx, "tag:none"), ("(n.tags = '')".into(), vec![]));
 assert_eq!(s(ctx, "tag:*"), ("(true)".into(), vec![]));
 
 // state
 assert_eq!(
 s(ctx, "is:suspended").0,
 format!("(c.queue = {})", CardQueue::Suspended as i8)
 );
 assert_eq!(
 s(ctx, "is:new").0,
 format!("(c.type = {})", CardQueue::New as i8)
 );
 
 // rated
 assert_eq!(
 s(ctx, "rated:2").0,
 format!(
 "(c.id in (select cid from revlog where id>{}))",
 (timing.next_day_at - (86_400 * 2)) * 1_000
 )
 );
 assert_eq!(
 s(ctx, "rated:400:1").0,
 format!(
 "(c.id in (select cid from revlog where id>{} and ease=1))",
 (timing.next_day_at - (86_400 * 365)) * 1_000
 )
 );
 
 // props
 assert_eq!(s(ctx, "prop:lapses=3").0, "(lapses = 3)".to_string());
 assert_eq!(s(ctx, "prop:ease>=2.5").0, "(factor >= 2500)".to_string());
 assert_eq!(
 s(ctx, "prop:due!=-1").0,
 format!(
 "((c.queue in (2,3) and due != {}))",
 timing.days_elapsed - 1
 )
 );
 
 // note types by name
 assert_eq!(&s(ctx, "note:basic").0, "(n.mid in (1581236385347))");
 assert_eq!(
 &s(ctx, "note:basic*").0,
 "(n.mid in (1581236385345,1581236385346,1581236385347,1581236385344))"
 );
 
 // regex
 assert_eq!(
 s(ctx, r"re:\bone"),
 ("(n.flds regexp ?)".into(), vec![r"(?i)\bone".into()])
 );
 
-Ok(())
-})
-.unwrap();
-
 Ok(())
 }
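Note: the sqlwriter test no longer wraps its assertions in col.with_ctx(|ctx| { ... }).unwrap(); it simply reborrows the collection, which removes one level of nesting and the trailing unwrap:

let ctx = &mut col;
assert_eq!(s(ctx, "tag:none"), ("(n.tags = '')".into(), vec![]));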
@@ -31,8 +31,8 @@ impl FromSql for CardQueue {
 }
 }
 
-impl super::StorageContext<'_> {
-pub fn get_card(&mut self, cid: CardID) -> Result<Option<Card>> {
+impl super::SqliteStorage {
+pub fn get_card(&self, cid: CardID) -> Result<Option<Card>> {
 let mut stmt = self.db.prepare_cached(include_str!("get_card.sql"))?;
 stmt.query_row(params![cid], |row| {
 Ok(Card {
@@ -60,7 +60,7 @@ impl super::StorageContext<'_> {
 .map_err(Into::into)
 }
 
-pub(crate) fn update_card(&mut self, card: &Card) -> Result<()> {
+pub(crate) fn update_card(&self, card: &Card) -> Result<()> {
 let mut stmt = self.db.prepare_cached(include_str!("update_card.sql"))?;
 stmt.execute(params![
 card.nid,
@@ -85,7 +85,7 @@ impl super::StorageContext<'_> {
 Ok(())
 }
 
-pub(crate) fn add_card(&mut self, card: &mut Card) -> Result<()> {
+pub(crate) fn add_card(&self, card: &mut Card) -> Result<()> {
 let now = TimestampMillis::now().0;
 let mut stmt = self.db.prepare_cached(include_str!("add_card.sql"))?;
 stmt.execute(params![
@@ -120,12 +120,11 @@ mod test {
 
 #[test]
 fn add_card() {
-let storage = SqliteStorage::open_or_create(Path::new(":memory:")).unwrap();
-let mut ctx = storage.context(false);
+let storage = SqliteStorage::open_or_create(Path::new(":memory:"), false).unwrap();
 let mut card = Card::default();
-ctx.add_card(&mut card).unwrap();
+storage.add_card(&mut card).unwrap();
 let id1 = card.id;
-ctx.add_card(&mut card).unwrap();
+storage.add_card(&mut card).unwrap();
 assert_ne!(id1, card.id);
 }
 }
@@ -1,4 +1,4 @@
 mod card;
 mod sqlite;
 
-pub(crate) use sqlite::{SqliteStorage, StorageContext};
+pub(crate) use sqlite::SqliteStorage;
@@ -38,6 +38,9 @@ pub struct SqliteStorage {
 // currently crate-visible for dbproxy
 pub(crate) db: Connection,
 
+server: bool,
+usn: Option<Usn>,
+
 // fixme: stored in wrong location?
 path: PathBuf,
 }
@@ -163,7 +166,7 @@ fn trace(s: &str) {
 }
 
 impl SqliteStorage {
-pub(crate) fn open_or_create(path: &Path) -> Result<Self> {
+pub(crate) fn open_or_create(path: &Path, server: bool) -> Result<Self> {
 let db = open_or_create_collection_db(path)?;
 
 let (create, ver) = schema_version(&db)?;
@@ -193,34 +196,13 @@ impl SqliteStorage {
 let storage = Self {
 db,
 path: path.to_owned(),
+server,
+usn: None,
 };
 
 Ok(storage)
 }
 
-pub(crate) fn context(&self, server: bool) -> StorageContext {
-StorageContext::new(&self.db, server)
-}
-}
-
-pub(crate) struct StorageContext<'a> {
-pub(crate) db: &'a Connection,
-server: bool,
-usn: Option<Usn>,
-
-timing_today: Option<SchedTimingToday>,
-}
-
-impl StorageContext<'_> {
-fn new(db: &Connection, server: bool) -> StorageContext {
-StorageContext {
-db,
-server,
-usn: None,
-timing_today: None,
-}
-}
-
 // Standard transaction start/stop
 //////////////////////////////////////
 
@@ -276,8 +258,6 @@ impl StorageContext<'_> {
 Ok(())
 }
 
-//////////////////////////////////////////
-
 pub(crate) fn mark_modified(&self) -> Result<()> {
 self.db
 .prepare_cached("update col set mod=?")?
@@ -330,24 +310,20 @@ impl StorageContext<'_> {
 Ok(note_types)
 }
 
-#[allow(dead_code)]
-pub(crate) fn timing_today(&mut self) -> Result<SchedTimingToday> {
-if self.timing_today.is_none() {
-let crt: i64 = self
-.db
-.prepare_cached("select crt from col")?
-.query_row(NO_PARAMS, |row| row.get(0))?;
-let conf = self.all_config()?;
-let now_offset = if self.server { conf.local_offset } else { None };
+pub(crate) fn timing_today(&self) -> Result<SchedTimingToday> {
+let crt: i64 = self
+.db
+.prepare_cached("select crt from col")?
+.query_row(NO_PARAMS, |row| row.get(0))?;
+let conf = self.all_config()?;
+let now_offset = if self.server { conf.local_offset } else { None };
 
-self.timing_today = Some(sched_timing_today(
+Ok(sched_timing_today(
 crt,
 TimestampSecs::now().0,
 conf.creation_offset,
 now_offset,
 conf.rollover,
-));
-}
-Ok(*self.timing_today.as_ref().unwrap())
+))
 }
 }
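Note: SqliteStorage now receives the server flag when it is opened, and timing_today() recomputes the schedule from col.crt and the stored config on every call instead of caching it in a per-request StorageContext. A hedged sketch mirroring the add_card test above (the :memory: path comes from that test):

let storage = SqliteStorage::open_or_create(Path::new(":memory:"), false)?;
let timing = storage.timing_today()?; // recomputed on each call; nothing to invalidate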