Minor Rust cleanups (#2272)

* Run cargo +nightly fmt

* Latest prost-build includes clippy workaround
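  For context, a minimal sketch of the lint involved; the exact lint is an assumption
  (likely clippy::derive_partial_eq_without_eq, which fires on prost-generated messages
  that derive PartialEq but not Eq). The point is only that prost-build now emits the
  suppression on generated code itself, so a manual workaround can be dropped.

```rust
// Hand-written stand-in for a generated message; the allow below is the kind
// of attribute prost-build is assumed to emit on generated types now.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, Debug, PartialEq)]
struct CardIdProto {
    cid: i64,
}

fn main() {
    let a = CardIdProto { cid: 1 };
    // PartialEq is derived, Eq deliberately is not.
    assert_eq!(a, CardIdProto { cid: a.cid });
}
```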

* Tweak Rust protobuf imports

- Avoid use of stringify!(), as JetBrains editors get confused by it
- Stop merging all protobuf symbols into a single namespace
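  A rough sketch of what the namespacing change means for callers (toy modules, not the
  real generated code): rather than re-exporting every generated type into one flat `pb`
  namespace, each .proto file keeps its own submodule, so call sites now read
  `pb::cards::Card` instead of `pb::Card`, as the diffs below show.

```rust
// Toy stand-in for the generated protobuf modules.
mod pb {
    pub mod cards {
        pub struct Card { pub id: i64 }
    }
    pub mod decks {
        pub struct DeckId { pub did: i64 }
    }
    // Previously everything was flattened into `pb` itself, so callers wrote
    // `pb::Card` / `pb::DeckId`:
    // pub use cards::*;
    // pub use decks::*;
}

fn main() {
    // Call sites now name the originating .proto file:
    let card = pb::cards::Card { id: 7 };
    let deck = pb::decks::DeckId { did: 1 };
    println!("card {} is in deck {}", card.id, deck.did);
}
```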

* Remove some unnecessary qualifications

Found via IntelliJ lint
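  The pattern, in a small self-contained sketch (the paths below are illustrative, not
  the exact call sites): once a module is already imported, the fully qualified form the
  lint flags is redundant.

```rust
use std::fs;
use std::path::Path;

fn remove(path: &Path) {
    // Before: std::fs::remove_file(path).unwrap();
    // With `use std::fs;` in scope, the prefix adds nothing:
    fs::remove_file(path).unwrap();
}

fn main() {
    fs::write("example.tmp", b"x").unwrap();
    remove(Path::new("example.tmp"));
}
```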

* Migrate some asserts to assert_eq/ne
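  A minimal before/after sketch (toy values, not taken from the changed tests):

```rust
fn main() {
    let (expected, actual) = (3, 1 + 2);
    // Before: assert!(actual == expected);
    // assert_eq!/assert_ne! print both operands on failure, which makes
    // failing test output easier to read:
    assert_eq!(actual, expected);
    assert_ne!(actual, 0);
}
```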

* Remove mention of node_modules exclusion

This no longer seems to be necessary after migrating away from Bazel,
and excluding it means TS/Svelte files can't be edited properly.
Damien Elmes 2022-12-16 11:40:27 +00:00 committed by GitHub
parent 22ecef6fb2
commit fa625d7ad8
101 changed files with 1062 additions and 737 deletions

Cargo.lock (generated)

@ -2583,9 +2583,9 @@ dependencies = [
[[package]]
name = "prost-build"
version = "0.11.3"
version = "0.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e330bf1316db56b12c2bcfa399e8edddd4821965ea25ddb2c134b610b1c1c604"
checksum = "276470f7f281b0ed53d2ae42dd52b4a8d08853a3c70e7fe95882acbb98a6ae94"
dependencies = [
"bytes",
"heck",


@ -160,7 +160,7 @@ impl BuildAction for GenPythonProto {
-Iproto $in"
}
fn files(&mut self, build: &mut impl ninja_gen::build::FilesHandle) {
fn files(&mut self, build: &mut impl FilesHandle) {
let proto_inputs = build.expand_inputs(&self.proto_files);
let python_outputs: Vec<_> = proto_inputs
.iter()


@ -94,7 +94,7 @@ fn copy_windows_extras(folder_root: &Utf8Path) {
fn clean_top_level_files(folder_root: &Utf8Path) {
let mut to_remove = vec![];
for entry in std::fs::read_dir(folder_root).unwrap() {
for entry in fs::read_dir(folder_root).unwrap() {
let entry = entry.unwrap();
if entry.file_name() == "lib" {
continue;
@ -104,9 +104,9 @@ fn clean_top_level_files(folder_root: &Utf8Path) {
}
for path in to_remove {
if path.is_dir() {
std::fs::remove_dir_all(path).unwrap()
fs::remove_dir_all(path).unwrap()
} else {
std::fs::remove_file(path).unwrap()
fs::remove_file(path).unwrap()
}
}
}


@ -49,12 +49,6 @@ see and install a number of recommended extensions.
If you decide to use PyCharm instead of VS Code, there are some things to be
aware of.
### Slowdowns
Excluding the node_modules folder inside the editor may improve performance:
https://intellij-support.jetbrains.com/hc/en-us/community/posts/115000721750-Excluding-directories-globally
### Pylib References
You'll need to use File>Project Structure to tell IntelliJ that pylib/ is a


@ -90,7 +90,7 @@ enum Commands {
BuildDmgs(BuildDmgsArgs),
}
fn main() -> anyhow::Result<()> {
fn main() -> Result<()> {
match Cli::parse().command {
Commands::BuildApp {
version,
@ -215,7 +215,7 @@ fn fix_rpath(exe_path: Utf8PathBuf) -> Result<()> {
fn get_plist(anki_version: &str) -> plist::Dictionary {
let reader = std::io::Cursor::new(include_bytes!("Info.plist"));
let mut plist = plist::Value::from_reader(reader)
let mut plist = Value::from_reader(reader)
.unwrap()
.into_dictionary()
.unwrap();


@ -17,7 +17,7 @@ struct Args {
qt5_setup_path: Utf8PathBuf,
}
fn main() -> anyhow::Result<()> {
fn main() -> Result<()> {
let args = Args::parse();
let src_win_folder = Utf8Path::new("qt/bundle/win");


@ -27,7 +27,7 @@ required-features = ["bench"]
# After updating anything below, run ../cargo/update.py
[build-dependencies]
prost-build = "0.11.3"
prost-build = "0.11.4"
which = "4.3.0"
[dev-dependencies]


@ -3,10 +3,11 @@
//! Check the .ftl files at build time to ensure we don't get runtime load failures.
use super::gather::TranslationsByLang;
use fluent::{FluentBundle, FluentResource};
use unic_langid::LanguageIdentifier;
use super::gather::TranslationsByLang;
pub fn check(lang_map: &TranslationsByLang) {
for (lang, files_map) in lang_map {
for (fname, content) in files_map {


@ -362,24 +362,21 @@ fn want_comma_as_decimal_separator(langs: &[LanguageIdentifier]) -> bool {
}
fn format_decimal_with_comma(
val: &fluent::FluentValue,
val: &FluentValue,
_intl: &intl_memoizer::concurrent::IntlLangMemoizer,
) -> Option<String> {
format_number_values(val, Some(","))
}
fn format_decimal_with_period(
val: &fluent::FluentValue,
val: &FluentValue,
_intl: &intl_memoizer::concurrent::IntlLangMemoizer,
) -> Option<String> {
format_number_values(val, None)
}
#[inline]
fn format_number_values(
val: &fluent::FluentValue,
alt_separator: Option<&'static str>,
) -> Option<String> {
fn format_number_values(val: &FluentValue, alt_separator: Option<&'static str>) -> Option<String> {
match val {
FluentValue::Number(num) => {
// create a string with desired maximum digits


@ -3,9 +3,10 @@
// copied from https://github.com/projectfluent/fluent-rs/pull/241
use fluent_syntax::{ast::*, parser::Slice};
use std::fmt::{self, Error, Write};
use fluent_syntax::{ast::*, parser::Slice};
pub fn serialize<'s, S: Slice<'s>>(resource: &Resource<S>) -> String {
serialize_with_options(resource, Options::default())
}


@ -1,7 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::{adding::DeckAndNotetype, pb::DeckAndNotetype as DeckAndNotetypeProto};
use crate::{adding::DeckAndNotetype, pb::notes::DeckAndNotetype as DeckAndNotetypeProto};
impl From<DeckAndNotetype> for DeckAndNotetypeProto {
fn from(s: DeckAndNotetype) -> Self {


@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
pub(super) use crate::pb::cards_service::Service as CardsService;
pub(super) use crate::pb::cards::cards_service::Service as CardsService;
use crate::{
card::{CardQueue, CardType},
pb,
@ -10,7 +10,7 @@ use crate::{
};
impl CardsService for Backend {
fn get_card(&self, input: pb::CardId) -> Result<pb::Card> {
fn get_card(&self, input: pb::cards::CardId) -> Result<pb::cards::Card> {
let cid = input.into();
self.with_col(|col| {
col.storage
@ -20,7 +20,10 @@ impl CardsService for Backend {
})
}
fn update_cards(&self, input: pb::UpdateCardsRequest) -> Result<pb::OpChanges> {
fn update_cards(
&self,
input: pb::cards::UpdateCardsRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| {
let cards = input
.cards
@ -35,7 +38,7 @@ impl CardsService for Backend {
.map(Into::into)
}
fn remove_cards(&self, input: pb::RemoveCardsRequest) -> Result<pb::Empty> {
fn remove_cards(&self, input: pb::cards::RemoveCardsRequest) -> Result<pb::generic::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
col.remove_cards_and_orphaned_notes(
@ -50,13 +53,19 @@ impl CardsService for Backend {
})
}
fn set_deck(&self, input: pb::SetDeckRequest) -> Result<pb::OpChangesWithCount> {
fn set_deck(
&self,
input: pb::cards::SetDeckRequest,
) -> Result<pb::collection::OpChangesWithCount> {
let cids: Vec<_> = input.card_ids.into_iter().map(CardId).collect();
let deck_id = input.deck_id.into();
self.with_col(|col| col.set_deck(&cids, deck_id).map(Into::into))
}
fn set_flag(&self, input: pb::SetFlagRequest) -> Result<pb::OpChangesWithCount> {
fn set_flag(
&self,
input: pb::cards::SetFlagRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| {
col.set_card_flag(&to_card_ids(input.card_ids), input.flag)
.map(Into::into)
@ -64,10 +73,10 @@ impl CardsService for Backend {
}
}
impl TryFrom<pb::Card> for Card {
impl TryFrom<pb::cards::Card> for Card {
type Error = AnkiError;
fn try_from(c: pb::Card) -> Result<Self, Self::Error> {
fn try_from(c: pb::cards::Card) -> Result<Self, Self::Error> {
let ctype = CardType::try_from(c.ctype as u8).or_invalid("invalid card type")?;
let queue = CardQueue::try_from(c.queue as i8).or_invalid("invalid card queue")?;
Ok(Card {
@ -94,9 +103,9 @@ impl TryFrom<pb::Card> for Card {
}
}
impl From<Card> for pb::Card {
impl From<Card> for pb::cards::Card {
fn from(c: Card) -> Self {
pb::Card {
pb::cards::Card {
id: c.id.0,
note_id: c.note_id.0,
deck_id: c.deck_id.0,


@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
pub(super) use crate::pb::cardrendering_service::Service as CardRenderingService;
pub(super) use crate::pb::card_rendering::cardrendering_service::Service as CardRenderingService;
use crate::{
card_rendering::{extract_av_tags, strip_av_tags},
latex::{extract_latex, extract_latex_expanding_clozes, ExtractedLatex},
@ -21,16 +21,19 @@ use crate::{
impl CardRenderingService for Backend {
fn extract_av_tags(
&self,
input: pb::ExtractAvTagsRequest,
) -> Result<pb::ExtractAvTagsResponse> {
input: pb::card_rendering::ExtractAvTagsRequest,
) -> Result<pb::card_rendering::ExtractAvTagsResponse> {
let out = extract_av_tags(input.text, input.question_side, self.i18n());
Ok(pb::ExtractAvTagsResponse {
Ok(pb::card_rendering::ExtractAvTagsResponse {
text: out.0,
av_tags: out.1,
})
}
fn extract_latex(&self, input: pb::ExtractLatexRequest) -> Result<pb::ExtractLatexResponse> {
fn extract_latex(
&self,
input: pb::card_rendering::ExtractLatexRequest,
) -> Result<pb::card_rendering::ExtractLatexResponse> {
let func = if input.expand_clozes {
extract_latex_expanding_clozes
} else {
@ -38,11 +41,11 @@ impl CardRenderingService for Backend {
};
let (text, extracted) = func(&input.text, input.svg);
Ok(pb::ExtractLatexResponse {
Ok(pb::card_rendering::ExtractLatexResponse {
text,
latex: extracted
.into_iter()
.map(|e: ExtractedLatex| pb::ExtractedLatex {
.map(|e: ExtractedLatex| pb::card_rendering::ExtractedLatex {
filename: e.fname,
latex_body: e.latex,
})
@ -50,7 +53,10 @@ impl CardRenderingService for Backend {
})
}
fn get_empty_cards(&self, _input: pb::Empty) -> Result<pb::EmptyCardsReport> {
fn get_empty_cards(
&self,
_input: pb::generic::Empty,
) -> Result<pb::card_rendering::EmptyCardsReport> {
self.with_col(|col| {
let mut empty = col.empty_cards()?;
let report = col.empty_cards_report(&mut empty)?;
@ -58,14 +64,14 @@ impl CardRenderingService for Backend {
let mut outnotes = vec![];
for (_ntid, notes) in empty {
outnotes.extend(notes.into_iter().map(|e| {
pb::empty_cards_report::NoteWithEmptyCards {
pb::card_rendering::empty_cards_report::NoteWithEmptyCards {
note_id: e.nid.0,
will_delete_note: e.empty.len() == e.current_count,
card_ids: e.empty.into_iter().map(|(_ord, id)| id.0).collect(),
}
}))
}
Ok(pb::EmptyCardsReport {
Ok(pb::card_rendering::EmptyCardsReport {
report,
notes: outnotes,
})
@ -74,8 +80,8 @@ impl CardRenderingService for Backend {
fn render_existing_card(
&self,
input: pb::RenderExistingCardRequest,
) -> Result<pb::RenderCardResponse> {
input: pb::card_rendering::RenderExistingCardRequest,
) -> Result<pb::card_rendering::RenderCardResponse> {
self.with_col(|col| {
col.render_existing_card(CardId(input.card_id), input.browser)
.map(Into::into)
@ -84,8 +90,8 @@ impl CardRenderingService for Backend {
fn render_uncommitted_card(
&self,
input: pb::RenderUncommittedCardRequest,
) -> Result<pb::RenderCardResponse> {
input: pb::card_rendering::RenderUncommittedCardRequest,
) -> Result<pb::card_rendering::RenderCardResponse> {
let template = input.template.or_invalid("missing template")?.into();
let mut note = input.note.or_invalid("missing note")?.into();
let ord = input.card_ord as u16;
@ -98,8 +104,8 @@ impl CardRenderingService for Backend {
fn render_uncommitted_card_legacy(
&self,
input: pb::RenderUncommittedCardLegacyRequest,
) -> Result<pb::RenderCardResponse> {
input: pb::card_rendering::RenderUncommittedCardLegacyRequest,
) -> Result<pb::card_rendering::RenderCardResponse> {
let schema11: CardTemplateSchema11 = serde_json::from_slice(&input.template)?;
let template = schema11.into();
let mut note = input.note.or_invalid("missing note")?.into();
@ -111,11 +117,14 @@ impl CardRenderingService for Backend {
})
}
fn strip_av_tags(&self, input: pb::String) -> Result<pb::String> {
fn strip_av_tags(&self, input: pb::generic::String) -> Result<pb::generic::String> {
Ok(strip_av_tags(input.val).into())
}
fn render_markdown(&self, input: pb::RenderMarkdownRequest) -> Result<pb::String> {
fn render_markdown(
&self,
input: pb::card_rendering::RenderMarkdownRequest,
) -> Result<pb::generic::String> {
let mut text = render_markdown(&input.markdown);
if input.sanitize {
// currently no images
@ -124,18 +133,21 @@ impl CardRenderingService for Backend {
Ok(text.into())
}
fn encode_iri_paths(&self, input: pb::String) -> Result<pb::String> {
fn encode_iri_paths(&self, input: pb::generic::String) -> Result<pb::generic::String> {
Ok(encode_iri_paths(&input.val).to_string().into())
}
fn decode_iri_paths(&self, input: pb::String) -> Result<pb::String> {
fn decode_iri_paths(&self, input: pb::generic::String) -> Result<pb::generic::String> {
Ok(decode_iri_paths(&input.val).to_string().into())
}
fn strip_html(&self, input: pb::StripHtmlRequest) -> Result<pb::String> {
fn strip_html(
&self,
input: pb::card_rendering::StripHtmlRequest,
) -> Result<pb::generic::String> {
Ok(match input.mode() {
pb::strip_html_request::Mode::Normal => strip_html(&input.text),
pb::strip_html_request::Mode::PreserveMediaFilenames => {
pb::card_rendering::strip_html_request::Mode::Normal => strip_html(&input.text),
pb::card_rendering::strip_html_request::Mode::PreserveMediaFilenames => {
strip_html_preserving_media_filenames(&input.text)
}
}
@ -143,38 +155,47 @@ impl CardRenderingService for Backend {
.into())
}
fn compare_answer(&self, input: pb::CompareAnswerRequest) -> Result<pb::String> {
fn compare_answer(
&self,
input: pb::card_rendering::CompareAnswerRequest,
) -> Result<pb::generic::String> {
Ok(compare_answer(&input.expected, &input.provided).into())
}
}
fn rendered_nodes_to_proto(nodes: Vec<RenderedNode>) -> Vec<pb::RenderedTemplateNode> {
fn rendered_nodes_to_proto(
nodes: Vec<RenderedNode>,
) -> Vec<pb::card_rendering::RenderedTemplateNode> {
nodes
.into_iter()
.map(|n| pb::RenderedTemplateNode {
.map(|n| pb::card_rendering::RenderedTemplateNode {
value: Some(rendered_node_to_proto(n)),
})
.collect()
}
fn rendered_node_to_proto(node: RenderedNode) -> pb::rendered_template_node::Value {
fn rendered_node_to_proto(node: RenderedNode) -> pb::card_rendering::rendered_template_node::Value {
match node {
RenderedNode::Text { text } => pb::rendered_template_node::Value::Text(text),
RenderedNode::Text { text } => {
pb::card_rendering::rendered_template_node::Value::Text(text)
}
RenderedNode::Replacement {
field_name,
current_text,
filters,
} => pb::rendered_template_node::Value::Replacement(pb::RenderedTemplateReplacement {
field_name,
current_text,
filters,
}),
} => pb::card_rendering::rendered_template_node::Value::Replacement(
pb::card_rendering::RenderedTemplateReplacement {
field_name,
current_text,
filters,
},
),
}
}
impl From<RenderCardOutput> for pb::RenderCardResponse {
impl From<RenderCardOutput> for pb::card_rendering::RenderCardResponse {
fn from(o: RenderCardOutput) -> Self {
pb::RenderCardResponse {
pb::card_rendering::RenderCardResponse {
question_nodes: rendered_nodes_to_proto(o.qnodes),
answer_nodes: rendered_nodes_to_proto(o.anodes),
css: o.css,


@ -6,7 +6,7 @@ use std::sync::MutexGuard;
use slog::error;
use super::{progress::Progress, Backend};
pub(super) use crate::pb::collection_service::Service as CollectionService;
pub(super) use crate::pb::collection::collection_service::Service as CollectionService;
use crate::{
backend::progress::progress_to_proto,
collection::CollectionBuilder,
@ -16,17 +16,20 @@ use crate::{
};
impl CollectionService for Backend {
fn latest_progress(&self, _input: pb::Empty) -> Result<pb::Progress> {
fn latest_progress(&self, _input: pb::generic::Empty) -> Result<pb::collection::Progress> {
let progress = self.progress_state.lock().unwrap().last_progress;
Ok(progress_to_proto(progress, &self.tr))
}
fn set_wants_abort(&self, _input: pb::Empty) -> Result<pb::Empty> {
fn set_wants_abort(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
self.progress_state.lock().unwrap().want_abort = true;
Ok(().into())
}
fn open_collection(&self, input: pb::OpenCollectionRequest) -> Result<pb::Empty> {
fn open_collection(
&self,
input: pb::collection::OpenCollectionRequest,
) -> Result<pb::generic::Empty> {
let mut guard = self.lock_closed_collection()?;
let mut builder = CollectionBuilder::new(input.collection_path);
@ -45,7 +48,10 @@ impl CollectionService for Backend {
Ok(().into())
}
fn close_collection(&self, input: pb::CloseCollectionRequest) -> Result<pb::Empty> {
fn close_collection(
&self,
input: pb::collection::CloseCollectionRequest,
) -> Result<pb::generic::Empty> {
let desired_version = if input.downgrade_to_schema11 {
Some(SchemaVersion::V11)
} else {
@ -63,42 +69,48 @@ impl CollectionService for Backend {
Ok(().into())
}
fn check_database(&self, _input: pb::Empty) -> Result<pb::CheckDatabaseResponse> {
fn check_database(
&self,
_input: pb::generic::Empty,
) -> Result<pb::collection::CheckDatabaseResponse> {
let mut handler = self.new_progress_handler();
let progress_fn = move |progress, throttle| {
handler.update(Progress::DatabaseCheck(progress), throttle);
};
self.with_col(|col| {
col.check_database(progress_fn)
.map(|problems| pb::CheckDatabaseResponse {
.map(|problems| pb::collection::CheckDatabaseResponse {
problems: problems.to_i18n_strings(&col.tr),
})
})
}
fn get_undo_status(&self, _input: pb::Empty) -> Result<pb::UndoStatus> {
fn get_undo_status(&self, _input: pb::generic::Empty) -> Result<pb::collection::UndoStatus> {
self.with_col(|col| Ok(col.undo_status().into_protobuf(&col.tr)))
}
fn undo(&self, _input: pb::Empty) -> Result<pb::OpChangesAfterUndo> {
fn undo(&self, _input: pb::generic::Empty) -> Result<pb::collection::OpChangesAfterUndo> {
self.with_col(|col| col.undo().map(|out| out.into_protobuf(&col.tr)))
}
fn redo(&self, _input: pb::Empty) -> Result<pb::OpChangesAfterUndo> {
fn redo(&self, _input: pb::generic::Empty) -> Result<pb::collection::OpChangesAfterUndo> {
self.with_col(|col| col.redo().map(|out| out.into_protobuf(&col.tr)))
}
fn add_custom_undo_entry(&self, input: pb::String) -> Result<pb::UInt32> {
fn add_custom_undo_entry(&self, input: pb::generic::String) -> Result<pb::generic::UInt32> {
self.with_col(|col| Ok(col.add_custom_undo_step(input.val).into()))
}
fn merge_undo_entries(&self, input: pb::UInt32) -> Result<pb::OpChanges> {
fn merge_undo_entries(&self, input: pb::generic::UInt32) -> Result<pb::collection::OpChanges> {
let starting_from = input.val as usize;
self.with_col(|col| col.merge_undoable_ops(starting_from))
.map(Into::into)
}
fn create_backup(&self, input: pb::CreateBackupRequest) -> Result<pb::Bool> {
fn create_backup(
&self,
input: pb::collection::CreateBackupRequest,
) -> Result<pb::generic::Bool> {
// lock collection
let mut col_lock = self.lock_open_collection()?;
let col = col_lock.as_mut().unwrap();
@ -122,7 +134,7 @@ impl CollectionService for Backend {
Ok(created.into())
}
fn await_backup_completion(&self, _input: pb::Empty) -> Result<pb::Empty> {
fn await_backup_completion(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
self.await_backup_completion()?;
Ok(().into())
}


@ -4,11 +4,11 @@
use serde_json::Value;
use super::Backend;
pub(super) use crate::pb::config_service::Service as ConfigService;
pub(super) use crate::pb::config::config_service::Service as ConfigService;
use crate::{
config::{BoolKey, StringKey},
pb,
pb::config_key::{Bool as BoolKeyProto, String as StringKeyProto},
pb::config::config_key::{Bool as BoolKeyProto, String as StringKeyProto},
prelude::*,
};
@ -54,7 +54,7 @@ impl From<StringKeyProto> for StringKey {
}
impl ConfigService for Backend {
fn get_config_json(&self, input: pb::String) -> Result<pb::Json> {
fn get_config_json(&self, input: pb::generic::String) -> Result<pb::generic::Json> {
self.with_col(|col| {
let val: Option<Value> = col.get_config_optional(input.val.as_str());
val.or_not_found(input.val)
@ -63,7 +63,10 @@ impl ConfigService for Backend {
})
}
fn set_config_json(&self, input: pb::SetConfigJsonRequest) -> Result<pb::OpChanges> {
fn set_config_json(
&self,
input: pb::config::SetConfigJsonRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| {
let val: Value = serde_json::from_slice(&input.value_json)?;
col.set_config_json(input.key.as_str(), &val, input.undoable)
@ -71,7 +74,10 @@ impl ConfigService for Backend {
.map(Into::into)
}
fn set_config_json_no_undo(&self, input: pb::SetConfigJsonRequest) -> Result<pb::Empty> {
fn set_config_json_no_undo(
&self,
input: pb::config::SetConfigJsonRequest,
) -> Result<pb::generic::Empty> {
self.with_col(|col| {
let val: Value = serde_json::from_slice(&input.value_json)?;
col.transact_no_undo(|col| col.set_config(input.key.as_str(), &val).map(|_| ()))
@ -79,12 +85,12 @@ impl ConfigService for Backend {
.map(Into::into)
}
fn remove_config(&self, input: pb::String) -> Result<pb::OpChanges> {
fn remove_config(&self, input: pb::generic::String) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.remove_config(input.val.as_str()))
.map(Into::into)
}
fn get_all_config(&self, _input: pb::Empty) -> Result<pb::Json> {
fn get_all_config(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
self.with_col(|col| {
let conf = col.storage.get_all_config()?;
serde_json::to_vec(&conf).map_err(Into::into)
@ -92,37 +98,49 @@ impl ConfigService for Backend {
.map(Into::into)
}
fn get_config_bool(&self, input: pb::GetConfigBoolRequest) -> Result<pb::Bool> {
fn get_config_bool(
&self,
input: pb::config::GetConfigBoolRequest,
) -> Result<pb::generic::Bool> {
self.with_col(|col| {
Ok(pb::Bool {
Ok(pb::generic::Bool {
val: col.get_config_bool(input.key().into()),
})
})
}
fn set_config_bool(&self, input: pb::SetConfigBoolRequest) -> Result<pb::OpChanges> {
fn set_config_bool(
&self,
input: pb::config::SetConfigBoolRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.set_config_bool(input.key().into(), input.value, input.undoable))
.map(Into::into)
}
fn get_config_string(&self, input: pb::GetConfigStringRequest) -> Result<pb::String> {
fn get_config_string(
&self,
input: pb::config::GetConfigStringRequest,
) -> Result<pb::generic::String> {
self.with_col(|col| {
Ok(pb::String {
Ok(pb::generic::String {
val: col.get_config_string(input.key().into()),
})
})
}
fn set_config_string(&self, input: pb::SetConfigStringRequest) -> Result<pb::OpChanges> {
fn set_config_string(
&self,
input: pb::config::SetConfigStringRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.set_config_string(input.key().into(), &input.value, input.undoable))
.map(Into::into)
}
fn get_preferences(&self, _input: pb::Empty) -> Result<pb::Preferences> {
fn get_preferences(&self, _input: pb::generic::Empty) -> Result<pb::config::Preferences> {
self.with_col(|col| col.get_preferences())
}
fn set_preferences(&self, input: pb::Preferences) -> Result<pb::OpChanges> {
fn set_preferences(&self, input: pb::config::Preferences) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.set_preferences(input))
.map(Into::into)
}


@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
pub(super) use crate::pb::deckconfig_service::Service as DeckConfigService;
pub(super) use crate::pb::deckconfig::deckconfig_service::Service as DeckConfigService;
use crate::{
deckconfig::{DeckConfSchema11, DeckConfig, UpdateDeckConfigsRequest},
pb,
@ -10,19 +10,22 @@ use crate::{
};
impl DeckConfigService for Backend {
fn add_or_update_deck_config_legacy(&self, input: pb::Json) -> Result<pb::DeckConfigId> {
fn add_or_update_deck_config_legacy(
&self,
input: pb::generic::Json,
) -> Result<pb::deckconfig::DeckConfigId> {
let conf: DeckConfSchema11 = serde_json::from_slice(&input.json)?;
let mut conf: DeckConfig = conf.into();
self.with_col(|col| {
col.transact_no_undo(|col| {
col.add_or_update_deck_config_legacy(&mut conf)?;
Ok(pb::DeckConfigId { dcid: conf.id.0 })
Ok(pb::deckconfig::DeckConfigId { dcid: conf.id.0 })
})
})
.map(Into::into)
}
fn all_deck_config_legacy(&self, _input: pb::Empty) -> Result<pb::Json> {
fn all_deck_config_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
self.with_col(|col| {
let conf: Vec<DeckConfSchema11> = col
.storage
@ -35,11 +38,17 @@ impl DeckConfigService for Backend {
.map(Into::into)
}
fn get_deck_config(&self, input: pb::DeckConfigId) -> Result<pb::DeckConfig> {
fn get_deck_config(
&self,
input: pb::deckconfig::DeckConfigId,
) -> Result<pb::deckconfig::DeckConfig> {
self.with_col(|col| Ok(col.get_deck_config(input.into(), true)?.unwrap().into()))
}
fn get_deck_config_legacy(&self, input: pb::DeckConfigId) -> Result<pb::Json> {
fn get_deck_config_legacy(
&self,
input: pb::deckconfig::DeckConfigId,
) -> Result<pb::generic::Json> {
self.with_col(|col| {
let conf = col.get_deck_config(input.into(), true)?.unwrap();
let conf: DeckConfSchema11 = conf.into();
@ -48,30 +57,39 @@ impl DeckConfigService for Backend {
.map(Into::into)
}
fn new_deck_config_legacy(&self, _input: pb::Empty) -> Result<pb::Json> {
fn new_deck_config_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
serde_json::to_vec(&DeckConfSchema11::default())
.map_err(Into::into)
.map(Into::into)
}
fn remove_deck_config(&self, input: pb::DeckConfigId) -> Result<pb::Empty> {
fn remove_deck_config(
&self,
input: pb::deckconfig::DeckConfigId,
) -> Result<pb::generic::Empty> {
self.with_col(|col| col.transact_no_undo(|col| col.remove_deck_config_inner(input.into())))
.map(Into::into)
}
fn get_deck_configs_for_update(&self, input: pb::DeckId) -> Result<pb::DeckConfigsForUpdate> {
fn get_deck_configs_for_update(
&self,
input: pb::decks::DeckId,
) -> Result<pb::deckconfig::DeckConfigsForUpdate> {
self.with_col(|col| col.get_deck_configs_for_update(input.into()))
}
fn update_deck_configs(&self, input: pb::UpdateDeckConfigsRequest) -> Result<pb::OpChanges> {
fn update_deck_configs(
&self,
input: pb::deckconfig::UpdateDeckConfigsRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.update_deck_configs(input.into()))
.map(Into::into)
}
}
impl From<DeckConfig> for pb::DeckConfig {
impl From<DeckConfig> for pb::deckconfig::DeckConfig {
fn from(c: DeckConfig) -> Self {
pb::DeckConfig {
pb::deckconfig::DeckConfig {
id: c.id.0,
name: c.name,
mtime_secs: c.mtime_secs.0,
@ -81,8 +99,8 @@ impl From<DeckConfig> for pb::DeckConfig {
}
}
impl From<pb::UpdateDeckConfigsRequest> for UpdateDeckConfigsRequest {
fn from(c: pb::UpdateDeckConfigsRequest) -> Self {
impl From<pb::deckconfig::UpdateDeckConfigsRequest> for UpdateDeckConfigsRequest {
fn from(c: pb::deckconfig::UpdateDeckConfigsRequest) -> Self {
UpdateDeckConfigsRequest {
target_deck_id: c.target_deck_id.into(),
configs: c.configs.into_iter().map(Into::into).collect(),
@ -94,8 +112,8 @@ impl From<pb::UpdateDeckConfigsRequest> for UpdateDeckConfigsRequest {
}
}
impl From<pb::DeckConfig> for DeckConfig {
fn from(c: pb::DeckConfig) -> Self {
impl From<pb::deckconfig::DeckConfig> for DeckConfig {
fn from(c: pb::deckconfig::DeckConfig) -> Self {
DeckConfig {
id: c.id.into(),
name: c.name,


@ -4,7 +4,7 @@
use std::convert::TryFrom;
use super::Backend;
pub(super) use crate::pb::decks_service::Service as DecksService;
pub(super) use crate::pb::decks::decks_service::Service as DecksService;
use crate::{
decks::{DeckSchema11, FilteredSearchOrder},
pb::{self as pb},
@ -13,16 +13,16 @@ use crate::{
};
impl DecksService for Backend {
fn new_deck(&self, _input: pb::Empty) -> Result<pb::Deck> {
fn new_deck(&self, _input: pb::generic::Empty) -> Result<pb::decks::Deck> {
Ok(Deck::new_normal().into())
}
fn add_deck(&self, deck: pb::Deck) -> Result<pb::OpChangesWithId> {
fn add_deck(&self, deck: pb::decks::Deck) -> Result<pb::collection::OpChangesWithId> {
let mut deck: Deck = deck.try_into()?;
self.with_col(|col| Ok(col.add_deck(&mut deck)?.map(|_| deck.id.0).into()))
}
fn add_deck_legacy(&self, input: pb::Json) -> Result<pb::OpChangesWithId> {
fn add_deck_legacy(&self, input: pb::generic::Json) -> Result<pb::collection::OpChangesWithId> {
let schema11: DeckSchema11 = serde_json::from_slice(&input.json)?;
let mut deck: Deck = schema11.into();
self.with_col(|col| {
@ -33,8 +33,8 @@ impl DecksService for Backend {
fn add_or_update_deck_legacy(
&self,
input: pb::AddOrUpdateDeckLegacyRequest,
) -> Result<pb::DeckId> {
input: pb::decks::AddOrUpdateDeckLegacyRequest,
) -> Result<pb::decks::DeckId> {
self.with_col(|col| {
let schema11: DeckSchema11 = serde_json::from_slice(&input.deck)?;
let mut deck: Deck = schema11.into();
@ -46,11 +46,11 @@ impl DecksService for Backend {
} else {
col.add_or_update_deck(&mut deck)?;
}
Ok(pb::DeckId { did: deck.id.0 })
Ok(pb::decks::DeckId { did: deck.id.0 })
})
}
fn deck_tree(&self, input: pb::DeckTreeRequest) -> Result<pb::DeckTreeNode> {
fn deck_tree(&self, input: pb::decks::DeckTreeRequest) -> Result<pb::decks::DeckTreeNode> {
self.with_col(|col| {
let now = if input.now == 0 {
None
@ -61,7 +61,7 @@ impl DecksService for Backend {
})
}
fn deck_tree_legacy(&self, _input: pb::Empty) -> Result<pb::Json> {
fn deck_tree_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
self.with_col(|col| {
let tree = col.legacy_deck_tree()?;
serde_json::to_vec(&tree)
@ -70,7 +70,7 @@ impl DecksService for Backend {
})
}
fn get_all_decks_legacy(&self, _input: pb::Empty) -> Result<pb::Json> {
fn get_all_decks_legacy(&self, _input: pb::generic::Empty) -> Result<pb::generic::Json> {
self.with_col(|col| {
let decks = col.storage.get_all_decks_as_schema11()?;
serde_json::to_vec(&decks).map_err(Into::into)
@ -78,26 +78,28 @@ impl DecksService for Backend {
.map(Into::into)
}
fn get_deck_id_by_name(&self, input: pb::String) -> Result<pb::DeckId> {
fn get_deck_id_by_name(&self, input: pb::generic::String) -> Result<pb::decks::DeckId> {
self.with_col(|col| {
col.get_deck_id(&input.val)
.and_then(|d| d.or_not_found(input.val).map(|d| pb::DeckId { did: d.0 }))
col.get_deck_id(&input.val).and_then(|d| {
d.or_not_found(input.val)
.map(|d| pb::decks::DeckId { did: d.0 })
})
})
}
fn get_deck(&self, input: pb::DeckId) -> Result<pb::Deck> {
fn get_deck(&self, input: pb::decks::DeckId) -> Result<pb::decks::Deck> {
let did = input.into();
self.with_col(|col| Ok(col.storage.get_deck(did)?.or_not_found(did)?.into()))
}
fn update_deck(&self, input: pb::Deck) -> Result<pb::OpChanges> {
fn update_deck(&self, input: pb::decks::Deck) -> Result<pb::collection::OpChanges> {
self.with_col(|col| {
let mut deck = Deck::try_from(input)?;
col.update_deck(&mut deck).map(Into::into)
})
}
fn update_deck_legacy(&self, input: pb::Json) -> Result<pb::OpChanges> {
fn update_deck_legacy(&self, input: pb::generic::Json) -> Result<pb::collection::OpChanges> {
self.with_col(|col| {
let deck: DeckSchema11 = serde_json::from_slice(&input.json)?;
let mut deck = deck.into();
@ -105,7 +107,7 @@ impl DecksService for Backend {
})
}
fn get_deck_legacy(&self, input: pb::DeckId) -> Result<pb::Json> {
fn get_deck_legacy(&self, input: pb::decks::DeckId) -> Result<pb::generic::Json> {
let did = input.into();
self.with_col(|col| {
let deck: DeckSchema11 = col.storage.get_deck(did)?.or_not_found(did)?.into();
@ -115,7 +117,10 @@ impl DecksService for Backend {
})
}
fn get_deck_names(&self, input: pb::GetDeckNamesRequest) -> Result<pb::DeckNames> {
fn get_deck_names(
&self,
input: pb::decks::GetDeckNamesRequest,
) -> Result<pb::decks::DeckNames> {
self.with_col(|col| {
let names = if input.include_filtered {
col.get_all_deck_names(input.skip_empty_default)?
@ -126,14 +131,14 @@ impl DecksService for Backend {
})
}
fn get_deck_and_child_names(&self, input: pb::DeckId) -> Result<pb::DeckNames> {
fn get_deck_and_child_names(&self, input: pb::decks::DeckId) -> Result<pb::decks::DeckNames> {
self.with_col(|col| {
col.get_deck_and_child_names(input.did.into())
.map(Into::into)
})
}
fn new_deck_legacy(&self, input: pb::Bool) -> Result<pb::Json> {
fn new_deck_legacy(&self, input: pb::generic::Bool) -> Result<pb::generic::Json> {
let deck = if input.val {
Deck::new_filtered()
} else {
@ -145,12 +150,18 @@ impl DecksService for Backend {
.map(Into::into)
}
fn remove_decks(&self, input: pb::DeckIds) -> Result<pb::OpChangesWithCount> {
fn remove_decks(
&self,
input: pb::decks::DeckIds,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| col.remove_decks_and_child_decks(&Into::<Vec<DeckId>>::into(input)))
.map(Into::into)
}
fn reparent_decks(&self, input: pb::ReparentDecksRequest) -> Result<pb::OpChangesWithCount> {
fn reparent_decks(
&self,
input: pb::decks::ReparentDecksRequest,
) -> Result<pb::collection::OpChangesWithCount> {
let deck_ids: Vec<_> = input.deck_ids.into_iter().map(Into::into).collect();
let new_parent = if input.new_parent == 0 {
None
@ -161,68 +172,80 @@ impl DecksService for Backend {
.map(Into::into)
}
fn rename_deck(&self, input: pb::RenameDeckRequest) -> Result<pb::OpChanges> {
fn rename_deck(
&self,
input: pb::decks::RenameDeckRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.rename_deck(input.deck_id.into(), &input.new_name))
.map(Into::into)
}
fn get_or_create_filtered_deck(&self, input: pb::DeckId) -> Result<pb::FilteredDeckForUpdate> {
fn get_or_create_filtered_deck(
&self,
input: pb::decks::DeckId,
) -> Result<pb::decks::FilteredDeckForUpdate> {
self.with_col(|col| col.get_or_create_filtered_deck(input.into()))
.map(Into::into)
}
fn add_or_update_filtered_deck(
&self,
input: pb::FilteredDeckForUpdate,
) -> Result<pb::OpChangesWithId> {
input: pb::decks::FilteredDeckForUpdate,
) -> Result<pb::collection::OpChangesWithId> {
self.with_col(|col| col.add_or_update_filtered_deck(input.into()))
.map(|out| out.map(i64::from))
.map(Into::into)
}
fn filtered_deck_order_labels(&self, _input: pb::Empty) -> Result<pb::StringList> {
fn filtered_deck_order_labels(
&self,
_input: pb::generic::Empty,
) -> Result<pb::generic::StringList> {
Ok(FilteredSearchOrder::labels(&self.tr).into())
}
fn set_deck_collapsed(&self, input: pb::SetDeckCollapsedRequest) -> Result<pb::OpChanges> {
fn set_deck_collapsed(
&self,
input: pb::decks::SetDeckCollapsedRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| {
col.set_deck_collapsed(input.deck_id.into(), input.collapsed, input.scope())
})
.map(Into::into)
}
fn set_current_deck(&self, input: pb::DeckId) -> Result<pb::OpChanges> {
fn set_current_deck(&self, input: pb::decks::DeckId) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.set_current_deck(input.did.into()))
.map(Into::into)
}
fn get_current_deck(&self, _input: pb::Empty) -> Result<pb::Deck> {
fn get_current_deck(&self, _input: pb::generic::Empty) -> Result<pb::decks::Deck> {
self.with_col(|col| col.get_current_deck())
.map(|deck| (*deck).clone().into())
}
}
impl From<pb::DeckId> for DeckId {
fn from(did: pb::DeckId) -> Self {
impl From<pb::decks::DeckId> for DeckId {
fn from(did: pb::decks::DeckId) -> Self {
DeckId(did.did)
}
}
impl From<pb::DeckIds> for Vec<DeckId> {
fn from(dids: pb::DeckIds) -> Self {
impl From<pb::decks::DeckIds> for Vec<DeckId> {
fn from(dids: pb::decks::DeckIds) -> Self {
dids.dids.into_iter().map(DeckId).collect()
}
}
impl From<DeckId> for pb::DeckId {
impl From<DeckId> for pb::decks::DeckId {
fn from(did: DeckId) -> Self {
pb::DeckId { did: did.0 }
pb::decks::DeckId { did: did.0 }
}
}
impl From<FilteredDeckForUpdate> for pb::FilteredDeckForUpdate {
impl From<FilteredDeckForUpdate> for pb::decks::FilteredDeckForUpdate {
fn from(deck: FilteredDeckForUpdate) -> Self {
pb::FilteredDeckForUpdate {
pb::decks::FilteredDeckForUpdate {
id: deck.id.into(),
name: deck.human_name,
config: Some(deck.config),
@ -230,8 +253,8 @@ impl From<FilteredDeckForUpdate> for pb::FilteredDeckForUpdate {
}
}
impl From<pb::FilteredDeckForUpdate> for FilteredDeckForUpdate {
fn from(deck: pb::FilteredDeckForUpdate) -> Self {
impl From<pb::decks::FilteredDeckForUpdate> for FilteredDeckForUpdate {
fn from(deck: pb::decks::FilteredDeckForUpdate) -> Self {
FilteredDeckForUpdate {
id: deck.id.into(),
human_name: deck.name,
@ -240,9 +263,9 @@ impl From<pb::FilteredDeckForUpdate> for FilteredDeckForUpdate {
}
}
impl From<Deck> for pb::Deck {
impl From<Deck> for pb::decks::Deck {
fn from(d: Deck) -> Self {
pb::Deck {
pb::decks::Deck {
id: d.id.0,
name: d.name.human_name(),
mtime_secs: d.mtime_secs.0,
@ -253,10 +276,10 @@ impl From<Deck> for pb::Deck {
}
}
impl TryFrom<pb::Deck> for Deck {
impl TryFrom<pb::decks::Deck> for Deck {
type Error = AnkiError;
fn try_from(d: pb::Deck) -> Result<Self, Self::Error> {
fn try_from(d: pb::decks::Deck) -> Result<Self, Self::Error> {
Ok(Deck {
id: DeckId(d.id),
name: NativeDeckName::from_human_name(&d.name),
@ -268,42 +291,42 @@ impl TryFrom<pb::Deck> for Deck {
}
}
impl From<DeckKind> for pb::deck::Kind {
impl From<DeckKind> for pb::decks::deck::Kind {
fn from(k: DeckKind) -> Self {
match k {
DeckKind::Normal(n) => pb::deck::Kind::Normal(n),
DeckKind::Filtered(f) => pb::deck::Kind::Filtered(f),
DeckKind::Normal(n) => pb::decks::deck::Kind::Normal(n),
DeckKind::Filtered(f) => pb::decks::deck::Kind::Filtered(f),
}
}
}
impl From<pb::deck::Kind> for DeckKind {
fn from(kind: pb::deck::Kind) -> Self {
impl From<pb::decks::deck::Kind> for DeckKind {
fn from(kind: pb::decks::deck::Kind) -> Self {
match kind {
pb::deck::Kind::Normal(normal) => DeckKind::Normal(normal),
pb::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered),
pb::decks::deck::Kind::Normal(normal) => DeckKind::Normal(normal),
pb::decks::deck::Kind::Filtered(filtered) => DeckKind::Filtered(filtered),
}
}
}
impl From<(DeckId, String)> for pb::DeckNameId {
impl From<(DeckId, String)> for pb::decks::DeckNameId {
fn from(id_name: (DeckId, String)) -> Self {
pb::DeckNameId {
pb::decks::DeckNameId {
id: id_name.0 .0,
name: id_name.1,
}
}
}
impl From<Vec<(DeckId, String)>> for pb::DeckNames {
impl From<Vec<(DeckId, String)>> for pb::decks::DeckNames {
fn from(id_names: Vec<(DeckId, String)>) -> Self {
pb::DeckNames {
pb::decks::DeckNames {
entries: id_names.into_iter().map(Into::into).collect(),
}
}
}
// fn new_deck(&self, input: pb::Bool) -> Result<pb::Deck> {
// fn new_deck(&self, input: pb::generic::Bool) -> Result<pb::decks::Deck> {
// let deck = if input.val {
// Deck::new_filtered()
// } else {


@ -4,12 +4,12 @@
use crate::{
error::{AnkiError, SyncErrorKind},
pb,
pb::backend_error::Kind,
pb::backend::backend_error::Kind,
prelude::*,
};
impl AnkiError {
pub fn into_protobuf(self, tr: &I18n) -> pb::BackendError {
pub fn into_protobuf(self, tr: &I18n) -> pb::backend::BackendError {
let message = self.message(tr);
let help_page = self.help_page().map(|page| page as i32);
let context = self.context();
@ -43,7 +43,7 @@ impl AnkiError {
AnkiError::InvalidId => Kind::InvalidInput,
};
pb::BackendError {
pb::backend::BackendError {
kind: kind as i32,
message,
help_page,


@ -3,98 +3,98 @@
use crate::{pb, prelude::*};
impl From<Vec<u8>> for pb::Json {
impl From<Vec<u8>> for pb::generic::Json {
fn from(json: Vec<u8>) -> Self {
pb::Json { json }
pb::generic::Json { json }
}
}
impl From<String> for pb::String {
impl From<String> for pb::generic::String {
fn from(val: String) -> Self {
pb::String { val }
pb::generic::String { val }
}
}
impl From<bool> for pb::Bool {
impl From<bool> for pb::generic::Bool {
fn from(val: bool) -> Self {
pb::Bool { val }
pb::generic::Bool { val }
}
}
impl From<i32> for pb::Int32 {
impl From<i32> for pb::generic::Int32 {
fn from(val: i32) -> Self {
pb::Int32 { val }
pb::generic::Int32 { val }
}
}
impl From<i64> for pb::Int64 {
impl From<i64> for pb::generic::Int64 {
fn from(val: i64) -> Self {
pb::Int64 { val }
pb::generic::Int64 { val }
}
}
impl From<u32> for pb::UInt32 {
impl From<u32> for pb::generic::UInt32 {
fn from(val: u32) -> Self {
pb::UInt32 { val }
pb::generic::UInt32 { val }
}
}
impl From<usize> for pb::UInt32 {
impl From<usize> for pb::generic::UInt32 {
fn from(val: usize) -> Self {
pb::UInt32 { val: val as u32 }
pb::generic::UInt32 { val: val as u32 }
}
}
impl From<()> for pb::Empty {
impl From<()> for pb::generic::Empty {
fn from(_val: ()) -> Self {
pb::Empty {}
pb::generic::Empty {}
}
}
impl From<pb::CardId> for CardId {
fn from(cid: pb::CardId) -> Self {
impl From<pb::cards::CardId> for CardId {
fn from(cid: pb::cards::CardId) -> Self {
CardId(cid.cid)
}
}
impl From<pb::CardIds> for Vec<CardId> {
fn from(c: pb::CardIds) -> Self {
impl From<pb::cards::CardIds> for Vec<CardId> {
fn from(c: pb::cards::CardIds) -> Self {
c.cids.into_iter().map(CardId).collect()
}
}
impl From<pb::NoteId> for NoteId {
fn from(nid: pb::NoteId) -> Self {
impl From<pb::notes::NoteId> for NoteId {
fn from(nid: pb::notes::NoteId) -> Self {
NoteId(nid.nid)
}
}
impl From<NoteId> for pb::NoteId {
impl From<NoteId> for pb::notes::NoteId {
fn from(nid: NoteId) -> Self {
pb::NoteId { nid: nid.0 }
pb::notes::NoteId { nid: nid.0 }
}
}
impl From<pb::NotetypeId> for NotetypeId {
fn from(ntid: pb::NotetypeId) -> Self {
impl From<pb::notetypes::NotetypeId> for NotetypeId {
fn from(ntid: pb::notetypes::NotetypeId) -> Self {
NotetypeId(ntid.ntid)
}
}
impl From<NotetypeId> for pb::NotetypeId {
impl From<NotetypeId> for pb::notetypes::NotetypeId {
fn from(ntid: NotetypeId) -> Self {
pb::NotetypeId { ntid: ntid.0 }
pb::notetypes::NotetypeId { ntid: ntid.0 }
}
}
impl From<pb::DeckConfigId> for DeckConfigId {
fn from(dcid: pb::DeckConfigId) -> Self {
impl From<pb::deckconfig::DeckConfigId> for DeckConfigId {
fn from(dcid: pb::deckconfig::DeckConfigId) -> Self {
DeckConfigId(dcid.dcid)
}
}
impl From<Vec<String>> for pb::StringList {
impl From<Vec<String>> for pb::generic::StringList {
fn from(vals: Vec<String>) -> Self {
pb::StringList { vals }
pb::generic::StringList { vals }
}
}


@ -6,7 +6,7 @@ use std::collections::HashMap;
use fluent::{FluentArgs, FluentValue};
use super::Backend;
pub(super) use crate::pb::i18n_service::Service as I18nService;
pub(super) use crate::pb::i18n::i18n_service::Service as I18nService;
use crate::{
pb,
prelude::*,
@ -14,7 +14,10 @@ use crate::{
};
impl I18nService for Backend {
fn translate_string(&self, input: pb::TranslateStringRequest) -> Result<pb::String> {
fn translate_string(
&self,
input: pb::i18n::TranslateStringRequest,
) -> Result<pb::generic::String> {
let args = build_fluent_args(input.args);
Ok(self
@ -27,8 +30,11 @@ impl I18nService for Backend {
.into())
}
fn format_timespan(&self, input: pb::FormatTimespanRequest) -> Result<pb::String> {
use pb::format_timespan_request::Context;
fn format_timespan(
&self,
input: pb::i18n::FormatTimespanRequest,
) -> Result<pb::generic::String> {
use pb::i18n::format_timespan_request::Context;
Ok(match input.context() {
Context::Precise => time_span(input.seconds, &self.tr, true),
Context::Intervals => time_span(input.seconds, &self.tr, false),
@ -37,14 +43,14 @@ impl I18nService for Backend {
.into())
}
fn i18n_resources(&self, input: pb::I18nResourcesRequest) -> Result<pb::Json> {
fn i18n_resources(&self, input: pb::i18n::I18nResourcesRequest) -> Result<pb::generic::Json> {
serde_json::to_vec(&self.tr.resources_for_js(&input.modules))
.map(Into::into)
.map_err(Into::into)
}
}
fn build_fluent_args(input: HashMap<String, pb::TranslateArgValue>) -> FluentArgs<'static> {
fn build_fluent_args(input: HashMap<String, pb::i18n::TranslateArgValue>) -> FluentArgs<'static> {
let mut args = FluentArgs::new();
for (key, val) in input {
args.set(key, translate_arg_to_fluent_val(&val));
@ -52,8 +58,8 @@ fn build_fluent_args(input: HashMap<String, pb::TranslateArgValue>) -> FluentArg
args
}
fn translate_arg_to_fluent_val(arg: &pb::TranslateArgValue) -> FluentValue<'static> {
use pb::translate_arg_value::Value as V;
fn translate_arg_to_fluent_val(arg: &pb::i18n::TranslateArgValue) -> FluentValue<'static> {
use pb::i18n::translate_arg_value::Value as V;
match &arg.value {
Some(val) => match val {
V::Str(s) => FluentValue::String(s.to_owned().into()),


@ -4,10 +4,13 @@
use std::path::Path;
use super::{progress::Progress, Backend};
pub(super) use crate::pb::importexport_service::Service as ImportExportService;
pub(super) use crate::pb::import_export::importexport_service::Service as ImportExportService;
use crate::{
import_export::{package::import_colpkg, ExportProgress, ImportProgress, NoteLog},
pb::{self as pb, export_limit, ExportLimit},
pb::{
import_export::{export_limit, ExportLimit},
{self as pb},
},
prelude::*,
search::SearchNode,
};
@ -15,8 +18,8 @@ use crate::{
impl ImportExportService for Backend {
fn export_collection_package(
&self,
input: pb::ExportCollectionPackageRequest,
) -> Result<pb::Empty> {
input: pb::import_export::ExportCollectionPackageRequest,
) -> Result<pb::generic::Empty> {
self.abort_media_sync_and_wait();
let mut guard = self.lock_open_collection()?;
@ -34,8 +37,8 @@ impl ImportExportService for Backend {
fn import_collection_package(
&self,
input: pb::ImportCollectionPackageRequest,
) -> Result<pb::Empty> {
input: pb::import_export::ImportCollectionPackageRequest,
) -> Result<pb::generic::Empty> {
let _guard = self.lock_closed_collection()?;
import_colpkg(
@ -51,13 +54,16 @@ impl ImportExportService for Backend {
fn import_anki_package(
&self,
input: pb::ImportAnkiPackageRequest,
) -> Result<pb::ImportResponse> {
input: pb::import_export::ImportAnkiPackageRequest,
) -> Result<pb::import_export::ImportResponse> {
self.with_col(|col| col.import_apkg(&input.package_path, self.import_progress_fn()))
.map(Into::into)
}
fn export_anki_package(&self, input: pb::ExportAnkiPackageRequest) -> Result<pb::UInt32> {
fn export_anki_package(
&self,
input: pb::import_export::ExportAnkiPackageRequest,
) -> Result<pb::generic::UInt32> {
self.with_col(|col| {
col.export_apkg(
&input.out_path,
@ -72,7 +78,10 @@ impl ImportExportService for Backend {
.map(Into::into)
}
fn get_csv_metadata(&self, input: pb::CsvMetadataRequest) -> Result<pb::CsvMetadata> {
fn get_csv_metadata(
&self,
input: pb::import_export::CsvMetadataRequest,
) -> Result<pb::import_export::CsvMetadata> {
let delimiter = input.delimiter.is_some().then(|| input.delimiter());
self.with_col(|col| {
col.get_csv_metadata(
@ -84,7 +93,10 @@ impl ImportExportService for Backend {
})
}
fn import_csv(&self, input: pb::ImportCsvRequest) -> Result<pb::ImportResponse> {
fn import_csv(
&self,
input: pb::import_export::ImportCsvRequest,
) -> Result<pb::import_export::ImportResponse> {
self.with_col(|col| {
col.import_csv(
&input.path,
@ -95,12 +107,18 @@ impl ImportExportService for Backend {
.map(Into::into)
}
fn export_note_csv(&self, input: pb::ExportNoteCsvRequest) -> Result<pb::UInt32> {
fn export_note_csv(
&self,
input: pb::import_export::ExportNoteCsvRequest,
) -> Result<pb::generic::UInt32> {
self.with_col(|col| col.export_note_csv(input, self.export_progress_fn()))
.map(Into::into)
}
fn export_card_csv(&self, input: pb::ExportCardCsvRequest) -> Result<pb::UInt32> {
fn export_card_csv(
&self,
input: pb::import_export::ExportCardCsvRequest,
) -> Result<pb::generic::UInt32> {
self.with_col(|col| {
col.export_card_csv(
&input.out_path,
@ -112,12 +130,18 @@ impl ImportExportService for Backend {
.map(Into::into)
}
fn import_json_file(&self, input: pb::String) -> Result<pb::ImportResponse> {
fn import_json_file(
&self,
input: pb::generic::String,
) -> Result<pb::import_export::ImportResponse> {
self.with_col(|col| col.import_json_file(&input.val, self.import_progress_fn()))
.map(Into::into)
}
fn import_json_string(&self, input: pb::String) -> Result<pb::ImportResponse> {
fn import_json_string(
&self,
input: pb::generic::String,
) -> Result<pb::import_export::ImportResponse> {
self.with_col(|col| col.import_json_string(&input.val, self.import_progress_fn()))
.map(Into::into)
}
@ -135,7 +159,7 @@ impl Backend {
}
}
impl From<OpOutput<NoteLog>> for pb::ImportResponse {
impl From<OpOutput<NoteLog>> for pb::import_export::ImportResponse {
fn from(output: OpOutput<NoteLog>) -> Self {
Self {
changes: Some(output.changes.into()),
@ -149,7 +173,7 @@ impl From<ExportLimit> for SearchNode {
use export_limit::Limit;
let limit = export_limit
.limit
.unwrap_or(Limit::WholeCollection(pb::Empty {}));
.unwrap_or(Limit::WholeCollection(pb::generic::Empty {}));
match limit {
Limit::WholeCollection(_) => Self::WholeCollection,
Limit::DeckId(did) => Self::from_deck_id(did, true),


@ -2,11 +2,11 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
pub(super) use crate::pb::links_service::Service as LinksService;
pub(super) use crate::pb::links::links_service::Service as LinksService;
use crate::{pb, pb::links::help_page_link_request::HelpPage, prelude::*};
impl LinksService for Backend {
fn help_page_link(&self, input: pb::HelpPageLinkRequest) -> Result<pb::String> {
fn help_page_link(&self, input: pb::links::HelpPageLinkRequest) -> Result<pb::generic::String> {
Ok(HelpPage::from_i32(input.page)
.unwrap_or(HelpPage::Index)
.to_link()


@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{progress::Progress, Backend};
pub(super) use crate::pb::media_service::Service as MediaService;
pub(super) use crate::pb::media::media_service::Service as MediaService;
use crate::{
media::{check::MediaChecker, MediaManager},
pb,
@ -13,7 +13,7 @@ impl MediaService for Backend {
// media
//-----------------------------------------------
fn check_media(&self, _input: pb::Empty) -> Result<pb::CheckMediaResponse> {
fn check_media(&self, _input: pb::generic::Empty) -> Result<pb::media::CheckMediaResponse> {
let mut handler = self.new_progress_handler();
let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true);
@ -26,7 +26,7 @@ impl MediaService for Backend {
let mut report = checker.summarize_output(&mut output);
ctx.report_media_field_referencing_templates(&mut report)?;
Ok(pb::CheckMediaResponse {
Ok(pb::media::CheckMediaResponse {
unused: output.unused,
missing: output.missing,
report,
@ -36,7 +36,10 @@ impl MediaService for Backend {
})
}
fn trash_media_files(&self, input: pb::TrashMediaFilesRequest) -> Result<pb::Empty> {
fn trash_media_files(
&self,
input: pb::media::TrashMediaFilesRequest,
) -> Result<pb::generic::Empty> {
self.with_col(|col| {
let mgr = MediaManager::new(&col.media_folder, &col.media_db)?;
let mut ctx = mgr.dbctx();
@ -45,7 +48,7 @@ impl MediaService for Backend {
.map(Into::into)
}
fn add_media_file(&self, input: pb::AddMediaFileRequest) -> Result<pb::String> {
fn add_media_file(&self, input: pb::media::AddMediaFileRequest) -> Result<pb::generic::String> {
self.with_col(|col| {
let mgr = MediaManager::new(&col.media_folder, &col.media_db)?;
let mut ctx = mgr.dbctx();
@ -56,7 +59,7 @@ impl MediaService for Backend {
})
}
fn empty_trash(&self, _input: pb::Empty) -> Result<pb::Empty> {
fn empty_trash(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
let mut handler = self.new_progress_handler();
let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true);
@ -69,7 +72,7 @@ impl MediaService for Backend {
.map(Into::into)
}
fn restore_trash(&self, _input: pb::Empty) -> Result<pb::Empty> {
fn restore_trash(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
let mut handler = self.new_progress_handler();
let progress_fn =
move |progress| handler.update(Progress::MediaCheck(progress as u32), true);


@ -38,7 +38,9 @@ use once_cell::sync::OnceCell;
use progress::AbortHandleSlot;
use prost::Message;
use slog::Logger;
use tokio::runtime::{self, Runtime};
use tokio::runtime::{
Runtime, {self},
};
use self::{
card::CardsService,
@ -60,7 +62,7 @@ use self::{
sync::{SyncService, SyncState},
tags::TagsService,
};
use crate::{backend::dbproxy::db_command_bytes, log, pb, prelude::*};
use crate::{backend::dbproxy::db_command_bytes, log, pb, pb::backend::ServiceIndex, prelude::*};
pub struct Backend {
col: Arc<Mutex<Option<Collection>>>,
@ -79,8 +81,8 @@ struct BackendState {
sync: SyncState,
}
pub fn init_backend(init_msg: &[u8], log: Option<Logger>) -> std::result::Result<Backend, String> {
let input: pb::BackendInit = match pb::BackendInit::decode(init_msg) {
pub fn init_backend(init_msg: &[u8], log: Option<Logger>) -> result::Result<Backend, String> {
let input: pb::backend::BackendInit = match pb::backend::BackendInit::decode(init_msg) {
Ok(req) => req,
Err(_) => return Err("couldn't decode init request".into()),
};
@ -119,30 +121,28 @@ impl Backend {
method: u32,
input: &[u8],
) -> result::Result<Vec<u8>, Vec<u8>> {
pb::ServiceIndex::from_i32(service as i32)
ServiceIndex::from_i32(service as i32)
.or_invalid("invalid service")
.and_then(|service| match service {
pb::ServiceIndex::Scheduler => SchedulerService::run_method(self, method, input),
pb::ServiceIndex::Decks => DecksService::run_method(self, method, input),
pb::ServiceIndex::Notes => NotesService::run_method(self, method, input),
pb::ServiceIndex::Notetypes => NotetypesService::run_method(self, method, input),
pb::ServiceIndex::Config => ConfigService::run_method(self, method, input),
pb::ServiceIndex::Sync => SyncService::run_method(self, method, input),
pb::ServiceIndex::Tags => TagsService::run_method(self, method, input),
pb::ServiceIndex::DeckConfig => DeckConfigService::run_method(self, method, input),
pb::ServiceIndex::CardRendering => {
ServiceIndex::Scheduler => SchedulerService::run_method(self, method, input),
ServiceIndex::Decks => DecksService::run_method(self, method, input),
ServiceIndex::Notes => NotesService::run_method(self, method, input),
ServiceIndex::Notetypes => NotetypesService::run_method(self, method, input),
ServiceIndex::Config => ConfigService::run_method(self, method, input),
ServiceIndex::Sync => SyncService::run_method(self, method, input),
ServiceIndex::Tags => TagsService::run_method(self, method, input),
ServiceIndex::DeckConfig => DeckConfigService::run_method(self, method, input),
ServiceIndex::CardRendering => {
CardRenderingService::run_method(self, method, input)
}
pb::ServiceIndex::Media => MediaService::run_method(self, method, input),
pb::ServiceIndex::Stats => StatsService::run_method(self, method, input),
pb::ServiceIndex::Search => SearchService::run_method(self, method, input),
pb::ServiceIndex::I18n => I18nService::run_method(self, method, input),
pb::ServiceIndex::Links => LinksService::run_method(self, method, input),
pb::ServiceIndex::Collection => CollectionService::run_method(self, method, input),
pb::ServiceIndex::Cards => CardsService::run_method(self, method, input),
pb::ServiceIndex::ImportExport => {
ImportExportService::run_method(self, method, input)
}
ServiceIndex::Media => MediaService::run_method(self, method, input),
ServiceIndex::Stats => StatsService::run_method(self, method, input),
ServiceIndex::Search => SearchService::run_method(self, method, input),
ServiceIndex::I18n => I18nService::run_method(self, method, input),
ServiceIndex::Links => LinksService::run_method(self, method, input),
ServiceIndex::Collection => CollectionService::run_method(self, method, input),
ServiceIndex::Cards => CardsService::run_method(self, method, input),
ServiceIndex::ImportExport => ImportExportService::run_method(self, method, input),
})
.map_err(|err| {
let backend_err = err.into_protobuf(&self.tr);
@ -152,7 +152,7 @@ impl Backend {
})
}
pub fn run_db_command_bytes(&self, input: &[u8]) -> std::result::Result<Vec<u8>, Vec<u8>> {
pub fn run_db_command_bytes(&self, input: &[u8]) -> result::Result<Vec<u8>, Vec<u8>> {
self.db_command(input).map_err(|err| {
let backend_err = err.into_protobuf(&self.tr);
let mut bytes = Vec::new();


@ -4,7 +4,7 @@
use std::collections::HashSet;
use super::Backend;
pub(super) use crate::pb::notes_service::Service as NotesService;
pub(super) use crate::pb::notes::notes_service::Service as NotesService;
use crate::{
cloze::add_cloze_numbers_in_string,
pb::{self as pb},
@ -12,7 +12,7 @@ use crate::{
};
impl NotesService for Backend {
fn new_note(&self, input: pb::NotetypeId) -> Result<pb::Note> {
fn new_note(&self, input: pb::notetypes::NotetypeId) -> Result<pb::notes::Note> {
let ntid = input.into();
self.with_col(|col| {
let nt = col.get_notetype(ntid)?.or_not_found(ntid)?;
@ -20,11 +20,11 @@ impl NotesService for Backend {
})
}
fn add_note(&self, input: pb::AddNoteRequest) -> Result<pb::AddNoteResponse> {
fn add_note(&self, input: pb::notes::AddNoteRequest) -> Result<pb::notes::AddNoteResponse> {
self.with_col(|col| {
let mut note: Note = input.note.or_invalid("no note provided")?.into();
let changes = col.add_note(&mut note, DeckId(input.deck_id))?;
Ok(pb::AddNoteResponse {
Ok(pb::notes::AddNoteResponse {
note_id: note.id.0,
changes: Some(changes.into()),
})
@ -33,15 +33,18 @@ impl NotesService for Backend {
fn defaults_for_adding(
&self,
input: pb::DefaultsForAddingRequest,
) -> Result<pb::DeckAndNotetype> {
input: pb::notes::DefaultsForAddingRequest,
) -> Result<pb::notes::DeckAndNotetype> {
self.with_col(|col| {
let home_deck: DeckId = input.home_deck_of_current_review_card.into();
col.defaults_for_adding(home_deck).map(Into::into)
})
}
fn default_deck_for_notetype(&self, input: pb::NotetypeId) -> Result<pb::DeckId> {
fn default_deck_for_notetype(
&self,
input: pb::notetypes::NotetypeId,
) -> Result<pb::decks::DeckId> {
self.with_col(|col| {
Ok(col
.default_deck_for_notetype(input.into())?
@ -50,7 +53,10 @@ impl NotesService for Backend {
})
}
fn update_notes(&self, input: pb::UpdateNotesRequest) -> Result<pb::OpChanges> {
fn update_notes(
&self,
input: pb::notes::UpdateNotesRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| {
let notes = input
.notes
@ -62,12 +68,15 @@ impl NotesService for Backend {
.map(Into::into)
}
fn get_note(&self, input: pb::NoteId) -> Result<pb::Note> {
fn get_note(&self, input: pb::notes::NoteId) -> Result<pb::notes::Note> {
let nid = input.into();
self.with_col(|col| col.storage.get_note(nid)?.or_not_found(nid).map(Into::into))
}
fn remove_notes(&self, input: pb::RemoveNotesRequest) -> Result<pb::OpChangesWithCount> {
fn remove_notes(
&self,
input: pb::notes::RemoveNotesRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| {
if !input.note_ids.is_empty() {
col.remove_notes(
@ -91,20 +100,23 @@ impl NotesService for Backend {
})
}
fn cloze_numbers_in_note(&self, note: pb::Note) -> Result<pb::ClozeNumbersInNoteResponse> {
fn cloze_numbers_in_note(
&self,
note: pb::notes::Note,
) -> Result<pb::notes::ClozeNumbersInNoteResponse> {
let mut set = HashSet::with_capacity(4);
for field in &note.fields {
add_cloze_numbers_in_string(field, &mut set);
}
Ok(pb::ClozeNumbersInNoteResponse {
Ok(pb::notes::ClozeNumbersInNoteResponse {
numbers: set.into_iter().map(|n| n as u32).collect(),
})
}
fn after_note_updates(
&self,
input: pb::AfterNoteUpdatesRequest,
) -> Result<pb::OpChangesWithCount> {
input: pb::notes::AfterNoteUpdatesRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| {
col.after_note_updates(
&to_note_ids(input.nids),
@ -117,35 +129,41 @@ impl NotesService for Backend {
fn field_names_for_notes(
&self,
input: pb::FieldNamesForNotesRequest,
) -> Result<pb::FieldNamesForNotesResponse> {
input: pb::notes::FieldNamesForNotesRequest,
) -> Result<pb::notes::FieldNamesForNotesResponse> {
self.with_col(|col| {
let nids: Vec<_> = input.nids.into_iter().map(NoteId).collect();
col.storage
.field_names_for_notes(&nids)
.map(|fields| pb::FieldNamesForNotesResponse { fields })
.map(|fields| pb::notes::FieldNamesForNotesResponse { fields })
})
}
fn note_fields_check(&self, input: pb::Note) -> Result<pb::NoteFieldsCheckResponse> {
fn note_fields_check(
&self,
input: pb::notes::Note,
) -> Result<pb::notes::NoteFieldsCheckResponse> {
let note: Note = input.into();
self.with_col(|col| {
col.note_fields_check(&note)
.map(|r| pb::NoteFieldsCheckResponse { state: r as i32 })
.map(|r| pb::notes::NoteFieldsCheckResponse { state: r as i32 })
})
}
fn cards_of_note(&self, input: pb::NoteId) -> Result<pb::CardIds> {
fn cards_of_note(&self, input: pb::notes::NoteId) -> Result<pb::cards::CardIds> {
self.with_col(|col| {
col.storage
.all_card_ids_of_note_in_template_order(NoteId(input.nid))
.map(|v| pb::CardIds {
.map(|v| pb::cards::CardIds {
cids: v.into_iter().map(Into::into).collect(),
})
})
}
fn get_single_notetype_of_notes(&self, input: pb::NoteIds) -> Result<pb::NotetypeId> {
fn get_single_notetype_of_notes(
&self,
input: pb::notes::NoteIds,
) -> Result<pb::notetypes::NotetypeId> {
self.with_col(|col| {
col.get_single_notetype_of_notes(&input.note_ids.into_newtype(NoteId))
.map(Into::into)
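
In the notes backend above, generated protobuf types are now addressed through their per-file modules (pb::notes::Note, pb::notetypes::NotetypeId, pb::collection::OpChanges), and the generated service trait is re-exported from pb::notes::notes_service rather than from one merged namespace. A rough standalone sketch of that layout; the message fields and trait method below are invented stand-ins, not the real generated code:

// Hypothetical module layout; field and method names are invented.
mod pb {
    pub mod notes {
        pub struct Note {
            pub id: i64,
            pub fields: Vec<String>,
        }
        pub mod notes_service {
            // Stand-in for the prost-generated service trait.
            pub trait Service {
                fn new_note(&self) -> super::Note;
            }
        }
    }
}

// Mirrors `pub(super) use crate::pb::notes::notes_service::Service as NotesService;`
use crate::pb::notes::notes_service::Service as NotesService;

struct Backend;

impl NotesService for Backend {
    fn new_note(&self) -> pb::notes::Note {
        pb::notes::Note {
            id: 0,
            fields: vec![String::new()],
        }
    }
}

fn main() {
    let note = Backend.new_note();
    assert_eq!(note.fields.len(), 1);
}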

View File

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
pub(super) use crate::pb::notetypes_service::Service as NotetypesService;
pub(super) use crate::pb::notetypes::notetypes_service::Service as NotetypesService;
use crate::{
config::get_aux_notetype_config_key,
notetype::{
@ -13,7 +13,10 @@ use crate::{
};
impl NotetypesService for Backend {
fn add_notetype(&self, input: pb::Notetype) -> Result<pb::OpChangesWithId> {
fn add_notetype(
&self,
input: pb::notetypes::Notetype,
) -> Result<pb::collection::OpChangesWithId> {
let mut notetype: Notetype = input.into();
self.with_col(|col| {
Ok(col
@ -23,13 +26,16 @@ impl NotetypesService for Backend {
})
}
fn update_notetype(&self, input: pb::Notetype) -> Result<pb::OpChanges> {
fn update_notetype(&self, input: pb::notetypes::Notetype) -> Result<pb::collection::OpChanges> {
let mut notetype: Notetype = input.into();
self.with_col(|col| col.update_notetype(&mut notetype, false))
.map(Into::into)
}
fn add_notetype_legacy(&self, input: pb::Json) -> Result<pb::OpChangesWithId> {
fn add_notetype_legacy(
&self,
input: pb::generic::Json,
) -> Result<pb::collection::OpChangesWithId> {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut notetype: Notetype = legacy.into();
self.with_col(|col| {
@ -40,7 +46,10 @@ impl NotetypesService for Backend {
})
}
fn update_notetype_legacy(&self, input: pb::Json) -> Result<pb::OpChanges> {
fn update_notetype_legacy(
&self,
input: pb::generic::Json,
) -> Result<pb::collection::OpChanges> {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut notetype: Notetype = legacy.into();
self.with_col(|col| col.update_notetype(&mut notetype, false))
@ -49,8 +58,8 @@ impl NotetypesService for Backend {
fn add_or_update_notetype(
&self,
input: pb::AddOrUpdateNotetypeRequest,
) -> Result<pb::NotetypeId> {
input: pb::notetypes::AddOrUpdateNotetypeRequest,
) -> Result<pb::notetypes::NotetypeId> {
self.with_col(|col| {
let legacy: NotetypeSchema11 = serde_json::from_slice(&input.json)?;
let mut nt: Notetype = legacy.into();
@ -64,11 +73,14 @@ impl NotetypesService for Backend {
} else {
col.add_or_update_notetype_with_existing_id(&mut nt, input.skip_checks)?;
}
Ok(pb::NotetypeId { ntid: nt.id.0 })
Ok(pb::notetypes::NotetypeId { ntid: nt.id.0 })
})
}
fn get_stock_notetype_legacy(&self, input: pb::StockNotetype) -> Result<pb::Json> {
fn get_stock_notetype_legacy(
&self,
input: pb::notetypes::StockNotetype,
) -> Result<pb::generic::Json> {
// fixme: use individual functions instead of full vec
let mut all = all_stock_notetypes(&self.tr);
let idx = (input.kind as usize).min(all.len() - 1);
@ -79,7 +91,7 @@ impl NotetypesService for Backend {
.map(Into::into)
}
fn get_notetype(&self, input: pb::NotetypeId) -> Result<pb::Notetype> {
fn get_notetype(&self, input: pb::notetypes::NotetypeId) -> Result<pb::notetypes::Notetype> {
let ntid = input.into();
self.with_col(|col| {
col.storage
@ -89,7 +101,7 @@ impl NotetypesService for Backend {
})
}
fn get_notetype_legacy(&self, input: pb::NotetypeId) -> Result<pb::Json> {
fn get_notetype_legacy(&self, input: pb::notetypes::NotetypeId) -> Result<pb::generic::Json> {
let ntid = input.into();
self.with_col(|col| {
let schema11: NotetypeSchema11 =
@ -98,56 +110,73 @@ impl NotetypesService for Backend {
})
}
fn get_notetype_names(&self, _input: pb::Empty) -> Result<pb::NotetypeNames> {
fn get_notetype_names(
&self,
_input: pb::generic::Empty,
) -> Result<pb::notetypes::NotetypeNames> {
self.with_col(|col| {
let entries: Vec<_> = col
.storage
.get_all_notetype_names()?
.into_iter()
.map(|(id, name)| pb::NotetypeNameId { id: id.0, name })
.map(|(id, name)| pb::notetypes::NotetypeNameId { id: id.0, name })
.collect();
Ok(pb::NotetypeNames { entries })
Ok(pb::notetypes::NotetypeNames { entries })
})
}
fn get_notetype_names_and_counts(&self, _input: pb::Empty) -> Result<pb::NotetypeUseCounts> {
fn get_notetype_names_and_counts(
&self,
_input: pb::generic::Empty,
) -> Result<pb::notetypes::NotetypeUseCounts> {
self.with_col(|col| {
let entries: Vec<_> = col
.storage
.get_notetype_use_counts()?
.into_iter()
.map(|(id, name, use_count)| pb::NotetypeNameIdUseCount {
id: id.0,
name,
use_count,
})
.map(
|(id, name, use_count)| pb::notetypes::NotetypeNameIdUseCount {
id: id.0,
name,
use_count,
},
)
.collect();
Ok(pb::NotetypeUseCounts { entries })
Ok(pb::notetypes::NotetypeUseCounts { entries })
})
}
fn get_notetype_id_by_name(&self, input: pb::String) -> Result<pb::NotetypeId> {
fn get_notetype_id_by_name(
&self,
input: pb::generic::String,
) -> Result<pb::notetypes::NotetypeId> {
self.with_col(|col| {
col.storage
.get_notetype_id(&input.val)
.and_then(|nt| nt.or_not_found(input.val))
.map(|ntid| pb::NotetypeId { ntid: ntid.0 })
.map(|ntid| pb::notetypes::NotetypeId { ntid: ntid.0 })
})
}
fn remove_notetype(&self, input: pb::NotetypeId) -> Result<pb::OpChanges> {
fn remove_notetype(
&self,
input: pb::notetypes::NotetypeId,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.remove_notetype(input.into()))
.map(Into::into)
}
fn get_aux_notetype_config_key(&self, input: pb::GetAuxConfigKeyRequest) -> Result<pb::String> {
fn get_aux_notetype_config_key(
&self,
input: pb::notetypes::GetAuxConfigKeyRequest,
) -> Result<pb::generic::String> {
Ok(get_aux_notetype_config_key(input.id.into(), &input.key).into())
}
fn get_aux_template_config_key(
&self,
input: pb::GetAuxTemplateConfigKeyRequest,
) -> Result<pb::String> {
input: pb::notetypes::GetAuxTemplateConfigKeyRequest,
) -> Result<pb::generic::String> {
self.with_col(|col| {
col.get_aux_template_config_key(
input.notetype_id.into(),
@ -160,26 +189,29 @@ impl NotetypesService for Backend {
fn get_change_notetype_info(
&self,
input: pb::GetChangeNotetypeInfoRequest,
) -> Result<pb::ChangeNotetypeInfo> {
input: pb::notetypes::GetChangeNotetypeInfoRequest,
) -> Result<pb::notetypes::ChangeNotetypeInfo> {
self.with_col(|col| {
col.notetype_change_info(input.old_notetype_id.into(), input.new_notetype_id.into())
.map(Into::into)
})
}
fn change_notetype(&self, input: pb::ChangeNotetypeRequest) -> Result<pb::OpChanges> {
fn change_notetype(
&self,
input: pb::notetypes::ChangeNotetypeRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.change_notetype_of_notes(input.into()).map(Into::into))
}
fn get_field_names(&self, input: pb::NotetypeId) -> Result<pb::StringList> {
fn get_field_names(&self, input: pb::notetypes::NotetypeId) -> Result<pb::generic::StringList> {
self.with_col(|col| col.storage.get_field_names(input.into()))
.map(Into::into)
}
}
impl From<pb::Notetype> for Notetype {
fn from(n: pb::Notetype) -> Self {
impl From<pb::notetypes::Notetype> for Notetype {
fn from(n: pb::notetypes::Notetype) -> Self {
Notetype {
id: n.id.into(),
name: n.name,
@ -192,9 +224,9 @@ impl From<pb::Notetype> for Notetype {
}
}
impl From<NotetypeChangeInfo> for pb::ChangeNotetypeInfo {
impl From<NotetypeChangeInfo> for pb::notetypes::ChangeNotetypeInfo {
fn from(i: NotetypeChangeInfo) -> Self {
pb::ChangeNotetypeInfo {
pb::notetypes::ChangeNotetypeInfo {
old_notetype_name: i.old_notetype_name,
old_field_names: i.old_field_names,
old_template_names: i.old_template_names,
@ -205,8 +237,8 @@ impl From<NotetypeChangeInfo> for pb::ChangeNotetypeInfo {
}
}
impl From<pb::ChangeNotetypeRequest> for ChangeNotetypeInput {
fn from(i: pb::ChangeNotetypeRequest) -> Self {
impl From<pb::notetypes::ChangeNotetypeRequest> for ChangeNotetypeInput {
fn from(i: pb::notetypes::ChangeNotetypeRequest) -> Self {
ChangeNotetypeInput {
current_schema: i.current_schema.into(),
note_ids: i.note_ids.into_newtype(NoteId),
@ -234,9 +266,9 @@ impl From<pb::ChangeNotetypeRequest> for ChangeNotetypeInput {
}
}
impl From<ChangeNotetypeInput> for pb::ChangeNotetypeRequest {
impl From<ChangeNotetypeInput> for pb::notetypes::ChangeNotetypeRequest {
fn from(i: ChangeNotetypeInput) -> Self {
pb::ChangeNotetypeRequest {
pb::notetypes::ChangeNotetypeRequest {
current_schema: i.current_schema.into(),
note_ids: i.note_ids.into_iter().map(Into::into).collect(),
old_notetype_name: i.old_notetype_name,

View File

@ -8,9 +8,9 @@ use crate::{
undo::{UndoOutput, UndoStatus},
};
impl From<OpChanges> for pb::OpChanges {
impl From<OpChanges> for pb::collection::OpChanges {
fn from(c: OpChanges) -> Self {
pb::OpChanges {
pb::collection::OpChanges {
card: c.changes.card,
note: c.changes.note,
deck: c.changes.deck,
@ -28,8 +28,8 @@ impl From<OpChanges> for pb::OpChanges {
}
impl UndoStatus {
pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::UndoStatus {
pb::UndoStatus {
pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::collection::UndoStatus {
pb::collection::UndoStatus {
undo: self.undo.map(|op| op.describe(tr)).unwrap_or_default(),
redo: self.redo.map(|op| op.describe(tr)).unwrap_or_default(),
last_step: self.last_step as u32,
@ -37,24 +37,24 @@ impl UndoStatus {
}
}
impl From<OpOutput<()>> for pb::OpChanges {
impl From<OpOutput<()>> for pb::collection::OpChanges {
fn from(o: OpOutput<()>) -> Self {
o.changes.into()
}
}
impl From<OpOutput<usize>> for pb::OpChangesWithCount {
impl From<OpOutput<usize>> for pb::collection::OpChangesWithCount {
fn from(out: OpOutput<usize>) -> Self {
pb::OpChangesWithCount {
pb::collection::OpChangesWithCount {
count: out.output as u32,
changes: Some(out.changes.into()),
}
}
}
impl From<OpOutput<i64>> for pb::OpChangesWithId {
impl From<OpOutput<i64>> for pb::collection::OpChangesWithId {
fn from(out: OpOutput<i64>) -> Self {
pb::OpChangesWithId {
pb::collection::OpChangesWithId {
id: out.output,
changes: Some(out.changes.into()),
}
@ -62,8 +62,8 @@ impl From<OpOutput<i64>> for pb::OpChangesWithId {
}
impl OpOutput<UndoOutput> {
pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::OpChangesAfterUndo {
pb::OpChangesAfterUndo {
pub(crate) fn into_protobuf(self, tr: &I18n) -> pb::collection::OpChangesAfterUndo {
pb::collection::OpChangesAfterUndo {
changes: Some(self.changes.into()),
operation: self.output.undone_op.describe(tr),
reverted_to_timestamp: self.output.reverted_to.0,
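
The conversions above keep the same From impls but point them at the nested targets such as pb::collection::OpChangesWithCount. A compact, self-contained analogue of one such impl; the field set is simplified relative to the real messages:

// Simplified shapes; the real messages carry more fields.
mod pb {
    pub mod collection {
        #[derive(Debug, Default, PartialEq)]
        pub struct OpChanges {
            pub card: bool,
            pub note: bool,
        }

        #[derive(Debug, PartialEq)]
        pub struct OpChangesWithCount {
            pub count: u32,
            pub changes: Option<OpChanges>,
        }
    }
}

// Domain-side stand-in for OpOutput<usize>.
struct OpOutput<T> {
    output: T,
    changes: pb::collection::OpChanges,
}

impl From<OpOutput<usize>> for pb::collection::OpChangesWithCount {
    fn from(out: OpOutput<usize>) -> Self {
        pb::collection::OpChangesWithCount {
            count: out.output as u32,
            // The real code converts a domain OpChanges here; this toy
            // version already holds the protobuf type.
            changes: Some(out.changes),
        }
    }
}

fn main() {
    let out = OpOutput {
        output: 2,
        changes: pb::collection::OpChanges::default(),
    };
    let proto: pb::collection::OpChangesWithCount = out.into();
    assert_eq!(proto.count, 2);
}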

View File

@ -55,17 +55,21 @@ pub(super) enum Progress {
Export(ExportProgress),
}
pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Progress {
pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::collection::Progress {
let progress = if let Some(progress) = progress {
match progress {
Progress::MediaSync(p) => pb::progress::Value::MediaSync(media_sync_progress(p, tr)),
Progress::MediaCheck(n) => {
pb::progress::Value::MediaCheck(tr.media_check_checked(n).into())
Progress::MediaSync(p) => {
pb::collection::progress::Value::MediaSync(media_sync_progress(p, tr))
}
Progress::MediaCheck(n) => {
pb::collection::progress::Value::MediaCheck(tr.media_check_checked(n).into())
}
Progress::FullSync(p) => {
pb::collection::progress::Value::FullSync(pb::collection::progress::FullSync {
transferred: p.transferred_bytes as u32,
total: p.total_bytes as u32,
})
}
Progress::FullSync(p) => pb::progress::Value::FullSync(pb::progress::FullSync {
transferred: p.transferred_bytes as u32,
total: p.total_bytes as u32,
}),
Progress::NormalSync(p) => {
let stage = match p.stage {
SyncStage::Connecting => tr.sync_syncing(),
@ -79,7 +83,7 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Pr
let removed = tr
.sync_media_removed_count(p.local_remove, p.remote_remove)
.into();
pb::progress::Value::NormalSync(pb::progress::NormalSync {
pb::collection::progress::Value::NormalSync(pb::collection::progress::NormalSync {
stage,
added,
removed,
@ -100,13 +104,15 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Pr
DatabaseCheckProgress::History => tr.database_check_checking_history(),
}
.to_string();
pb::progress::Value::DatabaseCheck(pb::progress::DatabaseCheck {
stage,
stage_total,
stage_current,
})
pb::collection::progress::Value::DatabaseCheck(
pb::collection::progress::DatabaseCheck {
stage,
stage_total,
stage_current,
},
)
}
Progress::Import(progress) => pb::progress::Value::Importing(
Progress::Import(progress) => pb::collection::progress::Value::Importing(
match progress {
ImportProgress::File => tr.importing_importing_file(),
ImportProgress::Media(n) => tr.importing_processed_media_file(n),
@ -117,7 +123,7 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Pr
}
.into(),
),
Progress::Export(progress) => pb::progress::Value::Exporting(
Progress::Export(progress) => pb::collection::progress::Value::Exporting(
match progress {
ExportProgress::File => tr.exporting_exporting_file(),
ExportProgress::Media(n) => tr.exporting_processed_media_files(n),
@ -129,15 +135,15 @@ pub(super) fn progress_to_proto(progress: Option<Progress>, tr: &I18n) -> pb::Pr
),
}
} else {
pb::progress::Value::None(pb::Empty {})
pb::collection::progress::Value::None(pb::generic::Empty {})
};
pb::Progress {
pb::collection::Progress {
value: Some(progress),
}
}
fn media_sync_progress(p: MediaSyncProgress, tr: &I18n) -> pb::progress::MediaSync {
pb::progress::MediaSync {
fn media_sync_progress(p: MediaSyncProgress, tr: &I18n) -> pb::collection::progress::MediaSync {
pb::collection::progress::MediaSync {
checked: tr.sync_media_checked_count(p.checked).into(),
added: tr
.sync_media_added_count(p.uploaded_files, p.downloaded_files)

View File

@ -12,8 +12,8 @@ use crate::{
},
};
impl From<pb::CardAnswer> for CardAnswer {
fn from(mut answer: pb::CardAnswer) -> Self {
impl From<pb::scheduler::CardAnswer> for CardAnswer {
fn from(mut answer: pb::scheduler::CardAnswer) -> Self {
let mut new_state = mem::take(&mut answer.new_state).unwrap_or_default();
let custom_data = mem::take(&mut new_state.custom_data);
CardAnswer {
@ -28,34 +28,38 @@ impl From<pb::CardAnswer> for CardAnswer {
}
}
impl From<pb::card_answer::Rating> for Rating {
fn from(rating: pb::card_answer::Rating) -> Self {
impl From<pb::scheduler::card_answer::Rating> for Rating {
fn from(rating: pb::scheduler::card_answer::Rating) -> Self {
match rating {
pb::card_answer::Rating::Again => Rating::Again,
pb::card_answer::Rating::Hard => Rating::Hard,
pb::card_answer::Rating::Good => Rating::Good,
pb::card_answer::Rating::Easy => Rating::Easy,
pb::scheduler::card_answer::Rating::Again => Rating::Again,
pb::scheduler::card_answer::Rating::Hard => Rating::Hard,
pb::scheduler::card_answer::Rating::Good => Rating::Good,
pb::scheduler::card_answer::Rating::Easy => Rating::Easy,
}
}
}
impl From<QueuedCard> for pb::queued_cards::QueuedCard {
impl From<QueuedCard> for pb::scheduler::queued_cards::QueuedCard {
fn from(queued_card: QueuedCard) -> Self {
Self {
card: Some(queued_card.card.into()),
states: Some(queued_card.states.into()),
queue: match queued_card.kind {
crate::scheduler::queue::QueueEntryKind::New => pb::queued_cards::Queue::New,
crate::scheduler::queue::QueueEntryKind::Review => pb::queued_cards::Queue::Review,
crate::scheduler::queue::QueueEntryKind::New => {
pb::scheduler::queued_cards::Queue::New
}
crate::scheduler::queue::QueueEntryKind::Review => {
pb::scheduler::queued_cards::Queue::Review
}
crate::scheduler::queue::QueueEntryKind::Learning => {
pb::queued_cards::Queue::Learning
pb::scheduler::queued_cards::Queue::Learning
}
} as i32,
}
}
}
impl From<QueuedCards> for pb::QueuedCards {
impl From<QueuedCards> for pb::scheduler::QueuedCards {
fn from(queued_cards: QueuedCards) -> Self {
Self {
cards: queued_cards.cards.into_iter().map(Into::into).collect(),

View File

@ -5,7 +5,7 @@ mod answering;
mod states;
use super::Backend;
pub(super) use crate::pb::scheduler_service::Service as SchedulerService;
pub(super) use crate::pb::scheduler::scheduler_service::Service as SchedulerService;
use crate::{
pb,
prelude::*,
@ -19,7 +19,10 @@ use crate::{
impl SchedulerService for Backend {
/// This behaves like _updateCutoff() in older code - it also unburies at the start of
/// a new day.
fn sched_timing_today(&self, _input: pb::Empty) -> Result<pb::SchedTimingTodayResponse> {
fn sched_timing_today(
&self,
_input: pb::generic::Empty,
) -> Result<pb::scheduler::SchedTimingTodayResponse> {
self.with_col(|col| {
let timing = col.timing_today()?;
col.unbury_if_day_rolled_over(timing)?;
@ -28,16 +31,19 @@ impl SchedulerService for Backend {
}
/// Fetch data from DB and return rendered string.
fn studied_today(&self, _input: pb::Empty) -> Result<pb::String> {
fn studied_today(&self, _input: pb::generic::Empty) -> Result<pb::generic::String> {
self.with_col(|col| col.studied_today().map(Into::into))
}
/// Message rendering only, for old graphs.
fn studied_today_message(&self, input: pb::StudiedTodayMessageRequest) -> Result<pb::String> {
fn studied_today_message(
&self,
input: pb::scheduler::StudiedTodayMessageRequest,
) -> Result<pb::generic::String> {
Ok(studied_today(input.cards, input.seconds as f32, &self.tr).into())
}
fn update_stats(&self, input: pb::UpdateStatsRequest) -> Result<pb::Empty> {
fn update_stats(&self, input: pb::scheduler::UpdateStatsRequest) -> Result<pb::generic::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
let today = col.current_due_day(0)?;
@ -47,7 +53,10 @@ impl SchedulerService for Backend {
})
}
fn extend_limits(&self, input: pb::ExtendLimitsRequest) -> Result<pb::Empty> {
fn extend_limits(
&self,
input: pb::scheduler::ExtendLimitsRequest,
) -> Result<pb::generic::Empty> {
self.with_col(|col| {
col.transact_no_undo(|col| {
let today = col.current_due_day(0)?;
@ -64,20 +73,32 @@ impl SchedulerService for Backend {
})
}
fn counts_for_deck_today(&self, input: pb::DeckId) -> Result<pb::CountsForDeckTodayResponse> {
fn counts_for_deck_today(
&self,
input: pb::decks::DeckId,
) -> Result<pb::scheduler::CountsForDeckTodayResponse> {
self.with_col(|col| col.counts_for_deck_today(input.did.into()))
}
fn congrats_info(&self, _input: pb::Empty) -> Result<pb::CongratsInfoResponse> {
fn congrats_info(
&self,
_input: pb::generic::Empty,
) -> Result<pb::scheduler::CongratsInfoResponse> {
self.with_col(|col| col.congrats_info())
}
fn restore_buried_and_suspended_cards(&self, input: pb::CardIds) -> Result<pb::OpChanges> {
fn restore_buried_and_suspended_cards(
&self,
input: pb::cards::CardIds,
) -> Result<pb::collection::OpChanges> {
let cids: Vec<_> = input.into();
self.with_col(|col| col.unbury_or_unsuspend_cards(&cids).map(Into::into))
}
fn unbury_deck(&self, input: pb::UnburyDeckRequest) -> Result<pb::OpChanges> {
fn unbury_deck(
&self,
input: pb::scheduler::UnburyDeckRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| {
col.unbury_deck(input.deck_id.into(), input.mode())
.map(Into::into)
@ -86,8 +107,8 @@ impl SchedulerService for Backend {
fn bury_or_suspend_cards(
&self,
input: pb::BuryOrSuspendCardsRequest,
) -> Result<pb::OpChangesWithCount> {
input: pb::scheduler::BuryOrSuspendCardsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| {
let mode = input.mode();
let cids = if input.card_ids.is_empty() {
@ -100,15 +121,21 @@ impl SchedulerService for Backend {
})
}
fn empty_filtered_deck(&self, input: pb::DeckId) -> Result<pb::OpChanges> {
fn empty_filtered_deck(&self, input: pb::decks::DeckId) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.empty_filtered_deck(input.did.into()).map(Into::into))
}
fn rebuild_filtered_deck(&self, input: pb::DeckId) -> Result<pb::OpChangesWithCount> {
fn rebuild_filtered_deck(
&self,
input: pb::decks::DeckId,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| col.rebuild_filtered_deck(input.did.into()).map(Into::into))
}
fn schedule_cards_as_new(&self, input: pb::ScheduleCardsAsNewRequest) -> Result<pb::OpChanges> {
fn schedule_cards_as_new(
&self,
input: pb::scheduler::ScheduleCardsAsNewRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| {
let cids = input.card_ids.into_newtype(CardId);
col.reschedule_cards_as_new(
@ -118,7 +145,7 @@ impl SchedulerService for Backend {
input.reset_counts,
input
.context
.and_then(pb::schedule_cards_as_new_request::Context::from_i32),
.and_then(pb::scheduler::schedule_cards_as_new_request::Context::from_i32),
)
.map(Into::into)
})
@ -126,19 +153,25 @@ impl SchedulerService for Backend {
fn schedule_cards_as_new_defaults(
&self,
input: pb::ScheduleCardsAsNewDefaultsRequest,
) -> Result<pb::ScheduleCardsAsNewDefaultsResponse> {
input: pb::scheduler::ScheduleCardsAsNewDefaultsRequest,
) -> Result<pb::scheduler::ScheduleCardsAsNewDefaultsResponse> {
self.with_col(|col| Ok(col.reschedule_cards_as_new_defaults(input.context())))
}
fn set_due_date(&self, input: pb::SetDueDateRequest) -> Result<pb::OpChanges> {
fn set_due_date(
&self,
input: pb::scheduler::SetDueDateRequest,
) -> Result<pb::collection::OpChanges> {
let config = input.config_key.map(|v| v.key().into());
let days = input.days;
let cids = input.card_ids.into_newtype(CardId);
self.with_col(|col| col.set_due_date(&cids, &days, config).map(Into::into))
}
fn sort_cards(&self, input: pb::SortCardsRequest) -> Result<pb::OpChangesWithCount> {
fn sort_cards(
&self,
input: pb::scheduler::SortCardsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
let cids = input.card_ids.into_newtype(CardId);
let (start, step, random, shift) = (
input.starting_from,
@ -157,66 +190,86 @@ impl SchedulerService for Backend {
})
}
fn reposition_defaults(&self, _input: pb::Empty) -> Result<pb::RepositionDefaultsResponse> {
fn reposition_defaults(
&self,
_input: pb::generic::Empty,
) -> Result<pb::scheduler::RepositionDefaultsResponse> {
self.with_col(|col| Ok(col.reposition_defaults()))
}
fn sort_deck(&self, input: pb::SortDeckRequest) -> Result<pb::OpChangesWithCount> {
fn sort_deck(
&self,
input: pb::scheduler::SortDeckRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| {
col.sort_deck_legacy(input.deck_id.into(), input.randomize)
.map(Into::into)
})
}
fn get_scheduling_states(&self, input: pb::CardId) -> Result<pb::SchedulingStates> {
fn get_scheduling_states(
&self,
input: pb::cards::CardId,
) -> Result<pb::scheduler::SchedulingStates> {
let cid: CardId = input.into();
self.with_col(|col| col.get_scheduling_states(cid))
.map(Into::into)
}
fn describe_next_states(&self, input: pb::SchedulingStates) -> Result<pb::StringList> {
fn describe_next_states(
&self,
input: pb::scheduler::SchedulingStates,
) -> Result<pb::generic::StringList> {
let states: SchedulingStates = input.into();
self.with_col(|col| col.describe_next_states(states))
.map(Into::into)
}
fn state_is_leech(&self, input: pb::SchedulingState) -> Result<pb::Bool> {
fn state_is_leech(&self, input: pb::scheduler::SchedulingState) -> Result<pb::generic::Bool> {
let state: CardState = input.into();
Ok(state.leeched().into())
}
fn answer_card(&self, input: pb::CardAnswer) -> Result<pb::OpChanges> {
fn answer_card(&self, input: pb::scheduler::CardAnswer) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.answer_card(&mut input.into()))
.map(Into::into)
}
fn upgrade_scheduler(&self, _input: pb::Empty) -> Result<pb::Empty> {
fn upgrade_scheduler(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
self.with_col(|col| col.transact_no_undo(|col| col.upgrade_to_v2_scheduler()))
.map(Into::into)
}
fn get_queued_cards(&self, input: pb::GetQueuedCardsRequest) -> Result<pb::QueuedCards> {
fn get_queued_cards(
&self,
input: pb::scheduler::GetQueuedCardsRequest,
) -> Result<pb::scheduler::QueuedCards> {
self.with_col(|col| {
col.get_queued_cards(input.fetch_limit as usize, input.intraday_learning_only)
.map(Into::into)
})
}
fn custom_study(&self, input: pb::CustomStudyRequest) -> Result<pb::OpChanges> {
fn custom_study(
&self,
input: pb::scheduler::CustomStudyRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| col.custom_study(input)).map(Into::into)
}
fn custom_study_defaults(
&self,
input: pb::CustomStudyDefaultsRequest,
) -> Result<pb::CustomStudyDefaultsResponse> {
input: pb::scheduler::CustomStudyDefaultsRequest,
) -> Result<pb::scheduler::CustomStudyDefaultsResponse> {
self.with_col(|col| col.custom_study_defaults(input.deck_id.into()))
}
}
impl From<crate::scheduler::timing::SchedTimingToday> for pb::SchedTimingTodayResponse {
fn from(t: crate::scheduler::timing::SchedTimingToday) -> pb::SchedTimingTodayResponse {
pb::SchedTimingTodayResponse {
impl From<crate::scheduler::timing::SchedTimingToday> for pb::scheduler::SchedTimingTodayResponse {
fn from(
t: crate::scheduler::timing::SchedTimingToday,
) -> pb::scheduler::SchedTimingTodayResponse {
pb::scheduler::SchedTimingTodayResponse {
days_elapsed: t.days_elapsed,
next_day_at: t.next_day_at.0,
}

View File

@ -3,31 +3,30 @@
use crate::{pb, scheduler::states::FilteredState};
impl From<FilteredState> for pb::scheduling_state::Filtered {
impl From<FilteredState> for pb::scheduler::scheduling_state::Filtered {
fn from(state: FilteredState) -> Self {
pb::scheduling_state::Filtered {
pb::scheduler::scheduling_state::Filtered {
value: Some(match state {
FilteredState::Preview(state) => {
pb::scheduling_state::filtered::Value::Preview(state.into())
pb::scheduler::scheduling_state::filtered::Value::Preview(state.into())
}
FilteredState::Rescheduling(state) => {
pb::scheduling_state::filtered::Value::Rescheduling(state.into())
pb::scheduler::scheduling_state::filtered::Value::Rescheduling(state.into())
}
}),
}
}
}
impl From<pb::scheduling_state::Filtered> for FilteredState {
fn from(state: pb::scheduling_state::Filtered) -> Self {
match state
.value
.unwrap_or_else(|| pb::scheduling_state::filtered::Value::Preview(Default::default()))
{
pb::scheduling_state::filtered::Value::Preview(state) => {
impl From<pb::scheduler::scheduling_state::Filtered> for FilteredState {
fn from(state: pb::scheduler::scheduling_state::Filtered) -> Self {
match state.value.unwrap_or_else(|| {
pb::scheduler::scheduling_state::filtered::Value::Preview(Default::default())
}) {
pb::scheduler::scheduling_state::filtered::Value::Preview(state) => {
FilteredState::Preview(state.into())
}
pb::scheduling_state::filtered::Value::Rescheduling(state) => {
pb::scheduler::scheduling_state::filtered::Value::Rescheduling(state) => {
FilteredState::Rescheduling(state.into())
}
}

View File

@ -3,8 +3,8 @@
use crate::{pb, scheduler::states::LearnState};
impl From<pb::scheduling_state::Learning> for LearnState {
fn from(state: pb::scheduling_state::Learning) -> Self {
impl From<pb::scheduler::scheduling_state::Learning> for LearnState {
fn from(state: pb::scheduler::scheduling_state::Learning) -> Self {
LearnState {
remaining_steps: state.remaining_steps,
scheduled_secs: state.scheduled_secs,
@ -12,9 +12,9 @@ impl From<pb::scheduling_state::Learning> for LearnState {
}
}
impl From<LearnState> for pb::scheduling_state::Learning {
impl From<LearnState> for pb::scheduler::scheduling_state::Learning {
fn from(state: LearnState) -> Self {
pb::scheduling_state::Learning {
pb::scheduler::scheduling_state::Learning {
remaining_steps: state.remaining_steps,
scheduled_secs: state.scheduled_secs,
}

View File

@ -15,9 +15,9 @@ use crate::{
scheduler::states::{CardState, NewState, NormalState, SchedulingStates},
};
impl From<SchedulingStates> for pb::SchedulingStates {
impl From<SchedulingStates> for pb::scheduler::SchedulingStates {
fn from(choices: SchedulingStates) -> Self {
pb::SchedulingStates {
pb::scheduler::SchedulingStates {
current: Some(choices.current.into()),
again: Some(choices.again.into()),
hard: Some(choices.hard.into()),
@ -27,8 +27,8 @@ impl From<SchedulingStates> for pb::SchedulingStates {
}
}
impl From<pb::SchedulingStates> for SchedulingStates {
fn from(choices: pb::SchedulingStates) -> Self {
impl From<pb::scheduler::SchedulingStates> for SchedulingStates {
fn from(choices: pb::scheduler::SchedulingStates) -> Self {
SchedulingStates {
current: choices.current.unwrap_or_default().into(),
again: choices.again.unwrap_or_default().into(),
@ -39,24 +39,30 @@ impl From<pb::SchedulingStates> for SchedulingStates {
}
}
impl From<CardState> for pb::SchedulingState {
impl From<CardState> for pb::scheduler::SchedulingState {
fn from(state: CardState) -> Self {
pb::SchedulingState {
pb::scheduler::SchedulingState {
value: Some(match state {
CardState::Normal(state) => pb::scheduling_state::Value::Normal(state.into()),
CardState::Filtered(state) => pb::scheduling_state::Value::Filtered(state.into()),
CardState::Normal(state) => {
pb::scheduler::scheduling_state::Value::Normal(state.into())
}
CardState::Filtered(state) => {
pb::scheduler::scheduling_state::Value::Filtered(state.into())
}
}),
custom_data: None,
}
}
}
impl From<pb::SchedulingState> for CardState {
fn from(state: pb::SchedulingState) -> Self {
impl From<pb::scheduler::SchedulingState> for CardState {
fn from(state: pb::scheduler::SchedulingState) -> Self {
if let Some(value) = state.value {
match value {
pb::scheduling_state::Value::Normal(normal) => CardState::Normal(normal.into()),
pb::scheduling_state::Value::Filtered(filtered) => {
pb::scheduler::scheduling_state::Value::Normal(normal) => {
CardState::Normal(normal.into())
}
pb::scheduler::scheduling_state::Value::Filtered(filtered) => {
CardState::Filtered(filtered.into())
}
}

View File

@ -3,17 +3,17 @@
use crate::{pb, scheduler::states::NewState};
impl From<pb::scheduling_state::New> for NewState {
fn from(state: pb::scheduling_state::New) -> Self {
impl From<pb::scheduler::scheduling_state::New> for NewState {
fn from(state: pb::scheduler::scheduling_state::New) -> Self {
NewState {
position: state.position,
}
}
}
impl From<NewState> for pb::scheduling_state::New {
impl From<NewState> for pb::scheduler::scheduling_state::New {
fn from(state: NewState) -> Self {
pb::scheduling_state::New {
pb::scheduler::scheduling_state::New {
position: state.position,
}
}

View File

@ -3,37 +3,42 @@
use crate::{pb, scheduler::states::NormalState};
impl From<NormalState> for pb::scheduling_state::Normal {
impl From<NormalState> for pb::scheduler::scheduling_state::Normal {
fn from(state: NormalState) -> Self {
pb::scheduling_state::Normal {
pb::scheduler::scheduling_state::Normal {
value: Some(match state {
NormalState::New(state) => pb::scheduling_state::normal::Value::New(state.into()),
NormalState::New(state) => {
pb::scheduler::scheduling_state::normal::Value::New(state.into())
}
NormalState::Learning(state) => {
pb::scheduling_state::normal::Value::Learning(state.into())
pb::scheduler::scheduling_state::normal::Value::Learning(state.into())
}
NormalState::Review(state) => {
pb::scheduling_state::normal::Value::Review(state.into())
pb::scheduler::scheduling_state::normal::Value::Review(state.into())
}
NormalState::Relearning(state) => {
pb::scheduling_state::normal::Value::Relearning(state.into())
pb::scheduler::scheduling_state::normal::Value::Relearning(state.into())
}
}),
}
}
}
impl From<pb::scheduling_state::Normal> for NormalState {
fn from(state: pb::scheduling_state::Normal) -> Self {
match state
.value
.unwrap_or_else(|| pb::scheduling_state::normal::Value::New(Default::default()))
{
pb::scheduling_state::normal::Value::New(state) => NormalState::New(state.into()),
pb::scheduling_state::normal::Value::Learning(state) => {
impl From<pb::scheduler::scheduling_state::Normal> for NormalState {
fn from(state: pb::scheduler::scheduling_state::Normal) -> Self {
match state.value.unwrap_or_else(|| {
pb::scheduler::scheduling_state::normal::Value::New(Default::default())
}) {
pb::scheduler::scheduling_state::normal::Value::New(state) => {
NormalState::New(state.into())
}
pb::scheduler::scheduling_state::normal::Value::Learning(state) => {
NormalState::Learning(state.into())
}
pb::scheduling_state::normal::Value::Review(state) => NormalState::Review(state.into()),
pb::scheduling_state::normal::Value::Relearning(state) => {
pb::scheduler::scheduling_state::normal::Value::Review(state) => {
NormalState::Review(state.into())
}
pb::scheduler::scheduling_state::normal::Value::Relearning(state) => {
NormalState::Relearning(state.into())
}
}
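
The Normal state conversion above spells out the full path to each generated oneof variant, e.g. pb::scheduler::scheduling_state::normal::Value::Review. The sketch below mirrors that shape with stand-in types, and shows one optional way (not used in this diff) to shorten the arms with a local use alias:

// Stand-in modules and variants; prost's real oneof variants wrap
// generated message types rather than bare integers.
mod pb {
    pub mod scheduler {
        pub mod scheduling_state {
            pub mod normal {
                #[derive(Debug, Clone, Copy, PartialEq)]
                pub enum Value {
                    New(i32),
                    Review(u32),
                }
            }
        }
    }
}

#[derive(Debug, PartialEq)]
enum NormalState {
    New { position: i32 },
    Review { scheduled_days: u32 },
}

impl From<pb::scheduler::scheduling_state::normal::Value> for NormalState {
    fn from(value: pb::scheduler::scheduling_state::normal::Value) -> Self {
        // Optional: a local alias keeps the long generated path out of
        // every match arm. The actual diff writes the paths in full.
        use crate::pb::scheduler::scheduling_state::normal::Value as V;
        match value {
            V::New(position) => NormalState::New { position },
            V::Review(scheduled_days) => NormalState::Review { scheduled_days },
        }
    }
}

fn main() {
    let value = pb::scheduler::scheduling_state::normal::Value::New(5);
    assert_eq!(NormalState::from(value), NormalState::New { position: 5 });
}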

View File

@ -3,8 +3,8 @@
use crate::{pb, scheduler::states::PreviewState};
impl From<pb::scheduling_state::Preview> for PreviewState {
fn from(state: pb::scheduling_state::Preview) -> Self {
impl From<pb::scheduler::scheduling_state::Preview> for PreviewState {
fn from(state: pb::scheduler::scheduling_state::Preview) -> Self {
PreviewState {
scheduled_secs: state.scheduled_secs,
finished: state.finished,
@ -12,9 +12,9 @@ impl From<pb::scheduling_state::Preview> for PreviewState {
}
}
impl From<PreviewState> for pb::scheduling_state::Preview {
impl From<PreviewState> for pb::scheduler::scheduling_state::Preview {
fn from(state: PreviewState) -> Self {
pb::scheduling_state::Preview {
pb::scheduler::scheduling_state::Preview {
scheduled_secs: state.scheduled_secs,
finished: state.finished,
}

View File

@ -3,8 +3,8 @@
use crate::{pb, scheduler::states::RelearnState};
impl From<pb::scheduling_state::Relearning> for RelearnState {
fn from(state: pb::scheduling_state::Relearning) -> Self {
impl From<pb::scheduler::scheduling_state::Relearning> for RelearnState {
fn from(state: pb::scheduler::scheduling_state::Relearning) -> Self {
RelearnState {
review: state.review.unwrap_or_default().into(),
learning: state.learning.unwrap_or_default().into(),
@ -12,9 +12,9 @@ impl From<pb::scheduling_state::Relearning> for RelearnState {
}
}
impl From<RelearnState> for pb::scheduling_state::Relearning {
impl From<RelearnState> for pb::scheduler::scheduling_state::Relearning {
fn from(state: RelearnState) -> Self {
pb::scheduling_state::Relearning {
pb::scheduler::scheduling_state::Relearning {
review: Some(state.review.into()),
learning: Some(state.learning.into()),
}

View File

@ -3,17 +3,17 @@
use crate::{pb, scheduler::states::ReschedulingFilterState};
impl From<pb::scheduling_state::ReschedulingFilter> for ReschedulingFilterState {
fn from(state: pb::scheduling_state::ReschedulingFilter) -> Self {
impl From<pb::scheduler::scheduling_state::ReschedulingFilter> for ReschedulingFilterState {
fn from(state: pb::scheduler::scheduling_state::ReschedulingFilter) -> Self {
ReschedulingFilterState {
original_state: state.original_state.unwrap_or_default().into(),
}
}
}
impl From<ReschedulingFilterState> for pb::scheduling_state::ReschedulingFilter {
impl From<ReschedulingFilterState> for pb::scheduler::scheduling_state::ReschedulingFilter {
fn from(state: ReschedulingFilterState) -> Self {
pb::scheduling_state::ReschedulingFilter {
pb::scheduler::scheduling_state::ReschedulingFilter {
original_state: Some(state.original_state.into()),
}
}

View File

@ -3,8 +3,8 @@
use crate::{pb, scheduler::states::ReviewState};
impl From<pb::scheduling_state::Review> for ReviewState {
fn from(state: pb::scheduling_state::Review) -> Self {
impl From<pb::scheduler::scheduling_state::Review> for ReviewState {
fn from(state: pb::scheduler::scheduling_state::Review) -> Self {
ReviewState {
scheduled_days: state.scheduled_days,
elapsed_days: state.elapsed_days,
@ -15,9 +15,9 @@ impl From<pb::scheduling_state::Review> for ReviewState {
}
}
impl From<ReviewState> for pb::scheduling_state::Review {
impl From<ReviewState> for pb::scheduler::scheduling_state::Review {
fn from(state: ReviewState) -> Self {
pb::scheduling_state::Review {
pb::scheduler::scheduling_state::Review {
scheduled_days: state.scheduled_days,
elapsed_days: state.elapsed_days,
ease_factor: state.ease_factor,

View File

@ -6,8 +6,8 @@ use std::str::FromStr;
use crate::{browser_table, i18n::I18n, pb};
impl browser_table::Column {
pub fn to_pb_column(self, i18n: &I18n) -> pb::browser_columns::Column {
pb::browser_columns::Column {
pub fn to_pb_column(self, i18n: &I18n) -> pb::search::browser_columns::Column {
pb::search::browser_columns::Column {
key: self.to_string(),
cards_mode_label: self.cards_mode_label(i18n),
notes_mode_label: self.notes_mode_label(i18n),
@ -20,8 +20,8 @@ impl browser_table::Column {
}
}
impl From<pb::StringList> for Vec<browser_table::Column> {
fn from(input: pb::StringList) -> Self {
impl From<pb::generic::StringList> for Vec<browser_table::Column> {
fn from(input: pb::generic::StringList) -> Self {
input
.vals
.iter()

View File

@ -7,56 +7,62 @@ mod search_node;
use std::{str::FromStr, sync::Arc};
use super::{notes::to_note_ids, Backend};
pub(super) use crate::pb::search_service::Service as SearchService;
pub(super) use crate::pb::search::search_service::Service as SearchService;
use crate::{
browser_table::Column,
pb,
pb::sort_order::Value as SortOrderProto,
pb::search::sort_order::Value as SortOrderProto,
prelude::*,
search::{replace_search_node, JoinSearches, Node, SortMode},
};
impl SearchService for Backend {
fn build_search_string(&self, input: pb::SearchNode) -> Result<pb::String> {
fn build_search_string(&self, input: pb::search::SearchNode) -> Result<pb::generic::String> {
let node: Node = input.try_into()?;
Ok(SearchBuilder::from_root(node).write().into())
}
fn search_cards(&self, input: pb::SearchRequest) -> Result<pb::SearchResponse> {
fn search_cards(&self, input: pb::search::SearchRequest) -> Result<pb::search::SearchResponse> {
self.with_col(|col| {
let order = input.order.unwrap_or_default().value.into();
let cids = col.search_cards(&input.search, order)?;
Ok(pb::SearchResponse {
Ok(pb::search::SearchResponse {
ids: cids.into_iter().map(|v| v.0).collect(),
})
})
}
fn search_notes(&self, input: pb::SearchRequest) -> Result<pb::SearchResponse> {
fn search_notes(&self, input: pb::search::SearchRequest) -> Result<pb::search::SearchResponse> {
self.with_col(|col| {
let order = input.order.unwrap_or_default().value.into();
let nids = col.search_notes(&input.search, order)?;
Ok(pb::SearchResponse {
Ok(pb::search::SearchResponse {
ids: nids.into_iter().map(|v| v.0).collect(),
})
})
}
fn join_search_nodes(&self, input: pb::JoinSearchNodesRequest) -> Result<pb::String> {
fn join_search_nodes(
&self,
input: pb::search::JoinSearchNodesRequest,
) -> Result<pb::generic::String> {
let existing_node: Node = input.existing_node.unwrap_or_default().try_into()?;
let additional_node: Node = input.additional_node.unwrap_or_default().try_into()?;
Ok(
match pb::search_node::group::Joiner::from_i32(input.joiner).unwrap_or_default() {
pb::search_node::group::Joiner::And => existing_node.and_flat(additional_node),
pb::search_node::group::Joiner::Or => existing_node.or_flat(additional_node),
match pb::search::search_node::group::Joiner::from_i32(input.joiner).unwrap_or_default() {
pb::search::search_node::group::Joiner::And => existing_node.and_flat(additional_node),
pb::search::search_node::group::Joiner::Or => existing_node.or_flat(additional_node),
}
.write()
.into(),
)
}
fn replace_search_node(&self, input: pb::ReplaceSearchNodeRequest) -> Result<pb::String> {
fn replace_search_node(
&self,
input: pb::search::ReplaceSearchNodeRequest,
) -> Result<pb::generic::String> {
let existing = {
let node = input.existing_node.unwrap_or_default().try_into()?;
if let Node::Group(nodes) = node {
@ -69,7 +75,10 @@ impl SearchService for Backend {
Ok(replace_search_node(existing, replacement).into())
}
fn find_and_replace(&self, input: pb::FindAndReplaceRequest) -> Result<pb::OpChangesWithCount> {
fn find_and_replace(
&self,
input: pb::search::FindAndReplaceRequest,
) -> Result<pb::collection::OpChangesWithCount> {
let mut search = if input.regex {
input.search
} else {
@ -94,11 +103,17 @@ impl SearchService for Backend {
})
}
fn all_browser_columns(&self, _input: pb::Empty) -> Result<pb::BrowserColumns> {
fn all_browser_columns(
&self,
_input: pb::generic::Empty,
) -> Result<pb::search::BrowserColumns> {
self.with_col(|col| Ok(col.all_browser_columns()))
}
fn set_active_browser_columns(&self, input: pb::StringList) -> Result<pb::Empty> {
fn set_active_browser_columns(
&self,
input: pb::generic::StringList,
) -> Result<pb::generic::Empty> {
self.with_col(|col| {
col.state.active_browser_columns = Some(Arc::new(input.into()));
Ok(())
@ -106,15 +121,15 @@ impl SearchService for Backend {
.map(Into::into)
}
fn browser_row_for_id(&self, input: pb::Int64) -> Result<pb::BrowserRow> {
fn browser_row_for_id(&self, input: pb::generic::Int64) -> Result<pb::search::BrowserRow> {
self.with_col(|col| col.browser_row_for_id(input.val).map(Into::into))
}
}
impl From<Option<SortOrderProto>> for SortMode {
fn from(order: Option<SortOrderProto>) -> Self {
use pb::sort_order::Value as V;
match order.unwrap_or(V::None(pb::Empty {})) {
use pb::search::sort_order::Value as V;
match order.unwrap_or(V::None(pb::generic::Empty {})) {
V::None(_) => SortMode::NoOrder,
V::Custom(s) => SortMode::Custom(s),
V::Builtin(b) => SortMode::Builtin {

View File

@ -12,11 +12,11 @@ use crate::{
text::{escape_anki_wildcards, escape_anki_wildcards_for_search_node},
};
impl TryFrom<pb::SearchNode> for Node {
impl TryFrom<pb::search::SearchNode> for Node {
type Error = AnkiError;
fn try_from(msg: pb::SearchNode) -> std::result::Result<Self, Self::Error> {
use pb::search_node::{group::Joiner, Filter, Flag};
fn try_from(msg: pb::search::SearchNode) -> std::result::Result<Self, Self::Error> {
use pb::search::search_node::{group::Joiner, Filter, Flag};
Ok(if let Some(filter) = msg.filter {
match filter {
Filter::Tag(s) => SearchNode::from_tag_name(&s).into(),
@ -52,7 +52,7 @@ impl TryFrom<pb::SearchNode> for Node {
}),
Filter::EditedInDays(u) => Node::Search(SearchNode::EditedInDays(u)),
Filter::CardState(state) => Node::Search(SearchNode::State(
pb::search_node::CardState::from_i32(state)
pb::search::search_node::CardState::from_i32(state)
.unwrap_or_default()
.into(),
)),
@ -114,33 +114,33 @@ impl TryFrom<pb::SearchNode> for Node {
}
}
impl From<pb::search_node::Rating> for RatingKind {
fn from(r: pb::search_node::Rating) -> Self {
impl From<pb::search::search_node::Rating> for RatingKind {
fn from(r: pb::search::search_node::Rating) -> Self {
match r {
pb::search_node::Rating::Again => RatingKind::AnswerButton(1),
pb::search_node::Rating::Hard => RatingKind::AnswerButton(2),
pb::search_node::Rating::Good => RatingKind::AnswerButton(3),
pb::search_node::Rating::Easy => RatingKind::AnswerButton(4),
pb::search_node::Rating::Any => RatingKind::AnyAnswerButton,
pb::search_node::Rating::ByReschedule => RatingKind::ManualReschedule,
pb::search::search_node::Rating::Again => RatingKind::AnswerButton(1),
pb::search::search_node::Rating::Hard => RatingKind::AnswerButton(2),
pb::search::search_node::Rating::Good => RatingKind::AnswerButton(3),
pb::search::search_node::Rating::Easy => RatingKind::AnswerButton(4),
pb::search::search_node::Rating::Any => RatingKind::AnyAnswerButton,
pb::search::search_node::Rating::ByReschedule => RatingKind::ManualReschedule,
}
}
}
impl From<pb::search_node::CardState> for StateKind {
fn from(k: pb::search_node::CardState) -> Self {
impl From<pb::search::search_node::CardState> for StateKind {
fn from(k: pb::search::search_node::CardState) -> Self {
match k {
pb::search_node::CardState::New => StateKind::New,
pb::search_node::CardState::Learn => StateKind::Learning,
pb::search_node::CardState::Review => StateKind::Review,
pb::search_node::CardState::Due => StateKind::Due,
pb::search_node::CardState::Suspended => StateKind::Suspended,
pb::search_node::CardState::Buried => StateKind::Buried,
pb::search::search_node::CardState::New => StateKind::New,
pb::search::search_node::CardState::Learn => StateKind::Learning,
pb::search::search_node::CardState::Review => StateKind::Review,
pb::search::search_node::CardState::Due => StateKind::Due,
pb::search::search_node::CardState::Suspended => StateKind::Suspended,
pb::search::search_node::CardState::Buried => StateKind::Buried,
}
}
}
impl pb::search_node::IdList {
impl pb::search::search_node::IdList {
fn into_id_string(self) -> String {
self.ids
.iter()

View File

@ -2,23 +2,29 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::Backend;
pub(super) use crate::pb::stats_service::Service as StatsService;
pub(super) use crate::pb::stats::stats_service::Service as StatsService;
use crate::{pb, prelude::*, revlog::RevlogReviewKind};
impl StatsService for Backend {
fn card_stats(&self, input: pb::CardId) -> Result<pb::CardStatsResponse> {
fn card_stats(&self, input: pb::cards::CardId) -> Result<pb::stats::CardStatsResponse> {
self.with_col(|col| col.card_stats(input.into()))
}
fn graphs(&self, input: pb::GraphsRequest) -> Result<pb::GraphsResponse> {
fn graphs(&self, input: pb::stats::GraphsRequest) -> Result<pb::stats::GraphsResponse> {
self.with_col(|col| col.graph_data_for_search(&input.search, input.days))
}
fn get_graph_preferences(&self, _input: pb::Empty) -> Result<pb::GraphPreferences> {
fn get_graph_preferences(
&self,
_input: pb::generic::Empty,
) -> Result<pb::stats::GraphPreferences> {
self.with_col(|col| Ok(col.get_graph_preferences()))
}
fn set_graph_preferences(&self, input: pb::GraphPreferences) -> Result<pb::Empty> {
fn set_graph_preferences(
&self,
input: pb::stats::GraphPreferences,
) -> Result<pb::generic::Empty> {
self.with_col(|col| col.set_graph_preferences(input))
.map(Into::into)
}
@ -27,11 +33,11 @@ impl StatsService for Backend {
impl From<RevlogReviewKind> for i32 {
fn from(kind: RevlogReviewKind) -> Self {
(match kind {
RevlogReviewKind::Learning => pb::revlog_entry::ReviewKind::Learning,
RevlogReviewKind::Review => pb::revlog_entry::ReviewKind::Review,
RevlogReviewKind::Relearning => pb::revlog_entry::ReviewKind::Relearning,
RevlogReviewKind::Filtered => pb::revlog_entry::ReviewKind::Filtered,
RevlogReviewKind::Manual => pb::revlog_entry::ReviewKind::Manual,
RevlogReviewKind::Learning => pb::stats::revlog_entry::ReviewKind::Learning,
RevlogReviewKind::Review => pb::stats::revlog_entry::ReviewKind::Review,
RevlogReviewKind::Relearning => pb::stats::revlog_entry::ReviewKind::Relearning,
RevlogReviewKind::Filtered => pb::stats::revlog_entry::ReviewKind::Filtered,
RevlogReviewKind::Manual => pb::stats::revlog_entry::ReviewKind::Manual,
}) as i32
}
}

View File

@ -9,7 +9,7 @@ use futures::future::{AbortHandle, AbortRegistration, Abortable};
use slog::warn;
use super::{progress::AbortHandleSlot, Backend};
pub(super) use crate::pb::sync_service::Service as SyncService;
pub(super) use crate::pb::sync::sync_service::Service as SyncService;
use crate::{
media::MediaManager,
pb,
@ -30,47 +30,47 @@ pub(super) struct SyncState {
#[derive(Default, Debug)]
pub(super) struct RemoteSyncStatus {
pub last_check: TimestampSecs,
pub last_response: pb::sync_status_response::Required,
pub last_response: pb::sync::sync_status_response::Required,
}
impl RemoteSyncStatus {
pub(super) fn update(&mut self, required: pb::sync_status_response::Required) {
pub(super) fn update(&mut self, required: pb::sync::sync_status_response::Required) {
self.last_check = TimestampSecs::now();
self.last_response = required
}
}
impl From<SyncOutput> for pb::SyncCollectionResponse {
impl From<SyncOutput> for pb::sync::SyncCollectionResponse {
fn from(o: SyncOutput) -> Self {
pb::SyncCollectionResponse {
pb::sync::SyncCollectionResponse {
host_number: o.host_number,
server_message: o.server_message,
required: match o.required {
SyncActionRequired::NoChanges => {
pb::sync_collection_response::ChangesRequired::NoChanges as i32
pb::sync::sync_collection_response::ChangesRequired::NoChanges as i32
}
SyncActionRequired::FullSyncRequired {
upload_ok,
download_ok,
} => {
if !upload_ok {
pb::sync_collection_response::ChangesRequired::FullDownload as i32
pb::sync::sync_collection_response::ChangesRequired::FullDownload as i32
} else if !download_ok {
pb::sync_collection_response::ChangesRequired::FullUpload as i32
pb::sync::sync_collection_response::ChangesRequired::FullUpload as i32
} else {
pb::sync_collection_response::ChangesRequired::FullSync as i32
pb::sync::sync_collection_response::ChangesRequired::FullSync as i32
}
}
SyncActionRequired::NormalSyncRequired => {
pb::sync_collection_response::ChangesRequired::NormalSync as i32
pb::sync::sync_collection_response::ChangesRequired::NormalSync as i32
}
},
}
}
}
impl From<pb::SyncAuth> for SyncAuth {
fn from(a: pb::SyncAuth) -> Self {
impl From<pb::sync::SyncAuth> for SyncAuth {
fn from(a: pb::sync::SyncAuth) -> Self {
SyncAuth {
hkey: a.hkey,
host_number: a.host_number,
@ -79,11 +79,11 @@ impl From<pb::SyncAuth> for SyncAuth {
}
impl SyncService for Backend {
fn sync_media(&self, input: pb::SyncAuth) -> Result<pb::Empty> {
fn sync_media(&self, input: pb::sync::SyncAuth) -> Result<pb::generic::Empty> {
self.sync_media_inner(input).map(Into::into)
}
fn abort_sync(&self, _input: pb::Empty) -> Result<pb::Empty> {
fn abort_sync(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
if let Some(handle) = self.sync_abort.lock().unwrap().take() {
handle.abort();
}
@ -91,7 +91,7 @@ impl SyncService for Backend {
}
/// Abort the media sync. Does not wait for completion.
fn abort_media_sync(&self, _input: pb::Empty) -> Result<pb::Empty> {
fn abort_media_sync(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
let guard = self.state.lock().unwrap();
if let Some(handle) = &guard.sync.media_sync_abort {
handle.abort();
@ -99,33 +99,39 @@ impl SyncService for Backend {
Ok(().into())
}
fn before_upload(&self, _input: pb::Empty) -> Result<pb::Empty> {
fn before_upload(&self, _input: pb::generic::Empty) -> Result<pb::generic::Empty> {
self.with_col(|col| col.before_upload().map(Into::into))
}
fn sync_login(&self, input: pb::SyncLoginRequest) -> Result<pb::SyncAuth> {
fn sync_login(&self, input: pb::sync::SyncLoginRequest) -> Result<pb::sync::SyncAuth> {
self.sync_login_inner(input)
}
fn sync_status(&self, input: pb::SyncAuth) -> Result<pb::SyncStatusResponse> {
fn sync_status(&self, input: pb::sync::SyncAuth) -> Result<pb::sync::SyncStatusResponse> {
self.sync_status_inner(input)
}
fn sync_collection(&self, input: pb::SyncAuth) -> Result<pb::SyncCollectionResponse> {
fn sync_collection(
&self,
input: pb::sync::SyncAuth,
) -> Result<pb::sync::SyncCollectionResponse> {
self.sync_collection_inner(input)
}
fn full_upload(&self, input: pb::SyncAuth) -> Result<pb::Empty> {
fn full_upload(&self, input: pb::sync::SyncAuth) -> Result<pb::generic::Empty> {
self.full_sync_inner(input, true)?;
Ok(().into())
}
fn full_download(&self, input: pb::SyncAuth) -> Result<pb::Empty> {
fn full_download(&self, input: pb::sync::SyncAuth) -> Result<pb::generic::Empty> {
self.full_sync_inner(input, false)?;
Ok(().into())
}
fn sync_server_method(&self, input: pb::SyncServerMethodRequest) -> Result<pb::Json> {
fn sync_server_method(
&self,
input: pb::sync::SyncServerMethodRequest,
) -> Result<pb::generic::Json> {
let req = SyncRequest::from_method_and_data(input.method(), input.data)?;
self.sync_server_method_inner(req).map(Into::into)
}
@ -160,7 +166,7 @@ impl Backend {
Ok((guard, abort_reg))
}
pub(super) fn sync_media_inner(&self, input: pb::SyncAuth) -> Result<()> {
pub(super) fn sync_media_inner(&self, input: pb::sync::SyncAuth) -> Result<()> {
// mark media sync as active
let (abort_handle, abort_reg) = AbortHandle::new_pair();
{
@ -220,7 +226,10 @@ impl Backend {
}
}
pub(super) fn sync_login_inner(&self, input: pb::SyncLoginRequest) -> Result<pb::SyncAuth> {
pub(super) fn sync_login_inner(
&self,
input: pb::sync::SyncLoginRequest,
) -> Result<pb::sync::SyncAuth> {
let (_guard, abort_reg) = self.sync_abort_handle()?;
let rt = self.runtime_handle();
@ -230,16 +239,19 @@ impl Backend {
Ok(sync_result) => sync_result,
Err(_) => Err(AnkiError::Interrupted),
};
ret.map(|a| pb::SyncAuth {
ret.map(|a| pb::sync::SyncAuth {
hkey: a.hkey,
host_number: a.host_number,
})
}
pub(super) fn sync_status_inner(&self, input: pb::SyncAuth) -> Result<pb::SyncStatusResponse> {
pub(super) fn sync_status_inner(
&self,
input: pb::sync::SyncAuth,
) -> Result<pb::sync::SyncStatusResponse> {
// any local changes mean we can skip the network round-trip
let req = self.with_col(|col| col.get_local_sync_status())?;
if req != pb::sync_status_response::Required::NoChanges {
if req != pb::sync::sync_status_response::Required::NoChanges {
return Ok(req.into());
}
@ -273,8 +285,8 @@ impl Backend {
pub(super) fn sync_collection_inner(
&self,
input: pb::SyncAuth,
) -> Result<pb::SyncCollectionResponse> {
input: pb::sync::SyncAuth,
) -> Result<pb::sync::SyncCollectionResponse> {
let (_guard, abort_reg) = self.sync_abort_handle()?;
let rt = self.runtime_handle();
@ -314,7 +326,7 @@ impl Backend {
Ok(output.into())
}
pub(super) fn full_sync_inner(&self, input: pb::SyncAuth, upload: bool) -> Result<()> {
pub(super) fn full_sync_inner(&self, input: pb::sync::SyncAuth, upload: bool) -> Result<()> {
self.abort_media_sync_and_wait();
let rt = self.runtime_handle();
@ -356,7 +368,7 @@ impl Backend {
.unwrap()
.sync
.remote_sync_status
.update(pb::sync_status_response::Required::NoChanges);
.update(pb::sync::sync_status_response::Required::NoChanges);
}
sync_result
}
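
In the sync conversion above, the required field is filled by casting a generated enum variant to i32, which is how prost represents enum-typed message fields. A trimmed-down, self-contained sketch of that mapping; the variant names are taken from the diff, but the numeric values here are illustrative only:

// Simplified stand-ins; the real enum has more variants and its
// discriminant values may differ.
mod pb {
    pub mod sync {
        pub mod sync_collection_response {
            #[derive(Debug, Clone, Copy, PartialEq)]
            #[repr(i32)]
            pub enum ChangesRequired {
                NoChanges = 0,
                NormalSync = 1,
                FullSync = 2,
            }
        }

        #[derive(Debug, PartialEq)]
        pub struct SyncCollectionResponse {
            // Prost stores enum-typed fields as a plain i32 on the struct.
            pub required: i32,
        }
    }
}

enum SyncActionRequired {
    NoChanges,
    NormalSyncRequired,
    FullSyncRequired,
}

fn to_proto(action: SyncActionRequired) -> pb::sync::SyncCollectionResponse {
    use crate::pb::sync::sync_collection_response::ChangesRequired;
    let required = match action {
        SyncActionRequired::NoChanges => ChangesRequired::NoChanges,
        SyncActionRequired::NormalSyncRequired => ChangesRequired::NormalSync,
        SyncActionRequired::FullSyncRequired => ChangesRequired::FullSync,
    };
    pb::sync::SyncCollectionResponse {
        required: required as i32,
    }
}

fn main() {
    let resp = to_proto(SyncActionRequired::NormalSyncRequired);
    assert_eq!(resp.required, 1);
}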

View File

@ -2,16 +2,19 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{notes::to_note_ids, Backend};
pub(super) use crate::pb::tags_service::Service as TagsService;
pub(super) use crate::pb::tags::tags_service::Service as TagsService;
use crate::{pb, prelude::*};
impl TagsService for Backend {
fn clear_unused_tags(&self, _input: pb::Empty) -> Result<pb::OpChangesWithCount> {
fn clear_unused_tags(
&self,
_input: pb::generic::Empty,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| col.clear_unused_tags().map(Into::into))
}
fn all_tags(&self, _input: pb::Empty) -> Result<pb::StringList> {
Ok(pb::StringList {
fn all_tags(&self, _input: pb::generic::Empty) -> Result<pb::generic::StringList> {
Ok(pb::generic::StringList {
vals: self.with_col(|col| {
Ok(col
.storage
@ -23,22 +26,28 @@ impl TagsService for Backend {
})
}
fn remove_tags(&self, tags: pb::String) -> Result<pb::OpChangesWithCount> {
fn remove_tags(&self, tags: pb::generic::String) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| col.remove_tags(tags.val.as_str()).map(Into::into))
}
fn set_tag_collapsed(&self, input: pb::SetTagCollapsedRequest) -> Result<pb::OpChanges> {
fn set_tag_collapsed(
&self,
input: pb::tags::SetTagCollapsedRequest,
) -> Result<pb::collection::OpChanges> {
self.with_col(|col| {
col.set_tag_collapsed(&input.name, input.collapsed)
.map(Into::into)
})
}
fn tag_tree(&self, _input: pb::Empty) -> Result<pb::TagTreeNode> {
fn tag_tree(&self, _input: pb::generic::Empty) -> Result<pb::tags::TagTreeNode> {
self.with_col(|col| col.tag_tree())
}
fn reparent_tags(&self, input: pb::ReparentTagsRequest) -> Result<pb::OpChangesWithCount> {
fn reparent_tags(
&self,
input: pb::tags::ReparentTagsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
let source_tags = input.tags;
let target_tag = if input.new_parent.is_empty() {
None
@ -49,19 +58,28 @@ impl TagsService for Backend {
.map(Into::into)
}
fn rename_tags(&self, input: pb::RenameTagsRequest) -> Result<pb::OpChangesWithCount> {
fn rename_tags(
&self,
input: pb::tags::RenameTagsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| col.rename_tag(&input.current_prefix, &input.new_prefix))
.map(Into::into)
}
fn add_note_tags(&self, input: pb::NoteIdsAndTagsRequest) -> Result<pb::OpChangesWithCount> {
fn add_note_tags(
&self,
input: pb::tags::NoteIdsAndTagsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| {
col.add_tags_to_notes(&to_note_ids(input.note_ids), &input.tags)
.map(Into::into)
})
}
fn remove_note_tags(&self, input: pb::NoteIdsAndTagsRequest) -> Result<pb::OpChangesWithCount> {
fn remove_note_tags(
&self,
input: pb::tags::NoteIdsAndTagsRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| {
col.remove_tags_from_notes(&to_note_ids(input.note_ids), &input.tags)
.map(Into::into)
@ -70,8 +88,8 @@ impl TagsService for Backend {
fn find_and_replace_tag(
&self,
input: pb::FindAndReplaceTagRequest,
) -> Result<pb::OpChangesWithCount> {
input: pb::tags::FindAndReplaceTagRequest,
) -> Result<pb::collection::OpChangesWithCount> {
self.with_col(|col| {
let note_ids = if input.note_ids.is_empty() {
col.search_notes_unordered("")?
@ -89,10 +107,13 @@ impl TagsService for Backend {
})
}
fn complete_tag(&self, input: pb::CompleteTagRequest) -> Result<pb::CompleteTagResponse> {
fn complete_tag(
&self,
input: pb::tags::CompleteTagRequest,
) -> Result<pb::tags::CompleteTagResponse> {
self.with_col(|col| {
let tags = col.complete_tag(&input.input, input.match_limit as usize)?;
Ok(pb::CompleteTagResponse { tags })
Ok(pb::tags::CompleteTagResponse { tags })
})
}
}

View File

@ -182,8 +182,8 @@ impl Column {
.into()
}
pub fn default_order(self) -> pb::browser_columns::Sorting {
use pb::browser_columns::Sorting;
pub fn default_order(self) -> pb::search::browser_columns::Sorting {
use pb::search::browser_columns::Sorting;
match self {
Column::Question | Column::Answer | Column::Custom => Sorting::None,
Column::SortField | Column::Tags | Column::Notetype | Column::Deck => {
@ -205,8 +205,8 @@ impl Column {
matches!(self, Self::Question | Self::Answer | Self::SortField)
}
pub fn alignment(self) -> pb::browser_columns::Alignment {
use pb::browser_columns::Alignment;
pub fn alignment(self) -> pb::search::browser_columns::Alignment {
use pb::search::browser_columns::Alignment;
match self {
Self::Question
| Self::Answer
@ -221,16 +221,16 @@ impl Column {
}
impl Collection {
pub fn all_browser_columns(&self) -> pb::BrowserColumns {
let mut columns: Vec<pb::browser_columns::Column> = Column::iter()
pub fn all_browser_columns(&self) -> pb::search::BrowserColumns {
let mut columns: Vec<pb::search::browser_columns::Column> = Column::iter()
.filter(|&c| c != Column::Custom)
.map(|c| c.to_pb_column(&self.tr))
.collect();
columns.sort_by(|c1, c2| c1.cards_mode_label.cmp(&c2.cards_mode_label));
pb::BrowserColumns { columns }
pb::search::BrowserColumns { columns }
}
pub fn browser_row_for_id(&mut self, id: i64) -> Result<pb::BrowserRow> {
pub fn browser_row_for_id(&mut self, id: i64) -> Result<pb::search::BrowserRow> {
let notes_mode = self.get_config_bool(BoolKey::BrowserTableShowNotesMode);
let columns = Arc::clone(
self.state
@ -361,8 +361,8 @@ impl RowContext {
})
}
fn browser_row(&self, columns: &[Column]) -> Result<pb::BrowserRow> {
Ok(pb::BrowserRow {
fn browser_row(&self, columns: &[Column]) -> Result<pb::search::BrowserRow> {
Ok(pb::search::BrowserRow {
cells: columns
.iter()
.map(|&column| self.get_cell(column))
@ -373,8 +373,8 @@ impl RowContext {
})
}
fn get_cell(&self, column: Column) -> Result<pb::browser_row::Cell> {
Ok(pb::browser_row::Cell {
fn get_cell(&self, column: Column) -> Result<pb::search::browser_row::Cell> {
Ok(pb::search::browser_row::Cell {
text: self.get_cell_text(column)?,
is_rtl: self.get_is_rtl(column),
})
@ -546,8 +546,8 @@ impl RowContext {
Ok(self.template()?.config.browser_font_size)
}
fn get_row_color(&self) -> pb::browser_row::Color {
use pb::browser_row::Color;
fn get_row_color(&self) -> pb::search::browser_row::Color {
use pb::search::browser_row::Color;
if self.notes_mode {
if self.note.is_marked() {
Color::Marked

View File

@ -18,7 +18,7 @@ pub fn extract_av_tags<S: Into<String> + AsRef<str>>(
txt: S,
question_side: bool,
tr: &I18n,
) -> (String, Vec<pb::AvTag>) {
) -> (String, Vec<pb::card_rendering::AvTag>) {
nodes_or_text_only(txt.as_ref())
.map(|nodes| nodes.write_and_extract_av_tags(question_side, tr))
.unwrap_or_else(|| (txt.into(), vec![]))
@ -122,17 +122,21 @@ mod test {
(
"foo [anki:play:q:0] baz [anki:play:q:1]",
vec![
pb::AvTag {
value: Some(pb::av_tag::Value::SoundOrVideo("bar.mp3".to_string()))
pb::card_rendering::AvTag {
value: Some(pb::card_rendering::av_tag::Value::SoundOrVideo(
"bar.mp3".to_string()
))
},
pb::AvTag {
value: Some(pb::av_tag::Value::Tts(pb::TtsTag {
field_text: tr.card_templates_blank().to_string(),
lang: "en_US".to_string(),
voices: vec![],
speed: 1.0,
other_args: vec![],
}))
pb::card_rendering::AvTag {
value: Some(pb::card_rendering::av_tag::Value::Tts(
pb::card_rendering::TtsTag {
field_text: tr.card_templates_blank().to_string(),
lang: "en_US".to_string(),
voices: vec![],
speed: 1.0,
other_args: vec![],
}
))
}
],
),

View File

@ -19,7 +19,7 @@ impl<'a> CardNodes<'a> {
&self,
question_side: bool,
tr: &I18n,
) -> (String, Vec<pb::AvTag>) {
) -> (String, Vec<pb::card_rendering::AvTag>) {
let mut extractor = AvExtractor::new(question_side, tr);
(extractor.write(self), extractor.tags)
}
@ -119,7 +119,7 @@ impl Write for AvStripper {
struct AvExtractor<'a> {
side: char,
tags: Vec<pb::AvTag>,
tags: Vec<pb::card_rendering::AvTag>,
tr: &'a I18n,
}
@ -147,8 +147,8 @@ impl<'a> AvExtractor<'a> {
impl Write for AvExtractor<'_> {
fn write_sound(&mut self, buf: &mut String, resource: &str) {
self.write_play_tag(buf);
self.tags.push(pb::AvTag {
value: Some(pb::av_tag::Value::SoundOrVideo(
self.tags.push(pb::card_rendering::AvTag {
value: Some(pb::card_rendering::av_tag::Value::SoundOrVideo(
decode_entities(resource).into(),
)),
});
@ -161,18 +161,20 @@ impl Write for AvExtractor<'_> {
}
self.write_play_tag(buf);
self.tags.push(pb::AvTag {
value: Some(pb::av_tag::Value::Tts(pb::TtsTag {
field_text: self.transform_tts_content(directive),
lang: directive.lang.into(),
voices: directive.voices.iter().map(ToString::to_string).collect(),
speed: directive.speed,
other_args: directive
.options
.iter()
.map(|(key, val)| format!("{}={}", key, val))
.collect(),
})),
self.tags.push(pb::card_rendering::AvTag {
value: Some(pb::card_rendering::av_tag::Value::Tts(
pb::card_rendering::TtsTag {
field_text: self.transform_tts_content(directive),
lang: directive.lang.into(),
voices: directive.voices.iter().map(ToString::to_string).collect(),
speed: directive.speed,
other_args: directive
.options
.iter()
.map(|(key, val)| format!("{}={}", key, val))
.collect(),
},
)),
});
}
}

View File

@ -5,7 +5,9 @@ use std::{
ffi::OsStr,
fs::{read_dir, remove_file, DirEntry},
path::{Path, PathBuf},
thread::{self, JoinHandle},
thread::{
JoinHandle, {self},
},
time::SystemTime,
};
@ -15,7 +17,7 @@ use log::error;
use crate::{
import_export::package::export_colpkg_from_data, io::read_file, log,
pb::preferences::BackupLimits, prelude::*,
pb::config::preferences::BackupLimits, prelude::*,
};
const BACKUP_FORMAT_STRING: &str = "backup-%Y-%m-%d-%H.%M.%S.colpkg";

View File

@ -18,7 +18,7 @@ pub use self::{
bool::BoolKey, deck::DeckConfigKey, notetype::get_aux_notetype_config_key,
number::I32ConfigKey, string::StringKey,
};
use crate::{pb::preferences::BackupLimits, prelude::*};
use crate::{pb::config::preferences::BackupLimits, prelude::*};
/// Only used when updating/undoing.
#[derive(Debug)]

View File

@ -8,7 +8,7 @@ mod update;
pub use schema11::{DeckConfSchema11, NewCardOrderSchema11};
pub use update::UpdateDeckConfigsRequest;
pub use crate::pb::deck_config::{
pub use crate::pb::deckconfig::deck_config::{
config::{
LeechAction, NewCardGatherPriority, NewCardInsertOrder, NewCardSortOrder, ReviewCardOrder,
ReviewMix,

View File

@ -13,8 +13,8 @@ use crate::{
decks::NormalDeck,
pb,
pb::{
deck::normal::DayLimit,
deck_configs_for_update::{current_deck::Limits, ConfigWithExtra, CurrentDeck},
deckconfig::deck_configs_for_update::{current_deck::Limits, ConfigWithExtra, CurrentDeck},
decks::deck::normal::DayLimit,
},
prelude::*,
search::{JoinSearches, SearchNode},
@ -36,8 +36,8 @@ impl Collection {
pub fn get_deck_configs_for_update(
&mut self,
deck: DeckId,
) -> Result<pb::DeckConfigsForUpdate> {
Ok(pb::DeckConfigsForUpdate {
) -> Result<pb::deckconfig::DeckConfigsForUpdate> {
Ok(pb::deckconfig::DeckConfigsForUpdate {
all_config: self.get_deck_config_with_extra_for_update()?,
current_deck: Some(self.get_current_deck_for_update(deck)?),
defaults: Some(DeckConfig::default().into()),

View File

@ -42,11 +42,11 @@ impl Collection {
pub(crate) fn counts_for_deck_today(
&mut self,
did: DeckId,
) -> Result<pb::CountsForDeckTodayResponse> {
) -> Result<pb::scheduler::CountsForDeckTodayResponse> {
let today = self.current_due_day(0)?;
let mut deck = self.storage.get_deck(did)?.or_not_found(did)?;
deck.reset_stats_if_day_changed(today);
Ok(pb::CountsForDeckTodayResponse {
Ok(pb::scheduler::CountsForDeckTodayResponse {
new: deck.common.new_studied,
review: deck.common.review_studied,
})

View File

@ -21,7 +21,7 @@ pub(crate) use name::immediate_parent_name;
pub use name::NativeDeckName;
pub use schema11::DeckSchema11;
pub use crate::pb::{
pub use crate::pb::decks::{
deck::{
filtered::{search_term::Order as FilteredSearchOrder, SearchTerm as FilteredSearchTerm},
kind_container::Kind as DeckKind,

View File

@ -23,7 +23,9 @@ pub enum DeckSchema11 {
// serde doesn't support integer/bool enum tags, so we manually pick the correct variant
mod dynfix {
use serde::de::{self, Deserialize, Deserializer};
use serde::de::{
Deserialize, Deserializer, {self},
};
use serde_json::{Map, Value};
use super::{DeckSchema11, FilteredDeckSchema11, NormalDeckSchema11};

View File

@ -23,7 +23,7 @@ impl Collection {
&mut self,
today: u32,
usn: Usn,
input: pb::UpdateStatsRequest,
input: pb::scheduler::UpdateStatsRequest,
) -> Result<()> {
let did = input.deck_id.into();
let mutator = |c: &mut DeckCommon| {

View File

@ -14,8 +14,10 @@ use super::{
limits::{remaining_limits_map, RemainingLimits},
DueCounts,
};
pub use crate::pb::set_deck_collapsed_request::Scope as DeckCollapseScope;
use crate::{config::SchedulerVersion, ops::OpOutput, pb::DeckTreeNode, prelude::*, undo::Op};
pub use crate::pb::decks::set_deck_collapsed_request::Scope as DeckCollapseScope;
use crate::{
config::SchedulerVersion, ops::OpOutput, pb::decks::DeckTreeNode, prelude::*, undo::Op,
};
fn deck_names_to_tree(names: impl Iterator<Item = (DeckId, String)>) -> DeckTreeNode {
let mut top = DeckTreeNode::default();

View File

@ -38,8 +38,8 @@ impl PartialEq for InvalidInputError {
impl Eq for InvalidInputError {}
/// Allows generating [AnkiError::InvalidInput] from [Option::None] and the
/// typical [core::result::Result::Err].
/// Allows generating [AnkiError::InvalidInput] from [None] and the
/// typical [Err].
pub trait OrInvalid {
type Value;
fn or_invalid(self, message: impl Into<String>) -> Result<Self::Value>;

View File

@ -35,7 +35,7 @@ impl PartialEq for NotFoundError {
impl Eq for NotFoundError {}
/// Allows generating [AnkiError::NotFound] from [Option::None].
/// Allows generating [AnkiError::NotFound] from [None].
pub trait OrNotFound {
type Value;
fn or_not_found(self, identifier: impl fmt::Display) -> Result<Self::Value>;
@ -67,9 +67,6 @@ mod test {
#[test]
fn test_unqualified_lowercase_type_name() {
assert_eq!(
unqualified_lowercase_type_name::<crate::card::CardId>(),
"card id"
);
assert_eq!(unqualified_lowercase_type_name::<CardId>(), "card id");
}
}
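
Editorial note (not part of the commit): the OrInvalid/OrNotFound helpers above turn a None (or, for OrInvalid, a typical Err) into Anki's error types, which is why call sites elsewhere in this diff read like self.storage.get_deck(did)?.or_not_found(did)?. Below is a minimal, self-contained re-implementation of the OrNotFound idea for illustration only; the real trait lives in rslib and returns Anki's own Result and error types.

// Simplified stand-in for illustration; not Anki's actual types.
use std::fmt;

#[derive(Debug)]
struct NotFoundError(String);

type Result<T> = std::result::Result<T, NotFoundError>;

trait OrNotFound {
    type Value;
    fn or_not_found(self, identifier: impl fmt::Display) -> Result<Self::Value>;
}

impl<T> OrNotFound for Option<T> {
    type Value = T;
    fn or_not_found(self, identifier: impl fmt::Display) -> Result<T> {
        // Convert a missing value into a descriptive error.
        self.ok_or_else(|| NotFoundError(identifier.to_string()))
    }
}

fn main() -> Result<()> {
    let decks = std::collections::HashMap::from([(1, "Default")]);
    let name = decks.get(&1).copied().or_not_found(1)?;
    println!("{name}");
    Ok(())
}

Run as-is it prints "Default"; the point is only to show the extension-trait pattern the doc comments above describe.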

View File

@ -8,7 +8,7 @@ pub mod text;
use std::marker::PhantomData;
pub use crate::pb::import_response::{Log as NoteLog, Note as LogNote};
pub use crate::pb::import_export::import_response::{Log as NoteLog, Note as LogNote};
use crate::{
prelude::*,
text::{

View File

@ -153,7 +153,7 @@ impl<'n> NoteContext<'n> {
}
fn add_notetype_with_remapped_id(&mut self, notetype: &mut Notetype) -> Result<()> {
let old_id = std::mem::take(&mut notetype.id);
let old_id = mem::take(&mut notetype.id);
notetype.usn = self.usn;
self.target_col
.add_notetype_inner(notetype, self.usn, true)?;

View File

@ -141,7 +141,7 @@ impl Collection {
for file in [SAMPLE_JPG, SAMPLE_JS, &new_mp3_name] {
assert!(self.media_folder.join(file).exists());
assert!(*csums.get(file).unwrap() != [0; 20]);
assert_ne!(*csums.get(file).unwrap(), [0; 20]);
}
let imported_note = self.storage.get_note(note.id).unwrap().unwrap();

View File

@ -6,7 +6,9 @@ use std::{
collections::HashMap,
ffi::OsStr,
fs::File,
io::{self, Read, Write},
io::{
Read, Write, {self},
},
path::{Path, PathBuf},
};
@ -268,7 +270,7 @@ fn write_media_map(
buf
};
let size = encoded_bytes.len();
let mut cursor = std::io::Cursor::new(encoded_bytes);
let mut cursor = io::Cursor::new(encoded_bytes);
if meta.zstd_compressed() {
zstd_copy(&mut cursor, zip, size)?;
} else {

View File

@ -3,12 +3,17 @@
use std::{
fs::File,
io::{self, Write},
io::{
Write, {self},
},
path::{Path, PathBuf},
};
use zip::{read::ZipFile, ZipArchive};
use zstd::{self, stream::copy_decode};
use zstd::{
stream::copy_decode,
{self},
};
use crate::{
collection::CollectionBuilder,

View File

@ -4,7 +4,9 @@
use std::{
borrow::Cow,
collections::HashMap,
fs::{self, File},
fs::{
File, {self},
},
io,
path::{Path, PathBuf},
};

View File

@ -3,14 +3,16 @@
use std::{
fs::File,
io::{self, Read},
io::{
Read, {self},
},
};
use prost::Message;
use zip::ZipArchive;
use zstd::stream::copy_decode;
pub(super) use crate::pb::{package_metadata::Version, PackageMetadata as Meta};
pub(super) use crate::pb::import_export::{package_metadata::Version, PackageMetadata as Meta};
use crate::{error::ImportError, prelude::*, storage::SchemaVersion};
impl Version {
@ -98,7 +100,7 @@ impl Meta {
pub(super) fn copy(
&self,
reader: &mut impl io::Read,
reader: &mut impl Read,
writer: &mut impl io::Write,
) -> io::Result<()> {
if self.zstd_compressed() {

View File

@ -11,4 +11,4 @@ pub(crate) use colpkg::export::export_colpkg_from_data;
pub use colpkg::import::import_colpkg;
pub(self) use meta::{Meta, Version};
pub(self) use crate::pb::{media_entries::MediaEntry, MediaEntries};
pub(self) use crate::pb::import_export::{media_entries::MediaEntry, MediaEntries};

View File

@ -11,7 +11,7 @@ use super::metadata::Delimiter;
use crate::{
import_export::{ExportProgress, IncrementableProgress},
notetype::RenderCardOutput,
pb::ExportNoteCsvRequest,
pb::import_export::ExportNoteCsvRequest,
prelude::*,
search::{SearchNode, SortMode},
template::RenderedNode,

View File

@ -22,7 +22,7 @@ use crate::{
import_export::text::NameOrId,
io::open_file,
notetype::NoteField,
pb::StringList,
pb::generic::StringList,
prelude::*,
text::{html_to_text_line, is_html},
};

View File

@ -8,7 +8,7 @@ mod json;
use serde_derive::{Deserialize, Serialize};
use super::LogNote;
use crate::pb::csv_metadata::DupeResolution;
use crate::pb::import_export::csv_metadata::DupeResolution;
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(default)]

View File

@ -15,7 +15,7 @@ use crate::{
pub(crate) type Result<T, E = FileIoError> = std::result::Result<T, E>;
/// See [std::fs::File::open].
/// See [File::open].
pub(crate) fn open_file(path: impl AsRef<Path>) -> Result<File> {
File::open(&path).context(FileIoSnafu {
path: path.as_ref(),

View File

@ -286,7 +286,7 @@ pub(crate) fn sha1_of_file(path: &Path) -> Result<Sha1Hash, FileIoError> {
}
/// Return the SHA1 of a stream.
pub(crate) fn sha1_of_reader(reader: &mut impl Read) -> std::io::Result<Sha1Hash> {
pub(crate) fn sha1_of_reader(reader: &mut impl Read) -> io::Result<Sha1Hash> {
let mut hasher = Sha1::new();
let mut buf = [0; 64 * 1024];
loop {

View File

@ -712,7 +712,7 @@ fn zip_files<'a>(
let buf = vec![];
let mut invalid_entries = vec![];
let w = std::io::Cursor::new(buf);
let w = io::Cursor::new(buf);
let mut zip = zip::ZipWriter::new(w);
let options =

View File

@ -18,7 +18,7 @@ use crate::{
notetype::{CardGenContext, NoteField},
ops::StateChanges,
pb,
pb::note_fields_check_response::State as NoteFieldsState,
pb::notes::note_fields_check_response::State as NoteFieldsState,
prelude::*,
template::field_is_empty,
text::{ensure_string_in_nfc, normalize_to_nfc, strip_html_preserving_media_filenames},
@ -169,7 +169,7 @@ impl Note {
/// Prepare note for saving to the database. Does not mark it as modified.
pub(crate) fn prepare_for_update(&mut self, nt: &Notetype, normalize_text: bool) -> Result<()> {
assert!(nt.id == self.notetype_id);
assert_eq!(nt.id, self.notetype_id);
let notetype_field_count = nt.fields.len().max(1);
require!(
notetype_field_count == self.fields.len(),
@ -258,9 +258,9 @@ pub(crate) fn normalize_field(field: &mut String, normalize_text: bool) {
}
}
impl From<Note> for pb::Note {
impl From<Note> for pb::notes::Note {
fn from(n: Note) -> Self {
pb::Note {
pb::notes::Note {
id: n.id.0,
guid: n.guid,
notetype_id: n.notetype_id.0,
@ -272,8 +272,8 @@ impl From<Note> for pb::Note {
}
}
impl From<pb::Note> for Note {
fn from(n: pb::Note) -> Self {
impl From<pb::notes::Note> for Note {
fn from(n: pb::notes::Note) -> Self {
Note {
id: NoteId(n.id),
guid: n.guid,

View File

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{NoteFieldConfig, NoteFieldProto};
use crate::{pb::UInt32, prelude::*};
use crate::{pb::generic::UInt32, prelude::*};
#[derive(Debug, PartialEq, Clone)]
pub struct NoteField {

View File

@ -30,7 +30,7 @@ pub use stock::all_stock_notetypes;
pub use templates::CardTemplate;
use unicase::UniCase;
pub use crate::pb::{
pub use crate::pb::notetypes::{
notetype::{
config::{
card_requirement::Kind as CardRequirementKind, CardRequirement, Kind as NotetypeKind,

View File

@ -7,7 +7,7 @@ use crate::{
error::Result,
i18n::I18n,
notetype::Notetype,
pb::stock_notetype::Kind,
pb::notetypes::stock_notetype::Kind,
storage::SqliteStorage,
timestamp::TimestampSecs,
};

View File

@ -2,7 +2,7 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{CardTemplateConfig, CardTemplateProto};
use crate::{pb::UInt32, prelude::*, template::ParsedTemplate};
use crate::{pb::generic::UInt32, prelude::*, template::ParsedTemplate};
#[derive(Debug, PartialEq, Clone)]
pub struct CardTemplate {

View File

@ -2,34 +2,29 @@
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
macro_rules! protobuf {
($ident:ident) => {
($ident:ident, $name:literal) => {
pub mod $ident {
#![allow(clippy::derive_partial_eq_without_eq)]
include!(concat!(
env!("OUT_DIR"),
concat!("/anki.", stringify!($ident), ".rs")
));
include!(concat!(env!("OUT_DIR"), "/anki.", $name, ".rs"));
}
pub use $ident::*;
};
}
protobuf!(backend);
protobuf!(card_rendering);
protobuf!(cards);
protobuf!(collection);
protobuf!(config);
protobuf!(deckconfig);
protobuf!(decks);
protobuf!(generic);
protobuf!(i18n);
protobuf!(import_export);
protobuf!(links);
protobuf!(media);
protobuf!(notes);
protobuf!(notetypes);
protobuf!(scheduler);
protobuf!(search);
protobuf!(stats);
protobuf!(sync);
protobuf!(tags);
protobuf!(backend, "backend");
protobuf!(card_rendering, "card_rendering");
protobuf!(cards, "cards");
protobuf!(collection, "collection");
protobuf!(config, "config");
protobuf!(deckconfig, "deckconfig");
protobuf!(decks, "decks");
protobuf!(generic, "generic");
protobuf!(i18n, "i18n");
protobuf!(import_export, "import_export");
protobuf!(links, "links");
protobuf!(media, "media");
protobuf!(notes, "notes");
protobuf!(notetypes, "notetypes");
protobuf!(scheduler, "scheduler");
protobuf!(search, "search");
protobuf!(stats, "stats");
protobuf!(sync, "sync");
protobuf!(tags, "tags");
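
Editorial note (not part of the commit): the macro now takes the module name as a string literal instead of deriving it with stringify!(), and the pub use $ident::*; re-export is gone, so generated protobuf types are no longer flattened into pb. That is why call sites throughout this diff change from forms like pb::SyncAuth to pb::sync::SyncAuth. A minimal, self-contained sketch of the difference, with plain modules standing in for the prost-generated code (the hkey/host_number fields are taken from the SyncAuth usage earlier in this diff):

// Illustrative stand-in; the real modules come from prost via include!().
mod pb {
    pub mod sync {
        #[derive(Debug, Default)]
        pub struct SyncAuth {
            pub hkey: String,
            pub host_number: u32,
        }
    }
    // Before this commit the macro also emitted `pub use sync::*;`,
    // flattening every module's types directly into `pb`.
}

fn main() {
    // After this commit: types are reached through their owning module.
    let auth = pb::sync::SyncAuth {
        hkey: "abc".into(),
        host_number: 0,
    };
    println!("{auth:?}");
}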

View File

@ -5,7 +5,7 @@ use crate::{
collection::Collection,
config::{BoolKey, StringKey},
error::Result,
pb::{
pb::config::{
preferences::{scheduling::NewReviewMix as NewRevMixPB, Editing, Reviewing, Scheduling},
Preferences,
},

View File

@ -330,7 +330,7 @@ impl Collection {
self.update_deck_stats(
updater.timing.days_elapsed,
usn,
pb::UpdateStatsRequest {
pb::scheduler::UpdateStatsRequest {
deck_id: updater.deck.id.0,
new_delta,
review_delta,

View File

@ -5,7 +5,7 @@ use super::timing::SchedTimingToday;
use crate::{
card::CardQueue,
config::SchedulerVersion,
pb::{
pb::scheduler::{
bury_or_suspend_cards_request::Mode as BuryOrSuspendMode,
unbury_deck_request::Mode as UnburyDeckMode,
},

View File

@ -14,7 +14,7 @@ pub(crate) struct CongratsInfo {
}
impl Collection {
pub fn congrats_info(&mut self) -> Result<pb::CongratsInfoResponse> {
pub fn congrats_info(&mut self) -> Result<pb::scheduler::CongratsInfoResponse> {
let deck = self.get_current_deck()?;
let today = self.timing_today()?.days_elapsed;
let info = self.storage.congrats_info(&deck, today)?;
@ -27,7 +27,7 @@ impl Collection {
((info.next_learn_due as i64) - self.learn_ahead_secs() as i64 - TimestampSecs::now().0)
.max(60) as u32
};
Ok(pb::CongratsInfoResponse {
Ok(pb::scheduler::CongratsInfoResponse {
learn_remaining: info.learn_count,
review_remaining: info.review_remaining,
new_remaining: info.new_remaining,
@ -51,7 +51,7 @@ mod test {
let info = col.congrats_info().unwrap();
assert_eq!(
info,
crate::pb::CongratsInfoResponse {
crate::pb::scheduler::CongratsInfoResponse {
learn_remaining: 0,
review_remaining: false,
new_remaining: false,

View File

@ -9,22 +9,25 @@ use crate::{
decks::{FilteredDeck, FilteredSearchOrder, FilteredSearchTerm},
error::{CustomStudyError, FilteredDeckError},
pb::{
self as pb,
custom_study_request::{cram::CramKind, Cram, Value as CustomStudyValue},
scheduler::custom_study_request::{cram::CramKind, Cram, Value as CustomStudyValue},
{self as pb},
},
prelude::*,
search::{JoinSearches, Negated, PropertyKind, RatingKind, SearchNode, StateKind},
};
impl Collection {
pub fn custom_study(&mut self, input: pb::CustomStudyRequest) -> Result<OpOutput<()>> {
pub fn custom_study(
&mut self,
input: pb::scheduler::CustomStudyRequest,
) -> Result<OpOutput<()>> {
self.transact(Op::CreateCustomStudy, |col| col.custom_study_inner(input))
}
pub fn custom_study_defaults(
&mut self,
deck_id: DeckId,
) -> Result<pb::CustomStudyDefaultsResponse> {
) -> Result<pb::scheduler::CustomStudyDefaultsResponse> {
// daily counts
let deck = self.get_deck(deck_id)?.or_not_found(deck_id)?;
let normal = deck.normal()?;
@ -70,11 +73,11 @@ impl Collection {
);
let mut all_tags: Vec<_> = self.all_tags_in_deck(deck_id)?.into_iter().collect();
all_tags.sort_unstable();
let tags: Vec<pb::custom_study_defaults_response::Tag> = all_tags
let tags: Vec<pb::scheduler::custom_study_defaults_response::Tag> = all_tags
.into_iter()
.map(|tag| {
let tag = tag.into_inner();
pb::custom_study_defaults_response::Tag {
pb::scheduler::custom_study_defaults_response::Tag {
include: include_tags.contains(&tag),
exclude: exclude_tags.contains(&tag),
name: tag,
@ -82,7 +85,7 @@ impl Collection {
})
.collect();
Ok(pb::CustomStudyDefaultsResponse {
Ok(pb::scheduler::CustomStudyDefaultsResponse {
tags,
extend_new,
extend_review,
@ -95,7 +98,7 @@ impl Collection {
}
impl Collection {
fn custom_study_inner(&mut self, input: pb::CustomStudyRequest) -> Result<()> {
fn custom_study_inner(&mut self, input: pb::scheduler::CustomStudyRequest) -> Result<()> {
let mut deck = self
.storage
.get_deck(input.deck_id.into())?
@ -292,8 +295,8 @@ mod test {
use super::*;
use crate::{
collection::open_test_collection,
pb::{
scheduler::custom_study_request::{cram::CramKind, Cram, Value},
pb::scheduler::{
custom_study_request::{cram::CramKind, Cram, Value},
CustomStudyRequest,
},
};

View File

@ -266,7 +266,7 @@ mod test {
use crate::{
card::{CardQueue, CardType},
collection::open_test_collection,
pb::deck_config::config::{NewCardGatherPriority, NewCardSortOrder},
pb::deckconfig::deck_config::config::{NewCardGatherPriority, NewCardSortOrder},
};
impl Collection {

View File

@ -737,8 +737,8 @@ mod test {
use Node::*;
use SearchNode::*;
assert_eq!(parse("")?, vec![Search(SearchNode::WholeCollection)]);
assert_eq!(parse(" ")?, vec![Search(SearchNode::WholeCollection)]);
assert_eq!(parse("")?, vec![Search(WholeCollection)]);
assert_eq!(parse(" ")?, vec![Search(WholeCollection)]);
// leading/trailing/interspersed whitespace
assert_eq!(

View File

@ -14,7 +14,7 @@ use crate::timestamp::TimestampSecs;
pub(crate) fn default_on_invalid<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
T: Default + DeTrait<'de>,
D: serde::de::Deserializer<'de>,
D: Deserializer<'de>,
{
let v: Value = DeTrait::deserialize(deserializer)?;
Ok(T::deserialize(v).unwrap_or_default())
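
Editorial aside (not part of the commit): default_on_invalid above deserializes into a serde_json::Value first and falls back to T::default() when that value cannot be converted to T. A self-contained sketch of the same pattern, assuming serde (with the derive feature) and serde_json as dependencies; the Config and limit names are illustrative, not Anki's:

use serde::{Deserialize, Deserializer};
use serde_json::Value;

// Deserialize T, but fall back to T::default() if the value is malformed.
fn default_on_invalid<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
    T: Default + Deserialize<'de>,
    D: Deserializer<'de>,
{
    let v: Value = Deserialize::deserialize(deserializer)?;
    Ok(T::deserialize(v).unwrap_or_default())
}

#[derive(Debug, Deserialize)]
struct Config {
    #[serde(deserialize_with = "default_on_invalid")]
    limit: u32,
}

fn main() {
    // "oops" is not a number, so `limit` falls back to 0 instead of
    // failing the whole parse.
    let cfg: Config = serde_json::from_str(r#"{ "limit": "oops" }"#).unwrap();
    println!("{cfg:?}");
}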

View File

@ -9,7 +9,7 @@ use crate::{
};
impl Collection {
pub fn card_stats(&mut self, cid: CardId) -> Result<pb::CardStatsResponse> {
pub fn card_stats(&mut self, cid: CardId) -> Result<pb::stats::CardStatsResponse> {
let card = self.storage.get_card(cid)?.or_not_found(cid)?;
let note = self
.storage
@ -27,7 +27,7 @@ impl Collection {
let (average_secs, total_secs) = average_and_total_secs_strings(&revlog);
let (due_date, due_position) = self.due_date_and_position(&card)?;
Ok(pb::CardStatsResponse {
Ok(pb::stats::CardStatsResponse {
card_id: card.id.into(),
note_id: card.note_id.into(),
deck: deck.human_name(),
@ -92,8 +92,8 @@ fn average_and_total_secs_strings(revlog: &[RevlogEntry]) -> (f32, f32) {
}
}
fn stats_revlog_entry(entry: &RevlogEntry) -> pb::card_stats_response::StatsRevlogEntry {
pb::card_stats_response::StatsRevlogEntry {
fn stats_revlog_entry(entry: &RevlogEntry) -> pb::stats::card_stats_response::StatsRevlogEntry {
pb::stats::card_stats_response::StatsRevlogEntry {
time: entry.id.as_secs().0,
review_kind: entry.review_kind.into(),
button_chosen: entry.button_chosen as u32,

View File

@ -14,13 +14,13 @@ impl Collection {
&mut self,
search: &str,
days: u32,
) -> Result<pb::GraphsResponse> {
) -> Result<pb::stats::GraphsResponse> {
let guard = self.search_cards_into_table(search, SortMode::NoOrder)?;
let all = search.trim().is_empty();
guard.col.graph_data(all, days)
}
fn graph_data(&mut self, all: bool, days: u32) -> Result<pb::GraphsResponse> {
fn graph_data(&mut self, all: bool, days: u32) -> Result<pb::stats::GraphsResponse> {
let timing = self.timing_today()?;
let revlog_start = if days > 0 {
timing
@ -41,7 +41,7 @@ impl Collection {
.get_pb_revlog_entries_for_searched_cards(revlog_start)?
};
Ok(pb::GraphsResponse {
Ok(pb::stats::GraphsResponse {
cards: cards.into_iter().map(Into::into).collect(),
revlog,
days_elapsed: timing.days_elapsed,
@ -51,8 +51,8 @@ impl Collection {
})
}
pub(crate) fn get_graph_preferences(&self) -> pb::GraphPreferences {
pb::GraphPreferences {
pub(crate) fn get_graph_preferences(&self) -> pb::stats::GraphPreferences {
pb::stats::GraphPreferences {
calendar_first_day_of_week: self.get_first_day_of_week() as i32,
card_counts_separate_inactive: self
.get_config_bool(BoolKey::CardCountsSeparateInactive),
@ -61,7 +61,10 @@ impl Collection {
}
}
pub(crate) fn set_graph_preferences(&mut self, prefs: pb::GraphPreferences) -> Result<()> {
pub(crate) fn set_graph_preferences(
&mut self,
prefs: pb::stats::GraphPreferences,
) -> Result<()> {
self.set_first_day_of_week(match prefs.calendar_first_day_of_week {
1 => Weekday::Monday,
5 => Weekday::Friday,
@ -77,9 +80,9 @@ impl Collection {
}
}
impl From<RevlogEntry> for pb::RevlogEntry {
impl From<RevlogEntry> for pb::stats::RevlogEntry {
fn from(e: RevlogEntry) -> Self {
pb::RevlogEntry {
pb::stats::RevlogEntry {
id: e.id.0,
cid: e.cid.0,
usn: e.usn.0,

View File

@ -29,7 +29,7 @@ use crate::{
};
impl FromSql for CardType {
fn column_result(value: ValueRef<'_>) -> std::result::Result<Self, FromSqlError> {
fn column_result(value: ValueRef<'_>) -> result::Result<Self, FromSqlError> {
if let ValueRef::Integer(i) = value {
Ok(Self::try_from(i as u8).map_err(|_| FromSqlError::InvalidType)?)
} else {
@ -39,7 +39,7 @@ impl FromSql for CardType {
}
impl FromSql for CardQueue {
fn column_result(value: ValueRef<'_>) -> std::result::Result<Self, FromSqlError> {
fn column_result(value: ValueRef<'_>) -> result::Result<Self, FromSqlError> {
if let ValueRef::Integer(i) = value {
Ok(Self::try_from(i as i8).map_err(|_| FromSqlError::InvalidType)?)
} else {

View File

@ -153,7 +153,7 @@ impl SqliteStorage {
// caller should ensure name unique
pub(crate) fn add_deck(&self, deck: &mut Deck) -> Result<()> {
assert!(deck.id.0 == 0);
assert_eq!(deck.id.0, 0);
deck.id.0 = self
.db
.prepare(include_str!("alloc_id.sql"))?

View File

@ -51,7 +51,7 @@ impl super::SqliteStorage {
/// If fields have been modified, caller must call note.prepare_for_update() prior to calling this.
pub(crate) fn update_note(&self, note: &Note) -> Result<()> {
assert!(note.id.0 != 0);
assert_ne!(note.id.0, 0);
let mut stmt = self.db.prepare_cached(include_str!("update.sql"))?;
stmt.execute(params![
note.guid,
@ -68,7 +68,7 @@ impl super::SqliteStorage {
}
pub(crate) fn add_note(&self, note: &mut Note) -> Result<()> {
assert!(note.id.0 == 0);
assert_eq!(note.id.0, 0);
let mut stmt = self.db.prepare_cached(include_str!("add.sql"))?;
stmt.execute(params![
TimestampMillis::now(),

View File

@ -226,7 +226,7 @@ impl SqliteStorage {
}
pub(crate) fn add_notetype(&self, nt: &mut Notetype) -> Result<()> {
assert!(nt.id.0 == 0);
assert_eq!(nt.id.0, 0);
let mut stmt = self.db.prepare_cached(include_str!("add_notetype.sql"))?;
let mut config_bytes = vec![];

View File

@ -113,7 +113,7 @@ impl SqliteStorage {
pub(crate) fn get_pb_revlog_entries_for_searched_cards(
&self,
after: TimestampSecs,
) -> Result<Vec<pb::RevlogEntry>> {
) -> Result<Vec<pb::stats::RevlogEntry>> {
self.db
.prepare_cached(concat!(
include_str!("get.sql"),
@ -137,7 +137,7 @@ impl SqliteStorage {
pub(crate) fn get_all_revlog_entries(
&self,
after: TimestampSecs,
) -> Result<Vec<pb::RevlogEntry>> {
) -> Result<Vec<pb::stats::RevlogEntry>> {
self.db
.prepare_cached(concat!(include_str!("get.sql"), " where id >= ?"))?
.query_and_then([after.0 * 1000], |r| row_to_revlog_entry(r).map(Into::into))?

View File

@ -81,8 +81,8 @@ mod test {
#[test]
#[allow(clippy::assertions_on_constants)]
fn assert_18_is_latest_schema_version() {
assert!(
18 == SCHEMA_MAX_VERSION,
assert_eq!(
18, SCHEMA_MAX_VERSION,
"must implement SqliteStorage::downgrade_to(SchemaVersion::V18)"
);
}

View File

@ -6,7 +6,7 @@ use std::path::PathBuf;
use serde::{Deserialize, Serialize};
use super::{Chunk, Graves, SanityCheckCounts, UnchunkedChanges};
use crate::{io::read_file, pb::sync_server_method_request::Method, prelude::*};
use crate::{io::read_file, pb::sync::sync_server_method_request::Method, prelude::*};
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub enum SyncRequest {

View File

@ -65,7 +65,7 @@ pub struct Timeouts {
impl Timeouts {
pub fn new() -> Self {
let io_secs = if std::env::var("LONG_IO_TIMEOUT").is_ok() {
let io_secs = if env::var("LONG_IO_TIMEOUT").is_ok() {
3600
} else {
300
@ -314,7 +314,7 @@ impl HttpSyncClient {
usize,
impl Stream<Item = std::result::Result<Bytes, reqwest::Error>>,
)> {
let resp: reqwest::Response = self.request_bytes("download", b"{}", true).await?;
let resp: Response = self.request_bytes("download", b"{}", true).await?;
let len = resp.content_length().unwrap_or_default();
Ok((len as usize, resp.bytes_stream()))
}
@ -379,7 +379,7 @@ where
}
fn sync_endpoint(host_number: u32) -> String {
if let Ok(endpoint) = std::env::var("SYNC_ENDPOINT") {
if let Ok(endpoint) = env::var("SYNC_ENDPOINT") {
endpoint
} else {
let suffix = if host_number > 0 {
@ -484,13 +484,13 @@ mod test {
#[test]
fn http_client() -> Result<()> {
let user = match std::env::var("TEST_SYNC_USER") {
let user = match env::var("TEST_SYNC_USER") {
Ok(s) => s,
Err(_) => {
return Ok(());
}
};
let pass = std::env::var("TEST_SYNC_PASS").unwrap();
let pass = env::var("TEST_SYNC_PASS").unwrap();
env_logger::init();
let rt = Runtime::new().unwrap();

View File

@ -23,7 +23,7 @@ use crate::{
io::atomic_rename,
notes::Note,
notetype::{Notetype, NotetypeSchema11},
pb::{sync_status_response, SyncStatusResponse},
pb::sync::{sync_status_response, SyncStatusResponse},
prelude::*,
revlog::RevlogEntry,
serde::{default_on_invalid, deserialize_int_from_number},

View File

@ -6,7 +6,7 @@ use std::{collections::HashSet, iter::Peekable};
use unicase::UniCase;
use super::{immediate_parent_name_unicase, Tag};
use crate::{pb::TagTreeNode, prelude::*};
use crate::{pb::tags::TagTreeNode, prelude::*};
impl Collection {
pub fn tag_tree(&mut self) -> Result<TagTreeNode> {

View File

@ -78,7 +78,7 @@ fn tokens<'a>(template: &'a str) -> Box<dyn Iterator<Item = TemplateResult<Token
}
fn new_tokens(mut data: &str) -> impl Iterator<Item = TemplateResult<Token>> {
std::iter::from_fn(move || {
iter::from_fn(move || {
if data.is_empty() {
return None;
}
@ -158,7 +158,7 @@ fn alternate_handlebar_token(s: &str) -> nom::IResult<&str, Token> {
}
fn legacy_tokens(mut data: &str) -> impl Iterator<Item = TemplateResult<Token>> {
std::iter::from_fn(move || {
iter::from_fn(move || {
if data.is_empty() {
return None;
}

Some files were not shown because too many files have changed in this diff.