42cbe42f06
* Add crate csv
* Add start of csv importing on backend
* Add Mnemosyne serializer
* Add csv and json importing on backend
* Add plaintext importing on frontend
* Add csv metadata extraction on backend
* Add csv importing with GUI
* Fix missing dfa file in build
  Added compile_data_attr, then re-ran cargo/update.py.
* Don't use doubly buffered reader in csv
* Escape HTML entities if CSV is not HTML
  Also use name 'is_html' consistently.
* Use decimal number as foreign ease (like '2.5')
* ForeignCard.ivl → ForeignCard.interval
* Only allow fixed set of CSV delimiters
* Map timestamp of ForeignCard to native due time
* Don't trim CSV records
* Document use of empty strings for defaults
* Avoid creating CardGenContexts for every note
  This requires CardGenContext to be generic, so it works both with an owned
  and a borrowed notetype (see the sketch after this list).
* Show all accepted file types in import file picker
* Add import_json_file()
* factor → ease_factor
* delimter_from_value → delimiter_from_value
* Map columns to fields, not the other way around
* Fall back to current config for csv metadata
* Add start of new import csv screen
* Temporary fix for compilation issue on Linux/Mac
* Disable jest bazel action for import-csv
  Jest fails with an error code if no tests are available, but this would not
  be noticeable on Windows as Jest is not run there.
* Fix field mapping issue
* Revert "Temporary fix for compilation issue on Linux/Mac"
  This reverts commit 21f8a261408cdae49ec031aa21a1b659c4f66d82.
* Add HtmlSwitch and move Switch to components
* Fix spacing and make selectors consistent
* Fix shortcut tooltip
* Place import button at the top with path
* Fix meta column indices
* Remove NotetypeForString
* Fix queue and type of foreign cards
* Support different dupe resolution strategies
* Allow dupe resolution selection when importing CSV
* Test import of unnormalized text
  Close #1863.
* Fix logging of foreign notes
* Implement CSV exports
* Use db_scalar() in notes_table_len()
* Rework CSV metadata
  - Notetypes and decks are either defined by a global id or by a column.
  - If a notetype id is provided, its field map must also be specified.
  - If a notetype column is provided, fields are now mapped by index instead
    of name at import time. So the first non-meta column is used for the first
    field of every note, regardless of notetype. This makes importing easier
    and should improve compatibility with files without a notetype column.
  - Ensure first field can be mapped to a column.
  - Meta columns must be defined as `#[meta name]:[column index]` instead of
    in the `#columns` tag.
  - Column labels contain the raw names defined by the file and must be
    prettified by the frontend.
* Adjust frontend to new backend column mapping
* Add force flags for is_html and delimiter
* Detect if CSV is HTML by field content
* Update dupe resolution labels
* Simplify selectors
* Fix coalescence of oneofs in TS
* Disable meta columns from selection
  Plus a lot of refactoring.
* Make import button stick to the bottom
* Write delimiter and html flag into csv
* Refetch field map after notetype change
* Fix log labels for csv import
* Log notes whose deck/notetype was missing
* Fix hiding of empty log queues
* Implement adding tags to all notes of a csv
* Fix dupe resolution not being set in log
* Implement adding tags to updated notes of a csv
* Check first note field is not empty
* Temporary fix for build on Linux/Mac
* Fix inverted html check (dae)
* Remove unused ftl string
* Delimiter → Separator
* Remove commented-out line
* Don't accept .json files
* Tweak tag ftl strings
* Remove redundant blur call
* Strip sound and add spaces in csv export
* Export HTML by default
* Fix unset deck in Mnemosyne import
  Also accept both numbers and strings for notetypes and decks in JSON.
* Make DupeResolution::Update the default
* Fix missing dot in extension
* Make column indices 1-based
* Remove StickContainer from TagEditor
  Fixes line breaking, border and z index on ImportCsvPage.
* Assign different key combos to tag editors
* Log all updated duplicates
  Add a log field for the true number of found notes.
* Show identical notes as skipped
* Split tag-editor into separate ts module (dae)
* Add progress for CSV export
* Add progress for text import
* Tidy-ups after tag-editor split (dae)
  - import-csv no longer depends on editor
  - remove some commented lines
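The "generic over ownership" change to CardGenContext mentioned above can be illustrated with a small sketch. This is hypothetical code, not Anki's actual types: the real CardGenContext has different fields and methods, but the pattern of accepting either an owned or a borrowed notetype through one generic parameter is the same idea.

    // Hypothetical sketch; names and fields do not match Anki's real types.
    use std::borrow::Borrow;

    struct Notetype {
        name: String,
    }

    // Generic over how the notetype is held: works with both Notetype and &Notetype.
    struct CardGenContext<N: Borrow<Notetype>> {
        notetype: N,
    }

    impl<N: Borrow<Notetype>> CardGenContext<N> {
        fn new(notetype: N) -> Self {
            Self { notetype }
        }

        fn notetype_name(&self) -> &str {
            &self.notetype.borrow().name
        }
    }

    fn main() {
        // Owned: the context takes ownership of the notetype.
        let owned = CardGenContext::new(Notetype { name: "Basic".into() });
        // Borrowed: the same context type can be built around a reference,
        // so one notetype can back many contexts without cloning.
        let nt = Notetype { name: "Cloze".into() };
        let borrowed = CardGenContext::new(&nt);
        assert_eq!(owned.notetype_name(), "Basic");
        assert_eq!(borrowed.notetype_name(), "Cloze");
    }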
121 lines
3.7 KiB
Rust
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use std::{env, fmt::Write, path::PathBuf};

/// Custom prost service generator: for each protobuf service it emits a
/// module containing a `Service` trait with one method per RPC and a
/// `run_method()` dispatcher that decodes and encodes message bytes.
struct CustomGenerator {}

/// Write the `Service` trait for `service` into `buf`: a `run_method()` that
/// dispatches on a numeric method index, plus one required method per RPC.
fn write_method_trait(buf: &mut String, service: &prost_build::Service) {
    buf.push_str(
        r#"
pub trait Service {
    fn run_method(&self, method: u32, input: &[u8]) -> Result<Vec<u8>> {
        match method {
"#,
    );
    // One match arm per method: decode the input message, call the handler,
    // then encode the output message back into bytes.
    for (idx, method) in service.methods.iter().enumerate() {
        write!(
            buf,
            concat!("            ",
                "{idx} => {{ let input = super::{input_type}::decode(input)?;\n",
                "let output = self.{rust_method}(input)?;\n",
                "let mut out_bytes = Vec::new(); output.encode(&mut out_bytes)?; Ok(out_bytes) }}, "),
            idx = idx,
            input_type = method.input_type,
            rust_method = method.name
        )
        .unwrap();
    }
    buf.push_str(
        r#"
            _ => Err(crate::error::AnkiError::invalid_input("invalid command")),
        }
    }
"#,
    );

    // Declare the per-method trait functions that the backend must implement.
    for method in &service.methods {
        write!(
            buf,
            concat!(
                "    fn {method_name}(&self, input: super::{input_type}) -> ",
                "Result<super::{output_type}>;\n"
            ),
            method_name = method.name,
            input_type = method.input_type,
            output_type = method.output_type
        )
        .unwrap();
    }
    buf.push_str("}\n");
}

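// For illustration only (not part of the original file): for a hypothetical
// service with a single RPC `import_csv(ImportCsvRequest) -> ImportResponse`,
// the trait written by `write_method_trait` looks roughly like this:
//
//     pub trait Service {
//         fn run_method(&self, method: u32, input: &[u8]) -> Result<Vec<u8>> {
//             match method {
//                 0 => { let input = super::ImportCsvRequest::decode(input)?;
//                        let output = self.import_csv(input)?;
//                        let mut out_bytes = Vec::new();
//                        output.encode(&mut out_bytes)?;
//                        Ok(out_bytes) },
//                 _ => Err(crate::error::AnkiError::invalid_input("invalid command")),
//             }
//         }
//         fn import_csv(&self, input: super::ImportCsvRequest) -> Result<super::ImportResponse>;
//     }
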
impl prost_build::ServiceGenerator for CustomGenerator {
    fn generate(&mut self, service: prost_build::Service, buf: &mut String) {
        // Wrap the generated trait in a module named after the service,
        // e.g. `FooService` becomes `pub mod foo_service`.
        write!(
            buf,
            "pub mod {name}_service {{
use prost::Message;
use crate::error::Result;
",
            name = service.name.replace("Service", "").to_ascii_lowercase()
        )
        .unwrap();
        write_method_trait(buf, &service);
        buf.push('}');
    }
}

fn service_generator() -> Box<dyn prost_build::ServiceGenerator> {
    Box::new(CustomGenerator {})
}

/// Generate Rust code for all .proto files under the proto root into OUT_DIR,
/// registering a cargo rerun trigger for each input file.
pub fn write_backend_proto_rs() {
    // PROTO_TOP, if set, points into the proto tree; its parent is the root.
    let proto_dir = if let Ok(proto) = env::var("PROTO_TOP") {
        PathBuf::from(proto).parent().unwrap().to_owned()
    } else {
        PathBuf::from("../proto")
    };

    // Collect every .proto file in the listed subfolders.
    let subfolders = &["anki"];
    let mut paths = vec![];
    for subfolder in subfolders {
        for entry in proto_dir.join(subfolder).read_dir().unwrap() {
            let entry = entry.unwrap();
            let path = entry.path();
            if path
                .file_name()
                .unwrap()
                .to_str()
                .unwrap()
                .ends_with(".proto")
            {
                println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
                paths.push(path);
            }
        }
    }

    let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
    let mut config = prost_build::Config::new();
    config
        .out_dir(&out_dir)
        .service_generator(service_generator())
        // Extra derives on selected generated types (enum iteration and serde support).
        .type_attribute(
            "Deck.Filtered.SearchTerm.Order",
            "#[derive(strum::EnumIter)]",
        )
        .type_attribute("HelpPageLinkRequest.HelpPage", "#[derive(strum::EnumIter)]")
        .type_attribute("CsvMetadata.Delimiter", "#[derive(strum::EnumIter)]")
        .type_attribute(
            "Preferences.BackupLimits",
            "#[derive(Copy, serde_derive::Deserialize, serde_derive::Serialize)]",
        )
        .type_attribute(
            "ImportCsvRequest.DupeResolution",
            "#[derive(serde_derive::Deserialize, serde_derive::Serialize)]",
        )
        .compile_protos(paths.as_slice(), &[proto_dir])
        .unwrap();
}
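For context, a minimal sketch of how this generator might be driven from a build script. This is not Anki's actual build wiring, which may differ; the include! path is only indicative (prost writes one generated file per protobuf package into OUT_DIR).

    // Hypothetical build.rs; assumes write_backend_proto_rs() is reachable
    // from the build script, e.g. via a module or a build-dependency crate.
    fn main() {
        // Cargo provides OUT_DIR; PROTO_TOP may optionally point into the
        // proto tree, in which case its parent is used as the proto root.
        write_backend_proto_rs();
    }

    // The library side would then pull in the generated code, e.g.:
    //     include!(concat!(env!("OUT_DIR"), "/<package>.rs"));
    // where <package> is the protobuf package name.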