Add Rust bin to deprecate unused ftl entries (#2364)

* Add Rust bin to deprecate unused ftl entries

* Align function names with bin names

* Support passing in multiple ftl roots

* Use source instead of jsons for deprecating

* Fix CargoRun not working more than once (dae)

* Add ftl:deprecate (dae)

* Deprecate some strings (dae)

This is not all of the strings that are currently unused

* Check json files before deprecating; add allowlist (dae)

The scheduler messages we'll probably want to reuse for the v2->v3
transition, so I'd prefer to keep them undeprecated for now.

* Deprecate old bury options (dae)

* Support gathering usages from Kotlin files for AnkiDroid (dae)

* Update json scripts (dae)

* Remove old deprecation headers

* Parameterize JSON roots to keep

* Tweak deprecation message (dae)
This commit is contained in:
RumovZ 2023-02-07 02:56:14 +01:00 committed by GitHub
parent c824dd0b90
commit 855dc9d75b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 231 additions and 96 deletions

View File

@ -65,6 +65,16 @@ fn prepare_translations(build: &mut Build) -> Result<()> {
},
)?;
build.add(
"ftl:deprecate",
CargoRun {
binary_name: "deprecate_ftl_entries",
cargo_args: "-p anki_i18n_helpers",
bin_args: "ftl/core ftl/qt -- pylib qt rslib ts --keep ftl/usage",
deps: inputs!["ftl/core", "ftl/qt", "pylib", "qt", "rslib", "ts"],
},
)?;
Ok(())
}

View File

@ -239,6 +239,6 @@ impl BuildAction for CargoRun {
build.add_variable("binary", self.binary_name);
build.add_variable("cargo_args", self.cargo_args);
build.add_variable("bin_args", self.bin_args);
build.add_outputs("", vec!["phony"]);
build.add_outputs("", vec![format!("phony-{}", self.binary_name)]);
}
}

4
ftl/.gitignore vendored
View File

@ -1,2 +1,2 @@
usage
usage/*
!usage/no-deprecate.json

View File

@ -13,8 +13,6 @@ browsing-browser-options = Browser Options
browsing-buried = Buried
browsing-card = Card
browsing-cards = Cards
# Exactly one character representing 'Cards'; should differ from browsing-note-initial.
browsing-card-initial = C
browsing-card-list = Card List
browsing-cards-cant-be-manually-moved-into = Cards can't be manually moved into a filtered deck.
browsing-cards-deleted =
@ -61,8 +59,6 @@ browsing-no-flag = No Flag
browsing-no-selection = No cards or notes selected.
browsing-note = Note
browsing-notes = Notes
# Exactly one character representing 'Notes'; should differ from browsing-card-initial.
browsing-note-initial = N
browsing-optional-filter = Optional filter:
browsing-override-back-template = Override back template:
browsing-override-font = Override font:
@ -168,5 +164,10 @@ browsing-reparented-decks =
*[other] Renamed { $count } decks.
}
## obsolete; no need to translate
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
# Exactly one character representing 'Cards'; should differ from browsing-note-initial.
browsing-card-initial = C
# Exactly one character representing 'Notes'; should differ from browsing-card-initial.
browsing-note-initial = N

View File

@ -28,7 +28,7 @@ custom-study-available-new-cards-2 = Available new cards: { $countString }
custom-study-available-review-cards-2 = Available review cards: { $countString }
custom-study-available-child-count = ({ $count } in subdecks)
## DEPRECATED - you do not need to translate these.
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
custom-study-available-new-cards = Available new cards: { $count }
custom-study-available-review-cards = Available review cards: { $count }

View File

@ -98,17 +98,6 @@ deck-config-leech-action-tooltip =
## Burying section
deck-config-bury-title = Burying
deck-config-bury-new-siblings = Bury new siblings
deck-config-bury-review-siblings = Bury review siblings
deck-config-bury-interday-learning-siblings = Bury interday learning siblings
deck-config-bury-new-tooltip =
Whether other `new` cards of the same note (eg reverse cards, adjacent cloze deletions)
will be delayed until the next day.
deck-config-bury-review-tooltip = Whether other `review` cards of the same note will be delayed until the next day.
deck-config-bury-interday-learning-tooltip =
Whether other `learning` cards of the same note with intervals > 1 day
will be delayed until the next day.
deck-config-bury-siblings = Bury siblings
deck-config-do-not-bury = Do not bury siblings
deck-config-bury-if-new = Bury if new
@ -122,7 +111,7 @@ deck-config-bury-tooltip =
day. When enabled, Anki will automatically *bury* siblings, hiding them until the next
day. This option allows you to choose which kinds of cards may be buried when you answer
one of their siblings.
When using the V3 scheduler, interday learning cards can also be buried. Interday
learning cards are cards with a current learning step of one or more days.
@ -306,5 +295,16 @@ deck-config-maximum-answer-secs-above-recommended = Anki can schedule your revie
deck-config-which-deck = Which deck would you like?
## NO NEED TO TRANSLATE. These strings have been replaced with new versions, and will be removed in the future.
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
deck-config-bury-new-siblings = Bury new siblings
deck-config-bury-review-siblings = Bury review siblings
deck-config-bury-interday-learning-siblings = Bury interday learning siblings
deck-config-bury-new-tooltip =
Whether other `new` cards of the same note (eg reverse cards, adjacent cloze deletions)
will be delayed until the next day.
deck-config-bury-review-tooltip = Whether other `review` cards of the same note will be delayed until the next day.
deck-config-bury-interday-learning-tooltip =
Whether other `learning` cards of the same note with intervals > 1 day
will be delayed until the next day.

View File

@ -1,5 +1,3 @@
errors-invalid-input-empty = Invalid input.
errors-invalid-input-details = Invalid input: { $details }
errors-parse-number-fail = A number was invalid or out of range.
errors-filtered-parent-deck = Filtered decks can not have child decks.
errors-filtered-deck-required = This action can only be used on a filtered deck.
@ -18,3 +16,8 @@ errors-inconsistent-db-state = Your database appears to be in an inconsistent st
errors-bad-directive = Error in directive '{ $directive }': { $error }
errors-option-not-set = '{ $option }' not set
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
errors-invalid-input-empty = Invalid input.
errors-invalid-input-details = Invalid input: { $details }

View File

@ -82,7 +82,6 @@ importing-processed-media-file =
[one] Imported { $count } media file
*[other] Imported { $count } media files
}
importing-importing-collection = Importing collection...
importing-importing-file = Importing file...
importing-extracting = Extracting data...
importing-gathering = Gathering data...
@ -97,7 +96,6 @@ importing-processed-cards =
[one] Processed { $count } card...
*[other] Processed { $count } cards...
}
importing-unable-to-import-filename = Unable to import { $filename }: file type not supported
importing-existing-notes = Existing notes
# "Existing notes: Duplicate" (verb)
importing-duplicate = Duplicate
@ -108,3 +106,8 @@ importing-update = Update
importing-tag-all-notes = Tag all notes
importing-tag-updated-notes = Tag updated notes
importing-file = File
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
importing-importing-collection = Importing collection...
importing-unable-to-import-filename = Unable to import { $filename }: file type not supported

View File

@ -1,6 +1,5 @@
preferences-automatically-sync-on-profile-openclose = Automatically sync on profile open/close
preferences-backups = Backups
preferences-basic = Basic
preferences-change-deck-depending-on-note-type = Change deck depending on note type
preferences-changes-will-take-effect-when-you = Changes will take effect when you restart Anki.
preferences-hours-past-midnight = hours past midnight
@ -58,15 +57,19 @@ preferences-appearance = Appearance
preferences-general = General
preferences-style = Style
preferences-review = Review
preferences-reviewer = Reviewer
preferences-distractions = Distractions
preferences-minimalist-mode = Minimalist mode
preferences-editing = Editing
preferences-browsing = Browsing
preferences-default-deck = Default deck
preferences-account = AnkiWeb Account
preferences-media = Media
preferences-note = Note
preferences-scheduler = Scheduler
preferences-user-interface = User Interface
preferences-import-export = Import/Export
## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.
preferences-basic = Basic
preferences-reviewer = Reviewer
preferences-media = Media

3
ftl/update-ankidroid-usage.sh Executable file
View File

@ -0,0 +1,3 @@
#!/bin/bash
#
# Regenerate the AnkiDroid ftl usage list (ftl/usage/ankidroid.json).
# NOTE(review): requires a local checkout of Anki-Android at
# ~/Local/droid/Anki-Android — adjust the path for your machine.
cargo run --bin write_ftl_json ftl/usage/ankidroid.json ~/Local/droid/Anki-Android

View File

@ -1,13 +1,4 @@
#!/bin/bash
#
# This script can only be run by Damien, as it requires a copy of AnkiMobile's sources.
# A similar script could be added for AnkiDroid in the future.
#
set -e
scriptRoot=$(realpath $(dirname $0)/..)
sourceRoot=$(realpath $scriptRoot/../../mobile/ankimobile/src)
bazel run //rslib/i18n_helpers:write_ftl_json $scriptRoot/ftl/usage/ankimobile.json \
$sourceRoot
cargo run --bin write_ftl_json ftl/usage/ankimobile.json ../../mobile/ankimobile/src

View File

@ -1,9 +0,0 @@
#!/bin/bash
set -e
version=$1
root=$(realpath $(dirname $0)/..)
bazel run //rslib/i18n_helpers:write_ftl_json $root/ftl/usage/desktop-$version.json \
$root/{rslib,ts,pylib,qt}

View File

@ -0,0 +1,4 @@
[
"scheduling-update-soon",
"scheduling-update-later-button"
]

View File

@ -0,0 +1,49 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
/// Deprecate unused ftl entries by moving them to the bottom of the file and
/// adding a deprecation warning. An entry is considered unused if cannot be
/// found in a source or JSON file.
/// Arguments before `--` are roots of ftl files, arguments after that are
/// source roots. JSON roots must be preceded by `--keep` or `-k`.
fn main() {
let args = Arguments::new();
anki_i18n_helpers::garbage_collection::deprecate_ftl_entries(
&args.ftl_roots,
&args.source_roots,
&args.json_roots,
);
}
/// Command-line arguments, split into the three root groups the deprecation
/// tool operates on.
#[derive(Default)]
struct Arguments {
    // Roots scanned for .ftl files whose entries may be deprecated.
    ftl_roots: Vec<String>,
    // Roots scanned for source files that reference ftl entries.
    source_roots: Vec<String>,
    // Roots of json files listing entries that must be kept undeprecated.
    json_roots: Vec<String>,
}

impl Arguments {
    /// Parses the process arguments. Expected shape:
    /// `FTL_ROOT... -- SOURCE_ROOT... [--keep JSON_ROOT]...`
    fn new() -> Self {
        // skip(1): the first element of env::args() is the program path,
        // not a user argument, and must not end up in ftl_roots.
        Self::from_iter(std::env::args().skip(1))
    }

    /// Builds `Arguments` from an explicit argument sequence; split out from
    /// `new` so parsing can be exercised without real process arguments.
    fn from_iter(args: impl IntoIterator<Item = String>) -> Self {
        let mut parsed = Self::default();
        let mut past_separator = false;
        let mut keep_flag = false;
        for arg in args {
            match arg.as_str() {
                "--" => past_separator = true,
                "--keep" | "-k" => keep_flag = true,
                // The argument directly following --keep/-k is a json root.
                _ if keep_flag => {
                    keep_flag = false;
                    parsed.json_roots.push(arg);
                }
                _ if past_separator => parsed.source_roots.push(arg),
                _ => parsed.ftl_roots.push(arg),
            }
        }
        parsed
    }
}

View File

@ -3,9 +3,9 @@
/// Delete every entry in the ftl files that is not mentioned in another message
/// or a given json.
/// First argument is the root of the ftl files, second one is the root of the
/// json files.
/// First argument is the root of the json files, following are the roots of the
/// ftl files.
fn main() {
let args: Vec<String> = std::env::args().collect();
anki_i18n_helpers::garbage_collection::remove_unused_ftl_messages(&args[1], &args[2]);
anki_i18n_helpers::garbage_collection::garbage_collect_ftl_entries(&args[2..], &args[1]);
}

View File

@ -7,5 +7,5 @@
/// First argument is the target file name, following are source roots.
fn main() {
let args: Vec<String> = std::env::args().collect();
anki_i18n_helpers::garbage_collection::extract_ftl_references(&args[2..], &args[1]);
anki_i18n_helpers::garbage_collection::write_ftl_json(&args[2..], &args[1]);
}

View File

@ -7,6 +7,7 @@ use std::io::BufReader;
use std::iter::FromIterator;
use fluent_syntax::ast;
use fluent_syntax::ast::Resource;
use fluent_syntax::parser;
use lazy_static::lazy_static;
use regex::Regex;
@ -16,16 +17,14 @@ use walkdir::WalkDir;
use crate::serialize;
/// Extract references from all Rust, Python, TS, Svelte, Swift and Designer
/// files in the `roots`, convert them to kebab case and write them as a json to
/// the target file.
pub fn extract_ftl_references<S1: AsRef<str>, S2: AsRef<str>>(roots: &[S1], target: S2) {
let mut refs = HashSet::new();
for root in roots {
for_files_with_ending(root.as_ref(), "", |entry| {
extract_references_from_file(&mut refs, &entry)
})
}
// Group-comment text placed above deprecated entries; also used to detect and
// deduplicate a previously appended warning when a file is rewritten.
// NOTE(review): identifier is misspelled ("DEPCRATION" -> "DEPRECATION");
// renaming requires touching every usage site, so it is left as-is here.
const DEPCRATION_WARNING: &str =
"NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future.";
/// Extract references from all Rust, Python, TS, Svelte, Swift, Kotlin and
/// Designer files in the `roots`, convert them to kebab case and write them as
/// a json to the target file.
pub fn write_ftl_json<S1: AsRef<str>, S2: AsRef<str>>(roots: &[S1], target: S2) {
let refs = gather_ftl_references(roots);
let mut refs = Vec::from_iter(refs);
refs.sort();
serde_json::to_writer_pretty(
@ -37,42 +36,95 @@ pub fn extract_ftl_references<S1: AsRef<str>, S2: AsRef<str>>(roots: &[S1], targ
/// Delete every entry in `ftl_root` that is not mentioned in another message
/// or any json in `json_root`.
pub fn remove_unused_ftl_messages<S: AsRef<str>>(ftl_root: S, json_root: S) {
let mut used_ftls = HashSet::new();
import_used_messages(json_root.as_ref(), &mut used_ftls);
extract_nested_messages_and_terms(ftl_root.as_ref(), &mut used_ftls);
strip_unused_ftl_messages_and_terms(ftl_root.as_ref(), &used_ftls);
/// Deletes every entry in the given ftl roots that is not used. An entry
/// counts as used if it is listed in a json file under `json_root`, or is
/// referenced from inside another ftl message or term.
pub fn garbage_collect_ftl_entries(ftl_roots: &[impl AsRef<str>], json_root: impl AsRef<str>) {
let used_ftls = get_all_used_messages_and_terms(json_root.as_ref(), ftl_roots);
strip_unused_ftl_messages_and_terms(ftl_roots, &used_ftls);
}
fn for_files_with_ending(root: &str, file_ending: &str, mut op: impl FnMut(DirEntry)) {
for res in WalkDir::new(root) {
let entry = res.expect("failed to visit dir");
if entry.file_type().is_file()
&& entry
.file_name()
.to_str()
.expect("non-unicode filename")
.ends_with(file_ending)
{
op(entry);
/// Moves every entry in `ftl_roots` that is not mentioned in another message, a
/// source file or any json in `json_roots` to the bottom of its file below a
/// deprecation warning.
pub fn deprecate_ftl_entries(
    ftl_roots: &[impl AsRef<str>],
    source_roots: &[impl AsRef<str>],
    json_roots: &[impl AsRef<str>],
) {
    // An entry is "used" if a source file references it, a json allowlist
    // mentions it, or another ftl message/term embeds it.
    let mut used = gather_ftl_references(source_roots);
    import_messages_from_json(json_roots, &mut used);
    extract_nested_messages_and_terms(ftl_roots, &mut used);
    deprecate_unused_ftl_messages_and_terms(ftl_roots, &used);
}
/// Collects every entry name that is either listed in a json file under
/// `json_root` or referenced from inside another ftl message/term in
/// `ftl_roots`.
fn get_all_used_messages_and_terms(
    json_root: &str,
    ftl_roots: &[impl AsRef<str>],
) -> HashSet<String> {
    let mut used = HashSet::new();
    import_messages_from_json(&[json_root], &mut used);
    extract_nested_messages_and_terms(ftl_roots, &mut used);
    used
}
/// Recursively walks every root and invokes `op` on each regular file whose
/// name ends with `file_ending` (an empty ending matches all files).
/// Panics on unreadable directories or non-unicode file names.
fn for_files_with_ending(
    roots: &[impl AsRef<str>],
    file_ending: &str,
    mut op: impl FnMut(DirEntry),
) {
    for root in roots {
        for res in WalkDir::new(root.as_ref()) {
            let entry = res.expect("failed to visit dir");
            if !entry.file_type().is_file() {
                continue;
            }
            let name = entry.file_name().to_str().expect("non-unicode filename");
            if name.ends_with(file_ending) {
                op(entry);
            }
        }
    }
}
fn import_used_messages(json_root: &str, used_ftls: &mut HashSet<String>) {
for_files_with_ending(json_root, ".json", |entry| {
/// Scans every file under the given roots and returns the set of ftl entry
/// names referenced from them (in kebab case).
fn gather_ftl_references(roots: &[impl AsRef<str>]) -> HashSet<String> {
    let mut found = HashSet::new();
    // Empty ending: visit every file; the per-file extractor decides by
    // extension which reference pattern applies.
    for_files_with_ending(roots, "", |entry| {
        extract_references_from_file(&mut found, &entry)
    });
    found
}
/// Iterates over all .ftl files in `roots`, parses each one and passes the
/// AST to `op`. If `op` returns a new AST, the file is overwritten with its
/// serialized form; `None` leaves the file untouched.
fn rewrite_ftl_files(
    roots: &[impl AsRef<str>],
    mut op: impl FnMut(Resource<&str>) -> Option<Resource<&str>>,
) {
    for_files_with_ending(roots, ".ftl", |entry| {
        let source = fs::read_to_string(entry.path()).expect("failed to open file");
        let parsed = parser::parse(source.as_str()).expect("failed to parse ftl");
        if let Some(rewritten) = op(parsed) {
            fs::write(entry.path(), serialize::serialize(&rewritten))
                .expect("failed to write file");
        }
    });
}
/// Adds every entry name listed in any `.json` file under `json_roots` to
/// `entries`. Each json file is expected to hold a flat array of strings.
fn import_messages_from_json(json_roots: &[impl AsRef<str>], entries: &mut HashSet<String>) {
    for_files_with_ending(json_roots, ".json", |entry| {
        let file = fs::File::open(entry.path()).expect("failed to open file");
        let names: Vec<String> =
            serde_json::from_reader(BufReader::new(file)).expect("failed to parse json");
        entries.extend(names);
    })
}
fn extract_nested_messages_and_terms(ftl_root: &str, used_ftls: &mut HashSet<String>) {
fn extract_nested_messages_and_terms(
ftl_roots: &[impl AsRef<str>],
used_ftls: &mut HashSet<String>,
) {
lazy_static! {
static ref REFERENCE: Regex = Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap();
}
for_files_with_ending(ftl_root, ".ftl", |entry| {
for_files_with_ending(ftl_roots, ".ftl", |entry| {
let source = fs::read_to_string(entry.path()).expect("file not readable");
for caps in REFERENCE.captures_iter(&source) {
used_ftls.insert(caps[1].to_string());
@ -80,24 +132,48 @@ fn extract_nested_messages_and_terms(ftl_root: &str, used_ftls: &mut HashSet<Str
})
}
fn strip_unused_ftl_messages_and_terms(ftl_root: &str, used_ftls: &HashSet<String>) {
for_files_with_ending(ftl_root, ".ftl", |entry| {
let ftl = fs::read_to_string(entry.path()).expect("failed to open file");
let mut ast = parser::parse(ftl.as_str()).expect("failed to parse ftl");
/// Removes every message/term not contained in `used_ftls` from the ftl files
/// under `roots`. Files are only rewritten when at least one entry was
/// dropped.
fn strip_unused_ftl_messages_and_terms(roots: &[impl AsRef<str>], used_ftls: &HashSet<String>) {
    rewrite_ftl_files(roots, |mut ast| {
        let before = ast.body.len();
        ast.body.retain(entry_use_check(used_ftls));
        if ast.body.len() < before {
            Some(ast)
        } else {
            // Nothing removed: signal "leave file untouched".
            None
        }
    });
}
ast.body.retain(|entry| match entry {
ast::Entry::Message(msg) => used_ftls.contains(msg.id.name),
ast::Entry::Term(term) => used_ftls.contains(term.id.name),
_ => true,
});
if ast.body.len() < num_entries {
fs::write(entry.path(), serialize::serialize(&ast)).expect("failed to write file");
/// Moves every message/term not contained in `used_ftls` below a deprecation
/// warning at the bottom of its file. Files without unused entries are left
/// untouched.
fn deprecate_unused_ftl_messages_and_terms(roots: &[impl AsRef<str>], used_ftls: &HashSet<String>) {
    rewrite_ftl_files(roots, |ast| {
        let is_used = entry_use_check(used_ftls);
        // partition keeps the predicate-true entries in the first vec.
        let (mut kept, mut deprecated): (Vec<_>, Vec<_>) =
            ast.body.into_iter().partition(is_used);
        if deprecated.is_empty() {
            return None;
        }
        append_deprecation_warning(&mut kept);
        kept.append(&mut deprecated);
        Some(Resource { body: kept })
    });
}
/// Appends the deprecation warning as a group comment at the end of
/// `entries`, removing any warning left over from a previous run first so it
/// is never duplicated.
fn append_deprecation_warning(entries: &mut Vec<ast::Entry<&str>>) {
    entries.retain(|entry| {
        if let ast::Entry::GroupComment(ast::Comment { content }) = entry {
            // Drop a group comment whose first line is the warning text.
            content.first() != Some(&DEPCRATION_WARNING)
        } else {
            true
        }
    });
    entries.push(ast::Entry::GroupComment(ast::Comment {
        content: vec![DEPCRATION_WARNING],
    }));
}
/// Returns a predicate that is true for entries that should be kept: messages
/// and terms whose name appears in `used_ftls`, plus every non-message entry
/// (comments, junk) unconditionally.
fn entry_use_check(used_ftls: &HashSet<String>) -> impl Fn(&ast::Entry<&str>) -> bool + '_ {
    move |entry| {
        let name = match entry {
            ast::Entry::Message(msg) => msg.id.name,
            ast::Entry::Term(term) => term.id.name,
            _ => return true,
        };
        used_ftls.contains(name)
    }
}
fn extract_references_from_file(refs: &mut HashSet<String>, entry: &DirEntry) {
lazy_static! {
static ref SNAKECASE_TR: Regex = Regex::new(r"\Wtr\s*\.([0-9a-z_]+)\W").unwrap();
@ -113,6 +189,7 @@ fn extract_references_from_file(refs: &mut HashSet<String>, entry: &DirEntry) {
} else if file_name.ends_with(".ts")
|| file_name.ends_with(".svelte")
|| file_name.ends_with(".swift")
|| file_name.ends_with(".kt")
{
(&CAMELCASE_TR, camel_to_kebab_case)
} else if file_name.ends_with(".ui") {