use fluent_syntax::ast::Entry;
use fluent_syntax::parser::Parser;
use std::path::Path;
use std::{collections::HashMap, env};
use std::{fs, path::PathBuf};

fn get_identifiers(ftl_text: &str) -> Vec<String> {
    let res = Parser::new(ftl_text).parse().unwrap();
    let mut idents = vec![];

    for entry in res.body {
        if let Entry::Message(m) = entry {
            idents.push(m.id.name.to_string());
        }
    }

    idents.sort_unstable();

    idents
}

fn proto_enum(idents: &[String]) -> String {
    let mut buf = String::from(
        r#"// This file is automatically generated as part of the build process.
syntax = "proto3";
package FluentProto;
enum FluentString {
"#,
    );
    for (idx, s) in idents.iter().enumerate() {
        let name = s.replace("-", "_").to_uppercase();
        buf += &format!("  {} = {};\n", name, idx);
    }
    buf += "}\n";

    buf
}

fn rust_string_vec(idents: &[String]) -> String {
    let mut buf = String::from(
        r#"// This file is automatically generated as part of the build process.
pub(super) const FLUENT_KEYS: &[&str] = &[
"#,
    );
    for s in idents {
        buf += &format!("    \"{}\",\n", s);
    }
    buf += "];\n";

    buf
}

struct FTLData {
    templates: Vec<String>,
    /// lang -> [FileContent]
    translations: HashMap<String, Vec<String>>,
}

impl FTLData {
    fn add_language_folder(&mut self, folder: &Path) {
        let lang = folder.file_name().unwrap().to_str().unwrap();
        let list = self.translations.entry(lang.to_string()).or_default();
        for entry in fs::read_dir(&folder).unwrap() {
            let entry = entry.unwrap();
            let text = fs::read_to_string(&entry.path()).unwrap();
            assert!(
                text.ends_with('\n'),
                "file was missing final newline: {:?}",
                entry
            );
            list.push(text);
        }
    }

    fn add_template_folder(&mut self, folder: &Path) {
        for entry in fs::read_dir(&folder).unwrap() {
            let entry = entry.unwrap();
            let text = fs::read_to_string(&entry.path()).unwrap();
            assert!(
                text.ends_with('\n'),
                "file was missing final newline: {:?}",
                entry
            );
            self.templates.push(text);
        }
    }
}

fn get_ftl_data() -> FTLData {
    let mut data = get_ftl_data_from_source_tree();

    let rslib_l10n = std::env::var("RSLIB_FTL_ROOT").ok();
    let extra_l10n = std::env::var("EXTRA_FTL_ROOT").ok();

    // core translations provided?
    if let Some(path) = rslib_l10n {
        let path = Path::new(&path);
        let core_folder = path.with_file_name("core");
        for entry in fs::read_dir(&core_folder).unwrap() {
            let entry = entry.unwrap();
            if entry.file_name().to_str().unwrap() == "templates" {
                // ignore source ftl files, as we've already extracted them from the source tree
                continue;
            }
            data.add_language_folder(&entry.path());
        }
    }

    // extra templates/translations provided?
    if let Some(path) = extra_l10n {
        let mut path = PathBuf::from(path);
        // drop l10n.toml filename to get folder
        path.pop();
        // look for subfolders
        for outer_entry in fs::read_dir(&path).unwrap() {
            let outer_entry = outer_entry.unwrap();
            if outer_entry.file_type().unwrap().is_dir() {
                // process folder
                for entry in fs::read_dir(&outer_entry.path()).unwrap() {
                    let entry = entry.unwrap();
                    if entry.file_name().to_str().unwrap() == "templates" {
                        data.add_template_folder(&entry.path());
                    } else {
                        data.add_language_folder(&entry.path());
                    }
                }
            }
        }
    }

    data
}
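// Note on the expected on-disk layout (inferred from the traversal above; an
// illustrative summary, not a guarantee from upstream docs): RSLIB_FTL_ROOT is
// treated as a file path whose sibling "core" folder contains one subfolder per
// language plus a "templates" folder (skipped, since the English sources are
// taken from the source tree), while EXTRA_FTL_ROOT points at an l10n.toml whose
// parent directory holds module subfolders, each again split into a "templates"
// folder and per-language folders.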
/// Extracts English text from ftl folder in source tree.
fn get_ftl_data_from_source_tree() -> FTLData {
    let mut templates: Vec<String> = vec![];

    let dir = if let Ok(srcfile) = env::var("FTL_SRC") {
        let mut path = PathBuf::from(srcfile);
        path.pop();
        path
    } else {
        PathBuf::from("ftl")
    };

    for entry in fs::read_dir(dir).unwrap() {
        let entry = entry.unwrap();
        let fname = entry.file_name().into_string().unwrap();
        if fname.ends_with(".ftl") {
            templates.push(fs::read_to_string(entry.path()).unwrap());
        }
    }

    FTLData {
        templates,
        translations: Default::default(),
    }
}

/// Map of lang->content; Template lang is "template".
fn merge_ftl_data(data: FTLData) -> HashMap<String, String> {
    data.translations
        .into_iter()
        .map(|(lang, content)| (lang, content.join("\n")))
        .chain(std::iter::once((
            "template".to_string(),
            data.templates.join("\n"),
        )))
        .collect()
}

fn write_merged_ftl_files(dir: &Path, data: &HashMap<String, String>) {
    for (lang, content) in data {
        let path = dir.join(format!("{}.ftl", lang));
        fs::write(&path, content).unwrap();
    }
}

fn write_fluent_keys_rs(dir: &Path, idents: &[String]) {
    let path = dir.join("fluent_keys.rs");
    fs::write(&path, rust_string_vec(idents)).unwrap();
}

fn write_fluent_proto_inner(path: &Path, idents: &[String]) {
    fs::write(&path, proto_enum(idents)).unwrap();
}

/// Write fluent.proto into the provided dir.
/// Can be called separately to provide a proto
/// to downstream code.
pub fn write_fluent_proto(out_path: &str) {
    let merged_ftl = merge_ftl_data(get_ftl_data());
    let idents = get_identifiers(merged_ftl.get("template").unwrap());
    write_fluent_proto_inner(Path::new(out_path), &idents);
}

/// Write all ftl-related files into OUT_DIR.
pub fn write_ftl_files_and_fluent_rs() {
    let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
    let merged_ftl = merge_ftl_data(get_ftl_data());
    write_merged_ftl_files(&dir, &merged_ftl);
    let idents = get_identifiers(merged_ftl.get("template").unwrap());
    write_fluent_keys_rs(&dir, &idents);
    write_fluent_proto_inner(&dir.join("fluent.proto"), &idents);
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn all() {
        let idents = get_identifiers("key-one = foo\nkey-two = bar");
        assert_eq!(idents, vec!["key-one", "key-two"]);
        assert_eq!(
            proto_enum(&idents),
            r#"// This file is automatically generated as part of the build process.
syntax = "proto3";
package FluentProto;
enum FluentString {
  KEY_ONE = 0;
  KEY_TWO = 1;
}
"#
        );
        assert_eq!(
            rust_string_vec(&idents),
            r#"// This file is automatically generated as part of the build process.
pub(super) const FLUENT_KEYS: &[&str] = &[
    "key-one",
    "key-two",
];
"#
        );
    }
}
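// Usage sketch (an illustrative assumption, not taken from the original file): a
// consuming crate would typically call write_ftl_files_and_fluent_rs() from its
// build script so the merged .ftl files, fluent_keys.rs and fluent.proto are
// generated into OUT_DIR. The module path `ftl` and the rerun-if-changed paths
// below are hypothetical.
//
// // build.rs
// fn main() {
//     // Regenerate when the source ftl files or the provided translation roots change.
//     println!("cargo:rerun-if-changed=ftl");
//     println!("cargo:rerun-if-env-changed=RSLIB_FTL_ROOT");
//     println!("cargo:rerun-if-env-changed=EXTRA_FTL_ROOT");
//     ftl::write_ftl_files_and_fluent_rs();
// }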