use fluent_syntax::ast::{Entry::Message, ResourceEntry};
use fluent_syntax::parser::parse;
use std::collections::HashMap;
use std::path::Path;
use std::{fs, path::PathBuf};
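
/// Parse the provided FTL source and return a sorted list of its message identifiers.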
fn get_identifiers(ftl_text: &str) -> Vec<String> {
    let res = parse(ftl_text).unwrap();
    let mut idents = vec![];

    for entry in res.body {
        if let ResourceEntry::Entry(Message(m)) = entry {
            idents.push(m.id.name.to_string());
        }
    }

    idents.sort_unstable();

    idents
}
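
/// Render the identifiers as a proto3 enum definition (FluentString).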
fn proto_enum(idents: &[String]) -> String {
    let mut buf = String::from(
        r#"// This file is automatically generated as part of the build process.

syntax = "proto3";
package FluentProto;
enum FluentString {
"#,
    );
    for (idx, s) in idents.iter().enumerate() {
        let name = s.replace("-", "_").to_uppercase();
        buf += &format!("  {} = {};\n", name, idx);
    }

    buf += "}\n";

    buf
}
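
/// Render the identifiers as a Rust slice constant (FLUENT_KEYS).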
fn rust_string_vec(idents: &[String]) -> String {
    let mut buf = String::from(
        r#"// This file is automatically generated as part of the build process.

pub(super) const FLUENT_KEYS: &[&str] = &[
"#,
    );

    for s in idents {
        buf += &format!("    \"{}\",\n", s);
    }

    buf += "];\n";

    buf
}
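
/// FTL sources, split into the English template files and per-language translations.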
struct FTLData {
    templates: Vec<String>,
    /// lang -> [FileContent]
    translations: HashMap<String, Vec<String>>,
}
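
/// Gather FTL data, either from an explicit list of files or from the source tree.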
fn get_ftl_data(ftl_list_path: Option<String>) -> FTLData {
    if let Some(path) = ftl_list_path {
        let sources = fs::read_to_string(&path).expect("missing ftl list");
        get_ftl_data_from_provided_files(sources)
    } else {
        get_ftl_data_from_source_tree()
    }
}
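
// For illustration only (the concrete paths and the "ja" language below are
// hypothetical, not taken from a real build; only the directory names matched
// on below appear in the code):
//   ".../rslib/ftl/media-check.ftl"         -> template from this source tree
//   ".../extra_ftl/templates/other.ftl"     -> extra template, merged in
//   ".../core/templates/media-check.ftl"    -> ignored; canonical copy is in ftl/
//   ".../core/ja/media-check.ftl"           -> translation for lang "ja"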
/// Split the space-separated list of provided files, and read their contents.
fn get_ftl_data_from_provided_files(sources: String) -> FTLData {
    let mut templates: Vec<String> = vec![];
    let mut translations: HashMap<String, Vec<String>> = HashMap::new();

    for path in sources.split(' ') {
        let path = path.trim();
        let mut elems = path.rsplit('/');
        let (_fname, first_parent, second_parent) = (
            elems.next().unwrap(),
            elems.next().unwrap(),
            elems.next().unwrap(),
        );
        let entry = std::fs::read_to_string(path).unwrap_or_else(|_| {
            // try parent folder, for cases when we're running from build script
            let path = Path::new("..").join(path);
            std::fs::read_to_string(path).unwrap()
        });
        match (first_parent, second_parent) {
            // templates in the rslib translation repo are ignored, as their canonical
            // form is part of this source tree
            ("templates", "core") => (),
            // templates from this source tree and extra_ftl get merged together
            ("ftl", _) => templates.push(entry),
            ("templates", _) => templates.push(entry),
            // and translations for a given language get merged together
            (lang, _) => translations
                .entry(lang.to_string())
                .or_default()
                .push(entry),
        }
    }

    FTLData {
        templates,
        translations,
    }
}

/// Called when no srcs.list file is available; we include the built-in
/// English templates, and no translations.
fn get_ftl_data_from_source_tree() -> FTLData {
    let mut templates: Vec<String> = vec![];
    for entry in fs::read_dir("ftl").unwrap() {
        let entry = entry.unwrap();
        let fname = entry.file_name().into_string().unwrap();
        if fname.ends_with(".ftl") {
            templates.push(fs::read_to_string(entry.path()).unwrap());
        }
    }

    FTLData {
        templates,
        translations: Default::default(),
    }
}

/// Returns a map of lang -> merged content; the merged templates are stored
/// under the "template" key.
fn merge_ftl_data(data: FTLData) -> HashMap<String, String> {
    data.translations
        .into_iter()
        .map(|(lang, content)| (lang, content.join("\n")))
        .chain(std::iter::once((
            "template".to_string(),
            data.templates.join("\n"),
        )))
        .collect()
}
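
/// Write one merged .ftl file per language into the given directory.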
fn write_merged_ftl_files(dir: &Path, data: &HashMap<String, String>) {
    for (lang, content) in data {
        let path = dir.join(format!("{}.ftl", lang));
        fs::write(&path, content).unwrap();
    }
}
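
/// Write the generated fluent_keys.rs into the given directory.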
fn write_fluent_keys_rs(dir: &Path, idents: &[String]) {
    let path = dir.join("fluent_keys.rs");
    fs::write(&path, rust_string_vec(idents)).unwrap();
}
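
/// Write the generated proto enum to the given path.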
fn write_fluent_proto_inner(path: &Path, idents: &[String]) {
    fs::write(path, proto_enum(idents)).unwrap();
}

/// Write fluent.proto into the provided dir.
/// Can be called separately to provide a proto
/// to downstream code.
pub fn write_fluent_proto(out_path: &str, ftl_list: String) {
    let merged_ftl = merge_ftl_data(get_ftl_data(Some(ftl_list)));
    let idents = get_identifiers(merged_ftl.get("template").unwrap());
    write_fluent_proto_inner(Path::new(out_path), &idents);
}

/// Write all ftl-related files into OUT_DIR.
pub fn write_ftl_files_and_fluent_rs() {
    let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
    let ftl_list = std::env::var("FTL_FILES").ok();
    let merged_ftl = merge_ftl_data(get_ftl_data(ftl_list));
    write_merged_ftl_files(&dir, &merged_ftl);

    let idents = get_identifiers(merged_ftl.get("template").unwrap());
    write_fluent_keys_rs(&dir, &idents);
    write_fluent_proto_inner(&dir.join("fluent.proto"), &idents);
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn all() {
        let idents = get_identifiers("key-one = foo\nkey-two = bar");
        assert_eq!(idents, vec!["key-one", "key-two"]);

        assert_eq!(
            proto_enum(&idents),
            r#"// This file is automatically generated as part of the build process.

syntax = "proto3";
package FluentProto;
enum FluentString {
  KEY_ONE = 0;
  KEY_TWO = 1;
}
"#
        );

        assert_eq!(
            rust_string_vec(&idents),
            r#"// This file is automatically generated as part of the build process.

pub(super) const FLUENT_KEYS: &[&str] = &[
    "key-one",
    "key-two",
];
"#
        );
    }
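
    // Illustrative sketch, not part of the original file: shows how
    // merge_ftl_data() joins per-language files with newlines and stores the
    // merged templates under the "template" key. The "ja" language and the
    // FTL snippets below are made up for the example.
    #[test]
    fn merge() {
        let mut translations: HashMap<String, Vec<String>> = HashMap::new();
        translations.insert(
            "ja".to_string(),
            vec!["a = 1".to_string(), "b = 2".to_string()],
        );
        let merged = merge_ftl_data(FTLData {
            templates: vec!["key-one = foo".to_string()],
            translations,
        });
        assert_eq!(merged.get("ja").unwrap(), "a = 1\nb = 2");
        assert_eq!(merged.get("template").unwrap(), "key-one = foo");
    }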
}