2020-01-10 12:00:48 +01:00
|
|
|
// Copyright: Ankitects Pty Ltd and contributors
|
|
|
|
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|
|
|
|
2020-01-16 07:37:44 +01:00
|
|
|
use crate::err::{Result, TemplateError};
|
2020-01-10 12:04:52 +01:00
|
|
|
use crate::template_filters::apply_filters;
|
2020-01-20 10:12:34 +01:00
|
|
|
use crate::text::strip_av_tags;
|
2020-01-10 05:59:29 +01:00
|
|
|
use lazy_static::lazy_static;
|
2019-12-24 05:05:15 +01:00
|
|
|
use nom;
|
|
|
|
use nom::branch::alt;
|
|
|
|
use nom::bytes::complete::tag;
|
|
|
|
use nom::error::ErrorKind;
|
|
|
|
use nom::sequence::delimited;
|
2020-01-10 05:59:29 +01:00
|
|
|
use regex::Regex;
|
2019-12-29 23:12:44 +01:00
|
|
|
use std::borrow::Cow;
|
2019-12-24 05:05:15 +01:00
|
|
|
use std::collections::{HashMap, HashSet};
|
2020-01-10 09:02:26 +01:00
|
|
|
use std::iter;
|
2019-12-24 05:05:15 +01:00
|
|
|
|
|
|
|
/// Maps a field name to its ordinal (position) in the note type.
pub type FieldMap<'a> = HashMap<&'a str, u16>;

/// Result type used throughout template lexing/parsing/rendering.
type TemplateResult<T> = std::result::Result<T, TemplateError>;
|
2019-12-24 05:05:15 +01:00
|
|
|
|
|
|
|
// Lexing
|
|
|
|
//----------------------------------------
|
|
|
|
|
|
|
|
/// A lexed template token; each variant borrows from the template source.
#[derive(Debug)]
pub enum Token<'a> {
    /// Literal text outside any {{...}} handlebars.
    Text(&'a str),
    /// The inner text of {{...}} with no leading '#', '/' or '^'
    /// (may still contain "filter:" prefixes).
    Replacement(&'a str),
    /// Inner text of {{#...}}.
    OpenConditional(&'a str),
    /// Inner text of {{^...}}.
    OpenNegated(&'a str),
    /// Inner text of {{/...}}.
    CloseConditional(&'a str),
}
|
|
|
|
|
2020-01-08 11:29:04 +01:00
|
|
|
/// a span of text, terminated by {{ or end of string
|
|
|
|
pub(crate) fn text_until_open_handlebars(s: &str) -> nom::IResult<&str, &str> {
|
2019-12-24 05:05:15 +01:00
|
|
|
let end = s.len();
|
|
|
|
|
2020-01-08 11:29:04 +01:00
|
|
|
let limited_end = end.min(s.find("{{").unwrap_or(end));
|
|
|
|
let (output, input) = s.split_at(limited_end);
|
|
|
|
if output.is_empty() {
|
|
|
|
Err(nom::Err::Error((input, ErrorKind::TakeUntil)))
|
|
|
|
} else {
|
|
|
|
Ok((input, output))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// a span of text, terminated by }} or end of string
|
|
|
|
pub(crate) fn text_until_close_handlebars(s: &str) -> nom::IResult<&str, &str> {
|
|
|
|
let end = s.len();
|
|
|
|
|
|
|
|
let limited_end = end.min(s.find("}}").unwrap_or(end));
|
2019-12-24 05:05:15 +01:00
|
|
|
let (output, input) = s.split_at(limited_end);
|
|
|
|
if output.is_empty() {
|
|
|
|
Err(nom::Err::Error((input, ErrorKind::TakeUntil)))
|
|
|
|
} else {
|
|
|
|
Ok((input, output))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// text outside handlebars
fn text_token(s: &str) -> nom::IResult<&str, Token> {
    // wrap the consumed span in a Text token, leaving the remainder as input
    text_until_open_handlebars(s).map(|(input, output)| (input, Token::Text(output)))
}
|
|
|
|
|
|
|
|
/// text wrapped in handlebars
fn handle_token(s: &str) -> nom::IResult<&str, Token> {
    // parse "{{ ... }}", then classify the inner text by its leading character
    delimited(tag("{{"), text_until_close_handlebars, tag("}}"))(s)
        .map(|(input, output)| (input, classify_handle(output)))
}
|
|
|
|
|
|
|
|
/// classify handle based on leading character
|
|
|
|
fn classify_handle(s: &str) -> Token {
|
2020-01-11 09:02:24 +01:00
|
|
|
let start = s.trim_start_matches('{').trim();
|
2019-12-24 05:05:15 +01:00
|
|
|
if start.len() < 2 {
|
|
|
|
return Token::Replacement(start);
|
|
|
|
}
|
|
|
|
if start.starts_with('#') {
|
|
|
|
Token::OpenConditional(&start[1..].trim_start())
|
|
|
|
} else if start.starts_with('/') {
|
|
|
|
Token::CloseConditional(&start[1..].trim_start())
|
|
|
|
} else if start.starts_with('^') {
|
|
|
|
Token::OpenNegated(&start[1..].trim_start())
|
|
|
|
} else {
|
|
|
|
Token::Replacement(start)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Lex the next token from the start of `input`, preferring a complete
/// {{...}} handlebars token and falling back to a plain text span.
fn next_token(input: &str) -> nom::IResult<&str, Token> {
    alt((handle_token, text_token))(input)
}
|
|
|
|
|
2020-01-16 08:23:25 +01:00
|
|
|
fn tokens(template: &str) -> impl Iterator<Item = TemplateResult<Token>> {
|
2019-12-24 05:05:15 +01:00
|
|
|
let mut data = template;
|
|
|
|
|
|
|
|
std::iter::from_fn(move || {
|
|
|
|
if data.is_empty() {
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
match next_token(data) {
|
|
|
|
Ok((i, o)) => {
|
|
|
|
data = i;
|
|
|
|
Some(Ok(o))
|
|
|
|
}
|
2020-01-16 07:37:44 +01:00
|
|
|
Err(_e) => Some(Err(TemplateError::NoClosingBrackets(data.to_string()))),
|
2019-12-24 05:05:15 +01:00
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
// Parsing
|
|
|
|
//----------------------------------------
|
|
|
|
|
|
|
|
/// A node in the parsed template tree.
#[derive(Debug, PartialEq)]
enum ParsedNode<'a> {
    /// Literal text.
    Text(&'a str),
    /// A {{filter:key}} replacement; filters are stored in reverse of
    /// their textual order (the filter closest to the key comes first).
    Replacement {
        key: &'a str,
        filters: Vec<&'a str>,
    },
    /// {{#key}}...{{/key}}
    Conditional {
        key: &'a str,
        children: Vec<ParsedNode<'a>>,
    },
    /// {{^key}}...{{/key}}
    NegatedConditional {
        key: &'a str,
        children: Vec<ParsedNode<'a>>,
    },
}
|
|
|
|
|
|
|
|
/// A fully parsed template: the root list of nodes making up its body.
#[derive(Debug)]
pub struct ParsedTemplate<'a>(Vec<ParsedNode<'a>>);
|
|
|
|
|
|
|
|
impl ParsedTemplate<'_> {
|
2019-12-29 23:12:44 +01:00
|
|
|
/// Create a template from the provided text.
|
|
|
|
///
|
|
|
|
/// The legacy alternate syntax is not supported, so the provided text
|
|
|
|
/// should be run through without_legacy_template_directives() first.
|
2020-01-16 08:23:25 +01:00
|
|
|
pub fn from_text(template: &str) -> TemplateResult<ParsedTemplate> {
|
2019-12-24 05:05:15 +01:00
|
|
|
let mut iter = tokens(template);
|
|
|
|
Ok(Self(parse_inner(&mut iter, None)?))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-01-16 08:23:25 +01:00
|
|
|
/// Parse tokens into a node list, recursing for conditional bodies.
///
/// `open_tag` is the key of the conditional currently being parsed (None at
/// the top level); parsing returns to the caller when the matching closing
/// tag is encountered.
fn parse_inner<'a, I: Iterator<Item = TemplateResult<Token<'a>>>>(
    iter: &mut I,
    open_tag: Option<&'a str>,
) -> TemplateResult<Vec<ParsedNode<'a>>> {
    let mut nodes = vec![];

    while let Some(token) = iter.next() {
        use Token::*;
        nodes.push(match token? {
            Text(t) => ParsedNode::Text(t),
            Replacement(t) => {
                // split "filter2:filter1:key" from the right, so the key
                // comes out first and filters innermost-first
                let mut it = t.rsplit(':');
                ParsedNode::Replacement {
                    // rsplit always yields at least one item, so this
                    // unwrap can not fail
                    key: it.next().unwrap(),
                    filters: it.collect(),
                }
            }
            OpenConditional(t) => ParsedNode::Conditional {
                key: t,
                // recurse to gather children until the matching {{/t}}
                children: parse_inner(iter, Some(t))?,
            },
            OpenNegated(t) => ParsedNode::NegatedConditional {
                key: t,
                children: parse_inner(iter, Some(t))?,
            },
            CloseConditional(t) => {
                if let Some(open) = open_tag {
                    if open == t {
                        // matching closing tag, move back to parent
                        return Ok(nodes);
                    }
                }
                // closing tag without a matching open tag
                return Err(TemplateError::ConditionalNotOpen(t.to_string()));
            }
        });
    }

    // input exhausted; error if a conditional is still open
    if let Some(open) = open_tag {
        Err(TemplateError::ConditionalNotClosed(open.to_string()))
    } else {
        Ok(nodes)
    }
}
|
|
|
|
|
2020-01-12 06:15:46 +01:00
|
|
|
// Legacy support
|
|
|
|
//----------------------------------------
|
|
|
|
|
|
|
|
/// The directive that switches a template to the legacy "<% %>" syntax.
static ALT_HANDLEBAR_DIRECTIVE: &str = "{{=<% %>=}}";

/// Convert legacy alternate syntax to standard syntax.
///
/// Returns a borrowed copy of the input unchanged unless it begins
/// (ignoring leading whitespace) with the alternate-syntax directive.
pub fn without_legacy_template_directives(text: &str) -> Cow<str> {
    let trimmed = text.trim_start();
    if !trimmed.starts_with(ALT_HANDLEBAR_DIRECTIVE) {
        // standard syntax; no allocation required
        return text.into();
    }
    // drop the directive itself, then rewrite the delimiters
    let converted = trimmed
        .trim_start_matches(ALT_HANDLEBAR_DIRECTIVE)
        .replace("<%", "{{")
        .replace("%>", "}}");
    Cow::from(converted)
}
|
|
|
|
|
2019-12-24 05:05:15 +01:00
|
|
|
// Checking if template is empty
|
|
|
|
//----------------------------------------
|
|
|
|
|
|
|
|
impl ParsedTemplate<'_> {
    /// true if provided fields are sufficient to render the template
    ///
    /// `nonempty_fields` is the set of field names that have content.
    pub fn renders_with_fields(&self, nonempty_fields: &HashSet<&str>) -> bool {
        !template_is_empty(nonempty_fields, &self.0)
    }
}
|
|
|
|
|
|
|
|
fn template_is_empty<'a>(nonempty_fields: &HashSet<&str>, nodes: &[ParsedNode<'a>]) -> bool {
|
|
|
|
use ParsedNode::*;
|
|
|
|
for node in nodes {
|
|
|
|
match node {
|
|
|
|
// ignore normal text
|
|
|
|
Text(_) => (),
|
2019-12-25 04:01:19 +01:00
|
|
|
Replacement { key, filters } => {
|
|
|
|
// Anki doesn't consider a type: reference as a required field
|
|
|
|
if filters.contains(&"type") {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2019-12-24 05:05:15 +01:00
|
|
|
if nonempty_fields.contains(*key) {
|
|
|
|
// a single replacement is enough
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Conditional { key, children } => {
|
|
|
|
if !nonempty_fields.contains(*key) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
if !template_is_empty(nonempty_fields, children) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
NegatedConditional { .. } => {
|
|
|
|
// negated conditionals ignored when determining card generation
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
true
|
|
|
|
}
|
|
|
|
|
2020-01-10 05:59:29 +01:00
|
|
|
// Rendering
|
2020-01-08 11:28:04 +01:00
|
|
|
//----------------------------------------
|
|
|
|
|
|
|
|
/// Output of rendering: either fully rendered text, or a replacement that
/// still needs to be completed by the caller (non-standard filters remain).
#[derive(Debug, PartialEq)]
pub enum RenderedNode {
    Text {
        text: String,
    },
    Replacement {
        field_name: String,
        current_text: String,
        /// Filters are in the order they should be applied.
        filters: Vec<String>,
    },
}
|
|
|
|
|
2020-01-12 06:15:46 +01:00
|
|
|
/// Shared state passed down while rendering one side of a card.
pub(crate) struct RenderContext<'a> {
    /// All field name -> field text mappings for the note.
    pub fields: &'a HashMap<&'a str, &'a str>,
    /// Names of fields whose content is considered non-empty.
    pub nonempty_fields: &'a HashSet<&'a str>,
    /// True when rendering the question side.
    pub question_side: bool,
    /// Ordinal of the card being rendered.
    pub card_ord: u16,
    /// Fully rendered front side, used to expand {{FrontSide}};
    /// None if the front side could not be fully rendered.
    pub front_text: Option<Cow<'a, str>>,
}
|
|
|
|
|
2020-01-08 11:28:04 +01:00
|
|
|
impl ParsedTemplate<'_> {
|
2020-01-10 09:02:26 +01:00
|
|
|
/// Render the template with the provided fields.
|
2020-01-08 11:28:04 +01:00
|
|
|
///
|
2020-01-10 09:02:26 +01:00
|
|
|
/// Replacements that use only standard filters will become part of
|
|
|
|
/// a text node. If a non-standard filter is encountered, a partially
|
|
|
|
/// rendered Replacement is returned for the calling code to complete.
|
2020-01-16 08:23:25 +01:00
|
|
|
fn render(&self, context: &RenderContext) -> TemplateResult<Vec<RenderedNode>> {
|
2020-01-10 05:59:29 +01:00
|
|
|
let mut rendered = vec![];
|
2020-01-08 11:28:04 +01:00
|
|
|
|
2020-01-16 08:23:25 +01:00
|
|
|
render_into(&mut rendered, self.0.as_ref(), context)?;
|
2020-01-08 11:28:04 +01:00
|
|
|
|
2020-01-16 08:23:25 +01:00
|
|
|
Ok(rendered)
|
2020-01-08 11:28:04 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-01-10 05:59:29 +01:00
|
|
|
/// Render `nodes` into `rendered_nodes`, recursing through conditionals.
///
/// Adjacent text output is coalesced into single Text nodes via
/// append_str_to_nodes. Errors if a replacement names an unknown field.
fn render_into(
    rendered_nodes: &mut Vec<RenderedNode>,
    nodes: &[ParsedNode],
    context: &RenderContext,
) -> TemplateResult<()> {
    use ParsedNode::*;
    for node in nodes {
        match node {
            Text(text) => {
                append_str_to_nodes(rendered_nodes, text);
            }
            // {{FrontSide}} is special-cased rather than treated as a field
            Replacement {
                key: key @ "FrontSide",
                ..
            } => {
                if let Some(front_side) = &context.front_text {
                    // a fully rendered front side is available, so we can
                    // bake it into the output
                    append_str_to_nodes(rendered_nodes, front_side.as_ref());
                } else {
                    // the front side contains unknown filters, and must
                    // be completed by the Python code
                    rendered_nodes.push(RenderedNode::Replacement {
                        field_name: (*key).to_string(),
                        filters: vec![],
                        current_text: "".into(),
                    });
                }
            }
            Replacement { key: "", filters } if !filters.is_empty() => {
                // if a filter is provided, we accept an empty field name to
                // mean 'pass an empty string to the filter, and it will add
                // its own text'
                rendered_nodes.push(RenderedNode::Replacement {
                    field_name: "".to_string(),
                    current_text: "".to_string(),
                    filters: filters.iter().map(|&f| f.to_string()).collect(),
                })
            }
            Replacement { key, filters } => {
                // apply built in filters if field exists
                let (text, remaining_filters) = match context.fields.get(key) {
                    Some(text) => apply_filters(text, filters, key, context),
                    None => {
                        // unknown field encountered; rebuild the filter
                        // string (textual order, trailing colon) for the
                        // error message
                        let filters_str = filters
                            .iter()
                            .rev()
                            .cloned()
                            .chain(iter::once(""))
                            .collect::<Vec<_>>()
                            .join(":");
                        return Err(TemplateError::FieldNotFound {
                            field: (*key).to_string(),
                            filters: filters_str,
                        });
                    }
                };

                // fully processed?
                if remaining_filters.is_empty() {
                    append_str_to_nodes(rendered_nodes, text.as_ref())
                } else {
                    // leave the rest of the filters for the caller
                    rendered_nodes.push(RenderedNode::Replacement {
                        field_name: (*key).to_string(),
                        filters: remaining_filters,
                        current_text: text.into(),
                    });
                }
            }
            Conditional { key, children } => {
                // children render only when the field has content
                if context.nonempty_fields.contains(key) {
                    render_into(rendered_nodes, children.as_ref(), context)?;
                }
            }
            NegatedConditional { key, children } => {
                // children render only when the field is empty or missing
                if !context.nonempty_fields.contains(key) {
                    render_into(rendered_nodes, children.as_ref(), context)?;
                }
            }
        };
    }

    Ok(())
}
|
|
|
|
|
2020-01-10 09:02:26 +01:00
|
|
|
/// Append to last node if last node is a string, else add new node.
|
|
|
|
fn append_str_to_nodes(nodes: &mut Vec<RenderedNode>, text: &str) {
|
|
|
|
if let Some(RenderedNode::Text {
|
|
|
|
text: ref mut existing_text,
|
|
|
|
}) = nodes.last_mut()
|
|
|
|
{
|
|
|
|
// append to existing last node
|
|
|
|
existing_text.push_str(text)
|
|
|
|
} else {
|
|
|
|
// otherwise, add a new string node
|
|
|
|
nodes.push(RenderedNode::Text {
|
|
|
|
text: text.to_string(),
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-01-10 05:59:29 +01:00
|
|
|
/// True if provided text contains only whitespace and/or empty BR/DIV tags.
fn field_is_empty(text: &str) -> bool {
    lazy_static! {
        // (?xsi): extended mode (whitespace in the pattern is ignored),
        // dot matches newline, case-insensitive. Matches a whole string
        // made only of whitespace and open/close/self-closing br/div tags.
        static ref RE: Regex = Regex::new(
            r#"(?xsi)
            ^(?:
            [[:space:]]
            |
            </?(?:br|div)\ ?/?>
            )*$
        "#
        )
        .unwrap();
    }
    RE.is_match(text)
}
|
|
|
|
|
|
|
|
fn nonempty_fields<'a>(fields: &'a HashMap<&str, &str>) -> HashSet<&'a str> {
|
|
|
|
fields
|
|
|
|
.iter()
|
|
|
|
.filter_map(|(name, val)| {
|
|
|
|
if !field_is_empty(val) {
|
|
|
|
Some(*name)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.collect()
|
|
|
|
}
|
2020-01-10 09:02:26 +01:00
|
|
|
|
2020-01-12 06:15:46 +01:00
|
|
|
// Rendering both sides
|
|
|
|
//----------------------------------------
|
|
|
|
|
|
|
|
/// Render both sides of a card, returning (question nodes, answer nodes).
///
/// If the question side rendered to a single text node, it is made
/// available to the answer side as {{FrontSide}} (with AV tags stripped);
/// otherwise {{FrontSide}} is left for the calling code to complete.
#[allow(clippy::implicit_hasher)]
pub fn render_card(
    qfmt: &str,
    afmt: &str,
    field_map: &HashMap<&str, &str>,
    card_ord: u16,
) -> Result<(Vec<RenderedNode>, Vec<RenderedNode>)> {
    // prepare context
    let mut context = RenderContext {
        fields: field_map,
        nonempty_fields: &nonempty_fields(field_map),
        question_side: true,
        card_ord,
        front_text: None,
    };

    // question side
    let qnorm = without_legacy_template_directives(qfmt);
    let qnodes = ParsedTemplate::from_text(qnorm.as_ref())?.render(&context)?;

    // if the question side didn't have any unknown filters, we can pass
    // FrontSide in now
    if let [RenderedNode::Text { ref text }] = *qnodes.as_slice() {
        context.front_text = Some(strip_av_tags(text));
    }

    // answer side
    context.question_side = false;
    let anorm = without_legacy_template_directives(afmt);
    let anodes = ParsedTemplate::from_text(anorm.as_ref())?.render(&context)?;

    Ok((qnodes, anodes))
}
|
|
|
|
|
2020-01-08 11:28:04 +01:00
|
|
|
// Field requirements
|
2019-12-24 05:05:15 +01:00
|
|
|
//----------------------------------------
|
|
|
|
|
|
|
|
/// Which fields must be non-empty for a template to render
/// (see ParsedTemplate::requirements()). Field ordinals, not names.
#[derive(Debug, Clone, PartialEq)]
pub enum FieldRequirements {
    /// Renders if at least one of these fields is non-empty.
    Any(HashSet<u16>),
    /// Renders only when all of these fields are non-empty.
    All(HashSet<u16>),
    /// Requirements can not be expressed in Any/All form.
    None,
}
|
|
|
|
|
|
|
|
impl ParsedTemplate<'_> {
    /// Return fields required by template.
    ///
    /// This is not able to represent negated expressions or combinations of
    /// Any and All, but is compatible with older Anki clients.
    ///
    /// In the future, it may be feasible to calculate the requirements
    /// when adding cards, instead of caching them up front, which would mean
    /// the above restrictions could be lifted. We would probably
    /// want to add a cache of non-zero fields -> available cards to avoid
    /// slowing down bulk operations like importing too much.
    pub fn requirements(&self, field_map: &FieldMap) -> FieldRequirements {
        let mut nonempty: HashSet<_> = Default::default();
        let mut ords = HashSet::new();
        // first pass: if any field on its own is enough to render, the
        // requirements are Any(those fields)
        for (name, ord) in field_map {
            nonempty.clear();
            nonempty.insert(*name);
            if self.renders_with_fields(&nonempty) {
                ords.insert(*ord);
            }
        }
        if !ords.is_empty() {
            return FieldRequirements::Any(ords);
        }

        // second pass: start with all fields present, and drop each one
        // in turn to see which are individually required
        nonempty.extend(field_map.keys());
        ords.extend(field_map.values().copied());
        for (name, ord) in field_map {
            // can we remove this field and still render?
            nonempty.remove(name);
            if self.renders_with_fields(&nonempty) {
                ords.remove(ord);
            }
            nonempty.insert(*name);
        }
        // All(ords) only if the full field set actually renders
        if !ords.is_empty() && self.renders_with_fields(&nonempty) {
            FieldRequirements::All(ords)
        } else {
            FieldRequirements::None
        }
    }
}
|
|
|
|
|
|
|
|
// Tests
|
|
|
|
//---------------------------------------
|
|
|
|
|
|
|
|
#[cfg(test)]
mod test {
    use super::{FieldMap, ParsedNode::*, ParsedTemplate as PT};
    use crate::err::TemplateError;
    use crate::template::{
        field_is_empty, nonempty_fields, render_card, without_legacy_template_directives,
        FieldRequirements, RenderContext, RenderedNode,
    };
    use crate::text::strip_html;
    use std::collections::{HashMap, HashSet};
    use std::iter::FromIterator;

    // emptiness of individual field content
    #[test]
    fn test_field_empty() {
        assert_eq!(field_is_empty(""), true);
        assert_eq!(field_is_empty(" "), true);
        assert_eq!(field_is_empty("x"), false);
        assert_eq!(field_is_empty("<BR>"), true);
        assert_eq!(field_is_empty("<div />"), true);
        assert_eq!(field_is_empty(" <div> <br> </div>\n"), true);
        assert_eq!(field_is_empty(" <div>x</div>\n"), false);
    }

    // lexing + parsing into the node tree, including error cases
    #[test]
    fn test_parsing() {
        let tmpl = PT::from_text("foo {{bar}} {{#baz}} quux {{/baz}}").unwrap();
        assert_eq!(
            tmpl.0,
            vec![
                Text("foo "),
                Replacement {
                    key: "bar",
                    filters: vec![]
                },
                Text(" "),
                Conditional {
                    key: "baz",
                    children: vec![Text(" quux ")]
                }
            ]
        );

        let tmpl = PT::from_text("{{^baz}}{{/baz}}").unwrap();
        assert_eq!(
            tmpl.0,
            vec![NegatedConditional {
                key: "baz",
                children: vec![]
            }]
        );

        // mismatched or unopened/unclosed conditionals are errors
        PT::from_text("{{#mis}}{{/matched}}").unwrap_err();
        PT::from_text("{{/matched}}").unwrap_err();
        PT::from_text("{{#mis}}").unwrap_err();

        // whitespace
        assert_eq!(
            PT::from_text("{{ tag }}").unwrap().0,
            vec![Replacement {
                key: "tag",
                filters: vec![]
            }]
        );

        // stray closing characters (like in javascript) are ignored
        assert_eq!(
            PT::from_text("text }} more").unwrap().0,
            vec![Text("text }} more")]
        );
    }

    // renders_with_fields() with various conditional/filter combinations
    #[test]
    fn test_nonempty() {
        let fields = HashSet::from_iter(vec!["1", "3"].into_iter());
        let mut tmpl = PT::from_text("{{2}}{{1}}").unwrap();
        assert_eq!(tmpl.renders_with_fields(&fields), true);
        // a type: reference doesn't count as a required field
        tmpl = PT::from_text("{{2}}{{type:cloze:1}}").unwrap();
        assert_eq!(tmpl.renders_with_fields(&fields), false);
        tmpl = PT::from_text("{{2}}{{4}}").unwrap();
        assert_eq!(tmpl.renders_with_fields(&fields), false);
        // negated conditionals are ignored for card generation
        tmpl = PT::from_text("{{#3}}{{^2}}{{1}}{{/2}}{{/3}}").unwrap();
        assert_eq!(tmpl.renders_with_fields(&fields), false);
    }

    // requirements() Any/All/None classification
    #[test]
    fn test_requirements() {
        let field_map: FieldMap = vec!["a", "b"]
            .iter()
            .enumerate()
            .map(|(a, b)| (*b, a as u16))
            .collect();

        let mut tmpl = PT::from_text("{{a}}{{b}}").unwrap();
        assert_eq!(
            tmpl.requirements(&field_map),
            FieldRequirements::Any(HashSet::from_iter(vec![0, 1].into_iter()))
        );

        tmpl = PT::from_text("{{#a}}{{b}}{{/a}}").unwrap();
        assert_eq!(
            tmpl.requirements(&field_map),
            FieldRequirements::All(HashSet::from_iter(vec![0, 1].into_iter()))
        );

        tmpl = PT::from_text("{{c}}").unwrap();
        assert_eq!(tmpl.requirements(&field_map), FieldRequirements::None);

        tmpl = PT::from_text("{{^a}}{{b}}{{/a}}").unwrap();
        assert_eq!(tmpl.requirements(&field_map), FieldRequirements::None);

        tmpl = PT::from_text("{{#a}}{{#b}}{{a}}{{/b}}{{/a}}").unwrap();
        assert_eq!(
            tmpl.requirements(&field_map),
            FieldRequirements::All(HashSet::from_iter(vec![0, 1].into_iter()))
        );

        tmpl = PT::from_text("{{a}}{{type:b}}").unwrap();
        assert_eq!(
            tmpl.requirements(&field_map),
            FieldRequirements::Any(HashSet::from_iter(vec![0].into_iter()))
        );
    }

    // legacy <% %> syntax conversion
    #[test]
    fn test_alt_syntax() {
        let input = "
{{=<% %>=}}
<%Front%>
<% #Back %>
<%/Back%>";
        let output = "
{{Front}}
{{ #Back }}
{{/Back}}";

        assert_eq!(without_legacy_template_directives(input), output);
    }

    // rendering a single side against a small field map
    #[test]
    fn test_render_single() {
        let map: HashMap<_, _> = vec![("F", "f"), ("B", "b"), ("E", " ")]
            .into_iter()
            .collect();

        let ctx = RenderContext {
            fields: &map,
            nonempty_fields: &nonempty_fields(&map),
            question_side: true,
            card_ord: 1,
            front_text: None,
        };

        use crate::template::RenderedNode as FN;
        let mut tmpl = PT::from_text("{{B}}A{{F}}").unwrap();
        assert_eq!(
            tmpl.render(&ctx).unwrap(),
            vec![FN::Text {
                text: "bAf".to_owned()
            },]
        );

        // empty
        tmpl = PT::from_text("{{#E}}A{{/E}}").unwrap();
        assert_eq!(tmpl.render(&ctx).unwrap(), vec![]);

        // missing
        tmpl = PT::from_text("{{^M}}A{{/M}}").unwrap();
        assert_eq!(
            tmpl.render(&ctx).unwrap(),
            vec![FN::Text {
                text: "A".to_owned()
            },]
        );

        // nested
        tmpl = PT::from_text("{{^E}}1{{#F}}2{{#B}}{{F}}{{/B}}{{/F}}{{/E}}").unwrap();
        assert_eq!(
            tmpl.render(&ctx).unwrap(),
            vec![FN::Text {
                text: "12f".to_owned()
            },]
        );

        // unknown filters
        tmpl = PT::from_text("{{one:two:B}}").unwrap();
        assert_eq!(
            tmpl.render(&ctx).unwrap(),
            vec![FN::Replacement {
                field_name: "B".to_owned(),
                filters: vec!["two".to_string(), "one".to_string()],
                current_text: "b".to_owned()
            },]
        );

        // partially unknown filters
        // excess colons are ignored
        tmpl = PT::from_text("{{one::text:B}}").unwrap();
        assert_eq!(
            tmpl.render(&ctx).unwrap(),
            vec![FN::Replacement {
                field_name: "B".to_owned(),
                filters: vec!["one".to_string()],
                current_text: "b".to_owned()
            },]
        );

        // known filter
        tmpl = PT::from_text("{{text:B}}").unwrap();
        assert_eq!(
            tmpl.render(&ctx).unwrap(),
            vec![FN::Text {
                text: "b".to_owned()
            }]
        );

        // unknown field
        tmpl = PT::from_text("{{X}}").unwrap();
        assert_eq!(
            tmpl.render(&ctx).unwrap_err(),
            TemplateError::FieldNotFound {
                field: "X".to_owned(),
                filters: "".to_owned()
            }
        );

        // unknown field with filters
        tmpl = PT::from_text("{{foo:text:X}}").unwrap();
        assert_eq!(
            tmpl.render(&ctx).unwrap_err(),
            TemplateError::FieldNotFound {
                field: "X".to_owned(),
                filters: "foo:text:".to_owned()
            }
        );

        // a blank field is allowed if it has filters
        tmpl = PT::from_text("{{filter:}}").unwrap();
        assert_eq!(
            tmpl.render(&ctx).unwrap(),
            vec![FN::Replacement {
                field_name: "".to_string(),
                current_text: "".to_string(),
                filters: vec!["filter".to_string()]
            }]
        );
    }

    // helper: extract the text when a side rendered fully to one text node
    fn get_complete_template(nodes: &Vec<RenderedNode>) -> Option<&str> {
        if let [RenderedNode::Text { ref text }] = nodes.as_slice() {
            Some(text.as_str())
        } else {
            None
        }
    }

    // render_card() question/answer interaction, including {{FrontSide}}
    #[test]
    fn test_render_full() {
        // make sure front and back side renders cloze differently
        let fmt = "{{cloze:Text}}";
        let clozed_text = "{{c1::one}} {{c2::two::hint}}";
        let map: HashMap<_, _> = vec![("Text", clozed_text)].into_iter().collect();

        let (qnodes, anodes) = render_card(fmt, fmt, &map, 0).unwrap();
        assert_eq!(
            strip_html(get_complete_template(&qnodes).unwrap()),
            "[...] two"
        );
        assert_eq!(
            strip_html(get_complete_template(&anodes).unwrap()),
            "one two"
        );

        // FrontSide should render if only standard modifiers were used
        let (_qnodes, anodes) =
            render_card("{{kana:text:Text}}", "{{FrontSide}}", &map, 1).unwrap();
        assert_eq!(get_complete_template(&anodes).unwrap(), clozed_text);

        // But if a custom modifier was used, it's deferred to the Python code
        let (_qnodes, anodes) = render_card("{{custom:Text}}", "{{FrontSide}}", &map, 1).unwrap();
        assert_eq!(get_complete_template(&anodes).is_none(), true)
    }
}
|