mirror of https://github.com/typst/typst (synced 2025-05-14 04:56:26 +08:00)

Better parser testing ✅

parent 4069f0744d
commit 2b6ccd8248

12  src/color.rs
@@ -122,4 +122,16 @@ mod tests {
test("233", 0x22, 0x33, 0x33, 0xff);
test("111b", 0x11, 0x11, 0x11, 0xbb);
}

#[test]
fn parse_invalid_colors() {
fn test(hex: &str) {
assert_eq!(RgbaColor::from_str(hex), Err(ParseRgbaError));
}

test("12345");
test("a5");
test("14B2AH");
test("f075ff011");
}
}
@@ -18,9 +18,9 @@ use std::rc::Rc;

use fontdock::FontStyle;

use crate::color::Color;
use crate::diag::Diag;
use crate::diag::{Deco, Feedback, Pass};
use crate::color::Color;
use crate::env::SharedEnv;
use crate::geom::{BoxAlign, Dir, Flow, Gen, Length, Linear, Relative, Sides, Size};
use crate::layout::{

@@ -328,6 +328,11 @@ impl Eval for SynNode {

fn eval(&self, ctx: &mut EvalContext) -> Self::Output {
match self {
SynNode::Text(text) => {
let node = ctx.make_text_node(text.clone());
ctx.push(node);
}

SynNode::Space => {
let em = ctx.state.font.font_size();
ctx.push(Spacing {

@@ -336,11 +341,6 @@ impl Eval for SynNode {
});
}

SynNode::Text(text) => {
let node = ctx.make_text_node(text.clone());
ctx.push(node);
}

SynNode::Linebreak => {
ctx.end_par_group();
ctx.start_par_group();

@@ -356,10 +356,12 @@ impl Eval for SynNode {
ctx.start_par_group();
}

SynNode::Emph => ctx.state.font.emph ^= true,
SynNode::Strong => ctx.state.font.strong ^= true,
SynNode::Emph => ctx.state.font.emph ^= true,

SynNode::Heading(heading) => heading.eval(ctx),
SynNode::Raw(raw) => raw.eval(ctx),

SynNode::Expr(expr) => expr.eval(ctx).eval(ctx),
}
}
210  src/parse/mod.rs
@ -33,10 +33,10 @@ fn tree(p: &mut Parser) -> SynTree {
|
||||
let mut tree = vec![];
|
||||
while !p.eof() {
|
||||
if let Some(node) = p.span_if(|p| node(p, at_start)) {
|
||||
if node.v == SynNode::Parbreak {
|
||||
at_start = true;
|
||||
} else if node.v != SynNode::Space {
|
||||
at_start = false;
|
||||
match node.v {
|
||||
SynNode::Parbreak => at_start = true,
|
||||
SynNode::Space => {}
|
||||
_ => at_start = false,
|
||||
}
|
||||
tree.push(node);
|
||||
}
|
||||
@ -46,9 +46,7 @@ fn tree(p: &mut Parser) -> SynTree {
|
||||
|
||||
/// Parse a syntax node.
|
||||
fn node(p: &mut Parser, at_start: bool) -> Option<SynNode> {
|
||||
let start = p.next_start();
|
||||
let node = match p.eat()? {
|
||||
// Spaces.
|
||||
let node = match p.peek()? {
|
||||
Token::Space(newlines) => {
|
||||
if newlines < 2 {
|
||||
SynNode::Space
|
||||
@ -56,61 +54,59 @@ fn node(p: &mut Parser, at_start: bool) -> Option<SynNode> {
|
||||
SynNode::Parbreak
|
||||
}
|
||||
}
|
||||
|
||||
// Text.
|
||||
Token::Text(text) => SynNode::Text(text.into()),
|
||||
|
||||
// Comments.
|
||||
Token::LineComment(_) | Token::BlockComment(_) => return None,
|
||||
Token::LineComment(_) | Token::BlockComment(_) => {
|
||||
p.eat();
|
||||
return None;
|
||||
}
|
||||
|
||||
// Markup.
|
||||
Token::Star => SynNode::Strong,
|
||||
Token::Underscore => SynNode::Emph,
|
||||
Token::Hashtag => {
|
||||
if at_start {
|
||||
SynNode::Heading(heading(p, start))
|
||||
} else {
|
||||
SynNode::Text(p.eaten_from(start).into())
|
||||
}
|
||||
}
|
||||
Token::Tilde => SynNode::Text("\u{00A0}".into()),
|
||||
Token::Backslash => SynNode::Linebreak,
|
||||
Token::UnicodeEscape(token) => SynNode::Text(unicode_escape(p, token, start)),
|
||||
Token::Raw(token) => SynNode::Raw(raw(p, token)),
|
||||
Token::Hashtag => {
|
||||
if at_start {
|
||||
return Some(SynNode::Heading(heading(p)));
|
||||
} else {
|
||||
SynNode::Text(p.get(p.peek_span()).into())
|
||||
}
|
||||
}
|
||||
Token::Raw(t) => SynNode::Raw(raw(p, t)),
|
||||
Token::UnicodeEscape(t) => SynNode::Text(unicode_escape(p, t)),
|
||||
|
||||
// Functions.
|
||||
Token::LeftBracket => {
|
||||
p.jump(start);
|
||||
SynNode::Expr(Expr::Call(bracket_call(p)))
|
||||
return Some(SynNode::Expr(Expr::Call(bracket_call(p))));
|
||||
}
|
||||
|
||||
// Blocks.
|
||||
Token::LeftBrace => {
|
||||
p.jump(start);
|
||||
SynNode::Expr(block_expr(p)?)
|
||||
return Some(SynNode::Expr(block_expr(p)?));
|
||||
}
|
||||
|
||||
// Bad tokens.
|
||||
_ => {
|
||||
p.jump(start);
|
||||
p.diag_unexpected();
|
||||
return None;
|
||||
}
|
||||
};
|
||||
p.eat();
|
||||
Some(node)
|
||||
}
|
||||
|
||||
/// Parse a heading.
|
||||
fn heading(p: &mut Parser, start: Pos) -> NodeHeading {
|
||||
// Parse the section depth.
|
||||
fn heading(p: &mut Parser) -> NodeHeading {
|
||||
// Count hashtags.
|
||||
let mut level = p.span(|p| {
|
||||
p.eat_assert(Token::Hashtag);
|
||||
|
||||
let mut level = 0u8;
|
||||
while p.eat_if(Token::Hashtag) {
|
||||
level = level.saturating_add(1);
|
||||
}
|
||||
level
|
||||
});
|
||||
|
||||
let mut level = level.span_with(start .. p.last_end());
|
||||
if level.v > 5 {
|
||||
p.diag(warning!(level.span, "section depth should be at most 6"));
|
||||
p.diag(warning!(level.span, "section depth should not exceed 6"));
|
||||
level.v = 5;
|
||||
}
|
||||
|
||||
@ -125,25 +121,23 @@ fn heading(p: &mut Parser, start: Pos) -> NodeHeading {
|
||||
NodeHeading { level, contents }
|
||||
}
|
||||
|
||||
/// Parse a raw block.
|
||||
/// Handle a raw block.
|
||||
fn raw(p: &mut Parser, token: TokenRaw) -> NodeRaw {
|
||||
let span = p.peek_span();
|
||||
let raw = resolve::resolve_raw(token.text, token.backticks);
|
||||
|
||||
if !token.terminated {
|
||||
p.diag(error!(p.last_end(), "expected backtick(s)"));
|
||||
p.diag(error!(span.end, "expected backtick(s)"));
|
||||
}
|
||||
|
||||
raw
|
||||
}
|
||||
|
||||
/// Parse a unicode escape sequence.
|
||||
fn unicode_escape(p: &mut Parser, token: TokenUnicodeEscape, start: Pos) -> String {
|
||||
let span = Span::new(start, p.last_end());
|
||||
/// Handle a unicode escape sequence.
|
||||
fn unicode_escape(p: &mut Parser, token: TokenUnicodeEscape) -> String {
|
||||
let span = p.peek_span();
|
||||
let text = if let Some(c) = resolve::resolve_hex(token.sequence) {
|
||||
c.to_string()
|
||||
} else {
|
||||
// Print out the escape sequence verbatim if it is
|
||||
// invalid.
|
||||
// Print out the escape sequence verbatim if it is invalid.
|
||||
p.diag(error!(span, "invalid unicode escape sequence"));
|
||||
p.get(span).into()
|
||||
};
|
||||
@ -155,6 +149,24 @@ fn unicode_escape(p: &mut Parser, token: TokenUnicodeEscape, start: Pos) -> Stri
|
||||
text
|
||||
}
|
||||
|
||||
/// Parse a block expression.
|
||||
fn block_expr(p: &mut Parser) -> Option<Expr> {
|
||||
p.push_mode(TokenMode::Header);
|
||||
p.start_group(Group::Brace);
|
||||
let expr = expr(p);
|
||||
p.pop_mode();
|
||||
p.end_group();
|
||||
expr
|
||||
}
|
||||
|
||||
/// Parse a parenthesized function call.
|
||||
fn paren_call(p: &mut Parser, name: Spanned<Ident>) -> ExprCall {
|
||||
p.start_group(Group::Paren);
|
||||
let args = p.span(|p| dict_contents(p).0);
|
||||
p.end_group();
|
||||
ExprCall { name, args }
|
||||
}
|
||||
|
||||
/// Parse a bracketed function call.
|
||||
fn bracket_call(p: &mut Parser) -> ExprCall {
|
||||
p.push_mode(TokenMode::Header);
|
||||
@ -180,7 +192,7 @@ fn bracket_call(p: &mut Parser) -> ExprCall {
|
||||
|
||||
while let Some(mut top) = outer.pop() {
|
||||
let span = inner.span;
|
||||
let node = inner.map(Expr::Call).map(SynNode::Expr);
|
||||
let node = inner.map(|c| SynNode::Expr(Expr::Call(c)));
|
||||
let expr = Expr::Lit(Lit::Content(vec![node])).span_with(span);
|
||||
top.v.args.v.0.push(LitDictEntry { key: None, expr });
|
||||
inner = top;
|
||||
@ -220,44 +232,16 @@ fn bracket_body(p: &mut Parser) -> SynTree {
|
||||
tree
|
||||
}
|
||||
|
||||
/// Parse a parenthesized function call.
|
||||
fn paren_call(p: &mut Parser, name: Spanned<Ident>) -> ExprCall {
|
||||
p.start_group(Group::Paren);
|
||||
let args = p.span(|p| dict_contents(p).0);
|
||||
p.end_group();
|
||||
ExprCall { name, args }
|
||||
}
|
||||
|
||||
/// Parse a block expression.
|
||||
fn block_expr(p: &mut Parser) -> Option<Expr> {
|
||||
p.push_mode(TokenMode::Header);
|
||||
p.start_group(Group::Brace);
|
||||
let expr = expr(p);
|
||||
p.pop_mode();
|
||||
p.end_group();
|
||||
expr
|
||||
}
|
||||
|
||||
/// Parse the contents of a dictionary.
|
||||
fn dict_contents(p: &mut Parser) -> (LitDict, bool) {
|
||||
let mut dict = LitDict::new();
|
||||
let mut missing_coma = None;
|
||||
let mut comma_and_keyless = true;
|
||||
let mut expected_comma = None;
|
||||
|
||||
loop {
|
||||
if p.eof() {
|
||||
break;
|
||||
}
|
||||
|
||||
let entry = if let Some(entry) = dict_entry(p) {
|
||||
entry
|
||||
} else {
|
||||
expected_comma = None;
|
||||
p.diag_unexpected();
|
||||
continue;
|
||||
};
|
||||
|
||||
if let Some(pos) = expected_comma.take() {
|
||||
while !p.eof() {
|
||||
if let Some(entry) = dict_entry(p) {
|
||||
let behind = entry.expr.span.end;
|
||||
if let Some(pos) = missing_coma.take() {
|
||||
p.diag_expected_at("comma", pos);
|
||||
}
|
||||
|
||||
@ -266,18 +250,17 @@ fn dict_contents(p: &mut Parser) -> (LitDict, bool) {
|
||||
p.deco(Deco::DictKey.span_with(key.span));
|
||||
}
|
||||
|
||||
let behind = entry.expr.span.end;
|
||||
dict.0.push(entry);
|
||||
|
||||
if p.eof() {
|
||||
break;
|
||||
}
|
||||
|
||||
if !p.eat_if(Token::Comma) {
|
||||
expected_comma = Some(behind);
|
||||
}
|
||||
|
||||
if p.eat_if(Token::Comma) {
|
||||
comma_and_keyless = false;
|
||||
} else {
|
||||
missing_coma = Some(behind);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let coercible = comma_and_keyless && !dict.0.is_empty();
|
||||
@ -291,15 +274,10 @@ fn dict_entry(p: &mut Parser) -> Option<LitDictEntry> {
|
||||
// Key-value pair.
|
||||
Some(Token::Colon) => {
|
||||
p.eat_assert(Token::Colon);
|
||||
if let Some(expr) = p.span_if(expr) {
|
||||
Some(LitDictEntry {
|
||||
p.span_if(expr).map(|expr| LitDictEntry {
|
||||
key: Some(ident.map(|id| DictKey::Str(id.0))),
|
||||
expr,
|
||||
})
|
||||
} else {
|
||||
p.diag_expected("value");
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
// Function call.
|
||||
@ -318,16 +296,14 @@ fn dict_entry(p: &mut Parser) -> Option<LitDictEntry> {
|
||||
expr: ident.map(|id| Expr::Lit(Lit::Ident(id))),
|
||||
}),
|
||||
}
|
||||
} else if let Some(expr) = p.span_if(expr) {
|
||||
Some(LitDictEntry { key: None, expr })
|
||||
} else {
|
||||
None
|
||||
p.span_if(expr).map(|expr| LitDictEntry { key: None, expr })
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse an expression: `term (+ term)*`.
|
||||
fn expr(p: &mut Parser) -> Option<Expr> {
|
||||
binops(p, "summand", term, |token| match token {
|
||||
binops(p, term, |token| match token {
|
||||
Token::Plus => Some(BinOp::Add),
|
||||
Token::Hyphen => Some(BinOp::Sub),
|
||||
_ => None,
|
||||
@ -336,7 +312,7 @@ fn expr(p: &mut Parser) -> Option<Expr> {
|
||||
|
||||
/// Parse a term: `factor (* factor)*`.
|
||||
fn term(p: &mut Parser) -> Option<Expr> {
|
||||
binops(p, "factor", factor, |token| match token {
|
||||
binops(p, factor, |token| match token {
|
||||
Token::Star => Some(BinOp::Mul),
|
||||
Token::Slash => Some(BinOp::Div),
|
||||
_ => None,
|
||||
@ -346,14 +322,12 @@ fn term(p: &mut Parser) -> Option<Expr> {
|
||||
/// Parse binary operations of the from `a (<op> b)*`.
|
||||
fn binops(
|
||||
p: &mut Parser,
|
||||
operand_name: &str,
|
||||
operand: fn(&mut Parser) -> Option<Expr>,
|
||||
op: fn(Token) -> Option<BinOp>,
|
||||
) -> Option<Expr> {
|
||||
let mut lhs = p.span_if(operand)?;
|
||||
|
||||
loop {
|
||||
if let Some(op) = p.span_if(|p| p.eat_map(op)) {
|
||||
while let Some(op) = p.span_if(|p| p.eat_map(op)) {
|
||||
if let Some(rhs) = p.span_if(operand) {
|
||||
let span = lhs.span.join(rhs.span);
|
||||
let expr = Expr::Binary(ExprBinary {
|
||||
@ -362,11 +336,6 @@ fn binops(
|
||||
rhs: Box::new(rhs),
|
||||
});
|
||||
lhs = expr.span_with(span);
|
||||
} else {
|
||||
let span = lhs.span.join(op.span);
|
||||
p.diag(error!(span, "missing right {}", operand_name));
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
@ -383,12 +352,8 @@ fn factor(p: &mut Parser) -> Option<Expr> {
|
||||
};
|
||||
|
||||
if let Some(op) = p.span_if(|p| p.eat_map(op)) {
|
||||
if let Some(expr) = p.span_if(factor) {
|
||||
Some(Expr::Unary(ExprUnary { op, expr: Box::new(expr) }))
|
||||
} else {
|
||||
p.diag(error!(op.span, "missing factor"));
|
||||
None
|
||||
}
|
||||
p.span_if(factor)
|
||||
.map(|expr| Expr::Unary(ExprUnary { op, expr: Box::new(expr) }))
|
||||
} else {
|
||||
value(p)
|
||||
}
|
||||
@ -397,28 +362,28 @@ fn factor(p: &mut Parser) -> Option<Expr> {
|
||||
/// Parse a value.
|
||||
fn value(p: &mut Parser) -> Option<Expr> {
|
||||
let start = p.next_start();
|
||||
Some(match p.eat()? {
|
||||
Some(match p.eat() {
|
||||
// Bracketed function call.
|
||||
Token::LeftBracket => {
|
||||
Some(Token::LeftBracket) => {
|
||||
p.jump(start);
|
||||
let node = p.span(|p| SynNode::Expr(Expr::Call(bracket_call(p))));
|
||||
Expr::Lit(Lit::Content(vec![node]))
|
||||
}
|
||||
|
||||
// Content expression.
|
||||
Token::LeftBrace => {
|
||||
Some(Token::LeftBrace) => {
|
||||
p.jump(start);
|
||||
Expr::Lit(Lit::Content(content(p)))
|
||||
}
|
||||
|
||||
// Dictionary or just a parenthesized expression.
|
||||
Token::LeftParen => {
|
||||
Some(Token::LeftParen) => {
|
||||
p.jump(start);
|
||||
parenthesized(p)
|
||||
}
|
||||
|
||||
// Function or just ident.
|
||||
Token::Ident(id) => {
|
||||
Some(Token::Ident(id)) => {
|
||||
let ident = Ident(id.into());
|
||||
let after = p.last_end();
|
||||
if p.peek() == Some(Token::LeftParen) {
|
||||
@ -429,24 +394,25 @@ fn value(p: &mut Parser) -> Option<Expr> {
|
||||
}
|
||||
}
|
||||
|
||||
// Atomic values.
|
||||
Token::Bool(b) => Expr::Lit(Lit::Bool(b)),
|
||||
Token::Int(i) => Expr::Lit(Lit::Int(i)),
|
||||
Token::Float(f) => Expr::Lit(Lit::Float(f)),
|
||||
Token::Length(val, unit) => Expr::Lit(Lit::Length(val, unit)),
|
||||
Token::Percent(p) => Expr::Lit(Lit::Percent(p)),
|
||||
Token::Hex(hex) => Expr::Lit(Lit::Color(color(p, hex, start))),
|
||||
Token::Str(token) => Expr::Lit(Lit::Str(string(p, token))),
|
||||
// Basic values.
|
||||
Some(Token::Bool(b)) => Expr::Lit(Lit::Bool(b)),
|
||||
Some(Token::Int(i)) => Expr::Lit(Lit::Int(i)),
|
||||
Some(Token::Float(f)) => Expr::Lit(Lit::Float(f)),
|
||||
Some(Token::Length(val, unit)) => Expr::Lit(Lit::Length(val, unit)),
|
||||
Some(Token::Percent(p)) => Expr::Lit(Lit::Percent(p)),
|
||||
Some(Token::Hex(hex)) => Expr::Lit(Lit::Color(color(p, hex, start))),
|
||||
Some(Token::Str(token)) => Expr::Lit(Lit::Str(str(p, token))),
|
||||
|
||||
// No value.
|
||||
_ => {
|
||||
p.jump(start);
|
||||
p.diag_expected("expression");
|
||||
return None;
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Parse a content expression: `{...}`.
|
||||
// Parse a content value: `{...}`.
|
||||
fn content(p: &mut Parser) -> SynTree {
|
||||
p.push_mode(TokenMode::Body);
|
||||
p.start_group(Group::Brace);
|
||||
@ -487,7 +453,7 @@ fn color(p: &mut Parser, hex: &str, start: Pos) -> RgbaColor {
|
||||
}
|
||||
|
||||
/// Parse a string.
|
||||
fn string(p: &mut Parser, token: TokenStr) -> String {
|
||||
fn str(p: &mut Parser, token: TokenStr) -> String {
|
||||
if !token.terminated {
|
||||
p.diag_expected_at("quote", p.last_end());
|
||||
}
|
||||
@@ -10,7 +10,9 @@ pub struct Parser<'s> {
/// An iterator over the source tokens.
tokens: Tokens<'s>,
/// The next token.
/// (Only `None` if we are at the end of group or end of file).
next: Option<Token<'s>>,
/// The peeked token.
/// (Same as `next` except if we are at the end of group, then `None`).
peeked: Option<Token<'s>>,
/// The start position of the peeked token.
next_start: Pos,

@@ -28,10 +30,11 @@ impl<'s> Parser<'s> {
/// Create a new parser for the source string.
pub fn new(src: &'s str) -> Self {
let mut tokens = Tokens::new(src, TokenMode::Body);
let peeked = tokens.next();
let next = tokens.next();
Self {
tokens,
peeked,
next,
peeked: next,
next_start: Pos::ZERO,
last_end: Pos::ZERO,
modes: vec![],

@@ -118,7 +121,9 @@ impl<'s> Parser<'s> {
Group::Brace => self.eat_assert(Token::LeftBrace),
Group::Subheader => {}
}

self.groups.push(group);
self.repeek();
}

/// Ends the parsing of a group and returns the span of the whole group.

@@ -130,6 +135,8 @@ impl<'s> Parser<'s> {
debug_assert_eq!(self.peek(), None, "unfinished group");

let group = self.groups.pop().expect("no started group");
self.repeek();

let end = match group {
Group::Paren => Some(Token::RightParen),
Group::Bracket => Some(Token::RightBracket),

@@ -138,7 +145,7 @@ impl<'s> Parser<'s> {
};

if let Some(token) = end {
if self.peeked == Some(token) {
if self.next == Some(token) {
self.bump();
} else {
self.diag(error!(self.next_start, "expected {}", token.name()));

@@ -203,26 +210,24 @@ impl<'s> Parser<'s> {
}

/// Peek at the next token without consuming it.
pub fn peek(&mut self) -> Option<Token<'s>> {
let group = match self.peeked {
Some(Token::RightParen) => Group::Paren,
Some(Token::RightBracket) => Group::Bracket,
Some(Token::RightBrace) => Group::Brace,
Some(Token::Pipe) => Group::Subheader,
other => return other,
};

if self.groups.contains(&group) {
return None;
pub fn peek(&self) -> Option<Token<'s>> {
self.peeked
}

self.peeked
/// Peek at the span of the next token.
///
/// Has length zero if `peek()` returns `None`.
pub fn peek_span(&self) -> Span {
Span::new(
self.next_start,
if self.eof() { self.next_start } else { self.tokens.pos() },
)
}

/// Checks whether the next token fulfills a condition.
///
/// Returns `false` if there is no next token.
pub fn check<F>(&mut self, f: F) -> bool
pub fn check<F>(&self, f: F) -> bool
where
F: FnOnce(Token<'s>) -> bool,
{

@@ -230,7 +235,7 @@ impl<'s> Parser<'s> {
}

/// Whether the end of the source string or group is reached.
pub fn eof(&mut self) -> bool {
pub fn eof(&self) -> bool {
self.peek().is_none()
}

@@ -284,22 +289,37 @@ impl<'s> Parser<'s> {
fn bump(&mut self) {
self.last_end = self.tokens.pos();
self.next_start = self.tokens.pos();
self.peeked = self.tokens.next();
self.next = self.tokens.next();

match self.tokens.mode() {
TokenMode::Body => {}
TokenMode::Header => {
while matches!(
self.peeked,
self.next,
Some(Token::Space(_)) |
Some(Token::LineComment(_)) |
Some(Token::BlockComment(_))
) {
self.next_start = self.tokens.pos();
self.peeked = self.tokens.next();
self.next = self.tokens.next();
}
}
}

self.repeek();
}

fn repeek(&mut self) {
self.peeked = self.next;
if self.groups.contains(&match self.next {
Some(Token::RightParen) => Group::Paren,
Some(Token::RightBracket) => Group::Bracket,
Some(Token::RightBrace) => Group::Brace,
Some(Token::Pipe) => Group::Subheader,
_ => return,
}) {
self.peeked = None;
}
}
}
@@ -18,7 +18,6 @@ pub fn resolve_string(string: &str) -> String {
match s.eat() {
Some('\\') => out.push('\\'),
Some('"') => out.push('"'),

Some('n') => out.push('\n'),
Some('t') => out.push('\t'),
Some('u') if s.eat_if('{') => {

@@ -29,7 +28,7 @@ pub fn resolve_string(string: &str) -> String {
if let Some(c) = resolve_hex(sequence) {
out.push(c);
} else {
// TODO: Feedback that escape sequence is wrong.
// TODO: Feedback that unicode escape sequence is wrong.
out += s.eaten_from(start);
}
}

@@ -126,7 +125,7 @@ mod tests {
use super::*;

#[test]
fn test_unescape_strings() {
fn test_resolve_strings() {
fn test(string: &str, expected: &str) {
assert_eq!(resolve_string(string), expected.to_string());
}
@ -1,58 +1,104 @@
|
||||
//! Parser tests.
|
||||
|
||||
#![allow(non_snake_case)]
|
||||
|
||||
use std::fmt::Debug;
|
||||
|
||||
use super::parse;
|
||||
use crate::color::RgbaColor;
|
||||
use crate::diag::Deco;
|
||||
use crate::diag::{Diag, Level, Pass};
|
||||
use crate::eval::DictKey;
|
||||
use crate::geom::Unit;
|
||||
use crate::syntax::*;
|
||||
|
||||
// ------------------------------ Construct Syntax Nodes ------------------------------ //
|
||||
use BinOp::*;
|
||||
use SynNode::{Emph, Linebreak, Parbreak, Space, Strong};
|
||||
use UnOp::*;
|
||||
|
||||
use Deco::*;
|
||||
use SynNode::{Emph as E, Linebreak as L, Parbreak as P, Space as S, Strong as B};
|
||||
macro_rules! t {
|
||||
($src:literal
|
||||
nodes: [$($node:expr),* $(,)?]
|
||||
$(, errors: [$($err:expr),* $(,)?])?
|
||||
$(, warnings: [$($warn:expr),* $(,)?])?
|
||||
$(, spans: $spans:expr)?
|
||||
$(,)?
|
||||
) => {{
|
||||
#[allow(unused)]
|
||||
let mut spans = false;
|
||||
$(spans = $spans;)?
|
||||
|
||||
fn T(text: &str) -> SynNode {
|
||||
SynNode::Text(text.to_string())
|
||||
}
|
||||
let Pass { output, feedback } = parse($src);
|
||||
check($src, Content![@$($node),*], output, spans);
|
||||
check(
|
||||
$src,
|
||||
vec![
|
||||
$($(into!($err).map(|s: &str| Diag::new(Level::Error, s)),)*)?
|
||||
$($(into!($warn).map(|s: &str| Diag::new(Level::Warning, s)),)*)?
|
||||
],
|
||||
feedback.diags,
|
||||
true,
|
||||
);
|
||||
}};
|
||||
|
||||
macro_rules! H {
|
||||
($level:expr, $($tts:tt)*) => {
|
||||
SynNode::Heading(NodeHeading {
|
||||
level: Spanned::zero($level),
|
||||
contents: Tree![@$($tts)*],
|
||||
})
|
||||
($src:literal $($node:expr),* $(,)?) => {
|
||||
t!($src nodes: [$($node),*]);
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! R {
|
||||
($lang:expr, $inline:expr, $($line:expr),* $(,)?) => {{
|
||||
/// Assert that expected and found are equal, printing both and the source of
|
||||
/// the test case if they aren't.
|
||||
///
|
||||
/// When `cmp_spans` is false, spans are ignored.
|
||||
#[track_caller]
|
||||
pub fn check<T>(src: &str, exp: T, found: T, cmp_spans: bool)
|
||||
where
|
||||
T: Debug + PartialEq,
|
||||
{
|
||||
Span::set_cmp(cmp_spans);
|
||||
|
||||
if exp != found {
|
||||
println!("source: {:?}", src);
|
||||
println!("expected: {:#?}", exp);
|
||||
println!("found: {:#?}", found);
|
||||
panic!("test failed");
|
||||
}
|
||||
|
||||
Span::set_cmp(true);
|
||||
}
|
||||
|
||||
/// Shorthand for `Spanned::new`.
|
||||
fn S<T>(span: impl Into<Span>, v: T) -> Spanned<T> {
|
||||
Spanned::new(v, span)
|
||||
}
|
||||
|
||||
// Enables tests to optionally specify spans.
|
||||
impl<T> From<T> for Spanned<T> {
|
||||
fn from(t: T) -> Self {
|
||||
Spanned::zero(t)
|
||||
}
|
||||
}
|
||||
|
||||
/// Shorthand for `Into::<Spanned<_>>::into`.
|
||||
macro_rules! into {
|
||||
($val:expr) => {
|
||||
Into::<Spanned<_>>::into($val)
|
||||
};
|
||||
}
|
||||
|
||||
fn Text(text: &str) -> SynNode {
|
||||
SynNode::Text(text.into())
|
||||
}
|
||||
|
||||
fn Heading(level: impl Into<Spanned<u8>>, contents: SynTree) -> SynNode {
|
||||
SynNode::Heading(NodeHeading { level: level.into(), contents })
|
||||
}
|
||||
|
||||
fn Raw(lang: Option<&str>, lines: &[&str], inline: bool) -> SynNode {
|
||||
SynNode::Raw(NodeRaw {
|
||||
lang: $lang,
|
||||
lines: vec![$($line.to_string()) ,*],
|
||||
inline: $inline,
|
||||
lang: lang.map(|id| Ident(id.into())),
|
||||
lines: lines.iter().map(ToString::to_string).collect(),
|
||||
inline,
|
||||
})
|
||||
}};
|
||||
}
|
||||
|
||||
fn Lang(lang: &str) -> Option<Ident> {
|
||||
Some(Ident(lang.to_string()))
|
||||
}
|
||||
|
||||
macro_rules! F {
|
||||
($($tts:tt)*) => { SynNode::Expr(Expr::Call(Call!(@$($tts)*))) }
|
||||
}
|
||||
|
||||
// ------------------------------- Construct Expressions ------------------------------ //
|
||||
|
||||
use BinOp::*;
|
||||
use UnOp::*;
|
||||
use Unit::*;
|
||||
|
||||
fn Id(ident: &str) -> Expr {
|
||||
Expr::Lit(Lit::Ident(Ident(ident.to_string())))
|
||||
}
|
||||
@ -85,30 +131,13 @@ fn Str(string: &str) -> Expr {
|
||||
Expr::Lit(Lit::Str(string.to_string()))
|
||||
}
|
||||
|
||||
macro_rules! Call {
|
||||
(@$name:expr $(, $span:expr)? $(; $($tts:tt)*)?) => {{
|
||||
let name = Into::<Spanned<&str>>::into($name);
|
||||
#[allow(unused)]
|
||||
let mut span = Span::ZERO;
|
||||
$(span = $span.into();)?
|
||||
ExprCall {
|
||||
name: name.map(|n| Ident(n.to_string())),
|
||||
args: Dict![@$($($tts)*)?].span_with(span),
|
||||
}
|
||||
}};
|
||||
($($tts:tt)*) => { Expr::Call(Call![@$($tts)*]) };
|
||||
}
|
||||
|
||||
fn Unary(op: impl Into<Spanned<UnOp>>, expr: impl Into<Spanned<Expr>>) -> Expr {
|
||||
Expr::Unary(ExprUnary {
|
||||
op: op.into(),
|
||||
expr: Box::new(expr.into()),
|
||||
})
|
||||
fn Block(expr: Expr) -> SynNode {
|
||||
SynNode::Expr(expr)
|
||||
}
|
||||
|
||||
fn Binary(
|
||||
op: impl Into<Spanned<BinOp>>,
|
||||
lhs: impl Into<Spanned<Expr>>,
|
||||
op: impl Into<Spanned<BinOp>>,
|
||||
rhs: impl Into<Spanned<Expr>>,
|
||||
) -> Expr {
|
||||
Expr::Binary(ExprBinary {
|
||||
@ -118,463 +147,418 @@ fn Binary(
|
||||
})
|
||||
}
|
||||
|
||||
fn Unary(op: impl Into<Spanned<UnOp>>, expr: impl Into<Spanned<Expr>>) -> Expr {
|
||||
Expr::Unary(ExprUnary {
|
||||
op: op.into(),
|
||||
expr: Box::new(expr.into()),
|
||||
})
|
||||
}
|
||||
|
||||
macro_rules! Dict {
|
||||
(@dict=$dict:expr,) => {};
|
||||
(@dict=$dict:expr, $key:expr => $expr:expr $(, $($tts:tt)*)?) => {{
|
||||
let key = Into::<Spanned<&str>>::into($key);
|
||||
let key = key.map(Into::<DictKey>::into);
|
||||
let expr = Into::<Spanned<Expr>>::into($expr);
|
||||
$dict.0.push(LitDictEntry { key: Some(key), expr });
|
||||
Dict![@dict=$dict, $($($tts)*)?];
|
||||
}};
|
||||
(@dict=$dict:expr, $expr:expr $(, $($tts:tt)*)?) => {
|
||||
let expr = Into::<Spanned<Expr>>::into($expr);
|
||||
$dict.0.push(LitDictEntry { key: None, expr });
|
||||
Dict![@dict=$dict, $($($tts)*)?];
|
||||
(@$($a:expr $(=> $b:expr)?),* $(,)?) => {
|
||||
LitDict(vec![$(#[allow(unused)] {
|
||||
let key: Option<Spanned<DictKey>> = None;
|
||||
let expr = $a;
|
||||
$(
|
||||
let key = Some(into!($a).map(|s: &str| s.into()));
|
||||
let expr = $b;
|
||||
)?
|
||||
LitDictEntry { key, expr: into!(expr) }
|
||||
}),*])
|
||||
};
|
||||
(@$($tts:tt)*) => {{
|
||||
#[allow(unused)]
|
||||
let mut dict = LitDict::new();
|
||||
Dict![@dict=dict, $($tts)*];
|
||||
dict
|
||||
}};
|
||||
($($tts:tt)*) => { Expr::Lit(Lit::Dict(Dict![@$($tts)*])) };
|
||||
($($tts:tt)*) => (Expr::Lit(Lit::Dict(Dict![@$($tts)*])));
|
||||
}
|
||||
|
||||
macro_rules! Tree {
|
||||
(@$($node:expr),* $(,)?) => {
|
||||
vec![$(Into::<Spanned<SynNode>>::into($node)),*]
|
||||
};
|
||||
($($tts:tt)*) => { Expr::Lit(Lit::Content(Tree![@$($tts)*])) };
|
||||
macro_rules! Content {
|
||||
(@$($node:expr),* $(,)?) => (vec![$(into!($node)),*]);
|
||||
($($tts:tt)*) => (Expr::Lit(Lit::Content(Content![@$($tts)*])));
|
||||
}
|
||||
|
||||
// ------------------------------------ Test Macros ----------------------------------- //
|
||||
|
||||
// Test syntax trees with or without spans.
|
||||
macro_rules! t { ($($tts:tt)*) => {test!(@spans=false, $($tts)*)} }
|
||||
macro_rules! ts { ($($tts:tt)*) => {test!(@spans=true, $($tts)*)} }
|
||||
macro_rules! test {
|
||||
(@spans=$spans:expr, $src:expr => $($tts:tt)*) => {
|
||||
let exp = Tree![@$($tts)*];
|
||||
let pass = parse($src);
|
||||
check($src, exp, pass.output, $spans);
|
||||
macro_rules! Call {
|
||||
(@@$name:expr) => {
|
||||
Call!(@@$name, Args![])
|
||||
};
|
||||
}
|
||||
|
||||
// Test expressions.
|
||||
macro_rules! v {
|
||||
($src:expr => $($tts:tt)*) => {
|
||||
t!(concat!("[val ", $src, "]") => F!("val"; $($tts)*));
|
||||
(@@$name:expr, $args:expr) => {
|
||||
ExprCall {
|
||||
name: into!($name).map(|s: &str| Ident(s.into())),
|
||||
args: into!($args),
|
||||
}
|
||||
}
|
||||
|
||||
// Test error messages.
|
||||
macro_rules! e {
|
||||
($src:expr => $($tts:tt)*) => {
|
||||
let exp = vec![$($tts)*];
|
||||
let pass = parse($src);
|
||||
let found = pass.feedback.diags.iter()
|
||||
.map(|s| s.as_ref().map(|e| e.message.as_str()))
|
||||
.collect::<Vec<_>>();
|
||||
check($src, exp, found, true);
|
||||
};
|
||||
(@$($tts:tt)*) => (Expr::Call(Call!(@@$($tts)*)));
|
||||
($($tts:tt)*) => (SynNode::Expr(Call!(@$($tts)*)));
|
||||
}
|
||||
|
||||
// Test decorations.
|
||||
macro_rules! d {
|
||||
($src:expr => $($tts:tt)*) => {
|
||||
let exp = vec![$($tts)*];
|
||||
let pass = parse($src);
|
||||
check($src, exp, pass.feedback.decos, true);
|
||||
};
|
||||
}
|
||||
|
||||
/// Assert that expected and found are equal, printing both and panicking
|
||||
/// and the source of their test case if they aren't.
|
||||
///
|
||||
/// When `cmp_spans` is false, spans are ignored.
|
||||
#[track_caller]
|
||||
pub fn check<T>(src: &str, exp: T, found: T, cmp_spans: bool)
|
||||
where
|
||||
T: Debug + PartialEq,
|
||||
{
|
||||
Span::set_cmp(cmp_spans);
|
||||
let equal = exp == found;
|
||||
Span::set_cmp(true);
|
||||
|
||||
if !equal {
|
||||
println!("source: {:?}", src);
|
||||
if cmp_spans {
|
||||
println!("expected: {:#?}", exp);
|
||||
println!("found: {:#?}", found);
|
||||
} else {
|
||||
println!("expected: {:?}", exp);
|
||||
println!("found: {:?}", found);
|
||||
}
|
||||
panic!("test failed");
|
||||
}
|
||||
}
|
||||
|
||||
pub fn s<T>(start: u32, end: u32, v: T) -> Spanned<T> {
|
||||
v.span_with(Span::new(start, end))
|
||||
}
|
||||
|
||||
// Enables tests to optionally specify spans.
|
||||
impl<T> From<T> for Spanned<T> {
|
||||
fn from(t: T) -> Self {
|
||||
Spanned::zero(t)
|
||||
}
|
||||
}
|
||||
|
||||
// --------------------------------------- Tests -------------------------------------- //
|
||||
|
||||
#[test]
|
||||
fn test_parse_groups() {
|
||||
e!("[)" => s(1, 2, "expected function name, found closing paren"),
|
||||
s(2, 2, "expected closing bracket"));
|
||||
|
||||
e!("[v {]}" => s(4, 4, "expected closing brace"),
|
||||
s(5, 6, "unexpected closing brace"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_simple_nodes() {
|
||||
t!("" => );
|
||||
t!("hi" => T("hi"));
|
||||
t!("*hi" => B, T("hi"));
|
||||
t!("hi_" => T("hi"), E);
|
||||
t!("hi you" => T("hi"), S, T("you"));
|
||||
t!("special~name" => T("special"), T("\u{00A0}"), T("name"));
|
||||
t!("special\\~name" => T("special"), T("~"), T("name"));
|
||||
t!("\\u{1f303}" => T("🌃"));
|
||||
t!("\n\n\nhello" => P, T("hello"));
|
||||
t!(r"a\ b" => T("a"), L, S, T("b"));
|
||||
|
||||
e!("\\u{d421c809}" => s(0, 12, "invalid unicode escape sequence"));
|
||||
e!("\\u{abc" => s(6, 6, "expected closing brace"));
|
||||
t!("💜\n\n 🌍" => T("💜"), P, T("🌍"));
|
||||
|
||||
ts!("hi" => s(0, 2, T("hi")));
|
||||
ts!("*Hi*" => s(0, 1, B), s(1, 3, T("Hi")), s(3, 4, B));
|
||||
ts!("💜\n\n 🌍" => s(0, 4, T("💜")), s(4, 7, P), s(7, 11, T("🌍")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_raw() {
|
||||
t!("`py`" => R![None, true, "py"]);
|
||||
t!("`hi\nyou" => R![None, true, "hi", "you"]);
|
||||
t!(r"`` hi\`du``" => R![None, true, r"hi\`du"]);
|
||||
|
||||
// More than one backtick with optional language tag.
|
||||
t!("``` console.log(\n\"alert\"\n)" => R![None, false, "console.log(", "\"alert\"", ")"]);
|
||||
t!("````typst \r\n Typst uses ``` to indicate code blocks````!"
|
||||
=> R![Lang("typst"), false, " Typst uses ``` to indicate code blocks"], T("!"));
|
||||
|
||||
// Trimming of whitespace.
|
||||
t!("`` a ``" => R![None, true, "a"]);
|
||||
t!("`` a ``" => R![None, true, "a "]);
|
||||
t!("`` ` ``" => R![None, true, "`"]);
|
||||
t!("``` ` ```" => R![None, true, " ` "]);
|
||||
t!("``` ` \n ```" => R![None, false, " ` "]);
|
||||
|
||||
// Errors.
|
||||
e!("`hi\nyou" => s(7, 7, "expected backtick(s)"));
|
||||
e!("``` hi\nyou" => s(10, 10, "expected backtick(s)"));
|
||||
|
||||
// TODO: Bring back when spans/errors are in place.
|
||||
// ts!("``java out``" => s(0, 12, R![Lang(s(2, 6, "java")), true, "out"]));
|
||||
// e!("```🌍 hi\nyou```" => s(3, 7, "invalid identifier"));
|
||||
macro_rules! Args {
|
||||
($($tts:tt)*) => (Dict![@$($tts)*]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_comments() {
|
||||
// In body.
|
||||
t!("hi// you\nw" => T("hi"), S, T("w"));
|
||||
t!("first//\n//\nsecond" => T("first"), S, S, T("second"));
|
||||
t!("first//\n \nsecond" => T("first"), P, T("second"));
|
||||
t!("first/*\n \n*/second" => T("first"), T("second"));
|
||||
e!("🌎\n*/n" => s(5, 7, "unexpected end of block comment"));
|
||||
t!("a// you\nb" Text("a"), Space, Text("b"));
|
||||
t!("* // \n /*\n\n*/*" Strong, Space, Space, Strong);
|
||||
|
||||
// In header.
|
||||
t!("[val /*12pt*/]" => F!("val"));
|
||||
t!("[val \n /* \n */]" => F!("val"));
|
||||
e!("[val \n /* \n */]" => );
|
||||
e!("[val 12, /* \n */ 14]" => );
|
||||
t!("[v /*12pt*/]" Call!("v"));
|
||||
t!("[v //\n]" Call!("v"));
|
||||
t!("[v 12, /*\n*/ size: 14]" Call!("v", Args![Int(12), "size" => Int(14)]));
|
||||
|
||||
// Error.
|
||||
t!("a*/b"
|
||||
nodes: [Text("a"), Text("b")],
|
||||
errors: [S(1..3, "unexpected end of block comment")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_simple_nodes() {
|
||||
// Basics.
|
||||
t!("");
|
||||
t!(" " Space);
|
||||
t!("hi" Text("hi"));
|
||||
t!("🧽" Text("🧽"));
|
||||
t!("_" Emph);
|
||||
t!("*" Strong);
|
||||
t!("~" Text("\u{00A0}"));
|
||||
t!(r"\" Linebreak);
|
||||
t!("\n\n" Parbreak);
|
||||
|
||||
// Multiple nodes.
|
||||
t!("ab c" Text("ab"), Space, Text("c"));
|
||||
t!("a`hi`\r\n\r*" Text("a"), Raw(None, &["hi"], true), Parbreak, Strong);
|
||||
|
||||
// Spans.
|
||||
t!("*🌍*"
|
||||
nodes: [S(0..1, Strong), S(1..5, Text("🌍")), S(5..6, Strong)],
|
||||
spans: true);
|
||||
|
||||
// Errors.
|
||||
t!("]}"
|
||||
nodes: [],
|
||||
errors: [S(0..1, "unexpected closing bracket"),
|
||||
S(1..2, "unexpected closing brace")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_headings() {
|
||||
t!("## Hello world!" => H![1, S, T("Hello"), S, T("world!")]);
|
||||
// Basics with spans.
|
||||
t!("#a"
|
||||
nodes: [S(0..2, Heading(S(0..1, 0), Content![@S(1..2, Text("a"))]))],
|
||||
spans: true);
|
||||
|
||||
// Handle various whitespace usages.
|
||||
t!("####Simple" => H![3, T("Simple")]);
|
||||
t!(" # Whitespace!" => S, H![0, S, T("Whitespace!")]);
|
||||
t!(" /* TODO: Improve */ ## Analysis" => S, S, H!(1, S, T("Analysis")));
|
||||
t!("# Heading \n ends" => H![0, S, T("Heading")], S, T("ends"));
|
||||
// Multiple hashtags.
|
||||
t!("###three" Heading(2, Content![@Text("three")]));
|
||||
t!("###### six" Heading(5, Content![@Space, Text("six")]));
|
||||
|
||||
// Complex heading contents.
|
||||
t!("Some text [box][### Valuable facts]" => T("Some"), S, T("text"), S,
|
||||
F!("box"; Tree![H!(2, S, T("Valuable"), S, T("facts"))])
|
||||
);
|
||||
t!("### Grandiose stuff [box][Get it \n\n straight]" => H![
|
||||
2,
|
||||
S, T("Grandiose"), S, T("stuff"), S,
|
||||
F!("box"; Tree![T("Get"), S, T("it"), P, T("straight")])
|
||||
]);
|
||||
t!("###### Multiline \\ headings" => H![5, S, T("Multiline"), S, L, S, T("headings")]);
|
||||
// Start of heading.
|
||||
t!("/**/#" Heading(0, Content![@]));
|
||||
t!("[f][#ok]" Call!("f", Args![Content![Heading(0, Content![@Text("ok")])]]));
|
||||
|
||||
// Things that should not become headings.
|
||||
t!("\\## Text" => T("#"), T("#"), S, T("Text"));
|
||||
t!(" ###### # Text" => S, H![5, S, T("#"), S, T("Text")]);
|
||||
t!("I am #1" => T("I"), S, T("am"), S, T("#"), T("1"));
|
||||
t!("[box][\n] # hi" => F!("box"; Tree![S]), S, T("#"), S, T("hi"));
|
||||
// End of heading.
|
||||
t!("#a\nb" Heading(0, Content![@Text("a")]), Space, Text("b"));
|
||||
|
||||
// Depth warnings.
|
||||
e!("########" => s(0, 8, "section depth should be at most 6"));
|
||||
// Continued heading.
|
||||
t!("#a{\n1\n}b" Heading(0, Content![@Text("a"), Block(Int(1)), Text("b")]));
|
||||
t!("#a[f][\n\n]d" Heading(0, Content![@
|
||||
Text("a"), Call!("f", Args![Content![Parbreak]]), Text("d"),
|
||||
]));
|
||||
|
||||
// No heading.
|
||||
t!(r"\#" Text("#"));
|
||||
t!("Nr. #1" Text("Nr."), Space, Text("#"), Text("1"));
|
||||
t!("[v]#" Call!("v"), Text("#"));
|
||||
|
||||
// Too many hashtags.
|
||||
t!("####### seven"
|
||||
nodes: [Heading(5, Content![@Space, Text("seven")])],
|
||||
warnings: [S(0..7, "section depth should not exceed 6")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_function_names() {
|
||||
// No closing bracket.
|
||||
t!("[" => F!(""));
|
||||
e!("[" => s(1, 1, "expected function name"),
|
||||
s(1, 1, "expected closing bracket"));
|
||||
fn test_parse_raw() {
|
||||
// Basic, mostly tested in tokenizer and resolver.
|
||||
t!("`py`" nodes: [S(0..4, Raw(None, &["py"], true))], spans: true);
|
||||
t!("`endless"
|
||||
nodes: [Raw(None, &["endless"], true)],
|
||||
errors: [S(8..8, "expected backtick(s)")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_escape_sequences() {
|
||||
// Basic, mostly tested in tokenizer.
|
||||
t!(r"\[" Text("["));
|
||||
t!(r"\u{1F3D5}" nodes: [S(0..9, Text("🏕"))], spans: true);
|
||||
|
||||
// Bad value.
|
||||
t!(r"\u{FFFFFF}"
|
||||
nodes: [Text(r"\u{FFFFFF}")],
|
||||
errors: [S(0..10, "invalid unicode escape sequence")]);
|
||||
|
||||
// No closing brace.
|
||||
t!(r"\u{41*"
|
||||
nodes: [Text("A"), Strong],
|
||||
errors: [S(5..5, "expected closing brace")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_groups() {
|
||||
// Test paren group.
|
||||
t!("{([v 1) + 3}"
|
||||
nodes: [Block(Binary(
|
||||
Content![Call!("v", Args![Int(1)])],
|
||||
Add,
|
||||
Int(3),
|
||||
))],
|
||||
errors: [S(6..6, "expected closing bracket")]);
|
||||
|
||||
// Test bracket group.
|
||||
t!("[)"
|
||||
nodes: [Call!("")],
|
||||
errors: [S(1..2, "expected function name, found closing paren"),
|
||||
S(2..2, "expected closing bracket")]);
|
||||
|
||||
t!("[v {]}"
|
||||
nodes: [Call!("v", Args![Content![]])],
|
||||
errors: [S(4..4, "expected closing brace"),
|
||||
S(5..6, "unexpected closing brace")]);
|
||||
|
||||
// Test brace group.
|
||||
t!("{1 + [}"
|
||||
nodes: [Block(Binary(Int(1), Add, Content![Call!("")]))],
|
||||
errors: [S(6..6, "expected function name"),
|
||||
S(6..6, "expected closing bracket")]);
|
||||
|
||||
// Test subheader group.
|
||||
t!("[v (|u )]"
|
||||
nodes: [Call!("v", Args![Dict![], Content![Call!("u")]])],
|
||||
errors: [S(4..4, "expected closing paren"),
|
||||
S(7..8, "expected expression, found closing paren")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_blocks() {
|
||||
// Basic with spans.
|
||||
t!("{1}" nodes: [S(0..3, Block(Int(1)))], spans: true);
|
||||
|
||||
// Function calls.
|
||||
t!("{f()}" Call!("f"));
|
||||
t!("{[f]}" Block(Content![Call!("f")]));
|
||||
|
||||
// Missing or bad value.
|
||||
t!("{}{1u}"
|
||||
nodes: [],
|
||||
errors: [S(1..1, "expected expression"),
|
||||
S(3..5, "expected expression, found invalid token")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_bracket_funcs() {
|
||||
// Basic.
|
||||
t!("[function]" Call!("function"));
|
||||
t!("[ v ]" Call!("v"));
|
||||
|
||||
// Body and no body.
|
||||
t!("[v][[f]]" Call!("v", Args![Content![Call!("f")]]));
|
||||
t!("[v][v][v]" Call!("v", Args![Content![Text("v")]]), Call!("v"));
|
||||
t!("[v] [f]" Call!("v"), Space, Call!("f"));
|
||||
|
||||
// Spans.
|
||||
t!("[v 1][📐]"
|
||||
nodes: [S(0..11, Call!(S(1..2, "v"), S(3..4, Args![
|
||||
S(3..4, Int(1)),
|
||||
S(5..11, Content![S(6..10, Text("📐"))]),
|
||||
])))],
|
||||
spans: true);
|
||||
|
||||
// No name and no closing bracket.
|
||||
t!("["
|
||||
nodes: [Call!("")],
|
||||
errors: [S(1..1, "expected function name"),
|
||||
S(1..1, "expected closing bracket")]);
|
||||
|
||||
// No name.
|
||||
e!("[]" => s(1, 1, "expected function name"));
|
||||
e!("[\"]" => s(1, 3, "expected function name, found string"),
|
||||
s(3, 3, "expected closing bracket"));
|
||||
t!("[]"
|
||||
nodes: [Call!("")],
|
||||
errors: [S(1..1, "expected function name")]);
|
||||
|
||||
// A valid name.
|
||||
t!("[hi]" => F!("hi"));
|
||||
t!("[ f]" => F!("f"));
|
||||
// Bad name.
|
||||
t!("[# 1]"
|
||||
nodes: [Call!("", Args![Int(1)])],
|
||||
errors: [S(1..2, "expected function name, found hex value")]);
|
||||
|
||||
// An invalid name.
|
||||
e!("[12]" => s(1, 3, "expected function name, found integer"));
|
||||
e!("[ 🌎]" => s(3, 7, "expected function name, found invalid token"));
|
||||
// String header eats closing bracket.
|
||||
t!(r#"[v "]"#
|
||||
nodes: [Call!("v", Args![Str("]")])],
|
||||
errors: [S(5..5, "expected quote"),
|
||||
S(5..5, "expected closing bracket")]);
|
||||
|
||||
// Raw in body eats closing bracket.
|
||||
t!("[v][`a]`"
|
||||
nodes: [Call!("v", Args![Content![Raw(None, &["a]"], true)]])],
|
||||
errors: [S(8..8, "expected closing bracket")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_chaining() {
|
||||
// Things the parser has to make sense of
|
||||
t!("[hi (5.0, 2.1 | you]" => F!("hi"; Dict![Float(5.0), Float(2.1)], Tree![F!("you")]));
|
||||
t!("[box | pad: 1pt][Hi]" => F!("box"; Tree![
|
||||
F!("pad"; Length(1.0, Pt), Tree!(T("Hi")))
|
||||
]));
|
||||
t!("[bold 400, | emph | sub: 1cm]" => F!("bold"; Int(400), Tree![
|
||||
F!("emph"; Tree!(F!("sub"; Length(1.0, Cm))))
|
||||
]));
|
||||
// Basic.
|
||||
t!("[a | b]" Call!("a", Args![Content![Call!("b")]]));
|
||||
t!("[a | b | c]" Call!("a", Args![Content![
|
||||
Call!("b", Args![Content![Call!("c")]])
|
||||
]]));
|
||||
|
||||
// Errors for unclosed / empty predecessor groups
|
||||
e!("[hi (5.0, 2.1 | you]" => s(14, 14, "expected closing paren"));
|
||||
e!("[| abc]" => s(1, 1, "expected function name"));
|
||||
e!("[box |][Hi]" => s(6, 6, "expected function name"));
|
||||
// With body and spans.
|
||||
t!("[a|b][💕]"
|
||||
nodes: [S(0..11, Call!(S(1..2, "a"), S(2..2, Args![
|
||||
S(3..11, Content![S(3..11, Call!(S(3..4, "b"), S(4..4, Args![
|
||||
S(5..11, Content![S(6..10, Text("💕"))])
|
||||
])))])
|
||||
])))],
|
||||
spans: true);
|
||||
|
||||
// No name in second subheader.
|
||||
t!("[a 1|]"
|
||||
nodes: [Call!("a", Args![Int(1), Content![Call!("")]])],
|
||||
errors: [S(5..5, "expected function name")]);
|
||||
|
||||
// No name in first subheader.
|
||||
t!("[|a true]"
|
||||
nodes: [Call!("", Args![Content![Call!("a", Args![Bool(true)])]])],
|
||||
errors: [S(1..1, "expected function name")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_function_bodies() {
|
||||
t!("[val 1][*Hi*]" => F!("val"; Int(1), Tree![B, T("Hi"), B]));
|
||||
e!(" [val][ */]" => s(8, 10, "unexpected end of block comment"));
|
||||
fn test_parse_arguments() {
|
||||
// Bracket functions.
|
||||
t!("[v 1]" Call!("v", Args![Int(1)]));
|
||||
t!("[v 1,]" Call!("v", Args![Int(1)]));
|
||||
t!("[v a]" Call!("v", Args![Id("a")]));
|
||||
t!("[v a,]" Call!("v", Args![Id("a")]));
|
||||
t!("[v a:2]" Call!("v", Args!["a" => Int(2)]));
|
||||
|
||||
// Raw in body.
|
||||
t!("[val][`Hi]`" => F!("val"; Tree![R![None, true, "Hi]"]]));
|
||||
e!("[val][`Hi]`" => s(11, 11, "expected closing bracket"));
|
||||
// Parenthesized function with nested dictionary literal.
|
||||
t!(r#"{f(1, a: (2, 3), #004, b: "five")}"# Block(Call!(@"f", Args![
|
||||
Int(1),
|
||||
"a" => Dict![Int(2), Int(3)],
|
||||
Color(RgbaColor::new(0, 0, 0x44, 0xff)),
|
||||
"b" => Str("five"),
|
||||
])));
|
||||
|
||||
// Crazy.
|
||||
t!("[v][[v][v][v]]" => F!("v"; Tree![F!("v"; Tree![T("v")]), F!("v")]));
|
||||
// Bad expression.
|
||||
t!("[v */]"
|
||||
nodes: [Call!("v", Args![])],
|
||||
errors: [S(3..5, "expected expression, found end of block comment")]);
|
||||
|
||||
// Spanned.
|
||||
ts!(" [box][Oh my]" =>
|
||||
s(0, 1, S),
|
||||
s(1, 13, F!(s(2, 5, "box"), 5 .. 5;
|
||||
s(6, 13, Tree![
|
||||
s(7, 9, T("Oh")), s(9, 10, S), s(10, 12, T("my")),
|
||||
])
|
||||
))
|
||||
);
|
||||
// Missing comma between arguments.
|
||||
t!("[v 1 2]"
|
||||
nodes: [Call!("v", Args![Int(1), Int(2)])],
|
||||
errors: [S(4..4, "expected comma")]);
|
||||
|
||||
// Missing expression after name.
|
||||
t!("[v a:]"
|
||||
nodes: [Call!("v", Args![])],
|
||||
errors: [S(5..5, "expected expression")]);
|
||||
|
||||
// Bad expression after name.
|
||||
t!("[v a:1:]"
|
||||
nodes: [Call!("v", Args!["a" => Int(1)])],
|
||||
errors: [S(6..7, "expected expression, found colon")]);
|
||||
|
||||
// Name has to be identifier. Number parsed as positional argument.
|
||||
t!("[v 1:]"
|
||||
nodes: [Call!("v", Args![Int(1)])],
|
||||
errors: [S(4..5, "expected expression, found colon")]);
|
||||
|
||||
// Parsed as two positional arguments.
|
||||
t!("[v 1:2]"
|
||||
nodes: [Call!("v", Args![Int(1), Int(2)])],
|
||||
errors: [S(4..5, "expected expression, found colon"),
|
||||
S(4..4, "expected comma")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_values() {
|
||||
// Simple.
|
||||
v!("_" => Id("_"));
|
||||
v!("name" => Id("name"));
|
||||
v!("ke-bab" => Id("ke-bab"));
|
||||
v!("α" => Id("α"));
|
||||
v!("\"hi\"" => Str("hi"));
|
||||
v!("true" => Bool(true));
|
||||
v!("false" => Bool(false));
|
||||
v!("1.0e-4" => Float(1e-4));
|
||||
v!("3.15" => Float(3.15));
|
||||
v!("50%" => Percent(50.0));
|
||||
v!("4.5cm" => Length(4.5, Cm));
|
||||
v!("12e1pt" => Length(12e1, Pt));
|
||||
v!("#f7a20500" => Color(RgbaColor::new(0xf7, 0xa2, 0x05, 0x00)));
|
||||
v!("\"a\n[]\\\"string\"" => Str("a\n[]\"string"));
|
||||
fn test_parse_dict_literals() {
|
||||
// Basic.
|
||||
t!("{()}" Block(Dict![]));
|
||||
|
||||
// Content.
|
||||
v!("{_hi_}" => Tree![E, T("hi"), E]);
|
||||
e!("[val {_hi_}]" => );
|
||||
v!("[hi]" => Tree![F!("hi")]);
|
||||
e!("[val [hi]]" => );
|
||||
// With spans.
|
||||
t!("{(1, two: 2)}"
|
||||
nodes: [S(0..13, Block(Dict![
|
||||
S(2..3, Int(1)),
|
||||
S(5..8, "two") => S(10..11, Int(2)),
|
||||
]))],
|
||||
spans: true);
|
||||
|
||||
// Healed colors.
|
||||
v!("#12345" => Color(RgbaColor::new(0, 0, 0, 0xff)));
|
||||
e!("[val #12345]" => s(5, 11, "invalid color"));
|
||||
e!("[val #a5]" => s(5, 8, "invalid color"));
|
||||
e!("[val #14b2ah]" => s(5, 12, "invalid color"));
|
||||
e!("[val #f075ff011]" => s(5, 15, "invalid color"));
|
||||
|
||||
// Unclosed string.
|
||||
v!("\"hello" => Str("hello]"));
|
||||
e!("[val \"hello]" => s(12, 12, "expected quote"),
|
||||
s(12, 12, "expected closing bracket"));
|
||||
|
||||
// Spanned.
|
||||
ts!("[val 1.4]" => s(0, 9, F!(s(1, 4, "val"), 5 .. 8; s(5, 8, Float(1.4)))));
|
||||
// Unclosed.
|
||||
t!("{(}"
|
||||
nodes: [Block(Dict![])],
|
||||
errors: [S(2..2, "expected closing paren")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_expressions() {
|
||||
// Coerced dict.
|
||||
v!("(hi)" => Id("hi"));
|
||||
// Parenthesis.
|
||||
t!("{(x)}" Block(Id("x")));
|
||||
|
||||
// Operations.
|
||||
v!("-1" => Unary(Neg, Int(1)));
|
||||
v!("-- 1" => Unary(Neg, Unary(Neg, Int(1))));
|
||||
v!("--css" => Unary(Neg, Unary(Neg, Id("css"))));
|
||||
v!("3.2in + 6pt" => Binary(Add, Length(3.2, In), Length(6.0, Pt)));
|
||||
v!("5 - 0.01" => Binary(Sub, Int(5), Float(0.01)));
|
||||
v!("(3mm * 2)" => Binary(Mul, Length(3.0, Mm), Int(2)));
|
||||
v!("12e-3cm/1pt" => Binary(Div, Length(12e-3, Cm), Length(1.0, Pt)));
|
||||
// Unary operations.
|
||||
t!("{-1}" Block(Unary(Neg, Int(1))));
|
||||
t!("{--1}" Block(Unary(Neg, Unary(Neg, Int(1)))));
|
||||
|
||||
// More complex.
|
||||
v!("(3.2in + 6pt)*(5/2-1)" => Binary(
|
||||
Mul,
|
||||
Binary(Add, Length(3.2, In), Length(6.0, Pt)),
|
||||
Binary(Sub, Binary(Div, Int(5), Int(2)), Int(1))
|
||||
));
|
||||
v!("(6.3E+2+4* - 3.2pt)/2" => Binary(
|
||||
Div,
|
||||
Binary(Add, Float(6.3e2), Binary(
|
||||
Mul,
|
||||
Int(4),
|
||||
Unary(Neg, Length(3.2, Pt))
|
||||
)),
|
||||
Int(2)
|
||||
));
|
||||
// Binary operations.
|
||||
t!(r#"{"x"+"y"}"# Block(Binary(Str("x"), Add, Str("y"))));
|
||||
t!("{1-2}" Block(Binary(Int(1), Sub, Int(2))));
|
||||
t!("{a * b}" Block(Binary(Id("a"), Mul, Id("b"))));
|
||||
t!("{12pt/.4}" Block(Binary(Length(12.0, Unit::Pt), Div, Float(0.4))));
|
||||
|
||||
// Associativity of multiplication and division.
|
||||
v!("3/4*5" => Binary(Mul, Binary(Div, Int(3), Int(4)), Int(5)));
|
||||
// Associativity.
|
||||
t!("{1+2+3}" Block(Binary(Binary(Int(1), Add, Int(2)), Add, Int(3))));
|
||||
t!("{1/2*3}" Block(Binary(Binary(Int(1), Div, Int(2)), Mul, Int(3))));
|
||||
|
||||
// Spanned.
|
||||
ts!("[val 1 + 3]" => s(0, 11, F!(
|
||||
s(1, 4, "val"), 5 .. 10; s(5, 10, Binary(
|
||||
s(7, 8, Add),
|
||||
s(5, 6, Int(1)),
|
||||
s(9, 10, Int(3))
|
||||
))
|
||||
// Precedence.
|
||||
t!("{1+2*-3}" Block(Binary(
|
||||
Int(1), Add, Binary(Int(2), Mul, Unary(Neg, Int(3))),
|
||||
)));
|
||||
|
||||
// Span of parenthesized expression contains parens.
|
||||
ts!("[val (1)]" => s(0, 9, F!(s(1, 4, "val"), 5 .. 8; s(5, 8, Int(1)))));
|
||||
// Confusion with floating-point literal.
|
||||
t!("{1e-3-4e+4}" Block(Binary(Float(1e-3), Sub, Float(4e+4))));
|
||||
|
||||
// Invalid expressions.
|
||||
v!("4pt--" => Length(4.0, Pt));
|
||||
e!("[val 4pt--]" => s(9, 10, "missing factor"),
|
||||
s(5, 9, "missing right summand"));
|
||||
// Spans + parentheses winning over precedence.
|
||||
t!("{(1+2)*3}"
|
||||
nodes: [S(0..9, Block(Binary(
|
||||
S(1..6, Binary(S(2..3, Int(1)), S(3..4, Add), S(4..5, Int(2)))),
|
||||
S(6..7, Mul),
|
||||
S(7..8, Int(3)),
|
||||
)))],
|
||||
spans: true);
|
||||
|
||||
v!("3mm+4pt*" => Binary(Add, Length(3.0, Mm), Length(4.0, Pt)));
|
||||
e!("[val 3mm+4pt*]" => s(9, 13, "missing right factor"));
|
||||
// Errors.
|
||||
t!("{-}{1+}{2*}"
|
||||
nodes: [Block(Int(1)), Block(Int(2))],
|
||||
errors: [S(2..2, "expected expression"),
|
||||
S(6..6, "expected expression"),
|
||||
S(10..10, "expected expression")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_dicts() {
|
||||
// Okay.
|
||||
v!("()" => Dict![]);
|
||||
v!("(false)" => Bool(false));
|
||||
v!("(true,)" => Dict![Bool(true)]);
|
||||
v!("(key: val)" => Dict!["key" => Id("val")]);
|
||||
v!("(1, 2)" => Dict![Int(1), Int(2)]);
|
||||
v!("(1, key: \"value\")" => Dict![Int(1), "key" => Str("value")]);
|
||||
fn test_parse_values() {
|
||||
// Basics.
|
||||
t!("{_}" Block(Id("_")));
|
||||
t!("{name}" Block(Id("name")));
|
||||
t!("{ke-bab}" Block(Id("ke-bab")));
|
||||
t!("{α}" Block(Id("α")));
|
||||
t!("{true}" Block(Bool(true)));
|
||||
t!("{false}" Block(Bool(false)));
|
||||
t!("{1.0e-4}" Block(Float(1e-4)));
|
||||
t!("{3.15}" Block(Float(3.15)));
|
||||
t!("{50%}" Block(Percent(50.0)));
|
||||
t!("{4.5cm}" Block(Length(4.5, Unit::Cm)));
|
||||
t!("{12e1pt}" Block(Length(12e1, Unit::Pt)));
|
||||
|
||||
// Decorations.
|
||||
d!("[val key: hi]" => s(5, 8, DictKey));
|
||||
d!("[val (key: hi)]" => s(6, 9, DictKey));
|
||||
d!("[val f(key: hi)]" => s(7, 10, DictKey));
|
||||
// Strings.
|
||||
t!(r#"{"hi"}"# Block(Str("hi")));
|
||||
t!(r#"{"a\n[]\"\u{1F680}string"}"# Block(Str("a\n[]\"🚀string")));
|
||||
|
||||
// Spanned with spacing around named arguments.
|
||||
ts!("[val \n hi \n : /* //\n */ \"s\n\"]" => s(0, 30, F!(
|
||||
s(1, 4, "val"),
|
||||
8 .. 29; s(8, 10, "hi") => s(25, 29, Str("s\n"))
|
||||
)));
|
||||
e!("[val \n hi \n : /* //\n */ \"s\n\"]" => );
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_dicts_paren_func_calls() {
|
||||
v!("empty()" => Call!("empty"));
|
||||
v!("add ( 1 , 2 )" => Call!("add"; Int(1), Int(2)));
|
||||
v!("items(\"fire\", #f93a6d)" => Call!("items";
|
||||
Str("fire"), Color(RgbaColor::new(0xf9, 0x3a, 0x6d, 0xff))
|
||||
));
|
||||
|
||||
// More complex.
|
||||
v!(r#"css(1pt, color: rgb(90, 102, 254), stroke: "solid")"# => Call!(
|
||||
"css";
|
||||
Length(1.0, Pt),
|
||||
"color" => Call!("rgb"; Int(90), Int(102), Int(254)),
|
||||
"stroke" => Str("solid"),
|
||||
));
|
||||
|
||||
// Unclosed.
|
||||
v!("lang(中文]" => Call!("lang"; Id("中文")));
|
||||
e!("[val lang(中文]" => s(16, 16, "expected closing paren"));
|
||||
|
||||
// Invalid name.
|
||||
v!("👠(\"abc\", 13e-5)" => Dict!(Str("abc"), Float(13.0e-5)));
|
||||
e!("[val 👠(\"abc\", 13e-5)]" => s(5, 9, "invalid token"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_dicts_nested() {
|
||||
v!("(1, ( ab:(), d : (3, 14pt) )), false" =>
|
||||
Dict![
|
||||
Int(1),
|
||||
Dict!(
|
||||
"ab" => Dict![],
|
||||
"d" => Dict!(Int(3), Length(14.0, Pt)),
|
||||
),
|
||||
],
|
||||
Bool(false),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_dicts_errors() {
|
||||
// Expected value.
|
||||
e!("[val (:)]" => s(6, 7, "unexpected colon"));
|
||||
e!("[val (,)]" => s(6, 7, "unexpected comma"));
|
||||
v!("(\x07 abc,)" => Dict![Id("abc")]);
|
||||
e!("[val (\x07 abc,)]" => s(6, 7, "invalid token"));
|
||||
e!("[val (key:,)]" => s(10, 11, "expected value, found comma"));
|
||||
e!("[val hi,)]" => s(8, 9, "unexpected closing paren"));
|
||||
|
||||
// Expected comma.
|
||||
v!("(true false)" => Dict![Bool(true), Bool(false)]);
|
||||
e!("[val (true false)]" => s(10, 10, "expected comma"));
|
||||
|
||||
// Expected closing paren.
|
||||
e!("[val (#000]" => s(10, 10, "expected closing paren"));
|
||||
e!("[val (key]" => s(9, 9, "expected closing paren"));
|
||||
e!("[val (key:]" => s(10, 10, "expected value"),
|
||||
s(10, 10, "expected closing paren"));
|
||||
|
||||
// Bad key.
|
||||
v!("true:you" => Bool(true), Id("you"));
|
||||
e!("[val true:you]" => s(9, 10, "unexpected colon"));
|
||||
|
||||
// Unexpected colon.
|
||||
v!("z:y:4" => "z" => Id("y"), Int(4));
|
||||
e!("[val z:y:4]" => s(8, 9, "unexpected colon"));
|
||||
// Colors.
|
||||
t!("{#f7a20500}" Block(Color(RgbaColor::new(0xf7, 0xa2, 0x05, 0))));
|
||||
t!("{#a5}"
|
||||
nodes: [Block(Color(RgbaColor::new(0, 0, 0, 0xff)))],
|
||||
errors: [S(1..4, "invalid color")]);
|
||||
}
|
||||
|
@@ -6,19 +6,20 @@ use super::*;
/// code.
#[derive(Debug, Clone, PartialEq)]
pub enum SynNode {
/// Whitespace containing less than two newlines.
Space,
/// Plain text.
Text(String),

/// Whitespace containing less than two newlines.
Space,
/// A forced line break.
Linebreak,
/// A paragraph break.
Parbreak,
/// Emphasized text was enabled / disabled.
Emph,

/// Strong text was enabled / disabled.
Strong,
/// Emphasized text was enabled / disabled.
Emph,

/// A section heading.
Heading(NodeHeading),
@@ -26,16 +26,16 @@ pub enum Token<'s> {
Star,
/// An underscore: `_`.
Underscore,
/// A hashtag indicating a section heading: `#`.
Hashtag,
/// A tilde: `~`.
Tilde,
/// A backslash followed by whitespace: `\`.
Backslash,
/// A unicode escape sequence: `\u{1F5FA}`.
UnicodeEscape(TokenUnicodeEscape<'s>),
/// A hashtag indicating a section heading: `#`.
Hashtag,
/// A raw block: `` `...` ``.
Raw(TokenRaw<'s>),
/// A unicode escape sequence: `\u{1F5FA}`.
UnicodeEscape(TokenUnicodeEscape<'s>),

/// A left bracket: `[`.
LeftBracket,