diff --git a/Cargo.toml b/Cargo.toml index c7fa703c4..6a5b72b99 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,10 +5,11 @@ authors = ["The Typst Project Developers"] edition = "2018" [features] -default = ["cli", "fs", "layout-cache"] +default = ["cli", "fs", "layout-cache", "parse-cache"] cli = ["anyhow", "codespan-reporting", "fs", "pico-args", "same-file"] fs = ["dirs", "memmap2", "same-file", "walkdir"] layout-cache = ["rand"] +parse-cache = [] [profile.dev] # Faster compilation diff --git a/src/eval/capture.rs b/src/eval/capture.rs index f0a2b7292..baf597472 100644 --- a/src/eval/capture.rs +++ b/src/eval/capture.rs @@ -1,8 +1,7 @@ use std::rc::Rc; use super::{Scope, Scopes, Value}; -use crate::syntax::visit::{immutable::visit_expr, Visit}; -use crate::syntax::{Expr, Ident}; +use crate::syntax::{ClosureParam, Expr, Imports, RedTicket}; /// A visitor that captures variable slots. pub struct CapturesVisitor<'a> { @@ -21,36 +20,83 @@ impl<'a> CapturesVisitor<'a> { } } + pub fn visit(&mut self, node: RedTicket) { + let expr: Option = node.cast(); + + match expr.as_ref() { + Some(Expr::Let(expr)) => { + self.visit(expr.init_ticket()); + let ident = expr.binding(); + self.internal.def_mut(ident.as_str(), Value::None); + } + Some(Expr::Closure(closure)) => { + for arg in closure.params() { + match arg { + ClosureParam::Pos(ident) | ClosureParam::Sink(ident) => { + self.internal.def_mut(ident.as_str(), Value::None); + } + ClosureParam::Named(name) => { + self.internal.def_mut(name.name().as_str(), Value::None); + } + } + } + self.visit(closure.body_ticket()); + } + Some(Expr::For(forloop)) => { + let pattern = forloop.pattern(); + self.internal.def_mut(pattern.value().as_str(), Value::None); + + if let Some(key) = pattern.key() { + self.internal.def_mut(key.as_str(), Value::None); + } + self.visit(forloop.body_ticket()); + } + Some(Expr::Import(import)) => { + if let Imports::Idents(idents) = import.imports() { + for ident in idents { + self.internal.def_mut(ident.as_str(), Value::None); + } + } + } + Some(Expr::Ident(ident)) => { + if self.internal.get(ident.as_str()).is_none() { + if let Some(slot) = self.external.get(ident.as_str()) { + self.captures.def_slot(ident.as_str(), Rc::clone(slot)); + } + } + } + _ => {} + } + + match expr.as_ref() { + Some(Expr::Let(_)) | Some(Expr::For(_)) | Some(Expr::Closure(_)) => {} + + Some(Expr::Block(_)) => { + self.internal.enter(); + for child in node.own().children() { + self.visit(child); + } + self.internal.exit(); + } + + Some(Expr::Template(_)) => { + self.internal.enter(); + for child in node.own().children() { + self.visit(child); + } + self.internal.exit(); + } + + _ => { + for child in node.own().children() { + self.visit(child); + } + } + } + } + /// Return the scope of captured variables. pub fn finish(self) -> Scope { self.captures } } - -impl<'ast> Visit<'ast> for CapturesVisitor<'_> { - fn visit_expr(&mut self, node: &'ast Expr) { - if let Expr::Ident(ident) = node { - // Find out whether the name is not locally defined and if so if it - // can be captured. 
-            if self.internal.get(ident).is_none() {
-                if let Some(slot) = self.external.get(ident) {
-                    self.captures.def_slot(ident.as_str(), Rc::clone(slot));
-                }
-            }
-        } else {
-            visit_expr(self, node);
-        }
-    }
-
-    fn visit_binding(&mut self, ident: &'ast Ident) {
-        self.internal.def_mut(ident.as_str(), Value::None);
-    }
-
-    fn visit_enter(&mut self) {
-        self.internal.enter();
-    }
-
-    fn visit_exit(&mut self) {
-        self.internal.exit();
-    }
-}
diff --git a/src/eval/mod.rs b/src/eval/mod.rs
index 691e3c494..296e33808 100644
--- a/src/eval/mod.rs
+++ b/src/eval/mod.rs
@@ -36,9 +36,7 @@ use crate::diag::{At, Error, StrResult, Trace, Tracepoint, TypResult};
 use crate::geom::{Angle, Fractional, Length, Relative};
 use crate::image::ImageStore;
 use crate::loading::Loader;
-use crate::parse::parse;
 use crate::source::{SourceId, SourceStore};
-use crate::syntax::visit::Visit;
 use crate::syntax::*;
 use crate::util::RefMutExt;
 use crate::Context;
@@ -114,7 +112,7 @@ impl<'a> EvalContext<'a> {

         // Parse the file.
         let source = self.sources.get(id);
-        let ast = parse(&source)?;
+        let ast = source.ast()?;

         // Prepare the new context.
         let new_scopes = Scopes::new(self.scopes.base);
@@ -122,7 +120,7 @@ impl<'a> EvalContext<'a> {
         self.route.push(id);

         // Evaluate the module.
-        let template = Rc::new(ast).eval(self).trace(|| Tracepoint::Import, span)?;
+        let template = ast.eval(self).trace(|| Tracepoint::Import, span)?;

         // Restore the old context.
         let new_scopes = mem::replace(&mut self.scopes, old_scopes);
@@ -232,7 +230,7 @@ impl Eval for ArrayExpr {
     type Output = Array;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        self.items.iter().map(|expr| expr.eval(ctx)).collect()
+        self.items().iter().map(|expr| expr.eval(ctx)).collect()
     }
 }

@@ -240,9 +238,9 @@ impl Eval for DictExpr {

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        self.items
+        self.items()
             .iter()
-            .map(|Named { name, expr }| Ok(((&name.string).into(), expr.eval(ctx)?)))
+            .map(|x| Ok(((&x.name().string).into(), x.expr().eval(ctx)?)))
             .collect()
     }
 }

@@ -251,7 +249,7 @@ impl Eval for TemplateExpr {
     type Output = Template;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        self.body.eval(ctx)
+        self.body().eval(ctx)
     }
 }

@@ -259,7 +257,7 @@ impl Eval for GroupExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        self.expr.eval(ctx)
+        self.expr().eval(ctx)
     }
 }

@@ -270,7 +268,7 @@ impl Eval for BlockExpr {
         ctx.scopes.enter();

         let mut output = Value::None;
-        for expr in &self.exprs {
+        for expr in &self.exprs() {
             let value = expr.eval(ctx)?;
             output = ops::join(output, value).at(expr.span())?;
         }
@@ -285,13 +283,13 @@ impl Eval for UnaryExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let value = self.expr.eval(ctx)?;
-        let result = match self.op {
+        let value = self.expr().eval(ctx)?;
+        let result = match self.op() {
             UnOp::Pos => ops::pos(value),
             UnOp::Neg => ops::neg(value),
             UnOp::Not => ops::not(value),
         };
-        result.at(self.span)
+        result.at(self.span())
     }
 }

@@ -299,7 +297,7 @@ impl Eval for BinaryExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        match self.op {
+        match self.op() {
             BinOp::Add => self.apply(ctx, ops::add),
             BinOp::Sub => self.apply(ctx, ops::sub),
             BinOp::Mul => self.apply(ctx, ops::mul),
@@ -327,17 +325,17 @@ impl BinaryExpr {
     where
         F: FnOnce(Value, Value) -> StrResult<Value>,
     {
-        let lhs = self.lhs.eval(ctx)?;
+        let lhs = self.lhs().eval(ctx)?;

         // Short-circuit boolean operations.
-        if (self.op == BinOp::And && lhs == Value::Bool(false))
-            || (self.op == BinOp::Or && lhs == Value::Bool(true))
+        if (self.op() == BinOp::And && lhs == Value::Bool(false))
+            || (self.op() == BinOp::Or && lhs == Value::Bool(true))
         {
             return Ok(lhs);
         }

-        let rhs = self.rhs.eval(ctx)?;
-        op(lhs, rhs).at(self.span)
+        let rhs = self.rhs().eval(ctx)?;
+        op(lhs, rhs).at(self.span())
     }

     /// Apply an assignment operation.
@@ -345,10 +343,10 @@ impl BinaryExpr {
     where
         F: FnOnce(Value, Value) -> StrResult<Value>,
     {
-        let rhs = self.rhs.eval(ctx)?;
-        let mut target = self.lhs.access(ctx)?;
+        let rhs = self.rhs().eval(ctx)?;
+        let mut target = self.lhs().access(ctx)?;
         let lhs = mem::take(&mut *target);
-        *target = op(lhs, rhs).at(self.span)?;
+        *target = op(lhs, rhs).at(self.span())?;
         Ok(Value::None)
     }
 }
@@ -357,27 +355,27 @@ impl Eval for CallExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let callee = self.callee.eval(ctx)?;
-        let mut args = self.args.eval(ctx)?;
+        let callee = self.callee().eval(ctx)?;
+        let mut args = self.args().eval(ctx)?;

         match callee {
             Value::Array(array) => {
-                array.get(args.into_index()?).map(Value::clone).at(self.span)
+                array.get(args.into_index()?).map(Value::clone).at(self.span())
             }

             Value::Dict(dict) => {
-                dict.get(args.into_key()?).map(Value::clone).at(self.span)
+                dict.get(args.into_key()?).map(Value::clone).at(self.span())
             }

             Value::Func(func) => {
                 let point = || Tracepoint::Call(func.name().map(ToString::to_string));
-                let value = func.call(ctx, &mut args).trace(point, self.span)?;
+                let value = func.call(ctx, &mut args).trace(point, self.span())?;
                 args.finish()?;
                 Ok(value)
             }

             v => bail!(
-                self.callee.span(),
+                self.callee().span(),
                 "expected function or collection, found {}",
                 v.type_name(),
             ),
@@ -389,9 +387,9 @@ impl Eval for CallArgs {
     type Output = Args;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let mut items = Vec::with_capacity(self.items.len());
+        let mut items = Vec::with_capacity(self.items().len());

-        for arg in &self.items {
+        for arg in &self.items() {
             let span = arg.span();
             match arg {
                 CallArg::Pos(expr) => {
@@ -401,11 +399,11 @@ impl Eval for CallArgs {
                         value: Spanned::new(expr.eval(ctx)?, expr.span()),
                     });
                 }
-                CallArg::Named(Named { name, expr }) => {
+                CallArg::Named(x) => {
                     items.push(Arg {
                         span,
-                        name: Some((&name.string).into()),
-                        value: Spanned::new(expr.eval(ctx)?, expr.span()),
+                        name: Some((&x.name().string).into()),
+                        value: Spanned::new(x.expr().eval(ctx)?, x.expr().span()),
                     });
                 }
                 CallArg::Spread(expr) => match expr.eval(ctx)? {
@@ -438,7 +436,7 @@ impl Eval for CallArgs {
             }
         }

-        Ok(Args { span: self.span, items })
+        Ok(Args { span: self.span(), items })
     }
 }

@@ -446,26 +444,27 @@ impl Eval for ClosureExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let name = self.name.as_ref().map(|name| name.string.clone());
+        let name = self.name().as_ref().map(|name| name.string.clone());

         // Collect captured variables.
         let captured = {
             let mut visitor = CapturesVisitor::new(&ctx.scopes);
-            visitor.visit_closure(self);
+            visitor.visit(self.underlying());
             visitor.finish()
         };

         let mut sink = None;
-        let mut params = Vec::with_capacity(self.params.len());
+        let params_src = self.params();
+        let mut params = Vec::with_capacity(params_src.len());

         // Collect parameters and an optional sink parameter.
-        for param in &self.params {
+        for param in &params_src {
             match param {
                 ClosureParam::Pos(name) => {
                     params.push((name.string.clone(), None));
                 }
-                ClosureParam::Named(Named { name, expr }) => {
-                    params.push((name.string.clone(), Some(expr.eval(ctx)?)));
+                ClosureParam::Named(x) => {
+                    params.push((x.name().string.clone(), Some(x.expr().eval(ctx)?)));
                 }
                 ClosureParam::Sink(name) => {
                     if sink.is_some() {
@@ -478,7 +477,7 @@ impl Eval for ClosureExpr {

         // Clone the body expression so that we don't have a lifetime
         // dependence on the AST.
-        let body = Rc::clone(&self.body);
+        let body = Rc::new(self.body());

         // Define the actual function.
         let func = Function::new(name, move |ctx, args| {
@@ -515,8 +514,9 @@ impl Eval for WithExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let wrapped = self.callee.eval(ctx)?.cast::<Function>().at(self.callee.span())?;
-        let applied = self.args.eval(ctx)?;
+        let wrapped =
+            self.callee().eval(ctx)?.cast::<Function>().at(self.callee().span())?;
+        let applied = self.args().eval(ctx)?;

         let name = wrapped.name().cloned();
         let func = Function::new(name, move |ctx, args| {
@@ -532,11 +532,11 @@ impl Eval for LetExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let value = match &self.init {
+        let value = match &self.init() {
             Some(expr) => expr.eval(ctx)?,
             None => Value::None,
         };
-        ctx.scopes.def_mut(self.binding.as_str(), value);
+        ctx.scopes.def_mut(self.binding().as_str(), value);
         Ok(Value::None)
     }
 }
@@ -545,12 +545,15 @@ impl Eval for IfExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let condition =
-            self.condition.eval(ctx)?.cast::<bool>().at(self.condition.span())?;
+        let condition = self
+            .condition()
+            .eval(ctx)?
+            .cast::<bool>()
+            .at(self.condition().span())?;

         if condition {
-            self.if_body.eval(ctx)
-        } else if let Some(else_body) = &self.else_body {
+            self.if_body().eval(ctx)
+        } else if let Some(else_body) = &self.else_body() {
             else_body.eval(ctx)
         } else {
             Ok(Value::None)
@@ -564,9 +567,14 @@ impl Eval for WhileExpr {
     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
         let mut output = Value::None;

-        while self.condition.eval(ctx)?.cast::<bool>().at(self.condition.span())? {
-            let value = self.body.eval(ctx)?;
-            output = ops::join(output, value).at(self.body.span())?;
+        while self
+            .condition()
+            .eval(ctx)?
+            .cast::<bool>()
+            .at(self.condition().span())?
+        {
+            let value = self.body().eval(ctx)?;
+            output = ops::join(output, value).at(self.body().span())?;
         }

         Ok(output)
@@ -586,9 +594,9 @@ impl Eval for ForExpr {
                 for ($($value),*) in $iter {
                     $(ctx.scopes.def_mut($binding.as_str(), $value);)*

-                    let value = self.body.eval(ctx)?;
+                    let value = self.body().eval(ctx)?;
                     output = ops::join(output, value)
-                        .at(self.body.span())?;
+                        .at(self.body().span())?;
                 }

                 ctx.scopes.exit();
@@ -596,28 +604,27 @@ impl Eval for ForExpr {
             }};
         }

-        let iter = self.iter.eval(ctx)?;
-        match (&self.pattern, iter) {
-            (ForPattern::Value(v), Value::Str(string)) => {
-                iter!(for (v => value) in string.iter())
-            }
-            (ForPattern::Value(v), Value::Array(array)) => {
+        let iter = self.iter().eval(ctx)?;
+        let pattern = self.pattern();
+        match (pattern.key(), pattern.value(), iter) {
+            (None, v, Value::Str(string)) => iter!(for (v => value) in string.iter()),
+            (None, v, Value::Array(array)) => {
                 iter!(for (v => value) in array.into_iter())
             }
-            (ForPattern::KeyValue(i, v), Value::Array(array)) => {
+            (Some(i), v, Value::Array(array)) => {
                 iter!(for (i => idx, v => value) in array.into_iter().enumerate())
             }
-            (ForPattern::Value(v), Value::Dict(dict)) => {
+            (None, v, Value::Dict(dict)) => {
                 iter!(for (v => value) in dict.into_iter().map(|p| p.1))
             }
-            (ForPattern::KeyValue(k, v), Value::Dict(dict)) => {
+            (Some(k), v, Value::Dict(dict)) => {
                 iter!(for (k => key, v => value) in dict.into_iter())
             }
-            (ForPattern::KeyValue(_, _), Value::Str(_)) => {
-                bail!(self.pattern.span(), "mismatched pattern");
+            (_, _, Value::Str(_)) => {
+                bail!(pattern.span(), "mismatched pattern");
             }
-            (_, iter) => {
-                bail!(self.iter.span(), "cannot loop over {}", iter.type_name());
+            (_, _, iter) => {
+                bail!(self.iter().span(), "cannot loop over {}", iter.type_name());
             }
         }
     }
@@ -627,12 +634,12 @@ impl Eval for ImportExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let path = self.path.eval(ctx)?.cast::<Str>().at(self.path.span())?;
+        let path = self.path().eval(ctx)?.cast::<Str>().at(self.path().span())?;

-        let file = ctx.import(&path, self.path.span())?;
+        let file = ctx.import(&path, self.path().span())?;
         let module = &ctx.modules[&file];

-        match &self.imports {
+        match &self.imports() {
             Imports::Wildcard => {
                 for (var, slot) in module.scope.iter() {
                     ctx.scopes.def_mut(var, slot.borrow().clone());
@@ -657,9 +664,10 @@ impl Eval for IncludeExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let path = self.path.eval(ctx)?.cast::<Str>().at(self.path.span())?;
+        let path_node = self.path();
+        let path = path_node.eval(ctx)?.cast::<Str>().at(path_node.span())?;

-        let file = ctx.import(&path, self.path.span())?;
+        let file = ctx.import(&path, path_node.span())?;
         let module = &ctx.modules[&file];

         Ok(Value::Template(module.template.clone()))
@@ -698,14 +706,14 @@ impl Access for Ident {

 impl Access for CallExpr {
     fn access<'a>(&self, ctx: &'a mut EvalContext) -> TypResult<RefMut<'a, Value>> {
-        let args = self.args.eval(ctx)?;
-        let guard = self.callee.access(ctx)?;
+        let args = self.args().eval(ctx)?;
+        let guard = self.callee().access(ctx)?;

         RefMut::try_map(guard, |value| match value {
-            Value::Array(array) => array.get_mut(args.into_index()?).at(self.span),
+            Value::Array(array) => array.get_mut(args.into_index()?).at(self.span()),
             Value::Dict(dict) => Ok(dict.get_mut(args.into_key()?)),
             v => bail!(
-                self.callee.span(),
+                self.callee().span(),
                 "expected collection, found {}",
                 v.type_name(),
             ),
diff --git a/src/eval/walk.rs b/src/eval/walk.rs
index 961383381..e4d7f61a1 100644
--- a/src/eval/walk.rs +++ b/src/eval/walk.rs @@ -27,10 +27,10 @@ impl Walk for MarkupNode { fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> { match self { Self::Space => ctx.template.space(), - Self::Linebreak(_) => ctx.template.linebreak(), - Self::Parbreak(_) => ctx.template.parbreak(), - Self::Strong(_) => ctx.template.modify(|s| s.text_mut().strong.flip()), - Self::Emph(_) => ctx.template.modify(|s| s.text_mut().emph.flip()), + Self::Linebreak => ctx.template.linebreak(), + Self::Parbreak => ctx.template.parbreak(), + Self::Strong => ctx.template.modify(|s| s.text_mut().strong.flip()), + Self::Emph => ctx.template.modify(|s| s.text_mut().emph.flip()), Self::Text(text) => ctx.template.text(text), Self::Raw(raw) => raw.walk(ctx)?, Self::Heading(heading) => heading.walk(ctx)?, @@ -69,8 +69,8 @@ impl Walk for RawNode { impl Walk for HeadingNode { fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> { - let level = self.level; - let body = self.body.eval(ctx)?; + let level = self.level().0; + let body = self.body().eval(ctx)?; ctx.template.parbreak(); ctx.template.save(); @@ -90,7 +90,7 @@ impl Walk for HeadingNode { impl Walk for ListNode { fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> { - let body = self.body.eval(ctx)?; + let body = self.body().eval(ctx)?; walk_item(ctx, Str::from('•'), body); Ok(()) } @@ -98,8 +98,8 @@ impl Walk for ListNode { impl Walk for EnumNode { fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> { - let body = self.body.eval(ctx)?; - let label = format_str!("{}.", self.number.unwrap_or(1)); + let body = self.body().eval(ctx)?; + let label = format_str!("{}.", self.number().0.unwrap_or(1)); walk_item(ctx, label, body); Ok(()) } diff --git a/src/lib.rs b/src/lib.rs index 41b2e88b9..468c06d8c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -58,7 +58,6 @@ use crate::layout::{EvictionPolicy, LayoutCache}; use crate::loading::Loader; use crate::source::{SourceId, SourceStore}; use crate::style::Style; -use crate::syntax::Markup; /// The core context which holds the loader, configuration and cached artifacts. pub struct Context { @@ -100,14 +99,9 @@ impl Context { &self.style } - /// Parse a source file and return the resulting markup. - pub fn parse(&mut self, id: SourceId) -> TypResult { - parse::parse(self.sources.get(id)) - } - /// Evaluate a source file and return the resulting module. pub fn evaluate(&mut self, id: SourceId) -> TypResult { - let ast = self.parse(id)?; + let ast = self.sources.get(id).ast()?; eval::eval(self, id, &ast) } diff --git a/src/parse/mod.rs b/src/parse/mod.rs index 307874232..dc7691833 100644 --- a/src/parse/mod.rs +++ b/src/parse/mod.rs @@ -12,215 +12,213 @@ pub use tokens::*; use std::rc::Rc; -use crate::diag::TypResult; use crate::source::SourceFile; use crate::syntax::*; use crate::util::EcoString; /// Parse a source file. -pub fn parse(source: &SourceFile) -> TypResult { +pub fn parse(source: &SourceFile) -> Rc { let mut p = Parser::new(source); - let markup = markup(&mut p); - let errors = p.finish(); - if errors.is_empty() { - Ok(markup) - } else { - Err(Box::new(errors)) - } + markup(&mut p); + p.finish() } /// Parse markup. -fn markup(p: &mut Parser) -> Markup { +fn markup(p: &mut Parser) { markup_while(p, true, &mut |_| true) } -/// Parse markup that stays equal or right of the given column. -fn markup_indented(p: &mut Parser, column: usize) -> Markup { +/// Parse markup that stays right of the given column. 
+fn markup_indented(p: &mut Parser, column: usize) { + // TODO this is broken p.eat_while(|t| match t { - Token::Space(n) => n == 0, - Token::LineComment(_) | Token::BlockComment(_) => true, + NodeKind::Space(n) => n == 0, + NodeKind::LineComment | NodeKind::BlockComment => true, _ => false, }); markup_while(p, false, &mut |p| match p.peek() { - Some(Token::Space(n)) if n >= 1 => p.column(p.next_end()) >= column, + Some(NodeKind::Space(n)) if n >= 1 => p.column(p.next_end()) >= column, _ => true, }) } -/// Parse a syntax tree while the peeked token satisifies a condition. +/// Parse a syntax tree while the peeked NodeKind satisifies a condition. /// /// If `at_start` is true, things like headings that may only appear at the /// beginning of a line or template are allowed. -fn markup_while(p: &mut Parser, mut at_start: bool, f: &mut F) -> Markup +fn markup_while(p: &mut Parser, mut at_start: bool, f: &mut F) where F: FnMut(&mut Parser) -> bool, { - let mut tree = vec![]; + p.start(); while !p.eof() && f(p) { - if let Some(node) = markup_node(p, &mut at_start) { - at_start &= matches!(node, MarkupNode::Space | MarkupNode::Parbreak(_)); - tree.push(node); + markup_node(p, &mut at_start); + if let Some(node) = p.last_child() { + at_start &= matches!(node.kind(), &NodeKind::Space(_) | &NodeKind::Parbreak | &NodeKind::LineComment | &NodeKind::BlockComment); } } - tree + p.end(NodeKind::Markup); } /// Parse a markup node. -fn markup_node(p: &mut Parser, at_start: &mut bool) -> Option { - let token = p.peek()?; - let span = p.peek_span(); - let node = match token { - // Whitespace. - Token::Space(newlines) => { - *at_start |= newlines > 0; - if newlines < 2 { - MarkupNode::Space - } else { - MarkupNode::Parbreak(span) +fn markup_node(p: &mut Parser, at_start: &mut bool) { + if let Some(token) = p.peek() { + match token { + // Whitespace. + NodeKind::Space(newlines) => { + *at_start |= newlines > 0; + + if newlines < 2 { + p.eat(); + } else { + p.convert(NodeKind::Parbreak); + } } - } - // Text. - Token::Text(text) => MarkupNode::Text(text.into()), - Token::Tilde => MarkupNode::Text("\u{00A0}".into()), - Token::HyphHyph => MarkupNode::Text("\u{2013}".into()), - Token::HyphHyphHyph => MarkupNode::Text("\u{2014}".into()), - Token::UnicodeEscape(t) => MarkupNode::Text(unicode_escape(p, t)), + // Text. + NodeKind::UnicodeEscape(u) => { + if !u.terminated { + p.convert(NodeKind::Error( + ErrorPosition::End, + "expected closing brace".into(), + )); + p.unsuccessful(); + return; + } - // Markup. - Token::Backslash => MarkupNode::Linebreak(span), - Token::Star => MarkupNode::Strong(span), - Token::Underscore => MarkupNode::Emph(span), - Token::Raw(t) => raw(p, t), - Token::Eq if *at_start => return Some(heading(p)), - Token::Hyph if *at_start => return Some(list_node(p)), - Token::Numbering(number) if *at_start => return Some(enum_node(p, number)), + if u.character.is_none() { + let src = p.peek_src(); + p.convert(NodeKind::Error( + ErrorPosition::Full, + "invalid unicode escape sequence".into(), + )); + p.start(); + p.end(NodeKind::Text(src.into())); + return; + } - // Line-based markup that is not currently at the start of the line. - Token::Eq | Token::Hyph | Token::Numbering(_) => { - MarkupNode::Text(p.peek_src().into()) - } - - // Hashtag + keyword / identifier. 
- Token::Ident(_) - | Token::Let - | Token::If - | Token::While - | Token::For - | Token::Import - | Token::Include => { - let stmt = matches!(token, Token::Let | Token::Import); - let group = if stmt { Group::Stmt } else { Group::Expr }; - - p.start_group(group, TokenMode::Code); - let expr = expr_with(p, true, 0); - if stmt && expr.is_some() && !p.eof() { - p.expected_at(p.prev_end(), "semicolon or line break"); + p.eat(); } - p.end_group(); + NodeKind::Raw(r) => { + if !r.terminated { + p.convert(NodeKind::Error( + ErrorPosition::End, + "expected backtick(s)".into(), + )); + p.unsuccessful(); + return; + } - return expr.map(MarkupNode::Expr); - } + p.eat(); + } + NodeKind::Text(_) + | NodeKind::EnDash + | NodeKind::EmDash + | NodeKind::NonBreakingSpace => { + p.eat(); + } - // Block and template. - Token::LeftBrace => return Some(MarkupNode::Expr(block(p))), - Token::LeftBracket => return Some(MarkupNode::Expr(template(p))), + // Markup. + NodeKind::Emph | NodeKind::Strong | NodeKind::Linebreak => { + p.eat(); + } - // Comments. - Token::LineComment(_) | Token::BlockComment(_) => { - p.eat(); - return None; - } + NodeKind::Eq if *at_start => heading(p), + NodeKind::ListBullet if *at_start => list_node(p), + NodeKind::EnumNumbering(_) if *at_start => enum_node(p), - _ => { - *at_start = false; - p.unexpected(); - return None; - } - }; - p.eat(); - Some(node) -} + // Line-based markup that is not currently at the start of the line. + NodeKind::Eq | NodeKind::ListBullet | NodeKind::EnumNumbering(_) => { + p.convert(NodeKind::Text(p.peek_src().into())) + } -/// Handle a unicode escape sequence. -fn unicode_escape(p: &mut Parser, token: UnicodeEscapeToken) -> EcoString { - let span = p.peek_span(); - let text = if let Some(c) = resolve::resolve_hex(token.sequence) { - c.into() - } else { - // Print out the escape sequence verbatim if it is invalid. - p.error(span, "invalid unicode escape sequence"); - p.peek_src().into() - }; + // Hashtag + keyword / identifier. + NodeKind::Ident(_) + | NodeKind::Let + | NodeKind::If + | NodeKind::While + | NodeKind::For + | NodeKind::Import + | NodeKind::Include => { + let stmt = matches!(token, NodeKind::Let | NodeKind::Import); + let group = if stmt { Group::Stmt } else { Group::Expr }; - if !token.terminated { - p.error(span.end, "expected closing brace"); + p.start_group(group, TokenMode::Code); + expr_with(p, true, 0); + if stmt && p.success() && !p.eof() { + p.expected_at("semicolon or line break"); + } + p.end_group(); + } + + // Block and template. + NodeKind::LeftBrace => { + block(p); + } + NodeKind::LeftBracket => { + template(p); + } + + // Comments. + NodeKind::LineComment | NodeKind::BlockComment => { + p.eat(); + } + + _ => { + *at_start = false; + p.unexpected(); + } + }; } - - text -} - -/// Handle a raw block. -fn raw(p: &mut Parser, token: RawToken) -> MarkupNode { - let column = p.column(p.next_start()); - let span = p.peek_span(); - let raw = resolve::resolve_raw(span, column, token.backticks, token.text); - if !token.terminated { - p.error(span.end, "expected backtick(s)"); - } - MarkupNode::Raw(Box::new(raw)) } /// Parse a heading. -fn heading(p: &mut Parser) -> MarkupNode { - let start = p.next_start(); - p.eat_assert(Token::Eq); +fn heading(p: &mut Parser) { + p.start(); + p.start(); + p.eat_assert(NodeKind::Eq); // Count depth. let mut level: usize = 1; - while p.eat_if(Token::Eq) { + while p.eat_if(NodeKind::Eq) { level += 1; } if level > 6 { - return MarkupNode::Text(p.get(start .. 
p.prev_end()).into()); + p.lift(); + p.end(NodeKind::Text(EcoString::from('=').repeat(level))); + } else { + p.end(NodeKind::HeadingLevel(level as u8)); + let column = p.column(p.prev_end()); + markup_indented(p, column); + p.end(NodeKind::Heading); } - - let column = p.column(p.prev_end()); - let body = markup_indented(p, column); - MarkupNode::Heading(Box::new(HeadingNode { - span: p.span_from(start), - level, - body, - })) } /// Parse a single list item. -fn list_node(p: &mut Parser) -> MarkupNode { - let start = p.next_start(); - p.eat_assert(Token::Hyph); +fn list_node(p: &mut Parser) { + p.start(); + p.eat_assert(NodeKind::ListBullet); let column = p.column(p.prev_end()); - let body = markup_indented(p, column); - MarkupNode::List(Box::new(ListNode { span: p.span_from(start), body })) + markup_indented(p, column); + p.end(NodeKind::List); } /// Parse a single enum item. -fn enum_node(p: &mut Parser, number: Option) -> MarkupNode { - let start = p.next_start(); - p.eat_assert(Token::Numbering(number)); +fn enum_node(p: &mut Parser) { + p.start(); + if !matches!(p.eat(), Some(NodeKind::EnumNumbering(_))) { + panic!("enum item does not start with numbering") + }; let column = p.column(p.prev_end()); - let body = markup_indented(p, column); - MarkupNode::Enum(Box::new(EnumNode { - span: p.span_from(start), - number, - body, - })) + markup_indented(p, column); + p.end(NodeKind::Enum); } /// Parse an expression. -fn expr(p: &mut Parser) -> Option { +fn expr(p: &mut Parser) { expr_with(p, false, 0) } @@ -231,134 +229,167 @@ fn expr(p: &mut Parser) -> Option { /// in markup. /// /// Stops parsing at operations with lower precedence than `min_prec`, -fn expr_with(p: &mut Parser, atomic: bool, min_prec: usize) -> Option { - let start = p.next_start(); - let mut lhs = match p.eat_map(UnOp::from_token) { +fn expr_with(p: &mut Parser, atomic: bool, min_prec: usize) { + p.start(); + let mut offset = p.child_count(); + // Start the unary expression. + match p.eat_map(|x| UnOp::from_token(&x)) { Some(op) => { let prec = op.precedence(); - let expr = expr_with(p, atomic, prec)?; - Expr::Unary(Box::new(UnaryExpr { span: p.span_from(start), op, expr })) + expr_with(p, atomic, prec); + + if p.may_lift_abort() { + return; + } + + p.end_and_start_with(NodeKind::Unary); + } + None => { + primary(p, atomic); + if p.may_lift_abort() { + return; + } } - None => primary(p, atomic)?, }; loop { // Exclamation mark, parenthesis or bracket means this is a function // call. 
- if matches!(p.peek_direct(), Some(Token::LeftParen | Token::LeftBracket)) { - lhs = call(p, lhs)?; + if matches!( + p.peek_direct(), + Some(NodeKind::LeftParen | NodeKind::LeftBracket) + ) { + call(p, p.child_count() - offset); continue; } - if p.eat_if(Token::With) { - lhs = with_expr(p, lhs)?; + if p.peek() == Some(NodeKind::With) { + with_expr(p, p.child_count() - offset); + + if p.may_lift_abort() { + return; + } } if atomic { + p.lift(); break; } - let op = match p.peek().and_then(BinOp::from_token) { + let op = match p.peek().as_ref().and_then(BinOp::from_token) { Some(binop) => binop, - None => break, + None => { + p.lift(); + break; + } }; let mut prec = op.precedence(); if prec < min_prec { - break; + { + p.lift(); + break; + }; } p.eat(); + match op.associativity() { Associativity::Left => prec += 1, Associativity::Right => {} } - let rhs = match expr_with(p, atomic, prec) { - Some(rhs) => rhs, - None => break, - }; + expr_with(p, atomic, prec); - let span = lhs.span().join(rhs.span()); - lhs = Expr::Binary(Box::new(BinaryExpr { span, lhs, op, rhs })); + if !p.success() { + p.lift(); + break; + } + + offset = p.end_and_start_with(NodeKind::Binary).0; } - - Some(lhs) } /// Parse a primary expression. -fn primary(p: &mut Parser, atomic: bool) -> Option { - if let Some(expr) = literal(p) { - return Some(expr); +fn primary(p: &mut Parser, atomic: bool) { + if literal(p) { + return; } match p.peek() { // Things that start with an identifier. - Some(Token::Ident(string)) => { - let ident = Ident { - span: p.eat_span(), - string: string.into(), - }; + Some(NodeKind::Ident(_)) => { + // Start closure params. + p.start(); + p.eat(); // Arrow means this is a closure's lone parameter. - Some(if !atomic && p.eat_if(Token::Arrow) { - let body = expr(p)?; - Expr::Closure(Box::new(ClosureExpr { - span: ident.span.join(body.span()), - name: None, - params: vec![ClosureParam::Pos(ident)], - body: Rc::new(body), - })) + if !atomic && p.peek() == Some(NodeKind::Arrow) { + p.end_and_start_with(NodeKind::ClosureParams); + p.eat(); + + expr(p); + + p.end_or_abort(NodeKind::Closure); } else { - Expr::Ident(Box::new(ident)) - }) + p.lift(); + } } // Structures. - Some(Token::LeftParen) => parenthesized(p), - Some(Token::LeftBracket) => Some(template(p)), - Some(Token::LeftBrace) => Some(block(p)), + Some(NodeKind::LeftParen) => parenthesized(p), + Some(NodeKind::LeftBracket) => template(p), + Some(NodeKind::LeftBrace) => block(p), // Keywords. - Some(Token::Let) => let_expr(p), - Some(Token::If) => if_expr(p), - Some(Token::While) => while_expr(p), - Some(Token::For) => for_expr(p), - Some(Token::Import) => import_expr(p), - Some(Token::Include) => include_expr(p), + Some(NodeKind::Let) => let_expr(p), + Some(NodeKind::If) => if_expr(p), + Some(NodeKind::While) => while_expr(p), + Some(NodeKind::For) => for_expr(p), + Some(NodeKind::Import) => import_expr(p), + Some(NodeKind::Include) => include_expr(p), // Nothing. _ => { p.expected("expression"); - None + p.unsuccessful(); } } } /// Parse a literal. -fn literal(p: &mut Parser) -> Option { - let span = p.peek_span(); - let lit = match p.peek()? { - // Basic values. 
- Token::None => Lit::None(span), - Token::Auto => Lit::Auto(span), - Token::Bool(b) => Lit::Bool(span, b), - Token::Int(i) => Lit::Int(span, i), - Token::Float(f) => Lit::Float(span, f), - Token::Length(val, unit) => Lit::Length(span, val, unit), - Token::Angle(val, unit) => Lit::Angle(span, val, unit), - Token::Percent(p) => Lit::Percent(span, p), - Token::Fraction(p) => Lit::Fractional(span, p), - Token::Str(token) => Lit::Str(span, { - if !token.terminated { - p.expected_at(span.end, "quote"); - } - resolve::resolve_string(token.string) - }), - _ => return None, +fn literal(p: &mut Parser) -> bool { + let peeked = if let Some(p) = p.peek() { + p + } else { + return false; }; - p.eat(); - Some(Expr::Lit(Box::new(lit))) + + match peeked { + // Basic values. + NodeKind::None + | NodeKind::Auto + | NodeKind::Int(_) + | NodeKind::Float(_) + | NodeKind::Bool(_) + | NodeKind::Fraction(_) + | NodeKind::Length(_, _) + | NodeKind::Angle(_, _) + | NodeKind::Percentage(_) => { + p.eat(); + } + NodeKind::Str(s) => { + p.eat(); + if !s.terminated { + p.expected_at("quote"); + } + } + _ => { + return false; + } + } + + true } /// Parse something that starts with a parenthesis, which can be either of: @@ -366,433 +397,508 @@ fn literal(p: &mut Parser) -> Option { /// - Dictionary literal /// - Parenthesized expression /// - Parameter list of closure expression -fn parenthesized(p: &mut Parser) -> Option { +fn parenthesized(p: &mut Parser) { + let offset = p.child_count(); + p.start(); p.start_group(Group::Paren, TokenMode::Code); - let colon = p.eat_if(Token::Colon); - let (items, has_comma) = collection(p); - let span = p.end_group(); + let colon = p.eat_if(NodeKind::Colon); + let kind = collection(p).0; + p.end_group(); + let token_count = p.child_count() - offset; - // Leading colon makes this a dictionary. + // Leading colon makes this a (empty) dictionary. if colon { - return Some(dict(p, items, span)); + p.lift(); + dict(p, token_count); + return; } // Arrow means this is a closure's parameter list. - if p.eat_if(Token::Arrow) { - let params = params(p, items); - let body = expr(p)?; - return Some(Expr::Closure(Box::new(ClosureExpr { - span: span.join(body.span()), - name: None, - params, - body: Rc::new(body), - }))); + if p.peek() == Some(NodeKind::Arrow) { + p.start_with(token_count); + params(p, 0, true); + p.end(NodeKind::ClosureParams); + + p.eat_assert(NodeKind::Arrow); + + expr(p); + + p.end_or_abort(NodeKind::Closure); + return; } // Find out which kind of collection this is. - Some(match items.as_slice() { - [] => array(p, items, span), - [CallArg::Pos(_)] if !has_comma => match items.into_iter().next() { - Some(CallArg::Pos(expr)) => Expr::Group(Box::new(GroupExpr { span, expr })), - _ => unreachable!(), - }, - [CallArg::Pos(_), ..] => array(p, items, span), - [CallArg::Named(_), ..] => dict(p, items, span), - [CallArg::Spread(expr), ..] => { - p.error(expr.span(), "spreading is not allowed here"); - return None; + match kind { + CollectionKind::Group => p.end(NodeKind::Group), + CollectionKind::PositionalCollection => { + p.lift(); + array(p, token_count); } - }) + CollectionKind::NamedCollection => { + p.lift(); + dict(p, token_count); + } + } +} + +/// The type of a collection. +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +enum CollectionKind { + /// The collection is only one item and has no comma. + Group, + /// The collection starts with a positional and has more items or a trailing + /// comma. + PositionalCollection, + /// The collection starts with a named item. 
+ NamedCollection, } /// Parse a collection. /// -/// Returns whether the literal contained any commas. -fn collection(p: &mut Parser) -> (Vec, bool) { - let mut items = vec![]; +/// Returns the length of the collection and whether the literal contained any +/// commas. +fn collection(p: &mut Parser) -> (CollectionKind, usize) { + let mut items = 0; + let mut kind = CollectionKind::PositionalCollection; + let mut seen_spread = false; let mut has_comma = false; let mut missing_coma = None; while !p.eof() { - if let Some(arg) = item(p) { - items.push(arg); + let item_kind = item(p); + if p.success() { + if items == 0 && item_kind == CollectionItemKind::Named { + kind = CollectionKind::NamedCollection; + } + + if item_kind == CollectionItemKind::ParameterSink { + seen_spread = true; + } + + items += 1; if let Some(pos) = missing_coma.take() { - p.expected_at(pos, "comma"); + p.expected_at_child(pos, "comma"); } if p.eof() { break; } - let behind = p.prev_end(); - if p.eat_if(Token::Comma) { + if p.eat_if(NodeKind::Comma) { has_comma = true; } else { - missing_coma = Some(behind); + missing_coma = Some(p.child_count()); } } } - (items, has_comma) -} - -/// Parse an expression or a named pair. -fn item(p: &mut Parser) -> Option { - if p.eat_if(Token::Dots) { - return expr(p).map(CallArg::Spread); + if !has_comma + && items == 1 + && !seen_spread + && kind == CollectionKind::PositionalCollection + { + kind = CollectionKind::Group; } - let first = expr(p)?; - if p.eat_if(Token::Colon) { - if let Expr::Ident(name) = first { - Some(CallArg::Named(Named { name: *name, expr: expr(p)? })) - } else { - p.error(first.span(), "expected identifier"); + (kind, items) +} + +/// What kind of item is this? +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +enum CollectionItemKind { + /// A named item. + Named, + /// An unnamed item. + Unnamed, + /// A parameter sink. + ParameterSink, +} + +/// Parse an expression or a named pair. Returns if this is a named pair. +fn item(p: &mut Parser) -> CollectionItemKind { + p.start(); + if p.eat_if(NodeKind::Dots) { + expr(p); + + p.end_or_abort(NodeKind::ParameterSink); + return CollectionItemKind::ParameterSink; + } + + expr(p); + + if p.may_lift_abort() { + return CollectionItemKind::Unnamed; + } + + if p.eat_if(NodeKind::Colon) { + let child = p.child(1).unwrap(); + if matches!(child.kind(), &NodeKind::Ident(_)) { expr(p); - None + p.end_or_abort(NodeKind::Named); + } else { + p.wrap( + 1, + NodeKind::Error(ErrorPosition::Full, "expected identifier".into()), + ); + + expr(p); + p.end(NodeKind::Named); + p.unsuccessful(); } + + CollectionItemKind::Named } else { - Some(CallArg::Pos(first)) + p.lift(); + CollectionItemKind::Unnamed } } /// Convert a collection into an array, producing errors for anything other than /// expressions. 
-fn array(p: &mut Parser, items: Vec, span: Span) -> Expr { - let iter = items.into_iter().filter_map(|item| match item { - CallArg::Pos(expr) => Some(expr), - CallArg::Named(_) => { - p.error(item.span(), "expected expression, found named pair"); - None - } - CallArg::Spread(_) => { - p.error(item.span(), "spreading is not allowed here"); - None - } - }); - Expr::Array(Box::new(ArrayExpr { span, items: iter.collect() })) +fn array(p: &mut Parser, items: usize) { + p.start_with(items); + p.filter_children( + 0, + |x| match x.kind() { + NodeKind::Named | NodeKind::ParameterSink => false, + _ => true, + }, + |kind| match kind { + NodeKind::Named => ( + ErrorPosition::Full, + "expected expression, found named pair".into(), + ), + NodeKind::ParameterSink => { + (ErrorPosition::Full, "spreading is not allowed here".into()) + } + _ => unreachable!(), + }, + ); + + p.end(NodeKind::Array) } /// Convert a collection into a dictionary, producing errors for anything other /// than named pairs. -fn dict(p: &mut Parser, items: Vec, span: Span) -> Expr { - let iter = items.into_iter().filter_map(|item| match item { - CallArg::Named(named) => Some(named), - CallArg::Pos(_) => { - p.error(item.span(), "expected named pair, found expression"); - None - } - CallArg::Spread(_) => { - p.error(item.span(), "spreading is not allowed here"); - None - } - }); - Expr::Dict(Box::new(DictExpr { span, items: iter.collect() })) +fn dict(p: &mut Parser, items: usize) { + p.start_with(items); + p.filter_children( + 0, + |x| { + x.kind() == &NodeKind::Named + || x.kind().is_parenthesis() + || x.kind() == &NodeKind::Comma + || x.kind() == &NodeKind::Colon + }, + |kind| match kind { + NodeKind::ParameterSink => { + (ErrorPosition::Full, "spreading is not allowed here".into()) + } + _ => ( + ErrorPosition::Full, + "expected named pair, found expression".into(), + ), + }, + ); + p.end(NodeKind::Dict); } /// Convert a collection into a list of parameters, producing errors for /// anything other than identifiers, spread operations and named pairs. -fn params(p: &mut Parser, items: Vec) -> Vec { - let iter = items.into_iter().filter_map(|item| match item { - CallArg::Pos(Expr::Ident(ident)) => Some(ClosureParam::Pos(*ident)), - CallArg::Named(named) => Some(ClosureParam::Named(named)), - CallArg::Spread(Expr::Ident(ident)) => Some(ClosureParam::Sink(*ident)), - _ => { - p.error(item.span(), "expected identifier"); - None - } - }); - iter.collect() -} - -/// Convert a collection into a list of identifiers, producing errors for -/// anything other than identifiers. -fn idents(p: &mut Parser, items: Vec) -> Vec { - let iter = items.into_iter().filter_map(|item| match item { - CallArg::Pos(Expr::Ident(ident)) => Some(*ident), - _ => { - p.error(item.span(), "expected identifier"); - None - } - }); - iter.collect() +fn params(p: &mut Parser, count: usize, allow_parens: bool) { + p.filter_children( + count, + |x| match x.kind() { + NodeKind::Named | NodeKind::Comma | NodeKind::Ident(_) => true, + NodeKind::ParameterSink => matches!( + x.children().last().map(|x| x.kind()), + Some(&NodeKind::Ident(_)) + ), + _ => false, + } + || (allow_parens && x.kind().is_parenthesis()), + |_| (ErrorPosition::Full, "expected identifier".into()), + ); } // Parse a template block: `[...]`. 
-fn template(p: &mut Parser) -> Expr { +fn template(p: &mut Parser) { + p.start(); p.start_group(Group::Bracket, TokenMode::Markup); - let tree = markup(p); - let span = p.end_group(); - Expr::Template(Box::new(TemplateExpr { span, body: tree })) + markup(p); + p.end_group(); + p.end(NodeKind::Template); } /// Parse a code block: `{...}`. -fn block(p: &mut Parser) -> Expr { +fn block(p: &mut Parser) { + p.start(); p.start_group(Group::Brace, TokenMode::Code); - let mut exprs = vec![]; while !p.eof() { p.start_group(Group::Stmt, TokenMode::Code); - if let Some(expr) = expr(p) { - exprs.push(expr); + expr(p); + if p.success() { if !p.eof() { - p.expected_at(p.prev_end(), "semicolon or line break"); + p.expected_at("semicolon or line break"); } } p.end_group(); // Forcefully skip over newlines since the group's contents can't. - p.eat_while(|t| matches!(t, Token::Space(_))); + p.eat_while(|t| matches!(t, NodeKind::Space(_))); } - let span = p.end_group(); - Expr::Block(Box::new(BlockExpr { span, exprs })) + p.end_group(); + p.end(NodeKind::Block); } /// Parse a function call. -fn call(p: &mut Parser, callee: Expr) -> Option { - let mut args = match p.peek_direct() { - Some(Token::LeftParen) => args(p), - Some(Token::LeftBracket) => CallArgs { - span: Span::at(p.id(), callee.span().end), - items: vec![], - }, +fn call(p: &mut Parser, callee: usize) { + p.start_with(callee); + match p.peek_direct() { + Some(NodeKind::LeftParen) | Some(NodeKind::LeftBracket) => args(p, true), _ => { - p.expected_at(p.prev_end(), "argument list"); - return None; + p.expected_at("argument list"); + p.may_end_abort(NodeKind::Call); + return; } }; - while p.peek_direct() == Some(Token::LeftBracket) { - let body = template(p); - args.items.push(CallArg::Pos(body)); - } - - Some(Expr::Call(Box::new(CallExpr { - span: p.span_from(callee.span().start), - callee, - args, - }))) + p.end(NodeKind::Call); } /// Parse the arguments to a function call. -fn args(p: &mut Parser) -> CallArgs { - p.start_group(Group::Paren, TokenMode::Code); - let items = collection(p).0; - let span = p.end_group(); - CallArgs { span, items } +fn args(p: &mut Parser, allow_template: bool) { + p.start(); + if !allow_template || p.peek_direct() == Some(&NodeKind::LeftParen) { + p.start_group(Group::Paren, TokenMode::Code); + collection(p); + p.end_group(); + } + + while allow_template && p.peek_direct() == Some(&NodeKind::LeftBracket) { + template(p); + } + + p.end(NodeKind::CallArgs); } /// Parse a with expression. -fn with_expr(p: &mut Parser, callee: Expr) -> Option { - if p.peek() == Some(Token::LeftParen) { - Some(Expr::With(Box::new(WithExpr { - span: p.span_from(callee.span().start), - callee, - args: args(p), - }))) +fn with_expr(p: &mut Parser, preserve: usize) { + p.start_with(preserve); + p.eat_assert(NodeKind::With); + + if p.peek() == Some(NodeKind::LeftParen) { + args(p, false); + p.end(NodeKind::WithExpr); } else { p.expected("argument list"); - None + p.may_end_abort(NodeKind::WithExpr); } } /// Parse a let expression. -fn let_expr(p: &mut Parser) -> Option { - let start = p.next_start(); - p.eat_assert(Token::Let); +fn let_expr(p: &mut Parser) { + p.start(); + p.eat_assert(NodeKind::Let); - let mut output = None; - if let Some(binding) = ident(p) { - let mut init = None; - - if p.eat_if(Token::With) { - init = with_expr(p, Expr::Ident(Box::new(binding.clone()))); - } else { - // If a parenthesis follows, this is a function definition. 
- let mut maybe_params = None; - if p.peek_direct() == Some(Token::LeftParen) { - p.start_group(Group::Paren, TokenMode::Code); - let items = collection(p).0; - maybe_params = Some(params(p, items)); - p.end_group(); - } - - if p.eat_if(Token::Eq) { - init = expr(p); - } else if maybe_params.is_some() { - // Function definitions must have a body. - p.expected_at(p.prev_end(), "body"); - } - - // Rewrite into a closure expression if it's a function definition. - if let Some(params) = maybe_params { - let body = init?; - init = Some(Expr::Closure(Box::new(ClosureExpr { - span: binding.span.join(body.span()), - name: Some(binding.clone()), - params, - body: Rc::new(body), - }))); - } - } - - output = Some(Expr::Let(Box::new(LetExpr { - span: p.span_from(start), - binding, - init, - }))); + let offset = p.child_count(); + ident(p); + if p.may_end_abort(NodeKind::LetExpr) { + return; } - output + if p.peek() == Some(NodeKind::With) { + with_expr(p, p.child_count() - offset); + } else { + // If a parenthesis follows, this is a function definition. + let has_params = if p.peek_direct() == Some(&NodeKind::LeftParen) { + p.start(); + p.start_group(Group::Paren, TokenMode::Code); + let offset = p.child_count(); + collection(p); + params(p, offset, true); + p.end_group(); + p.end(NodeKind::ClosureParams); + true + } else { + false + }; + + if p.eat_if(NodeKind::Eq) { + expr(p); + } else if has_params { + // Function definitions must have a body. + p.expected_at("body"); + } + + // Rewrite into a closure expression if it's a function definition. + if has_params { + if p.may_end_abort(NodeKind::LetExpr) { + return; + } + + p.start_with(p.child_count() - offset); + p.end(NodeKind::Closure) + } + } + + p.end(NodeKind::LetExpr); } /// Parse an if expresion. -fn if_expr(p: &mut Parser) -> Option { - let start = p.next_start(); - p.eat_assert(Token::If); +fn if_expr(p: &mut Parser) { + p.start(); + p.eat_assert(NodeKind::If); - let mut output = None; - if let Some(condition) = expr(p) { - if let Some(if_body) = body(p) { - let mut else_body = None; - if p.eat_if(Token::Else) { - if p.peek() == Some(Token::If) { - else_body = if_expr(p); - } else { - else_body = body(p); - } - } + expr(p); + if p.may_end_abort(NodeKind::IfExpr) { + return; + } - output = Some(Expr::If(Box::new(IfExpr { - span: p.span_from(start), - condition, - if_body, - else_body, - }))); + body(p); + if p.may_end_abort(NodeKind::IfExpr) { + // Expected function body. + return; + } + + if p.eat_if(NodeKind::Else) { + if p.peek() == Some(NodeKind::If) { + if_expr(p); + } else { + body(p); } } - output + p.end(NodeKind::IfExpr); } /// Parse a while expresion. -fn while_expr(p: &mut Parser) -> Option { - let start = p.next_start(); - p.eat_assert(Token::While); +fn while_expr(p: &mut Parser) { + p.start(); + p.eat_assert(NodeKind::While); - let mut output = None; - if let Some(condition) = expr(p) { - if let Some(body) = body(p) { - output = Some(Expr::While(Box::new(WhileExpr { - span: p.span_from(start), - condition, - body, - }))); - } + expr(p); + + if p.may_end_abort(NodeKind::WhileExpr) { + return; } - output + body(p); + if !p.may_end_abort(NodeKind::WhileExpr) { + p.end(NodeKind::WhileExpr); + } } /// Parse a for expression. 
-fn for_expr(p: &mut Parser) -> Option { - let start = p.next_start(); - p.eat_assert(Token::For); +fn for_expr(p: &mut Parser) { + p.start(); + p.eat_assert(NodeKind::For); - let mut output = None; - if let Some(pattern) = for_pattern(p) { - if p.eat_expect(Token::In) { - if let Some(iter) = expr(p) { - if let Some(body) = body(p) { - output = Some(Expr::For(Box::new(ForExpr { - span: p.span_from(start), - pattern, - iter, - body, - }))); - } - } - } + for_pattern(p); + + if p.may_end_abort(NodeKind::ForExpr) { + return; } - output + if p.eat_expect(NodeKind::In) { + expr(p); + + if p.may_end_abort(NodeKind::ForExpr) { + return; + } + + body(p); + + if !p.may_end_abort(NodeKind::ForExpr) { + p.end(NodeKind::ForExpr); + } + } else { + p.unsuccessful(); + p.may_end_abort(NodeKind::ForExpr); + } } /// Parse a for loop pattern. -fn for_pattern(p: &mut Parser) -> Option { - let first = ident(p)?; - if p.eat_if(Token::Comma) { - if let Some(second) = ident(p) { - return Some(ForPattern::KeyValue(first, second)); +fn for_pattern(p: &mut Parser) { + p.start(); + ident(p); + + if p.may_end_abort(NodeKind::ForPattern) { + return; + } + + if p.peek() == Some(NodeKind::Comma) { + p.eat(); + + ident(p); + + if p.may_end_abort(NodeKind::ForPattern) { + return; } } - Some(ForPattern::Value(first)) + + p.end(NodeKind::ForPattern); } /// Parse an import expression. -fn import_expr(p: &mut Parser) -> Option { - let start = p.next_start(); - p.eat_assert(Token::Import); +fn import_expr(p: &mut Parser) { + p.start(); + p.eat_assert(NodeKind::Import); - let imports = if p.eat_if(Token::Star) { - // This is the wildcard scenario. - Imports::Wildcard - } else { + if !p.eat_if(NodeKind::Star) { // This is the list of identifiers scenario. + p.start(); p.start_group(Group::Imports, TokenMode::Code); - let items = collection(p).0; - if items.is_empty() { - p.expected_at(p.prev_end(), "import items"); + let offset = p.child_count(); + let items = collection(p).1; + if items == 0 { + p.expected_at("import items"); } p.end_group(); - Imports::Idents(idents(p, items)) + + p.filter_children( + offset, + |n| matches!(n.kind(), NodeKind::Ident(_) | NodeKind::Comma), + |_| (ErrorPosition::Full, "expected identifier".into()), + ); + p.end(NodeKind::ImportItems); }; - let mut output = None; - if p.eat_expect(Token::From) { - if let Some(path) = expr(p) { - output = Some(Expr::Import(Box::new(ImportExpr { - span: p.span_from(start), - imports, - path, - }))); - } + if p.eat_expect(NodeKind::From) { + expr(p); } - output + p.end(NodeKind::ImportExpr); } /// Parse an include expression. -fn include_expr(p: &mut Parser) -> Option { - let start = p.next_start(); - p.eat_assert(Token::Include); +fn include_expr(p: &mut Parser) { + p.start(); + p.eat_assert(NodeKind::Include); - expr(p).map(|path| { - Expr::Include(Box::new(IncludeExpr { span: p.span_from(start), path })) - }) + expr(p); + p.end(NodeKind::IncludeExpr); } /// Parse an identifier. -fn ident(p: &mut Parser) -> Option { - if let Some(Token::Ident(string)) = p.peek() { - Some(Ident { - span: p.eat_span(), - string: string.into(), - }) +fn ident(p: &mut Parser) { + if let Some(NodeKind::Ident(_)) = p.peek() { + p.eat(); } else { p.expected("identifier"); - None + p.unsuccessful(); } } /// Parse a control flow body. 
-fn body(p: &mut Parser) -> Option { +fn body(p: &mut Parser) { match p.peek() { - Some(Token::LeftBracket) => Some(template(p)), - Some(Token::LeftBrace) => Some(block(p)), + Some(NodeKind::LeftBracket) => template(p), + Some(NodeKind::LeftBrace) => block(p), _ => { - p.expected_at(p.prev_end(), "body"); - None + p.expected_at("body"); + p.unsuccessful(); } } } diff --git a/src/parse/parser.rs b/src/parse/parser.rs index 347d6f715..f62e882af 100644 --- a/src/parse/parser.rs +++ b/src/parse/parser.rs @@ -1,29 +1,34 @@ use std::ops::Range; +use std::rc::Rc; use super::{TokenMode, Tokens}; -use crate::diag::Error; use crate::source::{SourceFile, SourceId}; -use crate::syntax::{IntoSpan, Pos, Span, Token}; +use crate::syntax::{ErrorPosition, Green, GreenData, GreenNode, NodeKind}; +use crate::util::EcoString; /// A convenient token-based parser. pub struct Parser<'s> { /// The parsed file. source: &'s SourceFile, - /// Parsing errors. - errors: Vec, /// An iterator over the source tokens. tokens: Tokens<'s>, /// The stack of open groups. groups: Vec, /// The next token. - next: Option>, + next: Option, /// The peeked token. /// (Same as `next` except if we are at the end of group, then `None`). - peeked: Option>, + peeked: Option, /// The end index of the last (non-whitespace if in code mode) token. prev_end: usize, /// The start index of the peeked token. next_start: usize, + /// A stack of outer children vectors. + stack: Vec>, + /// The children of the currently built node. + children: Vec, + /// Whether the last parsing step was successful. + success: bool, } /// A logical group of tokens, e.g. `[...]`. @@ -32,9 +37,6 @@ struct GroupEntry { /// For example, a [`Group::Paren`] will be ended by /// [`Token::RightParen`]. pub kind: Group, - /// The start index of the group. Used by `Parser::end_group` to return the - /// group's full span. - pub start: usize, /// The mode the parser was in _before_ the group started (to which we go /// back once the group ends). pub prev_mode: TokenMode, @@ -60,51 +62,204 @@ pub enum Group { impl<'s> Parser<'s> { /// Create a new parser for the source string. pub fn new(source: &'s SourceFile) -> Self { - let mut tokens = Tokens::new(source.src(), TokenMode::Markup); + let mut tokens = Tokens::new(source, TokenMode::Markup); let next = tokens.next(); Self { source, - errors: vec![], tokens, groups: vec![], - next, + next: next.clone(), peeked: next, prev_end: 0, next_start: 0, + stack: vec![], + children: vec![], + success: true, } } - /// Finish parsing and return all errors. - pub fn finish(self) -> Vec { - self.errors - } - /// The id of the parsed source file. pub fn id(&self) -> SourceId { self.source.id() } + /// Start a nested node. + /// + /// Each start call has to be matched with a call to `end`, + /// `end_with_custom_children`, `lift`, `abort`, or `end_or_abort`. + pub fn start(&mut self) { + self.stack.push(std::mem::take(&mut self.children)); + } + + /// Start a nested node, preserving a number of the current children. + pub fn start_with(&mut self, preserve: usize) { + let preserved = self.children.drain(self.children.len() - preserve ..).collect(); + self.stack.push(std::mem::replace(&mut self.children, preserved)); + } + + /// Filter the last children using the given predicate. + pub fn filter_children(&mut self, count: usize, f: F, error: G) + where + F: Fn(&Green) -> bool, + G: Fn(&NodeKind) -> (ErrorPosition, EcoString), + { + for child in &mut self.children[count ..] 
{ + if !((self.tokens.mode() != TokenMode::Code + || Self::skip_type_ext(child.kind(), false)) + || child.kind().is_error() + || f(&child)) + { + let (pos, msg) = error(child.kind()); + let inner = std::mem::take(child); + *child = + GreenNode::with_child(NodeKind::Error(pos, msg), inner.len(), inner) + .into(); + } + } + } + + pub fn child(&self, child: usize) -> Option<&Green> { + self.node_index_from_back(child).map(|i| &self.children[i]) + } + + fn node_index_from_back(&self, child: usize) -> Option { + let len = self.children.len(); + let code = self.tokens.mode() == TokenMode::Code; + let mut seen = 0; + for x in (0 .. len).rev() { + if self.skip_type(self.children[x].kind()) && code { + continue; + } + if seen == child { + return Some(x); + } + seen += 1; + } + + None + } + + /// End the current node as a node of given `kind`. + pub fn end(&mut self, kind: NodeKind) { + let outer = self.stack.pop().unwrap(); + let mut children = std::mem::replace(&mut self.children, outer); + + // have trailing whitespace continue to sit in self.children in code + // mode. + let mut remains = vec![]; + if self.tokens.mode() == TokenMode::Code { + let len = children.len(); + for n in (0 .. len).rev() { + if !self.skip_type(&children[n].kind()) { + break; + } + + remains.push(children.pop().unwrap()); + } + remains.reverse(); + } + + let len = children.iter().map(|c| c.len()).sum(); + self.children + .push(GreenNode::with_children(kind, len, children.into_iter()).into()); + self.children.extend(remains); + self.success = true; + } + + /// End the current node as a node of given `kind`, and start a new node + /// with the ended node as a first child. The function returns how many + /// children the stack frame had before and how many were appended (accounts + /// for trivia). + pub fn end_and_start_with(&mut self, kind: NodeKind) -> (usize, usize) { + let stack_offset = self.stack.last().unwrap().len(); + self.end(kind); + let diff = self.children.len() - stack_offset; + self.start_with(diff); + (stack_offset, diff) + } + + pub fn wrap(&mut self, index: usize, kind: NodeKind) { + let index = self.node_index_from_back(index).unwrap(); + let child = std::mem::take(&mut self.children[index]); + let item = GreenNode::with_child(kind, child.len(), child); + self.children[index] = item.into(); + } + + pub fn convert(&mut self, kind: NodeKind) { + self.start(); + self.eat(); + self.end(kind); + } + + /// End the current node and undo its existence, inling all accumulated + /// children into its parent. + pub fn lift(&mut self) { + let outer = self.stack.pop().unwrap(); + let children = std::mem::replace(&mut self.children, outer); + self.children.extend(children); + self.success = true; + } + + /// End the current node and undo its existence, deleting all accumulated + /// children. + pub fn abort(&mut self, msg: impl Into) { + self.end(NodeKind::Error(ErrorPosition::Full, msg.into().into())); + self.success = false; + } + + pub fn may_lift_abort(&mut self) -> bool { + if !self.success { + self.lift(); + self.success = false; + true + } else { + false + } + } + + pub fn may_end_abort(&mut self, kind: NodeKind) -> bool { + if !self.success { + self.end(kind); + self.success = false; + true + } else { + false + } + } + + /// End the current node as a node of given `kind` if the last parse was + /// successful, otherwise, abort. 
+ pub fn end_or_abort(&mut self, kind: NodeKind) -> bool { + if self.success { + self.end(kind); + true + } else { + self.may_end_abort(kind); + false + } + } + + pub fn finish(&mut self) -> Rc { + if let Green::Node(n) = self.children.pop().unwrap() { + n + } else { + panic!() + } + } + /// Whether the end of the source string or group is reached. pub fn eof(&self) -> bool { self.peek().is_none() } - /// Consume the next token. - pub fn eat(&mut self) -> Option> { + pub fn eat(&mut self) -> Option { let token = self.peek()?; self.bump(); Some(token) } - /// Eat the next token and return its source range. - pub fn eat_span(&mut self) -> Span { - let start = self.next_start(); - self.eat(); - Span::new(self.id(), start, self.prev_end()) - } - /// Consume the next token if it is the given one. - pub fn eat_if(&mut self, t: Token) -> bool { + pub fn eat_if(&mut self, t: NodeKind) -> bool { if self.peek() == Some(t) { self.bump(); true @@ -116,7 +271,7 @@ impl<'s> Parser<'s> { /// Consume the next token if the closure maps it a to `Some`-variant. pub fn eat_map(&mut self, f: F) -> Option where - F: FnOnce(Token<'s>) -> Option, + F: FnOnce(NodeKind) -> Option, { let token = self.peek()?; let mapped = f(token); @@ -128,16 +283,16 @@ impl<'s> Parser<'s> { /// Consume the next token if it is the given one and produce an error if /// not. - pub fn eat_expect(&mut self, t: Token) -> bool { - let eaten = self.eat_if(t); + pub fn eat_expect(&mut self, t: NodeKind) -> bool { + let eaten = self.eat_if(t.clone()); if !eaten { - self.expected_at(self.prev_end(), t.name()); + self.expected_at(&t.to_string()); } eaten } /// Consume the next token, debug-asserting that it is one of the given ones. - pub fn eat_assert(&mut self, t: Token) { + pub fn eat_assert(&mut self, t: NodeKind) { let next = self.eat(); debug_assert_eq!(next, Some(t)); } @@ -145,7 +300,7 @@ impl<'s> Parser<'s> { /// Consume tokens while the condition is true. pub fn eat_while(&mut self, mut f: F) where - F: FnMut(Token<'s>) -> bool, + F: FnMut(NodeKind) -> bool, { while self.peek().map_or(false, |t| f(t)) { self.eat(); @@ -153,42 +308,25 @@ impl<'s> Parser<'s> { } /// Peek at the next token without consuming it. - pub fn peek(&self) -> Option> { - self.peeked + pub fn peek(&self) -> Option { + self.peeked.clone() } /// Peek at the next token if it follows immediately after the last one /// without any whitespace in between. - pub fn peek_direct(&self) -> Option> { + pub fn peek_direct(&self) -> Option<&NodeKind> { if self.next_start() == self.prev_end() { - self.peeked + self.peeked.as_ref() } else { None } } - /// Peek at the span of the next token. - /// - /// Has length zero if `peek()` returns `None`. - pub fn peek_span(&self) -> Span { - Span::new(self.id(), self.next_start(), self.next_end()) - } - /// Peek at the source of the next token. pub fn peek_src(&self) -> &'s str { self.get(self.next_start() .. self.next_end()) } - /// Checks whether the next token fulfills a condition. - /// - /// Returns `false` if there is no next token. - pub fn check(&self, f: F) -> bool - where - F: FnOnce(Token<'s>) -> bool, - { - self.peek().map_or(false, f) - } - /// The byte index at which the last token ended. /// /// Refers to the end of the last _non-whitespace_ token in code mode. @@ -219,11 +357,6 @@ impl<'s> Parser<'s> { self.source.get(range).unwrap() } - /// The span from `start` to [`self.prev_end()`](Self::prev_end). 
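// A minimal sketch (not part of this patch) of how a grammar production is
// expected to drive the node-building API above: open a frame with `start`,
// eat tokens and recurse, then close the frame so the collected children
// become one green node. `unary` and `expr` are illustrative stand-ins for
// the real productions, not names confirmed by this diff.
fn unary(p: &mut Parser) {
    p.start();                        // begin collecting children
    p.eat();                          // the operator token, e.g. `-`
    expr(p);                          // hypothetical operand sub-production
    p.end_or_abort(NodeKind::Unary);  // close the frame, keeping the success flag
}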
- pub fn span_from(&self, start: impl Into) -> Span { - Span::new(self.id(), start, self.prev_end()) - } - /// Continue parsing in a group. /// /// When the end delimiter of the group is reached, all subsequent calls to @@ -232,19 +365,15 @@ impl<'s> Parser<'s> { /// /// This panics if the next token does not start the given group. pub fn start_group(&mut self, kind: Group, mode: TokenMode) { - self.groups.push(GroupEntry { - kind, - start: self.next_start(), - prev_mode: self.tokens.mode(), - }); + self.groups.push(GroupEntry { kind, prev_mode: self.tokens.mode() }); self.tokens.set_mode(mode); self.repeek(); match kind { - Group::Paren => self.eat_assert(Token::LeftParen), - Group::Bracket => self.eat_assert(Token::LeftBracket), - Group::Brace => self.eat_assert(Token::LeftBrace), + Group::Paren => self.eat_assert(NodeKind::LeftParen), + Group::Bracket => self.eat_assert(NodeKind::LeftBracket), + Group::Brace => self.eat_assert(NodeKind::LeftBrace), Group::Stmt => {} Group::Expr => {} Group::Imports => {} @@ -254,7 +383,7 @@ impl<'s> Parser<'s> { /// End the parsing of a group. /// /// This panics if no group was started. - pub fn end_group(&mut self) -> Span { + pub fn end_group(&mut self) { let prev_mode = self.tokens.mode(); let group = self.groups.pop().expect("no started group"); self.tokens.set_mode(group.prev_mode); @@ -264,83 +393,125 @@ impl<'s> Parser<'s> { // Eat the end delimiter if there is one. if let Some((end, required)) = match group.kind { - Group::Paren => Some((Token::RightParen, true)), - Group::Bracket => Some((Token::RightBracket, true)), - Group::Brace => Some((Token::RightBrace, true)), - Group::Stmt => Some((Token::Semicolon, false)), + Group::Paren => Some((NodeKind::RightParen, true)), + Group::Bracket => Some((NodeKind::RightBracket, true)), + Group::Brace => Some((NodeKind::RightBrace, true)), + Group::Stmt => Some((NodeKind::Semicolon, false)), Group::Expr => None, Group::Imports => None, } { - if self.next == Some(end) { + if self.next == Some(end.clone()) { // Bump the delimeter and return. No need to rescan in this case. self.bump(); rescan = false; } else if required { - self.error( - self.next_start() .. self.next_start(), - format!("expected {}", end.name()), - ); + self.start(); + self.abort(format!("expected {}", end.to_string())); } } // Rescan the peeked token if the mode changed. if rescan { self.tokens.jump(self.prev_end()); - self.bump(); + + if prev_mode == TokenMode::Code { + let len = self.children.len(); + for n in (0 .. len).rev() { + if !self.skip_type(self.children[n].kind()) { + break; + } + + self.children.pop(); + } + } + + self.fast_forward(); } - - Span::new(self.id(), group.start, self.prev_end()) - } - - /// Add an error with location and message. - pub fn error(&mut self, span: impl IntoSpan, message: impl Into) { - self.errors.push(Error::new(span.into_span(self.id()), message)); } /// Add an error that `what` was expected at the given span. - pub fn expected_at(&mut self, span: impl IntoSpan, what: &str) { - self.error(span, format!("expected {}", what)); + pub fn expected_at(&mut self, what: &str) { + let mut found = self.children.len(); + for (i, node) in self.children.iter().enumerate().rev() { + if !self.skip_type(node.kind()) { + break; + } + found = i; + } + + self.expected_at_child(found, what); + } + + /// Add an error that `what` was expected at the given child index. 
+ pub fn expected_at_child(&mut self, index: usize, what: &str) { + self.children.insert( + index, + GreenData::new( + NodeKind::Error(ErrorPosition::Full, format!("expected {}", what).into()), + 0, + ) + .into(), + ); } /// Eat the next token and add an error that it is not the expected `thing`. pub fn expected(&mut self, what: &str) { - let before = self.next_start(); + self.start(); if let Some(found) = self.eat() { - let after = self.prev_end(); - self.error( - before .. after, - format!("expected {}, found {}", what, found.name()), - ); + self.abort(format!("expected {}, found {}", what, found.to_string())) } else { - self.expected_at(self.next_start(), what); + self.lift(); + self.expected_at(what); } } /// Eat the next token and add an error that it is unexpected. pub fn unexpected(&mut self) { - let before = self.next_start(); + self.start(); if let Some(found) = self.eat() { - let after = self.prev_end(); - self.error(before .. after, format!("unexpected {}", found.name())); + self.abort(format!("unexpected {}", found.to_string())) + } else { + self.abort("unexpected end of file") } } + pub fn skip_type_ext(token: &NodeKind, stop_at_newline: bool) -> bool { + match token { + NodeKind::Space(n) => n < &1 || !stop_at_newline, + NodeKind::LineComment => true, + NodeKind::BlockComment => true, + _ => false, + } + } + + fn skip_type(&self, token: &NodeKind) -> bool { + Self::skip_type_ext(token, self.stop_at_newline()) + } + /// Move to the next token. fn bump(&mut self) { - self.prev_end = self.tokens.index().into(); + self.children.push( + GreenData::new( + self.next.clone().unwrap(), + self.tokens.index() - self.next_start, + ) + .into(), + ); + + self.fast_forward(); + } + + pub fn fast_forward(&mut self) { + if !self.next.as_ref().map_or(false, |x| self.skip_type(x)) { + self.prev_end = self.tokens.index().into(); + } self.next_start = self.tokens.index().into(); self.next = self.tokens.next(); if self.tokens.mode() == TokenMode::Code { // Skip whitespace and comments. - while match self.next { - Some(Token::Space(n)) => n < 1 || !self.stop_at_newline(), - Some(Token::LineComment(_)) => true, - Some(Token::BlockComment(_)) => true, - _ => false, - } { - self.next_start = self.tokens.index().into(); - self.next = self.tokens.next(); + while self.next.as_ref().map_or(false, |x| self.skip_type(x)) { + self.bump(); } } @@ -349,19 +520,19 @@ impl<'s> Parser<'s> { /// Take another look at the next token to recheck whether it ends a group. 
fn repeek(&mut self) { - self.peeked = self.next; - let token = match self.next { + self.peeked = self.next.clone(); + let token = match self.next.as_ref() { Some(token) => token, None => return, }; if match token { - Token::RightParen => self.inside(Group::Paren), - Token::RightBracket => self.inside(Group::Bracket), - Token::RightBrace => self.inside(Group::Brace), - Token::Semicolon => self.inside(Group::Stmt), - Token::From => self.inside(Group::Imports), - Token::Space(n) => n >= 1 && self.stop_at_newline(), + NodeKind::RightParen => self.inside(Group::Paren), + NodeKind::RightBracket => self.inside(Group::Bracket), + NodeKind::RightBrace => self.inside(Group::Brace), + NodeKind::Semicolon => self.inside(Group::Stmt), + NodeKind::From => self.inside(Group::Imports), + NodeKind::Space(n) => n > &0 && self.stop_at_newline(), _ => false, } { self.peeked = None; @@ -380,4 +551,22 @@ impl<'s> Parser<'s> { fn inside(&self, kind: Group) -> bool { self.groups.iter().any(|g| g.kind == kind) } + + pub fn last_child(&self) -> Option<&Green> { + self.children.last() + } + + pub fn success(&mut self) -> bool { + let s = self.success; + self.success = true; + s + } + + pub fn unsuccessful(&mut self) { + self.success = false; + } + + pub fn child_count(&self) -> usize { + self.children.len() + } } diff --git a/src/parse/resolve.rs b/src/parse/resolve.rs index 1b3238472..c59c3bb17 100644 --- a/src/parse/resolve.rs +++ b/src/parse/resolve.rs @@ -1,5 +1,5 @@ use super::{is_newline, Scanner}; -use crate::syntax::{Ident, RawNode, Span}; +use crate::syntax::RawToken; use crate::util::EcoString; /// Resolve all escape sequences in a string. @@ -48,21 +48,28 @@ pub fn resolve_hex(sequence: &str) -> Option { } /// Resolve the language tag and trims the raw text. -pub fn resolve_raw(span: Span, column: usize, backticks: usize, text: &str) -> RawNode { +pub fn resolve_raw( + column: usize, + backticks: u8, + text: &str, + terminated: bool, +) -> RawToken { if backticks > 1 { let (tag, inner) = split_at_lang_tag(text); let (text, block) = trim_and_split_raw(column, inner); - RawNode { - span, - lang: Ident::new(tag, span.with_end(span.start + tag.len())), + RawToken { + lang: Some(tag.into()), text: text.into(), + backticks, + terminated, block, } } else { - RawNode { - span, + RawToken { lang: None, text: split_lines(text).join("\n").into(), + backticks, + terminated, block: false, } } @@ -140,7 +147,6 @@ fn split_lines(text: &str) -> Vec<&str> { #[cfg(test)] #[rustfmt::skip] mod tests { - use crate::syntax::Span; use super::*; #[test] @@ -175,8 +181,8 @@ mod tests { test("typst\n it!", "typst", "\n it!"); test("typst\n it!", "typst", "\n it!"); test("abc`", "abc", "`"); - test(" hi", "", " hi"); - test("`", "", "`"); + test(" hi", "", " hi"); + test("`", "", "`"); } #[test] @@ -184,13 +190,13 @@ mod tests { #[track_caller] fn test( column: usize, - backticks: usize, + backticks: u8, raw: &str, lang: Option<&str>, text: &str, block: bool, ) { - let node = resolve_raw(Span::detached(), column, backticks, raw); + let node = resolve_raw(column, backticks, raw, true); assert_eq!(node.lang.as_deref(), lang); assert_eq!(node.text, text); assert_eq!(node.block, block); @@ -204,15 +210,15 @@ mod tests { // More than one backtick with lang tag. test(0, 2, "js alert()", Some("js"), "alert()", false); test(0, 3, "py quit(\n\n)", Some("py"), "quit(\n\n)", true); - test(0, 2, "♥", None, "", false); + test(0, 2, "♥", Some("♥"), "", false); // Trimming of whitespace (tested more thoroughly in separate test). 
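// A quick illustration (not part of the patch) of the reworked signature:
// with more than one backtick, everything up to the first space or newline
// is split off as the language tag, and the column drives dedenting.
let token = resolve_raw(0, 3, "rust fn main() {}", true);
assert_eq!(token.lang.as_deref(), Some("rust"));
assert_eq!(token.text, "fn main() {}");
assert_eq!(token.block, false);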
- test(0, 2, " a", None, "a", false); - test(0, 2, " a", None, " a", false); - test(0, 2, " \na", None, "a", true); + test(0, 2, " a", Some(""), "a", false); + test(0, 2, " a", Some(""), " a", false); + test(0, 2, " \na", Some(""), "a", true); // Dedenting - test(2, 3, " def foo():\n bar()", None, "def foo():\n bar()", true); + test(2, 3, " def foo():\n bar()", Some(""), "def foo():\n bar()", true); } #[test] diff --git a/src/parse/tokens.rs b/src/parse/tokens.rs index 5f9694528..19d0d77bb 100644 --- a/src/parse/tokens.rs +++ b/src/parse/tokens.rs @@ -1,9 +1,13 @@ -use super::{is_newline, Scanner}; +use super::{is_newline, resolve_raw, Scanner}; use crate::geom::{AngularUnit, LengthUnit}; +use crate::parse::resolve::{resolve_hex, resolve_string}; +use crate::source::SourceFile; use crate::syntax::*; +use crate::util::EcoString; /// An iterator over the tokens of a string of source code. pub struct Tokens<'s> { + source: &'s SourceFile, s: Scanner<'s>, mode: TokenMode, } @@ -20,8 +24,12 @@ pub enum TokenMode { impl<'s> Tokens<'s> { /// Create a new token iterator with the given mode. #[inline] - pub fn new(src: &'s str, mode: TokenMode) -> Self { - Self { s: Scanner::new(src), mode } + pub fn new(source: &'s SourceFile, mode: TokenMode) -> Self { + Self { + s: Scanner::new(source.src()), + source, + mode, + } } /// Get the current token mode. @@ -59,7 +67,7 @@ impl<'s> Tokens<'s> { } impl<'s> Iterator for Tokens<'s> { - type Item = Token<'s>; + type Item = NodeKind; /// Parse the next token in the source code. #[inline] @@ -68,19 +76,21 @@ impl<'s> Iterator for Tokens<'s> { let c = self.s.eat()?; Some(match c { // Blocks and templates. - '[' => Token::LeftBracket, - ']' => Token::RightBracket, - '{' => Token::LeftBrace, - '}' => Token::RightBrace, + '[' => NodeKind::LeftBracket, + ']' => NodeKind::RightBracket, + '{' => NodeKind::LeftBrace, + '}' => NodeKind::RightBrace, // Whitespace. - ' ' if self.s.check_or(true, |c| !c.is_whitespace()) => Token::Space(0), + ' ' if self.s.check_or(true, |c| !c.is_whitespace()) => NodeKind::Space(0), c if c.is_whitespace() => self.whitespace(), // Comments with special case for URLs. '/' if self.s.eat_if('*') => self.block_comment(), '/' if !self.maybe_in_url() && self.s.eat_if('/') => self.line_comment(), - '*' if self.s.eat_if('/') => Token::Invalid(self.s.eaten_from(start)), + '*' if self.s.eat_if('/') => { + NodeKind::Error(ErrorPosition::Full, self.s.eaten_from(start).into()) + } // Other things. _ => match self.mode { @@ -93,7 +103,7 @@ impl<'s> Iterator for Tokens<'s> { impl<'s> Tokens<'s> { #[inline] - fn markup(&mut self, start: usize, c: char) -> Token<'s> { + fn markup(&mut self, start: usize, c: char) -> NodeKind { match c { // Escape sequences. '\\' => self.backslash(), @@ -102,13 +112,15 @@ impl<'s> Tokens<'s> { '#' => self.hash(), // Markup. - '~' => Token::Tilde, - '*' => Token::Star, - '_' => Token::Underscore, + '~' => NodeKind::NonBreakingSpace, + '*' => NodeKind::Strong, + '_' => NodeKind::Emph, '`' => self.raw(), '$' => self.math(), - '-' => self.hyph(start), - '=' if self.s.check_or(true, |c| c == '=' || c.is_whitespace()) => Token::Eq, + '-' => self.hyph(), + '=' if self.s.check_or(true, |c| c == '=' || c.is_whitespace()) => { + NodeKind::Eq + } c if c == '.' || c.is_ascii_digit() => self.numbering(start, c), // Plain text. @@ -116,35 +128,35 @@ impl<'s> Tokens<'s> { } } - fn code(&mut self, start: usize, c: char) -> Token<'s> { + fn code(&mut self, start: usize, c: char) -> NodeKind { match c { // Parens. 
- '(' => Token::LeftParen, - ')' => Token::RightParen, + '(' => NodeKind::LeftParen, + ')' => NodeKind::RightParen, // Length two. - '=' if self.s.eat_if('=') => Token::EqEq, - '!' if self.s.eat_if('=') => Token::ExclEq, - '<' if self.s.eat_if('=') => Token::LtEq, - '>' if self.s.eat_if('=') => Token::GtEq, - '+' if self.s.eat_if('=') => Token::PlusEq, - '-' if self.s.eat_if('=') => Token::HyphEq, - '*' if self.s.eat_if('=') => Token::StarEq, - '/' if self.s.eat_if('=') => Token::SlashEq, - '.' if self.s.eat_if('.') => Token::Dots, - '=' if self.s.eat_if('>') => Token::Arrow, + '=' if self.s.eat_if('=') => NodeKind::EqEq, + '!' if self.s.eat_if('=') => NodeKind::ExclEq, + '<' if self.s.eat_if('=') => NodeKind::LtEq, + '>' if self.s.eat_if('=') => NodeKind::GtEq, + '+' if self.s.eat_if('=') => NodeKind::PlusEq, + '-' if self.s.eat_if('=') => NodeKind::HyphEq, + '*' if self.s.eat_if('=') => NodeKind::StarEq, + '/' if self.s.eat_if('=') => NodeKind::SlashEq, + '.' if self.s.eat_if('.') => NodeKind::Dots, + '=' if self.s.eat_if('>') => NodeKind::Arrow, // Length one. - ',' => Token::Comma, - ';' => Token::Semicolon, - ':' => Token::Colon, - '+' => Token::Plus, - '-' => Token::Hyph, - '*' => Token::Star, - '/' => Token::Slash, - '=' => Token::Eq, - '<' => Token::Lt, - '>' => Token::Gt, + ',' => NodeKind::Comma, + ';' => NodeKind::Semicolon, + ':' => NodeKind::Colon, + '+' => NodeKind::Plus, + '-' => NodeKind::Minus, + '*' => NodeKind::Star, + '/' => NodeKind::Slash, + '=' => NodeKind::Eq, + '<' => NodeKind::Lt, + '>' => NodeKind::Gt, // Identifiers. c if is_id_start(c) => self.ident(start), @@ -159,12 +171,12 @@ impl<'s> Tokens<'s> { // Strings. '"' => self.string(), - _ => Token::Invalid(self.s.eaten_from(start)), + _ => NodeKind::Error(ErrorPosition::Full, self.s.eaten_from(start).into()), } } #[inline] - fn text(&mut self, start: usize) -> Token<'s> { + fn text(&mut self, start: usize) -> NodeKind { macro_rules! table { ($($c:literal)|*) => {{ let mut t = [false; 128]; @@ -186,10 +198,10 @@ impl<'s> Tokens<'s> { TABLE.get(c as usize).copied().unwrap_or_else(|| c.is_whitespace()) }); - Token::Text(self.s.eaten_from(start)) + NodeKind::Text(resolve_string(self.s.eaten_from(start))) } - fn whitespace(&mut self) -> Token<'s> { + fn whitespace(&mut self) -> NodeKind { self.s.uneat(); // Count the number of newlines. @@ -208,10 +220,10 @@ impl<'s> Tokens<'s> { } } - Token::Space(newlines) + NodeKind::Space(newlines) } - fn backslash(&mut self) -> Token<'s> { + fn backslash(&mut self) -> NodeKind { if let Some(c) = self.s.peek() { match c { // Backslash and comments. @@ -220,61 +232,61 @@ impl<'s> Tokens<'s> { '[' | ']' | '{' | '}' | '#' | // Markup. '*' | '_' | '=' | '~' | '`' | '$' => { - let start = self.s.index(); self.s.eat_assert(c); - Token::Text(&self.s.eaten_from(start)) + NodeKind::Text(c.into()) } 'u' if self.s.rest().starts_with("u{") => { self.s.eat_assert('u'); self.s.eat_assert('{'); - Token::UnicodeEscape(UnicodeEscapeToken { - // Allow more than `ascii_hexdigit` for better error recovery. 
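// Sketch (not part of the patch): escape sequences are now resolved while
// lexing, so the token itself carries the decoded character whenever the
// hex digits form a valid code point. The literal below is illustrative.
let _escaped = NodeKind::UnicodeEscape(UnicodeEscapeToken {
    character: resolve_hex("1f600"),   // Some('😀') for a valid code point
    sequence: "1f600".into(),
    terminated: true,
});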
- sequence: self.s.eat_while(|c| c.is_ascii_alphanumeric()), - terminated: self.s.eat_if('}'), + let sequence: EcoString = self.s.eat_while(|c| c.is_ascii_alphanumeric()).into(); + NodeKind::UnicodeEscape(UnicodeEscapeToken { + character: resolve_hex(&sequence), + sequence, + terminated: self.s.eat_if('}') }) } - c if c.is_whitespace() => Token::Backslash, - _ => Token::Text("\\"), + c if c.is_whitespace() => NodeKind::Linebreak, + _ => NodeKind::Text("\\".into()), } } else { - Token::Backslash + NodeKind::Linebreak } } #[inline] - fn hash(&mut self) -> Token<'s> { + fn hash(&mut self) -> NodeKind { if self.s.check_or(false, is_id_start) { let read = self.s.eat_while(is_id_continue); if let Some(keyword) = keyword(read) { keyword } else { - Token::Ident(read) + NodeKind::Ident(read.into()) } } else { - Token::Text("#") + NodeKind::Text("#".into()) } } - fn hyph(&mut self, start: usize) -> Token<'s> { + fn hyph(&mut self) -> NodeKind { if self.s.eat_if('-') { if self.s.eat_if('-') { - Token::HyphHyphHyph + NodeKind::EmDash } else { - Token::HyphHyph + NodeKind::EnDash } } else if self.s.check_or(true, char::is_whitespace) { - Token::Hyph + NodeKind::ListBullet } else { - Token::Text(self.s.eaten_from(start)) + NodeKind::Text("-".into()) } } - fn numbering(&mut self, start: usize, c: char) -> Token<'s> { + fn numbering(&mut self, start: usize, c: char) -> NodeKind { let number = if c != '.' { self.s.eat_while(|c| c.is_ascii_digit()); let read = self.s.eaten_from(start); if !self.s.eat_if('.') { - return Token::Text(read); + return NodeKind::Text(self.s.eaten_from(start).into()); } read.parse().ok() } else { @@ -282,21 +294,28 @@ impl<'s> Tokens<'s> { }; if self.s.check_or(true, char::is_whitespace) { - Token::Numbering(number) + NodeKind::EnumNumbering(number) } else { - Token::Text(self.s.eaten_from(start)) + NodeKind::Text(self.s.eaten_from(start).into()) } } - fn raw(&mut self) -> Token<'s> { + fn raw(&mut self) -> NodeKind { + let column = self.source.byte_to_column(self.s.index() - 1).unwrap(); let mut backticks = 1; - while self.s.eat_if('`') { + while self.s.eat_if('`') && backticks < u8::MAX { backticks += 1; } // Special case for empty inline block. if backticks == 2 { - return Token::Raw(RawToken { text: "", backticks: 1, terminated: true }); + return NodeKind::Raw(RawToken { + text: EcoString::new(), + lang: None, + backticks: 1, + terminated: true, + block: false, + }); } let start = self.s.index(); @@ -311,16 +330,17 @@ impl<'s> Tokens<'s> { } let terminated = found == backticks; - let end = self.s.index() - if terminated { found } else { 0 }; + let end = self.s.index() - if terminated { found as usize } else { 0 }; - Token::Raw(RawToken { - text: self.s.get(start .. end), + NodeKind::Raw(resolve_raw( + column, backticks, + self.s.get(start .. end).into(), terminated, - }) + )) } - fn math(&mut self) -> Token<'s> { + fn math(&mut self) -> NodeKind { let mut display = false; if self.s.eat_if('[') { display = true; @@ -350,25 +370,25 @@ impl<'s> Tokens<'s> { (true, true) => 2, }; - Token::Math(MathToken { - formula: self.s.get(start .. end), + NodeKind::Math(MathToken { + formula: self.s.get(start .. 
end).into(), display, terminated, }) } - fn ident(&mut self, start: usize) -> Token<'s> { + fn ident(&mut self, start: usize) -> NodeKind { self.s.eat_while(is_id_continue); match self.s.eaten_from(start) { - "none" => Token::None, - "auto" => Token::Auto, - "true" => Token::Bool(true), - "false" => Token::Bool(false), - id => keyword(id).unwrap_or(Token::Ident(id)), + "none" => NodeKind::None, + "auto" => NodeKind::Auto, + "true" => NodeKind::Bool(true), + "false" => NodeKind::Bool(false), + id => keyword(id).unwrap_or(NodeKind::Ident(id.into())), } } - fn number(&mut self, start: usize, c: char) -> Token<'s> { + fn number(&mut self, start: usize, c: char) -> NodeKind { // Read the first part (integer or fractional depending on `first`). self.s.eat_while(|c| c.is_ascii_digit()); @@ -380,7 +400,9 @@ impl<'s> Tokens<'s> { // Read the exponent. if self.s.eat_if('e') || self.s.eat_if('E') { - let _ = self.s.eat_if('+') || self.s.eat_if('-'); + if !self.s.eat_if('+') { + self.s.eat_if('-'); + } self.s.eat_while(|c| c.is_ascii_digit()); } @@ -396,55 +418,53 @@ impl<'s> Tokens<'s> { // Find out whether it is a simple number. if suffix.is_empty() { - if let Ok(int) = number.parse::() { - return Token::Int(int); - } else if let Ok(float) = number.parse::() { - return Token::Float(float); + if let Ok(i) = number.parse::() { + return NodeKind::Int(i); } } - // Otherwise parse into the fitting numeric type. - let build = match suffix { - "%" => Token::Percent, - "fr" => Token::Fraction, - "pt" => |x| Token::Length(x, LengthUnit::Pt), - "mm" => |x| Token::Length(x, LengthUnit::Mm), - "cm" => |x| Token::Length(x, LengthUnit::Cm), - "in" => |x| Token::Length(x, LengthUnit::In), - "rad" => |x| Token::Angle(x, AngularUnit::Rad), - "deg" => |x| Token::Angle(x, AngularUnit::Deg), - _ => return Token::Invalid(all), - }; - - if let Ok(float) = number.parse::() { - build(float) + if let Ok(f) = number.parse::() { + match suffix { + "" => NodeKind::Float(f), + "%" => NodeKind::Percentage(f), + "fr" => NodeKind::Fraction(f), + "pt" => NodeKind::Length(f, LengthUnit::Pt), + "mm" => NodeKind::Length(f, LengthUnit::Mm), + "cm" => NodeKind::Length(f, LengthUnit::Cm), + "in" => NodeKind::Length(f, LengthUnit::In), + "deg" => NodeKind::Angle(f, AngularUnit::Deg), + "rad" => NodeKind::Angle(f, AngularUnit::Rad), + _ => { + return NodeKind::Error(ErrorPosition::Full, all.into()); + } + } } else { - Token::Invalid(all) + NodeKind::Error(ErrorPosition::Full, all.into()) } } - fn string(&mut self) -> Token<'s> { + + fn string(&mut self) -> NodeKind { let mut escaped = false; - Token::Str(StrToken { - string: self.s.eat_until(|c| { + NodeKind::Str(StrToken { + string: resolve_string(self.s.eat_until(|c| { if c == '"' && !escaped { true } else { escaped = c == '\\' && !escaped; false } - }), + })), terminated: self.s.eat_if('"'), }) } - fn line_comment(&mut self) -> Token<'s> { - Token::LineComment(self.s.eat_until(is_newline)) + fn line_comment(&mut self) -> NodeKind { + self.s.eat_until(is_newline); + NodeKind::LineComment } - fn block_comment(&mut self) -> Token<'s> { - let start = self.s.index(); - + fn block_comment(&mut self) -> NodeKind { let mut state = '_'; let mut depth = 1; @@ -466,10 +486,7 @@ impl<'s> Tokens<'s> { } } - let terminated = depth == 0; - let end = self.s.index() - if terminated { 2 } else { 0 }; - - Token::BlockComment(self.s.get(start .. 
end)) + NodeKind::BlockComment } fn maybe_in_url(&self) -> bool { @@ -477,24 +494,24 @@ impl<'s> Tokens<'s> { } } -fn keyword(ident: &str) -> Option> { +fn keyword(ident: &str) -> Option { Some(match ident { - "not" => Token::Not, - "and" => Token::And, - "or" => Token::Or, - "with" => Token::With, - "let" => Token::Let, - "if" => Token::If, - "else" => Token::Else, - "for" => Token::For, - "in" => Token::In, - "while" => Token::While, - "break" => Token::Break, - "continue" => Token::Continue, - "return" => Token::Return, - "import" => Token::Import, - "include" => Token::Include, - "from" => Token::From, + "not" => NodeKind::Not, + "and" => NodeKind::And, + "or" => NodeKind::Or, + "with" => NodeKind::With, + "let" => NodeKind::Let, + "if" => NodeKind::If, + "else" => NodeKind::Else, + "for" => NodeKind::For, + "in" => NodeKind::In, + "while" => NodeKind::While, + "break" => NodeKind::Break, + "continue" => NodeKind::Continue, + "return" => NodeKind::Return, + "import" => NodeKind::Import, + "include" => NodeKind::Include, + "from" => NodeKind::From, _ => return None, }) } @@ -506,24 +523,56 @@ mod tests { use super::*; + use NodeKind::*; use Option::None; - use Token::{Ident, *}; use TokenMode::{Code, Markup}; - const fn UnicodeEscape(sequence: &str, terminated: bool) -> Token { - Token::UnicodeEscape(UnicodeEscapeToken { sequence, terminated }) + fn UnicodeEscape(sequence: &str, terminated: bool) -> NodeKind { + NodeKind::UnicodeEscape(UnicodeEscapeToken { + character: resolve_hex(sequence), + sequence: sequence.into(), + terminated, + }) } - const fn Raw(text: &str, backticks: usize, terminated: bool) -> Token { - Token::Raw(RawToken { text, backticks, terminated }) + fn Raw( + text: &str, + lang: Option<&str>, + backticks: u8, + terminated: bool, + block: bool, + ) -> NodeKind { + NodeKind::Raw(RawToken { + text: text.into(), + lang: lang.map(Into::into), + backticks, + terminated, + block, + }) } - const fn Math(formula: &str, display: bool, terminated: bool) -> Token { - Token::Math(MathToken { formula, display, terminated }) + fn Math(formula: &str, display: bool, terminated: bool) -> NodeKind { + NodeKind::Math(MathToken { + formula: formula.into(), + display, + terminated, + }) } - const fn Str(string: &str, terminated: bool) -> Token { - Token::Str(StrToken { string, terminated }) + fn Str(string: &str, terminated: bool) -> NodeKind { + NodeKind::Str(StrToken { string: string.into(), terminated }) + } + + fn Text(string: &str) -> NodeKind { + NodeKind::Text(string.into()) + } + + fn Ident(ident: &str) -> NodeKind { + NodeKind::Ident(ident.into()) + } + + fn Invalid(invalid: &str) -> NodeKind { + NodeKind::Error(ErrorPosition::Full, invalid.into()) } /// Building blocks for suffix testing. @@ -541,40 +590,6 @@ mod tests { /// - '/': symbols const BLOCKS: &str = " a1/"; - /// Suffixes described by four-tuples of: - /// - /// - block the suffix is part of - /// - mode in which the suffix is applicable - /// - the suffix string - /// - the resulting suffix token - const SUFFIXES: &[(char, Option, &str, Token)] = &[ - // Whitespace suffixes. - (' ', None, " ", Space(0)), - (' ', None, "\n", Space(1)), - (' ', None, "\r", Space(1)), - (' ', None, "\r\n", Space(1)), - // Letter suffixes. - ('a', Some(Markup), "hello", Text("hello")), - ('a', Some(Markup), "💚", Text("💚")), - ('a', Some(Code), "val", Ident("val")), - ('a', Some(Code), "α", Ident("α")), - ('a', Some(Code), "_", Ident("_")), - // Number suffixes. 
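// Rough sketch (not part of the patch) of driving the reworked lexer
// directly; the tests below build a detached SourceFile the same way, since
// `Tokens` now needs column information from the file for raw blocks.
let source = SourceFile::detached("*hi*");
let kinds: Vec<NodeKind> = Tokens::new(&source, TokenMode::Markup).collect();
// Expected roughly: [Strong, Text("hi"), Strong]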
- ('1', Some(Code), "2", Int(2)), - ('1', Some(Code), ".2", Float(0.2)), - // Symbol suffixes. - ('/', None, "[", LeftBracket), - ('/', None, "//", LineComment("")), - ('/', None, "/**/", BlockComment("")), - ('/', Some(Markup), "*", Star), - ('/', Some(Markup), "$ $", Math(" ", false, true)), - ('/', Some(Markup), r"\\", Text(r"\")), - ('/', Some(Markup), "#let", Let), - ('/', Some(Code), "(", LeftParen), - ('/', Some(Code), ":", Colon), - ('/', Some(Code), "+=", PlusEq), - ]; - macro_rules! t { (Both $($tts:tt)*) => { t!(Markup $($tts)*); @@ -584,22 +599,56 @@ mod tests { // Test without suffix. t!(@$mode: $src => $($token),*); + // Suffixes described by four-tuples of: + // + // - block the suffix is part of + // - mode in which the suffix is applicable + // - the suffix string + // - the resulting suffix NodeKind + let suffixes: &[(char, Option, &str, NodeKind)] = &[ + // Whitespace suffixes. + (' ', None, " ", Space(0)), + (' ', None, "\n", Space(1)), + (' ', None, "\r", Space(1)), + (' ', None, "\r\n", Space(1)), + // Letter suffixes. + ('a', Some(Markup), "hello", Text("hello")), + ('a', Some(Markup), "💚", Text("💚")), + ('a', Some(Code), "val", Ident("val")), + ('a', Some(Code), "α", Ident("α")), + ('a', Some(Code), "_", Ident("_")), + // Number suffixes. + ('1', Some(Code), "2", Int(2)), + ('1', Some(Code), ".2", Float(0.2)), + // Symbol suffixes. + ('/', None, "[", LeftBracket), + ('/', None, "//", LineComment), + ('/', None, "/**/", BlockComment), + ('/', Some(Markup), "*", Strong), + ('/', Some(Markup), "$ $", Math(" ", false, true)), + ('/', Some(Markup), r"\\", Text("\\")), + ('/', Some(Markup), "#let", Let), + ('/', Some(Code), "(", LeftParen), + ('/', Some(Code), ":", Colon), + ('/', Some(Code), "+=", PlusEq), + ]; + // Test with each applicable suffix. - for &(block, mode, suffix, token) in SUFFIXES { + for (block, mode, suffix, token) in suffixes { let src = $src; #[allow(unused_variables)] let blocks = BLOCKS; $(let blocks = $blocks;)? assert!(!blocks.contains(|c| !BLOCKS.contains(c))); - if (mode.is_none() || mode == Some($mode)) && blocks.contains(block) { + if (mode.is_none() || mode == &Some($mode)) && blocks.contains(*block) { t!(@$mode: format!("{}{}", src, suffix) => $($token,)* token); } } }}; (@$mode:ident: $src:expr => $($token:expr),*) => {{ let src = $src; - let found = Tokens::new(&src, $mode).collect::>(); - let expected = vec![$($token),*]; + let found = Tokens::new(&SourceFile::detached(src.clone()), $mode).collect::>(); + let expected = vec![$($token.clone()),*]; check(&src, found, expected); }}; } @@ -671,7 +720,7 @@ mod tests { // Test text ends. t!(Markup[""]: "hello " => Text("hello"), Space(0)); - t!(Markup[""]: "hello~" => Text("hello"), Tilde); + t!(Markup[""]: "hello~" => Text("hello"), NonBreakingSpace); } #[test] @@ -713,16 +762,16 @@ mod tests { #[test] fn test_tokenize_markup_symbols() { // Test markup tokens. - t!(Markup[" a1"]: "*" => Star); - t!(Markup: "_" => Underscore); + t!(Markup[" a1"]: "*" => Strong); + t!(Markup: "_" => Emph); t!(Markup[""]: "===" => Eq, Eq, Eq); t!(Markup["a1/"]: "= " => Eq, Space(0)); - t!(Markup: "~" => Tilde); - t!(Markup[" "]: r"\" => Backslash); - t!(Markup["a "]: r"a--" => Text("a"), HyphHyph); - t!(Markup["a1/"]: "- " => Hyph, Space(0)); - t!(Markup[" "]: "." => Numbering(None)); - t!(Markup[" "]: "1." 
=> Numbering(Some(1))); + t!(Markup: "~" => NonBreakingSpace); + t!(Markup[" "]: r"\" => Linebreak); + t!(Markup["a "]: r"a--" => Text("a"), EnDash); + t!(Markup["a1/"]: "- " => ListBullet, Space(0)); + t!(Markup[" "]: "." => EnumNumbering(None)); + t!(Markup[" "]: "1." => EnumNumbering(Some(1))); t!(Markup[" "]: "1.a" => Text("1."), Text("a")); t!(Markup[" /"]: "a1." => Text("a1.")); } @@ -734,7 +783,7 @@ mod tests { t!(Code: ";" => Semicolon); t!(Code: ":" => Colon); t!(Code: "+" => Plus); - t!(Code: "-" => Hyph); + t!(Code: "-" => Minus); t!(Code[" a1"]: "*" => Star); t!(Code[" a1"]: "/" => Slash); t!(Code: "=" => Eq); @@ -756,10 +805,10 @@ mod tests { t!(Code[" a/"]: "..." => Dots, Invalid(".")); // Test hyphen as symbol vs part of identifier. - t!(Code[" /"]: "-1" => Hyph, Int(1)); - t!(Code[" /"]: "-a" => Hyph, Ident("a")); - t!(Code[" /"]: "--1" => Hyph, Hyph, Int(1)); - t!(Code[" /"]: "--_a" => Hyph, Hyph, Ident("_a")); + t!(Code[" /"]: "-1" => Minus, Int(1)); + t!(Code[" /"]: "-a" => Minus, Ident("a")); + t!(Code[" /"]: "--1" => Minus, Minus, Int(1)); + t!(Code[" /"]: "--_a" => Minus, Minus, Ident("_a")); t!(Code[" /"]: "a-b" => Ident("a-b")); } @@ -776,13 +825,13 @@ mod tests { ("import", Import), ]; - for &(s, t) in &list { + for (s, t) in list.clone() { t!(Markup[" "]: format!("#{}", s) => t); t!(Markup[" "]: format!("#{0}#{0}", s) => t, t); - t!(Markup[" /"]: format!("# {}", s) => Token::Text("#"), Space(0), Text(s)); + t!(Markup[" /"]: format!("# {}", s) => Text("#"), Space(0), Text(s)); } - for &(s, t) in &list { + for (s, t) in list { t!(Code[" "]: s => t); t!(Markup[" /"]: s => Text(s)); } @@ -796,25 +845,23 @@ mod tests { #[test] fn test_tokenize_raw_blocks() { - let empty = Raw("", 1, true); - // Test basic raw block. - t!(Markup: "``" => empty); - t!(Markup: "`raw`" => Raw("raw", 1, true)); - t!(Markup[""]: "`]" => Raw("]", 1, false)); + t!(Markup: "``" => Raw("", None, 1, true, false)); + t!(Markup: "`raw`" => Raw("raw", None, 1, true, false)); + t!(Markup[""]: "`]" => Raw("]", None, 1, false, false)); // Test special symbols in raw block. - t!(Markup: "`[brackets]`" => Raw("[brackets]", 1, true)); - t!(Markup[""]: r"`\`` " => Raw(r"\", 1, true), Raw(" ", 1, false)); + t!(Markup: "`[brackets]`" => Raw("[brackets]", None, 1, true, false)); + t!(Markup[""]: r"`\`` " => Raw(r"\", None, 1, true, false), Raw(" ", None, 1, false, false)); // Test separated closing backticks. - t!(Markup: "```not `y`e`t```" => Raw("not `y`e`t", 3, true)); + t!(Markup: "```not `y`e`t```" => Raw("`y`e`t", Some("not"), 3, true, false)); // Test more backticks. 
- t!(Markup: "``nope``" => empty, Text("nope"), empty); - t!(Markup: "````🚀````" => Raw("🚀", 4, true)); - t!(Markup[""]: "`````👩‍🚀````noend" => Raw("👩‍🚀````noend", 5, false)); - t!(Markup[""]: "````raw``````" => Raw("raw", 4, true), empty); + t!(Markup: "``nope``" => Raw("", None, 1, true, false), Text("nope"), Raw("", None, 1, true, false)); + t!(Markup: "````🚀````" => Raw("", Some("🚀"), 4, true, false)); + t!(Markup[""]: "`````👩‍🚀````noend" => Raw("````noend", Some("👩‍🚀"), 5, false, false)); + t!(Markup[""]: "````raw``````" => Raw("", Some("raw"), 4, true, false), Raw("", None, 1, true, false)); } #[test] @@ -896,8 +943,8 @@ mod tests { let nums = ints.iter().map(|&(k, v)| (k, v as f64)).chain(floats); let suffixes = [ - ("%", Percent as fn(f64) -> Token<'static>), - ("fr", Fraction as fn(f64) -> Token<'static>), + ("%", Percentage as fn(f64) -> NodeKind), + ("fr", Fraction as fn(f64) -> NodeKind), ("mm", |x| Length(x, LengthUnit::Mm)), ("pt", |x| Length(x, LengthUnit::Pt)), ("cm", |x| Length(x, LengthUnit::Cm)), @@ -930,54 +977,54 @@ mod tests { t!(Code[""]: "\"hi" => Str("hi", false)); // Test escaped quote. - t!(Code: r#""a\"bc""# => Str(r#"a\"bc"#, true)); - t!(Code[""]: r#""\""# => Str(r#"\""#, false)); + t!(Code: r#""a\"bc""# => Str("a\"bc", true)); + t!(Code[""]: r#""\""# => Str("\"", false)); } #[test] fn test_tokenize_line_comments() { // Test line comment with no trailing newline. - t!(Both[""]: "//" => LineComment("")); + t!(Both[""]: "//" => LineComment); // Test line comment ends at newline. - t!(Both["a1/"]: "//bc\n" => LineComment("bc"), Space(1)); - t!(Both["a1/"]: "// bc \n" => LineComment(" bc "), Space(1)); - t!(Both["a1/"]: "//bc\r\n" => LineComment("bc"), Space(1)); + t!(Both["a1/"]: "//bc\n" => LineComment, Space(1)); + t!(Both["a1/"]: "// bc \n" => LineComment, Space(1)); + t!(Both["a1/"]: "//bc\r\n" => LineComment, Space(1)); // Test nested line comments. - t!(Both["a1/"]: "//a//b\n" => LineComment("a//b"), Space(1)); + t!(Both["a1/"]: "//a//b\n" => LineComment, Space(1)); } #[test] fn test_tokenize_block_comments() { // Test basic block comments. - t!(Both[""]: "/*" => BlockComment("")); - t!(Both: "/**/" => BlockComment("")); - t!(Both: "/*🏞*/" => BlockComment("🏞")); - t!(Both: "/*\n*/" => BlockComment("\n")); + t!(Both[""]: "/*" => BlockComment); + t!(Both: "/**/" => BlockComment); + t!(Both: "/*🏞*/" => BlockComment); + t!(Both: "/*\n*/" => BlockComment); // Test depth 1 and 2 nested block comments. - t!(Both: "/* /* */ */" => BlockComment(" /* */ ")); - t!(Both: "/*/*/**/*/*/" => BlockComment("/*/**/*/")); + t!(Both: "/* /* */ */" => BlockComment); + t!(Both: "/*/*/**/*/*/" => BlockComment); // Test two nested, one unclosed block comments. - t!(Both[""]: "/*/*/**/*/" => BlockComment("/*/**/*/")); + t!(Both[""]: "/*/*/**/*/" => BlockComment); // Test all combinations of up to two following slashes and stars. 
- t!(Both[""]: "/*" => BlockComment("")); - t!(Both[""]: "/*/" => BlockComment("/")); - t!(Both[""]: "/**" => BlockComment("*")); - t!(Both[""]: "/*//" => BlockComment("//")); - t!(Both[""]: "/*/*" => BlockComment("/*")); - t!(Both[""]: "/**/" => BlockComment("")); - t!(Both[""]: "/***" => BlockComment("**")); + t!(Both[""]: "/*" => BlockComment); + t!(Both[""]: "/*/" => BlockComment); + t!(Both[""]: "/**" => BlockComment); + t!(Both[""]: "/*//" => BlockComment); + t!(Both[""]: "/*/*" => BlockComment); + t!(Both[""]: "/**/" => BlockComment); + t!(Both[""]: "/***" => BlockComment); } #[test] fn test_tokenize_invalid() { // Test invalidly closed block comments. - t!(Both: "*/" => Token::Invalid("*/")); - t!(Both: "/**/*/" => BlockComment(""), Token::Invalid("*/")); + t!(Both: "*/" => Invalid("*/")); + t!(Both: "/**/*/" => BlockComment, Invalid("*/")); // Test invalid expressions. t!(Code: r"\" => Invalid(r"\")); @@ -990,6 +1037,6 @@ mod tests { // Test invalid number suffixes. t!(Code[" /"]: "1foo" => Invalid("1foo")); t!(Code: "1p%" => Invalid("1p"), Invalid("%")); - t!(Code: "1%%" => Percent(1.0), Invalid("%")); + t!(Code: "1%%" => Percentage(1.0), Invalid("%")); } } diff --git a/src/source.rs b/src/source.rs index c9164f90b..e33e146c0 100644 --- a/src/source.rs +++ b/src/source.rs @@ -8,8 +8,10 @@ use std::rc::Rc; use serde::{Deserialize, Serialize}; +use crate::diag::{Error, TypResult}; use crate::loading::{FileHash, Loader}; -use crate::parse::{is_newline, Scanner}; +use crate::parse::{is_newline, parse, Scanner}; +use crate::syntax::{GreenNode, Markup, NodeKind, RedNode}; use crate::util::PathExt; #[cfg(feature = "codespan-reporting")] @@ -124,6 +126,7 @@ pub struct SourceFile { path: PathBuf, src: String, line_starts: Vec, + root: Rc, } impl SourceFile { @@ -131,11 +134,28 @@ impl SourceFile { pub fn new(id: SourceId, path: &Path, src: String) -> Self { let mut line_starts = vec![0]; line_starts.extend(newlines(&src)); - Self { + let mut init = Self { id, path: path.normalize(), src, line_starts, + root: Rc::new(GreenNode::new(NodeKind::Markup, 0)), + }; + + let root = parse(&init); + init.root = root; + init + } + + pub fn ast(&self) -> TypResult { + let res = RedNode::new_root(self.root.clone(), self.id); + let errors = res.errors(); + if errors.is_empty() { + Ok(res.ticket().cast().unwrap()) + } else { + Err(Box::new( + errors.into_iter().map(|(span, msg)| Error::new(span, msg)).collect(), + )) } } diff --git a/src/syntax/expr.rs b/src/syntax/expr.rs index 904515bab..d0d0c62fe 100644 --- a/src/syntax/expr.rs +++ b/src/syntax/expr.rs @@ -1,75 +1,50 @@ -use std::rc::Rc; - -use super::{Ident, Markup, Span, Token}; +use super::{Ident, Markup, NodeKind, RedNode, RedTicket, Span, TypedNode}; use crate::geom::{AngularUnit, LengthUnit}; +use crate::node; use crate::util::EcoString; /// An expression. #[derive(Debug, Clone, PartialEq)] pub enum Expr { /// An identifier: `left`. - Ident(Box), + Ident(Ident), /// A literal: `1`, `true`, ... - Lit(Box), + Lit(Lit), /// An array expression: `(1, "hi", 12cm)`. - Array(Box), + Array(ArrayExpr), /// A dictionary expression: `(thickness: 3pt, pattern: dashed)`. - Dict(Box), + Dict(DictExpr), /// A template expression: `[*Hi* there!]`. - Template(Box), + Template(TemplateExpr), /// A grouped expression: `(1 + 2)`. - Group(Box), + Group(GroupExpr), /// A block expression: `{ let x = 1; x + 2 }`. - Block(Box), + Block(BlockExpr), /// A unary operation: `-x`. - Unary(Box), + Unary(UnaryExpr), /// A binary operation: `a + b`. 
- Binary(Box), + Binary(BinaryExpr), /// An invocation of a function: `f(x, y)`. - Call(Box), + Call(CallExpr), /// A closure expression: `(x, y) => z`. - Closure(Box), + Closure(ClosureExpr), /// A with expression: `f with (x, y: 1)`. - With(Box), + With(WithExpr), /// A let expression: `let x = 1`. - Let(Box), + Let(LetExpr), /// An if-else expression: `if x { y } else { z }`. - If(Box), + If(IfExpr), /// A while loop expression: `while x { y }`. - While(Box), + While(WhileExpr), /// A for loop expression: `for x in y { z }`. - For(Box), + For(ForExpr), /// An import expression: `import a, b, c from "utils.typ"`. - Import(Box), + Import(ImportExpr), /// An include expression: `include "chapter1.typ"`. - Include(Box), + Include(IncludeExpr), } impl Expr { - /// The source code location. - pub fn span(&self) -> Span { - match self { - Self::Ident(v) => v.span, - Self::Lit(v) => v.span(), - Self::Array(v) => v.span, - Self::Dict(v) => v.span, - Self::Template(v) => v.span, - Self::Group(v) => v.span, - Self::Block(v) => v.span, - Self::Unary(v) => v.span, - Self::Binary(v) => v.span, - Self::Call(v) => v.span, - Self::Closure(v) => v.span, - Self::With(v) => v.span, - Self::Let(v) => v.span, - Self::If(v) => v.span, - Self::While(v) => v.span, - Self::For(v) => v.span, - Self::Import(v) => v.span, - Self::Include(v) => v.span, - } - } - /// Whether the expression can be shortened in markup with a hashtag. pub fn has_short_form(&self) -> bool { matches!(self, @@ -83,6 +58,63 @@ impl Expr { | Self::Include(_) ) } + + /// Return the expression's span. + pub fn span(&self) -> Span { + match self { + Self::Ident(ident) => ident.span, + Self::Lit(lit) => lit.span(), + Self::Array(array) => array.span(), + Self::Dict(dict) => dict.span(), + Self::Template(template) => template.span(), + Self::Group(group) => group.span(), + Self::Block(block) => block.span(), + Self::Unary(unary) => unary.span(), + Self::Binary(binary) => binary.span(), + Self::Call(call) => call.span(), + Self::Closure(closure) => closure.span(), + Self::With(with) => with.span(), + Self::Let(let_) => let_.span(), + Self::If(if_) => if_.span(), + Self::While(while_) => while_.span(), + Self::For(for_) => for_.span(), + Self::Import(import) => import.span(), + Self::Include(include) => include.span(), + } + } +} + +impl TypedNode for Expr { + fn cast_from(node: RedTicket) -> Option { + match node.kind() { + NodeKind::Ident(_) => Some(Self::Ident(Ident::cast_from(node).unwrap())), + NodeKind::Array => Some(Self::Array(ArrayExpr::cast_from(node).unwrap())), + NodeKind::Dict => Some(Self::Dict(DictExpr::cast_from(node).unwrap())), + NodeKind::Template => { + Some(Self::Template(TemplateExpr::cast_from(node).unwrap())) + } + NodeKind::Group => Some(Self::Group(GroupExpr::cast_from(node).unwrap())), + NodeKind::Block => Some(Self::Block(BlockExpr::cast_from(node).unwrap())), + NodeKind::Unary => Some(Self::Unary(UnaryExpr::cast_from(node).unwrap())), + NodeKind::Binary => Some(Self::Binary(BinaryExpr::cast_from(node).unwrap())), + NodeKind::Call => Some(Self::Call(CallExpr::cast_from(node).unwrap())), + NodeKind::Closure => { + Some(Self::Closure(ClosureExpr::cast_from(node).unwrap())) + } + NodeKind::WithExpr => Some(Self::With(WithExpr::cast_from(node).unwrap())), + NodeKind::LetExpr => Some(Self::Let(LetExpr::cast_from(node).unwrap())), + NodeKind::IfExpr => Some(Self::If(IfExpr::cast_from(node).unwrap())), + NodeKind::WhileExpr => Some(Self::While(WhileExpr::cast_from(node).unwrap())), + NodeKind::ForExpr => 
Some(Self::For(ForExpr::cast_from(node).unwrap())), + NodeKind::ImportExpr => { + Some(Self::Import(ImportExpr::cast_from(node).unwrap())) + } + NodeKind::IncludeExpr => { + Some(Self::Include(IncludeExpr::cast_from(node).unwrap())) + } + _ => Some(Self::Lit(Lit::cast_from(node)?)), + } + } } /// A literal: `1`, `true`, ... @@ -113,94 +145,145 @@ pub enum Lit { Str(Span, EcoString), } -impl Lit { - /// The source code location. - pub fn span(&self) -> Span { - match *self { - Self::None(span) => span, - Self::Auto(span) => span, - Self::Bool(span, _) => span, - Self::Int(span, _) => span, - Self::Float(span, _) => span, - Self::Length(span, _, _) => span, - Self::Angle(span, _, _) => span, - Self::Percent(span, _) => span, - Self::Fractional(span, _) => span, - Self::Str(span, _) => span, +impl TypedNode for Lit { + fn cast_from(node: RedTicket) -> Option { + match node.kind() { + NodeKind::None => Some(Self::None(node.own().span())), + NodeKind::Auto => Some(Self::Auto(node.own().span())), + NodeKind::Bool(b) => Some(Self::Bool(node.own().span(), *b)), + NodeKind::Int(i) => Some(Self::Int(node.own().span(), *i)), + NodeKind::Float(f) => Some(Self::Float(node.own().span(), *f)), + NodeKind::Length(f, unit) => Some(Self::Length(node.own().span(), *f, *unit)), + NodeKind::Angle(f, unit) => Some(Self::Angle(node.own().span(), *f, *unit)), + NodeKind::Percentage(f) => Some(Self::Percent(node.own().span(), *f)), + NodeKind::Fraction(f) => Some(Self::Fractional(node.own().span(), *f)), + NodeKind::Str(s) => Some(Self::Str(node.own().span(), s.string.clone())), + _ => None, } } } -/// An array expression: `(1, "hi", 12cm)`. -#[derive(Debug, Clone, PartialEq)] -pub struct ArrayExpr { - /// The source code location. - pub span: Span, - /// The entries of the array. - pub items: Vec, -} - -/// A dictionary expression: `(thickness: 3pt, pattern: dashed)`. -#[derive(Debug, Clone, PartialEq)] -pub struct DictExpr { - /// The source code location. - pub span: Span, - /// The named dictionary entries. - pub items: Vec, -} - -/// A pair of a name and an expression: `pattern: dashed`. -#[derive(Debug, Clone, PartialEq)] -pub struct Named { - /// The name: `pattern`. - pub name: Ident, - /// The right-hand side of the pair: `dashed`. - pub expr: Expr, -} - -impl Named { - /// The source code location. +impl Lit { pub fn span(&self) -> Span { - self.name.span.join(self.expr.span()) + match self { + Self::None(span) => *span, + Self::Auto(span) => *span, + Self::Bool(span, _) => *span, + Self::Int(span, _) => *span, + Self::Float(span, _) => *span, + Self::Length(span, _, _) => *span, + Self::Angle(span, _, _) => *span, + Self::Percent(span, _) => *span, + Self::Fractional(span, _) => *span, + Self::Str(span, _) => *span, + } } } -/// A template expression: `[*Hi* there!]`. -#[derive(Debug, Clone, PartialEq)] -pub struct TemplateExpr { - /// The source code location. - pub span: Span, +node!( + /// An array expression: `(1, "hi", 12cm)`. + Array => ArrayExpr +); + +impl ArrayExpr { + /// The array items. + pub fn items(&self) -> Vec { + self.0.children().filter_map(RedTicket::cast).collect() + } +} + +node!( + /// A dictionary expression: `(thickness: 3pt, pattern: dashed)`. + Dict => DictExpr +); + +impl DictExpr { + /// The named dictionary items. + pub fn items(&self) -> Vec { + self.0.children().filter_map(RedTicket::cast).collect() + } +} + +node!( + /// A pair of a name and an expression: `pattern: dashed`. + Named +); + +impl Named { + /// The name: `pattern`. 
+ pub fn name(&self) -> Ident { + self.0.cast_first_child().expect("named pair is missing name ident") + } + + /// The right-hand side of the pair: `dashed`. + pub fn expr(&self) -> Expr { + self.0 + .children() + .filter_map(RedTicket::cast) + .nth(1) + .expect("named pair is missing expression") + } +} + +node!( + /// A template expression: `[*Hi* there!]`. + Template => TemplateExpr +); + +impl TemplateExpr { /// The contents of the template. - pub body: Markup, + pub fn body(&self) -> Markup { + self.0 + .cast_first_child() + .expect("template expression is missing body") + } } -/// A grouped expression: `(1 + 2)`. -#[derive(Debug, Clone, PartialEq)] -pub struct GroupExpr { - /// The source code location. - pub span: Span, +node!( + /// A grouped expression: `(1 + 2)`. + Group => GroupExpr +); + +impl GroupExpr { /// The wrapped expression. - pub expr: Expr, + pub fn expr(&self) -> Expr { + self.0 + .cast_first_child() + .expect("group expression is missing expression") + } } -/// A block expression: `{ let x = 1; x + 2 }`. -#[derive(Debug, Clone, PartialEq)] -pub struct BlockExpr { - /// The source code location. - pub span: Span, +node!( + /// A block expression: `{ let x = 1; x + 2 }`. + Block => BlockExpr +); + +impl BlockExpr { /// The list of expressions contained in the block. - pub exprs: Vec, + pub fn exprs(&self) -> Vec { + self.0.children().filter_map(RedTicket::cast).collect() + } } -/// A unary operation: `-x`. -#[derive(Debug, Clone, PartialEq)] -pub struct UnaryExpr { - /// The source code location. - pub span: Span, +node!( + /// A unary operation: `-x`. + Unary => UnaryExpr +); + +impl UnaryExpr { /// The operator: `-`. - pub op: UnOp, + pub fn op(&self) -> UnOp { + self.0 + .cast_first_child() + .expect("unary expression is missing operator") + } + /// The expression to operator on: `x`. - pub expr: Expr, + pub fn expr(&self) -> Expr { + self.0 + .cast_first_child() + .expect("unary expression is missing expression") + } } /// A unary operator. @@ -214,13 +297,19 @@ pub enum UnOp { Not, } +impl TypedNode for UnOp { + fn cast_from(node: RedTicket) -> Option { + Self::from_token(node.kind()) + } +} + impl UnOp { /// Try to convert the token into a unary operation. - pub fn from_token(token: Token) -> Option { + pub fn from_token(token: &NodeKind) -> Option { Some(match token { - Token::Plus => Self::Pos, - Token::Hyph => Self::Neg, - Token::Not => Self::Not, + NodeKind::Plus => Self::Pos, + NodeKind::Minus => Self::Neg, + NodeKind::Not => Self::Not, _ => return None, }) } @@ -229,7 +318,7 @@ impl UnOp { pub fn precedence(self) -> usize { match self { Self::Pos | Self::Neg => 8, - Self::Not => 3, + Self::Not => 4, } } @@ -243,17 +332,34 @@ impl UnOp { } } -/// A binary operation: `a + b`. -#[derive(Debug, Clone, PartialEq)] -pub struct BinaryExpr { - /// The source code location. - pub span: Span, +node!( + /// A binary operation: `a + b`. + Binary => BinaryExpr +); + +impl BinaryExpr { + /// The binary operator: `+`. + pub fn op(&self) -> BinOp { + self.0 + .cast_first_child() + .expect("binary expression is missing operator") + } + /// The left-hand side of the operation: `a`. - pub lhs: Expr, - /// The operator: `+`. - pub op: BinOp, + pub fn lhs(&self) -> Expr { + self.0 + .cast_first_child() + .expect("binary expression is missing left-hand side") + } + /// The right-hand side of the operation: `b`. 
- pub rhs: Expr, + pub fn rhs(&self) -> Expr { + self.0 + .children() + .filter_map(RedTicket::cast) + .nth(1) + .expect("binary expression is missing right-hand side") + } } /// A binary operator. @@ -295,27 +401,33 @@ pub enum BinOp { DivAssign, } +impl TypedNode for BinOp { + fn cast_from(node: RedTicket) -> Option { + Self::from_token(node.kind()) + } +} + impl BinOp { /// Try to convert the token into a binary operation. - pub fn from_token(token: Token) -> Option { + pub fn from_token(token: &NodeKind) -> Option { Some(match token { - Token::Plus => Self::Add, - Token::Hyph => Self::Sub, - Token::Star => Self::Mul, - Token::Slash => Self::Div, - Token::And => Self::And, - Token::Or => Self::Or, - Token::EqEq => Self::Eq, - Token::ExclEq => Self::Neq, - Token::Lt => Self::Lt, - Token::LtEq => Self::Leq, - Token::Gt => Self::Gt, - Token::GtEq => Self::Geq, - Token::Eq => Self::Assign, - Token::PlusEq => Self::AddAssign, - Token::HyphEq => Self::SubAssign, - Token::StarEq => Self::MulAssign, - Token::SlashEq => Self::DivAssign, + NodeKind::Plus => Self::Add, + NodeKind::Minus => Self::Sub, + NodeKind::Star => Self::Mul, + NodeKind::Slash => Self::Div, + NodeKind::And => Self::And, + NodeKind::Or => Self::Or, + NodeKind::EqEq => Self::Eq, + NodeKind::ExclEq => Self::Neq, + NodeKind::Lt => Self::Lt, + NodeKind::LtEq => Self::Leq, + NodeKind::Gt => Self::Gt, + NodeKind::GtEq => Self::Geq, + NodeKind::Eq => Self::Assign, + NodeKind::PlusEq => Self::AddAssign, + NodeKind::HyphEq => Self::SubAssign, + NodeKind::StarEq => Self::MulAssign, + NodeKind::SlashEq => Self::DivAssign, _ => return None, }) } @@ -392,27 +504,35 @@ pub enum Associativity { Right, } -/// An invocation of a function: `foo(...)`. -#[derive(Debug, Clone, PartialEq)] -pub struct CallExpr { - /// The source code location. - pub span: Span, +node!( + /// An invocation of a function: `foo(...)`. + Call => CallExpr +); + +impl CallExpr { /// The function to call. - pub callee: Expr, + pub fn callee(&self) -> Expr { + self.0.cast_first_child().expect("call expression is missing callee") + } + /// The arguments to the function. - pub args: CallArgs, + pub fn args(&self) -> CallArgs { + self.0 + .cast_first_child() + .expect("call expression is missing argument list") + } } -/// The arguments to a function: `12, draw: false`. -/// -/// In case of a bracketed invocation with a body, the body is _not_ -/// included in the span for the sake of clearer error messages. -#[derive(Debug, Clone, PartialEq)] -pub struct CallArgs { - /// The source code location. - pub span: Span, +node!( + /// The arguments to a function: `12, draw: false`. + CallArgs +); + +impl CallArgs { /// The positional and named arguments. - pub items: Vec, + pub fn items(&self) -> Vec { + self.0.children().filter_map(RedTicket::cast).collect() + } } /// An argument to a function call. @@ -426,30 +546,75 @@ pub enum CallArg { Spread(Expr), } +impl TypedNode for CallArg { + fn cast_from(node: RedTicket) -> Option { + match node.kind() { + NodeKind::Named => Some(CallArg::Named( + node.cast().expect("named call argument is missing name"), + )), + NodeKind::ParameterSink => Some(CallArg::Spread( + node.own() + .cast_first_child() + .expect("call argument sink is missing expression"), + )), + _ => Some(CallArg::Pos(node.cast()?)), + } + } +} + impl CallArg { - /// The source code location. + /// The name of this argument. 
pub fn span(&self) -> Span { match self { - Self::Pos(expr) => expr.span(), Self::Named(named) => named.span(), + Self::Pos(expr) => expr.span(), Self::Spread(expr) => expr.span(), } } } -/// A closure expression: `(x, y) => z`. -#[derive(Debug, Clone, PartialEq)] -pub struct ClosureExpr { - /// The source code location. - pub span: Span, +node!( + /// A closure expression: `(x, y) => z`. + Closure => ClosureExpr +); + +impl ClosureExpr { /// The name of the closure. /// /// This only exists if you use the function syntax sugar: `let f(x) = y`. - pub name: Option, + pub fn name(&self) -> Option { + // `first_convert_child` does not work here because of the Option in the + // Result. + self.0.cast_first_child() + } + /// The parameter bindings. - pub params: Vec, + pub fn params(&self) -> Vec { + self.0 + .children() + .find(|x| x.kind() == &NodeKind::ClosureParams) + .expect("closure is missing parameter list") + .own() + .children() + .filter_map(RedTicket::cast) + .collect() + } + /// The body of the closure. - pub body: Rc, + pub fn body(&self) -> Expr { + // The filtering for the NodeKind is necessary here because otherwise, + // `first_convert_child` will use the Ident if present. + self.0.cast_last_child().expect("closure is missing body") + } + + /// The ticket of the body of the closure. + pub fn body_ticket(&self) -> RedTicket { + self.0 + .children() + .filter(|x| x.cast::().is_some()) + .last() + .unwrap() + } } /// An parameter to a closure. @@ -463,50 +628,111 @@ pub enum ClosureParam { Sink(Ident), } -impl ClosureParam { - /// The source code location. - pub fn span(&self) -> Span { - match self { - Self::Pos(ident) => ident.span, - Self::Named(named) => named.span(), - Self::Sink(ident) => ident.span, +impl TypedNode for ClosureParam { + fn cast_from(node: RedTicket) -> Option { + match node.kind() { + NodeKind::Ident(i) => { + Some(ClosureParam::Pos(Ident::new(i, node.own().span()).unwrap())) + } + NodeKind::Named => Some(ClosureParam::Named( + node.cast().expect("named closure parameter is missing name"), + )), + NodeKind::ParameterSink => Some(ClosureParam::Sink( + node.own() + .cast_first_child() + .expect("closure parameter sink is missing identifier"), + )), + _ => Some(ClosureParam::Pos(node.cast()?)), } } } -/// A with expression: `f with (x, y: 1)`. -/// -/// Applies arguments to a function. -#[derive(Debug, Clone, PartialEq)] -pub struct WithExpr { - /// The source code location. - pub span: Span, +node!( + /// A with expression: `f with (x, y: 1)`. + WithExpr +); + +impl WithExpr { /// The function to apply the arguments to. - pub callee: Expr, + pub fn callee(&self) -> Expr { + self.0 + .cast_first_child() + .expect("with expression is missing callee expression") + } + /// The arguments to apply to the function. - pub args: CallArgs, + pub fn args(&self) -> CallArgs { + self.0 + .cast_first_child() + .expect("with expression is missing argument list") + } } -/// A let expression: `let x = 1`. -#[derive(Debug, Clone, PartialEq)] -pub struct LetExpr { - /// The source code location. - pub span: Span, +node!( + /// A let expression: `let x = 1`. + LetExpr +); + +impl LetExpr { /// The binding to assign to. - pub binding: Ident, + pub fn binding(&self) -> Ident { + if let Some(c) = self.0.cast_first_child() { + c + } else if let Some(w) = self.0.typed_child(&NodeKind::WithExpr) { + // Can't do an `first_convert_child` here because the WithExpr's + // callee has to be an identifier. 
+ w.cast_first_child() + .expect("with expression is missing an identifier callee") + } else if let Some(Expr::Closure(c)) = self.0.cast_last_child() { + c.name().expect("closure is missing an identifier name") + } else { + panic!("let expression is missing either an identifier or a with expression") + } + } + /// The expression the binding is initialized with. - pub init: Option, + pub fn init(&self) -> Option { + if self.0.cast_first_child::().is_some() { + self.0.children().filter_map(RedTicket::cast).nth(1) + } else { + Some( + self.0 + .cast_first_child() + .expect("let expression is missing a with expression"), + ) + } + } + + /// The ticket for the expression the binding is initialized with. + pub fn init_ticket(&self) -> RedTicket { + if self.0.cast_first_child::().is_some() { + self.0.children().filter(|x| x.cast::().is_some()).nth(1) + } else { + self.0.children().find(|x| x.cast::().is_some()) + } + .unwrap() + } } -/// An import expression: `import a, b, c from "utils.typ"`. -#[derive(Debug, Clone, PartialEq)] -pub struct ImportExpr { - /// The source code location. - pub span: Span, +node!( + /// An import expression: `import a, b, c from "utils.typ"`. + ImportExpr +); + +impl ImportExpr { /// The items to be imported. - pub imports: Imports, + pub fn imports(&self) -> Imports { + self.0 + .cast_first_child() + .expect("import expression is missing import list") + } + /// The location of the importable file. - pub path: Expr, + pub fn path(&self) -> Expr { + self.0 + .cast_first_child() + .expect("import expression is missing path expression") + } } /// The items that ought to be imported from a file. @@ -518,67 +744,137 @@ pub enum Imports { Idents(Vec), } -/// An include expression: `include "chapter1.typ"`. -#[derive(Debug, Clone, PartialEq)] -pub struct IncludeExpr { - /// The source code location. - pub span: Span, - /// The location of the file to be included. - pub path: Expr, -} - -/// An if-else expression: `if x { y } else { z }`. -#[derive(Debug, Clone, PartialEq)] -pub struct IfExpr { - /// The source code location. - pub span: Span, - /// The condition which selects the body to evaluate. - pub condition: Expr, - /// The expression to evaluate if the condition is true. - pub if_body: Expr, - /// The expression to evaluate if the condition is false. - pub else_body: Option, -} - -/// A while loop expression: `while x { y }`. -#[derive(Debug, Clone, PartialEq)] -pub struct WhileExpr { - /// The source code location. - pub span: Span, - /// The condition which selects whether to evaluate the body. - pub condition: Expr, - /// The expression to evaluate while the condition is true. - pub body: Expr, -} - -/// A for loop expression: `for x in y { z }`. -#[derive(Debug, Clone, PartialEq)] -pub struct ForExpr { - /// The source code location. - pub span: Span, - /// The pattern to assign to. - pub pattern: ForPattern, - /// The expression to iterate over. - pub iter: Expr, - /// The expression to evaluate for each iteration. - pub body: Expr, -} - -/// A pattern in a for loop. -#[derive(Debug, Clone, PartialEq)] -pub enum ForPattern { - /// A value pattern: `for v in array`. - Value(Ident), - /// A key-value pattern: `for k, v in dict`. - KeyValue(Ident, Ident), -} - -impl ForPattern { - /// The source code location. 
- pub fn span(&self) -> Span { - match self { - Self::Value(v) => v.span, - Self::KeyValue(k, v) => k.span.join(v.span), +impl TypedNode for Imports { + fn cast_from(node: RedTicket) -> Option { + match node.kind() { + NodeKind::Star => Some(Imports::Wildcard), + NodeKind::ImportItems => { + let idents = node.own().children().filter_map(RedTicket::cast).collect(); + Some(Imports::Idents(idents)) + } + _ => None, } } } + +node!( + /// An include expression: `include "chapter1.typ"`. + IncludeExpr +); + +impl IncludeExpr { + /// The location of the file to be included. + pub fn path(&self) -> Expr { + self.0 + .cast_first_child() + .expect("include expression is missing path expression") + } +} + +node!( + /// An if-else expression: `if x { y } else { z }`. + IfExpr +); + +impl IfExpr { + /// The condition which selects the body to evaluate. + pub fn condition(&self) -> Expr { + self.0 + .cast_first_child() + .expect("if expression is missing condition expression") + } + + /// The expression to evaluate if the condition is true. + pub fn if_body(&self) -> Expr { + self.0 + .children() + .filter_map(RedTicket::cast) + .nth(1) + .expect("if expression is missing if body") + } + + /// The expression to evaluate if the condition is false. + pub fn else_body(&self) -> Option { + self.0.children().filter_map(RedTicket::cast).nth(2) + } +} + +node!( + /// A while loop expression: `while x { y }`. + WhileExpr +); + +impl WhileExpr { + /// The condition which selects whether to evaluate the body. + pub fn condition(&self) -> Expr { + self.0 + .cast_first_child() + .expect("while loop expression is missing condition expression") + } + + /// The expression to evaluate while the condition is true. + pub fn body(&self) -> Expr { + self.0 + .children() + .filter_map(RedTicket::cast) + .nth(1) + .expect("while loop expression is missing body") + } +} + +node!( + /// A for loop expression: `for x in y { z }`. + ForExpr +); + +impl ForExpr { + /// The pattern to assign to. + pub fn pattern(&self) -> ForPattern { + self.0 + .cast_first_child() + .expect("for loop expression is missing pattern") + } + + /// The expression to iterate over. + pub fn iter(&self) -> Expr { + self.0 + .cast_first_child() + .expect("for loop expression is missing iterable expression") + } + + /// The expression to evaluate for each iteration. + pub fn body(&self) -> Expr { + self.0 + .children() + .filter_map(RedTicket::cast) + .last() + .expect("for loop expression is missing body") + } + + /// The ticket for the expression to evaluate for each iteration. + pub fn body_ticket(&self) -> RedTicket { + self.0 + .children() + .filter(|x| x.cast::().is_some()) + .last() + .unwrap() + } +} + +node!( + /// A for-in loop expression: `for x in y { z }`. + ForPattern +); + +impl ForPattern { + pub fn key(&self) -> Option { + let mut items: Vec<_> = self.0.children().filter_map(RedTicket::cast).collect(); + if items.len() > 1 { Some(items.remove(0)) } else { None } + } + + pub fn value(&self) -> Ident { + self.0 + .cast_last_child() + .expect("for-in loop pattern is missing value") + } +} diff --git a/src/syntax/ident.rs b/src/syntax/ident.rs index 398e2ff98..2c61329d1 100644 --- a/src/syntax/ident.rs +++ b/src/syntax/ident.rs @@ -3,7 +3,7 @@ use std::ops::Deref; use unicode_xid::UnicodeXID; -use super::Span; +use super::{NodeKind, RedTicket, Span, TypedNode}; use crate::util::EcoString; /// An unicode identifier with a few extra permissible characters. 
@@ -66,6 +66,16 @@ impl From<&Ident> for EcoString { } } +impl TypedNode for Ident { + fn cast_from(node: RedTicket) -> Option { + if let NodeKind::Ident(i) = node.kind() { + Some(Ident::new(i, node.own().span()).unwrap()) + } else { + None + } + } +} + /// Whether a string is a valid identifier. pub fn is_ident(string: &str) -> bool { let mut chars = string.chars(); diff --git a/src/syntax/markup.rs b/src/syntax/markup.rs index 09a371161..c12c0e819 100644 --- a/src/syntax/markup.rs +++ b/src/syntax/markup.rs @@ -1,41 +1,87 @@ -use super::{Expr, Ident, Span}; +use super::{Expr, Ident, NodeKind, RedNode, RedTicket, Span, TypedNode}; +use crate::node; use crate::util::EcoString; +use std::fmt::Write; /// The syntactical root capable of representing a full parsed document. pub type Markup = Vec; +impl TypedNode for Markup { + fn cast_from(node: RedTicket) -> Option { + if node.kind() != &NodeKind::Markup { + return None; + } + + let children = node.own().children().filter_map(TypedNode::cast_from).collect(); + Some(children) + } +} + /// A single piece of markup. #[derive(Debug, Clone, PartialEq)] pub enum MarkupNode { /// Whitespace containing less than two newlines. Space, /// A forced line break: `\`. - Linebreak(Span), + Linebreak, /// A paragraph break: Two or more newlines. - Parbreak(Span), + Parbreak, /// Strong text was enabled / disabled: `*`. - Strong(Span), + Strong, /// Emphasized text was enabled / disabled: `_`. - Emph(Span), + Emph, /// Plain text. Text(EcoString), /// A raw block with optional syntax highlighting: `` `...` ``. - Raw(Box), + Raw(RawNode), /// A section heading: `= Introduction`. - Heading(Box), + Heading(HeadingNode), /// An item in an unordered list: `- ...`. - List(Box), + List(ListNode), /// An item in an enumeration (ordered list): `1. ...`. - Enum(Box), + Enum(EnumNode), /// An expression. Expr(Expr), } +impl TypedNode for MarkupNode { + fn cast_from(node: RedTicket) -> Option { + match node.kind() { + NodeKind::Space(_) => Some(MarkupNode::Space), + NodeKind::Linebreak => Some(MarkupNode::Linebreak), + NodeKind::Parbreak => Some(MarkupNode::Parbreak), + NodeKind::Strong => Some(MarkupNode::Strong), + NodeKind::Emph => Some(MarkupNode::Emph), + NodeKind::Text(s) => Some(MarkupNode::Text(s.clone())), + NodeKind::UnicodeEscape(u) => { + Some(MarkupNode::Text(if let Some(s) = u.character { + s.into() + } else { + let mut eco = EcoString::with_capacity(u.sequence.len() + 4); + write!(&mut eco, "\\u{{{}}}", u.sequence).unwrap(); + eco + })) + } + NodeKind::EnDash => Some(MarkupNode::Text(EcoString::from("\u{2013}"))), + NodeKind::EmDash => Some(MarkupNode::Text(EcoString::from("\u{2014}"))), + NodeKind::NonBreakingSpace => { + Some(MarkupNode::Text(EcoString::from("\u{00A0}"))) + } + NodeKind::Raw(_) => Some(MarkupNode::Raw(RawNode::cast_from(node).unwrap())), + NodeKind::Heading => { + Some(MarkupNode::Heading(HeadingNode::cast_from(node).unwrap())) + } + NodeKind::List => Some(MarkupNode::List(ListNode::cast_from(node).unwrap())), + NodeKind::Enum => Some(MarkupNode::Enum(EnumNode::cast_from(node).unwrap())), + NodeKind::Error(_, _) => None, + _ => Some(MarkupNode::Expr(Expr::cast_from(node)?)), + } + } +} + /// A raw block with optional syntax highlighting: `` `...` ``. #[derive(Debug, Clone, PartialEq)] pub struct RawNode { - /// The source code location. - pub span: Span, /// An optional identifier specifying the language to syntax-highlight in. 
pub lang: Option, /// The raw text, determined as the raw string between the backticks trimmed @@ -46,33 +92,97 @@ pub struct RawNode { pub block: bool, } -/// A section heading: `= Introduction`. -#[derive(Debug, Clone, PartialEq)] -pub struct HeadingNode { - /// The source code location. - pub span: Span, - /// The section depth (numer of equals signs). - pub level: usize, +impl TypedNode for RawNode { + fn cast_from(node: RedTicket) -> Option { + if let NodeKind::Raw(raw) = node.kind() { + let span = node.own().span(); + let start = span.start + raw.backticks as usize; + Some(Self { + block: raw.block, + lang: raw.lang.as_ref().and_then(|x| { + let span = Span::new(span.source, start, start + x.len()); + Ident::new(x, span) + }), + text: raw.text.clone(), + }) + } else { + None + } + } +} + +node!( + /// A section heading: `= Introduction`. + Heading => HeadingNode +); + +impl HeadingNode { /// The contents of the heading. - pub body: Markup, + pub fn body(&self) -> Markup { + self.0 + .cast_first_child() + .expect("heading node is missing markup body") + } + + /// The section depth (numer of equals signs). + pub fn level(&self) -> HeadingLevel { + self.0 + .cast_first_child() + .expect("heading node is missing heading level") + } } -/// An item in an unordered list: `- ...`. -#[derive(Debug, Clone, PartialEq)] -pub struct ListNode { - /// The source code location. - pub span: Span, +#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct HeadingLevel(pub usize); + +impl TypedNode for HeadingLevel { + fn cast_from(node: RedTicket) -> Option { + if let NodeKind::HeadingLevel(l) = node.kind() { + Some(Self((*l).into())) + } else { + None + } + } +} + +node!( + /// An item in an unordered list: `- ...`. + List => ListNode +); + +impl ListNode { /// The contents of the list item. - pub body: Markup, + pub fn body(&self) -> Markup { + self.0.cast_first_child().expect("list node is missing body") + } } -/// An item in an enumeration (ordered list): `1. ...`. -#[derive(Debug, Clone, PartialEq)] -pub struct EnumNode { - /// The source code location. - pub span: Span, +node!( + /// An item in an enumeration (ordered list): `1. ...`. + Enum => EnumNode +); + +impl EnumNode { + /// The contents of the list item. + pub fn body(&self) -> Markup { + self.0.cast_first_child().expect("enumeration node is missing body") + } + /// The number, if any. - pub number: Option, - /// The contents of the list item. - pub body: Markup, + pub fn number(&self) -> EnumNumber { + self.0.cast_first_child().expect("enumeration node is missing number") + } +} + +#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct EnumNumber(pub Option); + +impl TypedNode for EnumNumber { + fn cast_from(node: RedTicket) -> Option { + if let NodeKind::EnumNumbering(x) = node.kind() { + Some(Self(*x)) + } else { + None + } + } } diff --git a/src/syntax/mod.rs b/src/syntax/mod.rs index 8dbb108de..88757f8ea 100644 --- a/src/syntax/mod.rs +++ b/src/syntax/mod.rs @@ -6,7 +6,11 @@ mod markup; mod pretty; mod span; mod token; -pub mod visit; + +use std::fmt; +use std::fmt::{Debug, Display, Formatter}; +use std::mem; +use std::rc::Rc; pub use expr::*; pub use ident::*; @@ -14,3 +18,685 @@ pub use markup::*; pub use pretty::*; pub use span::*; pub use token::*; + +use crate::geom::{AngularUnit, LengthUnit}; +use crate::source::SourceId; +use crate::util::EcoString; + +#[derive(Debug, Clone, PartialEq)] +pub enum NodeKind { + /// A left square bracket: `[`. 
+ LeftBracket, + /// A right square bracket: `]`. + RightBracket, + /// A left curly brace: `{`. + LeftBrace, + /// A right curly brace: `}`. + RightBrace, + /// A left round parenthesis: `(`. + LeftParen, + /// A right round parenthesis: `)`. + RightParen, + /// An asterisk: `*`. + Star, + /// A comma: `,`. + Comma, + /// A semicolon: `;`. + Semicolon, + /// A colon: `:`. + Colon, + /// A plus: `+`. + Plus, + /// A hyphen: `-`. + Minus, + /// A slash: `/`. + Slash, + /// A single equals sign: `=`. + Eq, + /// Two equals signs: `==`. + EqEq, + /// An exclamation mark followed by an equals sign: `!=`. + ExclEq, + /// A less-than sign: `<`. + Lt, + /// A less-than sign followed by an equals sign: `<=`. + LtEq, + /// A greater-than sign: `>`. + Gt, + /// A greater-than sign followed by an equals sign: `>=`. + GtEq, + /// A plus followed by an equals sign: `+=`. + PlusEq, + /// A hyphen followed by an equals sign: `-=`. + HyphEq, + /// An asterisk followed by an equals sign: `*=`. + StarEq, + /// A slash followed by an equals sign: `/=`. + SlashEq, + /// Two dots: `..`. + Dots, + /// An equals sign followed by a greater-than sign: `=>`. + Arrow, + /// The `not` operator. + Not, + /// The `and` operator. + And, + /// The `or` operator. + Or, + /// The `with` operator. + With, + /// The `with` expression: `with (1)`. + WithExpr, + /// The none literal: `none`. + None, + /// The auto literal: `auto`. + Auto, + /// The `let` keyword. + Let, + /// The `if` keyword. + If, + /// The `else` keyword. + Else, + /// The `for` keyword. + For, + /// The `in` keyword. + In, + /// The `while` keyword. + While, + /// The `break` keyword. + Break, + /// The `continue` keyword. + Continue, + /// The `return` keyword. + Return, + /// The `import` keyword. + Import, + /// The `include` keyword. + Include, + /// The `from` keyword. + From, + /// One or more whitespace characters. + Space(usize), + /// A consecutive non-markup string. + Text(EcoString), + /// A slash and the letter "u" followed by a hexadecimal unicode entity + /// enclosed in curly braces: `\u{1F5FA}`. + UnicodeEscape(UnicodeEscapeToken), + /// An arbitrary number of backticks followed by inner contents, terminated + /// with the same number of backticks: `` `...` ``. + Raw(RawToken), + /// Dollar signs surrounding inner contents. + Math(MathToken), + /// A numbering: `23.`. + /// + /// Can also exist without the number: `.`. + EnumNumbering(Option), + /// An identifier: `center`. + Ident(EcoString), + /// A boolean: `true`, `false`. + Bool(bool), + /// An integer: `120`. + Int(i64), + /// A floating-point number: `1.2`, `10e-4`. + Float(f64), + /// A length: `12pt`, `3cm`. + Length(f64, LengthUnit), + /// An angle: `90deg`. + Angle(f64, AngularUnit), + /// A percentage: `50%`. + /// + /// _Note_: `50%` is stored as `50.0` here, as in the corresponding + /// [literal](super::Lit::Percent). + Percentage(f64), + /// A fraction unit: `3fr`. + Fraction(f64), + /// A quoted string: `"..."`. + Str(StrToken), + /// Two slashes followed by inner contents, terminated with a newline: + /// `//\n`. + LineComment, + /// A slash and a star followed by inner contents, terminated with a star + /// and a slash: `/**/`. + /// + /// The comment can contain nested block comments. + BlockComment, + /// A node that should never appear in a finished tree. + Never, + /// Tokens that appear in the wrong place. + Error(ErrorPosition, EcoString), + /// Template markup. + Markup, + /// A forced line break: `\`. + Linebreak, + /// A paragraph break: Two or more newlines. 
+ Parbreak, + /// Strong text was enabled / disabled: `*`. + Strong, + /// Emphasized text was enabled / disabled: `_`. + Emph, + /// A non-breaking space: `~`. + NonBreakingSpace, + /// An en-dash: `--`. + EnDash, + /// An em-dash: `---`. + EmDash, + /// A section heading: `= Introduction`. + Heading, + /// A heading's level: `=`, `==`, `===`, etc. + HeadingLevel(u8), + /// An item in an unordered list: `- ...`. + List, + /// The bullet character of an item in an unordered list: `-`. + ListBullet, + /// An item in an enumeration (ordered list): `1. ...`. + Enum, + /// An array expression: `(1, "hi", 12cm)`. + Array, + /// A dictionary expression: `(thickness: 3pt, pattern: dashed)`. + Dict, + /// A named argument: `thickness: 3pt`. + Named, + /// A template expression: `[*Hi* there!]`. + Template, + /// A grouped expression: `(1 + 2)`. + Group, + /// A block expression: `{ let x = 1; x + 2 }`. + Block, + /// A unary operation: `-x`. + Unary, + /// A binary operation: `a + b`. + Binary, + /// An invocation of a function: `f(x, y)`. + Call, + /// A function call's argument list: `(x, y)`. + CallArgs, + /// A closure expression: `(x, y) => z`. + Closure, + /// A closure's parameters: `(x, y)`. + ClosureParams, + /// A parameter sink: `..x`. + ParameterSink, + /// A for loop expression: `for x in y { ... }`. + ForExpr, + /// A while loop expression: `while x { ... }`. + WhileExpr, + /// An if expression: `if x { ... }`. + IfExpr, + /// A let expression: `let x = 1`. + LetExpr, + /// A for loop's destructuring pattern: `x` or `x, y`. + ForPattern, + /// The import expression: `import x from "foo.typ"`. + ImportExpr, + /// Items to import: `a, b, c`. + ImportItems, + /// The include expression: `include "foo.typ"`. + IncludeExpr, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum ErrorPosition { + /// At the start of the node. + Start, + /// Over the full width of the node. + Full, + /// At the end of the node. 
+ End, +} + +impl Display for NodeKind { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.pad(match self { + Self::LeftBracket => "opening bracket", + Self::RightBracket => "closing bracket", + Self::LeftBrace => "opening brace", + Self::RightBrace => "closing brace", + Self::LeftParen => "opening paren", + Self::RightParen => "closing paren", + Self::Star => "star", + Self::Comma => "comma", + Self::Semicolon => "semicolon", + Self::Colon => "colon", + Self::Plus => "plus", + Self::Minus => "minus", + Self::Slash => "slash", + Self::Eq => "assignment operator", + Self::EqEq => "equality operator", + Self::ExclEq => "inequality operator", + Self::Lt => "less-than operator", + Self::LtEq => "less-than or equal operator", + Self::Gt => "greater-than operator", + Self::GtEq => "greater-than or equal operator", + Self::PlusEq => "add-assign operator", + Self::HyphEq => "subtract-assign operator", + Self::StarEq => "multiply-assign operator", + Self::SlashEq => "divide-assign operator", + Self::Dots => "dots", + Self::Arrow => "arrow", + Self::Not => "operator `not`", + Self::And => "operator `and`", + Self::Or => "operator `or`", + Self::With => "operator `with`", + Self::WithExpr => "`with` expression", + Self::None => "`none`", + Self::Auto => "`auto`", + Self::Let => "keyword `let`", + Self::If => "keyword `if`", + Self::Else => "keyword `else`", + Self::For => "keyword `for`", + Self::In => "keyword `in`", + Self::While => "keyword `while`", + Self::Break => "keyword `break`", + Self::Continue => "keyword `continue`", + Self::Return => "keyword `return`", + Self::Import => "keyword `import`", + Self::Include => "keyword `include`", + Self::From => "keyword `from`", + Self::Space(_) => "space", + Self::Math(_) => "math formula", + Self::EnumNumbering(_) => "numbering", + Self::Str(_) => "string", + Self::Never => "a node that should not be here", + Self::LineComment => "line comment", + Self::BlockComment => "block comment", + Self::Markup => "markup", + Self::Linebreak => "forced linebreak", + Self::Parbreak => "paragraph break", + Self::Strong => "strong", + Self::Emph => "emphasis", + Self::Text(_) => "text", + Self::NonBreakingSpace => "non-breaking space", + Self::EnDash => "en dash", + Self::EmDash => "em dash", + Self::UnicodeEscape(_) => "unicode escape sequence", + Self::Raw(_) => "raw block", + Self::Heading => "heading", + Self::HeadingLevel(_) => "heading level", + Self::List => "list", + Self::ListBullet => "list bullet", + Self::Enum => "enum", + Self::Ident(_) => "identifier", + Self::Bool(_) => "boolean", + Self::Int(_) => "integer", + Self::Float(_) => "float", + Self::Length(_, _) => "length", + Self::Angle(_, _) => "angle", + Self::Percentage(_) => "percentage", + Self::Fraction(_) => "`fr` value", + Self::Array => "array", + Self::Dict => "dictionary", + Self::Named => "named argument", + Self::Template => "template", + Self::Group => "group", + Self::Block => "block", + Self::Unary => "unary expression", + Self::Binary => "binary expression", + Self::Call => "call", + Self::CallArgs => "call arguments", + Self::Closure => "closure", + Self::ClosureParams => "closure parameters", + Self::ParameterSink => "parameter sink", + Self::ForExpr => "for-loop expression", + Self::WhileExpr => "while-loop expression", + Self::IfExpr => "if expression", + Self::LetExpr => "let expression", + Self::ForPattern => "for-loop destructuring pattern", + Self::ImportExpr => "import expression", + Self::ImportItems => "import items", + Self::IncludeExpr => "include 
expression", + Self::Error(_, src) => match src.as_str() { + "*/" => "end of block comment", + _ => "invalid token", + }, + }) + } +} + +impl NodeKind { + pub fn is_parenthesis(&self) -> bool { + match self { + Self::LeftParen => true, + Self::RightParen => true, + _ => false, + } + } + + pub fn is_bracket(&self) -> bool { + match self { + Self::LeftBracket => true, + Self::RightBracket => true, + _ => false, + } + } + + pub fn is_brace(&self) -> bool { + match self { + Self::LeftBrace => true, + Self::RightBrace => true, + _ => false, + } + } + + pub fn is_error(&self) -> bool { + matches!(self, NodeKind::Never | NodeKind::Error(_, _)) + } +} + +/// A syntactical node. +#[derive(Clone, PartialEq)] +pub struct GreenNode { + /// Node metadata. + meta: GreenData, + /// This node's children, losslessly make up this node. + children: Vec, +} + +/// Data shared between [`GreenNode`]s and [`GreenToken`]s. +#[derive(Clone, PartialEq)] +pub struct GreenData { + /// What kind of node this is (each kind would have its own struct in a + /// strongly typed AST). + kind: NodeKind, + /// The byte length of the node in the source. + len: usize, + /// Whether this node or any of its children are erroneous. + has_error: bool, +} + +impl GreenData { + pub fn new(kind: NodeKind, len: usize) -> Self { + Self { len, has_error: kind.is_error(), kind } + } + + pub fn kind(&self) -> &NodeKind { + &self.kind + } + + pub fn len(&self) -> usize { + self.len + } + + pub fn has_error(&self) -> bool { + self.has_error + } +} + +impl From for Green { + fn from(token: GreenData) -> Self { + Self::Token(token) + } +} + +/// Children of a [`GreenNode`]. +#[derive(Clone, PartialEq)] +pub enum Green { + /// A terminal owned token. + Token(GreenData), + /// A non-terminal node in an Rc. 
+ Node(Rc), +} + +impl Green { + fn meta(&self) -> &GreenData { + match self { + Green::Token(t) => &t, + Green::Node(n) => &n.meta, + } + } + + pub fn kind(&self) -> &NodeKind { + self.meta().kind() + } + + pub fn len(&self) -> usize { + self.meta().len() + } + + pub fn has_error(&self) -> bool { + self.meta().has_error() + } + + pub fn children(&self) -> &[Green] { + match self { + Green::Token(_) => &[], + Green::Node(n) => &n.children(), + } + } +} + +impl GreenNode { + pub fn new(kind: NodeKind, len: usize) -> Self { + Self { + meta: GreenData::new(kind, len), + children: Vec::new(), + } + } + + pub fn with_children( + kind: NodeKind, + len: usize, + children: impl Iterator>, + ) -> Self { + let mut meta = GreenData::new(kind, len); + let children = children + .map(|x| { + let x = x.into(); + meta.has_error |= x.has_error(); + x + }) + .collect(); + Self { meta, children } + } + + pub fn with_child(kind: NodeKind, len: usize, child: impl Into) -> Self { + Self::with_children(kind, len, std::iter::once(child.into())) + } + + pub fn children(&self) -> &[Green] { + &self.children + } +} + +impl From for Green { + fn from(node: GreenNode) -> Self { + Rc::new(node).into() + } +} + +impl From> for Green { + fn from(node: Rc) -> Self { + Self::Node(node) + } +} + +impl Default for Green { + fn default() -> Self { + Self::Token(GreenData::new(NodeKind::Never, 0)) + } +} + +impl Debug for Green { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "{:?}: {}", self.kind(), self.len())?; + if let Self::Node(n) = self { + if !n.children.is_empty() { + f.write_str(" ")?; + f.debug_list().entries(&n.children).finish()?; + } + } + + Ok(()) + } +} + +#[derive(Copy, Clone, PartialEq)] +pub struct RedTicket<'a> { + id: SourceId, + offset: usize, + green: &'a Green, +} + +impl<'a> RedTicket<'a> { + pub fn own(self) -> RedNode { + RedNode { + id: self.id, + offset: self.offset, + green: self.green.clone(), + } + } + + pub fn kind(&self) -> &NodeKind { + self.green.kind() + } + + + pub fn cast(self) -> Option + where + T: TypedNode, + { + T::cast_from(self) + } +} + +#[derive(Clone, PartialEq)] +pub struct RedNode { + id: SourceId, + offset: usize, + green: Green, +} + +impl RedNode { + pub fn new_root(root: Rc, id: SourceId) -> Self { + Self { id, offset: 0, green: root.into() } + } + + pub fn span(&self) -> Span { + Span::new(self.id, self.offset, self.offset + self.green.len()) + } + + pub fn len(&self) -> usize { + self.green.len() + } + + pub fn kind(&self) -> &NodeKind { + self.green.kind() + } + + pub fn children<'a>(&'a self) -> impl Iterator> + Clone + 'a { + let children = match &self.green { + Green::Node(node) => node.children(), + Green::Token(_) => &[], + }; + + let mut offset = self.offset; + children.iter().map(move |green_child| { + let child_offset = offset; + offset += green_child.len(); + RedTicket { + id: self.id, + offset: child_offset, + green: &green_child, + } + }) + } + + pub fn has_error(&self) -> bool { + self.green.has_error() + } + + pub fn errors(&self) -> Vec<(Span, EcoString)> { + if !self.green.has_error() { + return vec![]; + } + + if let NodeKind::Error(pos, msg) = self.kind() { + let span = match pos { + ErrorPosition::Start => self.span().at_start(), + ErrorPosition::Full => self.span(), + ErrorPosition::End => self.span().at_end(), + }; + + vec![(span, msg.clone())] + } else if let NodeKind::Never = self.kind() { + vec![(self.span(), "found a never node".into())] + } else { + self.children() + .filter(|ticket| ticket.green.has_error()) + 
.flat_map(|ticket| ticket.own().errors()) + .collect() + } + } + + pub fn ticket<'a>(&'a self) -> RedTicket<'a> { + RedTicket { + id: self.id, + offset: self.offset, + green: &self.green, + } + } + + pub(crate) fn typed_child(&self, kind: &NodeKind) -> Option { + self.children() + .find(|x| mem::discriminant(x.kind()) == mem::discriminant(kind)) + .map(RedTicket::own) + } + + pub(crate) fn cast_first_child(&self) -> Option { + self.children().find_map(RedTicket::cast) + } + + pub(crate) fn cast_last_child(&self) -> Option { + self.children().filter_map(RedTicket::cast).last() + } +} + +impl Debug for RedNode { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "{:?}: {:?}", self.kind(), self.span())?; + let children = self.children().collect::>(); + if !children.is_empty() { + f.write_str(" ")?; + f.debug_list() + .entries(children.into_iter().map(RedTicket::own)) + .finish()?; + } + Ok(()) + } +} + +pub trait TypedNode: Sized { + /// Performs the conversion. + fn cast_from(value: RedTicket) -> Option; +} + +#[macro_export] +macro_rules! node { + (#[doc = $doc:expr] $name:ident) => { + node!(#[doc = $doc] $name => $name); + }; + (#[doc = $doc:expr] $variant:ident => $name:ident) => { + #[doc = $doc] + #[derive(Debug, Clone, PartialEq)] + pub struct $name(RedNode); + + impl TypedNode for $name { + fn cast_from(node: RedTicket) -> Option { + if node.kind() != &NodeKind::$variant { + return None; + } + + Some(Self(node.own())) + } + } + + impl $name { + pub fn span(&self) -> Span { + self.0.span() + } + + pub fn underlying(&self) -> RedTicket { + self.0.ticket() + } + } + }; +} diff --git a/src/syntax/pretty.rs b/src/syntax/pretty.rs index 3d02f39f7..b1c7e02bd 100644 --- a/src/syntax/pretty.rs +++ b/src/syntax/pretty.rs @@ -88,10 +88,10 @@ impl Pretty for MarkupNode { match self { // TODO: Handle escaping. Self::Space => p.push(' '), - Self::Linebreak(_) => p.push_str(r"\"), - Self::Parbreak(_) => p.push_str("\n\n"), - Self::Strong(_) => p.push('*'), - Self::Emph(_) => p.push('_'), + Self::Linebreak => p.push_str(r"\"), + Self::Parbreak => p.push_str("\n\n"), + Self::Strong => p.push('*'), + Self::Emph => p.push('_'), Self::Text(text) => p.push_str(text), Self::Raw(raw) => raw.pretty(p), Self::Heading(heading) => heading.pretty(p), @@ -165,28 +165,28 @@ impl Pretty for RawNode { impl Pretty for HeadingNode { fn pretty(&self, p: &mut Printer) { - for _ in 0 .. self.level { + for _ in 0 .. self.level().0 { p.push('='); } p.push(' '); - self.body.pretty(p); + self.body().pretty(p); } } impl Pretty for ListNode { fn pretty(&self, p: &mut Printer) { p.push_str("- "); - self.body.pretty(p); + self.body().pretty(p); } } impl Pretty for EnumNode { fn pretty(&self, p: &mut Printer) { - if let Some(number) = self.number { + if let Some(number) = self.number().0 { write!(p, "{}", number).unwrap(); } p.push_str(". 
"); - self.body.pretty(p); + self.body().pretty(p); } } @@ -235,8 +235,10 @@ impl Pretty for Lit { impl Pretty for ArrayExpr { fn pretty(&self, p: &mut Printer) { p.push('('); - p.join(&self.items, ", ", |item, p| item.pretty(p)); - if self.items.len() == 1 { + + let items = self.items(); + p.join(&items, ", ", |item, p| item.pretty(p)); + if items.len() == 1 { p.push(','); } p.push(')'); @@ -246,10 +248,12 @@ impl Pretty for ArrayExpr { impl Pretty for DictExpr { fn pretty(&self, p: &mut Printer) { p.push('('); - if self.items.is_empty() { + + let items = self.items(); + if items.is_empty() { p.push(':'); } else { - p.join(&self.items, ", ", |named, p| named.pretty(p)); + p.join(&items, ", ", |named, p| named.pretty(p)); } p.push(')'); } @@ -257,16 +261,16 @@ impl Pretty for DictExpr { impl Pretty for Named { fn pretty(&self, p: &mut Printer) { - self.name.pretty(p); + self.name().pretty(p); p.push_str(": "); - self.expr.pretty(p); + self.expr().pretty(p); } } impl Pretty for TemplateExpr { fn pretty(&self, p: &mut Printer) { p.push('['); - self.body.pretty(p); + self.body().pretty(p); p.push(']'); } } @@ -274,7 +278,7 @@ impl Pretty for TemplateExpr { impl Pretty for GroupExpr { fn pretty(&self, p: &mut Printer) { p.push('('); - self.expr.pretty(p); + self.expr().pretty(p); p.push(')'); } } @@ -282,11 +286,13 @@ impl Pretty for GroupExpr { impl Pretty for BlockExpr { fn pretty(&self, p: &mut Printer) { p.push('{'); - if self.exprs.len() > 1 { + + let exprs = self.exprs(); + if exprs.len() > 1 { p.push(' '); } - p.join(&self.exprs, "; ", |expr, p| expr.pretty(p)); - if self.exprs.len() > 1 { + p.join(&exprs, "; ", |expr, p| expr.pretty(p)); + if exprs.len() > 1 { p.push(' '); } p.push('}'); @@ -295,11 +301,12 @@ impl Pretty for BlockExpr { impl Pretty for UnaryExpr { fn pretty(&self, p: &mut Printer) { - self.op.pretty(p); - if self.op == UnOp::Not { + let op = self.op(); + op.pretty(p); + if op == UnOp::Not { p.push(' '); } - self.expr.pretty(p); + self.expr().pretty(p); } } @@ -311,11 +318,11 @@ impl Pretty for UnOp { impl Pretty for BinaryExpr { fn pretty(&self, p: &mut Printer) { - self.lhs.pretty(p); + self.lhs().pretty(p); p.push(' '); - self.op.pretty(p); + self.op().pretty(p); p.push(' '); - self.rhs.pretty(p); + self.rhs().pretty(p); } } @@ -327,7 +334,7 @@ impl Pretty for BinOp { impl Pretty for CallExpr { fn pretty(&self, p: &mut Printer) { - self.callee.pretty(p); + self.callee().pretty(p); let mut write_args = |items: &[CallArg]| { p.push('('); @@ -335,25 +342,26 @@ impl Pretty for CallExpr { p.push(')'); }; - match self.args.items.as_slice() { - // This can be moved behind the arguments. - // - // Example: Transforms "#v(a, [b])" => "#v(a)[b]". - [head @ .., CallArg::Pos(Expr::Template(template))] => { - if !head.is_empty() { - write_args(head); - } - template.pretty(p); - } + let arg_list = self.args(); + let args = arg_list.items(); - items => write_args(items), + if let Some(Expr::Template(template)) = args + .last() + .and_then(|x| if let CallArg::Pos(arg) = x { Some(arg) } else { None }) + { + if args.len() > 1 { + write_args(&args[0 .. 
args.len() - 1]); + } + template.pretty(p); + } else { + write_args(&args); } } } impl Pretty for CallArgs { fn pretty(&self, p: &mut Printer) { - p.join(&self.items, ", ", |item, p| item.pretty(p)); + p.join(&self.items(), ", ", |item, p| item.pretty(p)); } } @@ -372,15 +380,15 @@ impl Pretty for CallArg { impl Pretty for ClosureExpr { fn pretty(&self, p: &mut Printer) { - if let [param] = self.params.as_slice() { + if let [param] = self.params().as_slice() { param.pretty(p); } else { p.push('('); - p.join(self.params.iter(), ", ", |item, p| item.pretty(p)); + p.join(self.params().iter(), ", ", |item, p| item.pretty(p)); p.push(')'); } p.push_str(" => "); - self.body.pretty(p); + self.body().pretty(p); } } @@ -399,9 +407,9 @@ impl Pretty for ClosureParam { impl Pretty for WithExpr { fn pretty(&self, p: &mut Printer) { - self.callee.pretty(p); + self.callee().pretty(p); p.push_str(" with ("); - self.args.pretty(p); + self.args().pretty(p); p.push(')'); } } @@ -409,13 +417,13 @@ impl Pretty for WithExpr { impl Pretty for LetExpr { fn pretty(&self, p: &mut Printer) { p.push_str("let "); - self.binding.pretty(p); - if let Some(Expr::Closure(closure)) = &self.init { + self.binding().pretty(p); + if let Some(Expr::Closure(closure)) = &self.init() { p.push('('); - p.join(closure.params.iter(), ", ", |item, p| item.pretty(p)); + p.join(closure.params().iter(), ", ", |item, p| item.pretty(p)); p.push_str(") = "); - closure.body.pretty(p); - } else if let Some(init) = &self.init { + closure.body().pretty(p); + } else if let Some(init) = &self.init() { p.push_str(" = "); init.pretty(p); } @@ -425,10 +433,10 @@ impl Pretty for LetExpr { impl Pretty for IfExpr { fn pretty(&self, p: &mut Printer) { p.push_str("if "); - self.condition.pretty(p); + self.condition().pretty(p); p.push(' '); - self.if_body.pretty(p); - if let Some(expr) = &self.else_body { + self.if_body().pretty(p); + if let Some(expr) = &self.else_body() { p.push_str(" else "); expr.pretty(p); } @@ -438,42 +446,40 @@ impl Pretty for IfExpr { impl Pretty for WhileExpr { fn pretty(&self, p: &mut Printer) { p.push_str("while "); - self.condition.pretty(p); + self.condition().pretty(p); p.push(' '); - self.body.pretty(p); + self.body().pretty(p); } } impl Pretty for ForExpr { fn pretty(&self, p: &mut Printer) { p.push_str("for "); - self.pattern.pretty(p); + self.pattern().pretty(p); p.push_str(" in "); - self.iter.pretty(p); + self.iter().pretty(p); p.push(' '); - self.body.pretty(p); + self.body().pretty(p); } } impl Pretty for ForPattern { fn pretty(&self, p: &mut Printer) { - match self { - Self::Value(v) => v.pretty(p), - Self::KeyValue(k, v) => { - k.pretty(p); - p.push_str(", "); - v.pretty(p); - } + if let Some(key) = self.key() { + key.pretty(p); + p.push_str(", "); } + + self.value().pretty(p); } } impl Pretty for ImportExpr { fn pretty(&self, p: &mut Printer) { p.push_str("import "); - self.imports.pretty(p); + self.imports().pretty(p); p.push_str(" from "); - self.path.pretty(p); + self.path().pretty(p); } } @@ -489,7 +495,7 @@ impl Pretty for Imports { impl Pretty for IncludeExpr { fn pretty(&self, p: &mut Printer) { p.push_str("include "); - self.path.pretty(p); + self.path().pretty(p); } } @@ -502,7 +508,6 @@ impl Pretty for Ident { #[cfg(test)] mod tests { use super::*; - use crate::parse::parse; use crate::source::SourceFile; #[track_caller] @@ -513,7 +518,7 @@ mod tests { #[track_caller] fn test_parse(src: &str, expected: &str) { let source = SourceFile::detached(src); - let ast = parse(&source).unwrap(); + let ast: Markup = 
source.ast().unwrap(); let found = pretty(&ast); if found != expected { println!("tree: {:#?}", ast); diff --git a/src/syntax/span.rs b/src/syntax/span.rs index bfb9e755c..ee7cba4c2 100644 --- a/src/syntax/span.rs +++ b/src/syntax/span.rs @@ -109,6 +109,11 @@ impl Span { *self = self.join(other) } + /// Test whether a position is within the span. + pub fn contains_pos(&self, pos: Pos) -> bool { + self.start <= pos && self.end >= pos + } + /// Test whether one span complete contains the other span. pub fn contains(self, other: Self) -> bool { self.source == other.source && self.start <= other.start && self.end >= other.end @@ -118,6 +123,16 @@ impl Span { pub fn to_range(self) -> Range { self.start.to_usize() .. self.end.to_usize() } + + /// A new span at the position of this span's start. + pub fn at_start(&self) -> Span { + Self::at(self.source, self.start) + } + + /// A new span at the position of this span's end. + pub fn at_end(&self) -> Span { + Self::at(self.source, self.end) + } } impl Debug for Span { diff --git a/src/syntax/token.rs b/src/syntax/token.rs index 22dd104b0..49613667e 100644 --- a/src/syntax/token.rs +++ b/src/syntax/token.rs @@ -1,188 +1,38 @@ -use crate::geom::{AngularUnit, LengthUnit}; - -/// A minimal semantic entity of source code. -#[derive(Debug, Copy, Clone, PartialEq)] -pub enum Token<'s> { - /// A left square bracket: `[`. - LeftBracket, - /// A right square bracket: `]`. - RightBracket, - /// A left curly brace: `{`. - LeftBrace, - /// A right curly brace: `}`. - RightBrace, - /// A left round parenthesis: `(`. - LeftParen, - /// A right round parenthesis: `)`. - RightParen, - /// An asterisk: `*`. - Star, - /// An underscore: `_`. - Underscore, - /// A tilde: `~`. - Tilde, - /// Two hyphens: `--`. - HyphHyph, - /// Three hyphens: `---`. - HyphHyphHyph, - /// A backslash followed by nothing or whitespace: `\`. - Backslash, - /// A comma: `,`. - Comma, - /// A semicolon: `;`. - Semicolon, - /// A colon: `:`. - Colon, - /// A plus: `+`. - Plus, - /// A hyphen: `-`. - Hyph, - /// A slash: `/`. - Slash, - /// A single equals sign: `=`. - Eq, - /// Two equals signs: `==`. - EqEq, - /// An exclamation mark followed by an equals sign: `!=`. - ExclEq, - /// A less-than sign: `<`. - Lt, - /// A less-than sign followed by an equals sign: `<=`. - LtEq, - /// A greater-than sign: `>`. - Gt, - /// A greater-than sign followed by an equals sign: `>=`. - GtEq, - /// A plus followed by an equals sign: `+=`. - PlusEq, - /// A hyphen followed by an equals sign: `-=`. - HyphEq, - /// An asterisk followed by an equals sign: `*=`. - StarEq, - /// A slash followed by an equals sign: `/=`. - SlashEq, - /// Two dots: `..`. - Dots, - /// An equals sign followed by a greater-than sign: `=>`. - Arrow, - /// The `not` operator. - Not, - /// The `and` operator. - And, - /// The `or` operator. - Or, - /// The `with` operator. - With, - /// The none literal: `none`. - None, - /// The auto literal: `auto`. - Auto, - /// The `let` keyword. - Let, - /// The `if` keyword. - If, - /// The `else` keyword. - Else, - /// The `for` keyword. - For, - /// The `in` keyword. - In, - /// The `while` keyword. - While, - /// The `break` keyword. - Break, - /// The `continue` keyword. - Continue, - /// The `return` keyword. - Return, - /// The `import` keyword. - Import, - /// The `include` keyword. - Include, - /// The `from` keyword. - From, - /// One or more whitespace characters. - /// - /// The contained `usize` denotes the number of newlines that were contained - /// in the whitespace. 
- Space(usize), - /// A consecutive non-markup string. - Text(&'s str), - /// A slash and the letter "u" followed by a hexadecimal unicode entity - /// enclosed in curly braces: `\u{1F5FA}`. - UnicodeEscape(UnicodeEscapeToken<'s>), - /// An arbitrary number of backticks followed by inner contents, terminated - /// with the same number of backticks: `` `...` ``. - Raw(RawToken<'s>), - /// One or two dollar signs followed by inner contents, terminated with the - /// same number of dollar signs. - Math(MathToken<'s>), - /// A numbering: `23.`. - /// - /// Can also exist without the number: `.`. - Numbering(Option), - /// An identifier: `center`. - Ident(&'s str), - /// A boolean: `true`, `false`. - Bool(bool), - /// An integer: `120`. - Int(i64), - /// A floating-point number: `1.2`, `10e-4`. - Float(f64), - /// A length: `12pt`, `3cm`. - Length(f64, LengthUnit), - /// An angle: `90deg`. - Angle(f64, AngularUnit), - /// A percentage: `50%`. - /// - /// _Note_: `50%` is stored as `50.0` here, as in the corresponding - /// [literal](super::Lit::Percent). - Percent(f64), - /// A fraction unit: `3fr`. - Fraction(f64), - /// A quoted string: `"..."`. - Str(StrToken<'s>), - /// Two slashes followed by inner contents, terminated with a newline: - /// `//\n`. - LineComment(&'s str), - /// A slash and a star followed by inner contents, terminated with a star - /// and a slash: `/**/`. - /// - /// The comment can contain nested block comments. - BlockComment(&'s str), - /// Things that are not valid tokens. - Invalid(&'s str), -} +use crate::util::EcoString; /// A quoted string token: `"..."`. -#[derive(Debug, Copy, Clone, PartialEq)] -pub struct StrToken<'s> { +#[derive(Debug, Clone, PartialEq)] +pub struct StrToken { /// The string inside the quotes. /// /// _Note_: If the string contains escape sequences these are not yet /// applied to be able to just store a string slice here instead of /// a `String`. The resolving is done later in the parser. - pub string: &'s str, + pub string: EcoString, /// Whether the closing quote was present. pub terminated: bool, } /// A raw block token: `` `...` ``. -#[derive(Debug, Copy, Clone, PartialEq)] -pub struct RawToken<'s> { - /// The raw text between the backticks. - pub text: &'s str, +#[derive(Debug, Clone, PartialEq)] +pub struct RawToken { + /// The raw text in the block. + pub text: EcoString, + /// The programming language of the raw text. + pub lang: Option, /// The number of opening backticks. - pub backticks: usize, + pub backticks: u8, /// Whether all closing backticks were present. pub terminated: bool, + /// Whether to display this as a block. + pub block: bool, } /// A math formula token: `$2pi + x$` or `$[f'(x) = x^2]$`. -#[derive(Debug, Copy, Clone, PartialEq)] -pub struct MathToken<'s> { +#[derive(Debug, Clone, PartialEq)] +pub struct MathToken { /// The formula between the dollars. - pub formula: &'s str, + pub formula: EcoString, /// Whether the formula is display-level, that is, it is surrounded by /// `$[..]`. pub display: bool, @@ -191,86 +41,21 @@ pub struct MathToken<'s> { } /// A unicode escape sequence token: `\u{1F5FA}`. -#[derive(Debug, Copy, Clone, PartialEq)] -pub struct UnicodeEscapeToken<'s> { +#[derive(Debug, Clone, PartialEq)] +pub struct UnicodeEscapeToken { /// The escape sequence between the braces. - pub sequence: &'s str, + pub sequence: EcoString, + /// The resulting unicode character. + pub character: Option, /// Whether the closing brace was present. 
pub terminated: bool, } -impl<'s> Token<'s> { - /// The English name of this token for use in error messages. - pub fn name(self) -> &'static str { - match self { - Self::LeftBracket => "opening bracket", - Self::RightBracket => "closing bracket", - Self::LeftBrace => "opening brace", - Self::RightBrace => "closing brace", - Self::LeftParen => "opening paren", - Self::RightParen => "closing paren", - Self::Star => "star", - Self::Underscore => "underscore", - Self::Tilde => "tilde", - Self::HyphHyph => "en dash", - Self::HyphHyphHyph => "em dash", - Self::Backslash => "backslash", - Self::Comma => "comma", - Self::Semicolon => "semicolon", - Self::Colon => "colon", - Self::Plus => "plus", - Self::Hyph => "minus", - Self::Slash => "slash", - Self::Eq => "assignment operator", - Self::EqEq => "equality operator", - Self::ExclEq => "inequality operator", - Self::Lt => "less-than operator", - Self::LtEq => "less-than or equal operator", - Self::Gt => "greater-than operator", - Self::GtEq => "greater-than or equal operator", - Self::PlusEq => "add-assign operator", - Self::HyphEq => "subtract-assign operator", - Self::StarEq => "multiply-assign operator", - Self::SlashEq => "divide-assign operator", - Self::Dots => "dots", - Self::Arrow => "arrow", - Self::Not => "operator `not`", - Self::And => "operator `and`", - Self::Or => "operator `or`", - Self::With => "operator `with`", - Self::None => "`none`", - Self::Auto => "`auto`", - Self::Let => "keyword `let`", - Self::If => "keyword `if`", - Self::Else => "keyword `else`", - Self::For => "keyword `for`", - Self::In => "keyword `in`", - Self::While => "keyword `while`", - Self::Break => "keyword `break`", - Self::Continue => "keyword `continue`", - Self::Return => "keyword `return`", - Self::Import => "keyword `import`", - Self::Include => "keyword `include`", - Self::From => "keyword `from`", - Self::Space(_) => "space", - Self::Text(_) => "text", - Self::UnicodeEscape(_) => "unicode escape sequence", - Self::Raw(_) => "raw block", - Self::Math(_) => "math formula", - Self::Numbering(_) => "numbering", - Self::Ident(_) => "identifier", - Self::Bool(_) => "boolean", - Self::Int(_) => "integer", - Self::Float(_) => "float", - Self::Length(_, _) => "length", - Self::Angle(_, _) => "angle", - Self::Percent(_) => "percentage", - Self::Fraction(_) => "`fr` value", - Self::Str(_) => "string", - Self::LineComment(_) => "line comment", - Self::BlockComment(_) => "block comment", - Self::Invalid("*/") => "end of block comment", - Self::Invalid(_) => "invalid token", - } - } +/// A unit-bound number token: `1.2em`. +#[derive(Debug, Clone, PartialEq)] +pub struct UnitToken { + /// The number part. + pub number: std::ops::Range, + /// The unit part. + pub unit: std::ops::Range, } diff --git a/src/syntax/visit.rs b/src/syntax/visit.rs deleted file mode 100644 index 40e8eb93c..000000000 --- a/src/syntax/visit.rs +++ /dev/null @@ -1,263 +0,0 @@ -//! Mutable and immutable syntax tree traversal. - -use super::*; - -/// Implement the immutable and the mutable visitor version. -macro_rules! impl_visitors { - ($($name:ident($($tts:tt)*) $body:block)*) => { - macro_rules! r { - (rc: $x:expr) => { $x.as_ref() }; - ($x:expr) => { &$x }; - } - - impl_visitor! { - Visit, - immutable, - immutably, - [$(($name($($tts)*) $body))*] - } - - macro_rules! r { - (rc: $x:expr) => { std::rc::Rc::make_mut(&mut $x) }; - ($x:expr) => { &mut $x }; - } - - impl_visitor! 
{ - VisitMut, - mutable, - mutably, - [$(($name($($tts)*) $body mut))*] mut - } - }; -} - -/// Implement an immutable or mutable visitor. -macro_rules! impl_visitor { - ( - $visit:ident, - $mutability:ident, - $adjective:ident, - [$(( - $name:ident($v:ident, $node:ident: $ty:ty) - $body:block - $($fmut:tt)? - ))*] - $($mut:tt)? - ) => { - #[doc = concat!("Visit syntax trees ", stringify!($adjective), ".")] - pub trait $visit<'ast> { - /// Visit a definition of a binding. - /// - /// Bindings are, for example, left-hand side of let expressions, - /// and key/value patterns in for loops. - fn visit_binding(&mut self, _: &'ast $($mut)? Ident) {} - - /// Visit the entry into a scope. - fn visit_enter(&mut self) {} - - /// Visit the exit from a scope. - fn visit_exit(&mut self) {} - - $(fn $name(&mut self, $node: &'ast $($fmut)? $ty) { - $mutability::$name(self, $node); - })* - } - - #[doc = concat!("Visitor functions that are ", stringify!($mutability), ".")] - pub mod $mutability { - use super::*; - $( - #[doc = concat!("Visit a node of type [`", stringify!($ty), "`].")] - pub fn $name<'ast, V>($v: &mut V, $node: &'ast $($fmut)? $ty) - where - V: $visit<'ast> + ?Sized - $body - )* - } - }; -} - -impl_visitors! { - visit_tree(v, markup: Markup) { - for node in markup { - v.visit_node(node); - } - } - - visit_node(v, node: MarkupNode) { - match node { - MarkupNode::Space => {} - MarkupNode::Linebreak(_) => {} - MarkupNode::Parbreak(_) => {} - MarkupNode::Strong(_) => {} - MarkupNode::Emph(_) => {} - MarkupNode::Text(_) => {} - MarkupNode::Raw(_) => {} - MarkupNode::Heading(n) => v.visit_heading(n), - MarkupNode::List(n) => v.visit_list(n), - MarkupNode::Enum(n) => v.visit_enum(n), - MarkupNode::Expr(n) => v.visit_expr(n), - } - } - - visit_heading(v, heading: HeadingNode) { - v.visit_tree(r!(heading.body)); - } - - visit_list(v, list: ListNode) { - v.visit_tree(r!(list.body)); - } - - visit_enum(v, enum_: EnumNode) { - v.visit_tree(r!(enum_.body)); - } - - visit_expr(v, expr: Expr) { - match expr { - Expr::Ident(_) => {} - Expr::Lit(_) => {}, - Expr::Array(e) => v.visit_array(e), - Expr::Dict(e) => v.visit_dict(e), - Expr::Template(e) => v.visit_template(e), - Expr::Group(e) => v.visit_group(e), - Expr::Block(e) => v.visit_block(e), - Expr::Unary(e) => v.visit_unary(e), - Expr::Binary(e) => v.visit_binary(e), - Expr::Call(e) => v.visit_call(e), - Expr::Closure(e) => v.visit_closure(e), - Expr::With(e) => v.visit_with(e), - Expr::Let(e) => v.visit_let(e), - Expr::If(e) => v.visit_if(e), - Expr::While(e) => v.visit_while(e), - Expr::For(e) => v.visit_for(e), - Expr::Import(e) => v.visit_import(e), - Expr::Include(e) => v.visit_include(e), - } - } - - visit_array(v, array: ArrayExpr) { - for expr in r!(array.items) { - v.visit_expr(expr); - } - } - - visit_dict(v, dict: DictExpr) { - for named in r!(dict.items) { - v.visit_expr(r!(named.expr)); - } - } - - visit_template(v, template: TemplateExpr) { - v.visit_enter(); - v.visit_tree(r!(template.body)); - v.visit_exit(); - } - - visit_group(v, group: GroupExpr) { - v.visit_expr(r!(group.expr)); - } - - visit_block(v, block: BlockExpr) { - v.visit_enter(); - for expr in r!(block.exprs) { - v.visit_expr(expr); - } - v.visit_exit(); - } - - visit_binary(v, binary: BinaryExpr) { - v.visit_expr(r!(binary.lhs)); - v.visit_expr(r!(binary.rhs)); - } - - visit_unary(v, unary: UnaryExpr) { - v.visit_expr(r!(unary.expr)); - } - - visit_call(v, call: CallExpr) { - v.visit_expr(r!(call.callee)); - v.visit_args(r!(call.args)); - } - - visit_args(v, args: 
CallArgs) { - for arg in r!(args.items) { - v.visit_arg(arg); - } - } - - visit_arg(v, arg: CallArg) { - match arg { - CallArg::Pos(expr) => v.visit_expr(expr), - CallArg::Named(named) => v.visit_expr(r!(named.expr)), - CallArg::Spread(expr) => v.visit_expr(expr), - } - } - - visit_closure(v, closure: ClosureExpr) { - for param in r!(closure.params) { - v.visit_param(param); - } - v.visit_expr(r!(rc: closure.body)); - } - - visit_param(v, param: ClosureParam) { - match param { - ClosureParam::Pos(binding) => v.visit_binding(binding), - ClosureParam::Named(named) => { - v.visit_binding(r!(named.name)); - v.visit_expr(r!(named.expr)); - } - ClosureParam::Sink(binding) => v.visit_binding(binding), - } - } - - visit_with(v, with_expr: WithExpr) { - v.visit_expr(r!(with_expr.callee)); - v.visit_args(r!(with_expr.args)); - } - - visit_let(v, let_expr: LetExpr) { - if let Some(init) = r!(let_expr.init) { - v.visit_expr(init); - } - v.visit_binding(r!(let_expr.binding)); - } - - visit_if(v, if_expr: IfExpr) { - v.visit_expr(r!(if_expr.condition)); - v.visit_expr(r!(if_expr.if_body)); - if let Some(body) = r!(if_expr.else_body) { - v.visit_expr(body); - } - } - - visit_while(v, while_expr: WhileExpr) { - v.visit_expr(r!(while_expr.condition)); - v.visit_expr(r!(while_expr.body)); - } - - visit_for(v, for_expr: ForExpr) { - v.visit_expr(r!(for_expr.iter)); - match r!(for_expr.pattern) { - ForPattern::Value(value) => v.visit_binding(value), - ForPattern::KeyValue(key, value) => { - v.visit_binding(key); - v.visit_binding(value); - } - } - v.visit_expr(r!(for_expr.body)); - } - - visit_import(v, import_expr: ImportExpr) { - v.visit_expr(r!(import_expr.path)); - if let Imports::Idents(idents) = r!(import_expr.imports) { - for ident in idents { - v.visit_binding(ident); - } - } - } - - visit_include(v, include_expr: IncludeExpr) { - v.visit_expr(r!(include_expr.path)); - } -} diff --git a/tests/typ/code/array.typ b/tests/typ/code/array.typ index df37dd454..44b8b5979 100644 --- a/tests/typ/code/array.typ +++ b/tests/typ/code/array.typ @@ -72,7 +72,7 @@ {(,1)} // Missing expression makes named pair incomplete, making this an empty array. -// Error: 5 expected expression +// Error: 3-5 expected expression, found named pair {(a:)} // Named pair after this is already identified as an array. diff --git a/tests/typ/code/call.typ b/tests/typ/code/call.typ index 2c16af1cf..95d75595d 100644 --- a/tests/typ/code/call.typ +++ b/tests/typ/code/call.typ @@ -72,7 +72,7 @@ // Error: 10-12 expected expression, found end of block comment #func(a:1*/) -// Error: 8 expected comma +// Error: 9 expected comma #func(1 2) // Error: 7-8 expected identifier diff --git a/tests/typ/code/dict.typ b/tests/typ/code/dict.typ index b369b8b65..757759aca 100644 --- a/tests/typ/code/dict.typ +++ b/tests/typ/code/dict.typ @@ -42,7 +42,7 @@ // Identified as dictionary due to initial colon. // Error: 4-5 expected named pair, found expression -// Error: 5 expected comma +// Error: 6 expected comma // Error: 12-16 expected identifier // Error: 17-18 expected expression, found colon {(:1 b:"", true::)} diff --git a/tests/typ/code/import.typ b/tests/typ/code/import.typ index bc96e80c8..1fa8f2057 100644 --- a/tests/typ/code/import.typ +++ b/tests/typ/code/import.typ @@ -79,7 +79,7 @@ This is never reached. 
 // Error: 22 expected keyword `from`
 #import afrom, "b", c
 
-// Error: 8 expected import items
+// Error: 9 expected import items
 #import from "target.typ"
 
 // Error: 9-10 expected expression, found assignment operator
@@ -114,4 +114,5 @@ This is never reached.
 // An item after a star.
 // Should output `, a from "target.typ"`.
 // Error: 10 expected keyword `from`
+// Error: 10 expected semicolon or line break
 #import *, a from "target.typ"
diff --git a/tests/typ/code/spread.typ b/tests/typ/code/spread.typ
index 8a9491d06..41e790a41 100644
--- a/tests/typ/code/spread.typ
+++ b/tests/typ/code/spread.typ
@@ -62,7 +62,7 @@
 #min(.."nope")
 
 ---
-// Error: 10-14 expected identifier
+// Error: 8-14 expected identifier
 #let f(..true) = none
 
 ---
@@ -70,9 +70,9 @@
 #let f(..a, ..b) = none
 
 ---
-// Error: 5-6 spreading is not allowed here
+// Error: 3-6 spreading is not allowed here
 {(..x)}
 
 ---
-// Error: 11-17 spreading is not allowed here
+// Error: 9-17 spreading is not allowed here
 {(1, 2, ..(1, 2))}
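
To make the structural change above easier to follow, here is a minimal, self-contained Rust sketch of the green/red-tree pattern the diff introduces: an untyped, length-only green tree, red views that add absolute offsets on demand, and typed wrappers that cast from untyped nodes. All names in the sketch (`Kind`, `Green`, `Red`, `TypedNode`, `BinaryExpr`) are simplified stand-ins chosen for illustration, not the actual types or APIs from the diff.

use std::rc::Rc;

/// A tiny, stand-in set of node kinds.
#[derive(Debug, Clone, PartialEq)]
enum Kind {
    Ident(String),
    Plus,
    Binary,
}

/// An untyped, position-independent node: it knows only its kind, its byte
/// length and its children, so identical subtrees can be shared via `Rc`.
#[derive(Debug, Clone)]
struct Green {
    kind: Kind,
    len: usize,
    children: Vec<Rc<Green>>,
}

impl Green {
    fn leaf(kind: Kind, len: usize) -> Rc<Self> {
        Rc::new(Self { kind, len, children: vec![] })
    }

    fn inner(kind: Kind, children: Vec<Rc<Green>>) -> Rc<Self> {
        let len = children.iter().map(|c| c.len).sum();
        Rc::new(Self { kind, len, children })
    }
}

/// A "red" view: a green node plus its absolute offset in the source.
/// Offsets are accumulated while walking, so the green tree never stores spans.
#[derive(Clone)]
struct Red {
    offset: usize,
    green: Rc<Green>,
}

impl Red {
    /// The byte range this node covers.
    fn span(&self) -> (usize, usize) {
        (self.offset, self.offset + self.green.len)
    }

    /// Red views of the children, with offsets computed on the fly.
    fn children(&self) -> impl Iterator<Item = Red> + '_ {
        let mut offset = self.offset;
        self.green.children.iter().map(move |child| {
            let red = Red { offset, green: Rc::clone(child) };
            offset += child.len;
            red
        })
    }

    /// Try to view this untyped node through a typed lens.
    fn cast<T: TypedNode>(&self) -> Option<T> {
        T::cast_from(self)
    }
}

/// The typed layer: wrappers that reinterpret untyped nodes on demand.
trait TypedNode: Sized {
    fn cast_from(red: &Red) -> Option<Self>;
}

/// A typed view of a binary expression node.
struct BinaryExpr(Red);

impl TypedNode for BinaryExpr {
    fn cast_from(red: &Red) -> Option<Self> {
        (red.green.kind == Kind::Binary).then(|| Self(red.clone()))
    }
}

impl BinaryExpr {
    /// The left-hand side: the first child that is an operand.
    fn lhs(&self) -> Red {
        self.0
            .children()
            .find(|c| matches!(c.green.kind, Kind::Ident(_)))
            .expect("binary expression is missing left-hand side")
    }
}

fn main() {
    // Green tree for the source `a+b`.
    let green = Green::inner(Kind::Binary, vec![
        Green::leaf(Kind::Ident("a".into()), 1),
        Green::leaf(Kind::Plus, 1),
        Green::leaf(Kind::Ident("b".into()), 1),
    ]);

    let root = Red { offset: 0, green };
    let binary: BinaryExpr = root.cast().expect("not a binary expression");
    assert_eq!(binary.lhs().span(), (0, 1));
}

The point of the split is that green nodes carry no positions and can therefore be shared and cached, while spans are recomputed cheaply whenever a red view walks the tree, which is what the typed accessor methods in the diff rely on.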