Mirror of https://github.com/typst/typst (synced 2025-05-15 17:45:27 +08:00)

Commit 84d35efee3 (parent 4875633acf): Less owning, more iterating
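The commit title describes the pattern applied throughout the diff below: accessors that used to allocate and return owned `Vec`s now hand out lazy iterators borrowing from the node, and token parameters are taken by reference instead of by value. The following is a minimal sketch of that pattern with a hypothetical `Node` type (not typst's actual API), just to illustrate the before/after shape.

```rust
// Hypothetical node type, for illustration only.
struct Node {
    children: Vec<i64>,
}

impl Node {
    // Before: `fn items(&self) -> Vec<i64> { ... .collect() }`
    // After: borrow lazily from `self`; no intermediate allocation.
    fn items(&self) -> impl Iterator<Item = i64> + '_ {
        self.children.iter().copied().filter(|x| *x >= 0)
    }
}

fn main() {
    let node = Node { children: vec![3, -1, 4] };
    // Iterate directly...
    assert_eq!(node.items().sum::<i64>(), 7);
    // ...or collect only when an owned buffer is really needed.
    let owned: Vec<i64> = node.items().collect();
    assert_eq!(owned, vec![3, 4]);
}
```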
@@ -1,7 +1,7 @@
 use std::rc::Rc;
 
 use super::{Scope, Scopes, Value};
-use crate::syntax::{ClosureParam, Expr, Imports, RedTicket};
+use crate::syntax::{ClosureParam, Expr, Imports, RedRef};
 
 /// A visitor that captures variable slots.
 pub struct CapturesVisitor<'a> {
@@ -20,12 +20,12 @@ impl<'a> CapturesVisitor<'a> {
         }
     }
 
-    pub fn visit(&mut self, node: RedTicket) {
+    pub fn visit(&mut self, node: RedRef) {
         let expr: Option<Expr> = node.cast();
 
         match expr.as_ref() {
             Some(Expr::Let(expr)) => {
-                self.visit(expr.init_ticket());
+                self.visit(expr.init_ref());
                 let ident = expr.binding();
                 self.internal.def_mut(ident.as_str(), Value::None);
             }
@@ -40,7 +40,7 @@ impl<'a> CapturesVisitor<'a> {
                         }
                     }
                 }
-                self.visit(closure.body_ticket());
+                self.visit(closure.body_ref());
             }
             Some(Expr::For(forloop)) => {
                 let pattern = forloop.pattern();
@@ -49,7 +49,7 @@ impl<'a> CapturesVisitor<'a> {
                 if let Some(key) = pattern.key() {
                     self.internal.def_mut(key.as_str(), Value::None);
                 }
-                self.visit(forloop.body_ticket());
+                self.visit(forloop.body_ref());
             }
             Some(Expr::Import(import)) => {
                 if let Imports::Idents(idents) = import.imports() {
@@ -73,7 +73,7 @@ impl<'a> CapturesVisitor<'a> {
 
             Some(Expr::Block(_)) => {
                 self.internal.enter();
-                for child in node.own().children() {
+                for child in node.children() {
                     self.visit(child);
                 }
                 self.internal.exit();
@@ -81,14 +81,14 @@ impl<'a> CapturesVisitor<'a> {
 
             Some(Expr::Template(_)) => {
                 self.internal.enter();
-                for child in node.own().children() {
+                for child in node.children() {
                     self.visit(child);
                 }
                 self.internal.exit();
             }
 
             _ => {
-                for child in node.own().children() {
+                for child in node.children() {
                     self.visit(child);
                 }
             }
@@ -230,7 +230,7 @@ impl Eval for ArrayExpr {
     type Output = Array;
 
     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        self.items().iter().map(|expr| expr.eval(ctx)).collect()
+        self.items().map(|expr| expr.eval(ctx)).collect()
     }
 }
 
@@ -239,7 +239,6 @@ impl Eval for DictExpr {
 
     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
         self.items()
-            .iter()
             .map(|x| Ok(((&x.name().string).into(), x.expr().eval(ctx)?)))
             .collect()
     }
@@ -268,7 +267,7 @@ impl Eval for BlockExpr {
         ctx.scopes.enter();
 
         let mut output = Value::None;
-        for expr in &self.exprs() {
+        for expr in self.exprs() {
             let value = expr.eval(ctx)?;
             output = ops::join(output, value).at(expr.span())?;
         }
@@ -387,9 +386,9 @@ impl Eval for CallArgs {
     type Output = Args;
 
     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let mut items = Vec::with_capacity(self.items().len());
+        let mut items = Vec::new();
 
-        for arg in &self.items() {
+        for arg in self.items() {
             let span = arg.span();
             match arg {
                 CallArg::Pos(expr) => {
@@ -454,11 +453,10 @@ impl Eval for ClosureExpr {
         };
 
         let mut sink = None;
-        let params_src = self.params();
-        let mut params = Vec::with_capacity(params_src.len());
+        let mut params = Vec::new();
 
         // Collect parameters and an optional sink parameter.
-        for param in &params_src {
+        for param in self.params() {
             match param {
                 ClosureParam::Pos(name) => {
                     params.push((name.string.clone(), None));
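Side note on the `Eval` impls above: once `items()` yields an iterator, the `.iter()` call and the pre-sized `Vec` buffers can go away because an iterator of `Result`s collects directly into a `Result` of a collection. A minimal sketch with hypothetical types (not typst's `TypResult`):

```rust
// Doubles every item, but stops at the first negative value with an error,
// mirroring how `?`-style evaluation short-circuits during `collect()`.
fn eval_all(items: impl Iterator<Item = i64>) -> Result<Vec<i64>, String> {
    items
        .map(|x| if x >= 0 { Ok(x * 2) } else { Err(format!("negative: {}", x)) })
        .collect()
}

fn main() {
    assert_eq!(eval_all([1, 2, 3].into_iter()), Ok(vec![2, 4, 6]));
    assert!(eval_all([1, -2].into_iter()).is_err());
}
```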
@@ -69,7 +69,7 @@ impl Walk for RawNode {
 
 impl Walk for HeadingNode {
     fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
-        let level = self.level().0;
+        let level = self.level();
         let body = self.body().eval(ctx)?;
 
         ctx.template.parbreak();
@@ -99,7 +99,7 @@ impl Walk for ListNode {
 impl Walk for EnumNode {
     fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
         let body = self.body().eval(ctx)?;
-        let label = format_str!("{}.", self.number().0.unwrap_or(1));
+        let label = format_str!("{}.", self.number().unwrap_or(1));
         walk_item(ctx, label, body);
         Ok(())
     }
src/parse/mod.rs
@@ -30,15 +30,14 @@ fn markup(p: &mut Parser) {
 
 /// Parse markup that stays right of the given column.
 fn markup_indented(p: &mut Parser, column: usize) {
-    // TODO this is broken
     p.eat_while(|t| match t {
-        NodeKind::Space(n) => n == 0,
+        NodeKind::Space(n) => *n == 0,
         NodeKind::LineComment | NodeKind::BlockComment => true,
         _ => false,
     });
 
     markup_while(p, false, &mut |p| match p.peek() {
-        Some(NodeKind::Space(n)) if n >= 1 => p.column(p.next_end()) >= column,
+        Some(NodeKind::Space(n)) if *n >= 1 => p.column(p.next_end()) >= column,
         _ => true,
     })
 }
@@ -64,125 +63,119 @@
 
 /// Parse a markup node.
 fn markup_node(p: &mut Parser, at_start: &mut bool) {
-    if let Some(token) = p.peek() {
-        match token {
-            // Whitespace.
-            NodeKind::Space(newlines) => {
-                *at_start |= newlines > 0;
-
-                if newlines < 2 {
-                    p.eat();
-                } else {
-                    p.convert(NodeKind::Parbreak);
-                }
-            }
-
-            // Text.
-            NodeKind::UnicodeEscape(u) => {
-                if !u.terminated {
-                    p.convert(NodeKind::Error(
-                        ErrorPosition::End,
-                        "expected closing brace".into(),
-                    ));
-                    p.unsuccessful();
-                    return;
-                }
-
-                if u.character.is_none() {
-                    let src = p.peek_src();
-                    p.convert(NodeKind::Error(
-                        ErrorPosition::Full,
-                        "invalid unicode escape sequence".into(),
-                    ));
-                    p.start();
-                    p.end(NodeKind::Text(src.into()));
-                    return;
-                }
-
-                p.eat();
-            }
-            NodeKind::Raw(r) => {
-                if !r.terminated {
-                    p.convert(NodeKind::Error(
-                        ErrorPosition::End,
-                        "expected backtick(s)".into(),
-                    ));
-                    p.unsuccessful();
-                    return;
-                }
-
-                p.eat();
-            }
-            NodeKind::Text(_)
-            | NodeKind::EnDash
-            | NodeKind::EmDash
-            | NodeKind::NonBreakingSpace => {
-                p.eat();
-            }
-
-            // Markup.
-            NodeKind::Emph | NodeKind::Strong | NodeKind::Linebreak => {
-                p.eat();
-            }
-
-            NodeKind::Eq if *at_start => heading(p),
-            NodeKind::ListBullet if *at_start => list_node(p),
-            NodeKind::EnumNumbering(_) if *at_start => enum_node(p),
-
-            // Line-based markup that is not currently at the start of the line.
-            NodeKind::Eq | NodeKind::ListBullet | NodeKind::EnumNumbering(_) => {
-                p.convert(NodeKind::Text(p.peek_src().into()))
-            }
-
-            // Hashtag + keyword / identifier.
-            NodeKind::Ident(_)
-            | NodeKind::Let
-            | NodeKind::If
-            | NodeKind::While
-            | NodeKind::For
-            | NodeKind::Import
-            | NodeKind::Include => {
-                let stmt = matches!(token, NodeKind::Let | NodeKind::Import);
-                let group = if stmt { Group::Stmt } else { Group::Expr };
-
-                p.start_group(group, TokenMode::Code);
-                expr_with(p, true, 0);
-                if stmt && p.success() && !p.eof() {
-                    p.expected_at("semicolon or line break");
-                }
-                p.end_group();
-            }
-
-            // Block and template.
-            NodeKind::LeftBrace => {
-                block(p);
-            }
-            NodeKind::LeftBracket => {
-                template(p);
-            }
-
-            // Comments.
-            NodeKind::LineComment | NodeKind::BlockComment => {
-                p.eat();
-            }
-
-            _ => {
-                *at_start = false;
-                p.unexpected();
-            }
-        };
-    }
+    let token = match p.peek() {
+        Some(t) => t,
+        None => return,
+    };
+
+    match token {
+        // Whitespace.
+        NodeKind::Space(newlines) => {
+            *at_start |= *newlines > 0;
+
+            if *newlines < 2 {
+                p.eat();
+            } else {
+                p.convert(NodeKind::Parbreak);
+            }
+        }
+
+        // Text and markup.
+        NodeKind::Text(_)
+        | NodeKind::EnDash
+        | NodeKind::EmDash
+        | NodeKind::NonBreakingSpace
+        | NodeKind::Emph
+        | NodeKind::Strong
+        | NodeKind::Linebreak => p.eat(),
+
+        NodeKind::UnicodeEscape(u) => {
+            if !u.terminated {
+                p.convert(NodeKind::Error(
+                    ErrorPosition::End,
+                    "expected closing brace".into(),
+                ));
+                p.unsuccessful();
+                return;
+            }
+
+            if u.character.is_none() {
+                let src = p.peek_src();
+                p.convert(NodeKind::Error(
+                    ErrorPosition::Full,
+                    "invalid unicode escape sequence".into(),
+                ));
+                p.start();
+                p.end(NodeKind::Text(src.into()));
+                return;
+            }
+
+            p.eat();
+        }
+        NodeKind::Raw(r) => {
+            if !r.terminated {
+                p.convert(NodeKind::Error(
+                    ErrorPosition::End,
+                    "expected backtick(s)".into(),
+                ));
+                p.unsuccessful();
+                return;
+            }
+
+            p.eat();
+        }
+
+        NodeKind::Eq if *at_start => heading(p),
+        NodeKind::ListBullet if *at_start => list_node(p),
+        NodeKind::EnumNumbering(_) if *at_start => enum_node(p),
+
+        // Line-based markup that is not currently at the start of the line.
+        NodeKind::Eq | NodeKind::ListBullet | NodeKind::EnumNumbering(_) => {
+            p.convert(NodeKind::Text(p.peek_src().into()))
+        }
+
+        // Hashtag + keyword / identifier.
+        NodeKind::Ident(_)
+        | NodeKind::Let
+        | NodeKind::If
+        | NodeKind::While
+        | NodeKind::For
+        | NodeKind::Import
+        | NodeKind::Include => {
+            let stmt = matches!(token, NodeKind::Let | NodeKind::Import);
+            let group = if stmt { Group::Stmt } else { Group::Expr };
+
+            p.start_group(group, TokenMode::Code);
+            expr_with(p, true, 0);
+            if stmt && p.success() && !p.eof() {
+                p.expected_at("semicolon or line break");
+            }
+            p.end_group();
+        }
+
+        // Block and template.
+        NodeKind::LeftBrace => block(p),
+        NodeKind::LeftBracket => template(p),
+
+        // Comments.
+        NodeKind::LineComment | NodeKind::BlockComment => p.eat(),
+
+        _ => {
+            *at_start = false;
+            p.unexpected();
+        }
+    };
 }
 
 /// Parse a heading.
 fn heading(p: &mut Parser) {
     p.start();
     p.start();
-    p.eat_assert(NodeKind::Eq);
+    p.eat_assert(&NodeKind::Eq);
 
     // Count depth.
     let mut level: usize = 1;
-    while p.eat_if(NodeKind::Eq) {
+    while p.eat_if(&NodeKind::Eq) {
         level += 1;
     }
 
@@ -200,7 +193,7 @@ fn heading(p: &mut Parser) {
 /// Parse a single list item.
 fn list_node(p: &mut Parser) {
     p.start();
-    p.eat_assert(NodeKind::ListBullet);
+    p.eat_assert(&NodeKind::ListBullet);
     let column = p.column(p.prev_end());
     markup_indented(p, column);
     p.end(NodeKind::List);
@@ -209,9 +202,7 @@ fn list_node(p: &mut Parser) {
 /// Parse a single enum item.
 fn enum_node(p: &mut Parser) {
     p.start();
-    if !matches!(p.eat(), Some(NodeKind::EnumNumbering(_))) {
-        panic!("enum item does not start with numbering")
-    };
+    p.eat();
     let column = p.column(p.prev_end());
     markup_indented(p, column);
     p.end(NodeKind::Enum);
@@ -263,7 +254,7 @@ fn expr_with(p: &mut Parser, atomic: bool, min_prec: usize) {
             continue;
         }
 
-        if p.peek() == Some(NodeKind::With) {
+        if p.peek() == Some(&NodeKind::With) {
             with_expr(p, p.child_count() - offset);
 
             if p.may_lift_abort() {
@@ -276,7 +267,7 @@ fn expr_with(p: &mut Parser, atomic: bool, min_prec: usize) {
             break;
         }
 
-        let op = match p.peek().as_ref().and_then(BinOp::from_token) {
+        let op = match p.peek().and_then(BinOp::from_token) {
             Some(binop) => binop,
             None => {
                 p.lift();
@@ -286,10 +277,8 @@ fn expr_with(p: &mut Parser, atomic: bool, min_prec: usize) {
 
         let mut prec = op.precedence();
         if prec < min_prec {
-            {
-                p.lift();
-                break;
-            };
+            p.lift();
+            break;
         }
 
         p.eat();
@@ -324,7 +313,7 @@ fn primary(p: &mut Parser, atomic: bool) {
         p.eat();
 
         // Arrow means this is a closure's lone parameter.
-        if !atomic && p.peek() == Some(NodeKind::Arrow) {
+        if !atomic && p.peek() == Some(&NodeKind::Arrow) {
             p.end_and_start_with(NodeKind::ClosureParams);
             p.eat();
 
@@ -359,10 +348,9 @@
 
 /// Parse a literal.
 fn literal(p: &mut Parser) -> bool {
-    let peeked = if let Some(p) = p.peek() {
-        p
-    } else {
-        return false;
+    let peeked = match p.peek() {
+        Some(x) => x.clone(),
+        None => return false,
     };
 
     match peeked {
@@ -375,18 +363,14 @@ fn literal(p: &mut Parser) -> bool {
         | NodeKind::Fraction(_)
         | NodeKind::Length(_, _)
         | NodeKind::Angle(_, _)
-        | NodeKind::Percentage(_) => {
-            p.eat();
-        }
+        | NodeKind::Percentage(_) => p.eat(),
         NodeKind::Str(s) => {
             p.eat();
             if !s.terminated {
                 p.expected_at("quote");
             }
         }
-        _ => {
-            return false;
-        }
+        _ => return false,
     }
 
     true
@@ -401,7 +385,7 @@ fn parenthesized(p: &mut Parser) {
     let offset = p.child_count();
     p.start();
     p.start_group(Group::Paren, TokenMode::Code);
-    let colon = p.eat_if(NodeKind::Colon);
+    let colon = p.eat_if(&NodeKind::Colon);
     let kind = collection(p).0;
     p.end_group();
     let token_count = p.child_count() - offset;
@@ -414,12 +398,12 @@
     }
 
     // Arrow means this is a closure's parameter list.
-    if p.peek() == Some(NodeKind::Arrow) {
+    if p.peek() == Some(&NodeKind::Arrow) {
         p.start_with(token_count);
         params(p, 0, true);
         p.end(NodeKind::ClosureParams);
 
-        p.eat_assert(NodeKind::Arrow);
+        p.eat_assert(&NodeKind::Arrow);
 
         expr(p);
 
@@ -485,7 +469,7 @@ fn collection(p: &mut Parser) -> (CollectionKind, usize) {
             break;
         }
 
        if p.eat_if(NodeKind::Comma) {
-        if p.eat_if(NodeKind::Comma) {
+        if p.eat_if(&NodeKind::Comma) {
            has_comma = true;
            has_comma = true;
         } else {
             missing_coma = Some(p.child_count());
@@ -518,7 +502,7 @@ enum CollectionItemKind {
 /// Parse an expression or a named pair. Returns if this is a named pair.
 fn item(p: &mut Parser) -> CollectionItemKind {
     p.start();
-    if p.eat_if(NodeKind::Dots) {
+    if p.eat_if(&NodeKind::Dots) {
         expr(p);
 
         p.end_or_abort(NodeKind::ParameterSink);
@@ -531,7 +515,7 @@ fn item(p: &mut Parser) -> CollectionItemKind {
         return CollectionItemKind::Unnamed;
     }
 
-    if p.eat_if(NodeKind::Colon) {
+    if p.eat_if(&NodeKind::Colon) {
         let child = p.child(1).unwrap();
         if matches!(child.kind(), &NodeKind::Ident(_)) {
             expr(p);
@@ -686,9 +670,9 @@ fn args(p: &mut Parser, allow_template: bool) {
 /// Parse a with expression.
 fn with_expr(p: &mut Parser, preserve: usize) {
     p.start_with(preserve);
-    p.eat_assert(NodeKind::With);
+    p.eat_assert(&NodeKind::With);
 
-    if p.peek() == Some(NodeKind::LeftParen) {
+    if p.peek() == Some(&NodeKind::LeftParen) {
         args(p, false);
         p.end(NodeKind::WithExpr);
     } else {
@@ -700,7 +684,7 @@ fn with_expr(p: &mut Parser, preserve: usize) {
 /// Parse a let expression.
 fn let_expr(p: &mut Parser) {
     p.start();
-    p.eat_assert(NodeKind::Let);
+    p.eat_assert(&NodeKind::Let);
 
     let offset = p.child_count();
     ident(p);
@@ -708,7 +692,7 @@ fn let_expr(p: &mut Parser) {
         return;
     }
 
-    if p.peek() == Some(NodeKind::With) {
+    if p.peek() == Some(&NodeKind::With) {
         with_expr(p, p.child_count() - offset);
     } else {
         // If a parenthesis follows, this is a function definition.
@@ -725,7 +709,7 @@ fn let_expr(p: &mut Parser) {
             false
         };
 
-        if p.eat_if(NodeKind::Eq) {
+        if p.eat_if(&NodeKind::Eq) {
             expr(p);
         } else if has_params {
             // Function definitions must have a body.
@@ -749,7 +733,7 @@ fn let_expr(p: &mut Parser) {
 /// Parse an if expresion.
 fn if_expr(p: &mut Parser) {
     p.start();
-    p.eat_assert(NodeKind::If);
+    p.eat_assert(&NodeKind::If);
 
     expr(p);
     if p.may_end_abort(NodeKind::IfExpr) {
@@ -762,8 +746,8 @@ fn if_expr(p: &mut Parser) {
         return;
     }
 
-    if p.eat_if(NodeKind::Else) {
-        if p.peek() == Some(NodeKind::If) {
+    if p.eat_if(&NodeKind::Else) {
+        if p.peek() == Some(&NodeKind::If) {
             if_expr(p);
         } else {
             body(p);
@@ -776,7 +760,7 @@ fn if_expr(p: &mut Parser) {
 /// Parse a while expresion.
 fn while_expr(p: &mut Parser) {
     p.start();
-    p.eat_assert(NodeKind::While);
+    p.eat_assert(&NodeKind::While);
 
     expr(p);
 
@@ -793,7 +777,7 @@ fn while_expr(p: &mut Parser) {
 /// Parse a for expression.
 fn for_expr(p: &mut Parser) {
     p.start();
-    p.eat_assert(NodeKind::For);
+    p.eat_assert(&NodeKind::For);
 
     for_pattern(p);
 
@@ -801,7 +785,7 @@ fn for_expr(p: &mut Parser) {
         return;
     }
 
-    if p.eat_expect(NodeKind::In) {
+    if p.eat_expect(&NodeKind::In) {
         expr(p);
 
         if p.may_end_abort(NodeKind::ForExpr) {
@@ -828,7 +812,7 @@ fn for_pattern(p: &mut Parser) {
         return;
    }
 
-    if p.peek() == Some(NodeKind::Comma) {
+    if p.peek() == Some(&NodeKind::Comma) {
         p.eat();
 
         ident(p);
@@ -844,9 +828,9 @@ fn for_pattern(p: &mut Parser) {
 /// Parse an import expression.
 fn import_expr(p: &mut Parser) {
     p.start();
-    p.eat_assert(NodeKind::Import);
+    p.eat_assert(&NodeKind::Import);
 
-    if !p.eat_if(NodeKind::Star) {
+    if !p.eat_if(&NodeKind::Star) {
         // This is the list of identifiers scenario.
         p.start();
         p.start_group(Group::Imports, TokenMode::Code);
@@ -865,7 +849,7 @@ fn import_expr(p: &mut Parser) {
         p.end(NodeKind::ImportItems);
     };
 
-    if p.eat_expect(NodeKind::From) {
+    if p.eat_expect(&NodeKind::From) {
         expr(p);
     }
 
@@ -875,7 +859,7 @@ fn import_expr(p: &mut Parser) {
 /// Parse an include expression.
 fn include_expr(p: &mut Parser) {
     p.start();
-    p.eat_assert(NodeKind::Include);
+    p.eat_assert(&NodeKind::Include);
 
     expr(p);
     p.end(NodeKind::IncludeExpr);
@@ -883,11 +867,12 @@ fn include_expr(p: &mut Parser) {
 
 /// Parse an identifier.
 fn ident(p: &mut Parser) {
-    if let Some(NodeKind::Ident(_)) = p.peek() {
-        p.eat();
-    } else {
+    match p.peek() {
+        Some(NodeKind::Ident(_)) => p.eat(),
+        _ => {
            p.expected("identifier");
            p.unsuccessful();
+        }
     }
 }
 
@@ -161,7 +161,7 @@ impl<'s> Parser<'s> {
 
         let len = children.iter().map(|c| c.len()).sum();
         self.children
-            .push(GreenNode::with_children(kind, len, children.into_iter()).into());
+            .push(GreenNode::with_children(kind, len, children).into());
         self.children.extend(remains);
         self.success = true;
     }
@@ -240,10 +240,9 @@ impl<'s> Parser<'s> {
     }
 
     pub fn finish(&mut self) -> Rc<GreenNode> {
-        if let Green::Node(n) = self.children.pop().unwrap() {
-            n
-        } else {
-            panic!()
+        match self.children.pop().unwrap() {
+            Green::Node(n) => n,
+            _ => panic!(),
         }
     }
 
@@ -252,16 +251,16 @@ impl<'s> Parser<'s> {
         self.peek().is_none()
     }
 
-    pub fn eat(&mut self) -> Option<NodeKind> {
-        let token = self.peek()?;
-        self.bump();
+    fn eat_peeked(&mut self) -> Option<NodeKind> {
+        let token = self.peek()?.clone();
+        self.eat();
         Some(token)
     }
 
     /// Consume the next token if it is the given one.
-    pub fn eat_if(&mut self, t: NodeKind) -> bool {
+    pub fn eat_if(&mut self, t: &NodeKind) -> bool {
         if self.peek() == Some(t) {
-            self.bump();
+            self.eat();
             true
         } else {
             false
@@ -271,36 +270,36 @@ impl<'s> Parser<'s> {
     /// Consume the next token if the closure maps it a to `Some`-variant.
     pub fn eat_map<T, F>(&mut self, f: F) -> Option<T>
     where
-        F: FnOnce(NodeKind) -> Option<T>,
+        F: FnOnce(&NodeKind) -> Option<T>,
     {
         let token = self.peek()?;
         let mapped = f(token);
         if mapped.is_some() {
-            self.bump();
+            self.eat();
         }
         mapped
     }
 
     /// Consume the next token if it is the given one and produce an error if
     /// not.
-    pub fn eat_expect(&mut self, t: NodeKind) -> bool {
-        let eaten = self.eat_if(t.clone());
+    pub fn eat_expect(&mut self, t: &NodeKind) -> bool {
+        let eaten = self.eat_if(t);
         if !eaten {
-            self.expected_at(&t.to_string());
+            self.expected_at(t.as_str());
         }
         eaten
     }
 
     /// Consume the next token, debug-asserting that it is one of the given ones.
-    pub fn eat_assert(&mut self, t: NodeKind) {
-        let next = self.eat();
-        debug_assert_eq!(next, Some(t));
+    pub fn eat_assert(&mut self, t: &NodeKind) {
+        let next = self.eat_peeked();
+        debug_assert_eq!(next.as_ref(), Some(t));
     }
 
     /// Consume tokens while the condition is true.
     pub fn eat_while<F>(&mut self, mut f: F)
     where
-        F: FnMut(NodeKind) -> bool,
+        F: FnMut(&NodeKind) -> bool,
     {
         while self.peek().map_or(false, |t| f(t)) {
             self.eat();
@@ -308,8 +307,8 @@ impl<'s> Parser<'s> {
     }
 
     /// Peek at the next token without consuming it.
-    pub fn peek(&self) -> Option<NodeKind> {
-        self.peeked.clone()
+    pub fn peek(&self) -> Option<&NodeKind> {
+        self.peeked.as_ref()
     }
 
     /// Peek at the next token if it follows immediately after the last one
@@ -371,9 +370,9 @@ impl<'s> Parser<'s> {
         self.repeek();
 
         match kind {
-            Group::Paren => self.eat_assert(NodeKind::LeftParen),
-            Group::Bracket => self.eat_assert(NodeKind::LeftBracket),
-            Group::Brace => self.eat_assert(NodeKind::LeftBrace),
+            Group::Paren => self.eat_assert(&NodeKind::LeftParen),
+            Group::Bracket => self.eat_assert(&NodeKind::LeftBracket),
+            Group::Brace => self.eat_assert(&NodeKind::LeftBrace),
             Group::Stmt => {}
             Group::Expr => {}
             Group::Imports => {}
@@ -402,11 +401,11 @@ impl<'s> Parser<'s> {
        } {
            if self.next == Some(end.clone()) {
                // Bump the delimeter and return. No need to rescan in this case.
-                self.bump();
+                self.eat();
                rescan = false;
            } else if required {
                self.start();
-                self.abort(format!("expected {}", end.to_string()));
+                self.abort(format!("expected {}", end));
            }
        }
 
@@ -457,21 +456,21 @@ impl<'s> Parser<'s> {
     /// Eat the next token and add an error that it is not the expected `thing`.
     pub fn expected(&mut self, what: &str) {
         self.start();
-        if let Some(found) = self.eat() {
-            self.abort(format!("expected {}, found {}", what, found.to_string()))
-        } else {
+        match self.eat_peeked() {
+            Some(found) => self.abort(format!("expected {}, found {}", what, found)),
+            None => {
                self.lift();
                self.expected_at(what);
+            }
         }
     }
 
     /// Eat the next token and add an error that it is unexpected.
     pub fn unexpected(&mut self) {
         self.start();
-        if let Some(found) = self.eat() {
-            self.abort(format!("unexpected {}", found.to_string()))
-        } else {
-            self.abort("unexpected end of file")
+        match self.eat_peeked() {
+            Some(found) => self.abort(format!("unexpected {}", found)),
+            None => self.abort("unexpected end of file"),
         }
     }
 
@@ -489,7 +488,7 @@ impl<'s> Parser<'s> {
     }
 
     /// Move to the next token.
-    fn bump(&mut self) {
+    pub fn eat(&mut self) {
         self.children.push(
             GreenData::new(
                 self.next.clone().unwrap(),
@@ -511,7 +510,7 @@ impl<'s> Parser<'s> {
         if self.tokens.mode() == TokenMode::Code {
             // Skip whitespace and comments.
             while self.next.as_ref().map_or(false, |x| self.skip_type(x)) {
-                self.bump();
+                self.eat();
             }
         }
 
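The parser changes above all follow the same borrowing convention: `peek` hands out `Option<&NodeKind>` and methods like `eat_if` accept `&NodeKind`, so callers compare against tokens without cloning them. A minimal sketch with a hypothetical, much-simplified parser (not the real `Parser` from the repo):

```rust
#[derive(Debug, Clone, PartialEq)]
enum NodeKind {
    Eq,
    Ident(String),
}

struct Parser {
    tokens: Vec<NodeKind>,
    cursor: usize,
}

impl Parser {
    /// Peek at the next token without consuming it.
    fn peek(&self) -> Option<&NodeKind> {
        self.tokens.get(self.cursor)
    }

    /// Consume the next token if it is the given one.
    fn eat_if(&mut self, t: &NodeKind) -> bool {
        if self.peek() == Some(t) {
            self.cursor += 1;
            true
        } else {
            false
        }
    }
}

fn main() {
    let mut p = Parser {
        tokens: vec![NodeKind::Eq, NodeKind::Ident("x".into())],
        cursor: 0,
    };
    assert!(p.eat_if(&NodeKind::Eq));
    assert!(!p.eat_if(&NodeKind::Eq));
}
```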
@@ -25,11 +25,9 @@ pub fn resolve_string(string: &str) -> EcoString {
             let sequence = s.eat_while(|c| c.is_ascii_hexdigit());
             let _terminated = s.eat_if('}');
 
-            if let Some(c) = resolve_hex(sequence) {
-                out.push(c);
-            } else {
-                // TODO: Feedback that unicode escape sequence is wrong.
-                out.push_str(s.eaten_from(start));
+            match resolve_hex(sequence) {
+                Some(c) => out.push(c),
+                None => out.push_str(s.eaten_from(start)),
             }
         }
 
@@ -224,8 +224,8 @@ impl<'s> Tokens<'s> {
     }
 
     fn backslash(&mut self) -> NodeKind {
-        if let Some(c) = self.s.peek() {
-            match c {
+        match self.s.peek() {
+            Some(c) => match c {
                 // Backslash and comments.
                 '\\' | '/' |
                 // Parenthesis and hashtag.
@@ -247,9 +246,8 @@ impl<'s> Tokens<'s> {
                 }
                 c if c.is_whitespace() => NodeKind::Linebreak,
                 _ => NodeKind::Text("\\".into()),
-            }
-        } else {
-            NodeKind::Linebreak
+            },
+            None => NodeKind::Linebreak,
         }
     }
 
@@ -257,10 +256,9 @@ impl<'s> Tokens<'s> {
     fn hash(&mut self) -> NodeKind {
         if self.s.check_or(false, is_id_start) {
             let read = self.s.eat_while(is_id_continue);
-            if let Some(keyword) = keyword(read) {
-                keyword
-            } else {
-                NodeKind::Ident(read.into())
+            match keyword(read) {
+                Some(keyword) => keyword,
+                None => NodeKind::Ident(read.into()),
             }
         } else {
             NodeKind::Text("#".into())
@@ -148,10 +148,10 @@ impl SourceFile {
     }
 
     pub fn ast(&self) -> TypResult<Markup> {
-        let res = RedNode::new_root(self.root.clone(), self.id);
-        let errors = res.errors();
+        let red = RedNode::new_root(self.root.clone(), self.id);
+        let errors = red.errors();
         if errors.is_empty() {
-            Ok(res.ticket().cast().unwrap())
+            Ok(red.as_ref().cast().unwrap())
         } else {
             Err(Box::new(
                 errors.into_iter().map(|(span, msg)| Error::new(span, msg)).collect(),
@@ -1,4 +1,4 @@
-use super::{Ident, Markup, NodeKind, RedNode, RedTicket, Span, TypedNode};
+use super::{Ident, Markup, NodeKind, RedNode, RedRef, Span, TypedNode};
 use crate::geom::{AngularUnit, LengthUnit};
 use crate::node;
 use crate::util::EcoString;
@@ -85,7 +85,7 @@ impl Expr {
 }
 
 impl TypedNode for Expr {
-    fn cast_from(node: RedTicket) -> Option<Self> {
+    fn cast_from(node: RedRef) -> Option<Self> {
         match node.kind() {
             NodeKind::Ident(_) => Some(Self::Ident(Ident::cast_from(node).unwrap())),
             NodeKind::Array => Some(Self::Array(ArrayExpr::cast_from(node).unwrap())),
@@ -146,18 +146,18 @@ pub enum Lit {
 }
 
 impl TypedNode for Lit {
-    fn cast_from(node: RedTicket) -> Option<Self> {
+    fn cast_from(node: RedRef) -> Option<Self> {
         match node.kind() {
-            NodeKind::None => Some(Self::None(node.own().span())),
-            NodeKind::Auto => Some(Self::Auto(node.own().span())),
-            NodeKind::Bool(b) => Some(Self::Bool(node.own().span(), *b)),
-            NodeKind::Int(i) => Some(Self::Int(node.own().span(), *i)),
-            NodeKind::Float(f) => Some(Self::Float(node.own().span(), *f)),
-            NodeKind::Length(f, unit) => Some(Self::Length(node.own().span(), *f, *unit)),
-            NodeKind::Angle(f, unit) => Some(Self::Angle(node.own().span(), *f, *unit)),
-            NodeKind::Percentage(f) => Some(Self::Percent(node.own().span(), *f)),
-            NodeKind::Fraction(f) => Some(Self::Fractional(node.own().span(), *f)),
-            NodeKind::Str(s) => Some(Self::Str(node.own().span(), s.string.clone())),
+            NodeKind::None => Some(Self::None(node.span())),
+            NodeKind::Auto => Some(Self::Auto(node.span())),
+            NodeKind::Bool(b) => Some(Self::Bool(node.span(), *b)),
+            NodeKind::Int(i) => Some(Self::Int(node.span(), *i)),
+            NodeKind::Float(f) => Some(Self::Float(node.span(), *f)),
+            NodeKind::Length(f, unit) => Some(Self::Length(node.span(), *f, *unit)),
+            NodeKind::Angle(f, unit) => Some(Self::Angle(node.span(), *f, *unit)),
+            NodeKind::Percentage(f) => Some(Self::Percent(node.span(), *f)),
+            NodeKind::Fraction(f) => Some(Self::Fractional(node.span(), *f)),
+            NodeKind::Str(s) => Some(Self::Str(node.span(), s.string.clone())),
             _ => None,
         }
     }
@@ -180,34 +180,34 @@ impl Lit {
     }
 }
 
-node!(
+node! {
     /// An array expression: `(1, "hi", 12cm)`.
     Array => ArrayExpr
-);
+}
 
 impl ArrayExpr {
     /// The array items.
-    pub fn items(&self) -> Vec<Expr> {
-        self.0.children().filter_map(RedTicket::cast).collect()
+    pub fn items<'a>(&'a self) -> impl Iterator<Item = Expr> + 'a {
+        self.0.children().filter_map(RedRef::cast)
     }
 }
 
-node!(
+node! {
     /// A dictionary expression: `(thickness: 3pt, pattern: dashed)`.
     Dict => DictExpr
-);
+}
 
 impl DictExpr {
     /// The named dictionary items.
-    pub fn items(&self) -> Vec<Named> {
-        self.0.children().filter_map(RedTicket::cast).collect()
+    pub fn items<'a>(&'a self) -> impl Iterator<Item = Named> + 'a {
+        self.0.children().filter_map(RedRef::cast)
     }
 }
 
-node!(
+node! {
     /// A pair of a name and an expression: `pattern: dashed`.
     Named
-);
+}
 
 impl Named {
     /// The name: `pattern`.
@@ -219,16 +219,16 @@ impl Named {
     pub fn expr(&self) -> Expr {
         self.0
             .children()
-            .filter_map(RedTicket::cast)
+            .filter_map(RedRef::cast)
             .nth(1)
             .expect("named pair is missing expression")
     }
 }
 
-node!(
+node! {
     /// A template expression: `[*Hi* there!]`.
     Template => TemplateExpr
-);
+}
 
 impl TemplateExpr {
     /// The contents of the template.
@@ -239,10 +239,10 @@ impl TemplateExpr {
     }
 }
 
-node!(
+node! {
     /// A grouped expression: `(1 + 2)`.
     Group => GroupExpr
-);
+}
 
 impl GroupExpr {
     /// The wrapped expression.
@@ -253,22 +253,22 @@ impl GroupExpr {
     }
 }
 
-node!(
+node! {
     /// A block expression: `{ let x = 1; x + 2 }`.
     Block => BlockExpr
-);
+}
 
 impl BlockExpr {
     /// The list of expressions contained in the block.
-    pub fn exprs(&self) -> Vec<Expr> {
-        self.0.children().filter_map(RedTicket::cast).collect()
+    pub fn exprs<'a>(&'a self) -> impl Iterator<Item = Expr> + 'a {
+        self.0.children().filter_map(RedRef::cast)
     }
 }
 
-node!(
+node! {
     /// A unary operation: `-x`.
     Unary => UnaryExpr
-);
+}
 
 impl UnaryExpr {
     /// The operator: `-`.
@@ -298,7 +298,7 @@ pub enum UnOp {
 }
 
 impl TypedNode for UnOp {
-    fn cast_from(node: RedTicket) -> Option<Self> {
+    fn cast_from(node: RedRef) -> Option<Self> {
         Self::from_token(node.kind())
     }
 }
@@ -332,10 +332,10 @@ impl UnOp {
     }
 }
 
-node!(
+node! {
     /// A binary operation: `a + b`.
     Binary => BinaryExpr
-);
+}
 
 impl BinaryExpr {
     /// The binary operator: `+`.
@@ -356,7 +356,7 @@ impl BinaryExpr {
     pub fn rhs(&self) -> Expr {
         self.0
             .children()
-            .filter_map(RedTicket::cast)
+            .filter_map(RedRef::cast)
             .nth(1)
             .expect("binary expression is missing right-hand side")
     }
@@ -402,7 +402,7 @@ pub enum BinOp {
 }
 
 impl TypedNode for BinOp {
-    fn cast_from(node: RedTicket) -> Option<Self> {
+    fn cast_from(node: RedRef) -> Option<Self> {
         Self::from_token(node.kind())
     }
 }
@@ -504,10 +504,10 @@ pub enum Associativity {
     Right,
 }
 
-node!(
+node! {
     /// An invocation of a function: `foo(...)`.
     Call => CallExpr
-);
+}
 
 impl CallExpr {
     /// The function to call.
@@ -523,15 +523,15 @@ impl CallExpr {
     }
 }
 
-node!(
+node! {
     /// The arguments to a function: `12, draw: false`.
     CallArgs
-);
+}
 
 impl CallArgs {
     /// The positional and named arguments.
-    pub fn items(&self) -> Vec<CallArg> {
-        self.0.children().filter_map(RedTicket::cast).collect()
+    pub fn items<'a>(&'a self) -> impl Iterator<Item = CallArg> + 'a {
+        self.0.children().filter_map(RedRef::cast)
     }
 }
 
@@ -547,14 +547,13 @@ pub enum CallArg {
 }
 
 impl TypedNode for CallArg {
-    fn cast_from(node: RedTicket) -> Option<Self> {
+    fn cast_from(node: RedRef) -> Option<Self> {
         match node.kind() {
             NodeKind::Named => Some(CallArg::Named(
                 node.cast().expect("named call argument is missing name"),
             )),
             NodeKind::ParameterSink => Some(CallArg::Spread(
-                node.own()
-                    .cast_first_child()
+                node.cast_first_child()
                     .expect("call argument sink is missing expression"),
             )),
             _ => Some(CallArg::Pos(node.cast()?)),
@@ -573,10 +572,10 @@ impl CallArg {
     }
 }
 
-node!(
+node! {
     /// A closure expression: `(x, y) => z`.
     Closure => ClosureExpr
-);
+}
 
 impl ClosureExpr {
     /// The name of the closure.
@@ -589,15 +588,13 @@ impl ClosureExpr {
     }
 
     /// The parameter bindings.
-    pub fn params(&self) -> Vec<ClosureParam> {
+    pub fn params<'a>(&'a self) -> impl Iterator<Item = ClosureParam> + 'a {
         self.0
             .children()
             .find(|x| x.kind() == &NodeKind::ClosureParams)
             .expect("closure is missing parameter list")
-            .own()
             .children()
-            .filter_map(RedTicket::cast)
-            .collect()
+            .filter_map(RedRef::cast)
     }
 
     /// The body of the closure.
@@ -607,8 +604,8 @@ impl ClosureExpr {
         self.0.cast_last_child().expect("closure is missing body")
     }
 
-    /// The ticket of the body of the closure.
-    pub fn body_ticket(&self) -> RedTicket {
+    /// The red node reference of the body of the closure.
+    pub fn body_ref(&self) -> RedRef {
         self.0
             .children()
             .filter(|x| x.cast::<Expr>().is_some())
@@ -629,17 +626,16 @@ pub enum ClosureParam {
 }
 
 impl TypedNode for ClosureParam {
-    fn cast_from(node: RedTicket) -> Option<Self> {
+    fn cast_from(node: RedRef) -> Option<Self> {
         match node.kind() {
             NodeKind::Ident(i) => {
-                Some(ClosureParam::Pos(Ident::new(i, node.own().span()).unwrap()))
+                Some(ClosureParam::Pos(Ident::new(i, node.span()).unwrap()))
             }
             NodeKind::Named => Some(ClosureParam::Named(
                 node.cast().expect("named closure parameter is missing name"),
             )),
             NodeKind::ParameterSink => Some(ClosureParam::Sink(
-                node.own()
-                    .cast_first_child()
+                node.cast_first_child()
                     .expect("closure parameter sink is missing identifier"),
             )),
             _ => Some(ClosureParam::Pos(node.cast()?)),
@@ -647,10 +643,10 @@ impl TypedNode for ClosureParam {
     }
 }
 
-node!(
+node! {
     /// A with expression: `f with (x, y: 1)`.
     WithExpr
-);
+}
 
 impl WithExpr {
     /// The function to apply the arguments to.
@@ -668,10 +664,10 @@ impl WithExpr {
     }
 }
 
-node!(
+node! {
     /// A let expression: `let x = 1`.
     LetExpr
-);
+}
 
 impl LetExpr {
     /// The binding to assign to.
@@ -693,7 +689,7 @@ impl LetExpr {
     /// The expression the binding is initialized with.
     pub fn init(&self) -> Option<Expr> {
         if self.0.cast_first_child::<Ident>().is_some() {
-            self.0.children().filter_map(RedTicket::cast).nth(1)
+            self.0.children().filter_map(RedRef::cast).nth(1)
         } else {
             Some(
                 self.0
@@ -703,8 +699,9 @@ impl LetExpr {
     }
 
-    /// The ticket for the expression the binding is initialized with.
-    pub fn init_ticket(&self) -> RedTicket {
+    /// The red node reference for the expression the binding is initialized
+    /// with.
+    pub fn init_ref(&self) -> RedRef {
         if self.0.cast_first_child::<Ident>().is_some() {
             self.0.children().filter(|x| x.cast::<Expr>().is_some()).nth(1)
         } else {
@@ -714,10 +711,10 @@ impl LetExpr {
     }
 }
 
-node!(
+node! {
     /// An import expression: `import a, b, c from "utils.typ"`.
     ImportExpr
-);
+}
 
 impl ImportExpr {
     /// The items to be imported.
@@ -745,11 +742,11 @@ pub enum Imports {
 }
 
 impl TypedNode for Imports {
-    fn cast_from(node: RedTicket) -> Option<Self> {
+    fn cast_from(node: RedRef) -> Option<Self> {
         match node.kind() {
             NodeKind::Star => Some(Imports::Wildcard),
             NodeKind::ImportItems => {
-                let idents = node.own().children().filter_map(RedTicket::cast).collect();
+                let idents = node.children().filter_map(RedRef::cast).collect();
                 Some(Imports::Idents(idents))
             }
             _ => None,
@@ -757,10 +754,10 @@ impl TypedNode for Imports {
     }
 }
 
-node!(
+node! {
     /// An include expression: `include "chapter1.typ"`.
     IncludeExpr
-);
+}
 
 impl IncludeExpr {
     /// The location of the file to be included.
@@ -771,10 +768,10 @@ impl IncludeExpr {
     }
 }
 
-node!(
+node! {
     /// An if-else expression: `if x { y } else { z }`.
     IfExpr
-);
+}
 
 impl IfExpr {
     /// The condition which selects the body to evaluate.
@@ -788,21 +785,21 @@ impl IfExpr {
     pub fn if_body(&self) -> Expr {
         self.0
             .children()
-            .filter_map(RedTicket::cast)
+            .filter_map(RedRef::cast)
             .nth(1)
             .expect("if expression is missing if body")
     }
 
     /// The expression to evaluate if the condition is false.
     pub fn else_body(&self) -> Option<Expr> {
-        self.0.children().filter_map(RedTicket::cast).nth(2)
+        self.0.children().filter_map(RedRef::cast).nth(2)
     }
 }
 
-node!(
+node! {
     /// A while loop expression: `while x { y }`.
     WhileExpr
-);
+}
 
 impl WhileExpr {
     /// The condition which selects whether to evaluate the body.
@ -816,16 +813,16 @@ impl WhileExpr {
|
|||||||
pub fn body(&self) -> Expr {
|
pub fn body(&self) -> Expr {
|
||||||
self.0
|
self.0
|
||||||
.children()
|
.children()
|
||||||
.filter_map(RedTicket::cast)
|
.filter_map(RedRef::cast)
|
||||||
.nth(1)
|
.nth(1)
|
||||||
.expect("while loop expression is missing body")
|
.expect("while loop expression is missing body")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
node!(
|
node! {
|
||||||
/// A for loop expression: `for x in y { z }`.
|
/// A for loop expression: `for x in y { z }`.
|
||||||
ForExpr
|
ForExpr
|
||||||
);
|
}
|
||||||
|
|
||||||
impl ForExpr {
|
impl ForExpr {
|
||||||
/// The pattern to assign to.
|
/// The pattern to assign to.
|
||||||
@ -846,13 +843,13 @@ impl ForExpr {
|
|||||||
pub fn body(&self) -> Expr {
|
pub fn body(&self) -> Expr {
|
||||||
self.0
|
self.0
|
||||||
.children()
|
.children()
|
||||||
.filter_map(RedTicket::cast)
|
.filter_map(RedRef::cast)
|
||||||
.last()
|
.last()
|
||||||
.expect("for loop expression is missing body")
|
.expect("for loop expression is missing body")
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The ticket for the expression to evaluate for each iteration.
|
/// The red node reference for the expression to evaluate for each iteration.
|
||||||
pub fn body_ticket(&self) -> RedTicket {
|
pub fn body_ref(&self) -> RedRef {
|
||||||
self.0
|
self.0
|
||||||
.children()
|
.children()
|
||||||
.filter(|x| x.cast::<Expr>().is_some())
|
.filter(|x| x.cast::<Expr>().is_some())
|
||||||
@ -861,14 +858,14 @@ impl ForExpr {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
node!(
|
node! {
|
||||||
/// A for-in loop expression: `for x in y { z }`.
|
/// A for-in loop expression: `for x in y { z }`.
|
||||||
ForPattern
|
ForPattern
|
||||||
);
|
}
|
||||||
|
|
||||||
impl ForPattern {
|
impl ForPattern {
|
||||||
pub fn key(&self) -> Option<Ident> {
|
pub fn key(&self) -> Option<Ident> {
|
||||||
let mut items: Vec<_> = self.0.children().filter_map(RedTicket::cast).collect();
|
let mut items: Vec<_> = self.0.children().filter_map(RedRef::cast).collect();
|
||||||
if items.len() > 1 { Some(items.remove(0)) } else { None }
|
if items.len() > 1 { Some(items.remove(0)) } else { None }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
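Note (not part of the commit): the accessors above all share one idiom: iterate the node's children, keep only those that cast to the wanted typed node, and take the n-th (or last) match. A minimal self-contained sketch of that idiom, using made-up toy types (Kind, Node and cast_number are illustrative, not the typst API):

// Illustrative only: the "n-th castable child" idiom used by if_body,
// else_body and body above, demonstrated on a toy tree.
#[derive(Debug, Clone)]
enum Kind {
    If,
    Ident(String),
    Number(f64),
}

#[derive(Debug, Clone)]
struct Node {
    kind: Kind,
    children: Vec<Node>,
}

/// A typed "cast" in the spirit of TypedNode::cast_from.
fn cast_number(node: &Node) -> Option<f64> {
    match node.kind {
        Kind::Number(n) => Some(n),
        _ => None,
    }
}

/// Pick the n-th child that casts to a number, like `.filter_map(cast).nth(n)`.
fn nth_number(node: &Node, n: usize) -> Option<f64> {
    node.children.iter().filter_map(cast_number).nth(n)
}

fn main() {
    let node = Node {
        kind: Kind::If,
        children: vec![
            Node { kind: Kind::Ident("x".into()), children: vec![] },
            Node { kind: Kind::Number(1.0), children: vec![] },
            Node { kind: Kind::Number(2.0), children: vec![] },
        ],
    };
    assert_eq!(nth_number(&node, 0), Some(1.0));
    assert_eq!(nth_number(&node, 1), Some(2.0));
}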
@@ -3,7 +3,7 @@ use std::ops::Deref;

 use unicode_xid::UnicodeXID;

-use super::{NodeKind, RedTicket, Span, TypedNode};
+use super::{NodeKind, RedRef, Span, TypedNode};
 use crate::util::EcoString;

 /// An unicode identifier with a few extra permissible characters.
@@ -67,11 +67,10 @@ impl From<&Ident> for EcoString {
 }

 impl TypedNode for Ident {
-    fn cast_from(node: RedTicket) -> Option<Self> {
-        if let NodeKind::Ident(i) = node.kind() {
-            Some(Ident::new(i, node.own().span()).unwrap())
-        } else {
-            None
+    fn cast_from(node: RedRef) -> Option<Self> {
+        match node.kind() {
+            NodeKind::Ident(i) => Some(Ident::new(i, node.span()).unwrap()),
+            _ => None,
         }
     }
 }
@@ -1,4 +1,4 @@
-use super::{Expr, Ident, NodeKind, RedNode, RedTicket, Span, TypedNode};
+use super::{Expr, Ident, NodeKind, RedNode, RedRef, Span, TypedNode};
 use crate::node;
 use crate::util::EcoString;
 use std::fmt::Write;
@@ -7,12 +7,12 @@ use std::fmt::Write;
 pub type Markup = Vec<MarkupNode>;

 impl TypedNode for Markup {
-    fn cast_from(node: RedTicket) -> Option<Self> {
+    fn cast_from(node: RedRef) -> Option<Self> {
         if node.kind() != &NodeKind::Markup {
             return None;
         }

-        let children = node.own().children().filter_map(TypedNode::cast_from).collect();
+        let children = node.children().filter_map(TypedNode::cast_from).collect();
         Some(children)
     }
 }
@@ -45,7 +45,7 @@ pub enum MarkupNode {
 }

 impl TypedNode for MarkupNode {
-    fn cast_from(node: RedTicket) -> Option<Self> {
+    fn cast_from(node: RedRef) -> Option<Self> {
         match node.kind() {
             NodeKind::Space(_) => Some(MarkupNode::Space),
             NodeKind::Linebreak => Some(MarkupNode::Linebreak),
@@ -53,15 +53,14 @@ impl TypedNode for MarkupNode {
             NodeKind::Strong => Some(MarkupNode::Strong),
             NodeKind::Emph => Some(MarkupNode::Emph),
             NodeKind::Text(s) => Some(MarkupNode::Text(s.clone())),
-            NodeKind::UnicodeEscape(u) => {
-                Some(MarkupNode::Text(if let Some(s) = u.character {
-                    s.into()
-                } else {
+            NodeKind::UnicodeEscape(u) => Some(MarkupNode::Text(match u.character {
+                Some(c) => c.into(),
+                None => {
                     let mut eco = EcoString::with_capacity(u.sequence.len() + 4);
                     write!(&mut eco, "\\u{{{}}}", u.sequence).unwrap();
                     eco
-                }))
-            }
+                }
+            })),
             NodeKind::EnDash => Some(MarkupNode::Text(EcoString::from("\u{2013}"))),
             NodeKind::EmDash => Some(MarkupNode::Text(EcoString::from("\u{2014}"))),
             NodeKind::NonBreakingSpace => {
@@ -93,28 +92,29 @@ pub struct RawNode {
 }

 impl TypedNode for RawNode {
-    fn cast_from(node: RedTicket) -> Option<Self> {
-        if let NodeKind::Raw(raw) = node.kind() {
-            let span = node.own().span();
-            let start = span.start + raw.backticks as usize;
-            Some(Self {
-                block: raw.block,
-                lang: raw.lang.as_ref().and_then(|x| {
-                    let span = Span::new(span.source, start, start + x.len());
-                    Ident::new(x, span)
-                }),
-                text: raw.text.clone(),
-            })
-        } else {
-            None
+    fn cast_from(node: RedRef) -> Option<Self> {
+        match node.kind() {
+            NodeKind::Raw(raw) => {
+                let span = node.span();
+                let start = span.start + raw.backticks as usize;
+                Some(Self {
+                    block: raw.block,
+                    lang: raw.lang.as_ref().and_then(|x| {
+                        let span = Span::new(span.source, start, start + x.len());
+                        Ident::new(x, span)
+                    }),
+                    text: raw.text.clone(),
+                })
+            }
+            _ => None,
         }
     }
 }

-node!(
+node! {
     /// A section heading: `= Introduction`.
     Heading => HeadingNode
-);
+}

 impl HeadingNode {
     /// The contents of the heading.
@@ -125,30 +125,21 @@ impl HeadingNode {
     }

     /// The section depth (numer of equals signs).
-    pub fn level(&self) -> HeadingLevel {
+    pub fn level(&self) -> u8 {
         self.0
-            .cast_first_child()
+            .children()
+            .find_map(|node| match node.kind() {
+                NodeKind::HeadingLevel(heading) => Some(*heading),
+                _ => None,
+            })
             .expect("heading node is missing heading level")
     }
 }

-#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
-pub struct HeadingLevel(pub usize);
-
-impl TypedNode for HeadingLevel {
-    fn cast_from(node: RedTicket) -> Option<Self> {
-        if let NodeKind::HeadingLevel(l) = node.kind() {
-            Some(Self((*l).into()))
-        } else {
-            None
-        }
-    }
-}
-
-node!(
+node! {
     /// An item in an unordered list: `- ...`.
     List => ListNode
-);
+}

 impl ListNode {
     /// The contents of the list item.
@@ -157,10 +148,10 @@ impl ListNode {
     }
 }

-node!(
+node! {
     /// An item in an enumeration (ordered list): `1. ...`.
     Enum => EnumNode
-);
+}

 impl EnumNode {
     /// The contents of the list item.
@@ -169,20 +160,13 @@ impl EnumNode {
     }

     /// The number, if any.
-    pub fn number(&self) -> EnumNumber {
-        self.0.cast_first_child().expect("enumeration node is missing number")
-    }
-}
-
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
-pub struct EnumNumber(pub Option<usize>);
-
-impl TypedNode for EnumNumber {
-    fn cast_from(node: RedTicket) -> Option<Self> {
-        if let NodeKind::EnumNumbering(x) = node.kind() {
-            Some(Self(*x))
-        } else {
-            None
-        }
+    pub fn number(&self) -> Option<usize> {
+        self.0
+            .children()
+            .find_map(|node| match node.kind() {
+                NodeKind::EnumNumbering(num) => Some(num.clone()),
+                _ => None,
+            })
+            .expect("enumeration node is missing number")
     }
 }
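Note (not part of the commit): with the HeadingLevel and EnumNumber wrappers removed, level() and number() now read their value straight out of a child's kind with find_map. A self-contained sketch of that lookup on a toy tree (all names here are made up for illustration):

// Illustrative only: extracting a field by scanning child kinds with find_map,
// in the style of HeadingNode::level and EnumNode::number above.
enum Kind {
    Heading,
    HeadingLevel(u8),
    Text(String),
}

struct Node {
    kind: Kind,
    children: Vec<Node>,
}

fn level(heading: &Node) -> u8 {
    heading
        .children
        .iter()
        .find_map(|node| match node.kind {
            Kind::HeadingLevel(level) => Some(level),
            _ => None,
        })
        .expect("heading node is missing heading level")
}

fn main() {
    let heading = Node {
        kind: Kind::Heading,
        children: vec![
            Node { kind: Kind::HeadingLevel(2), children: vec![] },
            Node { kind: Kind::Text("Intro".into()), children: vec![] },
        ],
    };
    assert_eq!(level(&heading), 2);
}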
@@ -160,8 +160,6 @@ pub enum NodeKind {
     ///
     /// The comment can contain nested block comments.
     BlockComment,
-    /// A node that should never appear in a finished tree.
-    Never,
     /// Tokens that appear in the wrong place.
     Error(ErrorPosition, EcoString),
     /// Template markup.
@@ -246,7 +244,41 @@ pub enum ErrorPosition {

 impl Display for NodeKind {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        f.pad(match self {
+        f.pad(self.as_str())
+    }
+}
+
+impl NodeKind {
+    pub fn is_parenthesis(&self) -> bool {
+        match self {
+            Self::LeftParen => true,
+            Self::RightParen => true,
+            _ => false,
+        }
+    }
+
+    pub fn is_bracket(&self) -> bool {
+        match self {
+            Self::LeftBracket => true,
+            Self::RightBracket => true,
+            _ => false,
+        }
+    }
+
+    pub fn is_brace(&self) -> bool {
+        match self {
+            Self::LeftBrace => true,
+            Self::RightBrace => true,
+            _ => false,
+        }
+    }
+
+    pub fn is_error(&self) -> bool {
+        matches!(self, NodeKind::Error(_, _))
+    }
+
+    pub fn as_str(&self) -> &'static str {
+        match self {
             Self::LeftBracket => "opening bracket",
             Self::RightBracket => "closing bracket",
             Self::LeftBrace => "opening brace",
@@ -296,7 +328,6 @@ impl Display for NodeKind {
             Self::Math(_) => "math formula",
             Self::EnumNumbering(_) => "numbering",
             Self::Str(_) => "string",
-            Self::Never => "a node that should not be here",
             Self::LineComment => "line comment",
             Self::BlockComment => "block comment",
             Self::Markup => "markup",
@@ -348,45 +379,15 @@ impl Display for NodeKind {
                 "*/" => "end of block comment",
                 _ => "invalid token",
             },
-        })
-    }
-}
-
-impl NodeKind {
-    pub fn is_parenthesis(&self) -> bool {
-        match self {
-            Self::LeftParen => true,
-            Self::RightParen => true,
-            _ => false,
         }
     }
-
-    pub fn is_bracket(&self) -> bool {
-        match self {
-            Self::LeftBracket => true,
-            Self::RightBracket => true,
-            _ => false,
-        }
-    }
-
-    pub fn is_brace(&self) -> bool {
-        match self {
-            Self::LeftBrace => true,
-            Self::RightBrace => true,
-            _ => false,
-        }
-    }
-
-    pub fn is_error(&self) -> bool {
-        matches!(self, NodeKind::Never | NodeKind::Error(_, _))
-    }
 }

 /// A syntactical node.
 #[derive(Clone, PartialEq)]
 pub struct GreenNode {
     /// Node metadata.
-    meta: GreenData,
+    data: GreenData,
     /// This node's children, losslessly make up this node.
     children: Vec<Green>,
 }
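Note (not part of the commit): the refactor above splits the big display match into NodeKind::as_str and lets Display::fmt delegate to it through f.pad, which keeps width and alignment format flags working. A small illustrative sketch of the same split on a hypothetical Token enum:

// Illustrative only: Display delegating to an `as_str` helper via `f.pad`,
// mirroring the NodeKind change above.
use std::fmt::{self, Display, Formatter};

enum Token {
    LeftParen,
    RightParen,
}

impl Token {
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::LeftParen => "opening paren",
            Self::RightParen => "closing paren",
        }
    }
}

impl Display for Token {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // `pad` respects width and alignment flags, unlike a bare write.
        f.pad(self.as_str())
    }
}

fn main() {
    assert_eq!(Token::LeftParen.to_string(), "opening paren");
    println!("{:>15}", Token::RightParen);
}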
@@ -400,12 +401,12 @@ pub struct GreenData {
     /// The byte length of the node in the source.
     len: usize,
     /// Whether this node or any of its children are erroneous.
-    has_error: bool,
+    erroneous: bool,
 }

 impl GreenData {
     pub fn new(kind: NodeKind, len: usize) -> Self {
-        Self { len, has_error: kind.is_error(), kind }
+        Self { len, erroneous: kind.is_error(), kind }
     }

     pub fn kind(&self) -> &NodeKind {
@@ -416,8 +417,8 @@ impl GreenData {
         self.len
     }

-    pub fn has_error(&self) -> bool {
-        self.has_error
+    pub fn erroneous(&self) -> bool {
+        self.erroneous
     }
 }

@@ -437,23 +438,23 @@ pub enum Green {
 }

 impl Green {
-    fn meta(&self) -> &GreenData {
+    fn data(&self) -> &GreenData {
         match self {
             Green::Token(t) => &t,
-            Green::Node(n) => &n.meta,
+            Green::Node(n) => &n.data,
         }
     }

     pub fn kind(&self) -> &NodeKind {
-        self.meta().kind()
+        self.data().kind()
     }

     pub fn len(&self) -> usize {
-        self.meta().len()
+        self.data().len()
     }

-    pub fn has_error(&self) -> bool {
-        self.meta().has_error()
+    pub fn erroneous(&self) -> bool {
+        self.data().erroneous()
     }

     pub fn children(&self) -> &[Green] {
@@ -467,29 +468,19 @@ impl Green {
 impl GreenNode {
     pub fn new(kind: NodeKind, len: usize) -> Self {
         Self {
-            meta: GreenData::new(kind, len),
+            data: GreenData::new(kind, len),
             children: Vec::new(),
         }
     }

-    pub fn with_children(
-        kind: NodeKind,
-        len: usize,
-        children: impl Iterator<Item = impl Into<Green>>,
-    ) -> Self {
+    pub fn with_children(kind: NodeKind, len: usize, children: Vec<Green>) -> Self {
         let mut meta = GreenData::new(kind, len);
-        let children = children
-            .map(|x| {
-                let x = x.into();
-                meta.has_error |= x.has_error();
-                x
-            })
-            .collect();
-        Self { meta, children }
+        meta.erroneous |= children.iter().any(|c| c.erroneous());
+        Self { data: meta, children }
     }

     pub fn with_child(kind: NodeKind, len: usize, child: impl Into<Green>) -> Self {
-        Self::with_children(kind, len, std::iter::once(child.into()))
+        Self::with_children(kind, len, vec![child.into()])
     }

     pub fn children(&self) -> &[Green] {
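Note (not part of the commit): with_children now takes the children as a plain Vec and folds their erroneous flags into the parent's metadata with a single iterator pass instead of a collecting closure. A self-contained sketch of that flag propagation with made-up types:

// Illustrative only: or-ing an `erroneous` flag over a Vec of children,
// in the spirit of GreenNode::with_children above.
struct Data {
    erroneous: bool,
}

struct Node {
    data: Data,
    children: Vec<Node>,
}

impl Node {
    fn leaf(erroneous: bool) -> Self {
        Self { data: Data { erroneous }, children: vec![] }
    }

    /// Taking the children by Vec lets the flag be computed in one pass.
    fn with_children(mut data: Data, children: Vec<Node>) -> Self {
        data.erroneous |= children.iter().any(|c| c.data.erroneous);
        Self { data, children }
    }
}

fn main() {
    let parent = Node::with_children(
        Data { erroneous: false },
        vec![Node::leaf(false), Node::leaf(true)],
    );
    assert!(parent.data.erroneous);
    assert_eq!(parent.children.len(), 2);
}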
@@ -511,7 +502,7 @@ impl From<Rc<GreenNode>> for Green {

 impl Default for Green {
     fn default() -> Self {
-        Self::Token(GreenData::new(NodeKind::Never, 0))
+        Self::Token(GreenData::new(NodeKind::None, 0))
     }
 }

@@ -530,13 +521,13 @@ impl Debug for Green {
 }

 #[derive(Copy, Clone, PartialEq)]
-pub struct RedTicket<'a> {
+pub struct RedRef<'a> {
     id: SourceId,
     offset: usize,
     green: &'a Green,
 }

-impl<'a> RedTicket<'a> {
+impl<'a> RedRef<'a> {
     pub fn own(self) -> RedNode {
         RedNode {
             id: self.id,
@@ -549,6 +540,9 @@ impl<'a> RedTicket<'a> {
         self.green.kind()
     }

+    pub fn span(&self) -> Span {
+        Span::new(self.id, self.offset, self.offset + self.green.len())
+    }
+
     pub fn cast<T>(self) -> Option<T>
     where
@@ -556,6 +550,37 @@ impl<'a> RedTicket<'a> {
     {
         T::cast_from(self)
     }
+
+    pub fn erroneous(&self) -> bool {
+        self.green.erroneous()
+    }
+
+    pub fn children(self) -> impl Iterator<Item = RedRef<'a>> + Clone {
+        let children = match &self.green {
+            Green::Node(node) => node.children(),
+            Green::Token(_) => &[],
+        };
+
+        let mut offset = self.offset;
+        children.iter().map(move |green| {
+            let child_offset = offset;
+            offset += green.len();
+            RedRef { id: self.id, offset: child_offset, green }
+        })
+    }
+
+    pub(crate) fn typed_child(&self, kind: &NodeKind) -> Option<RedRef> {
+        self.children()
+            .find(|x| mem::discriminant(x.kind()) == mem::discriminant(kind))
+    }
+
+    pub(crate) fn cast_first_child<T: TypedNode>(&self) -> Option<T> {
+        self.children().find_map(RedRef::cast)
+    }
+
+    pub(crate) fn cast_last_child<T: TypedNode>(&self) -> Option<T> {
+        self.children().filter_map(RedRef::cast).last()
+    }
 }

 #[derive(Clone, PartialEq)]
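Note (not part of the commit): RedRef::children derives each child's absolute position lazily, by accumulating an offset over the stored lengths while iterating, so no owned RedNode has to be built. A sketch of the offset accumulation on a simplified green tree (types here are illustrative):

// Illustrative only: computing absolute child spans from relative lengths by
// accumulating an offset, as RedRef::children does above.
struct Green {
    len: usize,
    children: Vec<Green>,
}

#[derive(Debug, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

/// Each child's span starts where the previous one ended.
fn child_spans(mut offset: usize, node: &Green) -> Vec<Span> {
    node.children
        .iter()
        .map(|child| {
            let start = offset;
            offset += child.len;
            Span { start, end: start + child.len }
        })
        .collect()
}

fn main() {
    let node = Green {
        len: 10,
        children: vec![
            Green { len: 3, children: vec![] },
            Green { len: 7, children: vec![] },
        ],
    };
    assert_eq!(node.children.iter().map(|c| c.len).sum::<usize>(), node.len);
    assert_eq!(
        child_spans(5, &node),
        vec![Span { start: 5, end: 8 }, Span { start: 8, end: 15 }]
    );
}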
@@ -571,7 +596,7 @@ impl RedNode {
     }

     pub fn span(&self) -> Span {
-        Span::new(self.id, self.offset, self.offset + self.green.len())
+        self.as_ref().span()
     }

     pub fn len(&self) -> usize {
@@ -582,53 +607,36 @@ impl RedNode {
         self.green.kind()
     }

-    pub fn children<'a>(&'a self) -> impl Iterator<Item = RedTicket<'a>> + Clone + 'a {
-        let children = match &self.green {
-            Green::Node(node) => node.children(),
-            Green::Token(_) => &[],
-        };
-
-        let mut offset = self.offset;
-        children.iter().map(move |green_child| {
-            let child_offset = offset;
-            offset += green_child.len();
-            RedTicket {
-                id: self.id,
-                offset: child_offset,
-                green: &green_child,
-            }
-        })
-    }
-
-    pub fn has_error(&self) -> bool {
-        self.green.has_error()
+    pub fn children<'a>(&'a self) -> impl Iterator<Item = RedRef<'a>> + Clone {
+        self.as_ref().children()
     }

     pub fn errors(&self) -> Vec<(Span, EcoString)> {
-        if !self.green.has_error() {
+        if !self.green.erroneous() {
             return vec![];
         }

-        if let NodeKind::Error(pos, msg) = self.kind() {
-            let span = match pos {
-                ErrorPosition::Start => self.span().at_start(),
-                ErrorPosition::Full => self.span(),
-                ErrorPosition::End => self.span().at_end(),
-            };
-
-            vec![(span, msg.clone())]
-        } else if let NodeKind::Never = self.kind() {
-            vec![(self.span(), "found a never node".into())]
-        } else {
-            self.children()
-                .filter(|ticket| ticket.green.has_error())
-                .flat_map(|ticket| ticket.own().errors())
-                .collect()
+        match self.kind() {
+            NodeKind::Error(pos, msg) => {
+                let span = match pos {
+                    ErrorPosition::Start => self.span().at_start(),
+                    ErrorPosition::Full => self.span(),
+                    ErrorPosition::End => self.span().at_end(),
+                };
+
+                vec![(span, msg.clone())]
+            }
+            _ => self
+                .as_ref()
+                .children()
+                .filter(|red| red.green.erroneous())
+                .flat_map(|red| red.own().errors())
+                .collect(),
         }
     }

-    pub fn ticket<'a>(&'a self) -> RedTicket<'a> {
-        RedTicket {
+    pub fn as_ref<'a>(&'a self) -> RedRef<'a> {
+        RedRef {
             id: self.id,
             offset: self.offset,
             green: &self.green,
@@ -636,28 +644,26 @@ impl RedNode {
     }

     pub(crate) fn typed_child(&self, kind: &NodeKind) -> Option<RedNode> {
-        self.children()
-            .find(|x| mem::discriminant(x.kind()) == mem::discriminant(kind))
-            .map(RedTicket::own)
+        self.as_ref().typed_child(kind).map(RedRef::own)
     }

     pub(crate) fn cast_first_child<T: TypedNode>(&self) -> Option<T> {
-        self.children().find_map(RedTicket::cast)
+        self.as_ref().cast_first_child()
     }

     pub(crate) fn cast_last_child<T: TypedNode>(&self) -> Option<T> {
-        self.children().filter_map(RedTicket::cast).last()
+        self.as_ref().cast_last_child()
     }
 }

 impl Debug for RedNode {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
         write!(f, "{:?}: {:?}", self.kind(), self.span())?;
-        let children = self.children().collect::<Vec<_>>();
+        let children = self.as_ref().children().collect::<Vec<_>>();
         if !children.is_empty() {
             f.write_str(" ")?;
             f.debug_list()
-                .entries(children.into_iter().map(RedTicket::own))
+                .entries(children.into_iter().map(RedRef::own))
                 .finish()?;
         }
         Ok(())
@@ -666,21 +672,22 @@ impl Debug for RedNode {

 pub trait TypedNode: Sized {
     /// Performs the conversion.
-    fn cast_from(value: RedTicket) -> Option<Self>;
+    fn cast_from(value: RedRef) -> Option<Self>;
 }

 #[macro_export]
 macro_rules! node {
-    (#[doc = $doc:expr] $name:ident) => {
-        node!(#[doc = $doc] $name => $name);
+    ($(#[$attr:meta])* $name:ident) => {
+        node!{$(#[$attr])* $name => $name}
     };
-    (#[doc = $doc:expr] $variant:ident => $name:ident) => {
-        #[doc = $doc]
+    ($(#[$attr:meta])* $variant:ident => $name:ident) => {
         #[derive(Debug, Clone, PartialEq)]
+        #[repr(transparent)]
+        $(#[$attr])*
         pub struct $name(RedNode);

         impl TypedNode for $name {
-            fn cast_from(node: RedTicket) -> Option<Self> {
+            fn cast_from(node: RedRef) -> Option<Self> {
                 if node.kind() != &NodeKind::$variant {
                     return None;
                 }
@@ -694,8 +701,8 @@ macro_rules! node {
                 self.0.span()
             }

-            pub fn underlying(&self) -> RedTicket {
-                self.0.ticket()
+            pub fn underlying(&self) -> RedRef {
+                self.0.as_ref()
             }
         }
     };
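Note (not part of the commit): the node! macro now forwards arbitrary attributes, including doc comments, via $(#[$attr:meta])* and marks the wrapper as #[repr(transparent)]. A self-contained sketch of a macro in that style (typed_node!, RawNode and Heading are hypothetical names, not the actual macro from the commit):

// Illustrative only: a declarative macro that forwards doc comments and other
// attributes onto a transparent newtype wrapper.
#[derive(Debug, Clone, PartialEq)]
struct RawNode(String);

macro_rules! typed_node {
    ($(#[$attr:meta])* $name:ident) => {
        $(#[$attr])*
        #[derive(Debug, Clone, PartialEq)]
        #[repr(transparent)]
        pub struct $name(RawNode);

        impl $name {
            pub fn text(&self) -> &str {
                &(self.0).0
            }
        }
    };
}

typed_node! {
    /// A section heading.
    Heading
}

fn main() {
    let heading = Heading(RawNode("= Intro".into()));
    assert_eq!(heading.text(), "= Intro");
}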
@@ -46,20 +46,25 @@ impl Printer {
         Write::write_fmt(self, fmt)
     }

-    /// Write a list of items joined by a joiner.
-    pub fn join<T, I, F>(&mut self, items: I, joiner: &str, mut write_item: F)
+    /// Write a list of items joined by a joiner and return how many there were.
+    pub fn join<T, I, F>(&mut self, items: I, joiner: &str, mut write_item: F) -> usize
     where
         I: IntoIterator<Item = T>,
         F: FnMut(T, &mut Self),
     {
+        let mut count = 0;
         let mut iter = items.into_iter();
         if let Some(first) = iter.next() {
             write_item(first, self);
+            count += 1;
         }
         for item in iter {
             self.push_str(joiner);
             write_item(item, self);
+            count += 1;
         }
+
+        count
     }

     /// Finish pretty printing and return the underlying buffer.
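Note (not part of the commit): Printer::join now reports how many items it wrote, so callers like the ArrayExpr pretty-printer below can decide on a trailing comma without collecting the iterator first. A self-contained sketch of that contract with a minimal Printer:

// Illustrative only: a join helper that returns the item count, used to emit
// the trailing comma for one-element arrays.
struct Printer {
    buf: String,
}

impl Printer {
    fn push_str(&mut self, s: &str) {
        self.buf.push_str(s);
    }

    fn join<T, I, F>(&mut self, items: I, joiner: &str, mut write_item: F) -> usize
    where
        I: IntoIterator<Item = T>,
        F: FnMut(T, &mut Self),
    {
        let mut count = 0;
        let mut iter = items.into_iter();
        if let Some(first) = iter.next() {
            write_item(first, self);
            count += 1;
        }
        for item in iter {
            self.push_str(joiner);
            write_item(item, self);
            count += 1;
        }
        count
    }
}

fn main() {
    let mut p = Printer { buf: String::new() };
    p.push_str("(");
    // A one-element array needs the trailing comma: `(1,)`.
    let len = p.join([1].iter(), ", ", |item, p| p.push_str(&item.to_string()));
    if len == 1 {
        p.push_str(",");
    }
    p.push_str(")");
    assert_eq!(p.buf, "(1,)");
}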
@@ -165,7 +170,7 @@ impl Pretty for RawNode {

 impl Pretty for HeadingNode {
     fn pretty(&self, p: &mut Printer) {
-        for _ in 0 .. self.level().0 {
+        for _ in 0 .. self.level() {
             p.push('=');
         }
         p.push(' ');
@@ -182,7 +187,7 @@ impl Pretty for ListNode {

 impl Pretty for EnumNode {
     fn pretty(&self, p: &mut Printer) {
-        if let Some(number) = self.number().0 {
+        if let Some(number) = self.number() {
             write!(p, "{}", number).unwrap();
         }
         p.push_str(". ");
@@ -237,8 +242,8 @@ impl Pretty for ArrayExpr {
         p.push('(');

         let items = self.items();
-        p.join(&items, ", ", |item, p| item.pretty(p));
-        if items.len() == 1 {
+        let len = p.join(items, ", ", |item, p| item.pretty(p));
+        if len == 1 {
             p.push(',');
         }
         p.push(')');
@@ -249,11 +254,11 @@ impl Pretty for DictExpr {
     fn pretty(&self, p: &mut Printer) {
         p.push('(');

-        let items = self.items();
-        if items.is_empty() {
+        let mut items = self.items().peekable();
+        if items.peek().is_none() {
             p.push(':');
         } else {
-            p.join(&items, ", ", |named, p| named.pretty(p));
+            p.join(items, ", ", |named, p| named.pretty(p));
         }
         p.push(')');
     }
@@ -287,7 +292,7 @@ impl Pretty for BlockExpr {
     fn pretty(&self, p: &mut Printer) {
         p.push('{');

-        let exprs = self.exprs();
+        let exprs: Vec<_> = self.exprs().collect();
         if exprs.len() > 1 {
             p.push(' ');
         }
@@ -342,8 +347,7 @@ impl Pretty for CallExpr {
             p.push(')');
         };

-        let arg_list = self.args();
-        let args = arg_list.items();
+        let args: Vec<_> = self.args().items().collect();

         if let Some(Expr::Template(template)) = args
             .last()
@@ -361,7 +365,7 @@ impl Pretty for CallExpr {

 impl Pretty for CallArgs {
     fn pretty(&self, p: &mut Printer) {
-        p.join(&self.items(), ", ", |item, p| item.pretty(p));
+        p.join(self.items(), ", ", |item, p| item.pretty(p));
     }
 }

@@ -380,11 +384,12 @@ impl Pretty for CallArg {

 impl Pretty for ClosureExpr {
     fn pretty(&self, p: &mut Printer) {
-        if let [param] = self.params().as_slice() {
+        let params: Vec<_> = self.params().collect();
+        if let [param] = params.as_slice() {
             param.pretty(p);
         } else {
             p.push('(');
-            p.join(self.params().iter(), ", ", |item, p| item.pretty(p));
+            p.join(params.iter(), ", ", |item, p| item.pretty(p));
             p.push(')');
         }
         p.push_str(" => ");
@@ -420,7 +425,7 @@ impl Pretty for LetExpr {
         self.binding().pretty(p);
         if let Some(Expr::Closure(closure)) = &self.init() {
             p.push('(');
-            p.join(closure.params().iter(), ", ", |item, p| item.pretty(p));
+            p.join(closure.params(), ", ", |item, p| item.pretty(p));
             p.push_str(") = ");
             closure.body().pretty(p);
         } else if let Some(init) = &self.init() {
@@ -487,7 +492,9 @@ impl Pretty for Imports {
     fn pretty(&self, p: &mut Printer) {
         match self {
             Self::Wildcard => p.push('*'),
-            Self::Idents(idents) => p.join(idents, ", ", |item, p| item.pretty(p)),
+            Self::Idents(idents) => {
+                p.join(idents, ", ", |item, p| item.pretty(p));
+            }
         }
     }
 }
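Note (not part of the commit): the DictExpr printer checks for emptiness with peekable()/peek() instead of collecting the items, in line with the commit's theme of iterating rather than owning. A small self-contained sketch of the same check (print_dict is a made-up helper):

// Illustrative only: detecting an empty iterator with peek() instead of
// collecting it into a Vec first.
fn print_dict<I: Iterator<Item = (String, String)>>(items: I) -> String {
    let mut out = String::from("(");
    let mut items = items.peekable();
    if items.peek().is_none() {
        // An empty dictionary still needs a colon to distinguish it from `()`.
        out.push(':');
    } else {
        let mut first = true;
        for (key, value) in items {
            if !first {
                out.push_str(", ");
            }
            out.push_str(&key);
            out.push_str(": ");
            out.push_str(&value);
            first = false;
        }
    }
    out.push(')');
    out
}

fn main() {
    assert_eq!(print_dict(std::iter::empty()), "(:)");
    let pairs = vec![("a".to_string(), "1".to_string())].into_iter();
    assert_eq!(print_dict(pairs), "(a: 1)");
}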