Mirror of https://github.com/typst/typst, commit c6f8ad35f4
@@ -6,7 +6,7 @@ use typst::eval::eval;
 use typst::layout::layout;
 use typst::loading::MemLoader;
 use typst::parse::{parse, Scanner, TokenMode, Tokens};
-use typst::source::{SourceFile, SourceId};
+use typst::source::SourceId;
 use typst::Context;

 const SRC: &str = include_str!("bench.typ");
@@ -48,12 +48,12 @@ fn bench_tokenize(iai: &mut Iai) {
 }

 fn bench_parse(iai: &mut Iai) {
-    iai.run(|| parse(&SourceFile::detached(SRC)));
+    iai.run(|| parse(SRC));
 }

 fn bench_eval(iai: &mut Iai) {
     let (mut ctx, id) = context();
-    let ast = ctx.parse(id).unwrap();
+    let ast = ctx.sources.get(id).ast().unwrap();
     iai.run(|| eval(&mut ctx, id, &ast).unwrap());
 }

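Note: the benchmark changes above follow the new parsing entry points in this commit: `parse` now takes a plain `&str` (the old `SourceFile`-based signature is gone) and the typed AST of a stored source is read back via `ast()`. A minimal sketch of the new untyped-to-typed path, using only calls that appear elsewhere in this diff (the source string is an arbitrary example):

    use typst::parse::parse;
    use typst::source::SourceId;
    use typst::syntax::RedNode;

    fn main() {
        // `parse` now returns the untyped green tree for a plain source string.
        let green = parse("Hello *world*");
        // Wrapping it in a red node attaches a source id and absolute spans;
        // the detached id mirrors the test helper in the captures module below.
        let red = RedNode::from_root(green, SourceId::from_raw(0));
        let _ = red;
    }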
@@ -100,7 +100,7 @@ impl<T> Trace<T> for TypResult<T> {
     {
         self.map_err(|mut errors| {
             for error in errors.iter_mut() {
-                if !span.contains(error.span) {
+                if !span.surrounds(error.span) {
                     error.trace.push(Spanned::new(make_point(), span));
                 }
             }
@@ -1,8 +1,8 @@
 use std::rc::Rc;

 use super::{Scope, Scopes, Value};
-use crate::syntax::visit::{immutable::visit_expr, Visit};
-use crate::syntax::{Expr, Ident};
+use crate::syntax::ast::{ClosureParam, Expr, Ident, Imports, TypedNode};
+use crate::syntax::RedRef;

 /// A visitor that captures variable slots.
 pub struct CapturesVisitor<'a> {
@@ -25,32 +25,153 @@ impl<'a> CapturesVisitor<'a> {
     pub fn finish(self) -> Scope {
         self.captures
     }
-}
-
-impl<'ast> Visit<'ast> for CapturesVisitor<'_> {
-    fn visit_expr(&mut self, node: &'ast Expr) {
-        if let Expr::Ident(ident) = node {
-            // Find out whether the name is not locally defined and if so if it
-            // can be captured.
-            if self.internal.get(ident).is_none() {
-                if let Some(slot) = self.external.get(ident) {
-                    self.captures.def_slot(ident.as_str(), Rc::clone(slot));
-                }
-            }
-        } else {
-            visit_expr(self, node);
-        }
-    }
-
-    fn visit_binding(&mut self, ident: &'ast Ident) {
-        self.internal.def_mut(ident.as_str(), Value::None);
-    }
-
-    fn visit_enter(&mut self) {
-        self.internal.enter();
-    }
-
-    fn visit_exit(&mut self) {
-        self.internal.exit();
-    }
-}
+
+    /// Bind a new internal variable.
+    pub fn bind(&mut self, ident: Ident) {
+        self.internal.def_mut(ident.take(), Value::None);
+    }
+
+    /// Capture a variable if it isn't internal.
+    pub fn capture(&mut self, ident: Ident) {
+        if self.internal.get(&ident).is_none() {
+            if let Some(slot) = self.external.get(&ident) {
+                self.captures.def_slot(ident.take(), Rc::clone(slot));
+            }
+        }
+    }
+
+    /// Visit any node and collect all captured variables.
+    pub fn visit(&mut self, node: RedRef) {
+        match node.cast() {
+            // Every identifier is a potential variable that we need to capture.
+            // Identifiers that shouldn't count as captures because they
+            // actually bind a new name are handled further below (individually
+            // through the expressions that contain them).
+            Some(Expr::Ident(ident)) => self.capture(ident),
+
+            // A closure contains parameter bindings, which are bound before the
+            // body is evaluated. Take must be taken so that the default values
+            // of named parameters cannot access previous parameter bindings.
+            Some(Expr::Closure(expr)) => {
+                for param in expr.params() {
+                    if let ClosureParam::Named(named) = param {
+                        self.visit(named.expr().as_red());
+                    }
+                }
+
+                for param in expr.params() {
+                    match param {
+                        ClosureParam::Pos(ident) => self.bind(ident),
+                        ClosureParam::Named(named) => self.bind(named.name()),
+                        ClosureParam::Sink(ident) => self.bind(ident),
+                    }
+                }
+
+                self.visit(expr.body().as_red());
+            }
+
+            // A let expression contains a binding, but that binding is only
+            // active after the body is evaluated.
+            Some(Expr::Let(expr)) => {
+                if let Some(init) = expr.init() {
+                    self.visit(init.as_red());
+                }
+                self.bind(expr.binding());
+            }
+
+            // A for loop contains one or two bindings in its pattern. These are
+            // active after the iterable is evaluated but before the body is
+            // evaluated.
+            Some(Expr::For(expr)) => {
+                self.visit(expr.iter().as_red());
+                let pattern = expr.pattern();
+                if let Some(key) = pattern.key() {
+                    self.bind(key);
+                }
+                self.bind(pattern.value());
+                self.visit(expr.body().as_red());
+            }
+
+            // An import contains items, but these are active only after the
+            // path is evaluated.
+            Some(Expr::Import(expr)) => {
+                self.visit(expr.path().as_red());
+                if let Imports::Items(items) = expr.imports() {
+                    for item in items {
+                        self.bind(item);
+                    }
+                }
+            }
+
+            // Blocks and templates create a scope.
+            Some(Expr::Block(_) | Expr::Template(_)) => {
+                self.internal.enter();
+                for child in node.children() {
+                    self.visit(child);
+                }
+                self.internal.exit();
+            }
+
+            // Everything else is traversed from left to right.
+            _ => {
+                for child in node.children() {
+                    self.visit(child);
+                }
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::parse::parse;
+    use crate::source::SourceId;
+    use crate::syntax::RedNode;
+
+    #[track_caller]
+    fn test(src: &str, result: &[&str]) {
+        let green = parse(src);
+        let red = RedNode::from_root(green, SourceId::from_raw(0));
+
+        let mut scopes = Scopes::new(None);
+        scopes.def_const("x", 0);
+        scopes.def_const("y", 0);
+        scopes.def_const("z", 0);
+
+        let mut visitor = CapturesVisitor::new(&scopes);
+        visitor.visit(red.as_ref());
+
+        let captures = visitor.finish();
+        let mut names: Vec<_> = captures.iter().map(|(k, _)| k).collect();
+        names.sort();
+
+        assert_eq!(names, result);
+    }
+
+    #[test]
+    fn test_captures() {
+        // Let binding and function definition.
+        test("#let x = x", &["x"]);
+        test("#let x; {x + y}", &["y"]);
+        test("#let f(x, y) = x + y", &[]);
+
+        // Closure with different kinds of params.
+        test("{(x, y) => x + z}", &["z"]);
+        test("{(x: y, z) => x + z}", &["y"]);
+        test("{(..x) => x + y}", &["y"]);
+        test("{(x, y: x + z) => x + y}", &["x", "z"]);
+
+        // For loop.
+        test("#for x in y { x + z }", &["y", "z"]);
+        test("#for x, y in y { x + y }", &["y"]);
+
+        // Import.
+        test("#import x, y from z", &["z"]);
+        test("#import x, y, z from x + y", &["x", "y"]);
+
+        // Scoping.
+        test("{ let x = 1; { let y = 2; y }; x + y }", &["y"]);
+        test("[#let x = 1]#x", &["x"]);
+    }
+}
src/eval/mod.rs (222)
@@ -30,16 +30,14 @@ use std::collections::HashMap;
 use std::io;
 use std::mem;
 use std::path::PathBuf;
-use std::rc::Rc;

 use crate::diag::{At, Error, StrResult, Trace, Tracepoint, TypResult};
 use crate::geom::{Angle, Fractional, Length, Relative};
 use crate::image::ImageStore;
 use crate::loading::Loader;
-use crate::parse::parse;
 use crate::source::{SourceId, SourceStore};
-use crate::syntax::visit::Visit;
-use crate::syntax::*;
+use crate::syntax::ast::*;
+use crate::syntax::{Span, Spanned};
 use crate::util::RefMutExt;
 use crate::Context;

@@ -114,7 +112,7 @@ impl<'a> EvalContext<'a> {

         // Parse the file.
         let source = self.sources.get(id);
-        let ast = parse(&source)?;
+        let ast = source.ast()?;

         // Prepare the new context.
         let new_scopes = Scopes::new(self.scopes.base);
@@ -122,7 +120,7 @@ impl<'a> EvalContext<'a> {
         self.route.push(id);

         // Evaluate the module.
-        let template = Rc::new(ast).eval(self).trace(|| Tracepoint::Import, span)?;
+        let template = ast.eval(self).trace(|| Tracepoint::Import, span)?;

         // Restore the old context.
         let new_scopes = mem::replace(&mut self.scopes, old_scopes);
@@ -176,8 +174,8 @@ impl Eval for Expr {

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
         match self {
-            Self::Ident(v) => v.eval(ctx),
             Self::Lit(v) => v.eval(ctx),
+            Self::Ident(v) => v.eval(ctx),
             Self::Array(v) => v.eval(ctx).map(Value::Array),
             Self::Dict(v) => v.eval(ctx).map(Value::Dict),
             Self::Template(v) => v.eval(ctx).map(Value::Template),
@@ -202,17 +200,17 @@ impl Eval for Lit {
     type Output = Value;

     fn eval(&self, _: &mut EvalContext) -> TypResult<Self::Output> {
-        Ok(match *self {
-            Self::None(_) => Value::None,
-            Self::Auto(_) => Value::Auto,
-            Self::Bool(_, v) => Value::Bool(v),
-            Self::Int(_, v) => Value::Int(v),
-            Self::Float(_, v) => Value::Float(v),
-            Self::Length(_, v, unit) => Value::Length(Length::with_unit(v, unit)),
-            Self::Angle(_, v, unit) => Value::Angle(Angle::with_unit(v, unit)),
-            Self::Percent(_, v) => Value::Relative(Relative::new(v / 100.0)),
-            Self::Fractional(_, v) => Value::Fractional(Fractional::new(v)),
-            Self::Str(_, ref v) => Value::Str(v.into()),
+        Ok(match self.kind() {
+            LitKind::None => Value::None,
+            LitKind::Auto => Value::Auto,
+            LitKind::Bool(v) => Value::Bool(v),
+            LitKind::Int(v) => Value::Int(v),
+            LitKind::Float(v) => Value::Float(v),
+            LitKind::Length(v, unit) => Value::Length(Length::with_unit(v, unit)),
+            LitKind::Angle(v, unit) => Value::Angle(Angle::with_unit(v, unit)),
+            LitKind::Percent(v) => Value::Relative(Relative::new(v / 100.0)),
+            LitKind::Fractional(v) => Value::Fractional(Fractional::new(v)),
+            LitKind::Str(ref v) => Value::Str(v.into()),
         })
     }
 }
@@ -223,7 +221,7 @@ impl Eval for Ident {
     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
         match ctx.scopes.get(self) {
             Some(slot) => Ok(slot.borrow().clone()),
-            None => bail!(self.span, "unknown variable"),
+            None => bail!(self.span(), "unknown variable"),
         }
     }
 }
@@ -232,7 +230,7 @@ impl Eval for ArrayExpr {
     type Output = Array;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        self.items.iter().map(|expr| expr.eval(ctx)).collect()
+        self.items().map(|expr| expr.eval(ctx)).collect()
     }
 }

@@ -240,9 +238,8 @@ impl Eval for DictExpr {
     type Output = Dict;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        self.items
-            .iter()
-            .map(|Named { name, expr }| Ok(((&name.string).into(), expr.eval(ctx)?)))
+        self.items()
+            .map(|x| Ok((x.name().take().into(), x.expr().eval(ctx)?)))
             .collect()
     }
 }
@@ -251,7 +248,7 @@ impl Eval for TemplateExpr {
     type Output = Template;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        self.body.eval(ctx)
+        self.body().eval(ctx)
     }
 }

@@ -259,7 +256,7 @@ impl Eval for GroupExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        self.expr.eval(ctx)
+        self.expr().eval(ctx)
     }
 }

@@ -270,7 +267,7 @@ impl Eval for BlockExpr {
         ctx.scopes.enter();

         let mut output = Value::None;
-        for expr in &self.exprs {
+        for expr in self.exprs() {
             let value = expr.eval(ctx)?;
             output = ops::join(output, value).at(expr.span())?;
         }
@@ -285,13 +282,13 @@ impl Eval for UnaryExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let value = self.expr.eval(ctx)?;
-        let result = match self.op {
+        let value = self.expr().eval(ctx)?;
+        let result = match self.op() {
             UnOp::Pos => ops::pos(value),
             UnOp::Neg => ops::neg(value),
             UnOp::Not => ops::not(value),
         };
-        result.at(self.span)
+        result.at(self.span())
     }
 }

@@ -299,7 +296,7 @@ impl Eval for BinaryExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        match self.op {
+        match self.op() {
             BinOp::Add => self.apply(ctx, ops::add),
             BinOp::Sub => self.apply(ctx, ops::sub),
             BinOp::Mul => self.apply(ctx, ops::mul),
@@ -327,17 +324,17 @@ impl BinaryExpr {
     where
         F: FnOnce(Value, Value) -> StrResult<Value>,
     {
-        let lhs = self.lhs.eval(ctx)?;
+        let lhs = self.lhs().eval(ctx)?;

         // Short-circuit boolean operations.
-        if (self.op == BinOp::And && lhs == Value::Bool(false))
-            || (self.op == BinOp::Or && lhs == Value::Bool(true))
+        if (self.op() == BinOp::And && lhs == Value::Bool(false))
+            || (self.op() == BinOp::Or && lhs == Value::Bool(true))
         {
             return Ok(lhs);
         }

-        let rhs = self.rhs.eval(ctx)?;
-        op(lhs, rhs).at(self.span)
+        let rhs = self.rhs().eval(ctx)?;
+        op(lhs, rhs).at(self.span())
     }

     /// Apply an assignment operation.
@@ -345,10 +342,10 @@ impl BinaryExpr {
     where
         F: FnOnce(Value, Value) -> StrResult<Value>,
     {
-        let rhs = self.rhs.eval(ctx)?;
-        let mut target = self.lhs.access(ctx)?;
+        let rhs = self.rhs().eval(ctx)?;
+        let mut target = self.lhs().access(ctx)?;
         let lhs = mem::take(&mut *target);
-        *target = op(lhs, rhs).at(self.span)?;
+        *target = op(lhs, rhs).at(self.span())?;
         Ok(Value::None)
     }
 }
@@ -357,27 +354,27 @@ impl Eval for CallExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let callee = self.callee.eval(ctx)?;
-        let mut args = self.args.eval(ctx)?;
+        let callee = self.callee().eval(ctx)?;
+        let mut args = self.args().eval(ctx)?;

         match callee {
             Value::Array(array) => {
-                array.get(args.into_index()?).map(Value::clone).at(self.span)
+                array.get(args.into_index()?).map(Value::clone).at(self.span())
             }

             Value::Dict(dict) => {
-                dict.get(args.into_key()?).map(Value::clone).at(self.span)
+                dict.get(args.into_key()?).map(Value::clone).at(self.span())
             }

             Value::Func(func) => {
                 let point = || Tracepoint::Call(func.name().map(ToString::to_string));
-                let value = func.call(ctx, &mut args).trace(point, self.span)?;
+                let value = func.call(ctx, &mut args).trace(point, self.span())?;
                 args.finish()?;
                 Ok(value)
             }

             v => bail!(
-                self.callee.span(),
+                self.callee().span(),
                 "expected function or collection, found {}",
                 v.type_name(),
             ),
@@ -389,9 +386,9 @@ impl Eval for CallArgs {
     type Output = Args;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let mut items = Vec::with_capacity(self.items.len());
+        let mut items = Vec::new();

-        for arg in &self.items {
+        for arg in self.items() {
             let span = arg.span();
             match arg {
                 CallArg::Pos(expr) => {
@@ -401,11 +398,11 @@ impl Eval for CallArgs {
                         value: Spanned::new(expr.eval(ctx)?, expr.span()),
                     });
                 }
-                CallArg::Named(Named { name, expr }) => {
+                CallArg::Named(named) => {
                     items.push(Arg {
                         span,
-                        name: Some((&name.string).into()),
-                        value: Spanned::new(expr.eval(ctx)?, expr.span()),
+                        name: Some(named.name().take().into()),
+                        value: Spanned::new(named.expr().eval(ctx)?, named.expr().span()),
                     });
                 }
                 CallArg::Spread(expr) => match expr.eval(ctx)? {
@@ -438,7 +435,7 @@ impl Eval for CallArgs {
             }
         }

-        Ok(Args { span: self.span, items })
+        Ok(Args { span: self.span(), items })
     }
 }

@@ -446,39 +443,38 @@ impl Eval for ClosureExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let name = self.name.as_ref().map(|name| name.string.clone());
-
         // Collect captured variables.
         let captured = {
             let mut visitor = CapturesVisitor::new(&ctx.scopes);
-            visitor.visit_closure(self);
+            visitor.visit(self.as_red());
             visitor.finish()
         };

         let mut sink = None;
-        let mut params = Vec::with_capacity(self.params.len());
+        let mut params = Vec::new();

         // Collect parameters and an optional sink parameter.
-        for param in &self.params {
+        for param in self.params() {
             match param {
                 ClosureParam::Pos(name) => {
-                    params.push((name.string.clone(), None));
+                    params.push((name.take(), None));
                 }
-                ClosureParam::Named(Named { name, expr }) => {
-                    params.push((name.string.clone(), Some(expr.eval(ctx)?)));
+                ClosureParam::Named(named) => {
+                    params.push((named.name().take(), Some(named.expr().eval(ctx)?)));
                 }
                 ClosureParam::Sink(name) => {
                     if sink.is_some() {
-                        bail!(name.span, "only one argument sink is allowed");
+                        bail!(name.span(), "only one argument sink is allowed");
                     }
-                    sink = Some(name.string.clone());
+                    sink = Some(name.take());
                 }
             }
         }

         // Clone the body expression so that we don't have a lifetime
         // dependence on the AST.
-        let body = Rc::clone(&self.body);
+        let name = self.name().map(Ident::take);
+        let body = self.body();

         // Define the actual function.
         let func = Function::new(name, move |ctx, args| {
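Note: closure evaluation above now hands the whole closure node (`self.as_red()`) to `CapturesVisitor`, which walks the untyped tree and decides per expression kind which identifiers bind and which capture. A fragment showing the same visitor driven directly, assembled from calls that appear in this diff's captures module (the closure source is an arbitrary example):

    // Which variables does `{(y) => x + y}` capture from its environment?
    let mut scopes = Scopes::new(None);
    scopes.def_const("x", 0);

    let green = parse("{(y) => x + y}");
    let red = RedNode::from_root(green, SourceId::from_raw(0));

    let mut visitor = CapturesVisitor::new(&scopes);
    visitor.visit(red.as_ref());
    let captures = visitor.finish(); // holds a slot for `x`, but not for the parameter `y`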
@@ -515,8 +511,9 @@ impl Eval for WithExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let wrapped = self.callee.eval(ctx)?.cast::<Function>().at(self.callee.span())?;
-        let applied = self.args.eval(ctx)?;
+        let callee = self.callee();
+        let wrapped = callee.eval(ctx)?.cast::<Function>().at(callee.span())?;
+        let applied = self.args().eval(ctx)?;

         let name = wrapped.name().cloned();
         let func = Function::new(name, move |ctx, args| {
@@ -532,11 +529,11 @@ impl Eval for LetExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let value = match &self.init {
+        let value = match self.init() {
             Some(expr) => expr.eval(ctx)?,
             None => Value::None,
         };
-        ctx.scopes.def_mut(self.binding.as_str(), value);
+        ctx.scopes.def_mut(self.binding().take(), value);
         Ok(Value::None)
     }
 }
@@ -545,12 +542,10 @@ impl Eval for IfExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let condition =
-            self.condition.eval(ctx)?.cast::<bool>().at(self.condition.span())?;
-
-        if condition {
-            self.if_body.eval(ctx)
-        } else if let Some(else_body) = &self.else_body {
+        let condition = self.condition();
+        if condition.eval(ctx)?.cast::<bool>().at(condition.span())? {
+            self.if_body().eval(ctx)
+        } else if let Some(else_body) = self.else_body() {
             else_body.eval(ctx)
         } else {
             Ok(Value::None)
@@ -564,9 +559,11 @@ impl Eval for WhileExpr {
     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
         let mut output = Value::None;

-        while self.condition.eval(ctx)?.cast::<bool>().at(self.condition.span())? {
-            let value = self.body.eval(ctx)?;
-            output = ops::join(output, value).at(self.body.span())?;
+        let condition = self.condition();
+        while condition.eval(ctx)?.cast::<bool>().at(condition.span())? {
+            let body = self.body();
+            let value = body.eval(ctx)?;
+            output = ops::join(output, value).at(body.span())?;
         }

         Ok(output)
@@ -584,40 +581,44 @@ impl Eval for ForExpr {

                 #[allow(unused_parens)]
                 for ($($value),*) in $iter {
-                    $(ctx.scopes.def_mut($binding.as_str(), $value);)*
+                    $(ctx.scopes.def_mut(&$binding, $value);)*

-                    let value = self.body.eval(ctx)?;
+                    let value = self.body().eval(ctx)?;
                     output = ops::join(output, value)
-                        .at(self.body.span())?;
+                        .at(self.body().span())?;
                 }

                 ctx.scopes.exit();
-                Ok(output)
+                return Ok(output);
             }};
         }

-        let iter = self.iter.eval(ctx)?;
-        match (&self.pattern, iter) {
-            (ForPattern::Value(v), Value::Str(string)) => {
-                iter!(for (v => value) in string.iter())
+        let iter = self.iter().eval(ctx)?;
+        let pattern = self.pattern();
+        let key = pattern.key().map(Ident::take);
+        let value = pattern.value().take();
+
+        match (key, value, iter) {
+            (None, v, Value::Str(string)) => {
+                iter!(for (v => value) in string.iter());
             }
-            (ForPattern::Value(v), Value::Array(array)) => {
-                iter!(for (v => value) in array.into_iter())
+            (None, v, Value::Array(array)) => {
+                iter!(for (v => value) in array.into_iter());
             }
-            (ForPattern::KeyValue(i, v), Value::Array(array)) => {
-                iter!(for (i => idx, v => value) in array.into_iter().enumerate())
+            (Some(i), v, Value::Array(array)) => {
+                iter!(for (i => idx, v => value) in array.into_iter().enumerate());
             }
-            (ForPattern::Value(v), Value::Dict(dict)) => {
-                iter!(for (v => value) in dict.into_iter().map(|p| p.1))
+            (None, v, Value::Dict(dict)) => {
+                iter!(for (v => value) in dict.into_iter().map(|p| p.1));
             }
-            (ForPattern::KeyValue(k, v), Value::Dict(dict)) => {
-                iter!(for (k => key, v => value) in dict.into_iter())
+            (Some(k), v, Value::Dict(dict)) => {
+                iter!(for (k => key, v => value) in dict.into_iter());
             }
-            (ForPattern::KeyValue(_, _), Value::Str(_)) => {
-                bail!(self.pattern.span(), "mismatched pattern");
+            (_, _, Value::Str(_)) => {
+                bail!(pattern.span(), "mismatched pattern");
             }
-            (_, iter) => {
-                bail!(self.iter.span(), "cannot loop over {}", iter.type_name());
+            (_, _, iter) => {
+                bail!(self.iter().span(), "cannot loop over {}", iter.type_name());
             }
         }
     }
@@ -627,23 +628,23 @@ impl Eval for ImportExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let path = self.path.eval(ctx)?.cast::<Str>().at(self.path.span())?;
-        let file = ctx.import(&path, self.path.span())?;
+        let path = self.path();
+        let resolved = path.eval(ctx)?.cast::<Str>().at(path.span())?;
+        let file = ctx.import(&resolved, path.span())?;
         let module = &ctx.modules[&file];

-        match &self.imports {
+        match self.imports() {
             Imports::Wildcard => {
                 for (var, slot) in module.scope.iter() {
                     ctx.scopes.def_mut(var, slot.borrow().clone());
                 }
             }
-            Imports::Idents(idents) => {
+            Imports::Items(idents) => {
                 for ident in idents {
                     if let Some(slot) = module.scope.get(&ident) {
-                        ctx.scopes.def_mut(ident.as_str(), slot.borrow().clone());
+                        ctx.scopes.def_mut(ident.take(), slot.borrow().clone());
                     } else {
-                        bail!(ident.span, "unresolved import");
+                        bail!(ident.span(), "unresolved import");
                     }
                 }
             }
@@ -657,11 +658,10 @@ impl Eval for IncludeExpr {
     type Output = Value;

     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let path = self.path.eval(ctx)?.cast::<Str>().at(self.path.span())?;
-        let file = ctx.import(&path, self.path.span())?;
+        let path = self.path();
+        let resolved = path.eval(ctx)?.cast::<Str>().at(path.span())?;
+        let file = ctx.import(&resolved, path.span())?;
         let module = &ctx.modules[&file];

         Ok(Value::Template(module.template.clone()))
     }
 }
@@ -689,23 +689,23 @@ impl Access for Ident {
         match ctx.scopes.get(self) {
             Some(slot) => match slot.try_borrow_mut() {
                 Ok(guard) => Ok(guard),
-                Err(_) => bail!(self.span, "cannot mutate a constant"),
+                Err(_) => bail!(self.span(), "cannot mutate a constant"),
             },
-            None => bail!(self.span, "unknown variable"),
+            None => bail!(self.span(), "unknown variable"),
         }
     }
 }

 impl Access for CallExpr {
     fn access<'a>(&self, ctx: &'a mut EvalContext) -> TypResult<RefMut<'a, Value>> {
-        let args = self.args.eval(ctx)?;
-        let guard = self.callee.access(ctx)?;
+        let args = self.args().eval(ctx)?;
+        let guard = self.callee().access(ctx)?;

         RefMut::try_map(guard, |value| match value {
-            Value::Array(array) => array.get_mut(args.into_index()?).at(self.span),
+            Value::Array(array) => array.get_mut(args.into_index()?).at(self.span()),
             Value::Dict(dict) => Ok(dict.get_mut(args.into_key()?)),
             v => bail!(
-                self.callee.span(),
+                self.callee().span(),
                 "expected collection, found {}",
                 v.type_name(),
             ),
@@ -120,6 +120,8 @@ impl Scope {

 impl Debug for Scope {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
-        self.values.fmt(f)
+        f.debug_map()
+            .entries(self.values.iter().map(|(k, v)| (k, v.borrow())))
+            .finish()
     }
 }
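Note: `Scope`'s `Debug` impl above switches from debug-printing the raw value map to `Formatter::debug_map`, borrowing each slot so the contained value is shown instead of its `RefCell` wrapper. The same standard-library builder pattern in isolation (the types and field names here are illustrative, not the crate's own):

    use std::cell::RefCell;
    use std::collections::BTreeMap;
    use std::fmt::{self, Debug, Formatter};

    struct Slots {
        values: BTreeMap<String, RefCell<i64>>,
    }

    impl Debug for Slots {
        fn fmt(&self, f: &mut Formatter) -> fmt::Result {
            // Borrow each cell so the value is printed rather than `RefCell { .. }`.
            f.debug_map()
                .entries(self.values.iter().map(|(k, v)| (k, v.borrow())))
                .finish()
        }
    }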
@@ -5,7 +5,7 @@ use crate::diag::TypResult;
 use crate::geom::Spec;
 use crate::layout::BlockLevel;
 use crate::library::{GridNode, ParChild, ParNode, TrackSizing};
-use crate::syntax::*;
+use crate::syntax::ast::*;
 use crate::util::BoolExt;

 /// Walk markup, filling the currently built template.
@@ -16,7 +16,7 @@ pub trait Walk {

 impl Walk for Markup {
     fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
-        for node in self.iter() {
+        for node in self.nodes() {
             node.walk(ctx)?;
         }
         Ok(())
@@ -27,12 +27,13 @@ impl Walk for MarkupNode {
     fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
         match self {
             Self::Space => ctx.template.space(),
-            Self::Linebreak(_) => ctx.template.linebreak(),
-            Self::Parbreak(_) => ctx.template.parbreak(),
-            Self::Strong(_) => ctx.template.modify(|s| s.text_mut().strong.flip()),
-            Self::Emph(_) => ctx.template.modify(|s| s.text_mut().emph.flip()),
+            Self::Linebreak => ctx.template.linebreak(),
+            Self::Parbreak => ctx.template.parbreak(),
+            Self::Strong => ctx.template.modify(|s| s.text_mut().strong.flip()),
+            Self::Emph => ctx.template.modify(|s| s.text_mut().emph.flip()),
             Self::Text(text) => ctx.template.text(text),
             Self::Raw(raw) => raw.walk(ctx)?,
+            Self::Math(math) => math.walk(ctx)?,
             Self::Heading(heading) => heading.walk(ctx)?,
             Self::List(list) => list.walk(ctx)?,
             Self::Enum(enum_) => enum_.walk(ctx)?,
@@ -67,16 +68,32 @@ impl Walk for RawNode {
     }
 }

+impl Walk for MathNode {
+    fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
+        if self.display {
+            ctx.template.parbreak();
+        }
+
+        ctx.template.monospace(self.formula.trim());
+
+        if self.display {
+            ctx.template.parbreak();
+        }
+
+        Ok(())
+    }
+}
+
 impl Walk for HeadingNode {
     fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
-        let level = self.level;
-        let body = self.body.eval(ctx)?;
+        let level = self.level();
+        let body = self.body().eval(ctx)?;

         ctx.template.parbreak();
         ctx.template.save();
         ctx.template.modify(move |style| {
             let text = style.text_mut();
-            let upscale = 1.6 - 0.1 * level as f64;
+            let upscale = (1.6 - 0.1 * level as f64).max(0.75);
             text.size *= upscale;
             text.strong = true;
         });
@@ -90,7 +107,7 @@ impl Walk for HeadingNode {

 impl Walk for ListNode {
     fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
-        let body = self.body.eval(ctx)?;
+        let body = self.body().eval(ctx)?;
         walk_item(ctx, Str::from('•'), body);
         Ok(())
     }
@@ -98,8 +115,8 @@ impl Walk for ListNode {

 impl Walk for EnumNode {
     fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
-        let body = self.body.eval(ctx)?;
-        let label = format_str!("{}.", self.number.unwrap_or(1));
+        let body = self.body().eval(ctx)?;
+        let label = format_str!("{}.", self.number().unwrap_or(1));
         walk_item(ctx, label, body);
         Ok(())
     }
@@ -3,7 +3,7 @@ use super::*;
 /// A relative length.
 ///
 /// _Note_: `50%` is represented as `0.5` here, but stored as `50.0` in the
-/// corresponding [literal](crate::syntax::Lit::Percent).
+/// corresponding [literal](crate::syntax::ast::LitKind::Percent).
 #[derive(Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
 pub struct Relative(N64);

src/lib.rs (10)
@@ -20,7 +20,7 @@
 //!
 //! [tokens]: parse::Tokens
 //! [parsed]: parse::parse
-//! [markup]: syntax::Markup
+//! [markup]: syntax::ast::Markup
 //! [evaluate]: eval::eval
 //! [module]: eval::Module
 //! [layout tree]: layout::LayoutTree
@@ -58,7 +58,6 @@ use crate::layout::{EvictionPolicy, LayoutCache};
 use crate::loading::Loader;
 use crate::source::{SourceId, SourceStore};
 use crate::style::Style;
-use crate::syntax::Markup;

 /// The core context which holds the loader, configuration and cached artifacts.
 pub struct Context {
@@ -100,14 +99,9 @@ impl Context {
         &self.style
     }

-    /// Parse a source file and return the resulting markup.
-    pub fn parse(&mut self, id: SourceId) -> TypResult<Markup> {
-        parse::parse(self.sources.get(id))
-    }
-
     /// Evaluate a source file and return the resulting module.
     pub fn evaluate(&mut self, id: SourceId) -> TypResult<Module> {
-        let ast = self.parse(id)?;
+        let ast = self.sources.get(id).ast()?;
         eval::eval(self, id, &ast)
     }

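Note: `Context::parse` has been removed from the public surface above; callers go straight to `Context::evaluate`, which reads the typed AST out of the stored source. A hedged sketch of the resulting call site (how the `SourceId` was registered with `ctx.sources` is outside this diff):

    use typst::diag::TypResult;
    use typst::eval::Module;
    use typst::source::SourceId;
    use typst::Context;

    // Evaluate an already-stored source file into a module.
    fn evaluate_stored(ctx: &mut Context, id: SourceId) -> TypResult<Module> {
        // Parsing now happens behind `ctx.sources.get(id).ast()` inside evaluate.
        ctx.evaluate(id)
    }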
src/parse/mod.rs (961): file diff suppressed because it is too large.
@@ -1,250 +1,216 @@
-use std::ops::Range;
+use std::mem;

 use super::{TokenMode, Tokens};
-use crate::diag::Error;
-use crate::source::{SourceFile, SourceId};
-use crate::syntax::{IntoSpan, Pos, Span, Token};
+use crate::syntax::{ErrorPos, Green, GreenData, GreenNode, NodeKind};
+use crate::util::EcoString;
+
+/// Allows parser methods to use the try operator. Not exposed as the parser
+/// recovers from all errors.
+pub(crate) type ParseResult<T = ()> = Result<T, ()>;

 /// A convenient token-based parser.
 pub struct Parser<'s> {
-    /// The parsed file.
-    source: &'s SourceFile,
-    /// Parsing errors.
-    errors: Vec<Error>,
     /// An iterator over the source tokens.
     tokens: Tokens<'s>,
+    /// Whether we are at the end of the file or of a group.
+    eof: bool,
+    /// The current token.
+    current: Option<NodeKind>,
+    /// The end byte index of the last non-trivia token.
+    prev_end: usize,
+    /// The start byte index of the peeked token.
+    current_start: usize,
     /// The stack of open groups.
     groups: Vec<GroupEntry>,
-    /// The next token.
-    next: Option<Token<'s>>,
-    /// The peeked token.
-    /// (Same as `next` except if we are at the end of group, then `None`).
-    peeked: Option<Token<'s>>,
-    /// The end index of the last (non-whitespace if in code mode) token.
-    prev_end: usize,
-    /// The start index of the peeked token.
-    next_start: usize,
-}
-
-/// A logical group of tokens, e.g. `[...]`.
-struct GroupEntry {
-    /// The kind of group this is. This decides which tokens will end the group.
-    /// For example, a [`Group::Paren`] will be ended by
-    /// [`Token::RightParen`].
-    pub kind: Group,
-    /// The start index of the group. Used by `Parser::end_group` to return the
-    /// group's full span.
-    pub start: usize,
-    /// The mode the parser was in _before_ the group started (to which we go
-    /// back once the group ends).
-    pub prev_mode: TokenMode,
-}
-
-/// A group, confined by optional start and end delimiters.
-#[derive(Debug, Copy, Clone, Eq, PartialEq)]
-pub enum Group {
-    /// A parenthesized group: `(...)`.
-    Paren,
-    /// A bracketed group: `[...]`.
-    Bracket,
-    /// A curly-braced group: `{...}`.
-    Brace,
-    /// A group ended by a semicolon or a line break: `;`, `\n`.
-    Stmt,
-    /// A group for a single expression, ended by a line break.
-    Expr,
-    /// A group for import items, ended by a semicolon, line break or `from`.
-    Imports,
+    /// The children of the currently built node.
+    children: Vec<Green>,
 }

 impl<'s> Parser<'s> {
     /// Create a new parser for the source string.
-    pub fn new(source: &'s SourceFile) -> Self {
-        let mut tokens = Tokens::new(source.src(), TokenMode::Markup);
-        let next = tokens.next();
+    pub fn new(src: &'s str) -> Self {
+        let mut tokens = Tokens::new(src, TokenMode::Markup);
+        let current = tokens.next();
         Self {
-            source,
-            errors: vec![],
             tokens,
-            groups: vec![],
-            next,
-            peeked: next,
+            eof: current.is_none(),
+            current,
             prev_end: 0,
-            next_start: 0,
+            current_start: 0,
+            groups: vec![],
+            children: vec![],
         }
     }

-    /// Finish parsing and return all errors.
-    pub fn finish(self) -> Vec<Error> {
-        self.errors
+    /// End the parsing process and return the last child.
+    pub fn finish(self) -> Vec<Green> {
+        self.children
     }

-    /// The id of the parsed source file.
-    pub fn id(&self) -> SourceId {
-        self.source.id()
+    /// Create a new marker.
+    pub fn marker(&mut self) -> Marker {
+        Marker(self.children.len())
+    }
+
+    /// Create a markup right before the trailing trivia.
+    pub fn trivia_start(&self) -> Marker {
+        let count = self
+            .children
+            .iter()
+            .rev()
+            .take_while(|node| self.is_trivia(node.kind()))
+            .count();
+        Marker(self.children.len() - count)
+    }
+
+    /// Perform a subparse that wraps its result in a node with the given kind.
+    pub fn perform<F, T>(&mut self, kind: NodeKind, f: F) -> T
+    where
+        F: FnOnce(&mut Self) -> T,
+    {
+        let prev = mem::take(&mut self.children);
+        let output = f(self);
+        let until = self.trivia_start();
+        let mut children = mem::replace(&mut self.children, prev);
+
+        if self.tokens.mode() == TokenMode::Code {
+            // Trailing trivia should not be wrapped into the new node.
+            let idx = self.children.len();
+            self.children.push(Green::default());
+            self.children.extend(children.drain(until.0 ..));
+            self.children[idx] = GreenNode::with_children(kind, children).into();
+        } else {
+            self.children.push(GreenNode::with_children(kind, children).into());
+        }
+
+        output
     }

     /// Whether the end of the source string or group is reached.
     pub fn eof(&self) -> bool {
-        self.peek().is_none()
+        self.eof
     }

-    /// Consume the next token.
-    pub fn eat(&mut self) -> Option<Token<'s>> {
-        let token = self.peek()?;
+    /// Consume the current token and also trailing trivia.
+    pub fn eat(&mut self) {
+        self.prev_end = self.tokens.index();
         self.bump();
-        Some(token)
-    }

-    /// Eat the next token and return its source range.
-    pub fn eat_span(&mut self) -> Span {
-        let start = self.next_start();
-        self.eat();
-        Span::new(self.id(), start, self.prev_end())
-    }
+        if self.tokens.mode() == TokenMode::Code {
+            // Skip whitespace and comments.
+            while self.current.as_ref().map_or(false, |x| self.is_trivia(x)) {
+                self.bump();
+            }
+        }

-    /// Consume the next token if it is the given one.
-    pub fn eat_if(&mut self, t: Token) -> bool {
-        if self.peek() == Some(t) {
-            self.bump();
-            true
-        } else {
-            false
-        }
+        self.repeek();
     }

-    /// Consume the next token if the closure maps it a to `Some`-variant.
-    pub fn eat_map<T, F>(&mut self, f: F) -> Option<T>
-    where
-        F: FnOnce(Token<'s>) -> Option<T>,
-    {
-        let token = self.peek()?;
-        let mapped = f(token);
-        if mapped.is_some() {
-            self.bump();
-        }
-        mapped
+    /// Eat if the current token it is the given one.
+    pub fn eat_if(&mut self, t: &NodeKind) -> bool {
+        let at = self.at(t);
+        if at {
+            self.eat();
+        }
+        at
     }

-    /// Consume the next token if it is the given one and produce an error if
-    /// not.
-    pub fn eat_expect(&mut self, t: Token) -> bool {
+    /// Eat if the current token is the given one and produce an error if not.
+    pub fn eat_expect(&mut self, t: &NodeKind) -> ParseResult {
         let eaten = self.eat_if(t);
         if !eaten {
-            self.expected_at(self.prev_end(), t.name());
+            self.expected_at(t.as_str());
         }
-        eaten
+        if eaten { Ok(()) } else { Err(()) }
     }

-    /// Consume the next token, debug-asserting that it is one of the given ones.
-    pub fn eat_assert(&mut self, t: Token) {
-        let next = self.eat();
-        debug_assert_eq!(next, Some(t));
+    /// Eat, debug-asserting that the token is the given one.
+    pub fn eat_assert(&mut self, t: &NodeKind) {
+        debug_assert_eq!(self.peek(), Some(t));
+        self.eat();
     }

-    /// Consume tokens while the condition is true.
+    /// Eat tokens while the condition is true.
     pub fn eat_while<F>(&mut self, mut f: F)
     where
-        F: FnMut(Token<'s>) -> bool,
+        F: FnMut(&NodeKind) -> bool,
     {
         while self.peek().map_or(false, |t| f(t)) {
             self.eat();
         }
     }

-    /// Peek at the next token without consuming it.
-    pub fn peek(&self) -> Option<Token<'s>> {
-        self.peeked
+    /// Eat the current token, but change its type.
+    pub fn convert(&mut self, kind: NodeKind) {
+        let marker = self.marker();
+        self.eat();
+        marker.convert(self, kind);
     }

-    /// Peek at the next token if it follows immediately after the last one
-    /// without any whitespace in between.
-    pub fn peek_direct(&self) -> Option<Token<'s>> {
-        if self.next_start() == self.prev_end() {
-            self.peeked
+    /// Whether the current token is of the given type.
+    pub fn at(&self, kind: &NodeKind) -> bool {
+        self.peek() == Some(kind)
+    }
+
+    /// Peek at the current token without consuming it.
+    pub fn peek(&self) -> Option<&NodeKind> {
+        if self.eof { None } else { self.current.as_ref() }
+    }
+
+    /// Peek at the current token, if it follows immediately after the last one
+    /// without any trivia in between.
+    pub fn peek_direct(&self) -> Option<&NodeKind> {
+        if self.prev_end() == self.current_start() {
+            self.peek()
         } else {
             None
         }
     }

-    /// Peek at the span of the next token.
-    ///
-    /// Has length zero if `peek()` returns `None`.
-    pub fn peek_span(&self) -> Span {
-        Span::new(self.id(), self.next_start(), self.next_end())
-    }
-
-    /// Peek at the source of the next token.
+    /// Peek at the source of the current token.
     pub fn peek_src(&self) -> &'s str {
-        self.get(self.next_start() .. self.next_end())
+        self.tokens.scanner().get(self.current_start() .. self.current_end())
     }

-    /// Checks whether the next token fulfills a condition.
-    ///
-    /// Returns `false` if there is no next token.
-    pub fn check<F>(&self, f: F) -> bool
-    where
-        F: FnOnce(Token<'s>) -> bool,
-    {
-        self.peek().map_or(false, f)
-    }
-
-    /// The byte index at which the last token ended.
-    ///
-    /// Refers to the end of the last _non-whitespace_ token in code mode.
+    /// The byte index at which the last non-trivia token ended.
     pub fn prev_end(&self) -> usize {
         self.prev_end
     }

-    /// The byte index at which the next token starts.
-    pub fn next_start(&self) -> usize {
-        self.next_start
+    /// The byte index at which the current token starts.
+    pub fn current_start(&self) -> usize {
+        self.current_start
     }

-    /// The byte index at which the next token will end.
-    ///
-    /// Is the same as [`next_start()`][Self::next_start] if `peek()` returns
-    /// `None`.
-    pub fn next_end(&self) -> usize {
+    /// The byte index at which the current token ends.
+    pub fn current_end(&self) -> usize {
         self.tokens.index()
     }

     /// Determine the column index for the given byte index.
     pub fn column(&self, index: usize) -> usize {
-        self.source.byte_to_column(index).unwrap()
-    }
-
-    /// Slice out part of the source string.
-    pub fn get(&self, range: Range<usize>) -> &'s str {
-        self.source.get(range).unwrap()
-    }
-
-    /// The span from `start` to [`self.prev_end()`](Self::prev_end).
-    pub fn span_from(&self, start: impl Into<Pos>) -> Span {
-        Span::new(self.id(), start, self.prev_end())
+        self.tokens.scanner().column(index)
     }

     /// Continue parsing in a group.
     ///
     /// When the end delimiter of the group is reached, all subsequent calls to
-    /// `eat()` and `peek()` return `None`. Parsing can only continue with
-    /// a matching call to `end_group`.
+    /// `peek()` return `None`. Parsing can only continue with a matching call
+    /// to `end_group`.
     ///
-    /// This panics if the next token does not start the given group.
-    pub fn start_group(&mut self, kind: Group, mode: TokenMode) {
-        self.groups.push(GroupEntry {
-            kind,
-            start: self.next_start(),
-            prev_mode: self.tokens.mode(),
-        });
-
-        self.tokens.set_mode(mode);
+    /// This panics if the current token does not start the given group.
+    pub fn start_group(&mut self, kind: Group) {
+        self.groups.push(GroupEntry { kind, prev_mode: self.tokens.mode() });
+        self.tokens.set_mode(match kind {
+            Group::Bracket => TokenMode::Markup,
+            _ => TokenMode::Code,
+        });
+
         self.repeek();

         match kind {
-            Group::Paren => self.eat_assert(Token::LeftParen),
-            Group::Bracket => self.eat_assert(Token::LeftBracket),
-            Group::Brace => self.eat_assert(Token::LeftBrace),
+            Group::Paren => self.eat_assert(&NodeKind::LeftParen),
+            Group::Bracket => self.eat_assert(&NodeKind::LeftBracket),
+            Group::Brace => self.eat_assert(&NodeKind::LeftBrace),
             Group::Stmt => {}
             Group::Expr => {}
             Group::Imports => {}
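Note: the rewritten `Parser` above no longer tracks spans and errors itself; it accumulates untyped `Green` children, and `marker()` / `trivia_start()` / `perform()` let grammar rules wrap everything produced by a sub-parse into a single `GreenNode` of a chosen kind, keeping trailing trivia outside the node in code mode. A schematic of how a rule might use `perform`; the rule names and the node kind argument are placeholders, not taken from this diff:

    // Hypothetical grammar rule built on the new Parser API (sketch only).
    fn wrap_rule(p: &mut Parser, kind: NodeKind) {
        p.perform(kind, |p| {
            p.eat();        // the first token becomes the first child of the node
            child_rule(p);  // a sub-rule pushes further children onto `p.children`
        });
    }

    // Stand-in for a real grammar rule; the actual rules live in the suppressed
    // src/parse/mod.rs diff.
    fn child_rule(p: &mut Parser) {
        p.eat();
    }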
@ -254,121 +220,88 @@ impl<'s> Parser<'s> {
|
|||||||
/// End the parsing of a group.
|
/// End the parsing of a group.
|
||||||
///
|
///
|
||||||
/// This panics if no group was started.
|
/// This panics if no group was started.
|
||||||
pub fn end_group(&mut self) -> Span {
|
pub fn end_group(&mut self) {
|
||||||
let prev_mode = self.tokens.mode();
|
let group_mode = self.tokens.mode();
|
||||||
let group = self.groups.pop().expect("no started group");
|
let group = self.groups.pop().expect("no started group");
|
||||||
self.tokens.set_mode(group.prev_mode);
|
self.tokens.set_mode(group.prev_mode);
|
||||||
self.repeek();
|
self.repeek();
|
||||||
|
|
||||||
let mut rescan = self.tokens.mode() != prev_mode;
|
let mut rescan = self.tokens.mode() != group_mode;
|
||||||
|
|
||||||
// Eat the end delimiter if there is one.
|
// Eat the end delimiter if there is one.
|
||||||
if let Some((end, required)) = match group.kind {
|
if let Some((end, required)) = match group.kind {
|
||||||
Group::Paren => Some((Token::RightParen, true)),
|
Group::Paren => Some((NodeKind::RightParen, true)),
|
||||||
Group::Bracket => Some((Token::RightBracket, true)),
|
Group::Bracket => Some((NodeKind::RightBracket, true)),
|
||||||
Group::Brace => Some((Token::RightBrace, true)),
|
Group::Brace => Some((NodeKind::RightBrace, true)),
|
||||||
Group::Stmt => Some((Token::Semicolon, false)),
|
Group::Stmt => Some((NodeKind::Semicolon, false)),
|
||||||
Group::Expr => None,
|
Group::Expr => None,
|
||||||
Group::Imports => None,
|
Group::Imports => None,
|
||||||
} {
|
} {
|
||||||
if self.next == Some(end) {
|
if self.current.as_ref() == Some(&end) {
|
||||||
// Bump the delimiter and return. No need to rescan in this case.
|
// Bump the delimiter and return. No need to rescan in this case.
|
||||||
self.bump();
|
self.eat();
|
||||||
rescan = false;
|
rescan = false;
|
||||||
} else if required {
|
} else if required {
|
||||||
self.error(
|
self.push_error(format!("expected {}", end));
|
||||||
self.next_start() .. self.next_start(),
|
|
||||||
format!("expected {}", end.name()),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Rescan the peeked token if the mode changed.
|
// Rescan the peeked token if the mode changed.
|
||||||
if rescan {
|
if rescan {
|
||||||
self.tokens.jump(self.prev_end());
|
if group_mode == TokenMode::Code {
|
||||||
self.bump();
|
self.children.truncate(self.trivia_start().0);
|
||||||
}
|
|
||||||
|
|
||||||
Span::new(self.id(), group.start, self.prev_end())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add an error with location and message.
|
|
||||||
pub fn error(&mut self, span: impl IntoSpan, message: impl Into<String>) {
|
|
||||||
self.errors.push(Error::new(span.into_span(self.id()), message));
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add an error that `what` was expected at the given span.
|
|
||||||
pub fn expected_at(&mut self, span: impl IntoSpan, what: &str) {
|
|
||||||
self.error(span, format!("expected {}", what));
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Eat the next token and add an error that it is not the expected `thing`.
|
|
||||||
pub fn expected(&mut self, what: &str) {
|
|
||||||
let before = self.next_start();
|
|
||||||
if let Some(found) = self.eat() {
|
|
||||||
let after = self.prev_end();
|
|
||||||
self.error(
|
|
||||||
before .. after,
|
|
||||||
format!("expected {}, found {}", what, found.name()),
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
self.expected_at(self.next_start(), what);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Eat the next token and add an error that it is unexpected.
|
|
||||||
pub fn unexpected(&mut self) {
|
|
||||||
let before = self.next_start();
|
|
||||||
if let Some(found) = self.eat() {
|
|
||||||
let after = self.prev_end();
|
|
||||||
self.error(before .. after, format!("unexpected {}", found.name()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Move to the next token.
|
|
||||||
fn bump(&mut self) {
|
|
||||||
self.prev_end = self.tokens.index().into();
|
|
||||||
self.next_start = self.tokens.index().into();
|
|
||||||
self.next = self.tokens.next();
|
|
||||||
|
|
||||||
if self.tokens.mode() == TokenMode::Code {
|
|
||||||
// Skip whitespace and comments.
|
|
||||||
while match self.next {
|
|
||||||
Some(Token::Space(n)) => n < 1 || !self.stop_at_newline(),
|
|
||||||
Some(Token::LineComment(_)) => true,
|
|
||||||
Some(Token::BlockComment(_)) => true,
|
|
||||||
_ => false,
|
|
||||||
} {
|
|
||||||
self.next_start = self.tokens.index().into();
|
|
||||||
self.next = self.tokens.next();
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
self.repeek();
|
self.tokens.jump(self.prev_end());
|
||||||
|
self.prev_end = self.tokens.index();
|
||||||
|
self.current_start = self.tokens.index();
|
||||||
|
self.current = self.tokens.next();
|
||||||
|
self.repeek();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Take another look at the next token to recheck whether it ends a group.
|
/// Low-level bump that consumes exactly one token without special trivia
|
||||||
|
/// handling.
|
||||||
|
fn bump(&mut self) {
|
||||||
|
let kind = self.current.take().unwrap();
|
||||||
|
let len = self.tokens.index() - self.current_start;
|
||||||
|
self.children.push(GreenData::new(kind, len).into());
|
||||||
|
self.current_start = self.tokens.index();
|
||||||
|
self.current = self.tokens.next();
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Take another look at the current token to recheck whether it ends a
|
||||||
|
/// group.
|
||||||
fn repeek(&mut self) {
|
fn repeek(&mut self) {
|
||||||
self.peeked = self.next;
|
self.eof = match &self.current {
|
||||||
let token = match self.next {
|
Some(NodeKind::RightParen) => self.inside(Group::Paren),
|
||||||
Some(token) => token,
|
Some(NodeKind::RightBracket) => self.inside(Group::Bracket),
|
||||||
None => return,
|
Some(NodeKind::RightBrace) => self.inside(Group::Brace),
|
||||||
|
Some(NodeKind::Semicolon) => self.inside(Group::Stmt),
|
||||||
|
Some(NodeKind::From) => self.inside(Group::Imports),
|
||||||
|
Some(NodeKind::Space(n)) => *n >= 1 && self.stop_at_newline(),
|
||||||
|
Some(_) => false,
|
||||||
|
None => true,
|
||||||
};
|
};
|
||||||
|
}
|
||||||
|
|
||||||
if match token {
|
/// Returns whether the given type can be skipped over.
|
||||||
Token::RightParen => self.inside(Group::Paren),
|
fn is_trivia(&self, token: &NodeKind) -> bool {
|
||||||
Token::RightBracket => self.inside(Group::Bracket),
|
Self::is_trivia_ext(token, self.stop_at_newline())
|
||||||
Token::RightBrace => self.inside(Group::Brace),
|
}
|
||||||
Token::Semicolon => self.inside(Group::Stmt),
|
|
||||||
Token::From => self.inside(Group::Imports),
|
/// Returns whether the given type can be skipped over given the current
|
||||||
Token::Space(n) => n >= 1 && self.stop_at_newline(),
|
/// newline mode.
|
||||||
|
fn is_trivia_ext(token: &NodeKind, stop_at_newline: bool) -> bool {
|
||||||
|
match token {
|
||||||
|
NodeKind::Space(n) => *n == 0 || !stop_at_newline,
|
||||||
|
NodeKind::LineComment => true,
|
||||||
|
NodeKind::BlockComment => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
} {
|
|
||||||
self.peeked = None;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Whether the active group ends at a newline.
|
/// Whether the active group must end at a newline.
|
||||||
fn stop_at_newline(&self) -> bool {
|
fn stop_at_newline(&self) -> bool {
|
||||||
matches!(
|
matches!(
|
||||||
self.groups.last().map(|group| group.kind),
|
self.groups.last().map(|group| group.kind),
|
||||||
@ -381,3 +314,134 @@ impl<'s> Parser<'s> {
|
|||||||
self.groups.iter().any(|g| g.kind == kind)
|
self.groups.iter().any(|g| g.kind == kind)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||

/// Error handling.
impl Parser<'_> {
/// Push an error into the children list.
pub fn push_error(&mut self, msg: impl Into<EcoString>) {
let error = NodeKind::Error(ErrorPos::Full, msg.into());
self.children.push(GreenData::new(error, 0).into());
}

/// Eat the current token and add an error that it is unexpected.
pub fn unexpected(&mut self) {
match self.peek() {
Some(found) => {
let msg = format!("unexpected {}", found);
let error = NodeKind::Error(ErrorPos::Full, msg.into());
self.perform(error, Self::eat);
}
None => self.push_error("unexpected end of file"),
}
}

/// Eat the current token and add an error that it is not the expected `thing`.
pub fn expected(&mut self, thing: &str) {
match self.peek() {
Some(found) => {
let msg = format!("expected {}, found {}", thing, found);
let error = NodeKind::Error(ErrorPos::Full, msg.into());
self.perform(error, Self::eat);
}
None => self.expected_at(thing),
}
}

/// Add an error that the `thing` was expected at the end of the last
/// non-trivia token.
pub fn expected_at(&mut self, thing: &str) {
self.trivia_start().expected(self, thing);
}
}
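// Illustrative sketch, not part of the commit: how a rule might report
// problems with the helpers above. `parse_pair` is hypothetical; `peek`,
// `expected` and `expected_at` are the methods from this diff.
fn parse_pair(p: &mut Parser) {
    match p.peek() {
        Some(NodeKind::Colon) => {
            // ... continue parsing the value ...
        }
        Some(_) => p.expected("colon"), // wraps the offending token in an error node
        None => p.expected_at("colon"), // attaches the error after the last non-trivia token
    }
}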

/// A marker that indicates where a node may start.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Marker(usize);

impl Marker {
/// Perform a subparse that wraps all children after the marker in a node
/// with the given kind.
pub fn perform<T, F>(self, p: &mut Parser, kind: NodeKind, f: F) -> T
where
F: FnOnce(&mut Parser) -> T,
{
let success = f(p);
self.end(p, kind);
success
}

/// Wrap all children after the marker (excluding trailing trivia) in a node
/// with the given `kind`.
pub fn end(self, p: &mut Parser, kind: NodeKind) {
let until = p.trivia_start();
let children = p.children.drain(self.0 .. until.0).collect();
p.children
.insert(self.0, GreenNode::with_children(kind, children).into());
}

/// Wrap all children that do not fulfill the predicate in error nodes.
pub fn filter_children<F>(self, p: &mut Parser, f: F)
where
F: Fn(&Green) -> Result<(), &'static str>,
{
for child in &mut p.children[self.0 ..] {
if (p.tokens.mode() == TokenMode::Markup
|| !Parser::is_trivia_ext(child.kind(), false))
&& !child.kind().is_error()
{
if let Err(msg) = f(child) {
let error = NodeKind::Error(ErrorPos::Full, msg.into());
let inner = mem::take(child);
*child = GreenNode::with_child(error, inner).into();
}
}
}
}

/// Insert an error message that `what` was expected at the marker position.
pub fn expected(self, p: &mut Parser, what: &str) {
let msg = format!("expected {}", what);
let error = NodeKind::Error(ErrorPos::Full, msg.into());
p.children.insert(self.0, GreenData::new(error, 0).into());
}

/// Peek at the child directly after the marker.
pub fn peek<'a>(self, p: &'a Parser) -> Option<&'a Green> {
p.children.get(self.0)
}

/// Convert the child directly after marker.
pub fn convert(self, p: &mut Parser, kind: NodeKind) {
if let Some(child) = p.children.get_mut(self.0) {
child.convert(kind);
}
}
}
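// Illustrative sketch, not part of the commit: a rule records a `Marker` at its
// start and later wraps everything it produced into one green node. The node
// kind `NodeKind::Array` and the way the marker is obtained (here from the
// current children length, inside the parser module) are assumptions made
// purely for illustration.
fn array_example(p: &mut Parser) {
    let marker = Marker(p.children.len());
    marker.perform(p, NodeKind::Array, |p| {
        // ... eat the tokens that make up the array ...
        let _ = p;
    });
}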
|
|
||||||
|
/// A logical group of tokens, e.g. `[...]`.
|
||||||
|
struct GroupEntry {
|
||||||
|
/// The kind of group this is. This decides which tokens will end the group.
|
||||||
|
/// For example, a [`Group::Paren`] will be ended by
|
||||||
|
/// [`Token::RightParen`].
|
||||||
|
pub kind: Group,
|
||||||
|
/// The mode the parser was in _before_ the group started (to which we go
|
||||||
|
/// back once the group ends).
|
||||||
|
pub prev_mode: TokenMode,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A group, confined by optional start and end delimiters.
|
||||||
|
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
||||||
|
pub enum Group {
|
||||||
|
/// A bracketed group: `[...]`.
|
||||||
|
Bracket,
|
||||||
|
/// A curly-braced group: `{...}`.
|
||||||
|
Brace,
|
||||||
|
/// A parenthesized group: `(...)`.
|
||||||
|
Paren,
|
||||||
|
/// A group ended by a semicolon or a line break: `;`, `\n`.
|
||||||
|
Stmt,
|
||||||
|
/// A group for a single expression, ended by a line break.
|
||||||
|
Expr,
|
||||||
|
/// A group for import items, ended by a semicolon, line break or `from`.
|
||||||
|
Imports,
|
||||||
|
}
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
use super::{is_newline, Scanner};
|
use super::{is_ident, is_newline, Scanner};
|
||||||
use crate::syntax::{Ident, RawNode, Span};
|
use crate::syntax::ast::RawNode;
|
||||||
use crate::util::EcoString;
|
use crate::util::EcoString;
|
||||||
|
|
||||||
/// Resolve all escape sequences in a string.
|
/// Resolve all escape sequences in a string.
|
||||||
@ -25,11 +25,9 @@ pub fn resolve_string(string: &str) -> EcoString {
|
|||||||
let sequence = s.eat_while(|c| c.is_ascii_hexdigit());
|
let sequence = s.eat_while(|c| c.is_ascii_hexdigit());
|
||||||
let _terminated = s.eat_if('}');
|
let _terminated = s.eat_if('}');
|
||||||
|
|
||||||
if let Some(c) = resolve_hex(sequence) {
|
match resolve_hex(sequence) {
|
||||||
out.push(c);
|
Some(c) => out.push(c),
|
||||||
} else {
|
None => out.push_str(s.eaten_from(start)),
|
||||||
// TODO: Feedback that unicode escape sequence is wrong.
|
|
||||||
out.push_str(s.eaten_from(start));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -48,19 +46,17 @@ pub fn resolve_hex(sequence: &str) -> Option<char> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Resolve the language tag and trims the raw text.
|
/// Resolve the language tag and trims the raw text.
|
||||||
pub fn resolve_raw(span: Span, column: usize, backticks: usize, text: &str) -> RawNode {
|
pub fn resolve_raw(column: usize, backticks: usize, text: &str) -> RawNode {
|
||||||
if backticks > 1 {
|
if backticks > 1 {
|
||||||
let (tag, inner) = split_at_lang_tag(text);
|
let (tag, inner) = split_at_lang_tag(text);
|
||||||
let (text, block) = trim_and_split_raw(column, inner);
|
let (text, block) = trim_and_split_raw(column, inner);
|
||||||
RawNode {
|
RawNode {
|
||||||
span,
|
lang: is_ident(tag).then(|| tag.into()),
|
||||||
lang: Ident::new(tag, span.with_end(span.start + tag.len())),
|
|
||||||
text: text.into(),
|
text: text.into(),
|
||||||
block,
|
block,
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
RawNode {
|
RawNode {
|
||||||
span,
|
|
||||||
lang: None,
|
lang: None,
|
||||||
text: split_lines(text).join("\n").into(),
|
text: split_lines(text).join("\n").into(),
|
||||||
block: false,
|
block: false,
|
||||||
@ -140,7 +136,6 @@ fn split_lines(text: &str) -> Vec<&str> {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
#[rustfmt::skip]
|
#[rustfmt::skip]
|
||||||
mod tests {
|
mod tests {
|
||||||
use crate::syntax::Span;
|
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -190,7 +185,7 @@ mod tests {
|
|||||||
text: &str,
|
text: &str,
|
||||||
block: bool,
|
block: bool,
|
||||||
) {
|
) {
|
||||||
let node = resolve_raw(Span::detached(), column, backticks, raw);
|
let node = resolve_raw(column, backticks, raw);
|
||||||
assert_eq!(node.lang.as_deref(), lang);
|
assert_eq!(node.lang.as_deref(), lang);
|
||||||
assert_eq!(node.text, text);
|
assert_eq!(node.text, text);
|
||||||
assert_eq!(node.block, block);
|
assert_eq!(node.block, block);
|
||||||
|
@ -1,5 +1,7 @@
|
|||||||
use std::slice::SliceIndex;
|
use std::slice::SliceIndex;
|
||||||
|
|
||||||
|
use unicode_xid::UnicodeXID;
|
||||||
|
|
||||||
/// A featureful char-based scanner.
|
/// A featureful char-based scanner.
|
||||||
#[derive(Copy, Clone)]
|
#[derive(Copy, Clone)]
|
||||||
pub struct Scanner<'s> {
|
pub struct Scanner<'s> {
|
||||||
@ -114,6 +116,12 @@ impl<'s> Scanner<'s> {
self.index = index;
self.index = index;
}
}

/// The full source string.
#[inline]
pub fn src(&self) -> &'s str {
&self.src
}

/// Slice out part of the source string.
/// Slice out part of the source string.
#[inline]
#[inline]
pub fn get<I>(&self, index: I) -> &'s str
pub fn get<I>(&self, index: I) -> &'s str
@ -150,6 +158,16 @@ impl<'s> Scanner<'s> {
// optimized away in some cases.
// optimized away in some cases.
self.src.get(start .. self.index).unwrap_or_default()
self.src.get(start .. self.index).unwrap_or_default()
}
}

/// The column index of a given index in the source string.
#[inline]
pub fn column(&self, index: usize) -> usize {
self.src[.. index]
.chars()
.rev()
.take_while(|&c| !is_newline(c))
.count()
}
}
}
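// Illustrative check, not part of the commit: `column` counts the characters
// between the given byte index and the preceding newline. Assuming the usual
// `Scanner::new` constructor, the following assertions describe its behavior.
#[test]
fn column_is_relative_to_the_last_newline() {
    let s = Scanner::new("ab\ncd");
    assert_eq!(s.column(0), 0); // start of the first line
    assert_eq!(s.column(2), 2); // the `\n` itself sits at column 2
    assert_eq!(s.column(3), 0); // `c` starts the second line
    assert_eq!(s.column(4), 1); // `d` is one character into the second line
}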
|
|
||||||
/// Whether this character denotes a newline.
|
/// Whether this character denotes a newline.
|
||||||
@ -163,3 +181,30 @@ pub fn is_newline(character: char) -> bool {
|
|||||||
'\u{0085}' | '\u{2028}' | '\u{2029}'
|
'\u{0085}' | '\u{2028}' | '\u{2029}'
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||

/// Whether a string is a valid unicode identifier.
///
/// In addition to what is specified in the [Unicode Standard][uax31], we allow:
/// - `_` as a starting character,
/// - `_` and `-` as continuing characters.
///
/// [uax31]: http://www.unicode.org/reports/tr31/
#[inline]
pub fn is_ident(string: &str) -> bool {
let mut chars = string.chars();
chars
.next()
.map_or(false, |c| is_id_start(c) && chars.all(is_id_continue))
}

/// Whether a character can start an identifier.
#[inline]
pub fn is_id_start(c: char) -> bool {
c.is_xid_start() || c == '_'
}

/// Whether a character can continue an identifier.
#[inline]
pub fn is_id_continue(c: char) -> bool {
c.is_xid_continue() || c == '_' || c == '-'
}
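// Illustrative check, not part of the commit: how the identifier predicates
// above classify a few strings under the rules documented in the comment.
#[test]
fn ident_rules_allow_underscore_and_hyphen() {
    assert!(is_ident("foo"));
    assert!(is_ident("_foo"));       // `_` may start an identifier
    assert!(is_ident("kebab-case")); // `-` may continue one
    assert!(!is_ident("-foo"));      // but `-` may not start one
    assert!(!is_ident("1st"));       // digits may not start one
    assert!(!is_ident(""));          // the empty string is not an identifier
}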
|
@ -1,6 +1,13 @@
|
|||||||
use super::{is_newline, Scanner};
|
use std::rc::Rc;
|
||||||
|
|
||||||
|
use super::{
|
||||||
|
is_id_continue, is_id_start, is_newline, resolve_hex, resolve_raw, resolve_string,
|
||||||
|
Scanner,
|
||||||
|
};
|
||||||
use crate::geom::{AngularUnit, LengthUnit};
|
use crate::geom::{AngularUnit, LengthUnit};
|
||||||
use crate::syntax::*;
|
use crate::syntax::ast::{MathNode, RawNode};
|
||||||
|
use crate::syntax::{ErrorPos, NodeKind};
|
||||||
|
use crate::util::EcoString;
|
||||||
|
|
||||||
/// An iterator over the tokens of a string of source code.
|
/// An iterator over the tokens of a string of source code.
|
||||||
pub struct Tokens<'s> {
|
pub struct Tokens<'s> {
|
||||||
@ -59,7 +66,7 @@ impl<'s> Tokens<'s> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<'s> Iterator for Tokens<'s> {
|
impl<'s> Iterator for Tokens<'s> {
|
||||||
type Item = Token<'s>;
|
type Item = NodeKind;
|
||||||
|
|
||||||
/// Parse the next token in the source code.
|
/// Parse the next token in the source code.
|
||||||
#[inline]
|
#[inline]
|
||||||
@ -68,19 +75,21 @@ impl<'s> Iterator for Tokens<'s> {
|
|||||||
let c = self.s.eat()?;
|
let c = self.s.eat()?;
|
||||||
Some(match c {
|
Some(match c {
|
||||||
// Blocks and templates.
|
// Blocks and templates.
|
||||||
'[' => Token::LeftBracket,
|
'[' => NodeKind::LeftBracket,
|
||||||
']' => Token::RightBracket,
|
']' => NodeKind::RightBracket,
|
||||||
'{' => Token::LeftBrace,
|
'{' => NodeKind::LeftBrace,
|
||||||
'}' => Token::RightBrace,
|
'}' => NodeKind::RightBrace,
|
||||||
|
|
||||||
// Whitespace.
|
// Whitespace.
|
||||||
' ' if self.s.check_or(true, |c| !c.is_whitespace()) => Token::Space(0),
|
' ' if self.s.check_or(true, |c| !c.is_whitespace()) => NodeKind::Space(0),
|
||||||
c if c.is_whitespace() => self.whitespace(),
|
c if c.is_whitespace() => self.whitespace(),
|
||||||
|
|
||||||
// Comments with special case for URLs.
|
// Comments with special case for URLs.
|
||||||
'/' if self.s.eat_if('*') => self.block_comment(),
|
'/' if self.s.eat_if('*') => self.block_comment(),
|
||||||
'/' if !self.maybe_in_url() && self.s.eat_if('/') => self.line_comment(),
|
'/' if !self.maybe_in_url() && self.s.eat_if('/') => self.line_comment(),
|
||||||
'*' if self.s.eat_if('/') => Token::Invalid(self.s.eaten_from(start)),
|
'*' if self.s.eat_if('/') => {
|
||||||
|
NodeKind::Unknown(self.s.eaten_from(start).into())
|
||||||
|
}
|
||||||
|
|
||||||
// Other things.
|
// Other things.
|
||||||
_ => match self.mode {
|
_ => match self.mode {
|
||||||
@ -93,7 +102,7 @@ impl<'s> Iterator for Tokens<'s> {
|
|||||||
|
|
||||||
impl<'s> Tokens<'s> {
|
impl<'s> Tokens<'s> {
|
||||||
#[inline]
|
#[inline]
|
||||||
fn markup(&mut self, start: usize, c: char) -> Token<'s> {
|
fn markup(&mut self, start: usize, c: char) -> NodeKind {
|
||||||
match c {
|
match c {
|
||||||
// Escape sequences.
|
// Escape sequences.
|
||||||
'\\' => self.backslash(),
|
'\\' => self.backslash(),
|
||||||
@ -102,13 +111,15 @@ impl<'s> Tokens<'s> {
|
|||||||
'#' => self.hash(),
|
'#' => self.hash(),
|
||||||
|
|
||||||
// Markup.
|
// Markup.
|
||||||
'~' => Token::Tilde,
|
'~' => NodeKind::NonBreakingSpace,
|
||||||
'*' => Token::Star,
|
'*' => NodeKind::Strong,
|
||||||
'_' => Token::Underscore,
|
'_' => NodeKind::Emph,
|
||||||
'`' => self.raw(),
|
'`' => self.raw(),
|
||||||
'$' => self.math(),
|
'$' => self.math(),
|
||||||
'-' => self.hyph(start),
|
'-' => self.hyph(),
|
||||||
'=' if self.s.check_or(true, |c| c == '=' || c.is_whitespace()) => Token::Eq,
|
'=' if self.s.check_or(true, |c| c == '=' || c.is_whitespace()) => {
|
||||||
|
NodeKind::Eq
|
||||||
|
}
|
||||||
c if c == '.' || c.is_ascii_digit() => self.numbering(start, c),
|
c if c == '.' || c.is_ascii_digit() => self.numbering(start, c),
|
||||||
|
|
||||||
// Plain text.
|
// Plain text.
|
||||||
@ -116,35 +127,35 @@ impl<'s> Tokens<'s> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn code(&mut self, start: usize, c: char) -> Token<'s> {
|
fn code(&mut self, start: usize, c: char) -> NodeKind {
|
||||||
match c {
|
match c {
|
||||||
// Parens.
|
// Parens.
|
||||||
'(' => Token::LeftParen,
|
'(' => NodeKind::LeftParen,
|
||||||
')' => Token::RightParen,
|
')' => NodeKind::RightParen,
|
||||||
|
|
||||||
// Length two.
|
// Length two.
|
||||||
'=' if self.s.eat_if('=') => Token::EqEq,
|
'=' if self.s.eat_if('=') => NodeKind::EqEq,
|
||||||
'!' if self.s.eat_if('=') => Token::ExclEq,
|
'!' if self.s.eat_if('=') => NodeKind::ExclEq,
|
||||||
'<' if self.s.eat_if('=') => Token::LtEq,
|
'<' if self.s.eat_if('=') => NodeKind::LtEq,
|
||||||
'>' if self.s.eat_if('=') => Token::GtEq,
|
'>' if self.s.eat_if('=') => NodeKind::GtEq,
|
||||||
'+' if self.s.eat_if('=') => Token::PlusEq,
|
'+' if self.s.eat_if('=') => NodeKind::PlusEq,
|
||||||
'-' if self.s.eat_if('=') => Token::HyphEq,
|
'-' if self.s.eat_if('=') => NodeKind::HyphEq,
|
||||||
'*' if self.s.eat_if('=') => Token::StarEq,
|
'*' if self.s.eat_if('=') => NodeKind::StarEq,
|
||||||
'/' if self.s.eat_if('=') => Token::SlashEq,
|
'/' if self.s.eat_if('=') => NodeKind::SlashEq,
|
||||||
'.' if self.s.eat_if('.') => Token::Dots,
|
'.' if self.s.eat_if('.') => NodeKind::Dots,
|
||||||
'=' if self.s.eat_if('>') => Token::Arrow,
|
'=' if self.s.eat_if('>') => NodeKind::Arrow,
|
||||||
|
|
||||||
// Length one.
|
// Length one.
|
||||||
',' => Token::Comma,
|
',' => NodeKind::Comma,
|
||||||
';' => Token::Semicolon,
|
';' => NodeKind::Semicolon,
|
||||||
':' => Token::Colon,
|
':' => NodeKind::Colon,
|
||||||
'+' => Token::Plus,
|
'+' => NodeKind::Plus,
|
||||||
'-' => Token::Hyph,
|
'-' => NodeKind::Minus,
|
||||||
'*' => Token::Star,
|
'*' => NodeKind::Star,
|
||||||
'/' => Token::Slash,
|
'/' => NodeKind::Slash,
|
||||||
'=' => Token::Eq,
|
'=' => NodeKind::Eq,
|
||||||
'<' => Token::Lt,
|
'<' => NodeKind::Lt,
|
||||||
'>' => Token::Gt,
|
'>' => NodeKind::Gt,
|
||||||
|
|
||||||
// Identifiers.
|
// Identifiers.
|
||||||
c if is_id_start(c) => self.ident(start),
|
c if is_id_start(c) => self.ident(start),
|
||||||
@ -159,12 +170,12 @@ impl<'s> Tokens<'s> {
|
|||||||
// Strings.
|
// Strings.
|
||||||
'"' => self.string(),
|
'"' => self.string(),
|
||||||
|
|
||||||
_ => Token::Invalid(self.s.eaten_from(start)),
|
_ => NodeKind::Unknown(self.s.eaten_from(start).into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn text(&mut self, start: usize) -> Token<'s> {
|
fn text(&mut self, start: usize) -> NodeKind {
|
||||||
macro_rules! table {
|
macro_rules! table {
|
||||||
($($c:literal)|*) => {{
|
($($c:literal)|*) => {{
|
||||||
let mut t = [false; 128];
|
let mut t = [false; 128];
|
||||||
@ -186,10 +197,10 @@ impl<'s> Tokens<'s> {
|
|||||||
TABLE.get(c as usize).copied().unwrap_or_else(|| c.is_whitespace())
|
TABLE.get(c as usize).copied().unwrap_or_else(|| c.is_whitespace())
|
||||||
});
|
});
|
||||||
|
|
||||||
Token::Text(self.s.eaten_from(start))
|
NodeKind::Text(self.s.eaten_from(start).into())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn whitespace(&mut self) -> Token<'s> {
|
fn whitespace(&mut self) -> NodeKind {
|
||||||
self.s.uneat();
|
self.s.uneat();
|
||||||
|
|
||||||
// Count the number of newlines.
|
// Count the number of newlines.
|
||||||
@ -208,73 +219,81 @@ impl<'s> Tokens<'s> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Token::Space(newlines)
|
NodeKind::Space(newlines)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn backslash(&mut self) -> Token<'s> {
|
fn backslash(&mut self) -> NodeKind {
|
||||||
if let Some(c) = self.s.peek() {
|
match self.s.peek() {
|
||||||
match c {
|
Some(c) => match c {
|
||||||
// Backslash and comments.
|
// Backslash and comments.
|
||||||
'\\' | '/' |
|
'\\' | '/' |
|
||||||
// Parenthesis and hashtag.
|
// Parenthesis and hashtag.
|
||||||
'[' | ']' | '{' | '}' | '#' |
|
'[' | ']' | '{' | '}' | '#' |
|
||||||
// Markup.
|
// Markup.
|
||||||
'*' | '_' | '=' | '~' | '`' | '$' => {
|
'*' | '_' | '=' | '~' | '`' | '$' => {
|
||||||
let start = self.s.index();
|
|
||||||
self.s.eat_assert(c);
|
self.s.eat_assert(c);
|
||||||
Token::Text(&self.s.eaten_from(start))
|
NodeKind::Text(c.into())
|
||||||
}
|
}
|
||||||
'u' if self.s.rest().starts_with("u{") => {
|
'u' if self.s.rest().starts_with("u{") => {
|
||||||
self.s.eat_assert('u');
|
self.s.eat_assert('u');
|
||||||
self.s.eat_assert('{');
|
self.s.eat_assert('{');
|
||||||
Token::UnicodeEscape(UnicodeEscapeToken {
|
let sequence = self.s.eat_while(|c| c.is_ascii_alphanumeric());
|
||||||
// Allow more than `ascii_hexdigit` for better error recovery.
|
if self.s.eat_if('}') {
|
||||||
sequence: self.s.eat_while(|c| c.is_ascii_alphanumeric()),
|
if let Some(c) = resolve_hex(&sequence) {
|
||||||
terminated: self.s.eat_if('}'),
|
NodeKind::UnicodeEscape(c)
|
||||||
})
|
} else {
|
||||||
|
NodeKind::Error(
|
||||||
|
ErrorPos::Full,
|
||||||
|
"invalid unicode escape sequence".into(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
NodeKind::Error(
|
||||||
|
ErrorPos::End,
|
||||||
|
"expected closing brace".into(),
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
c if c.is_whitespace() => Token::Backslash,
|
c if c.is_whitespace() => NodeKind::Linebreak,
|
||||||
_ => Token::Text("\\"),
|
_ => NodeKind::Text('\\'.into()),
|
||||||
}
|
},
|
||||||
} else {
|
None => NodeKind::Linebreak,
|
||||||
Token::Backslash
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[inline]
|
#[inline]
|
||||||
fn hash(&mut self) -> Token<'s> {
|
fn hash(&mut self) -> NodeKind {
|
||||||
if self.s.check_or(false, is_id_start) {
|
if self.s.check_or(false, is_id_start) {
|
||||||
let read = self.s.eat_while(is_id_continue);
|
let read = self.s.eat_while(is_id_continue);
|
||||||
if let Some(keyword) = keyword(read) {
|
match keyword(read) {
|
||||||
keyword
|
Some(keyword) => keyword,
|
||||||
} else {
|
None => NodeKind::Ident(read.into()),
|
||||||
Token::Ident(read)
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
Token::Text("#")
|
NodeKind::Text("#".into())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn hyph(&mut self, start: usize) -> Token<'s> {
|
fn hyph(&mut self) -> NodeKind {
|
||||||
if self.s.eat_if('-') {
|
if self.s.eat_if('-') {
|
||||||
if self.s.eat_if('-') {
|
if self.s.eat_if('-') {
|
||||||
Token::HyphHyphHyph
|
NodeKind::EmDash
|
||||||
} else {
|
} else {
|
||||||
Token::HyphHyph
|
NodeKind::EnDash
|
||||||
}
|
}
|
||||||
} else if self.s.check_or(true, char::is_whitespace) {
|
} else if self.s.check_or(true, char::is_whitespace) {
|
||||||
Token::Hyph
|
NodeKind::Minus
|
||||||
} else {
|
} else {
|
||||||
Token::Text(self.s.eaten_from(start))
|
NodeKind::Text("-".into())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn numbering(&mut self, start: usize, c: char) -> Token<'s> {
|
fn numbering(&mut self, start: usize, c: char) -> NodeKind {
|
||||||
let number = if c != '.' {
|
let number = if c != '.' {
|
||||||
self.s.eat_while(|c| c.is_ascii_digit());
|
self.s.eat_while(|c| c.is_ascii_digit());
|
||||||
let read = self.s.eaten_from(start);
|
let read = self.s.eaten_from(start);
|
||||||
if !self.s.eat_if('.') {
|
if !self.s.eat_if('.') {
|
||||||
return Token::Text(read);
|
return NodeKind::Text(self.s.eaten_from(start).into());
|
||||||
}
|
}
|
||||||
read.parse().ok()
|
read.parse().ok()
|
||||||
} else {
|
} else {
|
||||||
@ -282,13 +301,15 @@ impl<'s> Tokens<'s> {
|
|||||||
};
|
};
|
||||||
|
|
||||||
if self.s.check_or(true, char::is_whitespace) {
|
if self.s.check_or(true, char::is_whitespace) {
|
||||||
Token::Numbering(number)
|
NodeKind::EnumNumbering(number)
|
||||||
} else {
|
} else {
|
||||||
Token::Text(self.s.eaten_from(start))
|
NodeKind::Text(self.s.eaten_from(start).into())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn raw(&mut self) -> Token<'s> {
|
fn raw(&mut self) -> NodeKind {
|
||||||
|
let column = self.s.column(self.s.index() - 1);
|
||||||
|
|
||||||
let mut backticks = 1;
|
let mut backticks = 1;
|
||||||
while self.s.eat_if('`') {
|
while self.s.eat_if('`') {
|
||||||
backticks += 1;
|
backticks += 1;
|
||||||
@ -296,7 +317,11 @@ impl<'s> Tokens<'s> {
|
|||||||
|
|
||||||
// Special case for empty inline block.
|
// Special case for empty inline block.
|
||||||
if backticks == 2 {
|
if backticks == 2 {
|
||||||
return Token::Raw(RawToken { text: "", backticks: 1, terminated: true });
|
return NodeKind::Raw(Rc::new(RawNode {
|
||||||
|
text: EcoString::new(),
|
||||||
|
lang: None,
|
||||||
|
block: false,
|
||||||
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
let start = self.s.index();
|
let start = self.s.index();
|
||||||
@ -310,17 +335,30 @@ impl<'s> Tokens<'s> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let terminated = found == backticks;
|
if found == backticks {
|
||||||
let end = self.s.index() - if terminated { found } else { 0 };
|
let end = self.s.index() - found as usize;
|
||||||
|
NodeKind::Raw(Rc::new(resolve_raw(
|
||||||
|
column,
|
||||||
|
backticks,
|
||||||
|
self.s.get(start .. end).into(),
|
||||||
|
)))
|
||||||
|
} else {
|
||||||
|
let remaining = backticks - found;
|
||||||
|
let noun = if remaining == 1 { "backtick" } else { "backticks" };
|
||||||
|
|
||||||
Token::Raw(RawToken {
|
NodeKind::Error(
|
||||||
text: self.s.get(start .. end),
|
ErrorPos::End,
|
||||||
backticks,
|
if found == 0 {
|
||||||
terminated,
|
format!("expected {} {}", remaining, noun)
|
||||||
})
|
} else {
|
||||||
|
format!("expected {} more {}", remaining, noun)
|
||||||
|
}
|
||||||
|
.into(),
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn math(&mut self) -> Token<'s> {
|
fn math(&mut self) -> NodeKind {
|
||||||
let mut display = false;
|
let mut display = false;
|
||||||
if self.s.eat_if('[') {
|
if self.s.eat_if('[') {
|
||||||
display = true;
|
display = true;
|
||||||
@ -350,25 +388,36 @@ impl<'s> Tokens<'s> {
|
|||||||
(true, true) => 2,
|
(true, true) => 2,
|
||||||
};
|
};
|
||||||
|
|
||||||
Token::Math(MathToken {
|
if terminated {
|
||||||
formula: self.s.get(start .. end),
|
NodeKind::Math(Rc::new(MathNode {
|
||||||
display,
|
formula: self.s.get(start .. end).into(),
|
||||||
terminated,
|
display,
|
||||||
})
|
}))
|
||||||
}
|
} else {
|
||||||
|
NodeKind::Error(
|
||||||
fn ident(&mut self, start: usize) -> Token<'s> {
|
ErrorPos::End,
|
||||||
self.s.eat_while(is_id_continue);
|
if !display || (!escaped && dollar) {
|
||||||
match self.s.eaten_from(start) {
|
"expected closing dollar sign"
|
||||||
"none" => Token::None,
|
} else {
|
||||||
"auto" => Token::Auto,
|
"expected closing bracket and dollar sign"
|
||||||
"true" => Token::Bool(true),
|
}
|
||||||
"false" => Token::Bool(false),
|
.into(),
|
||||||
id => keyword(id).unwrap_or(Token::Ident(id)),
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn number(&mut self, start: usize, c: char) -> Token<'s> {
|
fn ident(&mut self, start: usize) -> NodeKind {
|
||||||
|
self.s.eat_while(is_id_continue);
|
||||||
|
match self.s.eaten_from(start) {
|
||||||
|
"none" => NodeKind::None,
|
||||||
|
"auto" => NodeKind::Auto,
|
||||||
|
"true" => NodeKind::Bool(true),
|
||||||
|
"false" => NodeKind::Bool(false),
|
||||||
|
id => keyword(id).unwrap_or(NodeKind::Ident(id.into())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn number(&mut self, start: usize, c: char) -> NodeKind {
|
||||||
// Read the first part (integer or fractional depending on `first`).
|
// Read the first part (integer or fractional depending on `first`).
|
||||||
self.s.eat_while(|c| c.is_ascii_digit());
|
self.s.eat_while(|c| c.is_ascii_digit());
|
||||||
|
|
||||||
@ -396,55 +445,56 @@ impl<'s> Tokens<'s> {
|
|||||||
|
|
||||||
// Find out whether it is a simple number.
|
// Find out whether it is a simple number.
|
||||||
if suffix.is_empty() {
|
if suffix.is_empty() {
|
||||||
if let Ok(int) = number.parse::<i64>() {
|
if let Ok(i) = number.parse::<i64>() {
|
||||||
return Token::Int(int);
|
return NodeKind::Int(i);
|
||||||
} else if let Ok(float) = number.parse::<f64>() {
|
|
||||||
return Token::Float(float);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Otherwise parse into the fitting numeric type.
|
if let Ok(f) = number.parse::<f64>() {
|
||||||
let build = match suffix {
|
match suffix {
|
||||||
"%" => Token::Percent,
|
"" => NodeKind::Float(f),
|
||||||
"fr" => Token::Fraction,
|
"%" => NodeKind::Percentage(f),
|
||||||
"pt" => |x| Token::Length(x, LengthUnit::Pt),
|
"fr" => NodeKind::Fraction(f),
|
||||||
"mm" => |x| Token::Length(x, LengthUnit::Mm),
|
"pt" => NodeKind::Length(f, LengthUnit::Pt),
|
||||||
"cm" => |x| Token::Length(x, LengthUnit::Cm),
|
"mm" => NodeKind::Length(f, LengthUnit::Mm),
|
||||||
"in" => |x| Token::Length(x, LengthUnit::In),
|
"cm" => NodeKind::Length(f, LengthUnit::Cm),
|
||||||
"rad" => |x| Token::Angle(x, AngularUnit::Rad),
|
"in" => NodeKind::Length(f, LengthUnit::In),
|
||||||
"deg" => |x| Token::Angle(x, AngularUnit::Deg),
|
"deg" => NodeKind::Angle(f, AngularUnit::Deg),
|
||||||
_ => return Token::Invalid(all),
|
"rad" => NodeKind::Angle(f, AngularUnit::Rad),
|
||||||
};
|
_ => {
|
||||||
|
return NodeKind::Unknown(all.into());
|
||||||
if let Ok(float) = number.parse::<f64>() {
|
}
|
||||||
build(float)
|
}
|
||||||
} else {
|
} else {
|
||||||
Token::Invalid(all)
|
NodeKind::Unknown(all.into())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn string(&mut self) -> Token<'s> {
|
|
||||||
|
fn string(&mut self) -> NodeKind {
|
||||||
let mut escaped = false;
|
let mut escaped = false;
|
||||||
Token::Str(StrToken {
|
let string = resolve_string(self.s.eat_until(|c| {
|
||||||
string: self.s.eat_until(|c| {
|
if c == '"' && !escaped {
|
||||||
if c == '"' && !escaped {
|
true
|
||||||
true
|
} else {
|
||||||
} else {
|
escaped = c == '\\' && !escaped;
|
||||||
escaped = c == '\\' && !escaped;
|
false
|
||||||
false
|
}
|
||||||
}
|
}));
|
||||||
}),
|
|
||||||
terminated: self.s.eat_if('"'),
|
if self.s.eat_if('"') {
|
||||||
})
|
NodeKind::Str(string)
|
||||||
|
} else {
|
||||||
|
NodeKind::Error(ErrorPos::End, "expected quote".into())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn line_comment(&mut self) -> Token<'s> {
|
fn line_comment(&mut self) -> NodeKind {
|
||||||
Token::LineComment(self.s.eat_until(is_newline))
|
self.s.eat_until(is_newline);
|
||||||
|
NodeKind::LineComment
|
||||||
}
|
}
|
||||||
|
|
||||||
fn block_comment(&mut self) -> Token<'s> {
|
fn block_comment(&mut self) -> NodeKind {
|
||||||
let start = self.s.index();
|
|
||||||
|
|
||||||
let mut state = '_';
|
let mut state = '_';
|
||||||
let mut depth = 1;
|
let mut depth = 1;
|
||||||
|
|
||||||
@ -466,10 +516,7 @@ impl<'s> Tokens<'s> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let terminated = depth == 0;
|
NodeKind::BlockComment
|
||||||
let end = self.s.index() - if terminated { 2 } else { 0 };
|
|
||||||
|
|
||||||
Token::BlockComment(self.s.get(start .. end))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn maybe_in_url(&self) -> bool {
|
fn maybe_in_url(&self) -> bool {
|
||||||
@ -477,24 +524,24 @@ impl<'s> Tokens<'s> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn keyword(ident: &str) -> Option<Token<'static>> {
|
fn keyword(ident: &str) -> Option<NodeKind> {
|
||||||
Some(match ident {
|
Some(match ident {
|
||||||
"not" => Token::Not,
|
"not" => NodeKind::Not,
|
||||||
"and" => Token::And,
|
"and" => NodeKind::And,
|
||||||
"or" => Token::Or,
|
"or" => NodeKind::Or,
|
||||||
"with" => Token::With,
|
"with" => NodeKind::With,
|
||||||
"let" => Token::Let,
|
"let" => NodeKind::Let,
|
||||||
"if" => Token::If,
|
"if" => NodeKind::If,
|
||||||
"else" => Token::Else,
|
"else" => NodeKind::Else,
|
||||||
"for" => Token::For,
|
"for" => NodeKind::For,
|
||||||
"in" => Token::In,
|
"in" => NodeKind::In,
|
||||||
"while" => Token::While,
|
"while" => NodeKind::While,
|
||||||
"break" => Token::Break,
|
"break" => NodeKind::Break,
|
||||||
"continue" => Token::Continue,
|
"continue" => NodeKind::Continue,
|
||||||
"return" => Token::Return,
|
"return" => NodeKind::Return,
|
||||||
"import" => Token::Import,
|
"import" => NodeKind::Import,
|
||||||
"include" => Token::Include,
|
"include" => NodeKind::Include,
|
||||||
"from" => Token::From,
|
"from" => NodeKind::From,
|
||||||
_ => return None,
|
_ => return None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@ -506,24 +553,45 @@ mod tests {
|
|||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
|
use ErrorPos::*;
|
||||||
|
use NodeKind::*;
|
||||||
use Option::None;
|
use Option::None;
|
||||||
use Token::{Ident, *};
|
|
||||||
use TokenMode::{Code, Markup};
|
use TokenMode::{Code, Markup};
|
||||||
|
|
||||||
const fn UnicodeEscape(sequence: &str, terminated: bool) -> Token {
|
fn UnicodeEscape(c: char) -> NodeKind {
|
||||||
Token::UnicodeEscape(UnicodeEscapeToken { sequence, terminated })
|
NodeKind::UnicodeEscape(c)
|
||||||
}
|
}
|
||||||
|
|
||||||
const fn Raw(text: &str, backticks: usize, terminated: bool) -> Token {
|
fn Error(pos: ErrorPos, message: &str) -> NodeKind {
|
||||||
Token::Raw(RawToken { text, backticks, terminated })
|
NodeKind::Error(pos, message.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
const fn Math(formula: &str, display: bool, terminated: bool) -> Token {
|
fn Raw(text: &str, lang: Option<&str>, block: bool) -> NodeKind {
|
||||||
Token::Math(MathToken { formula, display, terminated })
|
NodeKind::Raw(Rc::new(RawNode {
|
||||||
|
text: text.into(),
|
||||||
|
lang: lang.map(Into::into),
|
||||||
|
block,
|
||||||
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
const fn Str(string: &str, terminated: bool) -> Token {
|
fn Math(formula: &str, display: bool) -> NodeKind {
|
||||||
Token::Str(StrToken { string, terminated })
|
NodeKind::Math(Rc::new(MathNode { formula: formula.into(), display }))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn Str(string: &str) -> NodeKind {
|
||||||
|
NodeKind::Str(string.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn Text(string: &str) -> NodeKind {
|
||||||
|
NodeKind::Text(string.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn Ident(ident: &str) -> NodeKind {
|
||||||
|
NodeKind::Ident(ident.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn Invalid(invalid: &str) -> NodeKind {
|
||||||
|
NodeKind::Unknown(invalid.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Building blocks for suffix testing.
|
/// Building blocks for suffix testing.
|
||||||
@ -541,40 +609,6 @@ mod tests {
|
|||||||
/// - '/': symbols
|
/// - '/': symbols
|
||||||
const BLOCKS: &str = " a1/";
|
const BLOCKS: &str = " a1/";
|
||||||
|
|
||||||
/// Suffixes described by four-tuples of:
|
|
||||||
///
|
|
||||||
/// - block the suffix is part of
|
|
||||||
/// - mode in which the suffix is applicable
|
|
||||||
/// - the suffix string
|
|
||||||
/// - the resulting suffix token
|
|
||||||
const SUFFIXES: &[(char, Option<TokenMode>, &str, Token)] = &[
|
|
||||||
// Whitespace suffixes.
|
|
||||||
(' ', None, " ", Space(0)),
|
|
||||||
(' ', None, "\n", Space(1)),
|
|
||||||
(' ', None, "\r", Space(1)),
|
|
||||||
(' ', None, "\r\n", Space(1)),
|
|
||||||
// Letter suffixes.
|
|
||||||
('a', Some(Markup), "hello", Text("hello")),
|
|
||||||
('a', Some(Markup), "💚", Text("💚")),
|
|
||||||
('a', Some(Code), "val", Ident("val")),
|
|
||||||
('a', Some(Code), "α", Ident("α")),
|
|
||||||
('a', Some(Code), "_", Ident("_")),
|
|
||||||
// Number suffixes.
|
|
||||||
('1', Some(Code), "2", Int(2)),
|
|
||||||
('1', Some(Code), ".2", Float(0.2)),
|
|
||||||
// Symbol suffixes.
|
|
||||||
('/', None, "[", LeftBracket),
|
|
||||||
('/', None, "//", LineComment("")),
|
|
||||||
('/', None, "/**/", BlockComment("")),
|
|
||||||
('/', Some(Markup), "*", Star),
|
|
||||||
('/', Some(Markup), "$ $", Math(" ", false, true)),
|
|
||||||
('/', Some(Markup), r"\\", Text(r"\")),
|
|
||||||
('/', Some(Markup), "#let", Let),
|
|
||||||
('/', Some(Code), "(", LeftParen),
|
|
||||||
('/', Some(Code), ":", Colon),
|
|
||||||
('/', Some(Code), "+=", PlusEq),
|
|
||||||
];
|
|
||||||
|
|
||||||
macro_rules! t {
|
macro_rules! t {
|
||||||
(Both $($tts:tt)*) => {
|
(Both $($tts:tt)*) => {
|
||||||
t!(Markup $($tts)*);
|
t!(Markup $($tts)*);
|
||||||
@ -584,8 +618,42 @@ mod tests {
|
|||||||
// Test without suffix.
|
// Test without suffix.
|
||||||
t!(@$mode: $src => $($token),*);
|
t!(@$mode: $src => $($token),*);
|
||||||
|
|
||||||
|
// Suffixes described by four-tuples of:
|
||||||
|
//
|
||||||
|
// - block the suffix is part of
|
||||||
|
// - mode in which the suffix is applicable
|
||||||
|
// - the suffix string
|
||||||
|
// - the resulting suffix NodeKind
|
||||||
|
let suffixes: &[(char, Option<TokenMode>, &str, NodeKind)] = &[
|
||||||
|
// Whitespace suffixes.
|
||||||
|
(' ', None, " ", Space(0)),
|
||||||
|
(' ', None, "\n", Space(1)),
|
||||||
|
(' ', None, "\r", Space(1)),
|
||||||
|
(' ', None, "\r\n", Space(1)),
|
||||||
|
// Letter suffixes.
|
||||||
|
('a', Some(Markup), "hello", Text("hello")),
|
||||||
|
('a', Some(Markup), "💚", Text("💚")),
|
||||||
|
('a', Some(Code), "val", Ident("val")),
|
||||||
|
('a', Some(Code), "α", Ident("α")),
|
||||||
|
('a', Some(Code), "_", Ident("_")),
|
||||||
|
// Number suffixes.
|
||||||
|
('1', Some(Code), "2", Int(2)),
|
||||||
|
('1', Some(Code), ".2", Float(0.2)),
|
||||||
|
// Symbol suffixes.
|
||||||
|
('/', None, "[", LeftBracket),
|
||||||
|
('/', None, "//", LineComment),
|
||||||
|
('/', None, "/**/", BlockComment),
|
||||||
|
('/', Some(Markup), "*", Strong),
|
||||||
|
('/', Some(Markup), "$ $", Math(" ", false)),
|
||||||
|
('/', Some(Markup), r"\\", Text("\\")),
|
||||||
|
('/', Some(Markup), "#let", Let),
|
||||||
|
('/', Some(Code), "(", LeftParen),
|
||||||
|
('/', Some(Code), ":", Colon),
|
||||||
|
('/', Some(Code), "+=", PlusEq),
|
||||||
|
];
|
||||||
|
|
||||||
// Test with each applicable suffix.
|
// Test with each applicable suffix.
|
||||||
for &(block, mode, suffix, token) in SUFFIXES {
|
for &(block, mode, suffix, ref token) in suffixes {
|
||||||
let src = $src;
|
let src = $src;
|
||||||
#[allow(unused_variables)]
|
#[allow(unused_variables)]
|
||||||
let blocks = BLOCKS;
|
let blocks = BLOCKS;
|
||||||
@ -599,7 +667,7 @@ mod tests {
|
|||||||
(@$mode:ident: $src:expr => $($token:expr),*) => {{
|
(@$mode:ident: $src:expr => $($token:expr),*) => {{
|
||||||
let src = $src;
|
let src = $src;
|
||||||
let found = Tokens::new(&src, $mode).collect::<Vec<_>>();
|
let found = Tokens::new(&src, $mode).collect::<Vec<_>>();
|
||||||
let expected = vec![$($token),*];
|
let expected = vec![$($token.clone()),*];
|
||||||
check(&src, found, expected);
|
check(&src, found, expected);
|
||||||
}};
|
}};
|
||||||
}
|
}
|
||||||
@ -671,7 +739,7 @@ mod tests {
|
|||||||
|
|
||||||
// Test text ends.
|
// Test text ends.
|
||||||
t!(Markup[""]: "hello " => Text("hello"), Space(0));
|
t!(Markup[""]: "hello " => Text("hello"), Space(0));
|
||||||
t!(Markup[""]: "hello~" => Text("hello"), Tilde);
|
t!(Markup[""]: "hello~" => Text("hello"), NonBreakingSpace);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -698,31 +766,31 @@ mod tests {
|
|||||||
t!(Markup[" /"]: r#"\""# => Text(r"\"), Text("\""));
|
t!(Markup[" /"]: r#"\""# => Text(r"\"), Text("\""));
|
||||||
|
|
||||||
// Test basic unicode escapes.
|
// Test basic unicode escapes.
|
||||||
t!(Markup: r"\u{}" => UnicodeEscape("", true));
|
t!(Markup: r"\u{}" => Error(Full, "invalid unicode escape sequence"));
|
||||||
t!(Markup: r"\u{2603}" => UnicodeEscape("2603", true));
|
t!(Markup: r"\u{2603}" => UnicodeEscape('☃'));
|
||||||
t!(Markup: r"\u{P}" => UnicodeEscape("P", true));
|
t!(Markup: r"\u{P}" => Error(Full, "invalid unicode escape sequence"));
|
||||||
|
|
||||||
// Test unclosed unicode escapes.
|
// Test unclosed unicode escapes.
|
||||||
t!(Markup[" /"]: r"\u{" => UnicodeEscape("", false));
|
t!(Markup[" /"]: r"\u{" => Error(End, "expected closing brace"));
|
||||||
t!(Markup[" /"]: r"\u{1" => UnicodeEscape("1", false));
|
t!(Markup[" /"]: r"\u{1" => Error(End, "expected closing brace"));
|
||||||
t!(Markup[" /"]: r"\u{26A4" => UnicodeEscape("26A4", false));
|
t!(Markup[" /"]: r"\u{26A4" => Error(End, "expected closing brace"));
|
||||||
t!(Markup[" /"]: r"\u{1Q3P" => UnicodeEscape("1Q3P", false));
|
t!(Markup[" /"]: r"\u{1Q3P" => Error(End, "expected closing brace"));
|
||||||
t!(Markup: r"\u{1🏕}" => UnicodeEscape("1", false), Text("🏕"), RightBrace);
|
t!(Markup: r"\u{1🏕}" => Error(End, "expected closing brace"), Text("🏕"), RightBrace);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_tokenize_markup_symbols() {
|
fn test_tokenize_markup_symbols() {
|
||||||
// Test markup tokens.
|
// Test markup tokens.
|
||||||
t!(Markup[" a1"]: "*" => Star);
|
t!(Markup[" a1"]: "*" => Strong);
|
||||||
t!(Markup: "_" => Underscore);
|
t!(Markup: "_" => Emph);
|
||||||
t!(Markup[""]: "===" => Eq, Eq, Eq);
|
t!(Markup[""]: "===" => Eq, Eq, Eq);
|
||||||
t!(Markup["a1/"]: "= " => Eq, Space(0));
|
t!(Markup["a1/"]: "= " => Eq, Space(0));
|
||||||
t!(Markup: "~" => Tilde);
|
t!(Markup: "~" => NonBreakingSpace);
|
||||||
t!(Markup[" "]: r"\" => Backslash);
|
t!(Markup[" "]: r"\" => Linebreak);
|
||||||
t!(Markup["a "]: r"a--" => Text("a"), HyphHyph);
|
t!(Markup["a "]: r"a--" => Text("a"), EnDash);
|
||||||
t!(Markup["a1/"]: "- " => Hyph, Space(0));
|
t!(Markup["a1/"]: "- " => Minus, Space(0));
|
||||||
t!(Markup[" "]: "." => Numbering(None));
|
t!(Markup[" "]: "." => EnumNumbering(None));
|
||||||
t!(Markup[" "]: "1." => Numbering(Some(1)));
|
t!(Markup[" "]: "1." => EnumNumbering(Some(1)));
|
||||||
t!(Markup[" "]: "1.a" => Text("1."), Text("a"));
|
t!(Markup[" "]: "1.a" => Text("1."), Text("a"));
|
||||||
t!(Markup[" /"]: "a1." => Text("a1."));
|
t!(Markup[" /"]: "a1." => Text("a1."));
|
||||||
}
|
}
|
||||||
@ -734,7 +802,7 @@ mod tests {
|
|||||||
t!(Code: ";" => Semicolon);
|
t!(Code: ";" => Semicolon);
|
||||||
t!(Code: ":" => Colon);
|
t!(Code: ":" => Colon);
|
||||||
t!(Code: "+" => Plus);
|
t!(Code: "+" => Plus);
|
||||||
t!(Code: "-" => Hyph);
|
t!(Code: "-" => Minus);
|
||||||
t!(Code[" a1"]: "*" => Star);
|
t!(Code[" a1"]: "*" => Star);
|
||||||
t!(Code[" a1"]: "/" => Slash);
|
t!(Code[" a1"]: "/" => Slash);
|
||||||
t!(Code: "=" => Eq);
|
t!(Code: "=" => Eq);
|
||||||
@ -756,10 +824,10 @@ mod tests {
|
|||||||
t!(Code[" a/"]: "..." => Dots, Invalid("."));
|
t!(Code[" a/"]: "..." => Dots, Invalid("."));
|
||||||
|
|
||||||
// Test hyphen as symbol vs part of identifier.
|
// Test hyphen as symbol vs part of identifier.
|
||||||
t!(Code[" /"]: "-1" => Hyph, Int(1));
|
t!(Code[" /"]: "-1" => Minus, Int(1));
|
||||||
t!(Code[" /"]: "-a" => Hyph, Ident("a"));
|
t!(Code[" /"]: "-a" => Minus, Ident("a"));
|
||||||
t!(Code[" /"]: "--1" => Hyph, Hyph, Int(1));
|
t!(Code[" /"]: "--1" => Minus, Minus, Int(1));
|
||||||
t!(Code[" /"]: "--_a" => Hyph, Hyph, Ident("_a"));
|
t!(Code[" /"]: "--_a" => Minus, Minus, Ident("_a"));
|
||||||
t!(Code[" /"]: "a-b" => Ident("a-b"));
|
t!(Code[" /"]: "a-b" => Ident("a-b"));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -776,13 +844,13 @@ mod tests {
|
|||||||
("import", Import),
|
("import", Import),
|
||||||
];
|
];
|
||||||
|
|
||||||
for &(s, t) in &list {
|
for (s, t) in list.clone() {
|
||||||
t!(Markup[" "]: format!("#{}", s) => t);
|
t!(Markup[" "]: format!("#{}", s) => t);
|
||||||
t!(Markup[" "]: format!("#{0}#{0}", s) => t, t);
|
t!(Markup[" "]: format!("#{0}#{0}", s) => t, t);
|
||||||
t!(Markup[" /"]: format!("# {}", s) => Token::Text("#"), Space(0), Text(s));
|
t!(Markup[" /"]: format!("# {}", s) => Text("#"), Space(0), Text(s));
|
||||||
}
|
}
|
||||||
|
|
||||||
for &(s, t) in &list {
|
for (s, t) in list {
|
||||||
t!(Code[" "]: s => t);
|
t!(Code[" "]: s => t);
|
||||||
t!(Markup[" /"]: s => Text(s));
|
t!(Markup[" /"]: s => Text(s));
|
||||||
}
|
}
|
||||||
@ -796,45 +864,43 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_tokenize_raw_blocks() {
|
fn test_tokenize_raw_blocks() {
|
||||||
let empty = Raw("", 1, true);
|
|
||||||
|
|
||||||
// Test basic raw block.
|
// Test basic raw block.
|
||||||
t!(Markup: "``" => empty);
|
t!(Markup: "``" => Raw("", None, false));
|
||||||
t!(Markup: "`raw`" => Raw("raw", 1, true));
|
t!(Markup: "`raw`" => Raw("raw", None, false));
|
||||||
t!(Markup[""]: "`]" => Raw("]", 1, false));
|
t!(Markup[""]: "`]" => Error(End, "expected 1 backtick"));
|
||||||
|
|
||||||
// Test special symbols in raw block.
|
// Test special symbols in raw block.
|
||||||
t!(Markup: "`[brackets]`" => Raw("[brackets]", 1, true));
|
t!(Markup: "`[brackets]`" => Raw("[brackets]", None, false));
|
||||||
t!(Markup[""]: r"`\`` " => Raw(r"\", 1, true), Raw(" ", 1, false));
|
t!(Markup[""]: r"`\`` " => Raw(r"\", None, false), Error(End, "expected 1 backtick"));
|
||||||
|
|
||||||
// Test separated closing backticks.
|
// Test separated closing backticks.
|
||||||
t!(Markup: "```not `y`e`t```" => Raw("not `y`e`t", 3, true));
|
t!(Markup: "```not `y`e`t```" => Raw("`y`e`t", Some("not"), false));
|
||||||
|
|
||||||
// Test more backticks.
|
// Test more backticks.
|
||||||
t!(Markup: "``nope``" => empty, Text("nope"), empty);
|
t!(Markup: "``nope``" => Raw("", None, false), Text("nope"), Raw("", None, false));
|
||||||
t!(Markup: "````🚀````" => Raw("🚀", 4, true));
|
t!(Markup: "````🚀````" => Raw("", None, false));
|
||||||
t!(Markup[""]: "`````👩🚀````noend" => Raw("👩🚀````noend", 5, false));
|
t!(Markup[""]: "`````👩🚀````noend" => Error(End, "expected 5 backticks"));
|
||||||
t!(Markup[""]: "````raw``````" => Raw("raw", 4, true), empty);
|
t!(Markup[""]: "````raw``````" => Raw("", Some("raw"), false), Raw("", None, false));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_tokenize_math_formulas() {
|
fn test_tokenize_math_formulas() {
|
||||||
// Test basic formula.
|
// Test basic formula.
|
||||||
t!(Markup: "$$" => Math("", false, true));
|
t!(Markup: "$$" => Math("", false));
|
||||||
t!(Markup: "$x$" => Math("x", false, true));
|
t!(Markup: "$x$" => Math("x", false));
|
||||||
t!(Markup: r"$\\$" => Math(r"\\", false, true));
|
t!(Markup: r"$\\$" => Math(r"\\", false));
|
||||||
t!(Markup: "$[x + y]$" => Math("x + y", true, true));
|
t!(Markup: "$[x + y]$" => Math("x + y", true));
|
||||||
t!(Markup: r"$[\\]$" => Math(r"\\", true, true));
|
t!(Markup: r"$[\\]$" => Math(r"\\", true));
|
||||||
|
|
||||||
// Test unterminated.
|
// Test unterminated.
|
||||||
t!(Markup[""]: "$x" => Math("x", false, false));
|
t!(Markup[""]: "$x" => Error(End, "expected closing dollar sign"));
|
||||||
t!(Markup[""]: "$[x" => Math("x", true, false));
|
t!(Markup[""]: "$[x" => Error(End, "expected closing bracket and dollar sign"));
|
||||||
t!(Markup[""]: "$[x]\n$" => Math("x]\n$", true, false));
|
t!(Markup[""]: "$[x]\n$" => Error(End, "expected closing bracket and dollar sign"));
|
||||||
|
|
||||||
// Test escape sequences.
|
// Test escape sequences.
|
||||||
t!(Markup: r"$\$x$" => Math(r"\$x", false, true));
|
t!(Markup: r"$\$x$" => Math(r"\$x", false));
|
||||||
t!(Markup: r"$[\\\]$]$" => Math(r"\\\]$", true, true));
|
t!(Markup: r"$[\\\]$]$" => Math(r"\\\]$", true));
|
||||||
t!(Markup[""]: r"$[ ]\\$" => Math(r" ]\\$", true, false));
|
t!(Markup[""]: r"$[ ]\\$" => Error(End, "expected closing bracket and dollar sign"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -896,8 +962,8 @@ mod tests {
|
|||||||
let nums = ints.iter().map(|&(k, v)| (k, v as f64)).chain(floats);
|
let nums = ints.iter().map(|&(k, v)| (k, v as f64)).chain(floats);
|
||||||
|
|
||||||
let suffixes = [
|
let suffixes = [
|
||||||
("%", Percent as fn(f64) -> Token<'static>),
|
("%", Percentage as fn(f64) -> NodeKind),
|
||||||
("fr", Fraction as fn(f64) -> Token<'static>),
|
("fr", Fraction as fn(f64) -> NodeKind),
|
||||||
("mm", |x| Length(x, LengthUnit::Mm)),
|
("mm", |x| Length(x, LengthUnit::Mm)),
|
||||||
("pt", |x| Length(x, LengthUnit::Pt)),
|
("pt", |x| Length(x, LengthUnit::Pt)),
|
||||||
("cm", |x| Length(x, LengthUnit::Cm)),
|
("cm", |x| Length(x, LengthUnit::Cm)),
|
||||||
@ -922,62 +988,62 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_tokenize_strings() {
|
fn test_tokenize_strings() {
|
||||||
// Test basic strings.
|
// Test basic strings.
|
||||||
t!(Code: "\"hi\"" => Str("hi", true));
|
t!(Code: "\"hi\"" => Str("hi"));
|
||||||
t!(Code: "\"hi\nthere\"" => Str("hi\nthere", true));
|
t!(Code: "\"hi\nthere\"" => Str("hi\nthere"));
|
||||||
t!(Code: "\"🌎\"" => Str("🌎", true));
|
t!(Code: "\"🌎\"" => Str("🌎"));
|
||||||
|
|
||||||
// Test unterminated.
|
// Test unterminated.
|
||||||
t!(Code[""]: "\"hi" => Str("hi", false));
|
t!(Code[""]: "\"hi" => Error(End, "expected quote"));
|
||||||
|
|
||||||
// Test escaped quote.
|
// Test escaped quote.
|
||||||
t!(Code: r#""a\"bc""# => Str(r#"a\"bc"#, true));
|
t!(Code: r#""a\"bc""# => Str("a\"bc"));
|
||||||
t!(Code[""]: r#""\""# => Str(r#"\""#, false));
|
t!(Code[""]: r#""\""# => Error(End, "expected quote"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_tokenize_line_comments() {
|
fn test_tokenize_line_comments() {
|
||||||
// Test line comment with no trailing newline.
|
// Test line comment with no trailing newline.
|
||||||
t!(Both[""]: "//" => LineComment(""));
|
t!(Both[""]: "//" => LineComment);
|
||||||
|
|
||||||
// Test line comment ends at newline.
|
// Test line comment ends at newline.
|
||||||
t!(Both["a1/"]: "//bc\n" => LineComment("bc"), Space(1));
|
t!(Both["a1/"]: "//bc\n" => LineComment, Space(1));
|
||||||
t!(Both["a1/"]: "// bc \n" => LineComment(" bc "), Space(1));
|
t!(Both["a1/"]: "// bc \n" => LineComment, Space(1));
|
||||||
t!(Both["a1/"]: "//bc\r\n" => LineComment("bc"), Space(1));
|
t!(Both["a1/"]: "//bc\r\n" => LineComment, Space(1));
|
||||||
|
|
||||||
// Test nested line comments.
|
// Test nested line comments.
|
||||||
t!(Both["a1/"]: "//a//b\n" => LineComment("a//b"), Space(1));
|
t!(Both["a1/"]: "//a//b\n" => LineComment, Space(1));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_tokenize_block_comments() {
|
fn test_tokenize_block_comments() {
|
||||||
// Test basic block comments.
|
// Test basic block comments.
|
||||||
t!(Both[""]: "/*" => BlockComment(""));
|
t!(Both[""]: "/*" => BlockComment);
|
||||||
t!(Both: "/**/" => BlockComment(""));
|
t!(Both: "/**/" => BlockComment);
|
||||||
t!(Both: "/*🏞*/" => BlockComment("🏞"));
|
t!(Both: "/*🏞*/" => BlockComment);
|
||||||
t!(Both: "/*\n*/" => BlockComment("\n"));
|
t!(Both: "/*\n*/" => BlockComment);
|
||||||
|
|
||||||
// Test depth 1 and 2 nested block comments.
|
// Test depth 1 and 2 nested block comments.
|
||||||
t!(Both: "/* /* */ */" => BlockComment(" /* */ "));
|
t!(Both: "/* /* */ */" => BlockComment);
|
||||||
t!(Both: "/*/*/**/*/*/" => BlockComment("/*/**/*/"));
|
t!(Both: "/*/*/**/*/*/" => BlockComment);
|
||||||
|
|
||||||
// Test two nested, one unclosed block comments.
|
// Test two nested, one unclosed block comments.
|
||||||
t!(Both[""]: "/*/*/**/*/" => BlockComment("/*/**/*/"));
|
t!(Both[""]: "/*/*/**/*/" => BlockComment);
|
||||||
|
|
||||||
// Test all combinations of up to two following slashes and stars.
|
// Test all combinations of up to two following slashes and stars.
|
||||||
t!(Both[""]: "/*" => BlockComment(""));
|
t!(Both[""]: "/*" => BlockComment);
|
||||||
t!(Both[""]: "/*/" => BlockComment("/"));
|
t!(Both[""]: "/*/" => BlockComment);
|
||||||
t!(Both[""]: "/**" => BlockComment("*"));
|
t!(Both[""]: "/**" => BlockComment);
|
||||||
t!(Both[""]: "/*//" => BlockComment("//"));
|
t!(Both[""]: "/*//" => BlockComment);
|
||||||
t!(Both[""]: "/*/*" => BlockComment("/*"));
|
t!(Both[""]: "/*/*" => BlockComment);
|
||||||
t!(Both[""]: "/**/" => BlockComment(""));
|
t!(Both[""]: "/**/" => BlockComment);
|
||||||
t!(Both[""]: "/***" => BlockComment("**"));
|
t!(Both[""]: "/***" => BlockComment);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_tokenize_invalid() {
|
fn test_tokenize_invalid() {
|
||||||
// Test invalidly closed block comments.
|
// Test invalidly closed block comments.
|
||||||
t!(Both: "*/" => Token::Invalid("*/"));
|
t!(Both: "*/" => Invalid("*/"));
|
||||||
t!(Both: "/**/*/" => BlockComment(""), Token::Invalid("*/"));
|
t!(Both: "/**/*/" => BlockComment, Invalid("*/"));
|
||||||
|
|
||||||
// Test invalid expressions.
|
// Test invalid expressions.
|
||||||
t!(Code: r"\" => Invalid(r"\"));
|
t!(Code: r"\" => Invalid(r"\"));
|
||||||
@ -990,6 +1056,6 @@ mod tests {
|
|||||||
// Test invalid number suffixes.
|
// Test invalid number suffixes.
|
||||||
t!(Code[" /"]: "1foo" => Invalid("1foo"));
|
t!(Code[" /"]: "1foo" => Invalid("1foo"));
|
||||||
t!(Code: "1p%" => Invalid("1p"), Invalid("%"));
|
t!(Code: "1p%" => Invalid("1p"), Invalid("%"));
|
||||||
t!(Code: "1%%" => Percent(1.0), Invalid("%"));
|
t!(Code: "1%%" => Percentage(1.0), Invalid("%"));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -8,8 +8,11 @@ use std::rc::Rc;
|
|||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use crate::diag::TypResult;
|
||||||
use crate::loading::{FileHash, Loader};
|
use crate::loading::{FileHash, Loader};
|
||||||
use crate::parse::{is_newline, Scanner};
|
use crate::parse::{is_newline, parse, Scanner};
|
||||||
|
use crate::syntax::ast::Markup;
|
||||||
|
use crate::syntax::{GreenNode, RedNode};
|
||||||
use crate::util::PathExt;
|
use crate::util::PathExt;
|
||||||
|
|
||||||
#[cfg(feature = "codespan-reporting")]
|
#[cfg(feature = "codespan-reporting")]
|
||||||
@ -124,6 +127,7 @@ pub struct SourceFile {
|
|||||||
path: PathBuf,
|
path: PathBuf,
|
||||||
src: String,
|
src: String,
|
||||||
line_starts: Vec<usize>,
|
line_starts: Vec<usize>,
|
||||||
|
root: Rc<GreenNode>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SourceFile {
|
impl SourceFile {
|
||||||
@ -134,11 +138,23 @@ impl SourceFile {
|
|||||||
Self {
|
Self {
|
||||||
id,
|
id,
|
||||||
path: path.normalize(),
|
path: path.normalize(),
|
||||||
|
root: parse(&src),
|
||||||
src,
|
src,
|
||||||
line_starts,
|
line_starts,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// The file's abstract syntax tree.
|
||||||
|
pub fn ast(&self) -> TypResult<Markup> {
|
||||||
|
let red = RedNode::from_root(self.root.clone(), self.id);
|
||||||
|
let errors = red.errors();
|
||||||
|
if errors.is_empty() {
|
||||||
|
Ok(red.cast().unwrap())
|
||||||
|
} else {
|
||||||
|
Err(Box::new(errors))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Create a source file without a real id and path, usually for testing.
|
/// Create a source file without a real id and path, usually for testing.
|
||||||
pub fn detached(src: impl Into<String>) -> Self {
|
pub fn detached(src: impl Into<String>) -> Self {
|
||||||
Self::new(SourceId(0), Path::new(""), src.into())
|
Self::new(SourceId(0), Path::new(""), src.into())
|
||||||
|
988
src/syntax/ast.rs
Normal file
988
src/syntax/ast.rs
Normal file
@ -0,0 +1,988 @@
|
|||||||
|
//! A typed layer over the red-green tree.
|
||||||
|
|
||||||
|
use std::ops::Deref;
|
||||||
|
|
||||||
|
use super::{Green, GreenData, NodeKind, RedNode, RedRef, Span};
|
||||||
|
use crate::geom::{AngularUnit, LengthUnit};
|
||||||
|
use crate::util::EcoString;
|
||||||
|
|
||||||
|
/// A typed AST node.
|
||||||
|
pub trait TypedNode: Sized {
|
||||||
|
/// Convert from a red node to a typed node.
|
||||||
|
fn from_red(value: RedRef) -> Option<Self>;
|
||||||
|
|
||||||
|
/// A reference to the underlying red node.
|
||||||
|
fn as_red(&self) -> RedRef<'_>;
|
||||||
|
|
||||||
|
/// The source code location.
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.as_red().span()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! node {
|
||||||
|
($(#[$attr:meta])* $name:ident) => {
|
||||||
|
node!{$(#[$attr])* $name: $name}
|
||||||
|
};
|
||||||
|
($(#[$attr:meta])* $name:ident: $variant:ident) => {
|
||||||
|
node!{$(#[$attr])* $name: NodeKind::$variant}
|
||||||
|
};
|
||||||
|
($(#[$attr:meta])* $name:ident: $($variant:pat)|*) => {
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
#[repr(transparent)]
|
||||||
|
$(#[$attr])*
|
||||||
|
pub struct $name(RedNode);
|
||||||
|
|
||||||
|
impl TypedNode for $name {
|
||||||
|
fn from_red(node: RedRef) -> Option<Self> {
|
||||||
|
if matches!(node.kind(), $($variant)|*) {
|
||||||
|
Some(Self(node.own()))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn as_red(&self) -> RedRef<'_> {
|
||||||
|
self.0.as_ref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// The syntactical root capable of representing a full parsed document.
|
||||||
|
Markup
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Markup {
|
||||||
|
/// The markup nodes.
|
||||||
|
pub fn nodes(&self) -> impl Iterator<Item = MarkupNode> + '_ {
|
||||||
|
self.0.children().filter_map(|node| match node.kind() {
|
||||||
|
NodeKind::Space(_) => Some(MarkupNode::Space),
|
||||||
|
NodeKind::Linebreak => Some(MarkupNode::Linebreak),
|
||||||
|
NodeKind::Parbreak => Some(MarkupNode::Parbreak),
|
||||||
|
NodeKind::Strong => Some(MarkupNode::Strong),
|
||||||
|
NodeKind::Emph => Some(MarkupNode::Emph),
|
||||||
|
NodeKind::Text(s) => Some(MarkupNode::Text(s.clone())),
|
||||||
|
NodeKind::UnicodeEscape(c) => Some(MarkupNode::Text((*c).into())),
|
||||||
|
NodeKind::EnDash => Some(MarkupNode::Text("\u{2013}".into())),
|
||||||
|
NodeKind::EmDash => Some(MarkupNode::Text("\u{2014}".into())),
|
||||||
|
NodeKind::NonBreakingSpace => Some(MarkupNode::Text("\u{00A0}".into())),
|
||||||
|
NodeKind::Math(math) => Some(MarkupNode::Math(math.as_ref().clone())),
|
||||||
|
NodeKind::Raw(raw) => Some(MarkupNode::Raw(raw.as_ref().clone())),
|
||||||
|
NodeKind::Heading => node.cast().map(MarkupNode::Heading),
|
||||||
|
NodeKind::List => node.cast().map(MarkupNode::List),
|
||||||
|
NodeKind::Enum => node.cast().map(MarkupNode::Enum),
|
||||||
|
_ => node.cast().map(MarkupNode::Expr),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A single piece of markup.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum MarkupNode {
|
||||||
|
/// Whitespace containing less than two newlines.
|
||||||
|
Space,
|
||||||
|
/// A forced line break: `\`.
|
||||||
|
Linebreak,
|
||||||
|
/// A paragraph break: Two or more newlines.
|
||||||
|
Parbreak,
|
||||||
|
/// Strong text was enabled / disabled: `*`.
|
||||||
|
Strong,
|
||||||
|
/// Emphasized text was enabled / disabled: `_`.
|
||||||
|
Emph,
|
||||||
|
/// Plain text.
|
||||||
|
Text(EcoString),
|
||||||
|
/// A raw block with optional syntax highlighting: `` `...` ``.
|
||||||
|
Raw(RawNode),
|
||||||
|
/// A math formula: `$a^2 = b^2 + c^2$`.
|
||||||
|
Math(MathNode),
|
||||||
|
/// A section heading: `= Introduction`.
|
||||||
|
Heading(HeadingNode),
|
||||||
|
/// An item in an unordered list: `- ...`.
|
||||||
|
List(ListNode),
|
||||||
|
/// An item in an enumeration (ordered list): `1. ...`.
|
||||||
|
Enum(EnumNode),
|
||||||
|
/// An expression.
|
||||||
|
Expr(Expr),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A raw block with optional syntax highlighting: `` `...` ``.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub struct RawNode {
|
||||||
|
/// An optional identifier specifying the language to syntax-highlight in.
|
||||||
|
pub lang: Option<EcoString>,
|
||||||
|
/// The raw text, determined as the raw string between the backticks trimmed
|
||||||
|
/// according to the above rules.
|
||||||
|
pub text: EcoString,
|
||||||
|
/// Whether the element is block-level, that is, it has 3+ backticks
|
||||||
|
/// and contains at least one newline.
|
||||||
|
pub block: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A math formula: `$a^2 + b^2 = c^2$`.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub struct MathNode {
|
||||||
|
/// The formula between the dollars / brackets.
|
||||||
|
pub formula: EcoString,
|
||||||
|
/// Whether the formula is display-level, that is, it is surrounded by
|
||||||
|
/// `$[..]$`.
|
||||||
|
pub display: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A section heading: `= Introduction`.
|
||||||
|
HeadingNode: Heading
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HeadingNode {
|
||||||
|
/// The contents of the heading.
|
||||||
|
pub fn body(&self) -> Markup {
|
||||||
|
self.0.cast_first_child().expect("heading is missing markup body")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The section depth (numer of equals signs).
|
||||||
|
pub fn level(&self) -> usize {
|
||||||
|
self.0.children().filter(|n| n.kind() == &NodeKind::Eq).count()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// An item in an unordered list: `- ...`.
|
||||||
|
ListNode: List
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ListNode {
|
||||||
|
/// The contents of the list item.
|
||||||
|
pub fn body(&self) -> Markup {
|
||||||
|
self.0.cast_first_child().expect("list node is missing body")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// An item in an enumeration (ordered list): `1. ...`.
|
||||||
|
EnumNode: Enum
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EnumNode {
|
||||||
|
/// The contents of the list item.
|
||||||
|
pub fn body(&self) -> Markup {
|
||||||
|
self.0.cast_first_child().expect("enum node is missing body")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The number, if any.
|
||||||
|
pub fn number(&self) -> Option<usize> {
|
||||||
|
self.0
|
||||||
|
.children()
|
||||||
|
.find_map(|node| match node.kind() {
|
||||||
|
NodeKind::EnumNumbering(num) => Some(num.clone()),
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
.expect("enum node is missing number")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An expression.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum Expr {
|
||||||
|
/// A literal: `1`, `true`, ...
|
||||||
|
Lit(Lit),
|
||||||
|
/// An identifier: `left`.
|
||||||
|
Ident(Ident),
|
||||||
|
/// An array expression: `(1, "hi", 12cm)`.
|
||||||
|
Array(ArrayExpr),
|
||||||
|
/// A dictionary expression: `(thickness: 3pt, pattern: dashed)`.
|
||||||
|
Dict(DictExpr),
|
||||||
|
/// A template expression: `[*Hi* there!]`.
|
||||||
|
Template(TemplateExpr),
|
||||||
|
/// A grouped expression: `(1 + 2)`.
|
||||||
|
Group(GroupExpr),
|
||||||
|
/// A block expression: `{ let x = 1; x + 2 }`.
|
||||||
|
Block(BlockExpr),
|
||||||
|
/// A unary operation: `-x`.
|
||||||
|
Unary(UnaryExpr),
|
||||||
|
/// A binary operation: `a + b`.
|
||||||
|
Binary(BinaryExpr),
|
||||||
|
/// An invocation of a function: `f(x, y)`.
|
||||||
|
Call(CallExpr),
|
||||||
|
/// A closure expression: `(x, y) => z`.
|
||||||
|
Closure(ClosureExpr),
|
||||||
|
/// A with expression: `f with (x, y: 1)`.
|
||||||
|
With(WithExpr),
|
||||||
|
/// A let expression: `let x = 1`.
|
||||||
|
Let(LetExpr),
|
||||||
|
/// An if-else expression: `if x { y } else { z }`.
|
||||||
|
If(IfExpr),
|
||||||
|
/// A while loop expression: `while x { y }`.
|
||||||
|
While(WhileExpr),
|
||||||
|
/// A for loop expression: `for x in y { z }`.
|
||||||
|
For(ForExpr),
|
||||||
|
/// An import expression: `import a, b, c from "utils.typ"`.
|
||||||
|
Import(ImportExpr),
|
||||||
|
/// An include expression: `include "chapter1.typ"`.
|
||||||
|
Include(IncludeExpr),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TypedNode for Expr {
|
||||||
|
fn from_red(node: RedRef) -> Option<Self> {
|
||||||
|
match node.kind() {
|
||||||
|
NodeKind::Ident(_) => node.cast().map(Self::Ident),
|
||||||
|
NodeKind::Array => node.cast().map(Self::Array),
|
||||||
|
NodeKind::Dict => node.cast().map(Self::Dict),
|
||||||
|
NodeKind::Template => node.cast().map(Self::Template),
|
||||||
|
NodeKind::Group => node.cast().map(Self::Group),
|
||||||
|
NodeKind::Block => node.cast().map(Self::Block),
|
||||||
|
NodeKind::Unary => node.cast().map(Self::Unary),
|
||||||
|
NodeKind::Binary => node.cast().map(Self::Binary),
|
||||||
|
NodeKind::Call => node.cast().map(Self::Call),
|
||||||
|
NodeKind::Closure => node.cast().map(Self::Closure),
|
||||||
|
NodeKind::WithExpr => node.cast().map(Self::With),
|
||||||
|
NodeKind::LetExpr => node.cast().map(Self::Let),
|
||||||
|
NodeKind::IfExpr => node.cast().map(Self::If),
|
||||||
|
NodeKind::WhileExpr => node.cast().map(Self::While),
|
||||||
|
NodeKind::ForExpr => node.cast().map(Self::For),
|
||||||
|
NodeKind::ImportExpr => node.cast().map(Self::Import),
|
||||||
|
NodeKind::IncludeExpr => node.cast().map(Self::Include),
|
||||||
|
_ => node.cast().map(Self::Lit),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn as_red(&self) -> RedRef<'_> {
|
||||||
|
match self {
|
||||||
|
Self::Lit(v) => v.as_red(),
|
||||||
|
Self::Ident(v) => v.as_red(),
|
||||||
|
Self::Array(v) => v.as_red(),
|
||||||
|
Self::Dict(v) => v.as_red(),
|
||||||
|
Self::Template(v) => v.as_red(),
|
||||||
|
Self::Group(v) => v.as_red(),
|
||||||
|
Self::Block(v) => v.as_red(),
|
||||||
|
Self::Unary(v) => v.as_red(),
|
||||||
|
Self::Binary(v) => v.as_red(),
|
||||||
|
Self::Call(v) => v.as_red(),
|
||||||
|
Self::Closure(v) => v.as_red(),
|
||||||
|
Self::With(v) => v.as_red(),
|
||||||
|
Self::Let(v) => v.as_red(),
|
||||||
|
Self::If(v) => v.as_red(),
|
||||||
|
Self::While(v) => v.as_red(),
|
||||||
|
Self::For(v) => v.as_red(),
|
||||||
|
Self::Import(v) => v.as_red(),
|
||||||
|
Self::Include(v) => v.as_red(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Expr {
|
||||||
|
/// Whether the expression can be shortened in markup with a hashtag.
|
||||||
|
pub fn has_short_form(&self) -> bool {
|
||||||
|
matches!(self,
|
||||||
|
Self::Ident(_)
|
||||||
|
| Self::Call(_)
|
||||||
|
| Self::Let(_)
|
||||||
|
| Self::If(_)
|
||||||
|
| Self::While(_)
|
||||||
|
| Self::For(_)
|
||||||
|
| Self::Import(_)
|
||||||
|
| Self::Include(_)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A literal: `1`, `true`, ...
|
||||||
|
Lit: NodeKind::None
|
||||||
|
| NodeKind::Auto
|
||||||
|
| NodeKind::Bool(_)
|
||||||
|
| NodeKind::Int(_)
|
||||||
|
| NodeKind::Float(_)
|
||||||
|
| NodeKind::Length(_, _)
|
||||||
|
| NodeKind::Angle(_, _)
|
||||||
|
| NodeKind::Percentage(_)
|
||||||
|
| NodeKind::Fraction(_)
|
||||||
|
| NodeKind::Str(_)
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Lit {
|
||||||
|
/// The kind of literal.
|
||||||
|
pub fn kind(&self) -> LitKind {
|
||||||
|
match *self.0.kind() {
|
||||||
|
NodeKind::None => LitKind::None,
|
||||||
|
NodeKind::Auto => LitKind::Auto,
|
||||||
|
NodeKind::Bool(v) => LitKind::Bool(v),
|
||||||
|
NodeKind::Int(v) => LitKind::Int(v),
|
||||||
|
NodeKind::Float(v) => LitKind::Float(v),
|
||||||
|
NodeKind::Length(v, unit) => LitKind::Length(v, unit),
|
||||||
|
NodeKind::Angle(v, unit) => LitKind::Angle(v, unit),
|
||||||
|
NodeKind::Percentage(v) => LitKind::Percent(v),
|
||||||
|
NodeKind::Fraction(v) => LitKind::Fractional(v),
|
||||||
|
NodeKind::Str(ref v) => LitKind::Str(v.clone()),
|
||||||
|
_ => panic!("literal is of wrong kind"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The kind of a literal.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum LitKind {
|
||||||
|
/// The none literal: `none`.
|
||||||
|
None,
|
||||||
|
/// The auto literal: `auto`.
|
||||||
|
Auto,
|
||||||
|
/// A boolean literal: `true`, `false`.
|
||||||
|
Bool(bool),
|
||||||
|
/// An integer literal: `120`.
|
||||||
|
Int(i64),
|
||||||
|
/// A floating-point literal: `1.2`, `10e-4`.
|
||||||
|
Float(f64),
|
||||||
|
/// A length literal: `12pt`, `3cm`.
|
||||||
|
Length(f64, LengthUnit),
|
||||||
|
/// An angle literal: `1.5rad`, `90deg`.
|
||||||
|
Angle(f64, AngularUnit),
|
||||||
|
/// A percent literal: `50%`.
|
||||||
|
///
|
||||||
|
/// _Note_: `50%` is stored as `50.0` here, but as `0.5` in the
|
||||||
|
/// corresponding [value](crate::geom::Relative).
|
||||||
|
Percent(f64),
|
||||||
|
/// A fraction unit literal: `1fr`.
|
||||||
|
Fractional(f64),
|
||||||
|
/// A string literal: `"hello!"`.
|
||||||
|
Str(EcoString),
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// An array expression: `(1, "hi", 12cm)`.
|
||||||
|
ArrayExpr: Array
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ArrayExpr {
|
||||||
|
/// The array items.
|
||||||
|
pub fn items(&self) -> impl Iterator<Item = Expr> + '_ {
|
||||||
|
self.0.children().filter_map(RedRef::cast)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A dictionary expression: `(thickness: 3pt, pattern: dashed)`.
|
||||||
|
DictExpr: Dict
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DictExpr {
|
||||||
|
/// The named dictionary items.
|
||||||
|
pub fn items(&self) -> impl Iterator<Item = Named> + '_ {
|
||||||
|
self.0.children().filter_map(RedRef::cast)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A pair of a name and an expression: `pattern: dashed`.
|
||||||
|
Named
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Named {
|
||||||
|
/// The name: `pattern`.
|
||||||
|
pub fn name(&self) -> Ident {
|
||||||
|
self.0.cast_first_child().expect("named pair is missing name")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The right-hand side of the pair: `dashed`.
|
||||||
|
pub fn expr(&self) -> Expr {
|
||||||
|
self.0.cast_last_child().expect("named pair is missing expression")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A template expression: `[*Hi* there!]`.
|
||||||
|
TemplateExpr: Template
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TemplateExpr {
|
||||||
|
/// The contents of the template.
|
||||||
|
pub fn body(&self) -> Markup {
|
||||||
|
self.0.cast_first_child().expect("template is missing body")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A grouped expression: `(1 + 2)`.
|
||||||
|
GroupExpr: Group
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GroupExpr {
|
||||||
|
/// The wrapped expression.
|
||||||
|
pub fn expr(&self) -> Expr {
|
||||||
|
self.0.cast_first_child().expect("group is missing expression")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A block expression: `{ let x = 1; x + 2 }`.
|
||||||
|
BlockExpr: Block
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BlockExpr {
|
||||||
|
/// The list of expressions contained in the block.
|
||||||
|
pub fn exprs(&self) -> impl Iterator<Item = Expr> + '_ {
|
||||||
|
self.0.children().filter_map(RedRef::cast)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A unary operation: `-x`.
|
||||||
|
UnaryExpr: Unary
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UnaryExpr {
|
||||||
|
/// The operator: `-`.
|
||||||
|
pub fn op(&self) -> UnOp {
|
||||||
|
self.0
|
||||||
|
.children()
|
||||||
|
.find_map(|node| UnOp::from_token(node.kind()))
|
||||||
|
.expect("unary expression is missing operator")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The expression to operator on: `x`.
|
||||||
|
pub fn expr(&self) -> Expr {
|
||||||
|
self.0.cast_last_child().expect("unary expression is missing child")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A unary operator.
|
||||||
|
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
||||||
|
pub enum UnOp {
|
||||||
|
/// The plus operator: `+`.
|
||||||
|
Pos,
|
||||||
|
/// The negation operator: `-`.
|
||||||
|
Neg,
|
||||||
|
/// The boolean `not`.
|
||||||
|
Not,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UnOp {
|
||||||
|
/// Try to convert the token into a unary operation.
|
||||||
|
pub fn from_token(token: &NodeKind) -> Option<Self> {
|
||||||
|
Some(match token {
|
||||||
|
NodeKind::Plus => Self::Pos,
|
||||||
|
NodeKind::Minus => Self::Neg,
|
||||||
|
NodeKind::Not => Self::Not,
|
||||||
|
_ => return None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The precedence of this operator.
|
||||||
|
pub fn precedence(self) -> usize {
|
||||||
|
match self {
|
||||||
|
Self::Pos | Self::Neg => 7,
|
||||||
|
Self::Not => 4,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The string representation of this operation.
|
||||||
|
pub fn as_str(self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
Self::Pos => "+",
|
||||||
|
Self::Neg => "-",
|
||||||
|
Self::Not => "not",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A binary operation: `a + b`.
|
||||||
|
BinaryExpr: Binary
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BinaryExpr {
|
||||||
|
/// The binary operator: `+`.
|
||||||
|
pub fn op(&self) -> BinOp {
|
||||||
|
self.0
|
||||||
|
.children()
|
||||||
|
.find_map(|node| BinOp::from_token(node.kind()))
|
||||||
|
.expect("binary expression is missing operator")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The left-hand side of the operation: `a`.
|
||||||
|
pub fn lhs(&self) -> Expr {
|
||||||
|
self.0
|
||||||
|
.cast_first_child()
|
||||||
|
.expect("binary expression is missing left-hand side")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The right-hand side of the operation: `b`.
|
||||||
|
pub fn rhs(&self) -> Expr {
|
||||||
|
self.0
|
||||||
|
.cast_last_child()
|
||||||
|
.expect("binary expression is missing right-hand side")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A binary operator.
|
||||||
|
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
||||||
|
pub enum BinOp {
|
||||||
|
/// The addition operator: `+`.
|
||||||
|
Add,
|
||||||
|
/// The subtraction operator: `-`.
|
||||||
|
Sub,
|
||||||
|
/// The multiplication operator: `*`.
|
||||||
|
Mul,
|
||||||
|
/// The division operator: `/`.
|
||||||
|
Div,
|
||||||
|
/// The short-circuiting boolean `and`.
|
||||||
|
And,
|
||||||
|
/// The short-circuiting boolean `or`.
|
||||||
|
Or,
|
||||||
|
/// The equality operator: `==`.
|
||||||
|
Eq,
|
||||||
|
/// The inequality operator: `!=`.
|
||||||
|
Neq,
|
||||||
|
/// The less-than operator: `<`.
|
||||||
|
Lt,
|
||||||
|
/// The less-than or equal operator: `<=`.
|
||||||
|
Leq,
|
||||||
|
/// The greater-than operator: `>`.
|
||||||
|
Gt,
|
||||||
|
/// The greater-than or equal operator: `>=`.
|
||||||
|
Geq,
|
||||||
|
/// The assignment operator: `=`.
|
||||||
|
Assign,
|
||||||
|
/// The add-assign operator: `+=`.
|
||||||
|
AddAssign,
|
||||||
|
/// The subtract-assign oeprator: `-=`.
|
||||||
|
SubAssign,
|
||||||
|
/// The multiply-assign operator: `*=`.
|
||||||
|
MulAssign,
|
||||||
|
/// The divide-assign operator: `/=`.
|
||||||
|
DivAssign,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BinOp {
|
||||||
|
/// Try to convert the token into a binary operation.
|
||||||
|
pub fn from_token(token: &NodeKind) -> Option<Self> {
|
||||||
|
Some(match token {
|
||||||
|
NodeKind::Plus => Self::Add,
|
||||||
|
NodeKind::Minus => Self::Sub,
|
||||||
|
NodeKind::Star => Self::Mul,
|
||||||
|
NodeKind::Slash => Self::Div,
|
||||||
|
NodeKind::And => Self::And,
|
||||||
|
NodeKind::Or => Self::Or,
|
||||||
|
NodeKind::EqEq => Self::Eq,
|
||||||
|
NodeKind::ExclEq => Self::Neq,
|
||||||
|
NodeKind::Lt => Self::Lt,
|
||||||
|
NodeKind::LtEq => Self::Leq,
|
||||||
|
NodeKind::Gt => Self::Gt,
|
||||||
|
NodeKind::GtEq => Self::Geq,
|
||||||
|
NodeKind::Eq => Self::Assign,
|
||||||
|
NodeKind::PlusEq => Self::AddAssign,
|
||||||
|
NodeKind::HyphEq => Self::SubAssign,
|
||||||
|
NodeKind::StarEq => Self::MulAssign,
|
||||||
|
NodeKind::SlashEq => Self::DivAssign,
|
||||||
|
_ => return None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The precedence of this operator.
|
||||||
|
pub fn precedence(self) -> usize {
|
||||||
|
match self {
|
||||||
|
Self::Mul | Self::Div => 6,
|
||||||
|
Self::Add | Self::Sub => 5,
|
||||||
|
Self::Eq | Self::Neq | Self::Lt | Self::Leq | Self::Gt | Self::Geq => 4,
|
||||||
|
Self::And => 3,
|
||||||
|
Self::Or => 2,
|
||||||
|
Self::Assign
|
||||||
|
| Self::AddAssign
|
||||||
|
| Self::SubAssign
|
||||||
|
| Self::MulAssign
|
||||||
|
| Self::DivAssign => 1,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The associativity of this operator.
|
||||||
|
pub fn associativity(self) -> Associativity {
|
||||||
|
match self {
|
||||||
|
Self::Add
|
||||||
|
| Self::Sub
|
||||||
|
| Self::Mul
|
||||||
|
| Self::Div
|
||||||
|
| Self::And
|
||||||
|
| Self::Or
|
||||||
|
| Self::Eq
|
||||||
|
| Self::Neq
|
||||||
|
| Self::Lt
|
||||||
|
| Self::Leq
|
||||||
|
| Self::Gt
|
||||||
|
| Self::Geq => Associativity::Left,
|
||||||
|
Self::Assign
|
||||||
|
| Self::AddAssign
|
||||||
|
| Self::SubAssign
|
||||||
|
| Self::MulAssign
|
||||||
|
| Self::DivAssign => Associativity::Right,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The string representation of this operation.
|
||||||
|
pub fn as_str(self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
Self::Add => "+",
|
||||||
|
Self::Sub => "-",
|
||||||
|
Self::Mul => "*",
|
||||||
|
Self::Div => "/",
|
||||||
|
Self::And => "and",
|
||||||
|
Self::Or => "or",
|
||||||
|
Self::Eq => "==",
|
||||||
|
Self::Neq => "!=",
|
||||||
|
Self::Lt => "<",
|
||||||
|
Self::Leq => "<=",
|
||||||
|
Self::Gt => ">",
|
||||||
|
Self::Geq => ">=",
|
||||||
|
Self::Assign => "=",
|
||||||
|
Self::AddAssign => "+=",
|
||||||
|
Self::SubAssign => "-=",
|
||||||
|
Self::MulAssign => "*=",
|
||||||
|
Self::DivAssign => "/=",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The associativity of a binary operator.
|
||||||
|
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
||||||
|
pub enum Associativity {
|
||||||
|
/// Left-associative: `a + b + c` is equivalent to `(a + b) + c`.
|
||||||
|
Left,
|
||||||
|
/// Right-associative: `a = b = c` is equivalent to `a = (b = c)`.
|
||||||
|
Right,
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// An invocation of a function: `foo(...)`.
|
||||||
|
CallExpr: Call
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CallExpr {
|
||||||
|
/// The function to call.
|
||||||
|
pub fn callee(&self) -> Expr {
|
||||||
|
self.0.cast_first_child().expect("call is missing callee")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The arguments to the function.
|
||||||
|
pub fn args(&self) -> CallArgs {
|
||||||
|
self.0.cast_last_child().expect("call is missing argument list")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// The arguments to a function: `12, draw: false`.
|
||||||
|
CallArgs
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CallArgs {
|
||||||
|
/// The positional and named arguments.
|
||||||
|
pub fn items(&self) -> impl Iterator<Item = CallArg> + '_ {
|
||||||
|
self.0.children().filter_map(RedRef::cast)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An argument to a function call.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum CallArg {
|
||||||
|
/// A positional argument: `12`.
|
||||||
|
Pos(Expr),
|
||||||
|
/// A named argument: `draw: false`.
|
||||||
|
Named(Named),
|
||||||
|
/// A spreaded argument: `..things`.
|
||||||
|
Spread(Expr),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TypedNode for CallArg {
|
||||||
|
fn from_red(node: RedRef) -> Option<Self> {
|
||||||
|
match node.kind() {
|
||||||
|
NodeKind::Named => node.cast().map(CallArg::Named),
|
||||||
|
NodeKind::Spread => node.cast_first_child().map(CallArg::Spread),
|
||||||
|
_ => node.cast().map(CallArg::Pos),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn as_red(&self) -> RedRef<'_> {
|
||||||
|
match self {
|
||||||
|
Self::Pos(v) => v.as_red(),
|
||||||
|
Self::Named(v) => v.as_red(),
|
||||||
|
Self::Spread(v) => v.as_red(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CallArg {
|
||||||
|
/// The name of this argument.
|
||||||
|
pub fn span(&self) -> Span {
|
||||||
|
match self {
|
||||||
|
Self::Pos(expr) => expr.span(),
|
||||||
|
Self::Named(named) => named.span(),
|
||||||
|
Self::Spread(expr) => expr.span(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A closure expression: `(x, y) => z`.
|
||||||
|
ClosureExpr: Closure
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ClosureExpr {
|
||||||
|
/// The name of the closure.
|
||||||
|
///
|
||||||
|
/// This only exists if you use the function syntax sugar: `let f(x) = y`.
|
||||||
|
pub fn name(&self) -> Option<Ident> {
|
||||||
|
self.0.cast_first_child()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The parameter bindings.
|
||||||
|
pub fn params(&self) -> impl Iterator<Item = ClosureParam> + '_ {
|
||||||
|
self.0
|
||||||
|
.children()
|
||||||
|
.find(|x| x.kind() == &NodeKind::ClosureParams)
|
||||||
|
.expect("closure is missing parameter list")
|
||||||
|
.children()
|
||||||
|
.filter_map(RedRef::cast)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The body of the closure.
|
||||||
|
pub fn body(&self) -> Expr {
|
||||||
|
self.0.cast_last_child().expect("closure is missing body")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A parameter to a closure.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum ClosureParam {
|
||||||
|
/// A positional parameter: `x`.
|
||||||
|
Pos(Ident),
|
||||||
|
/// A named parameter with a default value: `draw: false`.
|
||||||
|
Named(Named),
|
||||||
|
/// A parameter sink: `..args`.
|
||||||
|
Sink(Ident),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TypedNode for ClosureParam {
|
||||||
|
fn from_red(node: RedRef) -> Option<Self> {
|
||||||
|
match node.kind() {
|
||||||
|
NodeKind::Ident(_) => node.cast().map(ClosureParam::Pos),
|
||||||
|
NodeKind::Named => node.cast().map(ClosureParam::Named),
|
||||||
|
NodeKind::Spread => node.cast_first_child().map(ClosureParam::Sink),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn as_red(&self) -> RedRef<'_> {
|
||||||
|
match self {
|
||||||
|
Self::Pos(v) => v.as_red(),
|
||||||
|
Self::Named(v) => v.as_red(),
|
||||||
|
Self::Sink(v) => v.as_red(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A with expression: `f with (x, y: 1)`.
|
||||||
|
WithExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
impl WithExpr {
|
||||||
|
/// The function to apply the arguments to.
|
||||||
|
pub fn callee(&self) -> Expr {
|
||||||
|
self.0.cast_first_child().expect("with expression is missing callee")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The arguments to apply to the function.
|
||||||
|
pub fn args(&self) -> CallArgs {
|
||||||
|
self.0
|
||||||
|
.cast_first_child()
|
||||||
|
.expect("with expression is missing argument list")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A let expression: `let x = 1`.
|
||||||
|
LetExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LetExpr {
|
||||||
|
/// The binding to assign to.
|
||||||
|
pub fn binding(&self) -> Ident {
|
||||||
|
match self.0.cast_first_child() {
|
||||||
|
Some(Expr::Ident(binding)) => binding,
|
||||||
|
Some(Expr::With(with)) => match with.callee() {
|
||||||
|
Expr::Ident(binding) => binding,
|
||||||
|
_ => panic!("let .. with callee must be identifier"),
|
||||||
|
},
|
||||||
|
Some(Expr::Closure(closure)) => {
|
||||||
|
closure.name().expect("let-bound closure is missing name")
|
||||||
|
}
|
||||||
|
_ => panic!("let expression is missing binding"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The expression the binding is initialized with.
|
||||||
|
pub fn init(&self) -> Option<Expr> {
|
||||||
|
if self.0.cast_first_child::<Ident>().is_some() {
|
||||||
|
self.0.children().filter_map(RedRef::cast).nth(1)
|
||||||
|
} else {
|
||||||
|
// This is a let .. with expression.
|
||||||
|
self.0.cast_first_child()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// An import expression: `import a, b, c from "utils.typ"`.
|
||||||
|
ImportExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ImportExpr {
|
||||||
|
/// The items to be imported.
|
||||||
|
pub fn imports(&self) -> Imports {
|
||||||
|
self.0
|
||||||
|
.children()
|
||||||
|
.find_map(|node| match node.kind() {
|
||||||
|
NodeKind::Star => Some(Imports::Wildcard),
|
||||||
|
NodeKind::ImportItems => {
|
||||||
|
let items = node.children().filter_map(RedRef::cast).collect();
|
||||||
|
Some(Imports::Items(items))
|
||||||
|
}
|
||||||
|
_ => None,
|
||||||
|
})
|
||||||
|
.expect("import is missing items")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The location of the importable file.
|
||||||
|
pub fn path(&self) -> Expr {
|
||||||
|
self.0.cast_last_child().expect("import is missing path")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The items that ought to be imported from a file.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum Imports {
|
||||||
|
/// All items in the scope of the file should be imported.
|
||||||
|
Wildcard,
|
||||||
|
/// The specified items from the file should be imported.
|
||||||
|
Items(Vec<Ident>),
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// An include expression: `include "chapter1.typ"`.
|
||||||
|
IncludeExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IncludeExpr {
|
||||||
|
/// The location of the file to be included.
|
||||||
|
pub fn path(&self) -> Expr {
|
||||||
|
self.0.cast_last_child().expect("include is missing path")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// An if-else expression: `if x { y } else { z }`.
|
||||||
|
IfExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IfExpr {
|
||||||
|
/// The condition which selects the body to evaluate.
|
||||||
|
pub fn condition(&self) -> Expr {
|
||||||
|
self.0.cast_first_child().expect("if expression is missing condition")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The expression to evaluate if the condition is true.
|
||||||
|
pub fn if_body(&self) -> Expr {
|
||||||
|
self.0
|
||||||
|
.children()
|
||||||
|
.filter_map(RedRef::cast)
|
||||||
|
.nth(1)
|
||||||
|
.expect("if expression is missing body")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The expression to evaluate if the condition is false.
|
||||||
|
pub fn else_body(&self) -> Option<Expr> {
|
||||||
|
self.0.children().filter_map(RedRef::cast).nth(2)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A while loop expression: `while x { y }`.
|
||||||
|
WhileExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
impl WhileExpr {
|
||||||
|
/// The condition which selects whether to evaluate the body.
|
||||||
|
pub fn condition(&self) -> Expr {
|
||||||
|
self.0.cast_first_child().expect("while loop is missing condition")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The expression to evaluate while the condition is true.
|
||||||
|
pub fn body(&self) -> Expr {
|
||||||
|
self.0.cast_last_child().expect("while loop is missing body")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A for loop expression: `for x in y { z }`.
|
||||||
|
ForExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ForExpr {
|
||||||
|
/// The pattern to assign to.
|
||||||
|
pub fn pattern(&self) -> ForPattern {
|
||||||
|
self.0.cast_first_child().expect("for loop is missing pattern")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The expression to iterate over.
|
||||||
|
pub fn iter(&self) -> Expr {
|
||||||
|
self.0.cast_first_child().expect("for loop is missing iterable")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The expression to evaluate for each iteration.
|
||||||
|
pub fn body(&self) -> Expr {
|
||||||
|
self.0.cast_last_child().expect("for loop is missing body")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// A for-in loop expression: `for x in y { z }`.
|
||||||
|
ForPattern
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ForPattern {
|
||||||
|
/// The key part of the pattern: index for arrays, name for dictionaries.
|
||||||
|
pub fn key(&self) -> Option<Ident> {
|
||||||
|
let mut children = self.0.children().filter_map(RedRef::cast);
|
||||||
|
let key = children.next();
|
||||||
|
if children.next().is_some() { key } else { None }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The value part of the pattern.
|
||||||
|
pub fn value(&self) -> Ident {
|
||||||
|
self.0.cast_last_child().expect("for loop pattern is missing value")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
node! {
|
||||||
|
/// An identifier.
|
||||||
|
Ident: NodeKind::Ident(_)
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ident {
|
||||||
|
/// Take out the contained [`EcoString`].
|
||||||
|
pub fn take(self) -> EcoString {
|
||||||
|
match self.0.green {
|
||||||
|
Green::Token(GreenData { kind: NodeKind::Ident(id), .. }) => id,
|
||||||
|
_ => panic!("identifier is of wrong kind"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Deref for Ident {
|
||||||
|
type Target = str;
|
||||||
|
|
||||||
|
fn deref(&self) -> &Self::Target {
|
||||||
|
match &self.0.green {
|
||||||
|
Green::Token(GreenData { kind: NodeKind::Ident(id), .. }) => id,
|
||||||
|
_ => panic!("identifier is of wrong kind"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,584 +0,0 @@
|
|||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use super::{Ident, Markup, Span, Token};
|
|
||||||
use crate::geom::{AngularUnit, LengthUnit};
|
|
||||||
use crate::util::EcoString;
|
|
||||||
|
|
||||||
/// An expression.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub enum Expr {
|
|
||||||
/// An identifier: `left`.
|
|
||||||
Ident(Box<Ident>),
|
|
||||||
/// A literal: `1`, `true`, ...
|
|
||||||
Lit(Box<Lit>),
|
|
||||||
/// An array expression: `(1, "hi", 12cm)`.
|
|
||||||
Array(Box<ArrayExpr>),
|
|
||||||
/// A dictionary expression: `(thickness: 3pt, pattern: dashed)`.
|
|
||||||
Dict(Box<DictExpr>),
|
|
||||||
/// A template expression: `[*Hi* there!]`.
|
|
||||||
Template(Box<TemplateExpr>),
|
|
||||||
/// A grouped expression: `(1 + 2)`.
|
|
||||||
Group(Box<GroupExpr>),
|
|
||||||
/// A block expression: `{ let x = 1; x + 2 }`.
|
|
||||||
Block(Box<BlockExpr>),
|
|
||||||
/// A unary operation: `-x`.
|
|
||||||
Unary(Box<UnaryExpr>),
|
|
||||||
/// A binary operation: `a + b`.
|
|
||||||
Binary(Box<BinaryExpr>),
|
|
||||||
/// An invocation of a function: `f(x, y)`.
|
|
||||||
Call(Box<CallExpr>),
|
|
||||||
/// A closure expression: `(x, y) => z`.
|
|
||||||
Closure(Box<ClosureExpr>),
|
|
||||||
/// A with expression: `f with (x, y: 1)`.
|
|
||||||
With(Box<WithExpr>),
|
|
||||||
/// A let expression: `let x = 1`.
|
|
||||||
Let(Box<LetExpr>),
|
|
||||||
/// An if-else expression: `if x { y } else { z }`.
|
|
||||||
If(Box<IfExpr>),
|
|
||||||
/// A while loop expression: `while x { y }`.
|
|
||||||
While(Box<WhileExpr>),
|
|
||||||
/// A for loop expression: `for x in y { z }`.
|
|
||||||
For(Box<ForExpr>),
|
|
||||||
/// An import expression: `import a, b, c from "utils.typ"`.
|
|
||||||
Import(Box<ImportExpr>),
|
|
||||||
/// An include expression: `include "chapter1.typ"`.
|
|
||||||
Include(Box<IncludeExpr>),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Expr {
|
|
||||||
/// The source code location.
|
|
||||||
pub fn span(&self) -> Span {
|
|
||||||
match self {
|
|
||||||
Self::Ident(v) => v.span,
|
|
||||||
Self::Lit(v) => v.span(),
|
|
||||||
Self::Array(v) => v.span,
|
|
||||||
Self::Dict(v) => v.span,
|
|
||||||
Self::Template(v) => v.span,
|
|
||||||
Self::Group(v) => v.span,
|
|
||||||
Self::Block(v) => v.span,
|
|
||||||
Self::Unary(v) => v.span,
|
|
||||||
Self::Binary(v) => v.span,
|
|
||||||
Self::Call(v) => v.span,
|
|
||||||
Self::Closure(v) => v.span,
|
|
||||||
Self::With(v) => v.span,
|
|
||||||
Self::Let(v) => v.span,
|
|
||||||
Self::If(v) => v.span,
|
|
||||||
Self::While(v) => v.span,
|
|
||||||
Self::For(v) => v.span,
|
|
||||||
Self::Import(v) => v.span,
|
|
||||||
Self::Include(v) => v.span,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether the expression can be shortened in markup with a hashtag.
|
|
||||||
pub fn has_short_form(&self) -> bool {
|
|
||||||
matches!(self,
|
|
||||||
Self::Ident(_)
|
|
||||||
| Self::Call(_)
|
|
||||||
| Self::Let(_)
|
|
||||||
| Self::If(_)
|
|
||||||
| Self::While(_)
|
|
||||||
| Self::For(_)
|
|
||||||
| Self::Import(_)
|
|
||||||
| Self::Include(_)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A literal: `1`, `true`, ...
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub enum Lit {
|
|
||||||
/// The none literal: `none`.
|
|
||||||
None(Span),
|
|
||||||
/// The auto literal: `auto`.
|
|
||||||
Auto(Span),
|
|
||||||
/// A boolean literal: `true`, `false`.
|
|
||||||
Bool(Span, bool),
|
|
||||||
/// An integer literal: `120`.
|
|
||||||
Int(Span, i64),
|
|
||||||
/// A floating-point literal: `1.2`, `10e-4`.
|
|
||||||
Float(Span, f64),
|
|
||||||
/// A length literal: `12pt`, `3cm`.
|
|
||||||
Length(Span, f64, LengthUnit),
|
|
||||||
/// An angle literal: `1.5rad`, `90deg`.
|
|
||||||
Angle(Span, f64, AngularUnit),
|
|
||||||
/// A percent literal: `50%`.
|
|
||||||
///
|
|
||||||
/// _Note_: `50%` is stored as `50.0` here, but as `0.5` in the
|
|
||||||
/// corresponding [value](crate::geom::Relative).
|
|
||||||
Percent(Span, f64),
|
|
||||||
/// A fraction unit literal: `1fr`.
|
|
||||||
Fractional(Span, f64),
|
|
||||||
/// A string literal: `"hello!"`.
|
|
||||||
Str(Span, EcoString),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Lit {
|
|
||||||
/// The source code location.
|
|
||||||
pub fn span(&self) -> Span {
|
|
||||||
match *self {
|
|
||||||
Self::None(span) => span,
|
|
||||||
Self::Auto(span) => span,
|
|
||||||
Self::Bool(span, _) => span,
|
|
||||||
Self::Int(span, _) => span,
|
|
||||||
Self::Float(span, _) => span,
|
|
||||||
Self::Length(span, _, _) => span,
|
|
||||||
Self::Angle(span, _, _) => span,
|
|
||||||
Self::Percent(span, _) => span,
|
|
||||||
Self::Fractional(span, _) => span,
|
|
||||||
Self::Str(span, _) => span,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An array expression: `(1, "hi", 12cm)`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct ArrayExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The entries of the array.
|
|
||||||
pub items: Vec<Expr>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A dictionary expression: `(thickness: 3pt, pattern: dashed)`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct DictExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The named dictionary entries.
|
|
||||||
pub items: Vec<Named>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A pair of a name and an expression: `pattern: dashed`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct Named {
|
|
||||||
/// The name: `pattern`.
|
|
||||||
pub name: Ident,
|
|
||||||
/// The right-hand side of the pair: `dashed`.
|
|
||||||
pub expr: Expr,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Named {
|
|
||||||
/// The source code location.
|
|
||||||
pub fn span(&self) -> Span {
|
|
||||||
self.name.span.join(self.expr.span())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A template expression: `[*Hi* there!]`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct TemplateExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The contents of the template.
|
|
||||||
pub body: Markup,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A grouped expression: `(1 + 2)`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct GroupExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The wrapped expression.
|
|
||||||
pub expr: Expr,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A block expression: `{ let x = 1; x + 2 }`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct BlockExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The list of expressions contained in the block.
|
|
||||||
pub exprs: Vec<Expr>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A unary operation: `-x`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct UnaryExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The operator: `-`.
|
|
||||||
pub op: UnOp,
|
|
||||||
/// The expression to operator on: `x`.
|
|
||||||
pub expr: Expr,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A unary operator.
|
|
||||||
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
|
||||||
pub enum UnOp {
|
|
||||||
/// The plus operator: `+`.
|
|
||||||
Pos,
|
|
||||||
/// The negation operator: `-`.
|
|
||||||
Neg,
|
|
||||||
/// The boolean `not`.
|
|
||||||
Not,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl UnOp {
|
|
||||||
/// Try to convert the token into a unary operation.
|
|
||||||
pub fn from_token(token: Token) -> Option<Self> {
|
|
||||||
Some(match token {
|
|
||||||
Token::Plus => Self::Pos,
|
|
||||||
Token::Hyph => Self::Neg,
|
|
||||||
Token::Not => Self::Not,
|
|
||||||
_ => return None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The precedence of this operator.
|
|
||||||
pub fn precedence(self) -> usize {
|
|
||||||
match self {
|
|
||||||
Self::Pos | Self::Neg => 8,
|
|
||||||
Self::Not => 3,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The string representation of this operation.
|
|
||||||
pub fn as_str(self) -> &'static str {
|
|
||||||
match self {
|
|
||||||
Self::Pos => "+",
|
|
||||||
Self::Neg => "-",
|
|
||||||
Self::Not => "not",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A binary operation: `a + b`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct BinaryExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The left-hand side of the operation: `a`.
|
|
||||||
pub lhs: Expr,
|
|
||||||
/// The operator: `+`.
|
|
||||||
pub op: BinOp,
|
|
||||||
/// The right-hand side of the operation: `b`.
|
|
||||||
pub rhs: Expr,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A binary operator.
|
|
||||||
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
|
||||||
pub enum BinOp {
|
|
||||||
/// The addition operator: `+`.
|
|
||||||
Add,
|
|
||||||
/// The subtraction operator: `-`.
|
|
||||||
Sub,
|
|
||||||
/// The multiplication operator: `*`.
|
|
||||||
Mul,
|
|
||||||
/// The division operator: `/`.
|
|
||||||
Div,
|
|
||||||
/// The short-circuiting boolean `and`.
|
|
||||||
And,
|
|
||||||
/// The short-circuiting boolean `or`.
|
|
||||||
Or,
|
|
||||||
/// The equality operator: `==`.
|
|
||||||
Eq,
|
|
||||||
/// The inequality operator: `!=`.
|
|
||||||
Neq,
|
|
||||||
/// The less-than operator: `<`.
|
|
||||||
Lt,
|
|
||||||
/// The less-than or equal operator: `<=`.
|
|
||||||
Leq,
|
|
||||||
/// The greater-than operator: `>`.
|
|
||||||
Gt,
|
|
||||||
/// The greater-than or equal operator: `>=`.
|
|
||||||
Geq,
|
|
||||||
/// The assignment operator: `=`.
|
|
||||||
Assign,
|
|
||||||
/// The add-assign operator: `+=`.
|
|
||||||
AddAssign,
|
|
||||||
/// The subtract-assign oeprator: `-=`.
|
|
||||||
SubAssign,
|
|
||||||
/// The multiply-assign operator: `*=`.
|
|
||||||
MulAssign,
|
|
||||||
/// The divide-assign operator: `/=`.
|
|
||||||
DivAssign,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BinOp {
|
|
||||||
/// Try to convert the token into a binary operation.
|
|
||||||
pub fn from_token(token: Token) -> Option<Self> {
|
|
||||||
Some(match token {
|
|
||||||
Token::Plus => Self::Add,
|
|
||||||
Token::Hyph => Self::Sub,
|
|
||||||
Token::Star => Self::Mul,
|
|
||||||
Token::Slash => Self::Div,
|
|
||||||
Token::And => Self::And,
|
|
||||||
Token::Or => Self::Or,
|
|
||||||
Token::EqEq => Self::Eq,
|
|
||||||
Token::ExclEq => Self::Neq,
|
|
||||||
Token::Lt => Self::Lt,
|
|
||||||
Token::LtEq => Self::Leq,
|
|
||||||
Token::Gt => Self::Gt,
|
|
||||||
Token::GtEq => Self::Geq,
|
|
||||||
Token::Eq => Self::Assign,
|
|
||||||
Token::PlusEq => Self::AddAssign,
|
|
||||||
Token::HyphEq => Self::SubAssign,
|
|
||||||
Token::StarEq => Self::MulAssign,
|
|
||||||
Token::SlashEq => Self::DivAssign,
|
|
||||||
_ => return None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The precedence of this operator.
|
|
||||||
pub fn precedence(self) -> usize {
|
|
||||||
match self {
|
|
||||||
Self::Mul | Self::Div => 6,
|
|
||||||
Self::Add | Self::Sub => 5,
|
|
||||||
Self::Eq | Self::Neq | Self::Lt | Self::Leq | Self::Gt | Self::Geq => 4,
|
|
||||||
Self::And => 3,
|
|
||||||
Self::Or => 2,
|
|
||||||
Self::Assign
|
|
||||||
| Self::AddAssign
|
|
||||||
| Self::SubAssign
|
|
||||||
| Self::MulAssign
|
|
||||||
| Self::DivAssign => 1,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The associativity of this operator.
|
|
||||||
pub fn associativity(self) -> Associativity {
|
|
||||||
match self {
|
|
||||||
Self::Add
|
|
||||||
| Self::Sub
|
|
||||||
| Self::Mul
|
|
||||||
| Self::Div
|
|
||||||
| Self::And
|
|
||||||
| Self::Or
|
|
||||||
| Self::Eq
|
|
||||||
| Self::Neq
|
|
||||||
| Self::Lt
|
|
||||||
| Self::Leq
|
|
||||||
| Self::Gt
|
|
||||||
| Self::Geq => Associativity::Left,
|
|
||||||
Self::Assign
|
|
||||||
| Self::AddAssign
|
|
||||||
| Self::SubAssign
|
|
||||||
| Self::MulAssign
|
|
||||||
| Self::DivAssign => Associativity::Right,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The string representation of this operation.
|
|
||||||
pub fn as_str(self) -> &'static str {
|
|
||||||
match self {
|
|
||||||
Self::Add => "+",
|
|
||||||
Self::Sub => "-",
|
|
||||||
Self::Mul => "*",
|
|
||||||
Self::Div => "/",
|
|
||||||
Self::And => "and",
|
|
||||||
Self::Or => "or",
|
|
||||||
Self::Eq => "==",
|
|
||||||
Self::Neq => "!=",
|
|
||||||
Self::Lt => "<",
|
|
||||||
Self::Leq => "<=",
|
|
||||||
Self::Gt => ">",
|
|
||||||
Self::Geq => ">=",
|
|
||||||
Self::Assign => "=",
|
|
||||||
Self::AddAssign => "+=",
|
|
||||||
Self::SubAssign => "-=",
|
|
||||||
Self::MulAssign => "*=",
|
|
||||||
Self::DivAssign => "/=",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The associativity of a binary operator.
|
|
||||||
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
|
||||||
pub enum Associativity {
|
|
||||||
/// Left-associative: `a + b + c` is equivalent to `(a + b) + c`.
|
|
||||||
Left,
|
|
||||||
/// Right-associative: `a = b = c` is equivalent to `a = (b = c)`.
|
|
||||||
Right,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An invocation of a function: `foo(...)`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct CallExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The function to call.
|
|
||||||
pub callee: Expr,
|
|
||||||
/// The arguments to the function.
|
|
||||||
pub args: CallArgs,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The arguments to a function: `12, draw: false`.
|
|
||||||
///
|
|
||||||
/// In case of a bracketed invocation with a body, the body is _not_
|
|
||||||
/// included in the span for the sake of clearer error messages.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct CallArgs {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The positional and named arguments.
|
|
||||||
pub items: Vec<CallArg>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An argument to a function call.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub enum CallArg {
|
|
||||||
/// A positional argument: `12`.
|
|
||||||
Pos(Expr),
|
|
||||||
/// A named argument: `draw: false`.
|
|
||||||
Named(Named),
|
|
||||||
/// A spreaded argument: `..things`.
|
|
||||||
Spread(Expr),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CallArg {
|
|
||||||
/// The source code location.
|
|
||||||
pub fn span(&self) -> Span {
|
|
||||||
match self {
|
|
||||||
Self::Pos(expr) => expr.span(),
|
|
||||||
Self::Named(named) => named.span(),
|
|
||||||
Self::Spread(expr) => expr.span(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A closure expression: `(x, y) => z`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct ClosureExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The name of the closure.
|
|
||||||
///
|
|
||||||
/// This only exists if you use the function syntax sugar: `let f(x) = y`.
|
|
||||||
pub name: Option<Ident>,
|
|
||||||
/// The parameter bindings.
|
|
||||||
pub params: Vec<ClosureParam>,
|
|
||||||
/// The body of the closure.
|
|
||||||
pub body: Rc<Expr>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An parameter to a closure.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub enum ClosureParam {
|
|
||||||
/// A positional parameter: `x`.
|
|
||||||
Pos(Ident),
|
|
||||||
/// A named parameter with a default value: `draw: false`.
|
|
||||||
Named(Named),
|
|
||||||
/// A parameter sink: `..args`.
|
|
||||||
Sink(Ident),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ClosureParam {
|
|
||||||
/// The source code location.
|
|
||||||
pub fn span(&self) -> Span {
|
|
||||||
match self {
|
|
||||||
Self::Pos(ident) => ident.span,
|
|
||||||
Self::Named(named) => named.span(),
|
|
||||||
Self::Sink(ident) => ident.span,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A with expression: `f with (x, y: 1)`.
|
|
||||||
///
|
|
||||||
/// Applies arguments to a function.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct WithExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The function to apply the arguments to.
|
|
||||||
pub callee: Expr,
|
|
||||||
/// The arguments to apply to the function.
|
|
||||||
pub args: CallArgs,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A let expression: `let x = 1`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct LetExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The binding to assign to.
|
|
||||||
pub binding: Ident,
|
|
||||||
/// The expression the binding is initialized with.
|
|
||||||
pub init: Option<Expr>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An import expression: `import a, b, c from "utils.typ"`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct ImportExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The items to be imported.
|
|
||||||
pub imports: Imports,
|
|
||||||
/// The location of the importable file.
|
|
||||||
pub path: Expr,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The items that ought to be imported from a file.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub enum Imports {
|
|
||||||
/// All items in the scope of the file should be imported.
|
|
||||||
Wildcard,
|
|
||||||
/// The specified identifiers from the file should be imported.
|
|
||||||
Idents(Vec<Ident>),
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An include expression: `include "chapter1.typ"`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct IncludeExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The location of the file to be included.
|
|
||||||
pub path: Expr,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An if-else expression: `if x { y } else { z }`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct IfExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The condition which selects the body to evaluate.
|
|
||||||
pub condition: Expr,
|
|
||||||
/// The expression to evaluate if the condition is true.
|
|
||||||
pub if_body: Expr,
|
|
||||||
/// The expression to evaluate if the condition is false.
|
|
||||||
pub else_body: Option<Expr>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A while loop expression: `while x { y }`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct WhileExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The condition which selects whether to evaluate the body.
|
|
||||||
pub condition: Expr,
|
|
||||||
/// The expression to evaluate while the condition is true.
|
|
||||||
pub body: Expr,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A for loop expression: `for x in y { z }`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct ForExpr {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The pattern to assign to.
|
|
||||||
pub pattern: ForPattern,
|
|
||||||
/// The expression to iterate over.
|
|
||||||
pub iter: Expr,
|
|
||||||
/// The expression to evaluate for each iteration.
|
|
||||||
pub body: Expr,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A pattern in a for loop.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub enum ForPattern {
|
|
||||||
/// A value pattern: `for v in array`.
|
|
||||||
Value(Ident),
|
|
||||||
/// A key-value pattern: `for k, v in dict`.
|
|
||||||
KeyValue(Ident, Ident),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ForPattern {
|
|
||||||
/// The source code location.
|
|
||||||
pub fn span(&self) -> Span {
|
|
||||||
match self {
|
|
||||||
Self::Value(v) => v.span,
|
|
||||||
Self::KeyValue(k, v) => k.span.join(v.span),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,85 +0,0 @@
|
|||||||
use std::borrow::Borrow;
|
|
||||||
use std::ops::Deref;
|
|
||||||
|
|
||||||
use unicode_xid::UnicodeXID;
|
|
||||||
|
|
||||||
use super::Span;
|
|
||||||
use crate::util::EcoString;
|
|
||||||
|
|
||||||
/// A Unicode identifier with a few extra permissible characters.
|
|
||||||
///
|
|
||||||
/// In addition to what is specified in the [Unicode Standard][uax31], we allow:
|
|
||||||
/// - `_` as a starting character,
|
|
||||||
/// - `_` and `-` as continuing characters.
|
|
||||||
///
|
|
||||||
/// [uax31]: http://www.unicode.org/reports/tr31/
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct Ident {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The identifier string.
|
|
||||||
pub string: EcoString,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Ident {
|
|
||||||
/// Create a new identifier from a string, checking that it is valid.
|
|
||||||
pub fn new(
|
|
||||||
string: impl AsRef<str> + Into<EcoString>,
|
|
||||||
span: impl Into<Span>,
|
|
||||||
) -> Option<Self> {
|
|
||||||
if is_ident(string.as_ref()) {
|
|
||||||
Some(Self { span: span.into(), string: string.into() })
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return a reference to the underlying string.
|
|
||||||
pub fn as_str(&self) -> &str {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for Ident {
|
|
||||||
type Target = str;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
self.string.as_str()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AsRef<str> for Ident {
|
|
||||||
fn as_ref(&self) -> &str {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Borrow<str> for Ident {
|
|
||||||
fn borrow(&self) -> &str {
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&Ident> for EcoString {
|
|
||||||
fn from(ident: &Ident) -> Self {
|
|
||||||
ident.string.clone()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether a string is a valid identifier.
|
|
||||||
pub fn is_ident(string: &str) -> bool {
|
|
||||||
let mut chars = string.chars();
|
|
||||||
chars
|
|
||||||
.next()
|
|
||||||
.map_or(false, |c| is_id_start(c) && chars.all(is_id_continue))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether a character can start an identifier.
|
|
||||||
pub fn is_id_start(c: char) -> bool {
|
|
||||||
c.is_xid_start() || c == '_'
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether a character can continue an identifier.
|
|
||||||
pub fn is_id_continue(c: char) -> bool {
|
|
||||||
c.is_xid_continue() || c == '_' || c == '-'
|
|
||||||
}
|
|
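As a side note on the rules documented above, here is a minimal standalone sketch (not part of this commit) of the same identifier check, written against the `unicode-xid` crate that the removed file imported:

use unicode_xid::UnicodeXID;

/// Sketch of the check described above: XID rules plus `_` as a start
/// character and `_`/`-` as continue characters.
fn is_ident(string: &str) -> bool {
    let mut chars = string.chars();
    chars.next().map_or(false, |c| {
        (c.is_xid_start() || c == '_')
            && chars.all(|c| c.is_xid_continue() || c == '_' || c == '-')
    })
}

fn main() {
    assert!(is_ident("page-break")); // `-` is fine as a continue character
    assert!(is_ident("_private"));   // `_` may start an identifier
    assert!(!is_ident("1st"));       // digits may not start an identifier
    assert!(!is_ident("-dash"));     // `-` may not start an identifier
}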
@ -1,78 +0,0 @@
|
|||||||
use super::{Expr, Ident, Span};
|
|
||||||
use crate::util::EcoString;
|
|
||||||
|
|
||||||
/// The syntactical root capable of representing a full parsed document.
|
|
||||||
pub type Markup = Vec<MarkupNode>;
|
|
||||||
|
|
||||||
/// A single piece of markup.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub enum MarkupNode {
|
|
||||||
/// Whitespace containing less than two newlines.
|
|
||||||
Space,
|
|
||||||
/// A forced line break: `\`.
|
|
||||||
Linebreak(Span),
|
|
||||||
/// A paragraph break: Two or more newlines.
|
|
||||||
Parbreak(Span),
|
|
||||||
/// Strong text was enabled / disabled: `*`.
|
|
||||||
Strong(Span),
|
|
||||||
/// Emphasized text was enabled / disabled: `_`.
|
|
||||||
Emph(Span),
|
|
||||||
/// Plain text.
|
|
||||||
Text(EcoString),
|
|
||||||
/// A raw block with optional syntax highlighting: `` `...` ``.
|
|
||||||
Raw(Box<RawNode>),
|
|
||||||
/// A section heading: `= Introduction`.
|
|
||||||
Heading(Box<HeadingNode>),
|
|
||||||
/// An item in an unordered list: `- ...`.
|
|
||||||
List(Box<ListNode>),
|
|
||||||
/// An item in an enumeration (ordered list): `1. ...`.
|
|
||||||
Enum(Box<EnumNode>),
|
|
||||||
/// An expression.
|
|
||||||
Expr(Expr),
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A raw block with optional syntax highlighting: `` `...` ``.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct RawNode {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// An optional identifier specifying the language to syntax-highlight in.
|
|
||||||
pub lang: Option<Ident>,
|
|
||||||
/// The raw text, determined as the raw string between the backticks trimmed
|
|
||||||
/// according to the above rules.
|
|
||||||
pub text: EcoString,
|
|
||||||
/// Whether the element is block-level, that is, it has 3+ backticks
|
|
||||||
/// and contains at least one newline.
|
|
||||||
pub block: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A section heading: `= Introduction`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct HeadingNode {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The section depth (number of equals signs).
|
|
||||||
pub level: usize,
|
|
||||||
/// The contents of the heading.
|
|
||||||
pub body: Markup,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An item in an unordered list: `- ...`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct ListNode {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The contents of the list item.
|
|
||||||
pub body: Markup,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An item in an enumeration (ordered list): `1. ...`.
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub struct EnumNode {
|
|
||||||
/// The source code location.
|
|
||||||
pub span: Span,
|
|
||||||
/// The number, if any.
|
|
||||||
pub number: Option<usize>,
|
|
||||||
/// The contents of the list item.
|
|
||||||
pub body: Markup,
|
|
||||||
}
|
|
@ -1,16 +1,747 @@
|
|||||||
//! Syntax types.
|
//! Syntax types.
|
||||||
|
|
||||||
mod expr;
|
pub mod ast;
|
||||||
mod ident;
|
|
||||||
mod markup;
|
|
||||||
mod pretty;
|
mod pretty;
|
||||||
mod span;
|
mod span;
|
||||||
mod token;
|
|
||||||
pub mod visit;
|
|
||||||
|
|
||||||
pub use expr::*;
|
use std::fmt::{self, Debug, Display, Formatter};
|
||||||
pub use ident::*;
|
use std::rc::Rc;
|
||||||
pub use markup::*;
|
|
||||||
pub use pretty::*;
|
pub use pretty::*;
|
||||||
pub use span::*;
|
pub use span::*;
|
||||||
pub use token::*;
|
|
||||||
|
use self::ast::{MathNode, RawNode, TypedNode};
|
||||||
|
use crate::diag::Error;
|
||||||
|
use crate::geom::{AngularUnit, LengthUnit};
|
||||||
|
use crate::source::SourceId;
|
||||||
|
use crate::util::EcoString;
|
||||||
|
|
||||||
|
/// An inner or leaf node in the untyped green tree.
|
||||||
|
#[derive(Clone, PartialEq)]
|
||||||
|
pub enum Green {
|
||||||
|
/// A reference-counted inner node.
|
||||||
|
Node(Rc<GreenNode>),
|
||||||
|
/// A terminal, owned token.
|
||||||
|
Token(GreenData),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Green {
|
||||||
|
/// Returns the metadata of the node.
|
||||||
|
fn data(&self) -> &GreenData {
|
||||||
|
match self {
|
||||||
|
Green::Node(n) => &n.data,
|
||||||
|
Green::Token(t) => &t,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The type of the node.
|
||||||
|
pub fn kind(&self) -> &NodeKind {
|
||||||
|
self.data().kind()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The length of the node.
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.data().len()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether the node or its children contain an error.
|
||||||
|
pub fn erroneous(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
Self::Node(node) => node.erroneous,
|
||||||
|
Self::Token(data) => data.kind.is_error(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The node's children.
|
||||||
|
pub fn children(&self) -> &[Green] {
|
||||||
|
match self {
|
||||||
|
Green::Node(n) => &n.children(),
|
||||||
|
Green::Token(_) => &[],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Change the type of the node.
|
||||||
|
pub fn convert(&mut self, kind: NodeKind) {
|
||||||
|
match self {
|
||||||
|
Self::Node(node) => {
|
||||||
|
let node = Rc::make_mut(node);
|
||||||
|
node.erroneous |= kind.is_error();
|
||||||
|
node.data.kind = kind;
|
||||||
|
}
|
||||||
|
Self::Token(data) => data.kind = kind,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for Green {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::Token(GreenData::new(NodeKind::None, 0))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Debug for Green {
|
||||||
|
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||||
|
write!(f, "{:?}: {}", self.kind(), self.len())?;
|
||||||
|
if let Self::Node(n) = self {
|
||||||
|
if !n.children.is_empty() {
|
||||||
|
f.write_str(" ")?;
|
||||||
|
f.debug_list().entries(&n.children).finish()?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An inner node in the untyped green tree.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub struct GreenNode {
|
||||||
|
/// Node metadata.
|
||||||
|
data: GreenData,
|
||||||
|
/// This node's children, which losslessly make up this node.
|
||||||
|
children: Vec<Green>,
|
||||||
|
/// Whether this node or any of its children are erroneous.
|
||||||
|
erroneous: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GreenNode {
|
||||||
|
/// Creates a new node with the given kind and a single child.
|
||||||
|
pub fn with_child(kind: NodeKind, child: impl Into<Green>) -> Self {
|
||||||
|
Self::with_children(kind, vec![child.into()])
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new node with the given kind and children.
|
||||||
|
pub fn with_children(kind: NodeKind, children: Vec<Green>) -> Self {
|
||||||
|
let mut erroneous = kind.is_error();
|
||||||
|
let len = children
|
||||||
|
.iter()
|
||||||
|
.inspect(|c| erroneous |= c.erroneous())
|
||||||
|
.map(Green::len)
|
||||||
|
.sum();
|
||||||
|
|
||||||
|
Self {
|
||||||
|
data: GreenData::new(kind, len),
|
||||||
|
children,
|
||||||
|
erroneous,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The node's children.
|
||||||
|
pub fn children(&self) -> &[Green] {
|
||||||
|
&self.children
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<GreenNode> for Green {
|
||||||
|
fn from(node: GreenNode) -> Self {
|
||||||
|
Rc::new(node).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Rc<GreenNode>> for Green {
|
||||||
|
fn from(node: Rc<GreenNode>) -> Self {
|
||||||
|
Self::Node(node)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Data shared between inner and leaf nodes.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub struct GreenData {
|
||||||
|
/// What kind of node this is (each kind would have its own struct in a
|
||||||
|
/// strongly typed AST).
|
||||||
|
kind: NodeKind,
|
||||||
|
/// The byte length of the node in the source.
|
||||||
|
len: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GreenData {
|
||||||
|
/// Create new node metadata.
|
||||||
|
pub fn new(kind: NodeKind, len: usize) -> Self {
|
||||||
|
Self { len, kind }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The type of the node.
|
||||||
|
pub fn kind(&self) -> &NodeKind {
|
||||||
|
&self.kind
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The length of the node.
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.len
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<GreenData> for Green {
|
||||||
|
fn from(token: GreenData) -> Self {
|
||||||
|
Self::Token(token)
|
||||||
|
}
|
||||||
|
}
|
||||||
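For orientation, a small sketch (not part of the diff) of how the API above composes. The node kinds and conversions are the ones added in this commit; the `"a".into()` conversion to `EcoString` is an assumption:

// Hypothetical green subtree for the source `a + b` (5 bytes). Assumes the
// types introduced above (`Green`, `GreenNode`, `GreenData`, `NodeKind`).
fn binary_green() -> Green {
    let children = vec![
        Green::Token(GreenData::new(NodeKind::Ident("a".into()), 1)),
        Green::Token(GreenData::new(NodeKind::Space(0), 1)),
        Green::Token(GreenData::new(NodeKind::Plus, 1)),
        Green::Token(GreenData::new(NodeKind::Space(0), 1)),
        Green::Token(GreenData::new(NodeKind::Ident("b".into()), 1)),
    ];

    // `with_children` sums the children's byte lengths (5 here) and
    // propagates the erroneous flag, as implemented above.
    GreenNode::with_children(NodeKind::Binary, children).into()
}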
|
|
||||||
|
/// An owned wrapper for a green node with span information.
|
||||||
|
///
|
||||||
|
/// Owned variant of [`RedRef`]. Can be [cast](Self::cast) to an AST node.
|
||||||
|
#[derive(Clone, PartialEq)]
|
||||||
|
pub struct RedNode {
|
||||||
|
id: SourceId,
|
||||||
|
offset: usize,
|
||||||
|
green: Green,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RedNode {
|
||||||
|
/// Create a new red node from a root [`GreenNode`].
|
||||||
|
pub fn from_root(root: Rc<GreenNode>, id: SourceId) -> Self {
|
||||||
|
Self { id, offset: 0, green: root.into() }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert to a borrowed representation.
|
||||||
|
pub fn as_ref(&self) -> RedRef<'_> {
|
||||||
|
RedRef {
|
||||||
|
id: self.id,
|
||||||
|
offset: self.offset,
|
||||||
|
green: &self.green,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The type of the node.
|
||||||
|
pub fn kind(&self) -> &NodeKind {
|
||||||
|
self.as_ref().kind()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The length of the node.
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.as_ref().len()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The span of the node.
|
||||||
|
pub fn span(&self) -> Span {
|
||||||
|
self.as_ref().span()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The error messages for this node and its descendants.
|
||||||
|
pub fn errors(&self) -> Vec<Error> {
|
||||||
|
self.as_ref().errors()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert the node to a typed AST node.
|
||||||
|
pub fn cast<T>(self) -> Option<T>
|
||||||
|
where
|
||||||
|
T: TypedNode,
|
||||||
|
{
|
||||||
|
self.as_ref().cast()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The children of the node.
|
||||||
|
pub fn children(&self) -> Children<'_> {
|
||||||
|
self.as_ref().children()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the first child that can cast to some AST type.
|
||||||
|
pub fn cast_first_child<T: TypedNode>(&self) -> Option<T> {
|
||||||
|
self.as_ref().cast_first_child()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the last child that can cast to some AST type.
|
||||||
|
pub fn cast_last_child<T: TypedNode>(&self) -> Option<T> {
|
||||||
|
self.as_ref().cast_last_child()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Debug for RedNode {
|
||||||
|
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||||
|
self.as_ref().fmt(f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A borrowed wrapper for a green node with span information.
|
||||||
|
///
|
||||||
|
/// Borrowed variant of [`RedNode`]. Can be [cast](Self::cast) to an AST node.
|
||||||
|
#[derive(Copy, Clone, PartialEq)]
|
||||||
|
pub struct RedRef<'a> {
|
||||||
|
id: SourceId,
|
||||||
|
offset: usize,
|
||||||
|
green: &'a Green,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> RedRef<'a> {
|
||||||
|
/// Convert to an owned representation.
|
||||||
|
pub fn own(self) -> RedNode {
|
||||||
|
RedNode {
|
||||||
|
id: self.id,
|
||||||
|
offset: self.offset,
|
||||||
|
green: self.green.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The type of the node.
|
||||||
|
pub fn kind(self) -> &'a NodeKind {
|
||||||
|
self.green.kind()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The length of the node.
|
||||||
|
pub fn len(self) -> usize {
|
||||||
|
self.green.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The span of the node.
|
||||||
|
pub fn span(self) -> Span {
|
||||||
|
Span::new(self.id, self.offset, self.offset + self.green.len())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The error messages for this node and its descendants.
|
||||||
|
pub fn errors(self) -> Vec<Error> {
|
||||||
|
if !self.green.erroneous() {
|
||||||
|
return vec![];
|
||||||
|
}
|
||||||
|
|
||||||
|
match self.kind() {
|
||||||
|
NodeKind::Error(pos, msg) => {
|
||||||
|
let span = match pos {
|
||||||
|
ErrorPos::Start => self.span().at_start(),
|
||||||
|
ErrorPos::Full => self.span(),
|
||||||
|
ErrorPos::End => self.span().at_end(),
|
||||||
|
};
|
||||||
|
|
||||||
|
vec![Error::new(span, msg.to_string())]
|
||||||
|
}
|
||||||
|
_ => self
|
||||||
|
.children()
|
||||||
|
.filter(|red| red.green.erroneous())
|
||||||
|
.flat_map(|red| red.errors())
|
||||||
|
.collect(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert the node to a typed AST node.
|
||||||
|
pub fn cast<T>(self) -> Option<T>
|
||||||
|
where
|
||||||
|
T: TypedNode,
|
||||||
|
{
|
||||||
|
T::from_red(self)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The node's children.
|
||||||
|
pub fn children(self) -> Children<'a> {
|
||||||
|
let children = match &self.green {
|
||||||
|
Green::Node(node) => node.children(),
|
||||||
|
Green::Token(_) => &[],
|
||||||
|
};
|
||||||
|
|
||||||
|
Children {
|
||||||
|
id: self.id,
|
||||||
|
iter: children.iter(),
|
||||||
|
front: self.offset,
|
||||||
|
back: self.offset + self.len(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the first child that can cast to some AST type.
|
||||||
|
pub fn cast_first_child<T: TypedNode>(self) -> Option<T> {
|
||||||
|
self.children().find_map(RedRef::cast)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the last child that can cast to some AST type.
|
||||||
|
pub fn cast_last_child<T: TypedNode>(self) -> Option<T> {
|
||||||
|
self.children().rev().find_map(RedRef::cast)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Debug for RedRef<'_> {
|
||||||
|
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||||
|
write!(f, "{:?}: {:?}", self.kind(), self.span())?;
|
||||||
|
let mut children = self.children().peekable();
|
||||||
|
if children.peek().is_some() {
|
||||||
|
f.write_str(" ")?;
|
||||||
|
f.debug_list().entries(children.map(RedRef::own)).finish()?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An iterator over the children of a red node.
|
||||||
|
pub struct Children<'a> {
|
||||||
|
id: SourceId,
|
||||||
|
iter: std::slice::Iter<'a, Green>,
|
||||||
|
front: usize,
|
||||||
|
back: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Iterator for Children<'a> {
|
||||||
|
type Item = RedRef<'a>;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
self.iter.next().map(|green| {
|
||||||
|
let offset = self.front;
|
||||||
|
self.front += green.len();
|
||||||
|
RedRef { id: self.id, offset, green }
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
self.iter.size_hint()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DoubleEndedIterator for Children<'_> {
|
||||||
|
fn next_back(&mut self) -> Option<Self::Item> {
|
||||||
|
self.iter.next_back().map(|green| {
|
||||||
|
self.back -= green.len();
|
||||||
|
RedRef { id: self.id, offset: self.back, green }
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExactSizeIterator for Children<'_> {}
|
||||||
|
|
||||||
|
/// All syntactical building blocks that can be part of a Typst document.
|
||||||
|
///
|
||||||
|
/// Can be emitted as a token by the tokenizer or as part of a green node by
|
||||||
|
/// the parser.
|
||||||
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
pub enum NodeKind {
|
||||||
|
/// A left square bracket: `[`.
|
||||||
|
LeftBracket,
|
||||||
|
/// A right square bracket: `]`.
|
||||||
|
RightBracket,
|
||||||
|
/// A left curly brace: `{`.
|
||||||
|
LeftBrace,
|
||||||
|
/// A right curly brace: `}`.
|
||||||
|
RightBrace,
|
||||||
|
/// A left round parenthesis: `(`.
|
||||||
|
LeftParen,
|
||||||
|
/// A right round parenthesis: `)`.
|
||||||
|
RightParen,
|
||||||
|
/// An asterisk: `*`.
|
||||||
|
Star,
|
||||||
|
/// A comma: `,`.
|
||||||
|
Comma,
|
||||||
|
/// A semicolon: `;`.
|
||||||
|
Semicolon,
|
||||||
|
/// A colon: `:`.
|
||||||
|
Colon,
|
||||||
|
/// A plus: `+`.
|
||||||
|
Plus,
|
||||||
|
/// A hyphen: `-`.
|
||||||
|
Minus,
|
||||||
|
/// A slash: `/`.
|
||||||
|
Slash,
|
||||||
|
/// A single equals sign: `=`.
|
||||||
|
Eq,
|
||||||
|
/// Two equals signs: `==`.
|
||||||
|
EqEq,
|
||||||
|
/// An exclamation mark followed by an equals sign: `!=`.
|
||||||
|
ExclEq,
|
||||||
|
/// A less-than sign: `<`.
|
||||||
|
Lt,
|
||||||
|
/// A less-than sign followed by an equals sign: `<=`.
|
||||||
|
LtEq,
|
||||||
|
/// A greater-than sign: `>`.
|
||||||
|
Gt,
|
||||||
|
/// A greater-than sign followed by an equals sign: `>=`.
|
||||||
|
GtEq,
|
||||||
|
/// A plus followed by an equals sign: `+=`.
|
||||||
|
PlusEq,
|
||||||
|
/// A hyphen followed by an equals sign: `-=`.
|
||||||
|
HyphEq,
|
||||||
|
/// An asterisk followed by an equals sign: `*=`.
|
||||||
|
StarEq,
|
||||||
|
/// A slash followed by an equals sign: `/=`.
|
||||||
|
SlashEq,
|
||||||
|
/// The `not` operator.
|
||||||
|
Not,
|
||||||
|
/// The `and` operator.
|
||||||
|
And,
|
||||||
|
/// The `or` operator.
|
||||||
|
Or,
|
||||||
|
/// The `with` operator.
|
||||||
|
With,
|
||||||
|
/// Two dots: `..`.
|
||||||
|
Dots,
|
||||||
|
/// An equals sign followed by a greater-than sign: `=>`.
|
||||||
|
Arrow,
|
||||||
|
/// The none literal: `none`.
|
||||||
|
None,
|
||||||
|
/// The auto literal: `auto`.
|
||||||
|
Auto,
|
||||||
|
/// The `let` keyword.
|
||||||
|
Let,
|
||||||
|
/// The `if` keyword.
|
||||||
|
If,
|
||||||
|
/// The `else` keyword.
|
||||||
|
Else,
|
||||||
|
/// The `for` keyword.
|
||||||
|
For,
|
||||||
|
/// The `in` keyword.
|
||||||
|
In,
|
||||||
|
/// The `while` keyword.
|
||||||
|
While,
|
||||||
|
/// The `break` keyword.
|
||||||
|
Break,
|
||||||
|
/// The `continue` keyword.
|
||||||
|
Continue,
|
||||||
|
/// The `return` keyword.
|
||||||
|
Return,
|
||||||
|
/// The `import` keyword.
|
||||||
|
Import,
|
||||||
|
/// The `include` keyword.
|
||||||
|
Include,
|
||||||
|
/// The `from` keyword.
|
||||||
|
From,
|
||||||
|
/// Template markup.
|
||||||
|
Markup,
|
||||||
|
/// One or more whitespace characters.
|
||||||
|
Space(usize),
|
||||||
|
/// A forced line break: `\`.
|
||||||
|
Linebreak,
|
||||||
|
/// A paragraph break: Two or more newlines.
|
||||||
|
Parbreak,
|
||||||
|
/// A consecutive non-markup string.
|
||||||
|
Text(EcoString),
|
||||||
|
/// A non-breaking space: `~`.
|
||||||
|
NonBreakingSpace,
|
||||||
|
/// An en-dash: `--`.
|
||||||
|
EnDash,
|
||||||
|
/// An em-dash: `---`.
|
||||||
|
EmDash,
|
||||||
|
/// A slash and the letter "u" followed by a hexadecimal unicode entity
|
||||||
|
/// enclosed in curly braces: `\u{1F5FA}`.
|
||||||
|
UnicodeEscape(char),
|
||||||
|
/// Strong text was enabled / disabled: `*`.
|
||||||
|
Strong,
|
||||||
|
/// Emphasized text was enabled / disabled: `_`.
|
||||||
|
Emph,
|
||||||
|
/// A section heading: `= Introduction`.
|
||||||
|
Heading,
|
||||||
|
/// An item in an enumeration (ordered list): `1. ...`.
|
||||||
|
Enum,
|
||||||
|
/// A numbering: `23.`.
|
||||||
|
///
|
||||||
|
/// Can also exist without the number: `.`.
|
||||||
|
EnumNumbering(Option<usize>),
|
||||||
|
/// An item in an unordered list: `- ...`.
|
||||||
|
List,
|
||||||
|
/// An arbitrary number of backticks followed by inner contents, terminated
|
||||||
|
/// with the same number of backticks: `` `...` ``.
|
||||||
|
Raw(Rc<RawNode>),
|
||||||
|
/// Dollar signs surrounding inner contents.
|
||||||
|
Math(Rc<MathNode>),
|
||||||
|
/// An identifier: `center`.
|
||||||
|
Ident(EcoString),
|
||||||
|
/// A boolean: `true`, `false`.
|
||||||
|
Bool(bool),
|
||||||
|
/// An integer: `120`.
|
||||||
|
Int(i64),
|
||||||
|
/// A floating-point number: `1.2`, `10e-4`.
|
||||||
|
Float(f64),
|
||||||
|
/// A length: `12pt`, `3cm`.
|
||||||
|
Length(f64, LengthUnit),
|
||||||
|
/// An angle: `90deg`.
|
||||||
|
Angle(f64, AngularUnit),
|
||||||
|
/// A percentage: `50%`.
|
||||||
|
///
|
||||||
|
/// _Note_: `50%` is stored as `50.0` here, as in the corresponding
|
||||||
|
/// [literal](ast::LitKind::Percent).
|
||||||
|
Percentage(f64),
|
||||||
|
/// A fraction unit: `3fr`.
|
||||||
|
Fraction(f64),
|
||||||
|
/// A quoted string: `"..."`.
|
||||||
|
Str(EcoString),
|
||||||
|
/// An array expression: `(1, "hi", 12cm)`.
|
||||||
|
Array,
|
||||||
|
/// A dictionary expression: `(thickness: 3pt, pattern: dashed)`.
|
||||||
|
Dict,
|
||||||
|
/// A named pair: `thickness: 3pt`.
|
||||||
|
Named,
|
||||||
|
/// A grouped expression: `(1 + 2)`.
|
||||||
|
Group,
|
||||||
|
/// A unary operation: `-x`.
|
||||||
|
Unary,
|
||||||
|
/// A binary operation: `a + b`.
|
||||||
|
Binary,
|
||||||
|
/// An invocation of a function: `f(x, y)`.
|
||||||
|
Call,
|
||||||
|
/// A function call's argument list: `(x, y)`.
|
||||||
|
CallArgs,
|
||||||
|
/// A closure expression: `(x, y) => z`.
|
||||||
|
Closure,
|
||||||
|
/// A closure's parameters: `(x, y)`.
|
||||||
|
ClosureParams,
|
||||||
|
/// A parameter sink: `..x`.
|
||||||
|
Spread,
|
||||||
|
/// A template expression: `[*Hi* there!]`.
|
||||||
|
Template,
|
||||||
|
/// A block expression: `{ let x = 1; x + 2 }`.
|
||||||
|
Block,
|
||||||
|
/// A for loop expression: `for x in y { ... }`.
|
||||||
|
ForExpr,
|
||||||
|
/// A while loop expression: `while x { ... }`.
|
||||||
|
WhileExpr,
|
||||||
|
/// An if expression: `if x { ... }`.
|
||||||
|
IfExpr,
|
||||||
|
/// A let expression: `let x = 1`.
|
||||||
|
LetExpr,
|
||||||
|
/// The `with` expression: `with (1)`.
|
||||||
|
WithExpr,
|
||||||
|
/// A for loop's destructuring pattern: `x` or `x, y`.
|
||||||
|
ForPattern,
|
||||||
|
/// The import expression: `import x from "foo.typ"`.
|
||||||
|
ImportExpr,
|
||||||
|
/// Items to import: `a, b, c`.
|
||||||
|
ImportItems,
|
||||||
|
/// The include expression: `include "foo.typ"`.
|
||||||
|
IncludeExpr,
|
||||||
|
/// Two slashes followed by inner contents, terminated with a newline:
|
||||||
|
/// `//<str>\n`.
|
||||||
|
LineComment,
|
||||||
|
/// A slash and a star followed by inner contents, terminated with a star
|
||||||
|
/// and a slash: `/*<str>*/`.
|
||||||
|
///
|
||||||
|
/// The comment can contain nested block comments.
|
||||||
|
BlockComment,
|
||||||
|
/// Tokens that appear in the wrong place.
|
||||||
|
Error(ErrorPos, EcoString),
|
||||||
|
/// Unknown character sequences.
|
||||||
|
Unknown(EcoString),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Where in a node an error should be annotated.
|
||||||
|
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||||
|
pub enum ErrorPos {
|
||||||
|
/// At the start of the node.
|
||||||
|
Start,
|
||||||
|
/// Over the full width of the node.
|
||||||
|
Full,
|
||||||
|
/// At the end of the node.
|
||||||
|
End,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl NodeKind {
|
||||||
|
/// Whether this is some kind of parenthesis.
|
||||||
|
pub fn is_paren(&self) -> bool {
|
||||||
|
matches!(self, Self::LeftParen | Self::RightParen)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether this is some kind of bracket.
|
||||||
|
pub fn is_bracket(&self) -> bool {
|
||||||
|
matches!(self, Self::LeftBracket | Self::RightBracket)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether this is some kind of brace.
|
||||||
|
pub fn is_brace(&self) -> bool {
|
||||||
|
matches!(self, Self::LeftBrace | Self::RightBrace)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether this is some kind of error.
|
||||||
|
pub fn is_error(&self) -> bool {
|
||||||
|
matches!(self, NodeKind::Error(_, _) | NodeKind::Unknown(_))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A human-readable name for the kind.
|
||||||
|
pub fn as_str(&self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
Self::LeftBracket => "opening bracket",
|
||||||
|
Self::RightBracket => "closing bracket",
|
||||||
|
Self::LeftBrace => "opening brace",
|
||||||
|
Self::RightBrace => "closing brace",
|
||||||
|
Self::LeftParen => "opening paren",
|
||||||
|
Self::RightParen => "closing paren",
|
||||||
|
Self::Star => "star",
|
||||||
|
Self::Comma => "comma",
|
||||||
|
Self::Semicolon => "semicolon",
|
||||||
|
Self::Colon => "colon",
|
||||||
|
Self::Plus => "plus",
|
||||||
|
Self::Minus => "minus",
|
||||||
|
Self::Slash => "slash",
|
||||||
|
Self::Eq => "assignment operator",
|
||||||
|
Self::EqEq => "equality operator",
|
||||||
|
Self::ExclEq => "inequality operator",
|
||||||
|
Self::Lt => "less-than operator",
|
||||||
|
Self::LtEq => "less-than or equal operator",
|
||||||
|
Self::Gt => "greater-than operator",
|
||||||
|
Self::GtEq => "greater-than or equal operator",
|
||||||
|
Self::PlusEq => "add-assign operator",
|
||||||
|
Self::HyphEq => "subtract-assign operator",
|
||||||
|
Self::StarEq => "multiply-assign operator",
|
||||||
|
Self::SlashEq => "divide-assign operator",
|
||||||
|
Self::Not => "operator `not`",
|
||||||
|
Self::And => "operator `and`",
|
||||||
|
Self::Or => "operator `or`",
|
||||||
|
Self::With => "operator `with`",
|
||||||
|
Self::Dots => "dots",
|
||||||
|
Self::Arrow => "arrow",
|
||||||
|
Self::None => "`none`",
|
||||||
|
Self::Auto => "`auto`",
|
||||||
|
Self::Let => "keyword `let`",
|
||||||
|
Self::If => "keyword `if`",
|
||||||
|
Self::Else => "keyword `else`",
|
||||||
|
Self::For => "keyword `for`",
|
||||||
|
Self::In => "keyword `in`",
|
||||||
|
Self::While => "keyword `while`",
|
||||||
|
Self::Break => "keyword `break`",
|
||||||
|
Self::Continue => "keyword `continue`",
|
||||||
|
Self::Return => "keyword `return`",
|
||||||
|
Self::Import => "keyword `import`",
|
||||||
|
Self::Include => "keyword `include`",
|
||||||
|
Self::From => "keyword `from`",
|
||||||
|
Self::Markup => "markup",
|
||||||
|
Self::Space(_) => "space",
|
||||||
|
Self::Linebreak => "forced linebreak",
|
||||||
|
Self::Parbreak => "paragraph break",
|
||||||
|
Self::Text(_) => "text",
|
||||||
|
Self::NonBreakingSpace => "non-breaking space",
|
||||||
|
Self::EnDash => "en dash",
|
||||||
|
Self::EmDash => "em dash",
|
||||||
|
Self::UnicodeEscape(_) => "unicode escape sequence",
|
||||||
|
Self::Strong => "strong",
|
||||||
|
Self::Emph => "emphasis",
|
||||||
|
Self::Heading => "heading",
|
||||||
|
Self::Enum => "enumeration item",
|
||||||
|
Self::EnumNumbering(_) => "enumeration item numbering",
|
||||||
|
Self::List => "list item",
|
||||||
|
Self::Raw(_) => "raw block",
|
||||||
|
Self::Math(_) => "math formula",
|
||||||
|
Self::Ident(_) => "identifier",
|
||||||
|
Self::Bool(_) => "boolean",
|
||||||
|
Self::Int(_) => "integer",
|
||||||
|
Self::Float(_) => "float",
|
||||||
|
Self::Length(_, _) => "length",
|
||||||
|
Self::Angle(_, _) => "angle",
|
||||||
|
Self::Percentage(_) => "percentage",
|
||||||
|
Self::Fraction(_) => "`fr` value",
|
||||||
|
Self::Str(_) => "string",
|
||||||
|
Self::Array => "array",
|
||||||
|
Self::Dict => "dictionary",
|
||||||
|
Self::Named => "named argument",
|
||||||
|
Self::Group => "group",
|
||||||
|
Self::Unary => "unary expression",
|
||||||
|
Self::Binary => "binary expression",
|
||||||
|
Self::Call => "call",
|
||||||
|
Self::CallArgs => "call arguments",
|
||||||
|
Self::Closure => "closure",
|
||||||
|
Self::ClosureParams => "closure parameters",
|
||||||
|
Self::Spread => "parameter sink",
|
||||||
|
Self::Template => "template",
|
||||||
|
Self::Block => "block",
|
||||||
|
Self::ForExpr => "for-loop expression",
|
||||||
|
Self::WhileExpr => "while-loop expression",
|
||||||
|
Self::IfExpr => "`if` expression",
|
||||||
|
Self::LetExpr => "`let` expression",
|
||||||
|
Self::WithExpr => "`with` expression",
|
||||||
|
Self::ForPattern => "for-loop destructuring pattern",
|
||||||
|
Self::ImportExpr => "`import` expression",
|
||||||
|
Self::ImportItems => "import items",
|
||||||
|
Self::IncludeExpr => "`include` expression",
|
||||||
|
Self::LineComment => "line comment",
|
||||||
|
Self::BlockComment => "block comment",
|
||||||
|
Self::Error(_, _) => "parse error",
|
||||||
|
Self::Unknown(src) => match src.as_str() {
|
||||||
|
"*/" => "end of block comment",
|
||||||
|
_ => "invalid token",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for NodeKind {
|
||||||
|
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||||
|
f.pad(self.as_str())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
use std::fmt::{self, Arguments, Write};
|
use std::fmt::{self, Arguments, Write};
|
||||||
|
|
||||||
use super::*;
|
use super::ast::*;
|
||||||
|
|
||||||
/// Pretty print an item and return the resulting string.
|
/// Pretty print an item and return the resulting string.
|
||||||
pub fn pretty<T>(item: &T) -> String
|
pub fn pretty<T>(item: &T) -> String
|
||||||
@ -46,20 +46,24 @@ impl Printer {
|
|||||||
Write::write_fmt(self, fmt)
|
Write::write_fmt(self, fmt)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Write a list of items joined by a joiner.
|
/// Write a list of items joined by a joiner and return how many there were.
|
||||||
pub fn join<T, I, F>(&mut self, items: I, joiner: &str, mut write_item: F)
|
pub fn join<T, I, F>(&mut self, items: I, joiner: &str, mut write_item: F) -> usize
|
||||||
where
|
where
|
||||||
I: IntoIterator<Item = T>,
|
I: IntoIterator<Item = T>,
|
||||||
F: FnMut(T, &mut Self),
|
F: FnMut(T, &mut Self),
|
||||||
{
|
{
|
||||||
|
let mut count = 0;
|
||||||
let mut iter = items.into_iter();
|
let mut iter = items.into_iter();
|
||||||
if let Some(first) = iter.next() {
|
if let Some(first) = iter.next() {
|
||||||
write_item(first, self);
|
write_item(first, self);
|
||||||
|
count += 1;
|
||||||
}
|
}
|
||||||
for item in iter {
|
for item in iter {
|
||||||
self.push_str(joiner);
|
self.push_str(joiner);
|
||||||
write_item(item, self);
|
write_item(item, self);
|
||||||
|
count += 1;
|
||||||
}
|
}
|
||||||
|
count
|
||||||
}
|
}
|
||||||
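The returned count matters because some printers only know after the fact whether a trailing comma is needed. A hedged sketch of that pattern (mirroring the `ArrayExpr` change further down, not code from the commit; assumes a `Printer` and the `Write` impl shown in this file):

fn print_numbers(p: &mut Printer, items: &[i64]) {
    p.push('(');
    let len = p.join(items, ", ", |item, p| write!(p, "{}", item).unwrap());
    if len == 1 {
        p.push(','); // keep a one-element tuple-like `(1,)` unambiguous
    }
    p.push(')');
}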
|
|
||||||
/// Finish pretty printing and return the underlying buffer.
|
/// Finish pretty printing and return the underlying buffer.
|
||||||
@ -77,7 +81,7 @@ impl Write for Printer {
|
|||||||
|
|
||||||
impl Pretty for Markup {
|
impl Pretty for Markup {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
for node in self {
|
for node in self.nodes() {
|
||||||
node.pretty(p);
|
node.pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -88,12 +92,13 @@ impl Pretty for MarkupNode {
|
|||||||
match self {
|
match self {
|
||||||
// TODO: Handle escaping.
|
// TODO: Handle escaping.
|
||||||
Self::Space => p.push(' '),
|
Self::Space => p.push(' '),
|
||||||
Self::Linebreak(_) => p.push_str(r"\"),
|
Self::Linebreak => p.push_str(r"\"),
|
||||||
Self::Parbreak(_) => p.push_str("\n\n"),
|
Self::Parbreak => p.push_str("\n\n"),
|
||||||
Self::Strong(_) => p.push('*'),
|
Self::Strong => p.push('*'),
|
||||||
Self::Emph(_) => p.push('_'),
|
Self::Emph => p.push('_'),
|
||||||
Self::Text(text) => p.push_str(text),
|
Self::Text(text) => p.push_str(text),
|
||||||
Self::Raw(raw) => raw.pretty(p),
|
Self::Raw(raw) => raw.pretty(p),
|
||||||
|
Self::Math(math) => math.pretty(p),
|
||||||
Self::Heading(heading) => heading.pretty(p),
|
Self::Heading(heading) => heading.pretty(p),
|
||||||
Self::List(list) => list.pretty(p),
|
Self::List(list) => list.pretty(p),
|
||||||
Self::Enum(enum_) => enum_.pretty(p),
|
Self::Enum(enum_) => enum_.pretty(p),
|
||||||
@ -136,7 +141,7 @@ impl Pretty for RawNode {
|
|||||||
|
|
||||||
// Language tag.
|
// Language tag.
|
||||||
if let Some(lang) = &self.lang {
|
if let Some(lang) = &self.lang {
|
||||||
lang.pretty(p);
|
p.push_str(lang);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Start untrimming.
|
// Start untrimming.
|
||||||
@ -163,38 +168,52 @@ impl Pretty for RawNode {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Pretty for MathNode {
|
||||||
|
fn pretty(&self, p: &mut Printer) {
|
||||||
|
p.push('$');
|
||||||
|
if self.display {
|
||||||
|
p.push('[');
|
||||||
|
}
|
||||||
|
p.push_str(&self.formula);
|
||||||
|
if self.display {
|
||||||
|
p.push(']');
|
||||||
|
}
|
||||||
|
p.push('$');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Pretty for HeadingNode {
|
impl Pretty for HeadingNode {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
for _ in 0 .. self.level {
|
for _ in 0 .. self.level() {
|
||||||
p.push('=');
|
p.push('=');
|
||||||
}
|
}
|
||||||
p.push(' ');
|
p.push(' ');
|
||||||
self.body.pretty(p);
|
self.body().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Pretty for ListNode {
|
impl Pretty for ListNode {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push_str("- ");
|
p.push_str("- ");
|
||||||
self.body.pretty(p);
|
self.body().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Pretty for EnumNode {
|
impl Pretty for EnumNode {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
if let Some(number) = self.number {
|
if let Some(number) = self.number() {
|
||||||
write!(p, "{}", number).unwrap();
|
write!(p, "{}", number).unwrap();
|
||||||
}
|
}
|
||||||
p.push_str(". ");
|
p.push_str(". ");
|
||||||
self.body.pretty(p);
|
self.body().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Pretty for Expr {
|
impl Pretty for Expr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
match self {
|
match self {
|
||||||
Self::Ident(v) => v.pretty(p),
|
|
||||||
Self::Lit(v) => v.pretty(p),
|
Self::Lit(v) => v.pretty(p),
|
||||||
|
Self::Ident(v) => v.pretty(p),
|
||||||
Self::Array(v) => v.pretty(p),
|
Self::Array(v) => v.pretty(p),
|
||||||
Self::Dict(v) => v.pretty(p),
|
Self::Dict(v) => v.pretty(p),
|
||||||
Self::Template(v) => v.pretty(p),
|
Self::Template(v) => v.pretty(p),
|
||||||
@ -217,17 +236,17 @@ impl Pretty for Expr {
|
|||||||
|
|
||||||
impl Pretty for Lit {
|
impl Pretty for Lit {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
match self {
|
match self.kind() {
|
||||||
Self::None(_) => p.push_str("none"),
|
LitKind::None => p.push_str("none"),
|
||||||
Self::Auto(_) => p.push_str("auto"),
|
LitKind::Auto => p.push_str("auto"),
|
||||||
Self::Bool(_, v) => write!(p, "{}", v).unwrap(),
|
LitKind::Bool(v) => write!(p, "{}", v).unwrap(),
|
||||||
Self::Int(_, v) => write!(p, "{}", v).unwrap(),
|
LitKind::Int(v) => write!(p, "{}", v).unwrap(),
|
||||||
Self::Float(_, v) => write!(p, "{}", v).unwrap(),
|
LitKind::Float(v) => write!(p, "{}", v).unwrap(),
|
||||||
Self::Length(_, v, u) => write!(p, "{}{:?}", v, u).unwrap(),
|
LitKind::Length(v, u) => write!(p, "{}{:?}", v, u).unwrap(),
|
||||||
Self::Angle(_, v, u) => write!(p, "{}{:?}", v, u).unwrap(),
|
LitKind::Angle(v, u) => write!(p, "{}{:?}", v, u).unwrap(),
|
||||||
Self::Percent(_, v) => write!(p, "{}%", v).unwrap(),
|
LitKind::Percent(v) => write!(p, "{}%", v).unwrap(),
|
||||||
Self::Fractional(_, v) => write!(p, "{}fr", v).unwrap(),
|
LitKind::Fractional(v) => write!(p, "{}fr", v).unwrap(),
|
||||||
Self::Str(_, v) => write!(p, "{:?}", v).unwrap(),
|
LitKind::Str(v) => write!(p, "{:?}", v).unwrap(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -235,8 +254,10 @@ impl Pretty for Lit {
|
|||||||
impl Pretty for ArrayExpr {
|
impl Pretty for ArrayExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push('(');
|
p.push('(');
|
||||||
p.join(&self.items, ", ", |item, p| item.pretty(p));
|
|
||||||
if self.items.len() == 1 {
|
let items = self.items();
|
||||||
|
let len = p.join(items, ", ", |item, p| item.pretty(p));
|
||||||
|
if len == 1 {
|
||||||
p.push(',');
|
p.push(',');
|
||||||
}
|
}
|
||||||
p.push(')');
|
p.push(')');
|
||||||
@ -246,10 +267,9 @@ impl Pretty for ArrayExpr {
|
|||||||
impl Pretty for DictExpr {
|
impl Pretty for DictExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push('(');
|
p.push('(');
|
||||||
if self.items.is_empty() {
|
let len = p.join(self.items(), ", ", |named, p| named.pretty(p));
|
||||||
|
if len == 0 {
|
||||||
p.push(':');
|
p.push(':');
|
||||||
} else {
|
|
||||||
p.join(&self.items, ", ", |named, p| named.pretty(p));
|
|
||||||
}
|
}
|
||||||
p.push(')');
|
p.push(')');
|
||||||
}
|
}
|
||||||
@ -257,16 +277,16 @@ impl Pretty for DictExpr {
|
|||||||
|
|
||||||
impl Pretty for Named {
|
impl Pretty for Named {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
self.name.pretty(p);
|
self.name().pretty(p);
|
||||||
p.push_str(": ");
|
p.push_str(": ");
|
||||||
self.expr.pretty(p);
|
self.expr().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Pretty for TemplateExpr {
|
impl Pretty for TemplateExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push('[');
|
p.push('[');
|
||||||
self.body.pretty(p);
|
self.body().pretty(p);
|
||||||
p.push(']');
|
p.push(']');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -274,7 +294,7 @@ impl Pretty for TemplateExpr {
|
|||||||
impl Pretty for GroupExpr {
|
impl Pretty for GroupExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push('(');
|
p.push('(');
|
||||||
self.expr.pretty(p);
|
self.expr().pretty(p);
|
||||||
p.push(')');
|
p.push(')');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -282,11 +302,11 @@ impl Pretty for GroupExpr {
|
|||||||
impl Pretty for BlockExpr {
|
impl Pretty for BlockExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push('{');
|
p.push('{');
|
||||||
if self.exprs.len() > 1 {
|
if self.exprs().count() > 1 {
|
||||||
p.push(' ');
|
p.push(' ');
|
||||||
}
|
}
|
||||||
p.join(&self.exprs, "; ", |expr, p| expr.pretty(p));
|
let len = p.join(self.exprs(), "; ", |expr, p| expr.pretty(p));
|
||||||
if self.exprs.len() > 1 {
|
if len > 1 {
|
||||||
p.push(' ');
|
p.push(' ');
|
||||||
}
|
}
|
||||||
p.push('}');
|
p.push('}');
|
||||||
@ -295,11 +315,12 @@ impl Pretty for BlockExpr {
|
|||||||
|
|
||||||
impl Pretty for UnaryExpr {
|
impl Pretty for UnaryExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
self.op.pretty(p);
|
let op = self.op();
|
||||||
if self.op == UnOp::Not {
|
op.pretty(p);
|
||||||
|
if op == UnOp::Not {
|
||||||
p.push(' ');
|
p.push(' ');
|
||||||
}
|
}
|
||||||
self.expr.pretty(p);
|
self.expr().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -311,11 +332,11 @@ impl Pretty for UnOp {
|
|||||||
|
|
||||||
impl Pretty for BinaryExpr {
|
impl Pretty for BinaryExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
self.lhs.pretty(p);
|
self.lhs().pretty(p);
|
||||||
p.push(' ');
|
p.push(' ');
|
||||||
self.op.pretty(p);
|
self.op().pretty(p);
|
||||||
p.push(' ');
|
p.push(' ');
|
||||||
self.rhs.pretty(p);
|
self.rhs().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -327,7 +348,7 @@ impl Pretty for BinOp {
|
|||||||
|
|
||||||
impl Pretty for CallExpr {
|
impl Pretty for CallExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
self.callee.pretty(p);
|
self.callee().pretty(p);
|
||||||
|
|
||||||
let mut write_args = |items: &[CallArg]| {
|
let mut write_args = |items: &[CallArg]| {
|
||||||
p.push('(');
|
p.push('(');
|
||||||
@ -335,7 +356,8 @@ impl Pretty for CallExpr {
|
|||||||
p.push(')');
|
p.push(')');
|
||||||
};
|
};
|
||||||
|
|
||||||
match self.args.items.as_slice() {
|
let args: Vec<_> = self.args().items().collect();
|
||||||
|
match args.as_slice() {
|
||||||
// This can be moved behind the arguments.
|
// This can be moved behind the arguments.
|
||||||
//
|
//
|
||||||
// Example: Transforms "#v(a, [b])" => "#v(a)[b]".
|
// Example: Transforms "#v(a, [b])" => "#v(a)[b]".
|
||||||
@ -345,7 +367,6 @@ impl Pretty for CallExpr {
|
|||||||
}
|
}
|
||||||
template.pretty(p);
|
template.pretty(p);
|
||||||
}
|
}
|
||||||
|
|
||||||
items => write_args(items),
|
items => write_args(items),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -353,7 +374,7 @@ impl Pretty for CallExpr {
|
|||||||
|
|
||||||
impl Pretty for CallArgs {
|
impl Pretty for CallArgs {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.join(&self.items, ", ", |item, p| item.pretty(p));
|
p.join(self.items(), ", ", |item, p| item.pretty(p));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -372,15 +393,16 @@ impl Pretty for CallArg {
|
|||||||
|
|
||||||
impl Pretty for ClosureExpr {
|
impl Pretty for ClosureExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
if let [param] = self.params.as_slice() {
|
let params: Vec<_> = self.params().collect();
|
||||||
|
if let [param] = params.as_slice() {
|
||||||
param.pretty(p);
|
param.pretty(p);
|
||||||
} else {
|
} else {
|
||||||
p.push('(');
|
p.push('(');
|
||||||
p.join(self.params.iter(), ", ", |item, p| item.pretty(p));
|
p.join(params.iter(), ", ", |item, p| item.pretty(p));
|
||||||
p.push(')');
|
p.push(')');
|
||||||
}
|
}
|
||||||
p.push_str(" => ");
|
p.push_str(" => ");
|
||||||
self.body.pretty(p);
|
self.body().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -399,9 +421,9 @@ impl Pretty for ClosureParam {
|
|||||||
|
|
||||||
impl Pretty for WithExpr {
|
impl Pretty for WithExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
self.callee.pretty(p);
|
self.callee().pretty(p);
|
||||||
p.push_str(" with (");
|
p.push_str(" with (");
|
||||||
self.args.pretty(p);
|
self.args().pretty(p);
|
||||||
p.push(')');
|
p.push(')');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -409,13 +431,13 @@ impl Pretty for WithExpr {
|
|||||||
impl Pretty for LetExpr {
|
impl Pretty for LetExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push_str("let ");
|
p.push_str("let ");
|
||||||
self.binding.pretty(p);
|
self.binding().pretty(p);
|
||||||
if let Some(Expr::Closure(closure)) = &self.init {
|
if let Some(Expr::Closure(closure)) = self.init() {
|
||||||
p.push('(');
|
p.push('(');
|
||||||
p.join(closure.params.iter(), ", ", |item, p| item.pretty(p));
|
p.join(closure.params(), ", ", |item, p| item.pretty(p));
|
||||||
p.push_str(") = ");
|
p.push_str(") = ");
|
||||||
closure.body.pretty(p);
|
closure.body().pretty(p);
|
||||||
} else if let Some(init) = &self.init {
|
} else if let Some(init) = self.init() {
|
||||||
p.push_str(" = ");
|
p.push_str(" = ");
|
||||||
init.pretty(p);
|
init.pretty(p);
|
||||||
}
|
}
|
||||||
@ -425,10 +447,10 @@ impl Pretty for LetExpr {
|
|||||||
impl Pretty for IfExpr {
|
impl Pretty for IfExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push_str("if ");
|
p.push_str("if ");
|
||||||
self.condition.pretty(p);
|
self.condition().pretty(p);
|
||||||
p.push(' ');
|
p.push(' ');
|
||||||
self.if_body.pretty(p);
|
self.if_body().pretty(p);
|
||||||
if let Some(expr) = &self.else_body {
|
if let Some(expr) = self.else_body() {
|
||||||
p.push_str(" else ");
|
p.push_str(" else ");
|
||||||
expr.pretty(p);
|
expr.pretty(p);
|
||||||
}
|
}
|
||||||
@ -438,42 +460,40 @@ impl Pretty for IfExpr {
|
|||||||
impl Pretty for WhileExpr {
|
impl Pretty for WhileExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push_str("while ");
|
p.push_str("while ");
|
||||||
self.condition.pretty(p);
|
self.condition().pretty(p);
|
||||||
p.push(' ');
|
p.push(' ');
|
||||||
self.body.pretty(p);
|
self.body().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Pretty for ForExpr {
|
impl Pretty for ForExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push_str("for ");
|
p.push_str("for ");
|
||||||
self.pattern.pretty(p);
|
self.pattern().pretty(p);
|
||||||
p.push_str(" in ");
|
p.push_str(" in ");
|
||||||
self.iter.pretty(p);
|
self.iter().pretty(p);
|
||||||
p.push(' ');
|
p.push(' ');
|
||||||
self.body.pretty(p);
|
self.body().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Pretty for ForPattern {
|
impl Pretty for ForPattern {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
match self {
|
if let Some(key) = self.key() {
|
||||||
Self::Value(v) => v.pretty(p),
|
key.pretty(p);
|
||||||
Self::KeyValue(k, v) => {
|
p.push_str(", ");
|
||||||
k.pretty(p);
|
|
||||||
p.push_str(", ");
|
|
||||||
v.pretty(p);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
self.value().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Pretty for ImportExpr {
|
impl Pretty for ImportExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push_str("import ");
|
p.push_str("import ");
|
||||||
self.imports.pretty(p);
|
self.imports().pretty(p);
|
||||||
p.push_str(" from ");
|
p.push_str(" from ");
|
||||||
self.path.pretty(p);
|
self.path().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -481,7 +501,9 @@ impl Pretty for Imports {
|
|||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
match self {
|
match self {
|
||||||
Self::Wildcard => p.push('*'),
|
Self::Wildcard => p.push('*'),
|
||||||
Self::Idents(idents) => p.join(idents, ", ", |item, p| item.pretty(p)),
|
Self::Items(idents) => {
|
||||||
|
p.join(idents, ", ", |item, p| item.pretty(p));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -489,20 +511,19 @@ impl Pretty for Imports {
|
|||||||
impl Pretty for IncludeExpr {
|
impl Pretty for IncludeExpr {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push_str("include ");
|
p.push_str("include ");
|
||||||
self.path.pretty(p);
|
self.path().pretty(p);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Pretty for Ident {
|
impl Pretty for Ident {
|
||||||
fn pretty(&self, p: &mut Printer) {
|
fn pretty(&self, p: &mut Printer) {
|
||||||
p.push_str(self.as_str());
|
p.push_str(self);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::parse::parse;
|
|
||||||
use crate::source::SourceFile;
|
use crate::source::SourceFile;
|
||||||
|
|
||||||
#[track_caller]
|
#[track_caller]
|
||||||
@ -513,7 +534,7 @@ mod tests {
|
|||||||
#[track_caller]
|
#[track_caller]
|
||||||
fn test_parse(src: &str, expected: &str) {
|
fn test_parse(src: &str, expected: &str) {
|
||||||
let source = SourceFile::detached(src);
|
let source = SourceFile::detached(src);
|
||||||
let ast = parse(&source).unwrap();
|
let ast = source.ast().unwrap();
|
||||||
let found = pretty(&ast);
|
let found = pretty(&ast);
|
||||||
if found != expected {
|
if found != expected {
|
||||||
println!("tree: {:#?}", ast);
|
println!("tree: {:#?}", ast);
|
||||||
@ -551,6 +572,11 @@ mod tests {
|
|||||||
test_parse("``` 1```", "`1`");
|
test_parse("``` 1```", "`1`");
|
||||||
test_parse("``` 1 ```", "`1 `");
|
test_parse("``` 1 ```", "`1 `");
|
||||||
test_parse("```` ` ````", "``` ` ```");
|
test_parse("```` ` ````", "``` ` ```");
|
||||||
|
|
||||||
|
// Math node.
|
||||||
|
roundtrip("$$");
|
||||||
|
roundtrip("$a+b$");
|
||||||
|
roundtrip("$[ a^2 + b^2 = c^2 ]$");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
use std::cmp::Ordering;
|
use std::cmp::Ordering;
|
||||||
use std::fmt::{self, Debug, Formatter};
|
use std::fmt::{self, Debug, Formatter};
|
||||||
use std::ops::{Add, Range};
|
use std::ops::Range;
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
@ -53,23 +53,19 @@ pub struct Span {
|
|||||||
/// The id of the source file.
|
/// The id of the source file.
|
||||||
pub source: SourceId,
|
pub source: SourceId,
|
||||||
/// The inclusive start position.
|
/// The inclusive start position.
|
||||||
pub start: Pos,
|
pub start: usize,
|
||||||
/// The inclusive end position.
|
/// The inclusive end position.
|
||||||
pub end: Pos,
|
pub end: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Span {
|
impl Span {
|
||||||
/// Create a new span from start and end positions.
|
/// Create a new span from start and end positions.
|
||||||
pub fn new(source: SourceId, start: impl Into<Pos>, end: impl Into<Pos>) -> Self {
|
pub fn new(source: SourceId, start: usize, end: usize) -> Self {
|
||||||
Self {
|
Self { source, start, end }
|
||||||
source,
|
|
||||||
start: start.into(),
|
|
||||||
end: end.into(),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a span including just a single position.
|
/// Create a span including just a single position.
|
||||||
pub fn at(source: SourceId, pos: impl Into<Pos> + Copy) -> Self {
|
pub fn at(source: SourceId, pos: usize) -> Self {
|
||||||
Self::new(source, pos, pos)
|
Self::new(source, pos, pos)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -77,19 +73,34 @@ impl Span {
|
|||||||
pub fn detached() -> Self {
|
pub fn detached() -> Self {
|
||||||
Self {
|
Self {
|
||||||
source: SourceId::from_raw(0),
|
source: SourceId::from_raw(0),
|
||||||
start: Pos::ZERO,
|
start: 0,
|
||||||
end: Pos::ZERO,
|
end: 0,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a span with a different start position.
|
/// Create a span with a different start position.
|
||||||
pub fn with_start(self, start: impl Into<Pos>) -> Self {
|
pub fn with_start(self, start: usize) -> Self {
|
||||||
Self { start: start.into(), ..self }
|
Self { start, ..self }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a span with a different end position.
|
/// Create a span with a different end position.
|
||||||
pub fn with_end(self, end: impl Into<Pos>) -> Self {
|
pub fn with_end(self, end: usize) -> Self {
|
||||||
Self { end: end.into(), ..self }
|
Self { end, ..self }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The byte length of the spanned region.
|
||||||
|
pub fn len(self) -> usize {
|
||||||
|
self.end - self.start
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A new span at the position of this span's start.
|
||||||
|
pub fn at_start(&self) -> Span {
|
||||||
|
Self::at(self.source, self.start)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A new span at the position of this span's end.
|
||||||
|
pub fn at_end(&self) -> Span {
|
||||||
|
Self::at(self.source, self.end)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a new span with the earlier start and later end position.
|
/// Create a new span with the earlier start and later end position.
|
||||||
@ -109,14 +120,19 @@ impl Span {
|
|||||||
*self = self.join(other)
|
*self = self.join(other)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Test whether a position is within the span.
|
||||||
|
pub fn contains(&self, pos: usize) -> bool {
|
||||||
|
self.start <= pos && self.end >= pos
|
||||||
|
}
|
||||||
|
|
||||||
/// Test whether one span completely contains the other span.
|
/// Test whether one span completely contains the other span.
|
||||||
pub fn contains(self, other: Self) -> bool {
|
pub fn surrounds(self, other: Self) -> bool {
|
||||||
self.source == other.source && self.start <= other.start && self.end >= other.end
|
self.source == other.source && self.start <= other.start && self.end >= other.end
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convert to a `Range<Pos>` for indexing.
|
/// Convert to a `Range<usize>` for indexing.
|
||||||
pub fn to_range(self) -> Range<usize> {
|
pub fn to_range(self) -> Range<usize> {
|
||||||
self.start.to_usize() .. self.end.to_usize()
|
self.start .. self.end
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
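To make the renaming above concrete, a small sketch (not part of the commit) of the two checks, assuming some `SourceId` value `id` is available:

fn span_checks(id: SourceId) {
    let outer = Span::new(id, 10, 20);
    let inner = Span::new(id, 12, 18);

    assert!(outer.contains(15));        // `contains` now takes a byte position
    assert!(outer.surrounds(inner));    // span-in-span containment moved here
    assert_eq!(outer.len(), 10);        // byte length of the spanned region
    assert_eq!(outer.to_range(), 10 .. 20);
}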
@ -135,77 +151,3 @@ impl PartialOrd for Span {
    }
    }
}
}
}
}

/// A byte position in source code.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
pub struct Pos(pub u32);

impl Pos {
    /// The zero position.
    pub const ZERO: Self = Self(0);

    /// Convert to a usize for indexing.
    pub fn to_usize(self) -> usize {
        self.0 as usize
    }
}

impl Debug for Pos {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        Debug::fmt(&self.0, f)
    }
}

impl From<u32> for Pos {
    fn from(index: u32) -> Self {
        Self(index)
    }
}

impl From<usize> for Pos {
    fn from(index: usize) -> Self {
        Self(index as u32)
    }
}

impl<T> Add<T> for Pos
where
    T: Into<Pos>,
{
    type Output = Self;

    fn add(self, rhs: T) -> Self {
        Pos(self.0 + rhs.into().0)
    }
}

/// Convert a position or range into a span.
pub trait IntoSpan {
    /// Convert into a span by providing the source id.
    fn into_span(self, source: SourceId) -> Span;
}

impl IntoSpan for Span {
    fn into_span(self, source: SourceId) -> Span {
        debug_assert_eq!(self.source, source);
        self
    }
}

impl IntoSpan for Pos {
    fn into_span(self, source: SourceId) -> Span {
        Span::new(source, self, self)
    }
}

impl IntoSpan for usize {
    fn into_span(self, source: SourceId) -> Span {
        Span::new(source, self, self)
    }
}

impl IntoSpan for Range<usize> {
    fn into_span(self, source: SourceId) -> Span {
        Span::new(source, self.start, self.end)
    }
}
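With `Pos` and `IntoSpan` removed, call sites that previously went through those conversions can pass byte offsets directly; roughly (illustrative only, `source` stands for some `SourceId`):

// Before: offsets were wrapped and converted, e.g. `(5 .. 9).into_span(source)`.
// After: plain `usize` offsets everywhere.
let span = Span::new(source, 5, 9);
let range = span.to_range(); // 5 .. 9, directly usable for indexing source text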
@ -1,276 +0,0 @@
use crate::geom::{AngularUnit, LengthUnit};

/// A minimal semantic entity of source code.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum Token<'s> {
    /// A left square bracket: `[`.
    LeftBracket,
    /// A right square bracket: `]`.
    RightBracket,
    /// A left curly brace: `{`.
    LeftBrace,
    /// A right curly brace: `}`.
    RightBrace,
    /// A left round parenthesis: `(`.
    LeftParen,
    /// A right round parenthesis: `)`.
    RightParen,
    /// An asterisk: `*`.
    Star,
    /// An underscore: `_`.
    Underscore,
    /// A tilde: `~`.
    Tilde,
    /// Two hyphens: `--`.
    HyphHyph,
    /// Three hyphens: `---`.
    HyphHyphHyph,
    /// A backslash followed by nothing or whitespace: `\`.
    Backslash,
    /// A comma: `,`.
    Comma,
    /// A semicolon: `;`.
    Semicolon,
    /// A colon: `:`.
    Colon,
    /// A plus: `+`.
    Plus,
    /// A hyphen: `-`.
    Hyph,
    /// A slash: `/`.
    Slash,
    /// A single equals sign: `=`.
    Eq,
    /// Two equals signs: `==`.
    EqEq,
    /// An exclamation mark followed by an equals sign: `!=`.
    ExclEq,
    /// A less-than sign: `<`.
    Lt,
    /// A less-than sign followed by an equals sign: `<=`.
    LtEq,
    /// A greater-than sign: `>`.
    Gt,
    /// A greater-than sign followed by an equals sign: `>=`.
    GtEq,
    /// A plus followed by an equals sign: `+=`.
    PlusEq,
    /// A hyphen followed by an equals sign: `-=`.
    HyphEq,
    /// An asterisk followed by an equals sign: `*=`.
    StarEq,
    /// A slash followed by an equals sign: `/=`.
    SlashEq,
    /// Two dots: `..`.
    Dots,
    /// An equals sign followed by a greater-than sign: `=>`.
    Arrow,
    /// The `not` operator.
    Not,
    /// The `and` operator.
    And,
    /// The `or` operator.
    Or,
    /// The `with` operator.
    With,
    /// The none literal: `none`.
    None,
    /// The auto literal: `auto`.
    Auto,
    /// The `let` keyword.
    Let,
    /// The `if` keyword.
    If,
    /// The `else` keyword.
    Else,
    /// The `for` keyword.
    For,
    /// The `in` keyword.
    In,
    /// The `while` keyword.
    While,
    /// The `break` keyword.
    Break,
    /// The `continue` keyword.
    Continue,
    /// The `return` keyword.
    Return,
    /// The `import` keyword.
    Import,
    /// The `include` keyword.
    Include,
    /// The `from` keyword.
    From,
    /// One or more whitespace characters.
    ///
    /// The contained `usize` denotes the number of newlines that were contained
    /// in the whitespace.
    Space(usize),
    /// A consecutive non-markup string.
    Text(&'s str),
    /// A slash and the letter "u" followed by a hexadecimal unicode entity
    /// enclosed in curly braces: `\u{1F5FA}`.
    UnicodeEscape(UnicodeEscapeToken<'s>),
    /// An arbitrary number of backticks followed by inner contents, terminated
    /// with the same number of backticks: `` `...` ``.
    Raw(RawToken<'s>),
    /// One or two dollar signs followed by inner contents, terminated with the
    /// same number of dollar signs.
    Math(MathToken<'s>),
    /// A numbering: `23.`.
    ///
    /// Can also exist without the number: `.`.
    Numbering(Option<usize>),
    /// An identifier: `center`.
    Ident(&'s str),
    /// A boolean: `true`, `false`.
    Bool(bool),
    /// An integer: `120`.
    Int(i64),
    /// A floating-point number: `1.2`, `10e-4`.
    Float(f64),
    /// A length: `12pt`, `3cm`.
    Length(f64, LengthUnit),
    /// An angle: `90deg`.
    Angle(f64, AngularUnit),
    /// A percentage: `50%`.
    ///
    /// _Note_: `50%` is stored as `50.0` here, as in the corresponding
    /// [literal](super::Lit::Percent).
    Percent(f64),
    /// A fraction unit: `3fr`.
    Fraction(f64),
    /// A quoted string: `"..."`.
    Str(StrToken<'s>),
    /// Two slashes followed by inner contents, terminated with a newline:
    /// `//<str>\n`.
    LineComment(&'s str),
    /// A slash and a star followed by inner contents, terminated with a star
    /// and a slash: `/*<str>*/`.
    ///
    /// The comment can contain nested block comments.
    BlockComment(&'s str),
    /// Things that are not valid tokens.
    Invalid(&'s str),
}

/// A quoted string token: `"..."`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct StrToken<'s> {
    /// The string inside the quotes.
    ///
    /// _Note_: If the string contains escape sequences these are not yet
    /// applied to be able to just store a string slice here instead of
    /// a `String`. The resolving is done later in the parser.
    pub string: &'s str,
    /// Whether the closing quote was present.
    pub terminated: bool,
}

/// A raw block token: `` `...` ``.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct RawToken<'s> {
    /// The raw text between the backticks.
    pub text: &'s str,
    /// The number of opening backticks.
    pub backticks: usize,
    /// Whether all closing backticks were present.
    pub terminated: bool,
}

/// A math formula token: `$2pi + x$` or `$[f'(x) = x^2]$`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct MathToken<'s> {
    /// The formula between the dollars.
    pub formula: &'s str,
    /// Whether the formula is display-level, that is, it is surrounded by
    /// `$[..]`.
    pub display: bool,
    /// Whether the closing dollars were present.
    pub terminated: bool,
}

/// A unicode escape sequence token: `\u{1F5FA}`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct UnicodeEscapeToken<'s> {
    /// The escape sequence between the braces.
    pub sequence: &'s str,
    /// Whether the closing brace was present.
    pub terminated: bool,
}

impl<'s> Token<'s> {
    /// The English name of this token for use in error messages.
    pub fn name(self) -> &'static str {
        match self {
            Self::LeftBracket => "opening bracket",
            Self::RightBracket => "closing bracket",
            Self::LeftBrace => "opening brace",
            Self::RightBrace => "closing brace",
            Self::LeftParen => "opening paren",
            Self::RightParen => "closing paren",
            Self::Star => "star",
            Self::Underscore => "underscore",
            Self::Tilde => "tilde",
            Self::HyphHyph => "en dash",
            Self::HyphHyphHyph => "em dash",
            Self::Backslash => "backslash",
            Self::Comma => "comma",
            Self::Semicolon => "semicolon",
            Self::Colon => "colon",
            Self::Plus => "plus",
            Self::Hyph => "minus",
            Self::Slash => "slash",
            Self::Eq => "assignment operator",
            Self::EqEq => "equality operator",
            Self::ExclEq => "inequality operator",
            Self::Lt => "less-than operator",
            Self::LtEq => "less-than or equal operator",
            Self::Gt => "greater-than operator",
            Self::GtEq => "greater-than or equal operator",
            Self::PlusEq => "add-assign operator",
            Self::HyphEq => "subtract-assign operator",
            Self::StarEq => "multiply-assign operator",
            Self::SlashEq => "divide-assign operator",
            Self::Dots => "dots",
            Self::Arrow => "arrow",
            Self::Not => "operator `not`",
            Self::And => "operator `and`",
            Self::Or => "operator `or`",
            Self::With => "operator `with`",
            Self::None => "`none`",
            Self::Auto => "`auto`",
            Self::Let => "keyword `let`",
            Self::If => "keyword `if`",
            Self::Else => "keyword `else`",
            Self::For => "keyword `for`",
            Self::In => "keyword `in`",
            Self::While => "keyword `while`",
            Self::Break => "keyword `break`",
            Self::Continue => "keyword `continue`",
            Self::Return => "keyword `return`",
            Self::Import => "keyword `import`",
            Self::Include => "keyword `include`",
            Self::From => "keyword `from`",
            Self::Space(_) => "space",
            Self::Text(_) => "text",
            Self::UnicodeEscape(_) => "unicode escape sequence",
            Self::Raw(_) => "raw block",
            Self::Math(_) => "math formula",
            Self::Numbering(_) => "numbering",
            Self::Ident(_) => "identifier",
            Self::Bool(_) => "boolean",
            Self::Int(_) => "integer",
            Self::Float(_) => "float",
            Self::Length(_, _) => "length",
            Self::Angle(_, _) => "angle",
            Self::Percent(_) => "percentage",
            Self::Fraction(_) => "`fr` value",
            Self::Str(_) => "string",
            Self::LineComment(_) => "line comment",
            Self::BlockComment(_) => "block comment",
            Self::Invalid("*/") => "end of block comment",
            Self::Invalid(_) => "invalid token",
        }
    }
}
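For context, `Token::name` above fed parser error messages; a hedged sketch of that kind of use (the `unexpected` helper is invented for illustration and not part of the codebase):

// Sketch: turning a token into an error message via the removed `Token::name`.
fn unexpected(found: Token<'_>) -> String {
    format!("unexpected {}", found.name())
}

// unexpected(Token::RightBracket) == "unexpected closing bracket"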
@ -1,263 +0,0 @@
//! Mutable and immutable syntax tree traversal.

use super::*;

/// Implement the immutable and the mutable visitor version.
macro_rules! impl_visitors {
    ($($name:ident($($tts:tt)*) $body:block)*) => {
        macro_rules! r {
            (rc: $x:expr) => { $x.as_ref() };
            ($x:expr) => { &$x };
        }

        impl_visitor! {
            Visit,
            immutable,
            immutably,
            [$(($name($($tts)*) $body))*]
        }

        macro_rules! r {
            (rc: $x:expr) => { std::rc::Rc::make_mut(&mut $x) };
            ($x:expr) => { &mut $x };
        }

        impl_visitor! {
            VisitMut,
            mutable,
            mutably,
            [$(($name($($tts)*) $body mut))*] mut
        }
    };
}

/// Implement an immutable or mutable visitor.
macro_rules! impl_visitor {
    (
        $visit:ident,
        $mutability:ident,
        $adjective:ident,
        [$((
            $name:ident($v:ident, $node:ident: $ty:ty)
            $body:block
            $($fmut:tt)?
        ))*]
        $($mut:tt)?
    ) => {
        #[doc = concat!("Visit syntax trees ", stringify!($adjective), ".")]
        pub trait $visit<'ast> {
            /// Visit a definition of a binding.
            ///
            /// Bindings are, for example, left-hand side of let expressions,
            /// and key/value patterns in for loops.
            fn visit_binding(&mut self, _: &'ast $($mut)? Ident) {}

            /// Visit the entry into a scope.
            fn visit_enter(&mut self) {}

            /// Visit the exit from a scope.
            fn visit_exit(&mut self) {}

            $(fn $name(&mut self, $node: &'ast $($fmut)? $ty) {
                $mutability::$name(self, $node);
            })*
        }

        #[doc = concat!("Visitor functions that are ", stringify!($mutability), ".")]
        pub mod $mutability {
            use super::*;
            $(
                #[doc = concat!("Visit a node of type [`", stringify!($ty), "`].")]
                pub fn $name<'ast, V>($v: &mut V, $node: &'ast $($fmut)? $ty)
                where
                    V: $visit<'ast> + ?Sized
                $body
            )*
        }
    };
}

impl_visitors! {
    visit_tree(v, markup: Markup) {
        for node in markup {
            v.visit_node(node);
        }
    }

    visit_node(v, node: MarkupNode) {
        match node {
            MarkupNode::Space => {}
            MarkupNode::Linebreak(_) => {}
            MarkupNode::Parbreak(_) => {}
            MarkupNode::Strong(_) => {}
            MarkupNode::Emph(_) => {}
            MarkupNode::Text(_) => {}
            MarkupNode::Raw(_) => {}
            MarkupNode::Heading(n) => v.visit_heading(n),
            MarkupNode::List(n) => v.visit_list(n),
            MarkupNode::Enum(n) => v.visit_enum(n),
            MarkupNode::Expr(n) => v.visit_expr(n),
        }
    }

    visit_heading(v, heading: HeadingNode) {
        v.visit_tree(r!(heading.body));
    }

    visit_list(v, list: ListNode) {
        v.visit_tree(r!(list.body));
    }

    visit_enum(v, enum_: EnumNode) {
        v.visit_tree(r!(enum_.body));
    }

    visit_expr(v, expr: Expr) {
        match expr {
            Expr::Ident(_) => {}
            Expr::Lit(_) => {},
            Expr::Array(e) => v.visit_array(e),
            Expr::Dict(e) => v.visit_dict(e),
            Expr::Template(e) => v.visit_template(e),
            Expr::Group(e) => v.visit_group(e),
            Expr::Block(e) => v.visit_block(e),
            Expr::Unary(e) => v.visit_unary(e),
            Expr::Binary(e) => v.visit_binary(e),
            Expr::Call(e) => v.visit_call(e),
            Expr::Closure(e) => v.visit_closure(e),
            Expr::With(e) => v.visit_with(e),
            Expr::Let(e) => v.visit_let(e),
            Expr::If(e) => v.visit_if(e),
            Expr::While(e) => v.visit_while(e),
            Expr::For(e) => v.visit_for(e),
            Expr::Import(e) => v.visit_import(e),
            Expr::Include(e) => v.visit_include(e),
        }
    }

    visit_array(v, array: ArrayExpr) {
        for expr in r!(array.items) {
            v.visit_expr(expr);
        }
    }

    visit_dict(v, dict: DictExpr) {
        for named in r!(dict.items) {
            v.visit_expr(r!(named.expr));
        }
    }

    visit_template(v, template: TemplateExpr) {
        v.visit_enter();
        v.visit_tree(r!(template.body));
        v.visit_exit();
    }

    visit_group(v, group: GroupExpr) {
        v.visit_expr(r!(group.expr));
    }

    visit_block(v, block: BlockExpr) {
        v.visit_enter();
        for expr in r!(block.exprs) {
            v.visit_expr(expr);
        }
        v.visit_exit();
    }

    visit_binary(v, binary: BinaryExpr) {
        v.visit_expr(r!(binary.lhs));
        v.visit_expr(r!(binary.rhs));
    }

    visit_unary(v, unary: UnaryExpr) {
        v.visit_expr(r!(unary.expr));
    }

    visit_call(v, call: CallExpr) {
        v.visit_expr(r!(call.callee));
        v.visit_args(r!(call.args));
    }

    visit_args(v, args: CallArgs) {
        for arg in r!(args.items) {
            v.visit_arg(arg);
        }
    }

    visit_arg(v, arg: CallArg) {
        match arg {
            CallArg::Pos(expr) => v.visit_expr(expr),
            CallArg::Named(named) => v.visit_expr(r!(named.expr)),
            CallArg::Spread(expr) => v.visit_expr(expr),
        }
    }

    visit_closure(v, closure: ClosureExpr) {
        for param in r!(closure.params) {
            v.visit_param(param);
        }
        v.visit_expr(r!(rc: closure.body));
    }

    visit_param(v, param: ClosureParam) {
        match param {
            ClosureParam::Pos(binding) => v.visit_binding(binding),
            ClosureParam::Named(named) => {
                v.visit_binding(r!(named.name));
                v.visit_expr(r!(named.expr));
            }
            ClosureParam::Sink(binding) => v.visit_binding(binding),
        }
    }

    visit_with(v, with_expr: WithExpr) {
        v.visit_expr(r!(with_expr.callee));
        v.visit_args(r!(with_expr.args));
    }

    visit_let(v, let_expr: LetExpr) {
        if let Some(init) = r!(let_expr.init) {
            v.visit_expr(init);
        }
        v.visit_binding(r!(let_expr.binding));
    }

    visit_if(v, if_expr: IfExpr) {
        v.visit_expr(r!(if_expr.condition));
        v.visit_expr(r!(if_expr.if_body));
        if let Some(body) = r!(if_expr.else_body) {
            v.visit_expr(body);
        }
    }

    visit_while(v, while_expr: WhileExpr) {
        v.visit_expr(r!(while_expr.condition));
        v.visit_expr(r!(while_expr.body));
    }

    visit_for(v, for_expr: ForExpr) {
        v.visit_expr(r!(for_expr.iter));
        match r!(for_expr.pattern) {
            ForPattern::Value(value) => v.visit_binding(value),
            ForPattern::KeyValue(key, value) => {
                v.visit_binding(key);
                v.visit_binding(value);
            }
        }
        v.visit_expr(r!(for_expr.body));
    }

    visit_import(v, import_expr: ImportExpr) {
        v.visit_expr(r!(import_expr.path));
        if let Imports::Idents(idents) = r!(import_expr.imports) {
            for ident in idents {
                v.visit_binding(ident);
            }
        }
    }

    visit_include(v, include_expr: IncludeExpr) {
        v.visit_expr(r!(include_expr.path));
    }
}
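To see how the removed visitor was consumed, an implementor would override a single method and fall back to the generated default traversal for everything else; a sketch only (the `CallCounter` type is invented here):

// Sketch: count call expressions with the removed `Visit` trait.
struct CallCounter(usize);

impl<'ast> Visit<'ast> for CallCounter {
    fn visit_call(&mut self, call: &'ast CallExpr) {
        self.0 += 1;
        // Recurse into callee and arguments via the default traversal.
        immutable::visit_call(self, call);
    }
}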
Binary file not shown. (Before: 6.5 KiB, After: 6.3 KiB)
BIN tests/ref/markup/math.png (new file, 2.4 KiB, not shown)
@ -56,6 +56,52 @@
  test(f(), 3)
  test(f(), 3)
}
}

---
// Import bindings.
{
  let b = "target.typ"
  let f() = {
    import b from b
    b
  }
  test(f(), 1)
}

---
// For loop bindings.
{
  let v = (1, 2, 3)
  let s = 0
  let f() = {
    for v in v { s += v }
  }
  f()
  test(s, 6)
}

---
// Let + closure bindings.
{
  let g = "hi"
  let f() = {
    let g() = "bye"
    g()
  }
  test(f(), "bye")
}

---
// Parameter bindings.
{
  let x = 5
  let g() = {
    let f(x, y: x) = x + y
    f
  }

  test(g()(8), 13)
}

---
---
// Don't leak environment.
// Don't leak environment.
{
{
@ -79,7 +79,7 @@ This is never reached.
// Error: 22 expected keyword `from`
// Error: 22 expected keyword `from`
#import afrom, "b", c
#import afrom, "b", c

// Error: 8 expected import items
// Error: 9 expected import items
#import from "target.typ"
#import from "target.typ"

// Error: 9-10 expected expression, found assignment operator
// Error: 9-10 expected expression, found assignment operator
@ -56,7 +56,7 @@ Three
#let v4 = 4 Four
#let v4 = 4 Four

// Terminated by semicolon even though we are in a paren group.
// Terminated by semicolon even though we are in a paren group.
// Error: 19 expected expression
// Error: 18 expected expression
// Error: 19 expected closing paren
// Error: 19 expected closing paren
#let v5 = (1, 2 + ; Five
#let v5 = (1, 2 + ; Five
@ -62,7 +62,7 @@
#min(.."nope")
#min(.."nope")

---
---
// Error: 10-14 expected identifier
// Error: 8-14 expected identifier
#let f(..true) = none
#let f(..true) = none

---
---
@ -70,9 +70,9 @@
#let f(..a, ..b) = none
#let f(..a, ..b) = none

---
---
// Error: 5-6 spreading is not allowed here
// Error: 3-6 spreading is not allowed here
{(..x)}
{(..x)}

---
---
// Error: 11-17 spreading is not allowed here
// Error: 9-17 spreading is not allowed here
{(1, 2, ..(1, 2))}
{(1, 2, ..(1, 2))}
@ -8,8 +8,8 @@
=== Level 2
=== Level 2
====== Level 6
====== Level 6

// Too many hashtags.
// At some point, it should stop shrinking.
======= Level 7
=========== Level 11

---
---
// Heading vs. no heading.
// Heading vs. no heading.
12 tests/typ/markup/math.typ (new file)
@ -0,0 +1,12 @@
// Test math formulas.

---
The sum of $a$ and $b$ is $a + b$.

---
We will show that:
$[ a^2 + b^2 = c^2 ]$

---
// Error: 2:1 expected closing bracket and dollar sign
$[a
@ -55,5 +55,5 @@ The keyword ```rust let```.

---
---
// Unterminated.
// Unterminated.
// Error: 2:1 expected backtick(s)
// Error: 2:1 expected 1 backtick
`endless
`endless
@ -24,7 +24,7 @@ use typst::loading::FsLoader;
use typst::parse::Scanner;
use typst::parse::Scanner;
use typst::source::SourceFile;
use typst::source::SourceFile;
use typst::style::Style;
use typst::style::Style;
use typst::syntax::{Pos, Span};
use typst::syntax::Span;
use typst::Context;
use typst::Context;

const TYP_DIR: &str = "./typ";
const TYP_DIR: &str = "./typ";
@ -355,12 +355,12 @@ fn parse_metadata(source: &SourceFile) -> (Option<bool>, Vec<Error>) {
        let comments =
        let comments =
            lines[i ..].iter().take_while(|line| line.starts_with("//")).count();
            lines[i ..].iter().take_while(|line| line.starts_with("//")).count();

        let pos = |s: &mut Scanner| -> Pos {
        let pos = |s: &mut Scanner| -> usize {
            let first = num(s) - 1;
            let first = num(s) - 1;
            let (delta, column) =
            let (delta, column) =
                if s.eat_if(':') { (first, num(s) - 1) } else { (0, first) };
                if s.eat_if(':') { (first, num(s) - 1) } else { (0, first) };
            let line = (i + comments) + delta;
            let line = (i + comments) + delta;
            source.line_column_to_byte(line, column).unwrap().into()
            source.line_column_to_byte(line, column).unwrap()
        };
        };

        let mut s = Scanner::new(rest);
        let mut s = Scanner::new(rest);
@ -375,10 +375,10 @@ fn parse_metadata(source: &SourceFile) -> (Option<bool>, Vec<Error>) {
}
}

fn print_error(source: &SourceFile, line: usize, error: &Error) {
fn print_error(source: &SourceFile, line: usize, error: &Error) {
    let start_line = 1 + line + source.byte_to_line(error.span.start.to_usize()).unwrap();
    let start_line = 1 + line + source.byte_to_line(error.span.start).unwrap();
    let start_col = 1 + source.byte_to_column(error.span.start.to_usize()).unwrap();
    let start_col = 1 + source.byte_to_column(error.span.start).unwrap();
    let end_line = 1 + line + source.byte_to_line(error.span.end.to_usize()).unwrap();
    let end_line = 1 + line + source.byte_to_line(error.span.end).unwrap();
    let end_col = 1 + source.byte_to_column(error.span.end.to_usize()).unwrap();
    let end_col = 1 + source.byte_to_column(error.span.end).unwrap();
    println!(
    println!(
        "Error: {}:{}-{}:{}: {}",
        "Error: {}:{}-{}:{}: {}",
        start_line, start_col, end_line, end_col, error.message
        start_line, start_col, end_line, end_col, error.message
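Since error spans now carry raw byte offsets, mapping them back to 1-based line and column numbers goes through the source file, as `print_error` does above; a minimal helper sketch (the `describe` function is invented for illustration):

// Sketch: byte offset -> (line, column), both 1-based.
fn describe(source: &SourceFile, offset: usize) -> (usize, usize) {
    let line = 1 + source.byte_to_line(offset).unwrap();
    let column = 1 + source.byte_to_column(offset).unwrap();
    (line, column)
}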