Mirror of https://github.com/typst/typst (synced 2025-05-14 04:56:26 +08:00)

Commit 4875633acf: Change parser
Parent commit: ea6ee3f667
@@ -5,10 +5,11 @@ authors = ["The Typst Project Developers"]
 edition = "2018"
 
 [features]
-default = ["cli", "fs", "layout-cache"]
+default = ["cli", "fs", "layout-cache", "parse-cache"]
 cli = ["anyhow", "codespan-reporting", "fs", "pico-args", "same-file"]
 fs = ["dirs", "memmap2", "same-file", "walkdir"]
 layout-cache = ["rand"]
+parse-cache = []
 
 [profile.dev]
 # Faster compilation
@ -1,8 +1,7 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use super::{Scope, Scopes, Value};
|
||||
use crate::syntax::visit::{immutable::visit_expr, Visit};
|
||||
use crate::syntax::{Expr, Ident};
|
||||
use crate::syntax::{ClosureParam, Expr, Imports, RedTicket};
|
||||
|
||||
/// A visitor that captures variable slots.
|
||||
pub struct CapturesVisitor<'a> {
|
||||
@ -21,36 +20,83 @@ impl<'a> CapturesVisitor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn visit(&mut self, node: RedTicket) {
|
||||
let expr: Option<Expr> = node.cast();
|
||||
|
||||
match expr.as_ref() {
|
||||
Some(Expr::Let(expr)) => {
|
||||
self.visit(expr.init_ticket());
|
||||
let ident = expr.binding();
|
||||
self.internal.def_mut(ident.as_str(), Value::None);
|
||||
}
|
||||
Some(Expr::Closure(closure)) => {
|
||||
for arg in closure.params() {
|
||||
match arg {
|
||||
ClosureParam::Pos(ident) | ClosureParam::Sink(ident) => {
|
||||
self.internal.def_mut(ident.as_str(), Value::None);
|
||||
}
|
||||
ClosureParam::Named(name) => {
|
||||
self.internal.def_mut(name.name().as_str(), Value::None);
|
||||
}
|
||||
}
|
||||
}
|
||||
self.visit(closure.body_ticket());
|
||||
}
|
||||
Some(Expr::For(forloop)) => {
|
||||
let pattern = forloop.pattern();
|
||||
self.internal.def_mut(pattern.value().as_str(), Value::None);
|
||||
|
||||
if let Some(key) = pattern.key() {
|
||||
self.internal.def_mut(key.as_str(), Value::None);
|
||||
}
|
||||
self.visit(forloop.body_ticket());
|
||||
}
|
||||
Some(Expr::Import(import)) => {
|
||||
if let Imports::Idents(idents) = import.imports() {
|
||||
for ident in idents {
|
||||
self.internal.def_mut(ident.as_str(), Value::None);
|
||||
}
|
||||
}
|
||||
}
|
||||
Some(Expr::Ident(ident)) => {
|
||||
if self.internal.get(ident.as_str()).is_none() {
|
||||
if let Some(slot) = self.external.get(ident.as_str()) {
|
||||
self.captures.def_slot(ident.as_str(), Rc::clone(slot));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match expr.as_ref() {
|
||||
Some(Expr::Let(_)) | Some(Expr::For(_)) | Some(Expr::Closure(_)) => {}
|
||||
|
||||
Some(Expr::Block(_)) => {
|
||||
self.internal.enter();
|
||||
for child in node.own().children() {
|
||||
self.visit(child);
|
||||
}
|
||||
self.internal.exit();
|
||||
}
|
||||
|
||||
Some(Expr::Template(_)) => {
|
||||
self.internal.enter();
|
||||
for child in node.own().children() {
|
||||
self.visit(child);
|
||||
}
|
||||
self.internal.exit();
|
||||
}
|
||||
|
||||
_ => {
|
||||
for child in node.own().children() {
|
||||
self.visit(child);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the scope of captured variables.
|
||||
pub fn finish(self) -> Scope {
|
||||
self.captures
|
||||
}
|
||||
}
|
||||
|
||||
impl<'ast> Visit<'ast> for CapturesVisitor<'_> {
|
||||
fn visit_expr(&mut self, node: &'ast Expr) {
|
||||
if let Expr::Ident(ident) = node {
|
||||
// Find out whether the name is not locally defined and if so if it
|
||||
// can be captured.
|
||||
if self.internal.get(ident).is_none() {
|
||||
if let Some(slot) = self.external.get(ident) {
|
||||
self.captures.def_slot(ident.as_str(), Rc::clone(slot));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
visit_expr(self, node);
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_binding(&mut self, ident: &'ast Ident) {
|
||||
self.internal.def_mut(ident.as_str(), Value::None);
|
||||
}
|
||||
|
||||
fn visit_enter(&mut self) {
|
||||
self.internal.enter();
|
||||
}
|
||||
|
||||
fn visit_exit(&mut self) {
|
||||
self.internal.exit();
|
||||
}
|
||||
}
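A minimal usage sketch of the rewritten visitor, mirroring the call site in src/eval/mod.rs further down. Everything named here appears in this commit; the free function itself is illustrative only.

// Sketch, not part of the commit: collect the variables a closure captures.
fn captures_of(scopes: &Scopes, closure: &ClosureExpr) -> Scope {
    let mut visitor = CapturesVisitor::new(scopes);
    visitor.visit(closure.underlying());
    visitor.finish()
}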
src/eval/mod.rs (160 changed lines)
@ -36,9 +36,7 @@ use crate::diag::{At, Error, StrResult, Trace, Tracepoint, TypResult};
|
||||
use crate::geom::{Angle, Fractional, Length, Relative};
|
||||
use crate::image::ImageStore;
|
||||
use crate::loading::Loader;
|
||||
use crate::parse::parse;
|
||||
use crate::source::{SourceId, SourceStore};
|
||||
use crate::syntax::visit::Visit;
|
||||
use crate::syntax::*;
|
||||
use crate::util::RefMutExt;
|
||||
use crate::Context;
|
||||
@ -114,7 +112,7 @@ impl<'a> EvalContext<'a> {
|
||||
|
||||
// Parse the file.
|
||||
let source = self.sources.get(id);
|
||||
let ast = parse(&source)?;
|
||||
let ast = source.ast()?;
|
||||
|
||||
// Prepare the new context.
|
||||
let new_scopes = Scopes::new(self.scopes.base);
|
||||
@ -122,7 +120,7 @@ impl<'a> EvalContext<'a> {
|
||||
self.route.push(id);
|
||||
|
||||
// Evaluate the module.
|
||||
let template = Rc::new(ast).eval(self).trace(|| Tracepoint::Import, span)?;
|
||||
let template = ast.eval(self).trace(|| Tracepoint::Import, span)?;
|
||||
|
||||
// Restore the old context.
|
||||
let new_scopes = mem::replace(&mut self.scopes, old_scopes);
|
||||
@ -232,7 +230,7 @@ impl Eval for ArrayExpr {
|
||||
type Output = Array;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
self.items.iter().map(|expr| expr.eval(ctx)).collect()
|
||||
self.items().iter().map(|expr| expr.eval(ctx)).collect()
|
||||
}
|
||||
}
|
||||
|
||||
@ -240,9 +238,9 @@ impl Eval for DictExpr {
|
||||
type Output = Dict;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
self.items
|
||||
self.items()
|
||||
.iter()
|
||||
.map(|Named { name, expr }| Ok(((&name.string).into(), expr.eval(ctx)?)))
|
||||
.map(|x| Ok(((&x.name().string).into(), x.expr().eval(ctx)?)))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
@ -251,7 +249,7 @@ impl Eval for TemplateExpr {
|
||||
type Output = Template;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
self.body.eval(ctx)
|
||||
self.body().eval(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
@ -259,7 +257,7 @@ impl Eval for GroupExpr {
|
||||
type Output = Value;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
self.expr.eval(ctx)
|
||||
self.expr().eval(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
@ -270,7 +268,7 @@ impl Eval for BlockExpr {
|
||||
ctx.scopes.enter();
|
||||
|
||||
let mut output = Value::None;
|
||||
for expr in &self.exprs {
|
||||
for expr in &self.exprs() {
|
||||
let value = expr.eval(ctx)?;
|
||||
output = ops::join(output, value).at(expr.span())?;
|
||||
}
|
||||
@ -285,13 +283,13 @@ impl Eval for UnaryExpr {
|
||||
type Output = Value;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
let value = self.expr.eval(ctx)?;
|
||||
let result = match self.op {
|
||||
let value = self.expr().eval(ctx)?;
|
||||
let result = match self.op() {
|
||||
UnOp::Pos => ops::pos(value),
|
||||
UnOp::Neg => ops::neg(value),
|
||||
UnOp::Not => ops::not(value),
|
||||
};
|
||||
result.at(self.span)
|
||||
result.at(self.span())
|
||||
}
|
||||
}
|
||||
|
||||
@ -299,7 +297,7 @@ impl Eval for BinaryExpr {
|
||||
type Output = Value;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
match self.op {
|
||||
match self.op() {
|
||||
BinOp::Add => self.apply(ctx, ops::add),
|
||||
BinOp::Sub => self.apply(ctx, ops::sub),
|
||||
BinOp::Mul => self.apply(ctx, ops::mul),
|
||||
@ -327,17 +325,17 @@ impl BinaryExpr {
|
||||
where
|
||||
F: FnOnce(Value, Value) -> StrResult<Value>,
|
||||
{
|
||||
let lhs = self.lhs.eval(ctx)?;
|
||||
let lhs = self.lhs().eval(ctx)?;
|
||||
|
||||
// Short-circuit boolean operations.
|
||||
if (self.op == BinOp::And && lhs == Value::Bool(false))
|
||||
|| (self.op == BinOp::Or && lhs == Value::Bool(true))
|
||||
if (self.op() == BinOp::And && lhs == Value::Bool(false))
|
||||
|| (self.op() == BinOp::Or && lhs == Value::Bool(true))
|
||||
{
|
||||
return Ok(lhs);
|
||||
}
|
||||
|
||||
let rhs = self.rhs.eval(ctx)?;
|
||||
op(lhs, rhs).at(self.span)
|
||||
let rhs = self.rhs().eval(ctx)?;
|
||||
op(lhs, rhs).at(self.span())
|
||||
}
|
||||
|
||||
/// Apply an assignment operation.
|
||||
@ -345,10 +343,10 @@ impl BinaryExpr {
|
||||
where
|
||||
F: FnOnce(Value, Value) -> StrResult<Value>,
|
||||
{
|
||||
let rhs = self.rhs.eval(ctx)?;
|
||||
let mut target = self.lhs.access(ctx)?;
|
||||
let rhs = self.rhs().eval(ctx)?;
|
||||
let mut target = self.lhs().access(ctx)?;
|
||||
let lhs = mem::take(&mut *target);
|
||||
*target = op(lhs, rhs).at(self.span)?;
|
||||
*target = op(lhs, rhs).at(self.span())?;
|
||||
Ok(Value::None)
|
||||
}
|
||||
}
|
||||
@ -357,27 +355,27 @@ impl Eval for CallExpr {
|
||||
type Output = Value;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
let callee = self.callee.eval(ctx)?;
|
||||
let mut args = self.args.eval(ctx)?;
|
||||
let callee = self.callee().eval(ctx)?;
|
||||
let mut args = self.args().eval(ctx)?;
|
||||
|
||||
match callee {
|
||||
Value::Array(array) => {
|
||||
array.get(args.into_index()?).map(Value::clone).at(self.span)
|
||||
array.get(args.into_index()?).map(Value::clone).at(self.span())
|
||||
}
|
||||
|
||||
Value::Dict(dict) => {
|
||||
dict.get(args.into_key()?).map(Value::clone).at(self.span)
|
||||
dict.get(args.into_key()?).map(Value::clone).at(self.span())
|
||||
}
|
||||
|
||||
Value::Func(func) => {
|
||||
let point = || Tracepoint::Call(func.name().map(ToString::to_string));
|
||||
let value = func.call(ctx, &mut args).trace(point, self.span)?;
|
||||
let value = func.call(ctx, &mut args).trace(point, self.span())?;
|
||||
args.finish()?;
|
||||
Ok(value)
|
||||
}
|
||||
|
||||
v => bail!(
|
||||
self.callee.span(),
|
||||
self.callee().span(),
|
||||
"expected function or collection, found {}",
|
||||
v.type_name(),
|
||||
),
|
||||
@ -389,9 +387,9 @@ impl Eval for CallArgs {
|
||||
type Output = Args;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
let mut items = Vec::with_capacity(self.items.len());
|
||||
let mut items = Vec::with_capacity(self.items().len());
|
||||
|
||||
for arg in &self.items {
|
||||
for arg in &self.items() {
|
||||
let span = arg.span();
|
||||
match arg {
|
||||
CallArg::Pos(expr) => {
|
||||
@ -401,11 +399,11 @@ impl Eval for CallArgs {
|
||||
value: Spanned::new(expr.eval(ctx)?, expr.span()),
|
||||
});
|
||||
}
|
||||
CallArg::Named(Named { name, expr }) => {
|
||||
CallArg::Named(x) => {
|
||||
items.push(Arg {
|
||||
span,
|
||||
name: Some((&name.string).into()),
|
||||
value: Spanned::new(expr.eval(ctx)?, expr.span()),
|
||||
name: Some((&x.name().string).into()),
|
||||
value: Spanned::new(x.expr().eval(ctx)?, x.expr().span()),
|
||||
});
|
||||
}
|
||||
CallArg::Spread(expr) => match expr.eval(ctx)? {
|
||||
@ -438,7 +436,7 @@ impl Eval for CallArgs {
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Args { span: self.span, items })
|
||||
Ok(Args { span: self.span(), items })
|
||||
}
|
||||
}
|
||||
|
||||
@ -446,26 +444,27 @@ impl Eval for ClosureExpr {
|
||||
type Output = Value;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
let name = self.name.as_ref().map(|name| name.string.clone());
|
||||
let name = self.name().as_ref().map(|name| name.string.clone());
|
||||
|
||||
// Collect captured variables.
|
||||
let captured = {
|
||||
let mut visitor = CapturesVisitor::new(&ctx.scopes);
|
||||
visitor.visit_closure(self);
|
||||
visitor.visit(self.underlying());
|
||||
visitor.finish()
|
||||
};
|
||||
|
||||
let mut sink = None;
|
||||
let mut params = Vec::with_capacity(self.params.len());
|
||||
let params_src = self.params();
|
||||
let mut params = Vec::with_capacity(params_src.len());
|
||||
|
||||
// Collect parameters and an optional sink parameter.
|
||||
for param in &self.params {
|
||||
for param in ¶ms_src {
|
||||
match param {
|
||||
ClosureParam::Pos(name) => {
|
||||
params.push((name.string.clone(), None));
|
||||
}
|
||||
ClosureParam::Named(Named { name, expr }) => {
|
||||
params.push((name.string.clone(), Some(expr.eval(ctx)?)));
|
||||
ClosureParam::Named(x) => {
|
||||
params.push((x.name().string.clone(), Some(x.expr().eval(ctx)?)));
|
||||
}
|
||||
ClosureParam::Sink(name) => {
|
||||
if sink.is_some() {
|
||||
@ -478,7 +477,7 @@ impl Eval for ClosureExpr {
|
||||
|
||||
// Clone the body expression so that we don't have a lifetime
|
||||
// dependence on the AST.
|
||||
let body = Rc::clone(&self.body);
|
||||
let body = Rc::new(self.body());
|
||||
|
||||
// Define the actual function.
|
||||
let func = Function::new(name, move |ctx, args| {
|
||||
@ -515,8 +514,9 @@ impl Eval for WithExpr {
|
||||
type Output = Value;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
let wrapped = self.callee.eval(ctx)?.cast::<Function>().at(self.callee.span())?;
|
||||
let applied = self.args.eval(ctx)?;
|
||||
let wrapped =
|
||||
self.callee().eval(ctx)?.cast::<Function>().at(self.callee().span())?;
|
||||
let applied = self.args().eval(ctx)?;
|
||||
|
||||
let name = wrapped.name().cloned();
|
||||
let func = Function::new(name, move |ctx, args| {
|
||||
@ -532,11 +532,11 @@ impl Eval for LetExpr {
|
||||
type Output = Value;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
let value = match &self.init {
|
||||
let value = match &self.init() {
|
||||
Some(expr) => expr.eval(ctx)?,
|
||||
None => Value::None,
|
||||
};
|
||||
ctx.scopes.def_mut(self.binding.as_str(), value);
|
||||
ctx.scopes.def_mut(self.binding().as_str(), value);
|
||||
Ok(Value::None)
|
||||
}
|
||||
}
|
||||
@ -545,12 +545,15 @@ impl Eval for IfExpr {
|
||||
type Output = Value;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
let condition =
|
||||
self.condition.eval(ctx)?.cast::<bool>().at(self.condition.span())?;
|
||||
let condition = self
|
||||
.condition()
|
||||
.eval(ctx)?
|
||||
.cast::<bool>()
|
||||
.at(self.condition().span())?;
|
||||
|
||||
if condition {
|
||||
self.if_body.eval(ctx)
|
||||
} else if let Some(else_body) = &self.else_body {
|
||||
self.if_body().eval(ctx)
|
||||
} else if let Some(else_body) = &self.else_body() {
|
||||
else_body.eval(ctx)
|
||||
} else {
|
||||
Ok(Value::None)
|
||||
@ -564,9 +567,14 @@ impl Eval for WhileExpr {
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
let mut output = Value::None;
|
||||
|
||||
while self.condition.eval(ctx)?.cast::<bool>().at(self.condition.span())? {
|
||||
let value = self.body.eval(ctx)?;
|
||||
output = ops::join(output, value).at(self.body.span())?;
|
||||
while self
|
||||
.condition()
|
||||
.eval(ctx)?
|
||||
.cast::<bool>()
|
||||
.at(self.condition().span())?
|
||||
{
|
||||
let value = self.body().eval(ctx)?;
|
||||
output = ops::join(output, value).at(self.body().span())?;
|
||||
}
|
||||
|
||||
Ok(output)
|
||||
@ -586,9 +594,9 @@ impl Eval for ForExpr {
|
||||
for ($($value),*) in $iter {
|
||||
$(ctx.scopes.def_mut($binding.as_str(), $value);)*
|
||||
|
||||
let value = self.body.eval(ctx)?;
|
||||
let value = self.body().eval(ctx)?;
|
||||
output = ops::join(output, value)
|
||||
.at(self.body.span())?;
|
||||
.at(self.body().span())?;
|
||||
}
|
||||
|
||||
ctx.scopes.exit();
|
||||
@ -596,28 +604,27 @@ impl Eval for ForExpr {
|
||||
}};
|
||||
}
|
||||
|
||||
let iter = self.iter.eval(ctx)?;
|
||||
match (&self.pattern, iter) {
|
||||
(ForPattern::Value(v), Value::Str(string)) => {
|
||||
iter!(for (v => value) in string.iter())
|
||||
}
|
||||
(ForPattern::Value(v), Value::Array(array)) => {
|
||||
let iter = self.iter().eval(ctx)?;
|
||||
let pattern = self.pattern();
|
||||
match (pattern.key(), pattern.value(), iter) {
|
||||
(None, v, Value::Str(string)) => iter!(for (v => value) in string.iter()),
|
||||
(None, v, Value::Array(array)) => {
|
||||
iter!(for (v => value) in array.into_iter())
|
||||
}
|
||||
(ForPattern::KeyValue(i, v), Value::Array(array)) => {
|
||||
(Some(i), v, Value::Array(array)) => {
|
||||
iter!(for (i => idx, v => value) in array.into_iter().enumerate())
|
||||
}
|
||||
(ForPattern::Value(v), Value::Dict(dict)) => {
|
||||
(None, v, Value::Dict(dict)) => {
|
||||
iter!(for (v => value) in dict.into_iter().map(|p| p.1))
|
||||
}
|
||||
(ForPattern::KeyValue(k, v), Value::Dict(dict)) => {
|
||||
(Some(k), v, Value::Dict(dict)) => {
|
||||
iter!(for (k => key, v => value) in dict.into_iter())
|
||||
}
|
||||
(ForPattern::KeyValue(_, _), Value::Str(_)) => {
|
||||
bail!(self.pattern.span(), "mismatched pattern");
|
||||
(_, _, Value::Str(_)) => {
|
||||
bail!(pattern.span(), "mismatched pattern");
|
||||
}
|
||||
(_, iter) => {
|
||||
bail!(self.iter.span(), "cannot loop over {}", iter.type_name());
|
||||
(_, _, iter) => {
|
||||
bail!(self.iter().span(), "cannot loop over {}", iter.type_name());
|
||||
}
|
||||
}
|
||||
}
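For orientation, an illustrative summary of how the new (key, value, iterable) match lines up with source-level loops; the Typst snippets in the comments assume the array and dictionary literal syntax of the time.

// for v in "abc"         -> (None,    v, Value::Str(..))
// for v in (1, 2, 3)     -> (None,    v, Value::Array(..))
// for i, v in (1, 2, 3)  -> (Some(i), v, Value::Array(..))   enumerated with index
// for v in (a: 1, b: 2)  -> (None,    v, Value::Dict(..))    values only
// for k, v in (a: 1)     -> (Some(k), v, Value::Dict(..))    key-value pairs
// for k, v in "abc"      -> "mismatched pattern" error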
|
||||
@ -627,12 +634,12 @@ impl Eval for ImportExpr {
|
||||
type Output = Value;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
let path = self.path.eval(ctx)?.cast::<Str>().at(self.path.span())?;
|
||||
let path = self.path().eval(ctx)?.cast::<Str>().at(self.path().span())?;
|
||||
|
||||
let file = ctx.import(&path, self.path.span())?;
|
||||
let file = ctx.import(&path, self.path().span())?;
|
||||
let module = &ctx.modules[&file];
|
||||
|
||||
match &self.imports {
|
||||
match &self.imports() {
|
||||
Imports::Wildcard => {
|
||||
for (var, slot) in module.scope.iter() {
|
||||
ctx.scopes.def_mut(var, slot.borrow().clone());
|
||||
@ -657,9 +664,10 @@ impl Eval for IncludeExpr {
|
||||
type Output = Value;
|
||||
|
||||
fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
|
||||
let path = self.path.eval(ctx)?.cast::<Str>().at(self.path.span())?;
|
||||
let path_node = self.path();
|
||||
let path = path_node.eval(ctx)?.cast::<Str>().at(path_node.span())?;
|
||||
|
||||
let file = ctx.import(&path, self.path.span())?;
|
||||
let file = ctx.import(&path, path_node.span())?;
|
||||
let module = &ctx.modules[&file];
|
||||
|
||||
Ok(Value::Template(module.template.clone()))
|
||||
@ -698,14 +706,14 @@ impl Access for Ident {
|
||||
|
||||
impl Access for CallExpr {
|
||||
fn access<'a>(&self, ctx: &'a mut EvalContext) -> TypResult<RefMut<'a, Value>> {
|
||||
let args = self.args.eval(ctx)?;
|
||||
let guard = self.callee.access(ctx)?;
|
||||
let args = self.args().eval(ctx)?;
|
||||
let guard = self.callee().access(ctx)?;
|
||||
|
||||
RefMut::try_map(guard, |value| match value {
|
||||
Value::Array(array) => array.get_mut(args.into_index()?).at(self.span),
|
||||
Value::Array(array) => array.get_mut(args.into_index()?).at(self.span()),
|
||||
Value::Dict(dict) => Ok(dict.get_mut(args.into_key()?)),
|
||||
v => bail!(
|
||||
self.callee.span(),
|
||||
self.callee().span(),
|
||||
"expected collection, found {}",
|
||||
v.type_name(),
|
||||
),
|
||||
|
@ -27,10 +27,10 @@ impl Walk for MarkupNode {
|
||||
fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
|
||||
match self {
|
||||
Self::Space => ctx.template.space(),
|
||||
Self::Linebreak(_) => ctx.template.linebreak(),
|
||||
Self::Parbreak(_) => ctx.template.parbreak(),
|
||||
Self::Strong(_) => ctx.template.modify(|s| s.text_mut().strong.flip()),
|
||||
Self::Emph(_) => ctx.template.modify(|s| s.text_mut().emph.flip()),
|
||||
Self::Linebreak => ctx.template.linebreak(),
|
||||
Self::Parbreak => ctx.template.parbreak(),
|
||||
Self::Strong => ctx.template.modify(|s| s.text_mut().strong.flip()),
|
||||
Self::Emph => ctx.template.modify(|s| s.text_mut().emph.flip()),
|
||||
Self::Text(text) => ctx.template.text(text),
|
||||
Self::Raw(raw) => raw.walk(ctx)?,
|
||||
Self::Heading(heading) => heading.walk(ctx)?,
|
||||
@ -69,8 +69,8 @@ impl Walk for RawNode {
|
||||
|
||||
impl Walk for HeadingNode {
|
||||
fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
|
||||
let level = self.level;
|
||||
let body = self.body.eval(ctx)?;
|
||||
let level = self.level().0;
|
||||
let body = self.body().eval(ctx)?;
|
||||
|
||||
ctx.template.parbreak();
|
||||
ctx.template.save();
|
||||
@ -90,7 +90,7 @@ impl Walk for HeadingNode {
|
||||
|
||||
impl Walk for ListNode {
|
||||
fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
|
||||
let body = self.body.eval(ctx)?;
|
||||
let body = self.body().eval(ctx)?;
|
||||
walk_item(ctx, Str::from('•'), body);
|
||||
Ok(())
|
||||
}
|
||||
@ -98,8 +98,8 @@ impl Walk for ListNode {
|
||||
|
||||
impl Walk for EnumNode {
|
||||
fn walk(&self, ctx: &mut EvalContext) -> TypResult<()> {
|
||||
let body = self.body.eval(ctx)?;
|
||||
let label = format_str!("{}.", self.number.unwrap_or(1));
|
||||
let body = self.body().eval(ctx)?;
|
||||
let label = format_str!("{}.", self.number().0.unwrap_or(1));
|
||||
walk_item(ctx, label, body);
|
||||
Ok(())
|
||||
}
|
||||
|
@ -58,7 +58,6 @@ use crate::layout::{EvictionPolicy, LayoutCache};
|
||||
use crate::loading::Loader;
|
||||
use crate::source::{SourceId, SourceStore};
|
||||
use crate::style::Style;
|
||||
use crate::syntax::Markup;
|
||||
|
||||
/// The core context which holds the loader, configuration and cached artifacts.
|
||||
pub struct Context {
|
||||
@ -100,14 +99,9 @@ impl Context {
|
||||
&self.style
|
||||
}
|
||||
|
||||
/// Parse a source file and return the resulting markup.
|
||||
pub fn parse(&mut self, id: SourceId) -> TypResult<Markup> {
|
||||
parse::parse(self.sources.get(id))
|
||||
}
|
||||
|
||||
/// Evaluate a source file and return the resulting module.
|
||||
pub fn evaluate(&mut self, id: SourceId) -> TypResult<Module> {
|
||||
let ast = self.parse(id)?;
|
||||
let ast = self.sources.get(id).ast()?;
|
||||
eval::eval(self, id, &ast)
|
||||
}
|
||||
|
||||
|
src/parse/mod.rs (1102 changed lines; diff suppressed because it is too large)
@ -1,29 +1,34 @@
|
||||
use std::ops::Range;
|
||||
use std::rc::Rc;
|
||||
|
||||
use super::{TokenMode, Tokens};
|
||||
use crate::diag::Error;
|
||||
use crate::source::{SourceFile, SourceId};
|
||||
use crate::syntax::{IntoSpan, Pos, Span, Token};
|
||||
use crate::syntax::{ErrorPosition, Green, GreenData, GreenNode, NodeKind};
|
||||
use crate::util::EcoString;
|
||||
|
||||
/// A convenient token-based parser.
|
||||
pub struct Parser<'s> {
|
||||
/// The parsed file.
|
||||
source: &'s SourceFile,
|
||||
/// Parsing errors.
|
||||
errors: Vec<Error>,
|
||||
/// An iterator over the source tokens.
|
||||
tokens: Tokens<'s>,
|
||||
/// The stack of open groups.
|
||||
groups: Vec<GroupEntry>,
|
||||
/// The next token.
|
||||
next: Option<Token<'s>>,
|
||||
next: Option<NodeKind>,
|
||||
/// The peeked token.
|
||||
/// (Same as `next` except if we are at the end of a group, then `None`).
|
||||
peeked: Option<Token<'s>>,
|
||||
peeked: Option<NodeKind>,
|
||||
/// The end index of the last (non-whitespace if in code mode) token.
|
||||
prev_end: usize,
|
||||
/// The start index of the peeked token.
|
||||
next_start: usize,
|
||||
/// A stack of outer children vectors.
|
||||
stack: Vec<Vec<Green>>,
|
||||
/// The children of the currently built node.
|
||||
children: Vec<Green>,
|
||||
/// Whether the last parsing step was successful.
|
||||
success: bool,
|
||||
}
|
||||
|
||||
/// A logical group of tokens, e.g. `[...]`.
|
||||
@ -32,9 +37,6 @@ struct GroupEntry {
|
||||
/// For example, a [`Group::Paren`] will be ended by
|
||||
/// [`Token::RightParen`].
|
||||
pub kind: Group,
|
||||
/// The start index of the group. Used by `Parser::end_group` to return the
|
||||
/// group's full span.
|
||||
pub start: usize,
|
||||
/// The mode the parser was in _before_ the group started (to which we go
|
||||
/// back once the group ends).
|
||||
pub prev_mode: TokenMode,
|
||||
@ -60,51 +62,204 @@ pub enum Group {
|
||||
impl<'s> Parser<'s> {
|
||||
/// Create a new parser for the source string.
|
||||
pub fn new(source: &'s SourceFile) -> Self {
|
||||
let mut tokens = Tokens::new(source.src(), TokenMode::Markup);
|
||||
let mut tokens = Tokens::new(source, TokenMode::Markup);
|
||||
let next = tokens.next();
|
||||
Self {
|
||||
source,
|
||||
errors: vec![],
|
||||
tokens,
|
||||
groups: vec![],
|
||||
next,
|
||||
next: next.clone(),
|
||||
peeked: next,
|
||||
prev_end: 0,
|
||||
next_start: 0,
|
||||
stack: vec![],
|
||||
children: vec![],
|
||||
success: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Finish parsing and return all errors.
|
||||
pub fn finish(self) -> Vec<Error> {
|
||||
self.errors
|
||||
}
|
||||
|
||||
/// The id of the parsed source file.
|
||||
pub fn id(&self) -> SourceId {
|
||||
self.source.id()
|
||||
}
|
||||
|
||||
/// Start a nested node.
|
||||
///
|
||||
/// Each start call has to be matched with a call to `end`,
|
||||
/// `end_with_custom_children`, `lift`, `abort`, or `end_or_abort`.
|
||||
pub fn start(&mut self) {
|
||||
self.stack.push(std::mem::take(&mut self.children));
|
||||
}
|
||||
|
||||
/// Start a nested node, preserving a number of the current children.
|
||||
pub fn start_with(&mut self, preserve: usize) {
|
||||
let preserved = self.children.drain(self.children.len() - preserve ..).collect();
|
||||
self.stack.push(std::mem::replace(&mut self.children, preserved));
|
||||
}
|
||||
|
||||
/// Filter the last children using the given predicate.
|
||||
pub fn filter_children<F, G>(&mut self, count: usize, f: F, error: G)
|
||||
where
|
||||
F: Fn(&Green) -> bool,
|
||||
G: Fn(&NodeKind) -> (ErrorPosition, EcoString),
|
||||
{
|
||||
for child in &mut self.children[count ..] {
|
||||
if !((self.tokens.mode() != TokenMode::Code
|
||||
|| Self::skip_type_ext(child.kind(), false))
|
||||
|| child.kind().is_error()
|
||||
|| f(&child))
|
||||
{
|
||||
let (pos, msg) = error(child.kind());
|
||||
let inner = std::mem::take(child);
|
||||
*child =
|
||||
GreenNode::with_child(NodeKind::Error(pos, msg), inner.len(), inner)
|
||||
.into();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn child(&self, child: usize) -> Option<&Green> {
|
||||
self.node_index_from_back(child).map(|i| &self.children[i])
|
||||
}
|
||||
|
||||
fn node_index_from_back(&self, child: usize) -> Option<usize> {
|
||||
let len = self.children.len();
|
||||
let code = self.tokens.mode() == TokenMode::Code;
|
||||
let mut seen = 0;
|
||||
for x in (0 .. len).rev() {
|
||||
if self.skip_type(self.children[x].kind()) && code {
|
||||
continue;
|
||||
}
|
||||
if seen == child {
|
||||
return Some(x);
|
||||
}
|
||||
seen += 1;
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// End the current node as a node of given `kind`.
|
||||
pub fn end(&mut self, kind: NodeKind) {
|
||||
let outer = self.stack.pop().unwrap();
|
||||
let mut children = std::mem::replace(&mut self.children, outer);
|
||||
|
||||
// have trailing whitespace continue to sit in self.children in code
|
||||
// mode.
|
||||
let mut remains = vec![];
|
||||
if self.tokens.mode() == TokenMode::Code {
|
||||
let len = children.len();
|
||||
for n in (0 .. len).rev() {
|
||||
if !self.skip_type(&children[n].kind()) {
|
||||
break;
|
||||
}
|
||||
|
||||
remains.push(children.pop().unwrap());
|
||||
}
|
||||
remains.reverse();
|
||||
}
|
||||
|
||||
let len = children.iter().map(|c| c.len()).sum();
|
||||
self.children
|
||||
.push(GreenNode::with_children(kind, len, children.into_iter()).into());
|
||||
self.children.extend(remains);
|
||||
self.success = true;
|
||||
}
|
||||
|
||||
/// End the current node as a node of given `kind`, and start a new node
|
||||
/// with the ended node as a first child. The function returns how many
|
||||
/// children the stack frame had before and how many were appended
/// (accounting for trivia).
|
||||
pub fn end_and_start_with(&mut self, kind: NodeKind) -> (usize, usize) {
|
||||
let stack_offset = self.stack.last().unwrap().len();
|
||||
self.end(kind);
|
||||
let diff = self.children.len() - stack_offset;
|
||||
self.start_with(diff);
|
||||
(stack_offset, diff)
|
||||
}
|
||||
|
||||
pub fn wrap(&mut self, index: usize, kind: NodeKind) {
|
||||
let index = self.node_index_from_back(index).unwrap();
|
||||
let child = std::mem::take(&mut self.children[index]);
|
||||
let item = GreenNode::with_child(kind, child.len(), child);
|
||||
self.children[index] = item.into();
|
||||
}
|
||||
|
||||
pub fn convert(&mut self, kind: NodeKind) {
|
||||
self.start();
|
||||
self.eat();
|
||||
self.end(kind);
|
||||
}
|
||||
|
||||
/// End the current node and undo its existence, inlining all accumulated
|
||||
/// children into its parent.
|
||||
pub fn lift(&mut self) {
|
||||
let outer = self.stack.pop().unwrap();
|
||||
let children = std::mem::replace(&mut self.children, outer);
|
||||
self.children.extend(children);
|
||||
self.success = true;
|
||||
}
|
||||
|
||||
/// End the current node and undo its existence, deleting all accumulated
|
||||
/// children.
|
||||
pub fn abort(&mut self, msg: impl Into<String>) {
|
||||
self.end(NodeKind::Error(ErrorPosition::Full, msg.into().into()));
|
||||
self.success = false;
|
||||
}
|
||||
|
||||
pub fn may_lift_abort(&mut self) -> bool {
|
||||
if !self.success {
|
||||
self.lift();
|
||||
self.success = false;
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn may_end_abort(&mut self, kind: NodeKind) -> bool {
|
||||
if !self.success {
|
||||
self.end(kind);
|
||||
self.success = false;
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// End the current node as a node of given `kind` if the last parse was
|
||||
/// successful, otherwise, abort.
|
||||
pub fn end_or_abort(&mut self, kind: NodeKind) -> bool {
|
||||
if self.success {
|
||||
self.end(kind);
|
||||
true
|
||||
} else {
|
||||
self.may_end_abort(kind);
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finish(&mut self) -> Rc<GreenNode> {
|
||||
if let Green::Node(n) = self.children.pop().unwrap() {
|
||||
n
|
||||
} else {
|
||||
panic!()
|
||||
}
|
||||
}
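Taken together, `start`, `eat`, `end`, and `finish` form a small green-tree builder. A hedged end-to-end sketch; the `NodeKind::Markup` root variant is assumed here for illustration and does not appear in this hunk.

// Sketch only: build a flat root node over every token of a source file.
fn parse_flat(source: &SourceFile) -> Rc<GreenNode> {
    let mut p = Parser::new(source);
    p.start();                 // open the root node
    while p.eat().is_some() {} // every token becomes a leaf child
    p.end(NodeKind::Markup);   // close the root (variant assumed)
    p.finish()                 // pop the finished green node
}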
|
||||
|
||||
/// Whether the end of the source string or group is reached.
|
||||
pub fn eof(&self) -> bool {
|
||||
self.peek().is_none()
|
||||
}
|
||||
|
||||
/// Consume the next token.
|
||||
pub fn eat(&mut self) -> Option<Token<'s>> {
|
||||
pub fn eat(&mut self) -> Option<NodeKind> {
|
||||
let token = self.peek()?;
|
||||
self.bump();
|
||||
Some(token)
|
||||
}
|
||||
|
||||
/// Eat the next token and return its source range.
|
||||
pub fn eat_span(&mut self) -> Span {
|
||||
let start = self.next_start();
|
||||
self.eat();
|
||||
Span::new(self.id(), start, self.prev_end())
|
||||
}
|
||||
|
||||
/// Consume the next token if it is the given one.
|
||||
pub fn eat_if(&mut self, t: Token) -> bool {
|
||||
pub fn eat_if(&mut self, t: NodeKind) -> bool {
|
||||
if self.peek() == Some(t) {
|
||||
self.bump();
|
||||
true
|
||||
@ -116,7 +271,7 @@ impl<'s> Parser<'s> {
|
||||
/// Consume the next token if the closure maps it to a `Some`-variant.
|
||||
pub fn eat_map<T, F>(&mut self, f: F) -> Option<T>
|
||||
where
|
||||
F: FnOnce(Token<'s>) -> Option<T>,
|
||||
F: FnOnce(NodeKind) -> Option<T>,
|
||||
{
|
||||
let token = self.peek()?;
|
||||
let mapped = f(token);
|
||||
@ -128,16 +283,16 @@ impl<'s> Parser<'s> {
|
||||
|
||||
/// Consume the next token if it is the given one and produce an error if
|
||||
/// not.
|
||||
pub fn eat_expect(&mut self, t: Token) -> bool {
|
||||
let eaten = self.eat_if(t);
|
||||
pub fn eat_expect(&mut self, t: NodeKind) -> bool {
|
||||
let eaten = self.eat_if(t.clone());
|
||||
if !eaten {
|
||||
self.expected_at(self.prev_end(), t.name());
|
||||
self.expected_at(&t.to_string());
|
||||
}
|
||||
eaten
|
||||
}
|
||||
|
||||
/// Consume the next token, debug-asserting that it is one of the given ones.
|
||||
pub fn eat_assert(&mut self, t: Token) {
|
||||
pub fn eat_assert(&mut self, t: NodeKind) {
|
||||
let next = self.eat();
|
||||
debug_assert_eq!(next, Some(t));
|
||||
}
|
||||
@ -145,7 +300,7 @@ impl<'s> Parser<'s> {
|
||||
/// Consume tokens while the condition is true.
|
||||
pub fn eat_while<F>(&mut self, mut f: F)
|
||||
where
|
||||
F: FnMut(Token<'s>) -> bool,
|
||||
F: FnMut(NodeKind) -> bool,
|
||||
{
|
||||
while self.peek().map_or(false, |t| f(t)) {
|
||||
self.eat();
|
||||
@ -153,42 +308,25 @@ impl<'s> Parser<'s> {
|
||||
}
|
||||
|
||||
/// Peek at the next token without consuming it.
|
||||
pub fn peek(&self) -> Option<Token<'s>> {
|
||||
self.peeked
|
||||
pub fn peek(&self) -> Option<NodeKind> {
|
||||
self.peeked.clone()
|
||||
}
|
||||
|
||||
/// Peek at the next token if it follows immediately after the last one
|
||||
/// without any whitespace in between.
|
||||
pub fn peek_direct(&self) -> Option<Token<'s>> {
|
||||
pub fn peek_direct(&self) -> Option<&NodeKind> {
|
||||
if self.next_start() == self.prev_end() {
|
||||
self.peeked
|
||||
self.peeked.as_ref()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Peek at the span of the next token.
|
||||
///
|
||||
/// Has length zero if `peek()` returns `None`.
|
||||
pub fn peek_span(&self) -> Span {
|
||||
Span::new(self.id(), self.next_start(), self.next_end())
|
||||
}
|
||||
|
||||
/// Peek at the source of the next token.
|
||||
pub fn peek_src(&self) -> &'s str {
|
||||
self.get(self.next_start() .. self.next_end())
|
||||
}
|
||||
|
||||
/// Checks whether the next token fulfills a condition.
|
||||
///
|
||||
/// Returns `false` if there is no next token.
|
||||
pub fn check<F>(&self, f: F) -> bool
|
||||
where
|
||||
F: FnOnce(Token<'s>) -> bool,
|
||||
{
|
||||
self.peek().map_or(false, f)
|
||||
}
|
||||
|
||||
/// The byte index at which the last token ended.
|
||||
///
|
||||
/// Refers to the end of the last _non-whitespace_ token in code mode.
|
||||
@ -219,11 +357,6 @@ impl<'s> Parser<'s> {
|
||||
self.source.get(range).unwrap()
|
||||
}
|
||||
|
||||
/// The span from `start` to [`self.prev_end()`](Self::prev_end).
|
||||
pub fn span_from(&self, start: impl Into<Pos>) -> Span {
|
||||
Span::new(self.id(), start, self.prev_end())
|
||||
}
|
||||
|
||||
/// Continue parsing in a group.
|
||||
///
|
||||
/// When the end delimiter of the group is reached, all subsequent calls to
|
||||
@ -232,19 +365,15 @@ impl<'s> Parser<'s> {
|
||||
///
|
||||
/// This panics if the next token does not start the given group.
|
||||
pub fn start_group(&mut self, kind: Group, mode: TokenMode) {
|
||||
self.groups.push(GroupEntry {
|
||||
kind,
|
||||
start: self.next_start(),
|
||||
prev_mode: self.tokens.mode(),
|
||||
});
|
||||
self.groups.push(GroupEntry { kind, prev_mode: self.tokens.mode() });
|
||||
|
||||
self.tokens.set_mode(mode);
|
||||
self.repeek();
|
||||
|
||||
match kind {
|
||||
Group::Paren => self.eat_assert(Token::LeftParen),
|
||||
Group::Bracket => self.eat_assert(Token::LeftBracket),
|
||||
Group::Brace => self.eat_assert(Token::LeftBrace),
|
||||
Group::Paren => self.eat_assert(NodeKind::LeftParen),
|
||||
Group::Bracket => self.eat_assert(NodeKind::LeftBracket),
|
||||
Group::Brace => self.eat_assert(NodeKind::LeftBrace),
|
||||
Group::Stmt => {}
|
||||
Group::Expr => {}
|
||||
Group::Imports => {}
|
||||
@ -254,7 +383,7 @@ impl<'s> Parser<'s> {
|
||||
/// End the parsing of a group.
|
||||
///
|
||||
/// This panics if no group was started.
|
||||
pub fn end_group(&mut self) -> Span {
|
||||
pub fn end_group(&mut self) {
|
||||
let prev_mode = self.tokens.mode();
|
||||
let group = self.groups.pop().expect("no started group");
|
||||
self.tokens.set_mode(group.prev_mode);
|
||||
@ -264,83 +393,125 @@ impl<'s> Parser<'s> {
|
||||
|
||||
// Eat the end delimiter if there is one.
|
||||
if let Some((end, required)) = match group.kind {
|
||||
Group::Paren => Some((Token::RightParen, true)),
|
||||
Group::Bracket => Some((Token::RightBracket, true)),
|
||||
Group::Brace => Some((Token::RightBrace, true)),
|
||||
Group::Stmt => Some((Token::Semicolon, false)),
|
||||
Group::Paren => Some((NodeKind::RightParen, true)),
|
||||
Group::Bracket => Some((NodeKind::RightBracket, true)),
|
||||
Group::Brace => Some((NodeKind::RightBrace, true)),
|
||||
Group::Stmt => Some((NodeKind::Semicolon, false)),
|
||||
Group::Expr => None,
|
||||
Group::Imports => None,
|
||||
} {
|
||||
if self.next == Some(end) {
|
||||
if self.next == Some(end.clone()) {
|
||||
// Bump the delimiter and return. No need to rescan in this case.
|
||||
self.bump();
|
||||
rescan = false;
|
||||
} else if required {
|
||||
self.error(
|
||||
self.next_start() .. self.next_start(),
|
||||
format!("expected {}", end.name()),
|
||||
);
|
||||
self.start();
|
||||
self.abort(format!("expected {}", end.to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
// Rescan the peeked token if the mode changed.
|
||||
if rescan {
|
||||
self.tokens.jump(self.prev_end());
|
||||
self.bump();
|
||||
|
||||
if prev_mode == TokenMode::Code {
|
||||
let len = self.children.len();
|
||||
for n in (0 .. len).rev() {
|
||||
if !self.skip_type(self.children[n].kind()) {
|
||||
break;
|
||||
}
|
||||
|
||||
self.children.pop();
|
||||
}
|
||||
}
|
||||
|
||||
self.fast_forward();
|
||||
}
|
||||
|
||||
Span::new(self.id(), group.start, self.prev_end())
|
||||
}
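A short sketch of how `start_group` and `end_group` are meant to be paired inside a grammar rule; illustrative only, a real rule would parse expressions instead of blindly eating tokens.

// Sketch: parse a parenthesized unit in code mode.
fn parenthesized(p: &mut Parser) {
    p.start_group(Group::Paren, TokenMode::Code);
    while !p.eof() {
        p.eat();
    }
    p.end_group(); // eats `)` or records an "expected )" error
}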
|
||||
|
||||
/// Add an error with location and message.
|
||||
pub fn error(&mut self, span: impl IntoSpan, message: impl Into<String>) {
|
||||
self.errors.push(Error::new(span.into_span(self.id()), message));
|
||||
}
|
||||
|
||||
/// Add an error that `what` was expected at the given span.
|
||||
pub fn expected_at(&mut self, span: impl IntoSpan, what: &str) {
|
||||
self.error(span, format!("expected {}", what));
|
||||
pub fn expected_at(&mut self, what: &str) {
|
||||
let mut found = self.children.len();
|
||||
for (i, node) in self.children.iter().enumerate().rev() {
|
||||
if !self.skip_type(node.kind()) {
|
||||
break;
|
||||
}
|
||||
found = i;
|
||||
}
|
||||
|
||||
self.expected_at_child(found, what);
|
||||
}
|
||||
|
||||
/// Add an error that `what` was expected at the given child index.
|
||||
pub fn expected_at_child(&mut self, index: usize, what: &str) {
|
||||
self.children.insert(
|
||||
index,
|
||||
GreenData::new(
|
||||
NodeKind::Error(ErrorPosition::Full, format!("expected {}", what).into()),
|
||||
0,
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
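Errors are no longer pushed into a separate `Vec<Error>`; they become `NodeKind::Error` nodes inside the tree itself. A hedged sketch of how a consumer might surface them afterwards; a `children()` accessor on `GreenNode` is assumed here, only `kind().is_error()` appears in this commit.

// Sketch only: count error nodes among the direct children of a green node.
fn direct_errors(node: &GreenNode) -> usize {
    node.children() // assumed accessor
        .iter()
        .filter(|child| child.kind().is_error())
        .count()
}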
|
||||
|
||||
/// Eat the next token and add an error that it is not the expected `thing`.
|
||||
pub fn expected(&mut self, what: &str) {
|
||||
let before = self.next_start();
|
||||
self.start();
|
||||
if let Some(found) = self.eat() {
|
||||
let after = self.prev_end();
|
||||
self.error(
|
||||
before .. after,
|
||||
format!("expected {}, found {}", what, found.name()),
|
||||
);
|
||||
self.abort(format!("expected {}, found {}", what, found.to_string()))
|
||||
} else {
|
||||
self.expected_at(self.next_start(), what);
|
||||
self.lift();
|
||||
self.expected_at(what);
|
||||
}
|
||||
}
|
||||
|
||||
/// Eat the next token and add an error that it is unexpected.
|
||||
pub fn unexpected(&mut self) {
|
||||
let before = self.next_start();
|
||||
self.start();
|
||||
if let Some(found) = self.eat() {
|
||||
let after = self.prev_end();
|
||||
self.error(before .. after, format!("unexpected {}", found.name()));
|
||||
self.abort(format!("unexpected {}", found.to_string()))
|
||||
} else {
|
||||
self.abort("unexpected end of file")
|
||||
}
|
||||
}
|
||||
|
||||
pub fn skip_type_ext(token: &NodeKind, stop_at_newline: bool) -> bool {
|
||||
match token {
|
||||
NodeKind::Space(n) => n < &1 || !stop_at_newline,
|
||||
NodeKind::LineComment => true,
|
||||
NodeKind::BlockComment => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn skip_type(&self, token: &NodeKind) -> bool {
|
||||
Self::skip_type_ext(token, self.stop_at_newline())
|
||||
}
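Trivia skipping is now centralized here. A small sanity sketch of its behaviour; `Space(n)` stores the number of newlines the whitespace contained.

// Comments and newline-free spaces are always skippable; whitespace with a
// newline only stops the parser when `stop_at_newline` is set.
#[test]
fn skippable_trivia() {
    assert!(Parser::skip_type_ext(&NodeKind::LineComment, true));
    assert!(Parser::skip_type_ext(&NodeKind::BlockComment, true));
    assert!(Parser::skip_type_ext(&NodeKind::Space(0), true));
    assert!(!Parser::skip_type_ext(&NodeKind::Space(1), true));
    assert!(Parser::skip_type_ext(&NodeKind::Space(1), false));
}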
|
||||
|
||||
/// Move to the next token.
|
||||
fn bump(&mut self) {
|
||||
self.prev_end = self.tokens.index().into();
|
||||
self.children.push(
|
||||
GreenData::new(
|
||||
self.next.clone().unwrap(),
|
||||
self.tokens.index() - self.next_start,
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
|
||||
self.fast_forward();
|
||||
}
|
||||
|
||||
pub fn fast_forward(&mut self) {
|
||||
if !self.next.as_ref().map_or(false, |x| self.skip_type(x)) {
|
||||
self.prev_end = self.tokens.index().into();
|
||||
}
|
||||
self.next_start = self.tokens.index().into();
|
||||
self.next = self.tokens.next();
|
||||
|
||||
if self.tokens.mode() == TokenMode::Code {
|
||||
// Skip whitespace and comments.
|
||||
while match self.next {
|
||||
Some(Token::Space(n)) => n < 1 || !self.stop_at_newline(),
|
||||
Some(Token::LineComment(_)) => true,
|
||||
Some(Token::BlockComment(_)) => true,
|
||||
_ => false,
|
||||
} {
|
||||
self.next_start = self.tokens.index().into();
|
||||
self.next = self.tokens.next();
|
||||
while self.next.as_ref().map_or(false, |x| self.skip_type(x)) {
|
||||
self.bump();
|
||||
}
|
||||
}
|
||||
|
||||
@ -349,19 +520,19 @@ impl<'s> Parser<'s> {
|
||||
|
||||
/// Take another look at the next token to recheck whether it ends a group.
|
||||
fn repeek(&mut self) {
|
||||
self.peeked = self.next;
|
||||
let token = match self.next {
|
||||
self.peeked = self.next.clone();
|
||||
let token = match self.next.as_ref() {
|
||||
Some(token) => token,
|
||||
None => return,
|
||||
};
|
||||
|
||||
if match token {
|
||||
Token::RightParen => self.inside(Group::Paren),
|
||||
Token::RightBracket => self.inside(Group::Bracket),
|
||||
Token::RightBrace => self.inside(Group::Brace),
|
||||
Token::Semicolon => self.inside(Group::Stmt),
|
||||
Token::From => self.inside(Group::Imports),
|
||||
Token::Space(n) => n >= 1 && self.stop_at_newline(),
|
||||
NodeKind::RightParen => self.inside(Group::Paren),
|
||||
NodeKind::RightBracket => self.inside(Group::Bracket),
|
||||
NodeKind::RightBrace => self.inside(Group::Brace),
|
||||
NodeKind::Semicolon => self.inside(Group::Stmt),
|
||||
NodeKind::From => self.inside(Group::Imports),
|
||||
NodeKind::Space(n) => n > &0 && self.stop_at_newline(),
|
||||
_ => false,
|
||||
} {
|
||||
self.peeked = None;
|
||||
@ -380,4 +551,22 @@ impl<'s> Parser<'s> {
|
||||
fn inside(&self, kind: Group) -> bool {
|
||||
self.groups.iter().any(|g| g.kind == kind)
|
||||
}
|
||||
|
||||
pub fn last_child(&self) -> Option<&Green> {
|
||||
self.children.last()
|
||||
}
|
||||
|
||||
pub fn success(&mut self) -> bool {
|
||||
let s = self.success;
|
||||
self.success = true;
|
||||
s
|
||||
}
|
||||
|
||||
pub fn unsuccessful(&mut self) {
|
||||
self.success = false;
|
||||
}
|
||||
|
||||
pub fn child_count(&self) -> usize {
|
||||
self.children.len()
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::{is_newline, Scanner};
|
||||
use crate::syntax::{Ident, RawNode, Span};
|
||||
use crate::syntax::RawToken;
|
||||
use crate::util::EcoString;
|
||||
|
||||
/// Resolve all escape sequences in a string.
|
||||
@ -48,21 +48,28 @@ pub fn resolve_hex(sequence: &str) -> Option<char> {
|
||||
}
|
||||
|
||||
/// Resolve the language tag and trim the raw text.
|
||||
pub fn resolve_raw(span: Span, column: usize, backticks: usize, text: &str) -> RawNode {
|
||||
pub fn resolve_raw(
|
||||
column: usize,
|
||||
backticks: u8,
|
||||
text: &str,
|
||||
terminated: bool,
|
||||
) -> RawToken {
|
||||
if backticks > 1 {
|
||||
let (tag, inner) = split_at_lang_tag(text);
|
||||
let (text, block) = trim_and_split_raw(column, inner);
|
||||
RawNode {
|
||||
span,
|
||||
lang: Ident::new(tag, span.with_end(span.start + tag.len())),
|
||||
RawToken {
|
||||
lang: Some(tag.into()),
|
||||
text: text.into(),
|
||||
backticks,
|
||||
terminated,
|
||||
block,
|
||||
}
|
||||
} else {
|
||||
RawNode {
|
||||
span,
|
||||
RawToken {
|
||||
lang: None,
|
||||
text: split_lines(text).join("\n").into(),
|
||||
backticks,
|
||||
terminated,
|
||||
block: false,
|
||||
}
|
||||
}
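The resolver now also threads the backtick count and a `terminated` flag into the token. A small sketch of its output for a typical tagged block, written in the style of the tests further down in this file.

// Sketch: three backticks, a `rs` language tag, one leading space to trim.
#[test]
fn resolve_raw_with_lang_tag() {
    let token = resolve_raw(0, 3, "rs fn main() {}", true);
    assert_eq!(token.lang.as_deref(), Some("rs"));
    assert_eq!(token.text, "fn main() {}");
    assert_eq!(token.backticks, 3);
    assert!(token.terminated);
    assert!(!token.block);
}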
|
||||
@ -140,7 +147,6 @@ fn split_lines(text: &str) -> Vec<&str> {
|
||||
#[cfg(test)]
|
||||
#[rustfmt::skip]
|
||||
mod tests {
|
||||
use crate::syntax::Span;
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
@ -175,8 +181,8 @@ mod tests {
|
||||
test("typst\n it!", "typst", "\n it!");
|
||||
test("typst\n it!", "typst", "\n it!");
|
||||
test("abc`", "abc", "`");
|
||||
test(" hi", "", " hi");
|
||||
test("`", "", "`");
|
||||
test(" hi", "", " hi");
|
||||
test("`", "", "`");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -184,13 +190,13 @@ mod tests {
|
||||
#[track_caller]
|
||||
fn test(
|
||||
column: usize,
|
||||
backticks: usize,
|
||||
backticks: u8,
|
||||
raw: &str,
|
||||
lang: Option<&str>,
|
||||
text: &str,
|
||||
block: bool,
|
||||
) {
|
||||
let node = resolve_raw(Span::detached(), column, backticks, raw);
|
||||
let node = resolve_raw(column, backticks, raw, true);
|
||||
assert_eq!(node.lang.as_deref(), lang);
|
||||
assert_eq!(node.text, text);
|
||||
assert_eq!(node.block, block);
|
||||
@ -204,15 +210,15 @@ mod tests {
|
||||
// More than one backtick with lang tag.
|
||||
test(0, 2, "js alert()", Some("js"), "alert()", false);
|
||||
test(0, 3, "py quit(\n\n)", Some("py"), "quit(\n\n)", true);
|
||||
test(0, 2, "♥", None, "", false);
|
||||
test(0, 2, "♥", Some("♥"), "", false);
|
||||
|
||||
// Trimming of whitespace (tested more thoroughly in separate test).
|
||||
test(0, 2, " a", None, "a", false);
|
||||
test(0, 2, " a", None, " a", false);
|
||||
test(0, 2, " \na", None, "a", true);
|
||||
test(0, 2, " a", Some(""), "a", false);
|
||||
test(0, 2, " a", Some(""), " a", false);
|
||||
test(0, 2, " \na", Some(""), "a", true);
|
||||
|
||||
// Dedenting
|
||||
test(2, 3, " def foo():\n bar()", None, "def foo():\n bar()", true);
|
||||
test(2, 3, " def foo():\n bar()", Some(""), "def foo():\n bar()", true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -1,9 +1,13 @@
|
||||
use super::{is_newline, Scanner};
|
||||
use super::{is_newline, resolve_raw, Scanner};
|
||||
use crate::geom::{AngularUnit, LengthUnit};
|
||||
use crate::parse::resolve::{resolve_hex, resolve_string};
|
||||
use crate::source::SourceFile;
|
||||
use crate::syntax::*;
|
||||
use crate::util::EcoString;
|
||||
|
||||
/// An iterator over the tokens of a string of source code.
|
||||
pub struct Tokens<'s> {
|
||||
source: &'s SourceFile,
|
||||
s: Scanner<'s>,
|
||||
mode: TokenMode,
|
||||
}
|
||||
@ -20,8 +24,12 @@ pub enum TokenMode {
|
||||
impl<'s> Tokens<'s> {
|
||||
/// Create a new token iterator with the given mode.
|
||||
#[inline]
|
||||
pub fn new(src: &'s str, mode: TokenMode) -> Self {
|
||||
Self { s: Scanner::new(src), mode }
|
||||
pub fn new(source: &'s SourceFile, mode: TokenMode) -> Self {
|
||||
Self {
|
||||
s: Scanner::new(source.src()),
|
||||
source,
|
||||
mode,
|
||||
}
|
||||
}
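Because raw blocks now need column information, the tokenizer takes the whole `SourceFile` instead of a bare string. A construction sketch; the `SourceFile::detached` constructor is an assumption for illustration and is not part of this diff.

// Sketch only: collect every token kind of a small markup snippet.
fn token_kinds(src: &str) -> Vec<NodeKind> {
    let source = SourceFile::detached(src); // assumed constructor
    Tokens::new(&source, TokenMode::Markup).collect()
}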
|
||||
|
||||
/// Get the current token mode.
|
||||
@ -59,7 +67,7 @@ impl<'s> Tokens<'s> {
|
||||
}
|
||||
|
||||
impl<'s> Iterator for Tokens<'s> {
|
||||
type Item = Token<'s>;
|
||||
type Item = NodeKind;
|
||||
|
||||
/// Parse the next token in the source code.
|
||||
#[inline]
|
||||
@ -68,19 +76,21 @@ impl<'s> Iterator for Tokens<'s> {
|
||||
let c = self.s.eat()?;
|
||||
Some(match c {
|
||||
// Blocks and templates.
|
||||
'[' => Token::LeftBracket,
|
||||
']' => Token::RightBracket,
|
||||
'{' => Token::LeftBrace,
|
||||
'}' => Token::RightBrace,
|
||||
'[' => NodeKind::LeftBracket,
|
||||
']' => NodeKind::RightBracket,
|
||||
'{' => NodeKind::LeftBrace,
|
||||
'}' => NodeKind::RightBrace,
|
||||
|
||||
// Whitespace.
|
||||
' ' if self.s.check_or(true, |c| !c.is_whitespace()) => Token::Space(0),
|
||||
' ' if self.s.check_or(true, |c| !c.is_whitespace()) => NodeKind::Space(0),
|
||||
c if c.is_whitespace() => self.whitespace(),
|
||||
|
||||
// Comments with special case for URLs.
|
||||
'/' if self.s.eat_if('*') => self.block_comment(),
|
||||
'/' if !self.maybe_in_url() && self.s.eat_if('/') => self.line_comment(),
|
||||
'*' if self.s.eat_if('/') => Token::Invalid(self.s.eaten_from(start)),
|
||||
'*' if self.s.eat_if('/') => {
|
||||
NodeKind::Error(ErrorPosition::Full, self.s.eaten_from(start).into())
|
||||
}
|
||||
|
||||
// Other things.
|
||||
_ => match self.mode {
|
||||
@ -93,7 +103,7 @@ impl<'s> Iterator for Tokens<'s> {
|
||||
|
||||
impl<'s> Tokens<'s> {
|
||||
#[inline]
|
||||
fn markup(&mut self, start: usize, c: char) -> Token<'s> {
|
||||
fn markup(&mut self, start: usize, c: char) -> NodeKind {
|
||||
match c {
|
||||
// Escape sequences.
|
||||
'\\' => self.backslash(),
|
||||
@ -102,13 +112,15 @@ impl<'s> Tokens<'s> {
|
||||
'#' => self.hash(),
|
||||
|
||||
// Markup.
|
||||
'~' => Token::Tilde,
|
||||
'*' => Token::Star,
|
||||
'_' => Token::Underscore,
|
||||
'~' => NodeKind::NonBreakingSpace,
|
||||
'*' => NodeKind::Strong,
|
||||
'_' => NodeKind::Emph,
|
||||
'`' => self.raw(),
|
||||
'$' => self.math(),
|
||||
'-' => self.hyph(start),
|
||||
'=' if self.s.check_or(true, |c| c == '=' || c.is_whitespace()) => Token::Eq,
|
||||
'-' => self.hyph(),
|
||||
'=' if self.s.check_or(true, |c| c == '=' || c.is_whitespace()) => {
|
||||
NodeKind::Eq
|
||||
}
|
||||
c if c == '.' || c.is_ascii_digit() => self.numbering(start, c),
|
||||
|
||||
// Plain text.
|
||||
@ -116,35 +128,35 @@ impl<'s> Tokens<'s> {
|
||||
}
|
||||
}
|
||||
|
||||
fn code(&mut self, start: usize, c: char) -> Token<'s> {
|
||||
fn code(&mut self, start: usize, c: char) -> NodeKind {
|
||||
match c {
|
||||
// Parens.
|
||||
'(' => Token::LeftParen,
|
||||
')' => Token::RightParen,
|
||||
'(' => NodeKind::LeftParen,
|
||||
')' => NodeKind::RightParen,
|
||||
|
||||
// Length two.
|
||||
'=' if self.s.eat_if('=') => Token::EqEq,
|
||||
'!' if self.s.eat_if('=') => Token::ExclEq,
|
||||
'<' if self.s.eat_if('=') => Token::LtEq,
|
||||
'>' if self.s.eat_if('=') => Token::GtEq,
|
||||
'+' if self.s.eat_if('=') => Token::PlusEq,
|
||||
'-' if self.s.eat_if('=') => Token::HyphEq,
|
||||
'*' if self.s.eat_if('=') => Token::StarEq,
|
||||
'/' if self.s.eat_if('=') => Token::SlashEq,
|
||||
'.' if self.s.eat_if('.') => Token::Dots,
|
||||
'=' if self.s.eat_if('>') => Token::Arrow,
|
||||
'=' if self.s.eat_if('=') => NodeKind::EqEq,
|
||||
'!' if self.s.eat_if('=') => NodeKind::ExclEq,
|
||||
'<' if self.s.eat_if('=') => NodeKind::LtEq,
|
||||
'>' if self.s.eat_if('=') => NodeKind::GtEq,
|
||||
'+' if self.s.eat_if('=') => NodeKind::PlusEq,
|
||||
'-' if self.s.eat_if('=') => NodeKind::HyphEq,
|
||||
'*' if self.s.eat_if('=') => NodeKind::StarEq,
|
||||
'/' if self.s.eat_if('=') => NodeKind::SlashEq,
|
||||
'.' if self.s.eat_if('.') => NodeKind::Dots,
|
||||
'=' if self.s.eat_if('>') => NodeKind::Arrow,
|
||||
|
||||
// Length one.
|
||||
',' => Token::Comma,
|
||||
';' => Token::Semicolon,
|
||||
':' => Token::Colon,
|
||||
'+' => Token::Plus,
|
||||
'-' => Token::Hyph,
|
||||
'*' => Token::Star,
|
||||
'/' => Token::Slash,
|
||||
'=' => Token::Eq,
|
||||
'<' => Token::Lt,
|
||||
'>' => Token::Gt,
|
||||
',' => NodeKind::Comma,
|
||||
';' => NodeKind::Semicolon,
|
||||
':' => NodeKind::Colon,
|
||||
'+' => NodeKind::Plus,
|
||||
'-' => NodeKind::Minus,
|
||||
'*' => NodeKind::Star,
|
||||
'/' => NodeKind::Slash,
|
||||
'=' => NodeKind::Eq,
|
||||
'<' => NodeKind::Lt,
|
||||
'>' => NodeKind::Gt,
|
||||
|
||||
// Identifiers.
|
||||
c if is_id_start(c) => self.ident(start),
|
||||
@ -159,12 +171,12 @@ impl<'s> Tokens<'s> {
|
||||
// Strings.
|
||||
'"' => self.string(),
|
||||
|
||||
_ => Token::Invalid(self.s.eaten_from(start)),
|
||||
_ => NodeKind::Error(ErrorPosition::Full, self.s.eaten_from(start).into()),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn text(&mut self, start: usize) -> Token<'s> {
|
||||
fn text(&mut self, start: usize) -> NodeKind {
|
||||
macro_rules! table {
|
||||
($($c:literal)|*) => {{
|
||||
let mut t = [false; 128];
|
||||
@ -186,10 +198,10 @@ impl<'s> Tokens<'s> {
|
||||
TABLE.get(c as usize).copied().unwrap_or_else(|| c.is_whitespace())
|
||||
});
|
||||
|
||||
Token::Text(self.s.eaten_from(start))
|
||||
NodeKind::Text(resolve_string(self.s.eaten_from(start)))
|
||||
}
|
||||
|
||||
fn whitespace(&mut self) -> Token<'s> {
|
||||
fn whitespace(&mut self) -> NodeKind {
|
||||
self.s.uneat();
|
||||
|
||||
// Count the number of newlines.
|
||||
@ -208,10 +220,10 @@ impl<'s> Tokens<'s> {
|
||||
}
|
||||
}
|
||||
|
||||
Token::Space(newlines)
|
||||
NodeKind::Space(newlines)
|
||||
}
|
||||
|
||||
fn backslash(&mut self) -> Token<'s> {
|
||||
fn backslash(&mut self) -> NodeKind {
|
||||
if let Some(c) = self.s.peek() {
|
||||
match c {
|
||||
// Backslash and comments.
|
||||
@ -220,61 +232,61 @@ impl<'s> Tokens<'s> {
|
||||
'[' | ']' | '{' | '}' | '#' |
|
||||
// Markup.
|
||||
'*' | '_' | '=' | '~' | '`' | '$' => {
|
||||
let start = self.s.index();
|
||||
self.s.eat_assert(c);
|
||||
Token::Text(&self.s.eaten_from(start))
|
||||
NodeKind::Text(c.into())
|
||||
}
|
||||
'u' if self.s.rest().starts_with("u{") => {
|
||||
self.s.eat_assert('u');
|
||||
self.s.eat_assert('{');
|
||||
Token::UnicodeEscape(UnicodeEscapeToken {
|
||||
// Allow more than `ascii_hexdigit` for better error recovery.
|
||||
sequence: self.s.eat_while(|c| c.is_ascii_alphanumeric()),
|
||||
terminated: self.s.eat_if('}'),
|
||||
let sequence: EcoString = self.s.eat_while(|c| c.is_ascii_alphanumeric()).into();
|
||||
NodeKind::UnicodeEscape(UnicodeEscapeToken {
|
||||
character: resolve_hex(&sequence),
|
||||
sequence,
|
||||
terminated: self.s.eat_if('}')
|
||||
})
|
||||
}
|
||||
c if c.is_whitespace() => Token::Backslash,
|
||||
_ => Token::Text("\\"),
|
||||
c if c.is_whitespace() => NodeKind::Linebreak,
|
||||
_ => NodeKind::Text("\\".into()),
|
||||
}
|
||||
} else {
|
||||
Token::Backslash
|
||||
NodeKind::Linebreak
|
||||
}
|
||||
}
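Escape sequences are now resolved eagerly: the token carries both the raw digits and the decoded character via `resolve_hex`, which the hunks shown here leave untouched. A quick reminder of what it yields:

// `\u{1f600}` decodes to U+1F600; non-hex input yields `None`.
#[test]
fn hex_escape_decoding() {
    assert_eq!(resolve_hex("1f600"), Some('😀'));
    assert_eq!(resolve_hex("defg"), None);
}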
|
||||
|
||||
#[inline]
|
||||
fn hash(&mut self) -> Token<'s> {
|
||||
fn hash(&mut self) -> NodeKind {
|
||||
if self.s.check_or(false, is_id_start) {
|
||||
let read = self.s.eat_while(is_id_continue);
|
||||
if let Some(keyword) = keyword(read) {
|
||||
keyword
|
||||
} else {
|
||||
Token::Ident(read)
|
||||
NodeKind::Ident(read.into())
|
||||
}
|
||||
} else {
|
||||
Token::Text("#")
|
||||
NodeKind::Text("#".into())
|
||||
}
|
||||
}
|
||||
|
||||
fn hyph(&mut self, start: usize) -> Token<'s> {
|
||||
fn hyph(&mut self) -> NodeKind {
|
||||
if self.s.eat_if('-') {
|
||||
if self.s.eat_if('-') {
|
||||
Token::HyphHyphHyph
|
||||
NodeKind::EmDash
|
||||
} else {
|
||||
Token::HyphHyph
|
||||
NodeKind::EnDash
|
||||
}
|
||||
} else if self.s.check_or(true, char::is_whitespace) {
|
||||
Token::Hyph
|
||||
NodeKind::ListBullet
|
||||
} else {
|
||||
Token::Text(self.s.eaten_from(start))
|
||||
NodeKind::Text("-".into())
|
||||
}
|
||||
}
|
||||
|
||||
fn numbering(&mut self, start: usize, c: char) -> Token<'s> {
|
||||
fn numbering(&mut self, start: usize, c: char) -> NodeKind {
|
||||
let number = if c != '.' {
|
||||
self.s.eat_while(|c| c.is_ascii_digit());
|
||||
let read = self.s.eaten_from(start);
|
||||
if !self.s.eat_if('.') {
|
||||
return Token::Text(read);
|
||||
return NodeKind::Text(self.s.eaten_from(start).into());
|
||||
}
|
||||
read.parse().ok()
|
||||
} else {
|
||||
@ -282,21 +294,28 @@ impl<'s> Tokens<'s> {
|
||||
};
|
||||
|
||||
if self.s.check_or(true, char::is_whitespace) {
|
||||
Token::Numbering(number)
|
||||
NodeKind::EnumNumbering(number)
|
||||
} else {
|
||||
Token::Text(self.s.eaten_from(start))
|
||||
NodeKind::Text(self.s.eaten_from(start).into())
|
||||
}
|
||||
}
|
||||
|
||||
fn raw(&mut self) -> Token<'s> {
|
||||
fn raw(&mut self) -> NodeKind {
|
||||
let column = self.source.byte_to_column(self.s.index() - 1).unwrap();
|
||||
let mut backticks = 1;
|
||||
while self.s.eat_if('`') {
|
||||
while self.s.eat_if('`') && backticks < u8::MAX {
|
||||
backticks += 1;
|
||||
}
|
||||
|
||||
// Special case for empty inline block.
|
||||
if backticks == 2 {
|
||||
return Token::Raw(RawToken { text: "", backticks: 1, terminated: true });
|
||||
return NodeKind::Raw(RawToken {
|
||||
text: EcoString::new(),
|
||||
lang: None,
|
||||
backticks: 1,
|
||||
terminated: true,
|
||||
block: false,
|
||||
});
|
||||
}
|
||||
|
||||
let start = self.s.index();
|
||||
@ -311,16 +330,17 @@ impl<'s> Tokens<'s> {
|
||||
}
|
||||
|
||||
let terminated = found == backticks;
|
||||
let end = self.s.index() - if terminated { found } else { 0 };
|
||||
let end = self.s.index() - if terminated { found as usize } else { 0 };
|
||||
|
||||
Token::Raw(RawToken {
|
||||
text: self.s.get(start .. end),
|
||||
NodeKind::Raw(resolve_raw(
|
||||
column,
|
||||
backticks,
|
||||
self.s.get(start .. end).into(),
|
||||
terminated,
|
||||
})
|
||||
))
|
||||
}
|
||||
|
||||
fn math(&mut self) -> Token<'s> {
|
||||
fn math(&mut self) -> NodeKind {
|
||||
let mut display = false;
|
||||
if self.s.eat_if('[') {
|
||||
display = true;
|
||||
@ -350,25 +370,25 @@ impl<'s> Tokens<'s> {
|
||||
(true, true) => 2,
|
||||
};
|
||||
|
||||
Token::Math(MathToken {
|
||||
formula: self.s.get(start .. end),
|
||||
NodeKind::Math(MathToken {
|
||||
formula: self.s.get(start .. end).into(),
|
||||
display,
|
||||
terminated,
|
||||
})
|
||||
}
|
||||
|
||||
fn ident(&mut self, start: usize) -> Token<'s> {
|
||||
fn ident(&mut self, start: usize) -> NodeKind {
|
||||
self.s.eat_while(is_id_continue);
|
||||
match self.s.eaten_from(start) {
|
||||
"none" => Token::None,
|
||||
"auto" => Token::Auto,
|
||||
"true" => Token::Bool(true),
|
||||
"false" => Token::Bool(false),
|
||||
id => keyword(id).unwrap_or(Token::Ident(id)),
|
||||
"none" => NodeKind::None,
|
||||
"auto" => NodeKind::Auto,
|
||||
"true" => NodeKind::Bool(true),
|
||||
"false" => NodeKind::Bool(false),
|
||||
id => keyword(id).unwrap_or(NodeKind::Ident(id.into())),
|
||||
}
|
||||
}
|
||||
|
||||
fn number(&mut self, start: usize, c: char) -> Token<'s> {
|
||||
fn number(&mut self, start: usize, c: char) -> NodeKind {
|
||||
// Read the first part (integer or fractional depending on `first`).
|
||||
self.s.eat_while(|c| c.is_ascii_digit());
|
||||
|
||||
@ -380,7 +400,9 @@ impl<'s> Tokens<'s> {
|
||||
|
||||
// Read the exponent.
|
||||
if self.s.eat_if('e') || self.s.eat_if('E') {
|
||||
let _ = self.s.eat_if('+') || self.s.eat_if('-');
|
||||
if !self.s.eat_if('+') {
|
||||
self.s.eat_if('-');
|
||||
}
|
||||
self.s.eat_while(|c| c.is_ascii_digit());
|
||||
}
|
||||
|
||||
@ -396,55 +418,53 @@ impl<'s> Tokens<'s> {
|
||||
|
||||
// Find out whether it is a simple number.
|
||||
if suffix.is_empty() {
|
||||
if let Ok(int) = number.parse::<i64>() {
|
||||
return Token::Int(int);
|
||||
} else if let Ok(float) = number.parse::<f64>() {
|
||||
return Token::Float(float);
|
||||
if let Ok(i) = number.parse::<i64>() {
|
||||
return NodeKind::Int(i);
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise parse into the fitting numeric type.
|
||||
let build = match suffix {
|
||||
"%" => Token::Percent,
|
||||
"fr" => Token::Fraction,
|
||||
"pt" => |x| Token::Length(x, LengthUnit::Pt),
|
||||
"mm" => |x| Token::Length(x, LengthUnit::Mm),
|
||||
"cm" => |x| Token::Length(x, LengthUnit::Cm),
|
||||
"in" => |x| Token::Length(x, LengthUnit::In),
|
||||
"rad" => |x| Token::Angle(x, AngularUnit::Rad),
|
||||
"deg" => |x| Token::Angle(x, AngularUnit::Deg),
|
||||
_ => return Token::Invalid(all),
|
||||
};
|
||||
|
||||
if let Ok(float) = number.parse::<f64>() {
|
||||
build(float)
|
||||
if let Ok(f) = number.parse::<f64>() {
|
||||
match suffix {
|
||||
"" => NodeKind::Float(f),
|
||||
"%" => NodeKind::Percentage(f),
|
||||
"fr" => NodeKind::Fraction(f),
|
||||
"pt" => NodeKind::Length(f, LengthUnit::Pt),
|
||||
"mm" => NodeKind::Length(f, LengthUnit::Mm),
|
||||
"cm" => NodeKind::Length(f, LengthUnit::Cm),
|
||||
"in" => NodeKind::Length(f, LengthUnit::In),
|
||||
"deg" => NodeKind::Angle(f, AngularUnit::Deg),
|
||||
"rad" => NodeKind::Angle(f, AngularUnit::Rad),
|
||||
_ => {
|
||||
return NodeKind::Error(ErrorPosition::Full, all.into());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Token::Invalid(all)
|
||||
NodeKind::Error(ErrorPosition::Full, all.into())
|
||||
}
|
||||
}
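// A self-contained sketch of the numeric dispatch above: an integer wins when there is
// no suffix, otherwise the value is parsed as `f64` and matched on the unit. Strings
// stand in for `NodeKind` here; the names are illustrative only, not crate API.
fn classify(number: &str, suffix: &str) -> String {
    if suffix.is_empty() {
        if let Ok(i) = number.parse::<i64>() {
            return format!("Int({})", i);
        }
    }
    match number.parse::<f64>() {
        Ok(f) => match suffix {
            "" => format!("Float({})", f),
            "%" => format!("Percentage({})", f),
            "fr" => format!("Fraction({})", f),
            "pt" | "mm" | "cm" | "in" => format!("Length({}, {})", f, suffix),
            "deg" | "rad" => format!("Angle({}, {})", f, suffix),
            _ => format!("Error({}{})", number, suffix),
        },
        Err(_) => format!("Error({}{})", number, suffix),
    }
}

#[test]
fn classify_sketch() {
    assert_eq!(classify("2", ""), "Int(2)");
    assert_eq!(classify("2.5", "cm"), "Length(2.5, cm)");
    assert_eq!(classify("1", "foo"), "Error(1foo)");
}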
fn string(&mut self) -> Token<'s> {
|
||||
|
||||
fn string(&mut self) -> NodeKind {
|
||||
let mut escaped = false;
|
||||
Token::Str(StrToken {
|
||||
string: self.s.eat_until(|c| {
|
||||
NodeKind::Str(StrToken {
|
||||
string: resolve_string(self.s.eat_until(|c| {
|
||||
if c == '"' && !escaped {
|
||||
true
|
||||
} else {
|
||||
escaped = c == '\\' && !escaped;
|
||||
false
|
||||
}
|
||||
}),
|
||||
})),
|
||||
terminated: self.s.eat_if('"'),
|
||||
})
|
||||
}
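// The escape-tracking scan above, extracted into a standalone helper so the idea is
// easy to exercise in isolation (a sketch, not code from this crate): a quote only
// closes the string if the previous character did not escape it.
fn find_closing_quote(s: &str) -> Option<usize> {
    let mut escaped = false;
    for (i, c) in s.char_indices() {
        if c == '"' && !escaped {
            return Some(i);
        }
        escaped = c == '\\' && !escaped;
    }
    None
}

fn main() {
    assert_eq!(find_closing_quote(r#"a\"bc" rest"#), Some(5));
    assert_eq!(find_closing_quote("never closed"), None);
}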
fn line_comment(&mut self) -> Token<'s> {
|
||||
Token::LineComment(self.s.eat_until(is_newline))
|
||||
fn line_comment(&mut self) -> NodeKind {
|
||||
self.s.eat_until(is_newline);
|
||||
NodeKind::LineComment
|
||||
}
|
||||
|
||||
fn block_comment(&mut self) -> Token<'s> {
|
||||
let start = self.s.index();
|
||||
|
||||
fn block_comment(&mut self) -> NodeKind {
|
||||
let mut state = '_';
|
||||
let mut depth = 1;
|
||||
|
||||
@ -466,10 +486,7 @@ impl<'s> Tokens<'s> {
|
||||
}
|
||||
}
|
||||
|
||||
let terminated = depth == 0;
|
||||
let end = self.s.index() - if terminated { 2 } else { 0 };
|
||||
|
||||
Token::BlockComment(self.s.get(start .. end))
|
||||
NodeKind::BlockComment
|
||||
}
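// The nesting rule that the (partly elided) state machine above implements, written as
// a standalone scan for illustration only: after the opening `/*`, each `/*` raises the
// depth, each `*/` lowers it, and the comment terminates once the depth reaches zero.
fn block_comment_len(after_opening: &str) -> Option<usize> {
    let mut depth = 1usize;
    let mut prev = '_';
    for (i, c) in after_opening.char_indices() {
        match (prev, c) {
            ('*', '/') => {
                depth -= 1;
                if depth == 0 {
                    return Some(i + 1);
                }
                prev = '_'; // consume the pair so `*/*` is not double-counted
            }
            ('/', '*') => {
                depth += 1;
                prev = '_';
            }
            _ => prev = c,
        }
    }
    None
}

fn main() {
    assert_eq!(block_comment_len("/**/*/"), Some(6)); // one nested comment, then close
    assert_eq!(block_comment_len(" fine */ trailing"), Some(8));
    assert_eq!(block_comment_len("never closed"), None);
}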
fn maybe_in_url(&self) -> bool {
|
||||
@ -477,24 +494,24 @@ impl<'s> Tokens<'s> {
|
||||
}
|
||||
}
|
||||
|
||||
fn keyword(ident: &str) -> Option<Token<'static>> {
|
||||
fn keyword(ident: &str) -> Option<NodeKind> {
|
||||
Some(match ident {
|
||||
"not" => Token::Not,
|
||||
"and" => Token::And,
|
||||
"or" => Token::Or,
|
||||
"with" => Token::With,
|
||||
"let" => Token::Let,
|
||||
"if" => Token::If,
|
||||
"else" => Token::Else,
|
||||
"for" => Token::For,
|
||||
"in" => Token::In,
|
||||
"while" => Token::While,
|
||||
"break" => Token::Break,
|
||||
"continue" => Token::Continue,
|
||||
"return" => Token::Return,
|
||||
"import" => Token::Import,
|
||||
"include" => Token::Include,
|
||||
"from" => Token::From,
|
||||
"not" => NodeKind::Not,
|
||||
"and" => NodeKind::And,
|
||||
"or" => NodeKind::Or,
|
||||
"with" => NodeKind::With,
|
||||
"let" => NodeKind::Let,
|
||||
"if" => NodeKind::If,
|
||||
"else" => NodeKind::Else,
|
||||
"for" => NodeKind::For,
|
||||
"in" => NodeKind::In,
|
||||
"while" => NodeKind::While,
|
||||
"break" => NodeKind::Break,
|
||||
"continue" => NodeKind::Continue,
|
||||
"return" => NodeKind::Return,
|
||||
"import" => NodeKind::Import,
|
||||
"include" => NodeKind::Include,
|
||||
"from" => NodeKind::From,
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
@ -506,24 +523,56 @@ mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
use NodeKind::*;
|
||||
use Option::None;
|
||||
use Token::{Ident, *};
|
||||
use TokenMode::{Code, Markup};
|
||||
|
||||
const fn UnicodeEscape(sequence: &str, terminated: bool) -> Token {
|
||||
Token::UnicodeEscape(UnicodeEscapeToken { sequence, terminated })
|
||||
fn UnicodeEscape(sequence: &str, terminated: bool) -> NodeKind {
|
||||
NodeKind::UnicodeEscape(UnicodeEscapeToken {
|
||||
character: resolve_hex(sequence),
|
||||
sequence: sequence.into(),
|
||||
terminated,
|
||||
})
|
||||
}
|
||||
|
||||
const fn Raw(text: &str, backticks: usize, terminated: bool) -> Token {
|
||||
Token::Raw(RawToken { text, backticks, terminated })
|
||||
fn Raw(
|
||||
text: &str,
|
||||
lang: Option<&str>,
|
||||
backticks: u8,
|
||||
terminated: bool,
|
||||
block: bool,
|
||||
) -> NodeKind {
|
||||
NodeKind::Raw(RawToken {
|
||||
text: text.into(),
|
||||
lang: lang.map(Into::into),
|
||||
backticks,
|
||||
terminated,
|
||||
block,
|
||||
})
|
||||
}
|
||||
|
||||
const fn Math(formula: &str, display: bool, terminated: bool) -> Token {
|
||||
Token::Math(MathToken { formula, display, terminated })
|
||||
fn Math(formula: &str, display: bool, terminated: bool) -> NodeKind {
|
||||
NodeKind::Math(MathToken {
|
||||
formula: formula.into(),
|
||||
display,
|
||||
terminated,
|
||||
})
|
||||
}
|
||||
|
||||
const fn Str(string: &str, terminated: bool) -> Token {
|
||||
Token::Str(StrToken { string, terminated })
|
||||
fn Str(string: &str, terminated: bool) -> NodeKind {
|
||||
NodeKind::Str(StrToken { string: string.into(), terminated })
|
||||
}
|
||||
|
||||
fn Text(string: &str) -> NodeKind {
|
||||
NodeKind::Text(string.into())
|
||||
}
|
||||
|
||||
fn Ident(ident: &str) -> NodeKind {
|
||||
NodeKind::Ident(ident.into())
|
||||
}
|
||||
|
||||
fn Invalid(invalid: &str) -> NodeKind {
|
||||
NodeKind::Error(ErrorPosition::Full, invalid.into())
|
||||
}
|
||||
|
||||
/// Building blocks for suffix testing.
|
||||
@ -541,40 +590,6 @@ mod tests {
|
||||
/// - '/': symbols
|
||||
const BLOCKS: &str = " a1/";
|
||||
|
||||
/// Suffixes described by four-tuples of:
|
||||
///
|
||||
/// - block the suffix is part of
|
||||
/// - mode in which the suffix is applicable
|
||||
/// - the suffix string
|
||||
/// - the resulting suffix token
|
||||
const SUFFIXES: &[(char, Option<TokenMode>, &str, Token)] = &[
|
||||
// Whitespace suffixes.
|
||||
(' ', None, " ", Space(0)),
|
||||
(' ', None, "\n", Space(1)),
|
||||
(' ', None, "\r", Space(1)),
|
||||
(' ', None, "\r\n", Space(1)),
|
||||
// Letter suffixes.
|
||||
('a', Some(Markup), "hello", Text("hello")),
|
||||
('a', Some(Markup), "💚", Text("💚")),
|
||||
('a', Some(Code), "val", Ident("val")),
|
||||
('a', Some(Code), "α", Ident("α")),
|
||||
('a', Some(Code), "_", Ident("_")),
|
||||
// Number suffixes.
|
||||
('1', Some(Code), "2", Int(2)),
|
||||
('1', Some(Code), ".2", Float(0.2)),
|
||||
// Symbol suffixes.
|
||||
('/', None, "[", LeftBracket),
|
||||
('/', None, "//", LineComment("")),
|
||||
('/', None, "/**/", BlockComment("")),
|
||||
('/', Some(Markup), "*", Star),
|
||||
('/', Some(Markup), "$ $", Math(" ", false, true)),
|
||||
('/', Some(Markup), r"\\", Text(r"\")),
|
||||
('/', Some(Markup), "#let", Let),
|
||||
('/', Some(Code), "(", LeftParen),
|
||||
('/', Some(Code), ":", Colon),
|
||||
('/', Some(Code), "+=", PlusEq),
|
||||
];
|
||||
|
||||
macro_rules! t {
|
||||
(Both $($tts:tt)*) => {
|
||||
t!(Markup $($tts)*);
|
||||
@ -584,22 +599,56 @@ mod tests {
|
||||
// Test without suffix.
|
||||
t!(@$mode: $src => $($token),*);
|
||||
|
||||
// Suffixes described by four-tuples of:
|
||||
//
|
||||
// - block the suffix is part of
|
||||
// - mode in which the suffix is applicable
|
||||
// - the suffix string
|
||||
// - the resulting suffix NodeKind
|
||||
let suffixes: &[(char, Option<TokenMode>, &str, NodeKind)] = &[
|
||||
// Whitespace suffixes.
|
||||
(' ', None, " ", Space(0)),
|
||||
(' ', None, "\n", Space(1)),
|
||||
(' ', None, "\r", Space(1)),
|
||||
(' ', None, "\r\n", Space(1)),
|
||||
// Letter suffixes.
|
||||
('a', Some(Markup), "hello", Text("hello")),
|
||||
('a', Some(Markup), "💚", Text("💚")),
|
||||
('a', Some(Code), "val", Ident("val")),
|
||||
('a', Some(Code), "α", Ident("α")),
|
||||
('a', Some(Code), "_", Ident("_")),
|
||||
// Number suffixes.
|
||||
('1', Some(Code), "2", Int(2)),
|
||||
('1', Some(Code), ".2", Float(0.2)),
|
||||
// Symbol suffixes.
|
||||
('/', None, "[", LeftBracket),
|
||||
('/', None, "//", LineComment),
|
||||
('/', None, "/**/", BlockComment),
|
||||
('/', Some(Markup), "*", Strong),
|
||||
('/', Some(Markup), "$ $", Math(" ", false, true)),
|
||||
('/', Some(Markup), r"\\", Text("\\")),
|
||||
('/', Some(Markup), "#let", Let),
|
||||
('/', Some(Code), "(", LeftParen),
|
||||
('/', Some(Code), ":", Colon),
|
||||
('/', Some(Code), "+=", PlusEq),
|
||||
];
|
||||
|
||||
// Test with each applicable suffix.
|
||||
for &(block, mode, suffix, token) in SUFFIXES {
|
||||
for (block, mode, suffix, token) in suffixes {
|
||||
let src = $src;
|
||||
#[allow(unused_variables)]
|
||||
let blocks = BLOCKS;
|
||||
$(let blocks = $blocks;)?
|
||||
assert!(!blocks.contains(|c| !BLOCKS.contains(c)));
|
||||
if (mode.is_none() || mode == Some($mode)) && blocks.contains(block) {
|
||||
if (mode.is_none() || mode == &Some($mode)) && blocks.contains(*block) {
|
||||
t!(@$mode: format!("{}{}", src, suffix) => $($token,)* token);
|
||||
}
|
||||
}
|
||||
}};
|
||||
(@$mode:ident: $src:expr => $($token:expr),*) => {{
|
||||
let src = $src;
|
||||
let found = Tokens::new(&src, $mode).collect::<Vec<_>>();
|
||||
let expected = vec![$($token),*];
|
||||
let found = Tokens::new(&SourceFile::detached(src.clone()), $mode).collect::<Vec<_>>();
|
||||
let expected = vec![$($token.clone()),*];
|
||||
check(&src, found, expected);
|
||||
}};
|
||||
}
|
||||
@ -671,7 +720,7 @@ mod tests {
|
||||
|
||||
// Test text ends.
|
||||
t!(Markup[""]: "hello " => Text("hello"), Space(0));
|
||||
t!(Markup[""]: "hello~" => Text("hello"), Tilde);
|
||||
t!(Markup[""]: "hello~" => Text("hello"), NonBreakingSpace);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -713,16 +762,16 @@ mod tests {
|
||||
#[test]
|
||||
fn test_tokenize_markup_symbols() {
|
||||
// Test markup tokens.
|
||||
t!(Markup[" a1"]: "*" => Star);
|
||||
t!(Markup: "_" => Underscore);
|
||||
t!(Markup[" a1"]: "*" => Strong);
|
||||
t!(Markup: "_" => Emph);
|
||||
t!(Markup[""]: "===" => Eq, Eq, Eq);
|
||||
t!(Markup["a1/"]: "= " => Eq, Space(0));
|
||||
t!(Markup: "~" => Tilde);
|
||||
t!(Markup[" "]: r"\" => Backslash);
|
||||
t!(Markup["a "]: r"a--" => Text("a"), HyphHyph);
|
||||
t!(Markup["a1/"]: "- " => Hyph, Space(0));
|
||||
t!(Markup[" "]: "." => Numbering(None));
|
||||
t!(Markup[" "]: "1." => Numbering(Some(1)));
|
||||
t!(Markup: "~" => NonBreakingSpace);
|
||||
t!(Markup[" "]: r"\" => Linebreak);
|
||||
t!(Markup["a "]: r"a--" => Text("a"), EnDash);
|
||||
t!(Markup["a1/"]: "- " => ListBullet, Space(0));
|
||||
t!(Markup[" "]: "." => EnumNumbering(None));
|
||||
t!(Markup[" "]: "1." => EnumNumbering(Some(1)));
|
||||
t!(Markup[" "]: "1.a" => Text("1."), Text("a"));
|
||||
t!(Markup[" /"]: "a1." => Text("a1."));
|
||||
}
|
||||
@ -734,7 +783,7 @@ mod tests {
|
||||
t!(Code: ";" => Semicolon);
|
||||
t!(Code: ":" => Colon);
|
||||
t!(Code: "+" => Plus);
|
||||
t!(Code: "-" => Hyph);
|
||||
t!(Code: "-" => Minus);
|
||||
t!(Code[" a1"]: "*" => Star);
|
||||
t!(Code[" a1"]: "/" => Slash);
|
||||
t!(Code: "=" => Eq);
|
||||
@ -756,10 +805,10 @@ mod tests {
|
||||
t!(Code[" a/"]: "..." => Dots, Invalid("."));
|
||||
|
||||
// Test hyphen as symbol vs part of identifier.
|
||||
t!(Code[" /"]: "-1" => Hyph, Int(1));
|
||||
t!(Code[" /"]: "-a" => Hyph, Ident("a"));
|
||||
t!(Code[" /"]: "--1" => Hyph, Hyph, Int(1));
|
||||
t!(Code[" /"]: "--_a" => Hyph, Hyph, Ident("_a"));
|
||||
t!(Code[" /"]: "-1" => Minus, Int(1));
|
||||
t!(Code[" /"]: "-a" => Minus, Ident("a"));
|
||||
t!(Code[" /"]: "--1" => Minus, Minus, Int(1));
|
||||
t!(Code[" /"]: "--_a" => Minus, Minus, Ident("_a"));
|
||||
t!(Code[" /"]: "a-b" => Ident("a-b"));
|
||||
}
|
||||
|
||||
@ -776,13 +825,13 @@ mod tests {
|
||||
("import", Import),
|
||||
];
|
||||
|
||||
for &(s, t) in &list {
|
||||
for (s, t) in list.clone() {
|
||||
t!(Markup[" "]: format!("#{}", s) => t);
|
||||
t!(Markup[" "]: format!("#{0}#{0}", s) => t, t);
|
||||
t!(Markup[" /"]: format!("# {}", s) => Token::Text("#"), Space(0), Text(s));
|
||||
t!(Markup[" /"]: format!("# {}", s) => Text("#"), Space(0), Text(s));
|
||||
}
|
||||
|
||||
for &(s, t) in &list {
|
||||
for (s, t) in list {
|
||||
t!(Code[" "]: s => t);
|
||||
t!(Markup[" /"]: s => Text(s));
|
||||
}
|
||||
@ -796,25 +845,23 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_tokenize_raw_blocks() {
|
||||
let empty = Raw("", 1, true);
|
||||
|
||||
// Test basic raw block.
|
||||
t!(Markup: "``" => empty);
|
||||
t!(Markup: "`raw`" => Raw("raw", 1, true));
|
||||
t!(Markup[""]: "`]" => Raw("]", 1, false));
|
||||
t!(Markup: "``" => Raw("", None, 1, true, false));
|
||||
t!(Markup: "`raw`" => Raw("raw", None, 1, true, false));
|
||||
t!(Markup[""]: "`]" => Raw("]", None, 1, false, false));
|
||||
|
||||
// Test special symbols in raw block.
|
||||
t!(Markup: "`[brackets]`" => Raw("[brackets]", 1, true));
|
||||
t!(Markup[""]: r"`\`` " => Raw(r"\", 1, true), Raw(" ", 1, false));
|
||||
t!(Markup: "`[brackets]`" => Raw("[brackets]", None, 1, true, false));
|
||||
t!(Markup[""]: r"`\`` " => Raw(r"\", None, 1, true, false), Raw(" ", None, 1, false, false));
|
||||
|
||||
// Test separated closing backticks.
|
||||
t!(Markup: "```not `y`e`t```" => Raw("not `y`e`t", 3, true));
|
||||
t!(Markup: "```not `y`e`t```" => Raw("`y`e`t", Some("not"), 3, true, false));
|
||||
|
||||
// Test more backticks.
|
||||
t!(Markup: "``nope``" => empty, Text("nope"), empty);
|
||||
t!(Markup: "````🚀````" => Raw("🚀", 4, true));
|
||||
t!(Markup[""]: "`````👩🚀````noend" => Raw("👩🚀````noend", 5, false));
|
||||
t!(Markup[""]: "````raw``````" => Raw("raw", 4, true), empty);
|
||||
t!(Markup: "``nope``" => Raw("", None, 1, true, false), Text("nope"), Raw("", None, 1, true, false));
|
||||
t!(Markup: "````🚀````" => Raw("", Some("🚀"), 4, true, false));
|
||||
t!(Markup[""]: "`````👩🚀````noend" => Raw("````noend", Some("👩🚀"), 5, false, false));
|
||||
t!(Markup[""]: "````raw``````" => Raw("", Some("raw"), 4, true, false), Raw("", None, 1, true, false));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -896,8 +943,8 @@ mod tests {
|
||||
let nums = ints.iter().map(|&(k, v)| (k, v as f64)).chain(floats);
|
||||
|
||||
let suffixes = [
|
||||
("%", Percent as fn(f64) -> Token<'static>),
|
||||
("fr", Fraction as fn(f64) -> Token<'static>),
|
||||
("%", Percentage as fn(f64) -> NodeKind),
|
||||
("fr", Fraction as fn(f64) -> NodeKind),
|
||||
("mm", |x| Length(x, LengthUnit::Mm)),
|
||||
("pt", |x| Length(x, LengthUnit::Pt)),
|
||||
("cm", |x| Length(x, LengthUnit::Cm)),
|
||||
@ -930,54 +977,54 @@ mod tests {
|
||||
t!(Code[""]: "\"hi" => Str("hi", false));
|
||||
|
||||
// Test escaped quote.
|
||||
t!(Code: r#""a\"bc""# => Str(r#"a\"bc"#, true));
|
||||
t!(Code[""]: r#""\""# => Str(r#"\""#, false));
|
||||
t!(Code: r#""a\"bc""# => Str("a\"bc", true));
|
||||
t!(Code[""]: r#""\""# => Str("\"", false));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tokenize_line_comments() {
|
||||
// Test line comment with no trailing newline.
|
||||
t!(Both[""]: "//" => LineComment(""));
|
||||
t!(Both[""]: "//" => LineComment);
|
||||
|
||||
// Test line comment ends at newline.
|
||||
t!(Both["a1/"]: "//bc\n" => LineComment("bc"), Space(1));
|
||||
t!(Both["a1/"]: "// bc \n" => LineComment(" bc "), Space(1));
|
||||
t!(Both["a1/"]: "//bc\r\n" => LineComment("bc"), Space(1));
|
||||
t!(Both["a1/"]: "//bc\n" => LineComment, Space(1));
|
||||
t!(Both["a1/"]: "// bc \n" => LineComment, Space(1));
|
||||
t!(Both["a1/"]: "//bc\r\n" => LineComment, Space(1));
|
||||
|
||||
// Test nested line comments.
|
||||
t!(Both["a1/"]: "//a//b\n" => LineComment("a//b"), Space(1));
|
||||
t!(Both["a1/"]: "//a//b\n" => LineComment, Space(1));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tokenize_block_comments() {
|
||||
// Test basic block comments.
|
||||
t!(Both[""]: "/*" => BlockComment(""));
|
||||
t!(Both: "/**/" => BlockComment(""));
|
||||
t!(Both: "/*🏞*/" => BlockComment("🏞"));
|
||||
t!(Both: "/*\n*/" => BlockComment("\n"));
|
||||
t!(Both[""]: "/*" => BlockComment);
|
||||
t!(Both: "/**/" => BlockComment);
|
||||
t!(Both: "/*🏞*/" => BlockComment);
|
||||
t!(Both: "/*\n*/" => BlockComment);
|
||||
|
||||
// Test depth 1 and 2 nested block comments.
|
||||
t!(Both: "/* /* */ */" => BlockComment(" /* */ "));
|
||||
t!(Both: "/*/*/**/*/*/" => BlockComment("/*/**/*/"));
|
||||
t!(Both: "/* /* */ */" => BlockComment);
|
||||
t!(Both: "/*/*/**/*/*/" => BlockComment);
|
||||
|
||||
// Test two nested, one unclosed block comments.
|
||||
t!(Both[""]: "/*/*/**/*/" => BlockComment("/*/**/*/"));
|
||||
t!(Both[""]: "/*/*/**/*/" => BlockComment);
|
||||
|
||||
// Test all combinations of up to two following slashes and stars.
|
||||
t!(Both[""]: "/*" => BlockComment(""));
|
||||
t!(Both[""]: "/*/" => BlockComment("/"));
|
||||
t!(Both[""]: "/**" => BlockComment("*"));
|
||||
t!(Both[""]: "/*//" => BlockComment("//"));
|
||||
t!(Both[""]: "/*/*" => BlockComment("/*"));
|
||||
t!(Both[""]: "/**/" => BlockComment(""));
|
||||
t!(Both[""]: "/***" => BlockComment("**"));
|
||||
t!(Both[""]: "/*" => BlockComment);
|
||||
t!(Both[""]: "/*/" => BlockComment);
|
||||
t!(Both[""]: "/**" => BlockComment);
|
||||
t!(Both[""]: "/*//" => BlockComment);
|
||||
t!(Both[""]: "/*/*" => BlockComment);
|
||||
t!(Both[""]: "/**/" => BlockComment);
|
||||
t!(Both[""]: "/***" => BlockComment);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tokenize_invalid() {
|
||||
// Test invalidly closed block comments.
|
||||
t!(Both: "*/" => Token::Invalid("*/"));
|
||||
t!(Both: "/**/*/" => BlockComment(""), Token::Invalid("*/"));
|
||||
t!(Both: "*/" => Invalid("*/"));
|
||||
t!(Both: "/**/*/" => BlockComment, Invalid("*/"));
|
||||
|
||||
// Test invalid expressions.
|
||||
t!(Code: r"\" => Invalid(r"\"));
|
||||
@ -990,6 +1037,6 @@ mod tests {
|
||||
// Test invalid number suffixes.
|
||||
t!(Code[" /"]: "1foo" => Invalid("1foo"));
|
||||
t!(Code: "1p%" => Invalid("1p"), Invalid("%"));
|
||||
t!(Code: "1%%" => Percent(1.0), Invalid("%"));
|
||||
t!(Code: "1%%" => Percentage(1.0), Invalid("%"));
|
||||
}
|
||||
}
|
||||
|
@ -8,8 +8,10 @@ use std::rc::Rc;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::diag::{Error, TypResult};
|
||||
use crate::loading::{FileHash, Loader};
|
||||
use crate::parse::{is_newline, Scanner};
|
||||
use crate::parse::{is_newline, parse, Scanner};
|
||||
use crate::syntax::{GreenNode, Markup, NodeKind, RedNode};
|
||||
use crate::util::PathExt;
|
||||
|
||||
#[cfg(feature = "codespan-reporting")]
|
||||
@ -124,6 +126,7 @@ pub struct SourceFile {
|
||||
path: PathBuf,
|
||||
src: String,
|
||||
line_starts: Vec<usize>,
|
||||
root: Rc<GreenNode>,
|
||||
}
|
||||
|
||||
impl SourceFile {
|
||||
@ -131,11 +134,28 @@ impl SourceFile {
|
||||
pub fn new(id: SourceId, path: &Path, src: String) -> Self {
|
||||
let mut line_starts = vec![0];
|
||||
line_starts.extend(newlines(&src));
|
||||
Self {
|
||||
let mut init = Self {
|
||||
id,
|
||||
path: path.normalize(),
|
||||
src,
|
||||
line_starts,
|
||||
root: Rc::new(GreenNode::new(NodeKind::Markup, 0)),
|
||||
};
|
||||
|
||||
let root = parse(&init);
|
||||
init.root = root;
|
||||
init
|
||||
}
|
||||
|
||||
pub fn ast(&self) -> TypResult<Markup> {
|
||||
let res = RedNode::new_root(self.root.clone(), self.id);
|
||||
let errors = res.errors();
|
||||
if errors.is_empty() {
|
||||
Ok(res.ticket().cast().unwrap())
|
||||
} else {
|
||||
Err(Box::new(
|
||||
errors.into_iter().map(|(span, msg)| Error::new(span, msg)).collect(),
|
||||
))
|
||||
}
|
||||
}
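// A hedged usage sketch of the new parsing entry points: build a source file, then
// either get the typed AST or the accumulated parse errors. `SourceFile::detached` is
// the constructor the tokenizer tests above use; its exact signature and the module
// path `typst::source` are assumptions here, not confirmed by this diff.
use typst::source::SourceFile;

fn main() {
    let source = SourceFile::detached("= Heading\nHello *world*!");
    match source.ast() {
        Ok(markup) => println!("parsed {} top-level markup nodes", markup.len()),
        Err(errors) => eprintln!("{} parse error(s)", errors.len()),
    }
}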
@ -1,75 +1,50 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use super::{Ident, Markup, Span, Token};
|
||||
use super::{Ident, Markup, NodeKind, RedNode, RedTicket, Span, TypedNode};
|
||||
use crate::geom::{AngularUnit, LengthUnit};
|
||||
use crate::node;
|
||||
use crate::util::EcoString;
|
||||
|
||||
/// An expression.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum Expr {
|
||||
/// An identifier: `left`.
|
||||
Ident(Box<Ident>),
|
||||
Ident(Ident),
|
||||
/// A literal: `1`, `true`, ...
|
||||
Lit(Box<Lit>),
|
||||
Lit(Lit),
|
||||
/// An array expression: `(1, "hi", 12cm)`.
|
||||
Array(Box<ArrayExpr>),
|
||||
Array(ArrayExpr),
|
||||
/// A dictionary expression: `(thickness: 3pt, pattern: dashed)`.
|
||||
Dict(Box<DictExpr>),
|
||||
Dict(DictExpr),
|
||||
/// A template expression: `[*Hi* there!]`.
|
||||
Template(Box<TemplateExpr>),
|
||||
Template(TemplateExpr),
|
||||
/// A grouped expression: `(1 + 2)`.
|
||||
Group(Box<GroupExpr>),
|
||||
Group(GroupExpr),
|
||||
/// A block expression: `{ let x = 1; x + 2 }`.
|
||||
Block(Box<BlockExpr>),
|
||||
Block(BlockExpr),
|
||||
/// A unary operation: `-x`.
|
||||
Unary(Box<UnaryExpr>),
|
||||
Unary(UnaryExpr),
|
||||
/// A binary operation: `a + b`.
|
||||
Binary(Box<BinaryExpr>),
|
||||
Binary(BinaryExpr),
|
||||
/// An invocation of a function: `f(x, y)`.
|
||||
Call(Box<CallExpr>),
|
||||
Call(CallExpr),
|
||||
/// A closure expression: `(x, y) => z`.
|
||||
Closure(Box<ClosureExpr>),
|
||||
Closure(ClosureExpr),
|
||||
/// A with expression: `f with (x, y: 1)`.
|
||||
With(Box<WithExpr>),
|
||||
With(WithExpr),
|
||||
/// A let expression: `let x = 1`.
|
||||
Let(Box<LetExpr>),
|
||||
Let(LetExpr),
|
||||
/// An if-else expression: `if x { y } else { z }`.
|
||||
If(Box<IfExpr>),
|
||||
If(IfExpr),
|
||||
/// A while loop expression: `while x { y }`.
|
||||
While(Box<WhileExpr>),
|
||||
While(WhileExpr),
|
||||
/// A for loop expression: `for x in y { z }`.
|
||||
For(Box<ForExpr>),
|
||||
For(ForExpr),
|
||||
/// An import expression: `import a, b, c from "utils.typ"`.
|
||||
Import(Box<ImportExpr>),
|
||||
Import(ImportExpr),
|
||||
/// An include expression: `include "chapter1.typ"`.
|
||||
Include(Box<IncludeExpr>),
|
||||
Include(IncludeExpr),
|
||||
}
|
||||
|
||||
impl Expr {
|
||||
/// The source code location.
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
Self::Ident(v) => v.span,
|
||||
Self::Lit(v) => v.span(),
|
||||
Self::Array(v) => v.span,
|
||||
Self::Dict(v) => v.span,
|
||||
Self::Template(v) => v.span,
|
||||
Self::Group(v) => v.span,
|
||||
Self::Block(v) => v.span,
|
||||
Self::Unary(v) => v.span,
|
||||
Self::Binary(v) => v.span,
|
||||
Self::Call(v) => v.span,
|
||||
Self::Closure(v) => v.span,
|
||||
Self::With(v) => v.span,
|
||||
Self::Let(v) => v.span,
|
||||
Self::If(v) => v.span,
|
||||
Self::While(v) => v.span,
|
||||
Self::For(v) => v.span,
|
||||
Self::Import(v) => v.span,
|
||||
Self::Include(v) => v.span,
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether the expression can be shortened in markup with a hashtag.
|
||||
pub fn has_short_form(&self) -> bool {
|
||||
matches!(self,
|
||||
@ -83,6 +58,63 @@ impl Expr {
|
||||
| Self::Include(_)
|
||||
)
|
||||
}
|
||||
|
||||
/// Return the expression's span.
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
Self::Ident(ident) => ident.span,
|
||||
Self::Lit(lit) => lit.span(),
|
||||
Self::Array(array) => array.span(),
|
||||
Self::Dict(dict) => dict.span(),
|
||||
Self::Template(template) => template.span(),
|
||||
Self::Group(group) => group.span(),
|
||||
Self::Block(block) => block.span(),
|
||||
Self::Unary(unary) => unary.span(),
|
||||
Self::Binary(binary) => binary.span(),
|
||||
Self::Call(call) => call.span(),
|
||||
Self::Closure(closure) => closure.span(),
|
||||
Self::With(with) => with.span(),
|
||||
Self::Let(let_) => let_.span(),
|
||||
Self::If(if_) => if_.span(),
|
||||
Self::While(while_) => while_.span(),
|
||||
Self::For(for_) => for_.span(),
|
||||
Self::Import(import) => import.span(),
|
||||
Self::Include(include) => include.span(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TypedNode for Expr {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
match node.kind() {
|
||||
NodeKind::Ident(_) => Some(Self::Ident(Ident::cast_from(node).unwrap())),
|
||||
NodeKind::Array => Some(Self::Array(ArrayExpr::cast_from(node).unwrap())),
|
||||
NodeKind::Dict => Some(Self::Dict(DictExpr::cast_from(node).unwrap())),
|
||||
NodeKind::Template => {
|
||||
Some(Self::Template(TemplateExpr::cast_from(node).unwrap()))
|
||||
}
|
||||
NodeKind::Group => Some(Self::Group(GroupExpr::cast_from(node).unwrap())),
|
||||
NodeKind::Block => Some(Self::Block(BlockExpr::cast_from(node).unwrap())),
|
||||
NodeKind::Unary => Some(Self::Unary(UnaryExpr::cast_from(node).unwrap())),
|
||||
NodeKind::Binary => Some(Self::Binary(BinaryExpr::cast_from(node).unwrap())),
|
||||
NodeKind::Call => Some(Self::Call(CallExpr::cast_from(node).unwrap())),
|
||||
NodeKind::Closure => {
|
||||
Some(Self::Closure(ClosureExpr::cast_from(node).unwrap()))
|
||||
}
|
||||
NodeKind::WithExpr => Some(Self::With(WithExpr::cast_from(node).unwrap())),
|
||||
NodeKind::LetExpr => Some(Self::Let(LetExpr::cast_from(node).unwrap())),
|
||||
NodeKind::IfExpr => Some(Self::If(IfExpr::cast_from(node).unwrap())),
|
||||
NodeKind::WhileExpr => Some(Self::While(WhileExpr::cast_from(node).unwrap())),
|
||||
NodeKind::ForExpr => Some(Self::For(ForExpr::cast_from(node).unwrap())),
|
||||
NodeKind::ImportExpr => {
|
||||
Some(Self::Import(ImportExpr::cast_from(node).unwrap()))
|
||||
}
|
||||
NodeKind::IncludeExpr => {
|
||||
Some(Self::Include(IncludeExpr::cast_from(node).unwrap()))
|
||||
}
|
||||
_ => Some(Self::Lit(Lit::cast_from(node)?)),
|
||||
}
|
||||
}
|
||||
}
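// The `TypedNode` trait these impls target is defined elsewhere in the module; judging
// from its use here it plausibly has roughly this shape (an inference, not the actual
// definition in the crate):
pub trait TypedNode: Sized {
    /// Try to view an untyped red-tree ticket as this typed AST node.
    fn cast_from(node: RedTicket) -> Option<Self>;
}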
/// A literal: `1`, `true`, ...
|
||||
@ -113,94 +145,145 @@ pub enum Lit {
|
||||
Str(Span, EcoString),
|
||||
}
|
||||
|
||||
impl Lit {
|
||||
/// The source code location.
|
||||
pub fn span(&self) -> Span {
|
||||
match *self {
|
||||
Self::None(span) => span,
|
||||
Self::Auto(span) => span,
|
||||
Self::Bool(span, _) => span,
|
||||
Self::Int(span, _) => span,
|
||||
Self::Float(span, _) => span,
|
||||
Self::Length(span, _, _) => span,
|
||||
Self::Angle(span, _, _) => span,
|
||||
Self::Percent(span, _) => span,
|
||||
Self::Fractional(span, _) => span,
|
||||
Self::Str(span, _) => span,
|
||||
impl TypedNode for Lit {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
match node.kind() {
|
||||
NodeKind::None => Some(Self::None(node.own().span())),
|
||||
NodeKind::Auto => Some(Self::Auto(node.own().span())),
|
||||
NodeKind::Bool(b) => Some(Self::Bool(node.own().span(), *b)),
|
||||
NodeKind::Int(i) => Some(Self::Int(node.own().span(), *i)),
|
||||
NodeKind::Float(f) => Some(Self::Float(node.own().span(), *f)),
|
||||
NodeKind::Length(f, unit) => Some(Self::Length(node.own().span(), *f, *unit)),
|
||||
NodeKind::Angle(f, unit) => Some(Self::Angle(node.own().span(), *f, *unit)),
|
||||
NodeKind::Percentage(f) => Some(Self::Percent(node.own().span(), *f)),
|
||||
NodeKind::Fraction(f) => Some(Self::Fractional(node.own().span(), *f)),
|
||||
NodeKind::Str(s) => Some(Self::Str(node.own().span(), s.string.clone())),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An array expression: `(1, "hi", 12cm)`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct ArrayExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
/// The entries of the array.
|
||||
pub items: Vec<Expr>,
|
||||
}
|
||||
|
||||
/// A dictionary expression: `(thickness: 3pt, pattern: dashed)`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct DictExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
/// The named dictionary entries.
|
||||
pub items: Vec<Named>,
|
||||
}
|
||||
|
||||
/// A pair of a name and an expression: `pattern: dashed`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Named {
|
||||
/// The name: `pattern`.
|
||||
pub name: Ident,
|
||||
/// The right-hand side of the pair: `dashed`.
|
||||
pub expr: Expr,
|
||||
}
|
||||
|
||||
impl Named {
|
||||
/// The source code location.
|
||||
impl Lit {
|
||||
pub fn span(&self) -> Span {
|
||||
self.name.span.join(self.expr.span())
|
||||
match self {
|
||||
Self::None(span) => *span,
|
||||
Self::Auto(span) => *span,
|
||||
Self::Bool(span, _) => *span,
|
||||
Self::Int(span, _) => *span,
|
||||
Self::Float(span, _) => *span,
|
||||
Self::Length(span, _, _) => *span,
|
||||
Self::Angle(span, _, _) => *span,
|
||||
Self::Percent(span, _) => *span,
|
||||
Self::Fractional(span, _) => *span,
|
||||
Self::Str(span, _) => *span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A template expression: `[*Hi* there!]`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct TemplateExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// An array expression: `(1, "hi", 12cm)`.
|
||||
Array => ArrayExpr
|
||||
);
|
||||
|
||||
impl ArrayExpr {
|
||||
/// The array items.
|
||||
pub fn items(&self) -> Vec<Expr> {
|
||||
self.0.children().filter_map(RedTicket::cast).collect()
|
||||
}
|
||||
}
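// The `node!` macro comes from `crate::node` and is not shown in this diff. Judging by
// how the generated types use `self.0`, the `node!(Array => ArrayExpr)` invocation above
// might expand to roughly the following (a reconstruction for readability; derives and
// details omitted, not the macro's real output):
pub struct ArrayExpr(RedNode);

impl TypedNode for ArrayExpr {
    fn cast_from(node: RedTicket) -> Option<Self> {
        if node.kind() == &NodeKind::Array {
            Some(Self(node.own()))
        } else {
            None
        }
    }
}

impl ArrayExpr {
    /// The source code location.
    pub fn span(&self) -> Span {
        self.0.span()
    }
}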
node!(
|
||||
/// A dictionary expression: `(thickness: 3pt, pattern: dashed)`.
|
||||
Dict => DictExpr
|
||||
);
|
||||
|
||||
impl DictExpr {
|
||||
/// The named dictionary items.
|
||||
pub fn items(&self) -> Vec<Named> {
|
||||
self.0.children().filter_map(RedTicket::cast).collect()
|
||||
}
|
||||
}
|
||||
|
||||
node!(
|
||||
/// A pair of a name and an expression: `pattern: dashed`.
|
||||
Named
|
||||
);
|
||||
|
||||
impl Named {
|
||||
/// The name: `pattern`.
|
||||
pub fn name(&self) -> Ident {
|
||||
self.0.cast_first_child().expect("named pair is missing name ident")
|
||||
}
|
||||
|
||||
/// The right-hand side of the pair: `dashed`.
|
||||
pub fn expr(&self) -> Expr {
|
||||
self.0
|
||||
.children()
|
||||
.filter_map(RedTicket::cast)
|
||||
.nth(1)
|
||||
.expect("named pair is missing expression")
|
||||
}
|
||||
}
|
||||
|
||||
node!(
|
||||
/// A template expression: `[*Hi* there!]`.
|
||||
Template => TemplateExpr
|
||||
);
|
||||
|
||||
impl TemplateExpr {
|
||||
/// The contents of the template.
|
||||
pub body: Markup,
|
||||
pub fn body(&self) -> Markup {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("template expression is missing body")
|
||||
}
|
||||
}
|
||||
|
||||
/// A grouped expression: `(1 + 2)`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct GroupExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// A grouped expression: `(1 + 2)`.
|
||||
Group => GroupExpr
|
||||
);
|
||||
|
||||
impl GroupExpr {
|
||||
/// The wrapped expression.
|
||||
pub expr: Expr,
|
||||
pub fn expr(&self) -> Expr {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("group expression is missing expression")
|
||||
}
|
||||
}
|
||||
|
||||
/// A block expression: `{ let x = 1; x + 2 }`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct BlockExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// A block expression: `{ let x = 1; x + 2 }`.
|
||||
Block => BlockExpr
|
||||
);
|
||||
|
||||
impl BlockExpr {
|
||||
/// The list of expressions contained in the block.
|
||||
pub exprs: Vec<Expr>,
|
||||
pub fn exprs(&self) -> Vec<Expr> {
|
||||
self.0.children().filter_map(RedTicket::cast).collect()
|
||||
}
|
||||
}
|
||||
|
||||
/// A unary operation: `-x`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct UnaryExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// A unary operation: `-x`.
|
||||
Unary => UnaryExpr
|
||||
);
|
||||
|
||||
impl UnaryExpr {
|
||||
/// The operator: `-`.
|
||||
pub op: UnOp,
|
||||
pub fn op(&self) -> UnOp {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("unary expression is missing operator")
|
||||
}
|
||||
|
||||
/// The expression to operate on: `x`.
|
||||
pub expr: Expr,
|
||||
pub fn expr(&self) -> Expr {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("unary expression is missing expression")
|
||||
}
|
||||
}
|
||||
|
||||
/// A unary operator.
|
||||
@ -214,13 +297,19 @@ pub enum UnOp {
|
||||
Not,
|
||||
}
|
||||
|
||||
impl TypedNode for UnOp {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
Self::from_token(node.kind())
|
||||
}
|
||||
}
|
||||
|
||||
impl UnOp {
|
||||
/// Try to convert the token into a unary operation.
|
||||
pub fn from_token(token: Token) -> Option<Self> {
|
||||
pub fn from_token(token: &NodeKind) -> Option<Self> {
|
||||
Some(match token {
|
||||
Token::Plus => Self::Pos,
|
||||
Token::Hyph => Self::Neg,
|
||||
Token::Not => Self::Not,
|
||||
NodeKind::Plus => Self::Pos,
|
||||
NodeKind::Minus => Self::Neg,
|
||||
NodeKind::Not => Self::Not,
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
@ -229,7 +318,7 @@ impl UnOp {
|
||||
pub fn precedence(self) -> usize {
|
||||
match self {
|
||||
Self::Pos | Self::Neg => 8,
|
||||
Self::Not => 3,
|
||||
Self::Not => 4,
|
||||
}
|
||||
}
|
||||
|
||||
@ -243,17 +332,34 @@ impl UnOp {
|
||||
}
|
||||
}
|
||||
|
||||
/// A binary operation: `a + b`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct BinaryExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// A binary operation: `a + b`.
|
||||
Binary => BinaryExpr
|
||||
);
|
||||
|
||||
impl BinaryExpr {
|
||||
/// The binary operator: `+`.
|
||||
pub fn op(&self) -> BinOp {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("binary expression is missing operator")
|
||||
}
|
||||
|
||||
/// The left-hand side of the operation: `a`.
|
||||
pub lhs: Expr,
|
||||
/// The operator: `+`.
|
||||
pub op: BinOp,
|
||||
pub fn lhs(&self) -> Expr {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("binary expression is missing left-hand side")
|
||||
}
|
||||
|
||||
/// The right-hand side of the operation: `b`.
|
||||
pub rhs: Expr,
|
||||
pub fn rhs(&self) -> Expr {
|
||||
self.0
|
||||
.children()
|
||||
.filter_map(RedTicket::cast)
|
||||
.nth(1)
|
||||
.expect("binary expression is missing right-hand side")
|
||||
}
|
||||
}
|
||||
|
||||
/// A binary operator.
|
||||
@ -295,27 +401,33 @@ pub enum BinOp {
|
||||
DivAssign,
|
||||
}
|
||||
|
||||
impl TypedNode for BinOp {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
Self::from_token(node.kind())
|
||||
}
|
||||
}
|
||||
|
||||
impl BinOp {
|
||||
/// Try to convert the token into a binary operation.
|
||||
pub fn from_token(token: Token) -> Option<Self> {
|
||||
pub fn from_token(token: &NodeKind) -> Option<Self> {
|
||||
Some(match token {
|
||||
Token::Plus => Self::Add,
|
||||
Token::Hyph => Self::Sub,
|
||||
Token::Star => Self::Mul,
|
||||
Token::Slash => Self::Div,
|
||||
Token::And => Self::And,
|
||||
Token::Or => Self::Or,
|
||||
Token::EqEq => Self::Eq,
|
||||
Token::ExclEq => Self::Neq,
|
||||
Token::Lt => Self::Lt,
|
||||
Token::LtEq => Self::Leq,
|
||||
Token::Gt => Self::Gt,
|
||||
Token::GtEq => Self::Geq,
|
||||
Token::Eq => Self::Assign,
|
||||
Token::PlusEq => Self::AddAssign,
|
||||
Token::HyphEq => Self::SubAssign,
|
||||
Token::StarEq => Self::MulAssign,
|
||||
Token::SlashEq => Self::DivAssign,
|
||||
NodeKind::Plus => Self::Add,
|
||||
NodeKind::Minus => Self::Sub,
|
||||
NodeKind::Star => Self::Mul,
|
||||
NodeKind::Slash => Self::Div,
|
||||
NodeKind::And => Self::And,
|
||||
NodeKind::Or => Self::Or,
|
||||
NodeKind::EqEq => Self::Eq,
|
||||
NodeKind::ExclEq => Self::Neq,
|
||||
NodeKind::Lt => Self::Lt,
|
||||
NodeKind::LtEq => Self::Leq,
|
||||
NodeKind::Gt => Self::Gt,
|
||||
NodeKind::GtEq => Self::Geq,
|
||||
NodeKind::Eq => Self::Assign,
|
||||
NodeKind::PlusEq => Self::AddAssign,
|
||||
NodeKind::HyphEq => Self::SubAssign,
|
||||
NodeKind::StarEq => Self::MulAssign,
|
||||
NodeKind::SlashEq => Self::DivAssign,
|
||||
_ => return None,
|
||||
})
|
||||
}
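// A generic, self-contained sketch of how operator precedence and associativity data
// like the tables in this file usually drive expression parsing (precedence climbing).
// This is not the parser in this commit; the toy tokens and binding powers are made up.
#[derive(Clone, Copy)]
enum Tok {
    Num(i64),
    Plus,
    Star,
}

/// Left/right binding power; a larger right power makes the operator left-associative.
fn binding_power(t: Tok) -> Option<(u8, u8)> {
    match t {
        Tok::Plus => Some((1, 2)),
        Tok::Star => Some((3, 4)),
        _ => None,
    }
}

fn expr(tokens: &[Tok], pos: &mut usize, min_bp: u8) -> i64 {
    let mut lhs = match tokens[*pos] {
        Tok::Num(n) => {
            *pos += 1;
            n
        }
        _ => panic!("expected a number"),
    };
    while *pos < tokens.len() {
        let op = tokens[*pos];
        let (l_bp, r_bp) = match binding_power(op) {
            Some(bp) => bp,
            None => break,
        };
        if l_bp < min_bp {
            break;
        }
        *pos += 1;
        let rhs = expr(tokens, pos, r_bp);
        lhs = match op {
            Tok::Plus => lhs + rhs,
            Tok::Star => lhs * rhs,
            _ => unreachable!(),
        };
    }
    lhs
}

fn main() {
    use Tok::*;
    let tokens = [Num(1), Plus, Num(2), Star, Num(3)];
    assert_eq!(expr(&tokens, &mut 0, 0), 7); // parsed as 1 + (2 * 3)
}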
@ -392,27 +504,35 @@ pub enum Associativity {
|
||||
Right,
|
||||
}
|
||||
|
||||
/// An invocation of a function: `foo(...)`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct CallExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// An invocation of a function: `foo(...)`.
|
||||
Call => CallExpr
|
||||
);
|
||||
|
||||
impl CallExpr {
|
||||
/// The function to call.
|
||||
pub callee: Expr,
|
||||
pub fn callee(&self) -> Expr {
|
||||
self.0.cast_first_child().expect("call expression is missing callee")
|
||||
}
|
||||
|
||||
/// The arguments to the function.
|
||||
pub args: CallArgs,
|
||||
pub fn args(&self) -> CallArgs {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("call expression is missing argument list")
|
||||
}
|
||||
}
|
||||
|
||||
/// The arguments to a function: `12, draw: false`.
|
||||
///
|
||||
/// In case of a bracketed invocation with a body, the body is _not_
|
||||
/// included in the span for the sake of clearer error messages.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct CallArgs {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// The arguments to a function: `12, draw: false`.
|
||||
CallArgs
|
||||
);
|
||||
|
||||
impl CallArgs {
|
||||
/// The positional and named arguments.
|
||||
pub items: Vec<CallArg>,
|
||||
pub fn items(&self) -> Vec<CallArg> {
|
||||
self.0.children().filter_map(RedTicket::cast).collect()
|
||||
}
|
||||
}
|
||||
|
||||
/// An argument to a function call.
|
||||
@ -426,30 +546,75 @@ pub enum CallArg {
|
||||
Spread(Expr),
|
||||
}
|
||||
|
||||
impl TypedNode for CallArg {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
match node.kind() {
|
||||
NodeKind::Named => Some(CallArg::Named(
|
||||
node.cast().expect("named call argument is missing name"),
|
||||
)),
|
||||
NodeKind::ParameterSink => Some(CallArg::Spread(
|
||||
node.own()
|
||||
.cast_first_child()
|
||||
.expect("call argument sink is missing expression"),
|
||||
)),
|
||||
_ => Some(CallArg::Pos(node.cast()?)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CallArg {
|
||||
/// The source code location.
|
||||
/// The name of this argument.
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
Self::Pos(expr) => expr.span(),
|
||||
Self::Named(named) => named.span(),
|
||||
Self::Pos(expr) => expr.span(),
|
||||
Self::Spread(expr) => expr.span(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A closure expression: `(x, y) => z`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct ClosureExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// A closure expression: `(x, y) => z`.
|
||||
Closure => ClosureExpr
|
||||
);
|
||||
|
||||
impl ClosureExpr {
|
||||
/// The name of the closure.
|
||||
///
|
||||
/// This only exists if you use the function syntax sugar: `let f(x) = y`.
|
||||
pub name: Option<Ident>,
|
||||
pub fn name(&self) -> Option<Ident> {
|
||||
// `first_convert_child` does not work here because of the Option in the
|
||||
// Result.
|
||||
self.0.cast_first_child()
|
||||
}
|
||||
|
||||
/// The parameter bindings.
|
||||
pub params: Vec<ClosureParam>,
|
||||
pub fn params(&self) -> Vec<ClosureParam> {
|
||||
self.0
|
||||
.children()
|
||||
.find(|x| x.kind() == &NodeKind::ClosureParams)
|
||||
.expect("closure is missing parameter list")
|
||||
.own()
|
||||
.children()
|
||||
.filter_map(RedTicket::cast)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// The body of the closure.
|
||||
pub body: Rc<Expr>,
|
||||
pub fn body(&self) -> Expr {
|
||||
// The filtering for the NodeKind is necessary here because otherwise,
|
||||
// `first_convert_child` will use the Ident if present.
|
||||
self.0.cast_last_child().expect("closure is missing body")
|
||||
}
|
||||
|
||||
/// The ticket of the body of the closure.
|
||||
pub fn body_ticket(&self) -> RedTicket {
|
||||
self.0
|
||||
.children()
|
||||
.filter(|x| x.cast::<Expr>().is_some())
|
||||
.last()
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
/// A parameter to a closure.
|
||||
@ -463,50 +628,111 @@ pub enum ClosureParam {
|
||||
Sink(Ident),
|
||||
}
|
||||
|
||||
impl ClosureParam {
|
||||
/// The source code location.
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
Self::Pos(ident) => ident.span,
|
||||
Self::Named(named) => named.span(),
|
||||
Self::Sink(ident) => ident.span,
|
||||
impl TypedNode for ClosureParam {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
match node.kind() {
|
||||
NodeKind::Ident(i) => {
|
||||
Some(ClosureParam::Pos(Ident::new(i, node.own().span()).unwrap()))
|
||||
}
|
||||
NodeKind::Named => Some(ClosureParam::Named(
|
||||
node.cast().expect("named closure parameter is missing name"),
|
||||
)),
|
||||
NodeKind::ParameterSink => Some(ClosureParam::Sink(
|
||||
node.own()
|
||||
.cast_first_child()
|
||||
.expect("closure parameter sink is missing identifier"),
|
||||
)),
|
||||
_ => Some(ClosureParam::Pos(node.cast()?)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A with expression: `f with (x, y: 1)`.
|
||||
///
|
||||
/// Applies arguments to a function.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct WithExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// A with expression: `f with (x, y: 1)`.
|
||||
WithExpr
|
||||
);
|
||||
|
||||
impl WithExpr {
|
||||
/// The function to apply the arguments to.
|
||||
pub callee: Expr,
|
||||
pub fn callee(&self) -> Expr {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("with expression is missing callee expression")
|
||||
}
|
||||
|
||||
/// The arguments to apply to the function.
|
||||
pub args: CallArgs,
|
||||
pub fn args(&self) -> CallArgs {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("with expression is missing argument list")
|
||||
}
|
||||
}
|
||||
|
||||
/// A let expression: `let x = 1`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct LetExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// A let expression: `let x = 1`.
|
||||
LetExpr
|
||||
);
|
||||
|
||||
impl LetExpr {
|
||||
/// The binding to assign to.
|
||||
pub binding: Ident,
|
||||
pub fn binding(&self) -> Ident {
|
||||
if let Some(c) = self.0.cast_first_child() {
|
||||
c
|
||||
} else if let Some(w) = self.0.typed_child(&NodeKind::WithExpr) {
|
||||
// Can't do a `first_convert_child` here because the WithExpr's
|
||||
// callee has to be an identifier.
|
||||
w.cast_first_child()
|
||||
.expect("with expression is missing an identifier callee")
|
||||
} else if let Some(Expr::Closure(c)) = self.0.cast_last_child() {
|
||||
c.name().expect("closure is missing an identifier name")
|
||||
} else {
|
||||
panic!("let expression is missing either an identifier or a with expression")
|
||||
}
|
||||
}
|
||||
|
||||
/// The expression the binding is initialized with.
|
||||
pub init: Option<Expr>,
|
||||
pub fn init(&self) -> Option<Expr> {
|
||||
if self.0.cast_first_child::<Ident>().is_some() {
|
||||
self.0.children().filter_map(RedTicket::cast).nth(1)
|
||||
} else {
|
||||
Some(
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("let expression is missing a with expression"),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// The ticket for the expression the binding is initialized with.
|
||||
pub fn init_ticket(&self) -> RedTicket {
|
||||
if self.0.cast_first_child::<Ident>().is_some() {
|
||||
self.0.children().filter(|x| x.cast::<Expr>().is_some()).nth(1)
|
||||
} else {
|
||||
self.0.children().find(|x| x.cast::<Expr>().is_some())
|
||||
}
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
/// An import expression: `import a, b, c from "utils.typ"`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct ImportExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// An import expression: `import a, b, c from "utils.typ"`.
|
||||
ImportExpr
|
||||
);
|
||||
|
||||
impl ImportExpr {
|
||||
/// The items to be imported.
|
||||
pub imports: Imports,
|
||||
pub fn imports(&self) -> Imports {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("import expression is missing import list")
|
||||
}
|
||||
|
||||
/// The location of the importable file.
|
||||
pub path: Expr,
|
||||
pub fn path(&self) -> Expr {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("import expression is missing path expression")
|
||||
}
|
||||
}
|
||||
|
||||
/// The items that ought to be imported from a file.
|
||||
@ -518,67 +744,137 @@ pub enum Imports {
|
||||
Idents(Vec<Ident>),
|
||||
}
|
||||
|
||||
/// An include expression: `include "chapter1.typ"`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct IncludeExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
/// The location of the file to be included.
|
||||
pub path: Expr,
|
||||
}
|
||||
|
||||
/// An if-else expression: `if x { y } else { z }`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct IfExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
/// The condition which selects the body to evaluate.
|
||||
pub condition: Expr,
|
||||
/// The expression to evaluate if the condition is true.
|
||||
pub if_body: Expr,
|
||||
/// The expression to evaluate if the condition is false.
|
||||
pub else_body: Option<Expr>,
|
||||
}
|
||||
|
||||
/// A while loop expression: `while x { y }`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct WhileExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
/// The condition which selects whether to evaluate the body.
|
||||
pub condition: Expr,
|
||||
/// The expression to evaluate while the condition is true.
|
||||
pub body: Expr,
|
||||
}
|
||||
|
||||
/// A for loop expression: `for x in y { z }`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct ForExpr {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
/// The pattern to assign to.
|
||||
pub pattern: ForPattern,
|
||||
/// The expression to iterate over.
|
||||
pub iter: Expr,
|
||||
/// The expression to evaluate for each iteration.
|
||||
pub body: Expr,
|
||||
}
|
||||
|
||||
/// A pattern in a for loop.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum ForPattern {
|
||||
/// A value pattern: `for v in array`.
|
||||
Value(Ident),
|
||||
/// A key-value pattern: `for k, v in dict`.
|
||||
KeyValue(Ident, Ident),
|
||||
}
|
||||
|
||||
impl ForPattern {
|
||||
/// The source code location.
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
Self::Value(v) => v.span,
|
||||
Self::KeyValue(k, v) => k.span.join(v.span),
|
||||
impl TypedNode for Imports {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
match node.kind() {
|
||||
NodeKind::Star => Some(Imports::Wildcard),
|
||||
NodeKind::ImportItems => {
|
||||
let idents = node.own().children().filter_map(RedTicket::cast).collect();
|
||||
Some(Imports::Idents(idents))
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
node!(
|
||||
/// An include expression: `include "chapter1.typ"`.
|
||||
IncludeExpr
|
||||
);
|
||||
|
||||
impl IncludeExpr {
|
||||
/// The location of the file to be included.
|
||||
pub fn path(&self) -> Expr {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("include expression is missing path expression")
|
||||
}
|
||||
}
|
||||
|
||||
node!(
|
||||
/// An if-else expression: `if x { y } else { z }`.
|
||||
IfExpr
|
||||
);
|
||||
|
||||
impl IfExpr {
|
||||
/// The condition which selects the body to evaluate.
|
||||
pub fn condition(&self) -> Expr {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("if expression is missing condition expression")
|
||||
}
|
||||
|
||||
/// The expression to evaluate if the condition is true.
|
||||
pub fn if_body(&self) -> Expr {
|
||||
self.0
|
||||
.children()
|
||||
.filter_map(RedTicket::cast)
|
||||
.nth(1)
|
||||
.expect("if expression is missing if body")
|
||||
}
|
||||
|
||||
/// The expression to evaluate if the condition is false.
|
||||
pub fn else_body(&self) -> Option<Expr> {
|
||||
self.0.children().filter_map(RedTicket::cast).nth(2)
|
||||
}
|
||||
}
|
||||
|
||||
node!(
|
||||
/// A while loop expression: `while x { y }`.
|
||||
WhileExpr
|
||||
);
|
||||
|
||||
impl WhileExpr {
|
||||
/// The condition which selects whether to evaluate the body.
|
||||
pub fn condition(&self) -> Expr {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("while loop expression is missing condition expression")
|
||||
}
|
||||
|
||||
/// The expression to evaluate while the condition is true.
|
||||
pub fn body(&self) -> Expr {
|
||||
self.0
|
||||
.children()
|
||||
.filter_map(RedTicket::cast)
|
||||
.nth(1)
|
||||
.expect("while loop expression is missing body")
|
||||
}
|
||||
}
|
||||
|
||||
node!(
|
||||
/// A for loop expression: `for x in y { z }`.
|
||||
ForExpr
|
||||
);
|
||||
|
||||
impl ForExpr {
|
||||
/// The pattern to assign to.
|
||||
pub fn pattern(&self) -> ForPattern {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("for loop expression is missing pattern")
|
||||
}
|
||||
|
||||
/// The expression to iterate over.
|
||||
pub fn iter(&self) -> Expr {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("for loop expression is missing iterable expression")
|
||||
}
|
||||
|
||||
/// The expression to evaluate for each iteration.
|
||||
pub fn body(&self) -> Expr {
|
||||
self.0
|
||||
.children()
|
||||
.filter_map(RedTicket::cast)
|
||||
.last()
|
||||
.expect("for loop expression is missing body")
|
||||
}
|
||||
|
||||
/// The ticket for the expression to evaluate for each iteration.
|
||||
pub fn body_ticket(&self) -> RedTicket {
|
||||
self.0
|
||||
.children()
|
||||
.filter(|x| x.cast::<Expr>().is_some())
|
||||
.last()
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
node!(
|
||||
/// A pattern in a for-in loop: the `x` in `for x in y { z }`.
|
||||
ForPattern
|
||||
);
|
||||
|
||||
impl ForPattern {
|
||||
pub fn key(&self) -> Option<Ident> {
|
||||
let mut items: Vec<_> = self.0.children().filter_map(RedTicket::cast).collect();
|
||||
if items.len() > 1 { Some(items.remove(0)) } else { None }
|
||||
}
|
||||
|
||||
pub fn value(&self) -> Ident {
|
||||
self.0
|
||||
.cast_last_child()
|
||||
.expect("for-in loop pattern is missing value")
|
||||
}
|
||||
}
|
||||
|
@ -3,7 +3,7 @@ use std::ops::Deref;
|
||||
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
use super::Span;
|
||||
use super::{NodeKind, RedTicket, Span, TypedNode};
|
||||
use crate::util::EcoString;
|
||||
|
||||
/// A unicode identifier with a few extra permissible characters.
|
||||
@ -66,6 +66,16 @@ impl From<&Ident> for EcoString {
|
||||
}
|
||||
}
|
||||
|
||||
impl TypedNode for Ident {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
if let NodeKind::Ident(i) = node.kind() {
|
||||
Some(Ident::new(i, node.own().span()).unwrap())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether a string is a valid identifier.
|
||||
pub fn is_ident(string: &str) -> bool {
|
||||
let mut chars = string.chars();
|
||||
|
@ -1,41 +1,87 @@
|
||||
use super::{Expr, Ident, Span};
|
||||
use super::{Expr, Ident, NodeKind, RedNode, RedTicket, Span, TypedNode};
|
||||
use crate::node;
|
||||
use crate::util::EcoString;
|
||||
use std::fmt::Write;
|
||||
|
||||
/// The syntactical root capable of representing a full parsed document.
|
||||
pub type Markup = Vec<MarkupNode>;
|
||||
|
||||
impl TypedNode for Markup {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
if node.kind() != &NodeKind::Markup {
|
||||
return None;
|
||||
}
|
||||
|
||||
let children = node.own().children().filter_map(TypedNode::cast_from).collect();
|
||||
Some(children)
|
||||
}
|
||||
}
|
||||
|
||||
/// A single piece of markup.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum MarkupNode {
|
||||
/// Whitespace containing less than two newlines.
|
||||
Space,
|
||||
/// A forced line break: `\`.
|
||||
Linebreak(Span),
|
||||
Linebreak,
|
||||
/// A paragraph break: Two or more newlines.
|
||||
Parbreak(Span),
|
||||
Parbreak,
|
||||
/// Strong text was enabled / disabled: `*`.
|
||||
Strong(Span),
|
||||
Strong,
|
||||
/// Emphasized text was enabled / disabled: `_`.
|
||||
Emph(Span),
|
||||
Emph,
|
||||
/// Plain text.
|
||||
Text(EcoString),
|
||||
/// A raw block with optional syntax highlighting: `` `...` ``.
|
||||
Raw(Box<RawNode>),
|
||||
Raw(RawNode),
|
||||
/// A section heading: `= Introduction`.
|
||||
Heading(Box<HeadingNode>),
|
||||
Heading(HeadingNode),
|
||||
/// An item in an unordered list: `- ...`.
|
||||
List(Box<ListNode>),
|
||||
List(ListNode),
|
||||
/// An item in an enumeration (ordered list): `1. ...`.
|
||||
Enum(Box<EnumNode>),
|
||||
Enum(EnumNode),
|
||||
/// An expression.
|
||||
Expr(Expr),
|
||||
}
|
||||
|
||||
impl TypedNode for MarkupNode {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
match node.kind() {
|
||||
NodeKind::Space(_) => Some(MarkupNode::Space),
|
||||
NodeKind::Linebreak => Some(MarkupNode::Linebreak),
|
||||
NodeKind::Parbreak => Some(MarkupNode::Parbreak),
|
||||
NodeKind::Strong => Some(MarkupNode::Strong),
|
||||
NodeKind::Emph => Some(MarkupNode::Emph),
|
||||
NodeKind::Text(s) => Some(MarkupNode::Text(s.clone())),
|
||||
NodeKind::UnicodeEscape(u) => {
|
||||
Some(MarkupNode::Text(if let Some(s) = u.character {
|
||||
s.into()
|
||||
} else {
|
||||
let mut eco = EcoString::with_capacity(u.sequence.len() + 4);
|
||||
write!(&mut eco, "\\u{{{}}}", u.sequence).unwrap();
|
||||
eco
|
||||
}))
|
||||
}
|
||||
NodeKind::EnDash => Some(MarkupNode::Text(EcoString::from("\u{2013}"))),
|
||||
NodeKind::EmDash => Some(MarkupNode::Text(EcoString::from("\u{2014}"))),
|
||||
NodeKind::NonBreakingSpace => {
|
||||
Some(MarkupNode::Text(EcoString::from("\u{00A0}")))
|
||||
}
|
||||
NodeKind::Raw(_) => Some(MarkupNode::Raw(RawNode::cast_from(node).unwrap())),
|
||||
NodeKind::Heading => {
|
||||
Some(MarkupNode::Heading(HeadingNode::cast_from(node).unwrap()))
|
||||
}
|
||||
NodeKind::List => Some(MarkupNode::List(ListNode::cast_from(node).unwrap())),
|
||||
NodeKind::Enum => Some(MarkupNode::Enum(EnumNode::cast_from(node).unwrap())),
|
||||
NodeKind::Error(_, _) => None,
|
||||
_ => Some(MarkupNode::Expr(Expr::cast_from(node)?)),
|
||||
}
|
||||
}
|
||||
}
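// A standalone illustration of the escape fallback above: when a unicode escape cannot
// be resolved to a character, the original `\u{...}` spelling is reproduced as text.
// Uses `String` instead of `EcoString` so it runs on its own.
use std::fmt::Write;

fn main() {
    let sequence = "FFFFFF"; // not a valid code point
    let mut text = String::with_capacity(sequence.len() + 4);
    write!(&mut text, "\\u{{{}}}", sequence).unwrap();
    assert_eq!(text, "\\u{FFFFFF}");
}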
/// A raw block with optional syntax highlighting: `` `...` ``.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct RawNode {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
/// An optional identifier specifying the language to syntax-highlight in.
|
||||
pub lang: Option<Ident>,
|
||||
/// The raw text, determined as the raw string between the backticks trimmed
|
||||
@ -46,33 +92,97 @@ pub struct RawNode {
|
||||
pub block: bool,
|
||||
}
|
||||
|
||||
/// A section heading: `= Introduction`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct HeadingNode {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
/// The section depth (number of equals signs).
|
||||
pub level: usize,
|
||||
impl TypedNode for RawNode {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
if let NodeKind::Raw(raw) = node.kind() {
|
||||
let span = node.own().span();
|
||||
let start = span.start + raw.backticks as usize;
|
||||
Some(Self {
|
||||
block: raw.block,
|
||||
lang: raw.lang.as_ref().and_then(|x| {
|
||||
let span = Span::new(span.source, start, start + x.len());
|
||||
Ident::new(x, span)
|
||||
}),
|
||||
text: raw.text.clone(),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
node!(
|
||||
/// A section heading: `= Introduction`.
|
||||
Heading => HeadingNode
|
||||
);
|
||||
|
||||
impl HeadingNode {
|
||||
/// The contents of the heading.
|
||||
pub body: Markup,
|
||||
pub fn body(&self) -> Markup {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("heading node is missing markup body")
|
||||
}
|
||||
|
||||
/// The section depth (number of equals signs).
|
||||
pub fn level(&self) -> HeadingLevel {
|
||||
self.0
|
||||
.cast_first_child()
|
||||
.expect("heading node is missing heading level")
|
||||
}
|
||||
}
|
||||
|
||||
/// An item in an unordered list: `- ...`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct ListNode {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
pub struct HeadingLevel(pub usize);
|
||||
|
||||
impl TypedNode for HeadingLevel {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
if let NodeKind::HeadingLevel(l) = node.kind() {
|
||||
Some(Self((*l).into()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
node!(
|
||||
/// An item in an unordered list: `- ...`.
|
||||
List => ListNode
|
||||
);
|
||||
|
||||
impl ListNode {
|
||||
/// The contents of the list item.
|
||||
pub body: Markup,
|
||||
pub fn body(&self) -> Markup {
|
||||
self.0.cast_first_child().expect("list node is missing body")
|
||||
}
|
||||
}
|
||||
|
||||
/// An item in an enumeration (ordered list): `1. ...`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct EnumNode {
|
||||
/// The source code location.
|
||||
pub span: Span,
|
||||
node!(
|
||||
/// An item in an enumeration (ordered list): `1. ...`.
|
||||
Enum => EnumNode
|
||||
);
|
||||
|
||||
impl EnumNode {
|
||||
/// The contents of the list item.
|
||||
pub fn body(&self) -> Markup {
|
||||
self.0.cast_first_child().expect("enumeration node is missing body")
|
||||
}
|
||||
|
||||
/// The number, if any.
|
||||
pub number: Option<usize>,
|
||||
/// The contents of the list item.
|
||||
pub body: Markup,
|
||||
pub fn number(&self) -> EnumNumber {
|
||||
self.0.cast_first_child().expect("enumeration node is missing number")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
pub struct EnumNumber(pub Option<usize>);
|
||||
|
||||
impl TypedNode for EnumNumber {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
if let NodeKind::EnumNumbering(x) = node.kind() {
|
||||
Some(Self(*x))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
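// Illustrative sketch (not from the original sources): reading an ordered
// list item through the typed accessors above. `number()` yields the
// optional explicit number and `body()` the nested markup.
fn describe_enum_item(item: &EnumNode) -> String {
    match item.number().0 {
        Some(n) => format!("explicitly numbered {}", n),
        None => format!("auto-numbered, {} markup nodes", item.body().len()),
    }
}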
|
||||
|
@ -6,7 +6,11 @@ mod markup;
|
||||
mod pretty;
|
||||
mod span;
|
||||
mod token;
|
||||
pub mod visit;
|
||||
|
||||
use std::fmt;
|
||||
use std::fmt::{Debug, Display, Formatter};
|
||||
use std::mem;
|
||||
use std::rc::Rc;
|
||||
|
||||
pub use expr::*;
|
||||
pub use ident::*;
|
||||
@ -14,3 +18,685 @@ pub use markup::*;
|
||||
pub use pretty::*;
|
||||
pub use span::*;
|
||||
pub use token::*;
|
||||
|
||||
use crate::geom::{AngularUnit, LengthUnit};
|
||||
use crate::source::SourceId;
|
||||
use crate::util::EcoString;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum NodeKind {
|
||||
/// A left square bracket: `[`.
|
||||
LeftBracket,
|
||||
/// A right square bracket: `]`.
|
||||
RightBracket,
|
||||
/// A left curly brace: `{`.
|
||||
LeftBrace,
|
||||
/// A right curly brace: `}`.
|
||||
RightBrace,
|
||||
/// A left round parenthesis: `(`.
|
||||
LeftParen,
|
||||
/// A right round parenthesis: `)`.
|
||||
RightParen,
|
||||
/// An asterisk: `*`.
|
||||
Star,
|
||||
/// A comma: `,`.
|
||||
Comma,
|
||||
/// A semicolon: `;`.
|
||||
Semicolon,
|
||||
/// A colon: `:`.
|
||||
Colon,
|
||||
/// A plus: `+`.
|
||||
Plus,
|
||||
/// A hyphen: `-`.
|
||||
Minus,
|
||||
/// A slash: `/`.
|
||||
Slash,
|
||||
/// A single equals sign: `=`.
|
||||
Eq,
|
||||
/// Two equals signs: `==`.
|
||||
EqEq,
|
||||
/// An exclamation mark followed by an equals sign: `!=`.
|
||||
ExclEq,
|
||||
/// A less-than sign: `<`.
|
||||
Lt,
|
||||
/// A less-than sign followed by an equals sign: `<=`.
|
||||
LtEq,
|
||||
/// A greater-than sign: `>`.
|
||||
Gt,
|
||||
/// A greater-than sign followed by an equals sign: `>=`.
|
||||
GtEq,
|
||||
/// A plus followed by an equals sign: `+=`.
|
||||
PlusEq,
|
||||
/// A hyphen followed by an equals sign: `-=`.
|
||||
HyphEq,
|
||||
/// An asterisk followed by an equals sign: `*=`.
|
||||
StarEq,
|
||||
/// A slash followed by an equals sign: `/=`.
|
||||
SlashEq,
|
||||
/// Two dots: `..`.
|
||||
Dots,
|
||||
/// An equals sign followed by a greater-than sign: `=>`.
|
||||
Arrow,
|
||||
/// The `not` operator.
|
||||
Not,
|
||||
/// The `and` operator.
|
||||
And,
|
||||
/// The `or` operator.
|
||||
Or,
|
||||
/// The `with` operator.
|
||||
With,
|
||||
/// The `with` expression: `with (1)`.
|
||||
WithExpr,
|
||||
/// The none literal: `none`.
|
||||
None,
|
||||
/// The auto literal: `auto`.
|
||||
Auto,
|
||||
/// The `let` keyword.
|
||||
Let,
|
||||
/// The `if` keyword.
|
||||
If,
|
||||
/// The `else` keyword.
|
||||
Else,
|
||||
/// The `for` keyword.
|
||||
For,
|
||||
/// The `in` keyword.
|
||||
In,
|
||||
/// The `while` keyword.
|
||||
While,
|
||||
/// The `break` keyword.
|
||||
Break,
|
||||
/// The `continue` keyword.
|
||||
Continue,
|
||||
/// The `return` keyword.
|
||||
Return,
|
||||
/// The `import` keyword.
|
||||
Import,
|
||||
/// The `include` keyword.
|
||||
Include,
|
||||
/// The `from` keyword.
|
||||
From,
|
||||
/// One or more whitespace characters.
|
||||
Space(usize),
|
||||
/// A consecutive non-markup string.
|
||||
Text(EcoString),
|
||||
/// A slash and the letter "u" followed by a hexadecimal unicode entity
|
||||
/// enclosed in curly braces: `\u{1F5FA}`.
|
||||
UnicodeEscape(UnicodeEscapeToken),
|
||||
/// An arbitrary number of backticks followed by inner contents, terminated
|
||||
/// with the same number of backticks: `` `...` ``.
|
||||
Raw(RawToken),
|
||||
/// Dollar signs surrounding inner contents.
|
||||
Math(MathToken),
|
||||
/// A numbering: `23.`.
|
||||
///
|
||||
/// Can also exist without the number: `.`.
|
||||
EnumNumbering(Option<usize>),
|
||||
/// An identifier: `center`.
|
||||
Ident(EcoString),
|
||||
/// A boolean: `true`, `false`.
|
||||
Bool(bool),
|
||||
/// An integer: `120`.
|
||||
Int(i64),
|
||||
/// A floating-point number: `1.2`, `10e-4`.
|
||||
Float(f64),
|
||||
/// A length: `12pt`, `3cm`.
|
||||
Length(f64, LengthUnit),
|
||||
/// An angle: `90deg`.
|
||||
Angle(f64, AngularUnit),
|
||||
/// A percentage: `50%`.
|
||||
///
|
||||
/// _Note_: `50%` is stored as `50.0` here, as in the corresponding
|
||||
/// [literal](super::Lit::Percent).
|
||||
Percentage(f64),
|
||||
/// A fraction unit: `3fr`.
|
||||
Fraction(f64),
|
||||
/// A quoted string: `"..."`.
|
||||
Str(StrToken),
|
||||
/// Two slashes followed by inner contents, terminated with a newline:
|
||||
/// `//<str>\n`.
|
||||
LineComment,
|
||||
/// A slash and a star followed by inner contents, terminated with a star
|
||||
/// and a slash: `/*<str>*/`.
|
||||
///
|
||||
/// The comment can contain nested block comments.
|
||||
BlockComment,
|
||||
/// A node that should never appear in a finished tree.
|
||||
Never,
|
||||
/// Tokens that appear in the wrong place.
|
||||
Error(ErrorPosition, EcoString),
|
||||
/// Template markup.
|
||||
Markup,
|
||||
/// A forced line break: `\`.
|
||||
Linebreak,
|
||||
/// A paragraph break: Two or more newlines.
|
||||
Parbreak,
|
||||
/// Strong text was enabled / disabled: `*`.
|
||||
Strong,
|
||||
/// Emphasized text was enabled / disabled: `_`.
|
||||
Emph,
|
||||
/// A non-breaking space: `~`.
|
||||
NonBreakingSpace,
|
||||
/// An en-dash: `--`.
|
||||
EnDash,
|
||||
/// An em-dash: `---`.
|
||||
EmDash,
|
||||
/// A section heading: `= Introduction`.
|
||||
Heading,
|
||||
/// A heading's level: `=`, `==`, `===`, etc.
|
||||
HeadingLevel(u8),
|
||||
/// An item in an unordered list: `- ...`.
|
||||
List,
|
||||
/// The bullet character of an item in an unordered list: `-`.
|
||||
ListBullet,
|
||||
/// An item in an enumeration (ordered list): `1. ...`.
|
||||
Enum,
|
||||
/// An array expression: `(1, "hi", 12cm)`.
|
||||
Array,
|
||||
/// A dictionary expression: `(thickness: 3pt, pattern: dashed)`.
|
||||
Dict,
|
||||
/// A named argument: `thickness: 3pt`.
|
||||
Named,
|
||||
/// A template expression: `[*Hi* there!]`.
|
||||
Template,
|
||||
/// A grouped expression: `(1 + 2)`.
|
||||
Group,
|
||||
/// A block expression: `{ let x = 1; x + 2 }`.
|
||||
Block,
|
||||
/// A unary operation: `-x`.
|
||||
Unary,
|
||||
/// A binary operation: `a + b`.
|
||||
Binary,
|
||||
/// An invocation of a function: `f(x, y)`.
|
||||
Call,
|
||||
/// A function call's argument list: `(x, y)`.
|
||||
CallArgs,
|
||||
/// A closure expression: `(x, y) => z`.
|
||||
Closure,
|
||||
/// A closure's parameters: `(x, y)`.
|
||||
ClosureParams,
|
||||
/// A parameter sink: `..x`.
|
||||
ParameterSink,
|
||||
/// A for loop expression: `for x in y { ... }`.
|
||||
ForExpr,
|
||||
/// A while loop expression: `while x { ... }`.
|
||||
WhileExpr,
|
||||
/// An if expression: `if x { ... }`.
|
||||
IfExpr,
|
||||
/// A let expression: `let x = 1`.
|
||||
LetExpr,
|
||||
/// A for loop's destructuring pattern: `x` or `x, y`.
|
||||
ForPattern,
|
||||
/// The import expression: `import x from "foo.typ"`.
|
||||
ImportExpr,
|
||||
/// Items to import: `a, b, c`.
|
||||
ImportItems,
|
||||
/// The include expression: `include "foo.typ"`.
|
||||
IncludeExpr,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
pub enum ErrorPosition {
|
||||
/// At the start of the node.
|
||||
Start,
|
||||
/// Over the full width of the node.
|
||||
Full,
|
||||
/// At the end of the node.
|
||||
End,
|
||||
}
|
||||
|
||||
impl Display for NodeKind {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
f.pad(match self {
|
||||
Self::LeftBracket => "opening bracket",
|
||||
Self::RightBracket => "closing bracket",
|
||||
Self::LeftBrace => "opening brace",
|
||||
Self::RightBrace => "closing brace",
|
||||
Self::LeftParen => "opening paren",
|
||||
Self::RightParen => "closing paren",
|
||||
Self::Star => "star",
|
||||
Self::Comma => "comma",
|
||||
Self::Semicolon => "semicolon",
|
||||
Self::Colon => "colon",
|
||||
Self::Plus => "plus",
|
||||
Self::Minus => "minus",
|
||||
Self::Slash => "slash",
|
||||
Self::Eq => "assignment operator",
|
||||
Self::EqEq => "equality operator",
|
||||
Self::ExclEq => "inequality operator",
|
||||
Self::Lt => "less-than operator",
|
||||
Self::LtEq => "less-than or equal operator",
|
||||
Self::Gt => "greater-than operator",
|
||||
Self::GtEq => "greater-than or equal operator",
|
||||
Self::PlusEq => "add-assign operator",
|
||||
Self::HyphEq => "subtract-assign operator",
|
||||
Self::StarEq => "multiply-assign operator",
|
||||
Self::SlashEq => "divide-assign operator",
|
||||
Self::Dots => "dots",
|
||||
Self::Arrow => "arrow",
|
||||
Self::Not => "operator `not`",
|
||||
Self::And => "operator `and`",
|
||||
Self::Or => "operator `or`",
|
||||
Self::With => "operator `with`",
|
||||
Self::WithExpr => "`with` expression",
|
||||
Self::None => "`none`",
|
||||
Self::Auto => "`auto`",
|
||||
Self::Let => "keyword `let`",
|
||||
Self::If => "keyword `if`",
|
||||
Self::Else => "keyword `else`",
|
||||
Self::For => "keyword `for`",
|
||||
Self::In => "keyword `in`",
|
||||
Self::While => "keyword `while`",
|
||||
Self::Break => "keyword `break`",
|
||||
Self::Continue => "keyword `continue`",
|
||||
Self::Return => "keyword `return`",
|
||||
Self::Import => "keyword `import`",
|
||||
Self::Include => "keyword `include`",
|
||||
Self::From => "keyword `from`",
|
||||
Self::Space(_) => "space",
|
||||
Self::Math(_) => "math formula",
|
||||
Self::EnumNumbering(_) => "numbering",
|
||||
Self::Str(_) => "string",
|
||||
Self::Never => "a node that should not be here",
|
||||
Self::LineComment => "line comment",
|
||||
Self::BlockComment => "block comment",
|
||||
Self::Markup => "markup",
|
||||
Self::Linebreak => "forced linebreak",
|
||||
Self::Parbreak => "paragraph break",
|
||||
Self::Strong => "strong",
|
||||
Self::Emph => "emphasis",
|
||||
Self::Text(_) => "text",
|
||||
Self::NonBreakingSpace => "non-breaking space",
|
||||
Self::EnDash => "en dash",
|
||||
Self::EmDash => "em dash",
|
||||
Self::UnicodeEscape(_) => "unicode escape sequence",
|
||||
Self::Raw(_) => "raw block",
|
||||
Self::Heading => "heading",
|
||||
Self::HeadingLevel(_) => "heading level",
|
||||
Self::List => "list",
|
||||
Self::ListBullet => "list bullet",
|
||||
Self::Enum => "enum",
|
||||
Self::Ident(_) => "identifier",
|
||||
Self::Bool(_) => "boolean",
|
||||
Self::Int(_) => "integer",
|
||||
Self::Float(_) => "float",
|
||||
Self::Length(_, _) => "length",
|
||||
Self::Angle(_, _) => "angle",
|
||||
Self::Percentage(_) => "percentage",
|
||||
Self::Fraction(_) => "`fr` value",
|
||||
Self::Array => "array",
|
||||
Self::Dict => "dictionary",
|
||||
Self::Named => "named argument",
|
||||
Self::Template => "template",
|
||||
Self::Group => "group",
|
||||
Self::Block => "block",
|
||||
Self::Unary => "unary expression",
|
||||
Self::Binary => "binary expression",
|
||||
Self::Call => "call",
|
||||
Self::CallArgs => "call arguments",
|
||||
Self::Closure => "closure",
|
||||
Self::ClosureParams => "closure parameters",
|
||||
Self::ParameterSink => "parameter sink",
|
||||
Self::ForExpr => "for-loop expression",
|
||||
Self::WhileExpr => "while-loop expression",
|
||||
Self::IfExpr => "if expression",
|
||||
Self::LetExpr => "let expression",
|
||||
Self::ForPattern => "for-loop destructuring pattern",
|
||||
Self::ImportExpr => "import expression",
|
||||
Self::ImportItems => "import items",
|
||||
Self::IncludeExpr => "include expression",
|
||||
Self::Error(_, src) => match src.as_str() {
|
||||
"*/" => "end of block comment",
|
||||
_ => "invalid token",
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl NodeKind {
|
||||
pub fn is_parenthesis(&self) -> bool {
|
||||
match self {
|
||||
Self::LeftParen => true,
|
||||
Self::RightParen => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_bracket(&self) -> bool {
|
||||
match self {
|
||||
Self::LeftBracket => true,
|
||||
Self::RightBracket => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_brace(&self) -> bool {
|
||||
match self {
|
||||
Self::LeftBrace => true,
|
||||
Self::RightBrace => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_error(&self) -> bool {
|
||||
matches!(self, NodeKind::Never | NodeKind::Error(_, _))
|
||||
}
|
||||
}
|
||||
|
||||
/// A syntactical node.
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub struct GreenNode {
|
||||
/// Node metadata.
|
||||
meta: GreenData,
|
||||
/// This node's children, which losslessly make up this node.
|
||||
children: Vec<Green>,
|
||||
}
|
||||
|
||||
/// Data shared between [`GreenNode`]s and [`GreenToken`]s.
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub struct GreenData {
|
||||
/// What kind of node this is (each kind would have its own struct in a
|
||||
/// strongly typed AST).
|
||||
kind: NodeKind,
|
||||
/// The byte length of the node in the source.
|
||||
len: usize,
|
||||
/// Whether this node or any of its children are erroneous.
|
||||
has_error: bool,
|
||||
}
|
||||
|
||||
impl GreenData {
|
||||
pub fn new(kind: NodeKind, len: usize) -> Self {
|
||||
Self { len, has_error: kind.is_error(), kind }
|
||||
}
|
||||
|
||||
pub fn kind(&self) -> &NodeKind {
|
||||
&self.kind
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.len
|
||||
}
|
||||
|
||||
pub fn has_error(&self) -> bool {
|
||||
self.has_error
|
||||
}
|
||||
}
|
||||
|
||||
impl From<GreenData> for Green {
|
||||
fn from(token: GreenData) -> Self {
|
||||
Self::Token(token)
|
||||
}
|
||||
}
|
||||
|
||||
/// Children of a [`GreenNode`].
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub enum Green {
|
||||
/// A terminal owned token.
|
||||
Token(GreenData),
|
||||
/// A non-terminal node in an Rc.
|
||||
Node(Rc<GreenNode>),
|
||||
}
|
||||
|
||||
impl Green {
|
||||
fn meta(&self) -> &GreenData {
|
||||
match self {
|
||||
Green::Token(t) => &t,
|
||||
Green::Node(n) => &n.meta,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn kind(&self) -> &NodeKind {
|
||||
self.meta().kind()
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.meta().len()
|
||||
}
|
||||
|
||||
pub fn has_error(&self) -> bool {
|
||||
self.meta().has_error()
|
||||
}
|
||||
|
||||
pub fn children(&self) -> &[Green] {
|
||||
match self {
|
||||
Green::Token(_) => &[],
|
||||
Green::Node(n) => &n.children(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl GreenNode {
|
||||
pub fn new(kind: NodeKind, len: usize) -> Self {
|
||||
Self {
|
||||
meta: GreenData::new(kind, len),
|
||||
children: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_children(
|
||||
kind: NodeKind,
|
||||
len: usize,
|
||||
children: impl Iterator<Item = impl Into<Green>>,
|
||||
) -> Self {
|
||||
let mut meta = GreenData::new(kind, len);
|
||||
let children = children
|
||||
.map(|x| {
|
||||
let x = x.into();
|
||||
meta.has_error |= x.has_error();
|
||||
x
|
||||
})
|
||||
.collect();
|
||||
Self { meta, children }
|
||||
}
|
||||
|
||||
pub fn with_child(kind: NodeKind, len: usize, child: impl Into<Green>) -> Self {
|
||||
Self::with_children(kind, len, std::iter::once(child.into()))
|
||||
}
|
||||
|
||||
pub fn children(&self) -> &[Green] {
|
||||
&self.children
|
||||
}
|
||||
}
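// Illustrative sketch (assumes the constructors above, with made-up byte
// lengths): how a small green subtree for the markup `*hi*` could be
// assembled from owned tokens.
fn example_green_markup() -> GreenNode {
    let tokens = vec![
        Green::Token(GreenData::new(NodeKind::Strong, 1)),
        Green::Token(GreenData::new(NodeKind::Text("hi".into()), 2)),
        Green::Token(GreenData::new(NodeKind::Strong, 1)),
    ];
    GreenNode::with_children(NodeKind::Markup, 4, tokens.into_iter())
}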
|
||||
|
||||
impl From<GreenNode> for Green {
|
||||
fn from(node: GreenNode) -> Self {
|
||||
Rc::new(node).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Rc<GreenNode>> for Green {
|
||||
fn from(node: Rc<GreenNode>) -> Self {
|
||||
Self::Node(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Green {
|
||||
fn default() -> Self {
|
||||
Self::Token(GreenData::new(NodeKind::Never, 0))
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Green {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
write!(f, "{:?}: {}", self.kind(), self.len())?;
|
||||
if let Self::Node(n) = self {
|
||||
if !n.children.is_empty() {
|
||||
f.write_str(" ")?;
|
||||
f.debug_list().entries(&n.children).finish()?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq)]
|
||||
pub struct RedTicket<'a> {
|
||||
id: SourceId,
|
||||
offset: usize,
|
||||
green: &'a Green,
|
||||
}
|
||||
|
||||
impl<'a> RedTicket<'a> {
|
||||
pub fn own(self) -> RedNode {
|
||||
RedNode {
|
||||
id: self.id,
|
||||
offset: self.offset,
|
||||
green: self.green.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn kind(&self) -> &NodeKind {
|
||||
self.green.kind()
|
||||
}
|
||||
|
||||
|
||||
pub fn cast<T>(self) -> Option<T>
|
||||
where
|
||||
T: TypedNode,
|
||||
{
|
||||
T::cast_from(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub struct RedNode {
|
||||
id: SourceId,
|
||||
offset: usize,
|
||||
green: Green,
|
||||
}
|
||||
|
||||
impl RedNode {
|
||||
pub fn new_root(root: Rc<GreenNode>, id: SourceId) -> Self {
|
||||
Self { id, offset: 0, green: root.into() }
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
Span::new(self.id, self.offset, self.offset + self.green.len())
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.green.len()
|
||||
}
|
||||
|
||||
pub fn kind(&self) -> &NodeKind {
|
||||
self.green.kind()
|
||||
}
|
||||
|
||||
pub fn children<'a>(&'a self) -> impl Iterator<Item = RedTicket<'a>> + Clone + 'a {
|
||||
let children = match &self.green {
|
||||
Green::Node(node) => node.children(),
|
||||
Green::Token(_) => &[],
|
||||
};
|
||||
|
||||
let mut offset = self.offset;
|
||||
children.iter().map(move |green_child| {
|
||||
let child_offset = offset;
|
||||
offset += green_child.len();
|
||||
RedTicket {
|
||||
id: self.id,
|
||||
offset: child_offset,
|
||||
green: &green_child,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn has_error(&self) -> bool {
|
||||
self.green.has_error()
|
||||
}
|
||||
|
||||
pub fn errors(&self) -> Vec<(Span, EcoString)> {
|
||||
if !self.green.has_error() {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
if let NodeKind::Error(pos, msg) = self.kind() {
|
||||
let span = match pos {
|
||||
ErrorPosition::Start => self.span().at_start(),
|
||||
ErrorPosition::Full => self.span(),
|
||||
ErrorPosition::End => self.span().at_end(),
|
||||
};
|
||||
|
||||
vec![(span, msg.clone())]
|
||||
} else if let NodeKind::Never = self.kind() {
|
||||
vec![(self.span(), "found a never node".into())]
|
||||
} else {
|
||||
self.children()
|
||||
.filter(|ticket| ticket.green.has_error())
|
||||
.flat_map(|ticket| ticket.own().errors())
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ticket<'a>(&'a self) -> RedTicket<'a> {
|
||||
RedTicket {
|
||||
id: self.id,
|
||||
offset: self.offset,
|
||||
green: &self.green,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn typed_child(&self, kind: &NodeKind) -> Option<RedNode> {
|
||||
self.children()
|
||||
.find(|x| mem::discriminant(x.kind()) == mem::discriminant(kind))
|
||||
.map(RedTicket::own)
|
||||
}
|
||||
|
||||
pub(crate) fn cast_first_child<T: TypedNode>(&self) -> Option<T> {
|
||||
self.children().find_map(RedTicket::cast)
|
||||
}
|
||||
|
||||
pub(crate) fn cast_last_child<T: TypedNode>(&self) -> Option<T> {
|
||||
self.children().filter_map(RedTicket::cast).last()
|
||||
}
|
||||
}
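// Illustrative sketch (not part of the diff): a generic walk over a red tree
// that collects the spans of all heading nodes, using only the accessors
// defined above.
fn heading_spans(node: &RedNode) -> Vec<Span> {
    let mut spans = vec![];
    if node.kind() == &NodeKind::Heading {
        spans.push(node.span());
    }
    for child in node.children() {
        spans.extend(heading_spans(&child.own()));
    }
    spans
}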
|
||||
|
||||
impl Debug for RedNode {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
write!(f, "{:?}: {:?}", self.kind(), self.span())?;
|
||||
let children = self.children().collect::<Vec<_>>();
|
||||
if !children.is_empty() {
|
||||
f.write_str(" ")?;
|
||||
f.debug_list()
|
||||
.entries(children.into_iter().map(RedTicket::own))
|
||||
.finish()?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub trait TypedNode: Sized {
|
||||
/// Performs the conversion.
|
||||
fn cast_from(value: RedTicket) -> Option<Self>;
|
||||
}
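// Illustrative sketch: `RedTicket::cast` defers to this trait, so a typed
// view of a subtree is one call away once a ticket is at hand.
fn as_markup(ticket: RedTicket) -> Option<Markup> {
    ticket.cast()
}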
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! node {
|
||||
(#[doc = $doc:expr] $name:ident) => {
|
||||
node!(#[doc = $doc] $name => $name);
|
||||
};
|
||||
(#[doc = $doc:expr] $variant:ident => $name:ident) => {
|
||||
#[doc = $doc]
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct $name(RedNode);
|
||||
|
||||
impl TypedNode for $name {
|
||||
fn cast_from(node: RedTicket) -> Option<Self> {
|
||||
if node.kind() != &NodeKind::$variant {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(Self(node.own()))
|
||||
}
|
||||
}
|
||||
|
||||
impl $name {
|
||||
pub fn span(&self) -> Span {
|
||||
self.0.span()
|
||||
}
|
||||
|
||||
pub fn underlying(&self) -> RedTicket {
|
||||
self.0.ticket()
|
||||
}
|
||||
}
|
||||
};
|
||||
}
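// Illustrative sketch of what a `node!`-generated wrapper offers: a
// kind-checked cast plus `span` and `underlying`, so re-dispatching on the
// raw ticket stays possible.
fn list_span(node: &ListNode) -> Span {
    debug_assert_eq!(node.underlying().kind(), &NodeKind::List);
    node.span()
}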
|
||||
|
@ -88,10 +88,10 @@ impl Pretty for MarkupNode {
|
||||
match self {
|
||||
// TODO: Handle escaping.
|
||||
Self::Space => p.push(' '),
|
||||
Self::Linebreak(_) => p.push_str(r"\"),
|
||||
Self::Parbreak(_) => p.push_str("\n\n"),
|
||||
Self::Strong(_) => p.push('*'),
|
||||
Self::Emph(_) => p.push('_'),
|
||||
Self::Linebreak => p.push_str(r"\"),
|
||||
Self::Parbreak => p.push_str("\n\n"),
|
||||
Self::Strong => p.push('*'),
|
||||
Self::Emph => p.push('_'),
|
||||
Self::Text(text) => p.push_str(text),
|
||||
Self::Raw(raw) => raw.pretty(p),
|
||||
Self::Heading(heading) => heading.pretty(p),
|
||||
@ -165,28 +165,28 @@ impl Pretty for RawNode {
|
||||
|
||||
impl Pretty for HeadingNode {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
for _ in 0 .. self.level {
|
||||
for _ in 0 .. self.level().0 {
|
||||
p.push('=');
|
||||
}
|
||||
p.push(' ');
|
||||
self.body.pretty(p);
|
||||
self.body().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
impl Pretty for ListNode {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push_str("- ");
|
||||
self.body.pretty(p);
|
||||
self.body().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
impl Pretty for EnumNode {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
if let Some(number) = self.number {
|
||||
if let Some(number) = self.number().0 {
|
||||
write!(p, "{}", number).unwrap();
|
||||
}
|
||||
p.push_str(". ");
|
||||
self.body.pretty(p);
|
||||
self.body().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
@ -235,8 +235,10 @@ impl Pretty for Lit {
|
||||
impl Pretty for ArrayExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push('(');
|
||||
p.join(&self.items, ", ", |item, p| item.pretty(p));
|
||||
if self.items.len() == 1 {
|
||||
|
||||
let items = self.items();
|
||||
p.join(&items, ", ", |item, p| item.pretty(p));
|
||||
if items.len() == 1 {
|
||||
p.push(',');
|
||||
}
|
||||
p.push(')');
|
||||
@ -246,10 +248,12 @@ impl Pretty for ArrayExpr {
|
||||
impl Pretty for DictExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push('(');
|
||||
if self.items.is_empty() {
|
||||
|
||||
let items = self.items();
|
||||
if items.is_empty() {
|
||||
p.push(':');
|
||||
} else {
|
||||
p.join(&self.items, ", ", |named, p| named.pretty(p));
|
||||
p.join(&items, ", ", |named, p| named.pretty(p));
|
||||
}
|
||||
p.push(')');
|
||||
}
|
||||
@ -257,16 +261,16 @@ impl Pretty for DictExpr {
|
||||
|
||||
impl Pretty for Named {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
self.name.pretty(p);
|
||||
self.name().pretty(p);
|
||||
p.push_str(": ");
|
||||
self.expr.pretty(p);
|
||||
self.expr().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
impl Pretty for TemplateExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push('[');
|
||||
self.body.pretty(p);
|
||||
self.body().pretty(p);
|
||||
p.push(']');
|
||||
}
|
||||
}
|
||||
@ -274,7 +278,7 @@ impl Pretty for TemplateExpr {
|
||||
impl Pretty for GroupExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push('(');
|
||||
self.expr.pretty(p);
|
||||
self.expr().pretty(p);
|
||||
p.push(')');
|
||||
}
|
||||
}
|
||||
@ -282,11 +286,13 @@ impl Pretty for GroupExpr {
|
||||
impl Pretty for BlockExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push('{');
|
||||
if self.exprs.len() > 1 {
|
||||
|
||||
let exprs = self.exprs();
|
||||
if exprs.len() > 1 {
|
||||
p.push(' ');
|
||||
}
|
||||
p.join(&self.exprs, "; ", |expr, p| expr.pretty(p));
|
||||
if self.exprs.len() > 1 {
|
||||
p.join(&exprs, "; ", |expr, p| expr.pretty(p));
|
||||
if exprs.len() > 1 {
|
||||
p.push(' ');
|
||||
}
|
||||
p.push('}');
|
||||
@ -295,11 +301,12 @@ impl Pretty for BlockExpr {
|
||||
|
||||
impl Pretty for UnaryExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
self.op.pretty(p);
|
||||
if self.op == UnOp::Not {
|
||||
let op = self.op();
|
||||
op.pretty(p);
|
||||
if op == UnOp::Not {
|
||||
p.push(' ');
|
||||
}
|
||||
self.expr.pretty(p);
|
||||
self.expr().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
@ -311,11 +318,11 @@ impl Pretty for UnOp {
|
||||
|
||||
impl Pretty for BinaryExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
self.lhs.pretty(p);
|
||||
self.lhs().pretty(p);
|
||||
p.push(' ');
|
||||
self.op.pretty(p);
|
||||
self.op().pretty(p);
|
||||
p.push(' ');
|
||||
self.rhs.pretty(p);
|
||||
self.rhs().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
@ -327,7 +334,7 @@ impl Pretty for BinOp {
|
||||
|
||||
impl Pretty for CallExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
self.callee.pretty(p);
|
||||
self.callee().pretty(p);
|
||||
|
||||
let mut write_args = |items: &[CallArg]| {
|
||||
p.push('(');
|
||||
@ -335,25 +342,26 @@ impl Pretty for CallExpr {
|
||||
p.push(')');
|
||||
};
|
||||
|
||||
match self.args.items.as_slice() {
|
||||
// This can be moved behind the arguments.
|
||||
//
|
||||
// Example: Transforms "#v(a, [b])" => "#v(a)[b]".
|
||||
[head @ .., CallArg::Pos(Expr::Template(template))] => {
|
||||
if !head.is_empty() {
|
||||
write_args(head);
|
||||
}
|
||||
template.pretty(p);
|
||||
}
|
||||
let arg_list = self.args();
|
||||
let args = arg_list.items();
|
||||
|
||||
items => write_args(items),
|
||||
if let Some(Expr::Template(template)) = args
|
||||
.last()
|
||||
.and_then(|x| if let CallArg::Pos(arg) = x { Some(arg) } else { None })
|
||||
{
|
||||
if args.len() > 1 {
|
||||
write_args(&args[0 .. args.len() - 1]);
|
||||
}
|
||||
template.pretty(p);
|
||||
} else {
|
||||
write_args(&args);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Pretty for CallArgs {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.join(&self.items, ", ", |item, p| item.pretty(p));
|
||||
p.join(&self.items(), ", ", |item, p| item.pretty(p));
|
||||
}
|
||||
}
|
||||
|
||||
@ -372,15 +380,15 @@ impl Pretty for CallArg {
|
||||
|
||||
impl Pretty for ClosureExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
if let [param] = self.params.as_slice() {
|
||||
if let [param] = self.params().as_slice() {
|
||||
param.pretty(p);
|
||||
} else {
|
||||
p.push('(');
|
||||
p.join(self.params.iter(), ", ", |item, p| item.pretty(p));
|
||||
p.join(self.params().iter(), ", ", |item, p| item.pretty(p));
|
||||
p.push(')');
|
||||
}
|
||||
p.push_str(" => ");
|
||||
self.body.pretty(p);
|
||||
self.body().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
@ -399,9 +407,9 @@ impl Pretty for ClosureParam {
|
||||
|
||||
impl Pretty for WithExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
self.callee.pretty(p);
|
||||
self.callee().pretty(p);
|
||||
p.push_str(" with (");
|
||||
self.args.pretty(p);
|
||||
self.args().pretty(p);
|
||||
p.push(')');
|
||||
}
|
||||
}
|
||||
@ -409,13 +417,13 @@ impl Pretty for WithExpr {
|
||||
impl Pretty for LetExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push_str("let ");
|
||||
self.binding.pretty(p);
|
||||
if let Some(Expr::Closure(closure)) = &self.init {
|
||||
self.binding().pretty(p);
|
||||
if let Some(Expr::Closure(closure)) = &self.init() {
|
||||
p.push('(');
|
||||
p.join(closure.params.iter(), ", ", |item, p| item.pretty(p));
|
||||
p.join(closure.params().iter(), ", ", |item, p| item.pretty(p));
|
||||
p.push_str(") = ");
|
||||
closure.body.pretty(p);
|
||||
} else if let Some(init) = &self.init {
|
||||
closure.body().pretty(p);
|
||||
} else if let Some(init) = &self.init() {
|
||||
p.push_str(" = ");
|
||||
init.pretty(p);
|
||||
}
|
||||
@ -425,10 +433,10 @@ impl Pretty for LetExpr {
|
||||
impl Pretty for IfExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push_str("if ");
|
||||
self.condition.pretty(p);
|
||||
self.condition().pretty(p);
|
||||
p.push(' ');
|
||||
self.if_body.pretty(p);
|
||||
if let Some(expr) = &self.else_body {
|
||||
self.if_body().pretty(p);
|
||||
if let Some(expr) = &self.else_body() {
|
||||
p.push_str(" else ");
|
||||
expr.pretty(p);
|
||||
}
|
||||
@ -438,42 +446,40 @@ impl Pretty for IfExpr {
|
||||
impl Pretty for WhileExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push_str("while ");
|
||||
self.condition.pretty(p);
|
||||
self.condition().pretty(p);
|
||||
p.push(' ');
|
||||
self.body.pretty(p);
|
||||
self.body().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
impl Pretty for ForExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push_str("for ");
|
||||
self.pattern.pretty(p);
|
||||
self.pattern().pretty(p);
|
||||
p.push_str(" in ");
|
||||
self.iter.pretty(p);
|
||||
self.iter().pretty(p);
|
||||
p.push(' ');
|
||||
self.body.pretty(p);
|
||||
self.body().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
impl Pretty for ForPattern {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
match self {
|
||||
Self::Value(v) => v.pretty(p),
|
||||
Self::KeyValue(k, v) => {
|
||||
k.pretty(p);
|
||||
p.push_str(", ");
|
||||
v.pretty(p);
|
||||
}
|
||||
if let Some(key) = self.key() {
|
||||
key.pretty(p);
|
||||
p.push_str(", ");
|
||||
}
|
||||
|
||||
self.value().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
impl Pretty for ImportExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push_str("import ");
|
||||
self.imports.pretty(p);
|
||||
self.imports().pretty(p);
|
||||
p.push_str(" from ");
|
||||
self.path.pretty(p);
|
||||
self.path().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
@ -489,7 +495,7 @@ impl Pretty for Imports {
|
||||
impl Pretty for IncludeExpr {
|
||||
fn pretty(&self, p: &mut Printer) {
|
||||
p.push_str("include ");
|
||||
self.path.pretty(p);
|
||||
self.path().pretty(p);
|
||||
}
|
||||
}
|
||||
|
||||
@ -502,7 +508,6 @@ impl Pretty for Ident {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::parse::parse;
|
||||
use crate::source::SourceFile;
|
||||
|
||||
#[track_caller]
|
||||
@ -513,7 +518,7 @@ mod tests {
|
||||
#[track_caller]
|
||||
fn test_parse(src: &str, expected: &str) {
|
||||
let source = SourceFile::detached(src);
|
||||
let ast = parse(&source).unwrap();
|
||||
let ast: Markup = source.ast().unwrap();
|
||||
let found = pretty(&ast);
|
||||
if found != expected {
|
||||
println!("tree: {:#?}", ast);
|
||||
|
@ -109,6 +109,11 @@ impl Span {
|
||||
*self = self.join(other)
|
||||
}
|
||||
|
||||
/// Test whether a position is within the span.
|
||||
pub fn contains_pos(&self, pos: Pos) -> bool {
|
||||
self.start <= pos && self.end >= pos
|
||||
}
|
||||
|
||||
/// Test whether one span completely contains the other span.
|
||||
pub fn contains(self, other: Self) -> bool {
|
||||
self.source == other.source && self.start <= other.start && self.end >= other.end
|
||||
@ -118,6 +123,16 @@ impl Span {
|
||||
pub fn to_range(self) -> Range<usize> {
|
||||
self.start.to_usize() .. self.end.to_usize()
|
||||
}
|
||||
|
||||
/// A new span at the position of this span's start.
|
||||
pub fn at_start(&self) -> Span {
|
||||
Self::at(self.source, self.start)
|
||||
}
|
||||
|
||||
/// A new span at the position of this span's end.
|
||||
pub fn at_end(&self) -> Span {
|
||||
Self::at(self.source, self.end)
|
||||
}
|
||||
}
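// Illustrative sketch (assumes `Pos` is `Copy`): descending to the innermost
// red node covering a cursor position with the `contains_pos` helper above.
fn node_at(node: RedNode, pos: Pos) -> RedNode {
    let inner = node
        .children()
        .find(|child| child.own().span().contains_pos(pos))
        .map(RedTicket::own);
    match inner {
        Some(child) => node_at(child, pos),
        None => node,
    }
}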
|
||||
|
||||
impl Debug for Span {
|
||||
|
@ -1,188 +1,38 @@
|
||||
use crate::geom::{AngularUnit, LengthUnit};
|
||||
|
||||
/// A minimal semantic entity of source code.
|
||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
||||
pub enum Token<'s> {
|
||||
/// A left square bracket: `[`.
|
||||
LeftBracket,
|
||||
/// A right square bracket: `]`.
|
||||
RightBracket,
|
||||
/// A left curly brace: `{`.
|
||||
LeftBrace,
|
||||
/// A right curly brace: `}`.
|
||||
RightBrace,
|
||||
/// A left round parenthesis: `(`.
|
||||
LeftParen,
|
||||
/// A right round parenthesis: `)`.
|
||||
RightParen,
|
||||
/// An asterisk: `*`.
|
||||
Star,
|
||||
/// An underscore: `_`.
|
||||
Underscore,
|
||||
/// A tilde: `~`.
|
||||
Tilde,
|
||||
/// Two hyphens: `--`.
|
||||
HyphHyph,
|
||||
/// Three hyphens: `---`.
|
||||
HyphHyphHyph,
|
||||
/// A backslash followed by nothing or whitespace: `\`.
|
||||
Backslash,
|
||||
/// A comma: `,`.
|
||||
Comma,
|
||||
/// A semicolon: `;`.
|
||||
Semicolon,
|
||||
/// A colon: `:`.
|
||||
Colon,
|
||||
/// A plus: `+`.
|
||||
Plus,
|
||||
/// A hyphen: `-`.
|
||||
Hyph,
|
||||
/// A slash: `/`.
|
||||
Slash,
|
||||
/// A single equals sign: `=`.
|
||||
Eq,
|
||||
/// Two equals signs: `==`.
|
||||
EqEq,
|
||||
/// An exclamation mark followed by an equals sign: `!=`.
|
||||
ExclEq,
|
||||
/// A less-than sign: `<`.
|
||||
Lt,
|
||||
/// A less-than sign followed by an equals sign: `<=`.
|
||||
LtEq,
|
||||
/// A greater-than sign: `>`.
|
||||
Gt,
|
||||
/// A greater-than sign followed by an equals sign: `>=`.
|
||||
GtEq,
|
||||
/// A plus followed by an equals sign: `+=`.
|
||||
PlusEq,
|
||||
/// A hyphen followed by an equals sign: `-=`.
|
||||
HyphEq,
|
||||
/// An asterisk followed by an equals sign: `*=`.
|
||||
StarEq,
|
||||
/// A slash followed by an equals sign: `/=`.
|
||||
SlashEq,
|
||||
/// Two dots: `..`.
|
||||
Dots,
|
||||
/// An equals sign followed by a greater-than sign: `=>`.
|
||||
Arrow,
|
||||
/// The `not` operator.
|
||||
Not,
|
||||
/// The `and` operator.
|
||||
And,
|
||||
/// The `or` operator.
|
||||
Or,
|
||||
/// The `with` operator.
|
||||
With,
|
||||
/// The none literal: `none`.
|
||||
None,
|
||||
/// The auto literal: `auto`.
|
||||
Auto,
|
||||
/// The `let` keyword.
|
||||
Let,
|
||||
/// The `if` keyword.
|
||||
If,
|
||||
/// The `else` keyword.
|
||||
Else,
|
||||
/// The `for` keyword.
|
||||
For,
|
||||
/// The `in` keyword.
|
||||
In,
|
||||
/// The `while` keyword.
|
||||
While,
|
||||
/// The `break` keyword.
|
||||
Break,
|
||||
/// The `continue` keyword.
|
||||
Continue,
|
||||
/// The `return` keyword.
|
||||
Return,
|
||||
/// The `import` keyword.
|
||||
Import,
|
||||
/// The `include` keyword.
|
||||
Include,
|
||||
/// The `from` keyword.
|
||||
From,
|
||||
/// One or more whitespace characters.
|
||||
///
|
||||
/// The contained `usize` denotes the number of newlines that were contained
|
||||
/// in the whitespace.
|
||||
Space(usize),
|
||||
/// A consecutive non-markup string.
|
||||
Text(&'s str),
|
||||
/// A slash and the letter "u" followed by a hexadecimal unicode entity
|
||||
/// enclosed in curly braces: `\u{1F5FA}`.
|
||||
UnicodeEscape(UnicodeEscapeToken<'s>),
|
||||
/// An arbitrary number of backticks followed by inner contents, terminated
|
||||
/// with the same number of backticks: `` `...` ``.
|
||||
Raw(RawToken<'s>),
|
||||
/// One or two dollar signs followed by inner contents, terminated with the
|
||||
/// same number of dollar signs.
|
||||
Math(MathToken<'s>),
|
||||
/// A numbering: `23.`.
|
||||
///
|
||||
/// Can also exist without the number: `.`.
|
||||
Numbering(Option<usize>),
|
||||
/// An identifier: `center`.
|
||||
Ident(&'s str),
|
||||
/// A boolean: `true`, `false`.
|
||||
Bool(bool),
|
||||
/// An integer: `120`.
|
||||
Int(i64),
|
||||
/// A floating-point number: `1.2`, `10e-4`.
|
||||
Float(f64),
|
||||
/// A length: `12pt`, `3cm`.
|
||||
Length(f64, LengthUnit),
|
||||
/// An angle: `90deg`.
|
||||
Angle(f64, AngularUnit),
|
||||
/// A percentage: `50%`.
|
||||
///
|
||||
/// _Note_: `50%` is stored as `50.0` here, as in the corresponding
|
||||
/// [literal](super::Lit::Percent).
|
||||
Percent(f64),
|
||||
/// A fraction unit: `3fr`.
|
||||
Fraction(f64),
|
||||
/// A quoted string: `"..."`.
|
||||
Str(StrToken<'s>),
|
||||
/// Two slashes followed by inner contents, terminated with a newline:
|
||||
/// `//<str>\n`.
|
||||
LineComment(&'s str),
|
||||
/// A slash and a star followed by inner contents, terminated with a star
|
||||
/// and a slash: `/*<str>*/`.
|
||||
///
|
||||
/// The comment can contain nested block comments.
|
||||
BlockComment(&'s str),
|
||||
/// Things that are not valid tokens.
|
||||
Invalid(&'s str),
|
||||
}
|
||||
use crate::util::EcoString;
|
||||
|
||||
/// A quoted string token: `"..."`.
|
||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
||||
pub struct StrToken<'s> {
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct StrToken {
|
||||
/// The string inside the quotes.
|
||||
///
|
||||
/// _Note_: If the string contains escape sequences these are not yet
|
||||
/// applied to be able to just store a string slice here instead of
|
||||
/// a `String`. The resolving is done later in the parser.
|
||||
pub string: &'s str,
|
||||
pub string: EcoString,
|
||||
/// Whether the closing quote was present.
|
||||
pub terminated: bool,
|
||||
}
|
||||
|
||||
/// A raw block token: `` `...` ``.
|
||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
||||
pub struct RawToken<'s> {
|
||||
/// The raw text between the backticks.
|
||||
pub text: &'s str,
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct RawToken {
|
||||
/// The raw text in the block.
|
||||
pub text: EcoString,
|
||||
/// The programming language of the raw text.
|
||||
pub lang: Option<EcoString>,
|
||||
/// The number of opening backticks.
|
||||
pub backticks: usize,
|
||||
pub backticks: u8,
|
||||
/// Whether all closing backticks were present.
|
||||
pub terminated: bool,
|
||||
/// Whether to display this as a block.
|
||||
pub block: bool,
|
||||
}
|
||||
|
||||
/// A math formula token: `$2pi + x$` or `$[f'(x) = x^2]$`.
|
||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
||||
pub struct MathToken<'s> {
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct MathToken {
|
||||
/// The formula between the dollars.
|
||||
pub formula: &'s str,
|
||||
pub formula: EcoString,
|
||||
/// Whether the formula is display-level, that is, it is surrounded by
|
||||
/// `$[..]`.
|
||||
pub display: bool,
|
||||
@ -191,86 +41,21 @@ pub struct MathToken<'s> {
|
||||
}
|
||||
|
||||
/// A unicode escape sequence token: `\u{1F5FA}`.
|
||||
#[derive(Debug, Copy, Clone, PartialEq)]
|
||||
pub struct UnicodeEscapeToken<'s> {
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct UnicodeEscapeToken {
|
||||
/// The escape sequence between the braces.
|
||||
pub sequence: &'s str,
|
||||
pub sequence: EcoString,
|
||||
/// The resulting unicode character.
|
||||
pub character: Option<char>,
|
||||
/// Whether the closing brace was present.
|
||||
pub terminated: bool,
|
||||
}
|
||||
|
||||
impl<'s> Token<'s> {
|
||||
/// The English name of this token for use in error messages.
|
||||
pub fn name(self) -> &'static str {
|
||||
match self {
|
||||
Self::LeftBracket => "opening bracket",
|
||||
Self::RightBracket => "closing bracket",
|
||||
Self::LeftBrace => "opening brace",
|
||||
Self::RightBrace => "closing brace",
|
||||
Self::LeftParen => "opening paren",
|
||||
Self::RightParen => "closing paren",
|
||||
Self::Star => "star",
|
||||
Self::Underscore => "underscore",
|
||||
Self::Tilde => "tilde",
|
||||
Self::HyphHyph => "en dash",
|
||||
Self::HyphHyphHyph => "em dash",
|
||||
Self::Backslash => "backslash",
|
||||
Self::Comma => "comma",
|
||||
Self::Semicolon => "semicolon",
|
||||
Self::Colon => "colon",
|
||||
Self::Plus => "plus",
|
||||
Self::Hyph => "minus",
|
||||
Self::Slash => "slash",
|
||||
Self::Eq => "assignment operator",
|
||||
Self::EqEq => "equality operator",
|
||||
Self::ExclEq => "inequality operator",
|
||||
Self::Lt => "less-than operator",
|
||||
Self::LtEq => "less-than or equal operator",
|
||||
Self::Gt => "greater-than operator",
|
||||
Self::GtEq => "greater-than or equal operator",
|
||||
Self::PlusEq => "add-assign operator",
|
||||
Self::HyphEq => "subtract-assign operator",
|
||||
Self::StarEq => "multiply-assign operator",
|
||||
Self::SlashEq => "divide-assign operator",
|
||||
Self::Dots => "dots",
|
||||
Self::Arrow => "arrow",
|
||||
Self::Not => "operator `not`",
|
||||
Self::And => "operator `and`",
|
||||
Self::Or => "operator `or`",
|
||||
Self::With => "operator `with`",
|
||||
Self::None => "`none`",
|
||||
Self::Auto => "`auto`",
|
||||
Self::Let => "keyword `let`",
|
||||
Self::If => "keyword `if`",
|
||||
Self::Else => "keyword `else`",
|
||||
Self::For => "keyword `for`",
|
||||
Self::In => "keyword `in`",
|
||||
Self::While => "keyword `while`",
|
||||
Self::Break => "keyword `break`",
|
||||
Self::Continue => "keyword `continue`",
|
||||
Self::Return => "keyword `return`",
|
||||
Self::Import => "keyword `import`",
|
||||
Self::Include => "keyword `include`",
|
||||
Self::From => "keyword `from`",
|
||||
Self::Space(_) => "space",
|
||||
Self::Text(_) => "text",
|
||||
Self::UnicodeEscape(_) => "unicode escape sequence",
|
||||
Self::Raw(_) => "raw block",
|
||||
Self::Math(_) => "math formula",
|
||||
Self::Numbering(_) => "numbering",
|
||||
Self::Ident(_) => "identifier",
|
||||
Self::Bool(_) => "boolean",
|
||||
Self::Int(_) => "integer",
|
||||
Self::Float(_) => "float",
|
||||
Self::Length(_, _) => "length",
|
||||
Self::Angle(_, _) => "angle",
|
||||
Self::Percent(_) => "percentage",
|
||||
Self::Fraction(_) => "`fr` value",
|
||||
Self::Str(_) => "string",
|
||||
Self::LineComment(_) => "line comment",
|
||||
Self::BlockComment(_) => "block comment",
|
||||
Self::Invalid("*/") => "end of block comment",
|
||||
Self::Invalid(_) => "invalid token",
|
||||
}
|
||||
}
|
||||
/// A unit-bound number token: `1.2em`.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct UnitToken {
|
||||
/// The number part.
|
||||
pub number: std::ops::Range<usize>,
|
||||
/// The unit part.
|
||||
pub unit: std::ops::Range<usize>,
|
||||
}
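// Illustrative sketch: a `UnitToken` only stores index ranges, so the number
// and unit text are sliced out of the tokenized source (`src` is an assumed
// name for that string).
fn split_unit<'s>(src: &'s str, token: &UnitToken) -> (&'s str, &'s str) {
    (&src[token.number.clone()], &src[token.unit.clone()])
}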
|
||||
|
@ -1,263 +0,0 @@
|
||||
//! Mutable and immutable syntax tree traversal.
|
||||
|
||||
use super::*;
|
||||
|
||||
/// Implement the immutable and the mutable visitor version.
|
||||
macro_rules! impl_visitors {
|
||||
($($name:ident($($tts:tt)*) $body:block)*) => {
|
||||
macro_rules! r {
|
||||
(rc: $x:expr) => { $x.as_ref() };
|
||||
($x:expr) => { &$x };
|
||||
}
|
||||
|
||||
impl_visitor! {
|
||||
Visit,
|
||||
immutable,
|
||||
immutably,
|
||||
[$(($name($($tts)*) $body))*]
|
||||
}
|
||||
|
||||
macro_rules! r {
|
||||
(rc: $x:expr) => { std::rc::Rc::make_mut(&mut $x) };
|
||||
($x:expr) => { &mut $x };
|
||||
}
|
||||
|
||||
impl_visitor! {
|
||||
VisitMut,
|
||||
mutable,
|
||||
mutably,
|
||||
[$(($name($($tts)*) $body mut))*] mut
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Implement an immutable or mutable visitor.
|
||||
macro_rules! impl_visitor {
|
||||
(
|
||||
$visit:ident,
|
||||
$mutability:ident,
|
||||
$adjective:ident,
|
||||
[$((
|
||||
$name:ident($v:ident, $node:ident: $ty:ty)
|
||||
$body:block
|
||||
$($fmut:tt)?
|
||||
))*]
|
||||
$($mut:tt)?
|
||||
) => {
|
||||
#[doc = concat!("Visit syntax trees ", stringify!($adjective), ".")]
|
||||
pub trait $visit<'ast> {
|
||||
/// Visit a definition of a binding.
|
||||
///
|
||||
/// Bindings are, for example, left-hand side of let expressions,
|
||||
/// and key/value patterns in for loops.
|
||||
fn visit_binding(&mut self, _: &'ast $($mut)? Ident) {}
|
||||
|
||||
/// Visit the entry into a scope.
|
||||
fn visit_enter(&mut self) {}
|
||||
|
||||
/// Visit the exit from a scope.
|
||||
fn visit_exit(&mut self) {}
|
||||
|
||||
$(fn $name(&mut self, $node: &'ast $($fmut)? $ty) {
|
||||
$mutability::$name(self, $node);
|
||||
})*
|
||||
}
|
||||
|
||||
#[doc = concat!("Visitor functions that are ", stringify!($mutability), ".")]
|
||||
pub mod $mutability {
|
||||
use super::*;
|
||||
$(
|
||||
#[doc = concat!("Visit a node of type [`", stringify!($ty), "`].")]
|
||||
pub fn $name<'ast, V>($v: &mut V, $node: &'ast $($fmut)? $ty)
|
||||
where
|
||||
V: $visit<'ast> + ?Sized
|
||||
$body
|
||||
)*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_visitors! {
|
||||
visit_tree(v, markup: Markup) {
|
||||
for node in markup {
|
||||
v.visit_node(node);
|
||||
}
|
||||
}
|
||||
|
||||
visit_node(v, node: MarkupNode) {
|
||||
match node {
|
||||
MarkupNode::Space => {}
|
||||
MarkupNode::Linebreak(_) => {}
|
||||
MarkupNode::Parbreak(_) => {}
|
||||
MarkupNode::Strong(_) => {}
|
||||
MarkupNode::Emph(_) => {}
|
||||
MarkupNode::Text(_) => {}
|
||||
MarkupNode::Raw(_) => {}
|
||||
MarkupNode::Heading(n) => v.visit_heading(n),
|
||||
MarkupNode::List(n) => v.visit_list(n),
|
||||
MarkupNode::Enum(n) => v.visit_enum(n),
|
||||
MarkupNode::Expr(n) => v.visit_expr(n),
|
||||
}
|
||||
}
|
||||
|
||||
visit_heading(v, heading: HeadingNode) {
|
||||
v.visit_tree(r!(heading.body));
|
||||
}
|
||||
|
||||
visit_list(v, list: ListNode) {
|
||||
v.visit_tree(r!(list.body));
|
||||
}
|
||||
|
||||
visit_enum(v, enum_: EnumNode) {
|
||||
v.visit_tree(r!(enum_.body));
|
||||
}
|
||||
|
||||
visit_expr(v, expr: Expr) {
|
||||
match expr {
|
||||
Expr::Ident(_) => {}
|
||||
Expr::Lit(_) => {},
|
||||
Expr::Array(e) => v.visit_array(e),
|
||||
Expr::Dict(e) => v.visit_dict(e),
|
||||
Expr::Template(e) => v.visit_template(e),
|
||||
Expr::Group(e) => v.visit_group(e),
|
||||
Expr::Block(e) => v.visit_block(e),
|
||||
Expr::Unary(e) => v.visit_unary(e),
|
||||
Expr::Binary(e) => v.visit_binary(e),
|
||||
Expr::Call(e) => v.visit_call(e),
|
||||
Expr::Closure(e) => v.visit_closure(e),
|
||||
Expr::With(e) => v.visit_with(e),
|
||||
Expr::Let(e) => v.visit_let(e),
|
||||
Expr::If(e) => v.visit_if(e),
|
||||
Expr::While(e) => v.visit_while(e),
|
||||
Expr::For(e) => v.visit_for(e),
|
||||
Expr::Import(e) => v.visit_import(e),
|
||||
Expr::Include(e) => v.visit_include(e),
|
||||
}
|
||||
}
|
||||
|
||||
visit_array(v, array: ArrayExpr) {
|
||||
for expr in r!(array.items) {
|
||||
v.visit_expr(expr);
|
||||
}
|
||||
}
|
||||
|
||||
visit_dict(v, dict: DictExpr) {
|
||||
for named in r!(dict.items) {
|
||||
v.visit_expr(r!(named.expr));
|
||||
}
|
||||
}
|
||||
|
||||
visit_template(v, template: TemplateExpr) {
|
||||
v.visit_enter();
|
||||
v.visit_tree(r!(template.body));
|
||||
v.visit_exit();
|
||||
}
|
||||
|
||||
visit_group(v, group: GroupExpr) {
|
||||
v.visit_expr(r!(group.expr));
|
||||
}
|
||||
|
||||
visit_block(v, block: BlockExpr) {
|
||||
v.visit_enter();
|
||||
for expr in r!(block.exprs) {
|
||||
v.visit_expr(expr);
|
||||
}
|
||||
v.visit_exit();
|
||||
}
|
||||
|
||||
visit_binary(v, binary: BinaryExpr) {
|
||||
v.visit_expr(r!(binary.lhs));
|
||||
v.visit_expr(r!(binary.rhs));
|
||||
}
|
||||
|
||||
visit_unary(v, unary: UnaryExpr) {
|
||||
v.visit_expr(r!(unary.expr));
|
||||
}
|
||||
|
||||
visit_call(v, call: CallExpr) {
|
||||
v.visit_expr(r!(call.callee));
|
||||
v.visit_args(r!(call.args));
|
||||
}
|
||||
|
||||
visit_args(v, args: CallArgs) {
|
||||
for arg in r!(args.items) {
|
||||
v.visit_arg(arg);
|
||||
}
|
||||
}
|
||||
|
||||
visit_arg(v, arg: CallArg) {
|
||||
match arg {
|
||||
CallArg::Pos(expr) => v.visit_expr(expr),
|
||||
CallArg::Named(named) => v.visit_expr(r!(named.expr)),
|
||||
CallArg::Spread(expr) => v.visit_expr(expr),
|
||||
}
|
||||
}
|
||||
|
||||
visit_closure(v, closure: ClosureExpr) {
|
||||
for param in r!(closure.params) {
|
||||
v.visit_param(param);
|
||||
}
|
||||
v.visit_expr(r!(rc: closure.body));
|
||||
}
|
||||
|
||||
visit_param(v, param: ClosureParam) {
|
||||
match param {
|
||||
ClosureParam::Pos(binding) => v.visit_binding(binding),
|
||||
ClosureParam::Named(named) => {
|
||||
v.visit_binding(r!(named.name));
|
||||
v.visit_expr(r!(named.expr));
|
||||
}
|
||||
ClosureParam::Sink(binding) => v.visit_binding(binding),
|
||||
}
|
||||
}
|
||||
|
||||
visit_with(v, with_expr: WithExpr) {
|
||||
v.visit_expr(r!(with_expr.callee));
|
||||
v.visit_args(r!(with_expr.args));
|
||||
}
|
||||
|
||||
visit_let(v, let_expr: LetExpr) {
|
||||
if let Some(init) = r!(let_expr.init) {
|
||||
v.visit_expr(init);
|
||||
}
|
||||
v.visit_binding(r!(let_expr.binding));
|
||||
}
|
||||
|
||||
visit_if(v, if_expr: IfExpr) {
|
||||
v.visit_expr(r!(if_expr.condition));
|
||||
v.visit_expr(r!(if_expr.if_body));
|
||||
if let Some(body) = r!(if_expr.else_body) {
|
||||
v.visit_expr(body);
|
||||
}
|
||||
}
|
||||
|
||||
visit_while(v, while_expr: WhileExpr) {
|
||||
v.visit_expr(r!(while_expr.condition));
|
||||
v.visit_expr(r!(while_expr.body));
|
||||
}
|
||||
|
||||
visit_for(v, for_expr: ForExpr) {
|
||||
v.visit_expr(r!(for_expr.iter));
|
||||
match r!(for_expr.pattern) {
|
||||
ForPattern::Value(value) => v.visit_binding(value),
|
||||
ForPattern::KeyValue(key, value) => {
|
||||
v.visit_binding(key);
|
||||
v.visit_binding(value);
|
||||
}
|
||||
}
|
||||
v.visit_expr(r!(for_expr.body));
|
||||
}
|
||||
|
||||
visit_import(v, import_expr: ImportExpr) {
|
||||
v.visit_expr(r!(import_expr.path));
|
||||
if let Imports::Idents(idents) = r!(import_expr.imports) {
|
||||
for ident in idents {
|
||||
v.visit_binding(ident);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
visit_include(v, include_expr: IncludeExpr) {
|
||||
v.visit_expr(r!(include_expr.path));
|
||||
}
|
||||
}
|
@ -72,7 +72,7 @@
|
||||
{(,1)}
|
||||
|
||||
// Missing expression makes named pair incomplete, making this an empty array.
|
||||
// Error: 5 expected expression
|
||||
// Error: 3-5 expected expression, found named pair
|
||||
{(a:)}
|
||||
|
||||
// Named pair after this is already identified as an array.
|
||||
|
@ -72,7 +72,7 @@
|
||||
// Error: 10-12 expected expression, found end of block comment
|
||||
#func(a:1*/)
|
||||
|
||||
// Error: 8 expected comma
|
||||
// Error: 9 expected comma
|
||||
#func(1 2)
|
||||
|
||||
// Error: 7-8 expected identifier
|
||||
|
@ -42,7 +42,7 @@
|
||||
|
||||
// Identified as dictionary due to initial colon.
|
||||
// Error: 4-5 expected named pair, found expression
|
||||
// Error: 5 expected comma
|
||||
// Error: 6 expected comma
|
||||
// Error: 12-16 expected identifier
|
||||
// Error: 17-18 expected expression, found colon
|
||||
{(:1 b:"", true::)}
|
||||
|
@ -79,7 +79,7 @@ This is never reached.
|
||||
// Error: 22 expected keyword `from`
|
||||
#import afrom, "b", c
|
||||
|
||||
// Error: 8 expected import items
|
||||
// Error: 9 expected import items
|
||||
#import from "target.typ"
|
||||
|
||||
// Error: 9-10 expected expression, found assignment operator
|
||||
@ -114,4 +114,5 @@ This is never reached.
|
||||
// An item after a star.
|
||||
// Should output `, a from "target.typ"`.
|
||||
// Error: 10 expected keyword `from`
|
||||
// Error: 10 expected semicolon or line break
|
||||
#import *, a from "target.typ"
|
||||
|
@ -62,7 +62,7 @@
|
||||
#min(.."nope")
|
||||
|
||||
---
|
||||
// Error: 10-14 expected identifier
|
||||
// Error: 8-14 expected identifier
|
||||
#let f(..true) = none
|
||||
|
||||
---
|
||||
@ -70,9 +70,9 @@
|
||||
#let f(..a, ..b) = none
|
||||
|
||||
---
|
||||
// Error: 5-6 spreading is not allowed here
|
||||
// Error: 3-6 spreading is not allowed here
|
||||
{(..x)}
|
||||
|
||||
---
|
||||
// Error: 11-17 spreading is not allowed here
|
||||
// Error: 9-17 spreading is not allowed here
|
||||
{(1, 2, ..(1, 2))}
|
||||
|