mirror of https://github.com/typst/typst
synced 2025-05-15 01:25:28 +08:00

Extract syntax module into typst-syntax crate

This commit is contained in:
parent 7dc605307c
commit f5953887c9

Cargo.lock (generated), 16 lines changed
@@ -2408,6 +2408,7 @@ dependencies = [
  "tracing",
  "ttf-parser",
  "typst-macros",
+ "typst-syntax",
  "unicode-general-category",
  "unicode-ident",
  "unicode-math-class",
@@ -2516,6 +2517,21 @@ dependencies = [
  "syn 2.0.16",
 ]
 
+[[package]]
+name = "typst-syntax"
+version = "0.6.0"
+dependencies = [
+ "comemo",
+ "ecow",
+ "once_cell",
+ "serde",
+ "tracing",
+ "unicode-ident",
+ "unicode-math-class",
+ "unicode-segmentation",
+ "unscanny",
+]
+
 [[package]]
 name = "typst-tests"
 version = "0.6.0"
@@ -7,9 +7,8 @@ use termcolor::{ColorChoice, StandardStream};
 use typst::diag::{bail, SourceError, StrResult};
 use typst::doc::Document;
 use typst::eval::eco_format;
-use typst::file::FileId;
 use typst::geom::Color;
-use typst::syntax::Source;
+use typst::syntax::{FileId, Source};
 use typst::World;
 
 use crate::args::{CompileCommand, DiagnosticFormat};
@@ -168,7 +167,7 @@ fn print_diagnostics(
                 .map(|e| (eco_format!("hint: {e}")).into())
                 .collect(),
         )
-        .with_labels(vec![Label::primary(error.span.id(), error.span.range(world))]);
+        .with_labels(vec![Label::primary(error.span.id(), world.range(error.span))]);
 
     term::emit(&mut w, &config, world, &diag)?;
 
@@ -176,7 +175,7 @@ fn print_diagnostics(
     for point in error.trace {
         let message = point.v.to_string();
         let help = Diagnostic::help().with_message(message).with_labels(vec![
-            Label::primary(point.span.id(), point.span.range(world)),
+            Label::primary(point.span.id(), world.range(point.span)),
         ]);
 
         term::emit(&mut w, &config, world, &help)?;
@@ -5,7 +5,7 @@ use std::path::{Path, PathBuf};
 use codespan_reporting::term::{self, termcolor};
 use termcolor::WriteColor;
 use typst::diag::{PackageError, PackageResult};
-use typst::file::PackageSpec;
+use typst::syntax::PackageSpec;
 
 use super::color_stream;
 
@@ -10,9 +10,8 @@ use same_file::Handle;
 use siphasher::sip128::{Hasher128, SipHasher13};
 use typst::diag::{FileError, FileResult, StrResult};
 use typst::eval::{eco_format, Datetime, Library};
-use typst::file::FileId;
 use typst::font::{Font, FontBook};
-use typst::syntax::Source;
+use typst::syntax::{FileId, Source};
 use typst::util::{Bytes, PathExt};
 use typst::World;
 
@@ -5,10 +5,9 @@ use pulldown_cmark as md;
 use typed_arena::Arena;
 use typst::diag::FileResult;
 use typst::eval::Datetime;
-use typst::file::FileId;
 use typst::font::{Font, FontBook};
 use typst::geom::{Point, Size};
-use typst::syntax::Source;
+use typst::syntax::{FileId, Source};
 use typst::util::Bytes;
 use typst::World;
 use yaml_front_matter::YamlFrontMatter;
@@ -19,8 +19,6 @@ pub use typst::eval::{
     Func, IntoValue, Never, NoneValue, Scope, Str, Symbol, Type, Value, Vm,
 };
 #[doc(no_inline)]
-pub use typst::file::FileId;
-#[doc(no_inline)]
 pub use typst::geom::*;
 #[doc(no_inline)]
 pub use typst::model::{
@@ -30,7 +28,7 @@ pub use typst::model::{
     Unlabellable, Vt,
 };
 #[doc(no_inline)]
-pub use typst::syntax::{Span, Spanned};
+pub use typst::syntax::{FileId, Span, Spanned};
 #[doc(no_inline)]
 pub use typst::util::NonZeroExt;
 #[doc(no_inline)]
crates/typst-syntax/Cargo.toml (new file, 27 lines)
@@ -0,0 +1,27 @@
+[package]
+name = "typst-syntax"
+description = "Parser and syntax tree for Typst."
+categories = ["compilers", "science"]
+keywords = ["typst"]
+version.workspace = true
+rust-version.workspace = true
+authors.workspace = true
+edition.workspace = true
+homepage.workspace = true
+repository.workspace = true
+license.workspace = true
+
+[lib]
+doctest = false
+bench = false
+
+[dependencies]
+comemo = "0.3"
+ecow = "0.1.1"
+once_cell = "1"
+serde = { version = "1", features = ["derive"] }
+tracing = "0.1.37"
+unicode-ident = "1.0"
+unicode-math-class = "0.1"
+unicode-segmentation = "1"
+unscanny = "0.1"
@@ -11,8 +11,6 @@ use unscanny::Scanner;
 use super::{
     is_id_continue, is_id_start, is_newline, split_newlines, Span, SyntaxKind, SyntaxNode,
 };
-use crate::geom::{AbsUnit, AngleUnit};
-use crate::util::NonZeroExt;
 
 /// A typed AST node.
 pub trait AstNode: Sized {
@@ -680,7 +678,7 @@ impl Heading {
             .children()
             .find(|node| node.kind() == SyntaxKind::HeadingMarker)
             .and_then(|node| node.len().try_into().ok())
-            .unwrap_or(NonZeroUsize::ONE)
+            .unwrap_or(NonZeroUsize::new(1).unwrap())
     }
 }
 
@@ -1012,12 +1010,12 @@ impl Numeric {
         let split = text.len() - count;
         let value = text[..split].parse().unwrap_or_default();
         let unit = match &text[split..] {
-            "pt" => Unit::Length(AbsUnit::Pt),
-            "mm" => Unit::Length(AbsUnit::Mm),
-            "cm" => Unit::Length(AbsUnit::Cm),
-            "in" => Unit::Length(AbsUnit::In),
-            "deg" => Unit::Angle(AngleUnit::Deg),
-            "rad" => Unit::Angle(AngleUnit::Rad),
+            "pt" => Unit::Pt,
+            "mm" => Unit::Mm,
+            "cm" => Unit::Cm,
+            "in" => Unit::In,
+            "deg" => Unit::Deg,
+            "rad" => Unit::Rad,
             "em" => Unit::Em,
             "fr" => Unit::Fr,
             "%" => Unit::Percent,
@@ -1031,10 +1029,18 @@ impl Numeric {
 /// Unit of a numeric value.
 #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
 pub enum Unit {
-    /// An absolute length unit.
-    Length(AbsUnit),
-    /// An angular unit.
-    Angle(AngleUnit),
+    /// Points.
+    Pt,
+    /// Millimeters.
+    Mm,
+    /// Centimeters.
+    Cm,
+    /// Inches.
+    In,
+    /// Radians.
+    Rad,
+    /// Degrees.
+    Deg,
     /// Font-relative: `1em` is the same as the font size.
     Em,
     /// Fractions: `fr`.
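The flat `Unit` variants replace `Unit::Length(AbsUnit)` and `Unit::Angle(AngleUnit)` because the extracted syntax crate no longer references the main crate's geometry types; interpreting the units is now the consumer's job. A simplified, standalone sketch of the suffix split that `Numeric` performs above, assuming ASCII unit suffixes (an illustration, not the crate's actual lexer code):

```rust
// Split a numeric literal like "2.5cm" into its value and unit suffix.
// The real lexer computes `count` while scanning; here we recount it,
// treating only ASCII letters and '%' as part of the suffix.
fn split_numeric(text: &str) -> Option<(f64, &str)> {
    let count = text
        .chars()
        .rev()
        .take_while(|c| c.is_ascii_alphabetic() || *c == '%')
        .count();
    let split = text.len() - count;
    let value = text[..split].parse().ok()?;
    Some((value, &text[split..]))
}

fn main() {
    assert_eq!(split_numeric("2.5cm"), Some((2.5, "cm")));
    assert_eq!(split_numeric("30%"), Some((30.0, "%")));
    assert_eq!(split_numeric("1fr"), Some((1.0, "fr")));
}
```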
@@ -2,7 +2,7 @@
 
 use std::collections::HashMap;
 use std::fmt::{self, Debug, Display, Formatter};
-use std::path::{Path, PathBuf};
+use std::path::{Component, Path, PathBuf};
 use std::str::FromStr;
 use std::sync::RwLock;
 
@@ -10,9 +10,7 @@ use ecow::{eco_format, EcoString};
 use once_cell::sync::Lazy;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 
-use crate::diag::{bail, FileError, StrResult};
-use crate::syntax::is_ident;
-use crate::util::PathExt;
+use super::is_ident;
 
 /// The global package-path interner.
 static INTERNER: Lazy<RwLock<Interner>> =
@@ -27,7 +25,7 @@ struct Interner {
 /// An interned pair of a package specification and a path.
 type Pair = &'static (Option<PackageSpec>, PathBuf);
 
-/// Identifies a file.
+/// Identifies a file in a project or package.
 ///
 /// This type is globally interned and thus cheap to copy, compare, and hash.
 #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
@@ -48,7 +46,7 @@ impl FileId {
         );
 
         // Try to find an existing entry that we can reuse.
-        let pair = (package, path.normalize());
+        let pair = (package, normalize_path(path));
         if let Some(&id) = INTERNER.read().unwrap().to_id.get(&pair) {
             return id;
         }
@@ -99,9 +97,9 @@ impl FileId {
     }
 
     /// Resolve a file location relative to this file.
-    pub fn join(self, path: &str) -> StrResult<Self> {
+    pub fn join(self, path: &str) -> Result<Self, EcoString> {
         if self.is_detached() {
-            bail!("cannot access file system from here");
+            Err("cannot access file system from here")?;
         }
 
         let package = self.package().cloned();
@@ -145,6 +143,29 @@ impl Debug for FileId {
     }
 }
 
+/// Lexically normalize a path.
+fn normalize_path(path: &Path) -> PathBuf {
+    let mut out = PathBuf::new();
+    for component in path.components() {
+        match component {
+            Component::CurDir => {}
+            Component::ParentDir => match out.components().next_back() {
+                Some(Component::Normal(_)) => {
+                    out.pop();
+                }
+                _ => out.push(component),
+            },
+            Component::Prefix(_) | Component::RootDir | Component::Normal(_) => {
+                out.push(component)
+            }
+        }
+    }
+    if out.as_os_str().is_empty() {
+        out.push(Component::CurDir);
+    }
+    out
+}
+
 /// Identifies a package.
 #[derive(Debug, Clone, Eq, PartialEq, Hash)]
 pub struct PackageSpec {
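The new `normalize_path` helper stands in for the `PathExt::normalize` extension from the main crate's util module, which the syntax crate can no longer use. It normalizes purely lexically: `.` components disappear, and `..` pops a preceding normal component but is otherwise kept. A self-contained sketch of the expected behaviour (the function body is copied from the hunk above so the assertions compile on their own):

```rust
use std::path::{Component, Path, PathBuf};

// Copy of the private helper added in this commit.
fn normalize_path(path: &Path) -> PathBuf {
    let mut out = PathBuf::new();
    for component in path.components() {
        match component {
            Component::CurDir => {}
            Component::ParentDir => match out.components().next_back() {
                Some(Component::Normal(_)) => {
                    out.pop();
                }
                _ => out.push(component),
            },
            Component::Prefix(_) | Component::RootDir | Component::Normal(_) => {
                out.push(component)
            }
        }
    }
    if out.as_os_str().is_empty() {
        out.push(Component::CurDir);
    }
    out
}

fn main() {
    // `.` vanishes, `..` cancels the preceding normal component.
    assert_eq!(normalize_path(Path::new("a/./b/../c")), PathBuf::from("a/c"));
    // A leading `..` has nothing to cancel and is kept.
    assert_eq!(normalize_path(Path::new("../x")), PathBuf::from("../x"));
    // An empty result collapses to `.`.
    assert_eq!(normalize_path(Path::new("./")), PathBuf::from("."));
}
```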
@@ -153,7 +174,7 @@ pub struct PackageSpec {
     /// The name of the package within its namespace.
     pub name: EcoString,
     /// The package's version.
-    pub version: Version,
+    pub version: PackageVersion,
 }
 
 impl FromStr for PackageSpec {
@@ -162,30 +183,30 @@ impl FromStr for PackageSpec {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         let mut s = unscanny::Scanner::new(s);
         if !s.eat_if('@') {
-            bail!("package specification must start with '@'");
+            Err("package specification must start with '@'")?;
         }
 
         let namespace = s.eat_until('/');
         if namespace.is_empty() {
-            bail!("package specification is missing namespace");
+            Err("package specification is missing namespace")?;
        } else if !is_ident(namespace) {
-            bail!("`{namespace}` is not a valid package namespace");
+            Err(eco_format!("`{namespace}` is not a valid package namespace"))?;
         }
 
         s.eat_if('/');
 
         let name = s.eat_until(':');
         if name.is_empty() {
-            bail!("package specification is missing name");
+            Err("package specification is missing name")?;
         } else if !is_ident(name) {
-            bail!("`{name}` is not a valid package name");
+            Err(eco_format!("`{name}` is not a valid package name"))?;
         }
 
         s.eat_if(':');
 
         let version = s.after();
         if version.is_empty() {
-            bail!("package specification is missing version");
+            Err("package specification is missing version")?;
         }
 
         Ok(Self {
@@ -204,7 +225,7 @@ impl Display for PackageSpec {
 
 /// A package's version.
 #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
-pub struct Version {
+pub struct PackageVersion {
     /// The package's major version.
     pub major: u32,
     /// The package's minor version.
@@ -213,15 +234,16 @@ pub struct Version {
     pub patch: u32,
 }
 
-impl FromStr for Version {
+impl FromStr for PackageVersion {
     type Err = EcoString;
 
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         let mut parts = s.split('.');
         let mut next = |kind| {
-            let Some(part) = parts.next().filter(|s| !s.is_empty()) else {
-                bail!("version number is missing {kind} version");
-            };
+            let part = parts
+                .next()
+                .filter(|s| !s.is_empty())
+                .ok_or_else(|| eco_format!("version number is missing {kind} version"))?;
             part.parse::<u32>()
                 .map_err(|_| eco_format!("`{part}` is not a valid {kind} version"))
         };
@@ -230,74 +252,28 @@ impl FromStr for Version {
         let minor = next("minor")?;
         let patch = next("patch")?;
         if let Some(rest) = parts.next() {
-            bail!("version number has unexpected fourth component: `{rest}`");
+            Err(eco_format!("version number has unexpected fourth component: `{rest}`"))?;
         }
 
         Ok(Self { major, minor, patch })
     }
 }
 
-impl Display for Version {
+impl Display for PackageVersion {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
         write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
     }
 }
 
-impl Serialize for Version {
+impl Serialize for PackageVersion {
     fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
         s.collect_str(self)
     }
 }
 
-impl<'de> Deserialize<'de> for Version {
+impl<'de> Deserialize<'de> for PackageVersion {
     fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
         let string = EcoString::deserialize(d)?;
         string.parse().map_err(serde::de::Error::custom)
     }
 }
 
-/// A parsed package manifest.
-#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
-pub struct PackageManifest {
-    /// Details about the package itself.
-    pub package: PackageInfo,
-}
-
-impl PackageManifest {
-    /// Parse the manifest from raw bytes.
-    pub fn parse(bytes: &[u8]) -> StrResult<Self> {
-        let string = std::str::from_utf8(bytes).map_err(FileError::from)?;
-        toml::from_str(string).map_err(|err| {
-            eco_format!("package manifest is malformed: {}", err.message())
-        })
-    }
-
-    /// Ensure that this manifest is indeed for the specified package.
-    pub fn validate(&self, spec: &PackageSpec) -> StrResult<()> {
-        if self.package.name != spec.name {
-            bail!("package manifest contains mismatched name `{}`", self.package.name);
-        }
-
-        if self.package.version != spec.version {
-            bail!(
-                "package manifest contains mismatched version {}",
-                self.package.version
-            );
-        }
-
-        Ok(())
-    }
-}
-
-/// The `package` key in the manifest.
-///
-/// More fields are specified, but they are not relevant to the compiler.
-#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
-pub struct PackageInfo {
-    /// The name of the package within its namespace.
-    pub name: EcoString,
-    /// The package's version.
-    pub version: Version,
-    /// The path of the entrypoint into the package.
-    pub entrypoint: EcoString,
-}
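`Version` becomes `PackageVersion`, and both it and `PackageSpec` now report plain `EcoString` errors instead of going through `bail!`, since the syntax crate has no access to the diagnostics module. A hedged usage sketch, assuming `typst-syntax` from this commit is on the dependency list and that `EcoString` compares against `&str` as usual:

```rust
use typst_syntax::{PackageSpec, PackageVersion};

fn main() {
    // A package specification has the shape `@namespace/name:version`,
    // exactly as enforced by `PackageSpec::from_str` above.
    let spec: PackageSpec = "@preview/example:0.1.0".parse().unwrap();
    assert_eq!(spec.name, "example");
    assert_eq!(spec.version, PackageVersion { major: 0, minor: 1, patch: 0 });

    // Errors are now plain strings rather than boxed diagnostics.
    assert!("preview/example:0.1.0".parse::<PackageSpec>().is_err()); // missing '@'
    assert!("1.2".parse::<PackageVersion>().is_err()); // patch component missing
}
```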
@@ -649,7 +649,7 @@ fn keyword(ident: &str) -> Option<SyntaxKind> {
     })
 }
 
-/// Whether this character denotes a newline.
+/// Whether a character is interpreted as a newline by Typst.
 #[inline]
 pub fn is_newline(character: char) -> bool {
     matches!(
@@ -716,13 +716,13 @@ pub fn is_ident(string: &str) -> bool {
 
 /// Whether a character can start an identifier.
 #[inline]
-pub(crate) fn is_id_start(c: char) -> bool {
+pub fn is_id_start(c: char) -> bool {
     is_xid_start(c) || c == '_'
 }
 
 /// Whether a character can continue an identifier.
 #[inline]
-pub(crate) fn is_id_continue(c: char) -> bool {
+pub fn is_id_continue(c: char) -> bool {
     is_xid_continue(c) || c == '_' || c == '-'
 }
 
crates/typst-syntax/src/lib.rs (new file, 23 lines)
@@ -0,0 +1,23 @@
+//! Parser and syntax tree for Typst.
+
+pub mod ast;
+
+mod file;
+mod kind;
+mod lexer;
+mod node;
+mod parser;
+mod reparser;
+mod source;
+mod span;
+
+pub use self::file::{FileId, PackageSpec, PackageVersion};
+pub use self::kind::SyntaxKind;
+pub use self::lexer::{is_id_continue, is_id_start, is_ident, is_newline};
+pub use self::node::{LinkedChildren, LinkedNode, SyntaxError, SyntaxNode};
+pub use self::parser::{parse, parse_code, parse_math};
+pub use self::source::Source;
+pub use self::span::{Span, Spanned};
+
+use self::lexer::{split_newlines, LexMode, Lexer};
+use self::parser::{reparse_block, reparse_markup};
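With the module extracted, parsing no longer pulls in the whole compiler. A minimal sketch of using the new crate directly, assuming a path dependency on `crates/typst-syntax` as built by this commit:

```rust
// Cargo.toml (sketch): typst-syntax = { path = "crates/typst-syntax" }
use typst_syntax::{parse, SyntaxKind, SyntaxNode};

fn main() {
    // Parse a small markup snippet into the untyped syntax tree.
    let root: SyntaxNode = parse("= Heading\nSome *strong* text.");
    assert_eq!(root.kind(), SyntaxKind::Markup);
    assert!(root.errors().is_empty());

    // Erroneous input still produces a tree; the errors hang off its nodes.
    let broken = parse("#let x = ");
    assert!(broken.erroneous());
    for error in broken.errors() {
        eprintln!("syntax error: {}", error.message);
    }
}
```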
@@ -6,9 +6,7 @@ use std::sync::Arc;
 use ecow::EcoString;
 
 use super::ast::AstNode;
-use super::{Span, SyntaxKind};
-use crate::diag::SourceError;
-use crate::file::FileId;
+use super::{FileId, Span, SyntaxKind};
 
 /// A node in the untyped syntax tree.
 #[derive(Clone, Eq, PartialEq, Hash)]
@@ -60,7 +58,7 @@ impl SyntaxNode {
         match &self.0 {
             Repr::Leaf(leaf) => leaf.len(),
             Repr::Inner(inner) => inner.len,
-            Repr::Error(error) => error.len(),
+            Repr::Error(node) => node.len(),
         }
     }
 
@@ -69,19 +67,19 @@ impl SyntaxNode {
         match &self.0 {
             Repr::Leaf(leaf) => leaf.span,
             Repr::Inner(inner) => inner.span,
-            Repr::Error(error) => error.span,
+            Repr::Error(node) => node.error.span,
         }
     }
 
-    /// The text of the node if it is a leaf node.
+    /// The text of the node if it is a leaf or error node.
     ///
     /// Returns the empty string if this is an inner node.
     pub fn text(&self) -> &EcoString {
         static EMPTY: EcoString = EcoString::new();
         match &self.0 {
             Repr::Leaf(leaf) => &leaf.text,
-            Repr::Error(error) => &error.text,
             Repr::Inner(_) => &EMPTY,
+            Repr::Error(node) => &node.text,
         }
     }
 
@@ -91,10 +89,10 @@ impl SyntaxNode {
     pub fn into_text(self) -> EcoString {
         match self.0 {
             Repr::Leaf(leaf) => leaf.text,
-            Repr::Error(error) => error.text.clone(),
-            Repr::Inner(node) => {
-                node.children.iter().cloned().map(Self::into_text).collect()
+            Repr::Inner(inner) => {
+                inner.children.iter().cloned().map(Self::into_text).collect()
             }
+            Repr::Error(node) => node.text.clone(),
         }
     }
 
@@ -130,27 +128,19 @@ impl SyntaxNode {
     pub fn erroneous(&self) -> bool {
         match &self.0 {
             Repr::Leaf(_) => false,
-            Repr::Inner(node) => node.erroneous,
+            Repr::Inner(inner) => inner.erroneous,
             Repr::Error(_) => true,
         }
     }
 
-    /// Adds a user-presentable hint if this is an error node.
-    pub fn hint(&mut self, hint: impl Into<EcoString>) {
-        if let Repr::Error(error) = &mut self.0 {
-            Arc::make_mut(error).hint(hint);
-        }
-    }
-
     /// The error messages for this node and its descendants.
-    pub fn errors(&self) -> Vec<SourceError> {
+    pub fn errors(&self) -> Vec<SyntaxError> {
         if !self.erroneous() {
             return vec![];
         }
 
-        if let Repr::Error(error) = &self.0 {
-            vec![SourceError::new(error.span, error.message.clone())
-                .with_hints(error.hints.to_owned())]
+        if let Repr::Error(node) = &self.0 {
+            vec![node.error.clone()]
         } else {
             self.children()
                 .filter(|node| node.erroneous())
@@ -159,12 +149,19 @@ impl SyntaxNode {
         }
     }
 
+    /// Add a user-presentable hint if this is an error node.
+    pub fn hint(&mut self, hint: impl Into<EcoString>) {
+        if let Repr::Error(node) = &mut self.0 {
+            Arc::make_mut(node).hint(hint);
+        }
+    }
+
     /// Set a synthetic span for the node and all its descendants.
     pub fn synthesize(&mut self, span: Span) {
         match &mut self.0 {
             Repr::Leaf(leaf) => leaf.span = span,
             Repr::Inner(inner) => Arc::make_mut(inner).synthesize(span),
-            Repr::Error(error) => Arc::make_mut(error).span = span,
+            Repr::Error(node) => Arc::make_mut(node).error.span = span,
         }
     }
 }
@@ -209,7 +206,7 @@ impl SyntaxNode {
         match &mut self.0 {
             Repr::Leaf(leaf) => leaf.span = mid,
             Repr::Inner(inner) => Arc::make_mut(inner).numberize(id, None, within)?,
-            Repr::Error(error) => Arc::make_mut(error).span = mid,
+            Repr::Error(node) => Arc::make_mut(node).error.span = mid,
         }
 
         Ok(())
@@ -271,9 +268,9 @@ impl SyntaxNode {
     /// The upper bound of assigned numbers in this subtree.
     pub(super) fn upper(&self) -> u64 {
         match &self.0 {
-            Repr::Inner(inner) => inner.upper,
             Repr::Leaf(leaf) => leaf.span.number() + 1,
-            Repr::Error(error) => error.span.number() + 1,
+            Repr::Inner(inner) => inner.upper,
+            Repr::Error(node) => node.error.span.number() + 1,
         }
     }
 }
@@ -281,8 +278,8 @@ impl SyntaxNode {
 impl Debug for SyntaxNode {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
         match &self.0 {
-            Repr::Inner(node) => node.fmt(f),
-            Repr::Leaf(node) => node.fmt(f),
+            Repr::Leaf(leaf) => leaf.fmt(f),
+            Repr::Inner(inner) => inner.fmt(f),
             Repr::Error(node) => node.fmt(f),
         }
     }
@@ -541,25 +538,22 @@ impl Debug for InnerNode {
 /// An error node in the untyped syntax tree.
 #[derive(Clone, Eq, PartialEq, Hash)]
 struct ErrorNode {
-    /// The error message.
-    message: EcoString,
     /// The source text of the node.
     text: EcoString,
-    /// The node's span.
-    span: Span,
-    /// Additonal hints to the user, indicating how this error could be avoided
-    /// or worked around.
-    hints: Vec<EcoString>,
+    /// The syntax error.
+    error: SyntaxError,
 }
 
 impl ErrorNode {
     /// Create new error node.
     fn new(message: impl Into<EcoString>, text: impl Into<EcoString>) -> Self {
         Self {
-            message: message.into(),
             text: text.into(),
+            error: SyntaxError {
                 span: Span::detached(),
+                message: message.into(),
                 hints: vec![],
+            },
         }
     }
 
@@ -570,16 +564,28 @@ impl ErrorNode {
 
     /// Add a user-presentable hint to this error node.
     fn hint(&mut self, hint: impl Into<EcoString>) {
-        self.hints.push(hint.into());
+        self.error.hints.push(hint.into());
     }
 }
 
 impl Debug for ErrorNode {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
-        write!(f, "Error: {:?} ({})", self.text, self.message)
+        write!(f, "Error: {:?} ({})", self.text, self.error.message)
     }
 }
 
+/// A syntactical error.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct SyntaxError {
+    /// The node's span.
+    pub span: Span,
+    /// The error message.
+    pub message: EcoString,
+    /// Additonal hints to the user, indicating how this error could be avoided
+    /// or worked around.
+    pub hints: Vec<EcoString>,
+}
+
 /// A syntax node in a context.
 ///
 /// Knows its exact offset in the file and provides access to its
@@ -870,7 +876,7 @@ impl std::error::Error for Unnumberable {}
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::syntax::Source;
+    use crate::Source;
 
     #[test]
     fn test_linked_node() {
@@ -7,15 +7,15 @@ use unicode_math_class::MathClass;
 use super::{ast, is_newline, LexMode, Lexer, SyntaxKind, SyntaxNode};
 
 /// Parse a source file.
+#[tracing::instrument(skip_all)]
 pub fn parse(text: &str) -> SyntaxNode {
     let mut p = Parser::new(text, 0, LexMode::Markup);
     markup(&mut p, true, 0, |_| false);
     p.finish().into_iter().next().unwrap()
 }
 
-/// Parse code directly.
-///
-/// This is only used for syntax highlighting.
+/// Parse top-level code.
+#[tracing::instrument(skip_all)]
 pub fn parse_code(text: &str) -> SyntaxNode {
     let mut p = Parser::new(text, 0, LexMode::Code);
     let m = p.marker();
@@ -25,6 +25,14 @@ pub fn parse_code(text: &str) -> SyntaxNode {
     p.finish().into_iter().next().unwrap()
 }
 
+/// Parse top-level math.
+#[tracing::instrument(skip_all)]
+pub fn parse_math(text: &str) -> SyntaxNode {
+    let mut p = Parser::new(text, 0, LexMode::Math);
+    math(&mut p, |_| false);
+    p.finish().into_iter().next().unwrap()
+}
+
 fn markup(
     p: &mut Parser,
     mut at_start: bool,
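`parse_math` joins `parse` and `parse_code` as a public entry point that starts the lexer in math mode. A short sketch, under the same assumption that the extracted crate is a dependency:

```rust
use typst_syntax::{parse_code, parse_math};

fn main() {
    // Top-level code, e.g. for highlighting a code snippet on its own.
    let code = parse_code("let x = 1 + 2");
    assert!(!code.erroneous());

    // Top-level math, without the surrounding `$ ... $` delimiters.
    let math = parse_math("a^2 + b^2 = c^2");
    assert!(!math.erroneous());
    println!("{math:#?}");
}
```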
@@ -7,12 +7,8 @@ use std::sync::Arc;
 
 use comemo::Prehashed;
 
-use super::ast::Markup;
 use super::reparser::reparse;
-use super::{is_newline, parse, LinkedNode, Span, SyntaxNode};
-use crate::diag::SourceResult;
-use crate::file::FileId;
-use crate::util::StrExt;
+use super::{is_newline, parse, FileId, LinkedNode, Span, SyntaxNode};
 
 /// A source file.
 ///
@@ -68,16 +64,6 @@ impl Source {
         &self.0.root
     }
 
-    /// The root node of the file's typed abstract syntax tree.
-    pub fn ast(&self) -> SourceResult<Markup> {
-        let errors = self.root().errors();
-        if errors.is_empty() {
-            Ok(self.root().cast().expect("root node must be markup"))
-        } else {
-            Err(Box::new(errors))
-        }
-    }
-
     /// The id of the source file.
     pub fn id(&self) -> FileId {
         self.0.id
@@ -148,7 +134,7 @@ impl Source {
     /// Get the length of the file in UTF-16 code units.
     pub fn len_utf16(&self) -> usize {
         let last = self.0.lines.last().unwrap();
-        last.utf16_idx + self.0.text[last.byte_idx..].len_utf16()
+        last.utf16_idx + len_utf16(&self.0.text[last.byte_idx..])
     }
 
     /// Get the length of the file in lines.
@@ -163,12 +149,22 @@ impl Source {
         LinkedNode::new(self.root()).find(span)
     }
 
+    /// Get the byte range for the given span in this file.
+    ///
+    /// Panics if the span does not point into this source file.
+    #[track_caller]
+    pub fn range(&self, span: Span) -> Range<usize> {
+        self.find(span)
+            .expect("span does not point into this source file")
+            .range()
+    }
+
     /// Return the index of the UTF-16 code unit at the byte index.
     pub fn byte_to_utf16(&self, byte_idx: usize) -> Option<usize> {
         let line_idx = self.byte_to_line(byte_idx)?;
         let line = self.0.lines.get(line_idx)?;
         let head = self.0.text.get(line.byte_idx..byte_idx)?;
-        Some(line.utf16_idx + head.len_utf16())
+        Some(line.utf16_idx + len_utf16(head))
     }
 
     /// Return the index of the line that contains the given byte index.
@@ -306,6 +302,12 @@ fn lines_from(
     })
 }
 
+/// The number of code units this string would use if it was encoded in
+/// UTF16. This runs in linear time.
+fn len_utf16(string: &str) -> usize {
+    string.chars().map(char::len_utf16).sum()
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
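`Source` now relies on a private free function instead of the `StrExt::len_utf16` extension from the main crate's util module. The helper simply sums `char::len_utf16` over the string; a self-contained illustration of what that counts:

```rust
// Standalone copy of the private helper added to source.rs.
fn len_utf16(string: &str) -> usize {
    string.chars().map(char::len_utf16).sum()
}

fn main() {
    assert_eq!(len_utf16("abc"), 3);
    assert_eq!(len_utf16("é"), 1);  // one UTF-16 code unit, two UTF-8 bytes
    assert_eq!(len_utf16("𝕏"), 2);  // outside the BMP: a surrogate pair
    assert_eq!(len_utf16("a𝕏"), 3);
}
```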
@@ -2,15 +2,13 @@ use std::fmt::{self, Debug, Formatter};
 use std::num::NonZeroU64;
 use std::ops::Range;
 
-use super::Source;
-use crate::file::FileId;
-use crate::World;
+use super::FileId;
 
 /// A unique identifier for a syntax node.
 ///
 /// This is used throughout the compiler to track which source section an error
-/// or element stems from. Can be [mapped back](Self::range) to a byte range for
-/// user facing display.
+/// or element stems from. Can be [mapped back](super::Source::range) to a byte
+/// range for user facing display.
 ///
 /// During editing, the span values stay mostly stable, even for nodes behind an
 /// insertion. This is not true for simple ranges as they would shift. Spans can
@@ -79,24 +77,6 @@ impl Span {
     pub const fn is_detached(self) -> bool {
         self.id().is_detached()
     }
-
-    /// Get the byte range for this span.
-    #[track_caller]
-    pub fn range(self, world: &dyn World) -> Range<usize> {
-        let source = world
-            .source(self.id())
-            .expect("span does not point into any source file");
-        self.range_in(&source)
-    }
-
-    /// Get the byte range for this span in the given source file.
-    #[track_caller]
-    pub fn range_in(self, source: &Source) -> Range<usize> {
-        source
-            .find(self)
-            .expect("span does not point into this source file")
-            .range()
-    }
 }
 
 /// A value with a span locating it in the source code.
@@ -17,6 +17,7 @@ bench = false
 
 [dependencies]
 typst-macros = { path = "../typst-macros" }
+typst-syntax = { path = "../typst-syntax" }
 bitflags = { version = "2", features = ["serde"] }
 bytemuck = "1"
 comemo = "0.3"
@@ -8,8 +8,7 @@ use std::string::FromUtf8Error;
 
 use comemo::Tracked;
 
-use crate::file::PackageSpec;
-use crate::syntax::{Span, Spanned};
+use crate::syntax::{PackageSpec, Span, Spanned, SyntaxError};
 use crate::World;
 
 /// Early-return with a [`StrResult`] or [`SourceResult`].
@@ -103,6 +102,17 @@ impl SourceError {
     }
 }
 
+impl From<SyntaxError> for SourceError {
+    fn from(error: SyntaxError) -> Self {
+        Self {
+            span: error.span,
+            message: error.message,
+            trace: vec![],
+            hints: error.hints,
+        }
+    }
+}
+
 /// A part of an error's [trace](SourceError::trace).
 #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
 pub enum Tracepoint {
@@ -151,11 +161,11 @@ impl<T> Trace<T> for SourceResult<T> {
             return errors;
         }
 
-        let trace_range = span.range(&*world);
+        let trace_range = world.range(span);
         for error in errors.iter_mut().filter(|e| !e.span.is_detached()) {
             // Skip traces that surround the error.
             if error.span.id() == span.id() {
-                let error_range = error.span.range(&*world);
+                let error_range = world.range(error.span);
                 if trace_range.start <= error_range.start
                     && trace_range.end >= error_range.end
                 {
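`SyntaxNode::errors` now yields `SyntaxError`s from typst-syntax, and the `From` impl added above bridges them into the compiler's `SourceError`. A hedged sketch of that conversion at a call site, assuming the `typst::syntax` re-export stays in place:

```rust
use typst::diag::SourceError;
use typst::syntax::parse;

/// Collect parse errors as compiler diagnostics, mirroring what `eval` and
/// `eval_string` now do with `errors.into_iter().map(Into::into)`.
fn parse_errors(text: &str) -> Vec<SourceError> {
    parse(text).errors().into_iter().map(Into::into).collect()
}

fn main() {
    assert!(parse_errors("= A valid heading").is_empty());
    assert!(!parse_errors("#let x = ").is_empty());
}
```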
@@ -11,10 +11,9 @@ use super::{
     Value, Vm,
 };
 use crate::diag::{bail, SourceResult, StrResult};
-use crate::file::FileId;
 use crate::model::{DelayedErrors, ElemFunc, Introspector, Locator, Vt};
 use crate::syntax::ast::{self, AstNode, Expr, Ident};
-use crate::syntax::{Span, SyntaxNode};
+use crate::syntax::{FileId, Span, SyntaxNode};
 use crate::World;
 
 /// An evaluatable function.
@@ -380,8 +379,9 @@ impl Closure {
                 }
                 ast::Pattern::Normal(_) => unreachable!(),
                 _ => {
-                    pattern.define(
+                    super::define_pattern(
                         &mut vm,
+                        pattern,
                         args.expect::<Value>("pattern parameter")?,
                     )?;
                 }
@@ -61,19 +61,22 @@ use std::path::Path;
 
 use comemo::{Track, Tracked, TrackedMut, Validate};
 use ecow::{EcoString, EcoVec};
+use serde::{Deserialize, Serialize};
 use unicode_segmentation::UnicodeSegmentation;
 
 use self::func::{CapturesVisitor, Closure};
 use crate::diag::{
-    bail, error, At, SourceError, SourceResult, StrResult, Trace, Tracepoint,
+    bail, error, At, FileError, SourceError, SourceResult, StrResult, Trace, Tracepoint,
 };
-use crate::file::{FileId, PackageManifest, PackageSpec};
 use crate::model::{
     Content, DelayedErrors, Introspector, Label, Locator, Recipe, ShowableSelector,
     Styles, Transform, Unlabellable, Vt,
 };
 use crate::syntax::ast::{self, AstNode};
-use crate::syntax::{parse_code, Source, Span, Spanned, SyntaxKind, SyntaxNode};
+use crate::syntax::{
+    parse_code, FileId, PackageSpec, PackageVersion, Source, Span, Spanned, SyntaxKind,
+    SyntaxNode,
+};
 use crate::World;
 
 const MAX_ITERATIONS: usize = 10_000;
@@ -114,13 +117,16 @@ pub fn eval(
     let route = Route::insert(route, id);
     let scopes = Scopes::new(Some(library));
     let mut vm = Vm::new(vt, route.track(), id, scopes);
-    let root = match source.root().cast::<ast::Markup>() {
-        Some(markup) if vm.traced.is_some() => markup,
-        _ => source.ast()?,
-    };
+    let root = source.root();
+    let errors = root.errors();
+    if !errors.is_empty() && vm.traced.is_none() {
+        return Err(Box::new(errors.into_iter().map(Into::into).collect()));
+    }
 
     // Evaluate the module.
-    let result = root.eval(&mut vm);
+    let markup = root.cast::<ast::Markup>().unwrap();
+    let result = markup.eval(&mut vm);
 
     // Handle control flow.
     if let Some(flow) = vm.flow {
@@ -146,7 +152,7 @@ pub fn eval_string(
 
     let errors = root.errors();
     if !errors.is_empty() {
-        return Err(Box::new(errors));
+        return Err(Box::new(errors.into_iter().map(Into::into).collect()));
     }
 
     // Prepare VT.
@@ -506,7 +512,11 @@ impl Eval for ast::Expr {
     }
 }
 
-impl ast::Expr {
+trait ExprExt {
+    fn eval_display(&self, vm: &mut Vm) -> SourceResult<Content>;
+}
+
+impl ExprExt for ast::Expr {
     fn eval_display(&self, vm: &mut Vm) -> SourceResult<Content> {
         Ok(self.eval(vm)?.display().spanned(self.span()))
     }
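`eval_display` used to be an inherent method on `ast::Expr`; with `ast::Expr` now defined in typst-syntax, the evaluator cannot add inherent methods to the type anymore, so it switches to a local extension trait. The same pattern in miniature, with hypothetical types that are not part of the commit:

```rust
// Extension-trait pattern: `String` is defined in another crate (like
// `ast::Expr` now is), so we cannot give it an inherent method, but a local
// trait plus a blanket-free impl works fine.
trait Shout {
    fn shout(&self) -> String;
}

impl Shout for String {
    fn shout(&self) -> String {
        format!("{}!", self.to_uppercase())
    }
}

fn main() {
    let s = String::from("hello");
    assert_eq!(s.shout(), "HELLO!");
}
```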
@@ -1013,73 +1023,71 @@ impl Eval for ast::Binary {
     #[tracing::instrument(name = "Binary::eval", skip_all)]
     fn eval(&self, vm: &mut Vm) -> SourceResult<Self::Output> {
         match self.op() {
-            ast::BinOp::Add => self.apply(vm, ops::add),
-            ast::BinOp::Sub => self.apply(vm, ops::sub),
-            ast::BinOp::Mul => self.apply(vm, ops::mul),
-            ast::BinOp::Div => self.apply(vm, ops::div),
-            ast::BinOp::And => self.apply(vm, ops::and),
-            ast::BinOp::Or => self.apply(vm, ops::or),
-            ast::BinOp::Eq => self.apply(vm, ops::eq),
-            ast::BinOp::Neq => self.apply(vm, ops::neq),
-            ast::BinOp::Lt => self.apply(vm, ops::lt),
-            ast::BinOp::Leq => self.apply(vm, ops::leq),
-            ast::BinOp::Gt => self.apply(vm, ops::gt),
-            ast::BinOp::Geq => self.apply(vm, ops::geq),
-            ast::BinOp::In => self.apply(vm, ops::in_),
-            ast::BinOp::NotIn => self.apply(vm, ops::not_in),
-            ast::BinOp::Assign => self.assign(vm, |_, b| Ok(b)),
-            ast::BinOp::AddAssign => self.assign(vm, ops::add),
-            ast::BinOp::SubAssign => self.assign(vm, ops::sub),
-            ast::BinOp::MulAssign => self.assign(vm, ops::mul),
-            ast::BinOp::DivAssign => self.assign(vm, ops::div),
+            ast::BinOp::Add => apply_binary_expr(self, vm, ops::add),
+            ast::BinOp::Sub => apply_binary_expr(self, vm, ops::sub),
+            ast::BinOp::Mul => apply_binary_expr(self, vm, ops::mul),
+            ast::BinOp::Div => apply_binary_expr(self, vm, ops::div),
+            ast::BinOp::And => apply_binary_expr(self, vm, ops::and),
+            ast::BinOp::Or => apply_binary_expr(self, vm, ops::or),
+            ast::BinOp::Eq => apply_binary_expr(self, vm, ops::eq),
+            ast::BinOp::Neq => apply_binary_expr(self, vm, ops::neq),
+            ast::BinOp::Lt => apply_binary_expr(self, vm, ops::lt),
+            ast::BinOp::Leq => apply_binary_expr(self, vm, ops::leq),
+            ast::BinOp::Gt => apply_binary_expr(self, vm, ops::gt),
+            ast::BinOp::Geq => apply_binary_expr(self, vm, ops::geq),
+            ast::BinOp::In => apply_binary_expr(self, vm, ops::in_),
+            ast::BinOp::NotIn => apply_binary_expr(self, vm, ops::not_in),
+            ast::BinOp::Assign => apply_assignment(self, vm, |_, b| Ok(b)),
+            ast::BinOp::AddAssign => apply_assignment(self, vm, ops::add),
+            ast::BinOp::SubAssign => apply_assignment(self, vm, ops::sub),
+            ast::BinOp::MulAssign => apply_assignment(self, vm, ops::mul),
+            ast::BinOp::DivAssign => apply_assignment(self, vm, ops::div),
         }
     }
 }
 
-impl ast::Binary {
-    /// Apply a basic binary operation.
-    fn apply(
-        &self,
+/// Apply a basic binary operation.
+fn apply_binary_expr(
+    binary: &ast::Binary,
     vm: &mut Vm,
     op: fn(Value, Value) -> StrResult<Value>,
 ) -> SourceResult<Value> {
-    let lhs = self.lhs().eval(vm)?;
+    let lhs = binary.lhs().eval(vm)?;
 
     // Short-circuit boolean operations.
-    if (self.op() == ast::BinOp::And && lhs == Value::Bool(false))
-        || (self.op() == ast::BinOp::Or && lhs == Value::Bool(true))
+    if (binary.op() == ast::BinOp::And && lhs == Value::Bool(false))
+        || (binary.op() == ast::BinOp::Or && lhs == Value::Bool(true))
     {
         return Ok(lhs);
     }
 
-    let rhs = self.rhs().eval(vm)?;
-    op(lhs, rhs).at(self.span())
+    let rhs = binary.rhs().eval(vm)?;
+    op(lhs, rhs).at(binary.span())
 }
 
 /// Apply an assignment operation.
-fn assign(
-    &self,
+fn apply_assignment(
+    binary: &ast::Binary,
     vm: &mut Vm,
     op: fn(Value, Value) -> StrResult<Value>,
 ) -> SourceResult<Value> {
-    let rhs = self.rhs().eval(vm)?;
-    let lhs = self.lhs();
+    let rhs = binary.rhs().eval(vm)?;
+    let lhs = binary.lhs();
 
     // An assignment to a dictionary field is different from a normal access
     // since it can create the field instead of just modifying it.
-    if self.op() == ast::BinOp::Assign {
+    if binary.op() == ast::BinOp::Assign {
         if let ast::Expr::FieldAccess(access) = &lhs {
-            let dict = access.access_dict(vm)?;
+            let dict = access_dict(vm, access)?;
             dict.insert(access.field().take().into(), rhs);
             return Ok(Value::None);
         }
     }
 
-    let location = self.lhs().access(vm)?;
+    let location = binary.lhs().access(vm)?;
     let lhs = std::mem::take(&mut *location);
-    *location = op(lhs, rhs).at(self.span())?;
+    *location = op(lhs, rhs).at(binary.span())?;
     Ok(Value::None)
-    }
 }
 
 impl Eval for ast::FieldAccess {
@ -1293,17 +1301,69 @@ impl Eval for ast::Closure {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ast::Pattern {
|
/// Destruct the value into the pattern by binding.
|
||||||
fn destruct_array<F>(
|
fn define_pattern(
|
||||||
&self,
|
|
||||||
vm: &mut Vm,
|
vm: &mut Vm,
|
||||||
|
pattern: &ast::Pattern,
|
||||||
|
value: Value,
|
||||||
|
) -> SourceResult<Value> {
|
||||||
|
destructure(vm, pattern, value, |vm, expr, value| match expr {
|
||||||
|
ast::Expr::Ident(ident) => {
|
||||||
|
vm.define(ident, value);
|
||||||
|
Ok(Value::None)
|
||||||
|
}
|
||||||
|
_ => bail!(expr.span(), "nested patterns are currently not supported"),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Destruct the value into the pattern by assignment.
|
||||||
|
fn assign_pattern(
|
||||||
|
vm: &mut Vm,
|
||||||
|
pattern: &ast::Pattern,
|
||||||
|
value: Value,
|
||||||
|
) -> SourceResult<Value> {
|
||||||
|
destructure(vm, pattern, value, |vm, expr, value| {
|
||||||
|
let location = expr.access(vm)?;
|
||||||
|
*location = value;
|
||||||
|
Ok(Value::None)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Destruct the given value into the pattern and apply the function to each binding.
|
||||||
|
#[tracing::instrument(skip_all)]
|
||||||
|
fn destructure<T>(
|
||||||
|
vm: &mut Vm,
|
||||||
|
pattern: &ast::Pattern,
|
||||||
|
value: Value,
|
||||||
|
f: T,
|
||||||
|
) -> SourceResult<Value>
|
||||||
|
where
|
||||||
|
T: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
|
||||||
|
{
|
||||||
|
match pattern {
|
||||||
|
ast::Pattern::Normal(expr) => {
|
||||||
|
f(vm, expr.clone(), value)?;
|
||||||
|
Ok(Value::None)
|
||||||
|
}
|
||||||
|
ast::Pattern::Placeholder(_) => Ok(Value::None),
|
||||||
|
ast::Pattern::Destructuring(destruct) => match value {
|
||||||
|
Value::Array(value) => destructure_array(vm, pattern, value, f, destruct),
|
||||||
|
Value::Dict(value) => destructure_dict(vm, value, f, destruct),
|
||||||
|
_ => bail!(pattern.span(), "cannot destructure {}", value.type_name()),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn destructure_array<F>(
|
||||||
|
vm: &mut Vm,
|
||||||
|
pattern: &ast::Pattern,
|
||||||
value: Array,
|
value: Array,
|
||||||
f: F,
|
f: F,
|
||||||
destruct: &ast::Destructuring,
|
destruct: &ast::Destructuring,
|
||||||
) -> SourceResult<Value>
|
) -> SourceResult<Value>
|
||||||
where
|
where
|
||||||
F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
|
F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
|
||||||
{
|
{
|
||||||
let mut i = 0;
|
let mut i = 0;
|
||||||
let len = value.as_slice().len();
|
let len = value.as_slice().len();
|
||||||
for p in destruct.bindings() {
|
for p in destruct.bindings() {
|
||||||
@ -1324,7 +1384,7 @@ impl ast::Pattern {
|
|||||||
}
|
}
|
||||||
i += sink_size;
|
i += sink_size;
|
||||||
} else {
|
} else {
|
||||||
bail!(self.span(), "not enough elements to destructure")
|
bail!(pattern.span(), "not enough elements to destructure")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ast::DestructuringKind::Named(named) => {
|
ast::DestructuringKind::Named(named) => {
|
||||||
@ -1340,22 +1400,21 @@ impl ast::Pattern {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if i < len {
|
if i < len {
|
||||||
bail!(self.span(), "too many elements to destructure");
|
bail!(pattern.span(), "too many elements to destructure");
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(Value::None)
|
Ok(Value::None)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn destruct_dict<F>(
|
fn destructure_dict<F>(
|
||||||
&self,
|
|
||||||
vm: &mut Vm,
|
vm: &mut Vm,
|
||||||
dict: Dict,
|
dict: Dict,
|
||||||
f: F,
|
f: F,
|
||||||
destruct: &ast::Destructuring,
|
destruct: &ast::Destructuring,
|
||||||
) -> SourceResult<Value>
|
) -> SourceResult<Value>
|
||||||
where
|
where
|
||||||
F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
|
F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
|
||||||
{
|
{
|
||||||
let mut sink = None;
|
let mut sink = None;
|
||||||
let mut used = HashSet::new();
|
let mut used = HashSet::new();
|
||||||
for p in destruct.bindings() {
|
for p in destruct.bindings() {
|
||||||
@@ -1396,47 +1455,6 @@ impl ast::Pattern {
     }

     Ok(Value::None)
-    }
-
-    /// Destruct the given value into the pattern and apply the function to each binding.
-    #[tracing::instrument(skip_all)]
-    fn apply<T>(&self, vm: &mut Vm, value: Value, f: T) -> SourceResult<Value>
-    where
-        T: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
-    {
-        match self {
-            ast::Pattern::Normal(expr) => {
-                f(vm, expr.clone(), value)?;
-                Ok(Value::None)
-            }
-            ast::Pattern::Placeholder(_) => Ok(Value::None),
-            ast::Pattern::Destructuring(destruct) => match value {
-                Value::Array(value) => self.destruct_array(vm, value, f, destruct),
-                Value::Dict(value) => self.destruct_dict(vm, value, f, destruct),
-                _ => bail!(self.span(), "cannot destructure {}", value.type_name()),
-            },
-        }
-    }
-
-    /// Destruct the value into the pattern by binding.
-    pub fn define(&self, vm: &mut Vm, value: Value) -> SourceResult<Value> {
-        self.apply(vm, value, |vm, expr, value| match expr {
-            ast::Expr::Ident(ident) => {
-                vm.define(ident, value);
-                Ok(Value::None)
-            }
-            _ => bail!(expr.span(), "nested patterns are currently not supported"),
-        })
-    }
-
-    /// Destruct the value into the pattern by assignment.
-    pub fn assign(&self, vm: &mut Vm, value: Value) -> SourceResult<Value> {
-        self.apply(vm, value, |vm, expr, value| {
-            let location = expr.access(vm)?;
-            *location = value;
-            Ok(Value::None)
-        })
-    }
 }

 impl Eval for ast::LetBinding {
@@ -1450,7 +1468,7 @@ impl Eval for ast::LetBinding {
         };

         match self.kind() {
-            ast::LetBindingKind::Normal(pattern) => pattern.define(vm, value),
+            ast::LetBindingKind::Normal(pattern) => define_pattern(vm, &pattern, value),
             ast::LetBindingKind::Closure(ident) => {
                 vm.define(ident, value);
                 Ok(Value::None)
@@ -1464,7 +1482,7 @@ impl Eval for ast::DestructAssignment {

     fn eval(&self, vm: &mut Vm) -> SourceResult<Self::Output> {
         let value = self.value().eval(vm)?;
-        self.pattern().assign(vm, value)?;
+        assign_pattern(vm, &self.pattern(), value)?;
         Ok(Value::None)
     }
 }
@@ -1614,7 +1632,7 @@ impl Eval for ast::ForLoop {

                 #[allow(unused_parens)]
                 for value in $iter {
-                    $pat.define(vm, value.into_value())?;
+                    define_pattern(vm, &$pat, value.into_value())?;

                     let body = self.body();
                     let value = body.eval(vm)?;
@@ -1812,6 +1830,52 @@ fn import_file(vm: &mut Vm, path: &str, span: Span) -> SourceResult<Module> {
         .trace(world, point, span)
 }

+/// A parsed package manifest.
+#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
+struct PackageManifest {
+    /// Details about the package itself.
+    package: PackageInfo,
+}
+
+impl PackageManifest {
+    /// Parse the manifest from raw bytes.
+    fn parse(bytes: &[u8]) -> StrResult<Self> {
+        let string = std::str::from_utf8(bytes).map_err(FileError::from)?;
+        toml::from_str(string).map_err(|err| {
+            eco_format!("package manifest is malformed: {}", err.message())
+        })
+    }
+
+    /// Ensure that this manifest is indeed for the specified package.
+    fn validate(&self, spec: &PackageSpec) -> StrResult<()> {
+        if self.package.name != spec.name {
+            bail!("package manifest contains mismatched name `{}`", self.package.name);
+        }
+
+        if self.package.version != spec.version {
+            bail!(
+                "package manifest contains mismatched version {}",
+                self.package.version
+            );
+        }
+
+        Ok(())
+    }
+}
+
+/// The `package` key in the manifest.
+///
+/// More fields are specified, but they are not relevant to the compiler.
+#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
+struct PackageInfo {
+    /// The name of the package within its namespace.
+    name: EcoString,
+    /// The package's version.
+    version: PackageVersion,
+    /// The path of the entrypoint into the package.
+    entrypoint: EcoString,
+}
+
 impl Eval for ast::LoopBreak {
     type Output = Value;

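The manifest support added above is a thin layer over `toml` and `serde`. A standalone sketch of the same shape, with simplified stand-in types (plain `String`s instead of the crate's `EcoString`/`PackageVersion`, and `expect` instead of its diagnostics); the field names mirror the structs in the hunk:

use serde::Deserialize;

/// Simplified stand-in for the `PackageManifest` above.
#[derive(Debug, Deserialize)]
struct Manifest {
    package: Package,
}

/// Simplified stand-in for the `PackageInfo` above.
#[derive(Debug, Deserialize)]
struct Package {
    name: String,
    version: String, // the real type is a structured `PackageVersion`
    entrypoint: String,
}

fn main() {
    // A minimal manifest with the three fields the compiler cares about.
    let raw = r#"
        [package]
        name = "example"
        version = "0.1.0"
        entrypoint = "lib.typ"
    "#;

    let manifest: Manifest = toml::from_str(raw).expect("package manifest is malformed");
    assert_eq!(manifest.package.name, "example");
    println!("entrypoint: {}", manifest.package.entrypoint);
}

(Requires the `toml` crate and `serde` with the `derive` feature.)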
@@ -1889,21 +1953,22 @@ impl Access for ast::Parenthesized {

 impl Access for ast::FieldAccess {
     fn access<'a>(&self, vm: &'a mut Vm) -> SourceResult<&'a mut Value> {
-        self.access_dict(vm)?.at_mut(&self.field().take()).at(self.span())
+        access_dict(vm, self)?.at_mut(&self.field().take()).at(self.span())
     }
 }

-impl ast::FieldAccess {
-    fn access_dict<'a>(&self, vm: &'a mut Vm) -> SourceResult<&'a mut Dict> {
-        match self.target().access(vm)? {
+fn access_dict<'a>(
+    vm: &'a mut Vm,
+    access: &ast::FieldAccess,
+) -> SourceResult<&'a mut Dict> {
+    match access.target().access(vm)? {
         Value::Dict(dict) => Ok(dict),
         value => bail!(
-            self.target().span(),
+            access.target().span(),
             "expected dictionary, found {}",
             value.type_name(),
         ),
     }
-    }
 }

 impl Access for ast::FuncCall {
@@ -82,8 +82,12 @@ impl Value {
     pub fn numeric(pair: (f64, ast::Unit)) -> Self {
         let (v, unit) = pair;
         match unit {
-            ast::Unit::Length(unit) => Abs::with_unit(v, unit).into_value(),
-            ast::Unit::Angle(unit) => Angle::with_unit(v, unit).into_value(),
+            ast::Unit::Pt => Abs::pt(v).into_value(),
+            ast::Unit::Mm => Abs::mm(v).into_value(),
+            ast::Unit::Cm => Abs::cm(v).into_value(),
+            ast::Unit::In => Abs::inches(v).into_value(),
+            ast::Unit::Rad => Angle::rad(v).into_value(),
+            ast::Unit::Deg => Angle::deg(v).into_value(),
             ast::Unit::Em => Em::new(v).into_value(),
             ast::Unit::Fr => Fr::new(v).into_value(),
             ast::Unit::Percent => Ratio::new(v / 100.0).into_value(),
@@ -3,10 +3,9 @@ use std::num::NonZeroUsize;
 use ecow::EcoString;

 use crate::doc::{Destination, Frame, FrameItem, Meta, Position};
-use crate::file::FileId;
 use crate::geom::{Geometry, Point, Size};
 use crate::model::Introspector;
-use crate::syntax::{LinkedNode, Source, Span, SyntaxKind};
+use crate::syntax::{FileId, LinkedNode, Source, Span, SyntaxKind};
 use crate::World;

 /// Where to [jump](jump_from_click) to.
@@ -40,18 +40,20 @@ extern crate self as typst;
 #[macro_use]
 pub mod util;
 #[macro_use]
-pub mod diag;
-#[macro_use]
 pub mod eval;
+pub mod diag;
 pub mod doc;
 pub mod export;
-pub mod file;
 pub mod font;
 pub mod geom;
 pub mod ide;
 pub mod image;
 pub mod model;
-pub mod syntax;
+
+#[doc(inline)]
+pub use typst_syntax as syntax;
+
+use std::ops::Range;

 use comemo::{Prehashed, Track, TrackedMut};
 use ecow::EcoString;
@@ -59,9 +61,8 @@ use ecow::EcoString;
 use crate::diag::{FileResult, SourceResult};
 use crate::doc::Document;
 use crate::eval::{Datetime, Library, Route, Tracer};
-use crate::file::{FileId, PackageSpec};
 use crate::font::{Font, FontBook};
-use crate::syntax::Source;
+use crate::syntax::{FileId, PackageSpec, Source, Span};
 use crate::util::Bytes;

 /// Compile a source file into a fully layouted document.
@@ -75,7 +76,6 @@ pub fn compile(world: &dyn World) -> SourceResult<Document> {
     let mut tracer = tracer.track_mut();

     // Evaluate the source file into a module.
-    tracing::info!("Starting evaluation");
     let module = eval::eval(
         world,
         route.track(),
@@ -144,4 +144,12 @@ pub trait World {
     fn packages(&self) -> &[(PackageSpec, Option<EcoString>)] {
         &[]
     }
+
+    /// Get the byte range for a span.
+    #[track_caller]
+    fn range(&self, span: Span) -> Range<usize> {
+        self.source(span.id())
+            .expect("span does not point into any source file")
+            .range(span)
+    }
 }
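Two things in the `lib.rs` hunks above matter downstream: the `#[doc(inline)] pub use typst_syntax as syntax;` re-export keeps the old `typst::syntax::...` paths working, and `World` gains a default `range` method for resolving a `Span` to a byte range (which the CLI and test runner now call as `world.range(span)`). A small sketch of the re-exported path (an assumed usage, with the `typst` crate as a dependency; `Source::detached` is the constructor for sources not tied to any file):

// The syntax types now live in `typst-syntax`, but the old paths still
// resolve through the re-export in `lib.rs`.
use typst::syntax::{Source, SyntaxKind};

fn main() {
    // A detached source is not associated with a `FileId`-addressed file.
    let source = Source::detached("Hello, *world*!");
    assert_eq!(source.text(), "Hello, *world*!");
    // The root of a markup source is a `Markup` node.
    assert_eq!(source.root().kind(), SyntaxKind::Markup);
}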
@@ -1,23 +0,0 @@
-//! Syntax definition, parsing, and highlighting.
-
-pub mod ast;
-
-mod kind;
-mod lexer;
-mod node;
-mod parser;
-mod reparser;
-mod source;
-mod span;
-
-pub use self::kind::SyntaxKind;
-pub use self::lexer::{is_ident, is_newline};
-pub use self::node::{LinkedChildren, LinkedNode, SyntaxNode};
-pub use self::parser::{parse, parse_code};
-pub use self::source::Source;
-pub use self::span::{Span, Spanned};
-
-pub(crate) use self::lexer::{is_id_continue, is_id_start};
-
-use self::lexer::{split_newlines, LexMode, Lexer};
-use self::parser::{reparse_block, reparse_markup};
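The module deleted here becomes the root of the new `typst-syntax` crate; the commit is an extraction, so the public surface listed in the `pub use` items above is assumed to carry over unchanged. A quick sketch against two of those helpers, with the extracted crate as a dependency:

fn main() {
    // `is_ident` checks whether a string is a valid Typst identifier;
    // hyphens and underscores are allowed after the first character.
    assert!(typst_syntax::is_ident("my-ident"));
    assert!(!typst_syntax::is_ident("1abc"));

    // `is_newline` classifies newline characters, including the less
    // common ones the lexer has to handle.
    assert!(typst_syntax::is_newline('\n'));
    assert!(!typst_syntax::is_newline(' '));
}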
@@ -53,19 +53,6 @@ impl NonZeroExt for NonZeroUsize {
     };
 }

-/// Extra methods for [`str`].
-pub trait StrExt {
-    /// The number of code units this string would use if it was encoded in
-    /// UTF16. This runs in linear time.
-    fn len_utf16(&self) -> usize;
-}
-
-impl StrExt for str {
-    fn len_utf16(&self) -> usize {
-        self.chars().map(char::len_utf16).sum()
-    }
-}
-
 /// Extra methods for [`Arc`].
 pub trait ArcExt<T> {
     /// Takes the inner value if there is exactly one strong reference and
@@ -123,9 +110,6 @@ where

 /// Extra methods for [`Path`].
 pub trait PathExt {
-    /// Lexically normalize a path.
-    fn normalize(&self) -> PathBuf;
-
     /// Treat `self` as a virtual root relative to which the `path` is resolved.
     ///
     /// Returns `None` if the path lexically escapes the root. The path
@@ -134,28 +118,6 @@ pub trait PathExt {
 }

 impl PathExt for Path {
-    fn normalize(&self) -> PathBuf {
-        let mut out = PathBuf::new();
-        for component in self.components() {
-            match component {
-                Component::CurDir => {}
-                Component::ParentDir => match out.components().next_back() {
-                    Some(Component::Normal(_)) => {
-                        out.pop();
-                    }
-                    _ => out.push(component),
-                },
-                Component::Prefix(_) | Component::RootDir | Component::Normal(_) => {
-                    out.push(component)
-                }
-            }
-        }
-        if out.as_os_str().is_empty() {
-            out.push(Component::CurDir);
-        }
-        out
-    }
-
     fn join_rooted(&self, path: &Path) -> Option<PathBuf> {
         let mut parts: Vec<_> = self.components().collect();
         let root = parts.len();
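For reference, the removed `normalize` helper is purely lexical: it never touches the filesystem, drops `.` components, and lets `..` pop a preceding normal component. A standalone sketch mirroring the removed body, written as a free function over `&Path`:

use std::path::{Component, Path, PathBuf};

/// Lexically normalize a path (mirrors the helper removed above).
fn normalize(path: &Path) -> PathBuf {
    let mut out = PathBuf::new();
    for component in path.components() {
        match component {
            // `.` adds nothing.
            Component::CurDir => {}
            // `..` pops a preceding normal component; otherwise it is kept.
            Component::ParentDir => match out.components().next_back() {
                Some(Component::Normal(_)) => {
                    out.pop();
                }
                _ => out.push(component),
            },
            Component::Prefix(_) | Component::RootDir | Component::Normal(_) => {
                out.push(component)
            }
        }
    }
    if out.as_os_str().is_empty() {
        out.push(Component::CurDir);
    }
    out
}

fn main() {
    assert_eq!(normalize(Path::new("a/./b/../c")).as_path(), Path::new("a/c"));
    assert_eq!(normalize(Path::new(".")).as_path(), Path::new("."));
}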
@@ -41,7 +41,7 @@ them.


 ## Parsing
-The syntax tree and parser are located in `crates/typst/src/syntax`. Parsing is
+The syntax tree and parser are located in `crates/typst-syntax`. Parsing is
 a pure function `&str -> SyntaxNode` without any further dependencies. The
 result is a concrete syntax tree reflecting the whole file structure, including
 whitespace and comments. Parsing cannot fail. If there are syntactic errors, the
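As the updated paragraph above says, parsing is a pure `&str -> SyntaxNode` function with no dependency on the rest of the compiler, which is what makes the crate split clean. A minimal illustration, assuming the new `typst-syntax` crate as a dependency:

fn main() {
    // Parsing never fails: even for broken input you get a complete concrete
    // syntax tree, with any syntax errors recorded inside the tree itself.
    let node = typst_syntax::parse("Hello _world*");
    println!("root kind: {:?}", node.kind());
    println!("{node:#?}");
}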
@@ -2,10 +2,9 @@ use comemo::{Prehashed, Track, Tracked};
 use iai::{black_box, main, Iai};
 use typst::diag::FileResult;
 use typst::eval::{Datetime, Library};
-use typst::file::FileId;
 use typst::font::{Font, FontBook};
 use typst::geom::Color;
-use typst::syntax::Source;
+use typst::syntax::{FileId, Source};
 use typst::util::Bytes;
 use typst::World;
 use unscanny::Scanner;
@@ -17,7 +17,6 @@ use oxipng::{InFile, Options, OutFile};
 use rayon::iter::{ParallelBridge, ParallelIterator};
 use std::cell::OnceCell;
 use tiny_skia as sk;
-use typst::file::FileId;
 use unscanny::Scanner;
 use walkdir::WalkDir;

@@ -26,7 +25,7 @@ use typst::doc::{Document, Frame, FrameItem, Meta};
 use typst::eval::{eco_format, func, Datetime, Library, NoneValue, Value};
 use typst::font::{Font, FontBook};
 use typst::geom::{Abs, Color, RgbaColor, Smart};
-use typst::syntax::{Source, Span, SyntaxNode};
+use typst::syntax::{FileId, Source, Span, SyntaxNode};
 use typst::util::{Bytes, PathExt};
 use typst::World;
 use typst_library::layout::{Margin, PageElem};
@@ -541,7 +540,7 @@ fn test_part(
             .inspect(|error| assert!(!error.span.is_detached()))
             .filter(|error| error.span.id() == source.id())
             .flat_map(|error| {
-                let range = error.span.range(world);
+                let range = world.range(error.span);
                 let output_error =
                     UserOutput::Error(range.clone(), error.message.replace('\\', "/"));
                 let hints = error