Mirror of https://github.com/typst/typst (synced 2025-08-13 14:47:54 +08:00)

Commit f336214711: Merge branch 'typst:main' into bibliography-entry
Cargo.lock (generated)
@@ -2995,6 +2995,7 @@ dependencies = [
  "typst-timing",
  "typst-utils",
  "unicode-math-class",
+ "unicode-normalization",
  "unicode-segmentation",
  "unscanny",
  "usvg",
@@ -129,6 +129,7 @@ unicode-bidi = "0.3.18"
 unicode-ident = "1.0"
 unicode-math-class = "0.1"
 unicode-script = "0.5"
+unicode-normalization = "0.1.24"
 unicode-segmentation = "1"
 unscanny = "0.1"
 ureq = { version = "2", default-features = false, features = ["native-tls", "gzip", "json"] }
@@ -466,7 +466,7 @@ impl<'a> CapturesVisitor<'a> {
            }

            // Code and content blocks create a scope.
-            Some(ast::Expr::Code(_) | ast::Expr::Content(_)) => {
+            Some(ast::Expr::CodeBlock(_) | ast::Expr::ContentBlock(_)) => {
                self.internal.enter();
                for child in node.children() {
                    self.visit(child);
@@ -516,7 +516,7 @@ impl<'a> CapturesVisitor<'a> {

            // A let expression contains a binding, but that binding is only
            // active after the body is evaluated.
-            Some(ast::Expr::Let(expr)) => {
+            Some(ast::Expr::LetBinding(expr)) => {
                if let Some(init) = expr.init() {
                    self.visit(init.to_untyped());
                }
@@ -529,7 +529,7 @@ impl<'a> CapturesVisitor<'a> {
            // A for loop contains one or two bindings in its pattern. These are
            // active after the iterable is evaluated but before the body is
            // evaluated.
-            Some(ast::Expr::For(expr)) => {
+            Some(ast::Expr::ForLoop(expr)) => {
                self.visit(expr.iterable().to_untyped());
                self.internal.enter();

@@ -544,7 +544,7 @@ impl<'a> CapturesVisitor<'a> {

            // An import contains items, but these are active only after the
            // path is evaluated.
-            Some(ast::Expr::Import(expr)) => {
+            Some(ast::Expr::ModuleImport(expr)) => {
                self.visit(expr.source().to_untyped());
                if let Some(ast::Imports::Items(items)) = expr.imports() {
                    for item in items.iter() {
@@ -30,7 +30,7 @@ fn eval_code<'a>(
    while let Some(expr) = exprs.next() {
        let span = expr.span();
        let value = match expr {
-            ast::Expr::Set(set) => {
+            ast::Expr::SetRule(set) => {
                let styles = set.eval(vm)?;
                if vm.flow.is_some() {
                    break;
@@ -39,7 +39,7 @@ fn eval_code<'a>(
                let tail = eval_code(vm, exprs)?.display();
                Value::Content(tail.styled_with_map(styles))
            }
-            ast::Expr::Show(show) => {
+            ast::Expr::ShowRule(show) => {
                let recipe = show.eval(vm)?;
                if vm.flow.is_some() {
                    break;
@@ -94,9 +94,9 @@ impl Eval for ast::Expr<'_> {
            Self::Label(v) => v.eval(vm),
            Self::Ref(v) => v.eval(vm).map(Value::Content),
            Self::Heading(v) => v.eval(vm).map(Value::Content),
-            Self::List(v) => v.eval(vm).map(Value::Content),
-            Self::Enum(v) => v.eval(vm).map(Value::Content),
-            Self::Term(v) => v.eval(vm).map(Value::Content),
+            Self::ListItem(v) => v.eval(vm).map(Value::Content),
+            Self::EnumItem(v) => v.eval(vm).map(Value::Content),
+            Self::TermItem(v) => v.eval(vm).map(Value::Content),
            Self::Equation(v) => v.eval(vm).map(Value::Content),
            Self::Math(v) => v.eval(vm).map(Value::Content),
            Self::MathText(v) => v.eval(vm).map(Value::Content),
@@ -116,8 +116,8 @@ impl Eval for ast::Expr<'_> {
            Self::Float(v) => v.eval(vm),
            Self::Numeric(v) => v.eval(vm),
            Self::Str(v) => v.eval(vm),
-            Self::Code(v) => v.eval(vm),
-            Self::Content(v) => v.eval(vm).map(Value::Content),
+            Self::CodeBlock(v) => v.eval(vm),
+            Self::ContentBlock(v) => v.eval(vm).map(Value::Content),
            Self::Array(v) => v.eval(vm).map(Value::Array),
            Self::Dict(v) => v.eval(vm).map(Value::Dict),
            Self::Parenthesized(v) => v.eval(vm),
@@ -126,19 +126,19 @@ impl Eval for ast::Expr<'_> {
            Self::Closure(v) => v.eval(vm),
            Self::Unary(v) => v.eval(vm),
            Self::Binary(v) => v.eval(vm),
-            Self::Let(v) => v.eval(vm),
-            Self::DestructAssign(v) => v.eval(vm),
-            Self::Set(_) => bail!(forbidden("set")),
-            Self::Show(_) => bail!(forbidden("show")),
+            Self::LetBinding(v) => v.eval(vm),
+            Self::DestructAssignment(v) => v.eval(vm),
+            Self::SetRule(_) => bail!(forbidden("set")),
+            Self::ShowRule(_) => bail!(forbidden("show")),
            Self::Contextual(v) => v.eval(vm).map(Value::Content),
            Self::Conditional(v) => v.eval(vm),
-            Self::While(v) => v.eval(vm),
-            Self::For(v) => v.eval(vm),
-            Self::Import(v) => v.eval(vm),
-            Self::Include(v) => v.eval(vm).map(Value::Content),
-            Self::Break(v) => v.eval(vm),
-            Self::Continue(v) => v.eval(vm),
-            Self::Return(v) => v.eval(vm),
+            Self::WhileLoop(v) => v.eval(vm),
+            Self::ForLoop(v) => v.eval(vm),
+            Self::ModuleImport(v) => v.eval(vm),
+            Self::ModuleInclude(v) => v.eval(vm).map(Value::Content),
+            Self::LoopBreak(v) => v.eval(vm),
+            Self::LoopContinue(v) => v.eval(vm),
+            Self::FuncReturn(v) => v.eval(vm),
        }?
        .spanned(span);

@@ -33,7 +33,7 @@ fn eval_markup<'a>(

    while let Some(expr) = exprs.next() {
        match expr {
-            ast::Expr::Set(set) => {
+            ast::Expr::SetRule(set) => {
                let styles = set.eval(vm)?;
                if vm.flow.is_some() {
                    break;
@@ -41,7 +41,7 @@ fn eval_markup<'a>(

                seq.push(eval_markup(vm, exprs)?.styled_with_map(styles))
            }
-            ast::Expr::Show(show) => {
+            ast::Expr::ShowRule(show) => {
                let recipe = show.eval(vm)?;
                if vm.flow.is_some() {
                    break;
@@ -45,7 +45,7 @@ impl Eval for ast::ShowRule<'_> {

        let transform = self.transform();
        let transform = match transform {
-            ast::Expr::Set(set) => Transformation::Style(set.eval(vm)?),
+            ast::Expr::SetRule(set) => Transformation::Style(set.eval(vm)?),
            expr => expr.eval(vm)?.cast::<Transformation>().at(transform.span())?,
        };

@@ -410,9 +410,17 @@ fn field_access_completions(
        elem.into_iter().chain(Some(ty))
    };

-    // Autocomplete methods from the element's or type's scope.
+    // Autocomplete methods from the element's or type's scope. We only complete
+    // those which have a `self` parameter.
    for (name, binding) in scopes.flat_map(|scope| scope.iter()) {
-        ctx.call_completion(name.clone(), binding.read());
+        let Ok(func) = binding.read().clone().cast::<Func>() else { continue };
+        if func
+            .params()
+            .and_then(|params| params.first())
+            .is_some_and(|param| param.name == "self")
+        {
+            ctx.call_completion(name.clone(), binding.read());
+        }
    }

    if let Some(scope) = value.scope() {
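Note on the hunk above: a scope binding is now offered as a field-access completion only if it is a function whose first parameter is named `self`. A minimal standalone sketch of that predicate (a hypothetical helper, not part of the commit; it assumes only the `Func::params` metadata already used in the hunk):

// Hypothetical helper restating the filter above: methods in a type's or
// element's scope expose a leading `self` parameter, while constructors and
// other scope functions do not.
fn is_method_like(func: &Func) -> bool {
    func.params()
        .and_then(|params| params.first())
        .is_some_and(|param| param.name == "self")
}

This is also why the new IDE test further below expects `#table().` to exclude `cell`: `table.cell` lives in `table`'s scope but takes no `self` parameter.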
@@ -509,7 +517,7 @@ fn complete_imports(ctx: &mut CompletionContext) -> bool {
    // "#import "path.typ": a, b, |".
    if_chain! {
        if let Some(prev) = ctx.leaf.prev_sibling();
-        if let Some(ast::Expr::Import(import)) = prev.get().cast();
+        if let Some(ast::Expr::ModuleImport(import)) = prev.get().cast();
        if let Some(ast::Imports::Items(items)) = import.imports();
        if let Some(source) = prev.children().find(|child| child.is::<ast::Expr>());
        then {
@@ -528,7 +536,7 @@ fn complete_imports(ctx: &mut CompletionContext) -> bool {
        if let Some(grand) = parent.parent();
        if grand.kind() == SyntaxKind::ImportItems;
        if let Some(great) = grand.parent();
-        if let Some(ast::Expr::Import(import)) = great.get().cast();
+        if let Some(ast::Expr::ModuleImport(import)) = great.get().cast();
        if let Some(ast::Imports::Items(items)) = import.imports();
        if let Some(source) = great.children().find(|child| child.is::<ast::Expr>());
        then {
@@ -669,10 +677,10 @@ fn complete_params(ctx: &mut CompletionContext) -> bool {
        if let Some(args) = parent.get().cast::<ast::Args>();
        if let Some(grand) = parent.parent();
        if let Some(expr) = grand.get().cast::<ast::Expr>();
-        let set = matches!(expr, ast::Expr::Set(_));
+        let set = matches!(expr, ast::Expr::SetRule(_));
        if let Some(callee) = match expr {
            ast::Expr::FuncCall(call) => Some(call.callee()),
-            ast::Expr::Set(set) => Some(set.target()),
+            ast::Expr::SetRule(set) => Some(set.target()),
            _ => None,
        };
        then {
@@ -1764,6 +1772,7 @@ mod tests {
    #[test]
    fn test_autocomplete_type_methods() {
        test("#\"hello\".", -1).must_include(["len", "contains"]);
+        test("#table().", -1).must_exclude(["cell"]);
    }

    #[test]
@@ -232,7 +232,9 @@ pub fn deref_target(node: LinkedNode) -> Option<DerefTarget<'_>> {
        ast::Expr::FuncCall(call) => {
            DerefTarget::Callee(expr_node.find(call.callee().span())?)
        }
-        ast::Expr::Set(set) => DerefTarget::Callee(expr_node.find(set.target().span())?),
+        ast::Expr::SetRule(set) => {
+            DerefTarget::Callee(expr_node.find(set.target().span())?)
+        }
        ast::Expr::Ident(_) | ast::Expr::MathIdent(_) | ast::Expr::FieldAccess(_) => {
            DerefTarget::VarAccess(expr_node)
        }
@@ -201,7 +201,7 @@ fn named_param_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Toolti
        if let Some(expr) = grand_grand.cast::<ast::Expr>();
        if let Some(ast::Expr::Ident(callee)) = match expr {
            ast::Expr::FuncCall(call) => Some(call.callee()),
-            ast::Expr::Set(set) => Some(set.target()),
+            ast::Expr::SetRule(set) => Some(set.target()),
            _ => None,
        };

@@ -284,6 +284,7 @@ impl<'a> CurveBuilder<'a> {
        self.last_point = point;
        self.last_control_from = point;
        self.is_started = true;
+        self.is_empty = true;
    }

    /// Add a line segment.
@@ -61,6 +61,7 @@ ttf-parser = { workspace = true }
 two-face = { workspace = true }
 typed-arena = { workspace = true }
 unicode-math-class = { workspace = true }
+unicode-normalization = { workspace = true }
 unicode-segmentation = { workspace = true }
 unscanny = { workspace = true }
 usvg = { workspace = true }
@@ -312,7 +312,8 @@ impl Route<'_> {
        if !self.within(Route::MAX_SHOW_RULE_DEPTH) {
            bail!(
                "maximum show rule depth exceeded";
-                hint: "check whether the show rule matches its own output"
+                hint: "maybe a show rule matches its own output";
+                hint: "maybe there are too deeply nested elements"
            );
        }
        Ok(())
@@ -7,12 +7,13 @@ use comemo::Tracked;
 use ecow::EcoString;
 use serde::{Deserialize, Serialize};
 use typst_syntax::{Span, Spanned};
+use unicode_normalization::UnicodeNormalization;
 use unicode_segmentation::UnicodeSegmentation;

 use crate::diag::{bail, At, SourceResult, StrResult};
 use crate::engine::Engine;
 use crate::foundations::{
-    cast, dict, func, repr, scope, ty, Array, Bytes, Context, Decimal, Dict, Func,
+    cast, dict, func, repr, scope, ty, Array, Bytes, Cast, Context, Decimal, Dict, Func,
     IntoValue, Label, Repr, Type, Value, Version,
 };
 use crate::layout::Alignment;
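For context, a brief sketch (not part of the commit) of the `unicode-normalization` API that the new import pulls in: the `UnicodeNormalization` trait extends `&str` with `nfc`, `nfd`, `nfkc`, and `nfkd` adapters that yield normalized `char` iterators, which the `normalize` method added below simply collects into a new string.

// Sketch of the crate API assumed by the hunk below (unicode-normalization 0.1.x).
use unicode_normalization::UnicodeNormalization;

fn normalization_demo() {
    // NFC composes base letter + combining mark into a single codepoint;
    // NFD decomposes it again.
    let composed: String = "e\u{0301}".nfc().collect();
    let decomposed: String = "\u{e9}".nfd().collect();
    assert_eq!(composed, "\u{e9}");
    assert_eq!(decomposed, "e\u{0301}");
}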
@@ -286,6 +287,30 @@ impl Str {
        Ok(c.into())
    }

+    /// Normalizes the string to the given Unicode normal form.
+    ///
+    /// This is useful when manipulating strings containing Unicode combining
+    /// characters.
+    ///
+    /// ```typ
+    /// #assert.eq("é".normalize(form: "nfd"), "e\u{0301}")
+    /// #assert.eq("ſ́".normalize(form: "nfkc"), "ś")
+    /// ```
+    #[func]
+    pub fn normalize(
+        &self,
+        #[named]
+        #[default(UnicodeNormalForm::Nfc)]
+        form: UnicodeNormalForm,
+    ) -> Str {
+        match form {
+            UnicodeNormalForm::Nfc => self.nfc().collect(),
+            UnicodeNormalForm::Nfd => self.nfd().collect(),
+            UnicodeNormalForm::Nfkc => self.nfkc().collect(),
+            UnicodeNormalForm::Nfkd => self.nfkd().collect(),
+        }
+    }
+
    /// Whether the string contains the specified pattern.
    ///
    /// This method also has dedicated syntax: You can write `{"bc" in "abcd"}`
@@ -788,6 +813,25 @@ cast! {
    v: Str => Self::Str(v),
 }

+/// A Unicode normalization form.
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)]
+pub enum UnicodeNormalForm {
+    /// Canonical composition where e.g. accented letters are turned into a
+    /// single Unicode codepoint.
+    #[string("nfc")]
+    Nfc,
+    /// Canonical decomposition where e.g. accented letters are split into a
+    /// separate base and diacritic.
+    #[string("nfd")]
+    Nfd,
+    /// Like NFC, but using the Unicode compatibility decompositions.
+    #[string("nfkc")]
+    Nfkc,
+    /// Like NFD, but using the Unicode compatibility decompositions.
+    #[string("nfkd")]
+    Nfkd,
+}
+
 /// Convert an item of std's `match_indices` to a dictionary.
 fn match_to_dict((start, text): (usize, &str)) -> Dict {
    dict! {
@@ -50,6 +50,42 @@ impl Dir {
    pub const TTB: Self = Self::TTB;
    pub const BTT: Self = Self::BTT;

+    /// Returns a direction from a starting point.
+    ///
+    /// ```example
+    /// direction.from(left) \
+    /// direction.from(right) \
+    /// direction.from(top) \
+    /// direction.from(bottom)
+    /// ```
+    #[func]
+    pub const fn from(side: Side) -> Dir {
+        match side {
+            Side::Left => Self::LTR,
+            Side::Right => Self::RTL,
+            Side::Top => Self::TTB,
+            Side::Bottom => Self::BTT,
+        }
+    }
+
+    /// Returns a direction from an end point.
+    ///
+    /// ```example
+    /// direction.to(left) \
+    /// direction.to(right) \
+    /// direction.to(top) \
+    /// direction.to(bottom)
+    /// ```
+    #[func]
+    pub const fn to(side: Side) -> Dir {
+        match side {
+            Side::Right => Self::LTR,
+            Side::Left => Self::RTL,
+            Side::Bottom => Self::TTB,
+            Side::Top => Self::BTT,
+        }
+    }
+
    /// The axis this direction belongs to, either `{"horizontal"}` or
    /// `{"vertical"}`.
    ///
@@ -65,6 +101,22 @@ impl Dir {
        }
    }

+    /// The corresponding sign, for use in calculations.
+    ///
+    /// ```example
+    /// #ltr.sign() \
+    /// #rtl.sign() \
+    /// #ttb.sign() \
+    /// #btt.sign()
+    /// ```
+    #[func]
+    pub const fn sign(self) -> i64 {
+        match self {
+            Self::LTR | Self::TTB => 1,
+            Self::RTL | Self::BTT => -1,
+        }
+    }
+
    /// The start point of this direction, as an alignment.
    ///
    /// ```example
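A quick property implied by the additions above (a hedged sketch, not part of the commit; it assumes `Dir` and `Side` are `Copy`, `PartialEq`, and `Debug`): `from` and `to` map the same side to opposite directions, which is what the new `dir-from`/`dir-to` tests further below assert at the Typst level.

// Hypothetical test sketch; `opposite` is a local helper, not typst API.
fn opposite(dir: Dir) -> Dir {
    match dir {
        Dir::LTR => Dir::RTL,
        Dir::RTL => Dir::LTR,
        Dir::TTB => Dir::BTT,
        Dir::BTT => Dir::TTB,
    }
}

#[test]
fn from_and_to_are_opposites() {
    for side in [Side::Left, Side::Right, Side::Top, Side::Bottom] {
        assert_eq!(Dir::from(side), opposite(Dir::to(side)));
    }
}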
File diff suppressed because it is too large.
@@ -5,7 +5,6 @@ use std::sync::Arc;

 use ecow::{eco_format, eco_vec, EcoString, EcoVec};

-use crate::ast::AstNode;
 use crate::{FileId, Span, SyntaxKind};

 /// A node in the untyped syntax tree.
@@ -119,26 +118,6 @@ impl SyntaxNode {
        }
    }

-    /// Whether the node can be cast to the given AST node.
-    pub fn is<'a, T: AstNode<'a>>(&'a self) -> bool {
-        self.cast::<T>().is_some()
-    }
-
-    /// Try to convert the node to a typed AST node.
-    pub fn cast<'a, T: AstNode<'a>>(&'a self) -> Option<T> {
-        T::from_untyped(self)
-    }
-
-    /// Cast the first child that can cast to the AST type `T`.
-    pub fn cast_first_match<'a, T: AstNode<'a>>(&'a self) -> Option<T> {
-        self.children().find_map(Self::cast)
-    }
-
-    /// Cast the last child that can cast to the AST type `T`.
-    pub fn cast_last_match<'a, T: AstNode<'a>>(&'a self) -> Option<T> {
-        self.children().rev().find_map(Self::cast)
-    }
-
    /// Whether the node or its children contain an error.
    pub fn erroneous(&self) -> bool {
        match &self.0 {
tests/ref/curve-multiple-non-closed.png (new binary file, 85 B; not shown)
@@ -86,6 +86,13 @@
 // Error: 2-28 0x110000 is not a valid codepoint
 #str.from-unicode(0x110000) // 0x10ffff is the highest valid code point

+--- str-normalize ---
+// Test the `normalize` method.
+#test("e\u{0301}".normalize(form: "nfc"), "é")
+#test("é".normalize(form: "nfd"), "e\u{0301}")
+#test("ſ\u{0301}".normalize(form: "nfkc"), "ś")
+#test("ſ\u{0301}".normalize(form: "nfkd"), "s\u{0301}")
+
 --- string-len ---
 // Test the `len` method.
 #test("Hello World!".len(), 12)
@@ -1,10 +1,35 @@
+--- dir-from ---
+#test(direction.from(left), ltr)
+#test(direction.from(right), rtl)
+#test(direction.from(top), ttb)
+#test(direction.from(bottom), btt)
+
+--- dir-from-invalid ---
+// Error: 17-23 cannot convert this alignment to a side
+#direction.from(center)
+
+--- dir-to ---
+#test(direction.to(left), rtl)
+#test(direction.to(right), ltr)
+#test(direction.to(top), btt)
+#test(direction.to(bottom), ttb)
+
+--- dir-to-invalid ---
+// Error: 15-21 cannot convert this alignment to a side
+#direction.to(center)
+
 --- dir-axis ---
-// Test direction methods.
 #test(ltr.axis(), "horizontal")
 #test(rtl.axis(), "horizontal")
 #test(ttb.axis(), "vertical")
 #test(btt.axis(), "vertical")

+--- dir-sign ---
+#test(ltr.sign(), 1)
+#test(rtl.sign(), -1)
+#test(ttb.sign(), 1)
+#test(btt.sign(), -1)
+
 --- dir-start ---
 #test(ltr.start(), left)
 #test(rtl.start(), right)
@@ -44,18 +44,21 @@
 --- recursion-via-include-in-layout ---
 // Test cyclic imports during layout.
 // Error: 2-38 maximum show rule depth exceeded
-// Hint: 2-38 check whether the show rule matches its own output
+// Hint: 2-38 maybe a show rule matches its own output
+// Hint: 2-38 maybe there are too deeply nested elements
 #layout(_ => include "recursion.typ")

 --- recursion-show-math ---
 // Test recursive show rules.
 // Error: 22-25 maximum show rule depth exceeded
-// Hint: 22-25 check whether the show rule matches its own output
+// Hint: 22-25 maybe a show rule matches its own output
+// Hint: 22-25 maybe there are too deeply nested elements
 #show math.equation: $x$
 $ x $

 --- recursion-show-math-realize ---
 // Error: 22-33 maximum show rule depth exceeded
-// Hint: 22-33 check whether the show rule matches its own output
+// Hint: 22-33 maybe a show rule matches its own output
+// Hint: 22-33 maybe there are too deeply nested elements
 #show heading: it => heading[it]
 $ #heading[hi] $
@@ -38,6 +38,16 @@
   curve.close(mode: "smooth"),
 )

+--- curve-multiple-non-closed ---
+#curve(
+  stroke: 2pt,
+  curve.line((20pt, 0pt)),
+  curve.move((0pt, 10pt)),
+  curve.line((20pt, 10pt)),
+  curve.move((0pt, 20pt)),
+  curve.line((20pt, 20pt)),
+)
+
 --- curve-line ---
 #curve(
   fill: purple,
|
Loading…
x
Reference in New Issue
Block a user