Merge branch 'typst:main' into bibliography-entry

commit f336214711
Kevin K., 2025-03-02 17:03:45 +01:00, committed by GitHub
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
21 changed files with 578 additions and 319 deletions

Cargo.lock (generated)

@@ -2995,6 +2995,7 @@ dependencies = [
"typst-timing",
"typst-utils",
"unicode-math-class",
"unicode-normalization",
"unicode-segmentation",
"unscanny",
"usvg",

@@ -129,6 +129,7 @@ unicode-bidi = "0.3.18"
unicode-ident = "1.0"
unicode-math-class = "0.1"
unicode-script = "0.5"
unicode-normalization = "0.1.24"
unicode-segmentation = "1"
unscanny = "0.1"
ureq = { version = "2", default-features = false, features = ["native-tls", "gzip", "json"] }

@@ -466,7 +466,7 @@ impl<'a> CapturesVisitor<'a> {
}
// Code and content blocks create a scope.
Some(ast::Expr::Code(_) | ast::Expr::Content(_)) => {
Some(ast::Expr::CodeBlock(_) | ast::Expr::ContentBlock(_)) => {
self.internal.enter();
for child in node.children() {
self.visit(child);
@@ -516,7 +516,7 @@ impl<'a> CapturesVisitor<'a> {
// A let expression contains a binding, but that binding is only
// active after the body is evaluated.
Some(ast::Expr::Let(expr)) => {
Some(ast::Expr::LetBinding(expr)) => {
if let Some(init) = expr.init() {
self.visit(init.to_untyped());
}
@@ -529,7 +529,7 @@ impl<'a> CapturesVisitor<'a> {
// A for loop contains one or two bindings in its pattern. These are
// active after the iterable is evaluated but before the body is
// evaluated.
Some(ast::Expr::For(expr)) => {
Some(ast::Expr::ForLoop(expr)) => {
self.visit(expr.iterable().to_untyped());
self.internal.enter();
@@ -544,7 +544,7 @@ impl<'a> CapturesVisitor<'a> {
// An import contains items, but these are active only after the
// path is evaluated.
Some(ast::Expr::Import(expr)) => {
Some(ast::Expr::ModuleImport(expr)) => {
self.visit(expr.source().to_untyped());
if let Some(ast::Imports::Items(items)) = expr.imports() {
for item in items.iter() {

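The scoping comments in this hunk describe Typst's capture analysis for closures: a `let` initializer still sees the outer binding, which therefore gets captured, and the new binding only takes effect afterwards. A minimal sketch of that behaviour (illustrative only, not part of this diff):

```typ
#let x = 1
#let f = () => {
  // The `x` in the initializer refers to the captured outer binding;
  // the shadowing `x` only becomes active afterwards.
  let x = x + 1
  x
}
#assert.eq(f(), 2)
```
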
@@ -30,7 +30,7 @@ fn eval_code<'a>(
while let Some(expr) = exprs.next() {
let span = expr.span();
let value = match expr {
ast::Expr::Set(set) => {
ast::Expr::SetRule(set) => {
let styles = set.eval(vm)?;
if vm.flow.is_some() {
break;
@@ -39,7 +39,7 @@ fn eval_code<'a>(
let tail = eval_code(vm, exprs)?.display();
Value::Content(tail.styled_with_map(styles))
}
ast::Expr::Show(show) => {
ast::Expr::ShowRule(show) => {
let recipe = show.eval(vm)?;
if vm.flow.is_some() {
break;
@@ -94,9 +94,9 @@ impl Eval for ast::Expr<'_> {
Self::Label(v) => v.eval(vm),
Self::Ref(v) => v.eval(vm).map(Value::Content),
Self::Heading(v) => v.eval(vm).map(Value::Content),
Self::List(v) => v.eval(vm).map(Value::Content),
Self::Enum(v) => v.eval(vm).map(Value::Content),
Self::Term(v) => v.eval(vm).map(Value::Content),
Self::ListItem(v) => v.eval(vm).map(Value::Content),
Self::EnumItem(v) => v.eval(vm).map(Value::Content),
Self::TermItem(v) => v.eval(vm).map(Value::Content),
Self::Equation(v) => v.eval(vm).map(Value::Content),
Self::Math(v) => v.eval(vm).map(Value::Content),
Self::MathText(v) => v.eval(vm).map(Value::Content),
@@ -116,8 +116,8 @@ impl Eval for ast::Expr<'_> {
Self::Float(v) => v.eval(vm),
Self::Numeric(v) => v.eval(vm),
Self::Str(v) => v.eval(vm),
Self::Code(v) => v.eval(vm),
Self::Content(v) => v.eval(vm).map(Value::Content),
Self::CodeBlock(v) => v.eval(vm),
Self::ContentBlock(v) => v.eval(vm).map(Value::Content),
Self::Array(v) => v.eval(vm).map(Value::Array),
Self::Dict(v) => v.eval(vm).map(Value::Dict),
Self::Parenthesized(v) => v.eval(vm),
@@ -126,19 +126,19 @@ impl Eval for ast::Expr<'_> {
Self::Closure(v) => v.eval(vm),
Self::Unary(v) => v.eval(vm),
Self::Binary(v) => v.eval(vm),
Self::Let(v) => v.eval(vm),
Self::DestructAssign(v) => v.eval(vm),
Self::Set(_) => bail!(forbidden("set")),
Self::Show(_) => bail!(forbidden("show")),
Self::LetBinding(v) => v.eval(vm),
Self::DestructAssignment(v) => v.eval(vm),
Self::SetRule(_) => bail!(forbidden("set")),
Self::ShowRule(_) => bail!(forbidden("show")),
Self::Contextual(v) => v.eval(vm).map(Value::Content),
Self::Conditional(v) => v.eval(vm),
Self::While(v) => v.eval(vm),
Self::For(v) => v.eval(vm),
Self::Import(v) => v.eval(vm),
Self::Include(v) => v.eval(vm).map(Value::Content),
Self::Break(v) => v.eval(vm),
Self::Continue(v) => v.eval(vm),
Self::Return(v) => v.eval(vm),
Self::WhileLoop(v) => v.eval(vm),
Self::ForLoop(v) => v.eval(vm),
Self::ModuleImport(v) => v.eval(vm),
Self::ModuleInclude(v) => v.eval(vm).map(Value::Content),
Self::LoopBreak(v) => v.eval(vm),
Self::LoopContinue(v) => v.eval(vm),
Self::FuncReturn(v) => v.eval(vm),
}?
.spanned(span);

@@ -33,7 +33,7 @@ fn eval_markup<'a>(
while let Some(expr) = exprs.next() {
match expr {
ast::Expr::Set(set) => {
ast::Expr::SetRule(set) => {
let styles = set.eval(vm)?;
if vm.flow.is_some() {
break;
@@ -41,7 +41,7 @@ fn eval_markup<'a>(
seq.push(eval_markup(vm, exprs)?.styled_with_map(styles))
}
ast::Expr::Show(show) => {
ast::Expr::ShowRule(show) => {
let recipe = show.eval(vm)?;
if vm.flow.is_some() {
break;

@@ -45,7 +45,7 @@ impl Eval for ast::ShowRule<'_> {
let transform = self.transform();
let transform = match transform {
ast::Expr::Set(set) => Transformation::Style(set.eval(vm)?),
ast::Expr::SetRule(set) => Transformation::Style(set.eval(vm)?),
expr => expr.eval(vm)?.cast::<Transformation>().at(transform.span())?,
};

@@ -410,9 +410,17 @@ fn field_access_completions(
elem.into_iter().chain(Some(ty))
};
// Autocomplete methods from the element's or type's scope.
// Autocomplete methods from the element's or type's scope. We only complete
// those which have a `self` parameter.
for (name, binding) in scopes.flat_map(|scope| scope.iter()) {
ctx.call_completion(name.clone(), binding.read());
let Ok(func) = binding.read().clone().cast::<Func>() else { continue };
if func
.params()
.and_then(|params| params.first())
.is_some_and(|param| param.name == "self")
{
ctx.call_completion(name.clone(), binding.read());
}
}
if let Some(scope) = value.scope() {
@@ -509,7 +517,7 @@ fn complete_imports(ctx: &mut CompletionContext) -> bool {
// "#import "path.typ": a, b, |".
if_chain! {
if let Some(prev) = ctx.leaf.prev_sibling();
if let Some(ast::Expr::Import(import)) = prev.get().cast();
if let Some(ast::Expr::ModuleImport(import)) = prev.get().cast();
if let Some(ast::Imports::Items(items)) = import.imports();
if let Some(source) = prev.children().find(|child| child.is::<ast::Expr>());
then {
@@ -528,7 +536,7 @@ fn complete_imports(ctx: &mut CompletionContext) -> bool {
if let Some(grand) = parent.parent();
if grand.kind() == SyntaxKind::ImportItems;
if let Some(great) = grand.parent();
if let Some(ast::Expr::Import(import)) = great.get().cast();
if let Some(ast::Expr::ModuleImport(import)) = great.get().cast();
if let Some(ast::Imports::Items(items)) = import.imports();
if let Some(source) = great.children().find(|child| child.is::<ast::Expr>());
then {
@@ -669,10 +677,10 @@ fn complete_params(ctx: &mut CompletionContext) -> bool {
if let Some(args) = parent.get().cast::<ast::Args>();
if let Some(grand) = parent.parent();
if let Some(expr) = grand.get().cast::<ast::Expr>();
let set = matches!(expr, ast::Expr::Set(_));
let set = matches!(expr, ast::Expr::SetRule(_));
if let Some(callee) = match expr {
ast::Expr::FuncCall(call) => Some(call.callee()),
ast::Expr::Set(set) => Some(set.target()),
ast::Expr::SetRule(set) => Some(set.target()),
_ => None,
};
then {
@@ -1764,6 +1772,7 @@ mod tests {
#[test]
fn test_autocomplete_type_methods() {
test("#\"hello\".", -1).must_include(["len", "contains"]);
test("#table().", -1).must_exclude(["cell"]);
}
#[test]

@@ -232,7 +232,9 @@ pub fn deref_target(node: LinkedNode) -> Option<DerefTarget<'_>> {
ast::Expr::FuncCall(call) => {
DerefTarget::Callee(expr_node.find(call.callee().span())?)
}
ast::Expr::Set(set) => DerefTarget::Callee(expr_node.find(set.target().span())?),
ast::Expr::SetRule(set) => {
DerefTarget::Callee(expr_node.find(set.target().span())?)
}
ast::Expr::Ident(_) | ast::Expr::MathIdent(_) | ast::Expr::FieldAccess(_) => {
DerefTarget::VarAccess(expr_node)
}

@@ -201,7 +201,7 @@ fn named_param_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Toolti
if let Some(expr) = grand_grand.cast::<ast::Expr>();
if let Some(ast::Expr::Ident(callee)) = match expr {
ast::Expr::FuncCall(call) => Some(call.callee()),
ast::Expr::Set(set) => Some(set.target()),
ast::Expr::SetRule(set) => Some(set.target()),
_ => None,
};

@@ -284,6 +284,7 @@ impl<'a> CurveBuilder<'a> {
self.last_point = point;
self.last_control_from = point;
self.is_started = true;
self.is_empty = true;
}
/// Add a line segment.

@@ -61,6 +61,7 @@ ttf-parser = { workspace = true }
two-face = { workspace = true }
typed-arena = { workspace = true }
unicode-math-class = { workspace = true }
unicode-normalization = { workspace = true }
unicode-segmentation = { workspace = true }
unscanny = { workspace = true }
usvg = { workspace = true }

@@ -312,7 +312,8 @@ impl Route<'_> {
if !self.within(Route::MAX_SHOW_RULE_DEPTH) {
bail!(
"maximum show rule depth exceeded";
hint: "check whether the show rule matches its own output"
hint: "maybe a show rule matches its own output";
hint: "maybe there are too deeply nested elements"
);
}
Ok(())

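The reworded hints above cover the two common ways to hit the depth limit. The first case, a show rule that matches its own output, looks like this in Typst (a sketch mirroring the recursion tests further below):

```typ
// The rule produces a new heading, which the rule matches again,
// recursing until "maximum show rule depth exceeded" is reported.
#show heading: it => heading(it.body)

= Hello
```
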
@@ -7,12 +7,13 @@ use comemo::Tracked;
use ecow::EcoString;
use serde::{Deserialize, Serialize};
use typst_syntax::{Span, Spanned};
use unicode_normalization::UnicodeNormalization;
use unicode_segmentation::UnicodeSegmentation;
use crate::diag::{bail, At, SourceResult, StrResult};
use crate::engine::Engine;
use crate::foundations::{
cast, dict, func, repr, scope, ty, Array, Bytes, Context, Decimal, Dict, Func,
cast, dict, func, repr, scope, ty, Array, Bytes, Cast, Context, Decimal, Dict, Func,
IntoValue, Label, Repr, Type, Value, Version,
};
use crate::layout::Alignment;
@@ -286,6 +287,30 @@ impl Str {
Ok(c.into())
}
/// Normalizes the string to the given Unicode normal form.
///
/// This is useful when manipulating strings containing Unicode combining
/// characters.
///
/// ```typ
/// #assert.eq("é".normalize(form: "nfd"), "e\u{0301}")
/// #assert.eq("ſ́".normalize(form: "nfkc"), "ś")
/// ```
#[func]
pub fn normalize(
&self,
#[named]
#[default(UnicodeNormalForm::Nfc)]
form: UnicodeNormalForm,
) -> Str {
match form {
UnicodeNormalForm::Nfc => self.nfc().collect(),
UnicodeNormalForm::Nfd => self.nfd().collect(),
UnicodeNormalForm::Nfkc => self.nfkc().collect(),
UnicodeNormalForm::Nfkd => self.nfkd().collect(),
}
}
/// Whether the string contains the specified pattern.
///
/// This method also has dedicated syntax: You can write `{"bc" in "abcd"}`
@@ -788,6 +813,25 @@ cast! {
v: Str => Self::Str(v),
}
/// A Unicode normalization form.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)]
pub enum UnicodeNormalForm {
/// Canonical composition where e.g. accented letters are turned into a
/// single Unicode codepoint.
#[string("nfc")]
Nfc,
/// Canonical decomposition where e.g. accented letters are split into a
/// separate base and diacritic.
#[string("nfd")]
Nfd,
/// Like NFC, but using the Unicode compatibility decompositions.
#[string("nfkc")]
Nfkc,
/// Like NFD, but using the Unicode compatibility decompositions.
#[string("nfkd")]
Nfkd,
}
/// Convert an item of std's `match_indices` to a dictionary.
fn match_to_dict((start, text): (usize, &str)) -> Dict {
dict! {

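As a usage sketch for the new `normalize` method (assuming, as in the doc example above, that the `"é"` literal is stored precomposed): two strings that render identically can still compare unequal until both are brought into the same normal form.

```typ
// Precomposed U+00E9 vs. base letter plus combining acute accent.
#assert("é" != "e\u{0301}")

// Normalizing to a common form makes the comparison stable.
#assert.eq("é".normalize(form: "nfd"), "e\u{0301}")
#assert.eq("e\u{0301}".normalize(form: "nfc"), "é")
```
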
@@ -50,6 +50,42 @@ impl Dir {
pub const TTB: Self = Self::TTB;
pub const BTT: Self = Self::BTT;
/// Returns a direction from a starting point.
///
/// ```example
/// direction.from(left) \
/// direction.from(right) \
/// direction.from(top) \
/// direction.from(bottom)
/// ```
#[func]
pub const fn from(side: Side) -> Dir {
match side {
Side::Left => Self::LTR,
Side::Right => Self::RTL,
Side::Top => Self::TTB,
Side::Bottom => Self::BTT,
}
}
/// Returns a direction from an end point.
///
/// ```example
/// direction.to(left) \
/// direction.to(right) \
/// direction.to(top) \
/// direction.to(bottom)
/// ```
#[func]
pub const fn to(side: Side) -> Dir {
match side {
Side::Right => Self::LTR,
Side::Left => Self::RTL,
Side::Bottom => Self::TTB,
Side::Top => Self::BTT,
}
}
/// The axis this direction belongs to, either `{"horizontal"}` or
/// `{"vertical"}`.
///
@@ -65,6 +101,22 @@
}
}
/// The corresponding sign, for use in calculations.
///
/// ```example
/// #ltr.sign() \
/// #rtl.sign() \
/// #ttb.sign() \
/// #btt.sign()
/// ```
#[func]
pub const fn sign(self) -> i64 {
match self {
Self::LTR | Self::TTB => 1,
Self::RTL | Self::BTT => -1,
}
}
/// The start point of this direction, as an alignment.
///
/// ```example

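A small illustration of how the new `direction.from`, `direction.to`, and `sign` relate (the `shift` helper is made up for this sketch; `inv` is the existing method for reversing a direction):

```typ
// `from` and `to` of the same side point in opposite directions.
#assert.eq(direction.from(left), ltr)
#assert.eq(direction.to(left), rtl)
#assert.eq(direction.from(left).inv(), direction.to(left))

// `sign()` turns a direction into a factor for offset arithmetic.
#let shift(dir, amount) = dir.sign() * amount
#assert.eq(shift(ltr, 5pt), 5pt)
#assert.eq(shift(rtl, 5pt), -5pt)
```
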
File diff suppressed because it is too large.

@@ -5,7 +5,6 @@ use std::sync::Arc;
use ecow::{eco_format, eco_vec, EcoString, EcoVec};
use crate::ast::AstNode;
use crate::{FileId, Span, SyntaxKind};
/// A node in the untyped syntax tree.
@@ -119,26 +118,6 @@ impl SyntaxNode {
}
}
/// Whether the node can be cast to the given AST node.
pub fn is<'a, T: AstNode<'a>>(&'a self) -> bool {
self.cast::<T>().is_some()
}
/// Try to convert the node to a typed AST node.
pub fn cast<'a, T: AstNode<'a>>(&'a self) -> Option<T> {
T::from_untyped(self)
}
/// Cast the first child that can cast to the AST type `T`.
pub fn cast_first_match<'a, T: AstNode<'a>>(&'a self) -> Option<T> {
self.children().find_map(Self::cast)
}
/// Cast the last child that can cast to the AST type `T`.
pub fn cast_last_match<'a, T: AstNode<'a>>(&'a self) -> Option<T> {
self.children().rev().find_map(Self::cast)
}
/// Whether the node or its children contain an error.
pub fn erroneous(&self) -> bool {
match &self.0 {

Binary file not shown (added; 85 B).

@@ -86,6 +86,13 @@
// Error: 2-28 0x110000 is not a valid codepoint
#str.from-unicode(0x110000) // 0x10ffff is the highest valid code point
--- str-normalize ---
// Test the `normalize` method.
#test("e\u{0301}".normalize(form: "nfc"), "é")
#test("é".normalize(form: "nfd"), "e\u{0301}")
#test("ſ\u{0301}".normalize(form: "nfkc"), "ś")
#test("ſ\u{0301}".normalize(form: "nfkd"), "s\u{0301}")
--- string-len ---
// Test the `len` method.
#test("Hello World!".len(), 12)

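To complement the `normalize` tests above: the `k` forms additionally apply Unicode compatibility mappings, so e.g. the `ﬁ` ligature (U+FB01) decomposes to `fi` only under NFKC/NFKD. A sketch in the same style as the surrounding tests (not part of this diff):

```typ
#test("\u{FB01}".normalize(form: "nfkc"), "fi")
#test("\u{FB01}".normalize(form: "nfc"), "\u{FB01}")
```
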
@@ -1,10 +1,35 @@
--- dir-from ---
#test(direction.from(left), ltr)
#test(direction.from(right), rtl)
#test(direction.from(top), ttb)
#test(direction.from(bottom), btt)
--- dir-from-invalid ---
// Error: 17-23 cannot convert this alignment to a side
#direction.from(center)
--- dir-to ---
#test(direction.to(left), rtl)
#test(direction.to(right), ltr)
#test(direction.to(top), btt)
#test(direction.to(bottom), ttb)
--- dir-to-invalid ---
// Error: 15-21 cannot convert this alignment to a side
#direction.to(center)
--- dir-axis ---
// Test direction methods.
#test(ltr.axis(), "horizontal")
#test(rtl.axis(), "horizontal")
#test(ttb.axis(), "vertical")
#test(btt.axis(), "vertical")
--- dir-sign ---
#test(ltr.sign(), 1)
#test(rtl.sign(), -1)
#test(ttb.sign(), 1)
#test(btt.sign(), -1)
--- dir-start ---
#test(ltr.start(), left)
#test(rtl.start(), right)

@@ -44,18 +44,21 @@
--- recursion-via-include-in-layout ---
// Test cyclic imports during layout.
// Error: 2-38 maximum show rule depth exceeded
// Hint: 2-38 check whether the show rule matches its own output
// Hint: 2-38 maybe a show rule matches its own output
// Hint: 2-38 maybe there are too deeply nested elements
#layout(_ => include "recursion.typ")
--- recursion-show-math ---
// Test recursive show rules.
// Error: 22-25 maximum show rule depth exceeded
// Hint: 22-25 check whether the show rule matches its own output
// Hint: 22-25 maybe a show rule matches its own output
// Hint: 22-25 maybe there are too deeply nested elements
#show math.equation: $x$
$ x $
--- recursion-show-math-realize ---
// Error: 22-33 maximum show rule depth exceeded
// Hint: 22-33 check whether the show rule matches its own output
// Hint: 22-33 maybe a show rule matches its own output
// Hint: 22-33 maybe there are too deeply nested elements
#show heading: it => heading[it]
$ #heading[hi] $

@@ -38,6 +38,16 @@
curve.close(mode: "smooth"),
)
--- curve-multiple-non-closed ---
#curve(
stroke: 2pt,
curve.line((20pt, 0pt)),
curve.move((0pt, 10pt)),
curve.line((20pt, 10pt)),
curve.move((0pt, 20pt)),
curve.line((20pt, 20pt)),
)
--- curve-line ---
#curve(
fill: purple,