Merge branch 'main' into pdf-accessibility

This commit is contained in:
Tobias Schmitz 2025-08-06 11:52:46 +02:00
commit 011c84ee0e
No known key found for this signature in database
97 changed files with 978 additions and 315 deletions

2
.cargo/config.toml Normal file
View File

@ -0,0 +1,2 @@
[alias]
# `cargo testit <args…>` — shorthand for running the workspace integration
# test suite (the `tests` test target); trailing `--` forwards extra args.
testit = "test --workspace --test tests --"

70
Cargo.lock generated
View File

@ -448,42 +448,19 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
[[package]]
name = "comemo"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df6916408a724339aa77b18214233355f3eb04c42eb895e5f8909215bd8a7a91"
dependencies = [
"comemo-macros 0.4.0",
"once_cell",
"parking_lot",
"siphasher",
]
[[package]]
name = "comemo"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "649d7b2d867b569729c03c0f6968db10bc95921182a1f2b2012b1b549492f39d"
dependencies = [
"comemo-macros 0.5.0",
"comemo-macros",
"parking_lot",
"rustc-hash",
"siphasher",
"slab",
]
[[package]]
name = "comemo-macros"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8936e42f9b4f5bdfaf23700609ac1f11cb03ad4c1ec128a4ee4fd0903e228db"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "comemo-macros"
version = "0.5.0"
@ -1445,11 +1422,11 @@ dependencies = [
[[package]]
name = "krilla"
version = "0.4.0"
source = "git+https://github.com/LaurenzV/krilla?branch=main#1246755ed5ff18a9a8c888694e4b91f3bed1b41a"
source = "git+https://github.com/LaurenzV/krilla?rev=1246755#1246755ed5ff18a9a8c888694e4b91f3bed1b41a"
dependencies = [
"base64",
"bumpalo",
"comemo 0.5.0",
"comemo",
"flate2",
"float-cmp 0.10.0",
"gif",
@ -1475,7 +1452,7 @@ dependencies = [
[[package]]
name = "krilla-svg"
version = "0.1.0"
source = "git+https://github.com/LaurenzV/krilla?branch=main#1246755ed5ff18a9a8c888694e4b91f3bed1b41a"
source = "git+https://github.com/LaurenzV/krilla?rev=1246755#1246755ed5ff18a9a8c888694e4b91f3bed1b41a"
dependencies = [
"flate2",
"fontdb",
@ -2941,8 +2918,9 @@ checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
name = "typst"
version = "0.13.1"
dependencies = [
"comemo 0.4.0",
"comemo",
"ecow",
"rustc-hash",
"typst-eval",
"typst-html",
"typst-layout",
@ -2969,7 +2947,7 @@ dependencies = [
"clap_mangen",
"codespan-reporting",
"color-print",
"comemo 0.4.0",
"comemo",
"dirs",
"ecow",
"fs_extra",
@ -2978,6 +2956,7 @@ dependencies = [
"parking_lot",
"pathdiff",
"rayon",
"rustc-hash",
"same-file",
"self-replace",
"semver",
@ -3018,6 +2997,7 @@ dependencies = [
"ecow",
"heck",
"pulldown-cmark",
"rustc-hash",
"serde",
"serde_json",
"serde_yaml 0.9.34+deprecated",
@ -3037,9 +3017,10 @@ dependencies = [
name = "typst-eval"
version = "0.13.1"
dependencies = [
"comemo 0.4.0",
"comemo",
"ecow",
"indexmap 2.7.1",
"rustc-hash",
"stacker",
"toml",
"typst-library",
@ -3054,7 +3035,7 @@ dependencies = [
name = "typst-fuzz"
version = "0.13.1"
dependencies = [
"comemo 0.4.0",
"comemo",
"libfuzzer-sys",
"typst",
"typst-assets",
@ -3067,9 +3048,10 @@ name = "typst-html"
version = "0.13.1"
dependencies = [
"bumpalo",
"comemo 0.4.0",
"comemo",
"ecow",
"palette",
"rustc-hash",
"time",
"typst-assets",
"typst-library",
@ -3084,10 +3066,11 @@ dependencies = [
name = "typst-ide"
version = "0.13.1"
dependencies = [
"comemo 0.4.0",
"comemo",
"ecow",
"once_cell",
"pathdiff",
"rustc-hash",
"serde",
"typst",
"typst-assets",
@ -3127,7 +3110,7 @@ dependencies = [
"az",
"bumpalo",
"codex",
"comemo 0.4.0",
"comemo",
"ecow",
"hypher",
"icu_properties",
@ -3137,6 +3120,7 @@ dependencies = [
"icu_segmenter",
"kurbo",
"memchr",
"rustc-hash",
"rustybuzz",
"smallvec",
"ttf-parser",
@ -3162,7 +3146,7 @@ dependencies = [
"chinese-number",
"ciborium",
"codex",
"comemo 0.4.0",
"comemo",
"csv",
"ecow",
"flate2",
@ -3188,6 +3172,7 @@ dependencies = [
"regex-syntax",
"roxmltree",
"rust_decimal",
"rustc-hash",
"rustybuzz",
"serde",
"serde_json",
@ -3232,13 +3217,14 @@ version = "0.13.1"
dependencies = [
"az",
"bytemuck",
"comemo 0.4.0",
"comemo",
"ecow",
"image",
"infer",
"krilla",
"krilla-svg",
"pretty_assertions",
"rustc-hash",
"serde",
"smallvec",
"typst-assets",
@ -3255,7 +3241,7 @@ version = "0.13.1"
dependencies = [
"arrayvec",
"bumpalo",
"comemo 0.4.0",
"comemo",
"ecow",
"regex",
"typst-library",
@ -3270,7 +3256,7 @@ name = "typst-render"
version = "0.13.1"
dependencies = [
"bytemuck",
"comemo 0.4.0",
"comemo",
"hayro",
"image",
"pixglyph",
@ -3288,11 +3274,12 @@ name = "typst-svg"
version = "0.13.1"
dependencies = [
"base64",
"comemo 0.4.0",
"comemo",
"ecow",
"flate2",
"hayro",
"image",
"rustc-hash",
"ttf-parser",
"typst-assets",
"typst-library",
@ -3308,6 +3295,7 @@ name = "typst-syntax"
version = "0.13.1"
dependencies = [
"ecow",
"rustc-hash",
"serde",
"toml",
"typst-timing",
@ -3324,12 +3312,13 @@ name = "typst-tests"
version = "0.13.1"
dependencies = [
"clap",
"comemo 0.4.0",
"comemo",
"ecow",
"oxipng",
"parking_lot",
"rayon",
"regex",
"rustc-hash",
"tiny-skia",
"typst",
"typst-assets",
@ -3361,6 +3350,7 @@ dependencies = [
"once_cell",
"portable-atomic",
"rayon",
"rustc-hash",
"siphasher",
"thin-vec",
"unicode-math-class",

View File

@ -49,7 +49,7 @@ clap_mangen = "0.2.10"
codespan-reporting = "0.11"
codex = { git = "https://github.com/typst/codex", rev = "9ac86f9" }
color-print = "0.3.6"
comemo = "0.4"
comemo = "0.5.0"
csv = "1"
ctrlc = "3.4.1"
dirs = "6"
@ -59,6 +59,7 @@ fastrand = "2.3"
flate2 = "1"
fontdb = { version = "0.23", default-features = false }
fs_extra = "1.3"
rustc-hash = "2.1"
glidesort = "0.1.2"
hayagriva = "0.8.1"
hayro-syntax = { git = "https://github.com/LaurenzV/hayro", rev = "e701f95" }
@ -74,8 +75,8 @@ image = { version = "0.25.5", default-features = false, features = ["png", "jpeg
indexmap = { version = "2", features = ["serde"] }
infer = { version = "0.19.0", default-features = false }
kamadak-exif = "0.6"
krilla = { git = "https://github.com/LaurenzV/krilla", branch = "main", default-features = false, features = ["raster-images", "comemo", "rayon", "pdf"] }
krilla-svg = { git = "https://github.com/LaurenzV/krilla", branch = "main" }
krilla = { git = "https://github.com/LaurenzV/krilla", rev = "1246755", default-features = false, features = ["raster-images", "comemo", "rayon", "pdf"] }
krilla-svg = { git = "https://github.com/LaurenzV/krilla", rev = "1246755" }
kurbo = "0.11"
libfuzzer-sys = "0.4"
lipsum = "0.9"

View File

@ -41,6 +41,7 @@ open = { workspace = true }
parking_lot = { workspace = true }
pathdiff = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
same-file = { workspace = true }
self-replace = { workspace = true, optional = true }
semver = { workspace = true }

View File

@ -155,6 +155,10 @@ pub struct QueryCommand {
#[clap(long)]
pub pretty: bool,
/// The target to compile for.
#[clap(long, default_value_t)]
pub target: Target,
/// World arguments.
#[clap(flatten)]
pub world: WorldArgs,
@ -464,6 +468,18 @@ pub enum OutputFormat {
display_possible_values!(OutputFormat);
/// The target to compile for.
// NOTE(review): the variant doc comments below are surfaced as CLI help text
// through clap's `ValueEnum` derive — do not reword them casually, as that
// changes user-visible `--help` output.
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ValueEnum)]
pub enum Target {
/// PDF and image formats.
#[default]
Paged,
/// HTML.
Html,
}
display_possible_values!(Target);
/// Which format to use for diagnostics.
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ValueEnum)]
pub enum DiagnosticFormat {

View File

@ -5,11 +5,13 @@ use typst::World;
use typst::diag::{HintedStrResult, StrResult, Warned, bail};
use typst::engine::Sink;
use typst::foundations::{Content, IntoValue, LocatableSelector, Scope};
use typst::introspection::Introspector;
use typst::layout::PagedDocument;
use typst::syntax::{Span, SyntaxMode};
use typst_eval::eval_string;
use typst_html::HtmlDocument;
use crate::args::{QueryCommand, SerializationFormat};
use crate::args::{QueryCommand, SerializationFormat, Target};
use crate::compile::print_diagnostics;
use crate::set_failed;
use crate::world::SystemWorld;
@ -22,12 +24,17 @@ pub fn query(command: &QueryCommand) -> HintedStrResult<()> {
world.reset();
world.source(world.main()).map_err(|err| err.to_string())?;
let Warned { output, warnings } = typst::compile(&world);
let Warned { output, warnings } = match command.target {
Target::Paged => typst::compile::<PagedDocument>(&world)
.map(|output| output.map(|document| document.introspector)),
Target::Html => typst::compile::<HtmlDocument>(&world)
.map(|output| output.map(|document| document.introspector)),
};
match output {
// Retrieve and print query results.
Ok(document) => {
let data = retrieve(&world, command, &document)?;
Ok(introspector) => {
let data = retrieve(&world, command, &introspector)?;
let serialized = format(data, command)?;
println!("{serialized}");
print_diagnostics(&world, &[], &warnings, command.process.diagnostic_format)
@ -54,7 +61,7 @@ pub fn query(command: &QueryCommand) -> HintedStrResult<()> {
fn retrieve(
world: &dyn World,
command: &QueryCommand,
document: &PagedDocument,
introspector: &Introspector,
) -> HintedStrResult<Vec<Content>> {
let selector = eval_string(
&typst::ROUTINES,
@ -76,11 +83,7 @@ fn retrieve(
})?
.cast::<LocatableSelector>()?;
Ok(document
.introspector
.query(&selector.0)
.into_iter()
.collect::<Vec<_>>())
Ok(introspector.query(&selector.0).into_iter().collect::<Vec<_>>())
}
/// Format the query result in the output format.

View File

@ -1,4 +1,3 @@
use std::collections::{HashMap, HashSet};
use std::io::{self, Write};
use std::iter;
use std::path::PathBuf;
@ -9,6 +8,7 @@ use codespan_reporting::term::termcolor::WriteColor;
use codespan_reporting::term::{self, termcolor};
use ecow::eco_format;
use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher as _};
use rustc_hash::{FxHashMap, FxHashSet};
use same_file::is_same_file;
use typst::diag::{StrResult, bail, warning};
use typst::syntax::Span;
@ -91,10 +91,10 @@ struct Watcher {
/// Keeps track of which paths are watched via `watcher`. The boolean is
/// used during updating for mark-and-sweep garbage collection of paths we
/// should unwatch.
watched: HashMap<PathBuf, bool>,
watched: FxHashMap<PathBuf, bool>,
/// A set of files that should be watched, but don't exist. We manually poll
/// for those.
missing: HashSet<PathBuf>,
missing: FxHashSet<PathBuf>,
}
impl Watcher {
@ -127,8 +127,8 @@ impl Watcher {
output,
rx,
watcher,
watched: HashMap::new(),
missing: HashSet::new(),
watched: FxHashMap::default(),
missing: FxHashSet::default(),
})
}

View File

@ -1,4 +1,3 @@
use std::collections::HashMap;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::sync::{LazyLock, OnceLock};
@ -7,6 +6,7 @@ use std::{fmt, fs, io, mem};
use chrono::{DateTime, Datelike, FixedOffset, Local, Utc};
use ecow::{EcoString, eco_format};
use parking_lot::Mutex;
use rustc_hash::FxHashMap;
use typst::diag::{FileError, FileResult};
use typst::foundations::{Bytes, Datetime, Dict, IntoValue};
use typst::syntax::{FileId, Lines, Source, VirtualPath};
@ -41,7 +41,7 @@ pub struct SystemWorld {
/// Locations of and storage for lazily loaded fonts.
fonts: Vec<FontSlot>,
/// Maps file ids to source files and buffers.
slots: Mutex<HashMap<FileId, FileSlot>>,
slots: Mutex<FxHashMap<FileId, FileSlot>>,
/// Holds information about where packages are stored.
package_storage: PackageStorage,
/// The current datetime if requested. This is stored here to ensure it is
@ -139,7 +139,7 @@ impl SystemWorld {
library: LazyHash::new(library),
book: LazyHash::new(fonts.book),
fonts: fonts.fonts,
slots: Mutex::new(HashMap::new()),
slots: Mutex::new(FxHashMap::default()),
package_storage: package::storage(&world_args.package),
now,
})

View File

@ -21,6 +21,7 @@ typst-utils = { workspace = true }
comemo = { workspace = true }
ecow = { workspace = true }
indexmap = { workspace = true }
rustc-hash = { workspace = true }
toml = { workspace = true }
unicode-segmentation = { workspace = true }

View File

@ -1,6 +1,5 @@
use std::collections::HashSet;
use ecow::eco_format;
use rustc_hash::FxHashSet;
use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail, error};
use typst_library::foundations::{Array, Dict, Value};
use typst_syntax::ast::{self, AstNode};
@ -137,7 +136,7 @@ where
F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<()>,
{
let mut sink = None;
let mut used = HashSet::new();
let mut used = FxHashSet::default();
for p in destruct.items() {
match p {

View File

@ -246,7 +246,7 @@ impl Eval for ast::Dict<'_> {
type Output = Dict;
fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
let mut map = indexmap::IndexMap::new();
let mut map = indexmap::IndexMap::default();
let mut invalid_keys = eco_vec![];
for item in self.items() {

View File

@ -1,5 +1,5 @@
use ecow::eco_format;
use typst_library::diag::{At, SourceResult};
use typst_library::diag::{At, SourceResult, warning};
use typst_library::foundations::{Content, NativeElement, Symbol, SymbolElem, Value};
use typst_library::math::{
AlignPointElem, AttachElem, FracElem, LrElem, PrimesElem, RootElem,
@ -80,7 +80,14 @@ impl Eval for ast::MathAttach<'_> {
let mut elem = AttachElem::new(base);
if let Some(expr) = self.top() {
elem.t.set(Some(expr.eval_display(vm)?));
let top = expr.eval(vm)?;
if let Value::Func(_) = top {
vm.engine.sink.warn(warning!(
expr.span(), "function literal used as superscript";
hint: "wrap the entire function call in parentheses",
));
}
elem.t.set(Some(top.display().spanned(self.span())));
}
// Always attach primes in scripts style (not limits style),
@ -90,7 +97,14 @@ impl Eval for ast::MathAttach<'_> {
}
if let Some(expr) = self.bottom() {
elem.b.set(Some(expr.eval_display(vm)?));
let bottom = expr.eval(vm)?;
if let Value::Func(_) = bottom {
vm.engine.sink.warn(warning!(
expr.span(), "function literal used as subscript";
hint: "wrap the entire function call in parentheses",
));
}
elem.b.set(Some(bottom.display().spanned(self.span())));
}
Ok(elem.pack())

View File

@ -24,6 +24,7 @@ bumpalo = { workspace = true }
comemo = { workspace = true }
ecow = { workspace = true }
palette = { workspace = true }
rustc-hash = { workspace = true }
time = { workspace = true }
[lints]

View File

@ -1,3 +1,4 @@
use ecow::EcoVec;
use typst_library::diag::{SourceResult, warning};
use typst_library::engine::Engine;
use typst_library::foundations::{Content, StyleChain, Target, TargetElem};
@ -15,8 +16,8 @@ pub fn convert_to_nodes<'a>(
engine: &mut Engine,
locator: &mut SplitLocator,
children: impl IntoIterator<Item = Pair<'a>>,
) -> SourceResult<Vec<HtmlNode>> {
let mut output = Vec::new();
) -> SourceResult<EcoVec<HtmlNode>> {
let mut output = EcoVec::new();
for (child, styles) in children {
handle(engine, child, locator, styles, &mut output)?;
}
@ -29,12 +30,12 @@ fn handle(
child: &Content,
locator: &mut SplitLocator,
styles: StyleChain,
output: &mut Vec<HtmlNode>,
output: &mut EcoVec<HtmlNode>,
) -> SourceResult<()> {
if let Some(elem) = child.to_packed::<TagElem>() {
output.push(HtmlNode::Tag(elem.tag.clone()));
} else if let Some(elem) = child.to_packed::<HtmlElem>() {
let mut children = vec![];
let mut children = EcoVec::new();
if let Some(body) = elem.body.get_ref(styles) {
children = html_fragment(engine, body, locator.next(&elem.span()), styles)?;
}

View File

@ -26,7 +26,6 @@ impl Properties {
}
/// Adds a new property in builder-style.
#[expect(unused)]
pub fn with(mut self, property: &str, value: impl Display) -> Self {
self.push(property, value);
self

View File

@ -1,7 +1,8 @@
use std::collections::HashSet;
use std::num::NonZeroUsize;
use comemo::{Tracked, TrackedMut};
use ecow::{EcoVec, eco_vec};
use rustc_hash::FxHashSet;
use typst_library::World;
use typst_library::diag::{SourceResult, bail};
use typst_library::engine::{Engine, Route, Sink, Traced};
@ -87,7 +88,7 @@ fn html_document_impl(
children.iter().copied(),
)?;
let mut link_targets = HashSet::new();
let mut link_targets = FxHashSet::default();
let mut introspector = introspect_html(&output, &mut link_targets);
let mut root = root_element(output, &info)?;
crate::link::identify_link_targets(&mut root, &mut introspector, link_targets);
@ -99,12 +100,12 @@ fn html_document_impl(
#[typst_macros::time(name = "introspect html")]
fn introspect_html(
output: &[HtmlNode],
link_targets: &mut HashSet<Location>,
link_targets: &mut FxHashSet<Location>,
) -> Introspector {
fn discover(
builder: &mut IntrospectorBuilder,
sink: &mut Vec<(Content, Position)>,
link_targets: &mut HashSet<Location>,
link_targets: &mut FxHashSet<Location>,
nodes: &[HtmlNode],
) {
for node in nodes {
@ -141,19 +142,22 @@ fn introspect_html(
/// Wrap the nodes in `<html>` and `<body>` if they are not yet rooted,
/// supplying a suitable `<head>`.
fn root_element(output: Vec<HtmlNode>, info: &DocumentInfo) -> SourceResult<HtmlElement> {
fn root_element(
output: EcoVec<HtmlNode>,
info: &DocumentInfo,
) -> SourceResult<HtmlElement> {
let head = head_element(info);
let body = match classify_output(output)? {
OutputKind::Html(element) => return Ok(element),
OutputKind::Body(body) => body,
OutputKind::Leafs(leafs) => HtmlElement::new(tag::body).with_children(leafs),
};
Ok(HtmlElement::new(tag::html).with_children(vec![head.into(), body.into()]))
Ok(HtmlElement::new(tag::html).with_children(eco_vec![head.into(), body.into()]))
}
/// Generate a `<head>` element.
fn head_element(info: &DocumentInfo) -> HtmlElement {
let mut children = vec![];
let mut children = EcoVec::new();
children.push(HtmlElement::new(tag::meta).with_attr(attr::charset, "utf-8").into());
@ -167,7 +171,7 @@ fn head_element(info: &DocumentInfo) -> HtmlElement {
if let Some(title) = &info.title {
children.push(
HtmlElement::new(tag::title)
.with_children(vec![HtmlNode::Text(title.clone(), Span::detached())])
.with_children(eco_vec![HtmlNode::Text(title.clone(), Span::detached())])
.into(),
);
}
@ -203,9 +207,9 @@ fn head_element(info: &DocumentInfo) -> HtmlElement {
}
/// Determine which kind of output the user generated.
fn classify_output(mut output: Vec<HtmlNode>) -> SourceResult<OutputKind> {
fn classify_output(mut output: EcoVec<HtmlNode>) -> SourceResult<OutputKind> {
let count = output.iter().filter(|node| !matches!(node, HtmlNode::Tag(_))).count();
for node in &mut output {
for node in output.make_mut() {
let HtmlNode::Element(elem) = node else { continue };
let tag = elem.tag;
let mut take = || std::mem::replace(elem, HtmlElement::new(tag::html));
@ -232,5 +236,5 @@ enum OutputKind {
one, but need to supply the `<html>` element.
Body(HtmlElement),
/// The user generated leafs which we wrap in a `<body>` and `<html>`.
Leafs(Vec<HtmlNode>),
Leafs(EcoVec<HtmlNode>),
}

View File

@ -57,7 +57,7 @@ pub struct HtmlElement {
/// The element's attributes.
pub attrs: HtmlAttrs,
/// The element's children.
pub children: Vec<HtmlNode>,
pub children: EcoVec<HtmlNode>,
/// The span from which the element originated, if any.
pub span: Span,
}
@ -68,7 +68,7 @@ impl HtmlElement {
Self {
tag,
attrs: HtmlAttrs::default(),
children: vec![],
children: EcoVec::new(),
span: Span::detached(),
}
}
@ -76,7 +76,7 @@ impl HtmlElement {
/// Attach children to the element.
///
/// Note: This overwrites potential previous children.
pub fn with_children(mut self, children: Vec<HtmlNode>) -> Self {
pub fn with_children(mut self, children: EcoVec<HtmlNode>) -> Self {
self.children = children;
self
}
@ -105,8 +105,53 @@ impl HtmlTag {
bail!("tag name must not be empty");
}
if let Some(c) = string.chars().find(|&c| !charsets::is_valid_in_tag_name(c)) {
bail!("the character {} is not valid in a tag name", c.repr());
let mut has_hyphen = false;
let mut has_uppercase = false;
for c in string.chars() {
if c == '-' {
has_hyphen = true;
} else if !charsets::is_valid_in_tag_name(c) {
bail!("the character {} is not valid in a tag name", c.repr());
} else {
has_uppercase |= c.is_ascii_uppercase();
}
}
// If we encounter a hyphen, we are dealing with a custom element rather
// than a standard HTML element.
//
// A valid custom element name must:
// - Contain at least one hyphen (U+002D)
// - Start with an ASCII lowercase letter (a-z)
// - Not contain any ASCII uppercase letters (A-Z)
// - Not be one of the reserved names
// - Only contain valid characters (ASCII alphanumeric and hyphens)
//
// See https://html.spec.whatwg.org/multipage/custom-elements.html#valid-custom-element-name
if has_hyphen {
if !string.starts_with(|c: char| c.is_ascii_lowercase()) {
bail!("custom element name must start with a lowercase letter");
}
if has_uppercase {
bail!("custom element name must not contain uppercase letters");
}
// These names are used in SVG and MathML. Since `html.elem` only
// supports creation of _HTML_ elements, they are forbidden.
if matches!(
string,
"annotation-xml"
| "color-profile"
| "font-face"
| "font-face-src"
| "font-face-uri"
| "font-face-format"
| "font-face-name"
| "missing-glyph"
) {
bail!("name is reserved and not valid for a custom element");
}
}
Ok(Self(PicoStr::intern(string)))
@ -292,7 +337,7 @@ pub struct HtmlFrame {
/// An ID to assign to the SVG itself.
pub id: Option<EcoString>,
/// IDs to assign to destination jump points within the SVG.
pub link_points: Vec<(Point, EcoString)>,
pub link_points: EcoVec<(Point, EcoString)>,
}
impl HtmlFrame {
@ -302,7 +347,7 @@ impl HtmlFrame {
inner,
text_size: styles.resolve(TextElem::size),
id: None,
link_points: vec![],
link_points: EcoVec::new(),
}
}
}

View File

@ -1,4 +1,5 @@
use comemo::{Track, Tracked, TrackedMut};
use ecow::EcoVec;
use typst_library::diag::{At, SourceResult};
use typst_library::engine::{Engine, Route, Sink, Traced};
use typst_library::foundations::{Content, StyleChain};
@ -16,7 +17,7 @@ pub fn html_fragment(
content: &Content,
locator: Locator,
styles: StyleChain,
) -> SourceResult<Vec<HtmlNode>> {
) -> SourceResult<EcoVec<HtmlNode>> {
html_fragment_impl(
engine.routines,
engine.world,
@ -43,7 +44,7 @@ fn html_fragment_impl(
content: &Content,
locator: Tracked<Locator>,
styles: StyleChain,
) -> SourceResult<Vec<HtmlNode>> {
) -> SourceResult<EcoVec<HtmlNode>> {
let link = LocatorLink::new(locator);
let mut locator = Locator::link(&link).split();
let mut engine = Engine {

View File

@ -16,7 +16,7 @@ mod typed;
pub use self::document::html_document;
pub use self::dom::*;
pub use self::encode::html;
pub use self::rules::register;
pub use self::rules::{html_span_filled, register};
use ecow::EcoString;
use typst_library::Category;

View File

@ -1,7 +1,8 @@
use std::collections::{HashMap, HashSet, VecDeque};
use std::collections::VecDeque;
use comemo::Track;
use ecow::{EcoString, eco_format};
use ecow::{EcoString, EcoVec, eco_format, eco_vec};
use rustc_hash::{FxHashMap, FxHashSet};
use typst_library::foundations::{Label, NativeElement};
use typst_library::introspection::{Introspector, Location, Tag};
use typst_library::layout::{Frame, FrameItem, Point};
@ -16,7 +17,7 @@ use crate::{HtmlElement, HtmlNode, attr, tag};
/// in favor of the query in `identify_link_targets`. For the time being, some
/// links are created without existence of a `LinkElem`, so this is
/// unfortunately necessary.
pub fn introspect_frame_links(frame: &Frame, targets: &mut HashSet<Location>) {
pub fn introspect_frame_links(frame: &Frame, targets: &mut FxHashSet<Location>) {
for (_, item) in frame.items() {
match item {
FrameItem::Link(Destination::Location(loc), _) => {
@ -35,7 +36,7 @@ pub fn introspect_frame_links(frame: &Frame, targets: &mut HashSet<Location>) {
pub fn identify_link_targets(
root: &mut HtmlElement,
introspector: &mut Introspector,
mut targets: HashSet<Location>,
mut targets: FxHashSet<Location>,
) {
// Query for all links with an intra-doc (i.e. `Location`) destination to
// know what needs IDs.
@ -72,13 +73,13 @@ pub fn identify_link_targets(
/// Traverses a list of nodes.
fn traverse(
work: &mut Work,
targets: &HashSet<Location>,
targets: &FxHashSet<Location>,
identificator: &mut Identificator<'_>,
nodes: &mut Vec<HtmlNode>,
nodes: &mut EcoVec<HtmlNode>,
) {
let mut i = 0;
while i < nodes.len() {
let node = &mut nodes[i];
let node = &mut nodes.make_mut()[i];
match node {
// When visiting a start tag, we check whether the element needs an
// ID and if so, add it to the queue, so that its first child node
@ -114,7 +115,7 @@ fn traverse(
HtmlNode::Text(..) => {
work.drain(|label| {
let mut element =
HtmlElement::new(tag::span).with_children(vec![node.clone()]);
HtmlElement::new(tag::span).with_children(eco_vec![node.clone()]);
let id = identificator.assign(&mut element, label);
*node = HtmlNode::Element(element);
id
@ -144,10 +145,10 @@ fn traverse(
/// Traverses a frame embedded in HTML.
fn traverse_frame(
work: &mut Work,
targets: &HashSet<Location>,
targets: &FxHashSet<Location>,
identificator: &mut Identificator<'_>,
frame: &Frame,
link_points: &mut Vec<(Point, EcoString)>,
link_points: &mut EcoVec<(Point, EcoString)>,
) {
for (_, item) in frame.items() {
match item {
@ -174,13 +175,13 @@ struct Work {
/// now.
queue: VecDeque<(Location, Option<Label>)>,
/// The resulting mapping from element location's to HTML IDs.
ids: HashMap<Location, EcoString>,
ids: FxHashMap<Location, EcoString>,
}
impl Work {
/// Sets up.
fn new() -> Self {
Self { queue: VecDeque::new(), ids: HashMap::new() }
Self { queue: VecDeque::new(), ids: FxHashMap::default() }
}
/// Marks the element with the given location and label as in need of an
@ -215,7 +216,7 @@ impl Work {
struct Identificator<'a> {
introspector: &'a Introspector,
loc_counter: usize,
label_counter: HashMap<Label, usize>,
label_counter: FxHashMap<Label, usize>,
}
impl<'a> Identificator<'a> {
@ -224,7 +225,7 @@ impl<'a> Identificator<'a> {
Self {
introspector,
loc_counter: 0,
label_counter: HashMap::new(),
label_counter: FxHashMap::default(),
}
}

View File

@ -11,13 +11,13 @@ use typst_library::layout::{OuterVAlignment, Sizing};
use typst_library::model::{
Attribution, CiteElem, CiteGroup, Destination, EmphElem, EnumElem, FigureCaption,
FigureElem, HeadingElem, LinkElem, LinkTarget, ListElem, ParbreakElem, QuoteElem,
RefElem, StrongElem, TableCell, TableElem, TermsElem,
RefElem, StrongElem, TableCell, TableElem, TermsElem, TitleElem,
};
use typst_library::text::{
HighlightElem, LinebreakElem, OverlineElem, RawElem, RawLine, SmallcapsElem,
SpaceElem, StrikeElem, SubElem, SuperElem, UnderlineElem,
};
use typst_library::visualize::ImageElem;
use typst_library::visualize::{Color, ImageElem};
use crate::{FrameElem, HtmlAttrs, HtmlElem, HtmlTag, attr, css, tag};
@ -32,6 +32,7 @@ pub fn register(rules: &mut NativeRuleMap) {
rules.register(Html, ENUM_RULE);
rules.register(Html, TERMS_RULE);
rules.register(Html, LINK_RULE);
rules.register(Html, TITLE_RULE);
rules.register(Html, HEADING_RULE);
rules.register(Html, FIGURE_RULE);
rules.register(Html, FIGURE_CAPTION_RULE);
@ -161,6 +162,12 @@ const LINK_RULE: ShowFn<LinkElem> = |elem, engine, _| {
.pack())
};
/// Show rule for the document title in HTML export: the resolved title body
/// is emitted as a top-level `<h1>` element.
const TITLE_RULE: ShowFn<TitleElem> = |elem, _, styles| {
    // Resolve the title's body content, attributing errors to the element's span.
    let body = elem.resolve_body(styles).at(elem.span())?;
    Ok(HtmlElem::new(tag::h1).with_body(Some(body)).pack())
};
const HEADING_RULE: ShowFn<HeadingElem> = |elem, engine, styles| {
let span = elem.span();
@ -415,11 +422,36 @@ const RAW_RULE: ShowFn<RawElem> = |elem, _, styles| {
seq.push(line.clone().pack());
}
Ok(HtmlElem::new(if elem.block.get(styles) { tag::pre } else { tag::code })
let mut inline = css::Properties::new();
let block = elem.block.get(styles);
if !block {
// Without the `<pre>` tag, whitespace would be collapsed by default.
inline.push("white-space", "pre-wrap");
}
let code = HtmlElem::new(tag::code)
.with_styles(inline)
.with_body(Some(Content::sequence(seq)))
.pack())
.pack()
.spanned(elem.span());
Ok(if block { HtmlElem::new(tag::pre).with_body(Some(code)).pack() } else { code })
};
/// This is used by `RawElem::synthesize` through a routine.
///
/// It's a temporary workaround until `TextElem::fill` is supported in HTML
/// export.
#[doc(hidden)]
pub fn html_span_filled(content: Content, color: Color) -> Content {
    // Remember the content's span before it is moved into the element body.
    let span = content.span();
    let styles = css::Properties::new().with("color", css::color(color));
    HtmlElem::new(tag::span)
        .with_styles(styles)
        .with_body(Some(content))
        .pack()
        .spanned(span)
}
const RAW_LINE_RULE: ShowFn<RawLine> = |elem, _, _| Ok(elem.body.clone());
const IMAGE_RULE: ShowFn<ImageElem> = |elem, engine, styles| {

View File

@ -18,6 +18,7 @@ typst-eval = { workspace = true }
comemo = { workspace = true }
ecow = { workspace = true }
pathdiff = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
unscanny = { workspace = true }

View File

@ -1,7 +1,6 @@
use std::collections::HashSet;
use comemo::Track;
use ecow::{EcoString, EcoVec, eco_vec};
use rustc_hash::FxHashSet;
use typst::foundations::{Label, Styles, Value};
use typst::layout::PagedDocument;
use typst::model::{BibliographyElem, FigureElem};
@ -76,7 +75,7 @@ pub fn analyze_labels(
document: &PagedDocument,
) -> (Vec<(Label, Option<EcoString>)>, usize) {
let mut output = vec![];
let mut seen_labels = HashSet::new();
let mut seen_labels = FxHashSet::default();
// Labels in the document.
for elem in document.introspector.all() {

View File

@ -1,8 +1,9 @@
use std::cmp::Reverse;
use std::collections::{BTreeMap, HashSet};
use std::collections::BTreeMap;
use std::ffi::OsStr;
use ecow::{EcoString, eco_format};
use rustc_hash::FxHashSet;
use serde::{Deserialize, Serialize};
use typst::foundations::{
AutoValue, CastInfo, Func, Label, NoneValue, ParamInfo, Repr, StyleChain, Styles,
@ -739,7 +740,7 @@ fn param_completions<'a>(
// Determine which arguments are already present.
let mut existing_positional = 0;
let mut existing_named = HashSet::new();
let mut existing_named = FxHashSet::default();
for arg in args.items() {
match arg {
ast::Arg::Pos(_) => {
@ -1116,7 +1117,7 @@ struct CompletionContext<'a> {
explicit: bool,
from: usize,
completions: Vec<Completion>,
seen_casts: HashSet<u128>,
seen_casts: FxHashSet<u128>,
}
impl<'a> CompletionContext<'a> {
@ -1141,7 +1142,7 @@ impl<'a> CompletionContext<'a> {
explicit,
from: cursor,
completions: vec![],
seen_casts: HashSet::new(),
seen_casts: FxHashSet::default(),
})
}

View File

@ -1,8 +1,8 @@
use std::borrow::Borrow;
use std::collections::HashMap;
use std::sync::Arc;
use ecow::EcoString;
use rustc_hash::FxHashMap;
use typst::diag::{FileError, FileResult};
use typst::foundations::{Bytes, Datetime, Smart};
use typst::layout::{Abs, Margin, PageElem};
@ -137,8 +137,8 @@ impl IdeWorld for TestWorld {
/// Test-specific files.
#[derive(Default, Clone)]
struct TestFiles {
assets: HashMap<FileId, Bytes>,
sources: HashMap<FileId, Source>,
assets: FxHashMap<FileId, Bytes>,
sources: FxHashMap<FileId, Source>,
}
/// Shared foundation of all test worlds.

View File

@ -32,6 +32,7 @@ icu_provider_blob = { workspace = true }
icu_segmenter = { workspace = true }
kurbo = { workspace = true }
memchr = { workspace = true }
rustc-hash = { workspace = true }
rustybuzz = { workspace = true }
smallvec = { workspace = true }
ttf-parser = { workspace = true }

View File

@ -7,13 +7,13 @@ mod distribute;
pub(crate) use self::block::unbreakable_pod;
use std::collections::HashSet;
use std::num::NonZeroUsize;
use std::rc::Rc;
use bumpalo::Bump;
use comemo::{Track, Tracked, TrackedMut};
use ecow::EcoVec;
use rustc_hash::FxHashSet;
use typst_library::World;
use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail};
use typst_library::engine::{Engine, Route, Sink, Traced};
@ -303,7 +303,7 @@ struct Work<'a, 'b> {
/// Identifies floats and footnotes that can be skipped if visited because
/// they were already handled and incorporated as column or page level
/// insertions.
skips: Rc<HashSet<Location>>,
skips: Rc<FxHashSet<Location>>,
}
impl<'a, 'b> Work<'a, 'b> {
@ -316,7 +316,7 @@ impl<'a, 'b> Work<'a, 'b> {
footnotes: EcoVec::new(),
footnote_spill: None,
tags: EcoVec::new(),
skips: Rc::new(HashSet::new()),
skips: Rc::new(FxHashSet::default()),
}
}

View File

@ -739,7 +739,9 @@ fn assemble(
}
advance -= max_overlap;
growable += max_overlap - min_overlap;
// In case we have that max_overlap < min_overlap, ensure we
// don't decrease the value of growable.
growable += (max_overlap - min_overlap).max(Abs::zero());
}
full += advance;

View File

@ -1,5 +1,4 @@
use std::collections::HashSet;
use rustc_hash::FxHashSet;
use typst_library::foundations::StyleChain;
use typst_library::introspection::{Locator, SplitLocator, Tag, TagElem};
use typst_library::layout::{PagebreakElem, Parity};
@ -134,7 +133,7 @@ fn migrate_unterminated_tags(children: &mut [Pair], mid: usize) -> usize {
// Determine the set of tag locations which we won't migrate (because they
// are terminated).
let excluded: HashSet<_> = children[start..mid]
let excluded: FxHashSet<_> = children[start..mid]
.iter()
.filter_map(|(c, _)| match c.to_packed::<TagElem>()?.tag {
Tag::Start(_) => None,

View File

@ -21,7 +21,7 @@ use typst_library::model::{
Attribution, BibliographyElem, CiteElem, CiteGroup, CslSource, Destination, EmphElem,
EnumElem, FigureCaption, FigureElem, FootnoteElem, FootnoteEntry, HeadingElem,
LinkElem, ListElem, Outlinable, OutlineElem, OutlineEntry, ParElem, ParbreakElem,
QuoteElem, RefElem, StrongElem, TableCell, TableElem, TermsElem, Works,
QuoteElem, RefElem, StrongElem, TableCell, TableElem, TermsElem, TitleElem, Works,
};
use typst_library::pdf::{ArtifactElem, EmbedElem, PdfMarkerTag};
use typst_library::text::{
@ -48,6 +48,7 @@ pub fn register(rules: &mut NativeRuleMap) {
rules.register(Paged, TERMS_RULE);
rules.register(Paged, LINK_MARKER_RULE);
rules.register(Paged, LINK_RULE);
rules.register(Paged, TITLE_RULE);
rules.register(Paged, HEADING_RULE);
rules.register(Paged, FIGURE_RULE);
rules.register(Paged, FIGURE_CAPTION_RULE);
@ -231,6 +232,12 @@ const LINK_RULE: ShowFn<LinkElem> = |elem, engine, styles| {
.set(LinkElem::current, Some(dest)))
};
const TITLE_RULE: ShowFn<TitleElem> = |elem, _, styles| {
Ok(BlockElem::new()
.with_body(Some(BlockBody::Content(elem.resolve_body(styles).at(elem.span())?)))
.pack())
};
const HEADING_RULE: ShowFn<HeadingElem> = |elem, engine, styles| {
const SPACING_TO_NUMBERING: Em = Em::new(0.3);

View File

@ -50,6 +50,7 @@ regex = { workspace = true }
regex-syntax = { workspace = true }
roxmltree = { workspace = true }
rust_decimal = { workspace = true }
rustc-hash = { workspace = true }
rustybuzz = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }

View File

@ -151,6 +151,13 @@ pub struct Warned<T> {
pub warnings: EcoVec<SourceDiagnostic>,
}
impl<T> Warned<T> {
/// Maps the output, keeping the same warnings.
pub fn map<R, F: FnOnce(T) -> R>(self, f: F) -> Warned<R> {
Warned { output: f(self.output), warnings: self.warnings }
}
}
/// An error or warning in a source or text file.
///
/// The contained spans will only be detached if any of the input source files

View File

@ -1,11 +1,11 @@
//! Definition of the central compilation context.
use std::collections::HashSet;
use std::sync::atomic::{AtomicUsize, Ordering};
use comemo::{Track, Tracked, TrackedMut, Validate};
use comemo::{Track, Tracked, TrackedMut};
use ecow::EcoVec;
use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};
use rustc_hash::FxHashSet;
use typst_syntax::{FileId, Span};
use crate::World;
@ -135,7 +135,7 @@ pub struct Sink {
/// Warnings emitted during iteration.
warnings: EcoVec<SourceDiagnostic>,
/// Hashes of all warning's spans and messages for warning deduplication.
warnings_set: HashSet<u128>,
warnings_set: FxHashSet<u128>,
/// A sequence of traced values for a span.
values: EcoVec<(Value, Option<Styles>)>,
}
@ -219,7 +219,7 @@ pub struct Route<'a> {
// We need to override the constraint's lifetime here so that `Tracked` is
// covariant over the constraint. If it becomes invariant, we're in for a
// world of lifetime pain.
outer: Option<Tracked<'a, Self, <Route<'static> as Validate>::Constraint>>,
outer: Option<Tracked<'a, Self, <Route<'static> as Track>::Call>>,
/// This is set if this route segment was inserted through the start of a
/// module evaluation.
id: Option<FileId>,

View File

@ -5,6 +5,7 @@ use std::sync::Arc;
use ecow::{EcoString, eco_format};
use indexmap::IndexMap;
use rustc_hash::FxBuildHasher;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use typst_syntax::is_ident;
use typst_utils::ArcExt;
@ -20,7 +21,7 @@ use crate::foundations::{
macro_rules! __dict {
($($key:expr => $value:expr),* $(,)?) => {{
#[allow(unused_mut)]
let mut map = $crate::foundations::IndexMap::new();
let mut map = $crate::foundations::IndexMap::default();
$(map.insert($key.into(), $crate::foundations::IntoValue::into_value($value));)*
$crate::foundations::Dict::from(map)
}};
@ -66,7 +67,7 @@ pub use crate::__dict as dict;
/// ```
#[ty(scope, cast, name = "dictionary")]
#[derive(Default, Clone, PartialEq)]
pub struct Dict(Arc<IndexMap<Str, Value>>);
pub struct Dict(Arc<IndexMap<Str, Value, FxBuildHasher>>);
impl Dict {
/// Create a new, empty dictionary.
@ -343,7 +344,7 @@ impl<'de> Deserialize<'de> for Dict {
where
D: Deserializer<'de>,
{
Ok(IndexMap::<Str, Value>::deserialize(deserializer)?.into())
Ok(IndexMap::<Str, Value, FxBuildHasher>::deserialize(deserializer)?.into())
}
}
@ -377,8 +378,8 @@ impl<'a> IntoIterator for &'a Dict {
}
}
impl From<IndexMap<Str, Value>> for Dict {
fn from(map: IndexMap<Str, Value>) -> Self {
impl From<IndexMap<Str, Value, FxBuildHasher>> for Dict {
fn from(map: IndexMap<Str, Value, FxBuildHasher>) -> Self {
Self(Arc::new(map))
}
}

View File

@ -4,6 +4,7 @@ use std::hash::{Hash, Hasher};
use ecow::{EcoString, eco_format};
use indexmap::IndexMap;
use indexmap::map::Entry;
use rustc_hash::FxBuildHasher;
use typst_syntax::Span;
use crate::diag::{DeprecationSink, HintedStrResult, HintedString, StrResult, bail};
@ -102,7 +103,7 @@ impl<'a> Scopes<'a> {
/// A map from binding names to values.
#[derive(Default, Clone)]
pub struct Scope {
map: IndexMap<EcoString, Binding>,
map: IndexMap<EcoString, Binding, FxBuildHasher>,
deduplicate: bool,
category: Option<Category>,
}

View File

@ -1,11 +1,11 @@
use std::any::{Any, TypeId};
use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::{mem, ptr};
use comemo::Tracked;
use ecow::{EcoString, EcoVec, eco_vec};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use typst_syntax::Span;
use typst_utils::LazyHash;
@ -938,7 +938,7 @@ fn block_wrong_type(func: Element, id: u8, value: &Block) -> ! {
/// Holds native show rules.
pub struct NativeRuleMap {
rules: HashMap<(Element, Target), NativeShowRule>,
rules: FxHashMap<(Element, Target), NativeShowRule>,
}
/// The signature of a native show rule.
@ -956,7 +956,7 @@ impl NativeRuleMap {
///
/// Contains built-in rules for a few special elements.
pub fn new() -> Self {
let mut rules = Self { rules: HashMap::new() };
let mut rules = Self { rules: FxHashMap::default() };
// ContextElem is as special as SequenceElem and StyledElem and could,
// in theory, also be special cased in realization.

View File

@ -1,9 +1,10 @@
use std::collections::{BTreeSet, HashMap};
use std::collections::BTreeSet;
use std::fmt::{self, Debug, Display, Formatter, Write};
use std::sync::Arc;
use codex::ModifierSet;
use ecow::{EcoString, eco_format};
use rustc_hash::FxHashMap;
use serde::{Serialize, Serializer};
use typst_syntax::{Span, Spanned, is_ident};
use typst_utils::hash128;
@ -221,7 +222,7 @@ impl Symbol {
// Maps from canonicalized 128-bit hashes to indices of variants we've
// seen before.
let mut seen = HashMap::<u128, usize>::new();
let mut seen = FxHashMap::<u128, usize>::default();
// A list of modifiers, cleared & reused in each iteration.
let mut modifiers = Vec::new();

View File

@ -1,10 +1,11 @@
use std::collections::{BTreeSet, HashMap, HashSet};
use std::collections::BTreeSet;
use std::fmt::{self, Debug, Formatter};
use std::hash::Hash;
use std::num::NonZeroUsize;
use std::sync::RwLock;
use ecow::{EcoString, EcoVec};
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use typst_utils::NonZeroExt;
@ -31,14 +32,14 @@ pub struct Introspector {
keys: MultiMap<u128, Location>,
/// Accelerates lookup of elements by location.
locations: HashMap<Location, usize>,
locations: FxHashMap<Location, usize>,
/// Accelerates lookup of elements by label.
labels: MultiMap<Label, usize>,
/// Maps from element locations to assigned HTML IDs. This used to support
/// intra-doc links in HTML export. In paged export, is is simply left
/// empty and [`Self::html_id`] is not used.
html_ids: HashMap<Location, EcoString>,
html_ids: FxHashMap<Location, EcoString>,
/// Caches queries done on the introspector. This is important because
/// even if all top-level queries are distinct, they often have shared
@ -63,7 +64,7 @@ impl Introspector {
/// Enriches an existing introspector with HTML IDs, which were assigned
/// to the DOM in a post-processing step.
pub fn set_html_ids(&mut self, html_ids: HashMap<Location, EcoString>) {
pub fn set_html_ids(&mut self, html_ids: FxHashMap<Location, EcoString>) {
self.html_ids = html_ids;
}
@ -313,7 +314,7 @@ impl Debug for Introspector {
/// A map from one keys to multiple elements.
#[derive(Clone)]
struct MultiMap<K, V>(HashMap<K, SmallVec<[V; 1]>>);
struct MultiMap<K, V>(FxHashMap<K, SmallVec<[V; 1]>>);
impl<K, V> MultiMap<K, V>
where
@ -334,13 +335,13 @@ where
impl<K, V> Default for MultiMap<K, V> {
fn default() -> Self {
Self(HashMap::new())
Self(FxHashMap::default())
}
}
/// Caches queries.
#[derive(Default)]
struct QueryCache(RwLock<HashMap<u128, EcoVec<Content>>>);
struct QueryCache(RwLock<FxHashMap<u128, EcoVec<Content>>>);
impl QueryCache {
fn get(&self, hash: u128) -> Option<EcoVec<Content>> {
@ -364,11 +365,11 @@ pub struct IntrospectorBuilder {
pub pages: usize,
pub page_numberings: Vec<Option<Numbering>>,
pub page_supplements: Vec<Content>,
pub html_ids: HashMap<Location, EcoString>,
seen: HashSet<Location>,
pub html_ids: FxHashMap<Location, EcoString>,
seen: FxHashSet<Location>,
insertions: MultiMap<Location, Vec<Pair>>,
keys: MultiMap<u128, Location>,
locations: HashMap<Location, usize>,
locations: FxHashMap<Location, usize>,
labels: MultiMap<Label, usize>,
}

View File

@ -1,9 +1,9 @@
use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter};
use std::hash::Hash;
use std::sync::OnceLock;
use comemo::{Tracked, Validate};
use comemo::{Track, Tracked};
use rustc_hash::FxHashMap;
use crate::introspection::{Introspector, Location};
@ -188,7 +188,7 @@ impl<'a> Locator<'a> {
SplitLocator {
local: self.local,
outer: self.outer,
disambiguators: HashMap::new(),
disambiguators: FxHashMap::default(),
}
}
@ -244,7 +244,7 @@ pub struct SplitLocator<'a> {
/// for all the layers beyond the memoization boundary on-demand.
outer: Option<&'a LocatorLink<'a>>,
/// Simply counts up the number of times we've seen each local hash.
disambiguators: HashMap<u128, usize>,
disambiguators: FxHashMap<u128, usize>,
}
impl<'a> SplitLocator<'a> {
@ -312,7 +312,7 @@ enum LinkKind<'a> {
/// We need to override the constraint's lifetime here so that `Tracked` is
/// covariant over the constraint. If it becomes invariant, we're in for a
/// world of lifetime pain.
Outer(Tracked<'a, Locator<'a>, <Locator<'static> as Validate>::Constraint>),
Outer(Tracked<'a, Locator<'a>, <Locator<'static> as Track>::Call>),
/// A link which indicates that we are in measurement mode.
Measure(Location),
}

View File

@ -117,6 +117,8 @@ use crate::foundations::{Array, Context, LocatableSelector, Value, func};
/// ]
/// ```
///
/// ## Retrieving a specific field
///
/// Frequently, you're interested in only one specific field of the resulting
/// elements. In the case of the `metadata` element, the `value` field is the
/// interesting one. You can extract just this field with the `--field`
@ -134,6 +136,12 @@ use crate::foundations::{Array, Context, LocatableSelector, Value, func};
/// $ typst query example.typ "<note>" --field value --one
/// "This is a note"
/// ```
///
/// ## Querying for a specific export target
///
/// In case you need to query a document when exporting for a specific target,
/// you can use the `--target` argument. Valid values are `paged`, and `html`
/// (if the [`html`]($html) feature is enabled).
#[func(contextual)]
pub fn query(
engine: &mut Engine,

View File

@ -1,5 +1,4 @@
use std::any::TypeId;
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fmt::{self, Debug, Formatter};
use std::path::Path;
@ -14,6 +13,7 @@ use hayagriva::{
SpecificLocator, citationberg,
};
use indexmap::IndexMap;
use rustc_hash::{FxBuildHasher, FxHashMap};
use smallvec::{SmallVec, smallvec};
use typst_syntax::{Span, Spanned, SyntaxMode};
use typst_utils::{ManuallyHash, PicoStr};
@ -217,7 +217,9 @@ impl LocalName for Packed<BibliographyElem> {
/// A loaded bibliography.
#[derive(Clone, PartialEq, Hash)]
pub struct Bibliography(Arc<ManuallyHash<IndexMap<Label, hayagriva::Entry>>>);
pub struct Bibliography(
Arc<ManuallyHash<IndexMap<Label, hayagriva::Entry, FxBuildHasher>>>,
);
impl Bibliography {
/// Load a bibliography from data sources.
@ -234,7 +236,7 @@ impl Bibliography {
#[comemo::memoize]
#[typst_macros::time(name = "load bibliography")]
fn decode(data: &[Loaded]) -> SourceResult<Bibliography> {
let mut map = IndexMap::new();
let mut map = IndexMap::default();
let mut duplicates = Vec::<EcoString>::new();
// We might have multiple bib/yaml files
@ -440,9 +442,35 @@ impl Reflect for CslSource {
#[comemo::memoize]
fn input() -> CastInfo {
let source = std::iter::once(DataSource::input());
let names = ArchivedStyle::all().iter().map(|name| {
CastInfo::Value(name.names()[0].into_value(), name.display_name())
});
/// All possible names and their short documentation for `ArchivedStyle`, including aliases.
static ARCHIVED_STYLE_NAMES: LazyLock<Vec<(&&str, &'static str)>> =
LazyLock::new(|| {
ArchivedStyle::all()
.iter()
.flat_map(|name| {
let (main_name, aliases) = name
.names()
.split_first()
.expect("all ArchivedStyle should have at least one name");
std::iter::once((main_name, name.display_name())).chain(
aliases.iter().map(move |alias| {
// Leaking is okay here, because we are in a `LazyLock`.
let docs: &'static str = Box::leak(
format!("A short alias of `{main_name}`")
.into_boxed_str(),
);
(alias, docs)
}),
)
})
.collect()
});
let names = ARCHIVED_STYLE_NAMES
.iter()
.map(|(value, docs)| CastInfo::Value(value.into_value(), docs));
CastInfo::Union(source.into_iter().chain(names).collect())
}
@ -486,7 +514,7 @@ impl IntoValue for CslSource {
/// citations to do it.
pub struct Works {
/// Maps from the location of a citation group to its rendered content.
pub citations: HashMap<Location, SourceResult<Content>>,
pub citations: FxHashMap<Location, SourceResult<Content>>,
/// Lists all references in the bibliography, with optional prefix, or
/// `None` if the citation style can't be used for bibliographies.
pub references: Option<Vec<(Option<Content>, Content)>>,
@ -528,7 +556,7 @@ struct Generator<'a> {
/// bibliography driver and needed when processing hayagriva's output.
infos: Vec<GroupInfo>,
/// Citations with unresolved keys.
failures: HashMap<Location, SourceResult<Content>>,
failures: FxHashMap<Location, SourceResult<Content>>,
}
/// Details about a group of merged citations. All citations are put into groups
@ -571,7 +599,7 @@ impl<'a> Generator<'a> {
bibliography,
groups,
infos,
failures: HashMap::new(),
failures: FxHashMap::default(),
})
}
@ -702,10 +730,10 @@ impl<'a> Generator<'a> {
fn display_citations(
&mut self,
rendered: &hayagriva::Rendered,
) -> StrResult<HashMap<Location, SourceResult<Content>>> {
) -> StrResult<FxHashMap<Location, SourceResult<Content>>> {
// Determine for each citation key where in the bibliography it is,
// so that we can link there.
let mut links = HashMap::new();
let mut links = FxHashMap::default();
if let Some(bibliography) = &rendered.bibliography {
let location = self.bibliography.location().unwrap();
for (k, item) in bibliography.items.iter().enumerate() {
@ -760,7 +788,7 @@ impl<'a> Generator<'a> {
// Determine for each citation key where it first occurred, so that we
// can link there.
let mut first_occurrences = HashMap::new();
let mut first_occurrences = FxHashMap::default();
for info in &self.infos {
for subinfo in &info.subinfos {
let key = subinfo.key.resolve();
@ -1056,4 +1084,28 @@ mod tests {
let _ = CslStyle::from_archived(archived);
}
}
#[test]
fn test_csl_source_cast_info_include_all_names() {
let CastInfo::Union(cast_info) = CslSource::input() else {
panic!("the cast info of CslSource should be a union");
};
let missing: Vec<_> = ArchivedStyle::all()
.iter()
.flat_map(|style| style.names())
.filter(|name| {
let found = cast_info.iter().any(|info| match info {
CastInfo::Value(Value::Str(n), _) => n.as_str() == **name,
_ => false,
});
!found
})
.collect();
assert!(
missing.is_empty(),
"missing style names in CslSource cast info: '{missing:?}'"
);
}
}

View File

@ -20,6 +20,7 @@ mod reference;
mod strong;
mod table;
mod terms;
mod title;
pub use self::bibliography::*;
pub use self::cite::*;
@ -39,6 +40,7 @@ pub use self::reference::*;
pub use self::strong::*;
pub use self::table::*;
pub use self::terms::*;
pub use self::title::*;
use crate::foundations::Scope;
@ -54,6 +56,7 @@ pub fn define(global: &mut Scope) {
global.define_elem::<EnumElem>();
global.define_elem::<TermsElem>();
global.define_elem::<LinkElem>();
global.define_elem::<TitleElem>();
global.define_elem::<HeadingElem>();
global.define_elem::<FigureElem>();
global.define_elem::<QuoteElem>();

View File

@ -0,0 +1,77 @@
use crate::diag::{Hint, HintedStrResult};
use crate::foundations::{Content, Packed, ShowSet, Smart, StyleChain, Styles, elem};
use crate::introspection::Locatable;
use crate::layout::{BlockElem, Em};
use crate::model::DocumentElem;
use crate::text::{FontWeight, TextElem, TextSize};
/// A document title.
///
/// This should be used to display the main title of the whole document and
/// should occur only once per document. In contrast, level 1
/// [headings]($heading) are intended to be used for the top-level sections of
/// the document.
///
/// Note that additional frontmatter (like an author list) that should appear
/// together with the title does not belong in its body.
///
/// In HTML export, this shows as a `h1` element while level 1 headings show
/// as `h2` elements.
///
/// # Example
/// ```example
/// #set document(
/// title: [Interstellar Mail Delivery]
/// )
///
/// #title()
///
/// = Introduction
/// In recent years, ...
/// ```
#[elem(Locatable, ShowSet)]
pub struct TitleElem {
/// The content of the title.
///
/// When omitted (or `{auto}`), this will default to [`document.title`]. In
/// this case, a document title must have been previously set with
/// `{set document(title: [..])}`.
///
/// ```example
/// #set document(title: "Course ABC, Homework 1")
/// #title[Homework 1]
///
/// ...
/// ```
#[positional]
pub body: Smart<Content>,
}
impl TitleElem {
pub fn resolve_body(&self, styles: StyleChain) -> HintedStrResult<Content> {
match self.body.get_cloned(styles) {
Smart::Auto => styles
.get_cloned(DocumentElem::title)
.ok_or("document title was not set")
.hint("set the title with `set document(title: [...])`")
.hint("or provide an explicit body with `title[..]`"),
Smart::Custom(body) => Ok(body),
}
}
}
impl ShowSet for Packed<TitleElem> {
fn show_set(&self, _styles: StyleChain) -> Styles {
const SIZE: Em = Em::new(1.7);
const ABOVE: Em = Em::new(1.125);
const BELOW: Em = Em::new(0.75);
let mut out = Styles::new();
out.set(TextElem::size, TextSize(SIZE.into()));
out.set(TextElem::weight, FontWeight::BOLD);
out.set(BlockElem::above, Smart::Custom(ABOVE.into()));
out.set(BlockElem::below, Smart::Custom(BELOW.into()));
out.set(BlockElem::sticky, true);
out
}
}

View File

@ -15,6 +15,7 @@ use crate::foundations::{
use crate::introspection::{Introspector, Locator, SplitLocator};
use crate::layout::{Frame, Region};
use crate::model::DocumentInfo;
use crate::visualize::Color;
/// Defines the `Routines` struct.
macro_rules! routines {
@ -95,6 +96,12 @@ routines! {
/// Constructs the `html` module.
fn html_module() -> Module
/// Wraps content in a span with a color.
///
/// This is a temporary workaround until `TextElem::fill` is supported in
/// HTML export.
fn html_span_filled(content: Content, color: Color) -> Content
}
/// Defines what kind of realization we are performing.

View File

@ -127,13 +127,8 @@ fn draw_raster_glyph(
Some(())
}
/// Draws a glyph from the COLR table into the frame.
fn draw_colr_glyph(
frame: &mut Frame,
font: &Font,
upem: Abs,
glyph_id: GlyphId,
) -> Option<()> {
/// Convert a COLR glyph into an SVG file.
pub fn colr_glyph_to_svg(font: &Font, glyph_id: GlyphId) -> Option<String> {
let mut svg = XmlWriter::new(xmlwriter::Options::default());
let ttf = font.ttf();
@ -176,7 +171,25 @@ fn draw_colr_glyph(
ttf.paint_color_glyph(glyph_id, 0, RgbaColor::new(0, 0, 0, 255), &mut glyph_painter)?;
svg.end_element();
let data = Bytes::from_string(svg.end_document());
Some(svg.end_document())
}
/// Draws a glyph from the COLR table into the frame.
fn draw_colr_glyph(
frame: &mut Frame,
font: &Font,
upem: Abs,
glyph_id: GlyphId,
) -> Option<()> {
let svg_string = colr_glyph_to_svg(font, glyph_id)?;
let ttf = font.ttf();
let width = ttf.global_bounding_box().width() as f64;
let height = ttf.global_bounding_box().height() as f64;
let x_min = ttf.global_bounding_box().x_min as f64;
let y_max = ttf.global_bounding_box().y_max as f64;
let data = Bytes::from_string(svg_string);
let image = Image::plain(SvgImage::new(data).ok()?);
let y_shift = Abs::pt(upem.to_pt() - y_max);

View File

@ -1,7 +1,7 @@
use std::collections::HashMap;
use std::str::FromStr;
use ecow::{EcoString, eco_format};
use rustc_hash::FxHashMap;
use crate::diag::Hint;
use crate::foundations::{StyleChain, cast};
@ -278,13 +278,13 @@ pub fn localized_str(lang: Lang, region: Option<Region>, key: &str) -> &'static
fn parse_language_bundle(
lang: Lang,
region: Option<Region>,
) -> Result<HashMap<&'static str, &'static str>, &'static str> {
) -> Result<FxHashMap<&'static str, &'static str>, &'static str> {
let language_tuple = TRANSLATIONS.iter().find(|it| it.0 == lang_str(lang, region));
let Some((_lang_name, language_file)) = language_tuple else {
return Ok(HashMap::new());
return Ok(FxHashMap::default());
};
let mut bundle = HashMap::new();
let mut bundle = FxHashMap::default();
let lines = language_file.trim().lines();
for line in lines {
if line.trim().starts_with('#') {
@ -313,9 +313,9 @@ fn lang_str(lang: Lang, region: Option<Region>) -> EcoString {
#[cfg(test)]
mod tests {
use std::collections::HashSet;
use std::path::PathBuf;
use rustc_hash::FxHashSet;
use typst_utils::option_eq;
use super::*;
@ -337,7 +337,7 @@ mod tests {
#[test]
fn test_all_translations_included() {
let defined_keys =
HashSet::<&str>::from_iter(TRANSLATIONS.iter().map(|(lang, _)| *lang));
FxHashSet::<&str>::from_iter(TRANSLATIONS.iter().map(|(lang, _)| *lang));
let mut checked = 0;
for file in translation_files_iter() {
assert!(

View File

@ -18,12 +18,13 @@ use crate::diag::{
use crate::engine::Engine;
use crate::foundations::{
Bytes, Content, Derived, OneOrMultiple, Packed, PlainText, ShowSet, Smart,
StyleChain, Styles, Synthesize, cast, elem, scope,
StyleChain, Styles, Synthesize, Target, TargetElem, cast, elem, scope,
};
use crate::introspection::Locatable;
use crate::layout::{Em, HAlignment};
use crate::loading::{DataSource, Load};
use crate::model::{Figurable, ParElem};
use crate::routines::Routines;
use crate::text::{FontFamily, FontList, LocalName, TextElem, TextSize};
use crate::visualize::Color;
@ -302,8 +303,12 @@ impl RawElem {
}
impl Synthesize for Packed<RawElem> {
fn synthesize(&mut self, _: &mut Engine, styles: StyleChain) -> SourceResult<()> {
let seq = self.highlight(styles);
fn synthesize(
&mut self,
engine: &mut Engine,
styles: StyleChain,
) -> SourceResult<()> {
let seq = self.highlight(engine.routines, styles);
self.lines = Some(seq);
Ok(())
}
@ -311,7 +316,7 @@ impl Synthesize for Packed<RawElem> {
impl Packed<RawElem> {
#[comemo::memoize]
fn highlight(&self, styles: StyleChain) -> Vec<Packed<RawLine>> {
fn highlight(&self, routines: &Routines, styles: StyleChain) -> Vec<Packed<RawLine>> {
let elem = self.as_ref();
let lines = preprocess(&elem.text, styles, self.span());
@ -343,6 +348,7 @@ impl Packed<RawElem> {
};
let foreground = theme.settings.foreground.unwrap_or(synt::Color::BLACK);
let target = styles.get(TargetElem::target);
let mut seq = vec![];
if matches!(lang.as_deref(), Some("typ" | "typst" | "typc" | "typm")) {
@ -365,7 +371,15 @@ impl Packed<RawElem> {
let span_offset = text[..range.start]
.rfind('\n')
.map_or(0, |i| range.start - (i + 1));
styled(&text[range], foreground, style, span, span_offset)
styled(
routines,
target,
&text[range],
foreground,
style,
span,
span_offset,
)
},
&mut |i, range, line| {
let span = lines.get(i).map_or_else(Span::detached, |l| l.1);
@ -402,6 +416,8 @@ impl Packed<RawElem> {
.flatten()
{
line_content.push(styled(
routines,
target,
piece,
foreground,
style,
@ -771,6 +787,8 @@ fn preprocess(
/// Style a piece of text with a syntect style.
fn styled(
routines: &Routines,
target: Target,
piece: &str,
foreground: synt::Color,
style: synt::Style,
@ -784,7 +802,11 @@ fn styled(
}
if style.foreground != foreground {
body = body.set(TextElem::fill, to_typst(style.foreground).into());
let color = to_typst(style.foreground);
body = match target {
Target::Html => (routines.html_span_filled)(body, color),
Target::Paged => body.set(TextElem::fill, color.into()),
};
}
if style.font_style.contains(synt::FontStyle::BOLD) {

View File

@ -1,8 +1,8 @@
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::sync::{Arc, Mutex};
use comemo::Tracked;
use rustc_hash::FxHashMap;
use siphasher::sip128::{Hasher128, SipHasher13};
use crate::World;
@ -144,9 +144,9 @@ struct FontResolver<'a> {
/// The active list of font families at the location of the SVG.
families: &'a [&'a str],
/// A mapping from Typst font indices to fontdb IDs.
to_id: HashMap<usize, Option<fontdb::ID>>,
to_id: FxHashMap<usize, Option<fontdb::ID>>,
/// The reverse mapping.
from_id: HashMap<fontdb::ID, Font>,
from_id: FxHashMap<fontdb::ID, Font>,
/// Accumulates a hash of all used fonts.
hasher: SipHasher13,
}
@ -162,8 +162,8 @@ impl<'a> FontResolver<'a> {
book,
world,
families,
to_id: HashMap::new(),
from_id: HashMap::new(),
to_id: FxHashMap::default(),
from_id: FxHashMap::default(),
hasher: SipHasher13::new(),
}
}

View File

@ -27,6 +27,7 @@ image = { workspace = true }
infer = { workspace = true }
krilla = { workspace = true }
krilla-svg = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
smallvec = { workspace = true }

View File

@ -1,5 +1,3 @@
use std::collections::{HashMap, HashSet};
use ecow::{EcoVec, eco_format};
use krilla::configure::{Configuration, ValidationError, Validator};
use krilla::destination::{NamedDestination, XyzDestination};
@ -13,6 +11,7 @@ use krilla::tagging::TagId;
use krilla::tagging::fmt::Output;
use krilla::{Document, SerializeSettings};
use krilla_svg::render_svg_glyph;
use rustc_hash::{FxHashMap, FxHashSet};
use typst_library::diag::{SourceDiagnostic, SourceResult, bail, error};
use typst_library::foundations::{NativeElement, Repr};
use typst_library::introspection::{Location, Tag};
@ -250,22 +249,22 @@ impl FrameContext {
/// Globally needed context for converting a typst document.
pub(crate) struct GlobalContext<'a> {
/// Cache the conversion between krilla and Typst fonts (forward and backward).
pub(crate) fonts_forward: HashMap<Font, krilla::text::Font>,
pub(crate) fonts_backward: HashMap<krilla::text::Font, Font>,
pub(crate) fonts_forward: FxHashMap<Font, krilla::text::Font>,
pub(crate) fonts_backward: FxHashMap<krilla::text::Font, Font>,
/// Mapping between images and their span.
// Note: In theory, the same image can have multiple spans
// if it appears in the document multiple times. We just store the
// first appearance, though.
pub(crate) image_to_spans: HashMap<krilla::image::Image, Span>,
pub(crate) image_to_spans: FxHashMap<krilla::image::Image, Span>,
/// The spans of all images that appear in the document. We use this so
/// we can give more accurate error messages.
pub(crate) image_spans: HashSet<Span>,
pub(crate) image_spans: FxHashSet<Span>,
/// The document to convert.
pub(crate) document: &'a PagedDocument,
/// Options for PDF export.
pub(crate) options: &'a PdfOptions<'a>,
/// Mapping between locations in the document and named destinations.
pub(crate) loc_to_names: HashMap<Location, NamedDestination>,
pub(crate) loc_to_names: FxHashMap<Location, NamedDestination>,
pub(crate) page_index_converter: PageIndexConverter,
/// Tagged PDF context.
pub(crate) tags: Tags,
@ -275,17 +274,17 @@ impl<'a> GlobalContext<'a> {
pub(crate) fn new(
document: &'a PagedDocument,
options: &'a PdfOptions,
loc_to_names: HashMap<Location, NamedDestination>,
loc_to_names: FxHashMap<Location, NamedDestination>,
page_index_converter: PageIndexConverter,
) -> GlobalContext<'a> {
Self {
fonts_forward: HashMap::new(),
fonts_backward: HashMap::new(),
fonts_forward: FxHashMap::default(),
fonts_backward: FxHashMap::default(),
document,
options,
loc_to_names,
image_to_spans: HashMap::new(),
image_spans: HashSet::new(),
image_to_spans: FxHashMap::default(),
image_spans: FxHashSet::default(),
page_index_converter,
tags: Tags::new(),
@ -696,13 +695,13 @@ fn to_span(loc: Option<krilla::surface::Location>) -> Span {
fn collect_named_destinations(
document: &PagedDocument,
pic: &PageIndexConverter,
) -> HashMap<Location, NamedDestination> {
let mut locs_to_names = HashMap::new();
) -> FxHashMap<Location, NamedDestination> {
let mut locs_to_names = FxHashMap::default();
// Find all headings that have a label and are the first among other
// headings with the same label.
let matches: Vec<_> = {
let mut seen = HashSet::new();
let mut seen = FxHashSet::default();
document
.introspector
.query(&HeadingElem::ELEM.select())
@ -737,13 +736,13 @@ fn collect_named_destinations(
}
pub(crate) struct PageIndexConverter {
page_indices: HashMap<usize, usize>,
page_indices: FxHashMap<usize, usize>,
skipped_pages: usize,
}
impl PageIndexConverter {
pub fn new(document: &PagedDocument, options: &PdfOptions) -> Self {
let mut page_indices = HashMap::new();
let mut page_indices = FxHashMap::default();
let mut skipped_pages = 0;
for i in 0..document.pages.len() {

View File

@ -157,11 +157,12 @@ fn convert_gradient(
RelativeTo::Parent => state.container_size(),
};
let angle = gradient.angle().unwrap_or_else(Angle::zero);
let mut angle = gradient.angle().unwrap_or_else(Angle::zero);
let base_transform = correct_transform(state, gradient.unwrap_relative(on_text));
let stops = convert_gradient_stops(gradient);
match &gradient {
Gradient::Linear(_) => {
angle = Gradient::correct_aspect_ratio(angle, size.aspect_ratio());
let (x1, y1, x2, y2) = {
let (mut sin, mut cos) = (angle.sin(), angle.cos());

View File

@ -24,6 +24,7 @@ ecow = { workspace = true }
flate2 = { workspace = true }
hayro = { workspace = true }
image = { workspace = true }
rustc-hash = { workspace = true }
ttf-parser = { workspace = true }
xmlparser = { workspace = true }
xmlwriter = { workspace = true }

View File

@ -6,10 +6,10 @@ mod shape;
mod text;
pub use image::{convert_image_scaling, convert_image_to_base64_url};
use rustc_hash::FxHashMap;
use typst_library::introspection::Introspector;
use typst_library::model::Destination;
use std::collections::HashMap;
use std::fmt::{self, Display, Formatter, Write};
use ecow::EcoString;
@ -421,12 +421,16 @@ impl<'a> SVGRenderer<'a> {
struct Deduplicator<T> {
kind: char,
vec: Vec<(u128, T)>,
present: HashMap<u128, Id>,
present: FxHashMap<u128, Id>,
}
impl<T> Deduplicator<T> {
fn new(kind: char) -> Self {
Self { kind, vec: Vec::new(), present: HashMap::new() }
Self {
kind,
vec: Vec::new(),
present: FxHashMap::default(),
}
}
/// Inserts a value into the vector. If the hash is already present, returns

View File

@ -5,6 +5,7 @@ use ecow::EcoString;
use ttf_parser::GlyphId;
use typst_library::foundations::Bytes;
use typst_library::layout::{Abs, Point, Ratio, Size, Transform};
use typst_library::text::color::colr_glyph_to_svg;
use typst_library::text::{Font, TextItem};
use typst_library::visualize::{
ExchangeFormat, FillRule, Image, Paint, RasterImage, RelativeTo,
@ -31,7 +32,8 @@ impl SVGRenderer<'_> {
let x_offset = x + glyph.x_offset.at(text.size).to_pt();
let y_offset = y + glyph.y_offset.at(text.size).to_pt();
self.render_svg_glyph(text, id, x_offset, y_offset, scale)
self.render_colr_glyph(text, id, x_offset, y_offset, scale)
.or_else(|| self.render_svg_glyph(text, id, x_offset, y_offset, scale))
.or_else(|| self.render_bitmap_glyph(text, id, x_offset, y_offset))
.or_else(|| {
self.render_outline_glyph(
@ -87,6 +89,42 @@ impl SVGRenderer<'_> {
Some(())
}
/// Render a glyph defined by COLR glyph descriptions.
///
/// The COLR glyph is converted into a standalone SVG, embedded as a
/// base64 `data:` URL, deduplicated via a hash of the font and glyph id,
/// and finally referenced through a `<use>` element.
fn render_colr_glyph(
    &mut self,
    text: &TextItem,
    id: GlyphId,
    x_offset: f64,
    y_offset: f64,
    scale: f64,
) -> Option<()> {
    // Bail out if the glyph has no COLR description.
    let svg = colr_glyph_to_svg(&text.font, id)?;
    let url = svg_to_base64(&svg);

    // The embedded image spans the font's global bounding box. The
    // negative y-scale flips the glyph because SVG's y axis grows
    // downwards while font units grow upwards.
    let bbox = text.font.ttf().global_bounding_box();
    let width = bbox.width() as f64;
    let height = bbox.height() as f64;
    let ts = Transform::scale(Ratio::new(scale), Ratio::new(-scale)).pre_concat(
        Transform::translate(Abs::pt(bbox.x_min as f64), -Abs::pt(bbox.y_max as f64)),
    );

    // Deduplicate identical glyphs: reuse the rendered image if this
    // (font, id) pair was emitted before.
    let glyph_hash = hash128(&(&text.font, id));
    let id = self
        .glyphs
        .insert_with(glyph_hash, || RenderedGlyph::Image { url, width, height, ts });

    self.xml.start_element("use");
    self.xml.write_attribute_fmt("xlink:href", format_args!("#{id}"));
    self.xml.write_attribute("x", &x_offset);
    self.xml.write_attribute("y", &y_offset);
    self.xml.end_element();
    Some(())
}
/// Render a glyph defined by a bitmap.
fn render_bitmap_glyph(
&mut self,
@ -320,10 +358,14 @@ fn convert_svg_glyph_to_base64_url(font: &Font, id: GlyphId) -> Option<EcoString
);
}
Some(svg_to_base64(&svg_str))
}
/// Encode an SVG string as a base64 `data:` URL suitable for embedding
/// in an `image` element's `href`.
fn svg_to_base64(svg_str: &str) -> EcoString {
    let mut url: EcoString = "data:image/svg+xml;base64,".into();
    let b64_encoded =
        base64::engine::general_purpose::STANDARD.encode(svg_str.as_bytes());
    url.push_str(&b64_encoded);
    // Defect fixed: the block contained both `Some(url)` and `url` as tail
    // expressions (leftover from a refactor). The function returns a plain
    // `EcoString`, so the bare `url` is the correct tail expression.
    url
}

View File

@ -16,6 +16,7 @@ readme = { workspace = true }
typst-timing = { workspace = true }
typst-utils = { workspace = true }
ecow = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
toml = { workspace = true }
unicode-ident = { workspace = true }

View File

@ -1,21 +1,22 @@
//! File and package management.
use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter};
use std::num::NonZeroU16;
use std::sync::{LazyLock, RwLock};
use rustc_hash::FxHashMap;
use crate::VirtualPath;
use crate::package::PackageSpec;
/// The global package-path interner.
static INTERNER: LazyLock<RwLock<Interner>> = LazyLock::new(|| {
RwLock::new(Interner { to_id: HashMap::new(), from_id: Vec::new() })
RwLock::new(Interner { to_id: FxHashMap::default(), from_id: Vec::new() })
});
/// A package-path interner.
struct Interner {
to_id: HashMap<Pair, FileId>,
to_id: FxHashMap<Pair, FileId>,
from_id: Vec<Pair>,
}

View File

@ -1,8 +1,8 @@
use std::collections::{HashMap, HashSet};
use std::mem;
use std::ops::{Index, IndexMut, Range};
use ecow::{EcoString, eco_format};
use rustc_hash::{FxHashMap, FxHashSet};
use typst_utils::default_math_class;
use unicode_math_class::MathClass;
@ -384,10 +384,10 @@ fn math_expr_prec(p: &mut Parser, min_prec: usize, stop: SyntaxKind) {
fn math_op(kind: SyntaxKind) -> Option<(SyntaxKind, SyntaxKind, ast::Assoc, usize)> {
match kind {
SyntaxKind::Underscore => {
Some((SyntaxKind::MathAttach, SyntaxKind::Hat, ast::Assoc::Right, 2))
Some((SyntaxKind::MathAttach, SyntaxKind::Hat, ast::Assoc::Right, 3))
}
SyntaxKind::Hat => {
Some((SyntaxKind::MathAttach, SyntaxKind::Underscore, ast::Assoc::Right, 2))
Some((SyntaxKind::MathAttach, SyntaxKind::Underscore, ast::Assoc::Right, 3))
}
SyntaxKind::Slash => {
Some((SyntaxKind::MathFrac, SyntaxKind::End, ast::Assoc::Left, 1))
@ -481,7 +481,7 @@ fn math_args(p: &mut Parser) {
let mut has_arrays = false;
let mut maybe_array_start = p.marker();
let mut seen = HashSet::new();
let mut seen = FxHashSet::default();
while !p.at_set(syntax_set!(End, Dollar, RightParen)) {
positional = math_arg(p, &mut seen);
@ -522,7 +522,7 @@ fn math_args(p: &mut Parser) {
/// Parses a single argument in a math argument list.
///
/// Returns whether the parsed argument was positional or not.
fn math_arg<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>) -> bool {
fn math_arg<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>) -> bool {
let m = p.marker();
let start = p.current_start();
@ -831,7 +831,7 @@ fn let_binding(p: &mut Parser) {
closure = true;
}
} else {
pattern(p, false, &mut HashSet::new(), None);
pattern(p, false, &mut FxHashSet::default(), None);
other = true;
}
@ -923,7 +923,7 @@ fn for_loop(p: &mut Parser) {
let m = p.marker();
p.assert(SyntaxKind::For);
let mut seen = HashSet::new();
let mut seen = FxHashSet::default();
pattern(p, false, &mut seen, None);
if p.at(SyntaxKind::Comma) {
@ -1084,7 +1084,7 @@ fn expr_with_paren(p: &mut Parser, atomic: bool) {
} else if p.at(SyntaxKind::Eq) && kind != SyntaxKind::Parenthesized {
p.restore(checkpoint);
let m = p.marker();
destructuring_or_parenthesized(p, true, &mut HashSet::new());
destructuring_or_parenthesized(p, true, &mut FxHashSet::default());
if !p.expect(SyntaxKind::Eq) {
return;
}
@ -1107,7 +1107,7 @@ fn parenthesized_or_array_or_dict(p: &mut Parser) -> SyntaxKind {
count: 0,
maybe_just_parens: true,
kind: None,
seen: HashSet::new(),
seen: FxHashSet::default(),
};
// An edge case with parens is whether we can interpret a leading spread
@ -1169,7 +1169,7 @@ struct GroupState {
/// The `SyntaxKind` to wrap as (if we've figured it out yet).
kind: Option<SyntaxKind>,
/// Store named arguments so we can give an error if they're repeated.
seen: HashSet<EcoString>,
seen: FxHashSet<EcoString>,
}
/// Parses a single item in an array or dictionary.
@ -1238,7 +1238,7 @@ fn args(p: &mut Parser) {
p.with_nl_mode(AtNewline::Continue, |p| {
p.assert(SyntaxKind::LeftParen);
let mut seen = HashSet::new();
let mut seen = FxHashSet::default();
while !p.current().is_terminator() {
if !p.at_set(set::ARG) {
p.unexpected();
@ -1264,7 +1264,7 @@ fn args(p: &mut Parser) {
}
/// Parses a single argument in an argument list.
fn arg<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>) {
fn arg<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>) {
let m = p.marker();
// Parses a spread argument: `..args`.
@ -1301,7 +1301,7 @@ fn params(p: &mut Parser) {
p.with_nl_mode(AtNewline::Continue, |p| {
p.assert(SyntaxKind::LeftParen);
let mut seen = HashSet::new();
let mut seen = FxHashSet::default();
let mut sink = false;
while !p.current().is_terminator() {
@ -1323,7 +1323,7 @@ fn params(p: &mut Parser) {
}
/// Parses a single parameter in a parameter list.
fn param<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>, sink: &mut bool) {
fn param<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>, sink: &mut bool) {
let m = p.marker();
// Parses argument sink: `..sink`.
@ -1358,7 +1358,7 @@ fn param<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>, sink: &mut bool) {
fn pattern<'s>(
p: &mut Parser<'s>,
reassignment: bool,
seen: &mut HashSet<&'s str>,
seen: &mut FxHashSet<&'s str>,
dupe: Option<&'s str>,
) {
match p.current() {
@ -1372,7 +1372,7 @@ fn pattern<'s>(
fn destructuring_or_parenthesized<'s>(
p: &mut Parser<'s>,
reassignment: bool,
seen: &mut HashSet<&'s str>,
seen: &mut FxHashSet<&'s str>,
) {
let mut sink = false;
let mut count = 0;
@ -1410,7 +1410,7 @@ fn destructuring_or_parenthesized<'s>(
fn destructuring_item<'s>(
p: &mut Parser<'s>,
reassignment: bool,
seen: &mut HashSet<&'s str>,
seen: &mut FxHashSet<&'s str>,
maybe_just_parens: &mut bool,
sink: &mut bool,
) {
@ -1457,7 +1457,7 @@ fn destructuring_item<'s>(
fn pattern_leaf<'s>(
p: &mut Parser<'s>,
reassignment: bool,
seen: &mut HashSet<&'s str>,
seen: &mut FxHashSet<&'s str>,
dupe: Option<&'s str>,
) {
if p.current().is_keyword() {
@ -1920,7 +1920,7 @@ struct MemoArena {
/// A map from the parser's current position to a range of previously parsed
/// nodes in the arena and a checkpoint of the parser's state. These allow
/// us to reset the parser to avoid parsing the same location again.
memo_map: HashMap<MemoKey, (Range<usize>, PartialState)>,
memo_map: FxHashMap<MemoKey, (Range<usize>, PartialState)>,
}
/// A type alias for the memo key so it doesn't get confused with other usizes.

View File

@ -71,6 +71,14 @@ pub fn enable() {
ENABLED.store(true, Ordering::Relaxed);
}
/// Disable the timer.
///
/// The counterpart to `enable`: clears the global `ENABLED` flag so that
/// `is_enabled` reports `false` afterwards.
#[inline]
pub fn disable() {
    // We only need atomicity and no synchronization of other
    // operations, so `Relaxed` is fine.
    ENABLED.store(false, Ordering::Relaxed);
}
/// Whether the timer is enabled.
#[inline]
pub fn is_enabled() -> bool {

View File

@ -16,6 +16,7 @@ readme = { workspace = true }
once_cell = { workspace = true }
portable-atomic = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
siphasher = { workspace = true }
thin-vec = { workspace = true }
unicode-math-class = { workspace = true }

View File

@ -1,22 +1,24 @@
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::fmt::{self, Debug, Display, Formatter};
use std::hash::{Hash, Hasher};
use std::num::NonZeroU64;
use std::ops::Deref;
use std::sync::{LazyLock, RwLock};
use rustc_hash::FxHashMap;
/// Marks a number as a bitcode encoded `PicoStr`.
const MARKER: u64 = 1 << 63;
/// The global runtime string interner.
static INTERNER: LazyLock<RwLock<Interner>> =
LazyLock::new(|| RwLock::new(Interner { seen: HashMap::new(), strings: Vec::new() }));
static INTERNER: LazyLock<RwLock<Interner>> = LazyLock::new(|| {
RwLock::new(Interner { seen: FxHashMap::default(), strings: Vec::new() })
});
/// A string interner.
struct Interner {
seen: HashMap<&'static str, PicoStr>,
seen: FxHashMap<&'static str, PicoStr>,
strings: Vec<&'static str>,
}

View File

@ -24,6 +24,7 @@ typst-timing = { workspace = true }
typst-utils = { workspace = true }
comemo = { workspace = true }
ecow = { workspace = true }
rustc-hash = { workspace = true }
[lints]
workspace = true

View File

@ -38,11 +38,11 @@ pub use typst_syntax as syntax;
#[doc(inline)]
pub use typst_utils as utils;
use std::collections::HashSet;
use std::sync::LazyLock;
use comemo::{Track, Tracked, Validate};
use comemo::{Track, Tracked};
use ecow::{EcoString, EcoVec, eco_format, eco_vec};
use rustc_hash::FxHashSet;
use typst_html::HtmlDocument;
use typst_library::diag::{
FileError, SourceDiagnostic, SourceResult, Warned, bail, warning,
@ -135,7 +135,7 @@ fn compile_impl<D: Document>(
subsink = Sink::new();
let constraint = <Introspector as Validate>::Constraint::new();
let constraint = comemo::Constraint::new();
let mut engine = Engine {
world,
introspector: introspector.track_with(&constraint),
@ -150,7 +150,7 @@ fn compile_impl<D: Document>(
introspector = document.introspector();
iter += 1;
if timed!("check stabilized", introspector.validate(&constraint)) {
if timed!("check stabilized", constraint.validate(introspector)) {
break;
}
@ -176,7 +176,7 @@ fn compile_impl<D: Document>(
/// Deduplicate diagnostics.
fn deduplicate(mut diags: EcoVec<SourceDiagnostic>) -> EcoVec<SourceDiagnostic> {
let mut unique = HashSet::new();
let mut unique = FxHashSet::default();
diags.retain(|diag| {
let hash = typst_utils::hash128(&(&diag.span, &diag.message));
unique.insert(hash)
@ -358,4 +358,5 @@ pub static ROUTINES: LazyLock<Routines> = LazyLock::new(|| Routines {
realize: typst_realize::realize,
layout_frame: typst_layout::layout_frame,
html_module: typst_html::module,
html_span_filled: typst_html::html_span_filled,
});

View File

@ -26,6 +26,7 @@ codex = { workspace = true }
ecow = { workspace = true }
heck = { workspace = true }
pulldown-cmark = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true, optional = true }
serde_yaml = { workspace = true }

View File

@ -477,12 +477,12 @@ and their corresponding Typst functions.
Although _many_ things are built-in, not everything can be. That's why Typst has
its own [package ecosystem]($universe) where the community share its creations
and automations. Let's take, for instance, the _cetz_ package: This package
allows you to create complex drawings and plots. To use cetz in your document,
and automations. Let's take, for instance, the _CeTZ_ package: This package
allows you to create complex drawings and plots. To use CeTZ in your document,
you can just write:
```typ
#import "@preview/cetz:0.2.1"
#import "@preview/cetz:0.4.1"
```
(The `@preview` is a _namespace_ that is used while the package manager is still

View File

@ -4,9 +4,8 @@ description: Guides for Typst.
# Guides
Welcome to the Guides section! Here, you'll find helpful material for specific
user groups or use cases. Currently, two guides are available: An introduction
to Typst for LaTeX users, and a detailed look at page setup. Feel free to
propose other topics for guides!
user groups or use cases. Please see the list below for the available guides.
Feel free to propose other topics for guides!
## List of Guides
- [Guide for LaTeX users]($guides/guide-for-latex-users)

View File

@ -1,5 +1,5 @@
Drawing and data visualization.
If you want to create more advanced drawings or plots, also have a look at the
[CetZ](https://github.com/johannes-wolf/cetz) package as well as more
[CeTZ](https://github.com/johannes-wolf/cetz) package as well as more
specialized [packages]($universe) for your use case.

View File

@ -1,7 +1,7 @@
use std::cmp::Reverse;
use std::collections::HashMap;
use std::fmt::Write;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use crate::{Html, Resolver};
@ -12,7 +12,7 @@ pub fn contributors(resolver: &dyn Resolver, from: &str, to: &str) -> Option<Htm
let bots = ["dependabot[bot]"];
// Determine number of contributions per person.
let mut contributors = HashMap::<String, Contributor>::new();
let mut contributors = FxHashMap::<String, Contributor>::default();
for commit in resolver.commits(from, to) {
contributors
.entry(commit.author.login.clone())

View File

@ -9,10 +9,9 @@ pub use self::contribs::*;
pub use self::html::*;
pub use self::model::*;
use std::collections::HashSet;
use ecow::{EcoString, eco_format};
use heck::ToTitleCase;
use rustc_hash::FxHashSet;
use serde::Deserialize;
use serde_yaml as yaml;
use std::sync::LazyLock;
@ -260,7 +259,7 @@ fn category_page(resolver: &dyn Resolver, category: Category) -> PageModel {
shorthands = Some(ShorthandsModel { markup, math });
}
let mut skip = HashSet::new();
let mut skip = FxHashSet::default();
if category == Category::Math {
skip = GROUPS
.iter()

View File

@ -1,5 +1,5 @@
use typst::diag::{StrResult, bail};
use typst::foundations::{Binding, Func};
use typst::foundations::{Binding, Func, Type};
use crate::{GROUPS, LIBRARY, get_module};
@ -94,25 +94,158 @@ fn resolve_definition(head: &str, base: &str) -> StrResult<String> {
let mut route = format!("{}reference/{}/{name}", base, category.name());
if let Some(next) = parts.next() {
if let Ok(field) = value.field(next, ()) {
// For top-level definitions
route.push_str("/#definitions-");
route.push_str(next);
if let Some(next) = parts.next()
&& field.cast::<Func>().is_ok_and(|func| func.param(next).is_some())
{
route.push('-');
route.push_str(next);
let mut focus = field;
// For subsequent parameters, definitions, or definitions parameters
for next in parts.by_ref() {
if let Ok(field) = focus.field(next, ()) {
// For definitions
route.push_str("-definitions-");
route.push_str(next);
focus = field.clone();
} else if focus
.clone()
.cast::<Func>()
.is_ok_and(|func| func.param(next).is_some())
{
// For parameters
route.push('-');
route.push_str(next);
}
}
} else if let Ok(ty) = value.clone().cast::<Type>()
&& let Ok(func) = ty.constructor()
&& func.param(next).is_some()
{
// For parameters of a constructor function
route.push_str("/#constructor-");
route.push_str(next);
} else if value
.clone()
.cast::<Func>()
.is_ok_and(|func| func.param(next).is_some())
{
// For parameters of a function (except for constructor functions)
route.push_str("/#parameters-");
route.push_str(next);
} else {
bail!("field {next} not found");
}
if let Some(next) = parts.next() {
bail!("found redundant field {next}");
}
}
Ok(route)
}
#[cfg(test)]
mod tests {
    use super::*;

    // A plain element function: bare name, a constructor parameter
    // (`#parameters-`), a definition (`#definitions-`), and a parameter of
    // a definition.
    #[test]
    fn test_function() {
        assert_eq!(
            resolve_definition("$figure", "/"),
            Ok("/reference/model/figure".into())
        );
        assert_eq!(
            resolve_definition("$figure.body", "/"),
            Ok("/reference/model/figure/#parameters-body".into())
        );
        assert_eq!(
            resolve_definition("$figure.caption", "/"),
            Ok("/reference/model/figure/#definitions-caption".into())
        );
        assert_eq!(
            resolve_definition("$figure.caption.position", "/"),
            Ok("/reference/model/figure/#definitions-caption-position".into())
        );
    }

    // Same shapes as above, but for `outline` and its `entry` definition.
    #[test]
    fn test_function_definition() {
        assert_eq!(
            resolve_definition("$outline", "/"),
            Ok("/reference/model/outline".into())
        );
        assert_eq!(
            resolve_definition("$outline.title", "/"),
            Ok("/reference/model/outline/#parameters-title".into())
        );
        assert_eq!(
            resolve_definition("$outline.entry", "/"),
            Ok("/reference/model/outline/#definitions-entry".into())
        );
        assert_eq!(
            resolve_definition("$outline.entry.fill", "/"),
            Ok("/reference/model/outline/#definitions-entry-fill".into())
        );
    }

    // Definitions nested inside definitions chain `-definitions-` segments.
    #[test]
    fn test_function_definition_definition() {
        assert_eq!(
            resolve_definition("$outline.entry.indented", "/"),
            Ok("/reference/model/outline/#definitions-entry-definitions-indented".into())
        );
        assert_eq!(
            resolve_definition("$outline.entry.indented.prefix", "/"),
            Ok("/reference/model/outline/#definitions-entry-definitions-indented-prefix"
                .into())
        );
    }

    // Types resolve like functions: methods are definitions, and method
    // parameters chain onto the definition anchor.
    #[test]
    fn test_type() {
        assert_eq!(
            resolve_definition("$array", "/"),
            Ok("/reference/foundations/array".into())
        );
        assert_eq!(
            resolve_definition("$array.at", "/"),
            Ok("/reference/foundations/array/#definitions-at".into())
        );
        assert_eq!(
            resolve_definition("$array.at.index", "/"),
            Ok("/reference/foundations/array/#definitions-at-index".into())
        );
    }

    // Parameters of a type's constructor function use the `#constructor-`
    // anchor rather than `#parameters-`.
    #[test]
    fn test_type_constructor() {
        assert_eq!(
            resolve_definition("$str.base", "/"),
            Ok("/reference/foundations/str/#constructor-base".into())
        );
        assert_eq!(
            resolve_definition("$tiling.relative", "/"),
            Ok("/reference/visualize/tiling/#constructor-relative".into())
        );
    }

    // Functions inside a group (e.g. `calc`) use `#functions-` anchors.
    #[test]
    fn test_group() {
        assert_eq!(
            resolve_definition("$calc.abs", "/"),
            Ok("/reference/foundations/calc/#functions-abs".into())
        );
        assert_eq!(
            resolve_definition("$calc.pow.exponent", "/"),
            Ok("/reference/foundations/calc/#functions-pow-exponent".into())
        );
    }

    // A trailing field after a leaf parameter is rejected with an error.
    #[test]
    fn test_redundant_field() {
        assert_eq!(
            resolve_definition("$figure.body.anything", "/"),
            Err("found redundant field anything".into())
        );
    }
}

View File

@ -46,6 +46,7 @@ oxipng = { workspace = true }
parking_lot = { workspace = true }
rayon = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
tiny-skia = { workspace = true }
unscanny = { workspace = true }
walkdir = { workspace = true }

View File

@ -21,37 +21,37 @@ Running just the integration tests (the tests in this directory):
cargo test --workspace --test tests
```
You may want to [make yourself an alias](#making-an-alias) `testit` so that you can
write shorter commands. In the examples below, we will use this alias.
The repository includes the alias `cargo testit` to make this less verbose. In
the examples below, we will use this alias.
Running all tests with the given name pattern. You can use
[regular expressions](https://docs.rs/regex/latest/regex/).
```bash
testit math # The name has "math" anywhere
testit math page # The name has "math" or "page" anywhere
testit "^math" "^page" # The name begins with "math" or "page"
testit "^(math|page)" # Same as above.
cargo testit math # The name has "math" anywhere
cargo testit math page # The name has "math" or "page" anywhere
cargo testit "^math" "^page" # The name begins with "math" or "page"
cargo testit "^(math|page)" # Same as above.
```
Running all tests discovered under given paths:
```bash
testit -p tests/suite/math/attach.typ
testit -p tests/suite/model -p tests/suite/text
cargo testit -p tests/suite/math/attach.typ
cargo testit -p tests/suite/model -p tests/suite/text
```
Running tests that begin with `issue` under a given path:
```bash
testit "^issue" -p tests/suite/model
cargo testit "^issue" -p tests/suite/model
```
Running a test with the exact test name `math-attach-mixed`.
```bash
testit --exact math-attach-mixed
cargo testit --exact math-attach-mixed
```
You may find more options in the help message:
```bash
testit --help
cargo testit --help
```
To make the integration tests go faster they don't generate PDFs or SVGs by
@ -59,7 +59,7 @@ default. Pass the `--pdf` or `--svg` flag to generate those. Mind that PDFs and
SVGs are **not** tested automatically at the moment, so you should always check
the output manually when making changes.
```bash
testit --pdf
cargo testit --pdf
```
## Writing tests
@ -119,7 +119,7 @@ If you created a new test or fixed a bug in an existing test, you may need to
update the reference output used for comparison. For this, you can use the
`--update` flag:
```bash
testit --exact my-test-name --update
cargo testit --exact my-test-name --update
```
For visual tests, this will generally generate compressed reference images (to
@ -127,24 +127,3 @@ remain within the size limit).
If you use the VS Code test helper extension (see the `tools` folder), you can
alternatively use the save button to update the reference output.
## Making an alias
If you want to have a quicker way to run the tests, consider adding a shortcut
to your shell profile so that you can simply write something like:
```bash
testit --exact my-test-name
```
### Bash
Open your Bash configuration by executing `nano ~/.bashrc`.
```bash
alias testit="cargo test --workspace --test tests --"
```
### PowerShell
Open your PowerShell profile by executing `notepad $profile`.
```ps
function testit {
cargo test --workspace --test tests -- $args
}
```

View File

@ -0,0 +1,8 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
</head>
<body><my-element>Hi</my-element><custom-button>Hi</custom-button><multi-word-component>Hi</multi-word-component><element->Hi</element-></body>
</html>

View File

@ -0,0 +1,10 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
</head>
<body>
<p>This has <code style="white-space: pre-wrap">double spaces inside</code>, which should be kept.</p>
</body>
</html>

View File

@ -0,0 +1,11 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
</head>
<body>
<p>This is <code style="white-space: pre-wrap"><strong>*</strong><strong>inline</strong><strong>*</strong></code>.</p>
<pre><code><span style="color: #d73a49">#</span><span style="color: #d73a49">set</span> <span style="color: #4b69c6">text</span>(blue)<br><strong>*</strong><strong>Hello</strong><strong>*</strong> <em>_</em><em>world</em><em>_</em>!</code></pre>
</body>
</html>

View File

@ -0,0 +1,11 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
</head>
<body>
<h1>A cool title</h1>
<h2>Some level one heading</h2>
</body>
</html>

View File

@ -0,0 +1,10 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
</head>
<body>
<h1>Some Title</h1>
</body>
</html>

View File

@ -0,0 +1,11 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>My title</title>
</head>
<body>
<h1>My display title</h1>
</body>
</html>

12
tests/ref/html/title.html Normal file
View File

@ -0,0 +1,12 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>My title</title>
</head>
<body>
<h1>My title</h1>
<h2>A level one heading</h2>
</body>
</html>

Binary file not shown.

After

Width:  |  Height:  |  Size: 137 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 927 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 630 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.2 KiB

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 585 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.1 KiB

After

Width:  |  Height:  |  Size: 9.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 615 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

BIN
tests/ref/title.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@ -1,4 +1,3 @@
use std::collections::{HashMap, HashSet};
use std::fmt::{self, Display, Formatter};
use std::ops::Range;
use std::path::{Path, PathBuf};
@ -6,6 +5,7 @@ use std::str::FromStr;
use std::sync::LazyLock;
use ecow::{EcoString, eco_format};
use rustc_hash::{FxHashMap, FxHashSet};
use typst_syntax::package::PackageVersion;
use typst_syntax::{
FileId, Lines, Source, VirtualPath, is_id_continue, is_ident, is_newline,
@ -123,7 +123,7 @@ impl Display for NoteKind {
struct Collector {
tests: Vec<Test>,
errors: Vec<TestParseError>,
seen: HashMap<EcoString, (FilePos, Vec<Attr>)>,
seen: FxHashMap<EcoString, (FilePos, Vec<Attr>)>,
skipped: usize,
}
@ -133,7 +133,7 @@ impl Collector {
Self {
tests: vec![],
errors: vec![],
seen: HashMap::new(),
seen: FxHashMap::default(),
skipped: 0,
}
}
@ -509,7 +509,7 @@ impl<'a> Parser<'a> {
/// Whether a test is within the selected set to run.
fn selected(name: &str, abs: PathBuf) -> bool {
static SKIPPED: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
static SKIPPED: LazyLock<FxHashSet<&'static str>> = LazyLock::new(|| {
String::leak(std::fs::read_to_string(crate::SKIP_PATH).unwrap())
.lines()
.map(|line| line.trim())

View File

@ -1,5 +1,4 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::fs;
use std::io::Write;
use std::path::{Path, PathBuf};
@ -8,6 +7,7 @@ use std::sync::OnceLock;
use comemo::Tracked;
use parking_lot::Mutex;
use rustc_hash::FxHashMap;
use typst::diag::{At, FileError, FileResult, SourceResult, StrResult, bail};
use typst::engine::Engine;
use typst::foundations::{
@ -108,7 +108,7 @@ struct TestBase {
library: LazyHash<Library>,
book: LazyHash<FontBook>,
fonts: Vec<Font>,
slots: Mutex<HashMap<FileId, FileSlot>>,
slots: Mutex<FxHashMap<FileId, FileSlot>>,
}
impl Default for TestBase {
@ -122,7 +122,7 @@ impl Default for TestBase {
library: LazyHash::new(library()),
book: LazyHash::new(FontBook::from_fonts(&fonts)),
fonts,
slots: Mutex::new(HashMap::new()),
slots: Mutex::new(FxHashMap::default()),
}
}
}

View File

@ -13,3 +13,25 @@ Text
val
})
#metadata("Hi") <l>
--- html-elem-custom html ---
#html.elem("my-element")[Hi]
#html.elem("custom-button")[Hi]
#html.elem("multi-word-component")[Hi]
#html.elem("element-")[Hi]
--- html-elem-invalid ---
// Error: 12-24 the character "@" is not valid in a tag name
#html.elem("my@element")
--- html-elem-custom-bad-start html ---
// Error: 12-22 custom element name must start with a lowercase letter
#html.elem("1-custom")
--- html-elem-custom-uppercase html ---
// Error: 12-21 custom element name must not contain uppercase letters
#html.elem("my-ELEM")
--- html-elem-custom-reserved html ---
// Error: 12-28 name is reserved and not valid for a custom element
#html.elem("annotation-xml")

View File

@ -34,7 +34,7 @@ $tilde(sum), tilde(sum, size: #50%), accent(H, hat, size: #200%)$
--- math-accent-sized-script ---
// Test accent size in script size.
$tilde(U, size: #1.1em), x^tilde(U, size: #1.1em), sscript(tilde(U, size: #1.1em))$
$tilde(U, size: #1.1em), x^(tilde(U, size: #1.1em)), sscript(tilde(U, size: #1.1em))$
--- math-accent-dotless ---
// Test dotless glyph variants.

View File

@ -42,9 +42,41 @@ attach(a, tl: u, t: t, tr: v, bl: x, b: b, br: y)
$
--- math-attach-followed-by-func-call ---
// Test function call after subscript.
$pi_1(Y), a_f(x), a^zeta (x), a^abs(b)_sqrt(c) \
a^subset.eq (x), a_(zeta(x)), pi_(1(Y)), a^(abs(b))_(sqrt(c))$
// Test function call after subscripts and superscripts.
#set page(width: auto)
$a_1(x), a^10(x), a_f(x), a^zeta(x), a_"text"(x),
// Warning: 22-25 function literal used as subscript
// Hint: 22-25 wrap the entire function call in parentheses
// Warning: 32-36 function literal used as superscript
// Hint: 32-36 wrap the entire function call in parentheses
a_∂(x), a^cos(x), a_abs(x), a^root(2, x), a_subset.eq(x) \
a_(1(x)), a^(10(x)), a_(f(x)), a^(zeta(x)), a_("text"(x)),
a_(∂(x)), a^(cos(x)), a_(abs(x)), a^(root(2, x)), a_(subset.eq(x))$
--- math-attach-followed-by-func-call-complex ---
// Test function call with named arguments after scripts.
#let cross = $c$
// Warning: 4-10 function literal used as subscript
// Hint: 4-10 wrap the entire function call in parentheses
$a_cancel(x, cross: #true)$
--- math-attach-followed-by-func-call-error ---
// Error: 14-19 unknown variable: cross
// Hint: 14-19 if you meant to display multiple letters as is, try adding spaces between each letter: `c r o s s`
// Hint: 14-19 or if you meant to display this as text, try placing it in quotes: `"cross"`
// Warning: 4-10 function literal used as subscript
// Hint: 4-10 wrap the entire function call in parentheses
$a_cancel(x, cross: #true)$
--- math-attach-followed-by-func-call-warning ---
// Test warning for function literal as subscripts and superscripts.
#let myfunc(arg) = $arg$
// Warning: 4-10 function literal used as subscript
// Hint: 4-10 wrap the entire function call in parentheses
$a_myfunc(x) a_(myfunc(x))$
// Warning: 4-10 function literal used as subscript
// Hint: 4-10 wrap the entire function call in parentheses
$a_myfunc a_(myfunc)$
--- math-attach-nested ---
// Test associativity and scaling.

View File

@ -91,3 +91,12 @@ $ body^"text" $
}
$body^"long text"$
}
--- math-stretch-min-overlap-exceeds-max ---
// Test that glyph assembly doesn't end up with negative lengths if the max
// overlap calculated is less than the minConnectorOverlap.
#show math.equation: set text(font: "STIX Two Math")
// Warning: glyph has assembly parts with overlap less than minConnectorOverlap
// Hint: its rendering may appear broken - this is probably a font bug
// Hint: please file an issue at https://github.com/typst/typst/issues
$ stretch(->)^"Gauss-Jordan Elimination" $

View File

@ -0,0 +1,24 @@
// Test title element.
--- title render html ---
#set document(title: "My title")
#title()
= A level one heading
--- title-with-body render html ---
#set document(title: "My title")
#title[My display title]
--- title-with-body-auto render ---
#set document(title: "My title")
#title(auto)
--- title-show-set ---
#show title: set text(blue)
#title[A blue title]
--- title-unset ---
// Error: 2-9 document title was not set
// Hint: 2-9 set the title with `set document(title: [...])`
// Hint: 2-9 or provide an explicit body with `title[..]`
#title()

View File

@ -215,7 +215,7 @@ box[]
hello.world
hello.world()
hello.my.world()
f_zeta(x), f_zeta(x)/1
f_(zeta(x)), f_(zeta(x))/1
emph(hello.my.world())
emph(hello.my().world)
emph(hello.my().world())
@ -487,6 +487,16 @@ test
assert.eq(block, c.input.block, message: "in point " + c.name + ", expect " + repr(block) + ", got " + repr(c.input.block) + "")
}
--- raw-html html ---
This is ```typ *inline*```.
```typ
#set text(blue)
*Hello* _world_!
```
--- raw-html-inline-spaces html ---
This has `double spaces inside`, which should be kept.
--- raw-line ---
#set page(width: 200pt)

View File

@ -692,3 +692,6 @@ $ A = mat(
(blue, 100%),
)
)
--- issue-6680-gradient-linear-with-aspect-correction ---
#set page(width: 200pt, height: auto, margin: 10pt, fill: gradient.linear(red, blue, angle: 45deg).sharp(2))