Mirror of https://github.com/typst/typst (synced 2025-08-24 19:54:14 +08:00)

Merge branch 'main' into rect_caps
Commit ffb02a0446

30  .github/workflows/ci.yml (vendored)
@@ -5,6 +5,7 @@ env:
  RUSTFLAGS: "-Dwarnings"
  RUSTDOCFLAGS: "-Dwarnings"
  TYPST_TESTS_EXTENDED: true
+ PKG_CONFIG_i686-unknown-linux-gnu: /usr/bin/i686-linux-gnu-pkgconf

jobs:
  # This allows us to have one branch protection rule for the full test matrix.
@@ -27,30 +28,43 @@ jobs:
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest]
+       bits: [64]
+       include:
+         - os: ubuntu-latest
+           bits: 32
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
-     - uses: dtolnay/rust-toolchain@1.83.0
+     - if: startsWith(matrix.os, 'ubuntu-') && matrix.bits == 32
+       run: |
+         sudo dpkg --add-architecture i386
+         sudo apt update
+         sudo apt install -y gcc-multilib libssl-dev:i386 pkg-config:i386
+     - uses: dtolnay/rust-toolchain@1.87.0
+       with:
+         targets: ${{ matrix.bits == 32 && 'i686-unknown-linux-gnu' || '' }}
      - uses: Swatinem/rust-cache@v2
-     - run: cargo test --workspace --no-run
-     - run: cargo test --workspace --no-fail-fast
+       with:
+         key: ${{ matrix.bits }}
+     - run: cargo test --workspace --no-run ${{ matrix.bits == 32 && '--target i686-unknown-linux-gnu' || '' }}
+     - run: cargo test --workspace --no-fail-fast ${{ matrix.bits == 32 && '--target i686-unknown-linux-gnu' || '' }}
      - name: Upload rendered test output
        if: failure()
        uses: actions/upload-artifact@v4
        with:
-         name: tests-rendered-${{ matrix.os }}
+         name: tests-rendered-${{ matrix.os }}-${{ matrix.bits }}
          path: tests/store/render/**
          retention-days: 3
      - name: Update test artifacts
        if: failure()
        run: |
-         cargo test --workspace --test tests -- --update
+         cargo test --workspace --test tests ${{ matrix.bits == 32 && '--target i686-unknown-linux-gnu' || '' }} -- --update
          echo 'updated_artifacts=1' >> "$GITHUB_ENV"
      - name: Upload updated reference output (for use if the test changes are desired)
        if: failure() && env.updated_artifacts
        uses: actions/upload-artifact@v4
        with:
-         name: tests-updated-${{ matrix.os }}
+         name: tests-updated-${{ matrix.os }}-${{ matrix.bits }}
          path: tests/ref/**
          retention-days: 3

@@ -59,7 +73,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
-     - uses: dtolnay/rust-toolchain@1.83.0
+     - uses: dtolnay/rust-toolchain@1.87.0
        with:
          components: clippy, rustfmt
      - uses: Swatinem/rust-cache@v2
@@ -73,7 +87,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
-     - uses: dtolnay/rust-toolchain@1.80.0
+     - uses: dtolnay/rust-toolchain@1.83.0
      - uses: Swatinem/rust-cache@v2
      - run: cargo check --workspace
2  .github/workflows/release.yml (vendored)
@@ -44,7 +44,7 @@ jobs:

    steps:
      - uses: actions/checkout@v4
-     - uses: dtolnay/rust-toolchain@1.83.0
+     - uses: dtolnay/rust-toolchain@1.87.0
        with:
          target: ${{ matrix.target }}
1158  Cargo.lock (generated)
File diff suppressed because it is too large.
85  Cargo.toml
@@ -4,8 +4,8 @@ default-members = ["crates/typst-cli"]
  resolver = "2"

  [workspace.package]
- version = "0.12.0"
- rust-version = "1.80" # also change in ci.yml
+ version = "0.13.1"
+ rust-version = "1.83" # also change in ci.yml
  authors = ["The Typst Project Developers"]
  edition = "2021"
  homepage = "https://typst.app"
@@ -16,24 +16,24 @@ keywords = ["typst"]
  readme = "README.md"

  [workspace.dependencies]
- typst = { path = "crates/typst", version = "0.12.0" }
- typst-cli = { path = "crates/typst-cli", version = "0.12.0" }
- typst-eval = { path = "crates/typst-eval", version = "0.12.0" }
- typst-html = { path = "crates/typst-html", version = "0.12.0" }
- typst-ide = { path = "crates/typst-ide", version = "0.12.0" }
- typst-kit = { path = "crates/typst-kit", version = "0.12.0" }
- typst-layout = { path = "crates/typst-layout", version = "0.12.0" }
- typst-library = { path = "crates/typst-library", version = "0.12.0" }
- typst-macros = { path = "crates/typst-macros", version = "0.12.0" }
- typst-pdf = { path = "crates/typst-pdf", version = "0.12.0" }
- typst-realize = { path = "crates/typst-realize", version = "0.12.0" }
- typst-render = { path = "crates/typst-render", version = "0.12.0" }
- typst-svg = { path = "crates/typst-svg", version = "0.12.0" }
- typst-syntax = { path = "crates/typst-syntax", version = "0.12.0" }
- typst-timing = { path = "crates/typst-timing", version = "0.12.0" }
- typst-utils = { path = "crates/typst-utils", version = "0.12.0" }
- typst-assets = { git = "https://github.com/typst/typst-assets", rev = "8cccef9" }
- typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", rev = "7f8999d" }
+ typst = { path = "crates/typst", version = "0.13.1" }
+ typst-cli = { path = "crates/typst-cli", version = "0.13.1" }
+ typst-eval = { path = "crates/typst-eval", version = "0.13.1" }
+ typst-html = { path = "crates/typst-html", version = "0.13.1" }
+ typst-ide = { path = "crates/typst-ide", version = "0.13.1" }
+ typst-kit = { path = "crates/typst-kit", version = "0.13.1" }
+ typst-layout = { path = "crates/typst-layout", version = "0.13.1" }
+ typst-library = { path = "crates/typst-library", version = "0.13.1" }
+ typst-macros = { path = "crates/typst-macros", version = "0.13.1" }
+ typst-pdf = { path = "crates/typst-pdf", version = "0.13.1" }
+ typst-realize = { path = "crates/typst-realize", version = "0.13.1" }
+ typst-render = { path = "crates/typst-render", version = "0.13.1" }
+ typst-svg = { path = "crates/typst-svg", version = "0.13.1" }
+ typst-syntax = { path = "crates/typst-syntax", version = "0.13.1" }
+ typst-timing = { path = "crates/typst-timing", version = "0.13.1" }
+ typst-utils = { path = "crates/typst-utils", version = "0.13.1" }
+ typst-assets = { git = "https://github.com/typst/typst-assets", rev = "c74e539" }
+ typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", rev = "fddbf8b" }
  arrayvec = "0.7.4"
  az = "1.2"
  base64 = "0.22"
@@ -47,18 +47,20 @@ clap = { version = "4.4", features = ["derive", "env", "wrap_help"] }
  clap_complete = "4.2.1"
  clap_mangen = "0.2.10"
  codespan-reporting = "0.11"
- codex = { git = "https://github.com/typst/codex", rev = "343a9b1" }
+ codex = { git = "https://github.com/typst/codex", rev = "56eb217" }
  color-print = "0.3.6"
  comemo = "0.4"
  csv = "1"
  ctrlc = "3.4.1"
- dirs = "5"
+ dirs = "6"
  ecow = { version = "0.2", features = ["serde"] }
  env_proxy = "0.4"
+ fastrand = "2.3"
  flate2 = "1"
- fontdb = { version = "0.21", default-features = false }
+ fontdb = { version = "0.23", default-features = false }
  fs_extra = "1.3"
- hayagriva = "0.8"
+ glidesort = "0.1.2"
+ hayagriva = "0.8.1"
  heck = "0.5"
  hypher = "0.1.4"
  icu_properties = { version = "1.4", features = ["serde"] }
@@ -67,25 +69,27 @@ icu_provider_adapters = "1.4"
  icu_provider_blob = "1.4"
  icu_segmenter = { version = "1.4", features = ["serde"] }
  if_chain = "1"
- image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "gif"] }
+ image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "gif", "webp"] }
  indexmap = { version = "2", features = ["serde"] }
- kamadak-exif = "0.5"
+ infer = { version = "0.19.0", default-features = false }
+ kamadak-exif = "0.6"
+ krilla = { version = "0.4.0", default-features = false, features = ["raster-images", "comemo", "rayon"] }
+ krilla-svg = "0.1.0"
  kurbo = "0.11"
  libfuzzer-sys = "0.4"
  lipsum = "0.9"
  miniz_oxide = "0.8"
  memchr = "2"
  native-tls = "0.2"
- notify = "6"
+ notify = "8"
  once_cell = "1"
  open = "5.0.1"
- openssl = "0.10"
+ openssl = "0.10.72"
  oxipng = { version = "9.0", default-features = false, features = ["filetime", "parallel", "zopfli"] }
  palette = { version = "0.7.3", default-features = false, features = ["approx", "libm"] }
  parking_lot = "0.12.1"
  pathdiff = "0.2"
  pdf-writer = "0.12"
  phf = { version = "0.11", features = ["macros"] }
- pixglyph = "0.5.1"
+ pixglyph = "0.6"
  png = "0.17"
  portable-atomic = "1.6"
  proc-macro2 = "1"
@@ -95,10 +99,10 @@ quote = "1"
  rayon = "1.7.0"
  regex = "1"
  regex-syntax = "0.8"
- resvg = { version = "0.43", default-features = false, features = ["raster-images"] }
+ resvg = { version = "0.45", default-features = false, features = ["raster-images"] }
  roxmltree = "0.20"
  rust_decimal = { version = "1.36.0", default-features = false, features = ["maths"] }
- rustybuzz = "0.18"
+ rustybuzz = "0.20"
  same-file = "1"
  self-replace = "1.3.7"
  semver = "1"
@@ -110,8 +114,6 @@ sigpipe = "0.1"
  siphasher = "1"
  smallvec = { version = "1.11.1", features = ["union", "const_generics", "const_new"] }
  stacker = "0.1.15"
- subsetter = "0.2"
- svg2pdf = "0.12"
  syn = { version = "2", features = ["full", "extra-traits"] }
  syntect = { version = "5", default-features = false, features = ["parsing", "regex-fancy", "plist-load", "yaml-load"] }
  tar = "0.4"
@@ -121,26 +123,27 @@ time = { version = "0.3.20", features = ["formatting", "macros", "parsing"] }
  tiny_http = "0.12"
  tiny-skia = "0.11"
  toml = { version = "0.8", default-features = false, features = ["parse", "display"] }
- ttf-parser = "0.24.1"
- two-face = { version = "0.4.0", default-features = false, features = ["syntect-fancy"] }
+ ttf-parser = "0.25.0"
+ two-face = { version = "0.4.3", default-features = false, features = ["syntect-fancy"] }
  typed-arena = "2"
+ unicode-bidi = "0.3.18"
  unicode-ident = "1.0"
  unicode-math-class = "0.1"
- unicode-script = "0.5"
+ unicode-normalization = "0.1.24"
  unicode-segmentation = "1"
  unscanny = "0.1"
  ureq = { version = "2", default-features = false, features = ["native-tls", "gzip", "json"] }
- usvg = { version = "0.43", default-features = false, features = ["text"] }
+ usvg = { version = "0.45", default-features = false, features = ["text"] }
  utf8_iter = "1.0.4"
  walkdir = "2"
- wasmi = "0.39.0"
+ wasmi = "0.40.0"
  web-sys = "0.3"
  xmlparser = "0.13.5"
  xmlwriter = "0.1.0"
  xmp-writer = "0.3"
  xz2 = { version = "0.1", features = ["static"] }
  yaml-front-matter = "0.1"
- zip = { version = "2", default-features = false, features = ["deflate"] }
+ zip = { version = "2.5", default-features = false, features = ["deflate"] }

  [profile.dev.package."*"]
  opt-level = 2
29  README.md
@@ -113,7 +113,9 @@ Typst's CLI is available from different sources:

  - You can install Typst through different package managers. Note that the
    versions in the package managers might lag behind the latest release.
-   - Linux: View [Typst on Repology][repology]
+   - Linux:
+     - View [Typst on Repology][repology]
+     - View [Typst's Snap][snap]
    - macOS: `brew install typst`
    - Windows: `winget install --id Typst.Typst`

@@ -175,22 +177,22 @@ If you prefer an integrated IDE-like experience with autocompletion and instant
  preview, you can also check out [Typst's free web app][app].

  ## Community
- The main place where the community gathers is our [Discord server][discord].
- Feel free to join there to ask questions, help out others, share cool things
- you created with Typst, or just to chat.
+ The main places where the community gathers are our [Forum][forum] and our
+ [Discord server][discord]. The Forum is a great place to ask questions, help
+ others, and share cool things you created with Typst. The Discord server is more
+ suitable for quicker questions, discussions about contributing, or just to chat.
+ We'd be happy to see you there!

- Aside from that there are a few places where you can find things built by
- the community:
-
- - The official [package list](https://typst.app/docs/packages)
- - The [Awesome Typst](https://github.com/qjcg/awesome-typst) repository
+ [Typst Universe][universe] is where the community shares templates and packages.
+ If you want to share your own creations, you can submit them to our
+ [package repository][packages].

  If you had a bad experience in our community, please [reach out to us][contact].

  ## Contributing
- We would love to see contributions from the community. If you experience bugs,
- feel free to open an issue. If you would like to implement a new feature or bug
- fix, please follow the steps outlined in the [contribution guide][contributing].
+ We love to see contributions from the community. If you experience bugs, feel
+ free to open an issue. If you would like to implement a new feature or bug fix,
+ please follow the steps outlined in the [contribution guide][contributing].

  To build Typst yourself, first ensure that you have the
  [latest stable Rust][rust] installed. Then, clone this repository and build the
@@ -241,6 +243,8 @@ instant preview. To achieve these goals, we follow three core design principles:
  [docs]: https://typst.app/docs/
  [app]: https://typst.app/
  [discord]: https://discord.gg/2uDybryKPe
+ [forum]: https://forum.typst.app/
+ [universe]: https://typst.app/universe/
  [tutorial]: https://typst.app/docs/tutorial/
  [show]: https://typst.app/docs/reference/styling/#show-rules
  [math]: https://typst.app/docs/reference/math/
@@ -254,3 +258,4 @@ instant preview. To achieve these goals, we follow three core design principles:
  [contributing]: https://github.com/typst/typst/blob/main/CONTRIBUTING.md
  [packages]: https://github.com/typst/packages/
  [`comemo`]: https://github.com/typst/comemo/
+ [snap]: https://snapcraft.io/typst
@@ -361,7 +361,7 @@ pub struct FontArgs {

      /// Ensures system fonts won't be searched, unless explicitly included via
      /// `--font-path`.
-     #[arg(long)]
+     #[arg(long, env = "TYPST_IGNORE_SYSTEM_FONTS")]
      pub ignore_system_fonts: bool,
  }

@@ -467,15 +467,45 @@ display_possible_values!(Feature);
  #[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum)]
  #[allow(non_camel_case_types)]
  pub enum PdfStandard {
+     /// PDF 1.4.
+     #[value(name = "1.4")]
+     V_1_4,
+     /// PDF 1.5.
+     #[value(name = "1.5")]
+     V_1_5,
+     /// PDF 1.6.
+     #[value(name = "1.6")]
+     V_1_6,
      /// PDF 1.7.
      #[value(name = "1.7")]
      V_1_7,
+     /// PDF 2.0.
+     #[value(name = "2.0")]
+     V_2_0,
+     /// PDF/A-1b.
+     #[value(name = "a-1b")]
+     A_1b,
      /// PDF/A-2b.
      #[value(name = "a-2b")]
      A_2b,
+     /// PDF/A-2u.
+     #[value(name = "a-2u")]
+     A_2u,
      /// PDF/A-3b.
      #[value(name = "a-3b")]
      A_3b,
+     /// PDF/A-3u.
+     #[value(name = "a-3u")]
+     A_3u,
+     /// PDF/A-4.
+     #[value(name = "a-4")]
+     A_4,
+     /// PDF/A-4f.
+     #[value(name = "a-4f")]
+     A_4f,
+     /// PDF/A-4e.
+     #[value(name = "a-4e")]
+     A_4e,
  }

  display_possible_values!(PdfStandard);
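A minimal sketch of how the expanded enum is consumed (not part of the diff; it assumes the `From<PdfStandard>` impl added later in this commit and the `PdfStandards`/`EcoString` types used elsewhere in it): the repeated `--pdf-standard` values convert directly into the exporter's type before `PdfStandards::new` validates the combination.

// Sketch only: gather and validate CLI-provided PDF standards.
fn to_pdf_standards(cli: &[PdfStandard]) -> Result<PdfStandards, EcoString> {
    // `PdfStandard` is `Copy`, so the list can be converted wholesale.
    let list: Vec<typst_pdf::PdfStandard> = cli.iter().copied().map(Into::into).collect();
    // Rejects contradictory combinations (e.g. mutually exclusive PDF/A parts).
    PdfStandards::new(&list)
}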
@@ -6,8 +6,9 @@ use std::path::{Path, PathBuf};
  use chrono::{DateTime, Datelike, Timelike, Utc};
  use codespan_reporting::diagnostic::{Diagnostic, Label};
  use codespan_reporting::term;
- use ecow::{eco_format, EcoString};
+ use ecow::eco_format;
  use parking_lot::RwLock;
+ use pathdiff::diff_paths;
  use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
  use typst::diag::{
      bail, At, Severity, SourceDiagnostic, SourceResult, StrResult, Warned,
@@ -15,7 +16,7 @@ use typst::diag::{
  use typst::foundations::{Datetime, Smart};
  use typst::html::HtmlDocument;
  use typst::layout::{Frame, Page, PageRanges, PagedDocument};
- use typst::syntax::{FileId, Source, Span};
+ use typst::syntax::{FileId, Lines, Span};
  use typst::WorldExt;
  use typst_pdf::{PdfOptions, PdfStandards, Timestamp};

@@ -62,8 +63,7 @@ pub struct CompileConfig {
      /// Opens the output file with the default viewer or a specific program after
      /// compilation.
      pub open: Option<Option<String>>,
-     /// One (or multiple comma-separated) PDF standards that Typst will enforce
-     /// conformance with.
+     /// A list of standards the PDF should conform to.
      pub pdf_standards: PdfStandards,
      /// A path to write a Makefile rule describing the current compilation.
      pub make_deps: Option<PathBuf>,
@@ -129,18 +129,9 @@ impl CompileConfig {
          PageRanges::new(export_ranges.iter().map(|r| r.0.clone()).collect())
      });

-     let pdf_standards = {
-         let list = args
-             .pdf_standard
-             .iter()
-             .map(|standard| match standard {
-                 PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
-                 PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
-                 PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
-             })
-             .collect::<Vec<_>>();
-         PdfStandards::new(&list)?
-     };
+     let pdf_standards = PdfStandards::new(
+         &args.pdf_standard.iter().copied().map(Into::into).collect::<Vec<_>>(),
+     )?;

      #[cfg(feature = "http-server")]
      let server = match watch {
@@ -188,7 +179,7 @@ pub fn compile_once(

      match output {
          // Export the PDF / PNG.
-         Ok(()) => {
+         Ok(outputs) => {
              let duration = start.elapsed();

              if config.watching {
@@ -202,7 +193,7 @@ pub fn compile_once(
              print_diagnostics(world, &[], &warnings, config.diagnostic_format)
                  .map_err(|err| eco_format!("failed to print diagnostics ({err})"))?;

-             write_make_deps(world, config)?;
+             write_make_deps(world, config, outputs)?;
              open_output(config)?;
          }

@@ -226,12 +217,15 @@ pub fn compile_once(
  fn compile_and_export(
      world: &mut SystemWorld,
      config: &mut CompileConfig,
- ) -> Warned<SourceResult<()>> {
+ ) -> Warned<SourceResult<Vec<Output>>> {
      match config.output_format {
          OutputFormat::Html => {
              let Warned { output, warnings } = typst::compile::<HtmlDocument>(world);
              let result = output.and_then(|document| export_html(&document, config));
-             Warned { output: result, warnings }
+             Warned {
+                 output: result.map(|()| vec![config.output.clone()]),
+                 warnings,
+             }
          }
          _ => {
              let Warned { output, warnings } = typst::compile::<PagedDocument>(world);
@@ -257,9 +251,14 @@ fn export_html(document: &HtmlDocument, config: &CompileConfig) -> SourceResult<
  }

  /// Export to a paged target format.
- fn export_paged(document: &PagedDocument, config: &CompileConfig) -> SourceResult<()> {
+ fn export_paged(
+     document: &PagedDocument,
+     config: &CompileConfig,
+ ) -> SourceResult<Vec<Output>> {
      match config.output_format {
-         OutputFormat::Pdf => export_pdf(document, config),
+         OutputFormat::Pdf => {
+             export_pdf(document, config).map(|()| vec![config.output.clone()])
+         }
          OutputFormat::Png => {
              export_image(document, config, ImageExportFormat::Png).at(Span::detached())
          }
@@ -286,6 +285,7 @@ fn export_pdf(document: &PagedDocument, config: &CompileConfig) -> SourceResult<
              })
          }
      };
+
      let options = PdfOptions {
          ident: Smart::Auto,
          timestamp,
@@ -327,7 +327,7 @@ fn export_image(
      document: &PagedDocument,
      config: &CompileConfig,
      fmt: ImageExportFormat,
- ) -> StrResult<()> {
+ ) -> StrResult<Vec<Output>> {
      // Determine whether we have indexable templates in output
      let can_handle_multiple = match config.output {
          Output::Stdout => false,
@@ -341,7 +341,7 @@ fn export_image(
          .iter()
          .enumerate()
          .filter(|(i, _)| {
-             config.pages.as_ref().map_or(true, |exported_page_ranges| {
+             config.pages.as_ref().is_none_or(|exported_page_ranges| {
                  exported_page_ranges.includes_page_index(*i)
              })
          })
@@ -383,7 +383,7 @@ fn export_image(
              && config.export_cache.is_cached(*i, &page.frame)
              && path.exists()
          {
-             return Ok(());
+             return Ok(Output::Path(path.to_path_buf()));
          }

          Output::Path(path.to_owned())
@@ -392,11 +392,9 @@ fn export_image(
      };

      export_image_page(config, page, &output, fmt)?;
-     Ok(())
+     Ok(output)
  })
- .collect::<Result<Vec<()>, EcoString>>()?;
-
- Ok(())
+ .collect::<StrResult<Vec<Output>>>()
  }

  mod output_template {
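The hunks above change every exporter to report the files it actually wrote; the make-deps hunks below consume that list. A minimal sketch of the resulting contract, using only names that appear in this diff (the real `compile_once` additionally prints warnings and handles the watch/open paths):

// Sketch only: compile, collect the produced outputs, and hand them to
// the make-deps writer added below.
fn export_and_track(world: &mut SystemWorld, config: &mut CompileConfig) -> SourceResult<()> {
    let Warned { output, warnings } = compile_and_export(world, config);
    let outputs: Vec<Output> = output?;
    let _ = warnings; // printed via `print_diagnostics` in the real code
    write_make_deps(world, config, outputs).at(Span::detached())?;
    Ok(())
}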
@@ -501,14 +499,25 @@ impl ExportCache {
  /// Writes a Makefile rule describing the relationship between the output and
  /// its dependencies to the path specified by the --make-deps argument, if it
  /// was provided.
- fn write_make_deps(world: &mut SystemWorld, config: &CompileConfig) -> StrResult<()> {
+ fn write_make_deps(
+     world: &mut SystemWorld,
+     config: &CompileConfig,
+     outputs: Vec<Output>,
+ ) -> StrResult<()> {
      let Some(ref make_deps_path) = config.make_deps else { return Ok(()) };
-     let Output::Path(output_path) = &config.output else {
-         bail!("failed to create make dependencies file because output was stdout")
-     };
-     let Some(output_path) = output_path.as_os_str().to_str() else {
+     let Ok(output_paths) = outputs
+         .into_iter()
+         .filter_map(|o| match o {
+             Output::Path(path) => Some(path.into_os_string().into_string()),
+             Output::Stdout => None,
+         })
+         .collect::<Result<Vec<_>, _>>()
+     else {
          bail!("failed to create make dependencies file because output path was not valid unicode")
      };
+     if output_paths.is_empty() {
+         bail!("failed to create make dependencies file because output was stdout")
+     }

      // Based on `munge` in libcpp/mkdeps.cc from the GCC source code. This isn't
      // perfect as some special characters can't be escaped.
@@ -522,6 +531,10 @@ fn write_make_deps(
                  res.push('$');
                  slashes = 0;
              }
+             ':' => {
+                 res.push('\\');
+                 slashes = 0;
+             }
              ' ' | '\t' => {
                  // `munge`'s source contains a comment here that says: "A
                  // space or tab preceded by 2N+1 backslashes represents N
@@ -544,18 +557,29 @@ fn write_make_deps(

      fn write(
          make_deps_path: &Path,
-         output_path: &str,
+         output_paths: Vec<String>,
          root: PathBuf,
          dependencies: impl Iterator<Item = PathBuf>,
      ) -> io::Result<()> {
          let mut file = File::create(make_deps_path)?;
+         let current_dir = std::env::current_dir()?;
+         let relative_root = diff_paths(&root, &current_dir).unwrap_or(root.clone());

-         file.write_all(munge(output_path).as_bytes())?;
+         for (i, output_path) in output_paths.into_iter().enumerate() {
+             if i != 0 {
+                 file.write_all(b" ")?;
+             }
+             file.write_all(munge(&output_path).as_bytes())?;
+         }
          file.write_all(b":")?;
          for dependency in dependencies {
-             let Some(dependency) =
-                 dependency.strip_prefix(&root).unwrap_or(&dependency).to_str()
-             else {
+             let relative_dependency = match dependency.strip_prefix(&root) {
+                 Ok(root_relative_dependency) => {
+                     relative_root.join(root_relative_dependency)
+                 }
+                 Err(_) => dependency,
+             };
+             let Some(relative_dependency) = relative_dependency.to_str() else {
                  // Silently skip paths that aren't valid unicode so we still
                  // produce a rule that will work for the other paths that can be
                  // processed.
@@ -563,14 +587,14 @@ fn write_make_deps(
              };

              file.write_all(b" ")?;
-             file.write_all(munge(dependency).as_bytes())?;
+             file.write_all(munge(relative_dependency).as_bytes())?;
          }
          file.write_all(b"\n")?;

          Ok(())
      }

-     write(make_deps_path, output_path, world.root().to_owned(), world.dependencies())
+     write(make_deps_path, output_paths, world.root().to_owned(), world.dependencies())
          .map_err(|err| {
              eco_format!("failed to create make dependencies file due to IO error ({err})")
          })
@@ -672,7 +696,7 @@ fn label(world: &SystemWorld, span: Span) -> Option<Label<FileId>> {
  impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
      type FileId = FileId;
      type Name = String;
-     type Source = Source;
+     type Source = Lines<String>;

      fn name(&'a self, id: FileId) -> CodespanResult<Self::Name> {
          let vpath = id.vpath();
@@ -732,3 +756,23 @@ impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
          })
      }
  }

+ impl From<PdfStandard> for typst_pdf::PdfStandard {
+     fn from(standard: PdfStandard) -> Self {
+         match standard {
+             PdfStandard::V_1_4 => typst_pdf::PdfStandard::V_1_4,
+             PdfStandard::V_1_5 => typst_pdf::PdfStandard::V_1_5,
+             PdfStandard::V_1_6 => typst_pdf::PdfStandard::V_1_6,
+             PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
+             PdfStandard::V_2_0 => typst_pdf::PdfStandard::V_2_0,
+             PdfStandard::A_1b => typst_pdf::PdfStandard::A_1b,
+             PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
+             PdfStandard::A_2u => typst_pdf::PdfStandard::A_2u,
+             PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
+             PdfStandard::A_3u => typst_pdf::PdfStandard::A_3u,
+             PdfStandard::A_4 => typst_pdf::PdfStandard::A_4,
+             PdfStandard::A_4f => typst_pdf::PdfStandard::A_4f,
+             PdfStandard::A_4e => typst_pdf::PdfStandard::A_4e,
+         }
+     }
+ }
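For context on the make-deps change above, this is the shape of the rule that `write_make_deps` now emits when an export produces several files (for example one PNG per page): all outputs become targets of a single rule. A hypothetical stand-in without the escaping and path-relativization logic:

// Sketch only: "page-1.png page-2.png: main.typ assets/logo.svg\n".
// The real code escapes spaces, colons, and `$` via `munge` and makes
// dependency paths relative to the current directory.
fn plain_rule(outputs: &[String], deps: &[String]) -> String {
    format!("{}: {}\n", outputs.join(" "), deps.join(" "))
}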
@@ -2,11 +2,12 @@ use comemo::Track;
  use ecow::{eco_format, EcoString};
  use serde::Serialize;
  use typst::diag::{bail, HintedStrResult, StrResult, Warned};
+ use typst::engine::Sink;
  use typst::foundations::{Content, IntoValue, LocatableSelector, Scope};
  use typst::layout::PagedDocument;
- use typst::syntax::Span;
+ use typst::syntax::{Span, SyntaxMode};
  use typst::World;
- use typst_eval::{eval_string, EvalMode};
+ use typst_eval::eval_string;

  use crate::args::{QueryCommand, SerializationFormat};
  use crate::compile::print_diagnostics;
@@ -58,9 +59,11 @@ fn retrieve(
      let selector = eval_string(
          &typst::ROUTINES,
          world.track(),
+         // TODO: propagate warnings
+         Sink::new().track_mut(),
          &command.selector,
          Span::detached(),
-         EvalMode::Code,
+         SyntaxMode::Code,
          Scope::default(),
      )
      .map_err(|errors| {
|
||||
let id = span.id()?;
|
||||
let source = world.source(id).ok()?;
|
||||
let range = source.range(span)?;
|
||||
let line = source.byte_to_line(range.start)?;
|
||||
let line = source.lines().byte_to_line(range.start)?;
|
||||
Some((format!("{id:?}"), line as u32 + 1))
|
||||
}
|
||||
|
@@ -10,11 +10,12 @@ use codespan_reporting::term::{self, termcolor};
  use ecow::eco_format;
  use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher as _};
  use same_file::is_same_file;
- use typst::diag::{bail, StrResult};
+ use typst::diag::{bail, warning, StrResult};
  use typst::syntax::Span;
  use typst::utils::format_duration;

  use crate::args::{Input, Output, WatchCommand};
- use crate::compile::{compile_once, CompileConfig};
+ use crate::compile::{compile_once, print_diagnostics, CompileConfig};
  use crate::timings::Timer;
  use crate::world::{SystemWorld, WorldCreationError};
  use crate::{print_error, terminal};
@@ -55,11 +56,16 @@ pub fn watch(timer: &mut Timer, command: &WatchCommand) -> StrResult<()> {
      // Perform initial compilation.
      timer.record(&mut world, |world| compile_once(world, &mut config))??;

-     // Watch all dependencies of the initial compilation.
-     watcher.update(world.dependencies())?;
+     // Print warning when trying to watch stdin.
+     if matches!(&config.input, Input::Stdin) {
+         warn_watching_std(&world, &config)?;
+     }

      // Recompile whenever something relevant happens.
      loop {
+         // Watch all dependencies of the most recent compilation.
+         watcher.update(world.dependencies())?;
+
          // Wait until anything relevant happens.
          watcher.wait()?;

@@ -71,9 +77,6 @@ pub fn watch(timer: &mut Timer, command: &WatchCommand) -> StrResult<()> {

          // Evict the cache.
          comemo::evict(10);
-
-         // Adjust the file watching.
-         watcher.update(world.dependencies())?;
      }
  }

@@ -204,6 +207,10 @@ impl Watcher {
              let event = event
                  .map_err(|err| eco_format!("failed to watch dependencies ({err})"))?;

+             if !is_relevant_event_kind(&event.kind) {
+                 continue;
+             }
+
              // Workaround for notify-rs' implicit unwatch on remove/rename
              // (triggered by some editors when saving files) with the
              // inotify backend. By keeping track of the potentially
@@ -224,7 +231,17 @@ impl Watcher {
                  }
              }

-             relevant |= self.is_event_relevant(&event);
+             // Don't recompile because the output file changed.
+             // FIXME: This doesn't work properly for multifile image export.
+             if event
+                 .paths
+                 .iter()
+                 .all(|path| is_same_file(path, &self.output).unwrap_or(false))
+             {
+                 continue;
+             }
+
+             relevant = true;
          }

          // If we found a relevant event or if any of the missing files now
@@ -234,19 +251,11 @@ impl Watcher {
              }
          }
      }
  }

- /// Whether a watch event is relevant for compilation.
- fn is_event_relevant(&self, event: &notify::Event) -> bool {
-     // Never recompile because the output file changed.
-     if event
-         .paths
-         .iter()
-         .all(|path| is_same_file(path, &self.output).unwrap_or(false))
-     {
-         return false;
-     }
-
-     match &event.kind {
+ /// Whether a kind of watch event is relevant for compilation.
+ fn is_relevant_event_kind(kind: &notify::EventKind) -> bool {
+     match kind {
          notify::EventKind::Any => true,
          notify::EventKind::Access(_) => false,
          notify::EventKind::Create(_) => true,
@@ -260,7 +269,6 @@ impl Watcher {
          notify::EventKind::Remove(_) => true,
          notify::EventKind::Other => false,
      }
- }
  }

  /// The status in which the watcher can be.
@@ -330,3 +338,15 @@ impl Status {
      }
  }

+ /// Emits a warning when trying to watch stdin.
+ fn warn_watching_std(world: &SystemWorld, config: &CompileConfig) -> StrResult<()> {
+     let warning = warning!(
+         Span::detached(),
+         "cannot watch changes for stdin";
+         hint: "to recompile on changes, watch a regular file instead";
+         hint: "to compile once and exit, please use `typst compile` instead"
+     );
+     print_diagnostics(world, &[], &[warning], config.diagnostic_format)
+         .map_err(|err| eco_format!("failed to print diagnostics ({err})"))
+ }
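A minimal sketch of how the two checks above compose (not from the diff; `output` stands in for the watcher's recorded output path): an event triggers recompilation only if its kind is relevant and it is not solely about the output file itself.

// Sketch only: combine the extracted kind predicate with the output-file check.
fn should_recompile(event: &notify::Event, output: &std::path::Path) -> bool {
    is_relevant_event_kind(&event.kind)
        && !event
            .paths
            .iter()
            .all(|path| is_same_file(path, output).unwrap_or(false))
}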
@@ -9,7 +9,7 @@ use ecow::{eco_format, EcoString};
  use parking_lot::Mutex;
  use typst::diag::{FileError, FileResult};
  use typst::foundations::{Bytes, Datetime, Dict, IntoValue};
- use typst::syntax::{FileId, Source, VirtualPath};
+ use typst::syntax::{FileId, Lines, Source, VirtualPath};
  use typst::text::{Font, FontBook};
  use typst::utils::LazyHash;
  use typst::{Library, World};
@@ -181,10 +181,20 @@ impl SystemWorld {
          }
      }

-     /// Lookup a source file by id.
+     /// Lookup line metadata for a file by id.
      #[track_caller]
-     pub fn lookup(&self, id: FileId) -> Source {
-         self.source(id).expect("file id does not point to any source file")
+     pub fn lookup(&self, id: FileId) -> Lines<String> {
+         self.slot(id, |slot| {
+             if let Some(source) = slot.source.get() {
+                 let source = source.as_ref().expect("file is not valid");
+                 source.lines().clone()
+             } else if let Some(bytes) = slot.file.get() {
+                 let bytes = bytes.as_ref().expect("file is not valid");
+                 Lines::try_from(bytes).expect("file is not valid utf-8")
+             } else {
+                 panic!("file id does not point to any source file");
+             }
+         })
      }
  }

@@ -210,7 +220,9 @@ impl World for SystemWorld {
      }

      fn font(&self, index: usize) -> Option<Font> {
-         self.fonts[index].get()
+         // comemo's validation may invoke this function with an invalid index. This is
+         // impossible in typst-cli but possible if a custom tool mutates the fonts.
+         self.fonts.get(index)?.get()
      }

      fn today(&self, offset: Option<i64>) -> Option<Datetime> {
@@ -337,6 +349,11 @@ impl<T: Clone> SlotCell<T> {
          self.accessed = false;
      }

+     /// Gets the contents of the cell.
+     fn get(&self) -> Option<&FileResult<T>> {
+         self.data.as_ref()
+     }
+
      /// Gets the contents of the cell or initialize them.
      fn get_or_init(
          &mut self,
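A short sketch of how this ties into the diagnostics changes earlier in the commit (an assumption, not diff code): spans can now be mapped to line numbers from the lightweight `Lines<String>` metadata instead of a full `Source`, assuming `Lines` exposes the same `byte_to_line` used above and `WorldExt::range` as in typst-cli.

// Sketch only: resolve a span to a 1-based line number via `lookup`.
fn line_of(world: &SystemWorld, span: Span) -> Option<u32> {
    let id = span.id()?;
    let range = world.range(span)?;   // from typst::WorldExt
    let lines = world.lookup(id);     // Lines<String>, works for non-source files too
    Some(lines.byte_to_line(range.start)? as u32 + 1)
}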
@@ -30,12 +30,14 @@ impl Access for ast::Ident<'_> {
      fn access<'a>(self, vm: &'a mut Vm) -> SourceResult<&'a mut Value> {
          let span = self.span();
          if vm.inspected == Some(span) {
-             if let Ok(value) = vm.scopes.get(&self).cloned() {
-                 vm.trace(value);
+             if let Ok(binding) = vm.scopes.get(&self) {
+                 vm.trace(binding.read().clone());
              }
          }
-         let value = vm.scopes.get_mut(&self).at(span)?;
-         Ok(value)
+         vm.scopes
+             .get_mut(&self)
+             .and_then(|b| b.write().map_err(Into::into))
+             .at(span)
      }
  }

@@ -6,8 +6,8 @@ use typst_library::diag::{
  };
  use typst_library::engine::{Engine, Sink, Traced};
  use typst_library::foundations::{
-     Arg, Args, Capturer, Closure, Content, Context, Func, NativeElement, Scope, Scopes,
-     SymbolElem, Value,
+     Arg, Args, Binding, Capturer, Closure, Content, Context, Func, NativeElement, Scope,
+     Scopes, SymbolElem, Value,
  };
  use typst_library::introspection::Introspector;
  use typst_library::math::LrElem;
@@ -25,19 +25,22 @@ impl Eval for ast::FuncCall<'_> {
      fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
          let span = self.span();
          let callee = self.callee();
-         let in_math = in_math(callee);
          let callee_span = callee.span();
          let args = self.args();
-         let trailing_comma = args.trailing_comma();

          vm.engine.route.check_call_depth().at(span)?;

          // Try to evaluate as a call to an associated function or field.
-         let (callee, args) = if let ast::Expr::FieldAccess(access) = callee {
+         let (callee_value, args_value) = if let ast::Expr::FieldAccess(access) = callee {
              let target = access.target();
              let field = access.field();
              match eval_field_call(target, field, args, span, vm)? {
-                 FieldCall::Normal(callee, args) => (callee, args),
+                 FieldCall::Normal(callee, args) => {
+                     if vm.inspected == Some(callee_span) {
+                         vm.trace(callee.clone());
+                     }
+                     (callee, args)
+                 }
                  FieldCall::Resolved(value) => return Ok(value),
              }
          } else {
@@ -45,9 +48,15 @@ impl Eval for ast::FuncCall<'_> {
              (callee.eval(vm)?, args.eval(vm)?.spanned(span))
          };

-         let func_result = callee.clone().cast::<Func>();
-         if in_math && func_result.is_err() {
-             return wrap_args_in_math(callee, callee_span, args, trailing_comma);
+         let func_result = callee_value.clone().cast::<Func>();
+
+         if func_result.is_err() && in_math(callee) {
+             return wrap_args_in_math(
+                 callee_value,
+                 callee_span,
+                 args_value,
+                 args.trailing_comma(),
+             );
          }

          let func = func_result
@@ -56,8 +65,11 @@ impl Eval for ast::FuncCall<'_> {

          let point = || Tracepoint::Call(func.name().map(Into::into));
          let f = || {
-             func.call(&mut vm.engine, vm.context, args)
-                 .trace(vm.world(), point, span)
+             func.call(&mut vm.engine, vm.context, args_value).trace(
+                 vm.world(),
+                 point,
+                 span,
+             )
          };

          // Stacker is broken on WASM.
@@ -196,7 +208,7 @@ pub fn eval_closure(

      // Provide the closure itself for recursive calls.
      if let Some(name) = name {
-         vm.define(name, Value::Func(func.clone()));
+         vm.define(name, func.clone());
      }

      let num_pos_args = args.to_pos().len();
@@ -315,13 +327,15 @@ fn eval_field_call(
          (target, args)
      };

+     let field_span = field.span();
+     let sink = (&mut vm.engine, field_span);
      if let Some(callee) = target.ty().scope().get(&field) {
          args.insert(0, target_expr.span(), target);
-         Ok(FieldCall::Normal(callee.clone(), args))
+         Ok(FieldCall::Normal(callee.read_checked(sink).clone(), args))
      } else if let Value::Content(content) = &target {
          if let Some(callee) = content.elem().scope().get(&field) {
              args.insert(0, target_expr.span(), target);
-             Ok(FieldCall::Normal(callee.clone(), args))
+             Ok(FieldCall::Normal(callee.read_checked(sink).clone(), args))
          } else {
              bail!(missing_field_call_error(target, field))
          }
@@ -331,7 +345,7 @@ fn eval_field_call(
      ) {
          // Certain value types may have their own ways to access method fields.
          // e.g. `$arrow.r(v)$`, `table.cell[..]`
-         let value = target.field(&field).at(field.span())?;
+         let value = target.field(&field, sink).at(field_span)?;
          Ok(FieldCall::Normal(value, args))
      } else {
          // Otherwise we cannot call this field.
@@ -364,7 +378,7 @@ fn missing_field_call_error(target: Value, field: Ident) -> SourceDiagnostic {
              field.as_str(),
          ));
      }
-     _ if target.field(&field).is_ok() => {
+     _ if target.field(&field, ()).is_ok() => {
          error.hint(eco_format!(
              "did you mean to access the field `{}`?",
              field.as_str(),
@@ -402,12 +416,14 @@ fn wrap_args_in_math(
      if trailing_comma {
          body += SymbolElem::packed(',');
      }
-     Ok(Value::Content(
-         callee.display().spanned(callee_span)
+
+     let formatted = callee.display().spanned(callee_span)
          + LrElem::new(SymbolElem::packed('(') + body + SymbolElem::packed(')'))
              .pack()
-             .spanned(args.span),
-     ))
+             .spanned(args.span);
+
+     args.finish()?;
+     Ok(Value::Content(formatted))
  }

  /// Provide a hint if the callee is a shadowed standard library function.
@@ -458,15 +474,13 @@ impl<'a> CapturesVisitor<'a> {
              // Identifiers that shouldn't count as captures because they
              // actually bind a new name are handled below (individually through
              // the expressions that contain them).
-             Some(ast::Expr::Ident(ident)) => {
-                 self.capture(ident.get(), ident.span(), Scopes::get)
-             }
+             Some(ast::Expr::Ident(ident)) => self.capture(ident.get(), Scopes::get),
              Some(ast::Expr::MathIdent(ident)) => {
-                 self.capture(ident.get(), ident.span(), Scopes::get_in_math)
+                 self.capture(ident.get(), Scopes::get_in_math)
              }

              // Code and content blocks create a scope.
-             Some(ast::Expr::Code(_) | ast::Expr::Content(_)) => {
+             Some(ast::Expr::CodeBlock(_) | ast::Expr::ContentBlock(_)) => {
                  self.internal.enter();
                  for child in node.children() {
                      self.visit(child);
@@ -516,7 +530,7 @@ impl<'a> CapturesVisitor<'a> {

              // A let expression contains a binding, but that binding is only
              // active after the body is evaluated.
-             Some(ast::Expr::Let(expr)) => {
+             Some(ast::Expr::LetBinding(expr)) => {
                  if let Some(init) = expr.init() {
                      self.visit(init.to_untyped());
                  }
@@ -529,7 +543,7 @@ impl<'a> CapturesVisitor<'a> {
              // A for loop contains one or two bindings in its pattern. These are
              // active after the iterable is evaluated but before the body is
              // evaluated.
-             Some(ast::Expr::For(expr)) => {
+             Some(ast::Expr::ForLoop(expr)) => {
                  self.visit(expr.iterable().to_untyped());
                  self.internal.enter();

@@ -544,7 +558,7 @@ impl<'a> CapturesVisitor<'a> {

              // An import contains items, but these are active only after the
              // path is evaluated.
-             Some(ast::Expr::Import(expr)) => {
+             Some(ast::Expr::ModuleImport(expr)) => {
                  self.visit(expr.source().to_untyped());
                  if let Some(ast::Imports::Items(items)) = expr.imports() {
                      for item in items.iter() {
@@ -570,32 +584,34 @@ impl<'a> CapturesVisitor<'a> {

      /// Bind a new internal variable.
      fn bind(&mut self, ident: ast::Ident) {
-         self.internal.top.define_ident(ident, Value::None);
+         // The concrete value does not matter as we only use the scoping
+         // mechanism of `Scopes`, not the values themselves.
+         self.internal
+             .top
+             .bind(ident.get().clone(), Binding::detached(Value::None));
      }

      /// Capture a variable if it isn't internal.
      fn capture(
          &mut self,
          ident: &EcoString,
-         span: Span,
-         getter: impl FnOnce(&'a Scopes<'a>, &str) -> HintedStrResult<&'a Value>,
+         getter: impl FnOnce(&'a Scopes<'a>, &str) -> HintedStrResult<&'a Binding>,
      ) {
-         if self.internal.get(ident).is_err() {
-             let Some(value) = self
-                 .external
-                 .map(|external| getter(external, ident).ok())
-                 .unwrap_or(Some(&Value::None))
-             else {
+         if self.internal.get(ident).is_ok() {
+             return;
+         }
+
+         let binding = match self.external {
+             Some(external) => match getter(external, ident) {
+                 Ok(binding) => binding.capture(self.capturer),
+                 Err(_) => return,
+             },
+             // The external scopes are only `None` when we are doing IDE capture
+             // analysis, in which case the concrete value doesn't matter.
+             None => Binding::detached(Value::None),
+         };
+
-             self.captures.define_captured(
-                 ident.clone(),
-                 value.clone(),
-                 self.capturer,
-                 span,
-             );
-         }
+         self.captures.bind(ident.clone(), binding);
      }
  }

@@ -30,7 +30,7 @@ fn eval_code<'a>(
      while let Some(expr) = exprs.next() {
          let span = expr.span();
          let value = match expr {
-             ast::Expr::Set(set) => {
+             ast::Expr::SetRule(set) => {
                  let styles = set.eval(vm)?;
                  if vm.flow.is_some() {
                      break;
@@ -39,7 +39,7 @@ fn eval_code<'a>(
                  let tail = eval_code(vm, exprs)?.display();
                  Value::Content(tail.styled_with_map(styles))
              }
-             ast::Expr::Show(show) => {
+             ast::Expr::ShowRule(show) => {
                  let recipe = show.eval(vm)?;
                  if vm.flow.is_some() {
                      break;
@@ -94,9 +94,9 @@ impl Eval for ast::Expr<'_> {
              Self::Label(v) => v.eval(vm),
              Self::Ref(v) => v.eval(vm).map(Value::Content),
              Self::Heading(v) => v.eval(vm).map(Value::Content),
-             Self::List(v) => v.eval(vm).map(Value::Content),
-             Self::Enum(v) => v.eval(vm).map(Value::Content),
-             Self::Term(v) => v.eval(vm).map(Value::Content),
+             Self::ListItem(v) => v.eval(vm).map(Value::Content),
+             Self::EnumItem(v) => v.eval(vm).map(Value::Content),
+             Self::TermItem(v) => v.eval(vm).map(Value::Content),
              Self::Equation(v) => v.eval(vm).map(Value::Content),
              Self::Math(v) => v.eval(vm).map(Value::Content),
              Self::MathText(v) => v.eval(vm).map(Value::Content),
@@ -116,8 +116,8 @@ impl Eval for ast::Expr<'_> {
              Self::Float(v) => v.eval(vm),
              Self::Numeric(v) => v.eval(vm),
              Self::Str(v) => v.eval(vm),
-             Self::Code(v) => v.eval(vm),
-             Self::Content(v) => v.eval(vm).map(Value::Content),
+             Self::CodeBlock(v) => v.eval(vm),
+             Self::ContentBlock(v) => v.eval(vm).map(Value::Content),
              Self::Array(v) => v.eval(vm).map(Value::Array),
              Self::Dict(v) => v.eval(vm).map(Value::Dict),
              Self::Parenthesized(v) => v.eval(vm),
@@ -126,19 +126,19 @@ impl Eval for ast::Expr<'_> {
              Self::Closure(v) => v.eval(vm),
              Self::Unary(v) => v.eval(vm),
              Self::Binary(v) => v.eval(vm),
-             Self::Let(v) => v.eval(vm),
-             Self::DestructAssign(v) => v.eval(vm),
-             Self::Set(_) => bail!(forbidden("set")),
-             Self::Show(_) => bail!(forbidden("show")),
+             Self::LetBinding(v) => v.eval(vm),
+             Self::DestructAssignment(v) => v.eval(vm),
+             Self::SetRule(_) => bail!(forbidden("set")),
+             Self::ShowRule(_) => bail!(forbidden("show")),
              Self::Contextual(v) => v.eval(vm).map(Value::Content),
              Self::Conditional(v) => v.eval(vm),
-             Self::While(v) => v.eval(vm),
-             Self::For(v) => v.eval(vm),
-             Self::Import(v) => v.eval(vm),
-             Self::Include(v) => v.eval(vm).map(Value::Content),
-             Self::Break(v) => v.eval(vm),
-             Self::Continue(v) => v.eval(vm),
-             Self::Return(v) => v.eval(vm),
+             Self::WhileLoop(v) => v.eval(vm),
+             Self::ForLoop(v) => v.eval(vm),
+             Self::ModuleImport(v) => v.eval(vm),
+             Self::ModuleInclude(v) => v.eval(vm).map(Value::Content),
+             Self::LoopBreak(v) => v.eval(vm),
+             Self::LoopContinue(v) => v.eval(vm),
+             Self::FuncReturn(v) => v.eval(vm),
          }?
          .spanned(span);

@@ -154,7 +154,13 @@ impl Eval for ast::Ident<'_> {
      type Output = Value;

      fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
-         vm.scopes.get(&self).cloned().at(self.span())
+         let span = self.span();
+         Ok(vm
+             .scopes
+             .get(&self)
+             .at(span)?
+             .read_checked((&mut vm.engine, span))
+             .clone())
      }
  }

@@ -310,8 +316,9 @@ impl Eval for ast::FieldAccess<'_> {
      fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
          let value = self.target().eval(vm)?;
          let field = self.field();
+         let field_span = field.span();

-         let err = match value.field(&field).at(field.span()) {
+         let err = match value.field(&field, (&mut vm.engine, field_span)).at(field_span) {
              Ok(value) => return Ok(value),
              Err(err) => err,
          };
@@ -4,7 +4,7 @@ use typst_library::diag::{
      bail, error, warning, At, FileError, SourceResult, Trace, Tracepoint,
  };
  use typst_library::engine::Engine;
- use typst_library::foundations::{Content, Module, Value};
+ use typst_library::foundations::{Binding, Content, Module, Value};
  use typst_library::World;
  use typst_syntax::ast::{self, AstNode, BareImportError};
  use typst_syntax::package::{PackageManifest, PackageSpec};
@@ -43,12 +43,11 @@ impl Eval for ast::ModuleImport<'_> {
          }
      }

-     // Source itself is imported if there is no import list or a rename.
-     let bare_name = self.bare_name();
+     // If there is a rename, import the source itself under that name.
      let new_name = self.new_name();
      if let Some(new_name) = new_name {
-         if let Ok(source_name) = &bare_name {
-             if source_name == new_name.as_str() {
+         if let ast::Expr::Ident(ident) = self.source() {
+             if ident.as_str() == new_name.as_str() {
                  // Warn on `import x as x`
                  vm.engine.sink.warn(warning!(
                      new_name.span(),
@@ -58,7 +57,7 @@ impl Eval for ast::ModuleImport<'_> {
          }

          // Define renamed module on the scope.
-         vm.scopes.top.define_ident(new_name, source.clone());
+         vm.define(new_name, source.clone());
      }

      let scope = source.scope().unwrap();
@@ -76,7 +75,7 @@ impl Eval for ast::ModuleImport<'_> {
                  "this import has no effect",
              ));
          }
-         vm.scopes.top.define_spanned(name, source, source_span);
+         vm.scopes.top.bind(name, Binding::new(source, source_span));
      }
      Ok(_) | Err(BareImportError::Dynamic) => bail!(
          source_span, "dynamic import requires an explicit name";
@@ -92,8 +91,8 @@ impl Eval for ast::ModuleImport<'_> {
          }
      }
      Some(ast::Imports::Wildcard) => {
-         for (var, value, span) in scope.iter() {
-             vm.scopes.top.define_spanned(var.clone(), value.clone(), span);
+         for (var, binding) in scope.iter() {
+             vm.scopes.top.bind(var.clone(), binding.clone());
          }
      }
      Some(ast::Imports::Items(items)) => {
@@ -103,7 +102,7 @@ impl Eval for ast::ModuleImport<'_> {
          let mut scope = scope;

          while let Some(component) = &path.next() {
-             let Some(value) = scope.get(component) else {
+             let Some(binding) = scope.get(component) else {
                  errors.push(error!(component.span(), "unresolved import"));
                  break;
              };
@@ -111,6 +110,7 @@ impl Eval for ast::ModuleImport<'_> {
              if path.peek().is_some() {
                  // Nested import, as this is not the last component.
                  // This must be a submodule.
+                 let value = binding.read();
                  let Some(submodule) = value.scope() else {
                      let error = if matches!(value, Value::Func(function) if function.scope().is_none())
                      {
@@ -153,7 +153,7 @@ impl Eval for ast::ModuleImport<'_> {
              }
          }

-         vm.define(item.bound_name(), value.clone());
+         vm.bind(item.bound_name(), binding.clone());
      }
  }
  }
@@ -18,7 +18,6 @@ pub use self::call::{eval_closure, CapturesVisitor};
  pub use self::flow::FlowEvent;
  pub use self::import::import;
  pub use self::vm::Vm;
- pub use typst_library::routines::EvalMode;

  use self::access::*;
  use self::binding::*;
@@ -32,7 +31,7 @@ use typst_library::introspection::Introspector;
  use typst_library::math::EquationElem;
  use typst_library::routines::Routines;
  use typst_library::World;
- use typst_syntax::{ast, parse, parse_code, parse_math, Source, Span};
+ use typst_syntax::{ast, parse, parse_code, parse_math, Source, Span, SyntaxMode};

  /// Evaluate a source file and return the resulting module.
  #[comemo::memoize]
@@ -101,15 +100,16 @@ pub fn eval(
  pub fn eval_string(
      routines: &Routines,
      world: Tracked<dyn World + '_>,
+     sink: TrackedMut<Sink>,
      string: &str,
      span: Span,
-     mode: EvalMode,
+     mode: SyntaxMode,
      scope: Scope,
  ) -> SourceResult<Value> {
      let mut root = match mode {
-         EvalMode::Code => parse_code(string),
-         EvalMode::Markup => parse(string),
-         EvalMode::Math => parse_math(string),
+         SyntaxMode::Code => parse_code(string),
+         SyntaxMode::Markup => parse(string),
+         SyntaxMode::Math => parse_math(string),
      };

      root.synthesize(span);
@@ -121,7 +121,6 @@ pub fn eval_string(
      }

      // Prepare the engine.
-     let mut sink = Sink::new();
      let introspector = Introspector::default();
      let traced = Traced::default();
      let engine = Engine {
@@ -129,7 +128,7 @@ pub fn eval_string(
          world,
          introspector: introspector.track(),
          traced: traced.track(),
-         sink: sink.track_mut(),
+         sink,
          route: Route::default(),
      };

@@ -141,11 +140,11 @@ pub fn eval_string(

      // Evaluate the code.
      let output = match mode {
-         EvalMode::Code => root.cast::<ast::Code>().unwrap().eval(&mut vm)?,
-         EvalMode::Markup => {
+         SyntaxMode::Code => root.cast::<ast::Code>().unwrap().eval(&mut vm)?,
+         SyntaxMode::Markup => {
              Value::Content(root.cast::<ast::Markup>().unwrap().eval(&mut vm)?)
          }
-         EvalMode::Math => Value::Content(
+         SyntaxMode::Math => Value::Content(
              EquationElem::new(root.cast::<ast::Math>().unwrap().eval(&mut vm)?)
                  .with_block(false)
                  .pack()
@@ -33,7 +33,7 @@ fn eval_markup<'a>(

      while let Some(expr) = exprs.next() {
          match expr {
-             ast::Expr::Set(set) => {
+             ast::Expr::SetRule(set) => {
                  let styles = set.eval(vm)?;
                  if vm.flow.is_some() {
                      break;
@@ -41,7 +41,7 @@ fn eval_markup<'a>(

                  seq.push(eval_markup(vm, exprs)?.styled_with_map(styles))
              }
-             ast::Expr::Show(show) => {
+             ast::Expr::ShowRule(show) => {
                  let recipe = show.eval(vm)?;
                  if vm.flow.is_some() {
                      break;
@@ -35,7 +35,13 @@ impl Eval for ast::MathIdent<'_> {
      type Output = Value;

      fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
-         vm.scopes.get_in_math(&self).cloned().at(self.span())
+         let span = self.span();
+         Ok(vm
+             .scopes
+             .get_in_math(&self)
+             .at(span)?
+             .read_checked((&mut vm.engine, span))
+             .clone())
      }
  }

@@ -45,7 +45,7 @@ impl Eval for ast::ShowRule<'_> {

      let transform = self.transform();
      let transform = match transform {
-         ast::Expr::Set(set) => Transformation::Style(set.eval(vm)?),
+         ast::Expr::SetRule(set) => Transformation::Style(set.eval(vm)?),
          expr => expr.eval(vm)?.cast::<Transformation>().at(transform.span())?,
      };

@ -1,7 +1,7 @@
use comemo::Tracked;
use typst_library::diag::warning;
use typst_library::engine::Engine;
use typst_library::foundations::{Context, IntoValue, Scopes, Value};
use typst_library::foundations::{Binding, Context, IntoValue, Scopes, Value};
use typst_library::World;
use typst_syntax::ast::{self, AstNode};
use typst_syntax::Span;
@ -42,13 +42,23 @@ impl<'a> Vm<'a> {
self.engine.world
}
/// Define a variable in the current scope.
/// Bind a value to an identifier.
///
/// This will create a [`Binding`] with the value and the identifier's span.
pub fn define(&mut self, var: ast::Ident, value: impl IntoValue) {
let value = value.into_value();
if self.inspected == Some(var.span()) {
self.trace(value.clone());
self.bind(var, Binding::new(value, var.span()));
}
// This will become an error in the parser if 'is' becomes a keyword.
/// Insert a binding into the current scope.
///
/// This will insert the value into the top-most scope and make it available
/// for dynamic tracing, assisting IDE functionality.
pub fn bind(&mut self, var: ast::Ident, binding: Binding) {
if self.inspected == Some(var.span()) {
self.trace(binding.read().clone());
}
// This will become an error in the parser if `is` becomes a keyword.
if var.get() == "is" {
self.engine.sink.warn(warning!(
var.span(),
@ -58,7 +68,8 @@ impl<'a> Vm<'a> {
hint: "try `is_` instead"
));
}
self.scopes.top.define_ident(var, value);
self.scopes.top.bind(var.get().clone(), binding);
}
/// Trace a value.
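`Vm::define` now funnels through `Vm::bind` with a `Binding`. A minimal sketch of what such a binding pairs together, using stand-in types rather than the real `typst_library::foundations::Binding`:

    // Hypothetical stand-in: a bound value plus the span of the identifier it
    // was bound to; `read` exposes the value, as in the diff above.
    struct ToyBinding<V> {
        value: V,
        span: u64, // the real type stores a `Span`
    }

    impl<V> ToyBinding<V> {
        fn new(value: V, span: u64) -> Self {
            Self { value, span }
        }
        fn read(&self) -> &V {
            &self.value
        }
        fn span(&self) -> u64 {
            self.span
        }
    }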
@ -83,8 +83,8 @@ fn html_document_impl(
)?;
let output = handle_list(&mut engine, &mut locator, children.iter().copied())?;
let introspector = Introspector::html(&output);
let root = root_element(output, &info)?;
let introspector = Introspector::html(&root);
Ok(HtmlDocument { info, root, introspector })
}
@ -263,13 +263,13 @@ fn handle(
/// Wrap the nodes in `<html>` and `<body>` if they are not yet rooted,
/// supplying a suitable `<head>`.
fn root_element(output: Vec<HtmlNode>, info: &DocumentInfo) -> SourceResult<HtmlElement> {
let head = head_element(info);
let body = match classify_output(output)? {
OutputKind::Html(element) => return Ok(element),
OutputKind::Body(body) => body,
OutputKind::Leafs(leafs) => HtmlElement::new(tag::body).with_children(leafs),
};
Ok(HtmlElement::new(tag::html)
.with_children(vec![head_element(info).into(), body.into()]))
Ok(HtmlElement::new(tag::html).with_children(vec![head.into(), body.into()]))
}
/// Generate a `<head>` element.
@ -302,23 +302,41 @@ fn head_element(info: &DocumentInfo) -> HtmlElement {
);
}
if !info.author.is_empty() {
children.push(
HtmlElement::new(tag::meta)
.with_attr(attr::name, "authors")
.with_attr(attr::content, info.author.join(", "))
.into(),
)
}
if !info.keywords.is_empty() {
children.push(
HtmlElement::new(tag::meta)
.with_attr(attr::name, "keywords")
.with_attr(attr::content, info.keywords.join(", "))
.into(),
)
}
HtmlElement::new(tag::head).with_children(children)
}
/// Determine which kind of output the user generated.
fn classify_output(mut output: Vec<HtmlNode>) -> SourceResult<OutputKind> {
let len = output.len();
let count = output.iter().filter(|node| !matches!(node, HtmlNode::Tag(_))).count();
for node in &mut output {
let HtmlNode::Element(elem) = node else { continue };
let tag = elem.tag;
let mut take = || std::mem::replace(elem, HtmlElement::new(tag::html));
match (tag, len) {
match (tag, count) {
(tag::html, 1) => return Ok(OutputKind::Html(take())),
(tag::body, 1) => return Ok(OutputKind::Body(take())),
(tag::html | tag::body, _) => bail!(
elem.span,
"`{}` element must be the only element in the document",
elem.tag
elem.tag,
),
_ => {}
}
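The `classify_output` fix above counts only nodes that are not introspection tags before deciding whether a lone `<html>` or `<body>` may become the document root. The counting pattern in isolation, with illustrative data:

    fn main() {
        // Pretend invisible tag nodes are the string "tag".
        let nodes = ["tag", "body", "tag"];
        let count = nodes.iter().filter(|n| **n != "tag").count();
        assert_eq!(count, 1); // one real element, so a lone <body> is accepted
    }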
@ -26,7 +26,7 @@ pub fn analyze_expr(
ast::Expr::Str(v) => Value::Str(v.get().into()),
_ => {
if node.kind() == SyntaxKind::Contextual {
if let Some(child) = node.children().last() {
if let Some(child) = node.children().next_back() {
return analyze_expr(world, &child);
}
}
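The `analyze_expr` tweak above swaps `.last()` for `.next_back()`, which takes the final child of a double-ended iterator without walking the whole sequence:

    fn main() {
        let mut iter = [1, 2, 3].into_iter();
        assert_eq!(iter.next_back(), Some(3)); // grab the back directly
        assert_eq!(iter.next(), Some(1));      // the front is untouched
    }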
@ -15,7 +15,7 @@ use typst::syntax::{
ast, is_id_continue, is_id_start, is_ident, FileId, LinkedNode, Side, Source,
SyntaxKind,
};
use typst::text::RawElem;
use typst::text::{FontFlags, RawElem};
use typst::visualize::Color;
use unscanny::Scanner;
@ -298,15 +298,25 @@ fn complete_math(ctx: &mut CompletionContext) -> bool {
return false;
}
// Start of an interpolated identifier: "#|".
// Start of an interpolated identifier: "$#|$".
if ctx.leaf.kind() == SyntaxKind::Hash {
ctx.from = ctx.cursor;
code_completions(ctx, true);
return true;
}
// Behind existing interpolated identifier: "$#pa|$".
if ctx.leaf.kind() == SyntaxKind::Ident {
ctx.from = ctx.leaf.offset();
code_completions(ctx, true);
return true;
}
// Behind existing atom or identifier: "$a|$" or "$abc|$".
if matches!(ctx.leaf.kind(), SyntaxKind::Text | SyntaxKind::MathIdent) {
if matches!(
ctx.leaf.kind(),
SyntaxKind::Text | SyntaxKind::MathText | SyntaxKind::MathIdent
) {
ctx.from = ctx.leaf.offset();
math_completions(ctx);
return true;
@ -358,7 +368,7 @@ fn complete_field_accesses(ctx: &mut CompletionContext) -> bool {
// Behind an expression plus dot: "emoji.|".
if_chain! {
if ctx.leaf.kind() == SyntaxKind::Dot
|| (ctx.leaf.kind() == SyntaxKind::Text
|| (matches!(ctx.leaf.kind(), SyntaxKind::Text | SyntaxKind::MathText)
&& ctx.leaf.text() == ".");
if ctx.leaf.range().end == ctx.cursor;
if let Some(prev) = ctx.leaf.prev_sibling();
@ -398,13 +408,31 @@ fn field_access_completions(
value: &Value,
styles: &Option<Styles>,
) {
for (name, value, _) in value.ty().scope().iter() {
ctx.call_completion(name.clone(), value);
let scopes = {
let ty = value.ty().scope();
let elem = match value {
Value::Content(content) => Some(content.elem().scope()),
_ => None,
};
elem.into_iter().chain(Some(ty))
};
// Autocomplete methods from the element's or type's scope. We only complete
// those which have a `self` parameter.
for (name, binding) in scopes.flat_map(|scope| scope.iter()) {
let Ok(func) = binding.read().clone().cast::<Func>() else { continue };
if func
.params()
.and_then(|params| params.first())
.is_some_and(|param| param.name == "self")
{
ctx.call_completion(name.clone(), binding.read());
}
}
if let Some(scope) = value.scope() {
for (name, value, _) in scope.iter() {
ctx.call_completion(name.clone(), value);
for (name, binding) in scope.iter() {
ctx.call_completion(name.clone(), binding.read());
}
}
@ -414,7 +442,7 @@ fn field_access_completions(
// with method syntax;
// 2. We can unwrap the field's value since it's a field belonging to
// this value's type, so accessing it should not fail.
ctx.value_completion(field, &value.field(field).unwrap());
ctx.value_completion(field, &value.field(field, ()).unwrap());
}
match value {
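The completion change above only offers functions from the type or element scope when their first declared parameter is named `self`, i.e. when they can be called with method syntax. The shape of that filter, on plain parameter-name lists rather than the real `ParamInfo`:

    fn is_method(param_names: &[&str]) -> bool {
        param_names.first().is_some_and(|name| *name == "self")
    }

    fn main() {
        assert!(is_method(&["self", "key"]));
        assert!(!is_method(&["angle"]));
    }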
@ -496,7 +524,7 @@ fn complete_imports(ctx: &mut CompletionContext) -> bool {
// "#import "path.typ": a, b, |".
if_chain! {
if let Some(prev) = ctx.leaf.prev_sibling();
if let Some(ast::Expr::Import(import)) = prev.get().cast();
if let Some(ast::Expr::ModuleImport(import)) = prev.get().cast();
if let Some(ast::Imports::Items(items)) = import.imports();
if let Some(source) = prev.children().find(|child| child.is::<ast::Expr>());
then {
@ -515,7 +543,7 @@ fn complete_imports(ctx: &mut CompletionContext) -> bool {
if let Some(grand) = parent.parent();
if grand.kind() == SyntaxKind::ImportItems;
if let Some(great) = grand.parent();
if let Some(ast::Expr::Import(import)) = great.get().cast();
if let Some(ast::Expr::ModuleImport(import)) = great.get().cast();
if let Some(ast::Imports::Items(items)) = import.imports();
if let Some(source) = great.children().find(|child| child.is::<ast::Expr>());
then {
@ -541,9 +569,9 @@ fn import_item_completions<'a>(
ctx.snippet_completion("*", "*", "Import everything.");
}
for (name, value, _) in scope.iter() {
for (name, binding) in scope.iter() {
if existing.iter().all(|item| item.original_name().as_str() != name) {
ctx.value_completion(name.clone(), value);
ctx.value_completion(name.clone(), binding.read());
}
}
}
@ -656,10 +684,10 @@ fn complete_params(ctx: &mut CompletionContext) -> bool {
if let Some(args) = parent.get().cast::<ast::Args>();
if let Some(grand) = parent.parent();
if let Some(expr) = grand.get().cast::<ast::Expr>();
let set = matches!(expr, ast::Expr::Set(_));
let set = matches!(expr, ast::Expr::SetRule(_));
if let Some(callee) = match expr {
ast::Expr::FuncCall(call) => Some(call.callee()),
ast::Expr::Set(set) => Some(set.target()),
ast::Expr::SetRule(set) => Some(set.target()),
_ => None,
};
then {
@ -673,7 +701,10 @@ fn complete_params(ctx: &mut CompletionContext) -> bool {
let mut deciding = ctx.leaf.clone();
while !matches!(
deciding.kind(),
SyntaxKind::LeftParen | SyntaxKind::Comma | SyntaxKind::Colon
SyntaxKind::LeftParen
| SyntaxKind::RightParen
| SyntaxKind::Comma
| SyntaxKind::Colon
) {
let Some(prev) = deciding.prev_leaf() else { break };
deciding = prev;
@ -820,7 +851,9 @@ fn param_value_completions<'a>(
/// Returns which file extensions to complete for the given parameter if any.
fn path_completion(func: &Func, param: &ParamInfo) -> Option<&'static [&'static str]> {
Some(match (func.name(), param.name) {
(Some("image"), "source") => &["png", "jpg", "jpeg", "gif", "svg", "svgz"],
(Some("image"), "source") => {
&["png", "jpg", "jpeg", "gif", "svg", "svgz", "webp"]
}
(Some("csv"), "source") => &["csv"],
(Some("plugin"), "source") => &["wasm"],
(Some("cbor"), "source") => &["cbor"],
@ -846,13 +879,11 @@ fn resolve_global_callee<'a>(
) -> Option<&'a Func> {
let globals = globals(ctx.world, ctx.leaf);
let value = match callee {
ast::Expr::Ident(ident) => globals.get(&ident)?,
ast::Expr::Ident(ident) => globals.get(&ident)?.read(),
ast::Expr::FieldAccess(access) => match access.target() {
ast::Expr::Ident(target) => match globals.get(&target)? {
Value::Module(module) => module.field(&access.field()).ok()?,
Value::Func(func) => func.field(&access.field()).ok()?,
_ => return None,
},
ast::Expr::Ident(target) => {
globals.get(&target)?.read().scope()?.get(&access.field())?.read()
}
_ => return None,
},
_ => return None,
@ -1062,6 +1093,24 @@ fn code_completions(ctx: &mut CompletionContext, hash: bool) {
}
}
/// See if the AST node is somewhere within a show rule applying to equations
fn is_in_equation_show_rule(leaf: &LinkedNode<'_>) -> bool {
let mut node = leaf;
while let Some(parent) = node.parent() {
if_chain! {
if let Some(expr) = parent.get().cast::<ast::Expr>();
if let ast::Expr::ShowRule(show) = expr;
if let Some(ast::Expr::FieldAccess(field)) = show.selector();
if field.field().as_str() == "equation";
then {
return true;
}
}
node = parent;
}
false
}
/// Context for autocompletion.
struct CompletionContext<'a> {
world: &'a (dyn IdeWorld + 'a),
@ -1133,10 +1182,12 @@ impl<'a> CompletionContext<'a> {
/// Add completions for all font families.
fn font_completions(&mut self) {
let equation = self.before_window(25).contains("equation");
let equation = is_in_equation_show_rule(self.leaf);
for (family, iter) in self.world.book().families() {
let detail = summarize_font_family(iter);
if !equation || family.contains("Math") {
let variants: Vec<_> = iter.collect();
let is_math = variants.iter().any(|f| f.flags.contains(FontFlags::MATH));
let detail = summarize_font_family(variants);
if !equation || is_math {
self.str_completion(
family,
Some(CompletionKind::Font),
@ -1444,7 +1495,7 @@ impl<'a> CompletionContext<'a> {
let mut defined = BTreeMap::<EcoString, Option<Value>>::new();
named_items(self.world, self.leaf.clone(), |item| {
let name = item.name();
if !name.is_empty() && item.value().as_ref().map_or(true, filter) {
if !name.is_empty() && item.value().as_ref().is_none_or(filter) {
defined.insert(name.clone(), item.value());
}
@ -1464,7 +1515,8 @@ impl<'a> CompletionContext<'a> {
}
}
for (name, value, _) in globals(self.world, self.leaf).iter() {
for (name, binding) in globals(self.world, self.leaf).iter() {
let value = binding.read();
if filter(value) && !defined.contains_key(name) {
self.value_completion_full(Some(name.clone()), value, parens, None, None);
}
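One of the small cleanups above replaces `map_or(true, filter)` with `Option::is_none_or`, which has the same truth table and was stabilized in Rust 1.82 (compatible with the toolchain bump in this merge):

    fn main() {
        let missing: Option<i32> = None;
        assert!(missing.is_none_or(|x| x > 0)); // vacuously true for None
        assert!(Some(3).is_none_or(|x| x > 0));
        assert!(!Some(-3).is_none_or(|x| x > 0));
    }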
@ -1624,6 +1676,13 @@ mod tests {
test("#{() .a}", -2).must_include(["at", "any", "all"]);
}
/// Test that autocomplete in math uses the correct global scope.
#[test]
fn test_autocomplete_math_scope() {
test("$#col$", -2).must_include(["colbreak"]).must_exclude(["colon"]);
test("$col$", -2).must_include(["colon"]).must_exclude(["colbreak"]);
}
/// Test that the `before_window` doesn't slice into invalid byte
/// boundaries.
#[test]
@ -1642,7 +1701,7 @@ mod tests {
// Then, add the invalid `#cite` call. Had the document been invalid
// initially, we would have no populated document to autocomplete with.
let end = world.main.len_bytes();
let end = world.main.text().len();
world.main.edit(end..end, " #cite()");
test_with_doc(&world, -2, doc.as_ref())
@ -1678,6 +1737,8 @@ mod tests {
test("#numbering(\"foo\", 1, )", -2)
.must_include(["integer"])
.must_exclude(["string"]);
// After argument list no completions.
test("#numbering()", -1).must_exclude(["string"]);
}
/// Test that autocompletion for values of known type picks up nested
@ -1748,4 +1809,43 @@ mod tests {
.must_include(["this", "that"])
.must_exclude(["*", "figure"]);
}
#[test]
fn test_autocomplete_type_methods() {
test("#\"hello\".", -1).must_include(["len", "contains"]);
test("#table().", -1).must_exclude(["cell"]);
}
#[test]
fn test_autocomplete_content_methods() {
test("#show outline.entry: it => it.\n#outline()\n= Hi", 30)
.must_include(["indented", "body", "page"]);
}
#[test]
fn test_autocomplete_symbol_variants() {
test("#sym.arrow.", -1)
.must_include(["r", "dashed"])
.must_exclude(["cases"]);
test("$ arrow. $", -3)
.must_include(["r", "dashed"])
.must_exclude(["cases"]);
}
#[test]
fn test_autocomplete_fonts() {
test("#text(font:)", -2)
.must_include(["\"Libertinus Serif\"", "\"New Computer Modern Math\""]);
test("#show link: set text(font: )", -2)
.must_include(["\"Libertinus Serif\"", "\"New Computer Modern Math\""]);
test("#show math.equation: set text(font: )", -2)
.must_include(["\"New Computer Modern Math\""])
.must_exclude(["\"Libertinus Serif\""]);
test("#show math.equation: it => { set text(font: )\nit }", -7)
.must_include(["\"New Computer Modern Math\""])
.must_exclude(["\"Libertinus Serif\""]);
}
}
@ -55,8 +55,8 @@ pub fn definition(
}
}
if let Some(value) = globals(world, &leaf).get(&name) {
return Some(Definition::Std(value.clone()));
if let Some(binding) = globals(world, &leaf).get(&name) {
return Some(Definition::Std(binding.read().clone()));
}
}
@ -3,7 +3,7 @@ use std::num::NonZeroUsize;
use typst::layout::{Frame, FrameItem, PagedDocument, Point, Position, Size};
use typst::model::{Destination, Url};
use typst::syntax::{FileId, LinkedNode, Side, Source, Span, SyntaxKind};
use typst::visualize::Geometry;
use typst::visualize::{Curve, CurveItem, FillRule, Geometry};
use typst::WorldExt;
use crate::IdeWorld;
@ -53,10 +53,20 @@ pub fn jump_from_click(
for (mut pos, item) in frame.items().rev() {
match item {
FrameItem::Group(group) => {
// TODO: Handle transformation.
if let Some(span) =
jump_from_click(world, document, &group.frame, click - pos)
{
let pos = click - pos;
if let Some(clip) = &group.clip {
if !clip.contains(FillRule::NonZero, pos) {
continue;
}
}
// Realistic transforms should always be invertible.
// An example of one that isn't is a scale of 0, which would
// not be clickable anyway.
let Some(inv_transform) = group.transform.invert() else {
continue;
};
let pos = pos.transform_inf(inv_transform);
if let Some(span) = jump_from_click(world, document, &group.frame, pos) {
return Some(span);
}
}
@ -73,7 +83,10 @@ pub fn jump_from_click(
let Some(id) = span.id() else { continue };
let source = world.source(id).ok()?;
let node = source.find(span)?;
let pos = if node.kind() == SyntaxKind::Text {
let pos = if matches!(
node.kind(),
SyntaxKind::Text | SyntaxKind::MathText
) {
let range = node.range();
let mut offset = range.start + usize::from(span_offset);
if (click.x - pos.x) > width / 2.0 {
@ -91,12 +104,35 @@ pub fn jump_from_click(
}
FrameItem::Shape(shape, span) => {
let Geometry::Rect(size) = shape.geometry else { continue };
if is_in_rect(pos, size, click) {
if shape.fill.is_some() {
let within = match &shape.geometry {
Geometry::Line(..) => false,
Geometry::Rect(size) => is_in_rect(pos, *size, click),
Geometry::Curve(curve) => {
curve.contains(shape.fill_rule, click - pos)
}
};
if within {
return Jump::from_span(world, *span);
}
}
if let Some(stroke) = &shape.stroke {
let within = !stroke.thickness.approx_empty() && {
// This curve is rooted at (0, 0), not `pos`.
let base_curve = match &shape.geometry {
Geometry::Line(to) => &Curve(vec![CurveItem::Line(*to)]),
Geometry::Rect(size) => &Curve::rect(*size),
Geometry::Curve(curve) => curve,
};
base_curve.stroke_contains(stroke, click - pos)
};
if within {
return Jump::from_span(world, *span);
}
}
}
FrameItem::Image(_, size, span) if is_in_rect(pos, *size, click) => {
return Jump::from_span(world, *span);
}
@ -115,7 +151,7 @@ pub fn jump_from_cursor(
cursor: usize,
) -> Vec<Position> {
fn is_text(node: &LinkedNode) -> bool {
node.get().kind() == SyntaxKind::Text
matches!(node.kind(), SyntaxKind::Text | SyntaxKind::MathText)
}
let root = LinkedNode::new(source.root());
@ -143,9 +179,8 @@ pub fn jump_from_cursor(
fn find_in_frame(frame: &Frame, span: Span) -> Option<Point> {
for (mut pos, item) in frame.items() {
if let FrameItem::Group(group) = item {
// TODO: Handle transformation.
if let Some(point) = find_in_frame(&group.frame, span) {
return Some(point + pos);
return Some(pos + point.transform(group.transform));
}
}
@ -261,6 +296,102 @@ mod tests {
test_click(s, point(21.0, 12.0), cursor(56));
}
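The click-jumping code above now distinguishes fills (tested with curve containment) from strokes (tested with `stroke_contains`), while rectangles still go through `is_in_rect`. That rectangle check is presumably just an axis-aligned bounds test; a sketch of the idea with plain tuples:

    fn in_rect(origin: (f64, f64), size: (f64, f64), click: (f64, f64)) -> bool {
        click.0 >= origin.0
            && click.0 <= origin.0 + size.0
            && click.1 >= origin.1
            && click.1 <= origin.1 + size.1
    }

    fn main() {
        assert!(in_rect((0.0, 0.0), (10.0, 10.0), (5.0, 5.0)));
        assert!(!in_rect((0.0, 0.0), (10.0, 10.0), (15.0, 5.0)));
    }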
#[test]
fn test_jump_from_click_math() {
test_click("$a + b$", point(28.0, 14.0), cursor(5));
}
#[test]
fn test_jump_from_click_transform_clip() {
let margin = point(10.0, 10.0);
test_click(
"#rect(width: 20pt, height: 20pt, fill: black)",
point(10.0, 10.0) + margin,
cursor(1),
);
test_click(
"#rect(width: 60pt, height: 10pt, fill: black)",
point(5.0, 30.0) + margin,
None,
);
test_click(
"#rotate(90deg, origin: bottom + left, rect(width: 60pt, height: 10pt, fill: black))",
point(5.0, 30.0) + margin,
cursor(38),
);
test_click(
"#scale(x: 300%, y: 300%, origin: top + left, rect(width: 10pt, height: 10pt, fill: black))",
point(20.0, 20.0) + margin,
cursor(45),
);
test_click(
"#box(width: 10pt, height: 10pt, clip: true, scale(x: 300%, y: 300%, \
origin: top + left, rect(width: 10pt, height: 10pt, fill: black)))",
point(20.0, 20.0) + margin,
None,
);
test_click(
"#box(width: 10pt, height: 10pt, clip: false, rect(width: 30pt, height: 30pt, fill: black))",
point(20.0, 20.0) + margin,
cursor(45),
);
test_click(
"#box(width: 10pt, height: 10pt, clip: true, rect(width: 30pt, height: 30pt, fill: black))",
point(20.0, 20.0) + margin,
None,
);
test_click(
"#rotate(90deg, origin: bottom + left)[hello world]",
point(5.0, 15.0) + margin,
cursor(40),
);
}
#[test]
fn test_jump_from_click_shapes() {
let margin = point(10.0, 10.0);
test_click(
"#rect(width: 30pt, height: 30pt, fill: black)",
point(15.0, 15.0) + margin,
cursor(1),
);
let circle = "#circle(width: 30pt, height: 30pt, fill: black)";
test_click(circle, point(15.0, 15.0) + margin, cursor(1));
test_click(circle, point(1.0, 1.0) + margin, None);
let bowtie =
"#polygon(fill: black, (0pt, 0pt), (20pt, 20pt), (20pt, 0pt), (0pt, 20pt))";
test_click(bowtie, point(1.0, 2.0) + margin, cursor(1));
test_click(bowtie, point(2.0, 1.0) + margin, None);
test_click(bowtie, point(19.0, 10.0) + margin, cursor(1));
let evenodd = r#"#polygon(fill: black, fill-rule: "even-odd",
(0pt, 10pt), (30pt, 10pt), (30pt, 20pt), (20pt, 20pt),
(20pt, 0pt), (10pt, 0pt), (10pt, 30pt), (20pt, 30pt),
(20pt, 20pt), (0pt, 20pt))"#;
test_click(evenodd, point(15.0, 15.0) + margin, None);
test_click(evenodd, point(5.0, 15.0) + margin, cursor(1));
test_click(evenodd, point(15.0, 5.0) + margin, cursor(1));
}
#[test]
fn test_jump_from_click_shapes_stroke() {
let margin = point(10.0, 10.0);
let rect =
"#place(dx: 10pt, dy: 10pt, rect(width: 10pt, height: 10pt, stroke: 5pt))";
test_click(rect, point(15.0, 15.0) + margin, None);
test_click(rect, point(10.0, 15.0) + margin, cursor(27));
test_click(
"#line(angle: 45deg, length: 10pt, stroke: 2pt)",
point(2.0, 2.0) + margin,
cursor(1),
);
}
#[test]
fn test_jump_from_cursor() {
let s = "*Hello* #box[ABC] World";
@ -268,6 +399,20 @@ mod tests {
test_cursor(s, 14, pos(1, 37.55, 16.58));
}
#[test]
fn test_jump_from_cursor_math() {
test_cursor("$a + b$", -3, pos(1, 27.51, 16.83));
}
#[test]
fn test_jump_from_cursor_transform() {
test_cursor(
r#"#rotate(90deg, origin: bottom + left, [hello world])"#,
-5,
pos(1, 10.0, 16.58),
);
}
#[test]
fn test_backlink() {
let s = "#footnote[Hi]";
@ -76,8 +76,12 @@ pub fn named_items<T>(
// ```
Some(ast::Imports::Wildcard) => {
if let Some(scope) = source_value.and_then(Value::scope) {
for (name, value, span) in scope.iter() {
let item = NamedItem::Import(name, span, Some(value));
for (name, binding) in scope.iter() {
let item = NamedItem::Import(
name,
binding.span(),
Some(binding.read()),
);
if let Some(res) = recv(item) {
return Some(res);
}
@ -89,24 +93,26 @@ pub fn named_items<T>(
// ```
Some(ast::Imports::Items(items)) => {
for item in items.iter() {
let mut iter = item.path().iter();
let mut binding = source_value
.and_then(Value::scope)
.zip(iter.next())
.and_then(|(scope, first)| scope.get(&first));
for ident in iter {
binding = binding.and_then(|binding| {
binding.read().scope()?.get(&ident)
});
}
let bound = item.bound_name();
let (span, value) = match binding {
Some(binding) => (binding.span(), Some(binding.read())),
None => (bound.span(), None),
};
let (span, value) = item.path().iter().fold(
(bound.span(), source_value),
|(span, value), path_ident| {
let scope = value.and_then(|v| v.scope());
let span = scope
.and_then(|s| s.get_span(&path_ident))
.unwrap_or(Span::detached())
.or(span);
let value = scope.and_then(|s| s.get(&path_ident));
(span, value)
},
);
if let Some(res) =
recv(NamedItem::Import(bound.get(), span, value))
{
let item = NamedItem::Import(bound.get(), span, value);
if let Some(res) = recv(item) {
return Some(res);
}
}
@ -226,7 +232,9 @@ pub fn deref_target(node: LinkedNode) -> Option<DerefTarget<'_>> {
ast::Expr::FuncCall(call) => {
DerefTarget::Callee(expr_node.find(call.callee().span())?)
}
ast::Expr::Set(set) => DerefTarget::Callee(expr_node.find(set.target().span())?),
ast::Expr::SetRule(set) => {
DerefTarget::Callee(expr_node.find(set.target().span())?)
}
ast::Expr::Ident(_) | ast::Expr::MathIdent(_) | ast::Expr::FieldAccess(_) => {
DerefTarget::VarAccess(expr_node)
}
@ -97,7 +97,7 @@ impl World for TestWorld {
}
fn font(&self, index: usize) -> Option<Font> {
Some(self.base.fonts[index].clone())
self.base.fonts.get(index).cloned()
}
fn today(&self, _: Option<i64>) -> Option<Datetime> {
@ -202,7 +202,8 @@ impl WorldLike for &str {
}
}
/// Specifies a position in a file for a test.
/// Specifies a position in a file for a test. Negative numbers index from the
/// back. `-1` is at the very back.
pub trait FilePos {
fn resolve(self, world: &TestWorld) -> (Source, usize);
}
@ -228,7 +229,7 @@ impl FilePos for (&str, isize) {
#[track_caller]
fn cursor(source: &Source, cursor: isize) -> usize {
if cursor < 0 {
source.len_bytes().checked_add_signed(cursor + 1).unwrap()
source.text().len().checked_add_signed(cursor + 1).unwrap()
} else {
cursor as usize
}
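The test helpers above switch from `len_bytes()` to `text().len()` while keeping the negative-cursor convention, where `-1` addresses the position right after the last byte. The arithmetic is just `checked_add_signed`:

    fn resolve(len: usize, cursor: isize) -> usize {
        if cursor < 0 {
            len.checked_add_signed(cursor + 1).unwrap()
        } else {
            cursor as usize
        }
    }

    fn main() {
        assert_eq!(resolve(10, -1), 10); // the very back
        assert_eq!(resolve(10, -2), 9);
        assert_eq!(resolve(10, 4), 4);
    }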
@ -3,7 +3,7 @@ use std::fmt::Write;
use ecow::{eco_format, EcoString};
use if_chain::if_chain;
use typst::engine::Sink;
use typst::foundations::{repr, Capturer, CastInfo, Repr, Value};
use typst::foundations::{repr, Binding, Capturer, CastInfo, Repr, Value};
use typst::layout::{Length, PagedDocument};
use typst::syntax::ast::AstNode;
use typst::syntax::{ast, LinkedNode, Side, Source, SyntaxKind};
@ -86,7 +86,7 @@ fn expr_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Tooltip> {
*count += 1;
continue;
} else if *count > 1 {
write!(pieces.last_mut().unwrap(), " (x{count})").unwrap();
write!(pieces.last_mut().unwrap(), " (×{count})").unwrap();
}
}
pieces.push(value.repr());
@ -95,7 +95,7 @@ fn expr_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Tooltip> {
if let Some((_, count)) = last {
if count > 1 {
write!(pieces.last_mut().unwrap(), " (x{count})").unwrap();
write!(pieces.last_mut().unwrap(), " (×{count})").unwrap();
}
}
@ -201,12 +201,17 @@ fn named_param_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Toolti
if let Some(expr) = grand_grand.cast::<ast::Expr>();
if let Some(ast::Expr::Ident(callee)) = match expr {
ast::Expr::FuncCall(call) => Some(call.callee()),
ast::Expr::Set(set) => Some(set.target()),
ast::Expr::SetRule(set) => Some(set.target()),
_ => None,
};
// Find metadata about the function.
if let Some(Value::Func(func)) = world.library().global.scope().get(&callee);
if let Some(Value::Func(func)) = world
.library()
.global
.scope()
.get(&callee)
.map(Binding::read);
then { (func, named) }
else { return None; }
};
@ -264,7 +269,7 @@ fn font_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Tooltip> {
.find(|&(family, _)| family.to_lowercase().as_str() == lower.as_str());
then {
let detail = summarize_font_family(iter);
let detail = summarize_font_family(iter.collect());
return Some(Tooltip::Text(detail));
}
};
@ -352,6 +357,13 @@ mod tests {
.must_be_text("This closure captures `f` and `y`");
}
#[test]
fn test_tooltip_import() {
let world = TestWorld::new("#import \"other.typ\": a, b")
.with_source("other.typ", "#let (a, b, c) = (1, 2, 3)");
test(&world, -5, Side::After).must_be_code("1");
}
#[test]
fn test_tooltip_star_import() {
let world = TestWorld::new("#import \"other.typ\": *")
@ -359,4 +371,11 @@ mod tests {
test(&world, -2, Side::Before).must_be_none();
test(&world, -2, Side::After).must_be_text("This star imports `a`, `b`, and `c`");
}
#[test]
fn test_tooltip_field_call() {
let world = TestWorld::new("#import \"other.typ\"\n#other.f()")
.with_source("other.typ", "#let f = (x) => 1");
test(&world, -4, Side::After).must_be_code("(..) => ..");
}
}
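The tooltip now marks repeated context values with a multiplication sign instead of a plain `x`. The `write!`-into-`String` pattern used for that marker, in isolation:

    use std::fmt::Write;

    fn main() {
        let mut piece = String::from("(1, 2)");
        write!(piece, " (×{})", 3).unwrap();
        assert_eq!(piece, "(1, 2) (×3)");
    }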
@ -77,23 +77,20 @@ pub fn plain_docs_sentence(docs: &str) -> EcoString {
}
/// Create a short description of a font family.
pub fn summarize_font_family<'a>(
variants: impl Iterator<Item = &'a FontInfo>,
) -> EcoString {
let mut infos: Vec<_> = variants.collect();
infos.sort_by_key(|info| info.variant);
pub fn summarize_font_family(mut variants: Vec<&FontInfo>) -> EcoString {
variants.sort_by_key(|info| info.variant);
let mut has_italic = false;
let mut min_weight = u16::MAX;
let mut max_weight = 0;
for info in &infos {
for info in &variants {
let weight = info.variant.weight.to_number();
has_italic |= info.variant.style == FontStyle::Italic;
min_weight = min_weight.min(weight);
max_weight = min_weight.max(weight);
}
let count = infos.len();
let count = variants.len();
let mut detail = eco_format!("{count} variant{}.", if count == 1 { "" } else { "s" });
if min_weight == max_weight {
@ -117,7 +114,9 @@ pub fn globals<'a>(world: &'a dyn IdeWorld, leaf: &LinkedNode) -> &'a Scope {
| Some(SyntaxKind::Math)
| Some(SyntaxKind::MathFrac)
| Some(SyntaxKind::MathAttach)
);
) && leaf
.prev_leaf()
.is_none_or(|prev| !matches!(prev.kind(), SyntaxKind::Hash));
let library = world.library();
if in_math {
@ -171,7 +170,7 @@ where
self.find_iter(content.fields().iter().map(|(_, v)| v))?;
}
Value::Module(module) => {
self.find_iter(module.scope().iter().map(|(_, v, _)| v))?;
self.find_iter(module.scope().iter().map(|(_, b)| b.read()))?;
}
_ => {}
}
@ -19,10 +19,13 @@ typst-utils = { workspace = true }
dirs = { workspace = true, optional = true }
ecow = { workspace = true }
env_proxy = { workspace = true, optional = true }
fastrand = { workspace = true, optional = true }
flate2 = { workspace = true, optional = true }
fontdb = { workspace = true, optional = true }
native-tls = { workspace = true, optional = true }
once_cell = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tar = { workspace = true, optional = true }
ureq = { workspace = true, optional = true }
@ -41,7 +44,7 @@ fonts = ["dep:fontdb", "fontdb/memmap", "fontdb/fontconfig"]
downloads = ["dep:env_proxy", "dep:native-tls", "dep:ureq", "dep:openssl"]
# Add package downloading utilities, implies `downloads`
packages = ["downloads", "dep:dirs", "dep:flate2", "dep:tar"]
packages = ["downloads", "dep:dirs", "dep:flate2", "dep:tar", "dep:fastrand"]
# Embeds some fonts into the binary:
# - For text: Libertinus Serif, New Computer Modern
@ -128,8 +128,7 @@ impl Downloader {
}
// Configure native TLS.
let connector =
tls.build().map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;
let connector = tls.build().map_err(io::Error::other)?;
builder = builder.tls_connector(Arc::new(connector));
builder.build().get(url).call()
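The TLS setup above uses `io::Error::other`, a shorthand for wrapping an arbitrary error with `ErrorKind::Other` that has been available since Rust 1.74:

    use std::io;

    fn main() {
        let err = io::Error::other("tls backend failed");
        assert_eq!(err.kind(), io::ErrorKind::Other);
    }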
@ -1,14 +1,14 @@
|
||||
//! Download and unpack packages and package indices.
|
||||
|
||||
use std::fs;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use ecow::eco_format;
|
||||
use once_cell::sync::OnceCell;
|
||||
use serde::Deserialize;
|
||||
use typst_library::diag::{bail, PackageError, PackageResult, StrResult};
|
||||
use typst_syntax::package::{
|
||||
PackageInfo, PackageSpec, PackageVersion, VersionlessPackageSpec,
|
||||
};
|
||||
use typst_syntax::package::{PackageSpec, PackageVersion, VersionlessPackageSpec};
|
||||
|
||||
use crate::download::{Downloader, Progress};
|
||||
|
||||
@ -32,7 +32,7 @@ pub struct PackageStorage {
|
||||
/// The downloader used for fetching the index and packages.
|
||||
downloader: Downloader,
|
||||
/// The cached index of the default namespace.
|
||||
index: OnceCell<Vec<PackageInfo>>,
|
||||
index: OnceCell<Vec<serde_json::Value>>,
|
||||
}
|
||||
|
||||
impl PackageStorage {
|
||||
@ -42,6 +42,18 @@ impl PackageStorage {
|
||||
package_cache_path: Option<PathBuf>,
|
||||
package_path: Option<PathBuf>,
|
||||
downloader: Downloader,
|
||||
) -> Self {
|
||||
Self::with_index(package_cache_path, package_path, downloader, OnceCell::new())
|
||||
}
|
||||
|
||||
/// Creates a new package storage with a pre-defined index.
|
||||
///
|
||||
/// Useful for testing.
|
||||
fn with_index(
|
||||
package_cache_path: Option<PathBuf>,
|
||||
package_path: Option<PathBuf>,
|
||||
downloader: Downloader,
|
||||
index: OnceCell<Vec<serde_json::Value>>,
|
||||
) -> Self {
|
||||
Self {
|
||||
package_cache_path: package_cache_path.or_else(|| {
|
||||
@ -51,7 +63,7 @@ impl PackageStorage {
|
||||
dirs::data_dir().map(|data_dir| data_dir.join(DEFAULT_PACKAGES_SUBDIR))
|
||||
}),
|
||||
downloader,
|
||||
index: OnceCell::new(),
|
||||
index,
|
||||
}
|
||||
}
|
||||
|
||||
@ -66,7 +78,8 @@ impl PackageStorage {
|
||||
self.package_path.as_deref()
|
||||
}
|
||||
|
||||
/// Make a package available in the on-disk.
|
||||
/// Makes a package available on-disk and returns the path at which it is
|
||||
/// located (will be either in the cache or package directory).
|
||||
pub fn prepare_package(
|
||||
&self,
|
||||
spec: &PackageSpec,
|
||||
@ -89,7 +102,7 @@ impl PackageStorage {
|
||||
|
||||
// Download from network if it doesn't exist yet.
|
||||
if spec.namespace == DEFAULT_NAMESPACE {
|
||||
self.download_package(spec, &dir, progress)?;
|
||||
self.download_package(spec, cache_dir, progress)?;
|
||||
if dir.exists() {
|
||||
return Ok(dir);
|
||||
}
|
||||
@ -99,7 +112,7 @@ impl PackageStorage {
|
||||
Err(PackageError::NotFound(spec.clone()))
|
||||
}
|
||||
|
||||
/// Try to determine the latest version of a package.
|
||||
/// Tries to determine the latest version of a package.
|
||||
pub fn determine_latest_version(
|
||||
&self,
|
||||
spec: &VersionlessPackageSpec,
|
||||
@ -109,6 +122,7 @@ impl PackageStorage {
|
||||
// version.
|
||||
self.download_index()?
|
||||
.iter()
|
||||
.filter_map(|value| MinimalPackageInfo::deserialize(value).ok())
|
||||
.filter(|package| package.name == spec.name)
|
||||
.map(|package| package.version)
|
||||
.max()
|
||||
@ -131,7 +145,7 @@ impl PackageStorage {
|
||||
}
|
||||
|
||||
/// Download the package index. The result of this is cached for efficiency.
|
||||
pub fn download_index(&self) -> StrResult<&[PackageInfo]> {
|
||||
fn download_index(&self) -> StrResult<&[serde_json::Value]> {
|
||||
self.index
|
||||
.get_or_try_init(|| {
|
||||
let url = format!("{DEFAULT_REGISTRY}/{DEFAULT_NAMESPACE}/index.json");
|
||||
@ -152,10 +166,10 @@ impl PackageStorage {
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics if the package spec namespace isn't `DEFAULT_NAMESPACE`.
|
||||
pub fn download_package(
|
||||
fn download_package(
|
||||
&self,
|
||||
spec: &PackageSpec,
|
||||
package_dir: &Path,
|
||||
cache_dir: &Path,
|
||||
progress: &mut dyn Progress,
|
||||
) -> PackageResult<()> {
|
||||
assert_eq!(spec.namespace, DEFAULT_NAMESPACE);
|
||||
@ -179,10 +193,132 @@ impl PackageStorage {
|
||||
}
|
||||
};
|
||||
|
||||
// The directory in which the package's version lives.
|
||||
let base_dir = cache_dir.join(format!("{}/{}", spec.namespace, spec.name));
|
||||
|
||||
// The place at which the specific package version will live in the end.
|
||||
let package_dir = base_dir.join(format!("{}", spec.version));
|
||||
|
||||
// To prevent multiple Typst instances from interferring, we download
|
||||
// into a temporary directory first and then move this directory to
|
||||
// its final destination.
|
||||
//
|
||||
// In the `rename` function's documentation it is stated:
|
||||
// > This will not work if the new name is on a different mount point.
|
||||
//
|
||||
// By locating the temporary directory directly next to where the
|
||||
// package directory will live, we are (trying our best) making sure
|
||||
// that `tempdir` and `package_dir` are on the same mount point.
|
||||
let tempdir = Tempdir::create(base_dir.join(format!(
|
||||
".tmp-{}-{}",
|
||||
spec.version,
|
||||
fastrand::u32(..),
|
||||
)))
|
||||
.map_err(|err| error("failed to create temporary package directory", err))?;
|
||||
|
||||
// Decompress the archive into the temporary directory.
|
||||
let decompressed = flate2::read::GzDecoder::new(data.as_slice());
|
||||
tar::Archive::new(decompressed).unpack(package_dir).map_err(|err| {
|
||||
fs::remove_dir_all(package_dir).ok();
|
||||
PackageError::MalformedArchive(Some(eco_format!("{err}")))
|
||||
})
|
||||
tar::Archive::new(decompressed)
|
||||
.unpack(&tempdir)
|
||||
.map_err(|err| PackageError::MalformedArchive(Some(eco_format!("{err}"))))?;
|
||||
|
||||
// When trying to move (i.e., `rename`) the directory from one place to
|
||||
// another and the target/destination directory is empty, then the
|
||||
// operation will succeed (if it's atomic, or hardware doesn't fail, or
|
||||
// power doesn't go off, etc.). If however the target directory is not
|
||||
// empty, i.e., another instance already successfully moved the package,
|
||||
// then we can safely ignore the `DirectoryNotEmpty` error.
|
||||
//
|
||||
// This means that we do not check the integrity of an existing moved
|
||||
// package, just like we don't check the integrity if the package
|
||||
// directory already existed in the first place. If situations with
|
||||
// broken packages still occur even with the rename safeguard, we might
|
||||
// consider more complex solutions like file locking or checksums.
|
||||
match fs::rename(&tempdir, &package_dir) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(err) if err.kind() == io::ErrorKind::DirectoryNotEmpty => Ok(()),
|
||||
Err(err) => Err(error("failed to move downloaded package directory", err)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Minimal information required about a package to determine its latest
|
||||
/// version.
|
||||
#[derive(Deserialize)]
|
||||
struct MinimalPackageInfo {
|
||||
name: String,
|
||||
version: PackageVersion,
|
||||
}
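`MinimalPackageInfo` above exists so the index can stay as raw `serde_json::Value`s and be deserialized lazily, one entry at a time; a malformed entry is skipped rather than failing the whole index. The shape of that pattern, under the assumption that `serde` and `serde_json` are available as in the crate:

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct Minimal {
        name: String,
    }

    fn names(raw: &[serde_json::Value]) -> Vec<String> {
        raw.iter()
            .filter_map(|v| Minimal::deserialize(v).ok()) // skip bad entries
            .map(|m| m.name)
            .collect()
    }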
|
||||
|
||||
/// A temporary directory that is a automatically cleaned up.
|
||||
struct Tempdir(PathBuf);
|
||||
|
||||
impl Tempdir {
|
||||
/// Creates a directory at the path and auto-cleans it.
|
||||
fn create(path: PathBuf) -> io::Result<Self> {
|
||||
std::fs::create_dir_all(&path)?;
|
||||
Ok(Self(path))
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Tempdir {
|
||||
fn drop(&mut self) {
|
||||
_ = fs::remove_dir_all(&self.0);
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<Path> for Tempdir {
|
||||
fn as_ref(&self) -> &Path {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// Enriches an I/O error with a message and turns it into a
|
||||
/// `PackageError::Other`.
|
||||
#[cold]
|
||||
fn error(message: &str, err: io::Error) -> PackageError {
|
||||
PackageError::Other(Some(eco_format!("{message}: {err}")))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn lazy_deser_index() {
|
||||
let storage = PackageStorage::with_index(
|
||||
None,
|
||||
None,
|
||||
Downloader::new("typst/test"),
|
||||
OnceCell::with_value(vec![
|
||||
serde_json::json!({
|
||||
"name": "charged-ieee",
|
||||
"version": "0.1.0",
|
||||
"entrypoint": "lib.typ",
|
||||
}),
|
||||
serde_json::json!({
|
||||
"name": "unequivocal-ams",
|
||||
// This version number is currently not valid, so this package
|
||||
// can't be parsed.
|
||||
"version": "0.2.0-dev",
|
||||
"entrypoint": "lib.typ",
|
||||
}),
|
||||
]),
|
||||
);
|
||||
|
||||
let ieee_version = storage.determine_latest_version(&VersionlessPackageSpec {
|
||||
namespace: "preview".into(),
|
||||
name: "charged-ieee".into(),
|
||||
});
|
||||
assert_eq!(ieee_version, Ok(PackageVersion { major: 0, minor: 1, patch: 0 }));
|
||||
|
||||
let ams_version = storage.determine_latest_version(&VersionlessPackageSpec {
|
||||
namespace: "preview".into(),
|
||||
name: "unequivocal-ams".into(),
|
||||
});
|
||||
assert_eq!(
|
||||
ams_version,
|
||||
Err("failed to find package @preview/unequivocal-ams".into())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -30,6 +30,7 @@ icu_provider_adapters = { workspace = true }
|
||||
icu_provider_blob = { workspace = true }
|
||||
icu_segmenter = { workspace = true }
|
||||
kurbo = { workspace = true }
|
||||
memchr = { workspace = true }
|
||||
rustybuzz = { workspace = true }
|
||||
smallvec = { workspace = true }
|
||||
ttf-parser = { workspace = true }
|
||||
|
@ -124,7 +124,6 @@ impl<'a> Collector<'a, '_, '_> {
|
||||
styles,
|
||||
self.base,
|
||||
self.expand,
|
||||
None,
|
||||
)?
|
||||
.into_frames();
|
||||
|
||||
@ -133,7 +132,8 @@ impl<'a> Collector<'a, '_, '_> {
|
||||
self.output.push(Child::Tag(&elem.tag));
|
||||
}
|
||||
|
||||
self.lines(lines, styles);
|
||||
let leading = ParElem::leading_in(styles);
|
||||
self.lines(lines, leading, styles);
|
||||
|
||||
for (c, _) in &self.children[end..] {
|
||||
let elem = c.to_packed::<TagElem>().unwrap();
|
||||
@ -169,10 +169,12 @@ impl<'a> Collector<'a, '_, '_> {
|
||||
)?
|
||||
.into_frames();
|
||||
|
||||
let spacing = ParElem::spacing_in(styles);
|
||||
let spacing = elem.spacing(styles);
|
||||
let leading = elem.leading(styles);
|
||||
|
||||
self.output.push(Child::Rel(spacing.into(), 4));
|
||||
|
||||
self.lines(lines, styles);
|
||||
self.lines(lines, leading, styles);
|
||||
|
||||
self.output.push(Child::Rel(spacing.into(), 4));
|
||||
self.par_situation = ParSituation::Consecutive;
|
||||
@ -181,9 +183,8 @@ impl<'a> Collector<'a, '_, '_> {
|
||||
}
|
||||
|
||||
/// Collect laid-out lines.
|
||||
fn lines(&mut self, lines: Vec<Frame>, styles: StyleChain<'a>) {
|
||||
fn lines(&mut self, lines: Vec<Frame>, leading: Abs, styles: StyleChain<'a>) {
|
||||
let align = AlignElem::alignment_in(styles).resolve(styles);
|
||||
let leading = ParElem::leading_in(styles);
|
||||
let costs = TextElem::costs_in(styles);
|
||||
|
||||
// Determine whether to prevent widow and orphans.
|
||||
|
@ -115,7 +115,7 @@ impl<'a, 'b> Composer<'a, 'b, '_, '_> {
|
||||
let column_height = regions.size.y;
|
||||
let backlog: Vec<_> = std::iter::once(&column_height)
|
||||
.chain(regions.backlog)
|
||||
.flat_map(|&h| std::iter::repeat(h).take(self.config.columns.count))
|
||||
.flat_map(|&h| std::iter::repeat_n(h, self.config.columns.count))
|
||||
.skip(1)
|
||||
.collect();
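The backlog construction above uses `std::iter::repeat_n`, stabilized in Rust 1.82 as the direct replacement for `repeat(x).take(n)`:

    fn main() {
        let heights: Vec<u32> = std::iter::repeat_n(7, 3).collect();
        assert_eq!(heights, vec![7, 7, 7]);
    }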
|
||||
|
||||
|
@ -197,7 +197,50 @@ pub fn layout_flow<'a>(
|
||||
mode: FlowMode,
|
||||
) -> SourceResult<Fragment> {
|
||||
// Prepare configuration that is shared across the whole flow.
|
||||
let config = Config {
|
||||
let config = configuration(shared, regions, columns, column_gutter, mode);
|
||||
|
||||
// Collect the elements into pre-processed children. These are much easier
|
||||
// to handle than the raw elements.
|
||||
let bump = Bump::new();
|
||||
let children = collect(
|
||||
engine,
|
||||
&bump,
|
||||
children,
|
||||
locator.next(&()),
|
||||
Size::new(config.columns.width, regions.full),
|
||||
regions.expand.x,
|
||||
mode,
|
||||
)?;
|
||||
|
||||
let mut work = Work::new(&children);
|
||||
let mut finished = vec![];
|
||||
|
||||
// This loop runs once per region produced by the flow layout.
|
||||
loop {
|
||||
let frame = compose(engine, &mut work, &config, locator.next(&()), regions)?;
|
||||
finished.push(frame);
|
||||
|
||||
// Terminate the loop when everything is processed, though draining the
|
||||
// backlog if necessary.
|
||||
if work.done() && (!regions.expand.y || regions.backlog.is_empty()) {
|
||||
break;
|
||||
}
|
||||
|
||||
regions.next();
|
||||
}
|
||||
|
||||
Ok(Fragment::frames(finished))
|
||||
}
|
||||
|
||||
/// Determine the flow's configuration.
|
||||
fn configuration<'x>(
|
||||
shared: StyleChain<'x>,
|
||||
regions: Regions,
|
||||
columns: NonZeroUsize,
|
||||
column_gutter: Rel<Abs>,
|
||||
mode: FlowMode,
|
||||
) -> Config<'x> {
|
||||
Config {
|
||||
mode,
|
||||
shared,
|
||||
columns: {
|
||||
@ -235,39 +278,7 @@ pub fn layout_flow<'a>(
|
||||
)
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
// Collect the elements into pre-processed children. These are much easier
|
||||
// to handle than the raw elements.
|
||||
let bump = Bump::new();
|
||||
let children = collect(
|
||||
engine,
|
||||
&bump,
|
||||
children,
|
||||
locator.next(&()),
|
||||
Size::new(config.columns.width, regions.full),
|
||||
regions.expand.x,
|
||||
mode,
|
||||
)?;
|
||||
|
||||
let mut work = Work::new(&children);
|
||||
let mut finished = vec![];
|
||||
|
||||
// This loop runs once per region produced by the flow layout.
|
||||
loop {
|
||||
let frame = compose(engine, &mut work, &config, locator.next(&()), regions)?;
|
||||
finished.push(frame);
|
||||
|
||||
// Terminate the loop when everything is processed, though draining the
|
||||
// backlog if necessary.
|
||||
if work.done() && (!regions.expand.y || regions.backlog.is_empty()) {
|
||||
break;
|
||||
}
|
||||
|
||||
regions.next();
|
||||
}
|
||||
|
||||
Ok(Fragment::frames(finished))
|
||||
}
|
||||
|
||||
/// The work that is left to do by flow layout.
|
||||
|
@ -3,7 +3,9 @@ use std::fmt::Debug;
|
||||
use typst_library::diag::{bail, SourceResult};
|
||||
use typst_library::engine::Engine;
|
||||
use typst_library::foundations::{Resolve, StyleChain};
|
||||
use typst_library::layout::grid::resolve::{Cell, CellGrid, LinePosition, Repeatable};
|
||||
use typst_library::layout::grid::resolve::{
|
||||
Cell, CellGrid, Header, LinePosition, Repeatable,
|
||||
};
|
||||
use typst_library::layout::{
|
||||
Abs, Axes, Dir, Fr, Fragment, Frame, FrameItem, Length, Point, Region, Regions, Rel,
|
||||
Size, Sizing,
|
||||
@ -11,7 +13,7 @@ use typst_library::layout::{
|
||||
use typst_library::text::TextElem;
|
||||
use typst_library::visualize::Geometry;
|
||||
use typst_syntax::Span;
|
||||
use typst_utils::{MaybeReverseIter, Numeric};
|
||||
use typst_utils::Numeric;
|
||||
|
||||
use super::{
|
||||
generate_line_segments, hline_stroke_at_column, layout_cell, vline_stroke_at_row,
|
||||
@ -30,10 +32,8 @@ pub struct GridLayouter<'a> {
|
||||
pub(super) rcols: Vec<Abs>,
|
||||
/// The sum of `rcols`.
|
||||
pub(super) width: Abs,
|
||||
/// Resolve row sizes, by region.
|
||||
/// Resolved row sizes, by region.
|
||||
pub(super) rrows: Vec<Vec<RowPiece>>,
|
||||
/// Rows in the current region.
|
||||
pub(super) lrows: Vec<Row>,
|
||||
/// The amount of unbreakable rows remaining to be laid out in the
|
||||
/// current unbreakable row group. While this is positive, no region breaks
|
||||
/// should occur.
|
||||
@ -41,24 +41,155 @@ pub struct GridLayouter<'a> {
|
||||
/// Rowspans not yet laid out because not all of their spanned rows were
|
||||
/// laid out yet.
|
||||
pub(super) rowspans: Vec<Rowspan>,
|
||||
/// The initial size of the current region before we started subtracting.
|
||||
pub(super) initial: Size,
|
||||
/// Grid layout state for the current region.
|
||||
pub(super) current: Current,
|
||||
/// Frames for finished regions.
|
||||
pub(super) finished: Vec<Frame>,
|
||||
/// The amount and height of header rows on each finished region.
|
||||
pub(super) finished_header_rows: Vec<FinishedHeaderRowInfo>,
|
||||
/// Whether this is an RTL grid.
|
||||
pub(super) is_rtl: bool,
|
||||
/// The simulated header height.
|
||||
/// This field is reset in `layout_header` and properly updated by
|
||||
/// Currently repeating headers, one per level. Sorted by increasing
|
||||
/// levels.
|
||||
///
|
||||
/// Note that some levels may be absent, in particular level 0, which does
|
||||
/// not exist (so all levels are >= 1).
|
||||
pub(super) repeating_headers: Vec<&'a Header>,
|
||||
/// Headers, repeating or not, awaiting their first successful layout.
|
||||
/// Sorted by increasing levels.
|
||||
pub(super) pending_headers: &'a [Repeatable<Header>],
|
||||
/// Next headers to be processed.
|
||||
pub(super) upcoming_headers: &'a [Repeatable<Header>],
|
||||
/// State of the row being currently laid out.
|
||||
///
|
||||
/// This is kept as a field to avoid passing down too many parameters from
|
||||
/// `layout_row` into called functions, which would then have to pass them
|
||||
/// down to `push_row`, which reads these values.
|
||||
pub(super) row_state: RowState,
|
||||
/// The span of the grid element.
|
||||
pub(super) span: Span,
|
||||
}
|
||||
|
||||
/// Grid layout state for the current region. This should be reset or updated
|
||||
/// on each region break.
|
||||
pub(super) struct Current {
|
||||
/// The initial size of the current region before we started subtracting.
|
||||
pub(super) initial: Size,
|
||||
/// The height of the region after repeated headers were placed and footers
|
||||
/// prepared. This also includes pending repeating headers from the start,
|
||||
/// even if they were not repeated yet, since they will be repeated in the
|
||||
/// next region anyway (bar orphan prevention).
|
||||
///
|
||||
/// This is used to quickly tell if any additional space in the region has
|
||||
/// been occupied since then, meaning that additional space will become
|
||||
/// available after a region break (see
|
||||
/// [`GridLayouter::may_progress_with_repeats`]).
|
||||
pub(super) initial_after_repeats: Abs,
|
||||
/// Whether `layouter.regions.may_progress()` was `true` at the top of the
|
||||
/// region.
|
||||
pub(super) could_progress_at_top: bool,
|
||||
/// Rows in the current region.
|
||||
pub(super) lrows: Vec<Row>,
|
||||
/// The amount of repeated header rows at the start of the current region.
|
||||
/// Thus, excludes rows from pending headers (which were placed for the
|
||||
/// first time).
|
||||
///
|
||||
/// Note that `repeating_headers` and `pending_headers` can change if we
|
||||
/// find a new header inside the region (not at the top), so this field
|
||||
/// is required to access information from the top of the region.
|
||||
///
|
||||
/// This information is used on finish region to calculate the total height
|
||||
/// of resolved header rows at the top of the region, which is used by
|
||||
/// multi-page rowspans so they can properly skip the header rows at the
|
||||
/// top of each region during layout.
|
||||
pub(super) repeated_header_rows: usize,
|
||||
/// The end bound of the row range of the last repeating header at the
|
||||
/// start of the region.
|
||||
///
|
||||
/// The last row might have disappeared from layout due to being empty, so
|
||||
/// this is how we can become aware of where the last header ends without
|
||||
/// having to check the vector of rows. Line layout uses this to determine
|
||||
/// when to prioritize the last lines under a header.
|
||||
///
|
||||
/// A value of zero indicates no repeated headers were placed.
|
||||
pub(super) last_repeated_header_end: usize,
|
||||
/// Stores the length of `lrows` before a sequence of rows equipped with
|
||||
/// orphan prevention was laid out. In this case, if no more rows without
|
||||
/// orphan prevention are laid out after those rows before the region ends,
|
||||
/// the rows will be removed, and there may be an attempt to place them
|
||||
/// again in the new region. Effectively, this is the mechanism used for
|
||||
/// orphan prevention of rows.
|
||||
///
|
||||
/// At the moment, this is only used by repeated headers (they aren't laid
|
||||
/// out if alone in the region) and by new headers, which are moved to the
|
||||
/// `pending_headers` vector and so will automatically be placed again
|
||||
/// until they fit and are not orphans in at least one region (or exactly
|
||||
/// one, for non-repeated headers).
|
||||
pub(super) lrows_orphan_snapshot: Option<usize>,
|
||||
/// The height of effectively repeating headers, that is, ignoring
|
||||
/// non-repeating pending headers, in the current region.
|
||||
///
|
||||
/// This is used by multi-page auto rows so they can inform cell layout on
|
||||
/// how much space should be taken by headers if they break across regions.
|
||||
/// In particular, non-repeating headers only occupy the initial region,
|
||||
/// but disappear on new regions, so they can be ignored.
|
||||
///
|
||||
/// This field is reset on each new region and properly updated by
|
||||
/// `layout_auto_row` and `layout_relative_row`, and should not be read
|
||||
/// before all header rows are fully laid out. It is usually fine because
|
||||
/// header rows themselves are unbreakable, and unbreakable rows do not
|
||||
/// need to read this field at all.
|
||||
pub(super) header_height: Abs,
|
||||
///
|
||||
/// This height is not only computed at the beginning of the region. It is
|
||||
/// updated whenever a new header is found, subtracting the height of
|
||||
/// headers which stopped repeating and adding the height of all new
|
||||
/// headers.
|
||||
pub(super) repeating_header_height: Abs,
|
||||
/// The height for each repeating header that was placed in this region.
|
||||
/// Note that this includes headers not at the top of the region, before
|
||||
/// their first repetition (pending headers), and excludes headers removed
|
||||
/// by virtue of a new, conflicting header being found (short-lived
|
||||
/// headers).
|
||||
///
|
||||
/// This is used to know how much to update `repeating_header_height` by
|
||||
/// when finding a new header and causing existing repeating headers to
|
||||
/// stop.
|
||||
pub(super) repeating_header_heights: Vec<Abs>,
|
||||
/// The simulated footer height for this region.
|
||||
///
|
||||
/// The simulation occurs before any rows are laid out for a region.
|
||||
pub(super) footer_height: Abs,
|
||||
/// The span of the grid element.
|
||||
pub(super) span: Span,
|
||||
}
|
||||
|
||||
/// Data about the row being laid out right now.
#[derive(Debug, Default)]
pub(super) struct RowState {
/// If this is `Some`, this will be updated by the currently laid out row's
/// height if it is auto or relative. This is used for header height
/// calculation.
pub(super) current_row_height: Option<Abs>,
/// This is `true` when laying out non-short lived headers and footers.
/// That is, headers and footers which are not immediately followed or
/// preceded (respectively) by conflicting headers and footers of same or
/// lower level, or the end or start of the table (respectively), which
/// would cause them to never repeat, even once.
///
/// If this is `false`, the next row to be laid out will remove an active
/// orphan snapshot and will flush pending headers, as there is no risk
/// that they will be orphans anymore.
pub(super) in_active_repeatable: bool,
}

/// Data about laid out repeated header rows for a specific finished region.
#[derive(Debug, Default)]
pub(super) struct FinishedHeaderRowInfo {
/// The amount of repeated headers at the top of the region.
pub(super) repeated_amount: usize,
/// The end bound of the row range of the last repeated header at the top
/// of the region.
pub(super) last_repeated_header_end: usize,
/// The total height of repeated headers at the top of the region.
pub(super) repeated_height: Abs,
}

/// Details about a resulting row piece.
@ -114,14 +245,27 @@ impl<'a> GridLayouter<'a> {
rcols: vec![Abs::zero(); grid.cols.len()],
width: Abs::zero(),
rrows: vec![],
lrows: vec![],
unbreakable_rows_left: 0,
rowspans: vec![],
initial: regions.size,
finished: vec![],
finished_header_rows: vec![],
is_rtl: TextElem::dir_in(styles) == Dir::RTL,
header_height: Abs::zero(),
repeating_headers: vec![],
upcoming_headers: &grid.headers,
pending_headers: Default::default(),
row_state: RowState::default(),
current: Current {
initial: regions.size,
initial_after_repeats: regions.size.y,
could_progress_at_top: regions.may_progress(),
lrows: vec![],
repeated_header_rows: 0,
last_repeated_header_end: 0,
lrows_orphan_snapshot: None,
repeating_header_height: Abs::zero(),
repeating_header_heights: vec![],
footer_height: Abs::zero(),
},
span,
}
}
@ -130,38 +274,57 @@ impl<'a> GridLayouter<'a> {
pub fn layout(mut self, engine: &mut Engine) -> SourceResult<Fragment> {
self.measure_columns(engine)?;

if let Some(Repeatable::Repeated(footer)) = &self.grid.footer {
// Ensure rows in the first region will be aware of the possible
// presence of the footer.
if let Some(footer) = &self.grid.footer {
if footer.repeated {
// Ensure rows in the first region will be aware of the
// possible presence of the footer.
self.prepare_footer(footer, engine, 0)?;
if matches!(self.grid.header, None | Some(Repeatable::NotRepeated(_))) {
// No repeatable header, so we won't subtract it later.
self.regions.size.y -= self.footer_height;
self.regions.size.y -= self.current.footer_height;
self.current.initial_after_repeats = self.regions.size.y;
}
}

for y in 0..self.grid.rows.len() {
if let Some(Repeatable::Repeated(header)) = &self.grid.header {
if y < header.end {
if y == 0 {
self.layout_header(header, engine, 0)?;
self.regions.size.y -= self.footer_height;
}
let mut y = 0;
let mut consecutive_header_count = 0;
while y < self.grid.rows.len() {
if let Some(next_header) = self.upcoming_headers.get(consecutive_header_count)
{
if next_header.range.contains(&y) {
self.place_new_headers(&mut consecutive_header_count, engine)?;
y = next_header.range.end;

// Skip header rows during normal layout.
continue;
}
}

if let Some(Repeatable::Repeated(footer)) = &self.grid.footer {
if y >= footer.start {
if let Some(footer) = &self.grid.footer {
if footer.repeated && y >= footer.start {
if y == footer.start {
self.layout_footer(footer, engine, self.finished.len())?;
self.flush_orphans();
}
y = footer.end;
continue;
}
}

self.layout_row(y, engine, 0)?;

// After the first non-header row is placed, pending headers are no
// longer orphans and can repeat, so we move them to repeating
// headers.
//
// Note that this is usually done in `push_row`, since the call to
// `layout_row` above might trigger region breaks (for multi-page
// auto rows), whereas this needs to be called as soon as any part
// of a row is laid out. However, it's possible a row has no
// visible output and thus does not push any rows even though it
// was successfully laid out, in which case we additionally flush
// here just in case.
self.flush_orphans();

y += 1;
}

self.finish_region(engine, true)?;
@ -184,12 +347,46 @@ impl<'a> GridLayouter<'a> {
self.render_fills_strokes()
}

/// Layout the given row.
/// Layout a row with a certain initial state, returning the final state.
#[inline]
pub(super) fn layout_row_with_state(
&mut self,
y: usize,
engine: &mut Engine,
disambiguator: usize,
initial_state: RowState,
) -> SourceResult<RowState> {
// Keep a copy of the previous value in the stack, as this function can
// call itself recursively (e.g. if a region break is triggered and a
// header is placed), so we shouldn't outright overwrite it, but rather
// save and later restore the state when back to this call.
let previous = std::mem::replace(&mut self.row_state, initial_state);

// Keep it as a separate function to allow inlining the return below,
// as it's usually not needed.
self.layout_row_internal(y, engine, disambiguator)?;

Ok(std::mem::replace(&mut self.row_state, previous))
}

/// Layout the given row with the default row state.
#[inline]
pub(super) fn layout_row(
&mut self,
y: usize,
engine: &mut Engine,
disambiguator: usize,
) -> SourceResult<()> {
self.layout_row_with_state(y, engine, disambiguator, RowState::default())?;
Ok(())
}

/// Layout the given row using the current state.
pub(super) fn layout_row_internal(
&mut self,
y: usize,
engine: &mut Engine,
disambiguator: usize,
) -> SourceResult<()> {
// Skip to next region if current one is full, but only for content
// rows, not for gutter rows, and only if we aren't laying out an
@ -206,13 +403,18 @@ impl<'a> GridLayouter<'a> {
}

// Don't layout gutter rows at the top of a region.
if is_content_row || !self.lrows.is_empty() {
if is_content_row || !self.current.lrows.is_empty() {
match self.grid.rows[y] {
Sizing::Auto => self.layout_auto_row(engine, disambiguator, y)?,
Sizing::Rel(v) => {
self.layout_relative_row(engine, disambiguator, v, y)?
}
Sizing::Fr(v) => self.lrows.push(Row::Fr(v, y, disambiguator)),
Sizing::Fr(v) => {
if !self.row_state.in_active_repeatable {
self.flush_orphans();
}
self.current.lrows.push(Row::Fr(v, y, disambiguator))
}
}
}

@ -225,8 +427,13 @@ impl<'a> GridLayouter<'a> {
fn render_fills_strokes(mut self) -> SourceResult<Fragment> {
let mut finished = std::mem::take(&mut self.finished);
let frame_amount = finished.len();
for ((frame_index, frame), rows) in
finished.iter_mut().enumerate().zip(&self.rrows)
for (((frame_index, frame), rows), finished_header_rows) in
finished.iter_mut().enumerate().zip(&self.rrows).zip(
self.finished_header_rows
.iter()
.map(Some)
.chain(std::iter::repeat(None)),
)
{
if self.rcols.is_empty() || rows.is_empty() {
continue;
@ -347,7 +554,8 @@ impl<'a> GridLayouter<'a> {
let hline_indices = rows
.iter()
.map(|piece| piece.y)
.chain(std::iter::once(self.grid.rows.len()));
.chain(std::iter::once(self.grid.rows.len()))
.enumerate();

// Converts a row to the corresponding index in the vector of
// hlines.
@ -372,7 +580,7 @@ impl<'a> GridLayouter<'a> {
};

let mut prev_y = None;
for (y, dy) in hline_indices.zip(hline_offsets) {
for ((i, y), dy) in hline_indices.zip(hline_offsets) {
// Position of lines below the row index in the previous iteration.
let expected_prev_line_position = prev_y
.map(|prev_y| {
@ -383,47 +591,40 @@ impl<'a> GridLayouter<'a> {
})
.unwrap_or(LinePosition::Before);

// FIXME: In the future, directly specify in 'self.rrows' when
// we place a repeated header rather than its original rows.
// That would let us remove most of those verbose checks, both
// in 'lines.rs' and here. Those checks also aren't fully
// accurate either, since they will also trigger when some rows
// have been removed between the header and what's below it.
let is_under_repeated_header = self
.grid
.header
.as_ref()
.and_then(Repeatable::as_repeated)
.zip(prev_y)
.is_some_and(|(header, prev_y)| {
// Note: 'y == header.end' would mean we're right below
// the NON-REPEATED header, so that case should return
// false.
prev_y < header.end && y > header.end
});
// Header's lines at the bottom have priority when repeated.
// This will store the end bound of the last header if the
// current iteration is calculating lines under it.
let last_repeated_header_end_above = match finished_header_rows {
Some(info) if prev_y.is_some() && i == info.repeated_amount => {
Some(info.last_repeated_header_end)
}
_ => None,
};

// If some grid rows were omitted between the previous resolved
|
||||
// row and the current one, we ensure lines below the previous
|
||||
// row don't "disappear" and are considered, albeit with less
|
||||
// priority. However, don't do this when we're below a header,
|
||||
// as it must have more priority instead of less, so it is
|
||||
// chained later instead of before. The exception is when the
|
||||
// chained later instead of before (stored in the
|
||||
// 'header_hlines' variable below). The exception is when the
|
||||
// last row in the header is removed, in which case we append
|
||||
// both the lines under the row above us and also (later) the
|
||||
// lines under the header's (removed) last row.
|
||||
let prev_lines = prev_y
|
||||
.filter(|prev_y| {
|
||||
prev_y + 1 != y
|
||||
&& (!is_under_repeated_header
|
||||
|| self
|
||||
.grid
|
||||
.header
|
||||
.as_ref()
|
||||
.and_then(Repeatable::as_repeated)
|
||||
.is_some_and(|header| prev_y + 1 != header.end))
|
||||
})
|
||||
.map(|prev_y| get_hlines_at(prev_y + 1))
|
||||
.unwrap_or(&[]);
|
||||
let prev_lines = match prev_y {
|
||||
Some(prev_y)
|
||||
if prev_y + 1 != y
|
||||
&& last_repeated_header_end_above.is_none_or(
|
||||
|last_repeated_header_end| {
|
||||
prev_y + 1 != last_repeated_header_end
|
||||
},
|
||||
) =>
|
||||
{
|
||||
get_hlines_at(prev_y + 1)
|
||||
}
|
||||
|
||||
_ => &[],
|
||||
};
|
||||
|
||||
let expected_hline_position =
|
||||
expected_line_position(y, y == self.grid.rows.len());
|
||||
@ -441,15 +642,13 @@ impl<'a> GridLayouter<'a> {
|
||||
};
|
||||
|
||||
let mut expected_header_line_position = LinePosition::Before;
|
||||
let header_hlines = if let Some((Repeatable::Repeated(header), prev_y)) =
|
||||
self.grid.header.as_ref().zip(prev_y)
|
||||
{
|
||||
if is_under_repeated_header
|
||||
&& (!self.grid.has_gutter
|
||||
let header_hlines = match (last_repeated_header_end_above, prev_y) {
|
||||
(Some(header_end_above), Some(prev_y))
|
||||
if !self.grid.has_gutter
|
||||
|| matches!(
|
||||
self.grid.rows[prev_y],
|
||||
Sizing::Rel(length) if length.is_zero()
|
||||
))
|
||||
) =>
|
||||
{
|
||||
// For lines below a header, give priority to the
|
||||
// lines originally below the header rather than
|
||||
@ -468,15 +667,13 @@ impl<'a> GridLayouter<'a> {
|
||||
// column-gutter is specified, for example. In that
|
||||
// case, we still repeat the line under the gutter.
|
||||
expected_header_line_position = expected_line_position(
|
||||
header.end,
|
||||
header.end == self.grid.rows.len(),
|
||||
header_end_above,
|
||||
header_end_above == self.grid.rows.len(),
|
||||
);
|
||||
get_hlines_at(header.end)
|
||||
} else {
|
||||
&[]
|
||||
get_hlines_at(header_end_above)
|
||||
}
|
||||
} else {
|
||||
&[]
|
||||
|
||||
_ => &[],
|
||||
};
|
||||
|
||||
// The effective hlines to be considered at this row index are
|
||||
@ -529,6 +726,7 @@ impl<'a> GridLayouter<'a> {
|
||||
grid,
|
||||
rows,
|
||||
local_top_y,
|
||||
last_repeated_header_end_above,
|
||||
in_last_region,
|
||||
y,
|
||||
x,
|
||||
@ -574,7 +772,7 @@ impl<'a> GridLayouter<'a> {
|
||||
|
||||
// Reverse with RTL so that later columns start first.
|
||||
let mut dx = Abs::zero();
|
||||
for (x, &col) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
|
||||
for (x, &col) in self.rcols.iter().enumerate() {
|
||||
let mut dy = Abs::zero();
|
||||
for row in rows {
|
||||
// We want to only draw the fill starting at the parent
|
||||
@ -643,18 +841,13 @@ impl<'a> GridLayouter<'a> {
|
||||
.sum()
|
||||
};
|
||||
let width = self.cell_spanned_width(cell, x);
|
||||
// In the grid, cell colspans expand to the right,
|
||||
// so we're at the leftmost (lowest 'x') column
|
||||
// spanned by the cell. However, in RTL, cells
|
||||
// expand to the left. Therefore, without the
|
||||
// offset below, cell fills would start at the
|
||||
// rightmost visual position of a cell and extend
|
||||
// over to unrelated columns to the right in RTL.
|
||||
// We avoid this by ensuring the fill starts at the
|
||||
// very left of the cell, even with colspan > 1.
|
||||
let offset =
|
||||
if self.is_rtl { -width + col } else { Abs::zero() };
|
||||
let pos = Point::new(dx + offset, dy);
|
||||
let mut pos = Point::new(dx, dy);
|
||||
if self.is_rtl {
|
||||
// In RTL cells expand to the left, thus the
|
||||
// position must additionally be offset by the
|
||||
// cell's width.
|
||||
pos.x = self.width - (dx + width);
|
||||
}
|
||||
let size = Size::new(width, height);
|
||||
let rect = Geometry::Rect(size).filled(fill);
|
||||
fills.push((pos, FrameItem::Shape(rect, self.span)));
|
||||
@ -946,15 +1139,9 @@ impl<'a> GridLayouter<'a> {
|
||||
let frame = self.layout_single_row(engine, disambiguator, first, y)?;
|
||||
self.push_row(frame, y, true);
|
||||
|
||||
if self
|
||||
.grid
|
||||
.header
|
||||
.as_ref()
|
||||
.and_then(Repeatable::as_repeated)
|
||||
.is_some_and(|header| y < header.end)
|
||||
{
|
||||
// Add to header height.
|
||||
self.header_height += first;
|
||||
if let Some(row_height) = &mut self.row_state.current_row_height {
|
||||
// Add to header height, as we are in a header row.
|
||||
*row_height += first;
|
||||
}
|
||||
|
||||
return Ok(());
|
||||
@ -963,19 +1150,21 @@ impl<'a> GridLayouter<'a> {
|
||||
// Expand all but the last region.
|
||||
// Skip the first region if the space is eaten up by an fr row.
|
||||
let len = resolved.len();
|
||||
for ((i, region), target) in self
|
||||
.regions
|
||||
for ((i, region), target) in
|
||||
self.regions
|
||||
.iter()
|
||||
.enumerate()
|
||||
.zip(&mut resolved[..len - 1])
|
||||
.skip(self.lrows.iter().any(|row| matches!(row, Row::Fr(..))) as usize)
|
||||
.skip(self.current.lrows.iter().any(|row| matches!(row, Row::Fr(..)))
|
||||
as usize)
|
||||
{
|
||||
// Subtract header and footer heights from the region height when
|
||||
// it's not the first.
|
||||
// it's not the first. Ignore non-repeating headers as they only
|
||||
// appear on the first region by definition.
|
||||
target.set_max(
|
||||
region.y
|
||||
- if i > 0 {
|
||||
self.header_height + self.footer_height
|
||||
self.current.repeating_header_height + self.current.footer_height
|
||||
} else {
|
||||
Abs::zero()
|
||||
},
|
||||
@ -1186,25 +1375,19 @@ impl<'a> GridLayouter<'a> {
|
||||
let resolved = v.resolve(self.styles).relative_to(self.regions.base().y);
|
||||
let frame = self.layout_single_row(engine, disambiguator, resolved, y)?;
|
||||
|
||||
if self
|
||||
.grid
|
||||
.header
|
||||
.as_ref()
|
||||
.and_then(Repeatable::as_repeated)
|
||||
.is_some_and(|header| y < header.end)
|
||||
{
|
||||
// Add to header height.
|
||||
self.header_height += resolved;
|
||||
if let Some(row_height) = &mut self.row_state.current_row_height {
|
||||
// Add to header height, as we are in a header row.
|
||||
*row_height += resolved;
|
||||
}
|
||||
|
||||
// Skip to fitting region, but only if we aren't part of an unbreakable
|
||||
// row group. We use 'in_last_with_offset' so our 'in_last' call
|
||||
// properly considers that a header and a footer would be added on each
|
||||
// region break.
|
||||
// row group. We use 'may_progress_with_repeats' to stop trying if we
|
||||
// would skip to a region with the same height and where the same
|
||||
// headers would be repeated.
|
||||
let height = frame.height();
|
||||
while self.unbreakable_rows_left == 0
|
||||
&& !self.regions.size.y.fits(height)
|
||||
&& !in_last_with_offset(self.regions, self.header_height + self.footer_height)
|
||||
&& self.may_progress_with_repeats()
|
||||
{
|
||||
self.finish_region(engine, false)?;
|
||||
|
||||
@ -1236,10 +1419,9 @@ impl<'a> GridLayouter<'a> {
|
||||
}
|
||||
|
||||
let mut output = Frame::soft(Size::new(self.width, height));
|
||||
let mut pos = Point::zero();
|
||||
let mut offset = Point::zero();
|
||||
|
||||
// Reverse the column order when using RTL.
|
||||
for (x, &rcol) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
|
||||
for (x, &rcol) in self.rcols.iter().enumerate() {
|
||||
if let Some(cell) = self.grid.cell(x, y) {
|
||||
// Rowspans have a separate layout step
|
||||
if cell.rowspan.get() == 1 {
|
||||
@ -1257,25 +1439,17 @@ impl<'a> GridLayouter<'a> {
|
||||
let frame =
|
||||
layout_cell(cell, engine, disambiguator, self.styles, pod)?
|
||||
.into_frame();
|
||||
let mut pos = pos;
|
||||
let mut pos = offset;
|
||||
if self.is_rtl {
|
||||
// In the grid, cell colspans expand to the right,
|
||||
// so we're at the leftmost (lowest 'x') column
|
||||
// spanned by the cell. However, in RTL, cells
|
||||
// expand to the left. Therefore, without the
|
||||
// offset below, the cell's contents would be laid out
|
||||
// starting at its rightmost visual position and extend
|
||||
// over to unrelated cells to its right in RTL.
|
||||
// We avoid this by ensuring the rendered cell starts at
|
||||
// the very left of the cell, even with colspan > 1.
|
||||
let offset = -width + rcol;
|
||||
pos.x += offset;
|
||||
// In RTL cells expand to the left, thus the position
|
||||
// must additionally be offset by the cell's width.
|
||||
pos.x = self.width - (pos.x + width);
|
||||
}
|
||||
output.push_frame(pos, frame);
|
||||
}
|
||||
}
|
||||
|
||||
pos.x += rcol;
|
||||
offset.x += rcol;
|
||||
}
|
||||
|
||||
Ok(output)
|
||||
@ -1302,8 +1476,8 @@ impl<'a> GridLayouter<'a> {
|
||||
pod.backlog = &heights[1..];
|
||||
|
||||
// Layout the row.
|
||||
let mut pos = Point::zero();
|
||||
for (x, &rcol) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
|
||||
let mut offset = Point::zero();
|
||||
for (x, &rcol) in self.rcols.iter().enumerate() {
|
||||
if let Some(cell) = self.grid.cell(x, y) {
|
||||
// Rowspans have a separate layout step
|
||||
if cell.rowspan.get() == 1 {
|
||||
@ -1314,17 +1488,19 @@ impl<'a> GridLayouter<'a> {
|
||||
let fragment =
|
||||
layout_cell(cell, engine, disambiguator, self.styles, pod)?;
|
||||
for (output, frame) in outputs.iter_mut().zip(fragment) {
|
||||
let mut pos = pos;
|
||||
let mut pos = offset;
|
||||
if self.is_rtl {
|
||||
let offset = -width + rcol;
|
||||
pos.x += offset;
|
||||
// In RTL cells expand to the left, thus the
|
||||
// position must additionally be offset by the
|
||||
// cell's width.
|
||||
pos.x = self.width - (offset.x + width);
|
||||
}
|
||||
output.push_frame(pos, frame);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pos.x += rcol;
|
||||
offset.x += rcol;
|
||||
}
|
||||
|
||||
Ok(Fragment::frames(outputs))
|
||||
@ -1335,8 +1511,13 @@ impl<'a> GridLayouter<'a> {
|
||||
/// will be pushed for this particular row. It can be `false` for rows
|
||||
/// spanning multiple regions.
|
||||
fn push_row(&mut self, frame: Frame, y: usize, is_last: bool) {
|
||||
if !self.row_state.in_active_repeatable {
|
||||
// There is now a row after the rows equipped with orphan
|
||||
// prevention, so no need to keep moving them anymore.
|
||||
self.flush_orphans();
|
||||
}
|
||||
self.regions.size.y -= frame.height();
|
||||
self.lrows.push(Row::Frame(frame, y, is_last));
|
||||
self.current.lrows.push(Row::Frame(frame, y, is_last));
|
||||
}
|
||||
|
||||
/// Finish rows for one region.
|
||||
@ -1345,68 +1526,73 @@ impl<'a> GridLayouter<'a> {
|
||||
engine: &mut Engine,
|
||||
last: bool,
|
||||
) -> SourceResult<()> {
|
||||
// The latest rows have orphan prevention (headers) and no other rows
|
||||
// were placed, so remove those rows and try again in a new region,
|
||||
// unless this is the last region.
|
||||
if let Some(orphan_snapshot) = self.current.lrows_orphan_snapshot.take() {
|
||||
if !last {
|
||||
self.current.lrows.truncate(orphan_snapshot);
|
||||
self.current.repeated_header_rows =
|
||||
self.current.repeated_header_rows.min(orphan_snapshot);
|
||||
|
||||
if orphan_snapshot == 0 {
|
||||
// Removed all repeated headers.
|
||||
self.current.last_repeated_header_end = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if self
|
||||
.current
|
||||
.lrows
|
||||
.last()
|
||||
.is_some_and(|row| self.grid.is_gutter_track(row.index()))
|
||||
{
|
||||
// Remove the last row in the region if it is a gutter row.
|
||||
self.lrows.pop().unwrap();
|
||||
self.current.lrows.pop().unwrap();
|
||||
self.current.repeated_header_rows =
|
||||
self.current.repeated_header_rows.min(self.current.lrows.len());
|
||||
}
|
||||
|
||||
// If no rows other than the footer have been laid out so far, and
|
||||
// there are rows beside the footer, then don't lay it out at all.
|
||||
// This check doesn't apply, and is thus overridden, when there is a
|
||||
// header.
|
||||
let mut footer_would_be_orphan = self.lrows.is_empty()
|
||||
&& !in_last_with_offset(
|
||||
self.regions,
|
||||
self.header_height + self.footer_height,
|
||||
)
|
||||
&& self
|
||||
.grid
|
||||
.footer
|
||||
.as_ref()
|
||||
.and_then(Repeatable::as_repeated)
|
||||
.is_some_and(|footer| footer.start != 0);
|
||||
|
||||
if let Some(Repeatable::Repeated(header)) = &self.grid.header {
|
||||
if self.grid.rows.len() > header.end
|
||||
&& self
|
||||
.grid
|
||||
.footer
|
||||
.as_ref()
|
||||
.and_then(Repeatable::as_repeated)
|
||||
.map_or(true, |footer| footer.start != header.end)
|
||||
&& self.lrows.last().is_some_and(|row| row.index() < header.end)
|
||||
&& !in_last_with_offset(
|
||||
self.regions,
|
||||
self.header_height + self.footer_height,
|
||||
)
|
||||
{
|
||||
// Header and footer would be alone in this region, but there are more
|
||||
// rows beyond the header and the footer. Push an empty region.
|
||||
self.lrows.clear();
|
||||
footer_would_be_orphan = true;
|
||||
}
|
||||
}
|
||||
// If no rows other than the footer have been laid out so far
|
||||
// (e.g. due to header orphan prevention), and there are rows
|
||||
// beside the footer, then don't lay it out at all.
|
||||
//
|
||||
// It is worth noting that the footer is made non-repeatable at
|
||||
// the grid resolving stage if it is short-lived, that is, if
|
||||
// it is at the start of the table (or right after headers at
|
||||
// the start of the table).
|
||||
//
|
||||
// TODO(subfooters): explicitly check for short-lived footers.
|
||||
// TODO(subfooters): widow prevention for non-repeated footers with a
|
||||
// similar mechanism / when implementing multiple footers.
|
||||
let footer_would_be_widow = matches!(&self.grid.footer, Some(footer) if footer.repeated)
|
||||
&& self.current.lrows.is_empty()
|
||||
&& self.current.could_progress_at_top;
|
||||
|
||||
let mut laid_out_footer_start = None;
|
||||
if let Some(Repeatable::Repeated(footer)) = &self.grid.footer {
|
||||
// Don't layout the footer if it would be alone with the header in
|
||||
// the page, and don't layout it twice.
|
||||
if !footer_would_be_orphan
|
||||
&& self.lrows.iter().all(|row| row.index() < footer.start)
|
||||
if !footer_would_be_widow {
|
||||
if let Some(footer) = &self.grid.footer {
|
||||
// Don't layout the footer if it would be alone with the header
|
||||
// in the page (hence the widow check), and don't layout it
|
||||
// twice (check below).
|
||||
//
|
||||
// TODO(subfooters): this check can be replaced by a vector of
|
||||
// repeating footers in the future, and/or some "pending
|
||||
// footers" vector for footers we're about to place.
|
||||
if footer.repeated
|
||||
&& self.current.lrows.iter().all(|row| row.index() < footer.start)
|
||||
{
|
||||
laid_out_footer_start = Some(footer.start);
|
||||
self.layout_footer(footer, engine, self.finished.len())?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Determine the height of existing rows in the region.
|
||||
let mut used = Abs::zero();
|
||||
let mut fr = Fr::zero();
|
||||
for row in &self.lrows {
|
||||
for row in &self.current.lrows {
|
||||
match row {
|
||||
Row::Frame(frame, _, _) => used += frame.height(),
|
||||
Row::Fr(v, _, _) => fr += *v,
|
||||
@ -1415,9 +1601,9 @@ impl<'a> GridLayouter<'a> {
|
||||
|
||||
// Determine the size of the grid in this region, expanding fully if
|
||||
// there are fr rows.
|
||||
let mut size = Size::new(self.width, used).min(self.initial);
|
||||
if fr.get() > 0.0 && self.initial.y.is_finite() {
|
||||
size.y = self.initial.y;
|
||||
let mut size = Size::new(self.width, used).min(self.current.initial);
|
||||
if fr.get() > 0.0 && self.current.initial.y.is_finite() {
|
||||
size.y = self.current.initial.y;
|
||||
}
|
||||
|
||||
// The frame for the region.
|
||||
@ -1425,9 +1611,10 @@ impl<'a> GridLayouter<'a> {
|
||||
let mut pos = Point::zero();
|
||||
let mut rrows = vec![];
|
||||
let current_region = self.finished.len();
|
||||
let mut repeated_header_row_height = Abs::zero();
|
||||
|
||||
// Place finished rows and layout fractional rows.
|
||||
for row in std::mem::take(&mut self.lrows) {
|
||||
for (i, row) in std::mem::take(&mut self.current.lrows).into_iter().enumerate() {
|
||||
let (frame, y, is_last) = match row {
|
||||
Row::Frame(frame, y, is_last) => (frame, y, is_last),
|
||||
Row::Fr(v, y, disambiguator) => {
|
||||
@ -1438,6 +1625,9 @@ impl<'a> GridLayouter<'a> {
|
||||
};
|
||||
|
||||
let height = frame.height();
|
||||
if i < self.current.repeated_header_rows {
|
||||
repeated_header_row_height += height;
|
||||
}
|
||||
|
||||
// Ensure rowspans which span this row will have enough space to
|
||||
// be laid out over it later.
|
||||
@ -1446,7 +1636,7 @@ impl<'a> GridLayouter<'a> {
|
||||
.iter_mut()
|
||||
.filter(|rowspan| (rowspan.y..rowspan.y + rowspan.rowspan).contains(&y))
|
||||
.filter(|rowspan| {
|
||||
rowspan.max_resolved_row.map_or(true, |max_row| y > max_row)
|
||||
rowspan.max_resolved_row.is_none_or(|max_row| y > max_row)
|
||||
})
|
||||
{
|
||||
// If the first region wasn't defined yet, it will have the
|
||||
@ -1469,7 +1659,7 @@ impl<'a> GridLayouter<'a> {
|
||||
// last height is the one for the current region.
|
||||
rowspan
|
||||
.heights
|
||||
.extend(std::iter::repeat(Abs::zero()).take(amount_missing_heights));
|
||||
.extend(std::iter::repeat_n(Abs::zero(), amount_missing_heights));
|
||||
|
||||
// Ensure that, in this region, the rowspan will span at least
|
||||
// this row.
|
||||
@ -1494,7 +1684,7 @@ impl<'a> GridLayouter<'a> {
|
||||
// laid out at the first frame of the row).
|
||||
// Any rowspans ending before this row are laid out even
|
||||
// on this row's first frame.
|
||||
if laid_out_footer_start.map_or(true, |footer_start| {
|
||||
if laid_out_footer_start.is_none_or(|footer_start| {
|
||||
// If this is a footer row, then only lay out this rowspan
|
||||
// if the rowspan is contained within the footer.
|
||||
y < footer_start || rowspan.y >= footer_start
|
||||
@ -1516,7 +1706,11 @@ impl<'a> GridLayouter<'a> {
|
||||
// we have to check the same index again in the next
|
||||
// iteration.
|
||||
let rowspan = self.rowspans.remove(i);
|
||||
self.layout_rowspan(rowspan, Some((&mut output, &rrows)), engine)?;
|
||||
self.layout_rowspan(
|
||||
rowspan,
|
||||
Some((&mut output, repeated_header_row_height)),
|
||||
engine,
|
||||
)?;
|
||||
} else {
|
||||
i += 1;
|
||||
}
|
||||
@ -1527,21 +1721,40 @@ impl<'a> GridLayouter<'a> {
|
||||
pos.y += height;
|
||||
}
|
||||
|
||||
self.finish_region_internal(output, rrows);
|
||||
self.finish_region_internal(
|
||||
output,
|
||||
rrows,
|
||||
FinishedHeaderRowInfo {
|
||||
repeated_amount: self.current.repeated_header_rows,
|
||||
last_repeated_header_end: self.current.last_repeated_header_end,
|
||||
repeated_height: repeated_header_row_height,
|
||||
},
|
||||
);
|
||||
|
||||
if !last {
|
||||
self.current.repeated_header_rows = 0;
|
||||
self.current.last_repeated_header_end = 0;
|
||||
self.current.repeating_header_height = Abs::zero();
|
||||
self.current.repeating_header_heights.clear();
|
||||
|
||||
let disambiguator = self.finished.len();
|
||||
if let Some(Repeatable::Repeated(footer)) = &self.grid.footer {
|
||||
if let Some(footer) =
|
||||
self.grid.footer.as_ref().and_then(Repeatable::as_repeated)
|
||||
{
|
||||
self.prepare_footer(footer, engine, disambiguator)?;
|
||||
}
|
||||
|
||||
if let Some(Repeatable::Repeated(header)) = &self.grid.header {
|
||||
// Add a header to the new region.
|
||||
self.layout_header(header, engine, disambiguator)?;
|
||||
}
|
||||
|
||||
// Ensure rows don't try to overrun the footer.
|
||||
self.regions.size.y -= self.footer_height;
|
||||
// Note that header layout will only subtract this again if it has
|
||||
// to skip regions to fit headers, so there is no risk of
|
||||
// subtracting this twice.
|
||||
self.regions.size.y -= self.current.footer_height;
|
||||
self.current.initial_after_repeats = self.regions.size.y;
|
||||
|
||||
if !self.repeating_headers.is_empty() || !self.pending_headers.is_empty() {
|
||||
// Add headers to the new region.
|
||||
self.layout_active_headers(engine)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@ -1553,11 +1766,26 @@ impl<'a> GridLayouter<'a> {
|
||||
&mut self,
|
||||
output: Frame,
|
||||
resolved_rows: Vec<RowPiece>,
|
||||
header_row_info: FinishedHeaderRowInfo,
|
||||
) {
|
||||
self.finished.push(output);
|
||||
self.rrows.push(resolved_rows);
|
||||
self.regions.next();
|
||||
self.initial = self.regions.size;
|
||||
self.current.initial = self.regions.size;
|
||||
|
||||
// Repeats haven't been laid out yet, so in the meantime, this will
|
||||
// represent the initial height after repeats laid out so far, and will
|
||||
// be gradually updated when preparing footers and repeating headers.
|
||||
self.current.initial_after_repeats = self.current.initial.y;
|
||||
|
||||
self.current.could_progress_at_top = self.regions.may_progress();
|
||||
|
||||
if !self.grid.headers.is_empty() {
|
||||
self.finished_header_rows.push(header_row_info);
|
||||
}
|
||||
|
||||
// Ensure orphan prevention is handled before resolving rows.
|
||||
debug_assert!(self.current.lrows_orphan_snapshot.is_none());
|
||||
}
|
||||
}
|
||||
|
||||
@ -1572,13 +1800,3 @@ pub(super) fn points(
|
||||
offset
|
||||
})
|
||||
}
|
||||
|
||||
/// Checks if the first region of a sequence of regions is the last usable
|
||||
/// region, assuming that the last region will always be occupied by some
|
||||
/// specific offset height, even after calling `.next()`, due to some
|
||||
/// additional logic which adds content automatically on each region turn (in
|
||||
/// our case, headers).
|
||||
pub(super) fn in_last_with_offset(regions: Regions<'_>, offset: Abs) -> bool {
|
||||
regions.backlog.is_empty()
|
||||
&& regions.last.map_or(true, |height| regions.size.y + offset == height)
|
||||
}
|
||||
|
@ -391,10 +391,12 @@ pub fn vline_stroke_at_row(
|
||||
///
|
||||
/// This function assumes columns are sorted by increasing `x`, and rows are
|
||||
/// sorted by increasing `y`.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn hline_stroke_at_column(
|
||||
grid: &CellGrid,
|
||||
rows: &[RowPiece],
|
||||
local_top_y: Option<usize>,
|
||||
header_end_above: Option<usize>,
|
||||
in_last_region: bool,
|
||||
y: usize,
|
||||
x: usize,
|
||||
@ -463,7 +465,7 @@ pub fn hline_stroke_at_column(
|
||||
// region, we have the last index, and (as a failsafe) we don't have the
|
||||
// last row of cells above us.
|
||||
let use_bottom_border_stroke = !in_last_region
|
||||
&& local_top_y.map_or(true, |top_y| top_y + 1 != grid.rows.len())
|
||||
&& local_top_y.is_none_or(|top_y| top_y + 1 != grid.rows.len())
|
||||
&& y == grid.rows.len();
|
||||
let bottom_y =
|
||||
if use_bottom_border_stroke { grid.rows.len().saturating_sub(1) } else { y };
|
||||
@ -499,17 +501,15 @@ pub fn hline_stroke_at_column(
|
||||
// Top border stroke and header stroke are generally prioritized, unless
|
||||
// they don't have explicit hline overrides and one or more user-provided
|
||||
// hlines would appear at the same position, which then are prioritized.
|
||||
let top_stroke_comes_from_header = grid
|
||||
.header
|
||||
.as_ref()
|
||||
.and_then(Repeatable::as_repeated)
|
||||
.zip(local_top_y)
|
||||
.is_some_and(|(header, local_top_y)| {
|
||||
// Ensure the row above us is a repeated header.
|
||||
// FIXME: Make this check more robust when headers at arbitrary
|
||||
// positions are added.
|
||||
local_top_y < header.end && y > header.end
|
||||
});
|
||||
let top_stroke_comes_from_header = header_end_above.zip(local_top_y).is_some_and(
|
||||
|(last_repeated_header_end, local_top_y)| {
|
||||
// Check if the last repeated header row is above this line.
|
||||
//
|
||||
// Note that `y == last_repeated_header_end` is impossible for a
|
||||
// strictly repeated header (not in its original position).
|
||||
local_top_y < last_repeated_header_end && y > last_repeated_header_end
|
||||
},
|
||||
);
|
||||
|
||||
// Prioritize the footer's top stroke as well where applicable.
|
||||
let bottom_stroke_comes_from_footer = grid
|
||||
@ -637,7 +637,7 @@ mod test {
|
||||
},
|
||||
vec![],
|
||||
vec![],
|
||||
None,
|
||||
vec![],
|
||||
None,
|
||||
entries,
|
||||
)
|
||||
@ -1175,7 +1175,7 @@ mod test {
|
||||
},
|
||||
vec![],
|
||||
vec![],
|
||||
None,
|
||||
vec![],
|
||||
None,
|
||||
entries,
|
||||
)
|
||||
@ -1268,6 +1268,7 @@ mod test {
|
||||
grid,
|
||||
&rows,
|
||||
y.checked_sub(1),
|
||||
None,
|
||||
true,
|
||||
y,
|
||||
x,
|
||||
@ -1461,6 +1462,7 @@ mod test {
|
||||
grid,
|
||||
&rows,
|
||||
y.checked_sub(1),
|
||||
None,
|
||||
true,
|
||||
y,
|
||||
x,
|
||||
@ -1506,6 +1508,7 @@ mod test {
|
||||
grid,
|
||||
&rows,
|
||||
if y == 4 { Some(2) } else { y.checked_sub(1) },
|
||||
None,
|
||||
true,
|
||||
y,
|
||||
x,
|
||||
|
@ -1,57 +1,446 @@
|
||||
use std::ops::Deref;
|
||||
|
||||
use typst_library::diag::SourceResult;
|
||||
use typst_library::engine::Engine;
|
||||
use typst_library::layout::grid::resolve::{Footer, Header, Repeatable};
|
||||
use typst_library::layout::{Abs, Axes, Frame, Regions};
|
||||
|
||||
use super::layouter::GridLayouter;
|
||||
use super::layouter::{GridLayouter, RowState};
|
||||
use super::rowspans::UnbreakableRowGroup;
|
||||
|
||||
impl GridLayouter<'_> {
|
||||
/// Layouts the header's rows.
|
||||
/// Skips regions as necessary.
|
||||
pub fn layout_header(
|
||||
impl<'a> GridLayouter<'a> {
|
||||
/// Checks whether a region break could help a situation where we're out of
|
||||
/// space for the next row. The criteria are:
|
||||
///
|
||||
/// 1. If we could progress at the top of the region, that indicates the
|
||||
/// region has a backlog, or (if we're at the first region) a region break
|
||||
/// is at all possible (`regions.last` is `Some()`), so that's sufficient.
|
||||
///
|
||||
/// 2. Otherwise, we may progress if another region break is possible
|
||||
/// (`regions.last` is still `Some()`) and non-repeating rows have been
|
||||
/// placed, since that means the space they occupy will be available in the
|
||||
/// next region.
|
||||
#[inline]
|
||||
pub fn may_progress_with_repeats(&self) -> bool {
|
||||
// TODO(subfooters): check below isn't enough to detect non-repeating
|
||||
// footers... we can also change 'initial_after_repeats' to stop being
|
||||
// calculated if there were any non-repeating footers.
|
||||
self.current.could_progress_at_top
|
||||
|| self.regions.last.is_some()
|
||||
&& self.regions.size.y != self.current.initial_after_repeats
|
||||
}
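// Illustrative example (not part of the original diff): the condition above
// with plain values. Suppose we could not progress at the top, a further
// region exists, 400pt of space remain, and the region started with 500pt
// after repeats; then some rows were already placed, so breaking to the next
// region can free up space and the layouter may progress. If the remaining
// space still equals `initial_after_repeats`, a break would only reproduce
// the same situation. The standalone mirror below is hypothetical.
fn may_progress_with_repeats_simplified(
    could_progress_at_top: bool,
    has_next_region: bool,
    remaining: f64,
    initial_after_repeats: f64,
) -> bool {
    could_progress_at_top
        || has_next_region && remaining != initial_after_repeats
}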
|
||||
|
||||
pub fn place_new_headers(
|
||||
&mut self,
|
||||
consecutive_header_count: &mut usize,
|
||||
engine: &mut Engine,
|
||||
) -> SourceResult<()> {
|
||||
*consecutive_header_count += 1;
|
||||
let (consecutive_headers, new_upcoming_headers) =
|
||||
self.upcoming_headers.split_at(*consecutive_header_count);
|
||||
|
||||
if new_upcoming_headers.first().is_some_and(|next_header| {
|
||||
consecutive_headers.last().is_none_or(|latest_header| {
|
||||
!latest_header.short_lived
|
||||
&& next_header.range.start == latest_header.range.end
|
||||
}) && !next_header.short_lived
|
||||
}) {
|
||||
// More headers coming, so wait until we reach them.
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
self.upcoming_headers = new_upcoming_headers;
|
||||
*consecutive_header_count = 0;
|
||||
|
||||
let [first_header, ..] = consecutive_headers else {
|
||||
self.flush_orphans();
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
// Assuming non-conflicting headers sorted by increasing y, this must
|
||||
// be the header with the lowest level (sorted by increasing levels).
|
||||
let first_level = first_header.level;
|
||||
|
||||
// Stop repeating conflicting headers, even if the new headers are
|
||||
// short-lived or won't repeat.
|
||||
//
|
||||
// If we go to a new region before the new headers fit alongside their
|
||||
// children (or in general, for short-lived), the old headers should
|
||||
// not be displayed anymore.
|
||||
let first_conflicting_pos =
|
||||
self.repeating_headers.partition_point(|h| h.level < first_level);
|
||||
self.repeating_headers.truncate(first_conflicting_pos);
|
||||
|
||||
// Ensure upcoming rows won't see that these headers will occupy any
|
||||
// space in future regions anymore.
|
||||
for removed_height in
|
||||
self.current.repeating_header_heights.drain(first_conflicting_pos..)
|
||||
{
|
||||
self.current.repeating_header_height -= removed_height;
|
||||
}
|
||||
|
||||
// Layout short-lived headers immediately.
|
||||
if consecutive_headers.last().is_some_and(|h| h.short_lived) {
|
||||
// No chance of orphans as we're immediately placing conflicting
|
||||
// headers afterwards, which basically are not headers, for all intents
|
||||
// and purposes. It is therefore guaranteed that all new headers have
|
||||
// been placed at least once.
|
||||
self.flush_orphans();
|
||||
|
||||
// Layout each conflicting header independently, without orphan
|
||||
// prevention (as they don't go into 'pending_headers').
|
||||
// These headers are short-lived as they are immediately followed by a
|
||||
// header of the same or lower level, such that they never actually get
|
||||
// to repeat.
|
||||
self.layout_new_headers(consecutive_headers, true, engine)?;
|
||||
} else {
|
||||
// Let's try to place pending headers at least once.
|
||||
// This might be a waste as we could generate an orphan and thus have
|
||||
// to try to place old and new headers all over again, but that happens
|
||||
// for every new region anyway, so it's rather unavoidable.
|
||||
let snapshot_created =
|
||||
self.layout_new_headers(consecutive_headers, false, engine)?;
|
||||
|
||||
// Queue the new headers for layout. They will remain in this
|
||||
// vector due to orphan prevention.
|
||||
//
|
||||
// After the first subsequent row is laid out, move to repeating, as
|
||||
// it's then confirmed the headers won't be moved due to orphan
|
||||
// prevention anymore.
|
||||
self.pending_headers = consecutive_headers;
|
||||
|
||||
if !snapshot_created {
|
||||
// Region probably couldn't progress.
|
||||
//
|
||||
// Mark new pending headers as final and ensure there isn't a
|
||||
// snapshot.
|
||||
self.flush_orphans();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Lays out rows belonging to a header, returning the calculated header
|
||||
/// height only for that header. Indicates to the laid out rows that they
|
||||
/// should inform their laid out heights if appropriate (auto or fixed
|
||||
/// size rows only).
|
||||
#[inline]
|
||||
fn layout_header_rows(
|
||||
&mut self,
|
||||
header: &Header,
|
||||
engine: &mut Engine,
|
||||
disambiguator: usize,
|
||||
) -> SourceResult<()> {
|
||||
let header_rows =
|
||||
self.simulate_header(header, &self.regions, engine, disambiguator)?;
|
||||
as_short_lived: bool,
|
||||
) -> SourceResult<Abs> {
|
||||
let mut header_height = Abs::zero();
|
||||
for y in header.range.clone() {
|
||||
header_height += self
|
||||
.layout_row_with_state(
|
||||
y,
|
||||
engine,
|
||||
disambiguator,
|
||||
RowState {
|
||||
current_row_height: Some(Abs::zero()),
|
||||
in_active_repeatable: !as_short_lived,
|
||||
},
|
||||
)?
|
||||
.current_row_height
|
||||
.unwrap_or_default();
|
||||
}
|
||||
Ok(header_height)
|
||||
}
|
||||
|
||||
/// This function should be called each time an additional row has been
|
||||
/// laid out in a region to indicate that orphan prevention has succeeded.
|
||||
///
|
||||
/// It removes the current orphan snapshot and flushes pending headers,
|
||||
/// such that a non-repeating header won't try to be laid out again
|
||||
/// anymore, and a repeating header will begin to be part of
|
||||
/// `repeating_headers`.
|
||||
pub fn flush_orphans(&mut self) {
|
||||
self.current.lrows_orphan_snapshot = None;
|
||||
self.flush_pending_headers();
|
||||
}
|
||||
|
||||
/// Indicates all currently pending headers have been successfully placed
|
||||
/// once, since another row has been placed after them, so they are
|
||||
/// certainly not orphans.
|
||||
pub fn flush_pending_headers(&mut self) {
|
||||
if self.pending_headers.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
for header in self.pending_headers {
|
||||
if header.repeated {
|
||||
// Vector remains sorted by increasing levels:
|
||||
// - 'pending_headers' themselves are sorted, since we only
|
||||
// push non-mutually-conflicting headers at a time.
|
||||
// - Before pushing new pending headers in
|
||||
// 'layout_new_pending_headers', we truncate repeating headers
|
||||
// to remove anything with the same or higher levels as the
|
||||
// first pending header.
|
||||
// - Assuming it was sorted before, that truncation only keeps
|
||||
// elements with a lower level.
|
||||
// - Therefore, by pushing this header to the end, it will have
|
||||
// a level larger than all the previous headers, and is thus
|
||||
// in its 'correct' position.
|
||||
self.repeating_headers.push(header);
|
||||
}
|
||||
}
|
||||
|
||||
self.pending_headers = Default::default();
|
||||
}
|
||||
|
||||
/// Lays out the rows of repeating and pending headers at the top of the
|
||||
/// region.
|
||||
///
|
||||
/// Assumes the footer height for the current region has already been
|
||||
/// calculated. Skips regions as necessary to fit all headers and all
|
||||
/// footers.
|
||||
pub fn layout_active_headers(&mut self, engine: &mut Engine) -> SourceResult<()> {
|
||||
// Generate different locations for content in headers across its
|
||||
// repetitions by assigning a unique number for each one.
|
||||
let disambiguator = self.finished.len();
|
||||
|
||||
let header_height = self.simulate_header_height(
|
||||
self.repeating_headers
|
||||
.iter()
|
||||
.copied()
|
||||
.chain(self.pending_headers.iter().map(Repeatable::deref)),
|
||||
&self.regions,
|
||||
engine,
|
||||
disambiguator,
|
||||
)?;
|
||||
|
||||
// We already take the footer into account below.
|
||||
// While skipping regions, footer height won't be automatically
|
||||
// re-calculated until the end.
|
||||
let mut skipped_region = false;
|
||||
while self.unbreakable_rows_left == 0
|
||||
&& !self.regions.size.y.fits(header_rows.height + self.footer_height)
|
||||
&& self.regions.may_progress()
|
||||
&& !self.regions.size.y.fits(header_height)
|
||||
&& self.may_progress_with_repeats()
|
||||
{
|
||||
// Advance regions without any output until we can place the
|
||||
// header and the footer.
|
||||
self.finish_region_internal(Frame::soft(Axes::splat(Abs::zero())), vec![]);
|
||||
self.finish_region_internal(
|
||||
Frame::soft(Axes::splat(Abs::zero())),
|
||||
vec![],
|
||||
Default::default(),
|
||||
);
|
||||
|
||||
// TODO(layout model): re-calculate heights of headers and footers
|
||||
// on each region if 'full' changes? (Assuming height doesn't
|
||||
// change for now...)
|
||||
//
|
||||
// Would remove the footer height update below (move it here).
|
||||
skipped_region = true;
|
||||
|
||||
self.regions.size.y -= self.current.footer_height;
|
||||
self.current.initial_after_repeats = self.regions.size.y;
|
||||
}
|
||||
|
||||
if let Some(footer) = &self.grid.footer {
|
||||
if footer.repeated && skipped_region {
|
||||
// Simulate the footer again; the region's 'full' might have
|
||||
// changed.
|
||||
self.regions.size.y += self.current.footer_height;
|
||||
self.current.footer_height = self
|
||||
.simulate_footer(footer, &self.regions, engine, disambiguator)?
|
||||
.height;
|
||||
self.regions.size.y -= self.current.footer_height;
|
||||
}
|
||||
}
|
||||
|
||||
let repeating_header_rows =
|
||||
total_header_row_count(self.repeating_headers.iter().copied());
|
||||
|
||||
let pending_header_rows =
|
||||
total_header_row_count(self.pending_headers.iter().map(Repeatable::deref));
|
||||
|
||||
// Group of headers is unbreakable.
|
||||
// Thus, no risk of 'finish_region' being recursively called from
|
||||
// within 'layout_row'.
|
||||
self.unbreakable_rows_left += repeating_header_rows + pending_header_rows;
|
||||
|
||||
self.current.last_repeated_header_end =
|
||||
self.repeating_headers.last().map(|h| h.range.end).unwrap_or_default();
|
||||
|
||||
// Reset the header height for this region.
|
||||
// It will be re-calculated when laying out each header row.
|
||||
self.header_height = Abs::zero();
|
||||
self.current.repeating_header_height = Abs::zero();
|
||||
self.current.repeating_header_heights.clear();
|
||||
|
||||
if let Some(Repeatable::Repeated(footer)) = &self.grid.footer {
|
||||
if skipped_region {
|
||||
// Simulate the footer again; the region's 'full' might have
|
||||
// changed.
|
||||
self.footer_height = self
|
||||
.simulate_footer(footer, &self.regions, engine, disambiguator)?
|
||||
.height;
|
||||
debug_assert!(self.current.lrows.is_empty());
|
||||
debug_assert!(self.current.lrows_orphan_snapshot.is_none());
|
||||
let may_progress = self.may_progress_with_repeats();
|
||||
|
||||
if may_progress {
|
||||
// Enable orphan prevention for headers at the top of the region.
|
||||
// Otherwise, we will flush pending headers below, after laying
|
||||
// them out.
|
||||
//
|
||||
// It is very rare for this to make a difference as we're usually
|
||||
// at the 'last' region after the first skip, at which the snapshot
|
||||
// is handled by 'layout_new_headers'. Either way, we keep this
|
||||
// here for correctness.
|
||||
self.current.lrows_orphan_snapshot = Some(self.current.lrows.len());
|
||||
}
|
||||
|
||||
// Use indices to avoid double borrow. We don't mutate headers in
|
||||
// 'layout_row' so this is fine.
|
||||
let mut i = 0;
|
||||
while let Some(&header) = self.repeating_headers.get(i) {
|
||||
let header_height =
|
||||
self.layout_header_rows(header, engine, disambiguator, false)?;
|
||||
self.current.repeating_header_height += header_height;
|
||||
|
||||
// We assume that this vector will be sorted according
|
||||
// to increasing levels like 'repeating_headers' and
|
||||
// 'pending_headers' - and, in particular, their union, as this
|
||||
// vector is pushed repeating heights from both.
|
||||
//
|
||||
// This is guaranteed by:
|
||||
// 1. We always push pending headers after repeating headers,
|
||||
// as we assume they don't conflict because we remove
|
||||
// conflicting repeating headers when pushing a new pending
|
||||
// header.
|
||||
//
|
||||
// 2. We push in the same order as each.
|
||||
//
|
||||
// 3. This vector is also modified when pushing a new pending
|
||||
// header, where we remove heights for conflicting repeating
|
||||
// headers which have now stopped repeating. They are always at
|
||||
// the end and new pending headers respect the existing sort,
|
||||
// so the vector will remain sorted.
|
||||
self.current.repeating_header_heights.push(header_height);
|
||||
|
||||
i += 1;
|
||||
}
|
||||
|
||||
self.current.repeated_header_rows = self.current.lrows.len();
|
||||
self.current.initial_after_repeats = self.regions.size.y;
|
||||
|
||||
let mut has_non_repeated_pending_header = false;
|
||||
for header in self.pending_headers {
|
||||
if !header.repeated {
|
||||
self.current.initial_after_repeats = self.regions.size.y;
|
||||
has_non_repeated_pending_header = true;
|
||||
}
|
||||
let header_height =
|
||||
self.layout_header_rows(header, engine, disambiguator, false)?;
|
||||
if header.repeated {
|
||||
self.current.repeating_header_height += header_height;
|
||||
self.current.repeating_header_heights.push(header_height);
|
||||
}
|
||||
}
|
||||
|
||||
// Header is unbreakable.
|
||||
// Thus, no risk of 'finish_region' being recursively called from
|
||||
// within 'layout_row'.
|
||||
self.unbreakable_rows_left += header.end;
|
||||
for y in 0..header.end {
|
||||
self.layout_row(y, engine, disambiguator)?;
|
||||
if !has_non_repeated_pending_header {
|
||||
self.current.initial_after_repeats = self.regions.size.y;
|
||||
}
|
||||
|
||||
if !may_progress {
|
||||
// Flush pending headers immediately, as placing them again later
|
||||
// won't help.
|
||||
self.flush_orphans();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Lays out headers found for the first time during row layout.
|
||||
///
|
||||
/// If 'short_lived' is true, these headers are immediately followed by
|
||||
/// a conflicting header, so it is assumed they will not be pushed to
|
||||
/// pending headers.
|
||||
///
|
||||
/// Returns whether orphan prevention was successfully setup, or couldn't
|
||||
/// due to short-lived headers or the region couldn't progress.
|
||||
pub fn layout_new_headers(
|
||||
&mut self,
|
||||
headers: &'a [Repeatable<Header>],
|
||||
short_lived: bool,
|
||||
engine: &mut Engine,
|
||||
) -> SourceResult<bool> {
|
||||
// At first, only consider the height of the given headers. However,
|
||||
// for upcoming regions, we will have to consider repeating headers as
|
||||
// well.
|
||||
let header_height = self.simulate_header_height(
|
||||
headers.iter().map(Repeatable::deref),
|
||||
&self.regions,
|
||||
engine,
|
||||
0,
|
||||
)?;
|
||||
|
||||
while self.unbreakable_rows_left == 0
|
||||
&& !self.regions.size.y.fits(header_height)
|
||||
&& self.may_progress_with_repeats()
|
||||
{
|
||||
// Note that, after the first region skip, the new headers will go
|
||||
// at the top of the region, but after the repeating headers that
|
||||
// remained (which will be automatically placed in 'finish_region').
|
||||
self.finish_region(engine, false)?;
|
||||
}
|
||||
|
||||
// Remove new headers at the end of the region if the upcoming row
|
||||
// doesn't fit.
|
||||
// TODO(subfooters): what if there is a footer right after it?
|
||||
let should_snapshot = !short_lived
|
||||
&& self.current.lrows_orphan_snapshot.is_none()
|
||||
&& self.may_progress_with_repeats();
|
||||
|
||||
if should_snapshot {
|
||||
// If we don't enter this branch while laying out non-short lived
|
||||
// headers, that means we will have to immediately flush pending
|
||||
// headers and mark them as final, since trying to place them in
|
||||
// the next page won't help get more space.
self.current.lrows_orphan_snapshot = Some(self.current.lrows.len());
}

let mut at_top = self.regions.size.y == self.current.initial_after_repeats;

self.unbreakable_rows_left +=
total_header_row_count(headers.iter().map(Repeatable::deref));

for header in headers {
let header_height = self.layout_header_rows(header, engine, 0, false)?;

// Only store this header height if it is actually going to
// become a pending header. Otherwise, pretend it's not a
// header... This is fine for consumers of 'header_height' as
// it is guaranteed this header won't appear in a future
// region, so multi-page rows and cells can effectively ignore
// this header.
if !short_lived && header.repeated {
self.current.repeating_header_height += header_height;
self.current.repeating_header_heights.push(header_height);
if at_top {
self.current.initial_after_repeats = self.regions.size.y;
}
} else {
at_top = false;
}
}

Ok(should_snapshot)
}

/// Calculates the total expected height of several headers.
pub fn simulate_header_height<'h: 'a>(
&self,
headers: impl IntoIterator<Item = &'h Header>,
regions: &Regions<'_>,
engine: &mut Engine,
disambiguator: usize,
) -> SourceResult<Abs> {
let mut height = Abs::zero();
for header in headers {
height +=
self.simulate_header(header, regions, engine, disambiguator)?.height;
}
Ok(height)
}

/// Simulate the header's group of rows.
pub fn simulate_header(
&self,
@ -66,8 +455,8 @@ impl GridLayouter<'_> {
// assume that the amount of unbreakable rows following the first row
// in the header will be precisely the rows in the header.
self.simulate_unbreakable_row_group(
0,
Some(header.end),
header.range.start,
Some(header.range.end - header.range.start),
regions,
engine,
disambiguator,
@ -91,11 +480,22 @@ impl GridLayouter<'_> {
{
// Advance regions without any output until we can place the
// footer.
self.finish_region_internal(Frame::soft(Axes::splat(Abs::zero())), vec![]);
self.finish_region_internal(
Frame::soft(Axes::splat(Abs::zero())),
vec![],
Default::default(),
);
skipped_region = true;
}

self.footer_height = if skipped_region {
// TODO(subfooters): Consider resetting header height etc. if we skip
// region. (Maybe move that step to `finish_region_internal`.)
//
// That is unnecessary at the moment as 'prepare_footers' is only
// called at the start of the region, so header height is always zero
// and no headers were placed so far, but what about when we can have
// footers in the middle of the region? Let's think about this then.
self.current.footer_height = if skipped_region {
// Simulate the footer again; the region's 'full' might have
// changed.
self.simulate_footer(footer, &self.regions, engine, disambiguator)?
@ -118,12 +518,22 @@ impl GridLayouter<'_> {
// Ensure footer rows have their own height available.
// Won't change much as we're creating an unbreakable row group
// anyway, so this is mostly for correctness.
self.regions.size.y += self.footer_height;
self.regions.size.y += self.current.footer_height;

let repeats = self.grid.footer.as_ref().is_some_and(|f| f.repeated);
let footer_len = self.grid.rows.len() - footer.start;
self.unbreakable_rows_left += footer_len;

for y in footer.start..self.grid.rows.len() {
self.layout_row(y, engine, disambiguator)?;
self.layout_row_with_state(
y,
engine,
disambiguator,
RowState {
in_active_repeatable: repeats,
..Default::default()
},
)?;
}

Ok(())
@ -144,10 +554,18 @@ impl GridLayouter<'_> {
// in the footer will be precisely the rows in the footer.
self.simulate_unbreakable_row_group(
footer.start,
Some(self.grid.rows.len() - footer.start),
Some(footer.end - footer.start),
regions,
engine,
disambiguator,
)
}
}

/// The total amount of rows in the given list of headers.
#[inline]
pub fn total_header_row_count<'h>(
headers: impl IntoIterator<Item = &'h Header>,
) -> usize {
headers.into_iter().map(|h| h.range.end - h.range.start).sum()
}

@ -3,9 +3,8 @@ use typst_library::engine::Engine;
use typst_library::foundations::Resolve;
use typst_library::layout::grid::resolve::Repeatable;
use typst_library::layout::{Abs, Axes, Frame, Point, Region, Regions, Size, Sizing};
use typst_utils::MaybeReverseIter;

use super::layouter::{in_last_with_offset, points, Row, RowPiece};
use super::layouter::{points, Row};
use super::{layout_cell, Cell, GridLayouter};

/// All information needed to layout a single rowspan.
@ -23,6 +22,10 @@ pub struct Rowspan {
/// specified for the parent cell's `breakable` field.
pub is_effectively_unbreakable: bool,
/// The horizontal offset of this rowspan in all regions.
///
/// This is the offset from the text direction start, meaning that, on RTL
/// grids, this is the offset from the right of the grid, whereas, on LTR
/// grids, it is the offset from the left.
pub dx: Abs,
/// The vertical offset of this rowspan in the first region.
pub dy: Abs,
@ -87,10 +90,10 @@ pub struct CellMeasurementData<'layouter> {

impl GridLayouter<'_> {
/// Layout a rowspan over the already finished regions, plus the current
/// region's frame and resolved rows, if it wasn't finished yet (because
/// we're being called from `finish_region`, but note that this function is
/// also called once after all regions are finished, in which case
/// `current_region_data` is `None`).
/// region's frame and height of resolved header rows, if it wasn't
/// finished yet (because we're being called from `finish_region`, but note
/// that this function is also called once after all regions are finished,
/// in which case `current_region_data` is `None`).
///
/// We need to do this only once we already know the heights of all
/// spanned rows, which is only possible after laying out the last row
@ -98,7 +101,7 @@ impl GridLayouter<'_> {
pub fn layout_rowspan(
&mut self,
rowspan_data: Rowspan,
current_region_data: Option<(&mut Frame, &[RowPiece])>,
current_region_data: Option<(&mut Frame, Abs)>,
engine: &mut Engine,
) -> SourceResult<()> {
let Rowspan {
@ -118,10 +121,11 @@ impl GridLayouter<'_> {
// Nothing to layout.
return Ok(());
};
let first_column = self.rcols[x];
let cell = self.grid.cell(x, y).unwrap();
let width = self.cell_spanned_width(cell, x);
let dx = if self.is_rtl { dx - width + first_column } else { dx };
// In RTL cells expand to the left, thus the position
// must additionally be offset by the cell's width.
let dx = if self.is_rtl { self.width - (dx + width) } else { dx };

// Prepare regions.
let size = Size::new(width, *first_height);
@ -142,11 +146,31 @@ impl GridLayouter<'_> {

// Push the layouted frames directly into the finished frames.
let fragment = layout_cell(cell, engine, disambiguator, self.styles, pod)?;
let (current_region, current_rrows) = current_region_data.unzip();
for ((i, finished), frame) in self
let (current_region, current_header_row_height) = current_region_data.unzip();

// Clever trick to process finished header rows:
// - If there are grid headers, the vector will be filled with one
// finished header row height per region, so, chaining with the height
// for the current one, we get the header row height for each region.
//
// - But if there are no grid headers, the vector will be empty, so in
// theory the regions and resolved header row heights wouldn't match.
// But that's fine - 'current_header_row_height' can only be either
// 'Some(zero)' or 'None' in such a case, and for all other rows we
// append infinite zeros. That is, in such a case, the resolved header
// row height is always zero, so that's our fallback.
let finished_header_rows = self
.finished_header_rows
.iter()
.map(|info| info.repeated_height)
.chain(current_header_row_height)
.chain(std::iter::repeat(Abs::zero()));

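// Editor's aside (not part of the diff): a minimal, self-contained sketch of
// the iterator pattern used for `finished_header_rows` above - one value per
// finished region, then an optional value for the current region, then zeros
// forever, so zipping with any number of regions always has a fallback.
// The names and the use of plain f64 are illustrative, not from the codebase.
fn per_region_heights(
    finished: &[f64],     // assumed: one repeated-header height per finished region
    current: Option<f64>, // assumed: height for the in-progress region, if any
) -> impl Iterator<Item = f64> + '_ {
    finished
        .iter()
        .copied()
        .chain(current)
        // Infinite zero padding: regions beyond the known ones resolve to 0.0.
        .chain(std::iter::repeat(0.0))
}

// Usage sketch: the zipped iterator never runs short, even with more regions
// than recorded heights.
fn demo_per_region_heights() {
    let heights: Vec<f64> =
        per_region_heights(&[12.0, 12.0], Some(8.0)).take(5).collect();
    assert_eq!(heights, vec![12.0, 12.0, 8.0, 0.0, 0.0]);
}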
for ((i, (finished, header_dy)), frame) in self
.finished
.iter_mut()
.chain(current_region.into_iter())
.zip(finished_header_rows)
.skip(first_region)
.enumerate()
.zip(fragment)
@ -158,22 +182,9 @@ impl GridLayouter<'_> {
} else {
// The rowspan continuation starts after the header (thus,
// at a position after the sum of the laid out header
// rows).
if let Some(Repeatable::Repeated(header)) = &self.grid.header {
let header_rows = self
.rrows
.get(i)
.map(Vec::as_slice)
.or(current_rrows)
.unwrap_or(&[])
.iter()
.take_while(|row| row.y < header.end);

header_rows.map(|row| row.height).sum()
} else {
// Without a header, start at the very top of the region.
Abs::zero()
}
// rows). Without a header, this is zero, so the rowspan can
// start at the very top of the region as usual.
header_dy
};

finished.push_frame(Point::new(dx, dy), frame);
@ -185,10 +196,8 @@ impl GridLayouter<'_> {
/// Checks if a row contains the beginning of one or more rowspan cells.
/// If so, adds them to the rowspans vector.
pub fn check_for_rowspans(&mut self, disambiguator: usize, y: usize) {
// We will compute the horizontal offset of each rowspan in advance.
// For that reason, we must reverse the column order when using RTL.
let offsets = points(self.rcols.iter().copied().rev_if(self.is_rtl));
for (x, dx) in (0..self.rcols.len()).rev_if(self.is_rtl).zip(offsets) {
let offsets = points(self.rcols.iter().copied());
for (x, dx) in (0..self.rcols.len()).zip(offsets) {
let Some(cell) = self.grid.cell(x, y) else {
continue;
};
@ -229,15 +238,13 @@ impl GridLayouter<'_> {
// current row is dynamic and depends on the amount of upcoming
// unbreakable cells (with or without a rowspan setting).
let mut amount_unbreakable_rows = None;
if let Some(Repeatable::NotRepeated(header)) = &self.grid.header {
if current_row < header.end {
// Non-repeated header, so keep it unbreakable.
amount_unbreakable_rows = Some(header.end);
}
}
if let Some(Repeatable::NotRepeated(footer)) = &self.grid.footer {
if current_row >= footer.start {
if let Some(footer) = &self.grid.footer {
if !footer.repeated && current_row >= footer.start {
// Non-repeated footer, so keep it unbreakable.
//
// TODO(subfooters): This will become unnecessary
// once non-repeated footers are treated differently and
// have widow prevention.
amount_unbreakable_rows = Some(self.grid.rows.len() - footer.start);
}
}
@ -252,10 +259,7 @@ impl GridLayouter<'_> {

// Skip to fitting region.
while !self.regions.size.y.fits(row_group.height)
&& !in_last_with_offset(
self.regions,
self.header_height + self.footer_height,
)
&& self.may_progress_with_repeats()
{
self.finish_region(engine, false)?;
}
@ -394,16 +398,29 @@ impl GridLayouter<'_> {
// auto rows don't depend on the backlog, as they only span one
// region.
if breakable
&& (matches!(self.grid.header, Some(Repeatable::Repeated(_)))
|| matches!(self.grid.footer, Some(Repeatable::Repeated(_))))
&& (!self.repeating_headers.is_empty()
|| !self.pending_headers.is_empty()
|| matches!(&self.grid.footer, Some(footer) if footer.repeated))
{
// Subtract header and footer height from all upcoming regions
// when measuring the cell, including the last repeated region.
//
// This will update the 'custom_backlog' vector with the
// updated heights of the upcoming regions.
//
// We predict that header height will only include that of
// repeating headers, as we can assume non-repeating headers in
// the first region have been successfully placed, unless
// something didn't fit on the first region of the auto row,
// but we will only find that out after measurement, and if
// that happens, we discard the measurement and try again.
let mapped_regions = self.regions.map(&mut custom_backlog, |size| {
Size::new(size.x, size.y - self.header_height - self.footer_height)
Size::new(
size.x,
size.y
- self.current.repeating_header_height
- self.current.footer_height,
)
});

// Callees must use the custom backlog instead of the current
@ -457,6 +474,7 @@ impl GridLayouter<'_> {
// Height of the rowspan covered by spanned rows in the current
// region.
let laid_out_height: Abs = self
.current
.lrows
.iter()
.filter_map(|row| match row {
@ -504,7 +522,12 @@ impl GridLayouter<'_> {
.iter()
.copied()
.chain(std::iter::once(if breakable {
self.initial.y - self.header_height - self.footer_height
// Here we are calculating the available height for a
// rowspan from the top of the current region, so
// we have to use initial header heights (note that
// header height can change in the middle of the
// region).
self.current.initial_after_repeats
} else {
// When measuring unbreakable auto rows, infinite
// height is available for content to expand.
@ -516,11 +539,13 @@ impl GridLayouter<'_> {
// rowspan's already laid out heights with the current
// region's height and current backlog to ensure a good
// level of accuracy in the measurements.
let backlog = self
.regions
.backlog
.iter()
.map(|&size| size - self.header_height - self.footer_height);
//
// Assume only repeating headers will survive starting at
// the next region.
let backlog = self.regions.backlog.iter().map(|&size| {
size - self.current.repeating_header_height
- self.current.footer_height
});

heights_up_to_current_region.chain(backlog).collect::<Vec<_>>()
} else {
@ -534,10 +559,10 @@ impl GridLayouter<'_> {
height = *rowspan_height;
backlog = None;
full = rowspan_full;
last = self
.regions
.last
.map(|size| size - self.header_height - self.footer_height);
last = self.regions.last.map(|size| {
size - self.current.repeating_header_height
- self.current.footer_height
});
} else {
// The rowspan started in the current region, as its vector
// of heights in regions is currently empty.
@ -588,7 +613,7 @@ impl GridLayouter<'_> {
measurement_data: &CellMeasurementData<'_>,
) -> bool {
if sizes.len() <= 1
&& sizes.first().map_or(true, |&first_frame_size| {
&& sizes.first().is_none_or(|&first_frame_size| {
first_frame_size <= measurement_data.height_in_this_region
})
{
@ -739,10 +764,11 @@ impl GridLayouter<'_> {
simulated_regions.next();
disambiguator += 1;

// Subtract the initial header and footer height, since that's the
// height we used when subtracting from the region backlog's
// Subtract the repeating header and footer height, since that's
// the height we used when subtracting from the region backlog's
// heights while measuring cells.
simulated_regions.size.y -= self.header_height + self.footer_height;
simulated_regions.size.y -=
self.current.repeating_header_height + self.current.footer_height;
}

if let Some(original_last_resolved_size) = last_resolved_size {
@ -874,12 +900,8 @@ impl GridLayouter<'_> {
// which, when used and combined with upcoming spanned rows, covers all
// of the requested rowspan height, we give up.
for _attempt in 0..5 {
let rowspan_simulator = RowspanSimulator::new(
disambiguator,
simulated_regions,
self.header_height,
self.footer_height,
);
let rowspan_simulator =
RowspanSimulator::new(disambiguator, simulated_regions, &self.current);

let total_spanned_height = rowspan_simulator.simulate_rowspan_layout(
y,
@ -961,7 +983,8 @@ impl GridLayouter<'_> {
{
extra_amount_to_grow -= simulated_regions.size.y.max(Abs::zero());
simulated_regions.next();
simulated_regions.size.y -= self.header_height + self.footer_height;
simulated_regions.size.y -=
self.current.repeating_header_height + self.current.footer_height;
disambiguator += 1;
}
simulated_regions.size.y -= extra_amount_to_grow;
@ -978,10 +1001,17 @@ struct RowspanSimulator<'a> {
finished: usize,
/// The state of regions during the simulation.
regions: Regions<'a>,
/// The height of the header in the currently simulated region.
/// The total height of headers in the currently simulated region.
header_height: Abs,
/// The height of the footer in the currently simulated region.
/// The total height of footers in the currently simulated region.
footer_height: Abs,
/// Whether `self.regions.may_progress()` was `true` at the top of the
/// region, indicating we can progress anywhere in the current region,
/// even right after a repeated header.
could_progress_at_top: bool,
/// Available height after laying out repeated headers at the top of the
/// currently simulated region.
initial_after_repeats: Abs,
/// The total spanned height so far in the simulation.
total_spanned_height: Abs,
/// Height of the latest spanned gutter row in the simulation.
@ -995,14 +1025,19 @@ impl<'a> RowspanSimulator<'a> {
fn new(
finished: usize,
regions: Regions<'a>,
header_height: Abs,
footer_height: Abs,
current: &super::layouter::Current,
) -> Self {
Self {
finished,
regions,
header_height,
footer_height,
// There can be no new headers or footers within a multi-page
// rowspan, since headers and footers are unbreakable, so
// assuming the repeating header height and footer height
// won't change is safe.
header_height: current.repeating_header_height,
footer_height: current.footer_height,
could_progress_at_top: current.could_progress_at_top,
initial_after_repeats: current.initial_after_repeats,
total_spanned_height: Abs::zero(),
latest_spanned_gutter_height: Abs::zero(),
}
@ -1051,10 +1086,7 @@ impl<'a> RowspanSimulator<'a> {
0,
)?;
while !self.regions.size.y.fits(row_group.height)
&& !in_last_with_offset(
self.regions,
self.header_height + self.footer_height,
)
&& self.may_progress_with_repeats()
{
self.finish_region(layouter, engine)?;
}
@ -1076,10 +1108,7 @@ impl<'a> RowspanSimulator<'a> {
let mut skipped_region = false;
while unbreakable_rows_left == 0
&& !self.regions.size.y.fits(height)
&& !in_last_with_offset(
self.regions,
self.header_height + self.footer_height,
)
&& self.may_progress_with_repeats()
{
self.finish_region(layouter, engine)?;

@ -1125,17 +1154,31 @@ impl<'a> RowspanSimulator<'a> {
// our simulation checks what happens AFTER the auto row, so we can
// just use the original backlog from `self.regions`.
let disambiguator = self.finished;
let header_height =
if let Some(Repeatable::Repeated(header)) = &layouter.grid.header {
layouter
.simulate_header(header, &self.regions, engine, disambiguator)?
.height

let (repeating_headers, header_height) = if !layouter.repeating_headers.is_empty()
|| !layouter.pending_headers.is_empty()
{
// Only repeating headers have survived after the first region
// break.
let repeating_headers = layouter.repeating_headers.iter().copied().chain(
layouter.pending_headers.iter().filter_map(Repeatable::as_repeated),
);

let header_height = layouter.simulate_header_height(
repeating_headers.clone(),
&self.regions,
engine,
disambiguator,
)?;

(Some(repeating_headers), header_height)
} else {
Abs::zero()
(None, Abs::zero())
};

let footer_height =
if let Some(Repeatable::Repeated(footer)) = &layouter.grid.footer {
let footer_height = if let Some(footer) =
layouter.grid.footer.as_ref().and_then(Repeatable::as_repeated)
{
layouter
.simulate_footer(footer, &self.regions, engine, disambiguator)?
.height
@ -1154,19 +1197,24 @@ impl<'a> RowspanSimulator<'a> {
skipped_region = true;
}

if let Some(Repeatable::Repeated(header)) = &layouter.grid.header {
if let Some(repeating_headers) = repeating_headers {
self.header_height = if skipped_region {
// Simulate headers again, at the new region, as
// the full region height may change.
layouter
.simulate_header(header, &self.regions, engine, disambiguator)?
.height
layouter.simulate_header_height(
repeating_headers,
&self.regions,
engine,
disambiguator,
)?
} else {
header_height
};
}

if let Some(Repeatable::Repeated(footer)) = &layouter.grid.footer {
if let Some(footer) =
layouter.grid.footer.as_ref().and_then(Repeatable::as_repeated)
{
self.footer_height = if skipped_region {
// Simulate footers again, at the new region, as
// the full region height may change.
@ -1183,6 +1231,7 @@ impl<'a> RowspanSimulator<'a> {
// header or footer (as an invariant, any rowspans spanning any header
// or footer rows are fully contained within that header's or footer's rows).
self.regions.size.y -= self.header_height + self.footer_height;
self.initial_after_repeats = self.regions.size.y;

Ok(())
}
@ -1199,8 +1248,18 @@ impl<'a> RowspanSimulator<'a> {
self.regions.next();
self.finished += 1;

self.could_progress_at_top = self.regions.may_progress();
self.simulate_header_footer_layout(layouter, engine)
}

/// Similar to [`GridLayouter::may_progress_with_repeats`] but for rowspan
/// simulation.
#[inline]
fn may_progress_with_repeats(&self) -> bool {
self.could_progress_at_top
|| self.regions.last.is_some()
&& self.regions.size.y != self.initial_after_repeats
}
}

/// Subtracts some size from the end of a vector of sizes.

@ -1,6 +1,6 @@
use std::ffi::OsStr;

use typst_library::diag::{warning, At, SourceResult, StrResult};
use typst_library::diag::{warning, At, LoadedWithin, SourceResult, StrResult};
use typst_library::engine::Engine;
use typst_library::foundations::{Bytes, Derived, Packed, Smart, StyleChain};
use typst_library::introspection::Locator;
@ -27,17 +27,17 @@ pub fn layout_image(

// Take the format that was explicitly defined, or parse the extension,
// or try to detect the format.
let Derived { source, derived: data } = &elem.source;
let Derived { source, derived: loaded } = &elem.source;
let format = match elem.format(styles) {
Smart::Custom(v) => v,
Smart::Auto => determine_format(source, data).at(span)?,
Smart::Auto => determine_format(source, &loaded.data).at(span)?,
};

// Warn the user if the image contains a foreign object. Not perfect
// because the svg could also be encoded, but that's an edge case.
if format == ImageFormat::Vector(VectorFormat::Svg) {
let has_foreign_object =
data.as_str().is_ok_and(|s| s.contains("<foreignObject"));
memchr::memmem::find(&loaded.data, b"<foreignObject").is_some();

if has_foreign_object {
engine.sink.warn(warning!(
@ -53,7 +53,7 @@ pub fn layout_image(
let kind = match format {
ImageFormat::Raster(format) => ImageKind::Raster(
RasterImage::new(
data.clone(),
loaded.data.clone(),
format,
elem.icc(styles).as_ref().map(|icc| icc.derived.clone()),
)
@ -61,12 +61,11 @@ pub fn layout_image(
),
ImageFormat::Vector(VectorFormat::Svg) => ImageKind::Svg(
SvgImage::with_fonts(
data.clone(),
loaded.data.clone(),
engine.world,
elem.flatten_text(styles),
&families(styles).map(|f| f.as_str()).collect::<Vec<_>>(),
)
.at(span)?,
.within(loaded)?,
),
};

@ -96,6 +95,8 @@ pub fn layout_image(
} else {
// If neither is forced, take the natural image size at the image's
// DPI bounded by the available space.
//
// Division by DPI is fine since it's guaranteed to be positive.
let dpi = image.dpi().unwrap_or(Image::DEFAULT_DPI);
let natural = Axes::new(pxw, pxh).map(|v| Abs::inches(v / dpi));
Size::new(
@ -146,6 +147,7 @@ fn determine_format(source: &DataSource, data: &Bytes) -> StrResult<ImageFormat>
"jpg" | "jpeg" => return Ok(ExchangeFormat::Jpg.into()),
"gif" => return Ok(ExchangeFormat::Gif.into()),
"svg" | "svgz" => return Ok(VectorFormat::Svg.into()),
"webp" => return Ok(ExchangeFormat::Webp.into()),
_ => {}
}
}

@ -2,10 +2,8 @@ use typst_library::diag::warning;
use typst_library::foundations::{Packed, Resolve};
use typst_library::introspection::{SplitLocator, Tag, TagElem};
use typst_library::layout::{
Abs, AlignElem, BoxElem, Dir, Fr, Frame, HElem, InlineElem, InlineItem, Sizing,
Spacing,
Abs, BoxElem, Dir, Fr, Frame, HElem, InlineElem, InlineItem, Sizing, Spacing,
};
use typst_library::model::{EnumElem, ListElem, TermsElem};
use typst_library::routines::Pair;
use typst_library::text::{
is_default_ignorable, LinebreakElem, SmartQuoteElem, SmartQuoter, SmartQuotes,
@ -123,41 +121,21 @@ pub fn collect<'a>(
children: &[Pair<'a>],
engine: &mut Engine<'_>,
locator: &mut SplitLocator<'a>,
styles: StyleChain<'a>,
config: &Config,
region: Size,
situation: Option<ParSituation>,
) -> SourceResult<(String, Vec<Segment<'a>>, SpanMapper)> {
let mut collector = Collector::new(2 + children.len());
let mut quoter = SmartQuoter::new();

let outer_dir = TextElem::dir_in(styles);

if let Some(situation) = situation {
let first_line_indent = ParElem::first_line_indent_in(styles);
if !first_line_indent.amount.is_zero()
&& match situation {
// First-line indent for the first paragraph after a list bullet
// just looks bad.
ParSituation::First => first_line_indent.all && !in_list(styles),
ParSituation::Consecutive => true,
ParSituation::Other => first_line_indent.all,
}
&& AlignElem::alignment_in(styles).resolve(styles).x
== outer_dir.start().into()
{
collector.push_item(Item::Absolute(
first_line_indent.amount.resolve(styles),
false,
));
if !config.first_line_indent.is_zero() {
collector.push_item(Item::Absolute(config.first_line_indent, false));
collector.spans.push(1, Span::detached());
}

let hang = ParElem::hanging_indent_in(styles);
if !hang.is_zero() {
collector.push_item(Item::Absolute(-hang, false));
if !config.hanging_indent.is_zero() {
collector.push_item(Item::Absolute(-config.hanging_indent, false));
collector.spans.push(1, Span::detached());
}
}

for &(child, styles) in children {
let prev_len = collector.full.len();
@ -167,7 +145,7 @@ pub fn collect<'a>(
} else if let Some(elem) = child.to_packed::<TextElem>() {
collector.build_text(styles, |full| {
let dir = TextElem::dir_in(styles);
if dir != outer_dir {
if dir != config.dir {
// Insert "Explicit Directional Embedding".
match dir {
Dir::LTR => full.push_str(LTR_EMBEDDING),
@ -182,7 +160,7 @@ pub fn collect<'a>(
full.push_str(&elem.text);
}

if dir != outer_dir {
if dir != config.dir {
// Insert "Pop Directional Formatting".
full.push_str(POP_EMBEDDING);
}
@ -265,16 +243,6 @@ pub fn collect<'a>(
Ok((collector.full, collector.segments, collector.spans))
}

/// Whether we have a list ancestor.
///
/// When we support some kind of more general ancestry mechanism, this can
/// become more elegant.
fn in_list(styles: StyleChain) -> bool {
ListElem::depth_in(styles).0 > 0
|| !EnumElem::parents_in(styles).is_empty()
|| TermsElem::within_in(styles)
}

/// Collects segments.
struct Collector<'a> {
full: String,

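// Editor's aside (not part of the diff): a small sketch of the directional
// embedding trick used in `collect` above. A run whose direction differs from
// the paragraph's base direction is wrapped in a Unicode "Explicit Directional
// Embedding" character and closed with "Pop Directional Formatting", so BiDi
// analysis keeps the run together. The constants are the standard Unicode
// code points; the function and parameter names are illustrative only.
const LTR_EMBEDDING: &str = "\u{202A}"; // LEFT-TO-RIGHT EMBEDDING
const RTL_EMBEDDING: &str = "\u{202B}"; // RIGHT-TO-LEFT EMBEDDING
const POP_EMBEDDING: &str = "\u{202C}"; // POP DIRECTIONAL FORMATTING

/// Append `text` to `full`, wrapping it in an embedding when its direction
/// differs from the paragraph's base direction.
fn push_run(full: &mut String, text: &str, run_is_rtl: bool, base_is_rtl: bool) {
    if run_is_rtl != base_is_rtl {
        full.push_str(if run_is_rtl { RTL_EMBEDDING } else { LTR_EMBEDDING });
        full.push_str(text);
        full.push_str(POP_EMBEDDING);
    } else {
        full.push_str(text);
    }
}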
@ -9,7 +9,6 @@ pub fn finalize(
engine: &mut Engine,
p: &Preparation,
lines: &[Line],
styles: StyleChain,
region: Size,
expand: bool,
locator: &mut SplitLocator<'_>,
@ -19,9 +18,10 @@ pub fn finalize(
let width = if !region.x.is_finite()
|| (!expand && lines.iter().all(|line| line.fr().is_zero()))
{
region
.x
.min(p.hang + lines.iter().map(|line| line.width).max().unwrap_or_default())
region.x.min(
p.config.hanging_indent
+ lines.iter().map(|line| line.width).max().unwrap_or_default(),
)
} else {
region.x
};
@ -29,7 +29,7 @@ pub fn finalize(
// Stack the lines into one frame per region.
lines
.iter()
.map(|line| commit(engine, p, line, width, region.y, locator, styles))
.map(|line| commit(engine, p, line, width, region.y, locator))
.collect::<SourceResult<_>>()
.map(Fragment::frames)
}

@ -2,10 +2,9 @@ use std::fmt::{self, Debug, Formatter};
use std::ops::{Deref, DerefMut};

use typst_library::engine::Engine;
use typst_library::foundations::NativeElement;
use typst_library::introspection::{SplitLocator, Tag};
use typst_library::layout::{Abs, Dir, Em, Fr, Frame, FrameItem, Point};
use typst_library::model::{ParLine, ParLineMarker};
use typst_library::model::ParLineMarker;
use typst_library::text::{Lang, TextElem};
use typst_utils::Numeric;

@ -135,7 +134,7 @@ pub fn line<'a>(

// Whether the line is justified.
let justify = full.ends_with(LINE_SEPARATOR)
|| (p.justify && breakpoint != Breakpoint::Mandatory);
|| (p.config.justify && breakpoint != Breakpoint::Mandatory);

// Process dashes.
let dash = if breakpoint.is_hyphen() || full.ends_with(SHY) {
@ -155,16 +154,16 @@ pub fn line<'a>(
let mut items = collect_items(engine, p, range, trim);

// Add a hyphen at the line start, if a previous dash should be repeated.
if pred.map_or(false, |pred| should_repeat_hyphen(pred, full)) {
if pred.is_some_and(|pred| should_repeat_hyphen(pred, full)) {
if let Some(shaped) = items.first_text_mut() {
shaped.prepend_hyphen(engine, p.fallback);
shaped.prepend_hyphen(engine, p.config.fallback);
}
}

// Add a hyphen at the line end, if we ended on a soft hyphen.
if dash == Some(Dash::Soft) {
if let Some(shaped) = items.last_text_mut() {
shaped.push_hyphen(engine, p.fallback);
shaped.push_hyphen(engine, p.config.fallback);
}
}

@ -234,13 +233,13 @@
where
{
// If there is nothing bidirectional going on, skip reordering.
let Some(bidi) = &p.bidi else {
f(range, p.dir == Dir::RTL);
f(range, p.config.dir == Dir::RTL);
return;
};

// The bidi crate panics for empty lines.
if range.is_empty() {
f(range, p.dir == Dir::RTL);
f(range, p.config.dir == Dir::RTL);
return;
}

@ -308,13 +307,13 @@ fn collect_range<'a>(
/// punctuation marks at line start or line end.
fn adjust_cj_at_line_boundaries(p: &Preparation, text: &str, items: &mut Items) {
if text.starts_with(BEGIN_PUNCT_PAT)
|| (p.cjk_latin_spacing && text.starts_with(is_of_cj_script))
|| (p.config.cjk_latin_spacing && text.starts_with(is_of_cj_script))
{
adjust_cj_at_line_start(p, items);
}

if text.ends_with(END_PUNCT_PAT)
|| (p.cjk_latin_spacing && text.ends_with(is_of_cj_script))
|| (p.config.cjk_latin_spacing && text.ends_with(is_of_cj_script))
{
adjust_cj_at_line_end(p, items);
}
@ -332,7 +331,10 @@ fn adjust_cj_at_line_start(p: &Preparation, items: &mut Items) {
let shrink = glyph.shrinkability().0;
glyph.shrink_left(shrink);
shaped.width -= shrink.at(shaped.size);
} else if p.cjk_latin_spacing && glyph.is_cj_script() && glyph.x_offset > Em::zero() {
} else if p.config.cjk_latin_spacing
&& glyph.is_cj_script()
&& glyph.x_offset > Em::zero()
{
// If the first glyph is a CJK character adjusted by
// [`add_cjk_latin_spacing`], restore the original width.
let glyph = shaped.glyphs.to_mut().first_mut().unwrap();
@ -359,7 +361,7 @@ fn adjust_cj_at_line_end(p: &Preparation, items: &mut Items) {
let punct = shaped.glyphs.to_mut().last_mut().unwrap();
punct.shrink_right(shrink);
shaped.width -= shrink.at(shaped.size);
} else if p.cjk_latin_spacing
} else if p.config.cjk_latin_spacing
&& glyph.is_cj_script()
&& (glyph.x_advance - glyph.x_offset) > Em::one()
{
@ -404,7 +406,7 @@ fn should_repeat_hyphen(pred_line: &Line, text: &str) -> bool {
//
// See § 4.1.1.1.2.e on the "Ortografía de la lengua española"
// https://www.rae.es/ortografía/como-signo-de-división-de-palabras-a-final-de-línea
Lang::SPANISH => text.chars().next().map_or(false, |c| !c.is_uppercase()),
Lang::SPANISH => text.chars().next().is_some_and(|c| !c.is_uppercase()),

_ => false,
}
@ -424,16 +426,15 @@ pub fn commit(
width: Abs,
full: Abs,
locator: &mut SplitLocator<'_>,
styles: StyleChain,
) -> SourceResult<Frame> {
let mut remaining = width - line.width - p.hang;
let mut remaining = width - line.width - p.config.hanging_indent;
let mut offset = Abs::zero();

// We always build the line from left to right. In an LTR paragraph, we must
// thus add the hanging indent to the offset. In an RTL paragraph, the
// hanging indent arises naturally due to the line width.
if p.dir == Dir::LTR {
offset += p.hang;
if p.config.dir == Dir::LTR {
offset += p.config.hanging_indent;
}

// Handle hanging punctuation to the left.
@ -554,11 +555,13 @@ pub fn commit(
let mut output = Frame::soft(size);
output.set_baseline(top);

add_par_line_marker(&mut output, styles, engine, locator, top);
if let Some(marker) = &p.config.numbering_marker {
add_par_line_marker(&mut output, marker, engine, locator, top);
}

// Construct the line's frame.
for (offset, frame) in frames {
let x = offset + p.align.position(remaining);
let x = offset + p.config.align.position(remaining);
let y = top - frame.baseline();
output.push_frame(Point::new(x, y), frame);
}
@ -575,26 +578,18 @@ pub fn commit(
/// number in the margin, is aligned to the line's baseline.
fn add_par_line_marker(
output: &mut Frame,
styles: StyleChain,
marker: &Packed<ParLineMarker>,
engine: &mut Engine,
locator: &mut SplitLocator,
top: Abs,
) {
let Some(numbering) = ParLine::numbering_in(styles) else { return };
let margin = ParLine::number_margin_in(styles);
let align = ParLine::number_align_in(styles);

// Delay resolving the number clearance until line numbers are laid out to
// avoid inconsistent spacing depending on varying font size.
let clearance = ParLine::number_clearance_in(styles);

// Elements in tags must have a location for introspection to work. We do
// the work here instead of going through all of the realization process
// just for this, given we don't need to actually place the marker as we
// manually search for it in the frame later (when building a root flow,
// where line numbers can be displayed), so we just need it to be in a tag
// and to be valid (to have a location).
let mut marker = ParLineMarker::new(numbering, align, margin, clearance).pack();
let mut marker = marker.clone();
let key = typst_utils::hash128(&marker);
let loc = locator.next_location(engine.introspector, key);
marker.set_location(loc);
@ -606,7 +601,7 @@ fn add_par_line_marker(
// line's general baseline. However, the line number will still need to
// manually adjust its own 'y' position based on its own baseline.
let pos = Point::with_y(top);
output.push(pos, FrameItem::Tag(Tag::Start(marker)));
output.push(pos, FrameItem::Tag(Tag::Start(marker.pack())));
output.push(pos, FrameItem::Tag(Tag::End(loc, key)));
}

@ -110,15 +110,7 @@ pub fn linebreak<'a>(
p: &'a Preparation<'a>,
width: Abs,
) -> Vec<Line<'a>> {
let linebreaks = p.linebreaks.unwrap_or_else(|| {
if p.justify {
Linebreaks::Optimized
} else {
Linebreaks::Simple
}
});

match linebreaks {
match p.config.linebreaks {
Linebreaks::Simple => linebreak_simple(engine, p, width),
Linebreaks::Optimized => linebreak_optimized(engine, p, width),
}
@ -298,7 +290,7 @@ fn linebreak_optimized_bounded<'a>(
}

// If this attempt is better than what we had before, take it!
if best.as_ref().map_or(true, |best| best.total >= total) {
if best.as_ref().is_none_or(|best| best.total >= total) {
best = Some(Entry { pred: pred_index, total, line: attempt, end });
}
}
@ -384,7 +376,7 @@ fn linebreak_optimized_approximate(

// Whether the line is justified. This is not 100% accurate w.r.t
// to line()'s behaviour, but good enough.
let justify = p.justify && breakpoint != Breakpoint::Mandatory;
let justify = p.config.justify && breakpoint != Breakpoint::Mandatory;

// We don't really know whether the line naturally ends with a dash
// here, so we can miss that case, but it's ok, since all of this
@ -431,7 +423,7 @@ fn linebreak_optimized_approximate(
let total = pred.total + line_cost;

// If this attempt is better than what we had before, take it!
if best.as_ref().map_or(true, |best| best.total >= total) {
if best.as_ref().is_none_or(|best| best.total >= total) {
best = Some(Entry {
pred: pred_index,
total,
@ -573,7 +565,7 @@ fn raw_ratio(
// calculate the extra amount. Also, don't divide by zero.
let extra_stretch = (delta - adjustability) / justifiables.max(1) as f64;
// Normalize the amount by half the em size.
ratio = 1.0 + extra_stretch / (p.size / 2.0);
ratio = 1.0 + extra_stretch / (p.config.font_size / 2.0);
}

// The min value must be < MIN_RATIO, but how much smaller doesn't matter
@ -663,9 +655,9 @@ fn breakpoints(p: &Preparation, mut f: impl FnMut(usize, Breakpoint)) {
return;
}

let hyphenate = p.hyphenate != Some(false);
let hyphenate = p.config.hyphenate != Some(false);
let lb = LINEBREAK_DATA.as_borrowed();
let segmenter = match p.lang {
let segmenter = match p.config.lang {
Some(Lang::CHINESE | Lang::JAPANESE) => &CJ_SEGMENTER,
_ => &SEGMENTER,
};
@ -698,13 +690,34 @@ fn breakpoints(p: &Preparation, mut f: impl FnMut(usize, Breakpoint)) {
let breakpoint = if point == text.len() {
Breakpoint::Mandatory
} else {
const OBJ_REPLACE: char = '\u{FFFC}';
match lb.get(c) {
// Fix for: https://github.com/unicode-org/icu4x/issues/4146
LineBreak::Glue | LineBreak::WordJoiner | LineBreak::ZWJ => continue,
LineBreak::MandatoryBreak
| LineBreak::CarriageReturn
| LineBreak::LineFeed
| LineBreak::NextLine => Breakpoint::Mandatory,

// https://github.com/typst/typst/issues/5489
//
// OBJECT-REPLACEMENT-CHARACTERs provide Contingent Break
// opportunities before and after by default. This behaviour
// is however tailorable, see:
// https://www.unicode.org/reports/tr14/#CB
// https://www.unicode.org/reports/tr14/#TailorableBreakingRules
// https://www.unicode.org/reports/tr14/#LB20
//
// Don't provide a line breaking opportunity between a LTR-
// ISOLATE (or any other Combining Mark) and an OBJECT-
// REPLACEMENT-CHARACTER representing an inline item, if the
// LTR-ISOLATE could end up as the only character on the
// previous line.
LineBreak::CombiningMark
if text[point..].starts_with(OBJ_REPLACE)
&& last + c.len_utf8() == point =>
{
continue;
}

_ => Breakpoint::Normal,
}
};
@ -830,18 +843,18 @@ fn linebreak_link(link: &str, mut f: impl FnMut(usize)) {

/// Whether hyphenation is enabled at the given offset.
fn hyphenate_at(p: &Preparation, offset: usize) -> bool {
p.hyphenate
.or_else(|| {
p.config.hyphenate.unwrap_or_else(|| {
let (_, item) = p.get(offset);
let styles = item.text()?.styles;
Some(TextElem::hyphenate_in(styles))
match item.text() {
Some(text) => TextElem::hyphenate_in(text.styles).unwrap_or(p.config.justify),
None => false,
}
})
.unwrap_or(false)
}

/// The text language at the given offset.
fn lang_at(p: &Preparation, offset: usize) -> Option<hypher::Lang> {
let lang = p.lang.or_else(|| {
let lang = p.config.lang.or_else(|| {
let (_, item) = p.get(offset);
let styles = item.text()?.styles;
Some(TextElem::lang_in(styles))
@ -865,13 +878,13 @@ impl CostMetrics {
fn compute(p: &Preparation) -> Self {
Self {
// When justifying, we may stretch spaces below their natural width.
min_ratio: if p.justify { MIN_RATIO } else { 0.0 },
min_approx_ratio: if p.justify { MIN_APPROX_RATIO } else { 0.0 },
min_ratio: if p.config.justify { MIN_RATIO } else { 0.0 },
min_approx_ratio: if p.config.justify { MIN_APPROX_RATIO } else { 0.0 },
// Approximate hyphen width for estimates.
approx_hyphen_width: Em::new(0.33).at(p.size),
approx_hyphen_width: Em::new(0.33).at(p.config.font_size),
// Costs.
hyph_cost: DEFAULT_HYPH_COST * p.costs.hyphenation().get(),
runt_cost: DEFAULT_RUNT_COST * p.costs.runt().get(),
hyph_cost: DEFAULT_HYPH_COST * p.config.costs.hyphenation().get(),
runt_cost: DEFAULT_RUNT_COST * p.config.costs.runt().get(),
}
}

@ -9,16 +9,22 @@ mod prepare;
mod shaping;

pub use self::box_::layout_box;
pub use self::shaping::create_shape_plan;

use comemo::{Track, Tracked, TrackedMut};
use typst_library::diag::SourceResult;
use typst_library::engine::{Engine, Route, Sink, Traced};
use typst_library::foundations::{Packed, StyleChain};
use typst_library::foundations::{Packed, Resolve, Smart, StyleChain};
use typst_library::introspection::{Introspector, Locator, LocatorLink, SplitLocator};
use typst_library::layout::{Fragment, Size};
use typst_library::model::ParElem;
use typst_library::layout::{Abs, AlignElem, Dir, FixedAlignment, Fragment, Size};
use typst_library::model::{
EnumElem, FirstLineIndent, Linebreaks, ListElem, ParElem, ParLine, ParLineMarker,
TermsElem,
};
use typst_library::routines::{Arenas, Pair, RealizationKind, Routines};
use typst_library::text::{Costs, Lang, TextElem};
use typst_library::World;
use typst_utils::{Numeric, SliceExt};

use self::collect::{collect, Item, Segment, SpanMapper};
use self::deco::decorate;
@ -98,7 +104,7 @@ fn layout_par_impl(
styles,
)?;

layout_inline(
layout_inline_impl(
&mut engine,
&children,
&mut locator,
@ -106,33 +112,134 @@ fn layout_par_impl(
region,
expand,
Some(situation),
&ConfigBase {
justify: elem.justify(styles),
linebreaks: elem.linebreaks(styles),
first_line_indent: elem.first_line_indent(styles),
hanging_indent: elem.hanging_indent(styles),
},
)
}

/// Lays out realized content with inline layout.
#[allow(clippy::too_many_arguments)]
pub fn layout_inline<'a>(
engine: &mut Engine,
children: &[Pair<'a>],
locator: &mut SplitLocator<'a>,
styles: StyleChain<'a>,
shared: StyleChain<'a>,
region: Size,
expand: bool,
) -> SourceResult<Fragment> {
layout_inline_impl(
engine,
children,
locator,
shared,
region,
expand,
None,
&ConfigBase {
justify: ParElem::justify_in(shared),
linebreaks: ParElem::linebreaks_in(shared),
first_line_indent: ParElem::first_line_indent_in(shared),
hanging_indent: ParElem::hanging_indent_in(shared),
},
)
}

/// The internal implementation of [`layout_inline`].
#[allow(clippy::too_many_arguments)]
fn layout_inline_impl<'a>(
engine: &mut Engine,
children: &[Pair<'a>],
locator: &mut SplitLocator<'a>,
shared: StyleChain<'a>,
region: Size,
expand: bool,
par: Option<ParSituation>,
base: &ConfigBase,
) -> SourceResult<Fragment> {
// Prepare configuration that is shared across the whole inline layout.
let config = configuration(base, children, shared, par);

// Collect all text into one string for BiDi analysis.
let (text, segments, spans) =
collect(children, engine, locator, styles, region, par)?;
let (text, segments, spans) = collect(children, engine, locator, &config, region)?;

// Perform BiDi analysis and performs some preparation steps before we
// proceed to line breaking.
let p = prepare(engine, children, &text, segments, spans, styles, par)?;
let p = prepare(engine, &config, &text, segments, spans)?;

// Break the text into lines.
let lines = linebreak(engine, &p, region.x - p.hang);
let lines = linebreak(engine, &p, region.x - config.hanging_indent);

// Turn the selected lines into frames.
finalize(engine, &p, &lines, styles, region, expand, locator)
finalize(engine, &p, &lines, region, expand, locator)
}

/// Determine the inline layout's configuration.
fn configuration(
base: &ConfigBase,
children: &[Pair],
shared: StyleChain,
situation: Option<ParSituation>,
) -> Config {
let justify = base.justify;
let font_size = TextElem::size_in(shared);
let dir = TextElem::dir_in(shared);

Config {
justify,
linebreaks: base.linebreaks.unwrap_or_else(|| {
if justify {
Linebreaks::Optimized
} else {
Linebreaks::Simple
}
}),
first_line_indent: {
let FirstLineIndent { amount, all } = base.first_line_indent;
if !amount.is_zero()
&& match situation {
// First-line indent for the first paragraph after a list
// bullet just looks bad.
Some(ParSituation::First) => all && !in_list(shared),
Some(ParSituation::Consecutive) => true,
Some(ParSituation::Other) => all,
None => false,
}
&& AlignElem::alignment_in(shared).resolve(shared).x == dir.start().into()
{
amount.at(font_size)
} else {
Abs::zero()
}
},
hanging_indent: if situation.is_some() {
base.hanging_indent
} else {
Abs::zero()
},
numbering_marker: ParLine::numbering_in(shared).map(|numbering| {
Packed::new(ParLineMarker::new(
numbering,
ParLine::number_align_in(shared),
ParLine::number_margin_in(shared),
// Delay resolving the number clearance until line numbers are
// laid out to avoid inconsistent spacing depending on varying
// font size.
ParLine::number_clearance_in(shared),
))
}),
align: AlignElem::alignment_in(shared).fix(dir).x,
font_size,
dir,
hyphenate: shared_get(children, shared, TextElem::hyphenate_in)
.map(|uniform| uniform.unwrap_or(justify)),
lang: shared_get(children, shared, TextElem::lang_in),
fallback: TextElem::fallback_in(shared),
cjk_latin_spacing: TextElem::cjk_latin_spacing_in(shared).is_auto(),
costs: TextElem::costs_in(shared),
}
}

/// Distinguishes between a few different kinds of paragraphs.
@ -148,3 +255,66 @@ pub enum ParSituation {
/// Any other kind of paragraph.
Other,
}

/// Raw values from a `ParElem` or style chain. Used to initialize a [`Config`].
struct ConfigBase {
justify: bool,
linebreaks: Smart<Linebreaks>,
first_line_indent: FirstLineIndent,
hanging_indent: Abs,
}

/// Shared configuration for the whole inline layout.
struct Config {
/// Whether to justify text.
justify: bool,
/// How to determine line breaks.
linebreaks: Linebreaks,
/// The indent the first line of a paragraph should have.
first_line_indent: Abs,
/// The indent that all but the first line of a paragraph should have.
hanging_indent: Abs,
/// Configuration for line numbering.
numbering_marker: Option<Packed<ParLineMarker>>,
/// The resolved horizontal alignment.
align: FixedAlignment,
/// The text size.
font_size: Abs,
/// The dominant direction.
dir: Dir,
/// A uniform hyphenation setting (only `Some(_)` if it's the same for all
/// children, otherwise `None`).
hyphenate: Option<bool>,
/// The text language (only `Some(_)` if it's the same for all
/// children, otherwise `None`).
lang: Option<Lang>,
/// Whether font fallback is enabled.
fallback: bool,
/// Whether to add spacing between CJK and Latin characters.
cjk_latin_spacing: bool,
/// Costs for various layout decisions.
costs: Costs,
}

/// Get a style property, but only if it is the same for all of the children.
fn shared_get<T: PartialEq>(
children: &[Pair],
styles: StyleChain<'_>,
getter: fn(StyleChain) -> T,
) -> Option<T> {
let value = getter(styles);
children
.group_by_key(|&(_, s)| s)
.all(|(s, _)| getter(s) == value)
.then_some(value)
}

/// Whether we have a list ancestor.
///
/// When we support some kind of more general ancestry mechanism, this can
/// become more elegant.
fn in_list(styles: StyleChain) -> bool {
ListElem::depth_in(styles).0 > 0
|| !EnumElem::parents_in(styles).is_empty()
|| TermsElem::within_in(styles)
}

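// Editor's aside (not part of the diff): `shared_get` above hoists a style
// property into the shared config only when every child resolves it to the
// same value. A generic standalone version of that idea, with illustrative
// names:
fn uniform<T: PartialEq, I: IntoIterator<Item = T>>(values: I) -> Option<T> {
    let mut iter = values.into_iter();
    let first = iter.next()?;
    // Keep the value only if every remaining element equals the first one.
    iter.all(|v| v == first).then_some(first)
}

// Usage sketch: a uniform language can be decided once per paragraph,
// a mixed one must stay per-item.
fn demo_uniform() {
    assert_eq!(uniform(["en", "en", "en"]), Some("en"));
    assert_eq!(uniform(["en", "de"]), None);
}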
@ -1,9 +1,4 @@
use typst_library::foundations::{Resolve, Smart};
use typst_library::layout::{Abs, AlignElem, Dir, Em, FixedAlignment};
use typst_library::model::Linebreaks;
use typst_library::routines::Pair;
use typst_library::text::{Costs, Lang, TextElem};
use typst_utils::SliceExt;
use typst_library::layout::{Dir, Em};
use unicode_bidi::{BidiInfo, Level as BidiLevel};

use super::*;
@ -17,6 +12,8 @@ use super::*;
pub struct Preparation<'a> {
/// The full text.
pub text: &'a str,
/// Configuration for inline layout.
pub config: &'a Config,
/// Bidirectional text embedding levels.
///
/// This is `None` if all text directions are uniform (all the base
@ -28,28 +25,6 @@ pub struct Preparation<'a> {
pub indices: Vec<usize>,
/// The span mapper.
pub spans: SpanMapper,
/// Whether to hyphenate if it's the same for all children.
pub hyphenate: Option<bool>,
/// Costs for various layout decisions.
pub costs: Costs,
/// The dominant direction.
pub dir: Dir,
/// The text language if it's the same for all children.
pub lang: Option<Lang>,
/// The resolved horizontal alignment.
pub align: FixedAlignment,
/// Whether to justify text.
pub justify: bool,
/// Hanging indent to apply.
pub hang: Abs,
/// Whether to add spacing between CJK and Latin characters.
pub cjk_latin_spacing: bool,
/// Whether font fallback is enabled.
pub fallback: bool,
/// How to determine line breaks.
pub linebreaks: Smart<Linebreaks>,
/// The text size.
pub size: Abs,
}

impl<'a> Preparation<'a> {
@ -80,15 +55,12 @@ impl<'a> Preparation<'a> {
#[typst_macros::time]
pub fn prepare<'a>(
engine: &mut Engine,
children: &[Pair<'a>],
config: &'a Config,
text: &'a str,
segments: Vec<Segment<'a>>,
spans: SpanMapper,
styles: StyleChain<'a>,
situation: Option<ParSituation>,
) -> SourceResult<Preparation<'a>> {
let dir = TextElem::dir_in(styles);
let default_level = match dir {
let default_level = match config.dir {
Dir::RTL => BidiLevel::rtl(),
_ => BidiLevel::ltr(),
};
@ -124,51 +96,20 @@ pub fn prepare<'a>(
indices.extend(range.clone().map(|_| i));
}

let cjk_latin_spacing = TextElem::cjk_latin_spacing_in(styles).is_auto();
if cjk_latin_spacing {
if config.cjk_latin_spacing {
add_cjk_latin_spacing(&mut items);
}

// Only apply hanging indent to real paragraphs.
let hang = if situation.is_some() {
ParElem::hanging_indent_in(styles)
} else {
Abs::zero()
};

Ok(Preparation {
config,
text,
bidi: is_bidi.then_some(bidi),
items,
indices,
spans,
hyphenate: shared_get(children, styles, TextElem::hyphenate_in),
costs: TextElem::costs_in(styles),
dir,
lang: shared_get(children, styles, TextElem::lang_in),
align: AlignElem::alignment_in(styles).resolve(styles).x,
justify: ParElem::justify_in(styles),
hang,
cjk_latin_spacing,
fallback: TextElem::fallback_in(styles),
linebreaks: ParElem::linebreaks_in(styles),
size: TextElem::size_in(styles),
})
}

/// Get a style property, but only if it is the same for all of the children.
fn shared_get<T: PartialEq>(
children: &[Pair],
styles: StyleChain<'_>,
getter: fn(StyleChain) -> T,
) -> Option<T> {
let value = getter(styles);
children
.group_by_key(|&(_, s)| s)
.all(|(s, _)| getter(s) == value)
.then_some(value)
}

/// Add some spacing between Han characters and western characters. See
/// Requirements for Chinese Text Layout, Section 3.2.2 Mixed Text Composition
/// in Horizontal Written Mode

@ -1,18 +1,16 @@
|
||||
use std::borrow::Cow;
|
||||
use std::fmt::{self, Debug, Formatter};
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use az::SaturatingAs;
|
||||
use ecow::EcoString;
|
||||
use rustybuzz::{BufferFlags, ShapePlan, UnicodeBuffer};
|
||||
use ttf_parser::Tag;
|
||||
use typst_library::engine::Engine;
|
||||
use typst_library::foundations::{Smart, StyleChain};
|
||||
use typst_library::layout::{Abs, Dir, Em, Frame, FrameItem, Point, Size};
|
||||
use typst_library::text::{
|
||||
families, features, is_default_ignorable, variant, Font, FontFamily, FontVariant,
|
||||
Glyph, Lang, Region, TextEdgeBounds, TextElem, TextItem,
|
||||
families, features, is_default_ignorable, language, variant, Font, FontFamily,
|
||||
FontVariant, Glyph, Lang, Region, TextEdgeBounds, TextElem, TextItem,
|
||||
};
|
||||
use typst_library::World;
|
||||
use typst_utils::SliceExt;
|
||||
@ -20,7 +18,7 @@ use unicode_bidi::{BidiInfo, Level as BidiLevel};
|
||||
use unicode_script::{Script, UnicodeScript};
|
||||
|
||||
use super::{decorate, Item, Range, SpanMapper};
|
||||
use crate::modifiers::{FrameModifiers, FrameModify};
|
||||
use crate::modifiers::FrameModifyText;
|
||||
|
||||
/// The result of shaping text.
|
||||
///
|
||||
@ -295,6 +293,8 @@ impl<'a> ShapedText<'a> {
|
||||
+ justification_left
|
||||
+ justification_right,
|
||||
x_offset: shaped.x_offset + justification_left,
|
||||
y_advance: Em::zero(),
|
||||
y_offset: Em::zero(),
|
||||
range: (shaped.range.start - range.start).saturating_as()
|
||||
..(shaped.range.end - range.start).saturating_as(),
|
||||
span,
|
||||
@ -327,7 +327,7 @@ impl<'a> ShapedText<'a> {
|
||||
offset += width;
|
||||
}
|
||||
|
||||
frame.modify(&FrameModifiers::get_in(self.styles));
|
||||
frame.modify_text(self.styles);
|
||||
frame
|
||||
}
|
||||
|
||||
@ -465,7 +465,7 @@ impl<'a> ShapedText<'a> {
|
||||
None
|
||||
};
|
||||
let mut chain = families(self.styles)
|
||||
.filter(|family| family.covers().map_or(true, |c| c.is_match("-")))
|
||||
.filter(|family| family.covers().is_none_or(|c| c.is_match("-")))
|
||||
.map(|family| book.select(family.as_str(), self.variant))
|
||||
.chain(fallback_func.iter().map(|f| f()))
|
||||
.flatten();
|
||||
@ -570,7 +570,7 @@ impl<'a> ShapedText<'a> {
|
||||
// for the next line.
|
||||
let dec = if ltr { usize::checked_sub } else { usize::checked_add };
|
||||
while let Some(next) = dec(idx, 1) {
|
||||
if self.glyphs.get(next).map_or(true, |g| g.range.start != text_index) {
|
||||
if self.glyphs.get(next).is_none_or(|g| g.range.start != text_index) {
|
||||
break;
|
||||
}
|
||||
idx = next;
|
||||
@ -812,7 +812,7 @@ fn shape_segment<'a>(
|
||||
.nth(1)
|
||||
.map(|(i, _)| offset + i)
|
||||
.unwrap_or(text.len());
|
||||
covers.map_or(true, |cov| cov.is_match(&text[offset..end]))
|
||||
covers.is_none_or(|cov| cov.is_match(&text[offset..end]))
|
||||
};
|
||||
|
||||
// Collect the shaped glyphs, doing fallback and shaping parts again with
|
||||
@ -824,12 +824,42 @@ fn shape_segment<'a>(
|
||||
|
||||
        // Add the glyph to the shaped output.
        if info.glyph_id != 0 && is_covered(cluster) {
            // Determine the text range of the glyph.
            // Assume we have the following sequence of (glyph_id, cluster):
            // [(120, 0), (80, 0), (3, 3), (755, 4), (69, 4), (424, 13),
            // (63, 13), (193, 25), (80, 25), (3, 31)]
            //
            // We then want the sequence of (glyph_id, text_range) to look as follows:
            // [(120, 0..3), (80, 0..3), (3, 3..4), (755, 4..13), (69, 4..13),
            // (424, 13..25), (63, 13..25), (193, 25..31), (80, 25..31), (3, 31..x)]
            //
            // Each glyph in the same cluster should be assigned the full text
            // range. This is necessary because only this way krilla can
            // properly assign `ActualText` attributes in complex shaping
            // scenarios.

            // The start of the glyph's text range.
            let start = base + cluster;
            let end = base
                + if ltr { i.checked_add(1) } else { i.checked_sub(1) }
                    .and_then(|last| infos.get(last))
                    .map_or(text.len(), |info| info.cluster as usize);

            // Determine the end of the glyph's text range.
            let mut k = i;
            let step: isize = if ltr { 1 } else { -1 };
            let end = loop {
                // If we've reached the end of the glyphs, the `end` of the
                // range should be the end of the full text.
                let Some((next, next_info)) = k
                    .checked_add_signed(step)
                    .and_then(|n| infos.get(n).map(|info| (n, info)))
                else {
                    break base + text.len();
                };

                // If the cluster doesn't match anymore, we've reached the end.
                if next_info.cluster != info.cluster {
                    break base + next_info.cluster as usize;
                }

                k = next;
            };

            let c = text[cluster..].chars().next().unwrap();
            let script = c.script();
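To make the range assignment described in the comment concrete, here is a small self-contained sketch that reproduces the mapping for left-to-right text, using plain `(glyph_id, cluster)` tuples instead of rustybuzz's glyph infos (so the types and names are illustrative only).

/// For each (glyph_id, cluster) pair, compute the text range of its cluster:
/// from the cluster start to the next distinct cluster start, or to
/// `text_len` for the final cluster. Every glyph in a cluster gets the full
/// range, matching the comment above.
fn cluster_ranges(glyphs: &[(u32, usize)], text_len: usize) -> Vec<(u32, std::ops::Range<usize>)> {
    glyphs
        .iter()
        .enumerate()
        .map(|(i, &(id, cluster))| {
            let end = glyphs[i..]
                .iter()
                .find(|&&(_, c)| c != cluster)
                .map_or(text_len, |&(_, c)| c);
            (id, cluster..end)
        })
        .collect()
}

fn main() {
    let glyphs = [(120, 0), (80, 0), (3, 3), (755, 4), (69, 4), (424, 13)];
    let ranges = cluster_ranges(&glyphs, 25);
    assert_eq!(ranges[0], (120, 0..3));
    assert_eq!(ranges[1], (80, 0..3));
    assert_eq!(ranges[2], (3, 3..4));
    assert_eq!(ranges[3], (755, 4..13));
    assert_eq!(ranges[5], (424, 13..25));
}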
|
||||
@ -904,7 +934,7 @@ fn shape_segment<'a>(
|
||||
|
||||
/// Create a shape plan.
|
||||
#[comemo::memoize]
|
||||
fn create_shape_plan(
|
||||
pub fn create_shape_plan(
|
||||
font: &Font,
|
||||
direction: rustybuzz::Direction,
|
||||
script: rustybuzz::Script,
|
||||
@ -922,7 +952,7 @@ fn create_shape_plan(
|
||||
|
||||
/// Shape the text with tofus from the given font.
|
||||
fn shape_tofus(ctx: &mut ShapingContext, base: usize, text: &str, font: Font) {
|
||||
let x_advance = font.advance(0).unwrap_or_default();
|
||||
let x_advance = font.x_advance(0).unwrap_or_default();
|
||||
let add_glyph = |(cluster, c): (usize, char)| {
|
||||
let start = base + cluster;
|
||||
let end = start + c.len_utf8();
|
||||
@ -1014,20 +1044,8 @@ fn calculate_adjustability(ctx: &mut ShapingContext, lang: Lang, region: Option<
|
||||
|
||||
/// Difference between non-breaking and normal space.
fn nbsp_delta(font: &Font) -> Option<Em> {
    let space = font.ttf().glyph_index(' ')?.0;
    let nbsp = font.ttf().glyph_index('\u{00A0}')?.0;
    Some(font.advance(nbsp)? - font.advance(space)?)
}

/// Process the language and region of a style chain into a
/// rustybuzz-compatible BCP 47 language.
fn language(styles: StyleChain) -> rustybuzz::Language {
    let mut bcp: EcoString = TextElem::lang_in(styles).as_str().into();
    if let Some(region) = TextElem::region_in(styles) {
        bcp.push('-');
        bcp.push_str(region.as_str());
    }
    rustybuzz::Language::from_str(&bcp).unwrap()
    Some(font.x_advance(nbsp)? - font.space_width()?)
}
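For reference, a small sketch of the tag construction this `language` helper performs, written with a plain `String` instead of `EcoString` and without the rustybuzz dependency; it only shows the joining rule, not the real API.

/// Builds a BCP 47 language tag from a language code and an optional region,
/// mirroring the `lang` + `region` combination used for shaping.
fn bcp47(lang: &str, region: Option<&str>) -> String {
    let mut tag = lang.to_string();
    if let Some(region) = region {
        tag.push('-');
        tag.push_str(region);
    }
    tag
}

fn main() {
    assert_eq!(bcp47("en", None), "en");
    assert_eq!(bcp47("zh", Some("TW")), "zh-TW");
}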
|
||||
|
||||
/// Returns true if all glyphs in `glyphs` have ranges within the range `range`.
|
||||
|
@@ -96,9 +96,13 @@ pub fn layout_enum(

    let mut cells = vec![];
    let mut locator = locator.split();
    let mut number =
        elem.start(styles)
            .unwrap_or_else(|| if reversed { elem.children.len() } else { 1 });
    let mut number = elem.start(styles).unwrap_or_else(|| {
        if reversed {
            elem.children.len() as u64
        } else {
            1
        }
    });
    let mut parents = EnumElem::parents_in(styles);

    let full = elem.full(styles);
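The start-value rule above is simple but easy to get backwards, so here is a hedged standalone sketch of it with a plain item count standing in for `elem.children.len()`; function and parameter names are illustrative.

/// First number of an enumeration: an explicit start wins; otherwise a
/// reversed list counts down from the number of items, a normal one from 1.
fn first_number(start: Option<u64>, reversed: bool, len: u64) -> u64 {
    start.unwrap_or(if reversed { len } else { 1 })
}

fn main() {
    assert_eq!(first_number(None, false, 3), 1); // 1, 2, 3
    assert_eq!(first_number(None, true, 3), 3); // 3, 2, 1
    assert_eq!(first_number(Some(5), true, 3), 5); // explicit start wins
}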
|
||||
|
@ -1,9 +1,12 @@
|
||||
use typst_library::diag::SourceResult;
|
||||
use typst_library::foundations::{Packed, StyleChain};
|
||||
use typst_library::layout::{Em, Frame, Point, Size};
|
||||
use typst_library::math::{Accent, AccentElem};
|
||||
use typst_library::math::AccentElem;
|
||||
|
||||
use super::{style_cramped, FrameFragment, GlyphFragment, MathContext, MathFragment};
|
||||
use super::{
|
||||
style_cramped, style_dtls, style_flac, FrameFragment, GlyphFragment, MathContext,
|
||||
MathFragment,
|
||||
};
|
||||
|
||||
/// How much the accent can be shorter than the base.
|
||||
const ACCENT_SHORT_FALL: Em = Em::new(0.5);
|
||||
@ -15,53 +18,71 @@ pub fn layout_accent(
|
||||
ctx: &mut MathContext,
|
||||
styles: StyleChain,
|
||||
) -> SourceResult<()> {
|
||||
let cramped = style_cramped();
|
||||
let mut base = ctx.layout_into_fragment(&elem.base, styles.chain(&cramped))?;
|
||||
let accent = elem.accent;
|
||||
let top_accent = !accent.is_bottom();
|
||||
|
||||
// Try to replace a glyph with its dotless variant.
|
||||
if let MathFragment::Glyph(glyph) = &mut base {
|
||||
glyph.make_dotless_form(ctx);
|
||||
}
|
||||
// Try to replace the base glyph with its dotless variant.
|
||||
let dtls = style_dtls();
|
||||
let base_styles =
|
||||
if top_accent && elem.dotless(styles) { styles.chain(&dtls) } else { styles };
|
||||
|
||||
let cramped = style_cramped();
|
||||
let base = ctx.layout_into_fragment(&elem.base, base_styles.chain(&cramped))?;
|
||||
|
||||
// Preserve class to preserve automatic spacing.
|
||||
let base_class = base.class();
|
||||
let base_attach = base.accent_attach();
|
||||
|
||||
let width = elem.size(styles).relative_to(base.width());
|
||||
|
||||
let Accent(c) = elem.accent;
|
||||
let mut glyph = GlyphFragment::new(ctx, styles, c, elem.span());
|
||||
|
||||
// Try to replace accent glyph with flattened variant.
|
||||
// Try to replace the accent glyph with its flattened variant.
|
||||
let flattened_base_height = scaled!(ctx, styles, flattened_accent_base_height);
|
||||
if base.height() > flattened_base_height {
|
||||
glyph.make_flattened_accent_form(ctx);
|
||||
}
|
||||
let flac = style_flac();
|
||||
let accent_styles = if top_accent && base.ascent() > flattened_base_height {
|
||||
styles.chain(&flac)
|
||||
} else {
|
||||
styles
|
||||
};
|
||||
|
||||
// Forcing the accent to be at least as large as the base makes it too
|
||||
// wide in many case.
|
||||
let short_fall = ACCENT_SHORT_FALL.at(glyph.font_size);
|
||||
let variant = glyph.stretch_horizontal(ctx, width, short_fall);
|
||||
let accent = variant.frame;
|
||||
let accent_attach = variant.accent_attach;
|
||||
let mut glyph =
|
||||
GlyphFragment::new_char(ctx.font, accent_styles, accent.0, elem.span())?;
|
||||
|
||||
// Forcing the accent to be at least as large as the base makes it too wide
|
||||
// in many cases.
|
||||
let width = elem.size(styles).relative_to(base.width());
|
||||
let short_fall = ACCENT_SHORT_FALL.at(glyph.item.size);
|
||||
glyph.stretch_horizontal(ctx, width - short_fall);
|
||||
let accent_attach = glyph.accent_attach.0;
|
||||
let accent = glyph.into_frame();
|
||||
|
||||
let (gap, accent_pos, base_pos) = if top_accent {
|
||||
// Descent is negative because the accent's ink bottom is above the
|
||||
// baseline. Therefore, the default gap is the accent's negated descent
|
||||
// minus the accent base height. Only if the base is very small, we need
|
||||
// a larger gap so that the accent doesn't move too low.
|
||||
// minus the accent base height. Only if the base is very small, we
|
||||
// need a larger gap so that the accent doesn't move too low.
|
||||
let accent_base_height = scaled!(ctx, styles, accent_base_height);
|
||||
let gap = -accent.descent() - base.height().min(accent_base_height);
|
||||
let size = Size::new(base.width(), accent.height() + gap + base.height());
|
||||
let accent_pos = Point::with_x(base_attach - accent_attach);
|
||||
let gap = -accent.descent() - base.ascent().min(accent_base_height);
|
||||
let accent_pos = Point::with_x(base_attach.0 - accent_attach);
|
||||
let base_pos = Point::with_y(accent.height() + gap);
|
||||
(gap, accent_pos, base_pos)
|
||||
} else {
|
||||
let gap = -accent.ascent();
|
||||
let accent_pos = Point::new(base_attach.1 - accent_attach, base.height() + gap);
|
||||
let base_pos = Point::zero();
|
||||
(gap, accent_pos, base_pos)
|
||||
};
|
||||
|
||||
let size = Size::new(base.width(), accent.height() + gap + base.height());
|
||||
let baseline = base_pos.y + base.ascent();
|
||||
|
||||
let base_italics_correction = base.italics_correction();
|
||||
let base_text_like = base.is_text_like();
|
||||
|
||||
let base_ascent = match &base {
|
||||
MathFragment::Frame(frame) => frame.base_ascent,
|
||||
_ => base.ascent(),
|
||||
};
|
||||
let base_descent = match &base {
|
||||
MathFragment::Frame(frame) => frame.base_descent,
|
||||
_ => base.descent(),
|
||||
};
|
||||
|
||||
let mut frame = Frame::soft(size);
|
||||
frame.set_baseline(baseline);
|
||||
@ -71,6 +92,7 @@ pub fn layout_accent(
|
||||
FrameFragment::new(styles, frame)
|
||||
.with_class(base_class)
|
||||
.with_base_ascent(base_ascent)
|
||||
.with_base_descent(base_descent)
|
||||
.with_italics_correction(base_italics_correction)
|
||||
.with_accent_attach(base_attach)
|
||||
.with_text_like(base_text_like),
|
||||
|
@ -66,7 +66,6 @@ pub fn layout_attach(
|
||||
let relative_to_width = measure!(t, width).max(measure!(b, width));
|
||||
stretch_fragment(
|
||||
ctx,
|
||||
styles,
|
||||
&mut base,
|
||||
Some(Axis::X),
|
||||
Some(relative_to_width),
|
||||
@ -220,7 +219,6 @@ fn layout_attachments(
|
||||
// Calculate the distance each pre-script extends to the left of the base's
|
||||
// width.
|
||||
let (tl_pre_width, bl_pre_width) = compute_pre_script_widths(
|
||||
ctx,
|
||||
&base,
|
||||
[tl.as_ref(), bl.as_ref()],
|
||||
(tx_shift, bx_shift),
|
||||
@ -231,7 +229,6 @@ fn layout_attachments(
|
||||
// base's width. Also calculate each post-script's kerning (we need this for
|
||||
// its position later).
|
||||
let ((tr_post_width, tr_kern), (br_post_width, br_kern)) = compute_post_script_widths(
|
||||
ctx,
|
||||
&base,
|
||||
[tr.as_ref(), br.as_ref()],
|
||||
(tx_shift, bx_shift),
|
||||
@ -287,14 +284,13 @@ fn layout_attachments(
|
||||
/// post-script's kerning value. The first tuple is for the post-superscript,
|
||||
/// and the second is for the post-subscript.
|
||||
fn compute_post_script_widths(
|
||||
ctx: &MathContext,
|
||||
base: &MathFragment,
|
||||
[tr, br]: [Option<&MathFragment>; 2],
|
||||
(tr_shift, br_shift): (Abs, Abs),
|
||||
space_after_post_script: Abs,
|
||||
) -> ((Abs, Abs), (Abs, Abs)) {
|
||||
let tr_values = tr.map_or_default(|tr| {
|
||||
let kern = math_kern(ctx, base, tr, tr_shift, Corner::TopRight);
|
||||
let kern = math_kern(base, tr, tr_shift, Corner::TopRight);
|
||||
(space_after_post_script + tr.width() + kern, kern)
|
||||
});
|
||||
|
||||
@ -302,7 +298,7 @@ fn compute_post_script_widths(
|
||||
// need to shift the post-subscript left by the base's italic correction
|
||||
// (see the kerning algorithm as described in the OpenType MATH spec).
|
||||
let br_values = br.map_or_default(|br| {
|
||||
let kern = math_kern(ctx, base, br, br_shift, Corner::BottomRight)
|
||||
let kern = math_kern(base, br, br_shift, Corner::BottomRight)
|
||||
- base.italics_correction();
|
||||
(space_after_post_script + br.width() + kern, kern)
|
||||
});
|
||||
@ -317,19 +313,18 @@ fn compute_post_script_widths(
|
||||
/// extends left of the base's width and the second being the distance the
|
||||
/// pre-subscript extends left of the base's width.
|
||||
fn compute_pre_script_widths(
|
||||
ctx: &MathContext,
|
||||
base: &MathFragment,
|
||||
[tl, bl]: [Option<&MathFragment>; 2],
|
||||
(tl_shift, bl_shift): (Abs, Abs),
|
||||
space_before_pre_script: Abs,
|
||||
) -> (Abs, Abs) {
|
||||
let tl_pre_width = tl.map_or_default(|tl| {
|
||||
let kern = math_kern(ctx, base, tl, tl_shift, Corner::TopLeft);
|
||||
let kern = math_kern(base, tl, tl_shift, Corner::TopLeft);
|
||||
space_before_pre_script + tl.width() + kern
|
||||
});
|
||||
|
||||
let bl_pre_width = bl.map_or_default(|bl| {
|
||||
let kern = math_kern(ctx, base, bl, bl_shift, Corner::BottomLeft);
|
||||
let kern = math_kern(base, bl, bl_shift, Corner::BottomLeft);
|
||||
space_before_pre_script + bl.width() + kern
|
||||
});
|
||||
|
||||
@ -434,9 +429,13 @@ fn compute_script_shifts(
|
||||
}
|
||||
|
||||
if bl.is_some() || br.is_some() {
|
||||
let descent = match &base {
|
||||
MathFragment::Frame(frame) => frame.base_descent,
|
||||
_ => base.descent(),
|
||||
};
|
||||
shift_down = shift_down
|
||||
.max(sub_shift_down)
|
||||
.max(if is_text_like { Abs::zero() } else { base.descent() + sub_drop_min })
|
||||
.max(if is_text_like { Abs::zero() } else { descent + sub_drop_min })
|
||||
.max(measure!(bl, ascent) - sub_top_max)
|
||||
.max(measure!(br, ascent) - sub_top_max);
|
||||
}
|
||||
@@ -467,13 +466,7 @@
/// a negative value means shifting the script closer to the base. Requires the
/// distance from the base's baseline to the script's baseline, as well as the
/// script's corner (tl, tr, bl, br).
fn math_kern(
    ctx: &MathContext,
    base: &MathFragment,
    script: &MathFragment,
    shift: Abs,
    pos: Corner,
) -> Abs {
fn math_kern(base: &MathFragment, script: &MathFragment, shift: Abs, pos: Corner) -> Abs {
    // This process is described under the MathKernInfo table in the OpenType
    // MATH spec.

@@ -498,8 +491,8 @@ fn math_kern(

    // Calculate the sum of kerning values for each correction height.
    let summed_kern = |height| {
        let base_kern = base.kern_at_height(ctx, pos, height);
        let attach_kern = script.kern_at_height(ctx, pos.inv(), height);
        let base_kern = base.kern_at_height(pos, height);
        let attach_kern = script.kern_at_height(pos.inv(), height);
        base_kern + attach_kern
    };
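The `kern_at_height` lookups follow the OpenType MathKern model: a short list of correction heights splits the vertical range into bands, each with its own kern value. A simplified sketch of that lookup, independent of ttf-parser and with the comparison rule reduced to its essentials:

/// A simplified MathKern table: `heights[i]` separates band `i` from band
/// `i + 1`, so `kerns` has one more entry than `heights`.
struct MathKern {
    heights: Vec<f64>,
    kerns: Vec<f64>,
}

impl MathKern {
    /// Kern value for a given correction height: the entry of the band the
    /// height falls into.
    fn kern_at_height(&self, height: f64) -> f64 {
        let band = self.heights.iter().take_while(|&&h| height > h).count();
        self.kerns[band]
    }
}

fn main() {
    let table = MathKern { heights: vec![2.0, 5.0], kerns: vec![0.0, -1.0, -2.5] };
    assert_eq!(table.kern_at_height(1.0), 0.0); // below the first cut
    assert_eq!(table.kern_at_height(3.0), -1.0); // between the two cuts
    assert_eq!(table.kern_at_height(6.0), -2.5); // above the last cut
}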
|
||||
|
||||
|
@ -109,14 +109,14 @@ fn layout_frac_like(
|
||||
frame.push_frame(denom_pos, denom);
|
||||
|
||||
if binom {
|
||||
let mut left = GlyphFragment::new(ctx, styles, '(', span)
|
||||
.stretch_vertical(ctx, height, short_fall);
|
||||
left.center_on_axis(ctx);
|
||||
let mut left = GlyphFragment::new_char(ctx.font, styles, '(', span)?;
|
||||
left.stretch_vertical(ctx, height - short_fall);
|
||||
left.center_on_axis();
|
||||
ctx.push(left);
|
||||
ctx.push(FrameFragment::new(styles, frame));
|
||||
let mut right = GlyphFragment::new(ctx, styles, ')', span)
|
||||
.stretch_vertical(ctx, height, short_fall);
|
||||
right.center_on_axis(ctx);
|
||||
let mut right = GlyphFragment::new_char(ctx.font, styles, ')', span)?;
|
||||
right.stretch_vertical(ctx, height - short_fall);
|
||||
right.center_on_axis();
|
||||
ctx.push(right);
|
||||
} else {
|
||||
frame.push(
|
||||
|
File diff suppressed because it is too large
@ -45,20 +45,20 @@ pub fn layout_lr(
|
||||
|
||||
// Scale up fragments at both ends.
|
||||
match inner_fragments {
|
||||
[one] => scale(ctx, styles, one, relative_to, height, None),
|
||||
[one] => scale_if_delimiter(ctx, one, relative_to, height, None),
|
||||
[first, .., last] => {
|
||||
scale(ctx, styles, first, relative_to, height, Some(MathClass::Opening));
|
||||
scale(ctx, styles, last, relative_to, height, Some(MathClass::Closing));
|
||||
scale_if_delimiter(ctx, first, relative_to, height, Some(MathClass::Opening));
|
||||
scale_if_delimiter(ctx, last, relative_to, height, Some(MathClass::Closing));
|
||||
}
|
||||
_ => {}
|
||||
[] => {}
|
||||
}
|
||||
|
||||
// Handle MathFragment::Variant fragments that should be scaled up.
|
||||
// Handle MathFragment::Glyph fragments that should be scaled up.
|
||||
for fragment in inner_fragments.iter_mut() {
|
||||
if let MathFragment::Variant(ref mut variant) = fragment {
|
||||
if variant.mid_stretched == Some(false) {
|
||||
variant.mid_stretched = Some(true);
|
||||
scale(ctx, styles, fragment, relative_to, height, Some(MathClass::Large));
|
||||
if let MathFragment::Glyph(ref mut glyph) = fragment {
|
||||
if glyph.mid_stretched == Some(false) {
|
||||
glyph.mid_stretched = Some(true);
|
||||
scale(ctx, fragment, relative_to, height);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -95,18 +95,9 @@ pub fn layout_mid(
|
||||
let mut fragments = ctx.layout_into_fragments(&elem.body, styles)?;
|
||||
|
||||
for fragment in &mut fragments {
|
||||
match fragment {
|
||||
MathFragment::Glyph(glyph) => {
|
||||
let mut new = glyph.clone().into_variant();
|
||||
new.mid_stretched = Some(false);
|
||||
new.class = MathClass::Fence;
|
||||
*fragment = MathFragment::Variant(new);
|
||||
}
|
||||
MathFragment::Variant(variant) => {
|
||||
variant.mid_stretched = Some(false);
|
||||
variant.class = MathClass::Fence;
|
||||
}
|
||||
_ => {}
|
||||
if let MathFragment::Glyph(ref mut glyph) = fragment {
|
||||
glyph.mid_stretched = Some(false);
|
||||
glyph.class = MathClass::Relation;
|
||||
}
|
||||
}
|
||||
|
||||
@ -114,10 +105,13 @@ pub fn layout_mid(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Scale a math fragment to a height.
|
||||
fn scale(
|
||||
/// Scales a math fragment to a height if it has the class Opening, Closing, or
|
||||
/// Fence.
|
||||
///
|
||||
/// In case `apply` is `Some(class)`, `class` will be applied to the fragment if
|
||||
/// it is a delimiter, in a way that cannot be overridden by the user.
|
||||
fn scale_if_delimiter(
|
||||
ctx: &mut MathContext,
|
||||
styles: StyleChain,
|
||||
fragment: &mut MathFragment,
|
||||
relative_to: Abs,
|
||||
height: Rel<Abs>,
|
||||
@ -127,21 +121,23 @@ fn scale(
|
||||
fragment.class(),
|
||||
MathClass::Opening | MathClass::Closing | MathClass::Fence
|
||||
) {
|
||||
// This unwrap doesn't really matter. If it is None, then the fragment
|
||||
// won't be stretchable anyways.
|
||||
let short_fall = DELIM_SHORT_FALL.at(fragment.font_size().unwrap_or_default());
|
||||
stretch_fragment(
|
||||
ctx,
|
||||
styles,
|
||||
fragment,
|
||||
Some(Axis::Y),
|
||||
Some(relative_to),
|
||||
height,
|
||||
short_fall,
|
||||
);
|
||||
scale(ctx, fragment, relative_to, height);
|
||||
|
||||
if let Some(class) = apply {
|
||||
fragment.set_class(class);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Scales a math fragment to a height.
fn scale(
    ctx: &mut MathContext,
    fragment: &mut MathFragment,
    relative_to: Abs,
    height: Rel<Abs>,
) {
    // This unwrap doesn't really matter. If it is None, then the fragment
    // won't be stretchable anyways.
    let short_fall = DELIM_SHORT_FALL.at(fragment.font_size().unwrap_or_default());
    stretch_fragment(ctx, fragment, Some(Axis::Y), Some(relative_to), height, short_fall);
}
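The short-fall idea is that a stretched delimiter is allowed to fall a little short of the content it wraps so it does not look oversized. A minimal sketch of that target computation, with plain numbers standing in for `Abs`/`Rel` and a constant chosen purely for illustration (not Typst's actual value):

/// Em-relative short fall, resolved at a given font size (in pt).
const DELIM_SHORT_FALL_EM: f64 = 0.1;

/// Target height for a stretched delimiter: the requested height relative to
/// the wrapped content, minus a small allowance so it may stay slightly short.
fn stretch_target(requested_ratio: f64, content_height: f64, font_size: f64) -> f64 {
    requested_ratio * content_height - DELIM_SHORT_FALL_EM * font_size
}

fn main() {
    // 100% of a 20pt-tall body at 10pt font size: aim for 19pt, not 20pt.
    assert_eq!(stretch_target(1.0, 20.0, 10.0), 19.0);
}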
|
||||
|
@ -1,4 +1,4 @@
|
||||
use typst_library::diag::{bail, SourceResult};
|
||||
use typst_library::diag::{bail, warning, SourceResult};
|
||||
use typst_library::foundations::{Content, Packed, Resolve, StyleChain};
|
||||
use typst_library::layout::{
|
||||
Abs, Axes, Em, FixedAlignment, Frame, FrameItem, Point, Ratio, Rel, Size,
|
||||
@ -9,8 +9,8 @@ use typst_library::visualize::{FillRule, FixedStroke, Geometry, LineCap, Shape};
|
||||
use typst_syntax::Span;
|
||||
|
||||
use super::{
|
||||
alignments, delimiter_alignment, stack, style_for_denominator, AlignmentResult,
|
||||
FrameFragment, GlyphFragment, LeftRightAlternator, MathContext, DELIM_SHORT_FALL,
|
||||
alignments, style_for_denominator, AlignmentResult, FrameFragment, GlyphFragment,
|
||||
LeftRightAlternator, MathContext, DELIM_SHORT_FALL,
|
||||
};
|
||||
|
||||
const VERTICAL_PADDING: Ratio = Ratio::new(0.1);
|
||||
@ -23,67 +23,23 @@ pub fn layout_vec(
|
||||
ctx: &mut MathContext,
|
||||
styles: StyleChain,
|
||||
) -> SourceResult<()> {
|
||||
let delim = elem.delim(styles);
|
||||
let frame = layout_vec_body(
|
||||
let span = elem.span();
|
||||
|
||||
let column: Vec<&Content> = elem.children.iter().collect();
|
||||
let frame = layout_body(
|
||||
ctx,
|
||||
styles,
|
||||
&elem.children,
|
||||
&[column],
|
||||
elem.align(styles),
|
||||
elem.gap(styles),
|
||||
LeftRightAlternator::Right,
|
||||
None,
|
||||
Axes::with_y(elem.gap(styles)),
|
||||
span,
|
||||
"elements",
|
||||
)?;
|
||||
|
||||
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), elem.span())
|
||||
}
|
||||
|
||||
/// Lays out a [`MatElem`].
|
||||
#[typst_macros::time(name = "math.mat", span = elem.span())]
|
||||
pub fn layout_mat(
|
||||
elem: &Packed<MatElem>,
|
||||
ctx: &mut MathContext,
|
||||
styles: StyleChain,
|
||||
) -> SourceResult<()> {
|
||||
let augment = elem.augment(styles);
|
||||
let rows = &elem.rows;
|
||||
|
||||
if let Some(aug) = &augment {
|
||||
for &offset in &aug.hline.0 {
|
||||
if offset == 0 || offset.unsigned_abs() >= rows.len() {
|
||||
bail!(
|
||||
elem.span(),
|
||||
"cannot draw a horizontal line after row {} of a matrix with {} rows",
|
||||
if offset < 0 { rows.len() as isize + offset } else { offset },
|
||||
rows.len()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let ncols = rows.first().map_or(0, |row| row.len());
|
||||
|
||||
for &offset in &aug.vline.0 {
|
||||
if offset == 0 || offset.unsigned_abs() >= ncols {
|
||||
bail!(
|
||||
elem.span(),
|
||||
"cannot draw a vertical line after column {} of a matrix with {} columns",
|
||||
if offset < 0 { ncols as isize + offset } else { offset },
|
||||
ncols
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let delim = elem.delim(styles);
|
||||
let frame = layout_mat_body(
|
||||
ctx,
|
||||
styles,
|
||||
rows,
|
||||
elem.align(styles),
|
||||
augment,
|
||||
Axes::new(elem.column_gap(styles), elem.row_gap(styles)),
|
||||
elem.span(),
|
||||
)?;
|
||||
|
||||
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), elem.span())
|
||||
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
|
||||
}
|
||||
|
||||
/// Lays out a [`CasesElem`].
|
||||
@ -93,60 +49,100 @@ pub fn layout_cases(
|
||||
ctx: &mut MathContext,
|
||||
styles: StyleChain,
|
||||
) -> SourceResult<()> {
|
||||
let delim = elem.delim(styles);
|
||||
let frame = layout_vec_body(
|
||||
let span = elem.span();
|
||||
|
||||
let column: Vec<&Content> = elem.children.iter().collect();
|
||||
let frame = layout_body(
|
||||
ctx,
|
||||
styles,
|
||||
&elem.children,
|
||||
&[column],
|
||||
FixedAlignment::Start,
|
||||
elem.gap(styles),
|
||||
LeftRightAlternator::None,
|
||||
None,
|
||||
Axes::with_y(elem.gap(styles)),
|
||||
span,
|
||||
"branches",
|
||||
)?;
|
||||
|
||||
let delim = elem.delim(styles);
|
||||
let (open, close) =
|
||||
if elem.reverse(styles) { (None, delim.close()) } else { (delim.open(), None) };
|
||||
|
||||
layout_delimiters(ctx, styles, frame, open, close, elem.span())
|
||||
layout_delimiters(ctx, styles, frame, open, close, span)
|
||||
}
|
||||
|
||||
/// Layout the inner contents of a vector.
|
||||
fn layout_vec_body(
|
||||
/// Lays out a [`MatElem`].
|
||||
#[typst_macros::time(name = "math.mat", span = elem.span())]
|
||||
pub fn layout_mat(
|
||||
elem: &Packed<MatElem>,
|
||||
ctx: &mut MathContext,
|
||||
styles: StyleChain,
|
||||
column: &[Content],
|
||||
align: FixedAlignment,
|
||||
row_gap: Rel<Abs>,
|
||||
alternator: LeftRightAlternator,
|
||||
) -> SourceResult<Frame> {
|
||||
let gap = row_gap.relative_to(ctx.region.size.y);
|
||||
) -> SourceResult<()> {
|
||||
let span = elem.span();
|
||||
let rows = &elem.rows;
|
||||
let ncols = rows.first().map_or(0, |row| row.len());
|
||||
|
||||
let denom_style = style_for_denominator(styles);
|
||||
let mut flat = vec![];
|
||||
for child in column {
|
||||
// We allow linebreaks in cases and vectors, which are functionally
|
||||
// identical to commas.
|
||||
flat.extend(ctx.layout_into_run(child, styles.chain(&denom_style))?.rows());
|
||||
let augment = elem.augment(styles);
|
||||
if let Some(aug) = &augment {
|
||||
for &offset in &aug.hline.0 {
|
||||
if offset == 0 || offset.unsigned_abs() >= rows.len() {
|
||||
bail!(
|
||||
span,
|
||||
"cannot draw a horizontal line after row {} of a matrix with {} rows",
|
||||
if offset < 0 { rows.len() as isize + offset } else { offset },
|
||||
rows.len()
|
||||
);
|
||||
}
|
||||
// We pad ascent and descent with the ascent and descent of the paren
|
||||
// to ensure that normal vectors are aligned with others unless they are
|
||||
// way too big.
|
||||
let paren =
|
||||
GlyphFragment::new(ctx, styles.chain(&denom_style), '(', Span::detached());
|
||||
Ok(stack(flat, align, gap, 0, alternator, Some((paren.ascent, paren.descent))))
|
||||
}
|
||||
|
||||
for &offset in &aug.vline.0 {
|
||||
if offset == 0 || offset.unsigned_abs() >= ncols {
|
||||
bail!(
|
||||
span,
|
||||
"cannot draw a vertical line after column {} of a matrix with {} columns",
|
||||
if offset < 0 { ncols as isize + offset } else { offset },
|
||||
ncols
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    // Transpose rows of the matrix into columns.
    let mut row_iters: Vec<_> = rows.iter().map(|i| i.iter()).collect();
    let columns: Vec<Vec<_>> = (0..ncols)
        .map(|_| row_iters.iter_mut().map(|i| i.next().unwrap()).collect())
        .collect();
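The transpose above assumes every row has the same number of entries; as a standalone sketch of the same iterator trick on a plain grid:

/// Transposes a rectangular `rows x cols` grid into `cols x rows`.
/// Panics if the rows have unequal lengths, as the layout code assumes.
fn transpose<T: Clone>(rows: &[Vec<T>]) -> Vec<Vec<T>> {
    let ncols = rows.first().map_or(0, |row| row.len());
    let mut iters: Vec<_> = rows.iter().map(|row| row.iter()).collect();
    (0..ncols)
        .map(|_| iters.iter_mut().map(|it| it.next().unwrap().clone()).collect())
        .collect()
}

fn main() {
    let rows = vec![vec![1, 2, 3], vec![4, 5, 6]];
    assert_eq!(transpose(&rows), vec![vec![1, 4], vec![2, 5], vec![3, 6]]);
}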
|
||||
|
||||
let frame = layout_body(
|
||||
ctx,
|
||||
styles,
|
||||
&columns,
|
||||
elem.align(styles),
|
||||
LeftRightAlternator::Right,
|
||||
augment,
|
||||
Axes::new(elem.column_gap(styles), elem.row_gap(styles)),
|
||||
span,
|
||||
"cells",
|
||||
)?;
|
||||
|
||||
let delim = elem.delim(styles);
|
||||
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
|
||||
}
|
||||
|
||||
/// Layout the inner contents of a matrix.
|
||||
fn layout_mat_body(
|
||||
/// Layout the inner contents of a matrix, vector, or cases.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_body(
|
||||
ctx: &mut MathContext,
|
||||
styles: StyleChain,
|
||||
rows: &[Vec<Content>],
|
||||
columns: &[Vec<&Content>],
|
||||
align: FixedAlignment,
|
||||
alternator: LeftRightAlternator,
|
||||
augment: Option<Augment<Abs>>,
|
||||
gap: Axes<Rel<Abs>>,
|
||||
span: Span,
|
||||
children: &str,
|
||||
) -> SourceResult<Frame> {
|
||||
let ncols = rows.first().map_or(0, |row| row.len());
|
||||
let nrows = rows.len();
|
||||
let nrows = columns.first().map_or(0, |col| col.len());
|
||||
let ncols = columns.len();
|
||||
if ncols == 0 || nrows == 0 {
|
||||
return Ok(Frame::soft(Size::zero()));
|
||||
}
|
||||
@ -178,29 +174,40 @@ fn layout_mat_body(
|
||||
// Before the full matrix body can be laid out, the
|
||||
// individual cells must first be independently laid out
|
||||
// so we can ensure alignment across rows and columns.
|
||||
let mut cols = vec![vec![]; ncols];
|
||||
|
||||
// This variable stores the maximum ascent and descent for each row.
|
||||
let mut heights = vec![(Abs::zero(), Abs::zero()); nrows];
|
||||
|
||||
// We want to transpose our data layout to columns
|
||||
// before final layout. For efficiency, the columns
|
||||
// variable is set up here and newly generated
|
||||
// individual cells are then added to it.
|
||||
let mut cols = vec![vec![]; ncols];
|
||||
|
||||
let denom_style = style_for_denominator(styles);
|
||||
// We pad ascent and descent with the ascent and descent of the paren
|
||||
// to ensure that normal matrices are aligned with others unless they are
|
||||
// way too big.
|
||||
let paren =
|
||||
GlyphFragment::new(ctx, styles.chain(&denom_style), '(', Span::detached());
|
||||
let paren = GlyphFragment::new_char(
|
||||
ctx.font,
|
||||
styles.chain(&denom_style),
|
||||
'(',
|
||||
Span::detached(),
|
||||
)?;
|
||||
|
||||
for (row, (ascent, descent)) in rows.iter().zip(&mut heights) {
|
||||
for (cell, col) in row.iter().zip(&mut cols) {
|
||||
for (column, col) in columns.iter().zip(&mut cols) {
|
||||
for (cell, (ascent, descent)) in column.iter().zip(&mut heights) {
|
||||
let cell_span = cell.span();
|
||||
let cell = ctx.layout_into_run(cell, styles.chain(&denom_style))?;
|
||||
|
||||
ascent.set_max(cell.ascent().max(paren.ascent));
|
||||
descent.set_max(cell.descent().max(paren.descent));
|
||||
// We ignore linebreaks in the cells as we can't differentiate
|
||||
// alignment points for the whole body from ones for a specific
|
||||
// cell, and multiline cells don't quite make sense at the moment.
|
||||
if cell.is_multiline() {
|
||||
ctx.engine.sink.warn(warning!(
|
||||
cell_span,
|
||||
"linebreaks are ignored in {}", children;
|
||||
hint: "use commas instead to separate each line"
|
||||
));
|
||||
}
|
||||
|
||||
ascent.set_max(cell.ascent().max(paren.ascent()));
|
||||
descent.set_max(cell.descent().max(paren.descent()));
|
||||
|
||||
col.push(cell);
|
||||
}
|
||||
@ -222,7 +229,7 @@ fn layout_mat_body(
|
||||
let mut y = Abs::zero();
|
||||
|
||||
for (cell, &(ascent, descent)) in col.into_iter().zip(&heights) {
|
||||
let cell = cell.into_line_frame(&points, LeftRightAlternator::Right);
|
||||
let cell = cell.into_line_frame(&points, alternator);
|
||||
let pos = Point::new(
|
||||
if points.is_empty() {
|
||||
x + align.position(rcol - cell.width())
|
||||
@ -309,19 +316,19 @@ fn layout_delimiters(
|
||||
let target = height + VERTICAL_PADDING.of(height);
|
||||
frame.set_baseline(height / 2.0 + axis);
|
||||
|
||||
if let Some(left) = left {
|
||||
let mut left = GlyphFragment::new(ctx, styles, left, span)
|
||||
.stretch_vertical(ctx, target, short_fall);
|
||||
left.align_on_axis(ctx, delimiter_alignment(left.c));
|
||||
if let Some(left_c) = left {
|
||||
let mut left = GlyphFragment::new_char(ctx.font, styles, left_c, span)?;
|
||||
left.stretch_vertical(ctx, target - short_fall);
|
||||
left.center_on_axis();
|
||||
ctx.push(left);
|
||||
}
|
||||
|
||||
ctx.push(FrameFragment::new(styles, frame));
|
||||
|
||||
if let Some(right) = right {
|
||||
let mut right = GlyphFragment::new(ctx, styles, right, span)
|
||||
.stretch_vertical(ctx, target, short_fall);
|
||||
right.align_on_axis(ctx, delimiter_alignment(right.c));
|
||||
if let Some(right_c) = right {
|
||||
let mut right = GlyphFragment::new_char(ctx.font, styles, right_c, span)?;
|
||||
right.stretch_vertical(ctx, target - short_fall);
|
||||
right.center_on_axis();
|
||||
ctx.push(right);
|
||||
}
|
||||
|
||||
|
@ -13,8 +13,6 @@ mod stretch;
|
||||
mod text;
|
||||
mod underover;
|
||||
|
||||
use rustybuzz::Feature;
|
||||
use ttf_parser::Tag;
|
||||
use typst_library::diag::{bail, SourceResult};
|
||||
use typst_library::engine::Engine;
|
||||
use typst_library::foundations::{
|
||||
@ -30,7 +28,7 @@ use typst_library::math::*;
|
||||
use typst_library::model::ParElem;
|
||||
use typst_library::routines::{Arenas, RealizationKind};
|
||||
use typst_library::text::{
|
||||
families, features, variant, Font, LinebreakElem, SpaceElem, TextEdgeBounds, TextElem,
|
||||
families, variant, Font, LinebreakElem, SpaceElem, TextEdgeBounds, TextElem,
|
||||
};
|
||||
use typst_library::World;
|
||||
use typst_syntax::Span;
|
||||
@ -38,11 +36,11 @@ use typst_utils::Numeric;
|
||||
use unicode_math_class::MathClass;
|
||||
|
||||
use self::fragment::{
|
||||
FrameFragment, GlyphFragment, GlyphwiseSubsts, Limits, MathFragment, VariantFragment,
|
||||
has_dtls_feat, stretch_axes, FrameFragment, GlyphFragment, Limits, MathFragment,
|
||||
};
|
||||
use self::run::{LeftRightAlternator, MathRun, MathRunFrameBuilder};
|
||||
use self::shared::*;
|
||||
use self::stretch::{stretch_fragment, stretch_glyph};
|
||||
use self::stretch::stretch_fragment;
|
||||
|
||||
/// Layout an inline equation (in a paragraph).
|
||||
#[typst_macros::time(span = elem.span())]
|
||||
@ -58,7 +56,7 @@ pub fn layout_equation_inline(
|
||||
let font = find_math_font(engine, styles, elem.span())?;
|
||||
|
||||
let mut locator = locator.split();
|
||||
let mut ctx = MathContext::new(engine, &mut locator, styles, region, &font);
|
||||
let mut ctx = MathContext::new(engine, &mut locator, region, &font);
|
||||
|
||||
let scale_style = style_for_script_scale(&ctx);
|
||||
let styles = styles.chain(&scale_style);
|
||||
@ -113,7 +111,7 @@ pub fn layout_equation_block(
|
||||
let font = find_math_font(engine, styles, span)?;
|
||||
|
||||
let mut locator = locator.split();
|
||||
let mut ctx = MathContext::new(engine, &mut locator, styles, regions.base(), &font);
|
||||
let mut ctx = MathContext::new(engine, &mut locator, regions.base(), &font);
|
||||
|
||||
let scale_style = style_for_script_scale(&ctx);
|
||||
let styles = styles.chain(&scale_style);
|
||||
@ -374,14 +372,7 @@ struct MathContext<'a, 'v, 'e> {
|
||||
region: Region,
|
||||
// Font-related.
|
||||
font: &'a Font,
|
||||
ttf: &'a ttf_parser::Face<'a>,
|
||||
table: ttf_parser::math::Table<'a>,
|
||||
constants: ttf_parser::math::Constants<'a>,
|
||||
dtls_table: Option<GlyphwiseSubsts<'a>>,
|
||||
flac_table: Option<GlyphwiseSubsts<'a>>,
|
||||
ssty_table: Option<GlyphwiseSubsts<'a>>,
|
||||
glyphwise_tables: Option<Vec<GlyphwiseSubsts<'a>>>,
|
||||
space_width: Em,
|
||||
// Mutable.
|
||||
fragments: Vec<MathFragment>,
|
||||
}
|
||||
@ -391,46 +382,20 @@ impl<'a, 'v, 'e> MathContext<'a, 'v, 'e> {
|
||||
fn new(
|
||||
engine: &'v mut Engine<'e>,
|
||||
locator: &'v mut SplitLocator<'a>,
|
||||
styles: StyleChain<'a>,
|
||||
base: Size,
|
||||
font: &'a Font,
|
||||
) -> Self {
|
||||
let math_table = font.ttf().tables().math.unwrap();
|
||||
let gsub_table = font.ttf().tables().gsub;
|
||||
let constants = math_table.constants.unwrap();
|
||||
|
||||
let feat = |tag: &[u8; 4]| {
|
||||
GlyphwiseSubsts::new(gsub_table, Feature::new(Tag::from_bytes(tag), 0, ..))
|
||||
};
|
||||
|
||||
let features = features(styles);
|
||||
let glyphwise_tables = Some(
|
||||
features
|
||||
.into_iter()
|
||||
.filter_map(|feature| GlyphwiseSubsts::new(gsub_table, feature))
|
||||
.collect(),
|
||||
);
|
||||
|
||||
let ttf = font.ttf();
|
||||
let space_width = ttf
|
||||
.glyph_index(' ')
|
||||
.and_then(|id| ttf.glyph_hor_advance(id))
|
||||
.map(|advance| font.to_em(advance))
|
||||
.unwrap_or(THICK);
|
||||
// These unwraps are safe as the font given is one returned by the
|
||||
// find_math_font function, which only returns fonts that have a math
|
||||
// constants table.
|
||||
let constants = font.ttf().tables().math.unwrap().constants.unwrap();
|
||||
|
||||
Self {
|
||||
engine,
|
||||
locator,
|
||||
region: Region::new(base, Axes::splat(false)),
|
||||
font,
|
||||
ttf,
|
||||
table: math_table,
|
||||
constants,
|
||||
dtls_table: feat(b"dtls"),
|
||||
flac_table: feat(b"flac"),
|
||||
ssty_table: feat(b"ssty"),
|
||||
glyphwise_tables,
|
||||
space_width,
|
||||
fragments: vec![],
|
||||
}
|
||||
}
|
||||
@ -529,7 +494,8 @@ fn layout_realized(
|
||||
if let Some(elem) = elem.to_packed::<TagElem>() {
|
||||
ctx.push(MathFragment::Tag(elem.tag.clone()));
|
||||
} else if elem.is::<SpaceElem>() {
|
||||
ctx.push(MathFragment::Space(ctx.space_width.resolve(styles)));
|
||||
let space_width = ctx.font.space_width().unwrap_or(THICK);
|
||||
ctx.push(MathFragment::Space(space_width.resolve(styles)));
|
||||
} else if elem.is::<LinebreakElem>() {
|
||||
ctx.push(MathFragment::Linebreak);
|
||||
} else if let Some(elem) = elem.to_packed::<HElem>() {
|
||||
@ -644,7 +610,7 @@ fn layout_h(
|
||||
}
|
||||
|
||||
/// Lays out a [`ClassElem`].
|
||||
#[typst_macros::time(name = "math.op", span = elem.span())]
|
||||
#[typst_macros::time(name = "math.class", span = elem.span())]
|
||||
fn layout_class(
|
||||
elem: &Packed<ClassElem>,
|
||||
ctx: &mut MathContext,
|
||||
|
@ -49,9 +49,9 @@ pub fn layout_root(
|
||||
|
||||
// Layout root symbol.
|
||||
let target = radicand.height() + thickness + gap;
|
||||
let sqrt = GlyphFragment::new(ctx, styles, '√', span)
|
||||
.stretch_vertical(ctx, target, Abs::zero())
|
||||
.frame;
|
||||
let mut sqrt = GlyphFragment::new_char(ctx.font, styles, '√', span)?;
|
||||
sqrt.stretch_vertical(ctx, target);
|
||||
let sqrt = sqrt.into_frame();
|
||||
|
||||
// Layout the index.
|
||||
let sscript = EquationElem::set_size(MathSize::ScriptScript).wrap();
|
||||
@ -85,14 +85,15 @@ pub fn layout_root(
|
||||
ascent.set_max(shift_up + index.ascent());
|
||||
}
|
||||
|
||||
let radicand_x = sqrt_offset + sqrt.width();
|
||||
let sqrt_x = sqrt_offset.max(Abs::zero());
|
||||
let radicand_x = sqrt_x + sqrt.width();
|
||||
let radicand_y = ascent - radicand.ascent();
|
||||
let width = radicand_x + radicand.width();
|
||||
let size = Size::new(width, ascent + descent);
|
||||
|
||||
// The extra "- thickness" comes from the fact that the sqrt is placed
|
||||
// in `push_frame` with respect to its top, not its baseline.
|
||||
let sqrt_pos = Point::new(sqrt_offset, radicand_y - gap - thickness);
|
||||
let sqrt_pos = Point::new(sqrt_x, radicand_y - gap - thickness);
|
||||
let line_pos = Point::new(radicand_x, radicand_y - gap - (thickness / 2.0));
|
||||
let radicand_pos = Point::new(radicand_x, radicand_y);
|
||||
|
||||
@ -100,7 +101,8 @@ pub fn layout_root(
|
||||
frame.set_baseline(ascent);
|
||||
|
||||
if let Some(index) = index {
|
||||
let index_pos = Point::new(kern_before, ascent - index.ascent() - shift_up);
|
||||
let index_x = -sqrt_offset.min(Abs::zero()) + kern_before;
|
||||
let index_pos = Point::new(index_x, ascent - index.ascent() - shift_up);
|
||||
frame.push_frame(index_pos, index);
|
||||
}
|
||||
|
||||
|
@ -278,6 +278,9 @@ impl MathRun {
|
||||
frame
|
||||
}
|
||||
|
||||
/// Convert this run of math fragments into a vector of inline items for
|
||||
/// paragraph layout. Creates multiple fragments when relation or binary
|
||||
/// operators are present to allow for line-breaking opportunities later.
|
||||
pub fn into_par_items(self) -> Vec<InlineItem> {
|
||||
let mut items = vec![];
|
||||
|
||||
@ -295,22 +298,25 @@ impl MathRun {
|
||||
|
||||
let mut space_is_visible = false;
|
||||
|
||||
        let is_relation = |f: &MathFragment| matches!(f.class(), MathClass::Relation);
        let is_space = |f: &MathFragment| {
            matches!(f, MathFragment::Space(_) | MathFragment::Spacing(_, _))
        };
        let is_line_break_opportunity = |class, next_fragment| match class {
            // Don't split when two relations are in a row or when preceding a
            // closing parenthesis.
            MathClass::Binary => next_fragment != Some(MathClass::Closing),
            MathClass::Relation => {
                !matches!(next_fragment, Some(MathClass::Relation | MathClass::Closing))
            }
            _ => false,
        };
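A compact way to read that rule: a binary or relation operator creates a potential line break after itself, unless the class that follows would make the break look wrong. A hedged standalone sketch with the same decision table, using a toy class enum rather than `unicode_math_class`:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Class {
    Normal,
    Binary,
    Relation,
    Closing,
}

/// Whether a break opportunity goes after a fragment of `class`, given the
/// class of the fragment that follows (if any).
fn breaks_after(class: Class, next: Option<Class>) -> bool {
    match class {
        // No break between a binary operator and a closing delimiter.
        Class::Binary => next != Some(Class::Closing),
        // No break inside a chain of relations or before a closing delimiter.
        Class::Relation => !matches!(next, Some(Class::Relation | Class::Closing)),
        _ => false,
    }
}

fn main() {
    assert!(breaks_after(Class::Relation, Some(Class::Normal))); // "x =" | "y"
    assert!(!breaks_after(Class::Relation, Some(Class::Relation))); // chained relations stay whole
    assert!(!breaks_after(Class::Binary, Some(Class::Closing))); // "(a +)" stays whole
}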
|
||||
|
||||
let mut iter = self.0.into_iter().peekable();
|
||||
while let Some(fragment) = iter.next() {
|
||||
if space_is_visible {
|
||||
match fragment {
|
||||
MathFragment::Space(width) | MathFragment::Spacing(width, _) => {
|
||||
items.push(InlineItem::Space(width, true));
|
||||
if space_is_visible && is_space(&fragment) {
|
||||
items.push(InlineItem::Space(fragment.width(), true));
|
||||
continue;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
let class = fragment.class();
|
||||
let y = fragment.ascent();
|
||||
@ -323,10 +329,9 @@ impl MathRun {
|
||||
frame.push_frame(pos, fragment.into_frame());
|
||||
empty = false;
|
||||
|
||||
if class == MathClass::Binary
|
||||
|| (class == MathClass::Relation
|
||||
&& !iter.peek().map(is_relation).unwrap_or_default())
|
||||
{
|
||||
// Split our current frame when we encounter a binary operator or
|
||||
// relation so that there is a line-breaking opportunity.
|
||||
if is_line_break_opportunity(class, iter.peek().map(|f| f.class())) {
|
||||
let mut frame_prev =
|
||||
std::mem::replace(&mut frame, Frame::soft(Size::zero()));
|
||||
|
||||
|
@ -1,7 +1,9 @@
|
||||
use ttf_parser::math::MathValue;
|
||||
use ttf_parser::Tag;
|
||||
use typst_library::foundations::{Style, StyleChain};
|
||||
use typst_library::layout::{Abs, Em, FixedAlignment, Frame, Point, Size, VAlignment};
|
||||
use typst_library::layout::{Abs, Em, FixedAlignment, Frame, Point, Size};
|
||||
use typst_library::math::{EquationElem, MathSize};
|
||||
use typst_library::text::{FontFeatures, TextElem};
|
||||
use typst_utils::LazyHash;
|
||||
|
||||
use super::{LeftRightAlternator, MathContext, MathFragment, MathRun};
|
||||
@ -59,6 +61,16 @@ pub fn style_cramped() -> LazyHash<Style> {
|
||||
EquationElem::set_cramped(true).wrap()
|
||||
}
|
||||
|
||||
/// Sets flac OpenType feature.
|
||||
pub fn style_flac() -> LazyHash<Style> {
|
||||
TextElem::set_features(FontFeatures(vec![(Tag::from_bytes(b"flac"), 1)])).wrap()
|
||||
}
|
||||
|
||||
/// Sets dtls OpenType feature.
|
||||
pub fn style_dtls() -> LazyHash<Style> {
|
||||
TextElem::set_features(FontFeatures(vec![(Tag::from_bytes(b"dtls"), 1)])).wrap()
|
||||
}
|
||||
|
||||
/// The style for subscripts in the current style.
|
||||
pub fn style_for_subscript(styles: StyleChain) -> [LazyHash<Style>; 2] {
|
||||
[style_for_superscript(styles), EquationElem::set_cramped(true).wrap()]
|
||||
@ -97,15 +109,6 @@ pub fn style_for_script_scale(ctx: &MathContext) -> LazyHash<Style> {
|
||||
.wrap()
|
||||
}
|
||||
|
||||
/// How a delimieter should be aligned when scaling.
|
||||
pub fn delimiter_alignment(delimiter: char) -> VAlignment {
|
||||
match delimiter {
|
||||
'⌜' | '⌝' => VAlignment::Top,
|
||||
'⌞' | '⌟' => VAlignment::Bottom,
|
||||
_ => VAlignment::Horizon,
|
||||
}
|
||||
}
|
||||
|
||||
/// Stack rows on top of each other.
|
||||
///
|
||||
/// Add a `gap` between each row and uses the baseline of the `baseline`-th
|
||||
@ -117,7 +120,6 @@ pub fn stack(
|
||||
gap: Abs,
|
||||
baseline: usize,
|
||||
alternator: LeftRightAlternator,
|
||||
minimum_ascent_descent: Option<(Abs, Abs)>,
|
||||
) -> Frame {
|
||||
let AlignmentResult { points, width } = alignments(&rows);
|
||||
let rows: Vec<_> = rows
|
||||
@ -125,13 +127,9 @@ pub fn stack(
|
||||
.map(|row| row.into_line_frame(&points, alternator))
|
||||
.collect();
|
||||
|
||||
let padded_height = |height: Abs| {
|
||||
height.max(minimum_ascent_descent.map_or(Abs::zero(), |(a, d)| a + d))
|
||||
};
|
||||
|
||||
let mut frame = Frame::soft(Size::new(
|
||||
width,
|
||||
rows.iter().map(|row| padded_height(row.height())).sum::<Abs>()
|
||||
rows.iter().map(|row| row.height()).sum::<Abs>()
|
||||
+ rows.len().saturating_sub(1) as f64 * gap,
|
||||
));
|
||||
|
||||
@ -142,14 +140,11 @@ pub fn stack(
|
||||
} else {
|
||||
Abs::zero()
|
||||
};
|
||||
let ascent_padded_part = minimum_ascent_descent
|
||||
.map_or(Abs::zero(), |(a, _)| (a - row.ascent()))
|
||||
.max(Abs::zero());
|
||||
let pos = Point::new(x, y + ascent_padded_part);
|
||||
let pos = Point::new(x, y);
|
||||
if i == baseline {
|
||||
frame.set_baseline(y + row.baseline() + ascent_padded_part);
|
||||
frame.set_baseline(y + row.baseline());
|
||||
}
|
||||
y += padded_height(row.height()) + gap;
|
||||
y += row.height() + gap;
|
||||
frame.push_frame(pos, row);
|
||||
}
|
||||
|
||||
|
@ -1,19 +1,10 @@
|
||||
use ttf_parser::math::{GlyphAssembly, GlyphConstruction, GlyphPart};
|
||||
use ttf_parser::LazyArray16;
|
||||
use typst_library::diag::{warning, SourceResult};
|
||||
use typst_library::foundations::{Packed, StyleChain};
|
||||
use typst_library::layout::{Abs, Axis, Frame, Point, Rel, Size};
|
||||
use typst_library::layout::{Abs, Axis, Rel};
|
||||
use typst_library::math::StretchElem;
|
||||
use typst_utils::Get;
|
||||
|
||||
use super::{
|
||||
delimiter_alignment, GlyphFragment, MathContext, MathFragment, Scaled,
|
||||
VariantFragment,
|
||||
};
|
||||
use crate::modifiers::FrameModify;
|
||||
|
||||
/// Maximum number of times extenders can be repeated.
|
||||
const MAX_REPEATS: usize = 1024;
|
||||
use super::{stretch_axes, MathContext, MathFragment};
|
||||
|
||||
/// Lays out a [`StretchElem`].
|
||||
#[typst_macros::time(name = "math.stretch", span = elem.span())]
|
||||
@ -23,15 +14,7 @@ pub fn layout_stretch(
|
||||
styles: StyleChain,
|
||||
) -> SourceResult<()> {
|
||||
let mut fragment = ctx.layout_into_fragment(&elem.body, styles)?;
|
||||
stretch_fragment(
|
||||
ctx,
|
||||
styles,
|
||||
&mut fragment,
|
||||
None,
|
||||
None,
|
||||
elem.size(styles),
|
||||
Abs::zero(),
|
||||
);
|
||||
stretch_fragment(ctx, &mut fragment, None, None, elem.size(styles), Abs::zero());
|
||||
ctx.push(fragment);
|
||||
Ok(())
|
||||
}
|
||||
@ -39,269 +22,49 @@ pub fn layout_stretch(
|
||||
/// Attempts to stretch the given fragment by/to the amount given in stretch.
|
||||
pub fn stretch_fragment(
|
||||
ctx: &mut MathContext,
|
||||
styles: StyleChain,
|
||||
fragment: &mut MathFragment,
|
||||
axis: Option<Axis>,
|
||||
relative_to: Option<Abs>,
|
||||
stretch: Rel<Abs>,
|
||||
short_fall: Abs,
|
||||
) {
|
||||
let glyph = match fragment {
|
||||
MathFragment::Glyph(glyph) => glyph.clone(),
|
||||
MathFragment::Variant(variant) => {
|
||||
GlyphFragment::new(ctx, styles, variant.c, variant.span)
|
||||
}
|
||||
_ => return,
|
||||
};
|
||||
let size = fragment.size();
|
||||
|
||||
let MathFragment::Glyph(ref mut glyph) = fragment else { return };
|
||||
|
||||
// Return if we attempt to stretch along an axis which isn't stretchable,
|
||||
// so that the original fragment isn't modified.
|
||||
let Some(stretch_axis) = stretch_axis(ctx, &glyph) else { return };
|
||||
let axis = axis.unwrap_or(stretch_axis);
|
||||
if axis != stretch_axis {
|
||||
let axes = stretch_axes(&glyph.item.font, glyph.base_glyph.id);
|
||||
let stretch_axis = if let Some(axis) = axis {
|
||||
if !axes.get(axis) {
|
||||
return;
|
||||
}
|
||||
|
||||
let relative_to_size = relative_to.unwrap_or_else(|| fragment.size().get(axis));
|
||||
|
||||
let mut variant = stretch_glyph(
|
||||
ctx,
|
||||
glyph,
|
||||
stretch.relative_to(relative_to_size),
|
||||
short_fall,
|
||||
axis,
|
||||
);
|
||||
|
||||
if axis == Axis::Y {
|
||||
variant.align_on_axis(ctx, delimiter_alignment(variant.c));
|
||||
}
|
||||
|
||||
*fragment = MathFragment::Variant(variant);
|
||||
}
|
||||
|
||||
/// Return whether the glyph is stretchable and if it is, along which axis it
|
||||
/// can be stretched.
|
||||
fn stretch_axis(ctx: &mut MathContext, base: &GlyphFragment) -> Option<Axis> {
|
||||
let base_id = base.id;
|
||||
let vertical = ctx
|
||||
.table
|
||||
.variants
|
||||
.and_then(|variants| variants.vertical_constructions.get(base_id))
|
||||
.map(|_| Axis::Y);
|
||||
let horizontal = ctx
|
||||
.table
|
||||
.variants
|
||||
.and_then(|variants| variants.horizontal_constructions.get(base_id))
|
||||
.map(|_| Axis::X);
|
||||
|
||||
match (vertical, horizontal) {
|
||||
(vertical, None) => vertical,
|
||||
(None, horizontal) => horizontal,
|
||||
_ => {
|
||||
axis
|
||||
} else {
|
||||
match (axes.x, axes.y) {
|
||||
(true, false) => Axis::X,
|
||||
(false, true) => Axis::Y,
|
||||
(false, false) => return,
|
||||
(true, true) => {
|
||||
// As far as we know, there aren't any glyphs that have both
|
||||
// vertical and horizontal constructions. So for the time being, we
|
||||
// will assume that a glyph cannot have both.
|
||||
ctx.engine.sink.warn(warning!(
|
||||
base.span,
|
||||
glyph.item.glyphs[0].span.0,
|
||||
"glyph has both vertical and horizontal constructions";
|
||||
hint: "this is probably a font bug";
|
||||
hint: "please file an issue at https://github.com/typst/typst/issues"
|
||||
));
|
||||
|
||||
None
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to stretch a glyph to a desired width or height.
|
||||
///
|
||||
/// The resulting frame may not have the exact desired width.
|
||||
pub fn stretch_glyph(
|
||||
ctx: &mut MathContext,
|
||||
mut base: GlyphFragment,
|
||||
target: Abs,
|
||||
short_fall: Abs,
|
||||
axis: Axis,
|
||||
) -> VariantFragment {
|
||||
// If the base glyph is good enough, use it.
|
||||
let advance = match axis {
|
||||
Axis::X => base.width,
|
||||
Axis::Y => base.height(),
|
||||
};
|
||||
let short_target = target - short_fall;
|
||||
if short_target <= advance {
|
||||
return base.into_variant();
|
||||
}
|
||||
|
||||
let mut min_overlap = Abs::zero();
|
||||
let construction = ctx
|
||||
.table
|
||||
.variants
|
||||
.and_then(|variants| {
|
||||
min_overlap = variants.min_connector_overlap.scaled(ctx, base.font_size);
|
||||
match axis {
|
||||
Axis::X => variants.horizontal_constructions,
|
||||
Axis::Y => variants.vertical_constructions,
|
||||
}
|
||||
.get(base.id)
|
||||
})
|
||||
.unwrap_or(GlyphConstruction { assembly: None, variants: LazyArray16::new(&[]) });
|
||||
|
||||
// Search for a pre-made variant with a good advance.
|
||||
let mut best_id = base.id;
|
||||
let mut best_advance = base.width;
|
||||
for variant in construction.variants {
|
||||
best_id = variant.variant_glyph;
|
||||
best_advance = base.font.to_em(variant.advance_measurement).at(base.font_size);
|
||||
if short_target <= best_advance {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// This is either good or the best we've got.
|
||||
if short_target <= best_advance || construction.assembly.is_none() {
|
||||
base.set_id(ctx, best_id);
|
||||
return base.into_variant();
|
||||
}
|
||||
|
||||
// Assemble from parts.
|
||||
let assembly = construction.assembly.unwrap();
|
||||
assemble(ctx, base, assembly, min_overlap, target, axis)
|
||||
}
|
||||
|
||||
/// Assemble a glyph from parts.
|
||||
fn assemble(
|
||||
ctx: &mut MathContext,
|
||||
base: GlyphFragment,
|
||||
assembly: GlyphAssembly,
|
||||
min_overlap: Abs,
|
||||
target: Abs,
|
||||
axis: Axis,
|
||||
) -> VariantFragment {
|
||||
// Determine the number of times the extenders need to be repeated as well
|
||||
// as a ratio specifying how much to spread the parts apart
|
||||
// (0 = maximal overlap, 1 = minimal overlap).
|
||||
let mut full;
|
||||
let mut ratio;
|
||||
let mut repeat = 0;
|
||||
loop {
|
||||
full = Abs::zero();
|
||||
ratio = 0.0;
|
||||
|
||||
let mut parts = parts(assembly, repeat).peekable();
|
||||
let mut growable = Abs::zero();
|
||||
|
||||
while let Some(part) = parts.next() {
|
||||
let mut advance = part.full_advance.scaled(ctx, base.font_size);
|
||||
if let Some(next) = parts.peek() {
|
||||
let max_overlap = part
|
||||
.end_connector_length
|
||||
.min(next.start_connector_length)
|
||||
.scaled(ctx, base.font_size);
|
||||
if max_overlap < min_overlap {
|
||||
// This condition happening is indicative of a bug in the
|
||||
// font.
|
||||
ctx.engine.sink.warn(warning!(
|
||||
base.span,
|
||||
"glyph has assembly parts with overlap less than minConnectorOverlap";
|
||||
hint: "its rendering may appear broken - this is probably a font bug";
|
||||
hint: "please file an issue at https://github.com/typst/typst/issues"
|
||||
));
|
||||
}
|
||||
|
||||
advance -= max_overlap;
|
||||
growable += max_overlap - min_overlap;
|
||||
}
|
||||
|
||||
full += advance;
|
||||
}
|
||||
|
||||
if full < target {
|
||||
let delta = target - full;
|
||||
ratio = (delta / growable).min(1.0);
|
||||
full += ratio * growable;
|
||||
}
|
||||
|
||||
if target <= full || repeat >= MAX_REPEATS {
|
||||
break;
|
||||
}
|
||||
|
||||
repeat += 1;
|
||||
}
|
||||
|
||||
let mut selected = vec![];
|
||||
let mut parts = parts(assembly, repeat).peekable();
|
||||
while let Some(part) = parts.next() {
|
||||
let mut advance = part.full_advance.scaled(ctx, base.font_size);
|
||||
if let Some(next) = parts.peek() {
|
||||
let max_overlap = part
|
||||
.end_connector_length
|
||||
.min(next.start_connector_length)
|
||||
.scaled(ctx, base.font_size);
|
||||
advance -= max_overlap;
|
||||
advance += ratio * (max_overlap - min_overlap);
|
||||
}
|
||||
|
||||
let mut fragment = base.clone();
|
||||
fragment.set_id(ctx, part.glyph_id);
|
||||
selected.push((fragment, advance));
|
||||
}
|
||||
|
||||
let size;
|
||||
let baseline;
|
||||
match axis {
|
||||
Axis::X => {
|
||||
let height = base.ascent + base.descent;
|
||||
size = Size::new(full, height);
|
||||
baseline = base.ascent;
|
||||
}
|
||||
Axis::Y => {
|
||||
let axis = ctx.constants.axis_height().scaled(ctx, base.font_size);
|
||||
let width = selected.iter().map(|(f, _)| f.width).max().unwrap_or_default();
|
||||
size = Size::new(width, full);
|
||||
baseline = full / 2.0 + axis;
|
||||
}
|
||||
}
|
||||
|
||||
let mut frame = Frame::soft(size);
|
||||
let mut offset = Abs::zero();
|
||||
frame.set_baseline(baseline);
|
||||
frame.modify(&base.modifiers);
|
||||
|
||||
for (fragment, advance) in selected {
|
||||
let pos = match axis {
|
||||
Axis::X => Point::new(offset, frame.baseline() - fragment.ascent),
|
||||
Axis::Y => Point::with_y(full - offset - fragment.height()),
|
||||
};
|
||||
frame.push_frame(pos, fragment.into_frame());
|
||||
offset += advance;
|
||||
}
|
||||
|
||||
let accent_attach = match axis {
|
||||
Axis::X => frame.width() / 2.0,
|
||||
Axis::Y => base.accent_attach,
|
||||
};
|
||||
|
||||
VariantFragment {
|
||||
c: base.c,
|
||||
frame,
|
||||
font_size: base.font_size,
|
||||
italics_correction: Abs::zero(),
|
||||
accent_attach,
|
||||
class: base.class,
|
||||
math_size: base.math_size,
|
||||
span: base.span,
|
||||
limits: base.limits,
|
||||
mid_stretched: None,
|
||||
extended_shape: true,
|
||||
let relative_to_size = relative_to.unwrap_or_else(|| size.get(stretch_axis));
|
||||
|
||||
glyph.stretch(ctx, stretch.relative_to(relative_to_size) - short_fall, stretch_axis);
|
||||
|
||||
if stretch_axis == Axis::Y {
|
||||
glyph.center_on_axis();
|
||||
}
|
||||
}
|
||||
|
||||
/// Return an iterator over the assembly's parts with extenders repeated the
/// specified number of times.
fn parts(assembly: GlyphAssembly, repeat: usize) -> impl Iterator<Item = GlyphPart> + '_ {
    assembly.parts.into_iter().flat_map(move |part| {
        let count = if part.part_flags.extender() { repeat } else { 1 };
        std::iter::repeat(part).take(count)
    })
}
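To illustrate what repeating the extenders does, here is a small sketch with a toy part type: fixed parts appear once, extender parts are repeated `repeat` times, and `repeat = 0` drops them entirely. The glyphs chosen are only an example of a vertical assembly.

#[derive(Clone, Copy, PartialEq, Debug)]
struct Part {
    glyph: char,
    extender: bool,
}

/// Expands an assembly the same way as above: extenders repeated, others kept once.
fn expand(parts: &[Part], repeat: usize) -> Vec<char> {
    parts
        .iter()
        .flat_map(|part| {
            let count = if part.extender { repeat } else { 1 };
            std::iter::repeat(part.glyph).take(count)
        })
        .collect()
}

fn main() {
    // A bracket-like assembly: bottom hook, extender, top hook.
    let assembly = [
        Part { glyph: '⎩', extender: false },
        Part { glyph: '⎪', extender: true },
        Part { glyph: '⎧', extender: false },
    ];
    assert_eq!(expand(&assembly, 0), vec!['⎩', '⎧']);
    assert_eq!(expand(&assembly, 2), vec!['⎩', '⎪', '⎪', '⎧']);
}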
@@ -12,7 +12,10 @@ use typst_syntax::{is_newline, Span};
use unicode_math_class::MathClass;
use unicode_segmentation::UnicodeSegmentation;

use super::{FrameFragment, GlyphFragment, MathContext, MathFragment, MathRun};
use super::{
has_dtls_feat, style_dtls, FrameFragment, GlyphFragment, MathContext, MathFragment,
MathRun,
};

/// Lays out a [`TextElem`].
pub fn layout_text(
@@ -65,19 +68,9 @@ fn layout_inline_text(
// Small optimization for numbers. Note that this lays out slightly
// differently to normal text and is worth re-evaluating in the future.
let mut fragments = vec![];
let is_single = text.chars().count() == 1;
for unstyled_c in text.chars() {
let c = styled_char(styles, unstyled_c, false);
let mut glyph = GlyphFragment::new(ctx, styles, c, span);
if is_single {
// Duplicate what `layout_glyph` does exactly even if it's
// probably incorrect here.
match EquationElem::size_in(styles) {
MathSize::Script => glyph.make_script_size(ctx),
MathSize::ScriptScript => glyph.make_script_script_size(ctx),
_ => {}
}
}
let glyph = GlyphFragment::new_char(ctx.font, styles, c, span)?;
fragments.push(glyph.into());
}
let frame = MathRun::new(fragments).into_frame(styles);
@@ -107,7 +100,6 @@ fn layout_inline_text(
styles,
Size::splat(Abs::inf()),
false,
None,
)?
.into_frame();

@@ -127,15 +119,21 @@ pub fn layout_symbol(
) -> SourceResult<()> {
// Switch dotless char to normal when we have the dtls OpenType feature.
// This should happen before the main styling pass.
let (unstyled_c, dtls) = match try_dotless(elem.text) {
Some(c) if ctx.dtls_table.is_some() => (c, true),
_ => (elem.text, false),
let dtls = style_dtls();
let (unstyled_c, symbol_styles) = match try_dotless(elem.text) {
Some(c) if has_dtls_feat(ctx.font) => (c, styles.chain(&dtls)),
_ => (elem.text, styles),
};
let c = styled_char(styles, unstyled_c, true);
let fragment = match GlyphFragment::try_new(ctx, styles, c, elem.span()) {
Some(glyph) => layout_glyph(glyph, dtls, ctx, styles),
None => {
let fragment: MathFragment =
match GlyphFragment::new_char(ctx.font, symbol_styles, c, elem.span()) {
Ok(mut glyph) => {
adjust_glyph_layout(&mut glyph, ctx, styles);
glyph.into()
}
Err(_) => {
// Not in the math font, fallback to normal inline text layout.
// TODO: Should replace this with proper fallback in [`GlyphFragment::new`].
layout_inline_text(c.encode_utf8(&mut [0; 4]), elem.span(), ctx, styles)?
.into()
}
@@ -144,37 +142,22 @@ pub fn layout_symbol(
Ok(())
}

/// Layout a [`GlyphFragment`].
fn layout_glyph(
mut glyph: GlyphFragment,
dtls: bool,
/// Centers large glyphs vertically on the axis, scaling them if in display
/// style.
fn adjust_glyph_layout(
glyph: &mut GlyphFragment,
ctx: &mut MathContext,
styles: StyleChain,
) -> MathFragment {
if dtls {
glyph.make_dotless_form(ctx);
}
let math_size = EquationElem::size_in(styles);
match math_size {
MathSize::Script => glyph.make_script_size(ctx),
MathSize::ScriptScript => glyph.make_script_script_size(ctx),
_ => {}
}

) {
if glyph.class == MathClass::Large {
let mut variant = if math_size == MathSize::Display {
if EquationElem::size_in(styles) == MathSize::Display {
let height = scaled!(ctx, styles, display_operator_min_height)
.max(SQRT_2 * glyph.height());
glyph.stretch_vertical(ctx, height, Abs::zero())
} else {
glyph.into_variant()
.max(SQRT_2 * glyph.size.y);
glyph.stretch_vertical(ctx, height);
};
// TeXbook p 155. Large operators are always vertically centered on the
// axis.
variant.center_on_axis(ctx);
variant.into()
} else {
glyph.into()
glyph.center_on_axis();
}
}
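As a quick aside on the sizing rule in the display-style branch above: the target height is the larger of the font's `displayOperatorMinHeight` and sqrt(2) times the glyph's natural height, after which the glyph is re-centered on the math axis. A standalone sketch of just that arithmetic (names are illustrative, not the crate's API):

```rust
use std::f64::consts::SQRT_2;

// Target height for a large operator in display style.
fn display_target_height(natural_height: f64, display_operator_min_height: f64) -> f64 {
    display_operator_min_height.max(SQRT_2 * natural_height)
}

fn main() {
    // A 10 pt operator with a 14 pt font minimum ends up about 14.14 pt tall.
    let h = display_target_height(10.0, 14.0);
    assert!((h - 14.142).abs() < 1e-2);
}
```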
@@ -285,14 +285,14 @@ fn layout_underoverspreader(
let body = ctx.layout_into_run(body, styles)?;
let body_class = body.class();
let body = body.into_fragment(styles);
let glyph = GlyphFragment::new(ctx, styles, c, span);
let stretched = glyph.stretch_horizontal(ctx, body.width(), Abs::zero());
let mut glyph = GlyphFragment::new_char(ctx.font, styles, c, span)?;
glyph.stretch_horizontal(ctx, body.width());

let mut rows = vec![];
let baseline = match position {
Position::Under => {
rows.push(MathRun::new(vec![body]));
rows.push(stretched.into());
rows.push(glyph.into());
if let Some(annotation) = annotation {
let under_style = style_for_subscript(styles);
let annotation_styles = styles.chain(&under_style);
@@ -306,20 +306,14 @@ fn layout_underoverspreader(
let annotation_styles = styles.chain(&over_style);
rows.extend(ctx.layout_into_run(annotation, annotation_styles)?.rows());
}
rows.push(stretched.into());
rows.push(glyph.into());
rows.push(MathRun::new(vec![body]));
rows.len() - 1
}
};

let frame = stack(
rows,
FixedAlignment::Center,
gap,
baseline,
LeftRightAlternator::Right,
None,
);
let frame =
stack(rows, FixedAlignment::Center, gap, baseline, LeftRightAlternator::Right);
ctx.push(FrameFragment::new(styles, frame).with_class(body_class));

Ok(())
@@ -1,6 +1,6 @@
use typst_library::foundations::StyleChain;
use typst_library::layout::{Fragment, Frame, FrameItem, HideElem, Point};
use typst_library::model::{Destination, LinkElem};
use typst_library::layout::{Abs, Fragment, Frame, FrameItem, HideElem, Point, Sides};
use typst_library::model::{Destination, LinkElem, ParElem};

/// Frame-level modifications resulting from styles that do not impose any
/// layout structure.
@@ -52,14 +52,7 @@ pub trait FrameModify {

impl FrameModify for Frame {
fn modify(&mut self, modifiers: &FrameModifiers) {
if let Some(dest) = &modifiers.dest {
let size = self.size();
self.push(Point::zero(), FrameItem::Link(dest.clone(), size));
}

if modifiers.hidden {
self.hide();
}
modify_frame(self, modifiers, None);
}
}

@@ -82,6 +75,41 @@ where
}
}

pub trait FrameModifyText {
/// Resolve and apply [`FrameModifiers`] for this text frame.
fn modify_text(&mut self, styles: StyleChain);
}

impl FrameModifyText for Frame {
fn modify_text(&mut self, styles: StyleChain) {
let modifiers = FrameModifiers::get_in(styles);
let expand_y = 0.5 * ParElem::leading_in(styles);
let outset = Sides::new(Abs::zero(), expand_y, Abs::zero(), expand_y);
modify_frame(self, &modifiers, Some(outset));
}
}

fn modify_frame(
frame: &mut Frame,
modifiers: &FrameModifiers,
link_box_outset: Option<Sides<Abs>>,
) {
if let Some(dest) = &modifiers.dest {
let mut pos = Point::zero();
let mut size = frame.size();
if let Some(outset) = link_box_outset {
pos.y -= outset.top;
pos.x -= outset.left;
size += outset.sum_by_axis();
}
frame.push(pos, FrameItem::Link(dest.clone(), size));
}

if modifiers.hidden {
frame.hide();
}
}
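To make the link-box geometry above concrete: the origin is shifted up and left by the outset, and the size grows by the summed outsets per axis; in `modify_text` the vertical outset is half the paragraph leading on each side. A dependency-free sketch with illustrative types (not typst-library's):

```rust
#[derive(Debug, PartialEq)]
struct Rect { x: f64, y: f64, w: f64, h: f64 }
struct Sides { left: f64, top: f64, right: f64, bottom: f64 }

// Grow a frame-sized link rectangle by a per-side outset.
fn outset_link_box(frame_size: (f64, f64), outset: &Sides) -> Rect {
    Rect {
        x: 0.0 - outset.left,
        y: 0.0 - outset.top,
        w: frame_size.0 + outset.left + outset.right,
        h: frame_size.1 + outset.top + outset.bottom,
    }
}

fn main() {
    // A 100x10 text frame with leading 6 gains 3 on top and bottom.
    let leading = 6.0;
    let outset = Sides { left: 0.0, top: 0.5 * leading, right: 0.0, bottom: 0.5 * leading };
    assert_eq!(
        outset_link_box((100.0, 10.0), &outset),
        Rect { x: 0.0, y: -3.0, w: 100.0, h: 16.0 },
    );
}
```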

/// Performs layout and modification in one step.
///
/// This just runs `layout(styles).modified(&FrameModifiers::get_in(styles))`,
@@ -284,6 +284,7 @@ impl<'a> CurveBuilder<'a> {
self.last_point = point;
self.last_control_from = point;
self.is_started = true;
self.is_empty = true;
}

/// Add a line segment.
@@ -1374,7 +1375,7 @@ impl ControlPoints {
}
}

/// Helper to draw arcs with bezier curves.
/// Helper to draw arcs with Bézier curves.
trait CurveExt {
fn arc(&mut self, start: Point, center: Point, end: Point);
fn arc_move(&mut self, start: Point, center: Point, end: Point);
@@ -1398,7 +1399,7 @@ impl CurveExt for Curve {
}
}

/// Get the control points for a bezier curve that approximates a circular arc for
/// Get the control points for a Bézier curve that approximates a circular arc for
/// a start point, an end point and a center of the circle whose arc connects
/// the two.
fn bezier_arc_control(start: Point, center: Point, end: Point) -> [Point; 2] {
@@ -29,6 +29,7 @@ csv = { workspace = true }
ecow = { workspace = true }
flate2 = { workspace = true }
fontdb = { workspace = true }
glidesort = { workspace = true }
hayagriva = { workspace = true }
icu_properties = { workspace = true }
icu_provider = { workspace = true }
@@ -38,6 +39,7 @@ indexmap = { workspace = true }
kamadak-exif = { workspace = true }
kurbo = { workspace = true }
lipsum = { workspace = true }
memchr = { workspace = true }
palette = { workspace = true }
phf = { workspace = true }
png = { workspace = true }
@@ -60,9 +62,11 @@ ttf-parser = { workspace = true }
two-face = { workspace = true }
typed-arena = { workspace = true }
unicode-math-class = { workspace = true }
unicode-normalization = { workspace = true }
unicode-segmentation = { workspace = true }
unscanny = { workspace = true }
usvg = { workspace = true }
utf8_iter = { workspace = true }
wasmi = { workspace = true }
xmlwriter = { workspace = true }

@@ -1,16 +1,20 @@
//! Diagnostics.

use std::fmt::{self, Display, Formatter};
use std::fmt::{self, Display, Formatter, Write as _};
use std::io;
use std::path::{Path, PathBuf};
use std::str::Utf8Error;
use std::string::FromUtf8Error;

use az::SaturatingAs;
use comemo::Tracked;
use ecow::{eco_vec, EcoVec};
use typst_syntax::package::{PackageSpec, PackageVersion};
use typst_syntax::{Span, Spanned, SyntaxError};
use typst_syntax::{Lines, Span, Spanned, SyntaxError};
use utf8_iter::ErrorReportingUtf8Chars;

use crate::engine::Engine;
use crate::loading::{LoadSource, Loaded};
use crate::{World, WorldExt};

/// Early-return with a [`StrResult`] or [`SourceResult`].
@@ -147,7 +151,7 @@ pub struct Warned<T> {
pub warnings: EcoVec<SourceDiagnostic>,
}

/// An error or warning in a source file.
/// An error or warning in a source or text file.
///
/// The contained spans will only be detached if any of the input source files
/// were detached.
@@ -228,6 +232,23 @@ impl From<SyntaxError> for SourceDiagnostic {
}
}

/// Destination for a deprecation message when accessing a deprecated value.
pub trait DeprecationSink {
/// Emits the given deprecation message into this sink.
fn emit(self, message: &str);
}

impl DeprecationSink for () {
fn emit(self, _: &str) {}
}

impl DeprecationSink for (&mut Engine<'_>, Span) {
/// Emits the deprecation message as a warning.
fn emit(self, message: &str) {
self.0.sink.warn(SourceDiagnostic::warning(self.1, message));
}
}
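The sink pattern above lets a call site decide what happens to a deprecation message: `()` drops it, while `(&mut engine, span)` turns it into a warning. A minimal standalone illustration (the real trait lives in typst-library; the collecting sink below is hypothetical, e.g. for tests):

```rust
trait DeprecationSink {
    fn emit(self, message: &str);
}

// Ignore messages entirely.
impl DeprecationSink for () {
    fn emit(self, _: &str) {}
}

// Hypothetical collecting sink, not part of the crate.
impl DeprecationSink for &mut Vec<String> {
    fn emit(self, message: &str) {
        self.push(message.to_owned());
    }
}

fn main() {
    let mut warnings = Vec::new();
    ().emit("ignored");
    (&mut warnings).emit("`old-name` is deprecated, use `new-name` instead");
    assert_eq!(warnings.len(), 1);
}
```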

/// A part of a diagnostic's [trace](SourceDiagnostic::trace).
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum Tracepoint {
@@ -550,31 +571,287 @@ impl From<PackageError> for EcoString {
}
}

/// A result type with a data-loading-related error.
pub type LoadResult<T> = Result<T, LoadError>;

/// A call site independent error that occurred during data loading. This avoids
/// polluting the memoization with [`Span`]s and [`FileId`]s from source files.
/// Can be turned into a [`SourceDiagnostic`] using the [`LoadedWithin::within`]
/// method available on [`LoadResult`].
///
/// [`FileId`]: typst_syntax::FileId
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct LoadError {
/// The position in the file at which the error occurred.
pos: ReportPos,
/// Must contain a message formatted like this: `"failed to do thing (cause)"`.
message: EcoString,
}

impl LoadError {
/// Creates a new error from a position in a file, a base message
/// (e.g. `failed to parse JSON`) and a concrete error (e.g. `invalid
/// number`)
pub fn new(
pos: impl Into<ReportPos>,
message: impl std::fmt::Display,
error: impl std::fmt::Display,
) -> Self {
Self {
pos: pos.into(),
message: eco_format!("{message} ({error})"),
}
}
}

impl From<Utf8Error> for LoadError {
fn from(err: Utf8Error) -> Self {
let start = err.valid_up_to();
let end = start + err.error_len().unwrap_or(0);
LoadError::new(
start..end,
"failed to convert to string",
"file is not valid utf-8",
)
}
}

/// Convert a [`LoadResult`] to a [`SourceResult`] by adding the [`Loaded`]
/// context.
pub trait LoadedWithin<T> {
/// Report an error, possibly in an external file.
fn within(self, loaded: &Loaded) -> SourceResult<T>;
}

impl<T, E> LoadedWithin<T> for Result<T, E>
where
E: Into<LoadError>,
{
fn within(self, loaded: &Loaded) -> SourceResult<T> {
self.map_err(|err| {
let LoadError { pos, message } = err.into();
load_err_in_text(loaded, pos, message)
})
}
}

/// Report an error, possibly in an external file. This will delegate to
/// [`load_err_in_invalid_text`] if the data isn't valid utf-8.
fn load_err_in_text(
loaded: &Loaded,
pos: impl Into<ReportPos>,
mut message: EcoString,
) -> EcoVec<SourceDiagnostic> {
let pos = pos.into();
// This also does utf-8 validation. Only report an error in an external
// file if it is human readable (valid utf-8), otherwise fall back to
// `load_err_in_invalid_text`.
let lines = Lines::try_from(&loaded.data);
match (loaded.source.v, lines) {
(LoadSource::Path(file_id), Ok(lines)) => {
if let Some(range) = pos.range(&lines) {
let span = Span::from_range(file_id, range);
return eco_vec![SourceDiagnostic::error(span, message)];
}

// Either `ReportPos::None` was provided, or resolving the range
// from the line/column failed. If present report the possibly
// wrong line/column in the error message anyway.
let span = Span::from_range(file_id, 0..loaded.data.len());
if let Some(pair) = pos.line_col(&lines) {
message.pop();
let (line, col) = pair.numbers();
write!(&mut message, " at {line}:{col})").ok();
}
eco_vec![SourceDiagnostic::error(span, message)]
}
(LoadSource::Bytes, Ok(lines)) => {
if let Some(pair) = pos.line_col(&lines) {
message.pop();
let (line, col) = pair.numbers();
write!(&mut message, " at {line}:{col})").ok();
}
eco_vec![SourceDiagnostic::error(loaded.source.span, message)]
}
_ => load_err_in_invalid_text(loaded, pos, message),
}
}

/// Report an error (possibly from an external file) that isn't valid utf-8.
fn load_err_in_invalid_text(
loaded: &Loaded,
pos: impl Into<ReportPos>,
mut message: EcoString,
) -> EcoVec<SourceDiagnostic> {
let line_col = pos.into().try_line_col(&loaded.data).map(|p| p.numbers());
match (loaded.source.v, line_col) {
(LoadSource::Path(file), _) => {
message.pop();
if let Some(package) = file.package() {
write!(
&mut message,
" in {package}{}",
file.vpath().as_rooted_path().display()
)
.ok();
} else {
write!(&mut message, " in {}", file.vpath().as_rootless_path().display())
.ok();
};
if let Some((line, col)) = line_col {
write!(&mut message, ":{line}:{col}").ok();
}
message.push(')');
}
(LoadSource::Bytes, Some((line, col))) => {
message.pop();
write!(&mut message, " at {line}:{col})").ok();
}
(LoadSource::Bytes, None) => (),
}
eco_vec![SourceDiagnostic::error(loaded.source.span, message)]
}

/// A position at which an error was reported.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
pub enum ReportPos {
/// Contains a range, and a line/column pair.
Full(std::ops::Range<u32>, LineCol),
/// Contains a range.
Range(std::ops::Range<u32>),
/// Contains a line/column pair.
LineCol(LineCol),
#[default]
None,
}

impl From<std::ops::Range<usize>> for ReportPos {
fn from(value: std::ops::Range<usize>) -> Self {
Self::Range(value.start.saturating_as()..value.end.saturating_as())
}
}

impl From<LineCol> for ReportPos {
fn from(value: LineCol) -> Self {
Self::LineCol(value)
}
}

impl ReportPos {
/// Creates a position from a pre-existing range and line-column pair.
pub fn full(range: std::ops::Range<usize>, pair: LineCol) -> Self {
let range = range.start.saturating_as()..range.end.saturating_as();
Self::Full(range, pair)
}

/// Tries to determine the byte range for this position.
fn range(&self, lines: &Lines<String>) -> Option<std::ops::Range<usize>> {
match self {
ReportPos::Full(range, _) => Some(range.start as usize..range.end as usize),
ReportPos::Range(range) => Some(range.start as usize..range.end as usize),
&ReportPos::LineCol(pair) => {
let i =
lines.line_column_to_byte(pair.line as usize, pair.col as usize)?;
Some(i..i)
}
ReportPos::None => None,
}
}

/// Tries to determine the line/column for this position.
fn line_col(&self, lines: &Lines<String>) -> Option<LineCol> {
match self {
&ReportPos::Full(_, pair) => Some(pair),
ReportPos::Range(range) => {
let (line, col) = lines.byte_to_line_column(range.start as usize)?;
Some(LineCol::zero_based(line, col))
}
&ReportPos::LineCol(pair) => Some(pair),
ReportPos::None => None,
}
}

/// Either gets the line/column pair, or tries to compute it from possibly
/// invalid utf-8 data.
fn try_line_col(&self, bytes: &[u8]) -> Option<LineCol> {
match self {
&ReportPos::Full(_, pair) => Some(pair),
ReportPos::Range(range) => {
LineCol::try_from_byte_pos(range.start as usize, bytes)
}
&ReportPos::LineCol(pair) => Some(pair),
ReportPos::None => None,
}
}
}

/// A line/column pair.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
/// The 0-based line.
line: u32,
/// The 0-based column.
col: u32,
}

impl LineCol {
/// Constructs the line/column pair from 0-based indices.
pub fn zero_based(line: usize, col: usize) -> Self {
Self {
line: line.saturating_as(),
col: col.saturating_as(),
}
}

/// Constructs the line/column pair from 1-based numbers.
pub fn one_based(line: usize, col: usize) -> Self {
Self::zero_based(line.saturating_sub(1), col.saturating_sub(1))
}

/// Try to compute a line/column pair from possibly invalid utf-8 data.
pub fn try_from_byte_pos(pos: usize, bytes: &[u8]) -> Option<Self> {
let bytes = &bytes[..pos];
let mut line = 0;
#[allow(clippy::double_ended_iterator_last)]
let line_start = memchr::memchr_iter(b'\n', bytes)
.inspect(|_| line += 1)
.last()
.map(|i| i + 1)
.unwrap_or(bytes.len());

let col = ErrorReportingUtf8Chars::new(&bytes[line_start..]).count();
Some(LineCol::zero_based(line, col))
}

/// Returns the 0-based line/column indices.
pub fn indices(&self) -> (usize, usize) {
(self.line as usize, self.col as usize)
}

/// Returns the 1-based line/column numbers.
pub fn numbers(&self) -> (usize, usize) {
(self.line as usize + 1, self.col as usize + 1)
}
}
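The `try_from_byte_pos` logic above boils down to: count the newlines before the byte offset to get the 0-based line, then count the characters after the last newline to get the 0-based column. A dependency-free sketch of that idea (it approximates the crate's `ErrorReportingUtf8Chars` handling of invalid UTF-8 by counting lossily decoded characters):

```rust
// Compute a (0-based line, 0-based column) pair from a byte offset.
fn line_col_from_byte_pos(pos: usize, bytes: &[u8]) -> (usize, usize) {
    let before = &bytes[..pos];
    let line = before.iter().filter(|&&b| b == b'\n').count();
    let line_start = before.iter().rposition(|&b| b == b'\n').map_or(0, |i| i + 1);
    let col = String::from_utf8_lossy(&before[line_start..]).chars().count();
    (line, col)
}

fn main() {
    let data = b"alpha\nbeta\ngamma";
    // Byte 13 points into "gamma": third line (index 2), third column (index 2).
    assert_eq!(line_col_from_byte_pos(13, data), (2, 2));
}
```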

/// Format a user-facing error message for an XML-like file format.
pub fn format_xml_like_error(format: &str, error: roxmltree::Error) -> EcoString {
match error {
roxmltree::Error::UnexpectedCloseTag(expected, actual, pos) => {
eco_format!(
"failed to parse {format} (found closing tag '{actual}' \
instead of '{expected}' in line {})",
pos.row
)
pub fn format_xml_like_error(format: &str, error: roxmltree::Error) -> LoadError {
let pos = LineCol::one_based(error.pos().row as usize, error.pos().col as usize);
let message = match error {
roxmltree::Error::UnexpectedCloseTag(expected, actual, _) => {
eco_format!("failed to parse {format} (found closing tag '{actual}' instead of '{expected}')")
}
roxmltree::Error::UnknownEntityReference(entity, pos) => {
eco_format!(
"failed to parse {format} (unknown entity '{entity}' in line {})",
pos.row
)
roxmltree::Error::UnknownEntityReference(entity, _) => {
eco_format!("failed to parse {format} (unknown entity '{entity}')")
}
roxmltree::Error::DuplicatedAttribute(attr, pos) => {
eco_format!(
"failed to parse {format} (duplicate attribute '{attr}' in line {})",
pos.row
)
roxmltree::Error::DuplicatedAttribute(attr, _) => {
eco_format!("failed to parse {format} (duplicate attribute '{attr}')")
}
roxmltree::Error::NoRootNode => {
eco_format!("failed to parse {format} (missing root node)")
}
err => eco_format!("failed to parse {format} ({err})"),
}
};

LoadError { pos: pos.into(), message }
}
@@ -312,7 +312,8 @@ impl Route<'_> {
if !self.within(Route::MAX_SHOW_RULE_DEPTH) {
bail!(
"maximum show rule depth exceeded";
hint: "check whether the show rule matches its own output"
hint: "maybe a show rule matches its own output";
hint: "maybe there are too deeply nested elements"
);
}
Ok(())
@@ -172,17 +172,29 @@ impl Array {
}

/// Returns the first item in the array. May be used on the left-hand side
/// of an assignment. Fails with an error if the array is empty.
/// an assignment. Returns the default value if the array is empty
/// or fails with an error if no default value was specified.
#[func]
pub fn first(&self) -> StrResult<Value> {
self.0.first().cloned().ok_or_else(array_is_empty)
pub fn first(
&self,
/// A default value to return if the array is empty.
#[named]
default: Option<Value>,
) -> StrResult<Value> {
self.0.first().cloned().or(default).ok_or_else(array_is_empty)
}

/// Returns the last item in the array. May be used on the left-hand side of
/// an assignment. Fails with an error if the array is empty.
/// an assignment. Returns the default value if the array is empty
/// or fails with an error if no default value was specified.
#[func]
pub fn last(&self) -> StrResult<Value> {
self.0.last().cloned().ok_or_else(array_is_empty)
pub fn last(
&self,
/// A default value to return if the array is empty.
#[named]
default: Option<Value>,
) -> StrResult<Value> {
self.0.last().cloned().or(default).ok_or_else(array_is_empty)
}
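The new `default` parameter only kicks in when the array is empty; a non-empty array still returns its first or last item. A minimal Rust sketch of that fallback chain over plain slices (illustrative helper, not the crate's API):

```rust
// `.or(default)` is only consulted when `first()` yields nothing.
fn first_or<T: Clone>(items: &[T], default: Option<T>) -> Result<T, &'static str> {
    items.first().cloned().or(default).ok_or("array is empty")
}

fn main() {
    assert_eq!(first_or(&[1, 2, 3], Some(0)), Ok(1));
    assert_eq!(first_or(&[] as &[i32], Some(0)), Ok(0));
    assert_eq!(first_or(&[] as &[i32], None), Err("array is empty"));
}
```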

/// Returns the item at the specified index in the array. May be used on the
@@ -751,7 +763,7 @@ impl Array {
///
/// ```example
/// #let array = (1, 2, 3, 4, 5, 6, 7, 8)
/// #array.chunks(3)
/// #array.chunks(3) \
/// #array.chunks(3, exact: true)
/// ```
#[func]
@@ -796,7 +808,7 @@ impl Array {
/// function. The sorting algorithm used is stable.
///
/// Returns an error if two values could not be compared or if the key
/// function (if given) yields an error.
/// or comparison function (if given) yields an error.
///
/// To sort according to multiple criteria at once, e.g. in case of equality
/// between some criteria, the key function can return an array. The results
@@ -820,17 +832,116 @@ impl Array {
/// determine the keys to sort by.
#[named]
key: Option<Func>,
/// If given, uses this function to compare elements in the array.
///
/// This function should return a boolean: `{true}` indicates that the
/// elements are in order, while `{false}` indicates that they should be
/// swapped. To keep the sort stable, if the two elements are equal, the
/// function should return `{true}`.
///
/// If this function does not order the elements properly (e.g., by
/// returning `{false}` for both `{(x, y)}` and `{(y, x)}`, or for
/// `{(x, x)}`), the resulting array will be in unspecified order.
///
/// When used together with `key`, `by` will be passed the keys instead
/// of the elements.
///
/// ```example
/// #(
///   "sorted",
///   "by",
///   "decreasing",
///   "length",
/// ).sorted(
///   key: s => s.len(),
///   by: (l, r) => l >= r,
/// )
/// ```
#[named]
by: Option<Func>,
) -> SourceResult<Array> {
match by {
Some(by) => {
let mut are_in_order = |mut x, mut y| {
if let Some(f) = &key {
// We rely on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the key.
x = f.call(engine, context, [x])?;
y = f.call(engine, context, [y])?;
}
match by.call(engine, context, [x, y])? {
Value::Bool(b) => Ok(b),
x => {
bail!(
span,
"expected boolean from `by` function, got {}",
x.ty(),
)
}
}
};
// If a comparison function is provided, we use `glidesort`
// instead of the standard library sorting algorithm to prevent
// panics in case the comparison function does not define a
// valid order (see https://github.com/typst/typst/pull/5627).
let mut result = Ok(());
let mut vec = self.0;
let mut vec = self.0.into_iter().enumerate().collect::<Vec<_>>();
glidesort::sort_by(&mut vec, |(i, x), (j, y)| {
// Because we use booleans for the comparison function, in
// order to keep the sort stable, we need to compare in the
// right order.
if i < j {
// If `x` and `y` appear in this order in the original
// array, then we should change their order (i.e.,
// return `Ordering::Greater`) iff `y` is strictly less
// than `x` (i.e., `compare(x, y)` returns `false`).
// Otherwise, we should keep them in the same order
// (i.e., return `Ordering::Less`).
match are_in_order(x.clone(), y.clone()) {
Ok(false) => Ordering::Greater,
Ok(true) => Ordering::Less,
Err(err) => {
if result.is_ok() {
result = Err(err);
}
Ordering::Equal
}
}
} else {
// If `x` and `y` appear in the opposite order in the
// original array, then we should change their order
// (i.e., return `Ordering::Less`) iff `x` is strictly
// less than `y` (i.e., `compare(y, x)` returns
// `false`). Otherwise, we should keep them in the same
// order (i.e., return `Ordering::Less`).
match are_in_order(y.clone(), x.clone()) {
Ok(false) => Ordering::Less,
Ok(true) => Ordering::Greater,
Err(err) => {
if result.is_ok() {
result = Err(err);
}
Ordering::Equal
}
}
}
});
result.map(|()| vec.into_iter().map(|(_, x)| x).collect())
}

None => {
let mut key_of = |x: Value| match &key {
// NOTE: We are relying on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the `key`.
// We rely on `comemo`'s memoization of function evaluation
// to not excessively reevaluate the key.
Some(f) => f.call(engine, context, [x]),
None => Ok(x),
};
// If no comparison function is provided, we know the order is
// valid, so we can use the standard library sort and prevent an
// extra allocation.
let mut result = Ok(());
let mut vec = self.0;
vec.make_mut().sort_by(|a, b| {
// Until we get `try` blocks :)
match (key_of(a.clone()), key_of(b.clone())) {
(Ok(a), Ok(b)) => ops::compare(&a, &b).unwrap_or_else(|err| {
if result.is_ok() {
@@ -846,7 +957,9 @@ impl Array {
}
}
});
result.map(|_| vec.into())
result.map(|()| vec.into())
}
}
}
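The stability trick in the `by` branch above is worth spelling out: each element is paired with its original index, and the boolean "in order" test is always asked about the pair in original order, so that ties keep their input positions. A self-contained sketch of the same mapping to `Ordering` (using std's stable sort; the real code uses glidesort because it tolerates inconsistent comparators without panicking):

```rust
use std::cmp::Ordering;

// Stable sort driven by a boolean "l and r are in order" predicate.
fn sorted_by<T: Clone>(items: &[T], in_order: impl Fn(&T, &T) -> bool) -> Vec<T> {
    let mut vec: Vec<(usize, T)> = items.iter().cloned().enumerate().collect();
    vec.sort_by(|(i, x), (j, y)| {
        // Ask the predicate in original order, then translate to an Ordering.
        let ordered = if i < j { in_order(x, y) } else { in_order(y, x) };
        match (i < j, ordered) {
            (true, true) => Ordering::Less,
            (true, false) => Ordering::Greater,
            (false, true) => Ordering::Greater,
            (false, false) => Ordering::Less,
        }
    });
    vec.into_iter().map(|(_, x)| x).collect()
}

fn main() {
    // Sort by decreasing length; "by" and "is" tie and keep their input order.
    let words = ["sorted", "by", "decreasing", "is", "length"];
    let result = sorted_by(&words, |l, r| l.len() >= r.len());
    assert_eq!(result, ["decreasing", "sorted", "length", "by", "is"]);
}
```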

/// Deduplicates all items in the array.
@@ -7,6 +7,7 @@ use std::sync::Arc;

use ecow::{eco_format, EcoString};
use serde::{Serialize, Serializer};
use typst_syntax::Lines;
use typst_utils::LazyHash;

use crate::diag::{bail, StrResult};
@@ -286,6 +287,16 @@ impl Serialize for Bytes {
}
}

impl TryFrom<&Bytes> for Lines<String> {
type Error = Utf8Error;

#[comemo::memoize]
fn try_from(value: &Bytes) -> Result<Lines<String>, Utf8Error> {
let text = value.as_str()?;
Ok(Lines::new(text.to_string()))
}
}

/// Any type that can back a byte buffer.
trait Bytelike: Send + Sync {
fn as_bytes(&self) -> &[u8];
@@ -708,12 +708,13 @@ pub fn fract(
}
}

/// Rounds a number to the nearest integer away from zero.
/// Rounds a number to the nearest integer.
///
/// Optionally, a number of decimal places can be specified.
/// Half-integers are rounded away from zero.
///
/// If the number of digits is negative, its absolute value will indicate the
/// amount of significant integer digits to remove before the decimal point.
/// Optionally, a number of decimal places can be specified. If negative, its
/// absolute value will indicate the amount of significant integer digits to
/// remove before the decimal point.
///
/// Note that this function will return the same type as the operand. That is,
/// applying `round` to a [`float`] will return a `float`, and to a [`decimal`],
@@ -9,7 +9,7 @@ use std::ops::Add;

use ecow::eco_format;
use smallvec::SmallVec;
use typst_syntax::{Span, Spanned};
use typst_syntax::{Span, Spanned, SyntaxMode};
use unicode_math_class::MathClass;

use crate::diag::{At, HintedStrResult, HintedString, SourceResult, StrResult};
@@ -459,6 +459,21 @@ impl FromValue for Never {
}
}

cast! {
SyntaxMode,
self => IntoValue::into_value(match self {
SyntaxMode::Markup => "markup",
SyntaxMode::Math => "math",
SyntaxMode::Code => "code",
}),
/// Evaluate as markup, as in a Typst file.
"markup" => SyntaxMode::Markup,
/// Evaluate as math, as in an equation.
"math" => SyntaxMode::Math,
/// Evaluate as code, as after a hash.
"code" => SyntaxMode::Code,
}

cast! {
MathClass,
self => IntoValue::into_value(match self {
@@ -3,7 +3,7 @@ use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::iter::{self, Sum};
use std::marker::PhantomData;
use std::ops::{Add, AddAssign, Deref, DerefMut};
use std::ops::{Add, AddAssign, ControlFlow, Deref, DerefMut};
use std::sync::Arc;

use comemo::Tracked;
@@ -414,10 +414,11 @@ impl Content {
/// Elements produced in `show` rules will not be included in the results.
pub fn query(&self, selector: Selector) -> Vec<Content> {
let mut results = Vec::new();
self.traverse(&mut |element| {
let _ = self.traverse(&mut |element| -> ControlFlow<()> {
if selector.matches(&element, None) {
results.push(element);
}
ControlFlow::Continue(())
});
results
}
@@ -427,54 +428,58 @@ impl Content {
///
/// Elements produced in `show` rules will not be included in the results.
pub fn query_first(&self, selector: &Selector) -> Option<Content> {
let mut result = None;
self.traverse(&mut |element| {
if result.is_none() && selector.matches(&element, None) {
result = Some(element);
self.traverse(&mut |element| -> ControlFlow<Content> {
if selector.matches(&element, None) {
ControlFlow::Break(element)
} else {
ControlFlow::Continue(())
}
});
result
})
.break_value()
}

/// Extracts the plain text of this content.
pub fn plain_text(&self) -> EcoString {
let mut text = EcoString::new();
self.traverse(&mut |element| {
let _ = self.traverse(&mut |element| -> ControlFlow<()> {
if let Some(textable) = element.with::<dyn PlainText>() {
textable.plain_text(&mut text);
}
ControlFlow::Continue(())
});
text
}

/// Traverse this content.
fn traverse<F>(&self, f: &mut F)
fn traverse<F, B>(&self, f: &mut F) -> ControlFlow<B>
where
F: FnMut(Content),
F: FnMut(Content) -> ControlFlow<B>,
{
f(self.clone());

self.inner
.elem
.fields()
.into_iter()
.for_each(|(_, value)| walk_value(value, f));

/// Walks a given value to find any content that matches the selector.
fn walk_value<F>(value: Value, f: &mut F)
///
/// Returns early if the function gives `ControlFlow::Break`.
fn walk_value<F, B>(value: Value, f: &mut F) -> ControlFlow<B>
where
F: FnMut(Content),
F: FnMut(Content) -> ControlFlow<B>,
{
match value {
Value::Content(content) => content.traverse(f),
Value::Array(array) => {
for value in array {
walk_value(value, f);
walk_value(value, f)?;
}
ControlFlow::Continue(())
}
_ => ControlFlow::Continue(()),
}
}
_ => {}
}

// Call f on the element itself before recursively iterating its fields.
f(self.clone())?;
for (_, value) in self.inner.elem.fields() {
walk_value(value, f)?;
}
ControlFlow::Continue(())
}
}
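The rewritten traversal above relies on `ControlFlow` for early exit: the visitor returns `Continue` or `Break`, `?` propagates a break out of the recursion, and `break_value()` recovers the found node (this is what `query_first` uses). A standalone sketch with a toy tree type:

```rust
use std::ops::ControlFlow;

#[derive(Clone, Debug, PartialEq)]
struct Node { name: &'static str, children: Vec<Node> }

// Visit the node, then its children, stopping as soon as the visitor breaks.
fn traverse<B>(node: &Node, f: &mut impl FnMut(&Node) -> ControlFlow<B>) -> ControlFlow<B> {
    f(node)?;
    for child in &node.children {
        traverse(child, f)?;
    }
    ControlFlow::Continue(())
}

fn main() {
    let tree = Node {
        name: "root",
        children: vec![
            Node { name: "heading", children: vec![] },
            Node { name: "text", children: vec![] },
        ],
    };
    let found = traverse(&tree, &mut |n| {
        if n.name == "text" { ControlFlow::Break(n.clone()) } else { ControlFlow::Continue(()) }
    })
    .break_value();
    assert_eq!(found.map(|n| n.name), Some("text"));
}
```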

@@ -261,7 +261,12 @@ pub struct ToDict(Dict);

cast! {
ToDict,
v: Module => Self(v.scope().iter().map(|(k, v, _)| (Str::from(k.clone()), v.clone())).collect()),
v: Module => Self(v
.scope()
.iter()
.map(|(k, b)| (Str::from(k.clone()), b.read().clone()))
.collect()
),
}

impl Debug for Dict {
@@ -110,7 +110,7 @@ impl f64 {
f64::signum(self)
}

/// Converts bytes to a float.
/// Interprets bytes as a float.
///
/// ```example
/// #float.from-bytes(bytes((0, 0, 0, 0, 0, 0, 240, 63))) \
@@ -120,8 +120,10 @@ impl f64 {
pub fn from_bytes(
/// The bytes that should be converted to a float.
///
/// Must be of length exactly 8 so that the result fits into a 64-bit
/// float.
/// Must have a length of either 4 or 8. The bytes are then
/// interpreted in [IEEE 754](https://en.wikipedia.org/wiki/IEEE_754)'s
/// binary32 (single-precision) or binary64 (double-precision) format
/// depending on the length of the bytes.
bytes: Bytes,
/// The endianness of the conversion.
#[named]
@@ -158,6 +160,13 @@ impl f64 {
#[named]
#[default(Endianness::Little)]
endian: Endianness,
/// The size of the resulting bytes.
///
/// This must be either 4 or 8. The call will return the
/// representation of this float in either
/// [IEEE 754](https://en.wikipedia.org/wiki/IEEE_754)'s binary32
/// (single-precision) or binary64 (double-precision) format
/// depending on the provided size.
#[named]
#[default(8)]
size: u32,
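As a small aside on the size rule documented in this hunk: 4 bytes are read as an IEEE 754 binary32 and widened, 8 bytes as a binary64. A hedged standalone sketch of that decoding (little endian, illustrative helper rather than the crate's implementation):

```rust
// Decode 4 or 8 little-endian bytes into an f64; other lengths are rejected.
fn float_from_bytes_le(bytes: &[u8]) -> Option<f64> {
    match bytes.len() {
        4 => Some(f32::from_le_bytes(bytes.try_into().ok()?) as f64),
        8 => Some(f64::from_le_bytes(bytes.try_into().ok()?)),
        _ => None, // the real function reports an error for other lengths
    }
}

fn main() {
    assert_eq!(float_from_bytes_le(&1.0f64.to_le_bytes()), Some(1.0));
    assert_eq!(float_from_bytes_le(&2.5f32.to_le_bytes()), Some(2.5));
    assert_eq!(float_from_bytes_le(&[0, 0, 240]), None);
}
```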

@@ -9,7 +9,7 @@ use ecow::{eco_format, EcoString};
use typst_syntax::{ast, Span, SyntaxNode};
use typst_utils::{singleton, LazyHash, Static};

use crate::diag::{bail, At, SourceResult, StrResult};
use crate::diag::{bail, At, DeprecationSink, SourceResult, StrResult};
use crate::engine::Engine;
use crate::foundations::{
cast, repr, scope, ty, Args, Bytes, CastInfo, Content, Context, Element, IntoArgs,
@@ -112,7 +112,7 @@ use crate::foundations::{
/// it into another file by writing `{import "foo.typ": alert}`.
///
/// # Unnamed functions { #unnamed }
/// You can also created an unnamed function without creating a binding by
/// You can also create an unnamed function without creating a binding by
/// specifying a parameter list followed by `=>` and the function body. If your
/// function has just one parameter, the parentheses around the parameter list
/// are optional. Unnamed functions are mainly useful for show rules, but also
@@ -255,11 +255,15 @@ impl Func {
}

/// Get a field from this function's scope, if possible.
pub fn field(&self, field: &str) -> StrResult<&'static Value> {
pub fn field(
&self,
field: &str,
sink: impl DeprecationSink,
) -> StrResult<&'static Value> {
let scope =
self.scope().ok_or("cannot access fields on user-defined functions")?;
match scope.get(field) {
Some(field) => Ok(field),
Some(binding) => Ok(binding.read_checked(sink)),
None => match self.name() {
Some(name) => bail!("function `{name}` does not contain field `{field}`"),
None => bail!("function does not contain field `{field}`"),
@@ -433,10 +437,10 @@ impl PartialEq for Func {
}
}

impl PartialEq<&NativeFuncData> for Func {
fn eq(&self, other: &&NativeFuncData) -> bool {
impl PartialEq<&'static NativeFuncData> for Func {
fn eq(&self, other: &&'static NativeFuncData) -> bool {
match &self.repr {
Repr::Native(native) => native.function == other.function,
Repr::Native(native) => *native == Static(*other),
_ => false,
}
}
@@ -1,4 +1,6 @@
use std::num::{NonZeroI64, NonZeroIsize, NonZeroU64, NonZeroUsize, ParseIntError};
use std::num::{
NonZeroI64, NonZeroIsize, NonZeroU32, NonZeroU64, NonZeroUsize, ParseIntError,
};

use ecow::{eco_format, EcoString};
use smallvec::SmallVec;
@@ -482,3 +484,16 @@ cast! {
"number too large"
})?,
}

cast! {
NonZeroU32,
self => Value::Int(self.get() as _),
v: i64 => v
.try_into()
.and_then(|v: u32| v.try_into())
.map_err(|_| if v <= 0 {
"number must be positive"
} else {
"number too large"
})?,
}
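The `NonZeroU32` cast above is a two-step conversion, `i64` to `u32` to `NonZeroU32`, with the error message chosen by whether the input was non-positive or simply too big. A standalone equivalent of that behaviour:

```rust
use std::num::NonZeroU32;

// Convert an i64 into a NonZeroU32, picking the appropriate error message.
fn to_non_zero_u32(v: i64) -> Result<NonZeroU32, &'static str> {
    u32::try_from(v)
        .ok()
        .and_then(NonZeroU32::new)
        .ok_or(if v <= 0 { "number must be positive" } else { "number too large" })
}

fn main() {
    assert!(to_non_zero_u32(7).is_ok());
    assert_eq!(to_non_zero_u32(0), Err("number must be positive"));
    assert_eq!(to_non_zero_u32(1 << 40), Err("number too large"));
}
```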

@@ -79,7 +79,12 @@ impl Label {

impl Repr for Label {
fn repr(&self) -> EcoString {
eco_format!("<{}>", self.resolve())
let resolved = self.resolve();
if typst_syntax::is_valid_label_literal_id(&resolved) {
eco_format!("<{resolved}>")
} else {
eco_format!("label({})", resolved.repr())
}
}
}

@@ -69,6 +69,7 @@ pub use self::ty::*;
pub use self::value::*;
pub use self::version::*;
pub use typst_macros::{scope, ty};
use typst_syntax::SyntaxMode;

#[rustfmt::skip]
#[doc(hidden)]
@@ -77,24 +78,17 @@ pub use {
indexmap::IndexMap,
};

use comemo::TrackedMut;
use ecow::EcoString;
use typst_syntax::Spanned;

use crate::diag::{bail, SourceResult, StrResult};
use crate::engine::Engine;
use crate::routines::EvalMode;
use crate::{Feature, Features};

/// Foundational types and functions.
///
/// Here, you'll find documentation for basic data types like [integers]($int)
/// and [strings]($str) as well as details about core computational functions.
#[category]
pub static FOUNDATIONS: Category;

/// Hook up all `foundations` definitions.
pub(super) fn define(global: &mut Scope, inputs: Dict, features: &Features) {
global.category(FOUNDATIONS);
global.start_category(crate::Category::Foundations);
global.define_type::<bool>();
global.define_type::<i64>();
global.define_type::<f64>();
@@ -125,6 +119,7 @@ pub(super) fn define(global: &mut Scope, inputs: Dict, features: &Features) {
}
global.define("calc", calc::module());
global.define("sys", sys::module(inputs));
global.reset_category();
}

/// Fails with an error.
@@ -278,8 +273,8 @@ pub fn eval(
/// #eval("1_2^3", mode: "math")
/// ```
#[named]
#[default(EvalMode::Code)]
mode: EvalMode,
#[default(SyntaxMode::Code)]
mode: SyntaxMode,
/// A scope of definitions that are made available.
///
/// ```example
@@ -301,7 +296,16 @@ pub fn eval(
let dict = scope;
let mut scope = Scope::new();
for (key, value) in dict {
scope.define_spanned(key, value, span);
scope.bind(key.into(), Binding::new(value, span));
}
(engine.routines.eval_string)(engine.routines, engine.world, &text, span, mode, scope)

(engine.routines.eval_string)(
engine.routines,
engine.world,
TrackedMut::reborrow_mut(&mut engine.sink),
&text,
span,
mode,
scope,
)
}
@@ -4,12 +4,13 @@ use std::sync::Arc;
use ecow::{eco_format, EcoString};
use typst_syntax::FileId;

use crate::diag::StrResult;
use crate::diag::{bail, DeprecationSink, StrResult};
use crate::foundations::{repr, ty, Content, Scope, Value};

/// An module of definitions.
/// A collection of variables and functions that are commonly related to
/// a single theme.
///
/// A module
/// A module can
/// - be built-in
/// - stem from a [file import]($scripting/#modules)
/// - stem from a [package import]($scripting/#packages) (and thus indirectly
@@ -118,11 +119,14 @@ impl Module {
}

/// Try to access a definition in the module.
pub fn field(&self, name: &str) -> StrResult<&Value> {
self.scope().get(name).ok_or_else(|| match &self.name {
Some(module) => eco_format!("module `{module}` does not contain `{name}`"),
None => eco_format!("module does not contain `{name}`"),
})
pub fn field(&self, field: &str, sink: impl DeprecationSink) -> StrResult<&Value> {
match self.scope().get(field) {
Some(binding) => Ok(binding.read_checked(sink)),
None => match &self.name {
Some(name) => bail!("module `{name}` does not contain `{field}`"),
None => bail!("module does not contain `{field}`"),
},
}
}

/// Extract the module's content.
@@ -8,7 +8,7 @@ use wasmi::Memory;

use crate::diag::{bail, At, SourceResult, StrResult};
use crate::engine::Engine;
use crate::foundations::{cast, func, scope, Bytes, Func, Module, Scope, Value};
use crate::foundations::{cast, func, scope, Binding, Bytes, Func, Module, Scope, Value};
use crate::loading::{DataSource, Load};

/// Loads a WebAssembly module.
@@ -148,13 +148,11 @@ use crate::loading::{DataSource, Load};
#[func(scope)]
pub fn plugin(
engine: &mut Engine,
/// A path to a WebAssembly file or raw WebAssembly bytes.
///
/// For more details about paths, see the [Paths section]($syntax/#paths).
/// A [path]($syntax/#paths) to a WebAssembly file or raw WebAssembly bytes.
source: Spanned<DataSource>,
) -> SourceResult<Module> {
let data = source.load(engine.world)?;
Plugin::module(data).at(source.span)
let loaded = source.load(engine.world)?;
Plugin::module(loaded.data).at(source.span)
}

#[scope]
@@ -369,7 +367,7 @@ impl Plugin {
if matches!(export.ty(), wasmi::ExternType::Func(_)) {
let name = EcoString::from(export.name());
let func = PluginFunc { plugin: shared.clone(), name: name.clone() };
scope.define(name, Func::from(func));
scope.bind(name, Binding::detached(Func::from(func)));
}
}

@@ -1,21 +1,17 @@
#[doc(inline)]
pub use typst_macros::category;

use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};

use ecow::{eco_format, EcoString};
use indexmap::map::Entry;
use indexmap::IndexMap;
use typst_syntax::ast::{self, AstNode};
use typst_syntax::Span;
use typst_utils::Static;

use crate::diag::{bail, HintedStrResult, HintedString, StrResult};
use crate::diag::{bail, DeprecationSink, HintedStrResult, HintedString, StrResult};
use crate::foundations::{
Element, Func, IntoValue, NativeElement, NativeFunc, NativeFuncData, NativeType,
Type, Value,
};
use crate::Library;
use crate::{Category, Library};

/// A stack of scopes.
#[derive(Debug, Default, Clone)]
@@ -46,14 +42,14 @@ impl<'a> Scopes<'a> {
self.top = self.scopes.pop().expect("no pushed scope");
}

/// Try to access a variable immutably.
pub fn get(&self, var: &str) -> HintedStrResult<&Value> {
/// Try to access a binding immutably.
pub fn get(&self, var: &str) -> HintedStrResult<&Binding> {
std::iter::once(&self.top)
.chain(self.scopes.iter().rev())
.find_map(|scope| scope.get(var))
.or_else(|| {
self.base.and_then(|base| match base.global.scope().get(var) {
Some(value) => Some(value),
Some(binding) => Some(binding),
None if var == "std" => Some(&base.std),
None => None,
})
@@ -61,14 +57,28 @@ impl<'a> Scopes<'a> {
.ok_or_else(|| unknown_variable(var))
}

/// Try to access a variable immutably in math.
pub fn get_in_math(&self, var: &str) -> HintedStrResult<&Value> {
/// Try to access a binding mutably.
pub fn get_mut(&mut self, var: &str) -> HintedStrResult<&mut Binding> {
std::iter::once(&mut self.top)
.chain(&mut self.scopes.iter_mut().rev())
.find_map(|scope| scope.get_mut(var))
.ok_or_else(|| {
match self.base.and_then(|base| base.global.scope().get(var)) {
Some(_) => cannot_mutate_constant(var),
_ if var == "std" => cannot_mutate_constant(var),
_ => unknown_variable(var),
}
})
}

/// Try to access a binding immutably in math.
pub fn get_in_math(&self, var: &str) -> HintedStrResult<&Binding> {
std::iter::once(&self.top)
.chain(self.scopes.iter().rev())
.find_map(|scope| scope.get(var))
.or_else(|| {
self.base.and_then(|base| match base.math.scope().get(var) {
Some(value) => Some(value),
Some(binding) => Some(binding),
None if var == "std" => Some(&base.std),
None => None,
})
@@ -81,20 +91,6 @@ impl<'a> Scopes<'a> {
})
}

/// Try to access a variable mutably.
pub fn get_mut(&mut self, var: &str) -> HintedStrResult<&mut Value> {
std::iter::once(&mut self.top)
.chain(&mut self.scopes.iter_mut().rev())
.find_map(|scope| scope.get_mut(var))
.ok_or_else(|| {
match self.base.and_then(|base| base.global.scope().get(var)) {
Some(_) => cannot_mutate_constant(var),
_ if var == "std" => cannot_mutate_constant(var),
_ => unknown_variable(var),
}
})?
}

/// Check if an std variable is shadowed.
pub fn check_std_shadowed(&self, var: &str) -> bool {
self.base.is_some_and(|base| base.global.scope().get(var).is_some())
||||
@ -104,84 +100,28 @@ impl<'a> Scopes<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cold]
|
||||
fn cannot_mutate_constant(var: &str) -> HintedString {
|
||||
eco_format!("cannot mutate a constant: {}", var).into()
|
||||
}
|
||||
|
||||
/// The error message when a variable is not found.
|
||||
#[cold]
|
||||
fn unknown_variable(var: &str) -> HintedString {
|
||||
let mut res = HintedString::new(eco_format!("unknown variable: {}", var));
|
||||
|
||||
if var.contains('-') {
|
||||
res.hint(eco_format!(
|
||||
"if you meant to use subtraction, try adding spaces around the minus sign{}: `{}`",
|
||||
if var.matches('-').count() > 1 { "s" } else { "" },
|
||||
var.replace('-', " - ")
|
||||
));
|
||||
}
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
#[cold]
|
||||
fn unknown_variable_math(var: &str, in_global: bool) -> HintedString {
|
||||
let mut res = HintedString::new(eco_format!("unknown variable: {}", var));
|
||||
|
||||
if matches!(var, "none" | "auto" | "false" | "true") {
|
||||
res.hint(eco_format!(
|
||||
"if you meant to use a literal, try adding a hash before it: `#{var}`",
|
||||
));
|
||||
} else if in_global {
|
||||
res.hint(eco_format!(
|
||||
"`{var}` is not available directly in math, try adding a hash before it: `#{var}`",
|
||||
));
|
||||
} else {
|
||||
res.hint(eco_format!(
|
||||
"if you meant to display multiple letters as is, try adding spaces between each letter: `{}`",
|
||||
var.chars()
|
||||
.flat_map(|c| [' ', c])
|
||||
.skip(1)
|
||||
.collect::<EcoString>()
|
||||
));
|
||||
res.hint(eco_format!(
|
||||
"or if you meant to display this as text, try placing it in quotes: `\"{var}\"`"
|
||||
));
|
||||
}
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
/// A map from binding names to values.
|
||||
#[derive(Default, Clone)]
|
||||
pub struct Scope {
|
||||
map: IndexMap<EcoString, Slot>,
|
||||
map: IndexMap<EcoString, Binding>,
|
||||
deduplicate: bool,
|
||||
category: Option<Category>,
|
||||
}
|
||||
|
||||
/// Scope construction.
|
||||
impl Scope {
|
||||
/// Create a new empty scope.
|
||||
pub fn new() -> Self {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
/// Create a new scope with the given capacity.
|
||||
pub fn with_capacity(capacity: usize) -> Self {
|
||||
Self {
|
||||
map: IndexMap::with_capacity(capacity),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new scope with duplication prevention.
|
||||
pub fn deduplicating() -> Self {
|
||||
Self { deduplicate: true, ..Default::default() }
|
||||
}
|
||||
|
||||
/// Enter a new category.
|
||||
pub fn category(&mut self, category: Category) {
|
||||
pub fn start_category(&mut self, category: Category) {
|
||||
self.category = Some(category);
|
||||
}
|
||||
|
||||
@ -190,102 +130,87 @@ impl Scope {
|
||||
self.category = None;
|
||||
}
|
||||
|
||||
/// Bind a value to a name.
|
||||
#[track_caller]
|
||||
pub fn define(&mut self, name: impl Into<EcoString>, value: impl IntoValue) {
|
||||
self.define_spanned(name, value, Span::detached())
|
||||
}
|
||||
|
||||
/// Bind a value to a name defined by an identifier.
|
||||
#[track_caller]
|
||||
pub fn define_ident(&mut self, ident: ast::Ident, value: impl IntoValue) {
|
||||
self.define_spanned(ident.get().clone(), value, ident.span())
|
||||
}
|
||||
|
||||
/// Bind a value to a name.
|
||||
#[track_caller]
|
||||
pub fn define_spanned(
|
||||
&mut self,
|
||||
name: impl Into<EcoString>,
|
||||
value: impl IntoValue,
|
||||
span: Span,
|
||||
) {
|
||||
let name = name.into();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
if self.deduplicate && self.map.contains_key(&name) {
|
||||
panic!("duplicate definition: {name}");
|
||||
}
|
||||
|
||||
self.map.insert(
|
||||
name,
|
||||
Slot::new(value.into_value(), span, Kind::Normal, self.category),
|
||||
);
|
||||
}
|
||||
|
||||
/// Define a captured, immutable binding.
|
||||
pub fn define_captured(
|
||||
&mut self,
|
||||
name: EcoString,
|
||||
value: Value,
|
||||
capturer: Capturer,
|
||||
span: Span,
|
||||
) {
|
||||
self.map.insert(
|
||||
name,
|
||||
Slot::new(value.into_value(), span, Kind::Captured(capturer), self.category),
|
||||
);
|
||||
}
|
||||
|
||||
/// Define a native function through a Rust type that shadows the function.
|
||||
pub fn define_func<T: NativeFunc>(&mut self) {
|
||||
#[track_caller]
|
||||
pub fn define_func<T: NativeFunc>(&mut self) -> &mut Binding {
|
||||
let data = T::data();
|
||||
self.define(data.name, Func::from(data));
|
||||
self.define(data.name, Func::from(data))
|
||||
}
|
||||
|
||||
/// Define a native function with raw function data.
|
||||
pub fn define_func_with_data(&mut self, data: &'static NativeFuncData) {
|
||||
self.define(data.name, Func::from(data));
|
||||
#[track_caller]
|
||||
pub fn define_func_with_data(
|
||||
&mut self,
|
||||
data: &'static NativeFuncData,
|
||||
) -> &mut Binding {
|
||||
self.define(data.name, Func::from(data))
|
||||
}
|
||||
|
||||
/// Define a native type.
|
||||
pub fn define_type<T: NativeType>(&mut self) {
|
||||
#[track_caller]
|
||||
pub fn define_type<T: NativeType>(&mut self) -> &mut Binding {
|
||||
let data = T::data();
|
||||
self.define(data.name, Type::from(data));
|
||||
self.define(data.name, Type::from(data))
|
||||
}
|
||||
|
||||
/// Define a native element.
|
||||
pub fn define_elem<T: NativeElement>(&mut self) {
|
||||
#[track_caller]
|
||||
pub fn define_elem<T: NativeElement>(&mut self) -> &mut Binding {
|
||||
let data = T::data();
|
||||
self.define(data.name, Element::from(data));
|
||||
self.define(data.name, Element::from(data))
|
||||
}
|
||||
|
||||
/// Try to access a variable immutably.
|
||||
pub fn get(&self, var: &str) -> Option<&Value> {
|
||||
self.map.get(var).map(Slot::read)
|
||||
/// Define a built-in with compile-time known name and returns a mutable
|
||||
/// reference to it.
|
||||
///
|
||||
/// When the name isn't compile-time known, you should instead use:
|
||||
/// - `Vm::bind` if you already have [`Binding`]
|
||||
/// - `Vm::define` if you only have a [`Value`]
|
||||
/// - [`Scope::bind`](Self::bind) if you are not operating in the context of
|
||||
/// a `Vm` or if you are binding to something that is not an AST
|
||||
/// identifier (e.g. when constructing a dynamic
|
||||
/// [`Module`](super::Module))
|
||||
#[track_caller]
|
||||
pub fn define(&mut self, name: &'static str, value: impl IntoValue) -> &mut Binding {
|
||||
#[cfg(debug_assertions)]
|
||||
if self.deduplicate && self.map.contains_key(name) {
|
||||
panic!("duplicate definition: {name}");
|
||||
}
|
||||
|
||||
/// Try to access a variable mutably.
|
||||
pub fn get_mut(&mut self, var: &str) -> Option<HintedStrResult<&mut Value>> {
|
||||
self.map
|
||||
.get_mut(var)
|
||||
.map(Slot::write)
|
||||
.map(|res| res.map_err(HintedString::from))
|
||||
let mut binding = Binding::detached(value);
|
||||
binding.category = self.category;
|
||||
self.bind(name.into(), binding)
|
||||
}
|
||||
}

/// Scope manipulation and access.
impl Scope {
    /// Inserts a binding into this scope and returns a mutable reference to it.
    ///
    /// Prefer `Vm::bind` if you are operating in the context of a `Vm`.
    pub fn bind(&mut self, name: EcoString, binding: Binding) -> &mut Binding {
        match self.map.entry(name) {
            Entry::Occupied(mut entry) => {
                entry.insert(binding);
                entry.into_mut()
            }
            Entry::Vacant(entry) => entry.insert(binding),
        }
    }

    /// Get the span of a definition.
    pub fn get_span(&self, var: &str) -> Option<Span> {
        Some(self.map.get(var)?.span)
    /// Try to access a binding immutably.
    pub fn get(&self, var: &str) -> Option<&Binding> {
        self.map.get(var)
    }

    /// Get the category of a definition.
    pub fn get_category(&self, var: &str) -> Option<Category> {
        self.map.get(var)?.category
    /// Try to access a binding mutably.
    pub fn get_mut(&mut self, var: &str) -> Option<&mut Binding> {
        self.map.get_mut(var)
    }

    /// Iterate over all definitions.
    pub fn iter(&self) -> impl Iterator<Item = (&EcoString, &Value, Span)> {
        self.map.iter().map(|(k, v)| (k, v.read(), v.span))
    pub fn iter(&self) -> impl Iterator<Item = (&EcoString, &Binding)> {
        self.map.iter()
    }
}
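Continuing the same hypothetical sketch (again not from the commit), the access side now hands out `Binding`s rather than raw `Value`s:

// Sketch: lookups and iteration yield `Binding`s, keeping span and metadata.
if let Some(binding) = scope.get("answer") {
    let _value: &Value = binding.read();
}
for (name, binding) in scope.iter() {
    println!("{name} defined at {:?}", binding.span());
}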

@ -318,28 +243,111 @@ pub trait NativeScope {
    fn scope() -> Scope;
}

/// A slot where a value is stored.
#[derive(Clone, Hash)]
struct Slot {
    /// The stored value.
/// A bound value with metadata.
#[derive(Debug, Clone, Hash)]
pub struct Binding {
    /// The bound value.
    value: Value,
    /// The kind of slot, determines how the value can be accessed.
    kind: Kind,
    /// A span associated with the stored value.
    /// The kind of binding, determines how the value can be accessed.
    kind: BindingKind,
    /// A span associated with the binding.
    span: Span,
    /// The category of the slot.
    /// The category of the binding.
    category: Option<Category>,
    /// A deprecation message for the definition.
    deprecation: Option<&'static str>,
}

/// The different kinds of slots.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
enum Kind {
enum BindingKind {
    /// A normal, mutable binding.
    Normal,
    /// A captured copy of another variable.
    Captured(Capturer),
}

impl Binding {
    /// Create a new binding with a span marking its definition site.
    pub fn new(value: impl IntoValue, span: Span) -> Self {
        Self {
            value: value.into_value(),
            span,
            kind: BindingKind::Normal,
            category: None,
            deprecation: None,
        }
    }

    /// Create a binding without a span.
    pub fn detached(value: impl IntoValue) -> Self {
        Self::new(value, Span::detached())
    }

    /// Marks this binding as deprecated, with the given `message`.
    pub fn deprecated(&mut self, message: &'static str) -> &mut Self {
        self.deprecation = Some(message);
        self
    }

    /// Read the value.
    pub fn read(&self) -> &Value {
        &self.value
    }

    /// Read the value, checking for deprecation.
    ///
    /// As the `sink`, you can:
    /// - pass `()` to ignore the message.
    /// - pass `(&mut engine, span)` to emit a warning into the engine.
    pub fn read_checked(&self, sink: impl DeprecationSink) -> &Value {
        if let Some(message) = self.deprecation {
            sink.emit(message);
        }
        &self.value
    }

    /// Try to write to the value.
    ///
    /// This fails if the value is a read-only closure capture.
    pub fn write(&mut self) -> StrResult<&mut Value> {
        match self.kind {
            BindingKind::Normal => Ok(&mut self.value),
            BindingKind::Captured(capturer) => bail!(
                "variables from outside the {} are \
                 read-only and cannot be modified",
                match capturer {
                    Capturer::Function => "function",
                    Capturer::Context => "context expression",
                }
            ),
        }
    }

    /// Create a copy of the binding for closure capturing.
    pub fn capture(&self, capturer: Capturer) -> Self {
        Self {
            kind: BindingKind::Captured(capturer),
            ..self.clone()
        }
    }

    /// A span associated with the stored value.
    pub fn span(&self) -> Span {
        self.span
    }

    /// A deprecation message for the value, if any.
    pub fn deprecation(&self) -> Option<&'static str> {
        self.deprecation
    }

    /// The category of the value, if any.
    pub fn category(&self) -> Option<Category> {
        self.category
    }
}
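A short usage sketch (not part of the diff) that exercises the `Binding` methods above; the literal values are invented:

// Sketch: reading with `()` as the sink ignores a deprecation message;
// pass `(&mut engine, span)` instead to surface it as a warning.
let mut binding = Binding::detached("typst");
binding.deprecated("this value is deprecated");
let _value = binding.read_checked(());

// Normal bindings are writable ...
assert!(binding.write().is_ok());

// ... while captured copies are read-only and report a helpful error.
let mut captured = binding.capture(Capturer::Function);
assert!(captured.write().is_err());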

/// What the variable was captured by.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Capturer {
@ -349,71 +357,56 @@ pub enum Capturer {
    Context,
}

impl Slot {
    /// Create a new slot.
    fn new(value: Value, span: Span, kind: Kind, category: Option<Category>) -> Self {
        Self { value, span, kind, category }
    }

    /// Read the value.
    fn read(&self) -> &Value {
        &self.value
    }

    /// Try to write to the value.
    fn write(&mut self) -> StrResult<&mut Value> {
        match self.kind {
            Kind::Normal => Ok(&mut self.value),
            Kind::Captured(capturer) => {
                bail!(
                    "variables from outside the {} are \
                     read-only and cannot be modified",
                    match capturer {
                        Capturer::Function => "function",
                        Capturer::Context => "context expression",
                    }
                )
            }
        }
    }
/// The error message when trying to mutate a variable from the standard
/// library.
#[cold]
fn cannot_mutate_constant(var: &str) -> HintedString {
    eco_format!("cannot mutate a constant: {}", var).into()
}

/// A group of related definitions.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Category(Static<CategoryData>);
/// The error message when a variable wasn't found.
#[cold]
fn unknown_variable(var: &str) -> HintedString {
    let mut res = HintedString::new(eco_format!("unknown variable: {}", var));

impl Category {
    /// Create a new category from raw data.
    pub const fn from_data(data: &'static CategoryData) -> Self {
        Self(Static(data))
    if var.contains('-') {
        res.hint(eco_format!(
            "if you meant to use subtraction, \
             try adding spaces around the minus sign{}: `{}`",
            if var.matches('-').count() > 1 { "s" } else { "" },
            var.replace('-', " - ")
        ));
    }

    /// The category's name.
    pub fn name(&self) -> &'static str {
        self.0.name
    }

    /// The type's title case name, for use in documentation (e.g. `String`).
    pub fn title(&self) -> &'static str {
        self.0.title
    }

    /// Documentation for the category.
    pub fn docs(&self) -> &'static str {
        self.0.docs
    }
    res
}

impl Debug for Category {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "Category({})", self.name())
    }
}
/// The error message when a variable wasn't found in math.
#[cold]
fn unknown_variable_math(var: &str, in_global: bool) -> HintedString {
    let mut res = HintedString::new(eco_format!("unknown variable: {}", var));

/// Defines a category.
#[derive(Debug)]
pub struct CategoryData {
    pub name: &'static str,
    pub title: &'static str,
    pub docs: &'static str,
    if matches!(var, "none" | "auto" | "false" | "true") {
        res.hint(eco_format!(
            "if you meant to use a literal, \
             try adding a hash before it: `#{var}`",
        ));
    } else if in_global {
        res.hint(eco_format!(
            "`{var}` is not available directly in math, \
             try adding a hash before it: `#{var}`",
        ));
    } else {
        res.hint(eco_format!(
            "if you meant to display multiple letters as is, \
             try adding spaces between each letter: `{}`",
            var.chars().flat_map(|c| [' ', c]).skip(1).collect::<EcoString>()
        ));
        res.hint(eco_format!(
            "or if you meant to display this as text, \
             try placing it in quotes: `\"{var}\"`"
        ));
    }

    res
}
|
||||
|
@ -7,12 +7,13 @@ use comemo::Tracked;
|
||||
use ecow::EcoString;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use typst_syntax::{Span, Spanned};
|
||||
use unicode_normalization::UnicodeNormalization;
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
|
||||
use crate::diag::{bail, At, SourceResult, StrResult};
|
||||
use crate::engine::Engine;
|
||||
use crate::foundations::{
|
||||
cast, dict, func, repr, scope, ty, Array, Bytes, Context, Decimal, Dict, Func,
|
||||
cast, dict, func, repr, scope, ty, Array, Bytes, Cast, Context, Decimal, Dict, Func,
|
||||
IntoValue, Label, Repr, Type, Value, Version,
|
||||
};
|
||||
use crate::layout::Alignment;
|
||||
@ -286,6 +287,30 @@ impl Str {
|
||||
Ok(c.into())
|
||||
}
|
||||
|
||||
    /// Normalizes the string to the given Unicode normal form.
    ///
    /// This is useful when manipulating strings containing Unicode combining
    /// characters.
    ///
    /// ```typ
    /// #assert.eq("é".normalize(form: "nfd"), "e\u{0301}")
    /// #assert.eq("ſ́".normalize(form: "nfkc"), "ś")
    /// ```
    #[func]
    pub fn normalize(
        &self,
        #[named]
        #[default(UnicodeNormalForm::Nfc)]
        form: UnicodeNormalForm,
    ) -> Str {
        match form {
            UnicodeNormalForm::Nfc => self.nfc().collect(),
            UnicodeNormalForm::Nfd => self.nfd().collect(),
            UnicodeNormalForm::Nfkc => self.nfkc().collect(),
            UnicodeNormalForm::Nfkd => self.nfkd().collect(),
        }
    }

    /// Whether the string contains the specified pattern.
    ///
    /// This method also has dedicated syntax: You can write `{"bc" in "abcd"}`
@ -788,6 +813,25 @@ cast! {
    v: Str => Self::Str(v),
}

/// A Unicode normalization form.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)]
pub enum UnicodeNormalForm {
    /// Canonical composition where e.g. accented letters are turned into a
    /// single Unicode codepoint.
    #[string("nfc")]
    Nfc,
    /// Canonical decomposition where e.g. accented letters are split into a
    /// separate base and diacritic.
    #[string("nfd")]
    Nfd,
    /// Like NFC, but using the Unicode compatibility decompositions.
    #[string("nfkc")]
    Nfkc,
    /// Like NFD, but using the Unicode compatibility decompositions.
    #[string("nfkd")]
    Nfkd,
}
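For reference, here is a standalone sketch (not part of the commit) of what these normal forms do, using the `unicode_normalization` crate that this file already imports:

use unicode_normalization::UnicodeNormalization;

fn main() {
    // "é" as a single precomposed codepoint.
    let composed = "\u{00e9}";
    // NFD splits it into a base letter plus a combining accent.
    let decomposed: String = composed.nfd().collect();
    assert_eq!(decomposed, "e\u{0301}");
    // NFC recomposes it again.
    assert_eq!(decomposed.as_str().nfc().collect::<String>(), composed);
}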
|
||||
|
||||
/// Convert an item of std's `match_indices` to a dictionary.
|
||||
fn match_to_dict((start, text): (usize, &str)) -> Dict {
|
||||
dict! {
|
||||
|
@ -471,7 +471,8 @@ impl Debug for Recipe {
|
||||
selector.fmt(f)?;
|
||||
f.write_str(", ")?;
|
||||
}
|
||||
self.transform.fmt(f)
|
||||
self.transform.fmt(f)?;
|
||||
f.write_str(")")
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,8 +1,8 @@
|
||||
use std::cmp::Reverse;
|
||||
use std::collections::{BTreeSet, HashMap};
|
||||
use std::fmt::{self, Debug, Display, Formatter, Write};
|
||||
use std::sync::Arc;
|
||||
|
||||
use codex::ModifierSet;
|
||||
use ecow::{eco_format, EcoString};
|
||||
use serde::{Serialize, Serializer};
|
||||
use typst_syntax::{is_ident, Span, Spanned};
|
||||
@ -21,6 +21,7 @@ use crate::foundations::{
|
||||
/// be accessed using [field access notation]($scripting/#fields):
|
||||
///
|
||||
/// - General symbols are defined in the [`sym` module]($category/symbols/sym)
|
||||
/// and are accessible without the `sym.` prefix in math mode.
|
||||
/// - Emoji are defined in the [`emoji` module]($category/symbols/emoji)
|
||||
///
|
||||
/// Moreover, you can define custom symbols with this type's constructor
|
||||
@ -53,18 +54,18 @@ enum Repr {
|
||||
/// A native symbol that has no named variant.
|
||||
Single(char),
|
||||
/// A native symbol with multiple named variants.
|
||||
Complex(&'static [(&'static str, char)]),
|
||||
Complex(&'static [(ModifierSet<&'static str>, char)]),
|
||||
/// A symbol with multiple named variants, where some modifiers may have
|
||||
/// been applied. Also used for symbols defined at runtime by the user with
|
||||
/// no modifier applied.
|
||||
Modified(Arc<(List, EcoString)>),
|
||||
Modified(Arc<(List, ModifierSet<EcoString>)>),
|
||||
}
|
||||
|
||||
/// A collection of symbols.
|
||||
#[derive(Clone, Eq, PartialEq, Hash)]
|
||||
enum List {
|
||||
Static(&'static [(&'static str, char)]),
|
||||
Runtime(Box<[(EcoString, char)]>),
|
||||
Static(&'static [(ModifierSet<&'static str>, char)]),
|
||||
Runtime(Box<[(ModifierSet<EcoString>, char)]>),
|
||||
}
|
||||
|
||||
impl Symbol {
|
||||
@ -75,24 +76,26 @@ impl Symbol {
|
||||
|
||||
/// Create a symbol with a static variant list.
|
||||
#[track_caller]
|
||||
pub const fn list(list: &'static [(&'static str, char)]) -> Self {
|
||||
pub const fn list(list: &'static [(ModifierSet<&'static str>, char)]) -> Self {
|
||||
debug_assert!(!list.is_empty());
|
||||
Self(Repr::Complex(list))
|
||||
}
|
||||
|
||||
/// Create a symbol with a runtime variant list.
|
||||
#[track_caller]
|
||||
pub fn runtime(list: Box<[(EcoString, char)]>) -> Self {
|
||||
pub fn runtime(list: Box<[(ModifierSet<EcoString>, char)]>) -> Self {
|
||||
debug_assert!(!list.is_empty());
|
||||
Self(Repr::Modified(Arc::new((List::Runtime(list), EcoString::new()))))
|
||||
Self(Repr::Modified(Arc::new((List::Runtime(list), ModifierSet::default()))))
|
||||
}
|
||||
|
||||
/// Get the symbol's character.
|
||||
pub fn get(&self) -> char {
|
||||
match &self.0 {
|
||||
Repr::Single(c) => *c,
|
||||
Repr::Complex(_) => find(self.variants(), "").unwrap(),
|
||||
Repr::Modified(arc) => find(self.variants(), &arc.1).unwrap(),
|
||||
Repr::Complex(_) => ModifierSet::<&'static str>::default()
|
||||
.best_match_in(self.variants())
|
||||
.unwrap(),
|
||||
Repr::Modified(arc) => arc.1.best_match_in(self.variants()).unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -127,16 +130,14 @@ impl Symbol {
|
||||
/// Apply a modifier to the symbol.
|
||||
pub fn modified(mut self, modifier: &str) -> StrResult<Self> {
|
||||
if let Repr::Complex(list) = self.0 {
|
||||
self.0 = Repr::Modified(Arc::new((List::Static(list), EcoString::new())));
|
||||
self.0 =
|
||||
Repr::Modified(Arc::new((List::Static(list), ModifierSet::default())));
|
||||
}
|
||||
|
||||
if let Repr::Modified(arc) = &mut self.0 {
|
||||
let (list, modifiers) = Arc::make_mut(arc);
|
||||
if !modifiers.is_empty() {
|
||||
modifiers.push('.');
|
||||
}
|
||||
modifiers.push_str(modifier);
|
||||
if find(list.variants(), modifiers).is_some() {
|
||||
modifiers.insert_raw(modifier);
|
||||
if modifiers.best_match_in(list.variants()).is_some() {
|
||||
return Ok(self);
|
||||
}
|
||||
}
|
||||
@ -145,7 +146,7 @@ impl Symbol {
|
||||
}
|
||||
|
||||
/// The characters that are covered by this symbol.
|
||||
pub fn variants(&self) -> impl Iterator<Item = (&str, char)> {
|
||||
pub fn variants(&self) -> impl Iterator<Item = (ModifierSet<&str>, char)> {
|
||||
match &self.0 {
|
||||
Repr::Single(c) => Variants::Single(Some(*c).into_iter()),
|
||||
Repr::Complex(list) => Variants::Static(list.iter()),
|
||||
@ -155,17 +156,15 @@ impl Symbol {
|
||||
|
||||
/// Possible modifiers.
|
||||
pub fn modifiers(&self) -> impl Iterator<Item = &str> + '_ {
|
||||
let mut set = BTreeSet::new();
|
||||
let modifiers = match &self.0 {
|
||||
Repr::Modified(arc) => arc.1.as_str(),
|
||||
_ => "",
|
||||
Repr::Modified(arc) => arc.1.as_deref(),
|
||||
_ => ModifierSet::default(),
|
||||
};
|
||||
for modifier in self.variants().flat_map(|(name, _)| name.split('.')) {
|
||||
if !modifier.is_empty() && !contained(modifiers, modifier) {
|
||||
set.insert(modifier);
|
||||
}
|
||||
}
|
||||
set.into_iter()
|
||||
self.variants()
|
||||
.flat_map(|(m, _)| m)
|
||||
.filter(|modifier| !modifier.is_empty() && !modifiers.contains(modifier))
|
||||
.collect::<BTreeSet<_>>()
|
||||
.into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
@ -255,7 +254,10 @@ impl Symbol {
|
||||
seen.insert(hash, i);
|
||||
}
|
||||
|
||||
let list = variants.into_iter().map(|s| (s.v.0, s.v.1)).collect();
|
||||
let list = variants
|
||||
.into_iter()
|
||||
.map(|s| (ModifierSet::from_raw_dotted(s.v.0), s.v.1))
|
||||
.collect();
|
||||
Ok(Symbol::runtime(list))
|
||||
}
|
||||
}
|
||||
@ -290,14 +292,23 @@ impl crate::foundations::Repr for Symbol {
|
||||
match &self.0 {
|
||||
Repr::Single(c) => eco_format!("symbol(\"{}\")", *c),
|
||||
Repr::Complex(variants) => {
|
||||
eco_format!("symbol{}", repr_variants(variants.iter().copied(), ""))
|
||||
eco_format!(
|
||||
"symbol{}",
|
||||
repr_variants(variants.iter().copied(), ModifierSet::default())
|
||||
)
|
||||
}
|
||||
Repr::Modified(arc) => {
|
||||
let (list, modifiers) = arc.as_ref();
|
||||
if modifiers.is_empty() {
|
||||
eco_format!("symbol{}", repr_variants(list.variants(), ""))
|
||||
eco_format!(
|
||||
"symbol{}",
|
||||
repr_variants(list.variants(), ModifierSet::default())
|
||||
)
|
||||
} else {
|
||||
eco_format!("symbol{}", repr_variants(list.variants(), modifiers))
|
||||
eco_format!(
|
||||
"symbol{}",
|
||||
repr_variants(list.variants(), modifiers.as_deref())
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -305,24 +316,24 @@ impl crate::foundations::Repr for Symbol {
|
||||
}
|
||||
|
||||
fn repr_variants<'a>(
|
||||
variants: impl Iterator<Item = (&'a str, char)>,
|
||||
applied_modifiers: &str,
|
||||
variants: impl Iterator<Item = (ModifierSet<&'a str>, char)>,
|
||||
applied_modifiers: ModifierSet<&str>,
|
||||
) -> String {
|
||||
crate::foundations::repr::pretty_array_like(
|
||||
&variants
|
||||
.filter(|(variant, _)| {
|
||||
.filter(|(modifiers, _)| {
|
||||
// Only keep variants that can still be accessed, i.e., variants
|
||||
// that contain all applied modifiers.
|
||||
parts(applied_modifiers).all(|am| variant.split('.').any(|m| m == am))
|
||||
applied_modifiers.iter().all(|am| modifiers.contains(am))
|
||||
})
|
||||
.map(|(variant, c)| {
|
||||
let trimmed_variant = variant
|
||||
.split('.')
|
||||
.filter(|&m| parts(applied_modifiers).all(|am| m != am));
|
||||
if trimmed_variant.clone().all(|m| m.is_empty()) {
|
||||
.map(|(modifiers, c)| {
|
||||
let trimmed_modifiers =
|
||||
modifiers.into_iter().filter(|&m| !applied_modifiers.contains(m));
|
||||
if trimmed_modifiers.clone().all(|m| m.is_empty()) {
|
||||
eco_format!("\"{c}\"")
|
||||
} else {
|
||||
let trimmed_modifiers = trimmed_variant.collect::<Vec<_>>().join(".");
|
||||
let trimmed_modifiers =
|
||||
trimmed_modifiers.collect::<Vec<_>>().join(".");
|
||||
eco_format!("(\"{}\", \"{}\")", trimmed_modifiers, c)
|
||||
}
|
||||
})
|
||||
@ -368,67 +379,22 @@ cast! {
|
||||
/// Iterator over variants.
|
||||
enum Variants<'a> {
|
||||
Single(std::option::IntoIter<char>),
|
||||
Static(std::slice::Iter<'static, (&'static str, char)>),
|
||||
Runtime(std::slice::Iter<'a, (EcoString, char)>),
|
||||
Static(std::slice::Iter<'static, (ModifierSet<&'static str>, char)>),
|
||||
Runtime(std::slice::Iter<'a, (ModifierSet<EcoString>, char)>),
|
||||
}
|
||||
|
||||
impl<'a> Iterator for Variants<'a> {
|
||||
type Item = (&'a str, char);
|
||||
type Item = (ModifierSet<&'a str>, char);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self {
|
||||
Self::Single(iter) => Some(("", iter.next()?)),
|
||||
Self::Single(iter) => Some((ModifierSet::default(), iter.next()?)),
|
||||
Self::Static(list) => list.next().copied(),
|
||||
Self::Runtime(list) => list.next().map(|(s, c)| (s.as_str(), *c)),
|
||||
Self::Runtime(list) => list.next().map(|(m, c)| (m.as_deref(), *c)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Find the best symbol from the list.
|
||||
fn find<'a>(
|
||||
variants: impl Iterator<Item = (&'a str, char)>,
|
||||
modifiers: &str,
|
||||
) -> Option<char> {
|
||||
let mut best = None;
|
||||
let mut best_score = None;
|
||||
|
||||
// Find the best table entry with this name.
|
||||
'outer: for candidate in variants {
|
||||
for modifier in parts(modifiers) {
|
||||
if !contained(candidate.0, modifier) {
|
||||
continue 'outer;
|
||||
}
|
||||
}
|
||||
|
||||
let mut matching = 0;
|
||||
let mut total = 0;
|
||||
for modifier in parts(candidate.0) {
|
||||
if contained(modifiers, modifier) {
|
||||
matching += 1;
|
||||
}
|
||||
total += 1;
|
||||
}
|
||||
|
||||
let score = (matching, Reverse(total));
|
||||
if best_score.map_or(true, |b| score > b) {
|
||||
best = Some(candidate.1);
|
||||
best_score = Some(score);
|
||||
}
|
||||
}
|
||||
|
||||
best
|
||||
}
|
||||
|
||||
/// Split a modifier list into its parts.
|
||||
fn parts(modifiers: &str) -> impl Iterator<Item = &str> {
|
||||
modifiers.split('.').filter(|s| !s.is_empty())
|
||||
}
|
||||
|
||||
/// Whether the modifier string contains the modifier `m`.
|
||||
fn contained(modifiers: &str, m: &str) -> bool {
|
||||
parts(modifiers).any(|part| part == m)
|
||||
}
|
||||
|
||||
/// A single character.
|
||||
#[elem(Repr, PlainText)]
|
||||
pub struct SymbolElem {
|
||||
|
@ -3,7 +3,7 @@ use comemo::Tracked;
|
||||
use crate::diag::HintedStrResult;
|
||||
use crate::foundations::{elem, func, Cast, Context};
|
||||
|
||||
/// The compilation target.
|
||||
/// The export target.
|
||||
#[derive(Debug, Default, Copy, Clone, PartialEq, Hash, Cast)]
|
||||
pub enum Target {
|
||||
/// The target that is used for paged, fully laid-out content.
|
||||
@ -28,7 +28,49 @@ pub struct TargetElem {
|
||||
pub target: Target,
|
||||
}
|
||||
|
||||
/// Returns the current compilation target.
|
||||
/// Returns the current export target.
|
||||
///
|
||||
/// This function returns either
|
||||
/// - `{"paged"}` (for PDF, PNG, and SVG export), or
|
||||
/// - `{"html"}` (for HTML export).
|
||||
///
|
||||
/// The design of this function is not yet finalized and for this reason it is
|
||||
/// guarded behind the `html` feature. Visit the [HTML documentation
|
||||
/// page]($html) for more details.
|
||||
///
|
||||
/// # When to use it
|
||||
/// This function allows you to format your document properly across both HTML
|
||||
/// and paged export targets. It should primarily be used in templates and show
|
||||
/// rules, rather than directly in content. This way, the document's contents
|
||||
/// can be fully agnostic to the export target and content can be shared between
|
||||
/// PDF and HTML export.
|
||||
///
|
||||
/// # Varying targets
|
||||
/// This function is [contextual]($context) as the target can vary within a
|
||||
/// single compilation: When exporting to HTML, the target will be `{"paged"}`
|
||||
/// while within an [`html.frame`].
|
||||
///
|
||||
/// # Example
|
||||
/// ```example
|
||||
/// #let kbd(it) = context {
|
||||
/// if target() == "html" {
|
||||
/// html.elem("kbd", it)
|
||||
/// } else {
|
||||
/// set text(fill: rgb("#1f2328"))
|
||||
/// let r = 3pt
|
||||
/// box(
|
||||
/// fill: rgb("#f6f8fa"),
|
||||
/// stroke: rgb("#d1d9e0b3"),
|
||||
/// outset: (y: r),
|
||||
/// inset: (x: r),
|
||||
/// radius: r,
|
||||
/// raw(it)
|
||||
/// )
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// Press #kbd("F1") for help.
|
||||
/// ```
|
||||
#[func(contextual)]
|
||||
pub fn target(context: Tracked<Context>) -> HintedStrResult<Target> {
|
||||
Ok(TargetElem::target_in(context.styles()?))
|
||||
|
@ -8,7 +8,7 @@ use std::sync::LazyLock;
|
||||
use ecow::{eco_format, EcoString};
|
||||
use typst_utils::Static;
|
||||
|
||||
use crate::diag::StrResult;
|
||||
use crate::diag::{bail, DeprecationSink, StrResult};
|
||||
use crate::foundations::{
|
||||
cast, func, AutoValue, Func, NativeFuncData, NoneValue, Repr, Scope, Value,
|
||||
};
|
||||
@ -39,11 +39,25 @@ use crate::foundations::{
|
||||
/// #type(image("glacier.jpg")).
|
||||
/// ```
|
||||
///
|
||||
/// The type of `10` is `int`. Now, what is the type of `int` or even `type`?
|
||||
/// The type of `{10}` is `int`. Now, what is the type of `int` or even `type`?
|
||||
/// ```example
|
||||
/// #type(int) \
|
||||
/// #type(type)
|
||||
/// ```
|
||||
///
|
||||
/// Unlike other types like `int`, [none] and [auto] do not have a name
|
||||
/// representing them. To test if a value is one of these, compare your value to
|
||||
/// them directly, e.g:
|
||||
/// ```example
|
||||
/// #let val = none
|
||||
/// #if val == none [
|
||||
/// Yep, it's none.
|
||||
/// ]
|
||||
/// ```
|
||||
///
|
||||
/// Note that `type` will return [`content`] for all document elements. To
|
||||
/// programmatically determine which kind of content you are dealing with, see
|
||||
/// [`content.func`].
|
||||
#[ty(scope, cast)]
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct Type(Static<NativeTypeData>);
|
||||
@ -94,10 +108,15 @@ impl Type {
|
||||
}
|
||||
|
||||
/// Get a field from this type's scope, if possible.
|
||||
pub fn field(&self, field: &str) -> StrResult<&'static Value> {
|
||||
self.scope()
|
||||
.get(field)
|
||||
.ok_or_else(|| eco_format!("type {self} does not contain field `{field}`"))
|
||||
pub fn field(
|
||||
&self,
|
||||
field: &str,
|
||||
sink: impl DeprecationSink,
|
||||
) -> StrResult<&'static Value> {
|
||||
match self.scope().get(field) {
|
||||
Some(binding) => Ok(binding.read_checked(sink)),
|
||||
None => bail!("type {self} does not contain field `{field}`"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -11,7 +11,7 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
use typst_syntax::{ast, Span};
|
||||
use typst_utils::ArcExt;
|
||||
|
||||
use crate::diag::{HintedStrResult, HintedString, StrResult};
|
||||
use crate::diag::{DeprecationSink, HintedStrResult, HintedString, StrResult};
|
||||
use crate::foundations::{
|
||||
fields, ops, repr, Args, Array, AutoValue, Bytes, CastInfo, Content, Datetime,
|
||||
Decimal, Dict, Duration, Fold, FromValue, Func, IntoValue, Label, Module,
|
||||
@ -155,15 +155,15 @@ impl Value {
|
||||
}
|
||||
|
||||
/// Try to access a field on the value.
|
||||
pub fn field(&self, field: &str) -> StrResult<Value> {
|
||||
pub fn field(&self, field: &str, sink: impl DeprecationSink) -> StrResult<Value> {
|
||||
match self {
|
||||
Self::Symbol(symbol) => symbol.clone().modified(field).map(Self::Symbol),
|
||||
Self::Version(version) => version.component(field).map(Self::Int),
|
||||
Self::Dict(dict) => dict.get(field).cloned(),
|
||||
Self::Content(content) => content.field_by_name(field),
|
||||
Self::Type(ty) => ty.field(field).cloned(),
|
||||
Self::Func(func) => func.field(field).cloned(),
|
||||
Self::Module(module) => module.field(field).cloned(),
|
||||
Self::Type(ty) => ty.field(field, sink).cloned(),
|
||||
Self::Func(func) => func.field(field, sink).cloned(),
|
||||
Self::Module(module) => module.field(field, sink).cloned(),
|
||||
_ => fields::field(self, field),
|
||||
}
|
||||
}
|
||||
|
@ -6,53 +6,77 @@ pub use self::dom::*;
|
||||
|
||||
use ecow::EcoString;
|
||||
|
||||
use crate::foundations::{category, elem, Category, Content, Module, Scope};
|
||||
|
||||
/// HTML output.
|
||||
#[category]
|
||||
pub static HTML: Category;
|
||||
use crate::foundations::{elem, Content, Module, Scope};
|
||||
|
||||
/// Create a module with all HTML definitions.
|
||||
pub fn module() -> Module {
|
||||
let mut html = Scope::deduplicating();
|
||||
html.category(HTML);
|
||||
html.start_category(crate::Category::Html);
|
||||
html.define_elem::<HtmlElem>();
|
||||
html.define_elem::<FrameElem>();
|
||||
Module::new("html", html)
|
||||
}
|
||||
|
||||
/// A HTML element that can contain Typst content.
|
||||
/// An HTML element that can contain Typst content.
|
||||
///
|
||||
/// Typst's HTML export automatically generates the appropriate tags for most
|
||||
/// elements. However, sometimes, it is desirable to retain more control. For
|
||||
/// example, when using Typst to generate your blog, you could use this function
|
||||
/// to wrap each article in an `<article>` tag.
|
||||
///
|
||||
/// Typst is aware of what is valid HTML. A tag and its attributes must form
|
||||
/// syntactically valid HTML. Some tags, like `meta` do not accept content.
|
||||
/// Hence, you must not provide a body for them. We may add more checks in the
|
||||
/// future, so be sure that you are generating valid HTML when using this
|
||||
/// function.
|
||||
///
|
||||
/// Normally, Typst will generate `html`, `head`, and `body` tags for you. If
|
||||
/// you instead create them with this function, Typst will omit its own tags.
|
||||
///
|
||||
/// ```typ
|
||||
/// #html.elem("div", attrs: (style: "background: aqua"))[
|
||||
/// A div with _Typst content_ inside!
|
||||
/// ]
|
||||
/// ```
|
||||
#[elem(name = "elem")]
|
||||
pub struct HtmlElem {
|
||||
/// The element's tag.
|
||||
#[required]
|
||||
pub tag: HtmlTag,
|
||||
|
||||
/// The element's attributes.
|
||||
/// The element's HTML attributes.
|
||||
#[borrowed]
|
||||
pub attrs: HtmlAttrs,
|
||||
|
||||
/// The contents of the HTML element.
|
||||
///
|
||||
/// The body can be arbitrary Typst content.
|
||||
#[positional]
|
||||
#[borrowed]
|
||||
pub body: Option<Content>,
|
||||
}
|
||||
|
||||
impl HtmlElem {
|
||||
/// Add an atribute to the element.
|
||||
/// Add an attribute to the element.
|
||||
pub fn with_attr(mut self, attr: HtmlAttr, value: impl Into<EcoString>) -> Self {
|
||||
self.attrs.get_or_insert_with(Default::default).push(attr, value);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// An element that forces its contents to be laid out.
|
||||
/// An element that lays out its content as an inline SVG.
|
||||
///
|
||||
/// Integrates content that requires layout (e.g. a plot) into HTML output
|
||||
/// by turning it into an inline SVG.
|
||||
/// Sometimes, converting Typst content to HTML is not desirable. This can be
|
||||
/// the case for plots and other content that relies on positioning and styling
|
||||
/// to convey its message.
|
||||
///
|
||||
/// This function allows you to use the Typst layout engine that would also be
|
||||
/// used for PDF, SVG, and PNG export to render a part of your document exactly
|
||||
/// how it would appear when exported in one of these formats. It embeds the
|
||||
/// content as an inline SVG.
|
||||
#[elem]
|
||||
pub struct FrameElem {
|
||||
/// The contents that shall be laid out.
|
||||
/// The content that shall be laid out.
|
||||
#[positional]
|
||||
#[required]
|
||||
pub body: Content,
|
||||
|
@ -229,10 +229,10 @@ impl Counter {
|
||||
if self.is_page() {
|
||||
let at_delta =
|
||||
engine.introspector.page(location).get().saturating_sub(at_page.get());
|
||||
at_state.step(NonZeroUsize::ONE, at_delta);
|
||||
at_state.step(NonZeroUsize::ONE, at_delta as u64);
|
||||
let final_delta =
|
||||
engine.introspector.pages().get().saturating_sub(final_page.get());
|
||||
final_state.step(NonZeroUsize::ONE, final_delta);
|
||||
final_state.step(NonZeroUsize::ONE, final_delta as u64);
|
||||
}
|
||||
Ok(CounterState(smallvec![at_state.first(), final_state.first()]))
|
||||
}
|
||||
@ -250,7 +250,7 @@ impl Counter {
|
||||
if self.is_page() {
|
||||
let delta =
|
||||
engine.introspector.page(location).get().saturating_sub(page.get());
|
||||
state.step(NonZeroUsize::ONE, delta);
|
||||
state.step(NonZeroUsize::ONE, delta as u64);
|
||||
}
|
||||
Ok(state)
|
||||
}
|
||||
@ -319,7 +319,7 @@ impl Counter {
|
||||
|
||||
let delta = page.get() - prev.get();
|
||||
if delta > 0 {
|
||||
state.step(NonZeroUsize::ONE, delta);
|
||||
state.step(NonZeroUsize::ONE, delta as u64);
|
||||
}
|
||||
}
|
||||
|
||||
@ -500,7 +500,7 @@ impl Counter {
|
||||
let (mut state, page) = sequence.last().unwrap().clone();
|
||||
if self.is_page() {
|
||||
let delta = engine.introspector.pages().get().saturating_sub(page.get());
|
||||
state.step(NonZeroUsize::ONE, delta);
|
||||
state.step(NonZeroUsize::ONE, delta as u64);
|
||||
}
|
||||
Ok(state)
|
||||
}
|
||||
@ -616,13 +616,13 @@ pub trait Count {
|
||||
|
||||
/// Counts through elements with different levels.
|
||||
#[derive(Debug, Clone, PartialEq, Hash)]
|
||||
pub struct CounterState(pub SmallVec<[usize; 3]>);
|
||||
pub struct CounterState(pub SmallVec<[u64; 3]>);
|
||||
|
||||
impl CounterState {
|
||||
/// Get the initial counter state for the key.
|
||||
pub fn init(page: bool) -> Self {
|
||||
// Special case, because pages always start at one.
|
||||
Self(smallvec![usize::from(page)])
|
||||
Self(smallvec![u64::from(page)])
|
||||
}
|
||||
|
||||
/// Advance the counter and return the numbers for the given heading.
|
||||
@ -645,7 +645,7 @@ impl CounterState {
|
||||
}
|
||||
|
||||
/// Advance the number of the given level by the specified amount.
|
||||
pub fn step(&mut self, level: NonZeroUsize, by: usize) {
|
||||
pub fn step(&mut self, level: NonZeroUsize, by: u64) {
|
||||
let level = level.get();
|
||||
|
||||
while self.0.len() < level {
|
||||
@ -657,7 +657,7 @@ impl CounterState {
|
||||
}
|
||||
|
||||
/// Get the first number of the state.
|
||||
pub fn first(&self) -> usize {
|
||||
pub fn first(&self) -> u64 {
|
||||
self.0.first().copied().unwrap_or(1)
|
||||
}
|
||||
|
||||
@ -675,7 +675,7 @@ impl CounterState {
|
||||
cast! {
|
||||
CounterState,
|
||||
self => Value::Array(self.0.into_iter().map(IntoValue::into_value).collect()),
|
||||
num: usize => Self(smallvec![num]),
|
||||
num: u64 => Self(smallvec![num]),
|
||||
array: Array => Self(array
|
||||
.into_iter()
|
||||
.map(Value::cast)
|
||||
@ -758,7 +758,7 @@ impl Show for Packed<CounterDisplayElem> {
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct ManualPageCounter {
|
||||
physical: NonZeroUsize,
|
||||
logical: usize,
|
||||
logical: u64,
|
||||
}
|
||||
|
||||
impl ManualPageCounter {
|
||||
@ -773,7 +773,7 @@ impl ManualPageCounter {
|
||||
}
|
||||
|
||||
/// Get the current logical page counter state.
|
||||
pub fn logical(&self) -> usize {
|
||||
pub fn logical(&self) -> u64 {
|
||||
self.logical
|
||||
}
|
||||
|
||||
|
@ -10,7 +10,7 @@ use typst_utils::NonZeroExt;
|
||||
|
||||
use crate::diag::{bail, StrResult};
|
||||
use crate::foundations::{Content, Label, Repr, Selector};
|
||||
use crate::html::{HtmlElement, HtmlNode};
|
||||
use crate::html::HtmlNode;
|
||||
use crate::introspection::{Location, Tag};
|
||||
use crate::layout::{Frame, FrameItem, Page, Point, Position, Transform};
|
||||
use crate::model::Numbering;
|
||||
@ -55,8 +55,8 @@ impl Introspector {
|
||||
|
||||
/// Creates an introspector for HTML.
|
||||
#[typst_macros::time(name = "introspect html")]
|
||||
pub fn html(root: &HtmlElement) -> Self {
|
||||
IntrospectorBuilder::new().build_html(root)
|
||||
pub fn html(output: &[HtmlNode]) -> Self {
|
||||
IntrospectorBuilder::new().build_html(output)
|
||||
}
|
||||
|
||||
/// Iterates over all locatable elements.
|
||||
@ -392,9 +392,9 @@ impl IntrospectorBuilder {
|
||||
}
|
||||
|
||||
/// Build an introspector for an HTML document.
|
||||
fn build_html(mut self, root: &HtmlElement) -> Introspector {
|
||||
fn build_html(mut self, output: &[HtmlNode]) -> Introspector {
|
||||
let mut elems = Vec::new();
|
||||
self.discover_in_html(&mut elems, root);
|
||||
self.discover_in_html(&mut elems, output);
|
||||
self.finalize(elems)
|
||||
}
|
||||
|
||||
@ -434,16 +434,16 @@ impl IntrospectorBuilder {
|
||||
}
|
||||
|
||||
/// Processes the tags in the HTML element.
|
||||
fn discover_in_html(&mut self, sink: &mut Vec<Pair>, elem: &HtmlElement) {
|
||||
for child in &elem.children {
|
||||
match child {
|
||||
fn discover_in_html(&mut self, sink: &mut Vec<Pair>, nodes: &[HtmlNode]) {
|
||||
for node in nodes {
|
||||
match node {
|
||||
HtmlNode::Tag(tag) => self.discover_in_tag(
|
||||
sink,
|
||||
tag,
|
||||
Position { page: NonZeroUsize::ONE, point: Point::zero() },
|
||||
),
|
||||
HtmlNode::Text(_, _) => {}
|
||||
HtmlNode::Element(elem) => self.discover_in_html(sink, elem),
|
||||
HtmlNode::Element(elem) => self.discover_in_html(sink, &elem.children),
|
||||
HtmlNode::Frame(frame) => self.discover_in_frame(
|
||||
sink,
|
||||
frame,
|
||||
|
@ -25,24 +25,11 @@ pub use self::query_::*;
|
||||
pub use self::state::*;
|
||||
pub use self::tag::*;
|
||||
|
||||
use crate::foundations::{category, Category, Scope};
|
||||
|
||||
/// Interactions between document parts.
|
||||
///
|
||||
/// This category is home to Typst's introspection capabilities: With the
|
||||
/// `counter` function, you can access and manipulate page, section, figure, and
|
||||
/// equation counters or create custom ones. Meanwhile, the `query` function
|
||||
/// lets you search for elements in the document to construct things like a list
|
||||
/// of figures or headers which show the current chapter title.
|
||||
///
|
||||
/// Most of the functions are _contextual._ It is recommended to read the chapter
|
||||
/// on [context] before continuing here.
|
||||
#[category]
|
||||
pub static INTROSPECTION: Category;
|
||||
use crate::foundations::Scope;
|
||||
|
||||
/// Hook up all `introspection` definitions.
|
||||
pub fn define(global: &mut Scope) {
|
||||
global.category(INTROSPECTION);
|
||||
global.start_category(crate::Category::Introspection);
|
||||
global.define_type::<Location>();
|
||||
global.define_type::<Counter>();
|
||||
global.define_type::<State>();
|
||||
@ -50,4 +37,5 @@ pub fn define(global: &mut Scope) {
|
||||
global.define_func::<here>();
|
||||
global.define_func::<query>();
|
||||
global.define_func::<locate>();
|
||||
global.reset_category();
|
||||
}
|
||||
|
@ -104,7 +104,7 @@ impl Show for Packed<AlignElem> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Where to [align] something along an axis.
|
||||
/// Where to align something along an axis.
|
||||
///
|
||||
/// Possible values are:
|
||||
/// - `start`: Aligns at the [start]($direction.start) of the [text
|
||||
|
@ -50,6 +50,42 @@ impl Dir {
|
||||
pub const TTB: Self = Self::TTB;
|
||||
pub const BTT: Self = Self::BTT;
|
||||
|
||||
/// Returns a direction from a starting point.
|
||||
///
|
||||
/// ```example
|
||||
/// direction.from(left) \
|
||||
/// direction.from(right) \
|
||||
/// direction.from(top) \
|
||||
/// direction.from(bottom)
|
||||
/// ```
|
||||
#[func]
|
||||
pub const fn from(side: Side) -> Dir {
|
||||
match side {
|
||||
Side::Left => Self::LTR,
|
||||
Side::Right => Self::RTL,
|
||||
Side::Top => Self::TTB,
|
||||
Side::Bottom => Self::BTT,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a direction from an end point.
|
||||
///
|
||||
/// ```example
|
||||
/// direction.to(left) \
|
||||
/// direction.to(right) \
|
||||
/// direction.to(top) \
|
||||
/// direction.to(bottom)
|
||||
/// ```
|
||||
#[func]
|
||||
pub const fn to(side: Side) -> Dir {
|
||||
match side {
|
||||
Side::Right => Self::LTR,
|
||||
Side::Left => Self::RTL,
|
||||
Side::Bottom => Self::TTB,
|
||||
Side::Top => Self::BTT,
|
||||
}
|
||||
}
|
||||
|
||||
/// The axis this direction belongs to, either `{"horizontal"}` or
|
||||
/// `{"vertical"}`.
|
||||
///
|
||||
@ -65,6 +101,22 @@ impl Dir {
|
||||
}
|
||||
}
|
||||
|
||||
/// The corresponding sign, for use in calculations.
|
||||
///
|
||||
/// ```example
|
||||
/// #ltr.sign() \
|
||||
/// #rtl.sign() \
|
||||
/// #ttb.sign() \
|
||||
/// #btt.sign()
|
||||
/// ```
|
||||
#[func]
|
||||
pub const fn sign(self) -> i64 {
|
||||
match self {
|
||||
Self::LTR | Self::TTB => 1,
|
||||
Self::RTL | Self::BTT => -1,
|
||||
}
|
||||
}
|
||||
|
||||
/// The start point of this direction, as an alignment.
|
||||
///
|
||||
/// ```example
|
||||
|
@ -1,6 +1,6 @@
|
||||
pub mod resolve;
|
||||
|
||||
use std::num::NonZeroUsize;
|
||||
use std::num::{NonZeroU32, NonZeroUsize};
|
||||
use std::sync::Arc;
|
||||
|
||||
use comemo::Track;
|
||||
@ -468,6 +468,17 @@ pub struct GridHeader {
|
||||
#[default(true)]
|
||||
pub repeat: bool,
|
||||
|
||||
/// The level of the header. Must not be zero.
|
||||
///
|
||||
/// This allows repeating multiple headers at once. Headers with different
|
||||
/// levels can repeat together, as long as they have ascending levels.
|
||||
///
|
||||
/// Notably, when a header with a lower level starts repeating, all higher
|
||||
/// or equal level headers stop repeating (they are "replaced" by the new
|
||||
/// header).
|
||||
#[default(NonZeroU32::ONE)]
|
||||
pub level: NonZeroU32,
|
||||
|
||||
/// The cells and lines within the header.
|
||||
#[variadic]
|
||||
pub children: Vec<GridItem>,
|
||||
@ -755,7 +766,14 @@ impl Show for Packed<GridCell> {
|
||||
|
||||
impl Default for Packed<GridCell> {
|
||||
fn default() -> Self {
|
||||
Packed::new(GridCell::new(Content::default()))
|
||||
Packed::new(
|
||||
// Explicitly set colspan and rowspan to ensure they won't be
|
||||
// overridden by set rules (default cells are created after
|
||||
// colspans and rowspans are processed in the resolver)
|
||||
GridCell::new(Content::default())
|
||||
.with_colspan(NonZeroUsize::ONE)
|
||||
.with_rowspan(NonZeroUsize::ONE),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -22,7 +22,8 @@ use crate::layout::{BlockElem, Size};
|
||||
/// #let text = lorem(30)
|
||||
/// #layout(size => [
|
||||
/// #let (height,) = measure(
|
||||
/// block(width: size.width, text),
|
||||
/// width: size.width,
|
||||
/// text,
|
||||
/// )
|
||||
/// This text is #height high with
|
||||
/// the current page width: \
|
||||
@ -40,8 +41,23 @@ use crate::layout::{BlockElem, Size};
|
||||
/// receives the page's dimensions minus its margins. This is mostly useful in
|
||||
/// combination with [measurement]($measure).
|
||||
///
|
||||
/// You can also use this function to resolve [`ratio`] to fixed lengths. This
|
||||
/// might come in handy if you're building your own layout abstractions.
|
||||
/// To retrieve the _remaining_ height of the page rather than its full size,
|
||||
/// you can wrap your `layout` call in a `{block(height: 1fr)}`. This works
|
||||
/// because the block automatically grows to fill the remaining space (see the
|
||||
/// [fraction] documentation for more details).
|
||||
///
|
||||
/// ```example
|
||||
/// #set page(height: 150pt)
|
||||
///
|
||||
/// #lorem(20)
|
||||
///
|
||||
/// #block(height: 1fr, layout(size => [
|
||||
/// Remaining height: #size.height
|
||||
/// ]))
|
||||
/// ```
|
||||
///
|
||||
/// You can also use this function to resolve a [`ratio`] to a fixed length.
|
||||
/// This might come in handy if you're building your own layout abstractions.
|
||||
///
|
||||
/// ```example
|
||||
/// #layout(size => {
|
||||
|
@ -64,17 +64,11 @@ pub use self::spacing::*;
|
||||
pub use self::stack::*;
|
||||
pub use self::transform::*;
|
||||
|
||||
use crate::foundations::{category, Category, Scope};
|
||||
|
||||
/// Arranging elements on the page in different ways.
|
||||
///
|
||||
/// By combining layout functions, you can create complex and automatic layouts.
|
||||
#[category]
|
||||
pub static LAYOUT: Category;
|
||||
use crate::foundations::Scope;
|
||||
|
||||
/// Hook up all `layout` definitions.
|
||||
pub fn define(global: &mut Scope) {
|
||||
global.category(LAYOUT);
|
||||
global.start_category(crate::Category::Layout);
|
||||
global.define_type::<Length>();
|
||||
global.define_type::<Angle>();
|
||||
global.define_type::<Ratio>();
|
||||
@ -103,4 +97,5 @@ pub fn define(global: &mut Scope) {
|
||||
global.define_elem::<HideElem>();
|
||||
global.define_func::<measure>();
|
||||
global.define_func::<layout>();
|
||||
global.reset_category();
|
||||
}
|
||||
|
@ -1,16 +1,14 @@
|
||||
use std::borrow::Cow;
|
||||
use std::num::NonZeroUsize;
|
||||
use std::ops::RangeInclusive;
|
||||
use std::str::FromStr;
|
||||
|
||||
use comemo::Track;
|
||||
use typst_utils::{singleton, NonZeroExt, Scalar};
|
||||
|
||||
use crate::diag::{bail, SourceResult};
|
||||
use crate::engine::Engine;
|
||||
use crate::foundations::{
|
||||
cast, elem, Args, AutoValue, Cast, Construct, Content, Context, Dict, Fold, Func,
|
||||
NativeElement, Set, Smart, StyleChain, Value,
|
||||
cast, elem, Args, AutoValue, Cast, Construct, Content, Dict, Fold, NativeElement,
|
||||
Set, Smart, Value,
|
||||
};
|
||||
use crate::introspection::Introspector;
|
||||
use crate::layout::{
|
||||
@ -75,9 +73,10 @@ pub struct PageElem {
|
||||
/// The height of the page.
|
||||
///
|
||||
/// If this is set to `{auto}`, page breaks can only be triggered manually
|
||||
/// by inserting a [page break]($pagebreak). Most examples throughout this
|
||||
/// documentation use `{auto}` for the height of the page to dynamically
|
||||
/// grow and shrink to fit their content.
|
||||
/// by inserting a [page break]($pagebreak) or by adding another non-empty
|
||||
/// page set rule. Most examples throughout this documentation use `{auto}`
|
||||
/// for the height of the page to dynamically grow and shrink to fit their
|
||||
/// content.
|
||||
#[resolve]
|
||||
#[parse(
|
||||
args.named("height")?
|
||||
@ -270,7 +269,7 @@ pub struct PageElem {
|
||||
/// margin: (top: 32pt, bottom: 20pt),
|
||||
/// header: [
|
||||
/// #set text(8pt)
|
||||
/// #smallcaps[Typst Academcy]
|
||||
/// #smallcaps[Typst Academy]
|
||||
/// #h(1fr) _Exercise Sheet 3_
|
||||
/// ],
|
||||
/// )
|
||||
@ -483,7 +482,7 @@ pub struct Page {
|
||||
pub supplement: Content,
|
||||
/// The logical page number (controlled by `counter(page)` and may thus not
|
||||
/// match the physical number).
|
||||
pub number: usize,
|
||||
pub number: u64,
|
||||
}
|
||||
|
||||
impl Page {
|
||||
@ -648,43 +647,6 @@ cast! {
|
||||
},
|
||||
}
|
||||
|
||||
/// A header, footer, foreground or background definition.
|
||||
#[derive(Debug, Clone, Hash)]
|
||||
pub enum Marginal {
|
||||
/// Bare content.
|
||||
Content(Content),
|
||||
/// A closure mapping from a page number to content.
|
||||
Func(Func),
|
||||
}
|
||||
|
||||
impl Marginal {
|
||||
/// Resolve the marginal based on the page number.
|
||||
pub fn resolve(
|
||||
&self,
|
||||
engine: &mut Engine,
|
||||
styles: StyleChain,
|
||||
page: usize,
|
||||
) -> SourceResult<Cow<'_, Content>> {
|
||||
Ok(match self {
|
||||
Self::Content(content) => Cow::Borrowed(content),
|
||||
Self::Func(func) => Cow::Owned(
|
||||
func.call(engine, Context::new(None, Some(styles)).track(), [page])?
|
||||
.display(),
|
||||
),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
cast! {
|
||||
Marginal,
|
||||
self => match self {
|
||||
Self::Content(v) => v.into_value(),
|
||||
Self::Func(v) => v.into_value(),
|
||||
},
|
||||
v: Content => Self::Content(v),
|
||||
v: Func => Self::Func(v),
|
||||
}
|
||||
|
||||
/// A list of page ranges to be exported.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PageRanges(Vec<PageRange>);
|
||||
|
@ -8,15 +8,35 @@ use crate::foundations::{repr, ty, Repr};
|
||||
|
||||
/// A ratio of a whole.
|
||||
///
|
||||
/// Written as a number, followed by a percent sign.
|
||||
/// A ratio is written as a number, followed by a percent sign. Ratios most
|
||||
/// often appear as part of a [relative length]($relative), to specify the size
|
||||
/// of some layout element relative to the page or some container.
|
||||
///
|
||||
/// # Example
|
||||
/// ```example
|
||||
/// #set align(center)
|
||||
/// #scale(x: 150%)[
|
||||
/// Scaled apart.
|
||||
/// ]
|
||||
/// #rect(width: 25%)
|
||||
/// ```
|
||||
///
|
||||
/// However, they can also describe any other property that is relative to some
|
||||
/// base, e.g. an amount of [horizontal scaling]($scale.x) or the
|
||||
/// [height of parentheses]($math.lr.size) relative to the height of the content
|
||||
/// they enclose.
|
||||
///
|
||||
/// # Scripting
|
||||
/// Within your own code, you can use ratios as you like. You can multiply them
|
||||
/// with various other types as shown below:
|
||||
///
|
||||
/// | Multiply by | Example | Result |
|
||||
/// |-----------------|-------------------------|-----------------|
|
||||
/// | [`ratio`] | `{27% * 10%}` | `{2.7%}` |
|
||||
/// | [`length`] | `{27% * 100pt}` | `{27pt}` |
|
||||
/// | [`relative`] | `{27% * (10% + 100pt)}` | `{2.7% + 27pt}` |
|
||||
/// | [`angle`] | `{27% * 100deg}` | `{27deg}` |
|
||||
/// | [`int`] | `{27% * 2}` | `{54%}` |
|
||||
/// | [`float`] | `{27% * 0.37037}` | `{10%}` |
|
||||
/// | [`fraction`] | `{27% * 3fr}` | `{0.81fr}` |
|
||||
///
|
||||
/// When ratios are displayed in the document, they are rounded to two
|
||||
/// significant digits for readability.
|
||||
#[ty(cast)]
|
||||
#[derive(Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
pub struct Ratio(Scalar);
|
||||
|
@ -14,17 +14,58 @@ use crate::layout::{Abs, Em, Length, Ratio};
|
||||
/// addition and subtraction of a length and a ratio. Wherever a relative length
|
||||
/// is expected, you can also use a bare length or ratio.
|
||||
///
|
||||
/// # Example
|
||||
/// ```example
|
||||
/// #rect(width: 100% - 50pt)
|
||||
/// # Relative to the page
|
||||
/// A common use case is setting the width or height of a layout element (e.g.,
|
||||
/// [block], [rect], etc.) as a certain percentage of the width of the page.
|
||||
/// Here, the rectangle's width is set to `{25%}`, so it takes up one fourth of
|
||||
/// the page's _inner_ width (the width minus margins).
|
||||
///
|
||||
/// #(100% - 50pt).length \
|
||||
/// #(100% - 50pt).ratio
|
||||
/// ```example
|
||||
/// #rect(width: 25%)
|
||||
/// ```
|
||||
///
|
||||
/// Bare lengths or ratios are always valid where relative lengths are expected,
|
||||
/// but the two can also be freely mixed:
|
||||
/// ```example
|
||||
/// #rect(width: 25% + 1cm)
|
||||
/// ```
|
||||
///
|
||||
/// If you're trying to size an element so that it takes up the page's _full_
|
||||
/// width, you have a few options (this highly depends on your exact use case):
|
||||
///
|
||||
/// 1. Set page margins to `{0pt}` (`[#set page(margin: 0pt)]`)
|
||||
/// 2. Multiply the ratio by the known full page width (`{21cm * 69%}`)
|
||||
/// 3. Use padding which will negate the margins (`[#pad(x: -2.5cm, ...)]`)
|
||||
/// 4. Use the page [background](page.background) or
|
||||
/// [foreground](page.foreground) field as those don't take margins into
|
||||
/// account (note that it will render the content outside of the document
|
||||
/// flow, see [place] to control the content position)
|
||||
///
|
||||
/// # Relative to a container
|
||||
/// When a layout element (e.g. a [rect]) is nested in another layout container
|
||||
/// (e.g. a [block]) instead of being a direct descendant of the page, relative
|
||||
/// widths become relative to the container:
|
||||
///
|
||||
/// ```example
|
||||
/// #block(
|
||||
/// width: 100pt,
|
||||
/// fill: aqua,
|
||||
/// rect(width: 50%),
|
||||
/// )
|
||||
/// ```
|
||||
///
|
||||
/// # Scripting
|
||||
/// You can multiply relative lengths by [ratios]($ratio), [integers]($int), and
|
||||
/// [floats]($float).
|
||||
///
|
||||
/// A relative length has the following fields:
|
||||
/// - `length`: Its length component.
|
||||
/// - `ratio`: Its ratio component.
|
||||
///
|
||||
/// ```example
|
||||
/// #(100% - 50pt).length \
|
||||
/// #(100% - 50pt).ratio
|
||||
/// ```
|
||||
#[ty(cast, name = "relative", title = "Relative Length")]
|
||||
#[derive(Default, Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct Rel<T: Numeric = Length> {
|
||||
|