Compare commits

..

39 Commits

Author SHA1 Message Date
Laurenz
8ace67d942 Version bump 2025-03-07 11:13:08 +01:00
Laurenz
81e9bc7c8f 0.13.1 changelog (#6025) 2025-03-07 11:10:26 +01:00
Laurenz
381ff0cc2c Mark breaking symbol changes as breaking in 0.13.0 changelog (#6024) 2025-03-07 10:22:12 +01:00
Malo
393be881f8 Mention that sym.ohm was removed in the 0.13.0 changelog (#6017)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-03-07 10:22:12 +01:00
Laurenz
74826fc6ec Replace par function call in tutorial (#6023) 2025-03-07 09:53:39 +01:00
Laurenz
fe94b2b54f Hotfix for labels on symbols (#6015) 2025-03-07 09:53:39 +01:00
Andrew Voynov
e0074dfc01 Make array.chunks example more readable (#5975) 2025-03-06 16:30:59 +01:00
F2011
d97967dd40 Correct typo (#5971) 2025-03-06 16:30:59 +01:00
Tijme
9c41234574 Fix docs example with type/string comparison (#5987) 2025-03-06 16:30:59 +01:00
Emmanuel Lesueur
59569cbf61 Fix curve with multiple non-closed components. (#5963) 2025-02-26 21:21:15 +01:00
Laurenz
d04f014fc6 Fix paper name in page setup guide (#5956) 2025-02-26 21:21:15 +01:00
Laurenz
4a78a7d082
Fix false positive for type/str comparison warning (#5957) 2025-02-25 17:00:21 +01:00
aodenis
a754be513d Fix high CPU usage due to inotify watch triggering itself (#5905)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-02-25 15:28:14 +01:00
Laurenz
7d4010afad Fix introspection of HTML root sibling metadata (#5953) 2025-02-25 15:28:14 +01:00
Sharzy
4893eb501e HTML export: fix elem counting on classify_output (#5910)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-02-25 15:28:14 +01:00
Laurenz
20d4f8135a Fix comparison of Func and NativeFuncData (#5943) 2025-02-25 15:28:14 +01:00
PgBiel
a998775edc Fix HTML export of table with gutter (#5920) 2025-02-25 15:28:14 +01:00
Laurenz
8dce676dcd Version bump 2025-02-19 11:13:25 +01:00
Laurenz
c02cb70f27 Update changelog (#5894) 2025-02-19 11:07:46 +01:00
Matthew Toohey
0a534f2c0e --make-deps fixes (#5873) 2025-02-18 19:19:16 +01:00
ᡥᠠᡳᡤᡳᠶᠠ ᡥᠠᠯᠠ·ᠨᡝᡴᠣ 猫
de16a2ced1 HTML export: Use <code> for inline RawElem (#5884) 2025-02-18 11:25:46 +01:00
Laurenz
d48708c5d5 More robust SVG auto-detection (#5878) 2025-02-17 12:49:15 +01:00
Laurenz
e294fe85a5
Bring back type/str compatibility for 0.13, with warnings and hints (#5877) 2025-02-17 11:52:11 +01:00
Laurenz
2f1a5ab914 Remove Linux Libertine warning (#5876) 2025-02-16 15:13:42 +01:00
Ana Gelez
c247dbc42d Lazy parsing of the package index (#5851) 2025-02-12 17:12:01 +01:00
+merlan #flirora
c259545c6e Gradient::repeat: Fix floating-point error in stop calculation (#5837) 2025-02-12 13:53:05 +01:00
+merlan #flirora
e470ccff19 Update documentation for float.{to-bits, from-bits} (#5836) 2025-02-12 13:53:05 +01:00
Laurenz
024bbb2b46 Fix autocomplete and jumps in math (#5849) 2025-02-11 12:09:33 +01:00
Laurenz
93fe02b457 Bump typst-assets 2025-02-10 16:36:30 +01:00
Laurenz
9c3ecf43a0 Respect par constructor arguments (#5842) 2025-02-10 16:28:49 +01:00
TwoF1nger
ab5e356d81 Add smart quotes for Bulgarian (#5807) 2025-02-10 16:28:49 +01:00
Malo
88f88016e0 Add warning for pdf.embed elem used with HTML (#5829) 2025-02-10 16:28:49 +01:00
PgBiel
72060d0142 Don't crash on image with zero DPI (#5835) 2025-02-10 16:28:48 +01:00
Laurenz
20dd19c64e Fix unnecessary import rename warning (#5828) 2025-02-06 22:16:07 +01:00
Laurenz
f64d029fe6 Document removals in changelog (#5827) 2025-02-06 22:16:07 +01:00
Laurenz
c417b17442 Fix docs outline for nested definitions (#5823) 2025-02-06 11:24:19 +01:00
Malo
c2316b9a3e Documentation fixes and improvements (#5816) 2025-02-06 11:24:19 +01:00
Laurenz
d8b79b5b9b Autocomplete content methods (#5822) 2025-02-06 11:24:19 +01:00
Laurenz
56d8188c61 Release Candidate 1 2025-02-05 15:49:19 +01:00
285 changed files with 6853 additions and 5980 deletions

View File

@ -5,7 +5,6 @@ env:
RUSTFLAGS: "-Dwarnings"
RUSTDOCFLAGS: "-Dwarnings"
TYPST_TESTS_EXTENDED: true
PKG_CONFIG_i686-unknown-linux-gnu: /usr/bin/i686-linux-gnu-pkgconf
jobs:
# This allows us to have one branch protection rule for the full test matrix.
@ -28,43 +27,30 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
bits: [64]
include:
- os: ubuntu-latest
bits: 32
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- if: startsWith(matrix.os, 'ubuntu-') && matrix.bits == 32
run: |
sudo dpkg --add-architecture i386
sudo apt update
sudo apt install -y gcc-multilib libssl-dev:i386 pkg-config:i386
- uses: dtolnay/rust-toolchain@1.85.0
with:
targets: ${{ matrix.bits == 32 && 'i686-unknown-linux-gnu' || '' }}
- uses: dtolnay/rust-toolchain@1.83.0
- uses: Swatinem/rust-cache@v2
with:
key: ${{ matrix.bits }}
- run: cargo test --workspace --no-run ${{ matrix.bits == 32 && '--target i686-unknown-linux-gnu' || '' }}
- run: cargo test --workspace --no-fail-fast ${{ matrix.bits == 32 && '--target i686-unknown-linux-gnu' || '' }}
- run: cargo test --workspace --no-run
- run: cargo test --workspace --no-fail-fast
- name: Upload rendered test output
if: failure()
uses: actions/upload-artifact@v4
with:
name: tests-rendered-${{ matrix.os }}-${{ matrix.bits }}
name: tests-rendered-${{ matrix.os }}
path: tests/store/render/**
retention-days: 3
- name: Update test artifacts
if: failure()
run: |
cargo test --workspace --test tests ${{ matrix.bits == 32 && '--target i686-unknown-linux-gnu' || '' }} -- --update
cargo test --workspace --test tests -- --update
echo 'updated_artifacts=1' >> "$GITHUB_ENV"
- name: Upload updated reference output (for use if the test changes are desired)
if: failure() && env.updated_artifacts
uses: actions/upload-artifact@v4
with:
name: tests-updated-${{ matrix.os }}-${{ matrix.bits }}
name: tests-updated-${{ matrix.os }}
path: tests/ref/**
retention-days: 3
@ -73,7 +59,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.85.0
- uses: dtolnay/rust-toolchain@1.83.0
with:
components: clippy, rustfmt
- uses: Swatinem/rust-cache@v2
@ -87,7 +73,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.83.0
- uses: dtolnay/rust-toolchain@1.80.0
- uses: Swatinem/rust-cache@v2
- run: cargo check --workspace

View File

@ -44,7 +44,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.85.0
- uses: dtolnay/rust-toolchain@1.83.0
with:
target: ${{ matrix.target }}

278
Cargo.lock generated
View File

@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
version = 3
[[package]]
name = "adler2"
@ -217,20 +217,6 @@ name = "bytemuck"
version = "1.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3"
dependencies = [
"bytemuck_derive",
]
[[package]]
name = "bytemuck_derive"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "byteorder"
@ -749,12 +735,11 @@ dependencies = [
[[package]]
name = "flate2"
version = "1.1.0"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc"
checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c"
dependencies = [
"crc32fast",
"libz-rs-sys",
"miniz_oxide",
]
@ -764,15 +749,6 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4"
[[package]]
name = "float-cmp"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8"
dependencies = [
"num-traits",
]
[[package]]
name = "fnv"
version = "1.0.7"
@ -785,15 +761,6 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
[[package]]
name = "font-types"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fa6a5e5a77b5f3f7f9e32879f484aa5b3632ddfbe568a16266c904a6f32cdaf"
dependencies = [
"bytemuck",
]
[[package]]
name = "fontconfig-parser"
version = "0.5.7"
@ -805,9 +772,9 @@ dependencies = [
[[package]]
name = "fontdb"
version = "0.23.0"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "457e789b3d1202543297a350643cf459f836cade38934e7a4cf6a39e7cde2905"
checksum = "37be9fc20d966be438cd57a45767f73349477fb0f85ce86e000557f787298afb"
dependencies = [
"fontconfig-parser",
"log",
@ -862,15 +829,6 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
name = "getopts"
version = "0.2.21"
@ -913,12 +871,6 @@ dependencies = [
"weezl",
]
[[package]]
name = "glidesort"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2e102e6eb644d3e0b186fc161e4460417880a0a0b87d235f2e5b8fb30f2e9e0"
[[package]]
name = "half"
version = "2.4.1"
@ -1014,7 +966,7 @@ checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
dependencies = [
"displaydoc",
"serde",
"yoke 0.7.5",
"yoke",
"zerofrom",
"zerovec",
]
@ -1112,7 +1064,7 @@ dependencies = [
"stable_deref_trait",
"tinystr",
"writeable",
"yoke 0.7.5",
"yoke",
"zerofrom",
"zerovec",
]
@ -1223,9 +1175,9 @@ dependencies = [
[[package]]
name = "image-webp"
version = "0.2.1"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b77d01e822461baa8409e156015a1d91735549f0f2c17691bd2d996bef238f7f"
checksum = "f79afb8cbee2ef20f59ccd477a218c12a93943d075b492015ecb1bb81f8ee904"
dependencies = [
"byteorder-lite",
"quick-error",
@ -1259,12 +1211,6 @@ dependencies = [
"serde",
]
[[package]]
name = "infer"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a588916bfdfd92e71cacef98a63d9b1f0d74d6599980d11894290e7ddefffcf7"
[[package]]
name = "inotify"
version = "0.11.0"
@ -1364,50 +1310,6 @@ dependencies = [
"libc",
]
[[package]]
name = "krilla"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69ee6128ebf52d7ce684613b6431ead2959f2be9ff8cf776eeaaad0427c953e9"
dependencies = [
"base64",
"bumpalo",
"comemo",
"flate2",
"float-cmp 0.10.0",
"fxhash",
"gif",
"image-webp",
"imagesize",
"once_cell",
"pdf-writer",
"png",
"rayon",
"rustybuzz",
"siphasher",
"skrifa",
"subsetter",
"tiny-skia-path",
"xmp-writer",
"yoke 0.8.0",
"zune-jpeg",
]
[[package]]
name = "krilla-svg"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3462989578155cf620ef8035f8921533cc95c28e2a0c75de172f7219e6aba84e"
dependencies = [
"flate2",
"fontdb",
"krilla",
"png",
"resvg",
"tiny-skia",
"usvg",
]
[[package]]
name = "kurbo"
version = "0.11.1"
@ -1469,15 +1371,6 @@ dependencies = [
"redox_syscall",
]
[[package]]
name = "libz-rs-sys"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "902bc563b5d65ad9bba616b490842ef0651066a1a1dc3ce1087113ffcb873c8d"
dependencies = [
"zlib-rs",
]
[[package]]
name = "linked-hash-map"
version = "0.5.6"
@ -1565,9 +1458,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
version = "0.8.5"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924"
dependencies = [
"adler2",
"simd-adler32",
@ -1708,9 +1601,9 @@ dependencies = [
[[package]]
name = "openssl"
version = "0.10.72"
version = "0.10.70"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da"
checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6"
dependencies = [
"bitflags 2.8.0",
"cfg-if",
@ -1749,9 +1642,9 @@ dependencies = [
[[package]]
name = "openssl-sys"
version = "0.9.107"
version = "0.9.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07"
checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc"
dependencies = [
"cc",
"libc",
@ -1845,9 +1738,9 @@ checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
[[package]]
name = "pdf-writer"
version = "0.13.0"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ea27c5015ab81753fc61e49f8cde74999346605ee148bb20008ef3d3150e0dc"
checksum = "5df03c7d216de06f93f398ef06f1385a60f2c597bb96f8195c8d98e08a26b1d5"
dependencies = [
"bitflags 2.8.0",
"itoa",
@ -1911,9 +1804,9 @@ checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315"
[[package]]
name = "pixglyph"
version = "0.6.0"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c1106193bc18a4b840eb075ff6664c8a0b0270f0531bb12a7e9c803e53b55c5"
checksum = "d15afa937836bf3d876f5a04ce28810c06045857bf46c3d0d31073b8aada5494"
dependencies = [
"ttf-parser",
]
@ -2104,16 +1997,6 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "read-fonts"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "600e807b48ac55bad68a8cb75cc3c7739f139b9248f7e003e01e080f589b5288"
dependencies = [
"bytemuck",
"font-types",
]
[[package]]
name = "redox_syscall"
version = "0.5.8"
@ -2165,9 +2048,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "resvg"
version = "0.45.0"
version = "0.43.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd43d1c474e9dadf09a8fdf22d713ba668b499b5117b9b9079500224e26b5b29"
checksum = "c7314563c59c7ce31c18e23ad3dd092c37b928a0fa4e1c0a1a6504351ab411d1"
dependencies = [
"gif",
"image-webp",
@ -2238,9 +2121,9 @@ checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4"
[[package]]
name = "rustybuzz"
version = "0.20.1"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd3c7c96f8a08ee34eff8857b11b49b07d71d1c3f4e88f8a88d4c9e9f90b1702"
checksum = "c85d1ccd519e61834798eb52c4e886e8c2d7d698dd3d6ce0b1b47eb8557f1181"
dependencies = [
"bitflags 2.8.0",
"bytemuck",
@ -2432,16 +2315,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
[[package]]
name = "skrifa"
version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fa1e5622e4f7b98877e8a19890efddcac1230cec6198bd9de91ec0e00010dc8"
dependencies = [
"bytemuck",
"read-fonts",
]
[[package]]
name = "slotmap"
version = "1.0.7"
@ -2488,7 +2361,7 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6637bab7722d379c8b41ba849228d680cc12d0a45ba1fa2b48f2a30577a06731"
dependencies = [
"float-cmp 0.9.0",
"float-cmp",
]
[[package]]
@ -2531,11 +2404,28 @@ dependencies = [
[[package]]
name = "subsetter"
version = "0.2.1"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35539e8de3dcce8dd0c01f3575f85db1e5ac1aea1b996d2d09d89f148bc91497"
checksum = "74f98178f34057d4d4de93d68104007c6dea4dfac930204a69ab4622daefa648"
[[package]]
name = "svg2pdf"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5014c9dadcf318fb7ef8c16438e95abcc9de1ae24d60d5bccc64c55100c50364"
dependencies = [
"fxhash",
"fontdb",
"image",
"log",
"miniz_oxide",
"once_cell",
"pdf-writer",
"resvg",
"siphasher",
"subsetter",
"tiny-skia",
"ttf-parser",
"usvg",
]
[[package]]
@ -2819,9 +2709,9 @@ dependencies = [
[[package]]
name = "ttf-parser"
version = "0.25.1"
version = "0.24.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2df906b07856748fa3f6e0ad0cbaa047052d4a7dd609e231c4f72cee8c36f31"
checksum = "5be21190ff5d38e8b4a2d3b6a3ae57f612cc39c96e83cedeaf7abc338a8bac4a"
dependencies = [
"core_maths",
]
@ -2863,7 +2753,8 @@ dependencies = [
[[package]]
name = "typst-assets"
version = "0.13.1"
source = "git+https://github.com/typst/typst-assets?rev=ab1295f#ab1295ff896444e51902e03c2669955e1d73604a"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5bf0cc3c2265502b51fcb73147cc7c951ceb694507195b93c2ab0b901abb902"
[[package]]
name = "typst-cli"
@ -2913,7 +2804,7 @@ dependencies = [
[[package]]
name = "typst-dev-assets"
version = "0.13.1"
source = "git+https://github.com/typst/typst-dev-assets?rev=fddbf8b#fddbf8b99506bc370ac0edcd4959add603a7fc92"
source = "git+https://github.com/typst/typst-dev-assets?tag=v0.13.1#9879589f4b3247b12c5e694d0d7fa86d4d8a198e"
[[package]]
name = "typst-docs"
@ -3006,7 +2897,6 @@ dependencies = [
"dirs",
"ecow",
"env_proxy",
"fastrand",
"flate2",
"fontdb",
"native-tls",
@ -3068,7 +2958,6 @@ dependencies = [
"ecow",
"flate2",
"fontdb",
"glidesort",
"hayagriva",
"icu_properties",
"icu_provider",
@ -3107,7 +2996,6 @@ dependencies = [
"typst-timing",
"typst-utils",
"unicode-math-class",
"unicode-normalization",
"unicode-segmentation",
"unscanny",
"usvg",
@ -3129,20 +3017,26 @@ dependencies = [
name = "typst-pdf"
version = "0.13.1"
dependencies = [
"arrayvec",
"base64",
"bytemuck",
"comemo",
"ecow",
"image",
"infer",
"krilla",
"krilla-svg",
"indexmap 2.7.1",
"miniz_oxide",
"pdf-writer",
"serde",
"subsetter",
"svg2pdf",
"ttf-parser",
"typst-assets",
"typst-library",
"typst-macros",
"typst-syntax",
"typst-timing",
"typst-utils",
"xmp-writer",
]
[[package]]
@ -3291,15 +3185,15 @@ checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
[[package]]
name = "unicode-bidi-mirroring"
version = "0.4.0"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5dfa6e8c60bb66d49db113e0125ee8711b7647b5579dc7f5f19c42357ed039fe"
checksum = "64af057ad7466495ca113126be61838d8af947f41d93a949980b2389a118082f"
[[package]]
name = "unicode-ccc"
version = "0.4.0"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce61d488bcdc9bc8b5d1772c404828b17fc481c0a582b5581e95fb233aef503e"
checksum = "260bc6647b3893a9a90668360803a15f96b85a5257b1c3a0c3daf6ae2496de42"
[[package]]
name = "unicode-ident"
@ -3394,9 +3288,9 @@ dependencies = [
[[package]]
name = "usvg"
version = "0.45.0"
version = "0.43.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ac8e0e3e4696253dc06167990b3fe9a2668ab66270adf949a464db4088cb354"
checksum = "6803057b5cbb426e9fb8ce2216f3a9b4ca1dd2c705ba3cbebc13006e437735fd"
dependencies = [
"base64",
"data-url",
@ -3766,9 +3660,9 @@ checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9"
[[package]]
name = "xmp-writer"
version = "0.3.2"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce9e2f4a404d9ebffc0a9832cf4f50907220ba3d7fffa9099261a5cab52f2dd7"
checksum = "7eb5954c9ca6dcc869e98d3e42760ed9dab08f3e70212b31d7ab8ae7f3b7a487"
[[package]]
name = "xz2"
@ -3806,19 +3700,7 @@ checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive 0.7.5",
"zerofrom",
]
[[package]]
name = "yoke"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive 0.8.0",
"yoke-derive",
"zerofrom",
]
@ -3834,18 +3716,6 @@ dependencies = [
"synstructure",
]
[[package]]
name = "yoke-derive"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zerocopy"
version = "0.7.35"
@ -3907,7 +3777,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
dependencies = [
"serde",
"yoke 0.7.5",
"yoke",
"zerofrom",
"zerovec-derive",
]
@ -3925,25 +3795,21 @@ dependencies = [
[[package]]
name = "zip"
version = "2.5.0"
version = "2.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27c03817464f64e23f6f37574b4fdc8cf65925b5bfd2b0f2aedf959791941f88"
checksum = "ae9c1ea7b3a5e1f4b922ff856a129881167511563dc219869afe3787fc0c1a45"
dependencies = [
"arbitrary",
"crc32fast",
"crossbeam-utils",
"displaydoc",
"flate2",
"indexmap 2.7.1",
"memchr",
"thiserror 2.0.11",
"zopfli",
]
[[package]]
name = "zlib-rs"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b20717f0917c908dc63de2e44e97f1e6b126ca58d0e391cee86d504eb8fbd05"
[[package]]
name = "zopfli"
version = "0.8.1"

View File

@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
version = "0.13.1"
rust-version = "1.83" # also change in ci.yml
rust-version = "1.80" # also change in ci.yml
authors = ["The Typst Project Developers"]
edition = "2021"
homepage = "https://typst.app"
@ -32,8 +32,8 @@ typst-svg = { path = "crates/typst-svg", version = "0.13.1" }
typst-syntax = { path = "crates/typst-syntax", version = "0.13.1" }
typst-timing = { path = "crates/typst-timing", version = "0.13.1" }
typst-utils = { path = "crates/typst-utils", version = "0.13.1" }
typst-assets = { git = "https://github.com/typst/typst-assets", rev = "ab1295f" }
typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", rev = "fddbf8b" }
typst-assets = "0.13.1"
typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", tag = "v0.13.1" }
arrayvec = "0.7.4"
az = "1.2"
base64 = "0.22"
@ -55,11 +55,9 @@ ctrlc = "3.4.1"
dirs = "6"
ecow = { version = "0.2", features = ["serde"] }
env_proxy = "0.4"
fastrand = "2.3"
flate2 = "1"
fontdb = { version = "0.23", default-features = false }
fontdb = { version = "0.21", default-features = false }
fs_extra = "1.3"
glidesort = "0.1.2"
hayagriva = "0.8.1"
heck = "0.5"
hypher = "0.1.4"
@ -71,25 +69,24 @@ icu_segmenter = { version = "1.4", features = ["serde"] }
if_chain = "1"
image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "gif"] }
indexmap = { version = "2", features = ["serde"] }
infer = { version = "0.19.0", default-features = false }
kamadak-exif = "0.6"
krilla = { version = "0.4.0", default-features = false, features = ["raster-images", "comemo", "rayon"] }
krilla-svg = "0.1.0"
kurbo = "0.11"
libfuzzer-sys = "0.4"
lipsum = "0.9"
memchr = "2"
miniz_oxide = "0.8"
native-tls = "0.2"
notify = "8"
once_cell = "1"
open = "5.0.1"
openssl = "0.10.72"
openssl = "0.10"
oxipng = { version = "9.0", default-features = false, features = ["filetime", "parallel", "zopfli"] }
palette = { version = "0.7.3", default-features = false, features = ["approx", "libm"] }
parking_lot = "0.12.1"
pathdiff = "0.2"
pdf-writer = "0.12.1"
phf = { version = "0.11", features = ["macros"] }
pixglyph = "0.6"
pixglyph = "0.5.1"
png = "0.17"
portable-atomic = "1.6"
proc-macro2 = "1"
@ -99,10 +96,10 @@ quote = "1"
rayon = "1.7.0"
regex = "1"
regex-syntax = "0.8"
resvg = { version = "0.45", default-features = false, features = ["raster-images"] }
resvg = { version = "0.43", default-features = false, features = ["raster-images"] }
roxmltree = "0.20"
rust_decimal = { version = "1.36.0", default-features = false, features = ["maths"] }
rustybuzz = "0.20"
rustybuzz = "0.18"
same-file = "1"
self-replace = "1.3.7"
semver = "1"
@ -114,6 +111,8 @@ sigpipe = "0.1"
siphasher = "1"
smallvec = { version = "1.11.1", features = ["union", "const_generics", "const_new"] }
stacker = "0.1.15"
subsetter = "0.2"
svg2pdf = "0.12"
syn = { version = "2", features = ["full", "extra-traits"] }
syntect = { version = "5", default-features = false, features = ["parsing", "regex-fancy", "plist-load", "yaml-load"] }
tar = "0.4"
@ -123,26 +122,26 @@ time = { version = "0.3.20", features = ["formatting", "macros", "parsing"] }
tiny_http = "0.12"
tiny-skia = "0.11"
toml = { version = "0.8", default-features = false, features = ["parse", "display"] }
ttf-parser = "0.25.0"
ttf-parser = "0.24.1"
two-face = { version = "0.4.3", default-features = false, features = ["syntect-fancy"] }
typed-arena = "2"
unicode-bidi = "0.3.18"
unicode-ident = "1.0"
unicode-math-class = "0.1"
unicode-script = "0.5"
unicode-normalization = "0.1.24"
unicode-segmentation = "1"
unscanny = "0.1"
ureq = { version = "2", default-features = false, features = ["native-tls", "gzip", "json"] }
usvg = { version = "0.45", default-features = false, features = ["text"] }
usvg = { version = "0.43", default-features = false, features = ["text"] }
walkdir = "2"
wasmi = "0.40.0"
web-sys = "0.3"
xmlparser = "0.13.5"
xmlwriter = "0.1.0"
xmp-writer = "0.3.1"
xz2 = { version = "0.1", features = ["static"] }
yaml-front-matter = "0.1"
zip = { version = "2.5", default-features = false, features = ["deflate"] }
zip = { version = "2", default-features = false, features = ["deflate"] }
[profile.dev.package."*"]
opt-level = 2

View File

@ -113,9 +113,7 @@ Typst's CLI is available from different sources:
- You can install Typst through different package managers. Note that the
versions in the package managers might lag behind the latest release.
- Linux:
- View [Typst on Repology][repology]
- View [Typst's Snap][snap]
- Linux: View [Typst on Repology][repology]
- macOS: `brew install typst`
- Windows: `winget install --id Typst.Typst`
@ -177,22 +175,22 @@ If you prefer an integrated IDE-like experience with autocompletion and instant
preview, you can also check out [Typst's free web app][app].
## Community
The main places where the community gathers are our [Forum][forum] and our
[Discord server][discord]. The Forum is a great place to ask questions, help
others, and share cool things you created with Typst. The Discord server is more
suitable for quicker questions, discussions about contributing, or just to chat.
We'd be happy to see you there!
The main place where the community gathers is our [Discord server][discord].
Feel free to join there to ask questions, help out others, share cool things
you created with Typst, or just to chat.
[Typst Universe][universe] is where the community shares templates and packages.
If you want to share your own creations, you can submit them to our
[package repository][packages].
Aside from that there are a few places where you can find things built by
the community:
- The official [package list](https://typst.app/docs/packages)
- The [Awesome Typst](https://github.com/qjcg/awesome-typst) repository
If you had a bad experience in our community, please [reach out to us][contact].
## Contributing
We love to see contributions from the community. If you experience bugs, feel
free to open an issue. If you would like to implement a new feature or bug fix,
please follow the steps outlined in the [contribution guide][contributing].
We would love to see contributions from the community. If you experience bugs,
feel free to open an issue. If you would like to implement a new feature or bug
fix, please follow the steps outlined in the [contribution guide][contributing].
To build Typst yourself, first ensure that you have the
[latest stable Rust][rust] installed. Then, clone this repository and build the
@ -243,8 +241,6 @@ instant preview. To achieve these goals, we follow three core design principles:
[docs]: https://typst.app/docs/
[app]: https://typst.app/
[discord]: https://discord.gg/2uDybryKPe
[forum]: https://forum.typst.app/
[universe]: https://typst.app/universe/
[tutorial]: https://typst.app/docs/tutorial/
[show]: https://typst.app/docs/reference/styling/#show-rules
[math]: https://typst.app/docs/reference/math/
@ -258,4 +254,3 @@ instant preview. To achieve these goals, we follow three core design principles:
[contributing]: https://github.com/typst/typst/blob/main/CONTRIBUTING.md
[packages]: https://github.com/typst/packages/
[`comemo`]: https://github.com/typst/comemo/
[snap]: https://snapcraft.io/typst

View File

@ -361,7 +361,7 @@ pub struct FontArgs {
/// Ensures system fonts won't be searched, unless explicitly included via
/// `--font-path`.
#[arg(long, env = "TYPST_IGNORE_SYSTEM_FONTS")]
#[arg(long)]
pub ignore_system_fonts: bool,
}
@ -467,45 +467,15 @@ display_possible_values!(Feature);
#[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum)]
#[allow(non_camel_case_types)]
pub enum PdfStandard {
/// PDF 1.4.
#[value(name = "1.4")]
V_1_4,
/// PDF 1.5.
#[value(name = "1.5")]
V_1_5,
/// PDF 1.5.
#[value(name = "1.6")]
V_1_6,
/// PDF 1.7.
#[value(name = "1.7")]
V_1_7,
/// PDF 2.0.
#[value(name = "2.0")]
V_2_0,
/// PDF/A-1b.
#[value(name = "a-1b")]
A_1b,
/// PDF/A-2b.
#[value(name = "a-2b")]
A_2b,
/// PDF/A-2u.
#[value(name = "a-2u")]
A_2u,
/// PDF/A-3u.
/// PDF/A-3b.
#[value(name = "a-3b")]
A_3b,
/// PDF/A-3u.
#[value(name = "a-3u")]
A_3u,
/// PDF/A-4.
#[value(name = "a-4")]
A_4,
/// PDF/A-4f.
#[value(name = "a-4f")]
A_4f,
/// PDF/A-4e.
#[value(name = "a-4e")]
A_4e,
}
display_possible_values!(PdfStandard);

View File

@ -63,7 +63,8 @@ pub struct CompileConfig {
/// Opens the output file with the default viewer or a specific program after
/// compilation.
pub open: Option<Option<String>>,
/// A list of standards the PDF should conform to.
/// One (or multiple comma-separated) PDF standards that Typst will enforce
/// conformance with.
pub pdf_standards: PdfStandards,
/// A path to write a Makefile rule describing the current compilation.
pub make_deps: Option<PathBuf>,
@ -129,9 +130,18 @@ impl CompileConfig {
PageRanges::new(export_ranges.iter().map(|r| r.0.clone()).collect())
});
let pdf_standards = PdfStandards::new(
&args.pdf_standard.iter().copied().map(Into::into).collect::<Vec<_>>(),
)?;
let pdf_standards = {
let list = args
.pdf_standard
.iter()
.map(|standard| match standard {
PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
})
.collect::<Vec<_>>();
PdfStandards::new(&list)?
};
#[cfg(feature = "http-server")]
let server = match watch {
@ -285,7 +295,6 @@ fn export_pdf(document: &PagedDocument, config: &CompileConfig) -> SourceResult<
})
}
};
let options = PdfOptions {
ident: Smart::Auto,
timestamp,
@ -341,7 +350,7 @@ fn export_image(
.iter()
.enumerate()
.filter(|(i, _)| {
config.pages.as_ref().is_none_or(|exported_page_ranges| {
config.pages.as_ref().map_or(true, |exported_page_ranges| {
exported_page_ranges.includes_page_index(*i)
})
})
@ -756,23 +765,3 @@ impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
})
}
}
impl From<PdfStandard> for typst_pdf::PdfStandard {
fn from(standard: PdfStandard) -> Self {
match standard {
PdfStandard::V_1_4 => typst_pdf::PdfStandard::V_1_4,
PdfStandard::V_1_5 => typst_pdf::PdfStandard::V_1_5,
PdfStandard::V_1_6 => typst_pdf::PdfStandard::V_1_6,
PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
PdfStandard::V_2_0 => typst_pdf::PdfStandard::V_2_0,
PdfStandard::A_1b => typst_pdf::PdfStandard::A_1b,
PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
PdfStandard::A_2u => typst_pdf::PdfStandard::A_2u,
PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
PdfStandard::A_3u => typst_pdf::PdfStandard::A_3u,
PdfStandard::A_4 => typst_pdf::PdfStandard::A_4,
PdfStandard::A_4f => typst_pdf::PdfStandard::A_4f,
PdfStandard::A_4e => typst_pdf::PdfStandard::A_4e,
}
}
}

View File

@ -2,7 +2,6 @@ use comemo::Track;
use ecow::{eco_format, EcoString};
use serde::Serialize;
use typst::diag::{bail, HintedStrResult, StrResult, Warned};
use typst::engine::Sink;
use typst::foundations::{Content, IntoValue, LocatableSelector, Scope};
use typst::layout::PagedDocument;
use typst::syntax::Span;
@ -59,8 +58,6 @@ fn retrieve(
let selector = eval_string(
&typst::ROUTINES,
world.track(),
// TODO: propagate warnings
Sink::new().track_mut(),
&command.selector,
Span::detached(),
EvalMode::Code,

View File

@ -55,11 +55,11 @@ pub fn watch(timer: &mut Timer, command: &WatchCommand) -> StrResult<()> {
// Perform initial compilation.
timer.record(&mut world, |world| compile_once(world, &mut config))??;
// Watch all dependencies of the initial compilation.
watcher.update(world.dependencies())?;
// Recompile whenever something relevant happens.
loop {
// Watch all dependencies of the most recent compilation.
watcher.update(world.dependencies())?;
// Wait until anything relevant happens.
watcher.wait()?;
@ -71,6 +71,9 @@ pub fn watch(timer: &mut Timer, command: &WatchCommand) -> StrResult<()> {
// Evict the cache.
comemo::evict(10);
// Adjust the file watching.
watcher.update(world.dependencies())?;
}
}

View File

@ -210,9 +210,7 @@ impl World for SystemWorld {
}
fn font(&self, index: usize) -> Option<Font> {
// comemo's validation may invoke this function with an invalid index. This is
// impossible in typst-cli but possible if a custom tool mutates the fonts.
self.fonts.get(index)?.get()
self.fonts[index].get()
}
fn today(&self, offset: Option<i64>) -> Option<Datetime> {

View File

@ -466,7 +466,7 @@ impl<'a> CapturesVisitor<'a> {
}
// Code and content blocks create a scope.
Some(ast::Expr::CodeBlock(_) | ast::Expr::ContentBlock(_)) => {
Some(ast::Expr::Code(_) | ast::Expr::Content(_)) => {
self.internal.enter();
for child in node.children() {
self.visit(child);
@ -516,7 +516,7 @@ impl<'a> CapturesVisitor<'a> {
// A let expression contains a binding, but that binding is only
// active after the body is evaluated.
Some(ast::Expr::LetBinding(expr)) => {
Some(ast::Expr::Let(expr)) => {
if let Some(init) = expr.init() {
self.visit(init.to_untyped());
}
@ -529,7 +529,7 @@ impl<'a> CapturesVisitor<'a> {
// A for loop contains one or two bindings in its pattern. These are
// active after the iterable is evaluated but before the body is
// evaluated.
Some(ast::Expr::ForLoop(expr)) => {
Some(ast::Expr::For(expr)) => {
self.visit(expr.iterable().to_untyped());
self.internal.enter();
@ -544,7 +544,7 @@ impl<'a> CapturesVisitor<'a> {
// An import contains items, but these are active only after the
// path is evaluated.
Some(ast::Expr::ModuleImport(expr)) => {
Some(ast::Expr::Import(expr)) => {
self.visit(expr.source().to_untyped());
if let Some(ast::Imports::Items(items)) = expr.imports() {
for item in items.iter() {

View File

@ -30,7 +30,7 @@ fn eval_code<'a>(
while let Some(expr) = exprs.next() {
let span = expr.span();
let value = match expr {
ast::Expr::SetRule(set) => {
ast::Expr::Set(set) => {
let styles = set.eval(vm)?;
if vm.flow.is_some() {
break;
@ -39,7 +39,7 @@ fn eval_code<'a>(
let tail = eval_code(vm, exprs)?.display();
Value::Content(tail.styled_with_map(styles))
}
ast::Expr::ShowRule(show) => {
ast::Expr::Show(show) => {
let recipe = show.eval(vm)?;
if vm.flow.is_some() {
break;
@ -55,7 +55,7 @@ fn eval_code<'a>(
_ => expr.eval(vm)?,
};
output = ops::join(output, value).at(span)?;
output = ops::join(output, value, &mut (&mut vm.engine, span)).at(span)?;
if let Some(event) = &vm.flow {
warn_for_discarded_content(&mut vm.engine, event, &output);
@ -94,9 +94,9 @@ impl Eval for ast::Expr<'_> {
Self::Label(v) => v.eval(vm),
Self::Ref(v) => v.eval(vm).map(Value::Content),
Self::Heading(v) => v.eval(vm).map(Value::Content),
Self::ListItem(v) => v.eval(vm).map(Value::Content),
Self::EnumItem(v) => v.eval(vm).map(Value::Content),
Self::TermItem(v) => v.eval(vm).map(Value::Content),
Self::List(v) => v.eval(vm).map(Value::Content),
Self::Enum(v) => v.eval(vm).map(Value::Content),
Self::Term(v) => v.eval(vm).map(Value::Content),
Self::Equation(v) => v.eval(vm).map(Value::Content),
Self::Math(v) => v.eval(vm).map(Value::Content),
Self::MathText(v) => v.eval(vm).map(Value::Content),
@ -116,8 +116,8 @@ impl Eval for ast::Expr<'_> {
Self::Float(v) => v.eval(vm),
Self::Numeric(v) => v.eval(vm),
Self::Str(v) => v.eval(vm),
Self::CodeBlock(v) => v.eval(vm),
Self::ContentBlock(v) => v.eval(vm).map(Value::Content),
Self::Code(v) => v.eval(vm),
Self::Content(v) => v.eval(vm).map(Value::Content),
Self::Array(v) => v.eval(vm).map(Value::Array),
Self::Dict(v) => v.eval(vm).map(Value::Dict),
Self::Parenthesized(v) => v.eval(vm),
@ -126,19 +126,19 @@ impl Eval for ast::Expr<'_> {
Self::Closure(v) => v.eval(vm),
Self::Unary(v) => v.eval(vm),
Self::Binary(v) => v.eval(vm),
Self::LetBinding(v) => v.eval(vm),
Self::DestructAssignment(v) => v.eval(vm),
Self::SetRule(_) => bail!(forbidden("set")),
Self::ShowRule(_) => bail!(forbidden("show")),
Self::Let(v) => v.eval(vm),
Self::DestructAssign(v) => v.eval(vm),
Self::Set(_) => bail!(forbidden("set")),
Self::Show(_) => bail!(forbidden("show")),
Self::Contextual(v) => v.eval(vm).map(Value::Content),
Self::Conditional(v) => v.eval(vm),
Self::WhileLoop(v) => v.eval(vm),
Self::ForLoop(v) => v.eval(vm),
Self::ModuleImport(v) => v.eval(vm),
Self::ModuleInclude(v) => v.eval(vm).map(Value::Content),
Self::LoopBreak(v) => v.eval(vm),
Self::LoopContinue(v) => v.eval(vm),
Self::FuncReturn(v) => v.eval(vm),
Self::While(v) => v.eval(vm),
Self::For(v) => v.eval(vm),
Self::Import(v) => v.eval(vm),
Self::Include(v) => v.eval(vm).map(Value::Content),
Self::Break(v) => v.eval(vm),
Self::Continue(v) => v.eval(vm),
Self::Return(v) => v.eval(vm),
}?
.spanned(span);

View File

@ -83,7 +83,8 @@ impl Eval for ast::WhileLoop<'_> {
}
let value = body.eval(vm)?;
output = ops::join(output, value).at(body.span())?;
let span = body.span();
output = ops::join(output, value, &mut (&mut vm.engine, span)).at(span)?;
match vm.flow {
Some(FlowEvent::Break(_)) => {
@ -129,7 +130,9 @@ impl Eval for ast::ForLoop<'_> {
let body = self.body();
let value = body.eval(vm)?;
output = ops::join(output, value).at(body.span())?;
let span = body.span();
output =
ops::join(output, value, &mut (&mut vm.engine, span)).at(span)?;
match vm.flow {
Some(FlowEvent::Break(_)) => {

View File

@ -101,7 +101,6 @@ pub fn eval(
pub fn eval_string(
routines: &Routines,
world: Tracked<dyn World + '_>,
sink: TrackedMut<Sink>,
string: &str,
span: Span,
mode: EvalMode,
@ -122,6 +121,7 @@ pub fn eval_string(
}
// Prepare the engine.
let mut sink = Sink::new();
let introspector = Introspector::default();
let traced = Traced::default();
let engine = Engine {
@ -129,7 +129,7 @@ pub fn eval_string(
world,
introspector: introspector.track(),
traced: traced.track(),
sink,
sink: sink.track_mut(),
route: Route::default(),
};

View File

@ -33,7 +33,7 @@ fn eval_markup<'a>(
while let Some(expr) = exprs.next() {
match expr {
ast::Expr::SetRule(set) => {
ast::Expr::Set(set) => {
let styles = set.eval(vm)?;
if vm.flow.is_some() {
break;
@ -41,7 +41,7 @@ fn eval_markup<'a>(
seq.push(eval_markup(vm, exprs)?.styled_with_map(styles))
}
ast::Expr::ShowRule(show) => {
ast::Expr::Show(show) => {
let recipe = show.eval(vm)?;
if vm.flow.is_some() {
break;

View File

@ -1,4 +1,4 @@
use typst_library::diag::{At, HintedStrResult, SourceResult};
use typst_library::diag::{At, DeprecationSink, HintedStrResult, SourceResult};
use typst_library::foundations::{ops, IntoValue, Value};
use typst_syntax::ast::{self, AstNode};
@ -23,22 +23,22 @@ impl Eval for ast::Binary<'_> {
fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
match self.op() {
ast::BinOp::Add => apply_binary(self, vm, ops::add),
ast::BinOp::Add => apply_binary_with_sink(self, vm, ops::add),
ast::BinOp::Sub => apply_binary(self, vm, ops::sub),
ast::BinOp::Mul => apply_binary(self, vm, ops::mul),
ast::BinOp::Div => apply_binary(self, vm, ops::div),
ast::BinOp::And => apply_binary(self, vm, ops::and),
ast::BinOp::Or => apply_binary(self, vm, ops::or),
ast::BinOp::Eq => apply_binary(self, vm, ops::eq),
ast::BinOp::Neq => apply_binary(self, vm, ops::neq),
ast::BinOp::Eq => apply_binary_with_sink(self, vm, ops::eq),
ast::BinOp::Neq => apply_binary_with_sink(self, vm, ops::neq),
ast::BinOp::Lt => apply_binary(self, vm, ops::lt),
ast::BinOp::Leq => apply_binary(self, vm, ops::leq),
ast::BinOp::Gt => apply_binary(self, vm, ops::gt),
ast::BinOp::Geq => apply_binary(self, vm, ops::geq),
ast::BinOp::In => apply_binary(self, vm, ops::in_),
ast::BinOp::NotIn => apply_binary(self, vm, ops::not_in),
ast::BinOp::In => apply_binary_with_sink(self, vm, ops::in_),
ast::BinOp::NotIn => apply_binary_with_sink(self, vm, ops::not_in),
ast::BinOp::Assign => apply_assignment(self, vm, |_, b| Ok(b)),
ast::BinOp::AddAssign => apply_assignment(self, vm, ops::add),
ast::BinOp::AddAssign => apply_assignment_with_sink(self, vm, ops::add),
ast::BinOp::SubAssign => apply_assignment(self, vm, ops::sub),
ast::BinOp::MulAssign => apply_assignment(self, vm, ops::mul),
ast::BinOp::DivAssign => apply_assignment(self, vm, ops::div),
@ -65,6 +65,18 @@ fn apply_binary(
op(lhs, rhs).at(binary.span())
}
/// Apply a basic binary operation, with the possiblity of deprecations.
fn apply_binary_with_sink(
binary: ast::Binary,
vm: &mut Vm,
op: impl Fn(Value, Value, &mut dyn DeprecationSink) -> HintedStrResult<Value>,
) -> SourceResult<Value> {
let span = binary.span();
let lhs = binary.lhs().eval(vm)?;
let rhs = binary.rhs().eval(vm)?;
op(lhs, rhs, &mut (&mut vm.engine, span)).at(span)
}
/// Apply an assignment operation.
fn apply_assignment(
binary: ast::Binary,
@ -89,3 +101,23 @@ fn apply_assignment(
*location = op(lhs, rhs).at(binary.span())?;
Ok(Value::None)
}
/// Apply an assignment operation, with the possiblity of deprecations.
fn apply_assignment_with_sink(
binary: ast::Binary,
vm: &mut Vm,
op: fn(Value, Value, &mut dyn DeprecationSink) -> HintedStrResult<Value>,
) -> SourceResult<Value> {
let rhs = binary.rhs().eval(vm)?;
let location = binary.lhs().access(vm)?;
let lhs = std::mem::take(&mut *location);
let mut sink = vec![];
let span = binary.span();
*location = op(lhs, rhs, &mut (&mut sink, span)).at(span)?;
if !sink.is_empty() {
for warning in sink {
vm.engine.sink.warn(warning);
}
}
Ok(Value::None)
}

View File

@ -45,7 +45,7 @@ impl Eval for ast::ShowRule<'_> {
let transform = self.transform();
let transform = match transform {
ast::Expr::SetRule(set) => Transformation::Style(set.eval(vm)?),
ast::Expr::Set(set) => Transformation::Style(set.eval(vm)?),
expr => expr.eval(vm)?.cast::<Transformation>().at(transform.span())?,
};

View File

@ -263,13 +263,13 @@ fn handle(
/// Wrap the nodes in `<html>` and `<body>` if they are not yet rooted,
/// supplying a suitable `<head>`.
fn root_element(output: Vec<HtmlNode>, info: &DocumentInfo) -> SourceResult<HtmlElement> {
let head = head_element(info);
let body = match classify_output(output)? {
OutputKind::Html(element) => return Ok(element),
OutputKind::Body(body) => body,
OutputKind::Leafs(leafs) => HtmlElement::new(tag::body).with_children(leafs),
};
Ok(HtmlElement::new(tag::html).with_children(vec![head.into(), body.into()]))
Ok(HtmlElement::new(tag::html)
.with_children(vec![head_element(info).into(), body.into()]))
}
/// Generate a `<head>` element.
@ -302,24 +302,6 @@ fn head_element(info: &DocumentInfo) -> HtmlElement {
);
}
if !info.author.is_empty() {
children.push(
HtmlElement::new(tag::meta)
.with_attr(attr::name, "authors")
.with_attr(attr::content, info.author.join(", "))
.into(),
)
}
if !info.keywords.is_empty() {
children.push(
HtmlElement::new(tag::meta)
.with_attr(attr::name, "keywords")
.with_attr(attr::content, info.keywords.join(", "))
.into(),
)
}
HtmlElement::new(tag::head).with_children(children)
}

View File

@ -26,7 +26,7 @@ pub fn analyze_expr(
ast::Expr::Str(v) => Value::Str(v.get().into()),
_ => {
if node.kind() == SyntaxKind::Contextual {
if let Some(child) = node.children().next_back() {
if let Some(child) = node.children().last() {
return analyze_expr(world, &child);
}
}

View File

@ -410,17 +410,9 @@ fn field_access_completions(
elem.into_iter().chain(Some(ty))
};
// Autocomplete methods from the element's or type's scope. We only complete
// those which have a `self` parameter.
// Autocomplete methods from the element's or type's scope.
for (name, binding) in scopes.flat_map(|scope| scope.iter()) {
let Ok(func) = binding.read().clone().cast::<Func>() else { continue };
if func
.params()
.and_then(|params| params.first())
.is_some_and(|param| param.name == "self")
{
ctx.call_completion(name.clone(), binding.read());
}
ctx.call_completion(name.clone(), binding.read());
}
if let Some(scope) = value.scope() {
@ -517,7 +509,7 @@ fn complete_imports(ctx: &mut CompletionContext) -> bool {
// "#import "path.typ": a, b, |".
if_chain! {
if let Some(prev) = ctx.leaf.prev_sibling();
if let Some(ast::Expr::ModuleImport(import)) = prev.get().cast();
if let Some(ast::Expr::Import(import)) = prev.get().cast();
if let Some(ast::Imports::Items(items)) = import.imports();
if let Some(source) = prev.children().find(|child| child.is::<ast::Expr>());
then {
@ -536,7 +528,7 @@ fn complete_imports(ctx: &mut CompletionContext) -> bool {
if let Some(grand) = parent.parent();
if grand.kind() == SyntaxKind::ImportItems;
if let Some(great) = grand.parent();
if let Some(ast::Expr::ModuleImport(import)) = great.get().cast();
if let Some(ast::Expr::Import(import)) = great.get().cast();
if let Some(ast::Imports::Items(items)) = import.imports();
if let Some(source) = great.children().find(|child| child.is::<ast::Expr>());
then {
@ -677,10 +669,10 @@ fn complete_params(ctx: &mut CompletionContext) -> bool {
if let Some(args) = parent.get().cast::<ast::Args>();
if let Some(grand) = parent.parent();
if let Some(expr) = grand.get().cast::<ast::Expr>();
let set = matches!(expr, ast::Expr::SetRule(_));
let set = matches!(expr, ast::Expr::Set(_));
if let Some(callee) = match expr {
ast::Expr::FuncCall(call) => Some(call.callee()),
ast::Expr::SetRule(set) => Some(set.target()),
ast::Expr::Set(set) => Some(set.target()),
_ => None,
};
then {
@ -1463,7 +1455,7 @@ impl<'a> CompletionContext<'a> {
let mut defined = BTreeMap::<EcoString, Option<Value>>::new();
named_items(self.world, self.leaf.clone(), |item| {
let name = item.name();
if !name.is_empty() && item.value().as_ref().is_none_or(filter) {
if !name.is_empty() && item.value().as_ref().map_or(true, filter) {
defined.insert(name.clone(), item.value());
}
@ -1772,7 +1764,6 @@ mod tests {
#[test]
fn test_autocomplete_type_methods() {
test("#\"hello\".", -1).must_include(["len", "contains"]);
test("#table().", -1).must_exclude(["cell"]);
}
#[test]

View File

@ -3,7 +3,7 @@ use std::num::NonZeroUsize;
use typst::layout::{Frame, FrameItem, PagedDocument, Point, Position, Size};
use typst::model::{Destination, Url};
use typst::syntax::{FileId, LinkedNode, Side, Source, Span, SyntaxKind};
use typst::visualize::{Curve, CurveItem, FillRule, Geometry};
use typst::visualize::Geometry;
use typst::WorldExt;
use crate::IdeWorld;
@ -53,20 +53,10 @@ pub fn jump_from_click(
for (mut pos, item) in frame.items().rev() {
match item {
FrameItem::Group(group) => {
let pos = click - pos;
if let Some(clip) = &group.clip {
if !clip.contains(FillRule::NonZero, pos) {
continue;
}
}
// Realistic transforms should always be invertible.
// An example of one that isn't is a scale of 0, which would
// not be clickable anyway.
let Some(inv_transform) = group.transform.invert() else {
continue;
};
let pos = pos.transform_inf(inv_transform);
if let Some(span) = jump_from_click(world, document, &group.frame, pos) {
// TODO: Handle transformation.
if let Some(span) =
jump_from_click(world, document, &group.frame, click - pos)
{
return Some(span);
}
}
@ -104,32 +94,9 @@ pub fn jump_from_click(
}
FrameItem::Shape(shape, span) => {
if shape.fill.is_some() {
let within = match &shape.geometry {
Geometry::Line(..) => false,
Geometry::Rect(size) => is_in_rect(pos, *size, click),
Geometry::Curve(curve) => {
curve.contains(shape.fill_rule, click - pos)
}
};
if within {
return Jump::from_span(world, *span);
}
}
if let Some(stroke) = &shape.stroke {
let within = !stroke.thickness.approx_empty() && {
// This curve is rooted at (0, 0), not `pos`.
let base_curve = match &shape.geometry {
Geometry::Line(to) => &Curve(vec![CurveItem::Line(*to)]),
Geometry::Rect(size) => &Curve::rect(*size),
Geometry::Curve(curve) => curve,
};
base_curve.stroke_contains(stroke, click - pos)
};
if within {
return Jump::from_span(world, *span);
}
let Geometry::Rect(size) = shape.geometry else { continue };
if is_in_rect(pos, size, click) {
return Jump::from_span(world, *span);
}
}
@ -179,8 +146,9 @@ pub fn jump_from_cursor(
fn find_in_frame(frame: &Frame, span: Span) -> Option<Point> {
for (mut pos, item) in frame.items() {
if let FrameItem::Group(group) = item {
// TODO: Handle transformation.
if let Some(point) = find_in_frame(&group.frame, span) {
return Some(pos + point.transform(group.transform));
return Some(point + pos);
}
}
@ -301,97 +269,6 @@ mod tests {
test_click("$a + b$", point(28.0, 14.0), cursor(5));
}
#[test]
fn test_jump_from_click_transform_clip() {
let margin = point(10.0, 10.0);
test_click(
"#rect(width: 20pt, height: 20pt, fill: black)",
point(10.0, 10.0) + margin,
cursor(1),
);
test_click(
"#rect(width: 60pt, height: 10pt, fill: black)",
point(5.0, 30.0) + margin,
None,
);
test_click(
"#rotate(90deg, origin: bottom + left, rect(width: 60pt, height: 10pt, fill: black))",
point(5.0, 30.0) + margin,
cursor(38),
);
test_click(
"#scale(x: 300%, y: 300%, origin: top + left, rect(width: 10pt, height: 10pt, fill: black))",
point(20.0, 20.0) + margin,
cursor(45),
);
test_click(
"#box(width: 10pt, height: 10pt, clip: true, scale(x: 300%, y: 300%, \
origin: top + left, rect(width: 10pt, height: 10pt, fill: black)))",
point(20.0, 20.0) + margin,
None,
);
test_click(
"#box(width: 10pt, height: 10pt, clip: false, rect(width: 30pt, height: 30pt, fill: black))",
point(20.0, 20.0) + margin,
cursor(45),
);
test_click(
"#box(width: 10pt, height: 10pt, clip: true, rect(width: 30pt, height: 30pt, fill: black))",
point(20.0, 20.0) + margin,
None,
);
test_click(
"#rotate(90deg, origin: bottom + left)[hello world]",
point(5.0, 15.0) + margin,
cursor(40),
);
}
#[test]
fn test_jump_from_click_shapes() {
let margin = point(10.0, 10.0);
test_click(
"#rect(width: 30pt, height: 30pt, fill: black)",
point(15.0, 15.0) + margin,
cursor(1),
);
let circle = "#circle(width: 30pt, height: 30pt, fill: black)";
test_click(circle, point(15.0, 15.0) + margin, cursor(1));
test_click(circle, point(1.0, 1.0) + margin, None);
let bowtie =
"#polygon(fill: black, (0pt, 0pt), (20pt, 20pt), (20pt, 0pt), (0pt, 20pt))";
test_click(bowtie, point(1.0, 2.0) + margin, cursor(1));
test_click(bowtie, point(2.0, 1.0) + margin, None);
test_click(bowtie, point(19.0, 10.0) + margin, cursor(1));
let evenodd = r#"#polygon(fill: black, fill-rule: "even-odd",
(0pt, 10pt), (30pt, 10pt), (30pt, 20pt), (20pt, 20pt),
(20pt, 0pt), (10pt, 0pt), (10pt, 30pt), (20pt, 30pt),
(20pt, 20pt), (0pt, 20pt))"#;
test_click(evenodd, point(15.0, 15.0) + margin, None);
test_click(evenodd, point(5.0, 15.0) + margin, cursor(1));
test_click(evenodd, point(15.0, 5.0) + margin, cursor(1));
}
#[test]
fn test_jump_from_click_shapes_stroke() {
let margin = point(10.0, 10.0);
let rect =
"#place(dx: 10pt, dy: 10pt, rect(width: 10pt, height: 10pt, stroke: 5pt))";
test_click(rect, point(15.0, 15.0) + margin, None);
test_click(rect, point(10.0, 15.0) + margin, cursor(27));
test_click(
"#line(angle: 45deg, length: 10pt, stroke: 2pt)",
point(2.0, 2.0) + margin,
cursor(1),
);
}
#[test]
fn test_jump_from_cursor() {
let s = "*Hello* #box[ABC] World";
@ -404,15 +281,6 @@ mod tests {
test_cursor("$a + b$", -3, pos(1, 27.51, 16.83));
}
#[test]
fn test_jump_from_cursor_transform() {
test_cursor(
r#"#rotate(90deg, origin: bottom + left, [hello world])"#,
-5,
pos(1, 10.0, 16.58),
);
}
#[test]
fn test_backlink() {
let s = "#footnote[Hi]";

View File

@ -232,9 +232,7 @@ pub fn deref_target(node: LinkedNode) -> Option<DerefTarget<'_>> {
ast::Expr::FuncCall(call) => {
DerefTarget::Callee(expr_node.find(call.callee().span())?)
}
ast::Expr::SetRule(set) => {
DerefTarget::Callee(expr_node.find(set.target().span())?)
}
ast::Expr::Set(set) => DerefTarget::Callee(expr_node.find(set.target().span())?),
ast::Expr::Ident(_) | ast::Expr::MathIdent(_) | ast::Expr::FieldAccess(_) => {
DerefTarget::VarAccess(expr_node)
}

View File

@ -97,7 +97,7 @@ impl World for TestWorld {
}
fn font(&self, index: usize) -> Option<Font> {
self.base.fonts.get(index).cloned()
Some(self.base.fonts[index].clone())
}
fn today(&self, _: Option<i64>) -> Option<Datetime> {

View File

@ -201,7 +201,7 @@ fn named_param_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Toolti
if let Some(expr) = grand_grand.cast::<ast::Expr>();
if let Some(ast::Expr::Ident(callee)) = match expr {
ast::Expr::FuncCall(call) => Some(call.callee()),
ast::Expr::SetRule(set) => Some(set.target()),
ast::Expr::Set(set) => Some(set.target()),
_ => None,
};

View File

@ -19,7 +19,6 @@ typst-utils = { workspace = true }
dirs = { workspace = true, optional = true }
ecow = { workspace = true }
env_proxy = { workspace = true, optional = true }
fastrand = { workspace = true, optional = true }
flate2 = { workspace = true, optional = true }
fontdb = { workspace = true, optional = true }
native-tls = { workspace = true, optional = true }
@ -44,7 +43,7 @@ fonts = ["dep:fontdb", "fontdb/memmap", "fontdb/fontconfig"]
downloads = ["dep:env_proxy", "dep:native-tls", "dep:ureq", "dep:openssl"]
# Add package downloading utilities, implies `downloads`
packages = ["downloads", "dep:dirs", "dep:flate2", "dep:tar", "dep:fastrand"]
packages = ["downloads", "dep:dirs", "dep:flate2", "dep:tar"]
# Embeds some fonts into the binary:
# - For text: Libertinus Serif, New Computer Modern

View File

@ -1,7 +1,6 @@
//! Download and unpack packages and package indices.
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use ecow::eco_format;
@ -78,8 +77,7 @@ impl PackageStorage {
self.package_path.as_deref()
}
/// Makes a package available on-disk and returns the path at which it is
/// located (will be either in the cache or package directory).
/// Make a package available in the on-disk.
pub fn prepare_package(
&self,
spec: &PackageSpec,
@ -102,7 +100,7 @@ impl PackageStorage {
// Download from network if it doesn't exist yet.
if spec.namespace == DEFAULT_NAMESPACE {
self.download_package(spec, cache_dir, progress)?;
self.download_package(spec, &dir, progress)?;
if dir.exists() {
return Ok(dir);
}
@ -112,7 +110,7 @@ impl PackageStorage {
Err(PackageError::NotFound(spec.clone()))
}
/// Tries to determine the latest version of a package.
/// Try to determine the latest version of a package.
pub fn determine_latest_version(
&self,
spec: &VersionlessPackageSpec,
@ -145,7 +143,7 @@ impl PackageStorage {
}
/// Download the package index. The result of this is cached for efficiency.
fn download_index(&self) -> StrResult<&[serde_json::Value]> {
pub fn download_index(&self) -> StrResult<&[serde_json::Value]> {
self.index
.get_or_try_init(|| {
let url = format!("{DEFAULT_REGISTRY}/{DEFAULT_NAMESPACE}/index.json");
@ -166,10 +164,10 @@ impl PackageStorage {
///
/// # Panics
/// Panics if the package spec namespace isn't `DEFAULT_NAMESPACE`.
fn download_package(
pub fn download_package(
&self,
spec: &PackageSpec,
cache_dir: &Path,
package_dir: &Path,
progress: &mut dyn Progress,
) -> PackageResult<()> {
assert_eq!(spec.namespace, DEFAULT_NAMESPACE);
@ -193,52 +191,11 @@ impl PackageStorage {
}
};
// The directory in which the package's version lives.
let base_dir = cache_dir.join(format!("{}/{}", spec.namespace, spec.name));
// The place at which the specific package version will live in the end.
let package_dir = base_dir.join(format!("{}", spec.version));
// To prevent multiple Typst instances from interferring, we download
// into a temporary directory first and then move this directory to
// its final destination.
//
// In the `rename` function's documentation it is stated:
// > This will not work if the new name is on a different mount point.
//
// By locating the temporary directory directly next to where the
// package directory will live, we are (trying our best) making sure
// that `tempdir` and `package_dir` are on the same mount point.
let tempdir = Tempdir::create(base_dir.join(format!(
".tmp-{}-{}",
spec.version,
fastrand::u32(..),
)))
.map_err(|err| error("failed to create temporary package directory", err))?;
// Decompress the archive into the temporary directory.
let decompressed = flate2::read::GzDecoder::new(data.as_slice());
tar::Archive::new(decompressed)
.unpack(&tempdir)
.map_err(|err| PackageError::MalformedArchive(Some(eco_format!("{err}"))))?;
// When trying to move (i.e., `rename`) the directory from one place to
// another and the target/destination directory is empty, then the
// operation will succeed (if it's atomic, or hardware doesn't fail, or
// power doesn't go off, etc.). If however the target directory is not
// empty, i.e., another instance already successfully moved the package,
// then we can safely ignore the `DirectoryNotEmpty` error.
//
// This means that we do not check the integrity of an existing moved
// package, just like we don't check the integrity if the package
// directory already existed in the first place. If situations with
// broken packages still occur even with the rename safeguard, we might
// consider more complex solutions like file locking or checksums.
match fs::rename(&tempdir, &package_dir) {
Ok(()) => Ok(()),
Err(err) if err.kind() == io::ErrorKind::DirectoryNotEmpty => Ok(()),
Err(err) => Err(error("failed to move downloaded package directory", err)),
}
tar::Archive::new(decompressed).unpack(package_dir).map_err(|err| {
fs::remove_dir_all(package_dir).ok();
PackageError::MalformedArchive(Some(eco_format!("{err}")))
})
}
}
@ -250,36 +207,6 @@ struct MinimalPackageInfo {
version: PackageVersion,
}
/// A temporary directory that is a automatically cleaned up.
struct Tempdir(PathBuf);
impl Tempdir {
/// Creates a directory at the path and auto-cleans it.
fn create(path: PathBuf) -> io::Result<Self> {
std::fs::create_dir_all(&path)?;
Ok(Self(path))
}
}
impl Drop for Tempdir {
fn drop(&mut self) {
_ = fs::remove_dir_all(&self.0);
}
}
impl AsRef<Path> for Tempdir {
fn as_ref(&self) -> &Path {
&self.0
}
}
/// Enriches an I/O error with a message and turns it into a
/// `PackageError::Other`.
#[cold]
fn error(message: &str, err: io::Error) -> PackageError {
PackageError::Other(Some(eco_format!("{message}: {err}")))
}
#[cfg(test)]
mod tests {
use super::*;

View File

@ -115,7 +115,7 @@ impl<'a, 'b> Composer<'a, 'b, '_, '_> {
let column_height = regions.size.y;
let backlog: Vec<_> = std::iter::once(&column_height)
.chain(regions.backlog)
.flat_map(|&h| std::iter::repeat_n(h, self.config.columns.count))
.flat_map(|&h| std::iter::repeat(h).take(self.config.columns.count))
.skip(1)
.collect();

View File

@ -11,7 +11,7 @@ use typst_library::layout::{
use typst_library::text::TextElem;
use typst_library::visualize::Geometry;
use typst_syntax::Span;
use typst_utils::Numeric;
use typst_utils::{MaybeReverseIter, Numeric};
use super::{
generate_line_segments, hline_stroke_at_column, layout_cell, vline_stroke_at_row,
@ -574,7 +574,7 @@ impl<'a> GridLayouter<'a> {
// Reverse with RTL so that later columns start first.
let mut dx = Abs::zero();
for (x, &col) in self.rcols.iter().enumerate() {
for (x, &col) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
let mut dy = Abs::zero();
for row in rows {
// We want to only draw the fill starting at the parent
@ -643,13 +643,18 @@ impl<'a> GridLayouter<'a> {
.sum()
};
let width = self.cell_spanned_width(cell, x);
let mut pos = Point::new(dx, dy);
if self.is_rtl {
// In RTL cells expand to the left, thus the
// position must additionally be offset by the
// cell's width.
pos.x = self.width - (dx + width);
}
// In the grid, cell colspans expand to the right,
// so we're at the leftmost (lowest 'x') column
// spanned by the cell. However, in RTL, cells
// expand to the left. Therefore, without the
// offset below, cell fills would start at the
// rightmost visual position of a cell and extend
// over to unrelated columns to the right in RTL.
// We avoid this by ensuring the fill starts at the
// very left of the cell, even with colspan > 1.
let offset =
if self.is_rtl { -width + col } else { Abs::zero() };
let pos = Point::new(dx + offset, dy);
let size = Size::new(width, height);
let rect = Geometry::Rect(size).filled(fill);
fills.push((pos, FrameItem::Shape(rect, self.span)));
@ -1231,9 +1236,10 @@ impl<'a> GridLayouter<'a> {
}
let mut output = Frame::soft(Size::new(self.width, height));
let mut offset = Point::zero();
let mut pos = Point::zero();
for (x, &rcol) in self.rcols.iter().enumerate() {
// Reverse the column order when using RTL.
for (x, &rcol) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
if let Some(cell) = self.grid.cell(x, y) {
// Rowspans have a separate layout step
if cell.rowspan.get() == 1 {
@ -1251,17 +1257,25 @@ impl<'a> GridLayouter<'a> {
let frame =
layout_cell(cell, engine, disambiguator, self.styles, pod)?
.into_frame();
let mut pos = offset;
let mut pos = pos;
if self.is_rtl {
// In RTL cells expand to the left, thus the position
// must additionally be offset by the cell's width.
pos.x = self.width - (pos.x + width);
// In the grid, cell colspans expand to the right,
// so we're at the leftmost (lowest 'x') column
// spanned by the cell. However, in RTL, cells
// expand to the left. Therefore, without the
// offset below, the cell's contents would be laid out
// starting at its rightmost visual position and extend
// over to unrelated cells to its right in RTL.
// We avoid this by ensuring the rendered cell starts at
// the very left of the cell, even with colspan > 1.
let offset = -width + rcol;
pos.x += offset;
}
output.push_frame(pos, frame);
}
}
offset.x += rcol;
pos.x += rcol;
}
Ok(output)
@ -1288,8 +1302,8 @@ impl<'a> GridLayouter<'a> {
pod.backlog = &heights[1..];
// Layout the row.
let mut offset = Point::zero();
for (x, &rcol) in self.rcols.iter().enumerate() {
let mut pos = Point::zero();
for (x, &rcol) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
if let Some(cell) = self.grid.cell(x, y) {
// Rowspans have a separate layout step
if cell.rowspan.get() == 1 {
@ -1300,19 +1314,17 @@ impl<'a> GridLayouter<'a> {
let fragment =
layout_cell(cell, engine, disambiguator, self.styles, pod)?;
for (output, frame) in outputs.iter_mut().zip(fragment) {
let mut pos = offset;
let mut pos = pos;
if self.is_rtl {
// In RTL cells expand to the left, thus the
// position must additionally be offset by the
// cell's width.
pos.x = self.width - (offset.x + width);
let offset = -width + rcol;
pos.x += offset;
}
output.push_frame(pos, frame);
}
}
}
offset.x += rcol;
pos.x += rcol;
}
Ok(Fragment::frames(outputs))
@ -1365,7 +1377,7 @@ impl<'a> GridLayouter<'a> {
.footer
.as_ref()
.and_then(Repeatable::as_repeated)
.is_none_or(|footer| footer.start != header.end)
.map_or(true, |footer| footer.start != header.end)
&& self.lrows.last().is_some_and(|row| row.index() < header.end)
&& !in_last_with_offset(
self.regions,
@ -1434,7 +1446,7 @@ impl<'a> GridLayouter<'a> {
.iter_mut()
.filter(|rowspan| (rowspan.y..rowspan.y + rowspan.rowspan).contains(&y))
.filter(|rowspan| {
rowspan.max_resolved_row.is_none_or(|max_row| y > max_row)
rowspan.max_resolved_row.map_or(true, |max_row| y > max_row)
})
{
// If the first region wasn't defined yet, it will have the
@ -1457,7 +1469,7 @@ impl<'a> GridLayouter<'a> {
// last height is the one for the current region.
rowspan
.heights
.extend(std::iter::repeat_n(Abs::zero(), amount_missing_heights));
.extend(std::iter::repeat(Abs::zero()).take(amount_missing_heights));
// Ensure that, in this region, the rowspan will span at least
// this row.
@ -1482,7 +1494,7 @@ impl<'a> GridLayouter<'a> {
// laid out at the first frame of the row).
// Any rowspans ending before this row are laid out even
// on this row's first frame.
if laid_out_footer_start.is_none_or(|footer_start| {
if laid_out_footer_start.map_or(true, |footer_start| {
// If this is a footer row, then only lay out this rowspan
// if the rowspan is contained within the footer.
y < footer_start || rowspan.y >= footer_start
@ -1568,5 +1580,5 @@ pub(super) fn points(
/// our case, headers).
pub(super) fn in_last_with_offset(regions: Regions<'_>, offset: Abs) -> bool {
regions.backlog.is_empty()
&& regions.last.is_none_or(|height| regions.size.y + offset == height)
&& regions.last.map_or(true, |height| regions.size.y + offset == height)
}

View File

@ -463,7 +463,7 @@ pub fn hline_stroke_at_column(
// region, we have the last index, and (as a failsafe) we don't have the
// last row of cells above us.
let use_bottom_border_stroke = !in_last_region
&& local_top_y.is_none_or(|top_y| top_y + 1 != grid.rows.len())
&& local_top_y.map_or(true, |top_y| top_y + 1 != grid.rows.len())
&& y == grid.rows.len();
let bottom_y =
if use_bottom_border_stroke { grid.rows.len().saturating_sub(1) } else { y };

View File

@ -3,6 +3,7 @@ use typst_library::engine::Engine;
use typst_library::foundations::Resolve;
use typst_library::layout::grid::resolve::Repeatable;
use typst_library::layout::{Abs, Axes, Frame, Point, Region, Regions, Size, Sizing};
use typst_utils::MaybeReverseIter;
use super::layouter::{in_last_with_offset, points, Row, RowPiece};
use super::{layout_cell, Cell, GridLayouter};
@ -22,10 +23,6 @@ pub struct Rowspan {
/// specified for the parent cell's `breakable` field.
pub is_effectively_unbreakable: bool,
/// The horizontal offset of this rowspan in all regions.
///
/// This is the offset from the text direction start, meaning that, on RTL
/// grids, this is the offset from the right of the grid, whereas, on LTR
/// grids, it is the offset from the left.
pub dx: Abs,
/// The vertical offset of this rowspan in the first region.
pub dy: Abs,
@ -121,11 +118,10 @@ impl GridLayouter<'_> {
// Nothing to layout.
return Ok(());
};
let first_column = self.rcols[x];
let cell = self.grid.cell(x, y).unwrap();
let width = self.cell_spanned_width(cell, x);
// In RTL cells expand to the left, thus the position
// must additionally be offset by the cell's width.
let dx = if self.is_rtl { self.width - (dx + width) } else { dx };
let dx = if self.is_rtl { dx - width + first_column } else { dx };
// Prepare regions.
let size = Size::new(width, *first_height);
@ -189,8 +185,10 @@ impl GridLayouter<'_> {
/// Checks if a row contains the beginning of one or more rowspan cells.
/// If so, adds them to the rowspans vector.
pub fn check_for_rowspans(&mut self, disambiguator: usize, y: usize) {
let offsets = points(self.rcols.iter().copied());
for (x, dx) in (0..self.rcols.len()).zip(offsets) {
// We will compute the horizontal offset of each rowspan in advance.
// For that reason, we must reverse the column order when using RTL.
let offsets = points(self.rcols.iter().copied().rev_if(self.is_rtl));
for (x, dx) in (0..self.rcols.len()).rev_if(self.is_rtl).zip(offsets) {
let Some(cell) = self.grid.cell(x, y) else {
continue;
};
@ -590,7 +588,7 @@ impl GridLayouter<'_> {
measurement_data: &CellMeasurementData<'_>,
) -> bool {
if sizes.len() <= 1
&& sizes.first().is_none_or(|&first_frame_size| {
&& sizes.first().map_or(true, |&first_frame_size| {
first_frame_size <= measurement_data.height_in_this_region
})
{

View File

@ -154,7 +154,7 @@ pub fn line<'a>(
let mut items = collect_items(engine, p, range, trim);
// Add a hyphen at the line start, if a previous dash should be repeated.
if pred.is_some_and(|pred| should_repeat_hyphen(pred, full)) {
if pred.map_or(false, |pred| should_repeat_hyphen(pred, full)) {
if let Some(shaped) = items.first_text_mut() {
shaped.prepend_hyphen(engine, p.config.fallback);
}
@ -406,7 +406,7 @@ fn should_repeat_hyphen(pred_line: &Line, text: &str) -> bool {
//
// See § 4.1.1.1.2.e on the "Ortografía de la lengua española"
// https://www.rae.es/ortografía/como-signo-de-división-de-palabras-a-final-de-línea
Lang::SPANISH => text.chars().next().is_some_and(|c| !c.is_uppercase()),
Lang::SPANISH => text.chars().next().map_or(false, |c| !c.is_uppercase()),
_ => false,
}

View File

@ -290,7 +290,7 @@ fn linebreak_optimized_bounded<'a>(
}
// If this attempt is better than what we had before, take it!
if best.as_ref().is_none_or(|best| best.total >= total) {
if best.as_ref().map_or(true, |best| best.total >= total) {
best = Some(Entry { pred: pred_index, total, line: attempt, end });
}
}
@ -423,7 +423,7 @@ fn linebreak_optimized_approximate(
let total = pred.total + line_cost;
// If this attempt is better than what we had before, take it!
if best.as_ref().is_none_or(|best| best.total >= total) {
if best.as_ref().map_or(true, |best| best.total >= total) {
best = Some(Entry {
pred: pred_index,
total,
@ -690,34 +690,13 @@ fn breakpoints(p: &Preparation, mut f: impl FnMut(usize, Breakpoint)) {
let breakpoint = if point == text.len() {
Breakpoint::Mandatory
} else {
const OBJ_REPLACE: char = '\u{FFFC}';
match lb.get(c) {
// Fix for: https://github.com/unicode-org/icu4x/issues/4146
LineBreak::Glue | LineBreak::WordJoiner | LineBreak::ZWJ => continue,
LineBreak::MandatoryBreak
| LineBreak::CarriageReturn
| LineBreak::LineFeed
| LineBreak::NextLine => Breakpoint::Mandatory,
// https://github.com/typst/typst/issues/5489
//
// OBJECT-REPLACEMENT-CHARACTERs provide Contingent Break
// opportunities before and after by default. This behaviour
// is however tailorable, see:
// https://www.unicode.org/reports/tr14/#CB
// https://www.unicode.org/reports/tr14/#TailorableBreakingRules
// https://www.unicode.org/reports/tr14/#LB20
//
// Don't provide a line breaking opportunity between a LTR-
// ISOLATE (or any other Combining Mark) and an OBJECT-
// REPLACEMENT-CHARACTER representing an inline item, if the
// LTR-ISOLATE could end up as the only character on the
// previous line.
LineBreak::CombiningMark
if text[point..].starts_with(OBJ_REPLACE)
&& last + c.len_utf8() == point =>
{
continue;
}
_ => Breakpoint::Normal,
}
};

View File

@ -20,7 +20,7 @@ use unicode_bidi::{BidiInfo, Level as BidiLevel};
use unicode_script::{Script, UnicodeScript};
use super::{decorate, Item, Range, SpanMapper};
use crate::modifiers::FrameModifyText;
use crate::modifiers::{FrameModifiers, FrameModify};
/// The result of shaping text.
///
@ -327,7 +327,7 @@ impl<'a> ShapedText<'a> {
offset += width;
}
frame.modify_text(self.styles);
frame.modify(&FrameModifiers::get_in(self.styles));
frame
}
@ -465,7 +465,7 @@ impl<'a> ShapedText<'a> {
None
};
let mut chain = families(self.styles)
.filter(|family| family.covers().is_none_or(|c| c.is_match("-")))
.filter(|family| family.covers().map_or(true, |c| c.is_match("-")))
.map(|family| book.select(family.as_str(), self.variant))
.chain(fallback_func.iter().map(|f| f()))
.flatten();
@ -570,7 +570,7 @@ impl<'a> ShapedText<'a> {
// for the next line.
let dec = if ltr { usize::checked_sub } else { usize::checked_add };
while let Some(next) = dec(idx, 1) {
if self.glyphs.get(next).is_none_or(|g| g.range.start != text_index) {
if self.glyphs.get(next).map_or(true, |g| g.range.start != text_index) {
break;
}
idx = next;
@ -812,7 +812,7 @@ fn shape_segment<'a>(
.nth(1)
.map(|(i, _)| offset + i)
.unwrap_or(text.len());
covers.is_none_or(|cov| cov.is_match(&text[offset..end]))
covers.map_or(true, |cov| cov.is_match(&text[offset..end]))
};
// Collect the shaped glyphs, doing fallback and shaping parts again with
@ -824,42 +824,12 @@ fn shape_segment<'a>(
// Add the glyph to the shaped output.
if info.glyph_id != 0 && is_covered(cluster) {
// Assume we have the following sequence of (glyph_id, cluster):
// [(120, 0), (80, 0), (3, 3), (755, 4), (69, 4), (424, 13),
// (63, 13), (193, 25), (80, 25), (3, 31)
//
// We then want the sequence of (glyph_id, text_range) to look as follows:
// [(120, 0..3), (80, 0..3), (3, 3..4), (755, 4..13), (69, 4..13),
// (424, 13..25), (63, 13..25), (193, 25..31), (80, 25..31), (3, 31..x)]
//
// Each glyph in the same cluster should be assigned the full text
// range. This is necessary because only this way krilla can
// properly assign `ActualText` attributes in complex shaping
// scenarios.
// The start of the glyph's text range.
// Determine the text range of the glyph.
let start = base + cluster;
// Determine the end of the glyph's text range.
let mut k = i;
let step: isize = if ltr { 1 } else { -1 };
let end = loop {
// If we've reached the end of the glyphs, the `end` of the
// range should be the end of the full text.
let Some((next, next_info)) = k
.checked_add_signed(step)
.and_then(|n| infos.get(n).map(|info| (n, info)))
else {
break base + text.len();
};
// If the cluster doesn't match anymore, we've reached the end.
if next_info.cluster != info.cluster {
break base + next_info.cluster as usize;
}
k = next;
};
let end = base
+ if ltr { i.checked_add(1) } else { i.checked_sub(1) }
.and_then(|last| infos.get(last))
.map_or(text.len(), |info| info.cluster as usize);
let c = text[cluster..].chars().next().unwrap();
let script = c.script();

View File

@ -96,13 +96,9 @@ pub fn layout_enum(
let mut cells = vec![];
let mut locator = locator.split();
let mut number = elem.start(styles).unwrap_or_else(|| {
if reversed {
elem.children.len() as u64
} else {
1
}
});
let mut number =
elem.start(styles)
.unwrap_or_else(|| if reversed { elem.children.len() } else { 1 });
let mut parents = EnumElem::parents_in(styles);
let full = elem.full(styles);

View File

@ -19,10 +19,8 @@ pub fn layout_accent(
let mut base = ctx.layout_into_fragment(&elem.base, styles.chain(&cramped))?;
// Try to replace a glyph with its dotless variant.
if elem.dotless(styles) {
if let MathFragment::Glyph(glyph) = &mut base {
glyph.make_dotless_form(ctx);
}
if let MathFragment::Glyph(glyph) = &mut base {
glyph.make_dotless_form(ctx);
}
// Preserve class to preserve automatic spacing.
@ -36,7 +34,7 @@ pub fn layout_accent(
// Try to replace accent glyph with flattened variant.
let flattened_base_height = scaled!(ctx, styles, flattened_accent_base_height);
if base.ascent() > flattened_base_height {
if base.height() > flattened_base_height {
glyph.make_flattened_accent_form(ctx);
}
@ -52,7 +50,7 @@ pub fn layout_accent(
// minus the accent base height. Only if the base is very small, we need
// a larger gap so that the accent doesn't move too low.
let accent_base_height = scaled!(ctx, styles, accent_base_height);
let gap = -accent.descent() - base.ascent().min(accent_base_height);
let gap = -accent.descent() - base.height().min(accent_base_height);
let size = Size::new(base.width(), accent.height() + gap + base.height());
let accent_pos = Point::with_x(base_attach - accent_attach);
let base_pos = Point::with_y(accent.height() + gap);

View File

@ -1,4 +1,4 @@
use typst_library::diag::{bail, warning, SourceResult};
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::{Content, Packed, Resolve, StyleChain};
use typst_library::layout::{
Abs, Axes, Em, FixedAlignment, Frame, FrameItem, Point, Ratio, Rel, Size,
@ -9,7 +9,7 @@ use typst_library::visualize::{FillRule, FixedStroke, Geometry, LineCap, Shape};
use typst_syntax::Span;
use super::{
alignments, delimiter_alignment, style_for_denominator, AlignmentResult,
alignments, delimiter_alignment, stack, style_for_denominator, AlignmentResult,
FrameFragment, GlyphFragment, LeftRightAlternator, MathContext, DELIM_SHORT_FALL,
};
@ -23,51 +23,17 @@ pub fn layout_vec(
ctx: &mut MathContext,
styles: StyleChain,
) -> SourceResult<()> {
let span = elem.span();
let column: Vec<&Content> = elem.children.iter().collect();
let frame = layout_body(
let delim = elem.delim(styles);
let frame = layout_vec_body(
ctx,
styles,
&[column],
&elem.children,
elem.align(styles),
elem.gap(styles),
LeftRightAlternator::Right,
None,
Axes::with_y(elem.gap(styles)),
span,
"elements",
)?;
let delim = elem.delim(styles);
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
}
/// Lays out a [`CasesElem`].
#[typst_macros::time(name = "math.cases", span = elem.span())]
pub fn layout_cases(
elem: &Packed<CasesElem>,
ctx: &mut MathContext,
styles: StyleChain,
) -> SourceResult<()> {
let span = elem.span();
let column: Vec<&Content> = elem.children.iter().collect();
let frame = layout_body(
ctx,
styles,
&[column],
FixedAlignment::Start,
LeftRightAlternator::None,
None,
Axes::with_y(elem.gap(styles)),
span,
"branches",
)?;
let delim = elem.delim(styles);
let (open, close) =
if elem.reverse(styles) { (None, delim.close()) } else { (delim.open(), None) };
layout_delimiters(ctx, styles, frame, open, close, span)
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), elem.span())
}
/// Lays out a [`MatElem`].
@ -77,16 +43,14 @@ pub fn layout_mat(
ctx: &mut MathContext,
styles: StyleChain,
) -> SourceResult<()> {
let span = elem.span();
let rows = &elem.rows;
let ncols = rows.first().map_or(0, |row| row.len());
let augment = elem.augment(styles);
let rows = &elem.rows;
if let Some(aug) = &augment {
for &offset in &aug.hline.0 {
if offset == 0 || offset.unsigned_abs() >= rows.len() {
bail!(
span,
elem.span(),
"cannot draw a horizontal line after row {} of a matrix with {} rows",
if offset < 0 { rows.len() as isize + offset } else { offset },
rows.len()
@ -94,55 +58,95 @@ pub fn layout_mat(
}
}
let ncols = rows.first().map_or(0, |row| row.len());
for &offset in &aug.vline.0 {
if offset == 0 || offset.unsigned_abs() >= ncols {
bail!(
span,
"cannot draw a vertical line after column {} of a matrix with {} columns",
if offset < 0 { ncols as isize + offset } else { offset },
ncols
);
elem.span(),
"cannot draw a vertical line after column {} of a matrix with {} columns",
if offset < 0 { ncols as isize + offset } else { offset },
ncols
);
}
}
}
// Transpose rows of the matrix into columns.
let mut row_iters: Vec<_> = rows.iter().map(|i| i.iter()).collect();
let columns: Vec<Vec<_>> = (0..ncols)
.map(|_| row_iters.iter_mut().map(|i| i.next().unwrap()).collect())
.collect();
let frame = layout_body(
let delim = elem.delim(styles);
let frame = layout_mat_body(
ctx,
styles,
&columns,
rows,
elem.align(styles),
LeftRightAlternator::Right,
augment,
Axes::new(elem.column_gap(styles), elem.row_gap(styles)),
span,
"cells",
elem.span(),
)?;
let delim = elem.delim(styles);
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), elem.span())
}
/// Layout the inner contents of a matrix, vector, or cases.
#[allow(clippy::too_many_arguments)]
fn layout_body(
/// Lays out a [`CasesElem`].
#[typst_macros::time(name = "math.cases", span = elem.span())]
pub fn layout_cases(
elem: &Packed<CasesElem>,
ctx: &mut MathContext,
styles: StyleChain,
columns: &[Vec<&Content>],
) -> SourceResult<()> {
let delim = elem.delim(styles);
let frame = layout_vec_body(
ctx,
styles,
&elem.children,
FixedAlignment::Start,
elem.gap(styles),
LeftRightAlternator::None,
)?;
let (open, close) =
if elem.reverse(styles) { (None, delim.close()) } else { (delim.open(), None) };
layout_delimiters(ctx, styles, frame, open, close, elem.span())
}
/// Layout the inner contents of a vector.
fn layout_vec_body(
ctx: &mut MathContext,
styles: StyleChain,
column: &[Content],
align: FixedAlignment,
row_gap: Rel<Abs>,
alternator: LeftRightAlternator,
) -> SourceResult<Frame> {
let gap = row_gap.relative_to(ctx.region.size.y);
let denom_style = style_for_denominator(styles);
let mut flat = vec![];
for child in column {
// We allow linebreaks in cases and vectors, which are functionally
// identical to commas.
flat.extend(ctx.layout_into_run(child, styles.chain(&denom_style))?.rows());
}
// We pad ascent and descent with the ascent and descent of the paren
// to ensure that normal vectors are aligned with others unless they are
// way too big.
let paren =
GlyphFragment::new(ctx, styles.chain(&denom_style), '(', Span::detached());
Ok(stack(flat, align, gap, 0, alternator, Some((paren.ascent, paren.descent))))
}
/// Layout the inner contents of a matrix.
fn layout_mat_body(
ctx: &mut MathContext,
styles: StyleChain,
rows: &[Vec<Content>],
align: FixedAlignment,
augment: Option<Augment<Abs>>,
gap: Axes<Rel<Abs>>,
span: Span,
children: &str,
) -> SourceResult<Frame> {
let nrows = columns.first().map_or(0, |col| col.len());
let ncols = columns.len();
let ncols = rows.first().map_or(0, |row| row.len());
let nrows = rows.len();
if ncols == 0 || nrows == 0 {
return Ok(Frame::soft(Size::zero()));
}
@ -174,11 +178,16 @@ fn layout_body(
// Before the full matrix body can be laid out, the
// individual cells must first be independently laid out
// so we can ensure alignment across rows and columns.
let mut cols = vec![vec![]; ncols];
// This variable stores the maximum ascent and descent for each row.
let mut heights = vec![(Abs::zero(), Abs::zero()); nrows];
// We want to transpose our data layout to columns
// before final layout. For efficiency, the columns
// variable is set up here and newly generated
// individual cells are then added to it.
let mut cols = vec![vec![]; ncols];
let denom_style = style_for_denominator(styles);
// We pad ascent and descent with the ascent and descent of the paren
// to ensure that normal matrices are aligned with others unless they are
@ -186,22 +195,10 @@ fn layout_body(
let paren =
GlyphFragment::new(ctx, styles.chain(&denom_style), '(', Span::detached());
for (column, col) in columns.iter().zip(&mut cols) {
for (cell, (ascent, descent)) in column.iter().zip(&mut heights) {
let cell_span = cell.span();
for (row, (ascent, descent)) in rows.iter().zip(&mut heights) {
for (cell, col) in row.iter().zip(&mut cols) {
let cell = ctx.layout_into_run(cell, styles.chain(&denom_style))?;
// We ignore linebreaks in the cells as we can't differentiate
// alignment points for the whole body from ones for a specific
// cell, and multiline cells don't quite make sense at the moment.
if cell.is_multiline() {
ctx.engine.sink.warn(warning!(
cell_span,
"linebreaks are ignored in {}", children;
hint: "use commas instead to separate each line"
));
}
ascent.set_max(cell.ascent().max(paren.ascent));
descent.set_max(cell.descent().max(paren.descent));
@ -225,7 +222,7 @@ fn layout_body(
let mut y = Abs::zero();
for (cell, &(ascent, descent)) in col.into_iter().zip(&heights) {
let cell = cell.into_line_frame(&points, alternator);
let cell = cell.into_line_frame(&points, LeftRightAlternator::Right);
let pos = Point::new(
if points.is_empty() {
x + align.position(rcol - cell.width())

View File

@ -85,15 +85,14 @@ pub fn layout_root(
ascent.set_max(shift_up + index.ascent());
}
let sqrt_x = sqrt_offset.max(Abs::zero());
let radicand_x = sqrt_x + sqrt.width();
let radicand_x = sqrt_offset + sqrt.width();
let radicand_y = ascent - radicand.ascent();
let width = radicand_x + radicand.width();
let size = Size::new(width, ascent + descent);
// The extra "- thickness" comes from the fact that the sqrt is placed
// in `push_frame` with respect to its top, not its baseline.
let sqrt_pos = Point::new(sqrt_x, radicand_y - gap - thickness);
let sqrt_pos = Point::new(sqrt_offset, radicand_y - gap - thickness);
let line_pos = Point::new(radicand_x, radicand_y - gap - (thickness / 2.0));
let radicand_pos = Point::new(radicand_x, radicand_y);
@ -101,8 +100,7 @@ pub fn layout_root(
frame.set_baseline(ascent);
if let Some(index) = index {
let index_x = -sqrt_offset.min(Abs::zero()) + kern_before;
let index_pos = Point::new(index_x, ascent - index.ascent() - shift_up);
let index_pos = Point::new(kern_before, ascent - index.ascent() - shift_up);
frame.push_frame(index_pos, index);
}

View File

@ -117,6 +117,7 @@ pub fn stack(
gap: Abs,
baseline: usize,
alternator: LeftRightAlternator,
minimum_ascent_descent: Option<(Abs, Abs)>,
) -> Frame {
let AlignmentResult { points, width } = alignments(&rows);
let rows: Vec<_> = rows
@ -124,9 +125,13 @@ pub fn stack(
.map(|row| row.into_line_frame(&points, alternator))
.collect();
let padded_height = |height: Abs| {
height.max(minimum_ascent_descent.map_or(Abs::zero(), |(a, d)| a + d))
};
let mut frame = Frame::soft(Size::new(
width,
rows.iter().map(|row| row.height()).sum::<Abs>()
rows.iter().map(|row| padded_height(row.height())).sum::<Abs>()
+ rows.len().saturating_sub(1) as f64 * gap,
));
@ -137,11 +142,14 @@ pub fn stack(
} else {
Abs::zero()
};
let pos = Point::new(x, y);
let ascent_padded_part = minimum_ascent_descent
.map_or(Abs::zero(), |(a, _)| (a - row.ascent()))
.max(Abs::zero());
let pos = Point::new(x, y + ascent_padded_part);
if i == baseline {
frame.set_baseline(y + row.baseline());
frame.set_baseline(y + row.baseline() + ascent_padded_part);
}
y += row.height() + gap;
y += padded_height(row.height()) + gap;
frame.push_frame(pos, row);
}

View File

@ -302,6 +302,6 @@ fn assemble(
fn parts(assembly: GlyphAssembly, repeat: usize) -> impl Iterator<Item = GlyphPart> + '_ {
assembly.parts.into_iter().flat_map(move |part| {
let count = if part.part_flags.extender() { repeat } else { 1 };
std::iter::repeat_n(part, count)
std::iter::repeat(part).take(count)
})
}

View File

@ -312,8 +312,14 @@ fn layout_underoverspreader(
}
};
let frame =
stack(rows, FixedAlignment::Center, gap, baseline, LeftRightAlternator::Right);
let frame = stack(
rows,
FixedAlignment::Center,
gap,
baseline,
LeftRightAlternator::Right,
None,
);
ctx.push(FrameFragment::new(styles, frame).with_class(body_class));
Ok(())

View File

@ -1,6 +1,6 @@
use typst_library::foundations::StyleChain;
use typst_library::layout::{Abs, Fragment, Frame, FrameItem, HideElem, Point, Sides};
use typst_library::model::{Destination, LinkElem, ParElem};
use typst_library::layout::{Fragment, Frame, FrameItem, HideElem, Point};
use typst_library::model::{Destination, LinkElem};
/// Frame-level modifications resulting from styles that do not impose any
/// layout structure.
@ -52,7 +52,14 @@ pub trait FrameModify {
impl FrameModify for Frame {
fn modify(&mut self, modifiers: &FrameModifiers) {
modify_frame(self, modifiers, None);
if let Some(dest) = &modifiers.dest {
let size = self.size();
self.push(Point::zero(), FrameItem::Link(dest.clone(), size));
}
if modifiers.hidden {
self.hide();
}
}
}
@ -75,41 +82,6 @@ where
}
}
pub trait FrameModifyText {
/// Resolve and apply [`FrameModifiers`] for this text frame.
fn modify_text(&mut self, styles: StyleChain);
}
impl FrameModifyText for Frame {
fn modify_text(&mut self, styles: StyleChain) {
let modifiers = FrameModifiers::get_in(styles);
let expand_y = 0.5 * ParElem::leading_in(styles);
let outset = Sides::new(Abs::zero(), expand_y, Abs::zero(), expand_y);
modify_frame(self, &modifiers, Some(outset));
}
}
fn modify_frame(
frame: &mut Frame,
modifiers: &FrameModifiers,
link_box_outset: Option<Sides<Abs>>,
) {
if let Some(dest) = &modifiers.dest {
let mut pos = Point::zero();
let mut size = frame.size();
if let Some(outset) = link_box_outset {
pos.y -= outset.top;
pos.x -= outset.left;
size += outset.sum_by_axis();
}
frame.push(pos, FrameItem::Link(dest.clone(), size));
}
if modifiers.hidden {
frame.hide();
}
}
/// Performs layout and modification in one step.
///
/// This just runs `layout(styles).modified(&FrameModifiers::get_in(styles))`,

View File

@ -29,7 +29,6 @@ csv = { workspace = true }
ecow = { workspace = true }
flate2 = { workspace = true }
fontdb = { workspace = true }
glidesort = { workspace = true }
hayagriva = { workspace = true }
icu_properties = { workspace = true }
icu_provider = { workspace = true }
@ -62,7 +61,6 @@ ttf-parser = { workspace = true }
two-face = { workspace = true }
typed-arena = { workspace = true }
unicode-math-class = { workspace = true }
unicode-normalization = { workspace = true }
unicode-segmentation = { workspace = true }
unscanny = { workspace = true }
usvg = { workspace = true }

View File

@ -232,18 +232,42 @@ impl From<SyntaxError> for SourceDiagnostic {
/// Destination for a deprecation message when accessing a deprecated value.
pub trait DeprecationSink {
/// Emits the given deprecation message into this sink.
fn emit(self, message: &str);
fn emit(&mut self, message: &str);
/// Emits the given deprecation message into this sink, with the given
/// hints.
fn emit_with_hints(&mut self, message: &str, hints: &[&str]);
}
impl DeprecationSink for () {
fn emit(self, _: &str) {}
fn emit(&mut self, _: &str) {}
fn emit_with_hints(&mut self, _: &str, _: &[&str]) {}
}
impl DeprecationSink for (&mut Vec<SourceDiagnostic>, Span) {
fn emit(&mut self, message: &str) {
self.0.push(SourceDiagnostic::warning(self.1, message));
}
fn emit_with_hints(&mut self, message: &str, hints: &[&str]) {
self.0.push(
SourceDiagnostic::warning(self.1, message)
.with_hints(hints.iter().copied().map(Into::into)),
);
}
}
impl DeprecationSink for (&mut Engine<'_>, Span) {
/// Emits the deprecation message as a warning.
fn emit(self, message: &str) {
fn emit(&mut self, message: &str) {
self.0.sink.warn(SourceDiagnostic::warning(self.1, message));
}
fn emit_with_hints(&mut self, message: &str, hints: &[&str]) {
self.0.sink.warn(
SourceDiagnostic::warning(self.1, message)
.with_hints(hints.iter().copied().map(Into::into)),
);
}
}
/// A part of a diagnostic's [trace](SourceDiagnostic::trace).

View File

@ -312,8 +312,7 @@ impl Route<'_> {
if !self.within(Route::MAX_SHOW_RULE_DEPTH) {
bail!(
"maximum show rule depth exceeded";
hint: "maybe a show rule matches its own output";
hint: "maybe there are too deeply nested elements"
hint: "check whether the show rule matches its own output"
);
}
Ok(())

View File

@ -9,7 +9,9 @@ use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use typst_syntax::{Span, Spanned};
use crate::diag::{bail, At, HintedStrResult, SourceDiagnostic, SourceResult, StrResult};
use crate::diag::{
bail, At, DeprecationSink, HintedStrResult, SourceDiagnostic, SourceResult, StrResult,
};
use crate::engine::Engine;
use crate::foundations::{
cast, func, ops, repr, scope, ty, Args, Bytes, CastInfo, Context, Dict, FromValue,
@ -143,6 +145,11 @@ impl Array {
Ok(self.iter().cloned().cycle().take(count).collect())
}
/// The internal implementation of [`Array::contains`].
pub fn contains_impl(&self, value: &Value, sink: &mut dyn DeprecationSink) -> bool {
self.0.iter().any(|v| ops::equal(v, value, sink))
}
}
#[scope]
@ -172,29 +179,17 @@ impl Array {
}
/// Returns the first item in the array. May be used on the left-hand side
/// an assignment. Returns the default value if the array is empty
/// or fails with an error is no default value was specified.
/// of an assignment. Fails with an error if the array is empty.
#[func]
pub fn first(
&self,
/// A default value to return if the array is empty.
#[named]
default: Option<Value>,
) -> StrResult<Value> {
self.0.first().cloned().or(default).ok_or_else(array_is_empty)
pub fn first(&self) -> StrResult<Value> {
self.0.first().cloned().ok_or_else(array_is_empty)
}
/// Returns the last item in the array. May be used on the left-hand side of
/// an assignment. Returns the default value if the array is empty
/// or fails with an error is no default value was specified.
/// an assignment. Fails with an error if the array is empty.
#[func]
pub fn last(
&self,
/// A default value to return if the array is empty.
#[named]
default: Option<Value>,
) -> StrResult<Value> {
self.0.last().cloned().or(default).ok_or_else(array_is_empty)
pub fn last(&self) -> StrResult<Value> {
self.0.last().cloned().ok_or_else(array_is_empty)
}
/// Returns the item at the specified index in the array. May be used on the
@ -302,10 +297,12 @@ impl Array {
#[func]
pub fn contains(
&self,
engine: &mut Engine,
span: Span,
/// The value to search for.
value: Value,
) -> bool {
self.0.contains(&value)
self.contains_impl(&value, &mut (engine, span))
}
/// Searches for an item for which the given function returns `{true}` and
@ -588,6 +585,8 @@ impl Array {
#[func]
pub fn sum(
self,
engine: &mut Engine,
span: Span,
/// What to return if the array is empty. Must be set if the array can
/// be empty.
#[named]
@ -599,7 +598,7 @@ impl Array {
.or(default)
.ok_or("cannot calculate sum of empty array with no default")?;
for item in iter {
acc = ops::add(acc, item)?;
acc = ops::add(acc, item, &mut (&mut *engine, span))?;
}
Ok(acc)
}
@ -698,6 +697,8 @@ impl Array {
#[func]
pub fn join(
self,
engine: &mut Engine,
span: Span,
/// A value to insert between each item of the array.
#[default]
separator: Option<Value>,
@ -713,13 +714,18 @@ impl Array {
for (i, value) in self.into_iter().enumerate() {
if i > 0 {
if i + 1 == len && last.is_some() {
result = ops::join(result, last.take().unwrap())?;
result = ops::join(
result,
last.take().unwrap(),
&mut (&mut *engine, span),
)?;
} else {
result = ops::join(result, separator.clone())?;
result =
ops::join(result, separator.clone(), &mut (&mut *engine, span))?;
}
}
result = ops::join(result, value)?;
result = ops::join(result, value, &mut (&mut *engine, span))?;
}
Ok(result)
@ -808,7 +814,7 @@ impl Array {
/// function. The sorting algorithm used is stable.
///
/// Returns an error if two values could not be compared or if the key
/// or comparison function (if given) yields an error.
/// function (if given) yields an error.
///
/// To sort according to multiple criteria at once, e.g. in case of equality
/// between some criteria, the key function can return an array. The results
@ -832,134 +838,33 @@ impl Array {
/// determine the keys to sort by.
#[named]
key: Option<Func>,
/// If given, uses this function to compare elements in the array.
///
/// This function should return a boolean: `{true}` indicates that the
/// elements are in order, while `{false}` indicates that they should be
/// swapped. To keep the sort stable, if the two elements are equal, the
/// function should return `{true}`.
///
/// If this function does not order the elements properly (e.g., by
/// returning `{false}` for both `{(x, y)}` and `{(y, x)}`, or for
/// `{(x, x)}`), the resulting array will be in unspecified order.
///
/// When used together with `key`, `by` will be passed the keys instead
/// of the elements.
///
/// ```example
/// #(
/// "sorted",
/// "by",
/// "decreasing",
/// "length",
/// ).sorted(
/// key: s => s.len(),
/// by: (l, r) => l >= r,
/// )
/// ```
#[named]
by: Option<Func>,
) -> SourceResult<Array> {
match by {
Some(by) => {
let mut are_in_order = |mut x, mut y| {
if let Some(f) = &key {
// We rely on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the key.
x = f.call(engine, context, [x])?;
y = f.call(engine, context, [y])?;
let mut result = Ok(());
let mut vec = self.0;
let mut key_of = |x: Value| match &key {
// NOTE: We are relying on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the `key`.
Some(f) => f.call(engine, context, [x]),
None => Ok(x),
};
vec.make_mut().sort_by(|a, b| {
// Until we get `try` blocks :)
match (key_of(a.clone()), key_of(b.clone())) {
(Ok(a), Ok(b)) => ops::compare(&a, &b).unwrap_or_else(|err| {
if result.is_ok() {
result = Err(err).at(span);
}
match by.call(engine, context, [x, y])? {
Value::Bool(b) => Ok(b),
x => {
bail!(
span,
"expected boolean from `by` function, got {}",
x.ty(),
)
}
Ordering::Equal
}),
(Err(e), _) | (_, Err(e)) => {
if result.is_ok() {
result = Err(e);
}
};
// If a comparison function is provided, we use `glidesort`
// instead of the standard library sorting algorithm to prevent
// panics in case the comparison function does not define a
// valid order (see https://github.com/typst/typst/pull/5627).
let mut result = Ok(());
let mut vec = self.0.into_iter().enumerate().collect::<Vec<_>>();
glidesort::sort_by(&mut vec, |(i, x), (j, y)| {
// Because we use booleans for the comparison function, in
// order to keep the sort stable, we need to compare in the
// right order.
if i < j {
// If `x` and `y` appear in this order in the original
// array, then we should change their order (i.e.,
// return `Ordering::Greater`) iff `y` is strictly less
// than `x` (i.e., `compare(x, y)` returns `false`).
// Otherwise, we should keep them in the same order
// (i.e., return `Ordering::Less`).
match are_in_order(x.clone(), y.clone()) {
Ok(false) => Ordering::Greater,
Ok(true) => Ordering::Less,
Err(err) => {
if result.is_ok() {
result = Err(err);
}
Ordering::Equal
}
}
} else {
// If `x` and `y` appear in the opposite order in the
// original array, then we should change their order
// (i.e., return `Ordering::Less`) iff `x` is strictly
// less than `y` (i.e., `compare(y, x)` returns
// `false`). Otherwise, we should keep them in the same
// order (i.e., return `Ordering::Less`).
match are_in_order(y.clone(), x.clone()) {
Ok(false) => Ordering::Less,
Ok(true) => Ordering::Greater,
Err(err) => {
if result.is_ok() {
result = Err(err);
}
Ordering::Equal
}
}
}
});
result.map(|()| vec.into_iter().map(|(_, x)| x).collect())
Ordering::Equal
}
}
None => {
let mut key_of = |x: Value| match &key {
// We rely on `comemo`'s memoization of function evaluation
// to not excessively reevaluate the key.
Some(f) => f.call(engine, context, [x]),
None => Ok(x),
};
// If no comparison function is provided, we know the order is
// valid, so we can use the standard library sort and prevent an
// extra allocation.
let mut result = Ok(());
let mut vec = self.0;
vec.make_mut().sort_by(|a, b| {
match (key_of(a.clone()), key_of(b.clone())) {
(Ok(a), Ok(b)) => ops::compare(&a, &b).unwrap_or_else(|err| {
if result.is_ok() {
result = Err(err).at(span);
}
Ordering::Equal
}),
(Err(e), _) | (_, Err(e)) => {
if result.is_ok() {
result = Err(e);
}
Ordering::Equal
}
}
});
result.map(|()| vec.into())
}
}
});
result.map(|_| vec.into())
}
/// Deduplicates all items in the array.
@ -975,13 +880,14 @@ impl Array {
self,
engine: &mut Engine,
context: Tracked<Context>,
span: Span,
/// If given, applies this function to the elements in the array to
/// determine the keys to deduplicate by.
#[named]
key: Option<Func>,
) -> SourceResult<Array> {
let mut out = EcoVec::with_capacity(self.0.len());
let mut key_of = |x: Value| match &key {
let key_of = |engine: &mut Engine, x: Value| match &key {
// NOTE: We are relying on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the `key`.
Some(f) => f.call(engine, context, [x]),
@ -992,14 +898,18 @@ impl Array {
// 1. We would like to preserve the order of the elements.
// 2. We cannot hash arbitrary `Value`.
'outer: for value in self {
let key = key_of(value.clone())?;
let key = key_of(&mut *engine, value.clone())?;
if out.is_empty() {
out.push(value);
continue;
}
for second in out.iter() {
if ops::equal(&key, &key_of(second.clone())?) {
if ops::equal(
&key,
&key_of(&mut *engine, second.clone())?,
&mut (&mut *engine, span),
) {
continue 'outer;
}
}

View File

@ -21,7 +21,7 @@ use crate::foundations::{
///
/// Type casting works as follows:
/// - [`Reflect for T`](Reflect) describes the possible Typst values for `T`
/// (for documentation and autocomplete).
/// (for documentation and autocomplete).
/// - [`IntoValue for T`](IntoValue) is for conversion from `T -> Value`
/// (infallible)
/// - [`FromValue for T`](FromValue) is for conversion from `Value -> T`

View File

@ -3,7 +3,7 @@ use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::iter::{self, Sum};
use std::marker::PhantomData;
use std::ops::{Add, AddAssign, ControlFlow, Deref, DerefMut};
use std::ops::{Add, AddAssign, Deref, DerefMut};
use std::sync::Arc;
use comemo::Tracked;
@ -414,11 +414,10 @@ impl Content {
/// Elements produced in `show` rules will not be included in the results.
pub fn query(&self, selector: Selector) -> Vec<Content> {
let mut results = Vec::new();
self.traverse(&mut |element| -> ControlFlow<()> {
self.traverse(&mut |element| {
if selector.matches(&element, None) {
results.push(element);
}
ControlFlow::Continue(())
});
results
}
@ -428,58 +427,54 @@ impl Content {
///
/// Elements produced in `show` rules will not be included in the results.
pub fn query_first(&self, selector: &Selector) -> Option<Content> {
self.traverse(&mut |element| -> ControlFlow<Content> {
if selector.matches(&element, None) {
ControlFlow::Break(element)
} else {
ControlFlow::Continue(())
let mut result = None;
self.traverse(&mut |element| {
if result.is_none() && selector.matches(&element, None) {
result = Some(element);
}
})
.break_value()
});
result
}
/// Extracts the plain text of this content.
pub fn plain_text(&self) -> EcoString {
let mut text = EcoString::new();
self.traverse(&mut |element| -> ControlFlow<()> {
self.traverse(&mut |element| {
if let Some(textable) = element.with::<dyn PlainText>() {
textable.plain_text(&mut text);
}
ControlFlow::Continue(())
});
text
}
/// Traverse this content.
fn traverse<F, B>(&self, f: &mut F) -> ControlFlow<B>
fn traverse<F>(&self, f: &mut F)
where
F: FnMut(Content) -> ControlFlow<B>,
F: FnMut(Content),
{
f(self.clone());
self.inner
.elem
.fields()
.into_iter()
.for_each(|(_, value)| walk_value(value, f));
/// Walks a given value to find any content that matches the selector.
///
/// Returns early if the function gives `ControlFlow::Break`.
fn walk_value<F, B>(value: Value, f: &mut F) -> ControlFlow<B>
fn walk_value<F>(value: Value, f: &mut F)
where
F: FnMut(Content) -> ControlFlow<B>,
F: FnMut(Content),
{
match value {
Value::Content(content) => content.traverse(f),
Value::Array(array) => {
for value in array {
walk_value(value, f)?;
walk_value(value, f);
}
ControlFlow::Continue(())
}
_ => ControlFlow::Continue(()),
_ => {}
}
}
// Call f on the element itself before recursively iterating its fields.
f(self.clone())?;
for (_, value) in self.inner.elem.fields() {
walk_value(value, f)?;
}
ControlFlow::Continue(())
}
}

View File

@ -112,7 +112,7 @@ use crate::foundations::{
/// it into another file by writing `{import "foo.typ": alert}`.
///
/// # Unnamed functions { #unnamed }
/// You can also create an unnamed function without creating a binding by
/// You can also created an unnamed function without creating a binding by
/// specifying a parameter list followed by `=>` and the function body. If your
/// function has just one parameter, the parentheses around the parameter list
/// are optional. Unnamed functions are mainly useful for show rules, but also

View File

@ -77,7 +77,6 @@ pub use {
indexmap::IndexMap,
};
use comemo::TrackedMut;
use ecow::EcoString;
use typst_syntax::Spanned;
@ -298,14 +297,5 @@ pub fn eval(
for (key, value) in dict {
scope.bind(key.into(), Binding::new(value, span));
}
(engine.routines.eval_string)(
engine.routines,
engine.world,
TrackedMut::reborrow_mut(&mut engine.sink),
&text,
span,
mode,
scope,
)
(engine.routines.eval_string)(engine.routines, engine.world, &text, span, mode, scope)
}

View File

@ -7,7 +7,7 @@ use typst_syntax::FileId;
use crate::diag::{bail, DeprecationSink, StrResult};
use crate::foundations::{repr, ty, Content, Scope, Value};
/// A module of definitions.
/// An module of definitions.
///
/// A module
/// - be built-in

View File

@ -5,7 +5,7 @@ use std::cmp::Ordering;
use ecow::eco_format;
use typst_utils::Numeric;
use crate::diag::{bail, HintedStrResult, StrResult};
use crate::diag::{bail, DeprecationSink, HintedStrResult, StrResult};
use crate::foundations::{
format_str, Datetime, IntoValue, Regex, Repr, SymbolElem, Value,
};
@ -21,7 +21,7 @@ macro_rules! mismatch {
}
/// Join a value with another value.
pub fn join(lhs: Value, rhs: Value) -> StrResult<Value> {
pub fn join(lhs: Value, rhs: Value, sink: &mut dyn DeprecationSink) -> StrResult<Value> {
use Value::*;
Ok(match (lhs, rhs) {
(a, None) => a,
@ -39,6 +39,17 @@ pub fn join(lhs: Value, rhs: Value) -> StrResult<Value> {
(Array(a), Array(b)) => Array(a + b),
(Dict(a), Dict(b)) => Dict(a + b),
(Args(a), Args(b)) => Args(a + b),
// Type compatibility.
(Type(a), Str(b)) => {
warn_type_str_join(sink);
Str(format_str!("{a}{b}"))
}
(Str(a), Type(b)) => {
warn_type_str_join(sink);
Str(format_str!("{a}{b}"))
}
(a, b) => mismatch!("cannot join {} with {}", a, b),
})
}
@ -88,7 +99,11 @@ pub fn neg(value: Value) -> HintedStrResult<Value> {
}
/// Compute the sum of two values.
pub fn add(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
pub fn add(
lhs: Value,
rhs: Value,
sink: &mut dyn DeprecationSink,
) -> HintedStrResult<Value> {
use Value::*;
Ok(match (lhs, rhs) {
(a, None) => a,
@ -156,6 +171,16 @@ pub fn add(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
(Datetime(a), Duration(b)) => Datetime(a + b),
(Duration(a), Datetime(b)) => Datetime(b + a),
// Type compatibility.
(Type(a), Str(b)) => {
warn_type_str_add(sink);
Str(format_str!("{a}{b}"))
}
(Str(a), Type(b)) => {
warn_type_str_add(sink);
Str(format_str!("{a}{b}"))
}
(Dyn(a), Dyn(b)) => {
// Alignments can be summed.
if let (Some(&a), Some(&b)) =
@ -394,13 +419,21 @@ pub fn or(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
}
/// Compute whether two values are equal.
pub fn eq(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
Ok(Value::Bool(equal(&lhs, &rhs)))
pub fn eq(
lhs: Value,
rhs: Value,
sink: &mut dyn DeprecationSink,
) -> HintedStrResult<Value> {
Ok(Value::Bool(equal(&lhs, &rhs, sink)))
}
/// Compute whether two values are unequal.
pub fn neq(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
Ok(Value::Bool(!equal(&lhs, &rhs)))
pub fn neq(
lhs: Value,
rhs: Value,
sink: &mut dyn DeprecationSink,
) -> HintedStrResult<Value> {
Ok(Value::Bool(!equal(&lhs, &rhs, sink)))
}
macro_rules! comparison {
@ -419,7 +452,7 @@ comparison!(gt, ">", Ordering::Greater);
comparison!(geq, ">=", Ordering::Greater | Ordering::Equal);
/// Determine whether two values are equal.
pub fn equal(lhs: &Value, rhs: &Value) -> bool {
pub fn equal(lhs: &Value, rhs: &Value, sink: &mut dyn DeprecationSink) -> bool {
use Value::*;
match (lhs, rhs) {
// Compare reflexively.
@ -463,6 +496,12 @@ pub fn equal(lhs: &Value, rhs: &Value) -> bool {
rat == rel.rel && rel.abs.is_zero()
}
// Type compatibility.
(Type(ty), Str(str)) | (Str(str), Type(ty)) => {
warn_type_str_equal(sink, str);
ty.compat_name() == str.as_str()
}
_ => false,
}
}
@ -534,8 +573,12 @@ fn try_cmp_arrays(a: &[Value], b: &[Value]) -> StrResult<Ordering> {
}
/// Test whether one value is "in" another one.
pub fn in_(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
if let Some(b) = contains(&lhs, &rhs) {
pub fn in_(
lhs: Value,
rhs: Value,
sink: &mut dyn DeprecationSink,
) -> HintedStrResult<Value> {
if let Some(b) = contains(&lhs, &rhs, sink) {
Ok(Value::Bool(b))
} else {
mismatch!("cannot apply 'in' to {} and {}", lhs, rhs)
@ -543,8 +586,12 @@ pub fn in_(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
}
/// Test whether one value is "not in" another one.
pub fn not_in(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
if let Some(b) = contains(&lhs, &rhs) {
pub fn not_in(
lhs: Value,
rhs: Value,
sink: &mut dyn DeprecationSink,
) -> HintedStrResult<Value> {
if let Some(b) = contains(&lhs, &rhs, sink) {
Ok(Value::Bool(!b))
} else {
mismatch!("cannot apply 'not in' to {} and {}", lhs, rhs)
@ -552,13 +599,27 @@ pub fn not_in(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
}
/// Test for containment.
pub fn contains(lhs: &Value, rhs: &Value) -> Option<bool> {
pub fn contains(
lhs: &Value,
rhs: &Value,
sink: &mut dyn DeprecationSink,
) -> Option<bool> {
use Value::*;
match (lhs, rhs) {
(Str(a), Str(b)) => Some(b.as_str().contains(a.as_str())),
(Dyn(a), Str(b)) => a.downcast::<Regex>().map(|regex| regex.is_match(b)),
(Str(a), Dict(b)) => Some(b.contains(a)),
(a, Array(b)) => Some(b.contains(a.clone())),
(a, Array(b)) => Some(b.contains_impl(a, sink)),
// Type compatibility.
(Type(a), Str(b)) => {
warn_type_in_str(sink);
Some(b.as_str().contains(a.compat_name()))
}
(Type(a), Dict(b)) => {
warn_type_in_dict(sink);
Some(b.contains(a.compat_name()))
}
_ => Option::None,
}
@ -568,3 +629,90 @@ pub fn contains(lhs: &Value, rhs: &Value) -> Option<bool> {
fn too_large() -> &'static str {
"value is too large"
}
#[cold]
fn warn_type_str_add(sink: &mut dyn DeprecationSink) {
sink.emit_with_hints(
"adding strings and types is deprecated",
&["convert the type to a string with `str` first"],
);
}
#[cold]
fn warn_type_str_join(sink: &mut dyn DeprecationSink) {
sink.emit_with_hints(
"joining strings and types is deprecated",
&["convert the type to a string with `str` first"],
);
}
#[cold]
fn warn_type_str_equal(sink: &mut dyn DeprecationSink, s: &str) {
// Only warn if `s` looks like a type name to prevent false positives.
if is_compat_type_name(s) {
sink.emit_with_hints(
"comparing strings with types is deprecated",
&[
"compare with the literal type instead",
"this comparison will always return `false` in future Typst releases",
],
);
}
}
#[cold]
fn warn_type_in_str(sink: &mut dyn DeprecationSink) {
sink.emit_with_hints(
"checking whether a type is contained in a string is deprecated",
&["this compatibility behavior only exists because `type` used to return a string"],
);
}
#[cold]
fn warn_type_in_dict(sink: &mut dyn DeprecationSink) {
sink.emit_with_hints(
"checking whether a type is contained in a dictionary is deprecated",
&["this compatibility behavior only exists because `type` used to return a string"],
);
}
fn is_compat_type_name(s: &str) -> bool {
matches!(
s,
"boolean"
| "alignment"
| "angle"
| "arguments"
| "array"
| "bytes"
| "color"
| "content"
| "counter"
| "datetime"
| "decimal"
| "dictionary"
| "direction"
| "duration"
| "float"
| "fraction"
| "function"
| "gradient"
| "integer"
| "label"
| "length"
| "location"
| "module"
| "pattern"
| "ratio"
| "regex"
| "relative length"
| "selector"
| "state"
| "string"
| "stroke"
| "symbol"
| "tiling"
| "type"
| "version"
)
}

View File

@ -300,7 +300,7 @@ impl Binding {
/// As the `sink`
/// - pass `()` to ignore the message.
/// - pass `(&mut engine, span)` to emit a warning into the engine.
pub fn read_checked(&self, sink: impl DeprecationSink) -> &Value {
pub fn read_checked(&self, mut sink: impl DeprecationSink) -> &Value {
if let Some(message) = self.deprecation {
sink.emit(message);
}

View File

@ -7,13 +7,12 @@ use comemo::Tracked;
use ecow::EcoString;
use serde::{Deserialize, Serialize};
use typst_syntax::{Span, Spanned};
use unicode_normalization::UnicodeNormalization;
use unicode_segmentation::UnicodeSegmentation;
use crate::diag::{bail, At, SourceResult, StrResult};
use crate::engine::Engine;
use crate::foundations::{
cast, dict, func, repr, scope, ty, Array, Bytes, Cast, Context, Decimal, Dict, Func,
cast, dict, func, repr, scope, ty, Array, Bytes, Context, Decimal, Dict, Func,
IntoValue, Label, Repr, Type, Value, Version,
};
use crate::layout::Alignment;
@ -287,30 +286,6 @@ impl Str {
Ok(c.into())
}
/// Normalizes the string to the given Unicode normal form.
///
/// This is useful when manipulating strings containing Unicode combining
/// characters.
///
/// ```typ
/// #assert.eq("é".normalize(form: "nfd"), "e\u{0301}")
/// #assert.eq("ſ́".normalize(form: "nfkc"), "ś")
/// ```
#[func]
pub fn normalize(
&self,
#[named]
#[default(UnicodeNormalForm::Nfc)]
form: UnicodeNormalForm,
) -> Str {
match form {
UnicodeNormalForm::Nfc => self.nfc().collect(),
UnicodeNormalForm::Nfd => self.nfd().collect(),
UnicodeNormalForm::Nfkc => self.nfkc().collect(),
UnicodeNormalForm::Nfkd => self.nfkd().collect(),
}
}
/// Whether the string contains the specified pattern.
///
/// This method also has dedicated syntax: You can write `{"bc" in "abcd"}`
@ -813,25 +788,6 @@ cast! {
v: Str => Self::Str(v),
}
/// A Unicode normalization form.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)]
pub enum UnicodeNormalForm {
/// Canonical composition where e.g. accented letters are turned into a
/// single Unicode codepoint.
#[string("nfc")]
Nfc,
/// Canonical decomposition where e.g. accented letters are split into a
/// separate base and diacritic.
#[string("nfd")]
Nfd,
/// Like NFC, but using the Unicode compatibility decompositions.
#[string("nfkc")]
Nfkc,
/// Like NFD, but using the Unicode compatibility decompositions.
#[string("nfkd")]
Nfkd,
}
/// Convert an item of std's `match_indices` to a dictionary.
fn match_to_dict((start, text): (usize, &str)) -> Dict {
dict! {

View File

@ -471,8 +471,7 @@ impl Debug for Recipe {
selector.fmt(f)?;
f.write_str(", ")?;
}
self.transform.fmt(f)?;
f.write_str(")")
self.transform.fmt(f)
}
}

View File

@ -21,7 +21,6 @@ use crate::foundations::{
/// be accessed using [field access notation]($scripting/#fields):
///
/// - General symbols are defined in the [`sym` module]($category/symbols/sym)
/// and are accessible without the `sym.` prefix in math mode.
/// - Emoji are defined in the [`emoji` module]($category/symbols/emoji)
///
/// Moreover, you can define custom symbols with this type's constructor
@ -411,7 +410,7 @@ fn find<'a>(
}
let score = (matching, Reverse(total));
if best_score.is_none_or(|b| score > b) {
if best_score.map_or(true, |b| score > b) {
best = Some(candidate.1);
best_score = Some(score);
}

View File

@ -39,25 +39,21 @@ use crate::foundations::{
/// #type(image("glacier.jpg")).
/// ```
///
/// The type of `{10}` is `int`. Now, what is the type of `int` or even `type`?
/// The type of `10` is `int`. Now, what is the type of `int` or even `type`?
/// ```example
/// #type(int) \
/// #type(type)
/// ```
///
/// Unlike other types like `int`, [none] and [auto] do not have a name
/// representing them. To test if a value is one of these, compare your value to
/// them directly, e.g:
/// ```example
/// #let val = none
/// #if val == none [
/// Yep, it's none.
/// ]
/// ```
/// # Compatibility
/// In Typst 0.7 and lower, the `type` function returned a string instead of a
/// type. Compatibility with the old way will remain until Typst 0.14 to give
/// package authors time to upgrade.
///
/// Note that `type` will return [`content`] for all document elements. To
/// programmatically determine which kind of content you are dealing with, see
/// [`content.func`].
/// - Checks like `{int == "integer"}` evaluate to `{true}`
/// - Adding/joining a type and string will yield a string
/// - The `{in}` operator on a type and a dictionary will evaluate to `{true}`
/// if the dictionary has a string key matching the type's name
#[ty(scope, cast)]
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Type(Static<NativeTypeData>);
@ -120,6 +116,14 @@ impl Type {
}
}
// Type compatibility.
impl Type {
/// The type's backward-compatible name.
pub fn compat_name(&self) -> &str {
self.long_name()
}
}
#[scope]
impl Type {
/// Determines a value's type.

View File

@ -292,7 +292,8 @@ impl Repr for Value {
impl PartialEq for Value {
fn eq(&self, other: &Self) -> bool {
ops::equal(self, other)
// No way to emit deprecation warnings here :(
ops::equal(self, other, &mut ())
}
}

View File

@ -229,10 +229,10 @@ impl Counter {
if self.is_page() {
let at_delta =
engine.introspector.page(location).get().saturating_sub(at_page.get());
at_state.step(NonZeroUsize::ONE, at_delta as u64);
at_state.step(NonZeroUsize::ONE, at_delta);
let final_delta =
engine.introspector.pages().get().saturating_sub(final_page.get());
final_state.step(NonZeroUsize::ONE, final_delta as u64);
final_state.step(NonZeroUsize::ONE, final_delta);
}
Ok(CounterState(smallvec![at_state.first(), final_state.first()]))
}
@ -250,7 +250,7 @@ impl Counter {
if self.is_page() {
let delta =
engine.introspector.page(location).get().saturating_sub(page.get());
state.step(NonZeroUsize::ONE, delta as u64);
state.step(NonZeroUsize::ONE, delta);
}
Ok(state)
}
@ -319,7 +319,7 @@ impl Counter {
let delta = page.get() - prev.get();
if delta > 0 {
state.step(NonZeroUsize::ONE, delta as u64);
state.step(NonZeroUsize::ONE, delta);
}
}
@ -500,7 +500,7 @@ impl Counter {
let (mut state, page) = sequence.last().unwrap().clone();
if self.is_page() {
let delta = engine.introspector.pages().get().saturating_sub(page.get());
state.step(NonZeroUsize::ONE, delta as u64);
state.step(NonZeroUsize::ONE, delta);
}
Ok(state)
}
@ -616,13 +616,13 @@ pub trait Count {
/// Counts through elements with different levels.
#[derive(Debug, Clone, PartialEq, Hash)]
pub struct CounterState(pub SmallVec<[u64; 3]>);
pub struct CounterState(pub SmallVec<[usize; 3]>);
impl CounterState {
/// Get the initial counter state for the key.
pub fn init(page: bool) -> Self {
// Special case, because pages always start at one.
Self(smallvec![u64::from(page)])
Self(smallvec![usize::from(page)])
}
/// Advance the counter and return the numbers for the given heading.
@ -645,7 +645,7 @@ impl CounterState {
}
/// Advance the number of the given level by the specified amount.
pub fn step(&mut self, level: NonZeroUsize, by: u64) {
pub fn step(&mut self, level: NonZeroUsize, by: usize) {
let level = level.get();
while self.0.len() < level {
@ -657,7 +657,7 @@ impl CounterState {
}
/// Get the first number of the state.
pub fn first(&self) -> u64 {
pub fn first(&self) -> usize {
self.0.first().copied().unwrap_or(1)
}
@ -675,7 +675,7 @@ impl CounterState {
cast! {
CounterState,
self => Value::Array(self.0.into_iter().map(IntoValue::into_value).collect()),
num: u64 => Self(smallvec![num]),
num: usize => Self(smallvec![num]),
array: Array => Self(array
.into_iter()
.map(Value::cast)
@ -758,7 +758,7 @@ impl Show for Packed<CounterDisplayElem> {
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct ManualPageCounter {
physical: NonZeroUsize,
logical: u64,
logical: usize,
}
impl ManualPageCounter {
@ -773,7 +773,7 @@ impl ManualPageCounter {
}
/// Get the current logical page counter state.
pub fn logical(&self) -> u64 {
pub fn logical(&self) -> usize {
self.logical
}

View File

@ -50,42 +50,6 @@ impl Dir {
pub const TTB: Self = Self::TTB;
pub const BTT: Self = Self::BTT;
/// Returns a direction from a starting point.
///
/// ```example
/// direction.from(left) \
/// direction.from(right) \
/// direction.from(top) \
/// direction.from(bottom)
/// ```
#[func]
pub const fn from(side: Side) -> Dir {
match side {
Side::Left => Self::LTR,
Side::Right => Self::RTL,
Side::Top => Self::TTB,
Side::Bottom => Self::BTT,
}
}
/// Returns a direction from an end point.
///
/// ```example
/// direction.to(left) \
/// direction.to(right) \
/// direction.to(top) \
/// direction.to(bottom)
/// ```
#[func]
pub const fn to(side: Side) -> Dir {
match side {
Side::Right => Self::LTR,
Side::Left => Self::RTL,
Side::Bottom => Self::TTB,
Side::Top => Self::BTT,
}
}
/// The axis this direction belongs to, either `{"horizontal"}` or
/// `{"vertical"}`.
///
@ -101,22 +65,6 @@ impl Dir {
}
}
/// The corresponding sign, for use in calculations.
///
/// ```example
/// #ltr.sign() \
/// #rtl.sign() \
/// #ttb.sign() \
/// #btt.sign()
/// ```
#[func]
pub const fn sign(self) -> i64 {
match self {
Self::LTR | Self::TTB => 1,
Self::RTL | Self::BTT => -1,
}
}
/// The start point of this direction, as an alignment.
///
/// ```example

File diff suppressed because it is too large Load Diff

View File

@ -22,8 +22,7 @@ use crate::layout::{BlockElem, Size};
/// #let text = lorem(30)
/// #layout(size => [
/// #let (height,) = measure(
/// width: size.width,
/// text,
/// block(width: size.width, text),
/// )
/// This text is #height high with
/// the current page width: \

View File

@ -75,10 +75,9 @@ pub struct PageElem {
/// The height of the page.
///
/// If this is set to `{auto}`, page breaks can only be triggered manually
/// by inserting a [page break]($pagebreak) or by adding another non-empty
/// page set rule. Most examples throughout this documentation use `{auto}`
/// for the height of the page to dynamically grow and shrink to fit their
/// content.
/// by inserting a [page break]($pagebreak). Most examples throughout this
/// documentation use `{auto}` for the height of the page to dynamically
/// grow and shrink to fit their content.
#[resolve]
#[parse(
args.named("height")?
@ -484,7 +483,7 @@ pub struct Page {
pub supplement: Content,
/// The logical page number (controlled by `counter(page)` and may thus not
/// match the physical number).
pub number: u64,
pub number: usize,
}
impl Page {

View File

@ -8,35 +8,15 @@ use crate::foundations::{repr, ty, Repr};
/// A ratio of a whole.
///
/// A ratio is written as a number, followed by a percent sign. Ratios most
/// often appear as part of a [relative length]($relative), to specify the size
/// of some layout element relative to the page or some container.
/// Written as a number, followed by a percent sign.
///
/// # Example
/// ```example
/// #rect(width: 25%)
/// #set align(center)
/// #scale(x: 150%)[
/// Scaled apart.
/// ]
/// ```
///
/// However, they can also describe any other property that is relative to some
/// base, e.g. an amount of [horizontal scaling]($scale.x) or the
/// [height of parentheses]($math.lr.size) relative to the height of the content
/// they enclose.
///
/// # Scripting
/// Within your own code, you can use ratios as you like. You can multiply them
/// with various other types as shown below:
///
/// | Multiply by | Example | Result |
/// |-----------------|-------------------------|-----------------|
/// | [`ratio`] | `{27% * 10%}` | `{2.7%}` |
/// | [`length`] | `{27% * 100pt}` | `{27pt}` |
/// | [`relative`] | `{27% * (10% + 100pt)}` | `{2.7% + 27pt}` |
/// | [`angle`] | `{27% * 100deg}` | `{27deg}` |
/// | [`int`] | `{27% * 2}` | `{54%}` |
/// | [`float`] | `{27% * 0.37037}` | `{10%}` |
/// | [`fraction`] | `{27% * 3fr}` | `{0.81fr}` |
///
/// When ratios are displayed in the document, they are rounded to two
/// significant digits for readability.
#[ty(cast)]
#[derive(Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Ratio(Scalar);

View File

@ -14,58 +14,17 @@ use crate::layout::{Abs, Em, Length, Ratio};
/// addition and subtraction of a length and a ratio. Wherever a relative length
/// is expected, you can also use a bare length or ratio.
///
/// # Relative to the page
/// A common use case is setting the width or height of a layout element (e.g.,
/// [block], [rect], etc.) as a certain percentage of the width of the page.
/// Here, the rectangle's width is set to `{25%}`, so it takes up one fourth of
/// the page's _inner_ width (the width minus margins).
///
/// # Example
/// ```example
/// #rect(width: 25%)
/// #rect(width: 100% - 50pt)
///
/// #(100% - 50pt).length \
/// #(100% - 50pt).ratio
/// ```
///
/// Bare lengths or ratios are always valid where relative lengths are expected,
/// but the two can also be freely mixed:
/// ```example
/// #rect(width: 25% + 1cm)
/// ```
///
/// If you're trying to size an element so that it takes up the page's _full_
/// width, you have a few options (this highly depends on your exact use case):
///
/// 1. Set page margins to `{0pt}` (`[#set page(margin: 0pt)]`)
/// 2. Multiply the ratio by the known full page width (`{21cm * 69%}`)
/// 3. Use padding which will negate the margins (`[#pad(x: -2.5cm, ...)]`)
/// 4. Use the page [background](page.background) or
/// [foreground](page.foreground) field as those don't take margins into
/// account (note that it will render the content outside of the document
/// flow, see [place] to control the content position)
///
/// # Relative to a container
/// When a layout element (e.g. a [rect]) is nested in another layout container
/// (e.g. a [block]) instead of being a direct descendant of the page, relative
/// widths become relative to the container:
///
/// ```example
/// #block(
/// width: 100pt,
/// fill: aqua,
/// rect(width: 50%),
/// )
/// ```
///
/// # Scripting
/// You can multiply relative lengths by [ratios]($ratio), [integers]($int), and
/// [floats]($float).
///
/// A relative length has the following fields:
/// - `length`: Its length component.
/// - `ratio`: Its ratio component.
///
/// ```example
/// #(100% - 50pt).length \
/// #(100% - 50pt).ratio
/// ```
#[ty(cast, name = "relative", title = "Relative Length")]
#[derive(Default, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Rel<T: Numeric = Length> {

View File

@ -307,20 +307,6 @@ impl Transform {
Self { sx, sy, ..Self::identity() }
}
/// A scale transform at a specific position.
pub fn scale_at(sx: Ratio, sy: Ratio, px: Abs, py: Abs) -> Self {
Self::translate(px, py)
.pre_concat(Self::scale(sx, sy))
.pre_concat(Self::translate(-px, -py))
}
/// A rotate transform at a specific position.
pub fn rotate_at(angle: Angle, px: Abs, py: Abs) -> Self {
Self::translate(px, py)
.pre_concat(Self::rotate(angle))
.pre_concat(Self::translate(-px, -py))
}
/// A rotate transform.
pub fn rotate(angle: Angle) -> Self {
let cos = Ratio::new(angle.cos());

View File

@ -13,8 +13,8 @@ use crate::math::Mathy;
/// ```
#[elem(Mathy)]
pub struct AccentElem {
/// The base to which the accent is applied. May consist of multiple
/// letters.
/// The base to which the accent is applied.
/// May consist of multiple letters.
///
/// ```example
/// $arrow(A B C)$
@ -51,24 +51,9 @@ pub struct AccentElem {
pub accent: Accent,
/// The size of the accent, relative to the width of the base.
///
/// ```example
/// $dash(A, size: #150%)$
/// ```
#[resolve]
#[default(Rel::one())]
pub size: Rel<Length>,
/// Whether to remove the dot on top of lowercase i and j when adding a top
/// accent.
///
/// This enables the `dtls` OpenType feature.
///
/// ```example
/// $hat(dotless: #false, i)$
/// ```
#[default(true)]
pub dotless: bool,
}
/// An accent character.
@ -118,18 +103,11 @@ macro_rules! accents {
/// The size of the accent, relative to the width of the base.
#[named]
size: Option<Rel<Length>>,
/// Whether to remove the dot on top of lowercase i and j when
/// adding a top accent.
#[named]
dotless: Option<bool>,
) -> Content {
let mut accent = AccentElem::new(base, Accent::new($primary));
if let Some(size) = size {
accent = accent.with_size(size);
}
if let Some(dotless) = dotless {
accent = accent.with_dotless(dotless);
}
accent.pack()
}
)+

View File

@ -15,7 +15,7 @@ use crate::math::Mathy;
/// # Syntax
/// This function also has dedicated syntax: Use a slash to turn neighbouring
/// expressions into a fraction. Multiple atoms can be grouped into a single
/// expression using round grouping parentheses. Such parentheses are removed
/// expression using round grouping parenthesis. Such parentheses are removed
/// from the output, but you can nest multiple to force them.
#[elem(title = "Fraction", Mathy)]
pub struct FracElem {

View File

@ -6,7 +6,7 @@ use std::num::NonZeroUsize;
use std::path::Path;
use std::sync::{Arc, LazyLock};
use comemo::{Track, Tracked};
use comemo::Tracked;
use ecow::{eco_format, EcoString, EcoVec};
use hayagriva::archive::ArchivedStyle;
use hayagriva::io::BibLaTeXError;
@ -20,7 +20,7 @@ use typst_syntax::{Span, Spanned};
use typst_utils::{Get, ManuallyHash, NonZeroExt, PicoStr};
use crate::diag::{bail, error, At, FileError, HintedStrResult, SourceResult, StrResult};
use crate::engine::{Engine, Sink};
use crate::engine::Engine;
use crate::foundations::{
elem, Bytes, CastInfo, Content, Derived, FromValue, IntoValue, Label, NativeElement,
OneOrMultiple, Packed, Reflect, Scope, Show, ShowSet, Smart, StyleChain, Styles,
@ -94,7 +94,7 @@ pub struct BibliographyElem {
/// - A path string to load a bibliography file from the given path. For
/// more details about paths, see the [Paths section]($syntax/#paths).
/// - Raw bytes from which the bibliography should be decoded.
/// - An array where each item is one of the above.
/// - An array where each item is one the above.
#[required]
#[parse(
let sources = args.expect("sources")?;
@ -999,8 +999,6 @@ impl ElemRenderer<'_> {
(self.routines.eval_string)(
self.routines,
self.world,
// TODO: propagate warnings
Sink::new().track_mut(),
math,
self.span,
EvalMode::Math,

View File

@ -129,7 +129,7 @@ pub struct EnumElem {
/// [Ahead],
/// )
/// ```
pub start: Smart<u64>,
pub start: Smart<usize>,
/// Whether to display the full numbering, including the numbers of
/// all parent enumerations.
@ -217,7 +217,7 @@ pub struct EnumElem {
#[internal]
#[fold]
#[ghost]
pub parents: SmallVec<[u64; 4]>,
pub parents: SmallVec<[usize; 4]>,
}
#[scope]
@ -259,11 +259,10 @@ impl Show for Packed<EnumElem> {
.spanned(self.span());
if tight {
let spacing = self
.spacing(styles)
.unwrap_or_else(|| ParElem::leading_in(styles).into());
let v = VElem::new(spacing.into()).with_weak(true).with_attach(true).pack();
realized = v + realized;
let leading = ParElem::leading_in(styles);
let spacing =
VElem::new(leading.into()).with_weak(true).with_attach(true).pack();
realized = spacing + realized;
}
Ok(realized)
@ -275,7 +274,7 @@ impl Show for Packed<EnumElem> {
pub struct EnumItem {
/// The item's number.
#[positional]
pub number: Option<u64>,
pub number: Option<usize>,
/// The item's body.
#[required]

View File

@ -457,7 +457,7 @@ impl Outlinable for Packed<FigureElem> {
/// customize the appearance of captions for all figures or figures of a
/// specific kind.
///
/// In addition to its `position` and `body`, the `caption` also provides the
/// In addition to its `pos` and `body`, the `caption` also provides the
/// figure's `kind`, `supplement`, `counter`, and `numbering` as fields. These
/// parts can be used in [`where`]($function.where) selectors and show rules to
/// build a completely custom caption.

View File

@ -166,11 +166,10 @@ impl Show for Packed<ListElem> {
.spanned(self.span());
if tight {
let spacing = self
.spacing(styles)
.unwrap_or_else(|| ParElem::leading_in(styles).into());
let v = VElem::new(spacing.into()).with_weak(true).with_attach(true).pack();
realized = v + realized;
let leading = ParElem::leading_in(styles);
let spacing =
VElem::new(leading.into()).with_weak(true).with_attach(true).pack();
realized = spacing + realized;
}
Ok(realized)

View File

@ -1,7 +1,7 @@
use std::str::FromStr;
use chinese_number::{
from_u64_to_chinese_ten_thousand as u64_to_chinese, ChineseCase, ChineseVariant,
from_usize_to_chinese_ten_thousand as usize_to_chinese, ChineseCase, ChineseVariant,
};
use comemo::Tracked;
use ecow::{eco_format, EcoString, EcoVec};
@ -85,7 +85,7 @@ pub fn numbering(
/// If `numbering` is a pattern and more numbers than counting symbols are
/// given, the last counting symbol with its prefix is repeated.
#[variadic]
numbers: Vec<u64>,
numbers: Vec<usize>,
) -> SourceResult<Value> {
numbering.apply(engine, context, &numbers)
}
@ -105,7 +105,7 @@ impl Numbering {
&self,
engine: &mut Engine,
context: Tracked<Context>,
numbers: &[u64],
numbers: &[usize],
) -> SourceResult<Value> {
Ok(match self {
Self::Pattern(pattern) => Value::Str(pattern.apply(numbers).into()),
@ -156,7 +156,7 @@ pub struct NumberingPattern {
impl NumberingPattern {
/// Apply the pattern to the given number.
pub fn apply(&self, numbers: &[u64]) -> EcoString {
pub fn apply(&self, numbers: &[usize]) -> EcoString {
let mut fmt = EcoString::new();
let mut numbers = numbers.iter();
@ -185,7 +185,7 @@ impl NumberingPattern {
}
/// Apply only the k-th segment of the pattern to a number.
pub fn apply_kth(&self, k: usize, number: u64) -> EcoString {
pub fn apply_kth(&self, k: usize, number: usize) -> EcoString {
let mut fmt = EcoString::new();
if let Some((prefix, _)) = self.pieces.first() {
fmt.push_str(prefix);
@ -379,7 +379,7 @@ impl NumberingKind {
}
/// Apply the numbering to the given number.
pub fn apply(self, n: u64) -> EcoString {
pub fn apply(self, n: usize) -> EcoString {
match self {
Self::Arabic => eco_format!("{n}"),
Self::LowerRoman => roman_numeral(n, Case::Lower),
@ -392,10 +392,9 @@ impl NumberingKind {
}
const SYMBOLS: &[char] = &['*', '†', '‡', '§', '¶', '‖'];
let n_symbols = SYMBOLS.len() as u64;
let symbol = SYMBOLS[((n - 1) % n_symbols) as usize];
let amount = ((n - 1) / n_symbols) + 1;
std::iter::repeat_n(symbol, amount.try_into().unwrap()).collect()
let symbol = SYMBOLS[(n - 1) % SYMBOLS.len()];
let amount = ((n - 1) / SYMBOLS.len()) + 1;
std::iter::repeat(symbol).take(amount).collect()
}
Self::Hebrew => hebrew_numeral(n),
@ -490,16 +489,18 @@ impl NumberingKind {
}
Self::LowerSimplifiedChinese => {
u64_to_chinese(ChineseVariant::Simple, ChineseCase::Lower, n).into()
usize_to_chinese(ChineseVariant::Simple, ChineseCase::Lower, n).into()
}
Self::UpperSimplifiedChinese => {
u64_to_chinese(ChineseVariant::Simple, ChineseCase::Upper, n).into()
usize_to_chinese(ChineseVariant::Simple, ChineseCase::Upper, n).into()
}
Self::LowerTraditionalChinese => {
u64_to_chinese(ChineseVariant::Traditional, ChineseCase::Lower, n).into()
usize_to_chinese(ChineseVariant::Traditional, ChineseCase::Lower, n)
.into()
}
Self::UpperTraditionalChinese => {
u64_to_chinese(ChineseVariant::Traditional, ChineseCase::Upper, n).into()
usize_to_chinese(ChineseVariant::Traditional, ChineseCase::Upper, n)
.into()
}
Self::EasternArabic => decimal('\u{0660}', n),
@ -511,7 +512,7 @@ impl NumberingKind {
}
/// Stringify an integer to a Hebrew number.
fn hebrew_numeral(mut n: u64) -> EcoString {
fn hebrew_numeral(mut n: usize) -> EcoString {
if n == 0 {
return '-'.into();
}
@ -565,7 +566,7 @@ fn hebrew_numeral(mut n: u64) -> EcoString {
}
/// Stringify an integer to a Roman numeral.
fn roman_numeral(mut n: u64, case: Case) -> EcoString {
fn roman_numeral(mut n: usize, case: Case) -> EcoString {
if n == 0 {
return match case {
Case::Lower => 'n'.into(),
@ -621,7 +622,7 @@ fn roman_numeral(mut n: u64, case: Case) -> EcoString {
///
/// [converter]: https://www.russellcottrell.com/greek/utilities/GreekNumberConverter.htm
/// [numbers]: https://mathshistory.st-andrews.ac.uk/HistTopics/Greek_numbers/
fn greek_numeral(n: u64, case: Case) -> EcoString {
fn greek_numeral(n: usize, case: Case) -> EcoString {
let thousands = [
["͵α", "͵Α"],
["͵β", "͵Β"],
@ -682,7 +683,7 @@ fn greek_numeral(n: u64, case: Case) -> EcoString {
let mut decimal_digits: Vec<usize> = Vec::new();
let mut n = n;
while n > 0 {
decimal_digits.push((n % 10) as usize);
decimal_digits.push(n % 10);
n /= 10;
}
@ -777,16 +778,18 @@ fn greek_numeral(n: u64, case: Case) -> EcoString {
///
/// You might be familiar with this scheme from the way spreadsheet software
/// tends to label its columns.
fn zeroless<const N_DIGITS: usize>(alphabet: [char; N_DIGITS], mut n: u64) -> EcoString {
fn zeroless<const N_DIGITS: usize>(
alphabet: [char; N_DIGITS],
mut n: usize,
) -> EcoString {
if n == 0 {
return '-'.into();
}
let n_digits = N_DIGITS as u64;
let mut cs = EcoString::new();
while n > 0 {
n -= 1;
cs.push(alphabet[(n % n_digits) as usize]);
n /= n_digits;
cs.push(alphabet[n % N_DIGITS]);
n /= N_DIGITS;
}
cs.chars().rev().collect()
}
@ -794,7 +797,7 @@ fn zeroless<const N_DIGITS: usize>(alphabet: [char; N_DIGITS], mut n: u64) -> Ec
/// Stringify a number using a base-10 counting system with a zero digit.
///
/// This function assumes that the digits occupy contiguous codepoints.
fn decimal(start: char, mut n: u64) -> EcoString {
fn decimal(start: char, mut n: usize) -> EcoString {
if n == 0 {
return start.into();
}

View File

@ -388,7 +388,7 @@ pub struct OutlineEntry {
/// space between the entry's body and the page number. When using show
/// rules to override outline entries, it is thus recommended to wrap the
/// fill in a [`box`] with fractional width, i.e.
/// `{box(width: 1fr, it.fill)}`.
/// `{box(width: 1fr, it.fill}`.
///
/// When using [`repeat`], the [`gap`]($repeat.gap) property can be useful
/// to tweak the visual weight of the fill.

View File

@ -161,7 +161,7 @@ impl Show for Packed<QuoteElem> {
let block = self.block(styles);
let html = TargetElem::target_in(styles).is_html();
if self.quotes(styles).unwrap_or(!block) {
if self.quotes(styles) == Smart::Custom(true) || !block {
let quotes = SmartQuotes::get(
SmartQuoteElem::quotes_in(styles),
TextElem::lang_in(styles),

View File

@ -189,15 +189,13 @@ impl Show for Packed<TermsElem> {
.styled(TermsElem::set_within(true));
if tight {
let spacing = self
.spacing(styles)
.unwrap_or_else(|| ParElem::leading_in(styles).into());
let v = VElem::new(spacing.into())
let leading = ParElem::leading_in(styles);
let spacing = VElem::new(leading.into())
.with_weak(true)
.with_attach(true)
.pack()
.spanned(span);
realized = v + realized;
realized = spacing + realized;
}
Ok(realized)

View File

@ -55,7 +55,6 @@ routines! {
fn eval_string(
routines: &Routines,
world: Tracked<dyn World + '_>,
sink: TrackedMut<Sink>,
string: &str,
span: Span,
mode: EvalMode,

View File

@ -160,7 +160,7 @@ impl FontBook {
current.variant.weight.distance(variant.weight),
);
if best_key.is_none_or(|b| key < b) {
if best_key.map_or(true, |b| key < b) {
best = Some(id);
best_key = Some(key);
}

View File

@ -14,7 +14,7 @@ macro_rules! translation {
};
}
const TRANSLATIONS: [(&str, &str); 39] = [
const TRANSLATIONS: [(&str, &str); 38] = [
translation!("ar"),
translation!("bg"),
translation!("ca"),
@ -31,7 +31,6 @@ const TRANSLATIONS: [(&str, &str); 39] = [
translation!("el"),
translation!("he"),
translation!("hu"),
translation!("id"),
translation!("is"),
translation!("it"),
translation!("ja"),
@ -83,7 +82,6 @@ impl Lang {
pub const HEBREW: Self = Self(*b"he ", 2);
pub const HUNGARIAN: Self = Self(*b"hu ", 2);
pub const ICELANDIC: Self = Self(*b"is ", 2);
pub const INDONESIAN: Self = Self(*b"id ", 2);
pub const ITALIAN: Self = Self(*b"it ", 2);
pub const JAPANESE: Self = Self(*b"ja ", 2);
pub const LATIN: Self = Self(*b"la ", 2);

View File

@ -42,7 +42,7 @@ use ttf_parser::Tag;
use typst_syntax::Spanned;
use typst_utils::singleton;
use crate::diag::{bail, warning, HintedStrResult, SourceResult, StrResult};
use crate::diag::{bail, warning, HintedStrResult, SourceResult};
use crate::engine::Engine;
use crate::foundations::{
cast, dict, elem, Args, Array, Cast, Construct, Content, Dict, Fold, IntoValue,
@ -891,21 +891,9 @@ cast! {
}
/// Font family fallback list.
///
/// Must contain at least one font.
#[derive(Debug, Default, Clone, PartialEq, Hash)]
pub struct FontList(pub Vec<FontFamily>);
impl FontList {
pub fn new(fonts: Vec<FontFamily>) -> StrResult<Self> {
if fonts.is_empty() {
bail!("font fallback list must not be empty")
} else {
Ok(Self(fonts))
}
}
}
impl<'a> IntoIterator for &'a FontList {
type IntoIter = std::slice::Iter<'a, FontFamily>;
type Item = &'a FontFamily;
@ -923,7 +911,7 @@ cast! {
self.0.into_value()
},
family: FontFamily => Self(vec![family]),
values: Array => Self::new(values.into_iter().map(|v| v.cast()).collect::<HintedStrResult<_>>()?)?,
values: Array => Self(values.into_iter().map(|v| v.cast()).collect::<HintedStrResult<_>>()?),
}
/// Resolve a prioritized iterator over the font families.

View File

@ -188,7 +188,7 @@ pub struct RawElem {
/// - A path string to load a syntax file from the given path. For more
/// details about paths, see the [Paths section]($syntax/#paths).
/// - Raw bytes from which the syntax should be decoded.
/// - An array where each item is one of the above.
/// - An array where each item is one the above.
///
/// ````example
/// #set raw(syntaxes: "SExpressions.sublime-syntax")

View File

@ -159,7 +159,7 @@ fn is_shapable(engine: &Engine, text: &str, styles: StyleChain) -> bool {
{
let covers = family.covers();
return text.chars().all(|c| {
covers.is_none_or(|cov| cov.is_match(c.encode_utf8(&mut [0; 4])))
covers.map_or(true, |cov| cov.is_match(c.encode_utf8(&mut [0; 4])))
&& font.ttf().glyph_index(c).is_some()
});
}

View File

@ -238,7 +238,7 @@ impl<'s> SmartQuotes<'s> {
"cs" | "de" | "et" | "is" | "lt" | "lv" | "sk" | "sl" => low_high,
"da" => ("", "", "", ""),
"fr" | "ru" if alternative => default,
"fr" => ("", "", "«\u{202F}", "\u{202F}»"),
"fr" => ("\u{00A0}", "\u{00A0}", "«\u{00A0}", "\u{00A0}»"),
"fi" | "sv" if alternative => ("", "", "»", "»"),
"bs" | "fi" | "sv" => ("", "", "", ""),
"it" if alternative => default,

View File

@ -130,7 +130,7 @@ static TO_SRGB: LazyLock<qcms::Transform> = LazyLock::new(|| {
///
/// # Predefined color maps
/// Typst also includes a number of preset color maps that can be used for
/// [gradients]($gradient/#stops). These are simply arrays of colors defined in
/// [gradients]($gradient.linear). These are simply arrays of colors defined in
/// the module `color.map`.
///
/// ```example
@ -148,11 +148,11 @@ static TO_SRGB: LazyLock<qcms::Transform> = LazyLock::new(|| {
/// | `magma` | A black to purple to yellow color map. |
/// | `plasma` | A purple to pink to yellow color map. |
/// | `rocket` | A black to red to white color map. |
/// | `mako` | A black to teal to white color map. |
/// | `mako` | A black to teal to yellow color map. |
/// | `vlag` | A light blue to white to red color map. |
/// | `icefire` | A light teal to black to orange color map. |
/// | `icefire` | A light teal to black to yellow color map. |
/// | `flare` | A orange to purple color map that is perceptually uniform. |
/// | `crest` | A light green to blue color map. |
/// | `crest` | A blue to white to red color map. |
///
/// Some popular presets are not included because they are not available under a
/// free licence. Others, like

View File

@ -10,8 +10,6 @@ use crate::foundations::{
use crate::layout::{Abs, Axes, BlockElem, Length, Point, Rel, Size};
use crate::visualize::{FillRule, Paint, Stroke};
use super::FixedStroke;
/// A curve consisting of movements, lines, and Bézier segments.
///
/// At any point in time, there is a conceptual pen or cursor.
@ -532,65 +530,3 @@ impl Curve {
Size::new(max_x - min_x, max_y - min_y)
}
}
impl Curve {
    /// Translate the curve's items into kurbo path elements.
    fn to_kurbo(&self) -> impl Iterator<Item = kurbo::PathEl> + '_ {
        use kurbo::PathEl;
        self.0.iter().map(|item| match *item {
            CurveItem::Move(p) => PathEl::MoveTo(point_to_kurbo(p)),
            CurveItem::Line(p) => PathEl::LineTo(point_to_kurbo(p)),
            CurveItem::Cubic(c1, c2, end) => PathEl::CurveTo(
                point_to_kurbo(c1),
                point_to_kurbo(c2),
                point_to_kurbo(end),
            ),
            CurveItem::Close => PathEl::ClosePath,
        })
    }

    /// When this curve is interpreted as a clip mask, would it contain `point`?
    pub fn contains(&self, fill_rule: FillRule, needle: Point) -> bool {
        let path = kurbo::BezPath::from_vec(self.to_kurbo().collect());
        let winding = kurbo::Shape::winding(&path, point_to_kurbo(needle));
        if let FillRule::NonZero = fill_rule {
            winding != 0
        } else {
            winding % 2 != 0
        }
    }

    /// When this curve is stroked with `stroke`, would the stroke contain
    /// `point`?
    pub fn stroke_contains(&self, stroke: &FixedStroke, needle: Point) -> bool {
        // Mirror the stroke's cap and join settings in kurbo's vocabulary.
        let cap = match stroke.cap {
            super::LineCap::Butt => kurbo::Cap::Butt,
            super::LineCap::Round => kurbo::Cap::Round,
            super::LineCap::Square => kurbo::Cap::Square,
        };
        let join = match stroke.join {
            super::LineJoin::Miter => kurbo::Join::Miter,
            super::LineJoin::Round => kurbo::Join::Round,
            super::LineJoin::Bevel => kurbo::Join::Bevel,
        };

        let mut style = kurbo::Stroke::new(stroke.thickness.to_raw())
            .with_caps(cap)
            .with_join(join)
            .with_miter_limit(stroke.miter_limit.get());

        // Replicate the dash pattern, if one is set.
        if let Some(dash) = &stroke.dash {
            let lengths = dash.array.iter().copied().map(Abs::to_raw);
            style = style.with_dashes(dash.phase.to_raw(), lengths);
        }

        // Expand the stroke into a filled outline and test that outline
        // for containment of the needle point.
        let tolerance = 0.01;
        let opts = kurbo::StrokeOpts::default();
        let expanded = kurbo::stroke(self.to_kurbo(), &style, &opts, tolerance);
        kurbo::Shape::contains(&expanded, point_to_kurbo(needle))
    }
}
/// Convert a point to a kurbo point, using the raw scalar values of its
/// coordinates.
fn point_to_kurbo(point: Point) -> kurbo::Point {
    kurbo::Point::new(point.x.to_raw(), point.y.to_raw())
}

View File

@ -70,9 +70,6 @@ use crate::visualize::{Color, ColorSpace, WeightedColor};
/// the offsets when defining a gradient. In this case, Typst will space all
/// stops evenly.
///
/// Typst predefines color maps that you can use as stops. See the
/// [`color`]($color/#predefined-color-maps) documentation for more details.
///
/// # Relativeness
/// The location of the `{0%}` and `{100%}` stops depends on the dimensions
/// of a container. This container can either be the shape that it is being
@ -120,12 +117,12 @@ use crate::visualize::{Color, ColorSpace, WeightedColor};
/// #let spaces = (
/// ("Oklab", color.oklab),
/// ("Oklch", color.oklch),
/// ("sRGB", color.rgb),
/// ("linear-RGB", color.linear-rgb),
/// ("sRGB", color.rgb),
/// ("CMYK", color.cmyk),
/// ("Grayscale", color.luma),
/// ("HSL", color.hsl),
/// ("HSV", color.hsv),
/// ("Grayscale", color.luma),
/// )
///
/// #for (name, space) in spaces {
@ -160,6 +157,10 @@ use crate::visualize::{Color, ColorSpace, WeightedColor};
/// )
/// ```
///
/// # Presets
/// Typst predefines color maps that you can use with your gradients. See the
/// [`color`]($color/#predefined-color-maps) documentation for more details.
///
/// # Note on file sizes
///
/// Gradients can be quite large, especially if they have many stops. This is
@ -287,7 +288,7 @@ impl Gradient {
/// )),
/// )
/// ```
#[func(title = "Radial Gradient")]
#[func]
fn radial(
span: Span,
/// The color [stops](#stops) of the gradient.
@ -401,7 +402,7 @@ impl Gradient {
/// )),
/// )
/// ```
#[func(title = "Conic Gradient")]
#[func]
pub fn conic(
span: Span,
/// The color [stops](#stops) of the gradient.
@ -574,7 +575,8 @@ impl Gradient {
}
let n = repetitions.v;
let mut stops = std::iter::repeat_n(self.stops_ref(), n)
let mut stops = std::iter::repeat(self.stops_ref())
.take(n)
.enumerate()
.flat_map(|(i, stops)| {
let mut stops = stops

View File

@ -3,8 +3,6 @@ use std::hash::{Hash, Hasher};
use std::io;
use std::sync::Arc;
use crate::diag::{bail, StrResult};
use crate::foundations::{cast, dict, Bytes, Cast, Dict, Smart, Value};
use ecow::{eco_format, EcoString};
use image::codecs::gif::GifDecoder;
use image::codecs::jpeg::JpegDecoder;
@ -13,6 +11,9 @@ use image::{
guess_format, DynamicImage, ImageBuffer, ImageDecoder, ImageResult, Limits, Pixel,
};
use crate::diag::{bail, StrResult};
use crate::foundations::{cast, dict, Bytes, Cast, Dict, Smart, Value};
/// A decoded raster image.
#[derive(Clone, Hash)]
pub struct RasterImage(Arc<Repr>);
@ -21,8 +22,7 @@ pub struct RasterImage(Arc<Repr>);
struct Repr {
data: Bytes,
format: RasterFormat,
dynamic: Arc<DynamicImage>,
exif_rotation: Option<u32>,
dynamic: image::DynamicImage,
icc: Option<Bytes>,
dpi: Option<f64>,
}
@ -50,8 +50,6 @@ impl RasterImage {
format: RasterFormat,
icc: Smart<Bytes>,
) -> StrResult<RasterImage> {
let mut exif_rot = None;
let (dynamic, icc, dpi) = match format {
RasterFormat::Exchange(format) => {
fn decode<T: ImageDecoder>(
@ -87,7 +85,6 @@ impl RasterImage {
// Apply rotation from EXIF metadata.
if let Some(rotation) = exif.as_ref().and_then(exif_rotation) {
apply_rotation(&mut dynamic, rotation);
exif_rot = Some(rotation);
}
// Extract pixel density.
@ -139,14 +136,7 @@ impl RasterImage {
}
};
Ok(Self(Arc::new(Repr {
data,
format,
exif_rotation: exif_rot,
dynamic: Arc::new(dynamic),
icc,
dpi,
})))
Ok(Self(Arc::new(Repr { data, format, dynamic, icc, dpi })))
}
/// The raw image data.
@ -169,11 +159,6 @@ impl RasterImage {
self.dynamic().height()
}
/// TODO.
pub fn exif_rotation(&self) -> Option<u32> {
self.0.exif_rotation
}
/// The image's pixel density in pixels per inch, if known.
///
/// This is guaranteed to be positive.
@ -182,7 +167,7 @@ impl RasterImage {
}
/// Access the underlying dynamic image.
pub fn dynamic(&self) -> &Arc<DynamicImage> {
pub fn dynamic(&self) -> &image::DynamicImage {
&self.0.dynamic
}
@ -340,12 +325,12 @@ fn apply_rotation(image: &mut DynamicImage, rotation: u32) {
ops::flip_horizontal_in_place(image);
*image = image.rotate270();
}
6 => *image = image.rotate270(),
6 => *image = image.rotate90(),
7 => {
ops::flip_horizontal_in_place(image);
*image = image.rotate90();
}
8 => *image = image.rotate90(),
8 => *image = image.rotate270(),
_ => {}
}
}

View File

@ -106,7 +106,7 @@ pub struct RectElem {
pub radius: Corners<Option<Rel<Length>>>,
/// How much to pad the rectangle's content.
/// See the [box's documentation]($box.inset) for more details.
/// See the [box's documentation]($box.outset) for more details.
#[resolve]
#[fold]
#[default(Sides::splat(Some(Abs::pt(5.0).into())))]

View File

@ -4,5 +4,5 @@ equation = Rovnice
bibliography = Bibliografie
heading = Kapitola
outline = Obsah
raw = Výpis
raw = Seznam
page = strana

View File

@ -1,8 +0,0 @@
figure = Gambar
table = Tabel
equation = Persamaan
bibliography = Daftar Pustaka
heading = Bagian
outline = Daftar Isi
raw = Kode
page = halaman

View File

@ -19,14 +19,20 @@ typst-macros = { workspace = true }
typst-syntax = { workspace = true }
typst-timing = { workspace = true }
typst-utils = { workspace = true }
arrayvec = { workspace = true }
base64 = { workspace = true }
bytemuck = { workspace = true }
comemo = { workspace = true }
ecow = { workspace = true }
image = { workspace = true }
infer = { workspace = true }
krilla = { workspace = true }
krilla-svg = { workspace = true }
indexmap = { workspace = true }
miniz_oxide = { workspace = true }
pdf-writer = { workspace = true }
serde = { workspace = true }
subsetter = { workspace = true }
svg2pdf = { workspace = true }
ttf-parser = { workspace = true }
xmp-writer = { workspace = true }
[lints]
workspace = true

View File

@ -0,0 +1,385 @@
use std::num::NonZeroUsize;
use ecow::eco_format;
use pdf_writer::types::Direction;
use pdf_writer::writers::PageLabel;
use pdf_writer::{Finish, Name, Pdf, Ref, Str, TextStr};
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::{Datetime, Smart};
use typst_library::layout::Dir;
use typst_library::text::Lang;
use typst_syntax::Span;
use xmp_writer::{DateTime, LangId, RenditionClass, XmpWriter};
use crate::page::PdfPageLabel;
use crate::{hash_base64, outline, TextStrExt, Timestamp, Timezone, WithEverything};
/// Write the document catalog.
///
/// This emits the outline tree, the page labels, the `/Info` dictionary, the
/// XMP metadata stream, and finally the `/Catalog` object itself, allocating
/// object references from `alloc` and writing them into `pdf`.
pub fn write_catalog(
    ctx: WithEverything,
    pdf: &mut Pdf,
    alloc: &mut Ref,
) -> SourceResult<()> {
    // The most frequently used language in the document becomes the
    // catalog's language entry.
    let lang = ctx
        .resources
        .languages
        .iter()
        .max_by_key(|(_, &count)| count)
        .map(|(&l, _)| l);

    // Derive the dominant reading direction from that language.
    let dir = if lang.map(Lang::dir) == Some(Dir::RTL) {
        Direction::R2L
    } else {
        Direction::L2R
    };

    // Write the outline tree.
    let outline_root_id = outline::write_outline(pdf, alloc, &ctx);

    // Write the page labels.
    let page_labels = write_page_labels(pdf, alloc, &ctx);

    // Write the document information. Most fields are mirrored into both the
    // `/Info` dictionary and the XMP metadata.
    let info_ref = alloc.bump();
    let mut info = pdf.document_info(info_ref);
    let mut xmp = XmpWriter::new();
    if let Some(title) = &ctx.document.info.title {
        info.title(TextStr::trimmed(title));
        xmp.title([(None, title.as_str())]);
    }

    if let Some(description) = &ctx.document.info.description {
        info.subject(TextStr::trimmed(description));
        xmp.description([(None, description.as_str())]);
    }

    let authors = &ctx.document.info.author;
    if !authors.is_empty() {
        // Turns out that if the authors are given in both the document
        // information dictionary and the XMP metadata, Acrobat takes a little
        // bit of both: The first author from the document information
        // dictionary and the remaining authors from the XMP metadata.
        //
        // To fix this for Acrobat, we could omit the remaining authors or all
        // metadata from the document information catalog (it is optional) and
        // only write XMP. However, not all other tools (including Apple
        // Preview) read the XMP data. This means we do want to include all
        // authors in the document information dictionary.
        //
        // Thus, the only alternative is to fold all authors into a single
        // `<rdf:li>` in the XMP metadata. This is, in fact, exactly what the
        // PDF/A spec Part 1 section 6.7.3 has to say about the matter. It's a
        // bit weird to not use the array (and it makes Acrobat show the author
        // list in quotes), but there's not much we can do about that.
        let joined = authors.join(", ");
        info.author(TextStr::trimmed(&joined));
        xmp.creator([joined.as_str()]);
    }

    let creator = eco_format!("Typst {}", env!("CARGO_PKG_VERSION"));
    info.creator(TextStr(&creator));
    xmp.creator_tool(&creator);

    let keywords = &ctx.document.info.keywords;
    if !keywords.is_empty() {
        let joined = keywords.join(", ");
        info.keywords(TextStr::trimmed(&joined));
        xmp.pdf_keywords(&joined);
    }

    // Resolve the creation/modification date from the document's metadata
    // and the export options.
    let (date, tz) = document_date(ctx.document.info.date, ctx.options.timestamp);
    if let Some(pdf_date) = date.and_then(|date| pdf_date(date, tz)) {
        info.creation_date(pdf_date);
        info.modified_date(pdf_date);
    }

    info.finish();

    // A unique ID for this instance of the document. Changes if anything
    // changes in the frames.
    let instance_id = hash_base64(&pdf.as_bytes());

    // Determine the document's ID. It should be as stable as possible.
    const PDF_VERSION: &str = "PDF-1.7";
    let doc_id = if let Smart::Custom(ident) = ctx.options.ident {
        // We were provided with a stable ID. Yay!
        hash_base64(&(PDF_VERSION, ident))
    } else if ctx.document.info.title.is_some() && !ctx.document.info.author.is_empty() {
        // If not provided from the outside, but title and author were given, we
        // compute a hash of them, which should be reasonably stable and unique.
        hash_base64(&(PDF_VERSION, &ctx.document.info.title, &ctx.document.info.author))
    } else {
        // The user provided no usable metadata which we can use as an `/ID`.
        instance_id.clone()
    };

    xmp.document_id(&doc_id);
    xmp.instance_id(&instance_id);
    xmp.format("application/pdf");
    xmp.pdf_version("1.7");
    xmp.language(ctx.resources.languages.keys().map(|lang| LangId(lang.as_str())));
    xmp.num_pages(ctx.document.pages.len() as u32);
    xmp.rendition_class(RenditionClass::Proof);

    if let Some(xmp_date) = date.and_then(|date| xmp_date(date, tz)) {
        xmp.create_date(xmp_date);
        xmp.modify_date(xmp_date);
        // PDF/A additionally wants a history of resource events recorded in
        // the metadata.
        if ctx.options.standards.pdfa {
            let mut history = xmp.history();
            history
                .add_event()
                .action(xmp_writer::ResourceEventAction::Saved)
                .when(xmp_date)
                .instance_id(&eco_format!("{instance_id}_source"));
            history
                .add_event()
                .action(xmp_writer::ResourceEventAction::Converted)
                .when(xmp_date)
                .instance_id(&instance_id)
                .software_agent(&creator);
        }
    }

    // Assert dominance. (Describe the extension schemas that PDF/A requires
    // to be declared in the metadata.)
    if let Some((part, conformance)) = ctx.options.standards.pdfa_part {
        let mut extension_schemas = xmp.extension_schemas();
        extension_schemas
            .xmp_media_management()
            .properties()
            .describe_instance_id();
        extension_schemas.pdf().properties().describe_all();
        extension_schemas.finish();
        xmp.pdfa_part(part);
        xmp.pdfa_conformance(conformance);
    }

    // Serialize the XMP metadata into a stream object.
    let xmp_buf = xmp.finish(None);
    let meta_ref = alloc.bump();
    pdf.stream(meta_ref, xmp_buf.as_bytes())
        .pair(Name(b"Type"), Name(b"Metadata"))
        .pair(Name(b"Subtype"), Name(b"XML"));

    // Set IDs only now, so that we don't need to clone them.
    pdf.set_file_id((doc_id.into_bytes(), instance_id.into_bytes()));

    // Write the document catalog.
    let catalog_ref = alloc.bump();
    let mut catalog = pdf.catalog(catalog_ref);
    catalog.pages(ctx.page_tree_ref);
    catalog.viewer_preferences().direction(dir);
    catalog.metadata(meta_ref);

    let has_dests = !ctx.references.named_destinations.dests.is_empty();
    let has_embeddings = !ctx.references.embedded_files.is_empty();

    // Write the `/Names` dictionary.
    if has_dests || has_embeddings {
        // Write the named destination tree if there are any entries.
        let mut name_dict = catalog.names();
        if has_dests {
            let mut dests_name_tree = name_dict.destinations();
            let mut names = dests_name_tree.names();
            for &(name, dest_ref, ..) in &ctx.references.named_destinations.dests {
                names.insert(Str(name.resolve().as_bytes()), dest_ref);
            }
        }

        if has_embeddings {
            let mut embedded_files = name_dict.embedded_files();
            let mut names = embedded_files.names();
            for (name, file_ref) in &ctx.references.embedded_files {
                names.insert(Str(name.as_bytes()), *file_ref);
            }
        }
    }

    if has_embeddings && ctx.options.standards.pdfa {
        // PDF 2.0, but ISO 19005-3 (PDF/A-3) Annex E allows it for PDF/A-3.
        let mut associated_files = catalog.insert(Name(b"AF")).array().typed();
        for (_, file_ref) in ctx.references.embedded_files {
            associated_files.item(file_ref).finish();
        }
    }

    // Insert the page labels. The number tree is keyed by `page number - 1`.
    if !page_labels.is_empty() {
        let mut num_tree = catalog.page_labels();
        let mut entries = num_tree.nums();
        for (n, r) in &page_labels {
            entries.insert(n.get() as i32 - 1, *r);
        }
    }

    if let Some(outline_root_id) = outline_root_id {
        catalog.outlines(outline_root_id);
    }

    if let Some(lang) = lang {
        catalog.lang(TextStr(lang.as_str()));
    }

    if ctx.options.standards.pdfa {
        catalog
            .output_intents()
            .push()
            .subtype(pdf_writer::types::OutputIntentSubtype::PDFA)
            .output_condition(TextStr("sRGB"))
            .output_condition_identifier(TextStr("Custom"))
            .info(TextStr("sRGB IEC61966-2.1"))
            .dest_output_profile(ctx.globals.color_functions.srgb.unwrap());
    }

    catalog.finish();

    // PDF/A disallows more than 8388607 (2^23 - 1) indirect objects.
    if ctx.options.standards.pdfa && pdf.refs().count() > 8388607 {
        bail!(Span::detached(), "too many PDF objects");
    }

    Ok(())
}
/// Write the page labels.
///
/// Returns, for each page that starts a new labeling range, its one-based
/// page number together with the reference of the written `PageLabel` object.
pub(crate) fn write_page_labels(
    chunk: &mut Pdf,
    alloc: &mut Ref,
    ctx: &WithEverything,
) -> Vec<(NonZeroUsize, Ref)> {
    // If there is no exported page labeled, we skip the writing
    if !ctx.pages.iter().filter_map(Option::as_ref).any(|p| {
        p.label
            .as_ref()
            .is_some_and(|l| l.prefix.is_some() || l.style.is_some())
    }) {
        return Vec::new();
    }

    let empty_label = PdfPageLabel::default();
    let mut result = vec![];
    let mut prev: Option<&PdfPageLabel> = None;

    // Skip non-exported pages for numbering.
    for (i, page) in ctx.pages.iter().filter_map(Option::as_ref).enumerate() {
        let nr = NonZeroUsize::new(1 + i).unwrap();
        // If there are pages with empty labels between labeled pages, we must
        // write empty PageLabel entries.
        let label = page.label.as_ref().unwrap_or(&empty_label);

        // A label that merely continues the previous range (same prefix and
        // style, offset incremented by one) needs no new entry.
        if let Some(pre) = prev {
            if label.prefix == pre.prefix
                && label.style == pre.style
                && label.offset == pre.offset.map(|n| n.saturating_add(1))
            {
                prev = Some(label);
                continue;
            }
        }

        let id = alloc.bump();
        let mut entry = chunk.indirect(id).start::<PageLabel>();

        // Only add what is actually provided. Don't add empty prefix string if
        // it wasn't given for example.
        if let Some(prefix) = &label.prefix {
            entry.prefix(TextStr::trimmed(prefix));
        }

        if let Some(style) = label.style {
            entry.style(style.to_pdf_numbering_style());
        }

        if let Some(offset) = label.offset {
            entry.offset(offset.get() as i32);
        }

        result.push((nr, id));
        prev = Some(label);
    }

    result
}
/// Resolve the document date.
///
/// (1) If the `document.date` is set to specific `datetime` or `none`, use it.
/// (2) If the `document.date` is set to `auto` or not set, try to use the
///     date from the options.
/// (3) Otherwise, we don't write date metadata.
pub fn document_date(
    document_date: Smart<Option<Datetime>>,
    timestamp: Option<Timestamp>,
) -> (Option<Datetime>, Option<Timezone>) {
    if let Smart::Custom(date) = document_date {
        // An explicit `datetime` (or an explicit `none`) always wins and
        // carries no timezone information.
        (date, None)
    } else if let Some(timestamp) = timestamp {
        // `auto`: fall back to the timestamp from the export options.
        (Some(timestamp.datetime), Some(timestamp.timezone))
    } else {
        (None, None)
    }
}
/// Converts a datetime to a pdf-writer date.
///
/// Returns `None` if the datetime has no year or a negative year, as PDF
/// dates cannot represent those.
pub fn pdf_date(datetime: Datetime, tz: Option<Timezone>) -> Option<pdf_writer::Date> {
    let year = datetime.year().filter(|&y| y >= 0)? as u16;
    let mut date = pdf_writer::Date::new(year);

    // Attach every optional component that the datetime actually carries.
    if let Some(month) = datetime.month() {
        date = date.month(month);
    }
    if let Some(day) = datetime.day() {
        date = date.day(day);
    }
    if let Some(hour) = datetime.hour() {
        date = date.hour(hour);
    }
    if let Some(minute) = datetime.minute() {
        date = date.minute(minute);
    }
    if let Some(second) = datetime.second() {
        date = date.second(second);
    }

    // Encode the UTC offset, if a timezone is known.
    if let Some(tz) = tz {
        date = match tz {
            Timezone::UTC => date.utc_offset_hour(0).utc_offset_minute(0),
            Timezone::Local { hour_offset, minute_offset } => {
                date.utc_offset_hour(hour_offset).utc_offset_minute(minute_offset)
            }
        };
    }

    Some(date)
}
/// Converts a datetime to an xmp-writer datetime.
///
/// Returns `None` if the datetime has no year or a negative year.
fn xmp_date(
    datetime: Datetime,
    timezone: Option<Timezone>,
) -> Option<xmp_writer::DateTime> {
    let year = datetime.year().filter(|&y| y >= 0)? as u16;

    let timezone = match timezone {
        Some(Timezone::UTC) => Some(xmp_writer::Timezone::Utc),
        Some(Timezone::Local { hour_offset, minute_offset }) => {
            // The xmp-writer use signed integers for the minute offset, which
            // can be buggy if the minute offset is negative. And because our
            // minute_offset is ensured to be `0 <= minute_offset < 60`, we can
            // safely cast it to a signed integer.
            Some(xmp_writer::Timezone::Local {
                hour: hour_offset,
                minute: minute_offset as i8,
            })
        }
        None => None,
    };

    Some(DateTime {
        year,
        month: datetime.month(),
        day: datetime.day(),
        hour: datetime.hour(),
        minute: datetime.minute(),
        second: datetime.second(),
        timezone,
    })
}

View File

@ -0,0 +1,394 @@
use std::sync::LazyLock;
use arrayvec::ArrayVec;
use pdf_writer::{writers, Chunk, Dict, Filter, Name, Ref};
use typst_library::diag::{bail, SourceResult};
use typst_library::visualize::{Color, ColorSpace, Paint};
use typst_syntax::Span;
use crate::{content, deflate, PdfChunk, PdfOptions, Renumber, WithResources};
// The names under which the color spaces are registered in PDF resource
// dictionaries.
pub const SRGB: Name<'static> = Name(b"srgb");
pub const D65_GRAY: Name<'static> = Name(b"d65gray");
pub const LINEAR_SRGB: Name<'static> = Name(b"linearrgb");
// The ICC profiles, deflate-compressed lazily on first use and then shared.
static SRGB_ICC_DEFLATED: LazyLock<Vec<u8>> =
    LazyLock::new(|| deflate(typst_assets::icc::S_RGB_V4));
static GRAY_ICC_DEFLATED: LazyLock<Vec<u8>> =
    LazyLock::new(|| deflate(typst_assets::icc::S_GREY_V4));
/// The color spaces present in the PDF document
#[derive(Default)]
pub struct ColorSpaces {
    /// Whether a color space that is encoded via sRGB was used.
    use_srgb: bool,
    /// Whether the D65 grayscale color space was used.
    use_d65_gray: bool,
    /// Whether the linear RGB color space was used.
    use_linear_rgb: bool,
}
impl ColorSpaces {
    /// Record that a color space occurs in the document.
    pub fn mark_as_used(&mut self, color_space: ColorSpace) {
        match color_space {
            // All of these are represented via sRGB.
            ColorSpace::Oklch
            | ColorSpace::Oklab
            | ColorSpace::Hsl
            | ColorSpace::Hsv
            | ColorSpace::Srgb => self.use_srgb = true,
            ColorSpace::D65Gray => self.use_d65_gray = true,
            ColorSpace::LinearRgb => self.use_linear_rgb = true,
            // CMYK is written as a device color space and needs no entry.
            ColorSpace::Cmyk => {}
        }
    }

    /// Write the used color spaces into a resource dictionary.
    pub fn write_color_spaces(&self, mut spaces: Dict, refs: &ColorFunctionRefs) {
        // Insert an entry for every color space that was marked as used.
        let entries = [
            (self.use_srgb, ColorSpace::Srgb, SRGB),
            (self.use_d65_gray, ColorSpace::D65Gray, D65_GRAY),
            (self.use_linear_rgb, ColorSpace::LinearRgb, LINEAR_SRGB),
        ];
        for (used, space, name) in entries {
            if used {
                write(space, spaces.insert(name).start(), refs);
            }
        }
    }

    /// Write the necessary color spaces functions and ICC profiles to the
    /// PDF file.
    pub fn write_functions(&self, chunk: &mut Chunk, refs: &ColorFunctionRefs) {
        // The sRGB profile (three components).
        if let Some(id) = refs.srgb {
            chunk
                .icc_profile(id, &SRGB_ICC_DEFLATED)
                .n(3)
                .range([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
                .filter(Filter::FlateDecode);
        }
        // The grayscale profile (one component).
        if let Some(id) = refs.d65_gray {
            chunk
                .icc_profile(id, &GRAY_ICC_DEFLATED)
                .n(1)
                .range([0.0, 1.0])
                .filter(Filter::FlateDecode);
        }
    }

    /// Merge two color space usage information together: a given color space
    /// is considered to be used if it is used on either side.
    pub fn merge(&mut self, other: &Self) {
        self.use_srgb |= other.use_srgb;
        self.use_d65_gray |= other.use_d65_gray;
        self.use_linear_rgb |= other.use_linear_rgb;
    }
}
/// Write the color space.
pub fn write(
    color_space: ColorSpace,
    writer: writers::ColorSpace,
    refs: &ColorFunctionRefs,
) {
    match color_space {
        // All sRGB-encoded spaces share the same ICC-based color space. The
        // refs must have been allocated beforehand, hence the unwraps.
        ColorSpace::Srgb
        | ColorSpace::Oklab
        | ColorSpace::Hsl
        | ColorSpace::Hsv
        | ColorSpace::Oklch => writer.icc_based(refs.srgb.unwrap()),
        ColorSpace::D65Gray => writer.icc_based(refs.d65_gray.unwrap()),
        ColorSpace::LinearRgb => {
            // Written as a calibrated RGB space. NOTE(review): the arguments
            // appear to be whitepoint, black point, gamma, and the RGB→XYZ
            // matrix per pdf-writer's `cal_rgb` — confirm against its docs.
            writer.cal_rgb(
                [0.9505, 1.0, 1.0888],
                None,
                Some([1.0, 1.0, 1.0]),
                Some([
                    0.4124, 0.2126, 0.0193, 0.3576, 0.715, 0.1192, 0.1805, 0.0722, 0.9505,
                ]),
            );
        }
        ColorSpace::Cmyk => writer.device_cmyk(),
    }
}
/// Global references for color conversion functions.
///
/// These functions are only written once (at most, they are not written if not
/// needed) in the final document, and are shared by all color space
/// dictionaries.
pub struct ColorFunctionRefs {
    /// Reference to the sRGB ICC profile stream, if that space is used.
    pub srgb: Option<Ref>,
    /// Reference to the D65 gray ICC profile stream, if that space is used.
    d65_gray: Option<Ref>,
}
impl Renumber for ColorFunctionRefs {
    /// Shift every allocated reference by the given offset.
    fn renumber(&mut self, offset: i32) {
        for r in [&mut self.srgb, &mut self.d65_gray].into_iter().flatten() {
            r.renumber(offset);
        }
    }
}
/// Allocate all necessary [`ColorFunctionRefs`].
pub fn alloc_color_functions_refs(
    context: &WithResources,
) -> SourceResult<(PdfChunk, ColorFunctionRefs)> {
    let mut chunk = PdfChunk::new();
    let mut used = ColorSpaces::default();

    // Under PDF/A, sRGB is always considered used (presumably needed for the
    // output intent — confirm against the callers).
    if context.options.standards.pdfa {
        used.mark_as_used(ColorSpace::Srgb);
    }

    // Gather color space usage across all resource dictionaries.
    context.resources.traverse(&mut |r| {
        used.merge(&r.colors);
        Ok(())
    })?;

    // Allocate a reference only for the spaces that are actually used.
    let srgb = used.use_srgb.then(|| chunk.alloc());
    let d65_gray = used.use_d65_gray.then(|| chunk.alloc());
    Ok((chunk, ColorFunctionRefs { srgb, d65_gray }))
}
/// Encodes the color into four f32s, which can be used in a PDF file.
/// Ensures that the values are in the range [0.0, 1.0].
///
/// # Why?
/// - Oklab: The a and b components are in the range [-0.5, 0.5] and the PDF
///   specifies (and some readers enforce) that all color values be in the range
///   [0.0, 1.0]. This means that the PostScript function and the encoded color
///   must be offset by 0.5.
/// - HSV/HSL: The hue component is in the range [0.0, 360.0] and the PDF format
///   specifies that it must be in the range [0.0, 1.0]. This means that the
///   PostScript function and the encoded color must be divided by 360.0.
///
/// NOTE(review): the `ColorSpace` implementation below sidesteps the offsets
/// described here by converting these spaces to sRGB first — confirm whether
/// the "# Why?" section still matches the current encoding strategy.
pub trait ColorEncode {
    /// Performs the color to PDF f32 array conversion.
    fn encode(&self, color: Color) -> [f32; 4];
}
impl ColorEncode for ColorSpace {
    fn encode(&self, color: Color) -> [f32; 4] {
        // Spaces without a direct PDF representation are emitted as sRGB;
        // everything else is converted to itself (a no-op conversion).
        let target = match self {
            ColorSpace::Oklab | ColorSpace::Oklch | ColorSpace::Hsl | ColorSpace::Hsv => {
                ColorSpace::Srgb
            }
            _ => *self,
        };
        color.to_space(target).to_vec4()
    }
}
/// Encodes a paint into either a fill or stroke color.
///
/// Implementors receive the current [`content::Transforms`], which gradient
/// and tiling paints presumably need for positioning — see that type's docs.
pub(super) trait PaintEncode {
    /// Set the paint as the fill color.
    fn set_as_fill(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()>;

    /// Set the paint as the stroke color.
    fn set_as_stroke(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()>;
}
impl PaintEncode for Paint {
    /// Dispatches to the concrete paint kind.
    fn set_as_fill(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()> {
        match self {
            Self::Solid(color) => color.set_as_fill(ctx, on_text, transforms),
            Self::Gradient(g) => g.set_as_fill(ctx, on_text, transforms),
            Self::Tiling(t) => t.set_as_fill(ctx, on_text, transforms),
        }
    }

    /// Dispatches to the concrete paint kind.
    fn set_as_stroke(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()> {
        match self {
            Self::Solid(color) => color.set_as_stroke(ctx, on_text, transforms),
            Self::Gradient(g) => g.set_as_stroke(ctx, on_text, transforms),
            Self::Tiling(t) => t.set_as_stroke(ctx, on_text, transforms),
        }
    }
}
impl PaintEncode for Color {
    /// Marks the color space as used, selects it, and writes the encoded
    /// fill color components.
    fn set_as_fill(
        &self,
        ctx: &mut content::Builder,
        _: bool,
        _: content::Transforms,
    ) -> SourceResult<()> {
        match self {
            Color::Luma(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
                ctx.set_fill_color_space(D65_GRAY);
                // Grayscale has a single component.
                let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
                ctx.content.set_fill_color([l]);
            }
            Color::LinearRgb(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::LinearRgb);
                ctx.set_fill_color_space(LINEAR_SRGB);
                let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
                ctx.content.set_fill_color([r, g, b]);
            }
            // Oklab & friends are encoded as RGB.
            Color::Rgb(_)
            | Color::Oklab(_)
            | Color::Oklch(_)
            | Color::Hsl(_)
            | Color::Hsv(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::Srgb);
                ctx.set_fill_color_space(SRGB);
                let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
                ctx.content.set_fill_color([r, g, b]);
            }
            Color::Cmyk(_) => {
                // CMYK is rejected when exporting PDF/A.
                check_cmyk_allowed(ctx.options)?;
                // Device CMYK doesn't use a named color space selection.
                ctx.reset_fill_color_space();
                let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
                ctx.content.set_fill_cmyk(c, m, y, k);
            }
        }
        Ok(())
    }

    /// Marks the color space as used, selects it, and writes the encoded
    /// stroke color components.
    fn set_as_stroke(
        &self,
        ctx: &mut content::Builder,
        _: bool,
        _: content::Transforms,
    ) -> SourceResult<()> {
        match self {
            Color::Luma(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
                ctx.set_stroke_color_space(D65_GRAY);
                // Grayscale has a single component.
                let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
                ctx.content.set_stroke_color([l]);
            }
            Color::LinearRgb(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::LinearRgb);
                ctx.set_stroke_color_space(LINEAR_SRGB);
                let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
                ctx.content.set_stroke_color([r, g, b]);
            }
            // Oklab & friends are encoded as RGB.
            Color::Rgb(_)
            | Color::Oklab(_)
            | Color::Oklch(_)
            | Color::Hsl(_)
            | Color::Hsv(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::Srgb);
                ctx.set_stroke_color_space(SRGB);
                let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
                ctx.content.set_stroke_color([r, g, b]);
            }
            Color::Cmyk(_) => {
                // CMYK is rejected when exporting PDF/A.
                check_cmyk_allowed(ctx.options)?;
                // Device CMYK doesn't use a named color space selection.
                ctx.reset_stroke_color_space();
                let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
                ctx.content.set_stroke_cmyk(c, m, y, k);
            }
        }
        Ok(())
    }
}
/// Extra color space functions.
pub(super) trait ColorSpaceExt {
    /// Returns the range of the color space.
    ///
    /// The slice holds one `[min, max]` pair per color component.
    fn range(self) -> &'static [f32];

    /// Converts a color to the color space.
    fn convert<U: QuantizedColor>(self, color: Color) -> ArrayVec<U, 4>;
}
impl ColorSpaceExt for ColorSpace {
fn range(self) -> &'static [f32] {
match self {
ColorSpace::D65Gray => &[0.0, 1.0],
ColorSpace::Oklab => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Oklch => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::LinearRgb => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Srgb => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Cmyk => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Hsl => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Hsv => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
}
}
fn convert<U: QuantizedColor>(self, color: Color) -> ArrayVec<U, 4> {
let components = self.encode(color);
self.range()
.chunks(2)
.zip(components)
.map(|(range, component)| U::quantize(component, [range[0], range[1]]))
.collect()
}
}
/// Quantizes a color component to a specific type.
pub(super) trait QuantizedColor {
    /// Map a component from the given `[min, max]` range into this type's
    /// representation.
    fn quantize(color: f32, range: [f32; 2]) -> Self;
}

impl QuantizedColor for u16 {
    fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
        // Normalize into [0, 1], scale to the full integer range, and clamp
        // before casting so out-of-range inputs saturate instead of wrapping.
        let normalized = (color - min) / (max - min);
        let scaled = normalized * Self::MAX as f32;
        scaled.round().clamp(0.0, Self::MAX as f32) as Self
    }
}

impl QuantizedColor for f32 {
    fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
        // Floats are kept as-is, merely clamped into the valid range.
        color.clamp(min, max)
    }
}
/// Fails with an error if PDF/A processing is enabled.
pub(super) fn check_cmyk_allowed(options: &PdfOptions) -> SourceResult<()> {
    // Outside of PDF/A, CMYK is fine.
    if !options.standards.pdfa {
        return Ok(());
    }
    bail!(
        Span::detached(),
        "cmyk colors are not currently supported by PDF/A export"
    );
}

View File

@ -0,0 +1,344 @@
//! OpenType fonts generally define monochrome glyphs, but they can also define
//! glyphs with colors. This is how emojis are generally implemented for
//! example.
//!
//! There are various standards to represent color glyphs, but PDF readers don't
//! support any of them natively, so Typst has to handle them manually.
use std::collections::HashMap;
use ecow::eco_format;
use indexmap::IndexMap;
use pdf_writer::types::UnicodeCmap;
use pdf_writer::writers::WMode;
use pdf_writer::{Filter, Finish, Name, Rect, Ref};
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::Repr;
use typst_library::layout::Em;
use typst_library::text::color::glyph_frame;
use typst_library::text::{Font, Glyph, TextItemView};
use crate::font::{base_font_name, write_font_descriptor, CMAP_NAME, SYSTEM_INFO};
use crate::resources::{Resources, ResourcesRefs};
use crate::{content, EmExt, PdfChunk, PdfOptions, WithGlobalRefs};
/// Write color fonts in the PDF document.
///
/// They are written as Type3 fonts, which map glyph IDs to arbitrary PDF
/// instructions.
pub fn write_color_fonts(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<ColorFontSlice, Ref>)> {
    let mut out = HashMap::new();
    let mut chunk = PdfChunk::new();
    context.resources.traverse(&mut |resources: &Resources| {
        let Some(color_fonts) = &resources.color_fonts else {
            return Ok(());
        };
        for (color_font, font_slice) in color_fonts.iter() {
            // Each slice is written only once, even if several resource
            // dictionaries reference it.
            if out.contains_key(&font_slice) {
                continue;
            }
            // Allocate some IDs.
            let subfont_id = chunk.alloc();
            let cmap_ref = chunk.alloc();
            let descriptor_ref = chunk.alloc();
            let widths_ref = chunk.alloc();
            // And a map between glyph IDs and the instructions to draw this
            // glyph.
            let mut glyphs_to_instructions = Vec::new();
            // The 256-glyph window of this slice within the font's glyphs.
            let start = font_slice.subfont * 256;
            let end = (start + 256).min(color_font.glyphs.len());
            let glyph_count = end - start;
            let subset = &color_font.glyphs[start..end];
            let mut widths = Vec::new();
            let mut gids = Vec::new();
            let scale_factor = font_slice.font.ttf().units_per_em() as f32;
            // Write the instructions for each glyph.
            for color_glyph in subset {
                let instructions_stream_ref = chunk.alloc();
                // Advance width in font units (glyphs without an advance
                // default to zero).
                let width = font_slice
                    .font
                    .advance(color_glyph.gid)
                    .unwrap_or(Em::new(0.0))
                    .get() as f32
                    * scale_factor;
                widths.push(width);
                chunk
                    .stream(
                        instructions_stream_ref,
                        color_glyph.instructions.content.wait(),
                    )
                    .filter(Filter::FlateDecode);
                // Use this stream as instructions to draw the glyph.
                glyphs_to_instructions.push(instructions_stream_ref);
                gids.push(color_glyph.gid);
            }
            // Determine the base font name.
            gids.sort();
            let base_font = base_font_name(&font_slice.font, &gids);
            // Write the Type3 font object.
            let mut pdf_font = chunk.type3_font(subfont_id);
            pdf_font.name(Name(base_font.as_bytes()));
            pdf_font.pair(Name(b"Resources"), color_fonts.resources.reference);
            pdf_font.bbox(color_font.bbox);
            // Map font units back to text space.
            pdf_font.matrix([1.0 / scale_factor, 0.0, 0.0, 1.0 / scale_factor, 0.0, 0.0]);
            pdf_font.first_char(0);
            // NOTE(review): assumes `glyph_count >= 1`; an empty slice would
            // underflow here — verify the iterator never yields empty slices.
            pdf_font.last_char((glyph_count - 1) as u8);
            pdf_font.pair(Name(b"Widths"), widths_ref);
            pdf_font.to_unicode(cmap_ref);
            pdf_font.font_descriptor(descriptor_ref);
            // Write the /CharProcs dictionary, that maps glyph names to
            // drawing instructions.
            let mut char_procs = pdf_font.char_procs();
            for (gid, instructions_ref) in glyphs_to_instructions.iter().enumerate() {
                char_procs
                    .pair(Name(eco_format!("glyph{gid}").as_bytes()), *instructions_ref);
            }
            char_procs.finish();
            // Write the /Encoding dictionary.
            let names = (0..glyph_count)
                .map(|gid| eco_format!("glyph{gid}"))
                .collect::<Vec<_>>();
            pdf_font
                .encoding_custom()
                .differences()
                .consecutive(0, names.iter().map(|name| Name(name.as_bytes())));
            pdf_font.finish();
            // Encode a CMAP to make it possible to search or copy glyphs.
            let glyph_set = resources.color_glyph_sets.get(&font_slice.font).unwrap();
            let mut cmap = UnicodeCmap::new(CMAP_NAME, SYSTEM_INFO);
            for (index, glyph) in subset.iter().enumerate() {
                let Some(text) = glyph_set.get(&glyph.gid) else {
                    continue;
                };
                if !text.is_empty() {
                    cmap.pair_with_multiple(index as u8, text.chars());
                }
            }
            chunk.cmap(cmap_ref, &cmap.finish()).writing_mode(WMode::Horizontal);
            // Write the font descriptor.
            write_font_descriptor(
                &mut chunk,
                descriptor_ref,
                &font_slice.font,
                &base_font,
            );
            // Write the widths array
            chunk.indirect(widths_ref).array().items(widths);
            out.insert(font_slice, subfont_id);
        }
        Ok(())
    })?;
    Ok((chunk, out))
}
/// A mapping between `Font`s and all the corresponding `ColorFont`s.
///
/// This mapping is one-to-many because there can only be 256 glyphs in a Type 3
/// font, and fonts generally have more color glyphs than that.
pub struct ColorFontMap<R> {
    /// The mapping itself.
    map: IndexMap<Font, ColorFont>,
    /// The resources required to render the fonts in this map.
    ///
    /// For example, this can be the images for glyphs based on bitmaps or SVG.
    pub resources: Resources<R>,
    /// The number of font slices (groups of 256 color glyphs), across all color
    /// fonts. Used to hand out globally unique slice IDs.
    total_slice_count: usize,
}
/// A collection of Type3 font, belonging to the same TTF font.
pub struct ColorFont {
    /// The IDs of each sub-slice of this font. They are the numbers after "Cf"
    /// in the Resources dictionaries.
    slice_ids: Vec<usize>,
    /// The list of all color glyphs in this family.
    ///
    /// The index in this vector modulo 256 corresponds to the index in one of
    /// the Type3 fonts in `refs` (the `n`-th in the vector, where `n` is the
    /// quotient of the index divided by 256).
    pub glyphs: Vec<ColorGlyph>,
    /// The global bounding box of the font.
    pub bbox: Rect,
    /// A mapping between glyph IDs and character indices in the `glyphs`
    /// vector. Enables deduplication of repeated glyphs.
    glyph_indices: HashMap<u16, usize>,
}
/// A single color glyph.
pub struct ColorGlyph {
    /// The ID of the glyph.
    pub gid: u16,
    /// Instructions to draw the glyph, as an encoded content stream.
    pub instructions: content::Encoded,
}
impl ColorFontMap<()> {
    /// Creates a new empty mapping
    pub fn new() -> Self {
        Self {
            map: IndexMap::new(),
            total_slice_count: 0,
            resources: Resources::default(),
        }
    }

    /// For a given glyph in a TTF font, give the ID of the Type3 font and the
    /// index of the glyph inside of this Type3 font.
    ///
    /// If this is the first occurrence of this glyph in this font, it will
    /// start its encoding and add it to the list of known glyphs.
    pub fn get(
        &mut self,
        options: &PdfOptions,
        text: &TextItemView,
        glyph: &Glyph,
    ) -> SourceResult<(usize, u8)> {
        let font = &text.item.font;
        let color_font = self.map.entry(font.clone()).or_insert_with(|| {
            // First glyph of this font: derive the font-wide bounding box
            // in font units.
            let global_bbox = font.ttf().global_bounding_box();
            let bbox = Rect::new(
                font.to_em(global_bbox.x_min).to_font_units(),
                font.to_em(global_bbox.y_min).to_font_units(),
                font.to_em(global_bbox.x_max).to_font_units(),
                font.to_em(global_bbox.y_max).to_font_units(),
            );
            ColorFont {
                bbox,
                slice_ids: Vec::new(),
                glyphs: Vec::new(),
                glyph_indices: HashMap::new(),
            }
        });
        Ok(if let Some(index_of_glyph) = color_font.glyph_indices.get(&glyph.id) {
            // If we already know this glyph, return it.
            (color_font.slice_ids[index_of_glyph / 256], *index_of_glyph as u8)
        } else {
            // Otherwise, allocate a new ColorGlyph in the font, and a new Type3 font
            // if needed
            let index = color_font.glyphs.len();
            if index % 256 == 0 {
                // This glyph opens a fresh 256-glyph slice: give it the next
                // globally unique slice ID.
                color_font.slice_ids.push(self.total_slice_count);
                self.total_slice_count += 1;
            }
            let (frame, tofu) = glyph_frame(font, glyph.id);
            if options.standards.pdfa && tofu {
                // With PDF/A, a glyph that could not be drawn is a hard error.
                bail!(failed_to_convert(text, glyph));
            }
            // Advance width in font units; glyphs without one default to zero.
            let width = font.advance(glyph.id).unwrap_or(Em::new(0.0)).get()
                * font.units_per_em();
            let instructions = content::build(
                options,
                &mut self.resources,
                &frame,
                None,
                Some(width as f32),
            )?;
            color_font.glyphs.push(ColorGlyph { gid: glyph.id, instructions });
            color_font.glyph_indices.insert(glyph.id, index);
            (color_font.slice_ids[index / 256], index as u8)
        })
    }

    /// Assign references to the resource dictionary used by this set of color
    /// fonts.
    pub fn with_refs(self, refs: &ResourcesRefs) -> ColorFontMap<Ref> {
        ColorFontMap {
            map: self.map,
            resources: self.resources.with_refs(refs),
            total_slice_count: self.total_slice_count,
        }
    }
}
impl<R> ColorFontMap<R> {
    /// Iterate over all Type3 fonts.
    ///
    /// Each item of this iterator maps to a Type3 font: it contains
    /// at most 256 glyphs. A same TTF font can yield multiple Type3 fonts
    /// (one per 256-glyph slice).
    pub fn iter(&self) -> ColorFontMapIter<'_, R> {
        ColorFontMapIter { map: self, font_index: 0, slice_index: 0 }
    }
}
/// Iterator over a [`ColorFontMap`].
///
/// See [`ColorFontMap::iter`].
pub struct ColorFontMapIter<'a, R> {
    /// The map over which to iterate
    map: &'a ColorFontMap<R>,
    /// The index of TTF font on which we currently iterate
    font_index: usize,
    /// The sub-font (slice of at most 256 glyphs) at which we currently are.
    slice_index: usize,
}
impl<'a, R> Iterator for ColorFontMapIter<'a, R> {
    type Item = (&'a ColorFont, ColorFontSlice);

    /// Yields the next 256-glyph slice, advancing to the next TTF font once
    /// the current font's slices are exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let (font, color_font) = self.map.map.get_index(self.font_index)?;
            // Number of slices this font occupies. Using `div_ceil` (instead
            // of the previous `len / 256 + 1`) avoids yielding a trailing
            // *empty* slice when the glyph count is an exact multiple of 256,
            // which would make the writer compute `glyph_count - 1` on zero
            // and underflow.
            let slice_count = color_font.glyphs.len().div_ceil(256);
            if self.slice_index < slice_count {
                let slice =
                    ColorFontSlice { font: font.clone(), subfont: self.slice_index };
                self.slice_index += 1;
                return Some((color_font, slice));
            }
            // This font is exhausted; move on to the next one.
            self.font_index += 1;
            self.slice_index = 0;
        }
    }
}
/// A set of at most 256 glyphs (a limit imposed on Type3 fonts by the PDF
/// specification) that represents a part of a TTF font.
///
/// Hashable so it can serve as a deduplication key when writing the fonts.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct ColorFontSlice {
    /// The original TTF font.
    pub font: Font,
    /// The index of the Type3 font, among all those that are necessary to
    /// represent the subset of the TTF font we are interested in.
    pub subfont: usize,
}
/// The error when the glyph could not be converted.
#[cold]
fn failed_to_convert(text: &TextItemView, glyph: &Glyph) -> SourceDiagnostic {
    let mut diag = error!(
        glyph.span.0,
        "the glyph for {} could not be exported",
        text.glyph_text(glyph).repr()
    );
    // CFF2-flavored fonts are a known unsupported case; add a hint for them.
    if text.item.font.ttf().tables().cff2.is_some() {
        diag.hint("CFF2 fonts are not currently supported");
    }
    diag
}

View File

@ -0,0 +1,823 @@
//! Generic writer for PDF content.
//!
//! It is used to write page contents, color glyph instructions, and tilings.
//!
//! See also [`pdf_writer::Content`].
use ecow::eco_format;
use pdf_writer::types::{
ColorSpaceOperand, LineCapStyle, LineJoinStyle, TextRenderingMode,
};
use pdf_writer::writers::PositionedItems;
use pdf_writer::{Content, Finish, Name, Rect, Str};
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::Repr;
use typst_library::layout::{
Abs, Em, Frame, FrameItem, GroupItem, Point, Ratio, Size, Transform,
};
use typst_library::model::Destination;
use typst_library::text::color::should_outline;
use typst_library::text::{Font, Glyph, TextItem, TextItemView};
use typst_library::visualize::{
Curve, CurveItem, FillRule, FixedStroke, Geometry, Image, LineCap, LineJoin, Paint,
Shape,
};
use typst_syntax::Span;
use typst_utils::{Deferred, Numeric, SliceExt};
use crate::color::PaintEncode;
use crate::color_font::ColorFontMap;
use crate::extg::ExtGState;
use crate::image::deferred_image;
use crate::resources::Resources;
use crate::{deflate_deferred, AbsExt, ContentExt, EmExt, PdfOptions, StrExt};
/// Encode a [`Frame`] into a content stream.
///
/// The resources that were used in the stream will be added to `resources`.
///
/// `color_glyph_width` should be `None` unless the `Frame` represents a [color
/// glyph].
///
/// [color glyph]: `crate::color_font`
pub fn build(
    options: &PdfOptions,
    resources: &mut Resources<()>,
    frame: &Frame,
    fill: Option<Paint>,
    color_glyph_width: Option<f32>,
) -> SourceResult<Encoded> {
    let size = frame.size();
    let mut ctx = Builder::new(options, resources, size);
    // Declare the advance width when building a color glyph.
    if let Some(width) = color_glyph_width {
        ctx.content.start_color_glyph(width);
    }
    // Make the coordinate system start at the top-left.
    ctx.transform(
        // Make the Y axis go upwards
        Transform::scale(Ratio::one(), -Ratio::one())
            // Also move the origin to the top left corner
            .post_concat(Transform::translate(Abs::zero(), size.y)),
    );
    // An optional background fill covering the whole frame.
    if let Some(fill) = fill {
        let shape = Geometry::Rect(frame.size()).filled(fill);
        write_shape(&mut ctx, Point::zero(), &shape)?;
    }
    // Encode the frame into the content stream.
    write_frame(&mut ctx, frame)?;
    Ok(Encoded {
        size,
        content: deflate_deferred(ctx.content.finish()),
        uses_opacities: ctx.uses_opacities,
        links: ctx.links,
    })
}
/// An encoded content stream.
pub struct Encoded {
    /// The dimensions of the content.
    pub size: Size,
    /// The actual content stream.
    pub content: Deferred<Vec<u8>>,
    /// Whether the content uses opacities.
    pub uses_opacities: bool,
    /// Links in the PDF coordinate system.
    pub links: Vec<(Destination, Rect)>,
}
/// An exporter for a single PDF content stream.
///
/// Content streams are a series of PDF commands. They can reference external
/// objects only through resources.
///
/// Content streams can be used for page contents, but also to describe color
/// glyphs and tilings.
pub struct Builder<'a, R = ()> {
    /// Settings for PDF export.
    pub(crate) options: &'a PdfOptions<'a>,
    /// A list of all resources that are used in the content stream.
    pub(crate) resources: &'a mut Resources<R>,
    /// The PDF content stream that is being built.
    pub content: Content,
    /// Current graphic state, mirrored here to deduplicate operators.
    state: State,
    /// Stack of saved graphic states.
    saves: Vec<State>,
    /// Whether any stroke or fill was not totally opaque.
    uses_opacities: bool,
    /// All clickable links that are present in this content.
    links: Vec<(Destination, Rect)>,
}
impl<'a, R> Builder<'a, R> {
    /// Create a new content builder.
    pub fn new(
        options: &'a PdfOptions<'a>,
        resources: &'a mut Resources<R>,
        size: Size,
    ) -> Self {
        Self {
            options,
            resources,
            content: Content::new(),
            state: State::new(size),
            saves: Vec::new(),
            links: Vec::new(),
            uses_opacities: false,
        }
    }
}
/// A simulated graphics state used to deduplicate graphics state changes and
/// keep track of the current transformation matrix for link annotations.
#[derive(Debug, Clone)]
struct State {
    /// The transform of the current item.
    transform: Transform,
    /// The transform of the first hard frame in the hierarchy.
    container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    size: Size,
    /// The current font and font size.
    font: Option<(Font, Abs)>,
    /// The current fill paint.
    fill: Option<Paint>,
    /// The color space of the current fill paint.
    fill_space: Option<Name<'static>>,
    /// The current external graphic state.
    external_graphics_state: ExtGState,
    /// The current stroke paint.
    stroke: Option<FixedStroke>,
    /// The color space of the current stroke paint.
    stroke_space: Option<Name<'static>>,
    /// The current text rendering mode.
    text_rendering_mode: TextRenderingMode,
}
impl State {
    /// Creates a new, clean state for a given `size`.
    pub fn new(size: Size) -> Self {
        Self {
            size,
            transform: Transform::identity(),
            container_transform: Transform::identity(),
            font: None,
            fill: None,
            fill_space: None,
            stroke: None,
            stroke_space: None,
            external_graphics_state: ExtGState::default(),
            text_rendering_mode: TextRenderingMode::Fill,
        }
    }

    /// Creates the [`Transforms`] structure for the current item.
    pub fn transforms(&self, size: Size, pos: Point) -> Transforms {
        // Shift the current transform to the item's position.
        let shift = Transform::translate(pos.x, pos.y);
        Transforms {
            transform: self.transform.pre_concat(shift),
            container_transform: self.container_transform,
            container_size: self.size,
            size,
        }
    }
}
/// Subset of the state used to calculate the transform of gradients and tilings.
#[derive(Debug, Clone, Copy)]
pub(super) struct Transforms {
    /// The transform of the current item.
    pub transform: Transform,
    /// The transform of the first hard frame in the hierarchy.
    pub container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    pub container_size: Size,
    /// The size of the item.
    pub size: Size,
}
impl Builder<'_, ()> {
    /// Push the current graphics state and emit a save operator.
    fn save_state(&mut self) -> SourceResult<()> {
        self.saves.push(self.state.clone());
        self.content.save_state_checked()
    }

    /// Restore the most recently saved graphics state.
    fn restore_state(&mut self) {
        self.content.restore_state();
        self.state = self.saves.pop().expect("missing state save");
    }

    /// Switch to the given external graphics state, registering it as a
    /// resource. Emits nothing if it is already active.
    fn set_external_graphics_state(&mut self, graphics_state: &ExtGState) {
        let current_state = &self.state.external_graphics_state;
        if current_state != graphics_state {
            let index = self.resources.ext_gs.insert(*graphics_state);
            let name = eco_format!("Gs{index}");
            self.content.set_parameters(Name(name.as_bytes()));
            self.state.external_graphics_state = *graphics_state;
            if graphics_state.uses_opacities() {
                self.uses_opacities = true;
            }
        }
    }

    /// Apply the opacities of the given stroke and fill paints.
    ///
    /// Gradients and tilings count as fully opaque here; solid colors
    /// contribute their alpha channel, defaulting to opaque (255).
    fn set_opacities(&mut self, stroke: Option<&FixedStroke>, fill: Option<&Paint>) {
        let get_opacity = |paint: &Paint| {
            let color = match paint {
                Paint::Solid(color) => *color,
                Paint::Gradient(_) | Paint::Tiling(_) => return 255,
            };
            color.alpha().map_or(255, |v| (v * 255.0).round() as u8)
        };
        let stroke_opacity = stroke.map_or(255, |stroke| get_opacity(&stroke.paint));
        let fill_opacity = fill.map_or(255, get_opacity);
        self.set_external_graphics_state(&ExtGState { stroke_opacity, fill_opacity });
    }

    /// Reset stroke and fill opacity back to fully opaque.
    fn reset_opacities(&mut self) {
        self.set_external_graphics_state(&ExtGState {
            stroke_opacity: 255,
            fill_opacity: 255,
        });
    }

    /// Concatenate a transform onto the tracked transformation matrix and
    /// emit the corresponding operator into the content stream.
    pub fn transform(&mut self, transform: Transform) {
        let Transform { sx, ky, kx, sy, tx, ty } = transform;
        self.state.transform = self.state.transform.pre_concat(transform);
        // While no container transform is set, it tracks the full transform.
        if self.state.container_transform.is_identity() {
            self.state.container_transform = self.state.transform;
        }
        self.content.transform([
            sx.get() as _,
            ky.get() as _,
            kx.get() as _,
            sy.get() as _,
            tx.to_f32(),
            ty.to_f32(),
        ]);
    }

    /// Update only the container transform (no content stream output).
    fn group_transform(&mut self, transform: Transform) {
        self.state.container_transform =
            self.state.container_transform.pre_concat(transform);
    }

    /// Select a font, registering it as a resource. Skips the operator if
    /// font and size are unchanged.
    fn set_font(&mut self, font: &Font, size: Abs) {
        if self.state.font.as_ref().map(|(f, s)| (f, *s)) != Some((font, size)) {
            let index = self.resources.fonts.insert(font.clone());
            let name = eco_format!("F{index}");
            self.content.set_font(Name(name.as_bytes()), size.to_f32());
            self.state.font = Some((font.clone(), size));
        }
    }

    /// Record the size of the current container frame.
    fn size(&mut self, size: Size) {
        self.state.size = size;
    }

    /// Set the fill paint. Skips the operators if the paint is unchanged;
    /// gradients are always re-emitted since they depend on the transforms.
    fn set_fill(
        &mut self,
        fill: &Paint,
        on_text: bool,
        transforms: Transforms,
    ) -> SourceResult<()> {
        if self.state.fill.as_ref() != Some(fill)
            || matches!(self.state.fill, Some(Paint::Gradient(_)))
        {
            fill.set_as_fill(self, on_text, transforms)?;
            self.state.fill = Some(fill.clone());
        }
        Ok(())
    }

    /// Select a named fill color space if it isn't already active.
    pub fn set_fill_color_space(&mut self, space: Name<'static>) {
        if self.state.fill_space != Some(space) {
            self.content.set_fill_color_space(ColorSpaceOperand::Named(space));
            self.state.fill_space = Some(space);
        }
    }

    /// Forget the cached fill color space so the next one is always written.
    pub fn reset_fill_color_space(&mut self) {
        self.state.fill_space = None;
    }

    /// Set the stroke, emitting only the properties that actually changed.
    /// Gradient paints are always re-emitted (see `set_fill`).
    fn set_stroke(
        &mut self,
        stroke: &FixedStroke,
        on_text: bool,
        transforms: Transforms,
    ) -> SourceResult<()> {
        if self.state.stroke.as_ref() != Some(stroke)
            || matches!(
                self.state.stroke.as_ref().map(|s| &s.paint),
                Some(Paint::Gradient(_))
            )
        {
            let FixedStroke { paint, thickness, cap, join, dash, miter_limit } = stroke;
            paint.set_as_stroke(self, on_text, transforms)?;
            self.content.set_line_width(thickness.to_f32());
            if self.state.stroke.as_ref().map(|s| &s.cap) != Some(cap) {
                self.content.set_line_cap(to_pdf_line_cap(*cap));
            }
            if self.state.stroke.as_ref().map(|s| &s.join) != Some(join) {
                self.content.set_line_join(to_pdf_line_join(*join));
            }
            if self.state.stroke.as_ref().map(|s| &s.dash) != Some(dash) {
                if let Some(dash) = dash {
                    self.content.set_dash_pattern(
                        dash.array.iter().map(|l| l.to_f32()),
                        dash.phase.to_f32(),
                    );
                } else {
                    // No dash: reset to a solid line.
                    self.content.set_dash_pattern([], 0.0);
                }
            }
            if self.state.stroke.as_ref().map(|s| &s.miter_limit) != Some(miter_limit) {
                self.content.set_miter_limit(miter_limit.get() as f32);
            }
            self.state.stroke = Some(stroke.clone());
        }
        Ok(())
    }

    /// Select a named stroke color space if it isn't already active.
    pub fn set_stroke_color_space(&mut self, space: Name<'static>) {
        if self.state.stroke_space != Some(space) {
            self.content.set_stroke_color_space(ColorSpaceOperand::Named(space));
            self.state.stroke_space = Some(space);
        }
    }

    /// Forget the cached stroke color space so the next one is always
    /// written.
    pub fn reset_stroke_color_space(&mut self) {
        self.state.stroke_space = None;
    }

    /// Set the text rendering mode if it changed.
    fn set_text_rendering_mode(&mut self, mode: TextRenderingMode) {
        if self.state.text_rendering_mode != mode {
            self.content.set_text_rendering_mode(mode);
            self.state.text_rendering_mode = mode;
        }
    }
}
/// Encode a frame into the content stream.
pub(crate) fn write_frame(ctx: &mut Builder, frame: &Frame) -> SourceResult<()> {
    // Write every item at its position within the frame.
    for &(pos, ref item) in frame.items() {
        match item {
            FrameItem::Group(group) => write_group(ctx, pos, group)?,
            FrameItem::Text(text) => write_text(ctx, pos, text)?,
            FrameItem::Shape(shape, _) => write_shape(ctx, pos, shape)?,
            FrameItem::Image(image, size, span) => {
                write_image(ctx, pos.x.to_f32(), pos.y.to_f32(), image, *size, *span)?
            }
            FrameItem::Link(dest, size) => write_link(ctx, pos, dest, *size),
            // Tags carry no visual output.
            FrameItem::Tag(_) => {}
        }
    }
    Ok(())
}
/// Encode a group into the content stream.
fn write_group(ctx: &mut Builder, pos: Point, group: &GroupItem) -> SourceResult<()> {
    let translation = Transform::translate(pos.x, pos.y);
    ctx.save_state()?;
    if group.frame.kind().is_hard() {
        // A hard frame starts a new container: rebase the container
        // transform onto the current position and the group's transform.
        ctx.group_transform(
            ctx.state
                .transform
                .post_concat(ctx.state.container_transform.invert().unwrap())
                .pre_concat(translation)
                .pre_concat(group.transform),
        );
        ctx.size(group.frame.size());
    }
    ctx.transform(translation.pre_concat(group.transform));
    // Apply the clip path, if any, before writing the group's contents.
    if let Some(clip_curve) = &group.clip {
        write_curve(ctx, 0.0, 0.0, clip_curve);
        ctx.content.clip_nonzero();
        ctx.content.end_path();
    }
    write_frame(ctx, &group.frame)?;
    ctx.restore_state();
    Ok(())
}
/// Encode a text run into the content stream.
fn write_text(ctx: &mut Builder, pos: Point, text: &TextItem) -> SourceResult<()> {
    // With PDF/A, text that only the last-resort font can show is an error.
    if ctx.options.standards.pdfa && text.font.info().is_last_resort() {
        bail!(
            Span::find(text.glyphs.iter().map(|g| g.span.0)),
            "the text {} could not be displayed with any font",
            &text.text,
        );
    }
    // Count how many glyphs can be drawn as plain outlines.
    let outline_glyphs =
        text.glyphs.iter().filter(|g| should_outline(&text.font, g)).count();
    if outline_glyphs == text.glyphs.len() {
        // Fast path: the whole run consists of outline glyphs.
        write_normal_text(ctx, pos, TextItemView::full(text))?;
    } else if outline_glyphs == 0 {
        // Fast path: no glyph can be outlined; take the complex path.
        write_complex_glyphs(ctx, pos, TextItemView::full(text))?;
    } else {
        // Otherwise we need to split it into smaller text runs.
        let mut offset = 0;
        let mut position_in_run = Abs::zero();
        for (should_outline, sub_run) in
            text.glyphs.group_by_key(|g| should_outline(&text.font, g))
        {
            let end = offset + sub_run.len();
            // Build a sub text-run
            let text_item_view = TextItemView::from_glyph_range(text, offset..end);
            // Adjust the position of the run on the line
            let pos = pos + Point::new(position_in_run, Abs::zero());
            position_in_run += text_item_view.width();
            offset = end;
            // Actually write the sub text-run.
            if should_outline {
                write_normal_text(ctx, pos, text_item_view)?;
            } else {
                write_complex_glyphs(ctx, pos, text_item_view)?;
            }
        }
    }
    Ok(())
}
/// Encodes a text run (without any color glyph) into the content stream.
///
/// Sets up fill/stroke state, positions the text, and then shows the
/// glyphs as a positioned array, flushing accumulated kerning
/// adjustments between glyph chunks.
fn write_normal_text(
    ctx: &mut Builder,
    pos: Point,
    text: TextItemView,
) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();

    // Count the glyphs written for this run's language.
    *ctx.resources.languages.entry(text.item.lang).or_insert(0) += text.glyph_range.len();

    // Record the source text each glyph of this font represents.
    let glyph_set = ctx.resources.glyph_sets.entry(text.item.font.clone()).or_default();
    for g in text.glyphs() {
        glyph_set.entry(g.id).or_insert_with(|| text.glyph_text(g));
    }

    let fill_transform = ctx.state.transforms(Size::zero(), pos);
    ctx.set_fill(&text.item.fill, true, fill_transform)?;

    // Treat a zero-thickness stroke as no stroke at all.
    let stroke = text.item.stroke.as_ref().and_then(|stroke| {
        if stroke.thickness.to_f32() > 0.0 {
            Some(stroke)
        } else {
            None
        }
    });

    if let Some(stroke) = stroke {
        ctx.set_stroke(stroke, true, fill_transform)?;
        ctx.set_text_rendering_mode(TextRenderingMode::FillStroke);
    } else {
        ctx.set_text_rendering_mode(TextRenderingMode::Fill);
    }

    ctx.set_font(&text.item.font, text.item.size);
    ctx.set_opacities(text.item.stroke.as_ref(), Some(&text.item.fill));
    ctx.content.begin_text();

    // Position the text.
    ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);

    let mut positioned = ctx.content.show_positioned();
    let mut items = positioned.items();
    // Pending kerning adjustment, in font units.
    let mut adjustment = Em::zero();
    // Glyph bytes collected since the last adjustment.
    let mut encoded = vec![];

    let glyph_remapper = ctx
        .resources
        .glyph_remappers
        .entry(text.item.font.clone())
        .or_default();

    // Write the glyphs with kerning adjustments.
    for glyph in text.glyphs() {
        // In PDF/A mode, a tofu (missing) glyph is a hard error.
        if ctx.options.standards.pdfa && glyph.id == 0 {
            bail!(tofu(&text, glyph));
        }

        adjustment += glyph.x_offset;

        // Flush the glyphs collected so far and emit the adjustment
        // before the next glyph.
        if !adjustment.is_zero() {
            if !encoded.is_empty() {
                show_text(&mut items, &encoded);
                encoded.clear();
            }

            items.adjust(-adjustment.to_font_units());
            adjustment = Em::zero();
        }

        // In PDF, we use CIDs to index the glyphs in a font, not GIDs. What a
        // CID actually refers to depends on the type of font we are embedding:
        //
        // - For TrueType fonts, the CIDs are defined by an external mapping.
        // - For SID-keyed CFF fonts, the CID is the same as the GID in the font.
        // - For CID-keyed CFF fonts, the CID refers to the CID in the font.
        //
        // (See in the PDF-spec for more details on this.)
        //
        // However, in our case:
        // - We use the identity-mapping for TrueType fonts.
        // - SID-keyed fonts will get converted into CID-keyed fonts by the
        //   subsetter.
        // - CID-keyed fonts will be rewritten in a way so that the mapping
        //   between CID and GID is always the identity mapping, regardless of
        //   the mapping before.
        //
        // Because of this, we can always use the remapped GID as the CID,
        // regardless of which type of font we are actually embedding.
        let cid = glyph_remapper.remap(glyph.id);
        // CIDs are written big-endian, two bytes per glyph.
        encoded.push((cid >> 8) as u8);
        encoded.push((cid & 0xff) as u8);

        if let Some(advance) = text.item.font.advance(glyph.id) {
            adjustment += glyph.x_advance - advance;
        }

        adjustment -= glyph.x_offset;
    }

    if !encoded.is_empty() {
        show_text(&mut items, &encoded);
    }

    items.finish();
    positioned.finish();
    ctx.content.end_text();

    Ok(())
}
/// Shows text, ensuring that each individual string doesn't exceed the
/// implementation limits.
fn show_text(items: &mut PositionedItems, encoded: &[u8]) {
    encoded
        .chunks(Str::PDFA_LIMIT)
        .for_each(|chunk| items.show(Str(chunk)));
}
/// Encodes a text run made only of color glyphs into the content stream
///
/// Each glyph is drawn via a Type3 color font; the font operator is only
/// re-emitted when the Type3 font changes between glyphs.
fn write_complex_glyphs(
    ctx: &mut Builder,
    pos: Point,
    text: TextItemView,
) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();
    // The last Type3 font that was selected, if any.
    let mut last_font = None;

    ctx.reset_opacities();

    ctx.content.begin_text();
    // Position the text.
    ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);
    // So that the next call to ctx.set_font() will change the font to one that
    // displays regular glyphs and not color glyphs.
    ctx.state.font = None;

    // Record the source text each color glyph of this font represents.
    let glyph_set = ctx
        .resources
        .color_glyph_sets
        .entry(text.item.font.clone())
        .or_default();

    for glyph in text.glyphs() {
        // In PDF/A mode, a tofu (missing) glyph is a hard error.
        if ctx.options.standards.pdfa && glyph.id == 0 {
            bail!(tofu(&text, glyph));
        }

        // Retrieve the Type3 font reference and the glyph index in the font.
        let color_fonts = ctx
            .resources
            .color_fonts
            .get_or_insert_with(|| Box::new(ColorFontMap::new()));

        let (font, index) = color_fonts.get(ctx.options, &text, glyph)?;

        // Only switch fonts when necessary.
        if last_font != Some(font) {
            ctx.content.set_font(
                Name(eco_format!("Cf{}", font).as_bytes()),
                text.item.size.to_f32(),
            );
            last_font = Some(font);
        }

        ctx.content.show(Str(&[index]));

        glyph_set.entry(glyph.id).or_insert_with(|| text.glyph_text(glyph));
    }
    ctx.content.end_text();

    Ok(())
}
/// Encode a geometrical shape into the content stream.
///
/// Sets up fill and stroke state, constructs the shape's path, and emits
/// the painting operator matching the fill rule and stroke presence.
fn write_shape(ctx: &mut Builder, pos: Point, shape: &Shape) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();

    // Treat a zero-thickness stroke as no stroke at all.
    let stroke = shape.stroke.as_ref().and_then(|stroke| {
        if stroke.thickness.to_f32() > 0.0 {
            Some(stroke)
        } else {
            None
        }
    });

    // Nothing to paint.
    if shape.fill.is_none() && stroke.is_none() {
        return Ok(());
    }

    if let Some(fill) = &shape.fill {
        ctx.set_fill(fill, false, ctx.state.transforms(shape.geometry.bbox_size(), pos))?;
    }

    if let Some(stroke) = stroke {
        ctx.set_stroke(
            stroke,
            false,
            ctx.state.transforms(shape.geometry.bbox_size(), pos),
        )?;
    }

    ctx.set_opacities(stroke, shape.fill.as_ref());

    // Construct the path for the shape's geometry.
    match &shape.geometry {
        Geometry::Line(target) => {
            let dx = target.x.to_f32();
            let dy = target.y.to_f32();
            ctx.content.move_to(x, y);
            ctx.content.line_to(x + dx, y + dy);
        }
        Geometry::Rect(size) => {
            let w = size.x.to_f32();
            let h = size.y.to_f32();
            // Skip degenerate rectangles with a (nearly) zero dimension.
            if w.abs() > f32::EPSILON && h.abs() > f32::EPSILON {
                ctx.content.rect(x, y, w, h);
            }
        }
        Geometry::Curve(curve) => {
            write_curve(ctx, x, y, curve);
        }
    }

    // Paint the path. The `(None, _, None)` case was ruled out by the
    // early return above.
    match (&shape.fill, &shape.fill_rule, stroke) {
        (None, _, None) => unreachable!(),
        (Some(_), FillRule::NonZero, None) => ctx.content.fill_nonzero(),
        (Some(_), FillRule::EvenOdd, None) => ctx.content.fill_even_odd(),
        (None, _, Some(_)) => ctx.content.stroke(),
        (Some(_), FillRule::NonZero, Some(_)) => ctx.content.fill_nonzero_and_stroke(),
        (Some(_), FillRule::EvenOdd, Some(_)) => ctx.content.fill_even_odd_and_stroke(),
    };

    Ok(())
}
/// Encode a curve into the content stream.
fn write_curve(ctx: &mut Builder, x: f32, y: f32, curve: &Curve) {
for elem in &curve.0 {
match elem {
CurveItem::Move(p) => ctx.content.move_to(x + p.x.to_f32(), y + p.y.to_f32()),
CurveItem::Line(p) => ctx.content.line_to(x + p.x.to_f32(), y + p.y.to_f32()),
CurveItem::Cubic(p1, p2, p3) => ctx.content.cubic_to(
x + p1.x.to_f32(),
y + p1.y.to_f32(),
x + p2.x.to_f32(),
y + p2.y.to_f32(),
x + p3.x.to_f32(),
y + p3.y.to_f32(),
),
CurveItem::Close => ctx.content.close_path(),
};
}
}
/// Encode a vector or raster image into the content stream.
///
/// The actual image data is encoded lazily via `deferred_images`; here we
/// only register the image and reference it as an XObject, wrapped in a
/// marked-content sequence when alt text is present.
fn write_image(
    ctx: &mut Builder,
    x: f32,
    y: f32,
    image: &Image,
    size: Size,
    span: Span,
) -> SourceResult<()> {
    let index = ctx.resources.images.insert(image.clone());
    // Register the image for deferred encoding (only once per image).
    ctx.resources.deferred_images.entry(index).or_insert_with(|| {
        let (image, color_space) =
            deferred_image(image.clone(), ctx.options.standards.pdfa);
        if let Some(color_space) = color_space {
            ctx.resources.colors.mark_as_used(color_space);
        }
        (image, span)
    });

    ctx.reset_opacities();

    let name = eco_format!("Im{index}");
    let w = size.x.to_f32();
    let h = size.y.to_f32();
    ctx.content.save_state_checked()?;
    // Scale the image's unit square to the target size and flip the y axis.
    ctx.content.transform([w, 0.0, 0.0, -h, x, y + h]);

    if let Some(alt) = image.alt() {
        // PDF/A limits the length of strings, including alt text.
        if ctx.options.standards.pdfa && alt.len() > Str::PDFA_LIMIT {
            bail!(span, "the image's alt text is too long");
        }

        // Attach the alt text via a marked-content span around the XObject.
        let mut image_span =
            ctx.content.begin_marked_content_with_properties(Name(b"Span"));
        let mut image_alt = image_span.properties();
        image_alt.pair(Name(b"Alt"), Str(alt.as_bytes()));
        image_alt.finish();
        image_span.finish();

        ctx.content.x_object(Name(name.as_bytes()));
        ctx.content.end_marked_content();
    } else {
        ctx.content.x_object(Name(name.as_bytes()));
    }

    ctx.content.restore_state();

    Ok(())
}
/// Save a link for later writing in the annotations dictionary.
///
/// The link's rectangle is the axis-aligned bounding box of its four
/// transformed corners.
fn write_link(ctx: &mut Builder, pos: Point, dest: &Destination, size: Size) {
    // The four corners of the untransformed link area.
    let corners = [
        pos,
        pos + Point::with_x(size.x),
        pos + Point::with_y(size.y),
        pos + size.to_point(),
    ];

    // Compute the bounding box of the transformed link.
    let mut min_x = Abs::inf();
    let mut min_y = Abs::inf();
    let mut max_x = -Abs::inf();
    let mut max_y = -Abs::inf();
    for corner in corners {
        let mapped = corner.transform(ctx.state.transform);
        min_x.set_min(mapped.x);
        min_y.set_min(mapped.y);
        max_x.set_max(mapped.x);
        max_y.set_max(mapped.y);
    }

    // Note that the first y coordinate receives the maximum and the second
    // the minimum value.
    let rect = Rect::new(
        min_x.to_f32(),
        max_y.to_f32(),
        max_x.to_f32(),
        min_y.to_f32(),
    );

    ctx.links.push((dest.clone(), rect));
}
fn to_pdf_line_cap(cap: LineCap) -> LineCapStyle {
match cap {
LineCap::Butt => LineCapStyle::ButtCap,
LineCap::Round => LineCapStyle::RoundCap,
LineCap::Square => LineCapStyle::ProjectingSquareCap,
}
}
fn to_pdf_line_join(join: LineJoin) -> LineJoinStyle {
match join {
LineJoin::Miter => LineJoinStyle::MiterJoin,
LineJoin::Round => LineJoinStyle::RoundJoin,
LineJoin::Bevel => LineJoinStyle::BevelJoin,
}
}
/// The error when there is a tofu glyph.
///
/// Marked `#[cold]` because it is only reached on the PDF/A error path
/// (all callers check `ctx.options.standards.pdfa` first).
#[cold]
fn tofu(text: &TextItemView, glyph: &Glyph) -> SourceDiagnostic {
    error!(
        glyph.span.0,
        "the text {} could not be displayed with any font",
        text.glyph_text(glyph).repr(),
    )
}

View File

@ -1,661 +0,0 @@
use std::collections::{BTreeMap, HashMap, HashSet};
use std::num::NonZeroU64;
use ecow::{eco_format, EcoVec};
use krilla::annotation::Annotation;
use krilla::configure::{Configuration, ValidationError, Validator};
use krilla::destination::{NamedDestination, XyzDestination};
use krilla::embed::EmbedError;
use krilla::error::KrillaError;
use krilla::geom::PathBuilder;
use krilla::page::{PageLabel, PageSettings};
use krilla::surface::Surface;
use krilla::{Document, SerializeSettings};
use krilla_svg::render_svg_glyph;
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::NativeElement;
use typst_library::introspection::Location;
use typst_library::layout::{
Abs, Frame, FrameItem, GroupItem, PagedDocument, Size, Transform,
};
use typst_library::model::HeadingElem;
use typst_library::text::{Font, Lang};
use typst_library::visualize::{Geometry, Paint};
use typst_syntax::Span;
use crate::embed::embed_files;
use crate::image::handle_image;
use crate::link::handle_link;
use crate::metadata::build_metadata;
use crate::outline::build_outline;
use crate::page::PageLabelExt;
use crate::shape::handle_shape;
use crate::text::handle_text;
use crate::util::{convert_path, display_font, AbsExt, TransformExt};
use crate::PdfOptions;
#[typst_macros::time(name = "convert document")]
pub fn convert(
typst_document: &PagedDocument,
options: &PdfOptions,
) -> SourceResult<Vec<u8>> {
let settings = SerializeSettings {
compress_content_streams: true,
no_device_cs: true,
ascii_compatible: false,
xmp_metadata: true,
cmyk_profile: None,
configuration: options.standards.config,
enable_tagging: false,
render_svg_glyph_fn: render_svg_glyph,
};
let mut document = Document::new_with(settings);
let page_index_converter = PageIndexConverter::new(typst_document, options);
let named_destinations =
collect_named_destinations(typst_document, &page_index_converter);
let mut gc = GlobalContext::new(
typst_document,
options,
named_destinations,
page_index_converter,
);
convert_pages(&mut gc, &mut document)?;
embed_files(typst_document, &mut document)?;
document.set_outline(build_outline(&gc));
document.set_metadata(build_metadata(&gc));
finish(document, gc, options.standards.config)
}
/// Converts all pages selected for export and appends them to the krilla
/// document.
///
/// Pages excluded via the page ranges are skipped entirely; when pages
/// were skipped, exported pages without a numbering receive an arabic
/// page label carrying their real page number.
fn convert_pages(gc: &mut GlobalContext, document: &mut Document) -> SourceResult<()> {
    for (i, typst_page) in gc.document.pages.iter().enumerate() {
        // Don't export this page.
        if gc.page_index_converter.pdf_page_index(i).is_none() {
            continue;
        }

        let mut settings = PageSettings::new(
            typst_page.frame.width().to_f32(),
            typst_page.frame.height().to_f32(),
        );

        if let Some(label) = typst_page
            .numbering
            .as_ref()
            .and_then(|num| PageLabel::generate(num, typst_page.number))
            .or_else(|| {
                // When some pages were ignored from export, we show a page label with
                // the correct real (not logical) page number.
                // This is for consistency with normal output when pages have no numbering
                // and all are exported: the final PDF page numbers always correspond to
                // the real (not logical) page numbers. Here, the final PDF page number
                // will differ, but we can at least use labels to indicate what was
                // the corresponding real page number in the Typst document.
                gc.page_index_converter
                    .has_skipped_pages()
                    .then(|| PageLabel::arabic((i + 1) as u64))
            })
        {
            settings = settings.with_page_label(label);
        }

        let mut page = document.start_page_with(settings);
        let mut surface = page.surface();
        let mut fc = FrameContext::new(typst_page.frame.size());

        handle_frame(
            &mut fc,
            &typst_page.frame,
            typst_page.fill_or_transparent(),
            &mut surface,
            gc,
        )?;

        surface.finish();

        // Add the annotations collected while converting this page.
        for annotation in fc.annotations {
            page.add_annotation(annotation);
        }
    }

    Ok(())
}
/// A state allowing us to keep track of transforms and container sizes,
/// which is mainly needed to resolve gradients and patterns correctly.
#[derive(Debug, Clone)]
pub(crate) struct State {
    /// The current transform.
    transform: Transform,
    /// The transform of first hard frame in the hierarchy.
    ///
    /// Updated via `register_container` whenever a hard frame is entered.
    container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    container_size: Size,
}
impl State {
    /// Creates a new, clean state for a given `size`.
    fn new(size: Size) -> Self {
        let identity = Transform::identity();
        Self {
            transform: identity,
            container_transform: identity,
            container_size: size,
        }
    }

    /// Makes the current frame the reference container by capturing the
    /// current transform and the given size.
    pub(crate) fn register_container(&mut self, size: Size) {
        self.container_transform = self.transform;
        self.container_size = size;
    }

    /// Applies `transform` before the current transform.
    pub(crate) fn pre_concat(&mut self, transform: Transform) {
        self.transform = self.transform.pre_concat(transform);
    }

    /// The current transform.
    pub(crate) fn transform(&self) -> Transform {
        self.transform
    }

    /// The transform of the current container frame.
    pub(crate) fn container_transform(&self) -> Transform {
        self.container_transform
    }

    /// The size of the current container frame.
    pub(crate) fn container_size(&self) -> Size {
        self.container_size
    }
}
/// Context needed for converting a single frame.
pub(crate) struct FrameContext {
    /// Stack of graphics states; the last entry is the active one.
    states: Vec<State>,
    /// Annotations collected while converting the frame, added to the
    /// page after the frame has been handled.
    annotations: Vec<Annotation>,
}
impl FrameContext {
    /// Creates a frame context with a single clean state of the given size.
    pub(crate) fn new(size: Size) -> Self {
        Self { states: vec![State::new(size)], annotations: Vec::new() }
    }

    /// Saves the current state by pushing a copy of it onto the stack.
    pub(crate) fn push(&mut self) {
        let top = self.states.last().unwrap().clone();
        self.states.push(top);
    }

    /// Restores the previously saved state.
    pub(crate) fn pop(&mut self) {
        self.states.pop();
    }

    /// The currently active state.
    pub(crate) fn state(&self) -> &State {
        self.states.last().unwrap()
    }

    /// The currently active state, mutably.
    pub(crate) fn state_mut(&mut self) -> &mut State {
        self.states.last_mut().unwrap()
    }

    /// Registers an annotation collected for the current page.
    pub(crate) fn push_annotation(&mut self, annotation: Annotation) {
        self.annotations.push(annotation);
    }
}
/// Globally needed context for converting a typst document.
pub(crate) struct GlobalContext<'a> {
    /// Cache the conversion between krilla and Typst fonts (forward and backward).
    pub(crate) fonts_forward: HashMap<Font, krilla::text::Font>,
    /// Reverse direction of `fonts_forward`, used to resolve krilla fonts
    /// back to Typst fonts (e.g. for error messages).
    pub(crate) fonts_backward: HashMap<krilla::text::Font, Font>,
    /// Mapping between images and their span.
    // Note: In theory, the same image can have multiple spans
    // if it appears in the document multiple times. We just store the
    // first appearance, though.
    pub(crate) image_to_spans: HashMap<krilla::image::Image, Span>,
    /// The spans of all images that appear in the document. We use this so
    /// we can give more accurate error messages.
    pub(crate) image_spans: HashSet<Span>,
    /// The document to convert.
    pub(crate) document: &'a PagedDocument,
    /// Options for PDF export.
    pub(crate) options: &'a PdfOptions<'a>,
    /// Mapping between locations in the document and named destinations.
    pub(crate) loc_to_names: HashMap<Location, NamedDestination>,
    /// The languages used throughout the document.
    pub(crate) languages: BTreeMap<Lang, usize>,
    /// Maps Typst page indices to PDF page indices, accounting for pages
    /// that are excluded from export.
    pub(crate) page_index_converter: PageIndexConverter,
}
impl<'a> GlobalContext<'a> {
    /// Creates a fresh global context with empty caches.
    pub(crate) fn new(
        document: &'a PagedDocument,
        options: &'a PdfOptions,
        loc_to_names: HashMap<Location, NamedDestination>,
        page_index_converter: PageIndexConverter,
    ) -> GlobalContext<'a> {
        Self {
            fonts_forward: HashMap::new(),
            fonts_backward: HashMap::new(),
            image_to_spans: HashMap::new(),
            image_spans: HashSet::new(),
            languages: BTreeMap::new(),
            document,
            options,
            loc_to_names,
            page_index_converter,
        }
    }
}
#[typst_macros::time(name = "handle page")]
pub(crate) fn handle_frame(
fc: &mut FrameContext,
frame: &Frame,
fill: Option<Paint>,
surface: &mut Surface,
gc: &mut GlobalContext,
) -> SourceResult<()> {
fc.push();
if frame.kind().is_hard() {
fc.state_mut().register_container(frame.size());
}
if let Some(fill) = fill {
let shape = Geometry::Rect(frame.size()).filled(fill);
handle_shape(fc, &shape, surface, gc, Span::detached())?;
}
for (point, item) in frame.items() {
fc.push();
fc.state_mut().pre_concat(Transform::translate(point.x, point.y));
match item {
FrameItem::Group(g) => handle_group(fc, g, surface, gc)?,
FrameItem::Text(t) => handle_text(fc, t, surface, gc)?,
FrameItem::Shape(s, span) => handle_shape(fc, s, surface, gc, *span)?,
FrameItem::Image(image, size, span) => {
handle_image(gc, fc, image, *size, surface, *span)?
}
FrameItem::Link(d, s) => handle_link(fc, gc, d, *s),
FrameItem::Tag(_) => {}
}
fc.pop();
}
fc.pop();
Ok(())
}
/// Converts a group item by applying its transform and optional clip path
/// before handling its frame.
pub(crate) fn handle_group(
    fc: &mut FrameContext,
    group: &GroupItem,
    surface: &mut Surface,
    context: &mut GlobalContext,
) -> SourceResult<()> {
    fc.push();
    fc.state_mut().pre_concat(group.transform);

    // Convert the clip curve (if any) into a krilla path in the current
    // coordinate system; `None` if path building or transforming fails.
    let clip_path = group
        .clip
        .as_ref()
        .and_then(|p| {
            let mut builder = PathBuilder::new();
            convert_path(p, &mut builder);
            builder.finish()
        })
        .and_then(|p| p.transform(fc.state().transform.to_krilla()));

    if let Some(clip_path) = &clip_path {
        surface.push_clip_path(clip_path, &krilla::paint::FillRule::NonZero);
    }

    handle_frame(fc, &group.frame, None, surface, context)?;

    // Balance the clip push above.
    if clip_path.is_some() {
        surface.pop();
    }

    fc.pop();

    Ok(())
}
#[typst_macros::time(name = "finish export")]
/// Finish a krilla document and handle export errors.
fn finish(
document: Document,
gc: GlobalContext,
configuration: Configuration,
) -> SourceResult<Vec<u8>> {
let validator = configuration.validator();
match document.finish() {
Ok(r) => Ok(r),
Err(e) => match e {
KrillaError::Font(f, s) => {
let font_str = display_font(gc.fonts_backward.get(&f).unwrap());
bail!(
Span::detached(),
"failed to process font {font_str}: {s}";
hint: "make sure the font is valid";
hint: "the used font might be unsupported by Typst"
);
}
KrillaError::Validation(ve) => {
let errors = ve
.iter()
.map(|e| convert_error(&gc, validator, e))
.collect::<EcoVec<_>>();
Err(errors)
}
KrillaError::Image(_, loc) => {
let span = to_span(loc);
bail!(span, "failed to process image");
}
KrillaError::SixteenBitImage(image, _) => {
let span = gc.image_to_spans.get(&image).unwrap();
bail!(
*span, "16 bit images are not supported in this export mode";
hint: "convert the image to 8 bit instead"
)
}
},
}
}
/// Converts a krilla error into a Typst error.
///
/// The `validator` provides the name of the targeted standard for the
/// error prefix; spans are attached where krilla reported a location.
fn convert_error(
    gc: &GlobalContext,
    validator: Validator,
    error: &ValidationError,
) -> SourceDiagnostic {
    let prefix = eco_format!("{} error:", validator.as_str());
    match error {
        ValidationError::TooLongString => error!(
            Span::detached(),
            "{prefix} a PDF string is longer than 32767 characters";
            hint: "ensure title and author names are short enough"
        ),
        // Should in theory never occur, as krilla always trims font names.
        ValidationError::TooLongName => error!(
            Span::detached(),
            "{prefix} a PDF name is longer than 127 characters";
            hint: "perhaps a font name is too long"
        ),
        ValidationError::TooLongArray => error!(
            Span::detached(),
            "{prefix} a PDF array is longer than 8191 elements";
            hint: "this can happen if you have a very long text in a single line"
        ),
        ValidationError::TooLongDictionary => error!(
            Span::detached(),
            "{prefix} a PDF dictionary has more than 4095 entries";
            hint: "try reducing the complexity of your document"
        ),
        ValidationError::TooLargeFloat => error!(
            Span::detached(),
            "{prefix} a PDF floating point number is larger than the allowed limit";
            hint: "try exporting with a higher PDF version"
        ),
        ValidationError::TooManyIndirectObjects => error!(
            Span::detached(),
            "{prefix} the PDF has too many indirect objects";
            hint: "reduce the size of your document"
        ),
        // Can only occur if we have 27+ nested clip paths
        ValidationError::TooHighQNestingLevel => error!(
            Span::detached(),
            "{prefix} the PDF has too high q nesting";
            hint: "reduce the number of nested containers"
        ),
        ValidationError::ContainsPostScript(loc) => error!(
            to_span(*loc),
            "{prefix} the PDF contains PostScript code";
            hint: "conic gradients are not supported in this PDF standard"
        ),
        ValidationError::MissingCMYKProfile => error!(
            Span::detached(),
            "{prefix} the PDF is missing a CMYK profile";
            hint: "CMYK colors are not yet supported in this export mode"
        ),
        ValidationError::ContainsNotDefGlyph(f, loc, text) => error!(
            to_span(*loc),
            "{prefix} the text '{text}' cannot be displayed using {}",
            display_font(gc.fonts_backward.get(f).unwrap());
            hint: "try using a different font"
        ),
        ValidationError::InvalidCodepointMapping(_, _, cp, loc) => {
            if let Some(c) = cp.map(|c| eco_format!("{:#06x}", c as u32)) {
                // Tailor the message to whether we can point at a location.
                let msg = if loc.is_some() {
                    "the PDF contains text with"
                } else {
                    "the text contains"
                };
                error!(to_span(*loc), "{prefix} {msg} the disallowed codepoint {c}")
            } else {
                // I think this code path is in theory unreachable,
                // but just to be safe.
                let msg = if loc.is_some() {
                    "the PDF contains text with missing codepoints"
                } else {
                    "the text was not mapped to a code point"
                };
                error!(
                    to_span(*loc),
                    "{prefix} {msg}";
                    hint: "for complex scripts like Arabic, it might not be \
                           possible to produce a compliant document"
                )
            }
        }
        ValidationError::UnicodePrivateArea(_, _, c, loc) => {
            let code_point = eco_format!("{:#06x}", *c as u32);
            let msg = if loc.is_some() { "the PDF" } else { "the text" };
            error!(
                to_span(*loc),
                "{prefix} {msg} contains the codepoint {code_point}";
                hint: "codepoints from the Unicode private area are \
                       forbidden in this export mode"
            )
        }
        ValidationError::Transparency(loc) => {
            let span = to_span(*loc);
            let hint1 = "try exporting with a different standard that \
                         supports transparency";
            if loc.is_some() {
                // Distinguish image transparency from fill/stroke
                // transparency via the recorded image spans.
                if gc.image_spans.contains(&span) {
                    error!(
                        span, "{prefix} the image contains transparency";
                        hint: "{hint1}";
                        hint: "or convert the image to a non-transparent one";
                        hint: "you might have to convert SVGs into \
                               non-transparent bitmap images"
                    )
                } else {
                    error!(
                        span, "{prefix} the used fill or stroke has transparency";
                        hint: "{hint1}";
                        hint: "or don't use colors with transparency in \
                               this export mode"
                    )
                }
            } else {
                error!(
                    span, "{prefix} the PDF contains transparency";
                    hint: "{hint1}"
                )
            }
        }
        ValidationError::ImageInterpolation(loc) => {
            let span = to_span(*loc);
            if loc.is_some() {
                error!(
                    span, "{prefix} the image has smooth scaling";
                    hint: "set the `scaling` attribute to `pixelated`"
                )
            } else {
                error!(
                    span, "{prefix} an image in the PDF has smooth scaling";
                    hint: "set the `scaling` attribute of all images to `pixelated`"
                )
            }
        }
        ValidationError::EmbeddedFile(e, s) => {
            // We always set the span for embedded files, so it cannot be detached.
            let span = to_span(*s);
            match e {
                EmbedError::Existence => {
                    error!(
                        span, "{prefix} document contains an embedded file";
                        hint: "embedded files are not supported in this export mode"
                    )
                }
                EmbedError::MissingDate => {
                    error!(
                        span, "{prefix} document date is missing";
                        hint: "the document must have a date when embedding files";
                        hint: "`set document(date: none)` must not be used in this case"
                    )
                }
                EmbedError::MissingDescription => {
                    error!(span, "{prefix} the file description is missing")
                }
                EmbedError::MissingMimeType => {
                    error!(span, "{prefix} the file mime type is missing")
                }
            }
        }
        // The below errors cannot occur yet, only once Typst supports full PDF/A
        // and PDF/UA. But let's still add a message just to be on the safe side.
        ValidationError::MissingAnnotationAltText => error!(
            Span::detached(),
            "{prefix} missing annotation alt text";
            hint: "please report this as a bug"
        ),
        ValidationError::MissingAltText => error!(
            Span::detached(),
            "{prefix} missing alt text";
            hint: "make sure your images and equations have alt text"
        ),
        ValidationError::NoDocumentLanguage => error!(
            Span::detached(),
            "{prefix} missing document language";
            hint: "set the language of the document"
        ),
        // Needs to be set by typst-pdf.
        ValidationError::MissingHeadingTitle => error!(
            Span::detached(),
            "{prefix} missing heading title";
            hint: "please report this as a bug"
        ),
        ValidationError::MissingDocumentOutline => error!(
            Span::detached(),
            "{prefix} missing document outline";
            hint: "please report this as a bug"
        ),
        ValidationError::MissingTagging => error!(
            Span::detached(),
            "{prefix} missing document tags";
            hint: "please report this as a bug"
        ),
        ValidationError::NoDocumentTitle => error!(
            Span::detached(),
            "{prefix} missing document title";
            hint: "set the title of the document"
        ),
        ValidationError::MissingDocumentDate => error!(
            Span::detached(),
            "{prefix} missing document date";
            hint: "set the date of the document"
        ),
    }
}
/// Convert a krilla location to a span.
///
/// Locations are raw span numbers; a missing location maps to a detached
/// span.
fn to_span(loc: Option<krilla::surface::Location>) -> Span {
    match loc {
        Some(l) => Span::from_raw(NonZeroU64::new(l).unwrap()),
        None => Span::detached(),
    }
}
/// Collects a named destination for every uniquely labeled heading whose
/// page is exported.
fn collect_named_destinations(
    document: &PagedDocument,
    pic: &PageIndexConverter,
) -> HashMap<Location, NamedDestination> {
    let mut locs_to_names = HashMap::new();

    // Find all headings that have a label and are the first among other
    // headings with the same label.
    let matches: Vec<_> = {
        let mut seen = HashSet::new();
        document
            .introspector
            .query(&HeadingElem::elem().select())
            .iter()
            .filter_map(|elem| elem.location().zip(elem.label()))
            .filter(|&(_, label)| seen.insert(label))
            .collect()
    };

    for (loc, label) in matches {
        let pos = document.introspector.position(loc);
        // Pages are 1-based, page indices 0-based.
        let index = pos.page.get() - 1;
        // We are subtracting 10 because the position of links e.g. to headings is always at the
        // baseline and if you link directly to it, the text will not be visible
        // because it is right above.
        let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());

        // Only add named destination if page belonging to the position is exported.
        if let Some(index) = pic.pdf_page_index(index) {
            let named = NamedDestination::new(
                label.resolve().to_string(),
                XyzDestination::new(
                    index,
                    krilla::geom::Point::from_xy(pos.point.x.to_f32(), y.to_f32()),
                ),
            );
            locs_to_names.insert(loc, named);
        }
    }

    locs_to_names
}
/// Maps Typst page indices to PDF page indices when some pages are
/// excluded from export.
pub(crate) struct PageIndexConverter {
    // Typst page index -> PDF page index; contains only exported pages.
    page_indices: HashMap<usize, usize>,
    // The number of pages that were excluded from export.
    skipped_pages: usize,
}
impl PageIndexConverter {
    /// Scans the document's pages and records, for each exported page,
    /// its index in the resulting PDF.
    pub fn new(document: &PagedDocument, options: &PdfOptions) -> Self {
        let mut page_indices = HashMap::new();
        let mut skipped_pages = 0;

        for i in 0..document.pages.len() {
            // A page is excluded if page ranges are given and none of
            // them includes it.
            let excluded = options
                .page_ranges
                .as_ref()
                .is_some_and(|ranges| !ranges.includes_page_index(i));

            if excluded {
                skipped_pages += 1;
            } else {
                page_indices.insert(i, i - skipped_pages);
            }
        }

        Self { page_indices, skipped_pages }
    }

    /// Whether any pages were excluded from export.
    pub(crate) fn has_skipped_pages(&self) -> bool {
        self.skipped_pages > 0
    }

    /// Get the PDF page index of a page index, if it's not excluded.
    pub(crate) fn pdf_page_index(&self, page_index: usize) -> Option<usize> {
        self.page_indices.get(&page_index).copied()
    }
}

View File

@ -1,122 +1,122 @@
use std::sync::Arc;
use std::collections::BTreeMap;
use krilla::embed::{AssociationKind, EmbeddedFile};
use krilla::Document;
use ecow::EcoString;
use pdf_writer::types::AssociationKind;
use pdf_writer::{Filter, Finish, Name, Ref, Str, TextStr};
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::{NativeElement, StyleChain};
use typst_library::layout::PagedDocument;
use typst_library::foundations::{NativeElement, Packed, StyleChain};
use typst_library::pdf::{EmbedElem, EmbeddedFileRelationship};
pub(crate) fn embed_files(
typst_doc: &PagedDocument,
document: &mut Document,
) -> SourceResult<()> {
let elements = typst_doc.introspector.query(&EmbedElem::elem().select());
use crate::catalog::{document_date, pdf_date};
use crate::{deflate, NameExt, PdfChunk, StrExt, WithGlobalRefs};
/// Query for all [`EmbedElem`] and write them and their file specifications.
///
/// This returns a map of embedding names and references so that we can later
/// add them to the catalog's `/Names` dictionary.
pub fn write_embedded_files(
ctx: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, BTreeMap<EcoString, Ref>)> {
let mut chunk = PdfChunk::new();
let mut embedded_files = BTreeMap::default();
let elements = ctx.document.introspector.query(&EmbedElem::elem().select());
for elem in &elements {
if !ctx.options.standards.embedded_files {
// PDF/A-2 requires embedded files to be PDF/A-1 or PDF/A-2,
// which we don't currently check.
bail!(
elem.span(),
"file embeddings are not currently supported for PDF/A-2";
hint: "PDF/A-3 supports arbitrary embedded files"
);
}
let embed = elem.to_packed::<EmbedElem>().unwrap();
let span = embed.span();
let derived_path = &embed.path.derived;
let path = derived_path.to_string();
let mime_type =
embed.mime_type(StyleChain::default()).clone().map(|s| s.to_string());
let description = embed
.description(StyleChain::default())
.clone()
.map(|s| s.to_string());
let association_kind = match embed.relationship(StyleChain::default()) {
None => AssociationKind::Unspecified,
Some(e) => match e {
EmbeddedFileRelationship::Source => AssociationKind::Source,
EmbeddedFileRelationship::Data => AssociationKind::Data,
EmbeddedFileRelationship::Alternative => AssociationKind::Alternative,
EmbeddedFileRelationship::Supplement => AssociationKind::Supplement,
},
};
let data: Arc<dyn AsRef<[u8]> + Send + Sync> = Arc::new(embed.data.clone());
// TODO: update when new krilla version lands (https://github.com/LaurenzV/krilla/pull/203)
let compress = should_compress(&embed.data).unwrap_or(true);
if embed.path.derived.len() > Str::PDFA_LIMIT {
bail!(embed.span(), "embedded file path is too long");
}
let file = EmbeddedFile {
path,
mime_type,
description,
association_kind,
data: data.into(),
compress,
location: Some(span.into_raw().get()),
};
if document.embed_file(file).is_none() {
bail!(span, "attempted to embed file {derived_path} twice");
let id = embed_file(ctx, &mut chunk, embed)?;
if embedded_files.insert(embed.path.derived.clone(), id).is_some() {
bail!(
elem.span(),
"duplicate embedded file for path `{}`", embed.path.derived;
hint: "embedded file paths must be unique",
);
}
}
Ok(())
Ok((chunk, embedded_files))
}
fn should_compress(data: &[u8]) -> Option<bool> {
let ty = infer::get(data)?;
match ty.matcher_type() {
infer::MatcherType::App => None,
infer::MatcherType::Archive => match ty.mime_type() {
#[rustfmt::skip]
"application/zip"
| "application/vnd.rar"
| "application/gzip"
| "application/x-bzip2"
| "application/vnd.bzip3"
| "application/x-7z-compressed"
| "application/x-xz"
| "application/vnd.ms-cab-compressed"
| "application/vnd.debian.binary-package"
| "application/x-compress"
| "application/x-lzip"
| "application/x-rpm"
| "application/zstd"
| "application/x-lz4"
| "application/x-ole-storage" => Some(false),
_ => None,
},
infer::MatcherType::Audio => match ty.mime_type() {
#[rustfmt::skip]
"audio/mpeg"
| "audio/m4a"
| "audio/opus"
| "audio/ogg"
| "audio/x-flac"
| "audio/amr"
| "audio/aac"
| "audio/x-ape" => Some(false),
_ => None,
},
infer::MatcherType::Book => None,
infer::MatcherType::Doc => None,
infer::MatcherType::Font => None,
infer::MatcherType::Image => match ty.mime_type() {
#[rustfmt::skip]
"image/jpeg"
| "image/jp2"
| "image/png"
| "image/webp"
| "image/vnd.ms-photo"
| "image/heif"
| "image/avif"
| "image/jxl"
| "image/vnd.djvu" => None,
_ => None,
},
infer::MatcherType::Text => None,
infer::MatcherType::Video => match ty.mime_type() {
#[rustfmt::skip]
"video/mp4"
| "video/x-m4v"
| "video/x-matroska"
| "video/webm"
| "video/quicktime"
| "video/x-flv" => Some(false),
_ => None,
},
infer::MatcherType::Custom => None,
/// Write the embedded file stream and its file specification into the PDF,
/// returning the reference of the file specification dictionary.
///
/// Enforces the PDF/A-3 constraints on embedded files (mandatory MIME type,
/// mandatory document date, string/name length limits) when PDF/A validation
/// is enabled in the export options.
fn embed_file(
    ctx: &WithGlobalRefs,
    chunk: &mut PdfChunk,
    embed: &Packed<EmbedElem>,
) -> SourceResult<Ref> {
    // Allocate references for the stream and its file specification up front.
    let embedded_file_stream_ref = chunk.alloc.bump();
    let file_spec_dict_ref = chunk.alloc.bump();

    // The file contents are always flate-compressed.
    let data = embed.data.as_slice();
    let compressed = deflate(data);

    let mut embedded_file = chunk.embedded_file(embedded_file_stream_ref, &compressed);
    embedded_file.filter(Filter::FlateDecode);

    // A MIME type is optional in plain PDF, but required by PDF/A-3.
    if let Some(mime_type) = embed.mime_type(StyleChain::default()) {
        if mime_type.len() > Name::PDFA_LIMIT {
            bail!(embed.span(), "embedded file MIME type is too long");
        }
        embedded_file.subtype(Name(mime_type.as_bytes()));
    } else if ctx.options.standards.pdfa {
        bail!(embed.span(), "embedded files must have a MIME type in PDF/A-3");
    }

    let mut params = embedded_file.params();
    // Note: the recorded size is that of the *uncompressed* data.
    params.size(data.len() as i32);

    // PDF/A-3 requires a modification date for embedded files.
    let (date, tz) = document_date(ctx.document.info.date, ctx.options.timestamp);
    if let Some(pdf_date) = date.and_then(|date| pdf_date(date, tz)) {
        params.modification_date(pdf_date);
    } else if ctx.options.standards.pdfa {
        bail!(
            embed.span(),
            "the document must have a date when embedding files in PDF/A-3";
            hint: "`set document(date: none)` must not be used in this case"
        );
    }
    params.finish();
    embedded_file.finish();

    // Write the file specification, pointing at the stream through both the
    // legacy `F` entry and the Unicode-capable `UF` entry.
    let mut file_spec = chunk.file_spec(file_spec_dict_ref);
    file_spec.path(Str(embed.path.derived.as_bytes()));
    file_spec.unic_file(TextStr(&embed.path.derived));
    file_spec
        .insert(Name(b"EF"))
        .dict()
        .pair(Name(b"F"), embedded_file_stream_ref)
        .pair(Name(b"UF"), embedded_file_stream_ref);

    if ctx.options.standards.pdfa {
        // PDF 2.0, but ISO 19005-3 (PDF/A-3) Annex E allows it for PDF/A-3.
        file_spec.association_kind(match embed.relationship(StyleChain::default()) {
            Some(EmbeddedFileRelationship::Source) => AssociationKind::Source,
            Some(EmbeddedFileRelationship::Data) => AssociationKind::Data,
            Some(EmbeddedFileRelationship::Alternative) => AssociationKind::Alternative,
            Some(EmbeddedFileRelationship::Supplement) => AssociationKind::Supplement,
            None => AssociationKind::Unspecified,
        });
    }

    // An optional human-readable description, length-limited for PDF/A.
    if let Some(description) = embed.description(StyleChain::default()) {
        if description.len() > Str::PDFA_LIMIT {
            bail!(embed.span(), "embedded file description is too long");
        }
        file_spec.description(TextStr(description));
    }

    Ok(file_spec_dict_ref)
}

View File

@ -0,0 +1,53 @@
use std::collections::HashMap;
use pdf_writer::Ref;
use typst_library::diag::SourceResult;
use crate::{PdfChunk, WithGlobalRefs};
/// A PDF external graphics state.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct ExtGState {
    // In the range 0-255, needs to be divided before being written into the graphics state!
    pub stroke_opacity: u8,
    // In the range 0-255, needs to be divided before being written into the graphics state!
    pub fill_opacity: u8,
}

impl Default for ExtGState {
    /// The fully opaque graphics state.
    fn default() -> Self {
        ExtGState { stroke_opacity: u8::MAX, fill_opacity: u8::MAX }
    }
}

impl ExtGState {
    /// Whether this state differs from full opacity and thus actually needs
    /// to be written out.
    pub fn uses_opacities(&self) -> bool {
        !(self.stroke_opacity == u8::MAX && self.fill_opacity == u8::MAX)
    }
}
/// Embed all used external graphics states into the PDF.
///
/// Returns the chunk containing the state dictionaries together with a map
/// from each state to its object reference.
pub fn write_graphic_states(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<ExtGState, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut refs = HashMap::new();

    context.resources.traverse(&mut |resources| {
        for gs in resources.ext_gs.items() {
            // Each distinct state is written exactly once and shared.
            if refs.contains_key(gs) {
                continue;
            }

            let id = chunk.alloc();
            refs.insert(*gs, id);

            // Opacities are stored as 0-255; PDF expects alphas in 0.0-1.0.
            let mut writer = chunk.ext_graphics(id);
            writer.non_stroking_alpha(f32::from(gs.fill_opacity) / 255.0);
            writer.stroking_alpha(f32::from(gs.stroke_opacity) / 255.0);
        }

        Ok(())
    })?;

    Ok((chunk, refs))
}

View File

@ -0,0 +1,278 @@
use std::collections::{BTreeMap, HashMap};
use std::hash::Hash;
use std::sync::Arc;
use ecow::{eco_format, EcoString};
use pdf_writer::types::{CidFontType, FontFlags, SystemInfo, UnicodeCmap};
use pdf_writer::writers::{FontDescriptor, WMode};
use pdf_writer::{Chunk, Filter, Finish, Name, Rect, Ref, Str};
use subsetter::GlyphRemapper;
use ttf_parser::{name_id, GlyphId, Tag};
use typst_library::diag::{At, SourceResult};
use typst_library::text::Font;
use typst_syntax::Span;
use typst_utils::SliceExt;
use crate::{deflate, EmExt, NameExt, PdfChunk, WithGlobalRefs};
/// Table tag of a CFF font program inside an OpenType font.
const CFF: Tag = Tag::from_bytes(b"CFF ");
/// Table tag of a CFF2 font program.
const CFF2: Tag = Tag::from_bytes(b"CFF2");
/// Length of the pseudo-random subset tag prefixed to base font names.
const SUBSET_TAG_LEN: usize = 6;
/// The predefined identity CMap for horizontal two-byte CIDs.
const IDENTITY_H: &str = "Identity-H";
/// Name of the custom `/ToUnicode` CMap we emit.
pub(crate) const CMAP_NAME: Name = Name(b"Custom");
/// CIDSystemInfo used for our identity-encoded CID fonts.
pub(crate) const SYSTEM_INFO: SystemInfo = SystemInfo {
    registry: Str(b"Adobe"),
    ordering: Str(b"Identity"),
    supplement: 0,
};
/// Embed all used fonts into the PDF.
///
/// Each font becomes five objects: a Type0 (composite) font, its descendant
/// CID font, a font descriptor, the embedded (subsetted) font program, and a
/// `/ToUnicode` CMap for text extraction.
#[typst_macros::time(name = "write fonts")]
pub fn write_fonts(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<Font, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        for font in resources.fonts.items() {
            // Each font is written only once, keyed by the Type0 reference.
            if out.contains_key(font) {
                continue;
            }

            let type0_ref = chunk.alloc();
            let cid_ref = chunk.alloc();
            let descriptor_ref = chunk.alloc();
            let cmap_ref = chunk.alloc();
            let data_ref = chunk.alloc();
            out.insert(font.clone(), type0_ref);

            let glyph_set = resources.glyph_sets.get(font).unwrap();
            let glyph_remapper = resources.glyph_remappers.get(font).unwrap();
            let ttf = font.ttf();

            // Do we have a TrueType or CFF font?
            //
            // FIXME: CFF2 must be handled differently and requires PDF 2.0
            // (or we have to convert it to CFF).
            let is_cff = ttf
                .raw_face()
                .table(CFF)
                .or_else(|| ttf.raw_face().table(CFF2))
                .is_some();

            // CFF fonts additionally carry the encoding in their base name.
            let base_font = base_font_name(font, glyph_set);
            let base_font_type0 = if is_cff {
                eco_format!("{base_font}-{IDENTITY_H}")
            } else {
                base_font.clone()
            };

            // Write the base font object referencing the CID font.
            chunk
                .type0_font(type0_ref)
                .base_font(Name(base_font_type0.as_bytes()))
                .encoding_predefined(Name(IDENTITY_H.as_bytes()))
                .descendant_font(cid_ref)
                .to_unicode(cmap_ref);

            // Write the CID font referencing the font descriptor.
            let mut cid = chunk.cid_font(cid_ref);
            cid.subtype(if is_cff { CidFontType::Type0 } else { CidFontType::Type2 });
            cid.base_font(Name(base_font.as_bytes()));
            cid.system_info(SYSTEM_INFO);
            cid.font_descriptor(descriptor_ref);
            cid.default_width(0.0);
            if !is_cff {
                cid.cid_to_gid_map_predefined(Name(b"Identity"));
            }

            // Extract the widths of all glyphs.
            // `remapped_gids` returns an iterator over the old GIDs in their new sorted
            // order, so we can append the widths as is.
            let widths = glyph_remapper
                .remapped_gids()
                .map(|gid| {
                    let width = ttf.glyph_hor_advance(GlyphId(gid)).unwrap_or(0);
                    font.to_em(width).to_font_units()
                })
                .collect::<Vec<_>>();

            // Write all non-zero glyph widths.
            // Runs of equal widths are written as a single range entry.
            let mut first = 0;
            let mut width_writer = cid.widths();
            for (w, group) in widths.group_by_key(|&w| w) {
                let end = first + group.len();
                if w != 0.0 {
                    let last = end - 1;
                    width_writer.same(first as u16, last as u16, w);
                }
                first = end;
            }

            width_writer.finish();
            cid.finish();

            // Write the /ToUnicode character map, which maps glyph ids back to
            // unicode codepoints to enable copying out of the PDF.
            let cmap = create_cmap(glyph_set, glyph_remapper);
            chunk
                .cmap(cmap_ref, &cmap)
                .writing_mode(WMode::Horizontal)
                .filter(Filter::FlateDecode);

            // Subset and embed the font program itself. Failures are reported
            // with the font's name for easier diagnosis.
            let subset = subset_font(font, glyph_remapper)
                .map_err(|err| {
                    let postscript_name = font.find_name(name_id::POST_SCRIPT_NAME);
                    let name = postscript_name.as_deref().unwrap_or(&font.info().family);
                    eco_format!("failed to process font {name}: {err}")
                })
                .at(Span::detached())?;

            let mut stream = chunk.stream(data_ref, &subset);
            stream.filter(Filter::FlateDecode);
            if is_cff {
                stream.pair(Name(b"Subtype"), Name(b"CIDFontType0C"));
            }
            stream.finish();

            // The descriptor points at the font program through the
            // CFF-specific or TrueType-specific entry.
            let mut font_descriptor =
                write_font_descriptor(&mut chunk, descriptor_ref, font, &base_font);
            if is_cff {
                font_descriptor.font_file3(data_ref);
            } else {
                font_descriptor.font_file2(data_ref);
            }
        }

        Ok(())
    })?;

    Ok((chunk, out))
}
/// Writes a FontDescriptor dictionary.
///
/// The descriptor carries the font-wide metrics and classification flags; the
/// caller attaches the embedded font program to the returned writer.
pub fn write_font_descriptor<'a>(
    pdf: &'a mut Chunk,
    descriptor_ref: Ref,
    font: &'a Font,
    base_font: &str,
) -> FontDescriptor<'a> {
    let ttf = font.ttf();
    let metrics = font.metrics();

    // Converts a value in the font's design units into PDF font units.
    let to_font_units = |value: i16| font.to_em(value).to_font_units();

    // Heuristic: treat the face as serif if its PostScript name says so.
    let is_serif = font
        .find_name(name_id::POST_SCRIPT_NAME)
        .is_some_and(|name| name.contains("Serif"));

    let mut flags = FontFlags::empty();
    flags.set(FontFlags::SERIF, is_serif);
    flags.set(FontFlags::FIXED_PITCH, ttf.is_monospaced());
    flags.set(FontFlags::ITALIC, ttf.is_italic());
    flags.insert(FontFlags::SYMBOLIC);
    flags.insert(FontFlags::SMALL_CAP);

    let global_bbox = ttf.global_bounding_box();
    let bbox = Rect::new(
        to_font_units(global_bbox.x_min),
        to_font_units(global_bbox.y_min),
        to_font_units(global_bbox.x_max),
        to_font_units(global_bbox.y_max),
    );

    let italic_angle = ttf.italic_angle().unwrap_or(0.0);

    // Estimate the dominant vertical stem width from the weight class.
    let stem_v = 10.0 + 0.244 * (f32::from(ttf.weight().to_number()) - 50.0);

    // Write the font descriptor (contains metrics about the font).
    let mut descriptor = pdf.font_descriptor(descriptor_ref);
    descriptor
        .name(Name(base_font.as_bytes()))
        .flags(flags)
        .bbox(bbox)
        .italic_angle(italic_angle)
        .ascent(metrics.ascender.to_font_units())
        .descent(metrics.descender.to_font_units())
        .cap_height(metrics.cap_height.to_font_units())
        .stem_v(stem_v);

    descriptor
}
/// Subset a font to the given glyphs.
///
/// - For a font with TrueType outlines, this produces the whole OpenType font.
/// - For a font with CFF outlines, this produces just the CFF font program.
///
/// In both cases, this returns the already compressed data.
#[comemo::memoize]
#[typst_macros::time(name = "subset font")]
fn subset_font(
    font: &Font,
    glyph_remapper: &GlyphRemapper,
) -> Result<Arc<Vec<u8>>, subsetter::Error> {
    let data = font.data();
    let subset = subsetter::subset(data, font.index(), glyph_remapper)?;
    let mut data = subset.as_ref();

    // Extract the standalone CFF font program if applicable.
    // The face was just produced by the subsetter, so it is expected to
    // parse; a failure here would be a bug in the subsetter.
    let raw = ttf_parser::RawFace::parse(data, 0).unwrap();
    if let Some(cff) = raw.table(CFF) {
        data = cff;
    }

    Ok(Arc::new(deflate(data)))
}
/// Creates the base font name for a font with a specific glyph subset.
/// Consists of a subset tag and the PostScript name of the font.
///
/// Returns a string of length maximum 116, so that even with `-Identity-H`
/// added it does not exceed the maximum PDF/A name length of 127.
pub(crate) fn base_font_name<T: Hash>(font: &Font, glyphs: &T) -> EcoString {
    const MAX_LEN: usize = Name::PDFA_LIMIT - REST_LEN;
    const REST_LEN: usize = SUBSET_TAG_LEN + 1 + 1 + IDENTITY_H.len();

    let postscript_name = font.find_name(name_id::POST_SCRIPT_NAME);
    let name = postscript_name.as_deref().unwrap_or("unknown");

    // Trim to at most `MAX_LEN` bytes, but never cut through a multi-byte
    // UTF-8 character: byte-slicing at a non-boundary would panic for the
    // rare non-ASCII PostScript name.
    let mut end = name.len().min(MAX_LEN);
    while !name.is_char_boundary(end) {
        end -= 1;
    }
    let trimmed = &name[..end];

    // Hash the full name (we might have trimmed) and the glyphs to produce
    // a fairly unique subset tag.
    let subset_tag = subset_tag(&(name, glyphs));
    eco_format!("{subset_tag}+{trimmed}")
}
/// Produce a unique 6 letter tag for a glyph set.
pub(crate) fn subset_tag<T: Hash>(glyphs: &T) -> EcoString {
const BASE: u128 = 26;
let mut hash = typst_utils::hash128(&glyphs);
let mut letter = [b'A'; SUBSET_TAG_LEN];
for l in letter.iter_mut() {
*l = b'A' + (hash % BASE) as u8;
hash /= BASE;
}
std::str::from_utf8(&letter).unwrap().into()
}
/// Create a compressed `/ToUnicode` CMap.
///
/// The CMap maps glyph CIDs back to the unicode text they originated from so
/// that text can be copied and searched in the generated PDF.
#[comemo::memoize]
#[typst_macros::time(name = "create cmap")]
fn create_cmap(
    glyph_set: &BTreeMap<u16, EcoString>,
    glyph_remapper: &GlyphRemapper,
) -> Arc<Vec<u8>> {
    // Produce a reverse mapping from glyphs' CIDs to unicode strings.
    let mut cmap = UnicodeCmap::new(CMAP_NAME, SYSTEM_INFO);
    for (&g, text) in glyph_set.iter() {
        // See comment in `write_normal_text` for why we can choose the CID this way.
        let cid = glyph_remapper.get(g).unwrap();
        // Glyphs without associated text (e.g. purely decorative ones) are
        // simply left out of the map.
        if !text.is_empty() {
            cmap.pair_with_multiple(cid, text.chars());
        }
    }

    Arc::new(deflate(&cmap.finish()))
}

View File

@ -0,0 +1,512 @@
use std::collections::HashMap;
use std::f32::consts::{PI, TAU};
use std::sync::Arc;
use ecow::eco_format;
use pdf_writer::types::{ColorSpaceOperand, FunctionShadingType};
use pdf_writer::writers::StreamShadingType;
use pdf_writer::{Filter, Finish, Name, Ref};
use typst_library::diag::SourceResult;
use typst_library::layout::{Abs, Angle, Point, Quadrant, Ratio, Transform};
use typst_library::visualize::{
Color, ColorSpace, Gradient, RatioOrAngle, RelativeTo, WeightedColor,
};
use typst_utils::Numeric;
use crate::color::{
self, check_cmyk_allowed, ColorSpaceExt, PaintEncode, QuantizedColor,
};
use crate::{content, deflate, transform_to_array, AbsExt, PdfChunk, WithGlobalRefs};
/// A unique-transform-aspect-ratio combination that will be encoded into the
/// PDF.
///
/// All fields take part in `Eq`/`Hash` so that identical gradients are
/// deduplicated into a single shading pattern object.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PdfGradient {
    /// The transform to apply to the gradient.
    pub transform: Transform,
    /// The aspect ratio of the gradient.
    /// Required for aspect ratio correction.
    pub aspect_ratio: Ratio,
    /// The gradient.
    pub gradient: Gradient,
    /// The corrected angle of the gradient.
    pub angle: Angle,
}
/// Writes the actual gradients (shading patterns) to the PDF.
/// This is performed once after writing all pages.
///
/// Linear and radial gradients become function-based shadings; conic
/// gradients are approximated with a Coons-patch mesh shading.
pub fn write_gradients(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<PdfGradient, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        for pdf_gradient in resources.gradients.items() {
            // Deduplicate: each gradient is written once.
            if out.contains_key(pdf_gradient) {
                continue;
            }

            let shading = chunk.alloc();
            out.insert(pdf_gradient.clone(), shading);

            let PdfGradient { transform, aspect_ratio, gradient, angle } = pdf_gradient;

            // Hue-based spaces cannot be encoded natively in PDF; they are
            // encoded through Oklab instead.
            let color_space = if gradient.space().hue_index().is_some() {
                ColorSpace::Oklab
            } else {
                gradient.space()
            };

            if color_space == ColorSpace::Cmyk {
                check_cmyk_allowed(context.options)?;
            }

            let mut shading_pattern = match &gradient {
                Gradient::Linear(_) => {
                    let shading_function =
                        shading_function(gradient, &mut chunk, color_space);
                    let mut shading_pattern = chunk.chunk.shading_pattern(shading);
                    let mut shading = shading_pattern.function_shading();
                    shading.shading_type(FunctionShadingType::Axial);
                    color::write(
                        color_space,
                        shading.color_space(),
                        &context.globals.color_functions,
                    );

                    let (mut sin, mut cos) = (angle.sin(), angle.cos());

                    // Scale to edges of unit square.
                    let factor = cos.abs() + sin.abs();
                    sin *= factor;
                    cos *= factor;

                    // Axis endpoints depend on which quadrant the angle is in.
                    let (x1, y1, x2, y2): (f64, f64, f64, f64) = match angle.quadrant() {
                        Quadrant::First => (0.0, 0.0, cos, sin),
                        Quadrant::Second => (1.0, 0.0, cos + 1.0, sin),
                        Quadrant::Third => (1.0, 1.0, cos + 1.0, sin + 1.0),
                        Quadrant::Fourth => (0.0, 1.0, cos, sin + 1.0),
                    };

                    shading
                        .anti_alias(gradient.anti_alias())
                        .function(shading_function)
                        .coords([x1 as f32, y1 as f32, x2 as f32, y2 as f32])
                        .extend([true; 2]);

                    shading.finish();

                    shading_pattern
                }
                Gradient::Radial(radial) => {
                    let shading_function =
                        shading_function(gradient, &mut chunk, color_space_of(gradient));
                    let mut shading_pattern = chunk.chunk.shading_pattern(shading);
                    let mut shading = shading_pattern.function_shading();
                    shading.shading_type(FunctionShadingType::Radial);
                    color::write(
                        color_space,
                        shading.color_space(),
                        &context.globals.color_functions,
                    );

                    shading
                        .anti_alias(gradient.anti_alias())
                        .function(shading_function)
                        .coords([
                            radial.focal_center.x.get() as f32,
                            radial.focal_center.y.get() as f32,
                            radial.focal_radius.get() as f32,
                            radial.center.x.get() as f32,
                            radial.center.y.get() as f32,
                            radial.radius.get() as f32,
                        ])
                        .extend([true; 2]);

                    shading.finish();

                    shading_pattern
                }
                Gradient::Conic(_) => {
                    // Conic gradients have no native PDF shading type; they
                    // are emitted as a stream of Coons patches.
                    let vertices = compute_vertex_stream(gradient, *aspect_ratio);

                    let stream_shading_id = chunk.alloc();
                    let mut stream_shading =
                        chunk.chunk.stream_shading(stream_shading_id, &vertices);
                    color::write(
                        color_space,
                        stream_shading.color_space(),
                        &context.globals.color_functions,
                    );

                    let range = color_space.range();
                    stream_shading
                        .bits_per_coordinate(16)
                        .bits_per_component(16)
                        .bits_per_flag(8)
                        .shading_type(StreamShadingType::CoonsPatch)
                        .decode(
                            [0.0, 1.0, 0.0, 1.0].into_iter().chain(range.iter().copied()),
                        )
                        .anti_alias(gradient.anti_alias())
                        .filter(Filter::FlateDecode);

                    stream_shading.finish();

                    let mut shading_pattern = chunk.shading_pattern(shading);
                    shading_pattern.shading_ref(stream_shading_id);
                    shading_pattern
                }
            };

            shading_pattern.matrix(transform_to_array(*transform));
        }

        Ok(())
    })?;

    Ok((chunk, out))
}
/// Writes an exponential or stitched function that expresses the gradient.
///
/// Each pair of adjacent stops becomes one exponential segment; for hue-based
/// spaces, extra intermediate segments are inserted for smoothness. Multiple
/// segments are combined into a single stitching function.
fn shading_function(
    gradient: &Gradient,
    chunk: &mut PdfChunk,
    color_space: ColorSpace,
) -> Ref {
    let function = chunk.alloc();
    let mut functions = vec![];
    let mut bounds = vec![];
    let mut encode = vec![];

    // Create the individual gradient functions for each pair of stops.
    for window in gradient.stops_ref().windows(2) {
        let (first, second) = (window[0], window[1]);

        // If we have a hue index or are using Oklab, we will create several
        // stops in-between to make the gradient smoother without interpolation
        // issues with native color spaces.
        let mut last_c = first.0;
        if gradient.space().hue_index().is_some() {
            for i in 0..=32 {
                let t = i as f64 / 32.0;
                let real_t = first.1.get() * (1.0 - t) + second.1.get() * t;

                let c = gradient.sample(RatioOrAngle::Ratio(Ratio::new(real_t)));
                functions.push(single_gradient(chunk, last_c, c, color_space));
                bounds.push(real_t as f32);
                encode.extend([0.0, 1.0]);
                last_c = c;
            }
        }

        bounds.push(second.1.get() as f32);
        functions.push(single_gradient(chunk, first.0, second.0, color_space));
        encode.extend([0.0, 1.0]);
    }

    // Special case for gradients with only two stops.
    if functions.len() == 1 {
        return functions[0];
    }

    // Remove the last bound, since it's not needed for the stitching function.
    bounds.pop();

    // Create the stitching function.
    chunk
        .stitching_function(function)
        .domain([0.0, 1.0])
        .range(color_space.range().iter().copied())
        .functions(functions)
        .bounds(bounds)
        .encode(encode);

    function
}
/// Writes an exponential function that expresses a single segment (between two
/// stops) of a gradient.
///
/// Returns the reference of the newly written function object.
fn single_gradient(
    chunk: &mut PdfChunk,
    first_color: Color,
    second_color: Color,
    color_space: ColorSpace,
) -> Ref {
    let reference = chunk.alloc();

    // An exponent of 1.0 makes this a plain linear interpolation between the
    // two endpoint colors over the [0, 1] domain.
    let mut function = chunk.exponential_function(reference);
    function.range(color_space.range().iter().copied());
    function.c0(color_space.convert(first_color));
    function.c1(color_space.convert(second_color));
    function.domain([0.0, 1.0]);
    function.n(1.0);

    reference
}
impl PaintEncode for Gradient {
    /// Selects this gradient as the fill paint of the content stream.
    fn set_as_fill(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()> {
        ctx.reset_fill_color_space();

        // Register (or reuse) the gradient as a named pattern resource.
        let index = register_gradient(ctx, self, on_text, transforms);
        let name = eco_format!("Gr{index}");

        ctx.content.set_fill_color_space(ColorSpaceOperand::Pattern);
        ctx.content.set_fill_pattern(None, Name(name.as_bytes()));
        Ok(())
    }

    /// Selects this gradient as the stroke paint of the content stream.
    fn set_as_stroke(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()> {
        ctx.reset_stroke_color_space();

        // Register (or reuse) the gradient as a named pattern resource.
        let index = register_gradient(ctx, self, on_text, transforms);
        let name = eco_format!("Gr{index}");

        ctx.content.set_stroke_color_space(ColorSpaceOperand::Pattern);
        ctx.content.set_stroke_pattern(None, Name(name.as_bytes()));
        Ok(())
    }
}
/// Deduplicates a gradient to a named PDF resource.
///
/// Builds the final `PdfGradient` (transform, aspect ratio, corrected angle)
/// for the given paint context and inserts it into the resource registry,
/// returning the index used to name the pattern (`Gr{index}`).
fn register_gradient(
    ctx: &mut content::Builder,
    gradient: &Gradient,
    on_text: bool,
    mut transforms: content::Transforms,
) -> usize {
    // Edge cases for strokes.
    if transforms.size.x.is_zero() {
        transforms.size.x = Abs::pt(1.0);
    }

    if transforms.size.y.is_zero() {
        transforms.size.y = Abs::pt(1.0);
    }

    // The gradient is positioned relative to either the shape itself or its
    // parent container.
    let size = match gradient.unwrap_relative(on_text) {
        RelativeTo::Self_ => transforms.size,
        RelativeTo::Parent => transforms.container_size,
    };

    // Conic gradients are drawn on a larger canvas and offset towards their
    // center (see `scale_offset` below).
    let (offset_x, offset_y) = match gradient {
        Gradient::Conic(conic) => (
            -size.x * (1.0 - conic.center.x.get() / 2.0) / 2.0,
            -size.y * (1.0 - conic.center.y.get() / 2.0) / 2.0,
        ),
        _ => (Abs::zero(), Abs::zero()),
    };

    let rotation = gradient.angle().unwrap_or_else(Angle::zero);

    let transform = match gradient.unwrap_relative(on_text) {
        RelativeTo::Self_ => transforms.transform,
        RelativeTo::Parent => transforms.container_transform,
    };

    let scale_offset = match gradient {
        Gradient::Conic(_) => 4.0_f64,
        _ => 1.0,
    };

    let pdf_gradient = PdfGradient {
        aspect_ratio: size.aspect_ratio(),
        transform: transform
            .pre_concat(Transform::translate(
                offset_x * scale_offset,
                offset_y * scale_offset,
            ))
            .pre_concat(Transform::scale(
                Ratio::new(size.x.to_pt() * scale_offset),
                Ratio::new(size.y.to_pt() * scale_offset),
            )),
        gradient: gradient.clone(),
        angle: Gradient::correct_aspect_ratio(rotation, size.aspect_ratio()),
    };

    // The color space must be registered as used for the color functions to
    // be emitted later.
    ctx.resources.colors.mark_as_used(color_space_of(gradient));

    ctx.resources.gradients.insert(pdf_gradient)
}
/// Writes a single Coons Patch as defined in the PDF specification
/// to a binary vec.
///
/// All coordinates and colors are quantized to big-endian `u16` values as
/// declared by the shading's `/BitsPerCoordinate`/`/BitsPerComponent`.
///
/// Structure:
/// - flag: `u8`
/// - points: `[u16; 24]`
/// - colors: `[u16; 4*N]` (N = number of components)
fn write_patch(
    target: &mut Vec<u8>,
    t: f32,
    t1: f32,
    c0: &[u16],
    c1: &[u16],
    angle: Angle,
) {
    // Angles of the two radial edges of this patch on the unit circle.
    let theta = -TAU * t + angle.to_rad() as f32 + PI;
    let theta1 = -TAU * t1 + angle.to_rad() as f32 + PI;

    let (cp1, cp2) =
        control_point(Point::new(Abs::pt(0.5), Abs::pt(0.5)), 0.5, theta, theta1);

    // Push the flag
    target.push(0);

    let p1 =
        [u16::quantize(0.5, [0.0, 1.0]).to_be(), u16::quantize(0.5, [0.0, 1.0]).to_be()];

    let p2 = [
        u16::quantize(theta.cos(), [-1.0, 1.0]).to_be(),
        u16::quantize(theta.sin(), [-1.0, 1.0]).to_be(),
    ];

    let p3 = [
        u16::quantize(theta1.cos(), [-1.0, 1.0]).to_be(),
        u16::quantize(theta1.sin(), [-1.0, 1.0]).to_be(),
    ];

    let cp1 = [
        u16::quantize(cp1.x.to_f32(), [0.0, 1.0]).to_be(),
        u16::quantize(cp1.y.to_f32(), [0.0, 1.0]).to_be(),
    ];

    let cp2 = [
        u16::quantize(cp2.x.to_f32(), [0.0, 1.0]).to_be(),
        u16::quantize(cp2.y.to_f32(), [0.0, 1.0]).to_be(),
    ];

    // Push the points
    target.extend_from_slice(bytemuck::cast_slice(&[
        p1, p1, p2, p2, cp1, cp2, p3, p3, p1, p1, p1, p1,
    ]));

    // Push the colors.
    let colors = [c0, c0, c1, c1]
        .into_iter()
        .flat_map(|c| c.iter().copied().map(u16::to_be_bytes))
        .flatten();

    target.extend(colors);
}
/// Computes the two Bézier control points that approximate the circular arc
/// from `angle_start` to `angle_end` around center `c` with radius `r`.
///
/// Uses the standard cubic-Bézier circle approximation with the 4/3·tan
/// factor for the handle length.
fn control_point(c: Point, r: f32, angle_start: f32, angle_end: f32) -> (Point, Point) {
    let n = (TAU / (angle_end - angle_start)).abs();
    let f = ((angle_end - angle_start) / n).tan() * 4.0 / 3.0;

    let p1 = c + Point::new(
        Abs::pt((r * angle_start.cos() - f * r * angle_start.sin()) as f64),
        Abs::pt((r * angle_start.sin() + f * r * angle_start.cos()) as f64),
    );

    let p2 = c + Point::new(
        Abs::pt((r * angle_end.cos() + f * r * angle_end.sin()) as f64),
        Abs::pt((r * angle_end.sin() - f * r * angle_end.cos()) as f64),
    );

    (p1, p2)
}
/// Computes the deflated Coons-patch vertex stream that approximates a conic
/// gradient, subdividing each pair of adjacent stops finely enough for a
/// smooth result.
///
/// Panics if called with a non-conic gradient (callers guarantee this).
#[comemo::memoize]
fn compute_vertex_stream(gradient: &Gradient, aspect_ratio: Ratio) -> Arc<Vec<u8>> {
    let Gradient::Conic(conic) = gradient else { unreachable!() };

    // Generated vertices for the Coons patches
    let mut vertices = Vec::new();

    // Correct the gradient's angle
    let angle = Gradient::correct_aspect_ratio(conic.angle, aspect_ratio);

    for window in conic.stops.windows(2) {
        let ((c0, t0), (c1, t1)) = (window[0], window[1]);

        // Precision:
        // - On an even color, insert a stop every 90deg
        // - For a hue-based color space, insert 200 stops minimum
        // - On any other, insert 20 stops minimum
        let max_dt = if c0 == c1 {
            0.25
        } else if conic.space.hue_index().is_some() {
            0.005
        } else {
            0.05
        };
        // Hue-based spaces are encoded through Oklab (cf. `color_space_of`).
        let encode_space = conic
            .space
            .hue_index()
            .map(|_| ColorSpace::Oklab)
            .unwrap_or(conic.space);
        let mut t_x = t0.get();
        let dt = (t1.get() - t0.get()).min(max_dt);

        // Special casing for sharp gradients.
        if t0 == t1 {
            write_patch(
                &mut vertices,
                t0.get() as f32,
                t1.get() as f32,
                &encode_space.convert(c0),
                &encode_space.convert(c1),
                angle,
            );
            continue;
        }

        while t_x < t1.get() {
            let t_next = (t_x + dt).min(t1.get());

            // The current progress in the current window.
            let t = |t| (t - t0.get()) / (t1.get() - t0.get());
            let c = Color::mix_iter(
                [WeightedColor::new(c0, 1.0 - t(t_x)), WeightedColor::new(c1, t(t_x))],
                conic.space,
            )
            .unwrap();

            let c_next = Color::mix_iter(
                [
                    WeightedColor::new(c0, 1.0 - t(t_next)),
                    WeightedColor::new(c1, t(t_next)),
                ],
                conic.space,
            )
            .unwrap();

            write_patch(
                &mut vertices,
                t_x as f32,
                t_next as f32,
                &encode_space.convert(c),
                &encode_space.convert(c_next),
                angle,
            );

            t_x = t_next;
        }
    }

    Arc::new(deflate(&vertices))
}
/// The color space a gradient is encoded in: hue-based spaces (which PDF
/// cannot represent natively) are routed through Oklab, all others are used
/// directly.
fn color_space_of(gradient: &Gradient) -> ColorSpace {
    match gradient.space().hue_index() {
        Some(_) => ColorSpace::Oklab,
        None => gradient.space(),
    }
}

View File

@ -1,244 +1,249 @@
use std::hash::{Hash, Hasher};
use std::sync::{Arc, OnceLock};
use std::collections::HashMap;
use std::io::Cursor;
use image::{DynamicImage, EncodableLayout, GenericImageView, Rgba};
use krilla::image::{BitsPerComponent, CustomImage, ImageColorspace};
use krilla::surface::Surface;
use krilla_svg::{SurfaceExt, SvgSettings};
use typst_library::diag::{bail, SourceResult};
use ecow::eco_format;
use image::{DynamicImage, GenericImageView, Rgba};
use pdf_writer::{Chunk, Filter, Finish, Ref};
use typst_library::diag::{At, SourceResult, StrResult};
use typst_library::foundations::Smart;
use typst_library::layout::{Abs, Angle, Ratio, Size, Transform};
use typst_library::visualize::{
ExchangeFormat, Image, ImageKind, ImageScaling, RasterFormat, RasterImage,
ColorSpace, ExchangeFormat, Image, ImageKind, ImageScaling, RasterFormat,
RasterImage, SvgImage,
};
use typst_syntax::Span;
use typst_utils::Deferred;
use crate::convert::{FrameContext, GlobalContext};
use crate::util::{SizeExt, TransformExt};
use crate::{color, deflate, PdfChunk, WithGlobalRefs};
#[typst_macros::time(name = "handle image")]
pub(crate) fn handle_image(
gc: &mut GlobalContext,
fc: &mut FrameContext,
image: &Image,
size: Size,
surface: &mut Surface,
span: Span,
) -> SourceResult<()> {
surface.push_transform(&fc.state().transform().to_krilla());
surface.set_location(span.into_raw().get());
let interpolate = image.scaling() == Smart::Custom(ImageScaling::Smooth);
if let Some(alt) = image.alt() {
surface.start_alt_text(alt);
}
gc.image_spans.insert(span);
match image.kind() {
ImageKind::Raster(raster) => {
let (exif_transform, new_size) = exif_transform(raster, size);
surface.push_transform(&exif_transform.to_krilla());
let image = match convert_raster(raster.clone(), interpolate) {
None => bail!(span, "failed to process image"),
Some(i) => i,
};
if !gc.image_to_spans.contains_key(&image) {
gc.image_to_spans.insert(image.clone(), span);
/// Embed all used images into the PDF.
#[typst_macros::time(name = "write images")]
pub fn write_images(
context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<Image, Ref>)> {
let mut chunk = PdfChunk::new();
let mut out = HashMap::new();
context.resources.traverse(&mut |resources| {
for (i, image) in resources.images.items().enumerate() {
if out.contains_key(image) {
continue;
}
surface.draw_image(image, new_size.to_krilla());
surface.pop();
}
ImageKind::Svg(svg) => {
surface.draw_svg(
svg.tree(),
size.to_krilla(),
SvgSettings { embed_text: true, ..Default::default() },
);
}
}
let (handle, span) = resources.deferred_images.get(&i).unwrap();
let encoded = handle.wait().as_ref().map_err(Clone::clone).at(*span)?;
if image.alt().is_some() {
surface.end_alt_text();
}
match encoded {
EncodedImage::Raster {
data,
filter,
color_space,
bits_per_component,
width,
height,
compressed_icc,
alpha,
interpolate,
} => {
let image_ref = chunk.alloc();
out.insert(image.clone(), image_ref);
surface.pop();
surface.reset_location();
let mut image = chunk.chunk.image_xobject(image_ref, data);
image.filter(*filter);
image.width(*width as i32);
image.height(*height as i32);
image.bits_per_component(i32::from(*bits_per_component));
image.interpolate(*interpolate);
Ok(())
}
let mut icc_ref = None;
let space = image.color_space();
if compressed_icc.is_some() {
let id = chunk.alloc.bump();
space.icc_based(id);
icc_ref = Some(id);
} else {
color::write(
*color_space,
space,
&context.globals.color_functions,
);
}
struct Repr {
/// The original, underlying raster image.
raster: RasterImage,
/// The alpha channel of the raster image, if existing.
alpha_channel: OnceLock<Option<Vec<u8>>>,
/// A (potentially) converted version of the dynamic image stored `raster` that is
/// guaranteed to either be in luma8 or rgb8, and thus can be used for the
/// `color_channel` method of `CustomImage`.
actual_dynamic: OnceLock<Arc<DynamicImage>>,
}
// Add a second gray-scale image containing the alpha values if
// this image has an alpha channel.
if let Some((alpha_data, alpha_filter)) = alpha {
let mask_ref = chunk.alloc.bump();
image.s_mask(mask_ref);
image.finish();
/// A wrapper around `RasterImage` so that we can implement `CustomImage`.
#[derive(Clone)]
struct PdfImage(Arc<Repr>);
let mut mask = chunk.image_xobject(mask_ref, alpha_data);
mask.filter(*alpha_filter);
mask.width(*width as i32);
mask.height(*height as i32);
mask.color_space().device_gray();
mask.bits_per_component(i32::from(*bits_per_component));
mask.interpolate(*interpolate);
} else {
image.finish();
}
impl PdfImage {
pub fn new(raster: RasterImage) -> Self {
Self(Arc::new(Repr {
raster,
alpha_channel: OnceLock::new(),
actual_dynamic: OnceLock::new(),
}))
}
}
impl Hash for PdfImage {
fn hash<H: Hasher>(&self, state: &mut H) {
// `alpha_channel` and `actual_dynamic` are generated from the underlying `RasterImage`,
// so this is enough. Since `raster` is prehashed, this is also very cheap.
self.0.raster.hash(state);
}
}
impl CustomImage for PdfImage {
fn color_channel(&self) -> &[u8] {
self.0
.actual_dynamic
.get_or_init(|| {
let dynamic = self.0.raster.dynamic();
let channel_count = dynamic.color().channel_count();
match (dynamic.as_ref(), channel_count) {
// Pure luma8 or rgb8 image, can use it directly.
(DynamicImage::ImageLuma8(_), _) => dynamic.clone(),
(DynamicImage::ImageRgb8(_), _) => dynamic.clone(),
// Grey-scale image, convert to luma8.
(_, 1 | 2) => Arc::new(DynamicImage::ImageLuma8(dynamic.to_luma8())),
// Anything else, convert to rgb8.
_ => Arc::new(DynamicImage::ImageRgb8(dynamic.to_rgb8())),
if let (Some(compressed_icc), Some(icc_ref)) =
(compressed_icc, icc_ref)
{
let mut stream = chunk.icc_profile(icc_ref, compressed_icc);
stream.filter(Filter::FlateDecode);
match color_space {
ColorSpace::Srgb => {
stream.n(3);
stream.alternate().srgb();
}
ColorSpace::D65Gray => {
stream.n(1);
stream.alternate().d65_gray();
}
_ => unimplemented!(),
}
}
}
})
.as_bytes()
}
fn alpha_channel(&self) -> Option<&[u8]> {
self.0
.alpha_channel
.get_or_init(|| {
self.0.raster.dynamic().color().has_alpha().then(|| {
self.0
.raster
.dynamic()
.pixels()
.map(|(_, _, Rgba([_, _, _, a]))| a)
.collect()
})
})
.as_ref()
.map(|v| &**v)
}
fn bits_per_component(&self) -> BitsPerComponent {
BitsPerComponent::Eight
}
    fn size(&self) -> (u32, u32) {
        // (width, height) in pixels of the underlying raster image.
        (self.0.raster.width(), self.0.raster.height())
    }
fn icc_profile(&self) -> Option<&[u8]> {
if matches!(
self.0.raster.dynamic().as_ref(),
DynamicImage::ImageLuma8(_)
| DynamicImage::ImageLumaA8(_)
| DynamicImage::ImageRgb8(_)
| DynamicImage::ImageRgba8(_)
) {
self.0.raster.icc().map(|b| b.as_bytes())
} else {
// In all other cases, the dynamic will be converted into RGB8 or LUMA8, so the ICC
// profile may become invalid, and thus we don't include it.
None
EncodedImage::Svg(svg_chunk, id) => {
let mut map = HashMap::new();
svg_chunk.renumber_into(&mut chunk.chunk, |old| {
*map.entry(old).or_insert_with(|| chunk.alloc.bump())
});
out.insert(image.clone(), map[id]);
}
}
}
}
fn color_space(&self) -> ImageColorspace {
// Remember that we convert all images to either RGB or luma.
if self.0.raster.dynamic().color().has_color() {
ImageColorspace::Rgb
} else {
ImageColorspace::Luma
}
}
Ok(())
})?;
Ok((chunk, out))
}
/// Creates a new PDF image from the given image.
///
/// Also starts the deferred encoding of the image.
#[comemo::memoize]
fn convert_raster(
raster: RasterImage,
interpolate: bool,
) -> Option<krilla::image::Image> {
if let RasterFormat::Exchange(ExchangeFormat::Jpg) = raster.format() {
let image_data: Arc<dyn AsRef<[u8]> + Send + Sync> =
Arc::new(raster.data().clone());
let icc_profile = raster.icc().map(|i| {
let i: Arc<dyn AsRef<[u8]> + Send + Sync> = Arc::new(i.clone());
i
});
pub fn deferred_image(
image: Image,
pdfa: bool,
) -> (Deferred<StrResult<EncodedImage>>, Option<ColorSpace>) {
let color_space = match image.kind() {
ImageKind::Raster(raster) if raster.icc().is_none() => {
Some(to_color_space(raster.dynamic().color()))
}
_ => None,
};
krilla::image::Image::from_jpeg_with_icc(
image_data.into(),
icc_profile.map(|i| i.into()),
interpolate,
)
} else {
krilla::image::Image::from_custom(PdfImage::new(raster), interpolate)
// PDF/A does not appear to allow interpolation.
// See https://github.com/typst/typst/issues/2942.
let interpolate = !pdfa && image.scaling() == Smart::Custom(ImageScaling::Smooth);
let deferred = Deferred::new(move || match image.kind() {
ImageKind::Raster(raster) => Ok(encode_raster_image(raster, interpolate)),
ImageKind::Svg(svg) => {
let (chunk, id) = encode_svg(svg, pdfa)
.map_err(|err| eco_format!("failed to convert SVG to PDF: {err}"))?;
Ok(EncodedImage::Svg(chunk, id))
}
});
(deferred, color_space)
}
/// Encode an image with a suitable filter.
///
/// JPEG images become a DCT-encoded stream; all other formats are converted
/// to raw 8-bit luma/RGB samples and deflated.
#[typst_macros::time(name = "encode raster image")]
fn encode_raster_image(image: &RasterImage, interpolate: bool) -> EncodedImage {
    let dynamic = image.dynamic();
    let color_space = to_color_space(dynamic.color());
    let (filter, data, bits_per_component) =
        if image.format() == RasterFormat::Exchange(ExchangeFormat::Jpg) {
            // NOTE(review): this re-encodes the decoded pixels as JPEG rather
            // than embedding the original file bytes — confirm this (lossy)
            // round-trip is intended.
            let mut data = Cursor::new(vec![]);
            dynamic.write_to(&mut data, image::ImageFormat::Jpeg).unwrap();
            (Filter::DctDecode, data.into_inner(), 8)
        } else {
            // TODO: Encode flate streams with PNG-predictor?
            let (data, bits_per_component) = match (dynamic, color_space) {
                // RGB image.
                (DynamicImage::ImageRgb8(rgb), _) => (deflate(rgb.as_raw()), 8),
                // Grayscale image
                (DynamicImage::ImageLuma8(luma), _) => (deflate(luma.as_raw()), 8),
                (_, ColorSpace::D65Gray) => (deflate(dynamic.to_luma8().as_raw()), 8),
                // Anything else
                _ => (deflate(dynamic.to_rgb8().as_raw()), 8),
            };
            (Filter::FlateDecode, data, bits_per_component)
        };
    // The ICC profile, if any, is deflated separately from the sample data.
    let compressed_icc = image.icc().map(|data| deflate(data));
    // The alpha channel, if any, is encoded via `encode_alpha` into its own
    // pre-deflated stream.
    let alpha = dynamic.color().has_alpha().then(|| encode_alpha(dynamic));
    EncodedImage::Raster {
        data,
        filter,
        color_space,
        bits_per_component,
        width: image.width(),
        height: image.height(),
        compressed_icc,
        alpha,
        interpolate,
    }
}
fn exif_transform(image: &RasterImage, size: Size) -> (Transform, Size) {
let base = |hp: bool, vp: bool, mut base_ts: Transform, size: Size| {
if hp {
// Flip horizontally in-place.
base_ts = base_ts.pre_concat(
Transform::scale(-Ratio::one(), Ratio::one())
.pre_concat(Transform::translate(-size.x, Abs::zero())),
)
}
/// Encode an image's alpha channel if present.
#[typst_macros::time(name = "encode alpha")]
fn encode_alpha(image: &DynamicImage) -> (Vec<u8>, Filter) {
let pixels: Vec<_> = image.pixels().map(|(_, _, Rgba([_, _, _, a]))| a).collect();
(deflate(&pixels), Filter::FlateDecode)
}
if vp {
// Flip vertically in-place.
base_ts = base_ts.pre_concat(
Transform::scale(Ratio::one(), -Ratio::one())
.pre_concat(Transform::translate(Abs::zero(), -size.y)),
)
}
/// Encode an SVG into a chunk of PDF objects.
#[typst_macros::time(name = "encode svg")]
fn encode_svg(
svg: &SvgImage,
pdfa: bool,
) -> Result<(Chunk, Ref), svg2pdf::ConversionError> {
svg2pdf::to_chunk(
svg.tree(),
svg2pdf::ConversionOptions { pdfa, ..Default::default() },
)
}
base_ts
};
/// A pre-encoded image.
pub enum EncodedImage {
/// A pre-encoded rasterized image.
Raster {
/// The raw, pre-deflated image data.
data: Vec<u8>,
/// The filter to use for the image.
filter: Filter,
/// Which color space this image is encoded in.
color_space: ColorSpace,
/// How many bits of each color component are stored.
bits_per_component: u8,
/// The image's width.
width: u32,
/// The image's height.
height: u32,
/// The image's ICC profile, deflated, if any.
compressed_icc: Option<Vec<u8>>,
/// The alpha channel of the image, pre-deflated, if any.
alpha: Option<(Vec<u8>, Filter)>,
/// Whether image interpolation should be enabled.
interpolate: bool,
},
/// A vector graphic.
///
/// The chunk is the SVG converted to PDF objects.
Svg(Chunk, Ref),
}
let no_flipping =
|hp: bool, vp: bool| (base(hp, vp, Transform::identity(), size), size);
let with_flipping = |hp: bool, vp: bool| {
let base_ts = Transform::rotate_at(Angle::deg(90.0), Abs::zero(), Abs::zero())
.pre_concat(Transform::scale(Ratio::one(), -Ratio::one()));
let inv_size = Size::new(size.y, size.x);
(base(hp, vp, base_ts, inv_size), inv_size)
};
match image.exif_rotation() {
Some(2) => no_flipping(true, false),
Some(3) => no_flipping(true, true),
Some(4) => no_flipping(false, true),
Some(5) => with_flipping(false, false),
Some(6) => with_flipping(true, false),
Some(7) => with_flipping(true, true),
Some(8) => with_flipping(false, true),
_ => no_flipping(false, false),
/// Matches an [`image::ColorType`] to [`ColorSpace`].
fn to_color_space(color: image::ColorType) -> ColorSpace {
use image::ColorType::*;
match color {
L8 | La8 | L16 | La16 => ColorSpace::D65Gray,
Rgb8 | Rgba8 | Rgb16 | Rgba16 | Rgb32F | Rgba32F => ColorSpace::Srgb,
_ => unimplemented!(),
}
}

Some files were not shown because too many files have changed in this diff Show More