Compare commits

..

100 Commits

Author SHA1 Message Date
Tobias Schmitz
d204a28818
Expand text link boxes vertically by half the leading spacing (#6252) 2025-05-12 18:12:35 +00:00
Tobias Schmitz
22a117a091
Prohibit some line break opportunities between LTR-ISOLATE and OBJECT-REPLACEMENT-CHARACTER (#6251)
Co-authored-by: Max <max@mkor.je>
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-05-12 09:16:38 +00:00
Tobias Schmitz
26c19a49c8
Use the infer crate to determine if pdf embeds should be compressed (#6256) 2025-05-12 08:07:43 +00:00
Tobias Schmitz
54c5113a83
Catch indefinite loop in realization due to cycle between show and grouping rule (#6259) 2025-05-12 08:06:18 +00:00
Tobias Schmitz
9b09146a6b
Use list spacing for attach spacing in tight lists (#6242) 2025-05-06 14:03:48 +00:00
Tobias Schmitz
b322da930f
Respect RTL cell layouting order in grid layout (#6232)
Co-authored-by: PgBiel <9021226+PgBiel@users.noreply.github.com>
2025-05-06 08:26:55 +00:00
Malo
14241ec1aa
Use the right field name for figure.caption.position (#6226) 2025-05-01 15:43:07 +00:00
Andrew Voynov
3e6691a93b
Fix frac syntax section typo (#6193) 2025-04-18 14:27:07 +00:00
Max
7e072e2493
Add test for flattened accents in math (#6188) 2025-04-17 14:10:27 +00:00
Malo
c21c1c391b
Use measure width argument in layout doc (#6160) 2025-04-10 09:27:42 +00:00
Approximately Equal
94a497a01f
Add HTML meta tags for document authors and keywords (#6134) 2025-04-07 20:18:52 +00:00
alluring-mushroom
9829bd8326
Document exceptions and alternatives to using type (#6027)
Co-authored-by: Zedd Serjeant <Zedd.Serjeant@PumpkinEng.com.au>
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-04-07 19:56:20 +00:00
Andrew Voynov
43c3d5d3af
Improved ratio and relative length docs (#5750)
Co-authored-by: PgBiel <9021226+PgBiel@users.noreply.github.com>
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-04-07 19:47:02 +00:00
+merlan #flirora
14a0565d95
Show warnings from eval (#6100)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-04-07 18:42:29 +00:00
Laurenz
bd2e76e11d
Bump OpenSSL (#6153) 2025-04-07 18:20:27 +00:00
Andrew Voynov
14928ef962
Fix typo in module docs (#6146)
Co-authored-by: Alberto Corbi <alberto_corbi@icloud.com>
2025-04-07 17:47:29 +00:00
Laurenz
d55abf0842
Update community section in README (#6150) 2025-04-07 17:46:46 +00:00
Markus Langgeng Iman Saputra
ea336a6ac7
Add Indonesian translation (#6108)
Co-authored-by: Malo <57839069+MDLC01@users.noreply.github.com>
2025-04-04 15:50:13 +00:00
Malo
387a8b4895
Display color spaces in the order in which they are presented in the doc (#6140) 2025-04-04 11:53:14 +00:00
Laurenz
bf8751c063
Switch to released krilla version (#6137) 2025-04-04 08:35:51 +00:00
Malo
ed2106e28d
Disallow empty font lists (#6049) 2025-04-02 11:47:42 +00:00
Malo
417f5846b6
Support comparison functions in array.sorted (#5627)
Co-authored-by: +merlan #flirora <uruwi@protonmail.com>
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-04-02 09:41:45 +00:00
Ian Wrzesinski
12699eb7f4
Parse multi-character numbers consistently in math (#5996)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-04-02 09:30:04 +00:00
Laurenz Stampfl
96dd67e011
Switch PDF backend to krilla (#5420)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-04-01 14:42:52 +00:00
Max
012e14d40c
Unify layout of vec and cases with mat (#5934) 2025-03-31 09:38:04 +00:00
Max
4f0fbfb7e0
Add dotless parameter to math.accent (#5939)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-03-31 09:17:49 +00:00
+merlan #flirora
a64af130dc
Add default parameter for array.{first, last} (#5970) 2025-03-31 09:06:18 +00:00
Malo
1082181a6f
Improve french smartquotes (#5976) 2025-03-31 09:01:01 +00:00
+merlan #flirora
e60d3021a7
Add env setting for ignore_system_fonts (#6092) 2025-03-31 08:17:37 +00:00
Astra3
326bec1f0d
Correcting Czech translation in typst-library (#6101) 2025-03-31 08:16:47 +00:00
Myriad-Dreamin
758ee78ef5
Make World::font implementations safe (#6117) 2025-03-31 08:08:55 +00:00
Matt Fellenz
efdb75558f
IDE: complete jump-to-cursor impl (#6037) 2025-03-28 17:33:16 +00:00
frozolotl
20ee446eba
Fix descriptions of color maps (#6096) 2025-03-28 15:30:30 +00:00
Philipp Niedermayer
b7a4382a73
Fix typo (#6104) 2025-03-28 15:28:03 +00:00
Laurenz Stampfl
838a46dbb7
Test all exif rotation types and fix two of them (#6102) 2025-03-27 10:59:32 +00:00
PgBiel
1f1c133878
Refactor grid header and footer resolving (#5919) 2025-03-24 20:42:48 +00:00
Laurenz
1e591ac8dc
Bump zip (#6091) 2025-03-24 18:17:29 +00:00
Eduardo Sánchez Muñoz
38213ed534
Use u64 instead of usize to store counter and enumeration item numbers, so behavior does not vary from 64-bit to 32-bit platforms (#6026) 2025-03-24 18:16:33 +00:00
Andrew Voynov
636eea18bc
Expand page breaks' triggers for page(height: auto) in docs (#6081) 2025-03-24 18:08:39 +00:00
Ian Wrzesinski
91956d1f03
Use std::ops::ControlFlow in Content::traverse (#6053)
Co-authored-by: Max Mynter <maxmynter@me.com>
2025-03-24 18:07:19 +00:00
Wolf-SO
1b2714e1a7
Update 1-writing.md to improve readability (#6040) 2025-03-12 18:29:35 +00:00
Laurenz
95a7e28e25
Make two typst-kit functions private (#6045) 2025-03-12 12:46:03 +00:00
Kevin K.
37bb632d2e
Fix missing words and paren in docs (#6046) 2025-03-12 12:45:57 +00:00
Michael Fortunato
24b2f98bf9
Fix typo in 4-template.md (#6047) 2025-03-12 12:45:22 +00:00
Andrew Voynov
0214320087
Fix parallel package installation (#5979)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-03-11 20:20:41 +00:00
Max
96f6957371
Fix math.root frame size (#6021) 2025-03-11 10:18:15 +00:00
evie
3650859ae8
Fix cargo clippy warnings (mostly about .repeat.take and .next_back) (#6038) 2025-03-11 10:00:53 +00:00
Caleb Maclennan
bd531e08dc
Bump rustybuzz (and adjacent crates) (#5407) 2025-03-10 12:45:08 +00:00
Ludovico Gerardi
e66e190a21
Fix typo in docs (#6034) 2025-03-10 11:39:30 +00:00
Laurenz
db9a83d9fc Bump version on main
The tagged commit itself is on the 0.13 branch.
2025-03-07 11:19:12 +01:00
Laurenz
8d3488a07d
0.13.1 changelog (#6025) 2025-03-07 10:03:52 +00:00
Laurenz
476c2df312
Mark breaking symbol changes as breaking in 0.13.0 changelog (#6024) 2025-03-07 09:17:11 +00:00
Malo
e0b2c32a8e
Mention that sym.ohm was removed in the 0.13.0 changelog (#6017)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-03-07 09:05:16 +00:00
Laurenz
99b7d2898e
Replace par function call in tutorial (#6023) 2025-03-07 08:47:56 +00:00
Laurenz
e1a9166e1d
Hotfix for labels on symbols (#6015) 2025-03-07 08:22:42 +00:00
Andrew Voynov
6271cdceae
Fix debug implementation of Recipe (#5997) 2025-03-04 09:33:39 +00:00
LN Liberda
63fda9935f
Run tests on 32-bit via Ubuntu multilib (#5937)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-03-03 13:10:58 +00:00
3w36zj6
8820a00beb
Respect quotes: false in inline quote (#5991)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-03-03 11:50:47 +00:00
andis854
9a6ffbc7db
Added snap to installation instructions (#5984) 2025-03-03 11:40:58 +00:00
Andrew Voynov
bf0d45e2c0
Make array.chunks example more readable (#5975) 2025-03-03 11:31:39 +00:00
F2011
d4def09962
Correct typo (#5971) 2025-03-03 11:23:29 +00:00
Tijme
66679920b2
Fix docs example with type/string comparison (#5987) 2025-03-03 09:32:06 +00:00
Ian Wrzesinski
cfb3b1a270
Improve clarity of ast.rs for newcomers to the codebase (#5784)
Co-authored-by: PgBiel <9021226+PgBiel@users.noreply.github.com>
Co-authored-by: T0mstone <39707032+T0mstone@users.noreply.github.com>
2025-02-26 20:10:36 +00:00
Emmanuel Lesueur
52f1f53973
Fix curve with multiple non-closed components. (#5963) 2025-02-26 18:07:29 +00:00
Malo
d6b0d68ffa
Add more methods to direction (#5893) 2025-02-25 14:19:17 +00:00
Laurenz
8f039dd614
Only autocomplete methods which take self (#5824) 2025-02-25 14:10:01 +00:00
Malo
2eef9e84e1
Improve hints for show rule recursion depth (#5856) 2025-02-25 14:09:52 +00:00
evie
d11ad80dee
Add #str.normalize(form) (#5631)
Co-authored-by: +merlan #flirora <uruwi@protonmail.com>
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-02-25 14:01:01 +00:00
Laurenz
bad343748b
Fix paper name in page setup guide (#5956) 2025-02-25 13:00:22 +00:00
Laurenz
f31c971624
Deduplicate watcher update call (#5955) 2025-02-25 12:47:41 +00:00
aodenis
acd3a5b7a5
Fix high CPU usage due to inotify watch triggering itself (#5905)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-02-25 12:41:54 +00:00
Laurenz
225e845021
Fix introspection of HTML root sibling metadata (#5953) 2025-02-25 11:31:15 +00:00
Sharzy
36d83c8c09
HTML export: fix elem counting on classify_output (#5910)
Co-authored-by: Laurenz <laurmaedje@gmail.com>
2025-02-24 16:35:13 +00:00
Malo
3744c99b07
Override the default math class of some characters (#5949) 2025-02-24 16:15:17 +00:00
Max
81efc82d3c
Fix math accent base height calculation (#5941) 2025-02-24 16:05:36 +00:00
Laurenz
69c3f95705
Bump MSRV to 1.83 and Rust in CI to 1.85 (#5946) 2025-02-24 12:28:01 +00:00
Laurenz
ebe2543264
Fix comparison of Func and NativeFuncData (#5943) 2025-02-24 11:17:31 +00:00
Malo
56f4fa2b4d
Documentation improvements (#5888) 2025-02-23 11:31:28 +00:00
Max
55bc5f4c94
Make math shorthands noncontinuable (#5925) 2025-02-23 11:28:24 +00:00
PgBiel
240f238eee
Fix HTML export of table with gutter (#5920) 2025-02-23 11:26:14 +00:00
Laurenz
d199546f9f Bump version on main
The tagged commit itself is on the 0.13 branch.
2025-02-19 11:25:31 +01:00
Laurenz
a543ee9445
Update changelog (#5894) 2025-02-19 09:59:27 +00:00
Matthew Toohey
3de3813ca0
--make-deps fixes (#5873) 2025-02-18 18:04:40 +00:00
ᡥᠠᡳᡤᡳᠶᠠ ᡥᠠᠯᠠ·ᠨᡝᡴᠣ 猫
74e4f78687
HTML export: Use <code> for inline RawElem (#5884) 2025-02-18 10:16:19 +00:00
Laurenz
25c86accbb
More robust SVG auto-detection (#5878) 2025-02-17 10:56:00 +00:00
Laurenz
5fc679f3e7
Remove Linux Libertine warning (#5876) 2025-02-16 13:18:39 +00:00
Ana Gelez
19a12f379f
Lazy parsing of the package index (#5851) 2025-02-12 15:50:48 +00:00
+merlan #flirora
02cd43e27f
Gradient::repeat: Fix floating-point error in stop calculation (#5837) 2025-02-12 12:38:40 +00:00
+merlan #flirora
83ad407d3c
Update documentation for float.{to-bits, from-bits} (#5836) 2025-02-12 12:35:03 +00:00
Laurenz
a0cd89b478
Fix autocomplete and jumps in math (#5849) 2025-02-11 10:30:30 +00:00
Laurenz
81021fa1a2
Bump typst-assets (#5845) 2025-02-10 15:39:14 +00:00
Laurenz
89e71acecd
Respect par constructor arguments (#5842) 2025-02-10 14:37:19 +00:00
TwoF1nger
ee47cb8469
Add smart quotes for Bulgarian (#5807) 2025-02-10 10:42:16 +00:00
Malo
25e27169e1
Add warning for pdf.embed elem used with HTML (#5829) 2025-02-10 10:39:32 +00:00
PgBiel
3fba256405
Don't crash on image with zero DPI (#5835) 2025-02-10 10:39:04 +00:00
Laurenz
e4f8e57c53
Fix unnecessary import rename warning (#5828) 2025-02-06 21:10:43 +00:00
Laurenz
a1c73b41b8
Document removals in changelog (#5827) 2025-02-06 20:57:46 +00:00
Laurenz
d61f57365b
Fix docs outline for nested definitions (#5823) 2025-02-06 10:18:35 +00:00
Malo
ca702c7f82
Documentation fixes and improvements (#5816) 2025-02-06 10:18:10 +00:00
Laurenz
d897ab5e7d
Autocomplete content methods (#5822) 2025-02-06 09:34:28 +00:00
307 changed files with 6296 additions and 6932 deletions

View File

@ -5,6 +5,7 @@ env:
RUSTFLAGS: "-Dwarnings"
RUSTDOCFLAGS: "-Dwarnings"
TYPST_TESTS_EXTENDED: true
PKG_CONFIG_i686-unknown-linux-gnu: /usr/bin/i686-linux-gnu-pkgconf
jobs:
# This allows us to have one branch protection rule for the full test matrix.
@ -27,30 +28,43 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
bits: [64]
include:
- os: ubuntu-latest
bits: 32
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.83.0
- if: startsWith(matrix.os, 'ubuntu-') && matrix.bits == 32
run: |
sudo dpkg --add-architecture i386
sudo apt update
sudo apt install -y gcc-multilib libssl-dev:i386 pkg-config:i386
- uses: dtolnay/rust-toolchain@1.85.0
with:
targets: ${{ matrix.bits == 32 && 'i686-unknown-linux-gnu' || '' }}
- uses: Swatinem/rust-cache@v2
- run: cargo test --workspace --no-run
- run: cargo test --workspace --no-fail-fast
with:
key: ${{ matrix.bits }}
- run: cargo test --workspace --no-run ${{ matrix.bits == 32 && '--target i686-unknown-linux-gnu' || '' }}
- run: cargo test --workspace --no-fail-fast ${{ matrix.bits == 32 && '--target i686-unknown-linux-gnu' || '' }}
- name: Upload rendered test output
if: failure()
uses: actions/upload-artifact@v4
with:
name: tests-rendered-${{ matrix.os }}
name: tests-rendered-${{ matrix.os }}-${{ matrix.bits }}
path: tests/store/render/**
retention-days: 3
- name: Update test artifacts
if: failure()
run: |
cargo test --workspace --test tests -- --update
cargo test --workspace --test tests ${{ matrix.bits == 32 && '--target i686-unknown-linux-gnu' || '' }} -- --update
echo 'updated_artifacts=1' >> "$GITHUB_ENV"
- name: Upload updated reference output (for use if the test changes are desired)
if: failure() && env.updated_artifacts
uses: actions/upload-artifact@v4
with:
name: tests-updated-${{ matrix.os }}
name: tests-updated-${{ matrix.os }}-${{ matrix.bits }}
path: tests/ref/**
retention-days: 3
@ -59,7 +73,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.83.0
- uses: dtolnay/rust-toolchain@1.85.0
with:
components: clippy, rustfmt
- uses: Swatinem/rust-cache@v2
@ -73,7 +87,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.80.0
- uses: dtolnay/rust-toolchain@1.83.0
- uses: Swatinem/rust-cache@v2
- run: cargo check --workspace

View File

@ -44,7 +44,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@1.83.0
- uses: dtolnay/rust-toolchain@1.85.0
with:
target: ${{ matrix.target }}

320
Cargo.lock generated
View File

@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
version = 4
[[package]]
name = "adler2"
@ -217,6 +217,20 @@ name = "bytemuck"
version = "1.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3"
dependencies = [
"bytemuck_derive",
]
[[package]]
name = "bytemuck_derive"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "byteorder"
@ -735,11 +749,12 @@ dependencies = [
[[package]]
name = "flate2"
version = "1.0.35"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c"
checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc"
dependencies = [
"crc32fast",
"libz-rs-sys",
"miniz_oxide",
]
@ -749,6 +764,15 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4"
[[package]]
name = "float-cmp"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8"
dependencies = [
"num-traits",
]
[[package]]
name = "fnv"
version = "1.0.7"
@ -761,6 +785,15 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
[[package]]
name = "font-types"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fa6a5e5a77b5f3f7f9e32879f484aa5b3632ddfbe568a16266c904a6f32cdaf"
dependencies = [
"bytemuck",
]
[[package]]
name = "fontconfig-parser"
version = "0.5.7"
@ -772,9 +805,9 @@ dependencies = [
[[package]]
name = "fontdb"
version = "0.21.0"
version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37be9fc20d966be438cd57a45767f73349477fb0f85ce86e000557f787298afb"
checksum = "457e789b3d1202543297a350643cf459f836cade38934e7a4cf6a39e7cde2905"
dependencies = [
"fontconfig-parser",
"log",
@ -829,6 +862,15 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
name = "getopts"
version = "0.2.21"
@ -871,6 +913,12 @@ dependencies = [
"weezl",
]
[[package]]
name = "glidesort"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2e102e6eb644d3e0b186fc161e4460417880a0a0b87d235f2e5b8fb30f2e9e0"
[[package]]
name = "half"
version = "2.4.1"
@ -966,7 +1014,7 @@ checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
dependencies = [
"displaydoc",
"serde",
"yoke",
"yoke 0.7.5",
"zerofrom",
"zerovec",
]
@ -1064,7 +1112,7 @@ dependencies = [
"stable_deref_trait",
"tinystr",
"writeable",
"yoke",
"yoke 0.7.5",
"zerofrom",
"zerovec",
]
@ -1175,9 +1223,9 @@ dependencies = [
[[package]]
name = "image-webp"
version = "0.1.3"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f79afb8cbee2ef20f59ccd477a218c12a93943d075b492015ecb1bb81f8ee904"
checksum = "b77d01e822461baa8409e156015a1d91735549f0f2c17691bd2d996bef238f7f"
dependencies = [
"byteorder-lite",
"quick-error",
@ -1211,6 +1259,12 @@ dependencies = [
"serde",
]
[[package]]
name = "infer"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a588916bfdfd92e71cacef98a63d9b1f0d74d6599980d11894290e7ddefffcf7"
[[package]]
name = "inotify"
version = "0.11.0"
@ -1310,6 +1364,50 @@ dependencies = [
"libc",
]
[[package]]
name = "krilla"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69ee6128ebf52d7ce684613b6431ead2959f2be9ff8cf776eeaaad0427c953e9"
dependencies = [
"base64",
"bumpalo",
"comemo",
"flate2",
"float-cmp 0.10.0",
"fxhash",
"gif",
"image-webp",
"imagesize",
"once_cell",
"pdf-writer",
"png",
"rayon",
"rustybuzz",
"siphasher",
"skrifa",
"subsetter",
"tiny-skia-path",
"xmp-writer",
"yoke 0.8.0",
"zune-jpeg",
]
[[package]]
name = "krilla-svg"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3462989578155cf620ef8035f8921533cc95c28e2a0c75de172f7219e6aba84e"
dependencies = [
"flate2",
"fontdb",
"krilla",
"png",
"resvg",
"tiny-skia",
"usvg",
]
[[package]]
name = "kurbo"
version = "0.11.1"
@ -1371,6 +1469,15 @@ dependencies = [
"redox_syscall",
]
[[package]]
name = "libz-rs-sys"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "902bc563b5d65ad9bba616b490842ef0651066a1a1dc3ce1087113ffcb873c8d"
dependencies = [
"zlib-rs",
]
[[package]]
name = "linked-hash-map"
version = "0.5.6"
@ -1458,9 +1565,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
version = "0.8.3"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924"
checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
dependencies = [
"adler2",
"simd-adler32",
@ -1601,9 +1708,9 @@ dependencies = [
[[package]]
name = "openssl"
version = "0.10.70"
version = "0.10.72"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6"
checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da"
dependencies = [
"bitflags 2.8.0",
"cfg-if",
@ -1642,9 +1749,9 @@ dependencies = [
[[package]]
name = "openssl-sys"
version = "0.9.105"
version = "0.9.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc"
checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07"
dependencies = [
"cc",
"libc",
@ -1738,9 +1845,9 @@ checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
[[package]]
name = "pdf-writer"
version = "0.12.1"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5df03c7d216de06f93f398ef06f1385a60f2c597bb96f8195c8d98e08a26b1d5"
checksum = "3ea27c5015ab81753fc61e49f8cde74999346605ee148bb20008ef3d3150e0dc"
dependencies = [
"bitflags 2.8.0",
"itoa",
@ -1804,9 +1911,9 @@ checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315"
[[package]]
name = "pixglyph"
version = "0.5.1"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d15afa937836bf3d876f5a04ce28810c06045857bf46c3d0d31073b8aada5494"
checksum = "3c1106193bc18a4b840eb075ff6664c8a0b0270f0531bb12a7e9c803e53b55c5"
dependencies = [
"ttf-parser",
]
@ -1997,6 +2104,16 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "read-fonts"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "600e807b48ac55bad68a8cb75cc3c7739f139b9248f7e003e01e080f589b5288"
dependencies = [
"bytemuck",
"font-types",
]
[[package]]
name = "redox_syscall"
version = "0.5.8"
@ -2048,9 +2165,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "resvg"
version = "0.43.0"
version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7314563c59c7ce31c18e23ad3dd092c37b928a0fa4e1c0a1a6504351ab411d1"
checksum = "dd43d1c474e9dadf09a8fdf22d713ba668b499b5117b9b9079500224e26b5b29"
dependencies = [
"gif",
"image-webp",
@ -2121,9 +2238,9 @@ checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4"
[[package]]
name = "rustybuzz"
version = "0.18.0"
version = "0.20.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c85d1ccd519e61834798eb52c4e886e8c2d7d698dd3d6ce0b1b47eb8557f1181"
checksum = "fd3c7c96f8a08ee34eff8857b11b49b07d71d1c3f4e88f8a88d4c9e9f90b1702"
dependencies = [
"bitflags 2.8.0",
"bytemuck",
@ -2315,6 +2432,16 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
[[package]]
name = "skrifa"
version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fa1e5622e4f7b98877e8a19890efddcac1230cec6198bd9de91ec0e00010dc8"
dependencies = [
"bytemuck",
"read-fonts",
]
[[package]]
name = "slotmap"
version = "1.0.7"
@ -2361,7 +2488,7 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6637bab7722d379c8b41ba849228d680cc12d0a45ba1fa2b48f2a30577a06731"
dependencies = [
"float-cmp",
"float-cmp 0.9.0",
]
[[package]]
@ -2404,28 +2531,11 @@ dependencies = [
[[package]]
name = "subsetter"
version = "0.2.0"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74f98178f34057d4d4de93d68104007c6dea4dfac930204a69ab4622daefa648"
[[package]]
name = "svg2pdf"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5014c9dadcf318fb7ef8c16438e95abcc9de1ae24d60d5bccc64c55100c50364"
checksum = "35539e8de3dcce8dd0c01f3575f85db1e5ac1aea1b996d2d09d89f148bc91497"
dependencies = [
"fontdb",
"image",
"log",
"miniz_oxide",
"once_cell",
"pdf-writer",
"resvg",
"siphasher",
"subsetter",
"tiny-skia",
"ttf-parser",
"usvg",
"fxhash",
]
[[package]]
@ -2709,9 +2819,9 @@ dependencies = [
[[package]]
name = "ttf-parser"
version = "0.24.1"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5be21190ff5d38e8b4a2d3b6a3ae57f612cc39c96e83cedeaf7abc338a8bac4a"
checksum = "d2df906b07856748fa3f6e0ad0cbaa047052d4a7dd609e231c4f72cee8c36f31"
dependencies = [
"core_maths",
]
@ -2735,7 +2845,7 @@ checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
[[package]]
name = "typst"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"comemo",
"ecow",
@ -2752,13 +2862,12 @@ dependencies = [
[[package]]
name = "typst-assets"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1051c56bbbf74d31ea6c6b1661e62fa0ebb8104403ee53f6dcd321600426e0b6"
version = "0.13.1"
source = "git+https://github.com/typst/typst-assets?rev=ab1295f#ab1295ff896444e51902e03c2669955e1d73604a"
[[package]]
name = "typst-cli"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"chrono",
"clap",
@ -2803,12 +2912,12 @@ dependencies = [
[[package]]
name = "typst-dev-assets"
version = "0.13.0"
source = "git+https://github.com/typst/typst-dev-assets?tag=v0.13.0#61aebe9575a5abff889f76d73c7b01dc8e17e340"
version = "0.13.1"
source = "git+https://github.com/typst/typst-dev-assets?rev=fddbf8b#fddbf8b99506bc370ac0edcd4959add603a7fc92"
[[package]]
name = "typst-docs"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"clap",
"ecow",
@ -2831,7 +2940,7 @@ dependencies = [
[[package]]
name = "typst-eval"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"comemo",
"ecow",
@ -2849,7 +2958,7 @@ dependencies = [
[[package]]
name = "typst-fuzz"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"comemo",
"libfuzzer-sys",
@ -2861,7 +2970,7 @@ dependencies = [
[[package]]
name = "typst-html"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"comemo",
"ecow",
@ -2875,7 +2984,7 @@ dependencies = [
[[package]]
name = "typst-ide"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"comemo",
"ecow",
@ -2892,11 +3001,12 @@ dependencies = [
[[package]]
name = "typst-kit"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"dirs",
"ecow",
"env_proxy",
"fastrand",
"flate2",
"fontdb",
"native-tls",
@ -2915,7 +3025,7 @@ dependencies = [
[[package]]
name = "typst-layout"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"az",
"bumpalo",
@ -2945,7 +3055,7 @@ dependencies = [
[[package]]
name = "typst-library"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"az",
"bitflags 2.8.0",
@ -2958,6 +3068,7 @@ dependencies = [
"ecow",
"flate2",
"fontdb",
"glidesort",
"hayagriva",
"icu_properties",
"icu_provider",
@ -2996,6 +3107,7 @@ dependencies = [
"typst-timing",
"typst-utils",
"unicode-math-class",
"unicode-normalization",
"unicode-segmentation",
"unscanny",
"usvg",
@ -3005,7 +3117,7 @@ dependencies = [
[[package]]
name = "typst-macros"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"heck",
"proc-macro2",
@ -3015,33 +3127,27 @@ dependencies = [
[[package]]
name = "typst-pdf"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"arrayvec",
"base64",
"bytemuck",
"comemo",
"ecow",
"image",
"indexmap 2.7.1",
"miniz_oxide",
"pdf-writer",
"infer",
"krilla",
"krilla-svg",
"serde",
"subsetter",
"svg2pdf",
"ttf-parser",
"typst-assets",
"typst-library",
"typst-macros",
"typst-syntax",
"typst-timing",
"typst-utils",
"xmp-writer",
]
[[package]]
name = "typst-realize"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"arrayvec",
"bumpalo",
@ -3057,7 +3163,7 @@ dependencies = [
[[package]]
name = "typst-render"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"bytemuck",
"comemo",
@ -3073,7 +3179,7 @@ dependencies = [
[[package]]
name = "typst-svg"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"base64",
"comemo",
@ -3091,7 +3197,7 @@ dependencies = [
[[package]]
name = "typst-syntax"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"ecow",
"serde",
@ -3107,7 +3213,7 @@ dependencies = [
[[package]]
name = "typst-tests"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"clap",
"comemo",
@ -3132,7 +3238,7 @@ dependencies = [
[[package]]
name = "typst-timing"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"parking_lot",
"serde",
@ -3142,7 +3248,7 @@ dependencies = [
[[package]]
name = "typst-utils"
version = "0.13.0"
version = "0.13.1"
dependencies = [
"once_cell",
"portable-atomic",
@ -3185,15 +3291,15 @@ checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
[[package]]
name = "unicode-bidi-mirroring"
version = "0.3.0"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64af057ad7466495ca113126be61838d8af947f41d93a949980b2389a118082f"
checksum = "5dfa6e8c60bb66d49db113e0125ee8711b7647b5579dc7f5f19c42357ed039fe"
[[package]]
name = "unicode-ccc"
version = "0.3.0"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "260bc6647b3893a9a90668360803a15f96b85a5257b1c3a0c3daf6ae2496de42"
checksum = "ce61d488bcdc9bc8b5d1772c404828b17fc481c0a582b5581e95fb233aef503e"
[[package]]
name = "unicode-ident"
@ -3288,9 +3394,9 @@ dependencies = [
[[package]]
name = "usvg"
version = "0.43.0"
version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6803057b5cbb426e9fb8ce2216f3a9b4ca1dd2c705ba3cbebc13006e437735fd"
checksum = "2ac8e0e3e4696253dc06167990b3fe9a2668ab66270adf949a464db4088cb354"
dependencies = [
"base64",
"data-url",
@ -3660,9 +3766,9 @@ checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9"
[[package]]
name = "xmp-writer"
version = "0.3.1"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eb5954c9ca6dcc869e98d3e42760ed9dab08f3e70212b31d7ab8ae7f3b7a487"
checksum = "ce9e2f4a404d9ebffc0a9832cf4f50907220ba3d7fffa9099261a5cab52f2dd7"
[[package]]
name = "xz2"
@ -3700,7 +3806,19 @@ checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive",
"yoke-derive 0.7.5",
"zerofrom",
]
[[package]]
name = "yoke"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive 0.8.0",
"zerofrom",
]
@ -3716,6 +3834,18 @@ dependencies = [
"synstructure",
]
[[package]]
name = "yoke-derive"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zerocopy"
version = "0.7.35"
@ -3777,7 +3907,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
dependencies = [
"serde",
"yoke",
"yoke 0.7.5",
"zerofrom",
"zerovec-derive",
]
@ -3795,21 +3925,25 @@ dependencies = [
[[package]]
name = "zip"
version = "2.2.2"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae9c1ea7b3a5e1f4b922ff856a129881167511563dc219869afe3787fc0c1a45"
checksum = "27c03817464f64e23f6f37574b4fdc8cf65925b5bfd2b0f2aedf959791941f88"
dependencies = [
"arbitrary",
"crc32fast",
"crossbeam-utils",
"displaydoc",
"flate2",
"indexmap 2.7.1",
"memchr",
"thiserror 2.0.11",
"zopfli",
]
[[package]]
name = "zlib-rs"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b20717f0917c908dc63de2e44e97f1e6b126ca58d0e391cee86d504eb8fbd05"
[[package]]
name = "zopfli"
version = "0.8.1"

View File

@ -4,8 +4,8 @@ default-members = ["crates/typst-cli"]
resolver = "2"
[workspace.package]
version = "0.13.0"
rust-version = "1.80" # also change in ci.yml
version = "0.13.1"
rust-version = "1.83" # also change in ci.yml
authors = ["The Typst Project Developers"]
edition = "2021"
homepage = "https://typst.app"
@ -16,24 +16,24 @@ keywords = ["typst"]
readme = "README.md"
[workspace.dependencies]
typst = { path = "crates/typst", version = "0.13.0" }
typst-cli = { path = "crates/typst-cli", version = "0.13.0" }
typst-eval = { path = "crates/typst-eval", version = "0.13.0" }
typst-html = { path = "crates/typst-html", version = "0.13.0" }
typst-ide = { path = "crates/typst-ide", version = "0.13.0" }
typst-kit = { path = "crates/typst-kit", version = "0.13.0" }
typst-layout = { path = "crates/typst-layout", version = "0.13.0" }
typst-library = { path = "crates/typst-library", version = "0.13.0" }
typst-macros = { path = "crates/typst-macros", version = "0.13.0" }
typst-pdf = { path = "crates/typst-pdf", version = "0.13.0" }
typst-realize = { path = "crates/typst-realize", version = "0.13.0" }
typst-render = { path = "crates/typst-render", version = "0.13.0" }
typst-svg = { path = "crates/typst-svg", version = "0.13.0" }
typst-syntax = { path = "crates/typst-syntax", version = "0.13.0" }
typst-timing = { path = "crates/typst-timing", version = "0.13.0" }
typst-utils = { path = "crates/typst-utils", version = "0.13.0" }
typst-assets = "0.13.0"
typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", tag = "v0.13.0" }
typst = { path = "crates/typst", version = "0.13.1" }
typst-cli = { path = "crates/typst-cli", version = "0.13.1" }
typst-eval = { path = "crates/typst-eval", version = "0.13.1" }
typst-html = { path = "crates/typst-html", version = "0.13.1" }
typst-ide = { path = "crates/typst-ide", version = "0.13.1" }
typst-kit = { path = "crates/typst-kit", version = "0.13.1" }
typst-layout = { path = "crates/typst-layout", version = "0.13.1" }
typst-library = { path = "crates/typst-library", version = "0.13.1" }
typst-macros = { path = "crates/typst-macros", version = "0.13.1" }
typst-pdf = { path = "crates/typst-pdf", version = "0.13.1" }
typst-realize = { path = "crates/typst-realize", version = "0.13.1" }
typst-render = { path = "crates/typst-render", version = "0.13.1" }
typst-svg = { path = "crates/typst-svg", version = "0.13.1" }
typst-syntax = { path = "crates/typst-syntax", version = "0.13.1" }
typst-timing = { path = "crates/typst-timing", version = "0.13.1" }
typst-utils = { path = "crates/typst-utils", version = "0.13.1" }
typst-assets = { git = "https://github.com/typst/typst-assets", rev = "ab1295f" }
typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", rev = "fddbf8b" }
arrayvec = "0.7.4"
az = "1.2"
base64 = "0.22"
@ -55,9 +55,11 @@ ctrlc = "3.4.1"
dirs = "6"
ecow = { version = "0.2", features = ["serde"] }
env_proxy = "0.4"
fastrand = "2.3"
flate2 = "1"
fontdb = { version = "0.21", default-features = false }
fontdb = { version = "0.23", default-features = false }
fs_extra = "1.3"
glidesort = "0.1.2"
hayagriva = "0.8.1"
heck = "0.5"
hypher = "0.1.4"
@ -69,24 +71,25 @@ icu_segmenter = { version = "1.4", features = ["serde"] }
if_chain = "1"
image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "gif"] }
indexmap = { version = "2", features = ["serde"] }
infer = { version = "0.19.0", default-features = false }
kamadak-exif = "0.6"
krilla = { version = "0.4.0", default-features = false, features = ["raster-images", "comemo", "rayon"] }
krilla-svg = "0.1.0"
kurbo = "0.11"
libfuzzer-sys = "0.4"
lipsum = "0.9"
memchr = "2"
miniz_oxide = "0.8"
native-tls = "0.2"
notify = "8"
once_cell = "1"
open = "5.0.1"
openssl = "0.10"
openssl = "0.10.72"
oxipng = { version = "9.0", default-features = false, features = ["filetime", "parallel", "zopfli"] }
palette = { version = "0.7.3", default-features = false, features = ["approx", "libm"] }
parking_lot = "0.12.1"
pathdiff = "0.2"
pdf-writer = "0.12.1"
phf = { version = "0.11", features = ["macros"] }
pixglyph = "0.5.1"
pixglyph = "0.6"
png = "0.17"
portable-atomic = "1.6"
proc-macro2 = "1"
@ -96,10 +99,10 @@ quote = "1"
rayon = "1.7.0"
regex = "1"
regex-syntax = "0.8"
resvg = { version = "0.43", default-features = false, features = ["raster-images"] }
resvg = { version = "0.45", default-features = false, features = ["raster-images"] }
roxmltree = "0.20"
rust_decimal = { version = "1.36.0", default-features = false, features = ["maths"] }
rustybuzz = "0.18"
rustybuzz = "0.20"
same-file = "1"
self-replace = "1.3.7"
semver = "1"
@ -111,8 +114,6 @@ sigpipe = "0.1"
siphasher = "1"
smallvec = { version = "1.11.1", features = ["union", "const_generics", "const_new"] }
stacker = "0.1.15"
subsetter = "0.2"
svg2pdf = "0.12"
syn = { version = "2", features = ["full", "extra-traits"] }
syntect = { version = "5", default-features = false, features = ["parsing", "regex-fancy", "plist-load", "yaml-load"] }
tar = "0.4"
@ -122,26 +123,26 @@ time = { version = "0.3.20", features = ["formatting", "macros", "parsing"] }
tiny_http = "0.12"
tiny-skia = "0.11"
toml = { version = "0.8", default-features = false, features = ["parse", "display"] }
ttf-parser = "0.24.1"
ttf-parser = "0.25.0"
two-face = { version = "0.4.3", default-features = false, features = ["syntect-fancy"] }
typed-arena = "2"
unicode-bidi = "0.3.18"
unicode-ident = "1.0"
unicode-math-class = "0.1"
unicode-script = "0.5"
unicode-normalization = "0.1.24"
unicode-segmentation = "1"
unscanny = "0.1"
ureq = { version = "2", default-features = false, features = ["native-tls", "gzip", "json"] }
usvg = { version = "0.43", default-features = false, features = ["text"] }
usvg = { version = "0.45", default-features = false, features = ["text"] }
walkdir = "2"
wasmi = "0.40.0"
web-sys = "0.3"
xmlparser = "0.13.5"
xmlwriter = "0.1.0"
xmp-writer = "0.3.1"
xz2 = { version = "0.1", features = ["static"] }
yaml-front-matter = "0.1"
zip = { version = "2", default-features = false, features = ["deflate"] }
zip = { version = "2.5", default-features = false, features = ["deflate"] }
[profile.dev.package."*"]
opt-level = 2

View File

@ -113,7 +113,9 @@ Typst's CLI is available from different sources:
- You can install Typst through different package managers. Note that the
versions in the package managers might lag behind the latest release.
- Linux: View [Typst on Repology][repology]
- Linux:
- View [Typst on Repology][repology]
- View [Typst's Snap][snap]
- macOS: `brew install typst`
- Windows: `winget install --id Typst.Typst`
@ -175,22 +177,22 @@ If you prefer an integrated IDE-like experience with autocompletion and instant
preview, you can also check out [Typst's free web app][app].
## Community
The main place where the community gathers is our [Discord server][discord].
Feel free to join there to ask questions, help out others, share cool things
you created with Typst, or just to chat.
The main places where the community gathers are our [Forum][forum] and our
[Discord server][discord]. The Forum is a great place to ask questions, help
others, and share cool things you created with Typst. The Discord server is more
suitable for quicker questions, discussions about contributing, or just to chat.
We'd be happy to see you there!
Aside from that there are a few places where you can find things built by
the community:
- The official [package list](https://typst.app/docs/packages)
- The [Awesome Typst](https://github.com/qjcg/awesome-typst) repository
[Typst Universe][universe] is where the community shares templates and packages.
If you want to share your own creations, you can submit them to our
[package repository][packages].
If you had a bad experience in our community, please [reach out to us][contact].
## Contributing
We would love to see contributions from the community. If you experience bugs,
feel free to open an issue. If you would like to implement a new feature or bug
fix, please follow the steps outlined in the [contribution guide][contributing].
We love to see contributions from the community. If you experience bugs, feel
free to open an issue. If you would like to implement a new feature or bug fix,
please follow the steps outlined in the [contribution guide][contributing].
To build Typst yourself, first ensure that you have the
[latest stable Rust][rust] installed. Then, clone this repository and build the
@ -241,6 +243,8 @@ instant preview. To achieve these goals, we follow three core design principles:
[docs]: https://typst.app/docs/
[app]: https://typst.app/
[discord]: https://discord.gg/2uDybryKPe
[forum]: https://forum.typst.app/
[universe]: https://typst.app/universe/
[tutorial]: https://typst.app/docs/tutorial/
[show]: https://typst.app/docs/reference/styling/#show-rules
[math]: https://typst.app/docs/reference/math/
@ -254,3 +258,4 @@ instant preview. To achieve these goals, we follow three core design principles:
[contributing]: https://github.com/typst/typst/blob/main/CONTRIBUTING.md
[packages]: https://github.com/typst/packages/
[`comemo`]: https://github.com/typst/comemo/
[snap]: https://snapcraft.io/typst

View File

@ -361,7 +361,7 @@ pub struct FontArgs {
/// Ensures system fonts won't be searched, unless explicitly included via
/// `--font-path`.
#[arg(long)]
#[arg(long, env = "TYPST_IGNORE_SYSTEM_FONTS")]
pub ignore_system_fonts: bool,
}
@ -467,15 +467,45 @@ display_possible_values!(Feature);
#[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum)]
#[allow(non_camel_case_types)]
pub enum PdfStandard {
/// PDF 1.4.
#[value(name = "1.4")]
V_1_4,
/// PDF 1.5.
#[value(name = "1.5")]
V_1_5,
/// PDF 1.5.
#[value(name = "1.6")]
V_1_6,
/// PDF 1.7.
#[value(name = "1.7")]
V_1_7,
/// PDF 2.0.
#[value(name = "2.0")]
V_2_0,
/// PDF/A-1b.
#[value(name = "a-1b")]
A_1b,
/// PDF/A-2b.
#[value(name = "a-2b")]
A_2b,
/// PDF/A-3b.
/// PDF/A-2u.
#[value(name = "a-2u")]
A_2u,
/// PDF/A-3u.
#[value(name = "a-3b")]
A_3b,
/// PDF/A-3u.
#[value(name = "a-3u")]
A_3u,
/// PDF/A-4.
#[value(name = "a-4")]
A_4,
/// PDF/A-4f.
#[value(name = "a-4f")]
A_4f,
/// PDF/A-4e.
#[value(name = "a-4e")]
A_4e,
}
display_possible_values!(PdfStandard);

View File

@ -63,8 +63,7 @@ pub struct CompileConfig {
/// Opens the output file with the default viewer or a specific program after
/// compilation.
pub open: Option<Option<String>>,
/// One (or multiple comma-separated) PDF standards that Typst will enforce
/// conformance with.
/// A list of standards the PDF should conform to.
pub pdf_standards: PdfStandards,
/// A path to write a Makefile rule describing the current compilation.
pub make_deps: Option<PathBuf>,
@ -130,18 +129,9 @@ impl CompileConfig {
PageRanges::new(export_ranges.iter().map(|r| r.0.clone()).collect())
});
let pdf_standards = {
let list = args
.pdf_standard
.iter()
.map(|standard| match standard {
PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
})
.collect::<Vec<_>>();
PdfStandards::new(&list)?
};
let pdf_standards = PdfStandards::new(
&args.pdf_standard.iter().copied().map(Into::into).collect::<Vec<_>>(),
)?;
#[cfg(feature = "http-server")]
let server = match watch {
@ -295,6 +285,7 @@ fn export_pdf(document: &PagedDocument, config: &CompileConfig) -> SourceResult<
})
}
};
let options = PdfOptions {
ident: Smart::Auto,
timestamp,
@ -350,7 +341,7 @@ fn export_image(
.iter()
.enumerate()
.filter(|(i, _)| {
config.pages.as_ref().map_or(true, |exported_page_ranges| {
config.pages.as_ref().is_none_or(|exported_page_ranges| {
exported_page_ranges.includes_page_index(*i)
})
})
@ -765,3 +756,23 @@ impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
})
}
}
impl From<PdfStandard> for typst_pdf::PdfStandard {
fn from(standard: PdfStandard) -> Self {
match standard {
PdfStandard::V_1_4 => typst_pdf::PdfStandard::V_1_4,
PdfStandard::V_1_5 => typst_pdf::PdfStandard::V_1_5,
PdfStandard::V_1_6 => typst_pdf::PdfStandard::V_1_6,
PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
PdfStandard::V_2_0 => typst_pdf::PdfStandard::V_2_0,
PdfStandard::A_1b => typst_pdf::PdfStandard::A_1b,
PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
PdfStandard::A_2u => typst_pdf::PdfStandard::A_2u,
PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
PdfStandard::A_3u => typst_pdf::PdfStandard::A_3u,
PdfStandard::A_4 => typst_pdf::PdfStandard::A_4,
PdfStandard::A_4f => typst_pdf::PdfStandard::A_4f,
PdfStandard::A_4e => typst_pdf::PdfStandard::A_4e,
}
}
}

View File

@ -2,6 +2,7 @@ use comemo::Track;
use ecow::{eco_format, EcoString};
use serde::Serialize;
use typst::diag::{bail, HintedStrResult, StrResult, Warned};
use typst::engine::Sink;
use typst::foundations::{Content, IntoValue, LocatableSelector, Scope};
use typst::layout::PagedDocument;
use typst::syntax::Span;
@ -58,6 +59,8 @@ fn retrieve(
let selector = eval_string(
&typst::ROUTINES,
world.track(),
// TODO: propagate warnings
Sink::new().track_mut(),
&command.selector,
Span::detached(),
EvalMode::Code,

View File

@ -55,11 +55,11 @@ pub fn watch(timer: &mut Timer, command: &WatchCommand) -> StrResult<()> {
// Perform initial compilation.
timer.record(&mut world, |world| compile_once(world, &mut config))??;
// Watch all dependencies of the initial compilation.
watcher.update(world.dependencies())?;
// Recompile whenever something relevant happens.
loop {
// Watch all dependencies of the most recent compilation.
watcher.update(world.dependencies())?;
// Wait until anything relevant happens.
watcher.wait()?;
@ -71,9 +71,6 @@ pub fn watch(timer: &mut Timer, command: &WatchCommand) -> StrResult<()> {
// Evict the cache.
comemo::evict(10);
// Adjust the file watching.
watcher.update(world.dependencies())?;
}
}
@ -204,6 +201,10 @@ impl Watcher {
let event = event
.map_err(|err| eco_format!("failed to watch dependencies ({err})"))?;
if !is_relevant_event_kind(&event.kind) {
continue;
}
// Workaround for notify-rs' implicit unwatch on remove/rename
// (triggered by some editors when saving files) with the
// inotify backend. By keeping track of the potentially
@ -224,7 +225,17 @@ impl Watcher {
}
}
relevant |= self.is_event_relevant(&event);
// Don't recompile because the output file changed.
// FIXME: This doesn't work properly for multifile image export.
if event
.paths
.iter()
.all(|path| is_same_file(path, &self.output).unwrap_or(false))
{
continue;
}
relevant = true;
}
// If we found a relevant event or if any of the missing files now
@ -234,32 +245,23 @@ impl Watcher {
}
}
}
}
/// Whether a watch event is relevant for compilation.
fn is_event_relevant(&self, event: &notify::Event) -> bool {
// Never recompile because the output file changed.
if event
.paths
.iter()
.all(|path| is_same_file(path, &self.output).unwrap_or(false))
{
return false;
}
match &event.kind {
notify::EventKind::Any => true,
notify::EventKind::Access(_) => false,
notify::EventKind::Create(_) => true,
notify::EventKind::Modify(kind) => match kind {
notify::event::ModifyKind::Any => true,
notify::event::ModifyKind::Data(_) => true,
notify::event::ModifyKind::Metadata(_) => false,
notify::event::ModifyKind::Name(_) => true,
notify::event::ModifyKind::Other => false,
},
notify::EventKind::Remove(_) => true,
notify::EventKind::Other => false,
}
/// Whether a kind of watch event is relevant for compilation.
fn is_relevant_event_kind(kind: &notify::EventKind) -> bool {
match kind {
notify::EventKind::Any => true,
notify::EventKind::Access(_) => false,
notify::EventKind::Create(_) => true,
notify::EventKind::Modify(kind) => match kind {
notify::event::ModifyKind::Any => true,
notify::event::ModifyKind::Data(_) => true,
notify::event::ModifyKind::Metadata(_) => false,
notify::event::ModifyKind::Name(_) => true,
notify::event::ModifyKind::Other => false,
},
notify::EventKind::Remove(_) => true,
notify::EventKind::Other => false,
}
}

View File

@ -210,7 +210,9 @@ impl World for SystemWorld {
}
fn font(&self, index: usize) -> Option<Font> {
self.fonts[index].get()
// comemo's validation may invoke this function with an invalid index. This is
// impossible in typst-cli but possible if a custom tool mutates the fonts.
self.fonts.get(index)?.get()
}
fn today(&self, offset: Option<i64>) -> Option<Datetime> {

View File

@ -466,7 +466,7 @@ impl<'a> CapturesVisitor<'a> {
}
// Code and content blocks create a scope.
Some(ast::Expr::Code(_) | ast::Expr::Content(_)) => {
Some(ast::Expr::CodeBlock(_) | ast::Expr::ContentBlock(_)) => {
self.internal.enter();
for child in node.children() {
self.visit(child);
@ -516,7 +516,7 @@ impl<'a> CapturesVisitor<'a> {
// A let expression contains a binding, but that binding is only
// active after the body is evaluated.
Some(ast::Expr::Let(expr)) => {
Some(ast::Expr::LetBinding(expr)) => {
if let Some(init) = expr.init() {
self.visit(init.to_untyped());
}
@ -529,7 +529,7 @@ impl<'a> CapturesVisitor<'a> {
// A for loop contains one or two bindings in its pattern. These are
// active after the iterable is evaluated but before the body is
// evaluated.
Some(ast::Expr::For(expr)) => {
Some(ast::Expr::ForLoop(expr)) => {
self.visit(expr.iterable().to_untyped());
self.internal.enter();
@ -544,7 +544,7 @@ impl<'a> CapturesVisitor<'a> {
// An import contains items, but these are active only after the
// path is evaluated.
Some(ast::Expr::Import(expr)) => {
Some(ast::Expr::ModuleImport(expr)) => {
self.visit(expr.source().to_untyped());
if let Some(ast::Imports::Items(items)) = expr.imports() {
for item in items.iter() {

View File

@ -30,7 +30,7 @@ fn eval_code<'a>(
while let Some(expr) = exprs.next() {
let span = expr.span();
let value = match expr {
ast::Expr::Set(set) => {
ast::Expr::SetRule(set) => {
let styles = set.eval(vm)?;
if vm.flow.is_some() {
break;
@ -39,7 +39,7 @@ fn eval_code<'a>(
let tail = eval_code(vm, exprs)?.display();
Value::Content(tail.styled_with_map(styles))
}
ast::Expr::Show(show) => {
ast::Expr::ShowRule(show) => {
let recipe = show.eval(vm)?;
if vm.flow.is_some() {
break;
@ -55,7 +55,7 @@ fn eval_code<'a>(
_ => expr.eval(vm)?,
};
output = ops::join(output, value, &mut (&mut vm.engine, span)).at(span)?;
output = ops::join(output, value).at(span)?;
if let Some(event) = &vm.flow {
warn_for_discarded_content(&mut vm.engine, event, &output);
@ -94,9 +94,9 @@ impl Eval for ast::Expr<'_> {
Self::Label(v) => v.eval(vm),
Self::Ref(v) => v.eval(vm).map(Value::Content),
Self::Heading(v) => v.eval(vm).map(Value::Content),
Self::List(v) => v.eval(vm).map(Value::Content),
Self::Enum(v) => v.eval(vm).map(Value::Content),
Self::Term(v) => v.eval(vm).map(Value::Content),
Self::ListItem(v) => v.eval(vm).map(Value::Content),
Self::EnumItem(v) => v.eval(vm).map(Value::Content),
Self::TermItem(v) => v.eval(vm).map(Value::Content),
Self::Equation(v) => v.eval(vm).map(Value::Content),
Self::Math(v) => v.eval(vm).map(Value::Content),
Self::MathText(v) => v.eval(vm).map(Value::Content),
@ -116,8 +116,8 @@ impl Eval for ast::Expr<'_> {
Self::Float(v) => v.eval(vm),
Self::Numeric(v) => v.eval(vm),
Self::Str(v) => v.eval(vm),
Self::Code(v) => v.eval(vm),
Self::Content(v) => v.eval(vm).map(Value::Content),
Self::CodeBlock(v) => v.eval(vm),
Self::ContentBlock(v) => v.eval(vm).map(Value::Content),
Self::Array(v) => v.eval(vm).map(Value::Array),
Self::Dict(v) => v.eval(vm).map(Value::Dict),
Self::Parenthesized(v) => v.eval(vm),
@ -126,19 +126,19 @@ impl Eval for ast::Expr<'_> {
Self::Closure(v) => v.eval(vm),
Self::Unary(v) => v.eval(vm),
Self::Binary(v) => v.eval(vm),
Self::Let(v) => v.eval(vm),
Self::DestructAssign(v) => v.eval(vm),
Self::Set(_) => bail!(forbidden("set")),
Self::Show(_) => bail!(forbidden("show")),
Self::LetBinding(v) => v.eval(vm),
Self::DestructAssignment(v) => v.eval(vm),
Self::SetRule(_) => bail!(forbidden("set")),
Self::ShowRule(_) => bail!(forbidden("show")),
Self::Contextual(v) => v.eval(vm).map(Value::Content),
Self::Conditional(v) => v.eval(vm),
Self::While(v) => v.eval(vm),
Self::For(v) => v.eval(vm),
Self::Import(v) => v.eval(vm),
Self::Include(v) => v.eval(vm).map(Value::Content),
Self::Break(v) => v.eval(vm),
Self::Continue(v) => v.eval(vm),
Self::Return(v) => v.eval(vm),
Self::WhileLoop(v) => v.eval(vm),
Self::ForLoop(v) => v.eval(vm),
Self::ModuleImport(v) => v.eval(vm),
Self::ModuleInclude(v) => v.eval(vm).map(Value::Content),
Self::LoopBreak(v) => v.eval(vm),
Self::LoopContinue(v) => v.eval(vm),
Self::FuncReturn(v) => v.eval(vm),
}?
.spanned(span);

View File

@ -83,8 +83,7 @@ impl Eval for ast::WhileLoop<'_> {
}
let value = body.eval(vm)?;
let span = body.span();
output = ops::join(output, value, &mut (&mut vm.engine, span)).at(span)?;
output = ops::join(output, value).at(body.span())?;
match vm.flow {
Some(FlowEvent::Break(_)) => {
@ -130,9 +129,7 @@ impl Eval for ast::ForLoop<'_> {
let body = self.body();
let value = body.eval(vm)?;
let span = body.span();
output =
ops::join(output, value, &mut (&mut vm.engine, span)).at(span)?;
output = ops::join(output, value).at(body.span())?;
match vm.flow {
Some(FlowEvent::Break(_)) => {

View File

@ -101,6 +101,7 @@ pub fn eval(
pub fn eval_string(
routines: &Routines,
world: Tracked<dyn World + '_>,
sink: TrackedMut<Sink>,
string: &str,
span: Span,
mode: EvalMode,
@ -121,7 +122,6 @@ pub fn eval_string(
}
// Prepare the engine.
let mut sink = Sink::new();
let introspector = Introspector::default();
let traced = Traced::default();
let engine = Engine {
@ -129,7 +129,7 @@ pub fn eval_string(
world,
introspector: introspector.track(),
traced: traced.track(),
sink: sink.track_mut(),
sink,
route: Route::default(),
};

View File

@ -33,7 +33,7 @@ fn eval_markup<'a>(
while let Some(expr) = exprs.next() {
match expr {
ast::Expr::Set(set) => {
ast::Expr::SetRule(set) => {
let styles = set.eval(vm)?;
if vm.flow.is_some() {
break;
@ -41,7 +41,7 @@ fn eval_markup<'a>(
seq.push(eval_markup(vm, exprs)?.styled_with_map(styles))
}
ast::Expr::Show(show) => {
ast::Expr::ShowRule(show) => {
let recipe = show.eval(vm)?;
if vm.flow.is_some() {
break;

View File

@ -1,4 +1,4 @@
use typst_library::diag::{At, DeprecationSink, HintedStrResult, SourceResult};
use typst_library::diag::{At, HintedStrResult, SourceResult};
use typst_library::foundations::{ops, IntoValue, Value};
use typst_syntax::ast::{self, AstNode};
@ -23,22 +23,22 @@ impl Eval for ast::Binary<'_> {
fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
match self.op() {
ast::BinOp::Add => apply_binary_with_sink(self, vm, ops::add),
ast::BinOp::Add => apply_binary(self, vm, ops::add),
ast::BinOp::Sub => apply_binary(self, vm, ops::sub),
ast::BinOp::Mul => apply_binary(self, vm, ops::mul),
ast::BinOp::Div => apply_binary(self, vm, ops::div),
ast::BinOp::And => apply_binary(self, vm, ops::and),
ast::BinOp::Or => apply_binary(self, vm, ops::or),
ast::BinOp::Eq => apply_binary_with_sink(self, vm, ops::eq),
ast::BinOp::Neq => apply_binary_with_sink(self, vm, ops::neq),
ast::BinOp::Eq => apply_binary(self, vm, ops::eq),
ast::BinOp::Neq => apply_binary(self, vm, ops::neq),
ast::BinOp::Lt => apply_binary(self, vm, ops::lt),
ast::BinOp::Leq => apply_binary(self, vm, ops::leq),
ast::BinOp::Gt => apply_binary(self, vm, ops::gt),
ast::BinOp::Geq => apply_binary(self, vm, ops::geq),
ast::BinOp::In => apply_binary_with_sink(self, vm, ops::in_),
ast::BinOp::NotIn => apply_binary_with_sink(self, vm, ops::not_in),
ast::BinOp::In => apply_binary(self, vm, ops::in_),
ast::BinOp::NotIn => apply_binary(self, vm, ops::not_in),
ast::BinOp::Assign => apply_assignment(self, vm, |_, b| Ok(b)),
ast::BinOp::AddAssign => apply_assignment_with_sink(self, vm, ops::add),
ast::BinOp::AddAssign => apply_assignment(self, vm, ops::add),
ast::BinOp::SubAssign => apply_assignment(self, vm, ops::sub),
ast::BinOp::MulAssign => apply_assignment(self, vm, ops::mul),
ast::BinOp::DivAssign => apply_assignment(self, vm, ops::div),
@ -65,18 +65,6 @@ fn apply_binary(
op(lhs, rhs).at(binary.span())
}
/// Apply a basic binary operation, with the possiblity of deprecations.
fn apply_binary_with_sink(
binary: ast::Binary,
vm: &mut Vm,
op: impl Fn(Value, Value, &mut dyn DeprecationSink) -> HintedStrResult<Value>,
) -> SourceResult<Value> {
let span = binary.span();
let lhs = binary.lhs().eval(vm)?;
let rhs = binary.rhs().eval(vm)?;
op(lhs, rhs, &mut (&mut vm.engine, span)).at(span)
}
/// Apply an assignment operation.
fn apply_assignment(
binary: ast::Binary,
@ -101,23 +89,3 @@ fn apply_assignment(
*location = op(lhs, rhs).at(binary.span())?;
Ok(Value::None)
}
/// Apply an assignment operation, with the possiblity of deprecations.
fn apply_assignment_with_sink(
binary: ast::Binary,
vm: &mut Vm,
op: fn(Value, Value, &mut dyn DeprecationSink) -> HintedStrResult<Value>,
) -> SourceResult<Value> {
let rhs = binary.rhs().eval(vm)?;
let location = binary.lhs().access(vm)?;
let lhs = std::mem::take(&mut *location);
let mut sink = vec![];
let span = binary.span();
*location = op(lhs, rhs, &mut (&mut sink, span)).at(span)?;
if !sink.is_empty() {
for warning in sink {
vm.engine.sink.warn(warning);
}
}
Ok(Value::None)
}

View File

@ -45,7 +45,7 @@ impl Eval for ast::ShowRule<'_> {
let transform = self.transform();
let transform = match transform {
ast::Expr::Set(set) => Transformation::Style(set.eval(vm)?),
ast::Expr::SetRule(set) => Transformation::Style(set.eval(vm)?),
expr => expr.eval(vm)?.cast::<Transformation>().at(transform.span())?,
};

View File

@ -83,8 +83,8 @@ fn html_document_impl(
)?;
let output = handle_list(&mut engine, &mut locator, children.iter().copied())?;
let introspector = Introspector::html(&output);
let root = root_element(output, &info)?;
let introspector = Introspector::html(&root);
Ok(HtmlDocument { info, root, introspector })
}
@ -263,13 +263,13 @@ fn handle(
/// Wrap the nodes in `<html>` and `<body>` if they are not yet rooted,
/// supplying a suitable `<head>`.
fn root_element(output: Vec<HtmlNode>, info: &DocumentInfo) -> SourceResult<HtmlElement> {
let head = head_element(info);
let body = match classify_output(output)? {
OutputKind::Html(element) => return Ok(element),
OutputKind::Body(body) => body,
OutputKind::Leafs(leafs) => HtmlElement::new(tag::body).with_children(leafs),
};
Ok(HtmlElement::new(tag::html)
.with_children(vec![head_element(info).into(), body.into()]))
Ok(HtmlElement::new(tag::html).with_children(vec![head.into(), body.into()]))
}
/// Generate a `<head>` element.
@ -302,23 +302,41 @@ fn head_element(info: &DocumentInfo) -> HtmlElement {
);
}
if !info.author.is_empty() {
children.push(
HtmlElement::new(tag::meta)
.with_attr(attr::name, "authors")
.with_attr(attr::content, info.author.join(", "))
.into(),
)
}
if !info.keywords.is_empty() {
children.push(
HtmlElement::new(tag::meta)
.with_attr(attr::name, "keywords")
.with_attr(attr::content, info.keywords.join(", "))
.into(),
)
}
HtmlElement::new(tag::head).with_children(children)
}
/// Determine which kind of output the user generated.
fn classify_output(mut output: Vec<HtmlNode>) -> SourceResult<OutputKind> {
let len = output.len();
let count = output.iter().filter(|node| !matches!(node, HtmlNode::Tag(_))).count();
for node in &mut output {
let HtmlNode::Element(elem) = node else { continue };
let tag = elem.tag;
let mut take = || std::mem::replace(elem, HtmlElement::new(tag::html));
match (tag, len) {
match (tag, count) {
(tag::html, 1) => return Ok(OutputKind::Html(take())),
(tag::body, 1) => return Ok(OutputKind::Body(take())),
(tag::html | tag::body, _) => bail!(
elem.span,
"`{}` element must be the only element in the document",
elem.tag
elem.tag,
),
_ => {}
}

View File

@ -26,7 +26,7 @@ pub fn analyze_expr(
ast::Expr::Str(v) => Value::Str(v.get().into()),
_ => {
if node.kind() == SyntaxKind::Contextual {
if let Some(child) = node.children().last() {
if let Some(child) = node.children().next_back() {
return analyze_expr(world, &child);
}
}

View File

@ -410,9 +410,17 @@ fn field_access_completions(
elem.into_iter().chain(Some(ty))
};
// Autocomplete methods from the element's or type's scope.
// Autocomplete methods from the element's or type's scope. We only complete
// those which have a `self` parameter.
for (name, binding) in scopes.flat_map(|scope| scope.iter()) {
ctx.call_completion(name.clone(), binding.read());
let Ok(func) = binding.read().clone().cast::<Func>() else { continue };
if func
.params()
.and_then(|params| params.first())
.is_some_and(|param| param.name == "self")
{
ctx.call_completion(name.clone(), binding.read());
}
}
if let Some(scope) = value.scope() {
@ -509,7 +517,7 @@ fn complete_imports(ctx: &mut CompletionContext) -> bool {
// "#import "path.typ": a, b, |".
if_chain! {
if let Some(prev) = ctx.leaf.prev_sibling();
if let Some(ast::Expr::Import(import)) = prev.get().cast();
if let Some(ast::Expr::ModuleImport(import)) = prev.get().cast();
if let Some(ast::Imports::Items(items)) = import.imports();
if let Some(source) = prev.children().find(|child| child.is::<ast::Expr>());
then {
@ -528,7 +536,7 @@ fn complete_imports(ctx: &mut CompletionContext) -> bool {
if let Some(grand) = parent.parent();
if grand.kind() == SyntaxKind::ImportItems;
if let Some(great) = grand.parent();
if let Some(ast::Expr::Import(import)) = great.get().cast();
if let Some(ast::Expr::ModuleImport(import)) = great.get().cast();
if let Some(ast::Imports::Items(items)) = import.imports();
if let Some(source) = great.children().find(|child| child.is::<ast::Expr>());
then {
@ -669,10 +677,10 @@ fn complete_params(ctx: &mut CompletionContext) -> bool {
if let Some(args) = parent.get().cast::<ast::Args>();
if let Some(grand) = parent.parent();
if let Some(expr) = grand.get().cast::<ast::Expr>();
let set = matches!(expr, ast::Expr::Set(_));
let set = matches!(expr, ast::Expr::SetRule(_));
if let Some(callee) = match expr {
ast::Expr::FuncCall(call) => Some(call.callee()),
ast::Expr::Set(set) => Some(set.target()),
ast::Expr::SetRule(set) => Some(set.target()),
_ => None,
};
then {
@ -1455,7 +1463,7 @@ impl<'a> CompletionContext<'a> {
let mut defined = BTreeMap::<EcoString, Option<Value>>::new();
named_items(self.world, self.leaf.clone(), |item| {
let name = item.name();
if !name.is_empty() && item.value().as_ref().map_or(true, filter) {
if !name.is_empty() && item.value().as_ref().is_none_or(filter) {
defined.insert(name.clone(), item.value());
}
@ -1764,6 +1772,7 @@ mod tests {
#[test]
fn test_autocomplete_type_methods() {
test("#\"hello\".", -1).must_include(["len", "contains"]);
test("#table().", -1).must_exclude(["cell"]);
}
#[test]

View File

@ -3,7 +3,7 @@ use std::num::NonZeroUsize;
use typst::layout::{Frame, FrameItem, PagedDocument, Point, Position, Size};
use typst::model::{Destination, Url};
use typst::syntax::{FileId, LinkedNode, Side, Source, Span, SyntaxKind};
use typst::visualize::Geometry;
use typst::visualize::{Curve, CurveItem, FillRule, Geometry};
use typst::WorldExt;
use crate::IdeWorld;
@ -53,10 +53,20 @@ pub fn jump_from_click(
for (mut pos, item) in frame.items().rev() {
match item {
FrameItem::Group(group) => {
// TODO: Handle transformation.
if let Some(span) =
jump_from_click(world, document, &group.frame, click - pos)
{
let pos = click - pos;
if let Some(clip) = &group.clip {
if !clip.contains(FillRule::NonZero, pos) {
continue;
}
}
// Realistic transforms should always be invertible.
// An example of one that isn't is a scale of 0, which would
// not be clickable anyway.
let Some(inv_transform) = group.transform.invert() else {
continue;
};
let pos = pos.transform_inf(inv_transform);
if let Some(span) = jump_from_click(world, document, &group.frame, pos) {
return Some(span);
}
}
@ -94,9 +104,32 @@ pub fn jump_from_click(
}
FrameItem::Shape(shape, span) => {
let Geometry::Rect(size) = shape.geometry else { continue };
if is_in_rect(pos, size, click) {
return Jump::from_span(world, *span);
if shape.fill.is_some() {
let within = match &shape.geometry {
Geometry::Line(..) => false,
Geometry::Rect(size) => is_in_rect(pos, *size, click),
Geometry::Curve(curve) => {
curve.contains(shape.fill_rule, click - pos)
}
};
if within {
return Jump::from_span(world, *span);
}
}
if let Some(stroke) = &shape.stroke {
let within = !stroke.thickness.approx_empty() && {
// This curve is rooted at (0, 0), not `pos`.
let base_curve = match &shape.geometry {
Geometry::Line(to) => &Curve(vec![CurveItem::Line(*to)]),
Geometry::Rect(size) => &Curve::rect(*size),
Geometry::Curve(curve) => curve,
};
base_curve.stroke_contains(stroke, click - pos)
};
if within {
return Jump::from_span(world, *span);
}
}
}
@ -146,9 +179,8 @@ pub fn jump_from_cursor(
fn find_in_frame(frame: &Frame, span: Span) -> Option<Point> {
for (mut pos, item) in frame.items() {
if let FrameItem::Group(group) = item {
// TODO: Handle transformation.
if let Some(point) = find_in_frame(&group.frame, span) {
return Some(point + pos);
return Some(pos + point.transform(group.transform));
}
}
@ -269,6 +301,97 @@ mod tests {
test_click("$a + b$", point(28.0, 14.0), cursor(5));
}
#[test]
fn test_jump_from_click_transform_clip() {
    // All documents in these tests have a 10pt page margin, so every click
    // position into the content is offset by it.
    let margin = point(10.0, 10.0);
    // Click in the center of a filled 20x20 rect: hits the shape.
    test_click(
        "#rect(width: 20pt, height: 20pt, fill: black)",
        point(10.0, 10.0) + margin,
        cursor(1),
    );
    // Click below a 60x10 rect: no jump target expected.
    test_click(
        "#rect(width: 60pt, height: 10pt, fill: black)",
        point(5.0, 30.0) + margin,
        None,
    );
    // The same click does hit the rect once it is rotated by 90deg, i.e.
    // the click position must be mapped through the group's transform.
    test_click(
        "#rotate(90deg, origin: bottom + left, rect(width: 60pt, height: 10pt, fill: black))",
        point(5.0, 30.0) + margin,
        cursor(38),
    );
    // A 10x10 rect scaled by 300% covers the point (20, 20).
    test_click(
        "#scale(x: 300%, y: 300%, origin: top + left, rect(width: 10pt, height: 10pt, fill: black))",
        point(20.0, 20.0) + margin,
        cursor(45),
    );
    // With clipping, the scaled rect is cut off at the 10x10 box, so the
    // same click no longer hits it.
    test_click(
        "#box(width: 10pt, height: 10pt, clip: true, scale(x: 300%, y: 300%, \
origin: top + left, rect(width: 10pt, height: 10pt, fill: black)))",
        point(20.0, 20.0) + margin,
        None,
    );
    // Without clipping, content overflowing the box stays clickable ...
    test_click(
        "#box(width: 10pt, height: 10pt, clip: false, rect(width: 30pt, height: 30pt, fill: black))",
        point(20.0, 20.0) + margin,
        cursor(45),
    );
    // ... while with clipping it does not.
    test_click(
        "#box(width: 10pt, height: 10pt, clip: true, rect(width: 30pt, height: 30pt, fill: black))",
        point(20.0, 20.0) + margin,
        None,
    );
    // Clicks on transformed text are mapped back into the text's frame.
    test_click(
        "#rotate(90deg, origin: bottom + left)[hello world]",
        point(5.0, 15.0) + margin,
        cursor(40),
    );
}
#[test]
fn test_jump_from_click_shapes() {
    // 10pt page margin offsets all click positions.
    let margin = point(10.0, 10.0);
    // Center of a filled rect is a hit.
    test_click(
        "#rect(width: 30pt, height: 30pt, fill: black)",
        point(15.0, 15.0) + margin,
        cursor(1),
    );
    // For a circle only points inside the curve count: the center hits,
    // a point near the bounding box corner (outside the curve) does not.
    let circle = "#circle(width: 30pt, height: 30pt, fill: black)";
    test_click(circle, point(15.0, 15.0) + margin, cursor(1));
    test_click(circle, point(1.0, 1.0) + margin, None);
    // A self-intersecting "bowtie" polygon filled with the default
    // (non-zero) fill rule: the test expects hits inside the wings and a
    // miss in the uncovered region between them.
    let bowtie =
        "#polygon(fill: black, (0pt, 0pt), (20pt, 20pt), (20pt, 0pt), (0pt, 20pt))";
    test_click(bowtie, point(1.0, 2.0) + margin, cursor(1));
    test_click(bowtie, point(2.0, 1.0) + margin, None);
    test_click(bowtie, point(19.0, 10.0) + margin, cursor(1));
    // With the even-odd fill rule, the doubly-covered center of this
    // overlapping shape counts as outside, while the singly-covered arms
    // count as inside.
    let evenodd = r#"#polygon(fill: black, fill-rule: "even-odd",
(0pt, 10pt), (30pt, 10pt), (30pt, 20pt), (20pt, 20pt),
(20pt, 0pt), (10pt, 0pt), (10pt, 30pt), (20pt, 30pt),
(20pt, 20pt), (0pt, 20pt))"#;
    test_click(evenodd, point(15.0, 15.0) + margin, None);
    test_click(evenodd, point(5.0, 15.0) + margin, cursor(1));
    test_click(evenodd, point(15.0, 5.0) + margin, cursor(1));
}
#[test]
fn test_jump_from_click_shapes_stroke() {
    // 10pt page margin offsets all click positions.
    let margin = point(10.0, 10.0);
    // A rect with only a stroke (no fill), placed at (10pt, 10pt).
    let rect =
        "#place(dx: 10pt, dy: 10pt, rect(width: 10pt, height: 10pt, stroke: 5pt))";
    // The unfilled interior is not a hit ...
    test_click(rect, point(15.0, 15.0) + margin, None);
    // ... but a point on the stroked border is.
    test_click(rect, point(10.0, 15.0) + margin, cursor(27));
    // Lines have no fill at all, so hits are determined by the stroke.
    test_click(
        "#line(angle: 45deg, length: 10pt, stroke: 2pt)",
        point(2.0, 2.0) + margin,
        cursor(1),
    );
}
#[test]
fn test_jump_from_cursor() {
let s = "*Hello* #box[ABC] World";
@ -281,6 +404,15 @@ mod tests {
test_cursor("$a + b$", -3, pos(1, 27.51, 16.83));
}
#[test]
fn test_jump_from_cursor_transform() {
    // Jumping from a cursor inside rotated text must yield a position that
    // is mapped through the rotation transform.
    test_cursor(
        r#"#rotate(90deg, origin: bottom + left, [hello world])"#,
        -5,
        pos(1, 10.0, 16.58),
    );
}
#[test]
fn test_backlink() {
let s = "#footnote[Hi]";

View File

@ -232,7 +232,9 @@ pub fn deref_target(node: LinkedNode) -> Option<DerefTarget<'_>> {
ast::Expr::FuncCall(call) => {
DerefTarget::Callee(expr_node.find(call.callee().span())?)
}
ast::Expr::Set(set) => DerefTarget::Callee(expr_node.find(set.target().span())?),
ast::Expr::SetRule(set) => {
DerefTarget::Callee(expr_node.find(set.target().span())?)
}
ast::Expr::Ident(_) | ast::Expr::MathIdent(_) | ast::Expr::FieldAccess(_) => {
DerefTarget::VarAccess(expr_node)
}

View File

@ -97,7 +97,7 @@ impl World for TestWorld {
}
fn font(&self, index: usize) -> Option<Font> {
Some(self.base.fonts[index].clone())
self.base.fonts.get(index).cloned()
}
fn today(&self, _: Option<i64>) -> Option<Datetime> {

View File

@ -201,7 +201,7 @@ fn named_param_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Toolti
if let Some(expr) = grand_grand.cast::<ast::Expr>();
if let Some(ast::Expr::Ident(callee)) = match expr {
ast::Expr::FuncCall(call) => Some(call.callee()),
ast::Expr::Set(set) => Some(set.target()),
ast::Expr::SetRule(set) => Some(set.target()),
_ => None,
};

View File

@ -19,6 +19,7 @@ typst-utils = { workspace = true }
dirs = { workspace = true, optional = true }
ecow = { workspace = true }
env_proxy = { workspace = true, optional = true }
fastrand = { workspace = true, optional = true }
flate2 = { workspace = true, optional = true }
fontdb = { workspace = true, optional = true }
native-tls = { workspace = true, optional = true }
@ -43,7 +44,7 @@ fonts = ["dep:fontdb", "fontdb/memmap", "fontdb/fontconfig"]
downloads = ["dep:env_proxy", "dep:native-tls", "dep:ureq", "dep:openssl"]
# Add package downloading utilities, implies `downloads`
packages = ["downloads", "dep:dirs", "dep:flate2", "dep:tar"]
packages = ["downloads", "dep:dirs", "dep:flate2", "dep:tar", "dep:fastrand"]
# Embeds some fonts into the binary:
# - For text: Libertinus Serif, New Computer Modern

View File

@ -1,6 +1,7 @@
//! Download and unpack packages and package indices.
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use ecow::eco_format;
@ -77,7 +78,8 @@ impl PackageStorage {
self.package_path.as_deref()
}
/// Make a package available in the on-disk.
/// Makes a package available on-disk and returns the path at which it is
/// located (will be either in the cache or package directory).
pub fn prepare_package(
&self,
spec: &PackageSpec,
@ -100,7 +102,7 @@ impl PackageStorage {
// Download from network if it doesn't exist yet.
if spec.namespace == DEFAULT_NAMESPACE {
self.download_package(spec, &dir, progress)?;
self.download_package(spec, cache_dir, progress)?;
if dir.exists() {
return Ok(dir);
}
@ -110,7 +112,7 @@ impl PackageStorage {
Err(PackageError::NotFound(spec.clone()))
}
/// Try to determine the latest version of a package.
/// Tries to determine the latest version of a package.
pub fn determine_latest_version(
&self,
spec: &VersionlessPackageSpec,
@ -143,7 +145,7 @@ impl PackageStorage {
}
/// Download the package index. The result of this is cached for efficiency.
pub fn download_index(&self) -> StrResult<&[serde_json::Value]> {
fn download_index(&self) -> StrResult<&[serde_json::Value]> {
self.index
.get_or_try_init(|| {
let url = format!("{DEFAULT_REGISTRY}/{DEFAULT_NAMESPACE}/index.json");
@ -164,10 +166,10 @@ impl PackageStorage {
///
/// # Panics
/// Panics if the package spec namespace isn't `DEFAULT_NAMESPACE`.
pub fn download_package(
fn download_package(
&self,
spec: &PackageSpec,
package_dir: &Path,
cache_dir: &Path,
progress: &mut dyn Progress,
) -> PackageResult<()> {
assert_eq!(spec.namespace, DEFAULT_NAMESPACE);
@ -191,11 +193,52 @@ impl PackageStorage {
}
};
// The directory in which the package's version lives.
let base_dir = cache_dir.join(format!("{}/{}", spec.namespace, spec.name));
// The place at which the specific package version will live in the end.
let package_dir = base_dir.join(format!("{}", spec.version));
// To prevent multiple Typst instances from interfering, we download
// into a temporary directory first and then move this directory to
// its final destination.
//
// In the `rename` function's documentation it is stated:
// > This will not work if the new name is on a different mount point.
//
// By locating the temporary directory directly next to where the
// package directory will live, we are (trying our best) making sure
// that `tempdir` and `package_dir` are on the same mount point.
let tempdir = Tempdir::create(base_dir.join(format!(
".tmp-{}-{}",
spec.version,
fastrand::u32(..),
)))
.map_err(|err| error("failed to create temporary package directory", err))?;
// Decompress the archive into the temporary directory.
let decompressed = flate2::read::GzDecoder::new(data.as_slice());
tar::Archive::new(decompressed).unpack(package_dir).map_err(|err| {
fs::remove_dir_all(package_dir).ok();
PackageError::MalformedArchive(Some(eco_format!("{err}")))
})
tar::Archive::new(decompressed)
.unpack(&tempdir)
.map_err(|err| PackageError::MalformedArchive(Some(eco_format!("{err}"))))?;
// When trying to move (i.e., `rename`) the directory from one place to
// another and the target/destination directory is empty, then the
// operation will succeed (if it's atomic, or hardware doesn't fail, or
// power doesn't go off, etc.). If however the target directory is not
// empty, i.e., another instance already successfully moved the package,
// then we can safely ignore the `DirectoryNotEmpty` error.
//
// This means that we do not check the integrity of an existing moved
// package, just like we don't check the integrity if the package
// directory already existed in the first place. If situations with
// broken packages still occur even with the rename safeguard, we might
// consider more complex solutions like file locking or checksums.
match fs::rename(&tempdir, &package_dir) {
Ok(()) => Ok(()),
Err(err) if err.kind() == io::ErrorKind::DirectoryNotEmpty => Ok(()),
Err(err) => Err(error("failed to move downloaded package directory", err)),
}
}
}
@ -207,6 +250,36 @@ struct MinimalPackageInfo {
version: PackageVersion,
}
/// A temporary directory that is automatically cleaned up when dropped.
struct Tempdir(PathBuf);

impl Tempdir {
    /// Creates a directory (including missing parents) at the given path.
    /// The directory and its contents are removed again on drop.
    fn create(path: PathBuf) -> io::Result<Self> {
        // Use the `fs` import for consistency with `Drop` below.
        fs::create_dir_all(&path)?;
        Ok(Self(path))
    }
}

impl Drop for Tempdir {
    fn drop(&mut self) {
        // Best-effort cleanup: a failed removal must not panic inside a
        // destructor, so the result is deliberately ignored.
        _ = fs::remove_dir_all(&self.0);
    }
}

impl AsRef<Path> for Tempdir {
    fn as_ref(&self) -> &Path {
        &self.0
    }
}
/// Enriches an I/O error with a message and turns it into a
/// `PackageError::Other`.
#[cold]
fn error(message: &str, err: io::Error) -> PackageError {
    // `#[cold]` because this only runs on (rare) failure paths.
    let details = eco_format!("{message}: {err}");
    PackageError::Other(Some(details))
}
#[cfg(test)]
mod tests {
use super::*;

View File

@ -115,7 +115,7 @@ impl<'a, 'b> Composer<'a, 'b, '_, '_> {
let column_height = regions.size.y;
let backlog: Vec<_> = std::iter::once(&column_height)
.chain(regions.backlog)
.flat_map(|&h| std::iter::repeat(h).take(self.config.columns.count))
.flat_map(|&h| std::iter::repeat_n(h, self.config.columns.count))
.skip(1)
.collect();

View File

@ -11,7 +11,7 @@ use typst_library::layout::{
use typst_library::text::TextElem;
use typst_library::visualize::Geometry;
use typst_syntax::Span;
use typst_utils::{MaybeReverseIter, Numeric};
use typst_utils::Numeric;
use super::{
generate_line_segments, hline_stroke_at_column, layout_cell, vline_stroke_at_row,
@ -574,7 +574,7 @@ impl<'a> GridLayouter<'a> {
// Reverse with RTL so that later columns start first.
let mut dx = Abs::zero();
for (x, &col) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
for (x, &col) in self.rcols.iter().enumerate() {
let mut dy = Abs::zero();
for row in rows {
// We want to only draw the fill starting at the parent
@ -643,18 +643,13 @@ impl<'a> GridLayouter<'a> {
.sum()
};
let width = self.cell_spanned_width(cell, x);
// In the grid, cell colspans expand to the right,
// so we're at the leftmost (lowest 'x') column
// spanned by the cell. However, in RTL, cells
// expand to the left. Therefore, without the
// offset below, cell fills would start at the
// rightmost visual position of a cell and extend
// over to unrelated columns to the right in RTL.
// We avoid this by ensuring the fill starts at the
// very left of the cell, even with colspan > 1.
let offset =
if self.is_rtl { -width + col } else { Abs::zero() };
let pos = Point::new(dx + offset, dy);
let mut pos = Point::new(dx, dy);
if self.is_rtl {
// In RTL cells expand to the left, thus the
// position must additionally be offset by the
// cell's width.
pos.x = self.width - (dx + width);
}
let size = Size::new(width, height);
let rect = Geometry::Rect(size).filled(fill);
fills.push((pos, FrameItem::Shape(rect, self.span)));
@ -1236,10 +1231,9 @@ impl<'a> GridLayouter<'a> {
}
let mut output = Frame::soft(Size::new(self.width, height));
let mut pos = Point::zero();
let mut offset = Point::zero();
// Reverse the column order when using RTL.
for (x, &rcol) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
for (x, &rcol) in self.rcols.iter().enumerate() {
if let Some(cell) = self.grid.cell(x, y) {
// Rowspans have a separate layout step
if cell.rowspan.get() == 1 {
@ -1257,25 +1251,17 @@ impl<'a> GridLayouter<'a> {
let frame =
layout_cell(cell, engine, disambiguator, self.styles, pod)?
.into_frame();
let mut pos = pos;
let mut pos = offset;
if self.is_rtl {
// In the grid, cell colspans expand to the right,
// so we're at the leftmost (lowest 'x') column
// spanned by the cell. However, in RTL, cells
// expand to the left. Therefore, without the
// offset below, the cell's contents would be laid out
// starting at its rightmost visual position and extend
// over to unrelated cells to its right in RTL.
// We avoid this by ensuring the rendered cell starts at
// the very left of the cell, even with colspan > 1.
let offset = -width + rcol;
pos.x += offset;
// In RTL cells expand to the left, thus the position
// must additionally be offset by the cell's width.
pos.x = self.width - (pos.x + width);
}
output.push_frame(pos, frame);
}
}
pos.x += rcol;
offset.x += rcol;
}
Ok(output)
@ -1302,8 +1288,8 @@ impl<'a> GridLayouter<'a> {
pod.backlog = &heights[1..];
// Layout the row.
let mut pos = Point::zero();
for (x, &rcol) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
let mut offset = Point::zero();
for (x, &rcol) in self.rcols.iter().enumerate() {
if let Some(cell) = self.grid.cell(x, y) {
// Rowspans have a separate layout step
if cell.rowspan.get() == 1 {
@ -1314,17 +1300,19 @@ impl<'a> GridLayouter<'a> {
let fragment =
layout_cell(cell, engine, disambiguator, self.styles, pod)?;
for (output, frame) in outputs.iter_mut().zip(fragment) {
let mut pos = pos;
let mut pos = offset;
if self.is_rtl {
let offset = -width + rcol;
pos.x += offset;
// In RTL cells expand to the left, thus the
// position must additionally be offset by the
// cell's width.
pos.x = self.width - (offset.x + width);
}
output.push_frame(pos, frame);
}
}
}
pos.x += rcol;
offset.x += rcol;
}
Ok(Fragment::frames(outputs))
@ -1377,7 +1365,7 @@ impl<'a> GridLayouter<'a> {
.footer
.as_ref()
.and_then(Repeatable::as_repeated)
.map_or(true, |footer| footer.start != header.end)
.is_none_or(|footer| footer.start != header.end)
&& self.lrows.last().is_some_and(|row| row.index() < header.end)
&& !in_last_with_offset(
self.regions,
@ -1446,7 +1434,7 @@ impl<'a> GridLayouter<'a> {
.iter_mut()
.filter(|rowspan| (rowspan.y..rowspan.y + rowspan.rowspan).contains(&y))
.filter(|rowspan| {
rowspan.max_resolved_row.map_or(true, |max_row| y > max_row)
rowspan.max_resolved_row.is_none_or(|max_row| y > max_row)
})
{
// If the first region wasn't defined yet, it will have the
@ -1469,7 +1457,7 @@ impl<'a> GridLayouter<'a> {
// last height is the one for the current region.
rowspan
.heights
.extend(std::iter::repeat(Abs::zero()).take(amount_missing_heights));
.extend(std::iter::repeat_n(Abs::zero(), amount_missing_heights));
// Ensure that, in this region, the rowspan will span at least
// this row.
@ -1494,7 +1482,7 @@ impl<'a> GridLayouter<'a> {
// laid out at the first frame of the row).
// Any rowspans ending before this row are laid out even
// on this row's first frame.
if laid_out_footer_start.map_or(true, |footer_start| {
if laid_out_footer_start.is_none_or(|footer_start| {
// If this is a footer row, then only lay out this rowspan
// if the rowspan is contained within the footer.
y < footer_start || rowspan.y >= footer_start
@ -1580,5 +1568,5 @@ pub(super) fn points(
/// our case, headers).
pub(super) fn in_last_with_offset(regions: Regions<'_>, offset: Abs) -> bool {
regions.backlog.is_empty()
&& regions.last.map_or(true, |height| regions.size.y + offset == height)
&& regions.last.is_none_or(|height| regions.size.y + offset == height)
}

View File

@ -463,7 +463,7 @@ pub fn hline_stroke_at_column(
// region, we have the last index, and (as a failsafe) we don't have the
// last row of cells above us.
let use_bottom_border_stroke = !in_last_region
&& local_top_y.map_or(true, |top_y| top_y + 1 != grid.rows.len())
&& local_top_y.is_none_or(|top_y| top_y + 1 != grid.rows.len())
&& y == grid.rows.len();
let bottom_y =
if use_bottom_border_stroke { grid.rows.len().saturating_sub(1) } else { y };

View File

@ -3,7 +3,6 @@ use typst_library::engine::Engine;
use typst_library::foundations::Resolve;
use typst_library::layout::grid::resolve::Repeatable;
use typst_library::layout::{Abs, Axes, Frame, Point, Region, Regions, Size, Sizing};
use typst_utils::MaybeReverseIter;
use super::layouter::{in_last_with_offset, points, Row, RowPiece};
use super::{layout_cell, Cell, GridLayouter};
@ -23,6 +22,10 @@ pub struct Rowspan {
/// specified for the parent cell's `breakable` field.
pub is_effectively_unbreakable: bool,
/// The horizontal offset of this rowspan in all regions.
///
/// This is the offset from the text direction start, meaning that, on RTL
/// grids, this is the offset from the right of the grid, whereas, on LTR
/// grids, it is the offset from the left.
pub dx: Abs,
/// The vertical offset of this rowspan in the first region.
pub dy: Abs,
@ -118,10 +121,11 @@ impl GridLayouter<'_> {
// Nothing to layout.
return Ok(());
};
let first_column = self.rcols[x];
let cell = self.grid.cell(x, y).unwrap();
let width = self.cell_spanned_width(cell, x);
let dx = if self.is_rtl { dx - width + first_column } else { dx };
// In RTL cells expand to the left, thus the position
// must additionally be offset by the cell's width.
let dx = if self.is_rtl { self.width - (dx + width) } else { dx };
// Prepare regions.
let size = Size::new(width, *first_height);
@ -185,10 +189,8 @@ impl GridLayouter<'_> {
/// Checks if a row contains the beginning of one or more rowspan cells.
/// If so, adds them to the rowspans vector.
pub fn check_for_rowspans(&mut self, disambiguator: usize, y: usize) {
// We will compute the horizontal offset of each rowspan in advance.
// For that reason, we must reverse the column order when using RTL.
let offsets = points(self.rcols.iter().copied().rev_if(self.is_rtl));
for (x, dx) in (0..self.rcols.len()).rev_if(self.is_rtl).zip(offsets) {
let offsets = points(self.rcols.iter().copied());
for (x, dx) in (0..self.rcols.len()).zip(offsets) {
let Some(cell) = self.grid.cell(x, y) else {
continue;
};
@ -588,7 +590,7 @@ impl GridLayouter<'_> {
measurement_data: &CellMeasurementData<'_>,
) -> bool {
if sizes.len() <= 1
&& sizes.first().map_or(true, |&first_frame_size| {
&& sizes.first().is_none_or(|&first_frame_size| {
first_frame_size <= measurement_data.height_in_this_region
})
{

View File

@ -154,7 +154,7 @@ pub fn line<'a>(
let mut items = collect_items(engine, p, range, trim);
// Add a hyphen at the line start, if a previous dash should be repeated.
if pred.map_or(false, |pred| should_repeat_hyphen(pred, full)) {
if pred.is_some_and(|pred| should_repeat_hyphen(pred, full)) {
if let Some(shaped) = items.first_text_mut() {
shaped.prepend_hyphen(engine, p.config.fallback);
}
@ -406,7 +406,7 @@ fn should_repeat_hyphen(pred_line: &Line, text: &str) -> bool {
//
// See § 4.1.1.1.2.e on the "Ortografía de la lengua española"
// https://www.rae.es/ortografía/como-signo-de-división-de-palabras-a-final-de-línea
Lang::SPANISH => text.chars().next().map_or(false, |c| !c.is_uppercase()),
Lang::SPANISH => text.chars().next().is_some_and(|c| !c.is_uppercase()),
_ => false,
}

View File

@ -290,7 +290,7 @@ fn linebreak_optimized_bounded<'a>(
}
// If this attempt is better than what we had before, take it!
if best.as_ref().map_or(true, |best| best.total >= total) {
if best.as_ref().is_none_or(|best| best.total >= total) {
best = Some(Entry { pred: pred_index, total, line: attempt, end });
}
}
@ -423,7 +423,7 @@ fn linebreak_optimized_approximate(
let total = pred.total + line_cost;
// If this attempt is better than what we had before, take it!
if best.as_ref().map_or(true, |best| best.total >= total) {
if best.as_ref().is_none_or(|best| best.total >= total) {
best = Some(Entry {
pred: pred_index,
total,
@ -690,13 +690,34 @@ fn breakpoints(p: &Preparation, mut f: impl FnMut(usize, Breakpoint)) {
let breakpoint = if point == text.len() {
Breakpoint::Mandatory
} else {
const OBJ_REPLACE: char = '\u{FFFC}';
match lb.get(c) {
// Fix for: https://github.com/unicode-org/icu4x/issues/4146
LineBreak::Glue | LineBreak::WordJoiner | LineBreak::ZWJ => continue,
LineBreak::MandatoryBreak
| LineBreak::CarriageReturn
| LineBreak::LineFeed
| LineBreak::NextLine => Breakpoint::Mandatory,
// https://github.com/typst/typst/issues/5489
//
// OBJECT-REPLACEMENT-CHARACTERs provide Contingent Break
// opportunities before and after by default. This behaviour
// is however tailorable, see:
// https://www.unicode.org/reports/tr14/#CB
// https://www.unicode.org/reports/tr14/#TailorableBreakingRules
// https://www.unicode.org/reports/tr14/#LB20
//
// Don't provide a line breaking opportunity between a LTR-
// ISOLATE (or any other Combining Mark) and an OBJECT-
// REPLACEMENT-CHARACTER representing an inline item, if the
// LTR-ISOLATE could end up as the only character on the
// previous line.
LineBreak::CombiningMark
if text[point..].starts_with(OBJ_REPLACE)
&& last + c.len_utf8() == point =>
{
continue;
}
_ => Breakpoint::Normal,
}
};

View File

@ -20,7 +20,7 @@ use unicode_bidi::{BidiInfo, Level as BidiLevel};
use unicode_script::{Script, UnicodeScript};
use super::{decorate, Item, Range, SpanMapper};
use crate::modifiers::{FrameModifiers, FrameModify};
use crate::modifiers::FrameModifyText;
/// The result of shaping text.
///
@ -327,7 +327,7 @@ impl<'a> ShapedText<'a> {
offset += width;
}
frame.modify(&FrameModifiers::get_in(self.styles));
frame.modify_text(self.styles);
frame
}
@ -465,7 +465,7 @@ impl<'a> ShapedText<'a> {
None
};
let mut chain = families(self.styles)
.filter(|family| family.covers().map_or(true, |c| c.is_match("-")))
.filter(|family| family.covers().is_none_or(|c| c.is_match("-")))
.map(|family| book.select(family.as_str(), self.variant))
.chain(fallback_func.iter().map(|f| f()))
.flatten();
@ -570,7 +570,7 @@ impl<'a> ShapedText<'a> {
// for the next line.
let dec = if ltr { usize::checked_sub } else { usize::checked_add };
while let Some(next) = dec(idx, 1) {
if self.glyphs.get(next).map_or(true, |g| g.range.start != text_index) {
if self.glyphs.get(next).is_none_or(|g| g.range.start != text_index) {
break;
}
idx = next;
@ -812,7 +812,7 @@ fn shape_segment<'a>(
.nth(1)
.map(|(i, _)| offset + i)
.unwrap_or(text.len());
covers.map_or(true, |cov| cov.is_match(&text[offset..end]))
covers.is_none_or(|cov| cov.is_match(&text[offset..end]))
};
// Collect the shaped glyphs, doing fallback and shaping parts again with
@ -824,12 +824,42 @@ fn shape_segment<'a>(
// Add the glyph to the shaped output.
if info.glyph_id != 0 && is_covered(cluster) {
// Determine the text range of the glyph.
// Assume we have the following sequence of (glyph_id, cluster):
// [(120, 0), (80, 0), (3, 3), (755, 4), (69, 4), (424, 13),
// (63, 13), (193, 25), (80, 25), (3, 31)
//
// We then want the sequence of (glyph_id, text_range) to look as follows:
// [(120, 0..3), (80, 0..3), (3, 3..4), (755, 4..13), (69, 4..13),
// (424, 13..25), (63, 13..25), (193, 25..31), (80, 25..31), (3, 31..x)]
//
// Each glyph in the same cluster should be assigned the full text
// range. This is necessary because only this way krilla can
// properly assign `ActualText` attributes in complex shaping
// scenarios.
// The start of the glyph's text range.
let start = base + cluster;
let end = base
+ if ltr { i.checked_add(1) } else { i.checked_sub(1) }
.and_then(|last| infos.get(last))
.map_or(text.len(), |info| info.cluster as usize);
// Determine the end of the glyph's text range.
let mut k = i;
let step: isize = if ltr { 1 } else { -1 };
let end = loop {
// If we've reached the end of the glyphs, the `end` of the
// range should be the end of the full text.
let Some((next, next_info)) = k
.checked_add_signed(step)
.and_then(|n| infos.get(n).map(|info| (n, info)))
else {
break base + text.len();
};
// If the cluster doesn't match anymore, we've reached the end.
if next_info.cluster != info.cluster {
break base + next_info.cluster as usize;
}
k = next;
};
let c = text[cluster..].chars().next().unwrap();
let script = c.script();

View File

@ -96,9 +96,13 @@ pub fn layout_enum(
let mut cells = vec![];
let mut locator = locator.split();
let mut number =
elem.start(styles)
.unwrap_or_else(|| if reversed { elem.children.len() } else { 1 });
let mut number = elem.start(styles).unwrap_or_else(|| {
if reversed {
elem.children.len() as u64
} else {
1
}
});
let mut parents = EnumElem::parents_in(styles);
let full = elem.full(styles);

View File

@ -19,8 +19,10 @@ pub fn layout_accent(
let mut base = ctx.layout_into_fragment(&elem.base, styles.chain(&cramped))?;
// Try to replace a glyph with its dotless variant.
if let MathFragment::Glyph(glyph) = &mut base {
glyph.make_dotless_form(ctx);
if elem.dotless(styles) {
if let MathFragment::Glyph(glyph) = &mut base {
glyph.make_dotless_form(ctx);
}
}
// Preserve class to preserve automatic spacing.
@ -34,7 +36,7 @@ pub fn layout_accent(
// Try to replace accent glyph with flattened variant.
let flattened_base_height = scaled!(ctx, styles, flattened_accent_base_height);
if base.height() > flattened_base_height {
if base.ascent() > flattened_base_height {
glyph.make_flattened_accent_form(ctx);
}
@ -50,7 +52,7 @@ pub fn layout_accent(
// minus the accent base height. Only if the base is very small, we need
// a larger gap so that the accent doesn't move too low.
let accent_base_height = scaled!(ctx, styles, accent_base_height);
let gap = -accent.descent() - base.height().min(accent_base_height);
let gap = -accent.descent() - base.ascent().min(accent_base_height);
let size = Size::new(base.width(), accent.height() + gap + base.height());
let accent_pos = Point::with_x(base_attach - accent_attach);
let base_pos = Point::with_y(accent.height() + gap);

View File

@ -1,4 +1,4 @@
use typst_library::diag::{bail, SourceResult};
use typst_library::diag::{bail, warning, SourceResult};
use typst_library::foundations::{Content, Packed, Resolve, StyleChain};
use typst_library::layout::{
Abs, Axes, Em, FixedAlignment, Frame, FrameItem, Point, Ratio, Rel, Size,
@ -9,7 +9,7 @@ use typst_library::visualize::{FillRule, FixedStroke, Geometry, LineCap, Shape};
use typst_syntax::Span;
use super::{
alignments, delimiter_alignment, stack, style_for_denominator, AlignmentResult,
alignments, delimiter_alignment, style_for_denominator, AlignmentResult,
FrameFragment, GlyphFragment, LeftRightAlternator, MathContext, DELIM_SHORT_FALL,
};
@ -23,67 +23,23 @@ pub fn layout_vec(
ctx: &mut MathContext,
styles: StyleChain,
) -> SourceResult<()> {
let delim = elem.delim(styles);
let frame = layout_vec_body(
let span = elem.span();
let column: Vec<&Content> = elem.children.iter().collect();
let frame = layout_body(
ctx,
styles,
&elem.children,
&[column],
elem.align(styles),
elem.gap(styles),
LeftRightAlternator::Right,
None,
Axes::with_y(elem.gap(styles)),
span,
"elements",
)?;
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), elem.span())
}
/// Lays out a [`MatElem`].
#[typst_macros::time(name = "math.mat", span = elem.span())]
pub fn layout_mat(
elem: &Packed<MatElem>,
ctx: &mut MathContext,
styles: StyleChain,
) -> SourceResult<()> {
let augment = elem.augment(styles);
let rows = &elem.rows;
if let Some(aug) = &augment {
for &offset in &aug.hline.0 {
if offset == 0 || offset.unsigned_abs() >= rows.len() {
bail!(
elem.span(),
"cannot draw a horizontal line after row {} of a matrix with {} rows",
if offset < 0 { rows.len() as isize + offset } else { offset },
rows.len()
);
}
}
let ncols = rows.first().map_or(0, |row| row.len());
for &offset in &aug.vline.0 {
if offset == 0 || offset.unsigned_abs() >= ncols {
bail!(
elem.span(),
"cannot draw a vertical line after column {} of a matrix with {} columns",
if offset < 0 { ncols as isize + offset } else { offset },
ncols
);
}
}
}
let delim = elem.delim(styles);
let frame = layout_mat_body(
ctx,
styles,
rows,
elem.align(styles),
augment,
Axes::new(elem.column_gap(styles), elem.row_gap(styles)),
elem.span(),
)?;
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), elem.span())
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
}
/// Lays out a [`CasesElem`].
@ -93,60 +49,100 @@ pub fn layout_cases(
ctx: &mut MathContext,
styles: StyleChain,
) -> SourceResult<()> {
let delim = elem.delim(styles);
let frame = layout_vec_body(
let span = elem.span();
let column: Vec<&Content> = elem.children.iter().collect();
let frame = layout_body(
ctx,
styles,
&elem.children,
&[column],
FixedAlignment::Start,
elem.gap(styles),
LeftRightAlternator::None,
None,
Axes::with_y(elem.gap(styles)),
span,
"branches",
)?;
let delim = elem.delim(styles);
let (open, close) =
if elem.reverse(styles) { (None, delim.close()) } else { (delim.open(), None) };
layout_delimiters(ctx, styles, frame, open, close, elem.span())
layout_delimiters(ctx, styles, frame, open, close, span)
}
/// Layout the inner contents of a vector.
fn layout_vec_body(
/// Lays out a [`MatElem`].
#[typst_macros::time(name = "math.mat", span = elem.span())]
pub fn layout_mat(
elem: &Packed<MatElem>,
ctx: &mut MathContext,
styles: StyleChain,
column: &[Content],
align: FixedAlignment,
row_gap: Rel<Abs>,
alternator: LeftRightAlternator,
) -> SourceResult<Frame> {
let gap = row_gap.relative_to(ctx.region.size.y);
) -> SourceResult<()> {
let span = elem.span();
let rows = &elem.rows;
let ncols = rows.first().map_or(0, |row| row.len());
let denom_style = style_for_denominator(styles);
let mut flat = vec![];
for child in column {
// We allow linebreaks in cases and vectors, which are functionally
// identical to commas.
flat.extend(ctx.layout_into_run(child, styles.chain(&denom_style))?.rows());
let augment = elem.augment(styles);
if let Some(aug) = &augment {
for &offset in &aug.hline.0 {
if offset == 0 || offset.unsigned_abs() >= rows.len() {
bail!(
span,
"cannot draw a horizontal line after row {} of a matrix with {} rows",
if offset < 0 { rows.len() as isize + offset } else { offset },
rows.len()
);
}
}
for &offset in &aug.vline.0 {
if offset == 0 || offset.unsigned_abs() >= ncols {
bail!(
span,
"cannot draw a vertical line after column {} of a matrix with {} columns",
if offset < 0 { ncols as isize + offset } else { offset },
ncols
);
}
}
}
// We pad ascent and descent with the ascent and descent of the paren
// to ensure that normal vectors are aligned with others unless they are
// way too big.
let paren =
GlyphFragment::new(ctx, styles.chain(&denom_style), '(', Span::detached());
Ok(stack(flat, align, gap, 0, alternator, Some((paren.ascent, paren.descent))))
// Transpose rows of the matrix into columns.
let mut row_iters: Vec<_> = rows.iter().map(|i| i.iter()).collect();
let columns: Vec<Vec<_>> = (0..ncols)
.map(|_| row_iters.iter_mut().map(|i| i.next().unwrap()).collect())
.collect();
let frame = layout_body(
ctx,
styles,
&columns,
elem.align(styles),
LeftRightAlternator::Right,
augment,
Axes::new(elem.column_gap(styles), elem.row_gap(styles)),
span,
"cells",
)?;
let delim = elem.delim(styles);
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
}
/// Layout the inner contents of a matrix.
fn layout_mat_body(
/// Layout the inner contents of a matrix, vector, or cases.
#[allow(clippy::too_many_arguments)]
fn layout_body(
ctx: &mut MathContext,
styles: StyleChain,
rows: &[Vec<Content>],
columns: &[Vec<&Content>],
align: FixedAlignment,
alternator: LeftRightAlternator,
augment: Option<Augment<Abs>>,
gap: Axes<Rel<Abs>>,
span: Span,
children: &str,
) -> SourceResult<Frame> {
let ncols = rows.first().map_or(0, |row| row.len());
let nrows = rows.len();
let nrows = columns.first().map_or(0, |col| col.len());
let ncols = columns.len();
if ncols == 0 || nrows == 0 {
return Ok(Frame::soft(Size::zero()));
}
@ -178,16 +174,11 @@ fn layout_mat_body(
// Before the full matrix body can be laid out, the
// individual cells must first be independently laid out
// so we can ensure alignment across rows and columns.
let mut cols = vec![vec![]; ncols];
// This variable stores the maximum ascent and descent for each row.
let mut heights = vec![(Abs::zero(), Abs::zero()); nrows];
// We want to transpose our data layout to columns
// before final layout. For efficiency, the columns
// variable is set up here and newly generated
// individual cells are then added to it.
let mut cols = vec![vec![]; ncols];
let denom_style = style_for_denominator(styles);
// We pad ascent and descent with the ascent and descent of the paren
// to ensure that normal matrices are aligned with others unless they are
@ -195,10 +186,22 @@ fn layout_mat_body(
let paren =
GlyphFragment::new(ctx, styles.chain(&denom_style), '(', Span::detached());
for (row, (ascent, descent)) in rows.iter().zip(&mut heights) {
for (cell, col) in row.iter().zip(&mut cols) {
for (column, col) in columns.iter().zip(&mut cols) {
for (cell, (ascent, descent)) in column.iter().zip(&mut heights) {
let cell_span = cell.span();
let cell = ctx.layout_into_run(cell, styles.chain(&denom_style))?;
// We ignore linebreaks in the cells as we can't differentiate
// alignment points for the whole body from ones for a specific
// cell, and multiline cells don't quite make sense at the moment.
if cell.is_multiline() {
ctx.engine.sink.warn(warning!(
cell_span,
"linebreaks are ignored in {}", children;
hint: "use commas instead to separate each line"
));
}
ascent.set_max(cell.ascent().max(paren.ascent));
descent.set_max(cell.descent().max(paren.descent));
@ -222,7 +225,7 @@ fn layout_mat_body(
let mut y = Abs::zero();
for (cell, &(ascent, descent)) in col.into_iter().zip(&heights) {
let cell = cell.into_line_frame(&points, LeftRightAlternator::Right);
let cell = cell.into_line_frame(&points, alternator);
let pos = Point::new(
if points.is_empty() {
x + align.position(rcol - cell.width())

View File

@ -85,14 +85,15 @@ pub fn layout_root(
ascent.set_max(shift_up + index.ascent());
}
let radicand_x = sqrt_offset + sqrt.width();
let sqrt_x = sqrt_offset.max(Abs::zero());
let radicand_x = sqrt_x + sqrt.width();
let radicand_y = ascent - radicand.ascent();
let width = radicand_x + radicand.width();
let size = Size::new(width, ascent + descent);
// The extra "- thickness" comes from the fact that the sqrt is placed
// in `push_frame` with respect to its top, not its baseline.
let sqrt_pos = Point::new(sqrt_offset, radicand_y - gap - thickness);
let sqrt_pos = Point::new(sqrt_x, radicand_y - gap - thickness);
let line_pos = Point::new(radicand_x, radicand_y - gap - (thickness / 2.0));
let radicand_pos = Point::new(radicand_x, radicand_y);
@ -100,7 +101,8 @@ pub fn layout_root(
frame.set_baseline(ascent);
if let Some(index) = index {
let index_pos = Point::new(kern_before, ascent - index.ascent() - shift_up);
let index_x = -sqrt_offset.min(Abs::zero()) + kern_before;
let index_pos = Point::new(index_x, ascent - index.ascent() - shift_up);
frame.push_frame(index_pos, index);
}

View File

@ -117,7 +117,6 @@ pub fn stack(
gap: Abs,
baseline: usize,
alternator: LeftRightAlternator,
minimum_ascent_descent: Option<(Abs, Abs)>,
) -> Frame {
let AlignmentResult { points, width } = alignments(&rows);
let rows: Vec<_> = rows
@ -125,13 +124,9 @@ pub fn stack(
.map(|row| row.into_line_frame(&points, alternator))
.collect();
let padded_height = |height: Abs| {
height.max(minimum_ascent_descent.map_or(Abs::zero(), |(a, d)| a + d))
};
let mut frame = Frame::soft(Size::new(
width,
rows.iter().map(|row| padded_height(row.height())).sum::<Abs>()
rows.iter().map(|row| row.height()).sum::<Abs>()
+ rows.len().saturating_sub(1) as f64 * gap,
));
@ -142,14 +137,11 @@ pub fn stack(
} else {
Abs::zero()
};
let ascent_padded_part = minimum_ascent_descent
.map_or(Abs::zero(), |(a, _)| (a - row.ascent()))
.max(Abs::zero());
let pos = Point::new(x, y + ascent_padded_part);
let pos = Point::new(x, y);
if i == baseline {
frame.set_baseline(y + row.baseline() + ascent_padded_part);
frame.set_baseline(y + row.baseline());
}
y += padded_height(row.height()) + gap;
y += row.height() + gap;
frame.push_frame(pos, row);
}

View File

@ -302,6 +302,6 @@ fn assemble(
fn parts(assembly: GlyphAssembly, repeat: usize) -> impl Iterator<Item = GlyphPart> + '_ {
assembly.parts.into_iter().flat_map(move |part| {
let count = if part.part_flags.extender() { repeat } else { 1 };
std::iter::repeat(part).take(count)
std::iter::repeat_n(part, count)
})
}

View File

@ -312,14 +312,8 @@ fn layout_underoverspreader(
}
};
let frame = stack(
rows,
FixedAlignment::Center,
gap,
baseline,
LeftRightAlternator::Right,
None,
);
let frame =
stack(rows, FixedAlignment::Center, gap, baseline, LeftRightAlternator::Right);
ctx.push(FrameFragment::new(styles, frame).with_class(body_class));
Ok(())

View File

@ -1,6 +1,6 @@
use typst_library::foundations::StyleChain;
use typst_library::layout::{Fragment, Frame, FrameItem, HideElem, Point};
use typst_library::model::{Destination, LinkElem};
use typst_library::layout::{Abs, Fragment, Frame, FrameItem, HideElem, Point, Sides};
use typst_library::model::{Destination, LinkElem, ParElem};
/// Frame-level modifications resulting from styles that do not impose any
/// layout structure.
@ -52,14 +52,7 @@ pub trait FrameModify {
impl FrameModify for Frame {
fn modify(&mut self, modifiers: &FrameModifiers) {
if let Some(dest) = &modifiers.dest {
let size = self.size();
self.push(Point::zero(), FrameItem::Link(dest.clone(), size));
}
if modifiers.hidden {
self.hide();
}
modify_frame(self, modifiers, None);
}
}
@ -82,6 +75,41 @@ where
}
}
/// Frame modification for text frames specifically: in addition to the
/// regular [`FrameModifiers`], link boxes are expanded vertically (see
/// `modify_frame`) so that links on adjacent lines leave no dead strip
/// between them.
pub trait FrameModifyText {
/// Resolve and apply [`FrameModifiers`] for this text frame.
fn modify_text(&mut self, styles: StyleChain);
}
impl FrameModifyText for Frame {
fn modify_text(&mut self, styles: StyleChain) {
let modifiers = FrameModifiers::get_in(styles);
// Expand by half the paragraph leading on top and bottom: two
// neighboring lines' link boxes then meet in the middle of the gap.
let expand_y = 0.5 * ParElem::leading_in(styles);
let outset = Sides::new(Abs::zero(), expand_y, Abs::zero(), expand_y);
modify_frame(self, &modifiers, Some(outset));
}
}
/// Applies the given [`FrameModifiers`] to `frame`.
///
/// If a link destination is set, pushes a link item covering the frame,
/// optionally grown on each side by `link_box_outset`. If the hidden
/// modifier is set, hides the frame's contents.
fn modify_frame(
frame: &mut Frame,
modifiers: &FrameModifiers,
link_box_outset: Option<Sides<Abs>>,
) {
if let Some(dest) = &modifiers.dest {
let mut pos = Point::zero();
let mut size = frame.size();
if let Some(outset) = link_box_outset {
// Move the link origin up/left by the outset and grow the size by
// the outsets summed per axis, so the clickable area extends
// beyond the frame's own bounds.
pos.y -= outset.top;
pos.x -= outset.left;
size += outset.sum_by_axis();
}
frame.push(pos, FrameItem::Link(dest.clone(), size));
}
if modifiers.hidden {
frame.hide();
}
}
/// Performs layout and modification in one step.
///
/// This just runs `layout(styles).modified(&FrameModifiers::get_in(styles))`,

View File

@ -284,6 +284,7 @@ impl<'a> CurveBuilder<'a> {
self.last_point = point;
self.last_control_from = point;
self.is_started = true;
self.is_empty = true;
}
/// Add a line segment.

View File

@ -29,6 +29,7 @@ csv = { workspace = true }
ecow = { workspace = true }
flate2 = { workspace = true }
fontdb = { workspace = true }
glidesort = { workspace = true }
hayagriva = { workspace = true }
icu_properties = { workspace = true }
icu_provider = { workspace = true }
@ -61,6 +62,7 @@ ttf-parser = { workspace = true }
two-face = { workspace = true }
typed-arena = { workspace = true }
unicode-math-class = { workspace = true }
unicode-normalization = { workspace = true }
unicode-segmentation = { workspace = true }
unscanny = { workspace = true }
usvg = { workspace = true }

View File

@ -232,42 +232,18 @@ impl From<SyntaxError> for SourceDiagnostic {
/// Destination for a deprecation message when accessing a deprecated value.
pub trait DeprecationSink {
/// Emits the given deprecation message into this sink.
fn emit(&mut self, message: &str);
/// Emits the given deprecation message into this sink, with the given
/// hints.
fn emit_with_hints(&mut self, message: &str, hints: &[&str]);
fn emit(self, message: &str);
}
impl DeprecationSink for () {
fn emit(&mut self, _: &str) {}
fn emit_with_hints(&mut self, _: &str, _: &[&str]) {}
}
impl DeprecationSink for (&mut Vec<SourceDiagnostic>, Span) {
fn emit(&mut self, message: &str) {
self.0.push(SourceDiagnostic::warning(self.1, message));
}
fn emit_with_hints(&mut self, message: &str, hints: &[&str]) {
self.0.push(
SourceDiagnostic::warning(self.1, message)
.with_hints(hints.iter().copied().map(Into::into)),
);
}
fn emit(self, _: &str) {}
}
impl DeprecationSink for (&mut Engine<'_>, Span) {
fn emit(&mut self, message: &str) {
/// Emits the deprecation message as a warning.
fn emit(self, message: &str) {
self.0.sink.warn(SourceDiagnostic::warning(self.1, message));
}
fn emit_with_hints(&mut self, message: &str, hints: &[&str]) {
self.0.sink.warn(
SourceDiagnostic::warning(self.1, message)
.with_hints(hints.iter().copied().map(Into::into)),
);
}
}
/// A part of a diagnostic's [trace](SourceDiagnostic::trace).

View File

@ -312,7 +312,8 @@ impl Route<'_> {
if !self.within(Route::MAX_SHOW_RULE_DEPTH) {
bail!(
"maximum show rule depth exceeded";
hint: "check whether the show rule matches its own output"
hint: "maybe a show rule matches its own output";
hint: "maybe there are too deeply nested elements"
);
}
Ok(())

View File

@ -9,9 +9,7 @@ use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use typst_syntax::{Span, Spanned};
use crate::diag::{
bail, At, DeprecationSink, HintedStrResult, SourceDiagnostic, SourceResult, StrResult,
};
use crate::diag::{bail, At, HintedStrResult, SourceDiagnostic, SourceResult, StrResult};
use crate::engine::Engine;
use crate::foundations::{
cast, func, ops, repr, scope, ty, Args, Bytes, CastInfo, Context, Dict, FromValue,
@ -145,11 +143,6 @@ impl Array {
Ok(self.iter().cloned().cycle().take(count).collect())
}
/// The internal implementation of [`Array::contains`].
pub fn contains_impl(&self, value: &Value, sink: &mut dyn DeprecationSink) -> bool {
self.0.iter().any(|v| ops::equal(v, value, sink))
}
}
#[scope]
@ -179,17 +172,29 @@ impl Array {
}
/// Returns the first item in the array. May be used on the left-hand side
/// of an assignment. Fails with an error if the array is empty.
/// an assignment. Returns the default value if the array is empty
/// or fails with an error if no default value was specified.
#[func]
pub fn first(&self) -> StrResult<Value> {
self.0.first().cloned().ok_or_else(array_is_empty)
pub fn first(
&self,
/// A default value to return if the array is empty.
#[named]
default: Option<Value>,
) -> StrResult<Value> {
self.0.first().cloned().or(default).ok_or_else(array_is_empty)
}
/// Returns the last item in the array. May be used on the left-hand side of
/// an assignment. Fails with an error if the array is empty.
/// an assignment. Returns the default value if the array is empty
/// or fails with an error if no default value was specified.
#[func]
pub fn last(&self) -> StrResult<Value> {
self.0.last().cloned().ok_or_else(array_is_empty)
pub fn last(
&self,
/// A default value to return if the array is empty.
#[named]
default: Option<Value>,
) -> StrResult<Value> {
self.0.last().cloned().or(default).ok_or_else(array_is_empty)
}
/// Returns the item at the specified index in the array. May be used on the
@ -297,12 +302,10 @@ impl Array {
#[func]
pub fn contains(
&self,
engine: &mut Engine,
span: Span,
/// The value to search for.
value: Value,
) -> bool {
self.contains_impl(&value, &mut (engine, span))
self.0.contains(&value)
}
/// Searches for an item for which the given function returns `{true}` and
@ -585,8 +588,6 @@ impl Array {
#[func]
pub fn sum(
self,
engine: &mut Engine,
span: Span,
/// What to return if the array is empty. Must be set if the array can
/// be empty.
#[named]
@ -598,7 +599,7 @@ impl Array {
.or(default)
.ok_or("cannot calculate sum of empty array with no default")?;
for item in iter {
acc = ops::add(acc, item, &mut (&mut *engine, span))?;
acc = ops::add(acc, item)?;
}
Ok(acc)
}
@ -697,8 +698,6 @@ impl Array {
#[func]
pub fn join(
self,
engine: &mut Engine,
span: Span,
/// A value to insert between each item of the array.
#[default]
separator: Option<Value>,
@ -714,18 +713,13 @@ impl Array {
for (i, value) in self.into_iter().enumerate() {
if i > 0 {
if i + 1 == len && last.is_some() {
result = ops::join(
result,
last.take().unwrap(),
&mut (&mut *engine, span),
)?;
result = ops::join(result, last.take().unwrap())?;
} else {
result =
ops::join(result, separator.clone(), &mut (&mut *engine, span))?;
result = ops::join(result, separator.clone())?;
}
}
result = ops::join(result, value, &mut (&mut *engine, span))?;
result = ops::join(result, value)?;
}
Ok(result)
@ -769,7 +763,7 @@ impl Array {
///
/// ```example
/// #let array = (1, 2, 3, 4, 5, 6, 7, 8)
/// #array.chunks(3)
/// #array.chunks(3) \
/// #array.chunks(3, exact: true)
/// ```
#[func]
@ -814,7 +808,7 @@ impl Array {
/// function. The sorting algorithm used is stable.
///
/// Returns an error if two values could not be compared or if the key
/// function (if given) yields an error.
/// or comparison function (if given) yields an error.
///
/// To sort according to multiple criteria at once, e.g. in case of equality
/// between some criteria, the key function can return an array. The results
@ -838,33 +832,134 @@ impl Array {
/// determine the keys to sort by.
#[named]
key: Option<Func>,
/// If given, uses this function to compare elements in the array.
///
/// This function should return a boolean: `{true}` indicates that the
/// elements are in order, while `{false}` indicates that they should be
/// swapped. To keep the sort stable, if the two elements are equal, the
/// function should return `{true}`.
///
/// If this function does not order the elements properly (e.g., by
/// returning `{false}` for both `{(x, y)}` and `{(y, x)}`, or for
/// `{(x, x)}`), the resulting array will be in unspecified order.
///
/// When used together with `key`, `by` will be passed the keys instead
/// of the elements.
///
/// ```example
/// #(
/// "sorted",
/// "by",
/// "decreasing",
/// "length",
/// ).sorted(
/// key: s => s.len(),
/// by: (l, r) => l >= r,
/// )
/// ```
#[named]
by: Option<Func>,
) -> SourceResult<Array> {
let mut result = Ok(());
let mut vec = self.0;
let mut key_of = |x: Value| match &key {
// NOTE: We are relying on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the `key`.
Some(f) => f.call(engine, context, [x]),
None => Ok(x),
};
vec.make_mut().sort_by(|a, b| {
// Until we get `try` blocks :)
match (key_of(a.clone()), key_of(b.clone())) {
(Ok(a), Ok(b)) => ops::compare(&a, &b).unwrap_or_else(|err| {
if result.is_ok() {
result = Err(err).at(span);
match by {
Some(by) => {
let mut are_in_order = |mut x, mut y| {
if let Some(f) = &key {
// We rely on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the key.
x = f.call(engine, context, [x])?;
y = f.call(engine, context, [y])?;
}
Ordering::Equal
}),
(Err(e), _) | (_, Err(e)) => {
if result.is_ok() {
result = Err(e);
match by.call(engine, context, [x, y])? {
Value::Bool(b) => Ok(b),
x => {
bail!(
span,
"expected boolean from `by` function, got {}",
x.ty(),
)
}
}
Ordering::Equal
}
};
// If a comparison function is provided, we use `glidesort`
// instead of the standard library sorting algorithm to prevent
// panics in case the comparison function does not define a
// valid order (see https://github.com/typst/typst/pull/5627).
let mut result = Ok(());
let mut vec = self.0.into_iter().enumerate().collect::<Vec<_>>();
glidesort::sort_by(&mut vec, |(i, x), (j, y)| {
// Because we use booleans for the comparison function, in
// order to keep the sort stable, we need to compare in the
// right order.
if i < j {
// If `x` and `y` appear in this order in the original
// array, then we should change their order (i.e.,
// return `Ordering::Greater`) iff `y` is strictly less
// than `x` (i.e., `compare(x, y)` returns `false`).
// Otherwise, we should keep them in the same order
// (i.e., return `Ordering::Less`).
match are_in_order(x.clone(), y.clone()) {
Ok(false) => Ordering::Greater,
Ok(true) => Ordering::Less,
Err(err) => {
if result.is_ok() {
result = Err(err);
}
Ordering::Equal
}
}
} else {
// If `x` and `y` appear in the opposite order in the
// original array, then we should change their order
// (i.e., return `Ordering::Less`) iff `x` is strictly
// less than `y` (i.e., `compare(y, x)` returns
// `false`). Otherwise, we should keep them in the same
// order (i.e., return `Ordering::Less`).
match are_in_order(y.clone(), x.clone()) {
Ok(false) => Ordering::Less,
Ok(true) => Ordering::Greater,
Err(err) => {
if result.is_ok() {
result = Err(err);
}
Ordering::Equal
}
}
}
});
result.map(|()| vec.into_iter().map(|(_, x)| x).collect())
}
});
result.map(|_| vec.into())
None => {
let mut key_of = |x: Value| match &key {
// We rely on `comemo`'s memoization of function evaluation
// to not excessively reevaluate the key.
Some(f) => f.call(engine, context, [x]),
None => Ok(x),
};
// If no comparison function is provided, we know the order is
// valid, so we can use the standard library sort and prevent an
// extra allocation.
let mut result = Ok(());
let mut vec = self.0;
vec.make_mut().sort_by(|a, b| {
match (key_of(a.clone()), key_of(b.clone())) {
(Ok(a), Ok(b)) => ops::compare(&a, &b).unwrap_or_else(|err| {
if result.is_ok() {
result = Err(err).at(span);
}
Ordering::Equal
}),
(Err(e), _) | (_, Err(e)) => {
if result.is_ok() {
result = Err(e);
}
Ordering::Equal
}
}
});
result.map(|()| vec.into())
}
}
}
/// Deduplicates all items in the array.
@ -880,14 +975,13 @@ impl Array {
self,
engine: &mut Engine,
context: Tracked<Context>,
span: Span,
/// If given, applies this function to the elements in the array to
/// determine the keys to deduplicate by.
#[named]
key: Option<Func>,
) -> SourceResult<Array> {
let mut out = EcoVec::with_capacity(self.0.len());
let key_of = |engine: &mut Engine, x: Value| match &key {
let mut key_of = |x: Value| match &key {
// NOTE: We are relying on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the `key`.
Some(f) => f.call(engine, context, [x]),
@ -898,18 +992,14 @@ impl Array {
// 1. We would like to preserve the order of the elements.
// 2. We cannot hash arbitrary `Value`.
'outer: for value in self {
let key = key_of(&mut *engine, value.clone())?;
let key = key_of(value.clone())?;
if out.is_empty() {
out.push(value);
continue;
}
for second in out.iter() {
if ops::equal(
&key,
&key_of(&mut *engine, second.clone())?,
&mut (&mut *engine, span),
) {
if ops::equal(&key, &key_of(second.clone())?) {
continue 'outer;
}
}

View File

@ -21,7 +21,7 @@ use crate::foundations::{
///
/// Type casting works as follows:
/// - [`Reflect for T`](Reflect) describes the possible Typst values for `T`
/// (for documentation and autocomplete).
/// (for documentation and autocomplete).
/// - [`IntoValue for T`](IntoValue) is for conversion from `T -> Value`
/// (infallible)
/// - [`FromValue for T`](FromValue) is for conversion from `Value -> T`

View File

@ -3,7 +3,7 @@ use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::iter::{self, Sum};
use std::marker::PhantomData;
use std::ops::{Add, AddAssign, Deref, DerefMut};
use std::ops::{Add, AddAssign, ControlFlow, Deref, DerefMut};
use std::sync::Arc;
use comemo::Tracked;
@ -414,10 +414,11 @@ impl Content {
/// Elements produced in `show` rules will not be included in the results.
pub fn query(&self, selector: Selector) -> Vec<Content> {
let mut results = Vec::new();
self.traverse(&mut |element| {
self.traverse(&mut |element| -> ControlFlow<()> {
if selector.matches(&element, None) {
results.push(element);
}
ControlFlow::Continue(())
});
results
}
@ -427,54 +428,58 @@ impl Content {
///
/// Elements produced in `show` rules will not be included in the results.
pub fn query_first(&self, selector: &Selector) -> Option<Content> {
let mut result = None;
self.traverse(&mut |element| {
if result.is_none() && selector.matches(&element, None) {
result = Some(element);
self.traverse(&mut |element| -> ControlFlow<Content> {
if selector.matches(&element, None) {
ControlFlow::Break(element)
} else {
ControlFlow::Continue(())
}
});
result
})
.break_value()
}
/// Extracts the plain text of this content.
pub fn plain_text(&self) -> EcoString {
let mut text = EcoString::new();
self.traverse(&mut |element| {
self.traverse(&mut |element| -> ControlFlow<()> {
if let Some(textable) = element.with::<dyn PlainText>() {
textable.plain_text(&mut text);
}
ControlFlow::Continue(())
});
text
}
/// Traverse this content.
fn traverse<F>(&self, f: &mut F)
fn traverse<F, B>(&self, f: &mut F) -> ControlFlow<B>
where
F: FnMut(Content),
F: FnMut(Content) -> ControlFlow<B>,
{
f(self.clone());
self.inner
.elem
.fields()
.into_iter()
.for_each(|(_, value)| walk_value(value, f));
/// Walks a given value to find any content that matches the selector.
fn walk_value<F>(value: Value, f: &mut F)
///
/// Returns early if the function gives `ControlFlow::Break`.
fn walk_value<F, B>(value: Value, f: &mut F) -> ControlFlow<B>
where
F: FnMut(Content),
F: FnMut(Content) -> ControlFlow<B>,
{
match value {
Value::Content(content) => content.traverse(f),
Value::Array(array) => {
for value in array {
walk_value(value, f);
walk_value(value, f)?;
}
ControlFlow::Continue(())
}
_ => {}
_ => ControlFlow::Continue(()),
}
}
// Call f on the element itself before recursively iterating its fields.
f(self.clone())?;
for (_, value) in self.inner.elem.fields() {
walk_value(value, f)?;
}
ControlFlow::Continue(())
}
}

View File

@ -112,7 +112,7 @@ use crate::foundations::{
/// it into another file by writing `{import "foo.typ": alert}`.
///
/// # Unnamed functions { #unnamed }
/// You can also created an unnamed function without creating a binding by
/// You can also create an unnamed function without creating a binding by
/// specifying a parameter list followed by `=>` and the function body. If your
/// function has just one parameter, the parentheses around the parameter list
/// are optional. Unnamed functions are mainly useful for show rules, but also
@ -437,10 +437,10 @@ impl PartialEq for Func {
}
}
impl PartialEq<&NativeFuncData> for Func {
fn eq(&self, other: &&NativeFuncData) -> bool {
impl PartialEq<&'static NativeFuncData> for Func {
fn eq(&self, other: &&'static NativeFuncData) -> bool {
match &self.repr {
Repr::Native(native) => native.function == other.function,
Repr::Native(native) => *native == Static(*other),
_ => false,
}
}

View File

@ -77,6 +77,7 @@ pub use {
indexmap::IndexMap,
};
use comemo::TrackedMut;
use ecow::EcoString;
use typst_syntax::Spanned;
@ -297,5 +298,14 @@ pub fn eval(
for (key, value) in dict {
scope.bind(key.into(), Binding::new(value, span));
}
(engine.routines.eval_string)(engine.routines, engine.world, &text, span, mode, scope)
(engine.routines.eval_string)(
engine.routines,
engine.world,
TrackedMut::reborrow_mut(&mut engine.sink),
&text,
span,
mode,
scope,
)
}

View File

@ -7,7 +7,7 @@ use typst_syntax::FileId;
use crate::diag::{bail, DeprecationSink, StrResult};
use crate::foundations::{repr, ty, Content, Scope, Value};
/// An module of definitions.
/// A module of definitions.
///
/// A module
/// - be built-in

View File

@ -5,7 +5,7 @@ use std::cmp::Ordering;
use ecow::eco_format;
use typst_utils::Numeric;
use crate::diag::{bail, DeprecationSink, HintedStrResult, StrResult};
use crate::diag::{bail, HintedStrResult, StrResult};
use crate::foundations::{
format_str, Datetime, IntoValue, Regex, Repr, SymbolElem, Value,
};
@ -21,7 +21,7 @@ macro_rules! mismatch {
}
/// Join a value with another value.
pub fn join(lhs: Value, rhs: Value, sink: &mut dyn DeprecationSink) -> StrResult<Value> {
pub fn join(lhs: Value, rhs: Value) -> StrResult<Value> {
use Value::*;
Ok(match (lhs, rhs) {
(a, None) => a,
@ -39,17 +39,6 @@ pub fn join(lhs: Value, rhs: Value, sink: &mut dyn DeprecationSink) -> StrResult
(Array(a), Array(b)) => Array(a + b),
(Dict(a), Dict(b)) => Dict(a + b),
(Args(a), Args(b)) => Args(a + b),
// Type compatibility.
(Type(a), Str(b)) => {
warn_type_str_join(sink);
Str(format_str!("{a}{b}"))
}
(Str(a), Type(b)) => {
warn_type_str_join(sink);
Str(format_str!("{a}{b}"))
}
(a, b) => mismatch!("cannot join {} with {}", a, b),
})
}
@ -99,11 +88,7 @@ pub fn neg(value: Value) -> HintedStrResult<Value> {
}
/// Compute the sum of two values.
pub fn add(
lhs: Value,
rhs: Value,
sink: &mut dyn DeprecationSink,
) -> HintedStrResult<Value> {
pub fn add(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
use Value::*;
Ok(match (lhs, rhs) {
(a, None) => a,
@ -171,16 +156,6 @@ pub fn add(
(Datetime(a), Duration(b)) => Datetime(a + b),
(Duration(a), Datetime(b)) => Datetime(b + a),
// Type compatibility.
(Type(a), Str(b)) => {
warn_type_str_add(sink);
Str(format_str!("{a}{b}"))
}
(Str(a), Type(b)) => {
warn_type_str_add(sink);
Str(format_str!("{a}{b}"))
}
(Dyn(a), Dyn(b)) => {
// Alignments can be summed.
if let (Some(&a), Some(&b)) =
@ -419,21 +394,13 @@ pub fn or(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
}
/// Compute whether two values are equal.
pub fn eq(
lhs: Value,
rhs: Value,
sink: &mut dyn DeprecationSink,
) -> HintedStrResult<Value> {
Ok(Value::Bool(equal(&lhs, &rhs, sink)))
pub fn eq(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
Ok(Value::Bool(equal(&lhs, &rhs)))
}
/// Compute whether two values are unequal.
pub fn neq(
lhs: Value,
rhs: Value,
sink: &mut dyn DeprecationSink,
) -> HintedStrResult<Value> {
Ok(Value::Bool(!equal(&lhs, &rhs, sink)))
pub fn neq(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
Ok(Value::Bool(!equal(&lhs, &rhs)))
}
macro_rules! comparison {
@ -452,7 +419,7 @@ comparison!(gt, ">", Ordering::Greater);
comparison!(geq, ">=", Ordering::Greater | Ordering::Equal);
/// Determine whether two values are equal.
pub fn equal(lhs: &Value, rhs: &Value, sink: &mut dyn DeprecationSink) -> bool {
pub fn equal(lhs: &Value, rhs: &Value) -> bool {
use Value::*;
match (lhs, rhs) {
// Compare reflexively.
@ -496,12 +463,6 @@ pub fn equal(lhs: &Value, rhs: &Value, sink: &mut dyn DeprecationSink) -> bool {
rat == rel.rel && rel.abs.is_zero()
}
// Type compatibility.
(Type(ty), Str(str)) | (Str(str), Type(ty)) => {
warn_type_str_equal(sink);
ty.compat_name() == str.as_str()
}
_ => false,
}
}
@ -573,12 +534,8 @@ fn try_cmp_arrays(a: &[Value], b: &[Value]) -> StrResult<Ordering> {
}
/// Test whether one value is "in" another one.
pub fn in_(
lhs: Value,
rhs: Value,
sink: &mut dyn DeprecationSink,
) -> HintedStrResult<Value> {
if let Some(b) = contains(&lhs, &rhs, sink) {
pub fn in_(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
if let Some(b) = contains(&lhs, &rhs) {
Ok(Value::Bool(b))
} else {
mismatch!("cannot apply 'in' to {} and {}", lhs, rhs)
@ -586,12 +543,8 @@ pub fn in_(
}
/// Test whether one value is "not in" another one.
pub fn not_in(
lhs: Value,
rhs: Value,
sink: &mut dyn DeprecationSink,
) -> HintedStrResult<Value> {
if let Some(b) = contains(&lhs, &rhs, sink) {
pub fn not_in(lhs: Value, rhs: Value) -> HintedStrResult<Value> {
if let Some(b) = contains(&lhs, &rhs) {
Ok(Value::Bool(!b))
} else {
mismatch!("cannot apply 'not in' to {} and {}", lhs, rhs)
@ -599,27 +552,13 @@ pub fn not_in(
}
/// Test for containment.
pub fn contains(
lhs: &Value,
rhs: &Value,
sink: &mut dyn DeprecationSink,
) -> Option<bool> {
pub fn contains(lhs: &Value, rhs: &Value) -> Option<bool> {
use Value::*;
match (lhs, rhs) {
(Str(a), Str(b)) => Some(b.as_str().contains(a.as_str())),
(Dyn(a), Str(b)) => a.downcast::<Regex>().map(|regex| regex.is_match(b)),
(Str(a), Dict(b)) => Some(b.contains(a)),
(a, Array(b)) => Some(b.contains_impl(a, sink)),
// Type compatibility.
(Type(a), Str(b)) => {
warn_type_in_str(sink);
Some(b.as_str().contains(a.compat_name()))
}
(Type(a), Dict(b)) => {
warn_type_in_dict(sink);
Some(b.contains(a.compat_name()))
}
(a, Array(b)) => Some(b.contains(a.clone())),
_ => Option::None,
}
@ -629,46 +568,3 @@ pub fn contains(
fn too_large() -> &'static str {
"value is too large"
}
#[cold]
fn warn_type_str_add(sink: &mut dyn DeprecationSink) {
sink.emit_with_hints(
"adding strings and types is deprecated",
&["convert the type to a string with `str` first"],
);
}
#[cold]
fn warn_type_str_join(sink: &mut dyn DeprecationSink) {
sink.emit_with_hints(
"joining strings and types is deprecated",
&["convert the type to a string with `str` first"],
);
}
#[cold]
fn warn_type_str_equal(sink: &mut dyn DeprecationSink) {
sink.emit_with_hints(
"comparing strings with types is deprecated",
&[
"compare with the literal type instead",
"this comparison will always return `false` in future Typst releases",
],
);
}
#[cold]
fn warn_type_in_str(sink: &mut dyn DeprecationSink) {
sink.emit_with_hints(
"checking whether a type is contained in a string is deprecated",
&["this compatibility behavior only exists because `type` used to return a string"],
);
}
#[cold]
fn warn_type_in_dict(sink: &mut dyn DeprecationSink) {
sink.emit_with_hints(
"checking whether a type is contained in a dictionary is deprecated",
&["this compatibility behavior only exists because `type` used to return a string"],
);
}

View File

@ -300,7 +300,7 @@ impl Binding {
/// As the `sink`
/// - pass `()` to ignore the message.
/// - pass `(&mut engine, span)` to emit a warning into the engine.
pub fn read_checked(&self, mut sink: impl DeprecationSink) -> &Value {
pub fn read_checked(&self, sink: impl DeprecationSink) -> &Value {
if let Some(message) = self.deprecation {
sink.emit(message);
}

View File

@ -7,12 +7,13 @@ use comemo::Tracked;
use ecow::EcoString;
use serde::{Deserialize, Serialize};
use typst_syntax::{Span, Spanned};
use unicode_normalization::UnicodeNormalization;
use unicode_segmentation::UnicodeSegmentation;
use crate::diag::{bail, At, SourceResult, StrResult};
use crate::engine::Engine;
use crate::foundations::{
cast, dict, func, repr, scope, ty, Array, Bytes, Context, Decimal, Dict, Func,
cast, dict, func, repr, scope, ty, Array, Bytes, Cast, Context, Decimal, Dict, Func,
IntoValue, Label, Repr, Type, Value, Version,
};
use crate::layout::Alignment;
@ -286,6 +287,30 @@ impl Str {
Ok(c.into())
}
/// Normalizes the string to the given Unicode normal form.
///
/// This is useful when manipulating strings containing Unicode combining
/// characters.
///
/// ```typ
/// #assert.eq("é".normalize(form: "nfd"), "e\u{0301}")
/// #assert.eq("ſ́".normalize(form: "nfkc"), "ś")
/// ```
#[func]
pub fn normalize(
&self,
/// The normal form to apply. Defaults to NFC (canonical composition).
#[named]
#[default(UnicodeNormalForm::Nfc)]
form: UnicodeNormalForm,
) -> Str {
// Each arm delegates to the corresponding iterator adapter from the
// `unicode-normalization` crate and collects back into a `Str`.
match form {
UnicodeNormalForm::Nfc => self.nfc().collect(),
UnicodeNormalForm::Nfd => self.nfd().collect(),
UnicodeNormalForm::Nfkc => self.nfkc().collect(),
UnicodeNormalForm::Nfkd => self.nfkd().collect(),
}
}
/// Whether the string contains the specified pattern.
///
/// This method also has dedicated syntax: You can write `{"bc" in "abcd"}`
@ -788,6 +813,25 @@ cast! {
v: Str => Self::Str(v),
}
/// A Unicode normalization form.
///
/// The four forms are defined in Unicode Standard Annex #15: NFC/NFD use
/// only canonical (de)composition, while NFKC/NFKD additionally apply
/// compatibility decompositions.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)]
pub enum UnicodeNormalForm {
/// Canonical composition where e.g. accented letters are turned into a
/// single Unicode codepoint.
#[string("nfc")]
Nfc,
/// Canonical decomposition where e.g. accented letters are split into a
/// separate base and diacritic.
#[string("nfd")]
Nfd,
/// Like NFC, but using the Unicode compatibility decompositions.
#[string("nfkc")]
Nfkc,
/// Like NFD, but using the Unicode compatibility decompositions.
#[string("nfkd")]
Nfkd,
}
/// Convert an item of std's `match_indices` to a dictionary.
fn match_to_dict((start, text): (usize, &str)) -> Dict {
dict! {

View File

@ -471,7 +471,8 @@ impl Debug for Recipe {
selector.fmt(f)?;
f.write_str(", ")?;
}
self.transform.fmt(f)
self.transform.fmt(f)?;
f.write_str(")")
}
}

View File

@ -21,6 +21,7 @@ use crate::foundations::{
/// be accessed using [field access notation]($scripting/#fields):
///
/// - General symbols are defined in the [`sym` module]($category/symbols/sym)
/// and are accessible without the `sym.` prefix in math mode.
/// - Emoji are defined in the [`emoji` module]($category/symbols/emoji)
///
/// Moreover, you can define custom symbols with this type's constructor
@ -410,7 +411,7 @@ fn find<'a>(
}
let score = (matching, Reverse(total));
if best_score.map_or(true, |b| score > b) {
if best_score.is_none_or(|b| score > b) {
best = Some(candidate.1);
best_score = Some(score);
}

View File

@ -39,21 +39,25 @@ use crate::foundations::{
/// #type(image("glacier.jpg")).
/// ```
///
/// The type of `10` is `int`. Now, what is the type of `int` or even `type`?
/// The type of `{10}` is `int`. Now, what is the type of `int` or even `type`?
/// ```example
/// #type(int) \
/// #type(type)
/// ```
///
/// # Compatibility
/// In Typst 0.7 and lower, the `type` function returned a string instead of a
/// type. Compatibility with the old way will remain until Typst 0.14 to give
/// package authors time to upgrade.
/// Unlike other types like `int`, [none] and [auto] do not have a name
/// representing them. To test if a value is one of these, compare your value to
/// them directly, e.g:
/// ```example
/// #let val = none
/// #if val == none [
/// Yep, it's none.
/// ]
/// ```
///
/// - Checks like `{int == "integer"}` evaluate to `{true}`
/// - Adding/joining a type and string will yield a string
/// - The `{in}` operator on a type and a dictionary will evaluate to `{true}`
/// if the dictionary has a string key matching the type's name
/// Note that `type` will return [`content`] for all document elements. To
/// programmatically determine which kind of content you are dealing with, see
/// [`content.func`].
#[ty(scope, cast)]
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Type(Static<NativeTypeData>);
@ -116,14 +120,6 @@ impl Type {
}
}
// Type compatibility.
impl Type {
/// The type's backward-compatible name.
pub fn compat_name(&self) -> &str {
self.long_name()
}
}
#[scope]
impl Type {
/// Determines a value's type.

View File

@ -292,8 +292,7 @@ impl Repr for Value {
impl PartialEq for Value {
fn eq(&self, other: &Self) -> bool {
// No way to emit deprecation warnings here :(
ops::equal(self, other, &mut ())
ops::equal(self, other)
}
}

View File

@ -229,10 +229,10 @@ impl Counter {
if self.is_page() {
let at_delta =
engine.introspector.page(location).get().saturating_sub(at_page.get());
at_state.step(NonZeroUsize::ONE, at_delta);
at_state.step(NonZeroUsize::ONE, at_delta as u64);
let final_delta =
engine.introspector.pages().get().saturating_sub(final_page.get());
final_state.step(NonZeroUsize::ONE, final_delta);
final_state.step(NonZeroUsize::ONE, final_delta as u64);
}
Ok(CounterState(smallvec![at_state.first(), final_state.first()]))
}
@ -250,7 +250,7 @@ impl Counter {
if self.is_page() {
let delta =
engine.introspector.page(location).get().saturating_sub(page.get());
state.step(NonZeroUsize::ONE, delta);
state.step(NonZeroUsize::ONE, delta as u64);
}
Ok(state)
}
@ -319,7 +319,7 @@ impl Counter {
let delta = page.get() - prev.get();
if delta > 0 {
state.step(NonZeroUsize::ONE, delta);
state.step(NonZeroUsize::ONE, delta as u64);
}
}
@ -500,7 +500,7 @@ impl Counter {
let (mut state, page) = sequence.last().unwrap().clone();
if self.is_page() {
let delta = engine.introspector.pages().get().saturating_sub(page.get());
state.step(NonZeroUsize::ONE, delta);
state.step(NonZeroUsize::ONE, delta as u64);
}
Ok(state)
}
@ -616,13 +616,13 @@ pub trait Count {
/// Counts through elements with different levels.
#[derive(Debug, Clone, PartialEq, Hash)]
pub struct CounterState(pub SmallVec<[usize; 3]>);
pub struct CounterState(pub SmallVec<[u64; 3]>);
impl CounterState {
/// Get the initial counter state for the key.
pub fn init(page: bool) -> Self {
// Special case, because pages always start at one.
Self(smallvec![usize::from(page)])
Self(smallvec![u64::from(page)])
}
/// Advance the counter and return the numbers for the given heading.
@ -645,7 +645,7 @@ impl CounterState {
}
/// Advance the number of the given level by the specified amount.
pub fn step(&mut self, level: NonZeroUsize, by: usize) {
pub fn step(&mut self, level: NonZeroUsize, by: u64) {
let level = level.get();
while self.0.len() < level {
@ -657,7 +657,7 @@ impl CounterState {
}
/// Get the first number of the state.
pub fn first(&self) -> usize {
pub fn first(&self) -> u64 {
self.0.first().copied().unwrap_or(1)
}
@ -675,7 +675,7 @@ impl CounterState {
cast! {
CounterState,
self => Value::Array(self.0.into_iter().map(IntoValue::into_value).collect()),
num: usize => Self(smallvec![num]),
num: u64 => Self(smallvec![num]),
array: Array => Self(array
.into_iter()
.map(Value::cast)
@ -758,7 +758,7 @@ impl Show for Packed<CounterDisplayElem> {
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct ManualPageCounter {
physical: NonZeroUsize,
logical: usize,
logical: u64,
}
impl ManualPageCounter {
@ -773,7 +773,7 @@ impl ManualPageCounter {
}
/// Get the current logical page counter state.
pub fn logical(&self) -> usize {
pub fn logical(&self) -> u64 {
self.logical
}

View File

@ -10,7 +10,7 @@ use typst_utils::NonZeroExt;
use crate::diag::{bail, StrResult};
use crate::foundations::{Content, Label, Repr, Selector};
use crate::html::{HtmlElement, HtmlNode};
use crate::html::HtmlNode;
use crate::introspection::{Location, Tag};
use crate::layout::{Frame, FrameItem, Page, Point, Position, Transform};
use crate::model::Numbering;
@ -55,8 +55,8 @@ impl Introspector {
/// Creates an introspector for HTML.
#[typst_macros::time(name = "introspect html")]
pub fn html(root: &HtmlElement) -> Self {
IntrospectorBuilder::new().build_html(root)
pub fn html(output: &[HtmlNode]) -> Self {
IntrospectorBuilder::new().build_html(output)
}
/// Iterates over all locatable elements.
@ -392,9 +392,9 @@ impl IntrospectorBuilder {
}
/// Build an introspector for an HTML document.
fn build_html(mut self, root: &HtmlElement) -> Introspector {
fn build_html(mut self, output: &[HtmlNode]) -> Introspector {
let mut elems = Vec::new();
self.discover_in_html(&mut elems, root);
self.discover_in_html(&mut elems, output);
self.finalize(elems)
}
@ -434,16 +434,16 @@ impl IntrospectorBuilder {
}
/// Processes the tags in the HTML element.
fn discover_in_html(&mut self, sink: &mut Vec<Pair>, elem: &HtmlElement) {
for child in &elem.children {
match child {
fn discover_in_html(&mut self, sink: &mut Vec<Pair>, nodes: &[HtmlNode]) {
for node in nodes {
match node {
HtmlNode::Tag(tag) => self.discover_in_tag(
sink,
tag,
Position { page: NonZeroUsize::ONE, point: Point::zero() },
),
HtmlNode::Text(_, _) => {}
HtmlNode::Element(elem) => self.discover_in_html(sink, elem),
HtmlNode::Element(elem) => self.discover_in_html(sink, &elem.children),
HtmlNode::Frame(frame) => self.discover_in_frame(
sink,
frame,

View File

@ -50,6 +50,42 @@ impl Dir {
pub const TTB: Self = Self::TTB;
pub const BTT: Self = Self::BTT;
/// Returns a direction from a starting point.
///
/// ```example
/// direction.from(left) \
/// direction.from(right) \
/// direction.from(top) \
/// direction.from(bottom)
/// ```
#[func]
pub const fn from(side: Side) -> Dir {
match side {
Side::Left => Self::LTR,
Side::Right => Self::RTL,
Side::Top => Self::TTB,
Side::Bottom => Self::BTT,
}
}
/// Returns a direction from an end point.
///
/// ```example
/// direction.to(left) \
/// direction.to(right) \
/// direction.to(top) \
/// direction.to(bottom)
/// ```
#[func]
pub const fn to(side: Side) -> Dir {
match side {
Side::Right => Self::LTR,
Side::Left => Self::RTL,
Side::Bottom => Self::TTB,
Side::Top => Self::BTT,
}
}
/// The axis this direction belongs to, either `{"horizontal"}` or
/// `{"vertical"}`.
///
@ -65,6 +101,22 @@ impl Dir {
}
}
/// The corresponding sign, for use in calculations.
///
/// ```example
/// #ltr.sign() \
/// #rtl.sign() \
/// #ttb.sign() \
/// #btt.sign()
/// ```
#[func]
pub const fn sign(self) -> i64 {
match self {
Self::LTR | Self::TTB => 1,
Self::RTL | Self::BTT => -1,
}
}
/// The start point of this direction, as an alignment.
///
/// ```example

File diff suppressed because it is too large Load Diff

View File

@ -22,7 +22,8 @@ use crate::layout::{BlockElem, Size};
/// #let text = lorem(30)
/// #layout(size => [
/// #let (height,) = measure(
/// block(width: size.width, text),
/// width: size.width,
/// text,
/// )
/// This text is #height high with
/// the current page width: \

View File

@ -75,9 +75,10 @@ pub struct PageElem {
/// The height of the page.
///
/// If this is set to `{auto}`, page breaks can only be triggered manually
/// by inserting a [page break]($pagebreak). Most examples throughout this
/// documentation use `{auto}` for the height of the page to dynamically
/// grow and shrink to fit their content.
/// by inserting a [page break]($pagebreak) or by adding another non-empty
/// page set rule. Most examples throughout this documentation use `{auto}`
/// for the height of the page to dynamically grow and shrink to fit their
/// content.
#[resolve]
#[parse(
args.named("height")?
@ -483,7 +484,7 @@ pub struct Page {
pub supplement: Content,
/// The logical page number (controlled by `counter(page)` and may thus not
/// match the physical number).
pub number: usize,
pub number: u64,
}
impl Page {

View File

@ -8,15 +8,35 @@ use crate::foundations::{repr, ty, Repr};
/// A ratio of a whole.
///
/// Written as a number, followed by a percent sign.
/// A ratio is written as a number, followed by a percent sign. Ratios most
/// often appear as part of a [relative length]($relative), to specify the size
/// of some layout element relative to the page or some container.
///
/// # Example
/// ```example
/// #set align(center)
/// #scale(x: 150%)[
/// Scaled apart.
/// ]
/// #rect(width: 25%)
/// ```
///
/// However, they can also describe any other property that is relative to some
/// base, e.g. an amount of [horizontal scaling]($scale.x) or the
/// [height of parentheses]($math.lr.size) relative to the height of the content
/// they enclose.
///
/// # Scripting
/// Within your own code, you can use ratios as you like. You can multiply them
/// with various other types as shown below:
///
/// | Multiply by | Example | Result |
/// |-----------------|-------------------------|-----------------|
/// | [`ratio`] | `{27% * 10%}` | `{2.7%}` |
/// | [`length`] | `{27% * 100pt}` | `{27pt}` |
/// | [`relative`] | `{27% * (10% + 100pt)}` | `{2.7% + 27pt}` |
/// | [`angle`] | `{27% * 100deg}` | `{27deg}` |
/// | [`int`] | `{27% * 2}` | `{54%}` |
/// | [`float`] | `{27% * 0.37037}` | `{10%}` |
/// | [`fraction`] | `{27% * 3fr}` | `{0.81fr}` |
///
/// When ratios are displayed in the document, they are rounded to two
/// significant digits for readability.
#[ty(cast)]
#[derive(Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Ratio(Scalar);

View File

@ -14,17 +14,58 @@ use crate::layout::{Abs, Em, Length, Ratio};
/// addition and subtraction of a length and a ratio. Wherever a relative length
/// is expected, you can also use a bare length or ratio.
///
/// # Example
/// ```example
/// #rect(width: 100% - 50pt)
/// # Relative to the page
/// A common use case is setting the width or height of a layout element (e.g.,
/// [block], [rect], etc.) as a certain percentage of the width of the page.
/// Here, the rectangle's width is set to `{25%}`, so it takes up one fourth of
/// the page's _inner_ width (the width minus margins).
///
/// #(100% - 50pt).length \
/// #(100% - 50pt).ratio
/// ```example
/// #rect(width: 25%)
/// ```
///
/// Bare lengths or ratios are always valid where relative lengths are expected,
/// but the two can also be freely mixed:
/// ```example
/// #rect(width: 25% + 1cm)
/// ```
///
/// If you're trying to size an element so that it takes up the page's _full_
/// width, you have a few options (this highly depends on your exact use case):
///
/// 1. Set page margins to `{0pt}` (`[#set page(margin: 0pt)]`)
/// 2. Multiply the ratio by the known full page width (`{21cm * 69%}`)
/// 3. Use padding which will negate the margins (`[#pad(x: -2.5cm, ...)]`)
/// 4. Use the page [background](page.background) or
/// [foreground](page.foreground) field as those don't take margins into
/// account (note that it will render the content outside of the document
/// flow, see [place] to control the content position)
///
/// # Relative to a container
/// When a layout element (e.g. a [rect]) is nested in another layout container
/// (e.g. a [block]) instead of being a direct descendant of the page, relative
/// widths become relative to the container:
///
/// ```example
/// #block(
/// width: 100pt,
/// fill: aqua,
/// rect(width: 50%),
/// )
/// ```
///
/// # Scripting
/// You can multiply relative lengths by [ratios]($ratio), [integers]($int), and
/// [floats]($float).
///
/// A relative length has the following fields:
/// - `length`: Its length component.
/// - `ratio`: Its ratio component.
///
/// ```example
/// #(100% - 50pt).length \
/// #(100% - 50pt).ratio
/// ```
#[ty(cast, name = "relative", title = "Relative Length")]
#[derive(Default, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Rel<T: Numeric = Length> {

View File

@ -307,6 +307,20 @@ impl Transform {
Self { sx, sy, ..Self::identity() }
}
/// A scale transform at a specific position.
pub fn scale_at(sx: Ratio, sy: Ratio, px: Abs, py: Abs) -> Self {
Self::translate(px, py)
.pre_concat(Self::scale(sx, sy))
.pre_concat(Self::translate(-px, -py))
}
/// A rotate transform at a specific position.
pub fn rotate_at(angle: Angle, px: Abs, py: Abs) -> Self {
Self::translate(px, py)
.pre_concat(Self::rotate(angle))
.pre_concat(Self::translate(-px, -py))
}
/// A rotate transform.
pub fn rotate(angle: Angle) -> Self {
let cos = Ratio::new(angle.cos());

View File

@ -34,14 +34,14 @@ use crate::loading::{DataSource, Load, Readable};
/// let author = find-child(elem, "author")
/// let pars = find-child(elem, "content")
///
/// heading(title.children.first())
/// [= #title.children.first()]
/// text(10pt, weight: "medium")[
/// Published by
/// #author.children.first()
/// ]
///
/// for p in pars.children {
/// if (type(p) == "dictionary") {
/// if type(p) == dictionary {
/// parbreak()
/// p.children.first()
/// }
@ -50,7 +50,7 @@ use crate::loading::{DataSource, Load, Readable};
///
/// #let data = xml("example.xml")
/// #for elem in data.first().children {
/// if (type(elem) == "dictionary") {
/// if type(elem) == dictionary {
/// article(elem)
/// }
/// }

View File

@ -13,8 +13,8 @@ use crate::math::Mathy;
/// ```
#[elem(Mathy)]
pub struct AccentElem {
/// The base to which the accent is applied.
/// May consist of multiple letters.
/// The base to which the accent is applied. May consist of multiple
/// letters.
///
/// ```example
/// $arrow(A B C)$
@ -51,9 +51,24 @@ pub struct AccentElem {
pub accent: Accent,
/// The size of the accent, relative to the width of the base.
///
/// ```example
/// $dash(A, size: #150%)$
/// ```
#[resolve]
#[default(Rel::one())]
pub size: Rel<Length>,
/// Whether to remove the dot on top of lowercase i and j when adding a top
/// accent.
///
/// This enables the `dtls` OpenType feature.
///
/// ```example
/// $hat(dotless: #false, i)$
/// ```
#[default(true)]
pub dotless: bool,
}
/// An accent character.
@ -103,11 +118,18 @@ macro_rules! accents {
/// The size of the accent, relative to the width of the base.
#[named]
size: Option<Rel<Length>>,
/// Whether to remove the dot on top of lowercase i and j when
/// adding a top accent.
#[named]
dotless: Option<bool>,
) -> Content {
let mut accent = AccentElem::new(base, Accent::new($primary));
if let Some(size) = size {
accent = accent.with_size(size);
}
if let Some(dotless) = dotless {
accent = accent.with_dotless(dotless);
}
accent.pack()
}
)+

View File

@ -15,7 +15,7 @@ use crate::math::Mathy;
/// # Syntax
/// This function also has dedicated syntax: Use a slash to turn neighbouring
/// expressions into a fraction. Multiple atoms can be grouped into a single
/// expression using round grouping parenthesis. Such parentheses are removed
/// expression using round grouping parentheses. Such parentheses are removed
/// from the output, but you can nest multiple to force them.
#[elem(title = "Fraction", Mathy)]
pub struct FracElem {

View File

@ -6,7 +6,7 @@ use std::num::NonZeroUsize;
use std::path::Path;
use std::sync::{Arc, LazyLock};
use comemo::Tracked;
use comemo::{Track, Tracked};
use ecow::{eco_format, EcoString, EcoVec};
use hayagriva::archive::ArchivedStyle;
use hayagriva::io::BibLaTeXError;
@ -20,7 +20,7 @@ use typst_syntax::{Span, Spanned};
use typst_utils::{Get, ManuallyHash, NonZeroExt, PicoStr};
use crate::diag::{bail, error, At, FileError, HintedStrResult, SourceResult, StrResult};
use crate::engine::Engine;
use crate::engine::{Engine, Sink};
use crate::foundations::{
elem, Bytes, CastInfo, Content, Derived, FromValue, IntoValue, Label, NativeElement,
OneOrMultiple, Packed, Reflect, Scope, Show, ShowSet, Smart, StyleChain, Styles,
@ -94,7 +94,7 @@ pub struct BibliographyElem {
/// - A path string to load a bibliography file from the given path. For
/// more details about paths, see the [Paths section]($syntax/#paths).
/// - Raw bytes from which the bibliography should be decoded.
/// - An array where each item is one the above.
/// - An array where each item is one of the above.
#[required]
#[parse(
let sources = args.expect("sources")?;
@ -999,6 +999,8 @@ impl ElemRenderer<'_> {
(self.routines.eval_string)(
self.routines,
self.world,
// TODO: propagate warnings
Sink::new().track_mut(),
math,
self.span,
EvalMode::Math,

View File

@ -129,7 +129,7 @@ pub struct EnumElem {
/// [Ahead],
/// )
/// ```
pub start: Smart<usize>,
pub start: Smart<u64>,
/// Whether to display the full numbering, including the numbers of
/// all parent enumerations.
@ -217,7 +217,7 @@ pub struct EnumElem {
#[internal]
#[fold]
#[ghost]
pub parents: SmallVec<[usize; 4]>,
pub parents: SmallVec<[u64; 4]>,
}
#[scope]
@ -259,10 +259,11 @@ impl Show for Packed<EnumElem> {
.spanned(self.span());
if tight {
let leading = ParElem::leading_in(styles);
let spacing =
VElem::new(leading.into()).with_weak(true).with_attach(true).pack();
realized = spacing + realized;
let spacing = self
.spacing(styles)
.unwrap_or_else(|| ParElem::leading_in(styles).into());
let v = VElem::new(spacing.into()).with_weak(true).with_attach(true).pack();
realized = v + realized;
}
Ok(realized)
@ -274,7 +275,7 @@ impl Show for Packed<EnumElem> {
pub struct EnumItem {
/// The item's number.
#[positional]
pub number: Option<usize>,
pub number: Option<u64>,
/// The item's body.
#[required]

View File

@ -457,7 +457,7 @@ impl Outlinable for Packed<FigureElem> {
/// customize the appearance of captions for all figures or figures of a
/// specific kind.
///
/// In addition to its `pos` and `body`, the `caption` also provides the
/// In addition to its `position` and `body`, the `caption` also provides the
/// figure's `kind`, `supplement`, `counter`, and `numbering` as fields. These
/// parts can be used in [`where`]($function.where) selectors and show rules to
/// build a completely custom caption.

View File

@ -166,10 +166,11 @@ impl Show for Packed<ListElem> {
.spanned(self.span());
if tight {
let leading = ParElem::leading_in(styles);
let spacing =
VElem::new(leading.into()).with_weak(true).with_attach(true).pack();
realized = spacing + realized;
let spacing = self
.spacing(styles)
.unwrap_or_else(|| ParElem::leading_in(styles).into());
let v = VElem::new(spacing.into()).with_weak(true).with_attach(true).pack();
realized = v + realized;
}
Ok(realized)

View File

@ -1,7 +1,7 @@
use std::str::FromStr;
use chinese_number::{
from_usize_to_chinese_ten_thousand as usize_to_chinese, ChineseCase, ChineseVariant,
from_u64_to_chinese_ten_thousand as u64_to_chinese, ChineseCase, ChineseVariant,
};
use comemo::Tracked;
use ecow::{eco_format, EcoString, EcoVec};
@ -85,7 +85,7 @@ pub fn numbering(
/// If `numbering` is a pattern and more numbers than counting symbols are
/// given, the last counting symbol with its prefix is repeated.
#[variadic]
numbers: Vec<usize>,
numbers: Vec<u64>,
) -> SourceResult<Value> {
numbering.apply(engine, context, &numbers)
}
@ -105,7 +105,7 @@ impl Numbering {
&self,
engine: &mut Engine,
context: Tracked<Context>,
numbers: &[usize],
numbers: &[u64],
) -> SourceResult<Value> {
Ok(match self {
Self::Pattern(pattern) => Value::Str(pattern.apply(numbers).into()),
@ -156,7 +156,7 @@ pub struct NumberingPattern {
impl NumberingPattern {
/// Apply the pattern to the given number.
pub fn apply(&self, numbers: &[usize]) -> EcoString {
pub fn apply(&self, numbers: &[u64]) -> EcoString {
let mut fmt = EcoString::new();
let mut numbers = numbers.iter();
@ -185,7 +185,7 @@ impl NumberingPattern {
}
/// Apply only the k-th segment of the pattern to a number.
pub fn apply_kth(&self, k: usize, number: usize) -> EcoString {
pub fn apply_kth(&self, k: usize, number: u64) -> EcoString {
let mut fmt = EcoString::new();
if let Some((prefix, _)) = self.pieces.first() {
fmt.push_str(prefix);
@ -379,7 +379,7 @@ impl NumberingKind {
}
/// Apply the numbering to the given number.
pub fn apply(self, n: usize) -> EcoString {
pub fn apply(self, n: u64) -> EcoString {
match self {
Self::Arabic => eco_format!("{n}"),
Self::LowerRoman => roman_numeral(n, Case::Lower),
@ -392,9 +392,10 @@ impl NumberingKind {
}
const SYMBOLS: &[char] = &['*', '†', '‡', '§', '¶', '‖'];
let symbol = SYMBOLS[(n - 1) % SYMBOLS.len()];
let amount = ((n - 1) / SYMBOLS.len()) + 1;
std::iter::repeat(symbol).take(amount).collect()
let n_symbols = SYMBOLS.len() as u64;
let symbol = SYMBOLS[((n - 1) % n_symbols) as usize];
let amount = ((n - 1) / n_symbols) + 1;
std::iter::repeat_n(symbol, amount.try_into().unwrap()).collect()
}
Self::Hebrew => hebrew_numeral(n),
@ -489,18 +490,16 @@ impl NumberingKind {
}
Self::LowerSimplifiedChinese => {
usize_to_chinese(ChineseVariant::Simple, ChineseCase::Lower, n).into()
u64_to_chinese(ChineseVariant::Simple, ChineseCase::Lower, n).into()
}
Self::UpperSimplifiedChinese => {
usize_to_chinese(ChineseVariant::Simple, ChineseCase::Upper, n).into()
u64_to_chinese(ChineseVariant::Simple, ChineseCase::Upper, n).into()
}
Self::LowerTraditionalChinese => {
usize_to_chinese(ChineseVariant::Traditional, ChineseCase::Lower, n)
.into()
u64_to_chinese(ChineseVariant::Traditional, ChineseCase::Lower, n).into()
}
Self::UpperTraditionalChinese => {
usize_to_chinese(ChineseVariant::Traditional, ChineseCase::Upper, n)
.into()
u64_to_chinese(ChineseVariant::Traditional, ChineseCase::Upper, n).into()
}
Self::EasternArabic => decimal('\u{0660}', n),
@ -512,7 +511,7 @@ impl NumberingKind {
}
/// Stringify an integer to a Hebrew number.
fn hebrew_numeral(mut n: usize) -> EcoString {
fn hebrew_numeral(mut n: u64) -> EcoString {
if n == 0 {
return '-'.into();
}
@ -566,7 +565,7 @@ fn hebrew_numeral(mut n: usize) -> EcoString {
}
/// Stringify an integer to a Roman numeral.
fn roman_numeral(mut n: usize, case: Case) -> EcoString {
fn roman_numeral(mut n: u64, case: Case) -> EcoString {
if n == 0 {
return match case {
Case::Lower => 'n'.into(),
@ -622,7 +621,7 @@ fn roman_numeral(mut n: usize, case: Case) -> EcoString {
///
/// [converter]: https://www.russellcottrell.com/greek/utilities/GreekNumberConverter.htm
/// [numbers]: https://mathshistory.st-andrews.ac.uk/HistTopics/Greek_numbers/
fn greek_numeral(n: usize, case: Case) -> EcoString {
fn greek_numeral(n: u64, case: Case) -> EcoString {
let thousands = [
["͵α", "͵Α"],
["͵β", "͵Β"],
@ -683,7 +682,7 @@ fn greek_numeral(n: usize, case: Case) -> EcoString {
let mut decimal_digits: Vec<usize> = Vec::new();
let mut n = n;
while n > 0 {
decimal_digits.push(n % 10);
decimal_digits.push((n % 10) as usize);
n /= 10;
}
@ -778,18 +777,16 @@ fn greek_numeral(n: usize, case: Case) -> EcoString {
///
/// You might be familiar with this scheme from the way spreadsheet software
/// tends to label its columns.
fn zeroless<const N_DIGITS: usize>(
alphabet: [char; N_DIGITS],
mut n: usize,
) -> EcoString {
fn zeroless<const N_DIGITS: usize>(alphabet: [char; N_DIGITS], mut n: u64) -> EcoString {
if n == 0 {
return '-'.into();
}
let n_digits = N_DIGITS as u64;
let mut cs = EcoString::new();
while n > 0 {
n -= 1;
cs.push(alphabet[n % N_DIGITS]);
n /= N_DIGITS;
cs.push(alphabet[(n % n_digits) as usize]);
n /= n_digits;
}
cs.chars().rev().collect()
}
@ -797,7 +794,7 @@ fn zeroless<const N_DIGITS: usize>(
/// Stringify a number using a base-10 counting system with a zero digit.
///
/// This function assumes that the digits occupy contiguous codepoints.
fn decimal(start: char, mut n: usize) -> EcoString {
fn decimal(start: char, mut n: u64) -> EcoString {
if n == 0 {
return start.into();
}

View File

@ -388,7 +388,7 @@ pub struct OutlineEntry {
/// space between the entry's body and the page number. When using show
/// rules to override outline entries, it is thus recommended to wrap the
/// fill in a [`box`] with fractional width, i.e.
/// `{box(width: 1fr, it.fill}`.
/// `{box(width: 1fr, it.fill)}`.
///
/// When using [`repeat`], the [`gap`]($repeat.gap) property can be useful
/// to tweak the visual weight of the fill.

View File

@ -161,7 +161,7 @@ impl Show for Packed<QuoteElem> {
let block = self.block(styles);
let html = TargetElem::target_in(styles).is_html();
if self.quotes(styles) == Smart::Custom(true) || !block {
if self.quotes(styles).unwrap_or(!block) {
let quotes = SmartQuotes::get(
SmartQuoteElem::quotes_in(styles),
TextElem::lang_in(styles),

View File

@ -282,7 +282,7 @@ fn show_cell_html(tag: HtmlTag, cell: &Cell, styles: StyleChain) -> Content {
fn show_cellgrid_html(grid: CellGrid, styles: StyleChain) -> Content {
let elem = |tag, body| HtmlElem::new(tag).with_body(Some(body)).pack();
let mut rows: Vec<_> = grid.entries.chunks(grid.cols.len()).collect();
let mut rows: Vec<_> = grid.entries.chunks(grid.non_gutter_column_count()).collect();
let tr = |tag, row: &[Entry]| {
let row = row

View File

@ -189,13 +189,15 @@ impl Show for Packed<TermsElem> {
.styled(TermsElem::set_within(true));
if tight {
let leading = ParElem::leading_in(styles);
let spacing = VElem::new(leading.into())
let spacing = self
.spacing(styles)
.unwrap_or_else(|| ParElem::leading_in(styles).into());
let v = VElem::new(spacing.into())
.with_weak(true)
.with_attach(true)
.pack()
.spanned(span);
realized = spacing + realized;
realized = v + realized;
}
Ok(realized)

View File

@ -55,6 +55,7 @@ routines! {
fn eval_string(
routines: &Routines,
world: Tracked<dyn World + '_>,
sink: TrackedMut<Sink>,
string: &str,
span: Span,
mode: EvalMode,

View File

@ -160,7 +160,7 @@ impl FontBook {
current.variant.weight.distance(variant.weight),
);
if best_key.map_or(true, |b| key < b) {
if best_key.is_none_or(|b| key < b) {
best = Some(id);
best_key = Some(key);
}

View File

@ -14,7 +14,7 @@ macro_rules! translation {
};
}
const TRANSLATIONS: [(&str, &str); 38] = [
const TRANSLATIONS: [(&str, &str); 39] = [
translation!("ar"),
translation!("bg"),
translation!("ca"),
@ -31,6 +31,7 @@ const TRANSLATIONS: [(&str, &str); 38] = [
translation!("el"),
translation!("he"),
translation!("hu"),
translation!("id"),
translation!("is"),
translation!("it"),
translation!("ja"),
@ -82,6 +83,7 @@ impl Lang {
pub const HEBREW: Self = Self(*b"he ", 2);
pub const HUNGARIAN: Self = Self(*b"hu ", 2);
pub const ICELANDIC: Self = Self(*b"is ", 2);
pub const INDONESIAN: Self = Self(*b"id ", 2);
pub const ITALIAN: Self = Self(*b"it ", 2);
pub const JAPANESE: Self = Self(*b"ja ", 2);
pub const LATIN: Self = Self(*b"la ", 2);

View File

@ -42,7 +42,7 @@ use ttf_parser::Tag;
use typst_syntax::Spanned;
use typst_utils::singleton;
use crate::diag::{bail, warning, HintedStrResult, SourceResult};
use crate::diag::{bail, warning, HintedStrResult, SourceResult, StrResult};
use crate::engine::Engine;
use crate::foundations::{
cast, dict, elem, Args, Array, Cast, Construct, Content, Dict, Fold, IntoValue,
@ -891,9 +891,21 @@ cast! {
}
/// Font family fallback list.
///
/// Must contain at least one font.
#[derive(Debug, Default, Clone, PartialEq, Hash)]
pub struct FontList(pub Vec<FontFamily>);
impl FontList {
pub fn new(fonts: Vec<FontFamily>) -> StrResult<Self> {
if fonts.is_empty() {
bail!("font fallback list must not be empty")
} else {
Ok(Self(fonts))
}
}
}
impl<'a> IntoIterator for &'a FontList {
type IntoIter = std::slice::Iter<'a, FontFamily>;
type Item = &'a FontFamily;
@ -911,7 +923,7 @@ cast! {
self.0.into_value()
},
family: FontFamily => Self(vec![family]),
values: Array => Self(values.into_iter().map(|v| v.cast()).collect::<HintedStrResult<_>>()?),
values: Array => Self::new(values.into_iter().map(|v| v.cast()).collect::<HintedStrResult<_>>()?)?,
}
/// Resolve a prioritized iterator over the font families.

View File

@ -188,7 +188,7 @@ pub struct RawElem {
/// - A path string to load a syntax file from the given path. For more
/// details about paths, see the [Paths section]($syntax/#paths).
/// - Raw bytes from which the syntax should be decoded.
/// - An array where each item is one the above.
/// - An array where each item is one of the above.
///
/// ````example
/// #set raw(syntaxes: "SExpressions.sublime-syntax")

View File

@ -159,7 +159,7 @@ fn is_shapable(engine: &Engine, text: &str, styles: StyleChain) -> bool {
{
let covers = family.covers();
return text.chars().all(|c| {
covers.map_or(true, |cov| cov.is_match(c.encode_utf8(&mut [0; 4])))
covers.is_none_or(|cov| cov.is_match(c.encode_utf8(&mut [0; 4])))
&& font.ttf().glyph_index(c).is_some()
});
}

View File

@ -238,7 +238,7 @@ impl<'s> SmartQuotes<'s> {
"cs" | "de" | "et" | "is" | "lt" | "lv" | "sk" | "sl" => low_high,
"da" => ("", "", "", ""),
"fr" | "ru" if alternative => default,
"fr" => ("\u{00A0}", "\u{00A0}", "«\u{00A0}", "\u{00A0}»"),
"fr" => ("", "", "«\u{202F}", "\u{202F}»"),
"fi" | "sv" if alternative => ("", "", "»", "»"),
"bs" | "fi" | "sv" => ("", "", "", ""),
"it" if alternative => default,

View File

@ -130,7 +130,7 @@ static TO_SRGB: LazyLock<qcms::Transform> = LazyLock::new(|| {
///
/// # Predefined color maps
/// Typst also includes a number of preset color maps that can be used for
/// [gradients]($gradient.linear). These are simply arrays of colors defined in
/// [gradients]($gradient/#stops). These are simply arrays of colors defined in
/// the module `color.map`.
///
/// ```example
@ -148,11 +148,11 @@ static TO_SRGB: LazyLock<qcms::Transform> = LazyLock::new(|| {
/// | `magma` | A black to purple to yellow color map. |
/// | `plasma` | A purple to pink to yellow color map. |
/// | `rocket` | A black to red to white color map. |
/// | `mako` | A black to teal to yellow color map. |
/// | `mako` | A black to teal to white color map. |
/// | `vlag` | A light blue to white to red color map. |
/// | `icefire` | A light teal to black to yellow color map. |
/// | `icefire` | A light teal to black to orange color map. |
/// | `flare` | A orange to purple color map that is perceptually uniform. |
/// | `crest` | A blue to white to red color map. |
/// | `crest` | A light green to blue color map. |
///
/// Some popular presets are not included because they are not available under a
/// free licence. Others, like

View File

@ -10,6 +10,8 @@ use crate::foundations::{
use crate::layout::{Abs, Axes, BlockElem, Length, Point, Rel, Size};
use crate::visualize::{FillRule, Paint, Stroke};
use super::FixedStroke;
/// A curve consisting of movements, lines, and Bézier segments.
///
/// At any point in time, there is a conceptual pen or cursor.
@ -530,3 +532,65 @@ impl Curve {
Size::new(max_x - min_x, max_y - min_y)
}
}
impl Curve {
    /// Lazily converts the curve's items into kurbo path elements.
    fn to_kurbo(&self) -> impl Iterator<Item = kurbo::PathEl> + '_ {
        use kurbo::PathEl;
        self.0.iter().map(|item| match *item {
            CurveItem::Move(point) => PathEl::MoveTo(point_to_kurbo(point)),
            CurveItem::Line(point) => PathEl::LineTo(point_to_kurbo(point)),
            CurveItem::Cubic(point, point1, point2) => PathEl::CurveTo(
                point_to_kurbo(point),
                point_to_kurbo(point1),
                point_to_kurbo(point2),
            ),
            CurveItem::Close => PathEl::ClosePath,
        })
    }

    /// When this curve is interpreted as a clip mask, would it contain `point`?
    pub fn contains(&self, fill_rule: FillRule, needle: Point) -> bool {
        let kurbo = kurbo::BezPath::from_vec(self.to_kurbo().collect());
        // The winding number at the needle decides containment, interpreted
        // according to the given fill rule.
        let windings = kurbo::Shape::winding(&kurbo, point_to_kurbo(needle));
        match fill_rule {
            FillRule::NonZero => windings != 0,
            FillRule::EvenOdd => windings % 2 != 0,
        }
    }

    /// When this curve is stroked with `stroke`, would the stroke contain
    /// `point`?
    pub fn stroke_contains(&self, stroke: &FixedStroke, needle: Point) -> bool {
        let width = stroke.thickness.to_raw();
        // Translate Typst's stroke caps and joins into their kurbo
        // equivalents.
        let cap = match stroke.cap {
            super::LineCap::Butt => kurbo::Cap::Butt,
            super::LineCap::Round => kurbo::Cap::Round,
            super::LineCap::Square => kurbo::Cap::Square,
        };
        let join = match stroke.join {
            super::LineJoin::Miter => kurbo::Join::Miter,
            super::LineJoin::Round => kurbo::Join::Round,
            super::LineJoin::Bevel => kurbo::Join::Bevel,
        };
        let miter_limit = stroke.miter_limit.get();
        let mut style = kurbo::Stroke::new(width)
            .with_caps(cap)
            .with_join(join)
            .with_miter_limit(miter_limit);
        if let Some(dash) = &stroke.dash {
            style = style.with_dashes(
                dash.phase.to_raw(),
                dash.array.iter().copied().map(Abs::to_raw),
            );
        }
        let opts = kurbo::StrokeOpts::default();
        // Tolerance for the stroke-expansion approximation.
        let tolerance = 0.01;
        // Expand the stroke into a filled outline, then test that outline for
        // containment.
        let expanded = kurbo::stroke(self.to_kurbo(), &style, &opts, tolerance);
        kurbo::Shape::contains(&expanded, point_to_kurbo(needle))
    }
}
/// Converts a Typst point into a kurbo point, using raw length values.
fn point_to_kurbo(point: Point) -> kurbo::Point {
    kurbo::Point::new(point.x.to_raw(), point.y.to_raw())
}

View File

@ -70,6 +70,9 @@ use crate::visualize::{Color, ColorSpace, WeightedColor};
/// the offsets when defining a gradient. In this case, Typst will space all
/// stops evenly.
///
/// Typst predefines color maps that you can use as stops. See the
/// [`color`]($color/#predefined-color-maps) documentation for more details.
///
/// # Relativeness
/// The location of the `{0%}` and `{100%}` stops depends on the dimensions
/// of a container. This container can either be the shape that it is being
@ -117,12 +120,12 @@ use crate::visualize::{Color, ColorSpace, WeightedColor};
/// #let spaces = (
/// ("Oklab", color.oklab),
/// ("Oklch", color.oklch),
/// ("linear-RGB", color.linear-rgb),
/// ("sRGB", color.rgb),
/// ("linear-RGB", color.linear-rgb),
/// ("CMYK", color.cmyk),
/// ("Grayscale", color.luma),
/// ("HSL", color.hsl),
/// ("HSV", color.hsv),
/// ("Grayscale", color.luma),
/// )
///
/// #for (name, space) in spaces {
@ -157,10 +160,6 @@ use crate::visualize::{Color, ColorSpace, WeightedColor};
/// )
/// ```
///
/// # Presets
/// Typst predefines color maps that you can use with your gradients. See the
/// [`color`]($color/#predefined-color-maps) documentation for more details.
///
/// # Note on file sizes
///
/// Gradients can be quite large, especially if they have many stops. This is
@ -288,7 +287,7 @@ impl Gradient {
/// )),
/// )
/// ```
#[func]
#[func(title = "Radial Gradient")]
fn radial(
span: Span,
/// The color [stops](#stops) of the gradient.
@ -402,7 +401,7 @@ impl Gradient {
/// )),
/// )
/// ```
#[func]
#[func(title = "Conic Gradient")]
pub fn conic(
span: Span,
/// The color [stops](#stops) of the gradient.
@ -575,8 +574,7 @@ impl Gradient {
}
let n = repetitions.v;
let mut stops = std::iter::repeat(self.stops_ref())
.take(n)
let mut stops = std::iter::repeat_n(self.stops_ref(), n)
.enumerate()
.flat_map(|(i, stops)| {
let mut stops = stops

View File

@ -3,6 +3,8 @@ use std::hash::{Hash, Hasher};
use std::io;
use std::sync::Arc;
use crate::diag::{bail, StrResult};
use crate::foundations::{cast, dict, Bytes, Cast, Dict, Smart, Value};
use ecow::{eco_format, EcoString};
use image::codecs::gif::GifDecoder;
use image::codecs::jpeg::JpegDecoder;
@ -11,9 +13,6 @@ use image::{
guess_format, DynamicImage, ImageBuffer, ImageDecoder, ImageResult, Limits, Pixel,
};
use crate::diag::{bail, StrResult};
use crate::foundations::{cast, dict, Bytes, Cast, Dict, Smart, Value};
/// A decoded raster image.
#[derive(Clone, Hash)]
pub struct RasterImage(Arc<Repr>);
@ -22,7 +21,8 @@ pub struct RasterImage(Arc<Repr>);
struct Repr {
data: Bytes,
format: RasterFormat,
dynamic: image::DynamicImage,
dynamic: Arc<DynamicImage>,
exif_rotation: Option<u32>,
icc: Option<Bytes>,
dpi: Option<f64>,
}
@ -50,6 +50,8 @@ impl RasterImage {
format: RasterFormat,
icc: Smart<Bytes>,
) -> StrResult<RasterImage> {
let mut exif_rot = None;
let (dynamic, icc, dpi) = match format {
RasterFormat::Exchange(format) => {
fn decode<T: ImageDecoder>(
@ -85,6 +87,7 @@ impl RasterImage {
// Apply rotation from EXIF metadata.
if let Some(rotation) = exif.as_ref().and_then(exif_rotation) {
apply_rotation(&mut dynamic, rotation);
exif_rot = Some(rotation);
}
// Extract pixel density.
@ -136,7 +139,14 @@ impl RasterImage {
}
};
Ok(Self(Arc::new(Repr { data, format, dynamic, icc, dpi })))
Ok(Self(Arc::new(Repr {
data,
format,
exif_rotation: exif_rot,
dynamic: Arc::new(dynamic),
icc,
dpi,
})))
}
/// The raw image data.
@ -159,6 +169,11 @@ impl RasterImage {
self.dynamic().height()
}
/// The rotation that was applied to the image based on its EXIF
/// orientation metadata, if any.
pub fn exif_rotation(&self) -> Option<u32> {
self.0.exif_rotation
}
/// The image's pixel density in pixels per inch, if known.
///
/// This is guaranteed to be positive.
@ -167,7 +182,7 @@ impl RasterImage {
}
/// Access the underlying dynamic image.
pub fn dynamic(&self) -> &image::DynamicImage {
pub fn dynamic(&self) -> &Arc<DynamicImage> {
&self.0.dynamic
}
@ -325,12 +340,12 @@ fn apply_rotation(image: &mut DynamicImage, rotation: u32) {
ops::flip_horizontal_in_place(image);
*image = image.rotate270();
}
6 => *image = image.rotate90(),
6 => *image = image.rotate270(),
7 => {
ops::flip_horizontal_in_place(image);
*image = image.rotate90();
}
8 => *image = image.rotate270(),
8 => *image = image.rotate90(),
_ => {}
}
}

View File

@ -106,7 +106,7 @@ pub struct RectElem {
pub radius: Corners<Option<Rel<Length>>>,
/// How much to pad the rectangle's content.
/// See the [box's documentation]($box.outset) for more details.
/// See the [box's documentation]($box.inset) for more details.
#[resolve]
#[fold]
#[default(Sides::splat(Some(Abs::pt(5.0).into())))]

View File

@ -4,5 +4,5 @@ equation = Rovnice
bibliography = Bibliografie
heading = Kapitola
outline = Obsah
raw = Seznam
raw = Výpis
page = strana

View File

@ -0,0 +1,8 @@
figure = Gambar
table = Tabel
equation = Persamaan
bibliography = Daftar Pustaka
heading = Bagian
outline = Daftar Isi
raw = Kode
page = halaman

View File

@ -19,20 +19,14 @@ typst-macros = { workspace = true }
typst-syntax = { workspace = true }
typst-timing = { workspace = true }
typst-utils = { workspace = true }
arrayvec = { workspace = true }
base64 = { workspace = true }
bytemuck = { workspace = true }
comemo = { workspace = true }
ecow = { workspace = true }
image = { workspace = true }
indexmap = { workspace = true }
miniz_oxide = { workspace = true }
pdf-writer = { workspace = true }
infer = { workspace = true }
krilla = { workspace = true }
krilla-svg = { workspace = true }
serde = { workspace = true }
subsetter = { workspace = true }
svg2pdf = { workspace = true }
ttf-parser = { workspace = true }
xmp-writer = { workspace = true }
[lints]
workspace = true

View File

@ -1,385 +0,0 @@
use std::num::NonZeroUsize;
use ecow::eco_format;
use pdf_writer::types::Direction;
use pdf_writer::writers::PageLabel;
use pdf_writer::{Finish, Name, Pdf, Ref, Str, TextStr};
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::{Datetime, Smart};
use typst_library::layout::Dir;
use typst_library::text::Lang;
use typst_syntax::Span;
use xmp_writer::{DateTime, LangId, RenditionClass, XmpWriter};
use crate::page::PdfPageLabel;
use crate::{hash_base64, outline, TextStrExt, Timestamp, Timezone, WithEverything};
/// Write the document catalog.
///
/// This writes the outline tree, page labels, document information
/// dictionary, XMP metadata stream, and finally the catalog dictionary
/// itself, allocating references from `alloc` as needed.
pub fn write_catalog(
    ctx: WithEverything,
    pdf: &mut Pdf,
    alloc: &mut Ref,
) -> SourceResult<()> {
    // Determine the document's main language as the most frequently used one
    // and derive the reading direction from it.
    let lang = ctx
        .resources
        .languages
        .iter()
        .max_by_key(|(_, &count)| count)
        .map(|(&l, _)| l);

    let dir = if lang.map(Lang::dir) == Some(Dir::RTL) {
        Direction::R2L
    } else {
        Direction::L2R
    };

    // Write the outline tree.
    let outline_root_id = outline::write_outline(pdf, alloc, &ctx);

    // Write the page labels.
    let page_labels = write_page_labels(pdf, alloc, &ctx);

    // Write the document information.
    let info_ref = alloc.bump();
    let mut info = pdf.document_info(info_ref);
    let mut xmp = XmpWriter::new();
    if let Some(title) = &ctx.document.info.title {
        info.title(TextStr::trimmed(title));
        xmp.title([(None, title.as_str())]);
    }

    if let Some(description) = &ctx.document.info.description {
        info.subject(TextStr::trimmed(description));
        xmp.description([(None, description.as_str())]);
    }

    let authors = &ctx.document.info.author;
    if !authors.is_empty() {
        // Turns out that if the authors are given in both the document
        // information dictionary and the XMP metadata, Acrobat takes a little
        // bit of both: The first author from the document information
        // dictionary and the remaining authors from the XMP metadata.
        //
        // To fix this for Acrobat, we could omit the remaining authors or all
        // metadata from the document information catalog (it is optional) and
        // only write XMP. However, not all other tools (including Apple
        // Preview) read the XMP data. This means we do want to include all
        // authors in the document information dictionary.
        //
        // Thus, the only alternative is to fold all authors into a single
        // `<rdf:li>` in the XMP metadata. This is, in fact, exactly what the
        // PDF/A spec Part 1 section 6.7.3 has to say about the matter. It's a
        // bit weird to not use the array (and it makes Acrobat show the author
        // list in quotes), but there's not much we can do about that.
        let joined = authors.join(", ");
        info.author(TextStr::trimmed(&joined));
        xmp.creator([joined.as_str()]);
    }

    let creator = eco_format!("Typst {}", env!("CARGO_PKG_VERSION"));
    info.creator(TextStr(&creator));
    xmp.creator_tool(&creator);

    let keywords = &ctx.document.info.keywords;
    if !keywords.is_empty() {
        let joined = keywords.join(", ");
        info.keywords(TextStr::trimmed(&joined));
        xmp.pdf_keywords(&joined);
    }

    let (date, tz) = document_date(ctx.document.info.date, ctx.options.timestamp);
    if let Some(pdf_date) = date.and_then(|date| pdf_date(date, tz)) {
        info.creation_date(pdf_date);
        info.modified_date(pdf_date);
    }

    info.finish();

    // A unique ID for this instance of the document. Changes if anything
    // changes in the frames.
    let instance_id = hash_base64(&pdf.as_bytes());

    // Determine the document's ID. It should be as stable as possible.
    const PDF_VERSION: &str = "PDF-1.7";
    let doc_id = if let Smart::Custom(ident) = ctx.options.ident {
        // We were provided with a stable ID. Yay!
        hash_base64(&(PDF_VERSION, ident))
    } else if ctx.document.info.title.is_some() && !ctx.document.info.author.is_empty() {
        // If not provided from the outside, but title and author were given, we
        // compute a hash of them, which should be reasonably stable and unique.
        hash_base64(&(PDF_VERSION, &ctx.document.info.title, &ctx.document.info.author))
    } else {
        // The user provided no usable metadata which we can use as an `/ID`.
        instance_id.clone()
    };

    xmp.document_id(&doc_id);
    xmp.instance_id(&instance_id);
    xmp.format("application/pdf");
    xmp.pdf_version("1.7");
    xmp.language(ctx.resources.languages.keys().map(|lang| LangId(lang.as_str())));
    xmp.num_pages(ctx.document.pages.len() as u32);
    xmp.rendition_class(RenditionClass::Proof);

    if let Some(xmp_date) = date.and_then(|date| xmp_date(date, tz)) {
        xmp.create_date(xmp_date);
        xmp.modify_date(xmp_date);
        if ctx.options.standards.pdfa {
            // Record a creation/conversion history for PDF/A.
            let mut history = xmp.history();
            history
                .add_event()
                .action(xmp_writer::ResourceEventAction::Saved)
                .when(xmp_date)
                .instance_id(&eco_format!("{instance_id}_source"));
            history
                .add_event()
                .action(xmp_writer::ResourceEventAction::Converted)
                .when(xmp_date)
                .instance_id(&instance_id)
                .software_agent(&creator);
        }
    }

    // Declare PDF/A conformance via XMP extension schemas.
    if let Some((part, conformance)) = ctx.options.standards.pdfa_part {
        let mut extension_schemas = xmp.extension_schemas();
        extension_schemas
            .xmp_media_management()
            .properties()
            .describe_instance_id();
        extension_schemas.pdf().properties().describe_all();
        extension_schemas.finish();
        xmp.pdfa_part(part);
        xmp.pdfa_conformance(conformance);
    }

    let xmp_buf = xmp.finish(None);
    let meta_ref = alloc.bump();
    pdf.stream(meta_ref, xmp_buf.as_bytes())
        .pair(Name(b"Type"), Name(b"Metadata"))
        .pair(Name(b"Subtype"), Name(b"XML"));

    // Set IDs only now, so that we don't need to clone them.
    pdf.set_file_id((doc_id.into_bytes(), instance_id.into_bytes()));

    // Write the document catalog.
    let catalog_ref = alloc.bump();
    let mut catalog = pdf.catalog(catalog_ref);
    catalog.pages(ctx.page_tree_ref);
    catalog.viewer_preferences().direction(dir);
    catalog.metadata(meta_ref);

    let has_dests = !ctx.references.named_destinations.dests.is_empty();
    let has_embeddings = !ctx.references.embedded_files.is_empty();

    // Write the `/Names` dictionary.
    if has_dests || has_embeddings {
        // Write the named destination tree if there are any entries.
        let mut name_dict = catalog.names();
        if has_dests {
            let mut dests_name_tree = name_dict.destinations();
            let mut names = dests_name_tree.names();
            for &(name, dest_ref, ..) in &ctx.references.named_destinations.dests {
                names.insert(Str(name.resolve().as_bytes()), dest_ref);
            }
        }
        if has_embeddings {
            let mut embedded_files = name_dict.embedded_files();
            let mut names = embedded_files.names();
            for (name, file_ref) in &ctx.references.embedded_files {
                names.insert(Str(name.as_bytes()), *file_ref);
            }
        }
    }

    if has_embeddings && ctx.options.standards.pdfa {
        // PDF 2.0, but ISO 19005-3 (PDF/A-3) Annex E allows it for PDF/A-3.
        let mut associated_files = catalog.insert(Name(b"AF")).array().typed();
        for (_, file_ref) in ctx.references.embedded_files {
            associated_files.item(file_ref).finish();
        }
    }

    // Insert the page labels.
    if !page_labels.is_empty() {
        let mut num_tree = catalog.page_labels();
        let mut entries = num_tree.nums();
        for (n, r) in &page_labels {
            entries.insert(n.get() as i32 - 1, *r);
        }
    }

    if let Some(outline_root_id) = outline_root_id {
        catalog.outlines(outline_root_id);
    }

    if let Some(lang) = lang {
        catalog.lang(TextStr(lang.as_str()));
    }

    if ctx.options.standards.pdfa {
        // PDF/A requires an sRGB output intent.
        catalog
            .output_intents()
            .push()
            .subtype(pdf_writer::types::OutputIntentSubtype::PDFA)
            .output_condition(TextStr("sRGB"))
            .output_condition_identifier(TextStr("Custom"))
            .info(TextStr("sRGB IEC61966-2.1"))
            .dest_output_profile(ctx.globals.color_functions.srgb.unwrap());
    }

    catalog.finish();

    // PDF/A limits the number of indirect objects in a file.
    if ctx.options.standards.pdfa && pdf.refs().count() > 8388607 {
        bail!(Span::detached(), "too many PDF objects");
    }

    Ok(())
}
/// Write the page labels.
///
/// Returns, for each run of pages that starts a new labeling scheme, the
/// 1-based page number together with the reference of the written
/// `PageLabel` object.
pub(crate) fn write_page_labels(
    chunk: &mut Pdf,
    alloc: &mut Ref,
    ctx: &WithEverything,
) -> Vec<(NonZeroUsize, Ref)> {
    // If no exported page has a non-trivial label, skip writing labels
    // entirely.
    if !ctx.pages.iter().filter_map(Option::as_ref).any(|p| {
        p.label
            .as_ref()
            .is_some_and(|l| l.prefix.is_some() || l.style.is_some())
    }) {
        return Vec::new();
    }

    let empty_label = PdfPageLabel::default();
    let mut result = vec![];
    let mut prev: Option<&PdfPageLabel> = None;

    // Skip non-exported pages for numbering.
    for (i, page) in ctx.pages.iter().filter_map(Option::as_ref).enumerate() {
        let nr = NonZeroUsize::new(1 + i).unwrap();
        // If there are pages with empty labels between labeled pages, we must
        // write empty PageLabel entries.
        let label = page.label.as_ref().unwrap_or(&empty_label);

        if let Some(pre) = prev {
            // A label that merely continues the previous numbering run needs
            // no new PageLabel entry.
            if label.prefix == pre.prefix
                && label.style == pre.style
                && label.offset == pre.offset.map(|n| n.saturating_add(1))
            {
                prev = Some(label);
                continue;
            }
        }

        let id = alloc.bump();
        let mut entry = chunk.indirect(id).start::<PageLabel>();

        // Only add what is actually provided. Don't add empty prefix string if
        // it wasn't given for example.
        if let Some(prefix) = &label.prefix {
            entry.prefix(TextStr::trimmed(prefix));
        }

        if let Some(style) = label.style {
            entry.style(style.to_pdf_numbering_style());
        }

        if let Some(offset) = label.offset {
            entry.offset(offset.get() as i32);
        }

        result.push((nr, id));
        prev = Some(label);
    }

    result
}
/// Resolve the document date.
///
/// Precedence:
/// 1. An explicit `document.date` (a specific `datetime` or `none`) wins.
/// 2. If it is `auto` or unset, fall back to the timestamp from the options.
/// 3. Otherwise, no date metadata is written.
pub fn document_date(
    document_date: Smart<Option<Datetime>>,
    timestamp: Option<Timestamp>,
) -> (Option<Datetime>, Option<Timezone>) {
    // An explicit user-provided date carries no timezone information.
    if let Smart::Custom(date) = document_date {
        return (date, None);
    }

    // `document_date` is `Smart::Auto` here; use the timestamp if present.
    match timestamp {
        Some(stamp) => (Some(stamp.datetime), Some(stamp.timezone)),
        None => (None, None),
    }
}
/// Converts a datetime to a pdf-writer date.
///
/// Returns `None` for negative years, which PDF dates cannot represent.
pub fn pdf_date(datetime: Datetime, tz: Option<Timezone>) -> Option<pdf_writer::Date> {
    let year = datetime.year().filter(|&y| y >= 0)? as u16;

    // Attach each optional component, rebinding the builder as we go.
    let date = pdf_writer::Date::new(year);
    let date = match datetime.month() {
        Some(month) => date.month(month),
        None => date,
    };
    let date = match datetime.day() {
        Some(day) => date.day(day),
        None => date,
    };
    let date = match datetime.hour() {
        Some(hour) => date.hour(hour),
        None => date,
    };
    let date = match datetime.minute() {
        Some(minute) => date.minute(minute),
        None => date,
    };
    let date = match datetime.second() {
        Some(second) => date.second(second),
        None => date,
    };

    // Encode the timezone as a UTC offset, if one is known.
    let date = match tz {
        Some(Timezone::UTC) => date.utc_offset_hour(0).utc_offset_minute(0),
        Some(Timezone::Local { hour_offset, minute_offset }) => {
            date.utc_offset_hour(hour_offset).utc_offset_minute(minute_offset)
        }
        None => date,
    };

    Some(date)
}
/// Converts a datetime to an xmp-writer datetime.
///
/// Returns `None` for negative years, which cannot be represented here.
fn xmp_date(
    datetime: Datetime,
    timezone: Option<Timezone>,
) -> Option<xmp_writer::DateTime> {
    let year = datetime.year().filter(|&y| y >= 0)? as u16;
    let timezone = timezone.map(|tz| match tz {
        Timezone::UTC => xmp_writer::Timezone::Utc,
        Timezone::Local { hour_offset, minute_offset } => {
            // The xmp-writer use signed integers for the minute offset, which
            // can be buggy if the minute offset is negative. And because our
            // minute_offset is ensured to be `0 <= minute_offset < 60`, we can
            // safely cast it to a signed integer.
            xmp_writer::Timezone::Local { hour: hour_offset, minute: minute_offset as i8 }
        }
    });
    Some(DateTime {
        year,
        month: datetime.month(),
        day: datetime.day(),
        hour: datetime.hour(),
        minute: datetime.minute(),
        second: datetime.second(),
        timezone,
    })
}

View File

@ -1,394 +0,0 @@
use std::sync::LazyLock;
use arrayvec::ArrayVec;
use pdf_writer::{writers, Chunk, Dict, Filter, Name, Ref};
use typst_library::diag::{bail, SourceResult};
use typst_library::visualize::{Color, ColorSpace, Paint};
use typst_syntax::Span;
use crate::{content, deflate, PdfChunk, PdfOptions, Renumber, WithResources};
// The names under which the color spaces are registered in resource
// dictionaries.
pub const SRGB: Name<'static> = Name(b"srgb");
pub const D65_GRAY: Name<'static> = Name(b"d65gray");
pub const LINEAR_SRGB: Name<'static> = Name(b"linearrgb");

// The ICC profiles, deflate-compressed once and cached for reuse.
static SRGB_ICC_DEFLATED: LazyLock<Vec<u8>> =
    LazyLock::new(|| deflate(typst_assets::icc::S_RGB_V4));
static GRAY_ICC_DEFLATED: LazyLock<Vec<u8>> =
    LazyLock::new(|| deflate(typst_assets::icc::S_GREY_V4));
/// The color spaces present in the PDF document
#[derive(Default)]
pub struct ColorSpaces {
    // Whether the sRGB color space is used anywhere.
    use_srgb: bool,
    // Whether the D65 gray color space is used anywhere.
    use_d65_gray: bool,
    // Whether the linear RGB color space is used anywhere.
    use_linear_rgb: bool,
}
impl ColorSpaces {
    /// Mark a color space as used.
    pub fn mark_as_used(&mut self, color_space: ColorSpace) {
        match color_space {
            // All of these are represented via sRGB in the output.
            ColorSpace::Oklch
            | ColorSpace::Oklab
            | ColorSpace::Hsl
            | ColorSpace::Hsv
            | ColorSpace::Srgb => {
                self.use_srgb = true;
            }
            ColorSpace::D65Gray => {
                self.use_d65_gray = true;
            }
            ColorSpace::LinearRgb => {
                self.use_linear_rgb = true;
            }
            // CMYK is written as a device color space and needs no tracking.
            ColorSpace::Cmyk => {}
        }
    }

    /// Write the color spaces to the PDF file.
    pub fn write_color_spaces(&self, mut spaces: Dict, refs: &ColorFunctionRefs) {
        if self.use_srgb {
            write(ColorSpace::Srgb, spaces.insert(SRGB).start(), refs);
        }

        if self.use_d65_gray {
            write(ColorSpace::D65Gray, spaces.insert(D65_GRAY).start(), refs);
        }

        if self.use_linear_rgb {
            write(ColorSpace::LinearRgb, spaces.insert(LINEAR_SRGB).start(), refs);
        }
    }

    /// Write the necessary color spaces functions and ICC profiles to the
    /// PDF file.
    pub fn write_functions(&self, chunk: &mut Chunk, refs: &ColorFunctionRefs) {
        // Write the sRGB color space.
        if let Some(id) = refs.srgb {
            chunk
                .icc_profile(id, &SRGB_ICC_DEFLATED)
                .n(3)
                .range([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
                .filter(Filter::FlateDecode);
        }

        // Write the gray color space.
        if let Some(id) = refs.d65_gray {
            chunk
                .icc_profile(id, &GRAY_ICC_DEFLATED)
                .n(1)
                .range([0.0, 1.0])
                .filter(Filter::FlateDecode);
        }
    }

    /// Merge two color space usage information together: a given color space is
    /// considered to be used if it is used on either side.
    pub fn merge(&mut self, other: &Self) {
        self.use_d65_gray |= other.use_d65_gray;
        self.use_linear_rgb |= other.use_linear_rgb;
        self.use_srgb |= other.use_srgb;
    }
}
/// Write the color space.
pub fn write(
    color_space: ColorSpace,
    writer: writers::ColorSpace,
    refs: &ColorFunctionRefs,
) {
    match color_space {
        // All of these are represented by the sRGB ICC profile.
        ColorSpace::Srgb
        | ColorSpace::Oklab
        | ColorSpace::Hsl
        | ColorSpace::Hsv
        | ColorSpace::Oklch => writer.icc_based(refs.srgb.unwrap()),
        ColorSpace::D65Gray => writer.icc_based(refs.d65_gray.unwrap()),
        ColorSpace::LinearRgb => {
            // A CalRGB space with a D65 white point, unit (linear) gamma,
            // and an RGB-to-XYZ conversion matrix.
            writer.cal_rgb(
                [0.9505, 1.0, 1.0888],
                None,
                Some([1.0, 1.0, 1.0]),
                Some([
                    0.4124, 0.2126, 0.0193, 0.3576, 0.715, 0.1192, 0.1805, 0.0722, 0.9505,
                ]),
            );
        }
        ColorSpace::Cmyk => writer.device_cmyk(),
    }
}
/// Global references for color conversion functions.
///
/// These functions are only written once (at most, they are not written if not
/// needed) in the final document, and are shared by all color space
/// dictionaries.
pub struct ColorFunctionRefs {
    // Reference to the sRGB ICC profile stream, if that space is used.
    pub srgb: Option<Ref>,
    // Reference to the D65 gray ICC profile stream, if that space is used.
    d65_gray: Option<Ref>,
}
impl Renumber for ColorFunctionRefs {
    /// Shifts every allocated reference by the given offset.
    fn renumber(&mut self, offset: i32) {
        for slot in [&mut self.srgb, &mut self.d65_gray] {
            if let Some(reference) = slot {
                reference.renumber(offset);
            }
        }
    }
}
/// Allocate all necessary [`ColorFunctionRefs`].
pub fn alloc_color_functions_refs(
    context: &WithResources,
) -> SourceResult<(PdfChunk, ColorFunctionRefs)> {
    let mut chunk = PdfChunk::new();
    let mut used_color_spaces = ColorSpaces::default();

    // PDF/A output always needs the sRGB profile (it is referenced by the
    // output intent in the catalog).
    if context.options.standards.pdfa {
        used_color_spaces.mark_as_used(ColorSpace::Srgb);
    }

    // Collect the color spaces used anywhere in the document's resources.
    context.resources.traverse(&mut |r| {
        used_color_spaces.merge(&r.colors);
        Ok(())
    })?;

    // Only allocate references for the spaces that are actually used.
    let refs = ColorFunctionRefs {
        srgb: if used_color_spaces.use_srgb { Some(chunk.alloc()) } else { None },
        d65_gray: if used_color_spaces.use_d65_gray { Some(chunk.alloc()) } else { None },
    };

    Ok((chunk, refs))
}
/// Encodes the color into four f32s, which can be used in a PDF file.
/// Ensures that the values are in the range [0.0, 1.0].
///
/// # Why?
/// - Oklab: The a and b components are in the range [-0.5, 0.5] and the PDF
///   specifies (and some readers enforce) that all color values be in the range
///   [0.0, 1.0]. This means that the PostScript function and the encoded color
///   must be offset by 0.5.
/// - HSV/HSL: The hue component is in the range [0.0, 360.0] and the PDF format
///   specifies that it must be in the range [0.0, 1.0]. This means that the
///   PostScript function and the encoded color must be divided by 360.0.
pub trait ColorEncode {
    /// Performs the color to PDF f32 array conversion.
    fn encode(&self, color: Color) -> [f32; 4];
}
impl ColorEncode for ColorSpace {
    fn encode(&self, color: Color) -> [f32; 4] {
        // Oklab-like and hue-based spaces are stored as sRGB in the PDF;
        // every other space encodes in itself.
        let target = match self {
            ColorSpace::Oklab
            | ColorSpace::Oklch
            | ColorSpace::Hsl
            | ColorSpace::Hsv => ColorSpace::Srgb,
            _ => *self,
        };
        color.to_space(target).to_vec4()
    }
}
/// Encodes a paint into either a fill or stroke color.
pub(super) trait PaintEncode {
    /// Set the paint as the fill color.
    fn set_as_fill(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()>;

    /// Set the paint as the stroke color.
    fn set_as_stroke(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()>;
}
impl PaintEncode for Paint {
    fn set_as_fill(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()> {
        // Delegate to the concrete paint kind.
        match self {
            Self::Solid(c) => c.set_as_fill(ctx, on_text, transforms),
            Self::Gradient(gradient) => gradient.set_as_fill(ctx, on_text, transforms),
            Self::Tiling(tiling) => tiling.set_as_fill(ctx, on_text, transforms),
        }
    }

    fn set_as_stroke(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()> {
        // Delegate to the concrete paint kind.
        match self {
            Self::Solid(c) => c.set_as_stroke(ctx, on_text, transforms),
            Self::Gradient(gradient) => gradient.set_as_stroke(ctx, on_text, transforms),
            Self::Tiling(tiling) => tiling.set_as_stroke(ctx, on_text, transforms),
        }
    }
}
impl PaintEncode for Color {
    fn set_as_fill(
        &self,
        ctx: &mut content::Builder,
        _: bool,
        _: content::Transforms,
    ) -> SourceResult<()> {
        // Select the PDF color space matching the color's space, mark it as
        // used, and emit the encoded components.
        match self {
            Color::Luma(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
                ctx.set_fill_color_space(D65_GRAY);

                let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
                ctx.content.set_fill_color([l]);
            }
            Color::LinearRgb(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::LinearRgb);
                ctx.set_fill_color_space(LINEAR_SRGB);

                let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
                ctx.content.set_fill_color([r, g, b]);
            }
            // Oklab & friends are encoded as RGB.
            Color::Rgb(_)
            | Color::Oklab(_)
            | Color::Oklch(_)
            | Color::Hsl(_)
            | Color::Hsv(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::Srgb);
                ctx.set_fill_color_space(SRGB);

                let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
                ctx.content.set_fill_color([r, g, b]);
            }
            Color::Cmyk(_) => {
                // CMYK is not allowed under PDF/A export.
                check_cmyk_allowed(ctx.options)?;
                ctx.reset_fill_color_space();

                let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
                ctx.content.set_fill_cmyk(c, m, y, k);
            }
        }
        Ok(())
    }

    fn set_as_stroke(
        &self,
        ctx: &mut content::Builder,
        _: bool,
        _: content::Transforms,
    ) -> SourceResult<()> {
        // Mirrors `set_as_fill`, but for the stroke color state.
        match self {
            Color::Luma(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
                ctx.set_stroke_color_space(D65_GRAY);

                let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
                ctx.content.set_stroke_color([l]);
            }
            Color::LinearRgb(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::LinearRgb);
                ctx.set_stroke_color_space(LINEAR_SRGB);

                let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
                ctx.content.set_stroke_color([r, g, b]);
            }
            // Oklab & friends are encoded as RGB.
            Color::Rgb(_)
            | Color::Oklab(_)
            | Color::Oklch(_)
            | Color::Hsl(_)
            | Color::Hsv(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::Srgb);
                ctx.set_stroke_color_space(SRGB);

                let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
                ctx.content.set_stroke_color([r, g, b]);
            }
            Color::Cmyk(_) => {
                // CMYK is not allowed under PDF/A export.
                check_cmyk_allowed(ctx.options)?;
                ctx.reset_stroke_color_space();

                let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
                ctx.content.set_stroke_cmyk(c, m, y, k);
            }
        }
        Ok(())
    }
}
/// Extra color space functions.
pub(super) trait ColorSpaceExt {
    /// Returns the range of the color space, as flattened (min, max) pairs,
    /// one pair per component.
    fn range(self) -> &'static [f32];

    /// Converts a color to the color space, quantizing each component.
    fn convert<U: QuantizedColor>(self, color: Color) -> ArrayVec<U, 4>;
}
impl ColorSpaceExt for ColorSpace {
    fn range(self) -> &'static [f32] {
        // Every component lives in [0, 1]; only the number of components
        // differs between the spaces.
        match self {
            ColorSpace::D65Gray => &[0.0, 1.0],
            ColorSpace::Cmyk => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
            ColorSpace::Oklab
            | ColorSpace::Oklch
            | ColorSpace::LinearRgb
            | ColorSpace::Srgb
            | ColorSpace::Hsl
            | ColorSpace::Hsv => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
        }
    }

    fn convert<U: QuantizedColor>(self, color: Color) -> ArrayVec<U, 4> {
        // Quantize each encoded component into its (min, max) pair.
        let components = self.encode(color);
        self.range()
            .chunks_exact(2)
            .zip(components)
            .map(|(pair, component)| U::quantize(component, [pair[0], pair[1]]))
            .collect()
    }
}
/// Quantizes a color component to a specific type.
pub(super) trait QuantizedColor {
    /// Maps `color` from `[min, max]` into this type's representation.
    fn quantize(color: f32, range: [f32; 2]) -> Self;
}

impl QuantizedColor for u16 {
    fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
        // Normalize into [0, 1], then scale to the full u16 range,
        // clamping out-of-range inputs.
        let normalized = (color - min) / (max - min);
        let scaled = (normalized * f32::from(Self::MAX)).round();
        scaled.clamp(0.0, f32::from(Self::MAX)) as Self
    }
}

impl QuantizedColor for f32 {
    fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
        // Floats are stored directly, merely clamped into range.
        color.clamp(min, max)
    }
}
/// Fails with an error if PDF/A processing is enabled.
pub(super) fn check_cmyk_allowed(options: &PdfOptions) -> SourceResult<()> {
    // CMYK colors are only allowed outside of PDF/A mode.
    if !options.standards.pdfa {
        return Ok(());
    }
    bail!(
        Span::detached(),
        "cmyk colors are not currently supported by PDF/A export"
    )
}

View File

@ -1,344 +0,0 @@
//! OpenType fonts generally define monochrome glyphs, but they can also define
//! glyphs with colors. This is how emojis are generally implemented for
//! example.
//!
//! There are various standards to represent color glyphs, but PDF readers don't
//! support any of them natively, so Typst has to handle them manually.
use std::collections::HashMap;
use ecow::eco_format;
use indexmap::IndexMap;
use pdf_writer::types::UnicodeCmap;
use pdf_writer::writers::WMode;
use pdf_writer::{Filter, Finish, Name, Rect, Ref};
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::Repr;
use typst_library::layout::Em;
use typst_library::text::color::glyph_frame;
use typst_library::text::{Font, Glyph, TextItemView};
use crate::font::{base_font_name, write_font_descriptor, CMAP_NAME, SYSTEM_INFO};
use crate::resources::{Resources, ResourcesRefs};
use crate::{content, EmExt, PdfChunk, PdfOptions, WithGlobalRefs};
/// Write color fonts in the PDF document.
///
/// They are written as Type3 fonts, which map glyph IDs to arbitrary PDF
/// instructions.
pub fn write_color_fonts(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<ColorFontSlice, Ref>)> {
    let mut out = HashMap::new();
    let mut chunk = PdfChunk::new();
    context.resources.traverse(&mut |resources: &Resources| {
        let Some(color_fonts) = &resources.color_fonts else {
            return Ok(());
        };

        for (color_font, font_slice) in color_fonts.iter() {
            // Skip slices that were already written for another resource set.
            if out.contains_key(&font_slice) {
                continue;
            }

            // Allocate some IDs.
            let subfont_id = chunk.alloc();
            let cmap_ref = chunk.alloc();
            let descriptor_ref = chunk.alloc();
            let widths_ref = chunk.alloc();

            // And a map between glyph IDs and the instructions to draw this
            // glyph.
            let mut glyphs_to_instructions = Vec::new();

            // Each subfont covers a 256-glyph window of the color font, since
            // a Type3 font can encode at most 256 glyphs.
            let start = font_slice.subfont * 256;
            let end = (start + 256).min(color_font.glyphs.len());
            let glyph_count = end - start;
            let subset = &color_font.glyphs[start..end];
            let mut widths = Vec::new();
            let mut gids = Vec::new();

            let scale_factor = font_slice.font.ttf().units_per_em() as f32;

            // Write the instructions for each glyph.
            for color_glyph in subset {
                let instructions_stream_ref = chunk.alloc();
                let width = font_slice
                    .font
                    .advance(color_glyph.gid)
                    .unwrap_or(Em::new(0.0))
                    .get() as f32
                    * scale_factor;
                widths.push(width);
                chunk
                    .stream(
                        instructions_stream_ref,
                        color_glyph.instructions.content.wait(),
                    )
                    .filter(Filter::FlateDecode);

                // Use this stream as instructions to draw the glyph.
                glyphs_to_instructions.push(instructions_stream_ref);
                gids.push(color_glyph.gid);
            }

            // Determine the base font name.
            gids.sort();
            let base_font = base_font_name(&font_slice.font, &gids);

            // Write the Type3 font object.
            let mut pdf_font = chunk.type3_font(subfont_id);
            pdf_font.name(Name(base_font.as_bytes()));
            pdf_font.pair(Name(b"Resources"), color_fonts.resources.reference);
            pdf_font.bbox(color_font.bbox);
            // The font matrix maps glyph space (font units) to text space.
            pdf_font.matrix([1.0 / scale_factor, 0.0, 0.0, 1.0 / scale_factor, 0.0, 0.0]);
            pdf_font.first_char(0);
            pdf_font.last_char((glyph_count - 1) as u8);
            pdf_font.pair(Name(b"Widths"), widths_ref);
            pdf_font.to_unicode(cmap_ref);
            pdf_font.font_descriptor(descriptor_ref);

            // Write the /CharProcs dictionary, that maps glyph names to
            // drawing instructions.
            let mut char_procs = pdf_font.char_procs();
            for (gid, instructions_ref) in glyphs_to_instructions.iter().enumerate() {
                char_procs
                    .pair(Name(eco_format!("glyph{gid}").as_bytes()), *instructions_ref);
            }
            char_procs.finish();

            // Write the /Encoding dictionary.
            let names = (0..glyph_count)
                .map(|gid| eco_format!("glyph{gid}"))
                .collect::<Vec<_>>();
            pdf_font
                .encoding_custom()
                .differences()
                .consecutive(0, names.iter().map(|name| Name(name.as_bytes())));
            pdf_font.finish();

            // Encode a CMAP to make it possible to search or copy glyphs.
            let glyph_set = resources.color_glyph_sets.get(&font_slice.font).unwrap();
            let mut cmap = UnicodeCmap::new(CMAP_NAME, SYSTEM_INFO);
            for (index, glyph) in subset.iter().enumerate() {
                let Some(text) = glyph_set.get(&glyph.gid) else {
                    continue;
                };

                if !text.is_empty() {
                    cmap.pair_with_multiple(index as u8, text.chars());
                }
            }
            chunk.cmap(cmap_ref, &cmap.finish()).writing_mode(WMode::Horizontal);

            // Write the font descriptor.
            write_font_descriptor(
                &mut chunk,
                descriptor_ref,
                &font_slice.font,
                &base_font,
            );

            // Write the widths array
            chunk.indirect(widths_ref).array().items(widths);

            out.insert(font_slice, subfont_id);
        }

        Ok(())
    })?;

    Ok((chunk, out))
}
/// A mapping between `Font`s and all the corresponding `ColorFont`s.
///
/// This mapping is one-to-many because there can only be 256 glyphs in a Type 3
/// font, and fonts generally have more color glyphs than that.
pub struct ColorFontMap<R> {
    /// The mapping itself.
    map: IndexMap<Font, ColorFont>,
    /// The resources required to render the fonts in this map.
    ///
    /// For example, this can be the images for glyphs based on bitmaps or SVG.
    pub resources: Resources<R>,
    /// The number of font slices (groups of 256 color glyphs), across all color
    /// fonts.
    ///
    /// Used to hand out globally unique slice IDs (the numbers after "Cf").
    total_slice_count: usize,
}
/// A collection of Type3 fonts, all belonging to the same TTF font.
pub struct ColorFont {
    /// The IDs of each sub-slice of this font. They are the numbers after "Cf"
    /// in the Resources dictionaries.
    slice_ids: Vec<usize>,
    /// The list of all color glyphs in this family.
    ///
    /// The index in this vector modulo 256 corresponds to the index in one of
    /// the Type3 fonts in `slice_ids` (the `n`-th in the vector, where `n` is
    /// the quotient of the index divided by 256).
    pub glyphs: Vec<ColorGlyph>,
    /// The global bounding box of the font.
    pub bbox: Rect,
    /// A mapping between glyph IDs and character indices in the `glyphs`
    /// vector.
    glyph_indices: HashMap<u16, usize>,
}
/// A single color glyph.
pub struct ColorGlyph {
    /// The ID of the glyph in the original TTF font.
    pub gid: u16,
    /// Instructions to draw the glyph, encoded as a PDF content stream.
    pub instructions: content::Encoded,
}
impl ColorFontMap<()> {
    /// Creates a new empty mapping.
    pub fn new() -> Self {
        Self {
            map: IndexMap::new(),
            total_slice_count: 0,
            resources: Resources::default(),
        }
    }

    /// For a given glyph in a TTF font, give the ID of the Type3 font and the
    /// index of the glyph inside of this Type3 font.
    ///
    /// If this is the first occurrence of this glyph in this font, it will
    /// start its encoding and add it to the list of known glyphs.
    ///
    /// Returns an error if the glyph could not be converted (e.g. a tofu
    /// glyph under PDF/A, or a conversion failure in `content::build`).
    pub fn get(
        &mut self,
        options: &PdfOptions,
        text: &TextItemView,
        glyph: &Glyph,
    ) -> SourceResult<(usize, u8)> {
        let font = &text.item.font;
        let color_font = self.map.entry(font.clone()).or_insert_with(|| {
            // First glyph from this font: compute the font-wide bounding box
            // (in font units) for the Type3 /FontBBox entry.
            let global_bbox = font.ttf().global_bounding_box();
            let bbox = Rect::new(
                font.to_em(global_bbox.x_min).to_font_units(),
                font.to_em(global_bbox.y_min).to_font_units(),
                font.to_em(global_bbox.x_max).to_font_units(),
                font.to_em(global_bbox.y_max).to_font_units(),
            );
            ColorFont {
                bbox,
                slice_ids: Vec::new(),
                glyphs: Vec::new(),
                glyph_indices: HashMap::new(),
            }
        });

        Ok(if let Some(index_of_glyph) = color_font.glyph_indices.get(&glyph.id) {
            // If we already know this glyph, return it.
            (color_font.slice_ids[index_of_glyph / 256], *index_of_glyph as u8)
        } else {
            // Otherwise, allocate a new ColorGlyph in the font, and a new Type3 font
            // if needed
            let index = color_font.glyphs.len();
            if index % 256 == 0 {
                // Crossing a 256-glyph boundary: this glyph starts a new
                // slice, so allocate a fresh globally unique slice ID.
                color_font.slice_ids.push(self.total_slice_count);
                self.total_slice_count += 1;
            }

            let (frame, tofu) = glyph_frame(font, glyph.id);
            if options.standards.pdfa && tofu {
                // PDF/A forbids exporting glyphs that could not be rendered.
                bail!(failed_to_convert(text, glyph));
            }

            // Advance in font units (em advance scaled by units per em).
            let width = font.advance(glyph.id).unwrap_or(Em::new(0.0)).get()
                * font.units_per_em();
            let instructions = content::build(
                options,
                &mut self.resources,
                &frame,
                None,
                Some(width as f32),
            )?;
            color_font.glyphs.push(ColorGlyph { gid: glyph.id, instructions });
            color_font.glyph_indices.insert(glyph.id, index);

            (color_font.slice_ids[index / 256], index as u8)
        })
    }

    /// Assign references to the resource dictionary used by this set of color
    /// fonts.
    pub fn with_refs(self, refs: &ResourcesRefs) -> ColorFontMap<Ref> {
        ColorFontMap {
            map: self.map,
            resources: self.resources.with_refs(refs),
            total_slice_count: self.total_slice_count,
        }
    }
}
impl<R> ColorFontMap<R> {
    /// Returns an iterator over all Type3 fonts in this map.
    ///
    /// Every item yielded by the iterator corresponds to one Type3 font and
    /// thus contains at most 256 glyphs; a single TTF font may therefore be
    /// spread over several items.
    pub fn iter(&self) -> ColorFontMapIter<'_, R> {
        ColorFontMapIter { font_index: 0, slice_index: 0, map: self }
    }
}
/// Iterator over a [`ColorFontMap`].
///
/// See [`ColorFontMap::iter`].
pub struct ColorFontMapIter<'a, R> {
    /// The map over which to iterate.
    map: &'a ColorFontMap<R>,
    /// The index of the TTF font on which we currently iterate.
    font_index: usize,
    /// The sub-font (slice of at most 256 glyphs) at which we currently are,
    /// within the current TTF font.
    slice_index: usize,
}
impl<'a, R> Iterator for ColorFontMapIter<'a, R> {
    type Item = (&'a ColorFont, ColorFontSlice);

    fn next(&mut self) -> Option<Self::Item> {
        // `get_index` returns `None` once all fonts are exhausted, which
        // terminates the iteration.
        let (font, color_font) = self.map.map.get_index(self.font_index)?;

        // The number of 256-glyph slices needed for this font. Ceiling
        // division is required here: the previous `len / 256 + 1` formula
        // yielded one slice too many whenever the glyph count was an exact
        // multiple of 256, producing a phantom empty slice (which would then
        // underflow `glyph_count - 1` in `write_color_fonts`).
        let slice_count = color_font.glyphs.len().div_ceil(256);

        if self.slice_index >= slice_count {
            // This font is exhausted; move on to the next one.
            self.font_index += 1;
            self.slice_index = 0;
            return self.next();
        }

        let slice = ColorFontSlice { font: font.clone(), subfont: self.slice_index };
        self.slice_index += 1;
        Some((color_font, slice))
    }
}
/// A set of at most 256 glyphs (a limit imposed on Type3 fonts by the PDF
/// specification) that represents a part of a TTF font.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct ColorFontSlice {
    /// The original TTF font.
    pub font: Font,
    /// The index of the Type3 font, among all those that are necessary to
    /// represent the subset of the TTF font we are interested in.
    pub subfont: usize,
}
/// The error when the glyph could not be converted.
#[cold]
fn failed_to_convert(text: &TextItemView, glyph: &Glyph) -> SourceDiagnostic {
let mut diag = error!(
glyph.span.0,
"the glyph for {} could not be exported",
text.glyph_text(glyph).repr()
);
if text.item.font.ttf().tables().cff2.is_some() {
diag.hint("CFF2 fonts are not currently supported");
}
diag
}

View File

@ -1,823 +0,0 @@
//! Generic writer for PDF content.
//!
//! It is used to write page contents, color glyph instructions, and tilings.
//!
//! See also [`pdf_writer::Content`].
use ecow::eco_format;
use pdf_writer::types::{
ColorSpaceOperand, LineCapStyle, LineJoinStyle, TextRenderingMode,
};
use pdf_writer::writers::PositionedItems;
use pdf_writer::{Content, Finish, Name, Rect, Str};
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::Repr;
use typst_library::layout::{
Abs, Em, Frame, FrameItem, GroupItem, Point, Ratio, Size, Transform,
};
use typst_library::model::Destination;
use typst_library::text::color::should_outline;
use typst_library::text::{Font, Glyph, TextItem, TextItemView};
use typst_library::visualize::{
Curve, CurveItem, FillRule, FixedStroke, Geometry, Image, LineCap, LineJoin, Paint,
Shape,
};
use typst_syntax::Span;
use typst_utils::{Deferred, Numeric, SliceExt};
use crate::color::PaintEncode;
use crate::color_font::ColorFontMap;
use crate::extg::ExtGState;
use crate::image::deferred_image;
use crate::resources::Resources;
use crate::{deflate_deferred, AbsExt, ContentExt, EmExt, PdfOptions, StrExt};
/// Encode a [`Frame`] into a content stream.
///
/// The resources that were used in the stream will be added to `resources`.
///
/// `color_glyph_width` should be `None` unless the `Frame` represents a [color
/// glyph].
///
/// If `fill` is given, the frame's background is filled with it before the
/// frame contents are encoded.
///
/// [color glyph]: `crate::color_font`
pub fn build(
    options: &PdfOptions,
    resources: &mut Resources<()>,
    frame: &Frame,
    fill: Option<Paint>,
    color_glyph_width: Option<f32>,
) -> SourceResult<Encoded> {
    let size = frame.size();
    let mut ctx = Builder::new(options, resources, size);

    // Color glyph streams must start with the d0/d1 width operator.
    if let Some(width) = color_glyph_width {
        ctx.content.start_color_glyph(width);
    }

    // Make the coordinate system start at the top-left. This must happen
    // before anything is drawn.
    ctx.transform(
        // Make the Y axis go upwards
        Transform::scale(Ratio::one(), -Ratio::one())
            // Also move the origin to the top left corner
            .post_concat(Transform::translate(Abs::zero(), size.y)),
    );

    // Paint the background fill first so the contents draw on top of it.
    if let Some(fill) = fill {
        let shape = Geometry::Rect(frame.size()).filled(fill);
        write_shape(&mut ctx, Point::zero(), &shape)?;
    }

    // Encode the frame into the content stream.
    write_frame(&mut ctx, frame)?;

    Ok(Encoded {
        size,
        content: deflate_deferred(ctx.content.finish()),
        uses_opacities: ctx.uses_opacities,
        links: ctx.links,
    })
}
/// An encoded content stream.
pub struct Encoded {
    /// The dimensions of the content.
    pub size: Size,
    /// The actual content stream, deflated lazily.
    pub content: Deferred<Vec<u8>>,
    /// Whether the content uses opacities.
    pub uses_opacities: bool,
    /// Links in the PDF coordinate system.
    pub links: Vec<(Destination, Rect)>,
}
/// An exporter for a single PDF content stream.
///
/// Content streams are a series of PDF commands. They can reference external
/// objects only through resources.
///
/// Content streams can be used for page contents, but also to describe color
/// glyphs and tilings.
pub struct Builder<'a, R = ()> {
    /// Settings for PDF export.
    pub(crate) options: &'a PdfOptions<'a>,
    /// A list of all resources that are used in the content stream.
    pub(crate) resources: &'a mut Resources<R>,
    /// The PDF content stream that is being built.
    pub content: Content,
    /// Current graphic state, mirrored here to deduplicate operators.
    state: State,
    /// Stack of saved graphic states, pushed/popped alongside the PDF
    /// `q`/`Q` operators.
    saves: Vec<State>,
    /// Whether any stroke or fill was not totally opaque.
    uses_opacities: bool,
    /// All clickable links that are present in this content.
    links: Vec<(Destination, Rect)>,
}
impl<'a, R> Builder<'a, R> {
    /// Creates a fresh content builder for a stream of the given `size`.
    pub fn new(
        options: &'a PdfOptions<'a>,
        resources: &'a mut Resources<R>,
        size: Size,
    ) -> Self {
        Self {
            options,
            resources,
            content: Content::new(),
            state: State::new(size),
            saves: Vec::new(),
            links: Vec::new(),
            uses_opacities: false,
        }
    }
}
/// A simulated graphics state used to deduplicate graphics state changes and
/// keep track of the current transformation matrix for link annotations.
#[derive(Debug, Clone)]
struct State {
    /// The transform of the current item.
    transform: Transform,
    /// The transform of first hard frame in the hierarchy.
    container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    size: Size,
    /// The current font and font size.
    font: Option<(Font, Abs)>,
    /// The current fill paint.
    fill: Option<Paint>,
    /// The color space of the current fill paint.
    fill_space: Option<Name<'static>>,
    /// The current external graphic state.
    external_graphics_state: ExtGState,
    /// The current stroke paint.
    stroke: Option<FixedStroke>,
    /// The color space of the current stroke paint.
    stroke_space: Option<Name<'static>>,
    /// The current text rendering mode.
    text_rendering_mode: TextRenderingMode,
}
impl State {
    /// Creates a new, clean state for a given `size`.
    pub fn new(size: Size) -> Self {
        Self {
            size,
            transform: Transform::identity(),
            container_transform: Transform::identity(),
            font: None,
            fill: None,
            fill_space: None,
            stroke: None,
            stroke_space: None,
            external_graphics_state: ExtGState::default(),
            text_rendering_mode: TextRenderingMode::Fill,
        }
    }

    /// Creates the [`Transforms`] structure for the item at position `pos`.
    pub fn transforms(&self, size: Size, pos: Point) -> Transforms {
        let translation = Transform::translate(pos.x, pos.y);
        Transforms {
            transform: self.transform.pre_concat(translation),
            container_transform: self.container_transform,
            container_size: self.size,
            size,
        }
    }
}
/// Subset of the state used to calculate the transform of gradients and tilings.
#[derive(Debug, Clone, Copy)]
pub(super) struct Transforms {
    /// The transform of the current item.
    pub transform: Transform,
    /// The transform of first hard frame in the hierarchy.
    pub container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    pub container_size: Size,
    /// The size of the item.
    pub size: Size,
}
impl Builder<'_, ()> {
    /// Pushes the current state and emits a PDF `q` operator.
    ///
    /// Must be balanced with a later [`Self::restore_state`].
    fn save_state(&mut self) -> SourceResult<()> {
        self.saves.push(self.state.clone());
        self.content.save_state_checked()
    }

    /// Pops the last saved state and emits a PDF `Q` operator.
    ///
    /// Panics if there is no matching `save_state` call.
    fn restore_state(&mut self) {
        self.content.restore_state();
        self.state = self.saves.pop().expect("missing state save");
    }

    /// Switches to the given external graphics state, emitting a `gs`
    /// operator only if it differs from the current one.
    fn set_external_graphics_state(&mut self, graphics_state: &ExtGState) {
        let current_state = &self.state.external_graphics_state;
        if current_state != graphics_state {
            let index = self.resources.ext_gs.insert(*graphics_state);
            let name = eco_format!("Gs{index}");
            self.content.set_parameters(Name(name.as_bytes()));
            self.state.external_graphics_state = *graphics_state;
            if graphics_state.uses_opacities() {
                self.uses_opacities = true;
            }
        }
    }

    /// Applies the opacities of the given stroke and fill paints via an
    /// external graphics state.
    fn set_opacities(&mut self, stroke: Option<&FixedStroke>, fill: Option<&Paint>) {
        // Extracts an 8-bit alpha from a paint; gradients and tilings are
        // treated as fully opaque here.
        let get_opacity = |paint: &Paint| {
            let color = match paint {
                Paint::Solid(color) => *color,
                Paint::Gradient(_) | Paint::Tiling(_) => return 255,
            };

            color.alpha().map_or(255, |v| (v * 255.0).round() as u8)
        };

        let stroke_opacity = stroke.map_or(255, |stroke| get_opacity(&stroke.paint));
        let fill_opacity = fill.map_or(255, get_opacity);
        self.set_external_graphics_state(&ExtGState { stroke_opacity, fill_opacity });
    }

    /// Resets both opacities to fully opaque.
    fn reset_opacities(&mut self) {
        self.set_external_graphics_state(&ExtGState {
            stroke_opacity: 255,
            fill_opacity: 255,
        });
    }

    /// Applies a transform, both to the simulated state and as a `cm`
    /// operator in the content stream.
    pub fn transform(&mut self, transform: Transform) {
        let Transform { sx, ky, kx, sy, tx, ty } = transform;
        self.state.transform = self.state.transform.pre_concat(transform);
        // The first non-identity transform also becomes the container
        // transform (used to resolve gradients and tilings).
        if self.state.container_transform.is_identity() {
            self.state.container_transform = self.state.transform;
        }
        self.content.transform([
            sx.get() as _,
            ky.get() as _,
            kx.get() as _,
            sy.get() as _,
            tx.to_f32(),
            ty.to_f32(),
        ]);
    }

    /// Updates only the container transform, without emitting any operator.
    fn group_transform(&mut self, transform: Transform) {
        self.state.container_transform =
            self.state.container_transform.pre_concat(transform);
    }

    /// Switches the font, emitting a `Tf` operator only on change.
    fn set_font(&mut self, font: &Font, size: Abs) {
        if self.state.font.as_ref().map(|(f, s)| (f, *s)) != Some((font, size)) {
            let index = self.resources.fonts.insert(font.clone());
            let name = eco_format!("F{index}");
            self.content.set_font(Name(name.as_bytes()), size.to_f32());
            self.state.font = Some((font.clone(), size));
        }
    }

    /// Records the size of the current container frame.
    fn size(&mut self, size: Size) {
        self.state.size = size;
    }

    /// Sets the fill paint, skipping the operator if it is already active.
    ///
    /// Gradients are always re-set because their rendering depends on the
    /// current transforms.
    fn set_fill(
        &mut self,
        fill: &Paint,
        on_text: bool,
        transforms: Transforms,
    ) -> SourceResult<()> {
        if self.state.fill.as_ref() != Some(fill)
            || matches!(self.state.fill, Some(Paint::Gradient(_)))
        {
            fill.set_as_fill(self, on_text, transforms)?;
            self.state.fill = Some(fill.clone());
        }
        Ok(())
    }

    /// Sets the fill color space, emitting a `cs` operator only on change.
    pub fn set_fill_color_space(&mut self, space: Name<'static>) {
        if self.state.fill_space != Some(space) {
            self.content.set_fill_color_space(ColorSpaceOperand::Named(space));
            self.state.fill_space = Some(space);
        }
    }

    /// Forgets the cached fill color space so the next set is re-emitted.
    pub fn reset_fill_color_space(&mut self) {
        self.state.fill_space = None;
    }

    /// Sets the stroke, emitting only the operators for properties that
    /// actually changed.
    ///
    /// Gradient strokes are always re-set because their rendering depends on
    /// the current transforms.
    fn set_stroke(
        &mut self,
        stroke: &FixedStroke,
        on_text: bool,
        transforms: Transforms,
    ) -> SourceResult<()> {
        if self.state.stroke.as_ref() != Some(stroke)
            || matches!(
                self.state.stroke.as_ref().map(|s| &s.paint),
                Some(Paint::Gradient(_))
            )
        {
            let FixedStroke { paint, thickness, cap, join, dash, miter_limit } = stroke;
            paint.set_as_stroke(self, on_text, transforms)?;

            self.content.set_line_width(thickness.to_f32());
            if self.state.stroke.as_ref().map(|s| &s.cap) != Some(cap) {
                self.content.set_line_cap(to_pdf_line_cap(*cap));
            }
            if self.state.stroke.as_ref().map(|s| &s.join) != Some(join) {
                self.content.set_line_join(to_pdf_line_join(*join));
            }
            if self.state.stroke.as_ref().map(|s| &s.dash) != Some(dash) {
                if let Some(dash) = dash {
                    self.content.set_dash_pattern(
                        dash.array.iter().map(|l| l.to_f32()),
                        dash.phase.to_f32(),
                    );
                } else {
                    // An empty dash array means a solid line.
                    self.content.set_dash_pattern([], 0.0);
                }
            }
            if self.state.stroke.as_ref().map(|s| &s.miter_limit) != Some(miter_limit) {
                self.content.set_miter_limit(miter_limit.get() as f32);
            }
            self.state.stroke = Some(stroke.clone());
        }

        Ok(())
    }

    /// Sets the stroke color space, emitting a `CS` operator only on change.
    pub fn set_stroke_color_space(&mut self, space: Name<'static>) {
        if self.state.stroke_space != Some(space) {
            self.content.set_stroke_color_space(ColorSpaceOperand::Named(space));
            self.state.stroke_space = Some(space);
        }
    }

    /// Forgets the cached stroke color space so the next set is re-emitted.
    pub fn reset_stroke_color_space(&mut self) {
        self.state.stroke_space = None;
    }

    /// Sets the text rendering mode, emitting a `Tr` operator only on change.
    fn set_text_rendering_mode(&mut self, mode: TextRenderingMode) {
        if self.state.text_rendering_mode != mode {
            self.content.set_text_rendering_mode(mode);
            self.state.text_rendering_mode = mode;
        }
    }
}
/// Encode a frame into the content stream, dispatching on each item's kind.
pub(crate) fn write_frame(ctx: &mut Builder, frame: &Frame) -> SourceResult<()> {
    for (pos, item) in frame.items() {
        let pos = *pos;
        let x = pos.x.to_f32();
        let y = pos.y.to_f32();
        match item {
            FrameItem::Group(group) => write_group(ctx, pos, group)?,
            FrameItem::Text(text) => write_text(ctx, pos, text)?,
            FrameItem::Shape(shape, _) => write_shape(ctx, pos, shape)?,
            FrameItem::Image(image, size, span) => {
                write_image(ctx, x, y, image, *size, *span)?
            }
            FrameItem::Link(dest, size) => write_link(ctx, pos, dest, *size),
            // Tags carry no visual content.
            FrameItem::Tag(_) => {}
        }
    }

    Ok(())
}
/// Encode a group into the content stream.
///
/// Saves the graphics state, applies the group's transform (and clip, if
/// any), encodes the group's frame, and restores the state.
fn write_group(ctx: &mut Builder, pos: Point, group: &GroupItem) -> SourceResult<()> {
    let translation = Transform::translate(pos.x, pos.y);

    ctx.save_state()?;

    if group.frame.kind().is_hard() {
        // A hard frame starts a new container: rebase the container
        // transform onto the group's own coordinate system by undoing the
        // previous container transform and applying the group's position
        // and transform.
        ctx.group_transform(
            ctx.state
                .transform
                .post_concat(ctx.state.container_transform.invert().unwrap())
                .pre_concat(translation)
                .pre_concat(group.transform),
        );
        ctx.size(group.frame.size());
    }

    ctx.transform(translation.pre_concat(group.transform));
    if let Some(clip_curve) = &group.clip {
        // Set the clip path before drawing the group's contents.
        write_curve(ctx, 0.0, 0.0, clip_curve);
        ctx.content.clip_nonzero();
        ctx.content.end_path();
    }

    write_frame(ctx, &group.frame)?;
    ctx.restore_state();

    Ok(())
}
/// Encode a text run into the content stream.
///
/// Outline glyphs and color glyphs need different encodings, so a run mixing
/// both is split into homogeneous sub-runs first.
fn write_text(ctx: &mut Builder, pos: Point, text: &TextItem) -> SourceResult<()> {
    if ctx.options.standards.pdfa && text.font.info().is_last_resort() {
        // PDF/A forbids text rendered with the last-resort fallback font.
        bail!(
            Span::find(text.glyphs.iter().map(|g| g.span.0)),
            "the text {} could not be displayed with any font",
            &text.text,
        );
    }

    let outline_glyphs =
        text.glyphs.iter().filter(|g| should_outline(&text.font, g)).count();

    if outline_glyphs == text.glyphs.len() {
        // Fast path: only outline glyphs.
        write_normal_text(ctx, pos, TextItemView::full(text))?;
    } else if outline_glyphs == 0 {
        // Fast path: only color glyphs.
        write_complex_glyphs(ctx, pos, TextItemView::full(text))?;
    } else {
        // Otherwise we need to split it into smaller text runs.
        let mut offset = 0;
        let mut position_in_run = Abs::zero();
        for (should_outline, sub_run) in
            text.glyphs.group_by_key(|g| should_outline(&text.font, g))
        {
            let end = offset + sub_run.len();

            // Build a sub text-run
            let text_item_view = TextItemView::from_glyph_range(text, offset..end);

            // Adjust the position of the run on the line
            let pos = pos + Point::new(position_in_run, Abs::zero());
            position_in_run += text_item_view.width();
            offset = end;

            // Actually write the sub text-run.
            if should_outline {
                write_normal_text(ctx, pos, text_item_view)?;
            } else {
                write_complex_glyphs(ctx, pos, text_item_view)?;
            }
        }
    }

    Ok(())
}
/// Encodes a text run (without any color glyph) into the content stream.
///
/// Emits a text object with positioned show strings, interleaving kerning
/// adjustments where glyph offsets/advances deviate from the font's metrics.
fn write_normal_text(
    ctx: &mut Builder,
    pos: Point,
    text: TextItemView,
) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();

    // Track language usage and remember the text behind each glyph for the
    // ToUnicode CMap.
    *ctx.resources.languages.entry(text.item.lang).or_insert(0) += text.glyph_range.len();

    let glyph_set = ctx.resources.glyph_sets.entry(text.item.font.clone()).or_default();
    for g in text.glyphs() {
        glyph_set.entry(g.id).or_insert_with(|| text.glyph_text(g));
    }

    let fill_transform = ctx.state.transforms(Size::zero(), pos);
    ctx.set_fill(&text.item.fill, true, fill_transform)?;

    // Zero-thickness strokes are treated as no stroke at all.
    let stroke = text.item.stroke.as_ref().and_then(|stroke| {
        if stroke.thickness.to_f32() > 0.0 {
            Some(stroke)
        } else {
            None
        }
    });

    if let Some(stroke) = stroke {
        ctx.set_stroke(stroke, true, fill_transform)?;
        ctx.set_text_rendering_mode(TextRenderingMode::FillStroke);
    } else {
        ctx.set_text_rendering_mode(TextRenderingMode::Fill);
    }

    ctx.set_font(&text.item.font, text.item.size);
    ctx.set_opacities(text.item.stroke.as_ref(), Some(&text.item.fill));
    ctx.content.begin_text();

    // Position the text.
    ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);

    let mut positioned = ctx.content.show_positioned();
    let mut items = positioned.items();
    let mut adjustment = Em::zero();
    let mut encoded = vec![];

    let glyph_remapper = ctx
        .resources
        .glyph_remappers
        .entry(text.item.font.clone())
        .or_default();

    // Write the glyphs with kerning adjustments.
    for glyph in text.glyphs() {
        if ctx.options.standards.pdfa && glyph.id == 0 {
            // Glyph ID 0 is the missing-glyph "tofu"; forbidden in PDF/A.
            bail!(tofu(&text, glyph));
        }

        adjustment += glyph.x_offset;

        // Flush the pending string and emit the accumulated adjustment
        // before this glyph.
        if !adjustment.is_zero() {
            if !encoded.is_empty() {
                show_text(&mut items, &encoded);
                encoded.clear();
            }

            items.adjust(-adjustment.to_font_units());
            adjustment = Em::zero();
        }

        // In PDF, we use CIDs to index the glyphs in a font, not GIDs. What a
        // CID actually refers to depends on the type of font we are embedding:
        //
        // - For TrueType fonts, the CIDs are defined by an external mapping.
        // - For SID-keyed CFF fonts, the CID is the same as the GID in the font.
        // - For CID-keyed CFF fonts, the CID refers to the CID in the font.
        //
        // (See in the PDF-spec for more details on this.)
        //
        // However, in our case:
        // - We use the identity-mapping for TrueType fonts.
        // - SID-keyed fonts will get converted into CID-keyed fonts by the
        //   subsetter.
        // - CID-keyed fonts will be rewritten in a way so that the mapping
        //   between CID and GID is always the identity mapping, regardless of
        //   the mapping before.
        //
        // Because of this, we can always use the remapped GID as the CID,
        // regardless of which type of font we are actually embedding.
        let cid = glyph_remapper.remap(glyph.id);
        encoded.push((cid >> 8) as u8);
        encoded.push((cid & 0xff) as u8);

        // Accumulate the difference between the glyph's laid-out advance and
        // its intrinsic advance as a pending kerning adjustment.
        if let Some(advance) = text.item.font.advance(glyph.id) {
            adjustment += glyph.x_advance - advance;
        }

        adjustment -= glyph.x_offset;
    }

    if !encoded.is_empty() {
        show_text(&mut items, &encoded);
    }

    items.finish();
    positioned.finish();
    ctx.content.end_text();

    Ok(())
}
/// Shows text, ensuring that each individual string doesn't exceed the
/// implementation limits.
fn show_text(items: &mut PositionedItems, encoded: &[u8]) {
    encoded
        .chunks(Str::PDFA_LIMIT)
        .for_each(|part| items.show(Str(part)));
}
/// Encodes a text run made only of color glyphs into the content stream.
///
/// Color glyphs are shown through Type3 fonts ("Cf{n}" resources), one byte
/// per glyph, switching fonts whenever a glyph lives in a different slice.
fn write_complex_glyphs(
    ctx: &mut Builder,
    pos: Point,
    text: TextItemView,
) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();
    let mut last_font = None;

    ctx.reset_opacities();

    ctx.content.begin_text();
    ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);
    // So that the next call to ctx.set_font() will change the font to one that
    // displays regular glyphs and not color glyphs.
    ctx.state.font = None;

    let glyph_set = ctx
        .resources
        .color_glyph_sets
        .entry(text.item.font.clone())
        .or_default();

    for glyph in text.glyphs() {
        if ctx.options.standards.pdfa && glyph.id == 0 {
            // Glyph ID 0 is the missing-glyph "tofu"; forbidden in PDF/A.
            bail!(tofu(&text, glyph));
        }

        // Retrieve the Type3 font reference and the glyph index in the font.
        let color_fonts = ctx
            .resources
            .color_fonts
            .get_or_insert_with(|| Box::new(ColorFontMap::new()));

        let (font, index) = color_fonts.get(ctx.options, &text, glyph)?;

        // Only switch fonts when the glyph's slice changes.
        if last_font != Some(font) {
            ctx.content.set_font(
                Name(eco_format!("Cf{}", font).as_bytes()),
                text.item.size.to_f32(),
            );
            last_font = Some(font);
        }

        ctx.content.show(Str(&[index]));

        // Remember the text behind the glyph for the ToUnicode CMap.
        glyph_set.entry(glyph.id).or_insert_with(|| text.glyph_text(glyph));
    }

    ctx.content.end_text();

    Ok(())
}
/// Encode a geometrical shape into the content stream.
///
/// Sets up fill/stroke state, emits the path for the shape's geometry, and
/// finishes with the matching paint operator.
fn write_shape(ctx: &mut Builder, pos: Point, shape: &Shape) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();

    // Zero-thickness strokes are treated as no stroke at all.
    let stroke = shape.stroke.as_ref().and_then(|stroke| {
        if stroke.thickness.to_f32() > 0.0 {
            Some(stroke)
        } else {
            None
        }
    });

    // Nothing to paint at all.
    if shape.fill.is_none() && stroke.is_none() {
        return Ok(());
    }

    if let Some(fill) = &shape.fill {
        ctx.set_fill(fill, false, ctx.state.transforms(shape.geometry.bbox_size(), pos))?;
    }

    if let Some(stroke) = stroke {
        ctx.set_stroke(
            stroke,
            false,
            ctx.state.transforms(shape.geometry.bbox_size(), pos),
        )?;
    }

    ctx.set_opacities(stroke, shape.fill.as_ref());

    // Emit the path for the shape's geometry.
    match &shape.geometry {
        Geometry::Line(target) => {
            let dx = target.x.to_f32();
            let dy = target.y.to_f32();
            ctx.content.move_to(x, y);
            ctx.content.line_to(x + dx, y + dy);
        }
        Geometry::Rect(size) => {
            let w = size.x.to_f32();
            let h = size.y.to_f32();
            // Degenerate rectangles produce no path at all.
            if w.abs() > f32::EPSILON && h.abs() > f32::EPSILON {
                ctx.content.rect(x, y, w, h);
            }
        }
        Geometry::Curve(curve) => {
            write_curve(ctx, x, y, curve);
        }
    }

    // Pick the paint operator matching the fill/fill-rule/stroke combination.
    match (&shape.fill, &shape.fill_rule, stroke) {
        // Unreachable: handled by the early return above.
        (None, _, None) => unreachable!(),
        (Some(_), FillRule::NonZero, None) => ctx.content.fill_nonzero(),
        (Some(_), FillRule::EvenOdd, None) => ctx.content.fill_even_odd(),
        (None, _, Some(_)) => ctx.content.stroke(),
        (Some(_), FillRule::NonZero, Some(_)) => ctx.content.fill_nonzero_and_stroke(),
        (Some(_), FillRule::EvenOdd, Some(_)) => ctx.content.fill_even_odd_and_stroke(),
    };

    Ok(())
}
/// Encode a curve into the content stream.
fn write_curve(ctx: &mut Builder, x: f32, y: f32, curve: &Curve) {
for elem in &curve.0 {
match elem {
CurveItem::Move(p) => ctx.content.move_to(x + p.x.to_f32(), y + p.y.to_f32()),
CurveItem::Line(p) => ctx.content.line_to(x + p.x.to_f32(), y + p.y.to_f32()),
CurveItem::Cubic(p1, p2, p3) => ctx.content.cubic_to(
x + p1.x.to_f32(),
y + p1.y.to_f32(),
x + p2.x.to_f32(),
y + p2.y.to_f32(),
x + p3.x.to_f32(),
y + p3.y.to_f32(),
),
CurveItem::Close => ctx.content.close_path(),
};
}
}
/// Encode a vector or raster image into the content stream.
///
/// The image itself is encoded lazily (deferred); here only the XObject
/// reference and its placement transform are emitted.
fn write_image(
    ctx: &mut Builder,
    x: f32,
    y: f32,
    image: &Image,
    size: Size,
    span: Span,
) -> SourceResult<()> {
    let index = ctx.resources.images.insert(image.clone());
    ctx.resources.deferred_images.entry(index).or_insert_with(|| {
        let (image, color_space) =
            deferred_image(image.clone(), ctx.options.standards.pdfa);
        if let Some(color_space) = color_space {
            ctx.resources.colors.mark_as_used(color_space);
        }
        (image, span)
    });

    ctx.reset_opacities();

    let name = eco_format!("Im{index}");
    let w = size.x.to_f32();
    let h = size.y.to_f32();
    ctx.content.save_state_checked()?;
    // Scale the unit square XObject to the image size and flip it upright
    // (images are drawn with the Y axis pointing up in PDF).
    ctx.content.transform([w, 0.0, 0.0, -h, x, y + h]);

    if let Some(alt) = image.alt() {
        if ctx.options.standards.pdfa && alt.len() > Str::PDFA_LIMIT {
            bail!(span, "the image's alt text is too long");
        }

        // Wrap the image in a marked-content sequence carrying the alt text.
        let mut image_span =
            ctx.content.begin_marked_content_with_properties(Name(b"Span"));
        let mut image_alt = image_span.properties();
        image_alt.pair(Name(b"Alt"), Str(alt.as_bytes()));
        image_alt.finish();
        image_span.finish();

        ctx.content.x_object(Name(name.as_bytes()));
        ctx.content.end_marked_content();
    } else {
        ctx.content.x_object(Name(name.as_bytes()));
    }

    ctx.content.restore_state();
    Ok(())
}
/// Save a link for later writing in the annotations dictionary.
fn write_link(ctx: &mut Builder, pos: Point, dest: &Destination, size: Size) {
    // The four corners of the untransformed link area.
    let corners = [
        pos,
        pos + Point::with_x(size.x),
        pos + Point::with_y(size.y),
        pos + size.to_point(),
    ];

    // Compute the bounding box of the transformed link.
    let mut min_x = Abs::inf();
    let mut min_y = Abs::inf();
    let mut max_x = -Abs::inf();
    let mut max_y = -Abs::inf();
    for corner in corners {
        let mapped = corner.transform(ctx.state.transform);
        min_x.set_min(mapped.x);
        min_y.set_min(mapped.y);
        max_x.set_max(mapped.x);
        max_y.set_max(mapped.y);
    }

    // The rectangle is recorded as (min_x, max_y, max_x, min_y), matching the
    // Y-flip applied by the top-level transform in `build`.
    let rect = Rect::new(min_x.to_f32(), max_y.to_f32(), max_x.to_f32(), min_y.to_f32());
    ctx.links.push((dest.clone(), rect));
}
fn to_pdf_line_cap(cap: LineCap) -> LineCapStyle {
match cap {
LineCap::Butt => LineCapStyle::ButtCap,
LineCap::Round => LineCapStyle::RoundCap,
LineCap::Square => LineCapStyle::ProjectingSquareCap,
}
}
fn to_pdf_line_join(join: LineJoin) -> LineJoinStyle {
match join {
LineJoin::Miter => LineJoinStyle::MiterJoin,
LineJoin::Round => LineJoinStyle::RoundJoin,
LineJoin::Bevel => LineJoinStyle::BevelJoin,
}
}
/// Creates the diagnostic emitted when a tofu (missing) glyph is encountered.
#[cold]
fn tofu(text: &TextItemView, glyph: &Glyph) -> SourceDiagnostic {
    let glyph_repr = text.glyph_text(glyph).repr();
    error!(
        glyph.span.0,
        "the text {} could not be displayed with any font", glyph_repr,
    )
}

View File

@ -0,0 +1,661 @@
use std::collections::{BTreeMap, HashMap, HashSet};
use std::num::NonZeroU64;
use ecow::{eco_format, EcoVec};
use krilla::annotation::Annotation;
use krilla::configure::{Configuration, ValidationError, Validator};
use krilla::destination::{NamedDestination, XyzDestination};
use krilla::embed::EmbedError;
use krilla::error::KrillaError;
use krilla::geom::PathBuilder;
use krilla::page::{PageLabel, PageSettings};
use krilla::surface::Surface;
use krilla::{Document, SerializeSettings};
use krilla_svg::render_svg_glyph;
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::NativeElement;
use typst_library::introspection::Location;
use typst_library::layout::{
Abs, Frame, FrameItem, GroupItem, PagedDocument, Size, Transform,
};
use typst_library::model::HeadingElem;
use typst_library::text::{Font, Lang};
use typst_library::visualize::{Geometry, Paint};
use typst_syntax::Span;
use crate::embed::embed_files;
use crate::image::handle_image;
use crate::link::handle_link;
use crate::metadata::build_metadata;
use crate::outline::build_outline;
use crate::page::PageLabelExt;
use crate::shape::handle_shape;
use crate::text::handle_text;
use crate::util::{convert_path, display_font, AbsExt, TransformExt};
use crate::PdfOptions;
/// Converts a finished Typst document into PDF bytes using krilla.
///
/// Returns an error if any page content fails to convert or if the document
/// violates the configured PDF standards.
#[typst_macros::time(name = "convert document")]
pub fn convert(
    typst_document: &PagedDocument,
    options: &PdfOptions,
) -> SourceResult<Vec<u8>> {
    let settings = SerializeSettings {
        compress_content_streams: true,
        no_device_cs: true,
        ascii_compatible: false,
        xmp_metadata: true,
        cmyk_profile: None,
        configuration: options.standards.config,
        enable_tagging: false,
        render_svg_glyph_fn: render_svg_glyph,
    };

    let mut document = Document::new_with(settings);

    // Destinations must be known before page conversion so links can refer
    // to them.
    let page_index_converter = PageIndexConverter::new(typst_document, options);
    let named_destinations =
        collect_named_destinations(typst_document, &page_index_converter);
    let mut gc = GlobalContext::new(
        typst_document,
        options,
        named_destinations,
        page_index_converter,
    );

    convert_pages(&mut gc, &mut document)?;
    embed_files(typst_document, &mut document)?;

    // Outline and metadata rely on information collected during page
    // conversion, so they are set last.
    document.set_outline(build_outline(&gc));
    document.set_metadata(build_metadata(&gc));

    finish(document, gc, options.standards.config)
}
/// Converts every exported page of the document and adds it to the PDF.
///
/// Pages excluded from export are skipped; when any page is skipped, the
/// remaining pages receive explicit page labels so that PDF viewers still
/// show the real page numbers from the Typst document.
fn convert_pages(gc: &mut GlobalContext, document: &mut Document) -> SourceResult<()> {
    for (i, typst_page) in gc.document.pages.iter().enumerate() {
        // Don't export this page. (Guard clause instead of the previous
        // redundant `else` block after `continue`.)
        if gc.page_index_converter.pdf_page_index(i).is_none() {
            continue;
        }

        let mut settings = PageSettings::new(
            typst_page.frame.width().to_f32(),
            typst_page.frame.height().to_f32(),
        );

        if let Some(label) = typst_page
            .numbering
            .as_ref()
            .and_then(|num| PageLabel::generate(num, typst_page.number))
            .or_else(|| {
                // When some pages were ignored from export, we show a page label with
                // the correct real (not logical) page number.
                // This is for consistency with normal output when pages have no numbering
                // and all are exported: the final PDF page numbers always correspond to
                // the real (not logical) page numbers. Here, the final PDF page number
                // will differ, but we can at least use labels to indicate what was
                // the corresponding real page number in the Typst document.
                gc.page_index_converter
                    .has_skipped_pages()
                    .then(|| PageLabel::arabic((i + 1) as u64))
            })
        {
            settings = settings.with_page_label(label);
        }

        let mut page = document.start_page_with(settings);
        let mut surface = page.surface();
        let mut fc = FrameContext::new(typst_page.frame.size());

        // Render the page frame (and its background fill, if any) onto the
        // surface, collecting annotations into `fc` along the way.
        handle_frame(
            &mut fc,
            &typst_page.frame,
            typst_page.fill_or_transparent(),
            &mut surface,
            gc,
        )?;

        surface.finish();

        // Annotations can only be attached once drawing has finished.
        for annotation in fc.annotations {
            page.add_annotation(annotation);
        }
    }

    Ok(())
}
/// A state allowing us to keep track of transforms and container sizes,
/// which is mainly needed to resolve gradients and patterns correctly.
#[derive(Debug, Clone)]
pub(crate) struct State {
    /// The current transform.
    transform: Transform,
    /// The transform of first hard frame in the hierarchy.
    container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    container_size: Size,
}

impl State {
    /// Creates a new, clean state for a given `size`.
    fn new(size: Size) -> Self {
        Self {
            container_size: size,
            transform: Transform::identity(),
            container_transform: Transform::identity(),
        }
    }

    /// Record the current frame as a hard container with the given `size`.
    pub(crate) fn register_container(&mut self, size: Size) {
        self.container_size = size;
        self.container_transform = self.transform;
    }

    /// Apply `transform` before the currently accumulated transform.
    pub(crate) fn pre_concat(&mut self, transform: Transform) {
        let combined = self.transform.pre_concat(transform);
        self.transform = combined;
    }

    /// The currently accumulated transform.
    pub(crate) fn transform(&self) -> Transform {
        self.transform
    }

    /// The transform of the first hard frame in the hierarchy.
    pub(crate) fn container_transform(&self) -> Transform {
        self.container_transform
    }

    /// The size of the first hard frame in the hierarchy.
    pub(crate) fn container_size(&self) -> Size {
        self.container_size
    }
}
/// Context needed for converting a single frame.
pub(crate) struct FrameContext {
    /// Stack of graphics states; the last entry is the active one.
    states: Vec<State>,
    /// Annotations collected while converting the frame.
    annotations: Vec<Annotation>,
}

impl FrameContext {
    /// Create a context whose root state covers a frame of the given `size`.
    pub(crate) fn new(size: Size) -> Self {
        let root = State::new(size);
        Self { states: vec![root], annotations: Vec::new() }
    }

    /// Push a copy of the active state onto the stack.
    pub(crate) fn push(&mut self) {
        let top = self.state().clone();
        self.states.push(top);
    }

    /// Discard the active state, restoring the previous one.
    pub(crate) fn pop(&mut self) {
        self.states.pop();
    }

    /// The active state.
    pub(crate) fn state(&self) -> &State {
        self.states.last().unwrap()
    }

    /// The active state, mutably.
    pub(crate) fn state_mut(&mut self) -> &mut State {
        self.states.last_mut().unwrap()
    }

    /// Record an annotation produced while converting the frame.
    pub(crate) fn push_annotation(&mut self, annotation: Annotation) {
        self.annotations.push(annotation);
    }
}
/// Globally needed context for converting a typst document.
pub(crate) struct GlobalContext<'a> {
    /// Cache the conversion between krilla and Typst fonts (forward and backward).
    pub(crate) fonts_forward: HashMap<Font, krilla::text::Font>,
    /// The reverse of `fonts_forward`, used to map krilla fonts back to Typst
    /// fonts (e.g. when reporting font-related export errors).
    pub(crate) fonts_backward: HashMap<krilla::text::Font, Font>,
    /// Mapping between images and their span.
    // Note: In theory, the same image can have multiple spans
    // if it appears in the document multiple times. We just store the
    // first appearance, though.
    pub(crate) image_to_spans: HashMap<krilla::image::Image, Span>,
    /// The spans of all images that appear in the document. We use this so
    /// we can give more accurate error messages.
    pub(crate) image_spans: HashSet<Span>,
    /// The document to convert.
    pub(crate) document: &'a PagedDocument,
    /// Options for PDF export.
    pub(crate) options: &'a PdfOptions<'a>,
    /// Mapping between locations in the document and named destinations.
    pub(crate) loc_to_names: HashMap<Location, NamedDestination>,
    /// The languages used throughout the document.
    pub(crate) languages: BTreeMap<Lang, usize>,
    /// Maps Typst page indices to PDF page indices, accounting for pages
    /// excluded from export.
    pub(crate) page_index_converter: PageIndexConverter,
}
impl<'a> GlobalContext<'a> {
pub(crate) fn new(
document: &'a PagedDocument,
options: &'a PdfOptions,
loc_to_names: HashMap<Location, NamedDestination>,
page_index_converter: PageIndexConverter,
) -> GlobalContext<'a> {
Self {
fonts_forward: HashMap::new(),
fonts_backward: HashMap::new(),
document,
options,
loc_to_names,
image_to_spans: HashMap::new(),
image_spans: HashSet::new(),
languages: BTreeMap::new(),
page_index_converter,
}
}
}
/// Convert a frame and all of its items onto the given surface.
#[typst_macros::time(name = "handle page")]
pub(crate) fn handle_frame(
    fc: &mut FrameContext,
    frame: &Frame,
    fill: Option<Paint>,
    surface: &mut Surface,
    gc: &mut GlobalContext,
) -> SourceResult<()> {
    fc.push();

    // A hard frame starts a new container, which affects how gradients and
    // patterns are resolved.
    if frame.kind().is_hard() {
        fc.state_mut().register_container(frame.size());
    }

    // Paint the background, if any, as a rectangle covering the whole frame.
    if let Some(paint) = fill {
        let background = Geometry::Rect(frame.size()).filled(paint);
        handle_shape(fc, &background, surface, gc, Span::detached())?;
    }

    for (point, item) in frame.items() {
        fc.push();
        fc.state_mut().pre_concat(Transform::translate(point.x, point.y));

        match item {
            FrameItem::Group(group) => handle_group(fc, group, surface, gc)?,
            FrameItem::Text(text) => handle_text(fc, text, surface, gc)?,
            FrameItem::Shape(shape, span) => {
                handle_shape(fc, shape, surface, gc, *span)?
            }
            FrameItem::Image(image, size, span) => {
                handle_image(gc, fc, image, *size, surface, *span)?
            }
            FrameItem::Link(dest, size) => handle_link(fc, gc, dest, *size),
            FrameItem::Tag(_) => {}
        }

        fc.pop();
    }

    fc.pop();

    Ok(())
}
/// Convert a group item by applying its transform and optional clip path.
pub(crate) fn handle_group(
    fc: &mut FrameContext,
    group: &GroupItem,
    surface: &mut Surface,
    context: &mut GlobalContext,
) -> SourceResult<()> {
    fc.push();
    fc.state_mut().pre_concat(group.transform);

    // Build the clip path (if any), transformed into the current coordinate
    // system.
    let clip_path = group
        .clip
        .as_ref()
        .and_then(|path| {
            let mut builder = PathBuilder::new();
            convert_path(path, &mut builder);
            builder.finish()
        })
        .and_then(|path| path.transform(fc.state().transform.to_krilla()));

    if let Some(clip) = clip_path.as_ref() {
        surface.push_clip_path(clip, &krilla::paint::FillRule::NonZero);
    }

    handle_frame(fc, &group.frame, None, surface, context)?;

    // Balance the clip push above.
    if clip_path.is_some() {
        surface.pop();
    }

    fc.pop();

    Ok(())
}
#[typst_macros::time(name = "finish export")]
/// Finish a krilla document and handle export errors.
fn finish(
    document: Document,
    gc: GlobalContext,
    configuration: Configuration,
) -> SourceResult<Vec<u8>> {
    let validator = configuration.validator();

    // Flatten the error variants directly into the outer match instead of
    // nesting a second `match` inside the `Err` arm.
    match document.finish() {
        Ok(bytes) => Ok(bytes),
        Err(KrillaError::Font(f, s)) => {
            let font_str = display_font(gc.fonts_backward.get(&f).unwrap());
            bail!(
                Span::detached(),
                "failed to process font {font_str}: {s}";
                hint: "make sure the font is valid";
                hint: "the used font might be unsupported by Typst"
            );
        }
        Err(KrillaError::Validation(ve)) => Err(ve
            .iter()
            .map(|e| convert_error(&gc, validator, e))
            .collect::<EcoVec<_>>()),
        Err(KrillaError::Image(_, loc)) => {
            let span = to_span(loc);
            bail!(span, "failed to process image");
        }
        Err(KrillaError::SixteenBitImage(image, _)) => {
            let span = gc.image_to_spans.get(&image).unwrap();
            bail!(
                *span, "16 bit images are not supported in this export mode";
                hint: "convert the image to 8 bit instead"
            )
        }
    }
}
/// Converts a krilla error into a Typst error.
///
/// Every message is prefixed with the name of the violated PDF standard
/// (taken from `validator`). Where krilla provides a location, it is mapped
/// back to a Typst span via `to_span` so the diagnostic points at the source.
fn convert_error(
    gc: &GlobalContext,
    validator: Validator,
    error: &ValidationError,
) -> SourceDiagnostic {
    let prefix = eco_format!("{} error:", validator.as_str());
    match error {
        ValidationError::TooLongString => error!(
            Span::detached(),
            "{prefix} a PDF string is longer than 32767 characters";
            hint: "ensure title and author names are short enough"
        ),
        // Should in theory never occur, as krilla always trims font names.
        ValidationError::TooLongName => error!(
            Span::detached(),
            "{prefix} a PDF name is longer than 127 characters";
            hint: "perhaps a font name is too long"
        ),
        ValidationError::TooLongArray => error!(
            Span::detached(),
            "{prefix} a PDF array is longer than 8191 elements";
            hint: "this can happen if you have a very long text in a single line"
        ),
        ValidationError::TooLongDictionary => error!(
            Span::detached(),
            "{prefix} a PDF dictionary has more than 4095 entries";
            hint: "try reducing the complexity of your document"
        ),
        ValidationError::TooLargeFloat => error!(
            Span::detached(),
            "{prefix} a PDF floating point number is larger than the allowed limit";
            hint: "try exporting with a higher PDF version"
        ),
        ValidationError::TooManyIndirectObjects => error!(
            Span::detached(),
            "{prefix} the PDF has too many indirect objects";
            hint: "reduce the size of your document"
        ),
        // Can only occur if we have 27+ nested clip paths
        ValidationError::TooHighQNestingLevel => error!(
            Span::detached(),
            "{prefix} the PDF has too high q nesting";
            hint: "reduce the number of nested containers"
        ),
        ValidationError::ContainsPostScript(loc) => error!(
            to_span(*loc),
            "{prefix} the PDF contains PostScript code";
            hint: "conic gradients are not supported in this PDF standard"
        ),
        ValidationError::MissingCMYKProfile => error!(
            Span::detached(),
            "{prefix} the PDF is missing a CMYK profile";
            hint: "CMYK colors are not yet supported in this export mode"
        ),
        ValidationError::ContainsNotDefGlyph(f, loc, text) => error!(
            to_span(*loc),
            "{prefix} the text '{text}' cannot be displayed using {}",
            display_font(gc.fonts_backward.get(f).unwrap());
            hint: "try using a different font"
        ),
        ValidationError::InvalidCodepointMapping(_, _, cp, loc) => {
            if let Some(c) = cp.map(|c| eco_format!("{:#06x}", c as u32)) {
                // Word the message depending on whether we can point at a
                // concrete source location or only at the PDF itself.
                let msg = if loc.is_some() {
                    "the PDF contains text with"
                } else {
                    "the text contains"
                };
                error!(to_span(*loc), "{prefix} {msg} the disallowed codepoint {c}")
            } else {
                // I think this code path is in theory unreachable,
                // but just to be safe.
                let msg = if loc.is_some() {
                    "the PDF contains text with missing codepoints"
                } else {
                    "the text was not mapped to a code point"
                };
                error!(
                    to_span(*loc),
                    "{prefix} {msg}";
                    hint: "for complex scripts like Arabic, it might not be \
                           possible to produce a compliant document"
                )
            }
        }
        ValidationError::UnicodePrivateArea(_, _, c, loc) => {
            let code_point = eco_format!("{:#06x}", *c as u32);
            let msg = if loc.is_some() { "the PDF" } else { "the text" };
            error!(
                to_span(*loc),
                "{prefix} {msg} contains the codepoint {code_point}";
                hint: "codepoints from the Unicode private area are \
                       forbidden in this export mode"
            )
        }
        ValidationError::Transparency(loc) => {
            let span = to_span(*loc);
            let hint1 = "try exporting with a different standard that \
                         supports transparency";
            if loc.is_some() {
                // `image_spans` lets us distinguish transparency coming from
                // an image from transparency in a fill or stroke.
                if gc.image_spans.contains(&span) {
                    error!(
                        span, "{prefix} the image contains transparency";
                        hint: "{hint1}";
                        hint: "or convert the image to a non-transparent one";
                        hint: "you might have to convert SVGs into \
                               non-transparent bitmap images"
                    )
                } else {
                    error!(
                        span, "{prefix} the used fill or stroke has transparency";
                        hint: "{hint1}";
                        hint: "or don't use colors with transparency in \
                               this export mode"
                    )
                }
            } else {
                error!(
                    span, "{prefix} the PDF contains transparency";
                    hint: "{hint1}"
                )
            }
        }
        ValidationError::ImageInterpolation(loc) => {
            let span = to_span(*loc);
            if loc.is_some() {
                error!(
                    span, "{prefix} the image has smooth scaling";
                    hint: "set the `scaling` attribute to `pixelated`"
                )
            } else {
                error!(
                    span, "{prefix} an image in the PDF has smooth scaling";
                    hint: "set the `scaling` attribute of all images to `pixelated`"
                )
            }
        }
        ValidationError::EmbeddedFile(e, s) => {
            // We always set the span for embedded files, so it cannot be detached.
            let span = to_span(*s);
            match e {
                EmbedError::Existence => {
                    error!(
                        span, "{prefix} document contains an embedded file";
                        hint: "embedded files are not supported in this export mode"
                    )
                }
                EmbedError::MissingDate => {
                    error!(
                        span, "{prefix} document date is missing";
                        hint: "the document must have a date when embedding files";
                        hint: "`set document(date: none)` must not be used in this case"
                    )
                }
                EmbedError::MissingDescription => {
                    error!(span, "{prefix} the file description is missing")
                }
                EmbedError::MissingMimeType => {
                    error!(span, "{prefix} the file mime type is missing")
                }
            }
        }
        // The below errors cannot occur yet, only once Typst supports full PDF/A
        // and PDF/UA. But let's still add a message just to be on the safe side.
        ValidationError::MissingAnnotationAltText => error!(
            Span::detached(),
            "{prefix} missing annotation alt text";
            hint: "please report this as a bug"
        ),
        ValidationError::MissingAltText => error!(
            Span::detached(),
            "{prefix} missing alt text";
            hint: "make sure your images and equations have alt text"
        ),
        ValidationError::NoDocumentLanguage => error!(
            Span::detached(),
            "{prefix} missing document language";
            hint: "set the language of the document"
        ),
        // Needs to be set by typst-pdf.
        ValidationError::MissingHeadingTitle => error!(
            Span::detached(),
            "{prefix} missing heading title";
            hint: "please report this as a bug"
        ),
        ValidationError::MissingDocumentOutline => error!(
            Span::detached(),
            "{prefix} missing document outline";
            hint: "please report this as a bug"
        ),
        ValidationError::MissingTagging => error!(
            Span::detached(),
            "{prefix} missing document tags";
            hint: "please report this as a bug"
        ),
        ValidationError::NoDocumentTitle => error!(
            Span::detached(),
            "{prefix} missing document title";
            hint: "set the title of the document"
        ),
        ValidationError::MissingDocumentDate => error!(
            Span::detached(),
            "{prefix} missing document date";
            hint: "set the date of the document"
        ),
    }
}
/// Convert a krilla location to a span.
fn to_span(loc: Option<krilla::surface::Location>) -> Span {
    match loc {
        // NOTE(review): locations are presumably created from raw span values
        // (which are non-zero), making the unwrap safe — confirm at call sites.
        Some(l) => Span::from_raw(NonZeroU64::new(l).unwrap()),
        None => Span::detached(),
    }
}
/// Collect a named destination for every uniquely labeled heading, keyed by
/// the heading's location.
fn collect_named_destinations(
    document: &PagedDocument,
    pic: &PageIndexConverter,
) -> HashMap<Location, NamedDestination> {
    let mut locs_to_names = HashMap::new();

    // Find all headings that have a label and are the first among other
    // headings with the same label.
    let mut seen = HashSet::new();
    let headings = document.introspector.query(&HeadingElem::elem().select());
    let matches: Vec<_> = headings
        .iter()
        .filter_map(|elem| elem.location().zip(elem.label()))
        .filter(|&(_, label)| seen.insert(label))
        .collect();

    for (loc, label) in matches {
        let pos = document.introspector.position(loc);
        let index = pos.page.get() - 1;

        // We are subtracting 10 because the position of links e.g. to headings
        // is always at the baseline and if you link directly to it, the text
        // will not be visible because it is right above.
        let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());

        // Only add named destination if page belonging to the position is exported.
        let Some(pdf_index) = pic.pdf_page_index(index) else { continue };

        let point = krilla::geom::Point::from_xy(pos.point.x.to_f32(), y.to_f32());
        let dest = XyzDestination::new(pdf_index, point);
        let named = NamedDestination::new(label.resolve().to_string(), dest);
        locs_to_names.insert(loc, named);
    }

    locs_to_names
}
/// Maps Typst page indices to PDF page indices when some pages are excluded
/// from export.
pub(crate) struct PageIndexConverter {
    /// Maps a Typst page index to its PDF page index. Pages that are not
    /// exported have no entry.
    page_indices: HashMap<usize, usize>,
    /// The number of pages that were excluded from export.
    skipped_pages: usize,
}
impl PageIndexConverter {
    /// Build the index mapping from the document's pages and the export
    /// options' page ranges.
    pub fn new(document: &PagedDocument, options: &PdfOptions) -> Self {
        let mut page_indices = HashMap::new();
        let mut skipped_pages = 0;

        for i in 0..document.pages.len() {
            // A page is excluded when page ranges are given and none of them
            // covers this index.
            let excluded = options
                .page_ranges
                .as_ref()
                .is_some_and(|ranges| !ranges.includes_page_index(i));

            if excluded {
                skipped_pages += 1;
            } else {
                // The PDF index is shifted down by however many pages were
                // skipped before this one.
                page_indices.insert(i, i - skipped_pages);
            }
        }

        Self { page_indices, skipped_pages }
    }

    /// Whether at least one page was excluded from export.
    pub(crate) fn has_skipped_pages(&self) -> bool {
        self.skipped_pages > 0
    }

    /// Get the PDF page index of a page index, if it's not excluded.
    pub(crate) fn pdf_page_index(&self, page_index: usize) -> Option<usize> {
        self.page_indices.get(&page_index).copied()
    }
}

View File

@ -1,122 +1,122 @@
use std::collections::BTreeMap;
use std::sync::Arc;
use ecow::EcoString;
use pdf_writer::types::AssociationKind;
use pdf_writer::{Filter, Finish, Name, Ref, Str, TextStr};
use krilla::embed::{AssociationKind, EmbeddedFile};
use krilla::Document;
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::{NativeElement, Packed, StyleChain};
use typst_library::foundations::{NativeElement, StyleChain};
use typst_library::layout::PagedDocument;
use typst_library::pdf::{EmbedElem, EmbeddedFileRelationship};
use crate::catalog::{document_date, pdf_date};
use crate::{deflate, NameExt, PdfChunk, StrExt, WithGlobalRefs};
pub(crate) fn embed_files(
typst_doc: &PagedDocument,
document: &mut Document,
) -> SourceResult<()> {
let elements = typst_doc.introspector.query(&EmbedElem::elem().select());
/// Query for all [`EmbedElem`] and write them and their file specifications.
///
/// This returns a map of embedding names and references so that we can later
/// add them to the catalog's `/Names` dictionary.
pub fn write_embedded_files(
ctx: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, BTreeMap<EcoString, Ref>)> {
let mut chunk = PdfChunk::new();
let mut embedded_files = BTreeMap::default();
let elements = ctx.document.introspector.query(&EmbedElem::elem().select());
for elem in &elements {
if !ctx.options.standards.embedded_files {
// PDF/A-2 requires embedded files to be PDF/A-1 or PDF/A-2,
// which we don't currently check.
bail!(
elem.span(),
"file embeddings are not currently supported for PDF/A-2";
hint: "PDF/A-3 supports arbitrary embedded files"
);
}
let embed = elem.to_packed::<EmbedElem>().unwrap();
if embed.path.derived.len() > Str::PDFA_LIMIT {
bail!(embed.span(), "embedded file path is too long");
}
let id = embed_file(ctx, &mut chunk, embed)?;
if embedded_files.insert(embed.path.derived.clone(), id).is_some() {
bail!(
elem.span(),
"duplicate embedded file for path `{}`", embed.path.derived;
hint: "embedded file paths must be unique",
);
}
}
Ok((chunk, embedded_files))
}
/// Write the embedded file stream and its file specification.
fn embed_file(
ctx: &WithGlobalRefs,
chunk: &mut PdfChunk,
embed: &Packed<EmbedElem>,
) -> SourceResult<Ref> {
let embedded_file_stream_ref = chunk.alloc.bump();
let file_spec_dict_ref = chunk.alloc.bump();
let data = embed.data.as_slice();
let compressed = deflate(data);
let mut embedded_file = chunk.embedded_file(embedded_file_stream_ref, &compressed);
embedded_file.filter(Filter::FlateDecode);
if let Some(mime_type) = embed.mime_type(StyleChain::default()) {
if mime_type.len() > Name::PDFA_LIMIT {
bail!(embed.span(), "embedded file MIME type is too long");
}
embedded_file.subtype(Name(mime_type.as_bytes()));
} else if ctx.options.standards.pdfa {
bail!(embed.span(), "embedded files must have a MIME type in PDF/A-3");
}
let mut params = embedded_file.params();
params.size(data.len() as i32);
let (date, tz) = document_date(ctx.document.info.date, ctx.options.timestamp);
if let Some(pdf_date) = date.and_then(|date| pdf_date(date, tz)) {
params.modification_date(pdf_date);
} else if ctx.options.standards.pdfa {
bail!(
embed.span(),
"the document must have a date when embedding files in PDF/A-3";
hint: "`set document(date: none)` must not be used in this case"
);
}
params.finish();
embedded_file.finish();
let mut file_spec = chunk.file_spec(file_spec_dict_ref);
file_spec.path(Str(embed.path.derived.as_bytes()));
file_spec.unic_file(TextStr(&embed.path.derived));
file_spec
.insert(Name(b"EF"))
.dict()
.pair(Name(b"F"), embedded_file_stream_ref)
.pair(Name(b"UF"), embedded_file_stream_ref);
if ctx.options.standards.pdfa {
// PDF 2.0, but ISO 19005-3 (PDF/A-3) Annex E allows it for PDF/A-3.
file_spec.association_kind(match embed.relationship(StyleChain::default()) {
Some(EmbeddedFileRelationship::Source) => AssociationKind::Source,
Some(EmbeddedFileRelationship::Data) => AssociationKind::Data,
Some(EmbeddedFileRelationship::Alternative) => AssociationKind::Alternative,
Some(EmbeddedFileRelationship::Supplement) => AssociationKind::Supplement,
let span = embed.span();
let derived_path = &embed.path.derived;
let path = derived_path.to_string();
let mime_type =
embed.mime_type(StyleChain::default()).clone().map(|s| s.to_string());
let description = embed
.description(StyleChain::default())
.clone()
.map(|s| s.to_string());
let association_kind = match embed.relationship(StyleChain::default()) {
None => AssociationKind::Unspecified,
});
}
Some(e) => match e {
EmbeddedFileRelationship::Source => AssociationKind::Source,
EmbeddedFileRelationship::Data => AssociationKind::Data,
EmbeddedFileRelationship::Alternative => AssociationKind::Alternative,
EmbeddedFileRelationship::Supplement => AssociationKind::Supplement,
},
};
let data: Arc<dyn AsRef<[u8]> + Send + Sync> = Arc::new(embed.data.clone());
// TODO: update when new krilla version lands (https://github.com/LaurenzV/krilla/pull/203)
let compress = should_compress(&embed.data).unwrap_or(true);
if let Some(description) = embed.description(StyleChain::default()) {
if description.len() > Str::PDFA_LIMIT {
bail!(embed.span(), "embedded file description is too long");
let file = EmbeddedFile {
path,
mime_type,
description,
association_kind,
data: data.into(),
compress,
location: Some(span.into_raw().get()),
};
if document.embed_file(file).is_none() {
bail!(span, "attempted to embed file {derived_path} twice");
}
file_spec.description(TextStr(description));
}
Ok(file_spec_dict_ref)
Ok(())
}
/// Heuristically decide whether an embedded file's data should be compressed,
/// based on the file type detected by the `infer` crate.
///
/// Returns `Some(false)` for formats that are already compressed (so
/// recompressing them would gain little), and `None` when no decision can be
/// made from the detected type, letting the caller fall back to its default.
fn should_compress(data: &[u8]) -> Option<bool> {
    // `infer::get` returns `None` when the type cannot be detected at all.
    let ty = infer::get(data)?;
    match ty.matcher_type() {
        infer::MatcherType::App => None,
        infer::MatcherType::Archive => match ty.mime_type() {
            // Archive formats that are themselves compressed.
            #[rustfmt::skip]
            "application/zip"
            | "application/vnd.rar"
            | "application/gzip"
            | "application/x-bzip2"
            | "application/vnd.bzip3"
            | "application/x-7z-compressed"
            | "application/x-xz"
            | "application/vnd.ms-cab-compressed"
            | "application/vnd.debian.binary-package"
            | "application/x-compress"
            | "application/x-lzip"
            | "application/x-rpm"
            | "application/zstd"
            | "application/x-lz4"
            | "application/x-ole-storage" => Some(false),
            _ => None,
        },
        infer::MatcherType::Audio => match ty.mime_type() {
            // Lossy/compressed audio codecs.
            #[rustfmt::skip]
            "audio/mpeg"
            | "audio/m4a"
            | "audio/opus"
            | "audio/ogg"
            | "audio/x-flac"
            | "audio/amr"
            | "audio/aac"
            | "audio/x-ape" => Some(false),
            _ => None,
        },
        infer::MatcherType::Book => None,
        infer::MatcherType::Doc => None,
        infer::MatcherType::Font => None,
        infer::MatcherType::Image => match ty.mime_type() {
            // NOTE(review): every listed image type maps to `None`, which is
            // identical to the catch-all arm below — presumably these
            // already-compressed formats were meant to yield `Some(false)`;
            // confirm the intended behavior before changing it.
            #[rustfmt::skip]
            "image/jpeg"
            | "image/jp2"
            | "image/png"
            | "image/webp"
            | "image/vnd.ms-photo"
            | "image/heif"
            | "image/avif"
            | "image/jxl"
            | "image/vnd.djvu" => None,
            _ => None,
        },
        infer::MatcherType::Text => None,
        infer::MatcherType::Video => match ty.mime_type() {
            // Compressed video containers/codecs.
            #[rustfmt::skip]
            "video/mp4"
            | "video/x-m4v"
            | "video/x-matroska"
            | "video/webm"
            | "video/quicktime"
            | "video/x-flv" => Some(false),
            _ => None,
        },
        infer::MatcherType::Custom => None,
    }
}

Some files were not shown because too many files have changed in this diff Show More